From f3b25e40438b3c8383caabf4e7b89863145a9f0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Thu, 19 Oct 2023 19:40:41 +0300 Subject: [PATCH 001/859] multimodal : add BakLLaVA conversion support (#3682) --- examples/llava/llava-surgery.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/examples/llava/llava-surgery.py b/examples/llava/llava-surgery.py index 26294d9bd..515f6b58d 100644 --- a/examples/llava/llava-surgery.py +++ b/examples/llava/llava-surgery.py @@ -16,13 +16,29 @@ checkpoint = torch.load(path) mm_tensors = [k for k, v in checkpoint.items() if k.startswith("model.mm_projector")] # store these tensors in a new dictionary and torch.save them -projector = {name: checkpoint[name] for name in mm_tensors} +projector = {name: checkpoint[name].float() for name in mm_tensors} torch.save(projector, f"{args.model}/llava.projector") # remove these tensors from the checkpoint and save it again for name in mm_tensors: del checkpoint[name] +# BakLLaVA models contain CLIP tensors in it +clip_tensors = [k for k, v in checkpoint.items() if k.startswith("model.vision_tower")] +if len(clip_tensors) > 0: + clip = {name.replace("vision_tower.vision_tower.", ""): checkpoint[name].float() for name in clip_tensors} + torch.save(clip, f"{args.model}/llava.clip") + + # remove these tensors + for name in clip_tensors: + del checkpoint[name] + + # added tokens should be removed to be able to convert Mistral models + if os.path.exists(f"{args.model}/added_tokens.json"): + with open(f"{args.model}/added_tokens.json", "w") as f: + f.write("{}\n") + + torch.save(checkpoint, path) print("Done!") From e78f3ef24af4ca74e77e725644b41ae8ca3b10a5 Mon Sep 17 00:00:00 2001 From: cebtenzzre Date: Fri, 20 Oct 2023 01:32:08 -0400 Subject: [PATCH 002/859] convert : restore compat with old Falcon models (#3680) --- convert-falcon-hf-to-gguf.py | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/convert-falcon-hf-to-gguf.py b/convert-falcon-hf-to-gguf.py index 9252e1c46..1d98c51ad 100755 --- a/convert-falcon-hf-to-gguf.py +++ b/convert-falcon-hf-to-gguf.py @@ -78,7 +78,7 @@ print("gguf: loading model "+dir_model.name) with open(dir_model / "config.json", "r", encoding="utf-8") as f: hparams = json.load(f) -if hparams["architectures"][0] != "FalconForCausalLM": +if hparams["architectures"][0] not in ("RWForCausalLM", "FalconForCausalLM"): print("Model architecture not supported: " + hparams["architectures"][0]) sys.exit(1) @@ -97,7 +97,17 @@ gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) print("gguf: get model metadata") -block_count = hparams["num_hidden_layers"] +block_count = hparams.get("num_hidden_layers") +if block_count is None: + block_count = hparams["n_layer"] # old name + +n_head = hparams.get("num_attention_heads") +if n_head is None: + n_head = hparams["n_head"] # old name + +n_head_kv = hparams.get("num_kv_heads") +if n_head_kv is None: + n_head_kv = hparams.get("n_head_kv", 1) # old name gguf_writer.add_name("Falcon") gguf_writer.add_context_length(2048) # not in config.json @@ -105,11 +115,8 @@ gguf_writer.add_tensor_data_layout("jploski") # qkv tensor transform gguf_writer.add_embedding_length(hparams["hidden_size"]) gguf_writer.add_feed_forward_length(4 * hparams["hidden_size"]) gguf_writer.add_block_count(block_count) -gguf_writer.add_head_count(hparams["num_attention_heads"]) -if "num_kv_heads" in hparams: - gguf_writer.add_head_count_kv(hparams["num_kv_heads"]) -else: - 
gguf_writer.add_head_count_kv(1)
+gguf_writer.add_head_count(n_head)
+gguf_writer.add_head_count_kv(n_head_kv)
 gguf_writer.add_layer_norm_eps(hparams["layer_norm_epsilon"])
 gguf_writer.add_file_type(ftype)
 
@@ -152,10 +159,6 @@ special_vocab.add_to_gguf(gguf_writer)
 
 tensor_map = gguf.get_tensor_name_map(ARCH,block_count)
 
-# params for qkv transform
-n_head = hparams["num_attention_heads"]
-n_head_kv = hparams["num_kv_heads"] if "num_kv_heads" in hparams else 1
-
 head_dim = hparams["hidden_size"] // n_head
 
 # tensor info

From f439e506e8ae8b01df2ae2156380f8156d7553e3 Mon Sep 17 00:00:00 2001
From: Herman Semenov
Date: Fri, 20 Oct 2023 10:02:12 +0000
Subject: [PATCH 003/859] ggml : fix rope + llama minor optimizations (#3560)

* Minor fixes and fixed memleak

* Using const auto references in range-based loop C++17
---
 common/grammar-parser.cpp | 2 +-
 common/train.cpp          | 2 +-
 ggml.c                    | 3 ++-
 llama.cpp                 | 7 +++----
 4 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/common/grammar-parser.cpp b/common/grammar-parser.cpp
index 5a545a807..ff51cc803 100644
--- a/common/grammar-parser.cpp
+++ b/common/grammar-parser.cpp
@@ -399,7 +399,7 @@ namespace grammar_parser {
     void print_grammar(FILE * file, const parse_state & state) {
         try {
             std::map<uint32_t, std::string> symbol_id_names;
-            for (auto kv : state.symbol_ids) {
+            for (const auto & kv : state.symbol_ids) {
                 symbol_id_names[kv.second] = kv.first;
             }
             for (size_t i = 0, end = state.rules.size(); i < end; i++) {
diff --git a/common/train.cpp b/common/train.cpp
index 972eaefe0..154ca56e5 100644
--- a/common/train.cpp
+++ b/common/train.cpp
@@ -1425,7 +1425,7 @@ void train_opt_callback(void * vdata, int accum_step, float * sched, bool * canc
 
         int impr_plot = -(int)(1 + (opt->loss_before - opt->loss_after) * 10.0f + 0.5f);
         if (impr_plot > 0) impr_plot = 0;
-        if (std::isnan(opt->loss_before) || std::isnan(opt->loss_before)) impr_plot = 0;
+        if (std::isnan(opt->loss_before) || std::isnan(opt->loss_after)) impr_plot = 0;
         printf("%s: iter=%6d sample=%zu/%zu sched=%f loss=%f",
             __func__, opt->iter, std::min(1+train->shuffle_next_sample, train->shuffle_sample_count),
             train->shuffle_sample_count, *sched, opt->loss_after);
diff --git a/ggml.c b/ggml.c
index 630deb49d..ed157aab0 100644
--- a/ggml.c
+++ b/ggml.c
@@ -13537,7 +13537,7 @@ static void ggml_compute_forward_rope_f16(
                         dst_data[n_dims]     = GGML_FP32_TO_FP16(x2*cos_block_theta - x3*sin_block_theta);
                         dst_data[n_dims/2*3] = GGML_FP32_TO_FP16(x2*sin_block_theta + x3*cos_block_theta);
                     }
-                } if (!is_neox) {
+                } else if (!is_neox) {
                     for (int64_t i0 = 0; i0 < ne0; i0 += 2) {
                         const float cos_theta = cosf(theta);
                         const float sin_theta = sinf(theta);
@@ -19170,6 +19170,7 @@ void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname) {
 
                 if (idx == -1) {
                     fprintf(stderr, "%s: failed to find tensor, arg = %d, node = %d\n", __func__, j, i);
+                    fclose(fout);
                     return;
                 }
 
diff --git a/llama.cpp b/llama.cpp
index ed8766682..ec8ffad33 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -6324,7 +6324,6 @@ struct llm_tokenizer_bpe {
             llm_symbol sym;
             size_t char_len = std::min(word.size() - offset, (size_t) ::utf8_len(word[offset]));
             sym.text = word.c_str() + offset;
-            sym.n = 1;
             sym.n = char_len;
             offset += sym.n;
             sym.prev = index - 1;
@@ -7054,7 +7053,7 @@ static std::vector<llama_grammar_candidate> llama_grammar_reject_candidates_for_
     std::vector<llama_grammar_candidate> rejects;
 
     if (stack.empty()) {
-        for (auto tok : candidates) {
+        for (const auto & tok : candidates) {
            if (*tok.code_points != 0 || tok.partial_utf8.n_remain != 0) {
                rejects.push_back(tok);
            }
@@ -7065,7 +7064,7 @@ static 
std::vector<llama_grammar_candidate> llama_grammar_reject_candidates_for_
     const llama_grammar_element * stack_pos = stack.back();
 
     std::vector<llama_grammar_candidate> next_candidates;
-    for (auto tok : candidates) {
+    for (const auto & tok : candidates) {
         if (*tok.code_points == 0) {
             // reached end of full codepoints in token, reject iff it ended in a partial sequence
             // that cannot satisfy this position in grammar
@@ -7091,7 +7090,7 @@ static std::vector<llama_grammar_candidate> llama_grammar_reject_candidates_for_
         llama_grammar_advance_stack(rules, stack_after, next_stacks);
 
         auto next_rejects = llama_grammar_reject_candidates(rules, next_stacks, next_candidates);
-        for (auto tok : next_rejects) {
+        for (const auto & tok : next_rejects) {
             rejects.push_back({ tok.index, tok.code_points - 1, tok.partial_utf8 });
         }
 

From a0edf73bda31c7c4e649e6f07c6fd30a729929cd Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Fri, 20 Oct 2023 13:06:10 +0300
Subject: [PATCH 004/859] server : fix uninitialized sampling context (close #3685)

---
 examples/server/server.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 28b3f3f53..0471528a3 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -198,7 +198,7 @@ struct llama_server_context
     llama_model *model = nullptr;
     llama_context *ctx = nullptr;
     gpt_params params;
-    llama_sampling_context *ctx_sampling;
+    llama_sampling_context *ctx_sampling = nullptr;
     int n_ctx;
 
     bool truncated = false;

From 8cf19d60dc93809db8e51fedc811595eed9134c5 Mon Sep 17 00:00:00 2001
From: Qin Yue Chen <71813199+chenqiny@users.noreply.github.com>
Date: Fri, 20 Oct 2023 06:19:40 -0500
Subject: [PATCH 005/859] gguf : support big endian platform (#3552)

* check whether platform is 390x if yes->do not import immintrin.h

* support s390x big endian

* support --bigendian option for s390x
1. verified with baichuan7b-chat with float 16 on s390x
2. verified with baichuan7b-chat
3. verified with chinese-alpaca-2-13b-f16

* update format based on editor-config checker result

* Update convert-baichuan-hf-to-gguf.py

* 1. check in ggml.c if endianess does not match
2. update GGUF version
3. change get_pack_prefix to property
4. update information log

* always use "GGUF" as beginning of GGUF file

* Compare "GGUF" with file header char by char
1. Set GGUF_MAGIC to "GGUF" string instead of int value
2. Compare "GGUF" char by char to ensure its byte order
3. 
Move bytes swap code from convert.py to gguf.py write_tensor_data --------- Co-authored-by: Georgi Gerganov --- convert-baichuan-hf-to-gguf.py | 8 +- convert.py | 20 +++-- .../convert-llama2c-to-ggml.cpp | 2 +- ggml.c | 19 +++-- ggml.h | 5 +- gguf-py/gguf/gguf.py | 73 ++++++++++++------- gguf-py/pyproject.toml | 2 +- k_quants.c | 2 +- tests/test-double-float.cpp | 2 + 9 files changed, 84 insertions(+), 49 deletions(-) diff --git a/convert-baichuan-hf-to-gguf.py b/convert-baichuan-hf-to-gguf.py index 513a7516a..a1783f71f 100755 --- a/convert-baichuan-hf-to-gguf.py +++ b/convert-baichuan-hf-to-gguf.py @@ -76,6 +76,7 @@ def parse_args() -> argparse.Namespace: "ftype", type=int, choices=[0, 1], default=1, nargs='?', help="output format - use 0 for float32, 1 for float16", ) + parser.add_argument("--bigendian", action="store_true", help="model is executed on big endian machine") return parser.parse_args() args = parse_args() @@ -86,6 +87,11 @@ if not dir_model.is_dir(): print(f'Error: {args.model} is not a directory', file = sys.stderr) sys.exit(1) +endianess = gguf.GGUFEndian.LITTLE +if args.bigendian: + endianess = gguf.GGUFEndian.BIG +endianess_str = "Big Endian" if args.bigendian else "Little Endian" +print(f"gguf: Conversion Endianess {endianess}") # possible tensor data types # ftype == 0 -> float32 # ftype == 1 -> float16 @@ -113,7 +119,7 @@ if hparams["architectures"][0] != "BaichuanForCausalLM": num_parts = count_model_parts(dir_model) print(f"num_parts:{num_parts}\n") ARCH=gguf.MODEL_ARCH.BAICHUAN -gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) +gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH], endianess=endianess) print("gguf: get model metadata") diff --git a/convert.py b/convert.py index e9b08d344..24da25efc 100755 --- a/convert.py +++ b/convert.py @@ -803,8 +803,8 @@ def check_vocab_size(params: Params, vocab: Vocab) -> None: class OutputFile: - def __init__(self, fname_out: Path) -> None: - self.gguf = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) + def __init__(self, fname_out: Path, endianess:gguf.GGUFEndian=gguf.GGUFEndian.LITTLE) -> None: + self.gguf = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH], endianess=endianess) def add_meta_arch(self, params: Params) -> None: name = "LLaMA" @@ -875,10 +875,10 @@ class OutputFile: self.gguf.close() @staticmethod - def write_vocab_only(fname_out: Path, params: Params, vocab: Vocab, svocab: gguf.SpecialVocab) -> None: + def write_vocab_only(fname_out: Path, params: Params, vocab: Vocab, svocab: gguf.SpecialVocab, endianess:gguf.GGUFEndian=gguf.GGUFEndian.LITTLE) -> None: check_vocab_size(params, vocab) - of = OutputFile(fname_out) + of = OutputFile(fname_out, endianess=endianess) # meta data of.add_meta_arch(params) @@ -903,10 +903,10 @@ class OutputFile: return dt.quantize(arr) @staticmethod - def write_all(fname_out: Path, ftype: GGMLFileType, params: Params, model: LazyModel, vocab: Vocab, svocab: gguf.SpecialVocab, concurrency: int = DEFAULT_CONCURRENCY) -> None: + def write_all(fname_out: Path, ftype: GGMLFileType, params: Params, model: LazyModel, vocab: Vocab, svocab: gguf.SpecialVocab, concurrency: int = DEFAULT_CONCURRENCY, endianess=gguf.GGUFEndian.LITTLE) -> None: check_vocab_size(params, vocab) - of = OutputFile(fname_out) + of = OutputFile(fname_out, endianess=endianess) # meta data of.add_meta_arch(params) @@ -1123,8 +1123,9 @@ def main(args_in: list[str] | None = None) -> None: parser.add_argument("--vocabtype", choices=["spm", "bpe"], help="vocab format 
(default: spm)", default="spm") parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default = DEFAULT_CONCURRENCY) - args = parser.parse_args(args_in) + parser.add_argument("--bigendian", action="store_true", help="model is executed on big endian machine") + args = parser.parse_args(args_in) if args.dump_single: model_plus = lazy_load_file(args.model) do_dump_model(model_plus) @@ -1138,6 +1139,9 @@ def main(args_in: list[str] | None = None) -> None: if args.dump: do_dump_model(model_plus) return + endianess = gguf.GGUFEndian.LITTLE + if args.bigendian: + endianess = gguf.GGUFEndian.BIG params = Params.load(model_plus) if params.n_ctx == -1: @@ -1185,7 +1189,7 @@ def main(args_in: list[str] | None = None) -> None: params.ftype = ftype print(f"Writing {outfile}, format {ftype}") - OutputFile.write_all(outfile, ftype, params, model, vocab, special_vocab, concurrency = args.concurrency) + OutputFile.write_all(outfile, ftype, params, model, vocab, special_vocab, concurrency = args.concurrency, endianess=endianess) print(f"Wrote {outfile}") diff --git a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp index c291f0adf..cae3bf3c3 100644 --- a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp +++ b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp @@ -536,7 +536,7 @@ static bool is_ggml_file(const char * filename) { if (file.size < 4) { return false; } - uint32_t magic = file.read_u32(); + std::string magic = file.read_string(4); return magic == GGUF_MAGIC; } diff --git a/ggml.c b/ggml.c index ed157aab0..49f3b7aba 100644 --- a/ggml.c +++ b/ggml.c @@ -20845,7 +20845,7 @@ struct gguf_kv { }; struct gguf_header { - uint32_t magic; + char magic[4]; uint32_t version; uint64_t n_tensors; // GGUFv2 uint64_t n_kv; // GGUFv2 @@ -20915,7 +20915,7 @@ static bool gguf_fread_str_v1(FILE * file, struct gguf_str * p, size_t * offset) struct gguf_context * gguf_init_empty(void) { struct gguf_context * ctx = GGML_ALIGNED_MALLOC(sizeof(struct gguf_context)); - ctx->header.magic = GGUF_MAGIC; + memcpy(ctx->header.magic, GGUF_MAGIC, sizeof(ctx->header.magic)); ctx->header.version = GGUF_VERSION; ctx->header.n_tensors = 0; ctx->header.n_kv = 0; @@ -20941,16 +20941,18 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p // offset from start of file size_t offset = 0; - uint32_t magic = 0; + char magic[4]; // check the magic before making allocations { gguf_fread_el(file, &magic, sizeof(magic), &offset); - if (magic != GGUF_MAGIC) { - fprintf(stderr, "%s: invalid magic number %08x\n", __func__, magic); - fclose(file); - return NULL; + for (uint32_t i = 0; i < sizeof(magic); i++) { + if (magic[i] != GGUF_MAGIC[i]) { + fprintf(stderr, "%s: invalid magic characters %s.\n", __func__, magic); + fclose(file); + return NULL; + } } } @@ -20960,7 +20962,8 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p // read the header { - ctx->header.magic = magic; + strncpy(ctx->header.magic, magic, 4); + ctx->kv = NULL; ctx->infos = NULL; diff --git a/ggml.h b/ggml.h index 6e35888e9..16aaf169e 100644 --- a/ggml.h +++ b/ggml.h @@ -231,8 +231,9 @@ #define GGML_EXIT_SUCCESS 0 #define GGML_EXIT_ABORTED 1 -#define GGUF_MAGIC 0x46554747 // "GGUF" -#define GGUF_VERSION 2 +#define GGUF_MAGIC "GGUF" + +#define GGUF_VERSION 3 
#define GGUF_DEFAULT_ALIGNMENT 32 diff --git a/gguf-py/gguf/gguf.py b/gguf-py/gguf/gguf.py index 557ce7ac0..072c839c4 100644 --- a/gguf-py/gguf/gguf.py +++ b/gguf-py/gguf/gguf.py @@ -19,9 +19,10 @@ import numpy as np # GGUF_MAGIC = 0x46554747 -GGUF_VERSION = 2 +GGUF_VERSION = 3 GGUF_DEFAULT_ALIGNMENT = 32 + # general KEY_GENERAL_ARCHITECTURE = "general.architecture" KEY_GENERAL_QUANTIZATION_VERSION = "general.quantization_version" @@ -597,6 +598,10 @@ class GGMLQuantizationType(IntEnum): Q6_K = 14 Q8_K = 15 +class GGUFEndian(IntEnum): + LITTLE = 0 + BIG = 1 + class GGUFValueType(IntEnum): UINT8 = 0 @@ -644,18 +649,41 @@ class GGUFWriter: temp_file: tempfile.SpooledTemporaryFile[bytes] | None = None tensors: list[tuple[np.ndarray[Any, Any], int]] - def __init__(self, path: os.PathLike[str] | str, arch: str, use_temp_file = True): + @property + def pack_prefix(self): + if self.endianess==GGUFEndian.LITTLE: + return "<" + else: + return ">" + + def __init__(self, path: os.PathLike[str] | str, arch: str, use_temp_file = True, endianess=GGUFEndian.LITTLE): self.fout = open(path, "wb") self.arch = arch + self.endianess = endianess + self._simple_value_packing = { + GGUFValueType.UINT8: f"{self.pack_prefix}B", + GGUFValueType.INT8: f"{self.pack_prefix}b", + GGUFValueType.UINT16: f"{self.pack_prefix}H", + GGUFValueType.INT16: f"{self.pack_prefix}h", + GGUFValueType.UINT32: f"{self.pack_prefix}I", + GGUFValueType.INT32: f"{self.pack_prefix}i", + GGUFValueType.FLOAT32: f"{self.pack_prefix}f", + GGUFValueType.UINT64: f"{self.pack_prefix}Q", + GGUFValueType.INT64: f"{self.pack_prefix}q", + GGUFValueType.FLOAT64: f"{self.pack_prefix}d", + GGUFValueType.BOOL: "?" , + } self.add_architecture() self.use_temp_file = use_temp_file self.tensors = [] + endianess_str = "Big Endian" if self.endianess == GGUFEndian.BIG else "Little Endian" + print(f"This gguf file is for {endianess_str} only") def write_header_to_file(self): self.fout.write(struct.pack(" 0: ltype = GGUFValueType.get_type(val[0]) if not all(GGUFValueType.get_type(i) is ltype for i in val[1:]): raise ValueError("All items in a GGUF array should be of the same type") - self.kv_data += struct.pack(""] packages = [ diff --git a/k_quants.c b/k_quants.c index e168a87bb..801941fbe 100644 --- a/k_quants.c +++ b/k_quants.c @@ -46,7 +46,7 @@ inline static int32_t vaddvq_s32(int32x4_t v) { #if defined(_MSC_VER) || defined(__MINGW32__) #include #else -#if !defined(__riscv) +#if !defined(__riscv) && !defined(__s390__) #include #endif #endif diff --git a/tests/test-double-float.cpp b/tests/test-double-float.cpp index b506f273f..afd7bf77f 100644 --- a/tests/test-double-float.cpp +++ b/tests/test-double-float.cpp @@ -4,7 +4,9 @@ #undef NDEBUG #include +#if !defined(__riscv) && !defined(__s390__) #include +#endif #include #include #include From d1031cf49c3b958b915fd558e23453471c29ac33 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 20 Oct 2023 21:07:23 +0300 Subject: [PATCH 006/859] sampling : refactor init to use llama_sampling_params (#3696) * sampling : refactor init to use llama_sampling_params * llama : combine repetition, frequency and presence penalties in 1 call * examples : remove embd-input and gptneox-wip * sampling : rename penalty params + reduce size of "prev" vector * sampling : add llama_sampling_print helper * sampling : hide prev behind API and apply #3661 ggml-ci --- Makefile | 9 +- README.md | 1 - common/common.cpp | 69 +- common/common.h | 3 +- common/sampling.cpp | 73 +- common/sampling.h | 32 +- examples/CMakeLists.txt | 32 +- 
examples/embd-input/.gitignore | 4 - examples/embd-input/CMakeLists.txt | 17 - examples/embd-input/README.md | 63 -- examples/embd-input/embd-input-lib.cpp | 221 ----- examples/embd-input/embd-input-test.cpp | 35 - examples/embd-input/embd-input.h | 27 - examples/embd-input/embd_input.py | 72 -- examples/embd-input/llava.py | 71 -- examples/embd-input/minigpt4.py | 129 --- examples/embd-input/panda_gpt.py | 99 -- examples/gptneox-wip/cmpnct_gpt2bpe.hpp | 1133 ----------------------- examples/gptneox-wip/falcon-main.cpp | 1111 ---------------------- examples/gptneox-wip/gptneox-main.cpp | 1083 ---------------------- examples/infill/CMakeLists.txt | 2 +- examples/infill/infill.cpp | 67 +- examples/llava/llava-utils.h | 58 +- examples/main/main.cpp | 28 +- examples/parallel/parallel.cpp | 4 +- examples/server/server.cpp | 227 +++-- examples/speculative/speculative.cpp | 12 +- llama.cpp | 94 +- llama.h | 16 +- tests/test-sampling.cpp | 75 +- 30 files changed, 365 insertions(+), 4502 deletions(-) delete mode 100644 examples/embd-input/.gitignore delete mode 100644 examples/embd-input/CMakeLists.txt delete mode 100644 examples/embd-input/README.md delete mode 100644 examples/embd-input/embd-input-lib.cpp delete mode 100644 examples/embd-input/embd-input-test.cpp delete mode 100644 examples/embd-input/embd-input.h delete mode 100755 examples/embd-input/embd_input.py delete mode 100755 examples/embd-input/llava.py delete mode 100755 examples/embd-input/minigpt4.py delete mode 100755 examples/embd-input/panda_gpt.py delete mode 100644 examples/gptneox-wip/cmpnct_gpt2bpe.hpp delete mode 100644 examples/gptneox-wip/falcon-main.cpp delete mode 100644 examples/gptneox-wip/gptneox-main.cpp diff --git a/Makefile b/Makefile index 04104bee8..325ae747b 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ # Define the default target now so that it is always the first target BUILD_TARGETS = \ main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ - simple batched batched-bench save-load-state server embd-input-test gguf llama-bench llava baby-llama beam-search \ + simple batched batched-bench save-load-state server gguf llama-bench llava baby-llama beam-search \ speculative infill benchmark-matmult parallel finetune export-lora tests/test-c.o # Binaries only useful for tests @@ -608,13 +608,6 @@ save-load-state: examples/save-load-state/save-load-state.cpp build-info.h ggml. server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp build-info.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) -$(LIB_PRE)embdinput$(DSO_EXT): examples/embd-input/embd-input.h examples/embd-input/embd-input-lib.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) --shared $(CXXFLAGS) $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) - - -embd-input-test: $(LIB_PRE)embdinput$(DSO_EXT) examples/embd-input/embd-input-test.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %$(DSO_EXT),$(filter-out %.h,$(filter-out %.hpp,$^))) -o $@ $(LDFLAGS) -L. 
-lembdinput - gguf: examples/gguf/gguf.cpp ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) diff --git a/README.md b/README.md index ce63c6f0e..49bb556a8 100644 --- a/README.md +++ b/README.md @@ -962,7 +962,6 @@ docker run --gpus all -v /path/to/models:/models local/llama.cpp:light-cuda -m / - [main](./examples/main/README.md) - [server](./examples/server/README.md) -- [embd-input](./examples/embd-input/README.md) - [jeopardy](./examples/jeopardy/README.md) - [BLIS](./docs/BLIS.md) - [Performance troubleshooting](./docs/token_generation_performance_tips.md) diff --git a/common/common.cpp b/common/common.cpp index ce14d66b8..2ef902bd5 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -107,7 +107,7 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { std::string arg; gpt_params default_params; const std::string arg_prefix = "--"; - llama_sampling_params & sparams = params.sampling_params; + llama_sampling_params & sparams = params.sparams; for (int i = 1; i < argc; i++) { arg = argv[i]; @@ -241,25 +241,26 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - sparams.repeat_last_n = std::stoi(argv[i]); + sparams.penalty_last_n = std::stoi(argv[i]); + sparams.n_prev = std::max(sparams.n_prev, sparams.penalty_last_n); } else if (arg == "--repeat-penalty") { if (++i >= argc) { invalid_param = true; break; } - sparams.repeat_penalty = std::stof(argv[i]); + sparams.penalty_repeat = std::stof(argv[i]); } else if (arg == "--frequency-penalty") { if (++i >= argc) { invalid_param = true; break; } - sparams.frequency_penalty = std::stof(argv[i]); + sparams.penalty_freq = std::stof(argv[i]); } else if (arg == "--presence-penalty") { if (++i >= argc) { invalid_param = true; break; } - sparams.presence_penalty = std::stof(argv[i]); + sparams.penalty_present = std::stof(argv[i]); } else if (arg == "--mirostat") { if (++i >= argc) { invalid_param = true; @@ -572,7 +573,7 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - params.grammar = argv[i]; + sparams.grammar = argv[i]; } else if (arg == "--grammar-file") { if (++i >= argc) { invalid_param = true; @@ -587,7 +588,7 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { std::copy( std::istreambuf_iterator(file), std::istreambuf_iterator(), - std::back_inserter(params.grammar) + std::back_inserter(sparams.grammar) ); #ifndef LOG_DISABLE_LOGS // Parse args for logging parameters @@ -640,7 +641,7 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { } void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { - const llama_sampling_params & sparams = params.sampling_params; + const llama_sampling_params & sparams = params.sparams; printf("usage: %s [options]\n", argv[0]); printf("\n"); @@ -678,10 +679,10 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --top-p N top-p sampling (default: %.1f, 1.0 = disabled)\n", (double)sparams.top_p); printf(" --tfs N tail free sampling, parameter z (default: %.1f, 1.0 = disabled)\n", (double)sparams.tfs_z); printf(" --typical N locally typical sampling, parameter p (default: %.1f, 1.0 = disabled)\n", (double)sparams.typical_p); - printf(" --repeat-last-n N last n tokens to consider for penalize (default: %d, 0 = disabled, -1 = ctx_size)\n", sparams.repeat_last_n); - printf(" --repeat-penalty N penalize repeat sequence of tokens (default: %.1f, 1.0 = disabled)\n", 
(double)sparams.repeat_penalty); - printf(" --presence-penalty N repeat alpha presence penalty (default: %.1f, 0.0 = disabled)\n", (double)sparams.presence_penalty); - printf(" --frequency-penalty N repeat alpha frequency penalty (default: %.1f, 0.0 = disabled)\n", (double)sparams.frequency_penalty); + printf(" --repeat-last-n N last n tokens to consider for penalize (default: %d, 0 = disabled, -1 = ctx_size)\n", sparams.penalty_last_n); + printf(" --repeat-penalty N penalize repeat sequence of tokens (default: %.1f, 1.0 = disabled)\n", (double)sparams.penalty_repeat); + printf(" --presence-penalty N repeat alpha presence penalty (default: %.1f, 0.0 = disabled)\n", (double)sparams.penalty_present); + printf(" --frequency-penalty N repeat alpha frequency penalty (default: %.1f, 0.0 = disabled)\n", (double)sparams.penalty_freq); printf(" --mirostat N use Mirostat sampling.\n"); printf(" Top K, Nucleus, Tail Free and Locally Typical samplers are ignored if used.\n"); printf(" (default: %d, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0)\n", sparams.mirostat); @@ -878,7 +879,7 @@ std::tuple llama_init_from_gpt_par } if (params.ignore_eos) { - params.sampling_params.logit_bias[llama_token_eos(lctx)] = -INFINITY; + params.sparams.logit_bias[llama_token_eos(lctx)] = -INFINITY; } { @@ -1123,28 +1124,28 @@ std::string get_sortable_timestamp() { void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const llama_context * lctx, const std::string & timestamp, const std::vector & prompt_tokens, const char * model_desc) { - const llama_sampling_params & sparams = params.sampling_params; + const llama_sampling_params & sparams = params.sparams; fprintf(stream, "build_commit: %s\n", BUILD_COMMIT); fprintf(stream, "build_number: %d\n", BUILD_NUMBER); - fprintf(stream, "cpu_has_arm_fma: %s\n", ggml_cpu_has_arm_fma() ? "true" : "false"); - fprintf(stream, "cpu_has_avx: %s\n", ggml_cpu_has_avx() ? "true" : "false"); - fprintf(stream, "cpu_has_avx2: %s\n", ggml_cpu_has_avx2() ? "true" : "false"); - fprintf(stream, "cpu_has_avx512: %s\n", ggml_cpu_has_avx512() ? "true" : "false"); + fprintf(stream, "cpu_has_arm_fma: %s\n", ggml_cpu_has_arm_fma() ? "true" : "false"); + fprintf(stream, "cpu_has_avx: %s\n", ggml_cpu_has_avx() ? "true" : "false"); + fprintf(stream, "cpu_has_avx2: %s\n", ggml_cpu_has_avx2() ? "true" : "false"); + fprintf(stream, "cpu_has_avx512: %s\n", ggml_cpu_has_avx512() ? "true" : "false"); fprintf(stream, "cpu_has_avx512_vbmi: %s\n", ggml_cpu_has_avx512_vbmi() ? "true" : "false"); fprintf(stream, "cpu_has_avx512_vnni: %s\n", ggml_cpu_has_avx512_vnni() ? "true" : "false"); - fprintf(stream, "cpu_has_blas: %s\n", ggml_cpu_has_blas() ? "true" : "false"); - fprintf(stream, "cpu_has_cublas: %s\n", ggml_cpu_has_cublas() ? "true" : "false"); - fprintf(stream, "cpu_has_clblast: %s\n", ggml_cpu_has_clblast() ? "true" : "false"); - fprintf(stream, "cpu_has_fma: %s\n", ggml_cpu_has_fma() ? "true" : "false"); - fprintf(stream, "cpu_has_gpublas: %s\n", ggml_cpu_has_gpublas() ? "true" : "false"); - fprintf(stream, "cpu_has_neon: %s\n", ggml_cpu_has_neon() ? "true" : "false"); - fprintf(stream, "cpu_has_f16c: %s\n", ggml_cpu_has_f16c() ? "true" : "false"); - fprintf(stream, "cpu_has_fp16_va: %s\n", ggml_cpu_has_fp16_va() ? "true" : "false"); - fprintf(stream, "cpu_has_wasm_simd: %s\n", ggml_cpu_has_wasm_simd() ? "true" : "false"); - fprintf(stream, "cpu_has_blas: %s\n", ggml_cpu_has_blas() ? "true" : "false"); - fprintf(stream, "cpu_has_sse3: %s\n", ggml_cpu_has_sse3() ? 
"true" : "false"); - fprintf(stream, "cpu_has_vsx: %s\n", ggml_cpu_has_vsx() ? "true" : "false"); + fprintf(stream, "cpu_has_blas: %s\n", ggml_cpu_has_blas() ? "true" : "false"); + fprintf(stream, "cpu_has_cublas: %s\n", ggml_cpu_has_cublas() ? "true" : "false"); + fprintf(stream, "cpu_has_clblast: %s\n", ggml_cpu_has_clblast() ? "true" : "false"); + fprintf(stream, "cpu_has_fma: %s\n", ggml_cpu_has_fma() ? "true" : "false"); + fprintf(stream, "cpu_has_gpublas: %s\n", ggml_cpu_has_gpublas() ? "true" : "false"); + fprintf(stream, "cpu_has_neon: %s\n", ggml_cpu_has_neon() ? "true" : "false"); + fprintf(stream, "cpu_has_f16c: %s\n", ggml_cpu_has_f16c() ? "true" : "false"); + fprintf(stream, "cpu_has_fp16_va: %s\n", ggml_cpu_has_fp16_va() ? "true" : "false"); + fprintf(stream, "cpu_has_wasm_simd: %s\n", ggml_cpu_has_wasm_simd() ? "true" : "false"); + fprintf(stream, "cpu_has_blas: %s\n", ggml_cpu_has_blas() ? "true" : "false"); + fprintf(stream, "cpu_has_sse3: %s\n", ggml_cpu_has_sse3() ? "true" : "false"); + fprintf(stream, "cpu_has_vsx: %s\n", ggml_cpu_has_vsx() ? "true" : "false"); #ifdef NDEBUG fprintf(stream, "debug: false\n"); @@ -1178,8 +1179,8 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "ctx_size: %d # default: 512\n", params.n_ctx); fprintf(stream, "escape: %s # default: false\n", params.escape ? "true" : "false"); fprintf(stream, "file: # never logged, see prompt instead. Can still be specified for input.\n"); - fprintf(stream, "frequency_penalty: %f # default: 0.0 \n", sparams.frequency_penalty); - dump_string_yaml_multiline(stream, "grammar", params.grammar.c_str()); + fprintf(stream, "frequency_penalty: %f # default: 0.0 \n", sparams.penalty_freq); + dump_string_yaml_multiline(stream, "grammar", sparams.grammar.c_str()); fprintf(stream, "grammar-file: # never logged, see grammar instead. Can still be specified for input.\n"); fprintf(stream, "hellaswag: %s # default: false\n", params.hellaswag ? "true" : "false"); fprintf(stream, "hellaswag_tasks: %zu # default: 400\n", params.hellaswag_tasks); @@ -1238,14 +1239,14 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "numa: %s # default: false\n", params.numa ? "true" : "false"); fprintf(stream, "ppl_output_type: %d # default: 0\n", params.ppl_output_type); fprintf(stream, "ppl_stride: %d # default: 0\n", params.ppl_stride); - fprintf(stream, "presence_penalty: %f # default: 0.0\n", sparams.presence_penalty); + fprintf(stream, "presence_penalty: %f # default: 0.0\n", sparams.penalty_present); dump_string_yaml_multiline(stream, "prompt", params.prompt.c_str()); fprintf(stream, "prompt_cache: %s\n", params.path_prompt_cache.c_str()); fprintf(stream, "prompt_cache_all: %s # default: false\n", params.prompt_cache_all ? "true" : "false"); fprintf(stream, "prompt_cache_ro: %s # default: false\n", params.prompt_cache_ro ? "true" : "false"); dump_vector_int_yaml(stream, "prompt_tokens", prompt_tokens); fprintf(stream, "random_prompt: %s # default: false\n", params.random_prompt ? 
"true" : "false"); - fprintf(stream, "repeat_penalty: %f # default: 1.1\n", sparams.repeat_penalty); + fprintf(stream, "repeat_penalty: %f # default: 1.1\n", sparams.penalty_repeat); fprintf(stream, "reverse_prompt:\n"); for (std::string ap : params.antiprompt) { diff --git a/common/common.h b/common/common.h index 65d3d20cd..84523a4fb 100644 --- a/common/common.h +++ b/common/common.h @@ -56,7 +56,7 @@ struct gpt_params { float rope_freq_scale = 0.0f; // RoPE frequency scaling factor // // sampling parameters - struct llama_sampling_params sampling_params; + struct llama_sampling_params sparams; std::string model = "models/7B/ggml-model-f16.gguf"; // model path std::string model_draft = ""; // draft model for speculative decoding @@ -66,7 +66,6 @@ struct gpt_params { std::string path_prompt_cache = ""; // path to file for saving/loading prompt eval state std::string input_prefix = ""; // string to prefix user inputs with std::string input_suffix = ""; // string to suffix user inputs with - std::string grammar = ""; // optional BNF-like grammar to constrain sampling std::vector antiprompt; // string upon seeing which more user input is prompted std::string logdir = ""; // directory in which to save YAML log files diff --git a/common/sampling.cpp b/common/sampling.cpp index 0b2466581..6f0af3c4a 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -1,9 +1,9 @@ #include "sampling.h" -struct llama_sampling_context * llama_sampling_init(const struct gpt_params & params) { +struct llama_sampling_context * llama_sampling_init(const struct llama_sampling_params & params) { struct llama_sampling_context * result = new llama_sampling_context(); - result->params = params.sampling_params; + result->params = params; result->grammar = nullptr; // if there is a grammar, parse it @@ -23,7 +23,7 @@ struct llama_sampling_context * llama_sampling_init(const struct gpt_params & pa grammar_rules.size(), result->parsed_grammar.symbol_ids.at("root")); } - result->prev.resize(params.n_ctx); + result->prev.resize(params.n_prev); return result; } @@ -66,25 +66,56 @@ void llama_sampling_cp(llama_sampling_context * src, llama_sampling_context * ds dst->prev = src->prev; } +llama_token llama_sampling_last(llama_sampling_context * ctx) { + return ctx->prev.back(); +} + +std::string llama_sampling_prev_str(llama_sampling_context * ctx_sampling, llama_context * ctx_main, int n) { + const int size = ctx_sampling->prev.size(); + + n = std::min(n, size); + + std::string result; + + for (int i = size - n; i < size; i++) { + result += llama_token_to_piece(ctx_main, ctx_sampling->prev[i]); + } + + return result; +} + +std::string llama_sampling_print(const llama_sampling_params & params) { + char result[1024]; + + snprintf(result, sizeof(result), + "\trepeat_last_n = %d, repeat_penalty = %.3f, frequency_penalty = %.3f, presence_penalty = %.3f\n" + "\ttop_k = %d, tfs_z = %.3f, top_p = %.3f, typical_p = %.3f, temp = %.3f\n" + "\tmirostat = %d, mirostat_lr = %.3f, mirostat_ent = %.3f", + params.penalty_last_n, params.penalty_repeat, params.penalty_freq, params.penalty_present, + params.top_k, params.tfs_z, params.top_p, params.typical_p, params.temp, + params.mirostat, params.mirostat_eta, params.mirostat_tau); + + return std::string(result); +} + llama_token llama_sampling_sample( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, struct llama_context * ctx_cfg, const int idx) { - const int n_ctx = llama_n_ctx(ctx_main); - const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); - const 
llama_sampling_params & params = ctx_sampling->params; + const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); + const float temp = params.temp; const int32_t top_k = params.top_k <= 0 ? n_vocab : params.top_k; const float top_p = params.top_p; const float tfs_z = params.tfs_z; const float typical_p = params.typical_p; - const int32_t repeat_last_n = params.repeat_last_n < 0 ? n_ctx : params.repeat_last_n; - const float repeat_penalty = params.repeat_penalty; - const float alpha_presence = params.presence_penalty; - const float alpha_frequency = params.frequency_penalty; + const int32_t penalty_last_n = params.penalty_last_n < 0 ? params.n_prev : params.penalty_last_n; + const float penalty_repeat = params.penalty_repeat; + const float penalty_freq = params.penalty_freq; + const float penalty_present = params.penalty_present; const int mirostat = params.mirostat; const float mirostat_tau = params.mirostat_tau; const float mirostat_eta = params.mirostat_eta; @@ -97,7 +128,7 @@ llama_token llama_sampling_sample( float * logits = llama_get_logits_ith(ctx_main, idx); - // Apply params.logit_bias map + // apply params.logit_bias map for (auto it = params.logit_bias.begin(); it != params.logit_bias.end(); it++) { logits[it->first] += it->second; } @@ -117,14 +148,10 @@ llama_token llama_sampling_sample( // apply penalties if (!prev.empty()) { const float nl_logit = logits[llama_token_nl(ctx_main)]; - const int last_n_repeat = std::min(std::min((int)prev.size(), repeat_last_n), n_ctx); - llama_sample_repetition_penalty(ctx_main, &cur_p, - prev.data() + prev.size() - last_n_repeat, - last_n_repeat, repeat_penalty); - llama_sample_frequency_and_presence_penalties(ctx_main, &cur_p, - prev.data() + prev.size() - last_n_repeat, - last_n_repeat, alpha_frequency, alpha_presence); + llama_sample_repetition_penalties(ctx_main, &cur_p, + prev.data() + prev.size() - penalty_last_n, + penalty_last_n, penalty_repeat, penalty_freq, penalty_present); if (!penalize_nl) { for (size_t idx = 0; idx < cur_p.size; idx++) { @@ -141,7 +168,7 @@ llama_token llama_sampling_sample( } if (temp <= 0) { - // Greedy sampling + // greedy sampling id = llama_sample_token_greedy(ctx_main, &cur_p); } else { if (mirostat == 1) { @@ -152,8 +179,9 @@ llama_token llama_sampling_sample( llama_sample_temp(ctx_main, &cur_p, temp); id = llama_sample_token_mirostat_v2(ctx_main, &cur_p, mirostat_tau, mirostat_eta, &ctx_sampling->mirostat_mu); } else { - // Temperature sampling + // temperature sampling size_t min_keep = std::max(1, params.n_probs); + llama_sample_top_k (ctx_main, &cur_p, top_k, min_keep); llama_sample_tail_free(ctx_main, &cur_p, tfs_z, min_keep); llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); @@ -183,11 +211,12 @@ llama_token llama_sampling_sample( void llama_sampling_accept( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, - llama_token id) { + llama_token id, + bool apply_grammar) { ctx_sampling->prev.erase(ctx_sampling->prev.begin()); ctx_sampling->prev.push_back(id); - if (ctx_sampling->grammar != NULL) { + if (ctx_sampling->grammar != NULL && apply_grammar) { llama_grammar_accept_token(ctx_main, ctx_sampling->grammar, id); } } diff --git a/common/sampling.h b/common/sampling.h index 50afcbc12..62ea6d4cf 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -10,30 +10,30 @@ // sampling parameters typedef struct llama_sampling_params { + int32_t n_prev = 64; // number of previous tokens to remember + int32_t n_probs = 0; // if greater than 0, output the 
probabilities of top n_probs tokens. int32_t top_k = 40; // <= 0 to use vocab size float top_p = 0.95f; // 1.0 = disabled float tfs_z = 1.00f; // 1.0 = disabled float typical_p = 1.00f; // 1.0 = disabled float temp = 0.80f; // 1.0 = disabled - float repeat_penalty = 1.10f; // 1.0 = disabled - int32_t repeat_last_n = 64; // last n tokens to penalize (0 = disable penalty, -1 = context size) - float frequency_penalty = 0.00f; // 0.0 = disabled - float presence_penalty = 0.00f; // 0.0 = disabled + int32_t penalty_last_n = 64; // last n tokens to penalize (0 = disable penalty, -1 = context size) + float penalty_repeat = 1.10f; // 1.0 = disabled + float penalty_freq = 0.00f; // 0.0 = disabled + float penalty_present = 0.00f; // 0.0 = disabled int32_t mirostat = 0; // 0 = disabled, 1 = mirostat, 2 = mirostat 2.0 float mirostat_tau = 5.00f; // target entropy float mirostat_eta = 0.10f; // learning rate - bool penalize_nl = true; // consider newlines as a repeatable token - int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. + std::string grammar; // optional BNF-like grammar to constrain sampling // Classifier-Free Guidance // https://arxiv.org/abs/2306.17806 - std::string cfg_negative_prompt; // string to help guidance - float cfg_scale = 1.f; // How strong is guidance + std::string cfg_negative_prompt; // string to help guidance + float cfg_scale = 1.f; // how strong is guidance std::unordered_map logit_bias; // logit bias for specific tokens - } llama_sampling_params; // general sampler context @@ -58,7 +58,7 @@ struct llama_sampling_context { #include "common.h" // Create a new sampling context instance. -struct llama_sampling_context * llama_sampling_init(const struct gpt_params & params); +struct llama_sampling_context * llama_sampling_init(const struct llama_sampling_params & params); void llama_sampling_free(struct llama_sampling_context * ctx); @@ -70,6 +70,15 @@ void llama_sampling_reset(llama_sampling_context * ctx); // Copy the sampler context void llama_sampling_cp(llama_sampling_context * src, llama_sampling_context * dst); +// Get the last sampled token +llama_token llama_sampling_last(llama_sampling_context * ctx); + +// Get a string representation of the last sampled tokens +std::string llama_sampling_prev_str(llama_sampling_context * ctx_sampling, llama_context * ctx_main, int n); + +// Print sampling parameters into a string +std::string llama_sampling_print(const llama_sampling_params & params); + // this is a common sampling function used across the examples for convenience // it can serve as a starting point for implementing your own sampling function // Note: When using multiple sequences, it is the caller's responsibility to call @@ -96,4 +105,5 @@ llama_token llama_sampling_sample( void llama_sampling_accept( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, - llama_token id); + llama_token id, + bool apply_grammar); diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index e16c65f7c..75b8df676 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -12,26 +12,26 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR}) if (EMSCRIPTEN) else() - add_subdirectory(main) - add_subdirectory(quantize) - add_subdirectory(quantize-stats) - add_subdirectory(perplexity) - add_subdirectory(embedding) - add_subdirectory(save-load-state) - add_subdirectory(benchmark) add_subdirectory(baby-llama) - add_subdirectory(train-text-from-scratch) - add_subdirectory(finetune) - 
add_subdirectory(convert-llama2c-to-ggml) - add_subdirectory(simple) add_subdirectory(batched) add_subdirectory(batched-bench) - add_subdirectory(speculative) - add_subdirectory(parallel) - add_subdirectory(embd-input) - add_subdirectory(llava) - add_subdirectory(llama-bench) add_subdirectory(beam-search) + add_subdirectory(benchmark) + add_subdirectory(convert-llama2c-to-ggml) + add_subdirectory(embedding) + add_subdirectory(finetune) + add_subdirectory(infill) + add_subdirectory(llama-bench) + add_subdirectory(llava) + add_subdirectory(main) + add_subdirectory(parallel) + add_subdirectory(perplexity) + add_subdirectory(quantize) + add_subdirectory(quantize-stats) + add_subdirectory(save-load-state) + add_subdirectory(simple) + add_subdirectory(speculative) + add_subdirectory(train-text-from-scratch) if (LLAMA_METAL) add_subdirectory(metal) endif() diff --git a/examples/embd-input/.gitignore b/examples/embd-input/.gitignore deleted file mode 100644 index 87ef68771..000000000 --- a/examples/embd-input/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -PandaGPT -MiniGPT-4 -*.pth - diff --git a/examples/embd-input/CMakeLists.txt b/examples/embd-input/CMakeLists.txt deleted file mode 100644 index 5bbb1ea02..000000000 --- a/examples/embd-input/CMakeLists.txt +++ /dev/null @@ -1,17 +0,0 @@ -set(TARGET embdinput) -add_library(${TARGET} embd-input-lib.cpp embd-input.h) -install(TARGETS ${TARGET} LIBRARY) -target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() - -set(TARGET embd-input-test) -add_executable(${TARGET} embd-input-test.cpp) -install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TARGET} PRIVATE common llama embdinput ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/embd-input/README.md b/examples/embd-input/README.md deleted file mode 100644 index 5c4c75ea7..000000000 --- a/examples/embd-input/README.md +++ /dev/null @@ -1,63 +0,0 @@ -### Examples for input embedding directly - -## Requirement -build `libembdinput.so` -run the following comman in main dir (../../). -``` -make -``` - -## [LLaVA](https://github.com/haotian-liu/LLaVA/) example (llava.py) - -1. Obtian LLaVA model (following https://github.com/haotian-liu/LLaVA/ , use https://huggingface.co/liuhaotian/LLaVA-13b-delta-v1-1/). -2. Convert it to ggml format. -3. `llava_projection.pth` is [pytorch_model-00003-of-00003.bin](https://huggingface.co/liuhaotian/LLaVA-13b-delta-v1-1/blob/main/pytorch_model-00003-of-00003.bin). - -``` -import torch - -bin_path = "../LLaVA-13b-delta-v1-1/pytorch_model-00003-of-00003.bin" -pth_path = "./examples/embd-input/llava_projection.pth" - -dic = torch.load(bin_path) -used_key = ["model.mm_projector.weight","model.mm_projector.bias"] -torch.save({k: dic[k] for k in used_key}, pth_path) -``` -4. Check the path of LLaVA model and `llava_projection.pth` in `llava.py`. - - -## [PandaGPT](https://github.com/yxuansu/PandaGPT) example (panda_gpt.py) - -1. Obtian PandaGPT lora model from https://github.com/yxuansu/PandaGPT. Rename the file to `adapter_model.bin`. Use [convert-lora-to-ggml.py](../../convert-lora-to-ggml.py) to convert it to ggml format. 
-The `adapter_config.json` is -``` -{ - "peft_type": "LORA", - "fan_in_fan_out": false, - "bias": null, - "modules_to_save": null, - "r": 32, - "lora_alpha": 32, - "lora_dropout": 0.1, - "target_modules": ["q_proj", "k_proj", "v_proj", "o_proj"] -} -``` -2. Papare the `vicuna` v0 model. -3. Obtain the [ImageBind](https://dl.fbaipublicfiles.com/imagebind/imagebind_huge.pth) model. -4. Clone the PandaGPT source. -``` -git clone https://github.com/yxuansu/PandaGPT -``` -5. Install the requirement of PandaGPT. -6. Check the path of PandaGPT source, ImageBind model, lora model and vicuna model in panda_gpt.py. - -## [MiniGPT-4](https://github.com/Vision-CAIR/MiniGPT-4/) example (minigpt4.py) - -1. Obtain MiniGPT-4 model from https://github.com/Vision-CAIR/MiniGPT-4/ and put it in `embd-input`. -2. Clone the MiniGPT-4 source. -``` -git clone https://github.com/Vision-CAIR/MiniGPT-4/ -``` -3. Install the requirement of PandaGPT. -4. Papare the `vicuna` v0 model. -5. Check the path of MiniGPT-4 source, MiniGPT-4 model and vicuna model in `minigpt4.py`. diff --git a/examples/embd-input/embd-input-lib.cpp b/examples/embd-input/embd-input-lib.cpp deleted file mode 100644 index 3ce33842c..000000000 --- a/examples/embd-input/embd-input-lib.cpp +++ /dev/null @@ -1,221 +0,0 @@ -#include "build-info.h" -#include "common.h" -#include "embd-input.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -static llama_context ** g_ctx; - -extern "C" { - -struct MyModel* create_mymodel(int argc, char ** argv) { - gpt_params params; - - if (!gpt_params_parse(argc, argv, params)) { - return nullptr; - } - - print_build_info(); - - if (params.seed == LLAMA_DEFAULT_SEED) { - params.seed = uint32_t(time(NULL)); - } - fprintf(stderr, "%s: seed = %d\n", __func__, params.seed); - - llama_backend_init(params.numa); - - llama_model * model; - llama_context * ctx; - - g_ctx = &ctx; - - // load the model and apply lora adapter, if any - std::tie(model, ctx) = llama_init_from_gpt_params(params); - if (model == NULL) { - fprintf(stderr, "%s: error: unable to load model\n", __func__); - return nullptr; - } - - // print system information - { - fprintf(stderr, "\n"); - fprintf(stderr, "%s\n", get_system_info(params).c_str()); - } - struct MyModel * ret = new MyModel(); - ret->ctx = ctx; - ret->params = params; - ret->n_past = 0; - // printf("ctx: %d\n", ret->ctx); - return ret; -} - -void free_mymodel(struct MyModel * mymodel) { - llama_context * ctx = mymodel->ctx; - llama_print_timings(ctx); - llama_free(ctx); - delete mymodel; -} - - -bool eval_float(void * model, float * input, int N){ - MyModel * mymodel = (MyModel*)model; - llama_context * ctx = mymodel->ctx; - gpt_params params = mymodel->params; - int n_emb = llama_n_embd(llama_get_model(ctx)); - int n_past = mymodel->n_past; - int n_batch = N; // params.n_batch; - - for (int i = 0; i < (int) N; i += n_batch) { - int n_eval = (int) N - i; - if (n_eval > n_batch) { - n_eval = n_batch; - } - llama_batch batch = { int32_t(n_eval), nullptr, (input+i*n_emb), nullptr, nullptr, nullptr, nullptr, n_past, 1, 0, }; - if (llama_decode(ctx, batch)) { - fprintf(stderr, "%s : failed to eval\n", __func__); - return false; - } - n_past += n_eval; - } - mymodel->n_past = n_past; - return true; -} - -bool eval_tokens(void * model, std::vector tokens) { - MyModel * mymodel = (MyModel* )model; - llama_context * ctx; - ctx = mymodel->ctx; - gpt_params params = mymodel->params; - int n_past = mymodel->n_past; - for (int i = 0; i < (int) 
tokens.size(); i += params.n_batch) { - int n_eval = (int) tokens.size() - i; - if (n_eval > params.n_batch) { - n_eval = params.n_batch; - } - if (llama_decode(ctx, llama_batch_get_one(&tokens[i], n_eval, n_past, 0))) { - fprintf(stderr, "%s : failed to eval\n", __func__); - return false; - } - n_past += n_eval; - } - mymodel->n_past = n_past; - return true; -} - -bool eval_id(struct MyModel* mymodel, int id) { - std::vector tokens; - tokens.push_back(id); - return eval_tokens(mymodel, tokens); -} - -bool eval_string(struct MyModel * mymodel,const char* str){ - llama_context * ctx = mymodel->ctx; - std::string str2 = str; - std::vector embd_inp = ::llama_tokenize(ctx, str2, true); - eval_tokens(mymodel, embd_inp); - return true; -} - -llama_token sampling_id(struct MyModel* mymodel) { - llama_context* ctx = mymodel->ctx; - gpt_params params = mymodel->params; - llama_sampling_params & sparams = params.sampling_params; - // int n_ctx = llama_n_ctx(ctx); - - // out of user input, sample next token - const float temp = sparams.temp; - const int32_t top_k = sparams.top_k <= 0 ? llama_n_vocab(llama_get_model(ctx)) : sparams.top_k; - const float top_p = sparams.top_p; - const float tfs_z = sparams.tfs_z; - const float typical_p = sparams.typical_p; - // const int32_t repeat_last_n = params.repeat_last_n < 0 ? n_ctx : params.repeat_last_n; - // const float repeat_penalty = params.repeat_penalty; - // const float alpha_presence = params.presence_penalty; - // const float alpha_frequency = params.frequency_penalty; - const int mirostat = sparams.mirostat; - const float mirostat_tau = sparams.mirostat_tau; - const float mirostat_eta = sparams.mirostat_eta; - // const bool penalize_nl = params.penalize_nl; - - llama_token id = 0; - { - auto logits = llama_get_logits(ctx); - auto n_vocab = llama_n_vocab(llama_get_model(ctx)); - - // Apply params.logit_bias map - for (auto it = sparams.logit_bias.begin(); it != sparams.logit_bias.end(); it++) { - logits[it->first] += it->second; - } - - std::vector candidates; - candidates.reserve(n_vocab); - for (llama_token token_id = 0; token_id < n_vocab; token_id++) { - candidates.emplace_back(llama_token_data{token_id, logits[token_id], 0.0f}); - } - - llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; - - // TODO: Apply penalties - // float nl_logit = logits[llama_token_nl(ctx)]; - // auto last_n_repeat = std::min(std::min((int)last_n_tokens.size(), repeat_last_n), n_ctx); - // llama_sample_repetition_penalty(ctx, &candidates_p, - // last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, - // last_n_repeat, repeat_penalty); - // llama_sample_frequency_and_presence_penalties(ctx, &candidates_p, - // last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, - // last_n_repeat, alpha_frequency, alpha_presence); - // if (!penalize_nl) { - // logits[llama_token_nl(ctx)] = nl_logit; - // } - - if (temp <= 0) { - // Greedy sampling - id = llama_sample_token_greedy(ctx, &candidates_p); - } else { - if (mirostat == 1) { - static float mirostat_mu = 2.0f * mirostat_tau; - const int mirostat_m = 100; - llama_sample_temp(ctx, &candidates_p, temp); - id = llama_sample_token_mirostat(ctx, &candidates_p, mirostat_tau, mirostat_eta, mirostat_m, &mirostat_mu); - } else if (mirostat == 2) { - static float mirostat_mu = 2.0f * mirostat_tau; - llama_sample_temp(ctx, &candidates_p, temp); - id = llama_sample_token_mirostat_v2(ctx, &candidates_p, mirostat_tau, mirostat_eta, &mirostat_mu); - } else { - // Temperature sampling - 
llama_sample_top_k(ctx, &candidates_p, top_k, 1); - llama_sample_tail_free(ctx, &candidates_p, tfs_z, 1); - llama_sample_typical(ctx, &candidates_p, typical_p, 1); - llama_sample_top_p(ctx, &candidates_p, top_p, 1); - llama_sample_temp(ctx, &candidates_p, temp); - id = llama_sample_token(ctx, &candidates_p); - } - } - } - - return id; -} - -const char * sampling(struct MyModel * mymodel) { - llama_context * ctx = mymodel->ctx; - int id = sampling_id(mymodel); - static std::string ret; - if (id == llama_token_eos(ctx)) { - ret = ""; - } else { - ret = llama_token_to_piece(ctx, id); - } - eval_id(mymodel, id); - return ret.c_str(); -} - -} diff --git a/examples/embd-input/embd-input-test.cpp b/examples/embd-input/embd-input-test.cpp deleted file mode 100644 index dc4a0e488..000000000 --- a/examples/embd-input/embd-input-test.cpp +++ /dev/null @@ -1,35 +0,0 @@ -#include "embd-input.h" -#include -#include -#include - -int main(int argc, char** argv) { - - auto mymodel = create_mymodel(argc, argv); - int N = 10; - int max_tgt_len = 500; - int n_embd = llama_n_embd(llama_get_model(mymodel->ctx)); - - // add random float embd to test evaluation - float * data = new float[N*n_embd]; - std::default_random_engine e; - std::uniform_real_distribution u(0,1); - for (int i=0;iparams.prompt.c_str()); - const char* tmp; - for (int i=0; i")==0) break; - printf("%s", tmp); - fflush(stdout); - } - printf("\n"); - free_mymodel(mymodel); - return 0; -} diff --git a/examples/embd-input/embd-input.h b/examples/embd-input/embd-input.h deleted file mode 100644 index eff5e3b84..000000000 --- a/examples/embd-input/embd-input.h +++ /dev/null @@ -1,27 +0,0 @@ -#ifndef _EMBD_INPUT_H_ -#define _EMBD_INPUT_H_ 1 - -#include "common.h" -#include "llama.h" - -extern "C" { - -typedef struct MyModel { - llama_context* ctx; - gpt_params params; - int n_past = 0; -} MyModel; - -struct MyModel* create_mymodel(int argc, char ** argv); - -bool eval_float(void* model, float* input, int N); -bool eval_tokens(void* model, std::vector tokens); -bool eval_id(struct MyModel* mymodel, int id); -bool eval_string(struct MyModel* mymodel, const char* str); -const char * sampling(struct MyModel* mymodel); -llama_token sampling_id(struct MyModel* mymodel); -void free_mymodel(struct MyModel* mymodel); - -} - -#endif diff --git a/examples/embd-input/embd_input.py b/examples/embd-input/embd_input.py deleted file mode 100755 index f146acdc1..000000000 --- a/examples/embd-input/embd_input.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 -import ctypes -from ctypes import cdll, c_char_p, c_void_p, POINTER, c_float, c_int -import numpy as np -import os - -libc = cdll.LoadLibrary("./libembdinput.so") -libc.sampling.restype=c_char_p -libc.create_mymodel.restype=c_void_p -libc.eval_string.argtypes=[c_void_p, c_char_p] -libc.sampling.argtypes=[c_void_p] -libc.eval_float.argtypes=[c_void_p, POINTER(c_float), c_int] - - -class MyModel: - def __init__(self, args): - argc = len(args) - c_str = [c_char_p(i.encode()) for i in args] - args_c = (c_char_p * argc)(*c_str) - self.model = c_void_p(libc.create_mymodel(argc, args_c)) - self.max_tgt_len = 512 - self.print_string_eval = True - - def __del__(self): - libc.free_mymodel(self.model) - - def eval_float(self, x): - libc.eval_float(self.model, x.astype(np.float32).ctypes.data_as(POINTER(c_float)), x.shape[1]) - - def eval_string(self, x): - libc.eval_string(self.model, x.encode()) # c_char_p(x.encode())) - if self.print_string_eval: - print(x) - - def eval_token(self, x): - libc.eval_id(self.model, 
diff --git a/examples/embd-input/embd_input.py b/examples/embd-input/embd_input.py
deleted file mode 100755
index f146acdc1..000000000
--- a/examples/embd-input/embd_input.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python3
-import ctypes
-from ctypes import cdll, c_char_p, c_void_p, POINTER, c_float, c_int
-import numpy as np
-import os
-
-libc = cdll.LoadLibrary("./libembdinput.so")
-libc.sampling.restype=c_char_p
-libc.create_mymodel.restype=c_void_p
-libc.eval_string.argtypes=[c_void_p, c_char_p]
-libc.sampling.argtypes=[c_void_p]
-libc.eval_float.argtypes=[c_void_p, POINTER(c_float), c_int]
-
-
-class MyModel:
-    def __init__(self, args):
-        argc = len(args)
-        c_str = [c_char_p(i.encode()) for i in args]
-        args_c = (c_char_p * argc)(*c_str)
-        self.model = c_void_p(libc.create_mymodel(argc, args_c))
-        self.max_tgt_len = 512
-        self.print_string_eval = True
-
-    def __del__(self):
-        libc.free_mymodel(self.model)
-
-    def eval_float(self, x):
-        libc.eval_float(self.model, x.astype(np.float32).ctypes.data_as(POINTER(c_float)), x.shape[1])
-
-    def eval_string(self, x):
-        libc.eval_string(self.model, x.encode()) # c_char_p(x.encode()))
-        if self.print_string_eval:
-            print(x)
-
-    def eval_token(self, x):
-        libc.eval_id(self.model, x)
-
-    def sampling(self):
-        s = libc.sampling(self.model)
-        return s
-
-    def stream_generate(self, end="</s>"):
-        ret = b""
-        end = end.encode()
-        for _ in range(self.max_tgt_len):
-            tmp = self.sampling()
-            ret += tmp
-            yield tmp
-            if ret.endswith(end):
-                break
-
-    def generate_with_print(self, end="</s>"):
-        ret = b""
-        for i in self.stream_generate(end=end):
-            ret += i
-            print(i.decode(errors="replace"), end="", flush=True)
-        print("")
-        return ret.decode(errors="replace")
-
-
-    def generate(self, end="</s>"):
-        text = b"".join(self.stream_generate(end=end))
-        return text.decode(errors="replace")
-
-if __name__ == "__main__":
-    model = MyModel(["main", "--model", "../llama.cpp/models/ggml-vic13b-q4_1.bin", "-c", "2048"])
-    model.eval_string("""user: what is the color of the flag of UN?""")
-    x = np.random.random((5120,10))# , dtype=np.float32)
-    model.eval_float(x)
-    model.eval_string("""assistant:""")
-    for i in model.generate():
-        print(i, end="", flush=True)
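One convention worth noting in the wrapper above: eval_float passes x.shape[1] as the token count, so arrays are laid out as (n_embd, n_tokens) and converted to float32 before the pointer is handed to C. A usage sketch under that convention (the model path and the 5120 embedding width are placeholders for a 13B model; requires the built libembdinput.so):

    from embd_input import MyModel
    import numpy as np

    model = MyModel(["main", "--model", "./models/model.bin", "-c", "2048"])
    emb = np.random.random((5120, 8))   # 8 "virtual tokens" of a 5120-dim model
    model.eval_string("user: describe the embedded content\n")
    model.eval_float(emb)               # streams the 8 columns into the context
    model.eval_string("assistant:")
    print(model.generate())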
diff --git a/examples/embd-input/llava.py b/examples/embd-input/llava.py
deleted file mode 100755
index 06fad55f4..000000000
--- a/examples/embd-input/llava.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python3
-import sys
-import os
-sys.path.insert(0, os.path.dirname(__file__))
-from embd_input import MyModel
-import numpy as np
-from torch import nn
-import torch
-from transformers import CLIPVisionModel, CLIPImageProcessor
-from PIL import Image
-
-# model parameters from 'liuhaotian/LLaVA-13b-delta-v1-1'
-vision_tower = "openai/clip-vit-large-patch14"
-select_hidden_state_layer = -2
-# (vision_config.image_size // vision_config.patch_size) ** 2
-image_token_len = (224//14)**2
-
-class Llava:
-    def __init__(self, args):
-        self.image_processor = CLIPImageProcessor.from_pretrained(vision_tower)
-        self.vision_tower = CLIPVisionModel.from_pretrained(vision_tower)
-        self.mm_projector = nn.Linear(1024, 5120)
-        self.model = MyModel(["main", *args])
-
-    def load_projection(self, path):
-        state = torch.load(path)
-        self.mm_projector.load_state_dict({
-            "weight": state["model.mm_projector.weight"],
-            "bias": state["model.mm_projector.bias"]})
-
-    def chat(self, question):
-        self.model.eval_string("user: ")
-        self.model.eval_string(question)
-        self.model.eval_string("\nassistant: ")
-        return self.model.generate_with_print()
-
-    def chat_with_image(self, image, question):
-        with torch.no_grad():
-            embd_image = self.image_processor.preprocess(image, return_tensors='pt')['pixel_values'][0]
-            image_forward_out = self.vision_tower(embd_image.unsqueeze(0), output_hidden_states=True)
-            select_hidden_state = image_forward_out.hidden_states[select_hidden_state_layer]
-            image_feature = select_hidden_state[:, 1:]
-            embd_image = self.mm_projector(image_feature)
-            embd_image = embd_image.cpu().numpy()[0]
-        self.model.eval_string("user: ")
-        self.model.eval_token(32003-2) # im_start
-        self.model.eval_float(embd_image.T)
-        for i in range(image_token_len-embd_image.shape[0]):
-            self.model.eval_token(32003-3) # im_patch
-        self.model.eval_token(32003-1) # im_end
-        self.model.eval_string(question)
-        self.model.eval_string("\nassistant: ")
-        return self.model.generate_with_print()
-
-
-if __name__=="__main__":
-    # model from liuhaotian/LLaVA-13b-delta-v1-1
-    a = Llava(["--model", "./models/ggml-llava-13b-v1.1.bin", "-c", "2048"])
-    # Extract from https://huggingface.co/liuhaotian/LLaVA-13b-delta-v1-1/blob/main/pytorch_model-00003-of-00003.bin.
-    # Also here can use pytorch_model-00003-of-00003.bin directly.
-    a.load_projection(os.path.join(
-        os.path.dirname(__file__) ,
-        "llava_projection.pth"))
-    response = a.chat_with_image(
-        Image.open("./media/llama1-logo.png").convert('RGB'),
-        "what is the text in the picture?")
-    a.chat("what is the color of it?")
-
-
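llava.py reuses the embd-input mechanism directly: CLIP ViT-L/14 yields 1024-d features for 16x16 = 256 patches at 224 px (hence image_token_len = 256), the second-to-last hidden state is selected, the CLS position is dropped, and one linear layer maps the patches into the 5120-d LLaMA-13B embedding space. A reduced sketch of just that projection step (random weights here, not the released projector checkpoint):

    import torch
    from torch import nn

    hidden = torch.randn(1, 257, 1024)   # [CLS] + 256 patch features from layer -2
    patches = hidden[:, 1:]              # drop the CLS token, as chat_with_image does
    mm_projector = nn.Linear(1024, 5120)
    embd = mm_projector(patches)         # (1, 256, 5120), then .T into eval_float
    print(embd.shape)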
- "###") - - def encode_img(self, image): - image = self.preprocessor(image) - image = image.unsqueeze(0) - device = image.device - if self.low_resource: - self.vit_to_cpu() - image = image.to("cpu") - - with self.maybe_autocast(): - image_embeds = self.ln_vision(self.visual_encoder(image)).to(device) - image_atts = torch.ones(image_embeds.size()[:-1], dtype=torch.long).to(device) - - query_tokens = self.query_tokens.expand(image_embeds.shape[0], -1, -1) - query_output = self.Qformer.bert( - query_embeds=query_tokens, - encoder_hidden_states=image_embeds, - encoder_attention_mask=image_atts, - return_dict=True, - ) - - inputs_llama = self.llama_proj(query_output.last_hidden_state) - # atts_llama = torch.ones(inputs_llama.size()[:-1], dtype=torch.long).to(image.device) - return inputs_llama - - def load_projection(self, path): - state = torch.load(path)["model"] - self.llama_proj.load_state_dict({ - "weight": state["llama_proj.weight"], - "bias": state["llama_proj.bias"]}) - - def chat(self, question): - self.model.eval_string("Human: ") - self.model.eval_string(question) - self.model.eval_string("\n### Assistant:") - return self.model.generate_with_print(end="###") - - def chat_with_image(self, image, question): - with torch.no_grad(): - embd_image = self.encode_img(image) - embd_image = embd_image.cpu().numpy()[0] - self.model.eval_string("Human: ") - self.model.eval_float(embd_image.T) - self.model.eval_string(" ") - self.model.eval_string(question) - self.model.eval_string("\n### Assistant:") - return self.model.generate_with_print(end="###") - - -if __name__=="__main__": - a = MiniGPT4(["--model", "./models/ggml-vicuna-13b-v0-q4_1.bin", "-c", "2048"]) - a.load_projection(os.path.join( - os.path.dirname(__file__) , - "pretrained_minigpt4.pth")) - respose = a.chat_with_image( - Image.open("./media/llama1-logo.png").convert('RGB'), - "what is the text in the picture?") - a.chat("what is the color of it?") diff --git a/examples/embd-input/panda_gpt.py b/examples/embd-input/panda_gpt.py deleted file mode 100755 index 891ad7cc9..000000000 --- a/examples/embd-input/panda_gpt.py +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/env python3 -import sys -import os -sys.path.insert(0, os.path.dirname(__file__)) -from embd_input import MyModel -import numpy as np -from torch import nn -import torch - -# use PandaGPT path -panda_gpt_path = os.path.join(os.path.dirname(__file__), "PandaGPT") -imagebind_ckpt_path = "./models/panda_gpt/" - -sys.path.insert(0, os.path.join(panda_gpt_path,"code","model")) -from ImageBind.models import imagebind_model -from ImageBind import data - -ModalityType = imagebind_model.ModalityType -max_tgt_len = 400 - -class PandaGPT: - def __init__(self, args): - self.visual_encoder,_ = imagebind_model.imagebind_huge(pretrained=True, store_path=imagebind_ckpt_path) - self.visual_encoder.eval() - self.llama_proj = nn.Linear(1024, 5120) # self.visual_hidden_size, 5120) - self.max_tgt_len = max_tgt_len - self.model = MyModel(["main", *args]) - self.generated_text = "" - self.device = "cpu" - - def load_projection(self, path): - state = torch.load(path, map_location="cpu") - self.llama_proj.load_state_dict({ - "weight": state["llama_proj.weight"], - "bias": state["llama_proj.bias"]}) - - def eval_inputs(self, inputs): - self.model.eval_string("") - embds = self.extract_multimoal_feature(inputs) - for i in embds: - self.model.eval_float(i.T) - self.model.eval_string(" ") - - def chat(self, question): - return self.chat_with_image(None, question) - - def chat_with_image(self, inputs, 
diff --git a/examples/embd-input/panda_gpt.py b/examples/embd-input/panda_gpt.py
deleted file mode 100755
index 891ad7cc9..000000000
--- a/examples/embd-input/panda_gpt.py
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python3
-import sys
-import os
-sys.path.insert(0, os.path.dirname(__file__))
-from embd_input import MyModel
-import numpy as np
-from torch import nn
-import torch
-
-# use PandaGPT path
-panda_gpt_path = os.path.join(os.path.dirname(__file__), "PandaGPT")
-imagebind_ckpt_path = "./models/panda_gpt/"
-
-sys.path.insert(0, os.path.join(panda_gpt_path,"code","model"))
-from ImageBind.models import imagebind_model
-from ImageBind import data
-
-ModalityType = imagebind_model.ModalityType
-max_tgt_len = 400
-
-class PandaGPT:
-    def __init__(self, args):
-        self.visual_encoder,_ = imagebind_model.imagebind_huge(pretrained=True, store_path=imagebind_ckpt_path)
-        self.visual_encoder.eval()
-        self.llama_proj = nn.Linear(1024, 5120) # self.visual_hidden_size, 5120)
-        self.max_tgt_len = max_tgt_len
-        self.model = MyModel(["main", *args])
-        self.generated_text = ""
-        self.device = "cpu"
-
-    def load_projection(self, path):
-        state = torch.load(path, map_location="cpu")
-        self.llama_proj.load_state_dict({
-            "weight": state["llama_proj.weight"],
-            "bias": state["llama_proj.bias"]})
-
-    def eval_inputs(self, inputs):
-        self.model.eval_string("<Img>")
-        embds = self.extract_multimoal_feature(inputs)
-        for i in embds:
-            self.model.eval_float(i.T)
-        self.model.eval_string("</Img> ")
-
-    def chat(self, question):
-        return self.chat_with_image(None, question)
-
-    def chat_with_image(self, inputs, question):
-        if self.generated_text == "":
-            self.model.eval_string("###")
-        self.model.eval_string(" Human: ")
-        if inputs:
-            self.eval_inputs(inputs)
-        self.model.eval_string(question)
-        self.model.eval_string("\n### Assistant:")
-        ret = self.model.generate_with_print(end="###")
-        self.generated_text += ret
-        return ret
-
-    def extract_multimoal_feature(self, inputs):
-        features = []
-        for key in ["image", "audio", "video", "thermal"]:
-            if key + "_paths" in inputs:
-                embeds = self.encode_data(key, inputs[key+"_paths"])
-                features.append(embeds)
-        return features
-
-    def encode_data(self, data_type, data_paths):
-
-        type_map = {
-            "image": ModalityType.VISION,
-            "audio": ModalityType.AUDIO,
-            "video": ModalityType.VISION,
-            "thermal": ModalityType.THERMAL,
-        }
-        load_map = {
-            "image": data.load_and_transform_vision_data,
-            "audio": data.load_and_transform_audio_data,
-            "video": data.load_and_transform_video_data,
-            "thermal": data.load_and_transform_thermal_data
-        }
-
-        load_function = load_map[data_type]
-        key = type_map[data_type]
-
-        inputs = {key: load_function(data_paths, self.device)}
-        with torch.no_grad():
-            embeddings = self.visual_encoder(inputs)
-            embeds = embeddings[key]
-            embeds = self.llama_proj(embeds).cpu().numpy()
-        return embeds
-
-
-if __name__=="__main__":
-    a = PandaGPT(["--model", "./models/ggml-vicuna-13b-v0-q4_1.bin", "-c", "2048", "--lora", "./models/panda_gpt/ggml-adapter-model.bin","--temp", "0"])
-    a.load_projection("./models/panda_gpt/adapter_model.bin")
-    a.chat_with_image(
-        {"image_paths": ["./media/llama1-logo.png"]},
-        "what is the text in the picture? 'llama' or 'lambda'?")
-    a.chat("what is the color of it?")
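encode_data above is a small dispatch table: each input kind maps to an ImageBind modality key plus a loader, and every modality lands in the shared 1024-d ImageBind space before llama_proj. The same table-driven pattern, reduced to a self-contained sketch with stub loaders standing in for the real ImageBind.data functions:

    def load_vision(paths, device): return ("vision-batch", paths, device)
    def load_audio(paths, device):  return ("audio-batch", paths, device)

    LOADERS = {
        "image": ("vision", load_vision),
        "video": ("vision", load_vision),   # video shares the vision modality key
        "audio": ("audio",  load_audio),
    }

    def encode(data_type, paths, device="cpu"):
        key, load = LOADERS[data_type]
        return {key: load(paths, device)}

    print(encode("image", ["./media/llama1-logo.png"]))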
diff --git a/examples/gptneox-wip/cmpnct_gpt2bpe.hpp b/examples/gptneox-wip/cmpnct_gpt2bpe.hpp
deleted file mode 100644
index 9d433f4b1..000000000
--- a/examples/gptneox-wip/cmpnct_gpt2bpe.hpp
+++ /dev/null
@@ -1,1133 +0,0 @@
-#ifndef CMPNCT_GPT2BPE
-#define CMPNCT_GPT2BPE
-
-#include <vector>
-#include <string>
-#include <map>
-#include <unordered_map>
-#include <algorithm>
-#include <utility>
-#include <cstdint>
-#include <cstdio>
-#include <cctype>
-
-
-// Unicode GPT2 Byte Pair Encoding Tokenizer
-// Adapted from https://github.com/cmp-nct/ggllm.cpp [MIT License]
-// Removed loading of merges from HF json and parts made for a specific vocab
-
-
-//-----------------
-// Unicode library (from cmpnct_unicode.cpp)
-//-----------------
-
-// Minimal library for high performance handling and categorization of UTF8 strings and characters
-// Using std::string
-
-enum CNCTCharType {
-    DIGIT,          // a numerical char in any language
-    LETTER,         // a letter in any language
-    WHITESPACE,     // any form of whitespace
-    ACCENT_MARK,    // letter modifiers like ´ in é
-    PUNCTUATION,    // punctuation including brackets
-    SYMBOL,         // math, currency, other symbols
-    CONTROL,        // control characters
-    MIXED,          // a mix of the above
-    UNIDENTIFIED    // something more exotic like emoji or separators
-};
-
-struct CNCTUnicode;
-
-struct CNCTString {
-    std::string str;
-    size_t utf8_chars;
-
-    CNCTCharType char_type=UNIDENTIFIED;
-    bool is_sequential=false;
-
-    size_t seq_offset_bytes=0;
-    size_t seq_offset_utf8_chars=0;
-
-    bool operator==(const std::string &other) const;
-    bool operator==(const char other) const;
-    bool operator==(const CNCTString &other) const;
-    CNCTString &operator+=(const std::string &other);
-    CNCTString &operator+=(const char other);
-    friend CNCTString operator+(CNCTString lhs, const std::string &rhs);
-    friend CNCTString operator+(CNCTString lhs, const char rhs);
-    CNCTString& operator+=(const CNCTString& other);
-    friend CNCTString operator+(CNCTString lhs, const CNCTString& rhs);
-};
-
-struct CNCTUnicode {
-    static bool check_code_range(int c, const std::vector<std::pair<int, int>>& ranges);
-    static CNCTCharType get_code_type(int c);
-    static CNCTCharType get_code_type(const std::string &utf8_char);
-    static int utf8_len(const char c);
-    static int strlen_utf8(std::string src);
-    static std::vector<std::string> split_utf8(const std::string &src);
-    static std::vector<CNCTString> split_utf8_enhanced(const std::string &src);
-    static CNCTCharType string_identify(const std::string& str);
-    static bool string_test(const std::string& str, CNCTCharType chartype);
-};
-
-static const std::vector<std::pair<int, int>> digit_ranges = {
-{0x30, 0x39}, {0xB2, 0xB3}, {0xB9, 0xB9}, {0x660, 0x669}, {0x6F0, 0x6F9}, {0x7C0, 0x7C9}, {0x966, 0x96F}, {0x9E6, 0x9EF}, {0xA66, 0xA6F}, {0xAE6, 0xAEF}, {0xB66, 0xB6F}, {0xBE6, 0xBEF}, {0xC66, 0xC6F},
-{0xCE6, 0xCEF}, {0xD66, 0xD6F}, {0xDE6, 0xDEF}, {0xE50, 0xE59}, {0xED0, 0xED9}, {0xF20, 0xF29}, {0x1040, 0x1049}, {0x1090, 0x1099}, {0x1369, 0x1371}, {0x17E0, 0x17E9}, {0x1810, 0x1819}, {0x1946, 0x194F},
-{0x19D0, 0x19DA}, {0x1A80, 0x1A89}, {0x1A90, 0x1A99}, {0x1B50, 0x1B59}, {0x1BB0, 0x1BB9}, {0x1C40, 0x1C49}, {0x1C50, 0x1C59}, {0x2070, 0x2070}, {0x2074, 0x2079}, {0x2080, 0x2089}, {0x2460, 0x2468},
-{0x2474, 0x247C}, {0x2488, 0x2490}, {0x24EA, 0x24EA}, {0x24F5, 0x24FD}, {0x24FF, 0x24FF}, {0x2776, 0x277E}, {0x2780, 0x2788}, {0x278A, 0x2792}, {0xA620, 0xA629}, {0xA8D0, 0xA8D9}, {0xA900, 0xA909},
-{0xA9D0, 0xA9D9}, {0xA9F0, 0xA9F9}, {0xAA50, 0xAA59}, {0xABF0, 0xABF9}, {0xFF10, 0xFF19}, {0x104A0, 0x104A9}, {0x10A40, 0x10A43}, {0x10D30, 0x10D39}, {0x10E60, 0x10E68}, {0x11052, 0x1105A},
-{0x11066, 0x1106F}, {0x110F0, 0x110F9}, {0x11136, 0x1113F}, {0x111D0, 0x111D9}, {0x112F0, 0x112F9}, {0x11450, 0x11459}, {0x114D0, 0x114D9}, {0x11650, 0x11659}, {0x116C0, 0x116C9}, {0x11730, 0x11739},
-{0x118E0, 0x118E9}, {0x11950, 0x11959}, {0x11C50, 0x11C59}, {0x11D50, 0x11D59}, {0x11DA0, 0x11DA9}, {0x16A60, 0x16A69}, {0x16B50, 0x16B59}, {0x1D7CE, 0x1D7FF}, {0x1E140, 0x1E149}, {0x1E2F0, 0x1E2F9},
-{0x1E950, 0x1E959}, {0x1F100, 0x1F10A}, {0x1FBF0, 0x1FBF9},
-};
-
-static const std::vector<std::pair<int, int>> letter_ranges = {
-{0x41, 0x5A}, {0x61, 0x7A}, {0xAA, 0xAA}, {0xB5, 0xB5}, {0xBA, 0xBA}, {0xC0, 0xD6}, {0xD8, 0xF6}, {0xF8, 0x2C1}, {0x2C6, 0x2D1}, {0x2E0, 0x2E4}, {0x2EC, 0x2EC}, {0x2EE, 0x2EE}, {0x370, 0x374},
-{0x376, 0x377}, {0x37A, 0x37D}, {0x37F, 0x37F}, {0x386, 0x386}, {0x388, 0x38A}, {0x38C, 0x38C}, {0x38E, 0x3A1}, {0x3A3, 0x3F5}, {0x3F7, 0x481}, {0x48A, 0x52F}, {0x531, 0x556}, {0x559, 0x559},
-{0x560, 0x588}, {0x5D0, 0x5EA}, {0x5EF, 0x5F2}, {0x620, 0x64A}, {0x66E, 0x66F}, {0x671, 0x6D3}, {0x6D5, 0x6D5}, {0x6E5, 0x6E6}, {0x6EE, 0x6EF}, {0x6FA, 0x6FC}, {0x6FF, 0x6FF}, {0x710, 0x710},
-{0x712, 0x72F}, {0x74D, 0x7A5}, {0x7B1, 0x7B1}, {0x7CA, 0x7EA}, {0x7F4, 0x7F5}, {0x7FA, 0x7FA}, {0x800, 0x815}, {0x81A, 0x81A}, {0x824, 0x824}, {0x828, 0x828}, {0x840, 0x858}, {0x860, 0x86A},
-{0x8A0, 0x8B4}, {0x8B6, 0x8C7}, {0x904, 0x939}, {0x93D, 0x93D}, {0x950, 0x950}, {0x958, 0x961}, {0x971, 0x980}, {0x985, 0x98C}, {0x98F, 0x990}, {0x993, 0x9A8}, {0x9AA, 0x9B0}, {0x9B2, 0x9B2},
-{0x9B6, 0x9B9}, {0x9BD, 0x9BD}, {0x9CE, 0x9CE}, {0x9DC, 0x9DD}, {0x9DF, 0x9E1}, {0x9F0, 0x9F1}, {0x9FC, 0x9FC}, {0xA05, 0xA0A}, {0xA0F, 0xA10}, {0xA13, 0xA28}, {0xA2A, 0xA30}, {0xA32, 0xA33},
-{0xA35, 0xA36}, {0xA38, 0xA39}, {0xA59, 0xA5C}, {0xA5E, 0xA5E}, {0xA72, 0xA74}, {0xA85, 0xA8D}, {0xA8F, 0xA91}, {0xA93, 0xAA8}, {0xAAA, 0xAB0}, {0xAB2, 0xAB3}, {0xAB5, 0xAB9}, {0xABD, 0xABD},
-{0xAD0, 0xAD0}, {0xAE0, 0xAE1}, {0xAF9,
0xAF9}, {0xB05, 0xB0C}, {0xB0F, 0xB10}, {0xB13, 0xB28}, {0xB2A, 0xB30}, {0xB32, 0xB33}, {0xB35, 0xB39}, {0xB3D, 0xB3D}, {0xB5C, 0xB5D}, {0xB5F, 0xB61}, -{0xB71, 0xB71}, {0xB83, 0xB83}, {0xB85, 0xB8A}, {0xB8E, 0xB90}, {0xB92, 0xB95}, {0xB99, 0xB9A}, {0xB9C, 0xB9C}, {0xB9E, 0xB9F}, {0xBA3, 0xBA4}, {0xBA8, 0xBAA}, {0xBAE, 0xBB9}, {0xBD0, 0xBD0}, -{0xC05, 0xC0C}, {0xC0E, 0xC10}, {0xC12, 0xC28}, {0xC2A, 0xC39}, {0xC3D, 0xC3D}, {0xC58, 0xC5A}, {0xC60, 0xC61}, {0xC80, 0xC80}, {0xC85, 0xC8C}, {0xC8E, 0xC90}, {0xC92, 0xCA8}, {0xCAA, 0xCB3}, -{0xCB5, 0xCB9}, {0xCBD, 0xCBD}, {0xCDE, 0xCDE}, {0xCE0, 0xCE1}, {0xCF1, 0xCF2}, {0xD04, 0xD0C}, {0xD0E, 0xD10}, {0xD12, 0xD3A}, {0xD3D, 0xD3D}, {0xD4E, 0xD4E}, {0xD54, 0xD56}, {0xD5F, 0xD61}, -{0xD7A, 0xD7F}, {0xD85, 0xD96}, {0xD9A, 0xDB1}, {0xDB3, 0xDBB}, {0xDBD, 0xDBD}, {0xDC0, 0xDC6}, {0xE01, 0xE30}, {0xE32, 0xE33}, {0xE40, 0xE46}, {0xE81, 0xE82}, {0xE84, 0xE84}, {0xE86, 0xE8A}, -{0xE8C, 0xEA3}, {0xEA5, 0xEA5}, {0xEA7, 0xEB0}, {0xEB2, 0xEB3}, {0xEBD, 0xEBD}, {0xEC0, 0xEC4}, {0xEC6, 0xEC6}, {0xEDC, 0xEDF}, {0xF00, 0xF00}, {0xF40, 0xF47}, {0xF49, 0xF6C}, {0xF88, 0xF8C}, -{0x1000, 0x102A}, {0x103F, 0x103F}, {0x1050, 0x1055}, {0x105A, 0x105D}, {0x1061, 0x1061}, {0x1065, 0x1066}, {0x106E, 0x1070}, {0x1075, 0x1081}, {0x108E, 0x108E}, {0x10A0, 0x10C5}, {0x10C7, 0x10C7}, -{0x10CD, 0x10CD}, {0x10D0, 0x10FA}, {0x10FC, 0x1248}, {0x124A, 0x124D}, {0x1250, 0x1256}, {0x1258, 0x1258}, {0x125A, 0x125D}, {0x1260, 0x1288}, {0x128A, 0x128D}, {0x1290, 0x12B0}, {0x12B2, 0x12B5}, -{0x12B8, 0x12BE}, {0x12C0, 0x12C0}, {0x12C2, 0x12C5}, {0x12C8, 0x12D6}, {0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A}, {0x1380, 0x138F}, {0x13A0, 0x13F5}, {0x13F8, 0x13FD}, {0x1401, 0x166C}, -{0x166F, 0x167F}, {0x1681, 0x169A}, {0x16A0, 0x16EA}, {0x16F1, 0x16F8}, {0x1700, 0x170C}, {0x170E, 0x1711}, {0x1720, 0x1731}, {0x1740, 0x1751}, {0x1760, 0x176C}, {0x176E, 0x1770}, {0x1780, 0x17B3}, -{0x17D7, 0x17D7}, {0x17DC, 0x17DC}, {0x1820, 0x1878}, {0x1880, 0x1884}, {0x1887, 0x18A8}, {0x18AA, 0x18AA}, {0x18B0, 0x18F5}, {0x1900, 0x191E}, {0x1950, 0x196D}, {0x1970, 0x1974}, {0x1980, 0x19AB}, -{0x19B0, 0x19C9}, {0x1A00, 0x1A16}, {0x1A20, 0x1A54}, {0x1AA7, 0x1AA7}, {0x1B05, 0x1B33}, {0x1B45, 0x1B4B}, {0x1B83, 0x1BA0}, {0x1BAE, 0x1BAF}, {0x1BBA, 0x1BE5}, {0x1C00, 0x1C23}, {0x1C4D, 0x1C4F}, -{0x1C5A, 0x1C7D}, {0x1C80, 0x1C88}, {0x1C90, 0x1CBA}, {0x1CBD, 0x1CBF}, {0x1CE9, 0x1CEC}, {0x1CEE, 0x1CF3}, {0x1CF5, 0x1CF6}, {0x1CFA, 0x1CFA}, {0x1D00, 0x1DBF}, {0x1E00, 0x1F15}, {0x1F18, 0x1F1D}, -{0x1F20, 0x1F45}, {0x1F48, 0x1F4D}, {0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B}, {0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4}, {0x1FB6, 0x1FBC}, {0x1FBE, 0x1FBE}, {0x1FC2, 0x1FC4}, -{0x1FC6, 0x1FCC}, {0x1FD0, 0x1FD3}, {0x1FD6, 0x1FDB}, {0x1FE0, 0x1FEC}, {0x1FF2, 0x1FF4}, {0x1FF6, 0x1FFC}, {0x2071, 0x2071}, {0x207F, 0x207F}, {0x2090, 0x209C}, {0x2102, 0x2102}, {0x2107, 0x2107}, -{0x210A, 0x2113}, {0x2115, 0x2115}, {0x2119, 0x211D}, {0x2124, 0x2124}, {0x2126, 0x2126}, {0x2128, 0x2128}, {0x212A, 0x212D}, {0x212F, 0x2139}, {0x213C, 0x213F}, {0x2145, 0x2149}, {0x214E, 0x214E}, -{0x2183, 0x2184}, {0x2C00, 0x2C2E}, {0x2C30, 0x2C5E}, {0x2C60, 0x2CE4}, {0x2CEB, 0x2CEE}, {0x2CF2, 0x2CF3}, {0x2D00, 0x2D25}, {0x2D27, 0x2D27}, {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D6F}, -{0x2D80, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE}, {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6}, {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE}, {0x2E2F, 0x2E2F}, {0x3005, 0x3006}, -{0x3031, 0x3035}, {0x303B, 0x303C}, {0x3041, 
0x3096}, {0x309D, 0x309F}, {0x30A1, 0x30FA}, {0x30FC, 0x30FF}, {0x3105, 0x312F}, {0x3131, 0x318E}, {0x31A0, 0x31BF}, {0x31F0, 0x31FF}, {0x3400, 0x4DBF}, -{0x4E00, 0x9FFC}, {0xA000, 0xA48C}, {0xA4D0, 0xA4FD}, {0xA500, 0xA60C}, {0xA610, 0xA61F}, {0xA62A, 0xA62B}, {0xA640, 0xA66E}, {0xA67F, 0xA69D}, {0xA6A0, 0xA6E5}, {0xA717, 0xA71F}, {0xA722, 0xA788}, -{0xA78B, 0xA7BF}, {0xA7C2, 0xA7CA}, {0xA7F5, 0xA801}, {0xA803, 0xA805}, {0xA807, 0xA80A}, {0xA80C, 0xA822}, {0xA840, 0xA873}, {0xA882, 0xA8B3}, {0xA8F2, 0xA8F7}, {0xA8FB, 0xA8FB}, {0xA8FD, 0xA8FE}, -{0xA90A, 0xA925}, {0xA930, 0xA946}, {0xA960, 0xA97C}, {0xA984, 0xA9B2}, {0xA9CF, 0xA9CF}, {0xA9E0, 0xA9E4}, {0xA9E6, 0xA9EF}, {0xA9FA, 0xA9FE}, {0xAA00, 0xAA28}, {0xAA40, 0xAA42}, {0xAA44, 0xAA4B}, -{0xAA60, 0xAA76}, {0xAA7A, 0xAA7A}, {0xAA7E, 0xAAAF}, {0xAAB1, 0xAAB1}, {0xAAB5, 0xAAB6}, {0xAAB9, 0xAABD}, {0xAAC0, 0xAAC0}, {0xAAC2, 0xAAC2}, {0xAADB, 0xAADD}, {0xAAE0, 0xAAEA}, {0xAAF2, 0xAAF4}, -{0xAB01, 0xAB06}, {0xAB09, 0xAB0E}, {0xAB11, 0xAB16}, {0xAB20, 0xAB26}, {0xAB28, 0xAB2E}, {0xAB30, 0xAB5A}, {0xAB5C, 0xAB69}, {0xAB70, 0xABE2}, {0xAC00, 0xD7A3}, {0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, -{0xF900, 0xFA6D}, {0xFA70, 0xFAD9}, {0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB1D}, {0xFB1F, 0xFB28}, {0xFB2A, 0xFB36}, {0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41}, {0xFB43, 0xFB44}, -{0xFB46, 0xFBB1}, {0xFBD3, 0xFD3D}, {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFB}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC}, {0xFF21, 0xFF3A}, {0xFF41, 0xFF5A}, {0xFF66, 0xFFBE}, {0xFFC2, 0xFFC7}, -{0xFFCA, 0xFFCF}, {0xFFD2, 0xFFD7}, {0xFFDA, 0xFFDC}, {0x10000, 0x1000B}, {0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D}, {0x1003F, 0x1004D}, {0x10050, 0x1005D}, {0x10080, 0x100FA}, -{0x10280, 0x1029C}, {0x102A0, 0x102D0}, {0x10300, 0x1031F}, {0x1032D, 0x10340}, {0x10342, 0x10349}, {0x10350, 0x10375}, {0x10380, 0x1039D}, {0x103A0, 0x103C3}, {0x103C8, 0x103CF}, {0x10400, 0x1049D}, -{0x104B0, 0x104D3}, {0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563}, {0x10600, 0x10736}, {0x10740, 0x10755}, {0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808}, {0x1080A, 0x10835}, -{0x10837, 0x10838}, {0x1083C, 0x1083C}, {0x1083F, 0x10855}, {0x10860, 0x10876}, {0x10880, 0x1089E}, {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x10900, 0x10915}, {0x10920, 0x10939}, {0x10980, 0x109B7}, -{0x109BE, 0x109BF}, {0x10A00, 0x10A00}, {0x10A10, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35}, {0x10A60, 0x10A7C}, {0x10A80, 0x10A9C}, {0x10AC0, 0x10AC7}, {0x10AC9, 0x10AE4}, {0x10B00, 0x10B35}, -{0x10B40, 0x10B55}, {0x10B60, 0x10B72}, {0x10B80, 0x10B91}, {0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2}, {0x10D00, 0x10D23}, {0x10E80, 0x10EA9}, {0x10EB0, 0x10EB1}, {0x10F00, 0x10F1C}, -{0x10F27, 0x10F27}, {0x10F30, 0x10F45}, {0x10FB0, 0x10FC4}, {0x10FE0, 0x10FF6}, {0x11003, 0x11037}, {0x11083, 0x110AF}, {0x110D0, 0x110E8}, {0x11103, 0x11126}, {0x11144, 0x11144}, {0x11147, 0x11147}, -{0x11150, 0x11172}, {0x11176, 0x11176}, {0x11183, 0x111B2}, {0x111C1, 0x111C4}, {0x111DA, 0x111DA}, {0x111DC, 0x111DC}, {0x11200, 0x11211}, {0x11213, 0x1122B}, {0x11280, 0x11286}, {0x11288, 0x11288}, -{0x1128A, 0x1128D}, {0x1128F, 0x1129D}, {0x1129F, 0x112A8}, {0x112B0, 0x112DE}, {0x11305, 0x1130C}, {0x1130F, 0x11310}, {0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333}, {0x11335, 0x11339}, -{0x1133D, 0x1133D}, {0x11350, 0x11350}, {0x1135D, 0x11361}, {0x11400, 0x11434}, {0x11447, 0x1144A}, {0x1145F, 0x11461}, {0x11480, 0x114AF}, {0x114C4, 0x114C5}, {0x114C7, 0x114C7}, {0x11580, 0x115AE}, 
-{0x115D8, 0x115DB}, {0x11600, 0x1162F}, {0x11644, 0x11644}, {0x11680, 0x116AA}, {0x116B8, 0x116B8}, {0x11700, 0x1171A}, {0x11800, 0x1182B}, {0x118A0, 0x118DF}, {0x118FF, 0x11906}, {0x11909, 0x11909}, -{0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x1192F}, {0x1193F, 0x1193F}, {0x11941, 0x11941}, {0x119A0, 0x119A7}, {0x119AA, 0x119D0}, {0x119E1, 0x119E1}, {0x119E3, 0x119E3}, {0x11A00, 0x11A00}, -{0x11A0B, 0x11A32}, {0x11A3A, 0x11A3A}, {0x11A50, 0x11A50}, {0x11A5C, 0x11A89}, {0x11A9D, 0x11A9D}, {0x11AC0, 0x11AF8}, {0x11C00, 0x11C08}, {0x11C0A, 0x11C2E}, {0x11C40, 0x11C40}, {0x11C72, 0x11C8F}, -{0x11D00, 0x11D06}, {0x11D08, 0x11D09}, {0x11D0B, 0x11D30}, {0x11D46, 0x11D46}, {0x11D60, 0x11D65}, {0x11D67, 0x11D68}, {0x11D6A, 0x11D89}, {0x11D98, 0x11D98}, {0x11EE0, 0x11EF2}, {0x11FB0, 0x11FB0}, -{0x12000, 0x12399}, {0x12480, 0x12543}, {0x13000, 0x1342E}, {0x14400, 0x14646}, {0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16AD0, 0x16AED}, {0x16B00, 0x16B2F}, {0x16B40, 0x16B43}, {0x16B63, 0x16B77}, -{0x16B7D, 0x16B8F}, {0x16E40, 0x16E7F}, {0x16F00, 0x16F4A}, {0x16F50, 0x16F50}, {0x16F93, 0x16F9F}, {0x16FE0, 0x16FE1}, {0x16FE3, 0x16FE3}, {0x17000, 0x187F7}, {0x18800, 0x18CD5}, {0x18D00, 0x18D08}, -{0x1B000, 0x1B11E}, {0x1B150, 0x1B152}, {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88}, {0x1BC90, 0x1BC99}, {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, -{0x1D49E, 0x1D49F}, {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, -{0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D6C0}, {0x1D6C2, 0x1D6DA}, {0x1D6DC, 0x1D6FA}, -{0x1D6FC, 0x1D714}, {0x1D716, 0x1D734}, {0x1D736, 0x1D74E}, {0x1D750, 0x1D76E}, {0x1D770, 0x1D788}, {0x1D78A, 0x1D7A8}, {0x1D7AA, 0x1D7C2}, {0x1D7C4, 0x1D7CB}, {0x1E100, 0x1E12C}, {0x1E137, 0x1E13D}, -{0x1E14E, 0x1E14E}, {0x1E2C0, 0x1E2EB}, {0x1E800, 0x1E8C4}, {0x1E900, 0x1E943}, {0x1E94B, 0x1E94B}, {0x1EE00, 0x1EE03}, {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24}, {0x1EE27, 0x1EE27}, -{0x1EE29, 0x1EE32}, {0x1EE34, 0x1EE37}, {0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B}, {0x1EE42, 0x1EE42}, {0x1EE47, 0x1EE47}, {0x1EE49, 0x1EE49}, {0x1EE4B, 0x1EE4B}, {0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52}, -{0x1EE54, 0x1EE54}, {0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59}, {0x1EE5B, 0x1EE5B}, {0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F}, {0x1EE61, 0x1EE62}, {0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A}, {0x1EE6C, 0x1EE72}, -{0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C}, {0x1EE7E, 0x1EE7E}, {0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B}, {0x1EEA1, 0x1EEA3}, {0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB}, {0x20000, 0x2A6DD}, {0x2A700, 0x2B734}, -{0x2B740, 0x2B81D}, {0x2B820, 0x2CEA1}, {0x2CEB0, 0x2EBE0}, {0x2F800, 0x2FA1D}, {0x30000, 0x3134A}, -}; - -static const std::vector> whitespace_ranges = { -{0x9, 0xD}, {0x1C, 0x20}, {0x85, 0x85}, {0xA0, 0xA0}, {0x1680, 0x1680}, {0x2000, 0x200A}, {0x2028, 0x2029}, {0x202F, 0x202F}, {0x205F, 0x205F}, {0x3000, 0x3000}, -}; - -static const std::vector> accent_mark_ranges = { -{0x300, 0x36F}, {0x483, 0x489}, {0x591, 0x5BD}, {0x5BF, 0x5BF}, {0x5C1, 0x5C2}, {0x5C4, 0x5C5}, {0x5C7, 0x5C7}, {0x610, 0x61A}, {0x64B, 0x65F}, {0x670, 0x670}, {0x6D6, 0x6DC}, {0x6DF, 0x6E4}, -{0x6E7, 0x6E8}, {0x6EA, 0x6ED}, {0x711, 0x711}, {0x730, 0x74A}, {0x7A6, 0x7B0}, {0x7EB, 0x7F3}, {0x7FD, 0x7FD}, {0x816, 0x819}, {0x81B, 0x823}, {0x825, 0x827}, {0x829, 
0x82D}, {0x859, 0x85B}, -{0x8D3, 0x8E1}, {0x8E3, 0x903}, {0x93A, 0x93C}, {0x93E, 0x94F}, {0x951, 0x957}, {0x962, 0x963}, {0x981, 0x983}, {0x9BC, 0x9BC}, {0x9BE, 0x9C4}, {0x9C7, 0x9C8}, {0x9CB, 0x9CD}, {0x9D7, 0x9D7}, -{0x9E2, 0x9E3}, {0x9FE, 0x9FE}, {0xA01, 0xA03}, {0xA3C, 0xA3C}, {0xA3E, 0xA42}, {0xA47, 0xA48}, {0xA4B, 0xA4D}, {0xA51, 0xA51}, {0xA70, 0xA71}, {0xA75, 0xA75}, {0xA81, 0xA83}, {0xABC, 0xABC}, -{0xABE, 0xAC5}, {0xAC7, 0xAC9}, {0xACB, 0xACD}, {0xAE2, 0xAE3}, {0xAFA, 0xAFF}, {0xB01, 0xB03}, {0xB3C, 0xB3C}, {0xB3E, 0xB44}, {0xB47, 0xB48}, {0xB4B, 0xB4D}, {0xB55, 0xB57}, {0xB62, 0xB63}, -{0xB82, 0xB82}, {0xBBE, 0xBC2}, {0xBC6, 0xBC8}, {0xBCA, 0xBCD}, {0xBD7, 0xBD7}, {0xC00, 0xC04}, {0xC3E, 0xC44}, {0xC46, 0xC48}, {0xC4A, 0xC4D}, {0xC55, 0xC56}, {0xC62, 0xC63}, {0xC81, 0xC83}, -{0xCBC, 0xCBC}, {0xCBE, 0xCC4}, {0xCC6, 0xCC8}, {0xCCA, 0xCCD}, {0xCD5, 0xCD6}, {0xCE2, 0xCE3}, {0xD00, 0xD03}, {0xD3B, 0xD3C}, {0xD3E, 0xD44}, {0xD46, 0xD48}, {0xD4A, 0xD4D}, {0xD57, 0xD57}, -{0xD62, 0xD63}, {0xD81, 0xD83}, {0xDCA, 0xDCA}, {0xDCF, 0xDD4}, {0xDD6, 0xDD6}, {0xDD8, 0xDDF}, {0xDF2, 0xDF3}, {0xE31, 0xE31}, {0xE34, 0xE3A}, {0xE47, 0xE4E}, {0xEB1, 0xEB1}, {0xEB4, 0xEBC}, -{0xEC8, 0xECD}, {0xF18, 0xF19}, {0xF35, 0xF35}, {0xF37, 0xF37}, {0xF39, 0xF39}, {0xF3E, 0xF3F}, {0xF71, 0xF84}, {0xF86, 0xF87}, {0xF8D, 0xF97}, {0xF99, 0xFBC}, {0xFC6, 0xFC6}, {0x102B, 0x103E}, -{0x1056, 0x1059}, {0x105E, 0x1060}, {0x1062, 0x1064}, {0x1067, 0x106D}, {0x1071, 0x1074}, {0x1082, 0x108D}, {0x108F, 0x108F}, {0x109A, 0x109D}, {0x135D, 0x135F}, {0x1712, 0x1714}, {0x1732, 0x1734}, -{0x1752, 0x1753}, {0x1772, 0x1773}, {0x17B4, 0x17D3}, {0x17DD, 0x17DD}, {0x180B, 0x180D}, {0x1885, 0x1886}, {0x18A9, 0x18A9}, {0x1920, 0x192B}, {0x1930, 0x193B}, {0x1A17, 0x1A1B}, {0x1A55, 0x1A5E}, -{0x1A60, 0x1A7C}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0}, {0x1B00, 0x1B04}, {0x1B34, 0x1B44}, {0x1B6B, 0x1B73}, {0x1B80, 0x1B82}, {0x1BA1, 0x1BAD}, {0x1BE6, 0x1BF3}, {0x1C24, 0x1C37}, {0x1CD0, 0x1CD2}, -{0x1CD4, 0x1CE8}, {0x1CED, 0x1CED}, {0x1CF4, 0x1CF4}, {0x1CF7, 0x1CF9}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF}, {0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2D7F, 0x2D7F}, {0x2DE0, 0x2DFF}, {0x302A, 0x302F}, -{0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D}, {0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA802, 0xA802}, {0xA806, 0xA806}, {0xA80B, 0xA80B}, {0xA823, 0xA827}, {0xA82C, 0xA82C}, {0xA880, 0xA881}, -{0xA8B4, 0xA8C5}, {0xA8E0, 0xA8F1}, {0xA8FF, 0xA8FF}, {0xA926, 0xA92D}, {0xA947, 0xA953}, {0xA980, 0xA983}, {0xA9B3, 0xA9C0}, {0xA9E5, 0xA9E5}, {0xAA29, 0xAA36}, {0xAA43, 0xAA43}, {0xAA4C, 0xAA4D}, -{0xAA7B, 0xAA7D}, {0xAAB0, 0xAAB0}, {0xAAB2, 0xAAB4}, {0xAAB7, 0xAAB8}, {0xAABE, 0xAABF}, {0xAAC1, 0xAAC1}, {0xAAEB, 0xAAEF}, {0xAAF5, 0xAAF6}, {0xABE3, 0xABEA}, {0xABEC, 0xABED}, {0xFB1E, 0xFB1E}, -{0xFE00, 0xFE0F}, {0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x102E0, 0x102E0}, {0x10376, 0x1037A}, {0x10A01, 0x10A03}, {0x10A05, 0x10A06}, {0x10A0C, 0x10A0F}, {0x10A38, 0x10A3A}, {0x10A3F, 0x10A3F}, -{0x10AE5, 0x10AE6}, {0x10D24, 0x10D27}, {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11000, 0x11002}, {0x11038, 0x11046}, {0x1107F, 0x11082}, {0x110B0, 0x110BA}, {0x11100, 0x11102}, {0x11127, 0x11134}, -{0x11145, 0x11146}, {0x11173, 0x11173}, {0x11180, 0x11182}, {0x111B3, 0x111C0}, {0x111C9, 0x111CC}, {0x111CE, 0x111CF}, {0x1122C, 0x11237}, {0x1123E, 0x1123E}, {0x112DF, 0x112EA}, {0x11300, 0x11303}, -{0x1133B, 0x1133C}, {0x1133E, 0x11344}, {0x11347, 0x11348}, {0x1134B, 0x1134D}, {0x11357, 0x11357}, {0x11362, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374}, {0x11435, 0x11446}, 
{0x1145E, 0x1145E}, -{0x114B0, 0x114C3}, {0x115AF, 0x115B5}, {0x115B8, 0x115C0}, {0x115DC, 0x115DD}, {0x11630, 0x11640}, {0x116AB, 0x116B7}, {0x1171D, 0x1172B}, {0x1182C, 0x1183A}, {0x11930, 0x11935}, {0x11937, 0x11938}, -{0x1193B, 0x1193E}, {0x11940, 0x11940}, {0x11942, 0x11943}, {0x119D1, 0x119D7}, {0x119DA, 0x119E0}, {0x119E4, 0x119E4}, {0x11A01, 0x11A0A}, {0x11A33, 0x11A39}, {0x11A3B, 0x11A3E}, {0x11A47, 0x11A47}, -{0x11A51, 0x11A5B}, {0x11A8A, 0x11A99}, {0x11C2F, 0x11C36}, {0x11C38, 0x11C3F}, {0x11C92, 0x11CA7}, {0x11CA9, 0x11CB6}, {0x11D31, 0x11D36}, {0x11D3A, 0x11D3A}, {0x11D3C, 0x11D3D}, {0x11D3F, 0x11D45}, -{0x11D47, 0x11D47}, {0x11D8A, 0x11D8E}, {0x11D90, 0x11D91}, {0x11D93, 0x11D97}, {0x11EF3, 0x11EF6}, {0x16AF0, 0x16AF4}, {0x16B30, 0x16B36}, {0x16F4F, 0x16F4F}, {0x16F51, 0x16F87}, {0x16F8F, 0x16F92}, -{0x16FE4, 0x16FE4}, {0x16FF0, 0x16FF1}, {0x1BC9D, 0x1BC9E}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, {0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, {0x1D242, 0x1D244}, {0x1DA00, 0x1DA36}, -{0x1DA3B, 0x1DA6C}, {0x1DA75, 0x1DA75}, {0x1DA84, 0x1DA84}, {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, -{0x1E130, 0x1E136}, {0x1E2EC, 0x1E2EF}, {0x1E8D0, 0x1E8D6}, {0x1E944, 0x1E94A}, {0xE0100, 0xE01EF}, -}; - -static const std::vector> punctuation_ranges = { -{0x21, 0x23}, {0x25, 0x2A}, {0x2C, 0x2F}, {0x3A, 0x3B}, {0x3F, 0x40}, {0x5B, 0x5D}, {0x5F, 0x5F}, {0x7B, 0x7B}, {0x7D, 0x7D}, {0xA1, 0xA1}, {0xA7, 0xA7}, {0xAB, 0xAB}, {0xB6, 0xB7}, {0xBB, 0xBB}, -{0xBF, 0xBF}, {0x37E, 0x37E}, {0x387, 0x387}, {0x55A, 0x55F}, {0x589, 0x58A}, {0x5BE, 0x5BE}, {0x5C0, 0x5C0}, {0x5C3, 0x5C3}, {0x5C6, 0x5C6}, {0x5F3, 0x5F4}, {0x609, 0x60A}, {0x60C, 0x60D}, -{0x61B, 0x61B}, {0x61E, 0x61F}, {0x66A, 0x66D}, {0x6D4, 0x6D4}, {0x700, 0x70D}, {0x7F7, 0x7F9}, {0x830, 0x83E}, {0x85E, 0x85E}, {0x964, 0x965}, {0x970, 0x970}, {0x9FD, 0x9FD}, {0xA76, 0xA76}, -{0xAF0, 0xAF0}, {0xC77, 0xC77}, {0xC84, 0xC84}, {0xDF4, 0xDF4}, {0xE4F, 0xE4F}, {0xE5A, 0xE5B}, {0xF04, 0xF12}, {0xF14, 0xF14}, {0xF3A, 0xF3D}, {0xF85, 0xF85}, {0xFD0, 0xFD4}, {0xFD9, 0xFDA}, -{0x104A, 0x104F}, {0x10FB, 0x10FB}, {0x1360, 0x1368}, {0x1400, 0x1400}, {0x166E, 0x166E}, {0x169B, 0x169C}, {0x16EB, 0x16ED}, {0x1735, 0x1736}, {0x17D4, 0x17D6}, {0x17D8, 0x17DA}, {0x1800, 0x180A}, -{0x1944, 0x1945}, {0x1A1E, 0x1A1F}, {0x1AA0, 0x1AA6}, {0x1AA8, 0x1AAD}, {0x1B5A, 0x1B60}, {0x1BFC, 0x1BFF}, {0x1C3B, 0x1C3F}, {0x1C7E, 0x1C7F}, {0x1CC0, 0x1CC7}, {0x1CD3, 0x1CD3}, {0x2010, 0x2027}, -{0x2030, 0x2043}, {0x2045, 0x2051}, {0x2053, 0x205E}, {0x207D, 0x207E}, {0x208D, 0x208E}, {0x2308, 0x230B}, {0x2329, 0x232A}, {0x2768, 0x2775}, {0x27C5, 0x27C6}, {0x27E6, 0x27EF}, {0x2983, 0x2998}, -{0x29D8, 0x29DB}, {0x29FC, 0x29FD}, {0x2CF9, 0x2CFC}, {0x2CFE, 0x2CFF}, {0x2D70, 0x2D70}, {0x2E00, 0x2E2E}, {0x2E30, 0x2E4F}, {0x2E52, 0x2E52}, {0x3001, 0x3003}, {0x3008, 0x3011}, {0x3014, 0x301F}, -{0x3030, 0x3030}, {0x303D, 0x303D}, {0x30A0, 0x30A0}, {0x30FB, 0x30FB}, {0xA4FE, 0xA4FF}, {0xA60D, 0xA60F}, {0xA673, 0xA673}, {0xA67E, 0xA67E}, {0xA6F2, 0xA6F7}, {0xA874, 0xA877}, {0xA8CE, 0xA8CF}, -{0xA8F8, 0xA8FA}, {0xA8FC, 0xA8FC}, {0xA92E, 0xA92F}, {0xA95F, 0xA95F}, {0xA9C1, 0xA9CD}, {0xA9DE, 0xA9DF}, {0xAA5C, 0xAA5F}, {0xAADE, 0xAADF}, {0xAAF0, 0xAAF1}, {0xABEB, 0xABEB}, {0xFD3E, 0xFD3F}, -{0xFE10, 0xFE19}, {0xFE30, 0xFE52}, {0xFE54, 0xFE61}, {0xFE63, 0xFE63}, {0xFE68, 0xFE68}, {0xFE6A, 0xFE6B}, {0xFF01, 0xFF03}, {0xFF05, 0xFF0A}, {0xFF0C, 0xFF0F}, {0xFF1A, 0xFF1B}, {0xFF1F, 0xFF20}, 
-{0xFF3B, 0xFF3D}, {0xFF3F, 0xFF3F}, {0xFF5B, 0xFF5B}, {0xFF5D, 0xFF5D}, {0xFF5F, 0xFF65}, {0x10100, 0x10102}, {0x1039F, 0x1039F}, {0x103D0, 0x103D0}, {0x1056F, 0x1056F}, {0x10857, 0x10857}, -{0x1091F, 0x1091F}, {0x1093F, 0x1093F}, {0x10A50, 0x10A58}, {0x10A7F, 0x10A7F}, {0x10AF0, 0x10AF6}, {0x10B39, 0x10B3F}, {0x10B99, 0x10B9C}, {0x10EAD, 0x10EAD}, {0x10F55, 0x10F59}, {0x11047, 0x1104D}, -{0x110BB, 0x110BC}, {0x110BE, 0x110C1}, {0x11140, 0x11143}, {0x11174, 0x11175}, {0x111C5, 0x111C8}, {0x111CD, 0x111CD}, {0x111DB, 0x111DB}, {0x111DD, 0x111DF}, {0x11238, 0x1123D}, {0x112A9, 0x112A9}, -{0x1144B, 0x1144F}, {0x1145A, 0x1145B}, {0x1145D, 0x1145D}, {0x114C6, 0x114C6}, {0x115C1, 0x115D7}, {0x11641, 0x11643}, {0x11660, 0x1166C}, {0x1173C, 0x1173E}, {0x1183B, 0x1183B}, {0x11944, 0x11946}, -{0x119E2, 0x119E2}, {0x11A3F, 0x11A46}, {0x11A9A, 0x11A9C}, {0x11A9E, 0x11AA2}, {0x11C41, 0x11C45}, {0x11C70, 0x11C71}, {0x11EF7, 0x11EF8}, {0x11FFF, 0x11FFF}, {0x12470, 0x12474}, {0x16A6E, 0x16A6F}, -{0x16AF5, 0x16AF5}, {0x16B37, 0x16B3B}, {0x16B44, 0x16B44}, {0x16E97, 0x16E9A}, {0x16FE2, 0x16FE2}, {0x1BC9F, 0x1BC9F}, {0x1DA87, 0x1DA8B}, {0x1E95E, 0x1E95F}, -}; - -static const std::vector> symbol_ranges = { -{0x24, 0x24}, {0x2B, 0x2B}, {0x3C, 0x3E}, {0x5E, 0x5E}, {0x60, 0x60}, {0x7C, 0x7C}, {0x7E, 0x7E}, {0xA2, 0xA6}, {0xA8, 0xA9}, {0xAC, 0xAC}, {0xAE, 0xB1}, {0xB4, 0xB4}, {0xB8, 0xB8}, {0xD7, 0xD7}, -{0xF7, 0xF7}, {0x2C2, 0x2C5}, {0x2D2, 0x2DF}, {0x2E5, 0x2EB}, {0x2ED, 0x2ED}, {0x2EF, 0x2FF}, {0x375, 0x375}, {0x384, 0x385}, {0x3F6, 0x3F6}, {0x482, 0x482}, {0x58D, 0x58F}, {0x606, 0x608}, -{0x60B, 0x60B}, {0x60E, 0x60F}, {0x6DE, 0x6DE}, {0x6E9, 0x6E9}, {0x6FD, 0x6FE}, {0x7F6, 0x7F6}, {0x7FE, 0x7FF}, {0x9F2, 0x9F3}, {0x9FA, 0x9FB}, {0xAF1, 0xAF1}, {0xB70, 0xB70}, {0xBF3, 0xBFA}, -{0xC7F, 0xC7F}, {0xD4F, 0xD4F}, {0xD79, 0xD79}, {0xE3F, 0xE3F}, {0xF01, 0xF03}, {0xF13, 0xF13}, {0xF15, 0xF17}, {0xF1A, 0xF1F}, {0xF34, 0xF34}, {0xF36, 0xF36}, {0xF38, 0xF38}, {0xFBE, 0xFC5}, -{0xFC7, 0xFCC}, {0xFCE, 0xFCF}, {0xFD5, 0xFD8}, {0x109E, 0x109F}, {0x1390, 0x1399}, {0x166D, 0x166D}, {0x17DB, 0x17DB}, {0x1940, 0x1940}, {0x19DE, 0x19FF}, {0x1B61, 0x1B6A}, {0x1B74, 0x1B7C}, -{0x1FBD, 0x1FBD}, {0x1FBF, 0x1FC1}, {0x1FCD, 0x1FCF}, {0x1FDD, 0x1FDF}, {0x1FED, 0x1FEF}, {0x1FFD, 0x1FFE}, {0x2044, 0x2044}, {0x2052, 0x2052}, {0x207A, 0x207C}, {0x208A, 0x208C}, {0x20A0, 0x20BF}, -{0x2100, 0x2101}, {0x2103, 0x2106}, {0x2108, 0x2109}, {0x2114, 0x2114}, {0x2116, 0x2118}, {0x211E, 0x2123}, {0x2125, 0x2125}, {0x2127, 0x2127}, {0x2129, 0x2129}, {0x212E, 0x212E}, {0x213A, 0x213B}, -{0x2140, 0x2144}, {0x214A, 0x214D}, {0x214F, 0x214F}, {0x218A, 0x218B}, {0x2190, 0x2307}, {0x230C, 0x2328}, {0x232B, 0x2426}, {0x2440, 0x244A}, {0x249C, 0x24E9}, {0x2500, 0x2767}, {0x2794, 0x27C4}, -{0x27C7, 0x27E5}, {0x27F0, 0x2982}, {0x2999, 0x29D7}, {0x29DC, 0x29FB}, {0x29FE, 0x2B73}, {0x2B76, 0x2B95}, {0x2B97, 0x2BFF}, {0x2CE5, 0x2CEA}, {0x2E50, 0x2E51}, {0x2E80, 0x2E99}, {0x2E9B, 0x2EF3}, -{0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB}, {0x3004, 0x3004}, {0x3012, 0x3013}, {0x3020, 0x3020}, {0x3036, 0x3037}, {0x303E, 0x303F}, {0x309B, 0x309C}, {0x3190, 0x3191}, {0x3196, 0x319F}, {0x31C0, 0x31E3}, -{0x3200, 0x321E}, {0x322A, 0x3247}, {0x3250, 0x3250}, {0x3260, 0x327F}, {0x328A, 0x32B0}, {0x32C0, 0x33FF}, {0x4DC0, 0x4DFF}, {0xA490, 0xA4C6}, {0xA700, 0xA716}, {0xA720, 0xA721}, {0xA789, 0xA78A}, -{0xA828, 0xA82B}, {0xA836, 0xA839}, {0xAA77, 0xAA79}, {0xAB5B, 0xAB5B}, {0xAB6A, 0xAB6B}, {0xFB29, 0xFB29}, {0xFBB2, 0xFBC1}, {0xFDFC, 0xFDFD}, {0xFE62, 0xFE62}, {0xFE64, 0xFE66}, 
{0xFE69, 0xFE69}, -{0xFF04, 0xFF04}, {0xFF0B, 0xFF0B}, {0xFF1C, 0xFF1E}, {0xFF3E, 0xFF3E}, {0xFF40, 0xFF40}, {0xFF5C, 0xFF5C}, {0xFF5E, 0xFF5E}, {0xFFE0, 0xFFE6}, {0xFFE8, 0xFFEE}, {0xFFFC, 0xFFFD}, {0x10137, 0x1013F}, -{0x10179, 0x10189}, {0x1018C, 0x1018E}, {0x10190, 0x1019C}, {0x101A0, 0x101A0}, {0x101D0, 0x101FC}, {0x10877, 0x10878}, {0x10AC8, 0x10AC8}, {0x1173F, 0x1173F}, {0x11FD5, 0x11FF1}, {0x16B3C, 0x16B3F}, -{0x16B45, 0x16B45}, {0x1BC9C, 0x1BC9C}, {0x1D000, 0x1D0F5}, {0x1D100, 0x1D126}, {0x1D129, 0x1D164}, {0x1D16A, 0x1D16C}, {0x1D183, 0x1D184}, {0x1D18C, 0x1D1A9}, {0x1D1AE, 0x1D1E8}, {0x1D200, 0x1D241}, -{0x1D245, 0x1D245}, {0x1D300, 0x1D356}, {0x1D6C1, 0x1D6C1}, {0x1D6DB, 0x1D6DB}, {0x1D6FB, 0x1D6FB}, {0x1D715, 0x1D715}, {0x1D735, 0x1D735}, {0x1D74F, 0x1D74F}, {0x1D76F, 0x1D76F}, {0x1D789, 0x1D789}, -{0x1D7A9, 0x1D7A9}, {0x1D7C3, 0x1D7C3}, {0x1D800, 0x1D9FF}, {0x1DA37, 0x1DA3A}, {0x1DA6D, 0x1DA74}, {0x1DA76, 0x1DA83}, {0x1DA85, 0x1DA86}, {0x1E14F, 0x1E14F}, {0x1E2FF, 0x1E2FF}, {0x1ECAC, 0x1ECAC}, -{0x1ECB0, 0x1ECB0}, {0x1ED2E, 0x1ED2E}, {0x1EEF0, 0x1EEF1}, {0x1F000, 0x1F02B}, {0x1F030, 0x1F093}, {0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF}, {0x1F0C1, 0x1F0CF}, {0x1F0D1, 0x1F0F5}, {0x1F10D, 0x1F1AD}, -{0x1F1E6, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248}, {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F6D7}, {0x1F6E0, 0x1F6EC}, {0x1F6F0, 0x1F6FC}, {0x1F700, 0x1F773}, {0x1F780, 0x1F7D8}, -{0x1F7E0, 0x1F7EB}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, {0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD}, {0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F978}, {0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1FA53}, -{0x1FA60, 0x1FA6D}, {0x1FA70, 0x1FA74}, {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8}, {0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 0x1FAD6}, {0x1FB00, 0x1FB92}, {0x1FB94, 0x1FBCA}, -}; - -static const std::vector> control_ranges = { -{0x0, 0x8}, {0xE, 0x1B}, {0x7F, 0x84}, {0x86, 0x9F}, {0xAD, 0xAD}, {0x378, 0x379}, {0x380, 0x383}, {0x38B, 0x38B}, {0x38D, 0x38D}, {0x3A2, 0x3A2}, {0x530, 0x530}, {0x557, 0x558}, {0x58B, 0x58C}, -{0x590, 0x590}, {0x5C8, 0x5CF}, {0x5EB, 0x5EE}, {0x5F5, 0x605}, {0x61C, 0x61D}, {0x6DD, 0x6DD}, {0x70E, 0x70F}, {0x74B, 0x74C}, {0x7B2, 0x7BF}, {0x7FB, 0x7FC}, {0x82E, 0x82F}, {0x83F, 0x83F}, -{0x85C, 0x85D}, {0x85F, 0x85F}, {0x86B, 0x89F}, {0x8B5, 0x8B5}, {0x8C8, 0x8D2}, {0x8E2, 0x8E2}, {0x984, 0x984}, {0x98D, 0x98E}, {0x991, 0x992}, {0x9A9, 0x9A9}, {0x9B1, 0x9B1}, {0x9B3, 0x9B5}, -{0x9BA, 0x9BB}, {0x9C5, 0x9C6}, {0x9C9, 0x9CA}, {0x9CF, 0x9D6}, {0x9D8, 0x9DB}, {0x9DE, 0x9DE}, {0x9E4, 0x9E5}, {0x9FF, 0xA00}, {0xA04, 0xA04}, {0xA0B, 0xA0E}, {0xA11, 0xA12}, {0xA29, 0xA29}, -{0xA31, 0xA31}, {0xA34, 0xA34}, {0xA37, 0xA37}, {0xA3A, 0xA3B}, {0xA3D, 0xA3D}, {0xA43, 0xA46}, {0xA49, 0xA4A}, {0xA4E, 0xA50}, {0xA52, 0xA58}, {0xA5D, 0xA5D}, {0xA5F, 0xA65}, {0xA77, 0xA80}, -{0xA84, 0xA84}, {0xA8E, 0xA8E}, {0xA92, 0xA92}, {0xAA9, 0xAA9}, {0xAB1, 0xAB1}, {0xAB4, 0xAB4}, {0xABA, 0xABB}, {0xAC6, 0xAC6}, {0xACA, 0xACA}, {0xACE, 0xACF}, {0xAD1, 0xADF}, {0xAE4, 0xAE5}, -{0xAF2, 0xAF8}, {0xB00, 0xB00}, {0xB04, 0xB04}, {0xB0D, 0xB0E}, {0xB11, 0xB12}, {0xB29, 0xB29}, {0xB31, 0xB31}, {0xB34, 0xB34}, {0xB3A, 0xB3B}, {0xB45, 0xB46}, {0xB49, 0xB4A}, {0xB4E, 0xB54}, -{0xB58, 0xB5B}, {0xB5E, 0xB5E}, {0xB64, 0xB65}, {0xB78, 0xB81}, {0xB84, 0xB84}, {0xB8B, 0xB8D}, {0xB91, 0xB91}, {0xB96, 0xB98}, {0xB9B, 0xB9B}, {0xB9D, 0xB9D}, {0xBA0, 0xBA2}, {0xBA5, 0xBA7}, -{0xBAB, 0xBAD}, {0xBBA, 0xBBD}, {0xBC3, 0xBC5}, {0xBC9, 0xBC9}, {0xBCE, 0xBCF}, {0xBD1, 0xBD6}, {0xBD8, 0xBE5}, {0xBFB, 0xBFF}, 
{0xC0D, 0xC0D}, {0xC11, 0xC11}, {0xC29, 0xC29}, {0xC3A, 0xC3C}, -{0xC45, 0xC45}, {0xC49, 0xC49}, {0xC4E, 0xC54}, {0xC57, 0xC57}, {0xC5B, 0xC5F}, {0xC64, 0xC65}, {0xC70, 0xC76}, {0xC8D, 0xC8D}, {0xC91, 0xC91}, {0xCA9, 0xCA9}, {0xCB4, 0xCB4}, {0xCBA, 0xCBB}, -{0xCC5, 0xCC5}, {0xCC9, 0xCC9}, {0xCCE, 0xCD4}, {0xCD7, 0xCDD}, {0xCDF, 0xCDF}, {0xCE4, 0xCE5}, {0xCF0, 0xCF0}, {0xCF3, 0xCFF}, {0xD0D, 0xD0D}, {0xD11, 0xD11}, {0xD45, 0xD45}, {0xD49, 0xD49}, -{0xD50, 0xD53}, {0xD64, 0xD65}, {0xD80, 0xD80}, {0xD84, 0xD84}, {0xD97, 0xD99}, {0xDB2, 0xDB2}, {0xDBC, 0xDBC}, {0xDBE, 0xDBF}, {0xDC7, 0xDC9}, {0xDCB, 0xDCE}, {0xDD5, 0xDD5}, {0xDD7, 0xDD7}, -{0xDE0, 0xDE5}, {0xDF0, 0xDF1}, {0xDF5, 0xE00}, {0xE3B, 0xE3E}, {0xE5C, 0xE80}, {0xE83, 0xE83}, {0xE85, 0xE85}, {0xE8B, 0xE8B}, {0xEA4, 0xEA4}, {0xEA6, 0xEA6}, {0xEBE, 0xEBF}, {0xEC5, 0xEC5}, -{0xEC7, 0xEC7}, {0xECE, 0xECF}, {0xEDA, 0xEDB}, {0xEE0, 0xEFF}, {0xF48, 0xF48}, {0xF6D, 0xF70}, {0xF98, 0xF98}, {0xFBD, 0xFBD}, {0xFCD, 0xFCD}, {0xFDB, 0xFFF}, {0x10C6, 0x10C6}, {0x10C8, 0x10CC}, -{0x10CE, 0x10CF}, {0x1249, 0x1249}, {0x124E, 0x124F}, {0x1257, 0x1257}, {0x1259, 0x1259}, {0x125E, 0x125F}, {0x1289, 0x1289}, {0x128E, 0x128F}, {0x12B1, 0x12B1}, {0x12B6, 0x12B7}, {0x12BF, 0x12BF}, -{0x12C1, 0x12C1}, {0x12C6, 0x12C7}, {0x12D7, 0x12D7}, {0x1311, 0x1311}, {0x1316, 0x1317}, {0x135B, 0x135C}, {0x137D, 0x137F}, {0x139A, 0x139F}, {0x13F6, 0x13F7}, {0x13FE, 0x13FF}, {0x169D, 0x169F}, -{0x16F9, 0x16FF}, {0x170D, 0x170D}, {0x1715, 0x171F}, {0x1737, 0x173F}, {0x1754, 0x175F}, {0x176D, 0x176D}, {0x1771, 0x1771}, {0x1774, 0x177F}, {0x17DE, 0x17DF}, {0x17EA, 0x17EF}, {0x17FA, 0x17FF}, -{0x180E, 0x180F}, {0x181A, 0x181F}, {0x1879, 0x187F}, {0x18AB, 0x18AF}, {0x18F6, 0x18FF}, {0x191F, 0x191F}, {0x192C, 0x192F}, {0x193C, 0x193F}, {0x1941, 0x1943}, {0x196E, 0x196F}, {0x1975, 0x197F}, -{0x19AC, 0x19AF}, {0x19CA, 0x19CF}, {0x19DB, 0x19DD}, {0x1A1C, 0x1A1D}, {0x1A5F, 0x1A5F}, {0x1A7D, 0x1A7E}, {0x1A8A, 0x1A8F}, {0x1A9A, 0x1A9F}, {0x1AAE, 0x1AAF}, {0x1AC1, 0x1AFF}, {0x1B4C, 0x1B4F}, -{0x1B7D, 0x1B7F}, {0x1BF4, 0x1BFB}, {0x1C38, 0x1C3A}, {0x1C4A, 0x1C4C}, {0x1C89, 0x1C8F}, {0x1CBB, 0x1CBC}, {0x1CC8, 0x1CCF}, {0x1CFB, 0x1CFF}, {0x1DFA, 0x1DFA}, {0x1F16, 0x1F17}, {0x1F1E, 0x1F1F}, -{0x1F46, 0x1F47}, {0x1F4E, 0x1F4F}, {0x1F58, 0x1F58}, {0x1F5A, 0x1F5A}, {0x1F5C, 0x1F5C}, {0x1F5E, 0x1F5E}, {0x1F7E, 0x1F7F}, {0x1FB5, 0x1FB5}, {0x1FC5, 0x1FC5}, {0x1FD4, 0x1FD5}, {0x1FDC, 0x1FDC}, -{0x1FF0, 0x1FF1}, {0x1FF5, 0x1FF5}, {0x1FFF, 0x1FFF}, {0x200B, 0x200F}, {0x202A, 0x202E}, {0x2060, 0x206F}, {0x2072, 0x2073}, {0x208F, 0x208F}, {0x209D, 0x209F}, {0x20C0, 0x20CF}, {0x20F1, 0x20FF}, -{0x218C, 0x218F}, {0x2427, 0x243F}, {0x244B, 0x245F}, {0x2B74, 0x2B75}, {0x2B96, 0x2B96}, {0x2C2F, 0x2C2F}, {0x2C5F, 0x2C5F}, {0x2CF4, 0x2CF8}, {0x2D26, 0x2D26}, {0x2D28, 0x2D2C}, {0x2D2E, 0x2D2F}, -{0x2D68, 0x2D6E}, {0x2D71, 0x2D7E}, {0x2D97, 0x2D9F}, {0x2DA7, 0x2DA7}, {0x2DAF, 0x2DAF}, {0x2DB7, 0x2DB7}, {0x2DBF, 0x2DBF}, {0x2DC7, 0x2DC7}, {0x2DCF, 0x2DCF}, {0x2DD7, 0x2DD7}, {0x2DDF, 0x2DDF}, -{0x2E53, 0x2E7F}, {0x2E9A, 0x2E9A}, {0x2EF4, 0x2EFF}, {0x2FD6, 0x2FEF}, {0x2FFC, 0x2FFF}, {0x3040, 0x3040}, {0x3097, 0x3098}, {0x3100, 0x3104}, {0x3130, 0x3130}, {0x318F, 0x318F}, {0x31E4, 0x31EF}, -{0x321F, 0x321F}, {0x9FFD, 0x9FFF}, {0xA48D, 0xA48F}, {0xA4C7, 0xA4CF}, {0xA62C, 0xA63F}, {0xA6F8, 0xA6FF}, {0xA7C0, 0xA7C1}, {0xA7CB, 0xA7F4}, {0xA82D, 0xA82F}, {0xA83A, 0xA83F}, {0xA878, 0xA87F}, -{0xA8C6, 0xA8CD}, {0xA8DA, 0xA8DF}, {0xA954, 0xA95E}, {0xA97D, 0xA97F}, {0xA9CE, 0xA9CE}, {0xA9DA, 0xA9DD}, {0xA9FF, 0xA9FF}, 
{0xAA37, 0xAA3F}, {0xAA4E, 0xAA4F}, {0xAA5A, 0xAA5B}, {0xAAC3, 0xAADA}, -{0xAAF7, 0xAB00}, {0xAB07, 0xAB08}, {0xAB0F, 0xAB10}, {0xAB17, 0xAB1F}, {0xAB27, 0xAB27}, {0xAB2F, 0xAB2F}, {0xAB6C, 0xAB6F}, {0xABEE, 0xABEF}, {0xABFA, 0xABFF}, {0xD7A4, 0xD7AF}, {0xD7C7, 0xD7CA}, -{0xD7FC, 0xF8FF}, {0xFA6E, 0xFA6F}, {0xFADA, 0xFAFF}, {0xFB07, 0xFB12}, {0xFB18, 0xFB1C}, {0xFB37, 0xFB37}, {0xFB3D, 0xFB3D}, {0xFB3F, 0xFB3F}, {0xFB42, 0xFB42}, {0xFB45, 0xFB45}, {0xFBC2, 0xFBD2}, -{0xFD40, 0xFD4F}, {0xFD90, 0xFD91}, {0xFDC8, 0xFDEF}, {0xFDFE, 0xFDFF}, {0xFE1A, 0xFE1F}, {0xFE53, 0xFE53}, {0xFE67, 0xFE67}, {0xFE6C, 0xFE6F}, {0xFE75, 0xFE75}, {0xFEFD, 0xFF00}, {0xFFBF, 0xFFC1}, -{0xFFC8, 0xFFC9}, {0xFFD0, 0xFFD1}, {0xFFD8, 0xFFD9}, {0xFFDD, 0xFFDF}, {0xFFE7, 0xFFE7}, {0xFFEF, 0xFFFB}, {0xFFFE, 0xFFFF}, {0x1000C, 0x1000C}, {0x10027, 0x10027}, {0x1003B, 0x1003B}, -{0x1003E, 0x1003E}, {0x1004E, 0x1004F}, {0x1005E, 0x1007F}, {0x100FB, 0x100FF}, {0x10103, 0x10106}, {0x10134, 0x10136}, {0x1018F, 0x1018F}, {0x1019D, 0x1019F}, {0x101A1, 0x101CF}, {0x101FE, 0x1027F}, -{0x1029D, 0x1029F}, {0x102D1, 0x102DF}, {0x102FC, 0x102FF}, {0x10324, 0x1032C}, {0x1034B, 0x1034F}, {0x1037B, 0x1037F}, {0x1039E, 0x1039E}, {0x103C4, 0x103C7}, {0x103D6, 0x103FF}, {0x1049E, 0x1049F}, -{0x104AA, 0x104AF}, {0x104D4, 0x104D7}, {0x104FC, 0x104FF}, {0x10528, 0x1052F}, {0x10564, 0x1056E}, {0x10570, 0x105FF}, {0x10737, 0x1073F}, {0x10756, 0x1075F}, {0x10768, 0x107FF}, {0x10806, 0x10807}, -{0x10809, 0x10809}, {0x10836, 0x10836}, {0x10839, 0x1083B}, {0x1083D, 0x1083E}, {0x10856, 0x10856}, {0x1089F, 0x108A6}, {0x108B0, 0x108DF}, {0x108F3, 0x108F3}, {0x108F6, 0x108FA}, {0x1091C, 0x1091E}, -{0x1093A, 0x1093E}, {0x10940, 0x1097F}, {0x109B8, 0x109BB}, {0x109D0, 0x109D1}, {0x10A04, 0x10A04}, {0x10A07, 0x10A0B}, {0x10A14, 0x10A14}, {0x10A18, 0x10A18}, {0x10A36, 0x10A37}, {0x10A3B, 0x10A3E}, -{0x10A49, 0x10A4F}, {0x10A59, 0x10A5F}, {0x10AA0, 0x10ABF}, {0x10AE7, 0x10AEA}, {0x10AF7, 0x10AFF}, {0x10B36, 0x10B38}, {0x10B56, 0x10B57}, {0x10B73, 0x10B77}, {0x10B92, 0x10B98}, {0x10B9D, 0x10BA8}, -{0x10BB0, 0x10BFF}, {0x10C49, 0x10C7F}, {0x10CB3, 0x10CBF}, {0x10CF3, 0x10CF9}, {0x10D28, 0x10D2F}, {0x10D3A, 0x10E5F}, {0x10E7F, 0x10E7F}, {0x10EAA, 0x10EAA}, {0x10EAE, 0x10EAF}, {0x10EB2, 0x10EFF}, -{0x10F28, 0x10F2F}, {0x10F5A, 0x10FAF}, {0x10FCC, 0x10FDF}, {0x10FF7, 0x10FFF}, {0x1104E, 0x11051}, {0x11070, 0x1107E}, {0x110BD, 0x110BD}, {0x110C2, 0x110CF}, {0x110E9, 0x110EF}, {0x110FA, 0x110FF}, -{0x11135, 0x11135}, {0x11148, 0x1114F}, {0x11177, 0x1117F}, {0x111E0, 0x111E0}, {0x111F5, 0x111FF}, {0x11212, 0x11212}, {0x1123F, 0x1127F}, {0x11287, 0x11287}, {0x11289, 0x11289}, {0x1128E, 0x1128E}, -{0x1129E, 0x1129E}, {0x112AA, 0x112AF}, {0x112EB, 0x112EF}, {0x112FA, 0x112FF}, {0x11304, 0x11304}, {0x1130D, 0x1130E}, {0x11311, 0x11312}, {0x11329, 0x11329}, {0x11331, 0x11331}, {0x11334, 0x11334}, -{0x1133A, 0x1133A}, {0x11345, 0x11346}, {0x11349, 0x1134A}, {0x1134E, 0x1134F}, {0x11351, 0x11356}, {0x11358, 0x1135C}, {0x11364, 0x11365}, {0x1136D, 0x1136F}, {0x11375, 0x113FF}, {0x1145C, 0x1145C}, -{0x11462, 0x1147F}, {0x114C8, 0x114CF}, {0x114DA, 0x1157F}, {0x115B6, 0x115B7}, {0x115DE, 0x115FF}, {0x11645, 0x1164F}, {0x1165A, 0x1165F}, {0x1166D, 0x1167F}, {0x116B9, 0x116BF}, {0x116CA, 0x116FF}, -{0x1171B, 0x1171C}, {0x1172C, 0x1172F}, {0x11740, 0x117FF}, {0x1183C, 0x1189F}, {0x118F3, 0x118FE}, {0x11907, 0x11908}, {0x1190A, 0x1190B}, {0x11914, 0x11914}, {0x11917, 0x11917}, {0x11936, 0x11936}, -{0x11939, 0x1193A}, {0x11947, 0x1194F}, {0x1195A, 0x1199F}, {0x119A8, 0x119A9}, 
{0x119D8, 0x119D9}, {0x119E5, 0x119FF}, {0x11A48, 0x11A4F}, {0x11AA3, 0x11ABF}, {0x11AF9, 0x11BFF}, {0x11C09, 0x11C09}, -{0x11C37, 0x11C37}, {0x11C46, 0x11C4F}, {0x11C6D, 0x11C6F}, {0x11C90, 0x11C91}, {0x11CA8, 0x11CA8}, {0x11CB7, 0x11CFF}, {0x11D07, 0x11D07}, {0x11D0A, 0x11D0A}, {0x11D37, 0x11D39}, {0x11D3B, 0x11D3B}, -{0x11D3E, 0x11D3E}, {0x11D48, 0x11D4F}, {0x11D5A, 0x11D5F}, {0x11D66, 0x11D66}, {0x11D69, 0x11D69}, {0x11D8F, 0x11D8F}, {0x11D92, 0x11D92}, {0x11D99, 0x11D9F}, {0x11DAA, 0x11EDF}, {0x11EF9, 0x11FAF}, -{0x11FB1, 0x11FBF}, {0x11FF2, 0x11FFE}, {0x1239A, 0x123FF}, {0x1246F, 0x1246F}, {0x12475, 0x1247F}, {0x12544, 0x12FFF}, {0x1342F, 0x143FF}, {0x14647, 0x167FF}, {0x16A39, 0x16A3F}, {0x16A5F, 0x16A5F}, -{0x16A6A, 0x16A6D}, {0x16A70, 0x16ACF}, {0x16AEE, 0x16AEF}, {0x16AF6, 0x16AFF}, {0x16B46, 0x16B4F}, {0x16B5A, 0x16B5A}, {0x16B62, 0x16B62}, {0x16B78, 0x16B7C}, {0x16B90, 0x16E3F}, {0x16E9B, 0x16EFF}, -{0x16F4B, 0x16F4E}, {0x16F88, 0x16F8E}, {0x16FA0, 0x16FDF}, {0x16FE5, 0x16FEF}, {0x16FF2, 0x16FFF}, {0x187F8, 0x187FF}, {0x18CD6, 0x18CFF}, {0x18D09, 0x1AFFF}, {0x1B11F, 0x1B14F}, {0x1B153, 0x1B163}, -{0x1B168, 0x1B16F}, {0x1B2FC, 0x1BBFF}, {0x1BC6B, 0x1BC6F}, {0x1BC7D, 0x1BC7F}, {0x1BC89, 0x1BC8F}, {0x1BC9A, 0x1BC9B}, {0x1BCA0, 0x1CFFF}, {0x1D0F6, 0x1D0FF}, {0x1D127, 0x1D128}, {0x1D173, 0x1D17A}, -{0x1D1E9, 0x1D1FF}, {0x1D246, 0x1D2DF}, {0x1D2F4, 0x1D2FF}, {0x1D357, 0x1D35F}, {0x1D379, 0x1D3FF}, {0x1D455, 0x1D455}, {0x1D49D, 0x1D49D}, {0x1D4A0, 0x1D4A1}, {0x1D4A3, 0x1D4A4}, {0x1D4A7, 0x1D4A8}, -{0x1D4AD, 0x1D4AD}, {0x1D4BA, 0x1D4BA}, {0x1D4BC, 0x1D4BC}, {0x1D4C4, 0x1D4C4}, {0x1D506, 0x1D506}, {0x1D50B, 0x1D50C}, {0x1D515, 0x1D515}, {0x1D51D, 0x1D51D}, {0x1D53A, 0x1D53A}, {0x1D53F, 0x1D53F}, -{0x1D545, 0x1D545}, {0x1D547, 0x1D549}, {0x1D551, 0x1D551}, {0x1D6A6, 0x1D6A7}, {0x1D7CC, 0x1D7CD}, {0x1DA8C, 0x1DA9A}, {0x1DAA0, 0x1DAA0}, {0x1DAB0, 0x1DFFF}, {0x1E007, 0x1E007}, {0x1E019, 0x1E01A}, -{0x1E022, 0x1E022}, {0x1E025, 0x1E025}, {0x1E02B, 0x1E0FF}, {0x1E12D, 0x1E12F}, {0x1E13E, 0x1E13F}, {0x1E14A, 0x1E14D}, {0x1E150, 0x1E2BF}, {0x1E2FA, 0x1E2FE}, {0x1E300, 0x1E7FF}, {0x1E8C5, 0x1E8C6}, -{0x1E8D7, 0x1E8FF}, {0x1E94C, 0x1E94F}, {0x1E95A, 0x1E95D}, {0x1E960, 0x1EC70}, {0x1ECB5, 0x1ED00}, {0x1ED3E, 0x1EDFF}, {0x1EE04, 0x1EE04}, {0x1EE20, 0x1EE20}, {0x1EE23, 0x1EE23}, {0x1EE25, 0x1EE26}, -{0x1EE28, 0x1EE28}, {0x1EE33, 0x1EE33}, {0x1EE38, 0x1EE38}, {0x1EE3A, 0x1EE3A}, {0x1EE3C, 0x1EE41}, {0x1EE43, 0x1EE46}, {0x1EE48, 0x1EE48}, {0x1EE4A, 0x1EE4A}, {0x1EE4C, 0x1EE4C}, {0x1EE50, 0x1EE50}, -{0x1EE53, 0x1EE53}, {0x1EE55, 0x1EE56}, {0x1EE58, 0x1EE58}, {0x1EE5A, 0x1EE5A}, {0x1EE5C, 0x1EE5C}, {0x1EE5E, 0x1EE5E}, {0x1EE60, 0x1EE60}, {0x1EE63, 0x1EE63}, {0x1EE65, 0x1EE66}, {0x1EE6B, 0x1EE6B}, -{0x1EE73, 0x1EE73}, {0x1EE78, 0x1EE78}, {0x1EE7D, 0x1EE7D}, {0x1EE7F, 0x1EE7F}, {0x1EE8A, 0x1EE8A}, {0x1EE9C, 0x1EEA0}, {0x1EEA4, 0x1EEA4}, {0x1EEAA, 0x1EEAA}, {0x1EEBC, 0x1EEEF}, {0x1EEF2, 0x1EFFF}, -{0x1F02C, 0x1F02F}, {0x1F094, 0x1F09F}, {0x1F0AF, 0x1F0B0}, {0x1F0C0, 0x1F0C0}, {0x1F0D0, 0x1F0D0}, {0x1F0F6, 0x1F0FF}, {0x1F1AE, 0x1F1E5}, {0x1F203, 0x1F20F}, {0x1F23C, 0x1F23F}, {0x1F249, 0x1F24F}, -{0x1F252, 0x1F25F}, {0x1F266, 0x1F2FF}, {0x1F6D8, 0x1F6DF}, {0x1F6ED, 0x1F6EF}, {0x1F6FD, 0x1F6FF}, {0x1F774, 0x1F77F}, {0x1F7D9, 0x1F7DF}, {0x1F7EC, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F}, -{0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8AF}, {0x1F8B2, 0x1F8FF}, {0x1F979, 0x1F979}, {0x1F9CC, 0x1F9CC}, {0x1FA54, 0x1FA5F}, {0x1FA6E, 0x1FA6F}, {0x1FA75, 0x1FA77}, {0x1FA7B, 0x1FA7F}, -{0x1FA87, 
0x1FA8F}, {0x1FAA9, 0x1FAAF}, {0x1FAB7, 0x1FABF}, {0x1FAC3, 0x1FACF}, {0x1FAD7, 0x1FAFF}, {0x1FB93, 0x1FB93}, {0x1FBCB, 0x1FBEF}, {0x1FBFA, 0x1FFFF}, {0x2A6DE, 0x2A6FF}, {0x2B735, 0x2B73F},
-{0x2B81E, 0x2B81F}, {0x2CEA2, 0x2CEAF}, {0x2EBE1, 0x2F7FF}, {0x2FA1E, 0x2FFFF}, {0x3134B, 0xE00FF}, {0xE01F0, 0x10FFFF},
-};
-
-//String
-bool CNCTString::operator==(const std::string& other) const {
-    return str.compare(other) == 0;
-}
-bool CNCTString::operator==(const char other) const {
-    return str.compare(std::string(1, other)) == 0;
-}
-bool CNCTString::operator==(const CNCTString& other) const {
-    return str.compare(other.str) == 0;
-}
-// + operators
-CNCTString& CNCTString::operator+=(const std::string& other) {
-    str += other;
-    int new_len = CNCTUnicode::strlen_utf8(other);
-    utf8_chars += new_len;
-    char_type = CNCTUnicode::string_identify(str);
-    seq_offset_bytes += other.size();
-    seq_offset_utf8_chars += new_len;
-    return *this;
-}
-
-CNCTString& CNCTString::operator+=(const char other) {
-    std::string str = std::string(1, other);
-    *this += str;
-    return *this;
-}
-
-CNCTString& CNCTString::operator+=(const CNCTString& other) {
-    str += other.str;
-    utf8_chars += other.utf8_chars;
-    char_type = CNCTUnicode::string_identify(str);
-    seq_offset_bytes += other.str.size();
-    seq_offset_utf8_chars += other.utf8_chars;
-    return *this;
-}
-
-struct CRCompare {
-    bool operator()(const std::pair<int, int>& p, int i) {
-        return p.second < i;
-    }
-    bool operator()(int i, const std::pair<int, int>& p) {
-        return i < p.first;
-    }
-};
-
-// binary search for code range
-bool CNCTUnicode::check_code_range(int c, const std::vector<std::pair<int, int>> &ranges) {
-    auto it = std::upper_bound(ranges.begin(), ranges.end(), c, CRCompare());
-    if (it != ranges.begin()) {
-        --it;
-    }
-    return c >= it->first && c <= it->second;
-}
-
-// these are binary searches, it takes only a few operations
-CNCTCharType CNCTUnicode::get_code_type(int c) {
-    if (check_code_range(c, letter_ranges)) {
-        return LETTER;
-    }
-    if (check_code_range(c, digit_ranges)) {
-        return DIGIT;
-    }
-    if (check_code_range(c, whitespace_ranges)) {
-        return WHITESPACE;
-    }
-    if (check_code_range(c, punctuation_ranges)) {
-        return PUNCTUATION;
-    }
-    if (check_code_range(c, symbol_ranges)) {
-        return SYMBOL;
-    }
-    if (check_code_range(c, accent_mark_ranges)) {
-        return ACCENT_MARK;
-    }
-    if (check_code_range(c, control_ranges)) {
-        return CONTROL;
-    }
-    return UNIDENTIFIED;
-}
-
-static int utf8_to_unicode(const std::string& utf8_char) {
-    int c = 0;
-    int len = (int)utf8_char.size();
-    if (len == 1) {
-        c = utf8_char[0];
-    } else if (len == 2) {
-        c = ((utf8_char[0] & 0x1F) << 6) | (utf8_char[1] & 0x3F);
-    } else if (len == 3) {
-        c = ((utf8_char[0] & 0x0F) << 12) | ((utf8_char[1] & 0x3F) << 6) | (utf8_char[2] & 0x3F);
-    } else if (len == 4) {
-        c = ((utf8_char[0] & 0x07) << 18) | ((utf8_char[1] & 0x3F) << 12) | ((utf8_char[2] & 0x3F) << 6) | (utf8_char[3] & 0x3F);
-    }
-    return c;
-}
-
-CNCTCharType CNCTUnicode::get_code_type(const std::string &utf8_char) {
-    return get_code_type(utf8_to_unicode(utf8_char));
-}
-
-int CNCTUnicode::utf8_len(const char c)
-{
-    if ((c & 0x80) == 0) {
-        return 1; // ASCII character
-    }
-    if ((c & 0xE0) == 0xC0) {
-        return 2; // 2-byte character
-    }
-    if ((c & 0xF0) == 0xE0) {
-        return 3; // 3-byte character
-    }
-    if ((c & 0xF0) == 0xF0) {
-        return 4; // 4-byte character
-    }
-    return 1; // not valid utf8
-    // static const uint8_t lookup[] = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4 };
-    // return lookup[static_cast<uint8_t>(c) >> 4];
-}
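check_code_range above does one upper_bound over the interval start points and then tests membership in the preceding range, so classifying a code point costs O(log n) per table. The same lookup in a few lines of Python (bisect; toy two-entry table standing in for the full tables above):

    import bisect

    digit_ranges = [(0x30, 0x39), (0x660, 0x669)]  # toy subset of the tables above

    def in_ranges(cp, ranges):
        i = bisect.bisect_right(ranges, (cp, 0x10FFFF))  # first range starting past cp
        if i == 0:
            return False
        lo, hi = ranges[i - 1]
        return lo <= cp <= hi

    print(in_ranges(ord("7"), digit_ranges), in_ranges(ord("A"), digit_ranges))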
-int CNCTUnicode::strlen_utf8(const std::string src) {
-    int len = 0;
-    for (std::string::const_iterator it = src.begin(); it != src.end(); ++it) {
-        int char_len = utf8_len(*it);
-        if (char_len > 1) {
-            it += char_len - 1;
-        }
-        len += 1;
-    }
-    return len;
-}
-
-// split a string into unicode strings
-std::vector<std::string> CNCTUnicode::split_utf8(const std::string &src) {
-    std::vector<std::string> result;
-    for (std::string::const_iterator it = src.begin(); it != src.end(); ++it) {
-        int char_len = utf8_len(*it);
-        std::string str(it, it + char_len);
-        result.push_back(str);
-        if (char_len > 1) {
-            it += char_len - 1;
-        }
-    }
-    return result;
-}
-
-// split a string into unicode strings (CNCTString) with sequence information
-std::vector<CNCTString> CNCTUnicode::split_utf8_enhanced(const std::string &src) {
-    std::vector<CNCTString> result;
-    int seq_offset_bytes=0;
-    int seq_offset_utf8_chars=0;
-    for (std::string::const_iterator it = src.begin(); it != src.end(); ++it) {
-        int char_len = utf8_len(*it);
-        std::string str(it, it + char_len);
-        CNCTString cnct_str;
-        cnct_str.seq_offset_bytes = seq_offset_bytes;
-        cnct_str.seq_offset_utf8_chars = seq_offset_utf8_chars;
-        cnct_str.str = str;
-        cnct_str.utf8_chars = 1;
-        cnct_str.char_type = get_code_type(str);
-        #if 0
-        switch (cnct_str.char_type)
-        {
-            case DIGIT:
-                printf("%s = DIGIT\n", str.c_str());
-                break;
-            case LETTER:
-                printf("%s = LETTER\n", str.c_str());
-                break;
-            case WHITESPACE:
-                printf("%s = WHITESPACE\n", str.c_str());
-                break;
-            case PUNCTUATION:
-                printf("%s = PUNCTUATION\n", str.c_str());
-                break;
-            case UNIDENTIFIED:
-                printf("%s = UNIDENTIFIED\n", str.c_str());
-                break;
-            case SYMBOL:
-                printf("%s = SYMBOL\n", str.c_str());
-                break;
-            case CONTROL:
-                printf("%s = CONTROL\n", str.c_str());
-                break;
-        }
-        #endif
-
-        result.push_back(cnct_str);
-        seq_offset_bytes += char_len;
-        seq_offset_utf8_chars += 1;
-        if (char_len > 1) {
-            it += char_len - 1;
-        }
-
-    }
-    return result;
-}
-
-// return the type of the string
-CNCTCharType CNCTUnicode::string_identify(const std::string &str) {
-    CNCTCharType result = UNIDENTIFIED;
-    std::string::const_iterator it = str.begin();
-    while (it != str.end()) {
-        int len = utf8_len(*it);
-        int c = 0;
-        for (int i = 0; i < len && it != str.end(); ++i, ++it) {
-            c = (c << 8) | static_cast<unsigned char>(*it);
-        }
-        switch (get_code_type(c)) {
-            case DIGIT:
-                if (result == UNIDENTIFIED) {
-                    result = DIGIT;
-                } else if (result != DIGIT) {
-                    return MIXED;
-                }
-                break;
-            case LETTER:
-                if (result == UNIDENTIFIED) {
-                    result = LETTER;
-                } else if (result != LETTER) {
-                    return MIXED;
-                }
-                break;
-            case WHITESPACE:
-                if (result == UNIDENTIFIED) {
-                    result = WHITESPACE;
-                } else if (result != WHITESPACE) {
-                    return MIXED;
-                }
-                break;
-            case PUNCTUATION:
-                if (result == UNIDENTIFIED) {
-                    result = PUNCTUATION;
-                } else if (result != PUNCTUATION) {
-                    return MIXED;
-                }
-                break;
-            default:
-                return MIXED;
-                break;
-        }
-    }
-    return result;
-}
-
-// verify the content of a string
-bool CNCTUnicode::string_test(const std::string &str, CNCTCharType chartype)
-{
-    std::string::const_iterator it = str.begin();
-    while (it != str.end()) {
-        int len = utf8_len(*it);
-        int c = 0;
-        for (int i = 0; i < len && it != str.end(); ++i, ++it) {
-            c = (c << 8) | static_cast<unsigned char>(*it);
-        }
-        if (get_code_type(c) != chartype) {
-            return false;
-        }
-    }
-    return true;
-}
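split_utf8 above walks the string by looking only at each leading byte: utf8_len decodes the byte-length from the high bits and the iterator then skips the continuation bytes. The same walk in Python over raw bytes (note this sketch uses the stricter 0xF8 mask for the 4-byte case, where the C++ version above reuses 0xF0):

    def utf8_len(b: int) -> int:
        if b & 0x80 == 0x00: return 1
        if b & 0xE0 == 0xC0: return 2
        if b & 0xF0 == 0xE0: return 3
        if b & 0xF8 == 0xF0: return 4
        return 1  # invalid leading byte; same fallback as the C++ version

    s = "naïve 汉".encode("utf-8")
    i, chars = 0, []
    while i < len(s):
        n = utf8_len(s[i])
        chars.append(s[i:i + n].decode("utf-8"))
        i += n
    print(chars)  # ['n', 'a', 'ï', 'v', 'e', ' ', '汉']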
-        str.replace(start_pos, from.length(), to);
-        start_pos += to.length(); // Handles case where 'to' is a substring of 'from'
-    }
-    return str;
-}
-
-struct TrieNode {
-    std::map<char, TrieNode*> map;
-    int32_t Id = -1;
-};
-
-struct Trie {
-    TrieNode *root;
-
-    Trie() : root(new TrieNode()) {}
-
-    ~Trie() {
-        if(root)
-            deleteTrie(root);
-    }
-
-    // Move constructor
-    Trie(Trie&& other) noexcept : root(other.root) {
-        other.root = nullptr;
-    }
-
-    // Move assignment operator
-    Trie& operator=(Trie&& other) noexcept {
-        if (this != &other) {
-            if(root)
-                deleteTrie(root);
-            root = other.root;
-            other.root = nullptr;
-        }
-        return *this;
-    }
-
-    void insert(const std::string &token, int32_t Id) {
-        TrieNode* current = root;
-        for(auto ch : token) {
-            if(current->map.find(ch) == current->map.end()) {
-                current->map[ch] = new TrieNode();
-            }
-            current = current->map[ch];
-        }
-        current->Id = Id;
-    }
-
-    void reset() {
-        deleteTrie(root);
-        root = new TrieNode();
-    }
-
-private:
-    void deleteTrie(TrieNode* node) {
-        for(auto &it: node->map) {
-            deleteTrie(it.second);
-        }
-        delete node;
-    }
-
-};
-
-struct gpt2bpe_vocab {
-    using id = int32_t;
-    using token = std::string;
-
-    std::map<std::string, uint32_t> max_token_length; // max length, for each 2byte prefix
-    std::map<std::pair<std::string, std::string>, int> bpe_ranks;
-    std::vector<std::pair<std::string, std::string>> bpe_merges;
-
-    id special_bos_id = -1;
-    id special_eos_id = -1;
-    id special_unk_id = -1;
-    id special_sep_id = -1;
-    id special_pad_id = -1;
-
-    id linefeed_id = -1;
-
-    std::unordered_map<token, id> token_to_id;
-    std::unordered_map<id, token> id_to_token;
-
-    Trie trie; // high-speed access to tokens by prefix tree
-
-    // populate trie from map
-    void populate_trie_from_map() {
-        trie.reset();
-        for (const auto& pair : token_to_id) {
-            trie.insert(pair.first, pair.second);
-            if (pair.first.size() >= 2) {
-                std::string prefix = pair.first.substr(0, 2);
-                max_token_length[prefix] = std::max(max_token_length[prefix], (uint32_t)pair.first.size());
-            }
-        }
-    }
-    // populate token ranks map
-    int populate_bpe_ranks(std::vector<std::pair<std::string, std::string>> bpe_merges_) {
-        for (int i = 0; i < (int)bpe_merges_.size(); i++) {
-            bpe_ranks.emplace(bpe_merges_[i], i);
-        }
-        bpe_merges = bpe_merges_;
-        return bpe_merges_.size();
-    }
-
-    // Trim whitespace characters from the beginning and end of the string
-    void trim(std::string& str) {
-        // Remove whitespace characters from the beginning of the string
-        str.erase(str.begin(), std::find_if(str.begin(), str.end(), [](int ch) {
-            return !std::isspace(ch);
-        }));
-
-        // Remove whitespace characters from the end of the string
-        str.erase(std::find_if(str.rbegin(), str.rend(), [](int ch) {
-            return !std::isspace(ch);
-        }).base(), str.end());
-    }
-
-    // get max token length available for a prefix of 2 bytes (string at least 2 bytes long)
-    int get_max_token_length(const std::string& string) const {
-        if (string.size() < 2) {
-            return -1;
-        }
-        std::string prefix = string.substr(0, 2);
-        if (max_token_length.find(prefix) == max_token_length.end()) {
-            return 0;
-        }
-        return max_token_length.at(prefix);
-    }
-
-    // function to find if two tokens match in bpe_rank, return rank or -1
-    int find_bpe_rank(const std::string& token1, const std::string& token2) const {
-        std::string left_token = token1;
-        std::string right_token = token2;
-        left_token = replaceAll(left_token, " ", "Ġ");
-        left_token = replaceAll(left_token, "\n", "Ċ");
-        right_token = replaceAll(right_token, " ", "Ġ");
-        right_token = replaceAll(right_token, "\n", "Ċ");
-
-        auto it = bpe_ranks.find(std::make_pair(left_token, right_token));
-
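The Ġ/Ċ substitutions above follow GPT-2's byte-level vocabulary convention, where U+0120 ("Ġ") stands in for a leading space and U+010A ("Ċ") for a newline inside the merges table. A minimal usage sketch of the rank lookup, assuming a hypothetical vocab whose only merge is ("Ġ", "h"):

    gpt2bpe_vocab vocab;
    vocab.populate_bpe_ranks({ { "\u0120", "h" } }); // hypothetical single merge at rank 0
    int rank = vocab.find_bpe_rank(" ", "h");        // the raw space is rewritten to "Ġ" first
    // rank == 0 here; -1 would mean the pair is never merged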
if (it == bpe_ranks.end()) {
-            return -1;
-        }
-        return it->second;
-    }
-
-    std::pair<gpt2bpe_vocab::id, std::string> find_longest_match(const std::string& snippet) const {
-        TrieNode* current = trie.root;
-        gpt2bpe_vocab::id last_matched_id = -1;
-        std::string last_matched_token = "";
-        std::string current_token = "";
-        for (auto ch : snippet) {
-            if (current->map.find(ch) == current->map.end()) {
-                break;
-            }
-            current = current->map[ch];
-            current_token += ch;
-            if (current->Id != -1) {
-                last_matched_id = current->Id;
-                last_matched_token = current_token;
-            }
-        }
-        return {last_matched_id, last_matched_token};
-    }
-
-};
-
-
-//
-// tokenizer - bpe type, gpt2 tokenization compatible
-//
-
-struct ggllm_bpe_symbol {
-    using index = int;
-    index prev;
-    index next;
-    const char * text;
-    size_t n;
-};
-
-static_assert(std::is_trivially_copyable<ggllm_bpe_symbol>::value, "ggllm_bpe_symbol is not trivially copyable");
-
-struct ggllm_bpe_bigram {
-    struct comparator {
-        bool operator()(ggllm_bpe_bigram & l, ggllm_bpe_bigram & r) {
-            return l.rank > r.rank || (l.rank == r.rank && l.left > r.left);
-        }
-    };
-
-    using queue_storage = std::vector<ggllm_bpe_bigram>;
-    using queue = std::priority_queue<ggllm_bpe_bigram, queue_storage, comparator>;
-    ggllm_bpe_symbol::index left;
-    ggllm_bpe_symbol::index right;
-    std::string text;
-    int rank;
-    size_t size;
-};
-
-struct gpt2bpe_tokenizer {
-    gpt2bpe_tokenizer(const gpt2bpe_vocab & vocab, bool g2ws_): vocab_(vocab) { flag_g2ws = g2ws_; }
-
-    void tokenize(const std::string & text, std::vector<gpt2bpe_vocab::id> & output) {
-        int final_prev_index = -1;
-        // auto start = ggml_time_us();
-        auto word_collection = bpe_gpt2_preprocess(text);
-        // auto end = ggml_time_us();
-        // fprintf(stderr, "%s: preprocessing took %0.3f ms\n", __func__, (end - start) / 1000.0);
-
-        symbols_final.clear();
-
-        for (auto & word : word_collection) {
-            work_queue_ = ggllm_bpe_bigram::queue();
-            symbols_.clear();
-
-            int index = 0;
-            size_t offset = 0;
-
-            while (offset < word.size()) {
-                ggllm_bpe_symbol sym;
-                size_t char_len = std::min(word.size() - offset, (size_t) CNCTUnicode::utf8_len(word[offset]));
-                sym.text = word.c_str() + offset;
-                sym.n = char_len;
-                offset += sym.n;
-                sym.prev = index - 1;
-                sym.next = offset == word.size() ? -1 : index + 1;
-                index++;
-                symbols_.emplace_back(sym);
-            }
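At this point symbols_ holds one entry per UTF-8 character of the word, chained into a doubly linked list through the prev/next indices. The comparator above inverts the rank ordering, so the priority queue pops the lowest-ranked, i.e. earliest-learned, merge first. A worked trace under hypothetical merges ("h","e") = rank 0, ("he","l") = rank 1, ("l","o") = rank 2:

    // word "hello":  h e l l o
    // pop rank 0 ->  he l l o
    // pop rank 1 ->  hel l o
    // pop rank 2 ->  hel lo     (each merge re-seeds bigrams with its new neighbours)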
-            for (size_t i = 1; i < symbols_.size(); ++i) {
-                add_new_bigram(i - 1, i);
-            }
-
-            // build token(s)
-            while (!work_queue_.empty()) {
-                auto bigram = work_queue_.top();
-                work_queue_.pop();
-
-                auto & left_symbol = symbols_[bigram.left];
-                auto & right_symbol = symbols_[bigram.right];
-
-                if (left_symbol.n == 0 || right_symbol.n == 0) {
-                    continue;
-                }
-                std::string left_token = std::string(left_symbol.text, left_symbol.n);
-                std::string right_token = std::string(right_symbol.text, right_symbol.n);
-                if (left_token + right_token != bigram.text) {
-                    continue; // Skip this bigram if it's outdated
-                }
-
-                // merge the right sym into the left one
-                left_symbol.n += right_symbol.n;
-                right_symbol.n = 0;
-
-                // remove the right sym from the chain
-                left_symbol.next = right_symbol.next;
-                if (right_symbol.next >= 0) {
-                    symbols_[right_symbol.next].prev = bigram.left;
-                }
-
-                add_new_bigram(left_symbol.prev, bigram.left);  // left side of current symbol
-                add_new_bigram(bigram.left, left_symbol.next);  // right side of current symbol
-            }
-
-            // add the finished tokens to the final list keeping correct order for next and prev
-            for (auto & sym : symbols_) {
-                if (sym.n > 0) {
-                    sym.prev = final_prev_index;
-                    sym.next = -1;
-                    if (final_prev_index != -1) {
-                        symbols_final[final_prev_index].next = symbols_final.size();
-                    }
-                    symbols_final.emplace_back(sym);
-                    final_prev_index = symbols_final.size() - 1;
-                }
-            }
-        }
-
-        symbols_ = symbols_final;
-        if (symbols_.size())
-        for (int i = 0; i != -1; i = symbols_[i].next) {
-            auto & symbol = symbols_[i];
-            if (symbol.n == 0) {
-                continue;
-            }
-            std::string str = std::string(symbol.text, symbol.n);
-            std::string str_decoded = decode_token(str);
-            auto token = vocab_.token_to_id.find(str_decoded);
-
-            if (token == vocab_.token_to_id.end()) {
-                for (auto j = str_decoded.begin(); j != str_decoded.end(); ++j) {
-                    std::string byte_str(1, *j);
-                    auto token_multibyte = vocab_.token_to_id.find(byte_str);
-                    if (token_multibyte == vocab_.token_to_id.end()) {
-                        fprintf(stderr, "ERROR: byte not found in vocab: '%s'\n", byte_str.c_str());
-                        continue;
-                    }
-                    output.push_back((*token_multibyte).second);
-                }
-            } else {
-                output.push_back((*token).second);
-            }
-        }
-    }
-
-private:
-    void add_new_bigram(int left, int right) {
-        if (left == -1 || right == -1) return;
-
-        std::string left_token = std::string(symbols_[left].text, symbols_[left].n);
-        std::string right_token = std::string(symbols_[right].text, symbols_[right].n);
-
-        int rank_found = -1;
-        rank_found = vocab_.find_bpe_rank(left_token, right_token);
-
-        if (rank_found < 0) {
-            return;
-        }
-
-        ggllm_bpe_bigram bigram;
-        bigram.left = left;
-        bigram.right = right;
-        bigram.rank = rank_found;
-        bigram.size = left_token.size() + right_token.size();
-        bigram.text = left_token + right_token;
-        work_queue_.push(bigram);
-    }
-
-    std::unordered_map<unsigned char, std::string> bytes_to_unicode() {
-        static std::unordered_map<unsigned char, std::string> hex_map = {
-            { 0x21, "\x21" }, { 0x22, "\x22" }, { 0x23, "\x23" }, { 0x24, "\x24" }, { 0x25, "\x25" }, { 0x26, "\x26" }, { 0x27, "\x27" }, { 0x28, "\x28" }, { 0x29, "\x29" }, { 0x2A, "\x2A" },
-            { 0x2B, "\x2B" }, { 0x2C, "\x2C" }, { 0x2D, "\x2D" }, { 0x2E, "\x2E" }, { 0x2F, "\x2F" }, { 0x30, "\x30" }, { 0x31, "\x31" }, { 0x32, "\x32" }, { 0x33, "\x33" }, { 0x34, "\x34" },
-            { 0x35, "\x35" }, { 0x36, "\x36" }, { 0x37, "\x37" }, { 0x38, "\x38" }, { 0x39, "\x39" }, { 0x3A, "\x3A" }, { 0x3B, "\x3B" }, { 0x3C, "\x3C" }, { 0x3D, "\x3D" }, { 0x3E, "\x3E" },
-            { 0x3F, "\x3F" }, { 0x40, "\x40"
}, { 0x41, "\x41" }, { 0x42, "\x42" }, { 0x43, "\x43" }, { 0x44, "\x44" }, { 0x45, "\x45" }, { 0x46, "\x46" }, { 0x47, "\x47" }, { 0x48, "\x48" }, - { 0x49, "\x49" }, { 0x4A, "\x4A" }, { 0x4B, "\x4B" }, { 0x4C, "\x4C" }, { 0x4D, "\x4D" }, { 0x4E, "\x4E" }, { 0x4F, "\x4F" }, { 0x50, "\x50" }, { 0x51, "\x51" }, { 0x52, "\x52" }, - { 0x53, "\x53" }, { 0x54, "\x54" }, { 0x55, "\x55" }, { 0x56, "\x56" }, { 0x57, "\x57" }, { 0x58, "\x58" }, { 0x59, "\x59" }, { 0x5A, "\x5A" }, { 0x5B, "\x5B" }, { 0x5C, "\x5C" }, - { 0x5D, "\x5D" }, { 0x5E, "\x5E" }, { 0x5F, "\x5F" }, { 0x60, "\x60" }, { 0x61, "\x61" }, { 0x62, "\x62" }, { 0x63, "\x63" }, { 0x64, "\x64" }, { 0x65, "\x65" }, { 0x66, "\x66" }, - { 0x67, "\x67" }, { 0x68, "\x68" }, { 0x69, "\x69" }, { 0x6A, "\x6A" }, { 0x6B, "\x6B" }, { 0x6C, "\x6C" }, { 0x6D, "\x6D" }, { 0x6E, "\x6E" }, { 0x6F, "\x6F" }, { 0x70, "\x70" }, - { 0x71, "\x71" }, { 0x72, "\x72" }, { 0x73, "\x73" }, { 0x74, "\x74" }, { 0x75, "\x75" }, { 0x76, "\x76" }, { 0x77, "\x77" }, { 0x78, "\x78" }, { 0x79, "\x79" }, { 0x7A, "\x7A" }, - { 0x7B, "\x7B" }, { 0x7C, "\x7C" }, { 0x7D, "\x7D" }, { 0x7E, "\x7E" }, { 0xA1, "\xC2\xA1" }, { 0xA2, "\xC2\xA2" }, { 0xA3, "\xC2\xA3" }, { 0xA4, "\xC2\xA4" }, { 0xA5, "\xC2\xA5" }, - { 0xA6, "\xC2\xA6" }, { 0xA7, "\xC2\xA7" }, { 0xA8, "\xC2\xA8" }, { 0xA9, "\xC2\xA9" }, { 0xAA, "\xC2\xAA" }, { 0xAB, "\xC2\xAB" }, { 0xAC, "\xC2\xAC" }, { 0xAE, "\xC2\xAE" }, - { 0xAF, "\xC2\xAF" }, { 0xB0, "\xC2\xB0" }, { 0xB1, "\xC2\xB1" }, { 0xB2, "\xC2\xB2" }, { 0xB3, "\xC2\xB3" }, { 0xB4, "\xC2\xB4" }, { 0xB5, "\xC2\xB5" }, { 0xB6, "\xC2\xB6" }, - { 0xB7, "\xC2\xB7" }, { 0xB8, "\xC2\xB8" }, { 0xB9, "\xC2\xB9" }, { 0xBA, "\xC2\xBA" }, { 0xBB, "\xC2\xBB" }, { 0xBC, "\xC2\xBC" }, { 0xBD, "\xC2\xBD" }, { 0xBE, "\xC2\xBE" }, - { 0xBF, "\xC2\xBF" }, { 0xC0, "\xC3\x80" }, { 0xC1, "\xC3\x81" }, { 0xC2, "\xC3\x82" }, { 0xC3, "\xC3\x83" }, { 0xC4, "\xC3\x84" }, { 0xC5, "\xC3\x85" }, { 0xC6, "\xC3\x86" }, - { 0xC7, "\xC3\x87" }, { 0xC8, "\xC3\x88" }, { 0xC9, "\xC3\x89" }, { 0xCA, "\xC3\x8A" }, { 0xCB, "\xC3\x8B" }, { 0xCC, "\xC3\x8C" }, { 0xCD, "\xC3\x8D" }, { 0xCE, "\xC3\x8E" }, - { 0xCF, "\xC3\x8F" }, { 0xD0, "\xC3\x90" }, { 0xD1, "\xC3\x91" }, { 0xD2, "\xC3\x92" }, { 0xD3, "\xC3\x93" }, { 0xD4, "\xC3\x94" }, { 0xD5, "\xC3\x95" }, { 0xD6, "\xC3\x96" }, - { 0xD7, "\xC3\x97" }, { 0xD8, "\xC3\x98" }, { 0xD9, "\xC3\x99" }, { 0xDA, "\xC3\x9A" }, { 0xDB, "\xC3\x9B" }, { 0xDC, "\xC3\x9C" }, { 0xDD, "\xC3\x9D" }, { 0xDE, "\xC3\x9E" }, - { 0xDF, "\xC3\x9F" }, { 0xE0, "\xC3\xA0" }, { 0xE1, "\xC3\xA1" }, { 0xE2, "\xC3\xA2" }, { 0xE3, "\xC3\xA3" }, { 0xE4, "\xC3\xA4" }, { 0xE5, "\xC3\xA5" }, { 0xE6, "\xC3\xA6" }, - { 0xE7, "\xC3\xA7" }, { 0xE8, "\xC3\xA8" }, { 0xE9, "\xC3\xA9" }, { 0xEA, "\xC3\xAA" }, { 0xEB, "\xC3\xAB" }, { 0xEC, "\xC3\xAC" }, { 0xED, "\xC3\xAD" }, { 0xEE, "\xC3\xAE" }, - { 0xEF, "\xC3\xAF" }, { 0xF0, "\xC3\xB0" }, { 0xF1, "\xC3\xB1" }, { 0xF2, "\xC3\xB2" }, { 0xF3, "\xC3\xB3" }, { 0xF4, "\xC3\xB4" }, { 0xF5, "\xC3\xB5" }, { 0xF6, "\xC3\xB6" }, - { 0xF7, "\xC3\xB7" }, { 0xF8, "\xC3\xB8" }, { 0xF9, "\xC3\xB9" }, { 0xFA, "\xC3\xBA" }, { 0xFB, "\xC3\xBB" }, { 0xFC, "\xC3\xBC" }, { 0xFD, "\xC3\xBD" }, { 0xFE, "\xC3\xBE" }, - { 0xFF, "\xC3\xBF" }, { 0x00, "\xC4\x80" }, { 0x01, "\xC4\x81" }, { 0x02, "\xC4\x82" }, { 0x03, "\xC4\x83" }, { 0x04, "\xC4\x84" }, { 0x05, "\xC4\x85" }, { 0x06, "\xC4\x86" }, - { 0x07, "\xC4\x87" }, { 0x08, "\xC4\x88" }, { 0x09, "\xC4\x89" }, { 0x0A, "\xC4\x8A" }, { 0x0B, "\xC4\x8B" }, { 0x0C, "\xC4\x8C" }, { 0x0D, "\xC4\x8D" }, { 0x0E, "\xC4\x8E" }, 
- { 0x0F, "\xC4\x8F" }, { 0x10, "\xC4\x90" }, { 0x11, "\xC4\x91" }, { 0x12, "\xC4\x92" }, { 0x13, "\xC4\x93" }, { 0x14, "\xC4\x94" }, { 0x15, "\xC4\x95" }, { 0x16, "\xC4\x96" }, - { 0x17, "\xC4\x97" }, { 0x18, "\xC4\x98" }, { 0x19, "\xC4\x99" }, { 0x1A, "\xC4\x9A" }, { 0x1B, "\xC4\x9B" }, { 0x1C, "\xC4\x9C" }, { 0x1D, "\xC4\x9D" }, { 0x1E, "\xC4\x9E" }, - { 0x1F, "\xC4\x9F" }, { 0x20, "\xC4\xA0" }, { 0x7F, "\xC4\xA1" }, { 0x80, "\xC4\xA2" }, { 0x81, "\xC4\xA3" }, { 0x82, "\xC4\xA4" }, { 0x83, "\xC4\xA5" }, { 0x84, "\xC4\xA6" }, - { 0x85, "\xC4\xA7" }, { 0x86, "\xC4\xA8" }, { 0x87, "\xC4\xA9" }, { 0x88, "\xC4\xAA" }, { 0x89, "\xC4\xAB" }, { 0x8A, "\xC4\xAC" }, { 0x8B, "\xC4\xAD" }, { 0x8C, "\xC4\xAE" }, - { 0x8D, "\xC4\xAF" }, { 0x8E, "\xC4\xB0" }, { 0x8F, "\xC4\xB1" }, { 0x90, "\xC4\xB2" }, { 0x91, "\xC4\xB3" }, { 0x92, "\xC4\xB4" }, { 0x93, "\xC4\xB5" }, { 0x94, "\xC4\xB6" }, - { 0x95, "\xC4\xB7" }, { 0x96, "\xC4\xB8" }, { 0x97, "\xC4\xB9" }, { 0x98, "\xC4\xBA" }, { 0x99, "\xC4\xBB" }, { 0x9A, "\xC4\xBC" }, { 0x9B, "\xC4\xBD" }, { 0x9C, "\xC4\xBE" }, - { 0x9D, "\xC4\xBF" }, { 0x9E, "\xC5\x80" }, { 0x9F, "\xC5\x81" }, { 0xA0, "\xC5\x82" }, { 0xAD, "\xC5\x83" } - }; - return hex_map; - } - - std::unordered_map unicode_to_bytes() { - static std::unordered_map hex_map = { - { "\x21", 0x21 }, { "\x22", 0x22 }, { "\x23", 0x23 }, { "\x24", 0x24 }, { "\x25", 0x25 }, { "\x26", 0x26 }, { "\x27", 0x27 }, { "\x28", 0x28 }, { "\x29", 0x29 }, { "\x2A", 0x2A }, - { "\x2B", 0x2B }, { "\x2C", 0x2C }, { "\x2D", 0x2D }, { "\x2E", 0x2E }, { "\x2F", 0x2F }, { "\x30", 0x30 }, { "\x31", 0x31 }, { "\x32", 0x32 }, { "\x33", 0x33 }, { "\x34", 0x34 }, - { "\x35", 0x35 }, { "\x36", 0x36 }, { "\x37", 0x37 }, { "\x38", 0x38 }, { "\x39", 0x39 }, { "\x3A", 0x3A }, { "\x3B", 0x3B }, { "\x3C", 0x3C }, { "\x3D", 0x3D }, { "\x3E", 0x3E }, - { "\x3F", 0x3F }, { "\x40", 0x40 }, { "\x41", 0x41 }, { "\x42", 0x42 }, { "\x43", 0x43 }, { "\x44", 0x44 }, { "\x45", 0x45 }, { "\x46", 0x46 }, { "\x47", 0x47 }, { "\x48", 0x48 }, - { "\x49", 0x49 }, { "\x4A", 0x4A }, { "\x4B", 0x4B }, { "\x4C", 0x4C }, { "\x4D", 0x4D }, { "\x4E", 0x4E }, { "\x4F", 0x4F }, { "\x50", 0x50 }, { "\x51", 0x51 }, { "\x52", 0x52 }, - { "\x53", 0x53 }, { "\x54", 0x54 }, { "\x55", 0x55 }, { "\x56", 0x56 }, { "\x57", 0x57 }, { "\x58", 0x58 }, { "\x59", 0x59 }, { "\x5A", 0x5A }, { "\x5B", 0x5B }, { "\x5C", 0x5C }, - { "\x5D", 0x5D }, { "\x5E", 0x5E }, { "\x5F", 0x5F }, { "\x60", 0x60 }, { "\x61", 0x61 }, { "\x62", 0x62 }, { "\x63", 0x63 }, { "\x64", 0x64 }, { "\x65", 0x65 }, { "\x66", 0x66 }, - { "\x67", 0x67 }, { "\x68", 0x68 }, { "\x69", 0x69 }, { "\x6A", 0x6A }, { "\x6B", 0x6B }, { "\x6C", 0x6C }, { "\x6D", 0x6D }, { "\x6E", 0x6E }, { "\x6F", 0x6F }, { "\x70", 0x70 }, - { "\x71", 0x71 }, { "\x72", 0x72 }, { "\x73", 0x73 }, { "\x74", 0x74 }, { "\x75", 0x75 }, { "\x76", 0x76 }, { "\x77", 0x77 }, { "\x78", 0x78 }, { "\x79", 0x79 }, { "\x7A", 0x7A }, - { "\x7B", 0x7B }, { "\x7C", 0x7C }, { "\x7D", 0x7D }, { "\x7E", 0x7E }, { "\xC2\xA1", 0xA1 }, { "\xC2\xA2", 0xA2 }, { "\xC2\xA3", 0xA3 }, { "\xC2\xA4", 0xA4 }, { "\xC2\xA5", 0xA5 }, - { "\xC2\xA6", 0xA6 }, { "\xC2\xA7", 0xA7 }, { "\xC2\xA8", 0xA8 }, { "\xC2\xA9", 0xA9 }, { "\xC2\xAA", 0xAA }, { "\xC2\xAB", 0xAB }, { "\xC2\xAC", 0xAC }, { "\xC2\xAE", 0xAE }, - { "\xC2\xAF", 0xAF }, { "\xC2\xB0", 0xB0 }, { "\xC2\xB1", 0xB1 }, { "\xC2\xB2", 0xB2 }, { "\xC2\xB3", 0xB3 }, { "\xC2\xB4", 0xB4 }, { "\xC2\xB5", 0xB5 }, { "\xC2\xB6", 0xB6 }, - { "\xC2\xB7", 0xB7 }, { "\xC2\xB8", 0xB8 }, { "\xC2\xB9", 0xB9 }, { "\xC2\xBA", 0xBA 
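// Both hex tables implement GPT-2's reversible byte<->unicode mapping: printable
// ASCII and most Latin-1 bytes map to themselves, while the remaining bytes are
// displaced into unused code points starting at U+0100, so every byte has a
// visible, whitespace-free stand-in. A sketch of generating the forward table
// instead of hardcoding it (make_byte_encoder is hypothetical; it assumes the
// same keep-ranges as the tables above):
//
//     std::unordered_map<unsigned char, std::string> make_byte_encoder() {
//         std::unordered_map<unsigned char, std::string> enc;
//         int n = 0;
//         for (int b = 0; b < 256; b++) {
//             bool keep = (b >= 0x21 && b <= 0x7E) || (b >= 0xA1 && b <= 0xAC) || (b >= 0xAE && b <= 0xFF);
//             int cp = keep ? b : 0x100 + n++;        // displaced bytes start at U+0100
//             std::string s;                          // encode cp as UTF-8 (at most 2 bytes here)
//             if (cp < 0x80) s = std::string(1, (char) cp);
//             else { s += (char) (0xC0 | (cp >> 6)); s += (char) (0x80 | (cp & 0x3F)); }
//             enc[(unsigned char) b] = s;
//         }
//         return enc;
//     }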
}, { "\xC2\xBB", 0xBB }, { "\xC2\xBC", 0xBC }, { "\xC2\xBD", 0xBD }, { "\xC2\xBE", 0xBE }, - { "\xC2\xBF", 0xBF }, { "\xC3\x80", 0xC0 }, { "\xC3\x81", 0xC1 }, { "\xC3\x82", 0xC2 }, { "\xC3\x83", 0xC3 }, { "\xC3\x84", 0xC4 }, { "\xC3\x85", 0xC5 }, { "\xC3\x86", 0xC6 }, - { "\xC3\x87", 0xC7 }, { "\xC3\x88", 0xC8 }, { "\xC3\x89", 0xC9 }, { "\xC3\x8A", 0xCA }, { "\xC3\x8B", 0xCB }, { "\xC3\x8C", 0xCC }, { "\xC3\x8D", 0xCD }, { "\xC3\x8E", 0xCE }, - { "\xC3\x8F", 0xCF }, { "\xC3\x90", 0xD0 }, { "\xC3\x91", 0xD1 }, { "\xC3\x92", 0xD2 }, { "\xC3\x93", 0xD3 }, { "\xC3\x94", 0xD4 }, { "\xC3\x95", 0xD5 }, { "\xC3\x96", 0xD6 }, - { "\xC3\x97", 0xD7 }, { "\xC3\x98", 0xD8 }, { "\xC3\x99", 0xD9 }, { "\xC3\x9A", 0xDA }, { "\xC3\x9B", 0xDB }, { "\xC3\x9C", 0xDC }, { "\xC3\x9D", 0xDD }, { "\xC3\x9E", 0xDE }, - { "\xC3\x9F", 0xDF }, { "\xC3\xA0", 0xE0 }, { "\xC3\xA1", 0xE1 }, { "\xC3\xA2", 0xE2 }, { "\xC3\xA3", 0xE3 }, { "\xC3\xA4", 0xE4 }, { "\xC3\xA5", 0xE5 }, { "\xC3\xA6", 0xE6 }, - { "\xC3\xA7", 0xE7 }, { "\xC3\xA8", 0xE8 }, { "\xC3\xA9", 0xE9 }, { "\xC3\xAA", 0xEA }, { "\xC3\xAB", 0xEB }, { "\xC3\xAC", 0xEC }, { "\xC3\xAD", 0xED }, { "\xC3\xAE", 0xEE }, - { "\xC3\xAF", 0xEF }, { "\xC3\xB0", 0xF0 }, { "\xC3\xB1", 0xF1 }, { "\xC3\xB2", 0xF2 }, { "\xC3\xB3", 0xF3 }, { "\xC3\xB4", 0xF4 }, { "\xC3\xB5", 0xF5 }, { "\xC3\xB6", 0xF6 }, - { "\xC3\xB7", 0xF7 }, { "\xC3\xB8", 0xF8 }, { "\xC3\xB9", 0xF9 }, { "\xC3\xBA", 0xFA }, { "\xC3\xBB", 0xFB }, { "\xC3\xBC", 0xFC }, { "\xC3\xBD", 0xFD }, { "\xC3\xBE", 0xFE }, - { "\xC3\xBF", 0xFF }, { "\xC4\x80", 0x00 }, { "\xC4\x81", 0x01 }, { "\xC4\x82", 0x02 }, { "\xC4\x83", 0x03 }, { "\xC4\x84", 0x04 }, { "\xC4\x85", 0x05 }, { "\xC4\x86", 0x06 }, - { "\xC4\x87", 0x07 }, { "\xC4\x88", 0x08 }, { "\xC4\x89", 0x09 }, { "\xC4\x8A", 0x0A }, { "\xC4\x8B", 0x0B }, { "\xC4\x8C", 0x0C }, { "\xC4\x8D", 0x0D }, { "\xC4\x8E", 0x0E }, - { "\xC4\x8F", 0x0F }, { "\xC4\x90", 0x10 }, { "\xC4\x91", 0x11 }, { "\xC4\x92", 0x12 }, { "\xC4\x93", 0x13 }, { "\xC4\x94", 0x14 }, { "\xC4\x95", 0x15 }, { "\xC4\x96", 0x16 }, - { "\xC4\x97", 0x17 }, { "\xC4\x98", 0x18 }, { "\xC4\x99", 0x19 }, { "\xC4\x9A", 0x1A }, { "\xC4\x9B", 0x1B }, { "\xC4\x9C", 0x1C }, { "\xC4\x9D", 0x1D }, { "\xC4\x9E", 0x1E }, - { "\xC4\x9F", 0x1F }, { "\xC4\xA0", 0x20 }, { "\xC4\xA1", 0x7F }, { "\xC4\xA2", 0x80 }, { "\xC4\xA3", 0x81 }, { "\xC4\xA4", 0x82 }, { "\xC4\xA5", 0x83 }, { "\xC4\xA6", 0x84 }, - { "\xC4\xA7", 0x85 }, { "\xC4\xA8", 0x86 }, { "\xC4\xA9", 0x87 }, { "\xC4\xAA", 0x88 }, { "\xC4\xAB", 0x89 }, { "\xC4\xAC", 0x8A }, { "\xC4\xAD", 0x8B }, { "\xC4\xAE", 0x8C }, - { "\xC4\xAF", 0x8D }, { "\xC4\xB0", 0x8E }, { "\xC4\xB1", 0x8F }, { "\xC4\xB2", 0x90 }, { "\xC4\xB3", 0x91 }, { "\xC4\xB4", 0x92 }, { "\xC4\xB5", 0x93 }, { "\xC4\xB6", 0x94 }, - { "\xC4\xB7", 0x95 }, { "\xC4\xB8", 0x96 }, { "\xC4\xB9", 0x97 }, { "\xC4\xBA", 0x98 }, { "\xC4\xBB", 0x99 }, { "\xC4\xBC", 0x9A }, { "\xC4\xBD", 0x9B }, { "\xC4\xBE", 0x9C }, - { "\xC4\xBF", 0x9D }, { "\xC5\x80", 0x9E }, { "\xC5\x81", 0x9F }, { "\xC5\x82", 0xA0 }, { "\xC5\x83", 0xAD } - }; - return hex_map; - } - - // len must be available - bool inline str_is_equal(const char* str1, const char* str2, size_t len) { - for (size_t i = 0; i < len; ++i) { - if (str1[i] != str2[i]) { - return false; - } - } - return true; - } - - std::vector bpe_gpt2_preprocess(const std::string& text) { - static std::unordered_map< unsigned char, std::string> byte_encoder = bytes_to_unicode(); - std::vector bpe_words; - std::vector bpe_encoded_words; - - std::string token=""; - const char *raw_text_p 
= text.c_str(); - // GPT2 system regex: 's|'t|'re|'ve|'m|'ll|'d| ?\p{L}+| ?\p{N}+| ?[^\s\p{L}\p{N}]+|\s+(?!\S)|\s+ - bool collecting_numeric = false; - bool collecting_letter = false; - bool collecting_special = false; - bool collecting_whitespace_lookahead = false; - bool collecting=false; - - std::vector text_utf; - text_utf.reserve(text.size()); - bpe_words.reserve(text.size()); - bpe_encoded_words.reserve(text.size()); - - text_utf = CNCTUnicode::split_utf8_enhanced(text); - - for (int i = 0; i < (int)text_utf.size(); i++) { - const CNCTString &utf_char = text_utf[i]; - bool split_condition = false; - const char *text_pos = raw_text_p + utf_char.seq_offset_bytes; - int bytes_remain = strlen(text_pos); - // forward backward lookups - const CNCTString &utf_char_next = (i+1 < (int)text_utf.size()) ? text_utf[i+1] : CNCTString(); - const CNCTString &utf_char_next_next = (i+2 < (int)text_utf.size()) ? text_utf[i+2] : CNCTString(); - // const CNCTString &utf_char_prev = (i > 0) ? text_utf[i-1] : CNCTString(); - - // handling contractions - if (!split_condition && bytes_remain >= 2) { - // 's|'t|'m|'d - if (utf_char == '\'' && (utf_char_next == 's' || utf_char_next == 't' || utf_char_next == 'm' || utf_char_next == 'd')) { - split_condition = true; - } - if (split_condition) { - if (token.size()) { - bpe_words.emplace_back(token); // push previous content as token - } - token = utf_char.str + utf_char_next.str; - bpe_words.emplace_back(token); - token=""; - i++; - continue; - } - } - if (!split_condition && bytes_remain >= 3) { - // 're|'ve|'ll - if (utf_char == '\'' && ( - (utf_char_next == 'r' || utf_char_next_next == 'e') || - (utf_char_next == 'v' || utf_char_next_next == 'e') || - (utf_char_next == 'l' || utf_char_next_next == 'l')) - ) { - split_condition = true; - } - if (split_condition) { - // current token + next token can be defined - if (token.size()) { - bpe_words.emplace_back(token); // push previous content as token - } - token = utf_char.str + utf_char_next.str + utf_char_next_next.str; - bpe_words.emplace_back(token); // the contraction - token=""; - i+=2; - continue; - } - } - - if (!split_condition && !collecting) { - if (utf_char.char_type == CNCTCharType::LETTER || (!token.size() && utf_char==" " && utf_char_next.char_type == CNCTCharType::LETTER)) { - collecting_letter = true; - collecting = true; - } else if (utf_char.char_type == CNCTCharType::DIGIT || (!token.size() && utf_char==" " && utf_char_next.char_type == CNCTCharType::DIGIT)) { - collecting_numeric = true; - collecting = true; - } else if ( - ((utf_char.char_type != CNCTCharType::LETTER && utf_char.char_type != CNCTCharType::DIGIT) && (utf_char.char_type != CNCTCharType::WHITESPACE)) || - (!token.size() && utf_char==" " && utf_char_next.char_type != CNCTCharType::LETTER && utf_char_next.char_type != CNCTCharType::DIGIT && utf_char_next.char_type != CNCTCharType::WHITESPACE) - ) { - collecting_special = true; - collecting = true; - } else if (utf_char.char_type == CNCTCharType::WHITESPACE && utf_char_next.char_type == CNCTCharType::WHITESPACE) { - collecting_whitespace_lookahead = true; - collecting = true; - } else if (utf_char.char_type == CNCTCharType::WHITESPACE) { - split_condition = true; - } - } else if (!split_condition && collecting) { - if (collecting_letter && utf_char.char_type != CNCTCharType::LETTER) { - split_condition = true; - } else if (collecting_numeric && utf_char.char_type != CNCTCharType::DIGIT) { - split_condition = true; - } else if (collecting_special && (utf_char.char_type == 
CNCTCharType::LETTER || utf_char.char_type == CNCTCharType::DIGIT || utf_char.char_type == CNCTCharType::WHITESPACE)) { - split_condition = true; - } else if (collecting_whitespace_lookahead && utf_char_next.char_type != CNCTCharType::WHITESPACE) { - split_condition = true; - } - } - - if(utf_char_next.str.size() == 0) { - split_condition = true; // final - token += utf_char.str; - } - - if (split_condition) { - if (token.size()) { - bpe_words.emplace_back(token); - } - token = utf_char.str; - collecting = false; - collecting_letter = false; - collecting_numeric = false; - collecting_special = false; - collecting_whitespace_lookahead = false; - } else { - token += utf_char.str; - } - } - - for (std::string& word : bpe_words) { - std::string encoded_token=""; - for (char& c : word) { - encoded_token += byte_encoder[c]; - } - bpe_encoded_words.emplace_back(encoded_token); - } - - return bpe_encoded_words; - } - - // decoder (for one token) - std::string decode_token(const std::string& token) { - static std::unordered_map< std::string, unsigned char> byte_decoder = unicode_to_bytes(); - std::string decoded_token=""; - auto unicode_seqeunces = CNCTUnicode::split_utf8(token); - for (auto& unicode_sequence : unicode_seqeunces) { - decoded_token += byte_decoder[unicode_sequence]; - } - - return decoded_token; - } - - const gpt2bpe_vocab & vocab_; - std::vector symbols_; - std::vector symbols_final; - ggllm_bpe_bigram::queue work_queue_; - bool flag_g2ws=false; -}; - -static std::vector gpt2bpe_tokenize(const gpt2bpe_vocab & vocab, const std::string & text, bool bos, bool g2ws ) { - gpt2bpe_tokenizer tokenizer(vocab, g2ws); - std::vector output; - - if (text.empty()) { - return output; - } - - if (bos && vocab.special_bos_id != -1) { - output.push_back(vocab.special_bos_id); - } - - tokenizer.tokenize(text, output); - return output; -} - -#endif // CMPNCT_GPT2BPE diff --git a/examples/gptneox-wip/falcon-main.cpp b/examples/gptneox-wip/falcon-main.cpp deleted file mode 100644 index e9197f6b5..000000000 --- a/examples/gptneox-wip/falcon-main.cpp +++ /dev/null @@ -1,1111 +0,0 @@ -#include "ggml.h" -#include "cmpnct_gpt2bpe.hpp" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#if defined(_MSC_VER) -#pragma warning(disable: 4244 4267) // possible loss of data -#endif - -// default hparams -struct falcon_hparams { - size_t n_merges = 0; - size_t n_vocab = 0; - uint32_t n_ctx = 0; - uint32_t n_embd = 0; - uint32_t n_head = 0; - uint32_t n_head_kv = 1; // Needs to be 1 for 7B model - uint32_t n_ff = 0; - uint32_t n_block = 0; - float norm_eps = 1e-5; -}; -struct falcon_block { - // normalization - struct ggml_tensor* input_layernorm; - struct ggml_tensor* input_layernorm_b; - struct ggml_tensor* attention_norm; // Falcon-40B only - struct ggml_tensor* attention_norm_b; // Falcon-40B only - - // attention - struct ggml_tensor* query_key_value; - struct ggml_tensor* wo; - - // ff - struct ggml_tensor* ffn_up; - struct ggml_tensor* ffn_down; -}; - -struct falcon_model { - falcon_hparams hparams; - - struct ggml_tensor* tok_embeddings; - struct ggml_tensor* output_norm; - struct ggml_tensor* output_norm_b; - struct ggml_tensor* lm_head; - - std::vector blocks; - - // key + value memory - struct ggml_tensor* memory_k; - struct ggml_tensor* memory_v; - - struct gguf_context * ggufctx; - struct ggml_context * ctx; - struct ggml_context * kvctx; - - std::map tensors; -}; - -struct gpt_params { - int32_t seed = -1; // RNG seed - int32_t n_threads = 
std::min(4, (int32_t) std::thread::hardware_concurrency()); - uint32_t n_predict = 200; // new tokens to predict - uint32_t n_batch = 512; // batch size for prompt processing - - // sampling parameters - int32_t top_k = 40; - float top_p = 1.0f; - float temp = 0.8f; - int32_t repeat_last_n = 64; - float repeat_penalty = 1.02f; - - std::string model = ""; // model path - std::string prompt = ""; - - std::string token_test = ""; - bool interactive = false; - int32_t interactive_port = -1; - int32_t n_gpu_layers = 0; -}; - -void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { - fprintf(stderr, "usage: %s [options]\n", argv[0]); - fprintf(stderr, "\n"); - fprintf(stderr, "options:\n"); - fprintf(stderr, " -h, --help show this help message and exit\n"); - fprintf(stderr, " -s SEED, --seed SEED RNG seed (default: -1)\n"); - fprintf(stderr, " -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); - fprintf(stderr, " -ngl N, --gpu-layers N number of layers to offload to GPU on supported models (default: %d)\n", params.n_gpu_layers); - fprintf(stderr, " -p PROMPT, --prompt PROMPT\n"); - fprintf(stderr, " prompt to start generation with (default: random)\n"); - fprintf(stderr, " -f FNAME, --file FNAME\n"); - fprintf(stderr, " load prompt from a file\n"); - fprintf(stderr, " -tt TOKEN_TEST, --token_test TOKEN_TEST\n"); - fprintf(stderr, " test tokenization\n"); - fprintf(stderr, " -n N, --n_predict N number of tokens to predict (default: %d)\n", params.n_predict); - fprintf(stderr, " --top_k N top-k sampling, 0 = n_vocab (default: %d)\n", params.top_k); - fprintf(stderr, " --top_p N top-p sampling (default: %.1f)\n", params.top_p); - fprintf(stderr, " --temp N temperature (default: %.1f)\n", params.temp); - fprintf(stderr, " --repeat-last-n N last n tokens to consider for penalize (default: %d, 0 = disabled)\n", params.repeat_last_n); - fprintf(stderr, " --repeat-penalty N penalize repeat sequence of tokens (default: %.2f, 1.0 = disabled)\n", (double)params.repeat_penalty); - fprintf(stderr, " -b N, --batch_size N batch size for prompt processing (default: %d)\n", params.n_batch); - fprintf(stderr, " -m FNAME, --model FNAME\n"); - fprintf(stderr, " model path (default: %s)\n", params.model.c_str()); - fprintf(stderr, "\n"); -} - -// Function to check if the next argument exists -std::string get_next_arg(int& i, int argc, char** argv, const std::string& flag, gpt_params& params) { - if (i + 1 < argc && argv[i + 1][0] != '-') { - return argv[++i]; - } else { - fprintf(stderr, "error: %s requires one argument.\n", flag.c_str()); - gpt_print_usage(argc, argv, params); - exit(0); - } -} - -bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { - for (int i = 1; i < argc; i++) { - std::string arg = argv[i]; - - if (arg == "-s" || arg == "--seed") { - params.seed = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-t" || arg == "--threads") { - params.n_threads = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-ngl" || arg == "--gpu-layers" || arg == "--n-gpu-layers") { - params.n_gpu_layers = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-p" || arg == "--prompt") { - params.prompt = get_next_arg(i, argc, argv, arg, params); - } else if (arg == "-n" || arg == "--n_predict") { - params.n_predict = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--top_k") { - params.top_k = std::stoi(get_next_arg(i, argc, argv, arg, params)); 
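Taken together, the options above give invocations of the following shape; the binary name and model path are illustrative only:

    ./falcon-main -m falcon-7b-q4.gguf -p "Once upon" -n 64 -t 8 --top_k 40 --top_p 0.95 --temp 0.8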
- } else if (arg == "--top_p") { - params.top_p = std::stof(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--temp") { - params.temp = std::stof(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--repeat-last-n") { - params.repeat_last_n = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--repeat-penalty") { - params.repeat_penalty = std::stof(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-b" || arg == "--batch_size") { - params.n_batch= std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-m" || arg == "--model") { - params.model = get_next_arg(i, argc, argv, arg, params); - } else if (arg == "-i" || arg == "--interactive") { - params.interactive = true; - } else if (arg == "-ip" || arg == "--interactive-port") { - params.interactive = true; - params.interactive_port = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-h" || arg == "--help") { - gpt_print_usage(argc, argv, params); - exit(0); - } else if (arg == "-f" || arg == "--file") { - get_next_arg(i, argc, argv, arg, params); - std::ifstream file(argv[i]); - if (!file) { - fprintf(stderr, "error: failed to open file '%s'\n", argv[i]); - break; - } - std::copy(std::istreambuf_iterator(file), std::istreambuf_iterator(), back_inserter(params.prompt)); - if (params.prompt.back() == '\n') { - params.prompt.pop_back(); - } - } else if (arg == "-tt" || arg == "--token_test") { - params.token_test = get_next_arg(i, argc, argv, arg, params); - } - else { - fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); - gpt_print_usage(argc, argv, params); - exit(0); - } - } - - return true; -} - -gpt2bpe_vocab::id sample_top_k_top_p_repeat( - const gpt2bpe_vocab & vocab, - const float * logits, - const int32_t * last_n_tokens_data, - size_t last_n_tokens_data_size, - int top_k, - double top_p, - double temp, - int repeat_last_n, - float repeat_penalty, - std::mt19937 & rng) { - - int n_logits = vocab.id_to_token.size(); - - const auto * plogits = logits; - - const auto last_n_tokens = std::vector(last_n_tokens_data, last_n_tokens_data + last_n_tokens_data_size); - - if (temp <= 0) { - // select the token with the highest logit directly - float max_logit = plogits[0]; - gpt2bpe_vocab::id max_id = 0; - - for (int i = 1; i < n_logits; ++i) { - if (plogits[i] > max_logit) { - max_logit = plogits[i]; - max_id = i; - } - } - return max_id; - } - - - std::vector> logits_id; - logits_id.reserve(n_logits); - - { - const float scale = 1.0f/temp; - for (int i = 0; i < n_logits; ++i) { - // repetition penalty from ctrl paper (https://arxiv.org/abs/1909.05858) - // credit https://github.com/facebookresearch/llama/compare/main...shawwn:llama:main - if (repeat_last_n > 0 && std::find(last_n_tokens.end()-repeat_last_n, last_n_tokens.end(), i) != last_n_tokens.end()) { - // if score < 0 then repetition penalty has to multiplied to reduce the previous token probability - if (plogits[i] < 0.0f) { - logits_id.push_back(std::make_pair(plogits[i]*scale*repeat_penalty, i)); - } else { - logits_id.push_back(std::make_pair(plogits[i]*scale/repeat_penalty, i)); - } - } else { - logits_id.push_back(std::make_pair(plogits[i]*scale, i)); - } - } - } - - // find the top K tokens - std::partial_sort( - logits_id.begin(), - logits_id.begin() + top_k, logits_id.end(), - [](const std::pair & a, const std::pair & b) { - return a.first > b.first; - }); - - logits_id.resize(top_k); - - double maxl = -INFINITY; - for (const auto & kv : logits_id) { - maxl = 
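// A worked instance of the scaling above (hypothetical numbers): with
// temp = 0.8 and repeat_penalty = 1.02, a recently generated token with
// logit +2.0 becomes 2.0 * 1.25 / 1.02 ≈ 2.451, while logit -2.0 becomes
// -2.0 * 1.25 * 1.02 = -2.55. Dividing positive logits and multiplying
// negative ones both lower the token's probability, which is why the
// sign check is needed before applying the penalty.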
std::max(maxl, kv.first); - } - - // compute probs for the top K tokens - std::vector probs; - probs.reserve(logits_id.size()); - - double sum = 0.0; - for (const auto & kv : logits_id) { - double p = exp(kv.first - maxl); - probs.push_back(p); - sum += p; - } - - // normalize the probs - for (auto & p : probs) { - p /= sum; - } - - if (top_p < 1.0f) { - double cumsum = 0.0f; - for (int i = 0; i < top_k; i++) { - cumsum += probs[i]; - if (cumsum >= top_p) { - top_k = i + 1; - probs.resize(top_k); - logits_id.resize(top_k); - break; - } - } - - cumsum = 1.0/cumsum; - for (int i = 0; i < (int) probs.size(); i++) { - probs[i] *= cumsum; - } - } - -// printf("\n"); -// for (int i = 0; i < (int) probs.size(); i++) { -// for (int i = 0; i < 10; i++) { -// printf("%d: '%s' %f\n", i, vocab.id_to_token.at(logits_id[i].second).c_str(), probs[i]); -// } - - std::discrete_distribution<> dist(probs.begin(), probs.end()); - int idx = dist(rng); - - return logits_id[idx].second; - -} - -struct ggml_tensor * get_tensor_ex( struct ggml_context * ctx, std::string name){ - - struct ggml_tensor * cur = ggml_get_tensor(ctx, name.c_str()); - if( cur == NULL ) { - printf("%s: tensor '%s' not found!\n", __func__, name.c_str()); - } else { -// printf("%s: n_dims = %d, name = '%s'\n", __func__, cur->n_dims, cur->name); - } - - return cur; -} - -// load the model's weights from a file -bool falcon_model_load(const std::string & fname, falcon_model & model, gpt2bpe_vocab & vocab) { - printf("%s: loading model from '%s'..\n", __func__, fname.c_str()); - - model.ctx = NULL; - - struct gguf_init_params ggufparams = { - /*.no_alloc = */ false, - /*.ctx = */ &model.ctx, - }; - - auto & ggufctx = model.ggufctx; - - ggufctx = gguf_init_from_file(fname.c_str(), ggufparams); - - if (!ggufctx) { - fprintf(stderr, "%s: gguf_init_from_file() failed\n", __func__); - return false; - } - - printf("%s: gguf version = %d\n", __func__, gguf_get_version(ggufctx)); - printf("%s: gguf alignment = %zu\n", __func__, gguf_get_alignment(ggufctx)); - printf("%s: gguf data offset = %zu\n", __func__, gguf_get_data_offset(ggufctx)); - - // print all kv - #if 0 - { - const int n_kv = gguf_get_n_kv(ggufctx); - - printf("%s: n_kv: %d\n", __func__, n_kv); - - for (int i = 0; i < n_kv; ++i) { - const char * key = gguf_get_key(ggufctx, i); - - printf("%s: kv[%d]: key = %s\n", __func__, i, key); - } - } - #endif - - // print some standard metadata - { - int keyidx; - - keyidx = gguf_find_key(ggufctx, "general.name"); - if (keyidx != -1) { printf("%s: model name = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.description"); - if (keyidx != -1) { printf("%s: model description = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.author"); - if (keyidx != -1) { printf("%s: model author = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.license"); - if (keyidx != -1) { printf("%s: model license = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.architecture"); - if (keyidx != -1) { printf("%s: model architecture = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.file_type"); - if (keyidx != -1) { printf("%s: model file type = %" PRIu32 "\n", __func__, gguf_get_val_u32(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "gptneox.tensor_data_layout"); - if (keyidx != -1) { printf("%s: model data layout = %s\n", 
__func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.source.huggingface.repository"); - if (keyidx != -1) { printf("%s: model source HF repo = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - } - - // check required metadata - { - int keyidx; - - // check model architecture kv - keyidx = gguf_find_key(ggufctx, "general.architecture"); - if (keyidx != -1) { - if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "falcon") != 0) { - printf("%s: model architecture not supported!\n", __func__); - return false; - } - } else { - printf("%s: gguf model architecture not found!\n", __func__); - return false; - } - - // check model tensor data layout kv - keyidx = gguf_find_key(ggufctx, "falcon.tensor_data_layout"); - if (keyidx != -1) { - if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "jploski") != 0) { - printf("%s: model tensor data layout not supported!\n", __func__); - return false; - } - } else { - printf("%s: gguf model tensor data layout not found!\n", __func__); - return false; - } - - } - - // load hparams - { - auto & hparams = model.hparams; - - bool ok = true; - int keyidx; - - if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.context_length"); - if (keyidx != -1) { hparams.n_ctx = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.embedding_length"); - if (keyidx != -1) { hparams.n_embd = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.attention.head_count"); - if (keyidx != -1) { hparams.n_head = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.feed_forward_length"); - if (keyidx != -1) { hparams.n_ff = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.block_count"); - if (keyidx != -1) { hparams.n_block = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "falcon.attention.layer_norm_epsilon"); - if (keyidx != -1) { hparams.norm_eps= gguf_get_val_f32(ggufctx, keyidx); } else { ok = false; } } - - if (!ok) { - fprintf(stderr, "%s: required hparam missing!\n", __func__); - return false; - } - - keyidx = gguf_find_key(ggufctx, "falcon.attention.head_count_kv"); - if (keyidx != -1) { hparams.n_head_kv = gguf_get_val_u32(ggufctx, keyidx); } - - - printf("%s: n_ctx = %d\n", __func__, hparams.n_ctx); - printf("%s: n_embd = %d\n", __func__, hparams.n_embd); - printf("%s: n_head = %d\n", __func__, hparams.n_head); - printf("%s: n_head_kv = %d\n", __func__, hparams.n_head_kv); - printf("%s: n_block = %d\n", __func__, hparams.n_block); - printf("%s: norm_eps = %g\n", __func__, hparams.norm_eps); - - } - - // load vocab - { - auto & hparams = model.hparams; - - int keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.model"); - - if (keyidx != -1) { - if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "gpt2") != 0) { - printf("%s: tokenizer model not supported!\n", __func__); - return false; - } - } else { - printf("%s: tokenizer model not found!\n", __func__); - return false; - } - - - int tokens_keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.tokens"); - - if (tokens_keyidx == -1) { - printf("%s: gpt2 tokenizer vocab not found!\n", __func__); - return false; - } - - int merges_keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.merges"); - - if (merges_keyidx == -1) { - printf("%s: gpt2 tokenizer merges not found!\n", __func__); - return false; - } - - 
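The metadata reads above repeat one pattern: look the key up, then fetch a typed value. A condensed sketch of that pattern as a helper, using the same gguf accessors called throughout this file (gguf_read_u32 itself is a hypothetical name; the original code inlines the steps so each missing key can be reported individually):

    static bool gguf_read_u32(struct gguf_context * ggufctx, const char * key, uint32_t & out) {
        int keyidx = gguf_find_key(ggufctx, key);
        if (keyidx == -1) {
            return false;                   // caller decides whether the key was mandatory
        }
        out = gguf_get_val_u32(ggufctx, keyidx);
        return true;
    }

    // usage: ok = ok && gguf_read_u32(ggufctx, "falcon.context_length", hparams.n_ctx);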
hparams.n_vocab = gguf_get_arr_n(ggufctx,tokens_keyidx); - hparams.n_merges = gguf_get_arr_n(ggufctx,merges_keyidx); - - printf("%s: gpt2 tokenizer vocab = %zu\n", __func__, hparams.n_vocab); - printf("%s: gpt2 tokenizer merges = %zu\n", __func__, hparams.n_merges); - - for (size_t i = 0; i < hparams.n_vocab; i++) { - std::string word = gguf_get_arr_str(ggufctx, tokens_keyidx, i); - -// printf("token %d = '%s'\n",i,word.c_str() ); - - vocab.token_to_id[word] = i; - vocab.id_to_token[i] = word; - - if( vocab.id_to_token[i] == "\n" ) { - vocab.linefeed_id = i; - } - } - - std::vector> bpe_merges; - - for (size_t i = 0; i < hparams.n_merges; i++) { - - std::string word = gguf_get_arr_str(ggufctx, merges_keyidx, i); - - // Split the merges - std::string first, second; - size_t pos = word.find(' ', 1); // Start the search from the second character - if (pos != std::string::npos) { - first = word.substr(0, pos); - second = word.substr(pos + 1); - } - - bpe_merges.push_back(std::make_pair(first, second)); - } - - vocab.populate_bpe_ranks(bpe_merges); - - - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.bos_token_id"); if( keyidx != -1 ) { vocab.special_bos_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.eos_token_id"); if( keyidx != -1 ) { vocab.special_eos_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.unknown_token_id"); if( keyidx != -1 ) { vocab.special_unk_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.separator_token_id"); if( keyidx != -1 ) { vocab.special_sep_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.padding_token_id"); if( keyidx != -1 ) { vocab.special_pad_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - - if( vocab.special_bos_id != -1 ) { printf("%s: BOS token = %d '%s'\n", __func__, vocab.special_bos_id, vocab.id_to_token[vocab.special_bos_id].c_str() ); } - if( vocab.special_eos_id != -1 ) { printf("%s: EOS token = %d '%s'\n", __func__, vocab.special_eos_id, vocab.id_to_token[vocab.special_eos_id].c_str() ); } - if( vocab.special_unk_id != -1 ) { printf("%s: UNK token = %d '%s'\n", __func__, vocab.special_unk_id, vocab.id_to_token[vocab.special_unk_id].c_str() ); } - if( vocab.special_sep_id != -1 ) { printf("%s: SEP token = %d '%s'\n", __func__, vocab.special_sep_id, vocab.id_to_token[vocab.special_sep_id].c_str() ); } - if( vocab.special_pad_id != -1 ) { printf("%s: PAD token = %d '%s'\n", __func__, vocab.special_pad_id, vocab.id_to_token[vocab.special_pad_id].c_str() ); } - if( vocab.linefeed_id != -1 ) { printf("%s: LF token = %d\n", __func__, vocab.linefeed_id ); } - - } - - - auto & ctx = model.ctx; - size_t ctx_size = ggml_get_mem_size(ctx); - - printf("%s: ggml ctx size = %6.2f MB\n", __func__, ctx_size/(1024.0*1024.0)); - - // print tensor info - #if 0 - { - const int n_tensors = gguf_get_n_tensors(ggufctx); - - printf("%s: n_tensors: %d\n", __func__, n_tensors); - - for (int i = 0; i < n_tensors; ++i) { - const char * name = gguf_get_tensor_name (ggufctx, i); - const size_t offset = gguf_get_tensor_offset(ggufctx, i); - - printf("%s: tensor[%d]: name = %s, offset = %zu\n", __func__, i, name, offset); - } - } - #endif - - // prepare memory for the weights - { - - auto & hparams = model.hparams; - - const int n_block = hparams.n_block; - - model.blocks.resize(n_block); - - model.tok_embeddings = ggml_get_tensor(ctx, "token_embd.weight"); - - 
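Tensor lookup below relies on the GGUF naming scheme: a handful of global tensors plus per-block names built from a "blk.<index>." prefix. For block 7, hypothetically, the names resolve to:

    // blk.7.attn_norm.weight    blk.7.attn_norm.bias
    // blk.7.attn_qkv.weight     blk.7.attn_output.weight
    // blk.7.ffn_up.weight       blk.7.ffn_down.weight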
model.output_norm = ggml_get_tensor(ctx, "output_norm.weight"); - model.output_norm_b = ggml_get_tensor(ctx, "output_norm.bias"); - model.lm_head = ggml_get_tensor(ctx, "output.weight"); - - // map by name - model.tensors["token_embd.weight"] = model.tok_embeddings; - model.tensors["output_norm.weight"] = model.output_norm; - model.tensors["output_norm.bias"] = model.output_norm_b; - model.tensors["output.weight"] = model.lm_head; - - for (int i = 0; i < n_block; ++i) { - - auto& block = model.blocks[i]; - std::string blocknamestart = "blk." + std::to_string(i) + "."; - - block.input_layernorm = get_tensor_ex(ctx, blocknamestart + "attn_norm.weight" ); - block.input_layernorm_b = get_tensor_ex(ctx, blocknamestart + "attn_norm.bias" ); - - if ( hparams.n_head_kv == 8 ) { // Falcon-40B - block.attention_norm = get_tensor_ex(ctx, blocknamestart + "attn_norm_2.weight" ); - block.attention_norm_b = get_tensor_ex(ctx, blocknamestart + "attn_norm_2.bias" ); - } - - // query_key_value shape for config.multi_query == True: - block.query_key_value = get_tensor_ex(ctx, blocknamestart + "attn_qkv.weight" ); - block.wo = get_tensor_ex(ctx, blocknamestart + "attn_output.weight" ); - - block.ffn_up = get_tensor_ex(ctx, blocknamestart + "ffn_up.weight" ); - block.ffn_down = get_tensor_ex(ctx, blocknamestart + "ffn_down.weight" ); - - // map by name - if ( hparams.n_head_kv == 8 ) { // Falcon-40B - // Falcon-40B: - model.tensors[blocknamestart + "attn_norm.weight"] = block.input_layernorm; - model.tensors[blocknamestart + "attn_norm.bias"] = block.input_layernorm_b; - model.tensors[blocknamestart + "attn_norm_2.weight"] = block.attention_norm; - model.tensors[blocknamestart + "attn_norm_2.bias"] = block.attention_norm_b; - } else { - // Falcon-7B: - model.tensors[blocknamestart + "attn_norm.weight"] = block.input_layernorm; - model.tensors[blocknamestart + "attn_norm.bias"] = block.input_layernorm_b; - } - - model.tensors[blocknamestart + "attn_qkv.weight"] = block.query_key_value; - model.tensors[blocknamestart + "attn_output.weight"] = block.wo; - - model.tensors[blocknamestart + "ffn_up.weight"] = block.ffn_up; - model.tensors[blocknamestart + "ffn_down.weight"] = block.ffn_down; - } - } - - // key + value memory - { - const auto & kvctx = model.kvctx; - const auto & hparams = model.hparams; - - const int n_block = hparams.n_block; - const int n_ctx = hparams.n_ctx; - const int n_embd = hparams.n_embd; - - const int64_t n_mem = n_block*n_ctx; - const int64_t n_elements = n_embd*n_mem; - - // create the ggml context - { - struct ggml_init_params params = { - /*.mem_size =*/ size_t(n_elements*4+ggml_tensor_overhead()*2), - /*.mem_buffer =*/ NULL, - /*.no_alloc =*/ false, - }; - - model.kvctx = ggml_init(params); - if (!model.kvctx) { - fprintf(stderr, "%s: kv ggml_init() failed\n", __func__); - return false; - } - - } - - - model.memory_k = ggml_new_tensor_1d(kvctx, GGML_TYPE_F16, n_elements); - model.memory_v = ggml_new_tensor_1d(kvctx, GGML_TYPE_F16, n_elements); - - const size_t memory_size = ggml_nbytes(model.memory_k) + ggml_nbytes(model.memory_v); - - printf("%s: memory_size = %8.2f MB, n_mem = %" PRId64 "\n", __func__, memory_size/1024.0/1024.0, n_mem); - } - - return true; -} - - -// evaluate the transformer -// -// - model: the model -// - n_threads: number of threads to use -// - n_past: the context size so far -// - embd_inp: the embeddings of the tokens in the context -// - embd_w: the predicted logits for the next token -// -bool falcon_eval( - const falcon_model & model, - const int 
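// Sizing of the cache allocated above, spelled out (illustrative numbers only):
//   n_mem      = n_block * n_ctx                        // one slot per layer per position
//   n_elements = n_embd * n_mem
//   bytes      = 2 * n_elements * sizeof(ggml_fp16_t)   // K and V, F16 each
// With Falcon-7B-style values n_block = 32, n_ctx = 2048, n_embd = 4544 this is
// 2 * 4544 * 65536 * 2 bytes ≈ 1.1 GiB.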
n_threads, - const int n_past, - const std::vector & embd_inp, - std::vector & embd_w, - size_t & mem_per_token) { - - - const int N = embd_inp.size(); - - const auto & hparams = model.hparams; - - const int n_embd = hparams.n_embd; - const int n_block = hparams.n_block; - const int n_ctx = hparams.n_ctx; - const int n_head = hparams.n_head; - const int n_head_kv = hparams.n_head_kv; - const int n_vocab = hparams.n_vocab; - const size_t head_dim = n_embd / n_head; - - static size_t buf_size = 256u*1024*1024; - static void * buf = malloc(buf_size); - - // use 2 scratch buffers - // TODO: very hacky solution - reimplement in a more elegant way - static size_t scr0_size = 256u*1024*1024; - static void * scr0 = malloc(scr0_size); - - static size_t scr1_size = 256u*1024*1024; - static void * scr1 = malloc(scr1_size); - - if (mem_per_token > 0 && mem_per_token*N > buf_size) { - const size_t buf_size_new = 1.1*(mem_per_token*N); // add 10% to account for ggml object overhead - //printf("\n%s: reallocating buffer from %zu to %zu bytes\n", __func__, buf_size, buf_size_new); - - // reallocate - buf_size = buf_size_new; - buf = realloc(buf, buf_size); - if (buf == nullptr) { - fprintf(stderr, "%s: failed to allocate %zu bytes\n", __func__, buf_size); - return false; - } - } - - struct ggml_init_params params = { - /*.mem_size =*/ buf_size, - /*.mem_buffer =*/ buf, - /*.no_alloc =*/ false, - }; - - struct ggml_context * ctx0 = ggml_init(params); - struct ggml_cgraph gf = {}; -// gf.n_threads = n_threads; - - struct ggml_tensor * embd = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, N); - memcpy(embd->data, embd_inp.data(), N*ggml_element_size(embd)); - - // wte - struct ggml_tensor * inpL = ggml_get_rows(ctx0, model.tok_embeddings, embd); -// struct ggml_tensor* repeat_dummy = ggml_new_tensor_3d(ctx0, inpL->type, head_dim, N + n_past, n_head); - - ggml_type wtype = GGML_TYPE_F32; - const int sizeof_wtype = ggml_type_sizef(wtype); - - for (int il = 0; il < n_block; ++il) { - struct ggml_tensor * cur; - struct ggml_tensor * layernorm_output; - - ggml_set_scratch(ctx0, { 0, scr0_size, scr0, }); - - // self-attention - { - layernorm_output = ggml_norm(ctx0, inpL); - - layernorm_output = ggml_add(ctx0, - ggml_mul(ctx0, - ggml_repeat(ctx0, model.blocks[il].input_layernorm, layernorm_output), - layernorm_output), - ggml_repeat(ctx0, model.blocks[il].input_layernorm_b, layernorm_output)); - - if ( hparams.n_head_kv == 8 ) { // Falcon-40B - cur = ggml_norm(ctx0, inpL); - - cur = ggml_add(ctx0, - ggml_mul(ctx0, - ggml_repeat(ctx0, model.blocks[il].attention_norm, cur), - cur), - ggml_repeat(ctx0, model.blocks[il].attention_norm_b, cur)); - } - else { // Falcon 7B - cur = layernorm_output; - } - - // compute QKV - - cur = ggml_mul_mat(ctx0, model.blocks[il].query_key_value, cur); - - // Note that the strides for Kcur, Vcur are set up so that the - // resulting views are misaligned with the tensor's storage - // (by applying the K/V offset we shift the tensor's original - // view to stick out behind the viewed QKV tensor's allocated - // memory, so to say). This is ok because no actual accesses - // happen to that out-of-range memory, but it can require some - // trickery when trying to accurately dump these views for - // debugging. 
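The fused QKV matmul above produces, per token, a row of head_dim * (n_head + 2 * n_head_kv) values: all query heads first, then the shared key head(s), then the value head(s). With hypothetical multi-query sizes head_dim = 64, n_head = 8, n_head_kv = 1, the three views below carve that 640-float row as:

    // Q: floats [  0, 512)  -- offset 0,                          8 heads * 64
    // K: floats [512, 576)  -- offset head_dim * n_head,          1 head  * 64
    // V: floats [576, 640)  -- offset head_dim * (n_head + n_head_kv)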
- - struct ggml_tensor * Qcur = ggml_view_3d( - ctx0, cur, head_dim, n_head, N, - head_dim * sizeof_wtype, - head_dim * (n_head + 2 * n_head_kv) * sizeof_wtype, - 0); - - struct ggml_tensor * Kcur = ggml_view_3d( - ctx0, cur, head_dim, n_head_kv, N, - head_dim * sizeof_wtype, - head_dim * (n_head + 2 * n_head_kv) * sizeof_wtype, - head_dim * n_head * sizeof_wtype); - - struct ggml_tensor * Vcur = ggml_view_3d( - ctx0, cur, head_dim, n_head_kv, N, - head_dim * sizeof_wtype, - head_dim * (n_head + 2 * n_head_kv) * sizeof_wtype, - head_dim * (n_head + n_head_kv) * sizeof_wtype); - - // using mode = 2 for neox mode - Qcur = ggml_rope_inplace(ctx0, Qcur, n_past, head_dim, 2, 0); - Kcur = ggml_rope_inplace(ctx0, Kcur, n_past, head_dim, 2, 0); - - // store key and value to memory - { - struct ggml_tensor* k = ggml_view_1d( - ctx0, model.memory_k, N * n_head_kv * head_dim, - (ggml_element_size(model.memory_k) * n_head_kv * head_dim) * - (il * n_ctx + n_past)); - struct ggml_tensor* v = ggml_view_1d( - ctx0, model.memory_v, N * n_head_kv * head_dim, - (ggml_element_size(model.memory_v) * n_head_kv * head_dim) * - (il * n_ctx + n_past)); - - ggml_build_forward_expand(&gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(&gf, ggml_cpy(ctx0, Vcur, v)); - } - - struct ggml_tensor * K = ggml_permute( - ctx0, - ggml_reshape_3d( - ctx0, - ggml_view_1d(ctx0, model.memory_k, (n_past + N) * n_head_kv * head_dim, - il * n_ctx * - ggml_element_size(model.memory_k) * - n_head_kv * - head_dim), - head_dim, n_head_kv, n_past + N), - 0, 2, 1, 3); - - // K * Q - -// K = ggml_cont(ctx0, ggml_repeat2(ctx0, K, repeat_dummy)); - - struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - - // KQ_scaled = KQ / sqrt(n_embd/n_head) - struct ggml_tensor * KQ_scaled = - ggml_scale_inplace(ctx0, - KQ, - ggml_new_f32(ctx0, 1.0f/sqrt(float(head_dim))) - ); - - // KQ_masked = mask_past(KQ_scaled) - struct ggml_tensor * KQ_masked = ggml_diag_mask_inf_inplace(ctx0, KQ_scaled, n_past); - - // KQ = soft_max(KQ_masked) - struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); - - // V_trans = Vmem.view(n_embd/n_head, n_head, n_past + N).permute(1, 2, 0, 3).contiguous() - struct ggml_tensor* V = ggml_permute( - ctx0, - ggml_reshape_3d( - ctx0, - ggml_view_1d(ctx0, model.memory_v, (n_past + N) * n_head_kv * head_dim, - il * n_ctx * - ggml_element_size(model.memory_v) * - n_head_kv * - head_dim), - head_dim, n_head_kv, n_past + N), - 0, 2, 1, 3); - -// V = ggml_cont(ctx0, ggml_transpose(ctx0, ggml_repeat2(ctx0, V, repeat_dummy))); - V = ggml_cont(ctx0, ggml_transpose(ctx0, V)); - - // KQV = transpose(V) * KQ_soft_max - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - - // KQV_merged = KQV.permute(0, 2, 1, 3) - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - - // cur = KQV_merged.contiguous().view(n_embd, N) - cur = ggml_cpy(ctx0, - KQV_merged, - ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); - - // projection - { - cur = ggml_mul_mat(ctx0, - model.blocks[il].wo, - cur); - } - } - - ggml_set_scratch(ctx0, { 0, scr1_size, scr1, }); - - struct ggml_tensor* inpFF = layernorm_output; - struct ggml_tensor* attn_out = ggml_cpy( - ctx0, cur, ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); - - { - cur = ggml_mul_mat(ctx0, model.blocks[il].ffn_up, inpFF); - cur = ggml_gelu(ctx0, cur); - cur = ggml_mul_mat(ctx0, model.blocks[il].ffn_down, cur); - } - - cur = ggml_add(ctx0, cur, attn_out); - cur = ggml_add(ctx0, 
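// The pair of adds here completes Falcon's parallel-residual block: in the 7B
// path both branches share one layernorm, so the layer computes
//   out = x + Attention(LN(x)) + MLP(LN(x))
// rather than the sequential GPT-2 arrangement, where the MLP consumes the
// attention output.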
cur, inpL); - // input for next layer - inpL = cur; - } - - ggml_set_scratch(ctx0, { 0, scr0_size, scr0, }); - - // norm - { - inpL = ggml_norm(ctx0, inpL); - - // inpL = ln_f_g*inpL + ln_f_b - inpL = ggml_add(ctx0, - ggml_mul(ctx0, - ggml_repeat(ctx0, model.output_norm, inpL), - inpL), - ggml_repeat(ctx0, model.output_norm_b, inpL)); - } - - ggml_set_scratch(ctx0, { 0, 0, nullptr, }); - - // lm_head - { - inpL = ggml_mul_mat(ctx0, model.lm_head, inpL); - - //inpL = ggml_add(ctx0, - // ggml_repeat(ctx0, model.lmh_b, inpL), - // inpL); - } - - // logits -> probs - //inpL = ggml_soft_max_inplace(ctx0, inpL); - - // run the computation - ggml_build_forward_expand(&gf, inpL); -// ggml_graph_compute (ctx0, &gf); - ggml_graph_compute_with_ctx(ctx0, &gf, n_threads); - - //if (n_past%100 == 0) { - // ggml_graph_print (&gf); - // ggml_graph_dump_dot(&gf, NULL, "gpt-2.dot"); - //} - - // return result for just the last token - embd_w.resize(n_vocab); - memcpy(embd_w.data(), (float *)ggml_get_data(inpL) + (n_vocab * (N - 1)), sizeof(float) * n_vocab); - - if (mem_per_token == 0) { - mem_per_token = ggml_used_mem(ctx0)/N; - } - //printf("used_mem = %zu\n", ggml_used_mem(ctx0)); - - ggml_free(ctx0); - - return true; -} - -int main(int argc, char ** argv) { - ggml_time_init(); - - const int64_t t_main_start_us = ggml_time_us(); - - gpt_params params; - - if (!gpt_params_parse(argc, argv, params)) { - return 1; - } - - int64_t t_load_us = 0; - - gpt2bpe_vocab vocab; - falcon_model model; - - // load the model - { - const int64_t t_start_us = ggml_time_us(); - - if (!falcon_model_load(params.model, model, vocab)) { - fprintf(stderr, "%s: failed to load model from '%s'\n", __func__, params.model.c_str()); - return 1; - } - - t_load_us = ggml_time_us() - t_start_us; - - } - - if (params.seed < 0) { - params.seed = time(NULL); - } - - if (params.top_k == 0) { - params.top_k = model.hparams.n_vocab; - } - - printf("%s: seed = %d\n", __func__, params.seed); - printf("%s: temp = %.3f\n", __func__, params.temp); - printf("%s: top_k = %d\n", __func__, params.top_k); - printf("%s: top_p = %.3f\n", __func__, params.top_p); - printf("%s: repeat_last_n = %d\n", __func__, params.repeat_last_n); - printf("%s: repeat_penalty = %.3f\n", __func__, params.repeat_penalty); - - std::mt19937 rng(params.seed); - - if (params.prompt.empty()) { - params.prompt = "Once upon"; - } - - std::vector last_n_tokens(model.hparams.n_ctx); - std::fill(last_n_tokens.begin(), last_n_tokens.end(), 0); - - int n_past = 0; - - int64_t t_sample_us = 0; - int64_t t_predict_us = 0; - - std::vector logits; - - // tokenize the prompt - std::vector embd_inp = gpt2bpe_tokenize(vocab, params.prompt,false, false); - - params.n_predict = std::min(params.n_predict, model.hparams.n_ctx - (int) embd_inp.size()); - - printf("%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size()); -// for (size_t i = 0; i < embd_inp.size(); i++) { -// printf("%s: token[%zu] = %6d, %s\n", __func__, i, embd_inp[i], vocab.id_to_token[embd_inp[i]].c_str()); -// } - - if( model.hparams.n_ctx < params.n_predict+embd_inp.size() ) { - params.n_predict = model.hparams.n_ctx-embd_inp.size(); - } - - printf("%s: n_predict = %d\n", __func__, params.n_predict); - printf("\n"); - - std::vector embd; - - // determine the required inference memory per token: - size_t mem_per_token = 0; - falcon_eval(model, params.n_threads, 0, { 0, 1, 2, 3 }, logits, mem_per_token); - - for (size_t i = embd.size(); i < embd_inp.size() + params.n_predict; i++) { - // predict - if (embd.size() > 
0) { - const int64_t t_start_us = ggml_time_us(); - - if (!falcon_eval(model, params.n_threads, n_past, embd, logits, mem_per_token)) { - printf("Failed to predict\n"); - return 1; - } - - t_predict_us += ggml_time_us() - t_start_us; - } - - n_past += embd.size(); - embd.clear(); - - if (i >= embd_inp.size()) { - // sample next token - const int top_k = params.top_k; - const float top_p = params.top_p; - const float temp = params.temp; - const int repeat_last_n = params.repeat_last_n; - const float repeat_penalty = params.repeat_penalty; - - const int n_vocab = model.hparams.n_vocab; - - gpt2bpe_vocab::id id = 0; - - { - const int64_t t_start_sample_us = ggml_time_us(); - - id = sample_top_k_top_p_repeat(vocab, logits.data() + (logits.size() - n_vocab), last_n_tokens.data(), last_n_tokens.size(), top_k, top_p, temp, repeat_last_n, repeat_penalty, rng); - - last_n_tokens.erase(last_n_tokens.begin()); - last_n_tokens.push_back(id); - - t_sample_us += ggml_time_us() - t_start_sample_us; - } - - // add it to the context - embd.push_back(id); - } else { - // if here, it means we are still processing the input prompt - for (size_t k = i; k < embd_inp.size(); k++) { - embd.push_back(embd_inp[k]); - if (embd.size() > params.n_batch) { - break; - } - } - i += embd.size() - 1; - } - - // display text - for (auto id : embd) { - printf("%s", vocab.id_to_token[id].c_str() ); - } - fflush(stdout); - - // end of text token - if (vocab.special_eos_id != -1 && embd.back() == vocab.special_eos_id) { - break; - } - } - - // report timing - { - const int64_t t_main_end_us = ggml_time_us(); - - printf("\n\n"); - printf("%s: mem per token = %8zu bytes\n", __func__, mem_per_token); - printf("%s: load time = %8.2f ms\n", __func__, t_load_us/1000.0f); - printf("%s: sample time = %8.2f ms\n", __func__, t_sample_us/1000.0f); - printf("%s: predict time = %8.2f ms / %.2f ms per token\n", __func__, t_predict_us/1000.0f, t_predict_us/1000.0f/n_past); - printf("%s: total time = %8.2f ms\n", __func__, (t_main_end_us - t_main_start_us)/1000.0f); - } - - ggml_free(model.ctx); - - return 0; -} diff --git a/examples/gptneox-wip/gptneox-main.cpp b/examples/gptneox-wip/gptneox-main.cpp deleted file mode 100644 index b76bafaa8..000000000 --- a/examples/gptneox-wip/gptneox-main.cpp +++ /dev/null @@ -1,1083 +0,0 @@ -#include "ggml.h" -#include "cmpnct_gpt2bpe.hpp" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#if defined(_MSC_VER) -#pragma warning(disable: 4244 4267) // possible loss of data -#endif - -// default hparams -struct gpt_neox_hparams { - size_t n_merges = 0; - size_t n_vocab = 0; - uint32_t n_ctx = 0; - uint32_t n_embd = 0; - uint32_t n_head = 0; - uint32_t n_block = 0; - uint32_t n_rot = 0; // rotary_pct * (n_embd / n_head) - bool par_res = true; - float norm_eps = 1e-5; -}; - -struct gpt_neox_block { - // pre normalization - struct ggml_tensor * ln_1_g; - struct ggml_tensor * ln_1_b; - - // attention - struct ggml_tensor * c_attn_attn_w; - struct ggml_tensor * c_attn_attn_b; - - struct ggml_tensor * c_attn_proj_w; - struct ggml_tensor * c_attn_proj_b; - - // post normalization - struct ggml_tensor * ln_2_g; - struct ggml_tensor * ln_2_b; - - // ff - struct ggml_tensor * c_mlp_fc_w; - struct ggml_tensor * c_mlp_fc_b; - - struct ggml_tensor * c_mlp_proj_w; - struct ggml_tensor * c_mlp_proj_b; -}; - -struct gpt_neox_model { - gpt_neox_hparams hparams; - - // normalization - struct ggml_tensor * ln_f_g; - struct ggml_tensor * ln_f_b; - - struct 
ggml_tensor * wte; // position embedding - - struct ggml_tensor * lmh_g; // language model head - - std::vector blocks; - - // key + value memory - struct ggml_tensor * memory_k; - struct ggml_tensor * memory_v; - - // - struct gguf_context * ggufctx; - struct ggml_context * ctx; - struct ggml_context * kvctx; - - std::map tensors; -}; - -struct gpt_params { - int32_t seed = -1; // RNG seed - int32_t n_threads = std::min(4, (int32_t) std::thread::hardware_concurrency()); - uint32_t n_predict = 200; // new tokens to predict - uint32_t n_batch = 512; // batch size for prompt processing - - // sampling parameters - int32_t top_k = 40; - float top_p = 1.0f; - float temp = 0.8f; - int32_t repeat_last_n = 64; - float repeat_penalty = 1.02f; - - std::string model = ""; // model path - std::string prompt = ""; - - std::string token_test = ""; - bool interactive = false; - int32_t interactive_port = -1; - int32_t n_gpu_layers = 0; -}; - -void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { - fprintf(stderr, "usage: %s [options]\n", argv[0]); - fprintf(stderr, "\n"); - fprintf(stderr, "options:\n"); - fprintf(stderr, " -h, --help show this help message and exit\n"); - fprintf(stderr, " -s SEED, --seed SEED RNG seed (default: -1)\n"); - fprintf(stderr, " -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); - fprintf(stderr, " -ngl N, --gpu-layers N number of layers to offload to GPU on supported models (default: %d)\n", params.n_gpu_layers); - fprintf(stderr, " -p PROMPT, --prompt PROMPT\n"); - fprintf(stderr, " prompt to start generation with (default: random)\n"); - fprintf(stderr, " -f FNAME, --file FNAME\n"); - fprintf(stderr, " load prompt from a file\n"); - fprintf(stderr, " -tt TOKEN_TEST, --token_test TOKEN_TEST\n"); - fprintf(stderr, " test tokenization\n"); - fprintf(stderr, " -n N, --n_predict N number of tokens to predict (default: %d)\n", params.n_predict); - fprintf(stderr, " --top_k N top-k sampling, 0 = n_vocab (default: %d)\n", params.top_k); - fprintf(stderr, " --top_p N top-p sampling (default: %.1f)\n", params.top_p); - fprintf(stderr, " --temp N temperature (default: %.1f)\n", params.temp); - fprintf(stderr, " --repeat-last-n N last n tokens to consider for penalize (default: %d, 0 = disabled)\n", params.repeat_last_n); - fprintf(stderr, " --repeat-penalty N penalize repeat sequence of tokens (default: %.2f, 1.0 = disabled)\n", (double)params.repeat_penalty); - fprintf(stderr, " -b N, --batch_size N batch size for prompt processing (default: %d)\n", params.n_batch); - fprintf(stderr, " -m FNAME, --model FNAME\n"); - fprintf(stderr, " model path (default: %s)\n", params.model.c_str()); - fprintf(stderr, "\n"); -} - -// Function to check if the next argument exists -std::string get_next_arg(int& i, int argc, char** argv, const std::string& flag, gpt_params& params) { - if (i + 1 < argc && argv[i + 1][0] != '-') { - return argv[++i]; - } else { - fprintf(stderr, "error: %s requires one argument.\n", flag.c_str()); - gpt_print_usage(argc, argv, params); - exit(0); - } -} - -bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { - for (int i = 1; i < argc; i++) { - std::string arg = argv[i]; - - if (arg == "-s" || arg == "--seed") { - params.seed = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-t" || arg == "--threads") { - params.n_threads = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-ngl" || arg == "--gpu-layers" || arg == "--n-gpu-layers") 
{ - params.n_gpu_layers = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-p" || arg == "--prompt") { - params.prompt = get_next_arg(i, argc, argv, arg, params); - } else if (arg == "-n" || arg == "--n_predict") { - params.n_predict = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--top_k") { - params.top_k = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--top_p") { - params.top_p = std::stof(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--temp") { - params.temp = std::stof(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--repeat-last-n") { - params.repeat_last_n = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "--repeat-penalty") { - params.repeat_penalty = std::stof(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-b" || arg == "--batch_size") { - params.n_batch= std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-m" || arg == "--model") { - params.model = get_next_arg(i, argc, argv, arg, params); - } else if (arg == "-i" || arg == "--interactive") { - params.interactive = true; - } else if (arg == "-ip" || arg == "--interactive-port") { - params.interactive = true; - params.interactive_port = std::stoi(get_next_arg(i, argc, argv, arg, params)); - } else if (arg == "-h" || arg == "--help") { - gpt_print_usage(argc, argv, params); - exit(0); - } else if (arg == "-f" || arg == "--file") { - get_next_arg(i, argc, argv, arg, params); - std::ifstream file(argv[i]); - if (!file) { - fprintf(stderr, "error: failed to open file '%s'\n", argv[i]); - break; - } - std::copy(std::istreambuf_iterator(file), std::istreambuf_iterator(), back_inserter(params.prompt)); - if (params.prompt.back() == '\n') { - params.prompt.pop_back(); - } - } else if (arg == "-tt" || arg == "--token_test") { - params.token_test = get_next_arg(i, argc, argv, arg, params); - } - else { - fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); - gpt_print_usage(argc, argv, params); - exit(0); - } - } - - return true; -} - -gpt2bpe_vocab::id sample_top_k_top_p_repeat( - const gpt2bpe_vocab & vocab, - const float * logits, - const int32_t * last_n_tokens_data, - size_t last_n_tokens_data_size, - int top_k, - double top_p, - double temp, - int repeat_last_n, - float repeat_penalty, - std::mt19937 & rng) { - - int n_logits = vocab.id_to_token.size(); - - const auto * plogits = logits; - - const auto last_n_tokens = std::vector(last_n_tokens_data, last_n_tokens_data + last_n_tokens_data_size); - - if (temp <= 0) { - // select the token with the highest logit directly - float max_logit = plogits[0]; - gpt2bpe_vocab::id max_id = 0; - - for (int i = 1; i < n_logits; ++i) { - if (plogits[i] > max_logit) { - max_logit = plogits[i]; - max_id = i; - } - } - return max_id; - } - - - std::vector> logits_id; - logits_id.reserve(n_logits); - - { - const float scale = 1.0f/temp; - for (int i = 0; i < n_logits; ++i) { - // repetition penalty from ctrl paper (https://arxiv.org/abs/1909.05858) - // credit https://github.com/facebookresearch/llama/compare/main...shawwn:llama:main - if (repeat_last_n > 0 && std::find(last_n_tokens.end()-repeat_last_n, last_n_tokens.end(), i) != last_n_tokens.end()) { - // if score < 0 then repetition penalty has to multiplied to reduce the previous token probability - if (plogits[i] < 0.0f) { - logits_id.push_back(std::make_pair(plogits[i]*scale*repeat_penalty, i)); - } else { - 
logits_id.push_back(std::make_pair(plogits[i]*scale/repeat_penalty, i)); - } - } else { - logits_id.push_back(std::make_pair(plogits[i]*scale, i)); - } - } - } - - // find the top K tokens - std::partial_sort( - logits_id.begin(), - logits_id.begin() + top_k, logits_id.end(), - [](const std::pair & a, const std::pair & b) { - return a.first > b.first; - }); - - logits_id.resize(top_k); - - double maxl = -INFINITY; - for (const auto & kv : logits_id) { - maxl = std::max(maxl, kv.first); - } - - // compute probs for the top K tokens - std::vector probs; - probs.reserve(logits_id.size()); - - double sum = 0.0; - for (const auto & kv : logits_id) { - double p = exp(kv.first - maxl); - probs.push_back(p); - sum += p; - } - - // normalize the probs - for (auto & p : probs) { - p /= sum; - } - - if (top_p < 1.0f) { - double cumsum = 0.0f; - for (int i = 0; i < top_k; i++) { - cumsum += probs[i]; - if (cumsum >= top_p) { - top_k = i + 1; - probs.resize(top_k); - logits_id.resize(top_k); - break; - } - } - - cumsum = 1.0/cumsum; - for (int i = 0; i < (int) probs.size(); i++) { - probs[i] *= cumsum; - } - } - -// printf("\n"); -// for (int i = 0; i < (int) probs.size(); i++) { -// for (int i = 0; i < 10; i++) { -// printf("%d: '%s' %f\n", i, vocab.id_to_token.at(logits_id[i].second).c_str(), probs[i]); -// } - - std::discrete_distribution<> dist(probs.begin(), probs.end()); - int idx = dist(rng); - - return logits_id[idx].second; - -} - -struct ggml_tensor * get_tensor_ex( struct ggml_context * ctx, std::string name){ - - struct ggml_tensor * cur = ggml_get_tensor(ctx, name.c_str()); - if( cur == NULL ) { - printf("%s: tensor '%s' not found!\n", __func__, name.c_str()); - } else { -// printf("%s: n_dims = %d, name = '%s'\n", __func__, cur->n_dims, cur->name); - } - - return cur; -} - -// load the model's weights from a file -bool gpt_neox_model_load(const std::string & fname, gpt_neox_model & model, gpt2bpe_vocab & vocab) { - printf("%s: loading model from '%s'..\n", __func__, fname.c_str()); - - model.ctx = NULL; - - struct gguf_init_params ggufparams = { - /*.no_alloc = */ false, - /*.ctx = */ &model.ctx, - }; - - auto & ggufctx = model.ggufctx; - - ggufctx = gguf_init_from_file(fname.c_str(), ggufparams); - - if (!ggufctx) { - fprintf(stderr, "%s: gguf_init_from_file() failed\n", __func__); - return false; - } - - printf("%s: gguf version = %d\n", __func__, gguf_get_version(ggufctx)); - printf("%s: gguf alignment = %zu\n", __func__, gguf_get_alignment(ggufctx)); - printf("%s: gguf data offset = %zu\n", __func__, gguf_get_data_offset(ggufctx)); - - // print all kv - #if 0 - { - const int n_kv = gguf_get_n_kv(ggufctx); - - printf("%s: n_kv: %d\n", __func__, n_kv); - - for (int i = 0; i < n_kv; ++i) { - const char * key = gguf_get_key(ggufctx, i); - - printf("%s: kv[%d]: key = %s\n", __func__, i, key); - } - } - #endif - - // print some standard metadata - { - int keyidx; - - keyidx = gguf_find_key(ggufctx, "general.name"); - if (keyidx != -1) { printf("%s: model name = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.description"); - if (keyidx != -1) { printf("%s: model description = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.author"); - if (keyidx != -1) { printf("%s: model author = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.license"); - if (keyidx != -1) { printf("%s: model license = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - 
keyidx = gguf_find_key(ggufctx, "general.architecture"); - if (keyidx != -1) { printf("%s: model architecture = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.file_type"); - if (keyidx != -1) { printf("%s: model file type = %" PRIu32 "\n", __func__, gguf_get_val_u32(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "gptneox.tensor_data_layout"); - if (keyidx != -1) { printf("%s: model data layout = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - keyidx = gguf_find_key(ggufctx, "general.source.huggingface.repository"); - if (keyidx != -1) { printf("%s: model source HF repo = %s\n", __func__, gguf_get_val_str(ggufctx, keyidx)); } - } - - // check required metadata - { - int keyidx; - - // check model architecture kv - keyidx = gguf_find_key(ggufctx, "general.architecture"); - if (keyidx != -1) { - if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "gptneox") != 0) { - printf("%s: model architecture not supported!\n", __func__); - return false; - } - } else { - printf("%s: gguf model architecture not found!\n", __func__); - return false; - } - - } - - // load hparams - { - auto & hparams = model.hparams; - - bool ok = true; - int keyidx; - - if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.context_length"); - if (keyidx != -1) { hparams.n_ctx = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.embedding_length"); - if (keyidx != -1) { hparams.n_embd = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.attention.head_count"); - if (keyidx != -1) { hparams.n_head = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.block_count"); - if (keyidx != -1) { hparams.n_block = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.rope.dimension_count"); - if (keyidx != -1) { hparams.n_rot = gguf_get_val_u32(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.use_parallel_residual"); - if (keyidx != -1) { hparams.par_res = gguf_get_val_bool(ggufctx, keyidx); } else { ok = false; } } - - if (ok) { keyidx = gguf_find_key(ggufctx, "gptneox.attention.layer_norm_epsilon"); - if (keyidx != -1) { hparams.norm_eps= gguf_get_val_f32(ggufctx, keyidx); } else { ok = false; } } - - if (!ok) { - fprintf(stderr, "%s: required hparam missing!\n", __func__); - return false; - } - - printf("%s: n_ctx = %d\n", __func__, hparams.n_ctx); - printf("%s: n_embd = %d\n", __func__, hparams.n_embd); - printf("%s: n_head = %d\n", __func__, hparams.n_head); - printf("%s: n_block = %d\n", __func__, hparams.n_block); - printf("%s: n_rot = %d\n", __func__, hparams.n_rot); - printf("%s: par_res = %d\n", __func__, hparams.par_res); - printf("%s: norm_eps = %g\n", __func__, hparams.norm_eps); - - } - - // load vocab - { - auto & hparams = model.hparams; - - int keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.model"); - - if (keyidx != -1) { - if ( strcmp(gguf_get_val_str(ggufctx, keyidx), "gpt2") != 0) { - printf("%s: tokenizer model not supported!\n", __func__); - return false; - } - } else { - printf("%s: tokenizer model not found!\n", __func__); - return false; - } - - - int tokens_keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.tokens"); - - if (tokens_keyidx == -1) { - printf("%s: gpt2 tokenizer vocab not found!\n", __func__); - return false; - } - - int merges_keyidx = 
gguf_find_key(ggufctx, "tokenizer.ggml.merges"); - - if (merges_keyidx == -1) { - printf("%s: gpt2 tokenizer merges not found!\n", __func__); - return false; - } - - hparams.n_vocab = gguf_get_arr_n(ggufctx,tokens_keyidx); - hparams.n_merges = gguf_get_arr_n(ggufctx,merges_keyidx); - - printf("%s: gpt2 tokenizer vocab = %zu\n", __func__, hparams.n_vocab); - printf("%s: gpt2 tokenizer merges = %zu\n", __func__, hparams.n_merges); - - for (size_t i = 0; i < hparams.n_vocab; i++) { - std::string word = gguf_get_arr_str(ggufctx, tokens_keyidx, i); - -// printf("token %d = '%s'\n",i,word.c_str() ); - - vocab.token_to_id[word] = i; - vocab.id_to_token[i] = word; - - if( vocab.id_to_token[i] == "\n" ) { - vocab.linefeed_id = i; - } - } - - std::vector> bpe_merges; - - for (size_t i = 0; i < hparams.n_merges; i++) { - - std::string word = gguf_get_arr_str(ggufctx, merges_keyidx, i); - - // Split the merges - std::string first, second; - size_t pos = word.find(' ', 1); // Start the search from the second character - if (pos != std::string::npos) { - first = word.substr(0, pos); - second = word.substr(pos + 1); - } - - bpe_merges.push_back(std::make_pair(first, second)); - } - - vocab.populate_bpe_ranks(bpe_merges); - - - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.bos_token_id"); if( keyidx != -1 ) { vocab.special_bos_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.eos_token_id"); if( keyidx != -1 ) { vocab.special_eos_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.unknown_token_id"); if( keyidx != -1 ) { vocab.special_unk_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.separator_token_id"); if( keyidx != -1 ) { vocab.special_sep_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - keyidx = gguf_find_key(ggufctx, "tokenizer.ggml.padding_token_id"); if( keyidx != -1 ) { vocab.special_pad_id = (int32_t)gguf_get_val_u32(ggufctx, keyidx); } - - if( vocab.special_bos_id != -1 ) { printf("%s: BOS token = %d '%s'\n", __func__, vocab.special_bos_id, vocab.id_to_token[vocab.special_bos_id].c_str() ); } - if( vocab.special_eos_id != -1 ) { printf("%s: EOS token = %d '%s'\n", __func__, vocab.special_eos_id, vocab.id_to_token[vocab.special_eos_id].c_str() ); } - if( vocab.special_unk_id != -1 ) { printf("%s: UNK token = %d '%s'\n", __func__, vocab.special_unk_id, vocab.id_to_token[vocab.special_unk_id].c_str() ); } - if( vocab.special_sep_id != -1 ) { printf("%s: SEP token = %d '%s'\n", __func__, vocab.special_sep_id, vocab.id_to_token[vocab.special_sep_id].c_str() ); } - if( vocab.special_pad_id != -1 ) { printf("%s: PAD token = %d '%s'\n", __func__, vocab.special_pad_id, vocab.id_to_token[vocab.special_pad_id].c_str() ); } - if( vocab.linefeed_id != -1 ) { printf("%s: LF token = %d\n", __func__, vocab.linefeed_id ); } - } - - - auto & ctx = model.ctx; - size_t ctx_size = ggml_get_mem_size(ctx); - - printf("%s: ggml ctx size = %6.2f MB\n", __func__, ctx_size/(1024.0*1024.0)); - - // print tensor info - #if 0 - { - const int n_tensors = gguf_get_n_tensors(ggufctx); - - printf("%s: n_tensors: %d\n", __func__, n_tensors); - - for (int i = 0; i < n_tensors; ++i) { - const char * name = gguf_get_tensor_name (ggufctx, i); - const size_t offset = gguf_get_tensor_offset(ggufctx, i); - - printf("%s: tensor[%d]: name = %s, offset = %zu\n", __func__, i, name, offset); - } - } - #endif - - // prepare memory for the weights - { - const int n_block = 
model.hparams.n_block; - - model.blocks.resize(n_block); - - model.wte = ggml_get_tensor(ctx, "token_embd.weight"); - model.ln_f_g = ggml_get_tensor(ctx, "output_norm.weight"); - model.ln_f_b = ggml_get_tensor(ctx, "output_norm.bias"); - model.lmh_g = ggml_get_tensor(ctx, "output.weight"); - - // map by name - model.tensors["token_embd.weight"] = model.wte; - model.tensors["output_norm.weight"] = model.ln_f_g; - model.tensors["output_norm.bias"] = model.ln_f_b; - model.tensors["output.weight"] = model.lmh_g; - - for (int i = 0; i < n_block; ++i) { - auto & block = model.blocks[i]; - - std::string blocknamestart = "blk." + std::to_string(i) + "."; - - block.ln_1_g = get_tensor_ex(ctx, blocknamestart + "attn_norm.weight" ); - block.ln_1_b = get_tensor_ex(ctx, blocknamestart + "attn_norm.bias" ); - - block.c_attn_attn_w = get_tensor_ex(ctx, blocknamestart + "attn_qkv.weight" ); - block.c_attn_attn_b = get_tensor_ex(ctx ,blocknamestart + "attn_qkv.bias" ); - - block.c_attn_proj_w = get_tensor_ex(ctx, blocknamestart + "attn_output.weight" ); - block.c_attn_proj_b = get_tensor_ex(ctx, blocknamestart + "attn_output.bias" ); - - block.ln_2_g = get_tensor_ex(ctx, blocknamestart + "ffn_norm.weight" ); - block.ln_2_b = get_tensor_ex(ctx, blocknamestart + "ffn_norm.bias"); - - block.c_mlp_fc_w = get_tensor_ex(ctx, blocknamestart + "ffn_up.weight" ); - block.c_mlp_fc_b = get_tensor_ex(ctx, blocknamestart + "ffn_up.bias" ); - - block.c_mlp_proj_w = get_tensor_ex(ctx, blocknamestart + "ffn_down.weight" ); - block.c_mlp_proj_b = get_tensor_ex(ctx, blocknamestart + "ffn_down.bias" ); - - // map by name - model.tensors[blocknamestart + "attn_norm.weight"] = block.ln_1_g; - model.tensors[blocknamestart + "attn_norm.bias"] = block.ln_1_b; - - model.tensors[blocknamestart + "attn_qkv.weight"] = block.c_attn_attn_w; - model.tensors[blocknamestart + "attn_qkv.bias"] = block.c_attn_attn_b; - - model.tensors[blocknamestart + "attn_output.weight"] = block.c_attn_proj_w; - model.tensors[blocknamestart + "attn_output.bias"] = block.c_attn_proj_b; - - model.tensors[blocknamestart + "ffn_norm.weight"] = block.ln_2_g; - model.tensors[blocknamestart + "ffn_norm.bias"] = block.ln_2_b; - - model.tensors[blocknamestart + "ffn_up.weight"] = block.c_mlp_fc_w; - model.tensors[blocknamestart + "ffn_up.bias"] = block.c_mlp_fc_b; - - model.tensors[blocknamestart + "ffn_down.weight"] = block.c_mlp_proj_w; - model.tensors[blocknamestart + "ffn_down.bias"] = block.c_mlp_proj_b; - } - } - - // key + value memory - { - const auto & kvctx = model.kvctx; - const auto & hparams = model.hparams; - - const int n_embd = hparams.n_embd; - const int n_block = hparams.n_block; - const int n_ctx = hparams.n_ctx; - - const int64_t n_mem = n_block*n_ctx; - const int64_t n_elements = n_embd*n_mem; - - // create the ggml context - { - struct ggml_init_params params = { - /*.mem_size =*/ size_t(n_elements*4+ggml_tensor_overhead()*2), - /*.mem_buffer =*/ NULL, - /*.no_alloc =*/ false, - }; - - model.kvctx = ggml_init(params); - if (!model.kvctx) { - fprintf(stderr, "%s: kv ggml_init() failed\n", __func__); - return false; - } - - } - - - model.memory_k = ggml_new_tensor_1d(kvctx, GGML_TYPE_F16, n_elements); - model.memory_v = ggml_new_tensor_1d(kvctx, GGML_TYPE_F16, n_elements); - - const size_t memory_size = ggml_nbytes(model.memory_k) + ggml_nbytes(model.memory_v); - - printf("%s: memory_size = %8.2f MB, n_mem = %" PRId64 "\n", __func__, memory_size/1024.0/1024.0, n_mem); - } - - return true; -} - - -// feed-forward network -ggml_tensor * 
gpt_neox_ff( - const gpt_neox_block &block, - ggml_context * ctx0, - ggml_tensor * inp, - const gpt_neox_hparams &hparams) { - - ggml_tensor * cur = ggml_norm(ctx0, inp, hparams.norm_eps); - - cur = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, block.ln_2_g, cur), cur), ggml_repeat(ctx0, block.ln_2_b, cur)); - cur = ggml_mul_mat(ctx0, block.c_mlp_fc_w, cur); - cur = ggml_add(ctx0, ggml_repeat(ctx0, block.c_mlp_fc_b, cur), cur); - - // GELU activation - cur = ggml_gelu(ctx0, cur); - - // projection - // cur = proj_w*cur + proj_b - cur = ggml_mul_mat(ctx0, block.c_mlp_proj_w, cur); - - cur = ggml_add(ctx0, ggml_repeat(ctx0, block.c_mlp_proj_b, cur), cur); - return cur; -} - -// evaluate the transformer -// -// - model: the model -// - n_threads: number of threads to use -// - n_past: the context size so far -// - embd_inp: the embeddings of the tokens in the context -// - embd_w: the predicted logits for the next token -// -bool gpt_neox_eval( - const gpt_neox_model & model, - const int n_threads, - const int n_past, - const std::vector & embd_inp, - std::vector & embd_w, - size_t & mem_per_token) { - const int N = embd_inp.size(); - - const auto & hparams = model.hparams; - - const int n_embd = hparams.n_embd; - const int n_block = hparams.n_block; - const int n_ctx = hparams.n_ctx; - const int n_head = hparams.n_head; - const int n_vocab = hparams.n_vocab; - const int n_rot = hparams.n_rot; - - static size_t buf_size = 256u*1024*1024; - static void * buf = malloc(buf_size); - - // use 2 scratch buffers - // TODO: very hacky solution - reimplement in a more elegant way - static size_t scr0_size = 256u*1024*1024; - static void * scr0 = malloc(scr0_size); - - static size_t scr1_size = 256u*1024*1024; - static void * scr1 = malloc(scr1_size); - - if (mem_per_token > 0 && mem_per_token*N > buf_size) { - const size_t buf_size_new = 1.1*(mem_per_token*N); // add 10% to account for ggml object overhead - //printf("\n%s: reallocating buffer from %zu to %zu bytes\n", __func__, buf_size, buf_size_new); - - // reallocate - buf_size = buf_size_new; - buf = realloc(buf, buf_size); - if (buf == nullptr) { - fprintf(stderr, "%s: failed to allocate %zu bytes\n", __func__, buf_size); - return false; - } - } - - struct ggml_init_params params = { - /*.mem_size =*/ buf_size, - /*.mem_buffer =*/ buf, - /*.no_alloc =*/ false, - }; - - struct ggml_context * ctx0 = ggml_init(params); - struct ggml_cgraph gf = {}; - - struct ggml_tensor * embd = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, N); - memcpy(embd->data, embd_inp.data(), N*ggml_element_size(embd)); - - - // wte - struct ggml_tensor * inpL = ggml_get_rows(ctx0, model.wte, embd); - - for (int il = 0; il < n_block; ++il) { - struct ggml_tensor * cur; - - ggml_set_scratch(ctx0, { 0, scr0_size, scr0, }); - - // self-attention - { - { - cur = ggml_norm(ctx0, inpL, hparams.norm_eps); - - cur = ggml_add(ctx0, - ggml_mul(ctx0, ggml_repeat(ctx0, model.blocks[il].ln_1_g, cur), cur), - ggml_repeat(ctx0, model.blocks[il].ln_1_b, cur)); - } - - // compute QKV - { - - cur = ggml_mul_mat(ctx0, model.blocks[il].c_attn_attn_w, cur); - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.blocks[il].c_attn_attn_b, cur), cur); - } - - struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_3d(ctx0, cur, n_embd/n_head, n_head, N, cur->nb[1]/n_head, cur->nb[1], 0*sizeof(float)*n_embd/n_head)); - struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_3d(ctx0, cur, n_embd/n_head, n_head, N, cur->nb[1]/n_head, cur->nb[1], 1*sizeof(float)*n_embd/n_head)); - struct ggml_tensor * Vcur = 
ggml_cont(ctx0, ggml_view_3d(ctx0, cur, n_embd/n_head, n_head, N, cur->nb[1]/n_head, cur->nb[1], 2*sizeof(float)*n_embd/n_head)); - - // using mode = 2 for GPT-NeoX mode - Qcur = ggml_rope_inplace(ctx0, Qcur, n_past, n_rot, 2, 0); - Kcur = ggml_rope_inplace(ctx0, Kcur, n_past, n_rot, 2, 0); - - // store key and value to memory - { - Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, Vcur, n_embd, N)); - - struct ggml_tensor * k = ggml_view_1d(ctx0, model.memory_k, N*n_embd, (ggml_element_size(model.memory_k)*n_embd)*(il*n_ctx + n_past)); - struct ggml_tensor * v = ggml_view_2d(ctx0, model.memory_v, N, n_embd, - ( n_ctx)*ggml_element_size(model.memory_v), - (il*n_ctx)*ggml_element_size(model.memory_v)*n_embd + n_past*ggml_element_size(model.memory_v)); - - ggml_build_forward_expand(&gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(&gf, ggml_cpy(ctx0, Vcur, v)); - } - - // Q = Qcur.contiguous().view(n_embd/n_head, n_head, N).permute(0, 2, 1, 3) - struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); - - // K = Kmem.view(n_embd/n_head, n_head, n_past + N).permute(0, 2, 1, 3) - struct ggml_tensor * K = - ggml_permute(ctx0, - ggml_reshape_3d(ctx0, - ggml_view_1d(ctx0, model.memory_k, (n_past + N)*n_embd, il*n_ctx*ggml_element_size(model.memory_k)*n_embd), - n_embd/n_head, n_head, n_past + N), - 0, 2, 1, 3); - - // K * Q - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - - // KQ_scaled = KQ / sqrt(n_embd/n_head) - struct ggml_tensor * KQ_scaled = - ggml_scale_inplace(ctx0, - KQ, - ggml_new_f32(ctx0, 1.0f/sqrt(float(n_embd)/n_head)) - ); - - // KQ_masked = mask_past(KQ_scaled) - struct ggml_tensor * KQ_masked = ggml_diag_mask_inf_inplace(ctx0, KQ_scaled, n_past); - - // KQ = soft_max(KQ_masked) - struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); - - // V_trans = Vmem.view(n_embd/n_head, n_head, n_past + N).permute(1, 2, 0, 3).contiguous() - struct ggml_tensor * V = - ggml_view_3d(ctx0, model.memory_v, - n_past + N, n_embd/n_head, n_head, - n_ctx*ggml_element_size(model.memory_v), - n_ctx*ggml_element_size(model.memory_v)*n_embd/n_head, - il*n_ctx*ggml_element_size(model.memory_v)*n_embd); - - // KQV = transpose(V) * KQ_soft_max - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - - // KQV_merged = KQV.permute(0, 2, 1, 3) - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - - // cur = KQV_merged.contiguous().view(n_embd, N) - cur = ggml_cpy(ctx0, KQV_merged, ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, N)); - - // projection - { - cur = ggml_mul_mat(ctx0, model.blocks[il].c_attn_proj_w, cur); - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.blocks[il].c_attn_proj_b, cur), cur); - } - } - - ggml_set_scratch(ctx0, { 0, scr1_size, scr1, }); - - if (hparams.par_res == 0) { - struct ggml_tensor * inpFF = ggml_add(ctx0, cur, inpL); - - cur = gpt_neox_ff(model.blocks[il], ctx0, inpFF, hparams); - - // input for next layer - inpL = ggml_add(ctx0, cur, inpFF); - } else { - struct ggml_tensor * inpFF = cur; - - // this is independent of the self-attention result, so it could be done in parallel to the self-attention - // note here we pass inpL instead of cur - cur = gpt_neox_ff(model.blocks[il], ctx0, inpL, hparams); - - // layer input + FF - cur = ggml_add(ctx0, cur, inpFF); - - // input for next layer - inpL = ggml_add(ctx0, cur, inpL); - } - } - - ggml_set_scratch(ctx0, { 0, scr0_size, scr0, }); - - // norm - { - inpL = ggml_norm(ctx0, inpL, hparams.norm_eps); - - // inpL = ln_f_g*inpL + ln_f_b - inpL = ggml_add(ctx0, - 
ggml_mul(ctx0, - ggml_repeat(ctx0, model.ln_f_g, inpL), - inpL), - ggml_repeat(ctx0, model.ln_f_b, inpL)); - } - - ggml_set_scratch(ctx0, { 0, 0, nullptr, }); - - // lm_head - { - inpL = ggml_mul_mat(ctx0, model.lmh_g, inpL); - - //inpL = ggml_add(ctx0, - // ggml_repeat(ctx0, model.lmh_b, inpL), - // inpL); - } - - // logits -> probs - //inpL = ggml_soft_max_inplace(ctx0, inpL); - - // run the computation - ggml_build_forward_expand(&gf, inpL); - ggml_graph_compute_with_ctx(ctx0, &gf, n_threads); - - //if (n_past%100 == 0) { - // ggml_graph_print (&gf); - // ggml_graph_dump_dot(&gf, NULL, "gpt-2.dot"); - //} - - //embd_w.resize(n_vocab*N); - //memcpy(embd_w.data(), ggml_get_data(inpL), sizeof(float)*n_vocab*N); - - // return result for just the last token - embd_w.resize(n_vocab); - memcpy(embd_w.data(), (float *) ggml_get_data(inpL) + (n_vocab*(N-1)), sizeof(float)*n_vocab); - - if (mem_per_token == 0) { - mem_per_token = ggml_used_mem(ctx0)/N; - } - //printf("used_mem = %zu\n", ggml_used_mem(ctx0)); - - ggml_free(ctx0); - - return true; -} - -int main(int argc, char ** argv) { - ggml_time_init(); - - const int64_t t_main_start_us = ggml_time_us(); - - gpt_params params; - - if (!gpt_params_parse(argc, argv, params)) { - return 1; - } - - int64_t t_load_us = 0; - - gpt2bpe_vocab vocab; - gpt_neox_model model; - - // load the model - { - const int64_t t_start_us = ggml_time_us(); - - if (!gpt_neox_model_load(params.model, model, vocab)) { - fprintf(stderr, "%s: failed to load model from '%s'\n", __func__, params.model.c_str()); - return 1; - } - - t_load_us = ggml_time_us() - t_start_us; - - } - - if (params.seed < 0) { - params.seed = time(NULL); - } - - if (params.top_k == 0) { - params.top_k = model.hparams.n_vocab; - } - - printf("%s: seed = %d\n", __func__, params.seed); - printf("%s: temp = %.3f\n", __func__, params.temp); - printf("%s: top_k = %d\n", __func__, params.top_k); - printf("%s: top_p = %.3f\n", __func__, params.top_p); - printf("%s: repeat_last_n = %d\n", __func__, params.repeat_last_n); - printf("%s: repeat_penalty = %.3f\n", __func__, params.repeat_penalty); - - std::mt19937 rng(params.seed); - - if (params.prompt.empty()) { - params.prompt = "Once upon"; - } - - std::vector last_n_tokens(model.hparams.n_ctx); - std::fill(last_n_tokens.begin(), last_n_tokens.end(), 0); - - int n_past = 0; - - int64_t t_sample_us = 0; - int64_t t_predict_us = 0; - - std::vector logits; - - // tokenize the prompt - std::vector embd_inp = gpt2bpe_tokenize(vocab, params.prompt,false, false); - - params.n_predict = std::min(params.n_predict, model.hparams.n_ctx - (int) embd_inp.size()); - - printf("%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size()); -// for (size_t i = 0; i < embd_inp.size(); i++) { -// printf("%s: token[%zu] = %6d, %s\n", __func__, i, embd_inp[i], vocab.id_to_token[embd_inp[i]].c_str()); -// } - - if( model.hparams.n_ctx < params.n_predict+embd_inp.size() ) { - params.n_predict = model.hparams.n_ctx-embd_inp.size(); - } - - printf("%s: n_predict = %d\n", __func__, params.n_predict); - printf("\n"); - - std::vector embd; - - // determine the required inference memory per token: - size_t mem_per_token = 0; - gpt_neox_eval(model, params.n_threads, 0, { 0, 1, 2, 3 }, logits, mem_per_token); - - for (size_t i = embd.size(); i < embd_inp.size() + params.n_predict; i++) { - // predict - if (embd.size() > 0) { - const int64_t t_start_us = ggml_time_us(); - - if (!gpt_neox_eval(model, params.n_threads, n_past, embd, logits, mem_per_token)) { - printf("Failed to 
predict\n"); - return 1; - } - - t_predict_us += ggml_time_us() - t_start_us; - } - - n_past += embd.size(); - embd.clear(); - - if (i >= embd_inp.size()) { - // sample next token - const int top_k = params.top_k; - const float top_p = params.top_p; - const float temp = params.temp; - const int repeat_last_n = params.repeat_last_n; - const float repeat_penalty = params.repeat_penalty; - - const int n_vocab = model.hparams.n_vocab; - - gpt2bpe_vocab::id id = 0; - - { - const int64_t t_start_sample_us = ggml_time_us(); - - id = sample_top_k_top_p_repeat(vocab, logits.data() + (logits.size() - n_vocab), last_n_tokens.data(), last_n_tokens.size(), top_k, top_p, temp, repeat_last_n, repeat_penalty, rng); - - last_n_tokens.erase(last_n_tokens.begin()); - last_n_tokens.push_back(id); - - t_sample_us += ggml_time_us() - t_start_sample_us; - } - - // add it to the context - embd.push_back(id); - } else { - // if here, it means we are still processing the input prompt - for (size_t k = i; k < embd_inp.size(); k++) { - embd.push_back(embd_inp[k]); - if (embd.size() > params.n_batch) { - break; - } - } - i += embd.size() - 1; - } - - // display text - for (auto id : embd) { - printf("%s", vocab.id_to_token[id].c_str() ); - } - fflush(stdout); - - // end of text token - if (vocab.special_eos_id != -1 && embd.back() == vocab.special_eos_id) { - break; - } - } - - // report timing - { - const int64_t t_main_end_us = ggml_time_us(); - - printf("\n\n"); - printf("%s: mem per token = %8zu bytes\n", __func__, mem_per_token); - printf("%s: load time = %8.2f ms\n", __func__, t_load_us/1000.0f); - printf("%s: sample time = %8.2f ms\n", __func__, t_sample_us/1000.0f); - printf("%s: predict time = %8.2f ms / %.2f ms per token\n", __func__, t_predict_us/1000.0f, t_predict_us/1000.0f/n_past); - printf("%s: total time = %8.2f ms\n", __func__, (t_main_end_us - t_main_start_us)/1000.0f); - } - - ggml_free(model.ctx); - - return 0; -} diff --git a/examples/infill/CMakeLists.txt b/examples/infill/CMakeLists.txt index 046f9b1e7..57d01cb0b 100644 --- a/examples/infill/CMakeLists.txt +++ b/examples/infill/CMakeLists.txt @@ -4,5 +4,5 @@ install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) + add_dependencies(${TARGET} BUILD_INFO) endif() diff --git a/examples/infill/infill.cpp b/examples/infill/infill.cpp index 128d67080..6331335e3 100644 --- a/examples/infill/infill.cpp +++ b/examples/infill/infill.cpp @@ -39,8 +39,8 @@ static gpt_params * g_params; static std::vector * g_input_tokens; static std::ostringstream * g_output_ss; static std::vector * g_output_tokens; -static bool is_interacting = false; +static bool is_interacting = false; static void write_logfile( const llama_context * ctx, const gpt_params & params, const llama_model * model, @@ -104,7 +104,7 @@ static void sigint_handler(int signo) { int main(int argc, char ** argv) { gpt_params params; - llama_sampling_params & sparams = params.sampling_params; + llama_sampling_params & sparams = params.sparams; g_params = ¶ms; if (!gpt_params_parse(argc, argv, params)) { @@ -358,36 +358,10 @@ int main(int argc, char ** argv) { LOG_TEE("Input suffix: '%s'\n", params.input_suffix.c_str()); } } - LOG_TEE("sampling: repeat_last_n = %d, repeat_penalty = %f, presence_penalty = %f, frequency_penalty = %f, top_k = %d, tfs_z = %f, top_p = %f, typical_p = %f, temp = %f, mirostat = %d, mirostat_lr = %f, 
mirostat_ent = %f\n", - sparams.repeat_last_n, sparams.repeat_penalty, sparams.presence_penalty, sparams.frequency_penalty, sparams.top_k, sparams.tfs_z, sparams.top_p, sparams.typical_p, sparams.temp, sparams.mirostat, sparams.mirostat_eta, sparams.mirostat_tau); + LOG_TEE("sampling: \n%s\n", llama_sampling_print(sparams).c_str()); LOG_TEE("generate: n_ctx = %d, n_batch = %d, n_predict = %d, n_keep = %d\n", n_ctx, params.n_batch, params.n_predict, params.n_keep); LOG_TEE("\n\n"); - struct llama_grammar * grammar = NULL; - grammar_parser::parse_state parsed_grammar; - - if (!params.grammar.empty()) { - parsed_grammar = grammar_parser::parse(params.grammar.c_str()); - // will be empty (default) if there are parse errors - if (parsed_grammar.rules.empty()) { - return 1; - } - LOG_TEE("%s: grammar:\n", __func__); - grammar_parser::print_grammar(stderr, parsed_grammar); - LOG_TEE("\n"); - - { - auto it = sparams.logit_bias.find(llama_token_eos(ctx)); - if (it != sparams.logit_bias.end() && it->second == -INFINITY) { - LOG_TEE("%s: warning: EOS token is disabled, which will cause most grammars to fail\n", __func__); - } - } - - std::vector grammar_rules(parsed_grammar.c_rules()); - grammar = llama_grammar_init( - grammar_rules.data(), grammar_rules.size(), parsed_grammar.symbol_ids.at("root")); - } - LOG_TEE("\n##### Infill mode #####\n\n"); if (params.infill) { printf("\n************\n"); @@ -430,7 +404,7 @@ int main(int argc, char ** argv) { std::vector embd; std::vector embd_guidance; - struct llama_sampling_context * ctx_sampling = llama_sampling_init(params); + struct llama_sampling_context * ctx_sampling = llama_sampling_init(sparams); while (n_remain != 0 || params.interactive) { // predict @@ -549,7 +523,7 @@ int main(int argc, char ** argv) { const llama_token id = llama_sampling_sample(ctx_sampling, ctx, ctx_guidance); - llama_sampling_accept(ctx_sampling, ctx, id); + llama_sampling_accept(ctx_sampling, ctx, id, true); LOG("last: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, ctx_sampling->prev).c_str()); @@ -567,8 +541,11 @@ int main(int argc, char ** argv) { LOG("embd_inp.size(): %d, n_consumed: %d\n", (int) embd_inp.size(), n_consumed); while ((int) embd_inp.size() > n_consumed) { embd.push_back(embd_inp[n_consumed]); - ctx_sampling->prev.erase(ctx_sampling->prev.begin()); - ctx_sampling->prev.push_back(embd_inp[n_consumed]); + + // push the prompt in the sampling context in order to apply repetition penalties later + // for the prompt, we don't apply grammar rules + llama_sampling_accept(ctx_sampling, ctx, embd_inp[n_consumed], false); + ++n_consumed; if ((int) embd.size() >= params.n_batch) { break; @@ -600,7 +577,7 @@ int main(int argc, char ** argv) { if ((int) embd_inp.size() <= n_consumed) { // deal with eot token in infill mode - if ((ctx_sampling->prev.back() == llama_token_eot(ctx) || is_interacting) && params.interactive){ + if ((llama_sampling_last(ctx_sampling) == llama_token_eot(ctx) || is_interacting) && params.interactive){ if(is_interacting && !params.interactive_first) { // print an eot token printf("%s", llama_token_to_piece(ctx, llama_token_eot(ctx)).c_str()); @@ -617,7 +594,7 @@ int main(int argc, char ** argv) { buffer += line; } while (another_line); // check if we got an empty line, if so we use the old input - if(!buffer.empty() && !(buffer.length() == 1 && buffer[0] == '\n')) { + if (!buffer.empty() && !(buffer.length() == 1 && buffer[0] == '\n')) { params.input_prefix = buffer; } buffer.clear(); @@ -627,7 +604,7 @@ int main(int argc, char ** argv) { buffer += line; } 
while (another_line); // check if we got an empty line - if(!buffer.empty() && !(buffer.length() == 1 && buffer[0] == '\n')) { + if (!buffer.empty() && !(buffer.length() == 1 && buffer[0] == '\n')) { params.input_suffix = buffer; } buffer.clear(); @@ -640,7 +617,7 @@ int main(int argc, char ** argv) { process_escapes(params.input_suffix); } suff_rm_leading_spc = params.escape; - if (suff_rm_leading_spc && params.input_suffix.find_first_of(" ") == 0 && params.input_suffix.size() > 1) { + if (suff_rm_leading_spc && params.input_suffix.find_first_of(' ') == 0 && params.input_suffix.size() > 1) { params.input_suffix.erase(0, 1); suff_rm_leading_spc = false; } @@ -667,7 +644,7 @@ int main(int argc, char ** argv) { is_interacting = false; } // deal with end of text token in interactive mode - else if (ctx_sampling->prev.back() == llama_token_eos(ctx)) { + else if (llama_sampling_last(ctx_sampling) == llama_token_eos(ctx)) { LOG("found EOS token\n"); if (params.interactive) { @@ -740,15 +717,7 @@ int main(int argc, char ** argv) { if (n_past > 0) { if (is_interacting) { - // reset grammar state if we're restarting generation - if (grammar != NULL) { - llama_grammar_free(grammar); - - std::vector grammar_rules(parsed_grammar.c_rules()); - grammar = llama_grammar_init( - grammar_rules.data(), grammar_rules.size(), - parsed_grammar.symbol_ids.at("root")); - } + llama_sampling_reset(ctx_sampling); } is_interacting = false; } @@ -778,9 +747,7 @@ int main(int argc, char ** argv) { llama_free(ctx); llama_free_model(model); - if (grammar != NULL) { - llama_grammar_free(grammar); - } + llama_sampling_free(ctx_sampling); llama_backend_free(); #ifndef LOG_DISABLE_LOGS diff --git a/examples/llava/llava-utils.h b/examples/llava/llava-utils.h index e050b59be..45b2b1ad3 100644 --- a/examples/llava/llava-utils.h +++ b/examples/llava/llava-utils.h @@ -58,28 +58,30 @@ inline bool eval_string(struct llama_context * ctx_llama, const char* str, int n // TODO: use common/sampling.h inline llama_token sample_id(llama_context * ctx_llama, gpt_params & params) { - // out of user input, sample next token - const float temp = params.sampling_params.temp; - const int32_t top_k = params.sampling_params.top_k <= 0 ? llama_n_vocab(llama_get_model(ctx_llama)) : params.sampling_params.top_k; - const float top_p = params.sampling_params.top_p; - const float tfs_z = params.sampling_params.tfs_z; - const float typical_p = params.sampling_params.typical_p; - // const int32_t repeat_last_n = params.sampling_params.repeat_last_n < 0 ? n_ctx : params.sampling_params.repeat_last_n; - // const float repeat_penalty = params.sampling_params.repeat_penalty; - // const float alpha_presence = params.sampling_params.presence_penalty; - // const float alpha_frequency = params.sampling_params.frequency_penalty; - const int mirostat = params.sampling_params.mirostat; - const float mirostat_tau = params.sampling_params.mirostat_tau; - const float mirostat_eta = params.sampling_params.mirostat_eta; - // const bool penalize_nl = params.sampling_params.penalize_nl; + auto & sparams = params.sparams; + + // out of user input, sample next token + const float temp = sparams.temp; + const int32_t top_k = sparams.top_k <= 0 ? llama_n_vocab(llama_get_model(ctx_llama)) : sparams.top_k; + const float top_p = sparams.top_p; + const float tfs_z = sparams.tfs_z; + const float typical_p = sparams.typical_p; + // const int32_t repeat_last_n = sparams.repeat_last_n < 0 ? 
n_ctx : sparams.repeat_last_n; + // const float repeat_penalty = sparams.repeat_penalty; + // const float alpha_presence = sparams.presence_penalty; + // const float alpha_frequency = sparams.frequency_penalty; + const int mirostat = sparams.mirostat; + const float mirostat_tau = sparams.mirostat_tau; + const float mirostat_eta = sparams.mirostat_eta; + // const bool penalize_nl = sparams.penalize_nl; llama_token id = 0; { auto logits = llama_get_logits(ctx_llama); auto n_vocab = llama_n_vocab(llama_get_model(ctx_llama)); - // Apply params.logit_bias map - for (auto it = params.sampling_params.logit_bias.begin(); it != params.sampling_params.logit_bias.end(); it++) { + // Apply params.logit_bias map + for (auto it = sparams.logit_bias.begin(); it != sparams.logit_bias.end(); it++) { logits[it->first] += it->second; } @@ -91,18 +93,18 @@ inline llama_token sample_id(llama_context * ctx_llama, gpt_params & params) { llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; - // TODO: Apply penalties - // float nl_logit = logits[llama_token_nl(ctx)]; - // auto last_n_repeat = std::min(std::min((int)last_n_tokens.size(), repeat_last_n), n_ctx); - // llama_sample_repetition_penalty(ctx, &candidates_p, - // last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, - // last_n_repeat, repeat_penalty); - // llama_sample_frequency_and_presence_penalties(ctx, &candidates_p, - // last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, - // last_n_repeat, alpha_frequency, alpha_presence); - // if (!penalize_nl) { - // logits[llama_token_nl(ctx)] = nl_logit; - // } + // TODO: Apply penalties + // float nl_logit = logits[llama_token_nl(ctx)]; + // auto last_n_repeat = std::min(std::min((int)last_n_tokens.size(), repeat_last_n), n_ctx); + // llama_sample_repetition_penalty(ctx, &candidates_p, + // last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, + // last_n_repeat, repeat_penalty); + // llama_sample_frequency_and_presence_penalties(ctx, &candidates_p, + // last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, + // last_n_repeat, alpha_frequency, alpha_presence); + // if (!penalize_nl) { + // logits[llama_token_nl(ctx)] = nl_logit; + // } if (temp <= 0) { // Greedy sampling diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 1a5911c56..db5309afe 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -108,7 +108,7 @@ int main(int argc, char ** argv) { if (!gpt_params_parse(argc, argv, params)) { return 1; } - llama_sampling_params & sparams = params.sampling_params; + llama_sampling_params & sparams = params.sparams; #ifndef LOG_DISABLE_LOGS log_set_target(log_filename_generator("main", "log")); @@ -415,8 +415,7 @@ int main(int argc, char ** argv) { } } } - LOG_TEE("sampling: repeat_last_n = %d, repeat_penalty = %f, presence_penalty = %f, frequency_penalty = %f, top_k = %d, tfs_z = %f, top_p = %f, typical_p = %f, temp = %f, mirostat = %d, mirostat_lr = %f, mirostat_ent = %f\n", - sparams.repeat_last_n, sparams.repeat_penalty, sparams.presence_penalty, sparams.frequency_penalty, sparams.top_k, sparams.tfs_z, sparams.top_p, sparams.typical_p, sparams.temp, sparams.mirostat, sparams.mirostat_eta, sparams.mirostat_tau); + LOG_TEE("sampling: \n%s\n", llama_sampling_print(sparams).c_str()); LOG_TEE("generate: n_ctx = %d, n_batch = %d, n_predict = %d, n_keep = %d\n", n_ctx, params.n_batch, params.n_predict, params.n_keep); LOG_TEE("\n\n"); @@ -459,7 +458,7 @@ int main(int argc, char ** argv) { std::vector embd; std::vector 
embd_guidance; - struct llama_sampling_context * ctx_sampling = llama_sampling_init(params); + struct llama_sampling_context * ctx_sampling = llama_sampling_init(sparams); while ((n_remain != 0 && !is_antiprompt) || params.interactive) { // predict @@ -612,7 +611,7 @@ int main(int argc, char ** argv) { const llama_token id = llama_sampling_sample(ctx_sampling, ctx, ctx_guidance); - llama_sampling_accept(ctx_sampling, ctx, id); + llama_sampling_accept(ctx_sampling, ctx, id, true); LOG("last: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, ctx_sampling->prev).c_str()); @@ -631,12 +630,9 @@ int main(int argc, char ** argv) { while ((int) embd_inp.size() > n_consumed) { embd.push_back(embd_inp[n_consumed]); - // GG: I'm not sure it's a good idea to push the prompt tokens into the sampling context - // Most likely will remove this in the future to avoid exposing "prev" - // Same thing is done in "server". If we stop pushing the prompt tokens, then the repetition - // penalty will be applied only based on the tokens generated by the model. - ctx_sampling->prev.erase(ctx_sampling->prev.begin()); - ctx_sampling->prev.push_back(embd_inp[n_consumed]); + // push the prompt in the sampling context in order to apply repetition penalties later + // for the prompt, we don't apply grammar rules + llama_sampling_accept(ctx_sampling, ctx, embd_inp[n_consumed], false); ++n_consumed; if ((int) embd.size() >= params.n_batch) { @@ -667,12 +663,10 @@ int main(int argc, char ** argv) { // if not currently processing queued inputs; if ((int) embd_inp.size() <= n_consumed) { - // check for reverse prompt + // check for reverse prompt in the last n_prev tokens if (!params.antiprompt.empty()) { - std::string last_output; - for (auto id : ctx_sampling->prev) { - last_output += llama_token_to_piece(ctx, id); - } + const int n_prev = 32; + const std::string last_output = llama_sampling_prev_str(ctx_sampling, ctx, n_prev); is_antiprompt = false; // Check if each of the reverse prompts appears at the end of the output. 
@@ -699,7 +693,7 @@ int main(int argc, char ** argv) { } // deal with end of text token in interactive mode - if (ctx_sampling->prev.back() == llama_token_eos(ctx)) { + if (llama_sampling_last(ctx_sampling) == llama_token_eos(ctx)) { LOG("found EOS token\n"); if (params.interactive) { diff --git a/examples/parallel/parallel.cpp b/examples/parallel/parallel.cpp index 69f9526a4..eb64adef8 100644 --- a/examples/parallel/parallel.cpp +++ b/examples/parallel/parallel.cpp @@ -157,7 +157,7 @@ int main(int argc, char ** argv) { for (size_t i = 0; i < clients.size(); ++i) { auto & client = clients[i]; client.id = i; - client.ctx_sampling = llama_sampling_init(params); + client.ctx_sampling = llama_sampling_init(params.sparams); } std::vector tokens_system; @@ -330,7 +330,7 @@ int main(int argc, char ** argv) { const llama_token id = llama_sampling_sample(client.ctx_sampling, ctx, NULL, client.i_batch - i); - llama_sampling_accept(client.ctx_sampling, ctx, id); + llama_sampling_accept(client.ctx_sampling, ctx, id, true); if (client.n_decoded == 1) { // start measuring generation time after the first token to make sure all concurrent clients diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 0471528a3..b5ad3cc99 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -195,10 +195,12 @@ struct llama_server_context json prompt; std::vector embd; + gpt_params params; + llama_model *model = nullptr; llama_context *ctx = nullptr; - gpt_params params; llama_sampling_context *ctx_sampling = nullptr; + int n_ctx; bool truncated = false; @@ -232,7 +234,7 @@ struct llama_server_context void rewind() { params.antiprompt.clear(); - params.grammar.clear(); + params.sparams.grammar.clear(); num_prompt_tokens = 0; num_tokens_predicted = 0; generated_text = ""; @@ -246,11 +248,14 @@ struct llama_server_context multibyte_pending = 0; n_remain = 0; n_past = 0; + params.sparams.n_prev = n_ctx; + } + void initSampling() { if (ctx_sampling != nullptr) { llama_sampling_free(ctx_sampling); } - ctx_sampling = llama_sampling_init(params); + ctx_sampling = llama_sampling_init(params.sparams); } bool loadModel(const gpt_params ¶ms_) @@ -311,16 +316,32 @@ struct llama_server_context return prompt_tokens; } - bool loadGrammar() - { - ctx_sampling = llama_sampling_init(params); - return true; + void truncatePrompt(std::vector &prompt_tokens) { + const int n_left = n_ctx - params.n_keep; + const int n_block_size = n_left / 2; + const int erased_blocks = (prompt_tokens.size() - params.n_keep - n_block_size) / n_block_size; + + // Keep n_keep tokens at start of prompt (at most n_ctx - 4) + std::vector new_tokens(prompt_tokens.begin(), prompt_tokens.begin() + params.n_keep); + + new_tokens.insert(new_tokens.end(), prompt_tokens.begin() + params.n_keep + erased_blocks * n_block_size, prompt_tokens.end()); + + LOG_VERBOSE("input truncated", { + {"n_ctx", n_ctx}, + {"n_keep", params.n_keep}, + {"n_left", n_left}, + {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())}, + {"num_prompt_tokens", new_tokens.size()} + }); + + truncated = true; + prompt_tokens = new_tokens; } void loadInfill() { bool suff_rm_leading_spc = true; - if (params.input_suffix.find_first_of(" ") == 0 && params.input_suffix.size() > 1) { + if (params.input_suffix.find_first_of(' ') == 0 && params.input_suffix.size() > 1) { params.input_suffix.erase(0, 1); suff_rm_leading_spc = false; } @@ -336,6 +357,7 @@ struct llama_server_context prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(ctx)); 
prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); prefix_tokens.push_back(llama_token_middle(ctx)); + auto prompt_tokens = prefix_tokens; num_prompt_tokens = prompt_tokens.size(); @@ -347,31 +369,18 @@ struct llama_server_context params.n_keep = std::min(params.n_ctx - 4, params.n_keep); // if input prompt is too big, truncate like normal - if (num_prompt_tokens >= (size_t)params.n_ctx) + if (num_prompt_tokens >= (size_t) n_ctx) { - printf("Input prompt is too big, truncating. Can only take %d tokens but got %zu\n", params.n_ctx, num_prompt_tokens); - // todo we probably want to cut from both sides - const int n_left = (params.n_ctx - params.n_keep) / 2; - std::vector new_tokens(prompt_tokens.begin(), prompt_tokens.begin() + params.n_keep); - const int erased_blocks = (num_prompt_tokens - params.n_keep - n_left - 1) / n_left; - new_tokens.insert(new_tokens.end(), prompt_tokens.begin() + params.n_keep + erased_blocks * n_left, prompt_tokens.end()); - std::copy(prompt_tokens.end() - params.n_ctx, prompt_tokens.end(), ctx_sampling->prev.begin()); + truncatePrompt(prompt_tokens); + num_prompt_tokens = prompt_tokens.size(); - LOG_VERBOSE("input truncated", { - {"n_ctx", params.n_ctx}, - {"n_keep", params.n_keep}, - {"n_left", n_left}, - {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())}, - }); - - truncated = true; - prompt_tokens = new_tokens; + GGML_ASSERT(num_prompt_tokens < (size_t)n_ctx); } - else + + // push the prompt into the sampling context (do not apply grammar) + for (auto & token : prompt_tokens) { - const size_t ps = num_prompt_tokens; - std::fill(ctx_sampling->prev.begin(), ctx_sampling->prev.end() - ps, 0); - std::copy(prompt_tokens.begin(), prompt_tokens.end(), ctx_sampling->prev.end() - ps); + llama_sampling_accept(ctx_sampling, ctx, token, false); } // compare the evaluated prompt with the new prompt @@ -409,29 +418,18 @@ struct llama_server_context params.n_keep = std::min(n_ctx - 4, params.n_keep); // if input prompt is too big, truncate like normal - if (num_prompt_tokens >= (size_t)n_ctx) + if (num_prompt_tokens >= (size_t) n_ctx) { - const int n_left = (n_ctx - params.n_keep) / 2; - std::vector new_tokens(prompt_tokens.begin(), prompt_tokens.begin() + params.n_keep); - const int erased_blocks = (num_prompt_tokens - params.n_keep - n_left - 1) / n_left; - new_tokens.insert(new_tokens.end(), prompt_tokens.begin() + params.n_keep + erased_blocks * n_left, prompt_tokens.end()); - std::copy(prompt_tokens.end() - n_ctx, prompt_tokens.end(), ctx_sampling->prev.begin()); + truncatePrompt(prompt_tokens); + num_prompt_tokens = prompt_tokens.size(); - LOG_VERBOSE("input truncated", { - {"n_ctx", n_ctx}, - {"n_keep", params.n_keep}, - {"n_left", n_left}, - {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())}, - }); - - truncated = true; - prompt_tokens = new_tokens; + GGML_ASSERT(num_prompt_tokens < (size_t)n_ctx); } - else + + // push the prompt into the sampling context (do not apply grammar) + for (auto & token : prompt_tokens) { - const size_t ps = num_prompt_tokens; - std::fill(ctx_sampling->prev.begin(), ctx_sampling->prev.end() - ps, 0); - std::copy(prompt_tokens.begin(), prompt_tokens.end(), ctx_sampling->prev.end() - ps); + llama_sampling_accept(ctx_sampling, ctx, token, false); } // compare the evaluated prompt with the new prompt @@ -530,8 +528,8 @@ struct llama_server_context llama_token_data_array cur_p = { ctx_sampling->cur.data(), ctx_sampling->cur.size(), false }; - const int32_t 
n_probs = params.sampling_params.n_probs; - if (params.sampling_params.temp <= 0 && n_probs > 0) + const int32_t n_probs = params.sparams.n_probs; + if (params.sparams.temp <= 0 && n_probs > 0) { // For llama_sample_token_greedy we need to sort candidates llama_sample_softmax(ctx, &cur_p); @@ -542,7 +540,7 @@ struct llama_server_context result.probs.push_back({cur_p.data[i].id, cur_p.data[i].p}); } - llama_sampling_accept(ctx_sampling, ctx, result.tok); + llama_sampling_accept(ctx_sampling, ctx, result.tok, true); if (tg) { num_tokens_predicted++; @@ -606,7 +604,7 @@ struct llama_server_context const std::string token_text = token_with_probs.tok == -1 ? "" : llama_token_to_piece(ctx, token_with_probs.tok); generated_text += token_text; - if (params.sampling_params.n_probs > 0) + if (params.sparams.n_probs > 0) { generated_token_probs.push_back(token_with_probs); } @@ -1004,36 +1002,36 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, static json format_generation_settings(llama_server_context &llama) { - const auto & sparams = llama.params.sampling_params; + const auto & sparams = llama.params.sparams; const auto eos_bias = sparams.logit_bias.find(llama_token_eos(llama.ctx)); const bool ignore_eos = eos_bias != sparams.logit_bias.end() && eos_bias->second < 0.0f && std::isinf(eos_bias->second); return json{ - {"n_ctx", llama.n_ctx}, - {"model", llama.params.model_alias}, - {"seed", llama.params.seed}, - {"temp", sparams.temp}, - {"top_k", sparams.top_k}, - {"top_p", sparams.top_p}, - {"tfs_z", sparams.tfs_z}, - {"typical_p", sparams.typical_p}, - {"repeat_last_n", sparams.repeat_last_n}, - {"repeat_penalty", sparams.repeat_penalty}, - {"presence_penalty", sparams.presence_penalty}, - {"frequency_penalty", sparams.frequency_penalty}, - {"mirostat", sparams.mirostat}, - {"mirostat_tau", sparams.mirostat_tau}, - {"mirostat_eta", sparams.mirostat_eta}, - {"penalize_nl", sparams.penalize_nl}, - {"stop", llama.params.antiprompt}, - {"n_predict", llama.params.n_predict}, - {"n_keep", llama.params.n_keep}, - {"ignore_eos", ignore_eos}, - {"stream", llama.stream}, - {"logit_bias", sparams.logit_bias}, - {"n_probs", sparams.n_probs}, - {"grammar", llama.params.grammar}, + {"n_ctx", llama.n_ctx}, + {"model", llama.params.model_alias}, + {"seed", llama.params.seed}, + {"temp", sparams.temp}, + {"top_k", sparams.top_k}, + {"top_p", sparams.top_p}, + {"tfs_z", sparams.tfs_z}, + {"typical_p", sparams.typical_p}, + {"repeat_last_n", sparams.penalty_last_n}, + {"repeat_penalty", sparams.penalty_repeat}, + {"frequency_penalty", sparams.penalty_freq}, + {"presence_penalty", sparams.penalty_present}, + {"mirostat", sparams.mirostat}, + {"mirostat_tau", sparams.mirostat_tau}, + {"mirostat_eta", sparams.mirostat_eta}, + {"penalize_nl", sparams.penalize_nl}, + {"stop", llama.params.antiprompt}, + {"n_predict", llama.params.n_predict}, + {"n_keep", llama.params.n_keep}, + {"ignore_eos", ignore_eos}, + {"stream", llama.stream}, + {"logit_bias", sparams.logit_bias}, + {"n_probs", sparams.n_probs}, + {"grammar", llama.params.sparams.grammar}, }; } @@ -1081,7 +1079,7 @@ static json format_final_response(llama_server_context &llama, const std::string {"timings", format_timings(llama)}, }; - if (llama.params.sampling_params.n_probs > 0) + if (llama.params.sparams.n_probs > 0) { res["completion_probabilities"] = probs_vector_to_json(llama.ctx, probs); } @@ -1097,7 +1095,7 @@ static json format_partial_response( {"stop", false}, }; - if (llama.params.sampling_params.n_probs > 0) + if 
(llama.params.sparams.n_probs > 0) { res["completion_probabilities"] = probs_vector_to_json(llama.ctx, probs); } @@ -1129,28 +1127,30 @@ static T json_value(const json &body, const std::string &key, const T &default_v static void parse_options_completion(const json &body, llama_server_context &llama) { gpt_params default_params; - const auto & default_sparams = default_params.sampling_params; - auto & sparams = llama.params.sampling_params; + const auto & default_sparams = default_params.sparams; - llama.stream = json_value(body, "stream", false); - llama.params.n_predict = json_value(body, "n_predict", default_params.n_predict); - sparams.top_k = json_value(body, "top_k", default_sparams.top_k); - sparams.top_p = json_value(body, "top_p", default_sparams.top_p); - sparams.tfs_z = json_value(body, "tfs_z", default_sparams.tfs_z); - sparams.typical_p = json_value(body, "typical_p", default_sparams.typical_p); - sparams.repeat_last_n = json_value(body, "repeat_last_n", default_sparams.repeat_last_n); - sparams.temp = json_value(body, "temperature", default_sparams.temp); - sparams.repeat_penalty = json_value(body, "repeat_penalty", default_sparams.repeat_penalty); - sparams.presence_penalty = json_value(body, "presence_penalty", default_sparams.presence_penalty); - sparams.frequency_penalty = json_value(body, "frequency_penalty", default_sparams.frequency_penalty); - sparams.mirostat = json_value(body, "mirostat", default_sparams.mirostat); - sparams.mirostat_tau = json_value(body, "mirostat_tau", default_sparams.mirostat_tau); - sparams.mirostat_eta = json_value(body, "mirostat_eta", default_sparams.mirostat_eta); - sparams.penalize_nl = json_value(body, "penalize_nl", default_sparams.penalize_nl); - llama.params.n_keep = json_value(body, "n_keep", default_params.n_keep); - llama.params.seed = json_value(body, "seed", default_params.seed); - llama.params.grammar = json_value(body, "grammar", default_params.grammar); - sparams.n_probs = json_value(body, "n_probs", default_sparams.n_probs); + auto & params = llama.params; + auto & sparams = llama.params.sparams; + + llama.stream = json_value(body, "stream", false); + params.n_predict = json_value(body, "n_predict", default_params.n_predict); + sparams.top_k = json_value(body, "top_k", default_sparams.top_k); + sparams.top_p = json_value(body, "top_p", default_sparams.top_p); + sparams.tfs_z = json_value(body, "tfs_z", default_sparams.tfs_z); + sparams.typical_p = json_value(body, "typical_p", default_sparams.typical_p); + sparams.temp = json_value(body, "temperature", default_sparams.temp); + sparams.penalty_last_n = json_value(body, "repeat_last_n", default_sparams.penalty_last_n); + sparams.penalty_repeat = json_value(body, "repeat_penalty", default_sparams.penalty_repeat); + sparams.penalty_freq = json_value(body, "frequency_penalty", default_sparams.penalty_freq); + sparams.penalty_present = json_value(body, "presence_penalty", default_sparams.penalty_present); + sparams.mirostat = json_value(body, "mirostat", default_sparams.mirostat); + sparams.mirostat_tau = json_value(body, "mirostat_tau", default_sparams.mirostat_tau); + sparams.mirostat_eta = json_value(body, "mirostat_eta", default_sparams.mirostat_eta); + sparams.penalize_nl = json_value(body, "penalize_nl", default_sparams.penalize_nl); + params.n_keep = json_value(body, "n_keep", default_params.n_keep); + params.seed = json_value(body, "seed", default_params.seed); + sparams.grammar = json_value(body, "grammar", default_sparams.grammar); + sparams.n_probs = json_value(body, 
"n_probs", default_sparams.n_probs); if (body.count("prompt") != 0) { @@ -1204,8 +1204,6 @@ static void parse_options_completion(const json &body, llama_server_context &lla } } - llama.ctx_sampling = llama_sampling_init(llama.params); - LOG_VERBOSE("completion parameters parsed", format_generation_settings(llama)); } @@ -1374,15 +1372,9 @@ int main(int argc, char **argv) llama.rewind(); llama_reset_timings(llama.ctx); - parse_options_completion(json::parse(req.body), llama); - if (!llama.loadGrammar()) - { - res.status = 400; - return; - } - + llama.initSampling(); llama.loadPrompt(); llama.beginCompletion(); @@ -1414,7 +1406,7 @@ int main(int argc, char **argv) } auto probs = llama.generated_token_probs; - if (llama.params.sampling_params.n_probs > 0 && llama.stopped_word) { + if (llama.params.sparams.n_probs > 0 && llama.stopped_word) { const std::vector stop_word_toks = llama_tokenize(llama.ctx, llama.stopping_word, false); probs = std::vector(llama.generated_token_probs.begin(), llama.generated_token_probs.end() - stop_word_toks.size()); } @@ -1466,7 +1458,7 @@ int main(int argc, char **argv) std::vector probs_output = {}; - if (llama.params.sampling_params.n_probs > 0) { + if (llama.params.sparams.n_probs > 0) { const std::vector to_send_toks = llama_tokenize(llama.ctx, to_send, false); size_t probs_pos = std::min(sent_token_probs_index, llama.generated_token_probs.size()); size_t probs_stop_pos = std::min(sent_token_probs_index + to_send_toks.size(), llama.generated_token_probs.size()); @@ -1537,14 +1529,9 @@ int main(int argc, char **argv) llama.rewind(); llama_reset_timings(llama.ctx); - parse_options_infill(json::parse(req.body), llama); - if (!llama.loadGrammar()) - { - res.status = 400; - return; - } + llama.initSampling(); llama.loadInfill(); llama.beginCompletion(); const auto chunked_content_provider = [&](size_t, DataSink & sink) { @@ -1587,7 +1574,7 @@ int main(int argc, char **argv) std::vector probs_output = {}; - if (llama.params.sampling_params.n_probs > 0) { + if (llama.params.sparams.n_probs > 0) { const std::vector to_send_toks = llama_tokenize(llama.ctx, to_send, false); size_t probs_pos = std::min(sent_token_probs_index, llama.generated_token_probs.size()); size_t probs_stop_pos = std::min(sent_token_probs_index + to_send_toks.size(), llama.generated_token_probs.size()); @@ -1694,7 +1681,9 @@ int main(int argc, char **argv) const json body = json::parse(req.body); llama.rewind(); + llama_reset_timings(llama.ctx); + if (body.count("content") != 0) { llama.prompt = body["content"]; @@ -1704,6 +1693,8 @@ int main(int argc, char **argv) llama.prompt = ""; } llama.params.n_predict = 0; + + llama.initSampling(); llama.loadPrompt(); llama.beginCompletion(); llama.doCompletion(); diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 24f49012a..894321ce9 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -112,16 +112,16 @@ int main(int argc, char ** argv) { bool has_eos = false; // target model sampling context - struct llama_sampling_context * ctx_sampling = llama_sampling_init(params); + struct llama_sampling_context * ctx_sampling = llama_sampling_init(params.sparams); // draft sequence data std::vector drafts(n_seq_dft); - params.grammar.clear(); // the draft samplers will copy the target sampler's grammar - params.sampling_params.temp = std::max(0.01f, params.sampling_params.temp); + params.sparams.grammar.clear(); // the draft samplers will copy the target sampler's grammar + 
params.sparams.temp = std::max(0.01f, params.sparams.temp); for (int s = 0; s < n_seq_dft; ++s) { - drafts[s].ctx_sampling = llama_sampling_init(params); + drafts[s].ctx_sampling = llama_sampling_init(params.sparams); } llama_batch batch_dft = llama_batch_init(params.n_ctx, 0, 1); @@ -154,7 +154,7 @@ int main(int argc, char ** argv) { // sample from the target model llama_token id = llama_sampling_sample(ctx_sampling, ctx_tgt, NULL, drafts[s_keep].i_batch_tgt[i_dft]); - llama_sampling_accept(ctx_sampling, ctx_tgt, id); + llama_sampling_accept(ctx_sampling, ctx_tgt, id, true); //LOG("last: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx_tgt, ctx_sampling->prev).c_str()); @@ -328,7 +328,7 @@ int main(int argc, char ** argv) { const int s = sa[is]; - llama_sampling_accept(drafts[s].ctx_sampling, ctx_dft, id); + llama_sampling_accept(drafts[s].ctx_sampling, ctx_dft, id, true); drafts[s].tokens.push_back(id); diff --git a/llama.cpp b/llama.cpp index ec8ffad33..365349335 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1018,8 +1018,8 @@ enum e_model { }; static const size_t kB = 1024; -static const size_t MB = kB*kB; -static const size_t GB = kB*kB*kB; +static const size_t MB = 1024*kB; +static const size_t GB = 1024*MB; struct llama_hparams { bool vocab_only; @@ -1042,21 +1042,21 @@ struct llama_hparams { float f_max_alibi_bias; bool operator!=(const llama_hparams & other) const { - if (this->vocab_only != other.vocab_only) return true; - if (this->n_vocab != other.n_vocab) return true; + if (this->vocab_only != other.vocab_only) return true; + if (this->n_vocab != other.n_vocab) return true; if (this->n_ctx_train != other.n_ctx_train) return true; - if (this->n_embd != other.n_embd) return true; - if (this->n_head != other.n_head) return true; - if (this->n_head_kv != other.n_head_kv) return true; - if (this->n_layer != other.n_layer) return true; - if (this->n_rot != other.n_rot) return true; - if (this->n_ff != other.n_ff) return true; + if (this->n_embd != other.n_embd) return true; + if (this->n_head != other.n_head) return true; + if (this->n_head_kv != other.n_head_kv) return true; + if (this->n_layer != other.n_layer) return true; + if (this->n_rot != other.n_rot) return true; + if (this->n_ff != other.n_ff) return true; const float EPSILON = 1e-9; - if (!is_float_close(this->f_norm_eps, other.f_norm_eps, EPSILON)) return true; - if (!is_float_close(this->f_norm_rms_eps, other.f_norm_rms_eps, EPSILON)) return true; - if (!is_float_close(this->rope_freq_base_train, other.rope_freq_base_train, EPSILON)) return true; + if (!is_float_close(this->f_norm_eps, other.f_norm_eps, EPSILON)) return true; + if (!is_float_close(this->f_norm_rms_eps, other.f_norm_rms_eps, EPSILON)) return true; + if (!is_float_close(this->rope_freq_base_train, other.rope_freq_base_train, EPSILON)) return true; if (!is_float_close(this->rope_freq_scale_train, other.rope_freq_scale_train, EPSILON)) return true; return false; @@ -1195,11 +1195,11 @@ struct llama_vocab { id special_sep_id = -1; id special_pad_id = -1; - id linefeed_id = 13; + id linefeed_id = 13; id special_prefix_id = 32007; id special_middle_id = 32009; id special_suffix_id = 32008; - id special_eot_id = 32010; + id special_eot_id = 32010; int find_bpe_rank(std::string token_left, std::string token_right) const { replace_all(token_left, " ", "\u0120"); @@ -1359,10 +1359,7 @@ static bool llama_kv_cache_init( cache.cells.clear(); cache.cells.resize(n_ctx); - // TODO: this should be: - // cache.buf.resize(2u*n_elements*ggml_type_size(wtype) + 2u*ggml_tensor_overhead()); - 
// change it and test that it works - cache.buf.resize(2u*n_elements*ggml_type_size(wtype) + 2u*MB); + cache.buf.resize(2u*n_elements*ggml_type_size(wtype) + 2u*ggml_tensor_overhead()); memset(cache.buf.data, 0, cache.buf.size); struct ggml_init_params params; @@ -7417,37 +7414,15 @@ void llama_sample_temperature(struct llama_context * ctx, llama_token_data_array llama_sample_temp(ctx, candidates_p, temp); } -void llama_sample_repetition_penalty(struct llama_context * ctx, llama_token_data_array * candidates, const llama_token * last_tokens, size_t last_tokens_size, float penalty) { - if (last_tokens_size == 0 || penalty == 1.0f) { - return; - } - - const int64_t t_start_sample_us = ggml_time_us(); - - for (size_t i = 0; i < candidates->size; ++i) { - const auto * token_iter = std::find(last_tokens, last_tokens + last_tokens_size, candidates->data[i].id); - if (token_iter == last_tokens + last_tokens_size) { - continue; - } - - // The academic publication that described this technique actually just only divided, but that would cause tokens with negative logits to become more likely, which is obviously wrong. - // This is common fix for this problem, which is to multiply by the penalty instead of dividing. - if (candidates->data[i].logit <= 0) { - candidates->data[i].logit *= penalty; - } else { - candidates->data[i].logit /= penalty; - } - } - - candidates->sorted = false; - - if (ctx) { - ctx->t_sample_us += ggml_time_us() - t_start_sample_us; - } -} - -void llama_sample_frequency_and_presence_penalties(struct llama_context * ctx, llama_token_data_array * candidates, const llama_token * last_tokens_p, size_t last_tokens_size, float alpha_frequency, float alpha_presence) { - if (last_tokens_size == 0 || (alpha_frequency == 0.0f && alpha_presence == 0.0f)) { +void llama_sample_repetition_penalties( + struct llama_context * ctx, + llama_token_data_array * candidates, + const llama_token * last_tokens, + size_t penalty_last_n, + float penalty_repeat, + float penalty_freq, + float penalty_present) { + if (penalty_last_n == 0 || (penalty_repeat == 1.0f && penalty_freq == 0.0f && penalty_present == 0.0f)) { return; } @@ -7455,19 +7430,28 @@ void llama_sample_frequency_and_presence_penalties(struct llama_context * ctx, l // Create a frequency map to count occurrences of each token in last_tokens std::unordered_map token_count; - for (size_t i = 0; i < last_tokens_size; ++i) { - token_count[last_tokens_p[i]]++; + for (size_t i = 0; i < penalty_last_n; ++i) { + token_count[last_tokens[i]]++; } // Apply frequency and presence penalties to the candidates for (size_t i = 0; i < candidates->size; ++i) { - auto token_iter = token_count.find(candidates->data[i].id); + const auto token_iter = token_count.find(candidates->data[i].id); if (token_iter == token_count.end()) { continue; } - int count = token_iter->second; - candidates->data[i].logit -= float(count) * alpha_frequency + float(count > 0) * alpha_presence; + const int count = token_iter->second; + + // The academic publication that described this technique actually just only divided, but that would cause tokens with negative logits to become more likely, which is obviously wrong. + // This is common fix for this problem, which is to multiply by the penalty instead of dividing. 
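// a quick worked example, assuming penalty_repeat = 1.3:
//   logit  2.0 -> 2.0 / 1.3 ~  1.54  (less likely, as intended)
//   logit -2.0 -> dividing would give ~ -1.54, i.e. *more* likely,
//                 so multiply instead: -2.0 * 1.3 = -2.6 (less likely)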
+ if (candidates->data[i].logit <= 0) { + candidates->data[i].logit *= penalty_repeat; + } else { + candidates->data[i].logit /= penalty_repeat; + } + + candidates->data[i].logit -= float(count) * penalty_freq + float(count > 0) * penalty_present; } candidates->sorted = false; diff --git a/llama.h b/llama.h index 51010e037..306f5b383 100644 --- a/llama.h +++ b/llama.h @@ -560,21 +560,15 @@ extern "C" { LLAMA_API void llama_set_rng_seed(struct llama_context * ctx, uint32_t seed); /// @details Repetition penalty described in CTRL academic paper https://arxiv.org/abs/1909.05858, with negative logit fix. - LLAMA_API void llama_sample_repetition_penalty( - struct llama_context * ctx, - llama_token_data_array * candidates, - const llama_token * last_tokens, - size_t last_tokens_size, - float penalty); - /// @details Frequency and presence penalties described in OpenAI API https://platform.openai.com/docs/api-reference/parameter-details. - LLAMA_API void llama_sample_frequency_and_presence_penalties( + LLAMA_API void llama_sample_repetition_penalties( struct llama_context * ctx, llama_token_data_array * candidates, const llama_token * last_tokens, - size_t last_tokens_size, - float alpha_frequency, - float alpha_presence); + size_t penalty_last_n, + float penalty_repeat, + float penalty_freq, + float penalty_present); /// @details Apply classifier-free guidance to the logits as described in academic paper "Stay on topic with Classifier-Free Guidance" https://arxiv.org/abs/2306.17806 /// @param candidates A vector of `llama_token_data` containing the candidate tokens, the logits must be directly extracted from the original generation context without being sorted. diff --git a/tests/test-sampling.cpp b/tests/test-sampling.cpp index 019c0d462..32e58941c 100644 --- a/tests/test-sampling.cpp +++ b/tests/test-sampling.cpp @@ -8,11 +8,9 @@ #include #include #include -#include #include #include - static void dump(const llama_token_data_array * candidates) { for (size_t i = 0; i < candidates->size; i++) { printf("%d: %f (%f)\n", candidates->data[i].id, candidates->data[i].p, candidates->data[i].logit); @@ -21,7 +19,6 @@ static void dump(const llama_token_data_array * candidates) { #define DUMP(__candidates) do { printf("%s:%d (%s)\n", __FILE__, __LINE__, __func__); dump((__candidates)); printf("-\n"); } while(0) - static void test_top_k(const std::vector & probs, const std::vector & expected_probs, int k) { size_t n_vocab = probs.size(); std::vector candidates; @@ -37,13 +34,12 @@ static void test_top_k(const std::vector & probs, const std::vector & probs, const std::vector & expected_probs, float p) { size_t n_vocab = probs.size(); std::vector candidates; @@ -59,13 +55,12 @@ static void test_top_p(const std::vector & probs, const std::vector & probs, const std::vector & expected_probs, float z) { size_t n_vocab = probs.size(); std::vector candidates; @@ -80,13 +75,12 @@ static void test_tfs(const std::vector & probs, const std::vector llama_sample_tail_free(nullptr, &candidates_p, z, 1); DUMP(&candidates_p); - assert(candidates_p.size == expected_probs.size()); + GGML_ASSERT(candidates_p.size == expected_probs.size()); for (size_t i = 0; i < candidates_p.size; i++) { - assert(fabs(candidates_p.data[i].p - expected_probs[i]) < 1e-3); + GGML_ASSERT(fabs(candidates_p.data[i].p - expected_probs[i]) < 1e-3); } } - static void test_typical(const std::vector & probs, const std::vector & expected_probs, float p) { size_t n_vocab = probs.size(); std::vector candidates; @@ -101,18 +95,17 @@ static void 
test_typical(const std::vector & probs, const std::vector & probs, const std::vector & last_tokens, - const std::vector & expected_probs, float penalty + const std::vector & expected_probs, float repeat_penalty, float alpha_frequency, float alpha_presence ) { - assert(probs.size() == expected_probs.size()); + GGML_ASSERT(probs.size() == expected_probs.size()); size_t n_vocab = probs.size(); std::vector candidates; @@ -125,41 +118,13 @@ static void test_repetition_penalty( llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; llama_sample_softmax(nullptr, &candidates_p); DUMP(&candidates_p); - llama_sample_repetition_penalty(nullptr, &candidates_p, (const llama_token *) last_tokens.data(), last_tokens.size(), penalty); + llama_sample_repetition_penalties(nullptr, &candidates_p, (const llama_token *) last_tokens.data(), last_tokens.size(), repeat_penalty, alpha_frequency, alpha_presence); llama_sample_softmax(nullptr, &candidates_p); DUMP(&candidates_p); - assert(candidates_p.size == expected_probs.size()); + GGML_ASSERT(candidates_p.size == expected_probs.size()); for (size_t i = 0; i < candidates_p.size; i++) { - assert(fabs(candidates_p.data[i].p - expected_probs[i]) < 1e-6); - } -} - - -static void test_frequency_presence_penalty( - const std::vector & probs, const std::vector & last_tokens, - const std::vector & expected_probs, float alpha_frequency, float alpha_presence -) { - assert(probs.size() == expected_probs.size()); - - size_t n_vocab = probs.size(); - std::vector candidates; - candidates.reserve(n_vocab); - for (llama_token token_id = 0; token_id < (llama_token)n_vocab; token_id++) { - float logit = log(probs[token_id]); - candidates.emplace_back(llama_token_data{token_id, logit, 0.0f}); - } - - llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; - llama_sample_softmax(nullptr, &candidates_p); - // DUMP(&candidates_p); - llama_sample_frequency_and_presence_penalties(nullptr, &candidates_p, (const llama_token *) last_tokens.data(), last_tokens.size(), alpha_frequency, alpha_presence); - llama_sample_softmax(nullptr, &candidates_p); - // DUMP(&candidates_p); - - assert(candidates_p.size == expected_probs.size()); - for (size_t i = 0; i < candidates_p.size; i++) { - assert(fabs(candidates_p.data[i].p - expected_probs[i]) < 1e-3); + GGML_ASSERT(fabs(candidates_p.data[i].p - expected_probs[i]) < 1e-3); } } @@ -181,13 +146,13 @@ int main(void) { test_typical({0.97f, 0.01f, 0.01f, 0.01f}, {0.97f}, 0.5f); test_typical({0.4f, 0.2f, 0.2f, 0.2f}, {0.2f, 0.2f, 0.2f}, 0.5f); - test_repetition_penalty({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0}, {0.25f, 0.25f, 0.25f, 0.25f, 0}, 50.0f); - test_repetition_penalty({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0, 1, 2}, {0.5f, 0.5f, 0, 0, 0}, 50.0f); - test_repetition_penalty({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0, 1, 2, 0, 0}, {0.5f, 0.5f, 0, 0, 0}, 50.0f); + test_repetition_penalties({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0}, {0.25f, 0.25f, 0.25f, 0.25f, 0}, 50.0f, 0.0f, 0.0f); + test_repetition_penalties({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0, 1, 2}, {0.5f, 0.5f, 0, 0, 0}, 50.0f, 0.0f, 0.0f); + test_repetition_penalties({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0, 1, 2, 0, 0}, {0.5f, 0.5f, 0, 0, 0}, 50.0f, 0.0f, 0.0f); - test_frequency_presence_penalty({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0}, {0.249997f, 0.249997f, 0.249997f, 0.249997f, 0.000011f}, 5.0f, 5.0f); - test_frequency_presence_penalty({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0, 1, 2}, {0.499966f, 0.499966f, 0.000023f, 0.000023f, 0.000023f}, 5.0f, 5.0f); - 
test_frequency_presence_penalty({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0, 1, 2, 0, 0}, {0.499977f, 0.499977f, 0.000023f, 0.000023f, 0.000000f}, 5.0f, 5.0f); + test_repetition_penalties({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0}, {0.249997f, 0.249997f, 0.249997f, 0.249997f, 0.000011f}, 1.0f, 5.0f, 5.0f); + test_repetition_penalties({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0, 1, 2}, {0.499966f, 0.499966f, 0.000023f, 0.000023f, 0.000023f}, 1.0f, 5.0f, 5.0f); + test_repetition_penalties({0.2f, 0.2f, 0.2f, 0.2f, 0.2f}, {0, 1, 2, 0, 0}, {0.499977f, 0.499977f, 0.000023f, 0.000023f, 0.000000f}, 1.0f, 5.0f, 5.0f); printf("OK\n"); From 465219b9143ac01db0990bbcb0a081ef72ec2008 Mon Sep 17 00:00:00 2001 From: shibe2 Date: Thu, 12 Oct 2023 16:01:23 +0400 Subject: [PATCH 007/859] CLBlast: Add outer loops over src0 for broadcasting in mulmat Reduce repeated dequantization of the same data. --- ggml-opencl.cpp | 332 +++++++++++++++++++++++------------------------- 1 file changed, 162 insertions(+), 170 deletions(-) diff --git a/ggml-opencl.cpp b/ggml-opencl.cpp index 67ac20eac..202bcb485 100644 --- a/ggml-opencl.cpp +++ b/ggml-opencl.cpp @@ -1489,46 +1489,45 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr cl_mem d_D = ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); size_t x_offset = 0; - int64_t pi02 = -1; - int64_t pi03 = -1; - for (int64_t i13 = 0; i13 < ne13; i13++) { - int64_t i03 = i13 / r3; + for (int64_t i03 = 0; i03 < ne03; i03++) { + // TODO: copy src0 here when r3>1 + for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + if (src0->backend == GGML_BACKEND_GPU) { + x_offset = (i03 * ne02 + i02) * x_ne; + } else { + // copy src0 to device + CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_X, 0, src0, i03, i02, NULL)); + } - for (int64_t i12 = 0; i12 < ne12; i12++) { - int64_t i02 = i12 / r2; + for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { + // copy src1 to device + CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); - // copy data to device - if (src0->backend == GGML_BACKEND_GPU) { - x_offset = (i03 * ne02 + i02) * x_ne; - } else if (i02 != pi02 || i03 != pi03) { - CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_X, 0, src0, i03, i02, NULL)); - pi02 = i02; - pi03 = i03; + CL_CHECK(clFinish(queue)); + + // compute + cl_event ev_sgemm; + clblast::StatusCode status = clblast::Gemm(clblast::Layout::kColMajor, + clblast::Transpose::kYes, clblast::Transpose::kNo, + ne01, ne11, ne10, + alpha, + d_X, x_offset, ne00, + d_Y, 0, ne10, + beta, + d_D, 0, ne01, + &queue, &ev_sgemm); + + if (status != clblast::StatusCode::kSuccess) { + GGML_ASSERT(false); + } + + // copy dst to host + float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); + CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &ev_sgemm, NULL)); + } } - CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); - - CL_CHECK(clFinish(queue)); - - // compute - cl_event ev_sgemm; - clblast::StatusCode status = clblast::Gemm(clblast::Layout::kColMajor, - clblast::Transpose::kYes, clblast::Transpose::kNo, - ne01, ne11, ne10, - alpha, - d_X, x_offset, ne00, - d_Y, 0, ne10, - beta, - d_D, 0, ne01, - &queue, &ev_sgemm); - - if (status != clblast::StatusCode::kSuccess) { - GGML_ASSERT(false); - } - - // copy dst to host - float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &ev_sgemm, NULL)); } } @@ -1589,73 +1588,70 @@ static 
void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr bool src1_cont_cols = (size_t)nb11 == ne11*sizeof(float); size_t x_offset = 0; - int64_t pi02 = -1; - int64_t pi03 = -1; - for (int64_t i13 = 0; i13 < ne13; i13++) { - int64_t i03 = i13 / r3; - - for (int64_t i12 = 0; i12 < ne12; i12++) { - int64_t i02 = i12 / r2; - - // copy src0 to device - if (src0->backend == GGML_BACKEND_GPU) { - x_offset = (i03 * ne02 + i02) * x_ne; - } else if (i02 != pi02 || i03 != pi03) { - CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_X, 0, src0, i03, i02, NULL)); - pi02 = i02; - pi03 = i03; - } - - // convert src1 to fp16 - // TODO: use multiple threads - char * src1i = (char *) src1->data + i13*nb13 + i12*nb12; - if (src1_cont_rows) { - if (src1_cont_cols) { - ggml_fp32_to_fp16_row((float *) src1i, tmp, ne10*ne11); + for (int64_t i03 = 0; i03 < ne03; i03++) { + // TODO: copy src0 here when r3>1 + for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + if (src0->backend == GGML_BACKEND_GPU) { + x_offset = (i03 * ne02 + i02) * x_ne; + } else { + // copy src0 to device + CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_X, 0, src0, i03, i02, NULL)); } - else { - for (int64_t i11 = 0; i11 < ne11; i11++) { - ggml_fp32_to_fp16_row((float *) (src1i + i11*nb11), tmp + i11*ne10, ne10); + + for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { + // convert src1 to fp16 + // TODO: use multiple threads + char * src1i = (char *) src1->data + i13*nb13 + i12*nb12; + if (src1_cont_rows) { + if (src1_cont_cols) { + ggml_fp32_to_fp16_row((float *) src1i, tmp, ne10*ne11); + } + else { + for (int64_t i11 = 0; i11 < ne11; i11++) { + ggml_fp32_to_fp16_row((float *) (src1i + i11*nb11), tmp + i11*ne10, ne10); + } + } } - } - } - else { - for (int64_t i11 = 0; i11 < ne11; i11++) { - for (int64_t i10 = 0; i10 < ne10; i10++) { - // very slow due to no inlining - tmp[i11*ne10 + i10] = ggml_fp32_to_fp16(*(float *) (src1i + i11*nb11 + i10*nb10)); + else { + for (int64_t i11 = 0; i11 < ne11; i11++) { + for (int64_t i10 = 0; i10 < ne10; i10++) { + // very slow due to no inlining + tmp[i11*ne10 + i10] = ggml_fp32_to_fp16(*(float *) (src1i + i11*nb11 + i10*nb10)); + } + } } + + // copy src1 to device + CL_CHECK(clEnqueueWriteBuffer(queue, d_Y, false, 0, sizeof(ggml_fp16_t) * y_ne, tmp, 0, NULL, NULL)); + + CL_CHECK(clFinish(queue)); + + // compute + cl_event ev_sgemm; + clblast::StatusCode status = clblast::Gemm(clblast::Layout::kColMajor, + clblast::Transpose::kYes, clblast::Transpose::kNo, + ne01, ne11, ne10, + alpha, + d_X, x_offset, ne00, + d_Y, 0, ne10, + beta, + d_D, 0, ne01, + &queue, &ev_sgemm); + + if (status != clblast::StatusCode::kSuccess) { + GGML_ASSERT(false); + } + + // copy dst to host, then convert to float + CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(ggml_fp16_t) * d_ne, tmp, 1, &ev_sgemm, NULL)); + + float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); + + ggml_fp16_to_fp32_row(tmp, d, d_ne); } } - - // copy src1 to device - CL_CHECK(clEnqueueWriteBuffer(queue, d_Y, false, 0, sizeof(ggml_fp16_t) * y_ne, tmp, 0, NULL, NULL)); - - CL_CHECK(clFinish(queue)); - - // compute - cl_event ev_sgemm; - clblast::StatusCode status = clblast::Gemm(clblast::Layout::kColMajor, - clblast::Transpose::kYes, clblast::Transpose::kNo, - ne01, ne11, ne10, - alpha, - d_X, x_offset, ne00, - d_Y, 0, ne10, - beta, - d_D, 0, ne01, - &queue, &ev_sgemm); - - if (status != clblast::StatusCode::kSuccess) { - GGML_ASSERT(false); - } - - // copy dst to host, 
then convert to float - CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(ggml_fp16_t) * d_ne, tmp, 1, &ev_sgemm, NULL)); - - float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - - ggml_fp16_to_fp32_row(tmp, d, d_ne); } } @@ -1718,85 +1714,81 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * size_t ev_idx = 0; std::vector events; - int64_t pi02 = -1; - int64_t pi03 = -1; - - for (int64_t i13 = 0; i13 < ne13; i13++) { - int64_t i03 = i13 / r3; - - for (int64_t i12 = 0; i12 < ne12; i12++) { - int64_t i02 = i12 / r2; - - // copy src0 to device if necessary - if (src0->backend == GGML_BACKEND_CPU) { - if (i02 != pi02 || i03 != pi03) { + for (int64_t i03 = 0; i03 < ne03; i03++) { + // TODO: copy and dequantize src0 here when r3>1 + for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + // copy src0 to device if necessary + if (src0->backend == GGML_BACKEND_CPU) { events.emplace_back(); CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Q, 0, src0, i03, i02, events.data() + ev_idx++)); - pi02 = i02; - pi03 = i03; - } - } else if (src0->backend == GGML_BACKEND_GPU) { - d_Q = (cl_mem) src0->extra; - } else { - GGML_ASSERT(false); - } - if (mul_mat_vec) { // specialized dequantize_mul_mat_vec kernel - // copy src1 to device - events.emplace_back(); - CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, events.data() + ev_idx++)); - - // compute - const size_t global = ne01 * local; - const size_t offset = src0->backend == GGML_BACKEND_GPU ? (i03 * ne02 + i02) * x_bps : 0; - const cl_int ncols = ne00; - events.emplace_back(); - CL_CHECK(clSetKernelArg(*dmmv, 0, sizeof(cl_mem), &d_Q)); - CL_CHECK(clSetKernelArg(*dmmv, 1, sizeof(float) * local, NULL)); - CL_CHECK(clSetKernelArg(*dmmv, 2, sizeof(cl_mem), &d_Y)); - CL_CHECK(clSetKernelArg(*dmmv, 3, sizeof(cl_mem), &d_D)); - CL_CHECK(clSetKernelArg(*dmmv, 4, sizeof(cl_int), &ncols)); - CL_CHECK(clEnqueueNDRangeKernel(queue, *dmmv, 1, &offset, &global, &local, events.size() - 1, events.data(), events.data() + ev_idx++)); - } else { // general dequantization kernel + CLBlast matrix matrix multiplication - // convert src0 to fp32 on device - const size_t global = x_ne / global_denom; - const size_t offset = src0->backend == GGML_BACKEND_GPU ? (i03 * ne02 + i02) * x_bps : 0; - CL_CHECK(clSetKernelArg(*to_fp32_cl, 0, sizeof(cl_mem), &d_Q)); - CL_CHECK(clSetKernelArg(*to_fp32_cl, 1, sizeof(cl_mem), &d_X)); - CL_CHECK(clEnqueueNDRangeKernel(queue, *to_fp32_cl, 1, offset > 0 ? &offset : NULL, &global, local > 0 ? &local : NULL, events.size(), !events.empty() ? 
events.data() : NULL, NULL)); - - // copy src1 to device - CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); - - events.emplace_back(); - - // wait for conversion - CL_CHECK(clFinish(queue)); - - // compute - clblast::StatusCode status = clblast::Gemm(clblast::Layout::kColMajor, - clblast::Transpose::kYes, clblast::Transpose::kNo, - ne01, ne11, ne10, - alpha, - d_X, 0, ne00, - d_Y, 0, ne10, - beta, - d_D, 0, ne01, - &queue, events.data() + ev_idx++); - - if (status != clblast::StatusCode::kSuccess) { + } else if (src0->backend == GGML_BACKEND_GPU) { + d_Q = (cl_mem) src0->extra; + } else { GGML_ASSERT(false); } - } - // copy dst to host - float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &events[events.size() - 1], NULL)); - for (auto *event : events) { - clReleaseEvent(event); - } + if (!mul_mat_vec) { + // convert src0 to fp32 on device + const size_t global = x_ne / global_denom; + const size_t offset = src0->backend == GGML_BACKEND_GPU ? (i03 * ne02 + i02) * x_bps : 0; + CL_CHECK(clSetKernelArg(*to_fp32_cl, 0, sizeof(cl_mem), &d_Q)); + CL_CHECK(clSetKernelArg(*to_fp32_cl, 1, sizeof(cl_mem), &d_X)); + CL_CHECK(clEnqueueNDRangeKernel(queue, *to_fp32_cl, 1, &offset, &global, local > 0 ? &local : NULL, events.size(), !events.empty() ? events.data() : NULL, NULL)); + } - ev_idx = 0; - events.clear(); + for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { + if (mul_mat_vec) { // specialized dequantize_mul_mat_vec kernel + // copy src1 to device + events.emplace_back(); + CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, events.data() + ev_idx++)); + + // compute + const size_t global = ne01 * local; + const size_t offset = src0->backend == GGML_BACKEND_GPU ? 
(i03 * ne02 + i02) * x_bps : 0; + const cl_int ncols = ne00; + events.emplace_back(); + CL_CHECK(clSetKernelArg(*dmmv, 0, sizeof(cl_mem), &d_Q)); + CL_CHECK(clSetKernelArg(*dmmv, 1, sizeof(float) * local, NULL)); + CL_CHECK(clSetKernelArg(*dmmv, 2, sizeof(cl_mem), &d_Y)); + CL_CHECK(clSetKernelArg(*dmmv, 3, sizeof(cl_mem), &d_D)); + CL_CHECK(clSetKernelArg(*dmmv, 4, sizeof(cl_int), &ncols)); + CL_CHECK(clEnqueueNDRangeKernel(queue, *dmmv, 1, &offset, &global, &local, events.size() - 1, events.data(), events.data() + ev_idx++)); + } else { // CLBlast matrix matrix multiplication + // copy src1 to device + CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); + + // wait for conversion + CL_CHECK(clFinish(queue)); + + // compute + events.emplace_back(); + clblast::StatusCode status = clblast::Gemm(clblast::Layout::kColMajor, + clblast::Transpose::kYes, clblast::Transpose::kNo, + ne01, ne11, ne10, + alpha, + d_X, 0, ne00, + d_Y, 0, ne10, + beta, + d_D, 0, ne01, + &queue, events.data() + ev_idx++); + + if (status != clblast::StatusCode::kSuccess) { + GGML_ASSERT(false); + } + } + + // copy dst to host + float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); + CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &events[events.size() - 1], NULL)); + for (auto *event : events) { + clReleaseEvent(event); + } + + ev_idx = 0; + events.clear(); + } + } } } From 22c69a27945e7acf9690dd3210d316f22182751c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 22 Oct 2023 08:37:20 +0300 Subject: [PATCH 008/859] batched : add len CLI argument --- examples/batched/batched.cpp | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp index 155212165..2797329b4 100644 --- a/examples/batched/batched.cpp +++ b/examples/batched/batched.cpp @@ -11,12 +11,16 @@ int main(int argc, char ** argv) { gpt_params params; if (argc == 1 || argv[1][0] == '-') { - printf("usage: %s MODEL_PATH [PROMPT] [PARALLEL]\n" , argv[0]); + printf("usage: %s MODEL_PATH [PROMPT] [PARALLEL] [LEN]\n" , argv[0]); return 1 ; } + // number of parallel batches int n_parallel = 1; + // total length of the sequences including the prompt + int n_len = 32; + if (argc >= 2) { params.model = argv[1]; } @@ -29,13 +33,14 @@ int main(int argc, char ** argv) { n_parallel = std::atoi(argv[3]); } + if (argc >= 5) { + n_len = std::atoi(argv[4]); + } + if (params.prompt.empty()) { params.prompt = "Hello my name is"; } - // total length of the sequences including the prompt - const int n_len = 32; - // init LLM llama_backend_init(params.numa); From d3956aea53369455008159cc405ed4c496976692 Mon Sep 17 00:00:00 2001 From: vvhg1 <94630311+vvhg1@users.noreply.github.com> Date: Sun, 22 Oct 2023 20:09:51 +0200 Subject: [PATCH 009/859] main : escape prompt for cfg_negative_prompt and consecutive inputs in main with interactive (#3623) * infill tokens correction * serverinfill tokens correction * removing any leading whitespace from infill suffix and removing leeading space token from suffix when params.escape * removing any leading whitespace from infill suffix and removing leeading space token from suffix when params.escape * only rm when params.escape, rm space if possible which is added back or rm added space token * only rm when params.escape, rm space if possible which is added back or rm added space token * Revert "only rm when params.escape, rm space if possible which is added back or rm added space token" This reverts commit 
63ba0b621f21077c0e3bc6ba6a327534123cb738. * fix interactive prompt escaping and fix server infill leading space handling * rm unnecessary bool check * process escapes for neg prompt and interactive consec prompts * removed unneccessary static string escape --- common/common.cpp | 1 + examples/main/main.cpp | 3 +++ 2 files changed, 4 insertions(+) diff --git a/common/common.cpp b/common/common.cpp index 2ef902bd5..bbd1518ca 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -632,6 +632,7 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { process_escapes(params.prompt); process_escapes(params.input_prefix); process_escapes(params.input_suffix); + process_escapes(sparams.cfg_negative_prompt); for (auto & antiprompt : params.antiprompt) { process_escapes(antiprompt); } diff --git a/examples/main/main.cpp b/examples/main/main.cpp index db5309afe..2621bd539 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -761,6 +761,9 @@ int main(int argc, char ** argv) { n_consumed = embd_inp.size(); embd_inp.insert(embd_inp.end(), inp_pfx.begin(), inp_pfx.end()); } + if (params.escape) { + process_escapes(buffer); + } const auto line_pfx = ::llama_tokenize(ctx, params.input_prefix, false, true); const auto line_inp = ::llama_tokenize(ctx, buffer, false, false); From a5e7dbd6141128bfa3c40a19c2945a181df625d3 Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Sun, 22 Oct 2023 12:14:56 -0600 Subject: [PATCH 010/859] llama : validate special token ids are in range when loading GGUF model (#3635) * Add validation for special token ids to llama.cpp Small optimization for llama_byte_to_token SPM mode * Fix BPE newline check, only I could break something so simple * Killll meeeeee * Account for GGUF_KEY_KEY only setting when the key exists * Minor code cleanups. 
* Fix convert.py error msg when added tokens are out of range * Make gguf SpecialVocab vocab size-aware Update conversion scripts accordingly * Avoid a string copy Co-authored-by: Georgi Gerganov --------- Co-authored-by: Georgi Gerganov --- convert-baichuan-hf-to-gguf.py | 2 +- convert-bloom-hf-to-gguf.py | 2 +- convert-falcon-hf-to-gguf.py | 2 +- convert-gptneox-hf-to-gguf.py | 2 +- convert-llama-ggml-to-gguf.py | 4 +++- convert-mpt-hf-to-gguf.py | 2 +- convert-refact-hf-to-gguf.py | 2 +- convert-starcoder-hf-to-gguf.py | 2 +- convert.py | 13 ++++++++---- gguf-py/gguf/gguf.py | 36 ++++++++++++++++++++++---------- llama.cpp | 37 +++++++++++++++++++++++++-------- 11 files changed, 72 insertions(+), 32 deletions(-) diff --git a/convert-baichuan-hf-to-gguf.py b/convert-baichuan-hf-to-gguf.py index a1783f71f..3b64ecb88 100755 --- a/convert-baichuan-hf-to-gguf.py +++ b/convert-baichuan-hf-to-gguf.py @@ -230,7 +230,7 @@ gguf_writer.add_token_list(tokens) gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) -special_vocab = gguf.SpecialVocab(dir_model) +special_vocab = gguf.SpecialVocab(dir_model, n_vocab = len(tokens)) special_vocab.add_to_gguf(gguf_writer) # TENSORS diff --git a/convert-bloom-hf-to-gguf.py b/convert-bloom-hf-to-gguf.py index 7bfc95ec1..14dbd793c 100755 --- a/convert-bloom-hf-to-gguf.py +++ b/convert-bloom-hf-to-gguf.py @@ -129,7 +129,7 @@ gguf_writer.add_token_list(tokens) gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) -special_vocab = gguf.SpecialVocab(dir_model, load_merges=True) +special_vocab = gguf.SpecialVocab(dir_model, load_merges=True, n_vocab = len(tokens)) special_vocab.add_to_gguf(gguf_writer) # TENSORS diff --git a/convert-falcon-hf-to-gguf.py b/convert-falcon-hf-to-gguf.py index 1d98c51ad..8e8f3c3f8 100755 --- a/convert-falcon-hf-to-gguf.py +++ b/convert-falcon-hf-to-gguf.py @@ -152,7 +152,7 @@ gguf_writer.add_token_list(tokens) gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) -special_vocab = gguf.SpecialVocab(dir_model, load_merges = True) +special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) special_vocab.add_to_gguf(gguf_writer) # TENSORS diff --git a/convert-gptneox-hf-to-gguf.py b/convert-gptneox-hf-to-gguf.py index d4e85f518..f1599b0c4 100755 --- a/convert-gptneox-hf-to-gguf.py +++ b/convert-gptneox-hf-to-gguf.py @@ -134,7 +134,7 @@ gguf_writer.add_token_list(tokens) gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) -special_vocab = gguf.SpecialVocab(dir_model, load_merges = True) +special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) special_vocab.add_to_gguf(gguf_writer) # TENSORS diff --git a/convert-llama-ggml-to-gguf.py b/convert-llama-ggml-to-gguf.py index b5d3e0b3c..871add64d 100755 --- a/convert-llama-ggml-to-gguf.py +++ b/convert-llama-ggml-to-gguf.py @@ -388,7 +388,9 @@ def handle_metadata(cfg, hp): cfg.vocab_dir if cfg.vocab_dir is not None else cfg.model_metadata_dir, cfg.vocabtype ) # FIXME: Respect cfg.vocab_dir? 
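# passing n_vocab through lets SpecialVocab drop out-of-range special token
# ids instead of writing them into the GGUF; e.g. (illustrative numbers) a
# config declaring eos_token_id = 32010 against a 32000-token vocab is now
# skipped with a warning rather than baked into the model file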
- svocab = gguf.SpecialVocab(cfg.model_metadata_dir) + svocab = gguf.SpecialVocab(cfg.model_metadata_dir, + load_merges = cfg.vocabtype == 'bpe', + n_vocab = vocab.vocab_size) convert.check_vocab_size(params, vocab) return (params, vocab, svocab) diff --git a/convert-mpt-hf-to-gguf.py b/convert-mpt-hf-to-gguf.py index 19a66820d..21b9fd507 100755 --- a/convert-mpt-hf-to-gguf.py +++ b/convert-mpt-hf-to-gguf.py @@ -139,7 +139,7 @@ gguf_writer.add_token_list(tokens) gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) -special_vocab = gguf.SpecialVocab(dir_model, load_merges = True) +special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) special_vocab.add_to_gguf(gguf_writer) # TENSORS diff --git a/convert-refact-hf-to-gguf.py b/convert-refact-hf-to-gguf.py index bfeabc082..934f3852b 100755 --- a/convert-refact-hf-to-gguf.py +++ b/convert-refact-hf-to-gguf.py @@ -150,7 +150,7 @@ gguf_writer.add_token_list(tokens) gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) -special_vocab = gguf.SpecialVocab(dir_model, load_merges=True) +special_vocab = gguf.SpecialVocab(dir_model, load_merges=True, n_vocab = len(tokens)) special_vocab.add_to_gguf(gguf_writer) # TENSORS diff --git a/convert-starcoder-hf-to-gguf.py b/convert-starcoder-hf-to-gguf.py index 90fa0c32f..fe8815cbf 100755 --- a/convert-starcoder-hf-to-gguf.py +++ b/convert-starcoder-hf-to-gguf.py @@ -122,7 +122,7 @@ gguf_writer.add_token_list(tokens) gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) -special_vocab = gguf.SpecialVocab(dir_model, load_merges = True) +special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) special_vocab.add_to_gguf(gguf_writer) # TENSORS diff --git a/convert.py b/convert.py index 24da25efc..0680f71ea 100755 --- a/convert.py +++ b/convert.py @@ -369,7 +369,7 @@ class SentencePieceVocab: expected_ids = list(range(vocab_size, vocab_size + len(added_tokens))) actual_ids = sorted(added_tokens.values()) if expected_ids != actual_ids: - raise Exception(f"Expected added token IDs to be sequential and start at {len(added_tokens)}; got {actual_ids}") + raise Exception(f"Expected added token IDs to be sequential and start at {vocab_size}; got {actual_ids}") items = sorted(added_tokens.items(), key=lambda text_idx: text_idx[1]) self.added_tokens_list = [text for (text, idx) in items] @@ -1163,10 +1163,13 @@ def main(args_in: list[str] | None = None) -> None: vocab: Vocab if args.vocab_only: - assert args.outfile, "need --outfile if using --vocab-only" + if not args.outfile: + raise ValueError("need --outfile if using --vocab-only") # FIXME: Try to respect vocab_dir somehow? vocab = load_vocab(args.vocab_dir or args.model, args.vocabtype) - special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, load_merges = args.vocabtype == 'bpe') + special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, + load_merges = args.vocabtype == 'bpe', + n_vocab = vocab.vocab_size) outfile = args.outfile OutputFile.write_vocab_only(outfile, params, vocab, special_vocab) print(f"Wrote {outfile}") @@ -1178,7 +1181,9 @@ def main(args_in: list[str] | None = None) -> None: vocab_dir = args.vocab_dir if args.vocab_dir else model_plus.paths[0].parent vocab = load_vocab(vocab_dir, args.vocabtype) # FIXME: Try to respect vocab_dir somehow? 
- special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, load_merges = args.vocabtype == 'bpe') + special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, + load_merges = args.vocabtype == 'bpe', + n_vocab = vocab.vocab_size) model = model_plus.model model = convert_model_names(model, params) diff --git a/gguf-py/gguf/gguf.py b/gguf-py/gguf/gguf.py index 072c839c4..6b7d65429 100644 --- a/gguf-py/gguf/gguf.py +++ b/gguf-py/gguf/gguf.py @@ -987,12 +987,15 @@ class SpecialVocab: merges: list[str] = [] special_token_types: tuple[str, ...] = ('bos', 'eos', 'unk', 'sep', 'pad') special_token_ids: dict[str, int] = {} + n_vocab: int | None = None def __init__( self, path: str | os.PathLike[str], load_merges: bool = False, special_token_types: tuple[str, ...] | None = None, + n_vocab: int | None = None, ): self.special_token_ids = {} + self.n_vocab = n_vocab self.load_merges = load_merges if special_token_types is not None: self.special_token_types = special_token_types @@ -1002,6 +1005,16 @@ class SpecialVocab: if not self._try_load_from_tokenizer_json(path): self._try_load_from_config_json(path) + def _set_special_token(self, typ: str, tid: Any): + if not isinstance(tid, int) or tid < 0: + return + if self.n_vocab is None or tid < self.n_vocab: + self.special_token_ids[typ] = tid + return + print(f'gguf: WARNING: Special token type {typ}, id {tid} out of range, must be under {self.n_vocab} - skipping', + file = sys.stderr) + + def _try_load_from_tokenizer_json(self, path: Path) -> bool: tokenizer_file = path / 'tokenizer.json' if not tokenizer_file.is_file(): @@ -1029,10 +1042,11 @@ class SpecialVocab: tc_content = entry_content else: continue - for maybe_token_id in (atok.get('id') for atok in added_tokens if atok.get('content') == tc_content): - if isinstance(maybe_token_id, int) and maybe_token_id >= 0: - self.special_token_ids[typ] = maybe_token_id - break + # We only need the first match here. 
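# (next() with an explicit default never raises StopIteration:
#  next((atok for atok in []), None) evaluates to None, and a None id
#  is then rejected by _set_special_token's isinstance check)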
+ maybe_token_id = next((
+ atok.get('id') for atok in added_tokens
+ if atok.get('content') == tc_content), None)
+ self._set_special_token(typ, maybe_token_id)
 return True
 
 def _try_load_from_config_json(self, path: Path) -> bool:
@@ -1042,21 +1056,21 @@ class SpecialVocab:
 with open(config_file, encoding = 'utf-8') as f:
 config = json.load(f)
 for typ in self.special_token_types:
- maybe_token_id = config.get(f'{typ}_token_id')
- if isinstance(maybe_token_id, int) and maybe_token_id >= 0:
- self.special_token_ids[typ] = maybe_token_id
+ self._set_special_token(typ, config.get(f'{typ}_token_id'))
 return True
 
- def add_to_gguf(self, gw: GGUFWriter) -> None:
+ def add_to_gguf(self, gw: GGUFWriter, quiet: bool = False) -> None:
 if len(self.merges) > 0:
- print(f'gguf: Adding {len(self.merges)} merge(s).')
+ if not quiet:
+ print(f'gguf: Adding {len(self.merges)} merge(s).')
 gw.add_token_merges(self.merges)
 for typ, tokid in self.special_token_ids.items():
 handler: Callable[[int], None] | None = getattr(gw, f'add_{typ}_token_id', None)
 if handler is None:
- print(f'gguf: WARNING: No handler for special token type {typ} with id {tokid} - skipping')
+ print(f'gguf: WARNING: No handler for special token type {typ} with id {tokid} - skipping', file = sys.stderr)
 continue
- print(f'gguf: Setting special token type {typ} to {tokid}')
+ if not quiet:
+ print(f'gguf: Setting special token type {typ} to {tokid}')
 handler(tokid)
 
 def __repr__(self) -> str:
diff --git a/llama.cpp b/llama.cpp
index 365349335..8d52eaf62 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -2238,15 +2238,35 @@ static void llm_load_vocab(
 if (vocab.type == LLAMA_VOCAB_TYPE_SPM) {
 vocab.linefeed_id = llama_byte_to_token(vocab, '\n');
 } else {
- vocab.linefeed_id = llama_tokenize_internal(vocab, "\u010A", false)[0];
+ const std::vector<int> ids = llama_tokenize_internal(vocab, "\u010A", false);
+ GGML_ASSERT(!ids.empty() && "model vocab missing newline token");
+ vocab.linefeed_id = ids[0];
 }
 
 // special tokens
- GGUF_GET_KEY(ctx, vocab.special_bos_id, gguf_get_val_u32, GGUF_TYPE_UINT32, false, kv(LLM_KV_TOKENIZER_BOS_ID));
- GGUF_GET_KEY(ctx, vocab.special_eos_id, gguf_get_val_u32, GGUF_TYPE_UINT32, false, kv(LLM_KV_TOKENIZER_EOS_ID));
- GGUF_GET_KEY(ctx, vocab.special_unk_id, gguf_get_val_u32, GGUF_TYPE_UINT32, false, kv(LLM_KV_TOKENIZER_UNK_ID));
- GGUF_GET_KEY(ctx, vocab.special_sep_id, gguf_get_val_u32, GGUF_TYPE_UINT32, false, kv(LLM_KV_TOKENIZER_SEP_ID));
- GGUF_GET_KEY(ctx, vocab.special_pad_id, gguf_get_val_u32, GGUF_TYPE_UINT32, false, kv(LLM_KV_TOKENIZER_PAD_ID));
+ {
+ const std::vector<std::pair<enum llm_kv, int32_t &>> special_token_types = {
+ { LLM_KV_TOKENIZER_BOS_ID, vocab.special_bos_id },
+ { LLM_KV_TOKENIZER_EOS_ID, vocab.special_eos_id },
+ { LLM_KV_TOKENIZER_UNK_ID, vocab.special_unk_id },
+ { LLM_KV_TOKENIZER_SEP_ID, vocab.special_sep_id },
+ { LLM_KV_TOKENIZER_PAD_ID, vocab.special_pad_id },
+ };
+ for (const auto & it : special_token_types) {
+ const std::string & key = kv(std::get<0>(it));
+ int32_t & id = std::get<1>(it), old_id = id;
+
+ GGUF_GET_KEY(ctx, id, gguf_get_val_u32, GGUF_TYPE_UINT32, false, key);
+ // Must be >= -1 and < vocab size. Since the key is unsigned, -1
+ // can only come from the default value, so there's no point in
+ // validating that.
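// worked through, assuming a 32000-entry vocab:
//   id == -1    -> size_t(0)     <= 32000 -> keep the default (missing)
//   id == 31999 -> size_t(32000) <= 32000 -> accept
//   id == 32010 -> size_t(32011) >  32000 -> warn and restore old_id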
+ if (size_t(id + 1) > vocab.id_to_token.size()) { + LLAMA_LOG_WARN("%s: bad special token: '%s' = %d, using default id %d\n", + __func__, key.c_str(), id, old_id); + id = old_id; + } + } + } // build special tokens cache { @@ -6103,11 +6123,10 @@ static uint8_t llama_token_to_byte(const llama_vocab& vocab, llama_token id) { } static llama_token llama_byte_to_token(const llama_vocab & vocab, uint8_t ch) { + static const char * hex = "0123456789ABCDEF"; switch (llama_vocab_get_type(vocab)) { case LLAMA_VOCAB_TYPE_SPM: { - char buf[7]; - int result = snprintf(buf, sizeof(buf), "<0x%02X>", ch); - GGML_ASSERT(0 <= result && result < 7); + const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; return vocab.token_to_id.at(buf); } case LLAMA_VOCAB_TYPE_BPE: { From 5a42a5f8e8a86da9ac88008d748cf232a83aa0e1 Mon Sep 17 00:00:00 2001 From: Ian Scrivener Date: Mon, 23 Oct 2023 05:16:43 +1100 Subject: [PATCH 011/859] readme : remove unsupported node.js library (#3703) - https://github.com/Atome-FE/llama-node is quite out of date - doesn't support recent/current llama.cpp functionality --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 49bb556a8..b56ecaec7 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,7 @@ as the main playground for developing new features for the [ggml](https://github - Python: [abetlen/llama-cpp-python](https://github.com/abetlen/llama-cpp-python) - Go: [go-skynet/go-llama.cpp](https://github.com/go-skynet/go-llama.cpp) -- Node.js: [withcatai/node-llama-cpp](https://github.com/withcatai/node-llama-cpp), [hlhr202/llama-node](https://github.com/hlhr202/llama-node) +- Node.js: [withcatai/node-llama-cpp](https://github.com/withcatai/node-llama-cpp) - Ruby: [yoshoku/llama_cpp.rb](https://github.com/yoshoku/llama_cpp.rb) - Rust: [mdrokz/rust-llama.cpp](https://github.com/mdrokz/rust-llama.cpp) - C#/.NET: [SciSharp/LLamaSharp](https://github.com/SciSharp/LLamaSharp) From 9e70cc03229df19ca2d28ce23cc817198f897278 Mon Sep 17 00:00:00 2001 From: goerch Date: Sun, 22 Oct 2023 21:21:42 +0200 Subject: [PATCH 012/859] Add test for MPT tokenization (#3728) * Add test for MPT tokenization * Revert code motion * Remove unnecessary restriction in test case * Clarify logic in conversion --- convert-mpt-hf-to-gguf.py | 15 +++++++++++---- llama.cpp | 17 +++++++++-------- models/ggml-vocab-mpt.gguf | Bin 0 -> 1771406 bytes tests/CMakeLists.txt | 1 + 4 files changed, 21 insertions(+), 12 deletions(-) create mode 100644 models/ggml-vocab-mpt.gguf diff --git a/convert-mpt-hf-to-gguf.py b/convert-mpt-hf-to-gguf.py index 21b9fd507..2d2fa2329 100755 --- a/convert-mpt-hf-to-gguf.py +++ b/convert-mpt-hf-to-gguf.py @@ -128,15 +128,22 @@ vocab_size = hparams["vocab_size"] # ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py tokenizer = AutoTokenizer.from_pretrained(dir_model) +added_vocab = tokenizer.get_added_vocab() reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} for i in range(vocab_size): - tokens.append(reverse_vocab[i] if i in reverse_vocab else f"[PAD{i}]") - scores.append(0.0) # dummy - toktypes.append(gguf.TokenType.NORMAL) + if i not in reverse_vocab: + tokens.append(f"[PAD{i}]") + toktypes.append(gguf.TokenType.USER_DEFINED) + elif reverse_vocab[i] in added_vocab: + # NOTE: wouldn't we like to distinguish CONTROL tokens here? 
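# (the tokenizer JSON does not say whether an added token such as
#  <|im_start|> is a control token, so USER_DEFINED is the safe choice)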
+ tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.USER_DEFINED) + else: + tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.NORMAL) gguf_writer.add_token_list(tokens) -gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) diff --git a/llama.cpp b/llama.cpp index 8d52eaf62..c63e6251c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -975,14 +975,15 @@ static void llama_nop(struct ggml_tensor * tensor) { // don't offload by default (void) tensor; } -static std::string llama_token_to_str(const struct llama_context * ctx, llama_token token) { +static std::string llama_token_to_piece(const struct llama_context * ctx, llama_token token) { std::vector result(8, 0); const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size()); if (n_tokens < 0) { result.resize(-n_tokens); int check = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size()); GGML_ASSERT(check == -n_tokens); - } else { + } + else { result.resize(n_tokens); } @@ -1202,10 +1203,10 @@ struct llama_vocab { id special_eot_id = 32010; int find_bpe_rank(std::string token_left, std::string token_right) const { - replace_all(token_left, " ", "\u0120"); - replace_all(token_left, "\n", "\u010A"); - replace_all(token_right, " ", "\u0120"); - replace_all(token_right, "\n", "\u010A"); + GGML_ASSERT(token_left.find(" ") == std::string::npos); + GGML_ASSERT(token_left.find("\n") == std::string::npos); + GGML_ASSERT(token_right.find(" ") == std::string::npos); + GGML_ASSERT(token_right.find("\n") == std::string::npos); auto it = bpe_ranks.find(std::make_pair(token_left, token_right)); if (it == bpe_ranks.end()) { @@ -7499,7 +7500,7 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c for (size_t i = 0; i < candidates->size; ++i) { const llama_token id = candidates->data[i].id; - const std::string piece = llama_token_to_str(ctx, id); + const std::string piece = llama_token_to_piece(ctx, id); if (id == eos) { if (!allow_eos) { candidates->data[i].logit = -INFINITY; @@ -7711,7 +7712,7 @@ void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar GGML_ASSERT(false); } - const std::string piece = llama_token_to_str(ctx, token); + const std::string piece = llama_token_to_piece(ctx, token); // Note terminating 0 in decoded string const auto decoded = decode_utf8(piece.c_str(), grammar->partial_utf8); diff --git a/models/ggml-vocab-mpt.gguf b/models/ggml-vocab-mpt.gguf new file mode 100644 index 0000000000000000000000000000000000000000..6affa34bdd5e377bf3982f8708b89b1fdf728334 GIT binary patch literal 1771406 zcmd?S`IlVRapy_TzVB%*t2G9^=?bvEnA+$diuZS6E`d38wshs{W3pHQ50`xzIT_n zabu6T*RH*F_1}I@fB*Qf`JW&Dcc&iL(`tCSn)Y{_dEK8cruBcoZ~o3`GXGDWd30Qj z$|L{T;rBf9{Kek$vw1t+-<#6OH2reA-;U??lljA8J>Hq`{yXKJpLouP=iSrws8w55TyPfV;(?Ra#v(G*EU#G{a z`Me&_n|6G9x2^{3-F@uGJ~yoP>*>RBI~_f&C$nbQj{iNL`|TedyO@Vpzx`n~nml~; z{$qRWx##|$&;HZ$-{+pQh3c_U_2gkSYjU-_4M@4&S-dg z)DG&Q?a2yw&po#@nV|2+4ZFV^EhyKVdN#n=Ah z_;OMe@PF~OpE&&Lf6A#tCnn^v0*WampLdd*ie>Uhu{l zZ=ChUId7cz#szP@=#7`WanTzud*hNfUh&4Kz3~}uyy}gB#!vY-pY_IuH(vL~rZ+Bo z-gwI!pZCVw-uM}B ze8C(4%p31`I!NB=rnUys%| zqV>&aeJfhuj@Eag_3P34jcEO5w0-VGe-DrI;T7M9&KaAENMeC2F z^(WE#(`fx!wEjF=-;dT`MC&i3^@C{rmA4MQ9IdZJ>*u5OFQWC|M(e+e)_)(ZUx?OM zqxC;T>wk>aFGlN^qV>zs`dYMpC0hSewEktZ{#CU8=V<*e(fVJb^{dhPwP^is(fZ$` z^?yX`|BTlE6|Mg}TFYn!h8+Zk9R!9Q1cn_1h8+Zk9R!9Q1cn_1h8+Zk9R!9Q1cn{_ zPRt;1?I3XNAaLy`Z0wcP>h%PXq 
z!rE_C$46^oGpxEJsV4XctKS+o^{~S3iDP6}JbAEPpcJW*W;n6kgJLgialyuP0FuX# zql1jzk*+j~KQGO{VVpd5dxV~KEvl@mV%=GKC$usy&}0DC^M=S zo!IC~zWUJ6oyDiO3r^&vj4GL3yRJN23c*4aV(_s7{MVk|*VMi&hF8)~(Y_E2{bCiU z&GRUYTzRI-%$x6OoXRAb*KuKOfcTCV;54bX9@!VNu+kWyDoZ-Fi7uueQ1y&zqtDB} z3lq7p!?j+>+1s%LLWLqQ<-yhTthO2zz-&nCmFddbB&v(1Eh4xdKxGzf%mf z=Cb|Gn~n&f<&0Z#_!F+l;0i4BaY4erq3H6C?_w&Vuf1yVFPoVZ{j^?M_Ds2xAE2R_dAK(R1wlnY-mQWq zQ9BBFE!gwTmtrDItJl}!=ZnYBf5|_u552uZ@`5{$lf}sVMDz;F72Q0>pUbAUcmE*1 z!A*(7!4t+gHz>726naPwZ;d)t&5zqLwn%w#2@#p+)f!-^wVt6+Fdk16Xk#rF!$#Mc$}D)p&uu0j@mk;J*L7gy zDKzi2Ea8n0eK=I97)WC^Qn&C|BjEEgK`*Fz!sKZk9wSnN5aOFxUyr#GrIXVx_*eL^ zF%Z$9a-&1A8ApW_H&t70TF?F>BX3OK|^ zREINpK=WQ`dp$>{mICu#PcE`$@ zY-j*W?ev;QT6d70Jadg5CZU+ww=-WPTSs`svRT)cHckCjRIkXEWhNaPM~9lfGCW-n ziHGqK@p4>Zv8UX8w38`b!gSjfv81eW^WO9a0_c%v&1Suq2GFY8nn~n?OBJK4|&=#Gc=6sq; zm=L*UJEMrbs{V3J8uvvi3GGJ7uTNilJs#qG?#2{Ad7hPo|3<}cy6{P`1E!)72Qhu_YMd#-(r75R#eMt;b z9P;#WN{WD&+h`YCH&(neFvr31;2477vn|#h+dsS9GwnN^PIX&zi6}T)Yj*edvwFbd z#y4VRT{1xbz-N!IHe86V&DeXO^Abk!@|mepOkBDLQ!|$y}%K zy%@aZ2Ys>4a?G3!wPLIv`Iz!aWLrBEKg zTJVJ;LQ+(mE=d;tM{?N4+z6hGDesZ0R;x$XBww|2L2(h%s)Zwj4Z8H*c-JmPdaBV# z_|xpVxd4b@lmLXMA68fmrkA+)>_;F9-O(RVQ+= zg=~t6%rwG<*0g3P3eM2rB6HZF)O!N##4NA;X%L*a!EfRqE~C z3`kM@a%;!M-lD>q49qxWN5k}cpc1D2Oj$ro*I zF^bd$ReA0h3WZ%zi^NZG6%ZFG3iQpfku+Pt9m_¬aM;89Bv1FQ=b-*9Q0)xXGaO z1yUX5mLBNY^d8_VDzBFXA6)s>XJWxrM)>8$$Nm;C#k>e=L(aVDPA9Y=*zmo0iXyBf z6Ro8o;VVFHninwbRqe^GZO*dg@#c*fN2Ro5_t5X-&&b6e#ExLE3$Yzf_0Qgkg-ASB zbSUoSy;fm39^i5!F6ouZ^sLTtdEmKo<8d@)bS$r+MM;iOPGTwuU_8i0C11DJNAeA! zQed&M2CDj%%Z`LhK%KQ-=hp}o^jeTP-v3ZJGDX_YpNz{|VTOYEL0Xen9pB`bef;Eu zB++4Uxpd8N(xiuBaY~GK&;GYd4T`eT|Kxx1ihZyEqoSvBR|>t*l>J!p_~1A_L=>=c zmjw7AeHA-{v<8SfwGygT|9JbdimxRV1p&QM!2t!@Bp5Tcb?g1^D$ zNcE`euo9=@Ti=Q&rDLN3gNN{LFDbnss5&z}6?5WRt>6qgWfcu!ZKD@oA>d_hLfh%6 z6%EJeJzT4iYrl9nEiExRLB7E`n!td3Fh0hyjWCneo^Dd4dZoA{K`9_ z{oqOABOJ#CpJwHS}%Kx zl!Zar6dbnR-|5#YGoY5Iga>F-d>!25q(7!0UTdU{Ia8ajTq|@MIT}I7^g<{)hkGFS z=F;aoA+-;IMy0H5I(I51NHx~u<;a6rMF8-`+MXBEaH&-!)^fib5C%INPrA1W5390D zkXmu+aw9d6MZtqSkXjoHA2GA|SssS?8Nrd+BiBnXwwZEbhS&i4dVI4FkIoapxdSRB z^Qg&2{x9(rBKD{>!6Lf_NW=ZpV9)SE+#R3$J^#g`uYB(L=hJhHH{=x`xMr=jNNwKm z)7P>XJbed0;NSaZ_!bUQdXWFLg{u5uxZ{2c^gY69?8B#=HEv5m`-^cv0U0vIf~Joi zU3oTWKtn#ZAwGHf;gb)2PEnSQiw+x^?#`z<92WHu=|w~KAOkw7YmqI~UIJWc<)m|{ zG3DCLYT1cmCun@IIb8e~Pkzh4-vsYo_E#+cF8sNzG8)7%wG^eJjC_S_Z7TC$^a+9S z%GB#IXBsxSl-Tsk(;kS53bg$}M0rZu%y9uULT?J2`77J(9yWzJd0}>QHmKeL=|70~ zf*PK<)yH5q65Fn_9RPk~<;|&CN~BQVv41ayB_ougO0k&~NiXfdxnIIr5cgtmT>Ss| z;!6MYo)1yPe8F1*c&^Gz%5O#@nQ;nY<-bhJVdC>mJ2nC zeTk#G`~QJA(#r=paM{?v%U99O_^nHMjeR?G(4+y`-ZL3c6dXeAKPa&sd9TL>q`ym@ z3xssNhdT`9gyv!Aevk|EcTg0(Ga5#gO+j}rcpe-@xUcdmrZFYu4r^5r`6ZQgJNSPr z;Ym9w?|(PBs?xNYQWc)aP2QFH%-}bwbnpbH>8JloY$|QfCeifzEuOslCvUy$tx-_| zrdcGM(i`k^RW*>T+vl&z^)_hgf9#=b14k_$;d2d=Dt{vdA&-%5nGwKUVr`Z-tdNp@ zgc4$|m?JYW%q{fBLNF|y1CsJg!BtA$FstCAnVD?qcI9rF3tgrCLR=wi7^;?sxpfbm z$x-UtSlDNv-n~lF1arp~^3s(YQxueJ#*Z$oZm#)V=r5pGQN=>CJxB)+6hg}y*Lp|~ z6o|IrK}I=xk%!(TOz+DC1&~i;7C@i+Dc7$&dK46ytFCPH3o0HtDoEn~lFu1Nt}iRu zI4JEYV(*O#u9-v=uID05xu5RW=pB+XWf5nu0`Vylet(d@UeuYZaX+AUU{2tjc<7B$ z=!?@9tg($96!egP=*dq_YL2A}L)azGF3Rs66cSn~2Lk1guB86BI$H#!Q6L-yA5bg2eMoQsneiG$XNtREo_Hr>EH<%!-&J1( zoo=lNVklkmLYJ6m@Q~01@$KnXuEhjrGq;8N7Q-$cxqfTayRdd}j*s3%=fW)^9zkyL zb;XE}z7Tr@?Eg9M23|zE-NDT9inY}e7S`QsvMt|^(bkICohQdH{{AriBF%SM&P)SW z@)}S7x)cUnYVl$upQ$`;0YUGMc;vVc>$O+o={scm=Z1l2|A2I{Uj+B53|a+ma7kHE zwu_I@PTds9tupi8CA7N1(1lvmim9nfrXifCAoKlVR$f+Ksb{a>DwD)9maCcgYcL1Z zUR(3$Sd)qZ7A*!jh?{MjB0zqtf*uX-9z&uVso=7Ir7HV_B8ZeHJxq&P&FcMjjw9zI 
z-cE8>zwDh_AE_dnAIEcYbg$fpU3DVI*gI{omk^&jr2h9TTS3Q?TC4P-#@CQG4KVI_Ks@m@c}{Tl$*b2&LPc?4MhQqr{&N4X)-;$~$7=Qzli*GR;AG*{ zNqzg@?hCdSb5xkA9J6D&m^}~@Cgt_Y(rRox(8UB`Ev_uCZidXs47o?CRMu#(bWZPbuPRnElB^s6y(94s%} z`LU*Uhe#)Jkza;FaFWS0_!8bD#pOqyr;3SCI4nk_v2`BPZ+-LmKkztA@-!dgqWbso zl`A|JB&Nm`{=GO7BYqL0FPl>gdT>%}DKY|$?gaLIP``0%Jy0K|Wi4Y*Ga(b3Q_ z3wMKOQB@yz2f**x&+Vc(d%^$ow zhoy^#?xfH{0M+Ou-W&u~rsAhuhP4xHL9hHmq0odq$6mHKE;1~kjCu6Te700Wd zxpF0Q6^RtpVr(FG2<69vio|+UHEcP{3m072FcS_($~pI{VcZB3`|CKPnWVSn1mL>V zI)82PGApV`fek_(9-@B=_9Y{VGgGB5%>&>Ld@oqyj(UTpae9`nP@GJ-4tzhM>7uig z5`^#$axC_Wq7yNqpa&;Kok#wJM_PbEgc<&M#^Mt9giqC8~88MYNvTex)_N6o|kIw(rh^keg`M1LG!$1dEOd;J`Riu;oLhf3U_Bn;fU51O7WPlRKD=)t+#q43rUgQ(tqG_WPQZSVu~ zW9kMs$&m|}o4;?`Lq{ZEt?qT*Y4yVHzg#kB&cI2gTaE*SlM*vJm;vjMxy7nUZe+UK z#ce4VkjkgDbp_4dz862;SO|{pKg<)%zB`KtxfKsD7bAA?P^0n^#b zuP;9G3=pqfUP|n+iioh`dGfH}U2I~wSMDNtlDa~`sc|SlT0PHV-=X~{h1g_B)OgdH zmakqWF*zZh2jzBE9GIzCl&o{LDL`g(ZVz86apqVm;;_ihos;oVDK)hZesn@;EZko3 zRelQI809DCLdFdH;*W9ClHgWP5Ycdq-k_UOs--%WZ*;Nv7*O?g3|nuAQZEdL);8t4 zCOl%mpU=}Jdb2oL)u7L^5J;5Tp(JRVHNro|LLj;!p$#SDD4h;d2}M*9-StGUO=NFO zco}DRl_*&BKA?yALmkTH(8@y>D<#(U-HV@DfT;p@`jq5)kKMWrTF3NTL^ivP1}G!* z^2{}lb{!m4re2|-4wFW2ebH1Q(hON#8i2sauX;&PBdl(+juhrvu@Q(}mzsp+hR3U#wc!;5dm z6R>`gWTn>Eop~i4Aml%N82f;qh*_H^OzRxC3j*3|rmnua*z3hN>Du(G znAnJRxM|5;xtfu|RC~TCNPVGT7@D{ zWzVRWv@@#3--IS@W_jOn)ahvt(aOsMD2o+wkT*ATgAg|kPJyCOJ@Sg$>AEU#ygi^K zC|>)bi{jS6z!$bB*1CcA>KBsRlXH7{YC1HBdj($A0e>m`;-nlH=ktClchBsS|tI10+!9E4GY}VCM>%L{Dj7El=YVN!v^< zx@#YR|C_ycZH_xT&OCQN!ffnBOw6lMyS-}C-7Q>rX|}DlivUQX8#kjsQnFfV7k~m# zB!Ged6iHCijWF7)UsvWCF>qRLf>nDc?wTHS#8Zk?2Kn)BW5(i zHVXLH`Ja;~PoBIynMw7o0<#&lneO-PQHl6hgSJj+d zr}_wkl_~-kWu$&=5wwM3QjHG^D|3DL@8}pT-N$M>3JG{Gpsv}9a#(q|K!>JoL}$vl zlFFqL8K;kHsfLYKDn*RAa>pzdpxd$HlXq6!+&D0kxncLBog5CESkts_4~GaVeIm;J z;rX|{oZAa43+r0WT$~~s;O)%;j{=0}2sUIMWxo1$X`B+CwLm>o+Z@9&qgyXudR4QC zjjBP7h55w|Z8i-1$I`R^Spz-)6wTt}M*uC-)_m4!GH`fC!Ow!HyznWv%iqE$5Ty}Y z_lC8!kcd}THw<@WtB{QJF!ARi|4GD0Vg6CMu`f9FSAKdatW{AE5g2}DsrzVDOK#Q>{vpZzE3huo+sqxb0q ztQ7`ACGXm(h|x%?@guX22$dfO(k$r>*0ofI8>h&7Z2{^)$+huFa`oK}eRuH0&Y)H$ zDyI2TJ|_1HtZ*(JuMng+hfKr2QZYx-E_)aQoBt`QIpyJ!k#^AT51YG^pezjpq0z2$SuOFnxc z3??#ZL*Mo>A#xhXAuSj+>|^oB`|p7e^K`-3qNwY^g_XGl|E+maa%z$8n(K&akR_vB5!wHfwWGt~Y3LNWDua?H&E#9JSxq{GcR5AO84FH-jNK{yC6iWP;e$Kz|iuhAAZ-^gIU%2lh5#29fFG^a^ zs75I}Jc?>#027eotvs=~x$SYfR5ym)6gJU(gS35`2)1|bSo=VP9L8No@^Lv&oXMJc zLHZDI2U8%B!BLbvcNY)Js5mcCs{#*>fHco7vuX?oLli?PO<$kJj_6ppyx4Kw%&%uD z($BtBqS%0h4w6OZmhPr&kd}U8$QK`<|4cuVl9&5~LPs&0t+Xem-zY~Alo?m5-o9Hb zgOeU|FN!;BUst>dDNTc=En5S~8HbPj4wOjak_I*v>lOEH`yM8eUV%hW(a!}|Z~;^L252T7;?m7g8Cc{i6XJy?;~v-1+OMt z$vE&cEhC9lO7KVgu70D{!es*bd-!&$+bKGhac=bIz=;}aKYRVEh_SRdx3JP(&^Gk} z(u;uuTf&tFV6yN&!KqMjJ<(pIgK>cTrbWVW`b1gr;tFlPl}S=;;;%<0m{`b2m#(K$6y>$lPFw>N^u?(`tJ8jd%r{CsTATN!dH5R{q{x-F|Lr*^j)pFTQxef1&4Q1NA>B{w!S#mK3O+m9Q}+#5FL zIJ2-Q000`9cM;|9UB2w|0AMb%rCq6)^Dwl^fmO%e_73BUGAZE@0>$D!T)u;0sdvRJ zP6*t8LJ@qsPvT+}E6~?C?d*r{?(IUQR+&K*B>PKMM=f9o=g^P zPOXUTci>!)Fm%6*uS=UQu zu3ho2Q7l%cVGYm27zwQL&Y%5W>I9J@Ub+%?#geh}VB!YdR`#akra zq$hxW9PJjPwiPIEa5=xPITZ*Rp~CnU9kbHd$DxTv4LDcIjND~h9{MBtHtQ6RN0g($ zHYvB$M4V^|3^V~BTxM|5zes0#Q~jf*&KRlXMXE|qazCBYhI<;4<}c+Wa0pIuJD5FU zwl%g!(e-Qtvs(gcJJ`E8!YZ{lciW-Viy{i3ec`;iDmy{jKM>LB3T0aAS_|)w84U^} zv^>(%AmQDAQsiWhY5N>p#78re@Wo$OFg8&TYUm?(q!)ZDW4>q}!{oltvvHa!p=`4P z>G%WaKdCI!=D1f5afic%OWpfIm@LZoJ0+zv?vtQ5OwG=El^jJ)Ztkvr`r`Y3fR=!! 
z5ivt}!=HD;AWD|IiAnDO_aEL}&>M$e9%;AmB$sRaWj%3YYDzx>sWjKps;B0>(C!2u zuLKx6WZVyjWj_cI^LbE8-$huJ<5;vKt_J?UCF4;SEfwlNlYH-;f9WPh&~-H>NpA+p?XUj2;?w%9818v!X!UPff4#YA7oUzknJq zmsm@%+Z>o}AN9sTWeJP8nfEhXIUuO?jpKA5b{4hTWlFnhmK(=ZIMnyU%e0%_&w=?-QzRCM%4JrVilzK)Ngh>!pD z;y<029!a4{#ntS;Zxb@nV$hX_d4Ra^v&;n7wfUq`ozqf88X7b?lAhxlKAVswB~^WD z#$15}#MS5^6elsv)vCv#krT6+FnT+!D7i+t*n3_gDG?l7o(QBNpT3IE`u5P?Jxzs7+@-UBm{yZ6b;5*=X44D{XU4F0LV(wIngYFW9v@Gj-i7XjWVV0JL^e-V2Cpf3&5`X=zcM6}^z7 zY@5ll6)6RxB{~lUTKw?lS(Gr;__#XD$!IM_kpVR+YM+IV(qKJ*_U`YL7xDE{;%02q zLlv)_QV#oIH9VQ99Opm7Mo8Je)_xJl(8MC*X}S=o!>dY$lm3i(QsQ`kn4%lEb@(Ak z(#f4jAj34;CE)3-bh;hUW)45@CK|9eJ>>PcdY8Vz{m`#2*QMfeCAufxAPM4lqQ+_S zI}b}Q$nkaCT*q3|?E<037OSN-Ucm+Y&GR3a@&IBC5j-B16hm}WifE<}rkl{ByNmOB z6MmC_(2AProMMTt;&_fk5qM`9bp!mFcwtEbh(lvVh%Ngb5#}HI>CS!x156Z3$NP_a zRC;k%_SSv^?#X%7MBjSl((n2krz7G{$%^6e6el6@$8LD<9W_I+<-=^2Q0AZx@%1wN>8H&Q&@hm}bxMne?j*u2943((Bd^3%dV8rV zH26j&BWW^WsBmgi$7`@suAJh=i8J47;ntD_M~8iAM1u#3iCfD$Ti6|%N2|J;`a>7P zGfg^>@rP*wqR)^KtD)X|#T!Y*De)p-j+lP=n@a>d=6p|Js=49uOtUFPEu-{B;8r!y z&-cq+)EymN)M3M8E8?)N4QlPZru^hdKZ;1D!t(lS#Rs*_b*WW+;Y|(Gs%~K4dl)n+ z<`>$FuitLidt>hM7Z2LyVDS`1p_`Ax-{Z=Kfw(dHC!a7Pg%C8PjhFHYGaq972fnp<0~{v(`n!kN+h8`}`wOvveJ!E-@J~{w`OG!3Z#0?GPAw%0+`h z`uyGV546Pr08O)>H6jp*L+LPVEG{q9b+m=~LP4n@f!_s&Hjn#{eXy{LaDP5FrEyRk zofMDD8c8;hUE()Hzwm?b68Kka29<@0TKMFLl^%XDzp67h4!g;Z09V)_p4FgHq($@Z z11;^9ORv4<=R?a0uf90z<*W-!y84Cf>4n4<r zXJ6KDnbt?%%DFoWbMN@$2rNP&M`+a~%h-AguOnNiQGfr<+}X(66zfdQ+$hW!g|{33 z>06)8TzxPYqx~KI{`2c55e9wWVbW|wast@N#DjZl`qD{Cbuv#m;tH<8SiS~4?my9Du&LddBre=E(-AtD*pb(yXCuM;z^*isHdqYtB_8pa z!GYK+au`iOqFl{O4+QBL3hr@N><dM+DsE8cPZG{#RZRw*+uB4s_ZVpstqnCAN2%ZXa+%2;kB^;H36ne-x%P$unZ7 zZm4)xUa-M^RPk4pm*CXtHnqmIgs;zT@MF z^HXj23j84i616UZk2L!iU;FjfipWHj$1~ZzA-bUxu5Pi+8%xb#FxfL-_Y8r0|r#{Fl7=x&v5~elm}6`Bf(N=8{y6L z`w3j7a~$OolmqWF>YMHPFrAR0SMFwXdoe;d5w$2$Y5MnWRcUy8bxm`goxY-#JtQum zgMvjIy09p}+&ivy6i`uUc5PZlbr!@az$EV~-UWPa>>?YemrdWuIyS8o)I{}qU^@k& z-L< zuUPh^z@8SK1pLvhsRMfejH-}?tgKqsq?M|IsZ6uw07CgAS8?NH>bq~E{$4M zcU;TW%fQ{2zKqq)2UgYGxjCbY3kwS(>#dE+0?x~!h=(rj(s-l~QD16MuI@47Jt}Kz zdx0X3Qancq939NCn705)iJBw{eM4X3RDHh~GqkQpOAzm3$>=-l3v(N$(h(x0RBXXV z6o{pIS|e=sSX+vfPEoq8cp~G!s9kB0F~JNoUtBtRZ`fv<&ef@_`gMo1nLqFM3(7eH z#7>=}nL=^ybZbr~aEx;F9}g)OqY*g|Peq`JmF8U<204rM-$0pb7w@3#TW;@#%(Q+0 z<3xI(-6t0{tv=Q(w};hd*MZ+P?lB#d+-~M5%gpw+qpFy-n@Gi;EX_rxpP{TR=UkM0 z{nIabGn*q_r|bT_@3w$du_V6&1*)00T7MW%#nkUTrqZWg(d}n>?mH=YUwQ96@dO0; zYWEv)f`ZjG|O{u%L?bVp<3i6ufALY5S z3(LiXe&tQWNJqvaur&>ew3<5nH8$LjQ}~8+1|~vNZLXzM4dM`Nq5F%{_fkWlre(dk zz>1>&dw=lH4T>(+MZne07tLYfR_To|87FPW1fa1-sf$60785K&?>djc6qha|hp zg0FiNsna$dCghEzu<;@Pt_c%)u(+@)nv4lkr?!f}-e_yG4;O+abj4(q#UffsDjXi5 zEha~-Q;xEq)jk5VTw9@R4=4Nc0@JGPdWlWAQNZ+7_w0xogn#K00o-q+d}AnXy>yTL z$;JOfbh4;Y$M?zMCsT!jBYk9NB`WGC>}kH9zFrC+-Nd6=vbq6R<0u@fhuVg@kr)C; z0Z@Bf^ZL_qHqPZZe*FBW=CG4cvaPnb*k_7BvO;}^mU?t{VPC|B2QGn!@=v@}hY#hs zL=5i`1ob4-rvJnF$Ki60`H5)nyyFdkLm~*RH(nS$m@nFrux~wA z*r(`I+h@gD7UeWiM-g+TL6`J*?R_-5-|toJm9qBLBq7QeCWlD$jqXJ&6PKTw{{4Ow5TgP8`#V?6?y?HCr&4~+GV?+tAE|X2r zq?N1IacxI>p9wvWJE~_8L)`fkv=?!f!6xr*gEOc;9dZ z@{OA8bTe?kdI6vbL~m;01Z@A_-wk|v1h~C=-1~XlSeiKf{23=`Y`hnpVF_2nM9}E@ zH(Tf^gGL}R#7-!216#~kib`Ih0n~4Rc4@5x!towrV|89rrOwVr=YOkDw2ucC}2BMEJ# zJS#Bl8qz#fPo?+J@b~VoQ)oHUeTo3LXv9~Vu7!^QxH~oeORfV+dAlVe>TPyK6q#SRdH1%r zotI9yOj>C)ZxylE@b_t}re<$woG2xfez4HKhf#7sOngxM^wU?aX!2A5qVBc#i-S4N zeA96VCL(%$DsT&^+DzkT^wU5FsEMhsr%TDzfZf=%% zbl`?Y1QN7jGB~_x@N%;jYer5>00Bu23us1|LD9)rA2*0-B27h_JH_|U#_FkYrI4({ z`EG&NZ1oPQ(yQ;Ed|6w7d!?Q=^b3lev@}}r6xL2LDO%j`xK)tShM*_q*zEJ zJrI8L9@j 
z2o({!HN*ju1+>za^DT5>v{$hXN*Mk^$`G1$K&wGBzu+V?W<^i>oz6`810hUguOshltN((G*+{Sdha#O0X{<4cR8tXTQB`JVABB!_Hrp*vVZoJd1&J{ zi2dv5c%`V1wmT|hie_&S=1*Bn(b|(@6d}e5GBty?b~jVGj;-Q!+y|qVehLn8PhJ$1 zS+Zxg2_&8(#mwlPmDPJ4bMGN}w?{qVHYrx_*52rFR_MSIIqwuHMGkodK%P?Xw0K<} zjFKq(6a>%P5H#HMm1!;GQ~b~SN2=&WXGOi>dsqyJy?$ExbOIsc$5cTLi7t5) zkkDas@)h^Z+nD);DN=)k>&iSETA~G3t1o~Ahcx4`phWX8eL?i2gS5zxMi>UF7Kuhf z+BT4NQI)v*$7VA^(|djtC(a%Y5fgoX^kh`4LXNuxc(Aq3#vRROeO|BFri+_q_Wq^M z>eu0>*DP@bgof&g+<|VMNECNsFHPDp2rqU<126Y#%o|TfSQq*u6wsRpfFg|Y?x55Y zJfkVi)y(*sxk~kY{Jt5*;?Ow%cMJx2;#*>kcrv>Vyf*#8ko{ou_-vRr5RDGJ*vP!s z6u!jq)z3IB^8#O6?JjsDnuc4%;11rd%pt)Tr=*ybrt@c*v*`sOcM!5^sBTjm91;s9 zCUQkJxhM-&SugJWvJF%K+HZ?U>^4K@R(qtce<)fi67Xzz0D_!4Y>&iIy^ArsW zUs*9-*!q#753^HOB|*5WhuI!(c4|gD9@!ofzHa&}wjS!@j^%}Uk8y7?6S&Twz51?a z0sx$5QJ!xq5($Z+g|(Iygff`P=Ifk4uQvuTHJbS z9OydcAbC)wmKbL@ai@9Tr>|u2ePiKs8y1jpOmKy0e}~)I z;e>`T2dT!xb2Z*h2Jb`>>S!VuGt@9MZxrP;iub>CIhkN~#@c*hXy6h}C)~uT#t|j2$%d=z^9$>G2aba%@8DA(H)nKJtm7ZY zZxFLihoJ_`PoDq4yiSY`uXqOdLyq<~9n0x$Qb03`r%yH*h^RkWvR@tr46)XWQO!7L ze?=N>4yuvUYzyzmPU+Y9hv7aKJ+OI3n_kkeb;+A-;UfCCvgvvf4S_?7oxmt21Lw|zu#ZMIwsJPMlU=8)r=q4!7)4`%RN8uXE1u;oky zBHJ<3EMv~8I^G0N?r)EqE!6ftQMq&6olsxrH`P3`p#y@m3a2~U^Xe+(Q%BxpPlv-&*{pSMHBM}C<#t!|Xmzu$Y4$5i;vp1?dP4zP&^N|SxI~B- zUqAnu3GwomNbE>DMMlEbYaa8(!blp%}%Mee79Tjm_igrd+CF$ z@O{zwdEEc`{NL$a;hpm+0Cq=N)FmRS&{=Cg-VuqfPR~dRW(u?A-YC$zrz5P%l1kWg z?ZV}2*tf&n?0Ie^UwF;;uK6vlbQji(f)Q-N739Bq~)jn#V#8W8e!y5gCDUV_gO zYv8IZwdoe|Esfjk*hdjXUDIo}6h^SVOAVm3ted`Wop%TGK##3~rtwtPdl}^!RcP=g zZoW_wOjNG&sLeY(PWE{QzPGn3$$>{bEoK~NM(OOd)>neoeoB8{DiW@^%kC*B@~2 zzb7u?;6#f{^XLiDp@}P@yE+C5>l;XjaGrVlxNr#<0?gMbf(@y+u2*9=II4%Avcef2 z;;`tKIzr7Xhl>E%N9j=Ok*IRfzYJ?q(`be0YIn4in^`$?zgN1I%*O_LeF6TZ{g{Rc z6&wGrKXS5+YYw+vQ5GAP)BJTv0-2h)d4Rdqdy5&-$B2S^$~v$nIF+R!!3EGs-;dNo zkdI=HnQNH|(V^wSJ_ab?~klDA3q%F++K!5E`rQop2a5IOY=!1Gy_uJv-=+tm;D z5c3o^5&H&;sVu9JSl9er<(j2q7+-(tUSvg{mGgBQ)i|%T#$4Vg$OqemawI+mLov)^ z_SDVty2t0U)2ipc&N}?GT#U@oNH*4W7TBeno8MxDlb}jE9-WW?r-W*dP?}#(dBC`X|7%xhMW}AkSF9Xpnq_(%q1bj zy`9Zl$<mwrn>ZXFeW2B^sc?^7^RZQ0Vl=yaR+J)AqAMFxAM#ft~fU`vs!W?u!w$qGi0 zB&xNrH9p5gBMF%oNwaI2g)!1g+)rfI9%s?kNT{n<_gbJo9apBZa9zYWX`>ul;Gai6 zAsja@J}&FjCE^#_9j9i!4q z=GuYX%88-aM0AfUjKz)yjWv9Wnbx>OkEBo7F}*1PIh*Ly(A4rgODJ~3$BBSj8fxVKzJ&s;pB+tgUhu62g7nsX$nJp-Dkgf z)>yBZ3>}9?yQvJI`KEeIQ|?qLDBScXnlP;r(!D`2HOu}t7(>KVyy#zxpGw^1fgD9$RI*snjB>mt)M*t__$ZLI}9HFu8{&)RO|0O zoajZsc|@5@-RRa*=XO_AJ~}EMAVNUBhudTdnbpGi*?i`rG&;@L)26x6Fc8Wofuka= z20!T)e~E5WMTv&2PfyvwdM~?$l{`kQ20LY(N|x~oqvZC zecElp>jVFMXH7q!F{&pJ2&KZMH;IJ|6PVEYx$Ap_qiS2;tmz+p5aY+`stiAP2t>&W zQhsaafw|S?<<82yr?8G5ow#I}YqZIo6>7i8df+fC6_9->T75b2W18U>|I42zQi+i2 zpuCG2w}J|xM^{Rb_&DzHl0^|0!bA}07LE=r4E1t9GYaEWj5fX5foq~RX^jugfvQTX ztQ`-BSv+OwmN^?WIS)l>=J{bjG^hH1aSaagM38>^lk*H|y#D&@#XlH&2wyJhb!+lh zrO_5;zgtIoUlI*c5f%7nKqyOtS*dEI@YMGq?JSg!{Y*sfRa?_v4Z*T`!Bz4 za*=c#$bA=19D6?Mil$u$agxGw zn-+d%D-;<@-W(O(Z}9M834e~eTs9%0!D48=W+@O$$-DClT}cuYh%wa5{Il*0imR5o zH6nBbVe#-FAn{?kg1~s+G$8y(yA}#Gn2a}xQPLyRbx%V5#3(nR0+m(5n9+$KlHjfA zt)U<~s2Sj|LAt3rOsBBm+oNs7nU^y?TUwSbQ1yAzs3Tz3ds3Cbb-Q|iGkqj#BA|*$ zqVxM!?H6-VG}_u=YwqPqtSxM4^QW)PSSMjyKjCO;=hyubg8NLTNaQTDKu7=!_h`F= z>8}76-AU@zSUrlp|rm>=eCY6WiG{UtHD9dz;BrTYDrZT4`M4?6@h+{-9t;SrJkH zC`M{J!@B_s@?<*fn%?rHy6I_XQXP^}mP0e6j9DHObj#BmptGA{o!@^?PXl(l=Lu|5 z{F52|Z+XLvJ!)igM)U(7_t)+fMQCo1cT(#IL)K$OU(({_!!pwb5vS>=^1!`v99}-l z5H=KubxmT%ztm5M>npFl>&-)-9c4f+q8#NEV*T~Y)>8{Q;$jtqK~B<%^;A4hoqG!+ z{xLOPEXSfAYr&KC0JYbM3Vw9g>$*L0NWs~(@8}ipzyHctvK<7fKt&-R3}GwWh8}buMlZ@(7iKl& zw6|Hr*xQk$G^xPeMDf#W@$*;#eZZM7kz+)_APNb_gvAcnvM6X`f3EL0Wb^EdtgAO4 ze1qeF2lbYZn_3k(G6sG!D)^;HXB1V(x}=lUF?WuKdmKQnOue47l-B(%5-L5(&Czbr 
zl9%A2E4P9^#@AUiBj@ag%c?xcd;-5?sALBZXv{#(nM<`9EBq&89L z?58Uq0qMw;W1i@gOA`!uL&K7X@O_YW7Ker|)tkt}i_k;=_}^SeR*va^@qcE>O*|tLAc>PR zw!LBDe4~+Hc!bmxSJO7`5cGqO1QIl|yMHVqf-)FwCgHGFJQQ(t+?Yi~#3jM0sCf7_ zvMcP0#Vs0C@sz{8dRj~cwvtYTE8zCxir?9Qk`ApkomC8keRrA)R`|rSDjn)M z;bp(JheYXbTeIAXI=YmkVDd+M>Gh1|ARe&K{C*PXaMk2dj2;wOt`H0S35J9=nJ*Si z0wsiEJ!K9@$8cvRw=^_+@+yp#r6mzGg~aGv1KJ2!z2p233;Zxl%pFYq<6ie+ia(k3ZXvR`xIj4#T1q&iQ%j^%D#@D)ZU^wWhJD_EKUW?TGot0kL4+=jh?C&tybtqQDPv zBJO@s2OYLm$+P^z(_)WcyWs{U!IRQp<3jrrW6-w(V7Cc%ljdAp{a#U&j>m{7G4eK% zNSGE65{Ey{p!fQmCe+&UJj(khU_%mQy_14LY;dNc$dolv*B~;yZInRUjH3<@JMt zUTf!V0kw34iq*T8XBag<8j| zs4gdNIe&Y%S6YAklf?VmfsL;aAF9crf@wWOH2={>r7(xH{MWb^t)9cuLVq=#uq`L9 z16&5WieA6`s=oE&8-V}5uFDM{W>)_v`s-icTJZgdiCjs8k*UD~Dt=h}&nVDo4p3pM z*km{rB2r;0V(V-@;qJ2=uY+z~qGOY+6LRy!}9=r=!}( z6NOW6(!EGuXb!6EDo!;9Q@lgP+0s5NN=I`rK;C|1)Y!MD?Jry8{tDfHAHhwEPNM_mT3u0Ikzz~spE8aOu_i(=(D;|{LD zII?iSKnvZ{oV)?BC%jPpO)G7~rfJIvL(FS0LJbFkg5XN`*qu@7)Wgo9uZeiCUe>}$ zP^^#9GZ%7-p%xE!Jboyb(a@ln%Xi>(Jv)8<4KD^OKa427ZmFbBMV=q)6uH2TM1r4L zEII^Kcyi!oBNwH5tXlSXnl&3?jWp!|JNAspp6LA)v3#2A z=XmvtkDh=3{KpnmE#eeqnr%VBEXP>^DUP+56Ksp0#9xo3Iv`rqcuc`Ssj7nhp&&=^ zk-fm|k9yjrRsVF)*3EH$Z=62WHp(xK1PG*&*BsvH;_qmvi{WD(z15PC#r@ykCG9PR z%x?HRn*5eX;cS*lEO?PpR&uC%xw2*aQS}0(d38BqPx@vo8pr3^X{iXj-izrSK8d)2 z7Zc9hB48Fb2jvq^^Rm}mn>7WdNoXwtSKqqjCRmh)&+~huvJJ5mKN6kAF!hT8JRfq& zU`|N4b?N~SI6sFeB`&6B(*Z!y*3XCivl7jsAPQN%Y-Du3dB zF^kKZGcInRFygF-kInqJ&{3Qyb9r*`fBvsyq47V4)$sq{Rr*GK3{2Z6GSYd9?=4)z z{bP%zXkT^MYJt`J3PPo{Ej8T7@tH92+~o^wIpx@kV>+eM*dlFf=nM(5MNI!(>GZd=u=LWXg@sYuGgi;-T_bodF5X27Mib9cr69Ma#J$Pfn1}$S_e%t1K z@xk+-pZ{EQdX$Txz=emz$gD$-g?u_?@C*Gyl*5bj;qzaZS(b_c1qe_ZfR$IPk}jU~ zyjE(LVx&dOx)j9sS2cdHYirrBph01e(ztn}fM4K+ zp5ZdE9iO-6LL{@YlmWsP4Ww)8y=;!C%nS#`-+=pw$`t0G1^bE|RiZ^rI0f!6RhC2^nnA=y0x9*9y* zdjCA0o>o>h-+BCHoxkB06&FgUp6fHFd|(E+iR^9rs{7p?li;5C82n0M18I36;*vt0 z!!)bNPvu4P(VbL}@K2W8wJ1Q%ZS#RU|jmlDmW`UT+qg$m+*&^r?eWP9d@*3ejZrn4&m| z$v+D}QYzi7syP`ZHW;$v6}|S~cTvD0h?n( z14VQm{Yuwhiwt*eE-o!@e7-yq=Syrn&hOBtsPbAJH(LkENEi+r%xXv+j_Tpq8x^Pj zCi#ongqf>1Ot{+%D+}wCTbkF&c7V`VQ+d_fm}w_i@! 
zJV_Uu;K%j)7vFyV3oR$|{Z7cjDM#PF6QtDSS#35uDJA+gDk!t6nHujfBQX}@kU*QB z4W%9loe+byfL&_O0#y~jrP`#PLYJB`@BR1YWip=+iV}9OvvhZ%Yj%tBn5g~E2AR2< zhFK&?xD*f_u{}GE6{vC3fzI1{{o5m}1?T{&wl}dP0Yibh3y z?B)Hi`&6tkb3G$Vk$rkxP?ikHoq`6#nC3G)cA@f!dgQo7G1*fm8iVDW@R(5*p-4fh zV71mf+NdZWf0&V^w}++i&N?s=a3`V!$*XgwG#g5Y0ZMyjz;Un`KJYJ4h1lD?EI1xG zFRaI)+ojJBjcZ|DVhq{cx+Wqb%XymbrFuc^2uxA_Ey`ZyB~Q9aHD^|88Yc~qFfN(2 z)`anwmlOR00qA)4V@Gom8sj5$PjoMR8IwD`e{r2^OTYMXQ8LEYhQ5Nl-4H_n>|5`8 z8^H>FJ^q=*Xg@ox`-cXenY~e-|I6p!qkG%8w2>rZqav86DZ8`3ofk9NcVuAX)Uk~+ z5Ex}HtUr)0z)`Jt!lvT}4REO!!j}_K&JGn+2U&{v{kIAw15!IWvY1th20Njn4ID1` zyso|#`m8gVnVm8rYv-Zpo`diqE5Pdqe2NIu=O((zI5GXG56D8R&{-R%*DNAowe#Ic z{|QbxKTK%JGL^CxH#M)RshKIggB|%t_K5##%R9S>OszOhE{3Nr#yeTbXy!`B1VQjo zI=XaZ-KE;EX@m_5Hjf+3w7#{tv9i$Z>WvgdT;I@y@L|Un04sN2$-i*_;%7AYv5`G@YXKQm`MyaP6uAR#RMj9upQ#$Zy zyblV$jx0S6MpBQX+nOnwL`PJol89h6dI2cMANZj|_F22!R244}l_^lN`o;d>!uhCy z<~G)s%(TD@`j0&EPP7(`gka)6g=py0WNVZ}n^3o~9BQ%XcR2njpl$*&=CdcvMyHuH zHG4&~i`-`tlzO%a|NQxf{^Ph%azW7Cii-QgQZ1Yy7QB~C6v^;xu-7+71gXH}$(VvS zJ+0edIs6IGbkYrt02eX_q_43k^D-{#FYYVDBL7M?UXZSE`1F0wXK65xl$;L|sE4l*{{4$8+O(@Hg8!mZ&Wfn)q|n>NEGxKHUM zj(PD^lf+HgeX)B0NJ>CqBa~0g;L6k$?IzbPg)PMN1hG@Uh|ck;w3BC}X8`~FL_bFX zd2c3F`|tpXEh+akKEpmX;n|3^&-QzhHX_R;ay-EWqg>bW{NXeH`ZP@ySm$PL^XgK}eHp#GOgfdQzi#YeJFv>zzg9`q8apa_UN#_vJy>Q^AVk zH0gV-$&OfHZmFZO@#=g>&qTZF^eJh!W z=8eQop}b^D5)Zdg@z<_C&du2iS53LBqn=sObV+i~5mC0& zG@ZUSZGEDZpy4h~Z$Ya~S?ps(gwT0B!V zOMr5Uk(npX4>`Y|1|vxZrv(v@AMH^fAT;pkpqB>!QZEnpXJ2|rn|x&|lOG7q<3%pg zVswN|$wL!sOZ@||Joly#P0-j{G$^J5KR9V|8%!}#h6ItH-t>2)e@XR}&BbBYa+~=3 zK%b&Kj%yZ%2ACpqW+pQ+sg1C>A$s1Y=@O3Vs*M0W0iQGpMU2W}wv}Ln;(2{z#+*PM z4C;9j0kGg*qb|ioQ3t)0d4*oAY17x3+EZro&2fQDk4k;~$nAGi#YhR(x3SE)O+`F1 zOSj9d+>Oi$y%w(*dSKo^3oB0R8OvEu;f#8kNMgMsu7Vm*BqS679cO^{B7jrH4?1t4 zds1U2NIV?Xjl(5={w=?d(jL(WRu|dboBHYP(xC#E?b!FkmE)-q70OSK`B z@1>pwo7dY->+{8*pMT^jaN?r0i#d-n+$d=}ELZ`b6D1>!6Mpxj^Mr7HJI>6M+Dh|K zBEY2HP6ln(L=pM;Ej2doB|s8oY;13C34a~c9#0)3Xl6ZPfg;!X=k(j(efM{>K_}tK z2^@JikgK-FWeBvq@X$&2pJ-R7M!%gty#WOQF>^x-;kzeXt`u!VRd?wyFDTuxE zzN51B!BP4bbQIyZ6*cZtpZ%-^;({AFb#D3s{Hya{N`3>v8(SphM0dx{gha;O3@1bk z=Safm9&o^>BsP?cuJIoHVWFv&Rk5OFjDC>g~7>Ew1QBl0y3h)VrWjTv5V=>6bJsZ7;(^szN(q^w~ z#o$DK=^apM7A_lcJs|_E&593e4x*$*xJCPgMIY02G^IMK>kl;pH0bIQLRE^@=&!^$ zY!z_juo0qOH(azS8ZmpKqDN|&feopUiv~;ytV)uCU|_4!VEO}bjCYG1K)bwJ6cMgw zk%vQFG(0dVv=ym4^@ze)ggt9E4U8SdN+6*Y5NNmUpEs(AqQ`I(rB|oX&=5Yw}Unsu# zYihMe{V%McR`kR1sz7;YY0-7r5aIgWX-d?z3Nz z_`orWc>hq05kO4;X`ZNV>OGe)UwYkpgt>=f$W_<&!=UhMOYNUl(?R3+Xy0SD0121& z%K5WD{$o7_o9Drxa6-GD%;wnS$lTN%!gHElm+3dAO%Iw#q4Sakm6G%X9o?L?49|pw zN{JO}c)@yiBLZAqoPo3m7pEx{p8g^wA_9{KJW$L>$qb#1l15Ah5?L@B<^qwU)kmKz z{Rz>J&RjfdD3Q@M%*?dNxenM9G}8c}1`0-o3vsItPvmttJ2h)!;w}kSzV-x=HNVh6 zwD?V39OfGCGm4%>_Ul*kXrlwbntxf)pUg5*-u`> z`H^ikTanSc?lDy+isX)y{xl811XNEccL$wA6A*dwRO><<{Zi|QkioCNX2uRX8t+3& z7u3Qr48SCB7>O}vRHv4dKF?vB5NoP1=g;2z%DXyux9jB2|Ni_})p1T7J5{Y#UcN-vA7BX{5(N;+ zDI2SW-5LQt14y^#!Hdmhy``Xfi$2b{f5GhKHRgX~1DmpI9#aaT9CFVqkgzu^N-??)Uu4(UV zj!Fmt!&bXAGke`U+h8ClLA4BTgg7dsgsw%C(2nUVZM3q}f)M|@yNAC8k9|1LF z57C~=TmU4TC&PXB-Es-79S^gU4xYt9IS>n|_7Rmc1t0Y@`||>3Jh?jbsG=(XA7*Cu zvvA>TW$Q!iilM*U3O~2pMVmVk?9r^%PX-YKlwj9${3gw}Py*mR@d`OP)Sol!N(|zjKit1^OnR8ROx;t7k6!$RFTK{s;Z8nUt7|^yf+?fI{h$ zdx(C)%XN?&NOqW)Ja!sI;LG~S#SF(?-k|_BYBQfG2Gim_CL)n6)@Hv1-q*Z+0(=pI zr2Jx=#778fo}9i~(mMkx0t@K)ON0F-lQu>McVX9TFd$437tp+F- z<|(5WPK4-#32Ytx{rOM)MG`cM(;E>@z8vPe$cd2^4uJ2$2 zM`;?dIQLnm3-TbQOYoom`r@w)V~vV`saIeF4YteCe$adD1ri49VcF$jN89=HU;pXA zXA`I5MPD}?MDQ(y$QS?FdG_uhkRTQ4qPy%t@>@&WtF@LF(LkcefoBtjWB3$Ikd`o& 
zuL3PpJb_!}Pp0T&;$M6I*B+{2qp6zXJtr;Ivlh!y$_Ue&nnNb~J_}JG^^ESWlD{K0VSvqyx!`GwX7T&?Rr9u2o6Q9*yin_qN=Zs6TWDfTu-zzc;iX^SoXFnckE zR4e^$P6kgVaKNn#tbGCCgHlUjSHju%ze`yEh6=WtA&7?$v;{aXz-tvUQ0vTdja2u5 zq>F?^rfP^7`)-t3DN$?Pvf~|GlX_p5;#dAl=WshO1H=KUnag@mjBhdxy5Z_QpJ?P0 zMzVhspv{oudKV++$sa}|K^sWi+BAGN+l1sk+44}Bv`zeLu^54!HD5R5eG8bshrl~H zaGfnwmvVTI$*lLPU;O2-|I{?!4K-U+j)Lqu#xqy0_$i8@4AbsC%9p~uXZaLsY_jdEp3|WchbN=l8uXy4&0i$bn?_c_oXGe(4OUJGDEzxZ* zLPdTK=yLywZrR1~aWXxm{2yF!>Wi=b`m6sf|GXQa$kvX50V}~&D;7EZg7ymYEbPPE z+@+d1LSR09{J2SqsC}olfjGht(|0J^>{70jcXKQqyJL8mhU*^2u)c#psdg7auIVu* zdjG6nx|*ZG2mZKGO@xdDP^z0?-{z>%z@_jXi%-$X)1}WD<^cCNTaXu7xJ<#Dn3RBH zIaHod_`SphTNMV>MJyJ*c&pse>~sP&@EVpYU}k#O$1dPy!M#3dND{8iIMtc}`SaZ9 zA|HarN>n&N)A`WZm`l7S((r@R5`_zGJ`&Gz9mH)d#M#Brr+fxF$+P&`zjyrLIIoG?qhjGv&Ior3DP2va)H3l{Azno1-oso!Ar5S0Pww8j zz7lT2rzOo0mnUQRMrIO8w&-Bx>Fy66_NmHOR5PHUC#eg3=C|be`26R!gctwU`A6vQ zKhZ?6uTp)3#SXVy+heUibU!XXy&ku?i;R+7_wn;@dJ%A2x~HPO=vHA(E~tLFSr$QG zh(wpablC?2;zf4`ck@1d>ebFs1}REHTdBMdl?{K_z(=l08~NX)5IqUhLkw0 zUC(vRqA=p;HRHdtPmZuR2S@P%sOy4i#BpEhmyWxsII0%)PW@_B##UJ~b>X$8Ic-#f zErcWy$1JVh){j%}en-OIFV54ic#E3-HN9p%vcL;LqwbVl#QOaCA(3s!bQq|5~n}7o~)z-p-X9K3ca>c!$MA!$`yj>k{zX zlxtlrICO#YztAV#!gR6Fq`8tSGzgD|?+Em0u(A5yI?jD#W=QozRNGQTEFlY6b*=Y( zcihE!Ldv(o6C6uT0Lb>uWqpWzijnxouad>Z=>~5Jly-qc}KJ-+Gafqs3LRBv&18CI^%>) zy$99&xR@hYQ-=@rLcEAuh0mz&e_SdG;%KLfX76-I_Q-!_iKqW}B~T{m6(IF=im=~< z;;fV{pPTkZ;>9o~jb&SgasfcobNE&)x8e^cpvUybnVD;%4vexgr|k%Uk9#Xm&0g~m z5rqmK(D_AO4M;|gI9+&YZcrM`Z&P$4J%^mwl1^Q10nlXwb`qSgWeBw;n3~j-;{G<2IDOt2cCmqKsEG(X z?1U1gxzEmA@l@6c;_DMB5hT`VVw5M#-9<;wfCV-=0AIZ?+6#w6Si2cX3YhfO6?1=B zd=Plg@SxcF5@JyAUuj>}NMn~U-bVido`Wk>nH(S3R0j5k1?aF%I|pq-8;ST^#A~&s z)Jw!(3|&Io{kiPnicjy@pmjW@h`);88LRFu@Tx$F69MH{;l8D zZ&A)OzK?^FWV-VoYd6L*6H7qylzsjm;Ra|uS3KR;nG3Vn4~3f9;0WG`HsR3COUXVG z-TV8+@*>tv>kNZWFCq%L?UX1$11TSGa*mWP<1Nrf>BAy)w7ly7P(cn)dZA{`)XeNXk9EAY96U_^H?rJhD3aJyE-8FH`f=;LB`!!$k2_IHEB z>i>=QSE4B!($#2a`U@NWz^$MUNBBibW&XKVI44}Sq5pbITE^23@#1Qw(#AHmSzHvg zdQkY>9;N||4)J%-Kh!HZ0$(oQ<2-n6AyG>(Btp1`y>heuya4bcQc_@<$oA&)vBX;+ z0u+fFwdQM@#DSYKOcq`lZRSyBW)xuYJl98Q^APzUQPcde(F^|Yqa14SAEzQpH2i{y zYHBh8?9-WkSgdmt=TKzQU(L}>_bvhb9lQd*`w-V--(o1WVpJ}Q|8V}1R~T1gIC(s^ zB^Qvpm|EU@*fQZ3^D9Wv#M3!=+VTB8@TXn;ZXFy(Aanbt^+NFqW@y{SgqM!4#|$a&HNq7b3c zm(4My`Kd^ThT}XPpH-QLC#I$9Ri-0z(9fUjQ?^md1E!)M?+itG_J0SZVrXj0(L4S!> z^u4^b<08RDlJ-H^VkN^Y>Rxf}`@4I++Q9tnkt1ao1M`}9=k`L%M!fSC%0l@GQI0Oe zrqni-GiwolBqi#kUOetMsiz}25;?b8d2~2Wj2Ojev}xlO-vz45g^sZo?YcEnFpRlf zoZd+J`?`h=f|jJ@ac`xxA115vhYPu1)Z~z?(SV2mi1f%{knsQ{imxiQT;`F$MAEpH6h2`mr;sUJ zBh4^SjSWm+DU}8T1dvwN`d!Ob7%$4QIK>SII2@4dF*|kL%ZSd*wK3t`Q?7r_EePx; z9H?Hz&djrq4Sfh9SNeEy?zslUNEYnmLj^dSgo`|e9-fD@utO_$q0b5U+e zP@=wug&CKpW^=NKBBp6~h?Lo_8Ih?9j$h{PMO5nHC|Kh3r6rf~3g`VOATb}yBhVas}UZJZZ>b^bjazi>cvJmC)CXeXG=r`X!=OA=v>xW)?ulZ#kYwoos$ zoEINH|HvFgRFW8>$Q!^;2NqU3OZw6!(4YQN%Z$w3Y&V7fxQSuAo25?X(MbmT#Y~H; z3KJ?^K_`dyQNm((`uoLV>z;xhWdAg!-u_W>2eY#j8#pRE8rhr28OH@6@)vyIZTc{9vqeSYX^Z>i#2$7ZjLxfUM9u4Ck%*N zQSYa$*`0jx`fa6tgQd_azUj$G_D;5nGN8d`er*S17nP!13u73sE97!2Cg!mkm5sIe z=g(frh!AhaoHvWo|ABc$+PYZ(t(Qz58a^^l%@T+8Ul*4uU3L%-gA!qlIBCc>55V0? zU_y+ww3?L=&T0Y3AcjMOTzS2Jb8!w@5z+s`e>CY^E0U+Up;0O|I*5eZDshbNqjk^5f? zY0w^0%lebgXd$5i&zK{KL*Ys*F6uj2JJeV8<2QT7O+>gRax4qR2tF8=1ZO;^QTyA7 zkF~|5CJAhTNKoJ3N2(g;zX*1AxI{ zKKgoJiJ*=jXm0iioC1Civ&UFYkfd8Ko}J(Z=c;q>#}a*}m;3vhlq9T~j^hM-M)83n z6hY>S=5+xzPu_?9_9ws}FduTE^a7#=-*-J)KpSfg`_kgv!b*2R!*eNf!1KEO;>)jU zJ43dM8#%Ac8>LBq39_$q&arqyg~<`Vo%*)))(2o z0NzxmZ)nfNpH0H$J1)UFeZ{{goazts1Ms`oa8>9HBwn|RCX*CNir72LazQ}C!Xy+? 
z<}aES*{3t>g3A~|wJ2Joc;^vhb=-rH*IMZS8TWBoLU_i2dx)=&hIS{B`2NHT>AGLx zxQ)2;R>EKeFk2ktfj1-7-`}3TGA*JvH%RF_A1Hjs-LGA};cu~OiLe_YZeKsR29rIEX1Xn?k+Vf}jkc=y3G+m_HvBcJh zbD+G5^A{?z?KaV;a=8LTy1?LRPjRte(?+>LuXv%mrVn)5uGtcuu!)Nzj#Hkd599xO zu{1gfhbB1IHB6{@h%KERZY+@Wuk{C@N-eB@lU}M z)t@4fdlixYtS(ZSkG zcG4d_MJoCikwA;I(Mt$gV}+U-zQA5agE{>j;4x|5TF2B(9w1mXiZYe!C%h@mIijhx z@Ra6Er2GR-a|j70!wYjayWU;=&XGt*dv1LJm5MsYu9S4t&!|c4Uopu0Pd)*O!oT{k z_!5Y0xB(#Nbu*=8i1kGI%_G5^Xe@N-8!H|aAN3b%388I>X`(3C=r{whp$qmB41(jv zIdo8V2!bKECNL}UFJ&SZA97I2;ib3y3ZQcp7935S3JId9v#+V6~e zBN5>F=K1$D2WmJb)p&(5%6L?VS^ACb>?OkK&Mi$7?@gF!8jH4sKeVS0%`9D+I)k%Q ziS>HT+S+f$|GWT%VVJ2AUKF36KYRD`yO;GI+-^lro5SCRnNXDSlgB76mKuw&3hbac z0Ke6+Aaj3u{w@7scBWjM^POd_GCVT$@ACwAqEYPFJFh9BKY!bT2uCT2L$0aCp(@e+ z-YJ!gqJpA3BcWq(nlvo{ceu>7*-;HGbvCrBVH5|ObllCYmntGuI(Qhlz7RLM#GgHOkyV3CsNan^PFES$>FC6rqQ-^)gq9a29GC4&H0LwR@e@D9- zwiw=8NpA9-_JWN9f*`TAw5)fvkQ_I}PBBm7vN``@nm)JISC_R=j!TxDThIPt00;BB zWXVx$Kwn`0C)x;6mU6quC|G=Kzlf-p-hb80ME30uU8%5H+&M6ct$wDsR|%GdP6KF4 zdq4}{st_$6i0Ko(vV@A8Yo%YNa)Pp;opyZicv|MPiVrW7EP_; z01vS@c;F9ASG@7Kcn|ON=sjGMxyKy@Cv->*%$9$jGw-;NT@<%2q4kzm-sHFF8kZt) z!30*n`+inOL4_!u&(q$MqiA++zNMO2tE?f;xiw0F?#3`3N(*_-Z{WX53Zll;jNy~V zy`*-y4?cl534^(5&e~vhYJvv%iFa|e9Tk;r{lZD$Yon>coNZLW^lxCew9IGG13k3$ zSI}90y4Q~=rEhRfepJY=N?2q@8wtHeL!Ajv_aA+twk3JArH&`++mK$QhiJ(e?q8+; zAYNt70c{Djp={$Cprw;d3O(%U5p)XHQJirzZ%m0+jLv216@dQYcI8^r1I!PfrFbzc z(C_={dr0k85mOuAa6^FxEV>n}a0YMd5o+tkbTB-`8?!SBN{0Sq4V5n+^-+9%vbl#K zpZ9}mIJ6=0gu->(tUW*Sfb?_cpB=gS1^U@Rk*l##-Xy6tH8p)zhiYOaPdabCJ~U9>DRL09VOLH@T%nHHi*hgDaAtBbuW?^AO8BQ8iaW0 zcDc3ha3C~oSU(3K8n$;%q8{*=4+D3nG479Bu!}9ZoKxK={&VSd4Gvdnhz1`8Cadqf&B66n-A3#3lWZJJ zZ;=v8+4f+!IN=)VT5lP-*8qiWKtLt31fH%s{Y94=zi1OUWk0nw<%$A`{Ha#Tr4Y8H zW<9_Gs-J;*Eo%xpRfnCU6uuA7LLhu=H3hVCcF%4uw6u^RnpL;qEQUqn+~#^CKA`tf zWGo-X?-6daG(%o_>9@r_;fu*znR1i*Js|l}VTL1EBc%<}?!)uH(TnaZbmk=_-@2rw zZgesokrWgC{oPkIem_(lA2Z-Q<8Dd%{>+sb|CKVqsPBash;0gyWq49mAVr+GAqb)t z|L6X&;CxV-9_C_r7H_q=h-)wU*cML3{?(rsFn<6BG83(XxKXwDz}|#;q~}Ai#2*6j zHMKeP$mrOnCwNnM`X>S^_nPNHY~PI}rxLWj-|1>FZif*XSMnXINZup3-AXX%{r4%% zt`WzTy|pK={DFSIH4F=+4BqOzi^m@gz!voOt;C{MUZ4+(pQuN^Yl>%V2dJUpTgmFA zJ3bstS-)%y0Q!w6ni2y%^Yz2QT3jk|j6e{S0=qLevYK#n)lSrqyF=+sv)AVEjOGL- zQM=oGs$y`~S(2JaP9f!2+kXHj<*}lWq=-duFsgrgc}oi$8w(}q(~d6=sLYSUuZu=k z)YE%dNzn#CC!$lgW}CRFgV^t1dQ}sO_+oYsVg}T$g$*>ugW7pJl1r%DYdWBg0iuHi zwDJH6ictv@MBPeKK(t1hv$cK)G^+HgH0(t=pYlXzKi1o->hwSywT$gp^f zp6NF(U1ai`j0>UYrqd_-b9&(`H|I3*SciVbGxR+VaFa`7>!EmmeNB{^F%;zSUJ3eO zxBayk7`gY8^S{vx!%LfvxtVL%H0GPH>1`phm~HwXs2Y&s8MomBFEr3!2Ut8Bn`>~8 zyc9|fwTyDqzS|mJse53Y-p?PhFYXiqT5B_HTH`g z5LrMGw@4@L9``MUA2cRTjlVnpwkUZ0>Xdf2-!XJ2yxI^G}jR9c%FfY?^v(Z`n zi3eKv+}c7-Oh2JuWNoY3B3lf2oNyZs^>@Wb_^tsXn>yGMCwso|=;DKYK8Pa)(r7EQg{W#%9Q~1z9>mE*(K+5F-f);y!&0PejY>aV zvD!}JHQ8~Bt$0I_A%y--(zWQF?T^cx{Jc;|`XuO>G4{5*@K!n=0^6DWW1>)b$>d#L zU-U~O#@0`)**0ZGE#@c&iI?)UTzj|==t*Ccr9&JXAB!H5A9IZGNTr6b%fCDS3$6V3 zKc#yHp^zndxP@5|SX}N}0((CsZs;`9=m;9&P7St8v9ENzFO5ni2qThwvp2K?~(&rGD&JHQi)wEduh9*-FRS%$&lsMF-Ws531_932xpBE{L43N&ds z`t6tv*N3%ebF79ihMQ2)X><6?Djm!s9hQ3!A99mYZXL!*@e~3)@8}qMB;Y^s)$M z(9F)zCG<1RSMxQEKh-$0m1Y1~7xE_}bUwWt{f4H}>{`HGw1Wny4kaLPgnyB126ai| z=yzT-!H5Lr^?&jx6@T>4^uKntqKrCq4t#RZIKWF(pDVr{;QkEec`UAA@MxLfHAb2* zY}@~`PDyL|jac-Q?w~<=l(<04piBMD-l2>{bn|hpXQXEFvt-4?lav*dOpPX1dfU#$ z|McYSY-zsy;%n#M*B>rle$R8D%cxz1SUo7F!@Lg0w9wiB7CI(QXXsYjXdF2aNO*%u(a6i=wrk0*Q@L# zlpdWEWI4L7;|p{V-niSfI0rC0?)aTV!lyv?xOgrhDAXw7Z4X&$;6|CQYyyG6esFi3LP|z;(u+JK*Yf(b5L!H z9j=LX%j$t{i=ODy97Nf!M3L3e(z}-u4w5oLC=2y?@2Az0SdD$d>IAT&)(@BW3PH( z?)wDXV|S9U!eZ@2B&L>W4&#bbfEE1hb;OtV6LH1kW?qb%VqVW-jp2Ix+&_y<`p?TU 
zJ4JZ?ZfN1~rpUZO%pzUDSz)8l9c>U&id^*f`EmTyXHp>|0dmV3QR_7K?;P1@21kDbI3&*+f)WB&?WHb^>`yT(kBbwZy zHh8)GJ78#tY^L-KXWB{X#)Mu^(k8&x8G@}T35dA;C|ncyKbjkZAOFkTZ+cCB@_(8Z z{753P0ilt;fN=2k;1`;%pRQgrr*^{bjoe3{7|@d!j;@rcmtK0+d%6yH=8em=NsHKIOuYw09L8T1NPaE_Qav(?GK(1NbRoeYYs ztlrlf4O!2J%S>km^MMlWJQ-k|Q78@z7FUTP$9H*ukalM@%Y2o?EE`uT*8mF2xxZ+Hul z9Y_j2$nGTAE&Svp1N+hbVIcSYd7O)(aY1%esMTV(6nMa`NFAAS9G6$u(+N)ZVe1Xd zh5J61XU#DR{-c#P-AZp?41Xv*Y8uV=4%nuV%9nX+?e z8#;BLeJNx2hrLIey@Ig>&fR*Wo4u*Lu(elbFhB!}>_(4`M?8at5g6c(Z4d;hNQ*@s z*#a!>sUpwNWhPYdXz*tCVcPrYT7+K@i?Y&e4G1!W44 z!3{+TqsQzI{AmA(V`&;iNmJl;!L$11VgwQJ!7)cW3}&uh6T1`Qy{UC$mqn;{yXeO6 zR9|JQJLr~koy8^7Z~Dg78=luq+|9W_q$?sawPe)q!bQ06nzg0*P~|SgI*J3)4-|4A z4gyV)yUo_=Wj9++c`L0%`4YpE6^0wdES!7y`uxavVOcoE!aMdlHXa6zi!Xp>#2 z{&1=rltRyV)i{emW>K?sG!E9#OmUD#<5hob72M4hH;?;I28B&_CVHpi)J`Py=MG97 z?CsXBx(tc0L{COs}Tw5#bswDeV(AOAW)yX@#@v{23X&FuA3r|nkF&cX^zfAa6XucZ+B;r`YP z!{G>vq)d+hsmV^Ej0)bMHl{`aN;lX#le+auO{;x2Md((b4*_XRH{fxv0DPiZQW$83 z325?CTNy|2g|68gS&RD%px>|fzhBpyT4@{#Lujsq895yOD=4OkGQIqrlTyU#9WF?5 z1reGOFq7 zvMt;3yB#^Q>?#uazxH=MgYK(Gwo_Xko81D|9%&5aByX{&MZLH>G2zWrqDp5Zfs5u(**>l$ zFL8%XCV02%60yErD`#@%O58dMcX92A!$a2ar&0e@ZzBTfeg<+EX8k7Bt&#t&IG%`I zE_x{Sut)TmxC{GvHIgaP*;lcu*hRc(sddu}rbz0>QzBQFc@!f9`QS%iekEc~zkSxc zl3Ca+mGA3Sx1=(#E3}^i>I=5*_v;n0NeU_PP`~#tYS}PZoSo*Zc+YBrqbD*?A-ke* zbU5^R>d90Jf+6&b@I}eVNK8{fB3TTF7`H9gZA@Cq(r!5tf;bY@N%HoMMIprEsLkB< zg}FDZ%pY{<4>^YD0*vgBQ74)#;Go8M!yOa~vhb=`Lk2k7OH2}v9~Z7E)d!VwUT!C4 z9Op*HlzBl3?<2@bycNx!jox!*qpGm)p=e)|}jgsTVf zGuLfi_Kg~n`}O+Wcg&X-u8+RXI}eI-!N}p88T&aRv2{X#`R{LA+txD_4N5@oxFXhb>XUW4Vx&3CAl? zV%5gclmwK7nD#4+i_Twb@d$4J5(i1Vb~~npE1;N4%EuaMyZgx`a8nbG@Fo43ckcQ~ z$l{X6-*e%_U>~5n?cvS$l#%=RmBt>F^eAUm0S2l!mirSsZnJPNL7OonamPJIk-~fP zSL9 zZ2F($H$JC|ItBYMkCY0ykYaUBO3B~39d`r4 zEpEa{r1Wf*2nFUjD@7pA4hpt(dxQv3P%-wPqrxyjTu>tb8RGTq7P(P7J3)rSetr>d z=`TN`aIWQM#~w%cHxNhVko+be=37= zz@PHHY}C8YzT>YpcpxWGOVlAC5J&zBr)zLjutxv++4qC)!~KMNtquB*PCYLf3tB(> zX&%5?EHGCZW55`wH5%(Y#XmyWBX(TBxX#0JCs|TPyl>COrk6-di>uu&o(nAROkgC^ zOtxk@@({@&A?Y8xc2>@+m}JbQsfLn~$O{)PeBO&9rGRk1yT-1e&jyDOewQOSO$2!~ zCxdt!>u<)7od<~yUY(6Uww6|d>vWIDpQmP~JJ4jAm zh4`J9!K*jI4a0)SR875PYO1)47K^2TrU-1gIJ^}r=^p=&=Ze&CEA{$IY~(G9g!xKa zi_;Xc5tqY+{R)C2$Cq1MTx7C-EJzzfK|G0uR;yY*T`p1gcDHU>c8N%3-!9t@)+EQ|M(nvtro z6%Q{j%zJ{MH>k?yn(#vYd?n7oCJt8|VX+xxPDx_9>{)jw2{Ez;J?;jT=8IiZoC;sVzW0$ zoCP;WKNSW1HKirm)l!jL2mLTlD*WUN>QEtgqyuy?a~uo@2cEz#?on{z<#QCgf|=)5 zJ}kB)9hhiHRat7TU-c~F3kDgV;`GjYkc6F&o_*vOOMBJL>51K5xV9MEzT4X|M**8h zEhK;(Tg{m~p-pNK4Tx;Xp(Qf=Geu{;FFRl%**5P&lYv~8j7$BSp4pl_cEENlQH!Bt z9br8AO^`gCManVDt%LKAq3TcgpQyNL6lUi zaX=&%n(Z;Di;4+u;ygKBkOAC>;OD8-8Pi>_g(*q9EEmr_qQN_bJ+GWk91OKvH z_G8c$z$MQj$YiD!WKuo3ei}eDO*dCiP#0E@19oTGZ`}cc)<@;1HK)%sm^ERxQnupQ z=ocv{KQE%Y2z%-_Bl(bpJ{rU>^rc#qibN_^6XMSF1}-(mMUnAMVKOEgTZ$-y1j)Rl zITWAB$bH~-B8z};$9aB>;Q4pYe)ieX|H+fbCcGb`qYj?pRy2o>dX3Z*e*`W%Hj@fg z@#EyoRa0>&kkY80yD8AxKd(3wFT3A8RsE0{VQDTKy({%Mwoy>AijLjy4)3ElWp57=xj(DhxJLKZb zl+VoaO;5i=dR!2j6S!8w4)-kMn#wUT05uY_IV2uaMW)j`idiCWqv;w=2g_VqD{Jw_ zko05!OzgLHeN(RM*(;f15$qwbdJ~}d}DgOPFo769{p9l+l4@WLSLp zb8mY$RC~{~qrWa%4z8tOLf#Prcla-k5~xU`Qmz88a$1%TXf2Je(tkmPP@B77n_rLz z#O0IlaA%1{O-)YZev!$XcpxD{3pNb(k=w~Z9Z~>df>@E@q{z_M-Fu`(Rr%f~zJ-(sD3RZO_MRz* z(}>l@@2fD3EaTYzv!^e-9G`(XM07s;m4yVZTvZDN;~L*N-|`QlulsLVukpnu;{b{7 zHQXPj;lT;T4jD^#p&CI`t-^U_h(t?$f^l2-amIW2Wet$)=xlcK;-VpZ1(I+faVz;j z3bFzA_Spw*_%PW;Wp!(i5;v8fM28})%1>^|S?FI^5h7Xi{?44AQ(|nb_OeGY@j>={ zh3MZndvEl=aX=@p7VQGfULZvS?V#;mTD+hB@&(hF>g+KDB`Ey&xutba=XM9oP*I*~ zcp-}UM5rYNo9@R+P>(Z#?t%X6k1k!Dw4fQ4a)RCB+vHGokH(6Rqqq|bF#8>rs^IIG zn7GP45end|tc<^XFP|h?X2VOud;yeD-U{H*)0&g=%qb`swWjz*X6~RII++Ij4?jwf 
zk>1xH7If&8_q&OI#AN%UiXrY6`vqt$L1fW0>b>O30@us+3B`*SnBTTAuIr00m2xR6 z8eB_3aXBot{pESm(ml)TOf1=3O7(HxHmXlk+DoGr4YT6{n1;6L+u;Iq>=x(wX&I!Y z9OT%eb5Dw_!B{iWhP0^t zJ{YQ?3qn+hksopo{UFvXw=-ongPqRP7;C9<(mc2BD@)niGDjE3dT`Hm%@|}bS0L{iQs{N-hC6Yn!Q(vGkSK~pfbtWl_d$@~tE4B{D z{Z>(X&U;rDvmM(B#IgyuAmj~s=QZJ$1S5K|Va9lWTY|RwSHTd|UToDg^#K(tOUC@g znX4g;Uc39H*jtv-0gqh^vGOZsa4bW$^YaLGSYuFFRqSI=@In6AJ!A<>T)7lqQ=)>` zDEt{ZC0cWizh4;Z$!i(S=Q5$-O}sz=?dgJ2TkfYuK-{jI2Dl*LF)|8GjTSZbE_Y5$ zjANg4o1C69LsM%I?g$E>+nD}%zH=Y9Y&qsSzexIctuUntGr@#eSYPnNEFN!FOo6G%NMz)Ky}eqJ_!|YTLhAbg zngAbXTmv=W1P#5o&ZA0ruX(D}&+SF;(sfBb3c=;k6;^PR=z`1N29tWic zmxGQ&?b@Oxg=$R1;%Y)!3~6x94zqUt9R|H87XJNoD@0! zKrp@BU~u}uGBQs9BW7|F!eF?KXC!E-=ut*rUVkBXq`x^#9-w^!lCX6AZThQ?RejNx z?Vl7>4th{Rf6YrY+;EjSa2PQQxi}NbFXwn+WQAbjynpsHK~!NU?bRJ^5b^^#UaSF( zlm4S#0>VaEa?eKFpBFbt)9ICxni*e{Rz{dC-YXYUY|GT-q{%$nrC?y%ZMrM6@Yh=} zzvS=HeTfB`Q=b37XW1Qd0S61C6j0?Z$X4V!C3<{fRm=hIE5;j1xc7^A+5>fAuPV|H zuS8IKZFy;7J{E9mW5L66t-=_FC+xZn1u1RUW;fy~GIfF)R4u+3f1c|VC4Uj1Pkc$` zMf@7X2hKtV#ef@f*VmS-a$(i1RT9$%y%NW~ueJll!T{;Th~*MfRd*GI4zAfv8xL%J zuh`uvXcS0@okk@)qWrag1*_D}+TpLf7VFvV4T`^>yjEB6rd|qnOQ)1u7#?EsMq;gp zW=pIEJpYf+vb5c--#B<+sX@;aAzH&RjPppICP1v1d#P-|beFR>;N4;c;4*8DMlb;s zK1}rhs}%2rUxh`asH?ww-5N1EdQnd??3(oG^w%gRYVf$ywaeUE>!}z&K0U|)Q15W8 zcsQ+^h)QR$ne6O}HhhS-8b{~W+I(vx9f?y@SAD@Veeyhzszqg0PIbwQqF_r(zt6N%c~bVbJB^JFIp|Q?G5l+uXE8`(0jW z$MngYfsh>4)O6HsEe_RT9u&&bD4-H+eC>q`pO2Z~oUZNrI>)Cpx{2BB;JO)e&9shg zSdXFv7J2+*SrQy+Dw@bIJ51FW>9$|YFOc7*2M$@B&P2s2S1ld2ogAGC%VuSiR&~8K zst#4Sh+cmt2Y!WCkmFuyx%&W!Nh}h*KnO*@gQ|ruzx+zD(&Xi7@5b*Eg@a+Wy}N>- zWW5stZle-`9v$-#r6?x#y&s=te1CdCpwx)r-^#hY&5;Y@k!AN~6K`E5X+0`Fyndj-&Yu33 zC!If-W-L#4S?-@)3&Nzti92OHB-L{+py2li=tic1FZKdf| z1ppF*pG%=9cS)(!MezBTp1q>-V5!EP!w;!DzTdNFMu-oa)`T~5I{<$bfGr^HCWolr zIB0UFGW@Mx(dj-R+al2j2nD?HvA2Bkk3klSt$g#@Kg4=fmIl+VYz-|o9HE<rT<8p>qZRmeV7K| z-13}RX`SL4NkNR(JV^4plHi%9H|4L;Mq|1P%LBD9+(P-~=lH(BbVbo;ZB!BT*8#CNP|RQ#&T^(oay*hbl*a^DnIl4zPGLn& znc{aQ9*-as)-Cq%^-r1pdnAx`QnWRf$A>`*6|AdOozyub?_Z9fonr0NycFfa3hJHo z%RHDjIB?S3_KU@UA$fe>^wZ45(XVX+sSK2Ge@1fUw*>@>O(WS8hx%OFQ#9GJJnBh5AT&S_H$f~#Sqv4hr1Ui zWoqh*r7mWo8ge7}f+k2&e5@J#zLzI#YUYYBzR|VWoPrp@(}ll1SkxWQso)PztJJw~DL zZK?7FJmX}TCwsW*mZ(c}*L?+X)#BMss_CvRX+iSg!&m9&(_YsILPPz6vKQo7g~W?$ zuDi$S{CEaI=y+>K9?!?+__O)nGG9xrY)pZS)G8|4ILh$h6cUkC<-=VmvSR=W$@WR4V%SYgh_A+kRCzPjQD(hydTYHx~1RYb2$`#;_DGA==+m z+X#w{T-+D!;pRSC@9(iHAf!_Ml{bA`fbDbiRUdO5I(;{Rx`rE}rDC9RD*dRi~SlmQ0 zXKYH}_f5(hWJo4k5~mY8@x+jMjF|Txr|~Ce-|=n&A1#h$s(X!7OYN-vW!z+2emU?k z`O|w_!8zCOc#@CJMCaNp=he@4hK~?XV>NQ=_@%H^>;jb#(6KTvlf2m<`|**sk2La; zJrDk-6q28tW9n%r2b=UZpcnB90oFLK@hd_Gb+g#NoVdAq&v0wTc5%N-ntihqB!O^BaYko)$ev>>Ma|Uf!yxaG> z38h~EkpcZ(S6@J2UGe7h);?9R5>WN0uirHrLps3&E45f4@w0N4gF<&-_?-EteN^hhDXeFXJCtV?^=+W~ zQSrnLe&?3=5{Q0NB0k0j1>lbqsXzypOFX zOge@az{!Eb%2NIO=Xt%M zC{w(EW0XutQgEh0a-({+zW~hJI4mdS@GrXF$*2a9|) z^A`|PKRNquPK9zUXTPvYaL{YYO&7hMqKm51mXik~Ip#l+d!}hF5DQiwfsn;K6T}+g zuLTGmYzgi=?GT(TA9y;=iJgu`P)lH+!kC3EKn0%^TMu(*)Cq~RoZuWVvSmHgOOj3; zGr=#?AMB)NI5RWjBS#8`f1y@J(1@+(>bGcqFx*!q%u%_x*d#Eeq@R%C@5yu0iDP>% zDtCi!QH-qf0<--RjRVNpy`NG?Iq4NIDdU3r8byq+t4)^SeP>T^*U$|$k2nfe->}tb z5}P+75EG!lUgyTm`I|9cY=efSmNL7F^3^6IXRP)ZZJS;3(<*b>Vd2RuS7T7Tpy+Ub zT@zg<&}pOM#4MJZ43%J&`$XU46V$`fC64==+Rs645Wi}J@+bf z|DGq|%kSda_N#>E0Kbtw1kN>~5KsQ%<;#|9#8>1UmTZW3FLu%@o1B>n@{rO?FT)`6 zYhQlniD#v-| zBMnXWecp)cd5B28b_o*9N&De?OwJRd`9tFY^kG2jk>i6M@ivXX%#A_fdA;~*Zu-*v z#&z?Fs`M6XcaN;!JeDtf?w$0DgAZ0q8?0^1dkzz3y^aY@iCU~ zteDToUynUHs-^WYfD2%moZT|ddz}ZJM-~}#eIWu$nPuWi&@|@o{Qa}{VmHC=c8v=q zify|>oFHRdKDe0CD|ZtWq6CE_tos;sqa2Ya;;0;onw-1&)b?5RC0JC%xScNf9a)IXTM)c z-xY5|%|vPs_nM+f#b)iPeP(Hn~1bp*mO 
zmU=*efpRMDjcn2)9JjGNg|?5WU2+v2V%@e*sIyQY?P?5Cdty@rw$vwjqZOkK@0WeX zSxW9td^9nWJ-XC!2puh?<9-1{kuy;Yp)Y;;l~|ZalK^0OOR|aOY(^h%^bULm7f7-o z-_Bb?@(T5};&hXZzOm%Cva0cr>cmQQA<6@?{|6A*g#$IPrD5Cl|dJm4#5br6gW5C8Q!HEiulE0vzu8_)|wh0slsF`eCdtIUB}6iES&) z=X&4ILc9>R0T36zaqxfh>^(1J0Joz+=Q07`d@pv3Cz)l!&~cd9q`19QE-X|(h?T|{ zs2cJsnu1xuhr?t0V88uw)>S!s`d37wb8dvj&i*NQk3p;HpmODg^^w^88W5VggE**m4w?{@ETLu>Ohh4XRG=Eb`%4E%wqr*4X}DPzt} zRb201k)8!PKSF5@Z(ibuoTPFwKq)L9FNh>FmRvmw~&cF{9#R za##9A8UE7U_+W(%EME2_?$=RwKr{hw4mGx#bYJHx&$X79W9FmyYs`GT^DqX-Z0}~( z455%xv7(GhGI;}Wei)B0`*C%H%|erM0i35H##d)MCGJAkm1A!(l?cv0i0@5LPY3Pa zdgrCgo~y%(Lm^qFDF=Y2nQ>1t2_ldJQ%_+A(B`;$2lZ#{eZ z`3v#Y_Qv|sYAI#B!-WMV$M0V6AJCLKNJtLPNutZJ^8|8AI~M=}02|e2i?2Vdt_zSQ z*zO*g`<=5AUqwjtigwgH+@t({bsDU+Zw7HlKB~-3GmEX9p?Vt)%_`{l4;qkmdV{X_ zlYH~|4?K|iD5E081S!GwZT^)ucDnVFh&Y`SZ;ZdfI1egt;{FQy;;~wqPo;3EV;?UN z%S5FsYp2= zy>c`w<#GmjXB`BP*97~gh_Yoid8|@Q(-CP*8tdm6c30Wg2?gcpu&Yr>KybdU3`q z)1WpA3(X5{n)17M|9))Za(@^Eo|o6Z-oO}Rr5cpNn?N8^018}FDA#^%q_( zcR#hoB#GtZip3uu6)i=WT?KEwFY5i4$4;gDu;St*J^$9T9}oTjs`tnr@|5Yoi|W7UfXwfYL046S)aN*9qcH%=M=J5NmWn*+8SAC0tczGDa2*i?aV(Aa{;-&3c!{~;OZe>h-<|y`9&KTrAgSU}01OTZu5r4;V*K+L;|b*N9_DN= zdXTmxS{`21cw*&Rd}ZRwMEuxVU5#;0!Qzo4m!GVcdl*^awC!OKd{L$)z%9H{xp)$AhPhmF?k@`7(hL>f^!1b;ac5R)*Z2OwSULAM|zAL3XV zl^sh;JoZgGeJKk(Z~=??L+*4WzU7yKl;CK* zU-r z-oq6eWNNwe64`ftN~OJdyYHG`YVMti2=*Et|)9w6&*sUov~i- zuo?v%f~QiMr6Be;y)I*T$sF{7bRXi|h(Xorjyc6sB%75QM4f}v)IW8uAIFg!g@|8> zTmHAZkALDXF_Fd@Q6@5V<&w7@!qe!GWT!-R;c^^c!xXaUirsB0cIIMMszYT|?7>i z&4q}4NuL=VL61Cd*QU()vrmoJIi2?Oir%(r70+?g$ITsjP ze)vD1Z{P38iyQsP-^Yd-mXdTUjkV+D9kX+s=M1s|dGNEd{mso{Ty77Kf*VwTL!}I3 z-b(M{5_5^jl>{Jv zpq!N(be;xZrC1&5dZm*GiGT>DT_tzMK1Lo(IY8H!Tc-6ox`q$AQfvA5LqAQZgR_#- zOZEj$mBjlkt+oAKHLBqCLHd)Ov=5b*Q&Al4UaJ4MVAfQ;5UWWvaKzQ(zTVF&-`D%; znn#sdjqP~yVU0w0_Q4T&g}TaC+tg2@#sj7*{pr<%ycXtSMtAF+|F;?j@l z(b^_gLdBVESEUXB@g4X*qr>|J3Cg=TizGl(WzJKSJE8>qRjed0KSGuJo5nkB!ldyM zOoqJm_%Y4!SzILj(iaz%XA6H3$0AkrTkET4OR~YaKYchs-ujm}I;AMRHH}yg6 z5^0I~(2BA+2MDM9glOnD<5SuLteRsmt?{E~((qR^yLp-Wx@e4};t#r&Vsj=hUy0W* zaWz*J8)(@5dU=chpmndM0pp+M9i9p@bQfJ9289<1uElR$f%HH0R^g!^4;oUDxQs;! 
zaI~>x6^>%wQ?LGcYCTP|6tciuu+5ekoTfHvc7<~6`o$A{ZGJ6Cio7YAaB77JRxg$5 zb2sF7eGXOKoeGB~6n_r@P<&3s6SzdAj@>^vJ^o61fBGZ{qM7>LcVCL11x}0YX|2u$ z?YOMI_w3ymOv*&*`UnmV-zR);J&2=;W2mwz8J7N7CN-6J5PJR_3O>4`bQr&8!`_^^ zmHGHPfS8(abq*kJ^vw8*VCIRGl(CCoa-d-AWKfDzQ^pGCF!L$S0%1Dvj`}JVdIq@p z5icuVCA9L!ArjPUf&VjusbOh>nh=naoKK7!*g(QXe=_1fEs)5M5j zYH{H%Wd82$n?Xqq>(6yq1z+&VAc%5Wt`e40b*Tp`r{`#nY`X@$S+OJ(jCgD=NAz)e zVjf2YyMhAwe#(DpEW94XdH%njedzsJY0Z^i`l*&TsMsQVP?ps{Ht)S7dgSIW>cvyE z+UbdlUNHsC(8TimqNt@OX6kZwSzia008+K6>oL%4iDs%O+@S2Q~6tb7m+#AG-A#m!KjdLoM3eWYpks?gAo-1+$|-L&15XI zznRDM+t0r5_8oxDO)6?_)qsS;G~UgJG*Gc-&ppK@Paiz_{&MK|2u%MfHheylpP6W~ zg>HK}S!2hs#D<<}lU@_Qs#F7`$2adRZCsBxDG|7~MFVmwEu*Yi&Yr|mFk6&>q9s7K z?MyxC?`Fchx(3DT@hlclbdO0JjaebQxP8V1_^77OK2Fzo!@}xD+XtyA zOg{NUY)puC%9<7F|HiO*6BnCSs39ufc`>8K#QM7Z5PnT<8FWyzg48u zpQa-BF#RTMcGrO$Oe^1ub6out1a8p9K@HNte;QM!_y$bE8iEI5K?+UAZ=41^N}e&d zZZ|~+Mbc&C$3~$O#adAC2gM$rym&E&5Uhd9fPF@6o_e#>J3d|Gv zGDAr(!IQ;vF=-|JSjrMB=;uZFpy<1xLeI~BNX<9alSF_2?7J~X^_C$6%^Pv!yo+kA zw#hLJ#yRh`@>Wa+*gsXay&(-ip=p9(nFwH&tEi9}`C9d2Bv@u)5Rfm(P%uqoB|^}c z3&G3U7DvWUe1wpygm3d%a<0lykd%zxWNN?f1(BjzDcPfuJL}yCxgFF9CBCsrDg#p~3i zX+KS3u9BY8%v`|)t11yOcJ7_R&FA!NFt{cXI^ly}_o4rAKyac|W12(NsQ1nR(pyQn z{SuZ+(w`!_lx;}a8yts7NF7&NLSB_JnP~j=FBtr zSe6h#1$7_t00%0D5oDFfpz24}w1j8M>470mFwX8UPxIbMIdo_ULU@J%Jax?m_A8^R zucZ0;Mz=QcBSQO|nB0-tq&o0$XKA%8bgTf)68=}mp{VNGBwPhip-H(uSG1rw+f!Gv z=ri{2H$6ULCdC1UH8`6%-xyinI9L8ZibZa`X_nr`T4^FY-gGJceXdy#k2;%D>i{*u zX*~#~TeHh%AZP|6&#aAy5hm4{PAM#?$do4_m0U_e(HGTCM3l zpkETRv3U;3L^+Yj^8=&}m7p3SHhc>~Q>tuw`gmM8r9nFZ)EkNzUaj~SmD)frviRl0sUyh&8 zp59NKMQcCReDoM}`=I&sr7T4O;!Eu>8C#x4E%*J#7I+@ zujG$2rQlqHrqSK+z2u4-xmT0ZqT#QnS2+vse1ZqUEU8lV^EzREsLlR$IwV zrBs^YF*rTrVBp&1K8`ag3ysPvu|gC$8$}Th^mm&Q9r0Z}^qq8dv`N0T{{XK@5kX|# z6}rg_gB0_i_)wfHJ5RqYI`Y-x8R{Ka5r^_XWptVC%^Oe|t#N2hO8byWJMx-x4XI9X zPWN#gc`9u@Y&jf#TrPU8H>xRmohXEl25R&G>*o55U$8}efnx2>n#yP^6h<9}l(O3$965KGB7w6BlIWrBP;K{J!NPpD_ zX;Z>6iKAVXI0&jPWneYSjXBY~DNdEOwqn{#3wL6E>Nr^;gxy2pF)*rUg%C%rG9mX| zR7Hg$eQVksaQte`c% zn#%e69WWw_8^p{A@L=RWw$FYrqc9VZh^r zQOnN=J9O<6LLN~brJHF2OLHkwsky|-@*PeQ)s)4J1Vk^+o!L4rd^5V8%FTwNz!^3a z3xg1tf)!Fy2LAr+7rs;$dx!Woi}Zz4{we5B)V#o@w!4gMuscB$S+k|ZH9lxwjWrkK zCUxq2XWs~dy@s0njk8~v)L3`JgR;K z$}qq?9dCgi+p^63uuSCbe(AjMg<*HITapOLQ#|Qs+RoH;s`*^kMHtU_2_47$W?OS_ z#Htl}9*+6$A&NRTn5nLxmK%;@4Yhv45!vxEd$}1f+mfJUi_%)h6E7TB5fx&f3xM91n$tLwC7$c_Fq=gkh-9XvYsa4=&NYzBCY=!mE(o!!*6B!ZDw+>PhUr%!pSXplV)WBsz3dklwDGEZP zq{fdY{VbPy;lgKMdg;};5MKU#ke6!sn?l6t)rm~C*u1mC z^(tEES;1=r|NM=UQz}0ey_b=j)MyV3&|COh!E1w=+Zzij@iQ5!nMtc~PwG>T*>wFJ2?Ddr*Xj>#whbyDK_d(Xag_HGEb>$9E>$_nl|dzDpy zK&LQvb+N!*P};ibkD;+zQPdD>T0~rQ(jXCZb{tDZqCR@;#hrBs9;lkpx__H z)Ul9zE9>vA1r8m>D%;vgX5u3opmYFHK+dC@Q^+i?l{rR3N@Hk55TluLQzd6f5dh)3(05{efC;oW$vG%g9)KJoL3cg+TX%iy zBODrZ1-eQ`YQi1mwQ*R*nl@W>J_bc3$N^u=WnJs!%8_He#DOA?e-wK*ykA5D)FBk9 zHedpCE%PGke z4l;$$?P7}=1Xn7$DwL7Mp>%2i+osS3B2b)#L~sd=#*_iRmfMj?NfCI$gG$r&T^CVw#a$!vI?)DMB4G*Sl4E`Gxl+D#Fldd^osXFF#A}`+0)bhZf+f@K;V{!xDL3- za&L$<4Y)!9xR|EI)>%1%C}ynMr5#$P`giz-8!tAv0ebvY{~O=9LDd0pW2s|mvi7+D z1^>l2PchvEk_<|Ak*HiPp3R(EKPdUZEWJXJd$!Kp#9R)#=Ora$QoIshs_f<11lrT3 z%0E4sfN!!fYZm8vPZ=)7PT3``sG8jrBES=lpWyVrOF^<8CKjib`dgkocu47O_Fdv( z859o!d8o}nw^LG4mUFC7Wr#UQ4m>wT-cW>562XcYwoocCAOmIa#gV!D)lvuS0T#Uz z3@}BgduXPbznRt(-qPEltlet)E0Yr$PdRT}&@3Uye1ypsFP^$`HGX{g<y4* z8ABO^t*c&4EmYfHY*4+d-cFi3n>5=gpAMT#q!<9& zO7vA;ZsKeY3&a47U>9#c7^J+jNURR#-{@yEnqmae^!|2j07S1LDjGM`0Zglnw>PZee;G5O-pCv^o8P$GXPrq)E$nvCi>$Y>j9E|CP zt^J4e&h$aQ)7=y>GnNFCC#3UpfB8ZTg2%@hO={DZj;p!l!ed~_kGwS0Jm4ZO5&@m>|R2*8gP{>3E6(OfhhS$Y8;{RKS$Zk@h4U>2aR{)qye+9>L 
z)9ECr+j#&EBG`w!4129SjeUTJ*b+jf87bCdFXa!1O0$kZgf^kbGd>hP3W9|a#)QeG%h_oHA2#IC&P zW+M979CVeR7}J2CW6ZOsmoH6tvD6zMg&Grds*>5wxsTx+F!6q>Zj^be2tu{1BawnC z+wq1aVh9^?lH@2=+tk=2nQOb{0>RiQPPaAmkbYq{+(@b!lI9@s39Pt^>X4oZrp0UL z0AY6G@Qt;Tk`|mJ(40kj8bfef&Ye0vMYZ57@_EPeG=IpOjrbuaD`n3{w{sA0oSC@d z<;#;^g+6ypeF0&p!T?+vKjYtzVmd2|uEg8nyMHt@ZT7Z+JeQ#`U<9y_NlZ+aHus|f zjw;^boyY1!0XTxsJ!li}tf9b%+!(QSqa)Yc zhj^w8{Z=*@7DBv=vT@8cmV&+IDH2*G%uy}@ID|S@DO}H9Oni_&0|jpS zJ6_2F1)!wv{I~zjNLVy6s6U7cFD23+`0cR~L@UuFLr>T#fPb}=CL<>}m2B2}5ichi zPTpKA=MQ`LL9d~k#&aupi4%mogyrXxGb)!EGvT0*_2F3@Wb}RNa>?2yQTvzX7CE^~ zRB4(3ga=&4{bIqfh7{fEVx9PDVCFk$9PxxQ`<>bj*#Fe7uT3u2R6ab$E4!Qsl?mY-~QkUuoLWqd=aadYeTV7h2k4Y-eA?;BVJ9&c5g;c0+S`GQ}#)oj2 z5`)KQ5(YSro7fqITs>B%`As!3PZVh~Q9jdH_4_x;V5hc`!;_kX=J6>FdgbhYgN$t?P zl;BG>wb)CXsShM^O;9k?)P_F35l8A1<+A?ucS)?OxAizC2IF0+XN1~?#MR}L^Lz;C zqJZQdMo3YhcQG8IIEF)b-i#f+)1W$InTiO=O7QPZ6SESS5w2Cf?xK*lNUb;7julsq zH*F3LjQZT`ckNg&7QO7Pm<`3(0S9`i9DAIPFz1*6S6E8-S}YAuo8OP8%vux3gdaZp zWsEM{qJk_Jj#JrZ;r&dSpP9}QJgvi1?x3JD$)$LH#(Z;&7#+B;d?e%s(golnG22N~ zU}kzc#)82J;n9iGv<3xA$i_<%jK8=_c9403lg1bNoAQhmotP}z`}tl~RmuZL45fI3 zXNMGL&Le|68iOka#Q@>h<|v%orN#KH{Yhu~4)!NneJs97kAf||pXAt+nHcnc?X|o7 z|KN>v=;(`jRO?IJ6{E=$B%hXzus(WeJbi~YoG~x3M%ouYs4XDWmHyU0{nO&nldC}RP56d6ssWd`bhi}F?Ic)j!3V(A>Z z-+g3O$MX0M|9&ki#48yU)H8_@&ZE{>>C2fzD>50G8P}gGdW70|^HGprp-dOb-=ncb z5Rj;feBALxB7W~a`)Mo|=h~ypy3xXm!=yzZgQR(DVoGSw42m^7doUx93M&&;I3Yrj zS0$Ax_vP&}g^0eW9Khq^hvO`gTj#I9ZX9-8lHShlBjHzkS>SrcP27Y@gK>5!MreAe zULD?GSk|=I>Cu1JuQ`W5*{wRukPI92Q4BN1u5=Y5x|K#$`R^*rFpa|}7cO{)#6ILJ zApl%VyC3U%ugA^|FLk zPptFA)WzJc5zx3%P($`_b<0cJ)EZCKT3SrqR-Mo^r{7#(x^{gdRwHcj#Zr2F=;$2I z45?0qdiY$oN~w?*&M|h+Ly>lwnmVD@wRAaA&S3* z{5E%2I!~$kF-*BDw+<2oBKuLQxKM~X_H~mhDmI)P)5H6*8PCaIAcgn0{@0s9TbV%` zVGD51VCQ!Vtq=Xke3yGa2!etIZ-;tEL8RaHU+m_6o4cR?vzYK=AGbpcC*Z%gqd^pv z7|l{^HJ)m`a>+SV-)v4T(f#kC5Mvy=|mxu+QGAnQYDvLDznJHkG#`}LBKWjbp!ct zV(Ynr3qYC_RMfY4xZSjJuK2>n2Zc+oX9-1p4pqw{d|0%l$v?P&elaKMo_(2?kFS|$1X@7Cjwy4iU8jzpyWD~h%iYwsIG!8vXW{@} z^A7E_eRUunmR953=nmv2i0?ug9EA3vvFX2BiSJ4`qCB+^B87`-9M%Ly3#ne9x_ykN z=pW9I+zs9NgEVuWJb61+q$Wk#^tpI-DtJlyYAkDNdd8GOomUbp+Z!_kd7(VD7-p6_ zOeNHTmZh^*biI|Wc~$E!(L!XFIslhJl=JsWFoNngTG4FnbHkL%)yo~}UkV1q+zGjT z5R+3EmVu$4kY;l;wj+92WO1(cdtXV{P;2`^{F(MnMZn1=W|6kpAqQCzzpn+; z|8Bkacj?0aMt_TGlRNi)?+S?6Ub)%&TU*1U0{CgFgef&64qQ_P>YHZOodMhWo)?ZQ zwsX3S6p4L7@b`+&jClRCy`Q@fb%sibWSktm9iTo&#;2C(7YorILXM005<;A$b~)C>fVp0Y0meN2l7( z4c0eg#_KN>5VP8J9hVXv=Sjg@PcX0=#s*Dcv~H=-#5%GQgvA!hm|!pCF1#;FtIFUeXujSPKH3J$>cH7vuP-D#PHQ7QSr} z+wA69mhw2rz|T6snhGf1?;Z7OGsRw(E5HjO=8DyllEoF2<#K6y{j1pcxm)Y8 z0ieVPw-Ba@&!%RkEL0A=yZCa(+{NfhhueBjx;Rje6Rd%l*a?Y6$f}^!p8x%`_u~^Z z&(3B#D{r++-r{k$N2@*;U4Z>n`>m)~AkR^^Jby|EON}t81=tuSrc$XJCJ9<+_d!hK z`L{m)o3mg0EZvaPhg9D9T;UeCW;+He>6ZW_!59gmz4G$Q@s6X;>G%;>LJjjPBI6wd z?l0o|pan@8+lw>CFIi7w)OfNUZCby_NQACR;Vcf+&JMFrt`1btV=3l4L^jt-)w-8$a4OXMp!T+%BLr5f!LE1qYm7jZnmLu}xoy;man5b8 zkZk6)stp!-HFUa74P_Dcw;dcZWeyHQPv1zGg^F>dU4#Q;*_%x&V56w`9unAY)MLlY z)fkj@siY>?TK(r0tbFK+j`(Q|3og#IH<1EO8o$ri2BtFFWKAx(cr}ZJyiI@BSkuZI z=Dm&*D0eMpsr*F?E{>#ZDojZ1lG}J`n z3Q^iu9{nl)i+3O>T<{sf19kO-(jR0T1goM_vVSH7<^$BnSLapYM#U%S2N32RPHGpz6 z3vG9maVv8r=DpnCC0Ol=C_I)-jJ-w#C${*DZ~AL!O$Pcb_ew2x$8U2NaP7Y4M=1s1 z(tFRp`|PKl9#lTBW)-{p@@HR;{iEjRp!f!wrq)uV&CWxrSREau>iztO&wlms-^Po% z48OzD;#br&ou}Zl*T??zzK`G%*l#h5>xi1X(RM~9Ug9&^F6q}gM3GDf!(YGT@67hn zqcrmWioLz9IO||EFY317PxTA!rdC<(AK_c94KuRrTp%9lz7t=^2Hwb42q<3oMbJgy zkC+(>Q>U==8|WsUmB!pki6{>t9^3sArCmcv49PxsEk|Rl*x>@1GD_b*>_AA!TN!Pd8_DFfied}`0>^tm9#tbS`L~uL7i!{9RVuF zQuS>wnCgS2je!gW;Ls{rka#MI5W83ihi82cjbHi+QmX|e9?9zWBivH~!2S3gUnRU1 
zzjlM8FstHF+0EA6ARHWd6QJGn<;KUG*3TlW+}#~oX{hLXQ4KoiFGdLB{JCHB$UD?a z$~$=Xa2UMv#%u^*rR2oVU!lTI{H(6&Ug0wDG&R-!qgR?zB0QQ(p8f_de^hGkNfT~> z;1#3GMVxQDv)3!8L-TE6KfL!l!}JGmp_H8Zz1pC%>8dppePS87{pVv8wKgti~>hdQxPE)XcHsFK|GeHpr{83sy*Gsm|<%!GQLMlKtJ+Dj|o+GEoZ*6TC2M`jZ zM?xVB@DKy?7;wg9)&xO7uD4q8cmvI`xsd&k3>pQ8%yiuQzHyP$Kk}EcTbjp%W;h0u zC1lL{lwx<`JIc$t^SIdFy+d4Ac6&8dY|TJ`xOd_fn7NvWH1uthc7JV|YcyyBJY`VQ z>Kp8v@?bEiL(U^bMraCkg87DtjN$-ZAO{-s>|Gc67%PmAKQXubKX%CZ)Xe zB7(c{aa0&QP%#Nb(;3v9ng^Sg1+~DbR#AMemY|jYyEA9|gT8XA)a4 zQ!UQ$ovi}j6%bRb?=4R)z&6?>N~)ov5L*T_~}nx^JtS7vtt&X_T7U_hG%hd zK%|r4`FkJ#tuGDbXQ^>=Vc(V27!{9o^&v!*UOJIE90o^`C|6_qh%V+aO0*a8KKLjV;+lVw7>w!DJ)Xoi8 zHN+qSwdg(-MHW|9YV4f9aD!IKpk*96lUN>)yf+B#pJ4koyztBYh$(aj#up zi@%cv=8!8D1JcMqzT&%$uMwC2@=HFSl57o{Ha zWK?a!q$Y5M%V>WLKt}p1np3<(^&xUXc-m^pAP7mk!6AXxXMI9Z4Y0Quw>hCf!qs*g zLfNoH2jGilr2{2eaDT>KL3=H1pq` zGu4=tGy27k0$3F~BB;QTI3B}GAK~c2Q{)HGxXDN_=K?E1Aj8}d8Cgy2*$4B4#s_(v zk&Vq&6MRJcyF_Bo<7Q(5kCU=X{GQ;MJ%SfrDh#o}$+S||G@a!yh}BpYrK(&9aBAji zOl?3+3UxEShIV&#DmDdFKx?*T7TW5Qa>k~)0O?z=z8Z^IURr%49+U}smO&9!Rulz2 zkic@InxEz)sdQMuBSdY9F*KGtcvckY7#_~S$c*1W0}%m<-$;Bdw;oa7)%XN3F(_Y8 zg5r$=zF>F65=cPHJzDz&AX5q$MYhWQI&2)CLAY&F6bb`yekBltp&ulpFOhvSC?Xjl zJ&N0fP+je&B}YZ?>f_SQk_rC0?&LK^O88)?rW7Vu)kH{tj58@e7wrX8iRn>$-wNV+ zHAVKR+8%B4*@9J&STf0D#bT9`zSKIY**Uig%t==FNxt>vmoL2H8G&WshKk<_gP@OF zu_UUCw<+GQ6jEp!`w5Qg+Tyx@Ly0H>m^Bm*h3bQ>Z++%PpJ3wgnE82IHR{A~Jkrr% zn7Ycs?S*w;yPS0OUQ_up%Qlb-V26{5x=zIDo?3s#KUtjNcT~ZmP?kJqFO&f>T#oAA zL{lw;Q>e7m$LcLI_5LAlam=XLAbb@yKy?gmzYN(MY*Cu|6HUpKMO6A;s^1dI>zIEV zr9!bIgn$Wl8U|JdfkG#F$A<_YC8#qb|h{8}F2;-NcEtO$N#Ks`@v?yXu z)XZ~f>f3nl<%^k)4I=5|qqCozo=ve}X~UhPw5_OYka9;RO9n$5k)lNY63svFV&kUf ze_1L|EaSm1_Ri$w)ttvaoV|DUlORW@C`KIgS+5Q0PExn$%SsRX4~Z+_hz{mPeZKo= z7Umtn-I^;iUbeysbF!o$sW#So+qDoD5Ja*z%E)wZH#gHk*-bAQhUibcg4o@A&%P5g z0ykvs9)Xu?A}mn+bu=%<Z<(iLsy>wfQn&#vZ zumb)eg$?&mBUIxqn!g=yKE!DzUxZS=@@!nc6&rN+bSVRfpT2OR)Ks0NcdVL4hj6_- zSqf=NMk-idkc$Xn)M=JjQteMzb39MdXV4VK0@`GdAxj5%KPW!rq!*_6#h5N9plsU- zki@J?gwxF2MWSK3nTP3i1tQTa&E{~a3)hYmiK(GXa0Fv&qdrn7tsZ%I|CF>2e*xby zL5KW;`irR|N=#Uh8FMgEiyhVVu{-0{S6}d0+v?9yy>bYjVl{67t@2n>49FN|EdNd0 zGRhK*_mh74g4s*AE0_hh1EuWWk7I*o`ak+F7`l(6!%ns zk(pfbD_6NA2*Rlr_rNUCU&<4j0dT-qpDMMs-s9i5y{h|}`8+W_2kK{ylUQ6HUswuH$gfK6{(mq6Ckl_D0|{kabGGf=Y!NAt8prJ9e`K$UhU8m!NDil z9EDfb))!1(3gd2PYS(&}X`h*x_DbhCIXL%fxCn1P5u`kt+~q`&%vN$f%QIhVv$AqQ z9*Sd}%31it{GQ{z;mJ0!o?s(-kMt7*gLWVRA2ev{FO{h9UcZz)InOXKJ+P%g(8aTD z8A)jkX<#cTmnNp-F>Zl>CZu1!NU4e}Ws9DceyNu(Tqr32QREFlm6C70(P}IKpw%b( zMX+DpLmPACssXnMWMs@+N{Iv03x}W={Sk&nW>DbTvS4Bb%6T~4_3*VCqk}^DQ0|%; zF|6R!g1aSt2pY5F?IFp$TjYvdX9-H^ZyH~PHj-$zkzHTZL#0Hx(;89x*nGCdrP8LZ zUiQqurOS?qFG}+(M{ZX>w+uziOsDq5X~ZM^Qu@iBf9>P1=^wuRf8i&;w$*@DQ$(+> zx7r~LFHcOzY!oBOeHSXw{b8;(o*uO9kV-4P`{`dmdOB=ErJzy_3Elh{vj^6KW z=8C0>SL~vaUrTsyRGLvTFkZZ|-ZF`scwDcb;8b2V!Q<6(gQa7@f04aFIwSyF%nv8?Q(IgrlI|P*KZ({`5*hsgggE-D z<>*!Yd{hp)w6>=xp%M!`d-|m>f8P9ZU67AhA8~4&H?{l%CPm)&RC>8tOhUB2?F7Bw zed|jxCRH)UyV-xC#E%YypFOGZGkjbnz`EF4S`M;Zc;%(|aqebm^r*6_)GM<_Fi#J!R5Gv88g;6b?KtH1ge13ch6|WK8_2#cT|5N7C3?`=lK9Nh>qb+ zmC{@@=+p(+>IrZ+Uz#*V$6z+E9Gw*R%RUVLuIC7@{vmFsZTp_U=Z9zS z`k-(Xs4`-_QqzT88gwvLd5GEQzA7V9uDuaoee)7Q;3x`-t z{X;oNVv<7sIoJQ{)eI5hRp{>)8?${%sZm!$Q*^%CFZe(S6PDQ4JPc^HY)_p-Of)J! 
zI^^a(GI~Ru>#7lX=?8Q70>F$bR`o&?mm^nv9jZZNR1l zT>K!A;Opm`9jWa*H#X&%#e1d+?fd?Mj`t80sm`kEg#u$2JDd~mQ= zYQfn6oTFy)WHG(ANizC(E5x8$v79ymG`=f9@gGs67f0vDWFncNo{Dk_9hGI$5^S*k zqHYy??QrlJYdNTr-z|MoVC@hJ17TNt}>Za0}+G7yRdMTKH$RG#wbKjq%M#I*N&s;oSMhCQtqdUVVfZx z74;cLWxxQP%ovyqLSfVQ6&>uOuT<;4hp26e2%+5238@0)Oi`ZNHepTQBAo86@Lhqq zaR{U%=xokh5O-giC9>(hhNi^}LNvuWxS?@Q{1aw4?AY}hkx&YHU)F&~zu4oE1bt(# zROy}sX6@N$zw0lbrQqL(O?*dbT0+`X=Vw$eaR%7{XiBgosYYnW@U|m@2C>)5yu$CJ zeZRGkRyZ4%8dOlZK&rkn?^|GZ2|DVv=tB$~J(ghb@WpUl@E6}0yr=ahq)cH?3U?6f zCK2oE5ahRFA;}Gg>&rHH_=m6ipDl*zOGxEqrJ?I@xOK>{>ISe@ zrm}B@YbodsWPI8Gn*}eDEQAxQh|88p1Em~??Ww1p)Q)rMAqKt3YEE7s_u&E|e$iLQ zj6W-;0I2WlK?>^U{QIS!g8pu-_~7A>uJ|jg7O<$S(20xN@wTO;XbpWCEd#~>yjVXMu7QBc6~0hk77K{n z_t6j?nD94N>I?pog2yRT&x0sCHY^RX5C8N@FNK}(z^x*5gu8U`@adQRH3aGtE_Qxx zbtC#~6C!?Eaz)FbnbHGFzPSnO5fN{>O=HVGv>BapW^HV+OZ7u0z0LsTkzAg^RO20-!2&WgPTS zEF-P4P*>fN+D}3)7J*N_zjlD~VM|F1et54*sg>=G@6F^VeoKj?3!MQNYhvpIDpTGn#agb z6Fto=poL7s?r+U^L8hc-kg!jm{pTcFe2*)Z>WjytQ%I=Bd>5f5b zU$jvy%3k~1y0=AD0dH1l4xzJVYy4eu{G$74T-ZOl@`TIt;Ng5@LCa}Tq@+G~to|_4 zPh(S2XG!+$!$G2dSmXq2QP;@5xggODfy*Do%#H7R+~{wX3XeqFeE?BsXNzon3>6d;AEEyCS2ecH9g0I_QM|xvxF9w8v&86fYBC4YCsk3GGbfR z-cCUWD&>sDpc|*1H2~(M6{(VI$Lxw@cJddVnM$Cl@XS{Wlo2oDy>$PS773EaI$y-efM3WS)Nf2hB#G`SEFAEXT zbm!q&95xg7*?L}anEm0KotaK>h=V*xntp}S1EbN*7sdgq%hzFu-k=&Ns2Ge;p&6Hl zaipTka+8Y+)|~ZF?Hs#$ShOG(>K*YOiDBtql#i#uD?ZL`N$#Gvma$3n$GF<$opW%f~|ImX6&%UDLiREuB z+u0~I%sJs=z2NG&KjrvTLngw$t^5A@puHG2C<{v)jh53%%_uFWcIWHVAiEi^< zqL>gRu$68S(6PrUSW-Gl?2h!Ut7gpb_;i8};Bn#Vh1em$cDMP7LZ)xMK{o;#eMdzsV^3I^r!q!K{ZZNHSJgBTMru!iy_88K6akzZD#DmoERA zKb6P=VT4Xrl3<=n>UWKfSr%s~ol7WuV?Okw6Vj>y7NK!4hR zc>0VzL|i6PKB?LPIU5GeFoKml5Cd^5-&t@^lUitMF-j{9UsTTCkZu_j=u@TA?7Q08 zI`ZQxgvz4hC~w<1!URVfW&?z=v93A7hI@U~VrQeime4f_41c2W=BIAp)|L@vVEQ-A z`eN}(InmoDU*%@yAnenHW5c= zm2#ghA!auY?gogF=V4Dp)RTS*Np{v#JQC}pn1$Gru3)HeC=m>w*qvk5aQ|Kr>x0WF zLXU?W_2wqlH*UBPRq%0A{1=JXK~s~m(GFp18Xz?dgC2(+KnA-A$Op4KWPceFID-+4>yjyDsiiH^!yj z$+iqL0WL)qXfNo@?H`_LCfsR*X__{Rr5tt*{Hq}ag_eUSa4EzWgK9cXI?>6$8+_nP zbYMo5FDy4&TPjq=Q)#%a=Ji){tV^J&hny>G3&pxYl;|(3omp_(l z^KZ^S@l~=?&GzoDB%-Z= zN;_~t6p6``JZ<7f7cg>I3M{mlJ!^aD@8nDYBGrh1 z4uOQzMj7f4UO$}YxNc`Tgk2d<45UyF#S>ZP>^N{8x05ZuO|XQMnZP+f@xXPFs7(6f zf;S;<5cESFRl7^=;~MmA^AuQuFM#1ZLUK2Wb*K9qrD+>qZQ{#d)5Q{noNK@ zI5VxY=N$9#*Q2~k|NT5UzCH-W-ePnlE|W*(HJNo|zle(fiFNi_+SA?RCX zKFWDBi9Agwi@VL@8(U$2w43)^iKpW6FTeSV!54$KeF^Q}3LxUM-_qI`1#bej7G{|p zNekzf4q1dhBy9(HZc^<8o%u`w`-^~sNdK<}QD-w-3)-va;f!h&`#{Ijsy$ljxo%kk ztxof93V)hLC@MpTw}Pia?NC^Q1}GzSpz=ri&X%j6Xa3Mv9j;Q1z&s#A`2M?RpM1r) zmb$l3&cEZ6_-yXs$@tu?CtMGUN{LDH*cq4l%4fqw(EVY2^MY8gpn zA~3uSd>zQlZ>8YrP2ll93P`H_kB#+4EaQ8FFV5fgUyMypOU!HAhghI`a(%U~A11^! zKm`=b_#Q}{l)e`Cx4d)LZJUj!TrJoLg zo$5rG{-he1;6t*TNU^30uNl-$?91gVm$mM#BSX>(a9v$aX8HQ+Y~>{v)w#svzk>g( zMZp))11+0CJMNUNxL~X;+C#IS=MkVVeZL8!-;zL<8ibG%-=@j&NxgKxpCUZB*>E<) zmB{L_Be@ogfd4P`>J?leKCQoiBEoW=QrR3NtA&-(1OPCsmchf9f28liMrr3w->vh} z61DZ$Hz?WC{YPS=kTy%;@E+N2?I$lMeamubh@dp~cu70aP5{Zq{?&R+Opp5u!75P| zs9&u&6Eg!dw^0O1#Ve{+53~_S20Y zMnczIf~^SdE><^u3&*C%l`3JPg*^TcGm3PP?;b8O3VG8PsGyb7#l!6QqUH);NvZ@{ z9`b4C;OWHpRjtKBC+HW}bCYMv@`%-^0KI{I(9|lkfuV@typPHPaaI?uCvUdZ3~K8v z!WD%AK;Ht)(d>okK>y42hzLU1G4GFC-I!SlqpmVHx45=koAsx#Xv8;00#NOP5t`^> zK!S>oR!HsRch29HjM-)yr~*yuPbKGs;nGa@>ssZukLQSh#+QCpG1OAZ)Z&$ZhGhMB ztN<>FUuAAd>Y<=R5H#IS7N#m8tG_+}t#*}K;x~1snq8a_Y|KDE-d;`gMeF^D2N`&b zToa?wm$@$PwyZGL$4@Zlj)ZN26hWwxmKUJ9z;+c@CL=&eguaLk*GuJM93f+?^cCZ} z&8LTeI>jOWOUv&iMBnXpY||4DE`QfQRZfyL&D;E+T z{c{jd|5e=avCP!}Pe~oMp#TPHE=2~`MR%C6$~v@mkNg!;S_E7hM&hJ%B=uu%0-DfZ z>85KXqo^a(;NkjEfqO-7HTKUch`xqgy;7HKqA?K? 
znALaY$b5vGiAY1eajM~QVBY56pZe@6^kJ$Q-}u5t&1HRq*u1_9t60I>_-~;%IsA9N zpZ_@2{8^XEThwF*c;%%P&3gqO!&mqBh)=YBR$s${tHzh>@8~4)pCPonM9H(zA5=$XvhJ5<~CvS|qY}^H+ z%lf%Ip+`*DW=y0h=R@Ly0Wf~m&uM3`7XVkH=c|1W#*)*SbF zrTbOo8>CW|%0(_>4!ucGC!Ro%vTW~0qXD#tle!zEX_3-28V#U{L&In^K~SbE}Q=O&&$Ll?dHNhVc%8u##^iw$FQs#iU)yL{l9nY>X7RdlD@$tVCf zaFEc}4dw|fR;u2HNX}pazaciWSqp>zL;CFfJVtkicvXDx#PC>(6due|&Sq)npB0RU)Ou+b@XM2i}t)pMsPbJ#!7DlykZ>42ltUX4r z60QrO0izejT!%_M5BOlC^U`&%)0>E(Kc-QwWw%2gx^@#0vR2XF6_Upo(#mo(m7;CX zKMBRF428}2+{@?ekJqo=GSMhP@JP3BKoy(Ab*gAW9hX`j8;$za#b_3flX4+kCzNyw z{;D-@r%xZ9sPBMz-!==AwIDY=vqe{GPbmYMiPwB*H{laxDJH)bg~m-o)ENm2MaM-& z?lVUk8QnM+WRHD1K@F2_9LE&|U9gco|6l~Qkxd${>upGoc;%u$vsvJy!5fMOw&(Uh zI=bC^+K+=pa0yX;a>@npvsf9b8-#C9_a3>5 zFGio5;lp;~50$cG1>EA$J^vA{Iev8VsZAA)&`w6rBY`4{8D}SWAb-2PllrUtu2_CF zD_%4AskPM;;R*UyML*M6Y7B-e+5#mI`3iVqr5 zl#G2mycD>lm^-7#;104cf~LQV9@k>9j#Ss{w__LDyT_ZLw;W4?F?+|}@^C=gm^1}R zADxJ8ffVIt&rK-{2%xA}CwjLkicunI$&LX`3NSQT*U;K+xcjw1wQ)S3HqODM$mK>* zHb%aLB`?Z@_&6xdfNb5&+6s=5I0>o(;%bSD6AN-f$mcT#>*UobJ1c@v*;XON`IYKR z>@ibN14;ggIu%biWOtpOf#?7hBW6MA2hu!90rvJ9q04?Tz+=oSWzpQ>V(B?2yUr#r z#BMTSVMi%VFS7GR@l%vA5g;13Fo?w?=9;UxIK9H+5K4=WR1}emg_!L+g(GJWE)~0F zZd^(6$c@vcwI)=Eq7sE}1`D6%_rO6Spa15$^B3*iLWmH3vmb%RYjClHb;gPRf7acs z^?<_Km=1{0i`_0<3JQL6@`a5=iU=ypcr@^1 zC9i@cfS)qDWWR9X`h+xKMRKL5Kv|BbCgXN#%YFQv!G$fvc5ykgH8>{C5%GIW&Qi~JB%CZ-w# zr*|~lV_S+0q6b`eDZZTEdxzs}^IzejPX6)2ax2C_l_kfAIG24GSkLl|4QK`b%$BCd z;6LKuAG>V@HR4X9Pf^BL^)B{hYR>)3_?nzcMPx!D%@GpBmN#M24LI^s0dfXPgS1PV zBn0BSST`UpQ)s)ngWrp9z)T5)?-$b<0 znD%?(hS3eP$?b4RjF1Sv17yCI;wdY)tFg$#LNU#Eg~*4?r{$mn7GiSc!K) z@z~qRHt%~%WwEQd{TEtxd;BNOa4r!MBpPZ$p&La+^9v!9efIZoCg#lj_e zXWu5kQBtV>GpnvhZysDn!t+F~eGI0$+_bNt+d63RbXja>!YMZgo1eAhf34Zf5m(LRpUuid0LA!<~E0OpIlB6$FYH=LwT$LW#E% zf!bpGA^Q-4-4&epwkIX4$cpTLlZ{(f5`A{7Pu&e6dmE6>?6nZQb@-?936#E%HC*Wu zMRJoKEV0uZE56l9pJWPrG}f#|k~)V2xQvIMh7^hD#k7F5@&ct9^clM5fzz6}AV>7B zkIxeeR?<8mDc5h@Fd@XkOu4MeT*VfJG)62z_e}Us+g%jDJ>)J0L9s-vPkZO!*miF3 zp^XbXeXVitm497A%O29GwN@KTF*$KrK`fhdhN8B|`&du%QHPr+9U#R_k8to~&{XjO zD|eqnIG6BwUz~`gXtv$1P=T2HzU;dn2{AXc>eZT!nPX3q)$|WqUNPyILj80K2?Vpu zT_0ja;)mm8t9|w6Wj7K82~^}E6t!G0lmi6!ZjOO@cr9kyK&vrz+h_6__Tbq@Ero0f z9qO6VHL5%?JJUfVMAIgd6RFZJsXbDs>oFWWcAV5=^n|dBmjPNvUlA+=t^f}_D?f&a zDN^#Q822ibtV}AF4VI&W!!_91ZTvOPwA}&n@;Kz>qPD5docw}#jOv3ezHntMKC!4G%U$Jtir)>S7Ff9ukVv03Ol zT{g*##dYJZVwr5=>#c=ayo%fPkiO@(x{p$`(mfSk#-kU|dENE;o)J-xp_7@3Jl5wS z_H2mNm=&P35Q@e5i!d@la@kXuAL&}Un!AsWy&_GiS~9lFe78bT^7uDD;MRlg+eE_~yBHyyY9!zjd|EO2XX4g$^TT^4e7! 
zKo#wuTp+bP^3!;4AuRNngrP_q1v5iJXiej(+kPHIffBkcw}ie^Qrjsvcf|vs#9-3- z&^cn#*|pdrX#;>3%ty)!5ydD-yO5v7#K2D{S)0Kn|z)1z88)t^pGjW{&NuGcC zzU?=TJ9mmFj~46Sv85**|6zJWA({=TA^L(rWuFx%sSQ)*+$UdzXv~3j!I=$1oQn4L zy5th|884WI>S6<;tPo{=aXrbdys2@#DdFYTda(D5J*Rk9se9l*p_2~VkLf(c@|u9$ z|Nh(WcoRvib1VqTZqMjKR3Ty?tM-j|?wb^>wb$ZIi4=?Ysq7P5RkDqkU+G$cIB6|@ zi3bwsH+32TEtw4Cg|7NF+H^E&kyr(52-)^usJ+a-{W)RmnwXALVPh)K{IgSE>XgdvDjqGiFT?QTh zdZP{|ztw$Wt2cd|l+dH@rsD;tZhEgM^}Hg07BB1`@(7Q!vV}0E+Uu32>ukrei0-oZ zO>rC>Ph-1hu&v#$6pl8w`0zeP}g`sW|oZ%f-FRG+;B zAxgV=q)X-iqb5?0guns(gq=zJ^$+RlU~kAe#zaV7iB_fD>9(MiRx>8AtC?tc^yq`Y z=LfcrLhYiaY;YWWPh5jn1v?wg#PUk5VUtosoiDm0ee>2egBYU($lyXxN47|UX0p8w z$#rVxrZ!40_pp>gfmcq}mYJH+FtW~;TZA55oqCa{V%l11 zN+Yk;@2+a9Ju#F3=Cs6`C|A*1XWY0gc zB~}jyLhx>Ok!<3|b}^uupfPJtD(r9xLvxRN$>jTZH>G>0+YHktfPxO}$mLtEm4~p8 z+afP3-#kWR*-o|}Mim)_j{vh0nCHFG7o*QYm(6tnl+d;={)W3sxu{ema+@7bZqk4@ z>o)dtuf1YZeEF^WZ@&5OL*A5BD1}L$9l9}f^HwZglQVPRK;goYC9Fc6ywcB}3qXZK zkJ8l&w1FP9AI3MPD-9dccL@CEVgMq{ZX*9#x|;v4W$OsM(?|PG>kI zrljsOwq@*zU6+?-1|^~(WiRzcZ; zGW!neolX7;0kjglw?|W$hxukd6`q3BUF5r%7`DuGWcEu4x->tsH>a+}*GZ9YhRcVU zppxR!?ZV^5>OtP|Y%@mRrdk6edB|9;9klLNKUeJX`C@j2K*0P&#YA#Wzjf|K8#G4b zqc}jbL&c@zXUc7vh`qW^-Z-5FOcDBudJq997a6F5C%YbgFGe6k2iqZTq-k)p+x}o~ z-^><76%s8oQ<@OCvlS@{##pejEMq6 zjN`pOeDVZW(B?ikq}0)`tus{QCA%HZ-GB@_X@!OMM*CPH3j4&GU`QdoSe21CoNws$N>_?F zC1XT|h&Rt%=QL&&uUc#~lr4w&&Bg4_1pd>*9g%H2>H86>IJ$uEYKm$!bHYAjR^=8T-w*!1iEXU_hIXf!wq9Yh%~H~V;`G4F7%2crfQ5wIRnJqfYn2= z(j@ls)yuh$C-%6#`kkj~;E>W}^W%2!@GzBzwl|4b1GHtIV=RlrKX+2TXa+4*>d?R zhf8k>x%N}kgF6WKNJ(T9B<4|^l1RTk0X}MH71K?WTJPiM3mWiC`xXY`E*L|kBOa~* zkF08(UK1{K&GEkN4dlo^Vp!Fjf*!=K2*T|Q6;8)bY}+vFOb?3ClT^DsPK7)19+CpK zUbjyFvF{P0Yaa|fW3ZMBfY9_RY_pwF1c}i^4RQg_h=?|XS{Sr_rITzRhyCd^Mupu{ zo&g)06GhEztP}6tqJxLTEB=z5QpesQ*xFd*l8|2OKeolFR~GG;urkyCLZDvz<&$2z z!QAfq`_VI77b=#&>a2&>V(Or~=x37X8Y6p*Ay*Xc-vB`4p^%Jt7S@pgtiYDPe(UT|qh37ws z#y7IDBX2zw2{R5c2AsWMrBPpM*{BIW`4&xGxo+dbBB$X=OoxmCt{l$0r^TmkUbUTi z@xq1JkQKT33Rl=0?$qLK=uh*a0ln5KB@~>%+u$|16mzVdZs4&PZ@>MvQ)Uz8O>JmKVQtjA210gP$EDgg+nsCZ0?9>!&d!2&3$W(kRAcN%124^AO@&`#z2( zMyoB}gL5f6Pf1HbEZ9d2d)MbNtSWsZqQZRi?46fPSrCQOLfaQ*F%O*a1S?tn2nVTR z9`|-L>2wl5XopN{F#~sWl+e>Q>EwMINB@)w&}OZ|x%4cw#i6bd`*j_X=!V@8@TcH_ zY$4QPW|6N_R~6i=aME7q00%}I_wO(49{i2478KIvv_m#+@8pV~ui4sQjfqdz)&}_Y ze;9q>JHzR>iJ^Ls&UASlr@j)f4C5;Axm`?I&4ziK8|DscFz`a_la%QWy%h({Oj;$FTE6dOfrvI-k!*HLNsA#6herz z6**eO_tViA_EPL_<=B2@D>iw{*+*|)yB4~+fbvurVXvYb{Qg^S+Se*!xIFgD*fxf|NiEK-MY+uP$+&#KyQu8JR6IHnXihCD>>BSg8-qC#fQbgeBSFm=n5Z z$0Hs%KOev?#D7OWu?O<~clQzt|4fg{T?MF827Yh^KHr3~G}*PafY0%I%ts)`{?5Kr zoPMWiUsA=C=efDYK!*-vd9glcTk#m-r0Dgq6GDjV4q-=sH$}&1`rUzx6h);!#`*ZI z2{#YKwAxLm0b(CO{a9`rIccUdrY1SjzhL*C$1SFTuZ>GL`nWS<2>D=(=$U|wm(~OoQZv&%BoSiTuCa+$L=e0FB}4g=4vPudR^Ku3y+qzy-kFV02HYno5~D12tMKaDOo@Y1wwZD;(sH(V*N7-NZP3Cv|Uhh%qZ0}Jo z(_Js2RNv|w*QRXMFtAYW@nC^SB^CC293Ys}Z0)yp-TDP3hk;}>Ugvwe@szc3r>=Cs zyr3x!bg}BV_6HA~qDc(EpV;de9=Am(0;+>IAs&z56b+GF8LO96ZWIaRlE6YzhBv9E zN#!(S`z=|GYX#$J29W5=6x#ITOf)zR1p|ORAbMsJfM+so>p`!Y!=6oIrnXRP)$C)C z%g%-*R1S#M*teMN%x(J?R1rIgF%!i-a{We9Th@yED;4l^zPt6sFTh_ap3MtFzb4E_Cd zVGwE0_8VGv%?~d+$jRh6^8}Sl;+uVUTCw%Rh#_>>-6CNgBbmKxBSAwRyXiy5edtod zg+Hd{4=$k>e_&H%_X)DcDr0i67fF0a&%V3mEuBeJrG)xLvd%v_`5+b)MB@<*ayE_S zX=fWJb@TD)_wmV6x0ADdvjCo=Sb76o;2O7_O|%6il|6NCWg+H#{nmB+%%I zzq1yQHepbD2c#up04t#jODrua6DOo5L#fM ze3cdXbn?1i?f;0=Su6HsS2efASaZY%2vE^zj%J?%JnJF zBnpUdYekpEtrgLchAudA~5$5af{*6N-Ku91}4R=JERp8fr~uu>>f-bB{g{Cb+p;q(A3so76 zF!oQ^62u+hu@X2K9whcECGpa30+1lF&gboI9JC=9M9jGmNO2xw;7W|dU|ITv2%g0n zj877!=dQ8knV2~LqRm44lG$9SV8Fv8>=n!=immMX-m`bBjl|fgW`i1cZ1h?;3Hvk+ 
zA8*_5&ZPZkg^qK3Wc2LcosTh5h2pmzJF7lW_6$-V(Hcl@iYQ=WrtpFP`EMO>-(F9K z99AA~YwoYZxCw2@ld@GG%16%-SprYP@Mvycb&BDp$ggchrvcc+=vzF}h6|>2@`l|p zXGpe+4er{K#s}Sp_BrUB%>v>6RU)Kdt?k!S*PZFAP0wL}2I^vv<_pqAjjlYS-rd;< zb~(y#mTrf*?js+VT|GD4fqAHQAzjNzgy1eVv2_`vwrtqdDm$~^0XW}HAO=ztmI6js z$6g7&);|g@RlKLeqgY5F1WE2@y6p}7GOoN%@;Lm{2T$+2=(N{d ziRdtHCOjW?J%#Jo0q%WX3x5Rr98+!bckxq$Es7O=qltMNF=?KUVpm8d0&r;8(`bS3 zJ;kNU`uop(hloeynFKZwmz~MuZjf@pFL5K4jw)T1zxO(nNV{%w-Mn($)|0r>V_oaM zD&XjC|D@PpevDZy1N};hP@#0};{-if>=z`o=}x97gbO=j0;-C9?OeMNe;Kiu)fnVW0H2Pr)X%+Lb8?!pXUtCh?vb*wfXyvI)m zR=QXUe?HcKM_-gbe;CiLa5DXkU<+bdH`}=mz6X9FCNYg2l71rMV9Wmi20HsQk@?m5 z6i8}!&MxP&bwPj6h*}90VxHJ@NWeJH4aC+x({Q+Bs>Lm!37a^V%o!tkn3^hVs2e5C zhSP%Yu(f_7b_if+e0+VrYJ!Tf8Y=^t0zF&fAYhV^`TL6mNKH%xp(bI->I@Z=3Sk_7 z$GHQ!k!Y?&K>7O2?1Lq^6KiHoN=RWhYQ>G zF|-sEKiTL}e$5mCEp9j97~6!&o8EEZ>4fJB0ZI^`@Z|W7t4TH7>4~?%=DtmEDu%tU zR@Z(5(le>+t~uAvm0!67uM+C*7moW<4EgLB)EB9f>4Jcy+*sa2LX~S$TC#cC{+xxR$xpmDu zW|Gv(jCUA1Ku0xO6Q6~p&;{I26pndrT6`cL78dzJI#~ejOmAvI?dpz2Cd|fK*il@g z*SzmuRs1rpHV8J-7$<_Gv-Jg1;-Tu;*v+m?W1IG7I27ovvfB0Z4E#PC^bg16x@3uQ0 zl<`~Dq_}{leTl#nZ>3xCb%fG{0@e10&AzoYwC@ulpad{Z`&^^4a zzE)CIzqWy|p?u|2+niV0IJE!g|Eud3gp{-8c;ST??T?#%zj}ol-`o7}d=>>wgho>7 zZ#jH6``vLa$WwL2p7~?3ZN7?yrH=JUx}h3UHm(K<8aq4StN+dx87~`XbsQ1yB_)&5 zu=ZFE4K}gk!q>W7!zmfTuQz1&g`^6|K?ersp#lulrH8bi+lFgo^AmJOvEQ+A6`3lGwj^A&e;>cS#BbyN*g^p zqDxLp1$`@iPygn@zJts*0Hd1R6TwO0tv&I5kt7R93b)RC+&bRW1+Bg=GN|d}xT-KS7iY+}thB89%zl!ql!G_uvv43VkAdK1#HGU4!gi#~T33|fqugW; z&LvViB+hyUg`#MOTQ&`ue{q?r@fBe#E`mZk{95_A4sO$!6pZiwf!?o@|mShQ46ow(GraFErgL5;OAZ7|KnWBiecVluF~mgh%2H!(ucgNGQEb60m%jE|oJM)_ z%1(s(=sq69o$+Q&m~h$U-I~PE1c-^mB9ESZ{pFAd>>E-gY?Zx|>OUC!?8_K)T+I=I zQB)Z15FBtXC&{3K$UcZMeAhk*YCy%PzRrY8iAaR}DeR)8_Fh^s!bieyh8p3b zP)x|4PlzP}5q9w+<>Z=>PGjV3*++M1m27{;itsRGy-{-r0ja-VoP7B|`d#ce_p@Dj zK)uIeHmM1BlNjMl_mA)doN;%qh!J&<1#aqTA5jW+xd;75>!#y}S)E3xY+?WtJ=X5 z`bp=L22d=x``SCu5y-WUDKxkJ`|w_NL}L7Tib2zA*HxZqqo0JJFR#fe^{t*hbp~vH zI36+#Vu>-V;lUX6{C(V+0|>I&c^l3KdeRMXbRMa0Thn*G@6Ft27nbw&N}OQFwcY9> zg$DMWt-dQahQcw7u{Fh=6;i73Pc&=TamG}LRItn9Ygmb)Sub6B@zSLL6`t*A8_yQm zOFJ1_1<{lCn!wW`=%RcvO_S!}Zi=-NYGyriSz;BVz0(YUrqRG4wD-Ow2x&<0>@~&f z;uC|RN{|$FwDmgbrEU$tU{C^jXMCj~%Txi9t_f=(o+B$6uf+1 z`(sz4uWxX%y$1~@ZX%;x@QmFKVu)9wHE&WpZ-6!0w!0OFiMb_ zBw~5&2_Y=S8s;4hO$%FjgpDDNB<}|EuYh(qCE>`Rw*~G!%e*j8z%SGfSJWzX1dV%f z@Y;KF=hM$(ne?axPfRW(9I)tqo4~0nY>H#4?I;aiv%FY~o42_I!~8*tVefG!exGcM z3M_~~>FvqFw-W!22dH64I`GQWcv^-Wx_8Z*x-w}#Wh*7xX^kvXX(t^^LQq7uQW@z2 z2bwD^>>UjvopG|4S5v46f5ZhIrC`5!d86i;5iB zB2L{5vk=2Iwgu-3A1R&>kNEXiF7+Hw@DTc2(!VNKd9kM*H_(rzmlM4dsN_)N2MI7l zN)MCH#&&S%co3+E-@IhsfmAUyavo4`n`kU1Z%)Rb7a;|VpY}lS#7`WD$M)0KG3)_8 zS?a5Paq_2F3R)qh))BaSB-OZLZJ5&)FWD_^;Ln_pVq8KNP+L-hm4L}? 
zG)*Xc+wKA_;pN(^YKxVSQDRzDbNMhPuU@qqf#79xG0v&N^T_{4{hoUeOS3J9Lp=Xv z^xNP+P#)gV8hHC-dpUNV>{?XT;5EUY8LxeOoJt@_qwXk;>MzAla{-pWMZ7JUKy#Vw zltSEt2-x9xXIm6ggUdcH|IjT=sMf_L3U0jpdY**Ne5RZte8bkvkGYaWrV8P#6GBWPc$$@wLwr51-Gh1vIaSvZRj|JUjCcPYlXG6@lwLz6bd3< zS4_=D*N)Sl=-O-FL)^JfCQl_ z!c_!-%cj9GBK|aHpNoxv%RaNz*VZvY8NG>#mTRYH_txEEjF+qmq!mQIyPkFV&Z0e+ zPKwHHrfncWyU)hhsx-H~#CiQ;98XZMDwW>NkwP@oNU<3%YCjL0jpZcC!7 zPv3}YvmT>|D-}GocEAsGqw27b&0ynHIMh9U4yB4H-0gs8nK^}KQZzWFh3+D#2QJ9l zL-1gG#LWEbg4vUM@~GA58T65Es}o+HZpP+eQt7vhIQC|t`^e#;c$kbO$;R#W;S&?k zD_5qbVlxCZ(Hhz^f%TYOw&OoOi-PvKk2W{-NVy8%cjE9<`u)ezr*<$wsH(-Q&rfg* zlzLL2BE*U*A$mj_jq<;CSJ8jlwy-2_q3k0Cbbvin}Hij>v@MfQmoiaHG#zDhu+x^xsUWGqD*Ys zo=;xuPowwj1iYS!Zz*{}EolSo*AJea&(bP8xU7q9lQMbADghR^O~xhmAT|M`yK$_z z(LdP)5-+%BJV3UDI5tz{og3R@YUqpSe>?gh)=O0vsuGD6nxQl%tP#D0@id9;~xLLGLNPokltaTa3;p1wPFvRt2gbdRE?n!?WfVR zyXnd;ym}5Q;TB>yY(U(wv(9n*@U*#OY!w!Tc#Q+nSc6A^Xl14+KT_$5y_wtU)05xW z0Kqoeilc;mB{jT6zra<{r&Q0jx6en>HugmlA+8WE-@F=UeiPJ2)Z%O%ro!&kN-e}k zb%U^II5!79*6dXp2xtd8$;%w*UjiBseZVai{^#REhP-v5J$d zvNu5d0#e)NfR>M!y>#iN_yN^ttIvp%A2f5v{vHaAH*C*)8|a%P)ztXtz{lKXh|+0t zwsb_i(G2hViQgTgWMi~gBt2BRyih;m{A2fy98{@($AA}#{6JguFl+Y`zxCEo zMO9@WCPFLha3>w#CHR3+H!&M@`tSby!x$-|5e)$0%KGHw_jc+a-Fel<{1meA7wuc> zKZ78}o)-knptv1zoH`iyy;ApL-_xZFx1)^}kC(>E@wVyIjkSqm=`o|p>(}i&B7Y0n z#2EVR=x6pjtTajrul{RkQ(m%dz-Uv8V#aj|&fXT@gRRWy*=EW)a_7WjxMR5g zJY%@;6yDT60nQ|GvhIsfU?<+Eo|{~lSt>HZPmnW@=1VXB?%uMUn*9s`XdUq2_Au1d z__Cl=L+lPmLisATF=S)D;5ckz=UzQ`&bOSjo9sXP9fBC$9K{{&>O%RKcFrX4a)EB9 zyRr3ui7DXtVzAjv+6NnFJTHtOYp;)xFu(GIi>>e1s`iX|rh zX{Ns(&Q)=g(`qaN#&*L)NGi%xO%7(cTeobe>usm7-o0Z7wAPv*qv-8Zdtsbjy7U8k z(CFFJnSN(Dh8D_bt(r8+w6;G3E=hSzahvDcuhsruJh{2=^{&vwq>FC=iYpZ0%^MzS zRcB}~-{j=r*Wgs75HO@TU_Od2wwp~CkRbG|JE9wJ=kkc7HY*$v!e*-@Ey&H0X;0Nn zU6>DBuS(NnH8p%l0@Wd?B7Bct9JI?3L1jF9c}%Pp!fq&(cFNSOK))Z8Wq+X>1|jGI zK;TOB;qg>h@<6`%;jAtuL0W-9FKr9YIg^LtZrY>d)6GFF?mv?75cz5%&s?MZBEZgA zHIaY7a^ZQG*#p%917eI-zH#~18BJWm1wKY%4snlG$W#+>!$wTrUzJNgbP2gFFMLH) z>TUa>u0)Fspxo(6^%8>H_j~&0 zRY%#7JO6_{fSlSR&x&w`;gZFI$i3n|?WaP#6>e9oV*7e+cP;x`Gwg9c+0a6NrB=wd ziC@@_#@>C#KK|04L<;|r?-c&IBtFOWOGW;Vd-vB4TTp#u_w`=7s#Dkw5G>D2>3pXu%ctWriA6FH!2Ivqba-S9zD4h@D| zTYFJDcsyIcW@qb-#Q;fm$eM&yU=exg z(&fw7rc7O_A2;eM@+bA#x(AORym`S^j%}Vvh8wun!JsgG*&N|dArNJvwt@?5>qacK zOxS)07RSkR6fVh8oEujsV+)R;V%uDYSuq9CAV(;H_2?@iu-gCvb(@juG>Vkj&_U!z z^OWMYhOU#(kG?&FGEw)5SXOxz2W+3o8&FnJI>acEX?mr~D6o%^((XNZk_v*P)@4G) zQxRejX}E$Vx%=Me1G~~{!J;*@;E_JOyPmVVkyXqEuIrGl+4`b4U57IE%+FSEu8LjR zo32kyd2`4h`BsBS^^g6^mXJ2BJ>T2cAyu$nFx7>H$dM7L`X?(9xUF!3W7)fpiFk|X zJ;JKft4$l!Y}e@>7{u>TpdXtob)c6+09-B8*gGN@qh}Z2ymTpsHrGs-6vaMjbmL--n0ueBfAZ*hq^*JtQau<(@(vN_?ZFI1Wi z3#G%meOKwl^6KT8__D0wiZ%LUv{o+=qT6IjhaJ`dg9JNID1UbF&}3!}F6Cbi<3QofZ&q_7SS zk$;dXev^m68fgdH24HN$9ppZ`w3-F1o?;xzhwH5-CH0v~h){$4d*(^QYFcAweKI&+ zoWH2^MG*^BrCZ_4(D$V$FuD5iF}0Bn2V~-G3hR}_O6(s?N`=4%_5Pu|Z zrP{m`A5iFdO=9F01vkP64CJ#Af_8`HP{%ejf(?W6qG2j{>k#+j%H@7~5|NX|m4#T+ zgYHmiBHK@51VoAAdfA4lZQIELLC4}k*n-MRK;4|l5CfiA|L^_UV9Z7ZA$LG--TNjU zq&Vrd?l6LHeBU06gduMtAc()2;l#*ZF{vd~(P*YH?@Bpw@!K7c((zOEJ5`VRHV_W` z4ai^4X$%O;muSrXq+q^dT_>;I`fq(=r6%z-`wdVsAe3D^D#VJ$GOP@Q4Iu?IDPSi^ zXT>9%J=b*&IDt${Xb&+dEZSzHt?bO! zYRhi%Zgr1rOllFDwA_w(Sd!04X+T)lSlW+bhsHP8-l6H_m%nd^0e$GM5^;arfE0}- zs*dAxrgaSXJbNs8b#g8eaGS)%3$KN!D%%lAc`Tv(e`OO<4tOzEZXMe+wgRfOQt;)L zC{Xo&&)Gw{K*?*w$90G}WjXo5>i-33l8|nqs#MWiZb3}#;TfiuJqSQd?qH-s-nVSd zph(uBW#onrvjCWmC{gfblk^a*uwx8jRO{4DBE;duJm)&h*+0Ax{DfGRcR*d(drQu? 
zP_5X&kV6O|lGY+-*6EVE>!ivF@*sFpdyqrhgNS+2El}L>CIR;!Jh*5>6~3wvrN&=W zRNs@cZ-qRsR1SnZmo5_o($1ig6o8g@E3_e?J6tMA~Sf{ zJE~T-je4%ORe!M6TxnKa-P2^?#}2GSrgMck%)SMrngn^=H~NWtHRL>#1J~^I50krF zRrq6XlZZiFIb)UwI(DSXj!fR~Z0K!xFA7Y>2HW4?Pcb`=4dIZia&fCVzCcr#6QFkm zX}1MX)##d!8dv_7{f0}JlFlT0fV+jUi-n%9rBaY_Nn?DxLV%cpbpJTTqZ8+2{}%c* z`STl?FsR(fR76>OT-2;%#GtF8tcY&qE>I#~^rsV2nqG<1HLw1UHqf5>!`ly~r>BYX zYQ1WI4{B(zZ-4*dg>!K{#!+42?(Ek~8H%>pdtxhgyVh8$IgFXhmRNv2pEPWy>Dm|O z!+yuH&boGHzG<6|k4{tx0-74xUmYIDXA3XPNp-+g(rY`Z=Yf|YcC|g;>k+UU(O?&;=<5RS% zO05Lub;LuU2Kz+LlZink>+ho*VL?jR>!FvKiVa+$`|;Rjh?%3R#BUhG;6C&sLNHHU zOog^TUeB%3&BS?Q_=jzU3OkXInDYQq$v|z~2>xj_Fp=HlJKgCW5X_08-9?qKTro6? zL6V~6!sJ$u*$JQD&(j*Xj(NJ=H%K1YjrO!-j6KYC*NO=@vLPZJCvIcBXZ%&u;Sh zF>B;%MxLln3-+!m;6h>vp<>2HI75zWqM2Cer)o*byCSziWeRNKqtTD!e(D@kRS1_$ zH({XRk;KZ34ca2rV+*Oq(e~%(|8ewrEEKg(Y)On$R16A?$GodS36A{a3>=~DfOYO* zSjEdL7#s1>v3(IFQWyWq27_|&E`ee}NL+i*|Ka41@d(_E4wR-I`X6L1#tG{=*iC^T z8$C)fGOo9-IQ=Lr0KFuIVtw%S!AXot4ornUT77Y6hg# zzpx39D`}NvwWXOEW{ent2#S*9Q7Xzx8Pu8DTr!^UAH`4O7yBl3CHW?$X#-eXzjm>n z{kj1-+kfms7XJrtHU*BECO^9v0ihYt0Ynd#i;t#2ZDtyG$jU+Au*x<&vQ2SMDE$LE z-o*3iH($DM0#r~Pf3%+fxl$u%I<{T6Z&WE&v_GF!Ni)6n6I)~KtdiNzeKU85>gI-6 z1vfCYCW8b>>ypx98Po~TMl6X3A0GKl;(+y*Gd7g9upxudN8ZyAm@7-=lWn#t9zm%P zCk{OcGaFZ-y_3O{P)2@j_RCLfEeGASmUZD0gJ+O**6}|5#X@ohWDb%`4Bp^CW>vhm zLD^^2{4-Y=CBkD;NHGIjCa>SJuNL$!fGq!cjEC?5`8ZsbwLyFDiR>_&$lS_m9LUM3 zNqdl-lyx|hVif^b0^=AafXz((o=E^|?4B(@2fI1r{YD@FNH3VYWN-7!A1Bx^;9&N0I-Y604x^XWiGCy5vC7#tGY{58Lnf-WN+2~71+!P|( zP+A10J6j$w{d?-tVWqMEg{UYz0T99#fy6CIE*s(3PXCFISN7D%#eF0K37J*1ln$Mh z5ZW~L61YLU{LI2$hk!t(6px?j4&ata8EOSp2(Q_;-;QuU!QY7a6?^eR{dZLU`{%#0 zDb)Aiti(#TH&UEQXA;~08t71a_I%&Hy5l|HB;wtVv8vOISpfUW#5eB8MY17wP_cQ3 z>@mA_@KQ;k679qhixpO=m!q7)q^f5cM~q@!sE1KXF0`vMT%=XN@WgS&`#S@>19@&z z3x2WfcQ-fQfx{fctJcjbUwyEsk@H-`Qt5J-B5`^RS7wBHUOxAY7%9O}1mCL=Uq>O@ z(i09OCuct?us%$LHx=b0xQHDk&(aVjis`jz1@3y492=HY614(60__u5$$)0f?QuUY z69#-3l*nOv3f3{=pKL>*uGdUDwsD{0@`B0L`D|=d^#9?Erz=1?6;-d`7}`@rH5&t9 z)98X~0>rM`$FAJE;_f;_Qk93TYH6;3(Vc{lu!a4$U}Vg08MH+qsVf~_)r zoFl?hQ;azBLQne1IxFd#|9J8z6Sqg3_g<*^%LaeE+97wufp1f-dfC|A+^_=ZMtr+5w_!i^lN!f{4awEA$GRPR3 zuwwDhLve1Hxb5$IAYrCNCDMxUZEpF`ZE&M!S0^tgW`1II#8+%Tz<{QAD5REdy2@g$ zQIGa!ub!K*t$VZ`I_GtiBvEcNTtpThfR+IapWDZh_w zR6VF=?av3pont#tSZiWwJs@;(fkQ0gjs1V6* z5-O0R9*?R$rhqIdA(~DWE7~582kG9o&f+I-K4aHK@V-RSY|w%Z@UI{Qj>?^D0I;yV zJKlBvKvt;-^OyamSxeA`f}`s95*l&b-!#($vUi;R?iEr4j zFMQj?VVt{3c#O##sW4?e@-F*39v2mR>%(ENmk@fGbznZaM*;4n!Uu;8?A?@4Z-F;X z0=R|063zEE8Zk|-mq+|Eo<)#T#;zYY;!y0eNh)1H|k&?b1dQg6j#G)1*ZcP$&G(B!EYjQIiT@tcR3@p~a}`@+^HDjU=o z-atxk5?*zyg-mT_Is`_YBHbi;^_}~1KrsKvS?Rv^-KN717@!m>xwtjl4pJF5>>ufk!b_mTlKnk(Efv>QX3sPXAPhAI ztk`QOp1yKF#<#jJYNT|M)#7%;SD9SAGcOasR)MgeKrHUfHQNH+$Qhlx-t7i}$bts` z{{#|Z2sHD%TFoj`SbfDCmDFWar#f}7XUxtzJv^O((lU6YstVm zYAfSqdgJRCP1tHUQob9&@}o~9fiMil2qRydIA_vKK5B2c8}Fy%G*?IoS=#umiw6fp z;Y>^;+lPBpV%pnFOwZ0+b-t4Re$gAg5@hK;vh{xc!_lYw`?)<00lE_d>A>^SW)4Z2 zfHC+yjxlAjIK%wfX=A2u+oP728>t04xUuwM@LULH2oyfgBc@i}Z(c1Ot#EXs=fhCFOwRd7KN;Mf)V}kB= zww#F#}9p}wiwTKD>%1rA@bcxw95bqQf@7#s+!?3E}3S+Q99 zX^PJJE}$#*85`Wo=gy_`4vpTs_8T0_#0V;(`zj6?6riUWW1IbjZ+^@E_+aAIkPl#z z;xJYd9D8!TIlUCyLp&A6mu&q{H){4#kjVQn3nIEeS!|Jo0X~RLh4X?ETOxyDMGDO* zi{v!QWnToBx$3wO6e9$+yx&GYedun9as>Ho?f2JHpd5Cstm0^}8sfABLl4=Bc;n<$ zLWxtoiF(R$3J|NAnxsRRNk(!hCN_nH-NyNeJwRk1VNxVm7kDfa90s?ClCc3`g+=57 zg8)wesH)$wg#^I9T698xKVGy(jY&aSB{ib79r8v>D%cS|oa4CH*;o?|1Pyq@P0j`8Oej03yJ|2p%qUv0>&qkpIT0_LTgW#Fj z;%X@Y1v}GmvKRoGzg)+5L=<@tW2J37;ht_H%9Dqr3SDaD&^dO-kW{DMq9_DjKF2%P z=0o^svyl*$gmDg)sSHp_p+ojN7-QxG+L!U)(Wmx|@^|mqYumU?wi<$57Y7t5w*4KM 
z&JAS?RUU0;^RSm3xcO?!o-9a5hCtp0svo=cV!d8MOZ|sXS)0bx6fMt$YxGiMC#mkx3@!bJ|!oo!9Z{s z*_?0POaLtH`4Cn7NxC66xVJod4VhVA8a`H>x&Ic*h z4OED`(>AD4*23OgqmalhSfN(9weh=brIgt6+!vt2Z%vRPklCaksfh}bkOhSZV-?|YpaWZazG!neVQJ{o{EaOSX8x~& z*8Cwl>-`v7iL>)vJAM1qbjKbxi^(9vG2YHK@`!kFNKOUT#5MN@2@fZP%NsO`@;>tw z3<~^De!hN+^vsFyo$QSV$KlnCCR!*D80Y+UZQ$n?F*$QK1p?$E!n`!$nQO0w zYnODjA7M)*^-fz=h_JwsVz6{SEZoBzgW%O+Y&hzM zhD1@fJ!VziICrXXzxiMM$418bQAO=&S@aYF?Qqi?h!dfKkj=O<(>kN z)h)7_$WRQAejrDFql14x(}(#^3fGQ7iGOb+B_hw|XQvC#I5{3zXxgwa$G&~;MU&V| zKe%Yu$cs!3ap99wJ ziMFF~ibnYbftTOLUMvt9XUKc6VR?bX#pca`zB>7Z{{&+9UcZtB{k{YD`pY-U@W!^pz{ z$w~0q4ImMyzH}@XhvIp&_d&Y>px%BGYwglZ$OeIQB|}+5WPs!S>S4CSG;na`=md71ZAp?4nTy5vTk0xYT~nxqkUFDI)pZ#h zJ)1aZny&R-dll;E&Z5aoZ@FnpBA);q#F4VU;n?6;sL~}4QUPf=r^>>@y%2J$x)uA5 zd?&a?ARVRXHImmTa-u7-dGmbYiQtFc<@~N2f`Fb9tcdLI-kFZaYwgX1{J~^aIOHtP z;&ioUQyp~OsML)>v&WcceE``gw|GnjP&Oys-JWJ?3TlsC7(0Nzaa>M@1axIone@$!``t^>&1uyo0k27YrC$4mab zj}FQflzi6{mZg;6UM-(`cddOGP9fFvA3gsEN(X*)@@XuLK84Z=X}WPEH2^8Zi%C2M zL&5^Ek&}r?PSt`cqU}MV&`UZ98{L26c}y{OtGz=s9u;S@{o2=UH%OXr80~GNXD?lh z>49^mPSQTf`TIk*xv}9?b|D0x?ec|l-->D9?sf%uHV6cl;EzYY)_+evE`R=+%|o%o z>#@aS9!tI%k7}UK4-TCKW?lJBCmMhn*CyH`UOVoM7pG-|--{_XCmO0HQ|+p)xQ^($ zKy`l?HaZ&~ftl#;MEsYgdg4b(ZX?X-$=am-s4eFr^a%Et61}#8N(R+Pe&}__ipbj6 zaD*I&K^Df{MDTYX#qKmk)9SAx*6(hBtS8D{fJw!tr~3oU0^3X8kB%~ph#+%;r$D}n znzz-T!3NI_7fnUEyv|%)+o5RC^YXQ^*2@ z0R4SJa;XhF&hg+7c014kvB`j=go&W)DCPuOxaW!;Fw~OCQyo`#4j0@r=EPJL*iHWA zs)DLC(?A)-5;TYI^00dxhu``TL z<+jZSU@gqj7i?*O3yf1(fm&zRY5qqLBr|~R_WOI~!iz1K0Y`AizcI1utQW!U0-Sc$ zD1t3=i^+RgB3qxn2t8LkAG^D8`PS7~ zghgU_rnZ`1fVkcFtrfkueY_}^-`HHaoByIc+3j|6o#}!Q^#KSmF%CO{YNudI?X4G* zJk!wzS0W}hfW(poj~g6N9#v1LuZ`me+L>~G+e{3K!|s0iu?1MjD}xD2cUGAWg|? zM7t^pgxMGqn{Gp!;!d2bEf)y|YLiY9fJGcyAaJWk9RyjedEeCo1|*ja%s>jue|bFEDvVHMlUO(+o;g^Dh;Pn zPsbdf3T7e0uOvuClhJmh|Y!8R%!KCvS!qnx2DiwKV z*%v6sAb^zK*l2IGyUC+$H7W_gG$_f;9B_sXQz>Vvd2CVu*VO*ch6gG&8Vcfd_5dup z6Q&^An(qqimKfspMI7Pj)Tj;^2}xY%GkfQPtuN1jNl_To$W$Cg(14d4Ttxd?ylC{9egWq;j=pfm=zf*_wsF%joG zNQixGBca?w9o%j|X>V`E&n?wexpy*p_Vn%e9UB6(Us%uhA*Xn*uqfw;DO=m>`}N+T zxD;WGkx~kGbY>(+#DAl7{hNOsE40Bev=sv5-u8B~cVYhQCR2mC@)A7U%Pe3)wr*^qApWj6Nb1@85@Vk~@Bl4CZJ1}fZ- z4LsVBXPg^4q7Y4uv9Gd->OnQGmnT-`89dcbix0Oz{eO~TqkS9%DowAK; zRU9u*0`-0OO>SJdX&-qMP29+9_)(+nO*QfqqJ*KnuS`4}Gttd=pa1Qj|JKGZEWJKM z{o|itt|=}QOF5vLBKZm^6ttoZ1eQ5lOnJwe3DZhjpj4tx@s7E1NLbNztzE`LhbW^-w z6ZuI&Gu+h0<)&?cTp9K*HnXz9dpO8ZASWin*b>5V5tMrK!GuYOVkc41&NknJwlH;C zCCq}+tJsB|VS0z=K*lbhTEAO>TVrO#=_(zLwn5CrxUs8-@hm={WQ@*Ew#j%~sX| zCr=>0Q(sQ>n#9z*-hbqjK{qy%Ds9o4$9Jd0w`{Lbu6(BI6lRh^GEt$hq+?OYq$n!G zmt2TSJ<1+aA78PH?+PuJ?gphqRl5^gPvM`ZWa~@Sh1HqVud^nV4$IkA;HkIUiECvh zOF?&!Ohr6+d#}G{r-8J73?F5SfW%Tv|4Byw5!pZxvq{AH zN+}f%i}Ej_8lmmklZ_GfGocaD2eI#nxudY-U><1NAcottEZFNtJCmlKa1S&!?=OlN^|0iLCyMm60v0ELgD3!u@S{66} z?|;e>h{ihmu~eb z21wG53ac@$)(XUvPWNAIQ8KHJ$`QtZaX@=SfnV^tV;IgFki_4q{EC6dtN;=nhl_knV&A04QabyaiKZKn(F@6ZMd%=0uodbA@6gkyFNZKx z$cfmI$LHB0Mr6Mv8%ylm2D&n)cX3C#*{ZizTglb|r5clv(%BJ{S8U*XTcoy*jpP1T z6xBB4=}X~F^!A_hyb_LnS=>z}T%06})D?zTU@DOh#SG$cPhWc_qzMP=kNo?U{S8P? z>{-DT?DcE-PceG@7aWgn4u7rXf}_$o16i?^QkZ++pQZSzt=gDpEzI&)&!2a~O+ucO zn#D6_0k_+B-Kj0vnc`!WmPt1{>#+gmZR}nCi4U*LCgX#A9fnT~hHgas6MM1-avq$d zeOt+v&+P{}QtrW2b_#Yjp0w50p4ew~R^5KUI$E+E;P*J+{-P5s(eMx3pe+94kZ2{iyAc7c$K8tCc=wr0Ghnt~Bi7EX`xv zWKOPdRElt=jh&pl5(l-Wkaj${Lu4g#df=RKMdP;u$An489{-FuOe`5xHfTOVl{w$k z=C4Yvs2_L+|c zP%k+FM{?KFh`X3i?m}W~F~=AVZfq$#?zrSum!q`&Ho_4aVMZkMG(~VjzRfazmNEvE zkgg>?U2fhFLX-x;>dwGj+oI9=#H@m^C5;S_=KUC?E<)0W={g{;nap#sbEZ{SX00!U zX-8$R_t+M!^i|u?6&K@z=~maC^PGa9P%}8?MMXUk3xG;2#ldc-_x2u+%O>qj^;X5? 
z^Tp>SYj3iR3i3^W0_SHZ?}ykt?5ZQXeGz33(UT9p)BT=31KCRzf+eMhUmoMDzLc~_MH9cLFy}1 z(^Z}Nh?+z^(Qi-q;wlLn4~9yc$< zX~E^MDZc!|M2hmgRObR&Iu=1PgGoB0M^o)5G!Qw~0`$jTMFg!f9ga309RYfB(26SA z+xQ{9jlu}pz&bGGbu36luwxlP(rEuY%SKpEIZ*HWOGXqoEA~xYjZ)zSop(rJ4wA(r z1_oL(rGyCv8P#v0YDBOifLRcyGA|_mNjHVuE48K__?ZlyCmB{kLa`(>GY$KJaPT1A zWYvaqr(oOwrAx)Ab?&|cQfbKzBCg5R+wqkItoo`wS!o{^MU?Ibqo3M>6L?7)qtH30 zNL~q##T`fz5r4MW?Iz;q-nJ*vnE4NO4gmjg`jXE)hg%VAc$y4FTg2b7*N5tEu#+%2 zce^_sPf1^T5IqxWbF+<*Zg$metiAo+he_de`hdZthHPXo(Mz}6kHQ)yK5PQc1foJo zQBIic=h@SL;sXoNo+7?+7-y+FV~O@$%AhAzJQHNJEtFD<_JrbxDP|RmEYC(-iBH-7 zsWiz?Gf6%^zmbzTXWYXxKGeU_Z{ozQtC8NB6UVKabQRta>+1%9YAZTkH?Y#9+KlrW ztIuS+Y4u7pgACXK?|m4V2hBe#(5T(@_pPvg)dLh)``^XioBAe+0F zWq)^w8VVUd_wviBHfCBv8>?6+Q% z*Sp{wY#A=RnxQfG?5`W$?*&l?^g)CcI>o$VhW43irD?AKpT%Ho2ii~&I-M1YN==rc zr|#rU&)U*M$sRVhv}%WmkaeN~h30eTFvg+s;NL{!7dNTR3W)gV&p!+za+ru6wmF=i`O#;^- zXy7UXd>(uVISH1AU0~;jqlW}+PGwn~wRp!QIH!deCMjYy%FWI=K zw3a7$VQ68gcHM3puPngm+^R2oIvATq-GOaFx0k3iv!P9#zi&&6-WwilCtHVY#{LP* zf0j&l2w#byWK96U*P)Z*f+Tx(^2=CYc<46BoVk%H0Lkn@1L-@pk%H;SM?{v?CDZZgaQp(Za_~ zGsxe(AcCsgot=p2av*+HY9z-!jXiEg&?xkZtgy!s1wVYfb#wZV6+LTjfJmD>D+~I6|_vco|aVW zaKwU;_6!@6CV664R&9jMd5eEygr%gny=4-IS%nQ}YXwwc;MYtEgxTi^63#T6X6+Jr z`u^#^_Ek`TxgTT`#Er+Y1$hLXZ89b}m+0>7X{t^|(%D(pvn$xSc((D4;hw6>OxgCw z&K`-DTrju=t5y;J_*%(5`>|ILbFFI+DRedf3?%d>DgW6-!;7~sHe#VR+p@h~;i`%+ zLYWbHmUa9v>;{m4y^c*eT~di2xvG4SKSq@`32d;?PH|MZOg%5_qL?FQaauzABlAxF zjr@;)Yo8|WL~uEoc#KAvl8nJ=A-uKYo+9P{0U^@pGK!f5+ z4)9LZEy4%6~sR zc`xPw@#rBk*!ChgaGOGFqy@Wm^=il_^fz*_Y)>)Nw_J%2>F?5B_u|sOx)5J0;jozA zxH7)GK5g3LOgFKAjSetTLA_&9*)o;_Pt<+vV)L*|qU68_Sxc2rH?B|F!f|wdAB!aK z4~L`gC+!YlDSP7^;;D02g5WpdZriCw<*uz139XfyeW(2p0`8CE1v-Buh~kS6UUm|i zLYUX~J54X^cq=Dd$8n{Q$$js^LlT*pgQ0yLUOP(q@aAl3>CK(Ob{x&KRBOeP{>TbA)Th1t{??6K_CSp2Coy(Fk5LtlP8T>M<)u_bhXrK<0&<4zRnjkM`i08f_;6DmX8gEBgK|H=eNrBLv7 zl^6!`hT)D{!pAtTT)Uib>h-0WdZSi#-w%}orY-dA?CL_Y#h}-T)CG!Wi<-(_C%Y3W zBG!?O_f_E3i1BZfMx!x6D5m5%?4^M4q68{7?RMq~07b}7`)v1m+xGmjL)RUgtpwcZgA;yRepfqYq3aoJCDxa1P zvb;WK5DU^AJ{fNN0wEsIZHLvi*Bz#O6MnYxgkRYyhu%d=@yzXnpX8Qjn{Xwbt6x!+ z!D#A1xK`}9j9mwF#RIXNZ0elZ_CuSW*phb>y*f9B@Wv#N;^UJy69^G3^?Ix#a?y!1 zBD{^B5m4|J|%U`*5{hGa5E_%vhHQOW$P3=!zpR@-sd-Qy0 z-PUme$)mvO-kx0gx-KZG*c12p#O<71#Ol1OL_DbL9pN#%EdXH57ha+zQanu8#n~61O*EsYI!v`si{Zbf# zn3m#NAL*RgJOP0NIf7To`F*xFLthy1+wCftds{Ix@!{BL$%%RC?S%&usUqcYZIBFq zk-}%~H90vIC+AU54A|u$ha}>>LQ>#IU6vz=*T}--xb^%%kYBOwN6+oJ{#NzV7>Vj4 zD^SBRc8|qmE`Hm@ZICf>)R+pi8hHCXXA*HXY_RL;{ z^GoT8O#-p_7_s=1i3QO#;;v#HjjwOeKEfBTKjiSELTvkm-1LY20}uPr$T6 z3x$=__O|4vyPSY{?Jhm$1Ua_n5{~xgDib9gAD_mjB)XUpsV!&;vndOXhl>YLrH$bE z2gF%_W$e^a1&?|+RheJUBpWpK$CxYiCzQp_S-MV zvPnR}TcyjQvbNUtL(U2Nj;%iceZO?^fwsV@r#X^sZ|`p%5Au!yiSQgFu~9J{+Yn{j zbFn^3Fi8wNdR^GG4tv{A=o2R7$q^ zJvZzJYOkY94ZEz0%Xn@R`WTuRIWG#Sy5C`}Jv;eg^r=0e$1d(gFfRpHfZfN5VaOTz zURi@T$|gkFsK8(8eq141rE*Yv$>`a+^B3cX$s38nk&5viHMq7`VurstWsm;P?7i)C z+}CyInbbZ%&TdW3&d%75qkDQtV6+d@_361xTy{c0(&p-WJ^nP7p ziOIsh^(TA_p>z&6z)8A~ndmp>LxKJ`sBrE+MN|?~<7&z^njr%0J{fW#7q+(2Pdpu~z5C;V`$Ojlzletp}?L2MVYeXJyS0-cBZQjZ!gH=dW@9mQK(HCiZ^V zp}5`=%g)0aqySjv=A99{+t>D9XF$rCHo7!#N5)uo!m=l&0JcWe>=2{niF*>b?991T^VasO2vasemA7D?rc1mhlXAFz|jZ9nL{N+ym6eU@5;@KN+N zeU@1Ii1Q*L_#iED23%*0tyOwfs^=RC!9f1uFl6TP_bs>p}41=gY z!X+TS7ME^%hlkWs$kbEdY{m z?H5JgwyQ;NEZA{U&KGW3x;ZzKIHd!>5s(wP^S*k|{&4D|*C%l2;)wKDR~xH3GMKkc zv*hpkGfT_YeV^hfZDLn6%@vPmu(&JU*)SPW0?HB%fXn?=r)ij->{OD!q;AS>BUR>6X$w}Lg*)&!W`BSE-hWxckoTPnBh=4vPcI{ za+O(DRFsglGAYdofWoA_nWeew`rYcq?=BzOs8AaeHAod!t0K zLy|U!j@qLTjY85Nlbz>&GHw)-GlROWqD2!g_KzEDln3=$*~jtXKkmwz0R54|?!SEc zeIp7_RtNIJ-MkBEs7C4RtbJc+7P#~J5y>(?Vl)+IIoCw^p;2?hv64sDdFWB 
zs2H~)EK^c!70c9o*U6FuiJ0gnS%BPyqO&CqB}6CG>sq(CVnbFMmg5uH3#9<$&Q-HW zZ-as03YF{5LP-4%HWliodEq$kRq~~^FA*VqlpSWt3Bn+h)B!U%Zgx#GX_erNPbeYC zV#d3p2=PFYwoVi)PxK<(CVQ)X?vA=!of9X9{5(`Y9Q8VR_qk`#gPeF+>hAD;_>1wk zoz2aA`Y@I}NOZ`ouA17qzP-7jkFAynl}@c>a{5}Ef8RGNMJrzXy2GU`O@zzMau`b& zzWKgI*O6%waq_B5Om-da3!nm^&-p&Npu+r>tlJ@Q1&6v5>P$)EisS~m9xq8E5_gd! zAxg-~lDYTq5p?z`m92EmeuMF|BlM-l#=7om4kuuqJAb&vC&EyEa;#{zl+=>|k!c$E zY8qSrMl(nLKbm(oakDr?so4?z1Gqqzd~VCxpo0Ls>j{m_4&g`)<*gbVB*~nl;^Dhj2nLAdetgrW4*&;IH-c0bO=|+iXx5C>5tB> zEE#R9TMb9?kjs+r(dlC21*0O*Fx)$^!!64t?=_wu<8WC}FdKW@-XO{Zlmte^Y1Uyl zxV~x;91L>1%_;j-f|I)w3dF)GzXQF44 z_Jkr2Y%Blu4$}TFwHL|fwzvlhXTM3H7X8HMh?~czD%O;Adht{HG$4cJ#yMYsE?L@2 z8plIEuMk?lbKAMjDoN=@egDV~>Z2(61q^ub(VFUzw7XF*t@Rf0V3Z4M7_IieAr>dS z)fsU+u+O2~OCXoJ#WaEI%ZvI#`-uc%2VwoV(TYn$Na2lDLsyA!$HGtC6HOYm=yW=% zk0o&76D|j)O_Lj+Ke43&?GlvpnQB?ZS6qQDhkv&%*Est8elb`;A5zxi1G|9db-!R| zxAVRd)HyyF_1)u&CjbQm;2sDg^X@Nw-tovOBdLlM{acbWRrY8=X}PrB--0Fg2IPa; z=sCI~mt;EW+l~8cx>(C|OWHX*DVXXoVH{x&Fp_@6ZE6d3Q7~HD*=?Jv;Qe;Uym|vd zEFi>{Yv5$qcP0nB;Xo~#0iJPf>cQGACF$88jsHq}loPI~CYGJ?Opee0;w}9dfnToi z)nW4U&z@Q}NY=9C(&=i9&zI*G9F?G)X@hTsnBD8mu)WGXEuKh=V5H`vJNG#!Flcs~ zhstiZzT^Uy8iX*`W{}Ri#O0VvM`t-Sp~K&4e_8*0|NRi({vwB!S@obmo=|m+Z*;P5 zv#oS|6fE&T0%n_^fJJCh&@!|l;uHq1)Zj3aFolw^v|xN3fuYS4C-P|M(!!#?EH&;J znt#OATnA7!9`4-TjLHXg0w)!MIA9keNVNCWyKB0h=nGV2^ws9pP27(DE9)03G;mFd ztfyQK5C6~^x^r*yrv43=GuJ4g3^3?NzY4VeLfd8&GRxlcc5#{eR4iY!vYq(B&;-L} z8Ea4EAO<^JH_{t&7cui|jEd~6wRa`nh?TJlopzNBFr`j&&M392!1QLCJe~KozMQyr{B{F9G|~2(#~+IGA89g z;-O?Y#B>bCO)ot1?Z9Tcg3$Pi!mn;mSuAOTR<7PTSJDaR8j6QQ(}yZn5vJ_vy@Y7uK6 zpMUw~a9y%oKuQ4J8|@KMmI%5}<9!_D90c)c>`HNI!VKZ9RXM}NG3r1%d2F4QzqEZ; z%4dy^odr_(e0W$6;42e*C8R4Z&CU7S6LV(@NDn4OD7-mTIFZR=#Y;iRc2C5e59e-& zn^D|(+ZfuJBr?1rp`o=qs3W@0J0NAdEKD$VHs>P;ow+hj zGHZ1ysA$f0O1gZa2H7?Y=M3a_A;MPn);cGfs zMvhJX*QbSjDXswr!ktbHXY`VRt4;U}jM6zRLCz`pG}^r%8z7roJQX37IpoevmPdzA zP#zW17kQ68%?@r4zV;8;kRhJ@cQnNmt6v%s;bdS=2uTPygoso5Y;Z9IN@0{UH>HrL z4+TH-zPCWlA>0bEF`rfz*$IR(LLO*QSlomso`k~6=O1U0#Q%<|Eso)44g(|QM8ZTg#XwJk{WnWadq zd5|uihePS=p=L?sKIAar9Q4{#WSJM!UN;$gp&fii600KF+?)cJ-?e}X>7{4n61n~? zRtGD3QAyS5TdnuPrg$8Sv5(K+r#zLDiqD7Y1}bWoql5e|nHxH*oK7W{V}pj%iSf+N z@drAXbARmMqOP@Q9sc|4o&K)rG~r$^LQKIebAQ!*-K_Jdy%``p(fR?Yj+AimFFyT! 
zH|i(JYN(oG&FGk@Mxs+qzQKb4teeyiZ3jJE?hZ$0gC~cZ%$F&bT>kco4~SseiEWZQftI zuLPi2kc9s~=;r+u1vJO!N*>2=I>$bTSi|_PC3OaeleS0x*-xMTxi9YRt~g18YZrxC zY&2woa~x6%By`}}W)H<)^+Vg+B%CS`2`lVLxJZd=Pyx~cTU6>ZN>I9~!>xjYw4aIR zjaMUel5-T6=FKy6P#{sw?7LsvMj%aaD76!G!1y|8Qm2-~GNG;}zM4}dfJk>O#VU~# z7KnWI@zWpZ?+9N)LR^r5f6&_|tHbIZiar<-`d~GdaD3dvED>jvGpKEr{s;|DiNmxC zj`wm8IIa^C+#E?<*Xt+Y$e8>&|9#>MCH2}$uuz*^!bPboa~sFf17cVD`405~-axO` zXGQDMtws?IUj^RbiV|}Ozb=PN^}(&C%?5k;`%k~^!$Yi<7=cqiq+ryM>kn(fFGU}B zV2LI6q&8c!Q?e@Ts4qFwav?)WiqwAg2tMLY3UWrqxkK8YcU1289gc{-*j9#toE>W$ zXh)b=QhU~ErP)@v?o!OkzPOio$X}DVl?1W%8=Tr)_rjn|FmifjCWnVGSUVvN`->DD=y*w2_{m8p z?8pb*sauB{H+3jd#XwkSOqSk0FsuTc5H`ZuD#a*+CQO>oF5x&I1{E_{C9_@c!`FQR zFca~i`L^2PqA35hET6e*agE=;UKhL!~f}mk$(0e zCEX2|Uw-+LqoJ|Ax^88btNK{C4YrpuNSrNO ztKJjEAHBL6i@;H1fAW4KxQcI*H&2X6r&wH@_y=mc^y9(4j)l0HRWiOwu*c^ z4*ID`jN&0syJ_Q1ZDt!6pHI9L-xv0A^K3ovwA(1BrVJ%WwC z?!-_`Gb#Q`QL)Usy*$67M32wE+Bxwd?bgK3Jw1?;G8;xT*X-!5Ute0$QEh)!|9b0P zZ)b;-&3KdaT~09V_pK|b1+xv|dF8_`*Ymm#P z;DRA-kV8Ybwqn)YR(%mE0)Lq)X{Ae0xPI7U36 z`ewVh9ew6VW~xnL*rV9C)_*B}wC~0Lb($J@MZF31oOmD`cdKNt8m=BekWLRANwruWNkkpNf>tvcP8DJ!pmn9JC)n0~V`WP_2EkFEi58%Dwr&bz+P_vliYRZ_{ zPOeA$!J{O`&eT02q9}TQZ+5yZ6!kzKVP57=M_GiC*zUfIR}s->HY_a|xaC}lrTg|e zmLfx~J!G+Wxi5w)($e)h^pxQtX+TF)X-GYyr%`xG1i~2@3Cbl~F5qqZgoen4@wr}; zV-cpHeF@PTjEQy+p4V!NoS)*gKY<7Qh45P^xz|1=C#V;8ebE(*l@Pux)uM4&phWxN zaOVO<39s1pl)48pDY1^uHd)c(NkV=lm_ofY5^%RK$wHIj?wKUo}0pfnS*{OqEus5Erkytps_J!ZlThl2zcw zxtMd#z10VY2AjsiFZ(LJUJvTWc2|W|zN$&?^Da45R8Q<@e}_VKz|Eh*8!K*jh_FFy zZ0hJq@NPU%L{q6jjxP9Cy^FaV@^v;>Z)vl5@x+71e=!eX>Dr3ZAxd{`l@z`jyNm}6 zQG4oR?y^^DyVu#XgB_A7PKFp&pN&ON3NAXuxAC@8=SJT~pzS&dt6VAQ=Z`O=*8E`n zV}DobFfu}YcRvITNd72iw2T>jJpSwPpX-w#HaKjS78dNW?>+tQ_=hf@xfv0OCmA|$ zL*Jo~3+6-aLkWxcrXnG{9ZNehunBukf1 zz={AX8)50fUX&#AzyCic>prYjdzJ&~Ar9IgL(xwGZ|&+l5_~op0TLtk#p&}m&;Ib! zZ#w^(Si$0Xh`RW02PKOw5EOkp=igJPlLF#2?KEvGm13FU3}mlqFj(WCj_Q zhh|ZTW%pCmvhR-nRFtz%e_!9)5hX~Wz;!3LlzO@JjkPhcO;V|Ot#&DDO0ZfV73E%{ z*4hk3)a}Mqb_yk&^%toWs}pd$gaGx^v+sZU`;HiH57U?hR>enRSs?MGM(mHYDH^`3 zFQeAu{_=e#+m@;aTt1>W!GAWX=I8tWqMUjxG%tl+;SnYV?wy6SVy*=OhQc&VJHpn$ zJ*87gVc!)y?soa>FJ4kon~$13HKcEUMZc90wNuk;7X#whc&ywI=ZEjauzvb8=V@}p z7zGjCHyj9Nh_Uiii*td-At`O_dGE~KntA@Hu{Ao`;5fMx!(l)UzLQbx8=vdiq^MRY zORQj=P#j8}inFccMmsX@as7K^%bTX$1mcvW0)K?7JTa|(7Vm8Cgy*S%BFy_hH5M5; z_o)jnkC90Tqg`ut4T&cHNhOuplu)|@hM6S{rk~(?ZLjJ{4T@iW`oB3jx7XfK6z!16 zjg8$6ZV3EllPcFbo;C!x$hy;IMwzlbMD*OyB2yJq9vn1O??R3`QJaiFdqyib>C{}c zV|;GODxwre!Jq1TSPdZ9C$P&6iJZvr8Zx*H>*9$}if>;;GJLKcNXD2pI6gn6)SmX+ z>KkVgfAX=NJ0;>_rMXDAYldLvJxOTr3ElRB$7LBT41$IkkJO{62D|T`Xrzuu32y6! zliKu)SKoTa?gm62bF(+^y%-9f9|Rd!ulhw1`1_y?sMnz=B?oIC$)lz z8#b#_b=@oXKz=J63TS|?k{hqnf`^p?=ADwmq7#8IeDbe7+W2=&cli4!Nc=y2Lzf5< z^gYDS&y{}RNV5}ZOckyFhp^{OpWKt^*380@O zNaL1>jbc52qjV4hZCiP3vchYhq1P#>%9y5duqKM#)Zx-KIgMhhha!5Ncw+

#Ua(P_*THOAecfjjLn1=N?A!s3EDmAI@9hjq=ZjP26okDZrT$@I}_I zcoqEdf%uT7tOX2Q?YBOhx{k9x4datCrSFHyxb^1d)-4|bdCS!D^m8myq0tze98vSS z1(>v`Yn)wFs>EiLvnJjeNgsqM;;?R0#mZNnjI4nj(!_t$321NC)-Y-;4mA(QiH+|p zvsAgjdlq~r8@J7IBsw2vqwY5;_A>r~e&R>+L6rVA>NUqix0iF;6o7{q}v%_5d;OQ^ylUOEJ18Ze2 zgic?$`l{0;#ZTczunQ&Q*4pj$jc{e}v<@&;P-jg_W@?WV&?RHrTc)JK;aR!*O@;Hd zYu8Pe1862EIT7b<19$(IwaNMkzE(39rKUb1FZa}PUa6!dIhO%xlM8O}&f0QE3;5xd z6IhhHa+X&g-Kc|mS=u6|%03Pj^jn`(b_$y+hNsy!C0mffy1rlJ@@C@-9N;V&euiJ3 zhdp(|`>!MGeA~e1%tujl*H3hXwmEgMqc9M6m88-uU3?~Yg^W+PN*@f)5c(3hBYVaH zMiME;nU{)@h`X3VWI%c2$IVBC_;gHCX_YbQN79i6s4qQFFyiCgwau-s=xQQg z8n3T8o%^`i@b&tFKub|jBL~8Z`WL?W*b$?aoUYL}@&5hW@t^6$NL5JtiS7F6~xLmr5<*T6W%BIv#pzj?QCTd5l0fl3`H2=#NfmHa#TG09Vd^gEUA1GLcz?+ zq_zM`kFFi3MB1l=U?1#K2~J{D22f6W6x%}I*u!2#Yv-A6&A?06gxM=133d=UX&H-KTA0_@&YFjH z(C>+#TpU2Q9rN+{ub%#?6S<0!A`ut=W^yk{(=Ii@F!K7GU$uSIANe&<5*0PrldVBP zZ)eJfXD8H=CM)B36oTS}nz+6)3UH=yaMlYE%45VOpx2>+ueKG95M881nRQ$xVJ&!o z*9Pw?aCwya+m^yY@br(zCK?Z$(L`eb|Aw*Hs*qa<-}Z9_^ojUZkYkDkm0J?##2oa* zSkx=i@Y$|zxRIJbNZUCd(vTw(;^b7C_oWnal!{*s#kO`QN9BCz_fchi0Sk?sbx@d& zj;+=nc?H+6Dfvybcjv8w)^fe0k|0Ihx(9Zh1+IoJwa~QD-9kebYTR$!S@RXaCEIEFO0W0i=t#`7lsnXEP9G70DVl$D!y|^+ zc~e0ILe07eV(#kSb2sMZ^lvOLW9aOlzaD?0tn49sq|}>GG9~k|-wkETb|nbk8d>md z`-DxTyqvZ8*+nElV(5RZ^8z~{yVhAA)y}C+r{v7f&0X{TBbpkfIg6k)loYJ?b4B0L zP9(if+hD(1%`a+phP^O3*o$(h%}M(A=pcB0CUr+?K?%G=oJ}rAb&#~WM@k_fHC9>T zb1_Q6fvpgb{W3MOo$*M%)QFS{pN)I#x7YkrgBz-K0bq=jItc(n^CI`qY3By5J-dJ4 z-4EXWz-6%jiV`tJiiVtoQ4f;Kb<)8;l*9*=FBg9!lM&qY@li-*pN7^W5`+O2O zoY#z^UR)6pzAB*e?j{DIw)NmkIs~5lTmJh6|NXhP-#oC4wt`kqrJ`g>`vB$GnegrQ zsjt_NV|~HzCxuAZ#bN%DB9rqFo{S-BM^oNNxtpF;MkV#DcZ2y!&i+NFo^k?FO$K5K z8)9)dfAi^|_AFoQg)3JaUt16ETd~`=SB96{v2{vcE)kADcffBn-q7B#CH6Rzw(GVN zvv1c#Z3H$_k(RT~~mUR(8L_wYhn_%;A2i@)o%4`o`+|t+kCEeGwxeK;jN_(FtPdalLmG6B>!W z)>c$;r?$<#>*U@echcF)HZgd!WN)mjShwg6I;1E2{BxPHN036iJ57wguo=W!v_OS; z*8Jqw`&|4Lh(?ErC7!v92av2v}-FGMs4RKKD9$O`U zSNtxDlUu8%6mPHJQm~d2kbV&=t!EZmg+4P)CJ)75BGK7v|^0 zpQUpPe`^No>Wzj)8J6dlRIEHS1Ax?Nxj0JS7B8srqq?LwhjkDagy(rkD#}Q7VtpBv zW&4(XAmAGh{G%4_NiwNU9k8;idI@X&^y#1J>a3Eh5|8uV$N8d-azvyV>NAig$NV8Z z{P(--cFeLS%P^)@;J!t{lXCOvu+>e(@FkMT);iA3n-`396{tf}8#n0Vs!h!ttzI zl14HJaYhO)qfW&8Zf=t?;&XL<$r5(O(-BONRl*2T%hF-@MQS9OzQ)rbyYcO}-_pH7 z9MfOn;LoWk1C2X7Iy&5YKOKLfKpY!2QUi>=ml%KaqAYByKJkx zLht$RL>JPIP`7bD`lNBcvm}gQH|ZQm;&zxYiXBsh%TvCVuU}vEt#*r_}nz3hpfRqQgxN%=(2ggy@!fkv6n09n zXkdr4({*OQ_;7nimz&!E5ndY-Jal#L!G#c*($>&;@J>5DoRPrt-eZ?*qg;{W8wJK7 z!8U)Mt4+C0o}-ZFIUwNG#0qPfpZ5@!t7`;1kH##CKCe50B{VTCCHJ*+9KM~f{@0i6 zNXZuFyJh$w#iatMZ;T<6rV|P`IT2v!uamH-QY5xoSg>c`=hmP<)5%@keDD=-rtl+k zWFs91EnEhJN77p>Hv^z`MOUKKsv-X48eXY`sDp1_{L}vAe7*L! 
zE#)pe7x|6bj?KX-j=EazJ<^7N0vQKUJl-9d`Y-&0h7~4qt7wKHat)L+5h$Q+s-1u3S2huxkrWbw=GF*WiC{~zer#R zfqIG8GGv{r7G4(LViF~iV@_BGJ|_)skU9=_JZ_IeO1{g)xuGKm>Jifto*;r?N}afT zu(5pB?98CEOW=D#~SE zQcKI{YX?`Nxg{i@up}gCW7{Z_CVM5ujL*hQ;KQa@6&}FiSSX!K<~f|(CDS5S(RiH` z4Ei8U80afG+f5`!j3T4^uDpXE*=`g57)eYjl zaY~V+D2WIkqr)l{0B?i|sgu35V$lieI#~V|$DSw`s6CL2IhrcrLROoi0T~3xL*da5 z2xy9x>c6*YVO|#XD%V4;``B)A-q9z(H5}GLiTq+6_;$%g5SeZV_`7z5L4qnVID~YU z>Iw>&_faLZ%?~~dHy=J6n$ACv>YE}JbUX+moJ}@i54SB%_)T1g!Av^ZvyKjck~6@8 z6kE<2f{1dlEtDSaoYFagSJZwO|MKNf8+3apZJ|#4^fuq)_RvqisqJu(NdJ88 z#W2L{%L_W9H<%;5`})|@d^m7*+9Z$CWc2#hmoB2O3I+0Dy|Cg<4z(%KgyDZU{#aR} z9MX7Nj%qypV;}PxHDG%jJkrMCB)fK`dbJd&5GLwlE47+jY}DmG<4#K>B63;60F`AR zx0_IxUk513QCP~p0wS?Ei+IAaDi+8W{u6(mD_Y$7(Rh7F$1yq0hH}dG*ZKPV3$N$^ zC^|n3Y6lm2i7BDYB11nW|H+3_?(&FP^Fmk2UGis0QW5ky`&{ROlr2-YSMP1EzV1KX zm!qEMwS^H&W{CdG5Pv{wv_)_q_~>DkcGh=Q?gKEmOYUtbrE}$3xhh`QP(pN*_^WSO zE>%FGFTS2;*M&FP@W|{^Z$c9Pb>2B?W!}z<{LPQ!deWBc7GTiq>WnX~tSAAnFOC6A z2^s?)M+loC;IxehY)X=<_PF0~n?0$OAGPIoUVKs8;c7BKqI1VQmI5`p(zxmFt*c^o z^*6O`OjqlN%QhDDb-8Su@VLox(yM_+$SUCqCMo*n>)zaZB)8*we{0*8@J5MS_Xr*G z$-I83C`)$37v)?m@jAEMk zKWy(CgWz-sYBa}6TvJ?J)@LdFMIw(WePXdm9;SCRXtPB^Stf)fu9YB!iJyL3x1t=m z1sEj`q2ED`c+W@AXGcugWjma(^LZu{UCLAXJl|ZsqXZukN&*GWH%e?s_Jf~{@3$Y} zGEw~0yCETpgTu`F`)8r?#`>0aEv~XN@*IMCve|```&e*EL7Mnd6kg=**>0BnHL3RK zJAt^|M?~5FMxVwpWy%IFOG&`cMaPT`yl=NV3TOFyOvpho{Inrl?IFh7<_a+hHUzgU zXVWe!ruju#$D=1YkC;4?)})j4#h59|f(g%y3IYi(XwG4vYbX@8C5&Jm|JBp)iIM#D z-+2#m((x4~czpg!xP59zyq!974m3^~Soq2Q+FRM=2ApI4cq%@KSckD+$2yFw4eAo> zv#?q`g{D0+;vtJ`(xro2#~Y2KT7vVT|qzAK3XHQPG=Sd?KEUMdVz; zBpw9hr4vh?UnF3x6IrtYrdR*kiSWi{ zgZ{2)KnEPUc#?i7BaiS$rugRJ;&A5In_iv^l~iyto)}q_tbtG^p{~P0Bp4RXZC@00 zI;`;F|9oLdeRlP%*OU+%=y5?LV|0rMuk0iTB1}w3&&OolZHLpM55Tub%a|#?v=Cyw zBoOXVqe99|=<2{rONiT9-6^Iv#TuaA42y<9O2@skX-8`X?BY>#9(w)84R=zOP~pEK zJMa&m{*{iPB{$XwOMG%`cjtXLaTs@Ja3E*xLn1SEL2+&xCpqev7!#d$5SXuI9*zX> z-rnkA`!RW??(^bmmEYDGU=So$mOEX=$^#ew;O(y3E}@KwX5}sZ&-~gQd>El)E$O< zjxHRQC*rNSkx$>4X9m=tG_1U+i{HUDH|(yTD;*?ZgY{sd zH2Q*ng1J66@q5(ojT|px`XaI(8-o&pgL&}PW^mVv#Uy!(%8@%5AC@hOmGx5x7cP-u z8|SNETG=E-XZrn{TWxa(4X;IJ-zDqwKjhV=c(A;@94pH{MiC<*I* za>uI68Q9IYgZRUkm{^lOI~!7j#Iv11>z_Qe!wh(4-HlS$X7)V$?$e(?{jOqM;_5Q% zIsjblKN)|dpGY~zQ>@R5s*Rm$P(&A#`d6l4@b~-rCYo5?3n`o$(e%Z?k6vb&Y~Tdq zdFS1$I3dOAfv@`_aY+JsiGqITnP{iw1g&@c6^f=iV92;AY4t|+7TOc4wMD2H5n6(d z@#0`X2oG!@j;zwo)qn1j&(VGo)AGu7O;A4HOQXR@t&2kz{_B4D3!kMRlZhAYXC zq>|_?w(T#S!?PhUo?%?@MD6OOmo7PA7^d6;shG2{Qye|24+l;iy|2dr?rmZ=-W_^ABv_vL@b^1t65|B24g7TnpVlv>QbIM{lW zZ_CVsn4%S{@~QlEyo*c$Ag=%DKC?>BqqHYSjP?LKh-wRre(fSb2o z3c?ar^clyMD<^Sva&C4)AP%qI2=1eUTqOxm*~RvkbfPvNZmq6u>nCw@9f$K1L`YFo z!q&Rm;S7Rze~*eZz4vRMy9aHnG0M5_&hWT-;&XYSwgzzHTqU?{_gy#6+MYBn?oWKE z5>{Yyx=@nT1xxNfw$l+EOMMO- za1l8~;!R7vHGdvsmvFVxA{j$mX($77mbTSAuFZU2eYmwnQnfe7xq*=T(YE|Eh1l}q zqR)ez?ExR&z1;eJ6nxgFlPc2Iowzv>%HG{7MM|_i3PeuQ?eq%-9WGf6Z(R*kgdg#r z960%L`0giqWJQT=wnDPgHv6-8aFeT`@Z0FRbLY`splSW167;jig(hLGO9a8rS7r%~a*%Q+cXC5HJ z9$L2;wOEZdEN;G%m3J_ZfhA_MJ2Hwi!X4rx-y97I)3yi7I;+q4_fIe6^k@96H?#ce zpa03f?`18q@T*?^$DjS8GLG5TG)6h|i;9}(e^5sL-1jPHr@vM$X6jqTX8cl@A$+IP z?q4bL?Hfg)exZ=N_^ha!d{l6kPs;b@gJOt3RRmO(_Wss`#;xnu=9ib>dH$P~nQv}9 z|INy^nQ!K1zL}r-W?|-=#hGue&wR5q^UVy+D>F2&%+P#ehUObHG~bw^`Nj;*H)d$Q zF+=l>8Jcg*(0pTt<{Qt`ym0M#nisA;PxHdH=V@NJ_B_oC*Pf?&;o9>wFI;<`=7np| z)4Xu)d72ll&ConIL-X7W&2uv}&&|+0H$(H>49#;hG|$b@JU2u0+zie0Gc?c7&^$jw z^ZX3W^D{Kh&(J(SL-YI$&GR!f&(F}jFhldg49yEOG%w81yf8!a!VJv|Gc+&E(7Z51 z^TG_xi!(GY&d|I#L-XPc&5JWMFV4`sI79Q|49$x(G%wE3e0_%I>oYW8pP~8s49(YP zXudu}^Ys~;ug}nYeTL@iGc+&F(7ZH5^U@5>OEWYt&Ct9wL-W!M%}X;hFU`=rG(+?9 
z49&|kG%wH4ygWnm@(j((Gc+&H&^#jt7nWyeUY?X!Yh2M5R9tL5XnRIn;meNmi0ojfUev;6n#`CraKVjwY) z7)T5x1`-2_fy6*!ATf{_NDL$f5(9~W#6V&oF_0KY3?v2;1BrpeKw=;cPQr_v&%Gf6yNM7yR&_^WRsV$s@n^YyZjQ z|NFJ^c{M(lk;^|m48z~e|7h~{uN_ux{j*c`^v}M3TUR~lK>m%-OOxaC|8(-1FNW2j zz2C0-`hIh&vw?oyu6lunI{bYa{%+~xoyy+qR73r?_HNqMo_^c*XPZ8bX4MT1pM<~n z!{56;W4x)^+Euga`}bAHCatbU`Y!Xd7ydrhzdKcJk9Vq;H#4kiZNBSs zf2F#j-!`j(&w8_0?fv(*2C^&{3nPE%8~rLy@L}xhQ0Zy+d|q4rXFLAc^*5`2Ti>KZ z8_B>QuPYlXPsGVLm~^VHlGyR}tG$g*b?grugbok%>&HGtqba2Ags0a7-5mo`?Y%dr zXuYJ3cdIjf^9l=KD=>-(Yw2P%hgHYdw60F|-SPQuq_Q6<+t!!I=lfM1HqD;C{Omi` z_&fiZt?=-$Y7X@`=G8kKIbQj`5jt7*=UE^9ftd^ao$1#d_OXtC`$M@l{YY7tF9?IboE%@>{p%m zrp?lcPc0ye3-Q3IJge%Fe>$z2gV4o`+Qoa-$h+9{w)$|*@cg_!U;AYDD-#tbRb-$y z^~FQ@zLT&M1j1|8Yx+%D=OFzrYNeV++7oOh>q z@!L8`Ag@mgKlKR$(<Ufw9qX`jXLi*+EWqo^m-gBy;+mOQF8`=(kU(pvj2i2hwRrg(7oK!>S zGyCjXlR&`Rx_-&mt;x&&2E4Uno1o1k>#U8YF%bl4HR>r*C7yUbh?`sp|b1<&`(DoyJ=5lpa zzix&{m~NYuku%FWJkktP)TwrzrqjSTM^)QNK*nfaaPFXa4t;Im*TyZIyJ7txdcm7o zuX?)ZpuUI#Xq+_C@9tEbQB8U9GO_wREcpleH(bKxUWfgty|rCRA{l)KkaNM_+6$|H zx%wS#ln#DdiRgO&r%%%ISF)iDRd ztJSOe3?$kvLF(DN%>Q+JCNS~=r(ek~u zfvbTcByFfc$0z`G1i&;;?EOEe#($t6`r?D|**#`s8$~Zwfx!tyu8q{EQJ9@DB-=k? zQyZ&4;e-7$KJQi?U6v=Y+cQy>l%&UK`Nl24(w-0F7}dc)VIzhW#4z+FIWl0=K#)SH z);`HNJ*I@;ZaXl%x~h+~y>^sppk8-%*&Mz*-TAZc>|}g?Q1!h}IOoXqNc;h&6^_pb zE_y-F@9KPv&*!QeI&Ejp=6L1}r4Zd9OvQHWeo&c5@5o;s^<6x&D)tDrvrjNW&74?G zG;RNdjbYw~i3a;iC-=qbZ67cSQ9TL>V{Z>?2iz&J3ThFO@VNb7DFeI^c5onVoN+cy z!RS`CIWEDBnSgDS8hru8xaWh(*9jBTRsTjeGAsM7>R;%qqNWd=Qt<+rIArc$(&zZQ zZ|=h>C9lRV2+9?l=s2uWU76>zv(FMJISj@2&-nbv*9ljJt<&4pJH8sQ5fgq*5TcX; zL+8rk#PV0**S0T39}7PXQ2Vq6aghZ;fKcNOkgpUYJFI_Z*wMLeq7uAI{LlD~q6v9Ck!^bTCK ze^|Y!P4zJt^cmT!CWHhh&e1XLbgwdG?*hvs;vCr9B6o$qc+=~V?TTjN@>5u&jSV-% zsdfdA(FXP-x5Dvbit}7`O`mCXtC4BJkYWnR_`KnGt8v}<0P3I%!I1oiUc0hw$h3aw zzYI`vb@~{{kuPSqQyoQBLcr<24)D$dlI}(bGLHeP*c-NQFv@Lzg2>PSDu_3ZGSm&~ z2^eiTw-U=FEGApQUliNO*6i6w)%YWw&gN*!IvQ4|#t*($ec_>unrBMhU)S~2!F3YI!*zDYflcKch*L>(64~07fjW%{U<=A%)HZD-C-JF$96;J9czaj>ZU)nr zePS#{Tvy)xUaA(3n5{nPSZ_YkM@Ve5d_GrwPUnd^TaK?7 zed3y_oubF!LP7W%|1hp!d(D4lx&vg{>Mc``Yf$7pu#nny2Ug)NFx!vyF%cm_*zN<8 zV;{;+bO11I`j&VljE{k(ps$Kz5%g8|ml9C}W^Y=QNQ0o@p?BaviqhNjRgjMZ+Xpqz z#^nqWb$tG2)aLh)@B2Z!Z&v#-b)N{Pz3ZRmt7}uPkJ^G-xbI?*K*q!dux{IquES1X zwO4eyPNRY=5f+;gaPI^l4NZmxAI`IHR!=|nxfxWQ12Yp574?C)z&zUalPOd&_0Ovp z{Gs-f=uTlUMn5rFTN2rMO@s08R{_zBX7z8fRu+{0H;ttRLOVcZNQ4vqu zZKLbm>RqRcogSRKNH-THwWudLn0Km6I{iWI5ELTILepk> z)NW@Y0hAOLzUx{zjM3(t0Ql%2c+y|-iFmtuOBu`fTwFJ!uEC}AeiYV1jA0QDsdInX zogRd(v}G~ZZF`Q-5x4rw`21lsGvHQk54Z3_**@xs|5gXba1IV_K%(@Gj9K=kImG}| z$@Zz3z}5(_O&MnckFOIh(V0eY9Qd5T8CzY~(Af7u-dE55NC}iM&d78Uc#11;VrOaN z*Fx-5#`n*3BYZ0wuktshmi(VibI6N$rFucXg+b|`Q*Hq9(*@bL$G&YUrN_nf<0F*$ zLZa!yn7m~Pu$WF1Ijl9 z%9tA@L<52<#>#fVyLQk=>V*;Svy&W2aAXsZt%=7_7X%lmtvrfOSlkdGWHroN4F7U5 zg!bMXm@)!ZTn6$XXbprrnw=oO*iBR7jevE}=*2}9ja3{0U8AcnnyaqW2ne~gDso?c z5y=|c5Nd}1je@Np>jGjrffC^|R?lidp3V_$)J&J-YT&E9+a`iE8d#K=4TjA-Y+I!N zh3Z3}tjQj>#l@ZZ2k!lXz-ExVuv=p`&A?WMa2_Q~^gtaig9!}!h00l+bYp$O;&25k zesRv|pTTfq>l$n6px^EJ2t=;!Ss1$wc3ddb)e|KK40qaL@elCq#l=8E*Fh`8(V=8q zu3pqX=iIM}OU_)YbJc?8e2DWp{xgrZe(A$*WzF}13DYb-bV-84`2Vl6KN*O}WISi@-W*!1@4^U)TOb zTh)bKuQqHW#<+t1zT+-4L3P*1Q}}|pnCGfRB}&}6buU;SIGOdeNrc2Bz*mWUDNx7f z2#{<3iSS`?>K?-boKK_ci$>&KVsA#r!CpPJxHuJb@Hkb%YfMFy$l|cp2XXX9OVz&* zX6gwsB&Pv_PlGQW-$BA*OJhXYbbuezjL^MrPqTo_>*@wxvJU;Z!rRsCPZ#zQw>_6=P zzfdy%VK2*D0Xso4z@X8of#Ej8Y@=ND%&v#$w}R~GV+xG=W(mAgy`qybSFPwO;ExK* zNh_#^-b8|`dzizzo z`57LES?|$LbX3^Y{wxYpJqVhrc*}iq@ev1}3LL^5pd*DXfa2YBqWh=>zJ*VS4F&Hu zzJZ|1pihl_X;?PTq!^!*L!}eOPC7B3D(N=*!#<7@GoM7|AK7CBMnfC|xsAE5=$^RL 
z{?f;akT8zgmT57(NX|%54o^_NEc7uxm!N<>`Z5_-FDqum#L&N)A9Gn@yqW3uLiGh* zm(6O!sUf+f>8m7&?Hd;$;wRA;od6WP=tzWClTx^Hy%G39a9ZJGR@#IYYGV?tGEJlK z_b|7ZV)mf0xgY6diizItn~cTMbT7qYbeYPW2%nyZyx%RPWN23?9SwKohf0M@ql1 zDmj;{%lhX+Wh$Kn6uLIuctYKF6I-T*r`L&)Z-!x4L#~+}*S$?)My~iTRP#y>Uf3Xi zi{Mw1n!f&UAzP4+MBObGBK*@?Kk=`iT1!~Ay%(uxPr=-@Z_MkS{))0C(lh91;_#c- z)sk!zQ}RPri8y-|zLTB!6a6?ofBUWKEyIMU3^s3UZfto!h=8cpQR4t~fE^Iu@J|we zR@%ij^Pcv=eh7mNh9fqaFOoP`+QES>dQIQt%z>;d})r#|zqzmQ*RHeK=2jr!I% zf{4A~DA?s|8Iq^=;YC3i1IS_OW%P$pv%sJ<4N<}fAp?oM zOWvMREk0wjDPF1GR+hj%6PK+orr6UuBZxH8vSOnxoP|EOFhUb}(wh!l#GNlRM)fX^ zM5P4ZarE+`QHcR-df9xns85!VkbXdX417_+uJXYb-Fy0y$L;ZCo&;yj08)K2WTp%` z65;=@QvUhs)mMEk?uNJ%x|A?|oj1`ax-P-as-CN~CkWCGlNziE^p$~4atGYG+EINYQcCF4e%Woq%dqmN+lkl8N6<-!e#0XQYoI<efsEz zxMFk}L@O}0;Dze*`aRuvsQe`#QgH%#_Y&WB>#QUS_JX`QkYG~Kn?#+{9z-SknNPp? z#%)H0Wp#~431EKLpKNlX%9P5l(1Ak}pP{farw_kAl2n&qFGdUY& z1s(l-HSar4^0;(0u9l4W@h`1;{0;h>WR1vn)HjiOhwb1?$9>%C+GlbTJ6$-vJ}ttc zX&k3n*@=^Nw@d^H?vO4a+UQP#ZJ}iaCHp-IAepMP;vrx-zkN-(T!-DRLM&DsR|+6G z>Jm5y&XH(wpobXhIhW2PYl8$^eS9!DGq)B!jQ9D<3}$A?c(>5*@IeiOf7d2Dz~Qq$ z)J2eW)W4bIuva&#w)xmaCBE(mdkocPq}@}-@vCAJYiVoA@+0|m;ThGncDmkK-8Iq3x$W+xXwTrm^ild$=~qc>1oV> zVFR+SK>bZ2I=qC^0NS?U_j7%GvuNGn_gk1gO zG0PD?mi!NG9`8P!RXYX=2A&SVdgWBiWSK?%JU+)05t2WOR&4xVA|~1*L$u(Q#7}Kp z<8#=Zo4Law4JuKL5~J4#AcnGWf*aKh{Y8AAYy|y?riX9O7y^kOeP;-026(%D58^2` zZY7e}Hm96>@o`E5V0X_kC4xod3j5o}C5Ju?s*+6=bT*tAWMwFYooPe3QEe!2m#Rgb zGD*JGza=@t$ABSC@`+;x$!zK>_KO*pKqf|t55r0Vv(6rBo%d^bwng45rIP9r{M$QB zv_bWR-R{q!6?TL20oTP=*5(`4J%3W1@*$W}ShSbKAW*N;1CbEDh{cX{*#Zk`c0J&^ zUy>XI2MQlKQrtIamH3q*8HY43IcEXlUokBjz7+#+U9=q?$UD{5!0_04*o6AqWw%7? zJ}S0T!bGDa>tX^WgPepxLaK%s8zWE0V3pjNTNKPN0LhDJnSwzW0c*x1!?=E<~!OXFdtKeCL3Gv0Mm}_K=@g(REU(wx_K~04*UhZgkXYsSwE~UDN2~QJx}Qz zpWlkHcA79}H=GI$TWOJ?yg3`iUD`6`WuaQ|#z~4bqg=Fw`m`O)$p!{10$wLoFdE`| z9Od_4F#7sO4tLQs_MWri5LT|W%~vbBc4SPJ=+s zl1kt%Mjpv{RrA2)+r%mO3vYDb$4Td_Wo=Nr{q&)eg>*q1_aP3*U5B-YF4?35SMHm8 z7k2ENiTYpd?$>W;Vz}BfHtfk^P{l|XaN1X$A&J@cx4JoC!1_KIVub!_pClP=KsZZ~ zx}qZ4JWbNc=KJB zv{**T5>ayBc^ej|R8T&^q`9cB4k(Ez#&IyLJ#)?R!FD;jIphfPi@4dfGs!)(4#WiU zA(P-~njW3i_Kv9ImSX}>6yK`02TSpy1`E}7U8=R{#TEZ}*Lc0SDL(S)5J;W`jb@_3 z`?wEeInDVsl}$cnONs|D@O6%1ARmLng+&G6~briau&_6ZAGahQD6R*jD7jT7w?vKDSmY)RnE4UU9(Uv`YMUEbKAQJPP8u< z+^EG>-LeE^JT=Y0=5hNPJjUlARG(Ag-zDYRPHu=Lr#}=uKnGkxqy~}s>W25*=?OD1 z@!F`iwT1EdR+Kx)i2!QizK+nll3WHe_wFRt>Yvci!kG+A#K8DT6^vdWU~+8AwkW?% zcYULj>jKPIOFp&8*w$g#r{q)^l9&dQ2u0>RG2RY`gc11KmAHow{^f9d8S~PglUxfE zPVBuHH!E%6*h77eOt*AL(5@P?-DlXKQ_td0d9R>JjA+O z*E}7PW`ZUCV)ddGSRoD)R2edW^zAeE>d>pMkoc9q=fgTi`S3Yp+I-n3v?*OA3-kgf zIu-||m`pxjl0~M=EtW^%731><(S#x$DtL%EK&He=Y%9bHx`H%=b)V$;(a@&uyxBw8 zpA9-ec4G$^=VTZ9yo>*$>nCv+s%4!5q)y56@Ml@0M?r3g@$bh9ikLFR3HmIX2u%U$ z>xR8p(80SzkjYktgnV73`D)G*BhMvQ2}5+mxSbqHSs+f~Yy*$4$ctT_Yh(ZtJ@~42 z2?2D1AmF&c!GXU&7#&DXQDCBS9tGn;r{tCAJe=4OM+yTF65PE9wd^!AfkczGfC@DE ziI0V9N!OSG`b-c((3VB2DwxW;8#CgBZt%QK1gs73*&jXq1xFIJb+|y=nkglGOv>S` z2$>(Oa*)~>Z%lxaf@|JPWAtqkfD=<&iEK&K*n)T@mW?E2r3<@WZEJH9NYlSDkF4pJ zm>_LT__>SI&v9PvbNW;Bc(>XMD(#*ZH(Rf6X@3jRNPv;CGLE5%@f9`-+rfmwy=pDY zo`+>6wChb2y(dNi9{5S0t!hoj_Yy)%37675?)YG%*}h>SO%zMD!yg1Jvl77?Lnv7e zEAntxhr1ZE8R6&VJtUSYktH1v`lg5Y(6x_k9@!&9`yhMV90Tjss_q0NDg3c#0s?E> z)PaTSM(Cv-w0dS+j)9oi$7Syd-AnP$-su_1QS(K?SuBV9W~+BAyYyuq#ecu7bNsl9lL}euL0i8 zm)?hj?rZ}#FubRpo&-}J^az+!p7)6rwJ#cICHF3Rc{p(wI>XByxaGu_{!1M@49|Ny zBzn&hJ2e0g+KF=O<00z6l?I1x+J{%AOtOgm{4Ghh3C3c4-#?hN2V?=FNYocA?SeTv^xlEh@_1t1j;$g zV33t*^BF-=8~)SJa4TH65p;?#dumyuTGj1?Y=}NY0f4uT7U-JxiDw3ubS(35Gg@C+ z&ZUUM*3D%?K|zf|Yp~S6PW4DvPuvo^C?YHXi~hutn5R@k-pKYE%A|W)QAk zfML&-+@biraqGE~E6E7)kkjr+;<@@Zb806bN;FB%%_z2(W8@Jxbj;#D0?@&j0Ko5u 
zad%NI+^dT~a~oXFm73wn1jLC(1*n4~3w9ay+e6gzq3<`5awaa|Mm(kbf8DD<$8rqh}7 zuZy=w*W^P_gFzRM$r?L4RA$){ebG^3Ku!XA2I|+Uw{>16qsF{+i}9Eieskv-<8#i? zYd4~I=osX1X!IIlmK#&#&>=bJ`Xsw@Dm%2^m)taM6PD-NADFQDB9Bj7z-4#f-Y+@q z>+J8OFZ`JjBR(+VEefU@L%(@)=q6kHNnv>dXxC>%X$*D^NU)9tZ!Rz3ec_;+c)DUr zA&k< zpPq_UJifZEuto7t45xx_WR2>94qoaEnAc~?!{H_L?Dj0955fqXVQ45lgc%GMC~Twwky+&H zPc{gh4~8VM_;MVhuk}JeL0Gqi;W*;V<@LSvI~Q?jV~FNmf=WxVFP7?} zGsWFhGFD3+#!+1#K%X<_yM?BT_ z9tjpLXouJewpm3*7XCdU+Iz)40i3|^HL2DDS;W73ofyI0J9UT!Y zGv>Psqc?HJW#g(#z>VR!1lvKSf| zWyFJN(Ng@;`YLBpV@fF-<&ht!Mt5|b7+=S*sWgj*sDI-n3H0N)H{E}HzFGBMY83x& zZ^{H3pCbi>qJVAUNw|`&WjZox{?jTQ9(i-UOrM*R^FO8#%gGg{{~&ly=u$jtzJ_vW zKP(SK3}R@0gv&L!!=&DhJ5p{`*L`2sQl!#UWBe*QM<^OWsNm-b2^lM~1(F)s+6o8q z>5(hiK8*~bYA6|YQ(A{4-RY~s;bLI|tirHLC}b9DK=0x1YnuiQtJ&4fCmKHlB4WLn9b zb-m+q^?9GY)hW$mNGfE|bO{>x=muCla10&YdbRFzBGDVO4PkJWZgj&-WOfh_hK!;= zh3S_ZLh}>9SNT!6cD1V{>`O|c9QRNXHmm#kTgeT09Ngq`C_J3cw>+lCi(+8Sw7WZgnQad z!bziUX^hYBdo&0CiVlU2gFNqIRAKrn(a+V47`Z$B!&pE=z-bP7VJ*s)U>|J#$lk=- zn_6-F_r}E7Sz-tn8pY`9hQxfBH}2$ti9E^+>M(FDnhr$tww!iwtpK*%lGC^f0=dk_ zK8(+Adg&2Jkhq>UE9R1KN@Rg?LTn8z6c=FNnCuY2ukk}nc;kR%>A}6CH6g_fn$zNK zWD`G0tqx8#@sul3u`VcXL1T~TUNO~;2tx6%1|>&Q5c&r}IooTKYw2gDgfEB4rKDPE zr_6-8z@}8nyCH#8+7Fg4AC2QQs#`jdqIwN2u@prj>X*kh3!M3b>n>^BDHPym}s=#7LI?u z3+}w}`HLZ}z%g+Veo7@8h7L7Fj|vuo z_PAU_vcSLOJc5(XJ`fGYi+zd@{x6lMOcEhNWCxBHKmr_dUs=kVQyzYna7~H<^e#hboItZLX__!Rj)7QoATwBgakxD0K zK?!U0Blp;S*D#_}Y*hT&5$E0!FA&1y$ma9We75CL`Ry*DvhhaJJR%sOld}3a)>AE>E0d7T&NN z?DKsimCJ21nLaWe{85Q2c~w))2RA8UBEPA3)9Ps|*YD8obOx?gFX?dia0u(F62IFU zDZ%9N{3w(bos?A6A4TPuN@%!Lz2zj*=2Xsza3P)2!l6t?bdXU@q4S6WSFdL<+1W}f zg3@N2cBqdNW(f+cHbx0gC_T72=w#pAp8z$^;Ing10qewY@0b3GL&vdok?3nKL-ukk z1KLDiKdeKJnAD^s0aPcJg&BWK>4B}XzMj!9+7NT#Q17E#mahUMN1JW+!#x?|lQ3P9 z6lCKvU;CiCqVx4mb@{5RUJ_yyKQu~((?#85TN$#|J-4_pe{vCNEpV?Sd#j!#2>eD1p4q``jbN& zkuQF+st;{v6Cr5p#aUXN^0WT`nKG8oKd8fc_Ki=!K}DlazoBG_L(E{wh(o3gBWVWL znH*K~Kpei}$V-mVuTADGdA1&|qGWYwY@JKSE=p#{pXfVWdPFyG8Nf#dQvXy*U z&|7h5=&zDC*$i1dWrueCwvZRFajxpREmk*t)3i&i##qEb)hK7b^{%Af2L1Bl5hoRP z7YEF|kZTmY4Dv~!g*(pTyi1a3v#U`tk=`U#q0{` zMjZ+@#0RvzKYegRikv9Ar7*UyQc=A6d!XN^>$r{j&O-P%;%sdp^!z>ekhq0OeAaeZ z>Y!pXd%|qJTq;VId(m+f#vEyQO3&uG#5U|+THcB-&xsPz3*Ij-GBB+XR*XL4WM3fU ziEa@xMMUebf|g_BP~hCE*7Q#t@wz;c6yqZ$n?s-Bk}F?tIS=W*7d&-Z?aP{CG*f8S zMdIWy1f%?)lPmre2-eQtuHN;{f$rIB2h$Lv<`n0#E{9|>IxTo+RS z#Ak`9aX`_#YO&XSABy+Ztw!<39tQ!%vX|U4Z+#mEqT;%k3~{Y7j!ZLJam$dDKArk! z|Ed~)e~P-IfB82s6}W%L1TroGlAbN($w#!aez2%WIqy4?@y9V(!MpYdOPaD4C@*jr zdAT?lOf?-;@Ab`&!(a;OTo*~(9g|oj7VAV5W){;#cHz3QP!0~ouqN#1Vo)g(&{6fz z^{G%6+|<;_q@kCy~1UoGV)IsWva1$}||9N9~|KVk-(S(mqzJRs9p2TL1jL z>i4uQ>Vvp25D%BViX%fGVh((wBuwaZ--&L)Z%HZfaF<@^Nf126mmdpOP@r6bAPx+v z>D(IGVZ-)GNkRAOaxnf=TVL&K{0B;mxM|#-4$4filXT;Fa0!j!C>DF;oPLa;!&-*M zPali%Tr4x44eH7G1pj7r*$j@tM4h)5uIi(M8?-RqbPZTwXLMM+99(gjt%`lG{UHOq zy2CMBeJVp6Q`4iE2s zrG|)4W>->jOg5L(D>k*LeNszJhd}j6tB6~!F2D@YX_r|aSfF5X>dO~*eC{2}>6SQ; z$5KF{!y#Ii6{WBXo)PUBqz4~kv&YG(cotcFdj9exyxp|?dVCZmn@ePEZ`s-8p53hC)gYQ&~6(N9{cA#I*SCp$&l=1 z9C9*bXyL9eDxGfR0(isA7yRN!sTmne!UpK^Bs|GKI1bfS*cg6674fQQor)nA4pfbh z1Ri`7^LWauL2xSu{C7nGHDOnLAYK~}9k^Jo4*xzy6_G-^xe4Mk3Ue1qs(Z*};Aj=J zQ0-smg4|Gm?#ozQSE#1mzP!M(0-yO^@vfV{m(Ac8qKF4G=#5+xVQ`nwyh#{|W7?jX zoOt|}4gmCllmNzKRvM0In|H;_jDX`1RRHwI#8I8$NR>y*3_NcC`maxP)wsX@L;PJ> z_J4?uwNG7kva^^{WkvUdXIV@0Jog?V2D^?K^%4- z``eL%p^mKM63(`cSF5{<0ldD#M)f0Rm3Y0CxpApk@OE}Pn4f+uA6>$|Tg4?F%p~z! 
[GIT binary patch: base85-encoded literal data omitted]
zLY~9*fe>?3p^8LCjy!f8+(E({OLSF9Z@*j3Xir8V_ZQo>rTE~s5)SdhN?TZaBc3QO z2Dn%}1hk8}PAWTA!Po9A!3PCD+r?FeX^~(MO2n*-MUo}4Vkhmc_YA;G{ZA+LcCJ+d z8u`mI zF8XWvtc6l6Cxz#^2ACYWKE_w^TZzlkXA!eNw~^t&$BLhRObctv1a|~;B`+R4Ntt*^ zZ|?5PiBSq)L$YG1v5s%FH!POw8X!7?7YGJ~Tz@wfL$#KZfL=hfY#%7BFQ6mRBhZg8 zq!%bKfhHLt9Gz6VKPuS~I`b;i?J+5ptWYXe?S7l4Ns%);-tdIM+m4InUK?=-$VsMZ zP!@zq_{H8|6!I~#wkJTg^^ONesUcx-y!qVQ?Po?~?2&O*D?i=|j%l8JJQ0mumuQ=8}GN zI_$Z>O!|q& zw(`J9*9KR`A|Yz^tn8{Hb;9vDPRs2-#q*@ru%(`YbW42h%l7k5AAX4^ydLQEe`7B5 z?NX@C_?P})O#1QvO?w4PRZq?_Q12j8Gy?`Q0EqUYRr-H<^iLi=(b#k|RzvO`t2O+O zpUlr6vvE(vi`A;8^v_l)LdiUf0R0-P*E5S(L_CySLyTb@vGrE-M6wQDbB`=-fQ_bzG^}Gw7v>eVmux6M4o=A zic3rFvULMNP_Ou?cn4h!yppK-z3YK0gLT7NjNX!OWP@sD4|9)Q8y zPQ1Z_7aMpJAcKB0K)108LgcIeCxk5_aO`ryjN~f#iy|704&pIlAy>TNDtHf>aj(Iu zAXQngLJy&B$atz>Gak&W$lXTh62a+1 zn2mQ(-lAX#66gdOiJ>iG?PwN~Pz^eoMmoEDp&s=IB1iYZB5 z#Y3OH-JVVMypSZUb36^ZaY*-`_$ciVs#TdjXn6`pa8mb+17)BXdd1Hz> zLs*pT!=7;*{Tl`8c&F)tT!v)I2U3DT{vqUysb#l-{Xguz+jCvnapoEQC%9*NCMM=( zRjJ&ryQ)a4SS4AqK>#eG1dtE_7Nu56<%9bv0yq#ZBtfYRPc0_yZr|Ev9W!){= zk}O%Wb&DXO|7(8V+Q|I!lr4>8dZJ%E)zKCY&OUpuy;iPVnO}aHiHOsIs2ukcPv^Xk zSypTEORxA7mzN>*A}*BYwzzE4YfRJvBs=CrPz2P1m=guz%L3hLD{&#Leg(e$F!hj+ zpQ*UPdVHl`SXB=}H_%<4bHhvi=FW|F%Ri+W4nPyGOk6#f)4O){ND9@?Rg* zIQGAtClrpE4A|tMt%7|hb-!lr>)0#_fp94>0BKs1S*e#<;CKSoq#y*dJLefMUUqEa zciT(86pr#;rX~iv$!P}DBfeL01`gWFCZvqg-dTIdh+$OPZ8^OWwvb8qPrlI}e<$w# z7Fsix{`}EH;)Rwd(jh|(Fn4|?CeDvSOi*MvN{Qezt?ZGa!+NU0c4}MdjeRl8W{&8Q z8TTRM{TGZUB=ZH+NKXX7R%Bo(_z{|Pm4KOe*75cn+^@!4J@r(3>M5@<)rX>;aXjal z_R+Wzrwe<j=%)T(L`K%pe2ymE-n#gOM)K9 zm@UvbJT`12^1Nt0Qu)mj+6yV-<{j}Cij_f`paL0gtBiP{+9;x4!U{qT;48#($w`dG z_wLAd7e&Pso$b5r4)h;s3*kR)YAkdET3PkPgriYo4%K0px`tLdT# zjnTMe`la89ajmquSXJDbqq{N4_n&JYk1v$4Tk0qsgFu`Xk^$&mZ8D?e(*ukUPx%E^ zRI3)ZhcHly5X9+WCfU;wToJOX;5!N~k1DKURmqCMK?<$)G-{H%I=YAk7K&QC%>@8t zVxg@b_)J=cS%6&N2aXbnXF>MY$(Enwsi&qoUJr3lQ>jtl>5K2gz=R8@DY1MD$lf~! 
zAymcMctt`uLJTuMOEECYEZVLcy!phtm=H~7+>eNp_I?d7VO&>2KQ-_a81=Bb5mHu& zb5=X5*p9DIF})PNSnFOkOXMx0G$9OfvuZeXR(%X2+86aj{S%5PYI-X_3gN(cNjU5> zq32jH;qa5}vC21>V!{Y8^r8^H^EK4}dn%k^37enujp%NiiIwM%UTp99m+L4LxKI^M zl%tU+#vc7pxgZ`r@KvgnCDx7*$zx_Ou2R$s>ox7!WtI%908(MB0kQ~IpZ2$m=FfP% zXubrK!?bFG|=O}>6z)Y zQMG4?>!>-@DZJE`^qotj+Isz3rg~~=@4t-}%*`IgPYYbJMUd#+vWA|l9MQCpC z($$NdTE5srm_m};m~^4p?A<+^N5^DMTx18--67&G3+4wRl#@iI2E+A1w8Pz_6m7NMB`&QeBRQkM|ylWlT}K+mHGCmcq9EzCuuArS?3ac$!Lq{?bod zlkFUX>_*;^kGFw1(uFY90-LuMsv+}Vq@>EF-Z~KUGnJ&T;OhveCMN#<&$s8}-|) zcSd6h%1r&x+k@yUHGB8UA*f8Gb$ex_^#k!KWnm#T{^3TaxNjGe3aoQr75So$LW50u zEs)0Q#tlxBzlK!o%$84)yIK(d3ec{57AHdkTI{Z(?B+QkfVR>JTWx1K!}Ay=)h3)F zH4B9lQl&lFi(% z+{X7+uJS#fO597fh(k^kqseI1=f3!UdpR`!JMFi6EEZ!MKYBs8}RXcl!}|Y?Fd2Lk7I(`FYN7s=|c;$_B`ArCem;J?VRAG-ASF zcI?^9Ct8N>8f&%yVUwspXdBY%%6?wG*_QKAl9F)sbRw{mbSx2I1ry0D2zgt$`DR;< zXAEf(uf@CEY>OdmyhyK_6^|FsC4NPxO5HN<2m*2bQ!gW#ZeR2#8Z;zu1{ryGGRe2l zsReH(ZljHmSl}ERF^1A21y~AAjI(H|-H zA2JKBkVi2gB`8HPAO#L94d$*uovl(J4dWJtjkJw+(7mz6a!Iok`chb`%OsTjBxoD!)CHg@n%*-qTO_`mK_XYeY2}_fW z4PFsruyWlmTys$T&}|Bdsw3k)&&;u^q+n;n7Cs*G?vGE-Dy3No4+jO49Nix_zQy2^ zZQ()$}skI9``X{fX~>_W=4_(O8{6 zS6*_M`4H{f7ibKRjm>caLnJLk75+TS73s)0uuKT?AcS5-&QU23*=})(C^(aBkh02g z?V*xR^?~-5eN*j}^KJu4M9suBbUTn>Vtl@b5PJAx@wBm5F1n;z`P`VlHwd0xZ{V%$LOT(fTnq#JYpV*rTLN6ytzSdaPAUz3?Fs zcgyo>>}IXUlB3)bVyb{@mR%Ve`{6raWr9XD7S+@fexcD*eI{9#aKK`jvM6m6+sbpKc$De{Vo+jLXRJ<0liD!YW&^vU*N^?xbVgIB$?*l_dyvKlwIghXe<3XJbIn^y_ z@Gf!_Vu0eDB~23zx$bx9woU3|?Bjc-VAYpuItCG)PP(x#MMMC%ySyQUG4_OI9R379 z(Y}bKVF}7M-w_*Sr^GmPmSEl0`t(DxfEfca+S9nlS)Qbep;~pBVRDACn(n3o>Dglj z9*W}995bE;#0Z7q&pnEh{;tx|z=&KJMpi&8>2ul;NJkzfmk2n-^5|KmFB z_;)BwKUv|`Q4t8XTgNQpcAd~xtD4?LE0)cx-HgAPO6&UK3@}YhKPOeGDoqp*62K)G-m7Mt(aYJHQpJZlIb&7s5j&D>FWjQ|3&Ty;vF`Xb+ zg!&pM6PZZybP0+P1F9TVuNo#?`6U4&Q1DV@6s;;v8@D3;A4MC}OyKs`66rfP=R6H7 zSfb|UBZeR&`=n%JfyIilq0+!n>3b^*PIl0HsEzTWl+M^lw3q-vrC%#9NKw9|fXME` zriq{4+`4avOo;>%I&XYb@N_8eIvTxnU6S)*0cJCuu3H{NtwUD=xcbFsMexgsT4u*g zvdq(ByY73*($Brros9ehRS_xmAbXDGIOD5N9uey}&k)qhFTdOh{@Q5}W6Do>dFZQ) zom9g){|P#aKBIp7zmb|F?2JNiicn~D~%#L#X4 zGh<4ITnPF~kCD*n@sutn?yb&B@moBdyOfeN>~ztKx@()bV}C&WVo~tOR?7Mui8Qeu zLx&WZuCbjo7N3eJ6#VN_3{b$#Arpxwf&~;$7@8%QRVGOem0XYMYwAxj-iJdKlci$% zikZh}oRq7z8TDznxOF(y?3b2<9B4W=5R^VaK~%mR>joQ>hiMk@wOri@N4sYeqH|0^xuC&qL43)g_GbOr+{`w z)F=_X2`1H6$!!;B7|JmDK>sz?gFwkj>=I&xkmiN{T6T!KDp*u;O=$QXwoH$a0!G_H zNx)RR4x9&suXco>ed=OjYC(ekH~$_$!}3cH6IUPOfnZ;}*QGbw<@h%U#pQTMBuL;n zp=$ql(4aWj@dNTcE`|7urFAuCn|;Qvj8DBzQfX~0S&*BI&3%~16RB3pzIL1vAQ7++bP=~vB&Ur^)l4z@1pWJ0xOzhg)t#cAR>>QV z2ldnq>f*$t3ZFPhV|(^Sn^}ek65j&KLcNSnirtM0t4R!id}Z;ysKgm_qOdhg56tKI zHR;*^RQrUhfd{5!4JiQLqU+)Zm}nS6`SN7^k-`}7pebsG$~sNpHN*c?o?!zh2{hIl zz&Q~8~-dI~sirK!#7MeZbssg;rPoVWBRub(*P zTX$n-o_(pkZP+<>GD?>6pDJ30K)W42V3RvJPmSx94M7j=(a;71ZyJ zSuVv$c^@_Mr7{C9VECp&Zk`yCCdBZxxQ=!%a{abmh{^x6^FRAn83E^yVorPChIFyd zAN{A6rxdpB_+S!KhejzV(<*7VU2pIl&%_oRQz7$2mV?m2;zAnBzXqcsfA4n zz)9%>exFsH%Fsvm7RW^G9XId5LqvqI^FmN*{1H!j|NUp7e8;DeSfvFq*q=!FKw&Ko zdkUf#3vm^(1~!WcU<(($_!en`A2hWomQw?l*NU0UraRavqydU5;?%415Q7hS0e@yp zd0@+ccs*VW7&hEILGj>MS#jJF(fd6zKc?Ud<4C{*wwzc(`HC0NughBCy&X+BhL$JKF4b0zB3Q5YF3IZ z0$Xb(<_JUaY8I`*mX{)M-r2bCKTr$7`GPLQr zZZmj8MbV*E4z&UDXy}|KR1EH?$8mfp{?JK^ta=XAe(gXacts&r!hniNgF(au)^hlq z<@gYpI;I?tf)PskvaGMN@yg6&3^EQO-7yJ5>L-OMi%olsH&xj_D*!JT{9X#MP${zz zh@RQ1q>5@LuBv~rf3m)(s*q(Q=p!V6nj!_v<;1H%^agciQNN*BV9k_b{$NW)b@ID8 zC%Y-|oi6QRE1E&cCkZVth+`YGK#(2WFhq!!y`hwbi;ceKK32oF)>9=q^6UsM|P8>%3Lfz{5KX9HU`0pee@Zz-mR ztRswvnh|;pQCcU^=hvhC$V3iNVM_Gz$ka^rAy&po5+7FDY>W&A-?QCgGCC;2G z_`4C?+P4L-lCttJX1P&0iLe-f!HN9&1^~SuiTO~%s`w=SJ+ZH&Vlac$Z062D$#3Gb 
zI3E>vP%Tw#M$w6kZj5pgUAw!pys8UI-{9`j#=Bo8Gs*+NAbIU_j8EHXukDtulC+a1 zj0sBUIk>M7!Dhn7ta0tu^vRkTmC4o#7WC2?S@9R@`s1IMV+j`xFD!*X7@D&q1t7Lx zeYsq|JCOA}dX;*pQE-?}>~EiBqSJocx(d1F6bExlRmzz#dEm)em4OoTCdK28FXOxo&=qvK$Hc zsB*^m(C@R3Wyhcpuzzfwto4zID4v0_yf)(N)jtI6t_%D zmw8_%kONW1Pa$xDL&v#biPxaB9! zHSpxgXDLH+4iv75OFNsr7KB&I$kd4;zD%52FSqAkj9)(arg){_h$R)D=XzYnXbL~! zdYkvDihGZB>FJVR9Jdrff^;H(gwf(!tJC31~*fl;M` zp2adEIK7e5^W+=n|IplRg8XJju4f698Cq1Q0)p3F*6m)AclQx<&7krsk{{EeMCYIT z4giFcULU#>%0h~IQXEiVw&cN2g-<^@kn``^=%q1iuTgYB3pwlngz-IBG| zSOf0Z68z>r$pP&_zj%ToJaaaZ5C%+EG+`oyH^7Sga*`D+!m!X@Y|qDgs^dVtk^-;j z&veOAEZcsJQYnSTzx@E_?(|D#<%=huc9K86m^Aw9KY(`V(I1#FN9wWfsc5uYT!%? z5p7$&ops}nIk5Hrz>WE}hMPybe0!xW-*T-VUVP}p9K+~k-b;xaSbrttwWlOWGhUUT z?%G`V71R7$fJGnP0syD7w%F915vCkG`8wTb&VOvh*F$}33kdM+;jQ6!hZukv9gX~Q zYVM08RO(c6$^iWyvyLr2&r#iXEG3vMH&`K1ok=K73ObM&x}qNs`O4_YQr}6iSwbq(#-r4s zOA5r-Lzh{oJOLPNsjNpJ5Z-PAABFxFy2I#zzfD;)Iz+0TTA+($fhl>I^*7PFU7Gup z{V=Hic&QnX&lPK9_(%iW(-Sv4;DLoT1xe>jut`x>y8+-gWH)t>w$91X%4#pvcx>$U!AZXQ={koZcn zOHEFK@r0?iK1Evgerc7wRTw4y>cwK zw>?B^YN3LRa(}ixAKUIN=UNVxfy#T}H@a*T2lj$r@fzwsnOW<+gn}Sm$xf(c=?eNx z3tWO>c}A%F(KsDFn9i4cbHTcoLb!UDiZsq3sb7hyKg+ivbEui(jC6xHy zkp{MRS+o_VjNf?u)w4AQVvH5&^Ip%E%hprqdgj|aU zszkkVDk^oO5D)gL{Le`a8*O$r)YG%=sgQl476PtvGbjcqhQPMB8mJnkzx3~T!zhSisEVr(VfC0aBu?>R z*2Y8G?%uVZs3JOo7AIDh)jMn0wi+abx9giZ2*qqtuL_$8=H_ z@BC|ze;(h-NGeXC$fU6}@>(}igAxlyVffwY^Mt<%xfy&WMtG;)ipO->JuNChuu%!u zb;jM+rni~us2!t4RJJCQ?Ji$xSL2RgkN6O2oUf=?84hJl?%eF#p^882RV`3OeGt=( zZ6cQ0>yw+ip%7#t(Ljvp3IJU4o>)RhjVMH($Cqd{JlKQ9582rDUhM!Zh(S$?xR0pI zc2CP(3m3C5=XqSPmL4vhY+B6b4-!N}W@0_Bu`iS0@a8p_g2&=G!$J4$WeQk0*C)fw z#C^^m{dW6A>|U~}O3WVRPzw8m$i-#RIFCV^Gt(2{T+4VO9fPiWux!zvbCeE@jG9s;(H zAHlvOOp2pc(Zr&VQZ zk-L}Ma1q}<1|?)`vL2sSIXBJ8xI~@F_(o^Vq$KdO^F*>Odql=ITFP^~Dr~-n4mua+ zG%g^L^1a@5G~QyYHXA3!KSVvV%%CAYEUs(MvC>nB#o;bO3DzC*y}Z(H#Zz<+qv*D6 zEOL7IOL+bi*D4j8P(y5pgPrthv6!4+Lh7%uCjWkwJO+&CDG5-c0zl@g|9yy8Y!YSC zfjFh#eNo|-b@Hr;nzJz5ZZFQPSfe9f0#Ex~#m&U4kO3G!oxMTezRg@)SX@~9Ou2~c zBo`07$YzRVA~ypbahe?*gl#`%O({0)>FOCBpi0Yj+J#T(c*{DsO8s`}Bkc{(Xc{lH_-qMPeyU;P=r9zwN`I*AZ4F zwUDP(G(c=afjYHiGzyP{i%G8iaA=-4)H7n`;wH*Xi_u3A@yGF{_LkoS?ubzO)Un$7 zv$Jun$UQL{Y?XuD))X=OqR*2Pd(DWf|Bk8ELriS0!e$+kzl4SP%E*4@?I602mESRzk zP!w@FBuh#9Q8GoCOi~YGylB}WKXb1zaX5u?MfLRmV%bL`$q@I=-=0})x93+amnt6& z28oV{6+v#(|LiG1t~G|R7it!^kP2~Hr$XM$qn?V!wofoc?0#?<;Kmo{jtWAE>^-XS znw~$`7S&6sHxea8B7`F{e=5Ss)Hw z=l|o0Ov8&Re^T_SD)aRYaj}ZMN60}&``+Rd@(x&G)u{HJ5~ft`SuX(#t{@Lf;Gd9H zFDmX={TFOBt$m(IY$8>%E2XT-{7e5tmWgI(QW9KG*o@ zh4>QHItC$f2@2)2)r4u<^U0;=1!64nb@M#mqm?f`9Q`U$K`s^mhYIilhS(_8n-W42 zIOSM!36;;aXNtwc;Y0#!k$$RdhtP;1y0RJ%Ax8G)$21t?|9I-L%C@nlv2aqLF1I(z zPyXWZ50onO-B@SJCdgFrhJs;Fs!*N?lbt=)6wmii{&I$L>CU@DMAAP7DojyJE@!1oS*k6*e&Tu}ER-ROfp-RP74Th3S^sz@SYwOoz zVjn=9zFzbL>uXiFxfUZNHR;kT@hBGSCt5%LCuCv<&uCoIw)VTf9g}#YSqolqR2P&3 zz)qZ&cntkmPfOAjzPI?DoSI&P>kDh`=KShvJb4w%WMwUeM!2B=1~)Ed3Nu$Ow#9)t z+HzGG_J0q}`3XxX06?xZ6hRRatPIBa#j*`(7v|i4F^^fsrR}tkaHHXM7lsfa|s;O1^C{};YC6);ZXB>1z=2RZ) zqMb`f7`&gjU-w+k|2noXFDcMuy@;m_W$xj%=4AY?wE&`{q$qC#6$B~A8STpAS)9-l zs#S#m0efU2aUJET)VOj8 z?Np;8p#T8b%Q2KI6poB(;jnL1w~2iste`k0m~82LDCsON6N~I0;{&-tN2Q1+rAg5} z>FaC*O%J^?yxoNR^nC07g{NF30@Cd#cVl!he;o@nTN*-J? 
zHQ`?joud5m-Qt%HvVqzi;L~2j$=7X4w?h_7%efcS^ZWAlo{2^^Pg+3So0hEd{n6xU@6r(I4NlZRTi_^~*!C9_SD+k%kaVx{7ZcQF~s$MS=E z1;%2eBM-i>9ENntNp*SER^d>_3Og@hLuFG@yx%*Qip@K_>ugMJA9FWEC}P~tHOi8E zo;r^RdnezkmrS!B7pPZrUnofc23!?6tR1IQknc4V}QE~aN>w)2i+aR;^f)%e)UZMjryw^maf1AJxzv}(jHti@gL&=?R@ zzZ4WjgTYs#B#;`#cr@#9m^dk8uo9L6bidv%Sz0KMLx&LsXDlAe(4M2LOFSFyV>j#L z751@a39eXN*C|>cdGf6{CagKOms=3Csz-U$;DYKj_aNunW-$#`If?E3fF0uKxbxLK zrPE_*VkpU$^H|QtTV2z~VEds{qsGh|1}l3cAIR?N-j!uu_B!-WTm0|@n0*d{xNQ$k zoPXEPRrw#aVUOeUcDok;q{*}mTQKqq;ErL(-GpW0N5{we1UK&1W-yBiQq@-4#M6l! zrQ7}FPbiw^v9MWzB!2ZrmJ0eNAQi=0nQfnrIaFB?p-R4bP7!>c%t&Ra=r#wvyK|tu z+L)<`5e`y0S?EK#5>Eh{>)pfs1fdWdNWKM!3dpDK68mtZSZ_?>O8a#D{bTgy&bHSB z##d4(1l>=Wm4H=b`Idprx8sanG3(STiif;upfrg%p>Pn9RF~MUf%8XCQT;LAo7qZJbB2~Cu?kQAuXg^6*f8SS zUi6Z%5U%3>sKQXp?oP{SPmjJG#-4av997x1L%MYs8q%9Wf#mJ|G=@W`gx6yTQq&)b zsZ&y6$JkLQw(k*#)K)U+SBrmk z{^*(ZqStSN+Zj(+RzY$X@tT0paxmwQ@DWp^Ar(Ed5X zFM1Eft1{Trv=gxpUJ2O%hUff%{~i;3bk-d?Fa1*Qq(L5R`~;7qM*!0(1(W=*B(i#p zD#}3(vvTaOVLex;yt6RyJH{n!L-{@I8x`&G@grHnr7rteB!B`>Ty#110>65tT@9(k zbH5rh06KC03olJpcjAQfi_NfCr^t!}u$Ge}!T@~+W1p}IMr!q%&%o&ZQ4ED?oI=tt z@K$ehVmMB(XRbV<#OD()c`8I38?Fk-z`&fT`uDuW!-)y&+;*l7K>&hT041u9W z)zM^<$^hfEaB&GJsf|cYrRg4cModwAJ8^1#ynWU?VP7Z6*J3B!$DvE-Pa2Gfep+PD zKi{5v*)zYSIs!obdCKmAvYKss{-_ojJJMsm6^|KV_ZRpG!`YQC&L90j`-8ZUePIJe zWm66$FxFV2fzX>G%@|>!CkawPZhO^>yFR9{O|Qn2I-1QpDdpFfw8y0IQUi8pq9i%2 zb{0V8RdtpQCu&;Ws6Jf7^`dMb1akO!r_74(91B0Q*4Di6O7tV8ES4uYUojoMM`L`c z<-zDL040F{Whv}f>K5dbh;YY5#!W;mDeNl_VmJnQRPUTzEsUITuaYl~I_+0~ zw30ei`*C-krAOoke19;h_~4?31#Ikz6nrYbQMTjpI#^~J8e-tPE$G#Ti@v^+G> z846(>ocdb65}^M3jL>Cmw*4>sTeH;X<{uv1oHT~eoDP%ZCf&i;ttZwxDN3mJte4A$DxD| z*1mXYMMdF99iH{*cie?oem(7(9UP?5gCer{Wqt}_HI~ggnll(W0|>!}{Qxpi<%E@W zQ-u&l?+6Q5qsvv)IEMfJGmQ?h@rnK^oaQ0y&pp?ke?GoaH}^`{6byxbg|!zGx7-%z z1eO3#e_kdI-DBT|l8sxn6j^(d=!negQ=;fuW&q<2P4!&54Vvt>e?aW%mLU(G7 zQ_nd6zF%V|%WRwNm^FY~&U4G=JWnJhT63oXwjPV^l;YJ=g)cLN- zJNQEAkWTxF2@uKhxQKa2XpcbT`C&{bStdk4veH>s<({&A-j;;_ntQZKOq)r;IJAc2 zxk&E+p2lEjONlo1!T;x}2|gDE3}bE|NFv4Lia$hYsbBhMJsnY}z;Oz9&@{gvR|&&c z9a!P`e;a@7E;JEn^_NUb1OpB;x2`S`d5yuZCJy){?F__&oiwrH-aL)A2(isY^BI?? z!fWlyt7=JNk{oT!V*H3}CdDF$xvC$PzicW?nK_Q7?Nt&=YBq$U&H|22`^uHdjD_2? z@69*bkx7Fm3rEKHqmo5he3<_*6D%TRkXMvZwe_di8_2{_@%+9+c+Ednx>E@~p!~^P z+XTjlNk;oJ=^G~=;rOV>*SZT5!k`Qm4ES1b8GN346fuV?4xcH#x)z750?Qy#W@qcT z-AV%o%m*8%SWrNUr4p=&>?7h>sVcC*{^VDG*v8(}fVFDcHK5G#?jR*9mk|Se^2eod zoT5Bomqlmy{S-_NNF<;ZzhCHSbwH=yoLvozY~(5Gu7;l|R$2j9X&Q$HjM4EDR2cXx z-%s_|!BcARNnzg*DTE+j<^0j_wBPk(87-=6J^qmW75yjx)YC-xHd`Sxr~%?Q<*;W+ zXEcs-Dj*E)VOkS+&|EPd9Ffa0wX--)=`zW8b;`x%EA5StG=9FvNDHM$nd4Hr9w}P& zIDHwU$dwZ);r%dP5woSM+ceo_N}VgXj}0D?QzS%)BT2?y~lkx#xXO+>za^1>F$GqT~Fq{<3>TR9N0Kf5EC?a1dtr}-==&A@484T(2Ie?jh7I^@2xgs=Uuu|vsBd;a^hWi zwH`d^(Gq9CDim9_h>l{oxt_4d30}oRR#&T*P~>(eH`|2VzpPGi{^R1jOcg!m<#TG40*L_Cfg&g-jFglfEYL(ti#dG=f zd288$dqpK{Hc}h9j06N)Jy2ry5pufn#;1-kCn5u;HJh|! z{GPA2lakdVmjq`MI!o*y1?+u@KdgG3x{ZKplxDt~E_$m&p z=U6RdxRFf)jV5MKVZMX1VfN_P)$UVL*L&(yy~2NBK_6}(ihq8leI{P$$^R(|>7Tp& zn`-H#nTarRb{tYnCTB!;s$zFB4n-$iAdWI`v>o`iUx|yIxvt2M30h{VXdkQLCsqX@jO zrD`Y_RT&;&x$H-W*A}Sn<=5fd+;Qn_OeVnhy8tSg?$r@DO{pNL;tw2LFinw~7@gR- zH|KqC;%szd3F$|0M;4-S8~Z|79Mqk2hWGF+NhWzqkn~isnmKvQ%CB@7w-fC~gzhPQt*H?%vKPPG!uSzkveF{E! 
zT31-J8r&X%>bY6Uwf1H#2M5f_nGKg-QIISaa;D8BdcOC$*4Mq6kYK0v#)R!@H7n-s#39n`0CxW%`Otz1fymV+v$< z0KmjY(0ESx?Dnc^E|r5Oz{lqio&ds@;Lr*o7fKbztpJ0jTZN$JM4)6IYRiV(2f*Lm zk@QA@GN1}pTtLc6vUpL%XmI*9ML{}{7>i4xs$QBN~0ft~d&7S0J8efx5RIZ5q> z7upLi#OOP}?)*C*lk5=yAEnqOeMnE0;8p;!nq=LZGY#^$m3uM@a*Q zV)I%&mP8ncez8W9?-E8K>h_xN(=)jwATjM~+ae}UJQf?SNE@iay||Li&63ngV4FuH zS_p?;qB;G_+m<5*oq#UlTk?SI*#M}&77wI!@3O~-J=3ctc|3p@ajE(t#J`!Xbsvui zvdUth6I@_`vKxTC5Oz19CB|C^?@;j!-TyacNGZ+@HcnpJ)sR> zjd{MV10nSNN84xpex3G!2bXFcWmalmuyYf>QjovbZj1eEK$*<95HyGl-~zeAkSMHV zrOyv@5ua|Kd(o1i!>}^FJD^yeUvNm|g`vh$ z$N@@>h4g{xR66Nifw}Pjp)M>r_FXJ=-}Bf`F;U>~g&0GyY=1r0r(^r#FK+Ps7!tK- z+x*bDECk}y?w3xn%h8cjI(jia-iHcq_$hJyd^_?cQ|l6dJDP56m3-u0L~ZK`rY>QH zcy-aMeh}}`6A1~l#keqWH}64gWPbf8st|ReP(l0^y4{v#qch6d3%-q}3tcpkOp>OQcVNynql}$H{ zA+DU1SlTdnoRo2?bE>i!TYJU7yl8J~KD8LiXV?xY7SPOubCu83Y6Gb9kYAk&eO$wV z4LRvdQFfRJqhF7?*{*)@CZ$DjKM*4e=K2vOa&b~Z5J{#l;S5Y5^y&eU>t9?r6A8p| zbPeot&)U%aqC5xB`V`BUsg0FdWUABjdh)gNAGY(qjWI0Gw>Lw;ge+n{715am2>?#a z=(h-=CF+Hd$0F_@dNg#M!vcol9CGmFZ_d99G#hhtyM5XhALwXP=B)n&FAckqEuAxc zfqiulEM5ps>$t>0drx3mq`(cTTY<5i9*Dy3+XD~@{;YojgvNX-#Cvq~KQ;NZ@d`?4 zD0FLd1n^aSse%x{eEjnquDHYB9Wq7#CWcEysB&wikI;&Thm0fiGfWuKA*#o|+B1Gb z8doupPIlul!tzBa6P+fNvP#C6;K0d58LlN32H~9$i0;g9xujKIU#)`C}EaZ8(` z7M9#pxr2Tl=eY##l3@pWYV!!^y>+OIn^um3>hEnfZH{<7_8m)_6Q;O<``K@-4U$txM61D2Fe3Q9>kA7BDs|ji` z)s}A^9vvdH3SF5H5LUOu5@xv9iD)>Sp2igp`0`O*UW*^O(oWNr8%S71a@QawmZen8 zVU!IV*hn;yz~NUMJ(h_J?xt~dx3)5)!>G^`hlDWw+;|h72aWBw%5iJZR>rtr=gD7^ zp5l4wydOb_UWG-rPKf<5$o4poTKuz)Ec#qO=yn{PnYw`ZHUzB4o@~PL6sN3JwSn5H z9c9iS2&q(Ldj6=;GaNq;_4Hs5Z-8D*-s@sp2p~W(je!%#8Ox{c)|E#9^M&oH?&qO$ zUurM=Ksh4MM=^q!DQver!cs92slMZQ;D{<22;@5^imcLnUyB7@Mt7x5ixY4+6}V`- zj0o_S@rR&`vm+NiJ}W{e_{i@(ha6KcO=VwVvIh!X3^hyh_#Sv`6an--0IVows6#dN z%=x1$?UgJ3$uTPOFfBuoo#M^W`K6rAC&`5ZvWthGKe~gFNhVxOfQosJVxU@-NjjiN zaDgh;+Tu$5NGM3z`>M=0E|I$AHphKCRpW9lol;;NS5xBXe0+EQ2)B2XWjzs?<$PD6!zeif7+Y zcBYIF4cst(I4_<{}LPgKwB4K_qZ%RG^DlJ&AuUdV*NNH1&? 
zTlJ&u!|_H-?RdwR_c~UvzlP%}=1AoLXmDxWvB7r62&#!4#A45X7_+Fel2vx|G-Uw{ zuDNeA18sl;x1&V=ff8b|sV8w#Drs&ITUPtFiksr#kqG$ZSdBs7S;N3&9Ne?GCCE?J zj_~G~%J}mAciM~Z#OO(TgRpRYq1dYj>NSu%jp}T`y^7%!&BL+m%>f^11AEaIuQ3L*4aD84L7P{v)EchnB-DK@_)5EN)s2Yu!copc zRT8#1iy;(Hzpr{WV37#T@;GMUw`>&bn_ushJq%sFrc07OTJ#dHuw!!wY(NK22%Htg ze%DboxW^0iZgzHSKK@R7+I!&`85wNOy)!?60>JqAodc(XJ2oEpd13U$9|yPE=FBy^F{p2_)4kGqtnzb%All6mQPbc0lY7POVJ);x^JzuTYhzL zQZCsmbH>VE3Q1K$+`I9^2UJb=4`+3FQ!B=AuQpn6AvMK%d!JZ1zr(y+s6krN)|x2$ ziS+Bl{*@E?gV?&ruvnTp?tI5%k}6IbAn!p&vC;j56KTnr5YM#O4bkcS412+Ec>HzC zokGt9fIYPUK92g_oN0?ODbI<+>>my}&binEZpUeQsOAuxZYOPoIoV0$z2c4bdM#6j zIc9{w>tjGxK9*(#>vELts*wZR!z z$TYOeyX~X?<pfYANk7Y z12ODT<+{ykj`z0~E*!7t|2zMK2S5B5uX9Etp9(1nzqrSN7%zaIjejxTi=8@$s}jFO zAjZYOj@p>L?1)#I)JVJlvf`}Om$lrW?szbW$| zFakddAhL)tP2zfW@@O=KsNx}5R2O@jpL!-ObIR6gH0qCcxLep2*a{H*Qn)!x4i9}6 z?RWa*9xvpFa6X}aDt59Bp>N6`0yRfFS?H)0lGP;!o>7Pv(#mTgOK~w(Aoy{7O-01v z&VKxiay7ATJjfubyVAL0Hd6z-)$7;C3M=U?qj-#fnJ=o6K0`XG1WLw6`9ym; z*f!L4p88LPo~Qnc@n3H@Lrt$Tc^JSk-Ap}6sh-QDp$s^4gWs@62QIoO0AntI(Q63po&TOx+nDSG$(iROtJ8D*6Kkvy-vJ!7WmEJB5BxJcU|3gqOM6K-3^3`m^sJG`;wc!>jQ zol>v0mpKJFsK=pm%TlHw5&2+oqsYC8sp{Oly(!`lQYzh2n%b$5>GyrhPQ|TtE@9fkcx)ekxT5($kV0O}60U-IYVKo9gtwr~tf>9H%Gd@N8HBBG9Y zp+}n->6*5fS~kdOd-yP(BPq90-8Fogx95-EX;1lgJu)6+C2m(t@!583(XTEqKqhKj zyS!R??!@bm=P#Uk+?^^9cDGR!^`YS4<6XZ9IS(BqDfzq0D078qv~5FjNog9~8Yw>y zsdvsj^&VQ?Qu37g(D?nxJT@pHQ$QI6=_}r8OTL}hT=2UpgN|hp1HtpU)NZ&Ga(nfP zzaeF$gt;(43UUg=)s_40rFfLg-C1@cfPBOpI3_u#JO`DDH~vLb`Eop3&-daS7Yx6;KEwg_Yr@&|Rm7#v}O@q>7hz<;*`9vYeudh>1_XhdZKJMEDQ zA-=<_SK6!bg8?J@i^mz@#ONh#nk3oRDCq8A-fGvchd_W1J&@hhPs5PNJH;6g8EADi zUP(mNYU6-mwk`Q}UVOX?GVQCsw%m)n#8@wijGsR;fOJuqr`tx=dmpOwq80Kio1fxY zzKIi7&HcmOB!_xuC=(Zecc<*f`L?1a)6N)8T;PMGztR@j0b<`~P@Xj<%@+ z(r3+ody|rP$ZNs+FPuuhJ6`Z~=b)LrmQbINyT0qZI+Sfy(I9qfF-(RaGvZ$y0#H4K z2A8uwo)9o$?@|ft&PCcytn|C@wr4|eQ$mXaz_@a6Kmt1}V@1e{%2xSh1aVN$(z}oC zXeZ*-)vlcJhc)I&AyBbhVnFz0 z89;!ma#SPaWTlILSLkPH!P$7^WdQyBx%V+F3aVEv`SyMu`BbzKMJMKy0yv4&%py-foG}h5%U(RGs9_>;b^5UH z{A3KmwqN!w0UwSYZBYwMPaNonvKN7Dlw4kldB5^PM1udzugy3P;$r--Z-%A(s&^03 zH!mNkV`chb^-hI~m3NAQ_pP8eWGC9vsgwZk;L6c40|&$mGYV=xrU zb?q%41pMzuK?%z~68*GHIvow;V}@0kQSd|Dk*MF}9~m~4H=8JNEH5Z^QVm}!7UhTd zuYJg`gO1%MEq_D7<9GoqzUSLJ&&T7yYL1F6Im@eKBUB6G_`U*o7N3QSQXerGXcGy6 zLp=W{GPU9>43lzX&q2?Pw-#!J-&i6#aB6`chEPciPUipL`B;Nq0X>H*YVkdYOKnpK zIUc61ycf3wl%2{h^xMSFJ={@e4x0n=UkR8UB zNXKKQR9YS=F_uB%hA8-$BjUfW7p1{&LtH)F&%ip(z#9po$g|7PQIGM>`M8njUokNd z{EWJh7GDkdo9#w{J1=n-#IF=1p7B_Ap!pK6j`>IHMzqAkn?YKNYEesg;Ztz|RH$#a z$KQ)pm&b`R&mc5OhHM{|`(nbwpI>ZC@kj+(b)Jv6i`S*2?fgeA{krQkXj<|sthz=g z9Fq9E2(Ecn6J2Ga9_5#kk*St>947~xMdz_Ax{KuoJk-;DF;>58)IIXG6Z#o|hj_N~ zf>LC$3a*y8XE(MW>V>q856H18hX}e_xPTW$v7ok}Az!2?O@HKcVRzM zT<{Sy8X(7@vr1yEVC$l4NJy6*6}xGM66|sN-s7OqEr!AqjL+OhPWNv&IIWpe@J@R_t2N1@DKX@e z5sRuKOcrz6014W*R=5taE{3wk&*RvZ zQY&93Zi^|W+{d!U%j1)idgFyS-mDc7Jak7z&XtF|U3k zBznDadf|?aRXW_}%0PPlYODdN9U@1Tz>HA!qbbjre3MsVoXZNO0zT!5N;^~zpz`kP zEL*~1#9@E+<#-J^EtAb+IS@iL4^n$2??O-$$fh{5Lr*P^yUK`}QTlcaY{(<$MPtog zdZ~RPzU&DsqaDZHdqUsiU;4y}VoNdBo>;RQ_acyynWvb;r54Ja5xp$auUJLrOoTxc0bksSuBFnL+7f+eMEG4T#6n_ePxkn?lIV%7CQTl z@me={t#P;!|6_8f&L73YSLWMV{F_7k)tGrT%KPFw@y*6|%b5cp#z4gujW6fn&RLMr zbvSuGm}qho$!*nxsQ zi=*-^sX*}zmZiLb)ID_o;&*SUDvbp}-CT;nfDCAhSTV_DWYKpDm^tJd#KYi|q#sWK zE_R~p)+lyTJ(Fm-7B4A#q~lOAg_-`XV*c^w96Pwid#QWI3NFX%Kr(DgF&453QcH-v zLRS^Pd#M~{q1DWVwP>Ojkx6!VyDAT_?u=|s(%Gy*Erk;se{Jo~gG zTgs!b9PHCJE96f>rPCrY7-NM=h5Dp)##iEnZ_Kxu?11@+_FTL>cK@64Z;an0roGdi zjpuB2`^(!?OtHD%B`|(SsgX0w8mdY`Om-kMo?YqOUTfcqxVas&BiG9VX{5ej;iu5T#tQ800J_ zZ*O}V*YLa^ewhk>y*=PDG1OHc`>oo*;ZB=bjXO$^75q&ZC3s*!5{n5MS-#Ty@2itX 
z+!w7m*>~3Tv+eie8=!MJRpl8-XITsmQHpJk_N>OSfrFirdlGYsAjmj%KaP+bym*L? zJtRIq>ZYe9(0oBJ`>oX3m6la%L7cJG<0jgZ3yUSx1lZ!6(*s-$Lxc=(y@N|c#@ z%^`7Is=Kb!ZfqN?(uvZmLo#G+30}Lcdzwi2LWiZh0zPop(Yp~r+K}d`#O$9g&J(Mf zFTuL50Jy!)S3&8RoyDGhn{B=IkPb7ki)N@h?|W_%B?pU&FW+y^y%@6FBk`|?iUNQ@ zVe+DB1ir_e<6CwX0tpgUoLcNfQ9F@|n2OG14Y7s`2x3fV#G^-0AtPC1bJzCdkI%mr z3K%OodWl+<3eW}^iD{FiO>m_s#SOKuf+^t#FGl$f&x2YP@QN`~T)gU1-fY)qMaV*8{YGbdR#Huqxt?C<+V{m5b- zL!`u}KgiQu6@U<*tp~o_{HNRU{A}WAYz<5*!mF{Lq)UfBt&l-t^vclxeyltPHXe7e z$yCDXxOhzT;5C0s4ayFS#q7!29B08OrlPmqnBeHd(yTIj z@$cKKZN~Rnlx8cZ{>^qPWAY%Tj<_tVTG?t$40SDIVwJi|w;Zn(IWA&{ST>jOv1^0b zB{>m^9x0Xkh{SV`Nvn?#CME+O&2bd>lgohyOb2eMtt|NAI0OXzvK%jC*(z*kN(XmF zR5@`4tBm$)g`tDay@=r`3k?n_Fj&0s`JbKtDdDo1ir;Hbg}`Ycvc%XBY`zqi-E~(U zQ7&Aw=q(H>WGMv=a|W9@ZmFsODUKg)zZ0vd3JYHUoAd2X%+L8FCGEMbNsk-}P>46u zULiQlTgS8sy@7&n;BrT04>KCFNMdJPit$awPek8$6Q-?N{63`SchvW#(1OK}+RwBl zYKpw>i#b`ST1e@soL{QM;ljq-;ZLwPtS(O`r~FBoQ85=Y)av%z-`D9^nApoSu8e!Z z+nH?(@gNRiDE>e7PlxU_9$GK?p?Qg4bRNiA(&xIunCw-&V4w;>hs}$60O#7`e0dCt z#~v5w7ol?O&zRAk^hJ&P$$by|VAAe+3Jx$Uh2fh+#6!-M#a=lYs8O1urcr>-gz8-@ z^--OOedd)_>H&8R(bK?F7l79vmAI*LyW_e;B(|*2t@WYJpeeN&e3dY`vj<&$lHn*t zYETIAt}vm+ssO^k7 ztl;AZSR-YUbhIQX?!ES2%*L7QoA_`f>?YoPkmhLVOeBd}LL#Klz)7Z}05_#EY!+?z z=vd4s7)WnB=d)G?>^atC(S>?@p}iE7+WkL+(8u?jNGDq@<$R`XsP6{Gr*|A>6vefv zI5*KU(qptiJRp!YoVs{xRevp|Xo$sqA%uGOY?D_;;d*d4Waar*I=`#;v`B{L=pek~ zAw^nw+^UIAQg^K0T?8j?a)$O|@8uD=&3_Q^x3}Zs%a>c0f$LSH&;LHYqJ7P~aUW!v zUJ}M%aqZa8Y_1r*xS4dVj|CPR)*J=69Zql_A2Xs#o>SXD`I<3jYrvYh=X4mH6#th! zFvOg#OE+<$8E@|MSp+&+AmNx)>9th5y`=+=XcEm8T>^U(n!b9b^kg-VhcDvqP; z0ZIE@k2gJkL`%5zSx%W0>R#mVId-?&+cBfmtG*Hw24)XZn!Bj8)*k=ZqZ1yc!??IV z*Lo-L+U97m02H1KVgGRZz4+%s`=kfSL6j!fOgkzl>>4HvWvD|egDhK8V-JC_CdUGA zq{V&DHS+5HI^wS-ynx=k2%5&b>rjt>6QWC-;zgxOycoAp^A0+Odp}crT}2j!a+^Cz z*~5zh@GMIS9IbZV-YFKlTb&2R@We8-(K~ZAzJw8q2k|hPq`HA*qj{dDwia<0Wz2rPBN6UqcrQE ze1pRGo-N!-?3o@Dh#6gxu6DI2G6`j1p=RTP^GD1{vRRxY2qqZRv>_?6%^tQm7MTjsD9{o@82TXG=C+EgXsR9HnW`sM+0<-wUYl z&w>Q@>;CsQ7Qype?8Ejce$qCFk;*3f#a1aQUp_*1+QFLi4VeBK`y!0e@(hH*>_(126(TBvmHkVAX0P&69=YP5VKl01#mH0$uxgKKP&rIn86k3+4NnvEMi=C5 z`p(Ko01+P!9`9>bhsxAaQ=#PE0iTHJQy3a-!$bZQUd%?R`AC{I2GUN|_Uc!|izRkP zWQqY|(#KtH;frZ+dyh%^j9i=!C@QUup~@TnzJ~;`eUMvO)W)S{--fm`7wnPJnqIq{4fQ|q%giH%91VVT(TU4Us82<4BX95#XWf?zR?FlR{B2Ug zm>y&!-WGW7BwKz&L512?4N$g5iKP2zF=}sC#ldI()#~lnslE97uTg&v z=V$~w@*xo6nD{kCmJ)hbXjTGV-F3|IUWh5?`pK6c10C~!;txtDf`K^R@bWG38>L`+ z^JsH0pWNFDW>xL%0Bh1P!NmQgg4AD$4aV-sv2h%LlGvY@9XM0Oj^GQ5nl2$8!BFo&#j$_I0OS{vI4I-onFf6e{J4zuCff2E!O zgNMU>yy}b5*gtAX`InW((7bx9B871@cp^5$nzus+TL@cZ0+^wWi7|Pc(mo=O)j_&E zpX@vfgOm^&Cw{25JjuNzSGd=2^VPV;%5U)>E)tsJ?kdAoQ{NU=Ytpm1=hw)&E55J+|? 
z5aM$fU!3_LirRU;XJzli?)doAZi;h&rduB+1C-ssx$y;Zv`Zs9%DAAA>PW`gAWBa5 z6BLdpNQl@tD64z0RKj3eaH>3Tkdg#EYE-0bs!o%RRRq?TaoXWH*ee(2xK+U4p@OU1w$8wp@ucePdcz~U z#ws1<`%38`p=s4CE2+pD);xAW$-m!b=jG9X!=n5S{0Q$S;)9@GUGv2=uk|j9$E8YE zFP;&bS?6G_s0QTu6*bdlP-F1$B6<=ENUtd%K=Rv(ml{wm7a zO;#t1i*Dc7qa3or9ghn=Q#z%F8n2=K9*m0(wM}9MD;tYQB*7g8KHc_9jA|bO6MN6+ z{hm(QmVyq1v|mc8ZDR}}RxuyF@udMHQYm?AnI54(C46(KUAkgD0cCl*5U zY}B94EgkG5;5`+C$;>nVU7U1jl65jaTpsZ9)mp`pb}YG^1Mfe<`;WEv>J83La7=oJ%y07`@Dd)0`u5JjWYnVGmB3f_#uQ_|i-MDNB7XJ`pxSgU1Ac za~JZMIs_7lw~N>EXM$v?gkFNhz>=0T`p0?;j}VnQg0no;;jEbJ#sq@vYgnV}u&;X(`42 zCXs8*_R&Gq=ux&~{jb#z`gc5e4x}yA>l*bSk2`4~zGXoR#;HF0kuj9MH#Jj{~j;Dmmdq;f#^;3cR+lvt+>^rjjAGWBsIz@DL#_2pD^2H9O#CvJwai*-%5b+6E zITIH5Syx^-J?Z`Y_ItH#V)wB}eSdj9r;?N7PV6(la2L+QqZAyLT0=leOd?8@ZRclh zw8BI~QfjFrxHlSMUeK+$oe~}Egj@=~=B(3uD4)b~ArJA~*>+=o_LKHV_DAZ`JjEo$ zMk}SWFodB7a0J9yg-(`dZyf>&%3TbB03w};g;n8jT=#KFwwl0c()&xwQ)dGDNY594 zzSqPUck?YCK8tr;t^k{onIk(YUJzRafH`){YFl{I1KXRNA*=3uTH+@00OdR0iw~Ui zt(;JR;XVQ=uK7jFFdSuVlQLRBE@sG9NH!TL7nv0XMN%0M+*LIdM*^(?3iG&I;3&v zeh}}^MAr?B;VrsOvBueU6HWm?8I&_XPHr13F(a`)!|t)6O1i?3tze!Bg1U9&olkHv zQvYlJ@710S#g#FSFR%ojaygVe?BX$z;#-a{&w1CA;{kH3%vgrT~1oJt6 zePB4O7g1MGZOR?S5G`ShiJn9@awgD(TV-Wp%f_;yR&k&x9T&r)2~}zFw8ntE zpvclL9w29PR>CJLLy)Q$mD(!JE9A>&wZpiz3O~d~Qs`ctO1v@)J>3UpmBj;3hX!Pt zEo95TJOQhm)HeWX#HNcqAQhaFGoiG8yz7vn^%swS7;`aD%M0)dV@gk~NbZf!wMHe! z#G@W5&fz__dBG1+ggCC@h$$rHImM=T(&%O-aj(ewi+RBnE31AC^pde@x!|R3)=j8< zG$M%~Qg`ElrIdloFG=Msc;aqa_MP4oQ>1PN_hMQwSnNyO-}dfHB6AJj&|TOZDs z_&w{fQ-*vKUoUMKHUPc%*ma2;v+gGeMe+7ZkIgU1>8KhDcsDU#znUsygX?8nbqNv7 z=d+4mC4%p(*Rc*&EXlG(VJXZMbek_a7FKvzSZtsS--(HZp`8@NWsaiMfIKu>mwMuq5Mr>SIYJaCcujmZUvINk83V|p%vFzHeflq`^r z(1%PthFib}Kgs|nBqK&)<{A5)ifUU41Yc8`pyIGFvzr-Re(t&U+{gTmYPtvY<2Yq% z8MC8^H5dZud%8OvFuoLTxVF+h6aNNpO9DK3B=KiJm76gMSQBj$U(gWlN(@7LmQu6) z&rjl7^0R6^b>SmZ*d%si?I?gxYh6Mue-251J{H89z8}ORy7ZNh{e>%I0~ed-s0tcX zwx079>&!;q*>Sx275>AQdBw{+K(pPkw{Y%5f8nIe+BIKl&&2z4IAIa06;~?%jWOuz zUXg7Xox(`{S|Cttai5o?O_Vm1G{CLCx2%lQBjm*=R@4ryM-Pn?(%e2oJEyG#6|O*R z*Q@6vyB3Om2|7^w0%1G;UoY&u;+gb+4hX_avC0tJYogb3ZbPdrVh4s=ut&@*p4r$y zMPa4k{P}wEOyUz}B>Ci_6LGsRZui_?4Y1`E&xtx0kVt{`>ovQe!lhc0ApqYi1OW?< z)~xBtcQlIel;6vvDXkSY0JS|FmHNc8wcN0F0E6mpk;0n+N=?Y@)atRsMZUQzZJRw5ciE(Gnnc)3bq$01&x#|f>Q8hYQT37K5nqs z=I13jPO4hy*JBLaIuti)in#{%S0=N()24>F8h4bsXa|BS9o7@*q1lbFU zoycqVYf`rY15?P2=l=$O*tU3}5`^6pDY~!yz zMvNhmb2qlEsjCZTYvlv^Q^zuQHVS~Y#J)e8gjYG&*5fTP5Yy10)Ys$hgxo7F=2w3d z;-}-c7cj zYz(-mG`sn>NXD8s!8S-(=W{b{aaY_T=Sa${#8@bcPjjVO>WQ2X&ZC{O_HZbmps;Vc zj~`3OP!GKfGcw(a+Sp->OhM%*y$a{4bvn|X{J85~l%=|>MuR}^pwuol0%`dYi} z$yTgd4!_5tIG_sw11pOJf#$PtlUbzfta|35_pq+Lz6xBVX8F7udgwK7!-Sn1Q{m_z5C6>ko314;oITT|Jtt7z4oVql@E#FNUk6#KrA zEq}T&xJ$)3r${5HBg2jbM>T3nQq=F!L@BW# zM18BC$kI%mq&)_!H*E=dy0(HS+Bvoe|@HJ(dYCF&T00!k(5g0~k$-p91_V3i5PX5HRnE#~ zw2oZ(60(MhD~4qOjNq$Wj&~vY)NX~g$0=0iYOI1{Ldzv4L{)=Sn_Kii@T%j8?qz#s zuxfBcu#A{XZrg!e&v&-=Eu)%&GaFAn=GZ#T5^87zI9YHck|U~Ys69^wu{)Yp$-phA z^f7?{M#?FJV%2IR0O`QY27&aT`al6BN?3 z;x?mE8)cLm{;mRx<9snuLy85nH@S~0V4#+1jGEpx_7~RtC>7B)a!Dl1=|r7By4)_k zlB>y1bwkn4X0H3CgQ~RQjjhJI6g7#M(dM?u39P$RA2?IE7+Jg2vqQPWhYzR5|9$b) zzU%QXP9{B#FP;kC`I>((hdBLeWCx{sWO;*6%PXm-U+O)=iQ}T6cF_BRf_5!2)DinB z@rIa`^GBSqMZ`rYg%6WAd>b!!%@^aHWCFpHul?$eN@uJYwIdm?1G%K+V)TB4uB=wO zD(=II#vcIc?sfK52#AP!&&RhSxT$vN&-}*V=W6K%=PLd7kcprC`(Nv~`-Yg%@=^Rg z^wcdJZKVeQ0aW*ha;u8ypyK@_1S8KD0<2{8!6xxT775m%qT-zhSd8+h#4P3czIN!U zTvE600|$}l;0;Z9jpXAHiC!ALhN3Y<3gku902NrcK9sLUEPc0|DC&)*f8A-b9?Hos zfLX0YwM&Z$_=_?7EXG&PsVUa~C&l%RYU_wqt6reyfXZ**O;`~E>2sb&_+|;IfuN^O zN6y>AiU;=gNW`>4BU9TYO`~_wbu18GtgI2hPbNW9UqoPqpG+y~Ql{#nOdWC=vrJMu 
ztnR6{6bJOtE_@xXsGTBKaH|!y@QA3H4Zvj38SsQ7<++$h4dtUp*4o} zyLzCCYF|(hnG7Tb85L7v1k#cURxM>`;~}8ml?*h%9OVGqM9faL7;zR;^Uo8-gSIyR z!*2*vzvQ9HDG?@H?fNZBpEJ~nuTmZ_XHG2%yvH>0h9P)z%S_;cqD2*CP{Wugw6i)> z|5T^m-+F2l5UB|9ka=7S&yuT+UR7}duNTMP^tKP{IJVvDAid0wMSoPZEvbJJ<``UO zGhQgHA~6sH?6tF?)%e{TBYUx0n!}V2w$;^3UJDW?YIH?tb6Vm=ul6(hlH~H$wY($e zdl~N{vVv#p8Mr8^Yt?{+kYgrf!GBBVC}sIBVR%aWegK)RHGNlAL6S_9k8*-{?~C5@28{8ge7@r z^DFbdflAy^#W}{JG{6#(!ogQHk80KLD5fRiQT${@Aa|@W7I5B6FIsH!b%p*7LntBR zav=-gUzXqxD3!Meg;oJ1B1tqD%gh^r7g?Xu!Z-}i=_U;vF7KBCXq?je4lXv_|YjhcLYC z2F_Q3kftrk@idH%0KfQE?`@7E8eEPwd<=i9Ts zl*WU4zZe~D(4PFa^K4u_6xhX$^v2wQw5{Zat*s(fSOs|1TgSy!CD$Hv!VlU7VS&2q z2LV~G=OMd{4>dm*Z*zOC&3n;RY##X0bQa2@rWO#=_~x~>_$7;VFBV8e>0ga6aGJ`q zNhmokZUlIzA9r#}AWAY8e-cL)*P(Vz*NF^<=eHwy6ttv7t?#O`T}}6ebg0{wv$J;s zQ7eErkak)+waOnE7Z5^7%i&gWn+PD%1R-QYL_mzfY$`#?mY4-(=BNZxD7o0&(}BJz z>@X(Sa?CQ}_k+kVunJskfUwyldGaUFN@H#zcjuF->tGWO`*uXhgt9kJI!*^edjglU z$S1i|F$?ds;@1E_N(EHaaok+_7n%v%ptz@U`VLW-zV!`$A9Epxqgd8TBJM>J65&z~ zN=d0;uF!G5*GyYU+x{VFacXXkx2eVwca$iBTM$2`-2!*A?B7=#miTzPEzixYB~S~= zrQO;d;i+Py~D90uRr%#n=Swx1OHjSDk}Cp|if&?>Hl?v9a0L*o?i)Q@jg6jmcxQ z3yyoUE5?e0#dR+~4R6Km%wc=Wrk<+)(Fzg(BH~w2 zBqnUpFOan{lMZQq`O5-JmtiwRoVi#;1IbnVuL=&^%ro z%`zeQSIktR3b9q+YlpE+s(u4!$v<0A`-frzs2J??&%YQt1Ec(k7wTVBu}1=0tYWu1 zsVNID5lX$Nw^}Ya#E#s}$j0YdDL=bD)^Z=GDg6F3?Vb4L8O-CJH5!s}d~zl@Xm`tf zoJ3pQNc}9*q+ONnVb;FCLudOTML0x;IJ(R0%|O(ytut2So_p6p3L^K=g>Dq`B>ES( z67aPaQ;m`prk%DK_}bpq4uTz5UvIZ=uEjbj$^4t=KaB5gw6(S6xD(bYqFart;|`Wv zQRv%{dYP9soc?CJ6VnIO|L2c?*7!d^@;h~&B%9n@U*s$pjh(L^_dQc!68V)Vw5i`Y5!@aTML61v+?juA^tL4MhKR_@R;;0X z2*Z`Ou{46;rvRe|eNR45QF{M3b#L2T_j#Oo*4BQ5-F>k;voG$*axBV{ERhr?*|8lE z1W8B$2?L-g$F#HuPs9-c92meM1S-lV_M~c(s+r8bn%Q@>Ek8Ss9mlpZeySp&@3p_{ zKIp#sDDh;bHk;jb$}s_)^Z(xuefQmc_0`?iIq3$cueEtg5WXFSM7UfF+`#4JnyZr* zXvN`T__nL)&xH0z@x}s*XxMuPknhDj)xwcKY|8iAWp7GrNBF{vWF|#m;am2dJALvn+OB!KmCPa->^fl9Pxk7eUoz zQ+pHds{pr3_l>DIlStEJimfJlMT&*sLA*DQ+Z1B#Hy_8b6oXW-6(6e~>#S>u0!G zPy{*02zc(HfS=+!bMNCzlq^{WjumsTn>W`5@>RW-!2alnF$W-yGLR-JEoLVD7|SLC z6#i%YY2|t?=6!?5$QX*ptYW9F-}6zpd`4FFqLaqB!_yrO?TpXjwd|wIDLLwIP;OTS zmg8v37s{eV?BF2Jz1mwScIBC-FOcHQr9CY0KZFf->QMJdZ1HE?$3v)ehjIrEin0Ir z-&4Uk7N^(utVQd3d#i*=brTt1T%n(P91PTta{<8-%ZM}y@*~!dYTgklO9{*T7F*0h zr|g>AmX~txT_@}%_u`M+oA2h;@$aZ4{tp}Pda1Xiw?y9X0!}iikd5-YKFJ;lh><)o zi3$HarY9z9G8sE%$|`s5Jpo$ zkaVyV0;MiTJ~B%0#USr0kQO^h*a|Z_HLZTkZB*$E=dY+&1tm`kGH{K*78Ijbnaa14 z?~kot&pvpe>z~O6h7{)lGn;_t(!upUIvN$+6tYM*Ga~dR+eB(Fr1w+piC7T+)&ROT zNB_k&tn2w-{`c>wHut-}v3a|+L4n;x z$%U9MngpuD(RQdR?vIzP%gKSkDHYkSAkvHjx_K6%G<7UfzD?w)uE6HlrggCE#yW0SUIpmDCNw=b~d>z??Egv37rS-9-QOBXcOG#Jx-e$3&gS^tAtlB@<>3s6;9Sa42^(hlY9G7u|r(BOxVM zQYvFmE@TB&Bz_5+${@f&@Vh44Qc_Vwv+u1Xid)fB=3NK`+8+jB!Bo!xG%Ag zvt}W&dmL#O7*`CamVO2{qAqwH6%KxFoAi0N%N+Gmvp|F~2-0~y#a#?r029fnrzkro zEgSS|4J?Xdxl>K!6Ov(6kq{%^`;N1vn<{nAD$t7$;vuWjtNg_5&kjqS5O!2R4CMz+N|%M^19i=5k)*Z~+q8zQLt&@1^8m$9V?R|g@?Qs=~*xID%G;M#TI0$lih`Q`}Zra$7zjOMxz!n|@QO!z0+T}VZa41WN#;Rd9e39BI z8ImF?XG+Qs`MTb2$AUB72bMKi-x%LI?lzP*V8-g-`0b_quS_R85bFCjk zuKbA~aptg~+bm)c_)nGA!wj5q4+n)OM}}J`4dqUtz;F{MI_F(hM*M=5AOijr-w8qc z{r1^AmU)(wH9yL+5VRu~FQ*lS!Q*QlBBeL8lQ%XbF(Iq?F+G@!o>k461gGY&)GY;YUvU{%P%~wxvx0Mc2d~< z7k=WcM)qt9@SHrnnmq;d1Iovv<5;~}8tkGJ4w5E#`tVO$DK|gM4OUFd8OkGhl9xHt zC6VYlx_6)AGY1*kpv9zzh)2r_wd{d6#Sbb7kO|G=)Hys(5UL`bw{h#F3q!ndxSscb zmSGem3llw9?g4131gb)+)Sk)9MIeB1;;-YGi+Ia1GAehrC9A(f6bB1km_1yp?2fY+a0-6Z7DWqH?FHBkVs~q@uAANnQ#>mWF<^R|E z-b%*?td2o3ByKKUbul|W-vgZ$+mT11UZSZqdY37o@gc2$?BpL5A4b;qb4))t{UP;; zZJq26+dHZySX>m9wjC^&1NrCodvogGwi^ 
zbxelB2W4E7BY!cb-EY+`pzauge9!H{QE_Zf;7@gvvqSbFi)?b~?MKrWMR=wB`8v;?r;z38hque6)-06d*8cWpiinHcD~-J$~mgO#S>)T~X>1`qG`W*v1zTfY*I z7BGbQN&;%q9M$+@Vtd7K7ADDGAx>muIYKBOZU3l1;)v&d8flQ*KTw|i2k|HYk0kAS z=_1@3f&q4L)1t7@t|TAf0AonS9pjUfHA#!(>8IM$p3-F-04o016y`hj+0o=STnTIHEweZFKxsY+);;{^@H zdr3-3?-O3_g?N;b`RN}Vt4}!e-S<4c5k)KsEZ{Z9bJW+)zQzEeAY7+u=YC)EYKiY< z&J4ywd?;MqMQOr!|CwOY--U<%uHP3}HRi9N$dy;LCvC>BBF#Cu1bP|JnBYh1dzkG9nmtC~z8GVVpB!UG$DY9I5bpTiY0~5YZ{31Q=czQC&bn%xZynnQ-(%fzO*xp#` zr#Z=QHjqgd$eMyQI0H}$_Z$1GSHEDJym`bP&>52w`0<+4hfh9hd6>V_RgX#5$Wq5(9)GW$ek&vgPCB@C zDkiSu`}U8zvdlWlbH^+q499mcWcKpetKewqIb0oX63b6ZOwmYo$94;aLqEXp z49x&?A(XSm4W%b^H~pCZ{^xReP?8dL?-sB`GkWOObPh$dS3(9BxJT+;-FVx)kW;~Q-G^gkn^j_F$zK?R!riHaJ7s*DqAeKIxx3U|1e@<3j_x!e~0_vkZH*!qgs%GL_?fJUqU zhS<~anSD|OFGAK+?KdkO)e*s7;dj;^l&9&uj}CSD`#6#vbm7kTZmksaJ`B>o3kjEh z10yqjL~Z<*oP1B}U~^*c=cGm7`ax_V-$s9$C90Gqvvgi3QP=X&sa0s#nAm8-B<{rl zLxRZ8HNyQJly;2k^x+?geGotPxaR3EVvf3(s_Ua37KLQ0k_Nwx*D55!6`+to;<_9y zvgWFLLP_zRos#OxGH}fRlN*KGKA~VcI(0(J^gL<1GKDcMvpyTO0FF8qcMe+&|8lQp z?#&fI$~7~(tALW1dO-o{MB`8%1RcsGqE>`Qatv^Pce@rW#ZlT+A;W25n$YU-3=Hi>w)^Ty@I^W>$0Xi-w>_2FwpfLP(7qL)yVe%wEN9d(5dARzNHTB& zjbKT(*Vw>U;>AjQ9Xu%5Hcd{D{f*R|rw>2V&c&x!5SaNJr9%A{mm{8RLFkt`e|wvm zY_1HsqrwX4_O-owSS4_@dh9FqoAY0Q95rhLxbP&^1d~%1pGMh`zG5jD?roDv%mK8s zwn$&CToY_ltC}SR;|J?GSn{ZbZCoD)?XhPiB!`o5L@p|_Fe-_L$7n!#DKXZ^|LM_p z8vpMnR)$CxwG(1zh4LqvUJ2KZ;_#OvFvD1PRnnWklGZC14(cP`Bi`vg&PXaf_$10} zF9CutsM=b*R>AeW&w6l5ablSK@c}RqM+{bxhd8)BPe=%1MdJwhvV%f$y|A+SR4Xu+ z9nmjWz*Rm!m36-$mQyXGMp9Cz5LM%0n|5oYo1qI);j#TH^;3i3kT{M~zx5lHj-Tst?V@ePzN5blP} z2|C;l*XcAs$<}&xcFez~>K#&__W{H-F#x{C%VywA=)a~R$^_vPApk?^$lW> zqq02K%Upl=V7#XeAXoQU4^#yfyQ&ex>cm)Z-4TRM!P4{?FSQecDTvQW1np&_ok zU$+mx8iJ^lk>{Q$DrC&E;^4+m0HiuyF@%$l&L#!xM=5`hEED3A0T=Iw29@iqGrm){ ziIUS7Ljt8nxjn#dy{>KIq@2_P^Z+FW+rYf?>xcHo- zeZktbh6YV`Pk$9NpTXf0MKHcDDxSl;OFEG;=+C*D1Bx9zeweWvbZ+LUDYO7M9p74P z*J82I49~~EXKu6`F$Rc2W71ahX&Ax93fS4)yDR#D1)=LU5E=Af!W;U|Tq)+UYN6=h=LTu2&c~rqB0hWEBhhjuVO_ND(|t7 zxWkcHMIpmfs`U+i@kN)5(WRS#UF=@Ug2m);r@q-ve;q5|i+1ho$>6VRB;=>mq#eli?=kEcKR*A5f95G~&{L5jfc^#jY|t+S z$DsZgY8l;83s~cRY~%&_asJcv|H|eQQW9dCVvdHvkn;+tdW9pEU>#_B$Q*tG#sYK> z3|4RA%?@};p?DKnqn-{KQcMYhJN^b1mU@i9x%LEAWDYmhj~MK%VP{Mw1ks5A-7+Rq zHbph(&fx}OB83BP(^n$16E8+p&T_hfB?Z);dLRE(AEWb$NOd=TgJcf6$)UY z23sAT@WL{DrYLr1eC1qw?=25^t7qhv^2S8_dxS%-UyNE7xze&@{>1O6!np(v)>Vc& z?+>n`wY**aL17;}oV*u_uRsCusZ}+DQa=qDKS^s@YN0)dRbknHRMtNhX2xHB)I6AK z8mB+TtdA#Y$i{~+P)y2V!G_RI7>jSGungHZqWxu^;5*=n^jgUoAm^|Zvxz$X*rrnl zLy2BomeYt2=k2&qD<)|1G#Q=i6aAcLwDSrtcsE`He2Bfm*HWrOo(CN6N571{$Cte3 zlPfH#GsWWKldOa9k3Sl4f);Qi-p1Im#{Cdt@#?`|s|U;|09v5~vfVEE7_X1^r!X}3 zxD#P)~(eLnU{$ivdDu#xcl1C`ocyu_KMr8zLHmH0DmRLz=M6N(-0fUvB> z(Wzq;XELBLMo)0j#)EL&YG4Vs`M5x7VE>mJhF6AaCduN zwigvIk61$G*j#T5GZqXyQ^|S!Ldo*VnEx9;fm}tjdu4&Gu&>k+h-IH&s^M&Wj}8IG zUys^WI>Jx5Q=l=Vo~0+-G(33J4r`N{T( zk*Yge&P(gJ7JtEdJjq4p7CcH`0Ci05WAjc}2UI+eELq^u>R|Q$Sw)gsUb=*$d58&h znADD2$^**VR{u^U0?OqP9Nkdq7#QrtItUx5DIJbm%k4%8E$#`E|?=jMJI|r3y7^K0f=omiAcBP?S#b=AeJn6Dww^wxd&M*ebfu zdv&c{S&EfbDr?D1rS)*580UI=0|D@U->0}Y0-;`PA!m>wVXm}*O}u>>Bk|5%DnEQB ztq+_#NQv%3bNcX3dnyLRGP?mK?)hv?3VP@d+N1a5k=?v0U6(1yCvSWIsu%sp5R<8Cz zVJjW3tJ@Py9m{qPpO(%isMAv1p6|PN?(k@!HOq`VXkF%{_E{#Opmk9^~YHk z%f+uGmuzrSE-sawbd3N>sS1!fLMA^J_)~CMAtSBfmpt&Xka0wsMc5$wt&pJ946Nln zLxGm|*;ug0f6-2VX*pYLGqd%lQF<4mce9f|8scZ+eHw^HIFLwx8(6xSBA08`SK$H3&yh4VVq6;VYpI-S1hKGemCVKi)1pmKcwKNuQN@0v=+~K z{GCTXEX|~paN*UX6(~C%AH@`8&c)b0#@t}GUILQ;Mqs2JQH^gs{uWI>Pk$8>0n{Ix zPRi>rMW>q`Lc9Fe8*S`S`R?-kXBHaijcyguW1^$Wl*1LZ1Wqhy>8HO0C$iY+(b4Y_v zeWHELBVPgcny#jgTBo=~Jb~q#7(yXoq!i=v3N7$t<_`v6X+`MpUtRk?I zc=3eCUr;O+UzifeBk~rrpM{_h)+@fPzT4Gqi+BG 
z#>{HF856&bo-RHT!D)#%uy_rYuE4e822Hb~m#oDsFLyCYEtoZ4`WX`227s9Hn@Lv` zw0b!`Y!AU+j}AlpFwdrfpoj2dVyZW8QgXCVPq;$4K)JQNRDjQH6Chk@iQ~2tUx#vu zu*KB71L+HS7CVERoXQ?p9PydQ)P(qH4F4>_m5Ee(F)q7edy^k2_F?Lj24*Kow6_ZJj_iF;G6x8>G}iSG^|*@k`5{ zLcl|?K)xIE>0BeX1C&sY@&dl!ilwd$!87#{3;kMK&Sm?L?a}vRxWqq&%oI=ZRq}Ux z%5KaTXosZWklE-9_GS!=q!=QxCG@(#4U^@p+j44JLa`mDkN4_iq47%4LK-o%mlkD6 z`C6nt8M;OJ)iEf%F@l6KhRz+cYc8EvcVtB#P6|4;Y$RQLB31-Ox_uhxEtldXLt8I# zggGwVGh{8MvCHD&zvPi&;d_o!4D76dZv2RhL;Ecw*;j9OMiQ$lI%p?I#5{G!IpGgL z)1)8e4}m(xST9|yyjQx7a?Q%N+k`hqze!&zNDanps#zN?*m$erK<-wSjA2+8ctRH26+dwpQRaxS3~#aQIYEAu(li(RZ$ zp}BK$o}tNj6=bg|<`Ylobz)H59v|7rHSrOx!5g4mm4NLMC@R6n@{nEPr|v6&#SH*3 z+^BXS#ghrT#Xwa*E=gWs4|$buAGLB?WvYm#97^@XJ?K}onHflb#!mNZWc{X|f`3P7 z{*%;v-ecI5|eFc9Cl|sfVz1 zt6g63#8^n|my(8q{{!UFl|tUh{T9C9f#6p#Uu~)%q^pepeEfov*}@)-YRhJ&x<(ug zg@8w;J_GI?xCe<0W*ryDPnBClyDjeD|W?LSMkZ^k*!Q7hVC zve^l^tAI_)w=yRdVw9&3VH<~V&)_ZitsSX~Kfyb=6yITY2l=%3;KhE@dgu#d*?4w2 zP9J`veLMvI3+)T>Jg|b7^WXGAvHVR)T51pP&E`_XZ0X2vhy+I#FAsagpQ2l%5+D>W z^MHius@tPqu!Zze>Vp;W9-!T!c_`Zu!h!M}B${(pD1eW}pVcB^ zCFVNEZguD*-mcnr!_o;J|wd1db_G*f$v`xh{w}n zv0{BF+eCj}e(BKw-r)0Gw($m@XQ*Syk%AIT**e4s3!v!<_( z#P+IL`HHvN6W+yc*`HOCj#YpBRlwZm;wKqc<86cHXXS|wRGi3-#_-85d1>}G(K!Rc z53w_EP)g_x&-*K5K;$U{1tk5~Sog8ms=^9#<#V==8sHT85u7`G8m)hvsB`W2;&m8z z!yPXQcge&z(*+gtBIW{69>;6~ZPmlFGZ^oQL~nzLbTGn7Wek-62B%U=fCq~8rVqti zWW2qU%}DRc9y~~E1}3dYV>t*ysM{)U?u9(#4;e^9y>Z44HZ!#bVIet5RQz5N z=gHR#idAS!+RE#Pwg1Z5eONACSuNbA<^tjYV?L#w~Bl9ua#T=J>DQs(?~@lk5NmBT{FO~rz8Y|wv*x&8Y`>11Kd!*siy^XYS;+d_xld8w;Mur%o^GG<&hM-1dsUE6-YJ|Sk_L0+R0JTn zSgt9hhpXpEFJkN`_6#6HNeaM82|LmD64ql(fkg-8941i|j&?Z`EA7@|Oo(DpX%Zo@ zm_FjLc-3HW(63W2b8P~#Eih8?V2@9AG^gtJACYrFu8@s(9^b8^@^tbZute9mt4!fo zA*!+%2SJJGndW(e@UCCjE7)pPz>~cv;L0y#H4HMtlJ z!Pj4}F$3)6u!Q1`L~KV{Hj{=Q_})g#7ckVPQXIEX9T9mg6iE z1220{mvAAC04zR@-O)xebXQnDPvFa{08yR=+?=9t-)--G#vh)=_=Ip!lZ+{a4DOp# zR9}^}T!D=_`7wLoMH3lBD;ij!xU10C*Z7tyw}mZ(hkitrV9*=$SE^u479Gv$aB{5X#Q{j8?S)2l(9%hU zi|ULn;5Q8Rm7zC+`yPjYa0Jf?ZL+99=mvgaO#j@u_H_K4fYd8~6o-Y>Fkr<}&T#u= z0}OY}YvVf?+WC@B!97oj`*wo5iZ|kOqZ{$#EN=5>r9e{|p|(?;MBtgiPl#Q`T*wT= zS&P#y4l~`PJYsM{4vFdmFvBsEQq3d zC38>#8a$OL%oPJ4Os2&4W%2nTB>(E7NVAIBzIn%PvK$B$^Q2WqtYG4Ko+&oQ`VL}j zmyAP&x+-2U?H}Z>$4cnZ8{|qzGY;3{$SG~)$FZT5)+maF^@4wea;SIi^puEPA}H&H zw5AGG>`XVvz3Yn9;J?>;PV3k*f~fDtr>@Z83fsS$>YWyEDo>;BnUZ8zL7EgF#XwN(An=3R)^gce#(beQW745u0^FihULpc~3lN z)QwbEC&3qiJe0C>hy~p`PBR~^^dMGq0cBuG7in8JWQ}{PUI}Uj-ojxS%`EKIsW7WN? z#BI3`xguhN|y zrE6QbkK3`2&wsvs{`2u+TJn;=Lv$yWS9DaG%ddO6W+|dfy4L2PxZ#VqJJXmeV}h7= zRWUS7X<-)GoI3nfWllRz4|r`!X62&Q>|H2nvd@=qf6ii1YxfINZ$bTkX-~ ziN?*jLN-l9DjM^|A-hA%_}B%pvH>-mk~87DZpAp&5%1<=JXT=z>tRLSYBT;6A(V`J zMKheP_u{Mj*t2PO!4h~SM)UO3?bC5a*)>Y`rfEPRcG>53u)%H7fvR*afp;kw8cTx# z(l7(otY?71os1y^Jgo_G*H_wAKYX}H{zJdC(ng?c4!f9qG;#9#jN*0DJYaY}>n<{=IOwNoiF8MJwWe!hx92QqR0-h4Ds6=A? 
z7);1mW>AVmf0TPWbESL}YoXS+mmA}u_tMDIRA#)=@`29dvZck+VrE)2Zt#33~!ne|Y1DZTZJ&P!h`(4$F1EYP$p3<%0 z^}(Yl6qIe^YlL1EH;o1HfMD*kxv+ri?%5BH9=n7gWk?n;D$o7>(NV)m2 z?Mujq>!sj29^bIsJ-3LiD=RAIwJ_ISkAW-ap+pej|Lqmgr)AE4Fbo1tsr3YS5d7ZXs@}>uoXqIp1E5f06_n3$vsX zSNV?!gCgCaOa<;)R0v~zFf2YX6=<KthdSQ2Ey`QQ#cdMCmr+K}u_- zlf=E=l9c@E_J>czlf;8I5T*5SM@9vDlf{x1iWP4*ZAm+A$jZpJ+P#9g*xD|u00ACnjFU2WSK+S8u0l}ed#n?sy zoMo&K1$K0hyG=0QG4Q-q?(2{Xk5CS=n^?x#N!5kvck>%G%al}emhTs3NjMW-$R}b+as590vsixA z15C;oMelfM$E8QevF(2^J%QhiM|6nagJo60kQJ!QjTe~1ZpXXO4MgyN8=FPN&5|&C z1LbqPd6z8h_&fbmFL`xlMUg?{dJ`rl0w7eJcc`P_+El@asS1U*1ucGJ$tF`(&mV`d zpwblzEb8zojpxAl=}r{mk?_ZpPanS2UVPq@N5_tO6xj@F>Xl*aP;4ckSAhUJ=Q|0q9I*`km{XbPmjsQdda*yC}s{H()F3f9V=1Y1L_cR zsl*Fbafx^1%^3eLdp(qmz>0c3bgk9*5I(H@X3XKg>*VYh*dm`l$oxfM%*TIinOhsK z(^jfdGUS&j|0G89{KfW)rJ&cz<$>r5{}LCM*#Kudv@CavAF<&)@MGOf@uQNsDPiwT z!ab95=Gfh=varxTN`b8Ub}=`yA}4||92COBNoBGCN4#?WuYVLoJa%G(xV1`o4~K66}Hn`SVszJLRjm&|1tLf7KPmvj|A$d6`c5Z zp}}IYDC;r-{deA}?ZPlvQe0c5@Zu8#DI;WNlemr6WR)qSxzaVUJsa3+rs z@AyS#g2tG+m%jX&*gX2fQrac0h)FSuuyS>Ii%32Rz4`Fq{Vy%d&s~Y}_EeyBxaA`X zXSb-*h+y&zKSBr!OPe}JkGPbC)GK@e$&D$%Eg^af2_U8*A0wS{6*M4|#jjI-P`?N2 z_+CyfBSuTTD~aOorLv<~yjz7RFTbHR{^v6R{#|w0>T;c(xpMo>w1YI$6+WKIUFt&eSg z6-OL<4 z*h6JxgEF9KOR!8_>~8H8*I@>OvG{hFVVG6%!Zo_J=9&F8{f!;Ts1Sy5tQZ#UI7CtP z%uyPX{&nn6arJuWux2I*m-KcDQ?%U%IpuG-_2M14M9NRGOwUox$Ul+SkX1Bby=RHA znY|O;bFup5h945vnL;zK%3A{A^9hnM*zXoUpBiE5yzu;s@jy~3s+>e^(Vls4oX zR-Ax1nR*x4pB%|zjwg@{yPRPNG1jzaF&;oNuuqP<3$Xq^E%7`(^v8K05LJ>H@0OU| zV1pC`$zYMzcPRQIB`F5WSHAU#V(b&f{R zCu4^w`b%gX^UjM#Kul#nF!$m^$~PsLT^X2&rPKvW<_|Gwv7CG#sf;&A@jzpZH3U37 zD)=+CJf%Xl^>HR3`;(badCa4ZbPpl?)^*cT@;yNvP&}wirOxGTwe?A5X0w>syQIEqHU-Y^u zV-?dl-ok2A1ufPKyzBJao*0IUSi^ZEfA)z#Y@ZA^{Sfw;somF$O7h*?jHf0V%4e4Duu-<*J$DPAhn%>jd1 z?1nppxn@@8e8#aNAYBOij5nM<1nCr;EuMP%5YIMmx)&~xCh7&a!TGLw2B&*MQt5q1Uzuhpo~|QcI7Kn%TR*cwRT#Q35?r(@VKMXTc)ph8z>|kMlYa6JyhLBLbg{m4ObI_nGji z)%XLRXYm@hz(1(k;875@z7f*aFJ5eiSTYt%$g0ssgyL6iF+Q&LBI-ZjL%cS`@jTfh zSPLb=%l8Df;B)a%g>ErjYvLg`%t4|MR2hCNO*fSUblIM+a4&?&p({n)0BYgZoTOwpYTn zSv&)&6Oyk-w!oQ0A1DqJ^H7-d1Fz||;Q`d07$&O)t<55hye2!gP&u_RdJC1%Ne!I%}nvyvt0&R;iZxSeom(X zCvGSGx@{3zAJLdPD!+M_f5iJ^4H0d%hw-8XON>Nl(k6TYab8s zzt(08lVw|(c|^+NTZp&g)d$3r@`8ezBs)9Cw4BxyQzk;ch6azx6nFM!xQaal& z0fSz(z+J{HwQzvYon3)7m~mAl+vj32CS^AA%E4;M8mUSxLczqE5C&;!*Tc#Fmbg+s z4$tKG+FLOU!fr4)eV8rXNU_4iMKPR9GaKF~ zD!&rEVxlu`Yb&Nf7|2MvqQKvd-Np28-wom@rzWZ>qd+B5s|x_F)nm@oPa;Q*c8u$X2ZJW6EUyjnbyNiWBa1Nc+m2`|Kw zZnupDGm#4^=MXkodH~n&2VoE%Ce1$%5ztN_H333k5!~?DeepHN1~G*vvHcG+%%>m* z)|kfwm#%E(3e}Za!g7aTe54a|`tS{^mXxIT6|M`2!@gs#3u~krO`hVNA?boK-x<26 zhuERLY|^WN7(5fJ$TcEJhzvvdTAY!G69fGtR{Ou?k1=#*t^Uv(1&5nHKykZ3-&tP= zH7!)JNfkt8EX6KSB8Vb(-}HC34z%0f@&e3Y`k%A~R8VY{?i6=nkca0aCVJ-5J?>9E zCM=7ewZ$s36NXIhtTuaVq_SRH0sI(r0%Y_nCC45+@@(N~Ar;imms_SQ=dw<227yg3=Z^ zjTe12T zz~RI62iYyZnxr|J zNc9}1J<7{LN#kkV+de$Xr^&tnZ!ERMj);u6Au{#7^pp4qx(rA%B<}R#GwsPI6-@`om<}Is8m+`4l&VT?N5yC$M{*xE`#wiaI&ts zEM)%pdlWbi$)JwLa%>Gcj64()N7J5!@O8n;X>ao`lcWoG0p_vXcy{N##NG)uNdAs= z%~s4xLB_&`B*XmC_u_BZ#{@hR{=KzQ(})i@hiY+KiDm1(_GLPry2`l%4-0H~yiGDN zmW=lRzf>l947BGqd6h7Yv3z1YkH7ulpSIKA_zb>^tThA^EhaxlkUCN`u zuZnekhJvy2s(n=aw2}}&%Soz)t9Kj9{O9o%&}4Mo;dTsNKjno?TO)C;&C>%A-LaxQ z3zPIS7qjbi+icS+c+DZnT1t{iFLh$lXdS&sE3{%*Sn$+os<}^28o;&D zJ1R+nVwZe3RRY)x2V=Fi7^T4APOP{fodrt+Rp2KklUGIjPOxExR(+x-j&q&)5;2t8 z)+Orjg(B!xT+5kUA*{reou0L5CE}tLbZL^LMFvo6la702u>LJ|PmdWcw%5Eb1FUBj z)he8#hIF0OZ6r_8W4h3ue=&Yi_WFLBdWu$E_BD98*~@OrCO1Oap|6l7M57c_*JTCH zWti~-eSjdC6}-}`q1+%cZaaO#>JE8JY8^w%@VaJ2MO0SsbFkXEBP_pM zY=WiGxI4wOdKDt@M*E^aDTB_8u?5!K?^me) z%%VvcSe;Ow^Vd}uWRS7N$KNLa=p(`E6P)j;WbFRa?Nd+3?vk21I4B6oV1sdX^WO(( 
ztV(?Q`-!qV{u?a05C1F%z$N@$VOQ}pIWL`P9Rm5;v;VZo|9V@Y(+J7MKaJ5`2V=Ab zB2xy#&FEkEu+rGX9${}{J^!RV6%P@1HB9vF3~JYT@~$7AhzVN!)z=w_3_yQG4Pq(C z7~7{vJ#LfbzWeq}AR$mmaH7bsx|t4^p&)(GIAkcsC}VC=#2*A{Cm|OB73hTZ)U!xC zt|Ss(Zv7=x4FnHbmnXBA*C-amq4zLXX-U~g&B}H%VE&zEOWfWCv@imd}Gtwwc)XM(z4YjaC5H zGUm4-S^%Q5j7is1A}jr#eK}lpv7r8zOO~{&cmVQ`*ULD_s2HRjA~^TEK6GbX2?)N` z9uOhy?l>k%6(AIJyq=I!ZRLU4!I{|`?RcVF16nqzqC^cU!SFcPG-H*1yzO4GdT&n^ zfrP$14tamAU5-~1-*byQ?s$g|SC`wZ#Qzju;N_O^8N%r-iDiYf`$?r_e}?D_YOJ8T zJp#m`B}iCAcUP&X!LAfFtKOe`Ft1p%n5~+H zT!}pxt8RX=_#kAd5kdU9^Z8!-$hI;R!TT}#15OphlK6`JjCT@gMjts_4Fl{YFV^Aq zS%*h27T;BhUOY;X-i-p3p^0!AdbHM$IZ(ARdbega5w4i@eJxeLDl@lR1pb~pHQf1H zs#u7d?|WSXKlbNUsiHq3))9EsQq$ddpK*Lk^pDj8m7NO!fvUuNG+YR^g`x?uo+vN& za4YVbOO);|f0FOv9~WRLW&uP_H@<6J8mirik9|lI%6#Z4maC+iqc~;w}0qwGHE=Ct*Wo+O( z^_OO-fU;f4KD&fMxGVB*Be+*{3RSIah>eiZ88UFo?JYE+CA*SHjmp(n25+?6@dHJG zKK#?zWbog^LpmTxVh`U-5dOcF;QW8@^R0e8R#CxH{5>C}`DI|mm;?v=Alc4G1E{zm zUEQq7E(=T7+fw|6OiK<)JV7-?(qTerbG{P7gpAO>{FF~MT9R*y6CTe}xvh8aSeQ6p zKCWVaXhEwJH=w?e03zhVYfqr@h|6G_wMfdjauMQ9y`W5auhQ*ejnGCg<6zr|8e49c zeXcQ1fNb^jey-z7H&)sT!>|)QqDl&%^|{-nAQ%VFv2E(XC^;bBvfJMA+RTuahS-84 z_;EdawqhtCd^R}>j-CTFSP&$CrX6tEaV2}LyfdY$a6|{a;ol9xm|Hj zgRAENWh^jrI?c8g#VAFQ5s^4Zn+}T4z@<^aW?zXx9?%g{X0cb}JP3pzeyz=1p0Sv% zbD3bI$oY+X{!PMcF{1?tKCi`>yT9$2Rc|Vx@^F3{&IS&+2q1@+HQAdK<7ZTI2}{_g zBNzKP>7;VTJ0x7V)A3_(pX7`yC99VURvg6}@lic$Ty%($8qR1En%2>l}P4+63)v&VKEP6p6_6|QO6FN{)X*`-1EX$7Mbp&FE8k85rgP7DY@)bu5yme_R7T-J zos9hy?n@S545W9GW~u$>xpwX9vVTYCC{|5f^@O$yMIyAW^sop=@n)4HLFg%pF+Q7jz~0CD&QiA`~j zU292f6s{l(AXWsim6O85y!O~v8yn`4|d z^tGr#vz#$g=)S7V%*Oj^`1_QHnvyA1S^x#`0M~YU;4|hRXivzYWurq?7h`$ImHKr% z{h4Kh>ioVTIC0r;l|f~SK1|p^T?N{00E|duXVTKmTvEK4 zueS`Ka_I?=v)y;4y&fajL$y-%k#}Lg=7GB2ZpF%-KAdkE75>DzcCJ*fzRb?5$W@{a zv}k+2j2X?_DHDZOi;`C;2wjpWx!6Q3La2TvYR9G${~&)S3wF7Zh_?zt17+>&Xy1q* zSo|97ITBlT5U&y7Xfb40(CRsVVxFs$NW~?ATO%ZhrMOebgerfYF75rL4eU4DbSrs_ULCG z+Z?D=O~}4MdCM99nX3vCRSbtQ|G1}XL{Xu~UpxI4|9wCH7x-fO7coPOHJ{=UYn+)Q z#PWs-Kj~(JH{}UHk(D!JGjWGTN2q9jLU#?SrShb1s6zwk9XW0;FhCHnP>bsd7(Dk) zVy``bbisd@0wn7q`D4%1t;OdJr=w-zCFn2HMWhjJ?pMgxv2M!gesO+LDPfKli z&WnB-iQ34dN-B27?o%=6qGvv<^L7`@!oQ-J3(JMT_{|i~lVcN~QK&!xf8;rSrJPnQ zC2H%JpoA&_PL&Zj+y)`oATSrFU>yr`RPwk5RoG;jREjG^06x6#GC0yh*+i@;mMYO zwlqA+L8vajq^Bl8XX+}`F+4#`cf7C)Kp@tIKYlDuKKV!9In|liEc!{9W6Pi*r_*dc z(Swpx36Q3Aiekx9{Z_VW$S2rW`l#A5x1)gp{*niI$b-P3Q=v*`@0Qa_S$rVNJbRZY z&Ez#z7x3+Ki7-C?>W5#|e^ruf@-I-p_^1Enm(ReomhBbtabuY@x|jjsvWu}_J%W{T z(fO+GQ+7?57<=L79gAR93AsqB9M)3xvQps?V@GAn37~n7>us-~I6bR~lyF`1KBm3a zwZa-!6{{+glcxxDhAv4Z$yZz8yOT}~lSp3DL2=1p(Ui3zS6sJQtj!K*GFPDc5|v@A z#ts#QJS%1^y&CIDFn7j}pM+J1g^$?_d}qo8w=MFvzU{+@y22?C+L6O}{O6FD*VD$O zIML~kV#}1@c#!R84nh433?>zS;d}^^j`iE)G&6{$yRe?5-jqwccdj6ge`p-e>IArt zS?WIXECB;_qEdVp+s!NS$z!4#-caHxY%yiYV*K3Y+&bRgw{=ePm~^szQHfk?<6=YZ zs1T$xsaOjceWU!kLc52FI6%yL{`q+C>BBEkmd5X;3l2P?n6FwB19Vk0;hsQ;__hj7 z-7Fq3P$uq9O!>~6?Mv}wmD(AWbokSUU#KC!U8Um#T^3aULWsGZ0(o_&%`e20&(UZ+ z{yp1nmhQ>DQ1x`nXr4US_F0hM(=CA1Zo=hJ>!lwRxBMA$;gO0$3Y=iLvV}#srh2I}?i7P-JmW?1RapdnLLBtCueU1;{^}NAb)#EZ)r3&W;T!e> zPz9-*@2oMJ-+94i#p)f<`!DV|#noT)8&L~MkOp0))h5yqn=9KWK)?0&wjc4O_GU~O z#{8tYN{o9}ImktzrHA=NYzGJR=91Y#a^?re@xu4or|!ga)QM21DE`LfUL93?gus{i#gRVWE*2*0 zbsFX2&!~!PyaMyIxUapdlTMOZUt|KPG-58|1Q44td{9>tnschDA*dEl3F%RVnPS1H zJOigQ?_?ZI)NqUlRL&4Br_jMMn&%bTzqk7C0TAtEzpT2@K5RAx?NUe-EuUc8W-&re=9r&JH7O_|pvn-cA8${FC}>{@ z6amB5Fd-GzIvp=BY498Y-DbY3{7Mq;Na}PUO2xhwJnL&%>r}ptjUv*uPnD1oi$~9g#?kMltyg{qFK(=WgMT$~7Crnedkm^WOFoXND-uxWm@oqZH;tfF~-@vSj`C`&j zzT%?yU=+NeJX-U!ZEnSqa)YZPrm7l&7XBru27(9+VEL*{^s-z_a9NeeLt=f50N+(p zwD2vBde=N}8wtC+`?*NHw!vO`CL~!!9q^JLkPyp)?`~^ 
zs0%w|U2~Q-)75j|c7eTgvlQu_6^Y=;WssP|LHZ-Odv}X*j@DW$s+j`S7d>6b7FhB= z>3h&4aCAythHXrfNuN&E!SlV)UU+^nywiWaa?Ga>W2xcuzW1aDx&qnS^w;ikyq#=@ z6&7PN0z7z7zmCGJKjnwt#6!R!wU@x52VSr(5+5Wslnu&4_(e~&nD_;M{c+cgYLeKegLPY;gj zxa<`itHf6ou6X%H%Xp8sa<|7h{@>cUc+S!ZMDJQyi(8Tz?!nZIg~o4o(mk6sBN*UukdJm)1N+kw|yecJZM%c>IA);UI<-52l)^tFY}>r1~%VW-xTqt zdTHEN#I19iD9}kE@xMC#8=vlD+;m#0<(ccrFvD_$7uCSy?g*+x@eUxXq4suSzn0qN zkTF~@STfezh&~Zp8Hcf|A_ro$gg)TBAh=KmJSLnzzEF$C!O6&?w~||BV#NalF!iL# zcmdvkc#yH&;1c*jJIR$0BwqsTb7>jHXEYLYs@y#i~nbFO@$8m#mZ7k=koO7bL~?xJW-(Yx4@bSMr{bhlYW+#ofj5n)aaR(=2HP;}1 zj*dOIBc-g=q~aOX*f>MMazk0jq>dh3P2Ybc4wDNda^@NOIz<;Al=F$K^Y3O4xgtaM^PEG%K4t=!~Kx2Iz;m@4TMoId=+ z_CyH6gO=eA*17_7XB{kTr1WB(qjF-{-X4bYdJ4243>c%E72|KJQ^g9nN&FQ~83iG1 zA$2ZEDTK(?-ZOY&4%EiZ)Imb88EoYo^eeu4qRX3T0A)eNJQrHR{lM&`F4}4#FoZ4E zQz~K^sl>Ii;!j8_pizh)?7>g!kmulN<`Zs*5TZi>xn7@70bh#&?Ku&qlcInRHisb1 zWFMvC8Q_IfwjmNHXgcHpcN&eK9cmWv_tHAM__BM9agB+R>v#8?0$kp|fn!6T7h|E_ zL=8c^EQm4PWP~;26dl)$r2?HEqzEY)soah;ZDq-Dp|YI@Yuv^hNjvdf8~WTI4X_yo zCj_eE1+t3fWAf1KIB>;oW-KgFg!NwfA0wBc5=E*uypJZ8JW5@>qmtQ)DbYI{`hMj zehpO|0*L+)^jcDYG!b3znnj2ueWPFS@x(A@fC;?5&dzyuSbPdSPEs&rs3> zY^PpPFP5eI-W44t6-g#zMc+0k7Fo0_X)T%Cb7ij0E`-Rhum%n*(t$M__C3vis2Ud^ z=NBMR$@LAFQ}pmy|`W;2NmQ< z(yU<9%9J8GxOK8(JZ%yC6lB*Uy~6tK?+i+5l{tcYESIDv)>)W}dH@LRi(|Nl5m?x? zuk(QiV^ubY4+xb^MKH_dpZsVTqhiAz4O}CM(xLAa13)9vC70{dYtChO1J9K#*NlMF3y&bFyT?m%iMdj8&GZF@lGa&9!lu_W)Nv#S-E?5y0FL%QwDz`j8%s1u9@g zdg$7JhtgA8k*tb~P>vHI9Y95G{yaa4rf`eY*4%WC0x<5&pR;GVerkag3H)xJ=$D^w zrO^wF2C&6q*UHNT_f7mmIZ?F~A_X4~Fur3J+J|?uUjO78jlY4x!rToR1#8V@DhlpN zlclOaF_Q1*vLPb~K3P-z5Q6|c<)?Bh%*N(1(_b|w^k5ZX5!CKWoYhMqVX{Rpd%qt4 z`J>;ol8q}q+7@8HGw$DPpNbc$NE97UEa7yyh0X|C}1yiAcj=O2A#Oz1OAx7n6BKEUd32qeYs(!m|J z((4F+D}9c)+nROXO^i{j<%~1w28P2QlDROFOK7iz%a3IQg*7aPx13p({@@a4ZcS*c zCpv?;z)rBR9*A<72fRyT0%yUJ>U=405nCr4~R)3?q^^e-=2VTpCO_ljyC|Zs|c`+2CHNpp2jxFA6)*yHz>#$<%g|S}MQX*ac9^H-xAB z$lu2@7m>M){JEza?OUFSK`7=|7M$DmurzoWffdDSL2sd(hqriN&Sd7ZLjlnB66?bP z{t!j*IO8N!byaGOmH)r|J7(6)1eje*6}8DV5__)HA#t2j{I1}mZpE^x-2nGdDh+OP zxIa_X{irQcojtAyZuN@kLxn0j=CXJ#n~8$Rfc$&!|E#KvV2RZnkgK?4T<^8Nk<=N8 z#FX+PBdKAJ$M$yv;gy(W*Xvt5qhouG3#blRAiya;Hw14{`7Uudrz~+|=xNdc6hom4 zX?*;{_UJqQHt0DzxmMYg$35+nW6e>7$)kkzGmh0Jm<7^SE5XlWdzgZJTUag^O3bQT z^7PY4q*xhNyfF5j*dxV8ii5R6=>nd3uu-zViG5LWj9=@r4W<6^_SqOBYtN~gQnC~v z>>;(HFcFl4`10-cTFi($6S z-^KzS;gME%;uv8*JaV!PXNCv*xV_H(B>cTLUzyky@-4hrJ@i`cUR|`^nYJY2*HAsO zTG*T<)u8&c9}m!tPZfZeta^y5GoJ`(?~t zL3cOGxp}LtSZnrBEK!Fd%LgMorz6RrAAnU*B2@rh#9(7s%Wc~-)Kl&tzcKxe7+H0# zZE|}Jw~$mviFH5T@QI*sf3NFbQNgvYO`IV*JJ*(0Jz9DBN84~@hU7x4+EKp}NXpxuK%@syB@pLiSq9oY2XLZz+| zQEFNc!eJNZ!t>x_VaYm}=~Z87vsXL=o~C+cH6!xy7}Z&Lm%YplA`@~+jml4k4C0to zz8#0<6_Zg-@hGW9RO7l9PkIsb-V?)cz`ge!X39+p3I?QOCikK0`2%6q9d`Pa& zzd8M-rE%@730-`F)Rl$ysu!4A2Y8--z_L=^4$}9YKua~yxQeLml)~kT5Y$Zcnf2aS zceoz&T*B4z+hGB{RQ8Bc<_DNqko3JPYhqTfVKw;1shP% z!YLDOjiM$!W=~&_n*WH!<+`m`#4F4e3;Q6|&Te2iyqvS*nP6!7H){Z=9PA9rKry)B z2?k1AoNAI1AnnCR9>4$S1Gx?S&!g{qnN~T2p$(pBpYTe|LfjI9UmR(o+%{HdRS@Dy zs@h&Mm5CoHH^Rcz8)#$Ny7x2u82`DG)O(jTUm}njU6~x}Gl#d-%BI=G#3wg3qWec> z^JlrbVh3^P$Cwm_qcACGq86wLnknz5X5Srd?sN<)WNVq)`|&{hYaG(UgzbdYC-NjP z=Oq7hSIv67g=d_evZOa89DXpuhvi#-R+ek|IPy}F`_*lkU&h|Kg3cxC)guzc7I|Cd zc)2X_L%Vf9r3v^QO_jhT%S#E7IGTl-jMd}l2i*m>aR@s3ye5$Mx-zOaQXT{>;qsSw zjz7Wmw!oxUj|;s!9K>Jl4q9G=Cpce~ zTdH_LD+~P^+UX<-3!T-1nkl&)udXC7p35o zG*5mb2)M|AY|UXhl0*S9ea!<_J-;#LNgQjK<%jDyKa9ck1zu~^rSd0;7Ypz~HIx~{ z+aL+TQ<vUH&Sd9L@N2>1VS z`kfe`C_rK}s^IneRw)F6;&rf=Q~)HtfSW_pW?}T8;PDw8Ce+=o?-znH0{Q$U$ro0< zZoW`>pKc$I$tt~k_DT$ycmt6ZYpmPtss$Ct9)yI)_}wcFY%;#A$exDgwY`ElFZ#p% 
zyxpN*C0LBxr8?p9GzGixNj$LG_Qj>xh0nDoJ`*1V??N6kF8DU0))SX{fBGxGje|fAM742xq0?fEQ~M^f z+em({^|yHSB!cNxdkrCG^n@B?_J1-}=uzbi?|E{a3Nm!VA2I~7sYqn9sWjdT);_kZ zH*{C3Qn_NWgKBK1V;k#4M!f<99-0_nBavV8KnWy+!k*V45+PA5o4_XSfOrcEc_WJq6aL|l#8G^@W%r=B zTVag$@_Ev*Y=|qT%|Z8y<8PvKy!2AcXzrc1pVtrA=&-%*A*n4g;QKhu0YE5UI%lk&0yTs9xJS_~ z;dwq>>RSW;Ro->yOD(f_IZT%|6R(_pv~fg;ByncaCs*z9Q88W>@`2M>x9Dsas*(+=bK)WP zN+5-7LY)#qQ@h2k!nA=XdQhn56yw0tN5G2f3rL8-d3&fqJ_-6;VwS-A2}~i_rMNFv z)th%>RrKzmFBR_8(25ois`=)M&MddWW!vN94`{mQBR+FiDdDg$P$pi>EezZO-02+I zYY<>n4&8z>bMjOPkQtQd@9Y94i&o;4(H6ij!Pp#^Moei(Feh)v;e>Zk;%94&bb$Mv zDO_lYwqr@7540FgXKgpVjn|cPnN`DgTe;5(s%%o^Rr=H0bJl{QUh$Y2!(<+SXX5kw z!dG6)6xmrdgALC^Cm-F9Jwi32iah3Cm1}gcFW7H4J*=BI+LhRMV5Ebg55T1OgoqF^ zfSC!IYBO0NfRqA_4>P_hV&jVEcX$kTtL#Qg(^*xdX>Jt>KZv_EF~$$lv0e|vS&DGk zH6|i*l?M!Zf-#2&kjfZb$fS@vUvA09#P-=3g~WWWeIgc3n(u{Iasq#O`j@AFarzhS z^e?=}xG!W2#CLDErT7ypik=3P-=>J0K8vhbAm_M)QlY@5eER99o{a|pWOBC*a&O*y zroCGScHvSyI*9XFyP?}OdP}B#P5+`G+g*N=U-Y##wQhXQI1@50sE6uBh@D*wpp}lXA{#My&agO+(2rwwXE3sl%my~CvQc$4dlMgj zhl*Q|eq@b0X<6JqQ>jZT0g&5+wY?VaImT*-t`$G_G^mY&J1xtKt~Li8XfC7p{No>- z{&hS3e*Dd5d?b zR7_b2U~mr1eQyLcpBN(G?*TTw-$dX5hZHm;gs9Vr9z1a5v)?HCTAZet85c@f#Gr_U zB;onV=X_pY!*;K64UA=VtSA>Udx>%HZyZW%m4S%jb+4sHz~5n{!x$(KjV#|$DT%?E zoD`iY^CSBb#^^MB`br@{^^VGHM+`&Z?PClrZ@A_T7z&dyR)87|pYl#D!fZVTs0p?A zdP__XyX?gfr;czcDoRAH%xQV9&EzdJswJ5rs(D!aa3GEDnUMgeVl&PLJ^zxk6{so7 zsdCMiMgE;Q--!!6so)rmDm~dneEyDRXI%R+MRUIwQijngs5=&GLW!&h)vcPNq$?T@ zVXVO8Z=8N``u#TfpIEK0wBjJP=brmioRZ!oVdP~dw#jZ+*%ip&qiO!o+nekJ>1+Ot zzNJdQUbr#2VDjg@oljIk6`b{>?bzw}+LuaECB>zjya>W@H1M5pX7}_`jj>!r41*CcW=k3w2 zVvI`=6cWhl)tL|uwSM|;oN=@m?D#h4! zmtR@~Oaf9&_UF32+}`j}1CYmBU~tuZ+DCj+K`SS+lnJ@4nnC~%F_s>pFKfuTO=}B- zk*Hiss8R^MyehcFo~8*1S>U%+D~tVCHMi%zpfp-y2@53~x0KRBHB1j#q;LtzfaBUt zX+a~&Au<4hExVR@f&x6_lFx-k$53=IjEgo&eZBUrO~QT&8TXPGR+&ZDy2yBU#2=Vn zj5AB6np8#mX#PKVFv|)xJ-xixECi0yisZ(o_3j=ivnnSCLe5%4)-PF zfu{HXEHgWF0_}kdCQd-1fE*Z(9N?T{o5>3k^euHXVO%;*3~R00$-xXA|5{6&9T#cG z`x=DHZz_{DmIu!2rA(xN8X>MgC=9YoQT`))OXM(ehNBRbWoQKulNfNi%vamBkelxJ z=in7oplCqaQmi@F&snkYSXLSR+&KxdJl#I$Az&Os=ZW?8!Bw6F6|`d!w%fk-se<9X z*-?!6j0;`N4zX@$mts+r9G=zv`pI)cf_Kp69w|%tt6IGT;z_LtzKnvi*`y$#95=(Y zWL*{$9D9Tv@K(zi;st*jYuUFxdo`$Hn}QXSs$j)BIo56Cx6+l3<1nPvaV*Yp+YQ0y zGCSGDOH)UZ;rjpjPm{HtjWt@LdfKcPLieVZ|MTs0{*&6k)tllFSbx7bj%*KV!Cs=O zwGY4&y@4xuM7*H-4JawF7yNmDiZWn0T3lcuzlaHjZdsYTtZwnt)Dz2&2iHwHfjSj0 zdW{%d&y$$K2)=nrQ<|^Gax9QIu^KP#6(hz;-J7MTQz;K7P6Xg$2qT^|)}U;1OEXd= zerh~?h2u~HK)Ce3#ifS^0fQ$#ql~bHkQ6{JYKr+MZn;uAc~V`DUl^essFnk-^>t}ZbCpbp9gX}&**#>QIJRm5=+3D`K-3(6FHsaK1QSAB2vyu?!u&UPtP__)CjVVE;f%+ zkxlbd0nO@>AOzCOE1=Z82?|a~L*Tu6E+cX*SM=}7s=Sa*Rk?FHC z(`7_Bsr4um@lb)U0%k0Z88q--iE1eIL|-!#?(sKHzkyYD?wJ^qh;1xyyh!(wVss&r zDnRv8=5kX$?*-p-WQj55en-_(B>1%xOaStcr%Lq%;mID!@@D(WnAG@!)ls=Zri{OY z*tWNoJ{-dX0;Rgd`nILvtc+Q?_qLQ=QziftEjJnsKX(!5IBO<+( z1{4QzNQA6jc3m1Ecio-VWh97VVBTp zb&f=q4yJngqi@BZM4a8sU~!MN6r`S+)Vv!&pBT;-tre%lSfq=dM{Df`OsRT37(Ryy zyrYmm5YYNfu|WLMep!h+i46Od+I z|KlGGnTQaQ$ASD@C>8RKxe#FwGH!q5RkHF|<3gFm#f^V|=1%)e{Ck%8Nlf+h;b&U5 zHsgU*^C#YBdOYrdyanqmZf{jmiM?G_K6yqBdP4Nx-k`M7ID$$(zKbO|PR#JR_Sq0_ zm9C_H^G7ZyVjUx^Pk~`d!$S-E+;z^*zKdZ`h;#>uxX2DYIQmx!dVF-PV;f=~Qf3#Q z_e;T~$X)-)xNfxTE3ww|k~z14d*WRCa;z!!nX7*Rd$h2^F&ITTB=Bl&i7BECXFfk^IEzT_&EM_ z`j9lbQg@2H;Pq|m7)b%{k6UJdI6)@^wX&ofP@H*lr`-8#I?pO3l+lY`X(}71oIbpb zF_Vl|V&zXh8_WIp>(r@x^z-;psbknfUP7Sop_ZiIvhgX_6>~V%n^vjdv5;P0wjK~; zP!V|D(q05Ix(nJX`$wd&WI3HalvD2;TCSN~iwU>M%Y_j_it=c;ptsPt<>#TM@r z67wu|t9rKc*JCUl`0)in-xNO{{(}+&*GtO!pcD1F3F+tG7ybDCSRH8O6oW3ETqZU5 z%3&7i9=jy;#vGad@nr%%x{$ov;-gG6ic32=IxopFiMb#*rH*|sMnJxe$EoC^VG8Zz 
zmpvdhi#7UPxezPTA`-5od;qpsH=5(^<7e2X2wJ><3MlmcC;#iI$DOy8nVHoX2uKvF zUZ%}~y|l9G!8yY(z%50P)!+{E&Q_OUBpkH%V@gr0rcl<$_F?vDY{SD^tFaqUhDZZn z=CBPiMSzGUmlFJF8y4(mxjmU+w?q?Iun2=`#6YC zCwjC16kUzK;1;=2P5Gf@KneCQ6F=Ncm`0yMFc#A94yo}i?=$7to zNY~h(BBaDeWGcJ=Ms~aU;h6x}ln81?*r%21iI*NS^I(jjG)!Oxr&>U)eSv;Z*GQ#{ zF$(yR;hh9mg*>{_LD&rOx3XNn z9#5CGj;xtzVMvw4kHFM}vq+jqD26tUF6c~B9k->v+#C={?-v`~UJTom74;SznV1P~ zZ(_P!d1;na4su2PB0JYe8Ffo|)l9@0ibuyL(@@SirZ(6{XRTnfgn^?fbnv$5LTXY- zy7G;i8+Od_PHmx?NaC z$!}A)4M15uEGwHj73{!w!5GPy1#RzNvL)ybnqJ!pbU|&8H|4S;>#+=qlyJaUESv2W zc#t+b%)&pE)9>3;g@N(O>~h~Bq7~W5q?#?q9jD5qQiuMuADx21{VJYzTiP<95`2*< z4!rFe`cP6w!MSu;D(izW*nYfPskM>ni;am2Fj)~Pml3cG)l=t%*n>5On~lw z>=8Jv@J+y8`yH?XC5)N^)N$iBp{J%NR4O!%eFwCsZI2OOXB@K(C`!2VQWo2`N5xG* zS7s&b+Ep6@wli@I-~`HinCMX6H1#;ch1=SSRaVq)C|YVfp~7*1>bW)4LCrHusz;tU zvHM&T2gdzuRV89PFi6}C!+nGhmpC27MXT#E^>e&T9ZR82w&p^&I_qIQ-6dKvW}E4h zVGBa}PI_Je-^B#>x)UiIXsafV(8YyQCIlM;N}0KcrBl7IGE$J_1~;=E5fx7@TMiHQ z$eW%?PsCa5_m87_`Q7PARhg7t1j~Dn%r63K-2>vH@|pd9lpk_A5z`5(EOyq`(@gqE#Y+oU*kF%?i5q0sZtyFee?(dD^4}8l&eS< zQON2tVIrxW_{~~Pn$3ADbI4j8gsmar3p}ZML)hC%$}?fSDf6DBcyN2$f7w`3`ASGt zZpex9XvUCa3e8embH(ba&}1C^)_x(4(bT-~jAT>+Qqg8SW<)|LXDRc@!4p*4cSi2_ zu`jEIXFCL6?j?Hz;hs}J^(Y9~0}2t|9Rxf;pjae~1Xr~s4&0M1)7v6cWf(=i>X!VcOen2fYQjB~|PaHv&Ix;O? z6?Vc7LH!~L2a|aJ7OXDC=Fl^?dvrgdRB(hpVCqpjP!hDGPlnE_S?_KL5s011JX|X) zH*9Z|^}qx6-=5kml<(_na{B1C=uv`i{4RBzLtRp~MY(Ya03wWsCP+rEj9!9E@w!Q6 z4o4ueHA0h1K9KCl-Z*{q=j9z6(FS0lPeyU2RwpjpvwfBMm<$5one6wCRM^SsUpVz> zzF4h!G;S}MDM#d@${34J5I2W6WqbbAQ{~AJ31#LXDH9J?Q{wNE4P9LL*KENwR515h zTJg6|Af1xlXbSo0OHSWlCtmB1ydVJ$kXt$AC$j<}7Dzp!GaX3F|?Ut=g-<=^d#SKnQR@!EhH~k z3t{U3_CrOv@7a$MF?4^7kZ64+HWLQ4qu512x_&SlhtK||RNV2sn9DX=l|vDKUMzF- z_S51GlJDl@T}|OD9p}WIm4JJ2+-F$L zinNbZtEhHp>>Xii+OY*%J60SkWGugrhL9-Gt~)mXYK{#+m4-ncv!cQbj!AXTVqNws zz@G?dP5BW&r-uYSKKX_%R1Lg8u!AM2VP0>^7c%n=+j43im`xMRS)^;kgipf!Hi3Jw#M;saA*_GGm|Q9 z$xXP|6XNN%Osw*NuA@;Nvej>ri2bUCN$8dkSzj2h+!+IxRAqZF&iPW6>udJ!B%rvR zh8bF@`kq^i2t|y2Q1dup1-j#fA$U1y5_}5~d8ErGX9195 z2;i2erJT+5U<@g#139(vuq~`X88_kJRKRivO>Y+B)8%tu~N!=6rDJNSXBdtb%O1YrFv*(N3i ze5tVV(N1Fi&7r?2B$^0D9ne`-NpXN@VSS^#9s}qD!Jt3Qh8F5e#BaDgol$&hwtyuZ zUjr&5}#0`^mIA$!o(>7(i%BeY3X2#yOQj_$cP%hyfp5Q?kzNd}<$Z%H|? 
zDYcd>!R^i(Lj_*9y^~3VB-pbF%Rxi5(~lW7?E%oqKkUc#w?3dUYmGCZ*pnKq4Swt6 zY(RbR8gz9ETR*vc$1z%>Rb-5gRol;e^0V2{_*gv~p_@FK&0aN++|49u3hrIO{r z<;O)Ej*_Y1ZY2p*hFJXxik2cHV|c3 zPNUm!jR{`1KN8@c9_Fmm9`8iu#7YSN5*={6d0m*(0&0+lUrgx0^(%VB+su0>WCxlhsOduIXZJ+g*Sjg<0j)qI65Rr z4K-ZvlGy&i{Y`oUXaIq1BAi^w_3$O%HX44rseq^Rgh{_L6*@O#jfvUde%Y)r1W{X9 zlqNwi)h^mv5<>kke9lRGxE^&XDR1%1!{4qKj zHil1v+OofaV?jHb-<|x*o==l?WZeO>cA)*srM3P_%s|nZUZhNO-)Y%w{taiv<}Wmw zIInLLKBZL8*iA&0!cqHH$88}&I9HGt?M9y~wxc$}bF~8~kHcxNx;w!6=U~~&V*PkY z2f4Cfmo`S&5oNV)i&k$WF}9tRNW6`QJ$OKT!-dGkSO@*0kI07fo zQhv3vE|Wwq0ZK4|i><;2zl;m08CCV1{u>i9&R{$Bt$4?-z z8GSh8_SiMWRlz^(=&yr=@5OOdr)Z7=w8>ZmIM4Evbq%C5rwlFnRs@Tz3--s4X3NQE z=#%%M`dB*|Q&S#5Gz;Vk0YY~B5bS87W6I~Pl9myJ4)GB zw-=uwmD-Vy!IJU+A)sxj6Qn!EHAH#1RF(VjhpIgH6PsTb!HgUd1ygW#Y-{l)Li?cq z9RwA&K%B)sCvw12XwHUWwMR-2#br`yj1F-bKR30J?XaIraMgFE*aqj@)QsqK(C1=} z+9T@o3*NNhzi<)v_=-ym)qiSlCG}T2{Tmw|wdleDgiOVRkx2XN@GYid&tp_PKBNKp z%f%fbs}9u!9YR^umcaJqT271`QlgY^7Q>dmNDMQ>q;vQ!DVnOwH3cY^iM6WFl243y z;NIC)gfC>HJy0)qPdOsWiR>@iACrjQ4^GF!}k%%&t{NH3+#0d z;`6nMBdWk&K})2$vG<8URgcnLvZ9=v?N7O37{*ci{EX={-XD6omRfKndV7eZAR^fXo@jbFz0)U^fPb-#{YbE@#>Kxliu zq#Hnev32n50o_eX0zlwlf9*cGLZr;(i%4W{?OGE3(O~hWQAvlO##P3kXJ`Kd-B1=~a^_)qM;Yjd2} zmG4)Te1fD>mCB1$ZPT11QkEr3Bt^=WJcAoSh`0$2l9o+b?gkn_6OC@T8%+?D>B@}9 zsXFswYVsy$rt*fiZoZ8zS-xMZ2>5&D_umiJ1@g%`ev?$H61!{(K=uY`Y-DWUW;RSR6qc-02gz#aG@ zBqJdoRbImI5Y*0QwJa_gAKu?6d!AOo(?cNG)>tF+&k`A%sECmf7zVj4C)0EH%2(}A z#~&(3^sce4D0DheCDvMNiwN&+*1>!51!;uiLYUCLEFRu=T@^SG>Cj z113if4yRwtTr1wEK^34}(tX+>v`G{us_kYKGaX4O8wpzIp?!p@+eB=oRuws`DRxD( zU2o)t#Mt!{dwjlpA=dp8Ejlg*7ob`Ys~M+wzl_u9SnfYv`~=`NI>JkMm34ThT~f6l z;eWK<`Io1^wO^`9iDH(@hB@*B`GEi+Pi&M^#LYA18X!7!Py|OSz(y|FbFV)08(?uI zRa~f5m&Klsy%I@F@o9u>aIU~k*m7C=M^p^feuM7@eVEOn2nP1JvB7HT`2^6~Uut95 zlpMP|!UiDbk)g2_8Ie+eGiY?oW;50gVKHj4oZ+#ku<&U45N8Ig@8zpOfACq&m?m30 z*S6`9FYRhOSDEJ*;~7w;p9sBM_PxgRHdg;b)vRWNh*c_flS7n>FATbfF|Sy|{8dL0 zyFK_=uy@>hO*WQ5k#KkCq%llG(lIKzW(aMm;N~?P&zS%CJK)4X2Y8e9)0uJJ2}Skm zev=8$~W=W{k^}NTiRRFGjIfziBI_gSAc6+zgcm#i6!!z#Yw}Y!^Z^B_5+l zFt{IssIg%&fa5$)V>|YM`&1~nFGRg!6S{Fj=<&szDp7>M+fFpj^DD+}Q)sm96mA!O zatmERel5-qhOE{~T2D(rrwn(XwhtU!EciiN@R)Y+@nk^ewg);>P(QnLWIjg%^2>jB zqO*kF=u9CK7O!{ysG*i%ozpn13$BahYhV34+uaQ*J)d#+MtR+ixi+l`b%6+yO~={B zgYSY3;ije>4HBY32G~<7Ka3J;_BJ5eAOYH~bH;Cx(0X7jd5w%`h)4#QL#{D!Fk`kU zwb0o}ja;Bn97ziYgL!U&`!|R1w6{#`gNI<|1nL?Cs>W2HcsJ%cSPdmw zdcmF)DymKqT4iZlhj~SvCjB6#qU|P3Wdy1$CgY0cYs1ktxRdc-atxVx4>>}7lYX1f zEyUJDi>zyxVvBdxnmaW_mE)T(IFe^Cah$4Yp`OfzAb7-Omx;;Qvf|6rhFv4YP6%8Wx49+l13#(uWPSvmcquf={l8i;FoO_tABdV0J zicC7ASr3R#H?_Pu{)l;$C%-#+&pse6Sq64{KVSYPXtjlK8}G*3n-*7F!A{(L(k%+) z21_zjuiNXJ5U#O)K6aoonCR?7=H=sZ@>AR7EIm3+XuM9>yV9ywO5AHHg?{(QZ&}UU z1i^@srSNhUDOKv&Gm(gDbjGBSq(SAlfCRQ+biNY^fyU~?$sNXTtu zVsYsEB05Y0(_V4%gn6R52y9Ql>9|ErkpNelv2^hrrPCAcbXsD|&y3#17vn@Q?^ItK zAw>#tl3%gD!pM?0jj?a!IWNnyje}0|YT~g?*6|cx8zo$2?B~Y2m($av67m zd7C;NXC*sA;=5Pl-2l^4i0Uq`m+mOT4mSBOh1+2uga*S zlur^QHi@r-f0TVI$I`cSyz-}Ap;m~_C+B!yocn+*@WEXmFeh`e5E3x^lZrFS^w^MX~0>+S1&tJqki%R+2-oUqD6j`e*w5 zkMXvNLs2f(bvNPo1Nw9XD~@W{#(J0C3M05BZ?&E5?X{_L*A~W{fPXysgGqjPC4ve83vapqIDvKSJk!4<)fAz_*bpF(~A^7Qv{=*Rz1{`~DYxb{dcm3r;jk>4#V zYqm&PSQ8JP@e^bR8Jqjvqm>>|U&RA#FN%8I53Yo1utx2obU`ULrkA}BtI=6Rm=6kw z;>#2YLU{YKvb1C>kpi|rT=g?|t}Qp9o$fj*+k`T`%Xe*==N^}fU$p1YWZ?|j@Wv_3 zERGo6f`g^Obs_YiMTUTlWd;!5W|xF&cQadx)1-bG64pN~bTeIq)yC&A z7Rt?{Wd(irj2I_RrV(@)hQ>}3NVn&V({3v<5?`lhVuRPRzaw1Qd!MmWe)5082KH-v z#~tdYi-VrtEXzTLWqCD4XI%X2fZi+B*_Ub9uqnIMz1WYcXEP3js<#)6Y zK(egL?0_|l^aO5(0*`am;vKU9Ns?7v&Aa%5eBV(QGs<>|G2%h%6(ae@xCG(*>Bzff zx}D4sgyUqCDhk#h8@S1;Ka5^vZWv1a&f_=$rmD7Vp`iWVEl+=HKh~UCK;kta;qs;U 
zCRXruW9{k+GEPF2iLZCS^tf&yL`a8R+e#hXW&34KUWBsKFZVrc4Ui$qTPx=I2l0OV z#l?8n4Pt(&Hc()7>7ucyTG8)f_;I9kk#lCM|DnCr&M8UV-jg_UJn z1Z_UxE?b0zC5>n8N5Ux_tJi8D|09tDPj4;N_;UhqFYI9}qSuh7SG9|nlZz5Jl zusEz|T;D6PZ7R=lqie$P80JnTQ0N*=WSrB?+ZxuI{dfUW1S|9^9!e^C>^qU>xog}r zq>M4XxueU6LmDSdQHu1OJ*on;Pkv@U);wz<{-`Z~b9LRmJjbNy8s-cFZBSw#0*n|E z09X4so`qh%Ikct0*PeBenKrj>{7apVfCuUL9`Fwgpqv+`Wt1zzE=^BZt~xt=5}c==-m`# zrVx^Pu%;Xwppy=wc$kieliTNAAp_VW`uJfyw9{3(6HIeZ0b6naa|tzU=Y~7~f!z6@ z#HXnLsU;ps-2zts4Tov=n7XNe&}|xz3#s7O_4r>k@#$Zm{?IlqvC>Yq*n_6^pDaxi z0OMMt*6yc)UcYYaQ)WHED!=0gUmk4r@zbA{{(IA0;6k<@RMbAyXUT($V zwD*p-X^_`<RnD=Y0n zJkH@O>-6%?Hc9ZL-CD0bbY%YOK*2mgpO3b0%Y zwG@nir=Skyv05zOjXv|Qk^wP?eNHu}+>l<|zFN)lZ*i>rv*c-MAlt`Kspy)z!^w zOzB*7tpg+-@0S=Qa!;r_HnB;q9VeGh2~pzmsD|;=`|l_eY6MwTJ$rJL2>PtYVkn75 zwe##`&CZvVS=&3!+1geeTbLS| z&2#PHS5Ny(?Uob2NuWicNR$i_)l^Oxy7I; zl~4>kv%gm8Ue>?Fw0aNDe0`~`yQ9(6M8en%}?(B*YmyNN@k zxw-Q$aQUO$XOD(p{$+LDq34upOpH)dqwS^?Rf#JaQsjg#xUHI~GnxqGfc4WUx{=|V* zpD8Kcw0f(|1nm*|WTlWqvHX`*uN9YbS=gXqKUrRAO)je(&uE}XEiNs6jE|SPOu<>8 zNoE{Ac}3hYZ3nwEO%>^ZBZi4ZQ#<3S3Pou`Kkj-+JAsgkt084VzlK<-61{X_a`+l< zu{=O~s$wnj4}_Qb$WG_w^76Idnk6jzZ5NDH0eZtdfpY^&Ca_hY&4KX}lM14GbBIVz z;ee@V8l^xxF(VU(`O58O8fw^SOL?~+nB?XLwv$ZMj~Fp=P=bzMwjXO=>nPcbJ@VfWJHq>`8a2PDq81kPI0yI4?sg~;dD-n{fzW*|>9jFwvZ zygg=ZZoS-ITMpu=EgxPJCRzNBNhh`6;$~8l3kdr>u4IdNGVT)7yOX%RWKpKu~UP#H<&Ao?w$=5+;`Gc#xFm*AkAi;1#s^&K;yaX}dIfo#(F z%1Rmcy>^d?kg!Fe5?{Iw+%{OV8A6j%PVy`zU>~pjyc+DjhLBXi$GAYbCj01( zvNo53T1^vbv`BoLq9~L_ge?@86AVRNQEgpaWoc;-n1;xaoUqGt##L;5xR6XZG|E|X(dWGK|A zjtP>v%?PNgiEMVNNx_68RLGgcdB!?>9#=ceDWaO~M| zg%wjOsNrDS+WoUyh>6404*wBx4unG*6ZS_02<|*_;PHPe^?=|9kyifh0oSWos!xsW9r!2b)w+DPExPam; zw-XVS%8$3b?wrzVm%!MtD#(wrFXD;nXv|UD=@9vQ$ETlC*U!~z{5PX>i{D{f8$S|v zddXHYBL$NjyE?1S@9~>uF+7af475pSxQcg~K!8j{i`D{Q;arkRSPqjhc&C9jz~t7wXyL)xt{)<;!*`3^qRbAU&t)`W?K5Pl$Z+Gt}%; zZ`iFy4n#`h63rv-QM)NzKyM3BK4M~|a=d1mwmIbNbB$L0?q=e2d5Wj;xdhhi^k^)` z1Ep5ud8!Di{|PyB(0<4s%qT zKVVHnGp0L@`H+r@gCVP~LZ>=!JvKqK7937?d_ii+hwXcU2FIUd3ssH|+k6xCj7Zwd z15o=!LJ4}P_otl_!5(u8>{u*P89U|XDD-a^9tOs*PktL}fzhu2k_QpU^Lk_yMIa<` z21I3h+x~Qol$pI4aZjYWmn?e3Di88FHs?#OAhReMdN&9~=n`myFtvBdrA|kDh&n@6 zVrE4e85JZ9S=hwHw|`%fhEn@`QpvF^l8&{NQ5zD~ObRIiv7q_^dhmx|v_j?Pnr%@T4Sm`5-F5JE2AWq@pztz70p!JycF8t1T__DT zT|5(kQsc#lch(NOMT2YX+ZBnB0uf9JV8MAn(muEyUF?+Xt%-#c<~jdrId2b~K+-6) z@8Ob=gRmVU2ay{@!o!B6X`j*x*#SNNK$TqY83i9-JYUWmd0(eQ0q-}hpGsT*iSQ3o z8jQm*&7qYSC{sZ~3yX|DoM{V!&2%I8$I#wxxWbQa9ncxS9Bp{KAI2)n>n3q<+zyVE zuufI+wTF`0IFX>TjbIp$yee*?ykR7RBzUDEtz?D?5uix6QLgqu)hmn;e4zGnkIl9? 
zB?fU5ELKU18~h4#R2Eq1IQ4n)H=DJD{JM3J+5N0o+d2Tn=*LqMleS;rS}O< zvSizhyu&~o(ahL|K00Lhz%J4X<_d;oy=X?DHa=5|f$nm5(NVsn@11}i`8Xu(Jgs#| zy=M0U<%oyIfyUW@>8Q=q?ImLnUG7a16MVs*|9tt{y{~@t?}9az=b_XXs(aq{W#sBB zLDH7u7=f#9djAQ?61SG?qoJFmNNaFDzDpVur@(z-6&OUd)5Uw`IWGZJxk$V3ATCVP z`$lYqcc#{C2e=* ztIZJETV_J(X8ES^#-$BmXyRu=Bp8Ucz-FSHuRw8DVJ~q2o1&eVY z{W`ANi4us77H$Ul`E^hllLY!DvTWdj#-iHlM$>P{J_(-k0i^_O-7FMXS?b6Ss#YqW z*_z3H+9$Xp7?+Wgm>rg+(V{|c{#5=po>}ct`!l6H8uaEtS*3)ozhoco4ZuldZ}m3* za-Ih|N00Wn83$ceWuY(atgA249#HNb+iZa0$`~0Tm!{7Ece$>6{x`wIHvf}D^Z&9& z;{VCtoHmX4^ec^8VpT8(#sPOR0tP~d+28xMNGaL)$_{NE^A@jP%R}!;=?=HUt8Fl( ziku9|aN4UdUzJ9)tz(r^-+v}$Gl#N`jrEV{2MORQ8Lr&$rafS=3WIr6PLy2nFO_r8 z8QUlk`@r5P)X+pzzvCCGnP*paE!x!j=4&IPh%w&o0)g$O`RD%Fvy}JCH)1KoQ7{(- zqOvVdLl=?}_IScA$dq9G{XQd2D1(jPP@{$^n$6$i?_g<$;I9esvOhkf@mI>`W{7pD zj~pf++w_UDwJ@UQ+nlDY1(Ah#jVq2jF$&axQWV`>o>{#r2Q(^xB7lpDy!!|odk+=% zM3_>y8c8f%>Vr=YGX~c2=Wp9lFq*r8#|AC-CyyRsuQ{uZ4@lvLengk17)t|di0y2_ zT$3bG7CGAX5qJ-mwVeuY3tmT)^-@OgmSm_AODVLWgQ5sSJoOGQLTIn9+BP7J2E}yT z1O_=k6V0H_tRkDiowtkU>Bv!T^WhwMP}|Wu=qRohpqm|ZhI$&GF(sIe&Vg)pgAHgC zbwIoTI|@T4&v==h^y?Ufc5oGJt>JDmb#2R1`cw0HuaKWWcrBiwVMJs{xR}P@^x(Q2g6Y)F3bYX<-gQGC^@dQ3 zw;hsqPh4ClMLqqMky%XLa~Fv|vE#DSOARb2XytOV|KOPyB3MRTh*KLRMZYe)9#cAY zBb+;7d)8pP0oLUyI`mOF3MZ4ZBJ9H7#WJx63?)@n#@&>h;uER+h~V>0{MS|rB;!(^ zmkl$!z<8%T_J|+8B@x1G1l__iJ4nUc?QOCI0*ph4Sk#gN%l(wyn?;~C3k}eW3^kQ1 z7`=5|v2{vHt#->guO?Zl(eipr`Sf@vz8J5C|{LH?HVYnykE5Vd_ z!;V4|@&$18gCsNPG+(cbIr~98a8PZgxwqU#AW80ag}`QkiWDG18-r^p`T;+ z=9HkZb9{yW;E28QihXyZCwp$<`5R`_rKms31lwSy3mkW#jxycv zQ3zryVyf13S}tKr;@R;9VXdf{**ZuuUS7RJb0B=B>`ils%(*2p2S_|fPY4Tj))_uD z>MQ^}wLjBTAJ0V7Q9YbObRcO=Gal8N`iT*W5(aQU|NQi0dnD6quUJZUw!9wurwN1X z|8zNQu4sY?n9RO~g{YEL=YMW{u6u7mHCNupAC{*dgbGCgw>KLBXggZs zvTwVCDM(s7dW$Z!K!VQNN2P6t32j1O8Xw!G*c)zP2efJUJjXvuQ`tyI=CWMc;kjl; z3LwJ#P-*|Vu)JXfGIvSe6D-x{WQc;-~xqnW|v|aSzGLHZ7Q=B27(?Zm)-=tL6`A z35-5>t`_Qi-DH3Ri{DgSHXh#Fqq9y$3WDbGoZXkpLXhT7B&gS`L*Jf~9+8~q>pg+| z;%Mi}QY-dLsvg;rYjEepnGcOAm()`a!_9+zR2Qxrp_%5Kd^O4~a%+G5djh&>NzWoF zny{53n%G8oCsjY$>u|r3Ub|ga2UC@@v3el$YjL)_q;G0xmE_a_OHJn3+i$;Jyfrj_ z1k;z{94Fqi{c$V~lh+pl;mJlD%hlML z{eVuJIF<=tS^)|p>07vqCYiCp86j5zE%wT$E#W6Yt{D^%@d;nredj7A)8;@|Rc61BE3?$b`4LEkY%xK%p>koY&VUajEt%{h~{iiC8rBJBmNlXffb8)a2(nW!)pD<_S4l&2M`Ej;1OfgANy zs?q($d(3Sx;?7Q?_E)6%B8nwYP*ifhANyZX8%<7*4dzN7Qa13IJqO;drGB}Ep$0?5 z-!<5T4msW`tD@*Ya2o;0Hs4q4K|4!gmWaBoEoXl+tN>G!umsx)-6P|=7A03kMPj3$ z(3Q>JK{=1i4HqwCOp`h^(U#d8J_I4irSOX+F7EBwnGyRP?|SI_v|(w+Q7tmxB=Sgr zadysVa(Y%?zx2{yiFI_4tBw}e!OhQ_1vJrKuG$aOxsz>aTMijX?R+?ovfp$hGJOqau9O9fY49MElWb@HT_ z%2MiT2*3r?Bzaf6iph186L+JlhCd^ZFDr}p>IIxW)ZJ zZ3~3BFD+Wf|disMi-r9U?+UM;U&wST9f)#VWU4MI62!jTV&r|UoWdI`(bfmx+GC`pC*Rb*(HHDZTkgf5(a%bNguq1H{-OAN5E?81Q=BD8Z(A-Gdgv#D zg-o>XVVp?y?)x><)g48k2A#Y?!$_G8%Fo0?WMX4V1#4n(M;BWZ$-omYviR*>=)QnT zNJ3?=-p3SKRW=$3K(u4pT}qEn=frxgFGTom8Km0V*`D&TG)~BKmgqN1&5B(; z?nbH{S2NrzvAYVi9rpLVr}URg%f5(H4d%)u6j~V^FuBwuq1MZd^>|hV*x~4;gVsZ>!JrbWCpd2XqRxAjQMCyw_IlJHqC9;p_=x=*C+^%M-qjVzBbR626yog zc)uU+p@d2UuyA2<{5~$6Jqz(&D^J}v5F|jXX56rjc<@36L_4N#obslYA!E#_L|~8` zlP65wC#(vm+WRmysGCieL^UjM*KwRA!L=j!<4B<$rm)zTh1kS$Gy0O-CXpDA>P&ilL}iX? 
zMD3t+F3wJw@dA&x%3I!Bt~P4_fxV2CN-c#n8L3RO!i?-ZKybvU16dGIKRypgSo!WG zVy~-0EtRO9nwZJJCD0{sa$Ho(%%p2vAgHbAlIz8`0Ymy+KILzlmWyapuDEI33^;{m-5&-?A0xB9rk5^0RtUyDDa|=bb$H zO4*qhF_dgf(25Ic_VJ^q?*~BuI1dl;V~uZCrenaw@%NvC`~7WvxT%WEY1|t?>o_Q5 z$}RQo>!4lj{Rqzpbj_F5#aJfAZddGMxR`AR4;>8Q`%a)M$t3L1PM^e4tYlTWcd}RU zxTP|?W{*30@@n~vF}i?^)SLL@>uskOe*7Irf{rXfyfyFs-aNT2nBl~v2xrQ z`_39Y;wgnRGRVrS?sqJhgGEr4iG8FZXPDpW-vONUV}Y|iwoR;~YrF>$A0MJ^U4;!* zRv$Z9q>?q_{AYuq)OmmHozL4!o4yt9o`YmccR`?tZp!V_5OlCTHiMaADP>NS5Il%X zhDY}r$~Z@pUH<#vZ?)l=PZDpetK!}gF|S$^R@Qe^Gho!M;hdB`5gdRV(WeZ#n^pcL zDMu<+k+=kr6Ns+jpNy(nPNFKMhD>sZv&LiNB+-T%rXJUf*`^?Z6yOV*?@>HalEq5- zm6C`!pk|faDv6c@`T&)O!4L;(DZ=H52d*;)I--HP*trgaY;8?G$D;P(!KUSs9j?ZA z!FjgV7q66SDIQHnkQj*f+49c$p!T>OaSCLNEWK#tv#ymgJ2F6Xp%nHcv~&oiEg%?1uMs553ni_#8pU0z2n_o6$VIMo@D6Iah7dz_ji zwy3c-vY-O6bUi{fJFJThLP`yyOK}118^lt$eW^8{e(a>frWgy&r?1#cxuGP#G7Xxp zc-)}agyM4P;t=G3C@w6?$>)fynk}H(>@;cjdj7dsAZAW(EJei+?yTa5(NswdWvJAM z@6a`SEwx2v_L$^U4&`@1(8rFZHBR}KvB_Cb2zf^IrkhA7PyRzWA8%4szMsde=6KM7 z$sVL6RSdO5K7-YxBUO+|^BMpAZLdUOnaSXkJ<1r;JPnRLDD;!NJisl)D>Cd`5xYrv zYa~&;D}3vK;VD~2=xHpY6jn!t*^tv zgTvqy!Aug1A|={e_5dabLpHp8`*>~Vv7HX5PF3_}a*rKapjggH0_66E$O9xaGy%3sp79|<2jrL$MX9N7831@OYwF^x(} zvPQ7Y1T|F^FQT&7@v4s@uUAJJnupoy1+U<@@#C8GMZErv(jpHSIRO?dpINb|2xlU> zEr#OELA5xI@FBYGov*13FY<(Vu(g+fgLJ?@x$;-fnwKxxcTb*BO0C#&{Ni_{goCX_z4)f1}0+S-i2~= z#&}aUcH+(bAqHQ9rN~SouluP~)f_i9H(jF>AP%0=c|ivu<@lPnT}1`r;3yXocXPdV zCTN}>->kt^C}`{FKsB<}$RT&{OcU?!9@>($XT&n5*2;X#c4hdbU&MP)gW~%Xy(D|= z#nakJqodl>Y%;Lh9;B+i)!C`_Ia}B=K|rP#+2@J>DA4T-6=+~>!EPl+`9o%bso#q2 zq&7fLfo6pt6nc2q4u^!i&+oy-@k}u28U=40E%7qVj$@USK&#%&xD5dRMjAY|4+sqq zR;+=vhKDJHOltad*|!0y8yg+c~T+V5Zu(6<^&_z&9))^bfCR`nu--cLAUI+T5$b|Y2tRkuPDee* zra4EPORl!9nD*%jv$TY#V7uo*mwiyR(v!I}C~!1T&~^95V~b@u^w zCD04?G=uG~?0TaEH^5P#B$^ay$+54gvbttGI1QKf-F?DeiITovIcf*7cPg5Ik-A3G z;~$rkKiXsHE7}XhF&T>Wlo#EsnYJ^WFhg&%QAG5(bzq7}rL;FZeia(sD5LZY^55PG z{{khpKgw*y!nV~Zgz<*aW1Y&i_#9U(+%BvRnn5o+gz05Uxr5DXfBx^OUFswBrqShdpDWMVzti)^6@D$B7;OI!zLN@9Lq3eL?nxWa zAkT=fr%r>YBu~9o+X^itL+cMgNov!}qofJfR0bCLBZcXG5Xl$He=w5Q<|&he5tE@O zBmvftJ)A36-75P|hp;jUtzca~KqD>8()d+H|J>~zcz-=~FWj};?VAL6p*$1%UhwfT z9n*wSwbS0y&?hGuVc?&Zr@yi_f!&mm<~I_QN$OH~K<;mxONFtL2DQhPokj=4!v6gn zB}4Yy*P2Gu-Yyc~f!tc^HM%K4n~Ws;HW^I^I&KM?Z8t=LE>Bz#OT?rGkEn#Xoh_;j zV=|2yqn>iJwzr!;T5uIhn;Pz&YgA3vqN&cFXEM9;@a$}8Z%IeskL|}=K#|+6NHT^a zghdaii(iDAQo=xm=9}HH64c&8(P!l-9C?oKH<7@QGfj5$42%g2kQwB5kZ2tIhEU%c z?}!mz`$T1fR7Hlp#QF&L-bi@~I~;;8QfKZIVt1oYS-zhIcfg3NP7}4jNvs-6YRv3!`x`>0;2-$SHDNS^~oo87@#Q- zMDY_iV3bw7K|*KA^jlM{=&1LDX)fvI@pm) zC0ZF$sQd5~;g4@-?;>P@b+DhV--31(#6NlRH|6%cbMPhqhV`5Cz6KaAiPeWHk%(Uf9dZz3JhJU^@`N$54Q=!$cbnCL4X)(8Z?| zKn$1??dCw76XUc^KYm;ur)h#>2`Vvzc@Rb&`>bGkcWuQT;#3?^(4Sy5ANE$Q$Sq;h z1#B23Dk{>3wUh21`l!av>@A8vB^1@3oMrapcWIh_591yCG~wkvzo7DnJ+iyNs4>Tv zkj?!FhwXoMOTdah&i=EOf+X6tCorYGN_?Dbe(4a|lI}ASOv4YBR~Fo_T)Ym}J;%8( zmzlZM)Hz~AotSibnCW{UK}Fd34WbDLADUnJU(|NOt?$tyy++h^fyMlRS-gfwU<9hXz}T1UEC&aJmO zc8m{)_Yv!bbn$)oGWiu?{kUs>QCr0O6|c6X?X!7B$kJNt=R3L9QsNuMK=;6)BRWmJ zeVkwRL7}-)BdO|PTJR78>&E(4xmocO;I9{rE0Ek#d8Td*^E1If9is2yh;XX=K_tuz z1_VyP1lrm3?w5)Edkes$y%2cE)Y_Z&6P7-T$zI=4O5eUj{iOGAI(c%h{EbPQaZ{>g z3tLjGb1y)S^dZRY%{yWs`Tjn*x>Z~V9RKg-`)dAMWQy`-`nJ0k9} zYb>fnbd6(~EeI2__|rgH%mL~b?_ga!f^Pct(tt5;o4@nfXP={^Z{%z~HGkeNl9D-u z|J&{b=@&|(J48izit&LEY*`gfD+z@<41x-TwONr{u^Uw&ZThJI`{ZRz!wFMnzw()4 zqi*`NJIcK8#An76>g)(3aodfA$euI^2t<`ciF#ATw<^35iAhOs{BpB<+MXo`q|}g* z<_&g~3U5!+(-m~Ukd&7bMAM+|G?Q`R4yQk7PauDpBsG@^+m=H$c7?Iut4QJZ>bdT4igCH}Zf>pho)Dp|v7%7NFra@UekH7!HlnRbe_QrZSzCgNh37lW2a z1}09VDU}JdcgrWiqV=!;)5(9VO*riD*z6yHc{jPF$-|v69Pzk-QZ_^Ie0fyF)TSmu 
ziKViz=x=y3tL{HpGgfr`*6^WyVF?b4aUn{))Zm@{neJmNcf2Hk@N4bx{tNpvS01@% z2Xksa0mCznk?Rk*g@3$v@@}woGi)?|W_AE3jeSWsh`IrZ{m3-XC4q7!d#}<(BM%Sp z-XkH$e>whd|C`1-`P5}SYoYxAKmC7!0>)i3o0P!}?&=PJxg!lxMR9SN)6h(%h#36F z=!oQ~X6d1NiNdF36HN!Xid&!R(Q>nkeHU^H^Bu*tgxwMM5ipm+7U>rnbrYZ!?6Tal zp61&K4EMUt%{MW=+|F})33)Hg#a(RSu#+ZBnSlX!^gyQAe`Ta{KX4cSljb6wO)Wwo4(#>XgzF zHq9=*63;KC(3wJ$-Az}#2q*gglJqojsvrl|0H(rEb4N9a^mBgrC{!=OrCQ4y(hzxKf)u>*?Dg0AVhh!o;$#pf3t7`UA5Rxd)qCKqkh1))Juj({!H867%ubFO} zW&)}ZOMzZt741naoHu&Y+U}P%mdS`ThwUGfAYZF4Rc;EwQ!fSSmr83XqAWK^fk?4@ zIPAH@0~~$bF)_zKIQbQ!-uOO{3rFwJDCpFZ59Uw?Gz!x&4b_e~zW%TrrOF^u;mi&f z@oZmmH=W_-GO8XgSvvEm(R3EIVBLe8L>G`w@n_jJSQKGlSdjNm1bTlLi^81rIIz4k zV)fBMxic>5D#(O(%C-_VpWp$rjJ_K}SV8pB1BL|VmZ>uk!~I(_3HZ_xi{;2qQH1h* z?BEqxJp=8XAs{tO*Y!|$FL#rcy{@t35w+ttO@sTO4Fd`Jn%mTTLhrU?z$6@YLVS=W z?hlVd%^+IqOhia}d^3j)H?d^A&01^-ZZEh;W>56h9dZNk#xc0j>@zq?$ox*vQ#>rT&8Rk< zE-ZFisQO6~UUikL#Qe#hnsh~~kjFHRtPfW6$Xl%lOoO)?>!gvmvlSZ3Gn#Wyq^>dU zZ1-WUVIUKd-7T1nR?P#~q-t;sc)@cvn2<^~+xg-C64ry zBjMY9Gu_I<3A5`#U46)zMMQ$5mz7UpRf&IY z;Wa+8i}>P;<=VA4rvs9ZH9#9Lyj^ar7=NAKV}J_gCj`$5BLf|nR* zQFjVeR8p3RwoR+yB%_uv=YrAh-YiSTw!&!s+@|d(n?cFJ3|KeZ9n}`tz6qad?i&-^;wTRhs3T8jp(TIFxJO$nb2={DQAoynOtr2w zb!tK73=xPP$Qg*3G0)1U`5odnJjD)XCv?nVJjY;=&O6I|xV^ zMT@RY>yp5`n4nPNUqj~<3gfI(eqL#O^gwq4UXKp{8u9 zz<)~5gqpAt1V_nC*1U2EiS~#9vy6eEa~c>JpA3od$Z9`HQD%A>;6@(mOyep0u=FX4 z(D;=G-|p`U9{5sRz+uxD=TR{CD~xlrb)yY>u97Z3SI!6HEHeTd%A9GA0~AJ9oDm9a z`iLRqv9u09^CczgEZ9KomSzC`D&DK2u*116OH^2oV2g$AG8H$6W|0#HXh@JDYVDn} zPsuvlmumjTk3s2acf#PSM@M^!8Jdu^OXL+D6ETUyIB0tHCR~kgT-q<)!C|v zyj)MYf!z=Xh^s5MN5lbN<~?!UnxP39?}2}B2xmBOZP_E=^0j3h zrDK}`*k}xOl`5yJwrsei_V0^gzGiUB*rOE$vvJ+hw1OSnJt2yuN$tp26&Bh^SAUsY zTOYXO9%tXI^Uz?o;E9Trf^Kr7 zg3~uNO1oDJ#q6sybUQ*Og>Ch|P}u(-FP+Xq(P>PG;k6gN-rKepOC> zVt>VqL<8+!J_R3~HeLFw|G{KRI~~}Jf||)Vn^+om=JE&z*^wV)ltN9ONfw_08YpXu z!u2fget8f=sITd0w3FU+;#9NZ`YS=umeAdh^h|P0#rqTs#sa=FC@V%YoU(Lf)8V0u&QH9BD^y1`a>Btp^otU<$KG=83+L!BXQ!&AGa^piOUBRV9!R%ChaLWuBdHWN!5bgN|-@rbv|Jx^m} z3ILL`qr!>wsa{gbbc2k=$+;T3*>IyQq?+%WqWv#p?|?8WE8%d)1J>|xHj=#t7o~#Y zI`0DU;odr(GN#tuSt10*kZC)?rpW!LFP`2m#L6#EzW0~^uL#b6_7ApUVyRByHKMd$ zKQN1l9NPip)im+$CLZndln3EF16U=A>)}8JE3#)e%jBYP9Geh9q7HdYLK#7=DfKAw ztS4pc?eUH+n*g=XB-kr=Oj67-&5)uIYDVmXnDm|qrWMz>>^_`4xpsEaC%)&%A~?*x zH5Q~!G3;`1T>&bqiB!!Xh)1rs%QxaDh6H+@y4q7EWs9a_`z}Cg5Gqgw4GYEslhQ}6nh$-WKdG}Gz}?`x<#lM#Uxu%P#Bi+p*7}2 zaDh?=y%iy?IRvD@ZE;R54n2DGVTDj}MswPD!wL~+BFEjtPIwx)XN(_7d9_u|Wr?j5 z;n`A@govZPt9DtU^e!Gv%TQh(?V)C9hiacD8AW&`W&$3Y`?6T*m#f(ME(DCk$+PtgA5a_N;bV*Y?U#&*ad1z|#ePk}>F zuQV*#FL$S`!*&g-JP(FaAuk-HY~9 zZNm4q{j{K{yvrX#0%zrVi!{f6a5U&!yF!F0p8npR!U+(jGbtvmiOU%@Y6^of3{ws4 zDpn#zey;ZB2m@qKa#k_U+#;<)3z4I?fLjAt!?<^KT0yo4X&Ve4N9W>X=a-u ztq(JY0L1~qMg+Qs5oMoIYw?2wsF0I{-#>J?zkt_j{}Ss349GI7FCPrBN-XeEDyUP; zR8^7*NY*|;e50(LqQ~EtU-iD3B6F!HAYmJin#zssd0$KAi*r%>CjCu4Y=A^AE}pfK z+?8@uxNNd61Gi?m+}C{G^64xGi9X4{U+}*_7?m_Mz2`?ya6QT!1?gB^j35aCKHgh> z&9ikms7IBR#&L8Xl5nW=KDus#F*Utq^tgcmU<}4$XcqgwoqXSZv9SYtsB@_;u}y7J zc-kjVE`XeP*8V6ETq~(X6%*#OlPoqqKmIPK^7O}X0hxz9F8H!;cMA^L+dhA4TAw_5 zyU=(1zxbb6$A!<73qb-v?K-789m3wP%vO2m68b#Wn%Su=Wsm+X^wF^20)#Yn7Yv_sPI|mDv(1zfHXvG#qYh&`h$?O179y zG^QeQCJ8mwTb+c$cT#(;_I>-i;^wmSGSUmn2pDYUlj*WJ)pE=-4QPAzeI<^YCD;lS z-N5urkYQZt(;Sq2<}*$H+;JI1nXr!IC|Aljmk(gTQ#S(n%nezHURHR!FUJ>WZk833 z#Oi$eaxnAUgnNTFbj^f#^VIcXxoKb1S~hHZC~ziH%FD1*P=sSAhR(|KO^AZi*q80I z&Q%&7_}l^JNqc;_KvUd{mt3U}O z6CeF~$^1_90B~DsbZTEwq8j_rA0hrGndg#EC<^e%2kzRQD{o!2FR!AJ^MEGa-|>t% z-a=lRf?#s??1a_3%?@%hiGjQ0hm@~_ouzwz7b=AyGGYshs=4IG=k($xt7_ZXEK>(laky%N zS}P#8LUEHs8#A?~H>$uABs))bcf->0|!)M}G01@sCp2d;13tF!^Hnrac=zgezN 
z2@t|rE3Fh$ha+`SV>w9CO=n523zc3TV_(zlvyakg0kWqAq}odj{YmucVSt_^MrzBV zSN?n7W~~wi%%e1-fW3kF?GpQi^$2JRFSEasBtWM}gP&S$ZahUAxEHc>jn0UhCTDcM zGHodRs0VNBR?uYSws&lZHeew@R?5tJ@K~)_R(rv~P^=^U;(2p(WyP&@WZ0pt4t+{} zSuhI*E^y|A15gi{@G0>U%7Zw(Q$;yAF#Yj-`KH|-Nm%y+NyEsUNnvj?8?`JV8tJRH zd*p#5?k7eFlRg69b>W%vYOEC9g)N${-GzJQv$ic!aB3;`k2m?~QCwv@LL${vH4`5J z_t@LBAEmj;I>k41^>jOPa*A`|YMnv(sxepU_p6B+uus&oQVYydyw(Gg0cXLndupb~ z4cSQq?x>M1Mn)JX0KfV&ws7H-Vpj~uguo1547B442cvPX3I9n03u0~Cluw1f6FpF!rstbhPI0^GP(v@dV7(T=KKM!IZv2wAn)rx_>PhObC&a z(!ltCY9jGa@JRc(?21V$g} zJwp;XDfY=#Q=llzava8u;szC=^#oN%W%Lrb3>UuRJ5W6172|kCnD}F`O)Z`f<}(gp zy38%c0h|^Z$2X>k$J2Zx0^pBp)-=AcIMh_!6xf)m9&^Tj(h8Q?cXn(53Ib8?1~f%! z@EkR5mFd;El{e8uQASJKhe>=E4&g~=QQan76MH)(tZ#pL?wN8fZWk^HRBtGI54F9Z_SGk#3;c-+q$rIC_<)r zWl4*6B4yQ!2BLV%`h*=45}~qDP;^X3UR-D6tCvem5hIZy7 zQnPsv*TI@G8cw!9!NDMe{3{I;hIM0Se4i%av5%UV*EZ4_V$2PPjBAM1j>z10$j%sh zDYxc%dL%_MK7m(`(=gJSy66xIPiE%|I`cuVZDu6%+iZPnL~m{{uBQ~6T2F5Iad*_y z0nzALWWtIGTZgsPvAZVO(oNwH<$*}|C8)&09=WpGuqbXBVT;-})!l%v@bdTdPzws?TYf&HJu$7$2KI1R}gj+2MbQO*p;3At3I z%Nb}q8g`IGFtEDxXM`*-`^X3|TMOag@C}zIEHyoT2G2{q2JEPaR@6$tpgyB%=96uQ z$J|zfEVrCU8qzS`UMR&vsu7I2YkisjuH1lvm*GQSKIl%Nwv$MoQtZB|LJ0vwP)*&% zh0cg6b{N@{cCh*CXy~wfqqP!+K}r?^{w3-PAHP;Uf6q4Bpn?8qKMJr-1*-}0c3CiP znL!|WZ?{j0ID$XzM?nfDRk_`HO7-640PZ5SKp@80WnW%l!bNr;HvP6SbluR?9l^Ap zEufHN7AfrkHG+%_uw5~P=C)B*akOh!?P=<-#U_Uy zVHC%c#2H|x#5ZOa%A3Y85{z#eUm+S=Wp>KmgZt^-di0*Fm+cwz@*tDMM7#7{#G|r* zn5P{mj8$9B2D2mcM_7q%TlXB{C(1Ru?#0k$8(M>@kr*oqkD@r*jQg&o;jW!ljAEj}=d-M@Pdqh#I zb^C$-|Kx+({PWL$Z=_!ssP8X4XD_~iK4Tn~F~}TAFn z!cMG(5Lg+2!{V;j&T<(Iid&h|^lNBZqo&mC$jf=?B#92mYDJ?Sxh*j-a#@)iOsv>7 zZ<7y*J7-jR{#^NDEV{!hxG7#ZOo}^(MkvBcj@6E+W;iCP>(*4w6=4UYV57?P9>i|f zS>UEiY!arBQV>n!1EJfbITH?XJW(8^nLTMAL!iiO&7=g${DGY(PFB=2xsvl2i zOB!fLKP_7x)9NJjl9~Y;Frhqs#*rI2b>{X?jN=zQZo%5fc80cU z2J_%HByLqmC(Z)I6<$vCc%|uHDv{h8i7r^v@dX?#WiH%n!IGaXtHwRtQ$nV;S8&`U zMmnB*z6tP*v@qy1TfK(f97HsEe|&^6P$z8^v3q^Dm=I)NAj)>_YFsAgcHAZsixo5YjLi zXiV@=akn@@dd=d}0@Af>x6J??jMF_|HmzM8Css$4!2MBT#6$?aW5W|XT-yl&MzuE# zk)#758o}=D%u$+hC}1^cZRDf}*3W1ZVHBw#V`Q8Cj5WvQjpxc+Y6bvCREp<9_FnS) zWHPeGidM8IsaF@B$K7uvD(yL<*(P3-NW>GA%KyZcdGh3?tAH+zVnjNLd=iGTlWJj7 zBuj4zg`M$=Q%iGeL~Gsn`9e8&(a!Zj*$n~qI^ju`lB$!bs|4t~Pq@zKleXsGxUB|x z3sWAar7z2SXUAv%yAV+y+WK&nVzCrm_0JSQ531Be?xy|85bgKl>@ar(4WYE%bqq~& zF53qzldKH#yK&3LvKZ5>LI&3hxlLBZRwhpLwAB(JRBoxUvcfzo>+#dG<>D7(iS(TT zOnw*#+(M9T?1pTE(J>8`i_q1NFU}K%pyz_E0Pz(KE=X!&k5N#s$XcAfY{H}+Bmu^G zkoW^CC%fL2avjbO8W6~?psIKp0r$MgdEBbFI5Ms#%rzONPAvqkH|b2Jtz=dBdbv5b z8jo$sa|>P~KCX!&^%nSj!+96})xJbL=|VotBd7se000tRXVY$46Q~(YLrE z0L$H%04vA{Xu>rSDIj)44<5`9tRFkm>5}dnCAW_CBQ8LJ`mf?}i1Z<8?-Oly_ez?bSbY!x?fcx7ZO{m=FXGJl4+w9sv9$*6?2;MkRSxO zM&2Jchh7?+-H1n>Jh`Lw+PSPF-Fq9shMKm3naluXV>T>DrRVJ~yzp%Kj9nelgx!%k zCTR(=$@+|LNE2cr!nef(0Sj;Yc#f*Wo_=80_%7WZ)`Q+Hik8@Ogc=a5aUTd@V@zE5O8WJPm1@G3GCRHWe}W!o^#o{0aCLdiv6=jR$?j4zLn4CY} z{Ey_XLJD;EI*J7F0dHtEU2fU)=qw4%|6+*MzK``fZDRZsN{Wp#>lOX2+94ag>yUU7rY%~FP$bs<-zzR zbxW-rpRuPe!fidj`hwExXO~JikaUD0{DfFm?1cnwKOk7R#P#G-QvyULsGrO zn4#>5yBxp~1UolI+8~UMg^x2S@e|U1O4lw92L>m-T%Ark5LV*t1HLiQtq9vkS&s!@a{;`@?3sw2d6=x7#q34<IN){anN3IE(h}FGbDEfk*B)vk@Wz{`4lq+ntz^2wd6kq zvx1%x!YU8NZpmB~9z1)FiK;5K!ay8?;xYm(5?h6`!60?^9VWgJx&rNZpb@$f2&1(g ztgQxuk;Wfx5+{rXSUVE2HNIlrScgv*GQB_Obd8mMa!YSTW`^sjgIkB#;IQ`#-_kCPwz zXEZqP>G$mQOo!R`YDpCvG}VNZAsER>X4B%d-k7kI*hCUDH$lwX^D7c4=A2MXs&rVi z-XSVM>d!&)GbqA=+ew(`gcEITYJSzW+lQ;}_9@tES}vet?2o{nJ8FX~OMYfOMcWQtEjcwJHFTz4aBEXc?6%bC&%HHpmZ>4%*_S z0ilTA${|#Nv2QmMkAyVJ0LvB*DXi;iN1`#POb#P!KXeIONjE_Ve>MG%8~r-?X2QyL zHBh(U%zDF+`7jf9iDFu|!+q>!<7XZ0R4TsNJF0~mXMyMj4G$L%hIR-SDy$yLn4y3v 
z6^G-vk<0Cy=5)x0xN-PDaC@5w*TCKPd*yRM4(7yW9O*(W3?5DP9UCIMEQXnisZ(rx z7EBFhG#w>u=L{_KG(H39K8v?>(DAbd;xn8z$jykc;Z z^bG>HN^8v)K+DL46~*7Vo{F%E>U07ApuqgZ>4<+?uxLa98}}2e5&QAv3F3Ay(UhD) z(~l(O8tE`X6Y3wuk`~J9R6L=DpOZ=`R(Gg6+oy=dXaImMT!yAsv0nxp&`r9DcxDWO zqfEvrI-!&;Ap<`$Nye#Q1F*s1mTJ)e;S*hbH~--rP{~{u{pq7 zizgftTKicXU!6gQ@-eHMDw}_12$ec?^Dlpj57JGTK+v|60FBi19n`v~)G>ce(b&ur ze03Zmq)k!F5If*e?#sW|t3_i-g4ob4Oue|bGzBBwL=4|8!xo;6qe5iyKuJk1(|F2I zVStPX7}&370cxYel#~uqaqV<5VEqWI(wIN(Xk$os@)%l+OBU#0IX-G&hArJsbzJRD z&K(63!?;0IASFq&*l|#6|09mW&$_&o(hQXMLq-o;K~ou<#)34PUtP3KVbxcM2CveH zSJRT7s=N2r#X=c=$V4gqi9fsPLVCrPbqbiZN9lB5PG}t3W`NgrP=+v*Q>h>Tk#fbp zieyJN$H{NYm;Qm~L;N6qxrkLlc;am0IxIRAtwg<>fMmr4Y_Rnxo-#ae-Y1+Q{u2ppiMaUG zw;0+qVS=)u`q%$^9U$~y{&;L~3oVofHLcE;btCke=B#Oa{|OkkUx0evgJuwn#-=jiGDABW2B&Ky&f}ft2mzROqKX8~0qDH-lkiKSTwx^49R5`bWmMux^H=PIIE7gR6Vsikm810I$)LXi-`@;dUADpxQ#e za|D-F94yl{NJO>J$RULIDL-eDr4iu1gx`CyqMGH<_w9s124vJi%i5^56DS8&MI3E6 z1%##;ay{y}w@Xjtk`PjFP%=L_Fboi$&ai@w!^pdKg3SlrL$%IPeU-c9_hr7T5HpCXE@jR* z3Tdsh?2kul%RH z_L{8-imG{nqrMeVhxr{TJqJpOz>X{yBV|v#5!Yc#P7=uod!l-i2uWwminFkhH18DF zj-JR#*#mEK7e+k~$Ui7f5ujUu;INls^YZYCdQB2d&(~7oI7u`s5(bNk>mFa6*&#;_ z6FGE;drU;{(2Xp_9D&+oRRX&Wh5<2)GjQ@xwrJQ3W$Ic~;j`pGVL8@5s$nMn?YWp| z|LnKQ1&cmER~{Rsh_&{T;#Pmmxu7ayCYg<7?m}pa@ z`c*GJ!G$J@F-%G8DzbN&*=fsBC76-wndsM}rd*U;x?-o-fxFn)Oc^`6eseAc3eZ`p zybv@cFGIr17AHO)cwa3VN3p2Pvfp5y$A_i_j9D?HEpVf-qNX|cDj?()3@FO#jvq`S+uHS~RV z;D00n_8xthjV*{J;Ip2|rjpx!vt_#??v#}El#+W+_{V^o?Q?WLe=e@a>>6zm>=l3& z=tcy8M?``4OV1i$ihvt`JMJ?)5?H%tJK*)VRw@F~il zp+Hgrd=EauUx+<2#6$M8H?%-|G5z@g9jtAg2+Xku073*F$ zG9eIY{=?l<%GpSUb@yFf;q0i^dNGKB>XLT42NX`lAZ3Eyc$b*&@vw@W2IXd};#y!R z+2T*0kis|lxn90vpOPHDvt-nzX-vy*YISfBXxauATC&#uI#>}&l!5TZe!2*%T?$NaXLD0O@o;P1^ zFBnhD^=5@evKmW5J-aR(VL4~RlJPqT`(#}&G(Fdd?btI~3dRtTN@2Luj)i;_8_@`L z=|yH=8oME-@GJA>0?AblBZ48ke{}JueWz4O*YnUy%kC#XLSQ0#A%qKj{I5?xJpQ*6 zYHR-W=?{Zce2&;(2FSf!UQR=(NHNhjC9!gaxNY_Q=3$uR6yaiRhF~151bDRfthA02 z5gj@^j+1Qnl@b9HYh3ke3{qG5A{W_cry(g9jNlmP7YoKe0`zN69kx zih&oLd>98u%VuPdzf#Sk-K&wBrrttmy!gU2W)O9TMpqUhLo#j=sW~8HaX9ppYws2f z|1qja-Nh(xP`YPTB?jOI(Ks*neHM&pydLX_%CqF@fEt6V@Rni;{ECv*SRr*${RH4u z`;g&2Va*h>CN{wJrvWIK(_jK2@PwM7x-Xs%sx39~0B={8jqMiX=Fby|aKr>rYl0wn z65)5<9>6g8Py%HJNm>+D+b%g;#WsGKx(ux$VaHux8V=ZhE<&J~J&}Y@HyRjv_6|)@ zTV@4qrj}OAE!!M0aYhA{XYBM=;f9)hqz=k{2UGOv*rJGG>PxgYjp9|QdqiG(E--1_!O$dBhOI?$a6lXB%TO{A><&%-+qAj~3ZF+tDVPNpUvi4* zN30oNw#NSKF)}u$r;QklyhNe7txs*0ODASN-Ae{8a=Yg2h_1yprx|U32AqplceYVa zr!3#N^D6ia&tit;L+qy(H*QeLWrw)FQf}BOX(Wz&oHG6%O7lMAEaCy0LS9Wdd%zLr zZvRe~oy)dqq1f8F`tF4nUMQ~y$&~lq>Sdt@KkcP#IH>Tb3q~K41WH_Ti;KL6JshG9 z8H!xOI?a_IGTADOFO(K0u!dCD(SNb8n+Q}1fH;}*_;KpZ)5JI8dhT=JQM%LKID!>f zZ=Bk`nw)H>iF4M$nEKNQR??!m8+|A5b>G)20#&}RJ{H*cN>>Qonm*;|MB)%0!j0ZFB>qYzS z0&Pb@?p3#oOLZvuhcQ#9eWZ>*UGf@^+lIZV?Sp!}Q0qYVd=) zGj{zkf>IO}*WPW64Hb&>$sjImzz*>&Qw;M^G56?a@ml$MPz{a_Bp>A-?c#Ib@3#Bu z&Jtq1c1=~oJfyY}T4e>-FU9kODDqd=rUA-w*+F2hLBHK(Ah<4fAr9trHLf9jwVF0* zengnjj+kB~a5e~arr;lX`c$3S64#_8{JO#SYmq=E8<`_O!-UR4ToBoaFRB5=u19X_ zUxyESoBT^kkl<{*WQWEC#4Gmil19P4c$jikk!`^U^Azafye4XMmH7~@#19gJWooZo zq&JKlKuBZG;WGew<0y{*Q#tvSy`XxObZz{^zIaUMocH3xYaD7a)Mr+1mu3Ix_shxe zV)a{WSt$0XyCGo{^@XaU;kw$+?=gGPUc-p^kIPdB3Mzm`R?z=i19`s-UTHDcA^@Xw zS5*Z>q}h*uN?U&Was&EHg~zH|g?)uqwtd{?79``EmX){z+&6Y&WK#URwzD+mA^t&^ z2zv{5A!|VX4(?%Q@y*ZW$1Wh&rHYm*?%ozFE#cA8R0@qZp(8 z!rW~iLtYzI$fvM9;ZvYm!nUdIUI~|0V?x#0+FMI~qj>ZVU!0|#artNogV7P#XC|eAwz?D_pfZcggJv}9@kI5Ph>3LaQWw^2F|(!Xf(dCFtE52i zx?Q4^Cy4ZT;E`gLB?ZGbyA(4);WK^%fqpj@lx++&zseFQ=CxhIPeilYsb&vsGyw<~ zQSvlarsD`OWi)EYGi#L5@4CP9O1b8C0LeqfGCG0-v8z}iF*Vzm?xmk(tr3mk95q6y 
z+?8tveDo#k^;kEg1|A=WJ+-B*y9QwTcWPJI*b2cJINON-Rc0m_AJq_3w7THf-UMNm z=b{sjlFhErj>!OOns{6+Xb;MS$JIis5V(oEOx1|0jR0}RS7iB3;(_`~;zp{n9~!Z6 zDs#gKVuRSF8@1!I^ER&k+xBd{P`S0~7^ljN+fA9`v3+8$aAbPHwtW)5hu>q$qIv5X2l3Alxi`_P+?4 zRhQr!UUZ3Jbj5G*9%D}IhprG%aSN(3v^{ZuRN=lbl{7|ALqp|r8QUz((+XxLIq#=q zqmKTkQ=6n_LUMaHrhH&rpZw02aQwdl+mEvV@9GskVtFiQ`{c=!H_Dnzg8z6@|6BBr z_&NZ>Mf<7NBzf`OD$pXH*7FvUv=%Df)Q0;`)S+wS&IlG$Tb#|z(i@aOLk#fi zMh*I>V%WJIfEYc48=93HKp{dP5ojH8F35j~$)l8KJPwyL&>1y_NJz)_N35s}lRwti zxHUFl^in$9El&L~NN`kXTM{I8{oW*tW&aMOCtY!kY)Ro5)S}Xehy!}pjHYoMtl{A%74TMvM zRN>4hB&CyFaX9T^p1u7#O$uTJ2{JkkU%4o3hWLeoFZ5*$5wJ;&yC$TK{B#pb2(m5k z9k$v9_=3ERk>EPQ)YzK4@!ko|lei*Jzi*$%TALxX7VlCjNMk#aT$t?9%ube#rv~tu zUWs#(>n@in(&@FOl=0FcLt=AV$GJVs3clizQeD0om#4zYNrvYC#_|}kp#DuEqLAOR zT3DQI>44@W=y4+ff%F+8f%++M_cpsbN1G|KN-7qE5qy01yRodgDHaFPP^L_}1K?hA z5m;qA9s ze){R|1^kWmWPs60@12;E^cSc%=6Wcud2|U?$ep!0>3tl+^X^j=EHP070leQ&zBD>l zI6dN#^aKG&_7+b#7FPtmlzhRI|M;1ZIUK}V-R{@+E3uI=!o_Q`6N%ZEZ(UPCR0dZSeoX)P7QF z=&%zTyTA>YV)3}04y9lM%BUyZZ5b7O+^|V5gTT&o(p>`1kOn%>eFe;5?}gI^(qY?& zJ3l@I5hez0+eZi_IH9DYUa|McPH3zxhov&9VpSc`ptovdAs*iq02Sd{Do6I|`w@Sqk{Q7dx3al>e@xP0>NrG` zDe2Qgwe_}sf<8eJ2zG=txgB93z2Yn_3v3we`DGA-MtYZ9JN zkF(sHxL~mK?1>$bB0f!_oZdYe$fsLCcRsy#L)wP0J=MG+y87>{exCxSzqtZcVbD?JDEOX_y#4Iq5w5_qM)rEq!+=(nXfH?GpP!VZc6d6iW^Fi#{?F|;m`0AxiN={!U_`_I!d@02esW!!qR(nF{2cj8bX(= zeas3Q$B@hjOs`}o0o#B#41p)?GhBsev>i{gyH@6Gj&J5>d97+oB(sb&j43O1$kSpe zd4pQRBZQ{jVN;}Xg@0Dwc+=i(q!)W3zKES21#qYu7%PZ6r6>k%;&t z)a}_S+@xe0dmx>Sehc9x28wvXC+QLdk3E4(C}m~ek@Z^cyKa*mN*!Puz`Mk3-*I+_ zQl<+z(?93Gzck51s&eax4N)?I>|ziwWe=5G6*w7^h^=Dp5CL+K{Jth3kU$z>s=;9t z6IOV*y_9%epq@I!5HFBzo8`;gOKf!%!u+#j-u$e4u4+=AhmGRzlhIR(O^$-4tF)}7z=7>nh3Z0R(dQqO z(;wPK&;`Al^bRa`MR#JWC*WcjFQm*v$eDmGb|fAtZ`tEc{<55YX@iHJs5ePxAMBM|Nr!d^ zun`rj_Zt>IM%|it71K){5PdsVczVq$^dgFDyoePHVl~V-dey={r3~8F2%`UuY{Qz!z8f^)m~$)T$_ygNoNMD7v@p$m8)n zX8#65O+1svq;^%(I*e!%p$`dx%e%udV5QUNIwnNqzXj+%494HrTzs+&%D6LFE1R0h|=^9I&cC&$r9vMf} z#ZjChUa*P5Gjjjpu@|RVRI2ZQ78U>~PY82W&?MhwakIk4}F<(M)?!<$9B&Ym+?rZ`fTiN!CNOUL?#VsKc3! 
z32z`Qcu_XtHh+vkb}7n-@B|zH#qLjSMldMJ2e6+S_;1lf@XWKaL7}I@21W@Gi3tkR z6Hk;kZNR`wpINc6hc-o&7o*DkI^kTUy0!ob99Db8w!n^ktr*6o%~K7exiB}~pkJkc zU9+*iu;K&&EG)h;@(Ep|km=23TNf$=y^+R)<9Iu%K|oVjp!?yykW+A&6z2q6%IaiP z#l!Yg6_Oqte-;Zr*Bc)a13vAF?wVwrXt;;JKmn7s4Ul89920liqP4cB?Tzl z{{DsE1A>SJlbjNj301+*F!^lD$SOaF0Hves&)Dq@Cpv-=$*4v30>#(O;k3gh zYMpCOK2PmAW{(-xLX+VmD$zjD=ArxKuZ$~j&Vt)I+@by+R?uM2&I$=!(ZN*=-VQWo zTF|-8h+!KwBHOI{?rxyw9H5^mwe&sy`vE`v)Xt}nwa`vVc%YYoYmMswd+RnN3}f-2 z!|bc3SRHsIV8OoqE^qkm>4&&rzmPBXYdb467J1HIE0?Mf-UC|!liN)3YWDHCG4#Vi05t~=R8wgx*pWhZ4j%=VP;(AlxE zNjKu6+4`|-L)c`T6*(NA2u z9ZXcLG<&2n&N&&fbA2(X0nf?GxijN^JdL4yLxbwga`+Vaj3?orP$fp^#pH}Dn9n4o zp?Zf9;Z2h(N14DZS+8iQcuzIUALWI6+RJPW~Eb4 zm`x!=%nOg8-?bIU*Wr7yY)$U$#1kQ14FX;~7^5YT(J2eS^|);(g>CS@QDDqy1+I_Y z-qCmOzs<^v3&|T0!-$dH!~+YqFTz%rrDu;qxrpl?KQ3YAIdDc> z1n%r%Sf;QB95&r3cWea86?3kM$}hRHg#OhGOLvw4gS-z%`zO_wlTU{gc@PVC4OzfI z$%+TRRz;&=|HmBL)sKyz!RNuhO^AqfD}S5#s0|lV<66r;PGv@V&4e{Ji5ZaS6e~)` z*`jUAcgnlT2IU}wI)zd=7}t_Q_979Q+$JV-^WpIih4_DX{9b6%nE`$~O5?Zh=Tx*n@vAJtywPhddP_XQ;PCttG=;RVfNnd`mJncoXXd@x!wcFbr z1zNjab4yG*lJFR+$r?)V9-`(;(mU&y>a2*9heY;Ie;vB4T79z7RYSxcBLWvT?cLZ` zrL~~!{8;0ue7~IjHYS2syDPII>^!;ghhuvL@tjVVODZzkITgdC;W^Wlwi(fx8p=EV z$ktA^8re!)ViJ3vWXZK zxQk6h>D)4gV!VR2Pm}@TYVT2CUoLd~yv*~W zE+9q-#Yv)eFkXAz*{(zW6M+(57&$92NS9_-7HxX8(*Tx2bKMI$6kIi^Mlxfw6>2S) ziaOQw4K>1)aJ~{iTe}^% z?MaK+CGMbfz%RL(Y`YaxrFDmnY(BR7c;n z?)LweLUk{dm*TycutUYTKL{`$uUr=X7M{Pu0C;z3z0X5_R1A31#xglwb>QTZM zMo)+$sEwL^$51Jmhf&CxO<6okRp??jui;$U5x^Ve1n>yIgZKLs7Pu2{BsMbiyNZAT zb$;Seqq5;9mAK&vYJgifuKb1=94?DW7a|n)1eq0aM`MvF+cb3RHAZG5!4#B232itj zU=HH6m-H1849(ulSD;76_sjUUuNeZw3+{wbNg~8J#RL`Rt@uK$5QUXUgUSW!gg?0o z?qE5BibaQEvdFY)sJqZ!D-^xW2^$Sk9HE0?@!L`;Xwtc5vps$AGP{(zYj7=zv84cC zd;@qsF=B~7(oW3&D)_wLjf1DdKIY?NCa&lQ1%d*OL| zM+GKQ7^RIl7WES^l?yM}e6$?gBe5Xs^uUM&5zKWUyiNi$7HYg!>N^^Solr^ld$BKA z!@{yW;e&RTtXa00@oZ|oQ5+a@?iI&0#eS}wd&edtJM6jhHbP?LV$Zlm1AKswK5%1M zg|u6a+ygt!+B28d%WAxEeR4wMCE!(_#)PfHJN%h7r7~TSU0RllUu$DhpJkk@w2JUb zi2s`wsZI|d>eRj4R0VVA4__~H^YOUa$>;;yqw1g!{7%6qVsy?-U4bH*UPL4j03u)T zld?;uiecs#&*4A$`1CiTP<-$B!&7f31UGpkRzVDh=2aL;vB9hCpNeF~?RujV%dR_``3NKePwX?$v|+fBMbxxHkzq z9Q0U@E*MBRgb2?a6wzMDjj4<+n-cOv0WZ2AnCx)mK(tYJhY6tFQUxzxQi=;=00SJ_ zP%FLIpZK?W;{QDUiH#N|d|+>1B*?=TO`xew}*d|fIxacS^XCpcJKnV7a?Z-XbsP2IWm|3EJ$Sqv6TIh~?xPF_g z%wDHMPOLI=N7h058HBT+AZkrj8JZR6v9)64WEyS^+@GUH4<0?0&74)h%^KX*nueGT z#&$>GMohCY-d3e7G^njGrC3Gngp(GB5|`L>rrvt1JRM7kG(-HJN}6oY>AvH70wb=x zY8$hJtL>LQ-W}N^dmCxW)a&dV>_E$t%C;|Yl}5l=h5ttstGChOqM$*<*s~SF;bP5b zsAQ8I^>5z?1@6*V#EG0_`FQi8(+97X1<$uzFAG=WQR^ypwCh_4g@nUv10%7qq&hM~ zF1K3gF^tVOzVXI4${XK^g*>2NZR~iPe3gBV7|P>dr|b|6xE^esxsB}6Q;MA8gV5Rb7cEZYbj;{;UE4geP1A;RH(e0Q zq}huF!eMtiXN-S&b}Hg|JLL6J17?(70lJG{hA|qSx=AZMk+Qb!uA9#g0)`IN!2L!! 
ze;(M=R;m&#o^l7STW(k*rJ2 z4$sp>E(pD&;RQ3rT)PH?_# z0Xf=Ko{&zm6Uq^!T;Z-4rtK9R;h!b;*&K~wBOiZ_;APsR()#A(cXBwQloeBOMToz!Wn)H78_aHkd zEz49;D^jabb;qBDde}jaH~@&wMs+{5h2U7jY_Xq~rYfu|_DDPUQMzNWaJkq41WYim zlUTTDUss2rS%+2dk^1QRj9Niv;Jmrn0US`aLJ}ZsO;FC*_|>yqXj{!U%j-@ylnt?z z@N;=Hm1_}S-%_n8JntCbYry{YuuGKgv3~=Yr8G?u3J`{bddOqRE%YGu0u?GHFs;re zE&%J53$JsgCrJf8?N4W6g84XGmq@1KMC;dgL(-X8R+@4okq@Xv`IX z0XAdkPA}N^C_R|&G!lQ5p=2wI3lV_vL6tmgR8wh9Gcw~@K#?!lm!V_?p$jE&+x3NN z&S2Nr*EoG27sonIAG|h6+A!7DAnO<8CLvEy0)Wl}gH}8x7m7OnoDESsExgGk2ZM@^ zLH1~nNuo1>s}cfpEk?J+^Rf4v{cT0H01x8PD!Y*K5=ia`lTUC42FD&8kpye<`7H%L z;{CV*a0TM$<!Pzx7P~8N7S}LFML`%$RE19LwM%N)8?kKRP=npkeN>gJ zOv*p6EZXMkzq7t$YQI|OB)W3POd?#)`^e5Yv zMQTR&<*plH6sUE&1tU?!Fjb6W6DUa)BUQcB*o-a2tR6iRiK~+H`1ER-OGWc4|4GFl z!XFXAs0Pa5^7@X_JGlc6{<{{e5NMtRXkNfK!zM16Em0mS`uRbi{)ZlzN6>)cfY+<<6u!lv>$ zlOashz8z1c(1sBFq{>J|Cn^xfuyvNK3S92C2GFs~Gx;IsLVb|)g`<#N&HgsZI^~A0 z*Nopv9I|s&wV?d~R9{k5kytE7o=L(1Wnpv(?Prn9ur&VO4ke~4gOA}r{-GX@;37LY zpxQb-x!Yb%D5=4LSQlOUYRej1gRyhRty3jby=a0)iL$G)UQLoQCf>*UUb91%TeR{a ze2ZqmosBUqiz1%sB4U0W;A_glwf86kEWXn1Q&F*#I0#ttd(eNB>~G7V)T>Ju>^taj z1>J{fbLrhkuPTwzb9v^b_zk&Y-8Q$~q+f?Wnk$s7m)M6|6~S%H(+79MYzB}~&4%>H zZWP`IQ|$t|**bH5ZPOjueiZcp0kt3Br9mr&er!R=5OFAilXc&w-QUALIt)k~JY-+V zKjafiADX18o8OBeD`fB03T_z_6gh8lz|pn#c){8S2`59#vs+LfeL2)TRUN5q+-P#T z*|lukppqAD5Cp$6V@qls05Jq{gR4}!-8WGg2g+)*hiwpquz%mDi9>A34Gu$u@90CS zlQcREwTYp%S!zQ8q7wFu)zX@^!JYgKnI`sTWvI(SayHM2x}yB1ylx-B41%U`7^Ok^ zz~_{}j=!_M*f8l>uoBOQJ;SkJEx8f{KdWg#2~ixvw39f+Ggn!hDc;V)5FntlV}QY0 zc_prN)oMj&calx8q4+9G9P8HR(%p5I0vUL(+S=SN_Y=Y4<(^=5{O8!M1AuICPi+C= zDBX5%j)WiJSCb+b)tpp&EAX2t1gA$;KcC6kCxkUBdSqjvS-Ia7IPJ+P#RKqwH=nv- zPY~T+G{^W^1f@a3$r{(!>CK!?AgFPmIoUxXCkuPrJ*sAzZ)Gv3d35Xz;-yrR_nCK#N{5uzr*OaDzL)uDfeK5d!y;?uJ_@Q{VOljq5Qhq7{M!G9<2sMdVkf zV+wDM3M@8*zW5j2`*EMZB&tLr%_{`0cNaeiUn=(U4)SrBus+}`P6|8}Tga3DUXDMo zA5I_KC}Vr4Kx-t+mO-#D&#IL=;*EIY`?}ZdQ=Il*Ri~U*dr++`)@(mz=H2-HPXQ3R+{usbbSEC(l2R?)FMXSZjHY}18byM*%qa}yhl+SRZNy{o-YF@JsoL@Os29l-e)FAoZClg{9IczS1!Ixs%Tp|zCEljw zsaV-vc)Xm=sV_DWv;E$7bcPbEb61E-e6M{E2p)ibycbG!i;liKlUf|65}(X`TF{;6 z;t4o!TsYIRwt7~3R;rr8F5D-U#NJCkT3TH=cvNjOKfO5- zM~(6J=|%6~+*0&*lrqf3%fsa}(XIXWF2>OX;M-!b6{fUlLg1Imn*3iZl+LrTk&wYT zWE5Q+B~_ag!b5gOiRm(gl`?e~k}|cFA3_R)B=9iBs7F}D%I>q}5M>7$4H4KR7KB7X zWOyr`u!m$z)FG6rJ!Og42W14t2N(aGYUtz&JX}tI$QCS z4H7rxmSnq1+*@u}-#ZZ{GAFcN9K)OcZ&$z+$`(p4U7M?#&M4i>+lVDVkI9)>WF4pF zD0dXzLG=|(?=%Pd5P=UpPxPtsSg6oSuA|}R2`y5b0SSnN7m$NRb21-Q$S0P*eUD@c z$!Q4B4cSWe3Hu5P&NdbMzVbE_D-KMR;o1jCtdM;h=Cw{t&s9688z)ypQU^lLmA5jHH{6HaM8-p4hEUXXO-)m{)~i zTWjHwn54c4s7ql5no@kEFE{n0IPY{PsG;5X#5Iuk4L7U!b-(n5j^i?C&Cu4ewe;)X0JAI&hQX8Sj;((s^94zDA=p-@ZPB_SbrVzZOz12b~ zUPEH)sCbtn(r^ZMW7HGq;jDw2t?tTFX)W1EFlv;G@^9_fKln$=5a+_~Yxo|qu(l{u-SC=5)Xm#4_uutQ4dwoJABkY%P(88pK?xkwUtiBj_S+6QS-A&D;DcmpkME^`n9|N{Lygvy6Q(-6F3sW0O#M zld70=DnPqzyI4cR%Vh_Fuo)n{$Cv>$G1TTTnT&g?C&{2pCJC_RUhXp(C!mfdY?KuS zX-l^UTZ@b9kzr-%UY!lCtSv*|PlU#n6>RR2@_L9z;}E7wpA-PfC;j^~?fmuZzY@Q& zk)FdHdpwS7quqp|9Pe0XnPVV56+b3=R^wx#F7}mgEEHkjx=-yFKemG=UosxKuvVJ! 
zLgIplJ--Rq9Q+nXfE1-~BrJXgumGIZ7rvszwNi&UZmVU>W~MK1#HK0ertCu-NX?r{ zhO}-awuuv9&sr`w?OE*tJR0k;C4!1{(9FGsq#>_jMPr`FJ234wN(x@6pG@-8c8Kq* zj6ukS!dY%63HVikAbdMpNgV0RHi6Rz-zF{@6Wril7EEVjULehB*?s`A8ywhEU=?4s zNw&+k?Vqogx8iARe67D@4^Z=g^*BQlj~5Kyezgh_xc1M<8fC?n6z5hsX(4YT zs@epuaq@Qp$KH!+)7yn_#0P-WFkGFBDnA~tPXtN#xj3G~4xTdTi zHV`d2OINn%5p?Wqq^=V%S`je18Nh0NXvABl3SBjNiz8rtW$7i@U z=U!O^fQHUO?!?ume@1$`A1`FnY+<}_|OfMQ^&|}DN zqB?M!=jnqh<;s=#NlLVq`{u%4DhIAh+WZwg}L~e zjgsbpj=N4WWYcX$+L(yofds+|#x4}b=upHej)cm##2OJDxDX?5qjOwgKrM%>$F9WR zmyw6WyKYo3)=SU8E9JVaAbfUwA+RQT`3%_&!0)*&F*G`FOFSu$=sAiaxW{=)V_X;4fHqudYmyHBJL z?2VN1MIG5xSDt*+F~r)OD4J?jA?Aq zPy{DeD zN=S%AjRl2Dq`(AjBw$RE%r+qn6~|3`&^o~D>$w$4u>x-C^zwZOF6@)6jWE+t;pB2r z=uo;m2EX4sf>wr8k-lFITe&>F^xH|He{dXJfcPw6xkMIIF;{Niz^_wIL7ID;40qKh zF$Kazb2b=mJ*cEpu3@>-GRPK2MM6iE-3OjdHtK*fP7DVW+~oEM=enf0$ZVgqN8XdW zX^?h)=-Y}_=+<|G3bJAenTgTt<*fix869M)VC?#n@LS)X749QiGdy>4|s{q<8l3zeFE;nK<%|i}BlovQB8|(Y0Ka1Jd zsEH2Vou1RCvN|uP&g8 zFxkDFbq)zwuWbM~1i`8}iX*f|owmL9gtGk~+KUt{r4~+1xlN=KYNkEyMkPnuPgIr% z60@I_N%`a>wz-mQ<~un%m(ILnB~AQG{!<&`83kR@3p98on_i1uQ8^k;2x#p4aQA8uk^_j#J*hK$s*pa%(#+QN&ODyi56Jm0e!K@pV2V=t=i)`j?q zRdW)6AVI2H#Z|fm91)Pm>4TTbf3k0?ThCr{J4MLujodYosrU;F#)qdru6E;Be{Nq~ zm!a!el$vVI+T3t@I@&Zh7({*c2#N|z5)V(P67C7sxZ~Sum*M@+YqjTH@9PN?-gi&( z>LiG3D|GVVSAU+Q3i8FS@l)`R-j|QW{aQyXxZH@m%7(=K|6ZLKo-HqdjlJb&DN z=#X5uZy>_e@|yi#Zbnkw`#l^XwqnnZg-$e`E^NXTRXF}U6eQP{I5SXqaJgL>vN?=2 z#Hp(^UH_{J&_}L^7PRFc>Kv&u2W6Qu60usO=3+>3L2ZWSpi?Pr~0Jxw5jJzOuDNgcTv&wjI(!V0qPh zBFCB;pezJe^)0?sF<%?BR#+qpTVhtb5X7_}882(;#0l=Q-J%Ub4XiVeRXnCdIrW2-`+L*|~#0 zhjY`V_!6l}HWxu9srZMSALKn&b+`ZaSP4r}!io?tR-k|x+rO(8VT$!zMf9RR7abe$ z^}N?KAgrr7ofp8CnD@2f1qWe!0vW|a7fX-AxUtS{GKFm(q-kVwl}a6^QVf)P2U<=~ z!dSkYYu-7^Oc@<$jVes`QlKFUM80k7K-7xWW4rB6H<1e8k4M#Gonn9g}>f{O1@Hr;+<1 zsgK%G$lgOmH;?`y$q2&_Y@O~CccA2^3wk9i=G<&}5Cad3Q;#wlM`2ogf+9ctB9tjK zhxgUGE>-DTYOT+&P&qur4c-m&mU6K1fhs(2JEFRo-IN^D*hDfb(ZW^=A8=IZGSkH3 zX1V*nJceu(nEyZj|IZL_P9NB&am{@oU-An(7nMB{lccT^TT{!Z+Byx%5i-%hX8xj_ zeri8rj)~C`s|<~4aF1o4swjwihM!6;jQHG94~tp)#Vxx{&ehhI?Z-8t)yZRAf^RTM z(GKM!3rQHaO-;z$UFq?VjLxKAV00GYwxhCDk~`FTfQjIz`?p;=o_wzye`x~(<%520 zi>BO%K`a`z4e_OEf0L97Hv&*06eM~{2gnM5pj5V+J5{MWOYUk4W>Q8 z7x1!dB^Ck3#n*Bx78$$B8ki(C%5uZz&XMP;b1IjZS*>o!WNnOv}cQ*R_06J$$t!I$Gr zgu44%&}9ih#1glOu~EC;QJ-aDAOMQVzJp~d6M~e@oUfu37E&TiN#b}`&(_A0Au@ei zefWYp@pt9;H(@bx!YlOu3FrMi8!xh@Op|E?fgE?XjWVtATD$?;$izkwP|WuDoJw2d zLM6)4N;$hNY!EW7?fYaD>Va`6@gaznC$hsLXE5GAkAsmdINI|Gw2M=|L%lvyJnZ>W z-RwZDzi8{iR^l7O3bVQ8sEVI#q+g~34s*PJsIKzK{iwyD@2BONy$N%EHs-UbwwgOB zBbuVNjL^nc(?O%033xYJPnpCvKKaF{fs7nN4Kv!r(ha>1kgiF>lIF83>+8fEFn97H`ovy#1gyz zMTEC9rG=TB`Cf6mZK+z_Kz2M(@fG}pFOzM-)w0-(Az)K1#?~uGt#W>B`Z9WtKctVF z{eldKg}K&p(~$_4@B188TfT{p^%Fbeq^+mGI1S;RiP0&FF7F7XW8;}^q4<~o%|Bs) ze66fkt5zVk7lZ$S=_X8)nkzf^FpIfSO;YIg{eqjB3Sj&Fd`P4%l#hy?ziJN#%2ywV zIFA!XN1)(J0<3JeP**a4Yoj%a=p|{0k*E^KS}euH@T|%BwsL6MgyPo+5;4!SIeqYY z`2!o4l%9O->;`s7_5uwKRX~z9@?l@84PdE~bl-&?n^3y0@BSW)H{Gvs)@uJrTNzo| zlrFQ)Ybyh1-2_F@UQHpf*YS%UdWv!-;ef9W@5%B|?3G|}$RWS%<9SAHDn{0=&K*!< zykQSyfHJE{=`1r(E4yvK;X zPm=ZUg$`0bN^FiZkK9&eKy5_spL~JOLzvwlGGL;0F~wTB?(w*7oKkHNo?;`=HdO&8 zqoJ*;FdMF1m&)>5qirKTc~Fjj9bdensai9qNv^zyaA!HIKg? 
zr|*!x^2=Ja{K7_~ZXJ@HV3D1B2Q&fz%IWNl6B`0fH4#)KKI{i-=?=k0Q@nXk5lUuZ z6O|bDyuJ*E5g3*73~iPo<4u)wXbsETwxrsiesAo|C!*ri)${up$Y9C~WqZE_0rlgp zI`&gb%2Fwx&JG2>W=Z~nma;m&LG4yjzMm*v7vY_X^-G1_P^MhLO3D!Z$;J=kmOaf^ zF*_1RcH=BfHP&KDXi#h~g3Vfz0%S7HCVJgb(M(==*5(4^EW^i=Z5;5#w}HirslV;* zk?_W8%(>~(d5L;V_Ao;F5fNJheM3RTva)5hYg-}#kIg3aQ{q|OBow{cW^IF5z%dpMz( zXbniHb1f4bEipDwHzu~QPqvkgX^0Q#FZfUd(REWx!chPrmE3tcw9^qCs0Fq=)z7yd zsP^^TbM`~^<8AnyJOa0$+RHJ}&MMT{h$}hwa5sfrH!2CEJNCdhE5_lo+k&0?TwxvU z*3bFxuk2Om6mCsVOhTNnd#KE5<2wHw^KkP9n3!S2{tCPcs9Zg>dgelou;OmpSZTs4r$_?{ zqK^=N5aGBqUPabnWW_HkAt!_CI4xJ*V_~(>BB`U^63{d*y33*LL^!cAk0I>-O>b56 z);n*Mcft}t?8qkIlF_0jmdbda(Q_dv))zvFD^X3uxO2zFuOhlhy}aHWP3yCOh{+E_RRCO2dFUsvVBKR&a!02?{yk|!)w1Me#~G#f2O(S$xhmhBuyj9*BSL-7 z8zEAi%^^ay5zeJ@k-gmPilDl31S7@9fW~s8s@7L_jjIHQm8Di_aYFdahq@+iPt@Ds zVoDVrP=W~C2dKW|2o(Dpv`+nD4UG-1F2$naBUm;t7;UVxy{*oe^!ucjcR(-!U%3y$M}UX|sL86b zR|>$!7+6Q7v!4!2o0Zz5K5NfiOt>6=QQRxIZuWK-4q-Xm^ScR5=E}2ST(Dv^L#BO@ zmM-2ozjCvdA`H>&Y}RU?j4eL(nlj%AU4&pM-{liKIJh&^zB4T}Tdvt|Tvghp{qXE_ zRR6ZcTf(LQ#!m`-VVO*vSxWBO=jHGushQL2^wT9s8K}+MnQW`(B7<$){B;0B`#0IL zKK4a*Weok)#_j6+@?L>K9`DD5u_=O8@t?SyHWfh<64t@bP3H$Q9@s6`i6qM?zI2~r z4B2km=g!XM`DvcVbqVp%hR_5q_ACH}I&*PT=vNnG+s@ieqAX(DKzcZh5%X=rM(2fn zkJ8MRrfd5)t8cqA$yScmMh%rai%H#JBT|bM`gs5&RMRKzmkO_NCE6CS&JOLi>5gYp ztcZ4qX}dGLjTVE}ag^(?Ze$31o<>J8@q-D}h0Sb7p|MqCmASAg+>W`$3BGD`BvMr4 z5Ah^?ZjPkxLEClVm7U~2-YAWAn-I*eizf2di~BOx2hkoeno@6s&;)6wEvmpw@Pd?N z0o7${qnAp1)*jcc0g=#3)Ulw3Rk|X8W=h|-ik3zs8vn$8L>Sej(0-IZ3`S*-R(@3{ z6?37g28*hDDn2n^7MASm6rxwqGWHIRD~2dq2s1cE92BdxtwBp^fpby`f#ETxUfe4q zD7u^iPFEbH;RtQe->_H1q!o+Jew5=*dDVDHn`@7tUi|$P9Rm|Ny7WByr2P$tM6uym zQN;mj_{&TwV4|ZQyGZuhVY=65DH#Zv-ESs}1k>0;I?E$k;^9Cuisd5_?MnQ_sY+f| zQ_&r8`xMrD8FRYhMx~sQ#Qc4uyz#opqGHt{*Gj58a;u~S%0E_?)cgEGr z<$8PFwt_2ikfm@L6*Nfh^Qv7nQzoS>G3tuNz{n;N{IIzlVZj;Z8>dSyxB{o~l?~O| zt-ZFAeqN*%if9lf8x*fUbO2c(ZFb{T$KYBhGKG!!d);uGp7L(`Nj}gBo{+UBo{Ce( zl#P$!os=Ay!JaQOT8U%|Hp6xc+g{;s+jXH^c{5Kwq~^!zPi!8|mDTc^Z5d|1bX$O4 zG|Ew4OFBTcb56KA=UysL#gXihtWpW_uKC)vp1vbHBLAeS7z$9*`xNCS=f*~XVfwi& z)L+Ddn+TIqwBoE!D#e8MsQl&2@eQ6@Hzknt>T8;gLgNioO%W+J-Ffca@Y?Q9-) z>SBfqr!pH<@24w|4xcU7GXuCmCP;-DE|h!nP*)^f72%`CjK! z&6}w;mcwcF0@p9bHfDzS*eKn+lQPsplZVDR$a-s~bI2${7Mhqc15wb=RX8`Fq zrmFZ(i<(@h{~noqat}lIz)~kh9t&H0WyT%RMSNMPZI5SwVblBC1}3HsG^qGG(!-J` zwb?H_`~U8Y&G5~y{ioQ^CQ!`bpzce8kJ{ryqLvBCfBg0G&3GYP6xg{Q1W;j;f`1

@@ -540,7 +583,7 @@ data;
         message = html`<${Markdownish} text=${template(text)} />`
       }
-      if(user) {
+      if (user) {
         return html`<p key=${index}><strong>${template(user)}:</strong> ${message}</p>`
       } else {
         return html`<p key=${index}>${message}</p>`
       }
@@ -549,6 +592,7 @@
       return html`
         <section id="chat" ref=${container}>
+          ${messages.flatMap(chatLine)}
         </section>`;
     };
@@ -567,7 +611,7 @@
         const converter = new SchemaConverter(
           grammarJsonSchemaPropOrder.value
             .split(',')
-            .reduce((acc, cur, i) => ({...acc, [cur.trim()]: i}), {})
+            .reduce((acc, cur, i) => ({ ...acc, [cur.trim()]: i }), {})
         )
         converter.visit(schema, '')
         params.value = {
@@ -579,7 +623,7 @@
       }
     }
 
-    const FloatField = ({label, max, min, name, step, value}) => {
+    const FloatField = ({ label, max, min, name, step, value }) => {
       return html`
@@ -589,7 +633,7 @@
       `
     };
 
-    const IntField = ({label, max, min, name, value}) => {
+    const IntField = ({ label, max, min, name, value }) => {
       return html`
@@ -672,7 +716,7 @@
           ${GrammarControl()}
         `
-      );
+    );
 
     const CompletionConfigForm = () => (
       html`
@@ -694,20 +738,20 @@
           ${session.value.type === 'chat' ? ChatConfigForm() : CompletionConfigForm()}
 
-            ${IntField({label: "Predictions", max: 2048, min: -1, name: "n_predict", value: params.value.n_predict})}
-            ${FloatField({label: "Temperature", max: 1.5, min: 0.0, name: "temperature", step: 0.01, value: params.value.temperature})}
-            ${FloatField({label: "Penalize repeat sequence", max: 2.0, min: 0.0, name: "repeat_penalty", step: 0.01, value: params.value.repeat_penalty})}
-            ${IntField({label: "Consider N tokens for penalize", max: 2048, min: 0, name: "repeat_last_n", value: params.value.repeat_last_n})}
-            ${IntField({label: "Top-K sampling", max: 100, min: -1, name: "top_k", value: params.value.top_k})}
-            ${FloatField({label: "Top-P sampling", max: 1.0, min: 0.0, name: "top_p", step: 0.01, value: params.value.top_p})}
+            ${IntField({ label: "Predictions", max: 2048, min: -1, name: "n_predict", value: params.value.n_predict })}
+            ${FloatField({ label: "Temperature", max: 1.5, min: 0.0, name: "temperature", step: 0.01, value: params.value.temperature })}
+            ${FloatField({ label: "Penalize repeat sequence", max: 2.0, min: 0.0, name: "repeat_penalty", step: 0.01, value: params.value.repeat_penalty })}
+            ${IntField({ label: "Consider N tokens for penalize", max: 2048, min: 0, name: "repeat_last_n", value: params.value.repeat_last_n })}
+            ${IntField({ label: "Top-K sampling", max: 100, min: -1, name: "top_k", value: params.value.top_k })}
+            ${FloatField({ label: "Top-P sampling", max: 1.0, min: 0.0, name: "top_p", step: 0.01, value: params.value.top_p })}
 
           More options
 
-              ${FloatField({label: "TFS-Z", max: 1.0, min: 0.0, name: "tfs_z", step: 0.01, value: params.value.tfs_z})}
-              ${FloatField({label: "Typical P", max: 1.0, min: 0.0, name: "typical_p", step: 0.01, value: params.value.typical_p})}
-              ${FloatField({label: "Presence penalty", max: 1.0, min: 0.0, name: "presence_penalty", step: 0.01, value: params.value.presence_penalty})}
-              ${FloatField({label: "Frequency penalty", max: 1.0, min: 0.0, name: "frequency_penalty", step: 0.01, value: params.value.frequency_penalty})}
+              ${FloatField({ label: "TFS-Z", max: 1.0, min: 0.0, name: "tfs_z", step: 0.01, value: params.value.tfs_z })}
+              ${FloatField({ label: "Typical P", max: 1.0, min: 0.0, name: "typical_p", step: 0.01, value: params.value.typical_p })}
+              ${FloatField({ label: "Presence penalty", max: 1.0, min: 0.0, name: "presence_penalty", step: 0.01, value: params.value.presence_penalty })}
+              ${FloatField({ label: "Frequency penalty", max: 1.0, min: 0.0, name: "frequency_penalty", step: 0.01, value: params.value.frequency_penalty })}
@@ -716,11 +760,11 @@
-              ${FloatField({label: "Mirostat tau", max: 10.0, min: 0.0, name: "mirostat_tau", step: 0.01, value: params.value.mirostat_tau})}
-              ${FloatField({label: "Mirostat eta", max: 1.0, min: 0.0, name: "mirostat_eta", step: 0.01, value: params.value.mirostat_eta})}
+              ${FloatField({ label: "Mirostat tau", max: 10.0, min: 0.0, name: "mirostat_tau", step: 0.01, value: params.value.mirostat_tau })}
+              ${FloatField({ label: "Mirostat eta", max: 1.0, min: 0.0, name: "mirostat_eta", step: 0.01, value: params.value.mirostat_eta })}
 
-            ${IntField({label: "Show Probabilities", max: 10, min: 0, name: "n_probs", value: params.value.n_probs})}
+            ${IntField({ label: "Show Probabilities", max: 10, min: 0, name: "n_probs", value: params.value.n_probs })}
@@ -759,20 +803,20 @@
       const popoverChildren = html`
           ${probs.map((p, index) => {
-            return html`
+          return html`
               ${p.tok_str}: ${Math.floor(p.prob * 100)}%
             `
-          })}
+        })}
       `
@@ -851,9 +895,9 @@
           ref=${popoverRef}
           class="popover-content"
           style=${{
-            top: position.value.top,
-            left: position.value.left,
-          }}
+          top: position.value.top,
+          left: position.value.left,
+        }}
         >
           ${props.popoverChildren}
@@ -952,8 +996,11 @@ -
+
+ +
+ diff --git a/examples/server/server.cpp b/examples/server/server.cpp index b5ad3cc99..c3279dbc9 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1,6 +1,11 @@ #include "common.h" #include "llama.h" #include "build-info.h" +#include "grammar-parser.h" + +#include "../llava/clip.h" + +#include "stb_image.h" #ifndef NDEBUG // crash the server in debug mode, otherwise send an http 500 error @@ -17,12 +22,14 @@ #include "json-schema-to-grammar.mjs.hpp" #include +#include +#include +#include #ifndef SERVER_VERBOSE #define SERVER_VERBOSE 1 #endif -using namespace httplib; using json = nlohmann::json; struct server_params @@ -34,6 +41,165 @@ struct server_params int32_t write_timeout = 600; }; +static bool server_verbose = false; + +#if SERVER_VERBOSE != 1 +#define LOG_VERBOSE(MSG, ...) +#else +#define LOG_VERBOSE(MSG, ...) \ + do \ + { \ + if (server_verbose) \ + { \ + server_log("VERBOSE", __func__, __LINE__, MSG, __VA_ARGS__); \ + } \ + } while (0) +#endif + +#define LOG_ERROR( MSG, ...) server_log("ERROR", __func__, __LINE__, MSG, __VA_ARGS__) +#define LOG_WARNING(MSG, ...) server_log("WARNING", __func__, __LINE__, MSG, __VA_ARGS__) +#define LOG_INFO( MSG, ...) server_log("INFO", __func__, __LINE__, MSG, __VA_ARGS__) + +// +// base64 utils (TODO: move to common in the future) +// + +static const std::string base64_chars = + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyz" + "0123456789+/"; + +static inline bool is_base64(uint8_t c) +{ + return (isalnum(c) || (c == '+') || (c == '/')); +} + +static std::vector base64_decode(std::string const &encoded_string) +{ + int i = 0; + int j = 0; + int in_ = 0; + + int in_len = encoded_string.size(); + + uint8_t char_array_4[4]; + uint8_t char_array_3[3]; + + std::vector ret; + + while (in_len-- && (encoded_string[in_] != '=') && is_base64(encoded_string[in_])) + { + char_array_4[i++] = encoded_string[in_]; in_++; + if (i == 4) + { + for (i = 0; i <4; i++) + { + char_array_4[i] = base64_chars.find(char_array_4[i]); + } + + char_array_3[0] = ((char_array_4[0] ) << 2) + ((char_array_4[1] & 0x30) >> 4); + char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2); + char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3]; + + for (i = 0; (i < 3); i++) + { + ret.push_back(char_array_3[i]); + } + i = 0; + } + } + + if (i) + { + for (j = i; j <4; j++) + { + char_array_4[j] = 0; + } + + for (j = 0; j <4; j++) + { + char_array_4[j] = base64_chars.find(char_array_4[j]); + } + + char_array_3[0] = ((char_array_4[0] ) << 2) + ((char_array_4[1] & 0x30) >> 4); + char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2); + char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3]; + + for (j = 0; (j < i - 1); j++) + { + ret.push_back(char_array_3[j]); + } + } + + return ret; +} + +// +// parallel +// + +enum task_type { + COMPLETION_TASK, + CANCEL_TASK +}; + +struct task_server { + int id; + int target_id; + task_type type; + json data; + bool infill_mode = false; +}; + +struct task_result { + int id; + bool stop; + bool error; + json result_json; +}; + +// TODO: can become bool if we can't find use of more states +enum slot_state +{ + IDLE, + PROCESSING, +}; + +enum slot_command +{ + NONE, + LOAD_PROMPT, + RELEASE, +}; + +struct slot_params +{ + bool stream = true; + bool cache_prompt = false; // remember the prompt to avoid reprocessing all prompt + + uint32_t seed = -1; // RNG seed + int32_t n_keep = 0; // number of tokens to keep from initial prompt + 
int32_t n_predict = -1; // new tokens to predict + + std::vector antiprompt; + + json input_prefix; + json input_suffix; +}; + +struct slot_image +{ + int32_t id; + + bool request_encode_image = false; + float* image_embedding = nullptr; + int32_t image_tokens = 0; + + clip_image_u8 img_data; + + std::string prefix_prompt; // before of this image +}; + // completion token output with probabilities struct completion_token_output { @@ -45,6 +211,7 @@ struct completion_token_output std::vector probs; llama_token tok; + std::string text_to_send; }; static size_t common_part(const std::vector &a, const std::vector &b) @@ -89,6 +256,7 @@ static size_t find_partial_stop_string(const std::string &stop, return std::string::npos; } +// TODO: reuse llama_detokenize template static std::string tokens_to_str(llama_context *ctx, Iter begin, Iter end) { @@ -103,12 +271,13 @@ static std::string tokens_to_str(llama_context *ctx, Iter begin, Iter end) static void server_log(const char *level, const char *function, int line, const char *message, const nlohmann::ordered_json &extra) { - nlohmann::ordered_json log{ + nlohmann::ordered_json log + { {"timestamp", time(nullptr)}, - {"level", level}, - {"function", function}, - {"line", line}, - {"message", message}, + {"level", level}, + {"function", function}, + {"line", line}, + {"message", message}, }; if (!extra.empty()) @@ -138,7 +307,7 @@ static std::string tokens_to_output_formatted_string(const llama_context *ctx, c } // convert a vector of completion_token_output to json -static json probs_vector_to_json(const llama_context *ctx, const std::vector & probs) +static json probs_vector_to_json(const llama_context *ctx, const std::vector &probs) { json out = json::array(); for (const auto &prob : probs) @@ -147,76 +316,211 @@ static json probs_vector_to_json(const llama_context *ctx, const std::vector +static T json_value(const json &body, const std::string &key, const T &default_value) { - bool stream = false; - bool has_next_token = false; - std::string generated_text; - std::vector generated_token_probs; + // Fallback null to default value + return body.contains(key) && !body.at(key).is_null() + ? 
body.value(key, default_value) + : default_value; +} - size_t num_prompt_tokens = 0; - size_t num_tokens_predicted = 0; - size_t n_past = 0; - size_t n_remain = 0; +struct llama_client_slot +{ + int id; + int task_id = -1; + + struct slot_params params; + + slot_state state = IDLE; + slot_command command = NONE; + + // used to determine the slot that has been used the longest + int64_t t_last_used = -1; + + // generation props + int32_t n_ctx = 0; // context size per slot + int32_t n_past = 0; + int32_t n_decoded = 0; + int32_t n_remaining = -1; + int32_t i_batch = -1; + + int32_t num_prompt_tokens = 0; + int32_t num_prompt_tokens_processed = 0; + int32_t multibyte_pending = 0; json prompt; - std::vector embd; - - gpt_params params; - - llama_model *model = nullptr; - llama_context *ctx = nullptr; - llama_sampling_context *ctx_sampling = nullptr; - - int n_ctx; + std::string generated_text; + llama_token sampled; + std::vector cache_tokens; + std::vector generated_token_probs; + bool infill = false; + bool has_next_token = true; bool truncated = false; bool stopped_eos = false; bool stopped_word = false; bool stopped_limit = false; + std::string stopping_word; - int32_t multibyte_pending = 0; - std::mutex mutex; + // sampling + struct llama_sampling_params sparams; + llama_sampling_context *ctx_sampling = nullptr; - std::unique_lock lock() - { - return std::unique_lock(mutex); + // multimodal + std::vector images; + + // stats + size_t sent_count = 0; + size_t sent_token_probs_index = 0; + + int64_t t_start_process_prompt; + int64_t t_start_genereration; + + double t_prompt_processing; // ms + double t_token_generation; // ms + + void reset() { + num_prompt_tokens = 0; + generated_text = ""; + truncated = false; + stopped_eos = false; + stopped_word = false; + stopped_limit = false; + stopping_word = ""; + multibyte_pending = 0; + n_past = 0; + sent_count = 0; + sent_token_probs_index = 0; + infill = false; + + generated_token_probs.clear(); + + for (slot_image &img : images) + { + free(img.image_embedding); + delete[] img.img_data.data; + img.prefix_prompt = ""; + } + + images.clear(); + // llama_set_rng_seed(ctx, params.seed); in batched the seed matter??????? 
} + bool has_budget(gpt_params &global_params) { + n_remaining = -1; + if(params.n_predict != -1) + { + n_remaining = params.n_predict - n_decoded; + } + else if (global_params.n_predict != -1) + { + n_remaining = global_params.n_predict - n_decoded; + } + return n_remaining > 0 || n_remaining == -1; // no budget || limitless + } + + bool available() const { + return state == IDLE && command == NONE; + } + + bool is_processing() const { + return (state == IDLE && command == LOAD_PROMPT) || state == PROCESSING; + } + + void add_token_string(const completion_token_output &token) { + if (command == RELEASE) + { + return; + } + cache_tokens.push_back(token.tok); + generated_token_probs.push_back(token); + } + + void release() { + if (state == PROCESSING) + { + t_token_generation = (ggml_time_us() - t_start_genereration) / 1e3; + command = RELEASE; + } + } + + json get_formated_timings() { + return json + { + {"prompt_n", num_prompt_tokens_processed}, + {"prompt_ms", t_prompt_processing}, + {"prompt_per_token_ms", t_prompt_processing / num_prompt_tokens_processed}, + {"prompt_per_second", 1e3 / t_prompt_processing * num_prompt_tokens_processed}, + + {"predicted_n", n_decoded}, + {"predicted_ms", t_token_generation}, + {"predicted_per_token_ms", t_token_generation / n_decoded}, + {"predicted_per_second", 1e3 / t_token_generation * n_decoded}, + }; + } + + void print_timings() { + LOG_TEE("\n"); + LOG_TEE("%s: prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)\n", + __func__, t_prompt_processing, num_prompt_tokens_processed, t_prompt_processing / num_prompt_tokens_processed, 1e3 / t_prompt_processing * num_prompt_tokens_processed); + LOG_TEE("%s: eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n", + __func__, t_token_generation, n_decoded,t_token_generation / n_decoded, 1e3 / t_token_generation * n_decoded); + LOG_TEE("%s: total time = %10.2f ms\n", __func__, t_prompt_processing + t_token_generation); + } +}; + +struct llama_server_context +{ + llama_model *model = nullptr; + llama_context *ctx = nullptr; + + clip_ctx *clp_ctx = nullptr; + + gpt_params params; + + llama_batch batch; + + bool multimodal = false; + bool clean_kv_cache = true; + bool all_slots_are_idle = false; + + int32_t id_gen; + int32_t n_ctx; // total context for all clients / slots + + // system prompt + bool system_need_update = false; + + std::string system_prompt; + std::vector system_tokens; + + std::string name_user; // this should be the antiprompt + std::string name_assistant; + + // slots / clients + std::vector slots; + + std::vector queue_tasks; + std::vector queue_results; + std::mutex mutex_tasks; + std::mutex mutex_results; + ~llama_server_context() { if (ctx) @@ -231,46 +535,74 @@ struct llama_server_context } } - void rewind() - { - params.antiprompt.clear(); - params.sparams.grammar.clear(); - num_prompt_tokens = 0; - num_tokens_predicted = 0; - generated_text = ""; - generated_text.reserve(n_ctx); - generated_token_probs.clear(); - truncated = false; - stopped_eos = false; - stopped_word = false; - stopped_limit = false; - stopping_word = ""; - multibyte_pending = 0; - n_remain = 0; - n_past = 0; - params.sparams.n_prev = n_ctx; - } - - void initSampling() { - if (ctx_sampling != nullptr) { - llama_sampling_free(ctx_sampling); - } - ctx_sampling = llama_sampling_init(params.sparams); - } - - bool loadModel(const gpt_params ¶ms_) + bool load_model(const gpt_params ¶ms_) { params = params_; + if (!params.mmproj.empty()) { + multimodal = true; + 
LOG_TEE("Multi Modal Mode Enabled"); + clp_ctx = clip_model_load(params.mmproj.c_str(), /*verbosity=*/ 1); + if(clp_ctx == nullptr) { + LOG_ERROR("unable to load clip model", {{"model", params.mmproj}}); + return false; + } + + if (params.n_ctx < 2048) { // request larger context for the image embedding + params.n_ctx = 2048; + } + } + std::tie(model, ctx) = llama_init_from_gpt_params(params); if (model == nullptr) { - LOG_ERROR("unable to load model", {{"model", params_.model}}); + LOG_ERROR("unable to load model", {{"model", params.model}}); return false; } + + if (multimodal) { + const int n_embd_clip = clip_n_mmproj_embd(clp_ctx); + const int n_embd_llm = llama_n_embd(model); + if (n_embd_clip != n_embd_llm) { + LOG_TEE("%s: embedding dim of the multimodal projector (%d) is not equal to that of LLaMA (%d). Make sure that you use the correct mmproj file.\n", __func__, n_embd_clip, n_embd_llm); + llama_free(ctx); + llama_free_model(model); + return false; + } + } + n_ctx = llama_n_ctx(ctx); + return true; } + void initialize() { + id_gen = 0; + + // create slots + all_slots_are_idle = true; + + const int32_t n_ctx_slot = n_ctx / params.n_parallel; + + LOG_TEE("Available slots:\n"); + for (int i = 0; i < params.n_parallel; i++) + { + llama_client_slot slot; + + slot.id = i; + slot.n_ctx = n_ctx_slot; + slot.reset(); + + LOG_TEE(" -> Slot %i - max context: %i\n", slot.id, n_ctx_slot); + slots.push_back(slot); + } + + batch = llama_batch_init(n_ctx, 0, params.n_parallel); + + // empty system prompt + system_prompt = ""; + system_tokens.clear(); + } + std::vector tokenize(const json & json_prompt, bool add_bos) const { // If `add_bos` is true, we only add BOS, when json_prompt is a string, @@ -316,260 +648,301 @@ struct llama_server_context return prompt_tokens; } - void truncatePrompt(std::vector &prompt_tokens) { - const int n_left = n_ctx - params.n_keep; - const int n_block_size = n_left / 2; - const int erased_blocks = (prompt_tokens.size() - params.n_keep - n_block_size) / n_block_size; + llama_client_slot* get_slot(int id) { + int64_t t_last = ggml_time_us(); + llama_client_slot *last_used = nullptr; - // Keep n_keep tokens at start of prompt (at most n_ctx - 4) - std::vector new_tokens(prompt_tokens.begin(), prompt_tokens.begin() + params.n_keep); + for (llama_client_slot & slot : slots) + { + if (slot.id == id && slot.available()) + { + return &slot; + } - new_tokens.insert(new_tokens.end(), prompt_tokens.begin() + params.n_keep + erased_blocks * n_block_size, prompt_tokens.end()); + if (slot.available() && slot.t_last_used < t_last) + { + last_used = &slot; + t_last = slot.t_last_used; + } + } - LOG_VERBOSE("input truncated", { - {"n_ctx", n_ctx}, - {"n_keep", params.n_keep}, - {"n_left", n_left}, - {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())}, - {"num_prompt_tokens", new_tokens.size()} - }); - - truncated = true; - prompt_tokens = new_tokens; + return last_used; } - void loadInfill() - { - bool suff_rm_leading_spc = true; - if (params.input_suffix.find_first_of(' ') == 0 && params.input_suffix.size() > 1) { - params.input_suffix.erase(0, 1); - suff_rm_leading_spc = false; - } + bool launch_slot_with_data(llama_client_slot* &slot, json data) { + slot_params default_params; + llama_sampling_params default_sparams; - auto prefix_tokens = tokenize(params.input_prefix, false); - auto suffix_tokens = tokenize(params.input_suffix, false); - const int space_token = 29871; - if (suff_rm_leading_spc && suffix_tokens[0] == space_token) { - 
suffix_tokens.erase(suffix_tokens.begin()); - } - prefix_tokens.insert(prefix_tokens.begin(), llama_token_prefix(ctx)); - prefix_tokens.insert(prefix_tokens.begin(), llama_token_bos(ctx)); // always add BOS - prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(ctx)); - prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); - prefix_tokens.push_back(llama_token_middle(ctx)); + slot->params.stream = json_value(data, "stream", false); + slot->params.cache_prompt = json_value(data, "cache_prompt", false); + slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict); + slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k); + slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p); + slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z); + slot->sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p); + slot->sparams.temp = json_value(data, "temperature", default_sparams.temp); + slot->sparams.penalty_last_n = json_value(data, "repeat_last_n", default_sparams.penalty_last_n); + slot->sparams.penalty_repeat = json_value(data, "repeat_penalty", default_sparams.penalty_repeat); + slot->sparams.penalty_freq = json_value(data, "frequency_penalty", default_sparams.penalty_freq); + slot->sparams.penalty_present = json_value(data, "presence_penalty", default_sparams.penalty_present); + slot->sparams.mirostat = json_value(data, "mirostat", default_sparams.mirostat); + slot->sparams.mirostat_tau = json_value(data, "mirostat_tau", default_sparams.mirostat_tau); + slot->sparams.mirostat_eta = json_value(data, "mirostat_eta", default_sparams.mirostat_eta); + slot->sparams.penalize_nl = json_value(data, "penalize_nl", default_sparams.penalize_nl); + slot->params.n_keep = json_value(data, "n_keep", slot->params.n_keep); + slot->params.seed = json_value(data, "seed", default_params.seed); + slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); + slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); - auto prompt_tokens = prefix_tokens; - - num_prompt_tokens = prompt_tokens.size(); - - if (params.n_keep < 0) + // infill + if (data.count("input_prefix") != 0) { - params.n_keep = (int)num_prompt_tokens; + slot->params.input_prefix = data["input_prefix"]; } - params.n_keep = std::min(params.n_ctx - 4, params.n_keep); - - // if input prompt is too big, truncate like normal - if (num_prompt_tokens >= (size_t) n_ctx) + else { - truncatePrompt(prompt_tokens); - num_prompt_tokens = prompt_tokens.size(); - - GGML_ASSERT(num_prompt_tokens < (size_t)n_ctx); + slot->params.input_prefix = ""; } - // push the prompt into the sampling context (do not apply grammar) - for (auto & token : prompt_tokens) + if (data.count("input_suffix") != 0) { - llama_sampling_accept(ctx_sampling, ctx, token, false); + slot->params.input_suffix = data["input_suffix"]; } - - // compare the evaluated prompt with the new prompt - n_past = common_part(embd, prompt_tokens); - embd = prompt_tokens; - - if (n_past == num_prompt_tokens) + else { - // we have to evaluate at least 1 token to generate logits. 
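Each per-request field above follows the same pattern: take the value from the JSON body when it is present and non-null, otherwise fall back to the compiled-in default. A minimal sketch of that helper and its behaviour, assuming nlohmann::json as used by the server (the value_or name is hypothetical):

    #include <cstdio>
    #include <string>
    #include <nlohmann/json.hpp>

    using json = nlohmann::json;

    // Use the client's value when the key exists and is non-null,
    // otherwise keep the default - the json_value() pattern above.
    template <typename T>
    static T value_or(const json & body, const std::string & key, const T & default_value) {
        return body.contains(key) && !body.at(key).is_null()
            ? body.value(key, default_value)
            : default_value;
    }

    int main() {
        const json data = json::parse(R"({"temperature": 0.2, "top_k": null})");

        const float temp  = value_or(data, "temperature", 0.8f);  // 0.20 (present)
        const int   top_k = value_or(data, "top_k",       40);    // 40   (null -> default)
        const float top_p = value_or(data, "top_p",       0.95f); // 0.95 (absent -> default)

        printf("temp=%.2f top_k=%d top_p=%.2f\n", temp, top_k, top_p);
        return 0;
    }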
- printf("we have to evaluate at least 1 token to generate logits\n"); - n_past--; + slot->params.input_suffix = ""; } - // since #3228 we now have to manually manage the KV cache - llama_kv_cache_seq_rm(ctx, 0, n_past, -1); - - LOG_VERBOSE("prompt ingested", { - {"n_past", n_past}, - {"cached", tokens_to_str(ctx, embd.cbegin(), embd.cbegin() + n_past)}, - {"to_eval", tokens_to_str(ctx, embd.cbegin() + n_past, embd.cend())}, - }); - - has_next_token = true; - } - void loadPrompt() - { - auto prompt_tokens = tokenize(prompt, true); // always add BOS - - num_prompt_tokens = prompt_tokens.size(); - - if (params.n_keep < 0) + if (data.count("prompt") != 0) { - params.n_keep = (int)num_prompt_tokens; + slot->prompt = data["prompt"]; } - params.n_keep = std::min(n_ctx - 4, params.n_keep); - - // if input prompt is too big, truncate like normal - if (num_prompt_tokens >= (size_t) n_ctx) + else { - truncatePrompt(prompt_tokens); - num_prompt_tokens = prompt_tokens.size(); - - GGML_ASSERT(num_prompt_tokens < (size_t)n_ctx); + slot->prompt = ""; } - // push the prompt into the sampling context (do not apply grammar) - for (auto & token : prompt_tokens) + slot->sparams.logit_bias.clear(); + + if (json_value(data, "ignore_eos", false)) { - llama_sampling_accept(ctx_sampling, ctx, token, false); + slot->sparams.logit_bias[llama_token_eos(ctx)] = -INFINITY; } - // compare the evaluated prompt with the new prompt - n_past = common_part(embd, prompt_tokens); - - embd = prompt_tokens; - if (n_past == num_prompt_tokens) + const auto &logit_bias = data.find("logit_bias"); + if (logit_bias != data.end() && logit_bias->is_array()) { - // we have to evaluate at least 1 token to generate logits. - n_past--; + const int n_vocab = llama_n_vocab(model); + for (const auto &el : *logit_bias) + { + if (el.is_array() && el.size() == 2 && el[0].is_number_integer()) + { + llama_token tok = el[0].get(); + if (tok >= 0 && tok < n_vocab) + { + if (el[1].is_number()) + { + slot->sparams.logit_bias[tok] = el[1].get(); + } + else if (el[1].is_boolean() && !el[1].get()) + { + slot->sparams.logit_bias[tok] = -INFINITY; + } + } + } + } } - // since #3228 we now have to manually manage the KV cache - llama_kv_cache_seq_rm(ctx, 0, n_past, -1); + slot->params.antiprompt.clear(); + const auto &stop = data.find("stop"); + if (stop != data.end() && stop->is_array()) + { + for (const auto &word : *stop) + { + if (!word.empty()) + { + slot->params.antiprompt.push_back(word); + } + } + } - LOG_VERBOSE("prompt ingested", { - {"n_past", n_past}, - {"cached", tokens_to_str(ctx, embd.cbegin(), embd.cbegin() + n_past)}, - {"to_eval", tokens_to_str(ctx, embd.cbegin() + n_past, embd.cend())}, - }); + if (multimodal) + { + const auto &images_data = data.find("image_data"); + if (images_data != data.end() && images_data->is_array()) + { + for (const auto &img : *images_data) + { + std::string data_b64 = img["data"].get(); + slot_image img_sl; + img_sl.id = img.count("id") != 0 ? 
img["id"].get() : slot->images.size(); + int width, height, channels; + std::vector image_buffer = base64_decode(data_b64); + data_b64.clear(); + auto data = stbi_load_from_memory(image_buffer.data(), image_buffer.size(), &width, &height, &channels, 3); + if (!data) { + LOG_TEE("slot %i - failed to load image [id: %i]\n", slot->id, img_sl.id); + return false; + } + LOG_TEE("slot %i - image loaded [id: %i] resolution (%i x %i)\n", slot->id, img_sl.id, width, height); + img_sl.img_data.nx = width; + img_sl.img_data.ny = height; + img_sl.img_data.size = width * height * 3; + img_sl.img_data.data = new uint8_t[width * height * 3](); + memcpy(img_sl.img_data.data, data, width * height * 3); + stbi_image_free(data); + img_sl.request_encode_image = true; + slot->images.push_back(img_sl); + } + // process prompt + // example: system prompt [img-102] user [img-103] describe [img-134] -> [{id: 102, prefix: 'system prompt '}, {id: 103, prefix: ' user '}, {id: 134, prefix: ' describe '}]} + if (slot->images.size() > 0 && !slot->prompt.is_array()) + { + std::string prompt = slot->prompt.get(); + size_t pos = 0, begin_prefix = 0; + std::string pattern = "[img-"; + while ((pos = prompt.find(pattern, pos)) != std::string::npos) { + size_t end_prefix = pos; + pos += pattern.length(); + size_t end_pos = prompt.find("]", pos); + if (end_pos != std::string::npos) + { + std::string image_id = prompt.substr(pos, end_pos - pos); + try + { + int img_id = std::stoi(image_id); + bool found = false; + for (slot_image &img : slot->images) + { + if (img.id == img_id) { + found = true; + img.prefix_prompt = prompt.substr(begin_prefix, end_prefix - begin_prefix); + begin_prefix = end_pos + 1; + break; + } + } + if (!found) { + LOG_TEE("ERROR: Image with id: %i, not found.\n", img_id); + slot->images.clear(); + return false; + } + } catch (const std::invalid_argument& e) { + LOG_TEE("Invalid image number id in prompt\n"); + slot->images.clear(); + return false; + } + } + } + slot->prompt = ""; + slot->params.input_suffix = prompt.substr(begin_prefix); + slot->params.cache_prompt = false; // multimodal doesn't support cache prompt + } + } + } - has_next_token = true; + if (slot->ctx_sampling != nullptr) + { + llama_sampling_free(slot->ctx_sampling); + } + slot->ctx_sampling = llama_sampling_init(slot->sparams); + slot->command = LOAD_PROMPT; + + all_slots_are_idle = false; + + LOG_TEE("slot %i is processing [task id: %i]\n", slot->id, slot->task_id); + + return true; } - void beginCompletion() - { - // number of tokens to keep when resetting context - n_remain = params.n_predict; - llama_set_rng_seed(ctx, params.seed); + void kv_cache_clear() { + // clear the entire KV cache + llama_kv_cache_tokens_rm(ctx, -1, -1); + clean_kv_cache = false; } - completion_token_output nextToken() - { - completion_token_output result; - result.tok = -1; + void update_system_prompt() { + system_tokens = ::llama_tokenize(ctx, system_prompt, true); - if (embd.size() >= (size_t)n_ctx) + llama_batch_clear(batch); + + kv_cache_clear(); + + for (int32_t i = 0; i < batch.n_tokens; ++i) { - // Shift context - - const int n_left = n_past - params.n_keep - 1; - const int n_discard = n_left/2; - - llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + n_discard + 1); - llama_kv_cache_seq_shift(ctx, 0, params.n_keep + 1 + n_discard, n_past, -n_discard); - - for (size_t i = params.n_keep + 1 + n_discard; i < embd.size(); i++) - { - embd[i - n_discard] = embd[i]; - } - embd.resize(embd.size() - n_discard); - - n_past -= n_discard; - - 
truncated = true; - LOG_VERBOSE("input truncated", { - {"n_ctx", n_ctx}, - {"n_keep", params.n_keep}, - {"n_left", n_left}, - }); + llama_batch_add(batch, system_tokens[i], i, { 0 }, false); } - bool tg = true; - while (n_past < embd.size()) + if (llama_decode(ctx, batch) != 0) { - int n_eval = (int)embd.size() - n_past; - tg = n_eval == 1; - if (n_eval > params.n_batch) - { - n_eval = params.n_batch; - } - - if (llama_decode(ctx, llama_batch_get_one(&embd[n_past], n_eval, n_past, 0))) - { - LOG_ERROR("failed to eval", { - {"n_eval", n_eval}, - {"n_past", n_past}, - {"embd", tokens_to_str(ctx, embd.cbegin() + n_past, embd.cend())}, - }); - has_next_token = false; - return result; - } - n_past += n_eval; + LOG_TEE("%s: llama_decode() failed\n", __func__); + return; } - if (params.n_predict == 0) + // assign the system KV cache to all parallel sequences + for (int32_t i = 1; i < params.n_parallel; ++i) { - has_next_token = false; - result.tok = llama_token_eos(ctx); - return result; + llama_kv_cache_seq_cp(ctx, 0, i, 0, system_tokens.size()); } - { - // out of user input, sample next token - result.tok = llama_sampling_sample(ctx_sampling, ctx, NULL); - - llama_token_data_array cur_p = { ctx_sampling->cur.data(), ctx_sampling->cur.size(), false }; - - const int32_t n_probs = params.sparams.n_probs; - if (params.sparams.temp <= 0 && n_probs > 0) - { - // For llama_sample_token_greedy we need to sort candidates - llama_sample_softmax(ctx, &cur_p); - } - - for (size_t i = 0; i < std::min(cur_p.size, (size_t)n_probs); ++i) - { - result.probs.push_back({cur_p.data[i].id, cur_p.data[i].p}); - } - - llama_sampling_accept(ctx_sampling, ctx, result.tok, true); - - if (tg) { - num_tokens_predicted++; - } - } - - // add it to the context - embd.push_back(result.tok); - // decrement remaining sampling budget - --n_remain; - - if (!embd.empty() && embd.back() == llama_token_eos(ctx)) - { - // stopping_word = llama_token_to_piece(ctx, embd.back()); - has_next_token = false; - stopped_eos = true; - LOG_VERBOSE("eos token found", {}); - return result; - } - - has_next_token = params.n_predict == -1 || n_remain != 0; - return result; + LOG_TEE("system prompt updated\n"); + system_need_update = false; } - size_t findStoppingStrings(const std::string &text, const size_t last_token_size, - const stop_type type) + void notify_system_prompt_changed() { + // release all slots + for (llama_client_slot &slot : slots) + { + slot.release(); + } + wait_all_are_idle(); + all_slots_are_idle = true; + + // wait until system prompt load + system_need_update = true; + while (system_need_update) + { + std::this_thread::sleep_for(std::chrono::milliseconds(5)); + } + // system prompt loaded, continue + } + + void process_system_prompt_data(const json &sys_props) { + system_prompt = sys_props.value("prompt", ""); + name_user = sys_props.value("anti_prompt", ""); + name_assistant = sys_props.value("assistant_name", ""); + + if (slots.size() > 0) + { + notify_system_prompt_changed(); + } + else + { + system_need_update = true; + } + } + + void wait_all_are_idle() { + bool wait = true; + while (wait) + { + wait = false; + for (auto &slot : slots) + { + if (!slot.available()) + { + wait = true; + break; + } + } + } + } + + static size_t find_stopping_strings(const std::string &text, const size_t last_token_size, + const stop_type type, llama_client_slot &slot) { size_t stop_pos = std::string::npos; - for (const std::string &word : params.antiprompt) + + for (const std::string &word : slot.params.antiprompt) { size_t pos; if (type 
== STOP_FULL) @@ -587,95 +960,803 @@ struct llama_server_context { if (type == STOP_FULL) { - stopping_word = word; - stopped_word = true; - has_next_token = false; + slot.stopped_word = true; + slot.stopping_word = word; + slot.has_next_token = false; } stop_pos = pos; + } } + return stop_pos; } - completion_token_output doCompletion() - { - auto token_with_probs = nextToken(); + bool process_token(completion_token_output &result, llama_client_slot &slot) { + // remember which tokens were sampled - used for repetition penalties during sampling + const std::string token_str = llama_token_to_piece(ctx, result.tok); + slot.sampled = result.tok; - const std::string token_text = token_with_probs.tok == -1 ? "" : llama_token_to_piece(ctx, token_with_probs.tok); - generated_text += token_text; + // search stop word and delete it + slot.generated_text += token_str; + slot.has_next_token = true; - if (params.sparams.n_probs > 0) + if (slot.multibyte_pending > 0) { - generated_token_probs.push_back(token_with_probs); + slot.multibyte_pending -= token_str.size(); } - - if (multibyte_pending > 0) + else if (token_str.size() == 1) { - multibyte_pending -= token_text.size(); - } - else if (token_text.size() == 1) - { - const char c = token_text[0]; + const char c = token_str[0]; // 2-byte characters: 110xxxxx 10xxxxxx if ((c & 0xE0) == 0xC0) { - multibyte_pending = 1; + slot.multibyte_pending = 1; // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx } else if ((c & 0xF0) == 0xE0) { - multibyte_pending = 2; + slot.multibyte_pending = 2; // 4-byte characters: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx } else if ((c & 0xF8) == 0xF0) { - multibyte_pending = 3; + slot.multibyte_pending = 3; } else { - multibyte_pending = 0; + slot.multibyte_pending = 0; } } - if (multibyte_pending > 0 && !has_next_token) + if (slot.multibyte_pending == 0) { - has_next_token = true; - n_remain++; + size_t pos = std::min(slot.sent_count, slot.generated_text.size()); + const std::string str_test = slot.generated_text.substr(pos); + bool is_stop_full = false; + size_t stop_pos = find_stopping_strings(str_test, token_str.size(), STOP_FULL, slot); + if (stop_pos != std::string::npos) + { + is_stop_full = true; + slot.generated_text.erase( + slot.generated_text.begin() + pos + stop_pos, + slot.generated_text.end()); + pos = std::min(slot.sent_count, slot.generated_text.size()); + } + else + { + is_stop_full = false; + stop_pos = find_stopping_strings(str_test, token_str.size(), STOP_PARTIAL, slot); + } + + // check if there is any token to predict + if (stop_pos == std::string::npos || (!slot.has_next_token && !is_stop_full && stop_pos > 0)) + { + // no send the stop word in the response + result.text_to_send = slot.generated_text.substr(pos, std::string::npos); + slot.sent_count += result.text_to_send.size(); + // add the token to slot queue and cache + } + slot.add_token_string(result); + if (slot.params.stream) + { + send_partial_response(slot, result); + } } - if (!has_next_token && n_remain == 0) + if (slot.multibyte_pending > 0 && !slot.has_next_token) { - stopped_limit = true; + slot.has_next_token = true; + } + + // check the limits + if (slot.n_decoded > 2 && slot.has_next_token && !slot.has_budget(params)) + { + slot.stopped_limit = true; + slot.has_next_token = false; + } + + if (!slot.cache_tokens.empty() && result.tok == llama_token_eos(ctx)) + { + slot.stopped_eos = true; + slot.has_next_token = false; + LOG_VERBOSE("eos token found", {}); } LOG_VERBOSE("next token", { - {"token", token_with_probs.tok}, - {"token_text", 
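The multibyte_pending bookkeeping above exists because a sampled token can end in the middle of a UTF-8 code point, and the server must hold those bytes back until the character is complete before streaming it. A standalone sketch of the leading-byte classification it relies on:

    #include <cstdio>
    #include <string>

    // Continuation bytes implied by a UTF-8 leading byte, mirroring the
    // multibyte_pending bookkeeping in process_token():
    // 110xxxxx -> 1, 1110xxxx -> 2, 11110xxx -> 3, anything else -> 0.
    static int utf8_pending(unsigned char c) {
        if ((c & 0xE0) == 0xC0) return 1;
        if ((c & 0xF0) == 0xE0) return 2;
        if ((c & 0xF8) == 0xF0) return 3;
        return 0;
    }

    int main() {
        const std::string s = "h\xC3\xA9"; // "he" with an accent: 'h' then a 2-byte sequence
        int pending = 0;
        for (unsigned char c : s) {
            if (pending > 0) { pending--; continue; } // swallow continuation byte
            pending = utf8_pending(c);
            printf("lead byte 0x%02X starts a %d-byte character\n", c, pending + 1);
        }
        return 0;
    }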
tokens_to_output_formatted_string(ctx, token_with_probs.tok)}, - {"has_next_token", has_next_token}, - {"n_remain", n_remain}, - {"num_tokens_predicted", num_tokens_predicted}, - {"stopped_eos", stopped_eos}, - {"stopped_word", stopped_word}, - {"stopped_limit", stopped_limit}, - {"stopping_word", stopping_word}, + {"token", result.tok}, + {"token_text", tokens_to_output_formatted_string(ctx, result.tok)}, + {"has_next_token", slot.has_next_token}, + {"n_remain", slot.n_remaining}, + {"num_tokens_predicted", slot.n_decoded}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, }); - return token_with_probs; + return slot.has_next_token; // continue } - std::vector getEmbedding() + bool process_images(llama_client_slot &slot) const { - static const int n_embd = llama_n_embd(model); + for (slot_image &img : slot.images) + { + if (!img.request_encode_image) + { + continue; + } + clip_image_f32 img_res; + if (!clip_image_preprocess(clp_ctx, &img.img_data, &img_res, /*pad2square =*/ true)) + { + LOG_TEE("Error processing the given image"); + clip_free(clp_ctx); + return false; + } + img.image_tokens = clip_n_patches(clp_ctx); + img.image_embedding = (float *)malloc(clip_embd_nbytes(clp_ctx)); + if (!img.image_embedding) + { + LOG_TEE("Unable to allocate memory for image embeddings\n"); + clip_free(clp_ctx); + return false; + } + LOG_TEE("slot %i - encoding image [id: %i]\n", slot.id, img.id); + if (!clip_image_encode(clp_ctx, params.n_threads, &img_res, img.image_embedding)) + { + LOG_TEE("Unable to encode image\n"); + return false; + } + img.request_encode_image = false; + } + + return slot.images.size() > 0; + } + + void send_error(int id, std::string error) + { + std::lock_guard lock(mutex_results); + task_result res; + res.id = id; + res.error = true; + res.result_json = { { "content", error } }; + queue_results.push_back(res); + } + + json get_model_props() + { + return get_formated_generation(slots[0]); + } + + json get_formated_generation(llama_client_slot &slot) + { + const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(ctx)); + const bool ignore_eos = eos_bias != slot.sparams.logit_bias.end() && + eos_bias->second < 0.0f && std::isinf(eos_bias->second); + return json { + {"n_ctx", slot.n_ctx}, + {"model", params.model_alias}, + {"seed", slot.params.seed}, + {"temp", slot.sparams.temp}, + {"top_k", slot.sparams.top_k}, + {"top_p", slot.sparams.top_p}, + {"tfs_z", slot.sparams.tfs_z}, + {"typical_p", slot.sparams.typical_p}, + {"repeat_last_n", slot.sparams.penalty_last_n}, + {"repeat_penalty", slot.sparams.penalty_repeat}, + {"presence_penalty", slot.sparams.penalty_present}, + {"frequency_penalty", slot.sparams.penalty_freq}, + {"mirostat", slot.sparams.mirostat}, + {"mirostat_tau", slot.sparams.mirostat_tau}, + {"mirostat_eta", slot.sparams.mirostat_eta}, + {"penalize_nl", slot.sparams.penalize_nl}, + {"stop", slot.params.antiprompt}, + {"n_predict", slot.params.n_predict}, + {"n_keep", params.n_keep}, + {"ignore_eos", ignore_eos}, + {"stream", slot.params.stream}, + {"logit_bias", slot.sparams.logit_bias}, + {"n_probs", slot.sparams.n_probs}, + {"grammar", slot.sparams.grammar}, + }; + } + + void send_partial_response(llama_client_slot &slot, completion_token_output tkn) + { + std::lock_guard lock(mutex_results); + task_result res; + res.id = slot.task_id; + res.error = false; + res.stop = false; + + res.result_json = json + { + {"content", tkn.text_to_send}, + 
{"stop", false}, + {"slot_id", slot.id}, + {"multimodal", multimodal} + }; + + if (slot.sparams.n_probs > 0) + { + std::vector probs_output = {}; + const std::vector to_send_toks = llama_tokenize(ctx, tkn.text_to_send, false); + size_t probs_pos = std::min(slot.sent_token_probs_index, slot.generated_token_probs.size()); + size_t probs_stop_pos = std::min(slot.sent_token_probs_index + to_send_toks.size(), slot.generated_token_probs.size()); + if (probs_pos < probs_stop_pos) + { + probs_output = std::vector(slot.generated_token_probs.begin() + probs_pos, slot.generated_token_probs.begin() + probs_stop_pos); + } + slot.sent_token_probs_index = probs_stop_pos; + res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs_output); + } + + queue_results.push_back(res); + } + + void send_final_response(llama_client_slot &slot) + { + std::lock_guard lock(mutex_results); + task_result res; + res.id = slot.task_id; + res.error = false; + res.stop = true; + + res.result_json = json + { + {"content", !slot.params.stream ? slot.generated_text : ""}, + {"slot_id", slot.id}, + {"stop", true}, + {"model", params.model_alias}, + {"tokens_predicted", slot.n_decoded}, + {"tokens_evaluated", slot.num_prompt_tokens}, + {"generation_settings", get_formated_generation(slot)}, + {"prompt", slot.prompt}, + {"truncated", slot.truncated}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, + {"tokens_cached", slot.n_past}, + {"timings", slot.get_formated_timings()} + }; + + if (slot.sparams.n_probs > 0) + { + std::vector probs = {}; + if (!slot.params.stream && slot.stopped_word) + { + const std::vector stop_word_toks = llama_tokenize(ctx, slot.stopping_word, false); + probs = std::vector(slot.generated_token_probs.begin(), slot.generated_token_probs.end() - stop_word_toks.size()); + } + else + { + probs = std::vector( + slot.generated_token_probs.begin(), + slot.generated_token_probs.begin() + slot.sent_token_probs_index); + } + res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs); + } + + queue_results.push_back(res); + } + + void send_embedding(llama_client_slot &slot) + { + std::lock_guard lock(mutex_results); + task_result res; + res.id = slot.task_id; + res.error = false; + res.stop = true; + + const int n_embd = llama_n_embd(model); if (!params.embedding) { LOG_WARNING("embedding disabled", { {"params.embedding", params.embedding}, }); - return std::vector(n_embd, 0.0f); + res.result_json = json + { + {"embedding", std::vector(n_embd, 0.0f)}, + }; } - const float *data = llama_get_embeddings(ctx); - std::vector embedding(data, data + n_embd); - return embedding; + else + { + const float *data = llama_get_embeddings(ctx); + std::vector embedding(data, data + n_embd); + res.result_json = json + { + {"embedding", embedding }, + }; + } + queue_results.push_back(res); + } + + int request_completion(json data, bool infill) + { + std::lock_guard lock(mutex_tasks); + task_server task; + task.id = id_gen++; + task.data = data; + task.infill_mode = infill; + task.type = COMPLETION_TASK; + queue_tasks.push_back(task); + return task.id; + } + + task_result next_result(int task_id) + { + while (true) + { + std::this_thread::sleep_for(std::chrono::microseconds(5)); + std::lock_guard lock(mutex_results); + + if (queue_results.empty()) + { + continue; + } + + for (int i = 0; i < (int) queue_results.size(); i++) + { + if (queue_results[i].id == task_id) + { + task_result res = 
queue_results[i]; + queue_results.erase(queue_results.begin() + i); + return res; + } + } + } + + // never reached + //return task_result{-1, false, false, {}}; + } + + // for multiple images processing + bool ingest_images(llama_client_slot &slot, int n_batch) + { + int image_idx = 0; + + while (image_idx < (int) slot.images.size()) + { + slot_image &img = slot.images[image_idx]; + + // process prefix prompt + for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += n_batch) + { + const int32_t n_tokens = std::min(n_batch, (int32_t) (batch.n_tokens - i)); + llama_batch batch_view = { + n_tokens, + batch.token + i, + nullptr, + batch.pos + i, + batch.n_seq_id + i, + batch.seq_id + i, + batch.logits + i, + 0, 0, 0, // unused + }; + if (llama_decode(ctx, batch_view)) + { + LOG_TEE("%s : failed to eval\n", __func__); + return false; + } + } + + // process image with llm + for (int i = 0; i < img.image_tokens; i += n_batch) + { + int n_eval = img.image_tokens - i; + if (n_eval > n_batch) + { + n_eval = n_batch; + } + + const int n_embd = llama_n_embd(model); + llama_batch batch_img = { n_eval, nullptr, (img.image_embedding + i * n_embd), nullptr, nullptr, nullptr, nullptr, slot.n_past, 1, 0, }; + if (llama_decode(ctx, batch_img)) + { + LOG_TEE("%s : failed to eval image\n", __func__); + return false; + } + slot.n_past += n_eval; + } + image_idx++; + + llama_batch_clear(batch); + + // append prefix of next image + const auto json_prompt = (image_idx >= (int) slot.images.size()) ? + slot.params.input_suffix : // no more images, then process suffix prompt + (json)(slot.images[image_idx].prefix_prompt); + + std::vector append_tokens = tokenize(json_prompt, false); // has next image + for (int i = 0; i < (int) append_tokens.size(); ++i) + { + llama_batch_add(batch, append_tokens[i], slot.n_past, { slot.id }, true); + slot.n_past += 1; + } + } + + return true; + } + + void request_cancel(int task_id) + { + std::lock_guard lock(mutex_tasks); + task_server task; + task.id = id_gen++; + task.type = CANCEL_TASK; + task.target_id = task_id; + queue_tasks.push_back(task); + } + + void process_tasks() + { + std::lock_guard lock(mutex_tasks); + while (!queue_tasks.empty()) + { + task_server task = queue_tasks.front(); + queue_tasks.erase(queue_tasks.begin()); + switch (task.type) + { + case COMPLETION_TASK: { + llama_client_slot *slot = get_slot(json_value(task.data, "slot_id", -1)); + if (slot == nullptr) + { + LOG_TEE("slot unavailable\n"); + // send error result + send_error(task.id, "slot unavaliable"); + return; + } + + if (task.data.contains("system_prompt")) + { + process_system_prompt_data(task.data["system_prompt"]); + } + + slot->reset(); + + slot->infill = task.infill_mode; + slot->task_id = task.id; + + if (!launch_slot_with_data(slot, task.data)) + { + // send error result + send_error(task.id, "internal_error"); + break; + } + } break; + case CANCEL_TASK: { // release slot linked with the task id + for (auto & slot : slots) + { + if (slot.task_id == task.target_id) + { + slot.release(); + break; + } + } + } break; + } + } + } + + bool update_slots() { + // attend tasks + process_tasks(); + + // update the system prompt wait until all slots are idle state + if (system_need_update) + { + LOG_TEE("updating system prompt\n"); + update_system_prompt(); + } + + llama_batch_clear(batch); + + if (all_slots_are_idle) + { + if (system_prompt.empty() && clean_kv_cache) + { + LOG_TEE("all slots are idle and system prompt is empty, clear the KV cache\n"); + kv_cache_clear(); + } + // avoid 100% usage of cpu 
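ingest_images() above never hands the backend more than n_batch tokens at once: it walks the pending batch in fixed-size windows, building a view over each. The window arithmetic in isolation, with the decode call stubbed out:

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    int main() {
        const std::vector<int> tokens(1000, 42); // stand-ins for pending token ids
        const int n_batch = 512;

        for (int i = 0; i < (int) tokens.size(); i += n_batch) {
            const int n_tokens = std::min(n_batch, (int) tokens.size() - i);
            // llama_decode() on the view [i, i + n_tokens) would go here
            printf("decoding view [%d, %d)\n", i, i + n_tokens);
        }
        return 0;
    }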
all time + std::this_thread::sleep_for(std::chrono::milliseconds(5)); + } + + for (llama_client_slot &slot : slots) + { + if (slot.is_processing() && slot.cache_tokens.size() >= (size_t) slot.n_ctx) + { + // Shift context + const int n_left = slot.n_past - slot.params.n_keep - 1; + const int n_discard = n_left / 2; + + LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, slot.params.n_keep, n_left, n_discard); + llama_kv_cache_seq_rm (ctx, slot.id, slot.params.n_keep + 1 , slot.params.n_keep + n_discard + 1); + llama_kv_cache_seq_shift(ctx, slot.id, slot.params.n_keep + 1 + n_discard, slot.n_past, -n_discard); + + for (size_t i = slot.params.n_keep + 1 + n_discard; i < slot.cache_tokens.size(); i++) + { + slot.cache_tokens[i - n_discard] = slot.cache_tokens[i]; + } + + slot.cache_tokens.resize(slot.cache_tokens.size() - n_discard); + + slot.n_past -= n_discard; + + slot.truncated = true; + + LOG_VERBOSE("context shift", { + {"n_ctx", n_ctx}, + {"n_keep", params.n_keep}, + {"n_left", n_left}, + }); + } + } + + // decode any currently ongoing sequences + for (auto & slot : slots) + { + // release the slot + if (slot.state == PROCESSING && slot.command == RELEASE) + { + slot.state = IDLE; + slot.command = NONE; + slot.t_last_used = ggml_time_us(); + + LOG_TEE("slot %d released (%d tokens in cache)\n", slot.id, (int) slot.cache_tokens.size()); + + continue; + } + + if (slot.state == IDLE || slot.command == RELEASE) + { + continue; + } + + slot.i_batch = batch.n_tokens; + + llama_batch_add(batch, slot.sampled, system_tokens.size() + slot.n_past, { slot.id }, true); + + slot.n_decoded += 1; + slot.n_past += 1; + } + + // process in chunks of params.n_batch + int32_t n_batch = params.n_batch; + + // assign workload to the slots + if (params.cont_batching || batch.n_tokens == 0) + { + for (auto & slot : slots) + { + // need process the prompt + if (slot.state == IDLE && slot.command == LOAD_PROMPT) + { + slot.state = PROCESSING; + slot.command = NONE; + std::vector prompt_tokens; + slot.t_start_process_prompt = ggml_time_us(); + slot.t_start_genereration = 0; + + if (slot.infill) + { + bool suff_rm_leading_spc = true; + if (params.input_suffix.find_first_of(' ') == 0 && params.input_suffix.size() > 1) + { + params.input_suffix.erase(0, 1); + suff_rm_leading_spc = false; + } + auto prefix_tokens = tokenize(slot.params.input_prefix, false); + auto suffix_tokens = tokenize(slot.params.input_suffix, false); + + const int space_token = 29871; // TODO: this should not be hardcoded + if (suff_rm_leading_spc && !suffix_tokens.empty() && suffix_tokens[0] == space_token) { + suffix_tokens.erase(suffix_tokens.begin()); + } + + prefix_tokens.insert(prefix_tokens.begin(), llama_token_prefix(ctx)); + prefix_tokens.insert(prefix_tokens.begin(), llama_token_bos(ctx)); // always add BOS + prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(ctx)); + prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); + prefix_tokens.push_back(llama_token_middle(ctx)); + prompt_tokens = prefix_tokens; + } + else + { + prompt_tokens = tokenize(slot.prompt, system_prompt.empty()); // add BOS if there isn't system prompt + } + + slot.num_prompt_tokens = prompt_tokens.size(); + + if (!slot.params.cache_prompt) + { + llama_sampling_reset(slot.ctx_sampling); + + slot.n_past = 0; + slot.num_prompt_tokens_processed = slot.num_prompt_tokens; + } + else + { + if (slot.params.n_keep < 0) + { + slot.params.n_keep = slot.num_prompt_tokens; + } + slot.params.n_keep = 
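The context shift above keeps the first n_keep + 1 entries, discards the oldest half of what remains, and slides the tail down so generation can continue in a full cache. The same index arithmetic on a plain vector (sizes hypothetical):

    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<int> cache(32);
        for (int i = 0; i < (int) cache.size(); i++) cache[i] = i;

        const int n_keep    = 4;
        const int n_past    = (int) cache.size();
        const int n_left    = n_past - n_keep - 1;
        const int n_discard = n_left / 2;

        // drop [n_keep + 1, n_keep + 1 + n_discard) and slide the tail down
        for (size_t i = n_keep + 1 + n_discard; i < cache.size(); i++) {
            cache[i - n_discard] = cache[i];
        }
        cache.resize(cache.size() - n_discard);

        printf("discarded %d tokens, %zu remain\n", n_discard, cache.size());
        return 0;
    }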
std::min(slot.n_ctx - 4, slot.params.n_keep); + + // if input prompt is too big, truncate it + if (slot.num_prompt_tokens >= slot.n_ctx) + { + const int n_left = slot.n_ctx - slot.params.n_keep; + const int n_block_size = n_left / 2; + const int erased_blocks = (slot.num_prompt_tokens - slot.params.n_keep - n_block_size) / n_block_size; + + std::vector new_tokens(prompt_tokens.begin(), prompt_tokens.begin() + slot.params.n_keep); + new_tokens.insert(new_tokens.end(), prompt_tokens.begin() + slot.params.n_keep + erased_blocks * n_block_size, prompt_tokens.end()); + + LOG_VERBOSE("input truncated", { + {"n_ctx", slot.n_ctx}, + {"n_keep", slot.params.n_keep}, + {"n_left", n_left}, + {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())}, + }); + slot.truncated = true; + prompt_tokens = new_tokens; + + slot.num_prompt_tokens = prompt_tokens.size(); + GGML_ASSERT(slot.num_prompt_tokens < slot.n_ctx); + } + + // push the prompt into the sampling context (do not apply grammar) + for (auto &token : prompt_tokens) + { + llama_sampling_accept(slot.ctx_sampling, ctx, token, false); + } + + slot.n_past = common_part(slot.cache_tokens, prompt_tokens); + slot.num_prompt_tokens_processed = slot.num_prompt_tokens - slot.n_past; + + LOG_TEE("slot %d : in cache: %i tokens | to process: %i tokens\n", slot.id, slot.n_past, slot.num_prompt_tokens_processed); + } + + LOG_TEE("slot %d : kv cache rm - [%d, end)\n", slot.id, (int) system_tokens.size() + slot.n_past); + + llama_kv_cache_seq_rm(ctx, slot.id, system_tokens.size() + slot.n_past, -1); + + slot.cache_tokens = prompt_tokens; + + if (slot.n_past == slot.num_prompt_tokens) + { + // we have to evaluate at least 1 token to generate logits. + LOG_TEE("slot %d : we have to evaluate at least 1 token to generate logits\n", slot.id); + slot.n_past--; + } + + LOG_VERBOSE("prompt ingested", { + {"n_past", slot.n_past}, + {"cached", tokens_to_str(ctx, slot.cache_tokens.cbegin(), slot.cache_tokens.cbegin() + slot.n_past)}, + {"to_eval", tokens_to_str(ctx, slot.cache_tokens.cbegin() + slot.n_past, slot.cache_tokens.cend())}, + }); + + const bool has_images = process_images(slot); + + // process the prefix of first image + std::vector prefix_tokens = has_images ? 
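The truncation above preserves the first n_keep prompt tokens and drops whole half-context blocks from the middle, so the most recent part of the prompt always survives. The rule in isolation (sizes hypothetical):

    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<int> prompt(600);
        for (int i = 0; i < (int) prompt.size(); i++) prompt[i] = i;

        const int n_ctx  = 512;
        const int n_keep = 32;

        const int n_left        = n_ctx - n_keep;
        const int n_block_size  = n_left / 2;
        const int erased_blocks = ((int) prompt.size() - n_keep - n_block_size) / n_block_size;

        // first n_keep tokens, then everything after the erased blocks
        std::vector<int> out(prompt.begin(), prompt.begin() + n_keep);
        out.insert(out.end(), prompt.begin() + n_keep + erased_blocks * n_block_size, prompt.end());

        printf("truncated %zu -> %zu tokens\n", prompt.size(), out.size()); // 600 -> 360
        return 0;
    }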
tokenize(slot.images[0].prefix_prompt, true) : prompt_tokens; + for (; slot.n_past < (int) prefix_tokens.size(); ++slot.n_past) + { + llama_batch_add(batch, prefix_tokens[slot.n_past], system_tokens.size() + slot.n_past, { slot.id }, false); + } + + if (has_images && !ingest_images(slot, n_batch)) + { + LOG_TEE("failed processing images\n"); + return false; + } + + // extract the logits only for the last token + if (batch.n_tokens > 0) + { + batch.logits[batch.n_tokens - 1] = true; + } + + slot.n_decoded = 0; + slot.i_batch = batch.n_tokens - 1; + } + } + } + + if (batch.n_tokens == 0) + { + all_slots_are_idle = true; + return true; + } + + for (int32_t i = 0; i < (int32_t) batch.n_tokens; i += n_batch) + { + const int32_t n_tokens = std::min(n_batch, (int32_t) (batch.n_tokens - i)); + llama_batch batch_view = + { + n_tokens, + batch.token + i, + nullptr, + batch.pos + i, + batch.n_seq_id + i, + batch.seq_id + i, + batch.logits + i, + 0, 0, 0, // unused + }; + + const int ret = llama_decode(ctx, batch_view); + if (ret != 0) + { + if (n_batch == 1 || ret < 0) + { + // if you get here, it means the KV cache is full - try increasing it via the context size + LOG_TEE("%s : failed to decode the batch, n_batch = %d, ret = %d\n", __func__, n_batch, ret); + return false; + } + + LOG_TEE("%s : failed to find free space in the KV cache, retrying with smaller n_batch = %d\n", __func__, n_batch / 2); + + // retry with half the batch size to try to find a free slot in the KV cache + n_batch /= 2; + i -= n_batch; + continue; + } + + for (auto & slot : slots) + { + if (slot.i_batch < (int) i || slot.i_batch >= (int) (i + n_tokens)) + { + continue; + } + + // prompt evaluated for embedding + if (params.embedding) + { + send_embedding(slot); + slot.release(); + slot.i_batch = -1; + return true; + } + + completion_token_output result; + const llama_token id = llama_sampling_sample(slot.ctx_sampling, ctx, NULL, slot.i_batch - i); + + llama_sampling_accept(slot.ctx_sampling, ctx, id, true); + + if (slot.n_decoded == 1) + { + slot.t_start_genereration = ggml_time_us(); + slot.t_prompt_processing = (slot.t_start_genereration - slot.t_start_process_prompt) / 1e3; + } + + llama_token_data_array cur_p = { slot.ctx_sampling->cur.data(), slot.ctx_sampling->cur.size(), false }; + result.tok = id; + + const int32_t n_probs = slot.sparams.n_probs; + if (slot.sparams.temp <= 0 && n_probs > 0) + { + // for llama_sample_token_greedy we need to sort candidates + llama_sample_softmax(ctx, &cur_p); + } + + for (size_t i = 0; i < std::min(cur_p.size, (size_t)n_probs); ++i) + { + result.probs.push_back({cur_p.data[i].id, cur_p.data[i].p}); + } + + if (!process_token(result, slot)) + { + slot.release(); + send_final_response(slot); + slot.print_timings(); + } + + slot.i_batch = -1; + } + } + return true; } }; @@ -685,16 +1766,15 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf("usage: %s [options]\n", argv0); printf("\n"); printf("options:\n"); - printf(" -h, --help show this help message and exit\n"); - printf(" -v, --verbose verbose output (default: %s)\n", server_verbose ? 
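The decode loop above distinguishes a hard error (ret < 0) from a soft "KV cache full" failure: on the latter it halves n_batch and replays the same window. A self-contained sketch of the retry policy, with llama_decode() stubbed by a hypothetical try_decode():

    #include <cstdio>

    // Hypothetical decode that fails when asked for more than 128 tokens,
    // standing in for llama_decode() returning 1 when the KV cache is full.
    static int try_decode(int n_tokens) { return n_tokens > 128 ? 1 : 0; }

    int main() {
        const int n_total = 1000;
        int n_batch = 512;

        for (int i = 0; i < n_total; i += n_batch) {
            const int n_tokens = n_batch < n_total - i ? n_batch : n_total - i;
            const int ret = try_decode(n_tokens);
            if (ret != 0) {
                if (n_batch == 1 || ret < 0) {
                    printf("decode failed for good\n");
                    return 1;
                }
                n_batch /= 2;
                i -= n_batch; // cancel the loop increment: retry the same region
                continue;
            }
            printf("decoded [%d, %d)\n", i, i + n_tokens);
        }
        return 0;
    }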
"enabled" : "disabled"); - printf(" -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); - printf(" -tb N, --threads-batch N number of threads to use during batch and prompt processing (default: same as --threads)\n"); - printf(" -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); - printf(" --rope-freq-base N RoPE base frequency (default: loaded from model)\n"); - printf(" --rope-freq-scale N RoPE frequency scaling factor (default: loaded from model)\n"); - printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); - printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); - printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); + printf(" -h, --help show this help message and exit\n"); + printf(" -v, --verbose verbose output (default: %s)\n", server_verbose ? "enabled" : "disabled"); + printf(" -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); + printf(" -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); + printf(" --rope-freq-base N RoPE base frequency (default: loaded from model)\n"); + printf(" --rope-freq-scale N RoPE frequency scaling factor (default: loaded from model)\n"); + printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); + printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); + printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); if (llama_mlock_supported()) { printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); @@ -725,11 +1805,16 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --path PUBLIC_PATH path from which to serve static files (default %s)\n", sparams.public_path.c_str()); printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? 
"enabled" : "disabled"); + printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); + printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); + printf(" -spf FNAME, --system-prompt-file FNAME\n"); + printf(" Set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); + printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf("\n"); } static void server_params_parse(int argc, char **argv, server_params &sparams, - gpt_params ¶ms) + gpt_params ¶ms, llama_server_context& llama) { gpt_params default_params; server_params default_sparams; @@ -839,15 +1924,6 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.n_threads = std::stoi(argv[i]); } - else if (arg == "--threads-batch" || arg == "-tb") - { - if (++i >= argc) - { - invalid_param = true; - break; - } - params.n_threads_batch = std::stoi(argv[i]); - } else if (arg == "-b" || arg == "--batch-size") { if (++i >= argc) @@ -984,6 +2060,56 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, { params.embedding = true; } + else if (arg == "-cb" || arg == "--cont-batching") + { + params.cont_batching = true; + } + else if (arg == "-np" || arg == "--parallel") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + params.n_parallel = std::stoi(argv[i]); + } else if (arg == "-n" || arg == "--n-predict") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + params.n_predict = std::stoi(argv[i]); + } else if (arg == "-spf" || arg == "--system-prompt-file") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + std::ifstream file(argv[i]); + if (!file) { + fprintf(stderr, "error: failed to open file '%s'\n", argv[i]); + invalid_param = true; + break; + } + std::string systm_content; + std::copy( + std::istreambuf_iterator(file), + std::istreambuf_iterator(), + std::back_inserter(systm_content) + ); + llama.process_system_prompt_data(json::parse(systm_content)); + } + else if(arg == "--mmproj") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + params.mmproj = argv[i]; + } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); @@ -1000,102 +2126,18 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } } -static json format_generation_settings(llama_server_context &llama) -{ - const auto & sparams = llama.params.sparams; - const auto eos_bias = sparams.logit_bias.find(llama_token_eos(llama.ctx)); - const bool ignore_eos = eos_bias != sparams.logit_bias.end() && - eos_bias->second < 0.0f && std::isinf(eos_bias->second); - - return json{ - {"n_ctx", llama.n_ctx}, - {"model", llama.params.model_alias}, - {"seed", llama.params.seed}, - {"temp", sparams.temp}, - {"top_k", sparams.top_k}, - {"top_p", sparams.top_p}, - {"tfs_z", sparams.tfs_z}, - {"typical_p", sparams.typical_p}, - {"repeat_last_n", sparams.penalty_last_n}, - {"repeat_penalty", sparams.penalty_repeat}, - {"frequency_penalty", sparams.penalty_freq}, - {"presence_penalty", sparams.penalty_present}, - {"mirostat", sparams.mirostat}, - {"mirostat_tau", sparams.mirostat_tau}, - {"mirostat_eta", sparams.mirostat_eta}, - {"penalize_nl", sparams.penalize_nl}, - {"stop", llama.params.antiprompt}, - {"n_predict", llama.params.n_predict}, - {"n_keep", llama.params.n_keep}, - {"ignore_eos", ignore_eos}, - {"stream", llama.stream}, - {"logit_bias", sparams.logit_bias}, - 
{"n_probs", sparams.n_probs}, - {"grammar", llama.params.sparams.grammar}, - }; -} - -static json format_embedding_response(llama_server_context &llama) -{ - return json{ - {"embedding", llama.getEmbedding()}, - }; -} - -static json format_timings(llama_server_context &llama) -{ - const auto timings = llama_get_timings(llama.ctx); - - return json{ - {"prompt_n", timings.n_p_eval}, - {"prompt_ms", timings.t_p_eval_ms}, - {"prompt_per_token_ms", timings.t_p_eval_ms / timings.n_p_eval}, - {"prompt_per_second", 1e3 / timings.t_p_eval_ms * timings.n_p_eval}, - - {"predicted_n", timings.n_eval}, - {"predicted_ms", timings.t_eval_ms}, - {"predicted_per_token_ms", timings.t_eval_ms / timings.n_eval}, - {"predicted_per_second", 1e3 / timings.t_eval_ms * timings.n_eval}, - }; -} - -static json format_final_response(llama_server_context &llama, const std::string &content, const std::vector &probs) -{ - - json res = json{ - {"content", content}, - {"stop", true}, - {"model", llama.params.model_alias}, - {"tokens_predicted", llama.num_tokens_predicted}, - {"tokens_evaluated", llama.num_prompt_tokens}, - {"generation_settings", format_generation_settings(llama)}, - {"prompt", llama.prompt}, - {"truncated", llama.truncated}, - {"stopped_eos", llama.stopped_eos}, - {"stopped_word", llama.stopped_word}, - {"stopped_limit", llama.stopped_limit}, - {"stopping_word", llama.stopping_word}, - {"tokens_cached", llama.n_past}, - {"timings", format_timings(llama)}, - }; - - if (llama.params.sparams.n_probs > 0) - { - res["completion_probabilities"] = probs_vector_to_json(llama.ctx, probs); - } - - return res; -} - static json format_partial_response( - llama_server_context &llama, const std::string &content, const std::vector &probs + llama_server_context &llama, llama_client_slot *slot, const std::string &content, const std::vector &probs ) { - json res = json{ - {"content", content}, - {"stop", false}, + json res = json + { + {"content", content }, + {"stop", false}, + {"slot_id", slot->id }, + {"multimodal", llama.multimodal } }; - if (llama.params.sparams.n_probs > 0) + if (slot->sparams.n_probs > 0) { res["completion_probabilities"] = probs_vector_to_json(llama.ctx, probs); } @@ -1115,120 +2157,8 @@ static json format_detokenized_response(std::string content) {"content", content}}; } -template -static T json_value(const json &body, const std::string &key, const T &default_value) -{ - // Fallback null to default value - return body.contains(key) && !body.at(key).is_null() - ? 
body.value(key, default_value) - : default_value; -} -static void parse_options_completion(const json &body, llama_server_context &llama) -{ - gpt_params default_params; - const auto & default_sparams = default_params.sparams; - - auto & params = llama.params; - auto & sparams = llama.params.sparams; - - llama.stream = json_value(body, "stream", false); - params.n_predict = json_value(body, "n_predict", default_params.n_predict); - sparams.top_k = json_value(body, "top_k", default_sparams.top_k); - sparams.top_p = json_value(body, "top_p", default_sparams.top_p); - sparams.tfs_z = json_value(body, "tfs_z", default_sparams.tfs_z); - sparams.typical_p = json_value(body, "typical_p", default_sparams.typical_p); - sparams.temp = json_value(body, "temperature", default_sparams.temp); - sparams.penalty_last_n = json_value(body, "repeat_last_n", default_sparams.penalty_last_n); - sparams.penalty_repeat = json_value(body, "repeat_penalty", default_sparams.penalty_repeat); - sparams.penalty_freq = json_value(body, "frequency_penalty", default_sparams.penalty_freq); - sparams.penalty_present = json_value(body, "presence_penalty", default_sparams.penalty_present); - sparams.mirostat = json_value(body, "mirostat", default_sparams.mirostat); - sparams.mirostat_tau = json_value(body, "mirostat_tau", default_sparams.mirostat_tau); - sparams.mirostat_eta = json_value(body, "mirostat_eta", default_sparams.mirostat_eta); - sparams.penalize_nl = json_value(body, "penalize_nl", default_sparams.penalize_nl); - params.n_keep = json_value(body, "n_keep", default_params.n_keep); - params.seed = json_value(body, "seed", default_params.seed); - sparams.grammar = json_value(body, "grammar", default_sparams.grammar); - sparams.n_probs = json_value(body, "n_probs", default_sparams.n_probs); - - if (body.count("prompt") != 0) - { - llama.prompt = body["prompt"]; - } - else - { - llama.prompt = ""; - } - - sparams.logit_bias.clear(); - if (json_value(body, "ignore_eos", false)) - { - sparams.logit_bias[llama_token_eos(llama.ctx)] = -INFINITY; - } - - const auto &logit_bias = body.find("logit_bias"); - if (logit_bias != body.end() && logit_bias->is_array()) - { - const int n_vocab = llama_n_vocab(llama.model); - for (const auto &el : *logit_bias) - { - if (el.is_array() && el.size() == 2 && el[0].is_number_integer()) - { - llama_token tok = el[0].get(); - if (tok >= 0 && tok < n_vocab) - { - if (el[1].is_number()) - { - sparams.logit_bias[tok] = el[1].get(); - } - else if (el[1].is_boolean() && !el[1].get()) - { - sparams.logit_bias[tok] = -INFINITY; - } - } - } - } - } - - llama.params.antiprompt.clear(); - const auto &stop = body.find("stop"); - if (stop != body.end() && stop->is_array()) - { - for (const auto &word : *stop) - { - if (!word.empty()) - { - llama.params.antiprompt.push_back(word); - } - } - } - - LOG_VERBOSE("completion parameters parsed", format_generation_settings(llama)); -} - -static void parse_options_infill(const json &body, llama_server_context &llama) -{ - if (body.count("input_prefix") != 0) - { - llama.params.input_prefix = body["input_prefix"]; - } - else - { - llama.params.input_prefix = ""; - } - if (body.count("input_suffix") != 0) - { - llama.params.input_suffix = body["input_suffix"]; - } - else - { - llama.params.input_suffix = ""; - } - parse_options_completion(body, llama); -} - -static void log_server_request(const Request &req, const Response &res) +static void log_server_request(const httplib::Request &req, const httplib::Response &res) { LOG_INFO("request", { {"remote_addr", 
req.remote_addr}, @@ -1245,60 +2175,26 @@ static void log_server_request(const Request &req, const Response &res) }); } -static bool is_at_eob(llama_server_context &server_context, const llama_token *tokens, const size_t n_tokens) { - return n_tokens && tokens[n_tokens-1] == llama_token_eos(server_context.ctx); -} - -// Function matching type llama_beam_search_callback_fn_t. -// Custom callback example is called each time the beams lengths increase: -// * Show progress by printing ',' following by number of convergent beam tokens if any. -// * When all beams converge to a common prefix, they are made available in beams_state.beams[0]. -// This is also called when the stop condition is met. -// Collect tokens into std::vector response which is pointed to by callback_data. -static void beam_search_callback(void *callback_data, llama_beams_state beams_state) { - auto & llama = *static_cast(callback_data); - // Mark beams as EOS as needed. - for (size_t i = 0 ; i < beams_state.n_beams ; ++i) { - llama_beam_view& beam_view = beams_state.beam_views[i]; - if (!beam_view.eob && is_at_eob(llama, beam_view.tokens, beam_view.n_tokens)) { - beam_view.eob = true; - } - } - printf(","); // Show progress - if (const size_t n = beams_state.common_prefix_length) { - llama.generated_token_probs.resize(llama.generated_token_probs.size() + n); - assert(0u < beams_state.n_beams); - const llama_token * tokens = beams_state.beam_views[0].tokens; - const auto map = [](llama_token tok) { return completion_token_output{{},tok}; }; - std::transform(tokens, tokens + n, llama.generated_token_probs.end() - n, map); - printf("%zu", n); - } - fflush(stdout); -#if 0 // DEBUG: print current beams for this iteration - std::cout << "\n\nCurrent beams:\n"; - for (size_t i=0 ; i < beams_state.n_beams ; ++i) { - std::cout << "beams["<generated_token_probs; auto translator = token_translator{llama.ctx}; auto add_strlen = [=](size_t sum, const completion_token_output & cto) { return sum + translator(cto).size(); }; const size_t len = std::accumulate(gtps.begin(), gtps.end(), size_t(0), add_strlen); - if (llama.generated_text.capacity() < llama.generated_text.size() + len) { - llama.generated_text.reserve(llama.generated_text.size() + len); + if (slot->generated_text.capacity() < slot->generated_text.size() + len) + { + slot->generated_text.reserve(slot->generated_text.size() + len); } - for (const completion_token_output & cto : gtps) { - llama.generated_text += translator(cto); + for (const completion_token_output & cto : gtps) + { + slot->generated_text += translator(cto); } } @@ -1311,7 +2207,7 @@ int main(int argc, char **argv) // struct that contains llama context and inference llama_server_context llama; - server_params_parse(argc, argv, sparams, params); + server_params_parse(argc, argv, sparams, params, llama); if (params.model_alias == "unknown") { @@ -1322,6 +2218,7 @@ int main(int argc, char **argv) LOG_INFO("build info", {{"build", BUILD_NUMBER}, {"commit", BUILD_COMMIT}}); + LOG_INFO("system info", { {"n_threads", params.n_threads}, {"n_threads_batch", params.n_threads_batch}, @@ -1330,405 +2227,261 @@ int main(int argc, char **argv) }); // load the model - if (!llama.loadModel(params)) + if (!llama.load_model(params)) { return 1; } - Server svr; + llama.initialize(); + + httplib::Server svr; svr.set_default_headers({{"Server", "llama.cpp"}, {"Access-Control-Allow-Origin", "*"}, {"Access-Control-Allow-Headers", "content-type"}}); // this is only called if no index.html is found in the public --path - svr.Get("/", 
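The handlers that follow serve assets baked into the binary; they are only reached when no file under --path shadows the route. A minimal cpp-httplib sketch of the same pattern (the embedded blob here is a placeholder string rather than the generated index_html array):

    #include <httplib.h>

    int main() {
        static const char index_html[] = "<html><body>llama.cpp</body></html>";

        httplib::Server svr;
        svr.set_default_headers({{"Server", "llama.cpp"},
                                 {"Access-Control-Allow-Origin", "*"}});

        // Served only when no on-disk index.html shadows this route.
        svr.Get("/", [](const httplib::Request &, httplib::Response & res) {
            res.set_content(index_html, sizeof(index_html) - 1, "text/html");
        });

        svr.listen("127.0.0.1", 8080);
        return 0;
    }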
[](const Request &, Response &res) + svr.Get("/", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast(&index_html), index_html_len, "text/html"); - return false; }); + res.set_content(reinterpret_cast(&index_html), index_html_len, "text/html"); + return false; + }); // this is only called if no index.js is found in the public --path - svr.Get("/index.js", [](const Request &, Response &res) + svr.Get("/index.js", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast(&index_js), index_js_len, "text/javascript"); - return false; }); + res.set_content(reinterpret_cast(&index_js), index_js_len, "text/javascript"); + return false; + }); // this is only called if no index.html is found in the public --path - svr.Get("/completion.js", [](const Request &, Response &res) + svr.Get("/completion.js", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast(&completion_js), completion_js_len, "application/javascript"); - return false; }); + res.set_content(reinterpret_cast(&completion_js), completion_js_len, "application/javascript"); + return false; + }); // this is only called if no index.html is found in the public --path - svr.Get("/json-schema-to-grammar.mjs", [](const Request &, Response &res) + svr.Get("/json-schema-to-grammar.mjs", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast(&json_schema_to_grammar_mjs), json_schema_to_grammar_mjs_len, "application/javascript"); - return false; }); + res.set_content(reinterpret_cast(&json_schema_to_grammar_mjs), json_schema_to_grammar_mjs_len, "application/javascript"); + return false; + }); - svr.Post("/completion", [&llama](const Request &req, Response &res) - { - auto lock = llama.lock(); + svr.Get("/props", [&llama](const httplib::Request & /*req*/, httplib::Response &res) + { + res.set_header("Access-Control-Allow-Origin", "*"); + json data = { + { "user_name", llama.name_user.c_str() }, + { "assistant_name", llama.name_assistant.c_str() } + }; + res.set_content(data.dump(), "application/json"); + }); - llama.rewind(); - - llama_reset_timings(llama.ctx); - parse_options_completion(json::parse(req.body), llama); - - llama.initSampling(); - llama.loadPrompt(); - llama.beginCompletion(); - - if (!llama.stream) { - if (llama.params.n_beams) { - // Fill llama.generated_token_probs vector with final beam. - llama_beam_search(llama.ctx, beam_search_callback, &llama, llama.params.n_beams, - llama.n_past, llama.n_remain); - // Translate llama.generated_token_probs to llama.generated_text. - append_to_generated_text_from_generated_token_probs(llama); - } else { - size_t stop_pos = std::string::npos; - - while (llama.has_next_token) { - const completion_token_output token_with_probs = llama.doCompletion(); - const std::string token_text = token_with_probs.tok == -1 ? 
"" : llama_token_to_piece(llama.ctx, token_with_probs.tok); - - stop_pos = llama.findStoppingStrings(llama.generated_text, - token_text.size(), STOP_FULL); - } - - if (stop_pos == std::string::npos) { - stop_pos = llama.findStoppingStrings(llama.generated_text, 0, STOP_PARTIAL); - } - if (stop_pos != std::string::npos) { - llama.generated_text.erase(llama.generated_text.begin() + stop_pos, - llama.generated_text.end()); - } - } - - auto probs = llama.generated_token_probs; - if (llama.params.sparams.n_probs > 0 && llama.stopped_word) { - const std::vector stop_word_toks = llama_tokenize(llama.ctx, llama.stopping_word, false); - probs = std::vector(llama.generated_token_probs.begin(), llama.generated_token_probs.end() - stop_word_toks.size()); - } - - const json data = format_final_response(llama, llama.generated_text, probs); - - llama_print_timings(llama.ctx); - - res.set_content(data.dump(-1, ' ', false, json::error_handler_t::replace), - "application/json"); - } else { - const auto chunked_content_provider = [&](size_t, DataSink & sink) { - size_t sent_count = 0; - size_t sent_token_probs_index = 0; - - while (llama.has_next_token) { - const completion_token_output token_with_probs = llama.doCompletion(); - if (token_with_probs.tok == -1 || llama.multibyte_pending > 0) { - continue; + svr.Post("/completion", [&llama](const httplib::Request &req, httplib::Response &res) + { + json data = json::parse(req.body); + const int task_id = llama.request_completion(data, false); + if (!json_value(data, "stream", false)) { + std::string completion_text; + task_result result = llama.next_result(task_id); + if(!result.error && result.stop) { + res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json"); } - const std::string token_text = llama_token_to_piece(llama.ctx, token_with_probs.tok); - - size_t pos = std::min(sent_count, llama.generated_text.size()); - - const std::string str_test = llama.generated_text.substr(pos); - bool is_stop_full = false; - size_t stop_pos = - llama.findStoppingStrings(str_test, token_text.size(), STOP_FULL); - if (stop_pos != std::string::npos) { - is_stop_full = true; - llama.generated_text.erase( - llama.generated_text.begin() + pos + stop_pos, - llama.generated_text.end()); - pos = std::min(sent_count, llama.generated_text.size()); - } else { - is_stop_full = false; - stop_pos = llama.findStoppingStrings(str_test, token_text.size(), - STOP_PARTIAL); + else + { + res.status = 404; + res.set_content(result.result_json["content"], "text/plain"); + return; } - - if ( - stop_pos == std::string::npos || - // Send rest of the text if we are at the end of the generation - (!llama.has_next_token && !is_stop_full && stop_pos > 0) - ) { - const std::string to_send = llama.generated_text.substr(pos, std::string::npos); - - sent_count += to_send.size(); - - std::vector probs_output = {}; - - if (llama.params.sparams.n_probs > 0) { - const std::vector to_send_toks = llama_tokenize(llama.ctx, to_send, false); - size_t probs_pos = std::min(sent_token_probs_index, llama.generated_token_probs.size()); - size_t probs_stop_pos = std::min(sent_token_probs_index + to_send_toks.size(), llama.generated_token_probs.size()); - if (probs_pos < probs_stop_pos) { - probs_output = std::vector(llama.generated_token_probs.begin() + probs_pos, llama.generated_token_probs.begin() + probs_stop_pos); - } - sent_token_probs_index = probs_stop_pos; - } - - const json data = format_partial_response(llama, to_send, probs_output); - - const std::string str 
= - "data: " + - data.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; - - LOG_VERBOSE("data stream", { - { "to_send", str } - }); - - if (!sink.write(str.data(), str.size())) { - LOG_VERBOSE("stream closed", {}); - llama_print_timings(llama.ctx); - return false; - } - } - - if (!llama.has_next_token) { - // Generation is done, send extra information. - const json data = format_final_response( - llama, - "", - std::vector(llama.generated_token_probs.begin(), llama.generated_token_probs.begin() + sent_token_probs_index) - ); - - const std::string str = - "data: " + - data.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; - - LOG_VERBOSE("data stream", { - { "to_send", str } - }); - - if (!sink.write(str.data(), str.size())) { - LOG_VERBOSE("stream closed", {}); - llama_print_timings(llama.ctx); - return false; - } - } - } - - llama_print_timings(llama.ctx); - sink.done(); - return true; - }; - const auto on_complete = [&](bool) { - llama.mutex.unlock(); - }; - lock.release(); - res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); - } }); - - svr.Post("/infill", [&llama](const Request &req, Response &res) - { - auto lock = llama.lock(); - - llama.rewind(); - - llama_reset_timings(llama.ctx); - parse_options_infill(json::parse(req.body), llama); - - llama.initSampling(); - llama.loadInfill(); - llama.beginCompletion(); - const auto chunked_content_provider = [&](size_t, DataSink & sink) { - size_t sent_count = 0; - size_t sent_token_probs_index = 0; - - while (llama.has_next_token) { - const completion_token_output token_with_probs = llama.doCompletion(); - if (token_with_probs.tok == -1 || llama.multibyte_pending > 0) { - continue; - } - const std::string token_text = llama_token_to_piece(llama.ctx, token_with_probs.tok); - - size_t pos = std::min(sent_count, llama.generated_text.size()); - - const std::string str_test = llama.generated_text.substr(pos); - bool is_stop_full = false; - size_t stop_pos = - llama.findStoppingStrings(str_test, token_text.size(), STOP_FULL); - if (stop_pos != std::string::npos) { - is_stop_full = true; - llama.generated_text.erase( - llama.generated_text.begin() + pos + stop_pos, - llama.generated_text.end()); - pos = std::min(sent_count, llama.generated_text.size()); } else { - is_stop_full = false; - stop_pos = llama.findStoppingStrings(str_test, token_text.size(), - STOP_PARTIAL); - } - - if ( - stop_pos == std::string::npos || - // Send rest of the text if we are at the end of the generation - (!llama.has_next_token && !is_stop_full && stop_pos > 0) - ) { - const std::string to_send = llama.generated_text.substr(pos, std::string::npos); - - sent_count += to_send.size(); - - std::vector probs_output = {}; - - if (llama.params.sparams.n_probs > 0) { - const std::vector to_send_toks = llama_tokenize(llama.ctx, to_send, false); - size_t probs_pos = std::min(sent_token_probs_index, llama.generated_token_probs.size()); - size_t probs_stop_pos = std::min(sent_token_probs_index + to_send_toks.size(), llama.generated_token_probs.size()); - if (probs_pos < probs_stop_pos) { - probs_output = std::vector(llama.generated_token_probs.begin() + probs_pos, llama.generated_token_probs.begin() + probs_stop_pos); + const auto chunked_content_provider = [task_id, &llama](size_t, httplib::DataSink & sink) + { + while (true) + { + task_result result = llama.next_result(task_id); + if (!result.error) { + const std::string str = + "data: " + + result.result_json.dump(-1, ' ', false, 
json::error_handler_t::replace) + + "\n\n"; + LOG_VERBOSE("data stream", { + { "to_send", str } + }); + if (!sink.write(str.c_str(), str.size())) + { + return false; + } + if(result.stop) { + break; + } + } else { + break; + } } - sent_token_probs_index = probs_stop_pos; - } + sink.done(); + return true; + }; - const json data = format_partial_response(llama, to_send, probs_output); + auto on_complete = [task_id, &llama] (bool) + { + // cancel + llama.request_cancel(task_id); + }; - const std::string str = - "data: " + - data.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; - - LOG_VERBOSE("data stream", { - { "to_send", str } - }); - - if (!sink.write(str.data(), str.size())) { - LOG_VERBOSE("stream closed", {}); - llama_print_timings(llama.ctx); - return false; - } + res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); } + }); - if (!llama.has_next_token) { - // Generation is done, send extra information. - const json data = format_final_response( - llama, - "", - std::vector(llama.generated_token_probs.begin(), llama.generated_token_probs.begin() + sent_token_probs_index) - ); - - const std::string str = - "data: " + - data.dump(-1, ' ', false, json::error_handler_t::replace) + - "\n\n"; - - LOG_VERBOSE("data stream", { - { "to_send", str } - }); - - if (!sink.write(str.data(), str.size())) { - LOG_VERBOSE("stream closed", {}); - llama_print_timings(llama.ctx); - return false; - } - } - } - - llama_print_timings(llama.ctx); - sink.done(); - return true; - }; - const auto on_complete = [&](bool) { - llama.mutex.unlock(); - }; - lock.release(); - res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); - }); - - svr.Get("/model.json", [&llama](const Request &, Response &res) + svr.Post("/infill", [&llama](const httplib::Request &req, httplib::Response &res) { - const json data = format_generation_settings(llama); - return res.set_content(data.dump(), "application/json"); }); + json data = json::parse(req.body); + const int task_id = llama.request_completion(data, true); + if (!json_value(data, "stream", false)) { + std::string completion_text; + task_result result = llama.next_result(task_id); + if (!result.error && result.stop) + { + res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json"); + } + else + { + res.status = 404; + res.set_content(result.result_json["content"], "text/plain"); + return; + } + } else { + const auto chunked_content_provider = [task_id, &llama](size_t, httplib::DataSink & sink) { + while (true) + { + task_result result = llama.next_result(task_id); + if (!result.error) { + const std::string str = + "data: " + + result.result_json.dump(-1, ' ', false, json::error_handler_t::replace) + + "\n\n"; + LOG_VERBOSE("data stream", { + { "to_send", str } + }); + if (!sink.write(str.c_str(), str.size())) + { + return false; + } + if (result.stop) + { + break; + } + } + else + { + break; + } + } - svr.Options(R"(/.*)", [](const Request &, Response &res) + sink.done(); + + return true; + }; + + auto on_complete = [task_id, &llama] (bool) + { + // cancel + llama.request_cancel(task_id); + }; + + res.set_chunked_content_provider("text/event-stream", chunked_content_provider, on_complete); + } + }); + + svr.Get("/model.json", [&llama](const httplib::Request &, httplib::Response &res) + { + const json data = llama.get_model_props(); + return res.set_content(data.dump(), "application/json"); + }); + + svr.Options(R"(/.*)", [](const 
httplib::Request &, httplib::Response &res) { return res.set_content("", "application/json"); }); - svr.Post("/tokenize", [&llama](const Request &req, Response &res) - { - auto lock = llama.lock(); + svr.Post("/tokenize", [&llama](const httplib::Request &req, httplib::Response &res) + { + const json body = json::parse(req.body); + std::vector tokens; + if (body.count("content") != 0) + { + tokens = llama.tokenize(body["content"], false); + } + const json data = format_tokenizer_response(tokens); + return res.set_content(data.dump(), "application/json"); + }); - const json body = json::parse(req.body); - std::vector tokens; - if (body.count("content") != 0) - { - tokens = llama.tokenize(body["content"], false); - } - const json data = format_tokenizer_response(tokens); - return res.set_content(data.dump(), "application/json"); }); + svr.Post("/detokenize", [&llama](const httplib::Request &req, httplib::Response &res) + { + const json body = json::parse(req.body); + std::string content; + if (body.count("tokens") != 0) + { + const std::vector tokens = body["tokens"]; + content = tokens_to_str(llama.ctx, tokens.cbegin(), tokens.cend()); + } - svr.Post("/detokenize", [&llama](const Request &req, Response &res) - { - auto lock = llama.lock(); + const json data = format_detokenized_response(content); + return res.set_content(data.dump(), "application/json"); + }); - const json body = json::parse(req.body); - std::string content; - if (body.count("tokens") != 0) - { - const std::vector tokens = body["tokens"]; - content = tokens_to_str(llama.ctx, tokens.cbegin(), tokens.cend()); - } - - const json data = format_detokenized_response(content); - return res.set_content(data.dump(), "application/json"); }); - - svr.Post("/embedding", [&llama](const Request &req, Response &res) - { - auto lock = llama.lock(); - - const json body = json::parse(req.body); - - llama.rewind(); - - llama_reset_timings(llama.ctx); - - if (body.count("content") != 0) - { - llama.prompt = body["content"]; - } - else - { - llama.prompt = ""; - } - llama.params.n_predict = 0; - - llama.initSampling(); - llama.loadPrompt(); - llama.beginCompletion(); - llama.doCompletion(); - - const json data = format_embedding_response(llama); - return res.set_content(data.dump(), "application/json"); }); + svr.Post("/embedding", [&llama](const httplib::Request &req, httplib::Response &res) + { + const json body = json::parse(req.body); + json prompt; + if (body.count("content") != 0) + { + prompt = body["content"]; + } + else + { + prompt = ""; + } + const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false); + task_result result = llama.next_result(task_id); + return res.set_content(result.result_json.dump(), "application/json"); + }); svr.set_logger(log_server_request); - svr.set_exception_handler([](const Request &, Response &res, std::exception_ptr ep) - { - const char fmt[] = "500 Internal Server Error\n%s"; - char buf[BUFSIZ]; - try { - std::rethrow_exception(std::move(ep)); - } catch (std::exception & e) { - snprintf(buf, sizeof(buf), fmt, e.what()); - } catch (...) 
{
-                snprintf(buf, sizeof(buf), fmt, "Unknown Exception");
-            }
-            res.set_content(buf, "text/plain");
-            res.status = 500; });
+    svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep)
+            {
+                const char fmt[] = "500 Internal Server Error\n%s";
+                char buf[BUFSIZ];
+                try
+                {
+                    std::rethrow_exception(std::move(ep));
+                }
+                catch (std::exception &e)
+                {
+                    snprintf(buf, sizeof(buf), fmt, e.what());
+                }
+                catch (...)
+                {
+                    snprintf(buf, sizeof(buf), fmt, "Unknown Exception");
+                }
+                res.set_content(buf, "text/plain");
+                res.status = 500;
+            });

-    svr.set_error_handler([](const Request &, Response &res)
-            {
-                if (res.status == 400) {
-                    res.set_content("Invalid request", "text/plain");
-                } else if (res.status != 500) {
-                    res.set_content("File Not Found", "text/plain");
-                    res.status = 404;
-                } });
+    svr.set_error_handler([](const httplib::Request &, httplib::Response &res)
+    {
+        if (res.status == 400)
+        {
+            res.set_content("Invalid request", "text/plain");
+        }
+        else if (res.status != 500)
+        {
+            res.set_content("File Not Found", "text/plain");
+            res.status = 404;
+        }
+    });

     // set timeouts and change hostname and port
-    svr.set_read_timeout(sparams.read_timeout);
+    svr.set_read_timeout (sparams.read_timeout);
     svr.set_write_timeout(sparams.write_timeout);

     if (!svr.bind_to_port(sparams.hostname, sparams.port))
@@ -1741,20 +2494,38 @@ int main(int argc, char **argv)

     svr.set_base_dir(sparams.public_path);

     // to make it ctrl+clickable:
-    printf("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port);
+    LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port);

     LOG_INFO("HTTP server listening", {
         {"hostname", sparams.hostname},
         {"port", sparams.port},
     });

-    if (!svr.listen_after_bind())
+    // run the HTTP server in a thread - see comment below
+    std::thread t([&]()
+            {
+                if (!svr.listen_after_bind())
+                {
+                    return 1;
+                }
+
+                return 0;
+            });
+
+    // GG: if I put the main loop inside a thread, it crashes on the first request when build in Debug!?
+    //     "Bus error: 10" - this is on macOS, it does not crash on Linux
+    //std::thread t2([&]()
     {
-        return 1;
+        bool running = true;
+        while (running)
+        {
+            running = llama.update_slots();
+        }
     }
+    //);
+
+    t.join();

-    llama_sampling_free(llama.ctx_sampling);
     llama_backend_free();
-
     return 0;
 }

From 96981f37b1e3f450d9e63e571514217bf60f0a7f Mon Sep 17 00:00:00 2001
From: Alex
Date: Sun, 22 Oct 2023 15:56:53 -0400
Subject: [PATCH 014/859] make : add optional CUDA_NATIVE_ARCH (#2482)

Use the environment variable `CUDA_NATIVE_ARCH` if present to set NVCC arch. Otherwise, use `native`.

---
 Makefile | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 80179631f..705c4acb4 100644
--- a/Makefile
+++ b/Makefile
@@ -391,9 +391,12 @@ else
 endif #LLAMA_CUDA_NVCC
 ifdef CUDA_DOCKER_ARCH
 	NVCCFLAGS += -Wno-deprecated-gpu-targets -arch=$(CUDA_DOCKER_ARCH)
+endif # CUDA_DOCKER_ARCH
+ifdef CUDA_NATIVE_ARCH
+	NVCCFLAGS += -arch=$(CUDA_NATIVE_ARCH)
 else
 	NVCCFLAGS += -arch=native
-endif # CUDA_DOCKER_ARCH
+endif # CUDA_NATIVE_ARCH
 ifdef LLAMA_CUDA_FORCE_DMMV
 	NVCCFLAGS += -DGGML_CUDA_FORCE_DMMV
 endif # LLAMA_CUDA_FORCE_DMMV

From 6336701c9378c23c85d1c0e464b663ca2bbb8e60 Mon Sep 17 00:00:00 2001
From: Galunid
Date: Mon, 23 Oct 2023 17:47:03 +0200
Subject: [PATCH 015/859] Fix baichuan convert script not detecting model (#3739)

It seems nobody objects.
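For reference, the one-line change below simply accepts both spellings of the architecture name that published Baichuan checkpoints appear to carry in config.json, "BaichuanForCausalLM" and "BaiChuanForCausalLM" (they differ only in the capitalization of "Chuan"); an equivalent membership test would be `hparams["architectures"][0] not in ("BaichuanForCausalLM", "BaiChuanForCausalLM")`.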
--- convert-baichuan-hf-to-gguf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert-baichuan-hf-to-gguf.py b/convert-baichuan-hf-to-gguf.py index 3b64ecb88..5ee99be73 100755 --- a/convert-baichuan-hf-to-gguf.py +++ b/convert-baichuan-hf-to-gguf.py @@ -110,7 +110,7 @@ print("gguf: loading model "+dir_model.name) with open(dir_model / "config.json", "r", encoding="utf-8") as f: hparams = json.load(f) print("hello print: ",hparams["architectures"][0]) -if hparams["architectures"][0] != "BaichuanForCausalLM": +if hparams["architectures"][0] != "BaichuanForCausalLM" and hparams["architectures"][0] != "BaiChuanForCausalLM": print("Model architecture not supported: " + hparams["architectures"][0]) sys.exit() From 5be6c803fa5378f62a1590f3ad8c6b64c7c0c2ce Mon Sep 17 00:00:00 2001 From: Marcus Dunn <51931484+MarcusDunn@users.noreply.github.com> Date: Mon, 23 Oct 2023 12:40:03 -0700 Subject: [PATCH 016/859] llama : remove token functions with `context` args in favor of `model` (#3720) * added `llama_model_token_*` variants to all the `llama_token_*` functions. * added `LLAMA_API` * formatting Co-authored-by: Georgi Gerganov * removed old `llama_token` functions * changed 3 more functions to take in model - `llama_token_get_text` - `llama_token_get_score` - `llama_token_get_type` * added back docs * fixed main.cpp * changed token functions to use new model variants * changed token functions to use new model variants --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 8 ++--- common/sampling.cpp | 4 +-- common/train.cpp | 6 ++-- examples/batched/batched.cpp | 2 +- examples/beam-search/beam-search.cpp | 2 +- examples/infill/infill.cpp | 30 ++++++++--------- examples/llama-bench/llama-bench.cpp | 4 +-- examples/llava/llava-utils.h | 2 +- examples/main/main.cpp | 8 ++--- examples/parallel/parallel.cpp | 2 +- examples/perplexity/perplexity.cpp | 4 +-- examples/server/server.cpp | 14 ++++---- examples/simple/simple.cpp | 2 +- examples/speculative/speculative.cpp | 2 +- llama.cpp | 49 ++++++++++++++-------------- llama.h | 21 ++++++------ 16 files changed, 81 insertions(+), 79 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index bbd1518ca..44bb76618 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -880,13 +880,13 @@ std::tuple llama_init_from_gpt_par } if (params.ignore_eos) { - params.sparams.logit_bias[llama_token_eos(lctx)] = -INFINITY; + params.sparams.logit_bias[llama_token_eos(model)] = -INFINITY; } { LOG("warming up the model with an empty run\n"); - std::vector tmp = { llama_token_bos(lctx), llama_token_eos(lctx), }; + std::vector tmp = { llama_token_bos(model), llama_token_eos(model), }; llama_decode(lctx, llama_batch_get_one(tmp.data(), std::min(tmp.size(), (size_t) params.n_batch), 0, 0)); llama_kv_cache_tokens_rm(lctx, -1, -1); llama_reset_timings(lctx); @@ -941,7 +941,7 @@ std::string llama_token_to_piece(const struct llama_context * ctx, llama_token t } std::string llama_detokenize_spm(llama_context * ctx, const std::vector & tokens) { - const llama_token bos_id = llama_token_bos(ctx); + const llama_token bos_id = llama_token_bos(llama_get_model(ctx)); std::string piece; std::string result; @@ -1186,7 +1186,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "hellaswag: %s # default: false\n", params.hellaswag ? 
"true" : "false"); fprintf(stream, "hellaswag_tasks: %zu # default: 400\n", params.hellaswag_tasks); - const auto logit_bias_eos = sparams.logit_bias.find(llama_token_eos(lctx)); + const auto logit_bias_eos = sparams.logit_bias.find(llama_token_eos(llama_get_model(lctx))); const bool ignore_eos = logit_bias_eos != sparams.logit_bias.end() && logit_bias_eos->second == -INFINITY; fprintf(stream, "ignore_eos: %s # default: false\n", ignore_eos ? "true" : "false"); diff --git a/common/sampling.cpp b/common/sampling.cpp index 6f0af3c4a..5258d4e82 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -147,7 +147,7 @@ llama_token llama_sampling_sample( // apply penalties if (!prev.empty()) { - const float nl_logit = logits[llama_token_nl(ctx_main)]; + const float nl_logit = logits[llama_token_nl(llama_get_model(ctx_main))]; llama_sample_repetition_penalties(ctx_main, &cur_p, prev.data() + prev.size() - penalty_last_n, @@ -155,7 +155,7 @@ llama_token llama_sampling_sample( if (!penalize_nl) { for (size_t idx = 0; idx < cur_p.size; idx++) { - if (cur_p.data[idx].id == llama_token_nl(ctx_main)) { + if (cur_p.data[idx].id == llama_token_nl(llama_get_model(ctx_main))) { cur_p.data[idx].logit = nl_logit; break; } diff --git a/common/train.cpp b/common/train.cpp index 154ca56e5..3cce5da26 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -236,8 +236,8 @@ int64_t get_example_targets_batch( int64_t used_samples = 0; ggml_set_f32(target_probs, 0.0f); - llama_token bos = llama_token_bos(lctx); - llama_token eos = llama_token_eos(lctx); + llama_token bos = llama_token_bos(llama_get_model(lctx)); + llama_token eos = llama_token_eos(llama_get_model(lctx)); // printf("%s: example_id=%d n_batch=%d n_train_samples=%zu\n", __func__, example_id, n_batch, n_train_samples); for (int k=0; k mark the stream as finished - if (new_token_id == llama_token_eos(ctx) || n_cur == n_len) { + if (new_token_id == llama_token_eos(model) || n_cur == n_len) { i_batch[i] = -1; LOG_TEE("\n"); if (n_parallel > 1) { diff --git a/examples/beam-search/beam-search.cpp b/examples/beam-search/beam-search.cpp index f078ab8a8..679b382e1 100644 --- a/examples/beam-search/beam-search.cpp +++ b/examples/beam-search/beam-search.cpp @@ -47,7 +47,7 @@ struct beam_search_callback_data { // In this case, end-of-beam (eob) is equivalent to end-of-sentence (eos) but this need not always be the same. // For example, eob can be flagged due to maximum token length, stop words, etc. static bool is_at_eob(const beam_search_callback_data & callback_data, const llama_token * tokens, size_t n_tokens) { - return n_tokens && tokens[n_tokens-1] == llama_token_eos(callback_data.ctx); + return n_tokens && tokens[n_tokens-1] == llama_token_eos(llama_get_model(callback_data.ctx)); } // Function matching type llama_beam_search_callback_fn_t. 
diff --git a/examples/infill/infill.cpp b/examples/infill/infill.cpp index 6331335e3..9c52b7bba 100644 --- a/examples/infill/infill.cpp +++ b/examples/infill/infill.cpp @@ -246,14 +246,14 @@ int main(int argc, char ** argv) { if (suff_rm_leading_spc && inp_sfx[0] == space_token) { inp_sfx.erase(inp_sfx.begin()); } - inp_pfx.insert(inp_pfx.begin(), llama_token_prefix(ctx)); + inp_pfx.insert(inp_pfx.begin(), llama_token_prefix(model)); if (add_bos) { - inp_pfx.insert(inp_pfx.begin(), llama_token_bos(ctx)); + inp_pfx.insert(inp_pfx.begin(), llama_token_bos(model)); } - inp_sfx.insert(inp_sfx.begin(), llama_token_suffix(ctx)); + inp_sfx.insert(inp_sfx.begin(), llama_token_suffix(model)); embd_inp = inp_pfx; embd_inp.insert(embd_inp.end(), inp_sfx.begin(), inp_sfx.end()); - embd_inp.push_back(llama_token_middle(ctx)); + embd_inp.push_back(llama_token_middle(model)); LOG("prefix: \"%s\"\n", log_tostr(params.input_prefix)); LOG("suffix: \"%s\"\n", log_tostr(params.input_suffix)); @@ -261,7 +261,7 @@ int main(int argc, char ** argv) { // Should not run without any tokens if (embd_inp.empty()) { - embd_inp.push_back(llama_token_bos(ctx)); + embd_inp.push_back(llama_token_bos(model)); LOG("embd_inp was considered empty and bos was added: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, embd_inp).c_str()); } @@ -577,10 +577,10 @@ int main(int argc, char ** argv) { if ((int) embd_inp.size() <= n_consumed) { // deal with eot token in infill mode - if ((llama_sampling_last(ctx_sampling) == llama_token_eot(ctx) || is_interacting) && params.interactive){ + if ((llama_sampling_last(ctx_sampling) == llama_token_eot(model) || is_interacting) && params.interactive){ if(is_interacting && !params.interactive_first) { // print an eot token - printf("%s", llama_token_to_piece(ctx, llama_token_eot(ctx)).c_str()); + printf("%s", llama_token_to_piece(ctx, llama_token_eot(model)).c_str()); } fflush(stdout); printf("\n"); @@ -627,14 +627,14 @@ int main(int argc, char ** argv) { if (suff_rm_leading_spc && inp_sfx[0] == space_token) { inp_sfx.erase(inp_sfx.begin()); } - inp_pfx.insert(inp_pfx.begin(), llama_token_prefix(ctx)); + inp_pfx.insert(inp_pfx.begin(), llama_token_prefix(model)); if (add_bos) { - inp_pfx.insert(inp_pfx.begin(), llama_token_bos(ctx)); + inp_pfx.insert(inp_pfx.begin(), llama_token_bos(model)); } - inp_sfx.insert(inp_sfx.begin(), llama_token_suffix(ctx)); + inp_sfx.insert(inp_sfx.begin(), llama_token_suffix(model)); embd_inp = inp_pfx; embd_inp.insert(embd_inp.end(), inp_sfx.begin(), inp_sfx.end()); - embd_inp.push_back(llama_token_middle(ctx)); + embd_inp.push_back(llama_token_middle(model)); embd.clear(); embd_guidance.clear(); n_remain = params.n_predict; @@ -644,7 +644,7 @@ int main(int argc, char ** argv) { is_interacting = false; } // deal with end of text token in interactive mode - else if (llama_sampling_last(ctx_sampling) == llama_token_eos(ctx)) { + else if (llama_sampling_last(ctx_sampling) == llama_token_eos(model)) { LOG("found EOS token\n"); if (params.interactive) { @@ -661,7 +661,7 @@ int main(int argc, char ** argv) { if (params.input_prefix_bos) { LOG("adding input prefix BOS token\n"); - embd_inp.push_back(llama_token_bos(ctx)); + embd_inp.push_back(llama_token_bos(model)); } std::string buffer; @@ -724,7 +724,7 @@ int main(int argc, char ** argv) { } // end of text token - if (!embd.empty() && embd.back() == llama_token_eos(ctx) && !params.interactive) { + if (!embd.empty() && embd.back() == llama_token_eos(model) && !params.interactive) { break; } @@ -736,7 +736,7 @@ int main(int argc, 
char ** argv) { } } if (!params.interactive && n_remain <= 0) { - printf("%s", llama_token_to_piece(ctx, llama_token_eot(ctx)).c_str()); + printf("%s", llama_token_to_piece(ctx, llama_token_eot(model)).c_str()); fflush(stdout); } diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index a04115c96..20767d555 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -933,7 +933,7 @@ struct sql_printer : public printer { }; static void test_prompt(llama_context * ctx, int n_prompt, int n_past, int n_batch, int n_threads) { - std::vector tokens(n_batch, llama_token_bos(ctx)); + std::vector tokens(n_batch, llama_token_bos(llama_get_model(ctx))); int n_processed = 0; llama_set_n_threads(ctx, n_threads, n_threads); @@ -946,7 +946,7 @@ static void test_prompt(llama_context * ctx, int n_prompt, int n_past, int n_bat } static void test_gen(llama_context * ctx, int n_gen, int n_past, int n_threads) { - llama_token token = llama_token_bos(ctx); + llama_token token = llama_token_bos(llama_get_model(ctx)); llama_set_n_threads(ctx, n_threads, n_threads); diff --git a/examples/llava/llava-utils.h b/examples/llava/llava-utils.h index 45b2b1ad3..320c71967 100644 --- a/examples/llava/llava-utils.h +++ b/examples/llava/llava-utils.h @@ -137,7 +137,7 @@ inline llama_token sample_id(llama_context * ctx_llama, gpt_params & params) { inline const char * sample(struct llama_context * ctx_llama, gpt_params & params, int * n_past) { int id = sample_id(ctx_llama, params); static std::string ret; - if (id == llama_token_eos(ctx_llama)) { + if (id == llama_token_eos(llama_get_model(ctx_llama))) { ret = ""; } else { ret = llama_token_to_piece(ctx_llama, id); diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 2621bd539..3d9f670b9 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -248,7 +248,7 @@ int main(int argc, char ** argv) { // Should not run without any tokens if (embd_inp.empty()) { - embd_inp.push_back(llama_token_bos(ctx)); + embd_inp.push_back(llama_token_bos(model)); LOG("embd_inp was considered empty and bos was added: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, embd_inp).c_str()); } @@ -693,7 +693,7 @@ int main(int argc, char ** argv) { } // deal with end of text token in interactive mode - if (llama_sampling_last(ctx_sampling) == llama_token_eos(ctx)) { + if (llama_sampling_last(ctx_sampling) == llama_token_eos(model)) { LOG("found EOS token\n"); if (params.interactive) { @@ -720,7 +720,7 @@ int main(int argc, char ** argv) { if (params.input_prefix_bos) { LOG("adding input prefix BOS token\n"); - embd_inp.push_back(llama_token_bos(ctx)); + embd_inp.push_back(llama_token_bos(model)); } std::string buffer; @@ -804,7 +804,7 @@ int main(int argc, char ** argv) { } // end of text token - if (!embd.empty() && embd.back() == llama_token_eos(ctx) && !(params.instruct || params.interactive)) { + if (!embd.empty() && embd.back() == llama_token_eos(model) && !(params.instruct || params.interactive)) { LOG_TEE(" [end of text]\n"); break; } diff --git a/examples/parallel/parallel.cpp b/examples/parallel/parallel.cpp index eb64adef8..9a0b9c183 100644 --- a/examples/parallel/parallel.cpp +++ b/examples/parallel/parallel.cpp @@ -347,7 +347,7 @@ int main(int argc, char ** argv) { // client.id, client.seq_id, id, client.n_decoded, client.i_batch, token_str.c_str()); if (client.n_decoded > 2 && - (id == llama_token_eos(ctx) || + (id == llama_token_eos(model) || (params.n_predict > 0 && client.n_decoded + client.n_prompt >= 
params.n_predict) || client.response.find("User:") != std::string::npos || client.response.find('\n') != std::string::npos)) { diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 7d0038bd4..3c2542e8c 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -227,7 +227,7 @@ static results_perplexity perplexity_v2(llama_context * ctx, const gpt_params & // add BOS token for the first batch of each chunk if (add_bos && j == 0) { - tokens[batch_start] = llama_token_bos(ctx); + tokens[batch_start] = llama_token_bos(llama_get_model(ctx)); } const auto batch_logits = llama_get_logits(ctx); @@ -350,7 +350,7 @@ static results_perplexity perplexity(llama_context * ctx, const gpt_params & par // add BOS token for the first batch of each chunk if (add_bos && j == 0) { - tokens[batch_start] = llama_token_bos(ctx); + tokens[batch_start] = llama_token_bos(llama_get_model(ctx)); } if (llama_decode(ctx, llama_batch_get_one(tokens.data() + batch_start, batch_size, j * n_batch, 0))) { diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c3279dbc9..693f9b773 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -726,7 +726,7 @@ struct llama_server_context if (json_value(data, "ignore_eos", false)) { - slot->sparams.logit_bias[llama_token_eos(ctx)] = -INFINITY; + slot->sparams.logit_bias[llama_token_eos(model)] = -INFINITY; } const auto &logit_bias = data.find("logit_bias"); @@ -1056,7 +1056,7 @@ struct llama_server_context slot.has_next_token = false; } - if (!slot.cache_tokens.empty() && result.tok == llama_token_eos(ctx)) + if (!slot.cache_tokens.empty() && result.tok == llama_token_eos(model)) { slot.stopped_eos = true; slot.has_next_token = false; @@ -1130,7 +1130,7 @@ struct llama_server_context json get_formated_generation(llama_client_slot &slot) { - const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(ctx)); + const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(model)); const bool ignore_eos = eos_bias != slot.sparams.logit_bias.end() && eos_bias->second < 0.0f && std::isinf(eos_bias->second); return json { @@ -1555,11 +1555,11 @@ struct llama_server_context suffix_tokens.erase(suffix_tokens.begin()); } - prefix_tokens.insert(prefix_tokens.begin(), llama_token_prefix(ctx)); - prefix_tokens.insert(prefix_tokens.begin(), llama_token_bos(ctx)); // always add BOS - prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(ctx)); + prefix_tokens.insert(prefix_tokens.begin(), llama_token_prefix(model)); + prefix_tokens.insert(prefix_tokens.begin(), llama_token_bos(model)); // always add BOS + prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(model)); prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); - prefix_tokens.push_back(llama_token_middle(ctx)); + prefix_tokens.push_back(llama_token_middle(model)); prompt_tokens = prefix_tokens; } else diff --git a/examples/simple/simple.cpp b/examples/simple/simple.cpp index 55385f566..f376c0509 100644 --- a/examples/simple/simple.cpp +++ b/examples/simple/simple.cpp @@ -138,7 +138,7 @@ int main(int argc, char ** argv) { const llama_token new_token_id = llama_sample_token_greedy(ctx, &candidates_p); // is it an end of stream? 
- if (new_token_id == llama_token_eos(ctx) || n_cur == n_len) { + if (new_token_id == llama_token_eos(model) || n_cur == n_len) { LOG_TEE("\n"); break; diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 894321ce9..92ad27e8e 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -163,7 +163,7 @@ int main(int argc, char ** argv) { printf("%s", token_str.c_str()); fflush(stdout); - if (id == llama_token_eos(ctx_tgt)) { + if (id == llama_token_eos(model_tgt)) { has_eos = true; } diff --git a/llama.cpp b/llama.cpp index c63e6251c..61f30c398 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7493,7 +7493,7 @@ void llama_sample_grammar(struct llama_context * ctx, llama_token_data_array * c } } - const llama_token eos = llama_token_eos(ctx); + const llama_token eos = llama_token_eos(&ctx->model); std::vector, llama_partial_utf8>> candidates_decoded; std::vector candidates_grammar; @@ -7703,7 +7703,7 @@ llama_token llama_sample_token(struct llama_context * ctx, llama_token_data_arra void llama_grammar_accept_token(struct llama_context * ctx, struct llama_grammar * grammar, llama_token token) { const int64_t t_start_sample_us = ggml_time_us(); - if (token == llama_token_eos(ctx)) { + if (token == llama_token_eos(&ctx->model)) { for (const auto & stack : grammar->stacks) { if (stack.empty()) { return; @@ -8912,7 +8912,7 @@ struct llama_context * llama_new_context_with_model( // build worst-case graph int n_tokens = (int)std::min(cparams.n_ctx, cparams.n_batch); int n_past = cparams.n_ctx - n_tokens; - llama_token token = llama_token_bos(ctx); // not actually used by llama_build_graph, but required to choose between token and embedding inputs graph + llama_token token = llama_token_bos(&ctx->model); // not actually used by llama_build_graph, but required to choose between token and embedding inputs graph ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0)); #ifdef GGML_USE_METAL @@ -9673,43 +9673,44 @@ float * llama_get_embeddings(struct llama_context * ctx) { return ctx->embedding.data(); } -const char * llama_token_get_text(const struct llama_context * ctx, llama_token token) { - return ctx->model.vocab.id_to_token[token].text.c_str(); +const char * llama_token_get_text(const struct llama_model * model, llama_token token) { + return model->vocab.id_to_token[token].text.c_str(); } -float llama_token_get_score(const struct llama_context * ctx, llama_token token) { - return ctx->model.vocab.id_to_token[token].score; +float llama_token_get_score(const struct llama_model * model, llama_token token) { + return model->vocab.id_to_token[token].score; } -llama_token_type llama_token_get_type(const struct llama_context * ctx, llama_token token) { - return ctx->model.vocab.id_to_token[token].type; +llama_token_type llama_token_get_type(const struct llama_model * model, llama_token token) { + return model->vocab.id_to_token[token].type; } -llama_token llama_token_bos(const struct llama_context * ctx) { - return ctx->model.vocab.special_bos_id; +llama_token llama_token_bos(const struct llama_model * model) { + return model->vocab.special_bos_id; } -llama_token llama_token_eos(const struct llama_context * ctx) { - return ctx->model.vocab.special_eos_id; +llama_token llama_token_eos(const struct llama_model * model) { + return model->vocab.special_eos_id; } -llama_token llama_token_nl(const struct llama_context * ctx) { - return ctx->model.vocab.linefeed_id; -} -llama_token llama_token_prefix(const 
struct llama_context * ctx) { - return ctx->model.vocab.special_prefix_id; +llama_token llama_token_nl(const struct llama_model * model) { + return model->vocab.linefeed_id; } -llama_token llama_token_middle(const struct llama_context * ctx) { - return ctx->model.vocab.special_middle_id; +llama_token llama_token_prefix(const struct llama_model * model) { + return model->vocab.special_prefix_id; } -llama_token llama_token_suffix(const struct llama_context * ctx) { - return ctx->model.vocab.special_suffix_id; +llama_token llama_token_middle(const struct llama_model * model) { + return model->vocab.special_middle_id; } -llama_token llama_token_eot(const struct llama_context * ctx) { - return ctx->model.vocab.special_eot_id; +llama_token llama_token_suffix(const struct llama_model * model) { + return model->vocab.special_suffix_id; +} + +llama_token llama_token_eot(const struct llama_model * model) { + return model->vocab.special_eot_id; } int llama_tokenize( diff --git a/llama.h b/llama.h index 306f5b383..2f2fee0e2 100644 --- a/llama.h +++ b/llama.h @@ -494,21 +494,22 @@ extern "C" { // Vocab // - LLAMA_API const char * llama_token_get_text(const struct llama_context * ctx, llama_token token); + LLAMA_API const char * llama_token_get_text(const struct llama_model * model, llama_token token); - LLAMA_API float llama_token_get_score(const struct llama_context * ctx, llama_token token); + LLAMA_API float llama_token_get_score(const struct llama_model * model, llama_token token); - LLAMA_API enum llama_token_type llama_token_get_type(const struct llama_context * ctx, llama_token token); + LLAMA_API enum llama_token_type llama_token_get_type(const struct llama_model * model, llama_token token); // Special tokens - LLAMA_API llama_token llama_token_bos(const struct llama_context * ctx); // beginning-of-sentence - LLAMA_API llama_token llama_token_eos(const struct llama_context * ctx); // end-of-sentence - LLAMA_API llama_token llama_token_nl (const struct llama_context * ctx); // next-line + LLAMA_API llama_token llama_token_bos(const struct llama_model * model); // beginning-of-sentence + LLAMA_API llama_token llama_token_eos(const struct llama_model * model); // end-of-sentence + LLAMA_API llama_token llama_token_nl (const struct llama_model * model); // next-line + // codellama infill tokens - LLAMA_API llama_token llama_token_prefix(const struct llama_context * ctx); // Beginning of infill prefix - LLAMA_API llama_token llama_token_middle(const struct llama_context * ctx); // Beginning of infill middle - LLAMA_API llama_token llama_token_suffix(const struct llama_context * ctx); // Beginning of infill suffix - LLAMA_API llama_token llama_token_eot (const struct llama_context * ctx); // End of infill middle + LLAMA_API llama_token llama_token_prefix(const struct llama_model * model); // Beginning of infill prefix + LLAMA_API llama_token llama_token_middle(const struct llama_model * model); // Beginning of infill middle + LLAMA_API llama_token llama_token_suffix(const struct llama_model * model); // Beginning of infill suffix + LLAMA_API llama_token llama_token_eot (const struct llama_model * model); // End of infill middle // // Tokenization From 69a6735087c3634963c642fd69f0851ac479cd78 Mon Sep 17 00:00:00 2001 From: Galunid Date: Mon, 23 Oct 2023 21:46:00 +0200 Subject: [PATCH 017/859] Update special token handling in conversion scripts for gpt2 derived tokenizers (#3746) We still have the heads up in `README.md` regarding `bpe` tokenizers and this patch is needed for - a couple of tokenizer 
tests - some more `special` and `non-special` added tokens handling (as far as I understand it) * Update special token handling * Add mpt --- convert-bloom-hf-to-gguf.py | 17 +++++++++++++---- convert-gptneox-hf-to-gguf.py | 17 +++++++++++++---- convert-mpt-hf-to-gguf.py | 6 ++++-- convert-refact-hf-to-gguf.py | 17 +++++++++++++---- convert-starcoder-hf-to-gguf.py | 18 +++++++++++++----- 5 files changed, 56 insertions(+), 19 deletions(-) diff --git a/convert-bloom-hf-to-gguf.py b/convert-bloom-hf-to-gguf.py index 14dbd793c..6e866d943 100755 --- a/convert-bloom-hf-to-gguf.py +++ b/convert-bloom-hf-to-gguf.py @@ -118,15 +118,24 @@ tokenizer = AutoTokenizer.from_pretrained(dir_model) vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) assert max(tokenizer.vocab.values()) < vocab_size +added_vocab = tokenizer.get_added_vocab() reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} for i in range(vocab_size): - tokens.append(reverse_vocab[i] if i in reverse_vocab else f"[PAD{i}]") - scores.append(0.0) # dummy - toktypes.append(gguf.TokenType.NORMAL) + if i not in reverse_vocab: + tokens.append(f"[PAD{i}]") + toktypes.append(gguf.TokenType.USER_DEFINED) + elif reverse_vocab[i] in added_vocab: + tokens.append(reverse_vocab[i]) + if tokenizer.added_tokens_decoder[i].special: + toktypes.append(gguf.TokenType.CONTROL) + else: + toktypes.append(gguf.TokenType.USER_DEFINED) + else: + tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.NORMAL) gguf_writer.add_token_list(tokens) -gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) special_vocab = gguf.SpecialVocab(dir_model, load_merges=True, n_vocab = len(tokens)) diff --git a/convert-gptneox-hf-to-gguf.py b/convert-gptneox-hf-to-gguf.py index f1599b0c4..02d1fdf16 100755 --- a/convert-gptneox-hf-to-gguf.py +++ b/convert-gptneox-hf-to-gguf.py @@ -123,15 +123,24 @@ tokenizer = AutoTokenizer.from_pretrained(dir_model) vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) assert max(tokenizer.vocab.values()) < vocab_size +added_vocab = tokenizer.get_added_vocab() reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} for i in range(vocab_size): - tokens.append(reverse_vocab[i] if i in reverse_vocab else f"[PAD{i}]") - scores.append(0.0) # dummy - toktypes.append(gguf.TokenType.NORMAL) + if i not in reverse_vocab: + tokens.append(f"[PAD{i}]") + toktypes.append(gguf.TokenType.USER_DEFINED) + elif reverse_vocab[i] in added_vocab: + tokens.append(reverse_vocab[i]) + if tokenizer.added_tokens_decoder[i].special: + toktypes.append(gguf.TokenType.CONTROL) + else: + toktypes.append(gguf.TokenType.USER_DEFINED) + else: + tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.NORMAL) gguf_writer.add_token_list(tokens) -gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) diff --git a/convert-mpt-hf-to-gguf.py b/convert-mpt-hf-to-gguf.py index 2d2fa2329..70d154b3f 100755 --- a/convert-mpt-hf-to-gguf.py +++ b/convert-mpt-hf-to-gguf.py @@ -136,9 +136,11 @@ for i in range(vocab_size): tokens.append(f"[PAD{i}]") toktypes.append(gguf.TokenType.USER_DEFINED) elif reverse_vocab[i] in added_vocab: - # NOTE: wouldn't we like to distinguish CONTROL tokens here? 
tokens.append(reverse_vocab[i]) - toktypes.append(gguf.TokenType.USER_DEFINED) + if tokenizer.added_tokens_decoder[i].special: + toktypes.append(gguf.TokenType.CONTROL) + else: + toktypes.append(gguf.TokenType.USER_DEFINED) else: tokens.append(reverse_vocab[i]) toktypes.append(gguf.TokenType.NORMAL) diff --git a/convert-refact-hf-to-gguf.py b/convert-refact-hf-to-gguf.py index 934f3852b..f0cfe84d8 100755 --- a/convert-refact-hf-to-gguf.py +++ b/convert-refact-hf-to-gguf.py @@ -139,15 +139,24 @@ tokenizer = AutoTokenizer.from_pretrained(dir_model) vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) assert max(tokenizer.vocab.values()) < vocab_size +added_vocab = tokenizer.get_added_vocab() reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} for i in range(vocab_size): - tokens.append(reverse_vocab[i] if i in reverse_vocab else f"[PAD{i}]") - scores.append(0.0) # dummy - toktypes.append(gguf.TokenType.NORMAL) + if i not in reverse_vocab: + tokens.append(f"[PAD{i}]") + toktypes.append(gguf.TokenType.USER_DEFINED) + elif reverse_vocab[i] in added_vocab: + tokens.append(reverse_vocab[i]) + if tokenizer.added_tokens_decoder[i].special: + toktypes.append(gguf.TokenType.CONTROL) + else: + toktypes.append(gguf.TokenType.USER_DEFINED) + else: + tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.NORMAL) gguf_writer.add_token_list(tokens) -gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) special_vocab = gguf.SpecialVocab(dir_model, load_merges=True, n_vocab = len(tokens)) diff --git a/convert-starcoder-hf-to-gguf.py b/convert-starcoder-hf-to-gguf.py index fe8815cbf..a9bfed85e 100755 --- a/convert-starcoder-hf-to-gguf.py +++ b/convert-starcoder-hf-to-gguf.py @@ -111,17 +111,25 @@ tokenizer = AutoTokenizer.from_pretrained(dir_model) vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) assert max(tokenizer.vocab.values()) < vocab_size +added_vocab = tokenizer.get_added_vocab() reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} for i in range(vocab_size): - tokens.append(reverse_vocab[i] if i in reverse_vocab else f"[PAD{i}]") - scores.append(0.0) # dummy - toktypes.append(gguf.TokenType.NORMAL) + if i not in reverse_vocab: + tokens.append(f"[PAD{i}]") + toktypes.append(gguf.TokenType.USER_DEFINED) + elif reverse_vocab[i] in added_vocab: + tokens.append(reverse_vocab[i]) + if tokenizer.added_tokens_decoder[i].special: + toktypes.append(gguf.TokenType.CONTROL) + else: + toktypes.append(gguf.TokenType.USER_DEFINED) + else: + tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.NORMAL) gguf_writer.add_token_list(tokens) -gguf_writer.add_token_scores(scores) gguf_writer.add_token_types(toktypes) - special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) special_vocab.add_to_gguf(gguf_writer) From 9d02956443e5c1ded29b7b5ed8a21bc01ba6f563 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Mon, 23 Oct 2023 22:57:16 +0300 Subject: [PATCH 018/859] issues : separate bug and enhancement template + no default title (#3748) --- .github/ISSUE_TEMPLATE/{custom.md => bug.md} | 9 +++---- .github/ISSUE_TEMPLATE/enhancement.md | 28 ++++++++++++++++++++ 2 files changed, 32 insertions(+), 5 deletions(-) rename .github/ISSUE_TEMPLATE/{custom.md => bug.md} (96%) create mode 100644 .github/ISSUE_TEMPLATE/enhancement.md diff --git a/.github/ISSUE_TEMPLATE/custom.md b/.github/ISSUE_TEMPLATE/bug.md similarity index 96% rename from 
.github/ISSUE_TEMPLATE/custom.md rename to .github/ISSUE_TEMPLATE/bug.md index 8fd955356..d7879b232 100644 --- a/.github/ISSUE_TEMPLATE/custom.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -1,8 +1,7 @@ --- -name: Issue and enhancement template -about: Used to report issues and request enhancements for llama.cpp -title: "[User] Insert summary of your issue or enhancement.." -labels: '' +name: Bug template +about: Used to report bugs in llama.cpp +labels: ["bug"] assignees: '' --- @@ -46,7 +45,7 @@ $ g++ --version # Failure Information (for bugs) -Please help provide information about the failure if this is a bug. If it is not a bug, please remove the rest of this template. +Please help provide information about the failure / bug. # Steps to Reproduce diff --git a/.github/ISSUE_TEMPLATE/enhancement.md b/.github/ISSUE_TEMPLATE/enhancement.md new file mode 100644 index 000000000..dcffda750 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/enhancement.md @@ -0,0 +1,28 @@ +--- +name: Enhancement template +about: Used to request enhancements for llama.cpp +labels: ["enhancement"] +assignees: '' + +--- + +# Prerequisites + +Please answer the following questions for yourself before submitting an issue. + +- [ ] I am running the latest code. Development is very rapid so there are no tagged versions as of now. +- [ ] I carefully followed the [README.md](https://github.com/ggerganov/llama.cpp/blob/master/README.md). +- [ ] I [searched using keywords relevant to my issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/filtering-and-searching-issues-and-pull-requests) to make sure that I am creating a new issue that is not already open (or closed). +- [ ] I reviewed the [Discussions](https://github.com/ggerganov/llama.cpp/discussions), and have a new bug or useful enhancement to share. + +# Feature Description + +Please provide a detailed written description of what you were trying to do, and what you expected `llama.cpp` to do as an enhancement. + +# Motivation + +Please provide a detailed written description of reasons why this feature is necessary and how it is useful to `llama.cpp` users. + +# Possible Implementation + +If you have an idea as to how it can be implemented, please write a detailed description. Feel free to give links to external sources or share visuals that might be helpful to understand the details better. From e3932593d46c30145301a13097895f9376cba509 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 23 Oct 2023 23:46:05 +0300 Subject: [PATCH 019/859] Revert "make : add optional CUDA_NATIVE_ARCH (#2482)" This reverts commit 96981f37b1e3f450d9e63e571514217bf60f0a7f. 
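Note that with the reverted commit applied, a build defining `CUDA_DOCKER_ARCH` but not `CUDA_NATIVE_ARCH` ended up passing two conflicting flags to nvcc, `-arch=$(CUDA_DOCKER_ARCH)` followed by `-arch=native`.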
See: https://github.com/ggerganov/llama.cpp/pull/2482#issuecomment-1775975866 --- Makefile | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 705c4acb4..80179631f 100644 --- a/Makefile +++ b/Makefile @@ -391,12 +391,9 @@ else endif #LLAMA_CUDA_NVCC ifdef CUDA_DOCKER_ARCH NVCCFLAGS += -Wno-deprecated-gpu-targets -arch=$(CUDA_DOCKER_ARCH) -endif # CUDA_DOCKER_ARCH -ifdef CUDA_NATIVE_ARCH - NVCCFLAGS += -arch=$(CUDA_NATIVE_ARCH) else NVCCFLAGS += -arch=native -endif # CUDA_NATIVE_ARCH +endif # CUDA_DOCKER_ARCH ifdef LLAMA_CUDA_FORCE_DMMV NVCCFLAGS += -DGGML_CUDA_FORCE_DMMV endif # LLAMA_CUDA_FORCE_DMMV From 469c9addef75893e6be12edda852d12e840bf064 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 24 Oct 2023 09:46:50 +0300 Subject: [PATCH 020/859] metal : handle ggml_scale for n%4 != 0 (close #3754) ggml-ci --- ggml-metal.m | 18 +++++++++++++----- ggml-metal.metal | 10 +++++++++- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index c908106be..c1901dca7 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -62,6 +62,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul); GGML_METAL_DECL_KERNEL(mul_row); // TODO: avoid this extra kernel, instead extend the "mul" kernel to support broadcast GGML_METAL_DECL_KERNEL(scale); + GGML_METAL_DECL_KERNEL(scale_4); GGML_METAL_DECL_KERNEL(silu); GGML_METAL_DECL_KERNEL(relu); GGML_METAL_DECL_KERNEL(gelu); @@ -249,6 +250,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul); GGML_METAL_ADD_KERNEL(mul_row); GGML_METAL_ADD_KERNEL(scale); + GGML_METAL_ADD_KERNEL(scale_4); GGML_METAL_ADD_KERNEL(silu); GGML_METAL_ADD_KERNEL(relu); GGML_METAL_ADD_KERNEL(gelu); @@ -347,6 +349,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul); GGML_METAL_DEL_KERNEL(mul_row); GGML_METAL_DEL_KERNEL(scale); + GGML_METAL_DEL_KERNEL(scale_4); GGML_METAL_DEL_KERNEL(silu); GGML_METAL_DEL_KERNEL(relu); GGML_METAL_DEL_KERNEL(gelu); @@ -923,15 +926,20 @@ void ggml_metal_graph_compute( const float scale = *(const float *) src1->data; - [encoder setComputePipelineState:ctx->pipeline_scale]; + int64_t n = ggml_nelements(dst); + + if (n % 4 == 0) { + n /= 4; + [encoder setComputePipelineState:ctx->pipeline_scale_4]; + } else { + [encoder setComputePipelineState:ctx->pipeline_scale]; + } + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; - const int64_t n = ggml_nelements(dst); - GGML_ASSERT(n % 4 == 0); - - [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; } break; case GGML_OP_UNARY: switch (ggml_get_unary_op(gf->nodes[i])) { diff --git a/ggml-metal.metal b/ggml-metal.metal index 69fc71362..f4b460564 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -125,9 +125,17 @@ kernel void kernel_mul_row( } kernel void kernel_scale( + device const float * src0, + device float * dst, + constant float & scale, + uint tpig[[thread_position_in_grid]]) { + dst[tpig] = src0[tpig] * scale; +} + +kernel void kernel_scale_4( device const float4 * src0, device float4 * dst, - constant float & scale, + constant float & scale, uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] * scale; } From daab3d7f45832e10773c99f3484b0d5b14d86c0c Mon Sep 17 00:00:00 2001 From: Galunid 
Date: Tue, 24 Oct 2023 09:17:17 +0200 Subject: [PATCH 021/859] Add more tokenizer tests (#3742) * Add more tokenizer tests * Add starcoder * Update test vocab files * Restrict bpe tokenizer tests to unicode planes * Update comment * Comment cosmetics * Remove bloom vocab/test --- models/ggml-vocab-baichuan.gguf | Bin 0 -> 1340998 bytes models/ggml-vocab-gpt-neox.gguf | Bin 0 -> 1771431 bytes models/ggml-vocab-refact.gguf | Bin 0 -> 1720666 bytes models/ggml-vocab-starcoder.gguf | Bin 0 -> 1719281 bytes tests/CMakeLists.txt | 4 ++++ tests/test-tokenizer-1-bpe.cpp | 15 ++++++++++++--- 6 files changed, 16 insertions(+), 3 deletions(-) create mode 100644 models/ggml-vocab-baichuan.gguf create mode 100644 models/ggml-vocab-gpt-neox.gguf create mode 100644 models/ggml-vocab-refact.gguf create mode 100644 models/ggml-vocab-starcoder.gguf diff --git a/models/ggml-vocab-baichuan.gguf b/models/ggml-vocab-baichuan.gguf new file mode 100644 index 0000000000000000000000000000000000000000..7caaf8239b052b1fc2f2b74441fe377a73758bd1 GIT binary patch literal 1340998 zcmZs^`Ey;@mG9@wc`rk~N+p#_C8_FmhHm$>ED4(ZY$X5)4r)$KP-dK=+y@s2z!5KS z;5!hal2lQWDN!ROYqn?&wnkebB}1#eOOi* z=bnA`-fQi(*Ix79x9|SF|K4}fzyIRKpa12J|6eZG%3jg^LDB21RD-fJ81~Bln0NM{ z{aYxi&dRV@`%fp2-|_Zq#cI>;Ke_Sy%#C(8-8J*y-uTHs{lTDI>({-XEf#~~XWim- zJseo?K7&K$p!kdF1AkHXs^w~}=>Em}VCBEw-9@Kf87Y>d9TynkNWpEt45O_-Q2%7 z+RV=#O%JmN=kNV@ZVdG={f~bU->pogrYlo7r-z$)6Rj3{>BT}%Tk*!p`bYgiuU=ce z*XC~jZam+dez^HYi|BXd*3|FHt=cbd0?hR^k@cI?-#5$eD|bjocVr#ik-k{DOIhRx7uszE*D6 zzIc-+xKJ;iP7@l~V>ZF&eUL-7x_qxusQvp94>zZ$o9$iTc_B??LGdiV4;Iq|2a7sE z-Z*vL;(y%ge@yuwxA`Bp`yY4sA9wm6cljT8`ycoC6#wb#=hC0gr$2w0{(K?*`C|I> zrS#{^>CacvpRcArUrT>J?SKB`nMeH3fBO32^yi!Y=Rcm2Uw`Gl{?j+_It)MXKmOAH z_+Px$Kc0Cq{rOb-GhqG4GXd*Ao_RJkIPZV{{ont6(9mDrbd&#l(@n9sH{Ep8znqZ3 zcYgGjdydQ=|LM`Wd;I|VQLpTmy;HE}TUvg*wdJ>|mfvn``R(?W-|lGn?ar3p?rQn% z?v~%~x%WSF$O4XAZ)yAS*0vv~+J3yP?Z?~Oe!QdY$2;48ysPcUyW4(jgJ`M^qNz5B zrrIEyYJ+I14Wg+wh^E>gnree+stuy4Hi&L(gXp$4h;D0x=(aY9Zfk?+wl;`vYlG;v zHi&L(gXp$4h;DC#==L^hbVZH4XDR@iQBh3(c>*bpRIU_+8<`?VD|WQn%6t*{|Y zw6$%84SAxiZ7Xa@6fJELDcXK*g$<#it!*o8h!t&ZTVX@6XlvUF8=^&9+g8{RE?U|m zU9|n$3LEl8TiaIHkTBZXw!(&t(bl#VHl&P}wul*RzqZ1LpwZU06*feTwzjRXA#Aj@ zZG{bSqpfW#YzQ1JZIL+Ier<&fnWL?3D{M#|ZEag&L+)s6+X@?!M@w5okG5Z1VMF+6 zYugGN;zwKCR@e|g+S<0lh6vKuwiPynke0ScA#K05!iF5u*0vQkB$2kZt*{}Bw6$%8 z4QZsME#gSqudT2lkhHaJg$iX=~dC+te*>uua|42HVsvZLm$< z(gxeqEp4z(-O>iz)GckWP2JK4+te*>uua|43R|mOGSw=VOts1-Q>}8zRI6Mv)hd@v zwaO(^t#Zj!t6VbGDwj;P$|X~+a>*1Xj(<)MQ>}8z6h@A=wyo=inWL?3>$+j+XlvWL zZmn|36t<3*9$Mv+DXblBZChc(-qF^!6*ep$ZEag&!{*V_wpA{fYL!c-FnhH1&hDwj-Q6KUz8RW6yrD$>@r z6*lZ5ZEag&!!pv=wiPyPBQ0%P<&vpZxnv6SNLvrBuwfu+YugGNCX%+ct*~JvX=~dG zTdQ0$g`K3OhgP{{3QI{_+g8}Hm9({Og$-*-TiaIHu$P3t?Uo0TNA&5|>}e`{x-EOU zJ$t$%d%81wx+{CSJA2CKGM&Af$)0Ajr#;!zT=ukApO8reK4cMn%HTux(03Vp$Qt@C zgAdt4-(~P2OX#}{zFRZ+Zq4AkHG}Wg48B`4_>fCv1mqEY%HTu((03Vp*g^DN1|N10 zeV4(9okQPc@J(g#O=a*+W$;a9@J(g#A)Cku$Rzrd!G{c@?=tw1IrLoyA2No%%iu$% z(03Vpw`K6%mce&h2H$NNe79xrA)m+y$R+xe!H0cB-(~P&57BoSeAqwqT?QZa4tL3w@WtcSi=_ z9T|LgWboaQ!FNXnA99L}fPA7)8GP7D^j!uYb`gD-!G|40-(~P&_t1A4e0OH>-I>96 zX9nM$8GLtU@FAJkU{ia1|KqqzRTc4#?W^ee0OE=-Ic+2R|el* z8GLtT@FB0r2*@e=l);C+MBiobVIR?V8GP77^j!uY_78oR!FP8C-`yE}cW3b3oxyi^ z1|KqujDW17PZ@m3Ci*Ue4_QRtW$+<;=(`NQT-LZJgYTXUzI!tG?#bZ0Cxh>v48G|M zzUd6U=?uQ<48G|MzUd6U=?uQ<48G|MzUgfJrZf1aGx(-6_@*=XrZf1aGx%mQ_+~Qr zW-|C@GWcdP_+~QrW-|C@GWcdP_+~QrW-|C@GWcdP_+~QrW-|C@GWcdQ_+~TsW;6I^ zGx%mR_+~TsW;6I^Gx%mR_+~TsW;6I^Gx%mR_+~TsW;6I^Gx+vo@a@Uq+mpe!CxdTK z2H&0xzC9UydouX;Wbp0D;MS%Q^S|nN+bf;(TTd&cNBQy8_twS0|Ei;ha!p!p`1>OIE#vD-=hu<1YX7S0%dblw 
zhnZcZ#padon`acDV*pJ+@0HvP9Og3;mi!c>H7if-qes)XGqNBSjpNsG9w*jH{aC}8 zA-N&q_RLP-W~Bc=S8w(dSGJ^k_J7c?{d~IGs!my5yYF@FJODyKra&A(QfJjIYYQL{ zkphAOLQ1J_nQ2G_8UQI%V@gWRshou7lmHE8_q_Il@~!)|jlCoIFZ$;@#VWd>6!9cG!l8#zf6nbE4lJm2T;4Ix0 zcULe~c^&Ae;n2xeenFwiPCUek>TJ*l833=d;5l9m(Jcd(Hf793d& z%pT08Wl~|52WY!=cSCPXM-|Hn;yh{lg4y)c05{7a%Z~58A*O|+(_6V`k5t}Sh3%;( zfz0{hJx1SGK>cuRqL6=*pVSK(=$4a$gp8?sh;E;Hq6nTU?f=saG9+mP;Q%PDFxsIW zc&l3!!GW>RQNT$PVJ*8I$ zFN=vIdwju%3^>P}YRy~(!~EBe57C=U-#gOk;MYJ3>7X&qXDePJ(3jG>=c1T$J@6s@ zsE(^SegG^E$Dl|Z|7~4e(ioHQStq-19a@bLMfP+vd-BHHN!~tz!&=zc_Yopx>}v_| zL{s`JU;20a8f@s{Y?k!$MvZBV%Zkq*@JT*^J>34}NU<17mYfWUrM7Pb28ybN(uTyZ z;lJ+jW)QtEd;|7oC9ELEN3913MANK87kfdcCDVTbQ$NY$rebR{UP$w<`UmsGp_dkG zffU9_>}>(C!l@vex*=Tk=r-!*YpXJD7#U`@g{%*5SzaDj+;W7|XWYQJbBO35l`)OT zWsJW|<}snuX|ufaBbGU7o8?&;WrTc5Oh-3bA1>v0?n*hvY~zjQN;76QF>m9}rQ;yK z43kDm8c>k7dqF(hKjbqNlAXvV!UIzXrLUGQBHo-C*C4rx zaO`99l}-@m29Mf8;Dg4KSCk8ux4-og?y5EaMa3g^Wg_?cko0Faj@a#e2U2M2{e2_B-1N;Pk#Lgwjp^q zENTOxwyZlBB36<^vJLbZ#aBQm=vDWSjM6~2lsP_^LdM$Y5vW>SfZ6H~J6A&+Xstgd zFRfQO1FL|DrGra(%&?u~Cz;$bdY=G#p<>dc$}H^sVUo;m{>;5Y6mYoPvB5~6+R!wq z@u&r)c}rU#u0c;A{W?mnnRxi>Yg94+)Br?kaoh!lXDYFa?A-Vbl%ABtHA1=j=KtL# zV3Dt0DDSXgCBKMql-La;l4#h}s!Rm}69ddb=|;SOlS`M#G-K_B^DUyP>3yedBoXrZ zv8lSvRN0q-l|M8hWI!Mfutg;d20x$)s z9>WI+&~ykP!=8Tco>_v|zmr5W?4YeDVXNpH*yS<tzbQ6ruL`alTZeDsOwQ?{R zUOsvN^>w_Aw*p(ecb%+JUI$IfzqX-aUgL-rt044n|6EP1W27Kso$rcM+WI zzp@KoV>fhp;p2<<^7Ci7EBbpoC$8qF73Yl(MEDmMOBp$1_(ugzM|QTjl`mfid2T*} zZI;z5pJ3Ru$(KH%n|%rV&^md(b@GAlO7mmoGDR!u|2Ht&qja?S;-W1EOia5Y zf9Rf4JKiWE^r5fubjD(PUAR~l%nC2dYxoHAo}cupYW2fX6<2l84&EpV`@C|@f9;0Q zF$Hw?%>WtqN7=V{a#AD-%Wb#5w#av^Gl)6f##i{7If`B$tvyRhuW;sPYY#wk${79UfAbfqeXOQD47+jZ8Rwd` z$GrK1-CGFexfaw4CqBrVNF9FExU}sZ==M%npO2qtu0F}`o$|U#+6pg}|Dq-PE0JmB z-~Pe>4o3r4W^X(px5O@L(Ak2ulUQUMTg?UkucaQ2Mq zb{}8K82yVx4qH~=M^RDh<^AT{E8bDs)JEvCnRug%6aXS7qTZ@=$q>UQy+?I><#3jo zF-50Y_?DoA(>|NG-(G7zI?h%jW!sgCb40%HL`RdKTqSh&mziK_7luHc#|;!WOXlTb zZN0k0nENjjO3}aQ$WWrT?8XtwD)d)jG{|3`^U2Cq7Mk;9C1w;QbRP|~3BQK>{d;~4 zB=UWGv6O4}PeG}ItC@f!84n}3HspHr0n-+Y`681Z=tyKD>}LK)y8&9^C~%SIkMB3{ zt|M~)!_KX5$cK#8OTIT_Y1ohPnl`Uo1x%2CA6PeQJ;qfpgP$|3@O1ja)?HCN@_b{I zy<8g19W_)TeB&feld=r`H50dEXw=e*$&YV|8qqp_K`d(oz|sT}_m>H+$qcUhc1oPS z%(o6^Ck!M0S#$G6^T$>HMaOWv`!eW&vn|Rex&-=6f5eL;4X=RV^4@#`9^DM3-e7N6 zuKSyq<@`-b=qhMl z2Z6wF@4^n_SERvk<#DrUUW&$JZDe)HFfP=OM&oQpdUe7jNBlhpqBo{v=nH>(~%QAYnm@IhJSEUi#cpnicL@prwePS0RO*G11 zwxI5KMvS1DVMO=`02NNvCe+;XS$%S5MsLZc^rAn6;CjN|W8Us$3#Bdny3d0nj2#kx zyhTn)rzjE#M|29ZvM`TO5cGB4YJMFiv>~Wu`2BbPAj1Q&L#>wN4J@ff+>B?#+)K=F z45gtoZbj4*$|1NEDfY8QTD(+=Mcs#!eFZ?Dyo$CJ*4RBLLG>At@cvKKY(D&y)!FGq zwU)ncZ7lmQ9t|et&)7c{E>A?BF@VMN=52Hf&|yB8h+wwtE>Imm{y_xY%>t{ZKzKfz zNEkT-VE!@WfD{bgqU>Ap$X}AC@7^n2BL`s#wPL~iYqsWmMGThcHrc(+63L8Z~O^E|YmEg7IPhT=++9Gfm9j2CLipPS7N&@L^jA2mFphEd`YZGXhdvk&^_`EC`nII76p7ubRD|4R5s6aD zC*y;UE?)0M=mQ8d_5%XSDq*w20vmQTT4FZUx*8DB8Gs^}HVC?xs70Z9%YFzwqIu^M zS;>fvc2N?bDT9P!3a4|~G{}O7SBS;s^DGV90|AFafex5gF?hZg(hhk~lHntHqL#oA| zCX-{7F=r%XQj>K_t(E2(N|9AN_iyL7F8G4mZc}X)u5jsSPla@_z9m%!B$=PRO00p@ z#T1G=`d=-uOj?HG1(Yd9ktirpryl;6JycvdCe}7O&8NlQ8M`|9QpC9DZAJo^vI^Kv zoKm|R&@l+83BdM`$EBGQcy_$t?X5-r`2bKFrz8#c$;Kac_j}(t!gL{;-i;kmzGbIH z#gI-Hh`Myyr?fFf^hQZ<2+idFnCt>!^hQ9j|{E zLB7=~e*z9DlvaA!6Nx=>pAmx^+jH~GX7kK*>3zqPf#)Osnr{yfHIn=(T(qHrm2vJR zXhwdb@x)cqlxw6XtG|mq<{;4+B8{OscAl8ATrQKR%211s;}~tdB#Gz=uZoQY&i>eH zSk$%~-g)b@cRDfkSf!J74ej>?ay@41feH9*jFOMl>2c^7nfCztB_!1#V z^@uJDv%0~IEWO=Tow04sdxJhI+e5x`d;9GJe`Y&iDW%LXrw`$frA7OB+}nm1aOcIh zTfcF;mQiOOV8>iYCSYg--aPh>At=iW?O$WF2QeJKfbHB2lK{6voz=&p#OCvLc3D|c zkju{nl)JO$#ZgyhB@Mv^l9%f-P+$v7#P`5I00Kp&c7TVE-=gfjU;+}FKSpjksu=|H 
z>2rghSc4gSf15uHB#xE*?tQLY{)_{W*(#$3j91Tnh9$rJlv7OvY>OJ%gpq!p7zbYn zIHkO3Zt?`8#;+USXYao8U)({lMoYCrZKS2_(VYqr!3@1nWW1112erURdeu{;T6@pK3-BOLu=+sA5M6vrPa3|V}_m%f{Cp)e2qHo-iaqR6LQcM&&-dH4y?ltJFyoMlHFa_ilH^ zm}Y(Xf~Zd0DnzqF_tCm|HM^h=^jSD{ycCA5H?eKkOPL`1$95GuiN6a2TI_8@ z6Bq5V2~e95aXBxh{0W=~_q@bP4n{f(e_+&DvXlr$m=WkHsoj<%cNBHOVy(zFuCgi3 zk1<0(dl4ls|I}O!#AK&M@Fs*eXLKXLrWv71yl_Rg>X-&?w{P7Rj^|)@K0LR8dA-&yR2n-8g+oce&1!EDYI4YXV zXXz$|E=?4g>+|{QkFAvpJJ%LlPvWxAMMO39)*`8no=7dlR_KzWL0=%*`VpC#%q`4v zX@lIkejYEYe__-WZ!G@ZyV1IED_e?d5YiVDG}zCg%XU1?CGW8C8$A_CC&=?W6Tm^gOL_&*g|@tcaceU07HgI3I(Y| zblK+AWL&%pWU<6)By-5p*^Za!YzXdWg=r<^3yqVL4T8WV%UIym{$HQ|=T*SPz+NY9 z#Ft(3v-~gr;S&MxHwOOJ`)`FXJHud|(iLFl=BDOr8^%JfVW^7XB39@S{wN;;r2d)S z>5J?rv7!h_BND`022C?a0`(#c6$awOJV$B&^&kCpe)R7h>h|Vfy7K1O(n{BN%oh2h zpiJCZT4g5sW@1W+i;eQkaH?`PE^;q)*c?K-lMjIK`09gJ!U=z)ox5Yi$dij`Oa-Brg8SrA^qgyxb&5E8#d)-Mf8Q<{bi+n9 zEV$a+y8Y&TzWTFQj!B8g1j6BEbbAb=D%E2(K`P4sPqY}!q-C4yLY+-g7U_v>Iif(| zr6=L2*GX3a;sb^BCLz`_v7kM8)cX3OzsxgEWi(Nr<<(J0R;9II=Tf{t>%=vTPSSlS zL9k)Zmz>NUCHpIcW#IgMP`Lz1&ApfaN<>}lj;oSCibb^4k6;3)(h6H8*{mRGZ^0>>QV?b>o@sOj&mCiWw zzsGQmSd@?&OZLqak(tp`$dl*cyYuo39NT*t1%rRbnFO7YzetE3h+ZF%?RP)r%P(QD zY`%-#zIlZU_PW1s!+oUUYwswFPljXONDo8W7EikP&<74Er+)>iAV81V`xE)4-+b-> z$Ermro=w9yoYdcW5gh}=UPSGydc;H0B%={8ptfF+;n-6h}0z!kA~xf6~(%c zTIZ*#RhY!pIfM5Tq&yCD>G0<;K1mhWXi*N+*sY&wzKg}R{cbgT@Y3fntsM-f8A+4P z2>c+NBq0Aq>zhll@5O6@m`5j3jl^6>NI=>L5bubKYM`@A<_R|o**n>`(R!u-x)ruTfVXYk*k!Ro-rfSDh$8iU(J(VfF+p;2iLn@`K0UUE_*=@hQLzKD{3}Hf4(s`>9@D(I5{|0 zOBp9{wpx;P?+k+S|0LatZ?%lB@m8ZC&IBQD9eb)Y1Zb8545q7C$YXNyeaV|bB9WM@ zmOT906R&inbR74v2_@C=ho$V{3zr1W0EmxeOsKA=3Ly4eTs_i*Pr!~8lxLG~HEbC+ zgE4-~q+t_8n@M1WhAEv|BfW@i86WO;!m2?t$1d(|%^r~ZA-Rdy??F5%q66j-n>LaC zYCZiD!pNA0&0oH6y}IF(gA*P{HM3N=UU(WOyjD82r&>CADkxtX>YiVFj}6-AOJ!;{ zcmqrr#(ox)sX{#^(O^VI8ovC^(H87po$$6x_lBjIkMDjzareDjQ# zNglRz9qHcF-8G^OFp?EEC)onJYIE4(y{w)p+M32@ag`&avwBc>Y!I2fq%J9aEC>1L@+N4lNw3D8bl1>~O7i&j;B`Wm2a?$FyBb;Yf2~GZ;NXz6dLm4c|Ea^-nIH z{Om;x{r-zI@Tlw}4>}7_jL08ZApO|D<0Q_$c?Kb@SJ7XMndQb4TE zh2Suyk{VNh@Gp+TvJ|wJ+ z(%L5LoNl;ia2gUcVJ)Ur>pJ-gbW527O9= zHt275TE+@*JlGx#xiU|8PjTT1{RJw$Y%>hGBr|V437fiY#A{DTSt4H#)nOXY?b}eM zg+W?GKMg;Hp#uFo3%f2-zRAQiXZ*NnO7dN2b*|uW_i-T})&G;so+74#pWJ)MJOI(+ zb@8hogh6Qw8mf=a)dF@`@G&M+hplq%P0HiaF-D9v$;rKj{MFC-+h3#x{2BC6cFa$c z&09NF3xo^p>5D*brn{7(WMccUUZ7)@?8rq#K}8KYkb;1FlC84&@cZVY zfPC*^yJXO$Z8;nnG|Y?ybA*o+6=oWdYKOy zWs*M((6($F?R3fB0t?Y{(HhkK^f&&C7-WtMe-DD#SRvL@(zyGJ{M=s)#gMxhHy;0t z%X%nDgac`&jtSfvQbb>8zpUWV2F!WV**>#-V)OQKr_Mbn1v9GMOxI9nxQmpE!RjTW zM&7+pHAv#b!66UP^0i7c7jj`Z|VhQsI2kB$<}5Wc3viQo=`V4%XK{WjDfJN!b%A(G0)(>w$OU7Zh>QlOjk~PXTCA%$}dlE;Nj3F_0E%z&T8| zxAe5@Aj@jlejusDtIG=^&}WOyuLF17PLTsY3jQD;G1|o%M>}?3+X(NxMwWh}aydH^dK&Ou)@rtXhSOVSmtFfs_LdS%F_e~B zMUG{Xqh-&|%ICXL)Z}wGz|su^HIjcJ7x-?^2VWa9s839gmen6O&s^Vm7g3Z%Cyt;gcQz({$NH~@1bShEBU~76Hw$2xyZvhAUpDCK^+{8 zZQ{zly5w^ia2xtfzIj1DK$)=QGa>-`QVk+gw4&zvU4ipjY=+{FdX1qb?ef_>D6ql| zPGU{=j1T@p&mn$o^!HrSsbxi!MEiBD1NS#^7u8xU;MMTwHu1*J6*8uGeM^Mxq_u zTO6J~lw?)SPhFPE3N0)4cT-)TeeZi_<4{CFU{Z?dK&D!TO3J``z%J}n&;t^_c^Ugk zm^(am$$iX=g!=d%!mkN{P=y=Piy~B)t7!Z>v6TIwj#)CCjjIZ4v;^Vp=_`dGVqjBX z6ln{?9u5Kzk$NRCI0?GZc8HYXkDovZ9wv?X@OFO{}{(v_J zOXf##&8ADrp3*N9m_3}dYF;ny;^yj8i6bS!VKSal^yjs>aPq4Ue6H`gpH(Cfd8WwI z=gY!EVxxw&AK0!T!}f<&CbPFayYMS^ENLn0M{zF2fe2RK3$u;t!g;Q%x|zY>q}CZj zSS$svcx_hWhYZQ#b+fhLIQEk1To?OvRqeQVi)Lk)1ic6rL=f$KbFiTcW^d(?xl?5c zbr7M7InC>A)L7}^{zq?@uJ<&Ks;2ZAbdWxs;e3Iuw#}UtVw9jw8^trrXhzZ-@Medr z;XwWP_V-gha5Q@$t;M+nisHfUG&cEnFL!PP6%o(+YjnoDM5{HaG7)^=Avim2R zn{QlnwA-Rpkb0Wd`TDu$l^}=(zJszM?}LK0) zGfi73H=8F{{V}bWsYMLWsDyquA8mndmHr+v@-&&=#8Z*{E}6uJjiUpm62GN^axr!l 
z=o+_Pl2XcRH1TY_l$mJ3R)}3D9MH%k@)HUf-80DZ6gCQ+ICM!GISMR4OygBs7*fzM zPB0%A=8UwVzhT^Vu~~xRW!A`-pO*>FNbVE|re&~_l0bPbDBt0YcSzCuo*zGsL(`S0 zfA|Og`_A20aAS&W%~4{q_15+cYi=^Ien)jAju3xpO0*Zpb2e;|N~fJ%@*9S<5{0rM zfYl;Ns0g<@=~X)zTlMy?I^cEP-7p_;FZTwIRzfLjGpYcR^<|EqbxilZh04KbI=Lt? z1W3&WPDK#13d}(XQCjIX$5m@QZs$g9=nIi&eENVRG^GX5Wc$ z6S^ty0?mu-E?J~}80sS-6fS7jFUS|OMBK&0yQf-bpUM21Cn*jtXCDh1v&ZI}zfX=v zu1xW3?J?Z5-cKy`Z7Zh>l8-;DT6h4+ZJ+EDme55?x8zW|cFldLygP(aK&0&za%Ign zuXvIeO!Ca+%1ds_~^4{bk66rTVb~ZQfG!v0CJpwK#)pc}4OMZbI z@Qg@x2sKpth5h8HPCJawwm*m@m!Fl>0mFF(eYX^0%P-70<~UvYIeY!gtF(MO7^~!c z(zVxuUL4s>%yAPh`I{$?vaRc1jn~THf!hU|%S48FDOx36PF-=-)A@U$^ zWW4%@q>3(FcxoA_O;0xhtGgTNyJ(T!Fy3FEBQq4crypxTfs%Y|-BLrU+n>?4U5$KbUE^^c5b z5BDYSO6&M3*qFR5?h-mPM-}W$GY0A@ieo;sT8;wt7%wotya+$N7ij5&M5=~{Drg#{ zul-e^?b15pN9+DMM0&cciNi@wD3r+T^!cb3-G1$&RK<0&$hxUJtU;Z^=7NpJM1Sg? z)NR4;ULuMTH$<61SiPA-{us*x1{rzWYqLV5XE?cxXgVx_2$0LY5HlC1j2Klev}i(J zyj{vm@PNX5E=`3fIp$`qG9SRX0+x7LxE5KvS$O&S%4ccrVF# z+`Rp8=ft;CJSc`*IkyKc(qyK~m#^T5VFCNPM+&>+q%wY)vv=ju$9pn_rc4+KM2LuQ z#~QlL8JvzY*}G%RW)EL@YkB+M8+GnPvO1W6N4YY`HZZi;|C2*2iXqM|UDE~;pOE6BUN_XhmBG@^VG*!w_Xr8(z)!yEP$Hff0orFP^=Gs||9E1jB5Hn%^`ZcLET)fcmrlCYQgGWhC64M<*Q%+Sci^sEE49cMLeV8I- z1`aE1YU4SS2+?{hdhe76#}a2s&#m&xVXe#JJQBpDGLHL6Y9djJE@R9LwFu~`n&(ag zWe%AdmP&66b?qq`o<2_of$Z^v=DA;ezOec*|H)Ryr@;NCnV9)7u*GyVz4(XNJG&ST z7d2n*QcC*X%BXZ_7vtp3Gq~Kz4X=ZxI#Z0H>PsN8+d)VE4d_#hlz#44V>M;#_h7X4 ziFdF*?X#0jSLJb_q!mN-1)-<=1X!n+k+8grk>(FQTTcGz6+xGqHS0q?eDGO*+_M>cFSurbV$yxKTk;45+@@BEO_Z(>^6p~q?--Dw#u5^{^iX%l zE`O^rA1*>8#Cm%i@CX=m+59Y9A!huIAeC3IKDw&j{KLvQ%12kklAJaoMN$ zkkeOH3a(wV72WGeo`h*VOpR>x+Z(H1fKgJR=>-V_sY!ft*}X=3%ne)3CuGC6yzl<< z>||-Q4phgji}Lv>v{61Lc3~>O1kueiH(DoyJY{c>|2;czLANeEOK0%)8aJ7pyBkt6 z2-{e>mK5;AE0I^PMfS`D^9w`zMOvbST;DzKBUc{{tASBurG#7-Zoe6@h1T8U@V$Fa z%*9dA4$0~%%opX(_o6bm#A(6-b`(`%O?#ONeZM0v(V#yKwj*C+2sx;3X;P;WWL=77 zVpm~1OZjAsEU*^VTkeWn$#4*2qH?*?A~-iWlVws4r`tJ;QVmH!`cf{l=948163t&U z)%PB*Y(6cZF^>P{h8zlqrARn$DH5a&Mh;eZEPO;rT;_77iJgRVmf$2bne-VV_%6$4 zgGo>1MGoJTe%r2C>l+d%O{x90@Le>2`aqnUGzdvrGV#*gahEZWvYUs3n4Mn1+9kAS z6uKv=9Ib~Rn8DtouD$)fyZ`6k`6S0bEZm$>dJ3N>L7Lgh72mojdS4hU^E2O;87@A> z;l@G-R44J$+3GdI_GQ@^M76pU^q<|O`CWyyZ@BrB3=T#&KP|C z4I0WeWOFxxH2P~s@lc;o9|bcNw7pJn2&NM8xOqjk!V*KFcqXL>7*S#^vr#g&W7^G6 zY8V|z8jJwpfB5~{$5$@Wzdicf{n+rcTNhB)ea_)2vLuR+0&GYF2=PuhPOfX1wOLDE zSU1ehrK6>wOjIwb<*885 zN_U*e^kf+5e#%TuoOJ+*@;ggFhP>g1w*bPU2VPOVDyHgw{t*A{yf8)oINXBUKQN`ILK+n>Fl1j$yuSyzqzBnS+`+dlx%3;33^pfWP494@&@PUMEQt z>fd88Pm;R)l53-M2!e267AfI4a~kZ;&ZTwlp_!(y{^rLQD!4yjg~2Ez`vKuJrAur( zSxiZu7)e;8vToa|Fs=?+NMs(01Kf07#a4rNL6`TJ4K+mDy`z?m&85gtkUkf74Eck_ z@ur=U9Oumor=m9x9uX)XzbwT4YyXvB;XpnkJta#}2`@t+b2>pD+yp7+FT?9bewD=0 z**x*f&ZR{!nD*0k7l(=d>Du<&>)UT%`WvJiU}k#R z@~eKtU9@BzZNB?C`{g`dNyUkMa~?~qH=Hj>`c`tmnN6#3+PX+BqO@Qc_4dI~6Gn$3 z(|{_fby{zDVt<*Qhn>-KGCU0h8`D+3a?fj_C5MbXEp|8*DsX+ifVt{O&dRCed$Wsm zqN;;SH?HuN!yrd>4Hz#m&5=t38=R%IaB0H7z%k@4YTY?LV>3Df`Fkg;w&%!gK2b^UJ*Mcts6HMQOP52CW7Ukq^)DD7q)!)WTFD~ zvuJ*;d%uW|LQ7`j8c%7co<1gZ*!^9w2&81C>@X*{3#9e+dj?FZzTfG3pxF+Wj`1VFYC;A~1DoZI_>1;4~q zK42y!4my`PZ%=XYNpAPg>S2OlzNc-K7rDiS2$)=m$os^@&+4)I@~ewjY-Ef^+q#el z3}=5SNe;7sZQloCuraDvZ7!6CVDZZGms{LZViyFzBJPpw#7*XqZz{CQT<0Ji>T!CT zeE{+dg`3*loOR?C;5vZXP1=uoV3hCvJbQc<^sw}%@c)2G_7|Gxf95*xEu;~BG}0gP zvpXfK#Ms|@9malSK&AqJGrtYY=xeEmb{CqS#Nl%fvQ{_cJZ#tqBr#f>`%qr0jDJbPOZ?hI zig1(mQ8-dP$a?V=_vgR!xkWya{OkjE7LpUoP|uR$+ZW%Q`3Zfn7@BbZ{C@iPD=r!= zH2yyKw3LtB{MH-GfL5jOUUD>eyT%~ZNY`*APL_z7V`?!*PqGkz@Hbn4gmFc95BR2o zBk3sTYG~lIW=%^-9|&`fd+XSTXEYdb8aujW_X;~87Gw>++zX-4k(ho|)>}E|!a(ba z?IK%W%GR&DfQK|flFJ|qKY<^1Rn^=K98&#jJ#^oN(hDecv^;u#>LPgwX{HcN4=t(N6oli2Td*rBv 
z&l}!FZwHpQY%xNqa2=Nw4k#&frQlx-fC-~w&QdREdxPM(d;ZJ9^_MUDurh7xfp^Yc zKj$7JeJW@vOJTDFj82n%{-BFvPJtxswrLkI&(EvZWLuEVj>Fk5Khda_l6D>+Dq5{x z5DmUMt-%$$8?j#4*AwoYLH-O5B`JIizoz^g=or~6J;Um2wk>&qV>r}NN-Ox)3>YI@ ztfdD^_cEJT1Fl#`Hg_{IemEuN?yjY+^DD zK2%=9m>TSa(j#^)?jWJU@cRFK!gty|#ciR4{j#A5n-DsD?8-K6jTBoNc`z`l)wzR# zTilN^262dd2xhEJ#B*BE1%bUhZ|4N?M)4d)v$TkzVSg=;xwMGm+JgLJ+pyfiC2JD_ zk@$H^PSGmadQ0*1ByfV^qBdH(KUxyVbdc`c`X2BLW8oA0-~P?FYDht(Z8HtjK3`LG zrN4em1`e${Ruw8eJ~$bv!i=5rQ}gjq%IjEA*Ti6$41pCzU;w|#R^-(}+lr&ee`TAe zn-6b!yZ_@qq&d6l4OJBexjdB+5q;y8$vB>X8Y=asB{)>#jW&N`kpXH+#ugm`LKf+z zp%O5P$#K(0vj}l#1E^yXJ$>Vjd@OZ{^w_O+)%Jn+mkd0YFTxY&ZOAU3CHffaRLH|D z{o%Z&^x>lcQbU1l1cQ@ZK5uU}?*ffi>yr7E2?&oXHYjjac~HmZ!$;X=1b7iFl=Am;YZJ2f_W_vs%vbK(j`;Gm_L z?I3C7Z=?rc;c%Aw`Ys5w{0BG6xo{U5&sl7L2#NS>QnR~w^GjRJ%Mbk*`Vf!@X$-{t z5dZsQ8{1kaL#T#2Q;6Y!P$guEZ-y9o`SSC`i9mE~ep2sVAg5<6t*fgR6^?cPlr$$2 zelT1M1Z!8n3j`8<5}URxy!IFWtk|Va`Iup%#OpX89$qsAh8C8eS|nyk^hx}%hR{(( z$iN(xaBt23yH|rq28IuKyV1-PV+PZDN#!1>wwc*vO*(%Mz_TY*ajo%># z+w5U+3$XkkGGZLGsEppcoii`7G{?NKoTgqIE#Fn0Py5@Y_hlw?jseiFdWS$P7C!1S zr2w2N@EyUTu3H0C2h5OzlX`D@KHH^>j; zuN^7*@2X!FDt4?s?02ODZk+R;W7%848Zt;ltgr$;uc{NKQg#E`kRg7=bgUclehlWo z{3;QziO7rr%64)U>mD?#xFhfm2qC@f;dhbjCE1xrEDP^(cHtC8VxNkGQ^Lvi;Aakz zcyDzJlbX-4T9F<`XAmcY0T<)-Z_bNVXQYn0>)+BoyUt3->!XEme5$v((7~qrI4pdn z=C6^-f@jg+CTB4uzB@}7Wu{bMm-_>Xj$H!8hO_}zgvix6QvE6w_nm2o57zw;?5oYQ zzY+H6f1v$63`t3G!JULIL{>8IKN>us9_b@@eR9S}2!aU#pUlCAPZ$2M2cF`%kdfF$ z@FdxrA2>L@1^ImrEgs(2oKtEU$ctVvKzt1kNkCozU;SF)ToY$~eTh6$}?`o(t(B!8%iA+@!OD)C7zGn z{I_?HI+Ei7>KDY|pK%bG4i~Vo#(3NwV6oG!m;xi2#^3#ik2$~ryjflvV%Y=vNR-ZR zRHmdn0=g`7mygU5-`& z$rVZz91*4d*iJJr3yUwjC?HLNzM=(yib!L?;pUBI?@N;mr~+r9w~6H5*_xUIIzUCM z-Z9rOFsIjKR(5Yj2Gkj+W`ggx-u+m{cMp(_QFR0m8N}3v_G^y(C*OJ_3k0FhR#YQn zZj1dZTY8)wPkPK-%ZuUdpvQ$W5hP`MO~;g{Bj%9N3m+4oK<9)uqJ#=+`lKD0va=k7 z@=&40Uzn*h1b5~q+Te0A1e`1h=#QsnO~I4WxR?X=N~`1m2n9|oxO?O!9KFCm(qPfD zj(Jt-JEu?+rEl0dp=$JPY*%%nLfR((IcC7v(sTuuZPozq1nC^b%SdG-ld8;ikuw@C zLaH-2RyB-+_Q8O({u4qzmOTkSVCk06hY5{LByFIKLAlBI-3O22!%Z0L=GJx2KmUIF z-3L602wjdndZ^xS!v&^5T@cA79Mk|d7cw;I7T8%rdE8k7|LN74bAY66Clv?U&qn9Z z>HHWbo@7eq596g=X`>ZOPw%qs0I4=sleRNhcI>($F&2>phia-S_t#l-C5uC0T^QHg zqwhc9@|5{kV7_hYdgUzC)(6d$)y4MA5{Pt6#CQ5a#QamjRA5P4C^?9Tkh5gsKFf^Z zk7?mc0h@#>YU4zX`Y)V%N)FqF8Q<8(;xH2RgwKLq#7k# zYnP=!^cR>VM03dGp=^RzIH0{i3rD*dJ4(K~+z}_RWa&{9% z&Gh$%(kJ#!wkjsP)Pwe9hD@#6n2GR*#56Ih@xynZjC@uY^TcwXB@7F9WrLfSKj!{M z(3f}IY*?G;$-3izpdk};^}^8Lo*;peU)5fBmMc#h-`yU6jJ0!AQLNva;8O9Fs= zNutJwHc6=?rJx;vQfUOAoCd}0dJeB1GLn7y-3dW=h@0oc2BnV)89sYg=KVnfifdq4 zYyCL^FVcfyZaSs#WeXLD^iSlurwU%6tcF-t5IH_*M&IyWAD(RaR{r>*H<_~r=S9l- z+_!gNScZo5kRcetyySj}E~xs^I7jhReKxk0cD6`1aw&|dr*SDW^q>DEFZOez zl%Ehegzb~m7tJ3|NcY(S%>&{6p|1l*G~m63%Ab5VGNZdd!?4EVz^lys?U7Fd`k!8lw>rpP!o<&lLofAu5Vy!T+ia0-UV#X~ARRzqL4BjAHpnr`G90!U2YQBYZM#1Yt zh2O=$MTllbinni`Bk7TJ`UuKxj3U~ZjvNPyK;j715h^PzZw#x~!v3z11iBB{+v~I$ znuvjTW>K*vjeKnGsNo1idS%wZHZsV|gBfDymPW$h5!9^_+qLNfz89kG`j-$GHDBKG z-W!Nt$H+1+UVrD~45pfdYUG0<54Crbw)Z11Z>Pmg{QM0VEia$vUrlSM4E6)daHrZ` zM^|u68sHHft%TAuYi{fnmdM~ll#Vh^WWUUsQT#t_7jZGgoG|N}^h`jVX_R?IlEc2p zlPz5$C$VC*dhtTVDU>~UMxqXXnJ^<=T_7vVo!^x(M$|f_*96QXTm@ZFSXnTKc(J_8 z_W70XAA>j~{6*Z7tRi_4`p#D_f4-{~WHKTXt^`c!Qi#N85x5(Ze0OasmJ%Vv+4={s zZ2Qdv6dC`Ot(_+(Q0nRe@11Vr#1LjHi4cDDNOZ1?bQj6~y>t7;&b^C18KJk4UH20$ zy4Og5=4(+5zxtDJAAW=U@|sUwp6#>vN)gPLq=xc>W-CS>C>5E%jccbr-#PJ^-AKE} zR-POt`Q!Tlv!(3GS}MDoG70Ul_5QRpt7ob{8tuqVKhKsTYe20qB!pY2_1oF})- z^oTXv9BO(jqSnRx7L$4TgRhg(YE5xOWA%fmh$w+RuB{K($`RJJgQa8xL|ueAf7qD6 zy_+WUit<>1&Pvf3>8`=!p21;Pxv4gFk?pM=3g%rv z+T^B&L0iUep-!@nw7u`CH)O7$FMxQbJ<^wsQMGo!JX)ReUGjhY51G0oyGoWRT1nUh 
zhzPa(a3JD;m8r7O7*(AK)|j!$)U)Y|VkAb*-)Yt^Z(8%w?bfTfRkj&yq4S5!(2J=;5}Ty1eGY zr9Z($lcqpogUKT%bAIVP-X-rS&44At+W@A-z$)p+$Ry>q3<%ZV;Qb*kAt^C5f7lyK zvQWpuFm7}@OLs*^9W(Psq#%?JpkFS~16&q0C}EJ;)*2Xoe@;dF`@Gp4|Jlh!Evuvt zk=hm;i7&>1FNVA0!xc-@;=hXHHr5uXalm(89|`74xHSd<dGmChH!t_ zLi1CtzZwuHincoq{JS%{#qNd$&qZ~hnw%xd(5jF-mG1OdWf#x=CPw}8AUwm zxU%JX01b)}t$1^{w^sAV4`lZ+!f?1Pd?Y(YbZ*v+PMyNM=X7u1pciEsT#~tTxQIQF zf-SC`+30T*A(Nj8w-2`p=Bd0rRjtvIc$vZ-SPPf20&^|G6{?q^RTw>O*p z5r0BC2dg@ZJD?3rB)_H$zl&QfFAte@iD{uU`{>6w-;+-yTe*`zx#VMF9}$4>O|^*d zn5QE|eoG$>ZTwXUMX-;%k~DRCR)N;6lc^Y&`Gt5fJMmlg^pdZIkFQId`uk?4L}6?_ zc}aAguK?36PrW0lyEhhFnoQI66ZwmKvaD=>j(Nj&n?HRi6AfG$N-#5BI!Jtsjg-$N zSjfC4r1ZOD#EsuyB>07x1;I;}6`EZEw8LxwH$}Fpr(5?-1bkD;-+pv)=D#hsj$f5J z>?$n1JBWf8cf)Me62HmIxW)Bg`u&RZ(o{!wFDc6$Y0({^^A&C+BC7cccx3b$6&0KG zG*?I$)%?Hf-Gs@F@}YY11R{|$1N?9S{E|uta2lIL>j{JaN`TpqA3v0C5tAHSzHwn* zyq0Y|C)QBP!JaqI1~L`)%ti@p*J;;Tz55XU8CtL9(^!1j`t82YI7Xz1>xyY)bEu>YBrR>cu>Fp36=URxE?oSs+h_YVD#TsCReAU|^ z<*J^4pEY1#b15ODnogbGf`&A9hT0)bG6MZ%d`R`fs&U~8BX4Xn&W|iY-E;w?(fHe{^FcJjAIQEOgU`14)Mp7X{Sua8cs{mzg(FpE~3!{*kXlmPY!hS8+oB z3KMIbk=)(MJ>cIC4|yAKM5ags8q`0w#~ZtId~N6EbD!GgK9(`>8}1zN7fE~t#el!q z%Zt(}bE!0@rK$ZyX~iINQ8sRhH)p*)X1R*pu>gjYtpA3oNBx!J#$fr2HAe?#ry6XB zzE%F^Pyf5OCHr=nlQ7>3pE)l>m0hg_%p8_moVflz#1d`xRd3p`r9ARp{(d4&38*JI z5L_At=3H48Q60d%FDHW8NLg3@I<0{INVe-LT;Fy#^cO(kq1||^xR$gYod!|st>XK1 zEc2t%P%z=1f~GjcO*6OOa>x5lR)bw+jNDm|474kB(h_qJVKR-l+GT4i=m*j%til?H z!PJ2Z$7O}TMt2rG@?3{!LPH)vh0-Wh4vE9DSL2ZkPckW;UrM(JBJ-=APLR`X{D6DU z#fB(G5;ZRO3Y??a+cj?!Er`PpshAV}da@|4fHTqr+m=&NNStO429DCG=ssNVO7oT5 zxFWrcW0Qok%E%UMG>okN33btk1$=mEHKDYSu6jGs&-8MHBSdBvl0 zb7@@Tz=$LX(+`@*&b_N56?`Ex-1^kO0cMs^vXaGQ>?rFZPVf941Qu$kCbTQ0h^buh zYM4pOMVYB_UXM-m2_s2%0dOb9u`bfGOR3yI$lxP80PNaONXw7{d*GjZ{+S_Z6FEcv zBWabxx}BfBhY!yu(O%C!5$2_N4|k5A@%223{YAMCy9p?xOaBe zU**?CNlt}HDhx~A^h`!!L?XNBRyvW0-Fd1J855?@L6GMH!`KxY4r+euWzZHril|d6-skSIXAzePW1xb>1{%SdiCRqTqG`uNt1nZ zo{Y<6VUjsv`*idgYi)iq6dIi0?Hq@)_Q=_D&^9yKyt_#pHCythzWWwGwND22%Bi}P zFlYNJ)JQ!Xo3<;qOc*EB2=tB)-@! 
z9-A}Ywq6Ab4#)Y<)%9}j_+n#Bn+(1@T(Vx_-mcGOzGchFEdAh7e)pj~>*yMh=~e?l zEk7+yh07sZ`Nghyk2Lm>AQz}g1jz8}21&AI^hVWIKoo77_{10!@&hSbOpb-WDPA9o z#9TN4Mo4&rInA2_^c2K89Bk552n_W=TfnCUM!+usrJQTzDVSMkXxzQ!omZEZ>5aGo zmX~DA4TW*pDcqYSGM+EyroxS|*Sq-J!7 zI^&XJZfINU{W6D+m(X7it3!eAsA&d1^3lz3MT;4Dn7fk#&n7Dt5-BY>0+br(m;A+m zO!L)r*{sGqHyZl);cbRzDFU_UflgF|3u)$WkEKWbpTafco{o{?EQ(I8&Tfwg4M+(I9J#0y0a3Ix(91>5jA`4 zD_=N;9Xj?cmhP^Cn$aH|}h>+qu`@=htTfz2A7=T#EgPw4o)v zvlm@T=OA)v95f~bFRZn4zjge$|LPm)8QDAFiwsKsxLQ%^+$2=tVX+`+v32&OFS6|| z9N-3{_doDH*PhW@R0IlvOt@ZA;e%AC;O@TN9kL9J@;|iK=A~y?+I>&NCYX@g+3D+4 z>a*d${8_5+;@(z^wMKh7>zchfTRbC-q&yxd88r`c@rY%@Q?3Ylx)B*_;XX2*Olc<3 zhfk4%CBNSRl_Snq>^a_9?m;ugjqKec_N<;%>vs@%-SSt9JBz$Z7Q1x2#gT{e-Q~M; z?{v1P`0u62iE$qiM4{K80Pd@C(lRMNo5YKIGMNN-5% z+(VRM?}3>(?Qr!0`-lJT@8QL-HKx5>+QaS;GBT4dS0>%~?25+TYI!agvs*cPg^Os>(L{9eD>hhKK{A4J*mg-7sN4Z>RSk`85 zBI^UJ8SV-n1atT($G1h7rAbDMux*dW5c7Ksb=s%rpm|tjn1-rHbs3ZKGnQ@Z)oU<) zB34U2HB*jAfX@k#u{V{vU`Cs4#DS4LeVIK?)cgj=KgY!*#`*sDP@a0f_}cD6I!kcj z5$6)nG8BUliyhf(58=;~&q+C*$j$qPhkg8ODCk}%Fpliul8eJytQ+%CWzy$%Uz`Mo z6>P!L9Gp_06o4BSN&D(`Vdgv}L*GZ34@$m2tz-0$|DLTLbID;RG~(LL)6%eyH@wZ* zH@TepyR>;fGhn77e}In+jJ=oK7#|NFEZ&}H{9{z{`$L-UpHgG%(S1B)G9@s5#3{<~ zloNk-%uhnR=%2&<&VXs8!s24Kd>Z>we*U2pd8jrXmu(uBw0F~x6YCSrEyk(ph4*Q`)CRpj{ z?m{yQ!$NlRbiQ@hE6!KH*}48e-Zsun@@o6@)|1z`bp02->#T-?-N9kPhU^cs_v_7H zFUn{gtjva~Y`($8&6gN{4D^wt>$K5Atm*vnqBI4^F-hH&RLAy6<8Sv~;}B1=Wd`5h zR2nD}=my{bP=^}AV6+vjjy-^X-g3(d0TRjw1MrK@iW{+SB7`2$wlVJ@9D^9vU*{=W z9o{n^W+EgN=F|@4QcvVCnOs7KD_3zI`Zx}kkSOz*6H#7EdVlpRqo~q-Wx6sQx*3VF zFkpy8r|L&JPL*H#pa0nx03zXrk>5Jiym8i>LuWcl>Q4`9zdvL)=a<7})Xvehy|uOd zP7^Bk5Is`LFpX$f_};_4LtjYqfIizj+IP;*FILTR*;S>{WPM!q6-hO|M%bJw52L>)Qc9#hbYsDqn%>>lCG;OwOpjwfD zK?2pJfgOakEp0buXSHg8(=aZGSj5)cJud2ziaP8o#CN6LY-Nok$G%AUYG~Md3>85+ zwLEC~*#+%LJB~u{W4Qdr!)V@i!n+WTvOjo(h(eRT^t1PCHf;vdq9D_i!I%LzH+}ZQ zdJY!2Z5i^vwTT1Ufo~$l{SdPncFJF)f7P!5sTi-Gw`KIG>>sQ7wfAx*!ZQlpGgc{S zEvx|P5}d95OoD3}d9rF*iaD1d6?RtC%-Z(VN^wM~{Co1)VSrf-`xhUZAF2`RgR$Aj z+8ROik*bF*+M$HL9yX*&;?UeVdB4rLDKoDNcVj9124;{-Xp2ROC9ClG(bz2s1Po;! zb(Q%kjWL2{m^7rL?50U2%Wi!~1iQDxPOi1H2wMt3^CX9p>X~E#!2apO27R#$8K4gs zMgN7=2J0&F&c@WiN?1p_35kk}(FCL{GB>_(vIuV&>}qWS5GkBoF){{~@p@Q(LB1Z& zjEwdp83tPyA@#i}oyElhxzzreNECgDZaBlI;w6iB9GNXV_=#2i4Z4fc=Z_! 
z7P&f0URPqk=VIl3Sq+sB06TfHUF@IyWeKxB#AawpfwYWl#AzH!f~gn#QQO$3wA}_d zbn)Ao?C_D{2gY9imET*NT#2>bp+ zpX1o`Ekit`;I&gAL?Hfad*dAJ$o?z;_1^Zz1s4!u#Rl8oOcV&^aO@F-s{6|nTV&%1 zdF(B?+I49mUUmxqfT@bM0L5(l;ln9Z-ZGd7oUI+* zeXe=Ocy&+OVRr6)%{-7{=3E=qK*a(^C>mYYi;zHvU0}<2d zJ5=TRI3K2s16@+_e*7yH{|-A$j1sAwaQAuhIV=EC%6G$9pH*2Is2*uMvs@Z>xhHmT zF+$v-04=$O3w|F^PK9GH3*rJThBkK)@;?(q_bR46AOB~?#n^n2r<17Vb;-8|Bb26|k`5&ksQ^!mS z*Ug_V?_3LnL0z-PcM*CGyO4Z}8+APCU}TqqDN_1^9c7Gm%V2OMrnKbjRirOY5%z08 zSVxKwa~CyO9RK=Lqzf40C-bQB07wG zU8A2rEP^)|+d|v~SQsv_!_^Q_e)e19Tty_b%lIb}{F=e<=~r(QzG77#_pvSLZu*Jd zM>ncr(4>{<00nl>sV!?iHaG7bQ>WU9S8-yH7|);Chl3s7!Ti;Ag6fz~{)e_!hlVL~ zB%e{i*yd8VnT*{h8poB|>?~^`f#Rn6GfRU7FOw98WwVCM`c-~vOpR=_-b~g`e*CgD z!rTrPHsbrzF>vQvlE48--$*93=+V9JEsI%7W`iz(av)|2?pHR<>eSTRuzhPu4*MOvu{B4oU|V}`1gy}Jw_ zjxU4Zib4cCt8Aa48s~T3povKXjFm1Vw6pbf>&){QrW@}#*O@T#IxzVXe-Z)EMUFAA zyFRWK3UftxX*7Ad_LUr##-Q=!rS~s;c(wKViH|}BrVIaXsDxs$p(rN$$3>idxSD_c zA-_pZJk5+f%*|SIBVmETgWwI^Hy0OS!%Dj7G1v1^=#CgH<27!-iZC3azXUFLyMZGS ztmmb74~_VP?DPvPg8tx(s+NIK$x>LlXt=bsFObn1CXVnnBf756UH&$mvDLhJL)I~x z16pQ`BM3rhkiG8dQj|r0M_D5bNyA2$h+pzI7x?FOwNu7q$FHY9JcFpCxwYnn|KmUY zqxYCrWw);SF9M8Nvfd-Q(8Egbhpm&hT3@eup)AbiuRmZ%^2g0h`1F4931IA4KIAs> ztEQ6xsq}TyX*`u)k6pJGq$;zKs*cyy9R))3%CgRC- z$}pQD9`|NKL9JH}^NQl9^RYp}kj)D~56IV)Fo1T0&=oqk*4j_43qQIbd5o_ne9-Wg z;MRAM^Kf{#AI@We{)0XoJdHplDg>S|mA&_*O8KC(=-k2i437+ycMgWkJ^f;>_n7E&rVWn1nCRpZr3qN)fnuIT-?(aU3W! z^F~YRg%oxZyDYf~dIr6=>H^Ht5raf-&ZMT*7mm)RHJX3BNEEa8o*9%T9P0S?n=8%B z=e-R^4s~XUIPrY*f)D-vzU~8FY4-Fq54|yaW9HIX(vV=iiWgV6DA_9|`fovpv~Rae z7sG`90r7weg#T5I#*hlA;PFjWXZ7$QCk%)k8OJ?Xl1l}!a9pai*0>vzX4zW)8PYg! zJC8vj$*AuIs~`_|RpOLGAl7;fN4igLaqvBZoVQRL^_tU^;@l% zx4gN?0soc%YTmjFwWl|cXEdX%=QnS}foW77r|&ec{L(rX`*eQxS$;f>xn}HS%IFvp z{4H&Tegj8?*G_%2r5O1RJg9*^=SWInoxIAeM23Le6|vrjtDw>q!|V|8u0FAxI5E0*P}dFComL{x)Ke3H5mc3xW5b z_3U(OF(6oMM*aYw8!T!*d%5q&VsBn~o}GQ+67Ms9=1QnzEC?I&ZD0x zmaUylQ5R$T{PU7w9>BEd-9vCBIXY6h<-nXFOX=Rl=0@CZc^xI@5B??;b5zpUhE1`2 zuN539;_iIbPy~7OVyf~}_-k-`O4c&7Q$Abt#bVx8nf^n@9N+?i*Guo4l?c4%novsS z)bMi}?}FUTU`_xH|Q)@u1&(V_}vlLx72WpVN6wE&{;0 z0vgWFZ@{)0-$QS8GcL}5|L1>-2e*zBfFbSRvRG2f#~K(75<7r#_^dzoui^4Xef3{R zbbsPV<=4QzJ$w8icJwmMBiUJqI1S~545Y2AaZ~!_EKdlg?GLxenBaxsZ=o=sE7bI_ z|0+L*KbIj#dyTDum_oQhy%38;x%fi5j>89!=^%DKt30+<3u=$daEC(qvz*LpSEO^33)4yoO2sVN zG66OAGegPB+#3MtDjQ%eMN}C@8c%%NK=BAQgGhWF6y9Z+o|-@F*48!0G*JY?e7&O4 z0t8{BZ&J3jqi9!6wPLo_>-|=a%{>?UVTy*0)ehvk$+pBxOqMA-q(C zZ0sx8mwn*!bEjJ$PWZ3ZZ;KFe$=j2VdJ{b%f59S&|2NK@?alAD@MOpv@Zh{F_3MQ%$)A3trcLwbuWaSVoZ@*P`Lej1)yEcc45y444j7@3je+pXg z1&-HFW2%Q52S;PC#M{Ab-22$hUk;AQ#by+%Y4e5xfJ_{&y7lzR-Uh!R)W$-Ak91H! 
zyVrT2tEyXhth4#0+BW%T%m#nZ(=V*pcnGaKAw{Ru$; z*@ejJ(-4DxwoZbuqzB=Dc)KA(Li9^)2h6(W>Jv0a7qpqQ@iT2clJ@VZlWNYZr9u6D z;aDLt1p_RFFI2u#AM-BDc_P*%OZ@Ws7I$i&6R0fBM=LqG1W&*6Dq44c%-%iq7V$5*AS6DmWoa#_IhRAeTIwV~b>8KgJaAFERD`t-Jm!f3wI==e@VP%o4uh z_yZ;N5)+uNU2JNt)$jf5;>INEDD=P}E?N-!US1co|^CWQ;9%ParWaXQ;1B+3{fO z0LCSze?C9ZyVqZ@5X}&GK}&3L_o^$Ny~&whME?kYlcNKkIB$J(^Jmu`*eRJN| zaJS(e3iyfFNd0>w@hm$5m%;^dTNGw6S{;qbF40-d>N7$D!q8i(ud8FoTQW(; z7;ip&jSb!BCb+4nffi`#n3y?JR+2WvX6N{K92(xOBIju-OM^v=u`o&(pMGSCkIAKE zZA%=bJ$?Rnh>GOm4dLAOhg1H{uqtz*I=3%DaZ7P z4@wulFif~VHIMY$$kV_Uk6+xM<- zB+(5k<6jzIW1LNuBUM%O5h5sX|rIK%ys}{*fx@n z+3ZSs`GH*`X>YCT-@!)j4X3IR!fzAvdw%`|qu_ls1=|O)Z>Zaav`ylkr$La7tJWJw zao~t#Vtsg~*h@6QclV&)@gkX4Q6$VSCsss!ozS>V3Q;6hXFU1-Iu&(I)%>!uaM=Hz zUs=lUF8eS1VH|PZnXWPdl_2SKA*VdB^uC{P(ocK} zDXe2EtS~G%&Q1T)Y<3S{_%G%QZdm`-JoOkWmH(oYfzp)+O0Y_Z9U2xNf!W%ax75CA zU_?zSKf@t})jY@Thlb>pn8AMKtjLgE8LX`T!CXIQl^Dk|?>vaz0{}jIf>d4@=EuT& zN_7*METgiG+xWj_n)egzHC9Qnz2N3y+Vm+qz>z60cPtQ3h=T3&e)w}Qmug5pDkFOU zx>A=;Usq^WyvICOdn5jj$~cKsVovc2IpC$>1C?-PVxs42ObpugyA_`|tcA3WfcPU% z5r-jo*|uGpHpS0xwN^AE?5?ALro63{v5FT=L+Yyai(6(EM@hP~1MCIHj^{-BKh&7l zoUuUJ>=uzm-2_ohMuOv8fX3zXUC^idvgFmh*f9%j*u~P^x<=+jZ**@p?&aM@WO+$x zo-EiCrB@wF_19n6J!Fs@v&PU>;`BBItzD|5MMh122>N3^G#X56pE>OLQ?fWsfeC$~ z?e{6EI7QIOAMS0O`W8$UQvry7Yw3lz!K@hTII1`E@4t4bkfWw`GW46)%U4Vm7b+}`X$o|e=x$+r9J+!(y_rHEhKi1C2PU@=W)=S7VMP$Sl;=UIbm@w;N z0AJA1A7I^TL)t|=6Q=Y9MGQ{>SB^awd<#ft=DSFfqhls6_O$ablZ$m(4N{d|ddIUn zedS>3cpq*|Ot@h9evYgXcds6jKWh$kog(!j(z=zx-o_usD)Nn>z2TF=C zUM>`PS)K&@*I4{ADt0WEa9%b{1!;%00_9~ah*Hd>v9u}rrCpl0zD1w&dC_`*3VV|O zGR~#ri@tVtr;Xak$!?~2_oXGGSG{u(n8d}DlLe@ zf7`seC}M?Z?1*$|kn=)!#R5e&!OZZjb|EfOh?Y!;QI$!$_N5EZ;<*=reZKL5unlho z4&;mZ)itjkaGTi}y!ry`#ys~K08?W^qO?A|YyR|`7th`b2tk}U0MU1rR$WLR-+z4P zQnsv0$qJfn{kHu!-RYKAIApvz^nhs)2OdB7SK-=F>zwJ<<`9#agSq$+F~nXSUv=%$ zD3F5q!@Ja9fuYY|ts7%^$gjEe0jeW^(Y$wmTv^4&E317MN`RT$y*%KkWVdjEx0_!* z#v0Z`c$6UP_~6OQa(L==HLO{DzAu1MP&lE$r(h{oSlrimu>g9H`}Tw;&<@vW6EOMw zZX{ei$cSuRJ*@;9dz&Es1CJ_00wODV$R31u!Y2XiZTtNV7McHnrYsZ;QeGI@$|MO< zZP1|LKHGS?I4iw4TRs_nG7db67+fM0}ON=MobLA9AeEo&$}Z{zSnN{DGGLl=MDetWf?F}c41 zQdeZgE{{cnGDNgy4+JL}{y)yS5<3_))p(mrQLuznPRH;Y;nM1B?+17ZZ0%m9QE8lh z!P&}%&6hu6M#&x9pZI=E0u4RIL4QT!R~QKw!sW$HS(0_|-B5?i#FPmi?}~xK5^Nvy zayV39%`?GA*WEx;$?`?A)>GA9TUIE{Ck z_Wb29fAP`b7eoeO_{+}T&K~H#Ia-^QJ=foU`qqcyK>T3V2Ispe|O#1DOr`gIC8M6X;1rWgdZCj+zAB5WLA5fRMNs>j6x9-fsgS941yxJf%@Sv48VPcEmI!;A*}Or(5mo79n=$v@2lZo z!fMRUtu95-$ba;L!)6IwKg^?}z|?}W#U^*P5_sMG{6#>tK0SIHVg2X5Jr%{jv9lLZ z2i|ytl;ekxuD4|oE1eQZtLa7W+cV;$$OEp7WwcU^fq4x*!6r|9QaVcU+XS8xLfRub z9Sp`hSJTA31kf|JV+R8AK*j;UuCjM@OtE&uEKF6ZYCwLOt$!yX?t-7pM>&7}j0G#} zoXw?Jt@8dCd-n~=sNsw?)owvjokfFc2~Hq$Yso0njeh~tQ0HMnHuUBEhXS{A9} zOOAjcpd#K}V$JY(`>*EBw}AhBN2<;oi*7SdU^E3?=~Ib6z#48krT4@r`~6@1t$>YT z6yEtcd~)C%OKiya+?Z0*e(2s3ArIosxorRKgzt2!Uig!a%E(} z6v4ghFBA?rG;iM?{*OQZlRu&=i!h66!f!xp>=-UAlIHFlR}+V6SvJ&C!T|C80rT2L z29dcUH>y`RoI-dp+wB~`RaSN_jT|3+YX9;-qnn66eB+<3Ai|?4}GOfFall8JWA~|E_;eO61e~2Ti z`RJnm0;flQF8@VCN7LQJ)U?GFDTZrB^w3Zwr6iX`bQmVki^{ItY%V|Z#_v5a5D(T* zNhUcq9AX%y4i5i@e+5CLcNsfccHL~N{s(e&;|M?oUM>_gl_T(FUKK{&nMTy^4E+AgRx=~QjnT*_Q3WS*nE9E~jyDyp(`i-^NyEuk}CR)UuF zZ$E|7yr;ri@k18oQt{A0oc;KLba&6unXpD=_Krc$U}S<`Q?{8cO%Ox2gkIx8AD`ieNzq= z@(q~dUaIP40;SQqd|GUfFu!AO^B&=_HW##vx>-_^uFQzEqqU@K1Jwnyl*LZpdjB3m zL)M6It0)Yi#R7In84-7mpDf%ht1h#?UER_TY~Dm~#-UEV= zM$4u?d||cZuceg!JpnNThRkv1k~C7c1XLsv71ydyFTUG5C?fr<@ra8Q=4<6d!n|+V zWswI#J|h;s6!V0Y55%W8o{v5U0xG>D@ZRvBQc;m7sC-Cway?Az=K4B(SyEzmB{|vw z6yT8c{@Nuc8BiOumX64u5f%jv56TFgiitQ{VvB4J+LEWvTm-*#ir8^y# z%QaaS$Z#bOQB$WezDU8Sxs3o_Ae;^q2#E*fN&s?9y4uMVNj~pKcMaX-PyhTU?||`l 
zZiEZnaG=jL1s{iGEEce}B-WGlfMc~b7NoEP%C*jQt?+}%+oqOIsdHog2+jv@$M)N6 zFzUvytt~(){;Tx_Ejd_Zjj+(iVo!}k8Bzx%Lfqh_+uW$;N}6YJr;#s)$#^jrla8}; zs>~3zVDLI`NPhJW2bh0NZ~=<~d$d1hREE+;G+Vt4yx5=Frw^C?Q@qNg9?VZX@jo>0 zT*B}#?^D+9j=xGzdK$GUy&Fx}fs{xp8aQd7FZSvzgH_@nfey}TRU0n|7_O@#|ct<@Bcik;aShRYYDBM z-1K6gZjaU3n-ozsv3v6=4Eh?`E0&?GJ{{;Bp2LL&#?;HUgIor37>H)rQm{#qI@OED zfYy8$aUR$Q45wbuQ7r}>caV35y1+?PCa`bmIXuX9#=#rK8r6BQ3Ght;TUME&h_N_q zEb+(tYyK&zU5TTT)`PQ{44-*j`6L=AHi^Oa5Ai`^1NL8Ztnpi{Q^?bhRHaZP%Z2d0 zlev^g0{?RR{dqBVLFu)aY`XM>xLxdp=r%eZM)93UNhjD+zApIUuec=rj)Pf1RvIHF zv8cSt;R9K|x5UP+Hu^uid-(dX?D}2c8sz0``{4Y&?D$QrpZ<$Oo&Ws2AZP5P10k%r6oHV#OJX8T$k;SI#zTn1&j)&ees)49)FJndfCdd7YXhy ziEZPRy{IVO*hNG$<~Y7vet{}3eM26gYjH;d*{9)NEjLyp0K?9>8I&5#N`8|EK>ZujfpKmY&j6~2({-7Ws} zSFyg8rQx{ch$@ODNyBSZalNhF64X8qKNnNa>u1@G7Z4hH#XQDmiZjEj0@@xEn;pBs zosxYQWC*p%K58HD$2cxJgvim~nJQH)$z zU0O%J2fX0lVxMcAI4>KzcZT>_pK@#~e^AOWD(^Rb(-EaHrO`B%nN#y8JzSwYHsU?mMW93zSmDKQ%H8=x*;2Cot| zBs&fl(P^wMvD7H?Yia#Fx_Alz_^RY%g(L!nvHxm5S#6#Vp0yn*=Q28KQ4=b|`6DBY zQ7W+>G`@Y@xVYh6?HwpdJ0<|)7Yr$#<@o~pNeQ39ek0g$ErAQx$a`@aHtrm2{P4=(-nFn0rS8I9 zp{OprZQy}oQfCS|bAY_W;cU35!{2#RvvWT}3g9zE!}rHI#}C-juqDZ!$?1DoO+MYc zQv<{f;3+wQdP-e`G99BNW7X@n$7D)YwC@3mFm5yXY0(@~Cs2?X^CjubnKTzYLLA4) z;Ey{v|Ir^3D5Y`cWZP5ZZ%`vCFo4YaAT&;X*S!5wS^$-OI#r%MgR23h&EGaw){8+R z&N-SBEA_@XGFr&Wxd#DMilrL?OHIu|d@Awk|O zdNn_*v4q&*G5;no1g4e5X5M&r5lFg>h#3abA%=upByGie8P*20JaV^qy|52}p_1ce z2jR@Dcz>S5oCDTRpi1lMck*_g6n9L|ukLD2_bb%_izTwa-OT~&3%B;eo8I%40{Ru8Yvrk$Y;d@d|>189b}Xg zX`pVLCV*W&&6A#lDo407B~DPAkbr7FvqxnIt&czYduuhK-24^mH2o6j5P$i+AU*&;<~Ef^FsyOS@iy?0 z5OTAP6J%AF!Wlq|u9v^CgR#)*U+`s<$3EDBYcpYYq4ZAUDhIi$MK{B}Gc_z_tWm$x^8o?QoNruu}F_bX5jVo6W zTBK-F0n`e4dh&F?2^K=_13i06J^}pPgXkm$R1O(t)0^Hn{S&v0|Kjc41~4-4Zs+^pf9mHlT*)@KzCR3cr29rer|W!fqnO!+7yzjTyyb4)c^Y_N6S1 zw0K|npubP>0)~XvdE&c&aN)#cI{9{Yii>F-xg%@|l+>Gqg+o}Jz1!S=ZP1An^L4T? 
zb=Sr)RHoC_$RO{%^y*p8Dg=V6D-J<*&e)=KsgUhiPBb7F+m|A~n*(WY9jlAC#GW1x zixC{WXzAW;I<$+^tvo4zAm)w0La7bI4yAYp(uGLKμTLDX)J&O=- z<{R**jHw;mTpUtC+2RS~YPL=xCd%`UIW%Ckni9OmF(-ZaM|0l<$h3` z{rR(hl~V!QrAm;Au$|gAQtu0mC61aMBn=#G=P38Ww>Pd8J#q()dBFD=gI$v&X$)D` zmoRL2DFGoE6BR^FM5@iF5a+>x@Bo_4v2b+J?ZkbEaisBUAVTWXxnop%gRcW&U;cn1 z!Ek$LTCXoO9^aSAB?w{s55Sj7i5U*(Yh`30?LI-;<#9LJnACp44y*0el6M=TJ9ZEbAa5O0 zG|=lysk!z5hk!rISzP=PVQbAZtKRL#MKYaSg*n*&Krw)l$Ok5EB=T7grLM`!guiY# z&-LplWnO&i$tAE8@-bvmNo-lUkFFc7pPvud&VMXj`#WDYu=@M=`P3$OT6X@MHWREI zIhf~JQ(=9bug3(_m3lH%5d2ERyqP8pUfPtk$I;QI1@NBXwn1&>x{?iPVUiX~>7}&3 z*=QXNf||zlm)YuD5?J`e?=Ja^RkE3=~LkiCpdMMfvrG~u#~R2bNXVjA&i z`M+xrXLvo>?2BYQM2-|LH}F6)l6En^4r5)mMykzCX`&Gz#zEY8x!!tx)T?2Z=O;s9 z1~*gC#t;zlFdA7fmJ1KH-Y2OP-5Fn6GWMXovZV`I2V^1L$^S+xV?f*s~ z)&QTW`XQtxVBX$nLY}v;g~Np|bnJ`O|9b#L@wmfP=zHN4Pg}gV@U;vOPNf2o(=3NF zRxx>_hqF~x=+A3~MC7PCdy@qVa)TSccuj)?JN=)fN3PPeuIai5qV&*RwTEe`< zP?u^GIxMEc`Ls0GVb5Nd8UOHt@UWL3H0>~gc-#m%>5|V6MPnh;(}ySl?_(hKYQ8pLor>+7UzGLjO5gTVgbt%n?7J~3I~c|nbd zF(R12-vijHn!=UCt&K=jGP$VO8$*W72`aYn{Y#A?yLlWlgqMomC|;&TF7R}>FbG7t zlxO1v;16Io!NR1k&t?x|JM!45d~{TBl%nYsQfC-AHN}B9PHvzAfkO0=x5OzoP7CUv zNPmt(e+&f7ru$4_lc^NG07AlKe%)m3&1_sck*!^n7e3J%=%A78J7=Kwp)a#0U0T>( zA?zYruymVakS6(Y`Ok4pcNZX0Bjy(MCIG!a3XNY+VN5?Qdk!a3qHy{Bam09_ZY_k_ z4djFYr(Uajxov|E&?f$rBuhcFXgC3{wDtHwcHxxwk32t#@28lPaX&U4RqBR-Xs4^% zKzJu20RY2)&`4Lk*hTba*;ZIJN8vQ|CK%d3?Nq)+!C{m6(DH{N;{6IXAOOnYVI(%2 zKus?!yLY_tLKoj&cvj=#vrDaq5z0xyvr!5^`cqz+5AQ#o{tz3RXWh5}WC>gH>)a3J zCG*N(Yq6IBao`V?nJf_@8U5v8-(HF|7<{?Q!s+8c#8?jcNY>@JscRfVerOC0MNFsB zhDo!a?wifYS$*uXx0~7PA0#y$zCFpj1B{Fn=jBeV8g-fcSc#+A* zJUlYl^B~hgc{kluW(a>FCm&VGc4(i>I^!K^)SS6mFI-YZ6T&>kcgpeZK|xuL(}>|f z(BXvj?u{72SK%+az^6o%$M5&lHo?9{!41`dvZi6G(f(oX2xD$_UxX!ZA3oj&j(e>T z?Tg_GFD{S$8DK~7BL1s+?fv%hxBiQ?%uJ0;H;gEi2_TFjLUhtbf8Xwk^S}5n8eE=< z_yBUSHJ-na88FutCdfiU1t@Xm%F;v70#kKgE0otUHv?u%{xFQaq!7i`J7ZubX&)9T z{rTbfhc~{u$UhZFdEN2_N>2%4vfmi5*hdP?66? 
zJvh4DSkl0NoS{=qOstdwEQJl8+Z*EOLoxI@Ahs~I2)`;$9dCopND50NYH(})oj(j1tZOj72?ZYfni3U;Y4n5K450XB}tF4hYX{O?= zFl-f^B4jAY9$tih!FMDj4A;iz$Wd1L+81PR$AH(*V~%i49bPDE2>;2{e^n-5rGHsl zoc~`wp`cAt|KYSH&N}r9#-qlQZ(B#!nn%Kwj-6i<5@+{*Cf?5nx=Q#_Ay|8%5^0Rg zhJkvIRwsSPQLtpg3Y-Z3EZ+e10p;J6Q|}cZLoO3WL0*9%85==d=(D6FbRh4jK{{=Y@>7%@#eE3iQBntz^i~MNux2cA`Eo);6 zRaKhBsa7!2AIx30{x#r4X{6Cm--x7+Y`utT=&vX174Tmve;g!$I@*RYJdrF)-hk0F zMLpu=1+e0sBa|+g!bwR7aL-5328@ukge1$(o+I;ybaV!}$NRzCEPELyqTjFzCaT3M ztPcQYG1SV0&Q)?3xl}n+tB~ED<{<2X(oeB;|=vyGnM#zzsYSG3yxs-omy{m*y(RvM}@q%{)v2upnMqCP9oJ5(+ zw$lh2&u@4$ z=>k|{Be3^_;P*$NK-)41L4D_lm#|GjtCizEtcLl$`h?TY|3Hyo%EbsnkQwr~udR}{ zygCnaTwLGeBF|O=Mo2j+yDV7Lr;Q1<-hW2AU2ZMFy?)RKxSJ z0v5srXUYdg%P|*qr18)|w;w^r6eVR3-twQU6;oGrOt6?%^+?2bCBaJ1en7sFubH_6 zAsNGDVHqu@!?Q&-W|jlL5V~ak2a+u%)sM4GBF7tt&o_=f^bxZFLLU&$C(J4S>%;no z-#)yT-G^1WN`3)Z*3V8ti0AKDiYcFjejtTsyqQBFG9m#ydzF-l0dChlg+0zR$*9)XPz(+9^ zZTvha@Ga3#>_$9aXdE)>fD~c{Z9ljMgx6OZEIo>Yu^TL8eq$3Z1b7(mE}qiz3cUOW z;)JZtC41HTXW7x|}%wSSm=u#@?Tm?a)ndQlbDy zwg$|`5EBM6E1jxzi+p}m8 zXocXTT5zebBK(`?`ia)sFY?V7c}RY=oK}ujCShyBpX0}oWlR3nf08eahn?EI zWK+;?s2)ELX{Q%Ku^6ME2xgRg_m=mEgMi1z7oG^bVdsPpTn+)F;FX`V^#l#YnfRt) z2;cX|EC5ph4*vbIN-h8lqWLsoa4s~k!QqnkD;5t7D+FV1-x$Hll=R;7dz zcsTiKU@xzYa9@(E`6^|b;Btf4R{HlzfGp%wtH4+o_v~O_x11pCt;(p&sT`mM;(*U< zlQSxav9)U9UPo8}6_3JADh+QQqeIR*S#}sl?XdLcgex@Hk9u_wYbBI0pBnQ;ii;+U z&SwM(IKsX6qA|sA@KGtjS^1Ttp&^f)@=rgkrK7?qEbji|ZRyYP|8iF4B&%T4eRK`4 zarP-eZPFj`7+~P^uI<2rm1uXnEVgc2j` zGkM;B4O+lS7$a;LOhA%t#6341XT+ySgk&OuhD6ty9iC#apyDq^y1euJ>eBT!WpED_ z1wjKQPA=XiYlx2kvAmN~(fnk6!T9q1G1_9tu&5^}xw5iH3^1EcSRCZrm%`XUHH6Ff z+O$du<47)i?7k$NQ@G`~XSUXZL!$NMKFMyj-d~coVNY7X-VkF(nRPfI5=#PneVoH8 z91Y^drG+w3-JQO4@tJuqEseO(Tl3K-ik8gN0ajjIuZT3++whh*A72#x2Y4<4hhEP1 z-EWAikiFMgn~tRd6L;&kfI4Jr-w`=3uMQA6CmlE9Xp3(0ZeTAT)wm!NcFboJ=k;7N zPt{qV1zE=guo@b!7l!hm6bgoP?_AxxK*m+B3u#xGKyV*3V@LGaH4H(X7gQF>0qz9hKTF)sUgzK+hCNPCsOVg?8L9{H?FN=FZ4GW&({%M{8#qm z7<~BB4hS6VNOM7ZpjV!C(WWh0dxMWY}%Z`~v z%*QcAt8cmg;}_tIg6$L05YxOb4-+iZ@mi{LBghaAwKt`&1YBc?pR5|+K%Eu|B(2&0 z?iKEk?Cc$xexSQk#>ypUZ#NDq(nOAhO%1qyx-yC1FdVVGKU%e90ki2Qrq?;iQCy^; zXB+1tW~59nyotOD7JNLs?VPkh&NLa{fB5a=(uX(Fa~QMW{ZfB0+o~onwCM;`>mv-r zyaR=^nvI(%dB9A!?15auUq$VS9!M$!wL)4<{emxd^Y)NSQsr&UumAc zBK_>0FP9@P&S0Du)kaDbR1x+eL3$XLrKemToDLEkgCg5@ zXnV=VW_9e0G~BRuQ*rxnQYW8q;}R*lAIZBo-HUF1i#w>4@!)y4Cc#L1RZJ{Y$Nmed zC0xHgpCwjd>=Zz7(-or#l%C8lLJ@9>p8N}2KzqEfY?U~xpt~VeP+=~xC;%ms!VJus zBb=l!9XVB;{Gq$NC?;xvPl!K-$GBcJpAser3z{HBWx7q856*!dV|&TUEP(>zMJWk#%sds7Hms~2O#}|41^cL^ygU8nCJ_Z^c+@} z9#h27gfCw>(zK?bf(x=VYe@#xF##Jx0Y*4q0f|wN%@HrdqfSkoxIq$5=2OPU=XcPP ze1rFw)fU%t2udU$$L!ei>_nhW@FZ|k`ZM!?o=qWj=T86@l?f?T;E{}t+1OmdIq09F zpt5~re#roxJN~>$y*qcwddM}})N3q!=G(dv(Za|z1ub!&6A8m@0fdBoJU7*Q3(anU zws`KmSJ|Cp#O&Oa=JoJvwpWkFkAM_E{?flAJ-C;di=D=O?fq<=`mXu%p`Yg6-TqB^ zzlNNTl9npnG>H6I#-fDV=SXqs6)>GB%Yucm7`KGk+P-@%yJdz;|2jK+6gk>|k#-lT ziX0Yj>C9_dD)!tw%gf(~iviS{{~{Wp^-ZLxM7#;iNLSCGFJ`!q;wYi9WK;{;)Q+y? 
z;UZK>08D6NA)x0-(Lg42F81o;#$=mN3l>u1t)sHO0@utzOB3Wzx7JrkWa+;ezdmo= zxbAb&(W!dxR|=5>VTPRIhXhqv1ekkG!(;jv&2c_qd(jSP`}MAiAr`8Z9F%FB2@ zg}&tUG1&3EExf$5Ix1!YfIVAltI{$HN;N&kzi2`(edRbH7}>pSbp&#~*UVj)7CaH) z*$WN47^b^>xi)-;YRN}r$H>b>2YvP&fCR4_hkf?=3dm{c+?4!c0TG%i2MZa(lj7{t&1;%#i+dThDF?kee{ zv49s;l{{z(IGwMworK`|)MMwvN#?U@))EZIUV2+}FOI;h8UOqQ>L2zpO_=mD?I>J6 z>4hM>HQ!%ot_NzFpjGz14C<_MxzPxN=R#YShR-+en#}t@s#S7qa$f?1l z20PYb^WvC8_VUI81?N8Vr7@dQ=)&7BphHHztGB~jggIRW=bhk2#zPbY*xpVED>HUL z_3fT%Zn7j=9R7ua(MQX+h>1+P!Di~ov19Gv<}tfgYNZt%$#@> z-#*`HoenoV;XlGqmXRm#(tUrkwRr||AL*Fpt+nrcbSZ`_ z4^2j@sv0XU;;zrej#@3qLX5{Jwh^``OpwClO=;aJ@U3xkrE%-Lmtv?!9|ah-U;yz^ z=uS+K231&9{!}0Zr1XrLFp%fX<8sps=HYL#SgIygS&BOEU$(s5Sid9i{l?OLIWrn3 zjYT%*!0epd26>E|K&POL<-0&S zaS%g^fd%9xn+N{l&H99B@YO<@a8$s>DqrM8?8&7BgAbGMP2z*$LbftvM%0<#36~fW zfzP#J1#(svYU`Qo16~6ItDl_UokzbCz5c=QDMQq0e(;* z(&h?rX+AV`y8F{f7cc+q-+m_DWCI&EUKR=%Xy!|*DiIf7*mhJc@tD-nh5S1g_lsqd zlLkzhx@|G;UWZ8#j~}qFz1!u33JEG9e}#j3Urrkr!(esQM!g_Bt=W@zsM|ia7V4B3 z0k^N6-M$_SHqar+&9w`z7_;zt69x~4>B1zQ<;PwmHBQx3%Vbd`Ws4SY5&65^a!6~w zv?~};y`_{gLPT0R8{rv+x=9*jfe?$hYkYNLG){hb(w~5a@wO5PCc`^qPRDt$Xfk6R z<3+pC&bhiQ^d6y4$9=_Sl1~ji5DS@RPt=kM!OzMe0}OhH`NLSj<GEZ7 zX`5Hw(82dE`{#|vKayj?yR}d`B<+|t%ay-KJwr)p-TwZeI3{@3W^&+klTPl0zSZlt zS3V^ujz!4w<2oXjn%B|%nTnGF+f(-4 z&(L_ubBrl!r#n!Att#R&1g=26M0=!2AF|tR-&yDO^7%8uEt02DS*M zR`~E;zR*a>s#+ASHq|F|bLGy4x(UJ_i* zJEjV4+65(1;Uw#;jxH)aYM)t(62JS0=N|(Ol$}3=m6gOn-e!s~vA+@m(_$y&AvzOa z0PhfpN3wqSSoQY|1bG3ff<~E_4kox0C^woTW%e2LT901fr87}S!WC?-ZJb8 zev(rgECWPh;7si;OgvgDa_d>ucHa_dak2%kc)`qp*a&=TCt0_rFnBb!K1th?9w4IGDp7q=`71 z=MwF}{@MX=870(4_jzNwh=M5ZjnL$M<$~g2_HK58%w%5;@zjg@x-th{qff}La6EL3 z7Zd|BpJDd!kKnpsNlHrzOOyykps!fhd^pG_qUH=}XHA-s!5~o7aFGT1-te!!1xr;; z=d#OOF>~UW+In{z_r4E?`Fa|u*3%-5{e z|HKqTL^-Imgv-F7LdG<&70|mk4UIg8xV`UNzkP$>&rkM&A?agzoXB}qMaVF1hqAZ1 zaq73msVn}AMh*`2_}=6fv8HHGatZUd@tTS#pO>f0g)z9e^)^f~2Z_L&)Kv~58lXaL z&f8k6%5V)Jy~sMrQ^zC=vSo{ecjf3$yqf>rVHoG+p_;L10kEh*J8C%`%;Ih3dU9RpIb!GP^Z z_QK3(|K_EvNW2KfOO!ZkfPa!K&nI-Mic>|*x}evtD=AVZtom}cFeL@UMs7-$DVqiU zQ=gUUVWVlswR!{TLGRuWUM=5%KWY4Qr*$TRmUyqPtoGLFdRRq?@ocU|!a{TSJ$_Ww zpa>V%@t@#S`vYB*#qKHjOyk=35=cTW6T`X7q?Q&e1T+qNnpZUGbk|d??l2$)?-eR% z<3Q4 znC#0Wk&zwRT7KJju`bii+L+g?%OI~l+v+0h1b2c`n2`_y{xn`FlDT@KuC;D6?9mG z?xm%>{or!rq!N#e&Edn;(%7r!Z1$nE8LO#Vck9jN=A|mXyAikVbvjnq@K72M>zAR#P-*qY}w+_yU{ z>cxk;apNXlf8SQ@&MaqRjmIb7Vo~*94Dwj{fQ%wXw4zILTB8C|VHkTj_k}DP)KgN;Oabo9Ufv;QDX zTv7^na?tQnHf-Jl{+7e!k|z7{$cK%OOY+`mI`b+!7blq6r#3cZdTblc)m)@F5Oy?NB}5t&!x?v=)z`(?W!)I~$}7s9C&8@r zfBtX2v3RRIhTVVxOS67u6&t93$s4O#QLpzHGR?SmVOGvo0~w4MxqWfT zcP);qF@CxOiY#$Fihg=i>N#oiok#S=B1G>w9K9#hLQ0AIC-*MjW4{f+-c-YJ3 zr~?iyACjcK0AA^ws^h@rn2-S*hiut<^|ybKk9&-{!i(Noy9e}4I`xUvARP_(IkwhM z`};7(5ohRg1d=PPsq_Z|>w^^yGzNxI;jZ#lwUZO5*9m?oq}_zjF~RWd8y`W69ELz5 zVKqNIONh|Mo0mkG$g&+J6BE`1IbA=wmqX$)mHkF!4TM{(4IA^m(Njo8rE)GNVjz71 z@2v)8W40{XVp(A3^8{l(^|tja5`T0ifbNLpsSaQouCgL6){#4no1x5- zGEN9XIDqF7e|?M@GsE#nFMyOFqIf5_?_Oy>*G?kZqI1YNqT`z^EPvs^G1#1pEGqEA zB(6*!8*(#y1vH?)ssdpb5-H-gKi|Ipym9%Ow~wXWyte`V%MUR=|9IBd7y7@p3?0pY z2wrJN|By5izCUXDgOQI3_L?Y|Ji&Vlx7Q4a&xI;wdgFTG>MFp{M;2$jfb8rGoDo22 zWgMo;2j}tF#NiK&pm`*C4VuS~3UU4RqmAuHujD)c*w51OjuT8(QDgsDIi+Gu}E3GR^xx#3*3_ zpYda@Kd_RY!=UPm#{2@lM9AFaO{HICLT+sY6>8p56v2Y54iqM*>yq>GOM(sKRQ~-= zTsu&wz(?%Ov`GJ#x8<`uB42jpUWwGCYEY>%X5(#QJ~@X;je?2_Fh`2mh4?Jp8G*ZC zEJTPB@@+Kc%(8%EdoZ-rrlo19QE5M#_dY=;kf4eC6ifo4A~eEBe9y2f{&}NJW%#^S zgAPBt?(H3};0Z~AA_P6dd>OXL#*rUeOMf7;g^7*QP!D7n@GYR)OR`XdA}LIpp~RSG zlsAdf&r&5slRx`gZy#3)fCK-vXymi8-LKT5MnD${?niS_%o zt&`8&rzU3kt+x>{x|4grOQMn;_3(cT8MU|++c7$t=xGP>6PTXmO|2i&MK_C0O1^~I 
zdn#!2sl))(D1fO890`b7vP?NsamjjjQ8Lqt$(asg{bZXsn8Q~{8mYpKQ$185vNl-k zsq5Y_ya8tn-z3#$*KZ@}`EZ!w^kiK9#$27li{$-|9F*YA#7mJJ!R497a5I^G{aRpr z@G?kn3+#B=(lu`rgM;s;s&HvE1;p5@VvI}VNYFH+xCOxf#ZJzSuY-G(A}#Y_Xfkbs zU$(OBXWwU_ZDsecKik+npW5=i=!oV2*}UrG1L#gReJ+*7nYX_dq(N}SVz^`HdVxbJ z&piG8YXNH2VXkgJ4tPoqB*hIG;hs6e{Ed7YSK=5SLgJ9F0A4VgiJVm6gO`6|Kgi&8 zfnd_Wm71}*6@;;STN~(b~M_(_SX~yZ?KF zGt`Tu4SUEwEI*AFM2_$=z(v@_LT&P<$lL$87Uj^KPhv zZEDKKH{q=)_4M_0jrfksE|4Ac%^wmuEQgG1r^t_i``Nz!3cgG4Au^h}a)^@gT}x~K zkAL$;1IJY3n-fT6+t=DQuI_^eN$tkN(jd>HZ9W^q*))HeT)!1zBLmVOp*KJu7d^}MhBNs zDsjX?#DRUnJA`g&w?OW5@wtNay|C3M&|n!ZFKV#E*W8DjA0lvMcS@ImzjRzdDO2Vg zm8E3-%zAKi7ZV_xQ6q6SIYEbyDAohiejf)j^NX{EaR7@xTQHp(Z&!%E=r%I`Ba>V` z-kCCzn-@ztT;Y0RkP2NzQw!IkN8Tue+-!N>mteO1ipV}zfm8+1F{RKx5Jw9Y%`O^X z$4i1FG`kov&n0+wQZ2l^H|f}<72I0;RjMc-6c{b!L70D+<7$Ou_3=>$a`{u>hNLpS1em4%ut+j`ulef;l2mR~ALKRTR1I&;=0CuKM z@4TIFYY+V~jHPY#q&JxY`pN~+XO0=bXRIw|?Z##_$IzPxKEt2a<|loP)7SAZVoW6J zOT&Rqh2=RPm9dH>^=bS-0M{!y$l%Fg|0nx4pP&#+gZ>f2NA!^1qg%=6IBxL_CDEp} z^aAQUIaE1@jepboFwoK8nuk& z0r9M%em3qL0oy7qnV75SW!s(&pDcdC;KIS-jUzf2DjR7W8>*@kWfHL3F^r0veAeFl zI@rQkYTUgzreqz=0RE7wr>Yh+?MsoiST3D12!&V)cF0``t9bVQY2#`lxV&~&;2M10 z&*t#b+tMVPStf004&H6jRYhNKoJ2Q3Wi*rzFxj7tj&cKLFO%XL~pgU_#I z$IkkU09y5;TN@{v%c1teLW1)|#-YA9IlYXZ%twzH*@D0`Ts)C;eWiG^bS79Q7meQZ zPzvUUowsgeaL8Ln&pL)!I?5 zVLFEvDtULNDvL3x6iP-q8ICXU6X}vOFrCa>e|>793hS_sQpKbIpEe3ZC=rT@#>*?M zJAv@betKq*>>KKYHj8s8YA`xs29wf8Fw`aMCB-ky!R#k59IS9%$I;L@4de1<8JPZ| zK#}utp2(+4l##!IyIHtHEw^q04V$eB{mfrwb)zNwueK!NWAKjz>4kcMiBlC~g4dH3 zuyy(8?d4zn9K!w(*CVk~Mh+(f&CbrEd{nBB8jjuT0I=IUdEAG$wFzIxPh{tGsiiN%!Xdp<*EANlgLn_Un8t-~ z$P(edw(qVmk!)^MSaHNmo7i`x#0Vu z*jonPzqR6zILr$)sY3{RlkS#ouAgiCw$e6!Sp3YhOET15<%ziHet#5-C3TgEf$^rp z8I4llqsPP1@)Q;_pKcVw7$JPw;wa*Y9?>5iB^!39C&YRt@;D#I{sZmIIApPweP!Nx$R7>sw zT;<+e)`c;3k^oKCnl`o}x6vpN^o4X-Yg<@z-pA2Y^*5UzLh z_Vvi1prEnilM5L0GAe>}Gk4*pO-cD!CZ%DP!L52gKIj6&!~G|9>_LFHm>}sckptTM z!GkG)85^Ssi?5$aC8gO4$RYWIWr9xhh*-taKJpC(UR%`b?A)rq8e$K^$TEUVTm}3Ho2ubA&FhId@Z}Df#ABr>_r)bK~52DVLr6k>KO}Sx%xyFQO;fl=Fi`u@KPPasm=$H zboJ@z7jGP#Ob*w|41=#uP5?7tqvIL|!)7+%f)3V(9~Ve!Myol~iVp>qkWayXVM+kJ zCrtz<%=VX_f(y2jU6v%eLsEkQD`a=WxC`%!`p!wg3IuaT?VV;_Hdv3W__6ZbtpB6{ zOSa^*KD9Wb<#B*iA%=|0�lEMA|zepbNuF3H>K=Q`Y_^R(esxXEeSjb;+!;nd?Aw z!+azWD!JI{O#zI{2yP`YjNYxN~JViG;?%l}>;*RqOWB$;RnNUM?zU5sFER#%nRk zy>?(s;DPc6H&-4te~nv%m&fWcg#6lF^$F%je|7wz95&56zhX1@si`H$46gzq_7?Gr zgRC;4+rOKY;w{LZ$9lFXhGeMb8E|eX36CGq(7s7=!V>D6++_o`fMfAxZ--O?MW__h zwWv8@b4(dIlDt^Hwdjh-iZoqGP^2f<8!v+3r5G+u&XF0FCb3H(c&B79qvg0vDu*gz zT>0?sV<=$cs^M;tA^+0M&IS3Q8*S11HA)OgZPtI$Y?5;LudO#L_?6<<+C%g<=^(d8 zN@>gz+Iq9$GYJ=hY_ie%-tezd!}zGQ&a!E<9_4K~rj*1Hx>aG{Y4Qtez#7Ivoa<;#COTDFww5 zVgv3#hI>FFTkxZwh{m;EkO>cK%3^3(C4lm8A`NDb-eDNf*8y| zTlg$zXOE+>%F_6ftJQ*jq`(q92(q{>`m`orz&HcPdJ2^yMH$Xx{{TNYmv=oRU0U1! 
zl{XzTY2xE#Z$i7%eOYxR2Ml$z8dP0SKHaC z+00V>Ku4*oBRF(>fH+H4OyR7H0rjxoQYlCR=`ezhZCfujyS3K5vxHjZV4l9*>uC6ez-awGD zQ0emrAF|KOksyJTB?TbZbRtnjB!+X}3&~DCF?+?Opt+_&gL#mOkBkF7rqn1jTwX-u z2!Uby>Sp#Zl6sRcD9-&n`Z&aVBEp+@H90DmV$t9OgEmg>lr^6YI?gX+rhOW)T zQb^yP3*rT*5`;O0aPx80i(;aHTo;HM>Bv9z_sNMcYDzi)HlO7p=^N@wL>UhamPUNA zC}ygzn*cjY$nX{zWzqm&4u})pnYdx0U8^1RgNZWTUGZH@X3#<5M4?O$0Jj7lYpH~gW~+=M<7!^t?j_4YyY zVmKJv$vAE`lh2;s$acpnvm>8(Zlczr(3y{(PS=0k&Ji=FI4FVSp0kD$J|4)2OZ~Zje7lzj6y>}%1 z@af1$ikJ(j1UK4V{vvN+L9D85lPAAK8;(JslwFCTwhFkI%Pb{!<1wM;_~v{*et)F) zB}At@Gl=&D;wo-@{{@a`QF^&@M3OKwaquYk(x93g-emWvf4zMZ7}5nf;yb~u`$OUn z$&(}xza}xI|Lnkwj_Bb{23aG^4V{I&JN}35YnNL0H%WlvrxE_S6xNfwU1T)v+bN%y zt{vVOOHQ3wb?M7E;$7+pdMi#5nI@D|wsa(_-_z|yR+KlU4QfjA6XmiB$?zzvockMM zi4^BGS`H1G*GnlTo#AfpP9K!MO6aQ40hwxjDFDZeUIF_sS0SB4K|HjWSds+PbyL8( z2!yEUI7T{FCMMdnt9(Ewo#(l*>r=-Ym|N5`(lLv2rqWY?Ezj1fianKLlqn$7O{No% zh8=OgYL+qf^rDSq%0sMXw(N@{(AD|eKg+nY7QS>U$pm34!FSbSVJm`Y}Gn$R{!4ar= zd~uTmc0OdI<}ar~X#2nuiEeOrcU5L@H({~?f&>m7z_4GNUjc8E&J5LO_LWDy=a^Ug z8Ihpy>?2qC@Y?!28ca@b;Uy&Y7gLHNorNG%L+UveU}H#|2?KSJu*EnIV<|J9mg{di zRC4~%^@lZyE^U00-ZI|US3cl<=ou*agT|?6&?w1fn+MEg`lavIxlv!q+s9Xc6X0tj z!;62Oy}z5SzV+c9!+cAGfsc>1lfM*s*5Gq6`f)FmcY0O*b9K|~9qcvIxA%<-)u8E1M zx~yBU0*0oSo$L&y(WE>{**$e6^B5hyI%$_{n6jdS~|4Q(9+ghFesZ<1V<$D|&B@Ltj5Ib@+ zKr-R$Z@!iWX#GCXUQ%kQPLL#QN8UoS>pwd}3w;pj`NZ$9;#O^3VUU8zw)*LR-U=X9 zjaALV%?T{nDRB4yv2>CD{Qd0pD&89(48pv1tF?ViD#3S|*-hjHtiPkFWj9Vrx(k~9 zc{$QzWlx`(8GtyfH$ThCyTHMkNA5&>o*v7NMQ{#fu$48wVt(4#8<8UsWs7d~HIJ=N;qiG)q20eU*to+0eROz?*!M zD+|GTH%g$Zf3b%owcfr_^3S8c_!q-HJG>c`wL5ZwY#;tD+xS_otm(E%RQ`fBpY+uG zy`ZyToVjFF*|Jqzsi=MBHjA391s(ZK` zh#>V>DRX|T>aXIKk)bk{zr@Rk)Cvk)S2`rfr=^cvqR_*qokh_^|$9+3fn*WVb$o++&QWW|~%%t`X%=aKJyw!ZuS3Zc5LiV}kxWIbg zd-Cu9!RLCWT=fBW197lTtUi8eWT`S!N~YR4EW{E##QK3dt;t=G5k zmU$~Jc6j9}L+F3teds_zzWW}~tnU`IoqUkxY4!P|YzJzj?N_2ynFg|{6?it$q6wXs z!RaEiH(_etO&YR?W#a83P|S>iPc@QuryY!GAwU;|+g_F-z}6|l9{AAW5+ZtED(EjZ zX%wEhq!OYV7-81H8S5(bt^=r!|5f_ny%V{{^b9 zb>xo!qSu2vc6g@^s&JmY8QG~yaM?-wF${*zoCoW| z|9pn+Jd7vjPd4wzQ9x_DN};tH1n}Co{wC15?e7lzFN}w%q`sVgzZ0gcj=#n$OLxfl z;=l0MaV*MYm1pOquDvJ4oQ#LTD1Fb;V}KovQ#b-6 zH}FoRlQ{Mveh-5n=yz7DOsC~Gf<(e=;(WlQ=)cf+v($Cs)9_~bv*sFrhVP=un}q+I z-Fkm69zQ-Q!T4>>K3+cYz1hnio|Yc(B+>Q6G5pOTHl z3m9a!AH~r`SFzZk?|I3@P&F?{0tDm@y;-nq>lt>S3SK=KO|oat2p;x{|1zaU`MeC_ ziZ|9&IPaBVXP8y5n>Dt(kf*EzGk{(uq$R7c%k=~3Z@e_iF$84Ye^T1K4B7V5ML}j~ zKmL*}E6rs)0a&a8KK?xEgq#*_1xc>_&e#mLHR?f~VA06fihLM`6WXSRN)qPS(J@(K zuqR-t53@#hH9r5ccgX7mgPSl-zD(Lc znh&E?_jZ%xBmP-f1E72pl$mikamwG@SJCqNkvSv-EGvWC$eY%WdXb`^ESk>tyU$#n z8Dg(q+{R52*Bt$tz++TEo^`ixMh&Ri#R;gk5^z!#0O*uIfc^(W>9@#V{_I~GV*2JCc@d9@?2^{|Ut2HrZ2T);4F9(A>Zk1OvX4Ce zR>Km@T@B53+6Odhz7Fg;0Bv@B+`ia+^va7vS(XS>1x4%DYT)oc~BqITtoW-Wceb2{#`mb(ex4?(ncN$MFaR+kC$Y+@K zXBo0M^vFZgdaZV`J+jwZ*H1SKQ0G1~?A5$YR5?@8KQ2Ml967rx|C{IL--fKZP@BX55?S+vLG<@Um}<4bIQ z-wS|I0img~V_a}{PN`#MWU#vf!n#sxT0l3pi`kHf7!iy$tGe=Fsp4p#ZjtX}v?ROmHN%EmYF3AzLW&8@qo>JBvul z(H7mmcO2NejQE^MAw}x$%2Wv0II~(0zx7THjtu(18G~#rz2?KS{O7G^)V>(#DRl+3 zcz$6K*97-okO||cP#^nP9pj<_>b;*7!S#l$iYDa3)-;ax!t%%ETA2?-CQvGcpc6A} zhHozrW*3XDERy=3yRf~iW=In3F!)jjAT*2X4N=v+d#^+6Mj!u|$WKg*aSNeH3bLm4 z>J81>LPUwgaZ+&LJ`ldk)wLNhDoQ3+ePdU&OE)kYBs(-uxzvey@e6L}fG>0&^&HZJ z1i~_wh?oT%Kioyt^0kedSUw#tv;Z)wx0qrVYU3f^_RQ$M11+1dm{=t6Oh@ley|5GX z{D~sVBM2K52d@~PTz3Dsbe6OO1@8eX+5!s{!GxDI_F?w+6*ehf1T>Yy1?kk=UYml2 zVn&sDj#~?5@i8EP!ka?WJ4{eM;S}{>KJ&Lol;|b$_wL$WRZ|9P5`X>rKCTk!4mRFu z!0=0kKu8)ze|cXmo`D54#b0}3Ui;XH)C<|Iw-rZ~NEAHlt7xPEt31zcBBdeBpd$F( zAk>)>dgVSaHaQvlfM*E9j7dn(j(D9otDC1nR7AMubAQI0&1)OdZ$Q=aO4i@WF%h)` z959#hnuO<|X56;64f~ z{?_~JsJdPcu-ewO3xv)4+cwU%TA<=ccA2fkRf5G%6dkJTRYGkb 
zMu;WLXMI)0eU8<=HQA{jOsZU2_El&cM=@-z?`}ff1!q5y4C(vMgf6$tuA4{wSz~Fv z_2XeLo1+m8y2yCc!||+0)^3zO|C*3J!Q1&5pnO{N*p|85`FK5{Q0xL^Oo~qKDYCGV~li?d5|HlnX^5Cr7?Vp$B;yT z!1%naL=;L+A$UWk{k-o1Unxx;qKa@Abqsgy^7lZshF2Hl@^Ak>lOM zDGYk#FVLGW3S83ogxRcWDB(sRUOw&pQOhoHK$~WQf;VSxr54UEvTORiFghXQ1Gs`) zU1P_L$_Z&(3dM<-_Eh8KD|A1v!aV9S8(5OzTlR`RFZsVd`+wx6A&Q&USl3y*z#OB*S}uUV7SU~XK+pvpr2WawtAXy9Jof<_0{>e828EabSZ&11ZJ#TVAdoCeJ8 zG24W%E6U%8P$1>7?h5*J2O^5j6j%&$dZuK0*c;jrPC|#)ym*6Dwm$s?N29Yy6=pq( za1?Goa|!s%it5G5Nioyy}aM;%O zhM2hh^EcU%=iX@!vBs%OWJ&T#GV46e_gXB9V>GpA=@R#^uaB;{Fo<+*9lq#G;PYAt z2#gv8`Fe8^|5k{p9YaS&TkxBc5Ax^$8?b2O)hA7YR1$gtX)eTy?8dFuvoO(fri89T zQC!sQOYaM*xFHPiQh5x-Te_O9os!`0Hm9_NE5qNO-4A;dK$+IVAEhiKOHNJRT6>O= zDYJ#&HG6i#2f5pFvrNTC`(md*pivlVW2qo}aRU0JVzgIreg{M~lQM2N$2sSa`F$Rs z&y!UM7@af=8r+WL#z`N;+`PQe4mR`g%FZ2WE-N9QWocr4>ls3n6M3VOE)88;<8FXi z@-bFqn<)Ww(03R%-$KH{$;wn*7Z4qX(7(4FlUQSUq%)2R=1HNtKfL~*mk$K*EV8Pq zCxBS&#Pbo`TK}?s!r{XZ z%i#bE1aVPgYW12Ym|rZx$Pn0cdlT0}(W)?g{g0_UZq$s7|L z&1k&80p+OosQKvS*88*miy%p0YVvJALjm|$rqz-+aPp`gWbA##kVe}jg(Lvk&Eu^* z;m{;~G}n<2V;eYx+#_88y%{15ZkDA1(RrOX?ZLuDezIZM5{Wc9AH2&6s#`fvV>_vI4P-s=iIb7#bksnJ`!bHrKEC>aPn?-%qgKB%22^5W=gvDsIOM%4WHqycc&UR zu1IZW0l{|oImEZ|2*%xB2GRP~%cPfs<6)B~ zMx+q831v2@%V=`r@q@;*lW27QwYm7uX~JKQeK0r@jFz2eTP~%;j$Co4$#C9$sH)fm zCRr~Mn0t{Lc&`zmMGI-~K;7rFW-Hl$nB<#w()q_Ehmzfb>IilxOt)BTTB~n-D0_-b zDYzL_lt)Esy^H~CTzmou!z(r9u^tV*?ce~Zm_2Huy9+A=!`{)>>Ly&k{tF;Crhjjc zr0Ar>_yJLiaiaLz=e*2L-Qq-uaa}uRmh9Jd)fd)x(a3`D)vg{d2Dk!#2R`wW+3+Zr*kJ(6A)B=}U$@TuB2SGGD`tKB`whqed{ilT zHY6*cy4*%mOJ6xr%VhUnHm_gw2IZo27(StS!52mM{4weQ4*3}4aTQAt@zs&n{N)F- zjdB?KSOJA-9e*i>nbla%&U%;DsBmc;ia*rxpJLwH>HUA!uT%ZPtI&$UV%6 zMpKZ+DP2aWeeLZg4f~2j-m-Tvu$!~bU#L$ZuckgiMq8v8gP{SRr8l?j2f>=*f5>tp zE0o&aS*SbJYCc*;=8nS;k+N|;9A;*;+qmcF&vbZ{h@0D53!fcAId_c@4|L9aCf#YH z?PHC0%xUd}3rQ?x>$fx6SruiCd6N{sUN)PVgU5emO9ab2l7S~nPeCy=th^*%-&0n- z9$xB#&AdKi)8?bbG(y5nzDilHy+(0{4N3aeWzdGIhMeHy1;fu<^5Gbif(E3KKNF|C;Or?RkB08W1D8$+cCMZvJ*0ZcB|9(9e1`?g7yw>8 ziQ2evOs(`)HCh9`7&Bp5T88*U>BMOV>(ndZRNxel{u)HofH?hP zjy!YNQH-VfFR787~f z(l&`;O1ruN#{291EG2-m-9_Gzjss}us+^cbK5>Dgw^FLV3{nx82B(u$JOanCaKdlB zyMmoZhNnD60w#0!%82%}NBk#kF|?Go)Wo-QkNx}X%sSpj+zc>F`Umu-Um}%qHltWF zjT=8Bws?Da6=NWEd+B!WtX-7T01>zVgGPE$`V{slTx>p_X6Dvg>;QqON~>8OGphY) ziw$~5Ah&Rg70^sOFtCMaZkZ9+*SwWz-@y0$S39ah+Pq*}-Ot;OfAY?5tuJl8J;B0| zf!Ym&m_LV!1uFp0{SUOS(+m^g9N`Z`Jy9)kVI^)b>x-qT_WJ=#Z~*x18BiH%A$=W> z^O(o`3Mm+NOjQoKFhXRnH;0xL(U}GiR<{D$k|wEuJzH^u(8mJZ@Tr1jG}+2cM=b~H zjrQ~`Ce^&1?a=!L)I63VM+0f=rCa!m=2iJMIPZL6zK1IZW%MnOP>#3JT5ed?sLM|# z`k7EN>ZBJBcdChi=g)YTFd8K)-5ye;h4jJ5k(3v$#^(-W8k)7V1g%vY4$mw`9gTf8 z_Qv^+Sq-!Mgr*D4s4|1o+sov#0@bE)Z8T8}`jg?BCtNG80!H0J76PZm?~cEE*Nl=v zAq=~Syn|RHel=V zgXTRY`@zu4-RElq+iBX#l-XCE^}_>)mpAio{^WgCd1LD>q>W*vZEd`5zSC>ui>|&7 zxyd@nfizmK%8>HBRPu({B*b7euG}UJMfwhGX08^e)eZ|ePS-CHB|_nA=#~_`f?Gn6 z*UgpFNa50`37|FP|MNl3mazrC-M&ktd|=dQSPza6Us!AilcIdu%!Km(J85aBg=49G zoD5p#kMe%E@ihg)5ui@+t$K+z5Cw!b41$#qD0qrdP)z0g;eY@0-$<9B)goPM+lb6W209a9Fu~ zK(LC#WSxDdAS`e=h~3hnU3N=Kl|u+yNS(gqc5oqk^GN5_y8GByD5-Y*0UhZb==N&) z1yT|`f&8HZ8E!I$B|CeY$ScQhX6H^}Jv5jsoWUz`(6`>4txG`bF;O1y|om1IBNhyWXwt$ zkTwkG-c%tPrZ;CGh#?%oTjK3zV~OI3H*ot(SZ-`r>5yV&_s_D8!$5CkEDR4N>i`TB z!9OXuar?-6_H0>OT)|A}!^K!G8hu|{HaG8QmjdV8xP&mJYUQkjetJ_{8_S$(a&Uav zFYO}AQ5REA_gjd3Lhi?19by?#hX>6uEXiyGizHYD>AA@iD&oTp zFF_|@cL1({$I?zONgH43C6$-gWldX^S zX4SlX>3e7hQObvpPW`o#B8EZ*LSw@8lOz=oY z5~n>)_h-;WW1{dXrV5mk+#=DX&dMT`XwvK(g3RypVmu7cNhEca zs8B=>TO0mL>(z_wp8A-}TzbBk3^arya$^mvTD11!2CGBPzILwZBuTBDZoYdYt?WQy zG7^3-z!q#ZrVycom@RoWR*gHnbI6aT;n?DMhVjk%lv7q+-F4)1tOY`thyVJo-cNp5 zm`3uTHdasL_+{C99lLsEOAg^=@#Yg8ea;WGs%d@<(gaqiPt*`E)5y 
zUg^WmG5^2*PwoGinHwnb#&2ZqX+3!jprHAJ8}K%6G5KVmwm(rF6QNQ(pt~@>B!K&~ zBbc~+BcUZt`M=CG8>3A~{L&201fSL9BP2r%M+eCdyKBPJb1W{kRtSwsAC$xI9KXjyU*O2;!A( zGQ7h($!EjKZ_Y1WId2{{UYwK~ITN8%CVa`jJcyA(tMU`z@BW+jp|NseYwbk*di4q~7*g15Mx zX%0-5vyN~9)0L#wbp(ChRdKyi!6C23r3nizSZ*n3DGiI z8|Sm9;R#r@yh~m_G`$oj-5IT>KtF8XTkj4_UG1p*h=C?UM_d>%HpIUzkVxAD^kwm2 zt)|u;;Cuz?xx1hQ68({F#F^2&hCp<|$AB>&HU_;o>B!K6tmWJEzkcRj%g+7+FO_x; zqm@70dVdMah3wh^!@fX_6)zZAJ-~ba{_p?p4!E$|~_jHtY^$zSD865B}O!{g$HYaPNqY=*n98~wmV13~a z1Z2q0)w#HRfiz1;`|HM-SistLd|gBleF|UDqk$bl^j6^-#fVYo=1CSnumgI;K~BuJ z^$s#-;PVWRMXm+NpuI-dx6%>LUOn5J9&x0+>9F(9OTWCcE|S+Sa?5gb2vt z2jh7Va4~I=F=eZFAhVGL53^K(n&AjmNSU%hi=Fmb=oEGbLop>4l)v?VIphVl6Q0SL zgDZ{rP-*xum^q*(GK>ZXnk*;u6(w@I$|r1nNM7Q01h~L(_@Lu43G)u&5NHv;GN%h7 z?dBo;|F|D1^+&YdJO~MMRQg0vi~Z91WjfK8Ua8Go^bC!f2b4K zI2BUFoJ_dIhh0=Q_#L@bO;$j4#-!XhSS;t}vg#x|(RI_3+*&jGEN>aEAcAteArR|= z_l5RH)_6b8dcA7k6~rT~S{5i82oL0wjs+5a$!}zc#^cs) z+cf2^&h!X~Q(KvA)gRbmOES&%( zy@=+y+o1NPmn4-az(-5vsRL%J_5&NZVRq~V7}o8(SEYN7_XBwk3&wL)XYg9-blbkVj>U((1^#*Chc^ugo;v#rcJJ*gFMaljt-$&V z-IZ5R-+1(Zda-H))dJ++8HwakV$ zZh5iB6!q*<5t>7cv{BT>D=QH7keN^uJHq`TvKXv?*~&@piJ={06wbXAxo~H4C*zzb ztEM!$h}ww7!#~`5d$e`@h5zEbDLUskP0UsvubVyn%5j-nDBFp~nF^C;dAx8o;{-@V z2VCHKu=mbPFsi=VxdsxJ+hBWo$-*2Ty6zvY?%DZ()tO<6RUhUEoL_>n2urq$3x2aO2`DFM0%&y0?dtjAveo!i>=la`AP;E8Qip8N)pSWc-2|>PBQ5 zqVQ;Eru^RI#T<=Az349X>e^Mmr z1oi6bF>WN;zZiuIqRlK`kx!xk1@7$jp=U0}Bg*YybwXV#rlv%|J72*x z$Jlva_`{%Se+MP}1rJ^Gu;L8xrzQRyz8LQ8^+vI+*%AH=i>|O_3q5gd<8?zJppBN1aV!1uGlETmfCC-KqCu3Ov_b3YVw+?%sgQEGWrY z4-xwC{qUmPgo?e$h4n7e4gKQ&?V<)=lx1pPUKLXB>_TdH0L<#LhG<@qsYW) zoys^zd%8raVpqfQbUgieFwEm;KsUTKcF?T6P}hL^vXAwY-@cdMIUL*8SSUE{T(IR- z3h}h{*`^y1e|c2Of5cUj9G}+b)wn^(LNVt%zp#fS%puTeqwL;s%`}8g(d38e!C*RN1U#{J5Yuezq2fBQ5)eKORBuxS=Sx5Y4N6&T7P@EG?EPY@;J5H}GHP1Uv? zyK7h@mQWR!p%%E&5ymkv!xsl=vaW);R7rg#tIWPS_fJ1DbG>d8&YWUL&W}xW*#+_^Wyf0 zM<)IPvQI|Gjdxc%CnMz?>0Vea25pwSN- zhHbKC{zd(qpIgdLJHnQqNBcE3{j*=D5$2cbzIgpq0kbAh!?~lK)Auz`#6`L+S8#W! 
[base85-encoded "GIT binary patch" payload omitted; the raw binary diff data is not human-readable and is left out of this text rendering of the patch series.]
z9>t|j!HxGyTbxq{@7J0+%~6~?e~0Vbp@{Th{%x2;@?lAogBnhXnnMlepLmvRHUA{! z@-f*KhYa1vCBHr{%j@u<`GjEGCnb|UCz10xxuH%^Y+vBpCArPMyw(*&|19M46@Frs zXIFW{8mse#F|6|j7|)y&35T%A>4fLp-ucd{hC}l@-)95%zvzK|$)xW~k9}YMq#sDQ zoRdXA6mmH|@BEPWl7Z%51!De{Z8>IJ2r8U19EbMNaU~ocG^c%mpUSX0d1+3vs#9>| z=d8oI{A#~ske6~h{7UHQ^fzz_&HPqa?RRYKz3hc^-ss<1$tN97IEwC+*7y%}^F`Pf z5F|v85GH!HsKLQu(Ag;Z^s<25DCis#eUDvv2z-JVkW9vaa3@Ia2|99pj3Uf_l4v>NV(O5RjC`3@bUy7-pq!LDEo=G!*la z)L3At7j$SBiwtM!lU5P}*jSQ622@J1tnfFM<`QTCAL%=gZpHzd zg;x=W(xHGKJ`OjvBuA_Rj1R$mWe}_clxT6Hv(z|YZk&0>iNG<)hYi}M#5dWYIG1&b z3x;#S+-N@@++xPHJoC7g8ya*)i96Kk?ouvsy&1g6kv=om= zwi}viBX%8=GVmC0)yNNT{46LZcj!jZLcXa#RAI zWD4acX;yx^?k-<`QaqoKr{=gEft-*$zv%uWwTAH7AWtX2>${Dj!C$a>&Ol?QMtkL;RWRuva=<#nAG;?4lkp?IB80n z6!PSrv@q%Tk|a6hzCw-MM@kN4Q1XB=5E>owcTx!={mhz+=om}H9j{w>CJ!R`{i*y?;i4bzma{vuRD+5$j55Y zF{R>=w;GTUt7x}T(G05MFspi!kk*tc)nI5%a^lY`S*}-Zv|jOoU$G^H)9}(OFf6?? z&;l06evRtyGQtCxl|uea8I>g{W4r-M^D-tq zDA0rz!qNhxV3|~4P^Kh~WmY&|=I*+fU*;u)Wu9jjB=H6KFWi}zMTSEI(1PPymRRb~ zvQcG4VqVCCE30zj%9?Pdtnux-T-3q|R-t`Rp?`JRkew_W{O|@Z+YvOB9d>?~858c7 z0~yD1D0nJg*-cVS%U9t{;jXJdabHfjq<}oPa4J=(Wh$4xYdRYa5XTw-LFhHX`M<5jj={A(+uh#6IzFV_F>>lR6n`sfNR5 zlLC61y67&dO>0qYM#^Bcj<*>Zexnh!ktS#J(iWSS$QhUFwkRmJ#TWfBB_*~jp*3ii z?6VAreU_Biia^0=*JY6P*&547U8H?sH|aQUFfiFVLq}t6Q=)8iytHkJoNdb~w;e`C zdtcjQb)erh2$qZzYNI~I_Ss`ju#6Kd+h@Gw$rxlzb|^(RJTK#h+_(X=V@37Gp_82m zckEPAhMls$G#<8ZD8o5ReU6;b=fmiWZP#-44b-q3`4)D=dnqn7>Za`h;d(#=Am}ub zLhJ{#c+s=oGtiYu?~(2H9^tR!G~tOSPg-jKB-HS6p^;Mn?J#)y1WPg@k@GnTjL)$o zD7sTI;)~J?UzWY_Ww{dmS$5Mg!cOg&udx?vyn)14{~~SiFT5Axd#6pe!@27a?fRBr z+euDw&WRl2Cf^ZOJ9TDGEwoeh?DPq9RN4>vBpL_o#eu-l4<+_~sK~+Tlj}$FK%7f$ z|H@|3J znqM=VYn_bwjSQ6&HaXYVP6XvQtl2GUO2Vw)p{DnWpd214zgJA-4~*eEQ1M;H(!UE< z{JV^$Cx$a7k0~a!0Wqb`iYcK+Ov`bJX+coT z=wK#Dqm3EqP|QmDVwM@t@oOMg;KRhcq%Y|98}rO~L54LJweztkJ&q+|Ni4Cx%hG`O zEHR4DlIkGT2)YIZb#Z~e6Le7v4xQo)(_9xK2E{o6(Nw@L6;MLNHmkEOlncsmf_}k4 zgc7^Rd0;b@UC!U$xGrCD(ZIXDCxJlWk31y#tBKzbq z<$%VJvCqrp zq!-D3e69(Y5~#>^Ay;x%k=v4}+>wKkJG_A&1G&q#P@9)<(j}a9N&BBXP?VIU>*b-G zk;LCgx{c(qjAe3fo@C%AaZ8@cd?m@ac`9{Ev^!6k1)OyGO^BHEP0lm9K6x&{%X9Yl zoIO5AH1L@6LJ?KcO*t%1yhFI?iIDd~!~{!{BhciY zKdHgbN5N}8$+#y?nDd!mdzQYvuby|s^VgCrPMZRuHqAxb2u%C1aWXvA;Aoj_aIwt3 za56mCcz*f1o3s8`<+lGzBWgh{);bnGR^tNZid)_agss5G>RGx|f8rxeL~43X{G^uh zmAa}|T1mV_owjECce^%Ajmjfjy_- z?-h7M%ZPNRkkL@!0xh3}kA)JeGA=+Wv}h}2M3zZ`SDBQ!ml+w7LY_mJl_Zrp31FG$ z-(WE)w6rbcHWg@t3ymPkiZHLxgu1NB6D?~uT@fy8f~i7XRr&Iw2Pms78!R;*SlJYo zmM!UT*%AtsZB9&yJ`2fD1^&K5a9JpME=O{g%U9Oy4p)EJkfI`ED@||a&$29b3-b-Fld66C$@`}ofCT~5` zZtIcXU_Ba{^-5c;SI};~`ZXil$ojNH)~BHx{aTDz%m!rA-=A*_*X zXv6&4u=K*Xn=;%Z88;}Qbn;K#=KH5#cJtHbwjpQN=tpf^vswrA*tmx`;vhTVSuS|&P@a+69&~bHa2VNl2Kim)z^II} zM;!_n@s<6Mr|&&l9q$o9JFE-N<(2nub%ntDWu2UCBOjFP_>i#AhlFQN%P8lx$43N? 
z4wm>R{|1Gb(=y8G;^Fj(@^R)iu0Y?%CCW~tIVT;-=k_!=S~WWj;e1JMxO1uNbXs+e zVw^^*PN3v)`uYm1&V7@wJZTIRHaNbl)63bZc5;$_{ELD$Uzdvbx*QkZkihtcG{B)+ zaGHNOS5MBVjPI}x+#5TcrW__ur**xPvf~HB58z6s?AO?O(;EY6{-2CX5PZ0f@X260J{le;i z*DwZz8ZjWJHwI<9V~8;fYoUV6?cnY@Am@$o>8`c}o7 zEg*o5d3hBa7D%h;3#}!=IqxPwYxtVo#_Mdx+5< zdjX|V>`QrLUx7k!^Be$3P=y_oL&Y&0a3YU1D2Iv@^5Nr;S_1S#isR?%3FS*5SL{y7;b0ExxnVkMd#S3F&)AG@e=Y7a5eKmvi=LW3yN8 zO!f=q5{*qRmUD;)h#8XTAV*|oa#Zd?e$sa5n1m}a%}JBkoRD9g6Z{)0os-hzq#bNh z)|gX_VY=~AYnIc(out??$;!(aZA;F`NaUh+J{N^@3FlHS$vP$dzH({5t4_JhQqu)2 z*R)v)AAhdT>EGz$ksHFv+>naorbIb6`L`|Ei`g!f zIdi*^_nKJ$1Qe1M(0R=d-^xnot$feKqURl=d@uW!-v#kW`9Q*rl+WzuGi&zD4|6s0 zMl|z2CM0-=N%x%u?)M{&;e)c8j|V};2WNx7V0&*bX>~f>zI0x$I1L?s=JWUypZ9wC zyg%GMCHaN%nKgNyzsLuXx|Ei zlZDKkvMA*%OM;t1okXEjtE>t73sFv?w4rS1Fs(opQ=rx<k>O)uQqmg~nZ-j`d!d7-;(x%;jM9%LiS1Ka%| zK`akKh{Ewe`Ob_{kpdffq0ww13%~r3td|!7Rv~IA+!I*8cE|?gT-u;k$DmBJAXznTq5Hjz~(h$o7V=|qD+c` ziMAw-H8`G(mS6@18NRuZkZpM3#=*RiFk#!$H>2~8ZS%trm>XBBw!>akHQk8v9mof z`5*oIwMM_L9;uU4pzP##d9QTSNsaJ6VX8wp;)5Kt5N`XBTv;EMy>L3XJGtae*0_&q zQJqwA|0F^5G3l{SXi**S-bu;w31%_DAm_C6KF`Mm;ja^U`=a~@U*rum-trasgT5mF z!6}e)N_!nHQiqGwX@%vR3eX(e;%K4o2$g+DV&rI{Q#R))q?6a=`=ecX@O_09PJ?zo z5~li*Fv{_){fKFPmC!mpYaFBI#p?02E*BuqZL@lz!nAy8PrV#wEVFvzdH6La$jro{G&T<9PX@dl zaU|z7j^tOz5pN*VC}_|TXklEkZ=^j3A!%I6?+ZA*gR|-2WI1Rz8Fwt|1401PPJCyx z=!g;D5w7pI+LrHZ_ajnGL=_N%#}9t^MHrQIu}vzd5~_{tlNe>cRy}E5o6v9NfOa!E zdQRfP9MaC`u<$JLJ(Ies9M)zf-7|7T4tB##7Qc_e+yW2tmf6_u1wBv)Q} zF1X6`Z<;*4I`V>P!sMA3(!#t#l&{(DYgQ6gzr6mTpSVGcAo$B0WapMey_F?Pj=vL1 z@q_{_zcUN6auOc#eB`}P2*Hy8I-lg1x(p^ z;CPiD$x`W;xRyboX&ILPQ)mlUh(ZdqWMxt&s!U5k3l$z^PB>p+D=PDn+_I>niL%5G zvpQv2YgRr>PYQX`WmSGv`JxRdU*tKIFUwu^D!4*rT^d}d8!cr2lugNK*<_mB-jyxg z82h-7ojiA+5L7t4 zT0X%b=d`{)C#&akl0GMm-4_IzPVTy6-+W1qi&J#y^bheB8EjwC9{Z{k$H`>zHAc3^ zv+$evx&+ZTrQ;{hwLT>&|yIE zBLTD%>N_3y{3{!av2>dM_zA-~VHV%`;WMTQF_d2l%>7Dw>{p1!6(hTn#c}#}`n6nR zzhQmh=l46b<&Hguy&7-y zDtL)r0e=i^bnRvg%E5?1*|!+hQpd0sDu#K(s0JBhnnl2n65Ke&q+Gz5(DVM$I3;&W0OkdqRlguE;%eo1)Daz=}qGXjvDkg~!yZV7ag&Y!s>la$a1BQO?Q)v?7U!OY2levx#czD36n|QNL%uTU%QdMB}wRcix}PV0JjEtCrr&d zeht=>yp#WscdReTt4TRZJ|Vxb1mq6^|J$tmz7I&zetpumw5gtJWBogP+v#gG%9?5% z)Xg>y>Sh~>Y8wsG$K8#_pF4>a8zzQt*?y-Pe1` z-zzPfUtz?0rPt#t&C_0S;R-HXA#;;MX2x%gUqFsXWRe7y5UV zXI}Qq%bw+R6^f#J zR)mN~Z(t+G#;8Iut|E*?b|X*S_9XU3!%w3vw^2i4`x07%W7-bn9T}A+cF5$(Q#T@d zgX71J*}`L?ryX$Y1&?v_>m zE*La!+3Zo_iaiQ{jcie)6Q+?4?LGPl@6qI)!*=h}AiYnb?ETs|r}u@^%+7}-;6B8^ zQG)Ix(iR_?>@JCq@NeA0`>32BAC<;B3Dy2do8{w*Z+)Ui?}e<#=Oop>pq=+cp|aDX z!pXsRVr=I!-061bFv$Cg;salkb~{}0zSekPSA+Z_l=CkNj+`b~zAlsG>w-4lRNUb- z<8?Y$_%>U(!*;_7;k&FaRbRf#d(lnbWBCqb7X6TOs-wn7#b5qa0OItWaH_@~YIQ%8 zO8c3>+|T8n`i0ytzZli>L9OmIoA3)}K|4IZkvci`;ZA+H-$;iXK6R&;r4#BpH>Zxp z@fTS#e_^3sBxlhhosVA4BDltiK7o1k$y0~{zJ)_HhUEUouvRCAc^1m<7?tbl z@`T#~ZFo>Z7V9rvSr779gI?T0G#%Rl=-6Rydt-XTp42qJFmpEZJ;R=safkKda8W+5mU^y-nMFxRQT*`#T<+g@?$uzHIB!d2w0ku^;$b<%& zrSTn^d=%KkBRllOqCO$^P%tHAOG(;8_G!l1Khzy>Qg59@0=y)LG>5f(IV_pX5uPPK zBu6FdIm!d0@<8&F@F73(GP=j*n1n0GXEaC-#dAVZk`wYZlB4pRl*Z|0I=Kyq3L zlheG6p0qh5=QJ0GHHKwvK%#HS;dQRbz9l64$tiS_dXeigx5?F1Zb7Cb0THPQ+EDTdXe8) zR2Wq9?7QA@&bFMh)F@{#+bO@x_~dsP_rxnsyy7Hq%qLWY%dIzf!`^$O^Y1vl@1F$xACLiktV$3+&Sm|7 zVY}Oq`nKsl+v(9Bci=zi;9zv_8+m85ryOm~f8q1z#xr|!e1D*B@&6j=9%}txjlOlM z9dD6eP(fE!QBP7qB3`X%ziQf1)x!eNdRSq44Xsp7u4}y@T&TI~s`005O67lYUi1?t z<5vz4UMZ`4eUSdXK4=WDPxAC%e<<*HrD)*wMW9&N@q&w1dgZN^Ug1LNlOZWcN9mWi z7reDXEnOKFR2Cwh@=42AsE;dSa{bDV96%a|4T zU&^JRr(A013)JF;GL!-jW`P*Iz@A%fg_?!Vx8*^|T@bE<-%!4@S&yt4?78JhX0|-b zg(%M~>WjR0>ye5WO$LnPSL@R>txqy({St6PA&o{phKFFZk~DmJn-=cc3SRa49ix8Bj&8ILWLX-mb?jIIZuEgQIQfl!j0Pva 
zL7Ze~@;vNJsBByv8OhuRfr(K$Vh^nE!>q;&H>>@SvtTdG_(l3=pr`jrsvYrkYNwoQ z35V0rhh%1*OCtXyEp)DGoygBgqwoc}QcjHLOY*>cMZn}Me7maU^EJVW)3M7pG_ODVLKVf}|ul$s`;iCD4Y?j|h z&YW9(zm@mnx3U3#C!zIwMTvedm3Bz({8_T&&urEY7WIdWn*ZPpG~W)c{DOXF(JOTd z3Wb8p|G+1RL5(2>HT2*xGN_adyp2j zEo#ik90VtjK_8^xFfu4dkIxc~;8ZbIg>pfI$ynoCT4x7ocR`pF;kn2{>hA zTfR|nei)!<>Z}JA?n=~s(%Z&@+LtMx-$0fqWQDR(u(cG?NH{fLA6Sq{o5( z5ZrkM7w<;cxSlw@N%BA*O4jpmT|+-)smXFrTJt4rta-|A zp31i5DQoskXq1$9C#T{`aZ=vMH%fZsdL3{i`(d;j{{Mx-zhW5Cmy3Nggd%F=pPc(8*CYK1N zCm;AcJ=*x($TwNI0fd)TBI(wuB2Zz9eC<- zZB9)FTRovbv0j#9*UM6gdRdrM<5Jh_g6Vo!JNg6(A`+uWcetLFnZJ( zq=B}hZL!@0y@AGbwy$u^U{bIHL69BFg)({v*jIMwnC0V$+D;`#_H9r97B~ebcoX;J1m)a?cb&ds{HY@%` zD&l1N`nr(7H<-niJPzMtn%fc$-)7CWc`tQ<&b5i}3lW{h@(#DXA4=ApDto7Sv47RZ zI&Aa)jWzowcgxRs*%@ngE`#D15)G%|-Y;}~=a>B3r5p+;MsN(aU$H~i&)N%ks+{8* zzm@7aExi1mxlv{BaLDHb7e9~*sAk)Xd;^yvg3`i08TFZUpZWQT&TTQQ`a z4=xB|M80M~oDrjI8V<~|L3vwDX{m#bS21;~f18yxjaddcD|aU57$m%gv7qgaMJ;N; z{~pW2v%pP?Wo=e)WE51z1qb7?CVU9a$YWC?7n=gS*pj~+TMT`>U(?*?-?n*{#Lw6j z62zYLE%tTl7qrBRW1&$TOFV=2-9a5|z>FBD$ip|b=SV*osUboiva~f zz`Y(fyo@dkaVI>B2gdLqZHWijtbiFYAYzLro_&!Ilk{%MK7np>G?{}v-3>@e%W_a= zF^8oLNrGm6(xN6%obgxN!3Q0dspL*9L!R;8bH!PCE`y!Wgyy+0D$$m_ z5-KMyN?uDh6WYS0&?>2}%3DEO-XW=XGCp~aq!N?mqi`o_dh|wdl5GIcfb4NM)%WS$Ge~YdDQ*%FJW5?>1v$|Hd^#FZb7osx|Vy@;&7sM zt1YTN$qlN;l0QODF{Y)Xd9617!in(erELbF&g|C zE}oJ2VpCFYoBrHgDC5e*W+hz4<($DQZEz79Ts*ccs5S2CY)x8dBs|-eq|dhGt{9!~ zj4Wr{lRDYHcGC`Iee8grIARQ6Wen}BJQt&0*GSQ_6Cr_}vYTgY3l_!B*iE7egH*?^ zCE#|glO?02xp8G@_p&I)NvJ*Zh98Ov>}9{ZS>7u>cB=H8k`b z7tP{G+u~GT`zdq#CeiRSZMW0h)j6y7i%GrrQntk}S#>%QINe7a-0?f!a5txy-4TXD zzvfi=`Mqp_KM2H}oE4{3GkP`oASo@VW{N>6Sx~|hL&THA^7dj_coyWO#;Eo%H22~2RT7O--~ zmSiWkxhve3D;C>=_}JE7#E#s-*yZ2$g`RP^tGOM?%?wgQj#&~;*y2RdN{~kt zr;Lo!zPMoDE)aq%DNfw*GU|)tmY02(v5ZHSjW?$xJBdlk87XhlvmloRVhJ!M^zXSQuPxWKI=QCF=Q=;UA(_lg z>3rh2C*(ozDX)SyJTm;|SF!;~U$vxJLf$ZoI~MAWefutW%_pI9V$||kLi=_l#d+`3 zv+uCUz8^^a-w6yq=;-i4@5f(Lnp?Y9$kpyW>56juvn);r$m}dPZVd+;pC=kOi<6D- zXB+9ebA1FebNh|YCymdKd|qiZxLzIM`@P0H_Y?~Kez_as{82UT|MsQ(-QVWBpZ-&~ zk$;ztw@!srElzP-*D_44Yi(K8t3#SsZlxr=o9G1>ZouAg8A0y8kDb z^m;ETeubOA^vdll^tvg1dbW^5Trgr~M4(m1q)CMWh%zB%EmN|;Wlq{r=4ENiqL!pA z3I59$;ceOAWt#%SvMDGlR1uUD8N6~T5Gb^sFK1e)0=-1Jl3JFVwXW!v8@ZC@mbi*; zW#xy=ib2q3J({NV>SYF3HzB{kUb7$(rmyZ4Qui8yN(RTYcCiG2G;*d!C zIZMLHqF?cEcZyK`o@ZfF@CPPD<(rdftF)PiASxHpP@sT!Fv8Y823b*64+@e5AVpS>~ zYd0D?gDJdneS`U!3(a}-y7jQ5U9Jaa+^NjWJ;AScK4vMHXOmK8~I zKoU0ITqOQk)&}HfJaJNflA8Y9mf|ES<4LYYa*mPvQieRZ(jeiMOW1Am_*@Ub_L5wY z=Qrtmp2?j_8ZqU?gWhn(N?vP^lQye)BizXwLM-@>5~G$d!z8J(dB^f$-|~SOf0wo- zzSJ)+1ngn~|cw!ISnrt{r-(xAg}+Y3*K#MtfXVq%*7ccJ7C9#its~@v}<) z`>&1phPyuBxZ7B6+<)x#cU|&7$W8ylQ}??+ed~Vu4^{X5Rg_i#ssH`^diS}%OF>#+ zHSpFq?OKcE{c1?=MK!8jt7v;x(YU92k~Gxg(#)FFlbS-`*9V#FSE?>we+b}TsTz2t z3hI@++|r|cFZ7)&lo}Sec*}@Du#5<%`s@aI9FUA1d2&>IzM0%B~!$ zaw1(RXOhTrE*nzt@5+_dtXylU3nj9Jqn1KlY`GOAlt;l*d6w-rD3T31wLyg+Hl#t? 
zm>wup0s?#Q}dD4=!PT{zd_$6AH>|gRVL;D`OC|Qk+R|sNR0uvDX#j21tR;5m{Cn1Ofok;{$ zhjFBhjjvME_$mmBV}AHV+a2dp^Z+&Do@XEA4h4w<@%>1%c$CP+qt-Wj=p?e6qyyF2}wP-jX@ z-Fdmd9hzvQfK1Hr{nR#}ryD$<^NrKag$AL)QlnAza-$w&cbfO?UGw=sX&}E7=jR_( zYw@R9xk!iIPyf`kTE_~EThI+uP+3-^60vGrOIytdsj5|>dqvl(idJhiy`JlNK~4=7 z`%ezBf0Cy3dM`0A{aWxs;&CBHJ&G}9e}LqFkTdYO+&zYqcKioQv=$cIF&>N zzHyLg5;t0?xRZ7Vau#%Gi)Vg<>$i9j5+prYl05SqRLGK4SSMFNIgO>AkxJ*H#6IDJ z$Oo#YHHuLruql~4gDgq(RR-Armr^6o_cb}v~^ z62kIPb9;vo{ryDt;-g=p@c~iF$G&9!<3LN@Ue>qmWi3>DyJX}A07KW@99{w2U`ebuVBp0#`x-gPytK~@WbwrWM5LA9b-sqU9Ks9`Cq=LJml z(rkB8>m`MR^_O9N`$a!d(>vwofWH078Sv}9tkdhWR{izmrTendt3@qjiI+ZX;h1*5 z5Tq4YOv;wtTL@vxw&bB~E9faZjgMNA0wNabcgwj{q+IF{ryysAW0mqK<8Jg>v3?TEMH@kl&zAEu_(mLq7FB zp^?2GX}lkhPkjt%hdy=&x(oHeiQBJ1{abrX`qt@HbkON*7_y;8+lTQ+6~M$5q29zj zpQjp?1yhZro4H09I{%gDHyQ?XS0UqX2^pulCh@Pm-A{kl!nEl1-GU{*^(=v|aI`DX zr&<{QEKS5>Ls}Y^%qHIP2a{Er~GG+MByhF{67b#yZiK$ zZkaz{^xjvFPG28opi3XNZ%_hVhIwFI@L%R6S7kveQ5L0jWktVM$cZbblK4X7l|m+M zf#s?|VpX25yAvunuH{)sWpoI$5nW)|_*(bhjH-W|8`8J9^|qjuw9oRRjS?zb<=<8% z54Nf;v}1)$b}C%7GZ`j>tA%SAbjiMN_`{m4xoIMh-nIYvpp zfxPE|?^=g^;)kEKv2XBRyi*hMJ}&5er~K|?Ua;~pFAx6%0Qq2$zc>c!^hlID@Vs?; zPrG8&;rzUF-?-)+YD6IujqJv$RRDOp0pv4{bG_LHU@wmG{O5*2SZ(mEtTw=YPvu^J zITmQ|bwB+}y3~Skw?&C;>qPp|B0IlY5>8ZPk5}IXTQzP$&CN{Bvo&dGH3jfBRIEQ~ zZT{LXB!7KU7+HqpEEH6w;9!+)DMg{?q;O4NNFpn@JbNd}vVMWOao%Y|z}C1h+h%05 zZ5D{zmZxMCSlH2VS6b{yzM-A)?5PCZZlr?tEYPtRDUkPS9h|IAhvviQWukpvfa-hF zXFt;VI@IM(8ibz-lKo5z)#;}~c zn3lrFqJAxwwOO&OXf8-o4)WiEE)uaXmm&^i#)5l_xD|W{_CC@5WYu#JmvUHpoTCEY zByi3NZESwlO6H0bGC6+DFZ*4A$qmITxvSO5eFKi{aKoo`&MEHpgng$5X}jPw50#wE^b2^`um4;5F8|Uhw7$yKX?>Nhv@QfS)rgd|BEPGm3bmS*##bA1Gb-q+ zs%NQK-7gQR9+#c0CxlS-B+pJM*sDo7uGgi5Kl=ojuRr8=7U(TYpXOF5Dl8lhmKz~o zxfNy<8W5BplBe=R4p5=Yue?Y&4Kf*<0)3}723wGIvsDFfwx*vjXjtq}Ky1gtSv!`J z8ZEpW9w+aUv*eI3I2m|O2;g6|Z@#Tx^F}@;`7J>rP;dbk^ZK{oASU*-u>lEn;Ex5A zS#c{tj7J5%@uY|(d$hhu22zfJapQ6%a#CuZtGbrUP1&YAl=LOZnfV09Jqz~V*0hE1 z5G}rQo&FAC^*f22A6SbI(p-N*b=D!w@ANe^ez0-HI`tVOm}$i0Ge7u#q3Mb>GT`x|<^Y2!>xbT;)?x%k$hHPER_pIpJT+#BcnwFiY7Uj@a8!~Yf&U6JmLcO4+uUU+G z?XGKdYAB_DayIdk_8nzVzgD);RkCGm@3#9gAJq2ykZvY?SVq!EG-Jmk`;h>}&*jkj zxx!SZuF5ITbMghES3ex<64#(uEjEP#0i##IE*%#dWPI16CKWVE{r}svp!sbz+1^<2$o@L+7O-lT&-h>U86B zWTsJ!HPZ-P78>VK%MI(d(qOb$Z7^5tHj1R4`ai3bt!{w$$^Iikh_Ynj8F| z&>p=$YdD4c!h-sg9Z67urP^i$c(y13G16`9KvH4QyBPhh?T$A*=qC&s1|yXBQ3b2c zp`*`h=)Ndf@o#bqolNCek`ay-;d+qy7!-pB)PI$bj+Ft zH9vbK6+Z{G|33%hKfccC!mV&7Q_d6x+Spik=(Z_|x33aiAG+^8;2V0cZ{eDq$jNow zPUqOb8}%YvGH|gY!x?*8zJRqc*rA|(W&F?}gRaBbFa1gy1?PzTv0NJJ<GiWx2@% z9Qfs_{IR!r4f5?=OZ`rT*E{{g->KSqKiBHK-^&nvoN13go@H5o4eQx9E&bZCY_ykT z_S(=dwwJY%9rBGkQ_tOLcIM^%b>OV)d={c~J~xclc%xa+^cku*(=dW_4d-pX5mwGO z`)N0x-)rz8ABZ~X_c~SkJ(&gSw*8;-Q(Lr(Yr((My3%4)y+ZJcjjm>}WOGuaYDp4S z;p$YR$5)TS^_ps+n)W;Ox(>W*dNlup{Nm?;%t0BJ*HgxIYE($cE?oB*DJr%tU&y$C zvu7=dj|ks9+*xSc=}k0F zC8ipu6*CQypKbVMa}CS0+&D>D{*GGgHtqoTsm$5zmp}F!MJvB~?Aok9-gQ6y57prO zZK?a+KNP^W4i#y%z?asOwx${pd{m=a*a}&zxO1&wE2-eWsmQ>qDDtbGMs8Pf6Rn`bIxo-^)$;3A@!#Sjm3U1NA3>S!l*v zwj}T6QSx4hLM4p^vy>h5?F8pIDFQufN6RV`Yz zDmS&}E~1{4dsZ*VXw*ykx0?K-8jD|(7FyH2vPQydQZh@gq^pcbc&uOF8th7TCi%2` z8AKmJmbrG03&}?I5<^UB9g?=Xd80LZBO&dL9wu+klFzpnYy$PZ?-SaKcj!>x=k)AH zzkKNr*jYd5(fa{O^)EOoeyz&`Y@>T^n)I|6HpUyWJ-&r(k2kVSCL6^KQ;l~{HLTQ3!(sb8#Cvv|da=m&d+UwQa>stBRujM9 z@9M%IuDYN8Bs!SC=-mAuI+g#2>P-Ksi`0M07in={);iZlw=O5U@YLcirS(IHT@|*y z>X#E#jY=@9IR$RjoItjkmjhC9UsfSu6@7T?AqjCkk1Ebf*y;sAZ%qnTy(V|^=YX8H zp98WxKktPBug#(h$!Hmn$`lfQ$~@MH@-W-gjP00@XBxV5tn3s(Ic**MPOIY&Qc!=E zwg*?rxhUh9^cc%Sxt@6~v+{N&BlZTV@;hyT-}kf^??;;3J4LYXlyHA=#Q(9Z<@;FE zct6(lvJY6CeoZQV`UNXPo1P)z2Rfe{_G+TxZB90Hak6nAHrqHUTxfWnON{_)x%s(aKR@?$HTBFLFka 
z!W&)c-ahNuw>8CcZ$IRZzjKc8J}k5Nz98rRedV~j4)5#w_yI(Iz*+K(l;U4YLaKJJ z&W_vAyS8b&-DauVhw_3t0|NREg=-yB@;X!U(mOL+>dvy(p>sch&6{lah0_f?I^EFA z>4t5dCc@j8Z$yEs4V%5&h%68Ky1Mu~9c=tTcJ@!|uKd@Yht)!=T6aR?*0XG7 zMN)e;qJ^!-wM!L@hZV|GP0Nr~8?wO_!dO9!R`rjv@AehBp!!Eb#jg#S4pz$q}@7pj!4Px5ZarEQ@{qjGVbi%#Fv-26po5QAFZ zn36pS(z=3vG(iX!U}ap(T;zZtHa`h2az?{RlF#4Z(0Jo4|LwDs@{Rg|_c5*H`f|6s=ABtoE|jp}i^LYHwjoVQ6S?2@TrY0`xYg zpzS-^j84A*veU1f@2m)7JJ*8E&dUp0JKV5{6ODV}$%e-})x23~+#}C4-ZS6GcA1~% z{fmuvF7NRDN&_KQ8yPVhP3_m6-G6QDHqd3aaqF?yu-pgSA#RTFyZh(5&;G4)Uw$jn z#or4r{wN>(-}Kzybno{M?S1P|8`Czq{dW}GC>ushN^kKU6GSet&VktxuQvFh5K54mt0qmyx~bQRij(=sNB~Y zLXDbg=6XtRs3}RS7c`-IN$XJ4_oXH|yIu$V_9Wspgo;1mNBGH=^v}sS0ybgq9=T>kH>S^2B@pZ+@5{g1z{bU*!F5&z#+ z+w%A4?qC0*Gqit7F%{J)i?QFceExsW5^gRvT zFL>Zyy64}eaNf9%qCz&Pu?@0tV^dyv91Eo4cA-0lJdhCNfgHD_quLu}v2V}>zm4h~ z^aT~h#PLC9)Go$tGa9Yt>7Fc(vZRy|;cJ3Ruu!9ZvVzA*VjW?2YCz=NmjRW_I z#sTG28#OE+34ecjD% zolE^%lry!y%fh#y4r|e$yY<9RK(bqn=!l~NWvWqm<<*paxS9qw=cHZLlFkV#qLb=D znpok`)mXuLSTRdYmzSEX+nS=cdQx`3rqyjtWBZz<+Im5eK~2tHjfJllHH&&hz)-J9 z!)qvV>Msg)YAQl%+RWB>ICL+1!_R)L+0OxK*Uy13-7S3O9J#<$RQ9xHHZNqhM;Rb{ z(a?js_MmJfE)>q@njD#=B0RaFd+X8U-{2v8o01>+Mi-s8OZioAkMgVDCp5_SIfc~k z2a?|psJK2zf&DlV`2QM{^!=hT=GT|$?n3>73bEZI0dM!nlC*m+yU(`!B;aj02HMl3 zdVoy1_BP+rjipTvU#DlF`)?f>hC4KS?vPpCnGo=Hu4P|3ca8g!@kVjjL=)~BP&VC& z9%mYU*i6H3n`=aatBnYAwE<;YjY#!C*_^+X2lxAju6OnOm+q%O3a0*~NblbTVtltH=nb?j?2Alhm^&yQdzN)YaUT)Euyoc}lQ$d)qw8ZybA2jMtS<$X^`%Vs zPfoF3Vb`!b`CIlwy6#_Ow}Zr_xYChzJZf(FNo$sHW8{%^=WRec^foLdc$*>Ky3{P* z7oWNt@XoEsJJgjQeR2vvJ`q_>>fe4rzSail+Ay=UH#Ot-rgWzbX>X@TcA`TR)`1Yd z)2~JCP}k6zm67bso^)r?Atj^p(l}%p=Wt|WyiqVP+3*~u8qZ8OL$8KMH2sYOoteh_ z7aD52+)(G0hNrR8@TT?~-pGNT`S0Xxc^!Y&mFZv3yGr~Qsrdg~>wfn?#Vq%C<-z?^ zA=kg8b}fk5TTjc~^{T+!Y7o5~kxN;P$}m)8GSbyJ+Bz!>TFvrr+=WydlHH2xv+82Eee;-71ACJPaj~^1QUvQ`Yf)}CPBX6eNqj0+2Ces5i7ph;P$EqRmZg zds!}58#ey-rl#4U9ZiRDuQRJ@c4oDxotMUS(0Ietk2j8b#v3liL<5s18sAShiaw?r z$4WB|mt;m5^Isctjo(^m{MK^gJ)avc%u3^#l}4J@Y6CC#8plxkePHB%BL(H45y>4i zF!G>rNPeIw@V~K8@A>b~iot#(#l^hPGqE8=lDYxvR*AH=c^t)TaC!yu8^;a>bYu6YE+F2WU5K9loO$9 z3e?2ytybmeS0trZ8^We)1B@o=zj~5t)O}~&Y1aKxje0^jRxgg}0m8w0S*IKIvXr)_ zAxcf4T0ipMN8$O;0hyxLD@7QE+}{FR7d$GX>z^&?4YsQH8hIvmtR1prxkg5(M0*tO z`mk)E!w={?S{(1xzG1|1RZ|~wQa%s}x z&Z$FUaHm&N-RTt)bowMloj$Eur;lg3^X$N1(HYS+J7lADrexGR(7knDIH;c+X?UhX z4fLLDc#e||Xq>$y_m-*0u*@H<@y{7xACM-?Fc50xzbnc!8S?O#535&SQD-+#zT z{7tF!{}R%)zDjUf$CAR-DbLbKt92=HYH{Gwx{-*qU_@^{$rGzME2;(sMHTm7 z6}1u-+`-ipSEEzH>}ploSV5IhJt-Kio)tgVv{0-20VLe5Y-T= z*I21~N()ubV=UnDu0Jb`t3ONpYpT9#+RoJ!kN%`~{B=M<`714*UP;P$T^sLOsnRdB zE1Lqz@~AhIMzbl%I30-dK|!a_$V5AoMNSEVQ<4=FTnA<$=Jm3mOMTMI@&7aRUfYcu z+ZyhxQ#p|mDJoI{3RKQHbkNP&?Y2GE>SK3r+pFCHRR9U1NLG2AwIAU04gU{!7!p{R zHEZUYA!kNS|b!T*% zT{?Jo>1f%lv%;A~a+mb=F4r@?Q7M+*g3SEhB367;`l$ExI5@%{7xVoQfzv0-&?l3~ zle+4M#s@3-F#+i-GJ!m8rTwBgdu6Pz)fSI<#3n~4+Y}cp~4EYLUky#jj9ii zAehGrf_%Inq$`E!UZrqDTPX77jBj7g^QVmg3?$hl+!I2LKO{}*#AV0ePG)w zxCr|aH&LHeGqXB>Gq*eVu&K!= zGV_S(c~QKfiJw^(vziE-9x-Ab6=X1U547Wg*N!7Yr(|W(ONe-ii-e#PM5L!>v=v+xEOvW_wRz)b^8fUz_m|85z$RL$b3~oo05Uy=C+{ z>Wq#JAnVX@w=*U0yF-}1b0yBtDYee0<%8sO2+en257y0_k`noa$S9|GghL*LgN6N9Q2|MmdkaaN=yEP;$_eEVw!@JB4?{3MN=x$4#cJ~x|=pKl6 zy3E|@UP@1PZv-9{gQ-ie^%i1Yr=YeSx4C4 zkZJ8pq9~6=?wOC|$3^y@ffJrS-kzc-j}_rbZt|Dni@?`HNgE1uEe-e}VOWe4Mr3XU z9Op1LF`%e0ro9C^2Zafpe3;N-1-h07LaBi^tf3*A43A=l@K`X-$5xQy<-%2XxnRoA z7IKg03eVMoSzj&G#nol9|Cl9Ls2y4;WJazOo>vR+tQB08{epJdFQf+?6ux!vBKWsQ zl$P+{%W>UWy;Ekm*Q`^ z4}U9<>6^$oZb}-&hvLMTI)L~Tmw}IU$`1F497FH zO*B+{8)T)Klaw)a*_&olqR=oK#ypGO%`+N`cGh-WY;9-sf}N4QlSe6_gB zYQ2*DYcZO%Jt35|nPt=76_9P3fwoCE$Vk!1W@JETbxDq_DeBAiiCa-il|9H0%bs+a z9X8bA>L4EykomZPbzx+ZbSbnF&$}w~t)p>4I 
zI>X_sII0L7VO7V_NLLp9I_fuEBpBR@Xwore!#(J@-HVu^dm!@cO~{q*aTdL0ZKy}4 zeUAj*-kxN8?^5gbOVV-u4Sm=XGWNqdZa=I5lphtNdm0z}vSJRNXrW(`0p*D=`E}WI zewzDa z>s9gAa`9#VlsuXT8Dg~ugx#peOg6lL}h@Z_BT&KB0 z46Isk>#7Bps#<`##vY?uA#OKcxTUU35&pQnDY$j@!fnq|LBX#S6n3MqmuBJpy+W?r zUIFI)g3GyIxUoJF4F6XeD!PPfr1}4-2>zW)4c;lSpF9W=>+sQ0{KlB{NJUv2XTd7n>t#EORbX);!3WGp{Ae z&1<|~l2YxsK5WPJf}K!s(oXQggzREFA*5RD4?DvP)b82^AFb2tAP)zap%An#1^YNdmtidhlG$`7N1Qen(!qr{RkylfoaC2AlWC zk`MlbpPk4*2}3&LFd~!$M$ZMJVu8-vVd8l3iNHl*Ajve8b((=If-o!U3rwsF^B5A# zVy8g8O<2*vhKBYQ9)-#9f{w113!%>ONpyF)klj!zIH;9V-kB?8J?{9!3zvQ#+Qg+=|0%vifD2C~$5 z*j1s=Qru!UZ?(dfc;D_yw%S8s$5P9bFq$=N;ZYbBod+$qL0aIL6*|@ zIc2~~%2b0ESM9A)sixK}x^_p(qV-0GQJZi=wj|}2adnU#i%PN+*>u?jf4!15lsyUF z4$Vf}57BE-&-#B7Il^7*9bA!n3tUK0#?L-@_Nh2Q+GG`kxX9vt0Y+_+qB zH=%=dQ__BJTE>~15qY{Qf2|30cOo(CUWrZJ8<9qLPAt-;xpVh^YCv4wdo9+Z_Nqrn zqc?+!o>OqBM{A_snHKB47IykX&_0eC%oY;+KVLzbd)onI7)9BsKiD$lh;@ z8T_t@)-!|1pX=N_oE4}J3B#hZKu%{E*&pCHjEcCzn06e-MO^Vp=&?~RgbAXNLcDpbpaRQBsOPzYt6VL(!Sj48 zsu!LY3W=8uxx_ziPYdd~Sx}|TC%)Az#M%!EuJ}QrwC+gn{J3o$8T|Igvl3r^dOc9d zKdaN$uTr|d$r=7#j`aWP4)#yQzyG6K*3Y`jjQ2#W@xB;5rsOg{6(z;zV(|ESazH5Y zjl86obiSAol7=)Z!#%wj)(U1+@mRxfI5Q@xX}D`MGomC@mTom=34BwPUNd!hD5fqF zHgz$rq5X_m(%uY{AxuN6(`*9omSmH86m|@`f#&J$06TU_n$ZqR%G(hc|8`v9*l}^S zrHaE&NTJ%v7p+TWhNYp0txI@WdgNHrCGDaDigrb;Y#V%d6L82NwO8BvFhc4v0c-HM1Ru2aU z;2v~Xj`4_H8pm{p#hYCQk#$L0=wkPF7iF4sw}jMgQ(V}kd3%=;UOh5_dSeo8J#J)s zQ`jTqQ1r+x>#d7LdPg$GdXM^WpD8o_ac#bjxaxCB-Cq{-`jQs&bc6JiB>IxP7|)nJ z&-5lgDwOzT?afmp=gBzpkg{K?4o=9gU^g@Xr@@D}WTN?PA<8#(0RC8>yFZsG_EgIF z`-#EV0wo!N%?AqMLrEkThNMseS$cs)gfJ`-A7({o0V^=fOFjf9nFo6Hg@|kKT44}J zy(IPfPc$aA{7*DqxVkPEyy;3IyLPr1Nhn;6R|_6@m8eBDKR!_PKO+2v@78ODzZaqq z3x$l`g~E--Vxc~5xsW8bTrgXf3%4PS!mUT6@P4BZ6^RNFkL>~;ngw|F3XzSyg88#o zFn{(7d)Y6nb0Bl?hnb`6_WzMd^G+ig-YKH+PM7iTcu#E#e^PGn&uUHbK{pJ)%k=rE z+k`(9)A)~K8lRPN@LBDYzNoVQo1}ldEz*t;TmV9no#ty7tSIz1)?X0N8(gw*ch#o8hlPuL| zmL4URR{Zu<8s3tmXHOLYw`a2T?bZ2!R4jy5o4t1{)YZLvspEc9h3l3dx1XfmUWA$t;1vnRdKS&^{m(9F1VbT&Aa z&asZA!&OY@M2=49rlxma1WwNE@tlq!Im5Q{HC^E5>&?Lm`HrM#z9)9dDK*NE_!%(d z=Yk=><%I{~ET>jKf08@lO5$V3!MX_eUFf(SG0pCr3+TJemBS z0ehYinI4zX6TS2!(qx|M63@IAKc)llW1=WOCa>R@#TR~Atmjvx)clI1ypE@HR>h&lzQovV2H-RTQmmD z(HO8r6>t(&p`WM<4x=hKiE7|Es&f`my|8{A*rIyD&sr$Jvj{Ck%g|@E3=Krf1sIkK z`&ur9jn~+Fw06VZ*KT>D?+h9U%0QJmIPboFGIn_{@R zGgaQLN`aW_<3Nd-`2!xM<;)_EBBr`olb$q;TQSU7F`F_PO(ZMAJV><}CK;P2sTK1q zJ!xJ6FT+L6Yk+($bJn~D$T#xpEx9umC&Hp9?WpiziC5ccoGBWXTIQG9vg{jMmX&5J zcLVgK^J-N^)Qf#5LGBo%^Iw#XCt;@1a7jyvWne3a^xJa%wr6@#eNgUAHkzLxl zlv&@pld;rdUR!%Y*lBYg&?d6b=Ek*spm0K)p2%%V`ZDHtW)nh5wkpswQev_r!JA#m zoXrYXB)4M7>`p#S_8_s6J?YvbdzKTCG3}*8quCDi!ktBtQD;e1-B}fzb|T5MPNc7O zA|bVNBGTxb2){W6@%g0aH)k+nJ}r94NoLDwa+q()AI{0G%Na@RD0_2s6?8bZZc38C zmBmi3qFuOkVbX1gQr))D=e7m9+n0=T*V6+^b=Pt&+`ZI{BmcKMBJ_1p2VKG{U21f? 
zbbjw%%FF0pj}5-oy%x^8HZKXJzF6t$Ld;N*(&DPLki$gXg;# zH^wN8_oTPueF=~FP*y^GC_65u^f11aDIVXI2ck8;lirRWWJbiyI5lIEL}pAji5Zh{ zHZ-R(gc{AX_}!GHGYvz34JMMQ@Ipn9nTo`vVV0(uq!iCZiKitUOh%95R8H>EQz7O{=QPL`B4yCoyQZh^in**caH z^>znD(LBTM0^}~}Ys$c}O)-N#6pPqH(Yieq?bvJZdtKAH-2o)&VivP9!RJnvw9SN> zI?0B_O45{Lm0U?0B{y<260-Ob^8Ax0Nx77nMJW@nQWBTa2^~O65ps&9l9I!eGXE}( za64)1k?uRLxehttogrE9opEixGcBU+&96=?AmUf$Be}~KG=vM9MW#~>M3EZg^tD~inyLlQ=s=F0Kx;tqn_as5;UW?JY z!(zxTEn~amG7P(9K6Z(#bg7l@RwPKfv*MvHquaWRe1f#ZEXC}mt4QnhP^`Vxt-;rNO$nynA&<{OU+80?xGrh$pSYKS;uO zGFJk}5*X7SXuT0iVzN*YONU{dTcA;IAO|{(iXH+nt}rIj2out=f#UiwaXuifFd-2X z%2Kz1kWiprJIpHR9B@klC61w{lMjTj!aVEFOP&QZdZ^3S3pC6RD`NtZGY%_qn8Jz< zAW(D|=)oS=Wg`Zf#|1`@g$GH3@Bm64xm>T7x!|l7%5iIjfef`m;GkA)em=}abu`8$ zbW|o#5|s-Vy2TRWN+F;yt4qy)qPfCl?HrfZQMK@`TH%7W_CTmJt zexg!J-6elgu;wTAp8Q$!lYdrwk)L(d`?CThzffhROW$9$-mgj*{+E9HZ^80!{q2Lo zKpzCbZ)$(?n}RyO36|fK&itFc_8S-43hMkW41HAB&5!!+Cv{8v-<3fy=?`6%|D^=z z{}3+H)afq@M19eJzbOFrO)GzwDHiX@YKWVPMa26uAmV*V^Y}#iD?XLU6f?XuzU192 zUbvR1jj#FZJ@3+ZHpU@}83|-c@+ZxZB($NogBg-uG{aigkmY8ECH%}-b08hfn53?m zKGt8SWfT~?P8jYy3^Q#Eos~^R*fER~HFGj}3^Py6oV2j1vLQPE8rsC0d5Kz6myKe` z%`$W3gJTS4u+)1z-YC+5c_lEPcK0 zq|7!uB}c$gb7ZG=ZgyHIv1JjsC75Z;z);b#SbEahy13BRg>}nFI=cusi^7@3+_q%+ z+Eo#kZRjlQn#kU6$X2skk_47H2zHBI&``&2$pf)X@tbYF5`GV5FWOTXJoZ$P1WQXb zdnI$wUW=+N89v6-IA-8UhuOE;Wxe3VWrnXNlKTk^sN}ORnl3sm{ZwW6L+@`c)U`m zq4j2bu;Vs8?Avn!y^U+xUKKd)HKC-vrVqEdF>d2Qw{h#*FT!tDl9bCx3D58{vzlT( z88Q5f4P}vF$cUO{w~~Dsh{~Yf>|RWk-3#lTX)#V`MxwYgFPEyr_{GkG7_ze{q1z$< zszbS7XHP`i;nJr=1!RXFWStYKgU*SluX7@4)HxLwc5VQjoT_|Oq?}JlM&-;N$jOe# z$t}xkmjfcmm*lGDT$krN5@PwT$S6M$59J2}GG~;1e$HPB7Uu+^^Lt?@f0Rp=KZ{%P zx3Ux*wuWO~xudDI!|`_G5(;iy(#X*O){zP1=q2nX1%sn%!qLOrk=Ws8ga=2;pQAmU z<3`@q6!>&_@s4(qZd2#xHYKawfkd&p6s5Pb&mZRkS0-1NAqGJg-c%cR! znBo;^*c57S2Im&$MFfFj!cZ611BO6AkOs`#up&|pD-uBg(H2N54(k%d;emRAYLk#v zwMh6Zh%G9|;W$IRSFKnBQpkgvFNDhGcL?Jx6yRAXE8;F{adSiQ1I%50)4+J$o0Dh0sO92K5CVZ%F6m!9enqrp!%p9kxzQ>li>TLkmMiw z+n@UGUkY9RCA|Eltg*io!27I~KP%Apw^sg}u$}htRXJ*3Rax^@IQXhvewURHZ%Y@) z+p;s_9m&XeM}|ne%g+u9k8%fM#xce;rj3s!hT~%yIPs}uS$rWC9$(5LjxPb{TE0+x zEgckJE5H|D^X`p&xA;c7BED5LE4~BXJ0N?_hiP0Gb0-?VmDL!(u-J=WFqB!E;nF}( z8+-smPc<`ksTU^Y2pTRd%@pf0`Nq()#!%U7xSuu^epb=m45JpztWaX;u4w4kYG$R- z%$$rCLoJG#;}dhzVrEX*F>^QCTU9#Vko#n+e1eAXrY8T~;D?xb5sazpgbY2e%mN!) z0)}PjHbZ+r!zHh2h)N9c4?`xtS(h$0oANcyrW`>N@wIJGP0wPpFQ3pHaK>EZnm ztK+t_veoUZEMhw+F=#QT>=Jupc#$Qi$})AqHYB4g6lEiwn~ii>HUia=PTod3EW0HG z-IC#NvFq)Y3^KbVo6l~uE@L_@WB2TyNZB3$*|GT8o|bfOr;>8^iUT0lXK5f|??4m{ zn=LU<`wV`c!8)m9mK0D6RVO8BhNL7I5`2K9s)$cAFV8lqYn#c^%V0wZmmA53L|Z~n zv1DJEOb#S>k|U9QLhe(-n3;qbO$kwkq)_#{qO~HTZ8b#Itu?LC z+7i}VJG_7g+G3zg>p)&`>sDBAJ?M(2^-A)rJt{|~P1Z%5A*=0i0ok4qowdozYR}gP z^w6f~V4H5d?KK^DR+9Y97($kD3zRYPC!3e#%18mtAi0cw$r)^#?F!zE9J}mXdNaEe zLuNSI*{yu;jAX9tPKq_Vmwd=b!OzI{>JaqkED6<}4RJtcO97qEmYA%=9dC!;L!EOe ziO#bmSBHwm&TGksd`hgBPYd3BM!GPkFg7oX=kv0tI;U?+UKNlzV;S>hfs?NZllg{} za8AW&&OonxPXXgq{3^buW0l6HVq1V)UH&UH(dE;fQoN#sfLJ-?LtQ>=8+!ZTuh2ZYNUG$E0 zSiLjxMUTc|y;rRJ@_IleeKH06V?tk_Yvewy^ZLtjp!*HUhkip6vA-sY>T?0x-w-=_ ztRPy@Uc!n_h5vfp5yFt%%Fh8md`AMPA zb35uW^Zk^Bxo6;7Dlz+Z9zbWHI#QXPTb!U1LoTvY|e5d|`;!;rofnCTO^a|*+f4`EpN4I?sS z1I!R82nwSLLIvg;hSBH22?ZL5g|Tt|D)|t`By7SsU&AE`lsE*;+d!+zFeyV-Yl=^qv?7WTCU-lDZaYWO<1iX!^GMN#3M?ZW-fc47S;&MRtiUeR9RdB0GD zzF)X6+ArX0zp$5s6L5Q6NIgF(++Up(_I1MEx7N9<*(2Tj$DNGsk$yZ64c7m^(y-3r zM=G}9`EggHn}~NxG=8U>h<8eaey<@D@AcpJx+!_TJGe7>uQciR>IL?*ZfJg1FR))! 
zOaJTH;J3f(j^kgdc>kAfUVh_FN0srv>sINbdP09x3iwCek9<^O^(VdeX=d=lD7rsED_uy<)76 zn4;X6NsaNL%-@(PiSdziaC{^=8Xrr=#wT*9Vvqd6kjR!5?{$A zh_7T^#n&?0;v1=p7y~4}m9Y}v0@?l2K)1*DvT@>jS(fnw>(X;MevlmyzXIOZQpfRY zS)uW3DV+GN?5y}L@4m=Xj$dR>8Vn{wZjTw_g&_fHMs5bXFrzXm%$PoG#tMJc37K&j z=4SFqD@@ABG*eQsW=g?DGsR+bPBUaX8sa~uBCMN=3=mV1pI~NR^kLeS8j^Gjb1V(D zyrw3iHDnr^dEwU(*fVtt;X+F@7E3d2OWPH@ zDPe9IOK&4Exur;i-4g9sDhli_pzlr$NW(TE4O(|wx<^=Fy+G1Qk$p-IWI88Nk;0qlhW3_j-w#ld?(kP?4^#SA zrPLm#2O{P4LgblVD*TY5htgX`KvT-uT4O?Li+qDtLo2izdjrT?Yl;N7)}*LfyJE7| ziG)JyLb9)QrN~L^Rw2^XgP5W9BqnQuXtEG?^`nV6qjRafYFlHH4jPpT|ROGdqz0$`*_L4H(DbLnc#O`m@Y`V) zPv=D9wR0t+?OeYZ&{^k3wo&I+vZV7UqoMPp1L!>SiPw^Soi_kUMOWvIba#&7l8@dF zKAaOn&1s;Jb0e8g%jU{wq#1Iq1M-RlNREM#&x&#Kxx%APKA+R(^Qw*|CsLW$bVB)( zG)_)?svO%sXR>6@!25htA~PrJC#U8vr-CxyVHb$Od{2&UPTER-+7SAf?2%uH!1GIy za{h{KQikZ5{NqYOs-u09W7dYlPIYw4azxr4*=256WaNgmdB>C-t*{FElQM}<8_v2^;>u9U)NXv9%F9V#padEf9Ds~g1GdCedcN5Zuj$1G{DW&F! z%DX9XmZL7%O^f~Av`*7ai@4m(xFDO6fObs6bj0-?HN>tghIEt@J9>;b%8A{q2;9wz z^&I)2ZkA)I%8GXc{#{k#)m6pPt}3I<)vmOQb!i;84x-k9VM9{G(WlaFLomnZT8#Pa z?gC6+iUS<&sT?h3+$G@PWI6&h?osxRd*a+kU~?p}b+LE4%!=ubYV%#>S(o{)U9w%e zb?vRYCe_hB(ucc@_Us-@#CK1`B3*`Qch9Agy4P}Sy42Qp(Z^kqn0u7W^q4od`i+N4&3hA&aSZ zRd|%C*1Hx`dnD)e-by9)USu)#ha?gEWr@r_sRw;3ZTd_z?=Op#`}C^sFSA%fe9_0y z>N5nWPiA5to6A!c<;ffJcoM!O5#)!&S$;?&({r$%Zug$1Yko*%kdnG zczR^`VI84oT&E{h!VinzJol=8L=xPO2z{REZ+=v?<448PeoR)7C;Q2h{p81V@*XSR z6Zr5@wVwb{6QT!Crk|gXm*gj9$a;#q{FF$}Pf0lXDG{xwD~K<%<1#z0NK|+-mwi=E zg{R`vlUVPo;ucRfoUgKORa(T?ssoYf=cQ0QZ6Z9aK>UgfIL}?F=YrJJCEKro>U9yg z-;jp%8{mN^oqiLf(!$h}cI6}KW52};TrT+S4Q&X`;CDES9cYrKHNMFUP5ES={69~g zwBM5<;P<6XJ=xR#5KJC(7UzI|DY@fmKIMNsn9U4V+j*Fi$D{Y zFv$T-J_zemoDeBbfm@9*0}RNAP#)C^OxFu#5n?EdxB?xmLq)608E2V_*nOsEJL(yrg}gtworZObzqE zJI~&Tc7!@7&xH9>A?nWWSrg08# z8<&OWLYKw0!uo4X-dQW)uvvh4ukfwC0&VOU_P$?8|Jg5`^*$+A(S8B8eRw1~DB$_v z74RGt;6E*B}%uR1K4|F3kT^?#%Y6`yscKmIN_e>{s%e>|@ZQfJ;NndRO2;BP-Et>q`BwfwZH z|0>1iCk@f~NvSUHm2C4~tGw5H@AZxMcZ2nQR;tU-Q~H|*1OBX}oL`iv^NS$(MdLYs z(aOIlo#t2l_g5ti{i?$4U)9s`SAFAO`rE&h!1QmW3jKR$0N)1{bAM1;&5ILxuEzXq7+IKlD$fY5l3Rsy~I7KXrnC>IDB( zLeyVMZu?97_)E0=m)84BC-avQ&;HVR{6}>ES^NDg8vm>veHPaK*6IJPgss1YgTDpo z-)f5Rw-WNc=(k^l@h=+0`9+-aMNJ332<9*P#up`aeNjKsFM{uj==7^r{wh3w)yiMR zzh6ZqUv~#I`Bh*0Dja;($$S+A-$W7LL}A~w-|zbFcTwkeoyT{5wKW4tX>F5Z^47;np3jG4I+bG;dFv)Hz*{CGz; zWW1x;OT4o*u-s$(+;~T}UcAFLcV$+`yYgV-U3s+eu7WdhQ-R)iPZn#uuh?w750D4) ze&YkV^YHJMGnS_M+2K3zk89|izM!t4@!^z(&nitiWtNgD31^1RT&5(@4aLxgu(BD_j?Iur&JaB}1c=PAVkBndY5=4e z5tWz`ZPO6wHRIZa85c^-xCqgVYj0*;g2_w@lZJ?`nbaMNAx>ka&IHbs$jD3!zh+wT z2{X+nW<(E$>o>!Vjwu5=6*h(uv!~>WW#y0pFm57!qVY?=!W)Vzw zeNAVtb;)Yk;ay;<#zHd}TdOzz8qwm2*H06N3JutzIe>_iM{PbHx3DeOeOgFTf>vgds5 zT;`X(gf}lG6zrwUW=ooay<&wc2{C)kHtBF@uQ~2(*?sm_BHrGCvpcC~`zUM0KEf@J zQg!wb{62!H=aR5PZG?RV3@@^n5^@s~CM+c-!I02gIN@?Kp=E1AIzd7@K{6^J6KatX zI$I=6V@j%`>ZB$ul2F@|P+XkUbyx`(LCJ#bhXflYVPa>pAQ6--O2Q<}_e#jnOqS$M zCQGt+l4Y5e2`M89Ge46R(N3}=<1`^1CSmYv!r;M#@t+BUJ`=bw*=EO76(oD2^@I%A zP1xqJiYO@K^DC@(c_V0wfHzOjbzGPg%rh_a<`k#YV+xw@SAdrn^M7_(tS9k z5-wfT-cs6vrOeYyS4A2rMtVx7b-K=uH`vgI=pl_HiqlB)ET!vW%825WvYB*SdNbXY zZb^4!N2Pl@w{%bRkWx029*Q?p#@D7yI!K8HrB{6IMjAc6mDWq|Sd4*1=>rG*045&* z^0ju{q78OyMEbZjBAMD^v_Oky6s=Kd>DH9&)E0yMT8mO@trel9g@kD}B;s4l0%~mw z^wyqEsC6KP(mK+?wvHvlT1>uZo$|FSNu$<{Hs4|hM(bXtPmAJ|7K!yOW>2?XNpH63 zb=e*kt+z=NYr{kBF=@RvqsH6wl1A;ijLx@?Iu5CdO`bu z4V{E z8R=2kqmD3RXh-%We<-7~(fF2S9fA z4M%R`@eH0BeHWY8P#E&X6I&DKcmdO zb1RkKxtB@Rd6r=6JhS6h!24Px*LkDk?z|OW%+HT@-I!CnGkz(jZB@rFXMjay|+^Ep!$mn+r4sqllI(ox7auD6PILi@- zapMvSZd?cJh|V|?lih@f*3om>VFEfL-j2*f2Rk`pHf~CU=xEXA=sNDEq)^-x_@%tt 
zQQqyQrGy<55#5YbsGF$|Xworp$IXbP-3(tVLuX}8^{ox>TEYH)XnaN!agh^X{&&({1Wlx_fN% zKx(mzLDZ$8NSEM1_foQ?d(8^hQXO4xmAhO7cb_jaq*lwXvhF zHPagrqI%q9_LwHrqX}=1O2Zx(l|3#hdx-7cl*p(zEsfrrE)STuH!bh0M>pMGSwzsQ z$n)vZ8ojr`UzcSV_EvO+Jqp@;4D{}Cwbi3EwZ|y;-kvD6x5r|K@(X)M(wjZHIQ32> zVS1O6M!icBOz(;v6BzB0Ptdy&ANOv>=sh|?^q8&Cdy-h`asAVKEoSJw*%N->$Svu; z5h?d*&C;itQXdBX78L{iB z4fb4O`5|c$KO_a{hok`gu++F87QCL+0zWLj(Bu1itU*5_{Q41bfTv`|k4TGnN>==c z2;38V@gs6m{fLOm(<;=DNP7Aaq1sc+?CIL$iRAcEF^(SvNUG!gsO)u*zVM?WBhTbm zKejX=4Nu_1kBRL4m^jwcg4B;o?s!UJ{DdTo=Vs6oC-4-v`bqJopJXxW_B}nrJ;4`0 zwJCU~BozD<2RqHW2uIH~qn~C&(_#@%>W0S*^)rw%g*Uz|yTq3{dFFY0Y)?;RjxUS- zeOac2hr2zqzkP*csmLYp71?sWB5C9+a(_M6tDluN^>eb!J(rrE8XQlTbWgt!PdScf zLXWRP8dVv4o~W>|LT6O*c>ZE2r<`b+s$Uf^=qH;_9G z5XEQ;&s-V5CJFADaqQRl8ZGVp`jz0NU69{EKyR?O4feJnZR$5TLMB9bTIG8h4*5+u zAX?XlnG^1}An+|jCKFTr7IJb+;TyjVt#3;$`fbIS{5I=u!~Ue!`faejBVW(&Km3ay2%So8D+i+{We*Qu#hKw?)RYIJy3!j<@aRX`n_i%bsy{;ASVwM z&hv-7a418-AA?B}WBoDGhoeFE-e7?KDILptt2HGkmhFbs=m!!Y1rM}=V) z84tk#fLr2}H5Ov}9_S zJ{nMJn3nw;uwlZq9NjPtY0OBjhcaJdNJ1#fjtcaI31vP}hG2-F1>BNQmQ@ijTtWph zs)&aIZCOJ_@+{DyH87ko5Wxr)&W-jOfw)GPl^zPjDW+{Aseub0@q4`WR<`TQ>bzB^YRYD zJbXOQanlJc%u6r@8ViPbNzYK1E(~?HSqFV}&bSWfbv4Hs%R^S#cA45Y*IB>Ha*5nKZdJlzlneJf&>=3{V+*yWQ zsid$A?d&2liGBrCN1#(}c%uBcTB5YII=ZUz(dzU8&y_-nb)mGlT9^({Ep*hcHYqo) zHYt0qHtBd@ZBp)AZPJ;)dP)g#^^8*0TB-0nsuIwDYNNbg8>KwEHo7nPYoj+j7p4Hz zs+1Ymswaha=#XElQp#Jav9I}I;GZ9+>;C*GWwi6-lz-1pv&Z=v%4O#(lpoiJIhXnf zrL*-h_Ff-TiR?f1aqw9mr{udn&bR6nmDm2W)ZEn`m-Y(J``~ct0D6e%S1efx}~D zRCEG8Zw+%UTO;tx*2tZHyEO_8Z!G}x))6$Ybr1fxXQ8X@74X0P0{(Z-kOMn6oJDj0 zS>J7*@Y}sY%ZB}Z*4Z!M>R=w&4p!l*Lnev*b6lXYX3Qy-JkdtN8i7zWZLK{_nNk zdzC@IR~hs#Dr){kWy`-3bB9JD!;2R{&zwDyP*GFMcg0N0^y@@@KHGUs1o^)YFY4UVu0sQD|%94_a~KZ zf6~tXS1bP^`2Nsu|4@ndAKK-glY^E2)Vcks9sQ|q{HY@NKUH%7*U;d%e+kmRwBBFZ z?_X*c@z;`mp`!J_bTa=@x&D6y>3_7h|J)3I`yavbS>OGvh5?_|G~u(T?z0*deAajW zRyq9N!uS`#{6&=WMO^Wbtn- zyYTY8F@WH^zV=bjZz<1H^cjYm~TMA&tTXKS8#%0G_ z@`qx&$HiOn(c>*Scro*|V{-Ol0*x`1c`=oF@wUwTctROot8hrXr&vYI-EB-hVoXYOybtL6 z07;^Be4yY~d@wffnqwl7@c|zusX9JXye}r(Ek08CF+SSYVn_0MV{#(nBi22ZCmbKk zO^un+8WYQmkL8%h$E-lfPkf@VP<#UIoG3UDpU5GNPZdpyNxO_s6)}vNoe-a~?wNua z@fpW`21?GLD9Rw>^97-V#;)zJ|D@jdiQQff^8Z%k!rOiVX^ zfQ%j>f=7iS;wRly#B|$_nc*Ch>l2eR5y9WQuHf+rL2?qmEx)K8_w-5=k^v- zehWP?7A7XiD1Jfqy(mBt)4w);QG7CfQFJ_hfs9^&>;)=$0a4VO8m3koV$fztEMjOb zYq-`oLt57mnKna;&X^&ESq$-NGpr36BGZOzeM7#d;fCLgNCX*@JPq9?&4^Ckj7nr0 zN@Wf8WQOjNh5}kc%ULs~b2F6H8e-69T*A?ei)hXG#(5WkrT ziDEU*PXws1HY3NjIrhsfp5ko_&wIK`CkOgX{MTllv!9z1Gk~3V& z8%hNXY1xJ$@P?~;L+f)x>vJIWE+wQ%p%mc2$1yuG$dx3 zMcJu__TXlTPb^9P8tOs~#Ri5FQA47hS)P{wS!S_i7F&jS84hB|{W2@EQ_TwaU6BGb zOcydM5cmpWWfl6RT+FPp!m3OOvkIbCK}iG98Kq+yyxWj%WE%X8sXAtjU6B7^=tFJj zu5Q+lv}@4WI+AN0jk6AwtS<}I>$iH`nB|dmNrf<*fF2>XBgA&ZVv&q8Lq9P?iJjSo z-?ljm(i;qeybUP}W*4E*lrLhK9cG#!m5c%s%h#b8ZKSt3%Ym zp=?CMuunrxu{i`$hmg?`Jb%owoFKVQK;J1ed5VNNl_z0NVcyeA9pNd5bqapz@MS1a zHq1IR7s&MsHh;ll7vPsME9MFynKEj~bTHRY)HMfits60O%`UEimxfq|(o=KG&+Z`m zJAk|c$a~m{CRgSj`S5^wK;tU&zzS5`nMX+D5k_Z>n0W+ek7((~*E$v~bMu59^#mC) zjl(>lvz|D440`hf=ugnY6WGDbH&57z^gS@o!0-$V&*<)FDC!mb_G(KTdIitF!svY+ z=>-O5nb&el%p1P;2BcC#Zb(u#I2Gm%_%QRcdG+R5xVpF0LJ0-KjGVjMuiBc`oA}ltzolkdke!nYZmB)9405vM|Jo`)Bb&wIK3JNp*gAV7W!El9INAkxa{+!Mr`J-QVi%-a zEC#(@gp?Phvn+Fj?Bc5Mz(^sx1ZOQ59)W?HZHujJS2+0TE+{7Tb`%unn2mmcdAt+8s-di*3NX4H*r#A+ye6Vq5Zx zEy=_dv)Qg8GD$JE8&Jsx#1#QAMz5tt%+h<;ZXq(aV1_MtlNi6<;#juemMugkB}#S+ zM3H!Gxeu^gG6d|_v1o_PWXpwt-9nZygvOGuY{^%)TqD>mj(bZ6hNW$=rERb!N!jkm zey}^T2`uwO?Jo37l9AnoqNujBO@vJoDM!AtZOXu~O(bno2EA<}UYiiXo?HUEC$qyc zgVpY{0$oDv0WciM__qgq;s9B4C{N9j+iVX(Dvp9Z=D1HFqZ4?OEM|KO$(=%HrwIO2 
zln?C>E#ZE9D!<#FLX)S6ic_Q!9U1H?EPV=#oO0awE%p?YoI8#}GK@_b+jBGyJya~2>h^M2km0G>OUUyQV}$m}_L6hEw3onei4wlT zq`HDHt_m+GPGGMPHeAS9oIy+TW6LlpdkY?JS>YBvMB==?3#TlM9?eNEqEv|P}B=5>4l%Yh<=lj zPBSU#6A4`*l2NJ9gm%OUJ&2QWNuz{WQ3+m0G9jcU6A~*4#dHb9bjgJHEukH8!X&ta zc}fY1$4QM()Yb-Pk&suM)FeQX+9@x{K1*uCSwb>#LLfIGked+5P3nBD4kqg&xr9J& zLLfI;0M*!X$)XfhLdZ5*l%!1-rRS4Hv466tGftMk1ASbQrMh;!Eb2>WF`O(*9VE-Z zuq<_uP+^-agEOM63Gviqg|DqhTqR6;N>)XP3AS9)kbFoQV&0^|-Z0e?0+`AAmi9)? zc0#p&vLSapp%-YfA>o*80`I0ws$^5LB-xa1Nj9O9O?W5*sS)%`-_V47jf6qR3C2^h z1FCm8*d1WlkuFSj0H?{`D9TFc8=5qwgcB;UlBSe!(u5Eh2bfTwl~CNC?7=wu&>52f zlYI&PgurOBj{w06O6Yr#97xS12grv5AUl8v$g4<*vL=VnFTI75L%?Accyh#H9r3dx zU^oJ2N6_yPbVia!LJmf93}4WeBVnXMa)O*Zm7Y&drEU`}n*;+RIeig@ z>5z0*IwU-#!;JCHJ;YQrfF?KRKobpbY4W8&dYB}sT@lw3`^_MS?K~BA_|yt{ho6D zo^p?#F5U}IO*EFKFr*QbX`23 zu7fBF!_p0~Ln(c_0qC0&3Tea&5nm(ZmD0C4-8vFDm`Ujt>+VSTq&x7%4(smfiX^3s zJ>4z5z;P1`N}JF+0id)A+3!hYrh9VC(>w)7T>cnhuHasYQ6_Z^!@VW(*6^iE!LdJmmFphX^7m)qa; zffIT_tUSwHY>f&JEsDBZqXNA(Dsj~s)v>g2t6N;8wdg|F;-aU;9ZG9iyxCfw7$CK^ ztPQo6MMkYv!P{B|hKA0#gp@h~!Y*lXo!z4AT-M-PwLKx`ZBK|M+Y>T{+Z4CA zCk14i=8$d5q1%KE+7unP=Y@whGn(6!47Ujmv={jBF^?xwGwl;`K%2;E`wej_SrzmcSEzZFt5`ZQ#8^31qZ&L*S)vuP>ej7d%z zhCnti>nf{@W3##-%LqDT6h3E!7cwRVWHg(}=*F1Q;VWC$=CerK%$ic6SyM>O7!H&j z3cuO8s4u$^elsGc88OoAQm2_+$s)~eBwjOmerL3*&2FVdvU_Qq?EcMwz_SMl#|*)g zJ@7N82W5}qi|kRSnLV=Z8}{~A*y)TaqScv@tnN(A4PfY0MGqb7@;h`g>eK~qhj|hm z!dRU}?V^K`(^(OIJKQ*Sni5x?JqiBKJ}VGj=+G3VvoF)9b13=SIb?5#?D$CbL+41+ zsKaQK&XGu?b0j9~90|Xj<1u09SoGUD7OFdBZgoyT>IvXbbkX4srNg~Q=adhhN#k_R z0sUI;WQV?hof{ydOMmA^hCt^QtUpSob_iW}a1lH7eCa$rYsb$sdVB?J|zX1Pw7~4 zCOPHP!g@|_M?S-cXN2E;M&?0ISIWFBEuEKjZh4t^Nx;s@f5>Mg9CHRh@+-b}0|(sl z!mYGZeh0x2u*&Zx#PSC*Z;t(t|NrdW`=2#iehB`BNG1f0gho&Z8WDw{5flj;K_O@a zMH1=Att3HzSAZ;OxS-SngD z*S;VAmi%$@TjBO`U7&q_U(!CmFKHjwp1P0gxUia4vzbKd5IrhPu2DhubXB6@OmuVKb#jcPs6)^c-_YPJYVKMe*xJ) z{E>Cp9$t@|`#b~HK0p1l&y!K@A6bt$`$zJ!J+k)H{iAC?+vgdm_K!Weww8bR+vjUy z`}~~GKF>?F->f6;K2ICG&(lurH}72k?q)s1+i&uk%me>EKPj}|tUK90-_f{#Jhzsw z%k3Y3&DzA{d7b5Z4EHZt_kew#=O=~sc|xpxp0jhGZ{OR$EO*bBtvm1jWx1C;xgIz7 z`H`Rflk3>Ge|heGFJBKn`+TF`{^fZKb$QwH^qu=W?bN;9?r zbaDUGdidJs$9VQnDC%?Rucvzb-eCr>)$-F8_o4l9_#etKt6jdDqLo zdhTDpp2+Us@bKEf-;no!{1U2tzJqPw=d}E~sePW6a{of^;4iG_1pEB-#Quf6JH4Wg`{d?B0 z%KP``{`LE9<)z4%kZv~X zuzi#7b-&3sxZk{F?aDX#*6^D=#NXsMy578e-CJ(*tbRB7)|i`a9YAjKTQP3(?JhU3 zS_hk({2s@fe5=V#eqQh9>2*}Tc{(q}c0IP-wd>`_o7b(q`X+xRxXIV} zZ}PhtZ{D1{##{52f9pDA-sJlnZ{D_c+MBnp!`02(*AeC>-)497&irxSb+K;EeChh; z-RlwOCZ84GHj;x>1=+dQ}CZ9XTs%@>nzpInb`xB1<} zw=Z9J$lH7m{cV25#%+El%k5L^-g28?D0=&$-Imz~|d zV?E^G<~fLO^JSsicjk5Wu5~cK9r9)wa_`IEA#dNa9+Ga~yMAfAeQ#b5@68+FeQRI8 z&3nsje%kK#{kh?MjsNz8>u!4c!MwTiYesKBoY&5W^HSuiNw*)(8}^&?+WF>nw7<=7 z>$?5edVIT|pTxSK-#~Exkw<>C!`#pB1i1gmx_0j8ZxZ((UqA2t{QS`UJXYV&kI>xD zkI>xDXWRGlkLUOEkLUOE8%plyGwAzIuEWaxC)eTX{*&tHbq||GodzI*8rRH(1_()p}|5{?mD-=J)>I z&rfIHf6cnF?!RUo&f2z{NTs^{8aM&{7&vUPs=gqlZH9I{zo_LoUaqi zdC;9-ydJRU7q8uD&XaY_`N_OFUj~>TT8}OBL+c(eKeX;R^FwQwn;%-c^8C;@t{>;2 z^=LXjwC*Bvp3P&t>nnt($PZw|<+M z@2zJm^S#%tAM##aihJwX>wIrLPo49%iuv9;IL`Oh?*a3@59U9%4(9W{_1of{-_kng zpGD_<1!K-9WpjQ9-JGWtJP>xei%k{AAwyqNjQ^!(^LfXt8Pjg_xg&yVIN@(;mtzNvc7 zH&xG%t;68_*m{aRKeq0h^J95yK9<)`p1ga`mrmx*x}(pV++j9(;Wzn@{@V$rE|QK9L8p zJWu+Z4^;CL>)F-(L|)>TJ+l7YJPrDsZ}^&DwtlghUzXQoo>pdlS>AE-Z^Cna<=~tz zAm z+`;qhWpjRi;hcY)ne#IWbAE<&&WGFisk|5FCl}_Ya+k|D9L!IxhtN6CMl-)64+^iy zoghz{KELA0b-U&J+U8fR2jTe@xwTikXZ^RY%tw(|=9QY~KA&Hi4{EQ<{p?k_|Gp}B zf>-6kVty!g&Xd^8ugcr_)w!R&dObOv^BgtvtMezzkH60O1>f_l^YJ+UA~rvr*Td6! 
zkIg@h%}?jg`*dEm{G7y`C#IQSlg|nA8{y~IJncU@`$xHD}9u)E$?B_hS&75C@Ip^14 z&iTfRIbY$NpUF%7%=%qq&W{1l&*X)FHh0=*^A>tG?}g9i5BY39U_YCWS<>&H_lP`zN&*c-M=kj8{Hm~*9 z=2iXL_563v7k%eEAOD=6Z<$}Weo>fTmzVf;`J^oW9y{l&!t?9$p7r{?@cBih^Xqdj zd3`=~0{kDhPHXT|v;>ABCRNqznd`4Q(i zUpJcjJaXj+pXWZ0JNZ7+Ie$l=^OUD^ekyr>{>5v5cs}=s=kvJpd|tNa*GmTT^Z7$Q zpEvCDd0=`zFZ}a)aC|<0S$RG$MVs%pWq(z&hu9WaoTi z?3|yip5K^9m^bEKph`Iq?lO?ffjls|f&c6H8ggP7lxw{ds@4P2?9D&+mS2-Q4fa{p>w){jcxITk}16 zMZG6?x%cLSWxo0~zc-J{@6DrJerRZZU;dJr=X;&spIdu>{wDGM+}A&lH^2w-p)OB5 zHGd!vEBX4_{DFMV^?}^*2XgQG;JtN;KbTkF2lM&*E;qc(OT5dUH@}l*&a+j`ySxED zl*h9V=Qa6}yqF)&AM#`C5VDKPM#`@sVbsu{0=z9L};L&vmdhqCabb9dU+Rq+5nm6I2 zA6i>`G;ia_*53CZ-`nvZ-!b~&vGtVd!DH*O@WEqm`?Tr21%E5G8izV?g0`fI-AYd=}+6aU}%kNn?%^zr}y@ip-AHSqB@@bNY9@ip-AHSqB@ z@bNY9@ip-Ovj$=a@nimb3ZWv>lb8tK8o~huB^5OdEs+f!TY4@S*pawo_YU(a9X z8~7XiP5u_~+e0M0SV?$DNw}vZJgg)lDhWF!;X_Kohn0knCW42BPHScl!Sk*Bz(V;@K2P4D<$EdDhWTJB>Xca;Rlt3f376_kdp8(l!Qr1_?Jq; zzfuzZwUY2}l!Sk)B>X!i;omC>*Gj? zKPd_SSxNXWO2U6t68@W#aHk~vcO~IRl!X7GB>Yb$;eRO!|657;KT2%iP!UOlAEQnr z5q_*Xkwo}$>O>OZ6V!<$!jD%ck_exuP9zaNNu5X{yhxo$B7Cwskwo|j>O>OZC#n-k zgilc?k_bOZok$}5WOX8m@Ke-@B*K9@kwo~Z>O>OZr>PT3grBZXBoRJUok${lnmUn0 z_!;U%65(g66G?(q%P!mn2+k_f*+ok$`as}o6t->6O`5q^_8kwo~->O>OZbJU3>!f#P0k_f+5 zok$}5HgzJ2P^lA1gx{`CBoTgxI*~;9Ty-Le@OkP)65)5M6G?>MrA{Ofez!W2L^x3= zk_f*?ok$}5UUedg@cYz>B*N#b6G?>MuTCTp{(w4>MEHa1L=vG^Cz1$%NS#O`{9$z> ziSS3%i6p`os1r$qKdMe75&oDukwp08>O>OZRGmm7{0Vg;iSQ@Yi6p|GQYVrKU#Lzb z5&pC~kwo}2>O>OZ&#Dtighri6BK$dZB8l+l)rlm+7pW6TgfCVnk_ca-P9zcjf;y2z z_)>KuiEyS)BoV$$ok$}5MRg*H@a5`665%VE9u}vfA<~nW2#<(U(GclLOoT_psc4AwBqqXR;#4$5dJ+?1BThv_q$e>E z9v7#gA<~nW2rm(*q9M|gmDr=lU! zlb8sv7N?>i(vz47Pm5F05a~%wgx82u(GclLOoXjC6%CP|#6);ToQj4>PhuiGD^5j2 zq$e>Eo)f2{A<~nW2(J~Vq9M|gmEr=lU!lb8sv7pI~j(vz47ZxE-VA<~nW2)#HJ z4UwM2M0j4DiiSu}Vj{dzoQj4>PhujxNt}v?NKaxSyjh%zhDc9hBD_VMiiSu}Vj{dC zPDMkcCovJ;Do#a1q$e>EF2t#5i1Z{T!rR2DXo&PACc@jrsc4AwBqqW;#HnbA^du(2 zH;Ply5a~%wgm;Ql(GclLOoVrdQ_&FVNlb)q5~rde(vz47gE$opk)Fgvc(*te4UwM2 zM0k%l6%CP|#6)6%CP|#6R&gpCB0Y(T@NME$G(>t56XDy% zsc4AwBqqXlh*QxJ=}An4zb#HhL!>7$5x!HLiiSu}Vj}zE z{;oI`4UwM2MEEXoDjFg^iHY#{#HnbA^du(2cZ*Ze5a~%wgugFNMMI<~F%iB;oQj4> zPhukc192)EB0Y(TFp5*r5a~%wgnuYbMMI<~F%iC3oQj4>PhukcBXKGkB0Y(T@O|P` zG(>t56X73=Q_&FVNlb+A7pI~j(vz47|3sXMhDc9hB3y}6(GclLOoV?bPDMkcCovIz zK%9z(NKaxS{4;SX8X`T3iSUEsR5V0-5)(KyiiSu} zV!}=q3Mv{RJ&B3%FU6^7i1Z{T!oL!yq9M|gmPhukc8*wTcB0Y(T@NdPb zXo&PACc?iHr=lU!lb8tqUYv@CNKaxST#HlD5a~%wgdY~Cq9M|gms5a~%wga_hOG(>t56JZgjq9M|gmPhukcM{z0|B0Y(T@Snt~Xo&PACc=Lfr=lU!lb8tqMVyL;NKaxS{8w=*8X`T3 ziSXaVsc4AwBqqY0I28?%p2S4>@8VQ6M0yev;YY-&Xo&PACc^&^r=lU!lb8tqQ=E#1 zNKaxS{4a4T8X`T3iSWP0sc4AwBqqZD5vQUd(vz5QfDH!}lvLC-v_v|31`;C^3*pDe zr=X;wrlBR$(KC=3nOF!vRz3wK6*Ublk&d2$#K^=#_;KlvLC-v_v|31`;C^3*nRG zQ&3V-)6f#>=ov_iOe};K$)}*CqNbrG($O=J7@1fIpDdq(l8Ty!mPkj>Kw@NKA^ZgS z6qHodG_*uIdIl0B6AR%d%BP^DqNbrG($O=J7@1fIpCX@vl8Ty!mPkj>Kw@NKA^ary z6qHodG_*uIdIl0B6AR%d%cr2EqNbrG($O=J7@1fIKSe$TB^5OdEs>6%fyBtfLO765 zK}khTLrbKiXCN^$u@HW$d- zf|81whL%W2&p={iVj+C0d8>F60qj7%(q&z4U? 
zNkvUVOQfS`ATctr5PqF}3Q8(!8d@S9Jp+l6iG}d%8>F60qj7%(q-yolY zl8Ty!mPkj>Kw@NKAsox6proRvp(WDMGmsdWSO~vSJ_RKeH4QD1j-G+U$izbUP4X!y zsi zB^5OdEs>6%fyBtfLip|SDJZF^X=sUb^b90MCKkf)kWWELMNLCXq@!mbF*30bK36^k zB^5OdEs>6%fyBtfLijxS6qHodG_*uIdIl0B6AR&Y%BP^DqNbrG($O=J7@1fIze_#^ zB^5OdEs>6%fyBtfLipYCDJZF^X=sUb^b90MCKkepduQ&3V-)6f#>=ov_iOe};yET4js zikgO&NJq~=Vq{_={1N#SlvLC-v_v|31`;C^3*igoQ&3V-)6f#>=ov_iOe};yDxZRq zikgO&NJq~=Vq{_={4x0ylvLC-v_v|31`;C^3*nE;r=X;wrlBR$(KC=3nOF#?@+l~( zsA*`4bo2})MkW@*pO8;MNkvUVOQfS`ATctr5dNfm3Q8(!8d@S9Jp+l6iG}c|m zQPa>8>F60qj7%(qFO*L~NkvUVOQfS`ATctr5dO4$3Q8(!8d@S9Jp+l6iG}cIm zQPa>8>F60qj7%(qKP#Vtl8Ty!mPkj>Kw@NK!H??UfP#{WnueB0N6$cFWMU!wIr$Wn zRMa%IL^^r~5+f4};m^ybproRvp(WDMGmsdWSO{MvpMsK#nueB0N6$cFWMUzFv3v?j zDry>9A{{*giIItg@FnsoD5Q1q@t#wCDPF|kQkX*2wx$ef|81whL%W2&p={iVj+B`dmQPa>8>F60qj7%(quaQqdNkvUVOQfS`ATctr5WZGE1tk?V z4K0z5o`J;3#6tKw`4p5?)HJk2I(h~YBNGeZFUzN(q@t#wCDPF|kQkX*2!BOB1tk?V z4K0z5o`J;3#6tM1@+l~(sA*`4bo2})MkW@*xqJ#rDry>9A{{*giIItg@Ym#1P*PFT z&=TqB8AyywEQGI@PeDmVO+!nhqh}y7GO-Z;x_k;sDry>9A{{*giIItg@D1`QD59A{{*giIItg@VDesP*PFT z&=TqB8AyywEV!XAyodu1DLA6!n2Hl>PH8x!<($Zdjx9YG4D3i;GIGVlH4Ary7i*sb z4kaYD^04QI5R6WP$QrRRcy9f?auu9&!H z;g0aI_Br5?f+I?fsW_qLl!h}}&WUX3*wS;sz>dTvBUemZvv5avMEe|YNWl>$$5fn9 zb4tS*E$2iwbZqIlU|>h$l94MWu35MvJgR*TIHcf+l4B}Ps5zzKjFxjE8#=c1TrjXB zammOP6W1)<5gyY%2OLsxM9DD~C)AwMa7N2HkqsSNdM+5)k+@{!iiv9$?g$(0bHE`5 zN0b~>aYD^04QI5R6WP$QrRRcy9f?auu9&!H;g0aQ_Br5?f+I?fsW_qLl!h}}&WUX3 z*wS;sz>dTvBUemZvv5aviS{|*kb)yhj;T1I=9GprTF!}V=-AS8!N88hB_mf%T(fXT zc&YX|;E;kNN{*>Gq2`o^Gg{7xZ0Ok1bHTul#3dtFOkA^YM|eW}9B@d%5hce|oKSO0 z!x=5-L^gD6>A7HFN8*x^D<-a4xFftw`y6mc!4W0LRGd(AO2Zi~=R`JiZ0WgRU`OJT zkt-&yS-2xSseKMOq~M5>V=7LlIi=x@mUAK-I=1v&Ft8(W$;cHG*DTx-Uaox(IHcf+ zl4B}Ps5zzKjFxjE8#=c1TrjXBammOP6W1)<5jyR2z##=klpIrWLd_`+XSAFX+0e12 z=YoM9iAzSVn7C%)j_{QBIpC0jBT9~`IHBg0hBI2uiEQZD(sRMUj>IJ+S4>>9a7TEB z_Br5?f+I?fsW_qLl!h}}&WUX3*wS;sz>dTvBUemZvv5avrS>`Ckb)yhj;T1I=9Gpr zTF!}V=-AS8!N88hB_mf%T(fXTc$M}!;E;kNN{*>Gq2`o^Gg{7xZ0Ok1bHTul#3dtF zOkA^YM|idNIpC0jBT9~`IHBg0hBI2uiEQZD(sRMUj>IJ+S4>>9a7TDr`y6mc!4W0L zRGd(AO2Zi~=R`JiZ0WgRU`OJTkt-&yS-2y-M*AFaNWl>$$5fn9b4tS*E$2iwbZqIl zU|>h$l94MWu35MvY_-n;hZG!9a!kbuHK#P3(Q-~?L&uh$3kG&1E*ZIE;+lm!!ZX_E zfI|w7C^@F$gql+t&S*I&vY}&3&jkZJ5|@lzF>%ep9pPE+bHE`5N0b~>aYD^04QI5R z6WP$QrRRcy9f?auu9&!H;g0Z}_Br5?f+I?fsW_qLl!h}}&WUX3*wS;sz>dTvBUemZ zvv5avt@b(Kkb)yhj;T1I=9GprTF!}V=-AS8!N88hB_mf%T(fXTc%Ak+;E;kNN{*>G zq2`o^Gg{7xZ0Ok1bHTul#3dtFOkA^YM|i#VIpC0jBT9~`IHBg0hBI2uiEQZD(sRMU zj>IJ+S4>>9a7TE9_Br5?f+I?fsW_qLl!h}}&WUX3*wS;sz>dTvBUemZvv7xp>B5UR z;E;kNN{*>Gq2`o^Gg{7xZ0Ok1bHTul#3dtFOkA^YM|fWQ9B@d%5hce|oKSO0!x=5- zL^gD6>A7HFN8*x^D<-a4xFft#`y6mc!4W0LRGd(AO2Zi~=R`JiZ0WgRU`OJTkt-&y zS-2y-N&6gdNWl>$$5fn9b4tS*E$2iwbZqIlU|>h$l94MWu35MvyjlAka7e)sCC5~p zP;*Md87=2THgs(1xnN*N;*yaoCazhyBfLfX9B@d%5hce|oKSO0!x=5-L^gD6>A7HF zN8*x^D<-a4xFfuveGWLJ;E0lADo&_5rQwX0b0Ql$w)9*uup@EF$Q2XUEZh;^s(lVP zq~M5>V=7LlIi=x@mUAK-I=1v&Ft8(W$;cHG*DTx-F0{`9hZG!9a!kbuHK#P3(Q-~? zL&uh$3kG&1E*ZIE;+lm!!rQdZ0f!VEQF2Vh2{orQoY8VlWJAZ6o(l$cBrX}bV&a;G zJHp$w&jE)N98q#i#R)a1G@Q|LPGm#JmYxd+b|fwtxnkm)g*(DKw9f&D6dX}_}WPa>c|o3wMNX(mn?qQgB4cF%>7&oYHVc%Q=w^ z9b0-X7}$}xWaNs8YZmSZgZ4S#kb)yhj;T1I=9GprTF!}V=-AS8!N88hB_mf%T(fXT zc(?XB;E;kNN{*>Gq2`o^Gg{7xZ0Ok1bHTul#3dtFOkA^YM|h9+IpC0jBT9~`IHBg0 zhBI2uiEQZD(sRMUj>IJ+S4>>9a7TEr_Br5?f+I?fsW_qLl!h}}&WUX3*wS;sz>dTv zBUemZvv5avpY}Q6kb)yhj;T1I=9GprTF!}V=-AS8!N88hB_mf%T(fXTc)#{J;E;kN zN{*>Gq2`o^Gg{7xZ0Ok1bHTul#3dtFOkA^YNBDsDIpC0jBT9~`IHBg0hBI2uiEQZD z(sRMUj>IJ+S4>>9a7Xx{_Br5?f+I?fsW_qLl!h}}&WUX3*wS;sz>dTvBUemZvv5b) zX`cfQDLA6!n2Hl>PH8x!<($Zdjx9YG4D3i;GIGVlH4Ary4{4tR4k_}WPa>c|o3wMN%YM%oRDLA6! 
zn2Hl>PH8x!<($Zdjx9YG4D3i;GIGVlH4AryZ`M8s98z#Z$uSit)SS|AM$0*o4INv0 zE*RL6xMbvtiE9?_2;ZW84mhOXh>~L}PN+Gh;f$7ZA{#ok^jt8oBXP;d6%*Gi+z~#e zeGWLJ;E0lADo&_5rQwX0b0Ql$w)9*uup@EF$Q2XUEZkuto$w+KIHcf+l4B}Ps5zzK zjFxjE8#=c1TrjXBammOP6W1)<5x!OX9B@d%5hce|oKSO0!x=5-L^gD6>A7HFN8*x^ zD<-a4xFdX<_Br5?f+I?fsW_qLl!h}}&WUX3*wS;sz>dTvBUemZvv5cFcI|V(Aq7X2 z98+;Z%_$9Mw44*!(6OcGf`J{0OGd7kxMtyw@EzLcfI|w7C^@F$gql+t&S*I&vY}&3 z&jkZJ5|@lzF>%ep9pP_lp92mlIHKg3iW6#1X*i?hoXCcbEj_}WPa>c|o3wMO? z)IJ9sQgB4cF%>7&oYHVc%Q=w^9b0-X7}$}xWaNs8YZmSZe@FWqa7e)sCC5~pP;*Md z87=2THgs(1xnN*N;*yaoCazhy!^#OS;($X6jwm^%;)I%08qR1rC$gbqOV0%ZI}(?S zTrqLY!X4r7YM%oRDLA6!n2Hl>PH8x!<($Zdjx9YG4D3i;GIGVlH4Ary@6tX898z#Z z$uSit)SS|AM$0*o4INv0E*RL6xMbvtiE9?_2!Bue9B@d%5hce|oKSO0!x=5-L^gD6 z>A7HFN8*x^D<-a4xFdYG_Br5?f+I?fsW_qLl!h}}&WUX3*wS;sz>dTvBUemZvv5cF z``YJ#Lkf;4Ii})-no}CiXgMdcp<_$W1p_-0myBF7am~UV;d`{t0f!VEQF2Vh2{orQ zoY8VlWJAZ6o(l$cBrX}bV&a;GJHkKEJ_j69a74*56(`i3(r`x0Igt$=TY4@S*pawo z_}WPa>c|o3wMNnqV=7LlIi=x@mUAK-I=1v&Ft8(W$;cHG*DTx- zzEArca7e)sCC5~pP;*Md87=2THgs(1xnN*N;*yaoCazhyBm86SbHE`5N0b~>aYD^0 z4QI5R6WP$QrRRcy9f?auu9&!H;g0bA+UI~n3XUi_rs9N}QyR`_IVZBAV@uBk13MCz zj9f8s&B7hwpJ<;04kPH8x!<($Zd zjx9YG4D3i;GIGVlH4AryAJ9Gr98z#Z$uSit)SS|AM$0*o4INv0E*RL6xMbvtiE9?_ z2>(p`9B@d%5hce|oKSO0!x=5-L^gD6>A7HFN8*x^D<-a4xFh_a_Br5?f+I?fsW_qL zl!h}}&WUX3*wS;sz>dTvBUemZvv5cF=i29hLkf;4Ii})-no}CiXgMdcp<_$W1p_-0 zmyBF7am~UV;fJ)(0f!VEQF2Vh2{orQoY8VlWJAZ6o(l$cBrX}bV&a;GJHo%vJ_j69 za74*56(`i3(r`x0Igt$=TY4@S*pawo~L}PW~T;?j8)`H-PtX_#=c4A%qY@2qANIK7rO%KtQ|2sL zvt`ebGgt0BdGjUkU)d*2lsHM!WXV&cOqDuK+H~nNWXzN~OV(`JbL7mGJ5Szx2|Vl* zCQ6(nX|m)gQl?6sCT+U(88T+doF!|v>^XAg%AF@~z6Abj`-F)SCrO$td5V;&Qm09q zE`5fKnKEa|nk{>doVjx6$(t{M|HeLHqQps(CQF_oWvbL^(xywFA!DY@S+Zu!o+D?j z+fY68P`z6DCTWBx$nbDN?3NohEI%^cgZ{ z%A6%@w(L1_=E|KXZ@vWnd;5fm5+_NTEP0BQsZys&n=XBZjF~cL$(k*Dj-0u2=gFHd zf&aliVWPxIk|s-@B4w)7Y0{=kpCMzW%vrK#%bp`=uH1R@=1bsZpDNIK7rO%KtQ|2sLvt`ebGgt0BdGjUkKiVfulsHM!WXV&cOqDuK+H~nNWXzN~OV(`J zbL7mGJ5Szx3H(p?2@@qwk~CTJ6e&}sPLnoW`V1K}WzLc{TlO3|bLGyHH(vt(vwgxu ziIXHvmOMquRH@UXO_x4H#!Q*BWX+a6N6uWi^W@DJ4?+V$!bFLaBu$n)MaopE)1*z8 zK10S#nX_cgmOV$#T)Fe)&6mLcVxKTk;v`9vB~OtuRq8Zp)1}XlF;nI&S+ix&kuz8B zJbCja@W0w8Oq4iD(qzd~q)e4MP1mK~l{-(~ddoVjx6$(t{MkA1>KiIXHvmOMquRH@UXO_x4H#!Q*B zWX+a6N6uWi^W@E!!2f2SFj3+pNs}c{kup{4G-=bN&yX=w<}6vWWzUf_SMEG{^Cj@V z+b2wvI7!lE$y20El{!t@bm=o>%#=Ax)@<2xjPu_e9{2%rS6D3ZPG+FW#DO06R zlQv!Y3>hNIK7rO%KtQ|2sLvt`ebGgt0BdGjUkf7>TalsHM!WXV&cOqDuK+H~nN zWXzN~OV(`JbL7mGJ5Szx3H^XAg%AF@~ zz6Aas`-F)SCrO$td5V;&Qm09qE`5fKnKEa|nk{>doVjx6$(t{M|JOcYqQps(CQF_o zWvbL^(xywFA!DY@S+Zu!o+D?j+e<42@)bqgeWoMBuJ7XO@=Hv@)RgiqD+M< zHR?2I(xOd=E?9BH|{)m^5V^hFFyi*=qE^s zFcG4}h?5{miZmIrNIH5qD_Y` zJ^Bn7GGfexDKq9QSh8ZxhAlhx95`~~%!MmA?mT$%;?0LIKLUS_eu9Ju6Cp~BI0=%Z zNRuH;jywg5lqgf7N{u=VnzU%sp-YcG1BQ$kGhxb%ISZDoShHcvjy(sCoH%pg%8fe@ zp1gST;meP}pR1oBA;Lt65+hE6Bq`El$dV&Zfg&Z!RH#y;PJ<>b+H~mBqtAdLBgRaa zGGoqyB`el!*s^2Kfg>l*T)1-M&Vwf}-hBA-!^cm7KL`>cOoS*g;v`6tB29)YIr0=J zQldY$E%!Da3<}6sUV$FsvJN6tna^lQ|D>v>uc=F=Shc7<@ zf1ZAVga{KMN{l!OlB7tJAxn-t1&Wj?Q=v+YIt`k%Xw#ufk3Iv2j2JUv%8WS+maJH_ zVatv^2acRLbK%O3I}e_`c=O@QkHDX=pCBQ^M2He2PJ$#U(qzbzBTs=MCCXH&Qln0T zCN0`@=+dLlfFUEsOqeob&VnT?)@<0aW6yyjC(c~Ba^uc}CokT7`0^w07w9KQh%gbN z#E6q1Ns2TXvgF89ph$@_6{^&z)1XO{E)M?P9MVk&?dh{7E zWW<;WQ)bLruw=!W4O@2XIdJ5}nG08L+9DN&|El^S&#G-=VMLzfwCT{LN1p*hMvR#-WyYKZ zOIEDeuw}=d14mArxp3viod-`|y!r6uN8m5kPmmB{B1DN1Cqa@FX) zsZpmvlNN0{bm`G&z>pDRCQO+zXTg#cYc_1zvFE^%6K5`5xpC*glNWD3eEH$S=fEEX z2@xhjlo)XmBuSAbLzWzQ3KS_(rb3k(bs98j(WXO}9(@K388K$Ulo@jtELpK;!@#e#qAAvuipCBQ^M2He2PJ$#U(qzbzBTs=MCCXH&Qln0TCN0`@ z=+dLlfFUEsOqeob&VnT?)@<0aW6yyjC(c~Ba^uc}CokT7`0^w0m*^)*h%gbN#E6q1 
zNs2TXvgF89ph$@_6{^&z)1XOhk`2@)bqgeWoMBuJ7XO@=Hv@)RgiqD+M?9BH|{)m^5V^hFFyhi{R9aSCPI`LaS|j+ktRcy9C->9 zDN&|El^S&#G-=VMLzfwCT{LN1p*hMvR#-WyYKZOIEDe zuw}=d14mArxp3viod-`|y!r6uN8m5lPmmB{B1DN1Cqa@FX)sZpmv zlNN0{bm`G&z>pDRCQO+zXTg#cYc_1zvFE^%6K5`5xpC*glNWD3eEAXhEA$g2M3@Ls zV#G<1Bt@DGS#snlP^3he3RP;oCQl(tl6+-$DRX6PMo=L<;I-{PhPzF@a0F~uhdVF5Md%ji4iA3k`!q& zWXX}IK#>wyPYE?l{B=fRT~ zZ$5na5%{b06C^~K2vK6hNsuH(nhaTT^X4c#F-0MZrpkBFqrbCw=eFh8}F=oP)8FLmaS+QormK}Qz96533!j&6$9z1#R=EIjCftY@R zga{KMN{l!OlB7tJAxn-t1&Wj?Q=v+YIt`k%Xw#ufk3Iv2j2JUv%8WS+maJH_Vatv^ z2acRLbK%O3I}e_`c=O@QkHBA}pCBQ^M2He2PJ$#U(qzbzBTs=MCCXH&Qln0TCN0`@ z=+dLlfFUEsOqeob&VnT?)@<0aW6yyjC(c~Ba^uc}CokT7`0^w0*Xk!oh%gbN#E6q1 zNs2TXvgF89ph$@_6{^&z)1XOf;W2@)bqgeWoMBuJ7XO@=Hv@)RgiqD+M?9BH|{)m^5V^hFF*Y79QcDEA;Lt65+hE6Bq`El$dV&Z zfg&Z!RH#y;PJ<>b+H~mBqtAdLBgRaaGGoqyB`el!*s^2Kfg>l*T)1-M&Vwf}-hBA- zBki8B|j+_>}L$%{81zWfOM4f+WZB20uRG2$dhk|IrpEIINNC{m(Kg(@}b zG-%SIO@}T$`V1H{V$6goGv+KoCQl(tl6+-$DRX6PMo=L<;I-{PhPzF@a0F~Z_-bY5Md%ji4iA3k`!q& zWXX}IK#>wyPYE?l{B=fRT~ zZ$5na5%`<+6C^~K2vK6hNsuH(nhaTT^X4c#F-0MZrpkBFqrbCw=eFh8}F=oP)8FLmaS+QormK}Qz96533!j&6$9z1#R=EIjC4m%0_ zL68t(B1DN1Cqa@FX)sZpmvlNN0{bm`G&z>pDRCQO+zXTg#cYc_1z zvFE^%6K5`5xpC*glNWD3eEAXhTlEtpM3@LsV#G<1Bt@DGS#snlP^3he3RP;h(I!ju_v7A#q@X2X^pdk!2qapuC6 z8+RT&dGY4Mmmh(@T|YrWgozL(Mw|pmQl!a{B}bkDMM{*ZP^Ctl22EPD>CmM|p8-Qg zjF~WH#+(I9R;<~uWyhWaM^2o%aOK9G2TxwS`S9gOAf=xmA;Lt65+hE6Bq`El$dV&Z zfg&Z!RH#y;PJ<>b+H~mBqtAdLBgRaaGGoqyB`el!*s^2Kfg>l*T)1-M&Vwf}-hBA- zBk*_VCrF4e5u(J1lORcoG#Rqw$Wx$5i82+c)Tq;-NsBfey7cHXV91Cu6Q<0VvtY@J zH5<0<*mK~>i8B|j+_>}L$%{81zWfOMo%#t9B20uRG2$dhk|IrpEIINNC{m(Kg(@}b zG-%SIO@}T$`V1H{V$6goGv+Kjjp2@)bqgeWoMBuJ7X zO@=Hv@)RgiqD+M?9BH|{)m z^5V^hFFyi*kA8xL2ooVnj5rCBq)3w?OO8ARij*i*p-PQ94VtuQ)1ga`J_Ck~7&BqY zj5!OItXQ*Q%Z@znz~8H%AR)p;h!P`Cf+Q)@WXO^uPk|yO z%2cRQqfUb+E!uSG(xcCSAtT02m@;F|f+Z`~Y}m46&w(Q+&Rn>1sZpmvlNN0{bm`G&z>pDRCQO+zXTg#cYc_1z zvFE^%6K5`5xpC*glNWD3eEAXh`}7keM3@LsV#G<1Bt@DGS#snlP^3he3RP;g1PKu)LX;SB z5+q5HCPS7Sc?uLMQKmwb8g&{pY0;)bmmYlv3>h(I!ju_v7A#q@X2X^pdk!2qapuC6 z8+RT&dGY4Mmmh(DKtDl3gozL(Mw|pmQl!a{B}bkDMM{*ZP^Ctl22EPD>CmM|p8-Qg zjF~WH#+(I9R;<~uWyhWaM^2o%aOK9G2TxwS`S9h3Lrw#K5F|vH2vK6hNsuH(nhaTT z^X4c#F-0MZrpkBFqrbCw=eFh8}F=oP)8FLma zS+QormK}Qz96533!j&6$9z1#R=EIjCfqzIpK|+Ly5G6*O1W8h)$&e*So&rTml&Mgq zMx6#tTD0lVrAMCuLq?35FlEM^1xr?}*|25Do&!ftoVjr2#+?UGUcCA6C6lpSK$&sf(krHJpRH;#?L6a73I&|sLXTXpVVBzZ*|F!q zkrQVwT)A=Q!IKwnK79ER$mu6Yh%gbN#E6q1Ns2TXvgF89ph$@_6{^&z)1XOc`Z2@)bqgeWoMBuJ7X zO@=Hv@)RgiqD+M?9BH|{)m z^5V^hFFykRsD6Tk2ooVnj5rCBq)3w?OO8ARij*i*p-PQ94VtuQ)1ga`J_Ck~7&BqY zj5!OItXQ*Q%Z@znz(1y+AR)p;h!P`Cf+Q)@WXO^uPk|yO z%2cRQqfUb+E!uSG(xcCSAtT02m@;F|f+Z`~Y}m46&w(Q+&Rn>1{0J2E6C^~K2vK6hNsuH(nhaTT z^X4c#F-0MZrpkBFqrbCw=eFh8}F=oP)8FLma zS+QormK}Qz96533!j&6$9z1#R=EIjCfqz;*K|+Ly5G6*O1W8h)$&e*So&rTml&Mgq zMx6#tTD0lVrAMCuLq?35FlEM^1xr?}*|25Do&!ftoVjr2#+?UGUcCA6C6lpSK$&sf(krHJpRH;#?L6a73I&|sLXTXpVVBzZ*|F!q zkrQVwT)A=Q!IKwnK79G%a+AOx1PKu)LX;SB5+q5HCPS7Sc?uLMQKmwb8g&{pY0;)b zmmYlv3>h(I!ju_v7A#q@X2X^pdk!2qapuC68+RT&dGY4Mmmh(DRzE>PgozL(Mw|pm zQl!a{B}bkDMM{*ZP^Ctl22EPD>CmM|p8-QgjF~WH#+(I9R;<~uWyhWaM^2o%aOK9G z2TxwS`S9gO;Gfe^kPu-aM2Qh6L6Q_{GGxh-r$CVsWhzvuQKvzZ7Hv9o>CtDvkP%}h zOqnrf!IBkgHf-6k=fIH@XD(d1ap%F47jHg%`4RY&`Uw&uOoS*g;v`6tB29)YIr0=J zQldY$E%!Da3<}6sUV$FsvJN6tna^lQ|D>v>uc=F=Shc7<@ zCH({m5hg;E7;zFLNs%T)mK=Es6e&@rLX{eI8Z>FqrbCw=eFh8}F=oP)8FLmaS+Qor zmK}Qz96533!j&6$9z1#R=EIjCfj_06AR)p;h!P`Cf+Q)@WXO^uPk|yO%2cRQqfUb+ zE!uSG(xcCSAtT02m@;F|f+Z`~Y}m46&w(Q+&Rn>1nBKvFcG4} zh?5{miZmIrNIH5qD_Y`J^Bn7 zGGfexDKq9QSh8ZxhAlhx95`~~%!MmA?mT$%;?0LIKU{1U_=6xJ!bFG?BTj-ODbi%f 
[base85 GIT binary patch data elided — forward literal for the preceding binary file; the encoded blob was flattened by extraction and is not reproducible here]

literal 0
HcmV?d00001

diff --git a/models/ggml-vocab-gpt-neox.gguf b/models/ggml-vocab-gpt-neox.gguf
new file mode 100644
index 0000000000000000000000000000000000000000..b9af16845ccb4d4888c15f1293b1f47291928412
GIT binary patch
literal 1771431

[base85 GIT binary patch data elided — literal for models/ggml-vocab-gpt-neox.gguf, 1771431 bytes decoded; truncated in the source]
ze)tYqVroeC_wG3+?nIRWTt&fQVU@M0nfJ&C(ZVc0G&yOXz58L@b9L%nUmy~7oe1<1W6F}e1r#O}whP^vgaf*l zvpoR@Z(E|0roFn*C+%$!AfTE$1@%)Z=KDyWuaj+$(+6nd*oj4jg-M%A2o3_fuD5lz;I?+NKnnJNg+(vc%YJ}GIah#ZjJ|3iW_G0hgKsbe zFv18>@{D}}+t===mn^F!TQ==-I1w>(K+F>l2jh?#G|^R*C=^Iv5m^RWa9ng+xUq|y zI)+iXACp2Dw26J>{&|ND!kG5@+_{Sar+G}WH^>rIe8;U)ngV@Kzu+sabR8MQ1cIRM zpk(P<^;4%$YqzUj3ZF77PR}DF-xPF) z=!(9aHW#y6K_|4xt0~6gwkji-9*_)uXQKpZJ5Nzu#^PA7kAG)ND@R-`;=LoQd5IFw z0WiF%-NjmJxDxZXu%2J^cL`y+bJaJXTOKDafA^ZJiGW>?ctQr=dcMPXZ5O zmMVWz7y?|yhva3tc5JqTo6K2F&`&z>cplSRrWd*AFJ z->Wc-@HTu=viQ=osMCG(5*PzW=CJ}oHco{GEQ#5dVF3p!So<%DKlhc9Z{hE55+;#$ z(lovhkoNEj<>UJV2`~HgOkRQF>WD6{k(uKAWksI6-7OD))klOUW+(SPsS(0QV{J)# zq4(eIdW&stY+rLYy}g&L zV{=qB;e!Q#HtC$ruxBdJ!RjICS*%wWakK~uogq0VwBK4Jr(H9CZ=nhuSMNIv4RgJX z0X9`|D(1&^NSOOMCr(n=IjjoDo}JOIBGqD0$(Wr-zkAn5Bzic}3G=V}FBNgcP6G#F zId#>_pW;-+naBL~C%%J*yHO*}w2s(Yrks;R5Gw771Jrd(Y zOvQ1=LM4n4HGHI1YcgB;eh(`E)kZLom6y?jy*8l+C9+d#HzdoW1TCK<{3iJ-3JJ1% z+q}1Ojfl_#O%MVZEa9CKk-^UOBUY=VA$((=P6-*hX3|RPv~ipjv(?d}Dr(k>?e346 zUV)+`suad38Y^Em&XWkBrsYZWS=rK!p#* zah}Cuf?Sm%wXhA+1d*wZ5+DTDBCID3ZFRG4lZvKOEGyl?VWr~*BFgA#fghyX+aYqJ zI1y&uj}Mld9&EH*e%U)Sl|QU+ufOiQwn;#e@B#&Br6ME6vn&RW|WiPIU}l z*6Yi+%V_P-N!0B?1PpT;!s>Eh2+~UTNs!h_KrpFI?)P5qr7LB?5KFmCrp&(8gwe~y zQ5D0guNOu4fltq(cg>Y4ticI0Pss_$oLr-)pDpb_O_NC{D-O2#P+%#E6d1ORr0od1 zTfeV~Tt4StySI>EyJ6zKHc4bCv2SxiADYTTSta2UH z|7;P59*eT}0B=o9yBUXx1{#wMO>b>&h>m$NB@Tr^yz+PYcn!=rQBNbbh=INka5qU5 z*G&>j#R1xqG7-4VSxYEn7oa@}s$QFz?!97w{jLrvuD@0JyCnHg9Y}j;6~9mZ9UNX3 zVXV_Jg2aK(su*fsEJ1^CA*!6{@!4Y6u5FTnOK=}IhJW-|dZiR0PFQERCdJae{?Q*g zXxW*AMCI<9u&rKZ2V=g_XE-;()qbS%{^IBIuRndvG4#Z#&%~JD?ZSZude!2PMy@$Q zMbXp}i{;IB+51Aw)I4s^%mZ`!?+EX#S6FdK^lryo(n0wX0e~Qtq<$SRY`M9D^*-qh zdfcLsY_i^+RaNP}BN+D+2;i| zJS5o!evgb29~?p&>2vRC*;cq*M-=O?3A_4&a2UvX=Drquuh@qxO>ja)FCptY?`l$5 z82Vv$V!A>`%zXtN&5-09Va3xYf|B7Y3_Ix#2v*a>gtf0goxnpzQ4S~qo zY?QG?Cs>z4JiFx!uj|2ay+~S?`OEUfEy!-NI0wMy32jY zO5ZGR*3~)npOsWBac~SPC(Zb5FcYo#-v82`o}OwELU^h0y3^ zS0J0pr%xe2t&0PnC<5xN=5ntD4LB5^hH_kjh%_!yl^SjS%ftT&sr zjWthaxk4y$6$(v;P4aH?N?Pew^}@x;y4vA`EQ@l_Q$9kg%do59l4hEjc{yQ%=Z@}& zTm=HCe~VxbQRVhE5%86?0nH1c@WI0*A%8YDVw#J0k+~lp^Pf87+i~L40`XWO?fpol zhu&yPP|Y^U)y!aJSJK>2>fyJfv&U+=Mknib7%qf(gp?3R=yTb;Cu}7!KH)ATFvR|S zN}&52@1c^o4Fvs(M5b_(+ zuUS739pLu;B>9@9JiE?jYnME|^LRr@0fd$Mj;{3x$dA`)PqN4#-JN2TBw4zFVI9 zj0jXIl#nb=U`oPxTmZ9>v?$H$)3lmS$We}nr%%R6I5v~oct@rTUP-ASZJHf&gerUF z_%8SqJLT}TBzD1tTxc%m_TO-%-1aGu>{5j?Gf9Us`j!HxdooVBG}eT=gvu(EG0B7{M{E0{4JF?Bs(ICdZ~O#e%AkQY*nI7_D0N2qLhSYeHQFbDyF8w1cFYm0cQ7Oc zL3#*}azwayH8YG7IDvIvKwm#8?DSQw{)%1HrLCMSQW(!Fq&e$Wb}ttm8Si>^Y7<$o zyKB`Mcd7aZyO<-T&6hZUBFAVZWgmS>+t+*y+<)aCM-;+;g(&ETtE1${?2Z`Xr!YsX8!sVwD;T{pxIc*y_jwhuA;p1Mo)A|T$Z9=EWi^O&HpjfL?&cM5y>`Me z7awFBBFxP-ipecLx`7(IjlWVna*mPLwo(8E)X2;yiPWuZ03cVwH|)hm{(|hIY2Ypr zf{`AY)AER*TFV5nB)s4?t6H$lNaX0K4md9SWU#uAmBLEuXplFtea22eal;KuKoHc- z_k{?wpj})e|L4B!M)kzEsOaO5qp=5L!*k=CW>J5tObNSL$jxq%+oEs9Q(7p8j|AuU z-t9&K;l;*mMm<0+9MOZaATw$Q#B@05-_ab1RF4^PxYiOchv=v>G9bZ~3m%J+PhUet zN;ytWdCqm`WZRvGjvioCmfh}K+Od9nqe^-<`ngqVa;&@_QY_w{}$5@BZACQ{ij0$mMmyp$b;6WD>k-JwtV2jpa1B44!t3Er7%4ZBk#|2%eOwSRhD6=sj}cvLDQho z?^RFIV7K9y-VtOe<VJP}O4 z+)=RPcdjACF>B>hA|L@-JUJ zDSlB@F2DD|N8h-0%bz`cO8al+IAPL0F^ocCex8z=s5#t#6HJ5IH25!){`_XPvT|&4 zj^MSF)ruNLFNwvh68m(j&K~_~f~=ir%JJDS;<%9cxB0K5)Xb9VeT{f4-f*Zux&4@7 z*}=<^^_!=t+QC>BJp#HTO!nA|uRAoi-q2)^6mIXGlu92I_fJr)62=!|=XIhBk}AEs z2q2ShFo+Ypk?$O~khA!O@D!0a*uh%(1%WTsP>v_1zGWjW<76`sK9^KOq0FKrKWG$8 zBqhbs;cHQeN&19|nW`=0T;aID zPeC3_;&hl}YYM^f?jm_hAbP|!NwBdb=!hahxxq^yF0|XyrO(eU>j}d0R!Yo4^+aY! 
zbGv~eqAf3-ChNdGq{cusq?sao_`FGazvgX(-@g!T!+;4_Lf`cH;8@Ab?$WEm<`Lo7 zf~%#TImX`F&{Lf8e6^v)r-`!dEpn-A#2h5h-Q*@)@vQkw_yzh|)m5mk7|wl;l`p3z zO)jI+Dr&-4gKjzx7QcD-j=m?1&1e5kh(ydiJXcRbO|p^!VNtXw!cahw@1I~o^wW9r z)R}00_&^}xLg(>gA(iFX0z6-VCG-mex6EG|9MLKxsh`R!atN3>EW?~;`U>GX+8;dR zQnhhqC*R>dDlL1IkYJT7k7xNqW>`7GokAgYg61u=(HH zc1WE*eabgtk^Jufsh`sJyo3vQ;4fn56`EM)kQWaUt3~+3OKPm!Uz%1QXs#IL{R0Bf zI5{U7&TeDVo|#0OR1NX~-Vyv(g*noBcpTKoST+KJVEMSo9D%}V$z@_Og<)X83lX#4 zxE~6YDbd3BO~|Spe0>8$vs~YtNNj!tOrsG=^ zVeU`b4WFVoW-%qRGROmxM-{>e$eHNf{W~dr0dV-%EQV3Wjd~OwANKA zn=HA3VY$q}J%meb6BbqkJ$iF|tCF+{ow4vNJzWg6R-eb8);*H6! z!pyQCl3hJWC;@lS{E)MWRM8b4%`BF@m$L6LG?>}K4@|ebb2rZLrub4bG1bD?XVlUz z*3nxOGduIIAbF{SCskhuJp6AVyQ~>Q?xZ?GHkF+q7-p)CI+_{!In1D#;APl8{6_bU zkG`RY*E7PnlOgGPrBLcL7&6{oOVCZkn_d4?#|X|xu$`@&TIzU_P8(MJx3M<@%=EoPrYv%rsRVjD~sy9s;Xqt26>S)s0i2{%I5 z(i`krV32EN3304`96mqnXGkB3c1vw#sSQ6QnaTD&|Y(DdTDy}Rpjot@I@uxp!rZAkeJkq++%;JIHcZPmFT29Hl{<$(V>Tm!JM~>r zlF=NzT7zW-t%=E_=4nj!7rOF~Ak;q~XTk@r3GNq8>`-LQ_j>cTULp8L(h@eRMS|BV z2}-dHaI-~n!J?etaMLE(39|BAKtkgptHmrzSmmGj3BC3-^|guy_n$tHk(%z}%%SV; zCQSh2KDC}?!9wuCDS@l4(=LqA)wo)$%Jei;W-65coO`j-@I z6S9_MxK>F6*o$)Nb16b)nvQ^o=`H28Z~`3(9Icr2MM2QAvVyJI?L|S`SD37hNh3>Y z9cX8HwnB6cw<)`hqGuz&85@PMQVj{SQ_9M-XCI1$d$piII9NJ&FKg!8cN1tiEE9zJrsad@>UoM_BM0eeJj z#tu`Yd6vczDR%gs?gzf+XD?{qaUvuIqSutrckQNKSQ@*2@2;=Xf_77~2*aYR#X2k- zE)fEhlvKV8XGpj-tkgTSI<3fp2lE#wftKm-Ge1ZS$h_wa9ORa8Y_qZnoi1JQ*FO0` z?BJ+^AAr0eR(VS-CRrP6Hx#iO+McRc&kg`Y-gUzIM^eaoZ z5E(F8%d#PZ6mfI;R$Cft>V_Go)&19Pae3FRM(@@vnMKtEp zB0UKK5Ah2@sEU)sYZaGx2`^1;q!N?^haBegBV+=7Ca|B?EB0|C-!6r_nnAKSgUN*| zH4bOOM16#A`T6+ztg?+xa(|#O#PZ(cs;3SJ=UuC_@<}2@$b8tbrI`^tOq{*o)RP^C zfcrmlaKCj;k6cmE5Sf$NB?_a04kcupjV!O<7efu-WDPKbCZsiNW1PffJ$etbW7@tA zRmwW?P*d4d=yItGH5XD8Do_HRq|CNNwg>_5nfrL&$j63Pza54Fj+#co#c_+-!PkKW zJ#-a(kr7pg+_)EB0m>?>X82lBpL^L`!0zPe87wTxEsuC%2H`}bOrTG^(5}GJitajerm9sVd*(wf?CNxlvATL(_zgwzi{QAPtc-+KH)`#gBOzn) zeiUn}yIAbCm z40}z2UQuz9rJ}eQp&~vRQ&@a!_)TtW3(FEOM3ByIEv_Cg*&%Z^Ec^ zFU06>NwTpIicDJf`~wLJ+4h8aj_e*kTR6NdY1Thlu^oa*9per{$)fhFo+0QeC&Chh z)^E)e7wc!8u|L?~M;|k%jKFJ(FfL-j8u$=gEHrb!Ma?^it z|A0dXRih1~H<1>Azz;DIOL__!&Obc1@B~5REmLB=dxJhc>gZ#y4s5VQxbUuxt%!;E zC3^g@Jph0{2H6XWLo&%DVX99lH`?(m8^V&kq~w2ryXvFezT;8>?3_8v&wZV zr=vw&N3W4piX^NgKHz(7cC@iXosEZFMD9?-IIVtW2uRAxA$Tpkrd+DvOW0?()l5g?>MIcb$wIGux0Da~+ z2v$~p)Y0L~U;eVDJtL7LaSUt|ZQu>kg^x;ko`5^v1|8$u;b`FEg?Jh@-e(UYL5uy9 zDIC+F%!gn3=r4Sy&6697z=DWcy<%5O`lrL+`{*yb-xY5Gv8==g!ScSxmc5k|HX6jf zVXBr`GA<0^e74Yz^s-s+WlkV~mHz!nMB&(#QhM{UPYZ4CU=$`E^SFsRm>kmmw)X7E z0+(=LP8lnyUFBNrtV@=wToOGqsYhZyK>QP*#%U`Penud@T7}ebbbpejvC8HbDi(1q z<(G3Yhp+q5haV_kK)0-kxIY?~zzR+HY&_qI7Z z($5>&{rv=$!8e|4x`h>`_gQ0A$!=rRRj8}LYqb zIH5(AyYsTqVJ|zbuAr~#Sv%M{UM?y~ByGlSA|{5(<35d7bCmF=tPIi)`!~UVhYrSw zQ4pX1m%Vpwj{G{#JohW?n|--4rdkr4BB^fPL`tSC+`1bi(0~CnDGr;I3qS!VHh_Yv zP>n`Y?bxyG7oOOeUB8-{*f)?&S(c@de34{bENP<~{$Bfg&c8CBtX|)Cyb%+zu{}c> zRrSA|lP6D}ygZrt%om^W3#t0X?uzTs@Mf?a!{YUq$EI#-cH&Gqa+JjcW~i@&1sw6R znhxEnLRX~@Zbt0RIA&-EUG$Is$)GAxy%EmCSTD3#{O$3HxNHBc&%H^PAK6^2rSTg;?bIcX+*j2BQ{J1voq3QpvtR2S#;O?rb8anGHljjed>6ipE8+uu#y z^>!ZiNLth2jDGU*sr6ebyrOi5S_Ph5nFP;+UaaCx2oO8SPH;i^SGHvHF#4OY&&+%B zaYOPLi5hi{GXkvIVOVNrKN5N<>xM1nN4D&45-x+O1YR#5qaLBr5_w@f$nF*m^B9P# zEC1GG*uyf2=aGb8x7Bv@C&ua_(bjw$0FtEo#~%lSJo_25EY*}igT$j*%j(FzqcZFM z?bsL)RPo7iLHvCFx#!~PtEEaT2Vvf?*z$}!1G9W|-y@MJZ=34IU{rONMTS{j9orMC z>81PxfsBX@#zS*cWf(tsp}AE|UEGHNg$hixgAJjM`VrFK$`uO-E&tmDP(LaAy_%@_ zQBVpj-HbA<4HnedEdrJ@bf8mfRC<>!punq{NtuCa8(={)tqa0o8Cz?n&kxqI4wHC- zyAGK{j!NZ!m93T*TllyC0F8!&A%?u3d?;YBYq3p68A-W$Gbne9IpL>@*UM^F@46Bh z+(Y)n)j-z-!SF}b?ad>BjpRYEFUH?Q8jI1&L2AE@K)FRQ%_wTrI#ZM=o`T>`{$cq( 
zx%_ebLahBqL0T+gQ(ig>HTl>RL7I(Ca}}$sh=RZ1awt)B#|7e*G#AYGeDm?Q<2_U$ zG0E^twPQNSf+J9=SHWH!F};kY49|-S|6eYD5Dx@@8rQRROca!?T9^PU_80LQ;*yjw zNTSFxx%P5AL}}EnK0s3rA0&3eNqmL~*sHZ91Q4$QlOf?`Mj;xGHOc>+-r{5o(% zJk#-JTo}vgh{D&@r^8{99vp>&Yu8PNa<6&_!Wg({2wGB8auJ()L}+{Wpdrz z*7DkI^T`o71M?>bu@8&|pe5yqMsM?0$9^7qQ#C~kw^`aAw`-jM!m`S|58BXLBBPRz zu3j;vX)fdn&?vBTGiPW;_uVhW*g2i92g@zjRF#clSG|p{5buY1SNu|3nMXMs&ct{W zuI!(d$tyPRDlty2;!J(rVco|rZGBo!nw zUt&XlKKr}eUNVmf5a8A7DxM&H3WW=0ax_155 z_?xx-tN5GmUk+m3NSDbo&u3CJp5tL|NJVtYaOlGU6=It3w5JK$U5Ry`3J2%?elc{2 zwBM&_ftL{DV?V8;rt10T2IDRJeG0g$S|!10Qj9GU6!#*coaW&Jd9h>GxTJFb;aA%9 zD8Q2sOA2p`U%(ncqDP;L4MAjbuS^G(u9o{mswTdG4Ym<9TJ2@hGPs*~);^1zdMTMt zYa8(kD#h@Aj9S4>Qy92-jaf0$lxfZIGxd~=S&a(}%*jz4pkI6HUySsFtIr$G&hgiJ z05&(IcMj!8>BPpN9^_fS6_k{Gz$ z_D8(>?xLXDf+pEfKRa*(3L6Aw@8AZ3Gc+M5>=DWQZm=2&1_p0pUP@DK502j#}Z48B+_1sa7r zrW9U2vvTi7O@iE9GV^if&ZHrskj5Al0>-HJ$F*%bBzmp1$*TJg)g=Qt$J^slD>v}r zJ84RTO1m0N!7l8(;s({XwOwKZN`>%^(A6NI`|< zgF=ggBK>bnfgFAvwP=Ix=!|_ti~}F!1XPzCXHF~^6+Az0@?v_Tj>d$Vng5bM5rRv- z89nqb`R`Zp9L~_Zva1j2rwZz|Q)UHxVJAV1-1v&6DLsb|DN5}_3>ga6T$O5kQD*V@ zE1-~*v9Nr>X*V`vWZ=`&wz-k{Rce%+@f?-5FyE;AQXax6Dm{M?0|mw9cOIHJdk6yi zgBp5L?z0TtpinUlU$3MUlL$?L$iIID8h99|lN@DJc+*p7j~aJV4Duj7F4Q!B;me*$ zk0?s{jk0$^p4B{!2em7eD$>e^h+Wf?YFh*$nDCjaFPh7gz*QIvt-%qeRkvv!^+^o! z+=Nv3%x_Q^!k4zr=<$NKMim#Jtye()ZwW*n7wfk%KGMV%tkhDV23R&9pUQz5gi|;xuauZq^Ej zt2G^^#HRc_L}!o`88KrFD;be$NFK}S8PiK)h9@#5^lqQr9C@t#;PFYxK)~Kp*_wP4 zSQ77z%Zo=>zZ6v4khj>`t2avm{b~(?bbf4~bWgtZxFq#roMxCCVVn1jwSkRRKCOkewONl6pw`KQ6kQd zSR7<#JE$PHhCzI%lXcJPoqGDj0yke|3FecHl?4ul<5G)8l&oAs=6KNPx#wJLi1|o9 znX)jPQA@ZMu-&{om~yxh2gOk_EuHAOQEDnEx{%Tr;v?u{4us{JY(xF-6nMK4?)Kmfi0?rm*)gJzon4 z#VC~$lEpU%CvaU*MJ3Q!uvmCiaErgOC=0jXQNe9!QnM&{UoznGo6tb<&AhL>) zq%^;N?Ru>H)%Sz6%bQ!Cp-KgKV0cvxTqkx}97miJsEsKOZv*o%9a^2cA`Rz#HVG@6 zu1{P{*Zj6Ss(=<#fAXEjKa8;+C6}C|1ew!8thC5{Tz4~TI^O*_ojOm;CN{E_JSLOZSg)>%F;ZBCn<#lNEgV!}x{fH@Pa5jqvVit~QSb4f?e>2^;nn%Noyfbtp^3$3jrZ zA*g~1TT2OYg1K?pE|R$cD_wSJQdWA;UAJ-3$*+}DyAyvMl`7QJ5NLIVsi;Mg2i_Un zD{^nzrFxL(Gvy~t0RpxdN~y&l@$vF9M_)l29a>R2($VvJ#gv!ReRx#%2(lO!3(5%X zkOw`7n9I&mnTc*NMM6w$5^#DM_D8TUpY)atIcY!EzF)J0n zPUeDDzVrC+EVmH7WYEO$XEMC6JnNm0(L=$Za++LVBR}+G_V92R>W|u60pfV+p>eP$ zg&s>(P#`zu+}1TgMWBd4@ZHkaOAgGs1;~{vcRfA|;s}RU_r1(4sAUp?MawKfbg=L8 z-=7vQfJ~TD-<;wJHR}-zZl=^Y8^+{5J;O2%8cG!_yL=tARr*>;b8y8=VS^F3=Dkj- zwxXuv+aw;SC~J8O9%|o8HYCShUX6wQ6Z|mWj=Otu>)`V-!P-HOf$c}Av^|k^f6-Fe zOX_4Te*L?XvIE%?1thE$MFRLaoeQ;O&=3vpob^Gnrp(5N9nt~wgh%U6*2M+FZivhD zsnm37*hKWeT55=Ld+E!%F(|QZ$uDx!Y-CH_bn(t5N+co)ZKp-Feo(yI(gkYuequ^S zK?>KGst?+#vjOlbeAhc+HDk7039)M370X`6w5`>YZgDu8r>SdGNKf9b@-#w@?&i_n zye?!In0W8@v*SJ9z!(v3iUNAwoFpJ_a8jHODl+Htcwk%pCf;~sJva^wFlJN@c<=$R zX1oc*D+O6^eEz>o17B0K=D*wjiQ!#deD2KQ!1*53E&Nq{J=iVMei!NgJ?|ro($Z`e zKXuf{iA(TgBOJ}$hb<}3^u_(z{9XP=dW1YACJH!;^Rlrwq>BVSp!BfNZ_KSl@a-Q? 
z3P6--9VOw(1+g{Pff4vFR{!$il~>|#U@N)To_UmhxVarw3_@PS9Pz^x7#Ql|7yN=| zw-HH@*^uU=m5-MY!-o~a;|I!ms0tNysSczVR@hA23y@o3l zg#^lsrlUSZu(gzrz@clF*yW%|Av@D%3&GCn`0iHOQD8>;`_ydBJ;Xll&~L@=PI72CYkGps;B8l)sU4q>dI*uYIw z={e%(6QfCEV|nYAc@GkI{O-OPSw`g^ON*vDG$lBy8%$ttJy5g22_eCHT62)2)-K+ zUghO2Itf~=U0!_VO|K+2c`e-wf+7}>JzVOJ!%`sd9MrqKxI+4ri8?u!El|*GbX+Xr z0nXq^@#P%h@N%(LhTFJEJ%tmjfMIbjLDd%7@F4`5Q%fR%GbjS^8244tA>oZ93iJlC zsPIrV`!z@vKlP}XQ(zXW4ccm{e18D;j7?Fdp&pSnqepUJw(iu@hKY?KFi2p8iX|3C zQ)WP(+M=$Pf1X^=SQYs0JANQVwe~Z_FtOv~!BOQN;8WfZf~V{ch+M@Jv9HQ7=JCiC zxg&T*QxD3q&PnXC?M-|X6U_x_$f^fD`C`A=ZYQZh(6{c)bd|qxNCeFD>TPp?`z32L zM3=dOaTeoeJ#WR|bNut_Y|!0=NSb#JNEaBPDmwVhQnAHFSD*S?wl!j7eu8rQwVJ#(xTlZI2y zuh{3zLB_`WB@yCp_%2(Bu%|}n)$hbPC9o(+<5&SN$-h8wbxrzDd#b6kf0sQ>VJwq_f%CFL5|L zdi0@Lp{rIeT`e4K5T0qH-AS^>a?&RCr}1tgDf`((X4@^d;eV~T5cwqiNwY@<)%q6+eNln5}=HH2q21GJ?avh<-C?@O4wMH%Zc^3W=*J zR)6MBK#2u2w2ZoiKAX}4&$>e>GCwRia1oG0R%%VEIsiOe%Z?LI^^Eq1WILL8C<@JV z@i)>Y(Vq*!QOb~3G49KY>n~mN+Ti+5xr*D&r?c3(S9uO<)+j#*FV>|V5@&Y0=a;?E(%8)R!9OOJIux9+z zZ7~yJ=E!C0Bd>cy`u z$8nL=e&OHZCx+nVB-^%+0$Y?%yvR=YUmx}VhpD{#=b-EV|L_0brhwV0Q|Y~1)l9?+ z6*gki?(_T*dO3x6OP0K!x)}g~?~;S`XMXru?+{0%85Hhrb#`MbjLuJr7jXwK6fiQ! zONqw^pMUtd5B(U>3ph_I8f|al0|qw?y0iz;I!33OML={EvsHDZNja&J@u;Xfj&Q4E z;s^ox6e>(bJ!}e49Y5{eR#+v(3js|UsT5Z|2tbbQm^`!1fWzlsh;J`1u9c(vg!*MD|*TnyU-H5^X($VNX? zKv3Kh+)GSu!wf?Llq~HS8TkL1xGb0xrC!D=E=^0fnlZK~skA(|g`<4qAFT5SI zm%`+1_$Mk6^_%DYqaazqu+Ys9!N0D@D&m#7cYSJOc< z0iO|+M|atSql*!^%AEbyF+FwR#o}|^#@$CvYu@K)V1gp3Vh;1eQj}Ft@YpXmmpo~# z-fqS{DA9kyzxeSYD%3|@I!%5V>Hh^y6Wkm-y*P`se!6i zowrD*JS4GaD<^CtAf#o-Tuu|AhYX8%;lj!x?No2#3c|*lqs8MLd%0Pn7t}VomuInh zHuK05AiCG@D}Be5ryc9QBPVpJi`}3OHU&R|9aAG?8~r_7S~S~|hN~W6z8k;w&b^q| z^6e#iA-ZBDSgu7KR%@`J%vQ1}S>{kNzF)s%3MdrAUJeEzDWX^4pHjFHlAfhXe~fZ8 zOZ6wN#kUkx?@E(-z4v{ANeu6enABOn^sb}wDd&UVh?vKIk4s5ciCKJ9pfk8Q zd8CudbE5(LXL5IsFXxZ5(Y3&$ipqmm2Dn8K}`)6xbVYRL?Fafx!5uq0ZONj zX7JHba>wR|$G%#rfHGncLS~NN(=|Nl6>q(=DVs0KBiwDKy9ML>$z!lGCZ03&r<|p;n7{y&p&_*9|oZxy?NJ) z6lmbCZHzA_PBu9HH&gh=bN|dg-=<=b$d_VR|BrL9XEs!N=DhJsYH3#fxV)+v#PYg~M#I%HiV&Lryzjg&@fVdGbe> zpZL^)eh{9DQZp0)kWX(Ygp^;o*w2%sWxGna@o`ouxHH`Ia7OOryOYV*6e*1>nyMckS{3YIJn|V!oCWjDRau49OSz4z z9BfCB8vtZI#}KNhJn=~GBR(9H98oqn-i#){d^3I!xI2Ep-Dq38L4C_^lFbx*ZZ#c; z@2xNSg=lc;nLOt_ah!s!IJE~od=<YPsz30QJP?k7X05|=V;|Y59a!v zPScL3tuA;}pgxMgk}(}mpf37xu{QZn*vIe}4f8Z5JT zPofl^9|(wr-jiiG?+!<~MkID+UKfWwt%_58DH+_EXB?k1)E#%-g`j@S|I=BpqUmL* zjXq*I9r66`lqj5U%wXA z-$Ust{I!e?8RDoE6cC24c%wIZ1?51%{IHbEKn@GDEFlnVf!Adgu5be+04^&JozzC5 zr(#|>n3Cw5G?M6Nd=^FGZ_UsINAV2Jg4)*c_tY6d=G^*7yKfbl+NbdYOW@5ge4tE~ z7&d4nBviteTr%#>H-mbr>SXbG@%ZPSlB%EM7x9W?96VLA^2rb4IYZ?G#Cr)y(~~XT z8C}>tuEba2h#9T0?#33|Z0r=DBsZ#ry#8vMOn|riCXn6kKtOKhpO_de95l1F;CEum zsnikRP+&SeZPE`@V8ExR2R=Akm0IQ9!~k2jr(&V?TD^|s00>|BUQU6dZ9|Eg2bCQh z{6cv0bq|~spGmtrL3$)U?ew6aC&+b`;Tn&@LjJDDw?P%wc&Lh~;r7gpCs%6RMG=&u zLX457wa3=9##ue(H#g0}1lQmTZ5F>6xKH`CGT19jv^fRbo2^=9bV8%SAVhJFQZ1Xm z6HBdfa!fTo!ef+kD0@Mr!Kkhfdmr5-<_AixMAurKPuAItE^jr{^OHF?zdotTy0RrP z?hXsWqZyH-u7~C;|IMoEP8=Kd4M#PsSS`w(x@{A?f+ZiC2|E9*T2kF#lP?L|mY~TmS$hLU!jm=n|1^CM|#+aVz z9mR={@)Oq%PYx1c%YmXk#04h1drpb(iu09KpCng1Y%iqV6Es4hg9W{kEuE!!#?g08 zh0HLEF$WghQ{6gKF{tuDHA6kbOt$pkXyEKs4Lv`ZA$4VQV=KSwQXE&V#+T>zO)=Yg zze<|7bZ+lpsU2jUowDsc*-h*^GZ6`29s;#Ms0v=39;{*t;G+Tu8rAOVz(0x;k#^R8 zI=Z9jqMeI0F`)&8n02K!7_o8l$h_cD?9;__FeBKwT zT0n^_M3$L2YgKt+Xol#C?6qq7{)}}>5Z0e}rBbC+ZGOY=rljhDO;@RFBr<}HBeZ2Q z;@hJpM%kyFfGxUE)=f+k1Q`t~jB=|>YcOkWRuQR|7CD^%fFtUU{oWnaI8?@XEH04Z zbJ@$V47}`17M9{~5hY~pON=bGpHK_;UvM&3RcnuM+@+?joq>{ft%2V?vQy;b`WPHZVDRT0O z2ek)E)AFrDM8RDq0nNL}Q!uBK=s572(7xoj@ItF>G$1*awfXSK(LEk>QXSi+SB6Ow?nhYJ*}c02-OH%itZROk!WYt4gqKtY;D 
zH&`gsDA0NZ)zj_DF2yLOc6ss4vmp$ZZ*TcUa&_lsxckA?7vm}_f_i#8%I7FYWGkVA zf?@hV0BIaCZx*fYN_^Oe&f;Nj-UqgCj8Cl?;%^EVXRC(~6M2B1^Llxm+M?7H%P7ti z!(ZCih}F1#!%tq`@-vzz(@c%F^-kRKQ9=obHhnd|U0;p|^-gLn-lYP>MzA9glfA(W zzh*KX_DI_)b%|PHSh7dg069+{d;w6=D}}=0xD~?2zbuardj+GoQ>(DfksJZ1#=4Mo zQFYS&Ug@nVzLB0&c91Gw&D^li%K{Oo!9wqqt6OE=dO&hg-+%32VRc+y#JH7cP{aTS zZ3!d|K$FKBeX-=ItanAOy>+$;R>zSytqhFgGk)(HF2x(kTDHcGbIK7gnIv9ekQ3$5 zULnud>)kvw>DXvGb_3Bl^J%t`z!s?Ydu5kQRpfG-O-l|KxPfPc;S{Dp341-}fcw@( z8M(5D4P8=z7pRd2fZS&{Md_59h<7=U{K1YendUWe&>SORa~QD-tG^WcgX+F{&ed=u z$$Ea&SQ{{i56A-j$)sC`#+8-e;mkZ%H@$ch9gY6Xk3@T}Hr)8@?y!ySf)lIoP^;Wu zmW~jH1>yVeewtBw!U z<=er)b7d#P^s$t!1ivSWGr&RIxrMhw>=6ZtaZ~fXpx3(As4T7JUyIA!E3Bik4W21! zxj@H!c^9-vl>_Wgudud)VVEN`!h*I|F&MIpQW;;mz38`ec>b9DXWleF93f=m_IX5K z+=CUVs({6k?0_C6@27dE(XeY)3#DzTP*dx#tB<68>Zmuec)gA&n8%Pa>4R;Lh8H7J za5L6M8@s3qQWMJ#V&BaqtVRISYBWkTs-wE8`&`E=(E?WAB1M3`8Ygrhf!HS3Y}l9j zGKWPaBlAphyjVWUHpV@`hJ+@SuW(NVrSVgO=z08S@e<)aEE#aFV3m7B=&&l>78fgY zG%CCdP9T`{w_bR`Z-nlyW3{xgWTGz2ni?vRGwba>Wha#lyhoBpJa^j2Bvy;FeccZ} zn52tBAa*Y~8}(4;gy(>(@=yJVG6e}hKS>+(@?!0FFz~I1Ue*lkrDg*HrW}{;y!EMu zd%qPQiYbspi-Wyj0~XQP!T41F(EA^F@OT#l5G?BM0&9$^<0YCJc`+dc277kR$)foO zopYw*TN#`nJX{;xDFaRcI>Wnz?__bu)Qs{0r5w~F?7 zlGB0)#Sq4hc5O=3O55?u&CLBu0kxN6-a@VIW-=0MDj$CQs8y@2Jq8M1pwXFqDEb(j zOd^Yyi&ET%u}<|aU--KBwmfq(7E`p41aRUieoODfUC%w2U^==kv4P`<^IpNZX~ZRR z^Cy>o=`qo9YMd$BSSSe|<9zjiq>VFc??XbSK@YNZ-gx5+F?+n;wZ<}nwo9Zlk1;Wf z@*>HUoWL48tqiTo=U0f+biaAzU%iW54~*@5IPd3@&>Zbc&1z zHSX_M(XrZPBl}ix-|+!r+MA5hFeyD52+p0Q=s^CJC<}dc&-}h!*uTWqE-xMqUEaFG zbq~@WZv9_^-BB*nXAIZXBp@nE&)^#sk+6&F#ozwCN!V%!#*A7K2hcRPkC(@zVI4eUd%eOVfCu6Zc$j;8s6_Pv~f6*Y6pO zc-$}N?j8&ivsl7)l^$;;_wAs=nsotY492OSjr%Uz$nji?>rqs^owO$XCM8Dtr{&QH z%)40IVBeGX!B*0o)+9n#DmA=>jTXtjpkRuuN;IG94|Lv zrT`qRcu0aCQ1`%l@R#*`e)W0`?3HW9{|~1rsX7RPyMPu(lqB&NpmCIY5gP>gqIFHf ziN(io3a&v4;PK7Pjg!8WVvaRy1j&_z425k$Y@zh7JVQ?YmS1;N~^T>+FU1LU+QG`FtWxU`gQWB~eLN*R~=IZl)8oXOefdN$YHtA}r8vclar8pCPD2=yTz^N8& z`Q#5SKM7`{{ghW(b*}JA$92aPMfL`gy^FX7X6Ti^$L=i?F*s8edJ5ND#_9ACM{#?r zFJE?*dU&OCMt-)^e+^;Md?D7SA6?BtLdQc}C<4mn`P-a-o*s>b6|0D;(`(%F7`1Sl z06`81R5b=g+SmAEOmC?URiT|JyVDvD=hS<9#jZlh85ZfraLAZ3fjdJ?O0^}X>gCM% z6Qpe5Ff-A|_o{OC!^9|8O=WDVmI``7xo?nf%AZ-{^%eSFC+6@UGr;0w@b+d*S zN`=xf5y}2cCY!KtF-)cMZF?cqh#T=5FQ*#V;C=>e21msWa54$9&mDGQ>}2y9+d9^i zRO{Gc;CY#N>nyjm*EtI!%;&p zqaq%FRU{QTfJtSP*NXSW#=pGy+Q;#>4LJkihmUY91(VDZfSM^WM8%j?JXAqz*0iEa zJw|`bi0zF66p!UpC@Kow>55QU8s`>~>+JHU-T%fYR7nnWRE!p4-ba^)GoMk(Y;Y1@U0l5PL;jllN)muoPl@p@04dvvA8!VKWx)x z+k85w=w)jLd#tn8%3&i+!?mf>Cz}$ULq&ZnoY2x?R$LiPdGKui`TVPM`?bd_R zh$?;rx#vrFa=g&H2w^q6&6ACz%(-(G``-YCITYZj1A!daVE#7EWQhkerLlNB|l;Vo16FjCRO)NIpd^)~9 z`0&NJUS=d7Oiz{P;bhCh4DT~i&Od7}vaBH;PQ1JTS^g$w+*Frxh?3GM zf>CBx^}UI+RlX@!L;)t$!GvS3-SUX;4M?i5o;(4L;V9qR`a~tX=;}8-_py4b#-Qb$ zn3P4?>L-?v`oFPQlNxxZtY0!Dd28t(c=BhLrTiU!KDpRq9UkQX77!rtmFL28(SSEY zj15d`X@Py+w95=64!3jg=s%<-#vtLFTJIiDK{=ygbyGmO97PX6Br}azU1VP{66ljJ zl)tkVLX;cDL+P$|mlyB99~Yy;7EB$EDV(0H*C;1JMG+G~bShwGssXK~*4v_S->*(C z^g+&dfftPoY(8U)Y@GbMV7iUTN%BY(N{u z03Y4WE(1OO|sQU_>dMcD$37VO5(`wNf$h$O<^Q(1&7 z{em8w8fY~tDd06MXpWiIV$9H<8p0(d-C7LV#kZWuF$?4?L-pDrWXd5Xi-%`L7?L0V6&yP%?I~#zfsI$SQ@k<-*>n1F8luFtZ}xkp5^hd z%`E>fOtm{{5O5fknsHQwV4HXyyEIs95SJHkyiq1`QhbCvLkw(_2nb{9OH>WQcNt?` zL7%rUU6-;LOmMqj?m>S@3L=~jTO;0BO3gyZk+}2G#|iqw6ii8V;0hL(7w@gziOXpP z<^@Lv;p}eizZl~Vb&32cLg1(n@f(PhGOkw2tpcP3%pgi{_E;4tqwY%FKzT~i^uyGi zIy?W;RNTbk@TY^`Zhq>{gcuWX#R453=E;S4@FBXg;iaSLiRXv=gB|7(PhCTC^}#uz zN&ng!iB)gt4S*9d5INA|Z+Lade}9zU-+k`xbMZL*LrRi3&9Wfg4I0IjRY*9$qh4-^yOz`GhO|hX@@F! 
zI4rjPF0kexobo11xK~I{FE(|CTl~R~+8(I1bZm{zvQHaJwNJ2UaGi9z zsxVKPQ{|d8O4e3fSny3G5yosEVtRMTq z6^~z#kaz;37f9D2gKV%9@t7j|LGBcYCh@rBX_AdqJj-vpll+a+Np4iG41L64+?Q-5{_O`}-`~+oVZRot)!lnV4=#5w_Q^Jp^-)BY+`kVkP{@{jjs9&v*iM_@|qmegw(+7 zqcIforE;9bAbeS0M2GO%hW!vX3U)+9KNw`;9a9H>KXeqkQyU*P3Z+X&A?@mcQ~|a@k&vvx zz5MtN%u>V?$vSsqo-CiKehXh1l_b-cASK45C7Jd(yF99pME}eZLbh!ydmYaOoDqNa zwzHJYT<6_frT$t6}yP1W8d# zrg20H%M#10OeJ13(Dy%h;8Zlz+{4!1RZtcd$iO^ zJw3>!Z84o+#0HWFk*zH~yExsZeq?I`FcyBrSvMb905b@Sg(kD%o3wJVwpuvM3)v34 z>bWH#p99y)T8axSRnig@fsPA=k^0)(7{=8UC(pEfwJPS zv*0-)*qQj`-e_2sok(oI8W^q3l46g3I|gUXsZdj#$_H`?0f-4IPz*4SVF3Go9t4;q zdJoZz*CVb+pH8;Zz8Iy*XtiEyFArK6bOpN6K}_f=qTS0(wV@ybwMybWxHl8TMLmJ| z8j~Wov$|TP$fZ9P%NI|@!{f891N8U_ufAa!K)!n(>P+V;*J!I#t3x`TyL?=rit8AO z=3nGltBLQ$;%Qh0{>(BkDt}(>KN#$!;{xYQsklkCgEX@_p4OMmdlG#N$%Y`>*G?(8 zLISE#x)y<#7k9tx88eEKFO9>NqhG=DSOc_);DPz2xDDlaMxjLj&eUlWxTLVfyEtTn z-fXj<#A8~LcyZ5goJ~^SnaiZe(O`09Va5=NOt!PkUn)_ng*~m&wh+f|5zCvkM+D~a zejySMg*Q27W0)kGJ|C+rM#w1Yj%yhTKdbyg@3q@18yB;iYYK0ClE!zu?=!inztf^> zg;bJA-W=MS1E&MG+&7Mz%4HoWFy6AsDSna^eX$(`zlRIjUW-+0b zh|Gn?`uV5+1yjszW&Z18vA1BQuV>7bhN8vxAbns=047L^tTdVxJ%(a=F)q%*^r%gG zX{j%+#=V>&)&EIptO*h3G`T_7lzLJ^NkDg#w3T?a-XpznmgZohNklJpf`+=!>1a&< za>^V)eh*T1lIizD^8(JWqjE&x(no9pt@4R05OC-E4IvM$tb?CAAY- zF~y&Q$xf0%nUuIF9)eXM35Bu*Tlq6K4IUc7Xz_P<(Xk^Gy=-J1{;WD;k1iLjb5 z#p80g0HXNvsR8N1Ao}A&q^L=6a;@PEN}V2s3c?qdop_nu;4dN!#-KtF`K=T z=GY3)Fs5?qoD50Yb!&G&PsJ^I{T z6PkxklVQrhsQPgT62TOOi|vOanSws4!BzloCWo(wf8d|>4pdKp|IT@qW`dW0>{s0C zmE1~Z(__oTR4+ey`~xq`I-b<>iL9QuP7a5#Xf`QUFztdqN&x1g&&Q3Dg9Xg=hnL@t zq2MN}0b@c_T9J4Kxa+N0z0ZB_s=vOKUqi@3XsQ)qf|L^n$fh#3V;~BoRQ+u0fP{y# zct|>`fFaTSs?E}i056v|U~GlW3gMl;$V=_se+P1XDMrj*{SHi*w!? zKY)ie&GD;Yvzv49GmQ%Pd~p2r(y)Yj4DR}&Vu!u2TX_SZ-ET=D@gBVRPN?|c>X(vf zKqR!1yz&~ABtZG}2xzEi1AE#JCZg8mss~->Q^%nqeGDC?PJM4ITX#lmNV@?rlZHh_ zrxtEGNI?{$Sj507Lv>Vy-pZiDt|!$bXi-}PRIGnkpd4ks4VqGIKr%T|Z?7E5ZJZB&NlQ|ys3u$%=|`iEtO-L}PzLKn)<_DSagVt(fi!>{hl^P_znc^1gTYUM!Gx1<`CGvwoD!R_; zzg>zSI-RJ*IO$L=EXueFzOzvVQOKW&2~uh&_8dC&Czn5r7b7EzS=7MqF(ZMZ5?-RP zzFm5lJt>u_;m*cNanD4GIxMigig2WQ*h)9NVlGygu`nf7xMRqr%0I?>O}WIaWYHO3 z*J7%)V}Y!#avUrUN~UWI4R;0q{Tv-z9LLpcEZ-*j0Y*!D@gncf`>n74i$!Bs41 z&M&c#k!eXsY z&k_@*t`tRGqHRQ~9u4%8)TeMrdbx0rALWt0AP-+*xD^_`#(*0rw?yPr?}0^%aSVnJ zOuy41@#FkulYk4d;55=iu&Fgmm{mWR}6^&I$FZ4 zavq}s7PInxAGq3#&I91sOA*mk^A}_(8nh~a6@`F-9ZJx}Q~KgnKM2PG2xNbr zYni!zHC9C1IwrzgEgINdlCdBvky14g!XEyK%6_R+(nn^lFnzn@7 z&RhjlwWlnAE*uDi0)b=J zKneUTs(_vo4?ZZW3Q(#+mdraNp~Lb9SR$H>$l^8mNIpB4DxnKNYA(36bFEreI5UrM zipCVJt!-|3{i)De_Sph~O2S%&R!i^Li`rD{77HoL$%v5RNo?0k~26xY!+6#2&OIzj}-!Of&UDXPs`bk8wNxrK|V^}cwj?y%T1>p{S ziWo764Upug1F6?^jmL%u2E+J zbL$*B2M7}jTgnZ|jEX_O^y;;^7$cOB1It~tHPy_Ku9beluK8q)^z!1h*ZlW#sy3rZ zE3Xlc1}xrs*UWSW*tH`Fw#gS@0RC^6-|=!FW%`F-{8zeeE-(B*oOU%LJU1Gc1IGaH zI_H{+x-2(K2@c(!TU`p4;9wt)O5)!h2|mRmh3D`rtsLT_-aFK9fYpm_tCT%Ka3Qsa zx}tJ6CL%D}9l9fpix#_NT?i$F<31L4wO4e*{_#VqwtLXj(Rq7)sJWIW4L}F% zd(~CPlpM?#H`kMQrd%7!aYfF0w5YbgOlE>mZ}rbGse^e{EOmKeQrCk*cj+~`(_w4I6?g5^Ejws5w@?;xo+UQrM^ZXH7be*HzvUJc zup>m+on@aEIA4n8gG4VX25B>J(x8BXa24&R`~W^-`gbn%nFzFkS!%LZLP2Vj)GMF& z5o%UD*!6aX$fhd(2na?3MxMq)j%l;U!r^2)=tZ%xq#dHSi2oM;F?5XzDBlkHb$UQC zOED&PO$8!fc5~!ISjp@H6VaBaWx$F zNv8N9q>GIVx+6C~3wV%#|U+-moKsK;*Y6hc8zHHFXP}&MPZO^1A2O{}T{tIqmno2B- zSrNFXVnRw#5meIy%R~~NRPGq%v)f*vcHn%q7GLCN*22M;l4i*%i7*H2?CuAL85V<6Nh7-+vsuuP^8?*>) zg>qy_AQTOmS~b)?sXFH#iDiz6FD+~ZgK+GAF9xaF)oMc!zu^tYdC^g1CBEnUf4L6K zvtKDszPxzuss}uW8Bp@RTWsg_e-5trM~b_4QJ8owUYH`wfcdGQYW(=*kN*DOS#qzo zt_}bTLR#+@dmif4+Q95FzVW09?#4rR;6OiA;;aD+VpbHrmFF+s@$7T4@Z>jH;v+#ysjNx#WT z>lYIbqUBB8rHq4|#xn`%SQv7A_?n69{lLmfoB5j{6ug??H#RPiks!)kDVQ*i=37xv 
z&GpcblJjMt~&q$h=QB7Ey{o(_hqBNaxj#;D5oR$l{-H%|S0{#8g<6rpY zL=S7*z%^DpsI@&QMj7rw3X0louyw1b6ljgJ8G=(yHmb4FaPe z{Zvzw)re`x#xmBWRir6(vH?Q=B;HDbos%e@dFCtsB5~x<@IUsKCyDyQ`5&+2EUhWj zue}tTf{nF(|yP8zX*jGhigYqw#NJtSG39%I58{)O7fcs@tH+lt9OO3bw z;qXM8Fu*6xZ&m&$mK}H6%ic%}>huk)*#nqE8^?zBQ?4 z7-gAijHPHOeU-XU!sJw&;!$Rthj;~d`G^YADlH&bu!;9M*j zLwUEqZB|AEIiQMq&lwrSy}Wp5Q07c4Ar_+ANS#i_#*QgtJrX|l=B20$kiSV?Srsde z7r^Q6<39Jc?~{U~UPi^=Y>9OB(Z}&Vg;wflu-57lWtUQKFs;V43gJeTM z(?!9+;czIeCkCnF66tpb136QQ`pWykB&qlXl{adR>|XCa^1M);;%%AnC}bJNX^b6s z^H05$GNX+FYbcwzkaXU}t*$HvO|J@D;zAvQW>UmO^Rb;4vt<__8a7s<$kEP!>~$Uy&L~`Z zkZKC21j&Y8c(OWO=e6Zb6*?$`YBu(GG}!P~WaeFRd5roD2Jl{i(x@=9s%H4N zeI%l%Nj#@T%hUJJBja6M5YxsgE#kUWJa4~ydAXuhX^L^X9_Ec1REC;@07}nI036bz zQp;;ETx(8Ph&{~X1YI#C4+WT1i032^gOZb&~e0%FGQ(Y$# z?wHCY)~+UEK5HrYJSv9%HZJsgG5qVVU5holRSKNEH7u&YhUM(s;W$ItBZ?^%NPr?f zgbkh8d%ja_3#DvoR9>CyM_0GJhBM{>y<)*B&ATcYcZVcuGY88_` zV#ebFlqDF-3v}fL(-G0F-kjcJhrN8yY#kp793`arIj_AQ{M9hqe{R)Cw(B(Cz^_`w zxke6-ud)!yvqLCjkDUUJodey|QwJ57FvN3FLSb`Ka@xj-)qL?wep}Npm6}zL=T?`7 zn2RU1xf*{z`NPZa#RM-eCdZRPWp1J9jDMBuFJ5}L)EtrK^|X&~3h|4}Ke!uf2~uo0 zJSuO&VCFrhN9TV;3>DruY*~}>>U$Hy<;N_dQN^qJc2kATc^7BEu38$== zUWZ%!(x;Vy-%GYi9flmRnbtqQW@2MIDo{eTFO#loM~ejzG79W|zJ*kP*svmS!GrFz zea-SMUfwN1To2)T8GtfZykJufS9NF1!DpT&Ftl8jD_@I2oz)`A@-a`#T_kxn+qvfv zLHo$Q1I-$Y8x2MBo%ECDWJ zwa5fI$X6%?yOT$l&U*rUg(j4m&R>P=<79}r0NN-b{_TFbZWZFE7ZHRIX84!WBKXI= zAper@gGUY~$qh14?iJ_f+&*sb7$b&w(w=b?-{PjqI2K;y5J@3NIR97i1Zp%CS-LSi zuZVUe^bh00!EUdZXc7R@?1Jb-0*}j19BXs#vG(yhZh>B|5WczEwQG)kWHKjW>AL{W zOoI5L4$&fiPI^KqN^;HHJtp z(Bj-6f$L}SaGZ0AL?K%zHr@1W&oNU`s##HxI1XMmB3m6VO`$O~FG?ZNuht|ih>M=3 zPDOQ5CjAHQS0^-`%%96`1>)n9+K~*oV&zA$+ejUd&vU{Hr64+ zC^Gw$o!4~}wmP~Fa|WT>Ll&qq!L8-Zc*(7~pr9(-R9xDv z#rQ>Li=~GuH)kTLN>6#j=NZ*gu~ViVRJ*+R@|&^zN*9pJD0d#q(#w2Klq?Xd{jnD* zl?TOA;(;Ux6`{LEe@s6fvtFsuCVT52NHtksqC({-rz3BcDNkz5F>6-(&qwL8I(<+) z3-@pj1p!YV#F!BN#o$C67k(gq#B={czmBt!B_a}EO;iR=3#?r$DmdHdKh1Q|Qmf*` zd{pK-9!&;piM_HLl~EG2kO#)`k~=F~ao01CK6o}R0{z}UPr1g~bhK^b9Lx*&;_W28 z8W$&Hb_kmINS>C!m!|wS3bKk+<4vH=kUlOXiyDf1v@gqiB{lWLi(OY~$yW@hoH0o(1LFxA|~c)VN{=n?-s1dhg> z{Oye(!<&WwP7ZC>Njr~wIm1u`O@5Z%9`03qzMe&ww48LLj?a>RLV{yyhpL!2m@cTEv4_>y|gK0s5P7*a8VtqP;A~N0XfTPzfZlI#|00wcr8O z6~q*NRO`Ci9)_7${oc*a><-~7#3>)r`LfDs)gij1I1S68HX34I7Q#p{ z78QACDDDkHQIH&o7tHW+@ev?QSlhTx9ej_AH)(S~Xp62wDXQ6>Wv%8Z;MY;X7Xc1l>1_YyVtTTugd)%818ol_xe zhTL8&Za!g0%~R;##7=^8>!mQ~iqtn>*x(#_z?0`tt+Xu;AWI@t`i!WYIdDVu#|hR` z?!}{MFJQLW2E>CFWSkT_o#2Ll?Rickrx^?X)E8s$YguiLOyUENTv}tlYL>Vp&M~cn z)GZ?Kecpw&tM;!H&o7kfBT(LxRW#Krt-d38Y?@MK9$J#Q?^{;M--|)t+>C+3Gi}G1 z8fe-BP8*lvUPayYwozGCWPVJeMKt|RRd_ELtfK)Ioi>T@M>eWIR@@KTp_cY+aNVP# zB`p-$;CpU3=aI$pGBrm45kg-{=iVT}A&xQ2L?l>7m=tJA zz{+M&3Y@r*eE%>FmaC5nie(~aU@QO?n+Z6E)CR5;FU1iqjirW6d&e!^E2fJ*o#r>T zdAc*;3Tz5*evGL7V|G~$Gl28Yas%(ot;G8oO{^vaY&G*exiuA8h>xhRI`HBna$hBA z;-h>ZNByis%^`eHaPJ7eau|_u{V>_BubOindMp1ZpH`9uWxwkc5d;K;ftu@28;E|- zp@#-LDhL+FiE3yV6~;vMZZE>aL5tqxK|Ga0_k{Z62cUJa1mr&MSWt$oD>)lm2C(?> zEVQE%u6dz^a8ZVJi&q{bp4ctq>8%CJAj%-Zi}q#rC_Yg#)9|li2Kq^z^^?|(nmd$V zl|6t5)!QHOxqpoEm8>uDu}*`c9-Pn^iWm^y$Qa+`qaY3Whe0!C#vB)YLE-(;wB)JB zr{tLaa4&9ZVd$R(b%}5$TpyoVyku@;(ezNwCPP#GCWs>d@RC$wiuB)BqFUChh>D`{1%tb|@;=Qdvc<`yBNI3U) z*u&M*rm%_?ObkFjO_|=PjcL<;@!24Ihmo?CkIbrtR)`7j-181ne7v(b4fm2@_J(bJ zv4Wgan_6U1{3&=*UN^P*1dD&0Gx4Z zL%gfH*WW7Vw6eb;V@$it)WvGSe;{=>=s*poqMEV6c3SBn&s#N(VhJ72Xr0EAsZnj@3yEhU z%u6Nh7199wI60V1cbt#ZDM+RM(_ivW4HHnU_Gx$l$8Zo0KI)b8e}R)Sqb182(K!e@Lp(W(6zaCkO--l;^hnf<5w$bRT%tnq!xAM{DGtn4 zGX&~kQjJuV41;XFx4snAB!==U&t$uAIfv1Aziek=IFz@aalhUJw~NQDo%6t8CzLCn zDuUV%Bne`rm%kcQB7yLdoeZS zynhH&<6qoK{!-QkA 
zra?mp7)-ti)T>6S)X5;n@=B)6F^$5HYO3bZM%L;+KJwN&!ZKm4Q3Uos&2 z@5f!7xCa1i{gP>41XGEGrJ(F)2ESA>r6e5o!6o>Pl|hPY#P^a3Mk>*sw_yte&TPY@ z@qRVQ>5x&6RIHBSPO8^fG8xZ%@~y{-6QM3%(PF`vUY*wQ(yQ5hhQjSZ!*@v{ zP5TL-g%X5f5zaCyLe{y9GO*uMc+a;~=6Klo;r$x*YncLrVhrygseW46dXvrM2{gkz zm}}0E}TN ztio}D?utV~QqzpYc1I&SEgR*gi$y*8&f~v~H$VB#-+$9A0}#Epy8}Es){~CKE^dfw z@`_(UbED=%_cuqaFhs3_HtjxJyhs<1|1yY)|4rv*toQYoUXC@z=YTsUe!u}5w>0YQ zl$?vR!Rdjb4qktykW5n0U%h%Y*JHy^K@wGY4A!xDcjWE02aPVHUx<;sUQ3}TY5>Z| z1q-!q)C5=x$kyak0au1u0rM1ukR>6_Mv3c|;%KNg?VL7AgpTn_QJovV66&63-c?OTid2?!W}Q*ZxU zw&w79XgA^PDCWc}>}D7q3QA%2Jqm*tJ4mg~6QPM^k{T+B>nXxeG$)5O9)|~@tgb8- zTiOWbjkVPnEgQ8Y?BH&jU<6xNIvEbr1*!D#RQr$`t;vchdV82|s7$K%%_cig&C~`tVSeemx8>B7t1;LdCyO2y41%TeGZg>7@?@8@ zH8-jTYvSXwEBqjl5>uihVNE-x7*N;qJw`^o7Ty(~l#6cP5DaD%6#@;b$TQr-5XbQp zJLsJnt7^U)Iw`~yDAUCJh8f5qEa=R}HI%@3$}R8iAxu|O0dVW$gT)xN+yXeM!I$Nv zPvoS6GE_;T%C#Frr7(haif{exL7oyMmv3y`<)e0k)&YTS9iEeoUw?kjgc4lY|?+6am_&umf| z+!X?dMmFw0VNg_1_KI_mgr zP_|D4T_#P2P@xd`U6(pwy@q-CoXp5D3N)YUqrrEUEdu~ji-(Y9GWH*r`vC50p zJ97gcM*MK~`7HOK1a30`wRv%^nFKou%lLG{ThDqT6+Iau1f)Jm^2WnAcMjsuI~z-W z78REdQ#v#UFI0(Hs>NWcHn18Y&%z3=1!wI+cQ(_8{bnyM9GXwoB>nl8kmu3L-vbi8 z#G0^z6ie_NmA4|qNYTUb%K|~Iy&-WUdt#Jru<8Sc7hpUVJWo#sfAr#&D_+XIR`3XxRMLsBaO1CP0Q#_bt%Lq;wz6FUk{lbh$FuOC+hC3MJzv-hkjp8l7jU>f(nmi z_c*6lzii$0P2Z?`IhkmLCdykq5S{qkT3>W1mW~&p0T5?`b9_s4hO0C}Y^f*TzWkYg zo?;ptzwDuYwr%Ah4R^44yb*}xq>R73z|&d)4~S@zuS_Ot#VBts*9kAs@ZU@ki%TM@Uai*9ReuF9(ho$Gn_>}z@Pii*J|89pIfzH3Jo)&5t z;kS~kGY_{0eld9VZU^0oi4nB$JYw^jt2T$FieGQLtL7c|e){T*@fvbDo*@j#w!ajX)xaoK$~hdoj1TIK1hxpl?Wy#9JT zPFeT~fJMHg^)3HT%!#O}>Sg=m?r^577w4~9vE>1MdGXP(U>lY@kV3g#YDA3S^5V@m z-;8+=b`Ej`)Zt1k|H=0*za0~W9u&)rr)k?Fru`$1qeOXiIkXPGt+n2tR6Ko&$V$Ig)GF(|{sowPna|yC#8oJec?a_-zDy;8fQHW{y}bDCT=uKaJ^Ml|D0-(3nlgV? zP$tk<)wl69P!&u0=x)hqUC@g8aa%(;$@IS68vpt6y=^MmK9x6(iS4~8)z44c#(*mN zTB}X&DX*fZ#-iXE8>TV&@v}YxsZfMen<+E5Ie6g36-W-iu5XZyI~tzbG+n0TP)AMha8ZrZqomw0%v}@i`pcQbkH>a3CJV|d3p$}VzHGgtozbnjJ3`ch>KIDUqTP*WhfRIQPAB6j4j4Mog&4L(Ja}*X!4_!r)#hH1O;b6oSiN2WMQx{Pffp zYnY=Ms<^G9f74t1z)YGQ9mH(OJJ=fJGcWa5hofHB zSWcyml#{V(8*AmQ)*-tWaNbgoWf@nn*+;~v8GxEV8JU9MruM8+uO zv?%CL4*i@>Eo&wi*EkOagditzym}Kk8IQv!f#DQ1=a9mZAuJu2Do59s5-6Sl1}AQ| zd`oV@(Fr70i4;Fpr*T2pw44XLUj~cfO~5`d)?+WnPk~~ zI5^_^wO8UbAfRwTm7{x3-k~7Loz|@|n5KCGSz6~F#f zJYQHt8Feu{+@7Vr5-DG7)u$X4gxz+>e-PQv|77*^e@Pjgf9*9fa&*k%jv*X0NR<=S zspv0waw#HGkiMu{FU5*0g{Y{vC@Hn0SSN0a=w4`feuzYQGO!D?_A^-b?XgJ;_aCN*~qN;i374K@6fXpcwU$$ z46q+OXH+V|P-9Nxucx=8!?PBVAs2n?U+4uw#dta1O~|Fz#sN27T!BdB(|#72mx+`H zFf{GOGn#Ve!J)U2CRLi`NVTNLi6pfNbrT3g5kS=8blG@L&W4uAGj$fA{X(!i9gP#l zi8HY_2F1mEFd)!rp&$oXs}C94&`x&U0Yo2jK?6VDwHb&bNqsA(IaEn&yZp^VXs8l2R4(xau?%;B(O;)ygpO z1@CJ`&*H&4JU!j(*V6_#Ja&Jo8+Z#96|F1X=pXaqXnV`^dhWU6IiYcUzkx2j zviE5CRw9z1RFQ2J@J~%SKJ(JDQgi#oX|gS}OK{L)ITwX?m;1abmAA?v-zsSTG?x6M zlY(|=b|5%)?)BVRYPE8}0QcOxC~@jGD0rB&sky_cSd10*x~RA$Q%nM5UVxx9JsKF@ ztHD5+dVJ={{-uHnG20hEAAkLJ#=S|0C~lAiJVLf{r-51ddSkT)AJw-rni?hF?E~Em z0&NXX;%&e6*%ztjbce~vc1c-NDf4w-8zWims zhA!VVJ$mEzTwx;8HAeVh(k^@c;{qRArg(;R-;TS2+a*~{8rkeYdE8i}AKFHi7vGYg z!!p-(#^ndjs%$KlP3i1^=P3}Zl(*Gykb+lTBY2i=HV)4+I6^g8&XXWf?<^G@Sgo+; zT=O0l*UKs}g=%2Pk>}y;2^D3h6SV_N3EbDe5Ib544aIFJp9H#5a$Hw%Ex1DfT29Ph zn{gp0(b0P*bsv=>uIiUg!8d>`#lry5_dXhyt#Zy@&KI}#14U5D0##zL(+n{b%8Z`; zp+&RWd^Ct%YznlNm%ZC)g=I!LMpt3C+f!RQNOXDe?3LV(^m%;2KWrEsxT#hs zW<0Ej#Cm|WJ)MQ!@8Qi%5D?beLC>4|AGGgPmkC#D$xYdl5)X3N{D$tp-U~jWHiW1D z#>;V;_L=mS7%7gopI&}De!ct;m;e6qk1zk&v?TD=#Wp;^Y4Csc&`?3G5)8&M2=nJ- z<-Wx|;H6jdmJ}*D0Dfl1ZEkB9(|xlP$`Pt(kOJ(pZg;Axe3U2j`;G?SVAA+*F_s_r z*DcBj?jJm3W9OeScRX&L`Z<8*cb0<5=LhQkC+R;#T?)H{f`SnzC5GiXhv~jgMK@|;(`}=J+0Y|hYpWuwuA>NXm 
zQY*i9JssCn3v|gYiZ{J(=YLDcY9%UXEMB67Og=`-Zr%%4rAbRpsew=9huceAvDa`m z)rcn{zbg7xky&QY^_K45{DkqO7i)_uaq5VeQVxb2?t5TQwQ%pcMwmoXML2y6swwjN z>v21n=kI&>pn7+eQv4Otrx_th)WJuYwTUD^yVKZe$_U$hkm1k4!>8a@LUtBY@p!n4 z)Q@S8ZR^NtEuo|X>M>RaLsU~NryxWkq)~NAcJbaI+4p`|Gb3ic2`b9mnz8>JZ48+owIWmZu0XQV#4EVzs1$qBW~fP46h*5$b# z_mktUjEUEHcdFL;n`I$ufb7MfsH|5MA+mJFty6tW4RbcGB*W%Qu@}$Np^S%buZRgS zo5dKWo1uJN@vwUpZK{IqXss4WJHVcb`m7VX%pO`u1^4U~AS;yvcIo?G<3tTyTB5z6 zVtD}n<%vPp8D<-^TRXNq5-zaTbW`CHzxdrwT*Zb)0~@4EqiKI=vEC+}JAhs(_~h&9 z!sc0@3@CeTiM}!rx-$mF>0k7)u9J-iqU02!i*a+42KZn6qLqrEB>B*{Vznxg7q=2r zohT&c&!{OgYRMbdW9jF&e3f#i?ZCbRM8z;F6it-Pn8|i=oo*cvcQ0aReg>b%hu{}X zTyI3;zyorN%jpP6r{yMkJSv=4S!x+{NvXPfrkuTMa1wy%XZ;zboDobxuV6=$^7{4k z(Ee77@%_fcMAq#=Jby9G^NhJGmBkFvmPxh5eZ8=`2upuK(unf$;xK73@6#w z9Hj&FS#NPQ!H~)vqaP}^?7_VLnhCbq0Ba^P)`^NXv9Wy3E|##>nES>3C}eL+tILbe zef|ZHn7~GcBrvUuO2cGveEG`${(g=Q<*LY8iFUcBd?HebkgB5Usa@O#zdM-$txkNfT|Kvbr9v6 z_KGUukn+i1qz)7_V-nA7xTO`#2%H0k08J}J<>528u;nZ1o#@OB%(`E3jQ>~jo>D%} zsVh}HfY1O}B5H@j_cU{(v_;u^5Uy)6Smj#RvBp{-;31CuGG)@#^*%Eafi$2HvAIF2 z1y9axEiIZd(@Lo&nnZ^o)hn9m$kt=m#o|3jE1c)ODlVT`T?kknqbv%+;#(ps3YzY$ zme;@$nI}>IfuPdGBHALR?MW@+%h}JMZ14DC$jQ&%_3FcrJ0N(?dQwR|W44@}e)$xF zU@*>TMhO<6*OQVS&nhVv8M7zE+0JDJAA{65$ZYVaV5NJZf}8l= zl`@A0o9Ik)52@j*%R>CZBZ$&-RR1a0qrhZAF5W=4U!L6n3@tu4x8}CI8d6WuFk%}m zZDbi>9TRN0m=81bX^db8G9EXb42x!jJ)^p8$b7FU3bLTZX?PyBY}2^?+V$7GI9>4& z%DTpTL234@Prz9LnDwaPUIW4rho=YRO6O)%z!Apq_9Ea%bTSx)lN7Vl4>+Ns9VLHH z4sz?v?W9N_Dc0mc?K>xlNc+-SEJQ<`m}&%hDg73O!|B~-$!X?#v$tXtXuFB2+U*tT zvqjcQO#cS?ujCh*!sPDe;oQn8r!2aIQjs8r8Lf4l1GB<*I9Xl$dM*%UURAh}*Z7@? zt;}x+r&(Pd!sb;5pyEU#NGNZns@`XCYCKDBg1lqg4~sSgdzey%N{2zZ|y+kscG+BL~Wj z-Zu@dM({Q#d~x%NFSKW?2BHHf^^7L0X?0W70Dw)g{%F?t15%pjYmFhNWPaYd0wixlOa>(=<4laZB(2Lpwp2&KdCSF;7x;9k1< zQ4EIhW>QMN9|R>%R$=*jldL4an+Kk3!V-1dtY-Qy$yDZKa$*Eyiid+p7!}yS0R?0e zkhO#f7i@6-l~?@?l9LoJi?v_7>3QA+0)g0m1c)(_*ztvP<2;+n6? 
zz9c-E&n%t9gI4R^#3x#ThUL>5*d)aFs;fedbacK90-y0vkAXs&EPZkg%#E)@ihf*< zc(`G~)yhz+#FeQyIVx5TslG)pU4P}8hbjid>`xa3`f0i^lt9c+Uk{qE-PwruZ*%DR zWfY7veUxL6P7K`k&2jIkEj87nm~FD7;!&-cj2|U$L>g{%d;NJ_hbzg-jM9o~W(V4N z%IB5-UYR)sRsHZyErQvgg&*>4L^GL`63hw<2Njf+>%G9s^mxi8I*e0!hJGW$@`gQSEiF4(L6y zPg1}>`7f7$8U$HhS>Adt?$i#XQ)cqj<;9&6MS|l2Glcj#nE3e%sCw{OEX8D;r;n7E z(NS5pZA?{38t+X6`V8PlS=6Y=;~a%tMZp{+N*DXRC{i3fnO3s67?%;<+$fWpY$Lql zvpg*#SI`i)+@abXu0_5KG6S|LEJ;t6q^{hoEk!f%$|xor@Zxns!?%er>BZ+;pNM4BTNVvb**42zK37^W1W ztL{$LqL7YPJGakQp|Y!Hy(ERRP@Wh*sac!p=I!k7h7F#&|MKlkTOLs!OxC7J5(Z>B zk^pQei7}6X23qmU(Et?8j5$7fbmhgEJ(XQiB!kbV)hf$2RTB^)d1EJ1r{*}d@ZWph zqq>13Yp7lu@njeRr8%CAmeUfetHQg0*fN)_s{~FtR5x>{_8umqO7;n>KFgoLJtla>{dY0Ji+F&x&R{6NZ44qX-c--@WF{^)neK+v%dbu zYu92{Qv}H0#%FxrRK}h0c(L|YV5XJeXXvjUi4_P1q&P_Y*Cfo9Hk>gJ|ONaLp~u@Rde|EJIR z#ugtaYd{I&u49egL=3Muz=cUGE{07*N|x^X9()!hi1zN5`4NSw%RLW~#7N`5O(Fu` z@&v+D%LTB=p_!YW#zZbDp`&xGlh^D3kIAZt%|Tlp%eD2>bQf`Axk>;=c+)6jF)lAY z{x}BK;>3x&k>mUgKbuzGMKsXtEn2?>(ObHi-3YjD;vG-^sJ0T_oC})3jd2OU#&r53 zgR4gsmtjf>&Y@GxFN#}uZ5K!^7$zhYQ)QB-^AP_wN>@;9nyulYWnexl$n&6D@C&1f zcZSkQsb941rm~I2e$xGKOafJWKHjBN3mSjyFlAs#zlfP z_EP~TGMX}xOq7-gY2F7iB6=BKP0R~T+!s(wjMAX^=1q`Rq^kCcxiMpsQ;{r2MOUZ- zIqBLo=vgs0j^J?~5syBwsGj)FT763L$Q8x>> z?uLllpwQit*%G$^6o6s_Cq1(J=!X8U z{XOSJ=95KlSe~`LUzT>;0P3yto|7j}p1eGnS#b-^G_wHP8|XH!Ey>VI?v)@_>o!ak z2qdnj-jlPw-oV&=I>ovZVp2DW0F6ulO@&~bP560CM<(OR;&u?^e)1N-ffs>tK5^ir zZh&HocX0gAa)-$0D424PETk9(3vJ%^Kl!o0+0uF6 zDPA8G12v<8X*?QvEQ}soCv7SCjI^Zqk$RQh1K^qu1?h-Yp}tlSx2wJ)oMK!;fgiopwG@TToZ`4ybO zc2I-1`x%T!Spq;4>)xUVIAs@tInXT=jzx$vv6mQwx8oV5u{vwh<<&Tv;+2$PR5&y^KbZd z4fPwZ+a#a0Z(5#WU|{qYwu5$+36#wi@5Z%HL*1Yto$``b!Qf5QX;9v9Jcl+}N-R<& zF_rl%S3dW=sYLx7Zdw0MVa%OsvOfP@tO0duY9>g4A?>9K*jDUQ4mFM$e@s(2!zifr zQ_4K3Bzh~=5z{8pnFsd{;oDIu%?$2kGP{7-CBU=Ez@3V?h_=2F~mV8jtqxG zZwt4c9HQKNLHjO{Vcca45nn!2v~d84WK@tY6SVqtJ1Nza@cyE7ef;DHJ_oJrWL1=j zK1!IzXlN_r;)>vU#^kW3Okva5mg%Y2EE*1J+{!4q@yM3!2Jq!)u3r5d|LcwKTF1x2 zac@k7cHvGrH}gHyuX%qF({_pNz?2>oO9B1fzZ32T<`G{dk5zg`ymM!SGcgr5^k70w zK?3C`6k#O&13Q+TiWzMzF2;`¨w^u|SfVLV2eQ+VbduRirHeG>qaq0wXdED4SxrUcG?CO`_Bd2hL%(2q zR&~)3l2M%1+l}*~d6fJ@GFk@*!mOkkr?SfFN!wgEVY;Rza7(O_(vgJp%=yQs=X88l zD@dl|x#74(j^!;w$YTUE#2`3gk6bknbNdHo1sX3Elae*k)O;q<7Tx-->iZ_E8uN+Hpu?eu#dln2^W#-HPtBfq4;6s@2D&kGIufgN0($t;|)Gxs{$? 
zbxe=pcTpG3g!l)#XhV(0+|H>RU4gR14UqpnD}=mj-Bj|2Q&lVAD!=O9aEF%i zF?yp@UW`vay!iVkADIr2K~;aeXoOQcc14Z>W~UlLQN6jw@FNrnN!iZUhs1J$+vH^E z%|W5om7nWJ)2JR%@H*L?7E#_X6v1l5Lp})Rn4bE4{J6PUj>MgQp|m>u9Spr9PDM)) zO8L1{u+j~x5{y5}9*#H;Od}AYGA6wmU1|cLT4KwQ(FHF26j$VMhpWRINd*jJa8xWD zV7wt~$Y5@#T=oYSKlV3I`=tGPhq-!~P%slaJkC0<&F5Br;}E7B3a=CTEOIz_V`*N= z1SZu3D%3S>lK;f9&~oiEI%?V7zTrs$d;X>CkuUW4Us~Ki-;Z@S5Sh;!^>`kcuTZzsfJtz+g==ehcb^#XfEgfspRwj?qB0gIMxqSz+nWxA2df4 zP`x}Jldm&7)+sK?zhC=9RGwIi8lvbvtb_j0OBeJ+krpqGnjoLX^T^elf$pZ_d302I zJdHUzV94@nj)1&1aZtYUhjtjj<~V~tbyat1BgxC(dGgyhx1_hnpHUji^@m14C7(Pk zmx3u+0SlZD$1#EZR8`2igq|@ZRrND?Dm!{BJ|L7^{8hC4@WStq8$Xd3g6Xe|Aa()c zWmKeL^9{H!3p!;bl&Xc&4#T_9!jaO+yj9#iB$geN{8f5ZP9A9ZG6+UalMg;%Y_=hY za_K_|&Z2k~AI8HnvBfX(sd7rsQad9pE!mFHVxRuuc?DCZ$6wmQ@QbN{+VH{@V#gtF z3jzp7-uJUB>n1QF35UhQagw9CKxmv#0A;g1|3BH-8)J zRtpfpRAT#N{+1Stgf`RlLAd^dC!gs5#(K#?ht2BLi3!+G(LXQVXJvZyYxKOciU!5p zAwyoMAT-Wk+}?xu=~mufLZhU;LI;S1V{%*L zT5ldc>Xm*WRPp$_X zZ-|~49_!}s{N)Yl@&yEo%|V}#Nx{2}{ElyPj$XAa#Lz%>+nuaPqYJm6Dz)OsMKf3B z!FYRv9|D}UqyPjf1ZDA9D9(=$@VNNc2f$GWsiosj%E~F-MG}g=cboaI*afYscnXg?&`_rd$|89iEg+ ziogTPQd<5B3uioFa8#;scZ#o*=E!|w*u2@{Nnvix<*gV5Q;#*1{mE~pAsLn@*&k=K zpIXT6dEZ97lbZu0Bevz7b6{FgxH~=osb5YQP-K1?!H(GwO*e~p|0q7OpU{xKm;Ypu)bjUshRX#Z}*c2=sXp?f|5ttKs-J; z$}+F)!FTzWO}2hyiOK5IP+oK{9T0*_!nOU2Ucus&VBj;SiwPY zqyGnW0E}0V*<}}#z;5Ln02kw{wG7X?3XX80?B;eWsRuml@hZavB1HtVy6#VFPX;zha3>>poH8;>O0>Myy0=#P~o~Xx7?UZ@~|LPg!xD+Q9Vx;wk*SM-NU7&vV9NY2q-LYYdjJ|? zc5UdqaU2$8yYrbF1&n{zN2-!nCMvf(vTA6KBf$PrXkFx;9_(0lnCCWF4s zjuzIy;z+lLQ1S;831Djv5$zM(ec9Ox6C}oe?ycB(q*~_KeesKC65gsB1I(!^?lPq@ z7(F>SzJ-kADga*S9qyge6*Pz*et#?a(DdO|o^)biE&Jt^Q+~H;OHnBydL|#>3 zD5_OUeN(BN<7A3^_?8@LxUPTDUSLmMMtAc&o zcg2dQA3pgw2tkX?+Bs)lz5mtK$8e)e)U;}TpBm;p zCAJ;(THGvBejd+xP>5hiox_GK@p!{2;?!Jxc9<3+xTTm8(cHq4Tcd~GR>1M*XXHID zt%=TrtxSYGJgI<5@W;f+yz%ur+OlHZkSjivs%irQCH<%kLC9&2aTMk(WN$sM%E-0X zF)r!?OHfiNE_q@(EY}!+P(rVQdFx zVLaX6VlAE!WwY4NufDP$t5n{SrZ@Vx%<(NyMEb5R^n^6Xa_mUUVjKep>%0E(l z&>B32#T}*VnA&F~pKZSw7wGS)ZC9_?SmL6vaq)|M>S;&;(@9-anqh^8kf%#oHnTd% zT+rQml|U&ZR0%uo05Rd33-T#DyE*vd=%F^gcky8yGIe!`pP5M!0i^mTr=#3fmrj>~ zo7;yOF!OoS4y0(&#j%5`dIDEA^*u;?S+gPJ(N=+V>Q)-;%mvZ^#Ua92rfdk4lpd(; zJo(Te&oCWKJ6V$8KH{smvJwT1H5p&ocSjpKpcbJ;zSUvYOrhKNFP{E4(+Lm>`lNqX zbt4;fN(JzHPn?{iqAxKYEYv&}bik~Kmb##Mgrp(2Bx*At&6Z*dh~uF4=KAM`r7erB z0#0uYPN31KTNtw4qs^~pb{5y_Y)lVVgK7 z{^<)|`Kwb?!3C5)_bcJu)0F4b7_V-tx%rs0a_Wy$(k`~#>!1X#)e=v`rJoYx`todE z+k#kFwt_R|c(op#F-6YbxCjICG9?&;!#5BX&2+BoZ54=lC}v4C2WcK_L%8Z@@k@8k zPgTFFI&=grFk{aRP6%K44a%GET3GlPUK*F~08g8|c)#)rT>AhMZ^FEZ+H0{S&IJ8Q zN;i#57e~+)9x4S$1iOlrVFM6QP2I6~oHxIG8z}6hXci8ymh$Si3SvEhG^BWn0|^Si z9OlpjGEEL`G;u8!6;s5n`BChjQhti#q1P{Wyx7$_8zW{=1RLH?z@S*{!Jl-aju+q# z+z9HT(eL-mk*5-b{D~Do*J|_?7f$<~B$}9GD`^_1qnCj4N6$Wc*E>wyk6;1ciWd`2 zn(%TF?5m(Ul=XD);)I4g`9A8WS2#uoBjN$SEKxG%LJ~9XRb!(WJytW6a74zlfi}W( z-dA|VL$p+aLI>PFtxSe(5Uw+&JQXPcb&Di}$4W4e-Z2mi# zq(OCQ5v6L=34ZzFs+6Ke)@JTLHn;hFRn}*W(*P7EotTR1_r}bs+!Y&TPRAfrMp4ZB zn=b`h-I;LbUms?<6H-IU*#SQei&@=@lCCBgLMd%Vxw&Gh6NNctc{0sl@3-AKYpIvO zXTfGb%@=3tacu}eQ?KesnlBw^xaZy;m)iEazVIJlQ^Y)bi_44io3VJR4t_R%29i>? 
zqLts@x7U~xhtSQ?E5=)qrT`fSCtW(OMMPw1IUQ-~#%d6SeIArdEVcYosZ&T36D3J( z|GWT-$x!Tq1~@0o6zUmPn4O*$SOKxg%j-esFFun+=H+TZP#6LNVA$i4L}w<$mvUHC zZsjIP>BuXRgltFtkcAc3vMgM4tfk4yX(c>jQjc?+pxn?R6Q$8(4 zB~YGgAs!S5)Msmntw9Esv){ZtfmWpqclrukn?!12?(Ajg@Vgdk4P%tPfxxZKf_Ox#s7qG~8pE`t68P!5Mh zW}rQl>J6wMh3FeigK?Y3{giT0u*PfA9 z0IOPs0!^g6?3%2%I3@-dJFliOC}}0$K((k(VsYGPKets64__+K4HPg9iOU+ZvEX}LUH{Ky&SB@+vDi7~w&W8FDZA#%g7l;uFlr*kg)o*3`14 zqf~~036%)SJeN9Pvp8CAdAbztz_W`1h_a8!7* zr&0mRqV{;Rt&Dj_WwEMed)9>3X)ZcM1vCgn23xMLH8BqsIv!9sx8T*nA0S^r@iVC| zZCdqGBC5k0E0l<;*=h?V)#}#MkBp6?{Qn;W#gE4suSKOSz=qQ?D)3H;hqt2&94R(P zgfEksr7o#o&P?!e`6}9?Rsf1vsu4?@OKYqC_%anbB``P6Up)DlnFL(PptL`~Y=-2W zfTI*FiKz|Ie$&Ud#~#E75>g|7tyA?&ytp$S7|G8Nm=ue61hi=%j@sZwRlI6VT>KI^ zdgc0xZEj`BihFZw0OT-E(^m?{aIRlcM>EzNfkKNg1=V3;=yV?>V!gnnQD^ zaTxqn^iNz_>`DK)oExG21qFxgV#TOZVR4J(AjagTkW2|6dEx2lQP_!J?e0mV0 z3BFih`pCxJ28t+07q%k*m~&H{h+UI1{n5~McyWZzLKpE;VooWse%Eo$Jp-{OG?FKe z=3H%%QyjywfW3orlS+mC28j5{^gZr{1`P6eB#fON`xZm~>=3kYGm80r^r7S=!5@o! z=^6>)F7ZBMTGJ9L85uvwa?EdkSbT5@+dRZF{bI8?Mekjd6w)^+`{^FqxBG=dT#eta z7he$-^r6=JJ%3_G{X~#)+w*ihieLUf7l+5fR}UX$F%Wtq2GeY2O0MS$JhcgwJ?6$t9}MUk8I)XD+K3(GVpme0pZpr$ zA`@(C=H=k5m9@oQCb|NM8s>5yJrmE+ZF;*vkw^jaz|;&t1u%KKMs4K}Bwg%N-6O_Q zGR5a(nxMOt(C*QQ1tR)A+S2KPP=G;8+_Gayx*~{(lTBF71lM+wg*M5hc_!3Nb>I3Y zBS)SUPL^tgoeuWeA@&;PYJC$TMzLAkWNfK_BWRSS1*N1)4I0j~{gv$w^`aPfIdA`JZE@4P{!_Y63Ssu}p9i)!oy@YnMk(WwtA}lq3PFN7|+x z_Oq+NQ6eh@x>8C<9EmUfp%=)lt8}{{B8s`|`rIoq0!1zRc>oFVB5CY!Pz5bHh=S?o zT-2?hLP~If02`|_@_j#pQn&`^)h$m)`DWn-zyi10&-lM1w8T0qc2*-;A;WRBPV4! zU+^P0Obm$gz8cxkk7_47S; zC&;1>RMx10_621D&i_sP%$O-c&@azY=v}>XHJ%wBR4juUGusYT9z&kQi^tFAmnc)0 zFQkaW=d8&I$0ck+At|sT#mLal6y&x0uEaeIMo9fnHcOz zcy7EEaks<#-3oJs1X2{7nabrt9gdfFUPVKd%~j_4<$+g?q1qm5x2i^H>b1||J} z#2JA)j*XxvPuP0%=x!dlHG5*8%wM~BIzA~19g3O~Y`9osYP4yHA2Ag@)(UY#H0}&` zP^TC0qDLmI0ee~PZWlyUHVLxcAS4a2#x$SB*KDRa=x33()|*8|QQ|DXfQeYdl`@-w z6725~1`7our?fTCjvxpsTZ~0$U@GQ-=cXV}1a9=VxRGHO%P3A1U}BE={o=m@%1l=Q znol;ZFkV4z#;YNiLT`trX&oRB`ok5N3 z`;&M#S)RVjNLu-DUuE*UL1_{b^eM&q-RZXYFL%3*R|?P za70Xs^wSjVsKQ0*Cp}SFo$;cEq)^(RrD}`S)mW-5v|@3PRmli`zP0{4J9pTtmCcBK zy)2Sl6kkptCLVkcyTd(8VdbKUx6H6q65I7}DN-~jd5`4FT(!FRPpZXLk^KEXdwK^d z_~pZiw4<0nbx|(S6TS*)ykYq7%DuHwuqI${wA|R)>6zEfhUol69|c7TzQA$%>nFdC z-w+?XD0cbj2N&P>EWxk3)YG#R#PdhZ3mo5)CKbD*Y#_l9{phkh3&9=u<5dsd4i)M| zsW4e`Lzg<&FHn7At0qn@RCZWDh^RF(sHl0;iP z+BVTTJeTa6AQMLR|8?r8pBEH%PY*P$49+}Q1M0ln&r@{v_3#M@7lQ411e`wvXYK8 znOpybFQ*OF8a}97bNo+GR6d|w9`I;<*+Eo%6tsKp{;%KC;TpNoW0tJox|g?#>xz92 z!eAmO(&Rl^C0y>O=@2Wf@EgHRJNM(Uc0<%WLA+|MxSh2x z6j-a=Lv>v6NGr9gG1>mjHxwTOJ~6oVD|h3SvI~e6n=Yd*nXN3w8>B2hm-?0Bkg3?* zhv~D>#uksPAl@KU)4PQFdfzUuPVH)kqOby18oHCoNG0^MiINYq%gg0b6{;R5GEtyd zWm}G)5RL^!Wx)Lt2WZ&OWbgklE*g*8-^v~3Ev81pOfUyo4e4+s)S!ACG2pk}{j90i zHCN%&m=NOOC{Ox=Q4%YyIfQAMj)ghzlyW|tJ#rf+Or&1Vlh_*UV|x^t z4N;4uVw>bs>J@pRE5zSpz8D-;)}!=&u>NmyBV9c9gq4sB;H5TNAIuQ=N+p7>u;vN*0O@eXQ1We9OtVRk&{+YtL)L_>nX(dMUfez3qVsfTY0X8qA+b zA5HB^Et}&T+{M5A-Jj=gA6)$A$wzS^{OJ`lCffp{H)JUwJe354%tqVJXFv`HNz+_3TqqrB0Gir+6%c2DNB?^cOcmVB9(> zx#3EOQgTGhmMrbbwlfz$x6tGklDxyeWEFCu)>@LW!USS4u8)cw=z0U$`$B0J#|}r7 z%gT_9YV9Pa2*zkEAbtxBqKCyH!dukTN-kCtV-`?47P?^yt4rPkTy_P3v^zq~Gi#?N zA1hXYesRuuX4s(T=FPW* z@yk{9ij8-Zcfalhym2%t0VgVi`@G;U0?8j=dloRezqZ=Hp8<6mUwRr`Q7Xz2Z&r3+ zu_(a6pWO2aY`xr-dQo;rSq<^d5x6VQXO=VVOUMA`^MmRfTfne++61D)fS)NMI5Nt12h+G>U7B8;UNlsY_r?*mrc2b~K zNCutmOI7zw$uKM^^3h&D0B~sW-}&8l^v^%V{tGeLqE0azT)GIRSk+)${A;Z)baeL&i;v4txGQwsRG24%6)9rTPR)Zd9R7K7ykn|l%}zA87OozP$5X)* zWHE;3gW2EYoS*2`^4)acoiGRSwdA!@Z_E!|{6x+?Zys5bL{m&EC7?{m56<^{7k?A4 zmnK>qeR%9ig5kK7tRO}E&>x{h%kc1Pc6#z%5aOlafz&Sp(VYJ45Mj@LvB3HwUf7C_)~KJR9A!jzsa)f3R%W z9-d4%;&E$q4fGhlS{! 
zs|Lcv8r1SBHC4{oY2;**6`(F3#ASJh7=qwXu?vVib7{tB z1O$jRnSSedB(#4%tVa@P;|w~ij7uRlRO&98!hL4SP!x!;Lp>#}PPl+4L7TPSW(XCO z#b5cm?J$e@w{Dr}DAQu_tp}L}xWYl??-S!pUTtk8<0GU( zsgAAMhsh{6t|xRt5}(uI6U`>bDN~7!yDx3Vi^p%hXln1CD5w|broV-8ZSEYYKY}~F z&K8H_u_|}xMZ9!$Kel}Hdi=DXA))oEGUE{{EQhjCS#0|C*Itk1D=JehnFc)#D$dTe zPLc5|bVsQwY6|%O%cw%EjcTh^)OSHMZnJnD+Q)x=^0VNR=~uHlkZ!QFO#6yd=IQ#* z#rI-(RE++vr?bM#O4|Ic?itysL7chV%B9(5k8#{j2hu)8q0PtWoD`^cc|daTThRt@ z7Exx>Xj2yicTi6H@CenWTzssavOZ>mQ`{oeOV>kp92^$fsBCjg6Ai;bC49?WirezP zfiolsjR!Jqj1D?y%5)W+s{>%Z1^7)FRGqM1=T;k0ZG4TseG7bm#HmMz- z2|Z?}L7mP_HX(hGex4g%Hok0+*c*%AFpIR}#W5&WPAMKP=O&6#-pOL`L(47hdFdH4 z89ZET&tzX=6WqIMDW=JtXVt8XRQ7K=nv=i za!wMSzDRCrJcWSb8jQC|Y~=YDvVz_5llv&+W#e{tDdthUo`A1(i^QCSXaqVi5_BRC z)(c+_8X)?A5+lo5F<~1~uP#2LY#=W0IQaKnrA6WES4!7rf*uW?X;G`BdO$5lX;H6D zc1ZeKZ*A3+1(EbA824;?T&Jd|eH!t|;ivM}wm5ezOO9KAr0P2p*X zu5|Z-_νa0rRGP4-;aSTjv-j%vI}=`tx=fSvq7nE^UOa+XXBhvcu^eK7_cQEs^w zP77RYE&J6Ytg2|N8FzTpb#@Hq0q1>v*&ny8r3tx(U3!SRRMh5eiVJWiD;QLTr)oE3 zGVz85nhZfEFX$sGZpS(lC#w*f=4;`)Un@jXb(HI#EFM>B@9LN8FHqsZ&bXFdip#9vG`hri)ripg zLIl6j3uflta(roKDpfnN$i@4XjOcEc2z!5X_9PR%m}jwpbbB$=^lg3lnXFvd#?1n# zJ-E!rPCW?5xwgw&!=hY8jNzq0asONvoGACtWp4!UZWI440GeL)M~~imH>-4j2IaM0 z)Xj+m`cW#-GuIM4L!_F7D{uB^ORZ&GB6Kj1;V6P=!&WphQj)7{s&tp#8J^fd2mSDQ zp>POsZRS>=YX}yD>sHKJT<6gFgM}IkRBEJ&OaX@b6>xKZd?0iZif=3kTuVU%bsmdg zIOZirGGEZW6lb?;X8IL^x7AVyO$Jz1l8>g`AiQ8puaDUiw)=jG9+3hCP{qPX#%DZ= z(z&)C)5fb+{6xoGj;$?xJWH11Syn?BrYrkv#vKZ09D@N{cs~e_??N~qFF-R)&o921 zyqzDUa9`jK2ell-Q57w%30!~PY05R6CiEws#x5z=_MR#vq^5vEm5IMpUCs$(Iu-GA zo?$IcOmRk?QH#q*LrQ9^##%QN%bq{0j&$f-vnz{1#EZv&_~kzgNl4rTt&x^$*{%WkoWB>evDM0)KVl~0WW$}uyV$E?oi@}xkGe0 zO){}{zY1QK)=QD~*k<8Nsd73x^zN|pr`tnx3)ywCJJN81G%wzLF@Eg1Z# zAhC79PFq)K!n!PtIf{J88;H0r7d0N`+V!bR`ckulSe3AVCwYFJe)QyTf&o-xbzrE^ zCbf*>f?OySGC>fV8L5DDd>l`07QevF>X#$y3Xr^YKMMQ`Z(91e_0G$&1jdpDJGhXS zgL@RQ#+pmk8__t=#1eO+3^al4-2f~rkaE0x0rN|(vF#%x-NmdtdpjOPE7C;);etH8 zT}hI$ao_{IDY#hD`d0TQe$O3VPy(`tZ5`$q);vob+=3#CFly&T3xBSMcD zCQSFn^#s6fA7{ohj-63K0SWPg>R5w92+CHBr7R67d9mMMgp;(uC`pBZFu_9XUa6xo zhqNt+qG|RM!89UUDlPU)zmK5PcES8oBia;dPJ4WAL5@+4aCKRiqYMr5Qzx1Ub%51N|oCOjwZF_?A1y7@PmT2HQ)gI+Rwgl^{ zX{#om2Y7(ZotOB0258Y$Ke#BZRncp7J}i+{E4evH&dN1Nx-HZQ5%IaJUUnCl&C^T` zsQejk&~zL$)nq9G?qGCYwZ_j+O~r$)Jx#1^iTgI%Qx?_|mXB0;OO#niH!iGBIRQZ9 zX&Mp`891!{rO&*S1?1Om&zfRtjCFc5{E5FO3-0$re7h%NtLJXr2p%R+o2`kxU0hw< zuo!{sgeW;JnL!xpwR-ib`lWm6IHB$j?|VNkEsS{W^tE)QDNh98#l)GutfW$o3g2%) zOTR&-Roo4-UG6_HGz3qO>^FR!%^34aP{hdAo!A9+DlIc5tOp~T{)Z#Zz2A#?LKS=A zzk5G|lP(^=@WKloo$Je@o{Y*uGl1SzlT(|WEzD~l=I65N4RW|&P&HIf=yD#tEGm#o zwU4v1v0=uABrWLaHO?L&qw^-1nJI0*sFtbTJ%-KTDXGnJ@z@Npq>76|wNSnIouVLq zaPd(vA5dJbac`5ZqL#-K9D_3EK~9G$1TJ&TiwteO>LxZ2p<2LMExBgm#@RnREQwvS zLb1j%;EK=lDk$8=kF6A7rw`?~Z~J`$P+~V+)u-!Pj3DJb5{ z%`vqIiwmst!8VcOuPj5;pb8a=Dr%GhfnFlpuA-h+r=OBY{^UL1frBHi#qCA~Q;zz^ z_$uRVyo&++l?F_tOf#lXur!V0{vl2kMan!9b!!XWw|S3#91l*rm4#P`WC+p}Orm-|`nQ~1t1(wuw>9tS6Er%>DD1rjeBjWz&q*AK`J1X!{U)5*Eoioo^gCjsShn9DCRsI zdqITYcn8*n$DB0^5f??#eOuO4OnweR#?Ry;W{|2=@nRk?JfA8I71iS%xTu`*y}q#+ zdvRI6D2C6=>-}Ly z=Vw=M#QYYI_GBIhvr!h}Hy1yQ*^<7KmEbr8De18BW1b)d<7wf0f*kk~hnaMOENvxp z1-|q);VNRD6HOCZ;c`TNj6zh*mNI^lQM#(IG0MT?|ih7e{< z`RNZY{yHduzor(YR%Z2}2rlLPx1`bdE=eS*NXl#Jaj31|FRh*j59NK0HK7U)6C^VI z`bc9>mm1X^;EL!8+0QZ%3v!zXfr*=t!qJP0r@(0F(ETR7=Ih`qY<==m7u;_;i$ z`OSou3&>5@OBXTpW}#@J=W`c&m93Iz{Mz7#+sT3$4}%j*niP~NVJ&(L*xRghoqV2C zPwjGCg)u)&uyhn44oB&UqxQbNUo^03`BE_phLN8;);sLyc%qA?7Ary3uj-Q;@08he zh=b(p*ZI$BY-$$O%I37tzYp0nTxB0WDfS)SQGv;PcgUYYp&l!WYk^i|>w#of~dAbCD0$F{!r!IB{QCHFV{p|H$vXu<)NxT4Ib#)BGTVD2-UB~gW)6Ft zm5o$C0QB~YIw3^8w*z5CId4vOupe)2DOG+%7Y>zQw^J@ta&Da0*h5$9e)_%N{dvrQ 
z@JUT6kF4Qfc5%IEFZjPdO?0j6bn~8ESK4*6pSB4 z>R@%pvLEioOX0BhpZqA8NO*(7*m9|tHr6tuYG4X)j85tOBE5w7$7SpHk_$k4!k;_=c^;a{>%&T@uVpN~0lZBmscCF|PCV({Lj^OXt5`I?$}#e$^W1kGhel`r2^ zA|oe3e&pEu&BflPr!2SK;GuuVgFmRo5W9@&TyxEp@SJ9B7iNhS3zb`B9gv=P@{PZZ%DsC zDk>XMU-@8ySk3a$td;7Q zW)@xN)ox0PJb&fgcujMD$MOeh2JKL_eo$ONLdQ4XUwrT4@x6(brD}(rWb750(lGdW znpI@4NYGA+O=$!+{AWIzM8Z2~GlwvR#q8yO-ltZ4y(n_6vfsp8=p=z{n2d}jJIJ!7 zn=3&`$ywL@>$w)=OV665oaxEN;uPm3U>F2^^UX3Zgt*la32Y z3A8T}WlFXf)qPg_vA^9{SrVls1+!iulg)ILUQFHHVXK25j54>h>4gJE%{`#gd+rv| zu(N&7Dsq);Xk!V>4b3^;tU#byWV4?g*0$gApE29xdGfYPDLdfKpQLwNJ#ZO1zQ%E@ z*`2b5(8EfQ1&O1r@pR>R#-!tKv1lUnF`QZzlrU?r^HImAc-y|6O<*ZAH>j;|;enP- z#=vpOaQ%$8TK}2ggc~cd#0OOFH>u%+F0%-bMP!S{V>JaA7z1O{L5wAPY>|) z#oI3)fBA0wOr^LVn3(83m#vJI!xE8J{bORWi!Gt|BAJ6B9vynlNXg69EfwS zoc*a4z%j#cKxJ@3DbZ&rO;(|Fn^PmwdTpsVlQ}yaH4ZP*8Qh;fomBN zjQsP*uFqvTTPo*yi`y5|jAhyRuLhnFfEyqU`D`Ze27Ll4NL`nVOSQHC|VmCI2))i<& zpqnxrF=-ev1Igf8K$XzBVl*K-*xyYHI0(BM!c(4Iq*MeU2YqvJicEEk|T`s!c z22Y&0CF+=USi*I+*W$f&Vd!t$!8mumlv>B)x8oFp+Rhse2mj6W7!6+dIsdZGVE-`T zZm&&E#n&ZIOnQ01n&lQB|9k%P$16$*7sxOcLdJXugL|dXFtX%rtjyZLg}BZuL2hhy z%+r`BzbPBEG%6uBA;(b6;o|W-h4wgdh|%TN$*X%()O0ci4$dEdU5S@Zf#7J zAaRSY+e?0bkoN)6GWv?dC>M{H2fm0e9{a>#3n9~P+}`Y zSZeH$096$t#50W^0>Kc}c>0|u-;EcHcZz8t3}BK4hbhUYW>8$2NJ!^+uC3niw23WK zt0ccmz#DjGyo0>)xNa^Ue|=>+=7ID~l%h6a%j3yV#(@zdeDAjGCTs3RZb$I zTf7>UI;uaU8^gYmh3mmY3MTL(Pk(AUekS^xRi+v?;wb%_&4>46({^?5N}pVd{6L>DiZluk~&?#>(*^gV)tYmjJ;kIvvnfW zerWpT(hBW#grVq>5BtRV<56AQY0hBV5uYd|zAg-8UON48>DGv}$ZDqu=ZwCADH~?` z>%%-~Gc#G3W@>8Y^JaV8R>x9lUOr}>8RrPEcQ zJ)rQHzl;1sT?yGiT035o+!r7P`7;Vf)=pC!!KOrp55dcrRYnpjP>e`VL&8h(8li8!$jm@Cm3L6qMz;@U-dSo44)+{;C z?Oy5{KxQFLfKU2-bt=dt#9x<)ha8o(4ZOaXGzA7B6;Mt#59oxOn8zLDC4jP+JYMxQ zk7w667dPSq?6+ing2tt!FhL1S?Vz|)d=w5b1;F{IPo&eKlyl%65sn%bOLLi;yWi#C z`zQ%N^{8F8TX54>HP4nt4^m=MC?>EzN)?@=x#J8I(I%a^$=U6HLo4m#afs6SIa_R0j2JJ*0R@Y8y{~^1DCt2I0saB*78$_l>dS%Zq0l_t5<}fk`8eG)Bv>t)Ua&GVf@<5zZWuJ_T%G;8M|}fTBeV7} zb21?o*#Ad9?0nGDC}RCIKD7eSJis+T#rwG@#;}gZ9-dTe9BsUt%@m}VnVOmMHWF8- zP}+pMMH;-P(?Ue0&=qCcN>Q$M$dj9QhQ&5O^3$U8f8vdnA` z7g`a6*A@Dqq`XDm7IJJa6%zy!h0LpQeAqvCD}_z$VIg^ZeHMsA!TzD`MF$hlccCFR z3Xkk5xD}f_hDzM?rWz3Bc7OW}b=?{8ktVg4uBVwPyhfj7U5m&|Y@tKafZtDwVKuuH zr*}OQex7~n-PmNjyy*m4ghSjc7Qjf%x|OW>q+H+JDL?jx?a<37m{1u9$>5}m6a}@f zLcHFRe(TNSSI}7cUTUE7@RmikDg;XCipRN$Kp~VWF{n;BGjoR(d*cr!vPF?()eZFf zdy3cQ7g`-({50OkJ4>54*KVaMa`Wsg{o6v!mmLuDQB~%H1_U>+uLgl1fTN39cm0yn z&7{<=VR2cg8xH{B7>1*2Q)KQNruXS!_>R{BE2D#%qo|m2hASlf&m4ubvBa(Yjf5qx z*?Yr*IwcsdO0G_DB6@GX<7daT2-kg6)cWu1`>$s8Ux5MK2IA1rFMw`{@W2jxlS{PH+7}I>znWo zC3i%lIe(5gMJbQ1QvPug9JLL2IsQyH?^voK4afrcBb5TRK}@YfRkv$B7u+NYqC1;@ z`n4jH$HyFkif7sxHf&okF{Wt;i1NHCBnE&lw2HaK4S$4T2)-}&MkU*eqhtD&toSh4 z>HAviOy+iP1SydgT&NzG-C(^zDo_s<=G(w_c?+%Q9(^2L?2t<{=ize6!eKujK`l@VrL@ig5}VgfSF zVtuN*=EP6>RU{c@<&=tk(B*JAE^gWmEXm1?TJXU^?d1b3%F=F6DJYD7W6e{pMmo%vF0g^UR+yFJ1EEJtl3m^|p9apZY z7#w+7Z1i0k10esK+u8?A(xOiYjOlxrHiu-C|I!X67W~tHvq#ZZc-Go&T#bkz;JuRglm_8 z&Qq|~(&`&Q^F_L*xbG`y+>N?w&QVoC>2!(rs9KBjYbz^W3UIafWhBI3{IZ1=og!Fe zrfMLezth@51qH#87bS)_o{mcpO1)(j7&O*)W;&LFnmdt;GCs!kXZ|5skeZ6_{7nBp zC^7x==i|koeaqIt`{k>&SED@85ZAP$#Hdgu6pY%4%FI{Hz6~oEo4-=z3X{S+Z%uGz zb~RY910z>Vm5d@BsnS7$oGL1nfryz`t_1>zSjhe7wC{!m7-}JiX2B5Y-Fx z8>8_i<@4gpi}hg0U!1?o1d)+sMZVpLh&AzOOqW})a^`$#dE2nVv33Q}^e-0h^zoA) z2Q@D%HhHDYA}^TEl=mS~u)xO1#fQlHXxe2v3wNX*f7op0bWmBcV>mU4S*mN&v658=N1WEpTr(~I~nWZ@ysi)Sj?$^ z58#(kmD@t!Rbj(vLUl}c*=;!0eqWsOF<;;cI8|bU2r4&lUVe{&8z09ODQr+>79|6n z89k3*Kq!ZFmf&d43uW17Eljd z;iyRFmSz*;?Q1=Aw!k`h28guHef83X_T_)!e?F7gpe<1`4$0caH~b;s(}OJx=~$~E 
[GIT binary patch: base85-encoded literal data omitted (binary payload, not human-readable)]

t0k_GkeUrcl*cFy2sI@c%#8zs;TOD?| z)(Ya*iX&ojGF1*DGVEKVvT|D#I#&Y3n=ibQ-c3@2II#A@IEQ$_>>1ZJVrA5=KNZaW`4O* ztBw@F@xyWa&R<-ub%>*zV&rNZWWSph`3jDTSFP8iJBHp`^LB zDviZjcm`oLvq+sTxCwBGMTJNlPH4mJga17i&6{QXfB(Na$#F>h6aW5(^Zloy^u&1J z<*KD-j}vf-QqaDJRH4@?JUphX&Szpd8r_@*16vfy-vElB@tV^O!`8fb{@A%rgkH&H z%h?a>tR>6gD{gTqTX8)H$cvT(ge@oOzOo}w=*`Y_yqAxK>n=h!;81~o6YPm{M<(Yb z)j2YA+n=J&Qmsil8qtnAZknmy(CFsFmEeC8ca&45)Kqv&K>Hn|1L$5V);jeG(4~S5 z1pk3YI5ikKvgAC};R*&4hn-{(9&}5?Orxqu9)L^AM=c;wGe|Y5VHZ4`l#hj775SiO zDSSE$4bOD%&X!({gr+WdCxA16hb7akFdi-lO}N@gXk}_AgkH6H*Lj3S>;C60VbM<8>EL6y_YlL6GE#Ms#Pv1wy;UK*R^@R3Ltgg6$sE1`Ij&NVIe@JH< z)TPw_fjPaeYK?S>eC|1as_C?Z1R~skx~9X6({R}-XLfhzK_Cl?LozWL<#>WW%|1Ol z>jmnVa|s`ylXP|}+B>2-Tin2ZS)LEm5S~K&5D!-g;5%Eb7zoUjtG&o{9Ojfnwa4kn z38|RinKM)yrI{V0Q@7M-v~yRiZGK>@`9>YdB+;mLjkLUoeM~qb#lBHdk51W8Jln_G zHmHwCxCweFIe3o1zF8Sl$$B!Fs!`~=6@?Psc>1l&AHAXNV1Bo9N~X7)%L$X*o~Fe` zvl=mrNY$pVL2y`sk0$xN*p3b;SbhI6_(&_j?oDybn)M8Kp>Vs_SQRm$0!~Rtr)jSM z$OyN&LGZ4^>;8Z)?rtp>S&B9Z=ZaUZ-_U%}c-SGn!?1|aZSDN`i{~m0sbiLQPKF>3 z`Q&B04_1zQG~Ri(PC+p_OD!aG6@ag(jvZhTO{87o6U!|zcQ|&RKj2zw*xp4(9VHdQ z)Q3?Y(z?0!ef*P?H~lGh(uqA3w6RGW98t+&HzVKNJg#`WeC@>-U&{aQ^twZofn*6L zkSU6M4At6D4)EHizA|_dLOgK}ypQ7cTjvQYW0qHWhw37sW0SU)Mo6ZP`PR}-_&Occ zU-w-?X<)?wF)cGlvq$P^xT!4F#ajwEoz|l9gtcM2?=vGGlAWl$YwHc}bl?^Ae6&sv z;<_sjfxh6qhon;YZ0(-IXy$2ME2ELkCWej^4AnArE%+%CwzLh*%n6{!vp1gcN1HTK z#zC6`=Kjkt6V;zhr`BKkFX%AXNIqJJu7!-G$hG8eF>a%08X5*0#_vRtvU~?=Uaazm zC1!}49$a}V1>LL{gO37po7k&n*s|8bzA#@~5xuwRXlnigZBEPU9M&%gX}S(V$62CP zTdXzwH3hr(v0ig-tsZ=EFJAk)u0!8Ury>Av4twVarrJJarcO?lGXRo%~&qd)-LLNI+(JD8C_Jb$OQEwapa~3B<+v>UD zJxXprOegb~iZiIdoncta$Z8|Sd1sEVUTnb|FI?ADE%eNa44r}+dMAE!2ogc5~v!#v66j2B?~1fFUzzi z<^b!3MS0bwUPRgKz%E*`3RT*VV~oUcz#hiUE1V;6oX=)~gnHdcv_A2{U2H0i7e`en z63D~Ojt`!+c^}FS85BI+KN5CA(uag4Pxo7w|CEp0QW6M;)D0_y@`W*HUK7N;?h-OR{N)PWbfugC7n)w*Owfr&(9fm(q&Yy9ej(K1f|L z5OEAVcZ8UAQ_G6ViwZO|9MwC_vyl#DJdxQpv3`4p2oXJRr~mL_6bAfS%gJdbptu0% zMXJNod0>@^%^|5(-C+aTCYFv|+*TTB8y)1Qy~M8^kL)jh2i+G~O)Vf@TY)w>4?GB{ zKAJT7V?IhYe)27_Wehzzf=%PJ?}m=bVQ;Z{6VF;fQo}WkEBGk&BlL>qy2*Z37)sS1n+BW(Cnyqd`7{TbH&}2IyW{to1Lw#q|Ble zS$hj72(jHqK7nC))IXKL({Fq__!)A_WOJyr$F?RCUenm{2Vr>)accg!YC*>M<0zY_ zK9H>UMT(jMEvHO)2CZ-aT7i<-4w(8NpyH#+&?MLjYDt2P=oU3noTwxbQ86ud4MWCs z&%|0drx^KGrx!rc2G&*xSCpEqD2t0q_|f1U=l(m9NQNOw7{OnO=3|K-$><@If-+UC zoJ(osvPN*_E>6uWSc>lqej#SMU6M7EU^Wm;ki&H4BlWJp-$`YA4Th4F`~71nhaf9< z@#BWCmO8sKF0HqnZtb6%0p%-7auIzx^=sWq0z}W6Vd0mg$4%S{056Cjwvs;wDt@H-4)uPoM@K9xqTcH94^&gpwv2pr|vqB*(<$1QFP6x*#7+_fyyccaW<6^&Pn z$UN+@9Z_G-aGGgvbxLy!U#z?BbmG=)^FBHpSsNOit$i=4upFMG{QEOn`6pc1lw#AZ z>*)9@#!@^7zWKiVreB7e47gSJmO@R}3yaK~?@2zi(S)_ZTn4^VbavY&7d zXM~Ko|07U_XP|>e&*?CD@|Q2%@DGQ$tI{b?1!MnZuC}SUal7}ug<3lMS1ZVEI< zRZ~TyQOyMtonL%QYe##`UaA;i!C92-G*rv757*}s`!<_bcnU&)E(h9C+Da#VToI83 z{UYJdcLu*H|NY$O)b4{1clsi=Qp^OIjk=b(-KHr0`lUyHA05q7u7sUeMVpeKs8XGQ zga~I~L=ZLF)&2mGL}y3x--@e{%N07q+9pmr!Co_*XO!FqSz5EBPsK52UiOr`>;qu(H4X?s78@74T(0zKp2K zvyW2|K*BcG(MjW~fvij$axS-~rK*}ii=1x5c(=c>y0P5SV<@D2o({o29D;w6;v5>i zEuuM2{hLyu^Jn&7-!6bK`yn>SD#>VF%C@1Aig!qb`>#@yEFQ$^r0J3M4p^O4O8nMLS;pdlc`)?&C z^)r97HHL9XJ2N#a>Xf4m2?%9<(095gR%6va_KxYop_FvEoS=;V2*E8xtw%>{J7owi zw05*oqL!msGJfY)hqmE4Q;lQKFlM3_?FLePJCxY-Y>XZLx^n&hLK8s4Yd?TK{>-~I}6|Z5>S*`G#&Q@Pa4jGVn&Y08+ z#U_{jvueFu%@d%tB1xf?)sZ-U{odEpOm| zCl%@hsHA`;Q83}rJ?#*J_*OW=$}TvYx4LNy8dTd!W`X@ULhf)R`42FXj%$$DKcKi= zGKiMeSE@c5Q~yAzI=Re#$qLG0Ypvn)Un4P=6M}LOesz^{2-|^ww#<>6;ykpfjsLdZWVm(OxBgK!b#ey^JDf)}SL^)5K9U8r41ivbXJA6fx$%y@-gjg5J z?>d>F^Lw?qBhsK7cj!2AUUZQt&uLY*)cOcy2{19%Tc&$`){X zCzFH*D1~=QrDlpH`m^g_^EcMPxjA77ESXC;i=(F>4nDj5mD2(a@tNmCP@>lMJqfHF 
zYo{J-XOox=A|V`)Hf>3iyOvIuSKDDL0QWWFkfak#x{Xgb#4=8;C6{lut4IoJCs_hI zk`zX*28IPuYTZD7Ibg7mbzq8`^Ueo!0Lp~Ci{uew`c9LouXhh=-54U4r}zC`*cFz)@quPWha?wV!kXV{n)p- z@$qaWul{Sb#JwdIxEnjD)(Ek|>?|^^_io~Xl&)vhtT4f2R+Qq)0623H_+Z|AGYXqk ziH1>q&q-h9pf3lH-eOIXv{O*RO?dhBR``?*oXq+AmYnUQSfm>4aeNwj6vfK6Rnfq6 zxfwi;)`~MT0~RD$DLv%yIH9|60{+aWMShIG7vIqj2A}ySldwno3Lb-JwW1Y6nUHw4 za@W_rm;fs3jyDy5ZzBCln|Ws{92+7poTaEUo8gsytF!M@HqWSm0y$#DE+|(P_A*+8 zVrgI!WT{U8tK)ZrABszyN_t3%#gdh32e{K?W+uwMR4a@A?c{Wd0I}Z19>`dtsD&=B z$IQoW$E6U*@mVjSO{+<GQ<(38t@QRT4RAZ(Fr|jPgfdsmx(X^ zjTUHy1o32(0w&wHJPKVfPs@pm|M7nqdQS#_5c{88DF46y)7Ss*?a1yWqN9y$eTj&F z_@RTd<2|jxd@pIQ$eptY-XQ5K>$mk7YAKoe>l{zira1rLZD(eLD6V9G&q;SbL7H}k zO&dbjCkd{X&RA;`?Of@AH9HFRwmHzNmWiYEiSJ&%>+iz9|NS(llhEkVAV=z*p5e0< zc!nQyX3q{f7}|NLfvxJgrVr~He@lF>z>^Y>0AtOb)@cmr9Vg={?VM8_YSTKb`Jg#U zLM8737*9Gg(p5cv?Zjkn+1xe9`qrw(woT0h0s)ePtd*4}xT-=c@v>?jDneGimrhGY zPI~cXk5Oqo%^9S3&Ww%wPwZ=wnZGm+6ren zZ&cRjV>1K4(KfI`qQE(_XWYEYcGNqi(Sflkr3P{2A?3v_8%GvJPvDChbytq%qXnB6 zK1Zbb)iL5Dn_~w#DeIjF&PhW>`-lEZv!9H*iSbEI;DmrX;pog|@M=O$S!W6k4c)PP zWJzK0x4#Q-u-T(NBKm1X2#0P{+qp!kb(I7)6^noE+BNM4OOdjSbzlA!tcwStli@>7 zj#CQeYUj*+vZ3C>&Yzj-t4(i_*3w@sZ$^`oNDlaY6xxVtfOZ}l8q0)u(sk8!pR)w_I3LkBESM@{wGhg|iv+6HQq@5=N5DdG%C3hS2jA=Bs!QY| z>&aj-^+sH_Skf6qPSgW(gS3BH#Z<#?1xSL=%zS#|K0qt|K{O%#Ha(#amE9=d*&bHV z#7iy_eVVItyoG5NJGLy&NRP^egH9)y=H~2F1_+m?f%@<%k{_4hftV7=2=F&jsK{@!&e`eSDyn zSq$#gXK4j=I_$*si1L~P3OwhGa@u-zrCnRBEfu4l=d?k7Vf_q7o()i=%vOY$9SJ zouh}{EQ*P5g?>Rh#MYWGttI}D0joVZF)<}%Dy}PEg6s%wW_5+G1$px~ zD^cVQnIj}h55Q!SM)!LU{5a{p72YsTHwqN^{8|lfR_zoZCt{Z}U}^2K>=VmysF&0z z7TRe!c=GHuf75Y0?F=?f@2LB*n02BCbzJYK;7ogyt+Ht@C18QXE#i+!w^_+XW}BDzz&yScM72W^7Oudc1u{Q0<}b6#f) zUk*#>6D9?EW@b!M;YvL&s<$(XHC}aQshw`=!3)bP{{7_5Nj>r|5f#4~KxOP;l=>8B zt+UV_)6TmoDvarCzI+aMe@)SZoYl=ogJu*@$70*~S67&Vm;cZ|zVs}O71q>R)rpuL z_75do*+`t{XB4MO9)VLbTr@W_qr#Eeh$GH~KS!~e##!nytQ8un6*o28%g4f=ruIwN zUArRK!p3T3O5*x?-w6rU^8^2Tfg+OrH#JA&*!vRy_zqaBY0!w7L}S!xYTv^3Ax=q9 z(&@?q;BK=-VShlSMPRV0p;yeK?DB}3D1Fkf;23}N>o57G!+tNxslH6bMTBR-*^jAU z;|opjX)-!ptZl6NL(HH>bophEAHVVFx{r*O=}FFFh2-oKKrFPl^h=2VaM+5|TULSn zluGy`2R!w9V~rZ-C^|{ESnnVH{XXvP&%}`!BXmd6XQ-1pYaxD!URX^-U4h?~2TKH! 
z_=1&}#Hp22(8viL-ET+8d}{2bFFni}WY;44SN%hY%*%wf z2y=;4nYI<1)^MhJZ0n?#4gyrOqf>ya*=n>x7H8=i(1CzFm|Vs>tfg%AF36hFUf49%9`nm7wzX%+gErS`pF&aW0HqDrn(MKi_$hqN#&R=y zD~#*Gv`+q z%`t^c?)MI?zg4X8{Z3kuJ#&@&Q+l+o(69{G4Qj)wJHJ+%jrp|(wbLpuW-JYefaHTi zhy)~lWlh0JmcNa!!=DF4A_@OZast@hPSfgY%l{mk3Xh0oO~zT?17G1o#qo#pA~|Nz zD@v~p3FfCCeerEg)l%P(KTbh&94c$VOl%JD7%65+0B~QtcH>3AtNV~-;i%?|5%-OLX2QjZD0B?d8DUVT*t zP>)*ewNrInuwOt^h~P?rx;Z-?9!_%%iG9%>q>^e9k;RCS7A zxW&$zoEX;(j>!Si5)f@nHtIHQM*(ppva~vBIe8=a4=HO@D!23-Qn4b!h`&-RT966n zw73X1H{(WF#na9M4m_t-ZM8DDQuFa6>?%HJvvv;91pG?N9?1YAaE-a0;sO0oFz|g* zj)>T^GdtU3R}ve?%R7Tl{`mh&O)!QphM72Yz83jaXbG6iJ^?E`rvan%CprUorh#0> zhJpLeNrqO#%pZi0Zqw(2G$feKQSLuB_i}gFQgy6S4LLeJndIoC+U&Khps}XKMxf*Z zz8BwJ=yaTrVQ1I@nkvKMljwnz6|d2BD(t5QA-d!xs8?JrQP{!%yz%&Dy}aO`P)~we zo6B{ln<0;c0h&W69`QIAhm)3q8hFkzoI7|0cajJtnhCGvaw-3HI@|cIn>wF48A}w_ z2mRW5ORpNAzUj20F^(z6z954qH|}W^EOIJFXD-cDjCF~Mj2Jt)I3XSSHcer2f%VKw z8jv-d2m28ECQ;pk({TFKO4}OLMyFk+Z+A5aN`t8f9xixX^oJ=UtJQ^__7XSU2XvlYY(qP-h&z34x;?F#hP5M| z)l5e}4ilJKbtzqPzSP7RWkZ8_B%eLLK2$Sbp(TM`>jo$B2^C;SPQ{oj>Z6#hAn@2f z&RHatWTZ*MIt^;X6mOaIJ$r%p^VdKq^LFbab%4(jy12m3^cYV6=z7E7?5=QZS>d$N zK=pyjadTm=^1gSmYq`h4^puYWcW6cAe=O^8m{&lwvcRpxFQ5(+fTEztzpwG5^d5w4 z-IshQJ)l3;QwRwV>~(U&gKzaQP!z?Qnwa$^nw*`K74Lo(Q{0@O^-AM4e-c7(2w{!* z9`+*c@@-Nn#8>?zl9zB|6j|znG9$+v06P6pI`Nz&C?aAW$8tsuUI~ofYH!Sewy_l9 z*itRzPaF+3v;*r6V_J;A*it{xghEl>rFf+%+Mxqh3OZQl7yyR8E3Zdw|B2aIXO#X4 zY?Snu`L<%A>;27Piz{c`$B$op<{2LfF%`>&4L{ZOXGT&LNO|-t*@e`ykt+&+`nvX= zH7YK_Zo%OVxb$z_iv5oRH1&V}|Fn{)TkRc7dk`(=K>;MPkx@p%lzu+$fyeTBk#g7$ zP^*d8f(;9$10y1-B*_ z20(e&*#D4U`44@sxl$!9de)ng!}&!7K2nr$@BZ}i$EC>k`#uTey(C%xSOyC9nfS+N z1a^^Z7eRwoYU=ULiVM{U-&q)5lJt!io^e4*@w3209B^ElC^`4_eeSW|Wm4}XguwJ< z6iOyqq1m#q3DmHRk;u#aUGQ|2yg^7>+wh<9?@##mGYQLK88!S;e%s8LGN7m>dKkfY zb_lhgv!k(P9_`X%KoY>z`(*jpDV~vkifPhNWIPyCE5_y9Kb z79DH-bsV{7MTsD-k%W$GH@@VHJb1F-x#N%8?4nwO5Vx8Fx)-lmV$R)Rx}^q7J-;N4 zp8jC)1O0{_od`epPlIp!nrf|Q51mj~E{}Ej_juJ+{_F#|9)^x-iz!{=9JBmi-!NKO; zwTk#jW?O%lY@yOt8?96AdkNdLlMkrBX}0?rg4vgph=P@3^@)epicTOdOf4GBn^ZmBLvi#Q?V4LGq;joI+omvA$<<&hqBjtZ^8+_#VFCaKG@W;eH zV~voLRbONJNt_}u3qeC6d;Nu1ob=}SPwv4@Fi#1PWTMM#?o*eE3_3S+kTEkMNhC<7RvNQF^UuTj( zBrtB+lOC!22L13&@@FrI`Z!e>sdIy&urNV&X0UY)TjDD+OsW=(qh)ZMT7|#t)SB=0 zed`l$_y_@LCg!jFA*QiDOxgg=#J6mx`uGj%5)QAKscFrA7?w|4-^@|XrGa>o$GWKZ zpWA;gf22nf7IZSal>j#osdTr=S`Iq{1u4b$(D6!`M^9U=C8xz^FZ@Urizx6;&AC3p zeI?mZKi=*MpKdhm8=A*obK2X0UJT)f5WG4oe zG|b9&Lcil4)#8>UwkxEB)?mA{NrWd2uLn9K*>m`yyCGc+ihpsGVhSf6+#o(sgTv&a z$P+iGC9N&v5tA)B(UEmezx~CxahxT`A6=myIJ22FGR@33kTmlK2O^aVZRO(W|FM*e z;J+@oE61<;s7S_s=kinS2z*T0=Uyx3f?tdyte=4&b6cUETOAQZWxNw_$q>`&D}$AD zo>K8a%oFr~WssPlJ_~S67x- zYm$8XU8Z@+>OV0(+!tg?5UO^y*v-l_qp&Uv5i1oz(N|poWm<{wr>r-hr65 z$na9qAy7qZIYA2l3wv+&9QSqPd&c|;GchqS5A$NRy17`QKw=?aUwK?86c!PissK{5 zD5(oT0VrZA7=umo##l@~EDG}7Yl3nu;^ z`Pm{0s+)V837V3ZAqiWO-u@yI5IMd8@chqV;(8?(Rb!;Ywym8`TEnh%rT zR&^jU!0Wh0gt%ZZV$ipiSG-vINc1FTl)uCrdRvsm2<$nm2pzzrgJxtdO!l2~ELrRC zo_`g~faBc?e{$&hLJ=GZQ)BkHt<&V2PX*0U)3cF7OVPBa$F5(O)M;)ds&5@RCI9o^ zcNS=MD0ktR;zWCZ`<&8eE(N3O3v=^pel=Qatig<_qv7wWY;npkHA`{IGUbqbzo2rD zW{4u?JoZ!a@l{C&i>KG>^%b{*u|lXPA~KVyT+%%B)b-_swc5;67mV|d0wnwkz2D9< z4-63DWkZ=wqAXSO)QxS- zv|}sssp&A1)rE8%4>Hy?Xari+h+3gOX;dzHOc57i_WRA?xGm(jmDa?68}bD~0c_lG zvN`c;nTF>vUu2No7HR{h;oP(jViT^9i=h;iM>!ES1v2yAJAX@8L+6Q^NNg)T&s;t#d%iT8@Nktku`` zm@O+y2d?^e(9z)og1uUxg;0iBz|1s76 zBNRO`c|F7sm>6`BBxflL<&}D#{ILIJY~BXL-jmcYio_dDO)(gPhHdBAlrG)i4Y6Rj zYT0vfA;l)*%Q=+209{#$BRz!NZ8yp-xufOXFjicRB}V$uj%4l3OEEkup~3(;gWMp; zWT$HX)v{F?3eL6|D!6S5sl?~$7TGBY zZLrh!7NUeIT`6Rj0M#T-?$Glxtb~OrR%~>a5tx#R4RA&sB!9mNn2?}3l+xv(ZP6qc{3GQf*y!yH*V)(($ 
z6ym&e?YSVm`Q}2JbpNo7ARbd+7y5TSz0QYhgR#OYwi>9JU>rLkhntU@*rTT&O4 zr8qut5m}*>LDD&%{LR;Yrp*fYr0eR9soA8UYmN_QV-(cAA>?2H1PyY!om|sh{8aFA zl+E8jR%s(ONc_N|ALk>r<53F}w-ZBMx)akeSNf34s@p=(W-&9=TZ)iK1+;xn>E7O zqbD~8Av4{HPo|Tc0;_oOQ$)^yNxmCKMy^lABq&KRRB&L9BC^{tC#y-)LDZ)~qAhjP zaa=WT89jo6TY)HaLih2e-K>Vz{m!(uCgXmn-yVHmlPl5qhY5j02m5_Afv8H;u^xLd z3+q1~O}H&5qLo!igtIYtvPg;1<3=%V)3^yvrA&GK+n_(J zyPoIjV8dr(hOcL_tp@k*il6TmUxuuu1uvgheDbHm3)y(m7$+IfIhe29G`zqAnH8KGQOQ zz;XSkLRSYdT|IT%n6r|?;=xYI*>vmu(W6ICU(@hVW`5^^q}2h?Gm;TV=Kt{g9Wm#Y zot!8j8pzT@qY;9LbCVPkf0k`y!rg&d-`#nzV|tyOkL2;SmTbB;oxXg7ack|sB;jnoa-fdyGkz~jT$S{5?w?_Q~9j0 zbc+3>NfQ}Mqilc^3gQreEb4WJ;x`Ok3b{1Uv*v*dJHrD7C;=FD2`2a|8EgXJL`)h@ zrqlz_2(4VafeE&5$PM)mI8Xjz+=YItMVXs_s1aHrPpw?(p?O`O;EiAOKNYzkGbLiY zdYd7w(y3z}1=uu7>KHk5nK+cMqb_GiEKI%U>ej;QZH?^8wWoB+lmB@BmiJ0T^YqJ4 z_S1+tYx2g$;3YFaPn(>a($i4HD~<+5KY9D>AH~$Lqr%fwsXAQPK(Z)d;Y}yRM8Z|d zwxW%^=vAnfYCUqPc`gfRFjf_bTq=(1E(GKU(T2r(Fd5p*>12Iul3BtroVF+alQz-- z9b6Oc9VIWkEiEBXcLIUd$S07BY6XRAOKTdIWmp|KVXf|66KAD|@^KB$#-$aGl$ar{ zfJ+x7=j_L&bCFL8uR%-+tXgWiY`TQmXCBAJhimA9-w|AUNAFvq|3kQQEX20&a5pqT zqI~9kkM%s-Y=M8$nrOUfRiin8w5ty|DTBRt39=>M z2wJE|^v5RLMcdum2;oLcNXt)vj6u7SOJW6S^kn40aN+1RTCo$K6B*!=p^z9m`gdtB zkxp7>E(SzN3QoQ`KB+~SoJutdW}?+F8`E_@8V&ndj{7C)qXzpMicr_X=%(_g#@a+Y zr1XC(LUFtyeqQ_y0U)WyFz|G9>aET#^O){bxAAu#YUKfl%MiA32xi#>|B0}n}V z40uE}1PCCS&mSDD>6rOP;e0w){`vo}xuG^t%mw8nxD2HX%*&9o2TU-Iy4U~;sED3|x?p}RO1UIyj`7oStxJ+PAmUKj}8YFfn@4#vMUhr3q+;0340irK~Z8Sl#lAR&@DOzIj$8MKH zLqsCFo#K~USdmued}zp99P979OTs>srr5CxR^pa$y10Tu5#U(?ahSJ&a%)Z)nIHFm6>~z(!BK%OCFV!js;1L-6R0CXPfy#%hDi#@ zllOBB=jwCM=r!mALoM{!k#Kyligc!U>*)2#;5FJV;hNM-t-3H#w7469MA`AW8#^o} zObXo|X*K%pMamW@#bmQtiUy#^YMDkxZj5OgJ-Yi1-DmsR+npXKwc!>ni5MwWO;Wu{ zGm!KEFhv2hV_Tr#M6#>7aN#XoOB%?O$G^}e&F<8gC~g`v-)0l+qFm|An>Ukd%)pg9 z&O{{%CXSNvi5r@66(nRllHgH`%*({*K)O|Ud{<*M{&A+AsjSr$z5jc?aIal^E=1f; zhxP|uhvLo51;sZ&)1)IG@F?*LrmHD7%o)$Bix?$};v1`Gt~2rBLZhm?@Bm>t<)R=~ zM7x_$V|t6a6B6u=U38VuVhO-XaB84%nH0hmV) zOiokCLhDVY6 z^RL$yBM1jX2K23Z?97y3H!W zupUY7TVS96X;^rVIa9JX3mSvX(6e8Cqo6l|9kHAlm1Zc|Mo{QZ_0*}>X1 z0F4%;lG`AYs4jAX<$6XW!K|WjNsx1PkOa8U_5)yInuzz{}cd^pw37r37HyP~h$fZp6_$-VOFzC6o z3UKWWO$b$u!UJs&&lQP?=uB~Tk35L8C#O=2`&|USH%ogyr6!~I9^zcjeyTm|M4NnE zk1l&L6fzFXmK z#X;jRs7x+Na?s2z*_IX?u@mf2Z0&m^IK;`k*!*PI48 zaKI_PL)KjqLuNUZW{vUgCPmN2cLOtTX z1IJv_$H`|-H0%0^27JqE>Bb;w>zfpZ(Wobago7t_p_M&~M5DaFv*-9cF4w=#{_`9o5CEj*ABslq6t9CSR=xt)~mj~8M+UW z+2mCb-XU9w&$l^C$P3eRCdOQMWyh3rASTQ5P#fjYoxQw=Fd;{UHo*52BzR~4>?qaG zS=kr>*?}rZIw^fR5x2TtUl0RW#QD&*OOmnl#bS-rv3V_UY5Jz&FLa#~2h9Xx@SGz! 
zU=Q5OOq1C!`1!qfqk{UBijQFe0dmkwIJQ@o^?@+tQ9|rmW?G~Sqq!b?xs>iQnc?s(}U&d*Jr78dPRb ziSW$Lszo29YFH@cPU`KGbQU)E@3$=4t~Wplie|^B3Z_PIUMY0YC1jy}Yk`0LoA7U9 zZ`1F{8AA4J z4LEHNE!#mCvY*KE%7!PF$V}>d^Yqh~V-N@Ut$GIux5H5mMXhU2Xl)!G@Nj%4L7VxB zk5B|wl97H;U#_H+*c|Msf|EV|Bsp{Fk67Td@o*e`$vHNIubcxmSfNz5rv~2uTNIN< z@EoAgxlp7CTxC&1EZ9nIuA?JIvsx6eRFF-R?d}ohb8Q6rg25@tXo+Z)a)1Ew@>7gs z(=qfuwp=c%gMi+^U$E9m#!X!@!;urHPy7TRKlGf5Npw z*B93s@w!q4I%Pz+5A7U@x!SReiXAIWiI4(x(^3)b@UyVHnTx^e_eyxXw}R=5;1V0TohCnyPJegykE<3KC63%C~fYC zOt-iYf#^XHUA#?-^Y8ng>l5X6hmr_p+Z%B|T!I?OV+~CyA_1X#N#n7IaSDvr4joN~ z6q;UHUyG^npu_L?d}?Q#``ED9Ym%fVo9AUNYk6fh=qiT5$yiQvW0sJG_HEieVG%&QW0{vpVi#z$rpL5$vhy8<5mD z7^6uqWLzBE&1T|?xo+J|~$T%Fbj~2zWrz!Ox-(JyVX(5Hy65lMJ{VxhQ zTlvn<3tHu(I6sJ$H$(i)nj<+vBD8vlsZvFiC1%K4AM>~FqYdnNRT^69eR2MI+)@fu z?GUGkTc}T6n%RjwWNz)ZwHDOHFI9+CpV976ddx$DMhYRT*21zv`>6|6JE{6xV>k>7!NV25cJJ zxDfqGwfsG`RU*`ekP*tf{=+uJ3muqPx)mmvT3s zoqwSRA&|zW#8f!6Tq7=~qEiLG24o1TfK@Ur8_%liO;3LK^`8Y*mkf;?l7Qy#MNdw$ zcsV>btE^H`qJf-ws}l@k9fQpUnk@=;trczfQWS#zh#r>F-7e-Ly3hbMkpAPB7Gi^S&#>Hx zai=0>TgN>&`-r}6CdbGICNc%2EJ;)2;V1&4C4S(0LY`?-l#zHxU$S_I8=VK5mKFGS zZ@opOOrc0|5b!6Qy&CpCx|)ie#>Q?0DXA3)So%I#Rt*VF6P}E>Rpf2ZW0uEWWZ z2wS|c4tSiLx*0?=kP+GieCfNy3jFMplUajWS*^vYz#0_h-b!+96(6&P1fa;ugNP6o zmQNnoshg9bi<{A9>L|j(ElYY%polk*T)U-+okl^2GdZ|!r3Ow|*4}d5z-=bM-}MAH!R^lq0)b*L4ni;HD$`UI52fLrE^slLnOpX7!{vq5__6 z8yx3Nsj^3AsY%6|q%z#W(+&rlyFKJ1IeVr}`cw(cc`RXm;dac@P!tw?7w3#k+|Wd@ z*E_Uwb&)|7!|U$%AAiri4Rs<-;HMPlIqp=57Kt4w*!D0z8ynL%;){WjukS;2;QC zl$BZ9)QM(@MudU_Z^pCH`nSa>f8t1Zc3!KUz=-o0-X6K9aO|J; ze-UHdX`RN0dfOvF_lfXuN{NdnZ%zd>ph$7bBc)H0B4+glL=ITPJ)BzQZf7Nj>@b`Fs60(~ugs^#M;*)ZaohmeOE02)&j90dDMaUZcl+=3-_vvY zkDq?!ZmcDZCkcy0I{zNwoWm10MrpsAfT=(c@dzQs^iJ!t7(4C6dR!!5cpV#o1lAl| z$6Hh4)sB9v*Irz&HQZ;3!PQF%QA<{@eLqz}0JKU@Ud&~&lWclG={u;H!7>d}6i@(T zgr%N~OY+r~h2=F}eLQIGx_>5?`qryj{#T!QMH9aC>XmN?D_r>aIJJ}C>}B=i2)?)I zWw9WF?j+EamVw&U8-U|Q8;!TwJ%WG{EWjt4Wx(1) zSv{GA+4i=pS>t1FG0_PsKmhyJb}(kS*!Dy5)5 zz8R1jD_m)@1YP_5QA#<>{m@#;(Cb;kX(muTB61Ef^ zckYDV`FO&Kbd=pGNb<^3O{2SV?dfqW))@EXdbiFLS`CD#k8Av#X?7ru#p~{+{cq(2_1`DA=m%1nyf#c#K z*862QX|9AXnTs`K4hhAH%Q0e-E_p5SV1lnPR*21p-vG&qXpPsGYD_upX?D61>$qVx zt&?hNcp7i1#rjW&w09{Uegq^D0XHplv%80&5=T^*Q*Br4Ish(MR|4Zs6G68zza66x z(JjUkbY3=)x(6n>K~f!*hDG}S%Nl01YJc< zM@$?hTtnrU;6sR@`ucLvsJL6;_w))f%}I#e(y6_Wt8DR*eDc1>bxwhQ_04@>ie; zA~tCyp9gm}_YqXPu6Qjh&(u6#^3ZJi;7vP0Vb>@rGHre(?MeKcl#|Sj{U8fULdtIe zz5x>@k3@|e08P;VMAXyMM`*?`+>6D>Mx$0wB)v71v_j|*ajTn`TePvv_TM%Q&xzk? 
zbwEsalBfbaYDHOE-KX43Na5{&@{eZy;Np)yn=t3lPR4Bw1=;p_oLW{+Za)BQbR5vQ zz(w91kBves9hpi6D~GUd?)Or_3uV>t7PuTBp0+J66|FRqXj>#PEDka=7T$bIxQF8V z<{@}K&0~|u6YkG^y|u>8MW9wKlc#IG@ffkmxn9%C&S(rc)^6*igM57altVX|O9&uy z3f{mZgPi=s3mtDyiY2#MwH= z^zo)=+!r59hiCS%mp=Q>voSf7J=|b3*2}3}xgI2bfkt!yb=pTsVr-G^$9+KoO8y|z z69NqZJ!pp=*VrXcNh5tyieO*U{4El+=lo+{O=#}Tv?=Y{jpK0Ia;!FY>8hPNl2ObD zSs}_2=Z+^YgG&+p*vbX7(}QpEyn2aI-02h03mL{D92C_ zJ(%9gBFDh2LGlDIb_NhZLcXq6t*ka;GRyE>Y=w)&tCG*5bpkAw8zN(g5V;)&yB>jj zJISoNUc#IdtZMm$ndw6E1y#3|qw@YE-|;$^oAdd|E^{)v&HrNAj_ft4U0P&7CDC^B>y zJ+tZ{6HEwflu9aYHuN7q_1#!WAlXDIqOT6AChmuL(cC1#l&~=tYutL!JLZ3xD_3`0 ze%M|NRK&YJF)|W5Lxq4HsKE%GV?(DXf6d~QC$4=!h3ly*B&21)F|Jy{T8SbU%kTGp zC1O{?v&ONt6Dx$R*Go1C^(2up8Z7Jief=8*Ov1aA=8{FK>J5#^5#crt`B<*w*0!B5 z2^mYs7N_@NFQG)L72gSmMI{gqp@e522?afraWFaPH}x%1H%j$R3A$h@5AIi6WrGYpkVFUGILByAl z;ue%MU>jDaRp?Z)8pWlXU2d!!XY=4kfwXJYC`2+D!qfPM;t zQzh5}4b1wKc8>MZJMOne*Ie_e#MUy!+5~0sGJeAw0+Es{3Yt2^SvHiYiMQE|`@Um!bw5-+i%;Vc^}erZwTv-p8M;DSq>LJ@_K` zLJvR+k0sEnVcd}tv1ifFIWDZOYRshDJH!>0L-k85o{WJoOe&6fU8waVwm|pSET}l{ zr%Gp&_-zir_JDvAZ19;~x$^WiJ!A5wm%gdjW?~+V&PGxU))qub_+|fnji$CTs~P>^^*8Q{u`&7veT-sA_$WDIZ07}Ng3bKd z`J4Xa56(aO`iH@&ur>_`6P>J-4d$hW1^6z6{Wie)ZUzrfQ#+N=W46FKl9UB#mGq78 zVMNCQ!-#tpHH#Nbw>;bgdsvQfkP!vH>(RKy4uvU#sp(MzKWT!7iqG}bfDJtmhkhfa zIhqm*WBYRkhh4nW%FJ+V#A^uH_x2qRh>Hq5XV7sK7XZnr8Rj@&#IrNwpYkUr3zkA3 zAa~xev?Y5+D}VRVBdxYM+gPhy+6gMTltUv$Wo2RF3hgMb7dP*!y>J%S`kQ$aOn)B$9Ru5PSXtDJ25NdESdAD+Lbv7zLr zLTzOKnz-?M&6$yoFH3-XP);5|V<4QbhM6^P7pu@v0p(n8l%|M;TUu=w=}Q z442@nJLt))@vc%tf|n$E@jes;jli@45ate9LYC0bN$)1$3DS-;)y-=oXs45EP@t8k zs0_L>hc>jG3R^sK4c3UqjNX_CqF~;_#z8e$#B@+CI|ptuOc*hh&x7RvID?ij zS9CmpFrnC70gVtb&hOU~Abc5fExtrO2|O1&1>a$e z_|ntyP{R)-T+G6Y^%&UNNOtNzDvyZI-z98=>r zgQWO!$OjD0y59zqm3-51PSkofa3cFcvu?n{?omMK#p|$F(|JF$K+A=N>RQYcn}37n z#^Dy4Wj=zo!1@HN2`z~h5ZXeljv1mF%M1mKJj__{xO`~njpKtFxNx9IF*%m% z=%byw@S~J!j244=AODaj`{?N_*E9;WR{Y!^1|dBuNDB~_1|GT+0nkG_1V647p_x_U z)s;mJtU5av)U=jC@ffR_leB$SWC(jkxUmQ`or_Pq=4Qz?huPb_c}6>8Xp#@=>WX{x zt$0!DvtBnh2t2PL-fDUF0IdKGl$9ZrW^EFx*Tv|)S}82pYF(xQuD?OEDsFbW%#2#U zb1CvxP9pz&zU}xd#BAHuhq-#Kc1sIIt8eGk{^LuRzO5w%yM$fG8{>>0w8MbgbaRW1 zs(<$%f9rDmyS1n1Y}>)YvF&;$=TIW|EMg#uXge&2v z0;v5%t6Cewv)I$VCXbz_9M`?Q*l*=PF$W(r4%V8XNu>87 zdgYmT92S$o7h|Hd?@mXXNuFTW?q0a6uqZYvqUB{8`g*R5debqm;sU>Y-n~)qm&UoGEp0Es9?8o zE|n~ccr00I%@iuP;!LVEW>T1h;SQJ_qRB?Jz&qi~=>WaTnS~soLk@JSr{i?NA%L#o z%t8p*;V4f&IRCK!W*Sk4j`vQ%jkY5)R_ev|gS2NyDvI zW@C4@aR^x0o!uE7%FKH1yTU{}m7fHqEv!zY%?}oO8|Fq9E08#uF4jlbH(~W6f~c7p z=tGk>z%Bte?JTz3PXiGTKEwnQDKsMAqM1ax*mC0!lOh1%?|w5@{iu6p4xG*oK1&hE z>;eD@uGUn#p0tW7de?q}!L5!xE+`GvJk9L9P*@CGGGMj%9q_JJsrQ!>Pci(covA69 z)5@(TW(aE$EzZUVMYIul@!B;^nRygA{G1 z_Mn5UPQ$z8X2Z#5t@ZQ_3``C?utLsz=#B^TDRz&PK?lgU{rnU{aI|1Rn1qkmeNeY57Kxs95%6Fa>+>R8*yH86rQ4!@! zU1oWvggGe<2QYlm7DHYY~T2Peg zu+e@1wes`xx8lnN(%u!{js3lN*^Q|&pVH6!ztAnHHx`PF8FKboKCpg4mkfi*Ml5G! 
zWMU*F#zt!wWd}1Yd;VyRL}1^yzNu_P5Jh>0{6+u$nCj*+9cOpNNjJxDgl>c3g9X4T zt#5ZWQ@Khj>f@-<`HFkGYTxBzNA*AQ}$#wYq)E-K1lJlH(gv~3U80BOXQ(0bU8qM>9w zyto`cjaTvPAobhlU-my053rZx%z^)#Np#((7g4+dZz^si-&vr+n_iiJCWXdzho#Vz zgnn@t5;}P@@m|}R%YF*&0;l&Z%7VYeP{-8xRZrxrDMby6BDDF z=Y!pog9HSa>7=SY_380@5PbuDH z4HmWW=jT81Cw}va*aX*MvWN^;)!UcT&Qb^Y`Lej}=p+!d-^eMm5QZc{YaF15&aB*7 z*1xG2?eZ3%Pjtz(L|7I(RemP(rfqmES8LGf(=mlb!k+fxejJ9H9k~iW?tiW~^69T* z;aG61yUA0<$d(I#{waUI6WoF}$lfg2&M<@R0L)rygE)QLcQ%JVTC&)PMZvTVTWNA~ zLW^c_4UBXP%>)H|M`>o1Tscm_9lA=mv)A|{1UCu}$5K3H?D~WR0I3y}{2WSis1Q&& z0ov>Iw3aO5fr*>mtSnk28^@2}n^QM6EO?!2GI8ZHEtb*D zgGh6h*dek|$3IKZxu&yO|MAN5;>%IVWT1Z2u-$yGcqvfUlhHalIU&BNq4eu*IOOfj zibW$JY2%3#qp`laN{Aq?;FPzDep#x{(_kyfG6Y!%bCbFzdJW(f+n6E&`FkIpf2b)U zr=!%}&8Y;WxA3K9S8Kh|QMfepID8{(3k}W7Oe0-sTMIaDQjob9-0~mzz_8MgyAFzv zOvqYKI_*wUI;!}@QSp!(LHt51)P5&I58r5uT^5IA5BgTULo#E|NoY|Np2X&y0dv~kOXWaIk3`luHdRq(@L55s*oqhohHnae zuHH!;Z(t&`2vbC#k*-uljw=*-xSY>Z3z*=EwmqVmBS&IOtx~VX`9eH_{ydWyJX2>uOk2D`Y9G<*){z;}1slptL z1j{vprS^kZCo3oKuuGEIk{*SNi^Di3i{^~4L$}f=gH7b&xXIBZ>hU96rjBO&9sa~) zmRqNcKiF!i6*cIa4?C`2wD!47Z!7Mk`^gPR03ahG*@wrjR838dY8zmxf5^}8`0>0s zylrrZ21FZ`7X09_wmZwQAM9)eapyrO0*i#9aHNZrL?$#sasY`XtA`(Tk#g=1s z7A;9V=sf`Oh5m{0qh;bKE3S9Fh#1gtxyr>3^|fP*%#q3i|7ZagmRHx;G=N%dH7*<| z3GTS4m8RTjzh8RvjH^L|&XyN{>(O}d8cE?By)mUR@(tv5w>x!(+?Zv>5yR%mOkTVH z*nis*9ysuSqxB?Rux055D8a!MYxA|2HKD2TDILzMNw-`y53mN)F?@c<*{pSlbA^V5 zdc^p+YeUq;aR#5MB`CoPolX(cK|4SKW?btT6skz$OS;}d-Bl87oUdYD=nG)ic!Wl2 z_EbVHvpG9tQc*ur^E*snRdZ1!J7O;SnGF!>aoiMZ>4O-YZ85E11MSdvxM_lC8=WoJ z6ARi(m)<3@_L^opF?mA|1U&zC|1%BoTPdA)y9CDqe~GBybZ1)A5>11e5P~&vf&UF} zTXOa4v(H|=8dO_h?dx|I8a03ad*{DC|3Wga|F}l*twW*ejL{jmIdq4;Y0sI#QfJYy z6L)cJIvVHUL+>Mw((1uUUUTRGA-vH=;<#WzhmNw6?#2jyJKXn@BzQxSsv!lI46y}{ zCn!@IiRL=osOV;@M#-`oNX?ye;F_rnfIT^iHi&>_Zn=Er8NFbqbz*i`PNJhkDV-h6 zx2NSk+Ds0gb*h?1^^YynnSK#YDKl;YjhYYL%RLiqy2YI)}MvexLtKvw~0(@&XYICV#KSB zimqASs--eHGvp+SuW%Er=c*eEQFwr8>(HgX^1JM3(U00Wnu8>S!#HY zakX<~@>;YRpYY=M3(L1FiwiS4^r*9Ik<@_+t;eH{!!q$S+>U6#t`D!`6AC@d<`}z~ z90)v=EUBL)MC%{KqjsSV%&Rw1sD_ef8sGI zt*G*%u31>i6)@Rip-k90^}r8HBCJuaO~dInz1#wh5-ZZH=YqjZ`KK2pC6a}_GW|lW zx+WSIpQ|ypq1MDhIH)oR$dnW3FLd*XvA7#~WqXu!-X#_c&`YODV|7L>XU+3W*9wyJ zXk#e7n1_$1o-mwh0ocBK)&A4skh{q?4>mj^RQEaIIS*4)FcGUR9z1qqDu%ILy6MDc zE=BDOF$3F_j+%NKHALS#|G6lP(D7J5ul=22e&gJQj;-VdA|7?&Ymf4UyV$C6Lme5M zvdm2(i-t50&K|}vQKJdM#b;c^NoVAtqC0NxmcF~=VYkZ<0~jx!C`z+y!NGdt~JvgZp(0PJ2XZK4HEHb{k^D7I+5*mtH- z#WZd&cyw`l$Th@iA?LK2B9{6jFjFwbAkjNnZFosFN~`@8#>3?+SDuNPQq~BMW$X!h z)EuXM)fp7)gzxn*awOjD|30Q-#fc5-8{pptL5NbMOsm3K#Q>cA-5#aCLQ^*}V9cHOWRNCUxFRaGJxY=_oWU&SE$m6B72GV$mNW0YDrL2vl6p?~!F^FkaI$wY#i@PkH#kUa8!BVt=8RVl&QC;$CME+pfsqphDAkM@l2oSSS%^6-!eU*MS+9 z<1Dz&NtF%yF=RYFvFBM^Q~ES2t22Q+Ngqk{MYVR)pq3Mc6CL*N#br%|k)x& z>(qUShhq>TZeu3^HY_@SR&mg7h!e6?W6;x7Wy2qy9@N~h8akT9QS;0?085Aw?}<8~ zoqYmF5m{g-A!Ie82A(yOVxkp6l4U)`oQ&?mDRt9`%T;;%>&;pPqZYhvw^k&7zU zYI+oA4kgZ+h)mJnMul(Fq>{Lv7$4RA!9C=vQX5!a$Gs!Z5q$LMn?vcD1GG=F3-S5( zwsKpk8g6Fr`Y~Ut0O9i`=WuCO?>0xQ3!Tm87P}(qkgVc74Hhcyor3iZV$Cd2Z9GZi z`<5vgI&-Pdkpw+T?O_^LeV585+GHE|oDKgoK ziw&rOfgrs76C-?JxDhYm_zd1hY2cM^%ELY;n3j|UOtkBck3ub4Skj2i0zI?H>rUum z2m=Z@@xK;(G6Y$}yxHN^GBmu4VJvN1!I*XL@CYxCW=%F=uC}&hogJJ+9DnFGQOc?K zjkJz9BZweK`t+!FoR|OwtBf>lksHxXjAMxryLoKMZ+3WSOrRj6Y7QW-k44Cdo1>aE z;%2j8G-6pu>ElEy938WzqZYH*1LoQr!De31U5kG8so39SiHFzd(W6ICKcjhXST_*Y zFo+`lT90g^;=jkw$5}Gq*oqm;n#|IUG~R#z2Y-CYw&K_UQEZ;|f>g*K?wl6o!yJZ*m|Tdy=CaNZV<;WI$pA(8!Py#iu1u4{f_RH~pq(uk za*`(ObAWMXea9}PoZI1+iFo()U^r0uCi2eaXf>I<951BW6QmdHdsi6q4Qv3=%I}*Y z+(LbxG`odC+PRi;4)Zm6MXZCa)~tJigDhFfoA^pSjt|K!_ea#^@HG|O4LWfRvdiC% zwoAS)f@YZ+I*>t!ekyR8yrcRHe?^`a~i 
zMJhtL&D*je4#6ZbW^;uplfpuJ7%J}<4RV0Wb@UWAp+{1HbtPrWHdcaR z;1hu_PGaN1nAWA&ibCc*jJ4J-n=H5$Uv8Uyg&7{pQ+(>-VlI^e<6B%3q$s-Ea$%`5 zSJRrUE>>QaT((RZ*4g1siAQYTUW+SSEXhMlOgl*jXuvCN7x2*DJWXK~W+DXz` z-bFXub!@f^#K2*aX&XJPk`uSD>z!1K3|elZif7jj$(t})%$j5nrk#90AZkeu(Zm6N zzZPRZ!uU9D>pNTs&S~?*yiqbl24EzOzhd1X+go(>qVc9HT6oi1=%8RDB}1=yB>+r; z-j0d97_I3EScbubt@{A4S!Vbl)?fqYr0GL0mscIM-UOtcYzrct9%cfG)!dDBAqhO2 znY+Cte5$$a^*iUk>%XZp0@e(2Z2-d$KIxuDX-_!QI=hFsrSx7YyBiE_i@$?m5kNfj zibqfDMN5^s{#mW3l=34gP#psj^`gNqOb7Yqc^aEMCb?NzT8;7A619>;odTda=h)7S z%{{GtgBJ5KT5~Cwz1-Z_dUH5{H$@k~DBVn+INHdN#cBV=FhI1V<+I+Z1y(9CM&

!yQPr^4O0`1k!ZTL z8w^iDX0nP;I)U^vfWKu!#LyWx0RYRh%SWU^i?j>`pWR!5nS+N)JOtacT$3Lk_ZS0S zEo-*&Q4p9sNHG1UzRVbb9vN<)CIi99Sg0n}#BX5-emWsHtnB`w1CeH%j;No;FpOF| z76fTN6a$$aSny@h69xvc>@5!x67%ZqqX$c7Oz2j#04roR zUJqzftLwR(#k-yWWhO;0;)oK1RwWf%a5E{p1DI`&lhI9CD`BxC�j^;2Xg_RK9U@ zjYau_pO3{X^;8H8IjD(Wx%P@S8N6e1Hb9UgsBHbkBQ9;m*&6dzTB!&-4plNF? z+sSL?MNvFrKwx0A3j$Ra!`N38*cNu$ZnkZzmi!X=K+~(U@i1#7;-U)NHO`-%|Lr2c z+<*L!7bFdWbdP#2kT>>3$^kaFwnP`fV8=tV(|;A5O;B9d zz*OxVhpeU0r}cv|1zBV5ZMK=}N15@XCA#IU8SY=p3@4Cl089^;YV4FotrnCct&!eO z;g{t@XyWUn*1(deg{oHg;1} zA|iVDzS7ZvkhI1P+4|h$w9xIi9d9P|=*sHC@=zT``mMUx0f)UKo=el4-)do?Rlr4? zlupb(q{~;lB8jEEklwxNCRE2#!17#|>oKy~g=M_}H1w2ch*pBe7M~gm->bzZ>PCDP zp+oj7@UkILxwo8w0%Q zCu3!%5rcxB(ME$%Qn8Xl)uUuen=pBCk;F=_R+HgVTuK}k*=qp<;farh!@50gIU%j* zY5!;X77&Fw0Q{7vw=n9dGZDEpIeB;%ecfA}0ohD}8pHe!991#W#`}1 z;XAHd0($*SH?47mq;qZ&raAPXTl7HU5ta|WOLGSL9AAS9! zSgaXTmQp7;u3*dNwflaLk-o3TA=Wz`kJg()WDdOyHw4hf3pE)8Xw?(OIED<;9p5cA zJ!h6xD35aAAj+@%8f^q%_hPpayJ5w1(mRKMrTg2#qJ_nY$kK-RFFldoif^%z@S(@V zsfZALvr##J%a0=gW`_Ewj!sSm4LL@@;^ppU<73|5)iBe=Dj)tk-g>)r7#du0pAI#Z z|Izu|ae4r2`ts`^Y6gH^4XT-DKwHTiMq0B1h%W6BZ{S5j-f(Jgd|AQ4n92Zr0}?3K zyM|j=0*g*`ZW^9q7^isF-U*BJQ7r9hWi39;*KX;<8ShI^-@}O12C3G)^&_ITT8p7t zc#@zY#%Y`-gcY|i@e%H3YHYfknxQ``s5#ifkN?fiK$M&$V;}F) zq1CGKX_MOf`s+cRMRXC3f4iNkU2HbDjcelssCZptPJk@n5KXufa44ixm63YU>+E(mf@qjcbdI*fUF=U26j5e(M(p)zg9#?=j$?#MEAZH)5Qn9I=&g zW4J+c^lGBO_>AI0LVtK7tOZeE`^=->ab#w5&&|pm3qB+U2HzEx3^sbM!Xjum?>Yy` z0#&*uHO^y>U}cQI+$=zInG`$Bay%FPO}{|GAgdC8^eB3!`CIjZcn-FhyE@BxBzCS^ ztr&e+Z9mA`!3-iP(pAZuM{)EO1CiA2m%hOqnZ@ea6vY^o2>s|U-_Z? z4~7rGfBjEE-#V`2*qGL`D7FznHOrg(H9ueBRCqfWeUL8|h1>2$VAGhHXnCY+Tfvf~ zYVQLr#mELutChh@#{uNB7@(WQq{QLvR*D6)-3~(SM`&5d5S2q>!Ax-%p@#BFi-G%G*4LWri0nm94^P;+J~cM3nQ?mH z7;{(Q*o`qgi_m;%H+ytE6rUFW`6uy!JCqFaAQo9qpZ7lreya7%AEIX#1Z~_|*~c9e zT-3l4v>1eldx%3x^fUmmd5+5x#wO%M#X$sYDt=`pk=dB7t)yR2=MV=B3Ja>Jrm=Z^ zc9d(2-40{|bVV2@JiwX;ntQ;P3>Vajt|h3uBvVI8*Y6n?OhN&)t%+-@nTOmp zZ&0+sun=OgtVM(h-cb#iL+R~E7lb5(GS=iU=?Z?=+&>bZ9^BVc=zaY2Sd?Y(Vqt2d zP`;o6T4@MMu@|u)v+W#jq;=r%FtSq9tj0*~ zhBJ^>e%cQzzP}Z00hg<#utfEhyRQV#Q4ggz)ThYmEW+%%cshJ)0P21ln-{Nd8A#a` z!KV#;kwgeVK#SX-zb0-c5|o@W-9zgqmG*l0h0Xzq7EstMm9DG)w;_2|jq1 zUXBX1>m@lj9)|Wmvd9&|LH><{EqW}^MYYhF`BUEx8kyIQetko>;SsTguYcrZuP%Aa zs0*giQ-4p-TnvMSNyf%@Z>ZJ?2?~}y=1vWaf#@h?IZ~X)VgmGrFHCpPyh#mPS=Pue zU%K{O2=V3?t~3i21UFU@IG&P=_DV4o(Ea}Gzv};jXa82uE|m!MY>N4Zy`&goGKJ_e1(ken%jnXu z?QG3e=BzF#N-P{H!ZUDlGan~#9B|(O?pX{%3}eI-w1B4I0o57;I7zO`hSiEol?l)Z z7N_GWi&5R?a7lzI!1ss|T3rD%)?!-t-gYbzD(M>6u3iQHrw2Dt?i0^qXeDKA8{-VW zQbLgtp@4OD^F2@=ZW%(ZlD-$@gLiz*=yTKb*d^LV-E)Q!9L+Gs8E|oDo4+T3>tuq1 zn8JG#9Cvhx!bLp3nO4KhOXP*G;IXdE8Uo+LTV9NTG&3>wUc)?g1h z%@d!ysoANALzkFjbkNv>E+G#~=`u0o<^TO#iG9e@1Wp~lu;eBBR>W2?WE3Lajp3O= zbrqd=Rqqr~wVuuKpo=E0njE#Niq@>p1b$C<(HY|g%GG3v)PMZ4o2k=HH-P((uTM@m zOxYY_Nml@X6N)=JuG=`AUpUJ|$R18^stD*KPA6RIzSoV152MTiA6qG<-1d*SuY?GK zQWSonMq~&p%IU?2{^RdO>hs9dgkFvokLiBEpEJa zwl+iw4gxQP(wJG)!z`C&EQnj&1=!&AhQZQzwX4_e1Y4sRviuPpqz|74@%&Q*03v^D0tjJSuA>w} zf4P@nx3~kXH$e;&8OT7zFb~?UgI*|VAI4d7k+fjWQ@swg1I-4HuV`)_b}ee9v=Y@| z?U1*Y^te#95{sCiN$pXr&DnP?ifhf}J`%h1lL^XAlZ}wTuY+bP@%0NGTy`OV)1MfXQxU8TRmYQCPNda6WXADr)7(7XrKtw=VfZgX}B1Acc!@-Ng z3DaopdOqc1(f{3bRU?NtG%=LL+tuKVYLPCQ*H&u-K1u6tjV{4~BsPEQiY|t7gn9C= zr|o=!A9=_G(eSbPe))NepI^t4ZnV7G9Kjq1Gyy31vhSrp0hJ|1g#*F>Ve$GIP$OCc zgoZ_6QLTasR0zzOU4Zy4{wP*KBwmZV$%;4vDIJ>3`m8wjQWR{W{ETi#Z*%c8J~a_! 
zKyJZAY@dLCPK@7(QO|N}#CuWN%(akA8e-+Y<|no2p|X5x21K+GejFfRM9-#~0~#;+ zC$WO1inOVP@k$HbAR^q%>af{MD+))|zV?Q@VyYH+;S>+5Hr6WjHBn(?WNci6Fg^FW z-0Y=lkif~x#B}9I)`0M3TYp#0Y`@38j6G8-cBbQ~cSv=q{gYiku7GysIt{pd zJMFY4o9*Lc4v4n%&&Y&FBds<<_vCCpwd1kE28av7XT&)~cz|m^*YGSXmV8r%bQbZj(~6tQHYqaAFY4bxamJF;p|T z|AU-m{{VvHU*1*rPd_mh5>pYJn;6ov<9g0H3kR-L(nlp1+^Mj+396o@=%+b}APN)J zDgN!$k86fB6WouRu9@4(RJ=6qq`_O7%9klBAevh5?ICpXodN7$yt`g?R0u+O-<22+ zq4!~O7dIDRQ`SUt<{8KkGL~$+ z({@eh-+f1`dK9#}^6IsCUAJ`#D~tPZymm0W_0b%{U@v3M2({r0V}MnE-oJ4Ul{AB zd+EN1CW(I(57Au0#C*2BVD0NypLX#H7ZsN4ofqf3<2&v%<>4wx=Sr3Eudf9Y2F`WBd+xxYgYo z(ro7`BgCoMdZ;lNU6Ph#ZNO3`9v;i#8x*K=2%3SasV)r?5#s_H8nK}_ofg=RroroN zXW%PSv;xOw)|48EQ5@v)H1k+lWfZ**S+l#bky7+I&n?6nEe?9eS@UTf_q0|!t%D?v zUh+!RE4ULa;H_7f!McCu(F<(MS}L6fE`g!|1;8ZE>X!=E=ZXLet zfsHxP>BA#tpewxYL4x{n#Wh%#>mr|36RNJg6qnBTcKlry6V*3WlRxfwVTYpHgiK@k ztYEXY&T1(!Ov4Vm1)*~f;5&$P(rbc6rvbDeHizs%E6QgOl(dSLL8bw}{<_!nCW!m# zlfOCtK(u4(hlZljDx1$M7Q#GaE)-ga_>%R=ZYv>2iTw=MMW7(H9%#h#n|l5p&h!*D z+@~yEs^Ud8G}H!x>4Dw#Ep`gVl6gOQrZClol?6wCbeYRn&yw~kEz;p zfapwV6JL}GU1B~GOK^|XBjeFj&^X2{Cd2WTLlNWS$zEHSaVnx?bUf&Y5=nK!1Lp%X zA2>)XWcs^!9nMrNA?GE~aPLIU%D_d(VXWaIKxmB8N=gbq9N5p>*oEW{R)ZhGl)~H>uV0DpdDg zB+<_u1-0soMoXGbz?krcR_EdY;MDdnUs!&dFZO&#Q|rfIx?ErdbOm$CaxTWWazdfI_W%14_a>L6i7!g1tU2M zVGU7+2=%@@%gs+kFGKe?n(r2u5W{5VwJ0Fr zNndmeYC034sar%n&&G1}f~iXlzKkHGFekfQdw7F(^GEc1iPzy%BMKSD8Pi#BXyU|A zZlt{A+iQN{{h$4z|AJH4Qm!&tWws&1yNUnE^1g3?8|Eu#~#sF4;`iS*br&9(b$_coj~xfX(O)wZkQ$}x!b3Q{f3 z0Ez1(C?jj<6eFkXYFQK!fI2E&! z&1Zu`4cm!`f%pwLE^{HfU@(By28Cr~k#P!oWd>k6EA?BtyFv+yf!imR#Lh}zuv1z> zkTxh@z7!>8*go-wZQLH332?lb_~y<~+aHcvv%^ykbiG>D#h{aae*V)S7G~Q{unr4E z&#k2L54`DURKYB4wb)Jb?1CZprx`&MU-5CySKM_@e+1uj*hb?CX|UPb$hPP$PU@uV zlv^ntAR>Q$UviS)LvZIzgg?FgO58* zB?iL@V8wG{rq~XIW@C*OB6IEeXP0Fqmks%&6adh*@6G_-OI08=T-y92$xn^;+c;DD)%6G-xeuTeM z13>8_@ZikNR?UQob334fb0O z`ql9R(v*P>605U{D8T6!n*?zLs)r*koTW?|Jp?<`6n(9Emz**u*>>{0O*c0v^OY|AyiZ{l{WJ$`P8cDjoh5JLB|y9y|Nt4iKnA9M76MKvK~4_w)PE@5yAya(raCs zdzu*}1CLsW#cAjazJ=Z|Rxs3+?cErW6(?~4%Kl8M7v7SOH23LxO?2TIXePaV*GtB3 zWKM_+$MW!R*S(m2W+NH&i~H9w_!Wt#EV1m`#T?D1)9n z-8v&JR8Ql{PsgAZwOy5dIhhx_&SSCL)m@$sR zA(2e8#vo3m)X&qa`i~!7cEW&|N++tJG!1|g?;Y@@CP*OftA4U$ZV_M9cdP*TY-gB| zE{}fjnyx_+`r!QiAj(R?%O9s_oZ#@#;Mz2nci$9`Cm3){cQI$tTIpvKynG6`IrX2s zfBr?hh}?S`O|@LzYn&u;STPS`+Op#}#^X+_q-E!R96uK^nsit13;NXye?}>RHhzi7=ciB`+$$-0KxL-4IgUqo9t} zY4fM_A76bv=Gf?M+(Y|tW(2V3{psI$lw!<-hEX_l;<0E7?UXucvY~|Gk`22DuYUqI z>F_W9O`VU5-)k;r+@AmD83hl_BSobVqzsChs82-R;{b*kO_4(Ej~3m4wdi1szUcp4 zLUXsY(S>lqMx|bj*~IB=+&u%or+X|Sdc57)clBgsBH7z^duA%D}wG)@8 ztNQ22gu5N7PTlDp#Xd)Xq^1sQsg54locnQ|l^@Bl_i|sUl^!#@C=rf#Sv-zJiZ87<7#MDT-B9VwWaWh>P#wR9G<(X~_%ef^EQ z`cx9Ulj%c$tyHq8acWs7dJ3A3GXrA5%}F*JJ3eKyA9v0`>$o9g=hH@V?4Fcrj6NXj zq~GSMlrsq$$f<nysl#G{^NUX%yUiGRJ`-gynN@N zOJqXK=(duRl?(4uE(<4id9jBBzs=3@t#j8BaPsjBj4^MV)- z!gmM=f*6SGD!eww?qtOkk2<-Z%rc-RhXb6L`>q>9*uveO)oh5DSr$p_Gy_ej@yL9f zF92bHdx#rq54azbS0{iK%-i6wNhiQ}u3XdZppt7)l~ROJ35hyVZYZv-dG5!^405bMRQBX^n{wfEB2qS#b&U7BkOoyrMvNGF;>nNU6G zu(c~jeg5zL5kE~cPL1$@njWscn1kFjXMIT$NzaoCtl?BULC`Q_QBOB_)xRMSRj`a=dg11oX+fI56_4?+@H($9tZ4S; zVy4W-wwnO$t~Xsj_`c|H;l^UUlas=h1Y}hjCqY)jnrbl(MjYQ#XvL(g$D1hmlN_6% z7|U)*{-$@%KkO$@8+Q+>(YS6;q6TjbqeIgr2dv}XBfyFiUVP3B9%6yM5*&H7Cv@L1 z$Aja3N-|8;+=otZRg%v5g5J469DN$BRl2BqQ9MhT08=1(Jpa^F-_Zh5dw}c+jhOPB zQO*<((gP-&C`~xbdaDF2$*7`Ma#%i=TNf)zWfj!Pl>di#G(=+3<4pLJe%=~^3IBw} zr2sUilrs=coc#oo#}Ax?!b__@E1Y=kR2T|_f6J`Y^q{%5)euW7$K;$L1Hc$HB?CyJ49sxbjC!eO=kr?cH2ks(1lb@*eb&n?NgIzcMa?_q(xp71>qUG$j;C9 zA!%2V9Hq6$%A(#uhwYzf0c*?km1V61jt7Vy!z0RZQB6h;pUZ!$yrbKE>l3epa)e^=a=^De=@$!8!(js@T}NSxi#eUtuSF6%c?MI1hhSGVF*v3F;< 
z#pXjT1tp|Pn5Sql8ag^f!9m1cL9~`qQ}LS(N_uptX!YF-8|JzHkAHkQ{%hY$wqiN? zdE*8GtaW|31=q;L7SvNiWYvpR+-6?q*7g@ ztf|OPtT<=N@~{|)lC(o8VP?0}F}KZ;>h{alMmnDksE*EA;HzAqq`_V#p|8n#q){V- zO_K{}i?u~KZiYIC2OW8blFTNLWy4;vQ_cDAJD0v447zKMN-+UkN8s$lH8*x^OmxtX z7Wv@n!3J(MN2Nfcq3U|JIx*5Egi+-6suFrUoH0Ts+t*@>%KDmD-fFi_9@_e8oW;+A zFH@_B9}JIaQ0{~@WF%zE{au?>u!#nR=1{Z_hN6PyHUaCESX^%V$wR8E>S|VvNL^U`SS93uVqv!f_x=^!)j1!uUOTeE#ib&bM)ZMnk=oQL|6I`Y29wkh-lOSemC~}=;TdZJ>7E))?AD^7QcD{ z+tZ~~MHEYdVggC02UT9VBR$p#d%!{AyRG~21(*4HS^H^?+Zqm`e`_ILtojOr z^Gf|Itc#+0SX%`M7|Bruyd5TChd}UUfdx!g0QP*uy zbA2Ilo$2ob#0sjgX5r4|L?al+Ngjd?2d~)cwQt@H^3gJt`V-NJgzUN$sljjCu1or=wsi)q|$0rHQqC8Zv69?cfc+oYpe%$rA$ z!l-c(=CrdsNebdk89)YG4DY5YEFE`~(mr%ItGVTR91pK9YVZR++;D}`(>UnOt{|~v z*p9K^7D4L<7WG38W3R1QO4c(|6_BHN_qtl~b3$5o=o-_eO@(WCXk z?EZ*KsSXvlmGT^#7kTU0?~=2Nb9xVvMsKUK)(a*!&c7g(_5;p%%f$Z6{(I&Z5(yB< zdKM)3NWZ3vmi}2;Z8*;LKlT4F#j@=G#W_@2dk8pOcZ^IW_e;Coj$r{;r6T~z$RXUv zrxJu`Y-}tJ4O5P~C)>bMxjYDM^@cWGM_ZZ?GOq+Ei%X(IU;AO&9EdC)7mRJbd5Vt4 zK0~9z?WU)KqiCnf1_&VBkp|XBK*U2hBq^elI&xL8-$b2~Z{B8f|F)p$ZzVC@t zX55M03=@zc5O_^(F!~x+`Y-2yClc|dNP{SviruWYfYi6K>0(@eS%U9HMbZ~Y(u_)~ z_?7T$=`Pg#a6h9qWXvhz;i9IfUWtd&g3R%;Vii#1S)(^cgC(tcyrq?(n8b1bwm|G` zX}^8zN<5356$Gd~4^2l{zT%W958p>3vq*cXIjTAiV-3W3{*CH94 zyRi!@hh3sMF%5>L6bP{hJ*XP`ceT=3i`82Lyr)mS%{^T4`ou8`&*{?=-Lm!*S=I-w zP!g9C|0}87F|d7$yQJVj^P&N`gu|69&xb^!A1cWYgI*&d_JdSvh*p&Z?Z{1!iB?cr z@t~T@uVLcX+zv^X zQ3DH~WxaZp^z#Fc$1W_stXUzQ!o5fZzRQ24HD9aJ6RPXI>Xd%>{QY=t_126YHdjgwx_O7JpnrDEY_p*L;R4-k)#02r9U5DjzN&|r zev8(u@nAq;wrU0&t@K=i)uonKN?Ni)6Kp;3V!DG$;2}EjTS4IrEJPe2UU3|1l6Y1f zW7Oxx{u--gF13K%`u%B`O<_{f{?yzkg+15C64=D&5_fKQIV=AD8XvNAa ztY(RuwVmB0!GJrbssagGp;16z(m0%NoA;78iTS=2G)48b$W`cQ7C*<Z|3w#;rYrOgVisFuo>`|j$%nh&82qQ_Hg?xT3W zu-5R0IIU7o&Z{jVn|nFDZKBKqa^faSq+_3Oz$E^7-Tr97EyA8c$1`V$K4QV~ z5Udl2hED)`tr3e_AM~fxwRJGm?WTxfdIbue6|L3n)|WInR5l2kH{Cl*^N^0KTI}RE zeOnWx&F$yz+^K0@&u|{+~SspUe1JVR+87rRGv;K+#wVX$pFzz0V!1z=+kOSQH6757QtCISU6q7yw%2Y#YV>fPUq_-XHZjO?AY@cnE%;JWOcQsnLeVajKE7(17$Z41;2I*`+ z#SgsChRx-Zmzth*&Vk_y&-LpgBk^u5a5UoN;-;q&%05a*uw(Ni*%5<0WD}CfYQ`)# z4D@j`pKz4fqg9aEw0^#hO-%(W+(%};e_M{O|1Q<@!(zfk{F_pJ5La<*!!;~Nk~ZTP z#DU3aOi*$|7@NCp(>33O|E)bcu!+Ut{l}NSt8WYu+y5%aLL#bpx6=M*cQ!+nqCDty>?WXnC?#o? z@c|>V^zf1b`H?=M)3Uz$v;@EJrcFN+JzJ=t$$*>Q-{oIE<^b?k9BbB4xO^~y}5mi6wA z<)9W_|7T+Fg3#OCEg=&Tk4k5LhBPd#*@rVe?tUQ-zjyihnV4pl7!0^!#F0q3yi$(= z&GzoaBwGfM$g$D2`C7fE`>s9Z8g2ja`dT%d^%gs^enaiz8cPW*2xhBt7lQD{XuKMq zvp$%Q)%k!}yFqHw3TP9rnb!UfertloYj?FgD#)g>?T9u^?Zll~0Bnpl! zwOwNcpMU;R8d7ur!1s%ZsvKlLA>MtEl-M;3b85i|?h!_hCvA}z=$_G14)f171iaOQ zoQja=zyLei2ojp5DG;4zkiY3VIj5#+! ztE7cAO*7e=hintLIwopxJ-mi~;+tn_mzSnP%^Zk&ReL*$yV;q#*4=z)rN6Sq*FtK> zc5Z_89yF$i8^{bahmw@7Kl!HSNxh)eWvvd;0-|TO;=YiWCtFD&=ys@FkbvC2?ptlh4nv9JT5~Wigb2<|aB0%WX_0%G*)dam$jB?{E4aX{0as!>m1c+t8{pDF=Fa1)}Y6kO4i4U3ozuF=WScp@U%uGXb!2MZ0cn&mDK&Q3v^+5^y; z2D8XEhZ*}VJRJ`Gp$dSga3=U^rzLS^#Y?1qdlSgC9ijm@?Qew*-RRQD~^ zN%Se$v1&VCOu@O7)QCoIJMiSk{f~l1r7$^q4uU3;rM2Uh1^&oRhM&yjOhZdY0ooM? 
z7Ayq%-_YmDWA}eE_wLVepVyh^AF;Dr`_t@1kt0%;XpjI1P>!868jXvHOA8HYpbSqf1)i>vLsuww`+YCxJ@2_Z=Q&e1CVUy8w=eWlKF;}eH!hkcNz)J|a{D=pLEISQ+IQyO z8OO$dFhqU)aCC;<<)US|k&Fm5>S_#N9TsR*4I)T|wQq<4Rxb+J^f5vSL&DVv%TR!o zmikZr#HX`1(qQX$>nz06ZWqKtH?=XGOTK4f2r|LJw!Fu22 zV~985dA`oPu48d(|3Ilq_{Z6Pat8;2NjTlo7jed|#zBHc!NhjVov|wTFqIi%YQ1y*OZ}1%&qz=NM2L?X4MTm6$Ve_* zb*76&f15Tj(!MmFXJv&G9=krJcO2ozJ(ARrAX#uVJYT8mb9+oKos!yGMRL$1@9?Z;{}QZf?yaFsYo!5-?b=y73j>=hy;(z)DZR`JiVR&?;E zgJ1Y_4uM{1-C;dOg=0my;BZgxMVx}<8>#wGFI}qPlSRL@m~PL`fms+p(Ou^M_*wtc z_rK#SN*xIw@t;s~92p=Rbe^c4MLrXS^uQBhNrK52db}s8cCGjk22%pB5^`djB zxIRASZ>cQKHvBV0@zpJ1alN{rQ;&Q?3K|#^mWb+Ee--M&iYoN!Jp#bkaQ==361o28 zb!-B?Z+zmqwA=7vwE7d(WEnFH9FCiv?YMyq_Lv5>ElfT?aJv_lhSnAe%Ec)C~W8J@PO%i>?rCi2;AStNuIzv~2n{J4tl}hj;AKf%>Y05wZK;av_jFBw8BZ$FV84;Hfx;1G5aFwO=T!S>wb6JuJW9uPSca*ejkQEg zA<$VMpLEeJP@ACVnP&*3iE>n5Bnvxt&TM_P7ZX>fe8&b4|M>g zNUk*jA`I_4Zyf?BLuocTJ4qRXP1wVXO-nFC8aj5d*=z2nz3@fEG$AYJKN&tTBa&MYGv*%L zNFlXNvaY_t$UNFHAAbI3v*NP-t!JOrR5(@;gx~kCT38E(Aq)w z12ij$7j0ijco5tr#PLuiE;fJmoX!|0 z&{#uAUi+liS_YHypD?aqm)1+0^p54C@rha zlYUk;A$-dJzU{JP7Cr%wi81=E^VHWh`L znjF_t>y`OPq~L*TiZuQrJ|ICNg=X}<{sBthc(w4t zE@q}LM3vu4z0vk0EagG;4BFaVl}yPO%_*c_byJY){Sm>#_8u@24k%?d;w5;aSa4tono8ljKg3bu74VH&aPql$L%2O<5}M*^aV1IC-5CTm2M zhGWDshWzp2wD)tKgD7Udn}DV`W)~4IDFcAaC6w)(f$MF`J~pj?R~eSI)kbUxE@O`D zTilvZ!z+GW{v(&=|0s;W_iv#e)Y-GxceJPI=Ju>~&_LdNqO*&Q>PDjhIf9H0Iu&rw zlI}4ESYP0I`%9lCK+_Si!$IUQ_?3d|juYPEYE6=A4XV2%h&CFUppx~a9`L;GFwi84 zpR+5&Xu^CPpg+2y6o#}8KB`7Bs(iyrNQu~}o@LZj1(mONUbJ5~tg|oQ=5dM{7sXvr zIp|s+0#oUxY|3_NpOs=AkT}cI;ch3Er1&s4?%92v3>=_R=Ob^gy&uK2<)YqL_W9jp z=B?z?H?7ip7>xIiJbw3^HwPd1PiBJW3YJYH)g%v`;R&_UC%U7%st&2!MMxlpbckXZ zg`6yQTBr4^lz%0P%N{bW;ff&&sf9VcRb zrFb0W;SaKksvNhPxRwxt+-G^bS>RMa` zrQ2fy@6BQSoC)|+MYuV-SSK8mt&nSTji-Ipn-mVXp61w~Oi5uReO9tVrl?&cN(BQL z51$snNMIz|o?58~EFm9hd*t_M8yoxpyhZ|l&sZ?tAH3Cts~1PhWdK8CF%pGF*SBjY zAg9jv`n^q_n@T`RDKymtc6Io3@iVXsB!u**GDpS7zeRE4)}Bn59Z*J4J^T91aX;yo zmZZYk(rnF~zeuu5q|6gLKI5=zDwTFrH_r=P3u%S5@(%afOfSXwS<kCf3cqFV}>h zEGU`u(qW7(-7!n;V9ww^;3^FV)a-?3XcwYBUlIMWMuflB;|$tG70z~enJnW4N6p=o zR_Y&th$Yej(LT z9TJBMbBS2zFMJ?W)dE0|IGH({VBs;-ePgL!`&0s&S{2KCLZ`ICXTC9+nC*%g_1C_= z_?x*zhMYSTUtvBk6G&`_zXGo6h|}Q1G>0z5au=~^i7&0L_!fgy_K#Lc){+8oxkVLo z8TxDn55M&-{ib%7qG9WfZk_UAG&6$srCPZec3^oxifk9s0{4l$IY>P?km#ac?y^ zMkC16d290&_X#Cg?Z7#H?}FW!qwSyLr%usDi-~iqQLM zzntq7W=X|GfXobAs!q8X{oOTU3Q;Bn7}PpCr;r2a9aG5Kp-o!VnReAGm7^XLw!B6> zyh6i^3>t7Z;r@ERfDW(w*KIKW;A z+lwqrYdeCf4}ZTek^aVY0zC|U#TI84mloGw@*kHke9PVnX@^P}QQk^a;z{OSyxIsq1&p+vB9uM~8YeW*sFzB6xg6Z?Al0@qhMT;a&t9lj(!Cck*uC!l|!Ur_t zUZMlK*;4x$!W}n)KC7`FK^mMg){b4&3S-tEZS8Ad;2w5CeWqJ{jhtC)N8i}$0GW)? 
zjJ@iHhTW*G`AA<#l|%r?AGthRB$XwVugHB!zb^e6%8;E-C$?kaM!LQ!QKa@DeYSJ9 z$#BzOz(`9LUc+4jTAJyq;1%d&uLkEBSONcbrLnNMGQYmM;(vh_w>VFZ*>UxrM6>~H z(q?G?>B(zahe@?^Z=8flikb7srEF|El3a_}hkiBCr{H@Ue4P@~oy; zXIx+D$_H^EX`<*(m7cd6BCoZBbUE@$=7hEd9)uX{>T4u@MYBdbnp945fx6J3YH=6g--~~ z>#z^+Kr$Eh=zfP9 zmgXcSWbU6?PQFLiug7(S_mcA+4GT$xZ)nE4>T*G@r?839gN1llx@}zNy^g0#<3j2C zLVr*YF8VtJAJSL5hI|VSqG7}^t=~#bxX^ZFvC5_1NmqS|9hT&TDq&&azBcI#XcZ@B z`s=;jJxd0jK*%CV8lSr6LQ;U0pPHTG^2CKWVjONP1|nphgtotl5VYmu!+s&&@t&4s zt>o&H_X62Pght4J1JU*oFkCVa^s9>f+Rfa!O$xz6Gm#Vg>EP?&rZt<61@+06xmXn* z{c~JtSO^OstWS=9BVP}`)=JE-CcL9nK!wA|eVwQTX4(mx@W$YC@I4>-QwWIU8U)-> z0M5nW@U_A|F$qa$d1Gx+i*?%yN3#oJF5F1sjhmieN@i|E9+STh5A$9)-qsekzq{vC z-h?Hm6*&8PW!;YjT!6j@`1(j@e{l`j`p`9T=(oTTw_A4RS3Em_dO|He+_HpTK{7K2niQ?`!ZxF$mw%lqq#@Wxv{ z$$E9sXJIP-E}0bn)q@9re)&R|$rAo={K=n%SC0jeb0Bk&MCQ0-X=SQ;P^?0bS4=;P zv$f#y+-;rQqUtB|-^eU%DTaWT|AG292}+ueyU)5dxS%&q`quPFQiTXnu#un`HPliU zW#AjZLY~Ae{QcMCiUOAEAK?qxaG97I_t)V7SZ`(jpzY&C{vCKeU#kniUowCuR3SK| z+RO7IHiP++um;xO6^9|mEe!#YF!4n6b;S8I%i_{cWxxpSBa$w-nxWLqrZ&`>4}O>& zicGNC(e%I|Qt4tdEt1)}rB(kLE}K>LrL#aVmZotca7X2tC#)|J^505CYcLmOTqw-h zXp)@~aAR%l)?&5R@VT|Us)L+`-<6+mzcdU7lP0ogF`(oX+0= z>Z#w0e$v)Nxx=7{tVvEA$0-80{!+c>lcq|W-m|{kMPY&j^f66OU-MPleoec-hM9$v zufJGsZ=Z71w02|$Ps|{XgHnC*MSUD|hJ{KI$%~UW8oHk|`H2)6j0{?~Mk|qkG1q1? z3fTjSKQxqrV&l-JuTNgRDjN|@3a67!SGw*kuI+XNatsMHi6ep+8o+GjCH-*JNxUM5 zTyW_SfZ`Gb#DfQypS+?6Lq3gHV`Ut^{#gCQ1(es^SHF0CAA!sXw?+e%{&8u(q{mWM z&Ej*Q;?_rR!3O7Bz5{`jil;im#Dhx|99vg?W*mTAL4mAVTnQ)T--O6v7lT)}KIWd% z)o~=oxt}60<-*yaMUD$sX6mJkYfRV$K%c*K1~wso)GH6JJo%*m!pio1)CZl=+Ow_RJ`~$}Apjx) zEFb^vH*fgUo_GLToUeZ&UJYxD2!Vm$P$D`XKyb|b^aroCP5}@kKga??yK!6JtomzK z3`;39gG&*o;UJwe(@#M6;HF9+YXM@gJ5@(!g@EVLS#vMkLRfxD$|m_gtfW$NE;A+| zrp%s&fO8e_!fd6g+3%!)_nlVEvdBQ*FD~O0_FQSFMoc(t zy5@T3kF>Jz!JR-p;e(;e^%sL*`|{B;{1caQ6e0^;-e1lgWJmU*yVbGHlX@gRms6|BD zesuY}{(!4Du4S4>@DW`hN;ej>Fr9XjjlXP`5r(4HpYahnS>TJ!wOz(mt`YUba@(G@Y;`beS{1e>8Pwc}EgbD_v z71*$0mYnjZm=?!-8qwEBcl|U&XT)t3KG^K)ijQ^d>a}ryO>OBV|KsFT!dA@3r>WCU z)e+U94dpM{Z$W*T16r%MMBHG8&NpxPM={=weZ|+?;ur0N(ll?7z?#a%xlAbE_VuqA ze!?ffeO=DO3jSj8>v(Uq=>-vIf;4hJ z0ukmLqbT^sbs@|bL8z_)c{It6AN5_V})G&MY_(;I=kHS8WzA% zY4FueI83p8(y-T>i9qe7dC!b39~G$sj$G(AYv>SG0QO8PDVb6?4A3`w16K0H9jVuE zVuIl5wbMND-HWk~m>}mNNm%X}Ux?mYoUCkq zuPi6>fXOWNq;LXn)L226(4xB>Ss1{c+`>>MXik4=yTXC>rxDyTcKNr{8$n!xY2|x9 zqBS}R`jX@Acf!xPbm@{m#~3Z4c}v-ntq!J-D_ZCtzC8a*4_R9dU%pW8{DXh6f{Q8a z{>);8ioSjEkAhy9;bx~5Zf6bODuVlmc2Q|p+D9N)(1LCxJ@K-JZB)8@34ltt5&e{E zMd(#ZW+vq{cZkXXOh%*ld|*JmAL^wHKtu|~v6GIcR(Y3NUW2LmaqFN!Exi*iFetxy zpAcp}0i|Isz<0dfOqV*xm>^L^0x;_yyy736p3YG#VrFq%(M~C8)ax6CqAW#>F!B-7 zG*<79e=R2+^J&qggG!Eu|CzX5P*YCbxaQ-ioFpU@7g9e~&J0GuIdR5u%0K7+O7aK* z6#trDl!@)exMa8(ZAku(u^T~kxVGZb^!TImFVBA>kzwI!&jRZsE9Rd8V2yF%Gq_Bp znEwVIo_7bo@SU*Jv(4b~(?gN%hoM7JE>1yg;cNBkn%_6r97a1Jk+TrD?w@jdxn%zM zRf&ozLz8dfOuiMAoI1Q-p9|VA-6hMM*oIzMHTu+0AlNj9A&8{J6Azw#`tlWh+E4_& z0NgAj#a{Wno|kYq5#RT70RPERx+B1YV;Kr?iZ8gh5~uR7AHVU<8}_e%8?-<-@_a^9 z;(JmB3hI9*ha5iWEZ#Ur(|`3m{;js!JpDol%<{R6tgYUbIa`?IS{=fvId(LC7WrpiP%PHXmawJKha79=!=rl`N#eH_el~;@~jzlD*2>@n^$mo_#7X9aQ^80w?3qM z_xXrn5$7y=LV{R4b|FPXj(g>p1A|GBT(0pSl!0 z{ttt<;!da5&-ZTOEN)LL$_oWeeu||-g=<6bRWCdvA~+duW7wk6Sg+~M-OLkoni=;& zT^vSLmRc~YeFqPrXH7sPm_2*(Nh1(aGE)!TrGS$$d&f) zCQLq^gxHyr0O~Lma>5a4_Js7BB?71;M<;1IchHmA zk0B_CK(7~`sin#fOpg5QG1>TY13Z@=M0^17_+c`RdsdugEI$H1nWt+x9~S~hLyr_n z1}DYR65#Q*El^1uH7TABbvV9qBrNSrOBP9rLt5c05Ay?O#s^O2@@HC!K8QgYg{0aG zPB9%;QV|O=%)QQbd4;Us41)!O3{{TLguA>EH7ERv)Re%uLmrU}f_?wSzR~NY!CM|M z{68MI7zGD6Zd{eVvtm%_AK2NfaBt^?GK6%m_4N(!g0YE)^u;MXVt%E!!^MWrtM70< z|BCDR5&!!SzTUXL^x_Ho$J97&kNkoIwa)+R+mK9c|x<&hlvS;+8` 
zQXrZT3}}EGu+&QW-^Wh!ALOVD%8Fr5WMvZ2yPKOI^{^$5a$=Y07`h_4l@@7IdY?Qq zC&Nu6l4qr60OYvO)-`Y}tg(0>Zc3Vg-?pH%!2Hgd9O`)NF4;$B2S{XB?-3o{{y*KP z{QnJ$u@^kLfI+OIr9mEZrjf%FaaR00|Y>2%ul1oJ%K~0vsI!v{vuRQHfh4B5+H*abPMI=ES$TDM~ zJ9dE#Ba{9*vy|_}W)p++B^o^Rhs|2IK``l>j{!6jz==>1w?!19^O9iyC{zc&+7F)m zuD=$OLXV>ltk?*?^PNA?mJmXMlb<2`$p)VlrNUe<-EaTXb(W4ThB&=s+^&21bze(M z>USdg0>z3_=IvwBxko4do%z&}bE!jbfxA|o;`8Mor-`~$b@Z5AEEd0$C6ZtMMedU03e9-aQ|c5`cv1{R{f|Mt(Jj4 zOr*|{b&LD@N>YY)tM0ph>D&72wbjUH&C;}FD}_F3g=ozC(vHNw;F(UV4J%T_n#>~t z+>@>>G&TtdA2ST<{#IbeqNT=c>c?+b>8&vSuMA;vm#_HumRqQ5|ACGvyEUKD^UwRA zUJFNpJL9#KRzMe6S*Qg2Zs&LMOFzA^eSYnGf@d87i#4Jo{m^ev>7=?Brg_h&xj!3V zct!}7g+Hv#+0@dZj+Lvy)w8T99)*vtTy_YZ?)zS!>zDK-m^XilwdCbc`aeeIEhY27fE4%nQ_#>0W=yI;k98?nvlT&Evb)DZEdlUU=j^Gzb^cO$%9UnNx0suzF8M(gV0yDWDXW4h`MlPw?$rmf7FUjY!@T24o zBYFj5WP=(+{FM<_CM6Kfgt@Grh5ZH!HPS0THG3$I;n#}~=(7Q%cL&XE7AP2rTb*wU_ruF9v^up||C{Z=}Yl;fj{BcMmM;;|3u`qdP~vkIF| zka-vfX0PNlG#|?ac9Mh{NY-E|X%~;lKT6BZ*H*-7k5l&v|E9(>KpySh_Jd3XY4Sb` z@;E||IPY2@VQO~eT3M`t*3Bci=wxSONAs^-8%}J$&o>R5D_R=mo z1-0y4ra~8PdpfOH9hDxv)}j_X*6;$&oyl#;=yQ>#5|x4)F$W_pPsKcLt+b_v&8SOf z1we@J<>29ddzLeLim@}>$mi^dpNR%kGE00W6B2WCab5%7Is>$r zq2$d2@Ppf-q_G3rNRtoPXnMCbfYMYFmRq9*@sA81!uIKsY79or@Tma7VYm429{*zS z>EJ8h+^Z9l{J(nn8ii-+v+Q!XpEQ&;LJRBR-i%&7d392nt#U&9!x7C^G22K-ut0G$ z<4{rirn$p+yx$7FRSf^YSKqiExHk;5>XNTNTs_I*8%cc6G$zBkn0bZPx0|)<3e^~$S1A6ei zXMLe&Eh;1d$G4!K7Dl`{-v%M(;An+lI%zczAeyf+=!h@t_Qbz#mVGdb#z#CSE<>|X zeED#3DCdozkdJN(Ka~T6F7$_54yr|eDgQjYDQ};D<&suIy!_8wB${1iTFE=a@&2Xt z1sud#|Dfr6{|zS2VLwjBp0`l&U&HH>JX-W(NCYjN6_xvbs%Kwboz>358D!NPJ{d!s z;fclpKIz;O){B7lO&_9V;r!1JEUkOAhtQS%E)#;rjoSr#aB>k9@e~l6f|6@hbMvE2oQYm8V9%LjstcDiBDOdYc zd<6O;q48r<*Ryd+3E-x$Fv4vp&Udw@s2vK^6#=1b+Q;Xg`sWD%SRf7<5!#2ErKh3u4GmGz=1mut$Ug^_9-%LzPbwi+N*j8K!z4O>;ybw{X;dYB&x-OVczN)Syh4W0? zV`cP5$z7{(M7oKJok#|z5QUZb!h-nwLf`us_bjU7TLv1P!>f1Tavb%;ukD>2MskNr z2?%k`N${asq+H4aXA;9Nt=R46K~BVhx2Ea647a~FMDy_fd}Iav6U; z1x+>tlI6G%qFp%U$|a;htq&vT{>;Ze2PN_ky16XY*}q5zXjV@@;eV!FHcSHxjmNTC zOi*Y_#FVME;>NVd;z4-7KvO=^h>lyuvyqGto~NyXJVQ~66p8;0|vZEPb zr5o-KIq06b?e2D_|F5s%j&LMc2+gAa?-9=^MR=LzgCP(AOmDjAiSs)PJT#A^0g%Og zPV7G}Q9wy)DUplM_>0|eiMTImc#$k4i@8vCgjtw~>TGj|oK`thjo;zEf$!JjpM3Kd zI%!sZWjS$Q*Xq*7Y|RITq0Ttp4&8;iCe)~H!JTho>?MFEYu1k%9)?r`wK-OEi-X3R z`Cxub_*MIko3J&a2Uv$v@aMZTeSO>qiWV6UCNuNh@a_DmNM0BU#H752UpS)~U&tjVs6!BFZRSrjAk%;}YL96Sx^l6RVZrgdRO> z>4Y%%F6uTOX~-9jd`K{bSb8^3kVR{xC^0_F+r=cH#MxyJIx>RmE^VxU@7DC#bYc}V z{B0+`1f+r%Z1Q_*SX-qzjJ9`jJklME`!`l^;!mb^L^|Ez#YRk8!&T=T(n+UZT94{& zlznZN&Jmhx$-(A`-*RPM%LE-|G5hs5olQcFWd5}oV2Urwdi`|#pyI2tRB32WKxcrt zbcw3&wb-JRqN{Ta4pWr=?W1NYL2cd<`{dx^3;lK^>Pz^lSIF8^_@{kDh~7vkeZRQv z@Pd3Psb;_7zp0~k`n&$tt%C$QB~f+MIR!ZkK}LjbQH9rUXkN6M>K{blrm~R&4LdNE zq0zK_#(<8W7rSWnbo`TusZHlg$c!ta6ED8KIJ>?eNvZUAB%>UCs6Pv&@NVe9>(i;y zwzP)LB-e!x0{CxoZe|gEG@vGj=%-e;;L3HX$Uo2)M)dWSwTi~}VrFzU3p5%Y8ANk4 z-aznR%e*K5X5$ppx&P2P=7{@_4Ici{A4+bQFmL+JaGNf@{FLuswNvqpB@mXdwz~(o z(c*6BdV5>;yG1duKjZSF&OTv5EPlqnUvdY1=!3&U=|n}~5AkL-(3yGv5dC+R&6xvR z0f62IdgbY0sA8K(t6mK7mh0~O@Mo7kGCs8)#G+D%@DQVED zK*#+QUH7qq=EDFC8SjoP%)^wlVT!{pR#s#i*OIN8uY756=*p(tCtCr!fBar<*kd$y znU2EVZ5gdl+8D7ckFPv?_4_^2{%-Jdtv{{JVQ`JW6;U@AjRy~2zvO=gO#d5w#|o{9 z4C*fcnj7(muQ53`pBaR(x%~ltHCB+mh!J9l4E@FG+H7sDHtX8?W?O;*%fW94KlKrT zH;(WOK%lh!$CQ3^kA(+hj$4q$vwCg4?RNXbwnXnRAgg?UHTZJ%yAsY1*8^@SUu56} z_cQE@cs9AV-v1JYjZv%{^n_Nez@Ah37VN}H}U!l1?n$?tf8W z*P~gsi5rtXuAWu$gt-NX8&qa!+eSO>AHgoO=i-Z2m>xfwVCNFO(r=ph8m_r-23L8q zXNqr}Pw+(CUM0k_?~=X)AwEy>A1xXUFx(#slS;s|EZf@)r>*v4-G^!{B$+J5MipR; zsjK7q+yVSIdK0Be&2*pB^Q1G(mH6AyeJNKW|%I-*yNBd@)y1U+(Z5Z%Ys{^MK=IOXo+?u0Gee2tnGbouRu|> 
zBo~;R}U%*+RMp2taDyo++f-mqjg#H}8DC?tPF_}>&95Gb-1V%*Q6-4i$xW&>&7}V(+;&JiPHH0TGJ#|F`p6#3+ zv=l(MLO@W+EN+o_my)NSRRP7L^N$o@Ab$&@P8!MxQKesl#?k(We+$ci)&Tk`{jjgD@NMu=-Eg4q?{&h^7#kZ;QP$ebExp}t z&UiaU@E7Nw>-=0aPnF_WsMlR1-31Nfj{!1k^!r+M47HTa;#Q^--#<*6fV&{>Mkv0w zEq|N>-$cTP{h(3nE?uT(N(q#&;X-a~vJN(&l(K4XR+?J{WuZaMr?!6taV@yyPq&Tb z=FgDa7ljw+x5PS?541up3kmpAkWl=>XSYT-PK*Hm)eIq3|5eG|iNdIgN(D5NG9X+b zd+C-ppkgK|wuJ34oGI;!8R4=(>8iqA&x^cuemVAxh(MB6+=8IpVN;ftTD z{lcGEb|_t9-+3b178{%K4Ht~n<-kFghG-Um(8`&8%OMSXwl=d74rnPl@%iBmyX*iTxECo;i=f#4ixg&R zDp-EZcTaJcCFmLbA)D<)n!{?PYFg+7BpUU?EpWHx;aoF4DU`r)xnB;J&!a+zZJnKY zoc}ao_zO}-+mfu7TX4(oJJ0x_$mF!Pv`!VsI<&C2X)j=uPFh$lRR zY4XKtSeHaN|5N74BE)?V9Ulh|pZu0)F=xzs8rzbRBbEaB=1muN+XI9}M}ewVg+)N{ z_rn~-J=ge&tDB-Kp z!q)fb>Q&(|tTmrx=59OsY*&`+OJVh5LkN@e;w`NO*fh-p-FGwsj9I4^o+v@2;Yehj z;i`-2f3ZtDtw;;Yb z;HFn}c6h!14t)wrFnz*ELC6-BP@JzeP^A`VG*@z${#atPQdm1=DMRqn4fD)mROxEw z`mk_mz<#7r;F=LC8OLUtw?@P?qL}NSK zqHknH0Ru`BuilVi89dxrTZ$o!UB97+qG=5PrO_AP2O6IA5%z=G`y%3F_rL)7n|8`S z{!5&cZ9u1rJQkt{i+9P6UtYbX-7UD|k*YtcCEh*%wLcy`&SkE{+&|foufA!}CL&1G z>V|5eUS&kGZ&y}=H=njCS0y6cGKa@z+>k{}+dyGzFWA~~NEB}X{j5$+2KMCA^pSBl zsA6ICo+w>(aMX-h&PAMPcR<&OAS@*E8|E862$yflZVja#+5mqkX4};SQ+@o#;A5Ss z8u(GbQ4AF1Fu$Ae56*E z+3O^}36cUI^6xvi!?8gI1PUXAA8NBYxJnAA#b{Vo*|!(2`erm|0U)Cg6a79u8~oDM z?EQ3_ZZKavpai$8%&RLqX+EOV7Zp|8U2&wzaN2AyghO7B3uY$2sjESe@v z6^1dPFaFBk0e`!UQ|J{U%JGCOK@v@zTd+XhCJ+p)0;*?Ky`G(ZGc z?whehIg!7f_0H*gj5dVGYOj3GOs_yySu_8>#hxVXc)koY5cOCnDI`?x;cbv51k`+| zxLww&DakxBeM75Ry9Z2c1SVK(rXV+GpA9AIfqkI=6lyXJf1S?vyC&QlyR{|jSWAwVGR zA8&D-k)g*Z6~s^iwrDlm1+V8zN`&hq3AK&MDuHzk;Lv0;y@`paDJ^sbKbKN1^-@Ja z#=+;<<$i*=`sf~?g6Y@7RM?4iU^!w+j6sdP+NvMc1*0B!qoxp8C+aS}= ze)gEERRUU-muAm3L1{ufW$^I1XMLPYwHN)6(fWY$7flXU#C{Y#8Uno6PSw?>!wSmN zUt?ig@V|0Q;0@VWk;nlcvcjQc8!e&+wC<+_f^!g&*bL^037&t3p9!G%;~=M2H$c$e zK2NTsVSb9z_IW-j({IO*%=2N4)`^dk zd-ZiRK>PX6^u1ZEbns0+3&I(FYcTIR$f(BkI}3035+8tZH=Eh-5686!4X;(@7!@i$ z?f;>*S}?m#UpXdM?mr!Kl?V&N%|m6Z4+R{1Bym{LcLyKov72TC8IOtNud<8Vd=abn z8I;gG&Y}xH2eFgDS>y+Z0I=Zxd8(}Xo0yb7ytRVw&{4-hb@EaF2sMguCTj^u?O5l$ zw7~PFZ=%LdvuLyWbe;7$ay~)^Bd9E@etg2mHeUePnX?baH%C$z#j0`3cr1^SAI_!I zOr-G5A#a608s7nzE(h7yV8al-bz^b0>Y}?+dr87Xj?|K_zGJkZ2?wda**S^ctrj%x zB$0<;@I;o5g`%JaCM%F_yS_DniFE;>*h)J`tN^j%Uhr)I$Jgzq=-LdygVqU|`~$tm zz*QGT;z*9KIFs)r+&O7s= zm-@RMV@ADx`}3_vWIedEW%b%M$vvy^%6YR3Qg2!q`Q#73Hjs z{#1+5CQE)3-Ue1PYm}_NiByJ@P3&c)+fJia)7PqIRK(?MsDjP(6DKi#s#LLe8ZIY~ z-#dTDhX&7kE{L>>G&Cf0_QhGN%xNpOnmYkp1I*{^ax)517{rQyvof1F=^L$+et_L+ z;+}%D0MrSMrsIuSh5f&kLiIBgoZ-xbvkxhHh6>|h$}`7=dTa2Gi>i?`Z`$G$VkK+prQQp>T5qsSjKy>l z42avD<7gx;$qK4*|MX52MJ8^6yE3dOO9y+$2@#Vb#i^z$YlmV<}jw z*^`I0WhWr`V+>jVbk6?!fy)oohuV+Xe(G&>cm{FxC^X)t#igSasu`9j)CjLm3q)Rw z4U{0VwMggoGyim&(r2YiXRx6Ym%;+YQZQpTTh;_K>8fh{GnzMDE1TsTqiw07H13&myugkM~)Yvj!> zEY9#71`nUO@`S(K*pM;+g(XJT|*>u|Qnsr$AX2{5@92|K17X%JCb zV8B%C{%bPsR(=l9!oQCeEYh5Itj7qim7b*5;kek%F6s0m$)=XD)B}Q;gLiMRAKy z?#pk{xAFfN_;0Va{MY?oi4^o35JV@%h8bXvk~W>%li(u-a0&gD*6V(P9xT)ICTC-* zJ<3Ug#a_2blsN7*f?_{aQ;NHNZQcrw+x;H5tqTu#BpF0o;W%zXYJ!KFM+rP`r2rkL z!NM)OikX?`f-z%I67ypIUSCFao6dAOvpkZk+N15zlmyqmc{81)Z7K`3%Wy~Ofy-p^ zK_a_+EW>_ysLs@cgT*$@-TnhD_H3md^r!~5r*ErXvC;}7@zkymALCed4XswL9S$Tu ztv{U0G~N2Aor9gU9lB3iL9RcESQ24g|0opP^C>H8T=DFl<*V1qg94>@pM>>_T*+hS#FH=b!o>%&sMTbuzBjrC5N%qcqn-wEf>mm0_~ff#KR$HLg66;#B%KO{C9Gqbzqcfgt?XPOa& z#L)$DqU#@71OlcVnz%l#w-KFda^WeMPOCs%p>{ty-p;q8u@l&RDxujks(*a=Jrs0q z7e=ULFJz2Y$F+*5929Ca+8Ir#n7Ly0$*Ncxo4Vmk%FSYp(Wq}$Wd@KIestlGL|g{nfS)NVICkcG0dSa50j!2c#@ zTQgdC5GJad8#hA>l0#Z?xuAGKpbPAP+{>-wO%lLeOBggc#hm_G_HcxKh* z!+^$ur@=Of9+Hv=SqM#R%7x94rWpyC%tE}T2uz8jvbUXXs>*0uheU>_rN*w~D$x)& 
z;DndV)1&~XZ?daeL|groPhL+IuXKv->e2vaaZMK%n$I}k;w^-uzZyir+T*_+Bvz+| z!lU833K2UgjTE(D_oY+KSnV#XFE432OQhgbB50HGZ&D?4vgaFHjf|Knp+XJopF@Xa z%7+CjE7bB>JkNh>n4(R;BZ!(n;|P1}?0>&7bqpms_>KMd{GIY=`|sO>UucaGijvv) z#jP(ziq!n!l)ItidM~cDM(qVX0}@~i#%y%Xw(hwk(iXC+jit*C^n+}yzTH3&%oEk$ z(A%Pz!@j(b+!lzl{|rElm}FLnRyjd_QET}F?N)K_IbI2DMrPCa`1QP6&qA%fz#Y`n zR%l3oAJL<}6kiZO3trgSHic+rQxgS`OWxFbH!?fKz(6pBEi=DX({i-#nZGhZnFNY{ zOa68Np93W>MxCoyGPtJje7Lw=pAA0sxdZ4rjL`l*c|v{5F7j|4D1&d3p7(u!`p3`c ztrnIm`6g4)+LxhPS4i&hqi_D={BwUX>pYVTi*ntkP32Oep?oJPk_LEEvO!W>pe&=7 zg%fDixkMXO$ykU{|L?&&ewx1XEg${mry`Gy_j8;M$8z&5*k5=ik+K6_O#HGhol;xK2(?_^Mjp763JA5tQUM2oo5cq!?Zz-02_PwpaW0QV!?G z<#2xJy0mm(;oEx_0QdJ5P^Wsb4IWym8U5~a04)YQtaApFrav2NgyMBTW!x}Qa7S>I zVFmg3{)k;%1F5K@KzzugL zV(1&g65b8Nnmz_=X0u^wM1O0O|6DdHfFXr(bIg$w$-3BRfN+F#SmEo@C6Rfj>=)=uziCJO7Cu z1yi&6X?im`WB4Nd1GhM*<@EaeY2GuPTrH6NT+k7zS?T+n2`~^>%a+M={NrPsoSO7E za8?dStf$Uh66{e?PRUq0#ujtpbs9`%GZhB186|p`;y`PQb$xhZV#=i+O^OOKs;VS0 zH4x_6mWSXG$F!gQXs!>SPC2-{WR37#+L!ScQdES78ZqO^M0WIop+tOK#bu1`c{KQV z@Romx`_ILFb!m0pKU5s95y?I>j2PRfMXk5G1gl~ni6fH{-ET$o^?74L@COp2UR#}W z;e07Y?Jd_G-@$IzfXFLGG6|cext)@*Xg@+~=Bp^6XAl~n%o+dqp9c?LeOiaqE)Qza zu@fYbW`4-2q4x0W!Mi?k7-YW~yyNS&*jU!rs8=MI>K`N^`R4ft{?n@6`T+&G{KIsK zU=~S+NlRmpDxqpj@CP_xzITXGwkEp$UAmt8%<6q%C0C3j3m3`OCLrH#dP4V;`tt9>jBmqA@c?RW(v#jWvZ8(dik(Pq$SQZSc3r#e&LJw;F%|Vo}@YW zzV-yEeiTCf!`C0Y@}rmiTTkT}_-0>dZ^EuX6oTU7x_EyV>1yF`s!{x*=cb~?+@9MV% z-mIKRe{mCJ0C+G>34TW<<1o`LiK>w_YO}thtOc}vKKZd5)1pi%ty7>S8N!*83ZwU5 zrx{1W5R-tRd@yqy0n2Sa_||LLZM@3-k2TFZ(>q>fW^Wj6MvKKx~mq z5*HudP3g@S%Db}@euYI7FuqSY6Z!@OY(S?D*%`fY;`+FchJYjB{z$RT5gX%nkMyzO zL5b3&uTI^NElJv$xG9P(0}=`B6ePYCU-s9_6! zNx}(1B0>PvM~_Hb>k&UYga1?P~ zY~9-f%w`(fw~BHD>rmP0MhwKx|2qXxIr3kh-%>l$9^9oM4osQEuy*-L)1T|7T)5r5 zW(Srqratq7e(ub1VO|;p)u#mQ2qb?#-l2StL{BO!D^do;a}L`H0)aJ$K_%j}S}hpn zn;69!$80UZi>d^de5VbwtdGGN?SjM7S(Nu|9?LS4D%>nTiExJnwmIG<-jT)oq9b=w zLhxFH=TC0w`~?s)N`Z(b7#RzpBTW`l13G(cFM&zwD(RRVSKUhIWCerDH?Llq^{Eho z?}j0=*(ZGF69%}QPReX$T{|&PO3QZwV+=2e6pOD%Ik!d^WK9|TI~?M}c7j)-;!LH* z%b-n7F9X!)Lmit+ylH&HvLCX&i}{~(3?)**$9#H5Jrw5PN(=4bBeHSpS0HbeqPb-M z)YpJwz-yW)lp2;L)~-5NN~&u~QVdPx12Qr=OJ}2`d&W zjHRly0|uFO*eH1ipAKm>g8{f+qJG-$TC89nnI!--^AvbxpqnF5Z!4ZHHY)@6uhrLT zbBizP-R990m7LU~`AcT*W}GOlMpifzQ`yU=!xHX-#%f)LMx#32R%yDYj~tiGLjDrB z0FHJmsg&nCM3;#%`F>l1DeeL|VMU;25iJPQ%s=*t;(C2B_=WF*aW(kTT1R89MgJh~ zK=&{i;?!o*eohj?RIL8oKMwC7yQhgzi~$32=Dw&~Pb8!84FzVbibJcLRd!=@`2`mm zbK6Y8ps^D=ezy&m1b-vZ0n5taAROakfo<&CY~ zf+pu{DHUXyVx=}0Zts6{NW$L?{^s|=2>urCgIeSo$bcG~aq7uO&WWqncYp*|ii6jV z_kJeI$Bb=~S@@@butrXYAF@U~iz_q{iJOB7get0F> zQkLi^Gwg50C;$22Q|&Y-7l6MQtkwx=b0`nvN-_%7pt&s(P>Mh_sMZmtkJStM%h#1Z z#^383)JS4y;i(h^peDQ4XQZ7#|dU~;L(Gc!+g6Rscx!Xh7{_qzO3THD2#oR z@ZLI_aJN-59ubHbo1S#x{qsNe(Qx&OT%B+!M!a5b=9K>g2^N>Q-i)H$JPUW)ZVvyX zp_?;kykgZU`XvV=A?04*RtjDqu%f?T}C9tq?CpUZ{N?BXPxIEZi%D_{YmBK_by|^gk?(~0--BS3 zF`?Kd7OhbcJ6qUgc^lYYlptTf;ix89w-is6s zhjJvCL8_A^iyqI3ZQ}@nJ2?6`p5+~d7L>9xA6@^zI&iq8tXCFS{2V>_;gkN?m#az=)B^Nc@P9tieHl`=i zwOgq+q(6^4g~Bqt+QY-Ho-~9!Vp!<3Vf@AwSux|ym*n*a-*v&Gxb-b)n%?y3U?{|!xO%=r||E`{%U$Z zWSV|_Yg~?$^&x1V=vPZqG$B7*lOi zW5GYC*3$;BuiJ8Pb@*!2Z&g3AhjS?cv{ajo*L3bdITaVXb;3ML*m%rps76RHrL?5M z&oVXye3RCfH!aR;P>=#6d*L2q!$T^T^!PfJ(=KR6sfD5GKa#fv0Gf{arA`(b9JT1Fh^w(#co`6C8|44WHA_8IlFP?xO~l)J(^?+&o| zH(Wh(DFk}$6jRjy-nu1A=ajlAya+A-A?m=`>Jz=#$U6OHh_lfo#h`J7(#|YFp|4h@r>z8aSjHzxn{;MK zFQ3h&r=EJoM+el!+)#fXs?!Q8!h)$oF{196RyIh7_;f{M1sSU@T^9pv3!!gfK_*l| z`X9^CUk--%CI9o~2j7cUMCAg0yDZ(W<)94%g%;Lh&3uqfwW1|EqFJ171q9Xrdnd{F zv0il~GYDUXT@2vP7lBna)6Ji6k#4evNrSQshaenu2JB|z=F~T6$4TDSC$`Lq@!v7` zLZ$1Y#Y`fU=Omqx4(V) zb-i__OXmF)gs;Dq?Xv6tQ-1oPQiK 
[binary payload elided: base85-encoded GIT binary patch data from a later patch in this series; not representable as readable text]
z1p5V4B(Hy_&;J;2n>ZBZQeAfwjz6GJN3i0kc5SS8*{v{wTk=-h$=+U@DtB#R%nA6% zlRuc`hgUNAOp#zfB`F&{6c=YcWh;vzVH#W!3>gxpiSQ}Z_b*R>ABTSY59QC_j)QBD z-0FMTGgFfGEC9p&*2}FDpw+rV=S&3&d4FbLZM}1KR1Xld?@H)4P1vmU-@Rx%fqU z4ow!$pbc-F!p!1`(JeSw8eA7b4_agh*jQ!|!e{=kMB05CN&o4ePm%TCFx%z--gf4H zX_p6=0ASU0M3U@wPfpW}Ssp_}CD`jNh!a#f`*0BAOD+ycJ0h-h-h*p`hY@FId+S}K zr2hF>gj#-q$OAEG+*}*vo&4H<6y|zmc|P8a*-iA=xMvpZT}4nOumq=C`?V7Ej<4NN zI?DZMxL$314r8I*ELv93ch87%@?;u8hhb>!B!P5$&N%J15+m_-dL}k_E&DsdrM>qV zJLM<;7i?g^ws+j2e!4j5>CLhnWLTD$WF-G|Yo+p9kd%tp8kCH>eI){!#~r$fp4h|Sn2C>43X+_0 zBKU+JlN-3k@$vY$tnJRAjwb;8ldNMVvFxS@6KAiUay^PE;!VJ$Qxv2(#&Rbo801)> zw>S|m-31e16xv@l=6L|lT%Nh{97>?d?F`r@AVg}{p!$?oyW+?DDS1sN#3y>sP)z!RDbupuChZrLsv|b^S zZ;VS2zMqb~Tc+E|96>lvMyaA;4YGlotop;~MdpT~)bBiw17NCZ%N7dS@7?nBr}ksb znFS{CUpLmSt{~$iM49+{2TYIa210~%$hEE1(OtG**5pMfJNw>QjI?+0QbTkwj!EcH$T{) zWsnE29#-J|t1a)H3~hC)9IDb}D-L@Z7|_J2sS+JWg<_>BvHFn&>&Nj8bx<87qrc8J zRx4A&yYVJsbp(sUddBs=65FQoEH}C)9FJk{R04&r!9>P6&AhE)t=W$kFh#IJui~Mk zlE=OiX`Z{rJwwVE&E|7`VW&A#MO0IgjXh6K|G+93PHCR z_lHy$uGYVB4p6bZ5eMQGW1q?=kRJCeV6-jnBn5FH?N!1ZcuvrA*V7$eBE1=cia;mF zjyR5cLSzR{9_~Y67g-pb59~m&tQ(xMr{(DY8ITV$_8xIo74BXN)>cC{x$w9J=ru+6 z;l)@Kx9)(Pc8=Gw&eMFIT~usOss!Vlh-RTuyy4>U`}m0`KeWSH<=*5a|4Dp``kz|jk<=|<_1|!qW{;_x3JBe%@wku*eqE3MRTH27_3007;}R?F zWQ#p$TK~z?GyyQKHEQjC8tC=w#y(}%6Rh$(e(>eNRv$n8X*vG>$@@?LV4IMM!SUz1 zjy|nUBGEfr+nL*%RF;*iGU;&%UE%_&z@K^aBMF3fMN?y%G9BDcr7Xi@BoGY}_(vfI zm0I-NGU-93TJ3%a5W?+K z5B&rfrMt4yF2v&;zOv4b^qonb0il@TmV1=A8@>H_%Ra7Zbe=_5rSgs?7u$ki< zGdv)$f!EJSJ{;o&#bEB(u#XZ8x&yG=LKfCib2}?txR;m1F5tD&50OVxWj8kHx-60B$evlavH-ELxz{tpn8JGbqv#new zGEQbUotU_wb#p|=VfV{CD6+<=I>*4cSKW_}YOX{h!jf3OUp6}q0>v^^Ws{?8bwjIZVK)%tn-t7% z>}B%RHGV2yAoQ0~jnP1apFl0dG4Jy;-N~H*>MiK#0IaZFE=~T`FIuf+#ixsRad4;0 ziwnf4gLY4jSbS`WcB%jZ3DwxLt7{Gt&UQ5%6Yq^SfO|PoMM8J7=bscHIE`Z0UApXt z&wB8WIHLf|wNOjJ2zUzWP#&wr^4;h&|0)>}bJ*upbIJ|rwe73bEIGRn@a6 zM~R@%dMt*LXjD7TPS)&vS(&xHgAHqYT%xC)e8~Uab*oQ}Wc#UM(^!)>QD&Z!QP&B7 zfSRps)v<-Ck=Z=g9)9(-ztnCy@tXu%6pBR25K&F#WYV?KSQE_gAXf|H1G_nn%q9VG zDUB?u`kh-0ic$&1z%%=6h3;kjOH8Zx;LO*T%DOumO?@=dG6=i6)9VwHCf{z)oO;An zazP5JuEGZ58*7BQnzrPzs=6^hO1bbaUb2092Hbv80ge>rrxZp@pdh=!< zm4|Lmg0Fl0PJDL1T@I717*x17`f4XxX6nTdsaoep!4HW!<@pn*V+a*p|C7Cgto4u2 z;EcMdtrzly30pVAZEoV3h8c|uyiXp6`*z^_z$}<_jOa>`jwZngQ(Cb4+idtr)aiHB zk_BB}m$jQXRGOPR?*f-U%6;}|2P=#q9H|2=z`m-i8`Z+ zKrSd$(@_fv;wiN7D+w)(98$ZjV4!t6UMNZUO-Wh?OsWZ(9HOyL9n1h#RB(ZwxP z5N${HyW~$CSoN8b;!Uf!%1qE6kxy0%NfgU}N%dNBIhTbE8upXrh1TS<%JGZ_iqzuL z(#QCCsmm0c1)5~W;geUy9n*HOJJVE=9ynr{STwaWo~lrkCiLU3hqMz2xwslqCiH8F zg(}fY2PTKF;TFpSw5KZ8BL6^mnUCyrUM??R3$9thvfp;WSQVf*+!HuApkxAD1=<`K zFEObgsyBy--QJuhRx^dmjwFQU~H!Xa19*);^aFTwIO;GhAajcz#SZD{X#QxlxARL2(w)1?R?2 z^uoFFssR>OO1fLsrauE;L)sz++No*eQu{s@T#BKujj_`kva*|aN#cpi*DkEqlKKZAgoX*LD^N>-QWq>HjlrPy3*IcV8+7BN8!^!NYXYS zsv%Rnex5sHKapK}_Sp-GtcT=x@sOe!Pc<~agBwrDWZu|KDv`*01xo~miB^Iy+j3PJ znsSd*Wq!)&bOVWQ>mcBGHVpb}n4*rj{m7nlm+FL6_z2065u+nJ*Fm{&OH_z_ZtcxW ze`N;pG{b1ArO(@A*5=mB?X~3~j@t6!HDQv)@0fH_`z>xJHMxMW&*MtAh)08G>AROk z1@mN6r4;UBRd7^z`p=b%L75hjQ~L=g5(JfzL=K|gB5Gb&kU2ARwtfi?y1AGr>sH?} z0~!~E5g*7Vjjybfao=nAhzJQ=6e{th>%eV;HJc$cDdi;3QUc}y*Toho9L%f1?rR81 z1$>MPlxwn&-Y9ExDX7&np+<|uw<(H3Swz@EaXGF9Ch3B%0Jbo zR?S?F2SC+w4$Dwcr98vSE&QXxU<0nZwynaWfex6B?1A zd~Dz^he0}_%eg2I<-qg#oUNrT1EB(Y+<+bec`y|0*WI#} zRmZ`0$m3LeFSU5!JshaDd@(p;j49!6gkrdX^_%8?7<;Mx7m^{-vadD^$%c4iLKNPX z2&4jIBxqXA9>9e`Q#zMVKoh}FMlpJMU$TGyT6y-lkaDV^Qs?Qj&azZvQV>{Oi*xZt zyayt>eHS1PN*WP9P`EFpnncLhz?`U7cBDlm!pKqw9Xk!=8#Ub36Cquko&5$$SvDTQ~ZcKanIYnvzkd2|K>HceZJ zP#x1WTn#pnR}R!Xq~XSn8r=e|FyftmzT7kJCv>5nJzg!mv{1fmhr(dvlMm8!ny%l$ 
zTlj>?7e7PIKJ|v(YUDtqG%nFR;vTh|!UgoU0OcblMk>c^rfHi)&OX;@)$eX5PM4>6 z8lOvG%}$TTVmwf4HJ+!6p!%PXGY9R59HZJm8xP(4W_kM^AIl652gnaf5S^67E7Dc> zBnbIMiam{}^f~CnRaBEdqSMt{GE%EjmH2fMP^8&=o%l2U6e&A9NvuY$3^cZ1kokgL zn7(Fv6AkKAf?N)E{Lkg&_wmddoNg{pqe2rOxR)R(>pT>xt2Q&9t-eN)nsEX5e(Vu) z3@qhxhe1oppjvQ}SbYpvfpEk+F#0mWuq@9St*(?8U$Mut%?&SHpOuD+{nm2M;_f*S zr;31PLAlNqGW!G8L^NZ%)0hwGm^c`+>MC@q^VVY%L~Fs}WXBhzhJ4t*Cunf|Nw!es z=&;Q(#P*m>m06Bya z5MWu=U;mx{#g3K9HYLU9rk2Z0?3Us}`JlGj(|MHf_okIbTE+2?%b)+j{*EqzHV9LD zmt5*}#D}OeR3&Csq>)iU!jOecOnm$IC21(NzbBO(yCUgWTRBx}Vzi`nRV#=gyFgP^ zW0b90i+wAIO`HyiXUn9JA`lCzAD{<+_(dyJZm!uDmC?|bUEf^?PiLTcRRsz!GZa8x z3~85aQ`3dgP}9XT5hyiYjCg15uv;{^#=c#V2q_T3lmHf-2PEx-+tI~N$=;e+NMWAy zua@)nzzHOcGW#Ab2{{PcA#xD8K_onEIGXk;t&kni;}2BH^`24i@x}Ayypi{HN)+&Z z)B35j^`8j;Fr~pb4AUH1d4Vz&B($)|_`{jDAlOVda(@i%?S?D-=+*(9@ypSM$NOQd zvb=5*2gmK;ND1py6<>QOsf`l}D%%K#@yM&<7Rnn&GDw0~8q!K;m=FPqWEZbRffGlxqxjq`YNs6=v=i|GiL2(M)7gm8mR6AX~SDy0{ zK$VNM`wrs5G`(-cR(NM>&315`&tXp33B&M=Y0iy;X^b%5Fw#toMwAsV;xpUR)ZQk_ zVvau~CVuh@JFwaek-cRmlx~)98gE?M5QZjxCPadPXbWs6%J~WuXGQ*|*b<<&NTMqe zv+HBCdsVO)2hy+Os+}l-*l6Kqke^=%r7=mMUn0u}E@&*Ot!^~^cI=bjDIZWu(ALdD zft96>{Ge*3@|mrf+^2nlJA!c;If>a}Ng6FG^yW|HZ{wNO9<@JH%A-MV9+Xu|==w|c z;obn8RQ6VH<1goVpmX$SkDGDORaF-H($2d2672!y-m%RF7_N+w5prqz3~-n0y61lr zOl^YK)-iAK`n5dt zo|NuzJG|NkL#oKhkPN523iDNIG}}5>DfRtlQZ{oa+t^tDh<=a&o|56p4R6{52CFcb zN99Dx75`E>_nfhf60r~LjY17gH1#`vp_+MiW!Iujt#7_IGKv`E?Jf}5Zkm7Yk3CCy zzkDN>LL3EiK_DvI@-%cI8DWnn+=5IA#^3KV(u6YD_zg8`n4;PIJ^l`sW(fY85HI`V zGa7%TY;J~Fhx*82^07^yC|e67YQD{B+FB4@2z_IsGVNZl9b*quY!lgd=^e|&!9e@6|9R;Jg8+dHcVt?}J5%!w1>iB>ZZsZ)x6!e~%T$4y|612oYL+RQ4l8Qgihc%F_NF6BDW;fV?HcQiG8i6~hu-89Ml7BM?S7*SQg_xHsz#%gkstEO6OANwmsTB3fM6u!vbNP= z>p{}3vb44kw@`g_RfCb2GXsz&wsIFgnf40#353_;2^vO3c7%&*{7nz8%ORLf-K;FE^igjJwRqbhdH2M{byC#RUm2Oj)IE2R=o33GJH6Dvf`V2qH~SBsc_D&j#DzGu zK~nVVvgiON6Sij!wi{qwo}xn^m7{PnIV-|0{9P;)d%#dqRb|{w$tgaOx{nAx z-^71yr9d(+<$2jKvkQ!O%43iC;ad_R%tp{HEVF}D%-!B5OCZ2FbcjVQDX`p6*}Yi= zTC>mq&B#zwse;kFHdOQ&e6<;ES2M`|1B%e|ED?er*glE;0Ji$By?1z_E@O&qlKKMC zK{&#wAOYZJR}UQ;y0?jG55v1+6u=IF-=1It#%^rtbkc;U*y(4{i;8u61e#$ftjN#o zix`G`!oCtri8t&hG$CIAM?Xk1gHH4H%9yhs)JM_>lBi*mMnTxUQBPcEo*7rkw+{JB zyD;z;_8j^-W^Yaj8au~V_z#ZQE3ephH+r(?CZ4}xHeHJPqfD^Eg*y3V5dZjJ%hMmn;xKuA zArPKyw6R=`t=SLgw25Px@TC=?Fp|E7yJ(Ub8=Mhx70_a@Y}yij66Bge0TG|DN1r@- zr}#icMKW~)_GLK-3_gtw)5QZE;y5+W062|qj0KRW!&sxgeY|m&K>aY_wozOip zu4_?pWmF_K`Uzdx>>ZTz$lP%8GR8EiLlbS8z2QR;f?Nu}NaEt&o}C%7-|?=8zE2yL zW*pTb^Gza;1Q=)Mj3%dN<@HN1{gqfp2RWYpOZj}zMpz<3=(f+3CvVaO=ayaLo7f-5 zIVV3XC%?7zpFB|=uYF5Mb!ccjD>f424|hlMC$lcBKfZQ_QW&pdYHxp2u`K9JW+& z#l->L23IFfdZ{d>u7&_yFinzowX2w1H#u=Px@!0{^7yi{c(1+?x{X%+Y=gEEh#O)u7-v^<=0x-o{ zg7miKa-oNQB3Q^o`yR%LRPVlDGhN+L1ZvR98#IiR*`WMPEJP+Yrc|&d26uF^MUf0V z@gj@g&V}v^sDva`_Ue61kyT}*fdIq~mAp<}9^R$&_;gOJ*ZM+)@0LNTy`AkTA4}te zJZFi1qtvX})#GlY%5gQry%M{tK-*z|-+M}bxwPzyIMraTOhTcR!2y#?O%iIo+*pri zRX`r2gZja*9o55+U_;Mxpp1tcmvbJ4;dIyk=cm82+kg?S_O^QoOtMA84@FqQaa5ne zGlnr)Z=qdtdCg4)1s;WzrV|DhZN=oqr6Acy1TybIo-F#-m`Qeq>&9xw553g-Fb0#U zx~&hn>>g50d^w)Fj*_h%AT$Q^xEpE@ruG zPma2&&3{clsJ)IZaU3ogCb}+qphIR*%ZPRfm&urq)^y8d)^5|>W*w?&AAF61;CLia zSm|r?Y-w;84}tgl;T}q;Gyn@17RT@7!r8MB-?j48Z3969#A?P3>xc(0R6w+2>c%N= zY8f)dj7kIsxiNXd)P2ILaH_ozQ-iwMWJy%R0+$U2jOPv%#h{O|=R01{HJyX9@5NG6 z8@q$yAwh>y1J=$9bpN?#q?GlC0*S|i-Koxjs}x*2fDZc&ohSz2$17_8-{GSgF)fNRyGuG%L)=&I1HTj5?47 z0rlhafP|IrP9pZYD%4Vm+Np_|3|s2srOZsa#sz}vdeWMj7?WC|#+vSwX`>Kk z-ob&#Z)WEJm#@#+C)MZsMlj11rLK99(9CSPWwaP##WJ4Ue#9U8J;5B~BTx;Ru@PR_ zR9{`1?7A#0m6f;zi~s`USQ*pajU*aTmY3Wq8WP2^QUF!^Be}#Cx0hnLmOUbytp&=9;S)9N;Z)aAX`_m*FiT@gfFJ63s 
z$xucZ1QP;$&nT?2lCRq*3GCuF$8-B_VjRQ$q?(lV$X2FgddDw660L06i?HF493BRw z8uEd?M@~zaR*3R$**V%w#Jv%{Ezui<)cl(m#uH~QERWLxF46z&x$-Srfi5x`k03v* zC$+0$27BJgldqJWi4jA|#ssanpk^OGdis741c3AK5I@%VW@S1COdNmzDY)O?#)q4# zxSYnl0kn>TGN#;8@4gP&)!vWrj6m0XSzU}}QtWocK8B0gcJR=_5WepOx{^%74(;?w z9K}jjm3t?96^~mgvupOalP9m1&lsZ%$Vk13Kfc~}df~_4aU|%-5;U&Z)36?iJE$58 zRgF-v7ZNMSow4t%(IcKxNF#%+yy||(f;m_ORhigFDsqPTt^OUrSw9vy>tox*I=aSt z5b^OL+SXOrP-XS8gGDM?BhG&|7)qV@*WUTOt+eS|;qEy|rgRqsis+`?E)78k+ha4B z8J1G!LPXR-5SnG*%QG5 z$Ps5Q&dwuarxt8M5bOeckc%LoroDXV`+YzTg#>mo(Mn3CWDYGMErOeCP z+jlz@)@Ez!5)^%59h>jra~-v)58vWM+>^PUTR32LyfWXj zRptA6%xaDY9hmGvN>as8JLEH1JvveanKYmA&)@b+6qcC`PT8Z3Ahpxyu9G zLcAiwz7?^XgttZ##k<0{4j7)YWrUu_GD=}}R9Fr)UM*K&Hf2ehQb$#tJZU_Mq>}Da znr0tWYyOZ_HD|>IhdJy6*UA;|dRe{| z+`UgTNLhwGcR|Qat4<>u2Fj)jKV|RStQ{?k6XY9!$aZUfqZ)mt7o^HCyH61SD1$i2 zilQ7I#=Wi{VmUYrP7%x`u_#iay=4zzf-q#m%eRl$b{^a5aOzY=UnUn>wcz%3TzBkv z0%vxzs{EU>XAuG>1CSaxV2~t_X;*5>ogIf$;8aqmVo=Hvo&hIt#uwZSB5^{*ndI6IZ4A=rPL_iH@4uW7s035O9 z;%HzZChlD*H)o7DWn(Aa+#h1_C0L5gB=WkSN>$BqQ*+ZbIsxL~DV-N|5K@k>dD~S~ zAP$alA#pd?YiEMy>G91PT!n(RehyS4Ta6rY_s%r&?(U&2Nqa^tV`{C;w`^C2U;0J7 z_cSQJPti-V$6h?GoisYCJRX+iTA#CpEfWM}dXase_>ThJzEFV%))wqm zVw68*7MS|2*iLE#^b}}T_(7qEckOUU$ou>rTpZ5?gRW8V#?cZl)9g4_NeQ&-&5YXs z@NcBSQ~Q9>0Aa-%NNaeQLdc}1UzdFwU`yMR4)if)ZqV;+{}Pf1qh$XtRj6HDY&JwY z(3Qf!%mkiF{GnmK>hT|>(=1t&P}oL7cYw_x z61$;ZqRDK!sj`HyVr7`vJWBJYFW#Y^dKN?<0XHwEjhQxAe8))1jC z=!Ecd=j?RUb8MP(#JS{Z+lpzQo-j*GcnY?A9(36URVzK2JA(p80|i}oZ#=eGmP4LM zA5zJgqDNRk9+0nXT+)?n1DZE+EIJb*#5hBq1|9lGNQP*nN~DX>ppjrk-PnsuAIe)a z#Vv4|Hx?Gl`dk9NP){@1?#iw=I&cFV1xlhxk(M0$nkuVn#)H#vY2V!^{FNx_>y@K+ z5PPSh2^gtsBt8CdIr*bKhQ6Y`Kpc~ySWkJ;&6;UD!wEC=HXB7mk6Q<(h*U~@!{b+> z(Ty@n&mjNpo$xPEV*8`aRxE5=okAFI7(LdhT#L_f#lr2v>Yy3)vO}0&rj$F_y!Pk+ zu6-4=fzakJ>>N9Ywi;Tn1+B8Wjh+AF|J|iNLT?&fKKHruoc%jJZ(QNm@`=Ir|KK~R za5dz^80((20S)qu2z%-@h)VL*YqhP=LNc`e5R{}gy*x^qa7|@kkv~$H-UpF+wk`V^}X?gl9TNBt#8EJkaL7Aj3g$LyR#<^4&D``-B zT-j-KFf8of&rvdD&wZ_FMD6V&@g2ymrCy_(0<_6UvTu{obfDvwpxJgq6zKBA1+hd- zYVe3knA_Q++At>5m@(=pH*0&l>7xZ#v9zh--nmBAWG$NN?0F`$D-X}khW3_p1pe55 ztOXRg-HIe*I6_$TfV%ias3|23RA|204J$$IEfjrLj>3`W_3R!EXrlt?`Z+;k8dxHb_-u*h{RBaPN(jm$1Vj=puFIP9b(T`jqASS#Sr8xau^) zoT)soSiw=XZA~%`6B3FZ)q$z*H-B2s<6q z9h0@lZ;#avB`_Vb5|eW&wY8lr8ky}Vo>UB>Bo~}?aKsp1<21oeJUPsb#x5`_(0=uM z)LWl?Vut~m0zni%QKg%_Kpj=~Q`UwPyxrI!EsVn81ub+b6!CahZAJwO_aqp65w5F# zP(f|H8l(72CXBw;vpeps1MO+GN4Vq8ZW;V(l+H z`G~~bS3&%fCx26(52itOlgrXE+M89N zOM_E85m}m81Yxe!7(6zaw*wGFNRcnw#~L*Cc&Nh2M7on7N6iSfRfLUe5P6u2QeZj0 zlq73QGR>oG92xzJY_(iEK744a)c{doj00B&bVgRQFzGv?+u}>3T-t!A8 zkJux-3yd0bdx_1kuB*yBf&KMV0mT1 z{mRAbVBK?^`*N9?TTPuKM%0N(r-zxo2NG0-jo%=ea1ip0fN7-?0{nag;9;O8kA+T<&Z`JlYF^cTBClX+L47+0TDGty#X9!_sWwCajGjF8PdhMVcPr| z+b%v-wNZlhI0kgu!3yx#qOZr}wu(ZWtMSYFd8S(nLF(M{PWk3N+rW**l{4C15tF{l z(A!HY%dsQk9=pb(N<`NP|Bm7w&NSbM^%Cmq}7{iLh-sRAW~d z`@M=3e$T!DVJH-d8?^+gJFSOyr&r>S%e3BeIjE8~tfm}zy(@Pu8CBCV z4)4FPKXc`gdv-9V_7gBX;~2UAfLr*-dnfM(TQ|c-<7Z|EVA9x^bc3iHkl2q*16>j* zSF-miT{QCW5br$_a{QO$|MtIWtdmb&*0UDM|Nqnf7bsxdC9_Ex%;2u>0GK<{5LFZx zmpKj1REmhfZ;Xyej%t=3s+TByN;c7SkgK@$sU9siyV!Rjr!e19Tuay;aUTJ5DQuB` zp;0#hTEQ;ME$eB%jlgiP+uVE;GtPz_)z{4{q|lSn`3hmS(_ z5?rdayfJ=>n&B`{RX7tThPx#HF5aR^5m$nQ$N%v37bT&FwkQOGPPvp)RUjMgLmcr} zI%vl>!H+wM5r!FRMv$e8QE4f;k20gSNcaF#aM^cUIMV!Jv1$9U!qLW-#Qd#V5Ipr#kbbGO zmLke>gA|Aq%ZJ0BJ3PSA*Bujc{DYHU5$cWa1G#YY4vm6NE%{&$Wk91a4bxEVh~w)I z%TcNfA{EZ;a1qb;C3n*qUM{2R@sg!8j~Y#9Q47{RxJh&Y=@fsKU4umt7KR0R|3sko zcd;nUNsj}|J0n&f9h5uclCFYGXs2u|Ve<(dFw5w>A%qn~A3b15U~ZW@12NpcC6jn6-WdW?!*pE_b@y^NY1!)e$zC#585!0kgvH-%_sD3 zI|fX`VJE}~Y2yCyNYo6Xwa#?($0FX?b=SD5o%M$kr@}XL*l-g|#@npLhT!&sdt~-R 
zU)>=$0B@WdW9wRE4N0dVOi2~9EZf&eo}t`+yBdp#GW(&e}q9C^6^}JzBr1OOqY9&^6PoESxaA9@N!`oLNL9 zIC@$66jqh^=N4Y$BfE$%zF4kZi*q_430VWQ@xt5X#)|RR={+X$6Ty*rfG2kFoD&(M zkRMbw{gJ3*P(Zg0c!~X@%fw4|mu(a@dd~1EiicE`brq}JwWiD=uj8cs5533DzO?e5L8WNa&p=Fja1ro7B8#huttMiX>A zTZ~^)7DRZz29k~N=wBdZ#)?=HzpZ&=lQQr+Mme-|8}?ke_>K`87iQ5OG>`nq6X;r? zU@1IGQ0N;|#5#f!^w%e7fZ1R#m;F?sg!cIPvd}UfP^Vbrto?ZM1Thi{zv(vWc%vbm z=8zz^G(5X@KbQW6jhm3o)wKAvU zvK@tFyvJ1QN>is6RL&5A=z+}PzCzOhkVW&waFI*E_-)-x3Ps?9C$d+P*hrlmZkMUK zATBNpp}&KGgi*BU+O#eSyo(76CH^&ZPN6W)I_2k;##jHf$}_eQA9@-nlLr4rg8x*K_eU8z&nLw7yppz$`FVB8Hm`38%j#NplW-CTB4K$q7(aW}&8g+tvN#tX!bU#r z;~zSo{1s}-mJ0l*^h~G;D?xCS%w)|ghmdHG2r$bS7&@ncf$_}&>lN4p9mjQ0% zq0Tg(vJXq2q6m#&Y4Gj-uHb<$#RVKTeQ_QIbHBnkM_V`Apyw*-;&bJEFwQa~u%XPE z<~TrMWW^bwz^0EFLLN)&@H1ahvd)4H#BON@(68dXDhfNC+pK|IE;g)SC4{Fb)EJG zP&HV*_EVj$n#jxblpEL$ae%nGVtYg!@MYc;*R2_vknx^~RAD3f9x5P~yupYSwqjqa zO;f3yBbAsp@{6q{_@s&4zG> z1J{;4@-1Im)=@gP8Gwz(P*!gi%0jC7zA4)OGWHG#qp}hXXFOmH z4`(CUYj9C2D6aD^5FhTX(-7V(n8>jmKweD~?{4DJPEUCd&NF~jlDHlYRInm@hOpda9McRb8lh&yK8Q*0iC|iBear5{$&+hm zCw=03jx2)1>|0|&>J-B+2iFy#vYJTM41##%db@lhequP6QVyWzbs@(waj+3fvaw)Z);iM;}%Q6=yW3jW?_iaVB!yP3(lHfqTaI zk(5_k)m)a?IuV{NMM;P_+Pi9(B}(t&(X<34KzO^evh~nw*?J1l9VLFpy(wexO zL8GQH7{f5tz^-B?Qsn1qZ;miP_9SN&lVy=jY#Fg4$UHDW_g(R6OH*PGfEd^?qoRa_mdkz3=PjSka**hg{QCv}`-4$QL(_YH^aR(Vyit&j z#l;Ac5a8pz)z>^*mxFp#S!oQPCT2wJ%K0C=`vp%` zkiG5mr>6DEleY_f$N!7}iFI7~Ot}yw0MxEis?#Ct{mN{Whc2NHB!n`kdkPkUuk4X9 zFhME%Nr0LceVj$j)L=wKD2hyoAij89++LSR3Pp9|{WoCll;tRIU9i8ArUK6RiJj27 z$K`8AMkIRG40b@|l{ND+TT^TkkPK@Nfw!#?BPu^Ew7FKf8GoM)tXG*Wq4L|*n?b|j z#sw=V}X&rP^DXhYXbh&NANFP5A3HLYdCwub^|BBi_xI|W5Jc4FwP zJl}*UIE{VTKI>ei;epRBz;UBnI@94if~f#G;xZD#hfqHne+w~1hBC&Gqa-(ssaFPN zMg{EFk}cl4jsd>sOXgMy(-FMIfC4Ty^i1P>u`lH!G2)6`tFbvguy@75DnZkE(J*>b zktcg6F~16wFf#GcpO?(%AN>*HZ<2W~`GleXk9^>+?YZ*SMf>t9 z8aWSW;{6@Zh~q8fwJ8WDXU|Spz1!TdAD7M~yJYG;#%I&;n`u_Tok|g)3XIU(jZO>1 zBZAY`hV-14vtkkfi>?Eo9QoF2c$gTtJAO#{I@npd=Xaq}2qGi4u&A0#ZhTHJZnCPj zoy{_JFcpWZCaARnaw`-!NwhIjt7#&Xd@w)`cj6-o_;+BnWZR&1XaPan&oFu08{eRZ z1YCj~brNz(CM6Y`GW$U4Udr%MxOBiX7UNoBa!v+3#7IQNtio!c$7qjuINtS6s?uZl zLq=q2S!(metHyB>`NRI^|LDp|@JTIS^O-*8e}Ci`?-~Cng}t|b-~f{^mT%g#;X}Bx zR)5E@aHRxc3Y-8ToVC(QF?Bdn7d4iH6y0=|FQGh$!#h=!g9Fna&zEo7?U96aFOW2h+?f>i zCbLn?BBGJLYP&}sIO2X{gfQtN@Ld<4DX+##(OuZ0>DpbmS3Yao5(TH0V*hxPj~>NU zrXwU$O;t1T5pa*aJ^N9bo2*lOLsw6?Gbg7w7p~SBl&>0drGCGfm;w7lEi1LaEX8X* zFd1+b9J{AxdfbqmMBt7Z*uY+xY2_5Rn+`Sfk_*;r0qldHWbVc}x)V zamYP7R$s71%Q=>E*-p6fDS>6cNW_C{4^aUIvSY`{BKAulvfa0(qnW^mku;8B&PWCi zq-(^7v5MO7W+M%Z|EDGr4+W33kISx@q*CtF>Ds}U<8QF&gXEM*N6>hVV)9_+d-uw{ z_(~r|l*ThE>>jV{m7Tr#=;DM1?mdrm&T76cjRYfb(F|n#z0r7;*$}Qz7)SGNae1d! 
z^|`0SCY=QKIizGuvur`6&^Gowimq~7b~1ItgA|_!OJB!Z5Z_ z2HVu)31L3t0H({_VjRF}p>cd;ig-NDHzEN3xMoe`8;e6t#Z7^Ysp>Ij{3oqoiG63s z2B07iYjazvWO%!Fcw0)SwXWvgkb zd_kO|jbTm`!1(mF^78d~7GXbZ&Ui6F(+mfrzn)ZlinT7$_(3-)5$rv&}CX_ha#2=s(KOdYMR#rdx8P%_<^FF^`qpX_T#n@Nlk zJhZI~TZbZKnpc*zXeUxuy=Wkcr>sxdAt4be8wEwjbmYZ#Hokhf#3TU(JQ2ptG)(8DVUZWR1>x|V>$CYi&b*K0i>vnkv?q7 zBHxSM9cXB0J|Z=n_i!Do8KdE3`x6`tLdd_;Fkx6XcE@8uAnm?^x9@dGQZ8%w?_2l_TqX< zv8nasmLGRVJsl8@o<%0Cn6PzNTOGS=k}cg7{!ku>bYFr>EbNghs|}0dmJzn7ZDU;o z$yqO{J^6l8F}H2 za7>T?g@{>j1RlhY(~2dlXA6QaS2u%!a6~kWi?%TWgL0<@eE^a~P5EB(ccV1VLmEu5mN_ zpk!?%*co)=4W8)>g25O2Qg)%=w}&9hir+jYdik4Ap4{Tz1^+XL3gS_Vc*X}-{oYIG z+&TO-s0i%Png7^Z0k{R=A60hKqpT-SzKpoB&#llDe9e}=N@NViOic*nXdK#!w z8O(9=Q);Po5w{PWM3zdaY{QnS-L-G0hgAHvMlwY0U0 zH50%Z5XE*Ho+0qpkhv(CA%(GF{&2`eY-q%-9jW@9rrEsjDu?m0X$KmIyMoh@yx}-` z2p#3jV4RRkRl1yk#-m{eNdyC{OMgbl^0JSN0JF6a9uD7ddBRfD<7e=^)N8d!4uJG||<@5J!qYWD9kM^Sg z+f=Zc0B@HC3yZ`$-OC&6I`pH|-%6M&ypTdS7ru(o(c{$mgemNjK$DrYZiKB5Zpqz8I$d zH~Sj40=pYIR>~tIF*iApc!3bI9CEwbTrEa8GgveDYs{hKkN}93GCJbn@trj|jLEU~ z3-+!)G~pB0IBf<=5!-*oKHWgmK?3?O8p}W8$=Cu4IcAa49#A95xB%M~LuhUrWfe!e zcGaGy{#tBu=n+P7JV~4Zc1nC>cA>m!3?sq#rtuY`p;cz5>^-=j-mORPxq8{2F)t4? zNldg$&qX{c`-gejfx=j|)od_3GJk}X*tT`g5q_dvv+G_AT~^L?sky@w5=rRi&WRK2 zjYpo=Bs*aVPIF923MwnGvExzce^NsVMx@(+<4HAg18T;o0?Gr1a}N9r|0(*v_{P*= z6MwnwcSb?n+@n~wE^(wYT!gKNVII$@V_Vpfy;Y_Sjo;z}v)KK+2g3wXAoQe8Bkpb9 zf=G5k3b#iTwOY3y=>JbXsLenB{P#xsg@OA1!gKcG8|X8}VHtzWkp%NM;1szBOL@1S zpGNPg-(jMFhO8MQUHoFXXKoF7MKxZovl>DI5b)cif;^)-)wvgcGYBsOTcRLCqH7Uh z6J`raZP1@V0wL_gS_pxa0XQt~dhINi(V)1MDNVnIrZs9x&5pdBhfb2{kgQfT>XF+L z^CFj($-%^mUGp~ifVgu;mFLfuFUF!fyn>tJg~Oz{V`zjTtmIhjh-!volDck9)m#yF zKngahOz%PLcAW)oy2K`73MmEAG(HfzO`0>|5XTe6L7Lf<_AvyCyw*Hw4$EF~fRw@z zc>A(3(60LNgtnxCcJ$M-JLNBQqpaB!g14;VE+1ZjcjLVt7b3{ZbRZ$g>>R9KwRPFM2}aR?xhmRt&!-0H635T!BXbJy%sF_*|KWf z!#yQrYI_C8O=6_ux#ybz&qxb{KC{(p=*>YyllR9*2m^J}MiIN$cZ&%@_64GB*RIB8 za&9+%!J!1$oxGjt6{`A-aO#y|y7P18vE7;~Etq>(g+@F{c1m*WwKrsq{e23-j2Vy? zcG>gVK!N~`*(X~x8l@|2&~TH1OuDu4n`-88VeDxnYIF2S6W4Za6O?;gu}33*5gi^c z8dk8Lk$(#+On8XIbH_{NcCW3aOfl=>aaeax)*`Bye zRqIxMVGJP+lYzzr{}gwN6QtKHE-fHkyLQ_Qz`;1(^JUZ8#c^VFL8Y%Gg0%_?MYy^z~vRcvMAL{D2S5klpb8Y?Tzv$7sP zJzFk*F_uW*8NlR+alkDE*~V_jHW(e#P`LB}Ze+CdUvoCk?NpmMV7T`AY${Gb7W>)1hTXN&TnTI*xGA3ja{WbI$tehO1n7JdZ4P z`%D~br)ne!YUhc^TpScpukjDu^v6L@v8g5?X&XQ?H{I^|7l)iPB7mANQv?8@y~V01 z*Xu+@WNR)1TpH}&u$S<=&9Rzyu(}Q?1v{lqMtk;K64KcnR=NqBT5L){Q7!kr-2sA#Dts zO!4CseqV=f5wQBA>nJk$$N};+j z6IwB+DGv!kfNSLaadYUUvDuAy)X9@OTCbhUI?}zj5p1Yw3z*3aP&Q`6a#VWW?!pVt zme1JLAx+pFsbi9s5Sy&e=!P^QCL(-WJP@$(wvXqiI_&8Oc8%}S?O{FW-J)oTJx8bk zu}Z!wy^MN6;9{0%*ltD%$2&D0)MN&8*>nr( z3+0`Aajj&h(qd2sgR?^$1EWtdZR~(n8j5y>8?CU%m6u`%cNGun4c$v4hgj|!IplJu zFv~pm+Kz5C+$9JSTb*d;MUMLdu)(k5;@dY=z;-!Rz{2Wh=E^8{r@T?j9lQ9ryl^j; zdV8M=26R!pbW?ztEvNn(Sqkr)PX+;}hVrC%KbgReeOU&<8R$8ets5XU3!>ORYh)#s z!t7i5e9oqKaQ6D`ZNqrg=)@nZqw$9(1NRI2?n8!JHvfsEWDG3|XB#P-bGi7Omy_{# z*sgg?yV!qIF4)HA6jJtMLiv&lvTZ7mh-n+q`wJJ#xws1?7$XJM%pQCc zg+OWKIz=4MIpRni-$0JH`*BRQx>FWz0+wQzd7E_FS|t#}ET1HGY1^77zS{sHjehM- ztSvz&n|=|-R9`LU9bKjxP9jiTV^8~jsk~(;)ymzB1Ob>Nmsp8go}8zVRcEe-C2Uc` zE)Y>c00yZD;(Mbs26V}-a1_D|fYI!eO=}ceH2|nB`-@86*Jq5}>WGlspISJ#lYYV5 zG4|4FGE^RnUsAW!%JCU{`Xb!c^Q$i?t$uc?gab)O7{X78WyM}d&zzw7j0}i#NE_T#Z$z9k>i>2+L!F#-zpcs5KOcRW25ac z%tkI^762+OhdNW-`cUIVZOaNwuQ^y|1`gbhJ*glEY^!}%o#eZtP{uUY*~fPvO%q0M z1>iowMmmiW+ILkMe%paXO5v%TG?3=g<-k2R#l?A1tYaTNn(`IuCz$0Xh2q0tyJa;M zqu4FpOg<#lON<%Hj=0MK96_*iW26nj=veqTlM+86?Wc6@;&5Pa(#zH9v;$!!-ag

;GwWyBw2?0@xG2~wkp0a$)h?#Jco{EahEXYV%5OM zhbbm7Lwbt0%Dpxc-I-cU?7G*c5XAUXm7s3Gk{Ac=73XpwZ$3kER}gus`yN>jkep93 zQ=s|hnN&;uQ!p#&86m9lQ0$h>RpG(2=a{IfQY#F^At){*z#_3#C>sn?XWwDs8=))E zjt3f{D}gXt>%rP;AQ)-<;U;myXn?gN5nJOcCJ*8=vJNzhZb#%Rh{O;XcONmV+F>@2dygk1n zfnv@H)uc*?Me7}+5~ThdBtL^99Jrl?c}_Ud)~4oHZM%KA>TaKct)}GyI>!D8Y|AT* zu*n(&v>s3RI!n?Rd6LSSB9fiqp}me`qLL=u-UUbpS|g>Nr%|f{AlX}Ap^27JsWNBT zpJ0RhAnBkjUK$XJ=&c+=6&U+=Gx11BqYSWY;gG_*u686EgUaMEvi3ulu$6QZgz#6> z@3_&ggKs9RY*zzy3(l-J44DrzVV5YTWjox*UN(N#!A_;(o4uo2sBspEZqV>>;b3To zaG}ELp^O;{m{M^#jvKk$zG+T}Y=|3&{{y$TiEs_veZN;e7vx}0Y{ro;)WYD=WZ$tN zvddzashB#&#%ICQU`EqX!gkKUGEd_(fKG1W!0!oebCnL&P%!qvd3g1OkVLCOK@7sT z)Q@@0vcxL}H%Z?haI3V|Yyq^4OjuF;o$ILxo2X6~;13GSPn?eUrv-~f6tHnW!5Xn2 zPo5xd_YzIX88rP!Qm&B>BQ&A@Q7mbptWL!fTKGAsgkp7vsvzDtbl(ij6)O&zCR8!T<$-*+mepnDVEw2|9ON z4PL5^j1ZdxytR13F`>1e#qre{WGElAx~a1HXNFLzLpT5Or}!YNmC zdrBSi*A$J-Ji%ATAwt>|wG6QX9_7CLd%apTh9rm$-NMw1drMO=(oMwh-7;+9**Gdh z77vt^5~*h=76pY8Ie2I!sCFAQjh67X#LhuquuD(~dTVbSICYwYX$~4wmDi z24>jO{Zz-*-sId-ATf*^LaVvwe~;aIQ*>3TPe*zc|T8ZMVZ(S^u;fG9=(x3RVn=YhRY+0v(S$mXD_vM7fp=}0uZ3krt zGdYzC0uU)z?5jw2WOJPSwtVRyXgBu36w;8P4!n3qy>Gh6{+hu+wsnJpzWw)Cd3$w;F79~m;E{@PrLn(H;X zfyMoy&V*IeV!YxyjRrXLv1$Sum~+8?YGMJapr^y4L(xjqy9r2EOuz)Z(zt;gm|K*Rz2Di{cc~H~pY*{x#uW8Pj78<99h^i)W zDqjKE%ZBJ#x8X+OVcCv15Wg7gI(bQ&KZwUET~OPEgw8=y(&s0LM-+3AGSk#6OE-5n z;~44jLEiDyMYap7xghrM^o>aOXBG>?xkD~S8r~#Z_)NqdQBzBP(}Iq_Q%?R695ESS z_94uKAG8Cd>a~|tGpSxHqzEaElgT{=Bu>bCkBXytz;5Uu^M&XbaD|_JgthPoGAD~_7BtZqH#l=YYnvQ zv!_!0wg1Y0%4@ILilC^PCphX`A$6GFk4VxRz>mC6f2Q}Qw-ylipeoLD!efysVp|L)dD7TUx45d zoi_j*+4UwYOd1yE4-13<$N?vUgX2jYK)huPSISOPxlnQSV#?KfMtE800{V6u&IT-vt?S z@~-VxWmYpu7tYDf;xz5=$KQGS`;x#3I7Jv*Z5%G!$_B@3*Ydu^nf;)W?iWoxa2E*I zZEZzUw}xrNRJIeAMxyW|{}DiGdHPAP;7CW~;bUaP=H_rLRb=YS2mS~K{778=16yTd zqlUN_<8_SCwO%%PO1H~u%P4u5E4yW60o<{@o#S`2{1u>;?ftFi%RO6$oY>QjhV z0GSQr77U-F{22-)6~Oo4GyH|vBSSo7KYK$9v=`H#AJD)?#J*AwDWLS6KSlmE3 zA06v`ofa3%n?co0R3p}7Y6=3f76gzkc?K?MG7n+8IN6)}%p;*=`?hKXz3m2fIOEkw z_78+akAhNSFL`<&_P#`vVZ0G1Qw}GqfVQ_M6dtuBxD;cb@vl}cBdF&ot z3lId&JMDS%<@SQ{v|MjiXe6t#B-FF(!V#8pHY^#xgRoE5^+MBgjo6MoqorUB5vdf0 zJMCD=N3ju&P?uh0_NB2KQVPE^UoMbbP z7d$D6A$oqW!cznL2mv$ zfe1%T5Va-~}ClpN=hx7^c2Ndqb{5 zRk9&XW;NmLfd7kK&fF+omAXgdrRM^Z#vKezLS@)m6bA>ik-iKi6T$A#RhHP@t_i_b)C$w8&y5BNeD{har^?AM zZS#l{6m*TtnzRGYoWcjdYLKSsg$e-KO=g{YZqI)0b6L)63LCzWF&wu~oq{=v0K}=u zser7_>y4)b>zCb?ZmO!Q_S4Vn(5&q(07wKdjV{=Bl}2HdIn^G}cA%XKX0}bMo1pM{ zWR!wgaPcLlhY;&5? 
z_GiGkXmw{B^>oVejXSS`-|#GENIt}VYH{NRm0Wg+>nr7kosvf4xW_5u@1ZpBBhDfo zpef|ll(PpMaqjl-blJIVn-+?#ovZI&c;SWeYLHBM->qI2YVgxu%7%jqkGf#=F-f4r zCAYZ9d)UJv+K{2hC9Kn2`5}|7!uUdIVFGJNWgYz&`?`rhl>ms7DUTng-aJiwBd+H@ z2Ogz6?TsT?k@d!@?W@VjcA7Y69gL|zjbJ4$n!C|=@;)92R$p)t9Xs4W=YFFQYxlHR z_9A^h6?RK6UTbfab~m}u>p0{I9ycbqNg!|JqL)k(fOYr|o#U5BWx|@JYij{ijrSkh zy9H|`Z?{GwJD#sl6JZ<)`?6ZROsDy-ppS&9@|KS8lxK_w<-*Sz56#Ze`F(jVm`37T zVZ?Yat%edn zkoWWUPHOxb58=+NJaQ#v|PlzIab!{4;ESDVw_8RotO$LJNau?!Y zPFLd^(pRf#ljcW+8SRMaMFMAoP-hDMp{GyPnJsZmO2V%je7_b6WU`St0yIqMEW`zo zjrgJ(KI)kGdNYHc?-wDjKeCCHbl!7ic|N{N3v-ypB|XAvJpenTzG~U$Ui8sz(&`*YS?o>s@7O%g3qSr2v2J&{uuQJoryk(P8?m|PBfqDb+BOrcUHeoWDiIfoo(S@H z@nrt|ZCedijQ$>a2ER*IrC1<%gwA7hc`OGNDsxlvmq4NIVEa!VE&0^aimj}?|$y|$cVp5>MSfrzLBns#~KPVAfI z{PVE}B07pO+AqxA_A%tOQH6X8+Y>$oswHfj>h6_rX*DKPovppK)HjMp@9@Q0+8LLR zmOze#!b^I;{Q2)gueFt(2G(rUpi%4N3-9<`J|(BuZw9vznXV9OlDf88#>}J+cRIy^ zgVPnhom#eit%LO|x7X%E4>1@WfqiCD8fdFa@c}BcxIAb^qaIIGe~Fk#Cogqj%@#9T zx-OWIrm;#21h3mAI(dRfj|UzpR#{Roe6ve26BItV`Vyy08>VzhCH)I8U3#NJFk>$ZU>M&WGtg2I1syv6%tdkjp<(c zN!A+C7|u~6gvwpHX23^Z!d{PcLu%mhaoAH^%DQU+rhlh)g^jHcoPo2A_+Mpag7Hxe zF-5Bjj_pklW_d0;@hI8s3hkH-pr(n(#e(*rOn6)^v_E<11I`oC?@#tW5On~rg+%(zX3c_~o8T65bMX6I&* z3Ttz=JDhAl6+vD)k8cp8s7~wn;8=18H86%ItX!v z$W3iMYf^mV$@qokGP8JlHVx3813$=x=n>v`U!8MKZJ)U3iBAYgk}(U`+Xar?vE@#Y zCPNkUz41c0N?n7D zDNuS6EQw?1S5{_AE51OSJkNGGy*D;M3Wa#KBbp$Lu@1bXgia;&qZ^#9HQmdU0NW#* ztb|!e+%2LK)#M2WfcTO6p7A8S%j9KDAF1OVN9{rH`LprEJ>=x3J2h>rzCnrtw+}(g zApye8!e{@BpjmYZzTrie7)Dq82JbQE#D3@s0Ts8P8bjL?_eT}(3sXsB^fWY7K9{l0 z!aS{DW|H%MIyUO)e>$~EY9=JNXJg6-#`VeXYzfEzE3o}I3-GR9;UkvEa<)&NJb9z6 zxg_|HC-uKY|A?;xAY8PcYE6m{I;%Pl^AxUeY;!SP1??fHiyNN&9DBz4>F}20n z%q+b@2{gn2zi!l^e=3HZ+X0BtGq|Byxd9X+1QLPP5$A&ZhnPG{dB)>#IRl+hQ;39g zY=6Xx$}st3eT`dV14b{U!`rz~}?~sVbj*QAqsP#^v_1$MjE4pB~4v!e)qHDELBO#t;FU#JFoh z+Q?5gv4kMo0^eb)U4Sph+ZYM1BTS90xf}1D&^(DN^7Q-md91Y=LTm9ZrGhlJBguuy z9?k4z*?4LIpXrr2C%Nu&sUn?TTS^%(EixoFw{@J`!>r&d9x2u3n{jz6tej+M{%s+HzY5(s{y&R5*XGEt z0R4SEJ<%p(3u6SQA_d=U;27_4uT-=Dk?vf&D)?hFIW*5MKn3=`yQgp?aytSMc zsl+c*S(8gr)Rm+pi=rs03IM;Cf6p28Wfuh`gA}2bT?YO?;#&T z@SJZaaIa3vO$AcWUSGi9SWgBRo%G&`DM^2UdSkAK;+jX7P=(xCo0HzhAw2IsMZppi zH4wo2{p3rdbA{6*9!XCSfMjp+gky0<;7iFDO!<$W37Nw|tkvy)ZNCy586#Z07CVuc zefcI9(MVOuGlEEg>(prmvj@TDu8y_kmN@VSR-`&cSnE!PWAS2n^ieN!tkQDie#V2Y zvSwb(jHuwUt&cw0Shv%l09X~FEcQLbB$>LafGHM_+v!jWCZLRZ(%qI(!N(1o=R|iZf zU+7Kvvo=I|ymw6uYHExM)nI3*od>0OE)IIDMi%1nT>(%LuBCEhpS~aQcPg0?{BSFq z%lF459j1;$G?|hkeXvG5jHXf&E6TB&c`Prl4i^+?mmv8K^AAxks(Eh*wC=e^b$=(>!+6w2w{qk(+71$5`rYd55A2-{Q5OJey@k38gK z3+;a+Yx@E_>tQUS_3uu1j>w)Ea{Zk&dR@spFzqknJfE7#I@haz2M z`96$=xz$r=zU0+HeoGUfq}JQ^AT;+VrSp>V#6W;1VyF#pUKpcfAjITXF?peYWd_+! 
zxnmn~z1)m#t7%tQSvHfZ(CDTV539JL6nRWwQ5^mZFOeH_m?*3;p`oLMi*Zof?IbL{ zR~Iu%k*Ohcx!T99uyG8@jKK6tb`r1+c*78Q!al=Qh(_D-G`nkM&gS@LZkE@ownQ?^ zIK!B-QinV(mXbH9H9SIS>K!&kDp&Yt<&8J(-9~z`7vhW9*--$8s)4bBs8fn!P`BG2 zOO#}-i!+h+4u^zj_m+K$;5S>E zHWi^kPsR2?B~<~lVY1|E6_+90L3;>bim>0TV^!rzG{;oQnkg&3bw8WK4eCOh$^cnT znI%KwM@V2un~3VOYqoyMfyn(Kn~01_h|Db*o+-n8p_Wr1ESkJK5M$l>U0+_(h0eWb z&sV4}gcg1NK{@@QZ3JD=yGifBVpnu0wt50ChVeqmJcOJH*kVWGk@A*3?&L4a>6bQm z=&35(;%=1^yzr6YHn6ZaIwyXKLPzE+x7r?r2?a5%>$HS#QpD;H?3FYkxw;moL7VQl z!v4Wtxs`NicK{nvv3kE@;bYXTiB~bb)BypQU*KKnQHoDYnf_P#PtpIxM;58%qUthn zgm9$jqe{OLm&TzIhY=w%0&F7KlR-k0pUKEj*s=EV*LHO;@HyMuniA;BK!IDnvYkV2 zQ*Ll}4}X_`m*3?>al$rhE83|;NrpoON5f{XxPXZj7hbtU@oMxK>W%6b#=> zYsp=qa5M@iHl0Hfy$t0jXYI7s%RLtH+m*i4bCeM`ApC{4asqsDm0v%zU`wrvLOG~- z4TGY4+m1XQ-(&V~Fx12|X-sNYC9T7VCK39O5V*WM90OJ=PQs1BE;am*2!^;bz}j_F z7>d|E4ADe^+#}E#_sQvlN6OPrr=&BiM`Sx>eSxNRnaY83!S>r6$Bxp<6hx3=nUt=v z)M+;xnCOvlL|q)kDdGj27(65QFCKewnq~ExEf@>(cGTfHfYh#LGXpu_EV_M)i=}OQ z-lwfG+iZK&;YB%OGCNQ>@6Zv%7pMi*6&q^XE(^VI)}Fb#gE`?hf{G3Pldb-Du0-T-rR< zK$;74(+&Dn3fMIp>kBJR0KmfH8zZ04H42&DT()(gGSC}oJUEWGlNtmxg$24F-U~Se zhe>fxu%)a{MpZm)PgNo5!SQFY@N>NZvU}L`vsrmmr(c94IH?iK6~vMKnnBk0vp+r{ zlK_ok%OE3caKx=+zo@M#Bpf!9)jpPd>?(w!qXftBHW#BureZ469lGP~If}}1y~RX zz!e=_#o+BgW2Ob2+l&~tQ6sX=y6^4=YR&=rnNmyNK8gn)`@w^l7 z9P?ArWLMXyd#Ek8npy~U!QmhUsX_~?4%z81?UgW>FNdt5n8&-^n(?GAjao}Lc3*)Z zrzwE_DtW|TBuaP6QNTgzlQ+$x$MdcO-Hg&zxUAG52%j<`rd!McFyXqBO=N4Zvr~3b zw!>^s`3{{O8=G_^E}E?$+s0LDKTV)R`zTN=!TGkCUch$W5ia)~{%c=QE?jQEOwkn} zilN#`xteiVD-pA7zx;Hea-YkbtkM-HfqWgl7t7Y<&Q3fL($yf~#e*?g5*eMc09=pTc2d{| z?;8cij8@?K=>4tvS-+Q$^}D!O|NLkENiQu#RynJKC*{msnMr9b#uru=F{1& z2tr^xo{$O{_6Cjc#lplk(Bi1H_Ovr z6pJovE;q$3HBp_;6r1n(hgz9hY~eyPriIC)59|Mb_P%c|8U8(lR- z>@gy6Vbk7?ZB<$e%Fd59p33*j>2G5qc(uDSE5go`8-F;qM-b2Hbh)G=vz=2hOd6gu zU1^&UovER`vljAOf=YpsQyKKb5=R(uK{n!WS*F804<@eFUV-9Py&D!)2UVP--Vuo+x0OWyhs z_0A<^izS`E*DKyu;kVC;$ zgK8u*Hd~?Aa#@ZQuceQAUh_h{eHV0ia)7Lzq0=3y%_fAvq=K6-r$Nm#53i_GP2W%> zObO>J0kpN-aoe7>h+X0iN(cOso5{9Y5w`Yhs2|Ek?iT(hn;-;x1%o=^boziC*1MkA z`E+?Ao0XYJ*q($zv7l<+AOcF2~>$(avKO-$*_R`Q#54O0&YM&=7>_*RMU#!joip)BZh;W zg-Y0(N+^BqdFVIs44B)q3u~>WyS1n;ktjoNQ*B@|*@;j_NcOe|fD7(`eh&KG?<;l6e?~oY|Dcvs8sHcJms}r5yphQBDAl@H=?FPho*O z@kU}JL%*vC7*OXY9yKZ(Zc>RGo}dP}h2zR^h{55qsB|GhVNZ}*5qC5eiLy;Yw_amp zHWExhDU{HLlLF=-PJ2mT5y8;xy?h0FWPHDjZ~K}dK)m2i7?mVKj8jZdVcv=_#0pVZ zi8QEOpicOctKbfnBdAz(7$%EMn})gz?X^PD+nlh`AjJ_n2o}FBg@PuXTQ=L%2QRZr zsk;W(k{DYG@WnTP=My8A_#^Ga?5~2)``tKrItZ|59`Hi68kbjwD$ z)$v>rShyFSw|7)vB85@fm}5~t@lv_)g3U+E!95ZSvQ7_-ND#qX2g2(lFk_*{Yo)%U zVb}?kbiWt-f;B8G%M(6mXUUpniy6OvcmDA8GB+QOtDTHKusx~{`oQlLd?H5Y%+wVq zlIcZ65&OrrqZ5bPfQ4%qU;>7%hxa zBG-tz0m*wI^+sjSVsByYU5XQTr(YW!pbB!8Um??!GfBoXNeZk*ra3J2BmWh(=Q=`m z+pgabN-yLHNY}Xi0eFk)jWK2`@p$BERTDWu){%*<>-QebVG>n+(8lTh1{6R*s>`hKNRqy`+><0M-D_Ab$6Hm+AUS^ z@+GCXAOWAFI zHLHd0sE6yf$;#|?I^@JEBX?vSq@O`J`w61fRF$DwaUNSMMoy;T#=!kKYV_dIQ`yW} z1>CH`U9D+|>0oSk1a8DM8{=(N%0h$M3R8+z)J`~QaVT+#J!k5zx60G8lt?qg@2RB8 z2A%FZt|u_!%B!|9OSsy8>EqpzJ+ilvrcAxg&cP0}JgIE^5?5&ij8*u5M6r4sEiMWg zM2tOKAsjB&jD|`!$x;9IeNf;ojYXWuNtTZ{A3A;TYFY4nyY;egH6FFDVn@5ag-}R1 zyf!cr8%wGqGvsosl^(;`eB&E$e51VajabM7`qjpcx5-!8_lTi94tB~8!GP<*)|uPL z9zCVVDLx3DjuZLNq!Jtf!JQ&)S6)d59D(iJYDV&FsWfVy+aoBFuNa~&0%N3`7(kp2 zM(r*uO)~r}VRf}_NX&GnlUKOOPTSw8iv%|-RsmWQXEC1M9`)3PuQ;=sNbBdaC?6dE z^7Q-m6iAeKo`H)7OkHzN;_(cg_r=eMCl&B)cBBx=+t0qtsxUrfaoelJWqI`R_=$cW zsTeNhA`sWed9U36_+=QQ@u{1%!V@WL+wQvg z3?X3XPz~H~l=J6-J#EEmOwP8vR<6fOus?#flvLA;=BPC);7A4dX=>+k1CI>w(7uc^ zissA%GExIML_R;I9J75=RiJzoIA#Nb3ESZL43p11kR2JV7s$sxIAy2P;r?#51Dg12E4 zlF{Y-j7p~C8%Z@%T+|Ef*H#eRx;9y&9IQry>TYr<;aIsqoqWX7nd-w@NhKNpCsg;t 
zwsY;Q+PXbj85FqJHV;s-L$Ksp=E0V5SUmAW?R%-*l|%ku8`~l-$J+D+81u|n_p@s< zlXa0l@zXNh7-XIZB|bJAWW!SrZq!>md`M>19&ysxL8$9PI0f9ooA-9ZITxjyxs6kG z4DjcsZ3kvRv_Qm17kw@UOx)>Lq7{5%YuZ?f%3{mH8Me~lQ<4g1L0l3IWFZ*FbA)L! zCuI>0v=Pa=)a>v)P2_^mI~ravQ_O|_Si7}J)g)6;Xb1xw za(apYUgfynx2+f>uwD~-2C85Pqbf$I3i+Xv+Kw>aoRjnP8^t38B1=n@W7vyS2}?=Q z4T6jzZq1eD(r&$Sy|ui)&{&K&!ZKxS8Q|^+I8pzec(NoMIL;PCwZHA(Jhl;XG-QiZ zQ89gY73u`%%NCHMP2~ycBs-xTLCO{GdNCdv)V%wS2f|>+(D*M`Rcxq=#4e_;ZSxVL zJWBQkd`M{U=}>JEEW=~5A-Bpszl=CBx9IF)f5OCcD+r*S)B_>O|EHz`0SZ`xbgKMfq!g9-D_DLJ23>srU;W5@cM|*};CO)7I_N7N2!(g^Q za_q1K+>ORu5g1@IhVJx&eUH+E=}sf@M;S`CvbYcd7#~#0!$vig)-)qCo&^;7f_)iE zMi9DC0=HdXsOAiIjeU*N_i=Ho4=74_`k`&o#QU_LP9Iz@&nAa#wR+3lbm{E`cv=B)lXNwrN`*quO+cfz zR9G!Er?e5uR>RLr9E&;C0meM`*biBjTuH2vy;5e{p<=uIE@lS8hO)&=ld+lVyNz4Mkp#6nGFr=+aTqF-*kER~H|N}-EAuG%4$s8tc%#yov+ zH_T=L8P#k^f9yu#Z7|g?kejVD*Vi`Pk?luO4-ioM@m(6UQs~DPgbWdfA~;$1ZQA`k z?4!egw82C6mHb0Kq4c3in!5SD7_vh4UajDkF+q{@CI=i{YmXPKeUNZ6v^={7_0g9@ z%~REp+QyA0r<+~N#tkZY(FQ^AD>JsF)&USh5I49=rQ3ZIm2se~Hhb6xK?wWzeVRDL zmfYYlMEH(Aq&i8X!%&+TTAQUd6d)>L&sZ(3SsUER-;ilyZ&rr7EF@?1oTw|xZ_4ZT z0n8w13Wre|ln;DP3GDbg>x&JOjs+|6eAqJ_3)Ye=G4QjR29yxRAxt}oQ#^B(#hK#m zEDQkxIy(j!td&>dN>{B`bap4%1RIL4vc$1&Z7$tiXDN_@_o}VU{c=AM9A54TR>yyi z-8uls7WdQ^5RTGq_vT3W0e&?pf>F&$wYLJlsX}miRQ2mbvmZ-=BU78Gw6$d(Y+t{2~46&B+|S>(0X_AlklZtFYh29hY9NgzT%|7 zL$QTC`S0cU1N-6h!HqJucM7ydvTPXy`|_+>sUzNqH@>fX-9E)>?^SimX|)H{%3{s- zQ)XWNOK9zOE>tD9>-1pCT9;mTCv+CdvFwil+;lbC!FJ#?2w${XyllfFIZ>BYNW~02 z*Xe<6f1gAeFWA%2*2tW~Kz^u*x6?*E7Vn*s!kDTZZ;yJBJmELrdDpf@oxst$XNN}h_9-G#@?*_`@f6EWNGZAWJ)u{w8!n8f$m_kiF5=*N4ZRJZ8pyECc9 zVJh*-%%=t2c`lxS^TvfUEo-Z16>Q>}*qGHKNa8SQB-yL!$00QHOoZm{j*mCs_1jYE zZeN*Sj;Au?T6cUZfe2Eda*xs#aDXWRUhC%(i>};GhFX(j;Ao|)8SKJ+VoB`1^rNNK zg@Z@cHuKY)6LHiSZ=YWD{>?2#Z$~M^OuRf?J`>&AfA3-(T>!o<23uiDn%}p=`TuqWOrdO{iiyZl2I0#Tk%*NO%D`STrZ| zQH6YB>D%{6rjVS5@Z6BCWS_9Fpx|s%vF|HyBeCMZR2i;)fW!*fw_#rE#PnRXbGmVI zMI?0~1cz4sqTs0 z>U364(TI6f7`C+*9*Ifni-5WmR-h@xNBVM8KZ^5CcY+$)jZa(yiQjOuia+j|bPxv2 zJ&~76(}N0#!MImelB}QkD|oy%^OKJ-(!cuifHNvVvWFpD3u)O5Ro}=2Om6jRlD`@B z9(ZYp(0HHoqhPF}q46!&&`n=8LVYr(r}*cApSIHn$|toEiYyN3Y0tqj-i=NYL+*rw z3}_0$OWIp4l;SlcrjCktIU)^ba5qLhfga8}sM+eSES1)hjRd1cxhVhEj{SpwqzrK` z?7oKY5etj!Ux*K&O2nhHXF_+RY{_S__g8UpaVisBF56yVS?-$9S|XMxZWGkT>(wof z(hO}@!qrACt)9!@H-XfsQN>K4E~M#G!R6U!P5E6FUcX9^(S9+fGJQ920&Eyc+i{LP1fQaFJB*iBRl_4L%@9DYnnAfc za9^2|3f(4B%ysjEJx<4KHhXmm@{Lxfy$<^{UbojLL^8sD83!#)YUp07mdxslVVCu) z;?v#KOj=&^A;++?$z#3>E-J9(p_WQ!oOBhjPqk2{z+Unz;tZfqFc*h@%7}7;2H)yz zn6he9S#)_ezOYLuer>tozJk26BruKHa$_OUT4u<{6a3Im?|ivK9#B6jXrq*vbvnz~ z2h%O`8Z$Ntl{cx1Ii~`&%eIR(G`w7P5D1$A!h4JvKodi44wK2ar+ShM%4CuNTkhpP zgK+}tXu?KWVUV_Td$6^*xE>i+hVIqb(8}5}^!-F=Y+1qP9x1Pfcr*@Ss`N<#pnTH5 zKhw@%&;Bd%3mfS<+_A^wxHj5N7|QXEb(T2>(o^wcqGvTe7V2VO`Nl#K2Cn-nB%ruwrpnl@UtBJN5uIA6SnwH1T-B;O$qd5P@s|oNU-$$D{!^ zG1aYlWcrLBlW#my&izqb1;{xE31yNr+KMRlnM#9S`2J}v4fp=(_iQZ|P{re(O{7s) zY)Nr$m6I0oHlnIc;2I}?Cvfb&m^Qs#_(psHI1R(qxu{}faNR`c7k#fZ2Ya2;0{b}v z#eNRZOEJeys5IQq!+yuczB$0r@;Leuu>eHV#kA(RJs;1%-Y)Gm+sFlgw)m4W;1aRA zEwyDcYf-_T`+LgmpqUBovcsn+QQ6c7wp*m(5@?#rFTaV}%7p`}eA9K=gan(Zw{Hcf zD){}uCZw~`7CETCMUPo_p3Ih!844HuaKU*?Q*49 z`Q^*;SA2YiYjf_EMKB)wsV}~VKY4+!P-`peD`B@|M^UMDAiL4- zIl%OyF$O(`{3facw|SmExKgfMiJt^T$4?Df7uo1IxY%_f!Ptn7gofBq@8KERV6<(= z-xpx@{^_Tt57~KIPt{`V_N$Rh%6$odJF%g^R~p3@`#?J zcog^lNcW*_Ckd`RLl%ln1&|NJ!+w&UUu)R9z07F{*#qW)Gw;sY$w!pY`|2-jACv~g zv@pv3P`~>`3c=n;8DG?qO?BnTHyzVO(7#6N;#P`lRxfU<6;-ur93T!5X0bIuZ#u6EwIyR9UcFUG9bD9L_Fz4Zc2wk$ zFo3N(_rpF_CZU9cNYq$Rs6+}(;6?(*B*|13l0DC5L%K*3FJk8rL_ zii^zlNqgiyxtj)Q=ZC(nScPtVH>e;hmar)@j2ZiS_<>x;;s=t9loAlx8;FU}GQ;%Y 
zYN8OO3s&QLc{PsEH86|89q)JqM8RbQxisE9QX<&|JczIL!Dz`4j87>CQ&l!t6_ZnH zd92uKD6g@x8q!zmB+)zG{ys0Lz*n4<`O=)Vhf{00BLtHFW?Bkh#N`+emKoQemtFbx z^s95?ujna*z5TuuVm1+APk~aB79>RG9LbPe^|&X=%e@Mq%_8{)B;#@;#?n0G5JY)_ zbF#6%fBLhSeXUx{giZdy(4&SLNk?J{gi}h^|F6pFXZB&mbQ7+Ho_OBQFmN5sQhpHD z0ZoM`-1h1Mnh2BK%US1;fc4r2a6=HRilaC}ThwXWYfmWK|DnA|!BT4B#FX1aI-zFT z({5C9r2Rx?c_1Mui$P2B&=Q_P;k25vPSf2rXcBT~TP z13MXbTh7Rl>~#G6fh7NWA-)EjVTd{x1B2RzT7FRspfcxPl=5?Q^Ic~^EEem>(;HWLUn5mMAO~CU#n-LU2 zSv>Zl>TX?#pI9{~0SFSLs#RR2Tfh+kd7M6Ysr)DVrn>d)CAU+A?B2*-Bbkc7z+ilM z`r~Rhe)Z?}#dR6Fjzy`d)~wA9r>CP$bAv(DXOEz$uq5&Dgeu{lV2wMzt#%pS@4Qxf z-u1qoAmM%YB(F|_xVAzkAAa@cNva@U>>57>|LA@BNZhZrH1{VaLwfwYJwSREn z^c>d(4xWP{ppS~BQQtJw2PF*8eE(1wwUjnioZmBx0Pb-&)3Xj~t#`BAYc=hnm)O=S zlWzk(N6Yia?S~G@b^8V)TrIEJ@8xDB)xF=t5n?O${8;Ej)9Jz{Tv3JN&qG0SZHY4j zg$I}0l_8tMNJE^uO4IefssMfDdT2pg4x-MHDsxblDI*c9MQSdF6c^NHXbw7+5*Q@f zrn>R=qO-ww;;T!T6x;<^;wEZnrG;gDiM9Apt**_7TSQn9 z!fo3jEd-WVy(e<4nE}c|a8=*pOBM6AL2HFYvals)wF^N^`;qaomQI}DF8giy>c7~T zs*88Sl-9cP6aW}@6DmW?TvnSK*~!De=5#HA^l3t%7^9y&ktSTC>gpvMOc2(-4r)@& zD}u0X1eBdS=yNzXU5YP}nq+ekRFaB+$oWCuV^w$iZ;zF*6eX+(@nQuEn6dr4Y7wSb zzg0vp>T}Vt0bkF1O#{NZn$vj!Y>9baD_(FAwkMELJan=2D2yBH+$K}l=0Tc9CReG{ zVJgKyxp$!D^dyYs+sSqptHFCW+Y26jy1f3TH^P+Bf!3(PWG@98qCn)^whlzCSUtAe z?nKVM*L<8uc74S(o-XBi(iuBF!c{0f!BL)_rqFmEXb8y~2`^R^?Zo7qjtF^x?mvl1hRD_j_pjNNk6@{mFqbY3CKx4(|S9`I$zd3LG}{m`eM=6LC00!k<>&z?j};^ zZ-h;uY_6eSnnNzC&R#!eG(YK{Ei=-VMAm*JsZ#z2h4I}}xAU%(akXxvq?*YE`#1GQ zk}*NHL>GKH-bARozXe^E5JW6-n;09l>mBu376t;KnCv@PrZOQ&+06MWN?{=-!jvSA zXZ37tEEyuxx7CL)s1tuzj(-yt6DPbv|DSN)-?Q-|Tgo(FV)Qs#QKZ2E^H;fF|06~TaK#u$wvBRI^ZzJ`-kc(pWKgH4Elarp4po) z=VxO+n`*1MlQNH#I#=FG{(Sjk-z5R!FbdC z8fUHcpR|>cl}+g~+q||iaMn#w1nt!n5_=uL=%J@5R}v2R>hPW{55-;y28SH-%RZiG z)TUx&-Rj%{CB_@}Kn94@+5}bQQQ=_w`RcB=X46A=0^7LmoU+Ex^DV~wnoS65N~2tB z#fZ0rPJ*V#PBErxMklfY7W`k9;*$sE_}B4uTsc6zRH#K-J~v%^liI<56zfe( zXw}_HF@MNgY=KIR+|QQ8GRa266)W zivRqFL21YebW$OBt%y*f>enxiH9;>w!617wDG8c-+38AQZd zrM>#gwnh6m9VW)Q3?XtQr(7)tqxRA2T3IyVSu6npVlv5El6Ulpkih6BsoJC+begn^ zqJZ{R%$QUvsX;91kg&_gv7bt?LS6#uRFkd5b(&ZFDfO!3X#5b!=lEwfeUt#fz?fO9 z-XJc>DbLVmDKg$vIfvG;ylqRW4eIyC&U_*&PF+2}kAV!P zyim6HOAt^$-l}6iwWKVS^6BhQ;A@uTFK8*N;~Ug&CFT2x(sdEusaU^M*bQaM6|AHT z(VuMmFmBn?d=;}Jab!2n(o|zDmV^ey_9EDn-< zq6j)RTaCLjr#3U@mK0)61YNS9s32g|Zj1_>>mI#*iDaR*`uQ(=ND&~IljKcJX>*S}~`2qoAoj!f!Y@NgSP zwTI(aNWX^@iiy^MggVzU!O;?919f9!3;SeS>6nK2fc}CHMG###wImz`5K_sVr$aj( z(ScfEyHovq`+;g-&pl^9R6pK^&&eZj`>DMg1MRFrjg7dHa}Re@*ma|lFuG$8jI&}K zKD#a0sm~SG(Qf^m|NhEeg-+qt^u#2@3A=~NoHnlW&rvQvCJ4E(!V~O{jb%dg>+Oa| zA<`#j0efxWo|Y7+S1|;6>LsL1ku>xahOj*o!QG|aQ$op9*jePTV5QFyCaq^KMa3HkiUT>L7co7DRy zk!-t%9R)12OLsIW_pe{Z?zd=E)3TF4TRN%QQ;*V(u`t|6I3^DJJ$|k{@+}jR74iWV z?I9S}5PFY&TU#wlUUr;OdVdhY6_Km* z%?V5Qvp6Eu=e!Xj)!7^(R2$)3Di_(y&8`TlD@QO=Yz$~DH>zrVW!Jb$a9CMtg%&4- z&wQwB;`T(n4KAis;Q=Lxu#J%ZNR%QX-t!$CAFwDSI>r{k6R2k;7J@rEXT<`Mq5R_u z3nVwFt51lYeAmH&#qH6%q~xf+8&qn%WMXi!RU}Q}1>pJO6PZY&dKnaQ67K*|QtDL# z${Nb)iKbIX!=Yoe_@j3*bnST{WV-Kn9jaF?yyL%DO4YCJd|@f~x9o>iV2(N45S-m~ zsuZY+y{yG;W0blyH&H3E5{otL5Yo`t;ObH=Dn5c`1B21VI@{aoj7h&wdU*!~6Y!P$ zAbbRfD1e%*Dtn~>Y>a_*L^}KFu(VmJJ?gXe+{J{;;TOfdg6n2)XWONQ?DuWjnG91mhxRbv4ev)Bq3oP{M>YYFyn#U zVx35`jN(i8DaMfPwtepGT%Moid0dwe4{Zod;9}1LP^dE(H-&z6F}Cfj-6YB)whg3* z(-<+|CTw(G*!L*SY-zf-Z?pQgJCkhXXl>L`xwDwm4K^aRSfQT>FhVtb(tfG%3Rj|S z0qgA0Zkz6SHpPl)hnTiI!`o;vXdOqn{^~}Cu;*!X6cay~Kwa3(b`%<0HCCAmtHSM= zTb$slHb){wHU1D!!sq5l>K?RR7hc&({^O0(Shoqm{JLl&f4#UbV|@_q5u+*fMhH!i zX4;|(%mgn;NfuCDrZ#%1v}f&c?HUjXtwbFQYFMQ!0%)f6ZL4T$M56Ie>_>!AT?*|- z`NLpT_Gsl-by6`Gs%o&Px~Jk3^JQVlzD^-}1ubLm;J9LlqJ=PnQ^Y~BO4}N=lomKA 
zl@J&nW9r4dGJ>MZDd2R)K^l(G2K^0tHB4Hu*z89+?vz)Jm$bR|2amMtuN|g)ZI+UOklFoaqDU}}Eu^zNq9q;2Pn@dcRW%je0k=N8z{Yb3-KznR$YiwI?GKB#D+x?GhP+-GX=63svM6Hh%McX zS@mL-kcDe|(JHT8Z!Frfocve7_n1NxgEaM2XPRFWkJ)nm6r>B`6142Jec63`$?w>w z{6KBaVME8E>f{!Q6DkQ~#SFL6!)DWE84$z^<+&@NL1sBB+vp4XIEgsWoC&(vJH*T$ zLpO|rN}!dLFj*!@zCDTcI^dGpF+C!iBV43 zISs%F&~Rs5y(R6FN{t8?z9@>Cqj9?2?|5bv6=UF+#PvLo_Os*0fiCB08k zZgOsH6d0zT%R>D{Jh+K4DMc&J`lM1!XphQYz8v4+sdZBVNw2=9=_oYbK-CnHV$+@H z-YrkW_W`mf#KO8KHGH5-Ee~zwxWW&U!KdT5mw;5w45gpAmbH6~tilY?sx63!&tZs? z3`fA-7Nf&|ivGu5E7a399w4MLjs_3F|)foXFHT`5)Ax zFhxYJ$bkT{UQ(R5v;zR2zH7w4Q)3ikxg^$ajnK0Mwtih_i9HO{2W_zE?Bih z+)IcCsL;;lVW%!;xNs`7LG|t$JQ^g!lj{zJz@u%NlXPpxU)rTPZ-0{U)!$84G%9Rs zl20Fl)|KydKGnRLT4On^_CmSh9u+Ug{R|jUjo4R1Di~EwJmjXZAs{Ep;EJ=cgJ;AL zf)gQY6mbTSj$^8d@3g4Nh5GN2$tU+PbPp_bV&t)~wO3}`5naTWh1&Lb1{gNIuWevr z>Og~vuOmGyc~YDGva|p1&e#m!{Mvtt{cHlo91iNfB>1R3J|t?Hko?DAFW-z8!bO3d z>p=h&CMo#$A>n6U2YF|vM;6JPur~@VBfuKXo5u#joWi?}$mOKf;X+JxR;kIcBFW}rmrQ7A2byk$30noLqH}1vcQ;af2!1j7 zWPoE|)Uqr#;rrY>4tcy&-c7xOua}X#E<;r{uBM-zS1vsj^K$5%nhVZ=zbQC2d4RWx zw%7|X)i(#BQ-@S4kyXY2r2byjmSClP&hYj;28Jwc4x&)UxpTSva(ko)V(ll0vjbU0 z98Ft5<>}eKUn-BLDU<5{yiM>1J_EcLXLQq6DH2>k*%N^p_Ao5>$?dZzqLaX+>;p}HQc*iLHzaIR z0sC}44Iq(;_0hGk)#-!r{ty#tNG71t1YKQKCLXXGo)l6A0+Z-Q^tiY_KW3_Cv7qmQzcv-BgZnWg=OsFT;3mJjHl52q+3*G zK{G;`O}rtVBchW`q&WTy%k;Hgb7!GR{Iz*4{=5Wz0Mo(VTWfgHu)#h6^`Ei@sT=NU zS`IiGJN|A$y$3L|F14E!UTaCBJiO1LFPNx?KyV}_e>w&}& zHnEnj5su~>8+$k+-NrorIB&MR938RxK-&mZ-ensH_cfL~=2NJ8*8U^JCe(FX<@Y2m{@&L&|r_+lo29I(=E+D{;eSUdI;4Dj0lqN@J=#q%~S z)qD3ioj@nA0Dw+_Iqm!{Q34--BXx`1h2KR0EOYR;)c-nyqk{uhDwB=Sfo03J48c~m zYP7@MX)Ar1>J=usDsc!!%x^xmfTlsfQ8J2f7}(rYSq_Vq0SR!n`JH@(U2Vsh7$pX@ z0?){g{Zba5f*9kkQ)#AM&vopFbT1!at&j(mkgb{0oU^g-fX<=sleJCq*+5+v4tAng z2D85fpk$K=8A=`}fi-8nj|D-5{uuDxa7a_R`u99@Ho%29U>vNl&^Uy^!P` zm8KJK3~PrhbYEpu@Ra#=<5x0yOsT0#ERZ~|;CIUH& zK!qJI<^Qenj;Gh*+oGodjPR3fb=5l7WUlgs)ML&^LLqjpT^<+Ob$}CJ?*aGKxs2KN zZO3ErT%WgfLtld0q~PZQG_gdNXk&x!*;q(JJ8w(IjlT!36b7n-ijp<^K!t~3Ncb>_ z(h|$g=R>l>eTho&jXPWFI70}d!ln{)8wS>ompWXZIuF9O@Iy>uWUY?kI@If^6%6fJ z`X|oZkoyQgCSE`Xh*ZT%$nAj-upx9Yup27gY^533UIJ;q09Zg{$D{vn<|#%sz{$T- zdBB%!Kod5winb*P2k=`67E2X72L=L+N7vtz(;u*V_v#sf<0QKe6u01BiVk z%B5cL)qb*_JivHMYOjB{MB{NWNDmW%n-bGCSjl1r!+&CFf9;0Hy(2^cyCBLEf!Y&> zN!_R)EssBCN1)mS69dc6vK;S?^mhTMiaOfBQDX;j)H!uk3s(BI%RdMX>0u^vi#X?q zoXuA4_BbuBu882lj>)FHa(Z$BqQ?HVD!4}Dk6IrsE)n&d4R;vk=WI-7SQ6?5n@pZZ zxEWXhpH?d%H2*58u1bvxvE|tk<)j!1y0~J*Z37Mpv8bdEy3C*xY{me9U>}2ovCG$` zu6>AzT8W`HYsZxM?H0iyWP$#Q2Y%}4D&y7y@xEr9o|E^=>2FSdZgbzou5;4yEPcy8 zHYiAhJKc5^TQ(II-X;S`SFY_IX*RoMl-flb4h&sz3ednrCVoRG*#^K_ita|DUQ`<@ zt+Z-6C&|JFc|Ek(q`kH-B*flUlEgc~gHENw`^1)aWhvI-*li1BlQ4g*&u{<}Ffwh` z>J_&pL>={ncDmh5&a@2k3dA6=HZ2vR`N9T7_JO95oUR5S6~7@S9WxaoR0eO4L#FVO zScZ*f-Ta zn_#$Y@-4;cBjy93nQA87)akO>3n5R*Eoa%wqtp>g2!cv<$GaD09{9*nTz^;6jjOh8 z6W2KwYw*17#Xu8`wln9ul%WerTHx!veFP?eM>UNZqv4_w7ef z>w)I&a4&t+Pk-F^EuW+LWjA|HFe+SZwqFRRhzx5h*HN*wVf&~EkR zxH)12>8_)SvNBCVJ~5`*i8L7+>r%5iU9iw|4&^C+>=Rg%QAY-aTQkf~wu;S|3s*f{ zbGW9xLlht!`#*_Y0p>tSWs(T!+J)GZ%jJCPUpX@R%p|CyExFNDq3&6 z5v$8aP?3$Oys5>t(q4^MkN3B~^uOO?kYWrR0`RkT0YOF}v$s)-ms@w#

literal 0
HcmV?d00001

diff --git a/models/ggml-vocab-refact.gguf b/models/ggml-vocab-refact.gguf
new file mode 100644
index 0000000000000000000000000000000000000000..8f26cfb76c9f287f5ff06bf0445ba9ac363f11e8
GIT binary patch
literal 1720666

[... base85-encoded GIT binary patch data elided ...]
zuIee0o~#fK{1`MyYcRXrVvuyeahkB|!A*{ToUw%Pe8m++(;QYMwG5`CT|9vK6fvNr5 z@gKx9Q1$I?P=l<=YnlqN1A|KLi4DMAqM?J9)k|#>o#gt&GFBo7XqwbM*1L;(30?U) z4gCjxq3uZ9DV8wjz}qgy5xsGFk~&Ubsg+!V14H|AdWcC8Z#B2j&FNigc1;=5*q^+t z8yT76bb<6`%u0H+!`7(rm)OYDvV|do<-I)JIsfuXc>na1qWMD4;@0NDNTbD5#(pM? zhDeK_bXlVv-@jKSh?yXiv{s5xP1YdoMlCU&kvgBbXsH|>{N0p4TC2@F0l0-(_`SCi zy3I9VF$?otc7|*p!MTx35!_o{?2-#HL=$zCL1Y@y!N4L7TVvNDh9Tcz?u~eP zJ=$Kk_<|4tTwK!(YhZEKv)$a9e0ykZ>>?>l@eqhvT=!-D0X!=!-R1s-O)jkFkNbYMXui zq2|%56wd`e@SnzSXjF92%f0*N$(uS>%Y@ekA@vd$_&`flMimGd8vr(mv&$#H`w#lu zz&fU}5YnE#wlcSD$9B($jS{64WyxsPC zGoS1fwLYmrmMCppE{_6-t=w@(B%(s^5k0Fs7;KtvHI^;Rsd|LMM%me<|6mA%IppjtFfv z15}QV_i`npe2kle<`2f7is~qh6$gq0HJV;T_;RUeIwjoMbe=jctdkQM8xM<4*XvD& z4aCl+(3I37$|8ntE%|aDxF^m$rs0vf+E}+$UQh>JQ2UkHr^TjolpT>5b^hnm3K@aKeH4VxgnfZn@&Dqm+1U%zk@ny{;Zan@Mj7L|kS zI@X`+R$OdhSl2wBb6DSzp|9{aaV<>aq`;0HuckLfp(yF~ojS*x<%s|x**N;7%dKBL zqhl0~t1vTZS46!bmr0*mu&Z5(cXn9S*@QrcIaiLw2Z@?}epf&`iRKVHVX< zD^iKrG8`3Yv#OmNQd!T}{#x2TqCC=TDf~d6X#s5=&r!noOVvpqKDgCwE#K_V^l8I; zIv^+p0fW!+e8`eH{z}cS9vYc}*X30&F31!@am|<8txK1EcVcUY>7*TS#VX7?XqKcS z$<4S@wG_Zb4W@xQ;X%*qw$YuxtSM~SzP>v+II7kqU|DI&z-ut@>Hi!4>HQ}SDI?P5 zR;7?{BeQAVD_J?J379Jx0iI=j0jaoW@8-nJ3R=i?d=tmO8)|R0-O+1v5h$WrFA}$w zdkrb0WKB^UMXt1~pxd}Kr>i8=;r*-rX#gZx#%FXVu%MHY1?nQAl&vNJ`HyIKWf*jf{fVFyUd zTDsOVYJ8ics+Egc!o@qFAGy0e)(-BrpVh}}k6+Z<2FWK?n)Oa=rQ-`>5^1PkqU@bM zgMINhZ^wbiIqcrPv16AA(z9L}-~Y`8jsHT$pk;a2EW&j%*a2LTFK%vB3pX5M=+XEm zDnF}bz6qOqP2DWdi+s(J9$nv{%ZKCl^whOSHTdE+-Zvr`T>m8CNgKGeAK;84(Q34f z{fM$*06D@*8WHuy!u5BZ@`(~VzWBJlf1;j2s>jjPw6H=pAl03DI@z2f71>+JNv7=)Udon;TH^W8Uv0M%kQII*O(!| zOH{$4`FVz{5h3dFlXb{oHqv0LNlyIlMB?v$_xE8~o*{e`wWtrRNw0s@xp+>=Nu8SZF_p@DEE2R0Q{0L>S$>+R$KpwbClES;-UtG z9glp5nZ+2J4u1(9P;yrbjG{!XBLyLCtg=*z+ma` zjXzTp*Up~HnUT9|Tm4?1=&BmfO)n3ITSw(%zDy;H15_+wEWNz4e>i!Z&TKeLLJzA{ms^&7{@^vBw(KV!UpQ=hvPTpc*3Lue+7fYy?w zjnw{{BW^s+9UFU!rCdACAB}U_@=v4%)k<70k>zR_pGqB2+t}LjHn{Hm;#i|?k_wfs zVx=$EK~8^-JzL{iiyfu$9v@mPjtFVr>qBx2=!Nx2!78dO&t**uG7@{@3mJD8KB(~n zZHYwy^nu=217Mx2komyvs2|p3*L<&(VTh#j0jSYv$@ZJ%*})!HfiCdw`22D!Iu?t4 zw7jMUMh9lm+&)B_rGuF3(j%G2$6j43Q=5Nwy%}i(@@uJ^pdCEtTxF!8A{mqBy|UH3 zmU3Yb-{wGTT5OaS-9iSNEg^dR|Rf6O{*1a?NpBelaS;`jFnEU)g6Tc=Jm9Hf!0QYlPiq>J5Q)#1Yr zKb#l*H2l;8c4N&}gK!s(UsjV?k29=g9(K6SlOu%WZeliNU#r8#j@lru+yO}J)Z%EW*y1Se4UM3{L5fo^ z=U{??CV2nEP_>zBv)U2S<}Kg|*$Uzz_e6&MULW38gU_9ROb3FaPUdXiW|x}{`#64T zxO8L3;j-{!Y*mj7BS2%Gar@ep&~v_wVGjF=Ys9 z0SnF7Cm(50FcK#W=^k3#BKVV3*?MS&8UZ?2Q-C9kRD(dz9r_DMl3Ly=7lV6#cMQZp zjV@D*L!ZMGqo<1N@GA`;T^TOe8>%u+DNUNlJNzTwo0HVZMQYRtz?!eDYXn#c7W6rs zy_)8w)od{AYH=PelkeN@NfO*puYZY=_&Wnr>@+BW_D zn|g)OmuxpV)=7Toya6$I#sS$3163zGFTpKblU2MclJ*;#;#LA+ zUI%>omPWf4{t(@=Wvt0i82BiOh@Jfu0FC{fg~?|hJF6$x>}1tnN*%cH>3G@KyWYob zuuTadAyaBy(Sq=C&;Uuy;XY}((nW)1@HklxY3WYB$!FtQstoh&iVAanDPVS5lbyo@ zYl^X8>4YTd)!108b&8OBA&S31bVF7OiB#>8pwu_ZPVJhAib5%8AQ-oN<-s3D5vQv0 zV{HtaWE#Ifts%Oqix3X9guU?AXegngzzXymvD-&(lMZ9iQr5bgQG&$3Rq0_%wW0k@ zpF+vFYBn%UJ~L6lTXYHR!}xO@%I9aE%r`joxgk)_OR6i)hm3!fd<_C9-)pWoPIM9! 
zOt zZ;h(KY~3YlaNwXw(!ov=hii`Pz+dadc5@+#(Ky_}u8<*QbK%|9lbvBVQr!ntE7T5DCsdZ_S32Hu7cIYI)lMkQ=|ONfOwC zRiN(U5*mQ--Y7vZ(TFTs%wI>>(jayyrmV3~le4RzdF)C&yqhD{8m>(Kd{5` zI*(`?cTGX>fc#4 z^y^Y{MT4P`gW3xKUvL_YzT43+9DB??nXT#K1q~UC4yFA zc2&C7_S2dsn~HJ`=^<#blxk@KOpFMP4u$HPb2$jZt44#iH~VBRT5wR(6Bpg z#;^uJih7BXliVblA9NCB38gVQkDJt%md&2AF*nkPgl>?+40~Qwnz}61{`x{w1S@D6 zoOb$Yv9+WzZBpMmHB>alZ^nPfVdb3|xPM2BO&y)uPT^Pe31>5qc}GoDKJhaTwPCTh{H!30ri2* zW~5=DY(s^$WzPB6c5>lx4XCfC=hkHUZGx_N67wmmE@+OC5t_+%L%qlujV; zAIMH%V7SJ$6yfsom*L*Vp)Z3us;=g#BtyCHEYJi4tFK(bnI(|I$Yc)MvdsiVFQ6!9 z@Jn`S(eEej7v`MBctQ?7mA!1eoe0;Cf~&kkNe=*eFiF~Ef|<V1 z6%7Y(qjdqU_o$F$MvE);LGJe0HSBI{daVSbzi1Rkar%6s#yiMrnkyryF{t;d47S)1 z28OtDL4uA)h1aRVef)><4-$NYSX0U8)0*3&I6J(S`eOHK@&UEZH2j@ST$cK&c&|Rt z&P((Y4%G=r2k)!d#*ifshxO>vYjKTQ4=*7{_ODsSo zm3^K#0}7I*E*NF6%+@cpR@Ct#5}Bq~^kOd_cq(<&i5wDok*43g3gV$_4mcbQpeYI zXP?PjuWQrKqxWbM0Rkcgw0elNtEj8)zf^@WDF~!Wn}ltD>J^1_;0(k_PUJqA{8tZ< zpsy}P2Eruc5+jpHmG>ChMp1aZS^S-1EN5m!or#90wc%{Nq8EDM$q;2QALJr&h{Jbd zv)YFB2Q04HW07!0m2m!sgI)ZCqmDP9Ocy$X5sMUOE-XD7=Ht#K$a&kXlC@CwDKb3p zY%W4?_uzV!WoBx1$mT&g20e)C)uaHaYqH1JsWGmxGL%y$c(T8=jh!zgY&a?+V4!P` zE65?ac$qPaz}Tu-F>Y_Ao6zu=dsR{mr0EPWzSXx&wTDpm2LF4- z6+;T>rmpS|YBZ@p#hS~b4Naif(_%682aLB|yxfi&1n0QYflF3~B;iu%&iS>lU@hts zvs%U%9>1vmBQrqcXs%kfePsshm!6vg5h)C(6HkbE!2iqO|^6 zf9`EmQNEL$_S!_Kke`qLAnMHxcJ%fmPpFqb-69gzjAAWQ8;|e*>}To+(W!5HZB%(k zGzx(il_8u&mFlvJ5ra3GeM!*wlFuql3?@v9r%z)2K_1(6DRkw@%-8 zGP}PvYy++hW~Y6EB|$A=`?~q|>569y(?sCfP$8BCVpk0=kziXSnqMV}VLmv? z3$8I^`<`Tn)zi4AQY42Y#E@ujvSZ5O>mTO9J{nA~^FAV8Ms%u7k%nBy-HJD? z+6YbzkN((wMoqGhxa_j5A5NFl{zKMCy1jW))l3gVi%oSQd7`l&$TazH5O4FR44HrK zZ<=@Gd=7B=k7FkfJNM{ATFkXxrHqC} z?PI_A3Zl}>Tt)m>8xm#}RrNF&(h!OIOKZ9RNV=%8l!!E+#jE8|XlfJpTOx-*;_hWw zL$Tk7OGf@WoZ`a{mS6eIUGU5kLey`!pQ>%1~#PIrcy&Yg#wf@2jkEzR>#_G)(#)IF) zsKP8}y4G6F1qe9bVHoXmNYb9Y!0VTv4{OI@MXuG?9471+vH~ch6J61Iy-wUMF2qdWXV6}FN-=F+ULv7 ztksC6>UtEv6`dmn^=@_jfMKvAP5=18t==Ku*Zk%-J~s^^)4{4w{PR{W78je=B@2wz zC#V?PM8HNUrF3*K!A2zu}E9WiO+Nsz~Ky3E0VL5fJu7m#0#eb8VD|ZFg zO$D70FM=FIaYT&OT-8VlxaXh&;#+{r zru~8{(P(2ffuFfHJDZb<%J6E49Hi{e6l%Fyhrw%5sL@?rfWarGKk zDewKEmgwS{C-re-w5MH-0b-h6Nk1;N?9~URp})49wtXT56gHcL!Agn^Ge>a?sI2$) z)PyG*Y!kSuM$eu-tB+4S`lN=<6o;EWJ~*2)*yBbD$i@9Ej z<=otOKQvMdYJnru7@m8{n3sUCY7ZA%ZeD>u3U9l*kG&-JUheP4Tx+S-eO4pIaGoV!*YK)kBmhYbxz;GT>KnT^-wB|*@YB= z)!|dB_{=n+f%=MkD>{JJE~FNGy^7J*>V`am-Bpk8IXDszg@%V|{vZG`rUj8BW~khR zzMDDy+$h>9_^wyB$<0c{{ zu7?+}5YeK&y4uwnB@czZM6ExIx%f};KUef_uQ%1g(eB8`7XY}aG)JY{Dv9~Qpvv7! z%H@fnUBk!PFcGDaA&|HOzAqjV?x*1f&UuSOgvpAO3qz~BR|ba>+^VsntbRTIG#ecR z4BAG7_NBR80@8`}j0pZ^pvS#60vhd`MxCy=Es|}O(jS^j@U{|upeHZ9Wb^no4)VMu zfG-DLKuFR^kUYWs z=@&Eycg@xJ$;q1`GG-LRQx9n_zp^HCwA=4j*yjQY(`-VCI=*gxR903!Q^SuuNBXv2 zGz7JN_9yy#sB1NZaKiHg!=-0ijoeju(<(F)tf4Os3&pAO#%#zVUyly`PITSs9bGR> zCi_M(vh0R+%b}$gXMFu`W)4ye!eMUI>w{1j%`fioPh>{He^ZOsXj=;)j_gP}bC{!! 
z`%}xd-fM6pXtXF%rxC4p9F5cf;i%&p7&UH1jD>2MHR&HlP+<7~HHv0pJ2xp@f0z_t@;f64%D&(3fER~HGi9j_4j(WSD4tN5a z9bcCaom`8(B2AiabSL&{EkcZ8V)@Zl1?m`{F`tav7>cW<{XvTR!9n1tMSpDfKJUoH zYO=sCH-^3#H@7T%P>Smt(H1~y-(Cy+r9BUNQohr-Qy)Kt&e#jHNta^$(;nwg5R{i~ z&piIP#)BC%5@x=tN7+Olo>cH0b|E5-HWCpVYK6QKeFL)FRG%ZYndZLKYA)!xZrCd| zuBlN+&21iAS6Hj8o%?kx?CU4L*Vwuxs`|gQGK1~27RmJ3Ja&%3vD_D7N+@ZK)9Tf` zk)Zqzc8&BPu1zYrswH*<6@o=7blOPy=E+R`-eqR6qZ@It;mh(!|xY z_V1qJD8*)n5eo9 zj%@CdutBJWTX9`FBq!2I?67Cl470Wh^~4iT%;wCnG~qr;u2BM0ioL}zFHwC+Lpyiw zA$_Elj6U{`><-k;t)7va%pAC9j*;k{QYQilIE{KVFy~q}0RVGFjuG>{>0ul1p1hjf zJMUNjj%5Y}e4q2k@UN#S$f6!8dhA(3OJ8>jVfN6uIPWAbA?k_VgjwF|RZG*JfaW#c z`P~Ss@|GnF>GK*4s^Xzdm~lSv7H3r9a_5pBT#UR-8tPwDB}a`TTZQ-~T7?!tE2dpk z$0jR?hl*L^Zsx{XT+zffY%5~VdZ-A0Uuy(WDsY&4rQjCX!zrqvEpng>PZfFZ-Fr-9 zM&NYuolw5BOeX^W%j0*{P4h}Ki>AipdXCNr`;3Q)9oeE)17=1~2g#<$NUqCU`!PoY-<4C=8^qC^fTPhJQ3 z7pg|CEH8)czS=T)2W-m17>?&))An!LtAZ=Hz}1nP*2cy?6X4Oa8e9ikGJMxuq7#k+ z>?OIQ7y26`i;!{OYd@WRP8ia+IOU)ho;$IAF znVQA%%4yOo?Ekl_bKTen=95`CJ0o04nOlvsT(VB9DXciB@FtDrT=K<SHg{a*@Mae?5P`0-H<|n5$P#zn%X$w#cTK*(spIk0JI8Yg#q%0Ej%h))sd%!x7i- zf_gu5&7n=)Km=5aneSCt)Dl7AF_EQJ56xfEr)z(s6$qy4)rs9P*sZqzO8=PF5}>mi zI0f0eaLH36l*oruB7Lz8m6Rak|e zES&}}Vb|~XE8qr>T&*oqanF|+^0*}?MDYmi`A8$%$m<{&kEhS;%t_6S?_a!lF^S*X zCG?Dp`vx~)&HE-HR2chgY{P!yR*3FSd&vtg%&K+rN~K5_sG<-cYRQX&tiyv^@r}!; z@+co2qm8;t>?$@7_HKJ`ks@t?Cr8vKZp|`w$=&EpQDV6gGM?25^(yvQmjLNjxH1Zc zVrDdtrw+>m^#4EFnvo{+f5|89(%{@26D?U|e%_n(KLR5DkNF(3N6re-p3yjLB@zOGIMh}XJgVCh4nxr`Baxo4?3ESD#r>PNu0 zfnLyfF4y_UBgw(hkpx0?xKsZ$lEspULEn(|K0mS0I!Q>8bHHkYwMfqr2T4sAEoQgf zu+sw5^q1q$G{&$MR#_0a9n>O$)Ti#~!<{OWnuy{WS}OaZdS@XySmNW?Uefv$=Wa5_ zfVxz|L%F>5(sk#~p=xLqfk%1q@v9nrK^SF}R!PjI-y-7%mz%l{@VMO&bCLMYD$;V} z>B>Qk?G*u7l!F9jD+Q2NiTyTDsLROEz-1&VaK@OOTEe$tHK;p0NZ4uI*aW(yhpU@; zPpF)6PR+q{A-bps;;HB$W;}4#K?P`N%y$8>iJDKiP)3uwX&UcTNzqqK#KXINbM2#a zMvw`qBXgxvM7XG&DBKO%PrC>9`wjp5qkb%v05p>|tS^~y7=iYUgp@aq8=}|_0sf{jf+(!EL4cw3%Xe>`yj+*h z>l?UyYEgsZAh(OfD^T^6!4lhg$`8AJ`#1>;)|lb9838@YlC_c0RL^uG{eUP15ZZw^ zrL$7sLllXdnZgL-N$q|m%@kaqewjb8D~gNnORX!H(y6rK=#>)jP&y}?rH2A|*VZ6t zUr7(7&PM2gz}4ZJ)SJ^lC!&3|yO%R$NntNu5>DoDXp8MN*wL%Unfth1Dq(AvY`|eEC3d)LJ9uwJZLvkTussXp zuRnhOJ2g=J=qKy% zIs)-15@l#}6(^&5_;R-^l&_#gYsHwk}Cw+any6zl6|F*vc5WgOc5!9P(8Ud_P8dE&X|iGRW3i%l18fpSQZzq#zr17)CPS+ zw0Ps>O?9zGk-Lt@iUotT436xgt>el@3#)}{I=`SP0()0Ai_GtgcXK_PoVkn(l2!K& zbt4Nm@nqVZI1O%sf7Uk`={uR~6`RVyF}d&v5~5)wb~9Kn%*XJr?pOkxUz!(ZVV7;* z?1tE<(ZoJH1&;&a>m$lmpvHnNKy!iQ$*~23nI8zz}_# zcoMaymsgjY+9i)Zp=Uy2O)Dh0AWXtqyT}w}i4`($Tcy5abvdYC3XYmign`;C6jtU~ zRPFEO#(ZuhS9UYFdTu=srrIsIn!`S;cjuQ{9`qX+ejMA?TCkh|?+$@QiWD%-*}F4vXD>1%|HaqF`xP1&uM*vmNy2WsIbH@M`KJ_huX}i0-3Vvq^@> zb5pJ7+&K$_f8hr;TM#TuQl|b5BS$nz1mQgWuM$r52dyj1TpZIP!9w3I#MWOoMN-HH znit-$B`(kc3(QM{{a@D$ImLJ7+a)eD-RfAWQ!Xy3b9H}hwz3eNc|?D{KHAXkIlQf& z;H=Zcuhu$-1I-wL%^CLlyBsXw=(Lh)#={9>cEl)Zung{g`UlKh}HmGroJ)O|QTeMVxYsSj>qs8{QNcdK|^m`O1R ztG@|u=h%V(JJdeg@xLGcdyV55TnU<+2#M>M@_d?KyV+H5TzM~QfLPVj8B8(_yCvpX zhGKHpljHl3Kl-@3iWEphF^RC!j;vj~;bqk5py#TkJVO0;jy{1wKBg%HY^R6OuE&@ z-2pHl`!?pNil3@!fHRPxpAE zf?;5p4sPgKl09ETpa_uzDMq_iMMHcQd8lx%jon(g(paj4?xqkE4FTzJ*FGX)mb{oS z$}h(6i*DolH%QHZPf0@|1(x+?TsrE0qwm-nAj!Q+!D|hO!-{6)iFI$AmmftHR}p?{ zsP^r|#qoch7@Pw`hFu>zCB~zbc2bh^X?j3vGo*RBPb{2%39ZNnDN-%Q%@lm?j(L5QYW)Ut%_|u%Q&5kG&M`yU0!={KhqOIQwrw{arF#Z+-Kd|?;6l65!H}6^)gGT{gDt8dr~tEz9ywXV`Zq!78`_#vxlg!B-iy9M5(UmyB2g1e22c#|dt zAGA4sW7mISqLsUbwTZA^jSC#d?vWQ^esx*Q_d--4@c+|*Vr361pd4Txd&0J?_AgQh z%>J@zQ97*;=?I4w6%fyljzZkm&I#O6NnN52Ljqv0n!#i3F*u7{0JhK~#@4v}xTMY1 z@tXbKV&>vAqBREITZ8^>eI4%KjXf-OK?yevIh0qY-{tV=5kw|!4>aE6(a}8N8~IXi 
z+eozejKxJ2N*nVwo$3HRKl{`Og`85Q@%IQPi=lXboIU2vKc`S4jV?Y#Affhn)1qu$Y z!AFY$wy$HH_kOLnqeiKEp>kzEi`(I2vA#s5@yn?cY?02T)O=GT0aeb8fzdA~JHje$ z#tK#yX%oAQUc{pLQ9fI;A!#r$Z{Ny%DfrTL2z@=$rCcz24moGwD_V zZD|+bYmwC=ZYO07vn+)g5Uy|jD|%v^%h>c=wlY8W&~xgr0FM(56t?KT1N+L2_<8*I z#91mg!1U>50v))wE9#+!C6bbOk0csg9Vsxk0fv(Oqev4bTCa|dU0Onie5Iv{fYe`j zTHFkVM_4$1hsuJrgBX;We8*}u*l5w2?CAZU+rV<}U-eHz$yLypuiw#cs2s6V2o%={ zK;(L9(YwLL>KB9r8yJJ1+xbu`6X_|;0H2AZVKzh(aeK>Q`7|44lV5JM^Xupchu_${59H zV8tu05jWMw=CAuagvV?2!Yp2r^bAP0o!?n1eCPLHNvB&Sp=aa=0 zK3YY*FZ4@&SiMN!-;6&Nfl%(z-^?;_<$ZJVo~GM4?3YBn`p#Vv|94+ap-d72<6rNW zXvL2;>qvhP?A(lE?dg2+7;!)^7O}PxWY64tOr&AK^8Md9L=)dx8p*;Es!NbK;lsNi z{5X3<-=O$)uMaCQMwJV7ny*g8Us|2h$Ie<)DO4V8y%pZdINsP?oTgZC}Cn}a;%Ys8CoITdC~1VG^X&Ig>)Ovh=Xw{M{WpODD8&@;}p zZ2niZOR$~=4lvIchq<^+v{wMNAhPINi1vAN!N_@9zhV9RZ2Z2qPB+-GdVvp2z&aWx zSC#M0qSwQnwBB(C>{n%G;#NYu)2k#cvZg{X46tlXa^}2+DCFZh@CLFq^%U90)D(FN zyi1RePd5@VKp9WX3uo71%VvunVcu$rYEfzmepm&x-ZGS>+q{%{&jLN67f3MgaLDZ{ z;t?c*9tn7jh=QxSc*C{H2)_z*seb0sK;t-HSy&ICp{9V*{L6&QhW=88!vYP&4&gss zLB%DRNq<)xIEpDtveoK>GD$H7uJy9V{QAi!dguxs#s$%efH{qZmoK+`jgRlY@WKmz z{f+T!nj*&PeH;+_8i4Dar2R|xoa+D*YBT5| zQ8PqKv%1LMQU||c1|dK~q@vhCY!1DNcC}GuLk^8?5*;V1&rxk&pKr4_^x^?ADRya) zd8}eQ5r)uiUAo-Wdn?Unx+31hs;OT%%tta3o%WsPTz?QNjG*DE&Xs@-HMU7XA8q;x z8Z7VpHCU5K5Wp+e*{|)6sw@>tt!Zu)@#96Gl+}{TS*=XVEQuar3N!R-K1^+qamgD4 zNgMBowpJspD|obrt^_j5WTKdq_bWR*`HL%KX~?84XPrlWlQ^smy>@h}FfI^FZ3GApx8Lhk-@(MB?SsT9Rro~X zYDXKDCFEuie^?m&C>uPY0rNn3Oe|OST1Pg5$nmO|kx9NCf2*knQxu(-NfyLu%L#LB z;GEa@?*fzlpT@7Np*0lt)Zvas+{bDOd6X*O=2szOp1kdqhY9&C(&&S{Rg0ofuu|H9 zp}~|=BI$ZT?T3>Y2y`hA4$R1c2&Ncyh*>Aojq!}{55%fOuni8t@`~6O9&zex%#7h( z@?^qSc3^wN0)_D%91N2w zQxp*7hkojy(*241*Qy534)3N$dFH3u8cVAUUuCe1i<%}oJyS5Sv~Q2?_GI02qlJkm zmH+X563iw0E6BgBSdY&pN)p!l2OpJo+9vJPEp>Z4;*X`|bGh0zQBby;0j%m{&2x1F znU!;@$C~YqR5b2@uuD&z%zB=~9-7j9eGeZ}gnNn#6rsr&O7QdjHPY~C!02}*X4IpI z=VxpY3C-J9?+Dtg-OiwoNM)y1AXz2s;oc3MF&pK^)00Js`$E*?dap=)7 z?C0@AgvY3n`yVj(M96Z5JDgvNVYARtZm=YQM!VqAhLmInplY=PaRuG2bZY( zsytw;+`r4j)C1>K8)`O`)-C^o>buv>IRU)RjR2dF)c0-tz0_ zt|JF9QDMoh%(4Nr!UV1DMSg_rdwW>nzKR}RU9-qNpYgx1G@`IQae_sxfcb+p&EWR{ z08HE+t`g4UR#t|G2Ug~psKp1|f|?{v`i%D@2XX#z`%aE}|G-cr9HqoR_x&T@_G|*} zzf!ATJ9%~dy0-m`R=C2Vu|22zFML3M;jK4$Pg}HTv#;xP^!INKaJYoCThrL)hIc6a z{OU@ltI}ap20t~2<>AxVcdFY8_(!A|m7qeeACOb#weSD7t}J+9ahCoOViMzX@u{C_ zU(7r)qm}(3`U_X#gN#Avs;M?fxuURxXjn1{syHC~Nz6vEhC4Iibc&_#=MrLB3d!>zJcipb*R6N5>yQcq*? 
zdqsPwQ;_OvadloxwKg)UB^*YR>t9X&7;!$ne-0q376@@+rxJ%xr8X_G&*~lNol+Dz za^OkGp`yTqdiLY4)#1j))1S?(?q?x#ruXv6JDSMKz~L$1{eJwKXuzbN)#nDuxHR;* zAPJD-&f-c$ucoG5(!CKSU8(eeM7N%{dVQ zL|RZa$K<gIVq&;F%e3W5_X!gSTHd zIk{5-w`kCV!^5=gVsON}=Y7Fz)>Ma)OevWcskuZFK;`dfS%2dMOq1M!!E|$k6Q{er zPu2S`b0>xCO|6D{swV^Jy@{n&bQsm{Iov95kH63f#61T>l%)L6ic)B5xVkA8u9m-h zXZ(Ts4EJ(%i@t^@^K*^~nR3L{!+?vwzo?&TA*vkcGkyqD7EuGnq>e+gD11z$UTrTl z+oII`iZ&(6?CR30QLW9DMpe_kcmgZ%_r@I_Ky+sN?TS&^%GWl5t& zoc1B!JhlNH$!bj`A-W#XLSv3foE=H~pq&cjXNe;femy-@iVo=gAXwkzX)QGua-t<= zsm6kD+wv!|t1pl3&x4L(#;r<4wb1DU0qg?*}GVV?&b3gi%p?JI#t+Y*7&{Lt&yFJdJ%^spO)1nuRy?aiC59o5Ym_R zX0hsj%70%Szo(8+QJ*qI2u|9ZTfdYyH6eKg--?88Yf#XT*k^kgm`dda0!X8!J4)sh zSEwaVEPU-|_D?-Z4HnVH2Mw>-hnYk&kfvGe5`hUAUi??&#ob9rj8V^Fy6Ht7S+$Id z-?0%erSK2c;JA|gLqob6{xW^(?}H%KDox|CJjTFH_I%&XU(yy1Z}ZR&@rUhJAz*+j zUSnD%ACy`~&PF%d@s6Zkr+ba0$)O|Vv+zUewClFS#64N1;h}u9NQwIqt~w1QRF5F> zjj1h>zg`nG^cqnSmuQKsP@kkmM3$;66#?CfLce+-jE%7`M7Qz%XP$XRJK@fr5kq?e z(#YuVXU{mXTojo#UAb<4fAX=|=$#5g&$>m5$oIAGla%QuDI2(zBMNCe`42p}kr8j5 z%MG1)EI^T-q7u4QJWOmJb!wuUX-GF+MCZaIXEo*B+c$R1sqx0fvWZLJ);sl2KpSw(Y2${ni-$gvA5CN&@Y&}C~csAe)5BVdTNQ}p|uN|6hNV6=;t0@VZV?J^siLyQZdxd=8_naup??tWXD)S;!l;)fU>xc z?(u#(2L(2f37_Pct8V2q_kn4 z`PTWn+3{b=*VB{6YfYBCvEH4rAM}rg%#L? z>83Qq3Hto|->IKKco>!cH#R?_9-j`Tk1OWLcHe;irof1A=7>7ik1Qb}!4UUSWDq6uRu^;AbG}!JtHeegG zf*NIJr?T9kLv2*H(U5GHm$mYw{?&U?XJ~c7gEyGz+JzZyqhj0G1rw+9KF^&pF&esi zB=rX;AE{AXSf7vIR&VXhUCl~679qIxAO*H**q-GVPJDz$T6zpg%!g{`+H*772_b9y zN`h;=JN{hbD3&h^Z^e4bC9GdDi~!y6w@APAK&ueSC8=uE2V+fdN5O*Hb`b`dgYe(E z-KFLVJ}*(v`OcdER>JSp@uc{pC&)>BZ*250yz~=ImVPG& z0}US!o}n2VeY7`?b>=0F6|wn^@q;klpS-6ITl0;`6r{D`5Hz|C?WZ$m&gkRpqmOEd zTA@$q3o=!jJiC-e-L;V_0-z$CxVo6|82UH{=nKu!_U9Ec1uznIcyY!?c3f^RIBn?7 z;W1`=jkgu$JWc1}R+Xfx@JA70&B6_ILPQNlD$30_*SaFd9NBTW=0xQ`VI-WynCO9G zPm4XN#-OgT&zqwyr>*Sod(2n@agwP=_(h-}G;QF+B5fqTx~+~%R+~WCXy)XY`0D%L z=2q{86+ZEe_mQ`_l)@Gv8eFYvS8U^u{mKpv;-u9qS7NiVpk6!k?bceXil!F~Nk7Kc zFH*6_4s2T=o9$Iiu!DsLkdH<&x}mm6QTDFp%Gti|>pv3IR>+sAI-V&rI+v+9r?z=5 z3K)tCLl{q>53*GhTy;0VnV=s~51Va$LDfi?9b0zuz?#akO&85MwS(06xLyrUr&Wf6 zf!4^D`=xNLj(QdUrgvM*jrOw|d0aMksotxjN&%q; zEX6Vfi8(FL%$$>OPrz+yHDky~33A7N*zJ;YPO}JUocajrD&lmKH<3>BzHNsLsThj`>D?&_?lT!y2hU?n5?G)|rg*p252)?SFg^FF$pr~WE z6>L`&4~fGwy3(}GEHIgZ=K-a;EHP3TD(WixHb1!WjgI3lL z^VJwOk34vw8sBD)4Y=tSC*NwYht$%ncK5HI(wvSEs?*Hgd0M2$W#aJZ$Da^IB2wYB zukVE?a*&N}_^76i4^(f_)^W{EANz@Z;aU#!$=B)?zV^C?%t~!Kxa5l6i$wFjp~rVC zk+kH3?>qVSfg{fc$F>O}09H}}ZmEC(i4t7#Uyt8Z?|7BcEJY;I?@k;%a(4EfCb=;1 zRfwIz;G~$au`!LQ|5vN`3yZZRg2!o=d@E4tA+`1msf6R8tCuq&cE~)#mKsMuWI$uH zP{EB@umCov#UMG)( zrrNorPv@*uvwm;oC<>}dH0MNR=rSj9-z8j-96|!EO^5S@Hll$Vn~Se3vMp58{qp2X zb$_R!zwZ<0x?`oT=m76%7T792)wwK&Fh<~|;Ci5gV%OJr$)~+;J2)YCINIUE%e-iRe<(s=v>h}KvIaX7{N4)fZv@|cGrWRmC>cfoMyc}((_zKqs$x@ zUI3lINjlX8G(LCD7v-;JT|BEdvdIIv`kU{6tGSHt|LUANDJJ0uA|Ud`q;!%-vpG_q zVc!cEtX`Z{Aj~G0UF9~awwS*K4&=}Uvse%6+eINX(Pt^~NO*v-nPrQ6J|7G#Mc}!+ z5%R7@D!x~ZDY9dZUsVej)Hl>k@5LAO@r4(j(DN~zf#dGEvImyVsbYxsXA!OBjz}s+ z5FzYeJzcJl6s?s@9kmZTf9K?ZItHMik5y<~?-rRya|a$#@yWyskA0pFAa?6riZxk``KQZF`1_;`Q3lskgeYzGU?RfAeDN8Br6EQhFlEYcFCQjkYR>Wv{GVjvC-! za62tBnx)!XTdk@Q6qeMhB1%$3)q254Yc+BmW4?aEauzUcNt^gJu$+f8Ok?b{E>-I7 zJX)0ov~%Fo92q}Da}a6R1whZ$1gK~;zVZD#miX7B;;3Fha(OR3rBn-Fr)Sba7{1 z=Gq~Lh-MgB)l? 
zxlG;%H43h5)5fUIaQm3c*F83{fcmBzkyiD1BwjpwMt-@eGh(5?-K(naVyw=+Ud&Y$ zHm8I+sim7XxKa$IuA=)_59^UNV*@dlA>yY3-<)7fMM-$B?_!)%Z;Gk}z$d8?k|K+- z))Aa2NexptQV|xrGdJq0T*Q%t@8d}DEh0NV9e?=a|B73HRe6Q~eIR@*t`lwI7RRV*Ry*~j3=fs>dS!TAR66IoRwq13+FYmW2`71l)|g*g_l9_9 z{GmRx?&+s`gT&Vla*$!=P~$~99@uEez3GyQV9HV%BusliJ!s^s+&m@2b1=YcIy`Sl zu6b?~!e;FoHuOxR#G38>aPNg|xp9p0w*dq*+md*vzQ{a+>r6W>$_YDEgGGGnY*o%mn^{>&v>A;Da>&f})(Fo-``Tp!e=te4)5G*`<14iX?~x*jDEwR$oKKa?bC15v-K$rvnGtkZ;!1!HEViKt$LdWhliuRJ_}y zKvBlU1TM?7e(u_1>ZXULrk#xfNwMdY=((v59n7W}v|LJ}>QS#NXG`GOhUO&j!m!Y5 zA4KC7^9$!-%7^i6RKd)~5jk-F0UObDu@zF}z~^jX=-KoR?c*fx;ZL-*jrq$tu}Lyb z(Q2-xQH2CpF(@1@#WbKLp|T`$svahICN(|wqI&A|eV>M9`Q$Ub{&b{T^Naia10YwCh+LD7g>y9{=e z>)IwH)?N&@Ty=dO^|mdEEM?jN3Na+hEZ!&xCW@BpeJVEO5Q-*a&QfBu@_b$RD=l%k zzV(GWV5yZIZB2x{iahYP5=@@mOb(qkM(@^+2w(Q|5h*S6S)^3bZHW#m!~FmuXLltY ziYrZxbvjFGEVR9N+eR{MwFUpAm4R(v9~f(gBzx%ErU5n&yB!Jd{!D_2xJjn;OV9bg zL};p?(MXV42`S21;R-J)Of?nQ&ks>0frq&@*dnt{F6osBOlk&rHdzEADFF>lH`6Wa zmvewflXBq3v@4rV^G1!Mz$53#l#EmhLJcN>A@*#J??0P~`M{Ln3$#sRdw55~0!$rF zN?NvI1@EqA%judFz|(PS!jF(!Y(|Ll?&^z0iz_DjP*IigRYI~qle&;>M)Tz6Bb zRc(ZDCX6ccN~u?+F=T4i1N|H7gPm+J=NrdKdR`nz{2Ok?5WvXU+wm@E9 zTN8coU%Wj2s;B~Qi6Uq}UyZ#)^a-~TCuw&?isPX{6eZR!gE#kXZVfVMtYjzS(0-V1 z5`?1L)xa88)HC<=Vb8Jb11C~s+j}F%Z98(=lHWY1k^QF1yA8H-X-PjEd*0-2(2@k# z08LuK^2wL;{ogUpuQJRWff8v?uONy&Qwo8I{$ahvG+{<`0td@BCFz8|oJfG_6AO1YW5 zWhQDO270nEX(T0qgO6V0sAj&Tlo4f{s~S(|`k>C}dlfsqI@}+I=M~hg8V_MAvu{E~ z^1AC`Y#_&rktkCSvojxzKRuty%3o+1(LfPy*49OxNv2h?wgav(e^0sCXUi^|bLTBZ2|4#lkP3SBl{%lsg1Q2A|2H~P=CeMo4%Qt<} z-%M%vyTAMX@9e0Ek~Y?jZdqzTr~(o&f1Pw5>WrOExSPn*4*QKZTZEKHpqP3i`;hAi zKYGy-A7p~cNx>o_Nc~+da=^uO0fF~<+r&63_%@GkjU&{NJc2#i6!4!*U55z zaWvdtn|M3z3ZvF>7OG+|?T%3Yd=;IY`MJov?~3FJ`5r`(O$~xzL!abMvfQoVF?pl4 zknQd>dLALkA-R!~1>sXqwVIBmy56_;S41x|9*IV!4337^y)Eho+!5n{nz_RT`<{f1 zElMxP4PPu4B7dnVc)7d0B*LL~gcUjWIg&5~+`ao(I!F-)`_G2LYNUwcDFj8DvB-EU zIpoEKSb!9x8spCS~*{w zya>`+&}L(&szW2UjHcH?aI1hofOB985$_;Odz|O^o9M3T5~7XULEpGGzs#U7_`=sG z?`nl2x&lb($W|S6p49#jfNegj@xqId2P2yxI)iL|#;O)|2)TMAF~yrZm85tBzLtc$ ziIIQDn&4W5XYB54zl50tp+lQ--Da#fsco%FR0Go+bG_X#Olj`p`zSb-m$k@s5RVPUkbMipB9RA+)(O<6y( zJ?m_uiHIl=0h8vjdC4F`P&8Q%jBlm5=()kN!Y=V!;3nHk;_66zVjMA9?l{o zHA^H`=%RV7+DPv#nA<`hs0R>89yD>yH$asZzoVyqWg6gm0!NT>`E13vHD}bIBCPAE zlmPBv__;N(jX(#{i-p!y2?MfBFxN6f5&NvqK(UBgN?{#SiMd;iFih`QaxD+zw}Q9l z00jY)v%e9yKK`So($6GPXJ?+$xWcx< zcJM=co>=6p&Zz(r$$>O|AolR4Rs^jqtXEr|rrua?wCt)yEBYjn?Pt$Cr7Z$KoB~_b zYq-)pPiWh-!w|WTdoYO?x!snU;ww*gbVMU|p`=VylYlcX`MIkGfrUZiUKfR2^u(hV3`+yBH7LhAk^S`@Ej5Z-f1$R}?yYyv?G86Ia2zn1 zk0PuQ!@SvA`+%$_Ne{I2CG(nB^=RNkk4$nB9-$Tg<+??|eH>jkEFm4;?j1;di<>Fx zh7%xR%f|TrZ=A=gbrjY$(IDzu+^(xcJ^JXQ`VGl9uK8;E5lZx-zSB{u-6;B%5Xx{W zi$17>Ri=@neH3Z(6>8eidTrr+1noxacs;>%FOOf&d=A7|ub}1Vv2K-?4s3QcAn9TD7k%)q1f} zrwYcN@ao~d6}jSygjF61oI3rT|L?0~%!ry>94%@osryLGwc{NRK2LphhoceVku22o)Vw+h%XD1>{6yD;t31 z#&@*}CA2MLCgda0&m|Rle=BbSq;2w96|o`zNVHsJlz}Pxcr0h+N4sLKME?20iDV#^ zwlYaB7c!lOKE8kNUgC0pJ*og>7^OG>nmO4#^IW9;eYFIjXe!)G5pAbM0Z)-3q*%nP zat1P@LKl;6#qxbz&xV)&!uyeN&dxfEc|q9bwU){CG%eU$Yu=Iz%3UR9An{_K(LRVk z<4A3wH;J43=Hx4XduROd*!io*_h(&8j#5W;>a3$o;R}|Ei`*eKfBlRb3%Tx7eJhx! 
zKZsPM9Z!f%pjwftwShN$T(@gW3)f_&dJ%G?6Nuefv@3!qntHWEaXk$kZWA#XC+|yZ3ZaZF zb?^tz2Hrtw>i=3YR<5O_t$)-Mu3Gixz4zXm}hyB6TmrJP2jaR_}` zyNY!%8|ZI^_fyLN&~r2AUz*Wp-q}<)H+!ZiHl#b)iFCa2NW3$CIRXd9hF{^U zc>nv~ifkBAqj02}B9nBEbGSbVDOPK-U#ASPx`4WPJk{EbNdb5b5^-UusZdjgot|4C z{NL%yjmZBehHWMYfgh-+@BV)5h-0i|2Gua+osFJ)jokz3lg+}o zu&-*3uJ8S~+7z`}*Pk_&py0{3HC3jDO;ZIY*J`_~ja=ewA)I&{5QRiTPQ;=}5$Wc^ zSf(3^{2^6&IP~jxPCnPHuW|QlZXhTp&Ji_Ml4#VO#x ziFRyP%_b~^VcSiI5V+9cAwUgMEx&TDsdj(x#RYIUh=ykjeA)^G&9;v znzL`Q_@)sVIW^Iu=g&KrWq^0E>^kNSp7gO$2U>>$(wWCjqd`EUN?dA0MK=G0c%b*z zei$CM)FdiYdn>@FC!Z!s4z3&B#Fg4l?_h`{ABu>#Z0sJq_@Xvc0relI_%tld4>O#$ARb=Tb79^*c|)g)ng6gRZOCLg5?e!Ckq2A$R{$zo zYqr%O8wFbGTaaN~c~?gV=u#OUv(#Fu7InyN%H9ys_?)4fq&oSH8l8_YUw45G=~OgI zT@(S2ml+(WKDeXW=-`scS}i4*6oOe^K}6A$WhE^Sg|jfMc8KHJ6idxT+tRwL3&zC2 zuqm68^P8y2wLia@6c6piAW>%;^SLp|h~o1%j6~X^3>-A8z7tK?!-|ra9ysb3a3=FPl|k?w~(p6<}_-!J&nf zYqRGywEbn7lGvi@lsoHQV*Gv9J5g1TeyV6*!fO)GbMEINUSy{=C3}%XQDdM61@eJbd2`n) z8kNI!oM0zT7R7Br<#U*>ew?{??J>PRx7ro$3T7%re~u@@o2DAleGQ@S+%+P$!0Stl zuUdjRQ_t6gxNW7J670@g04b+M;q=-im`7!wZ>_AY+e|fc(MYF-Wl>^vy<0tU;ldfs z@Dlt{4{jIC@*1`{*Db&`FkY@)@7(m_z4x6u-xQgKKzS{_x z@AIE8o5G{qM%F=mpl}l=Ci%(}^Tlpbc9=tVUW88G@tN zPPNF`qTd=NL`>g+%zinxV>)zNE{<1p#MfNW!*>7$3p8znj||lsQ1~6>m*yQIB7;;G z;ihR`SvmTve`@F0%)${W|CjxBy5_6sjo*%6(-9&fUsNG}>#uZAG;9Gc7@lh6?dC7l zEC(*bg0{B3F-yG;pYAiWkDb*UjopLmRYdh9fnLu12>IFy+E20$7(~DrT0Iw*FfH`% zT}p0p-D^g7HY!c~7b6=!!mNHLeO#@sFFYcHQ&+)n92ZEQ1TNF94J_~?6J}h`jq*hn z`!=dOHw*!9WfS|<(yV>M2pQE~UYxusR+2EXPEvOEp|k242(z#Jl85B9)KVooJ^Roj z**kHCD2SQN^RA^yq%inc&YcLmu#bz9Pp?$=dD0bX9D~E->)Pl=2nb_V5`|m65}<~G zvOBJX1~BhjwY%OeMJN4McPHsF4o=-;TQ~4u4fE$PsPejx;Tz+xPd?IK;ASn*Tkp3h z!lq$^p1a}KNbP&_v3}{mBDM1$ae0ym2M&2V_g(0Ym8j94SY*^0jHdmRyLiz8zHMltH?JgS=Ykk?_wND=%DJKg{-m>&h^ZZu2M7B%0xu`Bsws_`{Q>+=9pnv zhIAI6Pi`1yuCX;&>?-x0zJ#QuQJ`IA&JaLNW2X}DSJ{=ez%C^52m4s929=0BJgDNv zb^wP~cfrh44h?~)AChmimm5#WO|G_~84$)i1Xz1p>j!VIjeq$9A66Z=Kd)esuWr z=_Y^pU-?fI$4d*;1u;nrJ-pSE!~_<6lPTs>JNg5iF1r_41%`|JngVBLT z8=hATZTrVz%C+cAxrbSiDH0#lvCen4tB8`|_}rZuHLiGl+^wTJBX!HBPfci~R-_6$ z{Zw0@5Y-^9#tmS+;mG1n6nlzaHB&UHqV|RutqZrPb;QDxK28$=T@}Kv&pp|JKGP^K zBrF+<{XuJiFjVEXCI}tuN-oC~JEwE_v&Z!5rkC&Xz$!Lr91MmhZM04pDFB+9b$}l# zJ!&C6BTe)nUrz+-S9U3uE954#!1+w79Y%u_2Y|H=gh7RXmx#DwC^M5`m4Vi2{@c9` zG9ZXZlnK@ktkxGNuln1&s1%=SMrbfrPgokHNkjE^5Tp)T-X{E}wz*yOK4Gvf#twd) zJW!P|alJ%GSz92Rplky$xxfI2g*0>w7xal#z~isA31C%`MbOGDr)HmuI^~S&8CnRp zV3kL5tzGibYJ^+Zy0q3ucK=hC)O(GMgWdEKbJ%q zU`08Vcx$x3OU6ve6vgxv`8sX;n*ibJD;`keEp0EdCj(8Z(1>eyS2sde);exPV3}=! 
zHyom~%DrAF01w0pHZqjT9oUgAEG@2w6` zGU#Z>g`f%3s( z^u_`uPDGyFt%`R|54@g>pbt$0q9X0-Rd+Zar>nXWk=Doh={;{beW0b%Z8h|yOB zbW&bfRf-ImC9@$|3+o^ft3@UKlh03Y&wO^Wy>+nH63>>aMUB6uMz<+a zJa_H6YkD5*CVYW=D@!Mgc8@B`Y*{tOri>Y&+|D8vnCmVj=RO zwPCq7v%THH&Hk=$$fGJ(`f~BEXsfSOQQjHKh~$jlSfhE)Wi$YPHQSZre6pHaDQpuC z-9tlq{Beyh5`r#<%Suo7m_x2g72ZKm8r2N-QxUH81&K7x`=is@?$A>!#7ggGvZ1hw ze5GzthYYk_TwD|l_6K+LL~Jin8VHWj7#ajArkl>marqgs?aPGVg2zCs)e5NewO%|T ziD9$NV+oDPf0O60oUqzNP25v4)n+i z>bc#|WMtf8Css`8k5r?29kuiA?|&z<244+vT}+2NG5p+Ghpl|qI##Rl?f0+f6~gP!CITFPPJ zxz!iK&6{lvCm4Eh44QqSKM`=P$sz21tQY6Q-mIa9-8thfNWO^~$I=;MBu4No42uZ} zBhqo-nPDpfNiC`AG)G>Qtx=W7g-x7Z!S3L|=x4J2?4$Ak6U+E?QlqBn4$%=`jF#lm z`u`w)`XgUd%F3EB{GbL_U|9Cyi?k78xYp{lvB!pILJM$p5N>{-F2Y@rUPV-xm~96A zIz?*XVBwt4+Xyx`k&v`v(3S)uux8(*+%tVdJP~hwA4Y(!(;NWLq|E@x9T9_gQB?Q~ zZ)_7ze9s-`IQ;&8YpW{EIQ-FmhQ z8IrV~&p)msb};fmc%Fay^PRgdYKcN+IGtpyAE`#vG1VBp9Dk9F2Dz8go&5K!lehFt zxfv@s@Fc#rqeUnBk(_h(k8nY~@Iv-ZWbZ}_gMkKAvna-|pM0xU8i8=$bv=?x4H4rC z`BL<`gU?Sd6tQZ{AhJfc+ty?J>@X#`r`?gqLwRQCA^K1dghV;leR-Zae@2UZ$vWtkgLTVF z*gQ5kw56ppN!+JqV`oB4pOwh#0Bj^lM^%VSs)0!ai4=CCu4iA+?gPmcxS=BLi9)Ch zuZ$gw8ZKBw8Y^x$YF^zeb&$Qw1Ff1#1PBS#er+vy8Aj2Q>xh9UJfr477DaO!H%AA^QoS#58~|?gJ;9B0 zP#4O&ocQK|xW6LWF19>)o+l~O$d?()PTAMu8nMqc z(VYlh(?sz-VOrH=3;5;qnPf{=l7bF z0>w1omm^E;&~`zhwlzF3h1wg3{V=C%ZLz!VSJ6vcWJD2=uSx?L-@gez@JMH7XP?kv z$v|;67B*}vj3IMZHlYb$*C60ot-rRn;m)@8+d1b@-58N4A0vqNH!`GcRtT=d>1iP7 zI#V>ip5-h!s~okBaxx?p=^5SlH`9Om8Pg>mSyLml_|WcZE6Zz?NV$v{CMnC)Sy^h+EY61h;6Hmge^z?66o{sQL&#Q6x6(w=eR@zlsONs!K-^;&qGoR;NNYy>vJw4`(sxy)Q|6R{L_bksj z2LpdNQSvx6wmYcoY7+J{r4?+9amiU3U~|p$TbQ5GI$=%HIz9ce8bVU4C;L+G;PPOl`gyPyCo-2AA%jBZXK~8&QLK@HocHn+1nS0?v8|Lh8v7B%sHvKjnJWq^M)h% z?85V&?WP)Vk6a9ICq2Tw*NHbWSK^xK)EMrm5qB3E&A5cYxyR4ykKNaDq$GLLpJ$D@ zi1OLVW{JDY{$pLDAf(8Dr16F^sXDgBI2c&!W4ndmA=cy&iQgaI%NF2dV~@Rt^4oJ7 zVFB}Nb4xO4u6vOJj@PIcre~>_ckVIm=4K{Z55d{V%p**3%yk)ZKJ5b&(XZkDQ>%Qg zkR()uI{rlcqk2S&wxqQc@cQ`;2SpWek7vh+Xn2PN%<7;Di zD$ad%5hXqoBgcjBvMiwok`{8;EFLA^k<%d?R({-)T%(JtW#9JTn}^!*F?5jJu-cnT zSF~t58T}4w79plel!a&T(I55T((HmxGVbJ$G&i_*MgnqZztp+{B!nkeevGTiL%UQ# z+ZP??!X5G1qd#a}`X#-)x@kmcyhi`3cMH{&tlVO%!#kY$aLVOchn~A*%hjqfy7&Qd z&V;6;ZgvBprWtqk(@A=tdL*A9o4x<&_ZncpLXYq21dPmRsDK~jc|pE!YRzzN{zPwb z1h9%~cSZ>UrZ8se_b_T4zwa?pv?=PX==)Rzpc2!eg${WH&ZNb;lmP-o=dK#Pd;F>9 zHnr}0&;iODAqF+pFcqmc#)=G>=N^AE3+hJSAO5LIt^eNeccphwnTb##aPc^f5>?93*-L={tCQrx z+Dq80Z;H6mfXsMSCGnQ62zaXS9|h7+i+A?a8BKC(J~K94A!}HYB3Uov9c|03O*NNV zbEb-i|MB=^y@IYcst=)8E?zZpm6{g##NQkme-f^aI8^UComu} zzgBj-e`KTR(-SZHji^BVK!d|O*SoF-#0T9eEJ$}h^R!)YW-$Ia!XMT2s0N+Md?$G z1wBdt*8H4!R`?XqxQK{W_egLipzdm*R`uc{2ql+oow|5Q-JzO|QOV}l+#h#Cz}Bl+ zHQUpAKGuG`!P+kV6hVPoHe&{U9jm6uuQ~R|Y4+og7m)$Coq7s=&W1b~L}WQEwgNpn zd($?z!_L+Xy;lx|mYU730=T9#>jC4e9jn-T(+Lu`(eAcp#HNp8uhb@-4Cb8lM^r?_ zir_~Qo4Ea?_F%EubT}jF$g2kRkX`n;fTuMuV6prAk-sTP zU@>9~2_My3;PPATjLo4)98hwtwivwm4F5|9`pH#(8!~XLp3ZRT6oOWTzgu0YOwrq_!Sb$`^VOa zuLm;3q&1-QQCjf}s;a^q9r!^e91aQX*>qKp5C;t0OBA;}Yt^`@w9XD5kyv9{TdNkn zv(UA)!oN!20=kkp48#*hPE$mN|Ib60f3EZ~IQ9}6t46ZYH)p{EVdw+bpo4$6``3Hf z>?RJ!+=J%0#E1qrcuk8IK=MkqpuAN#i}YT&V9Ua(GcStB;>ep{a5zF7F$E6q{OW>@ zr0t$l39N<(Khf-{``=`HB$~>RH;~lx+f{O&Xc3OqZJUT>>!V~j+C?%?w)L4zu|7fA z`;746ss!+-=5D|KQuZ@j2|ScL*-Dm&hq&05EIMC&R{fBv&>1AIUe;e*OEZR;!ulg9 zMMQ%7a4W*9HH4Y0S%=I;VGaGzy=jAToFHpapewuUerUf6Kcc`z#sFa2Me5F8mx_3y z0bXq_d9bG^&Yly4!HNj^!%nm;gcBkBWiul$h<-(FBNc`kr9(+L zVp69v{;A>R;<428+(uvW-(S=bgyaFDe0(*0Pc0z`6)`%ayvQNpe1}64T-Q85?6Br7 zQM;v7iptz|m#T< ziBV(b&dZ{Jjd2)_HYJqgIzSK7sVW>H2oVRHM~DpQpb??_?CNNHE8Qe<0aA8_d&ut3 zMr$_vA{-MQowM1sf;iRop`QU1sIf1Q*xW!JEGSXY@MA6H)v*y)qv2Lyh`x&`|AQMu4;M5HMxy*azrN1hqo!f}93YK0Kd2+%s=Eqhv6u&@Vd6_5m1w;dTm{?|Vq-_tk( 
zn60Mqfmg=+yBY0#3jbd&arim+?82Yoh@oESXCf>kJ#aOAdMAmq`dhC+U_l)SBYG8~ zoG75wC}}F0B*_4s(gW@=?{M!!Gry3K*OXMA6m9iCbx&=q+1!|tew$5+a-UkPurAe6 z5Od_8`S;@k$sO@$a*6)4|5po39RAfgU5G0^`*<)#*yi*joocY-s|x$OiK^XdX9X*ayN~PeT~|-N)1tkN&Q@b@qnHTL#YXr&jU8uXZ5U`if1*;< z`&9A#T=VAk3obVB_Kv@PN^5~`mEtK? zS7t=lFspl-)x^^mG!4wq=rJB73=W&}q2tP4e^QPK06lmE$cSs?9bl~Etp;e=B0tceTrdB-{XF)gD#Otz*LDu!i155`^Zwg{c#Me_HL2A)6*=; zr^B!H4JR{Y5F~5OAG-dY9%xT3wicGu5RE(h_LVdCHqJH{pVjU!?yjcS4JkNa4nd9D zZ-i=!10&VwLg3P`Sg&pq`HHTC0teOia4f06tQ^_)${93m3@=jkf`tTcZ}Ci;8B*-`Y=i zGIpqKv&@lI+nV9<&g-`)p3rDIp^)ZMw3_-M${$;{8yUBb#EiK7@q9#LA?s{^k`D9yALaW0at?9*)>={?2a z@H&R@bXU#dH6k>{xOcJ)^sHt^IX?{&3&pF3fnnOAh?Zuxc3`7%Z`UFW&#ir?G!TCnYj9=J4z(bLvkN8GiH^i5YA{|9??Sb)XH+L-X zQ>(^`)X0sVc$LQVG??GEhThp)8Q9!UnY5YZ=_V#5MbYn~hqVch+0Vkc@AtH<>^5Qc=Lh&@={G_<9u1wFWX zQ$&O&{Cj;y_OX)(t3uz=eABIqdh=H<8vF>K6`@|0St$-B z9uPfEWL~Z+6#tju=TQH9!w>ZUsMeR9(yW+jNeZMNFGXJZByd8`$c3v(H+BvrBMFdN zk^$;v8mprGB9i*^Yznjs?rc5U+XKKZ`HeXtX+=gq8~FGWqNELcrwtCpdW4_U%dt2r zXpN+%lXKW3GBUDtX(zf|XgQ2h`ob&&4eav-8*boOsnS610H75ibF-d3dsck_Q^UrJ z7^ao!Y&)Wad`qI~B4~}p5-nWF#As%NB3HqhB2KeT81cJgQ$t-XJaV z4T3cDmk7t#w|%MzB@(#6B-4}~SuE@@pS# zn|FqY8X?wJG9v&+!wD?Ymal%!$9HuG{}u_k$^M)V!$FrsvzMB4bx!3a{K%p!@*qRK zSxbI+Co-XAGhAV|IAT%cHp|}#F}9nF6XaAhVI&`U*Tgc%0jT;qzt<#`#d}-glNp0M zBCl%Yg;|A7gyB;(x6*MkwG|TJYbK1~7FkNnu5PBSq&3%GYRpX;4t2%$i1Vi`Q#7V< zNwu|GFaE8%yT0a0l4Rf;O&{OEW;*Z@ObI+NO_9MTPaHBDr0dBcr&b_@X{t1+&l@+j zVZ?c**Qp|7=wV3%*rFh4-e_&i!N#1TUW9o*{Q=NnB$s%#6a}3{+uW4# zoN0(N3%k*{!(EkN;!_uFtX%5YuNv4>wT8sP8m3ave!+G??Dd*$BOuSB=!fqgf1)37 zPjwt)^YrVdG?}oz!q@c*JgJNy9!itu4;k$t|)k?89OAVi(Fedl5Ma$RiOBtV*H|zCUL%P!-xqCkv z-YZoA-qsMfZla!fc7&C_Qel7oq8|J~ZY|6Z-)Abvz;&nzQqckY)0#bv<5!=rau4V; zAR}80QS~a%z~sPy#c(fxVuD?0vI|y?izhFUdTR+Mm}(gQwbZhg;WtktXWNfUFiQhQ zH^qgfCnmY~A)}6SiTvyd4PeT_JW&L_mYyQ{iJrRc0!^>lR)@GuY~fpdkt6G#Hij^P zIFaYb=b6?vVE*9p_unlvI9u-2P;lfB&*rOw6MJ(-f z-YWxg7I$oDzFdJmX1LTu76mZBS{n^NI{rq>ux55g@?7maAUX>XPrrUTV@&Tq`a@O) zdj^|x_EDxF0t3PTo=44bwfI)t>e;H&C>fXI^yNY*GEI~1Z49)Y!1Xfy>9do0f^ZWe z*yxS@uAvaz^yptc+3ZmbZRL2pm3bv*nvHhTr!eAN?-u0mr<&3zXjb!G#VChtu0i~A z{7@T;rI(Z z^YnzJ4-6M5;p!*ck5S@9o3p*21)o>)`UqqFV9Tqz-KkJ^%k4?61NkENaTiEw!SSow zZ|W}!x`biIYpXnsm?bq>AVDGlq||Xne68hcCPnV~%BnXDNL!O=!BF)4{G~i}=n&XQ zEDfAF<0{C()h(ewf?F0DA)rxyQO!W9&DFtA%>^!vL%>BNoV0GoDsme|1V{R7^)AOn zMp8d-+2jkE0g5PRI{-O7+zPqMYbfQ_B?M?ObtgC@cW%+0G!U*4SihD$R7n^}q=~9- z5t1s~sCTsPbh_bzh;qtdTU7stqeLQVFHA;{e#a&CQKmkl{tqP^=GV1Izd5gSi+dnd ztC0giKXGFwX8^q|dRX4^o^yVjUgP$evzlN*n%NI=1CVPuj~Z~y7^t1d<8rrSX%Pnh z>0wexso;<~PM@CmsRkF4wXOayS&ZbKpPmuD7tO5%xK-SCk1pQP6OaT3=TdP_e=6Zi zBapvH3}Z-f2-IHlP)r!Y30aWJ>skR$9*`1m$%A9EgR?PT4~=1uOQUEv&j{-BPo$gq zmxY+C+oDw>NJCgIT(STUC{{)Mwooy$?PIL=3m`6F|TJoPit&Mhmy>m-L*LAVfM zJln9(Bv6FhxY_VF_oUWyaTLSm6dId`grL?P{Ey@H7@^Om#wg!{9Lb4og=?$)O~aa* z6qDkvq%0@7nHHim^c^qhy~v}uzRY;s%UC+4F}|{p_!m_X)^0TJHkRI z!W`@n-=%?$Eg@J>MsD#>$A>KpAGij9&>E?A zztUuwbSbS!zH4 z_6{y$cBz!@0>esHG1)zMN(bw|A%l^)LfCpo_l0lMV8!DodF@Q3)5V~!ukGey6j~Qu zLL{K3f%|MGBVpq{3T(=+-=_+e zg5L|l@f&Ri3e@;~s3o%W>P&>q^JcaBxPHDEzN?1FL;=gtvsbW`#q^U_=A&%pAzGmx zLEHt4g}l(YRcd5 zg$`Z3&Lz-!W?s#HG4AQ4ae>-}{wo8-Poxt#k-v8a%f8+{=w|kzNX%NNcnUwS zH5E?NV6Cz>asoaQ*>-mNJGUt7R>@OfThs-bnt#BvRn>Drpb*ivi3T<2#JOrW-u&JM zivI|On5&8gA7W@4&0=_A0am9gy^h-sf=zEv(~9!`t`XK`i1)aQrqrCH$A6PXr}sbhrQsS=H#^PY29L3^{mE7A^akNdB(;9dp+Fw<0Wy?-o; zQ?3aCu%l=-QEa3=PTbWLrCaq&xQh2GM$B~@`Mk2Nl4-CtxopmU`<0A_c=*xq`{eB0 z_V5!t4Rr#fvA0O;0}J&fLSf_Qwd;#y!!l9B6)9Vp$An7=OOlQWS`V9EnHvH3vAC50 zzCp#6yb&WM!|)po0Py$EM6JPbN}!s4Bofxy-6pp<^f`u=0Jl}fNYDe>%orA-$&hfd zXZ7w{U7}=aK2l?gJq%uRaC@KzZ3BP$sdIV+(}h}vbhmu%hZqpJ7VTpMPo?JuVfF6t 
z9lhAy+uLz43$}JADaN9$nd7E#t|w~JS_UeL`h$_WuJ%y0Zft1sL9OKu$)zf;UEe3| z=9C7BJ@LciPsQ)c>56awz-OZUUDC6Sl946_Mgpyvoo{AL=mBnYt3Z*`joR>%plZUV z^%T}pYAlI{3inAa)!DlBn)Q)G=U0K<==CtZb}A$DIF+%_J?pos^*IrFI5j`rG%0sh zO9~0!n71A78R)L$f!looCbcz<#Y=5{jw(m`%q>?1KknlK)9fPC2`Z@MgUd78o85lN zmQulBN-nPI;9&={-y~$D+=fybEi&4xmsyg~VPw&878;kDS}jT!Rpu59g(7Flp6hw^ z=zIF}bi?Hr3ryof{`Z5S(Mrn!qZHXded8=4=}2F_1q4_P18@0H8c0Y2={3dLpq0Rr zRl@J|JuWf+v-(5?ed@TRG3-w2R{l{w_#dfv|CJ|lY&S6-h-gCH+rIM8@8n-07VF4{ zmx8)+8Qp?`0r}WMOreoitKh5_;izS=MNt(6E8rk-v3 z9!JorF2zj~VuxlbY1o&#)I&~57K7ol_GY1DEjlD6t2iq}UJvxp#95aGwDMsK#A7FH z8Sfr{Dk8fXIUPH;I5Znt&q#(9!DuFy*Z?g^T#>#r;&|cc6j8OC&n+i?6y%h7)Y*w& zYPkE8l@WxS&iuB!6G3R&sZqsW^;>|SYGGk%@w^W8zv9S_c#DqR<;z-;xQ?oFHIXMK z@`q>G`2jaNy6IWc%ZWvEf(rR@72a#=9HXV)-pENxZZ%5>+PQmcbGt_d`k3cIr z+uI$<4-J=s@;SpGqH{b=Dx!i%(`t}f*@y7e<|YRa z&u+L=*wd~3R}C4(Uy*LN^&1)*hkLV|WQQbj0Cn#+d9l`Fb| z4x(?%r68JUvU3xbnm(JA>-u4TN4&I#Mn4Q&*3y0nGkpUnL%qRC?oS)Dt zPvl}Ey`c8`%!K2}Z#qH+`*wfTdLz^RI6Zmf#VNIjljinspe^oM-G+>nL9x17Mi|sM za_V`%UtQUqju}2Ni92fc&U?m@U9w^*Y z?;hoWDyVAxeSXdiW9n*CM1l9xNQx`Z>PiUQ36^1|Ws6WBN0U~+$ZJK9%st%hh6PU) zN^Mm<3nHo0XVo1b@Go;FzzeAeO57@xHWS5I2 zksVZCh5v>{f6aPJxJ!+g=Y&Ld$B}zfBO{3>fv>d@kL;_X^uZe6!sLWxfjf0f@?ZO$ z4HZ}2$bTzt(_SDEXm0=5belXPl&k~Wy8yLi+E9E?JsVg+l%Jc>GGDBcxXqGZB!o6C zba6B$#;%|BJCNmAn?Gx07w4Df?AhZGRu+=De$!@&VDe(lAmpeY)Lg`hZu-F?YWl$r zQAtV|c#Wka5;CGqL`;|gwD3=y*CZk7uk|@1Jv2e`AcV+cmy{&6qH1W1;X$7qzo*wh zxLFoPdPpxNK!i%HIP`#4G7Hqj#m20SJfS0On9{HYb$ywQOw{KzO1L#d$OkfZLp zmS?0;X!(y;*0a5U354#c_Evkl0bNk%jpe0zwSvq}bgNpKZ5YLk#?rNSmaX`HXoMme zlZ(`Ao*nrRzb$rAS4Tp`u0ktMB1l0@&d=N52;tc?7rYf=N~|PIAYu5`Jicf-k+WZv(rJu#^YK_w=_Yx{QFe zb*mZ_wx%<`)alzaCk|0F;tDlgaH=eJq5uh&U7QXS>sgtC@kdUq{>Z$!I$lql8rP5^HGllh5eKuv{M^D|UPxp1`mnR40_<$RMm@1v3F+>gwEF>C}2s z2Psfzi;Y8!(u7Go_@zF-!aog}`a5g3HZC-RRgrsIZM2J`q4yTharGXl6f>G-Q@0u8Z&p>Pft($B!@K#z(e0!b5!zmY)Rg6uL0Ax0Kq=Y ztu=kGjgov?L>(9ALUq%=WA#9Otdt(?G2g03q)Q^jH0+tf>l^m?;cn1!Ap?p^5es~% z*7{c95dqU;g+M}jI=wvCtfKg)(!ovw9BZrd3(ZAs2p}f-N_WcwMCe;1e;y1?Y&Uk3 zyK~dm3q|N}pj~O4SAmjgl1Ql-CPf`1s~D`4Bc_St5S%ynn(1$tY0r|-9#JTe-?#nue`OcOsC zXoj#X>{AIZE@4FJyuvlUsf`4#T{ribldJQel83H4;i2PJp9=0;f1C|788sZvJZ zE^rX0%}S|-2Ai7ibdEe>AF)+hhs6&KPh;tXpd4i3A%2j01bLvnF+RqA-9a!$y4@{{_I$e|s?e^P}H$>_`Ew2G=vJ0^!>6 zL${NiJp8*XZP(i`esMt-;Zt=#!iL7*e(;mF67F$}2t81l;mpuBkX<pDS~@jHuG>>y*2z+C+W~=&|;Cm0FXO4KdBbr0DnBT4)*ZPG^``v z_?8NIJCdR!a`E377_5`DKUn!uZF+JCYsGtsMLMKutpN!Gwk!v8s|SIk^JXFPsO7LX zeZZOOyT|W{RfJ0{q!2xcThmZclD-U3FGSg4cEz!)t*k5u0V-4jNE8O6Q;F-kiv z*Wc;)%i%pCr`WYGhwuLQe?r3)7>!DXjIP6kESnVVC2ywLPB6!eBsf4YDV7^Dz`LTelVbWHY0S~OwlSH!qPt*@# z;^t*VC)Z2GFyiS=HWt+rWmqfmCEQ6L{?9i4;0XGW4^vBY!bP%+Sf{Q=IV<$7s8U9l zY_f$q$Pw;>0e;KcIuLyAe-TRX&{UYX$he-#H}(TIBtAB*tW;VFHq;gdaJ-qBBH~I; z;0-2QbC&4IoR#OH5!Ed;-Ipeu)#?I*hnFwZXgx#lUQTv_l$K_}FjTH&*uvWwb_-V^vt zg-rO&_L5G-UtzL8X1o8ozv!ZSwGvW~Dr+#Bp~im|XJloTBLAu;+1u{tK8(DStf-oP z!gnv+12dPl5M^}r#$?|KrA7(O^W21)71ddfM|i?EmX~~Bdevh_PO#$YRQQ8%_VD(n$epN-;1Jo+Ql)8_}){j zf?E&pQTkB3k6KLB>V~I<((cb(Z8x<%$L~O}e?lkVf9rZyn>MzU6rt+3xYLcA zY)mJ+oN3LrD(l1xnT#t^9owQuAxEb<8{T=$c@mbVe*NE_{fimli$0klp&+BWi7ATU z*If1&GsFGqi6@`bQY>AL;3e%8*}Bc?x$wDn`&Alx%*h!|B{Py53QnKN4sL-^Kot_b zKQ>{rB~IUKHj>HD`5%N$(+gxw(#O+3wU0Qg;OcL%8&+lwe6PD}`yV4A0jQoMu9UQ1 zNywC~?1A`G>oZ+PiU7@>n^J?1y?E_(E?$4voT}9SOR4b^2?~>1AHGX&k~j62R}@^$SwRso=!SB}amxvHy_2;$tP>c$Y?99wRg>5#IRZtO;h?tg2&#HyA`eubPQWF7c5I)fQbCIn*Zhj(O$NL zj4-MC4JoC+W|@VWj+yAKP2Q(U*y0!vye$sFdm+7QqBqkHPGXNgAAgYTleo*P*LkhF zhA?6WKh}iEz%foVtp#rnrsONlH#ZmFHg6$z<)Zpn*tHP?G7A7wI{3jYo zBS7sM^*X9Yor4aeN>H;86LP_h7h-DDXK^ubK5CIEBx4*QKGZ&o04LBe&GmJjlK5)|Tu`5g)c>ervweJN07MalcHa>T4 z0E1IcX$Xe~f^osUua^3&USg5}F 
zHp=97+qc-+3JkE0tXGYdFz3|X2fG9)teFqOKdEh=aw%@trjJo^ta~{3j^BLrJ8e+- z$Ivw6F4dxUuKUb`ZP*%Aw-%=SsNbtOul28cT`-UJvbg=;#)`!*aU9LGJlPFzr+S=j zT@eqVhp7IRwO+AO>=+G)G&}x(o=xEMKbFo{;Lvi#gqzq9+glyz7 z`_1_$Q}%q81faNO8L0NMwE@zm8zs#&yvr3Dub3{as~oMYO05hZ8ya@xox^9L3kRVX zk|i&x`r5GPWpe)ce0}@ITF(%>m`{ZfzO=Bcw)E;CYhbP83pwB()(7wt5YcV6 zcy+PiYPqw$qr8uQ-CT5im*Jh2?k4Fwu%>HqVZ#n8o{ubJC;cd7Ks~2N(VhOHHLOy) zA@FYNXtL0euDQ}^Z`rpYi-wj1*w=|_>9gI)sHV=AD7B&|;}llj)ce5N-W5Rt0C>ZK z(~3PzL_KnBb67YN;v`$XVPrR*BTZkx;|NSQ!`y<0NSGpBJFYvEEFS(R}BtpI1FqjAE> zchIlwPFqEC)HzxlgL%}@QPys1EY9YzS8z0;4C^s+hiH(O4-R(Q`2O&rY2m}O>XUi; zm7l&YB7z0l281=#exUus-n!pj4k21Dsx}%9r+5GAxL{z8Xspkw=|ClmKI2$rfWd5Q zwyEh}?r(R=JZt@QI*dqa0vv?xj`$w#YqFR7seatY1Zw6Rca`UP15djKz!k|c)^bqK zBa@_rO_`*k7HK)5ZxU8-V-ZK*l==AM*G_BXZ0jbXUiBU}9_}4=S*ij%U<}JZxa)eA zY3@p6aq4ozhQIxeVaDx}3P>Hvwxot1zJYJ}4Xs6QtGe2!2}Mfm1l(}lKpCzX!e~5` zCUzS&7_hX7vsxUg0iFA)TEEyc-yXTiYun;gD#YRPP&4x-tEavd-tdXOt(Sj!19E}2 z&5e-{>oVtTNF11i$xQ9tLRHcDo{757qLAgemTy56U5^D&GY~z!f)-lq-jiW>7u}X@ zVUZC;Qc(Di9ntCYwX!!zUb%*qH2}NXq2kV|3?mfY6W1C;qDv*4e0&lRe^!%-9&hUO zqBceU5g?;UqL3_VAp-`OxlRu8;#R{dIzv?z`~LA4DyW(7Q@Gd0ucgb@fjgyqxa4K7 ztRG)>m?^){cBWZ`YC|LPPCpVXe8O|FVZK6^!Sv4-Ke`qLMfPHEFH5^H(UH=jM(4P$ z>ZsHxZj($c_U=2`E3|c;@0-NBn!)x6~a1Ren4CSkolsUAPO>-MMp*Ync&*VFRgLuad{tnl@P7W?L^^JNu$Ow%V69^ErwY z>(8fOoX}h$^JKPx$;i64?d64lfau9k94%xVY@(fNA~4LgP(dbdrEMDvq+Ue(>bqva zQXgquW5SwdJ*@F-{T0pMfcQu)M4*NSe&$-8ytO@}zi;e0Cq6ztn0qd=Vw~cl0g<~@J9_CA(>zR)>R`XT+5$VK&WYWFJ5~+NgFOn z*df}imJd3h%oKtd(4m@g$wE~D^4m-HiGcdYdZyVd&z&fjdepVWu_V^X@&E`tU|q9_ z(COFh=I(^SoLES;SxRBdI1lqpTXD|+?5ykFHK~B(|eN!u_0^kOU-fq$gE zO9_3yw*F-Q4ZLtw%!5#aI3jWeT*ehpeR;Z}RbHshQ4|yw5upRx)s6yQiKdj~(nCl* z4gf@C^Ti40WJpQhD3L$RFw${`j@wVXE^>?XrYkPTu)MT1KPLj>Vz^6gg%jAE^^U8f z0P3$RvIZrjgNMrQS}lq)H0Lv&B5v)U=VYd=L#*!a8K8lx^@c{_h#E?AYBy(_(`tg4 zH|GOc=x^QH$k4X&-lb_BKKJJFH#)!*7fxr|!HrscxXjv-21mVJZaCzWY?r@R&(t+u z(t5*ELyfL&p!+url80f>QJ}V|Hz%$o@@QnOO}<0f@{l@2D_6|ITbCT-6O|#bQ`ty@ z!DL|NAu>Yk+N~QQ5G4*m4enIt==Lov66UPPk_Ke`r2NxL5m1w`ND6b2i+4B6S57+3 zNvF5=zsP!5#R1fIyZ9{9yHtv>X`eabZyi5~{~dm)j&F3d@Lf% z9{w)Ry%M}q1RH(Q=mRS_&d^*P?K8JEvJmV8*2G|Q_`72mwHA;Rok$I$s?e?V8E?0f zULsMe33RTV#Uj{hh0)yW3xOOv27UGNMNvw-L4)$yZh@_BVC9qd2>a8A_KWwa8A&DK?+P5YU5mNyKBvdK$ z_HI^X3_1tF`^Mbx#p|y=@gaVu$Le#a;3*Zur9Hdip-=GGnQ*O_VOe-de2+C&XPw z#R^G9bHocc`cPwH5$z9c&J=I34o#_wDtZF%@_7m_&2f4lJ<9&K=KSc}6jpdsD}^RF z#>DGk%x-yv0|1_5Ip;uicu)GzN{Ge@9|t&j5(L`RJJV<{X(L`gudai;1d6CzA!GyZ zQ*zVf!Q9_-WEemIyLI9QfG;MEG$YYvV{gN^PGCAhFtlX^1o?c{5EpA$1HvBl>9eJA zk@UQ6&@*@fY7*lNK+fSt!uHneV3CPXP&mSKdQNSutdm-NU^io&+~!MXb+#- zB+UB{$Dir(-rlR(e3?B|=po$))%?k*YP()&FVAZ8h^6u=$3(m~sVg!x^dE&6{Y(BL z_6KskyO)_8F}adDOA`reNV1^)Zr5T1Mv)9CLb+I+(&#%k(&0{((JVnc4$@G^=?HUSy|%!V}PEK=I)`EJRO{Ev z-RPU6IY$Nxjh5(i@tr4ct8HS>rWUj=B=oc=Gk{&rpMa}eI@^@SE!!V&mqHOw4wDGL%fCw@5aeku06s$7E za7$$j-4rOFAREne3~2RCC(hWZgbMt%h-EQs>G~3G7Wda)vv9kUlZtSM!YLSLv1xlfx> z+DMCt%CY9di%1+PeRR?mMX?YFfGR@TGtG7T+}W> z5ZANVeOYY{lfq)*1JJ9pGfmAe;v!21hg=WVIG`)()u8NMQ9`qALN6Yug>X5Q(z)s^ z^v{E3ui)-r1z$3`rbG^4+`c{jaP+_0SWbO$r>Tu`W-@(6-lk}&FMy-&N{$8egAJU+ zJ1@U{+L7?#I!1ghWif z{-9+g;!VQ>9fV}0;V*SI!mc8|(69JHq}Ec`58rt7I}P@k&dm;X%3eZRIlGXs&s!*X z+JaG{4K~;AD3L?p_TaKdjS8a<51HoTf@7#7f8p z=H3k|CvIs64*Cwxe$CMafty7r;g-;nz>q+^>!tP!vs#lYr1>Sh$!k)C#k+d&^y80f z7z;C$a*TW{+TuWx@~&n8i+4d#(j%rVwRptt>M=?=`KnsWQb$iu*m;5dEmUjYFs$Qy z$M0w&7eP>Lxt8HM7o~pgInl{ZID^z34xP$yh`a`^tEmyY(vwskFv+~Ot{^>@0}#TG z|5u&igt=Ma-S*2V5JiWpWK=dwU3RHr`{<8adrsK9;4@wRDDrCnIyaY={-FJBc8 zb#3G#c~sfQYjr&E2&*M>D9Dd!(xe>%M%NpB#Z?ULY`&vWLuW_58mvUhFYqurr{~Z6 zAYAwx?GUUk%uQ)(2-yc=SEG*$rsWD_3!{tyAuFZBMvSDS6R~Rc164c6c(paXbXm=Z zgDo=|oOh(1Yua6>B`ciUipiu57+ 
z6=}|^9ggcAhsEx9jA88U+6c+59FB2~ZuVCDIjh<5lHFUfa)tMG2lrUnPtgcjJZGdq zYZoGls?^ibkRr}b?SsckCs*KUYEkqS=5@8w+iymI+){-xB8hWv-?c$vr^4c8k?u5E z14P;(pP*xSTJY+GN0h3BTI*mEkN%LuBX6_DeSK}EV)kq9Xn5!LODRfGB^WialDtk& zyqwKp4A2+El-kBT-CD%ZpuQ+5v#L#nj-ccf&^AeENE!pzSz76&RsTZ{djvInmirWO zIOBTfYX11G^#2c5xF+=j@jX6Mr%yTA39&WbJp3=(3ogE8yrhe5oxYvE?I(E6A%HM} zs7VB-dzv$fTffrA$;w0QM=*rks1fv5+-cI>5b5jP3^ASrJCyH~^hGvg<4CUD+-&IG z;aj;c5aFe0#)2~n!LZY(Ybr8vM*jFfA9%@MrS0!QyU{2sLlox z_)EYdhVKn+$O>am_V6OH^nHJ;ix{E_I8GR1HQZCmi!y%NLCsfK~0`+fq{i0)LNV zC2YRNR)Q$=C6B4aw)}86?HwR1gi&8hSH!c0PR}%Sxur4O>~8kR{jRRyeNNDq6VpSm%JGTc@O;uKr#mJuL!7Rl-1_HaJRnJeHdJnb$zq zr*3#}_&#a#-X;^0G3zh}9U^Rb1f41K1YQ$dn~!CoF^efmf@Spp>b>y zp-*xZ4X9mG)|dxD>oC`J8jM}K=_xBnhR-xTq>=-!kjk1$;tDP5rIr?Tw^vnH2&DB1 z!wLs95g&RjdAxvztj-#KtL zG@Op_AKw%Gadv&loPP32{rTa86S@3>Feg-MYPRugQ&iE8Y-mZq`vZ9zi1H|~3{TPan*b)e`IGUGO>Ku2P>MoEo0%nTOAa4cd^76~ca zw;m=JaU}NDqO{r$M4MUEJIy0lOO>1$glPuPUJA66v?Z7j7Qp>T{tnrzk1)RgOv$fN zO2&|w>G>(G7)pAE^ufF~vIO$n{M=s)h9GiCLqB)nDQyg}m2GeRC`-Y2m>)jL-xplV zxZINOLX6)XzprsDW4y3s4dN)P4tKQ)vP2B0g(QGNNcgi06^9`9PqY!%G6JDln%(>p zg4U=YnpQbbNuiT!knEu)lIHUZNNl-)nEv`khqUcz@SOid73J?nP8k0!ZtzmW^U)u5 zs3%V8(+l(2I0Y9E?7PSOwRsTcRt>8zei?2v?=of&WF1>UkU>5i`l3TxUsMa^7w~m9 zwW7an@2{mXII3mTE?z>%ipPe~AnnmqW9qVRuQ+`<0qPg_ZL}e_M6cWoEI~eohO*(f zL~MX$;LiQKH0T+z8)}7bx$BvEj#!8rw)2pQAe+X&lCf65B3er8JZp>bVdcVv`5jc! z5_thPfsCs;Afa(aHMXUG*e*0DSjAlf78>(f%~k&C{Qy(qXw^^Hgw6i1HoYs&=~iWq z#NNOmRtxAbN5lY$Qll()*s;{&%P*fgug}9fzj`9saK!mlhC`0D8PQl{_OuX^-hT8) z9p-CKyqKML!ikWLj!K1CcbK?s9|j;(HE<*Ai3KPw^i&N$lETGzEg=Vde^Upu!-78G zzt8i|#-1PiRIkIXqmHHz!~(e))$3$?TFjZN9RcFt)+p4*#sUL`YbKuK@ zwIGU0D+BmElfj%mgwEOGU zEIER1Ccajfj{WgO?Y71vuQ)>At#IMh+e-(1bVnrGS(Hf}|2@T6Y zWIT%@F6QK^3ypeIJEi@)0r>yW@2A&{-M<9wME}C`V+lHU1-wvg2^clvzv?e>+D9-A z?Fsid9*?wLkoF}X7FW~5jr}%{SJ^>SpI1YhF3Ha{Wy+&>4SySf=EO(z0yd5hhEBac zfYy~8rHB0Jq`K}~=TkCoKFv&fw>2ut7t9xjvU!~PRO}eXzVQGTu<4(@~gt37? z$HbA1t#1u7>a=)xDg~8X=zk`C+kNcgHX*EJ1%)S{ynS19M1i4Rr`qLI_O~+o3#aX1 zVEG)v*|!4MD4B{x8kb(s@PoR@lX9X&AaVh5H~+4NG>sEF4FAgi6>CKU7^Q#JP>biT zN~BMcEC(1{akSZa>w3Nyv0U1>(-VfZ6HGu9h3Ka^%6KrpkMd{dHmr451{=io**iyS zj%Drj@XpUq=`$<;ndmGcvl{y0`u?t=)a$q~HN*lEdP_5xqcsmzgk-w(&?ueEQ36mP zEx3#J;xz}BHOY@dXe0W8`iCEXt%ay$UZxH;c`)~I4mrpGPps9-xGaDtI5ge159Au%wC2^$S`@UpWS#udC#;*#9;L z;CL)Zutwhv?~A>&#oq%oeEax~p)X!a!C3SOfA_J=2GNfF1-H+=uHECVU5$*uqLqo; zXEa!rwNc071x8ql_3P*K!ro?;&Td@0TqlCm012f6LizhgzgOePH=GJR7#Cf29E&yN zKj=q7tF|V6`I1 z4gen0y#SX>^ByR98YVt2%z4^Va5GPYjZJcA_6eC#R3mZSoJl;0MTVxZ*GwfGjSR5UABO4Wqq8e!~UO#f8xIf zCc1YL*%CO&{-I95*`H~hBKhfwvlV5)a?+Su)1`mN9aGm;90A=lW`aP83Rb&Ve+SMG zh|dD9kH<^fIw(Nl#1uyXUo+_6{`R-Byq5xQV4aTN2VrueddS0n7=Ei^BIxb`k;|@c z9$0nd+}KA0{f)wzOoqRC^1Oy033fBG25WlX;cM(YXK%#bs=T!S!Mm*8F8^0MeWPos zD0m^Mz`3_H6VXFcnQqP7-t9aE5hRyw?}Bdg_#?4N0a$ClcS>YPW($A? zqUBp2ejXUN9&IcwEw)hF)%3X7v%aDnMvgj37|0XANy8y<;xFVROdnVz=P0Q*DwQE% zyIO6`(1B#h7vPlN0?WEbIDw(|^K*o1h{DL5b6^Pk?x7_m@K~KatrQj7aPEcWGEi4~ zH3Ux2Ek?$%(cJK>5x!{)%)@sc{a(8j^Bkc_(+o%4+^XiV6lR35h-n$o(jwyF3oT1H zC{9+VUB=$4p35LtE4Rz&VpQwYTNstX&@2{a8dEk!Qx(hNA$MM6xBNz@3b@3#Lsv3J zPUx8jl<)^`F7b%`!4GNW? 
zv!xa zXfd<1=5>$gpS`_7CYgiMLfZOX`+N%xi%Dsp4~pMF`wEsFIRz25$O#%B`L$fq`h}(# z-jh$B(F1`((Vu^L?wrmOR2d={f|5Fi-;+Ju9-ZNxR|n|V+Mbus>p4z&ze*?@=5dRi zm_qI!e;{F}2pKrxxPsNp^UvOD_My+0__te9+s1CJnP$}&fJlfHf5q=Rs7p&^k6 zwMUWEP9JM*NXD&{Tc`W@Xj9dm8cV?`mxo)U?1y<6smSc480^5n9B3VE`uHQ&KrBi+ zdi5xlYl%n?8y^99c55{Ey+^;(fe!1ZUEEWdWQmk189dV>=tg7(*#aHmf4|C59*m&f zo^wl(JEA4RSb<8k~ zMnMkhCp?rpkec?8H>M)^plYId)U&n?8@NS<`Ym6AZSa{6)joe#Cjgl@5vYg}X@O$z zh;P)Eb?O?N4x_F_9Go&Zjx;wQBCgd#@Y&z!?*IcIQ`eCK$qAXT zu$)&DCk72$l_PkHS@O^2WPYi(2jFTGSM(t<{C^?Xi2oHahtGQy6RR0MJ@I<3?ID*TG}MkU}(t>Abx! z&dTH+PPTFl#29sh{M5=9;G+tsL7o|#RAB~@gD?N z+yJVcg*?eKmE~ZjHK~>-E;xoZYWnUTRM_f;sHNQAizJgptn!A)tIKqez+Hh9BWCd0e^TzzFllo5!+FKiFu!G%O zEWI`^8MGQ`|5{CldJkN`QuyQSh6_MXj7>}CFNa(x@E6el4xo_~q`u=&Y}w|!)pG)s zJtH)YXD!14n!|PTeJp2A|!{mE)rTXunP5h)NzF5i00Ku?!hS6v;Gj`DLQrE5;fAmU>g&^^A@4!k(MSlD&JW`>7aYtXn!m*{=LT1 zTwGjU$h&2#Kn^IZu6=0(P1G8#ru7G&-gYeob^rqbHa!t}uv}*}Q?uo?vl;rn0CX`}v>so<9L=9ZyqE$Vmho~WJa9Zo z7~Z$heo^kripy>ybfy<07!mU^_s1wP`u|!oI~*!W6)rfkHfy+8TCn(K#lFMMZY}hd z06N)7hj*|eIzTr~9is#$>u&PzO9s2Y*Paq8ppQ5ECW?q0M61r-9KLh>E@A;D2R*&n zvwL{F%GN<=Vc>cUSwo8*q5x8uM-r}+${hLf%V#Vmvau9m%WZ9N!T>_(z`;%zo7rrM zP*k%jW1s;nFS(e}qRBVTO%}06gWl+`8GLy_hw5YnhG`}EiX1ko0m`)hPyuT5lJxbSP$OCnC$f&rJIaNcG>aMGyad6ITXLq*} zxZs}eQ4h_@(ZbS+#RI`5Z`;M@bIZhBYIJcm=I0rwkQya*97POiZw2V_f{r{s+Ul8s zxl5aft3)MU%KHgr-D=O{kDt>DU}(dU(J97GHFvX9<+ET}!>E@>Vx-7?j#KrnUVv^y z1)+dHreTNLf=1`e|DoR%G>Nz=BJ@nVsmFn_a#g0?lUg^%cXw_&Pt!UjX`NMb3i*%R z|03HE`Zzx!WoSg(gZ=ar;51-Q)L}3gj)Twa(VER{osEI-p1JSfFyS3}N?%#BsA!J3IcdMH^Nnu=z$i2LkcK7;@*a|`6`14$FxP6n$ zr1JcL;LjD=wa0WR4RnNOivWwE{zLRYbT7X}nOF~I#J7>+gfN;eSVzF_C$|OB@yon( zN1CTr4^~!f^Nj?kMahSAPwBDg`Q=J%!;E@o`1kq^a|opDQQseafkOWPYpt`ZMU_{w z5#$RCsK}zi#>~vTk>99LjxqwIwwG5!OqTz&g{T9;vLtz(|EX38I=S(G{r@QS%6$A! z{a=l(v%PCe?9Ft;Z{zRwrc!DqG}6p0h~jU!OEA-8J4jn?a__nAy^YHFGBbaPGNSqo z%gpb!++p{qcY@5y`P1^A_1mIC1`@T#G?NOF0DuY+$qw0Xx3#QM=uAhj8L2F|O(=V&klF^(d2R1gWvf&-rg?`!} z*fEP!xhf`!?%s9ODwu}mqUgK@V)AG{^LwANtMS75YgPh5(P}7cp+(Hal4%&@wzWHbPpc1FxhM*d(>!6A~`yWWNbbt8a@ds*q3BQ5u zm!Pt~K7LQj5F!G1#g}>NsVjaEGMn~2^cYlFarW@eRC=8zUpV_4{&pWks7qGDDV+0e zz(GieE=uO2Rq%q&v#vL&tcp>gLrq`g#!T6pCFtxW-g2U%Y$wuqaq z^AV-X*&4NU?n&)JG+@?>>~AJFb$MR%zDvqeia_Y-Ph&aPXUy>?Mz(>BZ|g^zcZazDp0?y@tpZ+xT*&$X1d?fa zxmGc%X5bV;(HeDy)Li&CHiSY6g;A4_^tC0G>^FI z02t_J5{FK_qVa;*#>JR&F}&cQANW{p1uCO~T?>(VsJrLbAU^}g)n^vB;!_v7)6`Xj z2c+?agJ4X_#fZJqW0=0)KK@+u-OanVfNjJKLfr*lQ3M`MjJV4TH7*vk`#Q^{=HDow zo?Mpp!H8qz-mFC}I}Kyv!c&hu>BAOFSh;_|6MmKx-tdNwGr|TF($7ot^D}C*)P*oW zd3fikpQ&?T^WnADDL?Zw&Hu5?=zH#!B&&5ysx?_HHZP_~3F>PmA1#P^y9T%(irSMm z_I++_XMM)Tv+iWRd?8~*jgZ_q*z}4HRz{(@p4`15aR20Ug;WBpn4a{B3r|0FP7NTq zjB;-#@!|E0G&p)lNCn}`fD9Cnf74SrG0B; z`~se+lOaG5p?#@nd*2Sf*5*uKNg`5KUHvxqyoj+R|JT}J@TbCvenRxCPwsw7IajgF z`b^^(>DVb4p7{-@aJ*{iomJfQ*=x=v5$yD*Cq;v3e2%&VS3JEuwWM=YvQK_Cvi$1t z3#ZQfQmahsngmE&Ow9TQMU`)n3Ou?-yGHRi*8X^I|3A;&B)y@7YnySSs7&5UzNhu^4OuKIne^j)r{->LB z)xja$CvR8;#*GzSktb5f{8QTHzxKBrMTiS^F|T%HqNzkNtGoXYQ_4&-{ z)2G#@G(OAyOV)-);FU@l1S3Mq5wb+37$4D8e+od$@t<|vF1mIOrcahda|z#77U}5R z@ZC3q9AF`iE2v}95L-ToLmg2pa(fOgn?{Y@>uDBC5jG#LVLdy!$L*#04(~ku+g!4# z7>D+9e{*mUNur(^Q_(oF*uTO5QRg@%ZMnVpV}tM%<{S+gpWR81ot?b&t+58yQ3({IaYxwHBL+iqKn+1KWWWb+~351npe*^!2p@V`tV2_8S5l^7lUF1 zR@&|v^#dy~7e0+8Ej&(a%dX7ArqJGs6Qx6J#ct{o1Bp3wX+H1+8TU)kM;jw>v69$J zONR8g<#YD%KMn6|1lPMa^8O1rrZc*eMlIc&Bk!+To4D}AQ|EMMo-Y5?-tf*#r}OjA z&g%0jsX;^B%@Eo4Zu8cxQSw3yh&TBe7z!T=G zni3RD&LcMym4?*a6{4cs8s8QWcg=`f?fs!mmTQ3Lyx^b}H%Tg}Gwo1T{ek_!VUN!x zi2vIlOb;-cRI^kB80|(7tj>3)kDWLUcBb$kK!nfw8%q=4R_CwbE>*Ehop2Ol5))ZeW zp*mIw#|Mx8m}OZ|OhX^aDRcr^k@E{|ya=S}LvzsC&F&(nVOVkX($xIS^6cCRWXlf> 
zC3+J2LweS{=3CSHc+^Sf@tp>#*)mH2zI5k&E3w*B1-Pe$B7cEB%CjYxi#ZLvYF!Iw z;~{nejb;yZWIb={`@^q}{|xn$^+3a#&)5r`w>r9%hD(hL`uK2YU?*uF0K#^XyWa)n z_95JzWa(ORPH}H7jojQPk-$=e3XysBrgq?3m9GWX(q39@Pyk(XCuqWtgd1@ zG_bN_`e>im)mE@z$2o(^9doMvNfRM@Y(%qYc`=1T9pa;a`$A)_chP!{7X1KMXJ|MJ)w`+_st0+&Tyk*{}94A8d>r8 zM}}EaG^RG&q+0a}=vre07|Ad$Aa;733n#p&kwj2`YAiUHzKD4CQtDRW59x6U!3I5! z`@?sKU*eT~bNE1~00S^j=-Yr5T(4#kfzf`O(GS1pIwn|B&!Zmx`Pj@KHV__}B)o!} zFTqb8%+AF|FMImu<5q{EO3K1LW||Yj%!D{%zc|8p0xOqTTM|e>c@j zcO|sBbF;UX){3aA`&jbo74SQOR?X+x0pvmJ7N{)U%(D=oZx0}ew}#*38;9sLyLr&} z(#(skdaiOUvC^c3vTF?U9%VCFTN`=V$UI1k9mj(|W7}_pYA16=HQ!I2dy0!oqr(0e zfP%VmvoM!d{ZXPnJ2o%xtX3C8w^|TxJj8=Hbb6*Ioi#q@GfK{F(IHT?Kn9W5j8TZE zws#dWK1xqg@K2#{6Qrye&){8M5LGcPhxJWdsG=1J2wJ4`abW`s~w7ZVn z2sDG7m4GZDe87||-%sp>FR*q*R3?pttu0hMG4^pvG%a-CNP&4Ha?7@Yg76)jQR@Nae2 zejt1NYmFIy{k@u#L4u=llNw?mWGfR-6cIF*hiiUqR7yR|;huX!Eb?WWPLWKtfat1$ z2Dd|NwwQL3yzd>qIlQNphZQ@q5<1_s7!jQDgGayD))I9eURCt~>GqLz?MJwg_U!ji zfW9`9y?^{UR*VpJzHq~&c{+Bkhd#(k{StnfbX0`?X!~2?%s!GwE6#v-1+0>VQsmd( z#&##y8vFhVGH!s=MU`QsxY#44O}!$yeL8$!Khs3mfd`e?O8Mkc&s8f%nqec|?Ev}B^?~}a9jJVsgy-d~w%k%6KA%gJJG|`0xmFx{x2Jxq( z0`nJfL_&cz2ng~WeTF-xmJ}t5J}0v4`{OsYvV(Kiyk^ z_SP!e-s!*E@wrge6Hrf)Cwk4S!R_|5U#dT{LT$wggW zn5JTwT7|_>doaIk5So`4+9-eQya9&Dca|5!RT-%=Lv~PS;DG>E`qT)Vkgl~`?KO3Ir%G_n2t65*_|z@2M{1q}0} z;XB&S!4{>bMDgoLar%rjVFniHrgTaZ6K~@kqM|2x>dfui6Pn0UW}#~wgf^%j@DZ!; zu!C1Sjbt>+rfmki*t~={P;VZsnYRa77)l(nKgq~V5eP2Rx9CFUv>mQGe7y8ZK{lK#$AP_tKutY4Lm9!jyJpGHb&kIK>iu=p&@{usnjkwH?Guz-ge5# z>vnC15o3S9y7fW77+^AR(x|wk_XZ{AwYnqXBmsxl&R&l1nob{UzNSN9e`NdZZrSx) zTz8SLckO?dFbJyjuM2Yj*k9EXhSN$OL4$nFHeQ%dNKJY&95VXcLV47Jwq`3(PCyD; zm*#RYPd$C=2~81c{jh=`Kv~;4$w=MLSbTsWlNA!uIf$pZOFdk*RFE@bC4rCG9#ppc zu$+;mPY-uc^iBQyI~`m5x@$Hd6pNkX-Xl};Afo8R=3abEvprh1dcg&LLOa4cu6YA> z!B?zz)^;r@fn|3bJ2a8`*!Tf=pXhsI>sGke>cBc6GGqmF07yiC0Eo~*g=K?UJAPZY zF}Ow+v)CWJOx&<6Q@*iX@K(Cx3#AvZ*?vW&Hw*6FzL>=m$z3$Rr!N>wHES;)=`4r^ zQRnPtq&U+X3yl{v)nYh`RMVm@RAYNoA`s`Mj%%DmTz$pcY%&25c#r=+(zMzm;Hq^5 z5Hr;xt)gmVAqb+77eyo`o>W6%tu1rxV@25XHTm%WJ$@(W)Oii?mtI1gG-GIwmR{sg zEDiQ`fNV`!o1{yb#R4C$t#SB|?x|^RN9QnwKbh|m`{1D2s|<4r$P!|QyS+#J$X8nP z-JWfskq`^|3>&yf_7-)mvLfZ+z==|EU}zktuAO;8yAe9oHum@vG!|wrT$dWaV$@c% zt!RqWUQI(|r<)gBbJdmi6aHy45RNZa8H{e9Ig@LSn0jETE5hJt8>03jRhjcy8nq){nd0J}@$kq1^Zt|Jr<2 z&(SwYGSmo%K6Jh00VUvkGn`VIbZhas{23{DK9*E8-ylYyd`|9VLFVbFHStbBQaMlQpoxb65ac%pqMyDxfFpu<1f8X+2pkc`OqsFNu z-`zVD9}L4P;(Cg68y)*0SL!MiQG^6AM81dBlH|V?({7KhQtjZ;sxLNpR9u!B9oOkp zmR+*u4lE1W{+AuCfB-w8!bb7Q<+gVBSrXogaJfOZZNnJn@XV=Q$eV7v2L|b+k^+;? 
z&=%1mTUb~j+Q1y{_xbz23nUSyQtU#VHAwN2qP~90&g^hKXaz=0poRWmj@8wT)k;%4 zp_#A=Y8E=MbxKp%4Q*;|b$LyTH03N5%tP9lOI9%QaGdK>mmWBFMvo>$1W1=VbWZnqb+|v`9baBWcfZat!JmHzQoAE#M5gS zxuNUTM!?y+4hMy4FxuNN1iGw6A08(pfg_%muBhAlmvug%1^~N!{H8%LC--8#j*h6D zLU2O!UWb+vdZ)pm+%j1z#|-?eMw{@Qe5)zIIO&YB0FIr|9SC%sd7$6B8O$rcnf=k= zJ;H?vbLa!L{7Kgqs@L_qrf0?ieaT{Fl^nxIG6pVPwXxdCy(Dw|QAh1$uN-jLcngY< z_m^6~2to^iTWQE75Op0RX&DI*wGm7}5_GnNSbr_o{KNF^m`^g^^kEaVz*&J$^5>Mk zndd`h`?W2T$rU^fqI=VO8j0;|y2)Td&Naru2?U<)Ut@l7JS@>A^Ezv2kX3MO=@X{~ zZ;Pj1+VRM?!L$w{`dqmbBf;Xpgx52mAVCe)R&8y&@W#+CAZZ#i|3zlY(s&OuQFF}A z(r2Ez>XEwg+S*H1KfTOBd~ldNJ+aJN6wx^_D->9jUPtiw3(?@lm3Yw~;J-2h^`5RI z)M$bCL5^iws5oGbiuRoOl1(WX+;f%J%yY}pj__t>k)l!>Q>w;y9r}{6TSc~uo`!VZ zrPVs$W~08D@C=&wiXV;eYDSx9RO$?-9!7|U*10G$CrW_ZNP}JWkW~|ClO_}w0w-$F zYFe_r+r^6(QVjW+idz^B{r---g+L`5C{wLna%P)Vr;j|i?uv-X1_>h6`vit+XXiWK z45Fb{G~5weFT8l}oHj6(X_I#9S`x%wsph&as9D0Nl{(Z6l(4VeBt{c0 zm+oH6;oa@_mPV&KgLvt2sr-pLfPOOSQmnn@reo=HwV|DnObsWkWT6o8=V&oYB4*J5 zj@<+m)fU40udzJqq=|MS4zn}8+Y38`!-4>pxx=&&SXG<&OY|0mnKAPvT#x!OW`Ac| z1BAh_ehhD@5hw4QRwL^EOR`Dz%ZpiH`&dxYz{9(anMF)4IX zH!tN@XoFImYUw(xpcPFoHy(7xz2LSo${h_jsRyJ+sSBew+=Y9yvZMzPChl4tmdyG_ zkAO%VT(h1-^yux$`&t2LWeupJe2S-cPrt5#wqP$CP))Vn-ua)UFyEbd9Eo5Ro^P*_ zpvFA*6x!1L{E?DkVA|=+tEeXbrI5_Ma)-%0p^f7uSoR~NyGxl)yB6ZH$#I*Z&h{8*6V|@KEgWH}cLEW5JZGR)$&GhE z`p;8`v>>rHUh4|HYQEV{R#n^^Mj!7_-_i^%ILXvOmu)9SsHk;F!PG#rv1Jie?vR=l z+Q}&^;b1AP{CBe_&z$4`SgzUs|NH-skASP*mJVV%Xkvxszdvh5@1Ua-B$7O^lD>Y* z;T+j>HyuU_@+t>8u#M~4`o_I;kh!-wJ@5}^!C*U>S=^pQ)dIFM>bYjc@v@I|IjO2@ zjp<&V4nR=o$LZ(%`+ii$q7=_Usn(D&1Gvj~7{&|-sl-vMeM_6RPr5;5grFMGFRWM8 zgXqjQ=_?T%sjoE}YEq>}^&f3eQ+#ijf315UWvd4AypO`%wS;iJ5+(i*)1M?9O0$X+ zKF)mx%Biw?Dz2?Jo;dmZi{~_5rWs=O>N{L?p(eLSE|>`Op?qmrYPCu|AWg}&)g~#foNfA!VDy3O*!`oe%pR`}_JQBJLg8|ZTa18U=m*XE-G^2f?L zw8b%b(CV9W%L*}Myj!MRSrMFj{57gDAGMdxH&>j(;*WcC#;z!o;muaMKUtgx|0T zkv8BebR9dGL)pSib@MRAqYeXH65W1e3g8Qe38|m}Fec#iSZC*ICnmRf~~j$s0dUL*->SzDb<$Pf>XO0#wIcvP(Lp?4HhcU=rt_uExO04F!CqU*yNb`jzJSNS>qyAMMs;TK-tJ zjQ6(G1#`Vb5-}&E-;%g8)i(f!iF}HxetP5lQE!!R_kC*((UfQY*GG& ztK}WU(S_FB>epJF1>Sq>lr|e?q4waT|7-HGeukOXt#KUD#Nc97=TFS*?%pj=9Arf! z>M{-mqaOTLOET$wVeC_iCBvE|-TS&v6;9LrmP7x{YXBJ;9**&zAEG!}jTmQ?9iq@) zeL^tz+o)Bv=rw?DpHaWqSMCW!RDx2?u#BKuK?}LCu~0smnIvnWj#!SahJsjE4#Fh- zM!ult*EG$E7X>jmG}SD$-+>j3iB}G@m*X|V=a{{`Jf<9p)*h+I08bhX4g5m5CtrR^ zzie;$fkfZXnq3JW?YFXox_U{EHAHf1;_Zqyxj zJfWaB+I0Jk*sD3Xd9;rJFs~!jUN5-OC<^_#M@cgV(|wGo<)}Lf%W}F(Mvubz8j{0m z#F`=0FoCUrFzn*{v2o`G}vto_+p||5`ld zp58J)=XrRGJ;wu$A)$_!nPc4}D3-eDyNa`*7|4kv4!vT6Ne9kS(v@%v=ur^#d0d9G#QALi)GTNYDRZdFn$YD2kbwKbgz#0Bz#RPAhQe`!_+Yi6o)N!RH75*9Y^#6d^2!I)#rM_ zbbw5uN84$++tNZKzNu>z_tqPr$E5f12KJu1F}<5oCgQt#v#bwD`RkdN)X3f~BS155 zF$}U2f-|6mu+%HfBoY1pRdS zmYTOxt<^jy^yTAuC;Rz1568Vj2{9zb(f4B&Zt*;PJpEXU@E5y0d8>3|<7jUS$*hKy zkkgr-cK!;SwV8Z*XY%gkkGd92<)Pn5V|uQ_mCA1I7zD{MJeE~2Pfn?Wu4%@p9TKs7 zESsvpt7UJrkaN_62ipr@si{t1YOK*jP@QnJsYm z@_YeX5k1Z*PJwu`L2nMaJ9=^w979*3%wt3^e4VVWHLFWk;_eQ@J7cdiMD63>x#WG5 zIR8KKH})Aq<0E`Kh>__}`Yk75Ivk41f!g6GPCTU_OB;*&aVv_~Jo?`!-)L2%EQ}9) zwK(BS_%Zt!R`!|*^IJ3W6!o*q{C&zx5Zn|&5H)YsNL zb>zIGdVb?1YOSFg&$l!FzCp{wo}4d9M~vN!pm!w$FVA($-O$kCF$i|}aVIOoXy%?} zm1#A#kw2NjJxVb0JQ+N(`)PaBaVGC|3spCyGKC_jMIQO5r>~yUkw5_QhK3=bo&7jw z<|E;eMNG|T-2|=Pih6=T*~u(gKqq0UTIx4WY28K~siB|8gI(9fEn{9>GAVKjoZqll zNj9n%SU$ia%{v~}5F0q1q<+9HlgFfi<4`m9U3JcCBnDz?M0gwon!bE_j335b5gQ!) 
zC0yAq%0TNW9@dD#cT;a@!q_(E>qJKO zkF;%*tithE&?IR9NG;^tL^Qbu&Hh7j;AK~<>ib@z2Z^K`@vzTmHuM_;2eV?KY7<-) zr3LjUv3nTiBm^wTIs7p4#!=iVd9-GUDSACJR0I0xkA&Ag(^MoJ{@b2@-ymzy%J-x7u9n~;m6?6-q z5UYdnnFebG7C?tjKwyu7teca@W)+GcmC&X{J^foEKfySr=U^P}4gY#3^uaYzEBc@R zua?ct1y9xG=I>%6EE6p_RZ|)HpMwPelveOWkf+Lewcwe{g6Z9x*%^HUHbG3U=;dNB z#lPseIzvAFhO+`beOt|Il2Lo6k>sBxR=eh$Z;f2a=D#_j~P>V!?Cf+Sbc ziBssP6Uz)64B*Pl!z~@CF@}43&?yAET2yX%;=mG!5AC6KPsQ9G`xy3}Sis zr(&c5uv1}8FGZPQVl6zwh-p!$dHwm6+R%#a<>QB$iv1P; zO<^T1$FljTQkH9|5A?>X1JV74o5Q$@Oyg|%@x+#8gBlz7562x_a0%4u zOL)lQN}M11O~FH9B+t@|&7G7o(oxL$`TJncuXhvkkPeN6p)F_bndhHAqm_BtgN3i2 zd0MN31C|J#J`p*~oS3+Z;Es*nG!01X61#;NN+l^ZdKoBB;W*K(t&u*>9lC(ifTd!` zjoyZ#@$us~V{xSjT!GKiZie-)TGEF)C$Ha(mCN4OSDc(Qr3b(izPhM|z$O6}Qxr0ak+l#VdbbR<dHO*}f+_LYv#2e3B_#~UGJ_eLNk8Uq7U#LKCc?zEUF zNR|3UV0N*%aGoG0a$t86R?d|Et$=6}=XY!$gi!UaM?#PUKSnTo3mYPn)U9pY(gal4 za2lXSrMa;l#R@q)69@mYk<*y(RpNBK+tV#8RN8itXF=6ITD8x5J!MYD%D_i!S=`5j z89D*Blm)wE0JaX2UX&@@*cDXDrsPl(5I*rU47;%GT0}drIh zfZj~kz`gzT^gT^AId;_=AXYsMWSbjA9Gp)`V4x7RjqK`kA50v!7rml-Yj@Y*b@KLe zdRnUXtm+n_FrFek@8vTULv<7@F6>Gmf@!eGU(F?2`)+Sc5dEn!>_St?Lj3$!`Zi z<97(VpC(Swf*%X^lsJ}>kkCftOKR7fyKnlr#`xr!Gx~8mjAgtKp}(RAT~Om&d$7!D zxwG^Av}fXAdNiq`*+7C@zSCR!yjskLJC#SsSY)XBga@2@87ys&!>AX{^Yuz;v7y07 z!(ZE4z+D~eAk4=eqa4Tswz-w-QqR^ddO6(1BjhZ#Mps2lZ2<7%bJj=pEC?xCT6;!}<(KXJm~`piM4 zE_HWZ3W7j>J4!^c`7kO>%gfki#{1+MSbRB##>P-*R*jA*+Q|B zuoKKt;m6da6j4&g>RmHryB+Os+)8ZsZYN2@V~`w5foYN(h?>1%g5SzBr_UCD{L&)b zP^;SVyDT{vHO1K%X5IyN=UJf`E{R)^mMJqWIKW7G@7} z!^Us7-RX~T@l=pmeH8cCo3TewT+8Mx)R=k|u9BNQN6jtOn%ZsG$Xjzhyk~!_#;kYU z%|baLKfhqfcSDO)IGRFE4oBHn4)kme!7qUP)N2Tx?v6Ek1}w+xVUCjTgL*4^9e zlEj>`B$sQIlG|U9+HTulJq%}DN)c1b+dFsjYv73-M^*+UEB z))p>PEmXFkafL@I3#|T`Uq#(rG38FzHdz^Gvd_oGAel!^@qT{|KjHCXm40bbOL-T)DLBi)jyUS-0<& z#j~$DVs+0vE*kFEfuBz-_lyV$4M^gYc_`#^^{pJy4^e^OYs3#K#2?#fH$WPdHKiF zchy;r4>O81HbwnhYzr-=ak4pcIlL~ya?Q{)QO4lxuYWD+{E}}d9B7?3%)-%Ji1U3- zSdi?N9tA2;oi(VE8i_4u!D#g(nZ{6BF=;L>?fcv>UniuywxV7nyZ7_JaT~$%(3GmU zU=fUHxBoxMBU#`R+D&)+TlC#+U@8mR)PAe5B=x!#ymKu`agPG= zPYWnBXILVZf{uEAi4?=LkCtuOYp&d|h|IS8QPY;BnN>H`aISZ|uIx7u&}w%h>OXEL zIiefzjJn5xdCQ9!H>%Oqr3$59G$^Ly^*X_HV>sdKLvq118D$Cvx3b_B#;@a7tZJF> zLr%r-rr+vRM5fKkY|5mg*hqmJGkZzv=@toYZX_&jtTe&GM7*yK9i6apLkl&V&NTR%aowfc4%p+RNqBg zEh%~k@KXI^Q)DijYO~^(T2^-o#_Lj4uq@peGt{(QHi*7&fKFG7b_j9#bC*!?T+=T& z&9x$~TXu1T7B)vNn=C@Br&;+7Cx)^*1E*pt-XH)QpHKs`Bbk$*97zji=}AlWL8D%G zXZ&Ggy55TA4bz|7QdFTe&eC|3*-Yv=D$kam97c&@B#4luSq9?dtKw%}d%jXy*UymU zSG5jbc;Q4meD2w~QyQWLDAvfS*J8I^6U&s;VF*6??Sy~%BoZJXx9Ix1S*|wEZ+M)n zja<^}i4$+?F@`lk%}leQc2w*)nOZa+-0Hc=Z7~2Yb2e6MYvq^JL_?4vJsqRUQ=kan z9eHtaRxu>kgX_hb_8jd*uy_0f5k@;5Ncz#dnKjHMMr}y`_6z| z!y|g;mY@;7BJoUYWNyLvDG0Kd*3xc1!m1o>&fT2~xe- zmgD{O@3B&vS_zA|vb06k_He;!K43r8kFUNqr%O=Bf^P@`o4c<}-5t6E0f+}EZoS6J zb?I?)SFdV?8*Ub>RUdJPov$u6^${Wug7?{+q!=T98=sR0)c2)8@8qND4_YlNF5nt) zLBzl~Qw$du^7$f}EX2px>E{RO-_O;(dBa85X?8@a z-%!aN=I7(A=&ZH~3$*p3lQ+v#7jP(e4*J~F0E>D;DUb6y~(Dj z$koa|HT<_Rtue;B99rN5kCGaMAaPdcxf?alMF%~HEbj-?5r)i16Gvhd6noNLj9%D7 z;_6wA*ph8#4TOc`^2Jd{3)IvnnPspU_j%sj>kmAhVzg5%+3W@!*DQ?VDP)X#b4}eZ zi7ao)lJ+8ce=O}(Oe6_RQP(0p}=HyZrNVGKAL;P4+F zpfp9JR@bW9X1MHe=Mb*ef-igC!G#6tkH_+9-oSuWbBRF3<2td^3wLZR=$FXda?S35 z+75a^d#fWG%n-X!$n~= z&Z4$+b|Ut4{l=^&vfgm^+*A50g&B6jfH0%J>yi)`?iV3UiKqNReKcsSM$mG$qsAB3 z_=X3}v2Ut==FD9^t6MA_6XY!IO-E+2HNKgAX8#6D-;1YSJSDr&=uqH!xi{YKX*Io4 zm0Uc4sz%*o^;7I6e35Y4I1;XDHg&}^baW>kVTZjGgNO~5#}3MM${)b-tbsf_asWIr zuDzLBr)gno4b-jPu8%iy!f4)0BLWSno^f;veJ9Zj`#J`DdtdKr9cCVh#tvIV%?4%Z zX1k5qp@`rWEv$pSHu^|G2p)g$5qnTWA^Ivg}rr`#ZK6h)L@X~roF z**A%;QOCmnyU7n4D(aeeYlvNCMM0Ae7cB26EK%11r%iI;Y3{Zdxgw~k=T7fl$+B7u 
zY3kd#Qp@1oiS%h9I~(g{p)v58s&GFQd8f!3()r%xlcXd(w0|GJso`25_XEj+-#hwh z>43V5G}l2Eh`#{hm2ooEq{hlbJSVU(o*m=Z8vGlU1>^5FokN!(9e_aCLqa6l z7EeloTd3(wwfUl|y=2ppGlVboB3#_?M3XIyK^F{)TTkx>!bfr!2|W9s1_c*S*G;zc zR`*6UtAL3#CCN|xsXm>UKQn*w`FO_S!m@Vd%Hkz;4>OM3uw=~L*Elay=?WuJgJiZj z8d#{l)Lpn#sxL}svpjBQjlXy^bQ4+Q5N3D#Te@l;-yE>nenNogDwuPcso0Df&73Yb zS2U&n>C>$ETdIC4|%gl*!k31N&LOxFmUGl`e1u&oF#M>@!HsRD}cKQV`(An;qP`xEUdb-Kk5QjWrmExJ8?^^3a!uvSF!{@ zgxeBWR*RVz0;)P1r57&rIUV$2yEWdc==^!h%Tu@t*H_!cg<*!QQ~@ejn-{wV;l} zUfb!9`bM4eV{WKYFtDryjcjj#|H9ZQnLyV$Mu*p<8`Lp~4|73F%KEEj(KN&~VAGIq z9Bl~-$8$B6oh?6a7YvHn?Hs$DWTT{FWW9cXhl*2V`lDu-t@Yl-&ml-Io}PEFC{(4p zJSW}UQ{Uy6Ul{#d8t1~cqaW(ck% z|E3>Fsy+QHy~c=oyynlfm$gtWCz-gvdmGIMcl#!i%-m*K7EG&Yit;0w+LD{ zGF!jibp++rldnn%n?ZxKCRflf`j|QB{4O<2^^FWJWL9_EGPbmiFLvF+>SF$>Y3Ac* za{NGna4iaLpFZsj!$t{m!{|pejBFK_N;ahlj$mfJt7haXl-8>nW_yA#fLfog-kziS zh^|1CTV-`5#>DFyW6B(>@OkdZ7c?w%k@^L8+^I7ur<}O3X7O_DtACijua{!j9EJ46 zPCs6A_F_+6OguA|VWoL{?e*83$P!yQWxoVa?+TPst`fs()Pp@Eg-PQm;9&a1a@;4M zO~2M#SUKbV7XDoQ0y2W4`hliyz2}1a-+PuAs)SdQ-{#HPHmnW9V+Gw^gF*6X!wN}& z;4I8$do$qhNz%I?NMn{fazCph1|h~;UL6irN$XQHNB!%%z*g04&t{^uv9PQ@qQVo| z#txJ)C42{@zP*z&@3I*#-sC2Wc_=Kj8QC`1d;8LLlf@9{F+XV&V}u_%g}``*CYFcH zEHkRe|2ciUf|ES0FpVy_J2ScTahAlVPJlWAOMiHjbb4Rare3|z0RA+6pbLv#cR*MV z9CnNeN26GDN;Bf)1+QMlv%zM@dJvRC3$085iTRijYpGXW`9h<1f@u9IwHllD@bC!u zk9kJDDGw;)^ewUn(l{;X`T0{ChG)*sy`Z1V>xV8?8(pV`8{%cvC6wsN^fBhZG!^yG z%#0oP3^a#ze0SF~B_1$Y7fQz{uTr>FO|4gdOuE0JQCaQYm}R%d*UvoTG~-yozngxm zKJ6TNAb_05y*LCPU>JOWG55{%eTmNv=Y+K={am#FsN34n$(zi`B0Dc7r)rcfoW=ql z9B!c8YGO$4;k4n@*Dr85wGEN&r&xJy1unmM$PUMEs>0m$V$Ddt^pJ}H=?8u^zkE@> zM|_6!6RfO1n}L7h(C4MdFvXD9q`?L1#~=~QfoEi6Rvu5*&}Kmz83&eIuU3aJP^{-U zRt>14=N}zjcONZ6ALXa&3BkbNsz8-$-7QzKXTM5$9vUw6xq9^?Dc+i*WjB!46YWmS z(TS`)FYWASMYl`|ZZy@b>D^zyuA#$rD6Pgb+B-L|#Z<7@Nvb^n>fniIh3GW?AYEo@ zcD6~A)YL8|^L*2QpDWnB-GlfoxS?;r52k;$K+pZ=lsfm4L*3Ub+@&kpd#-hXt&4(& zXU_auG6XCzOQaRSw+`0Kv zF<{FXjRDXt%=UJ+v5B|f@gX@O){kxT2q&z=_&D^FnQ#Z?jTW*8jQ1^$+bQ`PF?lt; zhY9c?(CNtpAp1IAl1%rnHOI_ZbY>f{e%t%I?3nsDJQz$K`zN|}Oo~x9R(ljI_R4pF z;Qd1BpMp zga3(>A0MGEHt;xEbNa|(yOq_0u3WBYC7pOW!>Y9R0tJf+VKl|b5v&$2R!Fb+G{vM4 z-A^9157e!6pzOojd+6z0N0AE*oG)Y51L^0^^Q10R7xf|>%-IvJwK{(-=k%vNF-{-K^Ze^JB=OU7Pz@qN4vfZ=FXgZL9O6QEMNDiYyvI}C2=(Lyb!z2M;@ES zK!JJ&A4_;%G(O15N=-`+ieXoxIS@!nBa)^O-alxERNJl2{NtS1y1Ts}4q8iV3L|=d z??x|j{t#1#-a3GXJs^NuURf~^O~k@leX-*CG*#!Y-MFF`qd>a(vfbahp)Hw?ge;8` zlQnsQva#o!-Xvt9K92hCiJjg}C8S-GupYUJUcfIoSN+^AZ;%&W*K@M!lt5o^px|og zu2pBT>Vzy%4+eNx)M{32G7$8v!M4|IM8&5#Vd0F?<+dU5hqwa-=7?4}hR*Y~w~?qX zzysa5+!_;eizEor6!ps89nU{NbZ6N0I(`?mnX+WQ63N`G)GPXC4*~)$Hw&@B@vO-# z^=Y5q2EzW+$8XBC!N2}KeobSqejjmFbS7Kzk@L&2gp2D^sQ zNeRW|?Q6)`+@DOoka8_lG5c=v^t17>zQzRJkr zkEDLfpMK~H@L=p_18yqP3G`UZk}YqfIqV{JnA6p0(;W(Bp736#eUElL2oJ7&J%clx zx9Y%J0p=o3$;*0W*8pxh0nDHxQHMxUmz&h}3ZvS|Os9f6NDsJ(gJ4sS2bG7yz&6&5 z?~NU;i?aNk*4-f?Qr**J!ct^WPsAS|&R4@fOh3TE{!p)8 z4Dzt$W3`w`xrlL6vn)`R@+$dQ~%^|PCYSGl%M$vtjo_z?7ZXw z(Cx_YWWjVY88$YmMR%RCpderOBd0aq((D_0&n!oG8kb^@KpzHZu1M52DxtSDx74Mw zAikulId)TR74zhKGmO6DrxZJTS1ZOS>ekt)y3zbhoH*p9WKT0qAz?sSm2NFx@c44= z6%9qBvWn8HE*f8sW@M|K$=g!Sc@f#h?7Cn%+gPn$*3F~qj?GD=nxn?iAcG&6B{qaA zU#(c^ZKG7HhjsGQ%vGyZmGIG)h@qk5SU1_EyHWX$(UI+=7H4DlF1fi66SU-ey}|TA zy!Z#2&XO;<^H>DxL~Z1Ppod+hU+?a@48nWEpsO8dPq>(3H5~0_Nqrnl=y$P`fhHvq zepiohCLC<`9MwzUYQ{pX2U=v(Jq^gTPN$h^h8*g4B6Zv>GZfWm-#NMwkLKOg5F&Ss z>sJe{krZR}LbM=VVzELnIvO)p_saTOy&0by7NGJ+oG)IGs=GNp!)kdJF;An^Dq^4P z1&#ZQcQmM%&-)^L4clKYv8ZzwkAxPeCAVrktbMN10{Yqa1Q2uG#z^-@HB=1xt{dSd zkD7$Q>;_R~0xGH6+OpoxcKwuQXE89_v}ijqM3>g;FD-a?i{dkhlY#pz%xnIc&(z(Q zoDL3#Gp};w?Td8;9h>W$oV0j~;=6TvqTyiwZNxWA&>3hv+igFgl2$hZc4!CyBlaA7 
z4G}!UJwey_m1ozS~ga_7=yPny|-eu0* zmis|Wgm2f#qV7u;7;y|qSML%1{YafI;qel5zI3TkKIT8y@~%5q$l*2p?#^%Z2oBwE zLP1K0?u@8TSDcu8ZB9QE?`~)%Sego^wa*NMk^6d*dFqtr5$i{kY zt>nJZ5;+klszxK^M%8)^Q&uY%u56CSrp6d9&JBN5n#Z^F6{f_B9}2jlQQ@@&P-@K` z4O5OnQRV{TG^9|`0P7(F^A-F7ej0E(7%o#UxCm!qMIZLJM{jr-KIzjq?A?87HvHjE znAro^gv=cuD*#Gx(>&D88#IuQ8YCR^IN;)HwhXq4w}hP&+;G#o&!3F@47sZN;O(W_ z39pT>;qG$@ZDn13y}bi8D8)37af*ULn0_UnG8=|N{-6J^hN@|NJ!%st)Pb4F&Naai znF9COU@$qb$GZN?K6u{!x(~V?cny2bD!Ma0MAII5K(~ z`?&hFTQ{6t7oOwvAZ?@Ve$NlD?cuf;PuRaPa;PYkPb^@fTZrDhq}jio09=x9x1V@I z0vR=#7)*MV#@L3~qDDS+Kky@=RKX9?mdr`i!uN z9|8(an!kqNsb|&xgZAM~?Galk_%+lWb`08>U)bGpx1FhN1tD};j^mE!tL|-{kDjwf zimj){Vf=oTm5rH|beGtO)Jlq_$iiV8^jw=IR z0kyt~{-d9-;Y`-g)4Q)e9Y6n-8_3hUFTCKf8I;IF_ar_yH}||6Yc}FOIy!o@x9k4o z+ejcxJJ0oD5^%)l7DH|YzcR6gqiJz1x>YbpsayCVS1aXaWl?MF+}tzz@%1O4)Ci}t znem3vqraU#Q2Q81dZU{*YKz)hiI2!tBVA7TI3sR>NRj-yaK&#QW`+PUxnC=O_2&uq z|JlD)^BVPw>UyD;mog9R15oW=AnutLoUZM)DEmSNuqM}1OEe@T{~DLDhKt?V%K8CT zOyh4ZWXQNZCS|Zb4lK0^E1;kn(HL%EyI{a`6cSr@We}s!4e3tHcXEl?^=QQC>T#(0 z#G>-8P2CELZoE7BY4VdMBz^ip3vjo4WTd3#zt^wurOp^H{k(^~qv!VC>#xp93f#J; z5wRh_->JdEwD5zzK&1KWACMpWX&mwx^rNn?8a=L5qbh2}VcHjrZ9roRpV*$I`RiSv zgS{vTVMpWKAxxIdP0#~i@0T34HZvVVnIL!J6{5NtO+37fV@JNxUPmDp!n!y&W6-D$ zUcb$}VU zvO!tMu_wZrgOdfL3kQ@DccrCeB}iOWkZoXIYRwEGdh7_vO!S#TS2z5ahMtgW-+;e* zI7-y2kEa1ek`bkefy>D4bjx?_9@x33smE$G3(zdTPx<*nH3RAXVCZdRNgE&GzmvP} znSc6EG!%v8kJFzta#$Ezvlkq!JLqNFo)uFYZ~7wN8#!>)6q0W4ezz)$kN==oEzRid z+`2Ub%raBnPJ|n8ldqGEF*Rjz-9x-<=hdh;oQ=TpwNHf<{PPmelc)kT5L43GchXs7 z@EiU{Px~`>#r^;FKWPFB6jiimJaTbpaXC&8PmS5^4>!Za5+LF~X#riTE_uWR9y>BW zv^&sGQk@CFW*LHijm%&01&idkX<(o8;MmzGZ|j%U8~W*TNz+1s4!xE<2w&l&J!)v_ z3%p|<59l6vn1ZdcOh-ccu`L#~JPaRbhJij8@V}}{vod&Hj5D`rDwj(u>!F8K_rRRl zwvl8s?k#j@4Fp6h-%Ko&{PpA$9U`EtKP3MBH6gg4C*KGdOH#?&7Gdq}khU6yS27-_ zHf0uaG6HLy#1s~|6{dtf&c#cE1Km}sbhddz(Qx*5jWVzrJ|Ly%CjIf_h@bLX6-r)dne>huLq*S&h@qSA(zEiOIS)>IXw zOkEbi9*a)5p>BQr=no-cc>H@j_Dg+F!X%XW`yO2;(Xg~qA%P<7?hnb{yr4W)E+v@KOxM1kgw+v%Cc~9LjM9%?U_65rA4&Akl zLIDQ_#j_P zwNJGGjOb|X{MmW+xX?^Cqlu=dECVRI2=ll`bb5E0+ERT_6f_ugk!}2c{3}V%|7Y?y z+CNI}|M-j*gw$~{Ve@PU(c9B^rQd)Su*De4>3No5CW)qZUp|DxN0%puiPeVGs#>bK zgLl;|tDw0U8PY@>rakOZMZIlzY66MT;z-l%BP4KK5t?xs$b@qWx*(xEkLh7P|Jaz& zmg`|FsLMNUA+grL`Ve9sWWGG!$9i*D=G+S}cppk-wbQ_DU^*+uHwuIUt*T!9BpFFJ%uEhY{kWu^d(nEPJB1 zbV;9*CVF3UmUN=6-p$bR5elD9BQSV>`q2ai?Z>}={ChPHDg<&0(Frvq<}F}iyFiaL za@fP<>>e`M2Of`m$z3H>efPW{2z}1Zol-xIPfb6Q*x(9AS_v9Hq&TbHXP2C|r?d_R z-8cU2pWEoaP`}hn6UO|tpp6p)Q;DGKIg5?8jnzdpX#M<^hT6cujj5!ieF~F6B1~2c zzDU@h7)txFM%(p-HakK;?_i?n=l1@t`zJA^n0Q|+i=}3XG6lM_R9{|q&AXrxEwpb! 
zt=aD_-L42h@q0BAt>3Vme%zpZjWIm|9>*2v8t+EJUDG|7H%NjBv5e%^hu#Qip+2m^ zN^)L9gfFlUkWL>*yH*my&qh>8!Vdv7g=eye%eb*>8d3;8a}a)q&HK^hBdt54v__(1 zqE8cQe3cdtG2fvSgs1Lk@Yt0u^_mA6+wYCMd>0Pn5Mj36(RaKPY-%>TZzf0=r$1<8 z43Ioc)XafG2L^cWcN4Itks^*-rtRyoB!(l;8L{5xWid^bjG4SM){&@+KE4t4mrm)4RWYR_#Z@v6@mN6U~~-v-7X%z0BWmoS`uA z0!PySBBx&zCtWuO>%fr@2@X+irKDvI#5z)zwWifhxtrby?F*Tz1(()W-V<|G6zn1~ zx-|&*GER@Xz<39{JXphX=@tF-hI3FcYyV;LvwGO;IH1<5=p6XkMG0Mx=5?VrO!WzL zUnej%iB26gP|cc^d~gONVqOG@>xS?$eub1a&sr(%S``KIwFR0v?vO_B1RpJX+Mf>BaWy~W_t zlA#}N1)Vtgq8mOCpfvz?ex5j7h}~oAT&5Ke9?L#c6EwkM zWk(js?ZS&aJg9OUX_(BBJvf{RaE;6pPn-@Tj&<~NT1)6F8h;#&iCBPXi3WGbO2~pM z4<=tsKGEs~PgNm#zvV;1dwW!%7>%S7nLd`wSS<)<^(z`+T-DCJvTKaOvo`Df>32&l z#|~CEJgv80ElYsD_O$xA-|>(k>U!M|`toiX)e6aILYZn(t(U}{L6 zH``Qvz3yQ1rY76g4oOWcOWjeV;dYMK?mSnM-kP$y2c#i?VQ#!Ou z7z;8uM9~%@_zx&J_2}Nd$4E>y1B(&#D&%cdgV>WK#HuTYmT7jayL(uj>GFRG-1*P` zUh_$IZ^}mqSr1 zKfA4e`9h8IVz?}?UJVh{X*p7LhY)DrNDiaJ0XeRda38o`i0Jyp>MHdr z)p?{9lAaJ1?CBa%h~~&XsyZO6I`qVhRp+U!H{5Ngs|7mt6hj@o+enA4UJ zlE%7x;6#rbj_POA_1qVTaK)JhYhKC-(ou}xJIMZ_$V?cd_;RJ^u2{6XFpCNk!XPec z6n-##LKu>0!QSi#0B3`4$pE517ew^BV!wa5b>Pr*#5<*qkr5gnz$!)Y0Ex28zO1jp z&!(qXSZ?GM7LS!6MfWYHG_J*Za4-X^Vq@&kE~vDInM10`OU+dNEr1U>4OjVK>`p=0 z@OrL^bsW5CixLh6e<4Fm^i2Da;#cnZB@JwlOpI`JmCd)MhNq<0pZ(JZ({CqV=o8+G zlXE^C7|TCBep5sCa%hEZ4-}KGfzi36ab}dDedjEV2R5;+)l6b*u8dlKSl>ax=G96S zJawZvO9M;=-9m35fzJxT=YYmE?v1=)YSl1EG{tFqx=+nSl7+SY9330KFL)sCPu|yV zY!(a0$Nv>~j%-n5NXwa&(YCLW6QDa!s)HPKJ}2|1=AVj14VHM8>m@ZHS;NqDQ8i7s zudFR*fuud60~(H0N!GoG4g8XY56~}|-hHWZrID4r#W8~7VT_UndNpPCUI@n<79&=7 zSX!4aM@wg!6~LNYCRA!VevQ(-(6#h1tRhPDWe;oYW*(URSwdSZki>5j?#SJt_X@`w zp_3MZ*K_(P9C2#v&pEvQo5cvDp+$+TtabZ_>F2cbT6_2@+M$MG@=g%7Xi&;YC?96v5E+F(@8J8{c>)xcwb5GLA5%^Q4rZ^C`}VDfPCm3Hh( zMx{|%SdU8+J7X%Sp_Mlt>>latpj(uAv2!62Dr*TM_y5ny=lYZ!X&-A5P|O0@l9Tc* zRM)h)7QxB;skvTuwQ{5ELFKD2sukNH@bxp{J3p?ccTpD3>6?9Ai)sep5D?V*gxoD% zENL^t34;rwdmts5eEpoK=dM4G0oD_cumCyr&H7HyO~mAL8=#b`&&C;p5Yk5yH^^@B zZ;KPzbDwbY^x;sBNd@9TYNH)>YOboLYS*27@wC25SxmG`U)5U9jkY?*FQN8sELJl} zmGJ~jKhbxs&30$ex6)rqy8p}H>$%gr&&2)wPxL#6a~89YFa(rMpr2~Ggd4s}#k2_r z$}y2^K+8o&+3?_k>#|S(zw}3|>oAFs7%d1wBGtXJT9K5sy=Petp+KIv{hAg7dIn-8 zSP@xiSRFeUwSi=Bl&cy#*t@^?1I9?`^u?BW#x;w3BxLICK!(R;5Fmtf>4Me4l2m!Q zs-IpNt1A`-&2X^5y>k^{dDzb|t3_Qzc8*qaCsOg?l+Fw)4o9obt~P?^UezE#SC+g7 z(g_TnfZ~L&hcJ^mLgi$XK>FdzRda2<8sRUKfDce(zk2+8%@7(O#-;i_y?Z;dsSWPy zDgYXBUN*B{E~}Nm*}bYgHof`Z;&E5jHuQn{iu`!|gRSl37O!JSlAiJ)X)#`tunA;~ z{2)CG&V-(uKB%G=u8$t!i*?i$YGV^3YTd|GhqNO{-J_MvV5$6)`b4zRUCXFGfx#v} z19u4eo{r51P6_;=K^qRo9ET!)A>W4L#mxy+T@{;t8SNg851g3rnR$2I?O8gCo`&v1 zVo6c%O|4x*`_N?Ju6DtD%%+ey>hcfdk-H6fQ17&4qSflz1YL_geFxm-Vi$ZM2 zoP~yq+{CXRn!qaOASKlzso`1#B4BFmxyrKx;2@^wLe)>qYI#wk44kOnG1;YGyga7x zXbvbReupn&*l``Mv|iCu@%;7NW?gD!0~FN~0X9}NhTToKvjGnpm(bx(%o4$f5A^AJ z0zB#y(X1~fZlVGf`Eg)q5$#U&-&qxPZDPyzG2Vm@&R<_I;pz;lv8ZQl0R+v@LVfL$ zem0Pk+P>cKZXC5T7zKyHr<0F0BB&kViwEY6ArUGYLrG$ZBpr_S99(dEE(36w2==)% zN*)&^`65uDg^NTdkeBl^w6{!H#x0@i5m9{q>MzxT{eH4(1f z%rs<~qgZ-O2#e$`{@%wr`COxmSToGOmI!+r70XI?XrUX=0V?=|mVRNta~y(I7x8_H z^Sa-MGlGNd&@)ph+VEiNng)-b)u10{k(l-ve96#Zk{?fgKx_Duf7L{^ezQ~|AJJ4Y zFXL?p0@XrH^GZ5akqP1@Qblfw^9j?=L_19_>M+S;WFx`mAEmp3x33xv~SgLdNx)AiM@)!|_b zJeXtK4E%lD9fLT~IiR&X@k0%YI+WUCW|)lf3YZXoS;Dzm0mCqvPl|o~3Eb;HYOSFu znVlL4ZdO&#k5Am)>SzM@W2^+!*BF z>c*8;s_Cs6unI8r!!d*AG3&eJyM-S7xu$k_kkN)L4xL^EQ$4x&wIufmM#d42@@`Y! zcN$|$M>BL@3<==XKCun85UcNqJ*gI2jcbdq$MvQJUXR++qzGiH8TOg zj?l?R>IS|p{kYrR`K^q*Ux0!BWD5VA`A(*Hqh|(_0H~;u+*ny{X!h~nZDwJA8^IOa zQ68T(;?iN^WYRJQY*mmuy1$DwulFYJX_eNH{PHgY=o{TT(X%$HlfREUnY z+fLbnqc`J*sRpl_znqEeZ8~Vil5%m*sI6v-ZY9Sl}>0! 
z?L&8b*lyX|SBnpa+uJ1esD)S!DQ|!3~G6GXhzZTER5!tf(VEVM{z}P)&2Nh^+)rk=| z5^LARv@3{(Qi2n*KN`*L3EU((Xv*lJZ&mA3N*d9_o9OB29lCmdXe6W2nY5q_=23|- zfpBcO=r$wK3d}Z)rFaVyhhS_Z*2XnB6ZBr=9xiYB!Lp18!5cxygC1+1_j(z7QJe74 z)|MUu6WJJo`4}1ChWfCz#cZp6Q0D?$(>$W04lG4bn|gJ);d(JH$!4kCjG4nE!AXyC zruLkA+Esw(=CX3Y%VbgN8hBjwGLo>z{oPjP^*3-1&^LRiE`s+WWlU>~QR@wWWgaBw z6F9&SQ^(QP_FLg{vy?=p%}+~I{XURz)`cFpIP8u%aw|78o6}yBC`dk@yrp>>ar9Hk zJR;-LYENnReqT(nK{X@Ev9< zS+a@^ot)7vRn{9GPU6TVoLgCxES!5)Kdm@H#H+8)YhK`7(2&nEiL-4*L0T;y{0VQk z%xCdb;{anF7u8}JUA4LnyJ-ZK8_iN(-&pR(+Lrp8Bn&$+QVoaKBk#M&`c9Ay4e0to zuF*qZ-o51*N*q4qs-g8hn7pmmfw1~=nmH5M`s}6ger8FSzr9?lX_#JiI=)%3(Oqds zQg@sX(rCum5Fb_pF^pWB#=XCvogNrpMjzn{FVeeU%6NP7tp)_BIcv>)D|$NK%MgX6 z?6twbjWSiSPy#QqfwH8W`_muM+mXhTue9K;eAVP@e)q%l)7Z@5>Cn|XVfSP8f0F0- zBPF_HKQDU3kVcTrZ26go>u!0YwCu+C>Oxa%tJC)Qz(L1thC|$h-7NSr95f8fJIdA_ z47Pekx9#xpgSKbAqcDDBR;JIgNr$gBhB)kvGbfWKY=5e>wY(cfy37Sxp8mRJftuPE zP^=^r?W9gSI@|H^!SDK_108~+EUnSe|0qT6?!wVwH^o@c8(r5|B9xuTsYoh&*&tAv z{a0#SS#^iw-oc)4*~Pk#Fqt|Uox{kxwtOTupuXvB`nbVOrnS`D%`yxjFV%>Vl3y>( zZCyEk^5nDULbw_ugPSuj?cO+a#&)b?R9_TmXlbZF?T(IyyZA2mu4|$=XHY-1S4ih_ ze-3O8`t`}@o_<TPgWVm{Ja4D{&FC3c%`jn-2iHq-rQB|*{x zi))~ic2{~pc)OjVQfiRd=uW_SV(#`G-9>uEVmK`3t~$rl z?b|O%Xs}ex{|-PU{7#LH^-5hY;SRvSj|0PD&6bI3>Hx3+^rFEyDq`83{C>SUEe5EGUemnhw|z>bdgT}3+g+|^f| zCSmS z-b=A<*tv5^z7eX2>&8IaeA1Y^x>po2IR=#D^!JIHUI^wqa5;G({B?pS9u8%QiR4jjArjAA*dJ9G2U6 z=1-q@37u`M|B4^A1k3zca$F*H%+-h#@S_ouw8>&hwbRwX#(J&Vlv8q+Oj0`7hNEVH zcG`xW1IemkTIU$kJ7I$EMtqwv#AYiGUi`?)|B#jMgO&< zbpWKB+Bv;@=aeT5G>Gy>tiHKJW#w#863BV9!O5v{4qrr*IvrD zABB{SL^EU&nFurGNDE+w%^G@lrvOYs-BwyrjHCEAO`oR8L4mkQMeZ=%j8br19ac( z{K=fbp>>F`)pVhd?tl|?yTN2^@C>7)470aiXsoR!^+eq@s29$0U!~Z5M$s8$@O5`` zzdrwrRv=1$av{Pz=ncTbzaeL?t$)TKg>{Wcy?+2-%{C0X-!(&|h6jqgq0 z7BI*>yzu{a%*BjvLj%AFCsHv@Lh=k-+$K<7JHy~i8nvJDzNm=CKklQ{81c}tyTLhg zhwgqde-qY;1{MKyPAdiB6!VO`+EU%rHRS$b#1FUeGwH6<%9?&`te12>2IC*2M)*QN z7Y)cr{y?aO>tmyn^2brdNr(1%&yK_c;Nc#8g&q*VPN*(wGO%I|_o&9Sc6YV&Ebne~ z1S--U?r6Z|GDE>>%p0V+(jwY1hO zahs$FOSz020G_N%wXqE6?SVz5tyYi5;Hg(NC4?foIHCdv)Uh(WnSocWIvR3Fq1v7s zyZF@?9bslFibuxqDAe3CN3@x^b!EcR{_$e!{nR>YCrqsdxV`0E8R#^?huDT1LM!HV zQhiu*!rhHT=MY1*&uBOt3ztxqqXlH8sIaGfV}Ux8_c3}PVjV+AnNz*Hv|2vzOLEY? 
zr5lR0WK9S@BbImj7rD_8<94()@Sq%N?~%6n74g@9@?YI|zO&`CvTBtnHDGH!ch_}} zKzR&1emeCJ{IvsigSvv5adIv`%)dIX&Y+Z5Zu-!HDtk6A>jpa;r$nYgU~|BtqyaTA zg`xa49pi&Ut(-7 zzh*+*(+g74w3QkQ((}fp(z;rNm$=oh84sxXx^kuQvV(1l^Pf~De{G@}!i6*JXWDn$ z=$AVVAYCAaxs$SkG)=!b;Y5zNh)%)E5stJ+|7CJdFSjDGr#0yuj9C@UB7#4b7pYW(IOXOehGwCz(oaEcwRo_^vMXQ?S>_cf~QMZ}a zZ0WK3%;;NUxJNp+THQQAM93ipLhRq^`$FG_?sn?QPbx_j)By{tjp%YRtITN?ZPInP7LJyL%Ko zB!u*4b<#CV=4uEirl|y^AgJ?=vR7(ocT=5lZCMT{;~1A+>Ui;YGv3`Ml^@`Gk0|a$Mg` zKZp(hZH*-X30!w8D^G1p?|!iW)yL#(#((NM`S8GE@ai#062M528<}_+bT6bbz+Z4~ zM_(}n9v<318kgnuhE_w(gE5^v(7ZKl>waQptRD~rkr7l~^;4L(V2rC}r1(;ZBC^W& zkM`Z)FvK^VnPi1~h=MrT%`nN6CaV{7q$koyXacooQPux9$Y6>jhHv@r_y5KfM?QVV zw`RG9G7Lwz=4t(6y?I#^2xXef2tD-B6mVEk*=K!I#5c*Y9404xeP!^_# zC7FB?8(^i?UTf%xD`J|)+o%^m`ui!=9g%Bn`a7laFu@Rk8ec=@PCiHH{y6yBEvS=n z;Iy32yS+GQ@4GJrYjKl#@l<$oPFf6~m3x@EWC^MfN=16drYFDQ3-uK6XYZE0{A3ao zPU1A_I3{A%;vBfb#dB)t+D5(X4ykLbl+7XJ$yyLMBDUV!M7*fccIdZ9nP(;?x@Rck>Y~SZ=I-1% zrEZv6l@QOd#i#~`d6X*<7wBoBv-!lst8#r9A* zL3iZ|0!vNek5JAh^IE;#$%^ z_IQH*>BiF1l7vxptTlJ3RIjK@2i@@~H!qT+WVtU|J9xCK%h3_?!d(r$(cSDMvKXyG zJDQJ*8k4&H=;&gprh&E!A#__GjV+}!zNvMCBwn}x{h(_DFS(kY9Vj?CzEDR%{yM;H z{#0hREe1_D>NDQwthg(|HVPuD4W$I!a$iWL>DgIGC1yVg$Z;?OKti3+V4Rpc1*xK1 zFES(_xJHkwHTbVRrvU-Ez2}pRxzzG#FmC?t9?l3opCv@(L>0yw9%eLy~5|trR=v9}tZaHWadD7nXg|~$) z#WW6d34-!(gN~vM1^IG?!YY~@wAFSpg|+HAqA;t&4ZeVb4#o-%!bzf^YQ#bmkY5fs z%DQ{%FJ97-K_l)#2w!-A>teT#Al<4H!U4uB7>UhOyOl&UxB z7u8WE`K=tQZs)!dU)2lhr~CI1I#_TuB^$-;w_quuHfStqkjY43jS?HDKZb@t542jn zT6!o$9p_SoL)l)h?;w__IC{IVtTY6rL5CHgHO+DTDJ2i!xSM{Xm)v>wsoVMzvOKH<%r!OKKBZtHpMZ8Csud#ehL5QqCj*H>yo=aIUNY93PX9 z_TSG8877PL)#GJ%QNp@YPk|)U)f0?A@+B`=6z;*YLs2PRH>}W zret;eh{b5u#k>4VNn7u#CACmk%#s*1kO@){b<0<}P$Fi{>Ku&SErh|rVWzdfinScP zndYN;T)Lo9p$q!u#h3NdEe{S6c6mFN`qqJ~7SEkIGpDta#Y`OWt{XR zY&{<;#Cj3DGIsS3VtX|a6uWFM{f13EYy)V?y@vHv3n#);QWCK0`W}f~j{I|2NYtre zf-LA0;{_rwKJAg8pp}VNZ#BMI;KbH9M2G5{(N@dF6^pWf@Ohw=J<;8?0-UaFZ_jEy zYj4!)80ix|N9-*Z*~r~a0%T|qOrs+&55Bs^yj8@&KF=(_&!rn;i%vBdf}3Tr&)Jizbm_{obR-6vhx z-HeHx&(w5pF*T2;o^8tk#*T0YgVa39lxA;#c-)?ifgrf{`Qta$gx#LI!&wq;Y$7dI zz%1yl>R^Qv|G<4|yng6k5+Q;MB&*Mrf; z`_%@mqjAL%d8Fe;0LY}(YHZU1`{6Hyr$M6MEpk+R4DXX`=QWR#cASX6)Xjr#QQ6I( zYnan};7|WBxv!y3%>%oRaNz*9u)aV2eY~z8N!}tvtjyUv1vIX#T119qJ z*sX^?BEqvXiEEb^V~vjog>B*tV@OUb?Q>knu@Mv9tX^hGb!82!Wqn&%H^diqlBuWs zMsm=mmHNthZ`mN;za+N=D@KllPYqsTEv46Ae?H#4+4XbCVn$s@?UZW`?U4mggu50k z(J#azulqSD`U4(sZ zDtHA8-Jvo(=m2hY?-IFen@B`Xr1m)-wB4(jj~r^f5mJ0 zy)u;I@$X;N5->J0_i-bZiky|q_F&ZZ-5;|Rh3aqLeocFp4v~G4q{NJNU$8ae9>;1v zKK?zwxi@`V;s%FdN_ztH7s>-F=!2-*yF|GwjZdew>$cx$--=6WGEHm%IE}@Ip3Uad zy6N6bo`ytRoJo*~-@c_+yu1;4vL)!@2bpvI(fcToPOq{A7nMd4^kWpB+#?X8P1_rJ zC3xIIduk5@?RJB9XN&BPbcAPp+8bVT9ho?Z$@=)VZ$_-{COm;Nmjg}Toi15 z9@ZdZv+bdk^?FU?a=q)`(q6~2nJq9t0i`?;GCzOXdvptLOQcVqnES1!o5@a@TxyrY zBhRC|VSnohZPWnvs78S;|Niuz9+EzNH~mZ_YUSE$R@YOPr-Lf22V2^yn$GkcJz{K{ zTIO^p5Z78Hm6tcFUi1rsl^e-Vs4e-jtFQgAwhw}h$V3c*_NGg2Lr8UV;|U8#x1h!RK_W;WCP%1ICr+>pr=SYkE6sBiYrX zYz3S@1`spzL(9LA=>^1Hec9NzRUuh%?p(kuENB0~j(=U6fK~7MCqor}tffU}@;$3u z=SrG+-YJ#r+0&Szg#O(z0mSNjrLKly>;8UG>^(9J{9A!w}>t0yyCS;#;gs4J4X9yr#z70l^hF zKRNrOwE#(a@&N0z2_GZ2(cS3lZcX8ET2Q-_UgsjEaiu|ym^;g`QHaoMEzUT5HDL7k zM^KyK0j8g7DPZ)nJ2JjF+%-GhE!{#Dyl%(x1V98xx>*ut{(E&Gd3|8y*5ucN-FIs* zi=*tfvKS@%&jGoF_gk)u5LG;@CnMVpWcon;vOGlCpVY|N$xl|<)G3b*A__xk51h81 zf@mS67R&6>hnQUNN&YSSf};e^rhfbSv+7N^>$EjPN(^bjNxt8AznD!SW_@U3cu{p> zz)_nlKIUId_Yl3jd+)$4>2g4LoXRHz55*9ihVCGW^idLTX*^8fGH=nuYcS@O*MVQ(51~=>5nTm?q!& zEWCpWd{6u2#S_oz$EBec^j`MJC;kBSYIp#Nn7RvAAv=CrxW*soD>|GJ{#~>TC%qde zJSkXuVRLOQh6TyiivcIr9M^4b`cA}M^2ziIjYBB`h|Q1Xz^&>4X#)go;H5sE|0{K1 
z-w<7DWve~(tR*|Uy&9TY_nY55CjkaB&vv73wQ@o+#x$I7%>$8BLK!opdc6(3T~-qQ z0Ul&n<*fFi-c^fp1YSLN!5#)HG^uNc)Sr&=B2OKVheqOX=&7P=<@D~a@5JR6gv~Ru zOrNAWhc-do!VcC4wPq(!~5RK2XxTS^fj zFG6y`Nk=acD6FfwC^?=nVF;EEnz$woeP^5*Rz$mi#^~|p_sO&}_U3z&@1t>>g_l@Y z0|ht_6_}<5eXq9G48e+(-}vCYquK%s29|=ruid{8zox5vEKpMH1IlH zPVpVxeRc1R2xJV29j=eax>NSfTikyWK~EYrzz=XAZACNDgO_6-Q$MNn) zBvi?WRZ5!$szswefMHC@9j6?COD6;e92@S*<&yE{+ zJQ4u?UglDTZxMh%p21@)SZ%=9Y9TA_-L*l%1E`Pa4l`UJPNo!E*+lD&Fr!n3c`Z3`bECV-ooDG=sd)T*#gJyN+ zOVv~$rqKEHavp<()Kkgq&8nM-bZ~P={Af{OsOB$ z6jrr4LG>(N{)UzT@})MU1W$I_A&EVmjwJ&F`RcNIfhVOq@C5y{^Yd!c+4&cA4eI{) zQ!n|+{xl)wqP?`fUSCtgW&j^G2I$j_zQsc;9N<)S?YoC@r5a#s3q?P~j)ojh4=k4} zm19_lo#FP8*1@w+>!+o*etOB-1IW5?A$-xWF6vq*GRshQaS?e651*LA6pvE_W=Erj zni?ejoU`ZVVu}Hi8g&>ZaA!p$FW1JGx#a4J z?l@i3qbf@T0qni@TDfxN<;yQG(o{(w*=acxe?PLLus-MmAX+sJ19%~!nIn9(C+nyv zQ^q%efHkZ@!il%SaDIY65?LT<4YwOue%tk-6lgMNco?6-jYD6zPWCM`uif@yWP_2L zO_uN3BeNRUgt7t9SD4wo{kVR)zwd4u-jP-2&%OQbM`z?4JLw&>@MaQsc(H9ATkJWOzxL9`3(~ZS27+YyJRh<@3jvFKrxMzSMDL zLemuATj!!#%_cxd?Wks`72*=T-JZKwN8L7uo5QHB8o~0h?qrkUt#5k!S8J7?@`QmK zYoG)*1%@j3QyChdQ3hcK*vgs8A**Kv1=pepB}b{gj(C3IKn>D-I7ZpJY!$s~v~1JQ z@H`)C9u1lRzpGF6O&TtLiYlP(x30ci;Vg@rx2#i0rZ?Ia^mMz3a~Ek1Kfo$7;la|) zDS79pkdl(Etao{ix3@jL-%kZ`Y*&B#4l&b=?uk4%;abyS5?O=xNbqqUv5c9wIJmG1 z41^sOQpjTg(u>c`B`H3`S%MFioE+HK-ym1&L7Tc@m_guvB9gTADKjTtRNT zO2*Ldv<~zkI~S^L=Wp#ju z5nH_Gl8Tj6_;eCH%<bsh2x2TZ}prQ2N`Uhn#G+HFU6>BLXg5k%h^KLG99vPeE8J)T!U zW#++_%fm?%#;lk_YVwXwkV0)9WYh}h$X7UGm>{~fW;Xg2Z}>}Zxh@)a4+3jFW}~{g zVdt_pzhJvr%dW%?6I(sS&EByv^YnHI<8G_hib3t8Owo(?v^XpE(&{=krj~8VIRTch zQZ|+qq8xVIX21vtG7)ePDTe9y_%GxW?SD6Qk2yr%`xM#{loSRGH5AWz;P7zLX6xxX zMqh)yOqHI?!EH9qgIo@D2kXks^k%x*`YPk!WR^DlLua4xWytO%Es4c2$~1Yb9^?a> zI-90}J0%dNblaq&wI9 z5>+~QHI_9Nlbvp+)k`UkS5N&PBwqlOpsIshCSpTaQN$d`-G-11k9MgN8YMlfXCc^M ztF2W^8!B}xvxz$Y^2tGI4*HNthg;~?g9B~aR^Jnbc1pxaYI{nHWpvR;l#J=w7S6u- z%=2e8Wtj4v;lb8cHLMv}q+P2gN(LZ(el^Cv)oj)_{0S`|tV#W&R+>G9l6GsZTw4c? 
z?pd+&#FI-a?&wwnG}!98qG)y9X=-nK43{vg?qsM!Kvr5sA`V*~Qznw_+sneyVeeVr zNXoAIy=*ayUVc3fU~9uC23jqhl@_N?nEC~hVs=W?cWwhtzpCD~LRTZm4znhOP_P~} z|E11V3Vh82*ZPosTn)<0Wse(@p7zg-tk~GuqeeueGy!aZ)}bz*Xib4pMo~u9%FC>x zPTR;D*O|?UjJbrcslJ?krHd4e^s0j$M{!w~66zszjBJ*wS-GF_vY@s#11Z)z=}(-B zWZ&5)p>AbnbQO3Iey5+oSb5}kLV_f7HX7SK^$}vNsS%}oCvC2B0aesI5TP1-cpYYC z>UHwS(|XRvv=4uXo|b&}WQ-{g2D?>iTryH)_nNs?!9nHe*UV7!@T3t8>VevT#X{gC zvZeF7FOtr=EzkDoTeD3Kcy;BfS4fePzut31;(6a*ZOVx<(c)-l^ zcW;vUx+Re@?qnYv4l_JDQMs7{CmbxkaR`d>zoMu?$_OHRvnBj+cjX2Xt7ZV+9LE5@ zbgYpi75c~6lzTWj;5?We&G639Jt^}>=%22MkoNum>|Z_J4*9ajjI|puvTHSA9-UyB z_#iC^F#SHBkN!7~!^2eedMUC+w$Ld`u4@^--8iE4ZMMvhvmYeR?FqS`u>tY4a^$iO z9TW5D-t>Jdzyj@aN0WY<4z^mAm1F(%>RH_YtqEjDH?Vu7G&xUo1GHZ@8Vr>q&&NFX z+-VJarv>itBj%Q&*}m$@RZgZ#}bRy_x$FCazm;;CDU;N-4mO@ z;=SV*9I5vSr8}OPUS4xNY{lE%uDN;NSgTYvH39vW>uxJ+BaoZn0LDXC3E;mei_}wN zo4{Tk&xV1^>2RTYA~B#%*p3#9h~b!GP~Xpm8ZP9{E#OqF#7_U33dnoYccQ$Bu%j#b zw*Wb52w0^b1Y>jp)}>x(@&5GVpZ-RZOB>e%0`FQ=dfVptnt5|oPrWTAS2%?SLs9ez zuJ(Hr;(IJ>z1268ncxFjxU#5uUE6Ypk6gN@1=3odUgNKI&>MjZf?^uGA>?*l60jS6 z573%R&wz^&4V*85sIa!8x2-zx%HT8>C+VK$)tt@BY0^u;~z!HIZV;?qxJ<@6)^RD-8uGPH%MVRtDC`^WG7&)t+Q_<2fEc zGu7*Mo%?4FI;tfSP(QBO)m;v%_q0L@nDrqd^kc**09#rF!+rN(JSB}(sp?Mac&_^G zZx@4}`p=ppYT{=%pImeRD!{p(YflVH&aL>|?#QE!+c+M;=wpQ~1g_OcJ&jG2^a~;y zW+(>BJ25gQJc4a7QIHnQG#-eIk7xJv9Lw&wbVd$xg9!1V9{SPW|MUkf5FMPX%cgd- z1smtwU*2z5it(|7PuId=b9e);)h^&h&jhZNUeb7RMCIWI5wxSBLQ#g& zNtyYKhXwQu> zB0Nq*Jn6I|jA7(QACvpPGkaHO*7;$(QE*r3H+Ha^7rX7#KrHNiKvsA3>}yT0q}y8c z&ajuW!n-yJk+>d4tZgrkd@{qXDnQO;brF@O)qiR!L3@w~^T;snhM{}1sHs92>0kFO zZ=$(V;%_r+P)T*CJ!hQ81VMW(7pxc0JTC!wX5sV|{d|GN*D5vBH6@d;y{Xwa^Wqa) z1tUT6b89^GCN;$FPxMf1bo0TqHz1F}5hVg8X&TWnluAigvTPnNoPf)dXVZDMv^YDpu5{GhwK{?rHI1|8&44t|B&ye(v7}8S zQJhS@HwH)G(Nn_Wkktjdn5APV=P{)q1;MNceXC1Y_CF)$VEc|+@p0nJ`zbo46$K8~ zAMg1BTH|H@^SpPxw$+5XNrJQFdD08s*J{(WpFXhV3w){S^_MpT@_jrxfmXKV; z8XdayTzS(u0$yKq6L-$8;0ln85F1bvPFQ8vrR*?5x2LWwoPI{PvGXB|HPGa5_KAjS zooB5hqm~43923r4g^`Z$4_jeyTIwnn%bip-9aCYh`_`DcAl;i9Pe`jYe}saKz}ISL zycI{e@K>>7#=d^lqeCs~OWyT?0#IoeH1RFnVKQKZZWKYe1K(%d3^9Ue)T==DDo!nNv0Iyxb*T#d#s&S@-fihga-T4SJH z=l~a!+mufIkjkmv(PcMk?tdMOy{^uXMJ3VKmD~XMppm5*}MAc!i%py?`gWHpMCmSoyV5nNIgxgc2}RKp!v{`oeLgx zT5ous?qapx+=`&`EA9oh$Bz0;bwXY$Z`?z%8ZbIxTS!nQBiB7G^0U4+mb~^zJn^2u z5n$~ayQ*ZJm-si<+Ab75xR~&SN0g9wCkX=ai3HB)kHk^IOv7wzVyvuH~l!`Z3zvA2; z;4p#q^c*3OM^=rXUVz${!Q@(UB)&Bn5C^wXEQwmF+>pmqMt@~(H?(o+TKi%!md(i% z!rBMs-0geOuZ3qUwavc2t6RMITL%|EbNXpL;dy|rUZ+kBe$=|^HrS*8Yj!(sZz(_R za$M#MvVa?NRZ+L%uuntuy$rQUn=Pt=Uw0efC=f5 zX0So0rX{_A2`po?u`2&>LJrOHD1yLQyPEf~3$5wHgCL7UeBa4gl0Jb>C_!2@Yq+ZEe?m`!fH(z|ZO;F1M!AGi1~+q|z{IsJ^DrMi@vcuhd}?W}2!Ddl$w_xucIU#E_k3KEz4z_9W;Gci?H3!iu`t=Y}VAUC+`sn0Yo^xt?fh- zgaD>1ShE3khQ7NFdZ0S>OjLj5V*Bb9iEYUpnNcsh5fo}#zPSuwo`VUaLA)s2130G-`Oh1tfEpX@d9wUQXLUu(#_^4^&~bxFe{e`k z(=6I?z>Ivy<9c4=^rP5H5O0* z`b8<1^vqyfK%>itp8FJcNEUsOQbwO?PRWW6-B<@U)XCZm7Uj!DY*@BptAoP8z6b6e zouP{CPXa>vmcV|4&Lw3!v%||Os@{9_J}pIE(Jl9ONMLI1Ci|WZNStYUK*B56G-lgN zUd2y_$z?LJ)dXQNgJkh_1#%&d19<)Qu%-x{4n0#mY;z>R4Qd|I*A5J5^P5uvc>=792uN``9on*1}*zBfDx+c_mtY--U+-h5Py*s=4 z#Nrd0&ch==b+>~yoCQl;2&0kJ3yk&yeET8xEd*cX`) z`781V$HR3?N;C$oLqqRsOe>d`HH??GDh-cx46eD=vOUgRo{^(ZBz5@z-M-IV9dgH> z$IUR%+*;Ms(1OBeWbxHENOs)cLX_w`jEGR(oO! 
zi%pbzfY40Q#|?-u(dkMmPd~FDm7EM<(hLNVJ=I>s3O2{i$y8Jv%~M<&8+Tzh`pl;I zx2>$^fB@=a<|Cu$3t?;+>y$z{|5X>Izcf+sc$9gIj&SiB7t7j==8--AKgq^=sa&t= z2{F>YBx7NJf1(K}9XO<^^|L=(vR51x)bU+=W8&2)7}Ji?+3O;23s;fQqNFiN4wW- z8fJEmB$EWPZ>|yP)KqbnKt}L*(Zu4ZXHK2gjhMv`rr+yE2H(PwusGP@n$j5Y%X$K+ zH@ai#NK2cTgj}Llgl}qSWM53J4|ZW0-u2{Is|A>=s~a1IUeD#(jvq`&f44g>vuici z7#zVBO#m-2ozRd4!K+1Rqi*UN{L)&#OSGSwt9d;tc^SNdIVxT<&%tRG40utRKyuGD zo-E0|G#4VhoO(FuSqK>SCmE|4IVvj0sAYFBPvJU}g|pyB&&Pn9>X~ib5l8ePB_y{A zHGS;ADV0vF;C7MQO0rMbSz#Yv2OvYI*4!VAb_RJ&Hq}ps{3Gn?C{?vBbZaXgi z;J`!p%*3F4Q3EIwZiIRPlz@&TR`IbUpM=svPEf7rCAcihGUz2K7hAWO%+^U1SgcnF z^OZH`W0~^p5t2?7Q@mP`aBzFt@D<4rB$x$2SxW6d+@$F@K*JAWVEVQe;E! zOBir03t&tw!^W-;Z=lN@`O%!aEzFGWTqQGYRatt(rxs4@$4<-Lh<#tLH{4bsxaQ#k zP8HLc+BWHoYI!N68aF1tLgVGcCIy_7msIx3QES%}{5;vHtHBA()Vad6Dq^5&G>3Id z%d%pLi2QM(H}CS@x$qZghk5&AJ-neE(WxHNEv-0rPiIn;S9JZpgs*}0qK2bho$`TU)Cyn?C4NB*Q{@gPTL%0#}8h?C$k?y-;zQ2)UcJ) z`}!SF6!3NwB@kwl#906N;2~&mA8m&2b>l3sdN+-{wSCnkc}r~6vV1s=es-3IlqN?p zDtZ$3I{DX{VW_OitM||!N7K@goL@%hxA-HnRln(>3Re> zZhN5fnP*<(|5}36FL+<()s>9TVPWC)>F4z1J%CzTC=?zBvQK<C8o*gars z@=`S)(rubre5vVmCfh80VA&4DM~`K}0t;3Qw*4@-wHJqX%rDh1Y!Np$C}Dl=7t)+7 z9=QDN!dq9K)?>GK+;QLD@c{4PfyeS55jgY5-`R1$I;A9|&0Av+#=uNNu zwX)K;bz?$Hm-RDeXy#nw97dcAuO#7MHAb83$VSyG%hiTKDV{u6p@ve zH>3Bp z*sW!T_#jHV7n@9Y5eKiHeZ=@En33 z6bws6%WBn*NY>uSaYe7h)wVoNTGkEmySkF5G_vJ$`ST$P2`(WRV*8f9( zrm2wb#BqZN+}BeODKAM(s>^Fqr;Rjeg=3JJa2T4d3Rx897cyK2mz`R03bI!mb@ck_ z(;?09HgFE$sK;nF(WkyijR()CEzuqSvG`l7S{+bB-0RQ^DDf2d3|rOE*U7kksMTui zK*Hn53uJV&)GVLVyNJv;({DAcL8qp9Id^GeS=!>aXEoeW(TeteR}ED-#&VGV)!%4W z7+fUFm~@D|xtg))xD@^D<_YWd-M$uyLOzX=hE>@WUf#@lp)N12>jI@gK@_KNHEB=w zoB0FKV(H)YN3&VRBCI=@kbN|v2?LCBV-Gbyamv#x6;Gu%stsKMDipMuCI`69ZWi5I zsaGm`i=K5GRju1r0sP27m&?m;nJ$;tOBX8I%|<9b+5;V|JL0lW4m|#~4_Un@!mRMY zE6vzMbg#gO{9*RJ7UA{91zndMH0KO(dA;}6lMcZGQjn9B8xF^Lr8%BC6SOF3;*x$Q zuI`pdDzJ2`-6Ep>Oe^x5qH8U*)iFU%9#XrLU^Z+Qky)c*0Uo!M@2%c+pMHVGr7p zC~Q_dSGlw8+QV4Ai77gY#NrxO+h=7GA(*>|Zl$mmN2JuL6w5MrAc`b4fYwQ$k{1r$ zNG6GJHBlJN2G+OXaX*(}EnNAnp1p`dn6;;8XUAE6wq9E82rjNJ1=_^HA*=<<}R^Z9N>(=d`TyQ(-sp#bW+q1vb!orrCsiP)aEfdw=$f67}SDPi@ z`aPJ`BBYXKF*5;L=a0^&6hu3iub(W;1akYq{w|!msFpidfuXP7(A1@+vzhi}7e~Q& zvj?`pr{7395Cd)0SM+2xLby5%>TUpR^Mv{Y3?>=xMyc%GWy}mAip`_wtfJ(@#C~9& zijTgi7@VJCK$srr)7Vn#P2jdV0saqW_chei+b%n!7Z)Sh*Q~BPyzgQqZ~R8hLC;Xv;@rmY5X`ivJ< zmP?*JxpYI)_3}9lofPN&=VfQ)A$VCD|HglH=W#bPDF8#`ALv%6pMCzU9-#+3&9io8 zW3%x`B~dc{Li&pMx6#*`3X+VVRIJ5M5@oFqC)B}m-E*-?a(^VqpGaKA$S@uf!Ranh z7G}q4?WY(}_3Cm&pd)G>HBu11xcox}d-tc`>!G^`o(EWX@rgI}kTfUQSk(|= zR52b+eP!6r=oAM;tE0&;J}k1vfg^FN?o_UG1i8hUsy071+dDe2a=ftF5oz6=Q!2xB z-<^H|>LOujqXH0_v!J8mF1LxrI2906_@NaGYxtK)yk^z)kF)iWdxH!?>kN;5rhyx= z&KPXhvgq5T-o6_q036&9I~gI&S$x5Qj)`SZ`zjUln@DIf7hiR@^)@bXUi4Wwx>@Z6 z4~BOg8;@u`W_cvL64cL1uU#PF*PY$dv{E#&a7u4sz)X%jTZ>43&uNzgGjcfvAK2S;s~;CvFIAwym%ObbvI&4(+D3+LWc7{8kn(V zL>u<%pi$J0#zTPh&}T6^z`|<~fZqXZT1GoQyj-ovi}lzuh7R&gj772Ki8_PRFGDN| zXkKenZDeHjY-}$g{FdULfU?Fy>OY$Jn_Vcf3 zf0AsEPjvpV_J5)y92iT){(JuWCT>Nd6%R2B2pKm5Fb9qQg~e0So*OUehNi_d77i)m zotZ7y1BVetpEYvMEfX3v>ngO9d|9_C*WIHc>tlR4PApCaA~m=4J5s@kE#^#Gn43w& z0A?^Rkwf1o7_PO1R#F*s6jo$%C(V){FTZ?lVZq~a^>SH5ySV8;lAY6UJJC-ML1i%{ zSI|FkCp{ybyBGA8)-`9@v6FN4oQ6geI*K=F7+Joc$=zrq4In8k5f#}ZGTGbK2TDhs z2~Dss=qL;1h(k#J2Y=8VIPon&s39OrmZSO8>_dIPa)Ph{>G?}r&2zQ7Ug2L;78YMd zIH-e+SYFq==>MUzc{vF4= zGV@x(-i~{*6m~1xv_Sq00}`4>VQQiaw)nqUdVkC+3mW;7DFali+XV9GBWg z<6~>Ub{I?|rlYKkQ@2Q*zoAeq0At_Tb~uG$JW@9@eS;)8=B7g@_{LG|*tSyK_0WnR zjc`XAKe1C#BP{vi*m>~NAM|d$vP!>(`ekXX^ZwJzZm4#){ltEJ$rgXD{RmKNbRz~q z0*PmZ21M_Z>WlmZ+S+{`gV+Ly>ljb3K8%?^amUoEri??#Rd)q9IqsB%$a;H98dhoz 
z{kZrm?_aXp&ua3@d)9YE3f*i7bK5=p3;^d=c%fJ<-NzDErNsA|8C`&af5umDVIB=J zVe6N+s2{Cs&QZ=p3IZtf%Y{IgqQ$p87hpvz;WB@eVEdo>RpyNX^c=kr$OF<*4go>} z;hL0~Vg$KTA&i>jEY3@|pS4wq@Yw0o?ich=9Li3J3WHeld*vCY{#?iA8+Od$*PB}n z&*qL=Ew@PV>?oSFQtzC-+sIXbm3t{BzVyf-nl7^+fWdJRCinpeo$0wv0jA?1ezA;E zoHcHvxaQMn7EbxTr#*3pJVHUzcNGF)ZM$V1Hew4*Ts4fALjy8^)fxBzvo0zNl5-Bm1qp0B#v(oa4j-KcqP5ecCI3rNLJS;0{l+ZVPr(P(2{Rv=qBwKCXL7 zd28%FmB9#j+=@)adL+!v0Ea4KPnST70Fvuf@*lp^cCpnoU@s~Eu#8thUAOvn8RGF7 z)8_j)fZuDu%&#F;m8v4OM_9nr94jL`5OnnAD0;B_Y|bQ@+KY3$>@ z$-tirHximzr^(}Gv$iQ&A0yjIS~PMScLRY5x20O9xsl|{jBJbjT4-J8F)1elKIrBY zVT|993&A=21#)u--}}q-(1EhLMWyPGn9?5y?~SF5x7lj_*z>imjpd(pE!-ebXMT?+ zCJnH>f-EEH_&2GNnz~0kOFX+n#%({+B(df;4c6vq*-TkT?im+r>>Y6xK z%aXRY-Q8Gj0l2x;?YfBpfCGrF?sGD}Hl%~ow^D%{9^gQk|z@62VeDK#rgMH2iUWS3%x)6Si+PNZ{8+*LVxsl z(9Lt3WORI!y)<5(IQb3N`fB!rj=N``aL$5GGFLRKY@MoSfQE-Hl(g;)$ple*y;`Cm zF;-Rtz`BgZ&s_&|>mOzxYM8sk?RTx>RZ38T*+Ywr%yS?Vj0Ux8tWpc3jIt{D$CZGLu-Wq8a+AQu>1k|7&KqsJbgN7=cun^ z;ChEl;1$hIv$kF?H6;YpWpe|}M#NW7O`^r9aJ3-Y(cbn_t-kEXF@>fZ)t5uPuM_GO zYet9=rWUOib@TFCjm}WI(K5ui>!me`D8ZOg`CJaWTI$`X&`XgVflvP;yz z)=Y!Sa);}v)7Fz>tU+7ro--y9O4-K*L&$HRl*edQuK%V0u!;4p0W+*ZD}?OLCV z-yAWl=h*2Ne3Wq&f?E#Y$1>b?Ki1gG&Hk~&JcbETY=7|BZ;q|tBr_%rVBaFo8Pqf> zNSsI>xSTdiQ4VT&@C^bGlATR#V#)IBFG%o@2fq1XD2&2Hax^`!#{>Ph(~$)Io22L8 zCS(6^pVAc8Qcgy%H~`+6JrGm{sJp0`9Wt#?vH|wHo-i^!%%!!_^E z97#w>CJH~EKlQpKn7&;GWr3&M+*KddhpEoPe9Z4Mp!J`y6pGp>Yqn6-FBMG}97BN> z-k)6WxQ<_GI`zp`)a~>>3|PnCCo# zfH;3~Bidw~uQoLTiYq5>v*PcWR{Th#n4pXFAhtTuv9W17$~9xky}UH(hW%vcQP}4^ za$*ADU%z%^y}9YD4Eh4Y(f)SR>436uSTzD!xYF7-9E5ZB{5fLv9;NJeG%I#)5v=qx z2NWn9sp=#hXHILytezAvS})DqCn6R~dK1b+xLn;foZYNo&geeLvu9UnLX9luwfOQ* zK@Z{bKx`G}A8oL;#!{@>oZ4HH|00=I>4sNIy8LzXE%n0i$bMz4*;+*Pu;j{RJyPNd z7#WF zxtav>(c3eeeEVB7{7kTu?6mzb>PN31c%mT0_)J$L^6na+iZ547h~M%MHf-M}9M$Lu ztr(kT%M*}Ddftx;YC!+iYIjUBSA*;9xHW|ed+3=)bGh6E&E^u2u{Iu2be_rtBavWY*@R<+GP*D zukMnEXbJpqU402QW&5Lf1@`7xK;+L6$y_Bq4$b@YJ0uIc=F;18~g{_xMBNh6q;2@4s(5f`jsB~%9Sgc46K>4L&irt>A(#M zhl!go^~jH~(lH%UBKB3=1U##w_=)5 zbc?Ega@P%AoJ11zrSATqLi_zxn~-lL6d)%p=>RPgFotg58Vu{%;0Sy$u)nel7OQ*+ zTKFhKPn9U#h4L|n&QZHf^>*>MX0dcFYf@AOb{{~lCV6U&qThY=cR&5jxaYw`4CR=O z+0EZR7f;zA?gy3JsVk0+vd!nmd}A|9A&(Bm9!W4ZYcwRltP$Hm zcCMz9nT?WnfhH$IptnuQsSkNz74o`-nFfhNUUCO4GBuLW#E`+Sc+>9y#f{C#^_oo%gG7e}=0@9TExMRq%2*rPc@}&p$Kakidp4WF z0@k9P$~J()G3IZnd6Os(Tg(;Vzr&4X^rqX^TLtbinybniDpSp5gj-7C9g; z?e=l9<;RZQEQmjR!C`;E=*zO)WXd__5D^WZI!|OkqvdL;O{`CsW%hP72oB?+=U6B0 zlQ0bSPTB+&^*O%u`@p}vhe%P;K|8p2byJ+~&u6#vz%55ybhiC?7_{6u9EBaoz&+Tt zc-pJn#z)t+U<}`USiQ9TEV!w7iL=mmO=?fK?Up2C{0KTs8rMzl#e}fe9&1AM0hKrp z<9a}n2Hw1q7|7AkUGe5|&uvVu{p0j~i1{% z(5yLfqc@DC`-fnjKhn%wLMW5_;jxPt;Fi*5ZS10o&thA;k!=gDj#0}lIX6w)sQWvu z6L-pQdau8RaeC=no0Rj0eF+%??`h~SEW8#wXjC+hk03V$=6ydM&4J#kuf_b+-j?m0 zXLmkC59l@zhhAAChF*sgM4T-U-RC|3ZUlp`!BxzFu3e==o+h zU!EI{di)&=%fxKeE1PSjvV$m`ULqt6N1alC$I^5PJ0s?2k9v~Cy2AR&TrKsj#`UDY z$m(Y0+8X^s$=FMtao^x{W`B_l*1Wv%nnO8T$->Zdq_Pwbwi=+7lL+He0tr&Ve@4Y- z^xAt9&4vkZN^HC4LCPy$y=;MIk`z3LrnWgO0!MlTgA7Yhl(fcsxf1F=VCO)TBJS;LWHi-i+#%8=4wI9^>dE z0TGqKUVJjJ_uR%C&!0#4WVzd!5q&j*Ff^mB<3YBI!2b}qc4Hme7A+l{v|<@=qxg`f zt5>MqYMcu>!PhW+r$L)P)g<9M!q3&r{QVMi9W4)G(W>W5n)Q0c10?iowM&YAnq5cb z0pJ2m$_~u|HD4>6YdR}xmo-b5p_;8-dLs7_B3$?{#>8XQeJ5KK~!W#vp+8=cB!<`V*we8%sj$=-*eR{!4Grbp5-kvN1QBk@CUDwO_7+j zCnu3JTdQ8EtU37Y)L-eLPQUfmf>!O~buH!f;2AjSW&ndDxOF6I^CDv1jo^PPL%XlF zyeF(>tkD|bQ#Pue04LK)SvQ%dj3v4dhf#?M|+Mc&=oddvhh5h4BhO#0`(>gL_35UOqArNE< z(w-vd+85xqM%|>Yv>s?dQfEkp4knB*Yh-+ja;)$4HKLthJFVlI#s;e3;BqVn(?~#j z(s3^tzveTtl#ek|H>fgn*d*otx)xqqwV?+?OfUIRiuy267-4;&dm0w^(U)sG07LX; zxFgQTpc0*99@8BTP7d65+O%%O9lwrIFS=*P{-WQ6W-?4q^pNC@fHj6k%L%DD)qaF@ 
z{tmACyIS#;O@FM_wTeIbl_#Arr9Je!Fbmc?XgIR8y`&8X<6wBzt@v@*gK5J@k-Dfd0j?HsUWB?jjYJ&0oUC2n-Rb>_u4 zB|9V?h){N;{SOx5TJ+mRya`cy^1@yRI{z|En`+Ue#4LD|M$6r$m8%l$xdl zX=`2%pQaAtFInc~B0{-W!PN7#ew5;lmYNqOz<9lufbz*no0jE`3{7Mf(of#gfOJ^i zI;}PI@gt9=hV}rmXmMO~c4OeQzKaG+*fNs?uMA{E_w2;Y{9bHM)OY>_tZ1C2m33Vh z?3aPaVI<)U@m^vF8<)(X-?IbyDDl&fn}!d52I zHAfEkQ7pH;vZ@+A0x<=GA9`4*j@%hAp;N0Mt=2iTBUop0j($ljRQv(6fpp17A57_8 zqFIKyh+O^AiE_&pNAaGEbAnRM(B_i6!Y?i9as`pKHuyEC?{LBUqrN^0tT84sBhs+# z6i98=!cMR>M6mY4h{nQuajg+{&O6`A1>PMFsgYwwj|eH_1G|H=enQjESXc5&oy;=A zfh)?0qD+@noA;~J6Ruf*fy_`Pb96rs(n8u`75aM2atL>2)n&6o0M066HSg>)L@eBv zhV4fwJwxjy_@st_>iB5-P=`Qct#r`?rUXy^arS*Y;hS%sIrDs|Xu2M!ZahX}<_MQp zJn1xQ?QrtMpzu~co_?*{asu`a9ja-Y3)+Nu^U*)dzLYpy7!m)~zW#&8$!Q)G=dIBR z*i1iq8MZM2d=R%z$ql!1*`I5@!M>RR!yl z8-N_OzJk~*Km=k4Ci#(gPd3Ij;CN0eeTE;+L-eZ+DK^!zumm4PvZI+Q0%WhiivwD* zTf`xK74eGIfNOxj0gvIbvF8R3+(2Yi(GBPP78t9N3*EYs+`tm0xJu zc85;$GBPTE{T>Ao7a=u_Mq#PTk?^_u8$t_h&HSnBLMo zENm3$wmF4Y%md2-#9Xh-we?Mcu#TIs9ckr1ohRmd1r2ZuSA@TR`U8L8rZ(69zHfiy zHrl)I>SEU*6DP>x0uAhDn^@IBOf5$A928(`Kw{9qP-f6E5m4~xaD;)wQ&(2XCv{L- zJ{hFy@;GiT3`kCOJG+@xT%zk#%N4Mlg62nLNByr~^pr2b+3`UipHIUg@rs(w0^D?p zfJf7n8MXj=)L-s)hgUgKd;=;;Ndiz4#4T`Vf>~>`*QL`*yt|4^-*Q|hB^ASi*65nY z^Plh<+G-^3t_CRt{fxPshL0_1ZJ#w^25CFscy>RIJf=Zr^O@(&i>L>f3v((0n_+o* zkjm}+xC1ANNu_&O;+n#&VCcb6uCB)l0ou1Noxww}J=oEAEk%JA3wr?lWZ|;MkB8cR z>qMp_hfRS6HZS=zpLYZ^iR!IjivMlUcusnrc`u$W1%)JfRV(e(coXn`n*dl8>XlYk zb$8&eM<>azhuO!QTeI!SI?eO*+L73{)G=y+Dez=rHA1!e1it$_p{F-$pi?|S*Y8o2 z+M~X~r@5`8P?sd2_6^*R#3X0l#ql?b+CsGc@N+#Q@fe$bc5`)n{pxr@OR?)>{ps#=Qyn(l$8hF5S4oE=fhT`i^b->ny7V zLX6r>JgKm!&OH7d8TO z!BXb}utNVaJuOZbl=kckR!qO+**^ivLo>D6ZJm<8TGYi#@IhtapVhK6>23EM@;RpI zipSdMT=ulvYqNhP59e;lO-&>RX5ZbrNxS92TqYH~4SAASahwa5IDbtot$U~jkFsjE zo*8sqFEF|{doPtQ8ha4cT(f&51HPSpA?t+*edVPsSTdIN8$hQ8z}{D9asv~RHGhlGPu#p@IhqKF@C*jY+D2m z5Q8=?>?2uD{h1^n==b{8vi6RJDFDxO-0CS~zIx8bVn2PF%l-5R7aI~C^wA^-@a^=L zjH6O_BxzapqSLC^MG*dTOA-L>Z(?hpTN)`a5(jqEtl&vzUJWpzwX7Sp%FE@-W>l#_ z{4~pI-7UJsSr)(^gQ{^8$v5bBB5|m4N(Z#0vYw(K(R>nSR8M!<@=boN=fSg+7R+Ee zN6^SPS+mdW@99U{FX&I2ICz*5UwUv|t54(P2vAU8Foe&_b#`;O?1_+av#!xsG4z#g z)94ehi+t5^U!S6wyJ39q_K>mWaEf^xf0u|^%WWJe$x};&!=#x za%1HB#J2AAYe{sI7$jll+@P_ommO4lq!7oX^~ModS?+S?(^q4gUZF)kGZyji=tkH1 z`!>D5#_LZ$si!0~9#hc@Pr6PN!K^XO9z8wZ(t{emn3+ZTB7l@}Mod@8xt?!%NrZ|<+4w=wTCs0udhD( z{nQH-xa)mA3lz@AaP(he&WPUZJBo*h5hMb3Aq~><1JMx>JfmClsIEL+`bGe#k#qpk zeuR#POWip^%XvRy{s)Ps{_Jb9yR3i%%hX#^5*>gnPR~(VDlb=7VwKJj5oAp@U#lBi zUeLEb*gbG9_~;L_kM$v9L_hjOnjX0|eMj$Y?;rYkVR5(2t$}{_*mq5eXyP$Sn9%Cc zKci5JL&q}iK^h-Ftdu-KAC5q?+ zI&;a^ijyc*$3xHXHtQ8VF*y$Z^mg`IYk&#j>`{E3c*3%XaY#FVk>ChpMJ{ibE>8qzByB*j*mV_wLRa> z?u3L``xJCj?9W5mvT@A5>k9wrg{Nb5i@5%$HN_T;1p*d@7x)mGw+S|aekQT&6Sr2Y zUC{UI!{f&w5_5(d1Xd5!Zo7YCPGRahgWFKQw@`DvzXuRu|N8M zC<_D(t~on0r@$>1#~%5e2&DH$`%A-&b*zHcBzx)28WvxBO+p8nA^<|8)EGu@r5;Us zlvdw;a73W!lB!w9^B^ooiBUAcA$w^lw8K5OtHBo;oL=8CJ05;F3-e5>eEj>p{1W29&s#Hu0ykAhKMdl?1*&t$uS{NzlHs-)i*61ZWRecqXE}b<5;%b z{95u~Hz{UtiGaP>xBoS0ds`k7zqacp=Rwz@hUi-SKrJBJgyqo0j-yo+1z%~YR&PosD~UCu+W9}E!~e}rLbwZmxu5jJP=H|m=oLrZS2GWg zall%5{A)e<4eO$+$FY1qPwS^KxUi3AALux#(PH+9b=mg4VtxtiSDq6>LxBUd7VNT3 zIDCp$J;gzkN~a}LnDV8~YO}PawOJ3=HS`L&Ykjgef`n*;7=lJ!ZNlTCQCrr=@LT%y zQXhzrIr}~XZ%@COekmEJHD;EmVKZ4)$3Cv_7i3zUl1mVXLfi(w+HEVP&`7Y?`xzLI zkA5FaA*FR#dz!j2j=l#Ncd_H$VBX`&GzW*%_9s!^1 zPV}8lyRDV8WFH6yY&q>#bQUhB`J9oBT&q?Z@mRxSzenAYqp|fluE{vC$eJ*#pcop* zW&DEvqWE=wh0Vf*4Q*s$LK4t}T}3sX{n;O%?R%71cngoGqj-_hnsv>raY|_9@OC_( zu@W4&qN|%_odzeG6oUJZHDSGZpm%n2X+^WwI(AdJ+jd|Cb)qIIdJawlvrls)bC9(K zu0dl$e&IyVVnv8YA(iie+;d1K`x3^1pjPU*yTD8N8KMhjY5GcaQxb&oqn6HA z-zU{@loo;dCODRf0Hn{z-mu4F|NN)J%YN7GfMD8 
z)5Uz++0CYp5XylCDEW_H>d2rl5I{x)AB*hR&rn{C%4$xnw7Fi^e(kp0YajG_D>{$= z0@Ss$ETBhHEyj)-W{^KyTkBqQTZ8K7?->#Y}Fc;W0> zy;W|ROz2gi(Df==7awU5R(XXWw6Zd=!ci#byfwS)oD_CwX*$mVgOx+O;fU59!x-r! zwn_E#>E{++l;k0{mYvcxw+@`y3~ND8!zns+{sGLBVNNW>!FAy0Q{v-i=j-yDsd~r5 zfZ+&^595N+FsxNhLvz6U$XhTN&Tc;S)ak#KlKB7C zXfMovttkPTzI77lNGSk`y^lkh*ddLqWA<+B`;mDj^Y$%T(U&}>Z8&2VOH)8h%LFzi zA26u#N9+9r2LTJCk*u2XHS5AhQh@wPxziEC(*0jc4mvx|-vZ@u(f(9wvYfWqT#d)Q zP(U!f=mNc~&h?9pMrbj*Mo~s^WI%X*wMFYv>u>p3Hwt58N;qZyP32pfjdR!a)0#Ie zY?$H6C30RLiZ)1a-tZ%&H+GwTf;Kjkn+9O;xWiV@EoqEj9U0*qEEydQccL(#G}&IyA+tvIQiau3%fUwKjjU~z=4`FHuFK4BKKtwo@xdK( zzc+F&5h|Xdli2L$)sY9{@S2QjPa6#T=acBY2Rzy9UCO9%4_<;$CnjMaM0ccKKIkXd zS<7;px*VSOI8uzO6Sw-9!U$>{qd);k%T1QF$=!8B~F(IooPm?5_AJVae?hf%T#<~}6;#h(+ z(rPb_Js(B%_1#$c(Se7?v8%7?^W!5YHY9;Wl61$biOospq%UB7ls|j zmhX<*+=7vPXxG1_-qLBYVmHMf80#AbkmO(ES`C~sbm{_ z=;@jBSu{8c4O1=?rQno?RMHEy~gchn(ND066_9*H=cxuLZ|-?JCCi+zh@1RJEu{bJ(Txz1R7y-k{B8|Q6R8P`ZVrRC#%Z8>N!m-RBq#xNJ8co4fABk zdM#9f11_ajQo26p7j(gzNve{pM-svK!%R_;tv)btzJzLa^XXIid2}+0n2oi{{hImw zX8M8TACyOr)jx`G_1hL#((NJHLDF@xP0Hmp;aBJMuzGYL*a0I3j*=`Jv^KEZW28;=e2N+&Y6!&la}8eTE9cGoBn380 zv@MmqAy0P+#~R{=MppX5ndi^w$HQ%pkv#gRnR`pV@NxUjf%}Fi3{oSSH1f`DH;uqt zo^w`Yoz2M6FsNXHY0_ss>3+w~&xKtpb`Ja^g)ce02c5OUNrinLPHef`mo%UAz&B~i zZe~U`*PUT>G5REpw%wD%8FMo+p$Iffla51^STX0)H&v0*uvlT{`})dqt)VF)bZ>bB zRGdE)3AfZEUXpIuPO^QJe?MLfWvJ%7=!!0*Kz8)4_3GMMwNaryc-iU0Uh>Wu4FVnR zeeMpu{KBfzNkvJ@IreVED(~j0ppDAQ&3Fj-J~_%GOJ+@Y2?7`uj6MXI2hTna`eN|I z5tOpd+DdDdJkA_hb5uE9^)@f@Y{LhjMC0;?ZeUYMJ`Dd<^Sl@x!!B;=IktiQcxrhR zp6qo4@`T*dP29Jl;mq+8Xq}O{wNbC6;d0arQC&WrmkymH z%FBR+22n;*I4k-@ter`;9Z+} zsTju=gWKs=Aol|MuXQo^!${+CrnJR?OkoyjHj`&an+qv|!r?=!I=<=rvSzF?+S891 zdirq`?1|is5SIkNbZ^^|cG8HGQ6Ax^vr%3YyV-SEizy6Eg%#~W>Y{5lw#wy7Wx28} zjdTGsG5fITC(qJj4T472a}!8_RYA{Lh@Wj7-Dw`<(HVf9`K{TDvyY~q>Jpd--$rvE zj@S1RXK}kt9yYZMUoXuI=KQG=$`2sTld zvYH;95E>b?!tQ~40ei$ie85KU;8A>t+#)y+Uo+N2jO-#Og@$}Ma_vK;B@;+8?@3Q+ z?txSl{4aVYtBlNi1Hu6a)2=6fAxm3dvkckX1Bq#?8wT_-5&3X-XX#tbB_@ua?Am4CGl4HJ!7#`I0JJ#9TgfY zuxU#qI^Kb~>mKVe2Y7m*RS#+Rq($jYuwA50k4K$;WGjJ=Di%5aNy7z)gj23p*fdm~ z>lOX$xuc}zV06^~500tWprxV)M0e@_q|vO@YI@Yx)x2d2m6C=;)})X~+|T^?QIF1B zp1nbxT~CpGJuw^dtJxjBDkah{-SOJUA%7&5vfW~pgw{Yw`ow%OicRrOwOEO2dxYdg zPXVgLlI5#$E_FA0S%#d5W)cR26>B)HtKg2`(<=(AAi|4d-_&_R_9yZ!-PuHxLPY@i z;d|el-k!aq!5TKdwqauCzrk4Sjj3bx-9-|WUUJi0I{cV6QwLb)IaC9Z_P2{nkPfD2 zvmQ)g)pE7JlZf;4$58iuAFoTC;EjeJlI?|QsX4+oXt|f!Flu6b&$!>r|IsH&a)#eb zWayLGkCK*VY1JEEFRt8?t*CgJRMKBHK((i zv{I405#78)5Zc)Ht!Iprdo4}+`-zgt?{A5imh0c&%>QU&PQQ9eKbA{n2~OQ%l2%<3 zXM4B1txsPK9nM2wRY(*#t_g`oIc8D5VI@sFaENB^qlLMdwS0G z@LnG2CKbI`HIkiXr&qu6k~ z=2azvQ(KwCpvGJ8u4*Z_WP_t*88O63b%-stTBPkk_+9C(3Q(jSEu$^Vu-qhW%g z-=LkJ={_rFLv;El%FFw*gkfxGJs(`-Txrvz-ig)d0Z!rPvrl|pjJWU{o1NMHc)+INL!`1> z`vetR^L&mLr(9~5J)Vr27EC7-x)J;RtqshYzEi40K&?lw(3wDwi3I;ckOx@U+DKOUjtadjh6Yi&Jhf^aC2Dr zpeb{p#|O1b{?thKxVA23RzO$_q)=}|KAR{If?A}!ap_;)aGK}M(&e>UX*q0U{2eI1 ztquY!StrE>&qut%W|pAwELJ}8mF8qLcIwNX`P#SqG$p!Q_a zdLYy|r;ES1_@wUJZauyks1WE9Skt#^OXuCn!j?Iqqu1ai=hy^0QB=Vl@h^aM$8cMwNLT!AkejSR^x8Jn7GSGwme*P6P; zIQlu-X^tVS4OxKuAzZ-Nb!h_-JG-TOo7cI?(V{tDr;)EtHN%)#%CztK?k3HQU*boto0aQ(~ zEbG^MFy$gDvR;Dq+6k&(kPsM*7^xjp;FXXf6F5Lw~C`|s3d z@K3YsuD$g(r0rZg)S2_Cr4Hm3{;9=i{~K~U2Kn}Xkh56e{m5|N1A zG=B(3;%gt{@92)pFrf4X7Vp)k(qaBhkq-CY8G58Ba`{Bx#GNRNQ+JXR$c3DfruNxi z>!%AIh0mi%?Vjhpi0Ou($0#!X~5Im)4+TLqp?3AAAT_-Vc*f zwXTn?)|xf#JSx5?UUD$#c@GVcf%*tpxt7CYFop>6d47kRyZ1_Jw~^7oAWMk^)?tzq zg3-n%fLFb)n_txt44flhOC-%&jr(R@0v!N5w-*0=qqbqWn}~>5KhR_r#M85m8JjUW zpY*sU4h3S^$-aY($S!d~_A0Qv?(}<%|OS}4V|`0-zX@L^$n{L zAaST!BUn}ucF$^Uv(!8%5nRXq(=aS2dDNklLvIpj^v5RbFv#FW*p#LVK2a!sWB4;% 
zzpinkJ8`2CJEFFE8C1CLSUcu$2F&Fepmn3$c0A}Z46+*y=&1B@^h0X<9hQM5%gJmU zxwae8x~0s{*1VuL=+`rG)Dye$0}#ePUyd%o7r}<)P;+FI6fRBq^j_T5g39I-hvZ3$zLe*_>y=XmX#A z`CVPZgT|=Q7Qb$gTeo@Gb=E-BP|Hc7_&70pr+14ZUK7lzl1QU;C61C(qp2ZUhkHM9 z4&j2=$C7vE4BEWH&34FOyYGSQlqD!7dosv4xZKnVEOL1CK) z)kjQ&CmbzyjFvP-X{l3^Q+os*grM?Q5Dh+}2K(%;PrJGT7?9u*aoBR$ZA~@FZxh>}5M!z93s2_kh`2pPrYs zeG`LBS*d^5zZy2S$!W(V&Tc;cJ3X>tR%jk4c`?GgflJ`*CJYb@dXMsgkEVBHJ**Wy zMR4?tiiB2Yt<|ThtF{l1z{yi&UJ2!EixDDH>o&u&TBCz_< zXnY(~v>n)Vrlpsi?aiW=Mjw6l?Aa&Y)C;3jccK*#Mu7}R(*^%3r}IC|K9$mBBe_Cs z(P`G>ELNuA{Nkx!40t=YS}s&79(aLJqipQQS`bW@+n6rj@~@-vmwQK>q`CF%7FpTw z7~5KPqtfti7i5!(F+j^%w(|G-Wi2P$VS~#3l1Cv+rKKfJ^(tt^@&4(jKFe2BT%}I*{ykQ>_vwlgQE7O z6AZ>;V6>ER*CaqU&3Tzb*8Mq3K0G=+nlKnE$cQN!^(V%Ms3$W|r{$*U#^|ChT|mir zld~+20E%@%_yrx-YIk^Gaalv6=?Our^htHbEkP8tTUQ;yON^ukQMYbHu=aEMHQ)K^ zuV+8#cV-#FQuP_3F(OGK0dmaem89WT9XIS+jUr7S6Z=d~2 z{h}tEtO1Tb=ba@ZHC^P4-VQ9&X06_A=)22^r+z*lQLW22>TYMEkUpFKsE6c{soi}% zz{)o65){GT2;de^2voi6^l80O3S-UjUm7k4Ut)1=4mq22Z|XDkW2L&HIjVt1*^|h> z=FAe`(0N__fbkHcf56ywlsy2oxF5Gv>Vi*_zF)qu0l6$fNA5e&mz z-1ak=lH1x=wx!M)J|q(-Lp=t1YS4DYM55)yD<9r^^OU0Aa|g+&sVr@jG>(?)#VgOB zm26ljm25cJ3#~VX;MRIcbM^F#`suuPW7)PyZ$O8@q-ZJt>FFjEw0q34gF;U!M0Zkb zKlpjgYKQa8fIRLpv{T)8Ip`^09ZdMu81P9}9Dmpk;7CMF{D9VTlaq!az~o%BOoXPD z48+h=EHzP_*mgIl2)Yr542f-G`d0sx#cgUkOGqwNo|1V23fOVILv)}k*A!a)Tq@I~ z-JrWG=7C*J?*4|)O%G!?hjop}FqlHZVXPH+3~EF<(T3g_KnuTO-fhSIE)8p$Za4Zo z0uj`sbSHsSK3Y&+edD?=Whl}PhoLDR!ce+7nXj*B?`z}z`nURN^M-B)yx1x7iT3oI zJ_V88M5$>Y7V>ClDq7e4lun`ZhGjy%=&?lWX-NpvYDIdV>oOcSZ##;XYZm;^?%^*H z2V)C_Ja04%MZqzu3tX(hbsSHP@0viq=@{2CTU-#gJiK`p*3cOi=y}(t<{nUhCtb~55mew^H7pflgUxUmjP#PR$`V3e;P57h#dv+@u*}d5h zj14qJjEwzAGZlUH?AfRERXiZ)Az-B^9=0Ly5VYsBo4-1(ha{wj0?{jyB2Wp%Eb(gJ zJunmZtY!6RAB`3tB^(u~B6{;Yxu}bFC>n`!j;o_XN6b^3mhjh_V_OyuF{#~FTdED?*-j7srf;66>#f(^g&_qGqlv~jX(L6x3QT3z ze=5h#EQst#8Ia#zU&-N0~9&wABl;YtmvCSJW45Gh=;4da2YXY4#g?y>8$0 z(k6zSo4%=2i>=u7tsN$yLZD8Yo_zGnUOctD?9?9ih+!9K&-W=fxd}}RG_j_5gImz*ToM7ir#6YX z5m}A}?zOT`?ewx6xG!d)Nnattzz3E_)1nmYE6J+SsJ1prxx4t%r5?lb^J0@&ST^w{ z8xxO=7KDV6u2^p@>3Nzx*3-S#Q~;v4NH3FE)+@Bhq_vh=sYNef136i055G88Ya5#g z1EFyC;@4@{{=Cz6y;j%0x$J&Ox8)3Dhe6Rff*&$!2j*oQ+6_3+G;qth(2b#5&V!o< z8HuPIfd#BR&8>0HI4#a@_ z$3mD{)eV~KZImmT^e11{B#e%s*Vc>U;UtJCk!Zp;qW7&85&}~Mn6ZR2 z+H-S}{Yr|vN034K0;Bsg`cd5BNccflqDo^PzVMTzZuKWjg4SvMC(+6ikq3$g=RP3It`DsmP+~dYDT9@Q5_9aGG&=cUr@o!qApGR z`ZmETi)ee5QDbXqy_ywwGR?Y_=t>p2`Ot{F4JXb@I@^;kp9Z?N)=T*gRd zKH)#p37@%Sy{_4M`s`W#vR2hEH&^^XY@Dm<{Z~9>cpV{kWtkWjVl>veES(VX06j9! 
zwksu#7M%z)rcv3{G903WHCF|3L#dyGLg|RN>zOwUkK;J^jcljS`NA|vtjVTwT457w z6)C&SgD{wi{mJzh6wT1@>ASrmI~1pY#YHS3ANFcO(YFSe@$Y$EU}>v)j$E*w;T(C4 zIsIssD6_)7tDv6Fx()q=!JlUhZ2 z*Q>gf85w4k=)&`JQZEnea36b)VA%Dl6BacytWB77$v+=!xpuMYW&e{y4v222usWS(S#)A(3b#+--kr# z8%+rLjw8^rtqf(iiG9?w+FFf=Z@x1lx~*qh{N3;L}_n>)dRi)q>54pEdjBN^oxy2u?$4x80fpp^oM_lxXB81dENf3av`QPF1;+(!c`#OZ%>eI(e z=?dffK`a2P_vP%NKmQ$KDqnGh`?CiUNe+QqvwITxmmT2=G1?oB=0b;}CbN~p<(UKB zd^&K{>~PX`35a#O9+FK_Dnk@%pDjM|x^roosgZ?{pMU-ZT?9WB?_IA@Doc4_)qI*1 zI2{hw=c+{k$G^zGx-W(~wZfWI1N8zn^HDVjKW=$q83b0gzl#(X5~!87yd;?+3x^d6 z#D}^GGm1SN+n`Z#Bqj~5_NZ_RqrK=jYY}w~iVoS0(rTrls}VQ*aF!K3Tir#@Ktcqi z=zOEiNa(Q$oq(b8t16)o(%2d916>aNhaA2OF|rjVe5|Eip>>CzqXq`nO~?jTOEf>& z>A9U&jD^Ydds5o}%C{Jm(h(}$iz?@;&=3c&&l4o)zTfrn}Jadx0Qj640 zIpLP{DYH})lbrS~o@)0RMAjzkQa4mRnlnp18M&2A#oa^2y~Bdsn|`BBeLi4V=k&W* zeDb?3Pk2*lB1P9iqZ6F;~LO$FFr+D`_Iaq1#$u$Y8a5p;*%%vfzx^ ztimL5^99^;);^nsYg4Aub>adJo&)0GW+}vGNNmq=g-DK=r;%H@SO(;d22P2)j5_t5 zU}|9YFuq6MHtpQe(Kg##uXg)Ocdu(o*k9izW}281D)4a- zK{iU_IE3N4AhLwf8`EvgtiIv^qNSBix-(BKYGu|)JxFlsZ|DP8-g-+vcD6lR2;S6D zIQ!8a4(Mb&Zl~`hI+*!Xcg0)lCC>)h?uq2T*;xnVUNRN=TK zEXxJ~EEVE9Ib?WTSF37B7;O}A?F=vSR!P`>$6DCYz0E#tO3;HE0*)D6M;c@*Ub~b2 z4%DeN+A|!|o!J9D9~k?Q?XGkLmsXtANFmQ-|jm^ zpxp4#O|9d|2qQ-K7dw^=(CqBAJwn=VIr-}0;dnuQVZiX>r^kFnEhI|Jy<#Y+G>y^i zTk~)*wA7Lolc>%iFI}!7+z&Rx6pPmlsP^`?&a6$e=S6z7VwZ%7I(_Qhc;e)Y6 zMx0dUS$e0QU!0YGYxmf^n$YX5`5)bsr}Fp~Hbuiv#>G>wy{3D!tJyViCte+7H^xs| z%0^B=H8Di#7F+c-H?>Zk`i;glyZPF}6S`&r0dK^XCPVrmZo0GUQ@`)vD2*E7g~!A}{!fvPmhfetX%a@0GGk9~>r< zF^XSwvB4zFZXDl_8P5mcDL%thzBB#gKmEDP6_WNd$&=`CPFVqqixOK1l@yJGdIwlN zF5lx}#)z|Ay%_WS!ix*>;;5BT?=3|T;#&8_4-X2>QoTgj+#G9eq3XFO9cLsJx;O8` zUUUG~>QOig>_KA4DaXDC{HsTO^w0E#{pZbghzUtzEPKW8 zjHjDlu1riXgzWEf-hq1djP5yVw$lf7MaAxBpN7}Yk#jsM9gnPAxuO;+qVomR7w=Oi zYc(iePQTK7I0{t$CtKAaOxizZUeHgMTmxqS&hcd6V1VEKQfq6bY}7vxHXCIwE@nwl zmaT<2XnD;_xmjs6B}U+{F4`HWsb07eUA`Az{Z&j7{q39eYPlKv3bBCk>psxztyed6Eo)+wDzEf*YkQm1TIN$~ z)xAX5;c!Q)v??hI%H$z3o^Dpt=EJ`DGS4+5&k=K@7mnfm8EI^iiwzsn3#x^T_w?$t^S<0l?ehM37I5l0F>lgIY9sOFDvv{?|NfAG~XAIo(m!+1Xv=Dmw}C z;GDDfUVH6zdDpunmw1BkE|~_<_lyxe2Soixq9z4x&D-&%6vzf0s)?vHnl1f&lYg>! 
[GIT binary patch data omitted (base85-encoded binary file contents, not human-readable)]
z3VMghNb!4r?U&&2p@AV=%tSR&i{Z@xzfi5)l)S~#GIQWVM0$Hp(lgVJECFe!IcASl zzmN1vw5UfS&D%?eWG~Mh75jqv0%}eRYswoz_Q}XG=HTEmzLG>BYpS8i&O&5p4-`P( z)5`9|E47AOlO>1@K+vlv0%t{}NF-tA3PPxs>q=C{A&4-j~BDIndN3!45z|?vIUg(yU{b zDE>1{%hO_+v26GReU0rPJkMm$%tRTIi5g+UlYzLa1=OsD#BHN49HpUk5``22p5xJT z0Myf$8H{>2AxPJk+dPx!%koPe9gdji+~)=xxmhFykmmxgF?Pz9`8!197~XO6m(A5$d7{Zizi z10e`zfegV`;Be4v3wd#H=;Mp<3{6w&%9Z;op=iSEL*3de*F7{Ct3vFKPhu0E;ST+Y zM}oDqiNXG0{M(YpoXb`ig+AFK+*Su-A~+~}e9#X+=i-c08D9F}9}P?t4t#_j6tYO~ zt*Sg32$KKgMf9AnE0#CSHww{8XAE_+(kdWZUx4Zbg?FVGS!GH$iE5(IZ12Ven5rK^ zr_V+YKFnL#Tn1ZY&7&cdkVVj!NZKqYZ=5MM+o012#>nN$%SNt)t~R&5Ds%3nUp{AxS&RndpGQ2 zkUwZb!=R*Kq~U=rNxaCitgMz)e)&4qsP8_AOtcqN_Z;46)(nZ2 zysLjaH~61FR_RcLmq1hGvhVw~$ICf4B0I#iXU|JkB41x2Xxy-`e#c`!+1o22-(WV_ zOM=N6N(2?$s<@KCg8^z4{P_zU(C2F@%u)EMhH-t0Sb7Yklff?BDk?4U<&^aH#GWkY zd^s%JxvnY@kaVeXim=2?0y#XE>&OLeVza#Xh)MjXMl?&#A<%|2+R|Y}iotq43K?&8 z;lMZ_oCu4{ehI_hLWF(k@cBVskU`GNQvhUeX2921$zHB?Idw;;Ua`{Z4{P!U*KKqriuE1$JB_L?b}t ziD%1eMkeH!Sv3bCo?s7uiGQY*)-kGuH%3}$3O==X>$g2N++&yAgf?1}6GZk_Lz1=G z|Gmeuq9zsT;AXx`eJcYFF}Es>23*ZF3;)4JS%PeA&hr&UIDg1W*G`VAV=~O`4?3-N6?dcHhzDhZ035b zK-?8h>DJ}Zy5y~MB@3l{WD{xp6R!rsUTolM#SNB64iYvOH;b*{PDqbf**J~dV-Sc{ z=%hVT`h^Ca-^9ms$KBWLzU!-o|5NFSLLv>ZN;D?)dubEG2RAX%G2`7A+W<$2VDEa3 z6i?Y79ORDwHi6dM6gk zFnBL)wFzw*oAA+3Sh9aWb;a2SL-!9vyaT}|yBu+zc@7$7y+bCmSYU6*BfF6V9T;_j zoye0BR*t3VnJzgpI7CDHrAe}&eyrl4nSzKakX!}>YHSDmDG!xg<;C>85CfC2q09rP zx99OhS_dQSG%p4wlX?ZQkl*vV?DDPPWVFQDw;xgv%-rb*eqTu;yb;qOSrlp1n4~gs z>|-}3mKdoZtFYZnDTd0X9UtyG#Lneb>d{CUNKOfl%pbt7F*zI7@RSFpAO6Sd3t5kb znWa`KVw537HOj)Hgg%ddoqg}ZoA&aSINV{x-i=3>%aaBLPMF)AKOIU)RqGVzkXcpF zmUD@KEDM2`Vd{~KpuvPQ?9?B-TMRd{?|Ume^Gtch^OcpcDO2~6sBaun2>aqo>ew_= z`!@0-{-C4=55D11E8f6O z)LcH_Ft?WU!8l`Ei!|Mfn78W4JC5=K2+0o__ZJq*!h+WWOMEqfxtj&4*&YlL@ewbe z)RK_YP~}qXf(pk09IMmf=l^OdratjU{>sD8Q$VZLPs-h<8Vj-W+YN2Pt)kR|Tw_YP z2O(sugjJKx*OcLXw>}Gs%DqGN!|-8sIu8CY!MGZ~UN^ zN_JNxs(j0a(s7rb0OwUUTB$52%bQy!Ns|)2)t&3ju~~~>Hl7k9%N50N(tX!T!aq6f zX;z6q1bq01f9ky_rySFbaH_zJ@tYb4ui!PBwFtuJV_k`J!-4EcM>K(fSjFJXGG8V- za3L7`YvqCmaHGcBx=R&}(y46O3cFxGsQq~Kf3x$zun&U2nD&-)axzk5C&m9)xDz68bB%%oSo zD@jC7OPpWDFT=Y2GbI=UlL%5cErXuzrrQp;%+Dq`rQ&3l5O!P{wgAPn2`*L~2%OWq5Jwhp$QFGCeM zyzce6bf$8IQoP(u)iLu|<@A$p{@x!a&LXboWpm|a-NDi$Huw1BQ`nWL& z*yIU1WseWATk>hV6gap1Sz$g5qWH|{Q1*8se0!@*x%)L`#~-jMGh)5mpXisBWq-$( zIEqbuNO(3}^@zEh)%A?eL4{TtTpQGf+oXY)aD5A}J1rnUPQJ!=MtItx;F(my+0xW> ze(Y0jEc`Vn=K;=`m3OqQgi{wNBem5*5*p@uGAQc^u1tBO!YHupx8c>pKl_enf-7<_ z*ueTw!+|CaJZ~P!`Qf^qNG#4KTGVl;M)`g4H%3U0&}BoSpjIzBv9ZEJpb#jY33~}wD z{y@*p;KH`MnW(eM@9sV>qmH^oh3$3{GqPM>@;8gTmyO|%2RPw}$FYz9s{EDbcuneh zXc*P?=?)UT9NqT^IZR{cRzy_mN*^c(%uQm9Z~`rpkx*>(mJ@|b_2s_Xl}qSTcN5X{ z4guRoeRtmZ(^p-+8GXk;UtV>a3edlB@!!&j-s1^gVGmg#RJRH|RPNx8%(bIPw3wQ0 z@f;9$tZzhSkYoZJ3wt~N-W~ZjYXWtA9+Z4R|J8kbg;ZpJp(=3i`U|X@AnoD?$2Ico zU-o*4Ojjfwv5 zms!96HUcwDHgFvum2dvevQX%Tv-HpZ(8OR!9%$E1PN&? z!))X)L2zISxJklT5l6=p$<%n!gm?~^!b|mtxQQJu*RAHoVL9xBSM={y(>AjF>jmwe z;+yk7e_WpNzJ!r%XLT z*q6G#Dd$YCm_H%BIShao$^?s$tU?U3NnHH^$v67SKV9N7Uf+wPLfI61;exuHM;xFW z_KFt)C2=nmLzvK1l=ralGAK3FW3HvOLWsz*5Em7N1U_oG(<%Fri%Y}{2%a0kHv&e) zy{=|&Vqs)&WRC3-e0b|2Ik~vv(vhPw097bF;cAr0zM75mvfb!)747O>Trb{#Qyuwvv)Zqf z*BaFbSW3KMIdP5Lc9Yhy*rDH1hY5;#FZTrMOuE7$K;QraX!jU}-3r(mP>`IT79`}J@y1vA8 ztMSz+qY0)#q~5XPs-fim$vfEs2(Dl+5q(^|i$gk~`+$CU5@&hxY(7;{#8pV1g>8Q!DAXxk@sS&|fh)JnD`q{WrEFxkMPZ`LlX_ z{iJ5^*ZEyVw4OQIBE$3dEGx*1FFX|p&YOmGjM}>2S=&G<-_tXf{R_yap7N4zHAVt? 
zFQ53CWx#Ll!K5t^&kHnqxT~T+cSj_JgxZX6br{UGWX5Kd?(dFi|0#vnFBJB-1W^2q zM0lc7w2(1KeSsh5+RAY|ypf+3z{^yiSIR3n20>;U)}-DQgusiIrqa*GWy<`EvAp#h zvB#xir6pHOR5kRv0s69{cF zY)PWF*hFDG|1@LwX!Tk9TPLh$K6)T~D7ATb`I-cyox{`w?EPdn(QG}?Xo!ly>U zTAYIZYyvJXkApoE16)!Sm0Y~K%#Aj5|C^sR78c&OX}?jtMPnZ z%EWzKFb6^Fc7-h`Zcn$e1NKRAEsvvGoNJy2MawYQ-RNZUB@1~u70%7m4}_^dcRGcz(14X2x6W&lSP_bKgf5T`$EX6X$=g5w>!# ziG{NJ!L)@fp@8|E=VEl{t(GARnJCnpmoc*0Q^JH9m&=Ej)o-REcbFM+O8g?VqM3y< zbxX)|giieSL+a!B0}EI}9hCnwUy8+2Q~N`mkxJLFkySD|_~hITd{!aHG^C>d@1d1b z+wco-49!fkEUXb6Yn(?&1y2+7yejM9pH+UoI0Y(~<3W%V+?(>RUJBV=or}u- zMC*!QDqMQqLw;$nP2y8MnEB3g<&ntlMvSq=!^c|DI2=sxzRKQ2z}-LY)c~kFqVRc_ zE9LjmQ)h;{ZzPz6LXJ$@-i{1o>5F*YNyg96>rz<>#R{zkDbWBqueo&oMBt)AN z2_;pM+9(kr^8VyRPDB@Sb0?R&mS~GcIqV-(1H=m=qq&i4LZ?TWG#s-Jl({;yiye0g zl|*^;y866tNH=LVq&!CvI;>WqXAN6fk$rEAx#qx9fa?i&^(J{Y0o5cuFSS$jQCx&x zrFyW}HzK80SHV#@yK9O@aNXnY#M73M-+D>7BhWq!{@tJTiZ^N7XN@v9gO~~Jj#4Pu zO_r0OvuB~QB8)z_KqAq`^N6W|QQ%L@xG9gLP>VA6j(7IMwZfx`_L!b2FS{4h++Jbu zf|OS!RY@EOPdvd?fJwKx{9X-uM^|yg+ZL!foR+mDPZJA8Y7x!Idzns5%}#n)uVyCi zVcB%&BseV#0SWwPm73w4cxoV+Zh2mSle)_~1fl7m2jcYco1QJt`JXS8^FD($Iw`lE zrxU~K_-NBFcZ{Lg#q4Twq5bl^P~Hm5)J;i%)vWOY`9#FPkO?D&iAG~T3sn!5JnfXGM#&{-A{B(GApkCA@8fgjwWs_B37zJcb_!Ffr(8-1y)1Vk2m#R?ca^wo^A4`iyQ!W@UIK}1 z9rhfMcpmRQs$iUIiNBj}_G0;-{|Vl}XW&qzdIJvv_jx$e5lYzOU->JLilN~WM0>Bq z`U8Pd{(i`QG}Fsh;`XjWS5@x(^7#yJuTV&@d63>8HVkKCu@4BB=8PMKc09=-D0)}i zDEVU^(e@BExmh;E*TFCyKT3WWh7kGT56cgK=tiDR{3GlO!y8PcU$!YF(^CP~B$!R7 z1UU4oDOk-z6ITDcl^KR-<|>##hFWc3}19p$2bJik+6uu z8{V~-OIAawAtL_ADuaXB6~;VdlRTN@E7UlhjUu$k{mih0BwEcaOb zMbEyL47J~z0ae5A01BVf;n(E++g9|?U&|o)jW;|S>J|U{wesW(UjFJ^(jAlQfuDCd zuAgywvD;+ra*TppMW=CGa*8~~3N77yeJj{H@Nx#wY;@}>Oo{NQWlF%PAK7VHL+ zLP;}2)VbZDrbF=r_yaFDFLuvy{FAGtzhq~Z1;tNl_j(q$H$5^`tWn&%(j^{s4>$$9 zZR5bBtbS+G1~FsZ->d1M(A4W*3U!E>Sy2@fzvvYe8TLNLTx02{EW!TGkeIWoI6Y0= z4gsbg=Vgiwk7`TSXqJ3aXe_J|Y{l!|pod6^SeScC4Y}*iD`PxHLN?uYpfI`6BCkzy z<7b}u)1Tol*8g!EvbO>}E2p-U;HKikEZwTeJO5x0CvWf~;>>X0aKwvH%!>ePhL;~% zMGO*=92=tUg>rZ;A=u(t`@UZRdc36xSt!f7`Ztvlz7@Ek2d{t!m*8LioK2EdPSn=( zO&~kcfPic49t|Q2jO+;&d>=`@mDzo0M==wM;^Lys;p22AH*hyL=xgOIuZKJeXDAot zfZSmGo>s)M$YMP;HJi)(Ro7Ya&g1TD;@ik%W$T5jQ3ot$>TH)NS~6{IWi8g%;Q(Y8c}8Co`0>BFS+@=YSMSpU&+V= zfTbI;0=)83wowSoK5?4ds$3f%QDoZV$FO=jM+#nGW78eDGB(%UH%b6|)L?Ipf(S$& z)Yp z6|z`~9~m>?Wc&q#>TA@WTnyqdX*+C@_?Ya@vYMJoF8`vVNrKfc?ED+Bih2P$o7Nj) zvMhuOg?hj_iV)BFr^+MV1aGz#QD??W<~}n7WNO5;PJwR!#_OTlKvRC%3+2r>{e@P! 
zvB$>F1b-{^Lo5yc*U-rnRCwP{_1m^@H;S@MC4tb7ZhXY672#{pd-&oR^9=2(X%Pn@{zW1R`4#HC937liu9-aOL;ek<+0 zf2gd0hNfc*D(LV-YR$Gos&;jUR4zqeE_WU5Lj4VXs(yAT7~?2$?-_wd}J zTOrgDmjU;lwAD1qb!$hT$gV(#Y$WkGM3W1*R@qDPGm#4HYOftQChzi>&_It9ggL)X zX!mKWqa3(D^ak~XI#8wHclj_~o?d*v~ z>7^ceQf3KOoO>S$@(Gn--7c3ENUmRnFQL|98vRpoIT)|bOJ%_mqR3&$Vd-9bM?h!% zE!1H<0Y14vWU2evuWu<9Iz zhV}fL<;~oNGTmF4&tg|5&C3nA5KS5ki2H#^J-O$F6T^j+W<+^_ONYQ6k$DK)^9JC_5Y$Sj$PmxeZ@COB+J?Fv}uM zRCK0)qOitZL@rqvtqVGYhd|BGj4@UN7wU@U;<|BGEGRBLk!`EMr$?bTZD$oy+Islm zhg0QnUV1nbCm^--Z;{Ii&$?kb^fTG4hpCC~A3u~l@DbU|wEg4V>r_F#8Jl}ap-R7B z(UrpP`Q-FJd*q`#Pn2_hZX$1x0qQ8Iqul$xN4JHzewc$QopC*BD}(-+*6> zo6243e3>lJ52F^#}d2&sPJukf%(5hQ7C!yABGvMF(Wqt|CXBr|u@zkT(Y@`O(k zGD9UB;wlBWI1xdr$IE&DroD%xFnNV!gAmd8yg!sFs35dM#e#YfJ0osy0AuS}$N(6P zr%ZA;u*IP0_al0)Z^D-)MLTJuq`MF+aT~h3Rl9s2u)xagA0+3f4wbHi_@+iO&K!;;9dfQd||0B`IK3wF$lUpg$To#v~E0 z#xW;VV z9I@eY1X`-rR_}a1As_H&dWjgDhb;pj1>?GUdyvRP9kZIEcTyHWY4l{Ux&?h-wGr10mB1O0wj0wEAiGz>R+<0A##pCL>FVQFeTR)X5B z$3|Y;2;Bt9T2~d)gX}Qz+RTnu@czEXyk6olnEZdnd3M(qYj7*Km=Fpnp#=6n&RxL` z#>|2v_OF)};@S7oTzsNj@WYG9iE;ehf2t`?3dlF$%zp0uC7^fsW~1q9bmmHnsiFbV z)_SNtJd|+7Io>yR; z#}~uvo)vwjpAb?^y0A~mOmZ<){>WQ?@UWI!%9}`*ErKO*kGRHb(8M!|uSSXdJI4{t z@7PT};2SWV=Cd47@`i_zC7m@aJZA0JpZLd1Q)M#eXM%i9+%TCQ3MvAyrXL0)8U;Fx zrc8&hlrX=8`ZDLv`2$wMM#Lc}#s-SdW6c~qzz^#`u;Y-!XY!(3I=&vLOpi9J27A)U z9?kU=Xg`(2%F!WnB@O&qVYk^ySbl=n`w-F-5rcRb^CG?Ui=Y_rtUObG5SAPVl>_F7 zW|H9O^g}se-ti#UDhELWY~zZTg_GUQLP?>w^{ydP1JsFsZ!fR~t{~#BBOt(NhF6Ab zBxCdfy3X|7$NenUhUu^u(bwB3&~8m~3xVSe4BD(p*!+4$$0)Z^#LqljZSg8Q@+Yqg zj+tu($49ge0TFSBg-uT|DKzu`w+a`z)rv|L^`^(t&sv%iAp621_`4U%^X@!aq zPFW?slD=HII2%4%sz;%z`lp;7%B5wglxH)bS-p8}#FE^qlV5oA$V~q*K0ulGOOL&) zp_&tOsw!80as>$MlAm10%eNf~b7XFNQou32=t*G*;vZ*IyNMaCwm(dY2hQ)3>4-g_ zW5Zd4C)}@-#e4DDTvZ8aRE2cfV;-mJFOlrGz*lm3&7FIZcVy=d<%Ui`mJHM;d@>?8 z{UH2plU_r2c!ttoh=%O2ct8_{#YGtR@Z?ww`pv947Uy;gAvu@TI!MWTr?GjYZ*M#v zc5~}p{`S8c_p|#s*r9etBD1l{Ik)~&1zp>G@eh<}5Z+lFS1JxPh9U=2wZSVLk02l6d@D+0bavZUJ2&SbWBYoxkFYhTeuvEP@li zO9oOvY8Bd#<|~wS^o4ISTuqdvmxY>B&1n>Oi#xQ2x=R$@-}hy|_8R=yI4Tz*h)G-< z{_WV0YFP+M%l~}3JnV<&rpnx<zHwUwYwO{4s+owC@r?UqHaQmPz~||qlM0ok?qsG3)|8kyQpFPYy&MUXF_j2N z&R+vInBw<{Vz0`r5?B;mBIMn&=RKo`@_iA-m>3jZ_vT*Nt?V~?1H39y_oF7iW0{X3498ZPJ4W<5gRCeVs&kjp z{FJ!S$N6$?F3v;Rx-%%Bo}4#IK!>`(-sgS?X=OVR$>Gl76&^e}x-HkaTrR_aeM|o) z+=qVT6L|`05!#FXC%KET$`O$sJ}RN?edM;+fY4aHw&iLT!@6#P|clrlMBrYg38+#_^PM~IRgD2Y(GwigPhjQTVB9M(FO7reNYhFgUx-I1=STFj0LkYqCS z_*TZe^V?y+LXO2QhMh$WT=jqa$Q+kyTSIhEZtP{2veePtt=!|xITMr4l`a(Sqj(zRM*G;)CFt2icsl5l@E7dQh>xCVJ z`m(X^scvf5cwMlO3Br4mKd^>M2c<;CnBqT+g+C@o9GS|NriuS4gUfk^tPB4n>`df0?`g%JHgKTP1(UFK4y=7rf~T4{<@e zH;Onr(glnQvUo#bNdFc? 
z+LL+-aq1=>$+B$1rsMC#x9mn`Zzd(h=+5`b_wp;GVS^0>qMmptB|g*RVSn#4x!W`UjU#hgNQvoaqS6k;B9Hx(*8 z04X#bhqz~2vBsrQCSf>JJ{%seRg@Hj726RHsRrh7_qgE%0dgxaRTC{$ z>j_6=g$cbjLJhZ2vXG4Vj69dRum(-`*CF~vsA-@iZW31Ynw)>4#Vf1iF!e|xNZTtb zVVzb{7_xj%G|S;ok?EN85bj#f+kk5)k=mRO%w^9qH!A=wwqo?~=EiEd-ntp(1h}-9 zuuJw%7@ty|{F=%M7Ai8;RpV@fzD2Tz>RF*qnVUm)+)T3#ZsHCg=gzI}l-q|{-4uJ~ zcfcULpH79o%-yz3*MI|fA!J12xX-&@CFf6FcKTfeg^%ORw#7+Eo+5BKm# ztN3~{UL2yNmA%HzJ`4?Dzr+QG%#{7_?&DVLg?DEUB{7biwqK&4L3ge-SIq7ndUoTL z2I;&N*;iGS5{ag#B}xOj|IW>?N%}t3&>MMA0v6l!pZn!yx1%k`Z99k% zPABXJpL!jt`HH&l&sBBCq45)Yh5zClxo>-t&;F(HE9(mnNfmQyr7I5|#6;kLq&&qf zuAIl;`4SmBd_13`-Q-p|y?64f(HCy%>Bq`LUgkxXJS}qN-H(0~KYn*4b&lJQ|M3J# z^r>#H8xvidS7f#<<~9Yy5ovwZpPZKP)$SO1sN1#!nDbm5WicgA7F|WmCD>rClI4U3P41+BH(dfogs}cr@$tnn(F>T?v(J`i z!`CinU#jb%`!NIEHLkDL@lrXDe(o)x?9=d`(N|1$|E_nXOw316Nht!cq_rMs3I%)z zoIgu$bjKNL2hN|RFCB#DTMg>H>)vRr1Pt|v`u?@V?@^sNmWrwFn5l9XBC8IzLXaR8 zg$o`u`vZmKM(PxRp#|~wEW@7arZ9jjleCvzW_hiT?rl-Y60&+(R|O$V2)1@6My_>* zqF(~W?M#GP#k&?1O?IX7`N#sN%jpYAuqITjRf@x9M{?KD3Re?A!`Yj^sjiv(X*g7Lcj7KaH<`Uwv=I5pfN4v z1DS&0a>s~OZ?rOuU77R~93eA?`)J~M$drreX|7$*B<>?AzHYd4W_n&H%9Ow8^22i~ z77j`iLm>*e6L%()wAlprNsz)oZ$`$$!|*K1cO4Zqfv0VK@4xxNJaYkhd|Wd!sJ#g~ zTAOu>ba=zuB0Q0U#LxYdKZ)nP6$I6R{iM6c75k_Dd!{p925CGCx#E??b!R{SKu>Qp zo}Njz6m$R)o}gOn#fv#GL3G1uGj$8L2VHFWF6bdV$Q_)W`5J7Bbwb--<=LbR-o}~C z72`HgJx9OynlH0Bx;*_Qy-$QIMxWbX2ev(2$KBx{Vk+v`glas~FY_Q{{51yDS%S=a zqHOxC{aWN69yb)S|G?KC86NIu{crL+c6=b^r5Ur*`D<+HdH;I?*nt1NG*!ao%yy|M zB#lp5{%QFCvOhNj9}k&P6+P@zPYadU2Vo#wxKJ+mHBUWOp7kIYGgyi^bt+7e`Bz9% zu~WFgo6^zT2=XXUT{uq<)7>jYW~DJPxZs!iGm*)RtCC))8BL5N@KzPi?Om@Fy%PKb z@@9Z`VN?41da;MOA^n)Dy#5j*a*#^?q;T4~*swc$csz4h7aSueP0XN5$wfg_ zd}ZK0wC{Y4^sb;vl!;3-vorly{3^0W@#T4X1pysq9+4ayD@*N#hRY(b5QNfO;Je%< zZihr4@UtI#B}h9?j=YMk@HaD1_+CjF<7%V+iH;VtnpD52s|MQ4I_fv>i40bB zimwP){K{H&(?|HV@_af3a3LtypSUb4MsC|x(%}x`-rdQhX$6I&T<8(3V`Z` zraKtC{!}^CqpKBw*`I}?80%He}QqWvqemOc}=#RLDcJZuv zWRjX#Rd}}iFd3E+zu2VWdBZ566rVu0iRCF`C~R`U*BjlNaN6-sECo*VSowq7ut|l+ z3HZ?SAnYvIdY_Doc5#qx~1I*AIj8Sni-b^PA*cjWz~qAk4{Fsr{Kp1Bi!Ah+&w)O6{(nL_kjdB{Wi zapI$ZC2edp83V#+wV@4=jo0$J=@#GJ!yj;CFL_K?I{U{jxlkVUgYxnd>9rh2s@59@ z9@vQ`KTg%;+<_0E{XC~E9=%1&Ne%|wz}5@>^l7|R82Vy z7rhu%yA4%fjgmFl>H@x`Jn+~>$MAo6&Tri+x5Du!AISS|5Ia2bW>B_{4(&?7xrc}> zBW<5-OyAf<@n-e#xd*mnTL%7kwbCJ(jEeC6>gDLfLtX3 z7o?e;d{MCfJ{^7OmyGT_R9^K^Fk-`RH?drl`rZZoN)Yhksi7jVrC|H+aHS;eg@be8qD6OxSh!p^bXbdlQWh2)~26d0C_66?E zpV^g_20A}aEiE3?g~r}2t{^d0@dZ>&oYQ6~cJ!$AmAB0D?s zvHOSQo2IK43}ns~V&RM$LiDk9cAtZyz+(njR{4?N^PXV4eMTm{zptBCYM1w%(x=>8 zsYyQyQ{~=v48_hdH;RoD_RwPMXBMA&ToocFdE5Q$v%v`% z|My|J%}oKflNLEvBK=?wr3|#Kqfgvv+=?pbm>$IeF!b>AEz22{Vip%@tsLRz*fZqD zALaZUA!cdPvv0(NXQApue!BM{;GZD;uc#{IO*sLF${t=l1c%>8BIZ{mql;Z;TR;RK zE$94VMP!K$#d1`f23%qOp4;QLG;V)(xYsMB-wz_*t6UUy!{3syfyc~)5@q#gz8$XT zjlEuOdZ;>Il?u-TSw6bWMHsgLmW`6ZaW}X_Ro`H3+`^QRRDNK3d{6tc_z2;*iUYxM z*$V(8-YDb(0^Lt@Iw^d37%GLL-7sMLZ5)JJ)#MNUgLqhIB*S(f6m;QuM(A!?So9`^88R=CVyU9PCoJPN5(-MFovNQcul049S{ak6Q@Vxab%}vxyM1`}<8Z zT{`j?(ds?QKAhaOoWZnN^BdqVKGP7M50)n8hVu4F!vijxD?1YCCS?U|vA5h7^HBEu z;i;zpKjeA>zY_CVN+4N%N-x1*6%7Ds>4#(X>8Hy>?(tT+7KQ?Df(QkN9^tnVo=Y+t z4j+Tt*xvL4tdY1QM}eQN85~~Lz_t2m+?Gn`k3EVEqAK-!?i2k8rbXP6p*Ld=hA$II zhPT7NP$@INdUU5DBGx2_XMt&@q>4|}GbMnH7>%m1)X6*X z@vc%qersD{)bdSaAIrt47K$oD&TVq%ynpg33Vm0j1JU(#Te(v6(y<%gKv3B>TUadNKt)hiASN`qNwLG+bUwiC_|(7p1KfAU)Sq1UI%?O46K zjfUIJt;!{RM5B>+2vM1QWOgS`P;C)i6B?m2)(U44nGpi^MhTGOtt_{z3pwI(r~d(| z9<#n!t}OcGXzPR8)UE3N|DY|-IFYDsi0rom`SW!7o|oyAqHKTkewPoCxJLs=I+otH zf7F~U^qg=0-cv;01@YKu3&d7{JaKeU8sqo=ju^-p!m~&iPF1XZ-EscU7wQ%Hp zJJ>sn9oU;o=M;jZt=K!*BH)T1GJYjuY$y0FWn4Sb5gb{zI+gTABBFL`Rj?Wh;M<9NzUG?m7lCn=`%Z92=0O4NQm~ 
z9=gmV=W`Hu=njE^Q0|ZF0flE(g1T@^uDF!UZaUtu$v73R#QZDZ}085?f?aWTKEA? zV4H_ORjFO!lch3gY(vpiiZaLRz{k9CI&$8)dTWLzTANRJQ>EBHS8)0Zf9NAkA~dqT zflCk&y#@W|@$NXm=6?$x(4UIk>t22!xbJr3(||o?XEqm~WmC8v5|idMpNP{SQoc-GAJrX#^5?fIp(2 zqN;@2J!aI&W^s4R^XA7{fK%$C#o~g8i_jCJBN^pOcegr^8cth|_MY1nPXqcwstd4{ ztmY4i=0W1)9ft6-Q`g;)6y-$vN_nlk>BZ|H0N!6(5HfNal}5t)T3B%8F!_fnKHlWq z-s46az2`1<*tg7+L)(M{(cd2Fh%}K<(lEH?-PcQED9vjfwDgM&s6kF}qrb zacDX7JJO%ySJx;%H=ti84T90SIVD5}IYytfjf7sKEij49PQ)|&2MIq_MX9IVs60Qc zhbI+OUS*a^F($b^-1p*2+3xj$VS0I<%JNYp-}k26d7EPX4CTfik~Xdeuon?e5{KNf z9mSB_bUlcwv0gtErohr+gh~Z1D-ERbK>?uC29neL6==5ED-|B* z?@m8BdFM>Q+_JH6L2-83s|dpy@l_H@?Pq+&ii4y|gVQ&*323=Fj~;^n8fd?VSanPi z8zQSgitZ%o29!JyOiuNsr7!=xw?P+sPx{d%UXtH+uvfpbSN3Cz5Ly`hJ~!c5qxX|C zuPsrqz6A-yz;29oysTbE?dL|LS;WnZTE^5jM{`_K)*Tz0EOE07P?kA|9TNwNe_RDM zqMl*9dF<1vWxs!#LlD85v$SJ)KV!bPIKHWdY>GfdGzg*Y0~zG<4bQPsD+E*aj9}{p zT$ee%F>J{D6zp$J`n~N4R}9L$2hCQJ>JWp18tGRgZN{FgFpN(;s#D+vKM%hMWnkc8 z(DjofnF@?P6u$=1-q5LFQ-49-7^SU_*DjVHdgaiXF^AKY`BL&@ zR6l)Trr|MqDH04#uEm5muf9YkX@#^yg9X|+@ZG$xZhNS|v z2Q%$`uhQZYh3=N!alAe-5@lAPdV3T+LmXUeqT|d*!59&wwCT@X8GS)bie(ZJ0jUO{ zyOa2Cs3nyrHUbJ%vB;E%ntGWn6SaVT{1bP(rV$4_3KD01jHmaU;qgg+Zv13ecb;gn zV|$iWY)OCseQJ?~)?D}P8N{dfc;lS2VN_r!jG0RnUe<0W;#KMBs0F)Kubb4+-3Yza zg+h02AU}(&cHpYkSutUTqnoqkM2DBZuL0En>GbA)!XpS43Y|C`}! zxLm@Cz!!@Dva@3$;JsI%m0m&-~BC2=0qg zJaRU{6Ndi6ZmTMl1;(U2N^f0G&?8N)L4JP$ggdq=&oiAQM6itK;u|GME0JvC?B z@h{HgYOG67m6Z9EX74Eh@Rwe2=?N9V;0eHzj1V$O!Bd`MCQ(%ITrpmJP%3DLq4$$1 zTYl0mumOqL!mLuY8#D&N17yEvf*>)bzWV_j{iz(l`SOUrz~EW+_JVN1Z$xBQkRUw@ zqY96=kVvKHeiv&qemZE5Rqjs_R-~bSN>rBtB zDB1dj=bpAS5#(?5t0Gf383C829ePh(R)QJkQUf^E0YX3I9&)P?tSRw5CWp?2YALj1 zosW39D<6_O0Xc)*OpqvM0+PoaP}=kBfIyo-Be!73OOzAv2I`Aj>|YboRd%B%izRr* z|0bE{)foQ!Cm#uGbpPb*laJi{X-F+*^X&+A>4mk<2Gwj{h89vJMZ_7@nc7!l4wUqIDbTqz5nDM9VyrKXqax6d*m%-WE^!P;7@X> zz2QkC^npU?QHv5NLP|X?Q4Njw;nAHZ%VSUal{zFMWb}-O0UPIIbmw|WW;`e+f8iHZ z^~Z~`Tn-vUICjFIb>?)?#s~lmb`=DZmE*0%zEu^i?Bo(o_muP)ik(jD=RvYCi#wj= zH4>rrr4YGqcyc$w$viX9y}2?Omkdk@>75oH6zGXV4|3!+(%LUT&4Cz(oDTPpi@nh- z5poH)P*tGVI@=p^xir??!|4(dtxW5yAoC+qKXgBPFmhYo`yYIle7p9q*MOrEIyLdO zB~O+JDKH=WaTQX|hJI>dvP>?;`qSHvB+$ZmGE>njZ#f zvcZw_8s>O~$9T>t5ES-38TT(u%(i*9*sT@Qu}W$+@i93iSYjU_`oo z#*R0Kpo(IbR7P>PE0wa!%Uf};ke04{h5yO4HaU=`m3SRRrsB~c7uG66o3{i79}qVo z`ani-4IKxLOc9F&!c96OD5Gqfyx3-u$~?fZng%$3qY{)sQXaBB8zL-+i5NA7yqS# z;VLteU(AL(Yd|U)Fkpm$P?IW72;4UF1u9B5mR}7rvJ#wU)zvEYPMBnfPqG!{SAvcJBcNO6Ni+iW^x59_S~zegQ(Jt%Ff?#l6&}mhm!@ zt&t#`?idn~Sqhb5cCd;Bi;wshp=W7Ac@!8pb36rv8~wsPBCRmtDoP@(kQL8gSJe?M zuhH+or+WxnuOyV?TE?23i9v*v~4jvLFP@5qnbq^o2n;~H`uQCkG z&~zjN0unb}OP$Nr>1Ej|KXDTiDDM&0=GJPfcQSk1VIq$1TrF2@P9ke5<=c^axur@p z;ZZ=xh;ucEPfYR*aOH~XKI6Cbk5BJVN+};l&xAeBlfXT}T^7dWIQZm~FGMu zK9M?J)*DH(^J;nMRlm4{=qET$Ap)dt%!uyf-|A*2qr^Q&jY<%U$Q*D7Hgk%$*#L*h z=nf~l6qfBh+={tXa4n(pi;vN-TsS_;>PqfCG)*3sY>CMiEhc57K|pcsAoBnOc12f9 zR|kBhUH~N&co;Bc?QBehTk7dQ`JeJ9zvWAJs3FdD=dYDp{uYx8&s{CQ=WZY(+w$HU z!if7jS@Yq^t@lP`?sVk<{opBnk}thut9Mq6VcwN!=_-Kaj~ohkUzYq^uH+=Ahgk}9 z_+5X8$V@*!bzXaM3oN0K#R(UzdGJV@a z1iahtkdBRFcQ^*4W_Nl7Ed1#Q=Tue1Lz6gY>8cUn^33>Q+8&nco~;)$OHlD=9~oeW z-?oU}KLI9p*OUG?N=NYriJ3g|p11@)v! 
zvF7%X8sLe8M#KO7Q9>_eO0lcj9tXg~%DH#LB)Y8M&YEd5BBxWcPj5TWq+25k^*FAp z+e&<~G!ty6gX!Wp$r12|l}lMB3fGF#roA#sOiDb~5*~q2cV)FI8Z{b|zeh`q3EMWFZD@`Dswos@3aHYQnEY?P2Nbb>g^Z#-$L zNU{k8z-^;m0g3y_YGMPB(9q@$;lqc)GNl*~m{bbz5{>>421c{lYgeTNqc^GW=mmLG+mCHVC}5 zP#=~sgqS%*y~Ab~$|e7sROu-Xw1X0oZ0`OEPBS^_1yjau7+~@gp?Y|ybu&1w0_{Hv zM26;yiZ>fF*(>=vN-Tb{8&z)p#=~$kE5r zrG9bpw%>fU^gUAGh_(1o92>#D_ES>M5p9A zo-Bg^dv^!sJ#QtY#9@p##}jeTg}l6>#FS2|mp>|?g_@Kf$!2I=YXTOJA`;xM65lUF znvCIM?y_#kxVby02l6gwX`2L4WU;4NH=&5n{Bx1Jg)OE;HL@8zXzrfS^L8Fcj#CcT zlphqQqZ{NrFLXCF$?v3#d2jcpNLL6o zMAa5!GxVgfc-L%}hUZh5mS|e&E>9JBh7LO6sk)(iqghtt!9Mi^5@6`6v99P2zi+_f za?`cx^D|gH`U1DWe)p%+k2iMZqlZElmMC<9qN=KLGMKWS^LlZ+)D~BPkKp| z8S9y*g)3zybc)d(pu#_jHt<{D^@tQlkUz(tm8C#*Db|H@_80v_tk6~^ghDV;i_8RQ z@?ZF<_4dlfCU@SjF-e|;>De8u_QB|v@$o)7MF!DXZEz{LlWTR^CA z1|MrR0TWqx%dW`o^kgfAPF$1C#H=Zua@8HeM&D};l|CNWoYI;SzcXn5ARN|q)5UXb z+7bnZ!hM6cTGyE+4Wuhw-IAtk3Cas~H|~3uZj`_Kmzi5<67 znjD;t7{*QDC=CCVe0MOL_^YSOxfgtRMgK;i&0n$ALlu}1hh4_?rCVgxr7nY{GX3W$ z@E(Z!+ieq>#^U>&e>~0~_aD0aj{j$L&#htfAl$+R<$^&saC0cs z>j=A$gwl}9$%nB&q%?#N_4J?1$;T=1J2R*v`MhD}V3c`E)F4j&61#?5Cje=eA))io zzDUmH;afr753~ zk`*~m?6YM%!d}5Qe`c2vhV+&sR=B~Yb4R2T4z?_OcVQP*Fc4kUBS$$4KjkUN_TPAo z7jP$R9B|P6BpGzeHcYv4_7!+1^1%(^pU5#>ndAJ28Xr^|xVwXwuSut#M*Av@Fq66S zb6tSmPs`*uA6_ImB$AY1pIyb>WOOVKZt_<(pI(*ZERR+}Uhczhhp^ukeJhG#&zh zUwIe2@m?sADFfTY&M=V++^N->z8es2iFE~yy;C0o@E$xNPjiyJ!9;i5v6Fk5Un}RH z@Xv&U3u~5_$_tV5h@7V}`?!DmP`Plxp^3MA&hDf!d>PS7@lEsX#b41pqziI(uUk)Wd3&!C36WJ#<*F zr#PoFVfs>nxY*h`CZUF!5hy|QDAX3zkOS_q^l;#wg*_oo8p-S7HbP67oQmi#Ls9^? zkyT3dv4&2_sr4Cp!dfo3V`HkxQv@PA5dgh_^^+YeWV z{5YY{kkcJc(Qw)i^xmZX_{@W|2aI5BZ&P z@^cSso_xociMMgc`Gd69SNs8gMem9SQm>+LWr(#pyDh%CyM9nj3?s*sUbNL~7eyDG zTJV$Q(Y&vn*Pnmsu?ZA2@tdX>mPnTA=k_Mss)whM?A_QmmDo9bL8Cj|g8sdVH3jw&iK75RtWmGvzMl@TMOOL(n5O8`dN3vuiQOGL1n} zx%=2Y2kBpz9ROQo>>d3ywxFtp>lnFH*umjH#HoJrbDyuDCRyE>;#D#|XK)PB@E_## zcmTYLMw?zD$*0KGS9%0aq^zw@vSL~mH9zCS(eHeIiNsDUElq&ldd1U+Q7XL3PVz;Q zC^!1lTL@t;TirG`2H?l4X9Q8$$=m)Tvs6(c)$sQI!A98$v8~uq%(zq^B%M1TbBSG# zZ1fr|Ea7Qi^6d38qZMO*VHT*HI|uA$qKVRWYFzjNRWY+a?OBx_BtJl)b0u-t$>#KX zUd@sN&>qP*(iaDF*Fe;4efs$p} zs2tjCPy+hJGqY%RKDyH7MOE!DAtuNi)YJE6NJaFP`K?~kknfL|=iN`m5P=|h%RRi~*oVHitqQR`TqrE(P1sO>4<5~5SPF|*=Ky}-ibTMyQ;I(8g42V z$c^U(tKpkVw`bNpjY<=#rm}S$PYwXo%24UgIv`ZvjAPJa35g;OqUn0GsXDMbEq7>I zAzk@`k8*pvI?};$+(qoeSeyz~Cwt5Tl?65DUsu#L9Mb?4M<4lpGSQz4kQ!oos$5#R zoEifsvP(&l#^4DdRfHt6CMcwFH9E!Yd)++IU2^nzTlkxL<+6KTp$K~coFMna-{6arzTtIS5QUy!fVIH% z;x+ZRJ^7UUE%0z%94%x-5QO(KCP(C9yoP%R4%Ju0YpFTINX!Q8LX0q=@GGeo^I+myi02w;?JpG8ch3f9{)Q;}FTwZ?JyX98d3EUd5TT4{` zh4V@52@^0P!gB|u#oN?KwDi1Hnv@ilwle*G;;m6#L4fW(mr_h-vUAn6NO>lB%|zy~ zE(3${w!?cS9-Y>Hh|-=pd>f+f9Eg7p6@TmnPr`P&`TADe2OHz^PZl>n8vm#GA07-! 
zK(!E}nKSjLdY~+V@PLCS#y$_5l^U$Z1l}9rIQ-PFdvyGt;(z$LSrJ8V$90L;0RP{12F-RN#==pPJ9Ph$HG^*MQ zc-*`>Zl|*w-Kb2w;%-V0^G^OU--125=C>>>>gEsC#`AOJu+Pj!uJ}(%W216CVQ~uo z>0fdaCRT5B=gsnnn;``g4Z+Wyl6m-nzmj)p5SN2aID&p}$HTf<9`Qe)&UDdRWi^bm zvD-NA4Cu#tK0{cDmt={KhPFn;^0!zw zey{?uBhTfBCEUDdT@R3U)I04kJnv#Krw^V8g`+RFWug~A8v9?{ekN~Yny18 z>{xG(>PQ1=ks6Z}^D;N*xn}2t|9frAX5vD9&bGAfSl>_ zt~lCXd&EQ~DcJp?-%6mR2`fRA@9D&ksH-c~zv(V7H*Zol4{>Ng2gHgrM$Y_j4H%Efq-htK|R85LE|z4j+%) zSo^g;b6m-@*h81Io>nqa=8|(#k6;7#8><%|AEQ6RaC$EU`A&SM6UxYHP~u# z79BP`HMh#7g(o_QlfXA3nT3J~L)jInrw#!0_+_Izx65rWKV!#B$K6hHt$TNKLzw1f zHt&h3;9dV-K!MM5L)A*sO+hY2X#?%UdXv{+0eP;x>3*a8-xn;h=Ft1a)KZ-gWtx{+ z>dQob!W-{A@izbaVtMQ_uis3yll)1}w8XMetz!$1org;(BNTp*N73Op&J~u3)f|C< zdFn%}Pku2~d?Naxpz4o;8=C47M^GWT^D{eiBUfM#a%GQG7PCV6o)=J1R=k)!sn=~i z%HC92?E9ye(EM+5%XfS-@AF->E1K$Kp$nnn904|U9fk%h(9|I6dpAe|u zENxLB8b2yPz2_Vm2;9Pbnx9~Qmqn~l&jkcHUx6sh^TEN#>zJ5b$PMvLACM!o8;*jj zZLOwFMdiPW7?j8s+&EYARGFCc+n0MwGmAw_<^`@I&R^@m)Tm+%gcwKsn<(;v2fWXZJx;@lbQ>5>FI5p$I`_aM-1 zZCa~;KZ65x=@l>Va1)Zh+u#Cx+=;XeZjfhAsCqvXIn>yT!IVQX+beF2bX;=wvY(Br zBgW-WxXzVGH$qS9(hD*fw5HcZlHg{?_~|iK@gBu^V{*Z3~+6DE+pNDVGs8?`)$ay7HDkdnRDxsL*8XhqKS+7 zkn$PGZhx-|XO-5;9iT=BYzIvT;HQfLzEq~d8M$^FQXP*Lr_rK43-V}HR`yGR38om9 zD+-VU65r!~x;*NCPL@}4+mXlynBQOZO4A!EuEtPs0D}Yzav<)>-$4NGsRP>rhQsqc zA`$sHkCB3qfYn2#T`n*BF~VMD;1@qxp7Jy)$qX$sONRoiE>C5TC(0(W+Ox5Yx75F+ zMYeh)vY#lA`a8Luj%i9PPVf3iJ9-epHrsAMB@^!Jn7 z0?%mp))ativ=eiSdwS42Ky$qBeqbJvR~KL2`{rlmBoesb{CUg6W~-*6X@ z$?x5dTV=xE>~eLKQq=brqBxzY69)NvzCKl)%&-lxD7(FHi0nuRN{sz$!mN_@< zCC>x0-oN1{?Pcjrv9abp`&_-LMm&6g@BA7gueB-gZ?!45fIjrn<_yy1V7wepmI zBh&L>c*6}UFJl;{M`h1lABJiP5}l}Z&K+;q!%q`#5qu;m#AoI&mu0_(>w{DGv3oFV z?s9yFfym%~Jlt#0Qis=IAc#qY%s`k%w<(A@glC>GbomdSp4Aa(ZxCVUjN50H832Ezs=9_pSFhS;iY z`Q5OI4YnK5>B1xxY{QZg5A5{J%ib_3t%Hn7V}`{!2JOAxM6V$pyusS$5b)GgS?WC@ zOWUo&65UkS=_K8t$?j^DZO^Ynb?atc;nakRN=h4{gf`3KC3KPMqVS62khwhXDUhiN zwA5YOD*GW6#Oz!xq9`PI8uX`aJn7duS zT>ILyHW_>#ZbkwOeajCJ%F>E7@jJngmD|qF_iqU5!{Pj~U%iZ4a1drn9bZPScV+f| zV3naGRJp>1*N|qy5wa1vo?JnT-dQzVs?1Tp>3CCF9MO%t_+Lg}x$E*}bTma?Qq|tT z5werBxtWmqS@_Q>tmdX~v%YKLBwyY`WrT1p-aNtFCuj(R1wX>E&+)}_-Vs>>N;O+? 
za$|EqnI5;!UPXSm<0RZ7XB&)|%#g#mY=45CS-HfTH3?PJuN)k4$tRM4J9eW)auiF9-So9}vL-{= z;tj$0N45d~^Y-W?cS-$T@(X}LXb!#m(XbG;4Ym$VPB~*fo`M~?ooed%3qqYiNIkCX z6BlQ#ChT64kM^KTPNlL_WfDR)t6>YM=Ftd46T996m^e6q-zZe7oeiSH$D!$PeTS4| zx)3|R_x_NH+g>7~dPLoFyMnzSr2J9N5&(Aou_M`@i}f;bG+~Lc$eqz&$dw)wn4&-W zacN^cw^hmV@Xq=3;We>g|)&<{wT&K9`TR&rBQjT!xCw4;vA3J z5(p1ei67f7$C~*!Mx{ za!HX|xy-oAxEr_{ng!@%Km<+$0n46iIXDMwPq$bDv`B=|WA%6~i=D5K?nA9%fJq)c zE?F>a$&^ZDG{%NTxazTu?p!F3de9TSa;15#h@sQJbtwt4s-7BIQ8;wDYpX8i^}5n& z)}ErtYF+v*_~FyXRW{-hr_d7=0AIR|7kP|&hyE2DtwFN$TzTGSpTqm1B-vdEvLmN< z0NXSMPtFBvL=Kq8eENSiNm^6z_#`wFL(4Cu*yIzQj9oz8Rpfie;!&l73US3{KO#tE zb%FXVx;C3RVu^^#H?h8D_6;=T9&V>5f8;)+W^KZ)ApKU$k|JN&4laWL4yrkKOa- zue=KmNjTmO3s|?!>`Y4k^-d9Or%dt^j*6-9g0iV0B50I4%;+sI2@^bap`7X50i}N2 zxp@TJJWH8xB}UKD;U@K}6D^)~i!yB)9ss~>5yePRL$4YaL%B8#sHn8>gD8hy6E&2U z3+L!})8Cw#(}Y>*&;Rm&%eTLBcTliJ2a)4r5uvKapXM>)VEYz!iDXT~wkNhQnubuiSDEaq^S&N`=wEc9@t7_8JohQmgnJ-V*=5FDiG%CBlk4y)U5^rFS_uD!h=ge7crKu}qi*`4+Yt+e6zcc@dG#ZN1W!SPaL`eaj((Kj_gUBwzVS6ma`?XMp?+R3hH87S)UeNScqbf(K-=dp+vHRruohjfPMFXFPvP3U z>!Cc(e~SK(|9~OUnR=EY`y|}1UGst<`q;f>jQb6G6+r3)T$_jsY|N=nwII zEu8#ep1#B5&L6yaoNqS_En&pR4`Mb03hgHBn zjvKzKF}lWI6SYY9?v=%|=qV%G^v>ujSu}qfec+DbjAhzm`FJ;;$`DGRt2{ZlcLS5t zCC_Hw_72rChM6t`Xz=81&@`jZGPb2&-E787F=}@~q1S^!@M8>1P~GaN#IDs<&#v`} z`e}JJ|JhV^5O8>Q)?XdvRP2LgPh+ywWf($mj`=BC7wk~69k zN182L?Bqj_N20#tvGEB5Zh0%e#XVPR0eD`&S1$TxOH&h*I6VDP-X$Qn{@6ve-glSf zFMQVnJpBjucz7~#5Fj4?f*U2Y7*sw1OM2IU6-2Z5M@n6FOO#zhQ?-oZUWB!cV3glV z@3)5@*kk2|3;s7Zf51oRIGaEjKyFpJA`?-Pql=r6+8ZH2jQ^=J3xvh*L8j;N+7o4- zb)ZURS^9h4)_;A-yB6UuM4U!+b96_cx199-?PKiS#L0oR`M&38w_Ni-oBRmyvkl`e zm7TLT-*OXxT}L6K_<(0&sb6095YXZ>TOW!kI+z`pnacb}{8w=)bSfh3Z=SiXTAVvb zkqj>lu}HLWKp3%*H46WcHJ^1-b!&9ES~8xB;jV)GSNe0ox+MIH{P%e}P~+9t?c3*l z0!4TI=Tqa|Wu-8rh6d7=ASoij^D?vCZ(@e;3A6Vze>WiL9|rO{DkFJtES>@!9-NLw zB8vTMYIJgCbz%-=!MD}2)s$$1ZD>%L^*vnD2FShl zcH}M@r=DPjI~_za0pWib@ktL0dWS+($RD}1K~6=wCO7)m5CeMcl*EKH3mN2BQv(J! 
zPYj>pUg3#61D>BQvkMdbY==ZB=u%6-v#Yfyf|u2V*soE0-uZ;>nElT8->Hda*Q2$KcZ#MecU&|ji8C4mVt7fV{kR8fke+MvFfL=FUz2|Hb&v(I?3eHsmQdORtPqmd z?~rpPsRwHAWRV%3M{;S*3mpxz;dk1e2s~EsO1R027Y;+BzHp=Qm*`Sd1&z2R1dsYK z&)!jId04{q=Ez3i$_HHLl_OBLE^-7?Q>A?*Ek zyY=qWhzRtWq0@5`23#UJ*fxF&xk9VB34rU`|L6;@;U z9@6HPJpYZcoW^m~)&~D%M@?PDyI(%K^K1#Y4Ax_;F!UY|J^7^14XOmp zMDNE(=rAB3@@VK}JF@58MRoxuY2IM~c4MHm`azCNz@zNbR6_N+2#CGrgT$3XSH3!f zY2{zd0ltnN!pGf-ELK-WLr*W;7I|%5rjjeV!Jf9BKU@M?vg-LEfYvbX$Zb>h@?ynCRjI(b zt-L`huVpSgk-HT6RVtzJ1C8$dv;+y_ z45d7YUT8xj8pW$z%!lQWvE?52|O~_}SAuizo(b?ljSs(_r)By}UO|pd> zlB7clQ|;rz3`tSn(K?hLDHqPiI$tZ1N=6hN#*#n}T!oBe9%IfsqTljNkx(_;VC*+> zvkgPLWoKn(r@P5r(9(k{E#GU4MBEqz&Q<&&&-+(G;$QgRt81Qfrh!Cb(?U- z?%wtib;+v9OBvxUKhPE6*jUd?2lD~$zi!d<%n>fYN(@A>c;Fq_A5 z#VLZ9MUEhMVGj@#KZ{&>iyGPiO9xum02#-uD1NEPnf=mAitV{r=yJe3Fge_mN$V98 zGAC2YF%3(Y-4$QcVC^}Uslv-hTDjz<{p;MnqdRYr&Gf9>@4)4Gq2joDkU&#%bEX$+ zm13)n_JiiGvI5>La#`RY$`~KTVJIc?m4|!!^U-h0D9HXscPJn2zU)-jw?8QYoZRlD z{cyT=BFXJ<8!&lQMZfSHI@sMW-3+o3PJK^rE#!@r;E7qyGrw7DNP3NQPd65)0yu7i zI2U3%Wiu7riI01s7fPl$ypCcSrW&b~E#<*_=N?kpzZ!k*txKc1GLhj6^=y zxQG__LYP3BF=Ts)^))Fo359%#g`(O|Q#NBS1y-+&{wa_8CNW;l>Zbo9=h5um)>fKGm&?kGzwPbqV|gjf*MDIn zjA<*qR1~>byYA_S6|w=Foc`jQzeDTy1Y_n7rI~tfh=wYEI>&9}A-z>zbBl0lKj8+L zVPwSSrYEAyf{hDcsaB`+mw4ED`Mvm9ebVpxqXHNJbNm(}nPF?bl}RXv_%8y)0lFz1 z=&~1DNluybldrRF-0N9tT@hdc3OR$SWqJ(RR1OPax3d=saKvr#+kav+QSX9YN_0PA z1tE#hgpdA5D}%AvQvLK_7TuFcFx10*^2ze7MTNvZZKTP54jK**fq8C$5xrVfJyYhS z@^HVmcL?4wT3ZWW5caw5l=ilxmPcINDrVsE&q?8H;6IB@#|^55WKU~}tCL^bWUN`3 zvh1>gxpd=)-C*^I5|!|FPDkDqP-IS^jl7yL60NjhyDGnd|QR>#bdCnj7yEVV=It$=A>9SY0!!99; z^m9xc{^t(~2n3My2{}3b4X$f6p3TT1QYC`yGhX|SgCInX0w*#C5vq+^*#|%sN;f(5 zyVR2igA(N>Q{R?#eGQtEB7OwQLWcoKRA5gS8V_*3Y=l#57AOpy;r@z5<4`jcm;KZ; zbV?bfcl|+5)WC3IUv7b0@XzcLx$GRi9F-z& zK^9WEI+Ok*I@N;m&n0Wl@=@NZ$_wOTcONG3AkS|^Fa{SwB{ho_ZE$I^nfjEBDJx5% zS;^MEkn@Ww$mVB9deyqd#LD|$Mjv?(JXJ3EpKq0CJfSr|u@>I-d@FC_&HMoWz89Ki zD^{i{`a~g$Y-wDMK0<&T>aCa)FWSA)yfw<)M6Hw> zUrJT+Zx_Fn)QPA6rKy!Xx9HT+8w_y3Ug*w^NZm^W5IZ`Y2H9z2Om5isTyp z;)-xDn4;S_-*$54vF|$`6CZ~fFUc?m848(Z-$kR}^P|@<1rw12CTrUm*Hx z@_MXOqnOfuHnATX;%D#!xmc9 z+Fr_^ZmC08!;_73N3^0zC&u2G4kH)dtI5D%aor-Zf05%Iy9`??mGzr2NjfTS?=x=&vV*AG#h%C1bjv9H zH?p9Ogd2>+{u0OaMuu6C$Wh#Gj-2y2CFqCbH4MDGHXdm!D8WHm z2Sk|cgmJEk_u&O>gQ_K_VNb~bk``96F5DE5w&sNq4VQQVc!xR?0W ztsUVFMS;#R7eSJZux-ez7_0^K_B;I0)wJ;W771+dsk&2@O)3Q*W^M!cXK=fDut#^k zU%sFH!V%kSeGE!8{qVObiG0xI1$jQWf;xgoj}l%B&nZ!|@SIRKwmsPbpEs$r57@iY zBHx(`rZF--y)@VbX?4pG!>ib&`pZ9)XtJlv_r0$wL3zwDJ0G4~PnK#bp|ZHwSngqy zqT=23t0kR+Rgde5sS&=^U!TM9e8O$8SNon*z<&=*{WD?+4@UHs zppxCVF9DT$qdZ<3KqVf>hG>gxX4xl&-N~GJ z9O!?uo@=z4!;PKQ?J$9*s>+psV&U}@*ahqe_kZ{sNL{=%JJa(Jx{?ivD~0)utUa!i zkFu^ce#=QeU;PoIfAUAkcb3S#J;aN@9cTej2{$Gnk<-27rgrxH0z$~6se1P7B)PdM zX@&c7OA!eLEagQINT>lsQ1Ip0yNVzUcHMUP(SA|_=}N{#UdJ_nf4F}}e2M;j?V2x3 z_6ec_yd7J>;Gvn_b&f>6M!Q?|e;Qkj^^4~3MY}yrV}+4|TVxuKpVlZ1x;ahSFz^9r z!`xsV>#?*qufH%Xo_bl>&?5R{p9GVpO#uWrKb{r>H4r5YgPnxJrd1co>1PXamBhu$ zF<;vtveiV9fVILwQ({zFp@q2jWHML78HA|U5G+Ph-k?%ldL&AvaMybT{z)zOh)@5| z#`Z-uX4%3qC}2>!+eB^2=?L>+a7gtI$Qn@D7Gi&JNj4`4D$azvdkTW@_ccQGM0 zrNFrL^B14U%A_c=`_~3uKE)pJoVR(RM3N~X?Os0G)Zf&!ps?kv9aiU}+uFX3O-_1# zxDs!zmT*PX+MVGi(ra%ac>ET$;AIbMtvVX~67H%4T~>K~HaGCyt`Z>e@CMkDG1h$? 
z-=U@wI>}pK&brYD+0HH@13YMn86eEo>J(kzKegexDMirTOID1ccJ2b_e-LjX-%@Pg z-nyMS)sR~A!$-0kWDmEEb~qLL`7BIBdWX{H8-df z5_t!6qC_m)=y;U69{~SrbQsb`z(pg8Jdn;V|D#nhRR`JE{TUCem)8kQ?&SkLM4#G- z7VeXpd-7{<7%p~%2a$G9>O`%J>LCERF0aQ=@4*^#@*95#TzV=B7(bGgxhiLXOb5@H zqMn$}jrPNZTAf*bDJ49)VVgb)3wMSyhEp4SLgG0HLk=`zb2bSf2HawBW+W=c`V)^t zSR0_!KJDZS_@6j6JiZGTsh;5;iukHTA|3{MrfUy`!S{1BD82LHbCGWjJfjCTxv>hc zn4qNpP(ynQyRF(vaFuv1m-OS7cX$3(*Qw;5d#)6WU*;B&Awr7{VaZb93?<5^ z0zFJ<3h~%Pv;PPf(~HWp@_h%d5Bh@K-@XhG_QOLkzSP%j(A>Z^MEZa{^?aeA_q+vhlj3? zs2fq;Rj=ySJ2483ZNPW{V?#vh@V)x??EiVz+KJuvyYD?$OcvoBM8^ld+(c1a5!cNO-Rh(}Gw%RpBp#84-~t1mpHf@@}h*BVA!fx}_g z?|@OK)QeeFbn;z7WtWEh7}b5;<8_01P9}LIY?^s!Sj!$3_R^ z8-L}W@>hQUhEE6yu#`q6pD1|x9-l+yqC?FI*@e}k=)I8BD#Kvqge1AI=?sU9s!su6 zZx+WHhIf=v)Z-gP<@d7H~m8P_zMIC=}e%%dlYY`E5yzha_{SJHybE1OWMriJ8TXfzlR|~Ej_Om*|oy|6%OJYTW+cQwq zhW{qZsqx{cFBekV94#d zb3mD~fD$m@^%#ai{`R)f3w;n&3++=6m~J8wt^bfX_+NONP}SN-2@~uz0a6~1G-r@} zVjg{hpDB;|&zl(~aT%KOdr_A5BAiu?2mHerB;t0SH@NcuJGMxs)+i^1Q;9{EE|k2cQH+g?dd`kyWzay>^(O5eUd` zQZD#yUx_d3CqNFKA!FMQ^Gl(>%CPs0w@`HV2|V*-^{f8|@edk(NP_&wzG;ofCF(af z`ptD2;{;9lRyOCBVG}qf0r4k)wEdJ>RVoud{9{r+hguLrQW-=>_qoc}eHBlsuHdHV z?;-)NbCe^G2`e$o^Z@kX?+Jovot(AC{BMJOuc&ml$B&j7!;pLfPVN5VXF*%pHS6lv zs}vF;wN1fes}>!QsKU#Et+LKv*EZzZy5%Ll3p_y>WuW~qx4UIwRltF_d9{|}jx%s0 zRhb1ej7}Ja^n(V`1cF4+1W=RXQ9uRa#(uJ@t5Oay&l`()pyB5Q+?^nt$}Q|PT6{#5 zo3Is*c@#`RJgg>4d9{He8lf-5#>)WWPL6WM;(<6*5$t{M0#YRlm@Sn#-&Q@fm~ws-5T2fisX)(?pu90SMq^7@M;=2cGq#aU||1@F$D`YWL|_+rz9I+)Mee z3?X<`VDc2QIN0b}(T-3TV+jAF|1Dp?!Q(*Et?rfay(ZoI>lq1ja++WEDZNwzldjDp zhHrV$c1n2qc*Y6gHvy=l*_1!X243)og=fo5@bzPWt5#mzEK#`y6|o6V=s~EpfwLz9 zl8Um1uH-M|<))6=ZX#5qZL6*UKG>6tQhb;48YgdhhlS!%4BV2ExVP$BI5^J4>=~-T zABUI(6-%IkB2Tj!A^2yEBd_9OnfIR*pYn={Jtn>?0T75Xogo{^a98ItSok<_j-Pg= z%y>Vfpi=cTj@Brp&@<%;ZQ43lcR+(#p+FcYRQ+fC@arXtCf0Dstw`a;Y4RBfar_5e z)YD-CO*AecEm+FG^6M+yb@GN83#kv|z7OZ*8`(mxGRVD!A+omJbz%uJ_=E==PoA$Lj6=-te81KmP9)Ho!H1>3@GBN_?@;6cI@1yu?nMZ#-5mB&AFHRQ#LA zfChk-tzviVPhPgT$GmarJLNBT z79J6Vj9OXuJqJ&z*M=-fq4!T6dh`bmRwiGB5Km8+X|D_X{EjcGWav#${B{KQcsR3T zGw)3Mcxz08QYxT0@_xxA`U5O zd0~XWs4;B=MN-@sjl&~Zfck(79xE-d{JI{9WoVES0-Uq5C1C)8kIM|q)CgPyu@4Ff z;S{EV{=SO5ed!laZl)X>H$dd=p>$A;>a@u5D{~dp4e{QR0YYA z+~$vf7eAxyhZh{S4T6d7VIGD=-W7d#li;kFQFRy|Y7OY}?h0Z&yyX=e&+1lBl^4(Y zAxK+HA;M46(iV;#W2#L&T3bb%of)nb2Np7HUZsYm(U<5@_v%!;98L8j+Ws?qCx~+GMjG_NzZvB^v%y{}jrP}}a{!_Q!$G{mn)mcu{TpinOR)kbi)pK4W#P`c#; zK(jODsi+fC?xs;gAgQ1|eVE~X5wc@SD7oZCO`x5xi9^wF|9F(w5NQjjCp;qW~Q$C1K?(&VH%*w)IW|bQ|87W z_%UJojRO?=%rC7znJ}xKq6Mqp`%-qpk;91~0RM71=iM1qExxWCTojc6pD!H{oP7l! 
zqYob?>fr<4EL@u5QCRC1$*V@gtG?>%hv+{0L^MOZl3~b#Iq8(RBu!{ z)laXa%rZA}4e>xQlJ@~ySD|XnrmM@0D&lzz=gZ3`RyatFS34!V@l=1MlE_`@(e>|*7|KP;X8hayV?uyz!i|D4OcZF;_pBlOw?dnx6?n94 zN4>`{k;k+L^9}Wep4waZ^}Q5|*sgVXy?>WWAg?+Gjfx5kx?$6EL;g4Nunqwk)|$wQ znW|o3Dw|*kHBhp7Y%d6T`0^FjaKCMMr3O_VHoS&zSdo51-ce@FEsY-PN$9`^3#*3G%+cstGU*(9MC>r9Hlo-C@1^i1dl_lv$ z6gk;GIhZlbVXvdWTDJA(fA|mG3Hr+d_uv25zasJf{RdzY#GkO{T}C)l$rJKM!$|w0 zGJ~c@bPZKF2>KT2kQYB7d-UWTuX@!YIfnj>l~8MECfB7_j_{FqfST-3=*Xi}fd~IR zP3)-&25n3p4evG1QbwIh5#kaPiOAE&6--MTqL)lmXU?B57ksD|vbKbuxKHJ+3}sW2 zYHRyr+MF�fhDa@B!g?4V!*E`z);<%Wy%{^cWN<6C6qkHkq~_5|$e_Y|Vl3_$VOdjw5$B{m!()z+O_sbl~xicsP z-iU|))U>n3MA%AnNm@rVk$v)+5|wN=2{-rV5D6UZ&k?_$DE*ikqhI~l7Z7b8I>cm| z_X41($Pa9aQeAKuVq4L-tD=$@l!yQA1dTn{YRJ8J+-z|@Mk~k1bKuVYblsfPvu%#fvLl)U=8}Y16J~A-U zev@~?53A)S{b817Ftf?1mLb;cQL!{i)9`dy{NL2YZ>VPv!t?zohRPre#OmP(C-mt0 zwKo+;7xreAHELnuWqA015YQIM>p&IqmYlNFE_GY$8<-|X+v>lWut2Yj3LAwxXYK=h zrNrSEFMW0+&6#%u+`i|ns@631y{T2==}NMlN@|$E-rhxuc*|z9v(Cjwvo7RDxtF6x zB1Q1XT!!ipuXfC~daaZ=nQ4nHgrquV9Gc=x-W0Vp5dZVCV*6WS`f7cZ7aJOlLT2vb9=pDLG(-YE5$ zt%I%C{cUb&{3cQ(K+?Q!xOowT-UwVjM796Q1F4a790LDbh4*2@lNDavl<$C}w8bS= zoZs)6x&!PI+(w!%_F*>kLCjI{K38O-3;ghlfvJ90+Gcaa%le>V^X``(flMJv0G7)j z=RjAR#)l7J2l(4{Tk_W2Dz{A&gWVeFS}6YQUoVv_UP@HLD59}MxI62d(KmAYQU~7G zF4K?2#G;^rkGvGJVKb*!0_sx_mdIC_OFD!Qkifn%hV&vf*dEn!AZb{wd@2Iu7rPcgWM4+c3lVfS`QIT9l)TOWZxrZ`-edlU@T-yV5KE zE&-hc1)%7D^<$5=K2Mrl(oFCdBDjvDw!UvdKC*L+*t_B^_n2CbP0C8_-L{n|x(`G!1ReDRqyVej=9{@Jy z2l9zYTcxO10CEBX4erXXO0M!rS}u5ELaxi=I#o52Zctr!g2v3iYe_P_hoq$1!8*b? zzZgaTdH@w;ZR@0{MT}P*TjNe(&qVyb7r`DjfOs|wv zrDUyx2}%U%k-2Bp7uc-metGzJ8R3UoQjObu-!z$ND$(L6Y^u5gpS_5594HYEh@N&3 zXuo`TBRvJ%y zP*}EaUQ%G(o)6u`bh+f$${KZ|q=~4@rNCP7?ZS?GlfFf}VhZqH@*PC52BG9^Ro^_F zj%Wv+kJT4A!_1>n)`4Hhac>Hdz-eyLZLY&yefeCu75F6;v$t>h-tHI$b-be>w0C?L zcxIp$j2V^0!y$?XV<~-Bh;WqT9ZwLt7-dHK$*@Oh2T7|W8KErUTQ1?*dOd^a(c-2e zuu3%?vlHI=^7><5`zotcTiIV+ag8kRv*s5u9ZE);^cac##&a)zmt?YpE=W<=44zZ) za_df^-v+di-u_-$oH4unez}@YniI&QtYeQ1NM1R|8_h{LSN_H~5N4`gfTwL*>KWtm zW_Gz*5OD~%&L9Ku$jF;9s=*zHK&hEe>CloFUAB(51kugp5>m%YKm1I_{LfF`^&4?X z0nT~XwHnf-{Di9DwNY-E#J+ZJI!LDPJ8pK@BWA+Q{QW1Urtu z*4U~ZQKGm*P(70gx)Yd%=_ZHB>Us5mF-y=cKQw)7$vs}#Xv^dR_&1_{gHJ)*M-ee2 zA^cs>YKJ7ts5Djay1fXNhuUffE!Nm1a$N5DeGHTRPK}3-kVc7V6Yc;tuamR-!RG;Tl&zJMx%~cv>zCkAvo78FY z!hj|RileR`6@=XnKg!Gm_>Y%|Gk+YXPq=CXl+joRjvLt;ufJBVu;O2tlkv9Fe>u0@9Q?37nRf<`%DKABiYcWVI#&v2IVy*U_;RE9)t z(4jLIO%|I!;Uc6ewI9K%G$97GYJyw(k!XdoBJ%aiogVE;P+NC7Mo#UC3m46@0Bv<} znxGzVB(-14XN^2pewQL!sLkU?>&2mZtQT!x-m1J1%Dm?nXiEyZHIG7ccd4TZ+`>|% zO#7BDF@_8ozmIIZq;Slu&&c8Q;~)Uq=Wwty8xz;fAd1dP#59=?BQV?o1Y_f^VQsqD zlm`WCS9PEWNTzH%t)X%)6mkAvO&;TA&WSy$wBaPYT7blxsyKx-K5H&SfI2KKUYL-o z!~5ULT;!L^ncOqSPkB*Px+yfO+f^aWsOertBVNF#?t2vM77-H~N4hol!9NazjUoLj zrt29<= z90@O~_6Fr{Ou6_M2~Wc-nT2VDOw+14+$nQkcb1Wh(+|loMZHW8vUVmH z`pjeH%wr~R$M4nRASR_b*m)-jeV0xFA)sE`s|LZnH>i<#IWi8I{&<|}#vnppl9$g9 zpeg}aKY{=59?SKT)r`Lpxt4`VS*QNr-nDUv#XyFaxaA)Jtmuy!2DK6VHq)ELmI z<`@Kc2|n<>qkx7nv_ES~u1(1D-Hxr`56TnFuD~1+^t{-~X z*_W~a@g96V_Zd$W#zs6GZ#39dtb3KhKZ_3P_h)$M7Q-Jq*sA!+l%q{O5hO3XEgwJi zyrgO`9{6+h?EZ}xg4L1~ZnX1PGmz+9`GIFf>|)Pe2;5fc&PE{uTLAl^zO`j=GJ?&ZH7LYL&BZh5W$2nKRIr3V0+{4g}%gG1v z@jZNFCy^AbA}}FJRg))Ffr`d+=sZq>s}KcV1VzDmnCJk9L09-9LoFvmZqpYD*0|&k zkhA~}*FziLW&FHEVs;2)m&q#T=seuSX=`(gbb?E$hYgsLL&0xt8|XL)+Y z)mIuzy=)wp&rAdMfFqHq55KXPiGqR|fL9lF_@${D5a&%f9D{|!Jxc%sO$Lq!gk$Mo zBcM0%=`TnD#|`<$@CzT~r?R8fC18RD+c1z(ZP(jkSD1KBF>cl?cKdDI(8Exur;(p= z6yaxN-E0qTcos^>tZ_(j_xm-x&mSw;la)Zw^-y!bL6idTon6w-`UY0%6XcaY)|Aoh zu+0~4XPaDK(wkVCSuD$bn$*ci!+)im4@WcA@Ry!7EiRS)we+u|#mm*n{v{7q+smCA zWoX%hh%(??wSioNjcx4pL1aA2uwS84ut!LVLY6J0>|zc4O@HXbY-T9S1yRDk1s^0} 
z5LRRw;H*(N#FPv+{zpU_|M~RfBhMekbl5zA?IYNE-E;{YQKJn0@#JGa6Ezp*%D3Y| zUk`O;rCc#-EtN4JDA|OBP={H9W{edV5uNL3=3Zt>!$0cx$TYe%?}uoO6eQx$iyUV~ z-FO`2_+@VTWGoG}<_wem%Fi6$zgRB#h1J)Fo|L;d zs#)S0FP3Q!oursY%ZquXnEUk1B|mCnVNR_UCjGevhdV z-KkA2)h9TNI4?rd@ml1&p&poj&=!7f3SLL30jtW<@<2wuh6v~$-hYi~!pA%=5M9ji zTpfg>f0Fw=b|{INcl3rswhXW$l2U&z?})a#HKz< zrg%j1Q5}a9Yeumzr5406#c-(6DSnn4c`oA|20JPj=tn6-BZV3S&%#9gx1;MgEg{80 zsD(dBhz!2`aT)$RUwIZ#^0EBbWBRccINf4zYQXDe0H>Ro4^gR#mTP_pd<>`77v324 zkt>r^)A93zlQ+uAFFi{#p7s(+e*5H08RrD*o_uC9L9e3P)*{IA7Cj|B2Q_UA!*LXMksuzLp2%**E@XgSpl<1*J`Y3uBbRJWB zu0K{#;p8p9Ymzd~(O(0!5k$`myDw=Ni4hfgk1W}E8c}z|V)zy$AXcrZN(xE;Mq7?a zNg0CS(T-ygCd;hJVo+}TYv?5gLi||Mkw8o&d%mMuMenXLw0jdINLdXo58huZG3`;( z&tu&s8R4s+`qi7IO+}-wbRkybv*8CG&vKbq^tEstFWX& zlxg4oOg6Y&)CV_o+VZ4mZdWb5mGzfx&38Fsis_>InxE^cDR`~|fY+0KS%;dEb z$IPkuVECagQC#=6Gr3Gc%eT^;yj0twTZ1^Et=9V@#>*5zEqn6!FyrF=eKv#FpD zHR>GxCZ9+8isBx8HQbB559`j%u!ho;QWg*@-wC}9)^RQaq6Nd@GmiyYLRH%CbW0z) z+oa78h$|TMy-7p`x2L{q;}jwu!;`@8aF8MISshOX$^+qwYmxATd^O4a72swmOf%?J zk;Fw^iS5`I2cJr7@@iJ?9>k|B1>i@(BEp5f1{~A`I-%Jx6OWC?dEiiu2Nt-ApV4u% zlrLd+wZ3+$7I0p4U?4)T*9?h&eATpmgU^%j{p-A^V}k%y@oIuWNNV*0Ps=j(Z?zC= znjbd-O(FzBkX6$+g28Q4`PQUO7_ep=`czLnGj^kb5a;_D7`D$IF_W(&~(No-XL$>oIFs@X6<15vD+)))+skH6*rb z(gYM)ZDO|Q#Ub){#xyW$0- zJ3vsRa2TmXUNeOgHEX+fy@UsLc>npbv+bSyE)`*)^j+loswR0ZoIHUQck+xBFqFwJ zYGxBQi2PFtBa3b$=X!c&zD$^Dqs(B#5#@2TJy^}PAuI+O@-idx{S)Vp0EEGsMIWLX zQHNmjsAVw1!Thm0BQw(eYZHGIlzm@ zGfsZxpX7V&M+V$eAfGvdPQ>^&V(rq@K&kj0QSSatS)dW_gb)W`$l~}Fm>T)EBHd|5 zAa>^~4}Z=o>f!B;L~khhxDJg6PAHJ_I6fia0DHWC>>NxbPG1H22Rq)F8ztOgK4@>Y zt4Q9X|-wA1wSW`=1D|FYR7^A_@piu>gB^0BM zpb7y0`Q>CS(Dv!(L1P(Auk;$C&Xh#vWjxo2vtyN#>4y)1(ws@Vq$nOXOR>yxT5EAy z+IfEN)x?S6$E$d{iAABHq2F+h&t=B>yI=jJ>Bady`2(6|=d>P$9@%BYhR;zFZP9Nd z@HJqmF->M`R|hy3Cvr}G2iX?KUT6iMW?k!N%XpqY9L4Avv1o_r=pG;Hhhn~hMX-gZ zBD^_T_{gKw%`Cpj-jsQ^V%AS6g-sEJ?1wbUsP_xjgnrVE#iRKy{}lbp%YC7|=INZH z@|Blca-%tOyK+Rg(K21F;>Ox1<}-vr_)Ps|x_^YGdU&1W+)zayvUR`nWpIbkj+7?= zMB#DE4XtXNK@U5Jo|0ZB`rS!(VHrSu+^1W~gP)snoviiL> zQ&C}qvFM{Zx>7~n_WP*@8hJ=4+wRZeHNIDZa>>d_0H^BVN#X!{V*wT{luPC~lG?{> z;7c)IT<eeA}V&iqyKA(VkDV(D2y4S%RTlLO#`+5_xc!(Ru?ppe!fM zh!txIL4*PZ-H#m|e}QHhCL>iy2p_OQI*~)U{0x14F9e=8ZhJ2cM);?qY)=K*BfBJO zycx2nRi;8!^cvAcNKgp`^$rMUUJpNY#b1x(_NW4qFB1Ot)sG|}KgwE?^8lJ)1q+*A zw;z-zyyjrf^mO#1p&|+e^N$kSbHQUKXpL#2SpjVDI_c;|F`(DW`3rs;!W7mK?);lP zy^p4r4h<~<93+pzs`t2uDr^gyez5dNBLCA46X-{h=83=G#% z{%Xd7Nzdg~@B12*F1!JyHih+fUG{gEMq^?GMCx2QZMcPEeniEml-T*QGUp}X*eZf4 zw(G&kdnDMfYrh{xK8S==7d`Z>;>B;gflh@r6SimOry6Ady7{JT3SeGW=8W@X1BcBhyI}9h!67-@L zVv*{L$zht4MSJu=kCx|Zjg}B*x7WiXK#XbFe=*4`e7A*W~%hwAR#ayJq>!NFj~naqR6O=Wd75Kn%h zyx>vJ_eSQSJ--XIc~@GGLiabC;MY#uhvvPJ^5d1ga0JiwF$Z>zcRYTodYydgqbu-d zKb+ti`=+Bq$3<>$AMCD+>QqI|4^Tu=K=R7^4hV9oOnL)UdCNDbfbkW7_`ZspC98(w znRZk)NbU}Q?%~V1 zGBYt{0v4r^T(ALho1BPk#UcKWC!ZteKlavMEfF3=Tv2PDojHp;)R%3i%S~GIaX>Vj zltC0EXZR#H!O9xZg(;ixG1QBRXf8YD-Ba0lGG{jabDWgJfxv_=Dxv9bxy&?UiT5@ubn5ddnH>Qa&y*

kQ5kFPChap00-LJy}Ls_ z2vaMbL!yXssKDI*394cJ{gZ;$;-{J!9vrz(Xm|K*7uv|$Tx=qrq%?z_ZopLqPuh+GUQg^vbB*%|(Pfg&e zh>xe#^Po8E!jgZhAp{2vmq(HpwVKEWDyC(5X=Z#SWXJ17PsI!#epH5ErtCoFkoJIM zsI3EM*Pq9rlA$r6HLA8fZ<1NsLxYNQ0J~LF>UP6H%rlM!dk#^#*MPc;Qf19yLY^X4 ziV{}+kzik{YJTpKC|BZJCT=+0DC^We_c2qW7u+~TbDP4u%34vC(L>k483br92)6V1 z`FC~L9E2fDsq`?_u$30d!m~3|W-}ybMfHp+Z1Ih7O+3YIzDhBnemE#u*^ZMTeWXTF zdDfc@9ks|GQLhR9e&085(0pz;vdVVw^4|12?)&UHGZ8 zC2wwdZ7XrtEX&-@P5JuM?&pk7b`xHy%w`9JLiJh$%MZU$Cwv3j9$3_43r5Svh@+(GrsJ;eIOj!o<3rMWfjF z1}$SJ`VHKVNls)h<7-!S&1)uYFB6R?7KnHV>hG`B0yinKU(dX6g&-m*{TRwOb zL+yNZ;(p6Zw4x-rh(3T;4>oF zR;Bl0mp=)}fU*+aU=SD1YIc*T^h3~G62**C(`)AwQ{2<#QQyw!p~f~xiKl0p{OX>X z(E9B}uZ`jYOd4Z~ef0;$^*24{B{?1ugc&~5e*-$G75TetcOc{VQJDov%dHYy}HtKBDQKBe@}$CDnVW) zUUJjckvd`atx^Bb4?!KCjmXh!FPCfI@mk`Y3)eTQWC=Hoq{|*+q47R#Ddcp6NI{=| zNlfZ&2>|iN0ViU-hB%`4Oan1JN9-H1IN9J+eiF*~c-d;NlU~M?4(}7s)Qo7Ga^X!x zQZ@3au8r#@3lY?sdQH5m8avEf3ENiX%G7q=D}&8?*pQhr;B$5y);|h>iO$c+h!S2y zVLC@UF>@RPXnzqXBkhmKY`+I3J#Bo2J-cA5H-b322;Az5|7`qErtu0Kk1~C_xC^gX z59~r2>|TC#I$xLxY*yhp{hgh@Inr46QH%pB)X02b5&?C+#!GtKYDN?1_x18h$eGpS z?6U&;zJ~(dm^2;C4?hWM(zR<3ZC{)V6)Bs$R&$G3D$isZ6Q~JZL+s$9jUqwR+1ksz zTnRGP)MGw}b1GblG;W#l$pWh*q`)gaheGIe6uY~7`0BQ=+?PHa727t;&Ad>3VUk2^ zsp0_2(YFMtZiRj)|61gl5XK2anM;D8RKSt>&~zKyO6BbcwWvuHCvSOWu6?Imd->XT zJk+T<_~8ql6op{}ilX8%^*c^EkVny>8PW+wTSfN+2~)>e>IdE_he6^nM|}B_MScp= z^Wv*QQShthpD)ip?{N?BqfQ)ztQ4Ee`VZn7JB^6hBm5$Lih?ln%nEE?HpxmfNUb>v zbX=v$(HTOQg_Cu$o1W4|Vg@{13PhX@Yzk-WY6>#-6=ApX3#6pF19+W=_wSTD5l*Q{ zWBGiNdl0y;yuJ+1oS4CzkayEPD@gWfq6sd9;rPzUM-HgF5#>QzP+vg5qgJ*GV@a6;l!7ts-ageV z4p5J&hdMLIQ91~!5!e0`Z_U#sq$9%}9K~BDV){+@NL)5m$I4xqm!*}7*u(+%qdn)X z?5(EM0Rqc%dt5a(Fk`ZOVMzy3R_8n+aGoF8114iU4X6lVEVB?FgEe|QlmG*8ff|!O z4=Ghnee3`pAp9tC8<`g{iVk|wc{QP;3di3s(c!>EE5LY=)IvuWEX4;W4>A=O)*||4jD*owPh?uUVa>mgG;0lW<_o+l_2|_ej(++Pe!u{-ZN3IPI-uORfnhU zN_o~2NYP9{^B7j?`ys@rAE^JjIy7{~bB-9qMbN*uKpocc{&;iFl?#5Y%JO`gOWz}R zmbQw)8bm4T^5zFk^G}(cHvO||)mQ1KL2M=bU6{jP{WymN3aYYUUI+A&*eb&KLi19V z!n1xIo_L`40B2-gZEdAzbS{(hCgf3%Xcp@spC93&69yufA&$phKjq)QqcGiVK06d$rl&P8Sfw2GB-mE%uy7nc9igLcP{0O z2<5fmXp-j1YW>#Fon9y_^OJtAL|20&;YU_lPyTwgyqNQmve)IfB;6Y{J z0Bx)o%x@53pX-!QQj(Wx4ibqo@K|}m@7Edly*~(T4$lxpSU6c5>)~P?Me&Ys5swd+ z_A4^x{Ro8XW+?PhXwoaKxRplNq1cbdwz^#+=83i_V{?IwRqJOfXi_NJMo7`lSx_!| zk7Sat51)7}Q>gz@pB48tMcB$FYNGc^Eozcg9nhZqFyf}9>gfDeV9pPGNa5>u^H@b9 zodP&1tByG)Npk=jSC3MVzJv)m-kNIp*5HJv!cCv02sRw=C|Lk?Q(^j z=36Gac5%6!n9u7qCDspOStOO4cpYJ^t3hLMlzXkugCwh1L}r1uxb8<0pP;l-w%mIo zn=eW|T;DD?8Z8xH9-36F_j@N_bm?FZPTujC#0ph4aQkjEJhdx19|4fplhaonS*gRG z{QdJqTgTzTRNv*pP|qSSohUNb_mfA1w_0VlYgZQNDK( z@{B|gVu&Dq-zyMC6+g0zCuT8NYt-NGDkaa>awwkSrHq~-Gpc98nI1z6q;j_xZtX5= z-$CsqmrF3t%A*4N9%x_07SuR$AsmDzMl3h%`DZEZ5@k-}K&6v1S^2JCHLBO1M#Atu zjf|s%6#c@i4oDtB$NXUg{(KUWyHXX!cH3jmfPvApU=A;PtI}{mt&wC*7 zhNEPX0osa`F&+5LO_1ljmVP|B<}89FdYz8f{8s7%-=lfZ4hmC{+#q|83&zi^ZPW;n zZq~-682OoRd~FR`_=17xE&4=7kbO5=1J;>tuQQcA(D&02I7L zJy1V^M83vAo&ag_Avr4_hd(UPBxT8zq&A1Jsg3Q>YGh!JaB&!=&(dX8AK8}2&-1<^ z7b}$?{8+LT{l^I(smp$WMe8`kR*$Uj0;{`)W-ilm34oI-m=9=_m)ur^Xa1qdG zqR|!`*ZGo?EKN2e;7HbK9_+@DkBRqc@vJ8D3MHhRyb)OD{Uy8^WT=-Yl;yEZ!=K=i zUJ2&l3Bv|K`t;`TXuM+r0R_d zKsSP&o;Rd9Jdi`*?^K8SPd#=4RAbAHAE8!TZ>)Poq|8&7Yi42I_bAGn5^knOp;{6P z{#kir^bPm98Qxc`0@L_7iqeEqxoga81WwLposp_Zz^6P>jgU>GK%N(Pvqam1Dr6Lg z=C`SE;H4>ZP?qG5B2;jIZwJ>Hqw^_YFcAfVm?L>RY*TitkNB?y)9JMR7r7AY>&ufDW~=@zjuBR3aEf zrpA{frhWBe0dRDfFx8Oy9;OavLWz_h*HFCm{x0Ktiuk!00`CP}q zkNU5F^G3sl?oz#*NXzPl^8To(aKila{HpvI@Ac+B>0!XShtK=0Ztjo1D2mzhzDy|b ze8DfDkNtk~g~IDjlp+iv;^2=s20PLEjjPU*{vP z5NNd#a86AopkqW$ku(XOc{-Cv4{!y79ff$SX?0q&#{I19H37O=Nx|XH6t2YS6+M`x(v-cJzhev;K3l+eN 
zQu99j+Hv7k2@Z4t^u7{}ZN1q&gs&f~0vXCxC9~dNsZz&ab+FBw^3V|D9$*M~B7WA* zu;3+AkZvAH*GcAAem6;MgW!%rDN$`Vq*$5 zUadK>3H5nxlzbBi3{mWy4Ra!dn99U7!7AhMtx3*10QpDVbpLjZGAe!pMcL1P zH%6pf^q7w7z~@2$iS@%{v&i{AB(5@EL8Tt_w^OBhi0lsU?!)_h2*ykMx~7V%M>4li zu7nANkVDn#W7vl4ww8(AZX)KRWV`kdHQF3m=qtR7?$9c5WU7VTAFqPi!qm))R6i8% zyPB-3YE85J+e+ewj;RF_tMjHvn{G0UYY96V>-fbzoCm&C zwsu?!J>X=7KZYhvBKMj?t9(G%G+xZCfHyqL862()OW~be>MA&!I@u~BBz!FbNRRhN z%m+am9@uSg;N0fhY&u4hudS#0ZtycXPCD;IqtZ=^{07^BVc;mJVSAC!`h7^wQx_E6 z5ChF6dmc1Zf*4j#y1G6g&7`C&3uPvbt$af{?s<+Qk!5)|MKJ-&YF}7i+zh2c>xed~ah9%!#Eh(I(T_Ts@%OrRbZNOx9D^5<9D~aQsCv;= z9KiD?CCEy2}C1KN1WC+M%nJ+^H9&;9*p;}N?HEmvPHjY@qd z18bztPlcShs$|1Za|H11gjJ5OaWD5~zTL@)Dn2FEU3x8+triUibGDR4WmsO6}Ro>RZlt+xKQBHuL1{2`8ZtaGExK9;j z5_Eru>iF^SQ!nRIiRD7xQ=>b=d+z`unFwEbti0lxZF_uh5M6eTpk^9L zq^6=IWkSA_I@ct@tHPX)M`~kcOA?{lSis&UiQSrp<>4sjKi9DbqY-=cwv#I|{PN@v zzS~UZ%baopw;pXk5}_GGPc@>z%Z|SYhU-tDKg44Axsa1eLARC36ce7*@bp(Y5l z?Aspx3sr{wVR-+MEXJ}@CSLsamoVmPJ$chl6=3NC^ENVut>(IWEo8!GUlpRK9)H{m zvwT_64dEnL!-o(hgDg#= zKnBF9`X#vCA`y#Nbc$X1v5&l1UOJa+%7a3=6__h1&X-<7YK2LoknR(**WUEK>ln=0 zGInNnUuE(097=mtC2EgIn43?@qR|+?Nnz)6X^D_^{8p_oVn+5tc|Q6cgzobvQE7e^ zcJQQE>S67g@MgXUwZfF(@IB8+6%7x+~1ZpSR3)4x|5sfPoha(mB@SYQ{;@%{bvasz(rj{=S;#OaTO zn-N+Z(N!9#%%~{8ATt(e+ha{lq@3O5sY}T|FlYiuI@qY|8|pXCN(y|MoQXbPo}CK% z!rKx)NL94JDDstEJwvY!WD%H(JnX|M=fb zp8Yv1s{QUSSX8>q+W_a-i{k>!|I4Panjjb&s#Cf$h4ljxu00{ujxt%mF>ys7H1Lh$ z{A>KDFTXmKtZouL8&6hH6_|WApG=q-&KMbu*QiFdbVb0-Q>eXj4{ff0I}Y>H5Q7k z`XzGrEYA2PXofFc@CT*!hE_}3bU-n9g*QSE#&+8XkNL%HNGbWd7wfJ3_b>tW8mUKsvvEY1Ww>tC2Zfn@%q86~$J z1q}H$40s1#;sMFQ{b+tTK2;|1Qze)D%FO>pdDU;Ik>(8|djpsvOhl}`GRs`@snpsv zMXJ8%QB%l*u8}`9b##%4Djc~NkpD*Rha8X-9?b0~@b#alm2Yw;;3HO_>hB8E*p6TT z@U+H#Syd6wNDtr6py{6zxpwjg^Q)2Nx3ij&xmtZ<9eq2YI)mLvRN}pZASq&W;-x4@ zzbeCDdJ->WJx8`>H%@%@dY?S-meJH1Uq=S^aN57j#ifmz$2l!$V(%_w)C7w_S384d z;FLeWb?W^&FS=apG>oS_xkq90ZS}fW>O4hQY-=b1qg5C>7Q6n7&!SN~c$>lZ3A(e!W?fjqh3eWpW! z?|As%%E^1agFvv$z5}6-p5V7b0Z{e3{s@rsCf+U&rboeBf?idUft~VO#>#Vki`FhA z93i4{HYpAxX&4Mke35*hpyR2svv*_QC$5%zej@M7Fv!E2D0Fz0@LK<_#ID#nyn-Y{ zeVD&-cPur&`kED(zH_O)V|hxc8~1qX%+IQ1GtPL55BL!?TSReflnI21M|!!98KsN@ z=6jf8#JaB8oUTQ7K^s)Uv%!B!Ra9Ja#12fXEMY6oFPo1KYK%0{gOANrryCvoZ=pH- ziJj4kecuKnAr3#&;ksPp(DbCi*Ddw+6<(&yR-UlE)v5}z86KO$NxoK=ehK9?mIhcP zX$;-3Q3eF=)V0l>x>5pk?^5$T6k)GwC>SKE^O5B)U~))tZ^u>%HyNkm`FzICO(Jw7 zRih@SHL>x;6|eXaXUdbF;V26t^B{yGDfYy}{?6m&J5d|(4lXwTpwsq=nE(fv@gFqH zFM|?e+Ec1*cld=*CncsI^UA51PP&AF70x{7(WzDs>eyaH5nyK@+cu7lRpQ#Vk&P?}Tta8&vs^sTGF zD~p~bq4RfAHTXVoM07P1cmU3;at>jP!s$!6RTk800NNBs^75;O^+kVBAlDybfG9iu zL4KbdpKbPF)Ewb$Qv#Kx=#QttzoH_7)_=TUntj1!#m{Tl6z#q}8zo3(k zCr@O{8zoe{)pQe*wYlV9NaRUp-OaV*`X+t`}Da2uiyRZN5e1t zdTvsGdviKa`Y%9E_|KP#p1?1_C4GE4e@FTYi2DxlC}7|>Ua=?2lYZ>XJlPFWL#2g! 
z^$|`zUptfOKdXS&^yc&HkVu%%hY^HjT>Z10=mhw|7Sx-!VEZ=xWydjutcn_4d+kD2QO%A%r0ylD}^ZJV=1tkll^3(8b1fW5@^IT40 z6=PeO{k{_AHPVqUAK}31;oQYJ=)KmPlXlO&Qoi|`iA_GQK+qI}6Mj50sRo8{+n_VH>ivQmmT&suL_*DRkmrDQr#%15djv32R6HznBJZ0VMEX`{ z10Yq4c-ON>j<)CH7la&n;>bvv&3PysyNq}yAPQcv?JIj=Fj>;O6cF7Ur%WQ866EE_)?mpq z7xK=F7t1&O67sA=K8%7VS`hatJ`h#l^RpI6E@&1+Or{KfV!c0hE0PyUMGsRnoCG~BX_0N4nL3C??kczej8Nt9Jgih#;GzV8Vx>Ki|ZMu~Co8M)r+2Q6MM3-d4d zEvnZ*1ytWRJ5@q-g4{NR$0DUI=(d`XQ3@j;NNDF_>H}&bO64`O--aK1+F&UA^6dDn zu!Kjwn5gq&CW$u?KtbxLM^9urR=RI|=|XwYf6riHdM)NRn*7Ie<;63;9XPCE=CU23 zVh^jgvRD=gC-TMBZVt@{#2g#lM+|1XUK4i4@210H0-ew|%iP?HK03>LWhQw^RmQEA zjFs(uZd%cZ=9r)}!^3K#l5klBK_f_m#%y9T5=HsFXX3eSbd| zJxOHvLF{);X#LC|hxdtr$V^SR`iG6vT-i~fOVw(WbBkU^93J$NlGexo5JXrfnPz9)X(ciA!?~UR3?qK-fGr!vbmE ziyro*$p`psl>oWMNb5`~YH0YySJKRao#0P>+jG^iMEs|Ft0w$@^;16_W=ltogE$+w z=@;g$91E@Tr0bkLJ1965dKGlcb3wfrl+Av|l=l=#bu+^AFeXmHXE}~BIa?cHTSHwH z^oZ*yXHz5XlTEKomN75FVtWALjLF<&{6DvZ=J}!hN*{}4Ue%ol$t||b=<-Cs z75}6b5{ZFcpw}NOubEJf1RShJyPy?nmM_ek(2zoJtC|bJU>OSrXdL~&bkN%N>=27Y zr(ZM%F?kJ_XXd8LLRPg^?YDYT01NU5%Hr+j&FE8LRwYLnbVm;;XHQI&*AUo zs9ChQN5#;s%t-a#rvC%>iLvpyS56vIgC`@dVfdF>QS7~lMz~Cjuir|rQ8xOV zdLF&kTLH(e(N8V-Oih&r4?-EHgZ-2rb>=0KtGzetn&?_Hh#l2BVSP$^Ilx48?65)1ElwxOqwCC&h~#^i%W&}r(?6G1Q^9z#4_nW*}fE% zYx8F411eZ0p%AzH|hoE+tf}#0yU#KHq%~*&(Uw{`)@X&VpkpD(X_ zh_X};39b{ZB=_5QPJ!2SwNqkDd*WWfY84O>Rf%84jP!G`FL%pQ)y#cvglDw!Oet0G znV6oHa(l|2f5bc;S!p0b!R6*I!T!KAH0tX&r* zzn9?M?5NW}qDonl(?n&o0Et;(byt&i8?Ph0ClWaqm#||dmSz@}hw62fM$8G49%OoZ zq+Ihff6(+fld3^BqK8HD;H}8?>94Pok-B}n6WgO4KAifQiA)J*A^;;d0*Qzd_#;0} zcuoM(ys2W{%|gkS6R)oQm8`YRt2J1p6Z?4*_2Cw$l72;Kz3cRGZA@zdt0+?&8r2lT zJQcMpvubAEd&and^xpLHEpp;VStNdX6|TVrYJNnH>S7kU*hj-oCn>15G!ku|575$v z+E?`hZm0NYZ`qslT6w}_5U;9LM%t+GEVPxX99lu-a1vFOG;S9QCab zay3#&nry&Gq*;`O+q|W-1;-=o-*upT#r}33KUvQbv3Ec_tfSoeL2m+%`2kZOc z$%kHTp_yg)Mb6aS3Bq_;>w3f&^*uM$_F<-ZsHEkFJJ%1UECik3e(gI*fq6Aw(m`4K zd)t;5@Q#nEibP7&7wKi`(G$uWt1QQ06sYLO>k}5u6|*=ckXzyW-}ATWfiw?JNfyxs z<}Nd1v!0UBG7K0G>2PBs{bIn5Wc!*zg&r+Uc<#8iGBiUC#ZSKQ8+9sWFZwem9|5s@ zF;f8TJmBmVOZvX~GakgLC3R5pTkjrHJv&N6_r}WHVyv%}5L`G?N#67ISlnY1&zfL( zb*-yW!ch(9!c{J>Qyw;KmB3O6W(O4Td6_PkJYnL==RG4#S{{%+W>}gU2joSC90>?{ z*O%X*xZ%NWLd_3B8lg`cRGo85mc%G}z?&gOE-EF-eM#MlkRSBj>zVjQ=}mGvdm&j@ zLO2V9UL=8O2jPOoIBWO^!FiR%c<}P{%V5u=QG3k8KI=h@DL*(N<>MEA1LzKC z%s�sJI`GZStTGp4AlKy&<-bt?Ro{&d27aJaQGa@P<5IUiU>Rb}+H%wSrdLBJg-S zqEL&%DNHlya3nX4Qi435QN(Q)NFv`gsI^kF7a9RUv;cgwANq(Geb)JgXZ7PS)xfG; z1CWU<{mXhlg<%_@1^p5COVX`1R^JY4B9z%5tEaLxJu6(mHm2b>GhSw9XO>^^EFm-X ze@)W`IT2C=3Pw=O%=!5$Zo8Lp(zPjGnZqU4gaVFTDP!KFY0xYGJqG^hZJ|4FM!iah zgv-a$&8hE0po0Q#e49#9eZv&UqN6Nji2M^|5afQMy6VKr-1AH)h%c-3k@O~0 z^JLaOv;8@iy~~g!NE?g@aDk~9X@>5W1o?QBQ(G( zIzh4#Uh+KHBd{;^o_)y5&myED0~l(*052*BMU0!Lrx2CRvTy3(WTB$zBEPzm4)1wg zx`gVC_$0KiM3mF=2D75RRw@++LCHe@GbOR`E)I!$&2$9JOc{|o>2S|8HzM(w) zm*E3HMIJMZrxUChy+u^P}A zmP6P8Vy@<(NGlmMW&d7&nR+^TCw_1g-Xx*bJ^Yq9T%$iEgU%`wrX?Kn2LWeMmMZqT zV*&3Ce;qS=XZT>Kp64gO{^}>*EBvRSc&Sbu%9}^DOlqMpD6^Nnmc7)vgC4`fH;57S zz`xhO)i1rZgF(h}V|?Pv%A;CMT5nG0&ePG3$Gu-#-{~aO6Hz;h<%&0hGa%eH17Ddj zFk?i6p^FMU;u1gj`~@l!dOG(?ICbUl4Z%Tkjx)YNvC~h)`n`YhhJ1PthJQSX0N;gd zEOeuUHpNy3_m(?;+G~oI0iW1uB4iggG zhf{qX?<-xH=Dbi>FoviXolOs<<2Je+c|MPNHx^07nHZCCo zw%nafWr$sR%K1{R@XQFVL{{uXBms&|mL6jb_rfZ# zW|88rH(Ve7{cZQ#+ zPsiIQpP#(#-Ntc#KuDPHA%OQ7f!Y=e7{w5pV!C)DJvYumReCmp0Ip=eVFKA}3_=;+ z_ZuKc`s21Sb8AG;kVNETMLyUGNo+Srk95yH!ebBvGROl;bt;G%K5)mk-4;NE>alIlh`BKj#f&9X{P&Ng? 
zlYG8@@WTg``S2oaqtxv+7{$gNWn9UqXwC!LgF?^E?#7eP;+;S)z3oSja}q++mLabu zb>Uf1LV6Pg!H&~&=&fAh84z6vEE)RE1O!pL!2L}uin1>oQHJzy(*ZzL@dn48riOes z8Q;lEDT^EdkwWPy<>WKPbpBX_Yla^T0~oAD85b$a==tOL%Rz@dkzY+t;l&`zx)+b- zrP8L=V)H}q(gX^RcY#c({_f4(QGC8zLeldaw=M8W3$$;9>LDeKK!+kt7L2>bXe6hPM`i5AJfX%9#^Z&_V^X90!1Ulgvt)n>Cp=$+jNB%4~UdY~E{l zzI@yFkwf)_newE=WPKMMX>MhgfC;3Me^C-zTk-C)nx;3dTR~`43irBA;OkHeBCi{V z>F26T`X1qRk?O%VH(oNh@zI{^1}yOKD&o>RmgI`$$#8Rt#}piBQaHF$2H?`}@SDJA zs34>wT*1*e9W~Q1&ezDd&*1$=#lhfj@v04AsYf)tZ*c5BfhJz-OJ&AujTY530IHb? zr;sQf0=D3*iL8k;`}J#8bWBk%e#z728(ykfhV*o`AFn!!g`_ggR;U&2*AN(a-rKZ^ zM+Cv2UhsE^XzTCzC_#bQXZIXD!apE3PIZ(~=fRBHn?Rp>cFL5~L(9l+q40?76oF8Y&GQ>WG8354|f>Wf!yIxnsM-$is&auiFIHl&&admg(Tk(L^ zs1?^n@M44>S42!%^o#KI0Q+lbn-N_?6mR5`&Xy&UC`v1Qs%d1UIuV&ze+ggPMpF)~ zR((YQt2(7$1@Rz(4;w8Np>ez($)l6B3ilohx5#8&3Hz=8XdmMJdGvE78+|?3NI898 zyo=<6<$jZE8q_h$yZA!+=6S_pnT;#3`%ye_bAEC1a}Q`m7*C*;P*B|sG|ntx&{Y@6 z_^KMAhQ%_s=yT6)>Oz#a}-Dp8=P7a5_A#GGFKT) z0jFcY-JV)+KWU$x z;P^SBeLtOrx8Dkg4=ZzCm8f1cjbaTBEmsoJp?$0h;N-7FRtAaf6LW(vI>g z{7?lw>yVoa34;>CI!MsBgGHZx!XJkBpDB-}Y6CFAR1%*RUkKJ_^dP2*7WW>F z;=KVcJyu@KQ2;5!vhvZR4p;$EoGGt|=V<}wyq^Nf7)nZw;gqfJ*{alLHQv3nk~D|c zeYX1oqaN`8zsan4IbFoB{Ro5tCN{kqyaeC_S!iIq^d>C<&@<3!XmZ(QMh4mO{P#<^ zsL@v_m=S9-kawYpBdiPLMBxH#bl@RHd4U-m3YP|m(mkQr@&W* z$>Dt;qmWfzAH34nj~$j?Z9nFtJ%exe_(*g1AaUxej-;1>YA$Hub^~-_p2a-L_u%5A zp46M>4mCOrL)L>*6uBqOnN7$-Wie--_fO-BC-XC48k8SS;fFA53IHj?_-)S<-Ihp6 zB~&)s4Yx!KT>}luCz5=AoXO{%v#znxNu~U4rx3oC2n)zo;DnD8zVo{0yg#_ZSqb5* zL%p}1#Vk0fXuV->KwijI-#gY7Ycu^^Dld30Rc_KR@KiHjX(}{kc*L4`2(b7-Xe*gUPsO$)>O-!5nU;o-ZS8WUvAq#W~4H=vS8GT2n&t%uu1 zslDa3z~PQ|twe*kpbEuk&9@18!PoC(jFY4~jLN%2?5KBX3Xv(dJP@Y`0!5zu-fP2i z8n-(YQnEBRdhnY~;?D5?%O$L82_~hn<+2gkfYuiTi1|ExkiX6^$V(CiGyQ(pZV!kO zd2KW*tfR}Bk5jo_uE)wfRKp@K%sy)CYBoWf!5d3xYv6O(Eh@BkbbTkVLq$TPft->F z5tU{XZDp61IwEdJ`DE4a-oyenk)Nb&OW*{P z80!XUcg=#lCQ7I5`FaehiK)e9vxoV;Qp3djnZtufJ!%SxQ`BMW7}l5wC;-nFMCn+D zAoRaf#O!+~Zwy29W2B{C#_j{0N8K$wEId1}Uo5YAZxQyuU>pup6ng4;4_vD~mZcH^ zN0EzxV}HLq=bK2JXo`n!4lwQ;N6I?kVS`jy-T`L&>c@VEU^@bl83R*WMD{sjTSr2(wVIB(f0e^du zq{{SfvPd3=aKS`R5tsAjs@DSir3s9=NepEef}|e}Ja3-Q@%&|Q4deixy*)_>s6W+nR;k7bfHx~sr4r$+5@^& zBDz%7vFWPh3Bq-EylbO=!+z$1TK$G0Sq%x5d-u;ZtnV=NOUe{!^|W|0L4M~o7F_s| z?-6pc5ZK+l5_yX-JFw-{NU+I>?8y!1v!XYWvW&tp|w0jmVF|(txeuC`tmRktZu;PJyNaeH^w24(D(9q1EU49j^*D zTv|d@AdK`atSfqqXO^AZm*Ax)h$t3kUesyCMSbx?(OuiFz7ZG1%f&~_3qAtiDhSU@ z19A#ai@A*iUVJb>uRG_0EN=U^8Uy-{jKQz6d8NZamN zuaI&w0@#rcO|BP@7EnduQvQh0LWPSWkW!SwGl8byvkF=M5&HavPWS`A=OG=X-}i%R zoq)aDJ~kK1x4d|@u=S~PS-Wf|El}wRI`V^VORBY9k-H53m;jNH8)UBg>2i-9N4-`x zJ4L<_t?g|GfDzi7QTFtc$3F79{qTq7hxUxr_Nkv?)cjoWOYqc0kO}x3{=p+gPbV!P z{TC`)d=%EZFp==Ifq;xM4#Rl=!QzZkHt`f@SrRc;K88mByz(7mF`WUZ(T)+#t@R>8WA+kZ_IlX^oWwee;#DQ3a%0|qkI-r9PB6(NcPL^ zP~-p|2?_AxUFGdK)r1l&4A8e%H8~XH4k0{fHcuc`Mn7`;zxuIvQqvwqODDG3r_$=0 zncK3!h6*owZ81t~M$zxQP;w}Jt!e#=5O=RV!Ffg{Se&lD4Si#1j6U@mwhF7v}Ukl%?(VMwYNZxF;hXkQjz^Ou4$jd>1AXIN6 z1@#Lvm$`Dqe@%`cbV3aEQZ!ru4_;U(y*wY%2SW=} zfvZ7K3Kg+PIx(w3YJ`XYs{WQ&lia9;359n|PUkYrOy&b-Q4VuiYc%XhsK!|Pu_ayp z<-zfOOPJ_`O?TD1vL%_M36;Eb9bIocTofGa5FS?}AUsqSy}#q7eW6TwYM@!?eGQU# zEs)jkmWvmC6d~fO6{5%a^ns=mIFuQ?!~4YXgEjaSFPA5LDKQrG!A`Gv^;v|j+*u&M zv>GO@N9VfD%D!A8WAohk^0X)3**U-Ux~hHZ(cmZx`p+q>M(9lLiD2w>Ew3U?whn5( z1Adsjux3>8VkvnU%VNyJ@0!L}P(%1U?6Jc`D6e~+m$N2FH}9s`iHFV;VYZ=j17GzI z+6Jvy(%vA#XG1~YI`u^KGXdW&T)5~9Un%D=`1z{efz^^b00g+vb{@JUPS0@Dbv&8) zsh?6c(EUarpe!|>M=5GD&ixjZK9luk61bAxY9aBxMo-uzEe2~L3+uD|YUIVa_Ts55 zO%zod-p3|M{1Tx@wY!f!OlkHrsc}#40rH{Lns_Iy#HWFQ9yXFJ!5CG{F?>qQ46zaQ z8LB8a`N(aHk#34tAWE)M*D^JLg2a??xDlydf-7LC-mi)^M+C$K1rq&>Dn1Ru-%HZt 
zgXfeMmM`bSSEQ;=BtNGPbR|V=LeOu@dS-J2h+1BWJT4j%;1K!>1W*XnbL9m;S+!)m zB7)T<&x8DejDl5ym^KhidCu*V_OKPB`I^wo9sc`A&HfMk?_1?h`0wwBuRpg!oXxhG zi)GXAUPvr0xyCRAz`%O2sF3x(Ul1fHp()Sr&;a8&qbK*iRT+&*xx3PeKz*21yr0bi2D1g%LZdym!da*{UZgud*t)`qEGPi4GG>1hl3 z>ljgz{(Om#GWgIh$F>hpC86us^g2PeEGxfmS{@MuMJ-wCbc!!DK2;6PYq ztVCCA491l*YVkl;GA%1-I?P>Z#YCy91Y*4rOPsjcnZWyZNf4&csC0jZ0Um#O zHfeOUjQVHK64-gBp>90aRS!9G@`Ppb>6X8c2VjiNCU+^~=k(niN7TMr*Y zL9y#0Tb(zyy1o-Zyowp}53nVM_y{W{6_^x}+seuEswkBfZL0YVTOD8!K$=+p5e4X0 zR&Ux7SQyszoWpj-{`DM*#(lQ+W_uHO-Ppg)fW*_=n9TG;ZWudB4(#JaHopE8l=!~9 zB$)1geksKW|FJhO1DEZS#k*A+2HyuQD*gfoTH~hBhimYKgThyWxf;9Z7Uo)BcMeP> zD%flET)7^ZI&KOXTH=?%;l<@?-VRf>*Ae`QPx zvL{O)1stH)x z6kRPNBlqgd<>y{>)KrHTS*IX8dzFxfoA>S5HF5!bHF@WtHDbyc|Ch87^iU@Wyh zAA+dt;M2#ns|aKobO$lOK$0j^83n+4k#WV!%r)YPB!(b;!Z!0bnL$$L(S~%MgsR89 ztcI!V66+qkS@v&b3G`R!QHNO0My!0SyyOda^{as+(AIBZt2#wHI$mE3D}m_6K?bcb zEW)2s<->EHExpJ}ySis%D~wE+izAnM9>okMAsWpuaoV9;_|)=C!CrGHya8_2pKyxD zmKSp&hHQj(E5>1Uf6qxyKC8_OyU4~YGZs=>dr&C|PnX`DMqGieJjCB)@6dLV{50oh ztjshoh!uG0z#VmI*zjk7_ci+sO!Q-Z%xZ*euXsZbRBq6wYW$QFFyVSbh-RlzeNARS5i=o z#~=J~^sc9p^kGHjNXNdLx@mQD39ii=8VIXNyLq~%e!YY{RC@2Pu1D0FALMQ(1U)%< z`d=Fa^XT`tugPb*ougNrj`Tnc*LjjhK)K2PZ~TW*JcgN(D4?p5ceM{L5d}o^&W@87 zp6E@Dczy$8vz`A=)2u%K@OwdM-NW)lu|RBapOE zF3m5-$M373=f{@v#gaswM46Rn@NOfXwA361b0ti!vn-5CF)YZ7Smn#tRMg;wQedL& z?8nddP%9BwB&x?g+}Y+ko_)-{TCf-?18Uy+PPM{4o-u03!W8QKr6>%1;#m&=4AT(y zpo6y3B3`(miAxeEGES%mx32Yr@D{Eq{AV$_RI{OGW4cB_DtSBXVWTUBcgFou>`cJj< zkb5J?c9w%Yf@Lez2Knh_l2&hb$D0*K9VxK;{O``T-(&}WFWLcRY9hIbF5Zh`MwxRB zP)cjHsO^-?Mn}aZTBMpq7ZiWj7pT;Y!WRrG3U~eNLc+(gi{%9{vLmSYPCdE|)Gn z62TAr{B**q8Rhv#`x63ei}NYwS9eg4ab&MV_1kz&`N0%{C*@s(mX<$#IocpGxO;MnBxgt%hje?my?sRo|C_jtHUQGVC@6BED67yxb?W;)y{2j?Csw zFGKXR5rTW2q z(pwxs*N-fW`;9T&!7Xz3F%#qo_Tt5Ze{2Y1QPo8(g*l*kA#y#xV_NGN4vzVNwnFb6 zy*>EG_dxW8+aQt{*YkM8r)J@VM}35v#=X9Z1T@mSoBn;#3|K)iKx9FdufCI zlW2?mD~_0hajrREn6OZw+Y!@rN#1?xsXq25iMq=q8j+O*axt--pkV`=JyW9TFa?Om zFbeBS?PPH>TZq*D6;C&Ew0PLc1!Ox(b|Ux#WIF*mqx6gEd60a*a+N}rCTtXGb{@dD zeQp-&q^;|oIdW2gZ&-)Y;K#r7fQatGQqzwCpSC1$&W(237vi#JLqF-#2oo(*40w5}ck{YMdgkyum&A(MA*I#&^ zW>@!X2$m43&z_OtKgEAo#hfWmd!lB_-Zk%FsLA1H^s6O;SSw9^_obvFzoux5zgH8A z0hE!ZZ-~T5A4Vq&z6fiG7Ys-N_Rr@Q<<4Bg4~SzOqaaI3D#=}Ga>*k%*uGfB-iItB zAbk!ft(>WRo?Wu@mqaljCP_`{Rl~979fS-M19SlP( zA0SLDzHb`T@V$X5nD40wLXfRb_2TF~k3g;Hy_Uj}1V#(D_O`!t=1jJxmSWo94NR4+ zaC&%c#v_3|45&4O5gDV^hgy-HUEROXM`h%k*Ni;2m~Zw}IT->q0k{Idovte7y3l=^RJ#s9teixhOl~-O}Ib-zC$=C(b-ue(pP8>FfByQ}rd3GgLgmLR?>OU0&T4 z*=6WIr9Ydn9}(R~lhT|wauM6~q))WbEFhe^%YA#i6U6-3?XL)E*1!=#m~wkruwJ1t z=nn{&1_C|upqDZqA}RMxYQR9M*U~WYVi^l)Jz5eAa3x`J!)o|8KU;&n?+zKwtsLY( zdcCG9(9@rQRgHo)yqwU`I>WY`N)F5i8QuQb66vR01{Dh3j`}EkUwHvk%!K7fWQcP; zWN6~WdFoMMtMcW}vAg{D6sElu#`7hPoac$C`|bIJVAAR17|#;Wq6mn=Q+>?gmgr7; zrlkjfq9zF>dU|zt8AzWeN;brLM1xpKk_j|j_R5G>;)1_Rf{tM~I~p4H2(b%aw;jp@ z51a$#JC9>djdMm8=N5bdqYiZBu#O+L0VRb)5p3+#45o;^tlyll90xw9DvFl2H(vPh63Y&2(-#hw-&f9LneDv<%BRuJE zl8SB=r`d}Nvvjywh@4{Fz9`Y85>ddKMx2La(84uA+y&~Hh9gatO9HERWiS378FmOC z5|sl@0z`$Y4GwV`@3O*$V-J#;@%RwN=uN3;$AwUvHOnk;Mg7AQBZv`&LS8ei3*w}K zr9p=&j(mB5Y+E)(3mCh;-s&4*p2Q|Fu;b1|PRKg&ue+%CAABoz7g=ZM+hho&<|BAZ ztz%cF?S~?0)AQeWK-WAJS)K9|9&sl9usrfF{F7pJrkol6W4T$S)^nKH^O~wOb;d9D zi1{4G5>;z~W{)N``dwglDM%tU$fRy|4#&&K`0Wepawi9H2*dtcFELV`U z);~ReQ%WnKkZWfF-;iKLO{t6P$X~3M<{xBm-WvpF``qFpP}t@xcOZw-HwW)qIbFy~ z45GP}{n)qZ(yv_+nl(o~FypWql^XF!Q+pIHMLc}ivQ9RWr4T|#BG?UoVpsOn6Y409 zQ}I>31qR<5hjMTdR_n`Q?k<((HIII$-1hB}5Da+VOE6?|2EIsTd65WOsPIYzTMoZO z(T9UPPAdbKZ3L*_2d=(vZ2JS*?xu}UG_ZVYmc zDzIv;>Z7+;K+B~0v3MCh1M)bca`TZERXP(2QXVT2O14@A=bk;8ac`v_H-9ZpJ4T{5&XNJcz8wp*1>n 
zP#7~t-kgISq6Y|DAsc*&9VRpEI{?%$ZZ{P#gTohwZFK!CH2j@iwBS6C9OUzlmH8%h zE`<;jewe(V3e$+frtmeUAeQv{tAo$&O!sq)2@UQ$oTRb)tB?D;9rXCUczOQ_WY`_Q zB-Vsm8}X@XdqcS9OCee0&irLbC5*=YaFNkX&HDThU>$`l%x-?4B+}74o|4{N$?)mm zkRVVp{(gkRF!;fX(_f}w6bbUP(+hjre_w>@&hiPxiQh)-!f@-s;YbwF-#VW~ZIo3s z6%P2$VcYhIgW+vi*svaW7`hNiy|fQR=}(mN@FfUBLOnnY9vVee0v;MgeNn(!-6~HY zabKb*o*pT`z%%2fVAh6Z;^?_Rk>7ci6bsaty#g~GUV&`%w2vxOBr5h{hVgAejHnI0%`-fZicvCP!hyXwT_T5fj`d%&oB?o{_21W}py?-@Yp6{KV@GInMdnL{t;l0^|{)rbg*nGqVtTtw_Nrpq$H zkqX~xg+};pYp@KhE${f~nBNsvfEZoq^_r>p3aViX8}22=`~q@j@jB9??SlOHLPT5H zJ@^da?+*@dmn$C0h=>lU)xEMA3oB8jx_&fS8Wv%Ww+u~_s+sq5iSk;3Kwk|cU1wXB zxEdqE_+H=0;?>2n6lw{(Fx1aYPOZ@2s-|4s^i6?qJ{HL?X?g}=TRWTjOZy~JXyaiuLOBSXHT&(nO@~C^)s$(PHIbg2PppsUqvGAO za(bA;qS;LBBXf#{#j>+rPY4mV!4ag_m!XktOA`c1!;bSt%s?eYG{awB1yQfOCIRx? zgxe`UKBof18;Fo^0@Kb!v}9W-5!VyM+_NSP*x$O9!FH-tIg&l)DnrfA)D$RU&2H|MFm@F@Pl5SM!b{tt~bTVj= zF@>h)U{nQsGA_~soLq3zU(V*bWKlDSr04xP6dOy^p@EqqFv6i_MT}YOYjBH!BIKhSjC+!n!Y%IH# z_FyyM7z``u#G8+o6QRp%D_jvFeSh?M+B}#C*xy57KpYeAa5CIy(CHJ%duLpLy1Ctg2D%pU#ia z)4K_YEvUjSHt>*>aq1;&WJRAzXW*9ZiuF1|uoNS#j9FR_|EzRcO8=$*TH2KHLWq(@ zdVypk_D@ZVS;`ay!r^XbCgLX}i)A`iyYf{CRrs|UX&ikq_`PR#jWnn?C*ijP+#YE) zulMm&DDV5Tm&>cCd|y@9{J{g)8h-*+#m9aF!)jyodIK4eQJASdMW;c=1+M|ou-m>c zfQmFhF$qerrz#G-yh>(<9z!{*mrs@_pYnDrHV&^Tu=m4&&u%3T&3-$506-~G&+;Lmljwdi{T%61!_vSlU} zEIB!DGZ9nhZ``6iM}BQ@{}vfMu(V-TU=R`pXz+y4CXQHwY0IG)pBjuYUV>>HiO&nM3VWri#b`uJO<-@ZwI8c3>&6AXRPScUkhCTDMJ|@_r(c z-#z+RFf>y1gL__AEXrlTDG?3o!M=|wNT3$#aLG^l#1m{Ozqlg%(5i=+7GC-v{MoLM z*4`v!@~NC8T%wG=f6yZ+ys;6Qm#nM61dCG>z1(^n+<5AHt9g_^fZH#@v$7?c-+6_-SvD*mkN$(Qeidhl0Wp%s!fXn-I zpU&wPTg&=L(lB>U*D)%XD+V>Hutldi%JaF-5dO zZj3`B9SJD`Mw7U-n%~}t8Wptqy6{|H3l%DtNJ%{Ti_(x9~BG2fc;8~8vrv{U5^>}E|f_TC?{9flI*kExtqpOJs zm`ZP?7Ja9kdGz5CsES&#uYVr}M@B6V_yGvx;De)}^f;f81qG&eqIY$5v^dZ$T!5%M zH5$3-GjEhU2`u<^G+`X%Xd#H5Kk?xBagDA=mSd)zjWjkn`i6w@jq+$T z&6vVoh+u?msG8gKv~yft+24$y$uNV>7FZ+ zX>q29m2cOVQ?_@-`%ekN)eStbFbO7i(OVBO(Ep(vz3tgX+(d4*4-H@g9I8b@N1s|i z7w6_npj*cg@%8H-z&de3?DpKYDm=i6l(enJgFN3x0?5xb8O@pOheP6qh;SUDN_3s` z#ORPHTmNh7%s&bEeuzcbo=o@OoTM=VVLNP9Kg0;}BGTCY!qC4aQ0y-dIH6kevzQMZ zkp2wGf+_UMP}G9|d(obM|Uq|G@?zO+u>aM=ub@@26E#j^}T{_b4FtTAaq> zWwO9xFjGlFqncb7eEBCmcquOc3r(a00u6sv+&7skt*$a}9fNzF1oJ6fhfedT z&%4KEm%s}Eo3-#b^fH*C#J(CM8LOM%lfS}^C)k2zdMZ-&&y-WABbTncbJA~M4G|kZ z2pvaunG?v*t7rtabjrdz>W=-sZ&ho7^sn;Qxj{J2aOk_T%P(^greR*t{2cqDYDe+g ziIjN~D_|fHb?8B2;Y7T{M1+!nP?O~a!jn1w@+tnwOs*`JT4E7$dyf_`M{j#T$l!i; zC&IemeED|pxy-rukG}E^$cCTw-)BJ`_-0g5FfD$i2*OzCX1$Vaz3m^eWJ{h=B#0?Z zqg+J-;+Xd5L{)2|&6lP}2HdkA^?M*3O^SkL{!3 zgpjMPO?-2xC(5Z49vYt3L3oChX!bf@KOo}>9Mo)gW1WEr;#BfPpK+}-dCxsug z)I%FC`XjO$c-CJ0s~}q>l-5RwzrdAO!q{avvcM`+l25pyHXl`DD1Z3iBj_NHKJ=B9 z-nvRa)ekuZ^>(0HmNi8q|6asJ_TR6OUh&_!5)E?i z!9TI_ybl3~_UVr6o1Np{wMfP{69zpo!{WU}X@pUC5Av-q*`l8t{uO0)5B?99RW!VJ zN`#7jRepZTGeE3iTS+&+W(VU}t~ZF3*@gfVtPqNxS`Du%`b3xwf<%5Nol`&BBwBzn z49_tj!lZFm6+iaca+opz{#S^i^Kd@X>eW@wE|=J68ey?jdWaJd(5|=Q#dJU@l}m|< z6c7ha(2Z~`rxH~lyaO9*FSCl|c>9gC07u0+c+YdAZpS-iG!8+8AA`b=i3<<0`1_VW zG_nvUm^J5!ZZ zI}wxV6Lj#ykc*xz7`_*##wW)8W{v$&j6wUy!Cm`I%EYTnM>Sh2Aw=phX_+wu4qu^d z_^l*oq^V~>{PiZ{5vcpbkJw$e1fF=6w49&!p#siv5;GHO=;-I2FVhni0I~Kc<>HCQ zDj-zjPmv9&!!Tq;?Gok5OIY$-R=8$utM!kWT^}4i`e^xe4D4+jyQ5Dm+ZBggIp^Z2 zT3#suN9A-4l8CTNmc*}-cqTecLL{ke{4Gy288EZ~2%}~3e(K?|E z-BZ>A-QGT}=mKO|iBORUnd*Lc+=T3vDyj#6u!Te0cQ#pG3a2qvcIStrDgh0v;C-sE zAAdWbmWfvOgS&oMTr^UyU-?Jyj^O=|ES`%W$vp#St^eHjV^YH1urZ!vjXX8TNQj!M z2H*9N2Zv9WQ>o_JaeJ$q=&rQ)8zm5q3fN#!;7EBgl<1=;brL!)Z3_~G7K+*V zC$)>}NS}Z2pE0Fxm%)33FMV6O-tntM^zBMp8|8?J&LmJx{JlyDb^_mB+i#h*!Q%1@I1d!9`TPc>*#wX7_Y}BlVi${ zjG|snDkpXuHfBE}Lfe 
zg~~lAI&kTCQRSTmCm`4r?*Z}^KSpj`cVopf0}9tEp@1Z7!Kf?l$PGTwCI=`7>Bn3K4( zzC75c!8`swz~KnCNFWwPTBLwfTY^cxRDp2+hG0V|Z#3v8QA35H6F?@|XeO-YDCqQP z*|x8glP9gVsE|;uJ+!Ne$fxreitH zcVC>h+yW9IkIr!8jRSOND;As^Sbniq>$NB(79Gzkbm`4<#aEHn&E9E!#jBJ^S{7L0 zQzswsJe*~!(fTBip5F#(A7b?<5L>|<1amQWx%?d0LWHFiJ{zWJJ&V@VTExjveqh4@vy;wY8_8qGB|nE3DQQ z1+lihu^yexIP4TiN6Gn`B(vZO`80Ouy}_ql*OTvm{LKr))A?T@XZROCE)RdhPe2$8 zol9KH`~PRrwC(u zddExT7KMX}qg1i?QzxJCB^5mCqV*|RihYm=yD?O>EU^6Ug1a^&#F-tH-Im`zb$1=bct|HwI0x*kzS zkwl^UJtwE1ETEvmfo+uOZezAil-ALP+ z8p*yQeu`e_C|)E$sN=xmv_-^CuA9Fhrpa6N9nTNkjTQ5=?`}kd3GNJP3|KuVh0lMd zzNLTD63()vxa641aYiEiGn)Nn?DTuhVVMfBtvu^L`t}0)u$uj*rRUEZVtV}AC>OX2 z1+A7o4rvS<+!0@K(JY6;-h() z3^AHv64z3tKr74$%b}0NL`SDxjV}1q$FOoDsP0{{P+qX7+GPcQQXye+r{Lq|lYWQ+ zBxc~LJm>0TQaff?7`vz$2LIZ2jH#l|hF*++SZhjko>|zZwrW;U4b`gde<Kz^W{{=?B{xv1zUI63K_62YJ6aTNkBb}@_ZP%X0kqK`+84AWXOaKMySsaDRUm((;d9-yvmg=qyzv zg)>J8gODG#V9p1yf9S?Y2PQE_e}gD>Kl}>pZPn8eH#zS3;#>nC`z$VX|LAQ+}ZXiY?1v5TM8Lg_uREJ+5^?a!b+5Dou8F;;d!?fheWHR?A^< zT8KZs?}eWvLf1sJme;9#KL~;g5}n^1y_H?Gt~dNkH_L$^;0;o;9)og#RBq&IV=*o7 zH$8bc5tA!q9J4%59qL{&V8-E`X@ucY=`Bu#w=`C6#c@i2mjb0QcBZ=!gCC4KtbQm3 zVie1PnU+m;ZYr|UQhmT^EgEtqt`i6KYs7D>T-}ZY+{kF@`5vRB#b!O$A)qp`1z-`l z?{03`VUd~K+HZXcILwc}&3m0K3lkGzJ@r%~vMjPVfBn=O<;~NU2t@-+*tw|Ra>=Ul z?8de#%7-ke9->A>bnbFkv`8)S`(5gnbG_6`NRFQ`MN~qD%KBPObXm_$m%vzDlk@EX z-X@*Bm<%I? z0QXUZ=KYi$ckz$`v11;;wqh8>%h8#?(wT6$TmR5e@?+B*rvxF-eiOSVwn@S# z8h0riQbpyMu>D}`Ae>P{vD`R+LjY_y<1a(cyNk5u(-PG2&?*aBS*4+3WYKWn6+=*J zIJwjx`~%7MBo!lIs>6+3K#JTxo;MY;5ukpSe~SL)r&}s3YaRp%wu6Y(5Gq!N0os)Y zv#>zz*eaqHo+~bvBDt2PE2?}H>cYyDa86~Ir)=WKzw>?6O|55Jo%XV;%e#FNq{PrJ z$Gt|G=l;s_?!o4gCHW$cwh>M_p~!v{RK)(Do6n7l8Ze=m&tdi(?xE;@`{r7C0b z_!lvmadp*{&kr{>J2riO9ME>|yGh!!METNdifj5AL6;?=%Y(lsfZ!PiniJLt!KZ7W zarYw#LA?w-SAGu1Zg~_MspYEnHwENwj?wqXH-Su0aZ12xSeh-EJ%Qh+CX@Xf3UY9G zru?NZ3L;nR4^1~#FkJxnwSE1(T%RfX*e|i6iFGv*B`o4qj_xJ|03z@ArnyPg|epU5#C=$R7d!e3T>gjKK-sJ>O*pR9ZMYpKdaNvme zR6vmT{3R4k-L$(Xy+_j2i4q4#(z^dfV9gUZIJ|{gS94iE#5l*ahjR{7I6}4PflTP) zg@#k0(jZ@tere31cDtnjoeg>jafJP-DFvW4ZaE5PNDV+M(9@$hS>F7Gwb^qSrl~Oz zAd1To8}EQgT-hTqyggkVmHdDg@>g2_UgVDaj2wbQ3-*`?hp+IAhUy~tc|3mU zWa>G+HrTvmVew~<83;n>CAsGYRMAQG^lSbp`j;p40kL5!MF#i%5RatUGF7ho-Nh1= zOMt1`Qrro2urI~|ByT;jQBy0&!bm4hloKcX;MKgSofiFblxYP({LbR;H`IGsd`b-x zkQfl`9PfGWNbQ0Bs%elX}&7y342> zB-z)6&-!i|-1XdCE?50SLA~3q9R%hhGFSJ%cw-_Jr_`XB$9!GF(jr>)OL3s%edo2Y zA_C-~fCfHGKoBO%MNgyhHI-c27ov(sB{JUX{mXei5N;SkBt4V)4oWsR!Fm#1P)yMe zkNmO6%BzunC4D>h{8Sv6*rxdBU1TgbMt_5&k&UKGh;-fGIr=gv%JaLVGVUw*hjV^( z*CQmAEHnm9NZ2%fD8fyFsB3F4Hg3iEMZ?P7!!FWi@7+*kDUXH-hmhwd__QH^dd;TRs&55 zxKxq**+@AqB8_2$E3N`BOEMP=MZ6xNfpPpTW;)dFnEEHx?j$=`!g=1oa1^ZABcDvB zt=V$LbG%F2SxZFKF;;8xQz`&&xiK@8av<>yElHxdp7|y2f)xO5i&CrBDtaD7RQ7&?ixb$kNxMvSYDH%`knPi-;XYqCz z##|6~=H|>H+5&?xAw>%d;>U5ln|@J8T>7bi^}qK+9kB2ljRhl$ci4rtZ+J9t-ahHk zER=YA5x3TFU&yuya#nMts^N!05s+{>*pp{{X=2+E{y`^FkQ?nl6f3_;$FIiwCf?hN z3W#~&WMdN+0Deq=5_%;@?tSYPo5xE*)s89@ecL@4)BN{M%qCw2kN^oc>d~7x1a=KA zB=~iBpKy=dDMuezN@P2NX59@T7#u!XUh@*vG(Y>-vt4z@%Kv1G|IqPVGLaqsK$LEI zP!E3l<8PP2I{o-t&o&(zErZV$)i72@dEWt>fx&wJiPbZ3V}rv=X5}lxH;lmCppeAB z6}YKHOcFT6+eh~mk;a?$e9fON(XM!L`#=b9HloLa-8|GI&5C22MPVjwdB|)3KEFRV zuhc)u2Co&TnY?qqM1lz22Zo&RG`b-3moly!pI7Q|G%w!8${Q5qrc}1h1Mk0@FRJCf zq>8*~oV~TneU9P5|J@*@@TEiZ%txZ#@&M9eACklMGy^J8^xIy&(d5iS+Ss$U(&UoX zNDI&7k+9{Z`y!k-40InO0u~X}$jpED-*+)twqC#m2)6 zW9Sj)A$)w>I~s5tba+uhB^v~07jUQ8A^BvNRr;0IEqbii^DIr!iGA*(_pVIx8(87w z8Fzi)Pj8D?Muj1Z_{F|6K<(c`8}Hc~VQNJ>ofwV{;aY?1kCR6vr8sWB@u}0lD8GEx zOIeLnx(E4F!kO5+R@PMA$FT*~a>4rZOp#rfSV&E|w+}bm4hNOTNm}VsF#5hOs2ftD z)?rVSr~Cn&B&?%{gHf=%%W>&Xm-ITEfQ?&aE#ewnacf5i@=(Q~?`m4^=+opE8N*TB 
zi>Q%e0e!5M#^|9JIR^j(&u6J@tcnG}YgX(t|W*t3XYuA_T-Q#z6n_Kxc0QvhP~ zX(GC9Z&wn;vE1KK`0oLIdm8#cOgq>EE9+rLe;7Eo=gYapnQ1Q|Xi^Kp4bGQv;P8Am zn;;nYka!gNtXKOH555NIb4@!Y_7?$!ekvaU6$sJ@JcnUU{}xXmgMX_;78C}Bw(x_^ z4FOUu+qvU8;T4}QFy|^c+XLv^mzG|pA5TxIh+#kkpTc4yjySo>whbd|F)IbralZBxc4A4O;gO;=60J?%V*9dXfP4*tK@#x*7X!zzY2Clt@|82h{f(R%bxMoer#3GMI}#;qGtak@ev!sk-RA1nb$>?n$4D5K zWcP$PQ4QF^UhwWc6#I^tx1(ICYoiZUs6C9Xl@d^<3+1*2{iV{>eXdm1>V;>3aRRFN zVtK=Z6ve&F!n~p>_9|@xD zN)EmnJ;EjunT`Dn1TOUj7&Lk3))DeR2zZJr-J`^3sp(xmD_EU~7j1D;0i?;xK@G^6 z5eJ2;p+On?0mld|H_0a|w|@NFW-s3>v9o6n3ZW`rmy-+HiheS!zV|eZ8Hkf9=$(*Ir#7(p zkgCbZwc0Feo{k!TN`e3#$MG}uJz0%%8-T7Ifgb&Cec?6X0OGAt9mZ?I_ zZ54CnjGZK^Wqq^`NZXTz^uu@!i>4oyB>8tR@_MQ|(=~wrz;+z2#~SV+=sW!Y zo+>_~25W?M4mO*Nk&F|L*5=@bzYmcDW0f8C#><)yk~hjzeoXll5jU0cr6F8|@73ak z03V|6@e>haJ0%z4&@!UtK8`PyIJM|tvAJAGHVed^;>c(h-)LplKqk`$Bo%ZxarUqfd`I+9-5tmiv)&R*m9_m$b5L z$qx1$R13F}Z6KcLM1P76K~S{qXS=t4nMNVJpUVm;@L(mvdtd;T!bc^Q&LXV0J@xj4 zBD{quziG3%*AZW5b3ZSqx}1ca(~(pJzE9PYWkhUDoa?E5)F4&b=k38APt;>4%WHmo zEY{lf`i>tEy@PDyvMTs1-E!0TdesK6E6vLkK!QpG=X^(|Eq=wOh4c+lH^bj9^gNUpoh|Qz3 zUp=-gO}7w9yzC$_A#B~1ZR2g`c~bEVW%gjxs$I4TrJU{%=egsUCs*E7vyG8H^i`)5uSkH#-h_57@CZ|p~Lq8MA$XY6tpF$@*=2rsS zTyKKRV5A^9K^1wNB#;#OVUv0`N0im@GghB-qp{K`Ppwde=w$XB9%|`|?tD$<+PNt1 zzpr`w7Iv~l2}AC_w7|Wi&jxRMpfgj1g`$f4g@RJF^`WQ!i6wqy1;dyhAZOTEo+x&7_6=@stZK8t@BA$e-fxcJhL%b9J1+rN zk?I}^Rr>LEP;&Cg9cUQ2d|9VBnaeV@MP#5kV`R+rqi&{(%sNLRCrw_yVzqWxulVv+ z`;_gl>=a&8d?A)?c5bmuO0H`|lAnq5meI7Vw-0{|^lbJuKNQ2fIxs2Vq&IA7{#gM2!;$Cz(${}7WS;|n043RdKj4!m%Tqql5&b_38Kq{73Vb!yl7wpK zqPXf@2^83`OK?U~ryBu-H%dcniqvdcjdFq!7to0mN(*ZYQ&5Rdy-5}b+Y_Rj?%x7$>oMYi0ck(jr^`Uh zQhD`pD=qL_+c!L$1e%5D^=Ks(S%R#D^7YlwpyC;w@i(c;12>lAS^$oeK6zz%APL}m z64hJm`NhH3wPdlI7iETNtZM3eg<2Lt`KqS%GvUbr&_#)~Mj`ti#thYM`_kkh+pfSM z*`P%Cvr^tk2f#R~WDyGB=#O7G0MnI08EP$e%LwEHF|YMlrN$UCch9RxM|10|-Rc#< zu697L;CKkg0-!kLVe$Y83VjOc76c+*O$-DYBT`+VR*l~9Rpk*28uGb$LMZU%pvHDc zv!B?t(zQ!TrH4cIj%?jc$F-uf?-o-gV97l_8iEh zFJq1Wxq{2L2E&_agtvsh`rLw33sCNqhzax#E~}?h#gN*d#8344QWrEaHBM>x5}n(3?r1-wu#7K-grY` z&|nFCZgH3qNorKp(8YxLhyuE5OV-R3JT%_U*wKHl3|%lPhJT9#%)tI?c%p!ddwcNJ zPybzh`d0Uj1?y%R9rg1FRQ^qhW(S829&11bVgXJ%5bP5C3GY-G1r&cFvXGIh!Qsu) zSOzyBTV=Zi>k>&%z+JF1wB(P|%+dKEOntPx;gR;pIC-R3s9cH)XWSypBcNBUpwe(G z#CB?{%pF7!wRY}ZL~D8Eoe58xiGXd&;+EJg81C%~@w7SCJD`6664xCU9vZ;gFcIjG znIN9|?A=30fv^qGPUIl`Fx0SRa}lxUPVbW*g^s3}Uqv3m0OEWIS4NyI33rl03Uv25*TZVte`6a>x&H6?pK|?Q$tS2uz}W7Ls=}u^LP9${ zK&eF6Iwkjd5du)$R+e!;&O-LPI}zj{$QX-~)w2axEcT8>vr3^)nq6X5J6#IfZ*|Ny zm8IWyyy9;7157l8eZ2TK8>%co7bsFRpG+VU7Ut-xoUyB45L)o*@nUkrzg(Vr)B+@6 z0A+7}sifyI6s6L*(db3lAfe#nIOy-<_0s;v4{(+Pbr?_bg3gH-fgkxR95^UUCGO+E z;^96a@#qagCR+K^AsEzZ7xK?CHR4wbcrcldRPLJ7(jV%;J_vyLqRvsJ5t=MW-Ge;1 z2Ze>_P@yj}3Jzd4*hbk{V5Ss7Z?c@oZiQf8TlFbdBPMs$))!X?#^(~ccA`+?3D-`_J&@apCKZP!V-7H`bYdAygDFDcw00Ox#3=Og=>;-0uIs| zG4kOw?S)*_GK~SC&EX#wG{j&%+%vXKR?Bwji2y*aFtL6-VrXf|f2g9m_r0prwG!%D zoQP8xPh_LX=AI}&^COY#0W?7f?u(dVfekK^6e zSg$lmOn|t-HaR0cUMts1qgipWYHVyvsd=~Zo_i-xlCotHD0S`KIA-&KKh{kJW$^*? 
zi}IugKyd6DlIiJErJcXESXTTtxwD|3fdwK`4S-W*E%^C5G?Z*TkE)WyC^T6B@G=#^j#U&mciGc7PIMw=>{Q-fl0CTWQ zF^q!P1c_A0xgXKyt=0X?K*KcoanxjHw`O(x4(G!DzIfT=ks~e`eg%iX@5M_2nd;yN z>t4e7R1`BYWp2%)!JSGNr3z!LS=Cx(G z(%_b5_MAVWMw7Oko`aPGs(}31EGqoC6?|qvAv9Ji2jBJ^v%_V|m;^qM)+V;V$ReCiJKnqS0y{ z;L?sLD%1(qx?{8ig%>VMv{{z9mvLtNQ5r%_dvggAReEIH!fyk3ir)25E_V699JWxU zT+RxD7#tQWx2qcV7^(}$IZ$IgRU~a@GY}4oh7aKknKu37WkYGyqF-+V9+qb(ria}W z^ehgwL{zjkEkiNpuTa7SS+u2NUDibEc!Lm&ACN){*~VUmrygk;j|4_TNBP6cWjPWr z%6>(SlzVdp@TU5-U?MDBuyCIYNX8A~aW~hueAP+LL(AP&Xsr6nUQqhn_)GJYANXQw zeiujP18lMIBe8*iG&1-?e@1Aiu9zX2X}z;Rn9A+^=pf zvCy^murDw+4bg4*ms4Ye@boWVQJMc*g6YTL=nBgKQ-qrZ#eqyh)UCgXu@@8*O-R63 zuAPk(An-bJN+>36xfa#|LkUUs=!t+h@Z{0YR2$Sk=>-9SZ-Tq>J_JaEcPyAV59oq^ zEbD{+6Vt|5QwnS=rk3%L0mvWrsx#~F!oB`f37TH^WYVaJrXV_u*MJ*g*wDPPazzj3 z3*EQ`#xKNflV~I6W4KCBlve_94b{QMM%Wz0@#0L>CTW=3Q~+mftu^W@?2|ub51t&) zkX8nvx!yS0Xh!e@J7MaNL)gf?G*7TtYY}}5+#`X%q&ga}X$AHl@zN#CnXjCcPABDbiooKA0lE!B1ispA6o$7;sj}4+>+4zV?Fb;4XPW4>k`lGYBzw zdWc#ea?rcsje2i92G5fJK2v`6E9;BrOKe3#gBvT$Fu>=~QR{6ZZ33IOeG#@!I7Jl) z=K;%a6$-&mC8J*rx54Ek?uaiiVNHreEi%XFX8cU@`teY+c!3iMwir!Beq|h>8%zrS zqhq}tb|w7Oh)sFO$7dd+vcc;D#z(REn;4|QQS{ge9DP3=!NJo+2Y+D&P|jeQ{TTHxv^KSlBM(OnxeQ@n$Q>65|jlPU`q)086xdx2L1qk318iEr&BM& zY>=Uyj97q1#sT18)|V0z>$ZB_kX9g-m+K8rWH0boRGddlOHDBMcH(Rv9~x09apDHu zAVHN@gZNG|0xJyE~R z12@e|7}PD@SL=pAR%~+cE@=qzEF4zE6`D>`2wDXPeD4*#aS7#EE?ry+>E;|=4Zo#F z#{Qbc1O**&)61PY^1>)*5P=xgDQ-0`M6XQclTXs)X6&4&Nf_7N^_Aq=s_^&g+t5^a znZ~eum-fJ>0g$;VGVQTUhj49#LZYrs_-?;HSegi48*z|a#KwuVAI zp>MuPqS><~4i2?0irCZQHfBjU2P`*H6m)!?Ag9X%c-sdbdkk~LbiLw?fPz?{%t7}h zJiG5ZIQ+}B*J#yv1Y4D$Kj=u5y;z#oYo}84c|gDjc`J6D9I$eGJyd#MGZH$C>Z9qovFB4< zF}%@Yfwk^(kNs;W)i!9ayd6~Al-hWUkFiU=g0zVc5RJwO-JAx~OPE^6*+sQc&|K$X&qrE9A6dcFh zuZb<9mL?bd8d@NPrCl#V6J4M@*6aS!Tk7h0@95*fcb2J{G<|!7iW)@&ms7?GR+h>Z z`^%RS+@_|99}2=_l@*okS}GZVBK2h|6d&hPxgAeSYr7^q`o`1cmv5$RzXRuUF79&} zLV?S_#bEH}N880zAKa>Z>H_}0{(Zmx`wKtp$?|hA*i31iX8>iJVgBehSUetNJ)k`f zag-f9o#_q_{HFz;EcIcS`|T4)^BBax}x043XtLY;^<3{2Tay%ip&W*pZoq0 z&RvdQYn!VCLe!FMR{w%CVtqHV;)0oUd^f)Oc`!;|lYQ0)A7*1eLc4frw3rMW;E;9H zR@Lk=l!W!gw6LSn2A&xu9l2<}T#$!j_0cIofw2OzadUH{Y()yJHu~)E(h zUDr^a#Sg~<2u8dgpci5=w0R*Xh)rif0WCjQrYs8J$_aqs7zd%32;Hsh}Q{AdL7SmG5qA! 
zn4~k$mY@4WazR8$p||qP5aeJphkAubdGKXw=^!f`9QxrUBXP*13HDdBv!!C?=SOl` zM}h2k0y&J%MBqKv((*kaCzN|`K2=D z@eyFz4AW0rW=y3N{&WW*YydPDWx#YraQ=K=Key(pb9)_k^H_do3<{)0!nD(VvRk(@ zqDIbglWHyaE0&`sh_$Z_RDSw-AhE=(EE*$wd)ojvjz_Jq!#Yy zcWapw0G;)8s`{$Y9Vg18;iSoL+72;9*HHH!@zvg9nYI|A=BeHBonYM=^AG_HDTmW2 zEZkgGBx5glf2aza@*CdtgV1;xJC9hA*MqnR@yqZz=CSf(TnfE4jk!BpGy8oA{hIxO z2y}IEQ7`S9GIL!X+cPpayp%M75ZFrNhDw0lq^WVJY<2W?_M?Ef1AneHY0Q>;gFndL zsFvtm@c6P|h@0xI-*^TIIBX;sh-Hn(lZ4YJz+c>v8HAU@Y49rNHml$p5{?7sxQs9I+oNP47T zfN6Hs?fC=BrZPl0IQ&`p+0XnqQxuI%^!(~9Dm)oj$I%Q#f0B%J5}}|=QB%yR3RxB9 zdj2aF?y&I{rFIwp-NTa6VGFQSk*BUAW zUfl^Ry2-NSoiGUrhW~~lXUxyZ$qHS&?-tqh!C}J4h76Q#<%oYxSd8e2r&l*8SmF;C zrwXQOd`OI*-Qky~PCiAJ?icf~yYUi#?CAFdro&ypIH1OW)#+tte1Y4N!jZ$$$zCQF_NQX$!Jbn(Z zvt@C{GyPmP=HZk}P$Ic?;+_YM_F#Scn~^@bKlp9{<^WjT;5|HCDouTZ{d(L)tdJc+ z*-_Vq0Vb0WGC*2$x1yG2AX6A4_n;x;MUI{GA)`-2aUzJe~6wA-hHw>|ZgN`~`` zI!hV}apxYyNwFhLa4N;hp!a&;;{t+)B}z!!at8ubyqBHga>@BYI zE+ZZWhn(mJkVnv_-1aFV*sW&V+DXxwaEzd|DC13RhLps0#%CcPMP@Ubcp205a&sDO zqOGfU(Fn_lH}LeE75m`mL$&W^Lu6~92ls)x@aBEGh2A?hmtWUOyx+~`Cz{x-n*WRP zs%3Yf1RpRX*t+K7DyzC4sHjs^H0LMdO^Tf2-IrhWe30ediqKh?&BP&RnS-l^1M~xm z87*+jlss7GSzsKbKX`!=(EF154@4a68%z_M3kTwt<>y`)aYhIN=4;8ATfWhpT~tcU zPUI6%2|w;vczNd+CydYg3E0R(of#>YaxpeV|BKf`?yn^Qc@b)n1gk4wO9a=0>Rm*O zj0MU0IXt#*)ZRISm1r9b=-rRChQz4Yz?w)nGMy1^8xCSGB9z1?^pf^6^G`{k_`Ut8 z!si}DhDIbhQAarp9WIB?>EO{cmLYc*Zcl~NE^Xh8MFMMcRAE!^557&So4$qw5?J@J z{+IcuEa%;*#3Fo`(Yp~~D>2gzl{YQ~fgKr`tZo=TD8B3YOxQ)s`RYVN|E#pYRFfSQ zP670EDooB&K$@sFuI}5f;3#?lMk{!Bb>-MED0ijj8D?TL3mi{6r9K>d@6~2YQiLBz z9`re&iX-sMIk0z)ig*jQO!lC1BZN$h!sEe&y6TJWb`Vv+cuQse5wABP5O8LM@<&AhQ+=*+bCA6I{#BH8^}E zJzLo>g!0<6q4voXL@Tq%c*v;}#B_A@UTn@onN7kh4hv~K!jVZKUH!G;N%=kht?kYm znst)Qn8nIv=AL5H^{*iSv7AB?b~<(Nu{YJDnQOdcQfVr*Hr?oYB_uPjT7aK6Oa2l92WF6(UyDv=qt~s4wqw{wV!>Y zJmZOyOEi>ge_t#U6~phfd@&=`5B_5reBhhQS-X{LS=r{}jo~kpbw4p0qsnInN?v)b zyb^rCxKXvbtbvnWS|zX&wmg(<>$~`+6{eF14(H&f^42~$`h4(v-wXL7RJ}&<-0#q4 zY1lW%>q=YNI2rz37H$%*S-s))zRV5?Xp^?^^mcmlY7tg3hrS1Rzz+W&Sij-M&T5hF zG*R5FDiY75LYs7I^iv5mFO=Zy_SP&_xE?XoGuQnUDsW@RH#)k9zu@8Eh5&Lmf zlAo|Yam<~V_(`XSo{n$)$yB1q8|RCk2k;Un6ts2qIDe0JzS$M6Sp3!r=LbV#ws+2Fk_e9BnJocM_ap(ACpM7SsOuGbK{nb~ zedEpY#+xx&axCZ{j$<+S(&AWI>PLgsiW>-BobnRj8%yID-E@?29nuF{0<)o#XY$m^ z$2si%0J!n{tRC<(#hqhF_i0J^2dS{nj_!DBL9agU2Y9TUIq{5NjFtKQ(A9831XU(M z_pQW!5W#A!MRGG0x9kV5HI7#zL;^~6fo1{M8$V=~=uf%9@m7gnFsHEJe&`O`S^P9fpzlvWaAzj!m4p1 zABOt0{uLkkoy*)+($=zb&pyiOfDM6BBL)Gl8Fh3z)v!DKPB3?e!9F%OBl6R{&=n5; zjcS6eu2?2;5ZQ6q=@%$8*$T7Zg>u2O!STu6y^uJ1+i?d0++W|16`NbCpl@Os{x+GJ z%vLKOy%pRH_|Oj(J$Zwn)+)2Z0_eSMLzTh;Yhks+>j{;sIb4GoeErqk ze4Dc9uZ420|FI}PTYmkxANBbx0-3AXN`HyIm|+mAZZrDSWpnlzVT&MwJgW;^C+H**AoHo;H@(F)W2sJVr1py(`51=jy~~Z zj|0@SP@lyd8mDA38~&+hP0gF7!>jCcsW~DKNntlwaeNhsDwpt^2tWFsLS`t(=jC|t z8;VveuS%x1k8=RpGSXVrZYfjao*ZKO z?PacVO8u2{0{{Le{hQm^^H2ZwSV+%B@Cr!piD#ejT34TR`!9S06$Cu>FZ|-aCGSjECyd;d56?=%&AVm3?S@m(%|G;6zN_8&AJ~0C4RBK4Mx>M z*tVSV^+q8<0n_RoKUIQh{Cv6O5vWgOWUi^@Tf323OefEcOdQEguTisRS#-WH^)oSs zixTl}!6s=Fl4uj45FGLT^$+)A0ofif`M>B#x`jzFok|&#b+yP*DIWHlcv%XB6!mUr6mrdl~4azwnRG z61paJngv9vGWM-$TN>b5oefPv$AUsregO6347nAF-NeF#KLc)+s<|dg;qUiqk8F^$ z-;1A8g7O)R4ffc`rF;O}nEDjp8P)uX>2EIse5B=S+rE_ii)?9Y?Vl1Dh=ed> zCYXl-lN?1n>ZbBYWh#J>@Jn1>D$W8_w?>T}PGaOXNvR|+$nt$viB-HHKl);D`x;=EZW!-(&ulZviO`+R26LKw21xkv$WYRT!8Y zd49UV;jhbUule#6ONIWQ+AAYlk=Gofdr@z3VPtA{(Tkx_E$OZJ?e&xhm1wfWAp;|w zSBTY;(>^&uX+3@BEL%^OzwxCYgSS z*TO~zu8^#sozJDHsRuPYIaqvO`L)N(Yhm9(0P;~ey6Z_NYFj;oCN@c#2&P=4)won! 
zuZgYuq<^j^dm~DS`bl3bFL(m3QSIa*2wr>8KF|7EoSm>>uqeJF5x=rTLN5+eL<&T& zKPCYRnv~v=)~W<52<9D7g1m1e-ubO-0)KJnu5PSA`QiJbBH(8J+ID}9knDEkqFJvD zqdNm42r~WUMu#ehYN$>aIib;hfuM!;(n$yr%%{D{3PC2$$4wU}>UFXv=qMmScXd3G*cU3}}+ zN)zDYMxLYiXDG#5<#&FP2~GA&;~@?y(0q{#Sl|7n5XDLc#K9ep&Lsnpj`R<{v49Zl z$@1hy88fJ-ba>SoGy912)MUK{)ZmjwZ50wto!dqfPSG!vNWy1nW`3hi!Syc>@J%T6 zSeTm*5aM~5**&|+w(#50hY@`C@1V)>Xv zVu`JfSrASLMS18ZiyO8Dd8D}Lkj6o@Xm5LTbE<|!^I3N*o?bhE31=L=!SGYRA3)Mr zTQ_;8E3M}|+uXlZobEEo5w>S@MOiM)ScN%e)Zn@7R5M*(ifP10#%%W!6SPh?18994 zZ9a00X{TWw(3!p+Tbpv8sTaN6d)G_gzAB|LR5!P_8l(y-ILML78+kmVWUlcJ{KO0D zWTgnX#jh(4P)U>LQ}%^aov(n2`t~uB#+^PYt>nzUd-R2HWkM)@>-)%V1zm9^y5o){ z&kKAzSv)29m>zB~G)LBL*q(q@X4Nx-K@xEb5FSP$yn~2Rm(VCje$ryFw)RbCeiW z9oeg*S8aQ&ZJB8P5~n=2?v+NDOyNMxEi8Iw(Ipu04gKgje$QQ0X}X^%ccLe5*Ye{Q-ifi(bpvbJ`K^Fv!7`(MrWDiRs_MPk8WY zAmq*LE=&{{!@Qs%K$-t2K^vc+{s(OCIa7Mt(U`kBzeQa^iH@$U5hDSA&UJqR3Zh777_joyd+JB#EI25N#I2kV{ zu@1U&uR;IgD-puZ@dF(9yaW92+|Ru2i={X2kJ&TWM2K$q)psa^{I+J#K2j3x&e6NR z?kh@Jd9%s59`e@s%4gpAb@}-l-mz$)s+pDZ_JdJa7d5YwvUNj25|r57@QsAL{?ISz z3Dq~wv z^utot%sluI?`rUcKLnIM(Pl4xmI>1?mBKigxY-+iuH4s6fj651fd(NIZskRE-2t&HRDRdLoB0Bmo_gPIGYhi*nj?1U5~0Z2uM>c34pO zaRVe<>9;cyu7G8z!qz>z&Gq5#&`fu&OKly>@ZV}TrVM`o{ zz{3a>MArxf@M=k7GdC<6OpGP0ZfzVmK94^1Md3fj(CI8tg>U;^3bYLo|N51J%A~Wl8S`szByzNhR z%8k9iJ*z#JQe6H}m8j4A;jUvCHF>BvsH(`p5jVfSUT^8a0o4H+>MJSM8)8O&TR0%k ziHOX+mA~Dt z4NIOvw+_YJh&sU%$ZayC*2U_O7&iNLSGtpFsE0q^^W+Tc5X`7t6u{u{p_Y)m3MVQ(qlAJ!z< z)xJpJ@*((Y^GGBKLfsM(634~$NHWw~F^)$M4|itBdgN(6FU$s^431wg24S<}-DV_V zrJq^I5#VdYRw#}1HF2)Qy4tFSci|v-SG-_99R1*h(klx1EU4F3Z&k)g4U)S z4vS-(t-Kx?&)3S!C#*J=JGPzV26ni+&PH~=PYs-C&o z10&P)Fh`(0??j#f@_Glr_qMwUDj-5bg2G3~EV9)ERKfXWYLb`>_(oq0Nh~9QqqL)( zR*kBDULvXQIVPw+tXS}L*6s({dJHU0fmSVNaw+9w*{Rbq0jFP-c!xN{lVvhcP+%vQX%cfV%kX&`V}piNq>5c6hKzc#Git% z+SrchL%+7i3cIX-(9u)*X$9cGDY7b-h1IzmV=T4TM7i}1LEL={`H{0R%d*b96oobC zHW9{%R;Xf`t)Ro!vLV-AN1UA8&v@~Xhr{u4pJgBr zsfw>VmE-4*>f(ge{K#V(rzx7pG*`~~&)1V1nMly`{u7~A|C|a1QuFZtDT2y{qf+?Gc|u%HBcxhuBt|uFo#tZ=-vUFXX zDwC1ag5L@i6{dU5_Mn_A6|<$92+S@*4!mldpuFe#>CAARDUZD2x2MVt|M_AW@mdj` zkL?XYW-GP=e)@K1G#|VJA4dF!pq<4>8|y3&ma0^HXw7F-m0l`h&MX@9JulhwX3s^d{?1YtmH zMKxmm!opB#M=r1RRxM4G_3IA7c=0xNB-%jAiKru;{v;+K6(zq(EyXj)eGqE(JGO*n z-y9tTw1Cp!-qI|kN68IE-7LkcZ+l5FoL=?JB-hjAG=0>i?l8){VoS4SeeHR_VsNx2 z@+$?Y%ciGqfQx|AZwfAb`o8BTz}O*q4v4>DREdlNMh$BKsZ;js#QQg_8qMD}j1>{rEwOqny(Tyj~O-tyAP1_G|Wsdfye)lrV4Mrl$ z$c`#N$p?!v!)@{W>eJp+<=_cBdgXPEQ{h3{EJlDBRPF)Kj3nNsyxI^83Q`H;hTpe@ zRZnco4@D3$+z}>1G@iD|In;I_Gem%Xe&!Q2S9;U_bcI;L(1WUt5JHdaP27OW3sRQL zI!4-M<$#0Uz#{ugWjc1@&M9@%*pjF3j(_Ke$Pn(@`(?lGIs6iki71gAG^U(N0V6bn zWe$B@vS*5?_WR$Imd$?fiu}Uk@LI&EF+5O|$I4gPl0XeDthTp@05P;-$%~q>%Qf9& zDoOVSa(EWY+-1L%m0ZSmyr0!D+ZocjI21g(A zrw7>Vx#(f2XFnXn z^YoF;ne}8=fS>9q@>)2~4{tNT#g!ZLBm0VRC| zeFKy)LzRU)E{f=^m{fFCA`7+L=|?`qyKQ#yUiX5M(>n}3 zx=tS2GZ48<+s9Jj`P7KjamMnB83|g4rie2Lhb`B<57WLdSnJrq`%c}!6T*aXOTlIJ zYv$Mzy?H$wvu={Lj-o5=RaY#OH_ETSuZ@h0nil6KcZI*9YY86ijwqZ;BXL7EV@!Ju zvO+(7opKH@)8&@9UH~G`jv5akM!i&Ya@j~c>H>R7T+i>tV|{~2wZ9+rHk|{lz?H1V zL-fEVeCWL+zW`Oq1k;-z5bup6j*NIfpSsLJqzl(a{fZ^&u8bJx z2XKv~(642-^lPm7oGqe5rT=+4ca;nWu@pAoQ)3X`E=^1evYsx&xK=>t+(STP7SeT!xlM9xjS zfNRu=)&PDk`Xb@YLBkt#8*DnbH(*gIEid@TS4&g#r7D~6JYmjsz{AB}2sfTrOP2C# zjud@7+O2JG{8{e&NM_Fn+^?XSN9guK!QMuSk_=wGzP-XnyyA)e2-5Ze4cy1Uz_STK z1xO6Pog@NHWj^U|+MU2~Xu>|grqJ&5?$dB|=X;$IY;mAGpxc>sDRZ?0hmNz=IVK1bKhNYORGOPP_ zhy6k7^2%A88N>bS=qj=}wB*Zz0W2ossDWZeQ=Ehf=!_c6I?=e`5RG#x>X8A}_kJST z2P21G=f?Uw$Hpj!t-MT9;k#R9WO{3&YyqWr{xvo#Xlo2VLk1!Hv|6x zV(_ReQV{cSuoD&a)!d^yeV2~&A}jA2cfRm3+mXz|i{x~u#>l(Yho+`xq`hmazz!$8 zpMH-{q3qQR*Waaq{AobXICCrs#%q2+Db1#|l}GGWY`0zINwq?kFc=*!aQ%7ZlrmtD 
zlN-Xz@K=9T9!UokS)+q}7krJZ0TJ4cgMHKgrtxUg_Oqxl-1u9h>340>1HK6{k5E)9 zPso^^?iSF26N{hZoTFH2XJQMHDJ3LG8m4&2I7CG2Ln6WDOqzlruq;C* zDcLehorJgRfCyRx*;xa9yG)yCuLLSjR|r`|dTDrxFYXUj+_$jh!7YqR6frHA1OJ-% zU$}*$`J`?q;B7<Sz_-vVy^; zq{E!*YVCd^-Zh`4$eqg0y%2=9zbN1T$`hB|6YM7vM}E%}Lj4b|DlV2NaKX)kf-2Qx zB9-d(0gwB`))p9X~yXqO6 zTya2%#^}e|tiYPW?c*zw>L?rK3G$fuh202LFL-2zYyq z1=e=j{&3BQWSdT?TF{TagpeZ+0yZ2*%h6Sv2Mb`?6tZTWDzO;O(2pnDP5DPNQr4d* zgp^85c0EY$oPOYgOtheiSM19RC4xqj>2HMlNznCVGb9PS7dy?f)vX%4b@rjN<$*i| zVq`cbIExt8;9E~pO9|j}&*hkFZcP*!;MQ&rXswHAWo43Y!g&ztJjcX!z-n2{Uk^P`_gX>_z3pu>rB0})@ z6c-C^vpF~b6Ii1F{&rwg!mq9`K;Y}mglF{EqKmzI`rhf6KIKa-HP39BTFrNN%(k_8 zlNbJk5W!8~P0W$@WA)alboiE0JODTfKljTn`uMTMgKkNDXb?jvhTb!#wfJ^7=y- zoJB#~(uscu{^8R%Ly{3;vYe_HKYhcWplt_P@OXShd_It=n^u6kAL%a+EDh4p}6Tbj|Tq-1e;2piE)GD@k*PKV|jUDmxJw zg&pz`X^!m?EF5|mVBJrY5I~jri)jA`+Q^FO*|OjtUnnP@nc!QTow$ls#n4(F^U%XL z-9Q0XOt80>`mI4Gvv#0IZhS*-VZmquBCisKWv|N;n|?_+X4@h)DTFgh1dL>$r?s|C z){-&OWX;oBUs=Ux4|$P3y{j1bm*4-&+b7!;0r4lsw|_}+-mieEV~YC~UUu2q$yaMM zEy?pk?$RrzNQNSo`PS#l%0<6oH#sq>UAVBTwd!TUu@8t*wUrpMY^iAOyI#NmQeo#W zBt%HTo*yoASqP1sASHp@A)hj8)%VFCR^dacPH}^UqoWP(O+Vu)k~PlgJ((_8A9E5z z^_apC^>6ybWw5oH_7Ko)RyEuto@5ACwsKl4#3qGxSjNB4IUJE?2?E!jo-MIOdzj%z z0anL^61_jkaYWMthZ!uZDsh)K{0FLz#_zY@SgjHw>haeEPkmzqpGQ~&1KlWpK9ZOH`b*Nz=1Out@Po%Lg9Yi8ZwO<7sHXxV|7aMI$e z-$ivn1Q>)|72DZylt$MS!Rd$I((CXRk)ZWUvX6xO!W#!m^iA0pn8>FaUx%G0(|O!< z%^-X;#A;C*CIZe))HogE3&U#|fJI3h@7)wd-Ix7h9$gjf9#+W`drqub1FX?4_37!y zUL+OYW)1|rJ~XPpMLZ`~SjAU9m`ZQ7NRvlsoa?^{lZQf!t*u01Ap;O~@V7W2cTPVQ zhVJeu`pi1A1}PB6s4Bixf{B!0M**qKDf0QAw?cTa2r$|`84TNG%s5G{VqAfM+%vDf zWXS6_7mxp2Epqx|FP=U!P$26po;eLv^Y+q z;|R)d8|gwlj#_e>|J*I(Fnf3iyy)zjPPMm)n+E>E{jE`ec8Rx~eypgAteECS$~^9M z!4F9xW5YmIL!;`U5)x6)doc7G(f%aldld#_xNv6+H%)M_XpO$DXmu zd9%SGk+uUrvMb_{cJWu^LPj2BxmPab30PdARe)cz)ii|t+zYa_vJSRkWif!S=gUve z*zlOm?z9&n4w#;8_4J+b=k&T!%a6A$8{9Sf!4K0_CP!HY{$PZUU|R(|?N5zwKTw|X z@M^F7%%h7`EbTF{em+o-q`k=VWIPf+~{u`bxq zTqMBO+;yXdfMxY&;Y9l*M4ti50ItZ7a5d3Nc@U5*?&0(t2x(JH=Vjs*;qklUkGY+` zIsM3UTHqu8nPm9Y*X_donS!hzZt;KL>whJ}loPDb8340h*NaPKxo^23@VQYM*oT(0 zED^SgJTqLY!_g!aYhee;r(6R1G=hUvP}0%jeT#C2LFN6*zz>S&6;l4*57HkUH@X$$ zt?WpFyhB7D)KFa6+q=0FEIP(3VAFOGY%g3Qk@Bk5^aO1^Z28@ASKOZ!>#6U;u?K{S zEncaGnfwfD5cUp|;d!HgNZmhD+z{#TC&}K**BI>nACv8`vsP8}q_vtFmTMtC)vqRP zs{c@bzFYqtXN1c*tTi(H5eQNQb+C8fLyT^|a)o+0k5s%6p%nK3*

  • i9M4?noMU0AdU7_{u!We@o zMilX7dM9qJ`Es?PtAtUHZ-^`#`P+t64OGwZXlKV00R=lcq0xxfR?T}dGQj1CGX=!{JyaeH>*KL<=CORxeA(N$ z;J1xVN|W)v1Z?Nppvkywccm9qh&(n?E zSjJ7<%xuFhkW6KF{H-%1$_%;UXKys#wI0>+8GU^Erlk~+!xh-y$k=+4z-fT5MA%LX zKSHED1D@2Cw0m`t>OmydI!Cn&aQG20X>%?p)DJ&`f+n?Z6LXrM+Mk^kr)28C8VRUi z`-To~cD0`wKW=!X>St%HThEji{o_)y$h=ryJnJ138H*k3hk8Dz9a7F0GP{zQITMa zjbA-v6+L*;q%sa}1-h587zLq%0pC4+%TEYC5HjBNihK9T6qvm)3LfgaWWk$kV&&KapcW2YKq9Pf3501etL=z^|jrtT_bW|SUsdF3#pp{SJRvMD$<4j)#FXkmx^mG1VZ$82h%Vlca zB8VRsc^U$e;q;ISruSKsyu&=`pVUfk&!6fB0=@pC?ai{|pW)RDnhWe%+Be}$hZJz` z>67J!C1HZ*t?@k%Sc;rhrGuE18l8D&YKkDdxH#6jFoWNG_WXI$ zX1N3cIi?svfDW~H^y?rHP}^ZMCz~b-a}#$``AK0UxMU^357)vdg}aR zWJq#bg&-iX?r*`1{m1hCuRY?5#fTi(0<^)ExFA2SNIhC9-+w??==`#?4?}+Cch_Dj z_(_qNTVGq4FZ0QT_6*ku+vzobl`u%2J8!f@l;)!e*(JfBYS`t`ct7dNRTafK+b2SMNsY_58s)o~ZvP7KwxB z!>;%@DJdB@-YC`L)GlRy6mVp$;=@1&b^Qu-MR{7%k{}@RfxWG^NfpYbR-;z-N{cAX zz7Iljo_wsl{9_4`2C*jIU>5hu!`EZvE{ z=?jf`5XlJ_t!BQs`8^*c4E7pdtFLR0de5ybjG-{>iQZvh$o}=N!5lHDPMUvlpj4xl zCRXO%(~rdr@ecmjUCRl<@VJz#spoMBzpc_nuL-VWne##jinF?uuYOei>QO(pV}+-L z$@^Ey_sNTn4*Xr@IQt8JKS83dG$El8x*Fn&8PJtU|U#sh}Xqey~8VTV(nv2PY6mpmsMSxel$e>oTKn&Neq!H z3MY$ex%T%~yeV3!pBK)~V}&vFz&~#Q*39)M7BTdCa#ps`RxKIL>knn^OrIfuqxYEB z$!QV5_lL0naq$R@djfM*v`33j?lS^g#KkQn)1vagMfJNct(0e)bGu0fNL0j@Kx8P5 zHjBr?euWL=yxG89DVc$z;x4{@K*YMqdoC1QH9uL_c;ofYm%hi?bueuMUW$PbuZvLO z4uyUoOak>gqA~~_*;OTYp*k}!S`AP$i?f>d69&I1;t?0`5WIqM1Jg%4ZVUNK#8D%% zefIo)3@^k8b_b!0F`@BuX>#IWpuMp`BP5O4gwdbFc)Aape>IsreE|xUmI#@~y``t` z;$rp$Sxf5npO>>g^`nUS9&aa<0mnVkB^1b%!w7H^9YZ&HC!PgCc*~osI~ezX>&43= zUMx!+l8#kRc-0fAVOXp=5wUv$1h#JqAK(vRqkKa4X#9nr0cjylvl!n!a^P6MqWs3+ zph{!v1CrYkf*f@)MGcPM_t&0mlJntggu=r;TUz&|s@(hQ6(8e#`EOLFJ@SChA_$PH zCruHBK5#+PhiJ`uemF_rh2W%w_h($T9l;eQV29U z)mh~-DBKo?@0H!A%BQzfR{Sk!^qxP1cl<^t(Acl5Bmi5hL=6e8 z!Y-5urz;*3i8(^AnV7-rA5%yKc%tj`y_f7YMBXa zqpXo^G`~iaWik4vNLvo~2!ZExgk2a0@SRqDkdq4+?p@#9Nv<92EW9+GV2`Wzt~~D< z%_aXiI7$(}(W;fg1_6}hnv&>d(_LMD-grvIc|9)L_QAjfH#Xk8%JXEha?ERX)~^P#lUa+YsH5}D_<8`Eqpi3OAs>yW)4{xDBRf;hpYc*dyh>=!Gm{xTdn49Uh{lKW zom76r4J6`;SXM$8+QIF(KX5U-$`|j{$>A_RnB(PpRw(!pSuOwf<$;zCAKx^VnaDVpjId zm%yVx8<=s$X+{dwZV9;8)>y3N`ZzU z#Y%VAJf26%l{RJNiWn$-&Rvx&eo;7?e%JTYQ>E}*veed!qYHFEEYe-!Ki0uojYV7S zHLN02d}>9CoC{b(6-73XXz&IXjF!3ZkUO`j&k8g{4+^`aOk*O z6nD(&!y$G6K^e!$2a6mfLXAhCIYDjY`fADm2#Dhd+$mNZ$PMnmnGzKv6%YgZNqFcF zt-LVbFj`Y;c9a#^8Qlm)BN^pGRaCv^%oRaEm4sU{IwW$J%$Wd?2n+3I`Ep0);?JKj zNDx%pw;FT;F3o$#a%|r7NS7_j;?NJr5$YLVFWNL z{M?8~s%D>-M#(5~Ktw5GFe7}t-Ny5r1k`BO)_eh-M3C6f)TEs{qFtC%ed4i(emj1r z8sh&bB5z!l{}5I#uRi0?zvI7+5sxG9L%Uiot*t+2B?cvA2|a_(;zZK-&|A4cS{~}5 zNa0L@yRR3MS8fAE50g1*89Bsuh)_vT1wIqqw7iaxA_HBAS^~?sS5TrDJaf9Fzh50@Olc+=fozE{W zEXUW;-#00FAAB^sXA3+|&fjycG#*W*GaEw=r&eg4e&K}%1wGg-D}A3)I32l8idb$v zfP4Qq7A4R)cm(8#><~}`zJo4_)1{_5K%5m>izXcy@(P_DZ;vef7ZRxtgy|pxPI~GF zdlrlstSPspyfuz!6a6Mbiu-cJ?cfDNEU5(g!t++)#PF6AFTH~Ui97xeVgZ8o_@<2GqM;u~>1DM7wkbG)ypQWyo#RB0+cznz$w`eJ#IwG6c zb(R>=Bku!#T_I=^GCjWiR5|ld4pm8P7zk0F{)E5+T7M56Q#}Gg@O&`5hyydzj_FT* zxxH$*{FJYJ?ytj1p+K;uZKY%yZ=kiokLk`9IXIH2YFq5vQ9E5XT4rB{AEc(PzFYfq{7a_41NebA7hVuUQ9=Z@(fsjHdS|BJms2covBj z%-m5ERwg}+{O0N|(It2Y+r`LVU2nC>&7FCjVk1u~$?x)P!ZX7S$)=|*^R#BpN#Xp2Xly8k2N=j- z72)p~$dO2LfKW9FD&nSTn&B2^Us?=g;R)L!8X|EN>7}gWXfCZ>n)ST@SkAAJKi#TyRhC$p1~kB*32XW)|NzSO>l zQLY|MIqIcC&l`V{f<7a!lz_lkOEl%R{Jwm?M7Cpn40{guCB8!1?uaC_MsF z5<UI~J8@XkbIt#3Qa_?@_9UFOWaZcKog+(tp)*>Rcfn@q=KPfNyD@oc!6c}YFbf>RYfk&}mMEU5G z&8bB5dvtPCxmEpAjwfWPUVTEN_*)!OuZRE{f9=02>`Nl*N|5y78GfxB$ltaqQ!cYQ zliR$^d2|@uH7$p1S(>|ma;*cEq*$?DgvkwnW|)=KPEfwMz4bp$+%$HQlJrtP9Ut~m z{irq91DD=rq|0a4s8 z1BbdFPKkHv-rten@CzKwy(nu%6KrV7y+a5CA^Es#g|Hcfd8I$f=E%Z42LC?nWHPa{ 
zgYo5AihJxzBS7Drey94}Q-Fw-6enh`DlxUvRLze!R=5-5QK4LywhxJ-H{;u)mLkxV z%Gq**T3C>7mB|*&GHEGq@?3e^M?=%`uP>En{o}37K63_7-^#N=bE7bJ9;4L!j3q2{ zTn(45r-m$5JPFm3>H4VjdmEATSmp=`dEILX$97Wz))ju_B{PSwem&g{SRfFqrrc|x zlJ8;cBfOR(Y{=R?vspcq#&FkbE#H<*ax5PgS(k&P0p)MUALglCN81p0NfbI7U_|l> z%Oj%etq=h1 zhZa?GZ_Q5_5I@O@sB%##)5wf-jxaip2t|a;aAklMd(^PftuZOAe6Zt|vw8QA?rPZc z`%yJ=xSD->kFw73x8}ZpJe-Y%Q;nWu75V5x`P5V8si)FT>QA^m6OntCe~SL)iD|7< zOci&_k8gI!YV=_Wzhmj{zzTB?ZZtxEpH4pgyp6$w!rOHW85pQn_V;>_p4-81E8mCy;rR39TmbxtG;U3 zV|$Pt@}v*9Wl%KYRuR;+=y~V+-<;m@dTX3C=Bt(=SHJKc@rVy(W_;xHg(6bfKDf0H zVa@r{V*XrB%K9@nN`osW;R;a$MQFfpRfuJJCQk&o7W^&TtsqlG8*}hwN3|+b2e@*8 zH(#_;roGf+fKUpiQH9m(RtT-lHmp)%b8_IvPwO~>@M#|;EB%h@i(<=w`iHmu^PiW; za|8rZiR0+Z;rr?vHYNWl9& zh7JhG55b={S0eL7{Pb`$uVKhAw^Sj0QMTY%7 zp!EO7+a_tKRh1rMzAMR6;Q0MH;Pka{tC zYB*;bewKox!=q@DQrYSLg%b0eOker;P)c2}e$j)d%Om8z2dUZJhf=K0l=pvfYrr_x zI1U#NiUh|Bfi_}^%;CU6-zJO059rW*@5W12L!5qW351FOTa0e5VM8iWcEq(GaA@3a z%%P0bOsmh%?3#y&ga4iUeCp$}Kiy1|L+lIm;?;!}I|hD_4mTg=QUgC~d@o`R5DE=X zn&=ZT-L)ttl>L4yKdk7UN2E0-#(hi|u7n|vZ@*LmzjTBl)S>*mEF8^$1VihWOzKmp zz5VhPYDxfz`}zTAK1Z$6#kHOgetGGWpd5mt%4!-Snc#Ae?L>PV9bnG9;=QA^;i0qn z1?5MRe=II2{x0P1UsOFN<}H7NJdOwrD?&HrK~Hi%7ECJO`c!R3tP!sZ%Qw!g+10&> ztiU&F8~|Em{Hdh{XJ01@65M>qG{rzW{uCWzIZmpnff&i^a(Q%dALa`FGWmliLwIa) zkx*8|Lb(F>EGf{LG#M|Vy+ek^{C;R}ek53e@xa&nw1u=NyxY;}F zL+18J;~=aT)VV+pJ)~5@--M20VvRW7`<*<>_pMXsv&@y6%i)H9|MT*&Cq!rV4euH} zbEBYy#1|eNL{zcvFw{qBCkh8?gyd8R!Tnr9y99vH%;e1yE*)wrlj)%1!% zh7)<26K;zpXpI|<7~NMUSAo+O5*0Qp+vpRr0{pACInDLps5d~L3hce#DSE4^2wX<{ zU>SzXd7gjD`SboZ_z20bTN7bdV?&~k8weD{CeM3{FC-rXajGV#O@LLmz4@X6hGRZpD^ zY5XzV_qO(x0XqS*eg&s^@1zS%duET9pL?v_XxHM#*E#MOIXwEhLQTZ|0uVLtdv7KS z?9hC8^CSQ1wX3BGLFnF8BFMY$fVGzowMRJRLmzYjXT)=uXL4%2ET?@-$Pf#@yUsj~ zo=_it`eIq{c>|!&dZ-_XqR)y~ zU?_9&5nL)Hhy3WJg{IU%Km-mst#HQg`&*}{N#?VB9uuZGCiP7V%fsbC|2S7R z{o^x9J+erkCDc6DRTCK}6c8n!3QUsWi&{B+dr2o1z$c-cd@p;AR|FmOM4ouEoJlDd zcAZN=M>iZ1lhoE7LeYVRAmTpQ@aingkP2-<7aTsM^IBwv3GQ!Tp^R>LqEqF92k>;c zwB(UJT`u<5{et92c2Ffd9Q7((-jN3vrHsFc_*N84SAZch9kvyBK)Gu3Z9-Lm%LO}j znyo-i-Y#kXKkz_#-~o#~u=jomO9mOi$s0A8JsS2_cvUPc9i3udx7tsF)!R$@OT}XL zkD`!R#gl;lPa4I=oln{2Mmh}e=%o8Q%{qsHiSt+4JjQ>@4j>#kPIlu3|Dbmj!81ur zgJswv4a>?@-5A((y%l-|}uwBAN zH(Qqs*3Vj?=)+2c77Y*w2-|l7KCX~Vh#NC^B82gbcL3b37#>8lmQV&R=NYt*2cWF< z_Wm_;242i$(LBl=0nrO*s<;$ASME%1V?Ry-8Y1F2eP4~=i(gLEQDp@7nqd@^n%ax~ z<24nvD$s}+mu6XJ7h^7q)7a#+2C4@k~aL3E(IT=CU%Oo)#=p7MB?De)=h2Q(3f)cnh*hLOoI(B-=hY(52{R; zIv@6O1Iq-dvah?`Ux6flS;F0dEr>5XYIf}6q_>&{@<&!v@D9`D{7ti zH|;DeS~%Z9uVy<=|rz|MUD9@@R_mgxr3V`FiaF3d)C+QLUJeg(cD7G zTS4IRUEEq^zuJt{x$@|Pme+@h;!Wf4JPZ)gZJUNQChm3ONT?WmCE^AcBZyRJ{(UMt z_O1C*np#0mH)YXi-$>dHh8X2y{a6G$;Nt&40d4849uCZ;f zL%R_Yz-__H-}Bpa107Zm4?`3)D7Nn1@yBKSna|TR6k>QK-~!036aum1Kis=bBVfNE zYI@LkZEPH_p&%sJL#kzdbv~>mR7miad;>o)+bGwZr?-cwGD*@=EIAk;W%$#O*y!5i zP-~H`rDtiND23xm;v`WQ{igSEVR??u#XvXx3pr6lfKI>i_~teB2u~4hxD}`hvYCU> zOVk~_=!NKU=vKUrb0AVIrC9M3Zo)j0l@DMc8{2V{ zqs{|&2A^Hr>c9vH%6K7l$1n62^Ci*9!+_zTim8swF+C3#>^fY5xv~_{5>r5Aedv(r zWA1sBDE%Quyg<8(0zKf7dL~sNS zNy&!dCvs}t z^17KzK zzcT)Nivg!l@rAH9r&pF%)k@t{n8uOwX_-sb&IOd8-vS`H{W<(W`oInR9)-01NG@$q zqoYW~C=hcoA`PO^YdC)JOWF{ippK zq^Vj>>v8D&tAN`U{CAbI^SDNex-gTB81GDP>lGFBD-4lpXnLORCSJ6egngi|Gg`-| zIAXz(M{ID=gr0y)9rzg?+cenUtZ7J>$D-wO1CJ>fFi}1?QxfglHpmvM031@bs>gs4HfAcux_17N5%B_am>eV6|2UR7ac~N3H`0j<<80`Om`Sv@CN(A@&=1 zj`#JA9k8O_Y|$2T2KC*{EP*bge~1 zo-uWv)~!g-$$hN!Gus<9(1|`(bC^Wv4kEYSZD9?&Mg(u2zUS>_=#po&QCz6Pp;n^v z@LteayF>9_c(t)1$Or7630@z9<~KO)uul$J1q1N593tHgY}>B0S^PC zSd|_-+dw2}uCtU&)!)|8b_sKNM4XYg%kJ_2Znld1dTpvr)I_Cq|BX+9>$xvg1#yfx z9OLh|mBM`6a#N}CW0aKUszrBJVb`bB6UVZ1iS99ex|M+5g 
z)X%|q5e&@Fse?cbbB+5sR7Zfu^ZTpKbQpEkk3U|{dk#B;z2PndJxzJh%niM9n}Yg-21_c{s%AnSZI_4)?+kw2gZYXUfaxyxNOUh5KhgE4)x@!4#{GNlK}PM@}pUh1P5W z_QI98U$AhSq%)U>XN0rFw|MO@gTkqmI$nSt_JD#H7Br+#DEPgD$hXhWF4ti5>`a5y ze_M?&MJM&0{$1PnkWTSa?){hXpNN5E?ZGx#U5r*wh3%wA+^6yK1ENBwUwKGZ2S+5T zUJG+ahS1NVCG~)tw6Sl&(T=Wp^@iLZ;pI+1ZDj=_--g(nDC~Ac{CM{&8j!lD_r~94 z&<2Ds4jxroJ~uxqKlP9F`0xWMSKN=d8eDnVmEggD)`Qp^GQRy_ITtu;AvuSk0F@i& zn#{M7z=-hers5am@4Rd>PH1?aJBI~3l*suvM0qL|Mm7Xl10W!8E2Q*@%aO~dQKP_k z2+_Ug$z~&b%BqDeX}KY~VozZ$70yz)L|qaLqqoEc4OLofa6LT!kVW z2i>D+96HQy#FVx0p>RNA@%I4eLsfElb~dO@S9Z!2iOdMYE4e`ailf$e%nIpDB0K*O zzrM}I&}DL02iYH5%sHBNS_6casVENdd-CD(GfN0ueCJ>Gi)Z9wdXDSeH(PvPD!=ri zJp5#Nz#AmvF#9BOw?%(7Ey~T5zz zBUry&R`RR*te4OvjUM`Am*u2%-oMn&AMz$;uEU`s=UV~I3hIY>^LY8`PrV4_Me&xR ztYxRPjhcWB5S-6(5LiKp9Yy19l>$&}Fa$#fG|Usll!K%N&Eb*t4u**l%Wpf}#)zTd z#&08KUgK$)cArP2wYLg^{;XGd1DBXOyoe%TCtbTQ4T27Y3C;FJ&6>g0E5+j9$p@~u za$x1&o{|*oxjLWkfvEyGWkkP43YA1WY(wN=Skw|H1R; z%7eMm6tn{L5qzgv7(|5-kPtQAN??Do9k&l{0e68K-hp}h+9JZ>5dc-#+8%|7yJ#2 zx-;`G)LDQ)9>%;7HJZ^kCb9}z?h|3KR^m)Z8K?n_i^o*pc?{HQi=5AkO4wZ-?6oc} zxracxp+1k4Z-6U+V$SM<@9Yo2r*`~VaDQajl#32D39!mqJT>F zxYG?^B1+#i+7KQ%jvV9tqVTUzG;j}ZnQWm~eQsf~EcIfFza78NJ1gDorxrHBfT40% zBT}zFdGwVI?sU zZm!H)(pTvr99K^*)c{KFrBpPO0I8J_nUSsmdq8I~t5jzn5zfuSn+GU;r82R${5h`2 zjy^7oeM#8$A^hZJx49D*Ae}ndOH=Qk6D@jRmtk(5pI!IMm9+&Bxee)og7LK94)23d zGU-~+Sv2=&^2OQmgm-6r8{MJUeTKvs5mD3#!}YnBCPtT^zITK%c^v-XQolUomEM5t z+z;w2dhZwK&ittS(qEwRVr*3p0-WHnaD^z65T!d5E>b?ED*reA<;!IyoJ?BhU9~V& z$nL;L=0*tv4;>do0`j?QQNQU7q1Qn$ z-t`0sj{-di*~HL8tC&2^Z@R2zb8R!nO9auewbFAX$kr<>-Z&yUO`(MnOJ9<|QWsNy zBlQ*3O~2*cIP>I-l$ZPC4?R{Mdd!Bi5>U12R%JHy2ehIGamziEvAU(wkEAP!b#PPvf50({iUoRR@5<^2BKjO@=c_!K#M@et|>nR|=9P$V8|B zv0qzy^?HdI2W?_~{R>K`Vr+i_%nrkYXkbKEq$A1J-|Rcg7!_e%lU|Shr{d8hhd5KJkZ%9i9d|IPYI8T@2hi**g?N$IVwH9Y$pR z=N%vB$IBD`@#%Dysj58;Yo2B}ksh)#>%jlX_O&G zW|U6&tkHD-$&$2<=3hy#1zjypgi$u{s(p2=pF9}7dg&3E2 z|7h?^n27iOAK2P*9IprpmanazvZJt&7WXeilr)uyb=|t z1Rd0Vs1|m|5f`f?v=vn6HmdvC(({bnJ~ctZAlVju`upn;Tu%8F1c+!FuVdGH=#MF{ zvVu4@ zgBk>n^CbYnM4?lj?_-R)a~e#DV0=YDBL zHdCUUeZEWwb3cvA<+gaiWv{Hfwr@>hosHc^m;fs^FCa%o{(k)TUOcUBFJfEt@QNY@ zuvEd;s_02oUY_>4;^}k48=~m6xPTO*!vDAJ(SPyfa{dv|Y+Z__Ih>hM_7y68NyoFKj8&G4aKgP5mT|L#)9S`d=d!)%oMq`_&3oIO)+R{L>e@M zY_oFumcJKf=B6h13RiT*T{zG0DNF>$_4JO9A*rg7TG8ce5gTPoojuahsgx?sXN>Q^1`Gb?}KYO<%h}f6$}` ze)gM+ky&uXeW8w8jgx*I(Lz~pEa%hT#B!q%J{1eW*{5eUmB49@$I!})2qAgG=5-!LIW3y_Bd>?-Mj>rE(V zk45?#K;m{}jp!`-@wN{!saOR5`*&dOZMnlpzJW2e~TSd!YntG1>$l9|<*;F;{}+P%#Rn5kRU6 zEHodj$WZG0+gQ3es4b;xxx>7DxMvb)S$%#JL2Ol_99@JT#CX)$!GG`5ml#;DPr*D)CQUa1i$RaPajwfk=}q}Stl`U9iM&uQ`PW=B zVUMV^;59n>1o{DQ0UdJ?`30gITo52d{b}M^alZ>)KCvT1Z%k5W@4*Z5W|u%qfv$zK zP0bO&g4z*H7tn5V)$}V2Ik3OeM6|G>@GlbJr~8OKh;c8$i%;FtR4(R{!Z&%gN$4t{ z9ucA45mC}qh)d+FdwSAmi-2_q!?)v#L~oihH^TH{8E{tu-4GXxc6T-F9pId~_lWETV(&I_~93!9Jgh|+B z^*9LT2s@~*Z<3+(3g}rx2ECfKdXLNr)}Nm(5bJ$bE;CVTHf^4UOAm5AfAi(-4IzGW zTJ95-erz8*+~*h+`+ffd%We@_B9^tb;_2dH{6UE%{pS;}M{lnLaBeX9r}zi&<_ifo z+sc6NEjX-vKJfuc;6=L=Ovj*{f);wMg7!T&mV)v-3n!vY_WbbJH3nVIx&n>YN0^x{ zv6k{>nA!muT`diQ26ZzGz=Qm6@KH7`AjP1X5rN$r#;wnl zg|(nwFO6)TJNpQ&+`VzEEMT>7{5UX!1b}Nn{_OsBrAA=WuJ?#1&RM{(afyge2)?1JeB>KiA_X~EVV-qO zeE=o?GlDxrbdI<@ls*lUIC(`?Zo+U8;4s~*T_W`s`!kH8;F4$f*3ZS|{#?~pcZkb< z9KW0#5A2ZB_dRIhkaVZ1E>IqZ!2S@m9|VEU9vseIKN8#6Lh(xL7qxlHDAlmCk07yu zC=uBsS?y(KWgtYkb$mRBcgGoLpj^!t^849~pDR{>t{o;lNLpU9va0g?_RCb(yzD| z-_LH%O3o}Ot(N;jjlM7~`JEMsMZvVc6Ycs>Ao=3BDqGcY>Yl!D$-sNY&g6)oR`Q#c zM&%Z&F`z>0cEeaJ4`5-9@T@8!y$6_w=lwHritvZ7mDtT!vyus!-HDiqTp_7qpT_P! 
z97NS|v`M}y56NT4ToAmm>Ox-#bBBW~^#Y-K600e~ch~6cgy|Mqm@tAAE~e%|n1Hp^ zMVIOi4#`*5QogUXDOIQ(ls|Ya1{!A%1J9v89K~`HIc?TPeq`uM`f@dRC92!FmkzAWBA zE~_T@FM%+D{8ZHr0ZPjhT^glYJj%X2HJ=`GzT!6#Cc}THxc`-qdbAL?9J{h*ON@K3 z_zIug;3ast*TbfVxgM?gy8jU`YSlDCwu$#d|QYV zO8475Nt|hLN?-d+3HID6q3R!boq$Mm-7? z++D_v7a-Z0U0b`j>V?4$8*)3eC8II8JL;bF2Tv)u#uNRc5Lw5A=SkL>3HKejxiBRp z8qv8SSA9Y9gD89Q6r#UWeq?n#m!zt{C}&=O`5{l88#&?@n`I|L=m%vlZVIq`-~TGs zW2fvyNMWX?nX@fWA8hIlRO&uoh#YtG!7wsFyEHQ`PZO5#*IiO~IWG~(1?F(wv&R@5 zVCCKycZV(FP&jbTp;uB})ow!fhCcLjRc9n4HVz{EE^!~#!tggd0FlK!?-#NZo4CRv zE@=h^sqm@0s4Wji{!ND*_cDlz5&hs#tNNfZhx(!jj0=?dnX4wWZrfIn}al$)!zE_1^*qff{Xo6o>;>w>6N-rE-%k`X1^$p zdgP!PY8UzcGs`IcH9*Xy1n~#_`#wdA2W96ky&`TI+Nx$GQnqhuC%?Yt-9uw-c(9ZT zCBxA8Hm8x=#&{!XW`9c$0^>@a=})dxE4|#C%!R&z|LQ+#fBCQe<1EpeOlyhWV6 zlMMui_IKHJ^#Mx8yuPy~a1{Nr(D&DfdI=Tj)e`jhN)ry8D{HfXG*^$;=*XY6Zn9{MXmZgMsoB46$ib(Sf+4X8^W=ZXNGd?Ktcg>)bG56>(CP-17Kq zpn113xAY6T*jf&_ki?UfGZa*yyyib22x>zVKF;fY<#)qIax+Z)77}K| zP;HMAWQoeuOIh_vx-48u23=nDcK`&2dK4b!^k&pK;Fq2*7DZ+N)Zj-D7!R1VHx3w> zCoei9s`b0aVF*Qm{A? zSzSie2AOdH0I6-+HYEs%QF(Q66e_a(_ZGl2FPC3<-H9j@lc^S|mqNg0wIlaym0cCQ zdVe2L+Y$98sjxv=y#aQDTl%Afm2q_dAt+&d6GVY_hbuT7)CURYdr0_h0gOT2{=(bN zy)bA}{p+erY2+FVwqh>1ez4Go{uuIfLURtuwTbCQ%9fVO(vt5%%w9qJC!1?y7$JB| zR!I1-S-g5-wl{;W>w7utw$=@Z93_{{*OZA#L!AE6CzQZ)f33`Unk#B1XLY!ilm)1~ zYazmfuJ$%^s=YEjosZSZRupV%-FZq`@H*vr#Y`gA&!vg#Vqkth79KV)R4^R;@HNO1 zd`JSyqwYGvHuk}}b4}8gO2DNk%e+Lv>hqSM8V(!`Bg|1(NfZld3j%_@souIx^tR^7 zN!o6=U$i0m#H;?|(Q8o0|Lyoa50Gq&2=4vz9>B*h#sPyz95+)#5L7xmWKjMtWN8lT ztT~KK?vY&KBQOUQgY6H1ZYKWS=9(n$m^$87nOh5%aF*)De0U9&z#ir8MC@?C{lQwn zl~3jXBYkAbVqWixyzI?*`Of%H<@AsKuG*z2!0d7~DkQdsDLk6<<>4rVp61M0_AZxO z{_(kT!Q;6{*RuDYu*vhE1FiGt7#9BZ-}(7$>SpQ_u>M%n5u-f+7&v&(W(X>>L<)_j z@lb2U#FfH2{nE;hsH6JTRb^(eMmN<4bye`ab6kgeWo9kX?GkgaTx41*skVt+jz<>= zqfWu`r5FLJSyA$YRnVstXJN+LMU51ROLWPDtVM*tIBBQv{f@?R-~ZZcfRh&4m@`yH z{?b$KslsjEgIy$oD5he63}DExSvvd+ICm{sctTu^vXYZCYPaY zYe_c{`IgL7sZ*F!EOea5VWZjmWbnN}^5w2Kq07r+5R^ zV$+)K=T)bIL20Z*SUuvn-066J(I}@Mdm~5uL)dK;t6J%IhJlD7;N6GuoVxFv&pIE- z`H)1-FrDf?02T|!kne}XZ!KRfp$`12qRe9Jv1`i3$h4}Qgj^PMeaWAVv)A-IK_?I* z6g#LVJ_)%OYSUppwiIAnE6_5P8_g%FWmyF2WBKJSsuqPMbQ_yD47#olS7!u$;`+~U}+b!NVlzPcH_)nnv4CKVwl?rq~(l zAt(=fs?)zpus1V${A%lDxIbF-y?Hx;M*q$q5~<{2;8)guBR=QI8!0r&+wc4qrFMyA zh0)f#NCD)^n%8E0o943z0as+(_pcL&Bzx#DfK>p*<5rWUpkcOzYS0+sGca(LmAIo; z%jGqHUZF@*({X|YKpn)k;oxq~dWHUbEDPHm-q;~H9Pb^;RDyRN(CiO-=rZRF!)+is zN0B+DJIA!eW^RFigTMV^zXa98^}TWsqFC+lLh}lK zTPwuAJ^j!_C9H9RV;>Wtu8|<2r-gV@#^&)6no9m`nGn7ZxK1#5aPS+hqN?mvb?qmK z(1-YZ6YQfno)i$F0BbCvfargPF7bzeJ`j&?>*BT!2OXFyAAX?XWP$^2RJ%t?m)d^| zA#3v~B_P&4YqRro|G|H-?t40ad@e0&z)@i=;2UKueti2}dCH3)DT9jBx8NCs+G`DK7^9OZo>a1n!N44Ud#;5KS}<>ydu#VRW79jjJ*3cT&j5PXo`XhH$Kj&r*)27$LzGRiOf z9q^L%$}_WG9@u4Rr%1c3pKn=}r ztXgU`j4#9BIBLX0MOoBgpz8CP)X#BSu8;CJE34|YSt6pcN5zfcjJ}8?%+m}g>c|*c(u7gv&3AvqorXW z5#&42k@)DU_gb=crR9>4=l7KlnV>ZlbV_bH&H4G=(FRMe?;;)2kQjd;ANL#Yr5rUS z7i-zF&y^fc1b7%Q%WDL{cOu+-?;llW0mdd5wMR@E94D#9(Uo*zS+e(aRedo2{rA6i zTHwcj;h*vs{{CYxAr`}a7-0}`jDPQa7xai@t&p>q5Sc~3@3TuPm)jtG2142_U@qz! 
zr}O}ezHR+3Tr$5IMd1k7`!y>}5QKHPEk`uv#B|LuK?`K{;5Zn0n6A4d0k4NBd(ES8~Y%<~3h(T~*)5_sm zSqNhm-eJPOUQBkfCgtyh*E9o4Z)NB5_IB7h)Qt)u?#JCc2Et7<=s#JZE@$+&7Lxn) zz}2yR-ETT6b6yvGR6hQVqM+6htmPZJdxfTnsNnL7ynjEIj0y3qlWfMZaEZ1j@dY&a z5-Hl<(+^HR_KED0roZ=iPH+mEi(~b|Dh$}eIGxWPm-&-m;bP0b0`8NW@<33Ua0#M< z!x+U_hL~FQ-5y5m)e>f~DOuSQ8haQ5eIwW&Atu)fF^4`4}E; zeE<`BMneh@#UH$xB7RS#l(o9s#pOrgFnFgR4P<%&bhvApip zym#lmd|Gwki7khmDU+Wb4DobsqqJGEd6IyWMbfJHo5|Rgm%U=NOI@1ot$UwgVNfOw z`xbfkGC-e((o0Ke6Dz@Uy}YtULnVJ3yqQ^={4MnsEzrcOdf-c1WZx7RTFVY;!pIh%V268XYzr>G56J7S1F#OAlpn4X z8;}~W&KbfC5BV!-^!CUjkLD#g!Lo@UEX+7mCH4KSweb$oxr)*M>hu$T6U-_E_x%t! zunxneBr34ssbfsSO`jnHU3#ef7@g>ce(_U##}yRu1GIs?-uwUSVTfH~Vck#T(YOU?DEP*kNjp`XZkLCb5kCO$v5%62s7c4FLENklIEc)V?E#drAQf7%?|v zzc?tMNAep9TVgN!?2%)n%^c`t`mb6r6uc!4sbk*2ec?Q*i8&%jHBDDv{3*Bl#h)~s zuY}IS#J>(EKEHZ_q+i9rd%zHtQP_jaoL{9_V%)hhU$2Dsu8tKl$*Mt--wI#%ZiS-f zuU@NlT8+`G8j}Hp;|)1hjmCGhFnp)aS69AWiyxMW`dhkI#V2C9|9N@B2j#W0AL?U( zFVfa1u?_)O2CWAHw1w=yNxtrvw)RzNU;Ilydqr2UV@a3!B^S(_6eJnhgn34kshTEP z58(H5QaDwS=$qN7-kbMs3AI*vqOIs;{1O=hU{1OyWe!UX#He3`uX%%DOV~MLttDD; zs}8~6gka+EUah`fgve?!{GS?7$h)WSsc!u4=@+RJK(20V1U3zTQqtBQluJIc_%Z;` zJS1@11QSwSk! z82evcT0ZmlE|p6WaNRB^;SnjQ+r*J)O9Sk9Qsp6Z*0DspNMBz)He%%xj{8rWqIVk?GHa%StfdcMOOjszWHrYM7 zf%f(D=F0l2f5zE3jyQI}wJ6Ne|Kb}!8`MS!URTus3>!o|w%BVRBmC##Ee${S@@W$I zt%vgrlMfxrut913c6`^LCR2vK^fK03K)=8dIf+YTAFZ>Gp0Qy=NPiNX_P71O!8a=8FhdB@jPKX)Y* zZB2`nXHT}%sN*QBh%vT!!VIFdh$r-y*sFL4&>We!`cIg1dvHMDU}8#o)^i*sp<{wR z?9bq=-%~f&cl-%mkPJQpSm2d~lwJtNpNGqNZ)dFmCwAc1a#`4~CMHDRLeQy$mM?r( zF;+hg!?tTY)nueNHhcW*9-@23zcxsoZtO#NNKtTT{vV;;b>DDza1Uow#R2J>!BsRnNmT zvD*P+RE%^SMh_hNVc@XWNh85&yRV}QVaTRDlE~%e=t=~>xdjM7jpC%sJl`66;MC#~ zWA;qPxBsR*<~0-xzhYsQWfXkJqQvza=@i}W_ZbouWd?qHbP@{p5WrhgU$+^hW>Mct zPz6_0tTS;K!R`GTM2v6P_E!89{UcVMw{3yR4^|FYR!zF`AiUndng6W2C16SRET2ah zRxG;r-WcQK7yZWy7C-zE{|pyQ80xzvJJN~XPB09Ar&1qrsQ`~Y2puOmof*;NAdCkk zq);^80xVR3B7OzHo$^epq)H*3MVRw^7a4mGZVT9+KZj2IyhP%-x|2aH=bMUUuIWsa z+$iPx49!1V)ZzyAM-vUl9U~+KiF%}}aMOu&w(!0Qu)u_`71ke~^E?+`?N1Fjay&@R|@T|G9r&TS+vh?sos4 zCJ;~hn^tLtc)^ET{H_SngbLNlj}xOZ^VEi9BG`!SQRdAcS65#61A-afSpsW$+^y=^+*Jg5Sj{g6&%F|l+IJC;u ztnw1G`XK7Q@*=+ca(UTD1?#FDM|s2%pd(=rax-4z-jXE8ec5|$pqP5VyH{3){q|Np zSk79$37QO~$}L<-_aLHag8g#r{WtZK-1Q5rv6C>bCLzl#8&wCDzMnzTPb|^l&Y_lZ z+-kv2d-zbOy5h;nc=JG0Z0~X@XAa{Y9C`ctS80-t?EeAcP8h(af|T?TtTEh@Qn0=b z#H34_sQZtXwpXq|7DNV}n^JnC1Dn zjfJ6C=*wSM;Y4fT^>dAo8@4UwSkW=bMwm|Ms<&4LL3vO?=Bsob~Y5AyldERWi1-Ra_M!@h_M3o(u`;$=d->)tj>;_lO1EsFds z!%1YJ9I4nd)xdDYw{K@_Tq0M{yFYbj?oRT2|wexCl_E%!UN3n-xa7s4~LSj)7=c4 zm+^iy2Bbtw*VFfMkvOKZmtyL4Xomdg^ZZh3rKy3smW^n{Iy7)$5A%fYPCuv& z#^3+Sl3Js=T~I9>7)-6LTqYR3X7w;#E>6vQ!Pdb^`DX|a6L>*3_v`UT-W1sc?O`DZ z5aLJ{zx1BBa*(>hGY~zWA1GsV)^l1Ep#L$TVJ!afn^wR)yLYKf_1650tF;6S&&+dqyqt-A z-piC(c^wa==lv$EtuP4`!V4qq+}ZL|FE&tZMifepdj>nue^h{@jKB0S2D$}WLjx;# zY1Z>q$=$@f^@5NGBzl!gNKNhPVsAQtHpxwsvN+Jra&9c~Tf+e-b2Er-u{Ef}>PVj0 zU0V{pT@|W>IIbvfKXQ6yQ9ZkXZdm@YxZ&f!s>>^h6rU(Bo%Ngtox$UmB!<*$)sw7n zZ0Ub1Je)^r(-c21O=VGfC)Zt;`Pxa)aLo1?78;1 z4WsHoC4Vzh0neM~9$fT(31T69OgwP@1hXk)OhlTcfk-`{zTquf6uv41V0Mny7t?W< zK8Dy?rOsxf#TUvgFPGv$$Jv#LgDc=vYS!B7+~dO>`e7uIdd|iHc%=jDcs;p5_nYH) zKw^@NLHHY-CTx&@$}-q1_shWD9>Q^fT7rUrR}`_$B1$u~#i`LW7F4VqVrWzcj0 z!R|u&qzBz!NC*UR6O3-QY}7EQR=+IGM^*s4iiYoLhA3TFjfAhV=^z~5UC7m16XMPf zPv4kOuvzSCT^)nn+;*uDP)mTY9TkjFL7000dK{>%nLD6-n4Nn7ZJVs?ECoy5GI+O_ zyO-(4URIRSYb&zbT(HfOt@dQNa!q-2l;>_N%=vQMuw-M{{nDbL^Z^7_(JT<-TxcU; z4~JoQ$f+ZW3d{`5lMreijsl&2A_3I4dF>QENOGiWAe8(Hjs@a+e!dZh{+esAF)5#B zBz;#D5yB+ll=#X6wOC6!N=;+TN!*uFR=J|zd}51w0J1;7FB<=i)1#0$hXJWB$5 z<#LQ^QkAK~tT2UyPV1vpVni!DI8l*56!Xl}IlPVtR|r;*WJIn1UKn+&Fm@gTNA+Jt 
zSeriVHX$#%7kPObOPTX&!6Z|F&KM4&wE+3r?_|Nf!)5xGB?2WMlbkP$v#}O0mj{EL ziIy?(Fnsjd>27o+{D!4MS){$uDhJd?1xvC^iB&4Yonas5diqPF%2tzaXoK9jJ~Z zS~q2G37uOtpeL;y#{DrOPFx{QTHj^SQC=_---MA^fw-J7ve;H{GAe=g@gZfTixswi z_w?@RTmEIQcCaE~L%DVsOUG!Ly}$!Op3Ik~u|fF%TgZw04aomp<+|S`Blx8+jnx$~ z3iZ=(0o{!1!9@j}(46+JfmH5}DwUm`%0c;=dz zlpY(VbAE?{tT}0!lMCw>4A32W0RQO%?q@)k5M=?5Z`BBf_k{rX`NzEguoe+$ddQ2^ zA&DSmA9A2Q0xofcZ~w}F2+Id+>^mi8WR`u!|CP^HZDl|f<`hNU8HMs3Bf7Erdz(<`C-u?0N&@$Da#A6r`bnDvNOYGsK+1Di>2>g8_*;vi~MR(|_pUGre=6v*1!$XXaQ|9E+6OAQ1 zlCQv*^nGq5w~Cv~%g;HmnoWa1>?@F(QMHRkAez8+-#Gl3$e53C1bSJxyyQb!L9Aj| z`RfHYQ~`x2CNML%#a>W-)J*N1oT~e7&VH{fMTmeR#xB6@2O@f3@Ib#LAITttxsPPx z6nj30o9H3EQ2J9IlB{pamLfebWYCVc8(U`BtRB}+C}LF7qS{+WjzcW*s%1-^+efJ_ zPcJN5#jB?lWO59oX*0pc{y*N{tVyox%F_H7{m|AAN03qgC=S-Il*p9FfItv2F+2dy zNqLNLAR;^%115^ysZv#&Rhd;)=0kT?Tfaag!9k)(ah@f!fb)O#x6bvo_W`A>%*sql z%0z}w_nv!(J+8gBNRd^hmJcw4(G(+smn|7vv^%4#omcWAg`imYwd%p!44WFqMr#!l zLtGZ}iFZ8tJG&!NpEwIf6)4VhFf-|0Oe8^$L!e(*%2dPvo-R`!>!dS*CB%*71@aDM zZ+hN&`Uow{x@T;D1nC+AfwUG)d!^=xl2rXNB#`WkD3`Bv$dd;3_WpN9BPv$$kGBp9 z<>UT`h*_lOP`%){ zhU^vk@%8s1<)i|e6; zW+gucvpN3Un_qERIqkZ{A3bEOSN68YI5@g3C~+u$9--2Nyq81h(ogXV&URTC?qByD zsUU}FHPR_Z@z`TOC_nQJd(Vr7#9yBhf7nfR2leL{Br`z0HS=PizT0Y8M4vS1 z{}y&&hszK)kTxykh%&Ui^eH+;l#7G5diB7sWgvAexbtPi9DEZ7c08|okBqs(_ zMvNSu?aq`VZmZ%Xe~UjAXvoMWH8`WNs+8P;z|qgT)|JcHfP$tAOf9^k6-MdX;6lLa z&7ywy>v8N0^eNOs5rId*a;zx!v5fd(`GEz(`SOEkcKNgNxL1LA^x;l$&M#!6@TW_3 zzSso%?0Hi7DNqK4%d)@^+mMOkQ|~dV<^=EvnaIl#1P5LnrTe*gi$<(c8$A$C7WK`S zmjYXdb5C27OglaU2w26|o^&iNv=5@#x_g>ia7a4nFP0UG!eck3?O-`7gqav)~ zKi~bDf8RU3SO5K6{hQuJ`uGiB@5UAin=n)yfIda{n9(PDIpH_}6R=1XBpxf07#ZI3 zyJ4n8z-)m)a3mgUjjbgLFz?&@m0uzi+a-FT+@#iir?KX-*Ht9vpGouL^S~nFBQU=E z*H4y6^b-U>-rL273DJaMC;8o^7Qju4p*W@+!-5Xeqjc&)6dH}%lT=}VCXA>+5bb^m{@jPHeay1W${Rk^Vf@+cv^i1*nXLYW4 zoxpktjayoC4+sk%)vyr-6hC>S`(N=Nu`=(BzgFJ$t?}=ZLJ{mj-DRF(5|@MUs^xiZ z>|XcgC9C?ppM}KP?x<<`qnsX8T{_(HKzRLO75Vy9P7gy_RP3&_k(Yz@W(P!<8xgFC zH^q;-2tPo-1*udHjz9ThK!u9o#s;~72NtOZJWBeUFp*l`m=f9#3m!%<(eXWR)*qL1 zz9PgmbtOFmG`M$n3Ml#I@L~A+rgXeo#DVD0|yOL)=B}s zMIOuhh%60TjSbMgfj~SChg?8Bwg?>-a1}PTqI~b&@!R7Mu=Faw8BEUOcS+2AefpWD zFP{A@ZOi6YXJ`Btxb#V(N5~9XeyW?EAAXkpnqE%y{&B-93X}}!v=ST zF;3s8Y@R|t&?uG|kdqk(rllirpKk#7P$kuvLZfl|Bg|ddyfLJiuEn57i1NS^obho} z)J7Pr%`n2Im3oTwoC0KojRmQSjfMS8qiVsOzCL}^v(Q6~dq2&Ts2ra!4Pg)|HJ=KB z!GCV?-GvzXHOrR9f|79R2#_PE?6cFeD_QIXZ`}j57YYeVaVAo%q9qZA zJ`J?1g(X)>0x)-8@i6GJW?1f(z&n0ej%>&X_I?B&iP(XPv9faE(x+(0H%{O85kn>u zdfJJwEkT5`f6My=-w^bzcQ#Q$L)$O zdr&!){ubaCJ#$X&eK2N`PT3Jmc(CuE(1N0N(h;aKVr~lpIx!_)^ImzyV6UFhN)=qJ%Cfl?Z!c$K6uZQ>|M%Ba)K7EHj@tZb+^cbBWq6+Sqm z!%R?{u9Rrs^fDz7-bKnwMQAeFrW@HO!v(f!&mrN9e^2qeRWurA2qcvHstjcJNPmT&JDos`>9Vxs z?J!ef8D`asN!EcM<~~J1QOF}Eg5So_VPq2x|9{Sz#2f&RkQ@4f$VPNE54fglh9PAH zaSwlla>xf#;;%F{1Dp+21cdHB9!*i&)v-fe3SIWkq|S?<1&3ZffvTRW9qlI|MzTg* z7r*RH(av!8LayFLHHKj~dj95?E|+Pq{UXwPFU0EoUzhO*{+>ch3FDg_G2i1T3Jl?oW}=P+nW>3NYE z7a9HXf@zHY>>SjZi5t(>$ExJg3OVc|V&J6_(6lu?uV8E9@{v)#dx8sjI;tiN$@f{*$asU5Q z;Vz{Rp2tI&pRwqC7!%hP_X8?A1jaBmYB&~W5yi?&N5`LY<;54hJ$cd_2|6QIsU~Iq zkX}{T;C?RBrV)Y=;;-vcZc&RHg?Y0}9bYzR1OId|st#DV1mM;%8IPJA1nLKuSwT~4 zY*_XMO6bCI-t9XC978e!7&u#gw{S#gqS&GUx4=5wE;k7+$Bl@*-5BiF zE~e=)f-%SI-;K~CJm%cl2XiXfwb$5<`;jr?OMBtF-ym=@8bt2vM%js+4x%SeutK@u z?s|EVOK9m>CQ@7B8jbHhQXciPRP0|kU0sp*e85|=dEdne_m_K91ccVSe`hNh52yG4 zw2p}K{d8;ob^rPwl3e)F#V>?8=9dTp7?2{Uh*qyji7)+7cgUC~{2575d%TAg01T0^ z`g=Yn%j)-jSbbg3gIp}gu#TFVJ21i&mazM6BoQp?ddW>RUgb*go>TgG1qe)U!N+5f zbMEnDVXMNxUnVsQUD^_X6h8oQqxmbd9!<@v2`T6wOx2cP3U>I4kY>h-rdAEwN@ijVnKzabh{sQKDuZ({em%MkXx+4$i2&B|*0}18ZI3yqW+r34+#-w1v zZA`Eot)l=xcx-^#6X8LnL2P}jybzMNQC^mNq)5mNX0f9rHRyv#JDlK`dX_iW%evPU 
zVTTb~ElrCKq#tH!4d`Tgd10PHuY62Icm%9gf#_fcH}VrCDskdAqEy)`A9!cA%ZA5Q zR)|+BJ#Sga0mbWv3!C>G_I`ZwPw^j~SjxX(+KMJo4`X6`CRMDq=ods$hmFB~ru?iS zFo8xk+nXu@Gu#ihW%4W7Ga_c!7W7l0Zb%r`-raEDSq+b%c_@*RglT$60&*WtJn^(&@?_OZs!lnWE_5?N^0{}3P zv+?OcY=T3k)X;C-|1av4`d^bAgx;JSPVW(v?zj5FTjLM1KB7OX9vpEZsXRK|BT5+6 z&=*V8h(A}tlZNXwP8XH%B=_TC(Q0bX<86oS!iPEkH%fd&P#p`whe6nTV#5)0BOikS z3K-72{vh;a>_0MhJbx}PTvYQB_u8kCch0J8w-%GUGGA7_K7(!YG{bnHDs!_U3?rKy z9^gj+{Q-I!-+jK!uXt#ZslyI`8Ivu*Z;jY}l@$5=BtffBl@GnmK|I|5!9*+l^n3r5 z@4b$o>`|>8IoXLuoR5_=*-Gi@vYU2x_%iZtyetY>XX$~6D)yS_76}j_R9RzGVNoYL zjXyU_hcFC^WHP80wD9d2RTDYD-XH+~Kouf5J%j5FAyuP=F34_>{K@Fj9zg?{)2jHf zZlZFRK0|%5exx;76Y8cb*k*+kordb2}r;+R^Hc2g%RimP8vDjAC{N>td-m(loh8pJdBD4 z@tFrG`rWU6)D&EU?;~HL@%F%RA#z~w0f`fD>~y0;Jw4OJooQLbX`2zjpuH6x)TwsC1aSj}b{^Td+{C}{V`BxA!|1$psf5dA|=xUd4GlXL-QZUATOS9-ouS*2tyMH%+BQ?lT zb*)rPgUql)_;gYvzGY9Rj*TKGxJaSPpP3SB|POmu^XP_c|qwA}PXhSy5auwytXIGe2x zDXU?87-IhUU{hy_;;aX{8i_%!BJUL#RNt0AdUh_BTmEyQM2la#6JZ;}4chXJ3D!ln z`c9}PT*7ci$U6ZR1fJl5=s5t74;%EAx<}Ff52(qy&lk1aXM(I?6)`oW1B>_V>MUW) z=llx%>~=DBU>C5O0rsvOQ#5L6nBE1-RSQPXBTmMmuQk5=_~YdXFAWwPseQ`EA{T~y z7yomNWPJD9Q3-blKac~IG-KFdKuCY7g)sR78sxsI=Q|i+eD9dN}cKbp& z4r-=4&-mW)Ziy_=99~ck9{Cp`S(z6D>%rMf2zoUA+4Ag)U#I~{@48Pq80mk>|2U`r z)c;vG*8KE@9L-~{JZhy+Bdaj%rmt=hK2}RhKl9OT1YSfhi2yML5deOQuz)bC0i(rn z`LFmfo=Y5OLDjqwrpj@I^A*s3!#9zwxz|K=Zj>AT4WXV05er9)0k|Cm0R&vdF9T_P zBvjZg!=WX57m&?rFnooo$2~qA9eOFe;d{-{w>Xw(RS2h*@PxpboN+&e{B>ys z&o~*c5>U4hIpAOhpCB~6vPyzBK>^hEXtxn6PnDaoa1}HnH&QiKIi3lh-Wt%@n@7Wc z>ysP1sutIG4e|cq<>^d~3x}gUFCx0=8p^l&uhs*X!3dJxOUNA+A=NVUF1m4*5-=(y zP%M7zlU?i|W_;2O)}xpkRhzj~@!+*MkAe0xA04dddCxt?I0}{fqi4%uxBy__LM@=8 zx4k=L59|i)bzWPcgQfZbiAtic2b;_LyOBStr$2-P=LrV`@$S7tz5o|=5ZPxe&D}D2 zlKLPX;09RIq`ug}QF9l&6d>MBCjYf@6VnjIwE33=X6s9$eD_6;!Q;z1#@W^^%Mcn*w z1g5rqG*8O29+T{?UZhBozsiJBrj{Wog~ouy`3G>&V&UX8UCX?5ILuVQZ)(8J!?=hkkI&>Q0wv2N=`}2XYS(%y)HZKFd>fKGDaHk-@6Y6I z7loqTKaLPqpJ`X+guijK!m|}U^2IMKl*|1nyE}LGQ5gCC8M&PwB?mtU$2ZIAZ@kcF z{;E9beK5XziDL}RV3gbLoiQ_`&>jsL{TnJU9`WCVqa##w|G$@UxP7@bavYoN4bPqA z4k;?X6dxW3)kBfnRxZMA*G>|abGGekczVNdkY;JhMSMIwZ5M$qs8Xzz*NDJvPN5=; z>8L|;x9i=1u|$qRsPeXx;aXW-@jRTvF!tZ@eKx9RD$UpqMdGiWfd2Q^VL`KNsN%xp zD?ByYl*k4DjD8P&PZLQK_UFFXmf|6mAYGGX@6(}R3t3ufV$uEiOR@QWub}Z5oHR{2 z{lmri1&r|iEXDpaL5=uyi7rchZk^OfpvIyyN!mckLO-73e0OpmP?;D(Q8p6W)sYQCBR? 
[GIT binary patch: base85-encoded payload omitted; not recoverable as text]
zx;c!~a1){2wI10#$`q1=@-1*@bN)zZAk@IPpp-#ts)$&jq#N#zHMC=W!qIU|=Yi$}$Sk5(v?;?7(Y#+%mH8y}Cr_@mK$7BJE|E#^_>oU0aU#QoWuoDQ zrAvIl+<}lcd>}9*F=Wg1lA#;&F<@=HOl%@af1%x1m^^jx0H_afPA@1Tq8$`^}E9 z^8JW&T0KWZt@#OL94^joe9cQEG;>7PK_Vj7&E+h+BQADEv^!}HQlWH*aGFQf%-gaj zUS7Iz;UfOWx8N+%v0vG0NbJ7maqJLH_xSh*5|259dbL5?elSV4v~xx=d%5#=So?oL zOs)68EM0odMlcro7U<9G9`I?al?ygaTp%*RH4-f9uM3?kTTxsXG9+@+U?a#B`~*XL zR4mkD@%RhqCX)FqgWVH(8`Ujvdm>>u_Piinny^~oaI72G{TuRIUBjq840d^x{~D5z z!3Oy|HXts5;e0oZ-S8*lCN8jnD3oXgVFuio3x*VAbpV7pI|@O)SSLa_76a`@R>+lG z938fGXFrPxie}s4s=1wq9p9)r%m;Zc9dZIMxW_}GEWZ+_5Y85HO`JohLc#tKqgqAs zNVGa3r1cjBGgu|KIKw-UDuLzA;~JQzsyk%HLJ4tANB^2&4;P9@*f*)lkI%7@67qw! zPvMI23$DyKA!1#3Gf3Fpf%JNz(R{F>dY@9xM_ zeWScdk`Ef#emMdTdEECBU4t_XSNgG^Dg3HzgHWY&T8e9wBY2=H4k4@FITCjI&$SdC zReIg7MJbKAX95SVR9g_4*qc=t@ebPED5l`rWTGr~#HCHZup=H1XAF4m&*Jj)*+3f0 z#E-C;&2Mg1t`f18qsJ(~ea_rKttI|1-}~k_Io+gAM zCBh$kqfr;t9=Ldc&t;RSLK}+w5l$fDWwtjFjp!pDd9f_8cq!Yf3h=UPtJrCPaw}Uo5^*^W6ee{PSq+hNYXM}UM}T071QK4ukHlm%F1n6o zjo3z9`5J=MkGWIUA}`t8NlJ)vc>LM3d88w1nkNfM!C&?uk_N5>X6JpWIO`ik*&4{w z-62y1VEL8ScQ-dV_&Dk6bjC^{JcgibS$+p`yHgbjK=|tmo8}x~SK!D}(=MGm$FE^P zQZ~-nUw-!I{C*2ng*k!q=Lt4&_QOJl8>~3fC}?9lk*TMb$o`Hj#@#yK@1+OP8Goek zox75vOyZC5o%MBwAvei&Vi?dS2%saEF!w72cp?Y3>9&psh$coJq&R4Rup@Yllks5| z*PAK{i!)zQE*#?dJct|;bn&*29UNsuT(h>VQ`#cF+)IK|Cw6ZZfix3Xbb?=NHdO}z zL>nYS9|BU8XX%6-WSQ=-M1soMo7|+h$*!zL$}d6)ZXCOGB-RH(1ZCv7Mo_E}$jG7h za5-}{hqm2%>Q@2a>bNKPp4El$+gorXdrB$<0HO!3ttSW)!hzuIj2zd{ZndN!cgq9i zduQ)|_HB-YgZ%YE27YsTh~prwNxK4O7rqT_io!rEQzg8%+#Zb`Ypx=`0u`*IL;TI+ z=cgZ)epW4?h0+nCP!36Z(+rwA5w1qEV>cwWvIhf1QV0ShiSkgP;*QpLD!(W8Whm6XW%S%VM0w)9Te{I1sjGnZ3x)M$@6N zijYO`0Rn`_s5c`3ZyzL#=bipoh^}LrUv0WjamOzw|c)qE_47@jZN{L8|UhpFd zbt;9eF?<0o3cYvwnu`=P3o6ei)SZbnj4ex%5|yJ;1s6zZUYsb5r9_RD0fiZzpd~#N zmgk4#LmZJr+FcPDWFV1rHbLkpF@^Y6SaUdP8B!Zr%eEREf93gQuKe;%Z(B_^f4f~O z-7cgB-3AEuN9I9)>=s%Zn+PW)RN;;pg{svP?Lc4cm8D1dG(yYZsA4zg&MD_3TmlCd zi<2oVFes%V9vtqLIajFV7 zC_ag3BnR@=dA8xaQgyR-J|G_7;)@T;xr>OOQNkCJq^L|;IBwWnyL~hmGIf_>I%IoCr+_f;XJYhxjngdK-3dWwoLF5hnre`K-kp5g~i~YRN!>dL$3sF%~TqAdmK>kWc|Ilu4E+r^IF6@oK;>(Wc+4_hfoc7udzwbv_K zVk!`adl@$myNd&n{3W1G9 z;KDzGBz^ZedO#?6E~t_e}-d`0dy3_%~@Vt6wYXZ z0bXMl5NjSLt4+GH0wDMSg(eBvl~qUZ4l3G5X6N^dU-79mMVJLQ!N>f5dxO9?QHDSV zLKzlL(t^GSU5E(Y-3*WN95o>X&NJ+~TskGT83$N;oU=JYq$+!~QUG3)5T$LSN+uQq z?HQx+=2sfE2B8E>mZAvxF}R3iw*xO$gfml{tJ1|mK_D84-XjoJP|q)&#Qsr`!CtOF zS`(gjO7)LRahOGwMMBisVm>(h$XpWmRwfn-MerL2FF%;eQ3Cz7Iw>|pVlEVyw+a?s zVXB)({_MT<g7c~yMzop!3{-L2b^Hd6;oni?>S;_2!93JunTA<*RNH5 zJBnWAmBuW3R=&(VCE;G3KpA3-2>ur_b|pl>R^lA-{%5ZizvNosTnh2Dnwy>99}+qk z^0enb9^zdPL_R%z-w(+$05>WvhE&tYpSuU&B)c5u9*fvmxEKb$y9GxB?qrQlmfPm| zW+!7r;cx~xSwbQTXV7TYYdcrD2;gfbEpV}nG)2t~c;D3{%y)b!>tapWojCwJLUNe6 zHojH$>#3}ouSbtx(WOsjqLi~YaH|Z=ZE1n3%BNB2-6lpDrOHq%=wnN4*X=c z3^%M)cH~FB<;V@8Ti%t&9h&zRvv+v*LOzZkMIqcJ(p^oKbhWg4cY$LDi4HNwL~UdY zA+$I&U~A=gxKJK_d-w_Ttob*5K6vPUOoSA(KP64I%!9@6rya3kn4cad8EwjFhs+p0 zi<(kwq5!=krE!f=0mdLd@HdNCWE+BB#LM=T*oZ3=bfQ_=yq=_uBPDB^zy%LKr*;Qd z)E~&$jmVw}YzrQ86;vo46kAs5n)AB%B4>aY5`K9>HXvBj-Xd)VYrBVlK^wiNq%RnS zK0cM?Rr9+LO^}%)kpCUMoe>;*+Orn#1v7qB z_MvnNklHL0Gh(cdpnACj3|rlI$slnmmB9prXlr-QG_7Wpds~rDKf;PDc=69Km}iL%@De8^r2m z5`>K`avUNQ#KN7N80}y$|KYeh`+O7knR0cFf%p9_t9V@UC1RwQJ#vdZOtdyA=aMSI z23-GcL_XUg|HP32VG@Zv?Cwudk6X zOwyS26m=<^7ew{&EqGiiWbu9lJM#a zj`na@0*MUg`A4Vs9RUY^Q$eLJ=^fgM5r)sLWd;XlBPO`w|DUiQaJKBoM+|8~oDmAZ zX~%!|=Y9=T?DZKUfPl}BPymw9nitON(w={Uw7DFKr(`(L!Q zyW!)FQ%KGHXhv8hs1iZ&7JbCdg96T(A;D!Ksch*{zJf3TIpc9^_fl!75C~PDm6`_M zYT+V74wTt7BP4Cy3mg78a1|v}{bV~cMfYQN0#{m{6RDRMqq0xL?PXH_Y~SjP-`gW6 zxax?6Eh&*`Fo}+0oLMC!6D!3ziv&%Ahx7ke#ShsdZa*LZD2VEM_C}l@gmROonu48) 
zeEag!5}T++M}%N+!w~855^zLRxq0TIr-x8Z@-+!*JNw%N1_4c7F(c!73(^GS;^#RZ zq`c?u!EPb!!;w9ZJ};~o+f@@V(elxf+p6bEjwJ7THeYLgsZa)N1>aflh) zX{^q$W4`Z<4Td2UP;6aW5EFAM zx0|K;pC^z=;B&{W6`mjkLYsw?OjI2rVEs2(ue^wvWiqlqL|S-^`PpB1CGD(hY=#uE zvZR_{>u&Bq2-KhPiOE87nmC%|Wc-qF4Dv~%B8Z4Oj!S1Vc$L6`NxJb8;zv8W?Hp@* z5<)Nu0Q5|*Q7yQ0P(n(q5g2`~i0XTUw0M%lnw*-YQIBZD?m(riC}bGd@mPwFJOeO= zm@eetA^?ydtdgEcE!e{0RpyU4*IX2Cr54=aKOw7-AF?~pg)jU;PG{dZ>(@A;vSO)t zNfj5`yDt=#q^63zH^vOXwXZZ__v=94{dw^t@4hXHM|hlK2+A64TAM>Tq!C2N*&z)s zj?k)mxMUcfLsSri=j;dtV-OZch7saGK46Eq(>_ibfZ4Q)NySgvqGZ%iF`07)hy1o>&CG?N7Nd*u0CkJoFKI;#OJH z&yd7xf{O!px8FPbgD8Un_3!^*ZkW~Q35Hp{#U`;GA`KkeBPwLxMIfWj`C|5rF8V~q z5dxJQcP2`5oDKOQf@~~d3j*5D9d(Hn?;=2`zv^z@xuuJXYjG_+yW~vqk^8t=gP&lu zc+ZX2IGmnZMw0{*BAt$n1*rl0bt&6XXE9_2)Qe=<+d#Mx@ce3({D{@KS>$F0FhyWJO7G zyYkt?^bpmzD9scSB;YcOt#f4>JE7@h%Gt46raf6)O2fWf25aGnpnhu>wbKs~n}e%o zNj_zwt7;=$%xtw|JPH=M|{jN)>9B$MHO3T;v4PQLJB zDUNNIAxo|Y$N0!9vvMjIm)IKXQWgMfQW@VjHZIN`-0uVdN%?%cabt-*_-{>wB1)(I zzM^s_8TT0UbMqGSBa#%X7$@I3d*5#|OuR7q@=XQJZS7X%vWFGd%}2OKn7tsoQRI8! zDSiu5-(lNsfAK{C3b94_TQ|Dq1t*b61R{_ajEP4QwXdqkG+51tuG7QW0_P}}my^9w z&YhTFU#~VAPHK#QB^)S#DOy`yI?q+vA8DT3ur_gH81L>QE=5`*jEc~K_|_9wZgIX4 z7Z&t^%pCO5c8Y{u_!S8a*uBdbR^{&S+*f;4_gUr*^%{ z=ge4-RChiHPWFc158m7)G_ZCOx100Asr+2CBob&J?9rr+e^w~; zczt(!FG9>z@l)ol(@q#G!46hLeS5v(c$lY*lByu8oaYW$Ls5aUvMs?0vh1gtbFb)laaPulxA!M5Le6Qm2+S8x#$GNu1d9N&~K8a;A;Orx8*=@Ew86VO9 ze6d#9;_S^EdSa2ij+;icNg6RLsvU;>3_Iab?EJD2UXY<^2m&y2g(D>uIW&lKgjtvH z?10rKlHLO%rqo1yZFIlWOrd5;KF5~#8`WRDLzOyK; z!u(nq)Li6z=;ru_<%||##@YdiUifwA8WqEhbFU37qY(v!i zL?CL%H`eQX-~?t4w$Zl)CS>bI+ZeaDpT0&wBVw@xAiizhJfA?6hXF=%Hxg^jJ?Y!` z0Sj^kf5bVAzGl8*{}m{f1mR_O0VCdBdlI?IdSq)?^~<-)Dz$$!m-GzKi%gm&`!7o|T`Oa~%S zgvWW@fn7YXJj-Xliq$laO zDN)Py_xoEa{`tJ&OHr-M>+#YQo;1>!U15sdZ>{iayT?Z7DXOJ_n@YBWwgO3yM1ct7%Or02tNFF15U{iBm95$a*Eih3U@@nB z5{@nHl+E3pCgDWhG)gi_^9;7X(gOIoh4sQ*sHAYJXhzt+M>?@`L^Dbt#Ra1r9z698 z-zNwlw333ZR-l|pT=cLL?#1AIQR|h>{pK}-YGE#)D;EQ9cKSA_R?JZQOr~FOy7y02 zHx1HaKedVj1h9i@4n!UjY09V>{@6s$3>P}6yok#6QS2%zF2Y(|lN(7S?7;*#f8#8= z(Xz4-XSxHG#5Z3Eyp=QOar}`kk;j8-G&(134N`^Y!FCB5-ydDbV+YnCRY*0%bYRbt zC+}{R5L@a1+orA)daDAy-{3E;%mewxcf1Z^FR_aai52shM=^~Sa*X6o*20Id z;L9*Wy;r=>d5`j3HF7Q~^dAmNx!d5?*7U(cI<$uX|K02W=9E}OdBMwxB0th|*TKWp zVL$gWeD~czh}@G`2=3Zc?4wLqA-K3VSqd$J1?4D#_!?h!Uy)1Af=DExShTduU7%{6 zLXb63;|zy9dE2peyp`Z3T#Za>g6Z7u>~CBrdiNj$dHxlogKR3IeC#n|G9<`wQf8E6 z`|K}@x7damPi_iG-Vp&{n;^l2ouTs3B+4m%pwK|G`fQUu!o2ZA0t!Cz`suG8;TiU~ zT7`-kadY{3_6Jw&m6hM42RrSE%Y83xWHtkcG=!{gmrE!#yt)-RHpWjh71mlOkMfP*HO>c!>fnk z>H>1~u+;sb7t&q6N8oTFt4_KZYl4wPEPhB9wdNLwJxM$X94`6~KKnD`7Ov~XTA#1F zVIPsu(fYuAKoELfsHW;=n)>piBKV+;w<1e7NeT>~aJ#hX51`KTK%zxyz>b88il$

    4c9E|Utg*!@#JNT|W4z1D_SGE{St1!!Lap_UM z0l_QNc@2RTz6SoZ;E;!TirY=GiVh*V2`#wlPCclK?}Bg zq+YXMw&G;$2dn53$*#6O*;s0pMK!=Zc#T~YZnqN`2Lnv%psuWe((?mtBj?WCat_9 z9+&%N1~8~8yanevNdhdg<-KlY162tO^5AQH4pxmL?qM6>3+#42ofYg5af+% z_egZxR&Wpmfo!R}NEW4P^VU^G$sSwdE|Nv6B$CA@S-RM*V7g}}zyLWx&a0e*yixmp zYs+@~eg)|A_sVbWs^?jocK6JrXYwkV(>_(K|9@ZB-h1tJdDdF$C#nD(JbeM`#EWjZ=L`1qluJ+E2B!p$uj0i@VKLxR4ac&+2Uyii8G*8Lf5?Q$nC zIz9U66WRznDOUf7@%ws!DhBF!VJfVP2vO17w{YaN&H?6W3cKntskBAog8r^WJ+d#3eN^!S7& ziX`k=rfbFIUH~bj2iU$EFuc~@@u*qNvgs!r>Z$UCu=1XM`2~4tv<5$b^YhZj{O@c2 z_nC+VruJ{ge-O_=)wj1n4YDS$X)44H3@W)NHUM{th7MX*FSSW@lIs)8Scx2kbmiwX^dINtI+R&osv4DHM5Atpt<)!afir+2B@ zHDyR+fAX?!WMqod1=5!>E9ub=TcgHbVk1w>7KRL#_wsb-{L3rh{nJm1<_kTGTblWsP=x|6Y|KW`a=CS}8&`S%b72wZwEr>U`#+rE+xecT@gotv2rj z;1*`#_ufwEHrIs3EX;G+8M1u@=URF|UaE5LL|P>NpL*X)8I(yJCwS~lYKeWSHH8ev zc3&XqSJseSqu?;QkU?9oL`ErZYDx!a3XPTdrieJX>_qPvcd@l-Y$&sO3q91(7kfobbSaBp?7OD@C^P1I2ak!eH+1B*0lja`Qr zhJ1s$H{#{>XnWn_3qk~NaZNL!B~i6D9+Iul!c+;r`+*hj^|r4b9BO4ttFMx=L@nIHZJ~8R!oh5;Z(RQ$j^7q{i>XebFVzKwuNPG6$%G|OY+dUsPN|aKV2?dM{{R#LuDp_ny4KBJg&P^DQIw!8Q z!c$`=BGa3uP@;FkmsCAb(vAC0MxlP~#HAXhrE;{hg92zF>IuO8uS7WHgWg?K5-%uu z3dT0`cH8UCe6my2`lJe3qO@_jJPI7Pa>pH!hzh+&^sMq=uxY;4Shg^y>JbVXWoMI; zuNoGm2G*B+;Y|xRm>66JVd3)xflzyW4=IeJR*O7ESxB`Sl57fb1QKTGK@U@_W;)m+ z|Ga29IBJF=O-+Wf9H(7PqO!bSNe%&+`dZH-7JL(VEP&qo@%ax4sF>Vf;KNx>1s-rYk94HdhXnGOh%cY{}lyGO$dFr^ZPEKHK zJS;k0uQwSs5IdJbQ&Nj4ix|4K7Mspdc0^v( z`M;}DwHV^+=-%Mm>VCA<`*;7mG4{{cht&wfkfgI@=RAh@$X}>gvn{?Df1=0QR;Cfn z(<9S~tnONfz)O>0T_lMr8R~Gc^^8BRuUPHFpA&jDY=ZOxdhhb7e4Rmj{lZOX!mb9! zS!0D;R1UK1SbwTpaj}JAUGsR(VSPu2zQW(cwJ?p70y}oRn%)?NqNLY%>Kt#DCjx|I z#CVgtbu68Bf*@69OIPTsamWBx?4NN3xGe()L`9 zOJuCN{`2W(0gLjl`X_pDnUWe?->7{$N=Pg=F+0|-P+Ukc7*Xk-Rnmsh>GAX5m%HD7MGE?xHBiLD)`lXk!r zt1#=JS(1(?61B%f4i);q10jxU5s zq@jL^vUmCn_Qm789S0)kuzUN)j$I;1&w6Ei|2G#j{tFd@mgQZu2-nGA2XIBcxVcd+ z+;E7YN8_KU{H&JwCT#9Cb+bG#@-<6(bbW&^ACBMCQ`a8V;EUII--uvv{gZ$vZQ#~^ zfHR6jtI;<0Bg%#W2aau-&Z?YqiBVG z4eR&;d;TGr|NQrC@Ry_hp;hOVo?{fKa&z3XOR#fbq5UAjKP)j&5!`DLVRY|mL&xn; zywSb|>}&T#cU^Be2*iYpFQ!~MgDh|N4M5nMN)JZkQZ;wCM$N`zSlLaap zb{&JXI;qT-#9|9X`fsgIbAPMk=-08Do7dI}&@>DqHwa3k#!AeN8i?yDFgE#I=h)n> zi}~LJgQdSW{!C3=J9{o?M((a{^?P}ut7CL;s051pt>rmJb7@{ToIbJ%nPd)Ra&H)t%_Jb@rY*l(j%G%Hm1n7 znW6R!`^(G(^W8h+k41GPF(X_rzVwp*%qmv;%0zkAZyYDnA8W7vjPd$SeePCpb>Nr| zp_wQGT1%ETQu}L;xbZZ1Z0sqPa_u;OG|pwqKamzxD{;9*maAcWDs@0@V{6OX;JWjR zV~w^+Dpb0PmA+U9IsG;EY>jIzc9h0@d}y&aBBXt<56LZ{7uF*MtEjR(mo+WONbHF( zWZYf&pvDihB^CkD2YO!(fOW1y<^#8*epr)T^Sx4rA(GAqphlx5+i#L*2YXxvy1={R z^UJO1SS6l3AKQ%WluLWeogx(j{DGD{C}PP(_5D-+c~)xDjLl?9C2Ej zMO6CD_&ptlo3z@TD>C5S{L!y!jX3sGjr2$U$u6WdcH9PF23wAu&& zEpsDt?B5?d27-&@jgvoGaW*Pul0IKOU0K!aaW9!sBG(L$fja279%0t{eD+KxI_cf$ zRh6I#--CbZIimb4cYgD9;o3D&Mzv#YsS23{ENl7Y!b>kDmqTH#u^7e5Yy*A7L4+e; z-IEsRbuA6)Dn!3+3IwB%=*UfteNMlb(oDtyzy@ zIjOGCOUqsTu(sN1>hpL1G3%re*j=SXOf#;o-OA&wp&@q{!9o}yB&L%YLGju4W&iP@BWtqvDEYJ<3P2OzOii=(Mxi=((V zG=c&LDNendg9!$j;QbRr)n=~EYDYwyw}2yLD~N~O6B+hB_9hU6bkey>GD#YkgmSlNt)-t|EUudeq#FHEYY zs>|M9&+1GmV&uDPldb5daMt`6GE&@kz<9{4Ri@V;0R+93D!$)8wB@mUO9%&G?DIJ3&h@8m(k0U#G76X&t!U zzmwO-lp&}EEHq!Ae566aNSrXFduVZs;7?Lz>!B5D1n68%0gfL!*`<}VNZ~D_Bt)L|1*oTq z;@mMi?iw5S6uZP2_@~a~CYdB6V*2`1YoA1IsLD8{G-)F5@Q-+JPEscqsZk>UYre9s z5nv@)(C2XWYMPf;v%#>l>16^_64DZO)GEpq#FZWciM8&F9$cH3b5dkMyC=+{6VpMp zN?+68AMyz;xyCcG^=C)h{oDG98a-@N{S*mRe7z60?CM{iw`2hHQ5};v;ov&DwFr=w zg~|G9+w}Ku>J>&`vfboZC;6fC2E^bQ2V_f)-ZnE_#37<#PY}fVR_?pXBUz@QC5ogQ znqR@D>tKWns`zpBBnQKA6PoPTSBECZB!mte#x6lU08yb>PCM z<7Hp(dLOsJHYI?BOsRE63&O`i10*qr`=sSc7Y&xd<77Fcr91g1pN(s&GR(6pD$Mz% zfZ1tHb`B4$DaL}O6OyD?V`Ht>DMIRnDE+;0>u5}1|{GGwx 
z$j;i5KTi|9HL3=)b(g5YfrBDR2Rlg|t~s&;f2|kW&4naJ<8TMNLWYpdg?Cp^cGjH- zeF{fQizM=m8h>JDW=5akVRlb7CPKV;V*tP?(N?KI0Uk;{q){i6njcyS1X?Bsl-PH5*Ra$d_TO>%vc^75&aQsuv3nib#z0O!Oa55! zTU`zPzz)ajJfdmbO(_yC1CLNAlu8dKs7}ECaT;=azy^w&MbnE78%I8$zk%f}56oQF zO(#03e`nRuuS?Ap4TeGvYA*nM!D%%5Zb!dx>@oLbwx)|0G-NC~m`1z@HhQS}q9145 zK^qa7!j{u(9DzHifa>W<`?kX9RJ*Up81$~%Lo0pTpWhw-K{F-uQkoW!M=9r_$JUps zz)#9mISA+2Rq0mSPivZND#|sahoHq$s-*=mF(NcN6sl{^3jE`r@CmZ~m#jX?UTNrbB5n&YgZD+d&-; zib1qal|t4V!x{i7>Lp4}a+7F&&`Fdfl*Z^hZcatM# z>kCa0te|CZ+Ucjo){@4wNqz6sP|+B_8UG=Nm3Ly`{v9ngb#!Vwgma z4j*X*)CV$~k%ob?6Y9}MESwhVVN&Ma+&;bBj4kd-r^E;DhCnwbNPBSR+zU`L=Gg}#_`A{h$+be}*{VZm~f zWp<#YCOlD9G#tE*)&;oUqe7AyEw0oDx!Yscu)D44wGxc}qEQ^h>GO#i?;xvbu8g3@ zpx&!8*kVH%7~;wW2|6AXUZ)E8@gK%NNbnJ2O(maCYi^6;?C@IZi`}Qm2h=*#@OL(G zS?Z_az4|~yCs5}lwKqL=qV~iDm^pGYU}BFz5$z-@Z)aOaj>ZdPL#?jj?nxIj2IKRh z7e_lUu>h4+_IctAC`gvNV3fTwTffv=QOAo&WSU;li@kW@snk&?a!BZnGZ!wN(X=Cv zJ;Gz|fX=l1s@{x^Xwj!q8%IQZsoMZ>C*WaHX%(Z7e(sl0N>NssztzF7M=X{IIyc(Z z-xnK89bea-eI|3gu1!CW-lItb2#6HW>LJpuqOQ9CQWeIeAdo6;61M%RR}|8LGY}&= zk^5lsUp+vAzPc0{2$PIUj7%a`-eYJRMd9^k@pp=`oS6}ICK{gBhO_mGUg(7k6nW@zwn+N3>^dPENlLDl!$sS*)#<<4HP)?cP$^OzdcD|Ib z;i!m!fv!2OAcuHU$>)|_NhR(a=WyUN5W2<7txV@EbLc?F~RY^6FrZd3!R^Kkw z9zxw4{O=W43@MBE9_Qa4eLM$Wf?RU} z)}mru{)%)@(JhlMr05C%(t_bI+4hAJF0|YE%W}`C)i8UW*um8qZJ^7lmphc5DF5=! zrPjQN()w%txwlb8`A%}$YZIYDem?$#s5dv*(c6zap>=y4Bljxi_xp`jK-4gv);~NQ|CpD4nD8O&OUcdqb@l@ z!>+C0I(^^C?Ec!Y4Y)R#o%RWmtS@A;i)DrG>*m|1E1oS(6M<_(g;)}ZT{XBwf^Cs# zew8GK`QRikxWoLnST!vyev_oNaeI;ITVdB`y?i{N8x3g$+_Er#lT;**>CqZZq}ACbl$mP zBGmvmYFJ1oYCr6Z7BUa+`d~Im7?3Waz?x))Kx?H{E%NPto!DzHYQ||q$oyZ7E%S5v z{W13j4i&t-qBb%z0#Nnh_`M3@`gbQrxS|7po?{>FyTJ2Q_K?g&*RPLXofJvcaE_3F zhH2By^34anOwb=tceOhfn)5bRkTlolhAoxYPyA|TSx}Qq;niuo3~h4G`-pfM(Wx>; z8gdEy@M`g%)&T#1Q>3aC~<1Il$#Vj-5R0+@lX^ zG1q#PG8z`OkNx5+h)OSW74ctfNSIYr)ze@|LnP`it>ykB>7vF`BGP;oua-lhsZHE( zi5vonyO&`N#eN@>Lz=CD7Pz z(Y40BF)~l`FDHCEWnH|1Y41BaWl{*?;T^3nVQIGfFuu%wF@L#{d6#4MqDv_+=0jb= z7xJz`)M&1p%8%DG5`e25_msBJ#EDI$X@!e=y6M$`q9sL3?kplrb}Hi%S{1%$4GOzF zJVFm2-;N+I8z5WuwxR__B2g_W8fi(Snyo}xE`2N3H5=~OsTQRHE>P@7ychEU59gBZLO%wh#xrly@oeWt(9 zt2bvD4}KG)3bUB$T5B~IAmDt5VYD+cW$ltCg>^6LkE*lQVppSJN8ZgyI@rhCrwnxbdDI*yVdmrhQW$7{o@C>dWU>p^PAiF+%$wt2dh5u&s(`z zTx?pGEHGA|pkizj0UJ$GEiA|tQC4faJQtRI*yIweoVQ$Sr(!b!vDwFl<+qm9`Fe&*WjY)&RB!>b{3kg`8hsO6gRb-TAU&|@Krhfh;Wx%8}> z+wBG4*BAjSf2j@K>F>m?qMry3GZ65KdEFKe=2IDmNS|LpL|ff}F{nlKj8m!EUMHW) zhxL!d)oWa(y!VG%qKjvq)W?m{o^~|`h-r2u{kYV!S09*${@QNZ_K6Ts*lZF8D=9Y2 z9K|i5vfkTM6P{?WP2j2;J$v@7K0fj2lNvTt9B%sf;B3lZk4q(7Tut4su9n`BAvK(d zBkL3_=6WTTb93YU&`2?;1&&N(cCKq_zQwO3h=IfBKAl%Yrr)a16U|kP2Ww_LA6pxxX88t)*7?S&bCKd6s-#!>g8& z40ujYUoWF0%Q|Ru)iALKXgURU`#?iOrS&EcFS-8STIgO*@=QyY=?|pUIeiCn@pDMk zL#Z%k7g7XPhfk^EGt-0y>MQcC=m1{3kXrEdDn?hU8}bNtS3SPx;7B|a8Xl(kg8;;s z7DSGip>h-YZszoJqiCn#@6BE!^Of%t4ET}kT`O{(mnVjH4IgX6M3hQ~K;jPgzIaTypN1Pa=PeQuCM!}d46W{785~A%tHz45 z`t|tJY;+JXXd4mQm*#Q_NGH-WBKVhq9{1J=XtZw{b-LcRNVZip1kmq z&EwlR$n%x}zHAs#q>0JFl!z=%B3v*d2U8s#jl8ahcMpy&!QHj@023O3J8jz8$JG!< zyRYjC&fonX>Jg|kTho9vTzyd{srXHDS}}OkSxswiU?uL%?0M~ng{t-&XaCESSM|#P zAxR@a@&xmzU(g)fHCNv!CvS$xm{AN*J*2t(%9_m4Zogk)p9?5Vvk4{Y_`3N~Sy}Z= z4L|Z6>DzkI5Y+nFpXl$QuGJ923C|A>I(zvK!Vdhn8NP@%6izIY==Ghq+O&4?G*SbEqmFA})VLKf7OG{|q<K+vIx5oF=VNzJn5Twzn?Y2=1P2KvN?eG z`T}}yw@P}%NRjI;y<8d@-pobXGZWd`uuZzZ@3cb*5Sf978^WNfkZY>2R5n^90@V~a z>g{$q;0bJYd|g6xaxM0XG-d@^ohD6W?F2Py6c2Z5s& z{juHqydx8<$pX9F82Vz|+_LOJDXwosTL7hfdoA#n_B`lG`A*+Xef$(UV=v4mU5fEf zdz?c-P+qn@^Z4T$4`$3rnE9$6WfOgPQo(cBg@`oTNJMO?74k~-4aja&eU8*-n)_0# zxuEB|VXxGi^Qp47Sf&B-3N_*f|Era$kfg zp`tc2o|Z(X(Q#ECo}bbYtkWh=+Q~_qhvB+aX$G{ z8?CXAakg9*>WHQPD!os|N@n}RPi92D5 
ze6Sn+76^0W3&~Xap*GY_UzS@{POw2q;-sN>j_-0p@{G+9<%tbM!zj0Vx@?rAmR(bI>V45u%1=LS)x*g*Np-qNT{rubdPZ(CbKssiMxu90od_i0H0sg7oNL(x0L&FRM$Gr7 zhi$xj@@jVPykGr0mKhN6ea<7pzn-Qbi+ZH!v1bV_ecdgD*+b{zypyKUu$bl|A zRph;Q?=g)Tfz!ozLix@zoe2CdkKa)@%`43;ni`YqIXWZkGae>(WQ$gji^`iH$I+`N zFKcPg<-QqzF3Eaag@5P3W?gZNd;jZ`%sPRS?&SA+u8wv8?*AUYrye3peX82s%De)D zv#a2lXSG6c562aiCz)*Y2sc&y`y@ch`shUebZ(KDL-l_e-#(X!`am;2gqec^%+is2aJlyd1XsYRljquqg{;IG%$|+rMeA3a;D&S4VDI8yojbfJe`2a2;&P z@Lh9>PB;p%m*kFK=x>ZHLdJcs{dD$`7esb44qs`u^#fjWZ6G`}Y2jLixPiEoljBq7oZZ*zw$vUm3u;QG;n>3bl$rmTa zRXqv;kcLH|Vm82hkV|sZFYr|JC*S}UapF`|cL^K*^OH~16jES>P+QL{zkZ}{IYXyd z0|J0edc@>qPGMl<6id|tgV5 zTinSEM_j)P>ix_$hc-NRg^1&izbJeD~nwTWwv8T$s@--aA4XIykP*$N1gve=Bm3D>sEX zw3oNMuU{qg_?PMckgI(vzzhpgQcrePBj7|U*4x<3k!&8~3(_z1%QmejNILo!Uyw_aaY5vUC{W0-_5Iy9_`6S2#K3c!-NYA4pQ=nqH~%W z%<dmqi_c9` zPy8cSVHJL|bQ-vXUBBP2fEze+wYEsbJzrwT@%tJcXYl_FiBib8;>B`*rH z4i9R@H!h#bqkMFXHtH_1tJpl)yY0P2inIZq98sINHOtr~ccV8&iRDVjcvdIWtJq^* z0;F5v$|x9$nbAC+IxG{=|Nm%fMw-n3C7-lQgL89Cv}BF>d2iDH2#EMU=5xp%IV(ha zO0$Z{$B_tBO$?4IFMb$os(t@D#^%qp)9JeKUUeAzx;hmgUh9&9r4NnfGDh6xo~7cj zT%LTY9|7M6dO_p4T<0T?BnL-F5(v@ZPW{tJ7E2-qeM8p!{KP`*Bq2r40jmwxB0WnS zBsE>MnB8{6P76%aUyeW17{gXrWkKY2P>TdopSq(DcdAfoB8qEhsqBmDorUCJiH~1< zN$XRbyU7>>>QV_0VW)9p z6X=p2u5RW%p>oDKH3!dy=%OBor=o+H@xWOJ6`-Lp-vz)XYChpY8BOY@X}nV?H<_gH~jC9`mt03&`j2_zGTK>1ll(eQrG~X#S6hL;vs;Wj>dshg_o8M5uZqeNdNIH}RZ!0l4lhRr_?yNEqOjTq0fy2q z-@SG6a$P>JZ{YH&MGcOF+%6WcK-E(QOKj^YKkWAH<0LFtV}{>m1oS9N)6~bm z9tz-HTZ5o|B|VTj8=(gRSBGm-Z%zZ9i1yX)Ue1sug}r!5IGMwtE&3}B_N&OT$Q7zN zol35QPhNlD=~Y4(Ad|g*EhNLDh{)5pz=Q{4N3R}d?&Ef;gsok&0f(uS*x|12;Jp>K z#TMbh_AHRU{`mdx)Ijm0zprUvw0*}qf27R+q}&D{>36(j5BW_1yP2W{p_B19GUf;lLgj59~^&3an zq8oNYt27%P-nzp%_8g5i995OiaW%r52tQM;2{DP(k>|NC?GQTt4&Gl?@ z<}xlwR^2z$jV#>6lWB9}G`I==S>Ir!?_{c1Y$^lC|Mt#}*)?RO4Z;@L*KaxT-~@vKqLh z^QAQcL-cLpNz|HNUR`c#mpuA}o(Y9Dt&re?FbQkzB2$zlR>-_!Eyq;I|LSy zX92x11ZGW$>b~hg4b=vatlC_i`vuj9bJ<9Y8ctPFY~TK4vcsqDRaOP3sxL$W)pe1E zl>)`=#j+c<3M;ECO?^Hyd+(k)EOz@97`mp5f~8#*G{#uZcF>!bF_P-RtEr3Jn{l`x zx{qefCK(>jO|_nL=PV5Vg&)*xL9i@Inff=39ML2Zg!A;jN;uIUw5}|3aZHN@3w^r~ zTYudYNg*3(UU*?`NQ-uk?H-1Ck zM#m8@s&UQEzN8;ufbXXA1;GtdYwd6DaslZSfICdDAE{wBDcV+#W8Q2T7h|9<@MHI8F&C1`FUB(7u1^J#wVW>>v&<-Mo@VpUIP zFv&FRmY8Q5ipgD1j_*JI=;P`tQXmnNr+nA#&eyXOCEB24DZE9Rm6v7n2x@D0SOqMsrE=AuGn?Jt)uuUA1 zuEAG5nZFXI&LycaVqAOuzY>rCQ3XJey0>L!&}g?s7{XkTx%F*>DE`dH(pXq%dv%r7 zO5Ae6L5>U4*d~#H3NIz)nuZ4)rIG_F00E2Etr?J1VEI7V^cPYAz!}1FsX?KdfWH+% zF+OEJ-Q$f4hJj@|xS?lB_IwS2B18_P80}gW4e?dvp~AT~c5CHIW2p|hn?g)91f;`V z`-p^D@?yd$zZkzSx{dGOAT8SgSzGG{EB=;r-uQebJE1Hof*1c_B zeiT()Mfj`!kNlD75=>e(Dkmlt+v2gk&v?3prOR=>x zy(rCx7gyWkutL?AQau`EpaY|vnFzFnc8IAua)nzf?3sHf<|4##ol|86{a)5d4 z3EQ&Tzepi4`^%<9>9jtiBOF>(Ks-M>3UOaMCvZn4b%{C*34pz729LGJ;4E?h*g}gK zTjTQMk~UYzYxaAKnTyYe));ti4f?nBb+~^w_ORFmCEPURP+pyWm&2n+5Sg?+(0GqW zNArYl{I9T8DC*qlHNRdSv~FzUlgT^*E0~xdgm%> zNwf1`JyJ=xwOFeAF4roi<u` zx|cOFu9x(&a2@U+8;}UE8m6Uo>2!VP*ANgZrw@6b>*EFHX#B3`vCvJ70Z~W=W*|zH z@=?f6-b36N#&Tg*3|ovQfl3uwhcxoRoy=GPelSB>k8Yx2>kI)VszMbIqHq)Ad zr3U*JC^)`Vy7KFQ-zlMLL&K^G%HeR5>>W zM!%fw2&=RiD_B*eP3$sy5sT(W`E1FCq`|P@@m=W12J$xSq-qAMvZ2f2lId;T z^}SGVoB;v*{%`a3M!6p1~#532P)Bx^c7|W;j zdUrR>q+12FrCo%tMOKTros==mvJ`4SxW4(X=!tDEW7BWh%KY3z&#A)#JWe!F*rNLm z>?=3o=kebYXQ|u()2EjSbl~2usD~PsNJ`>8l4x*sq`=$;7)th!B2AcRy*fH}X$c+j zm6j#~Qh(uTaWfbmVd3~4Dht*QVo+-G9jnn`qeW-3qxXMq1IxL8)jtg-S3zUGen-Ed za>Py{P+TJbk?W;J?*JXq^B?gd?u2H*$_#@?JbAp(`=MYe!0=s zd*SHQt}F`SEBXapHRB9Z?sbbX0an&O3vUH6ZnXhP^x3 zu%upKV-qzC1revUW{|!(3-8ZS9A-K~K`^;HD^Z(ZNP|AGQ6#a!msr zJQQ$o53+9YFRO&9um=x*eE*seAmJ3Ze9=D`W`zwlMkO3j+C}{mjrpTW-fxYaa+$QS 
zngeOs_jmHOQZ<47m8=G{EWrIi3X1H_Z^ti-Kx6?qMvK#C#inoy1I3lV1uPmZJDlQ$ z=d|c(8tfJ=8dtQP^i#1`E4{F~tWH@Krtau3Xv_#F=rc8hZ0Y-hz!X0Vwqk-|#3+!1 zURbRwV-%x-6|cBP+*BKzzwYyp_Y0;)Q(Q!~wln#M(-bJ#+6dk%k4!_kZIMO?+o*BnwNZ zEz<0)^xEgxFvoNZvk-qj{-f43 zR35U8yU&8QrDwo=>hr3h+S7Ur-nZax4)T<*5ii>1RG29d0Du8unES+p(}V87ep%p<}?~!zTEOP zKED6L3orQfH^#4NiWsZ+aX{#60IqkE_AlLYt^-(fC1yoIHTA^2-#vL($7Ozq&4}13 zNT*Rfe$jiWh}M6|H^2Y840wraI`=v0oU)?x1WKIjKMYx?(fl9=Uzrh$Ic1?fEh$aD z<&lZ1&7g-w%@8fk>LPnf9sG(Jga8SVied+`IrJvl)kc*KIW)FObeyO@N40r2g=^tu&wMig**NrheftAIV5`+IO0B{XwiSf`+F$R{}QF z*d_&iwCN{ku)OovU`--H0IyhQzqUK7vQ#X!rnynXj~9JXR!b^pwK6TUBzlA?%+Ra( zFttU-C2tHQZM-AeT8*@>;L#qs638f%iDFXTuk7&TFRqNGA#a&G%};_JG_JKtE?JE$ z>{UJ|TFZof-8zY~N zrcXG2E-zr%C{3sK8tZB6xr_#urzZfObsqUm;;=UK+R>@PxIiqm5g-32NRRF z4-%tP;S-Ik9c@&WkefyPVPWv2Z19K*z;Z;9TXsxeUDlvxE#wUYjp5wYUyT5M))fZE|HYrfe@ecFyNZv01{aNf9N9)HGS-+8DlBIOWbaHptao8 zxJXxSA5LZ<(4{;$Fe3{hm}1l+W}Qqo#xuS@5UUcwHaGywD`H=G z#Hp__GlqA`lL=qhf$b3sz$VmByI(m%U_g-uoKO?(ij{>$U3sUa>QlynKXn2A>Az{L zgG1wUFifUQQ9zI%`l*9T_b2XOs~S8zyqg;3nV)KFEUh+tmBB79YMSiyOu@j?zCE_v zlXcIH7AB%p{>S%8FqiDFApf#rJwBT#Nm%b6d{o+Lo3vB6)a~tvKbDfuv;})XiE3>J$y(J?kOrzgeGGs!O!>CNW-H6 zqu-I3QI8^?pRq+GG;dqIBWSaBJA*zVm7Q9FWRMPZlNa3sH~r71%qG z$TkBXI--k`9lTB$^DAn#9o1p=WqhBSc?#Evi?R-tQ|l`Po#V2Djc70>uEaro7mej# zq&#DKq~YRW#@%;xPVn9Ss!R7h#6PXoxkp~o&T*Ps{dR4piq*blm2Q!vI6O=)LfFM5 zxvbuM=&V!bKs~4WZPt(->U3ie*1Q5{GybCnGO@y+5>Ac|DhTQ%OQKHI1-qqIRu`IT zkzL72^O&e z<`331gWm%HFmZRdN;r>OSs5N4Sea*{79Vg6YLYbRGv1FJ#QDSRJ2~q814EH;loJ2k z_m6nnvkA2SO09nF|0vfzQmS^7ta zNsQ0Mr+%h=G4sTXR`!SJFInI&h|l~o{#LEwPk%aFi7van zy^XD&=tCSsi60xo4&m$P3lLv55l1^#&qVGic zT)iJwt~#;8?K{aDxO7R|QbuEA9u^1Gcs0gR2xD_Ke3Yg_7cJ72w&uMKx5{2AB8!tx z3?{`&J&nch744x;L8`08)p;$|+Q_Jua2QRle>M4I#QFICIe@5IAjE~8N*q3w+O)(z zt9PV#N>SvStapM}Vo-peQNXd){Eho^k^`|)d{0h4-G zpBp6O($M3EFdrjvX?8(C&TD##w+9YLTg4EQQ{WaN##jV>{3m&3PD6F`5A!9`-pfxt zw&g}Q*E_Dql9XQbAfmViu#WF%adxC7fG>J5Y);(W8Z9QSOq4m{Wtuw2_;>pJ5JhD5 zxdQ++=R^n)X+hN-lkc`g-t}(te->l=iFFJn?X1RxeXJ)*O zA;&BZ-hSQWzeY z)6Ef1obLKQRqwydofNJ&wHoTFo(!P(CYDywVN|>4aI3sM{z4}Z_Z$dOlJY+*N};LY z>ZVw@TK?{x@dxTN+{@K1`Wl|h&p9S!$`Mx&11|plqJFA{sB)ms_#sSLL=6~|Iu6aE z@G+5kwY|`6i&FC|+LS1>t4pg!wKi88RZaWi39P{5>s^=a44r{;nnu{b7fE?-Bg1oN zMXu(RC5;ww+J|`a*ambYt2L2?=z2s8jX5fDb|meCb}EpcC5}}1_4H6FI-vK1V11LP zwbWe5iI$Y58VkN{%b&!qzC5-+4?2b!&pPr?FJ$9^5z)ksHw}lSEV{?MADCQ?4M`>w zMn&oEG(&JOk= zrWbW&)iN%A$40=E!ar1l<4X1q4e4t5%k-tc4}w&yG>yaZ7y~!i^L;meNn1F)%|koH zAGTYCfB~*}jcJv9P-+=D8{KHfJCb^x?lqDohmMrb!Vjs_uGNJp0J%YqHrnW@>dQH&KYeYp{q9w9IeUcgxS*os71avD3{px`*HpadX-NyHydFC1I zggbji4DAg_Bcs2cJ>$f3QDoM1<+}O($;V=&cPbD)>lP^@-`BcNQl^`vY~WUoD5UY^ zKk(p2M!a<{H+15$07ZI=O6XScFtK^msfli;A>DKloePhg)s%N{-`Fvy#v2>UCN6Y{q3=%L@f}*wd02{ z)xV`5Sf{k(b4Fpr!#&Mo_7QzL=NPj)8&+N>woJ40_S(G8LnuzLs|@BtFMeep|1zFIDwy7ATXUBY&w+xFcDdw5?;8 z9@$0~R$vdNo6-;`==1M?r+xzAVO0L#*!+lkd^(suu9zd+eFOfR0wcbSxBs0^#g8JM zPWyd-{7t+eGNfYU(i$x~cJsOCa)OabTHiLY=nO1k91?vJP+&DTI4Y8=Xa%dpeweq> zV7u?wfNjhQYLuCs%5sMewNcqdL$Y07*2d)E}IDq(*UJeLj9$y|puUH7o5{gy7bL6xgO=dzN1~@evwn=`kcRAF7#a z&&_Bjgskl=39j+(_;Zb;SiUU073(RNuztlb0(8UQBK^_>twJc5q^eOLj5WO-1q*81 zMHplb!hh#>mzpd1yhJ_cOUo4nhea+)brl9=jAHs4X>E}7bp#`pQ9sQWI7wyGB=1L5 z*3jzRSrCsa9C=d2!VvtDvl?;SoT7K+orEXhu2z@1jyAYj3BOavlj4t_ASdy?vC+Tq z(oZy5`kfdIG<-aGhGuN^(cU!HnU^$H#O61~55jnV@}4?u%{L-bkk*Dn(C9X_pU#{) zqmQ$XKB^^Zg+8G#$W&?a>{1$a*G8%cfQoS9>SDrU=;IimFEmHnpI68fz(~~L#Tgsf zak;(Vw4pbL$C&Lk-d2?JG@XZARg$X0A4P;U3pdaS5j7a8C^z3+>xvw6WXIu}6P5pj zk#G`Yq6dmSE%u}ugSy5(Z;rN{wz9wPF=GkDNv0m*7lD4zw1E$cw2}DgwmK?VZ31PZ znUiDUtM7lCTfG-n_{2BfN8aL63R{F|aJ8ykv5iCaD?2oZlUB1_iOt4>dhN`&TWhf@ 
znqDv@{TN%nNW~gEux)*8wpTU54i*|fJ{ra7hT0-U*}Ix6XZyOZ|42|Q(%k-fb;6+Rtj_aoOCZdasTu1)O@fdYzJ|H9MYlCW@uT zvuX$RJV6Ao6w4GO=CnLBb56oN0k@^qj3FZ>$Q}D(w@c1B%_5|6>LaMDh|@{lL^{or zFSOuiXAG}AK<{D2)!_mOdBX&;=}5Qh#|}eUXkpjvs{Q9@9(&$GW!J0ljJ3jP=&27p zTCjGb%+R)bu1$EWHg>mUxlLNmXX2AciTA!HvQ?uWw||&nzuos1AEnwhI2^I#{2ulTRO)K!O2r)@cP90Dfu4~`6Q?$Pq=IGZW_^Ngm zDt1YOqK?^Cuw79+Bo5E$O4By8*o>mcLUT=ObXaBLG|x?{Bjg|hT?Z%DJkR|^pBA3Y zy|i@b^5B7Le49Bo;HF=ke5=78QcJVi-M@NDb2>t(PBVMwX^|e6iNmKK ze?k?it#YdOp(U#nO6+UpuJE4At1k}GyE z63zRD9^b7*(vl0l@8sJDjyxY6+a`nnSV;l6r2+yZN^r%0J$_TY<5fzt6p=)~J8|&H z+1Y!VU#;FREY^|;9;aFItw5!R)Y>wyl6U0>rRpZ2=#;Dp?f zrzB@qEM|IgG4fQZ8*K2l&~pn`0otdbb4Bw2Ng={w1k)@6es@yYT@QLzMwc3Mn)UWb z&vO}#GILmX0dxW<=~NTY_}n#Ll)svF@vPp+CJ*H5Z@&Mn<}$wjt8?n4n1mmQfXEk< z(n%W4=16^peJ@Rc<$h#J)_+B-p$c{OFRV`pp-%vNb7hlxJ7hZTm&&O~Ej=Sf|9#}f3iXqyc zMYNJTBB>NXgs^}0bh$!Ov{o*4)IRL|os$RZ7=VI4R-tjdTVx*19e6~=ClfC`_IWyh z*sXUhW`;eITnC~fSM0QDFClkI|D&vb;GnU6Gft~jHy2Rj~NYtMPw9(!JUU~zq= z=q&n<(#@XOs$~1y4_w-y@+)*+NvCu zy|Q*WYJh*i?X<{fmTGTpwW>x?SW>TwC`l1j>jfXJ)yQ>>`T7aVS-`Xnnqg#F7g^e?u453svj{O$7gX!b?Tl6D!T2l91|@xx-$M(73li<# z9%!^9^U9^>GI<}=D7dms8>2eI?PD%q_t?M!>YHvvTGiu`c=7BR`Q@h0h=u-kud2R_ zu{!s9F;`XCoD$}wmTubMN->nWitb-MtVh<24a8iAh@T34bAmAyCE>Zgi*ZW5DXJ0x zpQJ)aiY&%jM{uGfHB8}1MOf_4+^DN^5l0ffk0Zgii0u4y{Na!PD{cW+74Ico$w@SbDgdyoa7Z+ zV}5Pj8{(buhx*LAr=RK#5??>aL57t>jTh;7V51@Trb{Yo3(G)&@+t^Yqs~py%)0O#xcs@1`y0_OX8jSBJ&8YGwrk}C+tuS z7V)i$n%)T9e&3}JCeorI8IM!Cvc(6(IFWK6((B^4Q~GfI}#-^6Zi+NFY8`{57PJxigJp2(x@nd-n(n@g*qRD zk%f!hglIsTH#3zLx?auX(e@E+)M5cEyrXYn3^=sadg{Z}%(qCUu9aA8uXa~+B_~Pq z5Y6-~_uafm6X~BYDe3DZQ;hGtz_k0?Si=q_&avrC8dGQ*DqxOtxxYUW|1034Fq&#z zhWo>n|4&$rrgxQOm+E~fk{}XbTb++seGL`MIlt>guu`_44jgDhzFAWTCmO&25n+Fn zp%m*-@otj>MHv?pxGc~5xoeN9n;x2)b~Xwm#hz25=cYPzFq>k~aw&6`ErDkn znv=i_!$Plp5RF&NFPwuZAI7s$1v48*Z#NBeHxbKlh5?}(~)Y; zLmu36`iiG6=@GY|2}r42!6eUxHTN(c<;<(n)yDs z*5Jt6g5hN;ATvDx$gCLYv>#ZBiXN;2PS|vNA?T5ORnseSr~Nr#k2bKyiN@!zsSCCR zMI&nMGT2eBYnzZ*dokE@)%AJQ+qNXKlxYJf#E>krc%vYgC|a)fso0Q1D4L8pONr6S z^L62`w8Z85))(%8rB-&dH4*YE^1$0lFnM+}Ids|>y<0mXeA&-Oq_oUuky1&wB|5AO z_XC8S-IaJKt~52)=`5+S(DveO8_BTM7W|V|2DW{DV5}XI?4fI$2G~68b|k#}GYKZ* zCYjPNJ?H-tp{agGBSB^*q$p>FE4-vI)l^_VKSY%T9_H3yi_A8;q*o#^sTtteWD$g< z1T-|=Ot-9G&H*A#%7Gixu53EZ8#RsskDMb@GEyxFHJAW~*t0pl|7<4a15<`C&^C?j z;T;VNFm*U7Y1xJqyt|q$r)yFGPsgnZSI!^N!%1!By9;LW>B&d>Ie2T>z^!Nnge=ST zaUpYdlHZarGJWab3#{ivI6!3R(Ga>6Mc1apnEVC*vrA&ovu^;|FAe|Xh(3kxXcRR< z7tlO%-A$oZwGqOZFsjTerCycBa1~;U?j_G_2JUgC_Y57n?aPuQFxoR7b!l~_f=Y&4 z0<>9d4ciNl;a<}$e>(lA??~ZADbl&8`c++}96TP9r&ndEOhnTnLlczIw{GxSFZ|%< zK#MmuJLipuTJ$EA_EkYxOt)=S0c897d{?cu_TzMm50F(~Kfr*$8h@&3;9!51=(unE zYh-wNJI~P3sMLPi9$9q7`t-&4Lp_g(Q0cIdd9}*oBr}CZaq-M!O0HE5m(ysd^7+f0 z_0%?2{~->Z59;dTVyEee5(erIj)w+OlvukA-rT#nHOQc` zlAVl0`(e6C5Q=VB18ZDS&)n08J;$;SoJf&v?~NF@?Z{TN~Ar#f++S(DFh<=hxHcIfKBaaz4>gg^D2=f zwsc!&o~;50q5nskeYBP)q_bx*@2IPA{6`)WomC=JR$S>g@_yI(>wb6gsrdW)e!vm} zzL+B`#m2ffgCSJqD(!^&U`Tb^n5BSf1zbW14X!5TNiaExsDJEEQj&40Wkot zHHt_*+{*ZCknz5x%hCA~7WCQ4+nPmq@k`zaot4Y|JNer*p|gnivsv{LK#*k{go~<~ zJS$=^-}Fg;Go|70{_gv~v!fzP+E_QbWvKz73P`~Gb<%mLGj=-RZX!!N>^ItM5mFw3 zV(N|TL#`+M=tW0-kO?X$1&fF%1+|A&xZ%|3#Y&GW!R{lJ-{ul91a&|`BC~@b8KHD1 zmV9enC(HT8(Qtol;_b96j9SN8sEWO`J3{^QRdjad=OXjIE0QPVdk{r7H3)(YeUdxL za<_)Zhd4wc~)KBrMZvqqu^9t z)*{!DS6IKJIS{a;mm|kA;-lyv);_BPQH7ArozO9ah0(SijnrQ@=piBN?{iD5^G}OX zoe63+W&OzZth0$GBBDS9Oq$2$C4&e-(PTL=zLnyl=LX9PyTqI6xf{a`FX|jdE(=BY z&TI5|IE#$bERk5Di{`OvBfYa=ZVP>&9zY;@(8M+0099K2j-L9JX@Khq96`qAvlZXg zoKb^{u&$$00=R?W=hnbB0v$vz7Fts!49GIUT+0we?6W=t#Ug4cg>_6N=595@Fuh~R zwLFa93f`WRt77iZN^Ef=ouY=dpDTKH=5dXXE$Q?)QHxUBj|HTeY=5EE(Ow*2*ygAw 
zeI|ik`F-NplcI|;j6lS~ebwps{*#7091p#K7oU3Q@y9g)6a-Ap{zlyT_>Y=OKa)tE zoq0;*3fl(T!4K_uVv(~trvgYM2h#L`*u$Gz5wx59&L*zd0!6aVfc3WzSuRPt+5slb|k}^?E0?xeT=dKzA7778p zz?U@0ktfvJ&s?t@E@kh}dH!j7T@-TB6OUdnEDgZcpd9N&_Sbi`)F^8Gh1x>9x86Os zJKWH~alm9gim*lu^JZ)91G1VVJlOv~adh3VgmiSf zcOdmGZlUDC^#s$sJbpRzIS^yLf|jGlx>Z`1 z%ct%~T$Dj0jhtB3nmhyR1zpsulBWiAOw5X+|?jteRj(0rxJoVKb zj&hJI>&tT)jCyOhTe!05^WFb7uB~sj+}lM}7j4iuA=evPvbACA zW3P|FsH(^IH2=9a|Fc)Kh4mx+OJdGQN0T%{#*8f$xEPz+XT`RU7TO`|!@Mx*DjIHreZVYL7w#8LWp`kQ6Z z^)OVgO2|iYpU>3uC5MFGxf8RH93bJ6*6h;B^suZD-SZ!;k1|t+N<8hp##_Tj@t~$!7W`u zTHL-XsF<@;gtg%xbMrTUoG|NZYoU8t5b#CS{y&D(+P=1eUX#^)z(2O~hoJ zyf3XOgfgF8U&H-T7XNJ zavG7wA@pVKD%QblpuZK~Pb~wKdv*Lk&&`~FX-1!UXH(tW?3tq2knUh7((%S4@y__= z2pkw2eub~%{qKJ(vSC1t!jWoiME#D$@z zLQNfZdTxF2f2S)u!eUakSV_yw(hy6ogDjRCwwWLVexRPd`}?sYj-;MX?;^Wr55ic!3_7*yQvr zHd|5@r+@<|+Ob_Vo3IRqZ8!PSYG8?KlSub^jYpn2uie!-_60Yo)upcSj;*ATADua3 z)4tGayBpTNyX53wZ(D*v-8`H76)~&$at()n(?if0aYkq*@w z5(W=y_n=qx>Ku+}Jltd8Pc^aLd_KB4bcbzF%u(#Jv_XdMbjXC6C^1_6yKaj6j% z+58jYf!f|?SbUwm- z-32zJQ_(1OQ3O0*W^kbT;ErmegG(lBwUl5|2xfT&5k*gym9#t*&cd+TA&zTPEHxKx zOY5#K7!w1-rff>iZ=xpG{`_K6JhT^sM4f5O=f)r-iqGFL5^0MvaL}y!PBdK)D@tN| z;HY1WKeiNKa}S|OQGgIBt>J*BG+JJLoUQY7Z+NFs?H((H_#%hCY*vlAgZ_|J zfVtrYhZa_@&7RlL_P532u-6T@(n-?952hK9fS4Gp_+yi3NaQ-6MNKLy()b@Wlt64J zcfvN3xMqw|c`nEIfBy3vO{3n(&63hmy9Xd~ZxX$r*>fY5Y-XC)UC(uYXyZT&gObL? z{=eK@(d&@sl@%LHpL{&lhsiJuauSq=5xu1)8k)76*>_rkqje!~z z$Ol^G&0VW#R1Vj1f}J>76t@MH&tbayapvB&$MpK#YFD%?n5h)~Ii3h_nrcY*HH5x% z*NE5xuP-saY6<2{Jzo>zwv}>9usd@Bq?{Io(`%Pt9+iE*wX(KuGu6yRBb^qOMTyn* zZuQ883uiRLOYlcM#O3(O$p_j4AqWGfBzQA#0bsRpmhunafiVesf`K8Lq&Cswv?EJ} zJ0K0>_)8HABnTMYT^mcTJ+J9?IlyojeI-0LggUi?%Jd(`FXtF=U`2>){=9{Lss7m< z={|pqf9gG$+8qKWwY<+=GyfqGWyAZpIKZuSPof>wcNpsqkrNGZAo~1|IgY^N>)T}@ zJN4n%?$?*{__LFbbh3V0!P6*Io!8+~v!hw{L#Wco=etXq6dS(3x3OzYFoZsoXKwO( zRl^UI`t=&68C&xjXUVLcn7qhx^cEccy)14?aWFkRz8@*c^$pfr#WNh;BuBG$8QCZ+ zt%w>Cfe_G-E34h6CJ+O@L0pX{3e!FCmC*|CwcY~7Q8~Fc4@TN8-(?sdl0`jb;Z4EF1tn5Q$MQ5pMOZ3p_qs3qNqCP+LDjo*g)00C^iYY`$XR zDuTcJZX;m6&&wuH(XhExJP+1k)i+R>3?^n6*GE1Z_qPTgS=h8W0flS01PfG_>hJ!y zRR0h!`?V7q1ccTFKpI=!5_Rt=a#w|acGas-lwBT%?Zj0~(c)@}PKLV~IgAi#k9NC> zzGc4}C#Zk;SAR-L{_srcmHtm zK=dQr2=`nbCkDpI9|~aUvotd-vJaX(6kXgGE{3o;dhW< zns;^eshwjp3rDE@U-sAOny;caemj0mM~H}gQHA)eztTO?um!we zc&d@No4-`E9JmY%+S>NUEcH5ky3fo$c2;jRb`P#s5!I6fdO7nW@Q4gfT?M~!Tp)Q8xJO>_d-a@5B|NAZ9YpyOt)A!r)^$cOvYfz`S$S?s~Tro%CDXoutP&ICYP0-N1h}%%8)c%IiLcZ;Zb_`AB<# zo3%i1z2Bk;n}!W~?uK6@weQKt`lSPl)Xsmz+c0@`OL+3Ww-U)Q_e$>j`E?nK1cgxZZN{l1KUoH~0@(dbzk-av{R{c2Jd52I&e8 z@@@@i9qdJnBwCQjgTXds!>i=X(Abu}i*Y2OgQ|ZPvdTs{*E2`DO3hd+6A|%~=)eH) zkKYlQV}@ZF(ph{yxnY>O#@1Z1tJHV;5|Wlifp(QSLjX06ol3l4Wmnn)yO6{m>|?bW zR3h^5po$yY0UTQ01v5`MGz7Aq3QvUs;AwSqsL&UOT3;iN7?b&lE-DgYW-Vzuc}A2m~vKg#>FJ+j&mE zby|b@(c#ahoBZK_xb0Luu0mz9$jnfqqiJlIN+UgD;DVj;gdc@K*uX=p?iREWDa z)2s~-Mh6;gcwRBI?H`9J*P<`w9%e?wZLOwpu@+8biDF5IHl5erZHI86X_RS3I2 z_hbk9OryMzuw*Fq2dxFdP?g)7Aat-Rxg1mMoX+9T9@D3rUcSo%tJtJ*Fc_Y+(K=zI z0BCB~0e-0TsD<>5G|`8AJrSf|*`-*nkekc`=QF8x7!6Jw0M<4T1{DHcBI1Uj%uI$= z23n{2Z}&FHfFL4KCRjhPT3?*J>TmC&Qhcfzp}|-^VQG*i4b|I0kUD62oA8_3=62Eh zgu%KPJNRw#KvlxT^%5OrZGmipvJJrG0s|Zt($Fzn&?iy>kH6L?fK^2nK`XPIntdwj zlrySlXd&E!RUXN;cF9Mp5pH4Y(pn$c{ZCy|?=?0KcGFMHVb{szs;RCXxcBeky7^@C zUp0Oii$L4}0)(rtctDM}w7tll3^c7mBd*$nktWwr(0aEQ(-_j;j_w+TDzv$5BoTljk7Qc#d9fM=w-PgelsA05V!)g`c%%7{d~ z^j;-02~ew@8h%%kxUih+w4k2oxiG`{76ob(6~5GANtE-Jh&_kRLo3-{7NIyFkwZzZ zkerH8a=pTBVl4!#@b!Kbc`{856NW)uUWk-1s?gqTt0~_zm_& z#SHZv8j!|>Uj0c8vn2&P8icj4rPfN8H71(OCmNla&8z*jIo#Q&E`tvw#&OXAWd#=7 zZ%bPJG3Mu=m0Kmh{^|c;W8mi57?Lzd(~jHv6yAEBY4-q&jX~o}uY*+%$CdSt*pF@8Rf-v}#&n 
zjQNCZa#p@LdEE>5`uM@g%ld|DNrG7ImuZOWAsLf3lNQ--y!9gJNL-|`yj0<_ezZE8 z$=ZrOy*N;taI*{rfJ6JXI>@|6QPUS+p?aGeIZ*XVOGM`|M`~fsWfH5TZ#`IjMyc{> zQLdAg+kg@xx4(vW-KoC_*#p zMq$Ulw>mt@praiZf^DegRzKi%M|K$o*o*C{N#h1SynB%6@J+7R_q735K_~`DL8E~& zo;5qWmED&93c@OmzSJ~R!8inOm^BAQ*l1b8P=WF=BAnMu~IfY%*n zb-=$nd0B0-|102rk)OTWxjCBJvJsc#0N#mLtV-%eniHyQV=&cby5@MOFm?2f1MsN@ zgt>eDKcc1%#x5ylVmwQ9{)ivtb<6b7M1i*K0m!Z^V!Mv*1=v&JX@|7 zHU5?w-KI$K+_mSf>3OW1@CELzES)ggJ*p_PWz`&;GG>5sJBwIslB_I6ya4imw>C0t z#$>K;R?O{qh;NwH9>+&yrmTBpaJNODXQQ8VJ(jmfvhl45Mg^)P>B?L`CB^UrT&w3H z>8*h|&?7IX=XO7nk#UQiSTUhLQjO|$)XulR|DDJhd^N;%F&*y2@N;V&!XiD{85-Yh zFacTvZk?lBBFgNwZlYSYXGL>d0R$gb3K`lI8_25(Q1S&0 zdXg__DTjsUR$mA=Z?-j@VCcm$X!eQzM8LHshp_vxUYrkmvxXXW=Zw1``6gx@OJ|6Y z7{RkJEG8g~NXLC=hOG=FwWOxg9C=l?MpYgcHgS3dyMqIxpUL{OkIDl~EaTHjjhd!A zL`QrvT9Qla|AYAHk9<)nD{I2=gBn9vhwsEx^@5xcPm$2zNz# z6;WYgwi)#66sd)Sg>yb{BiPtPLeh#sTM~%CnthLQ&-4-TM7;HV7y-6Ua{xS(HUl7c zL=56ZQQ)9@3NYZ*f|G19W!N>>UdH(6ockaHZB?^_{bds@tq#9AjRAcyZ{6#Vv7MlTaWRRCy4W>sz?vjdzPLqTyS-pYv(koF!1l2 znO*iQi|$dIc)2U!koMVKIvi|Wfjeli!<68jc1Io$<(Z*}=tDse66IX?<$31(87=Z9 z>!4c>)-5Yx^Vs0fmX^*Wai5xvoe430RwAzhu#qGkRUtB|1||_CQrL;Qo_#^P407%f-H#y{Ygbo`L3Z5VbJsYb+9J1e&ciWd-L5a{o8^r>T%(*o2D8dd@r-pygX; zfKOet^BwVE--iOmeVpc=07M*L*Q&B)CFP@*>9TDsKP_b!^RBL<@IjG6;k6wPVe933D_^}fJy z0Jzcj1UJq>T`22v;+q5F{)%Y3*zRcVH=N_`p&50H%^LunVh$;w6p(f-I8g>5QQZMK~ z`_Ct*A!UL8e}2+FBi)f0U>FC1`bNH|6x8Eff0An)Zt6obFjVh+@(pq~zNVEZ5?dZR zL~fX_Q+yN&6w`oTjx4c5+Xac**6_d-YHu9&!^Oj3C_tZR{ljWL@}Y|Uy20La>j4ps7#AXuwE_P`SUO6)rED@c?qYg*PI*jIwOug2k5l*C0_X;*12DFRG>FaOTXe4cY5RrhrF^q4cM&P4wI zcRlyqvpnY<4E*6l$>Y%2?x3=(N!ZVnRxJl2{6ZX^m)`kt97rcAwcvcB*Nb~$xL`u_^LG}2J`-z40#cTQWjqWW}8*7%t`T3a%Y&8b=fPf_$XsTG7;-&n z$Q#6I>EZBJL+iVh*ocH-8jH~W!LBN5ID_&mL47T*WwnS)^SC>=Md@jz+`t=K5uvCr2Y1g5HNw|f58Qg{3^ZZiX>@0-X8iIA%_=Y>dU$|T-j7pnT`GI0r@8jKI})N9ZZOI)=cMK~ zLWd^K8;;ev?JU|^|_?G}QESd&8}et&o`TY!^| zJ@y*PZ_jCj1Jcg0lGawh>*qHd6jj7Mo*f^e;T;k%tApkd!go#)nF}zS zeq^})?fh}7IQP{>l=w`H92dUJvV8vU!@EmTvoa*L@B?{MbBDVJ*DomzQ;(>rl_}~?^6+gN=%0qI^+>JlNRSv1_%_L zyK40A@u!;G)Vk|I2PkiZ7}Qw9RHWV*D>7i7d;HBTs2hEM_@^qh{(HlZhxgUW@XpVj zFR!z|>T*si0PWURNiJm>8ReAwOKn2gRY*2m#WckII&j(DmEJ*RCPIb4#p5_iR4GGe zF9iauPLc;}FJZ5~DdI{4GUHj5#9Ou^;HkoY6i7cU-q}-UG|8#?%-C>+tYJxtWW9`c zv@NqX)m&=LnJOOs$K#Ln3cB8?K7?Mmc-6#JYFgkEe{*R3Nw_-dz$#dp_Dr6dib6WQ zJx?f%bt4dvmb?&(4;4S+V%Cuei@#S2_H-ig84`3!H(kl)rObs!LR4N4$+6;$Biy7q z4trpoz<|X3TG{FTk&U8HPrT?iq5|~;4G!;I@46NcGxRp8mNJJ9YGkDck@8;~2trov zjid=R5h=L_I6pj}s5ix8Xd}u^8Gu-1Z>?_U5Kq#!$v03!8NC(8`wV8DqPX{o;!WXA za$g+ehkus{tFaW29Z?&ketVrbQySBHkwV&7tO!ci1kug$>1(hdq7pf)^_vaJEk!vX z%v>ZDrB5{$^e6#X^K;@^;Zs24A|hJdBf*(~x~qX&)r*TDlw7uT>f$AJhiW!PC7WM! zf7}fLTd!i(Y)|X?So`q?YrFVU1O;x{j2ZZKtePUf=GY&n*^fhBLM8U&8}eWf zk>#-13iRykP21cKJ6kvOUO5n2YBswH;F`{?2aL0JtYYs?CrH>vyW5%(n?8!YQk!rx zm~+w}Q4tX#fU8=d{k?J->M@@r)_~GSX~i$7stR*-;0Kv-I3%=Z(^Wk}958S%QQY#ZRpX-4Iy-no zVvS{Oty=ibLf6s?|0;b8=t|}=5KkOAO%WOXKM!60xzfkr*h^@v8p%rEoCObrp$}Yx z4*uQlU+-nJn>ZYE51Qi=BO2V`H7!~I$t&4{@>bm}(tF{8Eeog4yeJ}zBX54e;RtcW z6ga%|s|z-gwtG$`uo@oxM6;vrf0ONzXevkEKvK_dSIK#zML1fwZ6cDbkCNqR7s))? 
z)@L%s`UGL`Gs1_f62PCDyZ!o0+0Sex@KEk#D_I^M;$mB}=zQ^6^+TpYXOOshS$}OU z%@|?|>yMxm5ee$Utq7~u5N5Jw9Wob%HS|OGrVYw*f~-Y>uI#S+q5UfShyoWG1Au84 zsXKdJD&mC(c(t|U!JeKtdrk}nD+F#rEWD zBdB|RzgiHBNuA30r-qk{$5PL88-2-te^Eyek_U+L@zwA>wS*v4#ORFjB8P;#$hztlu(lE06j>js&IrLL>z1$Au^zYMuhIOtE26$bd$sd zNZA$cA-g{tt=a60a7=h~&SuvN;#Av*eg;sW#=byea|3m-pnOG4)H}y-4!@o<`sC^V z)_)of$cD;G8M!7zk2s+SuL{$O<{bvoDjUS?54EiE?Y+6k2@7HLAt zLL$-QIMRLS2Rp&>HDSg~uKj9C^o)&HR73b!ic-5FK}g@a!XBhmKoVTtc4P?o zU;lV~PvZz+wwlHVUK#K2X0-Du{C~N`;pg153xA3uhI*l&iLi|Hz}4{Sog~icZ@mJ6 z1$7{d=v9PrqJUDPq^V?*Bm;Cx54gj;!@Up9{6a!rQ&M?SwAKIAJ+-lBb7M;SZ8jy! zeQL47x>QF&%#nZQ-;WO@cf_B`CHl|)Uo9+g_*dt2A+Gf7ECEBS6Xd-Y&F|DbtL+FhEihsiX9)cQeM4L zXZWiTa6|LxY#~^9%At<(pOY+Nc4<2yd~?xO9r8weq!x%^$9Rx1IBd#?jw^fpNp;Tq?;dA<+_#TEKX%k4 z85w-7ML1_R9t_P_F*ozRZ$^NG8rn9*s-gAQEZFXEt?LQY_e=UbiLqMSxroS)tXJV8 zw+RVTTer3yI5Cae3%i1E9nwze|EaAK@f?1BEhXOc1Qr*<@lTUYY;SqG+T%nt`iqig zCH5&+Z}`2aA7$xH{xge(%~>1H$N`NbLUnC7!dF6yd1&N~;df%|PrLHRrG=Jj!H=u| z)kc_8XR3^?+g4AoKYus0V0salmj(*=DTXn9kNdF>xZ-w8I_FONUcd?|Nay?haaU0pnO%vwGc#VkYws9s$Mlnh*T{Vl>h|m<{-pMl1vzi&@{4_``6t5ZvhG~Z)TAJ0`fsMw!U5nHR z(Hsmt)0*-@T>y4pSuLiP7qxCpi|^PW2KIuq0c~y!*sqr{!VKjAwJX1tcLvnRO$yJKP+=;)4Y59J*sJ78NU(%jM6MGS*(eY=K2j% z!Bm`H+!bF%RLJB)*KoKOv0-V8gSYwyXBFNaKEO`?!SPpmprk_8_%^Ck5J2BWpieG%_rT4GT!Ed2%>}are84ov2x?9@pzY7!r~q_F#3> z(3Yka^x*DI5fPg3@AVnk$4(xs3VlcOO}8%U&0o1}2%w#neigeqGe6mw$(cc%q9T}8 zgnCtGr8ty$K=d?`dAX`k{9lHjL;dd!Khy)DT3>QXvtp_xDUg1=6nW{BzzI1c7p^AV z*g24lBtU9O2B?>5tcvoBNb1kCDbOysv-N0i4* zR4zHdK4>!-M=TBuMrl}+R$z}{uGSs<-r6Ap7#7Z&g?~|(gDETvbBpGUesR$E6?zf# zsD2rFgS5yu2-3`7A{<}e_NgM2NZ_DQv5q=3ZF?Gt zM>sIZuYIs>-Weilgjie2i~tx7C$LOgzWO;I-_;rXTO{Nr`*S`F2VD})UTV(OIhB|2 zBa5!cgADa%E&1V{$b^#3aE008h((dxEPo@!*lsRPkWA*)YCGfyBMFyWdamZ+pt|y0_T7eLz zsnVc6Z`{;|5$Ba&r;3cBhb0YQi-MqeqqQ*y8*_?!5$5^y2SA6BT<)zrU%|JqGFe6| zsa4edAq=82s)b_0+ketHPd}wks}-shv)w4?rdr`pY`eNf#E@AUBaLRDtF^*UpR(i+ za<*4_BG`9A5i1@!yz^W;_>ATEUcb7ROR$302cfchuD@xg-0;rx+s3W`68yg2ARv%6 z%QeDjRIDdCp}90$Q!v2NI$N5iOz6Z2Y&gJGkJo!e2Z1C@HRS1vbsLUQ z6m%ACb5q81rXkKO>_+1bcU6LkPhGIFa;am#YG6;*8WIa@m`XwW1=|6!*K4+ofIN$$ zAHIM5iGIL6)p3l?)32Y>WWxFiU)L*?2QV|{9{*zawtgXq%=i=NFsz)_BFQP@po>-* zP>YAuo&oS~hoFts^I&%kinY}WD_Yit?*`6PE5+U{HGG1?nB3bIEnl}UWqexRtk-J| z=}v>>?)_+ZuT%keTSMTwiF)SQ5mx$2h5h-9dhiFiwJ<|`pQ#)J*P$jzMF;RtYxXpb zUwyvHJ)qBkjBGJP)vG)MlLG@5!@U5C33j2$E?6}#p1efrttFgbs$uxoQp;Y3-#n3= zZ9gu-EDadl6c?JFnB?Avj5^LG^0Oy2fGG#_L=p5_dWz&Hdg`_dG`(tD9pW;vg>Usm zj;wpy7{UbNM4ls`XIk5U{iB`S3S^^RZVX73py7@&TA8YZV%|&14j|PNbr>3k3nNuw zkB*oXv9!~9uMEgp+_9ngas~RB;ZhS>6u|syZ8ZGo_!}+5n%N!6bG7q;=qyA${rc&Q zF}?rj4_OuL8EnqkN11{M3}!iaOdTadq>YD%M^S><1h5g(-W3HFkGO7tDkT`Mu`_~&h~y5d|t`xBaHQfEwAc!r$X5+wo#2ezxkY!4pa)$|;9!QT-o|5{an2Fd04i9hcNcnfi?SKa^~kU)Li2 z=Df}=?txUTMh*o1#EqGp0ra-$VR^@U&iQeAjoW9=YJvr6Wi{zj3LD#PLBGtkM=F#2fAc#f;vk^O5rU@2il7W3YqP@V)3-NpwsX|m9yrn! 
zRPUFXD&~w`OZ|*@UGTa|$ms6gPE;rRHgXAR!hk(&2PV$bwz9=(tuvWv^e~Aqujuoy z`~B@?sR04lJGhA1rBb#F3@cg1WcT1H9jyO`3`XJ#Ve1{;7rsq{6_2CjwKI`U7lXRK zwwsGlXkBy(k${>8?z5SUgpK=*|DrhQ$J)TFhBRPjs5F>~Qxle=x9q7w1RdD^5VdiV z)Fam7M7op)iA}V5L6lqS@`=+Lb9YosTu-frLr%XnM%fq@MogR`yd-WnV2{W~zQ|Y7 zMX(fLi^{`reGn$WBj25)Bw!|ln;zb+UU$o~VO$ub>lJ}=w|#T`jYwv8c}B;L=Qu|j z9UTDg3oB~O8EQ5R@x{sGTcRnG;1LPXmp8q}N< z=c?U!^Lra8{v#A(t|}UQh@ojTi{XU@Se>r)I&M1%HoZMfE6V%3Mp%=T)0BdiayP>u zSln*bfS?cc&>rWLiL`juCdJN;HDbd(L46?altKNISGW?!U@{dl>}4 zOjF7A{;?!Zxh4d_j-u5>v61#TaaU86Zq+a0D&DIYG1qD2^UAhLroqg^i!rt}l`e%R~)Vq-7>l-Mj+?@{o~TJ{8K@}g4@T;`+C$O0v7yNawU#?1 zm#VmSeV??OQyL`p#1D@@6~8a1E5ZQ)pNaN&NzXP)Mw%2D3AAE%zL_zh2e{F#0!2zU zYQsx{stKFcQ&>x>u_PKQ+$XtIXY1B$)<+JVUj=rf*TeYQsf^6yRK`B{tlz5E=S1Y; z)ckbQq}*98DI|Pj-gdZWpu3U>Zube8)Ydc>FSYeKsvPMvw_FwcxQ`1=vx`h8sGyP$ zF3)IhcKan;N(F-{xwxu>haJd%laP^e8%k-k$Y`%#W=TSakwwEAR!5) z*A#1mRsv5}3BS|#xWxF+>Jt(4spFEyusf++`A7NSf27|1SDwhR-NbYtq6u|x`^rDR zlYfO+tRov<3hKsXbPEOszzS=`S42I9zn}v?G=#Z4G;;ayPJn#l=0S&apobBpXpPMlV@1>Vt`h^y*k-?>iY4!#L_Xk)|xitd07q~1P zUT$L2(9|wDOktQwHTuC;Wj%Rr5HN}?JBDE%p+PGY*(BSE%p<8_#*@QUBB!CTBSp(4 z32TqzsrN@t660w|%-y&@9BkY9 z$esFOVjV)j%32qQx<1w|F2%9+L+jnUVLVMqtI_=*M7CksbT~qM()38Y9g(X#1;abf zP|H<60)@kIzA?4e&-gvA7FJK=JbmVj-BD9B zO>JP9n;bwqyWvh@Pq+48HDnZjMY`SAZ)j*7?#*tJ9g@fi$P!DwVJQm#pP}+xCShLC zlyGg4LRh@xVucQ*AhaHe#6lS&@zO8i2x=D1*@dMSL?4)vw3H~Z`zKwCBdlGb8 zq^q_7>b4v~cC>GbB^4f+RLOR}6ObOY#Duw;!934Djhvb+M69ZPcv8pO8OMFQ;{4c1 z(jSFxnTHM==#pqK*fv7MK}BK2gVbQo5gtwpa_*GlI!xZp=$v>yvqIRxtl#t@wTtP&CC~7>kOEAuH z#M@hVnC-3ZnIM9z{?H$A!M7BdEV4KFey6H*g;O8|ncRyxcyoqhc2+ws9Cp)327oj$ zpWK6SenP7}k&B7+g4*jd6OJRl=?E3<+x=DRjZFLF^yHBjr_>@&n%lpDwzy|?8!}b~ z#p-4mVNm19sptKUKVG}$Bu$)}UQz;i^gl%Sdyw&ZkQCklwb7cfUKrxU9yLI7TQ>|DXu)LD#fh@?)RRd;~Ezs#8cFQg)rPj#V!4E&&m3;#sS3@Gw5sv&5|u9JuV z@aXqB5YpA55op%kE$;@UXY@9X$7eOv_EH4ysvGo=C}^ya&pH%nDs0K>Wqj>_sRfBW zU<3D(T`rbHc2Id0{u>tkHR~Q&;o>az zry4;M*PnsAIyixo^#{ zw|dqVKf>~cH~3+w%A2o9SIS;3avbeAO$fwKW~2{glErO@K%H=v63)>gyC27 z2>VEH%RdKf2GQ8MZkXndzZVMyiPMIu>=y0$tGjc|!R>Bcb>IP{CAPL85Z*E<$gueX=g#rcI{L|;ptBvVOKt27k zGkgbxQ1s5>*sZ>>qiW||Vsaqu6HYY{%36Zy<*B9Qx~?3*W%TBY&MOwU7IwO_AqV_j z19^Jlsb4*mxWtf9?MKBFJQa(#%b*zt9E0>g$-oluS=gRqJf%mj$3 zt8;UuQ|n0`q(Gf5HV!dL6DINCm-_q)|1@Ok@2uI{xX=hzMeb>}(JqRH-djM&)qA8; z%xIpmbvX-*T_p2F*Xfi?j?I~plt5d2$?%8fTw4e1BB>2XhyzwzMB`1EuOjaNDipGCypfnI~<-At)r-ki*2luVr9?fhXo*$ zS}MqWv+vM7MtX^Qis>>W3I_L!j!{V%_cc58-SOMmz-5kvXoZCBIqMF(SZdu|WgNkr z7Dx7^=yB5*@iT1z<*bmFy*H`-gLpD_le-&g%-n51Jw5T09M&WN4}ELRQOVb_C3(la z229%m1p6?z*7UtLO7dwDbzGPW)lK`3)dTslQhKn*e5)RjE{PP=uxAdhZ`k99yFtr^ z3@9o^EbyUP>sx_G1Wbz+0txBq^zvM@isG9}2RjLHtgX&3G#9lYfSBMb-7O0cp>K`+ zc`!7w-Pld;&P`u06rsO?cBOG%1xlt#BBf%O6m^WOVz5q*m?n-xaNgW&roUmPJxfA+ zz)ETfE%sg}^4;oMtZHMv7a2TtL4!Cmk;~9nxU3oO^bbi!uwk2A#9LJiu#1xF4nt0= z!WO}>=_h8N#x#PbIv#gb}6l3fK6iHWIjUspsmO9=3z=RMZ0})RQ3}l-%i>8(~4C zT%pyaN*RH>zrOcxj>SL&sTT&VmJu*wblaRx~-{UXj*95{2I1nl>RPvv6lW9zo;tIve3OEk?HkP zH_(bDJ||~KhTnCuZmF3k5^NAK4(!p+n$+2e!T_2K8})7c7XUy1?ZNQRkABawBL%n` zT+grygloqS-A;D$@b9v;U2nhm#RXY}Pu2Yh8ybK6!B5&sxW_Fb^gv~XGeg@zcI7lI z6U-I(e(mP^?syM$@Mq(h@Yi$wwhKB$|e74Ib$>5!(i1|$sFvK-8<9t4uk zn}x`umc!ok0cWc39={`25iYTiLi8kVO+!UV`Z7Sh5M_ti700f&va%cms89_cQ5cL) zC9dm^Pr~?t8NznFf}Z~KDE3Mdow(o_G-`XJ^!dJtoXZ81n34ezG+}4VKw$=i@lU(6 z9Ug5PRL=fff2ZFshxde>V%NSLzWd|<6(y`XVQW|=0VHXt2>aSSPI4%r+`w9c&>z*` zWXXu7Jqru2FUiZShh8D!uuBDDwU68*#L=pY;6W>;61EzG`5sy4#{#y7Nn2e8Jg`bn z63O~MQ9p!?MF}y1BBxvN3%Z{*tWG^AR+|$6xNsbp&!va575&Txg&GAz`Yohwc z;Eey`ziSkcGjq>kK4~<_qpvNQOgP;YCh9t48@+=tMfc2Z=ND%)XaIK}m!IV~RwTHS 
z=ZUW_i8vBAQLaE8wi7>(%IW8N;gW)#Su9-`<=+>%hy@t~oT-ItOhHNGO#U){^d4<1#H_jn*n|bB&3G#_7u1=gJ?cB_q~1m#jT%MsGfRFN)@A z7so8(drz?nZau)4XK8c4NO$JG5rJVt2^NlXr_>?}YZP=gF9uN&smj&C`KR^Z*c4{* zVt#MByB_(V{{8rz{Yx`btBV(3k~o>7{mIj)b?R&w-`JYFXwCiTH8pklwOq&8FhnnH zFPD#vqVf)F0mzcU9^H^f{2j31j8M-EWS*&8mV6M z#8GxA-`wiFb}QdpfnV=L=|k;4YB5o(8=e+QyFYWa-PH0NzXQSk37vrdt?OBB+Spc7 zgsR`-PB&_@F`evkrZwBDtP?L}GOkEOU+9G&KDc;_+aNm!ox^?!HvFJ^=<`ecTL zf{f}WrYM46bJ<_a4ELueo_tbEv2;0tm$Xx4>o%w7!sp)YS83=mCucO3%t&e|IDIBN zxCKH1RY>&y*o4iNIDN0#NG3n$e-JiJFOV@wA5Z_(KH{)~tG~f+SeZ5Oz3#5P^-s#5`r818I}@{vf0|L%(UyzsnJSf|91 zpNgWVP9_`E8crz&@n&{$!z_IapVB1A*R|(Z1%Pf|IV!88xli2iEycpMG3Kip7}J-Zg&_!)i%1P3ap69(zmfR=7IQF>K9Tuq=TABJ$g6 z{+kCxd)W>$!lddqq?G=eWfp2WW}>$?d7mm_i(^3Wwm1auh4iY4-b_0$SUdn?%o$;8=i-oityMSrk>#-CiWK1~k5AD7}4TNQjj!VKWEyYa7R)jzsR4y|- ziUy96^=@pz(wK=09Y?mRSxSMYTDCH<>oahvXd^n)jqyLdhJK#RGLRr7&=XmOBOpWq zxUa^$*fC|ku(0&I|L%9pNkQlz`=&+Tf(L5PF*|>dMP|bCu#2(1x;Y;V-_@+h%c8A^ z$ANsWEEd3=3&c*(UTvB|O_NB7i%73xHx7+M1K1}#p!QD$Tw!A;oH^nNNEDR&?i~71 zUig)kev+t{t+iWWP1jCQ=rV&?4v>!==e4myk~LCro)$OSuIh^bAV#l^t+s70objB$kcQ2Q(boIt}s zGpMTX3>>Y={Sn2CJ`sDq{;HY|0Ir!O(6>QG-=GrBF#2r!z)8c$t}t!j`SZWkz83&k zWJ2rP_}sMt3{E|zAsiYA#s&AjTI#QQiAe&CLw?IAml|!FH0K%#KvSUAt%R4pAL^F4 z%t$%fUP*W(ON=!O+JwnmX0g+hlfxZPEqe5pq z7#h2TVAR*xD3jZ5-(qJgFu*#pUNu(2oKt%r>=K}`Wqey`@d*1zs`!93Q>;`VzRD;B%NaWvEN zWH-E>>T$YtMLdKaqWW9bdc{hyV>BGn?D+qAHi6IoSUO*UL(4s{Q4a6CbV@ye9@1vx zGMk>)K2AHg;luyu_-#GC+`g=*xm0T(K16!oLwK8%`}Z`2Ht7~eWu#bAHQ#tH2Xp8f zXQNYDC@5T;5xilguCRm6@EobF6{iVZtpo_T8u$2BFpAmJGv1t%FX_YlC5!Ne=On_A zGDHy(vXRH^H|L*B+4ETvfZ~>ApxVpU21uK3lr+=uE>~#0V!E`ha)SQEpn`ISQ3sQ){ zCv2f%(j-ky9+mz|$XTqq#uRvglUa!)PG3uL#Y!?R^+9z97p}-U=7wCc1-L{7<#3Bt z&y*H53f>V(mb|3uYr~$G$@%B=_3ayLJwxnbJ{3mz(!#Rdo_I<#8sWVY@g3F?a0MFa zFP{8`Q(_%;9e2W^`BWzu14~KcQK^k>G)m2(AVt2@3HxPL9ijFao}QR^LPJ7dE{WOl z`%3tFeO=>9b%MBvg|{;%T%sUvOn@NWHHrx1?p8Z8%0r%LqMJRhXXl2mfwhh= zclwLguuAEMz`L!Z$wEiE=1QZzW#5J@8d?ruUni=i&vqlDnmSve)QX;rQ&@RZ?*nUl zR|E+F;0+5-EA}uE^~kZ!Vc|@OlWduXbP9VnMLpc!C05hU%95Spb27RAF6)a&g3b!O z8rZszuoZ(G)=;?V6O~*{>uIL$4AR{6ACEuR894ux7+n}6CDCb-$+fHpTaH>-i9B4E z3ArM$EE_8`QqqsV%9%}}GFK&~17KT~vTGE(Z8Do6WdaTMZplE;oW^0Sg-dB!O?^=tjEY5qCs9hIM{9D`@@H( zg%8iFPv+@Ye)_tI2o`7?5Y|xpf%Xr3>wbGVglM^_+GseO-uS~`R6e+P2aKm*2 zWw>Stqw!3d*lpBcz|tnpYH_Frbnd5W{bJ92d*mjsZHrf_5Qob{&CHjqp88gJ!zcQ- zUjF3`$OYCmH%30J%bc?zabOZAGqraMRYl`_Ch9tiLYC)Rz6DWqJr+RCK=kwqT4=3% zPlnxHbX&58MMe-wLE%GoM5oW!%HAM(aYMIyuCPTMet|3{_R^`^R6Xpk}^L;a(fRmM&Wd z?v(Q3l9#!%etgworu;tJnPw5H4UNb<{YbFz3D3ob`3hME(?47M=vovM*^9lsEbYQX zM@okpo#VQyqf(>1QRY{{#P{a#Jjyr!=t%S>s#~~BT%nrB+e&Iay))A^)S|Oi<>ygojcGCTDo3$r zTaf`;6vmvtJpKw+`R(vyO_P*&;Vw{j=gvK@WkwK&4Wx3tN*-Tp+F*5?ZM}5u?2G!? 
zYG2mO=O|XJKc9YaLUV=8li3C)BkS6>mlpy8q9;Rfw2*PIiFT@qz%bWB1)02+wrwns zdJ*lb@0tZmeWZ1b32U15u*R?TS2TYE;v=;Xff^e4nQL|O*7l74zOm<=`1tr>?zzZ{ zak_W>o)$UO4AA9bBxvQ4cQ7vmMD9}cK--3m;?qpk!&Q}rWP$-$SB;!-Eq`7Cp{C)z zc@nP#&*ccNVCQP&p7l2|9p z10e8#bK?d38pLf zC{p^u19A}n%8ZPiNhGAu&K(KpGZ$hmM+wM;wdV6g4{6=e^(!fD8^QSrgoH#ZU5>RX_11Dk=DJxBj&7S zE`9;Yzt^7Br?uBaFuzJYj@N6y_lhYy9GRF<*|o%CAp$)9_*2?Nj6YnDl?GF^jjB-U z73(3$PIr#)YI2-QZr*f|1VZr9AJr@zk;7s_UxHgo#>DD!Vvd^6QL2KZC(B(WZNmW2 zi?L(`{*m@BCG`E;`jh=P@WNFw4?+#%h{zdm8CO8{<>`i2d7(N-QBYV!gbrv|I|_Iu zno^QW4QMxV3+tlbNy(vAVx!fCj488ybZpYADI6 z-JESss|jM>oDXE7zjbRPL)*rCm!@_2+?&VW=m1Y#IGt$+H)`?WGHXW~9QAg&;gD0Z zUH)D@Q`dM&>kUf{HM+Ke?%yy-9)>+ff!e0toVb?Aqmi{X`3_~vL+TK%Trmr8U2=#| zREEG#Wg`g&lYy0o$OyG-w{C<$lsE`AxKo*<+qbYtn6n~F8j$sq@=q&8Kuy9TDa=JK z-rX!;Iq5Vfo!;92BI{ih2TJ*s~oVl0t@+z+S1Sw|Zdyu(4`|Bz9zL-KwpHYa9jc=icgerF;R~DV>HDn9jHSXhQMw3w zYqctz5O*0BDy$#?d=S3EHTMtv&W$M|K zkjN1q5GU;i45!%qs*^*Mt=t zoHm*j3ecW?k*iUqP$R?!82mwB!@bJ^OHwaoWr_EX0eTt_j_F8k7mY|iMNnks#hl*j z9EQD9tzR#9qi>Gp92qDyTB6g%cb>ehwuwEPTF|p@}uEu zT}C*?%sv7JOic!PYC+-Ofqq=;RGpwK=+OvI)8~M@a6~A$0uZ_|2rm zu^6bIiIisd^=(viE?HY)h^)HxqSj<r1#<++Tam!tMH=1&0Dx ztq&e4QEV$c>=*^bc|bxDaZXopgLrRT3L*T#>7Xci@ho$M`D?dwyg&BXZ7s!QA}pUF z&6u`-6L1Xq=8*ABjh+3~o>}1V&MQx9c_LGF*je=*I}S=rX7a`dz8i{=HikU2rj$hr zeW5bsK5a&6BP}8-$C?i>B5|bj(Mekzl|<}L_SdR;PB)WhM@YGem-5n)?MP$bPQI`4 zwxEDeghj4Xcy1G55~4@&@02oj_q`05UQD8q3tGzu#Rp%7FZ;}5eb~*Dh`NKM$6@g1dtie97dR5;=fz`}X+5(f?{=IrYVzrZ&cz$@CR@o1&$@0FJsV zITp|lHgFE_y!`TMN5Y5e81XT=_DW>O#7nM`Fj5DMYYVy7Y@L~3@NO14@a^GedKFdd z!H@!|$Dcj=gO-(uHw_DP5R#FGztq_XyNdWizv2s#T1#C&eB;sYG}vc4H#^uVdkJaf z>_WmmZ=v963r2}H*kHHx&Z9qQC=o)kYN*A+6)kyCJ>0W8k(kDe%n6SksQI+co;myS z%bLJz4m!sPXomjOQ;rblRqJdo_Iguudb%y&my% znktbHD|6%8-m!s=!&Xw}ZYs0YVE_10YgcgH2^Lw#@fDT&l6 zB6fUN+j+1_qHe#i(;aZF)$zb1td_{3AU~o>lXeIgU2pIeS247+`Hn^nogMXRuo5Z1 zz{BXAoIf|WFLfGjXo}zmMe@cj4}pZx-{43N3Lw073B`1G&Rg)lwaP(vlBQiaoM9)-vw`Xu$i4Xtq8 zZG-(O(ue3*q&cs4IIedb7Q5duhOxJ6BP6$SIL0-)*<0=BtY*VYc5lhb72ek!++$@w zMI&VKoRJ2tU5F^EQcp)iia0y94<0L>T!E*lMbTTB*VRgIzZn5?OBKe5B+k8k*9M85 z3X7LTy3=G05NU^ef{x*7!K)JKCJ~tKY0fBa{Yo1rD-W?B!4PtzM$lVvr%7`|q_1}~#CQ_yP`*>r7uk@F zBe`;Ov!Qo~Z{=FHur&rxP!Bh3z(TOuxhXmbRB_j7^8VIF=kBe_zIc)R!6*0`osk4j zZH#Q7IvY&jF9C}fzBjZXE0B0(Q`^*FmX~v&62bE?p*{gTZM((j?Osgq_(N^Q)V{?e zDzNR`$UOrT5T@Mx!#Mx6_KCw&-EI33M3~t7g^}w$2@v~3;^L@p_fK!FVHP`SgQK2RD6zBc9)GaPjH5i$kaLj8hUo>t2R<(z3 zOHBa?{5_79u=yHW38KuGJf;@g^26P=WO>GGWZ6RNq9tC&%7J@D8QCjPV3i1pViGAxc%d;~g zAm;J|kjnB5Rg1M=XMQdk`px|h_K}mbgH1>dD<;kjvXxd92M5=vE-_9o`eut zhHoZ^#<59+KFL)ypms@FV;%&p!(7v8Fm~ytr>r0uKGXD&N)EU}Dr+iSfXwO~Cn`TpR+NXi3nL9Ji-iPoj$(r~K0u=*zn78z!*SCzLqbM1`g zz7AG?=fK&}a5}z!d{6Yp+4UuJ`pGBt=Z6nY?8qFZWb}`^>2{RdQkwrWrhYDbP;RmS93y0QV#LJ7lju z!u$d-CBH@~8AD>G=clw{DCrr}2lLv<63BD&bAK%ug2*8a{oIA8v@yU|w!QVEECt_T ze)uGRUvMqsa!bAoF@AUazQ(bP@xqohh@-4J+|?$?5;2?>k^l-J;m+C(MDLy z2!v*7cJogNTBCw!TIE0`g-)tLvWJ#Pn$IsFvE>3{`s*7V(zc_)bN&-ml)oD}Vf?qa z!AlL#M}O3zo;amXFU)7-6kI&8?;i8l=0TWSHLSY$Ww_0}%a}cob!-Je2KjL4iwPz}MN-ivGI2zm~?}sFqQ?cnKXV9veb~v`15osms2-;`HSNs9)H((T3O(y>c_K z1o;>m%7)_-u>q2SJNNI>pl8Hxs1?5Du4m>sVj*tW&O;`GY#RSc##;S~Xeq7ptS!ce zl?xN*cTh=7WO5-5$9JK4mr|hL}QKF z(?Upk`_UhDn6EwYVs_pMCqgzlDivbgVdA!Z7=TRGz>TaY7NEG$Q#JfZ3K!qCgdFhw zO&!b*3;KZnKF>QFdw%d!y$-vMI+{KZ3*=^0uaoU*F=wuJ1c-xMqfjf;5>3BjS2N-P z^*pt)*brGOeJc?!Z>iyh>5F+?V9@Eq0c;BkOleqvwM5)BQ^hK&CEUl3^bIti zQ~^R&#}y}+6TuUsBw@$Hlx78 z*bn{E?yp<3zdp16RmdTW6=@Vi zYotmfG%N#=@hpb8n3Jb2H0n|9q>5Pl+M9d13I1|PbEi5uI9c_*2_w<)y4x!nHr9+x zPR-@r+pB8!u9JO6@9gw#2c3*S*or125-jYZxex5`C|!$sCnNDsbvqRfL-2QaTQ!X5 zmYa(&NCkQLhev;?{}V%5II`#ShT1RHmhJdEkEUC}syOQ*6t>^W_B=@wY&vD8>V*r3tE! 
zJ303g#s>Zz6Gt|-zBS0G)8gT&6jXAd|C#h{_py)Lgs_qo6rOzY_HE4(1%`T^YL`>l z-^%PSoVJ63<#PyU-wIr#WGWJATzWyn59%UM%83$z$OXjR{JR>`G*0L+{44)gtQ8Gl zl>SviEuOn7kv>VX9AIq4(PrnZ>-k>9a%taAPZ-utFac2%qMza@x4-N!B)L_78u+&=fZc8|Mu zH8KK=Rwizr(O_BDMjeY67-22eub0HOA{V*|k;oB)P|F8_5-Y!#Hr zL8XkqPAas)(u_SNuV!Sx#Dx&Gw=?^|e#v>*RKI5rE#|NTOXi4ydYUcfRiZ=2I=j);OaVdylapt6uk)BJ+3 zxC4_^`&4Dbouu(@_dzV#eRv{Qg&u)FeVp=53B$lnT3t&2khf(IytzUBi07*}@H-?) zQe-Sea!~BS;Au(gxH@o_Txvtw;7Ot)s|<}f$C2!FnIoWSnE1Fb=V|wqAW8gVBwI!S zAPbG`A-#jvDyHjPH*#f?m^>LyO z`+pw(iT@s$=-x?WOW+{;hdKdgf2MVchFm;NDlOkG!T1a#Aw2?8Z5 zSnXo{9XLZEJ`1=$9xrX{pa6vvQyc|+&7gn#+uzFaUJAH@bvk|@gvp8OArJpy_^pPC zpt}b|F1x;YVAYj#V;>FlHwtGm8UE(U^BR66*v-fqtm%D+ud(->y%Bq>^3whX@3MBg z{9o<#jjpAl;Dw|D=ibtg+asda*XV)BTv1mE(g+P-`_(r7h{Vyo%6W~M1J=~DO1Q8% zzmSdY26xM^McEP0u7Q#wnA5M{c1q(-L=R16x;1ZmxAPQ4kX*LC3%bqYkHjVgV6FY$ zDUl(WEdUmXmT!6Zd0^anw6V0b*g|Pn)8k^#`igQGIqD=~AWr}%4Tr#qzmStKePEHC zqomrXREB`dOHq_B|=82I2!5Tk7Ri7V!C&n;)MW31`v5lYw4XwMDIUE4+RHMY8 z-LYpJ?sv(#XfagI&$pA>k+!_9_?vsSSK&Thv9)ioZ4Q3=)X&sE2}7#KG3w@=Tho4b zrso?U!VN&7#mvr{*FBK_08K=d=*7WF_JVv`>_HSgyKt4Tnp?h{HCoO(eJB z*vO})j3wXoN#b0tR$i*OX3$Qxk#P@UiPY9yF;D7{^;}(`N(1H(Jc5`h%N*rR`u2U3 z4#K&GhD09J9z{|+eXOw|8Mjhyo$lkKO;vkpECr`r9&U}YALe1CBD0fXumb~gpmnh6 z2)uULhB_cg+d<5Xxtd*MlU)1FSa#$9daLUEN-W>>wT3Zq_V*q!YJppu0<$s#yWO+J@>`!1nM z&sA0sXd-~M=!a@_8uYN%;@WB31$+A?EKLImMw137E`>_|d1>N_moASj!RP*s{%Lds zD^yuSHVqBqJnr=))}T@;fLkNMYMqNYS5a$ZfG7&L5Ej^HU~$v>Bq`K8((fU8Yh(TBwF|Ak;9{#V2tKJQUXtY-N1 z#Ot}XqmU>YHb~To#nu)-|OEzG$BH?}{kf?yn>)g#&GU2{Ka9WfAi#Z%@d6W-a2agRy z3egm%^Y*?tE0cFP*~&E#W7G}uQ!8J9k1Cu7d1h=QJQL3*_U@6^ZBuTO{!Ii}-H7wvJo zk8&)km+RMa1E_iy@+8kxmV=qrq*|W1;27Gd>AQPSVXGISmU4TOD>BOk8vL3UU!??ou-02qQjd!&&)~6OU=L zZ*8E#4t8&`^xC*&&}yLlYc(C}J#hU>;g7Q$E&x3-HZ7UI9CD$+Uqk~qfJRb~`i?`f zWt;C-&k0nPV`ZCBhrl8pOZvuM&t4zK92*=-*0MFus`=>>n4ti2nU@nySFG1sgQE~q;W{LGQis>O2culi`a_7P=+u2n)JOw^ZA|>mTZEoRTAJ9Xd}j%zgYIpk z{jtRO_ZmxcadCMe@0O_oIiRq*_N5IpQERlC)*pC!+qD$f0SpA#^hD&5+oNAB^X(gOHZlEovcqvMUjlMgg5s(`)ldH|M3+5jsKG5wbDDl4X*{jZspW+)2(&`pM2_{#O4)5Ikr$n*=B^({t(^RG%u0d_@RSZOe%~292zAGdH7^Tg{4QsODh? 
z(Xfclswz5hmHk7atZ}ShaYTY}jgBrsbs<7Mg9* z`3MH>2_vfXD=*kosvdoF9YS;o>e|=ULLivk8)g-SoQ^8FEmGhhy?s)_y?1>qM>Rk# zXgv8+Ql+4cTEolpSG8z!rO>b*o#tF$EYa=0Nbqq#`dvVcvdnUUZ%W?&0w&TL+zmf$K434J~$v0!U#VNw`ibbL7h}pRt(8#!`qax3$3u0|=o5 z2RmJCX0s(iQO&B1fd;U=%IsJS{*CWqU!e2{DQQZ?Q&XTI$;p={zioMC|#2w57gBmqvB5HR1Hn4yRL@D z!8x;?-Q7yyf_uJ4Jv1jr3ri;!4+NLIZ5Nx*EfaI8(Z$u6pJ$vxYLwJ*6fvZ|6`;op zI`a5vt7iu0E^Q*N5|wxt#0Zhg>fw+_wrWS-RnDID+Gn( z&vU`y_DwF6%JTz)KUZYe9@C{X&=H<30xX95577hBz5EtsVm+7<-$sfP!f3i+9Ra(a z+!jQ~FZ0eFX`Wg=SXs5rHxi&0B_GZ`rN^e{mn*RiGwPk;-|IKbA&|01eSi1`3jG7D zwa%^expJ;$_S9!US16`S^m=&q7DShlH_&%r&=NCJww_O30lH`5KjjlbKQN~xL9NHen_iofA5!Ay_sAZ@kDz2~;~HY(%G z%={(Fi0U^iGr!kzhux#z2{J3^Ps@AOZ;J{UNYonBOe#nM04hWzJ7mA<0?GUum(0bv z5nat=BmDQ`iQp~tkv!MxyfIBaO?`O{Wz0;5yX&pDwJwS z7vXlZK`dOc$R>!D>I&a=X5x*X@ypS>mTnp>m_8%Bej*wMt&Jf}zpPSAMsu1R*x0bj zhIe2W`e}b)$1G0es+c6Yd)HB`U>cf>qVpDr$)owq?|sUy#tY}KSqTJ1tD&%k7BLq~ zreTcVnk@JJVpCrzo0!Rau`dh`076t{78;A1Y+xQXoUNj>VMF>xKLzKAXy*ozG~h7U z2!;K>b+0@&{--g7)ycpsZd_s_^b;<_WV4a%QKLcf0o zh>4$Rh0dKi`%`s-dFQ6Bv|P#=JIw95VS`w6Y3i~REGQ1A-w4BIzX5^le<01${o#kl zAE@mm{06pPg39{(_&qH{hzQ&jU*@T&uJ}R7Y})tGV^Cqm*~2?i>2;cX;p}ht+kFtB zE?EhuaL&5{2O%N4D4B~^!3#Riy569&Dn^A4HGPpAGi7g+ zWPPpJB5u0QN0csSYt+)YC$$UFfLSZDznR?B<$2AQvv6?WIQTsSL6PF2=RKbxkj=e| z+O&TY(^GCJabG=C-9l#hE-6nb0->WnjpbaQF~^%2*#ZG~tabN`Vewb%9H&vzu+; zx6VV*JmRVYV4$B#96Ire#tUK_7h}rB@PdPW;A6EFsEh`7Ekx>}?w(_V{0tmdpIO|B zPhI3rQ&$llkj5Jhf-xl*Blb#nxL+f1`kUa#`93BaV@Kvlg}NG>nN0Pd)ae4_ho@<^BOr_*qVP!y7uz2pddDKQGPC z(7s3GL;hm>`rp|%Qhu2!C{LIfZ|Hm?;@3~i!tkyB9)?~HVyqF>-sIQrPv>@i~ z8sK^;YERzS_qnm1^%)z_x|8|xg^Uq3LUQL|(_N|fe3wWYVh5$i?_NAijeLMVGn=^eSiAY&>_1oO@BF2*ZUu%QGp9&-T3DK`U zx%(;QT*Wf$GmT@UW2azv<~N+e@v5bFR&mp3uQ``Qu+yKO6b+*BIqDKz@$~Z4lFm`d zKKa?m@~g)$oI3MMtun1^5+H3cG3y%~m4qVNZb*g50yrHQsYCs-IP<|Q0D(wzL6KJ@ ztA~0EL0r8Zb!^syF&XPH1-GuqsD;LAhy?s5>?TQ7H~Xt-sur(c$ixOErI27S?Y=$y zQPHycpKi`o2ZwND?SsNOGS1M%?j0h=5$P$%ed_+_IDF7|Uf7Wrk=-N4$K3Nvc zC45&|q@#1gci#+hfQ2}&ppHdDZ22G#bwshq?K!w?8Z~yWr&%mT*nGH#_3Y#xx0mKS zyz}&LbIGP+9NNqM&A~w=iF#&CMdQR`{|5g@o#T|W<@Vx_4Z>5Hb2Mm#f>j6mPU6EK z#p7sUuN;b~HZu7v<+pr2B0+A}wf>+{HH~z%XWJn>fYW$Szb&KX&gu(nyKOCIcM~hQ zMYge9u7a3f0;oT5nNIi(Hc(<_&gf1WwRCTeyuWH~;=&V8ozt0ly8Kgn z!#gjX&d)zPtIw;X1`%ltGE@B2EuuTr?CR!TKY`GZSkQ)7EPw>Sb9gelG724K`Lsbh zuj%vvPnfG}N>DI4kK9aD8d7&xh>C7&d|N==H6w1d_lGuFt^uC&f`eAvB&ndzv_o0- z2lfMpJwBHt2B45EsGX& zQ#uZc9piH~Kw{$Kce-}&2Y7B2iB*ZUvlfDADsfGiSw1-aTJytP`lsOsYId@Fa9CF! 
z%pB;2L)Z(7YbqOP_aVlt#NykrN-;8-FJi-L4g?e+i+s-#am5*%Xufnswcgw))nd*s zlrAsK>?2*W(;xNsUC6MO7S$M|zjlkFQC;yb)-A_R_k#)|&L`PoE;oKl2~n)40=Rn5g7h(UnCw>7G^}Df6iM+MWyEc?>QG=&%CWkSn}ZmeUrs*t$$2n&3&+yC zllP_{Yg3hj8R}*vGbCV#mS}YqS_?gcAtRK(n^3n}B%SW{VTO=^OfXZA1pNQw)XP1C z7Rr;StPry&xF?42KRTB7dN88g9IMT@=T7O>#F3!LkNLz@H050Lxyc{>ya+{=E+pM< z)Lo<&WH#AXVBK&FqkQD2TZL%&zH1n@WMJ!=Z9t~_8G0i}n{u?Buh!K_q!s8y%{$8Y z^<$hO-NCh?$8t&w4LxvSZDUpKD*$W}StgLwY9GY5Gwy-{VDP;J4rsQS1PWp zBu9XuopkMq+jqSFuQ{~bBaEcHqBXXhiWtv00M#&S$bcxCMwW$0@-vx{`UQF&r+#$)UU@?p+f6CFo z^QB>-^foRfx1k{=k&9SYT*Gi8-AXHOVd(8Ks$@;@3J|Jjb-g!o#@iI8$tWc(!|U_t zQrXANVlg**ok%S&by}GN*{})Bmt;|CU`V#yCQTSTXR?lp02<&Yp3_eYUiFwdE9$3t zb)YxZtm0luWy$qEtax)8^1`^GM(q!`>kB)ya0*8E#_no2z4Ootpn6aSL)NTlB8S1! zOF8m;LED3WkMjl(Pb{TF;~fn6`nlO3P0wb>2_(h`W5*}r%fy?DZ#oNA*Sqa5Ysif{ z%n`gU>i-Vl5R0%&>fy{Xs$v4EkAJW3QKzOMotKJv`N zkWuC{LB)jE3-O~1fJS{Cwb3bpz<0hGSQ@A+VX2;c?KO41N;ubDRLQ%lro))%;|M*X zlSyC~R@7tb28BEFBO6~`D$}k@a7EkJv&v+1pD{B&zE#ZABrZmL z>U_p!K+Qkbx4Q?fR+HtRC*fgg?b&|SG+Vq6c*-H}*9;I%iNA@P52^4O0#+}dzw^9A z&;|ELm=^$OSlivT2~xr{_2wDsH_U^S!SJ3Yz`z$bC8Z3DQBY;*>n&Ubo=IO$5o7-(I-+t4!3oWK9a>80od$<;%Ven>Gw`z-ZNhW%t)>9uq%+0>ICesJAkcBBZEhs|XUuyj#2rUF|r6H3*)OC!cWh6Y*MlbY5O}tKjrqaxutb;4>#U(c zR>85QPn;IKEuMO5$0OSY(>jRgbLCQu1d9U`UeADn1T|D!wYBZS8$-K*q-o6j7nvMDMCZV)P+(Ph9l_%-M1vbw;zfUe z|H=&1d%BWPqXpgvIhJXm;($3S+H>YhHl<*2&sAPC&n-tg!kd*vib`orsT$vP=u5(G z71=I&8q#@}R_lD5jrwN7Gicr`el)_X8Eu|XsWX^*7$F*3=c34*C;@IG4R+Z>R!yKy znowK_oTx#oY036(7cW{!G2~+^ZecX^`#bIy0+nc>Otp5&nQc~`KJwtYDnzNf3Lbn(MltW(l8G>QFOK!oGHs7@+ay5`pKwEvG$gmj-|`hhIU3WHJr4Pg+j!i zqs1(Vm_-9Pb`w-oTL|yJ#`3I_CfbQO%+B;~FYF8s3j$o`4%0$lRc+!g(OVE^#>|&+ zJ?h7p{hetI5C+5gF}$HhoV;^dji~!C$tKk=FJ^)5V?jv+5AQl=7BRWx_`KGZJ2wzu zs|_c9M^A3uyp&s^4N7sUrR%VQRy4ibc+eU5g4@a{cQoLn9*`QPE{xuA7w*lq zxNCJ-GV2>X0wQs6&3X>eqqisTYXzW{HK2;}DW2Xv{kjI)g1u}&HPv=|=YN*Me0Sz? 
zB!X3VzP(0*8uRE6kN?oNe>AM;Uf0sPV7XI?lvoe9Bp5g1;IMO8&-AX*$LN`GV&ag@ z*vAc!;(jzj;BQgWDcO}8sP$#9sMs8iQPa>l)C^S6LIPw{T3_=VFtGFAO}~zd*VZip zTyi%z*~kp6#ue>~D1*ihpD7BVpLJ-dnGJ~ST8PIc$8Cl>+hd$fSo;FEaD<`W2}J1e zoPky)H{Si|KTjRfg2dK%tt;@V`DQy=RdH_^eY`(?OEa|KBvS`nww)BAqShe=Qv=P$ zmPJ&#LuyuNC#S50gQc|c-_4#pbB_OGxn}?W@BcqO0W!62kRLl=weP zf0A%0%_>g#IQJPSr^@Q7xVGMS;^gx$p3`)hW{B0R?{LwDn%o|_U?R+i@}*&^)hhLn zdrWbvQme+H(tZ;4e*sw39nJXc8NFzMG$q$oo20mMw&^>9(Fd+$_m8$Rd%QZ>2Y%}k z6{wVjeykjsZZ02SF2ultY{ig{#-@s>9ES0mWta!en%2xNmFjvH;Lk1B!;q95JMrXI zaNgm`$80dV@6hO|9THNMj16j|xiZQqn}0(C20wUz9Eos6(FrnACimlQpw9^msEsFH zn~x62A1mw77RTg4t8dONE5wlTZkcjrMR4x%*QmmL)LuH@TyY}nhL?`Gku6$~7Magy zF!tu7?ObcQJ9D>tv}McRcS`SOE2M_SuLs)(cS8Mz{Zk`tJKHl{){a5n*`E`73G!2&AvDa6Src) zO+Snie#0U}+JLLjb?jgcWeYRa&BGLrIt*}0bo-GhfG->-q@re2CS#U1Ry`dCjT1Nz zAsOE&I4yA%doub#DyO~ohT5@c*=dLhcekv#9A=XCyw1bnu(^Yv2^p}`x|TEMqVe=K zoEGoDeXKjUt|4Epmt3dgImT0;c#Hp?l+VB5s|~TJVj(W5 z(0@9PP`P+Z)BQ@8d)6^2sHq(uW<$|zyV(W?!bx#x97<)3$}NM>sV~#Jmpueg!9Y4Z zDq?${AI%y8;OS9RGZzv;VsxTlZ^hJBYb%a&UAqKmDR%p2+v+oFi=*4)z%!0swGC6i z0)*;Z|InQ!gP|1-k0-&VMZ8hdV+oei57nh|Jr8<3`Y(@vmu<)UzBgCLxM%c4@HT`8 zg5GyXg1lOV)5F3%dOS%A=hVXaQ@_>|Ebg-g0!DXPY84_nh6yNoktC>PZFMpsLp(St z&DPD6e+tO@m;FZLQL(~@-cd~9u@AcWy}^pIp-c%jLGNt}PW8UrUc6!4XPkslB1SDNP|d6F7@ zv|E#D`D58K-rG_a%=HpU#GH(NOXA8@-vAgU@+qqN>5*IX%pXayXm?>#12~+;cV0XB z!WngnQ~GU;dkWf4KhcvdFbcL9B#G;JX2=17iX~f46%q7ejI&;L@DS$8mfO$;4;H-_ zF~jq}-;TwHp-Bl0eNG{kb`&Z>X(TaZ=*}v<#I5?!F+Dujh@UcJ+DK7P#()l5t%Eg5 zx4ZwcMfn%5mUj?G7g}?xUu$s|c<-%K+H9DG+Jle&ugS;y8D?I$#&JXwgNsp}KQXVn zd$&AskQI%n%QzH_dhlB<$)xv%u}>+M3~Q2f@9REQI8FCk4*fH)0c2!&IL3Q^h~i{5 zVw_QSh(dey3BlZNqgK(P*8sYGM*U)6xhD`&2}(7?GJLoqaC~flSnQ4yMf+buVc^sVLc=|Hg3wP1j zl;N1TQFq+&go55^)9p86ujb(9(LMseypB+Nz2HWpDD>wZCCwO2_c5lHqwXjy%jqf^ zJqqV*NDiwJXFj3n+ZvW<=3iHTQ3ew|y_O}Q8$553v|jL<15KQKCB0*2w_5zxggRbkj&+ZqSn8thD$atA&sLSRaE;T=t>RA{%aom* ze(9_1{mFZiccmjNyV81T;Hf=;Mp@Tdv(vkGp3Zi|>(A*6b9(zgAlk4`4h|bA=emG9 zPo7Y-(CrE&gc?HdncO~28WxVzn9pB5dsfp!u7V}{p@knF$<$y@J|t1=ArXuZK_Vu; zMLnn0-_NY5QP+%>XwS7^ASaSI^oj{49XLx#SHdlzM?ui%aT(4M=eGe+v!FSo%%LW0 zR{umFj|L^oPG={;=bCarvBo4Gu>UO5y*l2L@HMS~%ra~YQ^zz=9JbU^iB6Pv9MKE# z&A=U1pX&wF0WyUiZKvgKOAC$ormj)kTW^3KlitT0*n8^6^lnO-i0|snvOXZ?uV-FT zBYU@u0L{3?Fvv;>&VUlaQm-_VtnZQ%d#C^~d1dDdm~Ta!DPG@@B`73bNX7c5o$-9$3>RK?BhkhfC>A4D5D!a8~5G2F!SXR9}Ii(J| zrWvPpNW|{3Y^nmUmc7wJ&QS{(sDy*cRa=*dZN3|)mXj}g7_b+WqFtS()NyE_Q)jJ?hf zwU2-2lJ`yG{Qtz?*k=fhkMQjvMy5aMx150Ka40GVYKNaV@sxfnZ7k}?tteje=zpJl zqg9QvFh1~&OKt=NCz@G2JR!;vIHT5esdB$Sga7Ne!K2L#!vl5f^!yoldRXBInj6C$ne)orI}ssoyxIbsKS{hJGFoc3l^@jCpa% zq{u07e#2fR*{EJ%`2dSF?|4{4Y~XZ~`T@609+L)+L(SNC)j6w?7>KD6;c*aX`ts#5 zei(N}Y;f$CaAmtNx1$bS71{+YDhT#s2UBwnLO^fV4i6@L4R+I~dudE~SR)4CO}(KB zW80Xo6B*e*(zZ>q3ddhTlcWJ4wUBcY(c~I5`wz*1mtC!@?|X?JB$95#!#<k9|7SyA}?qQgd5U?QU@WaR(M{%p<(V8Wu==I1@4d|mk5?=dEQ;~4^Z+rTE zgRDW5qX=YaZS&4tR;g81bl;Lyu&@FsItPaL!8mtvq03AL7fjtJYZcZATRVac~LYorx^lyp$1ml>VgK@Yw{Og&} z2iHWc=zspdS~fQqJXM#Qzl({mOtj!sO=aYN4i*4VTEP=Ro+|6rf@dxZrgv{u& z1Tnp$my5j=|DxyW4EgjM&Im zj~`|#_E-Ejg_X1%%jTm>S+1cz&>OD~ME4tR4&y2^jkD#)6I+&%GwSMi`!F#MYHZ*? z9CvKNB~Yg?;USAFaenAG1rLRhJWDS&cT&noM=|H;?}I(R-c8IyIy4f7ww$?Vo`3p` zR_0|77QTAsX{`=tGym88_@WuQET<3zKzM*1{& z=mJgymWmxWdK-qu$B*BP#g!s(1wK!^8P>aMNgwK*ynZuQE_+{JadM6wa@*t!^J%*( zky2r72dgb8QZu)g9spDL>Y^3`n*>-)QOGDp)q1 z@zL%%<@Mgkk#PH)zTS_zSy2M)B(J8UHqpgEzX5RElH|f`V2)50p95c^UwYw8d~f(| zY~<_N0Xhc6W|c4)#J=m0`!{p6)-tRx*m-$a!^DbJejaHNv&^loB}4*02CKKlFV=UNMJV|H5ZZrC3|L_Nq126J%9V90W>s;dN`^qw2k z2=h}JeluUyth0xccVZ%Eie&r(;*~OogH}2<@#tLJS2{W$z}_qzZ-kKD8-bK)3=B*W zFQ;0%(_*3^Rq7Xk*~Q|*d4ib8f!#$|IaB(#0-{Nr-?4oVLe;w-2|*J47{Tx@Y=}%! 
zx3+Oh6HsBpX@DA)=EiyyE9C4<9Q?~hPGi1TiPQ0JPq(a4Y1>7f1y%cK)jsR>lsOqI z10StraUToG)6{Z&65>*Nu~pDZmA9Uu_zI3SuNUPXkG))INHMXIXuv@PcA@U4bq@$3JXWI zu0s$cza0dP-y!UNnm9oVek|Bi;#f*TLK~4UsaVPH*Y+YB3w`R30H?k)i4n9&qYqu(Ul6qh2)6 z*DIyPh6W!Ee{E|4cXhCXFduu2av%@b=2ostJzKlz<#ab3Uef7W#-TTdFf;m%!}Lre z#aX1r8`Gv`E@BMVFJqe-?~`X>@in!7!8vrg9fxA5)i7L`fa1cg>LPcC^27E3w_Xog@v9L2@Vsrb%ufYW9K&ek;$M zK3n|pON(?vt!m5fvgBaY6lY(Uc^BNBXN6+8ByK@krp&b903+qSdjqF}h9AH=YNdwz z%##^E3Gkr|8^7Ilr$55QQ$c3+QQTi|#vVa&Et|7YW9n77N^bTXHMdl2YPVe@Z_WAe zp8c&Fv)*+#3+06T{DLLl4J}ULXbL$w9A#fQ(6c!NzX0-6uOW20JJ#$OupFGa@83Ac<4vp-9jTiiFY7Q+62sv>Y^C$w}Jo z?z^tMNYyU1kr=I}MF;h8j0xO$w&&eX%gThb!-$@;?bz|^Ai8tK`hgD~N)??DnC4$j zKea=Q^Y}hlm>zw;yr_{rG>4S-*rUIje50SSbbO6!mklEwq%z$>z-E@VW@gHABxt8H2OG{Qa zZ3N3hQ>x;EMKGe>{{JM8WPwj`^G$9SHT2jJL*1{I5;`E<;coS{rf-vH=3Y1xSIwP- z;zxs?pixNkk?fo@M=IfWj9OQfb!+Ly=o&E}eN6uG`Hb3eds(Ra6jsVrzy`>n!~)azF8 z&b1)LJqpA>EuhStVTo7@I_mi)QVh>NTDE1cxpKoIGTZJ)OK+H?EiYo+s76jcw{;e@Xb$pzD7lqne8%7RxI zzm8wAs%5?pITgQ~eydXvnKmo4DU*(3BL!~E>?N(ITO_!-k+8V2;*7b>2(ib=!NRMD zw!#SVJ9TCL)cmvjm55k(Dy74NOnkxTuf7a%?ggQsdG9oIckWz0r6HMBnsBE|bJ!XB z;b^N2()V4C@zj{usB0O8IT#A7+u)!YtK?`GM z283yAwhsAd1}^wc1Bpqwc2Og7%|m>gtZS}2nVsy)TtbQXOhR6K;%PmLg?ErGQ-Vs{ zp{1cxeHU%Dr05~QOZAIQk-2cH&5B=YS=}iZuS-$EvUFq2P}6qVAo{)mI$bT=A;jg+ zT|&WgO~2qY*NV7q*~JlB*c`cRvIwo7X5}-S7|QAloQkPDya!IczPQ0nd7}f+eGtGwDQL*1-YSDObtLGxO#Q?a>*;uWum0wa54MB$V zbc`xbfg*f&ft=-P`YE ze!Sbyy?7@yr5FcNspC6=uKUeqW9f!4&8 z>`8YqdSMTVt7kQ0OSYLc5EhQh7e^f}P*a~|mceG+=XrClKk#^p(N3{svm0<+voMaQ zkTL4bHFduvvb-fr+KcG@v9wb)b;-!6c)FSj-4p>KHLe2W<`=~dj0%?8LbeBVkI}`C zwltDNG&JP&`_TP8MBN-h%M29?vTlNbOixSeDMRE_jVI2SWRYc7NU9w`^VJ>RXz(M4 zG2m!`!+&^y(iDwaU8`!F;j+h_L%3QCzU+Ai7Z#{L9?PeB0|QpgB?1+X>%>kk+_ABs zUm|zQHM;|9JLm!Jt&VIkL+nB!)1Or{Fggtlg?Tv+v*2RSqLf8|+)qm&9s|I4dT^y& z){nK&m_UK-%Iq*G!~vC&z)4rIeZw(w;)xTQcSI|e${5yAJ?zCmNg%u2Aqodo3_Vms z1W}{+tlg+Ni`vfFiP+Qi8?&0odc)asPwA@^X4nY>!i@T^OF~?@UxY9vp7IOz(V(#! zLCe{W8edrB8y+ynzNz||Gk5i@Zn1Dokh8Qm9ht?}_-68%{TnQOFP?hwllp0qeZ8x7n0X`` zJ8Tg(8x*{FAyHk$e|Q*WCA zu>-D@?fSW=-Hb8bCAUry5iE8+snM)yXYV0(-Lx*%%W4)?kBARsBF5_JaL`npa)Z24 z6j6Sq8K*2{-z2t19Si^OCO>GXsB7Y_A$FA&1x-3!u)L$NL|q4*Hpzjfx!YpoilC;R zJH2})%W5&Csc+{>ErWL_(x-*&Y^;-o#=vK)!u?d_og!yQ=X;Y+l9KSy{(bzWhHHJ? 
z4r5V8Y>g=oWQ<#c8p_d@NWd$HkJx2^YRIQe@uU8 z)gsmFn`9g6XK;<*Mmq?rWN#rP44^}Ag-wh&NM2b1O8sp0Jn_U6>H@p1($I(H8YYMy zvWT>?V2)M_1zIR6;lW(Q&v4uF{B-&lo1=B#g}ISMkZq|ctukfBaj!cTjKAA-4qbwD z00LnT36W@9JShoop{6s{=8LNKl1)p_5WdulaB;&EO|~!wT`(waJ-r(UAIV)L@a%sY z6kI%AH`&r#-5b%Y0w&UwBtP+|`gCIc%>2pc;~9$!%i5JIi8f>nbHHZ%2?3(3V9sf# zVl!$qbGqDI(U@LvslAs~wIsj_8adniSe$$AsDCj1Vfv0CobZ^Zlr}v&rLG6nP=WEW z>PA0ky`3lT(M|$2nXMyNSU9nHsKMh%HJN-IBABuPUx9>u51x&;G|JbqB-0NgbDX5D zvF}E5ctBmMiAF>9hGVdS23fv}TTpcitRAeRaA^4I3Ea#tGbhG9@?gvg`8+L@Z|&!( zEQO*8BbMgg;>_6UgYB(xme5tiYh%}~0PZ4;rG>PIzuO_PuBxNXPB>*sF2HkZxI`;!l- zK4|HWUew{y1F#FaNo0X4D1NU(;a9W9fbyYvroyQVSe@5Pj?XHPGG4B>P zZovqc&R>1?s=7b!3|zz%F);Y@;5?3wV5t33Pi!B!WzH0GdT0t+cMkxoB{D=ky4GpA z{{7-p8h*0G4WXmwqcpW|WQWS?+G<6EOeCu0QLXygMzbQ26f)`H9hHxsjL^z{1JJr0O0PvI(DUu_o`h8eO_0q8b#v$c z(-6~uO+&tMv?VAU&(&0Rw*0(ZFeqZTbL?`GjgpFy_4)xGDo&BhUJ830v-<*+Ix~AYQ_l5)V-zHzTquJUqtKFtJy(^_+Hf$l1$y+v)Bdj>`ZoB zewV>$BE)!b7+czkxsz_Vlmx8YAlQnm^ZG)0{R*wQ+_*mVo5 zi}|OfnU9;v@dE|IwJ5ZG`m{3)8zsmMqaV>QvQ=0r*_0+Yf|>QMnvtteTCZxD?FqsF zYJI+XdyeWOx&l#dmDQ0L6R&HGDRZpC=eZ|e(6G=&>KE8?r_Q9Da^k|8#mlj;{$cvQ zUW#FJ6w(tr{dmpUi#>HQ@yu9;mFDfW*I#oYOKjzo{SrXED^NT0Mur>^j6?AtE z2Fa%lD3vn3di6d7_|x=( zE-ZH40bxCG*fAy?sRqN;oiG7_4J&Zj>Iv}ek@Gv;Ih$qTHrKt}&WT?+*9Fswhb0Q> zHOciuH_$s5k?qM%r!BNdP0cdfTuZ*U)pfEkFG3mJV0l^g-zKnKjY&cfhb*Mf32ygb zV7Ptf_8oQ5bjl*SIQ>E^&4`Z|ym}eW2Adh{K~M@Uv@!uC=3_>zrCxdE3ysYtB-CfU=c)(;`C>^7`O5sv9wO;)(>HdaBWwm=_mfad(Kl6;! zjAI4=Zu+hIv~%Qv0CFDp;t+g*VekRQ+&9zrB|bNt6V{^ibJ6~zZfi#;Z!#l`?7Wnm zs!_IZ8Vh`IxPfx3i6Oa%(}q)Dzrf+tHbl0cV&%0JxcuTFI~>2M3Uk+sH6!`bLoNcO zANbMy@ zS{=ebv7YBxHK2-~e{^`=eY6ODl%J|61OtPs0#&MYw_L@Z{VL^oXt>bl>eY*+cx#H5 z-9TDTv^z0JC$jRqw6mWT-7+P((NwdhcYpo5h7Q}Iv>MN7@7%l=Q^8&*srCSfB2Xbzie^m#%2< zxz+`?E(#i+IrD4DAY3B7tw={^EmF_fQN8C*IMFr6JKlJQp;?Tu({iTwir03kM-H}0 zH=jKnvfGuY<20*tk+uPa3CfZCcuM0rzaDD?CW?*GTpz{95ZLpnQg%OZSU{0W9r}VU@&>?pXk;xDMr~? z?NPAUE8hW%(<)uI0I4k#HNn8vhB6H^(B?D6A~bg9Mw&j*6VO7b7S7Jngjy_~aFN#R zqcwOY{wGd;e1y8#z~f}i=_7~jR#p$Xa=D_FbmHj@tJ2;J6f7o$(G({~uv)lS zA-&$y6q7=9KY7$XP`A>7vJY?Xp{H{lMJ_OKzKm55q@O#_le$n{)QfO1XHU4+>io5w z)3bicANrU6t8VtY?w2_`H+Sv@O{ZCG!>9CcaM6G1k6KuynaB8|L52lEfyd)1;m$K7k*F01Wz4MYG|6 zLhL#pd2AK~1?m}mEa7?4_#h`MH7z+PhFyu~Kp-iNNSa1?|DYXGZMQn}k8@({?)H8- zXf3TNjOhKn8@i{11fBeqpL0))W&&jG& z0)4%Kf~%psR-MJF6S6=(7~o-1t68zhK+v-W+g`H~6`$gSg)>H%+lIs+;tmj)BU<4Y zI?vbMMxwp|4|L;lYfQ{7k|0P^)GK#)JpTaEonhDO_+8Xy%98m?By+P;ujrdS2ne*? 
zEW`%KvnI3Dr+t1K2>VYTzbVrO|N8s*HI2RceZ)O3eI1hPou zf0Dh-kriHom34=wEim`CIn=y#OuL$n0f^^JJ=ww*rUaZi37E(oSJyi35_CzflNSem%cu_~nLqvLeJ zY;+6>p{-?ZX^X98zJ&D_RAej=&HH-A0dbeEXq{ld9f9j|nl)Tn?H!H636S#Y0*YHJ z^GN>Y_1E>>(a?o+byOO7NUXfJ+Gy5G)zzj?fQ6=h=l5Or>9%ArTn`$}QnH!Q6Z8wT zp;EO*^&YL6(|@CyF)vup=-PI?ugFl?y)6n8vG~GJ$sR4wC@g%E09njQsm8@Qe zx_kL$U9{VBy#|>5TVB@~mluOVY|e=_+-I&<;{0K;IpMV2F*wz`V@OtVh^f=xXh zR2~Wg+gLNcH+Hlx%JO$wcZY;Xbx)HCOOZuA5r2G;!#5D5IYfedCCMF@=q`RcUk(2- z{Qw91L%n)2$itS8(K^GQ2;zKS-7Am+dN~Ji&+~{VpsiM#^;IHt;!6PZNZ<=_pCv%L zo@q37Bkv)@x^D}>SvrccqA-bRJQk22x)-4RAEzJb6MLiE3fhBrCtpv0*6L31w;_#6 z%oTq6oqPHPCq6m-j5-EYR@;vPGn6z=n=SX7Ah{i)Y(9>}?#pb(`bZs3JZKS!nK>Go ziEvD_L9{seXVnj6>1tzLQ`cCpl&hs$$p0PeWBjmD81v#6jw!7n zJ@wpG{kXs7N+?Q!i)o~k8=7U(4)KY(Rc|=vwSy)0y9TdOuCLV`0k$#9wm>QYsOx~T zemVV6CvEbky@zH+v0wk)JCnCfk+JKv_*OU~G-#K7S+Ga6bil8#J8*;peHfs*B2n9@ zgx=EJQkTkt_>!*X*iE%n%#-iUF#3+4Qta$qtr(-ITW6!{M)Naq;*gV)JDoU@qXnZ-Ek*#(nZ%aAnMPwVZ>w@KMW3_r&H;=A6HYbs4jv7aU z41Qpi*bu6GwPK;SjZ&>1*2z;dSFKi6!be*ohK7z~-DH#QM&&z3N4AezoQ>hTHwv z-^ES_nv_WRT|L5?aIo2PR4;+684I-@Xpu?xG$7MDoo1#Pa;V#h)N!-SP*kIR=jcW} zns--2h}GU0SQZwBX$>iq9lY2JW*k zulZ*_Q+Ho-Iye~4yvmWcFV+!sY_4x|(&8zK@7C#whJ*dL5#KC9XQ1(HxBY}lTHOrT zp&{Qw!8|$^TlKVzWrOgRcgnG1;1kU~WRtcM89SMUe;X~5}VxJ-sDE;C&-})$=Sq<%f}3N1=sPE4Tp7!ia)XoXHsG>Zd`v z6Io|gIB<)c6gMl*zIxTAJ{}F~I%p9UY;xwxRbMR3(6xYtHUjIco6_Bn0RajhR2I>| z6=b;J$mnV8ft22`zLD$Dt80B419fu@4D29*g^B!SaI0Gie=q zIK#B*Gr}f*2q-vd{u+X(o>lt~+J`r_M{K3w*HCxZF=%6cVRy^jcBZx!gwSC*jys;O zy0>{gdd?mxww@Y?@%vR)HfC1VK{qVUNKja2^pZsCVqt(2RUMXj-ObI<6< z*Pnb+Bb>@+#v4YD{&xC6?PDD2jc(egEoy5eJ|b6*bUESUjJO3NMe^&y6~BF$83M%Q zey#Y`pC{b^Xa82sYt%2Q>xEig$~>?SK(%{;xMyB)y0+J%>jzjdjla2&A>;O#l)?Hqu+%23fP!j7W4M9sf&tG_NNm}aL5w~(q&qF&$t7af zqYk@$TfO$xoV)^yvpJz}@bVk&>GKUcbVZI%B-_^B(e!p4)q` zzd9!=aO;{z#D)NWrv?kt!Vmfak>;;|Kz{6}amZuPkGj5U^te)us;Cu*X*CyuL8Cf&4HrWo)6C^XWYODsdOqjcF{DjPcb7&+YE?J+u453*52{ss8lO$y)oMO> z^0d0u0cPaN24x|~o(N|SP8N(V98gBwm6n#3AaPkiwt;!6H8X_hu_Gii(Ps)>-SA@? zdP1sw1ODpaC{e3Eo(2?2MwBK7E+ewF845a&mp|_DG zZG43PPVTyA{^>u_P!y6sPJhzKVPR;^UU0DPpqFWTR!nWY>5F`CWE-Kr=) z{)1k%G^4k3>(&e~%S?GY5pKLqzD_d6)Re__5Am*@SEJr=HUi7nJ{3~%&r3W{q6*MJ zOi5?oNoS40Z}=NM?a$m5_y5=bqzNogRMDRC$i<<>piO$!A&^jh*D ze1(tpsG+4V@Q!&rpnKq93bx8J9SP~jwph^eFnpjH2KrpU|Eey{%HVY|&fKD@TrRDw zhaOVh19N8EMv~FEx6qw65D=|=GqF(e*OO0lh=8*Gkofo4gy4Rjd?R2iNhNDrgtfOr z+G-SD$#|UFlv&8h2&{1uQ&`|um=gLp7cUJCbXTd;+2%bRVZ5N)`&K568By;-E5QXc zvIK}-Pn_QUo4?WOIbn=sx`?vSeyAAhW*BRW)jC=YK|;;uC^AXTopX|&rZLp2(-%Bl z_v)RK>LKk#0Cr8+D8iKGv4OWDrA`zllwxG4e{FArFzM!i^=u1 zxb$FKQ&o^Mby);^EIQqWy7lp+KZJ$tp;GjCFlDAZlcxpQ9S<&Kvz9 zp=B>3I;ITPKGgy+qNBC*XXn-9LNnQnCYqwM44~*D%;Or->D^^&OZ7cb&|uI-w(&;!6b1%H$eJGXHP6M}r=|n!dRZUKg zuThIPEsPm5cl@vIn`-3oy~aw}NBcg7JMNQD{_gZcJ>D(eC=j}Yl0P)E@fc!sC{mOh zQ-s=k6AZGJvhgZD{zJ}uY<0UyQP2a-O|x%0okS)^s{=u*>zSAb_W+Z=ls!}&Mu?Nf za#%I6?1|daC4EYo=zYyu(uuZuH$%%uD117Nz~KGqM-v#dAOHUG@6|Y{5XdP+C)ALb zw}6T50zJ~mVGonDd&poPcs%YUca>1}-Sd7R^f^0sO8qoGHT_ItgDV(mC207N;;eR` zU2@u<(mEJ)-}twGZlnJ~{Zcbc81vVHHckvoC4#Q!EH>6QRu|Qv_48L6Y6Al|rjnNS zDNF*1Fj+D9B4LAKDDA@+z=#3m#`@}t{Xjm4OxlvL<) z(W(;-k8u2I7O0FEaqW5}%{GUwPG8D84=zOgF=%@P86ofs@Lv_LK$P9UxclSC4KQ#3 z(Ky2?M5+)v)mzgKG(8x@kPj44$+!b3AYwv8yl9*Iz~!|aS4pIK_Zo+Q8YS_X!&Ggp zkm%|LHY0rRn*X{->^9ufLxdn+(Hj-Wn_Q%quGFnpQjIZh9lMFJ!6~Tv}gw zPs~+Ou#3d#)*#%=I6dwH;~nhsU=7cuSM<{x&Oybj{fEiV>S3?rfLg1fbKq+iC3HQS z*M;6N)hEz>oxs#2I(5`QHEUM#!5NTlaYlQ}L;Guk$zgr!-~sOo8?49#n~_h94Q zC4xZNq5x2%p!^ho&?e$aFG+fY7B4!E=<$5B!6wk6baFAh6YD{D4I+A%hdYV{#z? 
zg+SSL97M|3llMaID&jaRTc=xVXrHZvUzQnr0w(Lv^rFu+4+eGK z?(Pr4>W8^Mr_ZM0OMWo9a$i^3orKYpvrNiVKPVd;BY3uH8M{;aXO4R*3r*tEupVy{BbZQVgaTl z8r&f(Aq%cNn0zt$M5_}#RfXjJmJbQo1O(+$4{|H2h@#?+5Z_~Tu= z;T8vgsUdaVY*X>|x`WM|nrvG;BsH-tbw`ng+c{pl^L(kcq`S{YwdLzu8S`L5M*M`} zJq7?z>Ci4=EXd#xMO%d6KcL{$qkH=vBQezsEJo0)khfJ0Vo#C~tF9bcrrEXb?qPMN z%l{>C=Rf;<%_rHtDIXzZJ$TuJjB+DegPJ@{lOwI0f_y5)ab41{kfx+-S1JupSmNCI zZm=@z$(b|%?6&^p3pL7%;j+AXHAGOS zqU#&0tJJGh=aE)OdO}pNr)xkVnj`zD>VT~3&=WIOou{(iaQlfAW6~4cl;4laAfuwR zs2)5<=K(q%y8#6}04&qHCxb@DP7YULQiAo{+EgfRWJ3|^k%7j#&C9r}-)I0|JpR2p zYU{aUPFq4q8td|b6FqJ?s-I2Qb6+6B6=xc(c_|}EM=^fyAp3_RGhvY8%axwHV$tft zEGkS0gSe$xV@aqyxoN;nk!g$yy#Gwnx;U%BU(G_XN3F~ZSRHs6*So|0aF_D>&7 zzny%cPk1X%&iQa)EdTWQO%2)0p%u10P)xc8M(2*knNfoFowGC^*u=6{Gl{LaGHUr@ zeFp`bS1VQU)Q#pW4KNjS3%!8^J}U&D0~*t~H}Zn1Rl^|B6sPU!J~b0b7S{T6bZq>- z;DNY5d0)G+Su7kM|5w~OvPF#{EoV|j+rCOpfbKl04sy`>oXnq^e<~I=SmIf(m(+k{ z4MWpK)im9{vbLB7lJ*6kanpVQ83?ct|rhZ>5JL#CMt z^O#{A#%An?Epr^qj@X_RKD?eo1F({C{J6YmgHbi_#4Yz#1CIehm}n<8Z}9283HRZH z$-~K4+OaDcl}2S@JuXe`jH#f8R^E89d!)02Zc*mN&V@v%tR;xt|34?6>r-;1eXK=5 zF$-WzPRg@TUDM)P1SjvO=6c!H%8jxIm9M_2R&0a7*UyCS{J5UpMOiqfZ}xF5su_es zKv3%wa<_D`q|FQ`3@(W7fs|zO^>dz{yZ$@|SWiI00_4;;>pML+5tGktfKsYH8)pnc zNFPbuAiKrCEly<5eZtMtheJ6g6^I9^jds+jxvH9~U3c=u)A}l9G0`r4Rckpn+Ugj; zgxb5YSj`|+#uG68MBlYG+nq(-HVk@Za*~X->xvaixK**5&ZE*DHR3r6qsK1s_j7(eD_}S*tHRQJkiMN-!Go@F_N0(s*0 zYg!EG8Hkl&MP#L6b?ji&29mu|u4?FD@BZEo7$c$67hC2T*DUUlkg2x=86J~CfDqE9 z3swh9Qsw2UetKoBu2>W_!@&ag&Q*ZrVL!vH7IhKXIa4z&<&9(JvguhGzK0uBA>hbS2Luh~)m+JTQ z?(N8?Hn^{=0BFQ{+01&mtX2YN_p0{T^yYty$6Z<5&O z#duA^CXg-igY+yo6MAm?po&_!K6->N)=^iejZKKCbt6+9(vBQ;k5)2+rSePa6VXO@ zEu;Db2Alj0+#%?DIyM_PCGdj=Z8#ir9E$jbd>e`vHz!baRc!iYw0kr@aALw|=G}3( zXXz+<8oCRKB}KV6wRQ>ZS8Hpj>o{jltPB7!ON)3P%FM5HBSwc0l$Dubjp{3?U9ynE zXeWRz3b7q?78)*c6Tg0F0;`;ZlvInPhHDXsfT^|TD$fppgP5KRRX;JSan#CS6dVShPCnL%pmu~W9+)$RM5t^GC5a`HbU4~`aKY)h z48UO`*yqkDd0dc?pWpKy;g0(h)YF3r@<;NFB@I$&=qZdBooF0tsNfG;`h@|{ zaR^pj#P=!A>wX{32oAPG&rGFg!-J`78a#ehgMOGrV%lTyB}0cvemwaBt>I7pRTI(r z%~FMYL{rJUjJF{OR0}bYJ63$pnG+VGtVK0+x=z~c(2P-=^rVKT}qU_$(53Fm4B48vqTDfaOv zaIgQUwT7l-c4{EFSyeqhK5=)mCmbn-nYN0DN8Pw#btu%J%4N&UYp7S}4-fYP(XDeH zA@N;uV~~HV8&_JXrnhFmD!|YW#|)astnZTV7JBgKn%dz(MjNs?bb1j?_2k~ylH4a4 z8AmwEyG?oDX^br$&Cq!7W@)%EdXOwwfurl^hST3`$UQJ0SuK6KHF7 zF>XM9k-p!}YRCWu9E{8p7Uv29K$P`X|F38Bocd`=l(RZE)JV0Se`>#(RnPo7{cN@a zr6{yiI-wo458d%$yJc@*Ej}D>ZC>(QWB05b zRG_g{Cq~>ztX&t=t{@gl2~NoVXf(GcaFgVqDWiwJRjo@YX+#fiqNk&G=<5BUk&H%X z(t<9SMt*akZNfuaTY3mgWMc^CV`P9E>ciF+v#s_)oeOMD^N5N%uoOXU>eb&3Vvo27Cy zW)71CCq2fQ+H>k@R{@@z%gO;SlSQd(;BnQ-~dBR9YxjaPC$8wBiI2uf963d4Y35Lq5wS&bAc= zX|;IpC%oY@pT$#+1B`WCREuSF)#^IzrV&(bG)r}TW4Rk^Tk3C;FzmocH5^`#yze6G zJ3%%ypz8;@Mh|^?_m*QQarls{hSvLF^0r5%NLs##F z-H+A(NuJ-2l<1EAyyy`_8bLO*3ab%P@qz zR3k=8e!Vcab>;lYlh2+D;cAQwZqC58d*jd<+p&sKeNmvHrJ??`J31Qf;=A0tu8HED zLH*EPA)U+pIj}kC*C(HQ`e_NJl(vbxR}Vm6{`;unXW@81GT^&R*(7mm;X78B%-!Su z0?Ot*JAX=-CeD0DPaSU7tINy5J@nELK)A*loHsT2Fo0 zO!uFa1W5}lu7Oh8UD+9y<{6zD*cHVoqg(I;ft*(3?RJVvsX=C=I|1j3x!ZSi7wHv? 
z;joyy>KsqEZ@(a+!BREm~+71rA)MVP^(|A*C!M;7$4+8#Br`%xuPbOcZOJWA<5x5v}_Z!lTURpUcv*il%z)u zWmQ9R?zf)$L)~RH$e7%9{_I^-Pr!CY7U7-nnh;Ysj5DSWd{E6|x=Q8q+0;|PT;HXr z%9?A602<7BFU7WD=guMdMyMXH8&9n$yXlkKC&08@X3psIewwTjNX^nhDpuP6je-o)`mkb+uSHOs=63| z2r@o$SZ?2$KYiXMbhfenD}K}xEc0i{af#G1S0hruk48w+CW|T6PFDvT>$PfAPRUs^ zN$Fr4j+z15X&ZJ9B&&vLonuVzgbBJE@omBoo2@)}@gpn$Lsq^QZaoPeN|RC>d7NM~ z$BR%1N-a;-0g!HL=k)HKQ=Tx;Aj%uD`sNOmm9s^{df$ua@KUwzHPSd_aNHhxAUS38 z#}r*$dnwy~6jC-4&5%W8BFvN{Er1y^v+FU;wFvCAgC7*AcosIRV$x_p(Q7l)ce$bE zncT;@LnrYxhTm(8grv@a!zzc&60U+{f;?$b&QNWsdTt7sJ~D)W7~G#`q`vJ(;a;RH zlRJSA(0#A-Cvyge)*-@H(}hC115VKG29vSDGmMTh%-(*Xv9_Aj6Lr_1UO2~nm16T5 zMQ4z~*WJne`usCmf#hUCXQ>{HM{a6jjUBjyuE+V{I`qrq-Yrd@)k;qsM|uJ#sJg?# zwdA&+q^X+o5OvA;e${4NuSWZNoztGb$GkZ|u2Fnvbn%_?z>TXMAoctVo zZ*W~uwGFv7eNRsuw}~;p)~vPCXs$H%vn^u{13n-2iM}~IfBGrynZwr3IKy@WzO_}L z`xqr6^}lkFM1KV-|DMhJ5uYuYJ5dwz-%t46J#7Z}1>qx7mkK%d+ju->o4?nWWX;=2 zt1t04zBheaz##MR!vEVb7c;&M4FDsYNX0Y>$un$mn?QN(41+Ui)PBnQq9PjqxQ|j} z#6!pK2ItHjy8FreO;{%ySOm~HtrUb)%row4OLbG%ko$)bKitO8q`OKhYx=RVUefg# zjDL(8;R^v>G$14S1EChKkBv^sA4e4@9opwTI}#6ohkNi9dO!d>p}MHaz=}28qZ-rN z-PO*syt~m6s7QCXqXCoC! zVjF4*t(ezI^C`{)*ACbX>I!DY$+`G2|LVLtgHl?#=|cyq?Af@i8|-YH5}68t z%>j#&2GqP1hVs{Rj1LmEa=vMbaGE65V0dU8bUAP$op&}fTE%$(+;s0cjOl!Hr3Na` zv@eZ6viCFonh9}FFGxw#R%$Fr&l{IY>uM2R;#R+AJfQ09%9X~;4z?}Me^QnFwTWg3 z7tXMsY2R(5U+y@7bb%P=PRb6_H2vm;6FJ@@It437IMN>dm&rZ7+=|4W)}(VVW>qwk z41f9_iR>b@KobA%_$Qv45xUYl?6Kp>3M>>_ckR6G+{TB5LO1 zi*@?wAEpo0IOKQ`Qy_`pH)-g*l-r=TnWNdjs2Z8l%7Ww&(~LKl10;sl_|+Ok)iXW7 zAYv>OAHzjl%sU%fU2P~{MqRYvwGN)CvVGCXjx+)XFu67Yz&N^P*$r z6XLc|EcTb!vl-KtH&To03$_iWa4Ge zy^zWPf5EvOeZ>%XcxeAzyQXdUDIm?2D1w?FXE;;YVhxwWUY3_JQu{11Do{ z>NTs)T1Bs^u9llIADi7B)?R$RX^vFaFA94*zOAm-s;mB_b0>mX^64|41BBK>=vPfK z$X%X6S(qM{Wb#F9fR$Ezt)U~Xh-n&cqh9>z@260AM6R*v@07~J1VaRBd<~U5`5c}5 z&^;jOM~{5UQw3gIgl+Xo+kFVpGW7w1kz`wTlpVVi*~MHVp5n%mxTmj{l3UNDk~ zBg?9TYKSm=KM$}T+Wj7NyCsXvJ{YwL(&=hWti7oD!;DgEQKyy9WpX*_4#W%9l{5u7 zbfc^{9vGxYEV7#ArpHIhRt{Y)y>sU^4P3LbT#p7`;}KcyGX%^ZCSIa)jq*R4-*erZ zmNAYi6dr2j(7L%7 qn8}+g~q^_}2HiwWWYeC$I*m`dh@uEiCq2C^5o|%;Bo}rAZ ziyq^dyL0E1x?yHjLOjP7qZ%0IQLaE-pr?h-<`WN>1GHHVua{HS8!$3@i*8i$umun* zQ0rzC+e6_5-IXT@G;v_;#saNZR@N?7)X~Npee>08JQl*hUEu}TOSj1O)Q<>Se7&P6e&v>7+;;sbSD2S*wloD{ueIb>mXJ;XmnEfaq$H5E$33Wn) zaboTiq>5_2$dG*C8a=Mo;J@~q1_b2xo=-C7Qp=;kxcR$#I3x6YmJpE>9XJ?0$yIkoTy22+F?=I*KwB+Y$)cu7YFjkpJm$480N!ts3^;Tol?fi~O?3#P{REy^W~CmE?a06w&O zwcn&us@|kuR7aKMw{onyo%>3BRWGQY?%zY`V8PXtY!tKKf~ADops}PuCL@70N^G3| z7#adS&}#K+>7fjDoJ$oBWqZB8gIJ>C=efSq;d)*f~?N1NWA%(43#WBhnWRvK8{ zXXX7`-tK#JAIrG&y+k!5#q_*9u{;eUxgyW~IJC`Bg7gn;zsoPg(c1x_<<%F^1n* zlM1oCo{l;Jw*@O+lYztfzJ5kFd)i!euxVqfi7=Bb&%Ar7nvdpj>4HXuF6fsRU)E2zJUB$yS0q z*>_)hI5m(@4hmXYYHENDwcgG0Rj@CN4@qAy^;_4o_!xu{d3s8cx~R$sC~YiGR;wkJ zjTByqYR&*wsIX?bSq3?6SS|8#eK<4WK3W8rD-SoCr@zNx-V>dn9r> z^3P!*QKyCpvY=0l7l^$0v`2n|RwiD()%a$C6IIqkfA{^m97{+wTOt#*VCVr;*R%*$?G<(M38y6 z)Y2FE6X#N6Mr7x$nV-xi+>Jme-qR}%kaB!W)66@b6tnr-l0Kd26 zCohV0pLAh&GbVCAQ`5c0)I6Sgwk-!3JHj0dQu8EJn!WwuaeFofg5cWckKa@ic6;s) zXGyrRiL_V&v!J`GgB4Ev1NWuz`k{YGga|H>up+UG&7#)=NoXCSbg_Fu>UTCh2f_}c z128n*R$Wiezyicy3*E~wvaU%Xw zHxIf+WjBAWVNUOXKmEhxzJ@k659~U^g#*~a`u_Cy@w$E_d5aLSGH36Qw;uY42+z(Wu3cV?H9j5`wuv*0AvvwI&v7BgMoe_GdYL8Fl{KuE^=)C@ z5MS6yrk?T}$w8Y|>MQHLWrKMClH3xk7&#I?HF$}&lwN=R`FQhY*Uuq~8Fd}CQ?50% zM;1U4?pm}&zd(Zl3ksq?o^y1R`R&kNk-%L)sGdHnmK&OnO|NE+(dYCtom$cX_2&Rn zn*0KG5%#&M;1w)%hsyAv1GsUHoh#e@iy%n5H+{uCn0yNU;~u!aAJjGrf|Hq{Ssxws zdt=xBR)*I+k;3vjpoP+Xr6R9a6Uc%xn8=SIqBghup;(@*g}Ag1V1wo4yaT4C?SbbF z{6n*nF~}MtWZMrLn*UQID=Y&#*H}JttPl(N#6d~T*lZfHWuDslA)$q!b-F)&Cm2pQ z%3g*26|d#@%20~OzkgLrz}U##$BkGja#k|igHhXef6P`Cs=s~vHSJkCMD|6J5;NL; 
z!Pba-9IN^G`1kzg-t=vW8ytoy?Fr0ZC=aNh529-C66LNmKAqOC+kT^cD=w+YG_e8T zG!`3rHk(iDrh7Aa8WM4FCP5;8`<7nu@sXYv|+YQ>CEwVe(5uWvFZ+OjhWa1*L$L8L_&X@C43W4m5p_ zZx>o&*T_nZ26d*jNsotwb^{#tMdmYbaw!i=dl=48x{QPO}(Jj0!kv@H5?zft5 zCOc(vsa+0_Jdf^%{jDdoQ3Kec8U?oe`_p@RNc!~M^fQg9m20b6T~A$}4yv#oY-y)z zI@5Rbh_PvEnbV;_Tx*e3Uf!sB(Ju&AZX`dUw&csMzV^e~J_t4<6EOtZvqFyM`qOxx zojak|7%NO>0+>nL^)Q-MveDmTS)m#tv^!|rGdR4}Ths`EEaepS7ttCwV7B{FhQ66X z$+`(U4^gQhDyq8FubEWLJ-}6>$C-K+Mb!E&oEM7Z7*#Wn`gg|!5UcZ*x*CS9`!g0VX**H$XE9<>4X_Lv$QywW zrbNLtnYjzr4@(^KvQX^+4KUrl{r#v=@ zC=8`NaN2qbqJ@xJEVD-+VsgDF`M2x~juJSV`t9q_syE%P)7A_rF{BA6`F`L1Vm5`C z^`V8~Mb(9kLu#aG4p}9vrDz!0br~QyAspQpJ8?2&WUX3mXlO4xppRz>Qa%{wE%}Ut z1)8B)d?0)Q9S1TVNxIhXIQ}94tukekWEt@7VwK|@RF_uci$q2=C|5ia1UA>eiJ+;q zccM?IQ9tEXNp|q3sQ>8NrFv6C!(7!W@!?|7PE(-|C4=)sH)5;5HT~K@Q8Zz2^t+jB z^UrDj|C+zkmLf`X)EpArjp0m5aEJ-Fe3MmG??D%Qn8h@a#=x|2( zchNGO^lqT=q+scV&9$`{79?9Q2Ao)PT(`ODI}vxuC(|!94y6PjHb0gFx2glA4G^$_ zm-=}AuhfBkLv*Q?t@hBfmh9~IYG`WRZ+`Qf1Q^IX+l{)_$_d37({R2u4@6E0Wz3N3 z^)~c&SxNW@c#vV0v)YS#S1ryFc=g-`dl;%pCJas@G8i~W9r;4hT)4RXE z6PH^MHqXd1eUj=N+5~kAJ6IpoqRnj&(Q=v*K5$V{E;TAOT>82dhXP*Xez?Do7WK+f z^|D59DMf_52+0K}9lb=Lu&(B!tcgR+Y-JOefEs(^wEe)%FNLyBrzC<6Nl>GL&(qqHUsI=N57~quj$w z;2~)XojQMy1x}6nBx^Qy8wH$VC1ZvjlKA1Onx#A$w9jX{NtK2Cv|JeX`iz4nt^s%s z;%wdk3}jq*P1Tv_U)S>M6WrEzVk;I&_UfsPN9F-OUutNtmli$g$An+74BXgpHek;0 zVc#MSn$?*vRa1eOLg&-Vc?=SktM!V7=4z;oIK4<(NC%8sULwM;YirGt+I+=$AN7Hx z+b}*crG8XXSk>kP)w6i{8(Id)m)ejLJlSc7B=&SVmJAH!tIO&Io|Nvu6ZFr{&#O&m z=U>n@sQcefz2qnR(}a|Z_R{)#eN7FU0esXLpieXU77wj(fK%1A?;ggLYJjaR6#Wo8 z8ge{6uw1TGj$t8ohTBJ42hTpOpO)JC=_O|mAnU?~@I}MAsB4|bEJNAFMdT?wd}0by zJWdUm9gP}lYLNJI&Yqi#DF#ex)M1#wq0to~llz1*?(6>LgVv_xLUoY{S{7z!3sIHO zLw0zW6UtCMTR5){T)4QR=Uv!nH1+fUP29UZw{<1yo`1*8L`3X`!?9!la-Qrg^%|PL0cgV zSr*$mohO)a7E-iMMT6{M*xy4@eiquiPcS`*9Lv{T?>7nXk8x~0a@^{sh)eW#d+uHxb=w?n4x_ee1k1;|lTC)VzUl2> ztyOx;69#UqffCdd7^>V)WoUp#8H5>ND`zT)tezDVT#F`@9Hsg?;`xOGHAwT}7-j3S zRrIRSvQ0n3^L(gzG-v|+u0GW_X}J6;s(`lNy83d3vn+1jvQ8nH-e_CU)9oV8U8FVq z0ISG^2TM1nsUPdq-O8w6iWF~w{LCd9NqP92CW%W zn^<#)854k^=xZR@2hz>x+Kg04%fj}fXs##})zPu%0VE99po+j0BtoI*NsyAlQjJ|` zY0eaI1-b1i8AHF*I?#vga5zdHP_jCF1@6-B@KbG$3titzmxw&**X`Dho(w_yR6qBr zr-`GL)d3zxZ1I{)DppS6(@F3!$8&3(j!PcsusoR-frw}KWax8CX#xutcY?zaqaJK| zw7INcxS=ZaR9IgBIQ4$CRif4#oZu2t^ERfRL?_-=9@WRu9o+CFI?zv_gHunxs^>1P zHN6UWrB;@B+7XkuxngHoVK5a~?iipTtk8>6%l@lhEoQmo?@2mZ zZs21xG^e*?7>%tZ8V7I2;!!izb;vIqFa;x*Zg+8bz3azmw;kD~6F*r;5PfI-0JLw* zBKeT?cwPaOnFm`g4<}6+vtkaZ$vZkh3blEVQ7fDyU*U*hg6P(o+2~ii;V-@Ax@g=z z2(0y(jq2)#oy*?*g6(E4yAn4{Z1og3d&k1e)7v48yRBX;2DOheMK9jd;;ht5tLxa9 zTDB$U1X#XG*;rPHa@cX30V5#DM8H9$7^dIjzmQL~|J~F*<`8-BQ)ov}QW!AQP(0^> z!^26Nt*7f4eGT?9ReCN5x7j!kayif)tSdLuo9SljtBik>S=#guoqfWWA-j{bBo@Oc z)8w&wkPm3;Y?=n{lt7r$ZIg=Dnou`ez}=-(x%7K*aHI&ko&g6DS;sYXf!a)EV)9^Z zmgfIUGiEK90g*(pumxeYk|s(RN7U5y|+ z%$gKJ!FtgAmpWG|@HG!y>qGW&H7GBaJ#I*P+CMY0Vq<5I8WEAw1h55Khq`#8H3dc) zMHy8qFSCj|Z6j-3XErA?<`TlD`f~b}E>bkos}6P?#bsSesE5!ovRSHT<$lJ?g4)&$ zq*&{uKXEFOeP^45x|NyHRp3GRoqh&m<&obB36jj&Xl(b?M~JniMwIfMw7JR!R8jLl zglg>Jb(oc@*U2MK>p2_KKKvniTJqVGF{VHm>{h99$w-aeYvxu32bHH^Gegb8lSVM8 z2WkTv)A|B0{Al_x1h43!isID=u)rxQ)AmI)CRKpNdUqMmW!HOytPoj6^9Fv*;9gTB zr6yvjXJWj`md@+GNIK`XJlmsh%{DdQ)s?GWAw^35de0Gw=Y4y%DL0ZPg8Nio#prSM zVUUdE+Ud=0I&$mDOi8hLSz5eSUypfd80)1)Z}jAd{H^}@e^oj9LXu4VXk4XLSR#CXgN7!0wIG6B zBeFnQ2HHcvS+55@Ps(GtJF~Uqg+f3yExZuBOmRd%s|&cmQ(AKzODtO7^P3mS4XO5& zOvedyPiz8<_l{d|q~0f#?s#T;dCl>#6>oRD=H`84ty0<41oT_3yREE^KyHQu7!O?~ zfd8f}QcsO-0(*Hp8wM_?!-ejN#DF$oJ6bFvhGU9BeLoj!xR5)yfK#y&JN;`aAn#4z ziSj1Gj;`q60_3D2V3mFljL`{LmwKVa`_qqq`WsCyZCnosylYMAZJXz7=FL?-^|q8; 
z;S?SWMbRg?+V4?_@3E})R^Lcwf)8ln%A)3VZOa`#a_O2DNNat1jlb4GZv-v~ifQbI zklS@hz;5(CKx-~N11?51aJ~eh!rG4Oc}twhcPx zdy?6W=Xe0kRIl50?w>j6sFp}T{kUdVcR8ru(+VYE)`y7Dj}fB)Y-te;_uYT-lr&PM zsynUYx$3vST?~5aKWmPtiJ#qka?t^(0OxwHJuxUbx8ifVBab$2<9Gn0j}^8MxK<WmmT?>QF;SIP}yMP-#6Sz`(Njsuh-IOp<;Dfaim4_Qd z(2j-*MHxzucT>*LH6A(auYdZ3rZ27XJs%#8XPA$Rts^7I#COCAA9WR?BT!M)ZxvOy zS{>4(JvYLL@Hh?eq|=HphLInAOz!{A>|LE%=ZEb^!Cj@_*uiRE?6yw>v9R|6S>4gI zuQk1rZfn&$!(Pq`@7g3p;(8ddw!J*^$qc)y06CM@MO2zr|EZ+}?Li*QBg42GhVIFt zrV3%Cf8DdZiRMm;zs;;cCDonwoN*cx1nsq4uwFd#yaeExh0|B`^92@PtJF-_luW+% zre@>Ji%)13j0DBct?|&C)DXKr(L=G(%?H!ofIJ3Aln9ihX++0RDkWXXvU$940xnOU zO*d{{s>M}+AX(BfQy<@?92upx`kgH-TG~uwcR&nw2_Dp_2t82n+*42KnG>ajSQIb!Ls=Rlsg(ouKU>Ih=gG|rkg1JXQ` zs9tl%k~WP*aWeJZ7#x8|PYH`dRu}AImX4vE$CQE;1hXRatuA5N|BRS}?K^J8$B8rV zr|6JY6gXIayypvOjhFe)^WOE^Ruk$b3C@z|NiTR`t4-5>`oNYi@TIDg52h0pFJbH{ zyu@#xqufYJ{1|%(_gAyCv z<26=WLUIvnbm-D^1Dt+rPzR&p27{9)i(%9(wPFbn0t(W|n{xrMK=d32^fuyKu`TEpj zP4i58wYIigS&^Z;%DNIkEJBS5Zmj#Xx*n_RUeY@A$z2b|9ps>CN>vKPxxT-$QqeR4 zO7SBBQ@sn+R{*ul0bg zjcKZA-F8tz6eK*x9 z`n5r8je&Nd16)jQQ#$oSDyMo!m))qj|8+3-x;jG^wIu%6-Gu6my*e7-GsPG~xazGo zQ9F$#Wm!Mcoy~;g@IrZqBamux*9mmRB^vZ0?z)BilBd^UQ;4JhZaXxhD7={PT|eh7 z#+x_Q+9%GbK@^45hMo-HAz4T7Y0yOUj>ax=7M>WN%o8j6$R+E|9PF# zI9KlQ)@=e{?@xco$A|T74T|Hi-WuR}7v^kOy&z(LKYPb;u~Cw5@mxGzi|VF&$LSsI z$md?rPv_ltBuU{0@j;|Y7&AzdqwG#rSNa{h01lQctooQcmEuqS8%f;k=He4?>YN2r zD&AcEigSB_!vxyXbA&)1Sv7)s0cu|clWWP5_|{}V9NbE=Bx&27M`)xHv9grZt>!89bEj(>8JIC=K;EUojNi2QR}MPV2}Q< z+3mQ!rTnzZahWg30&dKai++kVkI3|dv+VY#zTAQxR|?APZj4v{3r~lIxlZ8DKU&fd$5LkTw=m zUQF@)8K+KPv}kk4#|oy4)LaKfHry*+lK{cK%cf-*pwO``l4-hxB}8XrvuCG=m+M%N zrnqY0$^Lfw!1WvDJgjxadb`9n>vSp9hv7LltdZh4^o_#*4Lt(o1-B!}(eK_sfBNQG zd{Yy<>?Na%c)+)S~(gA#eNNA7_8mOkuD_%%-;C zIPKohsRr?=lgiO!Ho5Ui@46L(OBTR=+~U7%^S*lJ^fP*v>QZLnH38kXv!*?!l;0)X z^J9b%8B-nVaZOW&Khb9h!&7fy%Re^N+sN_Teoii)e(vm92`~oDq@5gqaO0o2hZb64w0R$qt;eIoa@on*oeN*$2d3d&n+_?TU-3r+w!>`T;Toy}QfD~# z4A1xcbUEr~6(q-D&$G}}pvU61yWUyG0;J=$ESGI|(DYp`!gf0=^4o>7SyRKFyhkJi z5aIN;wi8JZ0+_B~%?8*R`tCaDf$G#VQT>sN?WF4i+B|Y~1)hZy+2L}t zo0r#hg;C#A=Im#ygUbT@<}!eJ4knBS@uF-G;G90>KX04@YHYCQ$?_YW)fFup$2ZDC z#|v@UOr>;1M-*0}Yi2^?8Y@OYs3|F_)B1#lAj}}<+shL0$ zNFGJkSUmmf7o}X%GlOvfjV>R0?o-?$S@cCp8GWWXB`Z2~V;$H~Cu=iUlrI;tVcCkU z4hjSN9=LmShAOf@2?*(10{aa*mz3$u4lk>ydhgNuv=ntkx7^zyfvL5d?0YsKai-}3 z39nq!m~Ah46+am!m&wFd6NJSKlEv2*$b~o#;PuzTnj&yI^i1)v&5;B@grs~GWkK#z zQl9WOi8EEpp3Xos{`8F^0_v%}gm^^SGm`*y+rb^ogGw z_%X|(fTuA-&9{D$khw>-|2tUeq-8=|$T6kIl z&s@+WlK)bBqgdOWF1B(ked=Z8Ru7hbCmU{2`kbG|L(9^J(Ntc8m#X6YByB3?Dp?ltkgR*9oU+{G%~^^LZB=)SDeJjszssO3NZP^c ze{sz*x`Y+KoIQ+bpjzS1?19X>>(2f?A(E$iBTXQJ-F!&Gg}F@%TZ_*=%*+8&=gW@V zqS2OH?TINYHc{#ULNi4lHz2}9rz@#E{mg}2Q&Dj=PjP8% z+=bofGn?Yywz8T70;rFfkBpu#gt1|)Qwrt$S6!6;(nP`IQRXc=!o_P`ENe5GNA~#t zBpd6ca=oS}#7O^=jD`LEi6)?Q;E<-)&;Dr1UU5`V$9L_GiC3dwOglO+Z9{rF#IRY) zdV(@*L}ieMS$L5ydPig7ty5MrVK$paBAT<;q*Tn<5qg^$Hey6uK+PJyAJ`TdY{>vg zJwuTl?OwBKnAtg!OcKbxxkjW@Q^i>V8NuU46N{&wIdxh$VirG`eyj|LV=#Hf$Ep1{Fa*0|IzNw*+eKEB@*o9?y*OOzd7GSQfZfq2KJ(p)YelQ{Z z-R`)|uGL&)a0FL00ldI;LPHh=uNI+=x~XgMOKbfu(SB;K=JlxLW$+5-sCdad2d7mq z;6-Ty$vxM2vLyG?T!{2?>fxYgAz<8}WUON3sHhyHmfgWTh3iNb&Vm~~9|LZxXSQ`m z9MOlAklZHJ^s)c0ka*01>3xlrXd0ab%!62~?YQ`Z0}tUd6NBgd#mAC-5=sj>LA9cn;Ib^spqHdvY~5lq zTPIOqv0fp}SJs%1VbbzA!Huqaj&EgH3`<|=^K6`v9}E^{2W_BZ6-}IdZaDzkED8|` ziOy=`spkozLD1H~!d=z@u;XEEJ4&r-l^w_Lq<^!cRSJ~NiJ>$I&jiCbGn}@2oudqy zR-n;X^Mp)$Fa7;cGRTvec^nnCmhVlCH?@T4s!I}#HP4_Q+XBVx42E9IIth0SOUM;b zy;7oKu;%R4=_^lb1a#nK)evXNLnGm4HZt6-b_^JJs01}896=L*xR zh=Hon9M&x@%Zeo;^2deVyvujz!e5{r=Ix91@P>9or+P@YwBp=7ok>w%(e?Whz6R2Z z8jgCEtCIt?nIsVNc<6K2y`kYV8F*W++e}J)S*z@^qeJOjv%W1lZF7(vKX?V7%zg-c 
zO9tUn!&Xl3>vup=z}r!jK$uMuWBuoYhoHfIv>CeBjkCb&-8Ayn_EneUEwNF{^5Hc4 z*;yJ=njFQb=txsp;Bzt>ENJ!oq||;^8^q84w4fpvhOAb1KXt%66Q`H(sSWt z-kpA@>k;6%?Sal`o_US`YY9%j;C-1_S28|_g@x0npVO1~0BUKWPkKNvJ$9;Rp1H6X^9?N?~;LIO?XUF~Ol#+}#Z;d?|3&X%h znD91xo^t5ZrtLg#xvq4P20fY&%zWURdRQzju;!0NgsBy`(q?8vlSZeS6C+E?^Gvu> zoZ@<}tI@R9%1YnXjR`GX*3X=wnRAVE7;!GVl7xfR7;Ua28&$6?R~y<*$vU3vAjw(z z+}4IuL{?tjtjewcCW@soKzr{#W{45jNU$RpCu}0@;XLp`O68l`ySgiJ)6c zL`-C(RDc1IujXj41dVfS|1Ly{5@PP#N83n*@C^#At)810FO#GR1Gt*mAdpmc{D8A0 zDnjbRa|n7+Ff0`O(d(yAhcv_6z&U`U9;4YrpZX>>9z376M0fnh;%}{LbwCYquR|-K#8coiY*j;F zC*%5|R;#rG36CQ$kkQRjvwTkPA~N4hztyw`otoz5+@+0WX^Y>U)o@2eE872EHB{jk z%R&BEf1_PtaFHxy(joHZYR01DQuMQ%C#=_Z`&uLl`7}lvR%KUsc{A&Uy1cZm3zP;0 zQJlWjq&?Yh<_|!NrGM8S&1M;kurs#{@ZzKa!=7y_4%<(NxVh-oPA-$j3Z- z5A4xHO)FzdMFBlmNM}z)CEP_XIBQ2e1_OOVUk74&(4&ev90ACs-_+xAp8F`Q6$7z` zcmg<>AFG}mS#e;rcfN`fsJqbY(RlQENbOF7*|1$iW{rjgc-&IHx29j~;GzC&!$N7` zMK?KzJ!ng!uvzh3<<7Qi4`cNvrsyaVi)&bIpOs03VD28emBLyaky58pEX&}5D3a6w zS|@!OjLU# zi!#JqZI*oN_h3?skV=-t%miqiKRTOI5ba>TezGtV$n6LFyKwHJTJBs0hQ4}3QS(Ua8(;p#A`y8*Dx6Y3K%m}I;grLuRIF*AfHHjkpS zijof#`+<2XKKi0!aDIvbVS1oXV@s(wf!pc?_&=E4*HBY$yX=f!T#RI2v%2o^zKfN- z@f$SRnpL309oc~i_2N%*clycn9e;HP8oF;J*49MA!M|pxTlP7b zwi?9gGhS3#E_wFk(hW)1%jYz7Qk?gnmz|M^;ALt28~@dv$KA}N01S%hQydVjjwZkOu*ez*j>N6HQ@PF&I_c546!7jd96{kk&)T6tx*&Nul1z!U@%3kFmr01u~tzZOM&39uMOA0a7ZeFh1j7k z$%9DS&%dJmNwPgY(fP;P|A~%pU@Q^)@A>bWxD|<3Jj5&@WZVeA95ntH7EeigZoH%$ znikVoIHZVoX0}`p97Y&@*2p=xOlZ)otI$sJW!#dc22+T zL_a+QmBo-;LI1>^^o(@wUeH%s*PLa?PR`YH8X8gPDBhr9Wch+7ccYOsfTXlURAi6H zWN%v^C>?bsG{L^0qb!gk4k7s;{6Tl%#J2#UhJY+tj^TxuJQkF5dQVK9l9jsq1xH@6Z`QcTl}^5 zBS5XujTi(8B%T!-5WP>TFY*^?Yxi{wVhbRyV?4e3FlPS59aF2CG7ceE-4)#AxKk1$ z>+LCNSgAGiF*Ubh9DMZTIXm0GwOlg<`REA4^!365nfPbO8$f z8DG7Hc{Id?tzX)rezdMRM>!KI2%yj}7Xo357T@+b9LW2aBMU(i2sC_5o43}Vgim1ms#a~+#+ z*fED+Z*Da_n>%W?+#s?%BUb@d?xmRc(j$Xty3Bq62FFR5;0GXdrsp;V zn2v+^#WF^5*0_!0nopxyIOY4E_QW0X2nBgJ3I_flonRIlIX4dbQ5F-Sx&%l%+Bj|o z?yl8u4&xWKO=~sg(FCh>|IKUKU#nhS0`HLo-h4>6+Q_&QH%*nO9%aWJH06;UDyf}E zBJz}BG`mTT<>;jNqE>y6?6>X$xMi4ejuQ*~km8{CX|Md1245k7J3Ps{E!a&#^?(S| zQtX2Hxb7w8t+Dr11|#5cD>4=9kuWy{9IA*tT>>crNUm4OfA~t<#a7dRy`=oZGF}CB z-Rj$Ah{tD4oA2WQey;^HzlKy*s*2PeVF6Q@H%-C*pts}U$w&VH=o}P9Okjz~>3Ert z=5RM0JZ9)VC&>G=&wu*;Z{`Q&FaMo?v^?9SDfKf_`gxpV3XjGtjUHq7vkDOc#BO^V zw`C@$_15SR`#5ha%a-RCLr2fIkAlzW@X#h#vqWFAY8HwCcgT7Mgt**y{foSePa5f* zi=mshjCynPZmF_T3l3TiVt4e(XqW>_d zq?`=+pqo>KF@8fX1n2A*$ju#m?=RCs2g>Rem8w5tN`D-@H^F|yZM3xdrv>Tpf4LR>pCh$qc`u_=@~h6_p5Fw z!jf63YvNcfOWNLccVoE);O0`d>m~*O4j{I=&&l}OkPc4YN(F9sz!I-EN-jFxYx=}w zMQbv)Z+>rKJS_v$_BPoMgG{&|vQfho*6oZ-o=A`%eASB;=ig@?V9z2h^#1f?2~$eE zd7JDB{n6t=H_vU7(eX|8(s*^^oOKUcOA^Ff0%u!VeS&Q-?fTYNhMsnrVgZ1!2($F{P7d_r3tDh ztCuP**J{onHM_ZRC5IuLKIIsTWh^IYtc##3^fO7REL&p^trY@j^yGNM@(18!(0qOK z^y#3TqrQ%T>m4$IS2R1#+IqRvln_vt%?&Uc5nnwui58>6)q-qCd)rI3`m!I#6q;^S zUk>%YPN-L`86iTLTC`r&&C6>wIz#D3%MjzPm)0br1Y=6&b2;p4sdq!8I5_a!;$(85 z>6nEJTThCy25qf-&X`0fWgim^A-{c69-~#c{+j~8x$rF_!g#7iyx^R2`P#LusS!?4|R zTlw0yYke|)bHuQoW2ay6QN~pWZaIJ-%W&8ISYt0Y`^OIR7$!uq{lR0uIktk6%$PKQ zeTzJ2P}8I!aUyx(a@s6KIjG^mHwZvTb~dqzCCjhBAi+N#_~wV9FbWgN(e%6?5A@$o zM-udJlAeE?jQziTN>f-%IT^j;0C;EiKu{H+?xJFL$h1Dm2H5X<3TZhdIJHdcXe6U9 zm5MUUlDLnN9ep7&=XraBJ;x^-@`;SGzF`cEz?_p!qTg9cizX}P8@X|ms%gKC8{LH+ z@*!v1XR*20hV8Z^igxhBwLpg#wf|ZmnliYM0fb|REtkCVos;&P*?leRuYdjPm<*!` z*X0rmr7U(r!v#=@qOn9YsK?>p`h;3RV?+2b#-`~g*NiFm^3tRm z_LH4QVW0EJi3xyz{o0N7=BBSQ=nD)-``b;Y1Iof-)d*zaN^9G25YE~2=ZMvNl(OH^ ztk}6lu+q;QP@rt2s*`k_Ijt45dQ!Y-y)<*5h*&7;O(+lHa&_BqcC&&xqx&S!o?WF0 zHL{r3;>$Y)J%q~xu~nFVw87RIOR;KmYHv;ci)31*8(t~t^4HC`)CNQo<8MFk&PtEu+cBG;9?hhxf 
z8jlEO#TB+yDU(Hr;nP7pA}R>5>i5&nblPR7)C2T}0Sj))Z%BnZg^L_MEdZUj@oYa$ zS{yP@tr~sgY7)ptZ_jY@?QhNSGr>->)AqxtAH9CyiGmR0GhL0yyK8(ZzFaLKe#=AH zuzj0wRHGxbVr-f%Pe3B+c|R(s0sUL6-7(2r4X(4}))Xr2p=TP+<#H21CkW9w%Zj0w zD!Bf7Vgj|q^*Os8e@?975N|{q*vuZ>?JQwQ%ow6xPlw>Og4@d?I0e@oU=r=GE`}bl zVeKMomp$~px=S9SCGf*_^(EMp?T_Xa*qdVkkv~V2UwP(r%|^ZEXgbbU{r0W5Ueq-< zYvps=pA;jKxUBS3emb6&o)ye?ye2x_9Y#tfk@pf-dwcey1S{pptUTCCJgnTT4UTi_SrNvewI;%g#y+TJwOYU9TG!70$rfnaT1cz$iwJFo&X+6 zU2zT$4^WS;=Sua4-Yfd4?ez}h$%`Xw@E>U7hV83SXjUaT%=P{0S9<6xSFUI>ux7>% z86WMW12-fbCT_yiBR|4Q$8<=E*jH^6@T`uKCr?QLEt-mnqq(QWyGMyYRT{m=b5=qdPy8D9)?e|k{LcWnufSk0X1GG%Q7`lCHFsx^TBk;k% z{>m~~tnwjf;iC*aRibbg%Euf!N9{J%+r{6S#nQE`Nl_WteE_+dwPcR~;yO+VH2w$)TWZV|H#Q^JYc@Fy5*-ql8*Qhx=wf;)V{K^XS@4}4 zgL~@i*=z<2Sc`Tl+W-p3n7^f#r=S6CRyKU7MUT4_aMpKe8u0c{f31PnTgR42)zQwz z8Gw$EpWV^}w;XZN+4kdM&~oQ+ z6m}p3_h8rJX|Hk{A6?giF?{o3_0saQ;HKgw&O+ZcsXg7cTat|NBj_+`TsOTJ6T)75 ztO?NvRN_30>j6m`c=JkPAV))Y#hb@Hw=uc)kJI-pV)E19ND_(wq-BqT@Or6UyVR)E zwYSS#OBG2&v*yT+-Y}BxAA))QNHcEY?(eit+$q25z5W`;>7{RNQqCLpC1eP^r=h>F@LKGkQPDg;g4__8_x*G<2YRc% z7V}SgTefqSTc8GQqjqT{9vLcZJ(v?6yF~-Tft7TJ8JfB_ersm)?48iK8>Nd&B?lT5 zH7o&7d2YeeLsu4^-T4qbtS1~E_<>9;%FUt0Q%`ArNRnlzKKeg;CnUfA3k{})ip~%C zdTqU;=bPPpd2Tf7@pmjN6SGyXY_65c4x(^+iI6ZHbxQpmOVcUrjF_K2>PZso3hO6x zwbZv7*OLMxtDBW;YxEB#V=sBeeS_1P{Y5rd^YX%L4&`hm3q#Y9%2GVoYJgTwB8*Q7 zBuEAS85Nt+Ywt}o8z#UhvF(}%DX)0-vIUw+Qt%v_+UBqb9O)4ZGGHk=M}Y%gT3&M8 z)aqJeO<%SAItU2A(<(^@k3gnK=yyc?ew@Cog;~Sk@f;Pzkd<;%lk)6>H>0X}Gpbi^ zXle*~jH8bPL{tWQ@yWp6a~p3we;(bFe64J*>8z+-)+}9yYPNRiiQGeoaN)lg6OUE@nb2c_WzlQR z7v`=P`lj0YafG+VL$m4-E{)wIx28M+$G8$^KXkn5ihM zg8!`y?Y`FXp0Jj&Mr(vm*{FH~oJ=QW-DI9JmOyh~!ZKs?pK$VK=Re`qbVk!XU2{4o zko7sGnBxNiIf>dx@|22zOn0da0;C%rUjeCUBzp>shQe$q83f7-N6B!E*_I>up5#LJ z-bk;)ySqe;P4ftEW6aNX^en=F&TEL z9R50nK#(O!dy1fIUx3>hb(6Z%dY}bKogo=Im@vMqk?}3cvA)ySh<1YQw2o^U8>oVV z%ds3xBLVG6$Gv3yn$O5mKE_Dhpvur;la%}GT6k&Ih8_$tz2rkF>cc=`g!O^$X;|Dx zU#{%{4AGb2jyNBKN_38SOm{dqIdI!))4CCN{5nRx=$;+>i+&TD$uK?9Ly|WF))*cw zC#2?7`w`OlJGkoaYQTSN zrVS50iBsxy9&5Wxfb#lwpT*EPHqSYk1!jcwpV7*aUh=_@?;aRCdVk{LV@X9w&mn$y&V&TQ=)$l^-MOwXKJFbjz=I=&!U#X=i9-J{{m!pw zY~l~W(7ikRLPzUnjTlBF41NpR!%{K;bIka)1jO}qJ8J)OwAhFWp?GW1x~gfhlt|VP zzIEVq^(>(AHD%EjZs(?Ge4n^rJSEmzwW)O{p?|PyJxDdae#gPmirU!D(e%IdR!-6m z5BSrXlC(MC{WXqG-I1klE04Eo*PNUqH7@R-!Ad1zO zxY702nHS%b?2vRILfMV>KUjoo(Qg;=CPW<)-Q9L*Pjw@?Kw1`@DKEIgrS`I$L-I{o zTpy!=Y6X3%I;#3UKoJ02h)AKKxwF_JD`(NhmZm(}rKt%+g6wFON=IW@DEzc9*Q=XN z>AV#$OkL~kWPuUuDDKV+#kyzC#OwfZClQbtnnZiD_EoHS+zgwem*r;@?3u$@4?Da0 zi(g9g8UQErr%V1NyG);L4Nkm@oi>U^2w7id@4$_&SLUAWy3Tz5ug;u#Rae=p)SXV8 z5(WBEYMK(Ht$8_onmUNTWSNtT2<2V{Q_s`-QHncSYF?B83=HQ6c&|_eRci zMMi3=b+m#brM>z}V@V_7*rN!;^R+-cya1TZlg-p#&-Ephi`H?|BkSy2D@YIJh`F{= zuC7T5TbW4L968`evE25`s%rEI#1sgA=wYEca%aGVPOXBpTIbM?V4cZ1`X#YY@dwNX z(j^~#Fr{~iW*O!pa`i_i$}L+Q#d|K!2}(6Xn@jErzqF*w6-3tB;MbhK!v*h;`uZra z#+b;ANW->MAhlHsJHgTr!P*NW8Vm2mwMN)E?|ds4cy~CYMvfUhBBYED><-HM2~9g= zUCAqTGRp`Dt|%jlGF?_}-mgwixMuwYGDDTj(fvF~3u%K@=<6}dA>5T!m(30VIID=& zytB&?v2a@&wjZVR46T>olNti53W>H z@feAjBV1nbq|>Oi!^sna!dv-x`n7J$3D`SysHSZ$XcOYiNB=PUQsQi3MEqC#`VSf> zr+HAEw?-#mGyUjg*v16#LEJheH{8l)f3Ecg`W&x>M;DCw2ioMn_$$f!>A&(0V9!6R>4x`c z*Ex*Wobu!)r$F!Q_{fQ_xGBOppY5_{LS;#=DefVk?>cO_Yhh$9fu((EAIYR?f|7F0 zl0wb;)`nxAYC90vN7m{RkV*}FPRiLO&ESHN}(njeuJ^}l}6Q@#Xe#|M3UJ`Ib+ zD{3|iaMLLQ9!*zf*aGNLf4SQoUgbpb4X7j~2|!H{x4@kVX06R$mrf_~?kX;Q%W<8Q zR16PVqiY_|f5K~MtC6_78l({PGv;y{KDMB>eb$5-r0sy?+5I^3m;d$X zh07j49%}oo6Pb=2HU$>gyyVY(-Vw|ss<(nE{|g52-WHn`0nq7p5CZ| zPVod?zei1KkNO6m=C+PPU6O#>H*h}^lbm@M$KNb!3(@++&k^SA@ABp-RK8})f+@FJ ze0`Cs=FG{ezNCqPf!%nJ>}L#imD)5@p1DJ_=SWw 
z)rW+dvcRe#uw@u>5*sBI16VI216sgWpQTZr?$Rb&Z#9G(_a@*=+u)?TbmI!UBo)=_ zJGSYsv#c5jF={jMq{5y$^Z5UK1@6wiCDDIp_F<5V(PJK?)k~--`|{)+ciHYcZR2?F z6^*}jLR%>v?8z0MGs*cw;0((W(K-Qfb%oCE8{1JK$+BJ4@}>nv!%_#9t+%c>SZ{qc zRonllCkNdHOPveA3jN3Qv^ZT*+OscMG5wBb{{$ot&D3VMbxQtfQ5P%02bG0?R?E($ zx7~Bd=a{A|9&4j>+0$;X&Hj}ceRuCB?Un~~nN;vL_Jp>&F+y5_;&h*tQR8mm6x_)$ynBJ0G$>9ci;6i zKdWsy0;4kWH+CsA3QSdZU@1^jje5Ar;6^XQ2a!p~`1Pu>;jsh80qpi1w~)Ajs~H{{ z`V#Z3vlF2~4BE7?k7POZXOe)R-|Jh;+B+7e06f!itEY_l>Ny{a{q$)r_tPI-Y)Ev_ zN0S`Dx6@lPj!NB;q-EKQPODxQLHN%tNdUCJiLHTdX{5kN9N0~>f+v}IHNb?{vToEW zFPAHuQKbU$(=4lXx9ApUSpa(ss>V$u-=N!x#G%S59ng}>dWwQX^GTRdJ>6l;H~F=m z2hUDgFoWqFK_la2%|5rkryps*pg(Ei;9*95>A`iaK8=$jKtX-M5I!r{+0EgyCql~2 zx<+5c&{w)mqffvt@>RoqeTri4hVi}IL&loJDdutfT_R>Jw{f5(Pc0Fav#|CWU=?OS zi&}a^T59KT?7w~M1wD)zQCS=mqg->>ABN$W8h29GIR95;8@4TBt`!!|n4kO{>v=lzKJA0(Rkv#-VOvH}h)Q*TL0bO5qAJx6J&yj)p{RXRsR zkTunOt!`|2LErjd_rSH_qd&|()`y4@{pb^EdgRvh9lf``f9U6h#oaQu2Kw1!-!&l=>1mM=tFX8y^ zu~jpbD54MO%q3eZPNGm94?V-%tXK5JT_qnDv9nGYj2a-$v||=p=$l z0}wrSRgc9s#v1~29g@oFTJ^Hz6k$v`E`YE^1shVo|=1h?~ zKKdZl_Ix+H6B1(WQ_xMZKM!fk#xeV@EBvPyo{rHi;`*P~6k9MB2v`(e;6rHMCfEr2 znZ&M7+*+-6LEotrSbXg@2_0yP00@mz zV;H@adNk=#T7CDy5rLvhs%9O}gRmSWM$rU^?4_m94)@%y247@wdVR<2c=+8c*yks0 zIys97ljfe*#IDPzPe1j9mVRz{#IaPl4$YDoBDx&0BhuX`$Am!r7WTVV-?VVJRU{CN z23${#W7%@^Ysr7zq?o}a0`_9x{@0-GZFxxi+OC_N2VI96qHFO3wSZ_7mO~Rej#gcC ziF`)GFgTXGC|0fPCX zR~&U;%{)NH0c+v$ul3+Jtc$81$MX3+t)Ir=!akaPpyQ-Qi`gUAW!v|P`6aMlc}@ro z1rE?!u*){#@F`mL6bDf%ot8{t%9l2)&C;6IW<6Nf&@14s^~v4{5~2-a2pV;@36G0L zZCM+`Z|T!ZeIP>S?E4J7J^g0-rDUAem|3ER&16{}`?$VekZE;FEy zBf(zpXJ9-&`h75kl-6PGY3jx}`W|51#g2D_d5& zYr?F8VrU$f@eBHk;@9~VHVYFrw2_4gNk9*F71en5XMcRQ?@?mmEj*r%;zdep)-|)n zDWQ?W+wpwHN^snYu5Olf8k}fS2<}7Hg!Sfu-r3Ei70q7j*iGeb+kp|(iJGM7IXDf> zKFy8HLDm+y28{{%g%drC6(JslRK5pt&mo!YOBe@&TH!yxxfruqL$d;u)m-6l5mYj7 z+!ERSENnn?co1>g7?1uQG|zvF(trAt^|mDe9D?8T_MO?s0TgdkU}5)t1=#0+h?mz^ zp7Z+B%kEvaTE0tmw#oSV(;ZQdGjy+tdgv(a0x#unP!hK7c|c;*#4$aNL?VDdS@2sP z>I8GKA|FI2P+z~&M%80FgYI6;A68Nh@Z79ApG9lODr>YrrW&J?rKl&VV-)A6=_}Pu zNf649S~^>OpH#z9S_JBw;8-RCkUk@O!yb$M^PlEN#&iG5A1&s{AoG*mJ<0spg=l&4 zQa|k0+#85JLA6o5Xdh&en^X-Ol@rAFT9e((`KcIvcQ=Zg%_=o0aD<8|9X@EeD|XQK z2qdW9gwT46qWDUjDS1V>~3w6iPBpz=)vi) zw_bSRg|lb%R=H&|p;v`M*Q;b*e565Gqe*6glxQrMxT={yGvRu1ik zBU*P1W2BGRCe_cUpIdlQl84w@c1qLSI&fw)tOY#{r|8i62QW{DIk6B2*MXl;iI1P1 zughKzZ0Z+4Fg{&_t<<|%P=4Xm=(@QN@3h9fvWj0-};uvR$@%>nNtZ^2+VyZO{p zr~g(;;{R8py)gf^rUYpE)=8ivr2r)MJ`QPOhcvQ|*}JjtN9LK#+qY;%U-Fc;;fz@< zO#v}26WEx1z@Ww-t@jfg1T2h3vTDlLtP3AW0rDs1PDcn!_kS%p=c5VTLD{$a#Gz+91Js!;g^O*lqd=+SpKT8i2v$4qH99q%nSZ#LhM#1MhtH z0*xpo)L@l>>HF@e(^>ekWOOv#iNbu+WP3e_%o^EC6;@X*2OFt3vbI&4v$g8FE;GCN z?6WV#2Y1N*-pIK`sCbG_VzZl9M;?g7Yci@mZ7}SgPono8@MN!dDWk$YcnLzCn1q24 z-I03vpr2r8Ez51{a(Le3NHMNX-0EWvTOLj1l=pr%70)wimpAChsGlK!FJ0B9{TGYj z{&K!_TuBF)r8VdLLpnneQ<33)-JUA%b+_qh9VPvv6Z%ZWgsjp$O_FqeNXHVoJH)#f z>t3{pV+qbktGzV#d=$~wcVp#82Ob*7uD+(vkB^+#kOUG*(jBiRHYc5vqc`-x{?$>h zCvoLn7qSq?Uz$8n^~@}htzRoAB`tI5#TD}vmwZ<#k+z2aWmP{O+Q+& zc;@M*l5Omvr)SP*(cmmJOu0;yf>Rojce&t(#z>;YNea#ERQE3~4r!~k>(6E?&ab2lQKINNRbD>(Al4-?w0RNzmpa^ z+1R zi-~8C_gbF2WCQo}v1`2BZ4a@|Zoc9L=qsGzI3b|$^x!E+_Z}ryBm<-{r9OPOfo`JLI$YU!+tn0OMWjSl{RUd*= zcReH3^s&;@HIsYd}E<2aFgvN~VAt zjSO+IZm{C9E7gYi1lQecJT}lPoygn{Us>B~c*W4-sb@|-ts9bHc)}T-fuG%)zO9v9 z_~na|6xb}$wp8+lJl!Q6Yls&bS?LRBo54Sx=^5~yt?k)Mk$L%`@?i->oNR4RH z$UC#$Gy-#Z&RLOlHX}#Fpn?geNuTwk`yD$!7j~`KIq-`VzU1&8bk+_h74~^JvE^=G z(tOSX-=rzKnHkkwcZSi$=#wNIu1=@#hgpuR7FO^VuhLS>nqE( zhNgtjz2yy1asE&w+)|HtNxES>$@WqH{dh5yp_=oeE4qvV+0nPwt7~i3Muqy|Wv35& z$va~-2z0pjxjXdo3#&>e6(uR>*t-#{yql+jHYzVS<00VtuDKjYMnqzWpv8{W2M-2qSOyN8y?{Z7kC{APepG;w3e zY^|fVnZDt 
zC@iy!pPNbw=jaM!k}T%TY5# zb@_B&I&_XGOGE8BR*a&?JcMkZycSIfB;c@1Np5OKe>MTv)7Fwz-e$!IKmrCMidV9L zn{6?3<4t4g!IBmv9CvrDJToM4&y=_a{Nh3~1d4!+NMtM=}X4hdYrZ6-WR}$& zDwivj<;t=&(gn=K?8By?JWG!?2pU<>O&|eQ1wCgWeztLRr+JJ=X8?BQw`MQSKAL{2 zOJE*+8_j(2LNJ0LhT zug^X4rcT~f2Lrd__`gclDr5cYUZ)Teoc)Iv4dMub*n3%n~BPsCIGg@V+(K9URz*B(NgRK{Ky$N+9`UYC!VkyQ&5X-=U?zm87z3phv zw6RlgcKalr6)g(-gJ;z&ovUqWUS>Cc`PQ4ijC;NMrc-fotU~auLj=6U;0UN!8NAV$Yi{Lzb%~%gHvWuJ)8uH=DwGWY& zOd!d;Cq1FL2U1n=zv!8)GBWcG2nQfcyPo`oENy+wGGucPB&Mxy7|_Q=-KG1JMzd0@=}}u(^Oh-8N*WSblR_eK zKl9&5JvwiB_6Bu!Jw@{M#B9i~W_R?elt{mH$7>^p{E<}3c8gUKS_38N6Z6F=HpM&D zVkN5W5t0`@1*j5BmaoRS)ZOT18FC_;Nf-=Ptl_k-f;)asuPCg72rrI(Q|AfUpUAg# zXA@Bh6#?Xj?|pN6d-je7YuNnShKZT~24k@|rjFHj7fDol$xUzR@MGFc9blQ~Pz^}h z-!3viI+&i#dN74m%hmo)BF@hrL*4g%ye@HqHyV0Kwil+Q<_O=Q#;0?H2Vr|b9;JMQU@LYyDm<=zn$G4wdEjKENGyiHug89H zo5>R?nCi|(w+rq`i-*f#bB07}3d_U8lcpbhgC6}*HBt_vWQ|x<*pDQ#cyxce3!>@s zbUfYGoX&32N=5QUbn^~DXk*{Eo-t1DwKVDPCrT#2za?T?u77_s|D%aH{pu3Kg#HozneKPx^Hdm*=_{;v$ zbFO3V={eVzm$Z)iSXwTau>a}fI2{-DU?`JTQO2uxOB+oO98;SMrc456E=11ELB|?H z{&xG0V#D#8SCvSn$?aGJZ4GJ3>@6)|1NgDsU)NtMl`m|0qqhS*^}%>@;00z$e-v6I z|6B5mh6##(gLZzV`>dD^(dnNkOM4NzUHsg2u=$+&ROnjF)s=69s8b+x>l1(cy~Km6 zmBwxZ>d5xflX7Sg_xBLOAxRPqASH4BAyU+>9mi*1>!DojLH)z#rgdO2(3GK-I(`N@ z9lX<^HLW9i(9V#^hBP1sX`mnuBPoKk(I2Fk?fcouaRO0*^&yvVr{fYdVv(iS`dHti z;Y801a*Xg?rm`2Fd`1t9u~j9C>?XZ7%Tu}(+IDMgO(F*-#A9yIj=YnyanQr*)2P@7 z(}(d;)azSAF#Q7HMFr7^3M(p>uHZW`YPN@z7wI01fX97JY68sNA z9$;Z>JMHvP23tykqMcr8*7Qv#?Tgt5de!TgyQ1$wjk1?WNAxzg`NQme4PXT~TIS5AMpyCS%Sv1Soy?Pnv>DksV{rxYv1zIl;|p^k=6he7%Yoy zW!E~TxrwLgfl%X|F8<=;le%xa_4sC>LZC}vP2Z|5op&n>Tjqq0UW1pMV-xH|Q8~lo z`L-`{{2Bf^wxD&wAPG!`=+uwq0c8?d{>W3mT5RICEdp8{_hks;(WZ3z-N1?Z5b`f) z4_G-`m1=U&{%1WS6+im1vFwKT>}HV8zW(~F3z`Pg6C}aiK^z%z1-@)FG97Kh>Yw8x`=;vssIfk@0WC8AnZ~7|a{nuOOSt2mb3*&I$Vo>{>fRGma@B&x6Y!3dT!Y)%y0O2N57F<`|m{JY887U%g&gZ(yUk{c;GtQoH8VM>^=YoK}{2E3VsVd zELQhL$sQ8|E$-$uKJv2ZD>LX<3S`LrF7$U^y`5kWV-YcoyMn(sN zEF}_Hhe=WhMjM*|UiG?eepN>>aE^Q}ku+;H?wfT9bO7w!TKw~k+J@zBA|hh_K$BSz zPtQ7LY{uw((&L&q6o_Fb`wlW9yTk>NLljMXh)4${vnT)4e4ioxRjb8r^=&)_wL0EL zO#kXpEWOheX&+w=vK}6f)D-awz4zpkPCiL-W0r)*Hk87$0~JZprr;np1072@blN6; zqo6$2H>^g0#Gz)5U`hRQV(QfsBF@x(;6>}Aw74Y=OvESC0eIQHUFTWgU{4`u&GQas z$#;9vJ1t=p`ksz@^7PZ%J*&0NQuCZda2@+k!?2v>QHN3vy-A?aADghlAcGrWQ<^UL zM4|YN;m>gWy2g#}#EnMmh}!05P~o~`?U=(EFqdn9){So4@u15v$Zj;CqteIG52@{U zSO%6XC$n+n+HOSamNGkA^Mcx-U(du*Pwd7IKp6jk8T4@axz_z`bdiTmOY24!!IIv3 z5+VT(<7Nx7;XGu%1BlAEpOF(?)t+Tp9y(H?QEBF8`G>Pl;#L%ZQduvD~;g+)2HDn8MdG?3W*bPdzO?z3PP%FD>gD z+sUon)khAOH7oEKey%e&1&KXWEO|$)Kx3Mntc!u=gM7fX3T~SLhO}f%yZQ5e@Iprx zZ=HjII11&Vh{&uP7NzT$=3-w)alXa1+>Ks%cHwLcri6OQsLzL91M0Ry%LxfMd_eBwbb&?+Qk zbDjmG$$dWNcXbU98ly&A{JKGI-R5D}Sp!W&EhmNIJQ@u{&G`5I zeSIhW@kedp2V4ufpv29PM)Ps{J5lOIh-9Pg#Z-;04c+r-$Z6;yJ=%cjdEpfe=yGDn z_LV~wxiNvIV6~pIdW%G9(mMK8auAA6XOedmFg>4#CA2AJ{aJ1MlTGAAyrA{@rKS>Mf+BsUx3X&&&u?K+E8hYl38&=?tgiY=M z$$M7|W+wfx;{-3MJ{keFuW8S|dS2G{O$;(+rT$(2YS`E&ryY|xyZQX@^vH%;p?RF-#R&5TE`hU~FhDHmJ<1C{ zn%<4|uvYXG!O=4+F4h<`hdR>^IY@p!=LRLE3*Ypcm;719ocfpf_pF5N+{AU_Hf*PQ z3Ye~l!0JPz@o`Mic3{()mR@$YH;Y;tee~J0XP=u1u`5>7yPT7&i^p` zR7#VLH1u;O*RMxlpNi-~~dBvaug)K`>cvW4e6HzmCdZ?j3EC z=GL=YWM#u+Y-`nxO2fZhkWD7W04-pjQ*zx;~! 
z!OqpV^A5r9ctvc`We(?`_IELzaNKAF<_==0N!Jvy_XOG1Ck_EJ0?P@FKFc^!0(Ne};lK|Z`=VcOE_va}2@aXVp!eFc*Bc^23pBNvap3FR*mYb#< zql>z90VU&2&ayZHDAonx7j#&w-Qj`7Wetg@Cj_n1C)F9Z1X0j#U3CO6F_Ioc-MSIM z+Ry3NeCMaXp8cTTnPmt|)n|mpoDAo8PsEJV+*o&G#w`k{_dp))c_55u@GzX7VbzO# zB?-X3efBH$i<)q<1~~ehcb1IQbdfW9JFrZfwR*Fm?=B;r`uTuFwJzVNyPb(b`fU26 z9+F3LE9A*iIx+ue0b~4Q;K@e9VDZsvb0gs zI9jR~uRMQNvSFcAvf*GawB8tkTk9pw)zdHPr}N&8W!oaX0UZL9qNxC+r<+jF?lH#> z3O%I|-AS?i;O8}~9nLcY^0>>;PIceqpr?FwFyU8Yz$aO8{9!+UBM~w216t2bP8x;) zlXJ~75t>#q5JOL~)I@P&+ufid=tdYaB({m^Tm4fOx2f$cA-Pm}O6CbDV8`_i(Sfd9 zQ)u;bsZ5i0gYJ^q4biSziKeASf)ARVc>kwA=m{*)RNPv3;{DG_enZYVz3soVjL^F% zoV&9REV zS@1u*hrdJ|j4crIywNZe1;?l^aIps0aXdA?YXbSEV_eH@aY5Yj@a9=qLuXi^=Utzg zdq8=<(4O65;*JckgF~Ghq2N;gA0B z*{y73_hvsZHqaC?GWH|QRP@!eXP?qn@qnC%fR&zj*oMGE(4Nn3{_3M6XDS zKqVBj#H)Sxz)ak;mer$uG+KO=a8#g*=*{!wqAuE@Xe7!xu8s~JF;8t;!e48SZCN;s zgOX}v7e-JH3`ym0&giF_r((7z+Z1cZq;^|vsWyyfJ3aWDzImFiw_bA>h7>%ECK~6Y zjTHGRFqK*VsT?=E@bs@@zE?aG4Fb}21$!P)!S3|2BA;RkrC7;DZp1LM54z8M1<52>CXWyWaJRxik|NsC!rQD3mljP(`irBb7$ z*>CLix_!?}n;3F#`le1Twqnz_c9?()fjVt^^3gAQ@znCNQ+wExkE|9>f(jJYhNzbW z#SJ5)2Z@CNwS3#G2v_Zb7Sa zNd)ko+9c*iWH}bN*UCDz)5~t)zLPxo3=0f^ory-Z?Ruh1rw)>>wz7QKKCa2U#DIB^G@6KT3!3*vil+3mNSeU21Vxxe#odDn3r*AH{d|iz%A=S zH->6C4{jP{B%*Qz7O?g-x5hc+v^cxr?~&g_rD9-ac4Fe-Kngwi)4G}!l~eofqC<*$ zYki}k-BDnPp3#7D_(&d-^^#gRxE3`c7>v`5JGNsJ>bVJ1>^OTF*c&?Hp%qn2Ln0Vu z{Mx~|24%7x3t?teH)yW6QLbpxpL|u5Fgk`_TQ81>lOUo*Iv+>FdN^}Tt*#`5!;ge! zY;3T4l&0!&+4#ijiNZ_7scfM>)@Ihj<)9Mq!jkW>5SFaF06()u=TSUeZ`i)Nd2Z9J zJ8!CC#uCnG&&@^lD=F?CK?dmyjPB3qM{$QE;RjubDvf#g!cUUA)t@j4TC2gZtz{h* z2cjl!JOp{87cDC*(D63LCDLsZty2XmZe~^&n%#WsG(6H;D&^a&8J#Lcbu>)Llw~S? zK?O65x-{|Y+XSmDqU}{ijjg5iYF6CIG#}fUr+cBPizd~s=WwvNX3PkqL0I|KV+}FC z!OAmo86%nbg#Sz@eCCq%x@PO?vuE|oT2;T?T=4_3ajvHKU-6LPb%flNWnx%}(OB!U zbV9@f^vF2du9P%dbRx`{MrBjWaEKDtTouF(rG5?yr6b<1XWlS8j^o@nvYkHX3)3L6 zCY#D>g-xtgr0g;e!eA=)C)Z<8G(*3q@Aiu9P@Do57qNtV*sBRe-x^@Xzvp#U za>06rbL27R^rKm#%o00}KuD1wYa;stt92V};R6XM&iuAVa!1{X3oE=izI?-Ax`!TM z;C^*6dsnsNS$k`Y`alaj+HdKtf_gfaH>fhI_yE^6Z5W7~rE*0I%<23wZyqb@L~n0Z z3lc9)Y8B;Ouj*E2WSCW=3(wCP)Aqb2kpz3=6}hXPzuZ`LkjFL|IJeHM9OK&% z5p>r_w6`!U*mt@m`J73>oDG-m6I~tD!Gih82M(uvA-=XuO@`L5)^r40@?^{{bb}s> zK1WV-4R~?Fl{G#N0(<|PRQSK5z<;LJZ|c!X*1WDp{2&?mvC|XB2%d&MXLyEiO0~H# zB0IoG_q`Go+or(b+4-WG@9{BLJ#Myd@A;udfEanvUSl5c4vJQ8nx<~stvL2?+q@FZ zCq@}Trchf+>zj6vWC4y$Z6fuNB{~Qnb<0+o9$IJKKrYuRAcMY|IY~dPw~5w86Oxca zUjiV09}=N&G$G_WjzG({GL+pW_EFDjYc(Fe`Ob{!ww`VAcfZq*XWo2MKduK7>Cu0i zei~oiNmYN^Y6i*1J&y-DG;~_gcR>1wGw;pc=(@wPvtL>?1arcUoUR|5v;^%QFBwC2 z1Y?`S%D~lLgZdV>u~v&sMn1XtJ2PCZbF+n33pOKu*6foj!M#Z!I1Q=Yc54olKi%YC z4RhAE7;a#Z`HWeHgx@kBanU1+5K{LfK>*U`e}}(|bNcq|>kw|MPbcr>&&=-rmS?!f zdV8%k#&&_HD~#_4u>h>zm$Qfd{C9|{e8m;+&mKr5IRtLa?n&fdc7!LyXm2>03muA@ z%vKJUXAX4p>A+F5!%5dAAlB`ANH#^O3{j|kw)n*B&ZTLlMixST{`nVl5&TrVcfCTX zEaib!^J!AxbU0X_s}=c`bpjO)Q zl4OP~99AR{AL=H|DE4q{gGR-Xm^8H7qrxqW_M+pgMbtGYI%GFWtCfbXM%?VfSyu3D zbr(4U2@#Z{^Nltmp~oV00*1=3s)RyFV`sPzbUE}Na`-O9$X1x}v6gm))*X6|8W>nN zAsbjN(fnYi=dSkE1Gl58?^Q+3&yN8`uyt+o_oB&8A9@bGL*Yj#1Pd@YIKe;$9;VgD z;rZChH2NyOQsQpUezy&n%`ns3?vb|kJSoCOm$AR9&C5au8^Ris+KN2VtV$UqqOe0E zd75@eEmAk-gj>?5%u-EEa@x0es@-c4S(~s+-B9&t&Mft0TB5L8aYu&JDjm42-3k??vE->~0b*1Mm9LUfln+59V zhFcg`B;?`3(t1O3GdkIJhzh&JTnRHDzt&}~q=^WJZfk)dgVpYZVoiIP*4v2%Br4XATu|2~TA~|B7MsDF^8IU^~ zI3?~f>eP3Fse#$U_#S=Rv~x#C+iY*W+U+lqSCAZ7&zbI#5lSC=&V9Vyy{;)?e|?vj zX<|yKz{f!Z*(izQ5Qgi5$Pz|xOt&?&`icXHmR36H&OEWGm02V8Ai=4>p$}Yn>n;7* z+4gK9cvDB=>_>Yzpp)^qoxYdoVCGZZ6>qJVJR4}cCzAhWXC07x$yDT%Prk0FA`M~f zqU@h=kKd(PWb(Wu3r(0<$6cbV0#@oo3kl}G%t*Rkmv{AsdGrY)KC1Lj+Yhg$CHv6t&u~b0W)JjyY{Vm2|N7mW`X*^d(xe*T3ok4<*tlnorr)%Im7E9V 
zc6si5<`lajv+xVeFqOc%;Bu+q3UJ_!&aN{l5)TmJWO_6&O)Ku(50U3>08iy0bsMtG zmY(-g@Gn>j>#kmbKs$5Fy9qt|Cs4otG`*|cG9&K?XNYODFe*4?$ZzLP#YVLZdAOc@ z*tc1JyYCQza>GM6wT>erj2PWt>{v2Dv$NCo2x-6NYR=7T79ChaaQ`R-DC4=La(>xe{@rx%Hvzu6b(Na7f-$Rn(oc6X4k}> zcy*B77(Zz#8#w{h#1N%hY}MD?)H-$QHyYRM=4%U2=$Zutyb)WP4C#ls2_H{yYY@wo z&9&NP|3q<6CuE9rAqTg)BzMyx{-@c4n5;R;kc$OYt44ROR9}{gyx=FwCZ)Lg?PZs~ zSIRDZaF|HOD1On!29q$maeO~!JRg9k_zYM1&h(T2^ye~HNZQXNPol#)Wd$rQN^Bui zQZx?g9bomie2<42BhGU5V$AakFD}H3qgF<}w-iB$Yuyt+JSa3v^%7xobF8_As^^|` zoRL)M-n46P|!?O0+;kjc6P zY6e=@yd@RkHmFbChBB)#06XTYood%v7W~pXd1oa_;l-|);iE9Opm6f z_v~u1>=nN=o^F1*GBLdnvcJoD2kO}~y633bP9M}26}y{#8eTg`&he;pJhE=(idv+I z&KFQ$yic91)u4Pi{YvlQC{X#IY*mLaY5$ygK|ft`4V(cu$CH7B0e<&Ot*x1|QU5^L zY?QgUm?cG7wie=`)%F z40kyDNFSy=WdO@EJ5OUZhcvg~j4v7!$=|+B&CXp7wKH~QNa2(|L#z#`s$N+}x97G( zht@oLppvmq-$|5BuWFSP#2?EfZL1Gz{!02|~Jx-aPMx z>VN#b*~57X!AMU-nf?pk!=$z@g4QtoJJPZ3FPI8Bm<%T*eA|vt~3eQ>d!rQ zT32axydMLnUd327Gu_aEeNmr;K3Pu>$ObFsLFT5B_C=rReKy-@gqy9`%eWoZUO?mF zK&^UCWO6X@5|4yN`0vcUYjayymZteDCSoRHVj@y@l|);%`g7V=CJ2BeWRjo(P?B%u z1ObqQ2of+Zlqg#jd8w|5?ue=Go|uU4iTNU5WZ9PQcG{LN=?EJ7U-PVe@UFGxbVpTZ zXLpUO>?Fv8bI#s-?X}nCUGI`y;t9gLWEw!kFGMfI%t$3JLx$U+^3q$9V3smnAvoIWq{q@98s#I4w!(V^$Spq{wPfuH(5 z>fZeBhxwb5fgsnpG?d}0xn6quX}$6qVG)i0xy{l>RoB2`s~cnU;CaWV5u`)<4o2+R zUrIBk2;zd`OuxeCnB#Q;#=w&7F@I9d+EWhk;_|;SD;OYtJ%Lg@aNXgJ9e@21-Dn6q=|UeU_{S76F@HE|u_45?N? zzcOqFxfAnF;s$3QIU7%;G|l>I>y9Khpjp({&1k02HHV^G-)vFl60Ct}cTjqPT`{zx z<*~m}{*gG_^@a3R$Sn;$#<8}&EDd(a^VhAB(^L=^j5nsot5%yCdO<&1-C|7kP7jik zto)FY1~zX#CbwaB9e%xJ|3H43VflF9PC^$Ge<;>a3<`L5Il+;GV># z_7TRL@1iHqT-3{!XWh0&yyEuji_|Wi*E{a)ct{cOy<uMJ#YXsD6+6m{wjF+50Vvr;X2$`^96!dupqRtw0tLAW&KXI@@fdP)n1M8ZW)mRNs`Ma&__q)vf`#rm7+=+DgQ_BjO)ouqPD}LIMbF-CZaLrC^i{nM zYt?naVytO`S{fRE!e%9hJazGyj6A(g1>_EgZ0tkOFix3E_q*(*HQ@Ga^k^*M!>kBD z^JzoZ-u|m3c#4~7maWhiuIEKy#626jZ&R|TeN;g{tqr5*ac$;KA5FjY+Z*Mwe!TU| z*YsV4(S-EhOZ;i!2$44sTo(~jO3v|Tkn(93HC;P&52rN*>D+en2M*no4A7ZoHmeG* zB**jH7oX72FTT9=lwK+&D!?7Zg56qL%6JtBO~G*Djk55qPMWVee2jA{u}BC0#rg^N zm@tPDS+LO1f6*tWn$mXd`eHFJ{B{4)AlEiZUK+L0@AkDITfLFg*S0%5+iu}sp+S*u zu}9KV8)7^pn5qwm%FuC1d?=SRod-R#7{j(hA%=r)3c&c%7`2N@Hvb#CfNk5mQO|V# zkM{^we5dz#?bewmbr4~oTGO*A2MuX5tgunE=)O3x*4!nUGgE|J53Wiqs+3*O%A-8M z_36v=+Xb7ydq_{6`CAPV4bVHpLy2q6_(riywZdgYh{3nRFP?cuLoqL3v)Hm)SS`!J z&}a8B7Z%*j(J;Eyci1gdS@?8XK94|hm)N-;UYHMVMba_g$d4mzL+80-6iN?_U1`$o z2Fq&?Ls~EPP(Ke+i3TW$v&aGfP+6KDBYO?0z^3Dw!pE_a+^`qFpl24BZol|dL1TgQG1 zp)EQ`2CEg_A{S?;-qkGgWYtUj7>LCd-4o(BC>az72Xn6LNJ&-W$iYqjNCO9HjN z<@kBb_o64N{x<8m^LL#x1dWiL>5e7};Kz?c*3k228ynzE^Mi)DZ8}CzCoqtw*v3($ zYfyW)K&aRaR@?dQn{D6vPd?^NqN}70!pWri?VF5Vq^PB_=0%m|$pz{anXxSvLAKOGw1~g3f;jM+Dqzm;yB&Zw((mcp;usU! 
zkoP@lO0K@ZZ%N)r`J{>$*C6!<&AxqdEq8+P83RP?QZAJIGA-jqeOUr&D6`(7mjjer z(9nR}*V;hJdE|-nB3bJO=HPh8QycT!CnuEz`qqFZ_`M;X$l`cPt-ZfA_T#1#J&e~VmuN=ND*Cx^o^AYJe5bj8{xfdnG3=SPqwT+%_SH$#Pl6dG^0n?sBb}BdDp?PW{WJ?F@op zZ5Iq(rF($WwE$QmB2z1R zOU5qC7*u9XngDtZpP2)e4!q`L%?@xc9Z zTpZwxS!@Tf`cyM+41cIj-K$OfG4MO{+mAl#={4gVFuKIP`@Ls>`Ab=Fk9kGqy4M;P zphK#W{rqSuPYsg{1k;S z1rYu-$v~>i=#>UpO+u+~rX&ESEp-|#ER>{OYK{Zp2v)|%T*y@9cC1Qfi9^mbBGM00-bMHdDV=!-;oe(27>uy{3;>6SFVuUDk*nyu9H zStFwP2_}wRc|XK_g^f#$5Cm9AvPI%dpT}=-3(@%jBDaI@!5=()W}R!;DH3jqFi`8h zi|Hw61Y~Xi{PGn?(wC>MPe6SIeHmw3(DNFhQ|3Z98j>Z$zs}gC&iZXp1RRzJh^VooHU$(GaW2byX$dM7uOzfgL^PvF?xO`ZUB~QNm5h7K){XJ3rZ{U+K-ul~T*;4c1(N_8_3pdy|6H z)x=NxH+51W<{PBw3{(_|5zDWx)He0e3PORrX-=(Rgx5<@ z2p7oa^G)mUt;tNq>V{GR$D)C=zzS)NSSlf(Je=m_Yn@SUE+Q8YYKTf|sy zayOsN-_a0~rEZ)R1w!Df?7DaGG`#9$xeAZ57J^`MfJRo<>-y4y5Zz%FFKp-(7(4R~ zY3O(7UubHGQxSY{$xJ<0bT>8?B^$MpB*G3nYrw}u>k_OUXhb_%Xr!^dUe#+-6t>ok z535my4kQX*T4}mIm^GhPh_J)02b<{R@;G`lG18L_BKhM#Joudkf(9vP6+Y;&&*mTV zUtkUT3Z}v*RI)mcBBch#U5Y9@>?Na1_iBQ?LU{G&9h9$UyDS8T_3fHtXtjBU&gD?Y8kJ3Jh2yjyW%cB)8lN1DQfU~ejAUOp0;D&XxJ|*y`SOr=f3>f2%&_`RVy_rvCl~tkc zAsjf8i4Cm@w>{R+>(!E1>JsbG6q>2qb;-raOU;-yr54g8xEH0AYzf;I!32)s{r&s{ z4G;zPS#lb(z715K^{4CYz&_Gw?er}c_r3WCdJbB$P|u-eSht=M--89*eZy(gkP$!;C>RI{!=p)+Rx99P>iD(Wv@0FI&vW zqDhxt{e&xeH>4bz{?nCf8uzuKODqSe8!Pw=x(6weqv3VE76JE@ks$)ft?CMEj2KSc zv`4%_8|69cX2~Ir21Ppb1!nl(>DL+vFej6oIGr(s-<-eo8?E6QgD@OW*188;%G1yE zceAN`i8fC21VX)=H-NeS;CE*Ha3H(``}4>8BIk9TnuE>ls#b~E+CyQWN#7jH07|ZH z`+<3>qiG#^87y^bnfC|j$IQZ-f$FzCj<2e@KO8x6Hax5NPT|2~RAt7BIgw5Tw@Ml- zd2?oa=vJ-$nSLwKa+7yP$n@5^g(scb#bnF)p$I$nWS18#(HUv;g+tX-})T3SsE zhA-z|N?rS6Ti(tuG4_0KNb%(PC(lc0i{sK5p*)~D9WE2Br2fR+4$L*8Yy_1FfN81H zK?FsP>?A%zH$gT0Q4h-@VAyM=O1@pR!rMzY(~)6y7ij0fO&D6#WPE{&T9aL`ma@_yq7`Au~}oI8P8>NdcsktX{>LR zDT39_c4oV|v{`|p&AD41I1X2VL}{Cm&c_GNQeIF;C9_xeD^)K!g@w&zUgBRe+AiKi zZ-VY;rp79LbI;&~JD#e7=C$Pxtb?=~Lr>SebQnMy8Mx2P=P*h@p zHtCfzZb6uvlnG@GLs8eKceR!kz|YVwYJ{LWE*n3D&%zy$z~5nf-aWmejljYXm`Yw} zyy^b+s9XVhOYI^5{ci5lna00kJ&WJ-^r|UW+mM>hC(fRiHs()vfRb4=Fw9Gz`bt0_v|45yA4TLH|UGbk9*R-`7S>?E~!@sAw--`-9 ztZ4sj&v8`b@tp3X(X(9SLmst}ZtGizL(jBgxpguslLh@xmZtdCMF*o0D|V#Kq#O1J zVK8<$=I!Y{O=_i}8`b(7o#p&1{n02C>J4}2c4s$ldP`FJQu^R1{nUOk%gP-)3odjV z;7RmgjXdE^b1SlXr7Jb3lik7o+#&L>IX-jd%#ug6?y^hXLCxHwkPr6WxySQ}5@TuT zJ1xj>wY-KNG22eofz^mjCV4SO$!HE-k5~_$NJ`YVQdVMJ-?pUY0DQ{S`(BXM$CKg# zdLT?xP&);X8pUKdi7fELW`B47aqNg6KOizd2|9i2K9IPb^Stmfa1sry+1_(LhTQ;v zgO&UKoC82olIx$v`e?C;CLLs(880xXL@bTx=MJg$;dtQrjWg%Z@c(fNRR~TJQk2wI z*H`q?z&L!P8wLW@#9r?|#_RsGuljRPrTr$zlxZj+9$pU_K>NNuJXyHfF&dwT?>L#208&PU_ z)s4-)!)zOo;MKMz0AUJAA5-vp*Rj5s$lJ|UJ+mDL$L?<8V55$GL!c|ZKE0!vz}5R6 zr{`s^?SJ!m4p|IF0n&hI@tNuv(t=6hHl*u$Rb)#nldn!e6 zU99)nlNdc1nD?mdu|}8`Zr##6a}Wi52uu$Ev%W*Ph+i=^9vD6M=(#hG zNHw&G0vvuSNUt2f!{`i$;c&`H0) z8AZqdSv<+LbpEV%59>oQ^1xbB`yq*aTyJXF#4e4vqcg}B8)jjInoAAfIMz~{;ZeQT6Mdc0B2z2#1Uxq4*ne`H?XvZZ~CeBiiH5*SlKSOBtquP zZm==5znH~Z1N>vX@w{G~OQ8PW*N9CI!&b~Li|>qvhr^EH>^1Jq;~gC|R~!>?)QQj@ zz==%g%h+5FshbiOHMd@S33hpWlz5v<=6~pqn53VcW;lL}+z%&hw@m1%qK((wuC?4_ z$E8mB5Z%Y*bJb(;w8{g}s}ZO8G?r$RNY&l+@9Yeng-iIdKSNLI-D%)yW?0M{vskqB z)Vz^D(#6!i7~zUu&=Loe?5nUaCRzk{G7M4aZp^u*Tb`6AMTB@l3n;N3O$u}oD3cqJ z_FA#tY4x>Q(;+YEUPSYtpG?G9gVZ`oA3MW6*4oW=k`|C?6S#;yO8Wk*`M2^h364!n zj*i;oNc6-VK=*m@dr|jJ+eIHL?WU}dO*7juuXKd%r1vP50mua027L{!eYGOltT&tb zai{Cb63wl(Hi%ZptIbLV_%71{z?B~{;b-+Nf+Dr6nqiBH*wCWC7m%c0%FXHe*KWOb z>wH{awn7xxNHhpgJ=Ulz_Lz}CSrjNUr-<%aSw$#$wutFdl#YH!oc^QJx07k|O^7e% z0+benim5%*#IK!zQSjvaLRj5l1P^R&6H-D*6OU9C&@)$&nXWqnQe8tOgu3QHqBa=Q z_yfj(Mx0`exyx#81^YX?7N73An_jRo<)PmqwnKER?#zc|p)3)n(HY75gWu_1(*po} 
z`d|$sx|(MyAC>&pSFNjMWUEt4i9`vauOI|s*U?v~ReL*PBOBF9Z98j%N5B>zftQPo zfSm<2Ci&8m&BvEKczHZ?aXkH;Dtw16QX27|c04(2-$c0TL@1A zAWURx0-mzqCri*Tt<(JWnMd4@A}3}ghvBYTOmavC(ZHz$=eM8Rt~R{H+zuWNsnmH| z0(;hZqMtmw%r@U+UI7RCe`U(WFw$k4&k|J&Jkx0D>U` z7_q3t4^3y~4m}twdZ|nB5~Gya{rZLFtjG2rX)nfLprEh#@7r3l=Wa+W&z=2S4GF0E z##P z23O7jt|^<}etGGMiyFyd9X`b+ycZ@epWfqNYs_J#V)oY$6<(_`bu<*SQ^Y)`NcEZH zWniO)vX>IU#D}Z}ciIF@B4s_(maA?$wk({Z>D%i#JdI`mLyh~~*|TS33@Z?5>I;*7 zN8%BZf^myQM)?GVBz0F^T>_gEGF=h(Z|)IAZHzT!K=5JW-@G_G5yr9rOyg z7pr;}O}8G?&-k!jve0R}K2Bl8&tjB>cKg0Y98@!c#j=u&@XaCxlB|>|W#E0c1e5>h zh|opaAxb}rvYoNVKP$6jcaj6vTLK`>hIQ-KQ~CooZkF+R@!Z+iUwe5Nay{=Id1c=k{kU3FsYj$ zBL;o>R!aoA@g=-RN&f6Ovc6Xe2o>w*n{HB&Dp+92pp?niPUl#O!`WG+8-*F8mm;z&u zC~%G)dTV*cNDRcQ)Pat>EUzZyy(FetyIc)PwXzrMjV`TcG3_z*oY4CpYv^UbYghWy z{Wuwo!`*OGwrIMF(Gew?zo|Rf4x;<^DW=*`mQE`OR=)Y3|@KVjN2Dr z{hXh)R`CR75iMBn`PS_3;9zR7jI@izj&rCr+_Ax@B&J)3Ml2a+t|WOlZ-KAXK&42s zi}T*?dS>)ZQgIf#bXM0Zy)F?RP+OW-vi6ju89!PMZ=Oc^(OxfLjtrwa8|mm}Wx+RO zhB#3t$gi}_u<0guMV}$UhPh_wR=k03sHD&&Z2wG6;!W3_go~^&?%(TX)|l-QC%q)0 z31lbW{#5S5c96YU3p8@rRscQN9&Sw11`7yoS^IG3Xj2C=9q1@cLFm&_P5pv~ZVrC> zqyNI7fX2b0c|}m)KmCBy2?7k=`N@|YhxMB?FU#m@JBJp{<=ELo4D|(C(CWuYI~e2* z{}mb^Sb$?K`gGvM!}a78GBT9WVdlNXtbW3X%K{#3OM<{ppxAwXrCz}AY0h!=_S>Uy z>I#P2r?mvw@Q?obOl2dt4kp~!P zz~OrZL%=d}*#Uj#;xjR1nrA^K71L8jRqSO5nc=o(3ZqG1GdQuOo%QguOHcjkVl4EC zBv%%`A@BTnijWBz(NVIlP6J+o$b}@&f-gSirS>eO+`fz7!`U})=-X2V9jrwr z-n>bcLX*Go!gKoZTF(t*7zs{}tSa1XG|+T4tqMO!9B_X73h$&p(LTM}&<7rL?8k#S z4P^tF-*C=;4%Hxyd5fRF)lyKwaWd_d3|y||ggBwYEBT9``{j8bv89wPvHW#dFyyj% zt%ZNF5+Ndpu1}d~*nh(9Hf!7x8M*O^G z(1V$uPFPR`{7z^AFG=BqHdU*aVlZFc)FWB)orvq;yMJ{0R_tDmTaaA7oyqpt_`+@B zSynTcC8SB7e(tq?7QRDUP$Q&QDWu_cNDr zV>tReZT$}lDGxEje*Ghd`-dFv@5C4>uHo!PDMo5%Z|GT=(afAW~x@7^?X|x3_me3G|Ln}xTZSn zb0#^sWElk$nUF=irR0kR-0iEmH{7+PeDC^N%38$|;T)NH6i)z@PGO!e2Q_&KK1`*7 zi@nwJzhR*{!qFC>dtZ`P;sEus#K~}Xn#dY84ofg=GOthks6(Lg-Xj^?o4$y#=Np5c z12KXEo;hA0C7?hP}9|7>X+{T>>Jt1^pMFuF6*(P$45 zB>(Ofc_ztpVf8-tm_C#8Swd$vGWq9MnkXf8c<`be~S~=b(7GdI>#dTeW`6s% zTTADqR{@pdYj>BSCC$x)N+1GvJ8)A=K~VdF2A1CuTOqmExL2-aKzgs#R=1<~D><+q z+`+>lz+a_6R5HCf@gLW>y{zyzk4mJhHL;F((&BK6qQUe>*u~syE?9-0c;fX?fb8uK zXS4?aiFi}erRlH$`TR|Nf`$Rb6`vTF>yIvOd+_MgtglErBgO)}gF~Vzarfjn*syPN+6w8j+yXg)9fYe1OIHj-`K3ztC(oq7vY8_$gOb-4g?i=qj^6 zu|23qa%vm;UmuCF;Y9dO1a|IzFp^=MHd^_jmSViHizPnBi&wPdBZa8f+o(4+-lEh; zYf`h@5#Kh< z2Y=9{8=3@jT(g2a?mSEtWWl=V^?01Pk2PaSUcbPD@HSJrEvZ6t2=$Q!M*T{)p{KME zf+~A=>TU`zWx#Mx8QGmFOtZ7egAy3mpU&Spd)}Xp=R)GeYK!jkmhaE+$NZP6&ztp~ zn}cknt^U?v_7J*_i2cX`rq7&Pk{Z3_^U<{UDUET1<{c!npMJ+dxQ#hTjc=r4I@er~ zN4Th32pmq|(H%ll(IM4L*oT-T_eQS3*a~*Ob-aslqv`-+LRCbjV;7O)=f?gbL=&8s z)*zidGt#sUV5!(xhCI@#umMY4OX39*chYlekgz&wH3Sf9-H&#@z=ta5n8dLZyZZB55s+s?>#z zJ`0*(qdwJxXHyHv`k095KqBKRExHnYqnk06FqF>Ulm@JqJ+1*UYUDg#J`-uE@z85~ zN9CCPgi->ZqzSbc`_P>vEW88l7KVH|uG*uKvX&k9XXqg-P6vFCV?Sl^8++5N8;o&h zoNz+=LM(4L9Z-(j^&t1b&&jxN$NH2=P>Dq-IcKNQ zo%`5wnr1?cj?vS!_5*Yc(qZK7LV>1VcZ4G0c7)u6095pEdG_?$#F-K-%$~qpxS%nM z&+|`=E!9qCbaOyBdK1TfVf*!HKon9#$#(t7yDS%7Xi}-1tE$wRuhpI$#J;Arky~m0 z(tDtkGVvRoj83H2nHS?njQ`%&U(oWO(xuyCbVlFtr&Dip(Hfudni>~p$=84;?u8zx z6OT|sHeF@f&ju%%Y8t*qM?lZfm!#vlNDD-)I=;Wlagy_*+{)+_L9#GgxgERQaP$=A z8}6!OefHy=w3wFX=gG3%XnW45VBN{+`msZs82_=eOq&LH1>Mlox-{cf2?7fN09>rR0`g3FikK zj%nAZyr*`jKkN<{bvXjWr}9J!5qsTa&|CaLi*wn-D7_mV9U4M{9#gT%=eQ!KeeTna z#HzNMT6C)Yr`}4dWQAg8RT?TO?{a4-jzk5MD;X4`*991%6DO;p5#TzhTJQF@NjBLD zYq6p*Q?W!i=qS3{a|3bd?5}x5A=<&= z#t0&HDgN-cORlXk7(FjmYLukkkTC+Rd((ZrJ~jYR%<0K0h-FVSTU2$>c24YCz1(ug zg1DURGC3BDj%tQg(=)7N+e4IH95!#xKUBzvAZbb}Tl~rx?sO-XBr!Q7OOh@#B1|=c z_;4+Sg8v?6V11mugV;JOIN8o0kJy8fX{!x445d`|>I=0kZ4-#2+BXMI(cGvS_g8&W 
zk2lR^b%`Xo|Co^>zBT`r&GS~;LT{bElYah4>wUTH<|NHGw9KzPsmBvZaC8>)V)p-M zT0pBF@M0;7I=twSs0r5S^~{RGX2DI>LuUtV>F0tZG6^^tMVAxm`cEEG8EWYKt>%ey zFCjK!Zc^me$DQkHf5y=CL#9eF;tPr_kVo-y(Pl z6W)ta75N2M_t??qOt7=`3Ib9&iJNqd;-y2b*2N6ygB0fEyoHEgSSx;z!EPHWjw=db_g}5aq2N9 zv1E!#^30t;wMfHbr^js*k~Zr8Q@UhuVxb1df!F~7%8!}yL)JRZeV7j3FN!3DYn69t z>SAp+zCZmy({MPvHayfV%mU~)@x+gGO9oGv7lfM4`s*`IB zhFQ(s;hrzcxuxIyM(=2uS;AU&u4nsm13Oi3JTUF8-O?<&^x2f???l9hZXMPpn|%+d z!Aq7|q*OIqG-1<6!o38AJ+Mk1D3)aevj`I&$BLm!coX&jR+QU5Ej;E(>Ec^h zfP~(WQkWb^*4(#aDwEcH+Dz8Bpx_Sr^#W}0%75oSvAkGp2~>)w_cs526r-JtPU`L# zAO{VATw1|P9(6FjMa={7>7q$z?5=%x06^R(u(lpRkgPWtx;+Xx7t9UPLe)qLl443m znznR-%hS|g9C06Wxb^p0$3#)mz-dJWKgo`y0r*KbNHGdTkSAF5cGSw78KHr6cDL(# zx9yE({dxkWq&6?YLmXv1`M-q+@p~qBXv1!VmtGC@tvQ@FqTI}=|#rkWpR<~;UaVyZ;{mEux-}Toh=+YrH)(n}YMuAO? ze`Dw(KfnE(zm1s!I&iIbkeT2d*Ql(Ygj`^TvNpN{kA`7hwOw}7*+9z0-$%~-Z4KL! z?~*hUC8_Aj=UaJ6a?iShics8!9E?0y0$RL~9q%xTA=kMqS3zNo3cC^-@ zOU%USOfn;eHtLu!Z$n&YM$Fy9lnFNzU4tEC=4^qp%*>f~2c~uYsNfEVf1!#k*KAc8 z#t!UxX7b@T1NV8Ago>w^Uwrw=cxjAUBRl!rkuCkhu1^Y~^8Rjt)YROQ%JqzyNobfB zDSF>|I7Wkc^7Y(lsR&mYV{-5k5v>{?CoHL^Y_qvI%Jd7)yp;LzNi|ldRH-F7W8}ty zX4(C3r9BKqLlo8SH6{_GW0p%aQI=Qn9xw@DxQu}x))%>;&OSw17!lG3&5dU@3!Ktf zx&WNqLrrcd4y=_t=X7PgN`x}TH7O% zKmO-aZ(g?7dsSjfl$yMa-gGkBJ&wN&XxgX0V9B1-UtqgtqW+h~Nd!3{p_-p);81ia zDB-hTU>%a|Y4&;0{n5BMA{DrrYe2%FVY^>A>m>Kk=b@1@GK z@m^*`6KnH(y!7}lb-QNis=l^?$Fp(V(brNDLD#Y@8ZIVyzw4|K6q}Rtyz2A%F1^jH zREv@+-2!9Mi2JgvscqmikT zZ_$fl?%$h#sHv^C$AOWlH>>)+h5uaaIC8R-MVmdS%M8GuNo2F!@I|GIbzFtU(ubL4 z#I;Hs8eXn7*!8>xjvLzAI!pWcIcid2mSIPOm}3`*F+u7KZ-WWQnrewr>P05y8%b2T(UP_% zF?6(&;D7F~H!oMO>d%JeT?F&=u$c`Ek<({u*JhIeDteEMsfb|!jMK_fu5YTO5VQCG64o-NsK zY&wiF( zd*roWYw|Wqo26AR55nnM6sL!raXRe+3iI6VbyTGJ`&tq!w`Jk6rH-6X!YUgMW{%S! zx~1pwP1J=J>$TUVA@Mqlyic2SIQF7)IP;#ksIOzFjQTDd_?|fv$eE{Z>2?d0ufA65 zc1z>TeKN##N~~*`Jbnx%KIDd)c;eR1lI6jq;G#q#xF=N77&b1SkesJRd4?_#k&1m9sh z>m8>7TeqNyMtR`^X#R>E=AU32f5vV8XRpwWIRLNJ$r2`8l%`)xX4=_`Ekal9rhj|}s}H3n)~3@@k<)a8ZL?af#NDvbu%-fB zVZAF9r7?vA8YR^a6!AMPuNj?I4WiE$ckCSu+?b_QfpPYnynwf+)bpU-x#oGnO0`w0 zt$UJpBe*Y!$7n*1Qp{fydOW%jov&{UtZ|_90WKPD(x~sLGmvHu;yCObnu8#bGqz1Y zuX+ZgN#r}SF@FWR;vEn|9~EQP*AQTnRDHDIAlZRlCgtZgf34|gB@3_O8e*SG z*iu7gL$c9%D}zRI@Mi~?^{%eC+O!1WWNMg8El_1hV!XcCSAq)umHsy-x->A?&5ey4 z$bbBi{0e&7k5MedT|%FcSsu>7nhfZmDf(<=?4&(%1W-(`GhoqO!pUMsi0x-MU$#PYQ3S~_J$s-9(S|DZ};I12IbyE$Li6{ z?U8n>^3@_>sn7h_BAx`ZMDQwNi?~63L`D0^8?^z5cL=DH2*tdz*(N9}jgnK}7=nU$ zfsQ~>Go*SN4Q#c(rag8Ag2kSrL~utw2MNdYKlq)l06F#5>32FIFS%QG$?dnz;c;7k z?|M5^LnUjcU~9W5Cc9g2i=-o^IlG0q^ZvDJP*@?*5weGU{j#ef+IzAYbVm>gQwo*+ zargs)NMTz+38Rr20p*78U^~F3C!5)(`NfMbpSutn9I{IbEoY9-X8nrp1xAP_hq6H| z)JZARdkCQiYzS~=6mB<(J=wqh4N9EA$HVn|`gz>*sK4V4MlMx?Go+1z(atK~&FCv;fMp3q>oPw62}KNf`}_mLVaUb_*af13WpL05-u_^AEHL$AeaV%aaoDUZYv`HyFqH zdQ8n$dS!jaIqjgf#~vKC+WU_Y1EXk3)zgRxdsFX1GsHg-UcXouEaJ8@8AuV}q6>R;B+R#GLcMNV_z_8Idv z*p=yMvrXf>&cXTuETP3H*v{?n)?6SmZ`;4Cf88TUBW_N6BLeEf*`CYoZlA<;wy5vS zKRmsog|jM&`Fr{{6TD|LvD0=h#!l;$o7F5_qi80p+8eY}oMG(7NdZ96+I~0xPLFDW z5)Y8nI&WogWOCuIvC>kpi}k1t3j=5yT!q=OQFX_WhSyqB5{LS6(DAs;ac4BsD*pKU z)6e|pcOU##%QNxRiyjFX*1#1?10Y}gVYBWKf%)y%UwK_CN{j@X>oAL7FM=^{dSXPE zt?+~VAsjU60bCJjRaKdlae@b3IAjdsP{Qr)=-vr;~;Yio2(%FWo-6}e+SZ%Ang zv3|j8`GI5!N`EyPZZ}49cm5}51htYy1!Y(e5QY;Vp87hrjKUlhsd*sg|IXc<~ALxhuL62c+*ulGdnKPe7UGBjyh= zt=`|1?*@TpLLwSprAiP~w@$FFkM$>uErs=!T5WIufHe&%Oi{+jW)oh;Vb<)kRtVQg z+Gp8UEWRK0F{R6^eg@@0sbn2va{LgCaCPFnQWLl$#oc|*(`jVj^=@q7@q&rPQfE<~ zxX9NSkl+J;HmLYFmc3FUtkgANCV9In)|u0qkvkS@+0zA9W3%U=%D$mL^=>RfYd9RV z`dT&$A}?gPz@9 z7ZjDEJ~a#FN3`+(`d8Y{KNB7Ki9(SGZY9`k>@LnfQ{_Z6yi|-9nnb^1^sJvH);`nh 
zv7JXL`k~pTKlWgFl4FjjZLDw8Kun?aDhA&97t7#>HI^=McOs(NH`ll7tFw61taf*K%=q}|D~Zd;{I$w1n1 zrtb6r`~d6t7%w}>Kdc2wo9#XOqOLTck9*yNZYQ4bTu~vW*)Ut6s3BX0uy% z;I}wvB)BXKLpD25bKAdm;D6 z%R)};BtmdJb^QFzISJ0N=vR_$Ev&=T9xldM$=b?WsDUmWdi|C9dPP64!{wwk8g)FU zPr}QN1kRz;rA}0yi zmi^cAlW)&I)Wb z=YWXXCMZ8W1k{oi|M$vwDe?5tP%ctlCRREf39xv^0~jiprbq$F*r~{>wvWy9I-7J6g$$s>N|Bd+3-asDOUQ>*w&*&D*5CVwD~fU>-Y!X!6lm74(W@{ zX}8Hj9_HFeayd~>OHYj_9EYY{)s2FI8V|8M2mB2 zu>}UHv!-@^XW~lwaOSZ9i_bW+|;z47iU3Vu&=&J_%7HP$EP-#_oGmsQNx^tR8|q>$4oy8`t1vNEb{%PB1m??$8$-8$X33wB@MD4W4_Bb< zjLXqa1rx2cFDZWhOjE+6z*_e_1_1hRvd<==Ike{GGIhNk$+&p#tQ6q(P(NLbh{ivi zexzIXciFv=vgkf6cCdAjd2k(+zz4@@PM3W+J3MDg+J*?1Ys6Bk+KjQXq6QVwoN^pe z@S_(PE{fMf;&v?lhpvsOCXI`og|5(BHmLBvp#lueF+GGYWd$B9`Svu*7)Z2qIyyBc;uXhFq|^2dLcO@F4|!?;f)7?)+O# zNrgHotLYD}`K9N?A--KbF@t{H8OfR%Vi=1ls3 zLkclBttW7f412*rZe^o{U5Ix~t(H*RiWb{cZ)+`R&D1e+A)mojcHN+-^9cK82Igm+ zg?xR=Qg?4Q&IbWls?=zat+9bhz@2*Vdr%2?=08X`u0&FH1%RG@y)$xOZny7~y^ob4 zQ6^&kl5>Qex%k8rw;b<*7BI6?ZbtnjL~e8&=wt@>;2(l?Lzb7F%nG4$>bC zv#gm+Q6Gy*c-*SNX0G}BZQ@qBGOL)IVII2P$*lBLANcm^-LPaVNljPa!qm>@&H4|0 zLMrg{)8OH|``~w{?_n=}bLy${Wu#lOc*!kTV#wYE_KI)n{gGQQ$HR^WNIWS6U zWmsFZI?sAep%ooNuQ+Vknfgf$H17(4!Q!AYyzVh3e3dIR8(aVle&ue3C$bSMKl z>~YcI4XU?w6D&lx_G%N0h!_@LV==Uh;R}1?f1?Rsbg%iB{*T^)St0V^smIGkvUQ_i zfzrbgM$>Aaoco5P9aT+|vI^KN^E9L>Zar~PVq&$>?s?%{ScYF;Lkkr_zMl09e}J{2%;WtSwI;S;?Dy=eNbB^C{W_??uwj5Pk&VBK$O zYErMBm~#s@l_@H3c|`r(i!VDr8H-MA;Akq<0WSpy2Rf!8<<=JC8DH~nLC?3~4$*6# z!@)VP59HVKPNv49LTu*)n0CIAh+OY?G74}Y4PQcDW^{_5X(jp{SEz{XIw!oumXGXN zbKBfOEcS!Em5;8FC(&D3IJx!qkvNVU$?{A)Xuu@zDUnJtp;V(9it!PBxRXRF4c>Y zI!|dZ_Dpol+irohlZ~7B#`MNO}3TiO3M3rXKl5jN9#y&J4WoBs4ki?l={5W(*T5r6|8s-E$lX`l-qM2K(wRF_I z;3-_oRYmXW{)xN%qgl)?HbuN6JEO2TBnkONdc_)(TLzA^<+cG6OYq| z!rn;%acYrM2rJgtnwkubvjVBEbq=%gs0gR0gYje3hx7j1!r+5;J<`4AR5NDNYcXyP z-9)o)E$CV|QgI(94vHkS?oc--x{ay}3J%LhTD>A0Nc#Zy(*e@hHhs?zTGe2?YdJg3 z*D^%pHfox~>)Y^PZ)%!|T)R*kd8*Ad}GiZcIE_@|*+J z;X(-*94ab^H2YIb-S@IPj|ja7)Z0?bN5>MGMRKf2k@4RUaOwJsZ`Z(omY4Ov(!;16>?rJhOLR!T7#EhoP4r5p#mQpo z&$zt+or6!R$Zi)U+R`HISi04bCUYNU)c42@m8(FXkVvWn_Pb4>19@X{})r`Udh^{5agx=!Z)of2DuDY5S*YJvSc5Af2 z0@+2ylI#1WqDQh`7F|D^I<>ZjSUTz!Wu=-C!!z%3rQQ-5`{>joV&!IIC3H41J90!e z!!miG6z$AM={^z+Pm3xCDi3#SH;RCcHd~85& zV2dh|u+>?m#zQh=>BUq%rCT7$gZ6c(3v4g=PyNnZILl?(uDiwBQ3!2U`j)D`h#LPHkeftTl&=ZnPZ0_B!WMiR7r)-7i3=RV|ZfG_no z&SC z{$K`&MbEpjuoHFB$EUtw(Ev~#A`p6;=cx$Pv=OXqd3i+26@H-^`|%%%f&Sy^8?h0d zcaXse_tQ%-*_=~fN-?20DGpDG;GBlO)$^@D2Zf80ZdiqYTkiUi0%(V@mu^k%B*1Qe z@of%iz4!(L^4TJ0guhmHM$=Xz2$8T8`hX#4FR$;T$xc=X;&D-e5n8}qVA7GRFoJI# z1uNr2{a9!KE{2Fw=lA074Au02cjsiKmw;daot> zC!-d;@VEL4SPM+8xZSm22>@wk-lE=Ve*5AD4dL;}FT__~yY-UpmiXhJj8CNuf3ZO` z6lWE)b~`&2>jCS_R`gMDGk5D`(}Ua#Ekm=qQ%`0e8Y+oHPcpG(Mgr!n1hI)TnQ#`q z*9gOZH~*7{bL*8yUz2Ju%#;-k+o`CRPaNY}0L0v9(g^wrIE=gS^}Gw0%N^ZvfU~1% zf$C@ICItIB+gPLFfzkJ9F543i;iAYFB*D7P{PtrPmNY~pA%TQK7C3K@-E9P`n~E!R zjdC-spTjo6@!$WGrip$96RhZ%#q#s5 zwWzzG4y22kcfqmR^`fj&uZno|M;L{6i26<8JOHMoW8tbJ{1!b8<7sA}&cD<5%;yGT ztBv5vJby`@l0qi?ofmWbulczyB<0*#UCa4 z1fxoW41Nx2Ud#!)uHaI0Dj=M-0aXxg&EmL|B;&sy1-U?T5vtYGOO4gqrrr)!c{Ige zBuJ$#$MN<1fB`V)eK$cX=;E+$(vbz*t0uX;wY9QdTJ=O8p^fCdyA?9A!{)o`O-XpU zS=LLXZd}^Iy#E*anOKRdE5?teO8P;6Ca&UqD*aKa&D3FrCxqUFtsNuU-3oJVjJZUE zCCZB&=q+l-8>L%sZ)`lT%QotjMzbX|u1$|6483T1vazwDTR<7ycOkb5LbJHT62R>r zQecoQf887t{O$Qyx&b!ex}%KM^ex9%+%CS&_B~60wmBjIv`&Ac1D!- zALb5`GeB4L66qafG4=8!BVp-JY8h=Unt0)^X;E=R*-fcmNK`BnX@g^F;&O&iA6mVG zrlcQ@k@PEqG)L6<@=o3bGnFp~XwazNiS3%7>n{tgYv|s!adLFSd9blNCW3>NF0fTA z9mpI!qv7uWEc5M)CIQkA;T8)9(z-16qJ_?uJdJvzv-n3ljvbg1-Pvbo%>s;Lu|j~5 z8P*Vqd3*jRt*X7|O2!4r4&L6ZwY0QbW$*oN!T$Ib;Q@!tq20{QDTw;BOWgGL<}|A 
zO*jPCv{5@JiM>-ZX{U&RKy)oy`hCY|{rLaP-!&wtMuGkI{pq)%S{8U!x7d)#&mR0< zPXf(_uaq6MG%pWOGe(p4`zB7b|}#@-%# zO;_w^Ol2@KY9#DFLh%|jCrKwm=_1xYMfBf1>^i~lbqK+x9+2AJT*p?CZb@j%B5bN@ zs4_^*T0NQ2(8qIcBGDo+XBDc_aYa9th0$?tVU#`cFZ}2KkP&C;w1>oyPyRc-ml@o! z3F;#R0bm@xd!5BKOO5iH&Y$`1=g2hx&eu3@o&fiIPct}yaW5?Mx2e`La_g_=&W%m? zcVo@)<1j%%5#VVem(@N7VQ?}st|9WxF{Q>F4~!Ab6Wo^^ivZ!SK4LD!p~g5t3{g)oLl#L`|P+*+dZuqI3+ zCuSTGzT~h?uGN-nEj_Js!cin~tu?%IVq>!Bi&LpJ%cX`_svG}1@UwU+%X_jomZ{MO z+Tc_)4W;#>2{Z?BO!S5|2ua3>uE6)-2|2ws>s{*~xiYny3FC}>>-7CpKk<%q+0vP% z^LqPn+f(gKROq`l&X$I2gij;AM#1=3m_&MUi#`JQxTp2ca$I^-tGf)&8Hx&A$)qyS zz)+H`BELq5oUp>y>A-B1HMW3yrwTP$hjOSIEG3cH;n5c6T*vz`Uf$Z&Fkd-)Uc2Tu zOvamYuY4dl;Wsq{)C~*{F*K`VU(hhMOdv}Uzc90*?SHbKAU8BIEo%je>8V-RKC%u@ z8pCsx_qamqldsMWj}r2nJD!H|pvFWu!mHgM`gg7F)ujel<7nUGqPL!WO#?T50GL_t zhBaaZx9{qfc8pBjRwdLlHV!m1%5?+G>TNWqzQCHC%rd60k+skX^Rm-Eght(6Jc8K< zj?leE_|@0jeMyX&QkLfg1}#Cu^Zk}6+&l9RqV4Wxb+z;`4c&6R)v9mkl`x&x%HDcn zZAU8RxlPTvkq!FRo%GyR#P$kq$LTmXWuRUeZp{!Yl~gya8-^s3{GS=XlMBu&|rv7r&xP zDlYEV054N&R(*EO{Q3?z`}_IFdd5qyy!3=#hX%XA!2D{fyh>GAr=Ms*95*nNR9pH6 zVPNl|cn-XZ8=5Ypnmchk?QW6S(~J&FUF zTAHVHcHaJr0g@Wa#)2DKZ=YQ9H;UkQ*iJ+;jm^#-;-}7Dlz^3Q>ZdE$_0tQlO8Y=+ zrFA*!kl&3Sd;rcREkOJp_c{(7Pny;0_1KKVOU)pc<8Xw~N?&B(jQ!FsI91z4nySG` zH8T+sFn}e|SBaPgMLp=W49O0>vrA+&Ht3P%Fm_mE-MX}F;ccC0o|&_NkA7)dgd+cf zh&O%-TE+@R3~?ygvy{E&DDiwvyj3_7@Kz!`Mk-7i<{#<}e*ECU@8{m|s&{nYVLi;? z_xbmon8Eh(kY-KsdD5f0($OMh;=(wvxQS6OY2}Snrq?wWQSz~fmbXcHx-!|=bfV;D z$(ya5V69_k#6F}i{iHO?fbvYn36qh@!D`#*7&8|KTH|dEElVP)D(|~gp*+ka)pZu? zi7X)^VJXyBdoQmuN1?2>Hr6#FK(N5g3bJzDh6n_3x|lM0W@nOxRZy>Q>HCXN7SZPU zC;GnWfFnPRTdkHZUWkPOJ+H>?4=tSR!I+|7Y{yAa8Ph$q!WVdl-<`gv`K%zpJ+m#e z>DCS;MXkceby2rnq7OJ`V+UqYPPHX5=kKN-ZxkN}#ciN(dW>unW$bhl6$Z>Ol zX#Bm*T{L*?J8?BeD`tGh5!A+BgRo+1#Wn1+yFdQF^N&JRa6kg-q>5JqDBFp71SapH zFYdrRxlf!bEIpVG!F;g@L~*o^#8l{S^QgvM+ae5Il`iQ|QY76hrJB1IProQ>sCYRN zoHajau1Vt}F>y{~O!aeHs^JaFIkB)yKxf5Q>5hnBFL`N0Xc994tf~Y7fNvS24l-UI z?bup*P3Wfu@YEfdV?WIP%nkghTlhcVvt$LEcgF#ogvd#McOB_w)7C3$VYU58c$)P) zbRQAy;32u-gi+149@B*N=Ua}ZO9v7c1KOH=aBRd(f*HiO?PhIl_os)02^H38Dcim7 zxl#zQVE77xM~k5czK`7Q)}pve&t#kgr}j*7toy|Og**6Xz9#7e%?1Aj6J+7Cm^0p= zl&Zt=jDK+Ed?SQHEwBxn;h;e}k$7>*2Qn@%z_fNkp;;W%x@j|SY_7nbf zb%?e$W7Cy&^2MfY^!|~@P|#DD5V^@^N1y3)afC$jlx^ZVjV?uN6H#85$`F+w=^^%vCwY=cW&L-`L-erA@XSff=ar(7xOnglb1*Hkm3}U9` z;mlz~miUSI4O;NH=ZA-xAJECT%MYq|Z{QZp3XoI}B;fxaIY?xqY5Xjnw7@G&z)el9 zLG4Vd$BQg<0xzQ9F^KmF8r`8d=_`E?CfMyVd-1V_zC^x?j)c~l*t|9iHL<bpk2YZN{oB*;e85kiedXekW_Eq(R`6@LZfPBW z$GRUx-YFqrb$>q75e;f5kLZJqc|?EZ*aG@*{(X~! 
zPTmaTMH6}TaNr0XRW40bNcCn-?FlAMuQMaDfB9+x&EQ=?%9qx_p!L^ zrROzy!_ljHVG=i%6OUV5;q=n97wKKYvuFt$_4!}&}Vl!3rt}CqBf1SM&C+hw07#oI#7pV6`fH#$aYNm@o@Oo+8(&B zm&M)C2 zFEKoD&W-lXew1|?>&P7~nMEp3B80jqBL=HZ%wP9SrgF4MTcP0AcJ2B?30%1FGuF>v z_$!G34$84!aidml)Z<i+;A$uf{N)bTqH{a1rj{ z1ZPWy!C9c?bO*u$zU2v$a&N!iz85btcRL#2>@ZhEzG;uTqkY#~Z2X~{f>h@1WvTcA zy}iPTsRy;zmbEbJwbF8JozhX=*8tH$iprrwBwd)+u)jix8;XhRh2VOhJ0u}`}{ zyhsiEm9tMis^{;G9O8o)bBqU~d1QC?N7A(zn`p9$LrqggFR6Ilbk++RcV3Q0uD<2Z zT=d=#iIOhvVuk{CUWD|>hCwQ!L6T`-Z9DD}ZX5nze}fVq*sA6ZPnIxHGY$t9+KxS6 z&|6Dqv>%|Y*vMiZlVf*BkabQOjoI=L*sPg^oO$Vt-v##&m(4dED6Y(SHxrb99RZy{ zZBj?5U-(p~C+IbE3}ZkBc#zeHg<$uf(wP4>llC*MOX=BGZ6)53Xc(ykos)z|?t?=7 zz!D1@@A7tS9j>@|PU%wV>i@RTa_kk%^3=`s;cY0>c5x~{Ecx1)R~;_{>V6wc zQLhO2Y1XkLCqN8qWtwZ2ce4WX-Ce?LZfK-%)Sn1P-8uc?$;BM~>=gW0Cr)#7?(ek6 zo_YFL`mtGRY6|R-jy2(vJIyF|BF0*8^m@?MFmGOEP3Ix_(~ca0*cz6NO+}CDbo`{V zjX*2tf3q0Rzc7FAmuX?BqnTheXkoCWemK3uRDB!)Ig6>e<*?sf^=sdSm+TJT_$p-8 zbg`9CTM$8ySm$iW>||0wU;uxoBZ*C6<*?rbbRB;V;6z7qX8{vq+Zm(FJ-%X=I`m=o z-|zVE+ovCEiIzRHQFCQQOI)oAwe!$v>Jtts7%go)yk2bQ&mBiW9q2>eYOmvoQVLa# zh%<{d&5i)iz|Z1OvUp9+?{#>xOV~$pf{p2(Rmv7Oxf$pK{k_sCZRo8C_u~Fsa$gfA zMGD~Sn>I0$S=Rl^W{=Om7i=e1Xs zjpszG+u6|45lFxc&`aH@ly!5Xa2o|$56?Sc6Jw0nPF47LbRz@lMYKz7tr^^=X7yhN ze9#jh*tM_d*~1-|C#-qQdENOChNgiVw(uJ^bcZgO5JshUqNu8UXvvOP^U@K3G%x;P z&5IiGSgW#?@bz#Mw2>?hXR+?qP|L%NC%|@UgyC;R#WiB8Cn4~k{rHD@)?U1}iKn~n z|3Wx7sa8EEDjLvKcN)#Dg1{f$L}r&T@~jXwl5j+8u-} z18dmpSs|9jFzDT|dRkpSzy0WI=U&rv8ow@9n;Yi9ZBy#o)i!gB8Sl(jQ=p~00OWPI zD$&wGhUxU)vbtb4?HmnVE~tb>FIx4I%Tfx|+~6G2EB*KhEb^b9exof^-PaUsZFos7 z64>)9oZit`qNpK>U*U`b#>AN3=aDOXnqh=h+YMYcQ&VJ=!HFGn|%{`7sl!P@#IM>$UpP7IF{kFIeNg0C4m zP{CFUM!r5ZU#|YjG+GP`>uSBct}V{Hv!mX_)RZ`E^=}-uy+h7pXO?`b0KZDUXI3&M z@$FB#Fo?T`x8SLP@R>t@0Z=Irf$E z?m#y?9u9pa2pthnOko8LhbL2jqEDPwr23>Joh3sf+3!t3MSm_iPXY#lU5_f4wwk(A zzk?!nT{Elb_hzmH>JsK1`i^SWhy>;ElpEGxNDBd)ArYQLq+@cuABTu_4M_N|xYux! zg{96ueKxDHhB^X}?U6S%OQ)*g)86M)HPWC@b({Xtu*;|ywqIN-r8z99J?_&cA27wLADXfEWh3cBxTEw3F4J>Sr-d?U)MI!g6EdAk^Os@aoXq8muuf=HX5>|8BylW4s5F7o5&C)@++| zoOIxlh}R&mn))8!lkU2v4DP_ zxp`g#YmNy0UsC^fiEZ6Ll^o=H4$JyARyC2$`I{Pw!Kv*cPrcxb(;LO4P;<7uRmt}q z`n9%MYCNwLm;nu~Xmo3lSd6F9clRr7c%tGGglfH|J+z8Bq;uo4SCP@wL4pR|J^CZ< zVVxK#DdS5iVq6*}vxoj~XG-Ol9zTH4gE)vDY1kWoyVR`<^Yg9Kd-C}z(Vt;vC>~C4}`}2Dm-0M%g{zORM2H_1fzt?Bu^%Uwbzs3AslA8DR>f}F7 z65M@2Qa+i#g~#^R{H|7Ta_9%5osN1sfyO)q7+vv5W(==u4aIt}7VJnvYi_l1Y{9uQYU~G$wmwzC%oBnTVHkM~c7+F0n|q#692&i?6Me znr`!ApdGqxHadZ`Mn)1P9vwpk7nF{gd`JP%1rcdXUf~`6m26Dg_KK7bTZyxTAfLqO znKKuh?KAoApK2mgq2Kj=cBj4!fdAu%vY9}s%s~r&stZqC@QW=q^26Ds^BN`ysxRkX zN&&50(<}0vR7I|7?@zt21QIbwMNblu-Xz{i-^Y75mFjC-nmDHdVm#{O6zdhxD`4w1 zKw|_Nr`B?MXP&n|wuoYg4#Q^~XF-E<^t zAOzW>iW31-#2bN&2@oqB;yFLxK?d@Uq8LDqY1#n8e(bbBN7ID z*FS|(h{-0(nr<`#59anx}5Ae@P8G44jNsO|Y=V83)&Uv~lsZdU{m>${p_?2URW zzJASgSByKgTrpZatrtCvd1%*NBs<$iMp2aqy=nzrO56q>)q+(+58^4GldOn^;#{*h zSr+o#8Co12uR0L<42Pt)&&^3_^yjuqjjA?&6W&ns+KZuTpNzWh#UcfEU};Rq!1l^Y z1Sn9oV|;wQs|Ejw6YA=;>%Tbtlb%+X(Z~HRskor3u5oRsA^H z%T6fNYPPS7=w34QboKP$Rex7@X->k$+oV0EoxudM-tXDY&Fbsu%r$1oS>D(}l%=2y z%Pv<=x-fnMI`tDD5CdApfH9WhZI11*&g`#6!wWDCvTBH~>q#c-svSdCh@{KVeYfhf?eB(*! 
zMmSm=S?wbSW>QM&xaEFlb>&(na7V*~m>sj(rylnqWQFWz64<13tKN$z?0X)BosykC z1eM&i*fdyS{vTVQ6ETlBEG)`jej}rKoA79dC zP?S9t_QxB9roWWw2dT2zhr^@tPw|;u+kGfl$p!*s9{N!)E8VYDS89ROHJxS$IpE(HE}qrum^N(h45L(xtuS?e$X0pFPAtu-<()mhP1)AAt3iu=+KOuD zaa;P2r1l_D z{ddMK^)6`g`={?pSYUf6z~j1cs|M8oOBwG&LiMh(C{up%Rfqr~Ph&sw&@YMBuOH%N zwW<|U&YGr;((}tz>CM%?lY~F((UJ{ZUtJAvT;EwaNtB*j(zcjoMOUWEvdra1+j@J) zI}VR3goyJB_l7f4kxHLBdqrj|=mqAtFKu{@N2#*%`XX2OuYuUsGbOJ5ls@-G#a9N3 zXN&rUHnW9@Cq3KAAS*LCL6iJl$9dMTP(5fqprnkDjInIAB(HdZ(?x?OJ(}jYY;1kO zyb++=UP%O#-bNeu!dI?g-kv0_O6~)})_a?hz?4NuA`Z3R9rbO0ND2JnHoQJ{@lbMsDO4PjuD(IiGD%bkWHYfmvtH1;0qMx# zJGA_Xq>=LDCv;JxPJyB}b_%hZpRT{Q42>~=++^Kp5 zB$?61B^Cu`kLef91sP@bot=V`b{q<1QA{@89}b;vuZ(KIC-1jK+gwk^+@Q91gos%0 z^_U!sp%~5`h=>Fa#7E6yKcy&MXP*{_Ft&2%z77*SD zvKt~|l@;C1Lb!*%YYc;3Su~E&jqEh?_da|6>?3;b{=qC3(`Z*}S=G@ptrM~kpe?<} zEwFHq$4Rg@GdEVnhee~MH~#4%rAQuXCXf<4^T=fbegCiPBR4o zu!t7{Vyr-o%%UTwZ^k}zDgz#%Ynpw4XdYjy(TX%uyzp^N_cQgdRcbb`)EkxmJwH#4 zpVUHk`Tz2>#JCzXyz8F$Y-A+7e~4U8@5oL(t^3kbk6nlzz$}`9>xvOE+T7F>f(xDv z$ofb~k$8}JnS8S;hpq{2GbKY#dk~{>Kg*VkKqVlwTIME&ofvHKD$F>yuGRH*iak0v zvspGm%tc|9W_f=5c_=@0dYbue_Ip=6cS&XaiKl_)@OSovQsdA@lGf zcYqm=JKA#{IoHu<*SfOu${LX!FifQfh>g4XPlUa`KmDliOf-)qwGoHuzQd#do=s_< zvTys!M`JLgdnwAC6(sJ#jMsb4(dtq-kNnXb8<9fug! zah6{jc3p+ztoW{bJqI?&6ix}EMfer^iCCN#d5{>h#pt*){ygCPkNY%+hNRNu{W+6g zoM3>8XnvQ7)im|!aA=!+Hz&*+i7*%I5+rh~5-jVe?D5qC_mbpRGgEJF)%2D(d*p<( zGU^eU_`0)6VO|l)qjQ$UJ6$27>MR`0TCym|?ndmIoyiXH1upPzVB_W!Me zGQu=De+7cmp zk^gfAX)%L>+(^oAdgNPna$wo+P|Y9{+WyVd!Y(y1)i-aw_Ne9~Ej&?mO<4Vi9;}pQ zxj|(r+ft&M1Q2D|o@t-yJAh7#a<0%~7~k+hhLr#ZNDp3!Oz;Vu-JgG?slV`y-mP5m zB8o{^szBWWkTHN@QBsV~h?p_f0iMmG1%+<-^7%*gqF5$9z{FiQRobI|fh3YNBFX?o zyJ>h5{*(*+SQ7?7abyKbF;->>Om37a_s5+v_Jb}sroNd7F8L$9v$4*3dNPr*9ukVv z^_Tk-E;oV_2^kS8NYmUV!c=^=wx#z0*YF`Y&&&aD;o4ecU61R|pa;qZ1Bt%$%R_;~ zR4}FmdCrwzv%b=j)KcXAiJ#9!@0UfuqzIenzq%g0aaKsrP7JRH*QVnJuVozF+lRRI zMX5PFEm#fGmJ7>)RKZ~Ao<`~?Xt5Vfmjz|8SoE>mJGP=Z$-v%lNC90tit^0I0vj9$ z9gC9U zwU4MZ&1k66U_~Z|%k)UnsCGssDV5lm(`<(LaID3pY~7qAoNLreIw!I{ocg_XcTWD- zg|(IA3iF>OM8_8D(+XV9 z6IY%ewMYe-YQxA8PttOxQuCUpxEr)Rn345bX4gio$oVsWqq$1v&!t`5Q}LM!VraF-v8&77bOCgx`Jdv zHzc%>`{DHn)?n5G?tpe&@UmZjKW-@{J|jmi;*{&EG|8H2@*^0Z2WK7z5hTO|F6 zn0Q1p(Mgij(Bokqcy*L9alN&m{JM)1_7?mzh8z1+`uSU}p}DyIU5?PPCUOC@Nxu7k z+eH3_-_(3nH!s&3^-c2v;_Vh?PXW!4jg?MN4II1bc`t+@6Wpj~#bcMNjoOOW8?xyK z1NYMKwV~v}#f78Y4w?slh>g{(ZB(>PSF03vwi;d_y_WUH@Gw=~sggR^`r38-!*H*+ z=t>o*=kd@{z?{2oVsdl_InSabktnI|_t<0Sv`=P4TVn>`IFD)D>Fryont~6Ou+k>V zxp1(zvRd;}_ZsC2N8y?~e<{N7?{stH>`4BQ9=%-y6sf;bA!rcNsRvas%Z=V8Vh#qG z?)Gnl;$4u!m=`5c?{cK?45{~YFBj3b>&zY1bJ=WP^mPVVg%$}DdZXvm-p$LKx+NhC zTE&vP$}ECahL67_!@21M#ZB65L^I37k|uw&;}J17?(w0QKY?R_sXaC>0y%U2O$w&& z%I)0Q*KTP9#;9?ui6}h874oZacIlVfmQ5i*L1aAueG5(+rykM}E6}|ShVc65cBg|u?iBlK!Oi<#5?oP_2;yF7NyifII!xQM*Yyz7sfr#@W5Z)nZNho z_bFWTerSFs=g5&gH)jbkMv_Sv6n6Whw9rA1{CV)?zwI|#(O-Gs*pkfp?Q_Rk&lewm zTtDv5+)5)GnH@}otF&Gz`k`SHHX%GK1%bqKK1IRO6sO+k_w-ey{>YttgGq-`xV>KU zsvJ8&2KFEA+~nEy#tX5~)38>Uy^e?9l8yW(Trn_u=!w{|QNd7cG+^P;W}#Wwted>$ zvFnwsa!DfteL?6+pHd~0LsM|tVJOz5J%09#?nzYy#a`e8DC5#AP{p?E(2XDO(TV6? 
z{i#g0+x=`D;f0+mfdC;>gL_k&Zj2KoiL|`J>7M{a>Q*#ZfBJ$xls1+tPO3nyu@N3< z-Os+LF;!C;m>#w$r_{q5MA|gK-massECE0@g`)z@p*YYD&Dk9}C(yaGuj_r7mS$O3 z@c|Hee6rR+T2jjq(8A4CAf5jpugAM}bITr>+GNL8w8&Bo6%oH_({`$VJoN3(iX=&i zhthcXWgluU!fW6PzF-;d=t}09DpI{3OJrby&-J+|D(r_vX$#1=xyx5tsb?WsFHx%C zF7(#+vX=riw;L;^ay1Tin}DQBCVWE!iWVe#tz`)80-N2;1ASL)X5 z^^S+1ZWyFVBEb5s5O35&_}FN>io`}Tqak_^knNedGq`;;)LokUDBtJ@XcftvFS_Yu z+Qp&Pekp*6mH=yKBG#pAOT0#NgrDQC!D5qJLGl}VBCiI^k{f{ZtXr>Y`b$@ugw$xQ zs5PUQXgnLWJ$_sC)TJDcqDdV%jx3`)%ushqXqYkdo4%7g8~Lj<&grw#JJjFT{OsF> zY~9__*^mG5;CJ!rq>=K{zAoq~iXEah3i!_B+DByK4!j#IXXtC1RjQII%jFp2G7Thh zh6I+9sn$bDqML$}coBPAbU~02nuWvlt8zh!XXU7RC|7|c)k86vFwknQBe6wj3ZX}0 z3J^hdufTYq36c+HQFPQOZMva}ex;?!!0kGVS$fC-Q$5JzOW==tjYAz2d2Zv0$2~S= zjW@q14Eik`CPj9M|DL|Du~`~%kGK`Kmqw!)q;$mot4%LzN)gMqPCt-fun~HxkB27I z;xXW}uoo1O;o0BJKgHF2AA%`bG|s=)gTS-=f9$>6lUrAo<~M$D*nV_4#*VRH+*#@( zQ$>*u+Yb(wL^TqL%!HUX0Z3j-Vyln)j!-APfU zyC{jeO{Ac|$}j${eekWl$(kO!s;k4Eh|MZei3D)YK6`(A?RELqTIG%PhNoOoDW+c6 zdL!(5zTQlNi+>I$D`6Qo2|h>u#0PpwWL!M}M{zHjlXPqG&_)af5WRvx?-Ok{epd@< zw7=!9$J~%|AM^JujkMLIB)EVP?wu6U#7Wqu?|qEmQI3*I_EDW$Yc#eW_hU0j-~RY( zrtPiShZ;6g)w)Dli@4Dy#qn>dk_#|%4w)&Vsh{1wo$}gmcH4xdbpo`yo?uwoE|u3RS|eOFbl(yR zZ9iH)0m;(?zcd(fe zA*i@)@`fOpT~BQ8T)3NjCG{ z^4zHfKWau~$aub2P!wsIaS$aOE=wA^550H1&F+uC(N-<8QKGS(tYuN4L@3$Vx>Q;I zTX|i6KTpd);iuzsBIMpg*nFcWSZhwUp|EyuNoRwyl=i?)zFCN>?}VRV@8G+_yATyd z8kU5{&>x>90Wd0e-?Rf-km5MJMLbff$ z29HOpEY($4HXW~d>=>He{pD-UsYXjDSw%NBJ+&wbtKab5=`I?vA{5{1pbnqX(>+I65&zTd`JSvu9JM<-dgwa z5_>?L9GahdoYUOepO7E^S5CwKZT6NH z|LiZ%NJ?DU)t;xC;%?Jxn{+3z4!sx{Q)j#h%{I!=#2bH}Ui_2i;!_gykNHlxKtc^Y z-2#fq=*Zadd0q4Buq4gLej zwg8G-qMOWN@EK|5U%PWHZU$&loS)T%V^aJEWW?=14>t$)zSerB(O8S9IMBxM71daF`d{y^OzMFlnlmaB4l^S@{ zR6KqTS+H6+oC%fc>ZbNF;l(LEQDQv{DxOhH0w-nk4nO4__Z8A&18Gn3LLbhg zG$E@Ckw)VV!inIuUgL){60bX!Q6sFmpsobzAl4ly5dn1P_j;$fKXpC`F3hmDAXbkQ zynCBO6?LuV0W29U0J= zjc4fKeCf4?(+g+xpk(PEL)UZN=*}~*XeHO6TC z&Fni)7rjlXC(;r%nHP~^8Xx>m_D<7h?8?wRH~p4NN8@cp*sayQnkSD#8C5GbB9w%q zxhMmbbi$y2T=imQ{i;@ecK6Pmg$vKd$F*2GA^DMKRoImwvAHj2mq6Xf-97VE+*60Xj`A^29&@hVA&jp+>)d9D9sJ(Ez>sAaTzgSMM-8qydP1>^`0qA-D zd6R|-M{|N#R_7g(tl&Xgi8MWx`8X(Y={wd4ZLV#-bnU|F__#r}r)#nuaC*eK`zKhg z0*uw1z%{9lSY8OU7{D8CRbo3q*lN~Lr<}<+p6(v+cVZkD{8-^W_v)>Og>;z+NIv`V z*Fpi&aWZ6aKWQ+AEqe3C$E{UXDaWAYfUWmk%K15%;*)d;{qVZA7xxnq_8Kk0qAj(G z_TeP^>W)_LdQ45=Q2tuCLWw~p$E`^r#rWdX`SW^#^4m%*v~tN0K_o}lr^&kISoUCi zatQcHzc_d@gC@h|N}YCJ=n1@Y(i-~yLD!vQ78SPw7<42;KmDFADECg@nY|h1 z0B06!I*kVSyuOnbo^O3cWt+EQ!`zqH<~v3tVVLd$duR4>$nE?NSDZE2@pFZqURI<= z^GB2?Ik1K;p9y-61dl>7< zoj-T(HO(|0GTQaAhlF0-ao2V8LYD2u+Z{Gte=ub=XomVC<=fn|NFF5>(8f-}vHbqf zDNrA-b@q~YE&a(GClCHhJTF!&z#0^$IsULJO~}W^ATSMgE~0=s_W~AZp<%aPRokh1tF*o?l$lQ0 z?j&L0Q28_3j}%-;R9vDdUH7@rMHR=m?!cUVG<(apo6%gXi^QAM_q2b#Ln4pSn>uyx z)SbmMnhxVD4o5al*I~=_sey#a1nD-W{?>i;mR4D-?QA!6(rr5X?;sv-hein2B+)lJ zw#4AHpahls2nbTq8#J9?NMJFPiX~It(lKYz#-b;B!e^fpW&9-PPA~jiw}r>rOzJlC z?FP%^`81~%UpgfOom#xM7?+rM1O0-8i8}6EEW?NihC4MOp-Hr8B>Zy>&P42AyLLwF zO%q>?)+oZeer^?*dOf}L~US`3j-7TT|qq)WRrsiQ|xpXtm17{qHw%)6+Ru67fd_SDi6FkDN4Y5WXz##*}t08(Afb06$`DCZC**mwQj9= zy?aYPUg~=G%$0=E*NQ6Z-lWwW?CFv4Gr8lV-)*})ZIZZv2BfU%`n^u#pdSrf^@9Zc zSYl*p*E2kj2^Oj}{a(0a`Jz@vRo~r2b*OA^>0^9zl)UrIGe6Zkq(z=`KS}V^&h@aE z(`B|Lw@kuY*ktD0*92egn9^%0)ebshKeWY3*%RY2ApqTVsa7gq)Gok)rKYS^tc!4I z;Q&cXON=>#35rA&ZMj=8sI;@LH|ek#%WIMq@NR4x|MJhypMTk%)2jpv{RHHs?}k#m zNlJccXSwPkY0V?DJHOCclkngc1B(5pvrqI6+5KLSdB4+h+6%I$)RmGi?I>X3f zgj}P?2UmL-DhW*j!&?#*{S8i+CX#gg7&+qw^ap*A%|QEj?I^3idDJYb_Ug-*4=8e= zMO+?Eks7Q+%bkVj3XSUKPDOKU7A&o?l0)L{6KkuiwnJV7x@-gk!;FLyjxSQ75Nf)V zDM0ve1p$QCl*0oTuEvWR`5{sv#9MTg)!r0WB*%%4i|3!yyuX0QS0CGGTAC8UVSQ}q 
z%rW{~d%S{PqrcSsw@E~RK!`=4xxv#El}e)FcwV$~>y<676g8B?%IRC2?%gDv4e|-j zW__=<=UTT(*LGwzk_BUa7(L9X3kyG!K^4kY8T239_MGyE?jx>K*jod5>HL}VXCCvz zQR)-P{4NsON=SaDNrKA+UFpq&Fm91xlSII|A>fGfhk}!J(nS5K`9j5CfRFOADrq?HoM;D?T1ibW(=AvKXk+ z0`Ao6zDqEk(j*~NUX8U<6cvwCP3;|m9>HIs&-A9$5{i*k_QR)~wt)!{F-Vh7G6ggz zriC^4nM3VtOdBdm?U5Ys2Ci*KD?_7)A2i!FSC?h({jn7KIITxrFV{w{p#9N|;n+@O z+N9|x1%k6)iIacC2^z20h}FQj-i4>)l%Sv`B?}8)D4X1pqr4FgiEfhv%|N;Als8K_ zgD#OsqSw)dsV5a1fzkiOPGO&O^KD`CcY6W`4in=t&KtIScm|ma2LOk`*R!{^#5iwl z(@7L{Cpb@*Pyt*Oetz-1M!9sksvkF2%9;pjAT^!zX8v?|jpV)Rllv2Q2Hs4X?#}Ju zSna1fFre$A`<7~?7qA~jEbWdae3EJusTU!W|OC+S#>;f0!6zh&Av2;^|_~iKl`~;ZfJgi!=s`T*t;$k8Ipoyc4f#j9cxaa%Ec= zrNFwUH>0ZJVdmgJI!-jBw`QO5Aot+YhbHq{UNfdSi0xl5$Fki#HlJU?dhidv`y(9` zf~)HzEda(dwEq}|WQ{+cy{XmOZSEe^0Y)$3q@zx`lCXFumT_OMyXaU(4RIii(5)y^ zr01pTneL1@1@ST0j&1GCAXJycDz6{dL0kd z?&5adi?!>S@vmp^vuJOf+}A|3hhAH)7i+0>Q0qqwj_jKdU@>Qu76f{L(pc)Dj*yDl1>7aiHX5#$w+?boq!ZmvjSgN=Z@8GhVRM-B^$tdM$BT)3Q^w#Fzw`BL|=RtIzgMy=Iv%BIOM z{VA2e+FQ|-+Ze0bLih&;&?-YYqA1>RE?A9bWK%B{ZEr(SH>1yz;V}IoK&TJ_6~&LO z^88PnEW&jk3lw(H*&!Hj7rZu)*sEX99tnU83m0@~tPS+j)>J=TaV2U0*u84;QMAB_ zFY^IwAV9gf>*NexagI?CdFb02bF@jl25lIW#-pP z)dQS9{ZqY5RFxHdaoUNUsj((lYNw||#10vnmQby41$RuhRRt62YQd2RDyV)35-GRO z?t`VQuM$&(aBZMT9{q>EJMrq6Xb#(n+G|;q31On&y~R|sm=>G1j=j{p~_#;liKB2R*E9+ z1mQJnt7Dhd=E6%gN@~fFyghb=&nVUGEyKOysbk(a1r&kMvd;amb-(SeXZ%mEK;k#i z&j44@>H#Dfk>;cQjIjL!9K!EwBVYCWz+W6BHsUBXy{C5Ky6r$UoCr?Vr!q+VW!v#+ z#&ZuvCz)ij^$vaevb51lWix8El|1FWVp-~~G+*hk>%M$rP7?;!q?CFD$ag!wN%7bW za7hO3SS!+vH^vqn_9Ue*mDa1Y+Q4m+>?g8-f?FLIyC88eBpIgbVhU}~xZ#?3xS@o0 zH69M5UORbrX??4Fu`XCQI;{iB6gPWGsxw6sk?3lE_htC`-SF?Hk}^!@6wf6pO(*U4 z`hZ%?A6Y>I(w}|{!}%i?m+REsBW** z-Aq&W^J}N|)t^6q;n{Qg{q~W!;MsB3^A+#c_sSi8us5FS?7xu|tt7^fG?^p1R5ZfV zy{xyjTfv-GLr2c+?s4;`rXHRaEn+qGs}$lnp#8Y^dY}{*Qbdo)%32mes6?w-2;TRu z*X1AXT0s*_nbBaj#x$GJ^Q`o4>5B){J$+0tP>c>y!>{zjlsZ1Av6@+O^08joi5*p^ zn`2kZ@uV>sfr#hA4!M`|5AQ-#nRE5@sJPB7!I%@rcjL-ddB+>RJmbltS2o;fN$9JXRT$RY9Mf3+YK(KW%PuUq%l6337R0D;I(??}2d25rw)+Rc~L zX*mB`-`*OB_S6l3?tXuAl*FuzSYlg35f{^%c<={ zNxhfzpM_$k)jYLUuE`_3TQ{NcIAV^+R%bL01HH>y2ncTaj8Hp1 z_>C5Z-X<}`vU1RMcH3smt=owTqU$f@eaoJHgHf<{ni`RFe?@~U7AkB5|r=J-9d3m=rwLs7wzlT$UPL*71SX)l1O(Z&QItU zuBVTHdSzRCnoO9Xq`L09ANNUg{;^-{zLaD@FvJIH-(2Z6W6giKs0)ia>9Dq`a7g_Y z*DZoG^v!96l-VZz;RLHiHz1X)R&{+n^cy#8yXiI2pM6?bZcu+Esci(V_wAI^+mvfU zX>v=k)wv#r-6%}(2u9c$plzJH@cXEx#UIq-u zk%?-3jH-dXrXHQ6mVU-?%?Nt??*iK&=kxosZ}p|}TB%moi|Qq~$dch~^A0wpnP@LyjeT=Txb}Evw#YnSB>Sf};+Sv@TO5aQy-CHq7=Y^e( z?O)Ev#ism{)`|aQ|7zsuX5_&I3)gFvEuQxD@TX6CcFxbA*Wy-s*y(T+<(1P2&&bksBYmuYF1bK|zjyu1-)4qlH(d6c;`YJpr+BLdKj5eYczGU^Gp7SKYi-oFTel z@Iq5~T@zKND%t_%l{sZtXHw$UfsUTFYbZ%8|W=)RiWMY%oexBY#> zySfAUz38x7SKFh%(le0>eMWEa^Pl~hUK|ap^(|e%ijysS>~it!%TZP%5Mu3j2=&eA zytFhycvVqfv@!s(;my|GRZ7I@er)e?_us_F{Da7)zn^^l{XZSCHt)HpUo3a6xghqu zW~_@JBF6Bku!^pE-I^~%6i`B_QT1ktsF-cy{PAr}@kcW+8*K(j;e*y$1v-r__gPVi zP@B$bpIE)tmYZ15#|-z%#0#|4tFGVtV%0r$*Piom-(X1)88vUZqwQ8D^7sA*C!QM<-uM?pBv7`sTrq&VTSk^_s0{d?8ma zOpa2}%pcn9wD!~(?2=<#`YYnHoRPElqaZ>B++3}!Z>_q0VSU9cKx4c-OQg`Qs8{Qk z-M3aq(KzYbDaVv@S?RyR^-!syzj*orgHiXO(}W9dq%O<BX(<9;oj;m!JO`m*tGyq`uu;^Ukl5mAvTdT9Ub1Y3Tj9Klqa=GT zF5|m6t=~oGesJ==u0g<48-mR5b*PGOR=UAI=o#g%y<4x)hah@u%`>_IQ!Sf9> zPWLjtNOf5&7}7*Z=F@AOrUar}NeKtjx*;&=1Lk(HG%c+sx?ZS)n+CZk>BvUAY~?I; zSG#A)1RYdl~|q?c<_yc)Z&S)C-As zGZ%cz$AY>=MvEY4xS+P=J?yD(1u0|Rb?`lN6AkIyYj@7Yx2=r>A$LL)2BST z<}nxH92eO++KhzWj3ElrZ>cN>n zOwTQxJ$ueM|JYn_!KAc*y^sL&ZDl9jEmIqYHfBO7RkuRP778!9na%Og9Y!VrfVE01 z-8hmkP{JIkH`roWiH=gVNQtxFpzlP&bub43EXfyysD)???JREzv{$|1!Ja7w93PH{ z5DOH9ZZ{9ya_9_h!@7}tSDEcZi>RXnY0fE>Z-mO5O;`L08mCEfl}p?4876d%alv45 
zz8#uPM8F0I&xg9SB_~)zl?c6^>SGbx085u6=!j!OgYI^v-!ow zjm;Q+&1g%({p^t+)IjCxE2COvlF}A=c62>JP<=f6So8j~Kh?UNd-i$#Sm}8cZYs0+ zHeK6VUyjin+;mzLU~@@7cP3*mSjCjM!NDYzyb8Y&;I*xZu#OWnp-Un6C38*NBj)5w zZpA)gPi6E3H4wR-ipu{_%m(3*5U}You?||1uBUB`7WC;&PoNH`#v|=eHt?v69+IZH zm~I6n#XUTlwk7Mss|72qcw)lbB6vsWf+SA7J|O^l0)zUzfYFUGRP+4&8a}j>f}zdrVzv%{G2R3INBAx*HOGU-zU2l-DBL zPG6{otW`}iB4VqiXEB*@$ndn#96=#a<{Oz`ASx0^HJQ&*Cg6i@bz(>j$4bo=l^dPr zG)@JBT1Im>{VC0#sfO+dxNzg>4pYwNh0}{a*H|rFWyQ_}5LgIt8AFh=NF&kvr(b$1 zP7*f{u20RWRovNKUDvfSqk48AY8VIf6;HB2NN*xVBOy}0jhgh<5AXc=Yp5k}oqYS_ zU+KD&SI0*y+w1YhtzI{DCh;rxTWQNa<<#i=VJ+Y6#)-q?qjW{+&Dl#%Ahr*7XeDe1Q+U47}!;(5&= z19*ofi4Ws3ix)zA-CaSk=z6WWnBCny^0+dqJJ(n=7Z&P8@rW;%AsM(`f|W+=!duDf z3QGPkoufgB$++A%ohV++I5)1UTfQo4D@L$BU~WaEmQJw1i@^bnVkG6OAQQ6uR_u!0 z@{6lL8OGnR%xD1&5~Uh-t%_;k zI&atAMiq_Z{~hGLfEA0rJ(_*4r(9}O>Pe-^Hbi|-!m}b6X6(l0 zgx%2_J{Ci{3;aIN2kOU!?+KQ9D*4V)Yjb!r!UZ`;JiXr(tX}qJvapz=SYe3YVU!Yj zJb7K{z?0x~Bersqk7>BJ+D)28{I+J?=%t%a$IbpI^^d(6X=2zEU#i?`n5Zz3s)5^Ie1(vT}TRb1<3WdtRkth|r zR4!^It|WA_hgOqU({KJSRM)hA4h~kMMw+wr9nAnc_^Tg(qo=lP!}Vn9-#>Y1SG`XIxfUslXhmh!uHzLIz4)Q z!q1(3`iz8XdF-wgg39`BbDVHh_PS%wA)sshU!2pzp{ToMP~<}CVIj~QxG8m-)6tWY zOzDI^iodke@h7m2QzXo3naT%jY~RY(Ws&F8^O)lR-xto|R8pT%K#{0)a zn3L~DyUSY07*Q_zQrYUC%zWqC(SSoU5->~&vZR?QUy0LbIlXar8SKau#78sxZT!r^ z(3rL{%3Jz8CbSgq69(3d{L3-~t06W*4LUp>@M>rR>9-q{qyw#$ykJNB+wikrV3+H^5U{n`Xt%hT`|Gmz?n8U$bEmOkXfQb|?V{dfE zkJYKe0`1>u|dbsb}iLy=jB7VahLfk+LXtYQIFM5f- zX4Ho`Ao4U@hkU_l-9am|{=(nSKGnC1Xn>EI49M61OyZ$2+^6B<()5V^A15=k4M--% z_A!nYttXtt>yc8c5YXIXh&L|pNA)a-E z0pzq8A+1+xm#T0;1b{J()<}Yxh2m-AxH1%^nJs!JW~W z{C9In7u?RD|G)oNmuXYalM`KNvCcZ*z^rk?&l#GbXnJ0G<`q3ADIe`kow*h{`J?$& zg$#>Cx;(=0@=a_!9p+3Up%0Ijg?Otb%!n$G@0Z9`NHFY3SAh^p8nr1CgZVqhe32)kbNZx-5_&M;_By z1OW8yUDjL^R=Khb*Vr2V>6(3GFX(Mj#*6a6ULT@JQ8hQ4ga|wOt*G<8bpDhtP{{8z z9W=V!3X&)IvZ`U4(1Yo|OaFxnFFmVOnbJ$mlv)jKZAduJul_a-G~Z$Q-g zD;wi`hdd+KFU*{=M$wvfOEz(7Ga3=G);l<2(pqA>$M{XWcENDhSvRy2-|RU1hhzKH zqSrU1g2>RfGY@dM1G=gC8&1$<>-FR(u-58{yPO2VY ztwaxgVdoSC4?P)fEnmg{XwQ#jsv>%Aa|*Mx%UQ z_edJ*++As6!w#`1_o|!LcrRpy>OST}pLj&F;E`DPU*m4nHtYJVt;NYVdbMpnF89%} z)zQzAy@YiN+i2nsO^H0`RM0Kf?75^Wx_QK`?%SKQq&T-_#g`Li0`@u4XIGD5V-H*N zs!1BuuysU$HL-NwpM62S(FZ^NdgdvwYb0}{Uum;#@6;p^p5O;D<(>q0=c%Xkh0fq0 z?vvAtUKxT73CZ(+l!jW6p?)tNRpa0&NO`;LTO#0k(`FS4xkgi7FA3fQ^fU7Y=#voE z@jy7wj(3c|u9c;NXJRHG_=q;RwEWj6pP)5`@@{%o`pT@_(nhlYIfy<=uFzPkRZ8w} zJH2prVNstB@j7!Z=vsH~oO$gvzZvae*@gE`-aUC!JF`M|FAgrgqHCDXwZ6Sx^78P@ zm0;EB;cwsWb*!L;UQ-O>;tF{`ewTNtmi|5@IjW}XEv+C-+i+vJm{_Z7IrhuG=u>(v4;sskP#E4j}pPWhWJ(}CgmC>a(f(dgp*xV zbltFKXOARg@DxJ;^Z)C9+`yW$B;O5=OAcN#xs%NBH|aZPo4%+89R4ke7|p)c>d<(B z*bv8`i>Hb?MF~U2Xy72W)EV%op3pXe_~2UvG4fQpE1%qxPgs(1-A301SR63XFa8`p za?Li`+ywk0=&V5V>wZd*5EL`a;Pmk<2^R&e=O}`#&uOe%<4TC@QCTnQ9|}WD$8&Q} zzkqjyGaEBhzrZHtobOd#RARe~hnPHs@w_7ri}?vwks4k+_@Cxe^pijEk8iZ1(LyNu z<-dcgjMpNxI@BP0Mu0PRjpi7~d@*1bpD^?ipj>cWm3 zq6OW@s@WwgHXUIl`)jy?Plz4b@>&E*OH9Nnqvdq;B4 zkf=#3vAmJCp1{C@6un=fkl>^*iuSa;x)0#79y|~E`bntoO? 
ziPxL*E6naDaS!`Zj}fYPygvI*i-kOCx;vkyk_)Pg#A&-^MJk!8!uiL9c)|N1g{*jD zyi9q+OReE*sKqzdDp$0Yqzd(>t}d$slioGE^5lVD;X$tq(#?$2T#9B%5vUAR5*9sZ5m2LgG^$W;lQO>Z@D31?lH1m zD+8w`F0EAb<4RmAir)LC=ut~s*`S=HKxUZOHAfmk0t)*fKh$#Mu(#sn%#!vs#-f6}t_`Z>r1K*Am9eqf0wJ}4!(#JONGMQ0|n=;-6HF4S($SkDF^4e;+ z=bvm|D+{5INa~spm&oCGH1^$k^B7qgFTD$k<4EH-&%+G%6IG19!XAAer3hGtc3z(zc;B2cYpPSn)Z))8K)IkkirBRW6*Nn7vu`BdNLWNf$hR5)|&1N21Han^nk|J0dd$> zF5783pr@;%d4%*;O(!LrbfF(;d^-DPA#O15|Ehu3Iqad`%>!S)x>gJMlTZu!b^qOi z+2>(zMqt>+vpK0)PbITzl;9AL6>!9N`%9C_bIn9mhn6af}HHGy;z`lJG&L;-C*@ zD_-rDJ8@UlI%d@9JvO`x2UMMpV`T*b6E6R)Q`=DyG}VR7jK0z7x_@a5_KkL-^=gOS z62TISPi;l%@fx{&pSNVwHPL0)ckt;80)$!tp__h2i{)HO)(Unnr@;*uVF;8v3nX#s z0--uyKPvtAiBo)l0rWwnRqRcp3XcGS`S|M_$^cbA$T?MA)f2=BCmtBscP zi90)ZU8#koXT#1oDvi2&{Gi4^WEX!BtMG$r1f8o1G`KL2hv_-7b|fC`L!yxy4*9z8 z#h!JRcV2i9;(9LQA6$=V&&K5W@i!U;77A64+rjqHMxpTR70QfTB}hrZl}MkgPoq@W z?Z%KFWglEL6Z>(N&<}rq@_Iw-`A6ko6v)sJyOVKpxLN7$-{XH;BaUiwWk^2YAn1;) zCy;BwFvWUabUirT*)BBjh+Fbk&+2DvUJo-F5u$|u${lVYg~>QRnsxm596jSI?p{68 z_{f)eyGGj^SEJkJM;8JZGrR~bXo!MZdK`S=+EBlTO;cl{t89=^|>ZDN$A0JvS9iyyMNYy)KmD zN&q1u(cIlKf1yOl4m`E7*-TPcP>3y^;Ekd~ahwfu?_x-zg@1F;eGoNE3Nh^YgIcG~ zdpzj3voAF*A#QX@U|8Xx@OWyy>F&$A`GSdsYJG0^lR9ls@LY?)6v1hvS;l_zuZ&_Y zkhLPZjYC2oTRijBX^qED&se5oadGi+ z1+a%PHe@UErOdxAh=<__(iU`Utk&>^N{qXG938&Vrnk9XXUm1i@AfhINBl<5L{a^O zq&W9vB~A$pXDoU>hv7Z+h%GoGY%52gC6#{#ALM>Kv=-?YqWhGRjQcC2gvUGX+#o8A z<*!eU>3^*4gEDU^uuvz3V;mN#>bA0}kK)ybk!CidUz zg3V}nLp#(Ys+F^M9^5=@(bQ-XSk1iDEi7(NXjfZ#e)<$mT@s=!#fgH)$@fKrl9JjN zxTevcbwvYGHx2KyjgyBc<9x*FG3X{J4y8;9JL!U!9KRc7;5)OAAnLr!GJ50lN+>EE z?F!!gEV47U=^h8I4tJXdg)_!%M!mG!_TV{AEKCj~C*9KwR`Fy7wnGwlv@R;7{c?g@ zEAXNr={yvvwY>qs++80_AvsGki?0dHkm(lMu4(}Zb$da*^3>~DRYxuj?mYc;oR*`< zavS%ehZos4j9vwkbl1Ik63*#bS%;GOQm}dVgTGY6Calr$oD00o?x9&&SU9UGVNr(r z20#IBXLD6QF1~grKBuV#K~g-bfZNY^Zk7y2Bb1YiB(fj(eJ09=Bm_h70hsSx%JqqFsdRQ5KVeeXhG& zQ4K!=>y|-Dw92LH5@|NV-Coc0FRY_{&CFumIZ#iheh=d1awW?OH|a z!Nz7X-C5VWZq*D#v@sgNcxnNMpt&XZm?9ZJ(Zj8DSex((Jr3cI_o)18#^d*bWyMoK z+e0T4lSik=-DB_Rvt3s>fB4@|-q9yW5AeV);?#q(6zcrxPc#_Lr*csSQO}D$vFr7a z@r+cq;ycyILgkd(T+x1p?qiQ#ayeJsT#aQA!YKr&CQe8lHg6iGF#4`m8hz>eS#m1P zho=G0pvF?|5T{Q3!pX-ckMzoT^RbdYj=VoTrLP7B0n&KOlg~HsUu$WJqVM8=({*t0 zU?AxMFPl{gWDoUect1uF(p;CT%QYEwmKPF&SFwUx#fw<2)VQgCap$d4G*I>&f(x+dYfVb_jIdq=T#5U$QNk_us(fy;T)kLO$}a% zR;$@*X(0e)AkX+Y6ZUVvBfSx=#>fEjniS+0(-|x%`N?BeiD{JBBZWSo@E@Nt?cy!| zelWjEqT!Igy|h(XlkC7FZ(W_*AzCJ6^tuF=TfTeGM+Vv^v-zfT%4t!F8$nC9JMfY_ zgOTTw0K$iwH`=T?qt`NA25u2{_uRt|Av*Dym7Cs9#}XJ4VPB)y0!BidWit^v-n|IR z+vOiWSKZ=-CKuA@^{`mloTkpur3jCPi$cGp%cIDy<9zUnneq5>nNT%`0R7$LIgj6w0zd+%BPMUj*LK9)BOO!8{qUijS^?8RsYi4tnv4(?Hdxm&@b>47I}h_ z6=d`FhL?xqo)d@Gw(5<|(ndvB;eecuM_tEquw>sydeP8cPccEvPxg}Xe>^#2THVY0}Ig z8nn~Z(iqJ*RAQt~p|EFBO;g*oJ%9D9U&XhAgDJ5YP+BMM^s?YZP{4cx_XopK;+DO< zR%%El;cwvMguCL<)|-uvQ!C5XXVXuq%H|gJ=KXXmSjc8~TXe$y`?c1+Wj+qK6e+$K zuTQzvmpqTS*eHdjp;4fv17cX+d(bY$~TxbL4!d2C^SGexvlSj$#HXZU^I6j zgj$trb=?E~05c}7FdkH4i`Qc82n+bu_(iz03s(x;cxAj7nm3wD<1TIl!QM_0*r7@O zTk|H3HR80AS3Yzs5G2_sX%b*yKx)uv;Xy_8o8D?g42i6_l0ZhesBjXMg-sIg`fz@9 z$N_lsJ9po4Om+Ta$SXuZeo6(P_MBrkB42? 
zw^v79&!es1>~vfKf+Ojw*I?L7ykr*feSoF)W+J<-0DTBoYV>Sd+zkT2Y5Z(@5~L4d zAS3JRT}-FjdaCZA9a1CXCWAGD?(K#;ic(Q6=!O@v!NEtVD8!F83OPxSCtgpE+E&d- zHzWr1s%}7GgvYJSP)LvNHTu9Z?S&pAv7@@deIrg1ndp~77HBlG0r|#@+qwcRKampD zyV5Uga&8$B;c0mRp5%uAgQO!DAcaEyu?Y_T8q4xtvPSRYntPMh1sDu*k1=rt`%)id z;x};2^{0LF%}ZQfICoC=@T#9_YZoh5_1k?<1|}fjLdIIXd8O> z^%o&jpoL#;`jPoUOk8W@|90JdGWU`Bu>Qt!J~cP6$IPduyLMs!Kf!e$?D;WouDq$+ z!z?+cmWIcAR*~EkOGrXagf|mB{PHi(oz-k}5~C>%JVTyVXE9H~JpXoXYRJJ%^^{8_ zHFYD*gZf|jUbI)6L&6QvuRU>F4+SR3`W$rJXxymRG>#^mT+4LR>l*$@3sOady?|%X za!rK`8w{P*?cdkqM#cYSC)vNbYY#&NctiCE} z)nm)eo3ZiW9df5N%#%0Jx!;<7sAVEJiQ6|6q-nTiD4Y-dT8H+ zqJ}PKRfVHB%ThJZ=t<47(>Icgeqn?S&dsqKQp=U?x-LY~XPn|%3Y=)uy*Q)}WqY0d z(w-#l=23?N4DF6G&d&XTf8r1WQ@?Yh3BrP#2PZU8B8#7&e5|p-S}=aJ-lX)=@_OP0 zhTCO1BxpjKP^|FJ;FG@%Ouo{$&^Vj7J;t^UT(r_#{!_ynj9-Z%08EAEQ`%o%X7>qS zcS3j43eN7n<_Q2u2;4pog%`RpvDf19V{{LXuW|mCH{?ymWorjtunivzC0N;)IH61E zD@b5n->OPLDRqexxHfJdEIU%SyqP8RjOHY;)bzd~F7+Xv z@kDrbdu?k~5Tnd^OnE~0=usR}qh2>sWSXkSCN*DNBxYN^-w!DLjKfSrW$eS=u|sURK)Mbb{*y=X%LICZkA|6m|aiFVGR^gXDSBr((snvHzl-)2gYYa$0Yc#?PE-UL5GBM z^n0CZR@Iv%X-h~-gIxS#tz!$R!R(T~+wHqHyM$JPTUv`%hn0uTk~BB^EeZdRejv&F zAe5euGnP-=}cU>&Hh4AHM3>GV(uR1}`EyyIa($e%-IGj5Bu1=B8fyX9TLJB3jupi+SyBZ-t{$eO} z77x*2xZ~w<1)y>ZTO~0kYgV38+P8f6mI5|%1$B1!^ec-R+iAF3uW4O~XrmOwjc18+ zljA=Bkx0RJ^(eB?i1%PO={+8KYrUdZY*1>$XoWFY88oR)noINjg&cWc_HXBMJ9vWM z_z(AAO(REkb(Nm8dREM2J6BAadnSXOHGfT^s=q$@4$`JFM@`;Cz-z+&E7@Y!98)a!-i)p>-JVWa-&ttOqb z+{dxnIdZ4daL=3$@!0^`y!DN4-QMjxM`w8GVFqlmQyMpJ5!%@J&1QG6#2yTx$~&G{ zVF#RGLm_E&&k_u7j;xSUCb)4Tz}l7^gk~OsyWu$0*FuI2!4q)kLd(Q#Qt@S}Mqf*P zo+F<5IA90HX75Gg7VOX@s~FObd4!`R#F2f-j7dA7_@&*Ijro|%YH7M3PeVHgQBPr3 zd{Ge6vPdwrVcC2|^E`wMFNs5gkmI(Id?6o_qSwB*HE;qw{trdfz~QsP_r* z7-7)L!3$0#`1*R$ag1|5Qez7rh509vLUc4BBsD)ZifB+0oUTGkdFxW8c2x_F#ql6` zIB=6{CoRYH-R147hQiu>s-JP7T+sCjQ$Z6*Kg6-Q8udxM#kKg!_0{e5_|DR@MqyQJ zMfA|rP}wB9fXvLadC{df)$yU~rsgrXgF(BdI@y>$M$6!d>m_f*5kmDLC?g=)RzTfN z^sZPoLANxI!O16a#7bU|Mx8xHh~UK!&27y@F@|^gY22*AnQ7HP(OO)REuLP`Qk3=` z3LC?LleI3FNT}6DIQDV|r1U4&M2i|u8a#G2oH$J_F68J(z>Zsns+wxSn~!G$(Sab! 
zuJ!8bntMsjma|4Hx-d@Im}ysP(=N2zb7v^W?MwRL&Stey*F)a9^ORPOQ08IizpkZ4 zSO%s6GfA~Er%2~cJwl{ck{tC1W^ zmB!^tWm6AtVS=DMAaymo%D^iJTNX)(@yE#{_&|&=!hBBoaJ?m%ZN9qZp(E0!$?}#( zx>q%(m9%$LO$;3DBKaJM&FhhfPrYQ*(wvrN2IHhTmY&A?h%E_oQg^!Dyez4RA&2LI zlW6QwstI+ZDY6pRpNW}zSpsaE5VU&C!_&Cg_1&}G?L{&=p}uc_{Iza|!^+NAmZY6f zGu??4TuIhc9>Y&VU2g8{$F-6-wyrfQ+Mibsw0@Z6mMk4iaeap(CfXDvm83qIU9+nb zo0;Y>dsh8d-|V;DZEwqp#51ix7hCR3qN~hQ4(ntUC!Ll6mYDmP6r}QAg+M2539Hp|uygPTL_=>w0MaYHXn& zbYY(A;oy~E22sCMlW;T57{dSVr;e5E=&;+|(nJ?R$9-M9kvnhQWUt37uN1e^l_#yl zPFWiJ17pt9bvS|ukREmSEfGsoY=VKXjMjd3_t|Hkk)R?Q>_AUkl;blcB@SX05(KEy z6zt%atdy%O)ylF4ZmPJY&gwgq$7vm!eXk)QGw@OtX10C@p}%MH#7=E}k;GEC5lc^MVTp97`~k4-h803$jB2n)`al5`h4+32w&_22tq39-qjS z9~`Yk5i+Yw_mO69GCJP!E%ah{tNViH1snjNrUA`w{`TzKlSdp16bF8cB4gB;&$-20 z{5O=m)6jxwBA6DV=YMj_%L$B3OO85gthR)w->5$cS+0qlq#+aN`PHxDi+U%itWw`_ zfdnT~8wd&^=YM>)KOFjs-~|9Lx?Htg>$n3JCygy%{NCC|sVtOD+f6@*OdShmx8Mn2 zO+d&f2;rvjQIQNC*|erSjF%|H@3#+@k`_P`i;L@~`@@E7ggn%6=z_~a-ejK&!OJE{ zi4w*U82=Jio7H-VN$ptpU!Hs@w7z&$hzqy9%vWblTPziK_icAW&NJStI4|25%XK|E zk1(ZeR1~~cwu8o|@56a{q(`eC!*eHHCW)iq%EI)LO=tWAd`@GkkwqVU2jk&U+x5fc zX{1cCc_mb~xBVdcg-2Dc-EoHzzA=rQ^JXw~N7a;*@EDxwLjBQg%{ziEf!~eTwboP` zs~crh1AUm1Lhh<&Sz-kM)TG0v_I^>^l;bh+Q`^sR1~}vTL_66$E{N*5{X>jjv%OtJ zv-Q%Uq$7IDSrvcT#LoKmeU!HGSmE&Z4Pc>J z3*^P`7;){*Kwo=P6BNwSKmbk~P7z^g23k-u9Sg7BSvVuLb@3;9mK6qdtY5FYj5Xg+ zf^BqD+E54zH`lN0$Ig+b{jdON<%yP49=mhceEPA{25vjkL@y|;Wu*cB;PL=^5Ys9Q1BVbeJv0me6t;!ILgg)N9jnS)sAVd<;v+rdR32!Etn}g?Q2hihNy* zeQR1)m$n@`?^3MgJzaUz_%A(rH*1R=P_{wW-3HyS6}9TBzCvd}T<+bLK6Et}@#(Y@sRJurU!i+6do7axaWB)5QO}7d z(@5o#45v?dh1rOT&TvmdtaIufx=x5!8#M5WbF2-x3?)-N4Km-LI{E|5nVmh)O`P`|q`f)k- zM{AC+bZ_Y9;mdB|nFkL8LS~jbS;!&^pSWXk;;Ja=StG#;IQcoqGeiiMVoH7j$^w6DDA7&n%H>ex2VDf!T2o~;WX9V2b6^*B0^X&*sHYWR zMj2B*@xHJGl&WbrBYq7c){@D4BHTpjqW)g(pT5&BU;LR@YM|Mkd+J^&)%1y-l@eqa+TL&6L$Op zO$h*8J%#A$#8+F!W9`&* zYOQo`du3c37#}X$c*o^7EzsI9CgO@nL<*4&tFB4p(cyKUYBrCMn;&2plOc8T$ORd> zrdZr2SAVN{z1&^#!#%Vm{Lqz!XMds(t(K~rg3lUtvSR3Djq!N`?t1BpzOk`WZ)ouc zdp^|zs4N^0lcSiq7xwJ$7^=li*45C`Tv_7qD_iq!jR!6ju3T+2UR~YR*T{MO^Y}(D za&4OX#0fmWz?pEh8b5cb>zOZIwo1~`u`sBD@fGh*XRqYE#fFYU-J)%i6;jfNFL!pS zNv2mQDV@G63+39*vc`aU6q}DDR&Pdw1XplaRkGiFz1_n{#5a^V(4idgm}mMC191 zrn-5qgfMXSd@yLD0C}R4hUSiJ)fYDFyNL({htH%%i)8Cd6lgD^812N5h4t%{*We zN&7LQGK8)?^M!njkT0I}%Asl3k$)q2jn|AQr4_^eHWF~;++d#-S#W-0b8I|H@C+EG zOgWhD@faV3#7b*@_UPn84UvkHz2UJX!nio0+YEi2=;=25T1&W6-K^HBkgV4hi@3=O z>gmld(>e$qtw?&tj8~HSBpaP0%@HC zWw5_QVM@yz7f8>ZI_1<6+Kw43Q%jvBGTrXD41h;^-A@`0f*YONaR<|z&tXY?ahkCH zSLx>9qTbTaqej!+P2gKU>ibzWqCPo3xdi{_73pC3*}Q8dDuirlDF<0kvm7*R;G{DN zmoZx;(c_Hx(sPA;8&ac@3iWk!1e*S>ucINb#UfMd?gL&>ObeGSJHSDvN&exl>F+_v zGOl6+C0oefTE40ej&ReOwAJt$c+ziHJ!-LyL>aan^JaOwQE|C+(LKVJPts{`;gHjX z=fd1ARSGsjJkVof2ltb}7VZ=rLW$KzUga=QmbJUhA@k#3Aqw7~eRFdE`MF7eojG17 zXxQ(Lrv0`>K*+J>KD4PJhZRDiCAG9EGfC-n@Pu7zenL(;==l6!54}?O;xst(2KYfu z;KQBx?T!=7!KTLrEmCP6p1<|NMkpV_U)_G%5?8Ha*==ow^-*J9f|2o%pO}SPLqn z!$g50$#6-Rsn#3SvR>eW*s2R_m=yF-(5o(4B{64w;o9P9-;M+U$R`iVtv=0`YfjxI zz{Ii}x*TJlI-57J{=cDS!h6x@$H`ZzEp62+nqSI85dp#@qy;b%I1b~)Tf5Y#RVo_) z?C$xWo!4g(`i~_$j;1{r7n-pUmY7nw)cpr!$r>x^Gd%-5qHkxPXxFY?T=$L^d(+mz zr0ZplEJ-#zz2@GIbbh6;KK=CKPvf9yC6MSJNXVfj?C7Rwm)M+IHBt)oMH26WqgnUZ zEg0MSWb?Lr>Dkdd`fgq4yNQcnydB@aW`Qtu$x& zy`-C{YY|J3r8bQ|ppP*1WArOZ^XOs1w)@l(3eTK&g7FTt zWJz}U($>!Aa%EYU$NVI(0C$-_4+$!`X*D%)2=wDhwIQgyfOv6^>YY>9{D~hBc>gR; z9eNTtK&PTEpgx04M0wSf2!h2LEC&3^VXRC;T>8Q(&p?|a>Dcoq< zrc7{tX>`GpKBkwUR@Ht1f$EY*RIrM+6@o#+<$Sj?@HbO37}nGEA7)i7BH>s=E0sC>sXp5PY~>R=}5-ishWKQBRx z)HrH8p#iVH(H{BfY(16r4gA|q%b>dD;-ZgKhWOuTc>v|m?FT^FagU!FZ+P^p?v4A| 
z@0wVD$Zz7EkK;LEhMnfh2`vr^m4N$cI*xw^ESu}cp6RR6xPkj|@&eIso;mPJf8ApC{G94C8?n8@n}@O>&^HK>sw?FNbiO=e$>24fq;HTq@7z&{E>-e z{-J^~GAf(39hGvgj#Ay?_H$L8Ek)aZ+lv0W+R^!@VW8%;@>q^z3ejQT{DmTyVi zepm%{Go*EEqW8Omv4@^>wNbiK-PrNgWxUdp;(q=1ypRB-SF(hFP%=>(Lk4h3T3N7};{4oc1zA&Z$H3X9Q< zk*1{$K>P^lI`6YOdq)T}C6Go4x;{PYzrD%Ob$x~6gx zHkjRqa=7;a{~O_+U-K_901pvf5fHA_J&oYjQc};QUoHsFdM4~yOTEEU+V^UNDDcG9 zg%YAL&EsUrEZ;)UXq`xs!VE?`&6?uz1Ux~*FUweUAlnthP>ZJ(ERw9elN?VgrQ&#kth`) zIlzHWi2PoIObfBn#l1A~RbtlpjxY*}*H9NYAKCGfb+}pa@fo1?7X3VuD&}rkqYoE_ zq6!|_M_Pe%!~lf1H4jPGJ2&;qLod`qhRFxo-Q+2B{Mv|{cx%YrNAfI5)zdw7ZH6m<> zQr|K+n8vqL*(_hxlb|iQ^yg%n3JvGZof1%RxD0oPZLc1NSVIRO6+J#9{)QdBA|t*T zmHOFTnyC66b|F7#(J2jJ>>7=VE}?BdFsw;|sStY8B$xA)>W!Lf5Jxw=i5^VYfd<+? z{zfy0ct$0L`3+gs~bcgg)b>if z_*Uiyzkt)23cY`OXbuTyaW3NqtyP^IfV;ep!&3mVoZnbVtiHC>_^AYg4DZJY+fV#f z5dM=tao!dE&ioW8NYh^Y6Jd&*H*9RZ7mH22|Z#{ zb-!C}&GnN;Y;j2!} ziP=|PDr-hje%X3fm^C$AmWR$_WTOmrMs41YbBwOwd1~^?ojHZ6IqJ51$%I3Z>K)^f)<8(J*~Dk>EbFqfrwI;e$*p#u?loz2M_%6J z#@P9*oMQM2^^Mac0%)+tnTI$e%viM{z||ZHCT8gx^4D2Y4T~eKYORBWPmnS_V$maz zp0f^wx?Qzi+R#@i@wEgy!%57rPTV%Oqjn6LF4h1jlvpyU2>J#7q@J^C86SEClMJJ# zAI1~>`ozhik&5NqQ{2980c_Pm)YYsJ*SbCt3^gL1lgU!k=NJ#v6=4Yv_Nm3!G(YTj zE6{Bj9$v49k|FaBa3XMk&cgit4!{)oUk04N$DxFtscWHoz%?&Gp~DWmORo! zqkq^Q(<5NMw%mKapC)^4SDa98woIBv#Zzi4E}nl`qe%M4yCQG?XshC^yW<8h2ZIK7 z_UzdU5e!slH4WF9g+?K4x|_ygD#Jm{Ilz5{o2XE9g+f|_SgEAQ=EIkU-$x*#Kn2wc zV!eJUt$mmXsfC5bL{GMQQgAq12|>-=Qn0_%u+s))F7#VhoH4#GUZRj5PJFF9p}Egz z-)ja+%T#=;)E--49Rywjn+3Ky>>o*Mxcs7Kj7(@J6<#0sGW25Y{K3@M^C*f!j;7$^ zbB6@$a65v4PG`xBsZYB3xN#_X#+7+0Zs}1t2~1nnOG}TsF?2=GViy*%6+A+`mRfvP z6-W1ic^d`%^XFgI@3(hwB5Kt_r2E$=@2~h7`VaWKf3II_Snw?7{c>mbb;lq66+N*3 zF5O?mFNjc^yHcxKsh3OJt}ekNKO~tmdXz|e)RJvtvunHH+R?A+pifz_{b_hHLDQWJwRX zzdH=4);3JH-#s?Zy`~ZR$8f($X-#xkRK&eKsZs?ysWxY?Yuh(aP$srb-K2zVkdkvk z@)aygJpp9Usmqr(zOiLIR2r1M9ysf$HQYzuYgNJV4Xg8}*A|tcl2}6BK>%58G zqiLP?KmYeZPxoe@KR9_L?*WdFk51mj6a5~Z0-C{{xPQ5I-6}llTiBPB4-h^U@3>Jh zC~5cKaBTM?^SH$7bJ?i0A}OeE9^}eLExAV}rFWsrj*_bI+t- zGF9~VtJDNGY7+Z3uoPmdzWJQ01D}K7fhYL$^~sxsb4FM|2kabC2;?XzZ<-6{?7KKm zAv)luLvSZ~`cc(vj_%N7xkP>0e1KUaHn$}%L6`3R38|ukf9dP^hq?*=nBUW~Q4W5( zYmt7<1J>U0i!N!{_4ptJf7sB6zUwx+w>soh!Yi#w(De=%R4e^8Li{wmD0&<;^IE;ZbUGs?J?a7g@h9902n`ZRU<^pG2K zcA+MCcK7M0pNRvr)6Z#2xMxP8B#)|$;Sd_2^Uwl#558jT%14vDqga^XNP!qf`@_~7e z=N-pVU)~$DZ+`qMl4`%4eJl`?dp0tPtL_6*{%-crBkf-1MF)V1Ax&)sCM*u$7;r(1 zKIG83kHvT|t`eRZh;Y@7_?t!r^M%q+S~tqReN3DU-p-K*S%XiQTZIi zR5!Bxae1V+LUYdrf)S@A9MBGb5g(RFr>mSb+6_XQabbv-GfmyB=oyH5Eg}m#^#Tr@ zC(fECh8@k0;0=%bW3ss7E=4pHo-9p_#lQ+*jxbrTZ2ZoV_9bSR!Q?&dVT87E>P#rH z78)NNqPxx|-5Lirsb9A4Ik9$Jh4ytWKFXZjIMR*Oqk-Y;fY6>606-o=pNr1|p>)6V zcwGUq+nkuR!g1m3uA7!%%}50!2p=OT)#KwRYVh;m?+eKX(nTunC0HrBQ}d!nKrdF; z*Y$YUo*g#=dm5X8Zq-V9=Q@1B@yIWZkHHHCQMab34_H}_FfAX%j)kGzc17c| z0G_Uj4oQ1@kARr|N^K-fGfpi|6x3Z^meM<%-ug1fv7JZs zj9br-v*I8y9N*UVLTsmbOdHj*<{4d>VuwtHmWVi>8wO;&AaOLww&5Gjcd1t@7d1w+ zVbQ4ltg}GIb|?xGKSAmlc}$uW4hz#nH7lE2W!=Bu@A^G3oqYXgcNZ4!c-B4DpMO2` zj%P@i?@nIVl$gAMs&aq!{fT$+A}jlzv5Uw=@xuhAWY+>udOno8&ra?sF~_t3dT@8V zSiWDLwdY*o-`S@Z|DQCme^5pHcfCQf)VXthm??wB5;X0v`9dFP{%VzSrFyC2;?|1o zTZOc@iGGEjweq70wN*5?moU#Y<$EJ{WmuZ%Te>nKF!H-S)HNNDpUcWQm%ZC|zc~sF z05)ScE_1u{!~gu_U&hxJoI%4Rw%ob(M4V-JdfUT7sGG^rPHh;^4heBAcaWmUg~Us( zoQ3zIZoRzZAt21S+pfE`+QcFC`1S3wejJQ$>*47KN{4V)+l?q4s<3DR$$)WZ#KN7I zwc$7h=Y25Z8H1a#u;evJ)Q#UJ-DSuTA;TPRY}|<>=&S5RUA3U#GrP_>1qH_MF`iE- zbQSLg6BT=vgI!O8o3#svO4owMNC?`DCy7AZP`BWS6vtNl);9I0JxO=9aZ!_Wd3!~R zioZ9MP|;hkU-aXo?{y}Qyav{tYj@)HajT@Vj6!oQPgX!=8_hr)4Id%N;LtFblX!v6 z?yar5)`Qw@ez+kNha_jX&x5ZYG`XaPjZ!{9lDa@S<~(S5d=)_Ty-10r+*#2r%^r3X z4K=oD8Lk;;Wc(&Xw)E^<+b&c|;G6_UxIWHKp|lhcj`T?^j#!T9IQew;t*-UwfBxrs 
zgVGsvl+Xji*GW(RcV?f$z4A#ksY31gf6Ug6>n?X(IDkEsV!! z_xJm~v*G0{7LTzV8RX}`EyTNx>yP#KKjK>XZ=QpUfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBES zfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESfsBESf&Z{E@X!BQ@#koG z*co(hcgAP;_xruG{myv5Gx^VW??2`5|Mgw*q5rt}&p(^p{cmS??M?eLyBl84?*8Y+ z>+lQzrTy@qByarRw7uDH6J#S%Ii*4DKyp(s#Kr;M)&F6a~zUSOX z*=<)1Cf(JFAAi5QoqO)t&$(qX{;y|`-7@gcUOD#9e*PSkuK(I8r~Wz2pZ)BiTaN#m z7$(0sEZzU!fBq(8PyhFCzW67<%2ArJV{)Bx*o^XA^Ly6~J2|IbISn9ra5w3de)PJR z>%3epyQAG)#Zi7RDE)x*H$Cau!rbz9etxk$6PxypGWmu->6Km2gx%ZEKM$KfpZIt0 zxE%PCgVOb%Y59)-+$lr<9F` z`HAvGj(@+IVlNijncJZA{(QIeV_K(kJMnCEBUJPEy>jTkUk0}>yD{+X`|8v(V(T=i~b1g4)$=aT=LAi z<(|J_U$_0|PI=&;m&^12;}?(wL~5FSx%s{Dr7wJJ6q;e5AKi#u3&PVcdqGv6E>C+7 zkPQFqmLoT0Yl7-_?mcw^at_PKa|Ng<6&HSE#F!3 zE?!9L)i3uvtI2r4kpzJyBrNOU$2qu4&0b?8RK>bbD~Q8ojHWiw4Vri zSmIRj#hQEM_YKeBVsg#Xa^gw8S-QIc5JbjU3DiGOWWbSjgVC_rnk(f>Gt_GHJBU_4 zMYyd*mkzweUw0t^{@p1lth`=c_n%qEHUHi#y`cHKOw51Ry!`v53_?bG4h|GDHFzJO zM0CvGAeI~VVIKa?z{}0=r~JG3cJfkqiecPOD!5W^7#&ZMV~%5&>GH4L%}E&r&$@kD zPGegpFhRiV;gS+>#v(mlqCWu$jgpLEWfH}BLKjl)$(SxR+6l8|^4c)98 z@qr=bMr%F}MtE1f{cUL&?OiqCqv;jd3hCmGN9i4s`KJYMizm&<+s ze51VKpD-5p0!GAPHya{_DjeqxLBPYY*2nyNubbp>1MKzez<+-N;Rq4rWKK$X)1S~|&r_I; z#b3N+-W;`?dKkn;GC+_@KI;SA8@mCk9$e_u((5>k0+ zrClG7W}%K&mZGX zpwo7FaMJaFG(bKh9vB z8JKS9$I!|gZy$_4a(fZpehPr&DH~uaB-U90$sBz#7SPIG~Z($p3)H7*YGejOs_-a9B3q* zL=@aiB4Gox{W3b9lBY33O2P2H%;Q1~8o!{5} z`%X8x-(3{DSSUhiYIB^elYE02EePWIa?w-mbq`ZLTT0Mk-i)@$PkY^fm<07sV2I2J zp;#h~1ob=ncA0$J6UH(Ti{f`2bB^KdcySkYW_&zKJ%LR|0&!0}$OEPeqahsQ7;Vn2 zc_t`IQouvWw;E?5Sc= z&M=sOxaaWS{cF#LJH(djS+W%=bS#$}F$HNDld(TrMa>GBqnd`5WT9;P8}=w+#Ni9& zrUMuB&X-Y7N;g!U<1jMu_RXQF&RIZ5=YTfJ^?d3VaRziiT?m&mM8fX#_SN|u=~W;td~u9 
z^Z=2h8|n}C);QLPScd`HVT=5COFyMD7~&ld_!uURke>k2s~)8)^GA#ST5L{=?_VB~ zK`vkRgAQfP_#kbg&zEO&B9M^~y*uTO@$e|^6w7HmL~C*9(3u3sN(sGvrEHtvJjXxO z#OFdj>ZQ26P&VDuoX~K-yp}9VByJQNfVh|{2h1xFp#$&of=1OC;jTGCX??p{R{a%V zf*N|Q7fM(eN04qe9`2Sw&ktuGl!w3Y!%!sFhj{?&1kQdOP8CysIvKxPE*T&ac&pm# zk7eHitMcPUyvI2bFK)Ar*qW`fY!K~s%So$nay)he`=ss$|J-T}44rZ>CjdiB9*7Yv zDfH@`mL7@@Cot5Dq$5T|A5@NT5C?dD9%oUCpe=}FqZ4n+ygg3RNe~A<3qu%U1hg|3 zQ2DmEaxpQ4fiAjhsKCxBh!}4INPX;W!O~8$HKZxb)a;=92S42BE*7oaA=aA+wUfsL z?%$98A2d=l9Kf9-cDvKb_(AChSBAEa(u#wkWki|d1}rDc$}BX%E@lui7Gx>Kg{89W z0jEdIGucjE4k8iyIWj;R0Uk6o5n7pSgQK^T)}j&ifo78i*f{)%)fI#YB$$l9lqwVm z=tZ7=S|0w!Gdow3sUncZH;7Fi8s|jHyso!KrI&L?&j=3NEjw<2gEVO0QSyxbz1$}Z zd@=BqD}KFUtM!ol{c-n&y~aQLWv8W~P>^0g727GQl@~M_3!~i* z!QVXtxL6qe%Mftq)u1;pFMy8zjno`aq2JqaTsd(G_~jB#S&-55fJp8Jr>>I<`ouZu?EQBZh-4n^ z-G!?fFx}&WavG<8GUiD56~qJ-_7L=X_K#k|`E>K$EB8YBt;~NQO~{LSv2=slA0>(J zKxaFFL9*_;0RsFckPph}b^xe3XD<k1-)9pjlxoXt(8i4N@w?uadI-g4V~Ke zjAdR(3(iR%x7KebheDhXEC#XZkmaKo6RLT7+ZeEXA55vDjR&p|W#~tyq-~FR>>(ip z9qSI}Bs4_&&T%5%U`~z3^ZmdUKN-#RuUgOaBSw>ul|C0_0|T7{a~$iD5h{-2Yvqb( z2LMLLfeY-wVNzsUe!VAPeypl@jOG*$02KQ)kfQJG#u4BF1D~g~EgJ5Jd#H^eIbt~R z-=!|aRL`Z$bR{hh`we&8EbITb59cEWWlMg4{J+&^)LM;*T?vcaN?E^QE`gA;dx%hF zSOBT7#)Q!i;|!pB20c50&zv?E$)_4`>sncMdqF*<`N{a{67DPLamaVj6%!9U)t-ka zn|~uOhn@o2zBj14}q~-w%wH^2D%=LeuP_2>2uYj*U9|$MW!Vzd@+# z#Y|dRU*t3Pd|Qa(`LgbCP%oq=_hfm}omS-Z7VtT?hV;3kuAFxNt+)P*^1gT7ok{47 zMPYCcSZS0vC@dpcHoLypy`TJjfbSBMgZJUN^i)d*@jx8BL7c)$r`_OYN^dB%K@=7_ z!TY^~Sj*JRtuTclL3a{Kjuk zQ9Hp<&*-6Tg~7 zyT4Z+c=4EITLKGfYWx8ZoCsLZ-#rEpI_>@+#+EM-iWwmgtPMv%9H$1{2@C-bQ}~qO zI*ju&a5lE4ijv>(1XlE=mmqT7I}BlPp==pySlob&pC)6RJkJ|6D`nA>TYygaU~F~^ z?xoarWR2(!9AzW-Q2Q@@M*gF_>F;vrVVNEq8KE&lLc2UR_#Lbf9LYTvMGxx9NeS8e zN_pL2nBcH{!Vf*79)dPhTb17cDKQI4pJ15*3u9>TzHLj03B-> zBmi$?4?954U9`?msEduU=vujEgus$vjFNZ0v93do!=X}5O}sN5dy0RU8`IzjiQb8^ zFz`2k+E00+Mh18|_U91mvv&pOZz9fex!XuyySaBr^r^!2`*~2YSq8}I`Sl0{5mPdL zR*TeebH0%GI(I`N!7RAf4VsQlw;MF-Lb>31OQ(X=8h{&R#S523Bjp=8ns#G%WE&f$ z$Xufz+>ZHn@+4!|GMJ^&VzW*1014Vj8y4H|F63M5MySRbbZ6KIkVw4U!$twY?7u7G z?=`$qUNeSb2Dswi1g(d#nwqzMuZw(o93){E3_b~To{Wjnax2_lJ1;gvuuOhr zyt@gP2pQ!Xye-C=Or=fFbFGq|L>x_{gPiF1lQ9%Fz=KmEWC2oI$8EayQf|K!Im%lc7c#MCz!XA6fQxKaAvhrMc#~%{8H9cuo$>l!Mzv@TQ z4^Niwf}~8w1Y&uD$S$Z8{!Hi8FoG>{p3)f-R;yn#O+zn${wd>yx;bFNf8Ag*arj$7J z5WO5}TLI=?H~8&QmI(RDuXYg+Lxk=y^*j_v6urB#pM+<*)rhEwCdeFz-j280`{GZ@ zz0jv757Ve6M9ZAN1fzUv^`UEJ-7j!{_eXJz-%8T+N;&6mbcP3~p2!eA2#GZA>%DQ|mCLW+}a_%AxFIZdb-5Iy8bzQYu9zMxH~{PKr? 
zJtFpmxw?i$&O#$SmaL8$gT+bq8csQqIq^5q1p*P;y&eI*e#BqgEGKR==)-)I8)By2 zZM3s$o%9?gWBC~Zf!nzGl^f-Sm=W>d?tOD=DoXSi0h`7sD(o8rHJ?c*0gRomCq1fjoOTWW25sRpV_N*+$O#lSK_4{4>HlHzbrodDlqTB06{duB(**96~4Tn zOek_*H*6tedREE{{yTjBAVhf}S`|N|(DY!3(PH^VhIQfiNZY|=%&b!v7o`h9x#1QP zU_zqYZ^Hs5^?uKwJXc;z1d)vI>70Fgf`Iygv4N?FC_}hmP4kx4Q$wtd8h85+&S3W; z>7M=e@+adb)7af_!X4JihWCWpuR-Ff)_D5MN9LX*CyLJ@bc$x%eq-aKAli=5DrE_Wj+l>_@=e6J88T&u=fMed|MzmG90j?g`Pt?uwhZw!zflnTF)RQ0Re4GaVW;jCe_P#W;d z?7tiKW+aXvme_&=;bc9EQ(jLi21D-+reMUG-C)NTlS{8R=|Yn+hs;Y{tkCr25iDbc zU|?ve!j02W%z_!E17EZs(?G15E&jH92Z{P^tQH9Bp|IeEDmWSAoXZ1-i0$6w2M&>L zJ)`?h(=|o}MWdZD)8?!wDn9wLP(ES;wx&z+Op#L??_~!$KQt=q;GQ|g?GS|6`38Ss zqQ@Cg{cd^JcvY3Hw6t(AtDf({PdU)J$KnDJfW?M)j)Sc9lI$vCG{Ce&cOl?*%6{UU zCOhDi8Rys$uY=+dfd*6TGt^%~0}4kz0&W0pgb5ZA%HQ>YVM+=Z`MV)fDWfD;&;0uF zp+v($usK;nJh|t~1%u>;D$IHy#X^L!0p^odgf2(#__QGz8bl|LYhXxJ0F-n zii?PupVtt1;i54TwwC0L^$cWOy98QKPnzVnJoC;mTr)IQCYvCF`J84)L>jMV!Na6a(pm&kKaVPiXD zz#&4RXS`5e@S7@6HXb2lr5}41IuhyQUI`644eR3BxG@y&V}1oyJZ0Rc zdr9q1O8V?2r{|`NWD|a4pb1D~=6=R2QLX2GuFM%5o#!LhLlC+PD<1~EI zlQEXnw9Fjulwm0K?<7Z;NytlBL5rV@4$BGxM0MmFN#`FMF-K`l1gs4qLh5gwi&$-r zW3RCJW&jhc)PspCBc^F{ZR5q=XcBgj@ZFdPP}_06olj27+n!QJu--K$-nXC2cu(8 z8#_Ro@4rRRfl2Tp5qvrYX;mjoCgiQ9vf{3BDcJHUUzs*qbjwI6YghmRG&?lW`MEi_nua4qkJZM^%2KU=lKI9*4eN z#r!OqA&ttz5(EV7y+OmQ_@KbPI7}7s{Sna>hD|kF9`8MFsp3zX01@rnR(=}H=I(X9 ztQz`I|Dd*4%gtp14awP*TN-KbO^A?h^OAU9S7o%=j$C!ZR4)D-G@R@$$I; z&L=6iR2Gvl4Er4EE`hCv{<^eqIRP`$8DV)iXY;JlJD%|3R4emX*N~t?D1!I5UX_uj zUQO_}1fqQ7&a%o+V`ex9Tf;a7&3BZb^=t zH|qp(;WWsa3V-+wp&yN;gd41%!IJW2?-_!KHN#WI3L7Laz~i8ZH>k$pImm6|-xxQq zxB(cG8Ed3%bpm&hz~KlP3~;Ti7*nQ>$@`~{CR6Bf`!C{={YNkC9zNep@!LqmJ?uPa zNMz0NN~Dm%wFaSi(#U)nBz&VR`0ujeg%i1I<{79UPUP=spCk`UBo-CJJsH0#Hkv76x_!xDB{_{n}S8bBUAyIL(nVB-)E zg4E(d+-rr5tNB) zXrZ#nn9x#1B&8%(x^#^$_RTj#~cZL5n{JI*B2E)dTfz|LNxEZGRAX!~e_IR3%+xiE~xxOj@ zH@Y+?g^!GZTg#hCcIE31i47Ya(v3H8gCzP?1F+*DV0+W$gv_NRl2kRR8xj^Fi+&4YtQ5u3$Kl|8NUYR+S?k15c5M1d&3fwReH>&b7S z_;``XvIxh1BlAWQ>#6g`?~pIT4orYu;5?c~m>(G^qzlCTIlfCwVw&4evaefsojvAt zjGucTw`Yf41`Re($&AQ)N@R019w`#M4jU%$h}@6C^nN2JZ&yLs=c?L&rgE020$$W8 zP;a01^5=e#Kp6osoaNCVm;%*Bx&zNRgk8`0$tP2nl2VPa(60zBf5v-9HmLWmPVKZA zi~|s2BnXTCVcI_#i!!yb{8A+|SZpb4qP4lXgr_HOdawNcJwu-?`KBh-%?9s~Zc@n~ zrjUmxv{5dfFOdl-qx)~XQf2&lc``6Hhv!0h#W0p?5ld5QBzgk^jyoZ9EW&q?_xDjf zymj))+`|~;T|knlPOt|FsNKGTX^tCY<$QOuVh&lb!?1o}QK%mqE^}wTnvAdU zbvGn9^p*C1Fl@sTtsp29=ITr-L6!$9Ia+!8>3qOt=9OZii@wmWy%G3G%7{}R8&Sn# zNUZ-*oqY1ExW!U;8ba%iZ0+M9p)$As}ufBz#+%mX--(XYtSW8jBf`i-Xx}5i>CE&qcPZ-)C zv>mf-7=lC!f8z5@?%dDH!|!AV2tN4$0((AF#Pq8gdG3Lh@tb>bgs-`4%r~9g+u=0k zBx9y>8whoSR8+olD>-k;DRY18<=SILmOq}JXtZ%B)yj}`Qy@x`7U19R$1{R@R{MMZ;(<3 z>_!a4*@{x(>7!G`t^(nvy4;{!<%Ps9v|9*}8?x@OqEZ`5fk}Qvghv4UTITYi*CZvz z5(W$OoD~Q%I!aR=Ac@`xqli)xsEx#_c+m@G!9y~&5LgMB_MahH{XCsB>5AP&f_*&LCQhr=oWU3w=1R2S)+XUAT7Xv5u&MDweff`=A0?<#=rNlAq zrwOPvS&~rcrJN{~RHD6?vYRnd)q%#V9BlO#<ppJ!DUE_lxC4L+ep;zt998B9A9ypQY#o&!(!@CF9K5 z4-lcwx6e%?%=%P%Fvzwkb zso&wymssBDN7!!CHC>g@1jC|tOf_D}llIo2U~c-W`)QTESq4$cRjDWXS94JG!(CmQ zcX+FeT;}{e$eQ~LWi8^TTW(~KEMRV~2p4popn`|L_QqY41dr|4Q`Sa$3d^FSx#;T! 
zONHA_!`2ejcJl=6a8L)42Dw_pE-){8W2KAi1*3W~wF$U6(J>wzVNipIua<(%v$QL5 z8FzLS+t6;1IjL$HxKqWSeIpklL53M_l@0fRB#p(vPZSV==y(pZv*9g7O*5E?3Jesc zq{6ma=yYSrY`g0P$G;1*He4N^>SonhliCBMJ6+g3dh$M7ZDN4m-JRjaeK>C$M3Z_y(bp+3TC=P%+4| zb|}O2s&Ix56^@@Q^_HU>eI}gyOqbl*70yC3W-Gwqu8x;`l2?fsCNS=sjEal-V zD&No;mb<&AI6;>!)%;8U`-l|%#pa&W*#{4Q(WL%uE4{_<5TPV+dU0qS!=T0p7DN$w zXF*n(_LBcMrEQKLi6mV%b&$@9s(zB0a%iz(Ijq`odIBe7f=3Lhn`On5s!1cBFm&n| z&8i{MbVwMg--~a9Pu^pK^(;)O$nHIxs zd9GZ^3C&`Haza2S5wt?2$${A`N4+rdtxp3(JozIq%~QEmwG=;%aQ2s>CthtgwSJ62 zrlQ7;sKF*cCOxCGk0!rrye0J1mEFTq1fr*s8s^F?%DwVKZw<gQ9`ujWSSgPW>_o747;!Arq4W>?KkT$dg8$YE8_wnyEX!OHqp^Ty*Nk4&<9&J23gCXSIsW7n_iWUTK0E zw58wj;5W;fo5+}h>Br+?0>KjDRm&i$gff2&D*e1EVX@g1Z36Ob9v)Bk8#zjR;w}z* zPXK#^`_wT-qVwY{pT@9}Uph1mGBEtvWK8Ybq$NZLWacv!v)U@~9P)-EQzkb#a%T^_ zMcUZkDd%$K@(N}g{rxsl?nYVP_DVYG=oj)C^cW+Xem=8T zw#Y@zr3!jckbwLR<$<~xapLKWfj9sDJx^hyY_12UjAxX;U7w=e{XsoA3JCBYs5F7i zs&PFshkoo*W$q^tuP6gdgGAUU5Cc6Aq{|R2s`_nQ6W$+18NO%AR}9m&vhKy>2B71h zn95sdsw6Ley>VtBDD+u_*vf+Ar3I0}rPX^kep5)0*F5GHzxH#bu&t|;pHNh=NuvQ5dNkR3LLA;!-T0ti4X&@__;L?&t)Mi;U|{6KUwM95 z`5NIv86Qt&JE#DOHo1dTWjYFM)JkiFth<_wZ=#+7L99(DAkZzr=EnDB0r!Lk(Y~e}ZmCaYfaVH|`cP zyab@&tXTvKfht1`kZSf@BQSCCL5!+lEOb{T3&%kNnObeDdeobp z)E2pWSaWyRLvyYKeZM3AF)e}|U4KTJ=eLn1bI++M``|b{zTj(fQXG5O4K0v3rR|TLDF*|f{62Hez6I4%56fNMf&kO^6Q#+(*jeD;Zv=W z_Foh*@gKcV`DDR)PglA`Br8pIP461EUIYFm%$seFTcgm;DsSY?sc&StDkO_A{n~r& zdB}l>xZ)A^G0|kIKC;14Kg)p0ppb><_FEkco&#JneQq*7U(QE>IS%rk2b45|6zhd6 z0ro>fwpBNlS|<%rXl~Gf1czWLb-;}g)kEY#jiZEA_+_eq77>9v^Uro5{5&QnK!m6*|L)a$vcvrpd+PEX;|JmOmeG$vjO*hc!(O88TnUo zR;505_C9{c$@`Ju{sp5~HDV=|LHBMl4-_tU$BVy6tv(M)GEtMBrE}*u>@e$1m~WSU zgB`l|wA?_9^yi9QXiC0npFJM=E+Jq|#t3xol$u3ZkC$kbEx(X*5CW>A#p~wfP@?M1 zQdwPU<~YX!`ZH&|A&$n+3KedXji+_8{6&>^@d_o}iTEY_EOb(8aC=!;azm+5!kv#1 ztlU4>vs{x5m?))Qp<=TF4jYio(**7!k#tk*#D zE1|#61$@>bk>0_*UK723_Fc}VyICaIa{Y#ORcqk9|3K6oWMEN`R0KhqQ; zl-%R#z0SR~P4!yz6{Fu4MHBrM;!FDOmoxYmkibHXyT}`c4}Lc;o$y?+8J^-7$u+|B zNs)K&GQ3~N2WoS8o2S9~oWy*4E;Go^$RrH2<{AZCJQ1z|b^~13Y3(M2@}Mb(Ir1#AzJ$ zLQarkpGEwkBtZ=o5P#kTaDgOFNJLdWQzex@epX;6&g(8BWh$i$S@dWFyJXe93_F;2 zmX0HmBeKx7SWh1o`+k}H`bjIWt&{ePKB4w69%eEoY0{8Xs9ct^Sj&{n3*`w9srrjw zx*YeOV|u|>e{*@u8?#YX4ec5^t=q@5{~b{IG0*l72n3J(DbK3xwvXth2)VHxGO~Z- zVN`G-9r4x>bH&7Z`vEmAD>x{6!hPJVP8NSAJv&(auIgm?jhwPk!X6aQ+V-blOF{y} znk?&v;&qafkiU#~DAUzZ=Fz9_Qa=%o-l!K#GWtSU4l*iD>qmYeInI4A%-1kE z`=m^Mkcb!D&rP8a0V>Ety2eS48e!%2#K>`XFM{0h)RFB!$rY$lVfgyJ&59{Vbq*I( zxW#MqQwTt?(J6$puds-E@UP|t(5PjRVJfcbbwG*Y7*vaf;V}!C^t3o;JEQQqGR65U zI!Sh6rIVrSXSp5Xf{Ls0mdZOCERJizn#i^xz#wWgFF3-a*Q7)*l}3S<)jtH*jTFv> z{oyO9wb^#!C}kP4y?tjzqxvB{3t4R&+NU;Xn(7 z{Z`EY(v3n11k$ANNgHyh;wP>#-_+RG!_Zr z@*iamE%(yzsy$BsXjFZ0gkE)+L>F6$g6ceP=@EuC%RLQ8bd>^jy<8b9+GaBP7}U3> z&Lj7NDQ6hMCWRGy5ezDUw#`y~B@ZN*YMtcKRV+b`MDP%^mR{3(E#U5_sU^;zlDNzS z;zV6+#7uH-?&m?gCm9Ki)7{A0oQL80@)dsyrGlnOn@pf3rR?ux^h#NDCxsF?#&u1!{6hkUyAX~4 zfxjesAEm~xU$LPwQ#@UCH&x!2zw8c}B z2x;&j{K)(HBUv?rcVqpy8^FTq z&=yCarq{)1j0CHyP9kmJ{UZo@b<+fl=tlQnH$-TS@%$w$N*4_n4$GTd+_dQ@G-(v( z+{{|GWj;@GTj}fmUa>X@p#)VUfh@!xOSSFsj2kv&jO_b2-jiLggFrE_v2Kg*^)je* zPf3`wU_Tfa+y&LhLPMsCAU=6_h?eFl;55XVnem6~i>FiS~X(Nq-m_hH( zQK`{=RvXsu-!s1LXF+ADx1KNi(T$LHZqztTeVd&+P4p#xiR|n!%*(nR2Zj9!Gm2sd z($_-#fx#y)%$27+u43*(i-ybX_=VgWpZbB=h!Ru1DFN1-iS8O#rA&1kl^1fmRXN)e z>84b~6Cq6miNc@CE%6-#ekw*H&-kqzfU(Wz^M)p@x`y+%ZX@h80qjzpY=H1b)xDCu z;`%h$(=(F!YN3fL(y32@66T9&>u(X?ni98@F-gOoeU+T{d#!U>$!uN=x>91?(ma)x zZ6$A5B$(`TRQNpMNLr61tu~IXUoe{14%5(;@xb zlm&szz?Mgf2nN652F?80aQ!btuq|NK4sglV(BA(iReUEh7#}f8&8Z%lE>L$Y_y^QJ ztL3UL4br!#JiS^h?!udl-zopWpsJxQ{t)g2XANs8iJ-fN;*#}nuNE7<`;am0cYaWW 
zanOG_-(bkG#)F!DDM#4zL?t(I1(T{2`ne zhbMsr;5JxTZk1?*(354WW^yUY3wKHw!skwvXT6=EE}xIyRgu%LN1u9HVpD0D{&~FG z5mR|H97eIMo8;nO1yYbo z3l>fTm%y-La*&~eGTOe{P-O)*RV-WuX6I+dPvg@>XX8$(elYnWX451>b}wF~9S#r)zF zLToA0V2>+8`ou+RY~rFQD~x7;9N_7;N)2QIVAPPPKimeE>&0@(8!W}I2D|+RDy z&Y`KGfgiBC*IhIISyX|Im4sK7x0=u*7Ctv;t|}L86rN(9>7sH>DK_AJ=Jx$W4()sV zA@a$Vu8BXCh6z_ZYwC$W!pe|1`_5GuwF~CT%&b4B2=d=~@RK{+>m=|6#7cy9A>2cC zu?@RN1`8%dMw=h~263}N^qTQQ_n~m7@(8Ga3qGG6Y8zOu0(rj6p z@o4Kgx9;KgANm)VzzY}h)nbbDU-XISB3)nld#B5(Q~sWah~b`)^v`V{SJo1tB5xJ& z3TaFJR<#!MR1(A7CGur4k!eo+2ot?c9jTG38m3OoQY&^y|N1qRJ=?4dg@pTa9{0)9 zg8mE(nn`=sK1s3`l(R+M&AbMQT_rE(cN_Avk`R zGev$?rqzETc#LvS`!W8_VvXRkl$RR9 z$jd|bbj00pFEVdX_zY+FYGnRUl8Tj#D1OJ;Q#_FBL7s_rtI{8=?oPbg;>8mfLNlQ% zDO2;J7yfR!6$+=;ZQ9zXG$QZDETzowpgdh&h^L?SFB!x8W%QokdA7WK!iq{4ESO*8 zl|vG23a_Qh>W3`|iWvru2|m`8Ayo&ZD2xS{DRim|jLZ#kcp9u11!Z{$7k6{rGwec( zx>tyEc$aGt=TfQD9!lpIzU^Hd-8)-O_;>i_Dn{)$VpR1q|FU-9_{_VGG~q+HNfyc- zxQ;O%!KJA{Z2Z7lP9zIr)yiJ)_~kVxY4nA4(xQ4gZ+Daj50-oRMzcB;0(>kVJZ;NOQyY5DPw%aa}o$zOP}zxRBm@iyMD7=ePSmWm(*e|(Z#nD|-y z3G_FN%9!c8HY;{SC!g$me_f6gKuLaGi^L{vMJyMeob(Y@0tk*j0}=OKV*{40I(G*8}eLAahtz zIPRW&7AG7U?7Lput~6|!AyLv|&Fas*NYaAmf`z`iy1DkeG2xmJ#r^Rg|DVT6)`WWJ z04eyLbW!|_z@C4+LJghya>ZgkN7;M-S^Ci{yAB%P;Nw?RNjc6SI8@SZ3GLCRoGhUn z00|k=kpKHOWaXV-yisFUgD`2)7VSWmTZ)H^FNg&e={!%=Y1Kv0mVtQke ziAMjfmTv#Y7%r`_<{THCkKt;4y_7f)L z^dlUWcaA>tQ#wX;O4CA#;->iWAZY$zErA#oIF$~>!_VF6Vf;dEh`SjBy@4S6CA8ac z4~#0q2QGeGvaO){%Fy&BkHv!M9R86D8W z=XbDk6$H6VDx=Kj;8gjmXL7A%&{Kmr#?@O`0G*kNo8zC3{s>e-@2P(}`pVlXZ{n3# zdZ+ME=%XTnuSm($`^7-iG@~mGwGeroV91ef|_435u`W3t7jhq7< zfs@`q;nrm(?2d(TX%2t*@=T!}tJeq^0S$Yg$c(>VBQNvoo}0{|LvoxVl<2kq9wZ+C zVB8CC0pS*oVO!;f1y_;6U<^UV;edhU<|EKMbh+?0;T5Zz0c+~n{&hM`yiJXkye3>1 zLC6#_o0)j31GUEtbv&S_cgqvKr<}2-$mRe|Yw6pDw4p?wo1imZ~o7h30CjRTu*gMh7Oxme;bXQ7@ThYH&@` z{l~YR3-@2KwSYfH_g8O}bjv{gLaKT%J0+qT^W-Nmlsj(XckWVqD#4}t)ebA^^@Ti{>y>ELrQ$4TU> z-)zTjZK-mMXf-T1nhJRWA+YFc2shp>fHZDwwIGU41hg^1m-OzS6RXEk@xGky3YM_0 z5pU2IIqT^wpvhG10SV8)!%M_N*OsJZzs2njBeI6m{iLO`6631W@q9!U|Mz0l?j`9v zbaeqX0~~{Di2E(il?gA#C7gOs881QvrN(R-QjZJ8?GK|DgT|walf}uSrP@NPA#1A3 zvgAdo6v9vyvNEa5kaeXA`F0tT9!M9gyhbM(Z@QG{@OtRdD}?jFV>~WuOx^V-C5LRb z9|e$yt|83a&^5Z4^!CAq51Vo>e(N6wv*w* z>7dMrAizZ*X95QStFZxmSR_I4G2&_v#tNK$s+@hwZ+s37+4DvMI_(^UG^=){625b- z=^+c$oyje11X0rby&xS~w0<5Mh2Q(EHbn+4saMLpKPwcWt@-g z?fn{40_!~b%v0i}J55#HYgjJODqC!ES{Q|Dy?Y#&%K4lHOI&q;cMy8%V;+cZHTfrT zFu++r6Q}3$piEFCa<2iI*hbQobGHxzm8guFsB+BFeJ_%T@Id}lj|r{A=?+E-jz^m} z4oM3$Fjkmbm;iLm^v>rEXvcDShITBT1K_Q0isav2Mh=(Zmw3W5vQncXq2#o6VT63d zWpcxaI(q?^v$S2kMg2h`_D};7@B;CJSwfJl*H7wHkwjhq>Zq8IaNLAwQxt+aGceW6q5~ zxld(43sb-L^JyZ4SV&}u{r1s^O?fd;{a+J8=QsS~S6JMic=GsA) z?8>j_z^X8*+H26cu?d;p7s)@RqBL>ZNFwfWQ)DPcL1)f#%6fgEj5=%@i^T%N+6Z>i z4@xkJ1k3I;G?)cmbR)Z?9V&nh=Q^ur!WV9d7#E3r4mWc)@;eWYX&!*%@I~Y|4H`-B zO>wFlXBkgf>R1&TEE(9sQ{FK&;A$htTWUQ2%ec;Ui8lJ78s4M-)hp0ND7+KXAkSzt zkuH47Si`*`!028O=cwX^k7NnC@|_lkJRt4NvnSf3)H4lROb=8BNds+A_>!uw7I%S+ zM5X>ayHXn_=>-81N+k@`yT%Rb=Z^gf&tDrob3PB?qYspV z^hfKyg>u_}+$(SR`Q)h`z2^l|ysYfszzg)=oO&$Vch!zCYOjA9#Alun7Uwy@nXaiD22hI%`Cl?%Qm2%-gPvrH59hk zN8eKU^u41$9z}8AqhI4b`a1}azQ6(Yct`hMFR%I6l63HLj*8z&#ztFbS~V|^<(Dz` zraYDC8vdr13FyrS18L;$`;LpcPq?QEg>aYFmG-{ANeir0KMD(O4rwXI=>1f&tN@iY zL{F$@p!oNof9u4ZSDI6zd#Y@&hC@vi8Z?P(4&@iky$rr)(96A%G;?eAT6RM4k30hf zEBx`^4DQC14@$)h1lZ+z^=XT-?+{?}O2w z$fbQKSXXsXzVx&dAOx)QVe0Jqo#1J&XXjS|0;o-YQlQBYFWy6)o*`F!u6HrcrS#jd z37lY2NVdJ)SA^~_dLz)%(G{g)f0TZ-Ub<1D7LPPbznX|)RKk-g^A?N7D{VI5V+wqb zZT`X07b&d^^8>~8W@b6Do>E0fNT2X=qZb`cL~Fy%mf_tX$`EEj)IERsoEy7DI6T2# zt()Ozi?o4$4{PaSX{!!YxXQ2X;+WXH4Oxb~$*sI>xh}yKMFNSJRR(m-S)In2^E+fV 
z{Lvqo-@``?!=Qo-VG`j{Br6oeIQ=*=brlJS2nd!fHpNwM996qE{RY)!J$=eQDHZKP zaIJM8lv+_gPqy@9r+Zn)5Xc9kPhcu}S%qoBquz_)igKN1~m5&NV(E~brE)SoK zO|j9dPRiw@sH4jAE~i_Au@1Z)=n?s)!RbO9sm6k5B|C2~EOem~xCE$V3+q$mNB$`K z70QSwvRsLtO}}N*DHi)y0;-BW<0X*uz*lpQ*gv&4X-)yq0Kd7tC?5TvteRgfaIg)! zOUGP76XM|qzxt9UiRu3&cH8nvIsP&DOiZXEr->!$Tl*)z@4oAO~oc;c&Nzzws zwRR*+i$0$^B!mFPDfP_na;3x~frcM&31y$QC(0|{SgP#CF37046LAw3kJV^9(=+y_ z&o}0aj&{s`fvZ5~nfL5UGBv0N6(7SkQR%{lbFeCk9bkBiXo*XS=y6lDnTI&TOf*etTH=?zX>s#r4eMXO@5v$f%J z7WkB7_|O75OV=b(s{J;Rj+bi{CQp5@1dKMihvjklC;l!*gpOzcH^Bb{IJ|(ju#OL+ zmQ$NB6|3^-Lx`eBA9;~_OK@1roCQ?4S5i7vwZ2~GUmktng#geF5Dfj0@W45mIeEVp z_Gl)wcc(nR78eL;d%4v2YsWB!=etwx_>XU7#R6Gvliogp5`P#efo&8e3jH>)*h}1R zb6g@GCDzM%&xxBkH75EimO=r0a=P;|x2_3(V&ikQ)mCjkjM|#tJ}w>?*zqpSswgd; zBm~Ats&N#G!p}YW=qPyKitil`|A+8|dV5#9Uhgs-qg$ToW86mH5TIrkx9V>>7ePIn!RzaJI!)+zm@8uI%N9*k-;XipL^oXFs?quwis7LQh zu0OK?u#bUC<8}pY6BCebBN1{xBwba+r3R+H0GF+gwdU!J?%gRnzAeNw8;vu=~c(VC%bY3T)u&#%182Ipkc21C{Q47xVc|u zsM)o*resTtEt(goC}THImX|G-7c)ie3Zvnc3xUx&oEa>6Lh#Ee2dirkQzKqq*5l7T zs`uw{n>eIDsctr3b&796P06l9C(G-8Hd{|34Bn~)!Uvej)tehb&q08yfSz3J3uVf` z6buddAuIxDXc!LuE$R>2!)Ayu$6BnH=4^EK_?E?}k0DBLKfg*SdSfcDP|_jiNqkV` z;I-yTmwm;V_0wvF&a7RpSg-1Med=e3Gf4qK(fhothZMZ`x(TSo9&MwQaM$pi+8Di9 zO<#NAM)#frIrg&-%GKZw)uv2dmigKvcp^gAVm<^)Lz$1rV3|ttiU(DL>fnLb{1>EE zcf}X{TAE_U0meZQ+c{4TW(~S44ZD7M-mBFI-6M(Bivzw-5rG;j2w{2m=)EKC%m?58 zefGw~@tdEAChEsfc~2Xh$B0Q(b1i9hLPJ+-e-$qrL_-Q8c?P6Kyy|1}SLLKXgw=#p z$A?aq6P6Qfn9;*PukJ-znLkH%@Ft+!1dTQVwGM4w=6nbIMS>LEZ=o^idveh`)|)Hm z1EO$ZvbR>vSYy+%k&r%B@8PGAi)<+*%F#f$b* zC;zBK%GrJ)Q+Q{yiO8Y*t9Kjcy5^L(BY2*~hbTk}f3own_pfclg*~fY_IbuYBttn7 znc0eJH(*_z@0ZilUdPpr7A0}=mtH>5tX~W0did?nMos?D%@Vy06g@#{HpqDGS^t@z ze?S9~rq1*P7pnmH(Bd>0`+GXiSs<>6j~|H?Kgv)F#kgT=`>DWdEv;JwX@s%qe$E2i zMV(lbR<)^JT{Vj#z0(6lk|nee&a&>AFtP=uKN$Ts!$ll0j&^ei*6nM8JSv*SR`}J~ z?5TlU1y6sBX+{&Jqqvw^nub)kfG{g!y#%FIo670v2B~p z-_!6X^~ktv3c0SalN3HH-~KFKh?#u!uvS1i`n9*}bi=!zq$T>&{6+tYP<+e>( zkZSr|5EbO0Lum@P1)kV7su^tVTn*l*!s<%1@mi=5fyiA!cs=)KdECG35{mIO{W>)= z%NnnhTZ|oym3ni>D}4bQt%+mP0@+Jg^FJ_G@@H`Oo&E3^JG$<}ShCu)<$*svzyst6 zhe3!iga0&*na?Ax{_d1QTv<$tHw$15&fe-!J=Pk`XN&T$^Woi|E+EcU%>YK9=;GFmI;SQnnO$?1}1i&JV<}3rjyoc zQTe6HM3YD04 zEnB*OfN1>0I)M8s_Yn0E;U8*-5PS7%;e6rCNI3lIj^pfSK6=>nB(0dHcGkZ={gZO$ zjK8Q%(v4U#{dBSC5ANZm1d_syRvQw%N5tsuToyHe2ItG|Ms0KWng41!lC_gOy?fNU z#}Air`Ug))HwPVz54XatsX0{*taDW!Q(W}dIbC>t-mZF+fMNy^#@<|-fJQg7MEZu` zxCenJk{<{jHpL|Pe9aRCmTqiK>T2thamf54g3aAQ_wp@kjc)i2l9}*|x$FXW6Q~l! z0XEigB(1lxvg-e2od0k6nhPAE&8#@Q06k~Azv#CLN}7z7q&DD*r!7G*WKzQmSxX<3 z_d=hJx1lX42p2tto@28-15rGF?O|Ze63xAOaLpQ1q%%%>M7O5MDOmCz!kw#(ot7Lffh4K^>!x4>c}+Rr89K;Y=-7A!jMfebD2y2zacxcYC#4+w ze!%nGS+`=6O0)`5Qlt>Q<3Vuai)rF7%8|d?>cSHNZxWg$Yh4k_qdy2`I{K4UrljA` z!;=@s#W(o{aTQ;VtT`=`WJ9zKf9n80Ik%n_OE~ zk(5%ek5)m{0AK)Yz%R@B7LG!MH>UyM+2b(d9{r1~ivK!_QXJ@F@K#*F$N8OzHfY2a& z=!(eiX)|OM=VI@d)u_T%O{arMP~8CA>@MK6h`6dh=l{Q!b@`5$t07x+illU?kdQCc z{kw``)BMel9fF?~bN>kFFn#8ga^0UDNI+{DPyY?Odj~nB{D9?~do{G&T6rTvE?ihq z77^;vvpMMnSZY=x3hQuv2e^e&Alw-Oc%e5(@!I~PHxbp@!*uOWdGar0kMzZ|(}*z{ zQ|CQI7{^f?fn0^|;54x^6&mAgk&Bjtgwu7==hSIHGu!JCYZXw&on z*a_Y#*(E>+jARZKB;jV^dsxjd&oD>*g8iE%ZvD}{zbj{+ZX{1t=$40jK6W|DV~=>8 zdiWOYO6=GO1*FuIbm`~j5yadr`p=?nqW-y7dD73JoQIqPkMhl@$`Ac3ZLDs{mzuYg|}tp#i(P+cI))W@u`ylvEFa(qJoofn-rf_#N*? 
z#b3|_{u$V^2IW2G$xdSO`|}+f9sriRaDYlQbkA2$pkX4>}S5ao#b zIj-WFE(x#^la(F1OtZB4I*#YEcX!6AFhMgyp@|Bg5C9cR^7kDo3meV9?_& zHUeilfB$pT=zY(*PvUY8f@?42bm`CZW5(TT*mIMR9QPTckH(-}$ZYC?bV~zdh&UiC zAZMUpn75`@>q@2e@?+&yA5TWh?K_4kJZv(n!b<`j4NY>h9B!n%LH#>eqw$#sh{G@p zrV`SEe^uQ*j{-XvRkWB(Ha;93lIy1k^=PbNN?&pTM^-jXBI7O={zG`ggVv5(Hc2`m z#i|tmI#a&o-BaclcW*Wnz(OPIW#^_B1(WJfdO_(B&Bclz6aK!4mi4&ftop+J}rQPhi;Ov(@agoe){Uj^lDi7N_1ORU8`c5het2 z4`!B5QSC`FCh2+Z+;hbCp{e;fqCw{l(=Rm$g{C`(Og;Jq`Fh?>X+d(C@<&Jq!?^x0 zi_b8>Xp;*8`=f+RHpbihf1^}xp@P=J?_`x!l#(d4S7qx)+IrkxdspLnKr522lXY$( zv|Y%_D+UFXc&%L83o^;6lV_-{=WBi@OBE>Da4q--S4%9sBH{wVK~X)R<&_f5nYj}F z4y={%^rhw(yfPIc8hzyrSi%&kW`X|#>xXm(JkoO+8#4=8xpuEI#j%+`<9$ zQ>1ltj=!{h0Z&$Ha|0A9JZvl-0PR{UCySeLp@v>V-cjnq^^jWC0^t;g2oMjw85=-G z9r)#LC}O}HuIFL%v+l<27^58bdS`ji0yu{2@GU$+c{%0iL!Y9Kjk$NY8;H5;B7K_2 zwEDE2{;)hN3vV*Q9oWA~@SaOT0yWBa;wXZVk@h2d)+>uFH?(26RTzGuxBY2)i(V$)qK%dz$^lt7V)5TvT-z-+BWz<|Lf z^^E1^l3?=QW`L^|pSsnodZ2T2BX(TGugFQgAX2Blz5s$^QFdX1 zSxQ!r>_IGmb@(xxXsUcw`8L zXuQcqLdQ}$cF7wqpt-7wL}VFJ9CFA`+=ESD6fQdK!jALklSq~*SG{NwyvGBiJIga3 zkt&AvJ=(60>-#ytJrSwXVOa$v^JjI73#-<<-E;so55Z#5Klves5z{1r$68@#Z;axnk=WS3>dvG1Bd;%NJiYub>JV9v)PM z;738g!sHIf{p^V!0t!Ft-#+>!e)zXP_o_YpQ~E9VG|)et*5heT)WojLXMYi}V2+YI zgO}pOfaWlSEw6K}WzRx|Mbi_3DtATs_(y-R{9Z2M4{w)%UgV!fw1Z4c)Mn_G>M@Yg z|B*LKw<;(rl`=es?xypAXt=RmW`HCzM^>B`%AqS}b4>ie=&TdbSsf zt0a4a4^+{`t6D3pY)0J@d;`9>a?LZnTcWs(3Irm=A^b8%#l`SD1m#WpvADh@e+CwW z2`*91)+eGdPft&K?lNu4dVHCFcogW%?Svo#GM7ISKBLK+U(8~Z!{)mJQTZl$^;!V|Yf zkXE5hTbB-#v${eFu*A#B>N*SrTkzMibq6XS&ImM5^2R>PhhQ{gWq4^&R+(L+>*xia z(>|9dT>h3N-lM4H?K38p{M)K`c&@_yDmC14-d8gLij5?J1;6B-(vbAKOo}#oFf}s} zfILIWjVHqWpA7=;Bzy`AM7<(^iXsJ>4nzMN?t2UJkIO6GstTAU#!Pwablf|Qc^Gu7 zHMe*6y7A>hg-Dwi-THb7`}9t^;k}S6u8;Y(U2y=5k62j*#l3DTG7gpR>2*^~W`KG; z42#iaEOI1U0M>N}{UyYi^_O<{b4g`Es2Ys4Hw%fcF~EVSIHw-t%rr%}qx%gDI zWl@y`@3M=FIS3#vG5V-u0(`>{Z;yWORilZ|uKx@fX3J8GAY2O`!qv1U)dQ8w+%LY? 
ziIeAEn`_qwPk!bUL2!QeX44Z&j|27Y+R%qAb@mt?!C6dOgXVkm2he z(E?;NL|h5PWVMVzYrnbaK;^~8+EgH87-S7LrOPh%mw2vwS$Tl{mUTexx%%(=cUMSz z@t@l^3mk#fRc{(jnIPX4FTLVgfhlOak25u=;P3dESCP^jJY|BwyVioWeSbtl-$(V| zrTb6!Z>!wBa>Eb8ybMB#uKs>bSY|yL_1XKPCkEuTMi>BilxrjH=s6Kj0l~=fB8qy2 z+ofWt01n=(iaw3njAhlpGT)m@GPZz;%<#M9aEs)V1(#0Wq$}(mjG%5P1?<%R>-t>jNwV zXuzr$?6oD44Lw_sr~t7^a9;AXS%bTI;)E8ABY%FPWP|)4&bN;a0(`lg_x?_m^Z9+U z*|t$?`&5naq&Fzv!{y+a5Xn4hNXY^f{Yj{5_d(PB8YM671n1}q{plOLj7j~@7$ zkA7K3_x&}T8#G`J2?z+yP|G`bN&wY=-FrDhN~%p02%2uIV+aF2QUlu&8;a?Wu_H+B z!O=TM54{()Bv~$GRwzLGD9n{quP&b>0^Ff3QgP zIG}HCxZiQ&t@7$y-U!S*Rc%tO(q!eL5~C{hP#jUur3+u)%w}>c*LJuiGc0hW;@1{S zN`1@c4yqdFE$GBy)C*^Kh11uNmCw7teQtdJNGc!IK7ugStJZ%WMKAyi?u=bb^{0Vz z`coc(Gz2WQCK?9z!#Pb{hI14KcW<&x&R?1fFld7A!{fR#I zsR8>a{aL@_rLqy+P&Opgvoa-v9t~WB$J*eOkA6-&zFg?@%%Q!H0qQEF%cHICLIafx z$ix4MQlqliI@iU+0uYO&P0SY@^_vm=d8<73WXWaT;lGFf!o%OFlxc5~|%ELop zQqd!Phdw!QI^o`^%?$!s!IP4`G4MB4#`MJLpQPN$H|&ZL`ZNk+;*K5N_jgcDz!?wx zqd2k9b3Km*AczL#C&P)cWE599)U{`W=fYpq1ORUsMD!41PVM1pgorL=`i`;?Hui5A zoRS#?&I}UTR?lFZ@0V|X=7EXa6bSI6e`zY*36P1?s0=XdL@vQNu!yj->~7c>!Y?3c(42p4Asbiu0x{>m1;>mLV`P!{@gDfyGide~gMon@!?hs3AQT0jQ` z6Y9QrOE!5jvU(ULGk%<%d5A5pKsWMo;_EdHkvIY9!VL}}+REY~gnO{y>rv#6aZ@I5 zQCIyeEonmDGT1%myLTD5hXxq51CF@<9U^%iB@`8RZQTFV7Fm4BPZ9tU>P2n1*1~_C zPr$GOgcyZhOL8XYx$~*d)ZMZY0itnb)NziKrzo^qp?Z#p->jT~buS363K{lZL9%zA z_p8>ihFh{RAQ7otUME<pwjsN&R9*hFf;buI+Kjn$2 zvep!r^PvUWe&Z0QUPfsO6s}Bd1?XwK0j1Rg!&Z48B8B%12%Sx%Zks2BKK> z!XUN1*+5)|sQHcc!@zh|Lj^i}_z8-+^ybcAqJXUJ0L7%ue2D1mqhE|j$^7}ZKlgYz zZ(v{Sk3-qu0dmW_SVx-xg*~ydd8jC&hp0i^il#*v#$O$g!y%S)bIq_5`?ol|-Z;7B z&;nv37nPCCL%m+(s(~>J!7m?IEM^qW#XixA22^478<1feBtl^Uwj>@xR{ys@mu*U6 zCeM7I>rt}|%F5_oLKgneGyGHZFAx7JEgkXygD`rPPzpV=uW}PXy~mc#x;9tJ^Ij<= z22!J^2<|}YS*$?4#5T4M{mvOy=vWyJWSuzk%rUXfnsM^NFx1;x6b-uQ?ITJ9KY|i(M4!kMoY?ia?j$$@i*c4BLVQ z;)69^F3ox`&`6k8{>{u(d2wcJo&WGZ_)qcotz5(dj)Q&*&Z#9}70{EESWmFR%Iuak zidYJ)Nt5K$n_a*+^XF%0CI~nt7oCm~%YL-XG?I(F;smT0kKNR@$IE(HlM6FmV*k;rGj$#nZFCI7W-z|E6<)1vh>!I(!Qmb z$lW{%Bfz`coK1|A(ZLc!=wYgobTRX33M@OE_a$QbUP0e#1KT()}Sc?kExp6->L-6wcO(y(ZAw zYbi;@hFKm4e_6po^OrrOXMdmy6uxmUl;?6f&bM7#og9nsnKR$J>5kuf|N2i}d|LQ! 
z?86$oVc#`MpWFTX&;DE?rUFAETx0S-=0g9 z_w?yiXpZQ^=;d`&ZX0z{w(e91~P?7(~hysh(xWR{;hN#^sJ|%Q!+5J!%^KQFN;`Z&Zah zPUXrlxOW6CLCJyOy$qGW%me*1F__u_(m#(8+Bp7$`O;hPE^q<2nX1zYH$|@87MBd^ z78wZJr36)EuyMs@D7xR3^$3Suahu4=?5$8~iKbm%f*Pvwe`Y$6KvLEM+9g%6aRx>i_3#-cij)e zf#E)YahT-u!NDEOl9=MfrpJWENu}$XzDeL)oBovu9pRIoewxZhRyiHeg1)DfNmjLg z^a~vDAiNS6`@`t-P91!m2Xb6h9_declDWluH}BZdYyHE|H1qr{!5eJk<-;(Gs?8iM zA}Kf3e)KW{YSi+SmB--YKDu}6P_qo%H)hE-hZ(yE%vO(E+Rve}hXF+F;KgmC~p z4g+%Fb@+?Tu`wd^uUl)mS-7!l)H7|m2CsvUC5!MFwq*?>CV$NV%@libfd3pFdDi@9dyAT0P}{~S|L zKk$}*Cpz@KjmwvC`@x>WQ*Jd09WlLn1OD8L48UoXC{e{@4|)~sgTS&?4U8itYhBu> zHR!PpFK9XtOcH8+DF8Stu7Fz`n@TWj^gFBt500w+Uyz1O9r7=aKJf-DLpRE@qRRNK ziNEh;!1y8wVH;HE<##a`@0YLBJXtYw6%u zHz_mHw{(45@F(e)C?1Aa9N##82jEo6JpL@U0XEg$kg?G{N?|4-Mc96k4jD;~WPi7t z{p!K-wDeXgY22s+^bF1W!lv0Nr*3|4?DJd96kHBxNB7Hq6eu({YS1t{&!91vV`1Mg~$XouB1bR1vzt=9d-VZ>YAtGVQzgyqe!4Fu_3bf~(<$ z|4jLTS*vJn(}=-%0(}1}W^z;9le>p=!Nz!fRKYOL@_r;L`>74!9l7;a%8MQVO63KA z6v40UL!@QBZGv`pnv^Z|&$>nUCQPSQc$(mHVWEl%Rk^%gvuS+eCXH4;MVOKT9Gk`l z&z8r0Av{-}_a7Is)VhM9kYPPIAxX=z5cHPZYoc^7m)AT5uzR*O&>a}lLC7Pa_^I;N zTb4mI|29p{Iqelj_5+lpLNO69IdWeD#wXyzy_D%B97dT$eU*9iHYyggLj>LrP$|Kn zRdeVFHpBvAdDSZ9fg}q3o1LGGuDFr<;L!ok{{aq<_4zfG$=-v-9GbQkY*W;#y+1)P zGG#&GQR4YtSe?23yZO^X$t?MGfHHKh--gi80aoO84`)$G=XRr0rp1_zUvvFv7#2WE z-cEU$+{(&34++LArhwx|{tnq~jYgQI>7t*23rRoBWNb}jn;+Hte+*>9c z1KXRH8VA=b4iwQ1$z5Gjil-L|3NuC?abW+&64`vUPM1aO+47V{phx%4Y(R-agjvwP z9rq;Yd^Cnu9Asz9U#yv!d)E8lMdMn{y%ys2HeAvd7H2fn4f8kcW=;!;KtZ=>#h%$Nko+6vaf5tKh zPL-6v;XbXB{qPxFXXrCWm-D4}xl_O8>q`M*u0LgE(6Rcc}g8 z-^+<652@kvzxr}{HYe6Kn)=cMr(92DWlYbM=?HEtplVkz19Bfu(ec1bKLNx0KsL%b5eOHhVpx17F1sxy=z&+1F09Iayk8XvaOLQV&$1S z$Mw;K{JAL8wi(e$C+O%DT>=E%kCT8I1nDg9%>iu|%izL%w6DHU<~+S~XUenQJ$bM% zc|5h{c9~Txq>up+hQ*ArGGdBYh^7#eedp+pR7NKta6j|PP^QwQmURU#avGI~>U|PH z26G|zl|b@kMS=5gZtak*!Rn>XjQkxx<TY-Y!OF0PdxMt9vt>oj}zHYEG1z__r0!Du)3O35H%pq>+FxBijb+= z)s4UPG`2TtWqSWutD|PwDJF>*df$u6R&a%TnZ&iN9cAM>?%bj%+N0P%nctXQ{)*Z< zfIVTW#FufS-I(hP|1+NC4?n8<=+s9$Efp!Yv(dm)$*@L_o%#i#=pjWWF;vnVH>jp zCTe|>+Ue@9$QQNqEh~;=&t~ zGjldacUT_%;^+?zEOLcI+w{Psx$#V7&eW?h*YmvC3u^jh%$gGV2Zes%PY zzb(()1Cn6}uTJ30W>~Eklx7(@d`rhlbWqwEZhAtra7SC0ka1`35%~?ZSn@kQJ#+n~ za)rt%o*EZutyZXX{DK&MTYf=WR@rim zH6$(+Ef~CDnfmj&qgbp3W?K=`3pkI^4xWvBMBYM~$aAfLdTW|sFy54;Re^}_-> zr#dY^@maEr>2OHZ3DEIKd#+2o=vS9(jt_6IGErnQ0C{;e&=xslqBfk?z zK-sz3@8E#Y$Az`c=KQW=B=}{EYh!w(*Fx0%adiNnv=_^~|M=f{d($6Ft1I2}Z|K(T z{?ck`JIfTAfXOc{wFHK83@0K3Vj@l?2?mT$Oqr1x5hsWtCyG+puG>;ety|>}r7rdT zk_M951~b|Qj5^}*fA#O#?|IhRfy=jR(zOM|ncjE5d#}Cr8lLs6SF-w@FePT3ud~Jf z2$p2HM3I^UwhOFAB|HY9$Zs{=Y*ck}=n+6>PsMa7)Qi3D0pVGdgF7|q;-!U{zUc`^ zNExGtlv2Wl2riy!hSybrWuA<*FpUV>`d+yJD)~KQ3R9$T+#KE$Wzvt!emN|%+dN8O zA&xzIlyf+4Uh(fB926=&-XVmSW|9z7>i)MDCge7>6$b-fjR1Pt8}|R_ zC-3+Ns)4#~TLFn1N8H3>H&E!gJt02is zwX;=j<-&4@4zCx!wF=@@woXT;#8#u9geS?%E+Nzv4Qx%Unw;TrGGVU~rA3i?+D`AC z{HC0Ibmryg3wx%d!3jhS5Ob9V`!4hup})@-?DpY4PS35tVk*LDPZ&6c?B=bA7N>tZr|9P_e{BAqI%XbC39|TlA4u z+OKh8l*Vga7@$_r-XJD^Y@lvboJ6P)91vNumYessz;9;|a9#2}*)9QrW`!*C%`JAG zwr}@mV(b4>=PfX+qkEoMG&;7HXG0O8@Yp%WJFco&qiQEIJ|@L7rl8Z*kjzA#2VWJa z7>ARWnpm@(-$uU98i7UNl%4PPcU&C2bMB?cwU!7Y3`tAut zp*ML4bwqEc|B85fuZ5M1-6I+-@wmj^n<~#(DAta_bb6kbDcxC>3G8tQ_KrGjY|B0y z8*kOxn6q3*IVZ2L!L7PnqQ@>Dc{c(xmFa?WGVd_O?%xi_T$%dmhVg}fIvm#ICVfZ9 z4L}ZyZ!(4DlJrL|_95Mr;Wx?tDmdx$6SIBKpL!X25f(C{y9_qg!$R;YFx7X(pj0(6 zdMt0F#F_YyBPDT`=0$e1XIaK)_+A6Nhhekrc$O};VS`YX77BUqnA0In11ucb10MGS z<M#c8<4^U>Yuka9QPPRZ--PaM z;O!wkF-`rs#o037%Q0Oo`+?H$lVw`Y0V--aY`Y(;&V`7IcLKVK(U5no$hJT&Iq45mNC^bOZ;Y z@3r8*octKCm7iYl-8rf#zoKZsD+fZJQ^SuriOn*-xz2WLSpViaY+$wJymwIaS?lWE 
zEG)8s0{#*J4jNC%y?k@@uBUAZLGdnFudN8wj=NzPq3rNykM1=aN6ULtR6O@ZK)F{V?*^OW2hD-ocP< z7*oNc)>7N1W_EN>m8cPw1Em_=o}6A$qB@2<`m!2W3TTbjVAwX?P{KW7Sw<4GFQ#ZV zrPuU(-z(qq~wcE?a0^H>-4s|j9etJT)6Rrqg;oY-8P_-ID9 z7uLQ9Ha;x7Z~0BCdfpKMy6Q!O8*RELDBeu=W0o?j9V##ymxud0=hJ$jeQ@2;@hRHRYdf(t{efaeh&$NPnm?;a%r2q`$Ea4`V+* zbIkrlGCaw$C#gYV*Lw3%gx0~4eRpKoty{05E!#>f8nQKg5|{c zGh&48G~6!NLV3=p=vD0q&gYO0r@f$_yIl%IH1AkNl@et)_=+I^;J zS&7WjCwqI#Q+Ii~Ec@>T(i$EfQ7~#r;)HmOcy`{1u2z&M<IY42w#_#kXU3;4Qie77-+L}P-Zs>#;IR=}Z5t{p`Q2<@;Voz;7M z{J-Kq{DfpN=xXw%XSe1uFZn~^&B*5Z!&&(pk)lxeo~Dtp(uCHl(ukM>wK^aY@H#`K z-v{bXkJkI6&qiRb#_fhzvjsGav`qOc0hGSP4Q4iC4sDp=1G-G^!bUMj!D3R`y_y23!px=hvG(D_?5uk(n(wg*I0|G( z*=AC;K$OjgvCBCXjJ}5@;*qECQRC}5A)wSk*JZO_l`Jz)cyRveegv;J*oLS#fq<)2EyhJDHQ`;g{v zNCW(b9zxf?22i90e!eXD&tbXii9Wqg*^ZwA_{izsWuY}E40(WD;4U0M&_1GuU9(Q0 zke*eM+hQY2U~H{p4C7mgOrnO9J%;tf_2EgzDaAB#(8Q^r@EGuH4^RRHg}MjNG5muk z{bG&NDt^b5V7+|gn_ViCz6985ws)g{xFYUZS4!$H;g_~_U6n2#_aQu4(P1=&W%G$w z2nDPbimZtsr|)@)s*?|^1_@yMSRf{)@Wb@j7F9n2>rdHmaDaJ|c|~o(wc+vAu>Ck8 zkO-)2{ENK->8dClpd(Vp)x|P|#Y1FCp*E^j{Ma?twBE3X3yGspnT1Lv2t@fZ%8akW zxc%`MFrCb-ECWpozp*4w$Xi1jCzaWt_Yn?mBm$$L%0?x11pkc><8}_ayI|KVamV$$Uj-SM@}4xMx5R_ zc`F}wwuFPs(H@$q4=&vUhkEWz9=K<~r59a!b~LZ2l)Lh1zf47dtlW7C0pkK&AZIA?o7&c0&HE&WqmWk?n$(%?_rOd|gZ?#}trW<$olf3Mj%N_lFGP?6+)r(9R@LC|oIvU|7vlWY2*_1;v&Ov$c zss($cJf89S{8ex8Ppy z`xzHXC?OI9((&p0KZ}Zp-&JVK{*DERY@V{vUF#%o0WG!GHC)OE%Y_R8nWDi~%m@cV z*@++nN}$~Ix{dB(!^UXu;T4WP^z+o(T`YPqS^kCq6I96fWhW&c=ZNsCIWU6i@X->y zXHweQP5IrY*#IyDS~u1BhC&g8y?}uIRkAs(v-@%z5M!Zi_9%=s;+T=zWL~zJ*f_Jg zu6ovL*E#o(pn&r0ePUn&_T{*cJsq|2h8k3gWyKGTiK$ulV3rOXZS!g!QWv$p$!WV; z7UK|6XlG(&wigux3kiXy_$2L1*gJpcTM4HF&!iV?`);{Auz1R$8$_)V4*e%5)svz% zq{Z#2%!A1PXn+E|ypc0>BtZ&8Urt?QeIY#!B}F7Ri5-Pj(?;?qm|_m|~m3xI&KluuWO%ukHzbm7tbF*-9R9y=IgLdoD*s(arlSYlQ_#+UUN z(SaO~>yZrLTqa&L2=EjwW(XiNPrf6RgaFPy^r;8_ZJm^5xx9Bn&8+jbI4Um8pbU7{c)LGiu3Lc(-rIxU{o$% z^fQW`CM@EW-o$iyJ{|x-&dJ9eEa#0}tbNo4j%4~}asl^ROwj_w`64aEmQp!6G@RD=G^DZ*$9w%x{3M$*`~n zxHu=0Vm<*!)u!Kh0BEd2qh9m@Uf$E)j!L$cLYMAOytdI+4`)vZn#3>p#)@ zfynpo2_bv$R4${Y##q+xL9;Wci}qJ9cRlMPd{EofXR7-2{cr!4MmSz&0YR2lEGc>= z6&e`GFGiM-XDW6#x%)7`q{F0 z*+TVVId{(A6CXq2pBy1bGgR&;Im40QNZIXZ;yi9(I*XV6z_j$BceGC>(YUcVLmAyE zkcaY(Hg^YkQ{lZ(!X6>nur*&01+!Cb_^VqGdyw5Pft<^_EdOA6?m1ZbJU9@V`(fyu zd$RnQsbgyG&T)pd4%pV#t2=nD%dFW7g-&qUVQYsU?Q2PWI+d)qQCmui{OT zY^5rYaM3YO#c@alvWJE}P1%gk!)qn#!pA177jF%%*EXPCUIGEz2pgspXLiI(Wb)!y z`Pbq-nWsd{5=6vORq1|S67qe21TUbHr+(&22O{*N4lPgg=pN0kyx(;ys3{FGYd@62 z8>esj6=dublXz7tsogfcApP+MiSxbSe!`xV!{Tp4-wJ?>(!i7`AfR-U&k}x0s2cuZ z_u3eJJo?B_5799}&kP+7I2FWd=$ovd%S0j*hQIhsqDP)Qcu03F=3}*7){le$Hrak(1^mZ!>4f->$BNIPz(e>}mq^GA_PiR5v^K3dP)|uu z;4~@riRd0YM~BrM*LP!$WF{$w#QXC7>Xz#*1+?#}5+(lC5cZO*Gei`1ka)m%Ap#|< z!Na&%8NDsZ50t<5Pj^&4kwX%gtL;W4+a}`i@Klj%NI>2dHAg>#yk&k^%Q(ajgtnnWAP`Ry%Civ%n78EOS#T1*8xtxX2}hTyALwl|Kh`6eVzDRB1C z$#%|J%CJ)Z2$i#6B6P2p(fgj3*)i3&FRWA+>7X>508U=E43golJoJ~ZIsZJ;75L|;F8SAD!Nzdjix`3^fNOP&pM^stY!`8vEqUmbvy&)j%2#JC z=iE|vL;4cH6Zr*iVtg>5(;&hdOPri^Q(Uj|iX+@(4;Yn(X_)heH+(mUusc6i5Y(h( zqbRt{%+2@wjEbu$4EV9%z(7zv${?T<1RpNbZ18deF@)jkH!Pb6u~R)HKY9(8?p+P% zLgK&y!;JX*qkG?P+P)G)qV=<9lq}daFDb(sGc{5mWixD5y>DxK#5zs6u(N6avZnBZsoOw+wW40;I(}{B67pETv;`a{y07piou^vVTY|{Km?DK@X2s5-6O)q zWA1oZUiYpZMPt}k%9fvms+VK059cn2gy(`sfB%G_MnolkSk7ClYpS!FRvwJN0JK#1 zvq6i(zEs6bSp~T3-vMM&H3W93?|=SW7Ow2#%#!TM0f@eaSd-U!K_^A&saDBr{nPSS zo*PlD9^`Ll=W#qTq+XNL6O;2j-}uq;;N$-3>HiIrNtWgi)hi$a9kmdVOEKmseIeca$4%GQ%l{UuGs9K32C6ix#g@>9>_ z823r0#P2X-`qIg2h{5*2(@ywvki5s+(8X+NO|# ziK8wp5N2a#4~sk{y8-Q2iyA$uxkExP9u;M^aNC8dRYRQ`nDOTmgqWJsOK{vdp>H$Y zdaXp%>Xp*?`qh2pr=BW2a>}Q+v+9ZX!Z+pHpLs$iS)Y|@zoLZd$sa~Ugg8td-w^9O 
z=8MuSK!sFU7fnzJ>F@HK*uuWt=-v-6mM1fgGfv1=1)d$|WpWa2AH8MeQ5~{GaeGoE zGB_-rP?ELf*%WUTB=X!br(E?tC=o_qS~}&01Ldv%dCMad5pvc-vID>7pO2*P-}Ud1 zdtAh^o0ww}F=;Pf);_R4&(S*=&0S>1BUFqMZ3vf;_c%V&+#--HZ;uIT)crSeJMe>& zDay`IgpxD4CBw0U&~BH>1XU6zW$3Lt*vgQ+RQ^ogof1Tq2zv&r7pGe8 zk}8SV7$$*+7N2&34dmN1-bwtzY}c!uCnWl=Z10mpW=@>~_(;;&6Ffh}JM z3R!^~4+B3fF7lnohV*P8c?g`>X9~rnrbtr_8rzjYuB$4>a*ypw?~CJIBigV$;e$lR zYDFiTB=rEIBk-Ucw9Lwn?TTy<{#3c_xe~g)(XzZs)ECrP>E^32cV@~nAR>G;L9YDv z9_~gBn)+~{oW?g1m0N^ww9&92V?K^Ff=q}K8XudjX>?BEQ&I`y%i+%_xQ#P(%svzj z2A2EWv^N;m1m#3zpgo(w1yz0(9y;L)NWA@EGCsf``pa6a8FZ*eTj?E|4fWu|R3GIm z9754Z+K}=o|9~@}1!`U)ZGMgs1l<9bfmL|BvBvO4XynuAZY0364x}gQ_c)yZiA!xk z)1ZH!1lcV*nNTXOf$v_A$k^|s12+Iw_(`=horV^?HD?OTe|OEY$5~b` z;a6~e%8>PDWqh>_!`mk$h<&db0^;mRT>Tu0fU8J(Z73I{hk1J~5ou$NeQ#db3pyfh zMkr()dI}l`Ou)~}=nW5$^9@`zlLDIsXpVdgf@AA9V!X5h&}sGt|Gb$}c@2^2s`fIJ zg-%3RY;nkuGOPG+DC6U&|v}{7g%ElomR`~MZ*~G&;NpQ#6wONEv z_6p6BXPx)j5|Q8l^*w2h@`yj-3~Oq*wT-z@-bXynm7BuzMD4hq3Z+EoH<6V@?KYa9 z;riCv!kBFZNY)4Or1L}^a``}VjKwRbeS)_lsfqT-~1AsGny?pz(xs5CJYo1S6 z%49s@HjtV|tJCm$Q{`Wts8b3Ef8#Y$o)SEqq7&)tOpBLCWu2bDsd5;|fnK2k$Jb&% z=W#(~Ow42cOtIof7vLMtN9D}ia=GbCzxrAU<{V@a)WrK%nU9ocJ$ni?ImoHNpDfMV zO|8k1w>qm##@U;rUwZ&>{v9@+H4_|XA_3bHd;5?$)4n&-s={-%WX5Z=g2lv<=K-V9 zqLnGLhexgAcZ{Pb@*i_- zT}g~TRAI%RUW5kF8|jGo{*{XiPGLd*Ai ztMFcB_Ut-sQm}*?9PuhGoiQoW+?g+nzB`hRkmjYQ%H`>Bw?Py4t@10&1MDc31}3KQ z7S&P#Ob5hSc^!kJR-O?5$Rn!ZxIj-6!6;#4@^~@|yfOmcHEYjB%Z;PqzUThdVF|`) z++)mV%K*KAuMzH%aq71Y)ARQRm`T_Nqa5U5ayb-}G`oB>H`$?oKp)fghq zvKGH(=9WE==vqyyH3~{%%&*}ujP5;OCKl$sUFhvWTq}KUv#!uRq))hOkw@nF_nl-LUCD?EvT~)NshK5 z;eHPZw;cOy*B2YXry6B_+M2Pk=$D-IO*FbbU?Fo5n7Dd@}+;!CaBt(qB*_;AH07N^L2+0&xPV<-mEGK1-X zg!+P{O)*1!LmVHFh%4xEKRf<;?UHH5dxkjE%T+Ot2~{KxvRJae)&kC41UZRJM<|(q zlae#O%6xa&dofTkA$bf^IDIc1K0Pil`zgnLjZm+mEgtt31Xt|0Ue3jGH@}RrOA0N; zt@D`P_CR>~2N0{CB6;=Ok%I49XM553%(_0`!fvfFjPL|jYWOg6ww$YP5%5Yd=U_oGxgylH0q2T{9fzqKK%Fp*Ugl;oUM@3ni+fS>;$Ad-B z^UXLvkjlH9-Clm;Mery3Nv?1}wYlX51~`Zd%Uway1?&81+`-+r)2x8wU7i^o2)LtNwV<*!$aJE^jL6 zPTnNDf^YTFD0Ujoi%$BZrkOYjlbB_95=SK#35o#x$<_GfsOerv3@8G3Y73tZI@%y7 zheP`;m&hO-@*n>EX;P{3CpD&zt)?(3Gw|;jRz`?N@pr0A54`Qoldr!0 zxhF|1D|HS1qlx3&igf3GQN*Hp(f$S~JZQRpW3kg|an;gH5NY7QneHvG?sB=|Kc7o7 zy$a)wy5d>!%OKqrJv>-Y6njT@3^dO*IKeW1+!jeJmpM-shr&QyW_eWIsNt3zEVuBB z%DZB*XVG9m@GF^&$02SCQ45ciBk7ZT*t-gRS%pxZOF*t1>i{G@8r)Zm3l+@ynQGTr z=-)j@>*aRPc3)F6E3^_e4*?KVYq*VSOTK6=C^g)5CPEj|f5^`y(AB;(CQ2Cj|AM)W zU--K{*pDV-tZH_rGbRi;6eaGTmuWnxPUbUgX;!0sIXy*ns z0rCgU9!bZYn^~WzkoIOkEGu+L#r_uLs=+xEiT0+FPGW5aDi&O)?G^k>L8>( zG{!rCk4H^}tS4tPK9%{AT-iGGsLvXPHPNq0-tZHy6UWMJim4K(kxXFh7|*T%7qcr9 z3$Z>-i%O=|{U921&Xu6)1+5s#0%8*3mMIWLK}YhDB9L9CBh&?*w||j^GkAIO$mAL2 ztZc~a*g{4l3<7oZYo6mu7ef_Gc)_~q>XJXMU22Htg$Mok+ z(VZUbg-0?ecT+R%3NqpxOv^MCh23f#CLmTN2LUi68eIxs#@~BvAoW4^= zzqb(MNihA53q>dyUFr#e9s5g!^WsH+fBE^cdMVwM8XA|?mW8+zv&@cg{4#%uG zejMtx4uE)Whgw_`7=3m==Ij2+Z^ZEX&dIM*ZHSEv$d&IhKL^`ZHcEt3fVerp13%rl zN0K;81v|d>87#O@FP#yb;QZW|mzh&ezRD4=VISk^=PyWXDP|RWieo5%IEXlX$bHLd zIn|uVSEH{i_UI|sNqEReP{YjuJSDa!j{^k9)YZA6fU5xwy7OBx&~$L)nFIG zrU+~uW(XDIF-Voe-SSMHzffL!+)75#gEX&S6#Z$gc4wTWo5L*JX`3j`Ljy)`+ZTpR zrR5tMbO@LKm4WifaYoEm^njoKQl(JvI>?2^P6|_l^Pxfrd?^ul5Zm@k&WWWm-OGe_pt>>pZiw*GBfQ-;20mo&Ses2#JlJ(;`qld#?`D1EWYrJan?cg zyMDv3>)K%NSWegp;E6u5x6*>53oks6RjrAfCo(eXEnVW2Bx(%1((4{CVctQ6&VdYm z`?sDMdX~UPmZRz7tNAHLM1;y@zLA;^8)*U?at9M5e~n7QPVzjXTES@2lqRI}U7C%t z+uuC-efBK4SS~zbbLP1G*b|}jKl7A_hA0&aT#w*3WnbPX7yM zng=>2TaFV|#e|TLSv}8nbS-K=7~7;HpqkIou|={-p+d3NLT4y8K!i5vBD$2lNclX1 z@u8dfY7tCAZct$Zz6pVzuPDj#&KxrdJ=6JL} zRDDJyBNgoo-WKYQ_;T=WsCosN4-frFow6{qas@~ap2?J9hE?`6&#Ekgns4FY%6I_h 
zLsx&T2lD72D(5`FRpjZ0cNVRGUhoX6JDRM#n5_G_kfUGw%5};~C66g#{4H4?&8gh$ zm6m>?CPr80mOYNyp?lEq~$L5TSKrK>qmJlCMr>%-(U60v5)=52f9EH;q;x ztYbKJGAMMeQnSA?0tvDE=|S#ewOnoD@dWL7vYVU#b2v8?Cd2d8`Ot(vD+7lI$RR+E zl0cw`L>1G17_KO#V~8fOSYmJESypZxPb3!V*X1O@i2c>)O9GT%g~-ldWSIa^h2ZOW zRW~3@a*fW%5zmpKKwsO@%tem*vd71)AmR-GI6*$n;`+)HU)lB?P)*EfP^ zgUb5G(!w-t*$K4&h@)ECiJuIu{;E%Sb_uIPrWk~8tT5j{dR{Tg<`#NZydx0`@K4dd z{AM;i|0DrHeL_R@%nK`bmb2Q|>|#HDg?1UVFr=#RF7xCLd_4grlUL2K#h=cOA8sl=ToF?(bM8pj`EzFO&!JC+dE)e(eG8 zkU+=UWT+MGe$oBcV_|95V-?9VVOQsM*?QppJu!6PYXTyBWj|utwbG!eDdRw^-6Oop zv zqG1_Gzvmcp)z7Xnp()=AsD1=OME11UOar@jE%27$n>s3eulylaDBOg;ka+5ShrSRY z{s50JK)EuH?i{|d6PQjm|8_&qjLua!UyMK>TetBy1qtOO4$dp(vd1vMrElo%dnM|7 z*=_4q$!9#}u}qo@I4d*_5xx+!LesVJsD&2C`was2o~mWyYV7rhBCBgg zeD!_B_P_K(KRdBP%@b8#ezIw%JvjB@fFac5ZTM)3`Y+Cb3^EDa6U-kfgOY7h*4XKu z@Yl+dulkPD9BDeWBXb6kte8YlW-;aC@C=OHC!bhiR|Xr~Ljs$kH1hPxth=-+G`HVb zrC$Q)4Xk=rMd)(WS?$N=IXFDJLw$JcRE{n}1{Mg0PLkfE6%v5ZVv=rC{qSOW!CyY> zaLsDVuZfGmv-#2m2oe2d#oDqCgf)Y)CkiQv=nzB($L2{`kS$!RT=8{%y@9s2pOv*4 z*P!{qMDR~?!H+IHW+#dbF$t=G`?QVXJNMc5Mcp3^u_lQ>WtBFp=s!~rKC1IiKJeW@ zwLxMXI~b!pfNT4Mfqs5*SV8R*q36 z34fbYh!R#)qIN#edC1i<>yb7b?rp6_@O4OB3H{`kBf9BgIa_|%B!jGag)r)c?^{g! zf=URm=BHG(-0+MjN&`1d!tmMfl$AF$c|E838}&?o14@?q;*a|GD0&%R!Ef+b5Pa6# z4~#6oEAdcQ#OMcBK{T&4Gz>DLVrLK--Hr4I@tR~*q76m#+C$*JIH9_XqT|tTy(|k< z-CD#2Rwqk=pvS?$mvPD8^g>~zCZL16ChZ+4firqSHbj>@&HpvPfqA{(MsJ&enUA?0D;V|0&F zm?pyw;|XdwmQ2Ip^PJ~OBYzpD<=DuSwbSCnk07%SlgPy39Jg@xCEaK9d+EOh zNW}=1?!V(cLSBi}zb6#z=c)k^O|a+Sfd|S1zAx(Iw_pavaWVS6H5ejzjle#^!2BA3 z4Jw-Mj{wbDgr6^SzDErWaKbdU^OQhMlpVKGM z7U!J@h@HpztSS<}vpAsfFA6@hb@Oyx^Gfd%2RKNn!J=j>HUfBylP&XIMDvffL){W{ zfbYil(VI+c=dZwa4)S`S=DAW1R{h{^pq--UYM0L&>UoY6Kqa< z{L9_@iqimM+Q92^pzRNdL4?_cWx6@GelsmG4~E!lPE2^mcLowif_@B`x5!MwV-z%N z^a1tzeega}ZMi^fFAsdq9OC*Cig!=Gv`}0tTb=`9lLn~}m0I^zr-=#mbJs#Jat8+| z8)*zSfV4Ht3lET>^lvHD&g$;Y-sK4Lo}s5W+jb|wSQBO2Li+Fz9{xemzkDS%3){|7 zD6F7)`Np4%5~J~JCmB*KA1lQwNg1(Ve&TDyiMHc3)Sl*;^VNXcZAWUBkVXj-5CVoC z1(9Jg1(+h~gH-O33;u#K4WwM8S^dboC3s-$Z+dhGnxF4B>YSFRO>sRbLkgWdb{Q$X ze!luqdHHcmJz;)w0Wh@0eyd^5!)wHoJusVCWVrtq>#I1rJ|H{!83$OjrSi_au0}Df z+91YH&LAoYl)DVr5j!IYH^O4DgaetM^?17~ZuuhKfg*DnGxx{(DCE?33gYTPu3YP#Scs$avnyIK0mN@_$DStyQ_%PY*Ho=6RGhO1 zskk}o=?&D!%h=y%b^HdRxcgm#R_?eSz}CkphV)b3E;Hfkatt3w68+QKpzdDl#w_u>cIKRaC7PCd{ z=-Z!v2cV_RAKh*0$}5rYN9m+67=qVf5{PYo8k^UC5N4c;8ZcqVGtU?rbsRw zg_u0ubP#&N^T_sQ~LP*u<4=6Hab zl*2&vVX27Lo@Jq#rd1^Z8U9)c~)v++V>dg6zXMj#jvbx`%m4)b%$f z&LMnq6sJsvSaCGta#8;5XTk>o9!C%>*J4sZb&GiqcwxiKPS9uoWM*U2$1!J?Hvua^ z0JUWmm}VV)Au5`)@7@D&t9ZmylUX`Cv&kl6f)~a@?Vb4V{cqjU9`Y0AFMRK-fe+Od)UkcRS=&K4#c;D)ewY;5q6deue zDR5A}aq^XQEAbVMSOqIv*y^$BMybP2^mX;&;S$D`|MutJe+snVCUm)Mugwf4ZHj*S zHQHo%Sgvn-PG+(bl}zp7x|dCO?58a(-9EjZr!>U~9NYporO0xt3g$E5gzLENdv?$Pl*{a7D*&eS45yDrt?k%26hF!|N7v2fYfBI>Df<4{L+1umJRH!@ z%U?iQ5VHx?A;jePGfj`0r!rqgaR0V7%~c%|U0l-hs_lCt^urYAlDh2p(PZx^GURVp zO5s7S)!9UqXGj%?mFh8t41WL$1EQztBcb*P#}CW&q=%P8N7w||RZktZj59d> z`Sq$_T3o)$U2uJ3VlX?1D3NaR160p2B!Cbi-9%zm*?^SO1>g!AH5?LPk90y_1iXMf_m&RZ@I^kF&xTn8L;5 zsj`as*=$I!j$KK2L%3xwyr#?9b!7Bk%L`tMF04f1m`o{avxjIu=fCN4Ipd)!qShGp zIJr$DNp(8&7@61FRUiWW6XiI{o+=~Qaiz5L_kmz2B%R!j0F#eId?L)Mzf|YuBhndO zJ=L{jc&`>=WJS*9#qzZG2caS$%7KW>TtPA(=OpQx zsi(azG|_63XsRKd9iMyu2K45cpNyfLK2NZ|qKBAu?4wEaBo2+Yt(sP)F>4)@cV&lH zt+>3rVAqYR-Kp1zkYLmt<~|@MBu`T*vbKxS08$=L21U8;om5L(%3Jfdk>UX7i=Tn`}bMFXDnc^;(%f?-&|1M{p_25 z`i{cYZ}?NB|1HpWR>ghcGT;x-xgbpYp5;m*ojJcyEmGMmsEdIPdv9MgSgZk>ySsEN7pkc0wW6;NzELhBT zygHTcXNI3d4k=(IwmsO>UqH!XQ73v27#wEbFpvMFKTv-eJm0jJdJ&((S9+c}!cHz7 zdwIdTLp;&gi$z#DKdPea)yDJcFA>dGMU5}fVfB4=0Ug6zy+wsjBi}dBIKeulc0DCf 
zJJi4h_xIeM9=v+b9zSsRu=7!I6wL&_aD^IcEXyoFz#!X;5h%+VFQ7#&QLBhaSfL;3 z#IFypc3IjGPvJ^x+KH+!&k?8Xn4~k0@&!7!u3oa*QI4th!ftT^htS1Eyh(%suozg* z9!SD1_0R}4>|jk&K2RP=VE$82eYH}qr&K^8fJ`RMSk>cFb^gj+HL%%*5g+Y}y*U45 zTUnafkYAV4M_yAAxW*5j-98jPpnr5s>N|yGy(8-L6{bp`Bv@FOwN}rUt9#NfMxQiA zeVBwIFU-f=DH~Dna{fU=CjO{){LL9f8rxf$AZ#KSG*-(qSA0-yYc-@IYbmK8c<1T2uHq$pvV6>gOPX0N9nh;^u*4Wu3>_Ih@t+9DPyW zddrLQNcjtoZku>t11DBV*;P&*e=NT)IFxyQo)}){*gQBB{2;!qq}@w?R1}*g$1iKi z=V3%RQ7yMhFcCjlF8uJKM>k()JpVK#inybSFL22=8sk^FIrfmhJ%Q@?-ja9e%a;}^uGo2?==$&7kRcmlv) zh?>Id2F1-yEghww9y4dneyU!L;D}z1$u872k^_Xg9Z^$-u)s|+3=1(kgn%?^A>tO( zSZ_edJdW;=Yq~DCZI%J2;6!;qIB~JIKX|gd`h5>RvWW?$&^awEWNsHiO?Y|!39bs7 z0@ZmKc1U&oW%e`;HLKIjHVn|yYPso=U+IW9JUYVK;UTj*%S6y2t*SI9fiV=T;3vk~ zIqWw%3@QO(7l#MAkBLu6&(%^8tKkJXqLD@#k|E#RB-Uevwhz2h!Jx2Wn7k|jM%uv5 zf9lB*_KyxnVH|QmD{oq6c7}s6B04tUxBT?z3LKNjZor5-UU|>QrLqydZI_d5`GxXR zizwPH+s5nYCjNb9pPUB}n)jnm;C6HrVEs^kIE#MZAM(GVfB6lDgDr(9`VAe~wVRZ; zUOkctu~$pb_vx=wrr62p(+WcYjD!3fbL2VW4`* z^LXX(m@$8Rzikm-BsQkqEMprSeq^#0RjZrQg-K;s@tbv*8Mkn0UbRR)3OSIN4?UXy;9 zQWTWjPM!eVho?`lIdqe%eep9@YQ0eG)A;5Q_?w+4G}6{D{g^Y!;@J7F+IDY^-psoV z*EuGb`2W=od?dNr!iw>m9>eI~6Hg?o74^y;Fv=6Mz%ab9OIv4qzSA0xboF z%hR)`gzoj2W(rDM>?o0}I6L*Agqcv;o^1{vrS8=qEm8&K2u@ySyT2#YJtVp(ALb)) zwM-A)DWQHS3wY&t7}Ev=uP}6#48fZbb=u`5JnnsQHz#@hRGAJ-9yZKS?5~xdy$sYO8%ex0#?4YC|a zR`z6o95P@SGDwoV1C+<+Q7RB(qZMu%J|DO{zOIIm80Q z**)=n>~XIug_a<(13m&*7!w(B{}q59SLhRD^#UbDV%8>h$k5zrrWfzOIwu|*tOWky zDV}mrYt*d9$w!X6a0=tocA0DW6i*p9Vy36iuN7#Ma0ulEp!N48Unx&{lCi8)eM|VGcg#4ON1kP48{zQ` z+i3{QB{_A~#zH%2NkR?O;JWXz(1RA&8vxUpP>Y_(6xS+XQpUga zg;X>&kxRCWr_>zJl{b}H^%pwG?(6YVw$12$Z=)jtOVmwLg>d=`Sz-j{Iam-1J_xy?|B4K)uLU8W8c^Ty*V?j}^ z+9N4u3si<)TJley7n6*a0Y^ZE?8G7C33!VxcUEYqw@5W_laC8MI&oXb1Zdg!w1|W8 z`G5L4kmT3@<^L}K@~?K^^uVWkGsMQ`RHW2|KE^=F?-a>-CdfXs%!x?tx+yAD?x#J1 zyUr6%Q66WA_cQyeb|Cw`mBZ7S`ho>)hlrlPTB~&XK+pU}hqR>Uk1s8#W3vHaVk{>g z`N^>6om=(=W)%Bh!=m&Bu!`U`&Glzbm*Z5Oh%|*gI9pARyrXN!yKCe~w*$RB;K)H~ zMfeWO9b;p&=|75o6i-ld4qfgth7q<{e!4_O;9S9-vy@2(MTR&Dp_Xz{_Dz;!iX9W25Ggl8g!9;|zV*b&q>+#0KVicGKUTK1p zq`C(F@M_r#kQKEB+Yv!QxeA;rV31`%d9*=j%Zf&zb8;q_G3jE8iTk`yHa64}OlO#1 z%hFt%2k~eg`3s4@2LZT#aHimK6{9+bz~`ZNDD%P>h;`(EXgypG#NpnZU7DhKJKjJl zrr3O*QPxe|GJc7x@db4~-UI0SKX6`TE0GE=20QKV%gCkQw!hE0tR^YjJD%G$5SJeK zkhZ89Bf__VRPX&W8*rzBhWr+=ZPiYx(jXVSt2rI)_^2FE3!#QH!+njzvP6(~d*5St z3PaRmz@kRKc=XFErAa=g?mm{jsn?~D_WgC6$pW>t>W#_oHw|}~VM%~_5<%&m3Y!w=vI2%XzD8~0FtE>`R zuN95H1QGbMdrXina2b7YL1IA6-AekB<0xPEdHkIQCt7o7ErYOvR3gf;L=s^qT>rmY zWr?_^kt=jDSEy_~3`V z3+eyp?VPTKEJc$k#96o-YG($gj52jUp(qi09MZhL4Qb=?cI2=wx9xV9(F=L&t!?rOc~)<5bjSHxdv_Czv`zLz0@|vn zmp6atVtLI2u1RDC$B|X9yu^J2!cgD03s@0~^WqXjVR^?A=x;3uXzj9YFe?26jD%3tKN{~%~oSB&-tUk_CCZkas} z?i$)o;0v$#skh+`xf=ALQUQly4%nadf=PqOcS;AL!ig0z^|P^8KBwV|Z$5ym6(Cc? z?%8Zn4*}i~qRTmlqyy{3v!K2+%9}(l>5A~J2OAs2S$$i*A}E(a{B#NRt02VO;t&2U z>erW-m9e=OG`pmyHpv1-m_xC%g=v3T!7u`u-XqXK_r3cj+ee}v&ACo45aE0`4neB7v4l7VW=D)M5sImGuxXx8y!Z^Q*;H1v^K5Qv?yTNNP{S| z1T9;$bqq3HcPb*WIJ~W2Em}-OoO|NQqz_bh{w(Qv zEJzhEG&+oudoe}S6v`TlJKwrhHr6fC7@m`Ji_`EPc{ZN>X?Zy}&FS9}^z=vS(di+{ z-+s$-y0MLAr31~#BsNVpI^@O2mc8iU0V|Rx2}HwEauQWlCg7l#6iTH5Nx-ul;u5`b z)5L()q#5pyi4oDs^%uUrxTJR@^U|ffMTi`lLVS61XIYK}x2|!+M%iudvzvD^0Ku8Q z?x`VD6Igwb3JjuvUw#FgI9}>1A_$@tS4S`6Ng&S zS+v;zuDt?h;@&{o-=*b`@AgAN28@cXRY2ItfFNAgYfGHG3#N9@tzHNq++_rhYVTpeo} zN9j=^j@F3;qY|*8#|3Rzp7)?D8F=K}m3t+n846M{xbxWB4|beC z2YH<=K|~YbPYCcn*~aZa+>Y+OmV5$`GZBizH212bo?+>Q)Y&R&`C!;3g%Ay^Ep<-A zn;xWT0e=mC$j#c;H;()1WG+?SP=@PX1n?Av8c=yv>$tqZS63#$s?MCjjjk5n%+-MY zsAbykO8%NNALlK@AD$fIBMBL*N)!~@k@S+kF#%g+-;;oefCmWex5vd9J7yF-Gc6K@ z$w90Nhl%2vq6qTW1eC%gCR~vjkoBR6B4d_Tvg0LL0|F7lK?K5W@-@Hica0#^nz1P! 
z4j_wvAN$^NrxSqSy-uFm67>GbdtOIHCcx9iK4J7Qzk8p^(_5XljJ{UB=eeS4VH80z zZPV-_Nxg4^Xw-TOh6(-WSH9(y0rAe6M0268ah&`qQS=(7$|2B3K9C&hYP2uA&C;N{ zC|G7}my%0@Z24e=*&+v$i%E2ZL-&mgdQkVzcMeM*%i>1RQanEV(=Fsv0*(MMt<+c`f_`T8BNYs}n zjkn599`1+K9P?Cd?n8KCdb6@&a;!B*-K<;dP*dbanJFIC$l+qyY%H${Of4}F zc^=WZ%-i3MzVw-lIY{Zi+}@EhT|Sd1K;>G9g_j6HqGjim#_C<2D>FHEHW(WJFsL{j zaSw+D<5R&V-=$AN-FLY|rZR2`aCc?eQw`M|=?lL?GDxF%;dNdC^7qS{u^D}u$L#i= z;)1l&%IXM}{vO8=Xa{i4L(oYXhC2RGlDb1?C1iIlw~S96`E?I^Pu2hi`za@6jqX{h zVax|U48(=-H#N8t2dZnH{4_(A9^{V+yfyI0c{XHeOh{o?Al;~g+Q|*v6I_JL9CCUk z$^zNf4rn2SiZ^mVTU2yNRd$hl7!{c+nC#&JfTJ=h@{X$*9fL6DG3D-b;D;Kx6)q(=R2ye|e!m zh4Z{8iO?Rc9K4xP9pCpS#KkTzPJ70d-2vAo!C|@XZ&o6ZFs3l6Gj}?RmJzVa>Z?M} ziGjX-0+FMy4dP6oq!P76+ic~i?~p^y z(sh)L>L+-vti~rwfn;JuJWW)?uGKnY8yGUXyF_P;R8x*ts@PZR%K1LYsd=!x8l1G4 zTwPxRe_L(=xo1y=ifMX)jbb-eSm5wx#27Crb4z!s02)45Ci7OzV4Rd#?pWlT8?AxC zv~hC#X1LcRj>;$~?(Vj9^!P!UM$)5SZqi8NUF=zR1@MxS($9r^39_zfw>v4bAUTO@ z!ujH(k6yHf!a+z~ci^nH#kMzQG)=-Gaia2veLiKS0yra5*5!1A9BFt z2kPVOzMnzPD?~dh9P7d-pKbDv&1~&&?cZ$4_a5)DpJr0q8P!2Vwa5ZL> zu~13uY2%dng6_5P@*_DHpZI0JM=rBWTjUq+^ubB7^@$wj&9@X@-Puk;aQVIDqY|Ap z0OH=V8(onv;bpwaf*;22#0y7F(wW^Nez;b`ea5T$hC(ZJP1DqUj~iur0G?+w{bV*F zIK4;A-OnR~fRwjYn+A$aEyO7Q@p201q7aZraCx%yydXW`a+L98K6DLz10}Y1tbA9L z8DT1KujO*fPpZ4`1R}q3hb={6!!)Q28VYz?470xk^?!8!E_D%Y|GVA+K zDh`vJS)3$M2wwZC`UVvQPzf8ofCNEww#x_fmmtR*|Ie3KJ1mU)+}#~*G*#(45n!yn zCJ}_1TCr3;UjEvDUMdS7Ii+;rT%maTC0sHY63v1wm53Q)E0o)RZxQbSfInL=XRBtIR}fUi9omo_Vn-JTJdfda8wp8LAR+4ZS7@I1TO8)b%PX>(DuKci>@dwI-e$tw0$e8vskWhLM1lH34 zAs>OU5-BC(lhYo*H19UmT%5c4)D&B={GlFJdEpPYRGuW3D?f(Pm2nBbi9Pi-?c3F# zZ}hH5ixS5Hj_o3D*1!l(%?n#^p}J8*e=FDCY-LUv+2FNBWr*h;G?bqkaKUz*dR>7W zp(2Ymow4b>utOZcGt9XXA?e%~TFyXL_{W&rij*`&2U}%h%Qrs#6+QF(s2GQBE?vxI zcbd%VgFh-4eO2LPR6f2MyMuY^sv!N9WXH3(??dkA6(M*BdJsF!9|DpJPKPVyj$^mc z(yF?(RU>(`)*S=0zHg}P45>TH*Ss%4CV(FBt5v2UNmKAS_Tr0}L3<@kxC!B!clRS^ zf*JCW_R4QP_yFvON&CxbBW2*91)qEh_ zn_3cS*-?+S-lCUQ?#)W21NP_2eI@*p+|5;Sz4-fG=1CeO=VRP5q!}ROSoHP&t)-7D zqDvGcO-;~eylC#*3GKO=Hl^4$W8iH%8h_B4Wp~)og!%;D48&UJLq5=?X3`=0=-%#h_ z>pxR2PlRyaN+SgmFuNG2Zf9^eJn!|LA^8VRxQw22NW&oazHcLI;oHB(lk*Ji9E-)R zP)MG`oWdK)A5;MS4=;M$Dt5#=@$V_jM0+Z2m3v;XEcFig{mG~P6&@NaNbmZ9ssdHz zsjNP~97{m6cgq)Ua!eH_tEf!;5x~`cZk-vX<1jjdjA6G>@oa{jGC@;nA$Y5e;HPBy z2H6i2M|CeU%}7-uU2#|UOHcixJR{Rf1X>q>@%q_tYB>Yr0D%<=w4LuWIoO~y!b68t zAd6%Ol*NxI$YL8frjJ$~SCpJBD1mUb!G~A3ZybE~a$wAW=N#f9df&;Hy@^t846vmV zVGP13J3DKDO#F1BCZfJ<^I&Rh`d@sgmE|)`hX2wR7U?%7Inz4c0(z zWY}lGqCFd9>lheuDMi|31toFS=!YEbt~xUo#^WOGRb?1 z%b~C$0e-zSVvkjoEpG=Rdq@DvG`sNX9#N_7b&pO#hAwB-mVoj!xFtDSArH2;+TRcv ztq?V@(}P&HuUcgLSYw_ibsX#Uy@>@EYv2Xeqm_JkYMk(vz%z*ZICm*7YxKrw$ zAU;tYgHWOjcwm*jN*tgFph$1rgG-^EBeLYX8N#| z;Q8g3Q@LPe)|x>707DMnbSF%n3+1IpEeFq*8OzO@l3Y-z%d_$>qQ16CV!6p zrb{b5OFkVyU-c8L5dJ@oE09w!ffMcQWN{0|2{@kDFA?M4dGclyJM7Z|FAf7hcahj2 z1Ss@wW9P1v%?J?Ug^=UTb;Uj$G|_?|%YX6XkI^~#xbod%2lr8QNBQswpJxT#vKdk; z@FA&Zuf|kqS{*0>`O)Yd&y0F4WNQneTjj9ts8J>1`wtBro~a;MYEa9_ny>%8k##%~YZLV^FX731{qwgN!-wuw@v^g0Y5HKOCF$C*xjcqUJl zN)U|3O_{!p;j5A;bV{?Phgj_qiBbw90)6b4FV&`r|D1f_-Cq$*70_Bve#MJ7Pu|p9 zC-3JBQGS}pLtIB+z9=J^i`m4cdgJT@x`m zAUwXP$`88CXo&8)@&n&~8P~*rixtOL_i>Ol<^&dy^AFXvb|OB4DkZ4x|)^DmU2*xEpg zi+ADkQvWliU+{b|8bvQeKH}ff5bZe+_7pGqEe{r*b{i_~=*Ow@iOs-b>sf_`CU_K3 zr-{p`*(F~L;9DM4cVTOf0-jE{HTrp6_2^MX$IVPdKVP2CT`P2=_Ut}@N<>c+{q( zwI%SIYiF0R{1r9P;mpr(Hmguh{|R!nfAacc;Y=(}dxFlNFR$iZqV2kc9Fu1ZfG50c z{YXs0a4QTv0+n@NboKq7UmBon5K@n^3C5vid*|fCll#7UeFYO;ZOoQpL&IGFQOABm z+HUF^AZkAW!D2$Tz7y68AieE~7}u{i#r8EtpQyNso>G?)%XK^7KE046F^f=$5krM= zVZ+ zyV0M+5-^3MJ7XY(uhIpS8;Qe1l>_Esq25(QoJ5c~%bzL71;OFTec$}_m!z>C{MVhO 
zc@{~sz8QUvD1H6y-&#=QNoPgRw1UCc@-t`$_dP^V%~T%{&GQ?I9E{#-n2fA%Nk-^pruZl;&0FPmSQ2$i&m{cF+Ec?1)N zWrUDdiB|6us@B^jiX4vZBF%BK*Y{O0XlNj7Er?n2s@1Q})!RVDpf`sN{B`L7-ow3d zD+~W{4y&EU>>m0QIG9x z)~;F)hmldh6j#Yf*nEOeIMqGl+f%un~8 zFH=j?JwGXk2jFn)k-42Nd$FbFOY}fcVN5&{=Q(rf;lBD>dDK%w-0vvtL2P3D)DW?1 zUb1J>W}wTRxTtDq)oC%z{xhX*Z)Tr~sS#5hTJ;!dO*2$Z9T ze(sU-f@fN}n658;{-FQehS;mYJ!Lt<7^5QJLO1}&q1*>3>PY@fqf-2Bfk<{Dz%8>+ z_;SdAybsy@2emT5gLvgwL6cg1<&g@n*Tscwrj3FnZtR}=0kKV^##lbep_%tGm;z2&(&WGLpl9hFoH!wB@MDJQx~^hSUr^@HAa1>$NE+ePm!jqZCP(zPpZN4Y zpU?&87vKIOmQ69W6j-G}aLuBWN(|!fa7#6)l+}jyNK6-z;70o@7y-(2 zBdh}!VoKx{Hb$i+*f|OnWh*$WljhjOFwX}JvKR$6FI1kfHuzX8s7A|IFZDYr!h0iV-9@O^0s=ZtSnvemeK;^E>y6UC;tjXxLztEL;_Rzj@Qd>t{NjH;z3naPKv6K zP(r>JeE@hGKdypb==S67=+Aa(?C1FVY*|b&-$8&x6&OOZ;Nf6azD;GmAM}(wz(;v^ zlFejFW8v#*fYBODG}egsNAHh5%v+v=q1>;8O&A0rWlitH=}*`^OxHUq`-@lD$O~mA zM{si+_wzW+lcy6RrRuv#S;(~B3wfsa9*l-(se(u6R0!&K2vU+bBXb;Crziiayzs-A zleZ~SJNbiGkEm?eLA9X*seXS|O#pknp2y3!tH6)X{6>wx!?F`os2#6k#uJVKd3epj zFk7CA3;&wHgQ!qBn3 z6{{;hn8%@2)98NwtOvu3ieREN23wG#6)?2$|8Hh(@fImHDxczN6tgJq{MP7?CsDXh zYGKt=g+H`j6CoZ>MNQ;UgAsP`1f%FZKeve5H-@oF>@iA zb+!c0@%0vuI*Fy~$0Gk54XEpaRj!k;S!(;i%?Jx%O?5SP;`Z?vSp35%zQL`_;DdtG z&klBu!`fnhD#O;OBwp`dqcc@AD{9+;9UwHMtuj&<3UM|8b>PuECH%)}@i1U*ThPyy zm;L8*xsuzm2WU?hPi1;^O#~SN=j68(_k%7oJbZRtr3n2jzj8>Ag>@jvA$YX}93k0Q^o8Lh=;LYw)F&6Pjq#00u@PmL^jz?D7bbm9c%C|tt z#*Oct{El@-`Fn4k;iKa_@}ZKl#}5HVIaTyABUu}QX>iZ6=67=gK^Aq?a#5wKV>)`=>t&L z`0uNL=lnNu(g@_SS@ARoFMAe6oq9R+kz!@FrAx|(64y;f#Icu&t9cSM8v8?Pwb^2R z;s?qT{&OX{>XpKpnfo8du@G6|6E&1#TRc|)uH8OF+r}oaTelGx?LP`jhe0J(vkzY> z69L<;2fRG?l{|oFN(7%(rxeGf@*PW4o+1|J`@Vt_O1H3`JRVXy#P0D}y;LMr-GU{& z*o~X5so9O`vsfoFK8-%GRI03Eua**8_93C$4w8|oFRywu1ojk7PKT`GLkWV?g#3?w zNK*PaIF|T2KWXG>YH0z+veoY9Ru+P^Bw0i~wfMj*DD75Rw z+(td=x-fdT=E^?z6zGr%`Vo1pS7M7T%Cl;7x7GWRxgwGR-OE8B-K!i=p6mR*C9Qv^GrjU}M;6b%%tjt$+{;qI~+WpiI zz1-xl(cd@X`h*mZNe1WjCkQcG97rcV__#k(JmIJ*@#zH0qPli0a7JnHgn&gRvaNke9{B3{8p?rk!H>ZjY(=BfvCdSz=F95y9K)-D7 z9yi3@8;a z)@d?#CRK9_HN|@54IpsAPaN7_;OxAwuFTcS@NgG0FvUWHn-9P=t!LdH$+J{JIX7ZQ zj_&<+t5wKUE_;fJ;xc&gk~GVT2T^X)EXT7<E-hB-WpUkw^Za2f9p5z3-Ji|KLTMWyd(*9lfjK zk=p7eZ)!xCTmKTyEPzdrT7=V^t| zi?WfEua$g#fArZX5Vvg045F#~*ns$!C!vqs7Wx!(1$#Mc5<(m*W6t)8l=fuvSmIZl zW1jsk&5jM+fQRLhB@}eEM?$)vTm10OE{M*(J646GdzUcO>Cm}J%|!S!&zE@*mGYph zbP&N&f_HHz6+O2ya(h@$5jBPDCC(Q@bi+oU&yc=pX~FmYgkMlFb@h}W>1R<=2$dr{ zpPmFP!b#HOKRFF|ufIB75)j^?t7>!983jOwYkFv?v>4 zLwRx@EaxtS@~2T7>fCaRCXK&I6UPxQNbKCab=R|8yLJflGhr+MPt{=+l|ggYIqQ5! 
ze!LiBw|_7s6r*0DO|dC;9q3oE6N+H3^tnKC>~l}nz7tL0YY z{BX{*4X@H<(L|a8;NuR9k@Io0gq~a4A_BY_k;|@>!3oC+B|riC-u%E*2S7zWGXUqX9I$)2>$_NE`ef)lM^10iaL)t3n#>z zPz9!uw2#jJmAdUk0s&(^C||3Ehl=-uPu^)VdzS&c^3tDCfzf8bP@164nuAAiu{gFU zcuGs5<5*fhip=S7od!uq4LuGKBUC_(MIEfp7c=LuNXE_sb^hoJ43!Fo^1e-SW;?+qht0yW zC5MUJ)Z>Usp#kDhn5#BIhpY+9jczLqZKA3(dX|owlSzD?Ftas6{22P15_MV;t zjUHnX|L4iu`P_CeeH(Tu5^=N{$$jFqswL$=Cm+{;$9Xilv{dGM6N~XxaT-DoB^0pY za1fpNsxuQ)q*qs-_k%8Hlz{_Fd5De7@5!=l6!^R}2YS%g>Uam&B8oaiu*VNR0DLuw zl<+d88szRg4{BiQX%`XA?>s5$Q`n#$h^5y8kgsEp)xNw^S0sZx3=EsqRSLnpU^uht zER!@4T^T^nkNjBqYmZV6_pkx9i@Stp^f;d^4?X0+djzulH?ZIct14$6$(LniXhA)c8-Tve|Of@DJlaEiBu6v%dVd{`XJR zZ|(~B$@b%lQVBB1ldt?0_WusW$_S>PmJ=`1CRhrW_lZ%5Dxm47lANAqe!U+7UIFTI z9wAmj(Mh?++M=AcRR42$>~rV8Ki|sep5^{#Mdc+JM8%Dw_iqph3$?c3=zs8Vv94>% zZm_`bA}1Q^IdFO4s_(gh3}?YNHdnYfmRA1)Uj_r-NM zmQjQ;Ao(wi>pZTt^*WyaQHhda@bmZ$weh{TG1v8<^NDiX9pGeodkd>o`o8%FrrNFT za8r~v4$n-ET@x23<`aQ^ByJO8rj5FzVuz@3iCt7X`r4W#JU6zNYq^hQ44nP^svk;} zXdaN}mb|||I)ASGrT@lnj}QYhth36Oli~9hmH~oy9rh>&ajI7m zp4w?+VJt5CQAA=Xdk>b^{9=;sbgLE&jY?ETupE>l;mtY~ z0TvGWA9kUX7M(-x4pWaE;%8YYPka3P;4?h_n~He`;;!1D@fr1oKtDWpB7_>zbK$Mg zH{bruKc50&J@vF-bqq{?rQGPkvY(j70xvz{Art-;;nf)nX=Rfp+ML`T@MO8-0Rz2z z=r62?vHbt!%sYE{dH@Tcz?N1lj6CO?O%6%I7PN+_FjdO4$>M1N1n6m1p7FY$VVdN8 z&#MM38q$R`0V`#q4sA$c>ehrM2}sow3$GYMc}J{ppZH#sYfxAT2vjV$9{eK5X?9PO z$w5}!7vRbIcyxXVf5A31X`Z<|nP4QIznNsb)oJP)=rV==b(cvA_)N8@=9`KG;C#o8 z_jL$~mQuR`gc~00P_-# zetGG+GC#N2vrNpD!$^HDmYcrBs@Q5G*QYdt)VQ}*CB`I1ABDpca*628;}u2$C7O~` zv)RrRMFTbq_s*~@)p9ji^o*K%bpfY*faIP2n*aRHqsJ23@jXZPUM(;9Df#xV%eOzz zKo|x7oseB4<{#tKgU#ks3C}>p%%VA^V@#N>t)bsq>aae`RC~s)fQxshPJZL@tD?+*Lkr!&aWgIN z1yOa=&Q7TOSB^z>7>i0IqSGe{TtHt5tIJyZSp>s4IMWRSKQtUm*xXTCo+P3sL?@I* zXbuh$+Ys(xnY=_@#Wkm>e@6&JmHGlA=397!9AKPyei&ht)&>U|HvoDIN)JVd*3aLG zAFifJ^NWIJ0T!cdDI0sU6SdDglT1ulJjx3{XYo{^KFP@O^r4;jqleU0*~Dx-Ra8pn zOTwZUcAO}$VtZntB=^1;1-HYTq%0)qh6KJD66}v3F0GXLpyBKhz1+; z2`dknwalGfxLD46v7g{n$Wlh35JRR<7xuztVhjHOMXxyV@jwOA1EetP?U*VaWjfTZ{*t{Y2{kE_n1w&dVHY?|A`6ODcai;xd3sMCYEz z37D!Io^6ihumdLDym%F6VVmX5nXR) zy57-RDOuKLTb68DjsR2NtAEe^=X1`5Tz0#Q!NbIE1{-C^Z=7L|+FZaEw!I{v5Mn_#hux8q*3T*zhukEasC@gWU zEc)r^&X;GN@ny>A;aE&b!x*5>Pdm~NjmC7_ZcYxO6%{+6QGfMgmGHJDcL*~P@s|p-IK14?dRS< zJmOZ~ic`2ra$**%u9vG&+d^3Yu_HRGxtkV0Qm3i=z%kt)_|+@@vb?minr*An78g+} zlUG*D!u%xD#m_i%@ltuh4+q;2O4DZ95A6XS!%*Du>z(&IWqI`m13&ATv*qk1{{|2W zzQ-@!eF#w@7V{?(gtoS0W$H1b2*nAf++|>U@_|W^AjaB^XTJwcMAp0`jDrgyjAKC| zM!Vr)P?jFZd%s$nD-AIu!6d2r^OKL(=*%i<~Op6V*ewmn|X`C+KMWiacXe)z+3F05ksW`=R_ zXh+QVro&4@z~^SHpm$`fe~{+_1ViFxhG6n@Pw*y;j|jW%K!8#?JxQ@tA?aIGJZlip zf}e|bd?;No%C?eKSD6hOVZsU4s4qjngE=|y3lfaV&!7i+quVAosd$7#+d5>4AoNlf zc@sro9HMu(g!55s(6LHulFCNtBxvJ7{lMY0*ST9#rd*bV4X(@Q!b|1(oE6&tp=V`l z6W;A?{2aREZs>H1B$t!7+!y(GfgohkT6Gc17LSpH9t@~_E>2ZG6>7TCj5z56OZH2V0A2kbt6Z;Lf`Z8tMfeR>wz+EI`9fyiPQIn(G zXauadNVACw$nU5BM4ISP=R)xo5OD(+NxwZB{T~wtGv>?};}8D$pXHCg>b~(9D2UP& zkLk@M6o^xelNG?QTStXg&>J{ad^5=;_nXL{j{5ta4C;!*vYG7JBn9lk7kxJzR8_tF zR!;o*!P)Y+7B&@3Lg}Ga&lIO)#LTvCmR(QSa+#a<-|J;N0RQ6Mh#AIhJMCdqtXr-K z{*j~!gL2>VSXJd}SjBqLEsbXV#1l`HSA99T3CBCTrvOZdX52^X%@!6w@m=q{K@a4s zSJh#M%f@i?_z6v@N@RjQLbK#RCpU8%-%kL~g+Fo|yN*$c0nEfg{GXZHLMd}8bYVI1$pN}53g z;`2)-mSb@(##w1Py%(q4iHqg${O1cL*jZ&_LB#9#Nj{=^giOb42pe12w9i9SbB|yJ zX$IAWXNDa|)lZrQ(w7wAmv3vAa4)#3wGgiBTKE6vb zIwT(%Jwehz65QGIzDg0PI7FvU*9ku6*xO6Jky+h2hLBk!kppuL(B*c!3UQLX`R^u; z71<95#c20iq_v)A2|q9eZp>`yE%g0p8QN2O8*%K)l{z%^;E&=+Z8H@n7VpP;Gv^D> zm(|dUd)KIYQ+iYLeNP9<70C^B%v*+&6*mpgpF6$;&4nN#DR6GQltxgzUelDud@r%% zirU=DRQTI~up?j~ZtYQQ9oSRjmsO6Cqo7v<->lrHSZ2KNBi?~}@{5xXbCZ((-%U!) 
zw&=FpN*o;8{r*_9qtXe;aVZ7JztU^biTNYD@Hgy2e>^0!l0(q5r3qT_=;*_o2bQj(ps0sRzw)%4OWRe~ryp29{Qwwc+Lu$wadCj9XL1 ztIqa(1!;q_9ZK)W>hf#nkSP35f`$M2URL<6s?oWrI2@wSFsCKMxugJfG?7DIv-nRj z{1YC;jQtwymIsiqI>yk4esEiATCvxygB?|z^AgnQtl&U~;-s7ZwFiM`|GwgX$q)_j z8qBY^2pY$N0u4a0&r7sR5b*0oE$m;eixI5}XRNVoD&v7K;J^U+>sy~FXP@vgz@vI_ z%Odo}5}|UPwoisij=?+%z69%S%is=z6GwAa(NOMSM@7X4TW?@~GKX}?;4s@NT( zP3<`6Pf^YbF5~M#3?v7S0%Xg3oZp`Kd3xyw42iEDV#bWZXg2zC1BRRdC?0}yu{jJ^M6}{D7MM~S-H(w!j?CAS zUwg44D&K>|kWKkPFT+XbWMHWpE_h-BZlVeiA8$1Tyl;(v*Qt&bG&%mYbi?Gr z>`I6{e>ILu9Pr}1jDtfiB<*|8_eNW{gH4CUIy%TSDWmwXf8OK&ziyp}usY%+_b|w9 zOyYe^FsdFn2af@j@R(1cUWa>|;Xsp~k0n=Kx*^)?fl%wsdF9YUjKPn+G&mGVOoP@W zoXLwSlA6Qc+bB1WTSIuDN;e0whVh+H1&bNW69D9q#53Q;xZEh~`*{JfK<8T{ZBzs_ zI2D-lYFvJicdS>oVkOLZDp@5>x)xrE^H1d92~5{ASyu2QCtawWya0FXXKtS?&RDrs zz9BFpuz>RvxA;@}ctBVHJuI9!b3-j7=ci>&k*BTEZ9Aw{%!KNCeDe=(Lw^wpJooPT zij?!GO_toP81X30={sM7uQ6t9z@8J{r|}q*(zi9*^uy2(u*Rbi>?0Tu{c*cGyF%3W zh=$`Bs{F{YK*$`J>H7wPRJUPi@mMH`=B0J`V`yJ$K}oM$TvWeS&{)2z&wbfEzTjh- zq)njD3{>u+eDYuVgKUtYd&38Y-#;x%8T5lE%W!osR5W?A`h$TFnj0fjO|mf2E&Mnl zp9F9G1jNNWxX=5e21rLDoclh+b4oxpuVznwfNLdotxk{ii0&wkkk@iRRj%6<^x#En z_QSz73K`{MQ-aM=lRApb!EfI24&==Q&1>oa1g}ZjGbP2^l6t9xs{`Q<-phj~dWi_~ z;a;Xv!=mT&LU}AAWolnwo!Io-!)D8<0wcoSFIz|}=xS*cD`xI6WIYlK2l!s32ckpe zk)}MdqnH=+A+hN^OBBgDb$UVYM<#ZNhWT=Hm<5Kbih8(y9Q#Md_SW@AgrTtFcI0%l z)Kk&o(H}AA{(<6Zp1(!8hY)c|5wL>#)A!HIxY)M04jXd-2o@zz-u1YChq_;0hHKIo zupKx@VxtIp0Bv1OrgyL9Cf^>#CD}X2?JcAx(CE@~@fSU~9n45HXJ(X_&Pw+H)R2Q~5`gAClnZ5NmAfEni`5SMHUi!rQ z_=SmUx{-3bK}@>a=m4Dglsa^yugi1}C)hqzlWwIPLCQa_7%hK^jQu_Thmsnp>!!a^ z-gwG?1KEg?1>{qH;1|jIIdjIB^{$l{-A0ISmI<$}_I)WF(@iJ|_l)#L4j*@SF}a$^ z_YT2`U5P6`W!xME+-KvV;=`L-@cYpuVrL6tHM;AU_cnyN_EYewToo|YpE3TI3USOB z4_t2AF#6XYy!TiriUP8fn$zc5qKpUN?c9RdG96CI8_Yr8Om8tquD8kB{Zw)(Hy;T2 zYDE6MpaP#H51!C}c>Prl*aLQ#%ScyE(B~bJD45Y<;S?uLIq%G^XBwddc46-@HpHbT%hQdj z*Ua-5o`@QTsd;2%xcaex0CtaK=}`?ca+>d#1$VTfU44gWhm(=&wQ?q~2ROE&%xWY! 
z$&KM2PzK&G5CwDfkWhU+@A+rT%yWJPNY#Ubh-ki;4S9q^`(2MPTm3^a&ioM|5fRCT z{t6$`sS)dmI6rB)5w~3b3(w!ogi*=unJzPz{r4Q^jQ?Kc#Be)nmPq5No0whejrw}y z9w&Bl3D);9$!_UR#h0PVa2{#|X40W3y=nK9=)`n@($;Mz)GtST163k4f{Q=I2SdbZ z(6~Jo6nrQVzPP$OLHFTnTf!Y(BXEDjrlPn&sBL80rP_j$9S1LjTTeb6p4)}t-3~>X zD^^^%tUNDAG}o`YY}Kz%KCw7q|BLzUaBJO{&9%NQ4?Hcxq3=&ZJtrBFG9U1{`^Pmt zY-NtggPvejqp~tmon24rUz5n_zcFUxX6f(kY@!cAs^l(BsyxqylFE|$xQsusmr)ge zWxUwm?k*AaQWUHUQF$(B`-=oEGqfoJCO4^9X2Nd3`Ae$+@8w%75;gRdqdl_PDs^3B zc0yyyD5Q@WWsY2SzZ+lwmcs8TKvqc$h3FV`7Er6`5B?Yu^e!tGLHrvj+t84h0-<}| z& z1bE@D$(|=X8Rh*5cY+;)AwLPCes9M5Z)3)}=qp!N<_hYBJr^j2Q-`-W>UzmGP3 zcBt^`n79y7E28wZ2BEJ66Ge>b7ygJ(gQGL@2bB#|1kd&F1CDy)rdE@rNv$1MG)uNq zRxYNq2c0B=*!oMr#mh?LhB*SE&Eq|&;${oCwR|vw5PC?-|GT<4<4q~^xB|9@ei21p zR(&n{;@Ux23D2CNWUw!L_0{r(?Vegbv4e)9{ridwXUePY&K1yTHQ3`3Am{=tAv}ar zqN1|aa49qW=PHwh$4MopML)$$at+AWpYF^4e3==z1^tDx;*LrLa!8>9xlI6$QPh^qSis6$lZB={?-Gqj?UoP2Hu^JGc(&eD^4X zu-gfCZ9V=zOnM{ERgQqzcPy$X=hdq$!Yj3IbdCl57zi;pS>~TL!lrij0?AO7-Op{_ z84VBdhg1qx&(E1RUVGzI*5s-pgP_Q~H00sf4dx}RS1GzdiABSzT~lVE6KfoBkA4cz zrssLOgy$S?o^O8YrRocUpnLw85UgF!lZOT4`Aqq)MIau3piEtI2_x)| zzg)t&M7)JAix}8~=l~}&ukd;xqSTBZahmObc27V`v79`9=mA?{&vNRr@TD61iJOf@ zeUoLry$jwWZYWB0AO-9XB*ADd7p&@bu;$Ahr8c-y*P)-8xs!J?1$ghEp0z zbXh+7tZq#Bj)@M%@zw8BgvrmTqO_S1v;`b_$ei1wpmTaKE63E-@H)~-VNe!919OHX zkdi(3Xuew`yg?igidxLg_;osDlh7KOS>2T}_h4-z9S(HFPOQ}mNgMy#j{(Yy@$7vu zKfEWZP0mHxa-l#dUjX^wR*_hhUV-T{7nsV+Wu^bTXZtxi@6E45Quv|$5j9kP0CQxM zu4l1?zDmp?-77K`$qy1N>m#o%AUp6g(nwhrUQeqfB%so9M$upSrYJ(_wy*A$sZdk^ zE_3u`e)-GgC9fp;nmIX}F;K2}hSRD-hCZqMY&UFP6i8F>*T9M%ZVTj8DgcjAMn3Y; z3#)$|0%rrk?eQ6)P46YD0=MW=d9J@`*{RLX(S?BB3q&)1RX3fViA(@<^y9Rfkos=s zI&v8KcvuY&UxOmFM{cGkkH~&#Tc~ENrZ5c`G6^4tO8AdOza?bWe;of)3Moa_GHx>| zn1Ocd^+qff^*#ip8#9S5TR>DJS_VckN`gTSQv|JfXRq1Lvm|W$R=M&@r8thA0g~>A z9tuW=!I?ekY2;JG z0b_j;33IRqg8|1aee-NAP}OQ>F6iZ7!$^9HM&w}8W*t!aui3UYzLGym>Yd_MW-EyUX9)mqCl8E#e_AbQ`HI0@2(D$&8BibmJU4SA;s(F%DlVO+MGw9-`HJrv zZR9wM?*2e3DQCDlRbx>nUwFkln}I!+7!Ga=?4HvTLWZBw0XBGqH#6>66wSZd)Ff}C<_BiRV!O5~q8 zWaeh${g8%`i{{6iyIB4<@8q}>is8@`Qg;<=VPgRWLDA@I9%SA^+I5bovebDgWT!3s zOy|}IZbt7eve@q$uuPAc=!|nxAYyPOt_77CIjfvkz_>ql4^XN(HyW7E4w_vNJC=TaWPk3jm2dpTHT^TjY@dA8US#gRSDHECXnTOJWP zIxDwN%`495t9(dSd7`P+>#(In0#AwD5V?A*0kMlb$gh+TTS=7s)s4|M+2X{^k4e@w}(kI`Haag%73+_K5ze_LtK_I=dp8~}skS^4Ks2S_Ge2VIP+<^pM(qv{UFzzobtvp8^=@kn` zc8uW0Y?1X)o352vw+oo(!Hz#X`uC)7$A|Y}$@hwsb~TI;r*_8peMQ9aD|-E)PoMF< zVr1U+niOg-cd2(G+W{L@I8U!q%oyc8h3zex*?j+6S#M5e0rbE5X+CS7ZYWPIuO~C= zjDMH0z+moYf?tn%f_aJyf+@`kY3{~wDB?vw48xaU9HRfWS(VEWuVETQU;uy21rG+> zIgp8DZ9oQhkjkO(WG`e(rx25#W$}^B57)9IoU(K+o2q<*u83mDG~!7U0GF4J_>)-X z?mo*6Bsx4WFXld}l*M|O-@JpLw`ibz*WJzfACe0E4ajU&n%m6;h_j6g3e7nj2!i%q zb))U{koV&)uMoI_a?LWhVrOiE?J}m@QsfjiQz3@!&>R+tws{CwQLU0(Wew6HPXo$w zEH7TdQLz&qXM6=nQV*Hr2GFbAHB%JP0@b&76!2dX@1}jZ3{$4Ty&>qfA0vse3X%o` z&==#|WhsY62hSqz#uyz!W|Cw2*DrE@B*xjb8?lNYCX_!n(fxo;$Vvam3w9aiRYiIH znK&b0h@EDY_eq>lk;qf4e6Uc+F1GgZd={i#!+st@UFS}qF^fPiTPQ24ZW&L5EY+of zhaQ_bz!GvJ508D_B1t-Pku%gu2MIS;tS?!ro-@Dt5N};WvM#$&*_iS&G_@s9-bDK) zE&vsW$KZ(?q913;Ow>U?6z_f+-HRF>A`L1pBTc;@$gP5buFTH+i)m$gGbu5T{Mv+* z__P})rb2M2rxBndFjbDi-;CTOoc0^Hgk`B9L#!oL9P&%C@Ud&)WAYaRiF-XdCt?O= zp*PhV^xf^MU^2^|3RcdVaj%3YILEteZ`Y>sePG(xnejMEbUJDs`#GFp5eiU{_;}5& z;uAA`J3BE0m%;TsscLe0>9g_~!Tg$q@%hm7m++} zx3UN@7DD(}09s>gVaIOX3XtOVMgd2VGOZ)vor_){PzONm@mgFP-X|#H;U*7=#1jn6 zQS3~DZ6wH`t$It#TRn~LP<1djm>5AXhpW41JH-uLAf$E`@R5G+nM)93M4^ISx%#5p zjJzZAE}O*J@M<~hc1Ul%)_2pT zno{ZP>bx7n&nm!$vnAH^KBwPqxC}WpLFdI*&siaZpxi{WVAgK!Sq!ZsltR=gcdNhXpTKv2FBsIOu2RgyuhP&eWq`p97#^-T zfea_vFCKAX7huX(0Znq@Jj&1^Yv8~#?wd# 
zJmTldt7rV|rwZj<`~&v=6aGQL{fNxSbz7Z@p&A~kXhbbH;tqkz`J|kD;*YGL`EqUH zyUkqwH!?airxSxVvpiT66`$@#q8i+BBOkpBeULwlAG}hY@CfcPWqCSjU4{EvN9>}X zMgTm5Re&8TO`+tKpF@HwHopgxWOrrA$mx~h!4X`YA#Mu+y|Nj?3_rM#pp}QmF9xdL z91op37_NsS*)KWBcQ~XrqEH=Tw5t|KvB9n5@DVYTq#5+6H`HHoU&Qs|o=Xmfro#cZxqTdvQQ=%K zJ?(u2MEeMLTTr&pLp+M)C04=ZZmOsZ`=ljt(5(MHm5TMfkyuA-@S(AIulPL#%v5qO z6v4&4LfM`h9tnvs4o5*hViOHm+A1CB!JHX#rtKv_oRnA<6?k~^QL@3K9O{6ugu7Sb zKoqpsUwvW(M7ns>Q55Ze$}&O#iNh9no5FD z2_8nBR}e;I1(pTorb&qC%#UQ5miyOj#E>et1LncHz)UW&AOKx~!aXW7FjEG0d=HtB zi#V8p3C|Z)GG=*yLu-LztLmce2{LszEMELqh$Dr5Jv2(f(Av$NNi`cR%;t#hVJ3@t zVI!By3L3ri{>nwl{$@4o9i9~9kXpKaQaesIRDo7vVhDrrOf4zP3+wn*4I|~Q+hF}Ig@%ZJi34I`6 zY@lU^_ea1%@m6`RR{EFGWl4}~PVd!RSNQ0Z0`4zr5(Ab~L~1}BLVVzcf}&kJ?-Bh^ z|IHJjQ*?YYJ2~ukEO4S@jplHpY&7(JtR<1!k|N@p2QQVzSx*wxRV*N7f_}*DV~Q$z`w?CcK?hSkw#OO z%Rj}0;&xM^tar!1$&w`2yxb_X0t6?UhD~L$H46qFu#wk*qU4ykxI8IMmf_H;^j{0^ zciRKDeeZO!X1rs2y9TUPR&o;ij*j)h3`-2~D(~%WN7FIgNe->1`#T=1E`;sj6_9#) zX>Ku=6dDVs9cNEGvDsCio)$(z}vWd66DzgCVtH`J8w`ESy=0{cFO>hr9J2r8@&0OTQ3G3lK53nj|5 ztg930}uhBhdN7KSf& zNRrl{Dd&9im9nwFCDsIw!;iCq9ypEgqd-WO4kaGnoEAb+9D{uI`)XuEs$4j5#KL=H zZ!dDkd*Z@z3%VF8W8sN^fr^Anq&RuZrpn5a2N};aHLm>23X;OVpyQ+Rq~f^2DO$@o zahCgf!Z2|pg1wnuCB*3Eh4-ZF`@(IwHe!QJs)*J)KiS6Ba%Jgi_y@tlkRQDp)afs^ zW*0*It_Nr0+hO>;P$6{q1YI&)PP57Ig(q(>X{FFGegR=`7HPwMnV8AyjOC{0ij*BOupJ-8PJr^%4H76nNSK)7AOGzVlWThN^?x z#jy8g$~FJog;Mv$nUF&=y)<72%e}=Ia%lMpI{}&bq9NRT2(cR+mkXfy<`V;XRyM7eTV5(^~1vtE41#z@tWxx;;1 zhJMm>_5>DZgiR=S6&`-p%bEv(+v^j36IR8^TW+5`xj2~o8_LOQJRLAkk|}q@ie)Zz z8$!AjI2HgH=sef_oFR2Eqsq*q_fLLCXfIzW>G!q!bMb8XzQ>PK^_ath?t6ZJR3e6{ zu*~LO*cFPZ?W1FQprjd8_inxiU)sG$;KHZBj()9^!w;trxBq-DEAdd_dK~no5&n7E z!@1DoyqHa=0Plr@RDtn`G=WzAg#Uk~gvBr1KCHab!`<9#roJ4kTM!5q0%ftBjWEJ_ z9HHHKB1#akk|z=P#$xX8;`#(-DyM{d-WAjOAkV*D>KYIVH#E+K|0FFg%?JfHqi zUcXu-#!VH&>p@dC7-ML2B<9akHa&itewGgAEfbvTPU!E%i9FXgON`Wj1#vs;=*(hw zx(l!ih<&J9`W{$SVgMzU?-UCs&g4T-5uqvK0VXS=$t?sP#7b=Ls<%`ohpZX)x38DS zJ({x>Pc#fKysU1r-kK1fLbxZt&vtZ9sdDuQo?{P~7N3~ygWeRc5=oXpD+&FD+$@Qv zL72?}nhUY~Haa3T{76V1hcHxFBO({Pp)!#N8z#6n;T%A0Vm$f`0NfAWdhYUEzlp0O z*6<{@SdxplGcJ0;-K3PnC{~v=N0EG{nCRjTY4P&nwVOnkeOeix4l5dPxe(W!Ca_*N z>4ZWlOT}oA3k!3DGMHPO^%~s75#;U2rpVHiD9KgPJ>0^D<*CEi!{FP9#ek#oGw2Mx z^qwjKZ|fydG#M5I^WAp_kU{h(?Zr`egz%p3MQ9p82AdA@oN&hw)_e>2hMWGQawf7< z6z^=@duPiJ+++|AHMA6qToKkbIctRm;O_G8i%PYfOXpb4CW@i}xb?oJ{#`CFE<_*@ zPf2x2HOFzU%zDr1r0yz$wAElF8oLFa3jkjPj^Q+wYYgE_n6f<5izN(tBCFwEBYF@m zX94!N59913Lu9W>rDb*QZ#7XgxXvX6lL}Lg+-lggz8Y4kWO3u+0hC?l<~$`&l^+Dr z@m0XEyGIe)CE~oX7g`iX?_=JHQuxF&fGL-y<(4r>Uu?{$TU!_kjh9DgEyyYx`Kg2e z5TB!Ads%Yj zvWmo3iU7~(R+sWmq}ne*FKhQVu=3$qW7UMY~bs z?)z1$3clu+FxcWN#JrsjW$vY`&@O64>AK}M;J0E#O zUc*Mz(V2tmSYTcp8g9DHra$-|sc1;u*d-u;Sf(BXa8|MNNtaO`LAi*8*^NBPc~EQ=&_XBbKt)g3DBxSzVrv^2B;9q*UhK$(C_%o8sn z@4d*_VLae8EK!`%$!wMR=;a1k~Ub-(AsOUkC$PKIvq>Eem!xCxM z6=WiQK{uPs&86r3VSa9^g!^Z`L7;)V-SW&#(((e*$s@5sKn#17aJ6(rmzz6e7p26g z%GA-HsaTTSUD(uo?|oqHHp;dl)oK+?w4+nBQioR zdJt}v`)(j|f-O91#y-XGE~^gkAU*RffNYq*D8GMFDgi=0yOB~cTGE9zK0wZPVLFC)J>-*f=qCX3#0 zuDqIS9-}KIM*k(k*Yn=UE2A=h)gOe?55IG_3^C)}Hbvb4RYwp<>1w~yx%GRfU)~#k ze)7=!l#M=;g~q=0qba0z5bI^$z6g)lGV9HMqeE`{F0t!3A9|VlqkNJ0$4sC|Hj(P9V{xkM_q>X5OS1i+lUXR zXb;eRuvGnaxuB88uQ?Em0`x!f-}2{fZl{^L$U=zcR~3*d9D&8eOe&C<%R)#>Fsev= z998Z;XT-#>nf}RcLopei$`^?N5)cIrnH@(0CaAVDwgsOp&>DpG(O#~ymKq@BL!Tqv zJ>Zm)eAU6>4L611*9$#`hNt4Mb8>jyO9fQllU4!5%D7F;H9ucLQ%oNJ&TEG2%pK`d zy?n458W0xQn%jiaTBTo2pEJOrPRl>_Q(X4_ru{J| zMXQ-|Zp7);9dYb*F9?Ud7<>D z{B$T6!Tx>a6}pf*U9BYM5fe7#V>~Gz}kwyIqQ6}$P4zi!^caMg?Q42(dmkb z;M1YZjtHDplI)+yVG&L|Z$hN{*g!&5s8?r1hjQ}h|HYr;#v*y(#B%H8C=F_xiiQ-k 
zlh;*&Ph|oqvhxP#u%KHS+IzU_P8y1Ez~U62qU!^>NSLK-c9l=c@h;ojm_(Miit_aR zVy*?E&6Sy{tk6)q!MHH$ozyjeZjTMbNYx*?7`!v7`AeZDKQfC3;ftJ^rF>_0#^gITx)Fy<(_g6PswlN*{8}YK3DQ3el~7 zj4AiEjE8zCax-FQf2y~Ck4^D*J}~uYmb7yU%SiagjfVA5ht z;i1p`L(F0F47BfQ0Wz1UXJd~=BxZB3?Bt>-!-0?3b%k=iNkJ=$fcFnhj*%W@8O*ju z97;_xYI8Gn9{*NBDnI!Rx&F7-y>2sQ_rQP8m0qBGUno;Gf)T4z$wXsMb~>!7-}DL zq2?EJ?GR7ZQcK}V*#-M$-CG3bMuC8?)Fw`e=sbR-Q0Cp!k851>x89CD?3H1Z2Oo{L zk70s@$=1&#!n#O;(WX8$t2W&OZLAtPOaEBjQXD0{$nq>CJ6wfcNuhhtw-!<|-w@={ z1x@*$3&)opEn)nm0p;)_s_Y@1$ynbs?s>fbT-o*!`J3|EOP+?^2TT)?si8mw^o4Ua zS!uLXRAzWktSTk#fKPMnNN1Cc_NB*nAm56j2f#$3Hxz+e4h;7)T{Ash<2z(M&pl$1 zP0<7ahE2S$ZT$PZ@Jxe4-Nw2)j`Td0QIhX=M)UmChyqEYbVaFPI6psEE@iuQ(w2WJ z;k;s(kb6`awp2%(OWySx6FhQE6eC@}9?`DIE~ zp%FJQ5*lN5nP}yzCrC*NB1nEYzH?FFgnTJ%WJsb-g)EZ7YhGN@gjYPfavg>LDyt8c zpHJasUZaD17-M9NK=Lhkq5D1IRAyIma@mXEt+5VUtkip$wzOv;$A9{XXAQ2X*b!8h zHqpYla4=Pm6|xvvsuS#6%g|Tpi(f0@h%Xf9n8Jz@em}cMx#PfqcjZNgBuc71pVM$1 zf2#JSB^T_G!qE~DV-?B)W%Vj`iKm*w@PVv3Fj6w=?$psQePde_@=WY(<(zzJ+RsnN zdGFFLs5$hVf5KCChGd-Hlk#ewyDoqgf?VnHmDT)MPTT%wES}1_28WS6Qm9D!fIdLE zYoMH13aEl=6BA#fQ%wH~Z(wP(``|=pjD4>U*t8%+Hd2I1a~)HKF7I11aWOq)Z^q zr3@oDgJ2XAPVG0n*(axGwYYFoqe@#x?I$wBm|47GXV34%FFsu@fF3sfGxvlpq2}vZ!G_ zE9Mmf9;5Rp6q^M(XYA6_ns*bxaqOlbE9$MxiyP<|Bv1HPi(C6AWoi&Z*w};Wn&a;= zPHd;N?56dFtpKdn2t3KVH*Fn=e(@T$kNZd&*k~H%2M598)bfI_d??XL9a=cUf`_&Q zuu6XbUi+>gZBIcL82NTSfms|LNh63O` zRt*ab*|8@XHC7HH!6B}-*llR@Y+Bc~vV;lauuZBH6&7hs1GOTzC?(bQBA%4Net`_D z{md-_J$b!NT9C+q^_Z^8oQZ(r@fAYdS=>O#OQcnCY9}4@YN4&K$U?qNh4J_e?^Hx; zM0?i2=wu@5<#Hx7h8UAko=?U+m!(6y0eTI?nnk2t& z6fr(jkesROq0i8{5Fs@^$SO_YElm4ZqNz)zAK;Q28A1+GBzF2I-~WQDd77!H0o|w| z!>J^uy&oV1L0ItV;M2Bi4O&2c7hr)>NwGLl;>7ahhxm<$o_=x21t!q5Jv?l?zznIj z_R5mm;51o8DAU2Ah;{E%FS!zl@vAh#^KEK|LzqE%-8r?-m*<~nK2mn5bL0lSmz30#b&uyuan!Oj@AT>eP^XU60ze!O^u83$))$D>BBv1MaQesO0?) 
zZdq}RcTR0^O!o<@(PHSl$e*UyN`qCDY~`@c&;vJ6O#FL7F9Ib;ZMsPvx?syPr%cUMV_p=;Zsr~`_K3F>%g zU3Ru+BRw>HIzU`C-kqcy6ub>bbjwgF8^zGB|EQ;tYGAf$bx9PD@v2=e;b9`>BaX&a zJPwG20;AZpY|fDIG_WUM;{~2wD}8T@fugd}6|$!)%xe7#<<>TN70?Hz1j@UWRI1X7 zx)@?vfGdYhPYjfdNLw$F6-2Suz?#h>9n5Ru6rhfACydy0<^S*?X)?Uxr@mC4@t@C@ zYyNY)gnMy>zYIzRd`Ov)B=vTGxoNuH-?D@a?B z5cbaT6+aDD2+?79LFd5W2dqG0YYI-HoxwaRXf1pskht}D+DDo+53T4(-)WZ?rCo^5 z`xLeI20hmt97DHP;63!0CN@-O__Nb5 zeL5Ck-;&Jn#2n%!9R$}4{z3=v^-RsZz+(Bgz_01(w>z*1S)ILH=KDaa6qu^3>dm$5^SMxh zjQga_dY1107of!X^fZ^oS@OflSzkeR!QK>d?#v_QB@aPaIx#*b^}lS_NX!ez7I7;o zfasRw)d%u-X)b(J7!;S6yqkbkMX-@g3AFG12Tt2}zw%4!gK${_h(abr@e)jN7xTMc>EDy6>+kXzCmEm6C*wO)wX&x%S|+6vp6bh4?TwV) zZ0?N+*pD_GXd(Ge$3kR#J;rx_UVh+NQgCFrovU*dXEW>Fs(Z=$>@?mYB?&ka{zY$> zLY(!(#;{aHtfJN4T7p7zZVpf2kw{V%40-@l)QNcuBj%Kk8wW)-2Thd~co{iQz`%Lh zB>t$$ENBLVlWi7>loyeGCNGpbs+}H9XnYC2nivY%^zP~BTAg<}c<*^u_C}E8?D^O7 zZf=5VpTLP)G|^&jg&K~fz*R23@J!k98ZDPdOs~No@7)J+?a*I&wFCl=KxqR`Buf$t zM(NWpe&O-1Dw;wpQ0lIU=TNqp%?Nfm!zJ?7{YleWO;S9d5L{ux&L5SfVZaY}5F&us0rq^phCf{)_z2srjLI-q>_99- z2%BaU90+JWc$C1LN)FSg)?Wqdfwhns}+{r8d$Uo3r{*V93rAMAu{Ih~_Z}I>4 zz4O_@nEK1%*RV@gg%tGCK3yXGQW*3|M~acZ13lgc zwm|wdfFsWQ&fv@y1%yjM7F}N~NXadb=0~0S4J7M(r!pbE0M9I!LHK(c+n112%YzUX z>$e$9IoZ&*@bb--m4MH$G-U`#5rDGPt;cvKT`=y9|AXgDvF2NprR97O42h#LzVnOn zWUd-8M=@?WiWsYe3huxYP*Zp1#bl8_|7^`z%VLK`o|}Ao5bKmIR@8d>HA(6$hFXK zkkW}p;gcjm9lq~dvvF3JMU%M}HO_P?B&0eFm;n}R#HQ{;(ebUH6v5i&g9ymC;a|87 z$a>~|x?m#p8kQopgn1YNV1&xLxAVj!uTF=*4(~V3-(vgi5(GF>2Vt)b`w@ojZ^v)@ zBDShT>E*4U?jtg@HjWxI>aL>ivZg+-X4E^KF_m7l@aFi?GPyhCZ>SSFpRn2TM3sQhpFrjewQxG&A-OkYNbA<#~fCyfM5j{swD(6USNgoI7nb^!`%n@k`Osoz+ z>=hwTp9xNOGHL6mZR%1c8w9P|-Eimhi!u)0JmPr=L|PC$UN^#dBf9I#IPn}VK|&VBNIW=YU0IM{wTtrh_tSENNUX5pq)BpxuU%oOF;%=3ZEqH zKyq}*dMX_;T`XDpi{+vxg^XKi?7?-#INEKa;@tUO#pylv^X2=I=r7wS7H5!>GLx z!E~A05BW3oCt&jamx-c-8C9{Xlb5Rp}mcOdk zeeq1b0-<`{Du7kTIy;JFr!vQ$K63db>I$f7?aldD%M$5N&s@wg%G?L1mDtaheo~(B z67_rKIsbG~ZrkGZ4ga(&q}f#3%d3II01hh{WuTN${`$F??{`oC1j{7Km?A`91}nGn zl{U%e?Foxh{0{L%HXh0J=IQ%>lfbOG*jjI93q+qVogOv$1ktgt`ZYi-*i3@GS=^_@Rgs#W^(XxYaB70uMEV~=2+u;}Xh15`?B0HvUX4{JU>fv5N+~U__y&M( z7`c~_?UkjFyD$ujbxGikr&r3uIjlBu9}!Qynpm z7@-;wk4ESvVvR};9)#hlQuu_qiuvMpU=he#3RI!EuAJ4>O{4)@q!x%U1(Cc1P#TlO z)>qzl)2L)WFj+gYhL{~q?Lhx>nSIV*XoH}>=SdCQu{Hh1%5on%mif!Geglb9%i1IC zadk8*ull@4$~YdJ$`C?Z8JJS!?E|80wN}V$DA+_+^x$c2UdyNC@ONIL*O)-iej{OI zryWpQo);rRx9u;lgZYWn)uq+4)c5zGb6`4sF6t?PLtBUCw|*SVCrHhAz##jm;N>PC>Rrw%deoC5LZ+9aou7b* zz;}ib_7LKJh@aHXHbs%)Im^BPK%eu+p56gD8?ju4wYLIn$QA^tMievB)`GX^?n7LV z^&;D0F+?!^an*NtxC8Z-3Xt@K(}N^|cPmtNkP0L3J)$dYw0+9vt~3eJAHG(8?kQbF zAN1y;#12Q&R1~=((S{I!5%%z1Zb=+n1 zBlY`Q`K6y&AwE%-dNuKE*ZuS&2z~S^UxQG|@L(7_NZ{U>q4?QQax_{ck z(e52Kj$vgGt!N5AHm)4{q1IUX_9Nu=;jwL4&5F7tRQo#{kb2xSXN#yjxr4EDB5O~8&6# z;Q^?^#>3pW8X4W0*|i>V^~l{XHf%XicFlT**!aMdqH6MbIk-6r>2;ID5$$t5zZ4Up ze)kTBk?9niF~B{0xP|g_yzweI@m{7iU~;1{V#(nE2)sT1fY5R`2mU2Hz3yng zR!|j%5JC&5^axuX&|Hf8*s2r-gs*r1oihH^nor^M8~I|foa7XrM_eHT{i))3^SPMp z7@4*SNbkz}(vQXk6ISXlc*s7fT*Z{Sn>K)RMjh%5Y#7k(No=n9?QzQQDYHvj_+vX}? 
zYa2n;JpRVZ`ufky!{;7)_~%iXx=`j8JgPN34FCNc>~#K{V$#S4Afg*hyIpxEeer54 zfFW!&IdJPRED$MDHeMRM3_i@?;&e%vdj^#^3pZdNF-C>4Kcfhw1VTBv%yLFKqDPW@ zdG&=#Qx-uB9Ho0AZK-m`&pEeTE_iD_E2i14^k?smKNjwimHF;({T<+OBqhJ`fqS}y z)A@7><8rBtt`Gbb%0&DO6=LrfWJxmFp%%*D@`Np>%@$D1TwEh)8qH^$* z$`EMQPe0BJ?-TKhUIb55UN-G%CX&Ay*>Hgmnrz6niZ;ISR)Bgf4lDVrv%Z3&!|S=v zYPA!!%lS^1AU=+!z=E7AzT=0GzSW&Rwjd`ahC)6GQ2s+dE-(6S(hf8JqPi9Imlql@ ztNUr74A-s*Y0fgrP4us@$sR}v4@G7BeJZN^gxkOL(m_+DhfrkBZr}=0TUxwHC}-$g z?PD?b*hXFtl;NbyY$K&OX_N*9Kg{*rvbMYu*`fRQPU%$nwHNQ0R8P6t2!)ky^qxI@ zag22|Nkm!2sPq9Fr7}QOQUo#)9Bq$@KEb~h?}V}xA6UJ}I>04_B0_$OpAxrq0%p!G zvP0y^;D*1Ug;eo6q=!4+DL_|ee7u)S2s;iJandk_gz$OU%Zg`z5WWS;X-o%q8kA|=h--p52tDIPnzXJEYQ=NIrXrBTzb^bMYdb%E{}}qmwJQLKc4B*Pt7M{G z?YF7#%|10a^0YU+=PNMhJDQcUJ_v_Qb^_QI((RsutIYAJDVyCLu&0%D3p1&Mpv0no zGycXKs81CHR*_eKVR%eP7EnE)hmd@5Jjf8;k4Y!lMer?H44zSXrPTAmWMPDWcqlEEC+ws&FOqv-wwG}#NM zMFJnlabFwx`QeC~v)r46BiCjdrYgREr2Y72n8xc=ghchjG%2$GpSL&dk^9KbJwL`^ zE-;vAw%Q~miiClgH)@G;RaVv_)~3oVQq(f)tUbxvR0|iW>FLEb;5)G4i@^+R%o}PU zDT!)rEvdB&C?>zx{XOx|=R}mOZnxSOqehd=%>Np3;>1~=^BidU!mvhE$N|0S*Byss zZX?{7VGJ9F2v77Bh9DFi9ylc^#_eW)-@F@|ZJuDvbynO#YNoBo0>55NXtdG{B&LnQu9XdLh;fF18x^rLb`I0~QCS@#i~f(FC%T9 zW@ zyLn+&XOZetTU=H4vRxGowx==I_zk|Znqjl!;+`CX1P%hAL50{%BIF!VrO`v$(h+yHP-F6d zdR)Hhm*f@O+p6A7(>sCoyHtWPDw*?pVS80MQou8_evoE`N@@B&qE1j7ND`~mh=-DB?s0T>q2ZoW~2eth9K#Vh(_IG@YV1Vpn7IUU- zTXmc4O`fLym2@?J=aNLQn9$WgGNF~!&J1awLeLKbks5Z2z?r@g78^aO!o&nuhh$*M zkRqv~anrNQv?fNkD7&CfvU*kEO=7SQwtX9-J+`-1l9HL^-Wh?ms+%vE$qygTh3tFeoe@Bbf*odAo67 zCKa@JCI_rEc<(mJzhC4L5v$C7Y1(JRW6_Dq9_VzuNR)IOdP+SmZF zup>>LBUOPs`Xy}{$_<&dYvH$|dyA_AfQ~X_z$$(OWFp0@=TTWL%EQE_UA_{%HYMOSr7`u+ z@20^$-#X6$rb~sYz|I24M3y)NRQ(43fWEtb^d87JcJwbwVGlMo6-|Dt71Hazbkm6t zlz(~tR$7RPZ4F#bz595nuWWJq4OMRfy>dDIsq>`+q@3cpZj~;?tceX-D8{Tl%Yb5U zQ_3sXbE!&j0iIbw?Z=KdT%IIy%R^u_l+?jXL`=%+VyH5KLV*Ji6ZkCchk{I$SST1OrO_}9Bg+Qc zT)SgM(QH=&5qYGw15=FbC)qJKMG@zv956+N`}Kv7AYqv;AwTc}!TrMLf)-r&{Zn2sSc`O2QyfXrSEgAl%-FS zYKoDX0lGcuiFrPSA@>sb^^-9M1f@10Y>&BBPP3#t$x0tYWr}JktK%k#u6szjJ88Zg zxQ6u;qufBK;FR|a3a%LO`g7;g3!X3rLbd_kiU%jh%!he#A0>Plh!(u(vhC&=L4>;$ z5yiO1oboy39h5?3&+tsh?rhrENBn&XydfqqzDs4UHX+#0J=raBV=(5B(~jc0ekVzHMZac@EhYo@g3%Rx@T92dfoK|Zc!eEraQpcb zXv&tzw!I$8jbnPf9B0sburYX&=o7e1_#UX2515cwvbpNLZ)|nxaGiJs+yCnBFp?gl zc=zZ7dTX)V-aC5TQaLs?K?g0rCmTO(Fb{ttP|BUbcivEden*mhT%b_@KnwZsSFx%i zCCR&$LIht3;~ghN2z z4BdokFx3JNc9VBeaD{&K-osxX(N04BAOj{bs>NWTeWsY)iqA)D^-D!3(AXjDDTw-( z{l3^tyOrbkRH3c)SR$8-$9XKZ-^(=gLPT;`=`2(H#33NU5|}^Y`mXefVZulDY5;gW zHr{(VAn(hYvcLm84rq32x-641lP&Jg4ufG3H2#nIFLxF2V-Icve8YvRDVt#FJn{4T z0G161P7f63i)v~tJQ^5*ZE0WQ!3JHhAG8D)Qsi^Ggrq!|mr@`jZ>C6UNuNwoUR?$P zdjj~aD8GGmnPwGj=qBzTph*uTPDL8;dYEayNs&qkGLv{*-Hgjr84A#b@)_NyXLJI(**nMZj8L}ZM}`dzx=EXWy2&3|Yh_G(cyJNt8cVNwYQfjwLwM9^ zl=s?;mOQq(>2{S&6~U4^%0TfRp|HVinpK?f%}GrOuH=VTRW1FPMtdv6xURcm5D5NVCZ!4`~7rTOWTAG1SuxR!(AC+HwAsxgf zj4`7?CNFH~#m7z#CzAqP^&f5kixTGIJim>TG)_pR!z0k3690<@bY+J<2Sk zDhyjj4I*boqO7nEJ#4~lAiNahbLG82nr}4|=G!C2xpU5E<)BrtVTW16d4#zbSx=&h zdpl|?Q27RjpvKuH%gG`Y(LP&(P5W5F@cYU0@L=LLb-jT;pCTjA5>?ffkD@j%{!Ij~ z8gY!8az_G{s<2}%dtE5kU%>vv+DUR*wB+2v^i>XR<2`%T5Z*!6uUDG~{5`AX+}(b< zx*AH1R7lBIlAuA=M^uz8{2qkz$#O|eWEQ*OmvU4@UL#MEVwiIHBDZYR$<)s(eVKgpw`8$F66p)s*M@{EV)N=cp<*G?)%Cl9|WBx1) zLZ!ZCw8qH{mFN*WmuT^Pu$aG8MG!#C(4ak@xzX`zDOPvMQ}XbE0oFl-|)Fc zK&ffD=WJN!A7uLoj>f@1S%!z)Y$2pWGPuk{>A{jeojUEVmlIj@S5=m%WPy*N=E_p@dKCBKLAr_{lhr$ne+srJXk*)ifb6P854%-0yyjLPP`Jg2ysA3|ew}SNAI*Db`o+$?}#bEPEByjQtCgSuheQM5mau*CMEV=Q3AF}utq94_7gDdT#LP$0 zYnTLtB?@ygd;v%b27$51XWO<2DbOW|h$l&(7hpKiTw7Y?rC|Q{LpzrWL|+DZXE(nZ z&2k0D+aiTsb`?k_#u_#%$h>^F1{6P;Q}Gsw#)(!u_LHNp{ARH0zh37gQGiO{#5wO6 
zhYlp!U*enc8gN}m7hL!=BRI#RH-F*-ksjX%ILvPv>*l+`9Y63CB?OD1gcW=N@{*^= z6HfISN+A!lCw!{ZK9GV5<%^AnHR09VnCbH5yv`^$5xuuR!as@BhpC25!1-aElW=&O zoSL=PEDaPQ{H};6a37hV1;9Bg(p2R(O8QG#DD2MVUbX(-8+<2rl>4l)yOxTphf9jz zMQbmXX+8{3jXLvK;3{9hfIwLMRF_qM>!F;TqjEm#gOuoni`Bblzgw0VhZg=bkMI$T3VJt zyai-dQ}Grv*~%`L%B&TwV1n*8tU_KGIIrSVTvx2c^LqVwWmFShCWcubv!){Qk}etr zCKds4(iL0Q4O1k$J=h!RQei2<{@n}w3fJC>iV@kyh?ZYg(HPosA9|J~mPKSK8>o8S zD|<0A=Jz;#8stk&rg|>@m45?)vWo3jE=r+o(1UWbk1+|*kkEjIgL&r zFZ!BIUy!rqBNf&KmNSDC>;(@Z?cG+F(wG{n$FvTK;lm)Gr~WFE3D#D0V?7Gs!j|A| z+Fm`~4WBP7#$wySUC!8>;!=_M_&N=~^~>FQIX&{4Rg7?W7&Vy@Cl)rg34y{Y9;A=~ zf!XqGZ$k}6TITJ+`-3~aE^4~Gya>j2!h)efwO*8XbXP?g<`KrpkC3dkiIIkxy$t%J z=hA7vG1Afi&5FG6tn{QV2Q(S;6<>ZDnJn)s;iqk>jIeSA`k1G=HJ@;brp^9$2T~UQ zdu+Uf+6Z!G&R@0*j4DSG0<=3(pP?>|@`Prz8r4(md z>U{I&oXv>a$J-$O1)oYz>ZzyvKw~>tyqNrD&{~(#g{AhWw+t)+B^QDvUW#FHvmY4Y z>GNJW;}8vZ#t?eN*uC|@5$2obOc>isxv41R*;svm4Ez=F;Qp*qfjwlUNCKtX@kCOy z-$OX$7DtQaZEq87)AILg&2Y4@BaZfdtO^nR zWEY{BBg8v)mI;q%m&LchX`uhnAAAvP)ae3QBcMnD-FLqKOjhADPzZP#VUXgLe2lgC z>%pg?DMWF5dKy#ywnHKCE!Le}E^BB&zB9JVCEz(>ijoVj;zM)U*fg^GS9|m|gxY7M44f#MpthR>ddl5ND#sP}KTz+07vmjJNJtwm<>^->m1aE< zd3iccojgOQwn&WMfV^fo|L1kc+6B-+S$g$_s2K}}OQBZw=Nj;eP$81v`X+i{!nfrq zwEh=or@Ef_`P7P{>itBjrJSkINPsk@F6Fj0Em^9Ah`q#$64A27YjFFg>B+J(u7I?& zJnMNidx&^B`f7?Q2ONx*6(Ydw8}+8Ka!BaDfZL@<+~7Ni39 z3=lj^`7*6SG{BauyjKYy@4;dRc(>sG9{k$?#sr|s9z9G;h_Sx%gyO6Zfz@G4BDx=^ zhrjU+DegglY7=;j6mMNA0(hK=X3aTAd)vVmenPwfN_o-so{))>E16Kp!U9cNbiw~# zOq*eglH)Bz?_d!M-6Gq3z#~|<0-Jakv|}w-0CTN^rj3|KLfzK4R)W<1y2uOnLyq5r+22wDs2PlD;5;Gic$z^m}&|&Q$fd<)dltWdrE+&|LiKfAC-O@Bgdq zOJzWhNvi;M@ui;J<7qq>K{;Y}*r;&x$WYI7{!AJv_^!=+h6Ij!CJf!>T ziBmj^wfRD;Ug=tyfEji< z1z{@zCj3F)?vUpzup(UPZ-;_b8Bx=AgPf~A+_hdjolcxS<2Pac0$;({5w7)N1a?Xn z@JvCs>*pB~oi;|ArAhHyL7?t?71rrU5A(Kkbm6nwh1x&7k)}SoADCW}ib{#ngw67) z=(i|5hHa5NH)ASy+_I0jz&sP%Mvu?K!Mz+M9v9R6tl#=&(vMzxztHf!F?h@GEmTOv zG4!}~x*<2J4WyLR)KrxIh(WUieN^*HF3ryI-6eePz-Kod}(+nSVp-- z{%xO|x2uD&tO?BR^)oz|aq>lG=El+@+Suo?($dYtik)y*joKg|(QJ;+bzxJP@v~v^(-0_{ z5>2KhYpZ8d1OPiZglf@2TZKuBU5G#DUr>CtX{CP9TT5hrV&vt#R;kw!hyoVEGwM+y zScQ>jN<$?UdXtr^ zbNJaCl85YZR|Rt-g<7^8K2NttI_(&BnTCM!)C>9p-|5Ad(~FP$xq5i-APwULk>|dj z_0iFXemnLy5gWC;K8c9?9;?nLDd-@4f5ssCVbrnT*E&b!qW$y!o=Q?^UOC z=(DX}!~lMtPM)_6w$iKq=cRPr|9n2ZVu>RQ5!&-6c(;8Z{{Brx6;>xgtUq?ruZxQf zZn#?HBP%aGC(VkYR3y$(}kCe6-!&I#nt z`^9c1oDD1^AASGFMx~sR)tz3ot-xUb-9wH0+G`Q7Yuk z-Wz;<^o|F4OnH}889839lj*8ox0@QL-U2|?CkxA;NW9=USw>Z*IPVWKB%d`&u{x#T&W7i}LqO4R+GJJ#e}ngns$n|@Zj7Q1j>8rkbf0F!*|06=EYet8A9ad%o11N08};{d>OzJaB{~+6C({f5=UA!gdjKQWcb)=aQV^+P9VnyMug$x( z(6R_FK;Ep7LxI((w1Rs3hF+33@QaX^7(B57_o^@R=<^?m=N|Q#?4dKxR!gM{h z&F}lUKkjoz3IDiLbhPCNg-b6a57{)qo`V*V!ZO`W|53%nJXE-_*-GCD2N>^nO9*Pg zs6lg<1L2X9O&aTynxg(yUpvFXB|B_LMQYUMd>wtc>aB&+jDsxN^weG?zsn?Pef6<90f%X16Cp1%)vdvD7xPkQbq z+mZs_K7D#l@zmLga>(I!A4j_~mmko?UNwEWq{SkdBVu|g(8ONV5dMceWAJSTV=dK= zl`FZ1Cs`Q?J<>$w%^z(n;qnEODV8~poNP^sZhcEW1+-3qB23oR<*nG;I&5?`8?Nk= zrCxalea5Om*vsH*-}GiqD(H*g!Y)Ar+^43?6aNUMiuTWDa47%mR`LMsCEHd02eU@f zMLeO5d0Q19T~>Q}?Ij1`Ye`2H%kVH%y_N-x6)sxzx^AL_z$5$iRgS)e{ z86GkA3va$#xpxZ20-)hw_YLCa`6IKVv%1u zot`>jb7P$JY~Zn5(Un z4NWgdGO!A8p;7T+E6b0mhN~(>b2GzA!hjkFAh912lpG}q1kp@~ioO13q>bQpB;G=Y zXAijmonSOqs?dh@3Qy$6#1wglcs>3i9QQsIO6^4&lg=$XHdMe!A?9iAe`fKUdEl&v=?l4xb5`|FL9&GqF`T|x_XyzgaC>YgG{kvkrr0^6Y@QtYEd(3NH zf>t4d)dv8gTEEn=lC7ja=g4mgj_tvHj=eEZT`v5~_Q0{KbhiAW5K7s|wOtSNl{Eklf#zOei_U97OTiBc41?~*bBm#rN44`u2i0+Ed1lEq6!jvc!AB2& z#e~06`bpu{ANrZtrBQ;R8_29Hl;KBGXm#YW2Ndp|SIQjE(LbO@=|V@c9Z(*nuu;ZB zJb8YJTF=e+QgeHSn5@O(c=c635PlZjc#1`S8$mt#)XP)ueAaofqc#}il{8w#WU$dy z-*a%ReJhRGh^p7=bBeSmu4Y#Y^Bf1JK|&NCb`o6_PfeoK{|3h%2zNRY?CwS@mMv1K 
zwoTY_(9tHf@=$2!iNi5ELT%)PLk%0mE3V=bG^q0UO99D4@Z&&25ez-_!|sQrh5 zyW<&MC;y*rmU#5AS6ymTv%MK9p2m~ijsU?pmGODi&kXg*xpT*@kstr5GG{z2a<8uW zL1!@R7v}uBxcf98IShW$m(xrBiHuPH8rc$Gh25i&>B&98Hx6qH-HPq(Lxllt`#PjA zAs6XIFnD$+U2MQB9^-j>N%~YZL!7blY0wJ_>+e_Go0I_yJpL{J|B3+V`~3Z-K9|)w zf+Yxd+LC0ol7Y#&G!yz2e3E38#>nB|H>hrbx`k_uee!9b1hyRirnD(1RZ_ZT<$S*1 zhr*C$4>wjLi@|z^Dsse@@&~WVAu!W#uZTnF3`Lr`MO+o5$H!qt^_Kj;;OtOpq0@Y6 z+z`rvLkprg*dY8^Dui9HpclhZgmJcM-;ctxQL6{wej}q()6{hP>KW5t7^Ss)O|?4y zl}D#rROG4P1(Uh&s|ij*cptaRGP0qbw)n_?KXNr^AMt~%r`=5-D39zKo|NB1R(gNe z!wWT&G~<3Lv0@t;Qk(eF*v+LKhlaSfz7;wHVvpzoq);>;vw2;8jCpHNP6@4Kb?0W;UI)44p zMQWEJ!Yp?ic?Kx_e;H~`Rv11-?uQQ8QKcDL#9G5P##4xp-3Q;wy4yV5Z>?1jP!ceNB*Wy%@jCLfZy((``tICt z%!VU!d_bhDJd(!;(nEDB{U2VJPo*cH_P>cjcRV2M8@2H1W7LRh->4Wsv~3i&5RGIo zCaAwB?C0A7f+ZB)+JKhHwHCk@e*(R_3-74$oOM_2V}zsSkS07!-Uuin+F5$ zJ3^NX-3KLnnWK+kywC`3!n4b99}YNN_0A#D(($4~$Q(hw(X@7-%C;vrCwJ}FMG922 zPNTX~pn$#89zod{!W_l zqvF>9g&>W#1R}sP0-NI@;lx-0vZCcv`)yWHbb8^?^y4EC6Q#@6w4<3N$+J&Sc+VSX z3Y2%GlWwH3mhZ`?wb|b-_)y#pp@)|vfyWBQplieZ<*Okfsfw^Fft)F@D`?hKE~pe> z*7p&Q`9Z~v1H2@}4qrs^LSKJt2i(|ZSmEPSRX5cYl}j}8%5JmQa8$%FfTBsiF6 z<=anB`;d`Cmc9nVYJ*3*v5U7xE8LpJV+M8$RAO%3af^&6UfV3P542^SkP@Yhex*%JWp%=~5DacV@>)9enuid~BT|8%Rz`W4 zB#u3y(;ZnL1U%+b4uC>aL9-ok$~LxNf3t!bt7ALT&(7<+9v&>G?VYH%q*{G-uPFo! ziOY5&&$=4R^1+9L@4#0ced(3gP8%MR`7{%?@AGUlrFe~G<#x}iT{~eGW!*6FM64oc z;rwhv0()yhkw8WMzJuvn_&|s+)T|2GY+d&gq0z2*A}+EnR)-)=U-zk#>EzFSaC`>x|#^lTS{kX9BRle-+djBIs8Yu`6kc9F_6f(p17hikS#Qk%zg( z=v7Ie=YXwDmrZ(~rT9+5B=+I+efo#>0)q2sI&nHiPdIHah{GJ5^}7&`Sn02qT_k54 zco3D5$CCi?!RM~RbnRlca>DVlQ* zQkae4>@oPEAVyR@`(4=`m8JC(v|25F4mJ=FK>ok?9kuQWz!y577R~rNzFdD9EuTVh z9i}n=lD*^t1Qy#DJ-a=W;ybGmOgC6p>tRkG2*pn|8w=DYka6jkjG)qd3^P#%{|+FW zrz|%=`6L-$4%vp1sdNMo$&G(~#{-~s>!Io&yg_Pa@J3na*jA$D)}_#hKtY!7UCdAYeRr!*8Vu z0ZCoyRYoX+_F9;d1uL^S4<$pjU{Tyt?5PWvoxBv-l{1!m^TQmm--VT_u8U1R!h+If zAGe@PBnl6`KV3{wrU|+%R3w%qj|pOxrQwZ9@fPtfQL0Z6!0)VJH%q%$>82-2AO#|z zM107%7TsSlmve=Q%;h(a?%T1Yk_lnL&fdJK%ySe^<#a#(Ls>t|o9RkON@Lm!Kknyh zWBKh1O=Ys`M^?kk9w7oHHJ^x@!5Ii+t7 z?s;~>!9_g=0n=hwC~kW!ms|S2l4GLs+e=2luxph;rPf5si7H(gdC#I_@q%xZ^ z8tmHGNSrEIj#oyG?y7RECCJ6!xD9X3(yG-@T}NVy<_i5n zHEI|hLi~($!cfP0Ok1(z?Lw`xea)5|p5A&1A!l=gl*;hlH&01r%s$pC;ToA>E1^k-dty!u5YleeSyrAo@vtk4bVLCEwYt(7|%VP z&iPjiybp;^o;-af?r|S?Xz-ae*?79*r^PYEZ(S-aLs$R7St1U@D{+B}Nud6OWx+^T5lhn(LNJT+v&OImTGU zk(V$>2^0`gD0L{jnxG6ZSU5_tUp$M3TWDKxbkv@UxW(|ry+n6cQuyu4aB6cb<2JgO z#>d+WeoQnt@?I0YY~{4APsm|Usza-E9N&NWDtmVkbC@w zBI>Oj5zW)pl^~{7AExbnvR~ zHd!C1JzbaBUoa-nxR&X_+sdmb5C2LE1wOJwiE%ONGdFl+jeqH3X|M$z!%@1G;WY;E z>ZQlhX&(vK;?FB<=yjaH!Dlv5S8tF=mFe$9YS|fA^1?-Vm#?V7U@xw)YeZt&pppvE zTp>NI0^2}QBn*lkg4|M36YizGPxJ;{4eQme~DuKs?o6jZSZz{DH=S0o9}>MecSRxD-Yb8l1-SO zny3Dbc`naIzWkoNRGCYEC?Y~A8At_l?@=X@PR!#ozqZo;#KNS`L(9<u!5H&)x)f zk;ULbiZIfQ9Da{9VHVP?{-iwksncFb7*@4s-64nVVN?In-c=s1JTH`$x% zSe;U3|2Qq$W^#*V}+eoOUZ)slk^ME$?5dV zU;5vzG_~NFV{a#rZ;2No0EPFuk9_pe^pcNccfuC6%*Zxt_T|_X1O#W`zrsK~A`YP8|! 
zEEmDF)&0ZY8@3FV4O;B4oMcvj5Foo>c5quwo%5{w0cJp7&UHL)TT28+4mQKKJpV#^ zvfOuytPBqm14S*zHYj_&d^t#gW$hOJrZc4LtxsFx5Ea4g6$P{{22j*<+Su6%4xScz zg~kiCG%%t5+#a--oISO^4LqTRq?f@`Tq#$0R6#^QK|ooEca}S$<9(TRnQ#lg!!R1MHeh>EP*g)dl>c}YxLyydcf@- z(AMP(uWYO+`5~4e7-xD_=Ae`IH++#Sz7Zjp4lLwa3W|=6`Z+r60$@EjdMIZQc5uDt_eTvFVKgXzA=~PkFrs~_Xj=hK zghaV2b_Sw}d4)_d3Gsk#vgj|Qok44qEq>|P3MaEgJ2%B$J+WY!*fyBSeYaohJ3~x&EWp|GfDw6b+ct2O7EUGo$0jhubJ0y`>0GQ$u`U*ZL*e&(?3s-o-5CQOPFH; zVd%*1pPluviWBv4^KdK3zT~ugzbc8YjGklq+_S z0XOAsn-phwiW@xWdvml^U4VU_0ijVEA@JaaK6u7IbGEkbTrEY@FEApFN4FT+ZyjSxPT zRowLoo!%81rO1a+Rzk6sd1a&7S}GR<SD?3iM9q~B|1jP#7KLD2QH zu;^v5CCa1djuEkCPWa_z4c-hxra??KUX%xJu9N1c{JgJ%XZ-t`8(~*rzUx)kHMh@5 zOF4ub3YL72BJRWKx)xD{H5tTOFfsi@s3pkEL4<9lwub>ejZUkqd14v z+1;W1G1A4_4U%vf9u@cjWhLjQ^l76cQ<}<2-xwR4kUaFc#Zpo1MYJ*1Iw9EjMH;;C zIXRb}@>8~eIr`t!q*kh)TZg-gl&?iR2*m>jfY*^s6r8nTvm#&Wr8DUfKeQlAg2u0e z;nc%FYLICbTmdL~?&fV{jB`8o01=QS*i9<^EAP&riSDNmQ}qd~?P0|DkZUj&5FuBh z&r3EFY(~1dA-diois@4>_V;xL0u!tr#PUQ1SqB9#q^Wt&E>M@>36xv~9=gKSDW~DP zuApp|ilcOg>dC<;R#S?;3Nx02uT6obr|Q`h+O(7A#=92r2mh7^ABJu4Pd^YyazM#v z5T)3U0%HYC;eHQ4PRXL>u+fzXT#d;e+lQadZFq5N_$WTYX^AWk`~!z)tN zDAH_0STrZ+QB>}xAGiQ#$u;o5Hz?#f{9BFfiLOjL+heFIR4O_oeMyYEbf zT-$OZ&z39&3$hQfv*($Iy@|FYK!_7_6Q$_X9)=RaZFM?S>#U zHh4t`X@E3@gL>v0W{HAnSXkYM(};B0DC@lxR-HMKe&)fGNtT>3lB`JB6L^WD-Gp@)c3r!xE^zZ>RTDag3 zJ;1?BmZ7b*7lD4M=m zhggfW07a}C(7l{qu$1IfBZM?w4+L3`MItc2qeg`-I6zTd0V3&(6*sZPi|P7lQMEV9B-(zo`<&n=6cM%TU{TQNeyz zA_-}pj)`TTrbB2T*fE=WVg1UqD-14z{V2>y#MxIR)o{v-iPgOxs24WTwX{|Slcd3h zszM!lQ93c!T1~p8czs1HF|v}bP}H?uVi7?Ct-9oY^c^zgPj>4(EsOM5X?Og4Phcit zha$d&6Tu4!D(R&_$PuO8zhUc@s7^yVmHk04i50Afr5yny!#-e*s2(^s0rc%zoS&q= zH#nyG10}ES(p5WRNC?iwH0$e%=PDiSl3T1SkqM0kexK*LQz+s)gkM)n!{@Cu;z`*h z17Cz_IbGX|ib6s{JL|H={g^yMI*bj0tQf&zPw1(W>9t$sz-eaRL({N~8*1ur7c;2r z;856@0wgsBRxncgjmMy{WRvP0i`(WLypkTFaM*{Ya>CeiIVxKu<4XLNFwESg8L|(^ zQ!9*4<@~kgatYaN*XQhko}<7&X6Vk~ork|^;%M&{iD0lOD)JM*dYCh9UqevlL+~>M zAlV-ssQArv%q4_C z4*|-)DOQ-Ws>>0p%oaTXS9Q_-#PTPa*5XsQ7Zza1=y>vIa7jU64hrmrZklhU5x*xM ztk}c-QDuRZuSkqyfsBVo@-eU!zOu4@7}mf-D3SEDP&1m=0?PB?K5}#Pwts99VJXcd z@b+zrTVkfCW~PXdEqJo_2| zFZdNPuIeth<4A`JoqbdGq}d6-FePDu=gWB)>WcykB6lcV^<}e1zq{)*Q4zoNqp1sS zr{x3nX==8bfZUtTbDpy^JKV)9i*~o_QGyR2rk`%CYLdFZBgfsQNTT~Zgh!nb``30t zUxcon_h}nC+i#GNH8S;f&bg!j-Q(ec#DcGKPdB;`D=ggF9ZUBQ>=f? 
z)zSro{riJ8(1q32T^cg6UHY!`i=!z@={k!GX?&#BE~bSBq|r$GoaDEw8d>@IM)Cj1 zlX=iV@Kv$Y4v@d_SY9E3>bqm+?}dGRJuOFg?nXw&Su{1fPJ1iXv_$hvf zLC*GO1M+g1ibwcz@DYymyM*NV>;1v!z8%n~*35M3MlrNVsX84?1J*I|O+O0jb|>~2 zg>yyrTh^6=w(zo-w|gNqAo8Q=&nQb^v$)C&xw1`3rtAyF_%reUrH~c@Nwm<0%Yi&{l7+U~ImKRU>7aDKXJombguR>0~>DlFnP!FNHisQP-4-5YXZReuUms;eJ z)?`5W31F#dq_3QpviL-U(eF^s%JAC&PE_yg3Vm7H=L{`QN0z}dz0 z{B&eZSy$48zFdcWmu?KfbS{G~YardjX3Ryr)3PpiN&0+M!F`@R1VsYxk?(h5K0QDF ziuD@`i)o4-@!FL%IUg-wvU^V?Ec^F>^L>&!bn;h571=YnKv4VbKR3=jJOh zvC`Z0D6ps-uXLu<=#_k)uXF7RcTSgr`E~1=e4EssnZ0O8+1e#tIt_o)FBV^2ad^RW zo!c z7sR4|d=To7pHYPgy$@%1g1S{osQng`k_Q_;aCl4uozxCph z1E=aaDpjg#7P?*?(RAYuQQRs^Nv{QXmN5z}Z|uC(FqGzmGy_Xluh#b)&;3F1kiF1Y zi&5Q4{3>i!Xpth)&-T9?=|shBqV_o3pS_zQV1wJgNUue)qyjwDbop(wi_x%e`zy{T zu?lcFqrZ|86qJJ0V#2eDk6<-qPIf}h=STv6YO5-?ghEp79Y@Nz(K6;)LWTX zDe*Hw_*A;h#me}PKzF`_6|hlCO=)2Rrm`H8VIR92D$puls>;7f`NeiRxkFk)FA&dz z30rMmqiJ;1?}_!GJa<@OTP!nx_}(Ap=cd!NZ^q8~0Zg69!#hXO`K`Y%lkl*Rv9szv zAxQzgjk^7rq-K^x*%|=KKE+-m-X{O}{u}74uwL@`6THY%TSIEs1>3 zHs-5eh2hK~8oxERS=wLs6P!AoeiHqK7414$EwzKn6^Jo-+IJ%QL;z&ZszW=f))xH9 zt1E(*`9WW%Tea`MOH!Dguz~K#S*6^>D4aia?mlCi6PseiNb~k~ly3{aN!nZ44zg_F z%{CnqCl8mbr`7OELaP1nFhB+ZhpyLQhlUSmaW94mjVriP_!Hn@SO-bDtZ=z!8P+DU z<`<>0_)BH^fVjir!~XKaWDuUdLUxJ*X}|7xHhn!rK|%zL>m zazuA6$3kxO9Neo`B?{UJd>#>eMOUbE3D;5eV}*NUM48*jI8393)(-*wncDBkLMX0WLszll{sBwtF*$1<5W3L3Suf%!EC!RSs^)R^3zez@+#Pwcy?L<0t;*8%Lb`C%mfbK1hw;oTgc^yp=9YXS0m>K2i!QVV2WaO#5 z1`(ki)#s0eb7Jk~b z%yvx_Iz?n*-3EIE{e1yMX@dFzivR}AZ|Q|gxw8*|WF7{+z%gB5ZHI@m(8y0O#t>?@ z2y0Q77Ydh-%yo`qGIfFxnRg?m56JN#Sc@CGpkb@4nkK0|>ePky^kqxN;P$Osl-WI5 z-W~&s#iiDOo-|#4D=qaS(6t1{?B;HS)OcY$y!T^1hTQnHoj3}f2U6T7iiqQwE{Vup z;sGCfs9JN=ih_GAdl{o6aJ#=*Q(WXaHbAILDzh_Cz?T?hhroZvY4Vcyp?$>hxj*>b(OX_Dr}2w_>Nld2pQt)@gf&Oq zys8CDDdpu9iK-3V@iVU-XvD)E6Tb8?EJ=c7SG|3+SV!_UBdZl#G)~D1wu%q$W66Xc zuT8(f^!GW<@2H3m)zYO0@x|!v$77(LP(!*a_On^bK z|0qi56C{BIbHfVJ1^|?~bh`s1`t32e*ale=sMUOd2d^J}d33K(ebCPGrkZz#?LNUw z_oA$1DlK|yuK>LAYd*M58)wTVlT;V;Fo#u?SH1e4c)~jp@d>t9e_}~OJW)O^;Iw#J zp~P)UoXS`L%s&>nM($ou`#aU;FsrvLW4XHGzslAb;uriL>pVlm6EpUKNue*E0#V-$ z?qQGI9lTjI2|HwNqE)JluCU9Bf6kJ@d0&V|>ki1Ncf2KVnovgCLnCH*ckmHS#^1u; zA!?wGZn1kUv}u|E9hQf5oM>CMJA!hFff;}jRjU0TJ)<~xx$?#5vg?c4r3@=sm0b_( zRj>ekXr#hV%B5RA6+P9z+K(cWC(fNquX+3%__Y?Sg@8Ozy5gshU{bcSRBlU$gRo<= zX`M5}K6#5C5)q+QS+m%01GkmwJ#w#HAHM*ou}aCWaeAh^@{1u$SW2ytz|KuI+}2Ha z>UBpB{$ec4ACCt7xL>N+!4I8inPL%%G94r%L3w{kv`T9iL3{V;{{Q4xx;;u)rHrs> zf{a*Ql_%Aj6V8PWQ65O_(So7|%fEdkvgmRThQej}M^m#a3l?6I8&92p!oJLV16H-% zYfa-dYwsviqk<1GnngC|Q3Dn_iP6qMn6y(XIm+nO3RHF8A}BC1+enqyp>L$@c;=f z3zSkp%WyPXv_n(vBxB~ANl#Nk;y0njnN0cOeCpZ-nN5K>ML)%cNcAHc+5^W@C&}lb zSG6)MCJ`dRnd0F&}hc@SwDXze&jwtTn z_DkudGakn4tHfF|$AiDgq3o<^{EgzuIXV{3G;KSFEI)p+m-#Qf>}7`PmcWCmVQ?M0 zBUlM-6f9xdm8e22m!FzzJdd&<@fXNWHN*&V9OTWu>0y}xk9@rh3%)e~(hvM=ht2#s zKL{b4&jPL)U%oSVM}@=h9KA94t~|wfn+;@fp#lg3=%lD_b0bKeu|D2~T~EfJ{_-%Z zzZ}dA;9*OJ@l+G0EL{=396PKh|NA0YHb3`sgo%6pVVO{k1)nO;<=*z1k5k*x2mt}5 zU0Lx-`>bov2SyJiq5g%mN8}I%eB0ji#c9H%`U9V)l9r_?dqS_MbxD?PAHme}0RE(| zq)U~n-cGY~K0*;YII{Srs zn@o{rCn;oUP}NUV0Y11-wr%jSf8)LS%l<$-Y%`EjQez|0+*bAEgum+8p)%cYx_Cga zYG6>?*{qN=3OD%GWLEC2g#A0X{Y;hW)F8sx&)_A5)`Rjym42lD zy?%0naAXCMm@o2HXPZzrv=mR1Vbr>CZT|JACyfoA5GqErr zYAQcNolO1WwzbAGn#Tiz;+Dl8mI38rIY8wnK6#$RmYu{*uZKEBp=o3E?Jza2mZ}E` zMvCY1!V_oG6K9TvFyTp70{tGlfJk;f@el(hRS^lcy>CaRXKU*a%NZ$|>Fs)VZtM!8 z@&f8boCS@A^FbiMPc=^fv?NWQ0bC~1PK&ELUQjRQk4!pU9HVsu5aGZGk6{^w{4l#X zF8v7gENEqWppS^lW*$So5?Ypy2TE^vxa9#-=o5Q@JQC`T{TLa^wi1A(Y$*}l@x&nl zVem+!??(VoeN(jH%7uvkr+shHxo)#cCwg|lbai5B$#W|I%htFbyFKg=QKmrM#mGZt zUIHJrd5B)Ba^)Pi&lo|ZijB9%ZK63vyv5ISdOia 
z-D;&1hW=lSr~e^OL*H47atb&oC{?5@K&;14xXDUA;@!t)BvssLm-zq(aae7d-HOPN ztDY~Vv`sJeinw(MTt-8v$S|6(F^0gpEgCK21)c|0a|%6EX|&g6ze9Fcnps}M-pKXA z7FJAo(V9X3XQ?56BxEhNOmn2KhpPX(!S4x|d<9ReZ%S1B82qZgPbevkVuOVB{&YJ1 zgr9dbU7GU0XVQ4f<4vxUT$8n;`hAnWS*ASQ2d}672&G=9TGoe#^FR1U4?4Dz)`Tz3 zPKk!~C-LBw#uMz*x^aNXG<$-5_)_FSK~P3pMKye)6r{+s8k=BSmx~iGUZ_9ymmF(8 zhTq~t!x~U7H7V0?b-OGERCG^Oi-ovM9zxS~qcvOs=N|n}W4Jw#*M;wDGHT_ahV4!) z_orXN&$BX!R-W0_vpfiD#I71xp26+EO=pS(P3_}o;IX0GeV4T!LW{hFvJ~i=08Z>x zOo|dU1;%hCD{q>w?d^ z)Pn|C^gCrcVhOH+o0o}EN`u+PZ_zBnLvt4U_oTP}%jt&yxtsz;K-9k~uT}c_KBUZV zAH6$rw8l!nb~2L~&M;!t#;cV+FH|(a@drv-DwZ9`z-0XOUTdQe(P;yX{_1 zkz=9(-K}zQjJk)v@!D&VQS-HFno&3kFsoI&fIoh4v$E9Q8UR#&`{->N;@&xWe^4m& z6;Qy$(qF;wG~c_KD~cQJizVg3y@#~CX<~vOdTIz6sOqJpk`aMV)8I`{j(!mGmsjJES~@LBMM3rQ*=KxML_svzYU z(&t;yM3~BYkLdX`=}G^hWI?(bFMfCMtzJA+<%e)S;E}b`q=e|~SE^kGnt1e% zUjuI?Xd)?ti8$y!|BfpuUQfgDh_6zLyt8F-0+VtODN_ZjJt|Y%sk;%@wPI*Z?$mRu zZ)Qd=wEZY3h-l};=B=_-g`n1vw4T;leoZ)_t{$tr};MMz;DyheeaPd@xy~%)?0%ZnL+1b zMElai`g;0{hQv(G7h9f3Cf+^n4HRGsE)JOrEX>G&+@3r**d1QIRqgY+a^mQ7-w7|4 zbxy>qF#WbLtpF|U9Yd6;BjX0ZF@dYS@(8kQuVYlKs53~}Lx{6bi9rdLcxXZh1jSe zUf8?SWQw}Xxpe4npDU5q^>i}=lGS^8_N5LLZ1;SM6TP<-z#V;L^E}w064$2)57OJ) z40~2cqF(Iv$cnBP#`VdUFmO(KPDx9Gy57rd>3*G4fS~<5sd8pJJ$txXHnIf>&Bh6V^qAJ31E00)81fCbp*Wb`Erx=30Ya4Stmdzw*-{ zv|rP7*MkaXHS>Yg>WMh`CjxEUKe~JLp7p@xQmFfU+NiwY-D5$A!@S*x6U6Hr7D<+d zXm3}6Mo9WAU9c{uj%x6!8QZxUtDl)iPW;>&XhJ(nd%L85oZC2a`>er%3=&NF z6C+&focY4V%14!F7!hAU&ffYB#^+jTF?xzoIpnE&F3rsNAT>EY3cG{AUU*{b*xkX- zO83I8a2AM#B7Xz=m%jJh9stFY8~4yMqXq&UKll<{Qakvt{*1FLO3;;frYasR zpEEn2mU+wvMZ#Gu6ck$Q=AcJlU-QSCfbQUIE{}svNW^?O?8J%Lv^YIAS|p6;6>^xc zkd^JoaF^mM%agigl?>Q;+UN(OlGsQTwl3C|Y?xFSd}>Q-Ife5h+;&9)p(ONhNPI?b zPDL07TAqX0|C`hjUPbJ8y9~v4$nO%GHaepXGviQ#?HjM^aBhn5SsA5Q4rSaHvXkZA z(yW5&dF(XD-c$-hdW*m&pyYTGkuT1zc@h9I<816NN3i_+0{vq@ALBXwp_6@QC|eMR zNZ>NO*VFiBuQYrCH=RzL_DmCMVD7~W6&8+y%%FD0zAKu59ZrYu}MrNoQ;mO zn*t3CqIZ@j)1#i%E_`_V70;STr`ENl`T+6BVabK=gv#wonVaq~{!m0#5FUI{7P*g- zwMw5=G>IRP4vyXS#DZ7h;Px-m%a&$9NK+#b93*WMB!Le;lAYi=ExA7$oKrHBKg_+-(pR`o$#ms6T8B(|H_qT+6X~=!4fhr z^L8Z|Mld>eXhXw7E|Vjh`&``nM`i1M7xwd63~OIg>o@(y>hw2bpp}|(D!*ZWi2*7&4S1x>}!%VsADG+B8{)YSqYoMnq+KM*x z&;Fj?9$`^EpC%%fNfbGfv)gIZ;zKa`dgM%K241kD_&h_$%*P6W8F*%R!wDwt;1$Os zQJWo62M|e^G9Et39%+p75uI7Qc}%p4QNJpijiS{W8&Aig*oZ07S&i~rqD4(efvgJ9 zkTL}V;XrEOGx3p`16=TcZIf>L5^h$H))cW{pH1Lle;4Dc!l>u6p}nwXq?6%dtT#cj z9Ead!OOZG?_(B=>JKz@IvsNZjG&wRmL2I+QiEx8d?w^xnBGgbBk=IX$_NSLd;t|v2 zf;Xp2Y1v<2CHL(U2ng}s{>1LWnD{s$WOToC1P)xZhf<@B;Hp45W#kDFu1~ ziu%$7)5wOD(UqU5ibFePO@b0UD~qXD(Y~@>E;9@7`m$nT`w*@}8@Us?dRHYj3Yt z6`hR%8VVu+;9Xx~is2F5Kl;wsA;F`}OC$`aGawzXR64jmkzRVq6WLC&YC1%b{QO-J zc6sKgtqSGB7SME$DsoVHR~@hfQ2wvu6SIsV*r4~ zhbYLBs~tVvSfCBNcwNvAv#ko1FNx)*yl-nREmpm3z(@e6zw0YIhTZn4zeL%5ln($Y zg#w>(sFcVRz%W|leiZ5tZVr1ql$$=YgUtFK1YMQnSdNE6WFWo#$g(NXfxejn-8)C$ zSn>q#TqTrWM>@|@(|@iU0B<-^0tKF`9fO|WIebkl`|FeSuIn7R12*Onrju5QSm!jKe|hltY4AZ& zF980laaV!1g0(&v`Ai}5GFd8XS<%f(##^Zy+aP(wS9}W&L#{jxa(_X~1M?%s=7lW& zH36y0=TX9PtDE-ZbEVz!whR`G_6>Pt`I$YOYJAq)T=NvNmt~$>b>ueMaG0~r$y2Q@ zYg2X&);-8Q^61ME736sjH$^?M5??9dj>e@@GWHN4;Xv7nkThY}k?j>V^YpVlb>i&V zbEU=w@9`}mdG%LNhd4?SfvE*(t<-nZV+b$cuxl2;0FN0PP@kQ*S`Q6RQ@=b>e&k)U zK8g-|dV1^YRlO%VplPk;%aTp!s=W&4qF{(f9-pz;K}isP-C`P>vm9Se(dTB3tw$^_ zCCA?6Z7pUnXl;bs16KM~;91^3&})ozT@=AXGQlwy)RXnsd8O;U$_W5~5-=5I|1Zv$ zHv&_h4nwu%gzO@{hl*?~%*7(Hj445pn{gEhg>v^;=6!AruwF4+FlG0$y!fWiZEa?Z zr-gEGTOoEZax7zOu-;0B(w5L3He3_Tj|?2f>X4N~y4@MT%+kgerSYq_N$^~%bp6QF z@mTtqXY5>h@@fAYmpd|B<5-Zfz$eq(^jOKNDU|8yQu)tCe;}E&7esm@M+>(}Jd9&Q5vwbcqp<#N**v|Lfk+^cRhTvR@`mswOapzrv6L+_eZ^!|M-Fy 
zRluLJsaTa>@55?Xf5amDeYzgp2ilX;;7hM-7XEHOVGY$aU^4d9m@k+jyl_X+#{F<4 zM#fXr;xsM|ZjYr8#o z)JjppoNWN#L%A1`aeE3Gi=E6OT)u#7LmO=YMl}ivstGg_n~t?J;~?IH17$GDo>VIB z!TkYIoggE;M94a7}5(*Uz(YQ-SDm1?X*5rBvZ~?@;ys)`Pnb ze7UpdWErMTXo+ao90T967vfbnn zR<2&QPx%Z)xnKAx<%;$;dpE-0(!Pp!vQv-SHpZS3{(k>#pOq^jqu`#ZIV8U(O59i} z3dm|FwWe?2QpEMK(rgYox|(_T7>YNKJ}9~=8*sxm!xoGefghg!xtC`XqEM$OpA~7* z?6`WIm&p#VCP)!H2{SB8Kl-p6v6y;&7phz^!mP}51&mnX=tB914{2Rat?3AVGzEfC zgZX9o0;52|*HW&ktR!LJDs}@tAu6-zQHl<*OiibVUO#v}9ewJDMLh80SGO>^E8&2~ z^P4gz=wK#dquP_#QZCQ%v&-Q<*zmkk=C{5dxqEz@e-~%^d{oO;AFSu-`fm-3J zOjNwR(>@e9@W@QL@Uyhy#N+q#i`#8uHNqWow_!7p$Fv_h05L!8$03NdkWyk5US5? z_=4I`R15@;!86E=^X7mjx-=6#Yr2>?%iXkizD)VqY)Q zLza9g zq_I9t)YX)mTT+>)gV1wqMy*W%`Tmi8tyBuAwBJ9-hQ8(xDhsSIvjDm1P(lmnP*B>( z_LhpwEUV)Tt(v!j{78NtTEha9BbR|+YGMfu)!xotR{_)iG87iYoi*RZlrXB!8_3QAIFERYvO+L(K6D>GHtJxx!qn*5B>o} zw_tPMMFeOZ%ce4DMZdj~X%%d2lhPw?i8jcVQIVair1`dcSN)1t{Bq-vil`SFX+l^E zv5hSbZA8ugo=@{FOV6w6m2y=Xrtq|tFAkKP{CdmpjYeIr&;*#E@C_B0lETxZ9kE0hY!RtQec)|CmzxLDm=X3L5&xrnm1s&uEA6JV7oI)Z35HbaRd%6&d&SS;mD zYo07pIZ2T+?Tnt>48$cH=Fb?C$jW}DzLqwH3fn|sSzBZyjgX=m?)}~;Qr)jaOgujI z$FHTo@hRP%b@dq@Jv=~vlo3{g7@np)o|J&XPoAu2=F;U@o=U%edPI@udz1uxI_n%Eye3p>;Z&}mHU)emkS{`Fb(M^W79MD1FVl6W2@=bmG zf&>G0l~;o*LYvg&%5GnQecw@O_KW4!<<8(B7YUFp*{FTr70X|A*gmHNBJ<)@tKG2< zfIID#!=O`W3Y3*GVNiWjRopFkaOOzl9^yO@()ZE?+bCAs@M_D-V?#LHk}Lo_^0#ob z3jN62Yrpkl?$H(o!^Ts#OK8Q9Oc#LW7n=G5U_#O!kv2R<9BCk7%^jpz8r@+QYIgc~)&QLzBy^Tw>qE zbl5)14Pi(d_b1jBFX@%jSP<+2F>6h1iU@%ZY1r457_%5paF?;wwH{*b8xzS{qpl12 zWLf(0+4WQ3lP_gO=XSs9G@dod_0U#*cH5o3W9W~)v;aPVD=YeGp&HzN8s;JDg#37f z{?v%zU^SM5Xe{9n^8rnyQ_o+%q4^froV5zUFw0U8V_=sBRU+^so5QXKC9dzM$$gTP z-uF1**}R>JO+{6&uA?KexQD%rw!kP;{ReD}4jm!{q#Gto-ikp(1}F zB&X7eGbcRcin2#m9ekCT-Yw`i=+kB?$U<0EAItCm?C1mje82GS$%3llKU?V_LRR1a z>PFNv#nlc3Van%}X&J&7}R+uvlDi$q>WelEW?5=nGGzlQDOhJFH3YMv$_>8j~N)1c!c! zjYAZSKb$!KLVD5@cDTJM)~k{pn8Z9=-Hdyv5ec0o)WE5#CU0rit)rae@^caL{v0u{ zC0k493ZdR8;pv@V0wn(z@b+iY^FIDkdc_YZXj2RVv#-8D9!EfBX*HZlNJXCZFYuP0 zICtKk=qU7~F?U3_Qo+2E^? 
z6wV${JgAGj6JLsooO=EDC^G+~tdv>OQ8nz2(x^-u7vat^N`q*rcG;_7aQk@%_#hrw z(DA$MtNf|$H9lB+CYX(g{m#?H&{Kn375h2Nxa<49>;6ggGyb2eRG}RCRrezEISJA!v z+aP2Kj~CSha#mSLYmBLq(&|E$>-DH!Z6(Bu`oQxl=@ULTs2^;HfAV{>2bVrbl+~|= zS%5Fz3xwTM5T~8=TVOz{7`d3_>?8z5c)pOHtMFb|b)Dh|?H|~fcH~#;M?0s@W|%J^ z{-C*RF3tAcl()c(u$T>TMgr&g9iZfy?f7s_QabwDALlW`F+9p56lZ1)zr+#WrDkD2 zBz*z`#6a25e_fRP;N)U$D}2{b@shz`t)TIz(&^xc z(P{>Q{J-0t9)jz>XBU6pPCowGkCQRdlz4ggwdjF7_-CMjOyxnitt}|Kyco8Hj8!o4Lzto)s zB`^{(%2OxK5>)jGIQ*hHsf_kpx6aV?G-QKj>sr2VAKg28(*oOO(^N8?vwR&zvf<30 z9*I65V{NMXa9KSVufB@ViR8v#ur@AW7%r^nMr=I%<#``X zV`Kg|(D%sbK*Y8l5hkXk0EyU7Y2fa2Wc`=f?4nY_DH_~zTB&6v?orEI%sr@LN(uy)napyWjJf1PibSB08BfI{O8r%Hvb`v00Rg zg0Fl1CHA7?ZNNmwM5>m*T6@FJOzP74rF9j-c;JOX7&dCUT1CV!?*f#@RjGS_uq>?%RQgzOOSP6BNdq z;O!N?Q6}mY4|NA`JlC_w!V>>|IYkgw(J$|(6+kfl8*Q=Zzeym*(SY+82bqlX*sIKH zFFsM}-5Ia5C^}QMmPC@J<-EA+%Jh)(0)jM9gu`D^W|0cGqtN<_X0ZqsD~L%qHvPnX z@p*}nB=o#k>uVyO#e;!BS9y1O@=(=HC9QHHHauDtZK1z7C%0~iCPIo!dVo)N(+sGr zQhNxIPp<;$Q=ANENMuR}N@CT`+@->!kOx~~+(N9V7_1EP%(7~WgEQtQKo&zkHY!9WW{78bTz6IsvAH0mx+Pz4msJ-$T)3ND* zpA>5^bJs*pnV{%kqU}gR58~KzDLDCpDN@<~rSj3m)V^X(EHK>8QCRrV0MMVye})#$ zFUeVmLnt1Bw&^J0$>Ly*^UeI{f{U5==H(P%D>snkusLr zW5fMFI+V*pcSS;K+rS$m6`jv`k)gMieMUH)IY;v`Y)~)%M4E2>)CW zTcIPfMNu9|oE+IqEvrOEddk~RZIZ(2!fd8zyT3>iG!(HFwX8EG#61rLYM>p*xX)=Y z|7LVo+%O*S+-%_^$-I4?r8!>@n}y)>{IZt|9n1L$fwdc~tA3+PYhgGF2{DegEbWLKq`|rzmEnXujnAI}c zwX_>x&$MqcatS7S2xIOi#`AQ@Dad?URO z5iogC?3;F_c4_Pw;tGFmT3Ctr>N2NQUG_}>!YWM-<{P7cG?WLDZ6RW|URhEw@_#MI z-G(4JO`@X8Pr5J)F0~C#WJ2Xu(d)}S?J=Ew0Rk2|TdCq=ixig7$0H0PHSGPpzc?qB zbGhHdeaoFNWI+h|hafx#oLh39O49?%?|E6MAf?cyⓈw-4RE!P@iC+j#1L(`l}-2 zJoJ+ga3M*Waa$+9C${!BA-+g=_Spi6h{cBQQu-{WunhI`V}Jy}6u*y}>>b(2AzHRf zpG2BdSuieAJn9*z?t=ov&@kw9-O(VacyUA1FG5iKV>;Jpebn*w#U zGQrT;4X$SedhV2^A0rE z%zY6w68x`so`1|2>8&Y+uv=V~`=eKZdE?K$Q;;?|rXYLf=68mkFYJ%YX_Ku~oMeEyKe6{PTX49A83N z_;sc;fVa!?0r-j9AM3K`&lhgU{k4)J1l3+#Ub)610?Dp>xh}5NS{nY3&`94@Z~s5v zN%hS<&p-jwWG{dXSXdYhY=|6|oH*F35af7CLur73g5f5SMcwb0-H+>m+pnGpua#L) z%%PYkbURXotqd9N>u-@FfD)thm?Mz%W-6^$33bW!Q}CoL@|1$uvxT-m;4g6M9t}Pg zg!1c+59d8anbDP2dlC0^AxzilR8YKKVBoR8IkMLybVTU&F!cR{zlIIKg8bm`P@pSb z4r z`_+#=B$w}f%bE62_Z-+e<`z4Sq~!4BxbR_P-R~BHJe_wQ8Q`9`(7jicumDzBc>hEAI?hZ6o7d8o97=(kDl|;@+n=Ykg3)?f37?{V!Yg|@_NA4 z4=Z;{&V6q!ED(_sXZN?PBkMtKJ={kcaYULe+gGQUOV#GFe^A9~m?pXBB^B1+6HKE9 zUrkKWZo0edXCu8nQBgfmp_evewb;Ydr05*;)8phr67#7&+6AH<%{k}I8;<4Da+>vH zQ8mq0-XDZi9Nku)bz{*7%3U>X`)B=gs>PNcu7#Ukp<^B9t3ok^**~TggP%uxV+2Z2 zTjYzZSAQS7M;sA^9@VTTj>YrPGM@F_CSaNkMvW`fQ5MKKrs<8`sWy&UV5LV9$iex@ z73!dOE%JD?P+j=a%DK&Zdy+z-`4u?%5+hSwMJJcPLK%~oCDcf#T;hECu6M~k4K2O3 zrKHu`YrW7NKzc8ZZI9%)^Xy+$p zEl29df|c*fW7pi4lhg<=RsnHKRU zFLMWiBmaE|&PdO`z=o&FiZ@ek_NGde>i5W5GHx7XsUI(8~ z=!|v^ah>4{HF%_nEPYUJ+f-l_v>qJ-!5FGcG=f*cdtDDj4RDJ9=ik?C>0*ofkUA>M zl^ZLxJqwG(R8yq3c~%j&kVmvsaYS z=I^qgmnR^O4lJQg`nR(F%8-QDPCk@M8@=hd{UIniM<;%iUh^sSTr#s4GZ6UQcnIXGy!+Q-K}#=VZ>VWvX% z4TkA%B}N>DKh+7C2g)xz_|aWV4Un+xkMbKz$4)-;OBMGMJt@OK`|Whr zKj+6N*GZLKc-rQaA@JHWoK?jteKA5`xDjD3T_)5kF06y={sBO-0Zw1!uv>r)mb{zg zV=A$T7z7DqJ3Vd1OcRx$2l{cE^rUgHz)ZlN@%OfXQ~7V%dmbD|7XE3BH5)*;qU>>u zpz;rgAlv;zDH+_MGDS;(13VuJ&z|=Sba^LYD7&2c0kcuIF{e6xQQ3;5#VqQujT3?u z(LE3E3CZZ%vCzctUL(gby5~C{Y+w@kBDjf7D7UU)Ja>jfc%i!@B*TjLHr6bZh;N2j zmB2_=0SwkAE+K&o0x{*Dtd_=lk=6FWDJLsizr;BjyTln=z9vKGRsK)hy}3@+p$0aF zG`Y~eXdSdf^b^rpZ?FlLot?h$0&B7RpRB-*^HL?hiE;@_wN$j=#avB$!S1z}_M_73 z!98juJ(C>E1%aLqx4m$B@tz~EqGn80tdSEdnzQ91o(iM1#9<+C$D)dXg{io=5k2x0 zbO^Po7oG5}INTFT#XsW0)`aE;7Dn#h-66s?NI5EFE}9TFWa- z?IH$>i0h#7to`5zrBAME&a0vtSUMHk$*4%*7Abi}BK-#sm>4$T;zC=85Mb&9-L$v@ z$06Q2uFti~p#UEhad~jC6PUA$3*F^3KQR}25mkOZ&H^k%&yHfhl4PxM 
zLWE4Ws$`X-5c5_aI@)jaD$!`j7?jraLXwf1IJ(7IQ0gv;yUgS(t+|d5`CX21qse}wgG8wP)(-G;w9p zFXqwgjDVe19d7b<34vN#pn^W-<$b8#p)D0)>+(Dk8-KzwewScB)=k6tc#0_ie=$V$NQMl16a#QzRmn+i<^%83L=ZNN~_xm^L#hJ0Dfzh^n zZQJp_tS4&r3{UqI-|L6sn&HTg%J*)%xv>#O8L;caFbtX3of2Vea{6!KOa7)h8FFXE zoPqX}2w+F}#mhsf_KSD?SW;u;>r|lP&*c{0_A~YobJZz_kfI*s*jKhZ<{Pm#8jf+W zSd5Dy7YoSh9PAvRvwA84@^S^1wFN~$_A=SHLxs& zu_S?2iY+dw)P!vkQ3kfeTzYoVH&RDz6b&(Jzr%FVU<*5J<9m*V-2HPPwE(q+3Tj*l zs7xSn;QvDW-YZK7vhy`M5rW{v=_yjGN)dW-5>xb7S&TXOJ9a_MD>q{G`&4@335Nk@ zN^<*RdZvBV@6-R;OG-AND5~6_G~CeLG-)|O6{?QLKLSB9`q=U++|c>Q{EDLiP?2Rw z@`Z@(4z7hl#sy{hDsLLl=2mHc)Eu8w&Xr6EZWYBDZ~Nwu=oC(+obaiR=BtW=l)JBr z>8R$@;O<8{u`|NK=i=G`s?h53RypJbo=>W57cFQR&n>|$Ct{aC4{A=dKAtSVh433j zBJ%J!^P&ZAEL&`^rqL(m`yB!t6IKkH;%3*hS! zvn25vKL_9oMyb5it_o;@v`j~hLo*$|*BCU}ZY424$Lqb|@yQ;O0au;HE2$Lgi(Kn zr-l6|7uSQ4(Jn@U!UJ)C^qCTT^p`vNyuLHZd(>e`w=XYE%y$>zI$w6M1Ihih*aZ~J z80#+Af1!+~gTqnmxD{L13zj*Iq%hmDp(7#yuDrx*PC#LZ&E#Fr0M^_M5)NL!Y>s1H zkzw^S=SHeHMxNaKnpi-l1hPm;(3XA8PHyxO{vCvr7tTaa8u7Bxa|4+y0>JX+fV!{7 zhF$FXY6le14{O_y+{$Jd90m)XoF`#@n5y9ZKDZXRJ2qU=`NW_32j%|9|0zRl3R5;B zH-K~MWRV9D@x)F0s^x>lsdBoQcql$-Q z5600*kj_e;eC8~q_;Wc0Kg5+(C1P-Pdf)b*%zbnI3|!8uF}nL=imDf#IRwT&_70#U zp!R1u<&5tp|ZaLc!!*Mu4(f(B$cNEv5%!-yC8%K3U9_*SUn<2 z=^zFW2W>xCtY(GI&HT5XCw6x}XiJ@eIM(sfU;mmgV;K`oWR8{Sj0wYIadZiPc8 zi%8nU&aMc$bB@l!K&Z0D3Um10zFol^IIfhODS%!-LBXk)nv+P^z$z~taonHFx)j8A|l$?5`HJCd^ub zO#;`%Cw*?j0M$XC0+Tu|Dfi>zh8Ed_S{3m}e>Yt5N!dn6`*DtU=_x7St6(j$K|c{m z1H265NzxV{d; zwAoi2@oEv0b&(B;udy85Ib@M>?4IHFEzyQn_z&78<9^z2h=j?Ef*s*3kM5rPSvv1g zo;wd1@I_y7JvT_MwybJW)9`qg|5`H<=%d?apdLxe@gSkun1Tf+F~0n^n20~K2;iDy zzLE%y;I?9?4HX9R_uu84m{; z)!nqM`_;)GSaa8M>kJv93DeQF2fzIKmtc3k{-vc@CvMLt?xpmC_deVW2dl`EWtIII z))!^yy`VF}xIc(+!|1MBk60v>h}wx_9v%V74GXG=%~a@_X0Wv)AU6QrOu)r^M|{N5 zQqALDr=?TmF9%RC`Gd3IRnPhCmbkj|jq8b7v{|yUC%_;zqI4#bS>oxLB+OAxU7feD2x}uFJ>wod zvN@P$XR1s?mk8D(mc2z~K82CKMtQHrgd9k?Jo{-k-X{+gi*OUJ_`^6~v6EY4=z9@% zaJOp1;lZeN%uX0NOIXqi-40c#aUu{k#y-FT51%<>USv^~YdrDY^cU8Gmzm}?ZkFY5 zXq%+#{sw6`QNVF}Y=*4w%WU^r4(Yz4B)k_K2S*8C@U#3;R_|z^_$Ve~<=d&VRSscW zxwRH9G2K_=N@Ufz2-UcRn?18r7;)wA#f}&B!5gry`2|Uf$Di_5$4-uCb-<(^>~EKC zOdLKfl`qexnZ;RONSk%aBdZH%nTE1pa zy0YQHM}Jst^c)XnJE5I$Q@j{7sz2kuC(>33%N+hs00A!(S7}F^;NwCIFB0m?8^R(O zR_#aNJ-Hr15+Msq5JleU)P~|J7RcPYlRB!ANf)t8r5ZSEJ z-4-E$6xMUIieVgJ-v3oP`O0^}flyq&imTx>fb&$4f(}n13My7R`M~3RKK&$U8y>vI z!iKct+Xl#+RR+g$Gm;tP&`||`a-DE@!sx*hSR2G)p(T+S(!=Pkbk_vn|1xkIS}$L2 zx92?@O3~zBsV}KV+J@22Kgq>bnSR1+LtePYS`|Vq2`1YS3p&my+)<8_Sp6K>RD^%<;5~u*f@TgpdG?sAsq4`~?}NgYoLZ9GE0R9A4c z7bl!Jen>0RuhR1%ZaGZzm%Xo~FBWd1E6Vjj@8vuC`h<}y)q?>Y&rd7^&wEfL4SD9B zts{JE-!SVIl4YLN(Ou9=k9!d*xlG%hfid@}%%(D$83rgt;5k9S5S`l%F8VXh-0M$#iMk{7wxPQ?}(VTf>&DF^4p7K;ae8PBcfk%;ALAe zhZ-!$h&ImaS~xcfbP34mpdcrVaKn_mQbR~w-cz|A+(0yQKeD5u&1;T9Q5kc;T z4#dj_ZBVLdrZA$3o9$I2X*%uzXcNLaoLyoYkZ!zdxqcR;cvd)JD=5a9qv7~o5h`8e zG!w@OVZJg^i_fH1*E7N6;J_W#b*Soug6zBvK%>_HgJMIL-TrarkM7?;K2?xWog43j05k-DSW=?)13&Z5jzaw(6P2Rh>+fq zLaN6Iq%X%gcH8%qnye%hUnz=svxOy2JE2Ydea_HQoWY7=NLUCpnb&((9t}nGA1OE} zbAla}cF9~HR+e^G8JCRi@G2~hQ44-8_j8?D^V6Uw+m^{3kZU5aersEBo&nMPop?+v zA2yG5YECb!s-{T6P>jIk5Vt;39aN<+GH6+DtAB_YO`**_&&QCCTifB3aYBg#vy>IJ zfV)sxVJVWE4)GiKt-Y-Ziy&q78!0Rg;tzgqJcKh|l}jvJ#D$1i(y7&>fN=E(-~ORr zpOuUmaxm>@U9~eH(QR z=gy^{J!VZkJ_#Ex7tiAmvvPAYDm<|va*2WfSpFR|Lbc_1YCMJb;FPtkXR`A09$ss% zp{WBytDJLtp|2PAPZ>L$1hgjYTb(?OJ+8+&s)7nFL>)u0u-p?s{3MNj?VM*%VA z-8bCcU|!-HtUfqkGV5z$!(^P6oQ>nIqo#O8m-b{*iN#pdLp5bbb&&lelmC z`_=|y_Nc-@&+$iV$eSKSb`aS@RxhfH@9p=X6pwWjLt;7=ZGmi9ww~`ngzXkNmHn`Y z7jmh620eI|W+v03_XC+ac&p#EK+Y3tL`s$m@$oj)#i7qu`HbvV#S8andd!y^?xTY0 z2DHL6;XRb05r!e&df(3r7F+3~SRoL5WDfi(+1Y8Fu_e!jf{VnDfdEzyE38x+z2aag 
zN{4UZU5-z2-<>+%4KiCANN1odpA*GDSI(~Hbg@Uq^v$2{{ES?9>Vp0Gw2mAXwN05mX z3YN|uss;1P0rY^;lnW^03WC@ggYeE*qYqTmPM+9LNgnE_&`^}a5mw#9Ts8MHIdY#V+6pcp{y>;J;}zH|43u2Ds9{;X!C3;qdi` z85K|&aof~)1>y#CA(O-S%POBO^_z0IQde1iUz^?D-z+wo@Q^a^i`Amzjn;|V@{j$z zqr2E*9{0KLq(}Yi+v=GvAJ}iO0kI6p{`rE%KY#r3^gVARVYq2Sr5@(79Jpa4P|^2b zYktRXUR+*Yj8v!Zk&(MX7<+j0szkKLvF77If;BLP@3B$*!NTiT*ZCi|+()Wu}> zJl~sDCH0J#1=r=h#j#R;;=BJs|G*)jf2!SZU$MdeWaWRCDhXobbRH&}mGOcNTMawT zn&3w%C=@ROLGx@}OjFZcf9IK!S^=35;N1tmhWg4sU}lPu$0pM)Ju5PyvYPThmp~#M z5z4i^iN#=d*Pl}8F-;+GK$iq|q<<%R$QQ(PPgq6e*i)n#=QfNs1oV0@0BBR$*9_bs zA+0z*GhSqLe;6}X7yJI>|4#q-pLhaupJQ*ZOxUqL@NpnY->FP3hAo66;Cag&RR>lX zdKL+P!xQob#As1bgHNbB3<#pVS&K8sgl<0vtfb(7TP}Qd!(8 zxEdE9G}QOeY_eQE>7RnyW1K@16COTvYDU8iVqgCI7vW zrlO1(A%tHYV*W|_ro&J0CFAp$gdTL;Np#&?so{7bB=@SuS`}F(sch>*z}VEi=6ab za|##>!qX0qUBdMD%h^n+T?N2t3$Qkb%8O~*R6}5E^{OGlz|F+ShbZzO>y$yA(s*k< zfm(_f&1L@K-QIpk;eBYcK0^b)qaXmKh9{qeGNd3}AqL6mZ@tKu``C1TtlW(E-4u4p z57Hz4-b|Y4%=`I}jJnM&FZcJOa2}T1EB@77TInu(0)h0#5>RY#Ksc1ADdn=-ynnLHlq0(Y29_*MqnizWyFX>eFu7Mw97mn$wXcCP!>=na{PFm$3Ujm$7X>tv~f-)w5L?Gb8qn>to zkflgC8&K556urRR*9U`rE9sn!l^LW0R#G{>OwQ?4CiHCWuL)Px-|DYh2y076e@E#> z^MS7^^4~%5W@S3BL(DBt7$;niCK8?Ng;L2`Iod6k{mX*D6@ww+oW!yM<%Gt2xr2q` zmtL)|x(85qz`VeGYAZ}WGxMaRmEWQ|iSIEBRWmmAm^_RhH6i(nLfv^S$(N|s2@IVt zgVV`n1gZ~l%fY}(_L14J>_Xn|@kC%gRpqSiou8e&>nm9-yqz9oU^o^3q`8dwh3pdlp+-;t+hFDw1)pHmH@gK#y_uq=P?76$KNH`e}zCh$^D&xVW-4=#o! zKZ&&DQPG#;!v^Sdmr!|xE_7|257yp`OwqyV7ooS+BJ4&#;Br@r`wyj0R6!l57cB0I z1t}zy^S_n{Vi&#Io9hv9s__D=rzC)k6!{GQR`I7{6+a*Y6P2+Fedk54K~G9ddm(M} z_+pc%ymbyN;N5IGOB?Y90jNDKGon=Sj%khS$~$570M{awP)O_^{7N06$UK+JEz2?R z4q2E=SH0l-sI;8O@kp0RUfhHv#ILDy?~@*mX&}~6c}ABfC{?pWOG5y4Tp--!%jcs) zfo%_k9yP+BcpmWi!AAMf`1VfI->?W5WzL*p^2x0rgg5{CuN2aJd-TQ0{l6G{H2p&$ z1+yf5cl-NCyX(^Dq^R)d4{@SY?rHP76@^Hz+NBbDkC{^tiKs+ohA9%2rI zgoVJdTUa;NH&WD3AjKu6o72rNV;+7PENe2LGP7x!nmKgl`c9k+imbIaRi5slh`2}0 zQDuIX3FoVFxO_vY#vGUAe{upsIQW%!a8{dUhs3V zk*NZNEut~T{d>+(%)W?*FyHK+)aTUvFi_~Z+=X~KY}X}R%25m0ZE~U8E|V%IDaFdX zGSoNRAQ)fs%}mlkx83%$jP5@9z4WBNPGazM$?-m2S%xLIoe6C?4msWZ6ZnrazU?DF z5ACGBgeS=xo&wO>Xm|vd?ArFQAEaxl*kd}7VJ`}^fr>748_*)0|Bmk^S%gZp7(%Gs zK~j;MZ!YZx#L((N^}PPEs5bI$@U2$Ig$VvK&V1xXPz@Um8C}_`&~ZYyNnb?Lp;d?} zr{;3Q4-4axVq*;g!{fq7BGUb(1pylZN|%NP;7bn>O(`@+VOImTm=8d9zgKu-wlgyi zvB3PYZ^n`wD7OYHm>(=VWFHn`rjP=7=j-J@bc4Zqh^;lbK&o-7J>{FI20|&ma+TJ{ipF973+7OlB&Q}hYfQu{3o+0Gs0J|&f08<*}$w&mIiWB z@qS-yVm`IX1PM;X`cfU}am%~|E8+)7l&V%Y1QKvw3Te8^r51xRa7{TxqSIc`wVeS# zhPj0Omm^;|C>~z#Pzn*tDNb0HC-=Q7*lFTc7-7uyHgQBRg&-;(1FHD^_x*xO zy24_Kvz!~=`&a8}KSHJoRs}hqdZ`6qhsXoagU|^;u2US$`QgJBmgXe z8Zg=c&=6%ju+6f~o?!rmizye4d5YK8(_xX#ALkMjPA``E3;q9)f-gUa`WBZs8BQY6 z$l#BrljW((R=E+J>fV`|)3IbFB$ti86wt=9aOXmL^hLkus^hzFErax0uRPG~f(mGC z(@`9O(Qy+hxZMqdy>5;FN&dsGqe5sxuqvAJTK@N2MH^xOj=OAqFnZl%zo>%Pc4&&_ zv5QgtO8#=rRJ>b!5t+-wY&83cS~UQiP}t!qTs3j}+XeZV58yZ&l!I5d9Zv_57ivg6 zKAp$Ay)cMHlg_#6?@Hhn=R8%yZcWqYq(+${OxS=PB_X-Gk`2+>^OG*Q(PD#F@EtJm zCYItP+*IsmnhNSdNw0DfuiYZb?MCIGQS!PLXff<+@!X>iy&vqG6b;uXIA3lPV|*Lw zwif0zdqsR^r|MszzNNy(!=3FiaA3CJ|s5 zBC3|k0y*rmTkf$&;bPI7v^(QnM$j<4do~>5=CCDrV7S16Z&V73jTXzPLR3fwtN?Pg zDHBv6Qvw)7d}3lYwU@#U$VXmf{Ii(Ee&b1B=Sfe#@VvBC=eXpF+)=QaO8y)}3f(HJ zIER%2nX!eXOKEJ>FwRX}Nu9ZsIX}5x#trYW@lHPSqbvejAhuBDIVYB8(j^Px51&iV zf8T>+I2_*I!#Vtudvtz=`IeFBz6VY zTF^G<$Exd1h5~jd-vqhD}e=ygmkwF4~dc3Du+X^fk=UU@$YCaz&Npv%9qxu zpe5eRF=i2DZZ1Mz58zB$q z7-qzg)E0Lw9V<6esDQdKo~LfxSVP?m67u`p8F~uc`E&kFBBo?{uXuK@q;AyG+)9%b zO%#M~4O-vNCkI|-Yvh@CgOpXk5ReoWB&EoD$JJ`)N+dtATycdS%C~3I(qhZA%*<=o z?5=unXV0gf`@70MaBDAj54SsBjP5>@qE8fObrWvXSp9XlG)p}%4QH^OE3;Av$SPE) zxgzD68wOZVW`xCUnqK-XQZ0yJPS5!*WlY9o*PgWobuC_j-=BQ!2b-Ktq4ef(PU^s6 
zW64J(5rsi%5vkXDb;rF6{!q!)*MAvzq_#N&RTLvCiUqg}r*N;TVJSR{UZ82TBW`r8 zA8WyO?^~FW_lwvp9Ln-?(kzHSKY1>+p^5?Tw18~JL2m)z#~gO*bM$wk_q;6d-d=dr zQhBjp55R=q^xM}{&_AgT>m92G){K+QuNsy&KZmr)&hh@X?=!j!I@4l?lh#E1yc*|T zOfQy=jerla1S_aXlcL`sG>0qWFB;A*o*Ww-xCXgSQR*5+;L&6pD@d`j7B?|j*k!B}f6N!RlZ=$mFCNF+Sj# zC{L_j!z+GCEe#0J>Qi+gkF$$(STD?;8>AnE!?I+PY2i-3H7wM}*?{rKS@#9Xt-A~u z&^J8g1Ly;~n%%-blwt}CxMt>{xOd{j$(O09-Jc!;Z zjnAr>cbhWlrG*IKtsV_E=-`|r`m{=J`+A($7t&A8c)veW(Dbk z*UIx?$Lt(eQb??p!bI;f*(bB@i6xI(_Q=GnUlF@UFk+rr1>Pxc3axvoZL1_!ZY*ux zxtoP&0{+o?zo7GK$&e9M6xy23KiuWUo@!-}NMrT*f+A{Httc4ho#1afGwT;aU&O%hBm-I3j7v zq*Q5I;skkjX!AVcN50F|a+h-cYyM~r{2g9raCi$ zvHkfO|2PK%t+@c7vtqd2z1WvbP1(^RfVy+XKZVBfM;AOPQY2kKCcEJ~OK2xso@ODE z*t7%cmum-%0{y_z=Hlv={dK-kW}d)hFA6`ijU6}m;n{SybYah&E|rPKtw2jgY<7Mk zg{w3*S+Z!#&Swe=FFhf@Q=oj{l834-@}h$DYsllOssXh2FRO_29H&8=qh8yOp{y6K zuD2*NgX=`nsJwx5hKCSu;Cr(-2<`c<$lOqnErk;v!nnb3+2Gsco_%bU@h!;KUc7TA zg2jsypp4pOKv?Xf0L1n7u@QHwh3Mh-9)rG$vl_WhE^vNm@NubXxS)5tgpuo2*OLFV;Niz$NkLQp>GFCVu^w zAj!b;T7o)22vWesvUb_;g()y3qXHnea>fBp_IC{0=!(N57-4-$_;=yL^>?!Sxs@&1 z$zDwT41ka;USLuM_#iKO#*hqkgv#U?!4YR@X}y+roO z>Z(Mg>Mc&^pG-gT<5Bq!e0{Jd&`cbJqGreqv^)$FS~KtKr#8eBGm8yj>c@H4|2`EF^` zM9~ffS(R1ar_xuHx@#2_QsFy_8cu^8!3oL|uMYaPXDA=d^zs%9GOE8zqpv(86H}tG zY%iT!ejhd}vCX{-wEXYz|MzUUTu#q=4#jD(3+BHe%8nI=)j9qqT^kWO=C4WE zM^uJp82#Ix7~E8@UcWku#mGoEqHb&cI0tNQ`N4GB$iDV#qEm>-AdFGvkYG#Q3eJHh z7Z3L+V1rN>ss$}4$d3>>-)(V4kytM&yXNh$e*t&yOdQ@1rx*UQuX8%W;tlr3E$K)1 zebWd3b3~)IzpXbVStJ1w6!rCjx4W$gicT$^fHAK3PNj@he(c4ZSDeBNsEC(8SZ1ro z%HjY-6cng^#mA@g4?fO^zPT0<_9MSk&hVhHTni6hZTv}25r1=?nj09MJlpayHzHex z{e&(_s*5Pg3)B#sth~Oifkea=>N~cmua3ztas|&Wfy}`c$8~m|%N4pPt<6nacz^+! zRYD{U8J)GpviPddG6;=-+3Q2wgd*`eHN}izfBiH)k_tzmrPe8(NmE4eJi7eGUZB_3 z29P-R<7$Zfjm$$`Ef6B+B0%q* zyg|VK&D_Nt_-w@R*RGLx83nWd0AmX75Q`8!O2RqXOA}TfH;VNYR3Dx_o`sypL3QxT zimTfS3(F}7XAsC?MESRbMa+X15%QB`_8Lu*Tw9!$vBtFHGs0#Qx@|}>{ zNYazh2dAJ&g_k~eqa&Ug*7EL9W!!uJEf)FTDhkuwcN~kc?&))!9+vIV+8?K9 zLsy|L6p8UZ0oPA>(Yti6e~V-gXRz{^7O*pNL!f=mB@xL4M>p z)oaRd^1P4k0z!Jcj8M6)QrwI$%pKHw$wY|+%=U5xAU^T2IvH%$HVCu>*#<9_PT5&H ziDH^%%0szafMO@UtEpcQ=(3^#l zt?01=dElOJ_@y%4ly8)UT~?cmh~5b#3nnF^chaKAu^GXd0Cn)Cgj=uL9gglko1XP& zIqXvhP>DyaRp=nq(_*FsGyOJkdGt4JS_qidanM(2VfXc4QT+bt=zTBmknR)2BZ_^4 zF#Ro#Yyd0%`{-}1^b{T%zwicZ1KEah0uc+*Qd=*g|MzJnf3GayRHrMzWoNFuwcM5q z1|ePi^v#p^l^}bIq)W6sB$_0ExtDE;igf=Cdw$qZIFexjh}MJfPwnUQXiRH`lKb8I zg=%$v?3r#7)?pGau}zObSA}{d>46Iy_mQ1L0b+5_MGt+n>q~Q571CZI6$+tCc5sMj z`bk#d4zWH$ve7P_*%hyX@|3)#FqePQg;?U7C+DZ|XsY_^O)CJlpctxi-*Z;U`(g^2 z=?B1n;UHzGb6lG)0bZwkCgu*U;RnI-!1kmZ4olTvQ`F~DqeaVXP5{1(?6q>DDhfPJ zD8i9kn&M2;q@-X5ZxX25-)GPkHh@5BpAvzC|4{JE;>D2|q-{VY*1tUK?QJxbNMp3`4dN}P!V*iGJPlhlO zygCIbgsJ%*zLTDO(!8bDdnTfIt`#Yu39?W}qy|Z{@0K?;Ocs`vuf58{wa9H(I ztfMN}6*WyD8;pRGb(<_xPO63l*V94RW*fFQu<@^XW9nF_D*`!rTb6$eb)O3IJ)yXUI<{KyXGDeM> ztvI)^kvJ6y3&)l2Qk#5o&joE@#|R6drGm{NE{7x#JkHYb;q0F<6CXAk|6#9Hclg8; zadh4##G{5IByz!RhZmA7ysdK3vN?e^)L?ic_l9l;|g<^k5GrADMOT<~i zQl-~v1ax*}oxJMnD`g2^LnJ2AMby)?qU#o5He=QBh(HG=5Nxm_07&(NzyA6!%NuQl z=LQvS0Oo^-GG82Jw}V4VW^v_>AagZjdWuaG(Pr#SyiN`XFKEur7MFk&L*(bwUE+3F zQs6)bB{YV5Yxx29p!4!{&`g_rNg>(JoozCwQ^+tg6dIQ-*H64nIe6VFsT|UTY9Uup z48sG)tB1=Ys_(H5VLOn28DRqz6r!@&z`k@-cEY5mKtv-a$~!>Pr{otw<8n`+(p*n~ zA3qgW#D3d|OgU&;AO=i&Dc~ehcMI?kszYaF`Qc()X1J$nCM`eXzb~i7X|Fku$A*i^ z9||MozlUizNN5Q5`)v}0cglSfX{z-iz8*Ck{v6gc>PG9>5fX-X1hBBK=>Um;YoY6v z5*=k6r6^faUNivpkU{+*N#s7u8>}qaIS#F3oi}UOy}cxOyZioj<}ngUpx3C?G+tWl z9Y&Kvg^TIMl=F~&>IDSPnp&0B!<=_0i#LS%j@;#`$D&4Ps@*CA1iokVr7!r?pQfMs zqjP7e`7F?LfxIgF;{knCe`o)!P57MEo&-8Kl!fNY0a~syKo|-g<)K3>zH##Q3CASL zXY{7;3Vq-w>Esh%;o@w1+LHo63%(Zcr6t9NrHv 
z$7%Ddm4dnQ>9mmZa*ObP_~vtT_VHJNUwMec3d!?S_4y~hy75Q8>U6+ESA}nCk3i)& HuKa%il+r>+ literal 0 HcmV?d00001 diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 1c73de0a3..6757ad1cc 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -28,10 +28,14 @@ llama_build_executable(test-tokenizer-0-falcon.cpp) llama_test_executable (test-tokenizer-0-falcon test-tokenizer-0-falcon.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-falcon.gguf) llama_build_executable(test-tokenizer-1-llama.cpp) llama_test_executable (test-tokenizer-1-llama test-tokenizer-1-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama.gguf) +llama_test_executable(test-tokenizer-1-baichuan test-tokenizer-1-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-baichuan.gguf) llama_build_executable(test-tokenizer-1-bpe.cpp) llama_test_executable (test-tokenizer-1-falcon test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-falcon.gguf) llama_test_executable(test-tokenizer-1-aquila test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-aquila.gguf) llama_test_executable(test-tokenizer-1-mpt test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-mpt.gguf) +llama_test_executable(test-tokenizer-1-gpt-neox test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-gpt-neox.gguf) +llama_test_executable(test-tokenizer-1-refact test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-refact.gguf) +llama_test_executable(test-tokenizer-1-starcoder test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-starcoder.gguf) llama_build_and_test_executable(test-grammar-parser.cpp) llama_build_and_test_executable(test-llama-grammar.cpp) llama_build_and_test_executable(test-grad0.cpp) # SLOW diff --git a/tests/test-tokenizer-1-bpe.cpp b/tests/test-tokenizer-1-bpe.cpp index 85a59a14d..386530f23 100644 --- a/tests/test-tokenizer-1-bpe.cpp +++ b/tests/test-tokenizer-1-bpe.cpp @@ -91,9 +91,19 @@ int main(int argc, char **argv) { } } } - // TODO: why doesn't this work for the full range of Unicodes? 
diff --git a/tests/test-tokenizer-1-bpe.cpp b/tests/test-tokenizer-1-bpe.cpp
index 85a59a14d..386530f23 100644
--- a/tests/test-tokenizer-1-bpe.cpp
+++ b/tests/test-tokenizer-1-bpe.cpp
@@ -91,9 +91,19 @@ int main(int argc, char **argv) {
             }
         }
     }
-    // TODO: why doesn't this work for the full range of Unicodes?
+    // Restrict to assigned unicode planes
     // for (uint32_t cp = 0x10000; cp < 0x0010ffff; ++cp) {
-    for (uint32_t cp = 0x10000; cp < 0x00080000; ++cp) {
+    for (uint32_t cp = 0x10000; cp < 0x00040000; ++cp) {
+        std::string str = codepoint_to_utf8(cp);
+        std::vector<llama_token> tokens = llama_tokenize(ctx, str, false);
+        std::string check = llama_detokenize_bpe(ctx, tokens);
+        if (str != check) {
+            fprintf(stderr, "%s : error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n",
+                __func__, cp, check.c_str(), check.length(), str.c_str(), str.length());
+            return 4;
+        }
+    }
+    for (uint32_t cp = 0x000e0000; cp < 0x0010ffff; ++cp) {
         std::string str = codepoint_to_utf8(cp);
         std::vector<llama_token> tokens = llama_tokenize(ctx, str, false);
         std::string check = llama_detokenize_bpe(ctx, tokens);
         if (str != check) {
@@ -103,7 +113,6 @@ int main(int argc, char **argv) {
             return 4;
         }
     }
-
     llama_free_model(model);
     llama_free(ctx);

From 2b4ea35e56792064598e922e46d081e02bc96b94 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov <ggerganov@gmail.com>
Date: Tue, 24 Oct 2023 16:48:37 +0300
Subject: [PATCH 022/859] cuda : add batched cuBLAS GEMM for faster attention (#3749)

* cmake : add helper for faster CUDA builds

* batched : add NGL arg

* ggml : skip nops in compute_forward

* cuda : minor indentation

* cuda : batched cuBLAS GEMMs for src0 F16 and src1 F32 (attention ops)

* Apply suggestions from code review

These changes plus:

```c++
#define cublasGemmBatchedEx hipblasGemmBatchedEx
```

are needed to compile with ROCM. I haven't done performance testing, but
it seems to work.

I couldn't figure out how to propose a change for lines outside what the
pull changed, also this is the first time trying to create a multi-part
review so please forgive me if I mess something up.

* cuda : add ROCm / hipBLAS cublasGemmBatchedEx define

* cuda : add cublasGemmStridedBatchedEx for non-broadcasted cases

* cuda : reduce mallocs in cublasGemmBatchedEx branch

* cuda : add TODO for calling cublas from kernel + using mem pool

---------

Co-authored-by: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com>
---
 CMakeLists.txt               |   1 +
 examples/batched/batched.cpp |  11 +-
 ggml-cuda.cu                 | 190 +++++++++++++++++++++++++++++++++--
 ggml.c                       |   4 +
 4 files changed, 193 insertions(+), 13 deletions(-)
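Note on the idea behind this patch: attention decomposes into many small, mutually independent matrix multiplications (one per head per sequence), and issuing them as separate GEMM calls leaves the GPU underutilized; cuBLAS can run the whole batch in a single call. Below is a minimal self-contained sketch of the strided-batched variant the patch uses on its fast path. All shapes and names here are made up for illustration, and this is not the patch's actual code; build with `nvcc sketch.cu -lcublas`:

```c++
#include <cublas_v2.h>
#include <cuda_fp16.h>
#include <cstdio>

#define CHECK_CUDA(x)   do { if ((x) != cudaSuccess)           { std::printf("cuda error at line %d\n",   __LINE__); return 1; } } while (0)
#define CHECK_CUBLAS(x) do { if ((x) != CUBLAS_STATUS_SUCCESS) { std::printf("cublas error at line %d\n", __LINE__); return 1; } } while (0)

int main() {
    // hypothetical attention-like shapes: 32 heads, head dim 128, 64 positions
    const int n_heads = 32, d = 128, n = 64;

    // K, Q and the attention scores for all heads, one contiguous block per head
    half *K, *Q, *S;
    CHECK_CUDA(cudaMalloc(&K, (size_t) n_heads*d*n*sizeof(half)));
    CHECK_CUDA(cudaMalloc(&Q, (size_t) n_heads*d*n*sizeof(half)));
    CHECK_CUDA(cudaMalloc(&S, (size_t) n_heads*n*n*sizeof(half)));

    cublasHandle_t handle;
    CHECK_CUBLAS(cublasCreate(&handle));

    const half alpha = __float2half(1.0f);
    const half beta  = __float2half(0.0f);

    // one call computes S_i = K_i^T * Q_i for every head i
    CHECK_CUBLAS(cublasGemmStridedBatchedEx(handle, CUBLAS_OP_T, CUBLAS_OP_N,
            n, n, d,
            &alpha, K, CUDA_R_16F, d, (long long) d*n, // strideA: one head of K
                    Q, CUDA_R_16F, d, (long long) d*n, // strideB: one head of Q
            &beta,  S, CUDA_R_16F, n, (long long) n*n, // strideC: one head of scores
            n_heads,
            CUBLAS_COMPUTE_16F,
            CUBLAS_GEMM_DEFAULT_TENSOR_OP));

    CHECK_CUDA(cudaDeviceSynchronize());
    std::printf("ran %d GEMMs in one batched call\n", n_heads);

    cublasDestroy(handle);
    cudaFree(K); cudaFree(Q); cudaFree(S);
    return 0;
}
```

The single strided call amortizes kernel-launch and scheduling overhead across all heads, which is where the speedup for attention comes from.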
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6af42a6c2..202f26049 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -331,6 +331,7 @@ if (LLAMA_CUBLAS)
             set(CMAKE_CUDA_ARCHITECTURES "60;61;70") # needed for f16 CUDA intrinsics
         else()
             set(CMAKE_CUDA_ARCHITECTURES "52;61;70") # lowest CUDA 12 standard + lowest for integer intrinsics
+            #set(CMAKE_CUDA_ARCHITECTURES "") # use this to compile much faster, but only F16 models work
         endif()
     endif()
     message(STATUS "Using CUDA architectures: ${CMAKE_CUDA_ARCHITECTURES}")
diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp
index 75856a81f..22a4265df 100644
--- a/examples/batched/batched.cpp
+++ b/examples/batched/batched.cpp
@@ -11,7 +11,7 @@ int main(int argc, char ** argv) {
     gpt_params params;
 
     if (argc == 1 || argv[1][0] == '-') {
-        printf("usage: %s MODEL_PATH [PROMPT] [PARALLEL] [LEN]\n" , argv[0]);
+        printf("usage: %s MODEL_PATH [PROMPT] [PARALLEL] [LEN] [NGL]\n" , argv[0]);
         return 1 ;
     }
@@ -21,6 +21,9 @@ int main(int argc, char ** argv) {
     // total length of the sequences including the prompt
     int n_len = 32;
 
+    // number of layers to offload to the GPU
+    int n_gpu_layers = 0;
+
     if (argc >= 2) {
         params.model = argv[1];
     }
@@ -37,6 +40,10 @@ int main(int argc, char ** argv) {
         n_len = std::atoi(argv[4]);
     }
 
+    if (argc >= 6) {
+        n_gpu_layers = std::atoi(argv[5]);
+    }
+
     if (params.prompt.empty()) {
         params.prompt = "Hello my name is";
     }
@@ -49,7 +56,7 @@ int main(int argc, char ** argv) {
 
     llama_model_params model_params = llama_model_default_params();
 
-    // model_params.n_gpu_layers = 99; // offload all layers to the GPU
+    model_params.n_gpu_layers = n_gpu_layers;
 
     llama_model * model = llama_load_model_from_file(params.model.c_str(), model_params);
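Note: with the new optional NGL argument, GPU offload for the batched example can be set from the command line instead of editing and recompiling the source. A hypothetical invocation (the model path and values are illustrative):

```sh
# prompt "Hello my name is", 4 parallel sequences, 32-token total length, 99 layers offloaded
./batched ./models/llama-7b/ggml-model-f16.gguf "Hello my name is" 4 32 99
```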
diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 654d3632f..db053e3b8 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -29,6 +29,8 @@
 #define __shfl_xor_sync(mask, var, laneMask, width) __shfl_xor(var, laneMask, width)
 #define cublasCreate hipblasCreate
 #define cublasGemmEx hipblasGemmEx
+#define cublasGemmBatchedEx hipblasGemmBatchedEx
+#define cublasGemmStridedBatchedEx hipblasGemmStridedBatchedEx
 #define cublasHandle_t hipblasHandle_t
 #define cublasSetMathMode(handle, mode) CUBLAS_STATUS_SUCCESS
 #define cublasSetStream hipblasSetStream
@@ -4326,13 +4328,13 @@ static __global__ void mul_mat_vec_nc_f16_f32( // nc == non-contiguous
 
     const half * x = (const half *) vx;
 
-    const int row_x = blockDim.y*blockIdx.y + threadIdx.y;
-    const int channel = blockDim.z*blockIdx.z + threadIdx.z;
+    const int row_x     = blockDim.y*blockIdx.y + threadIdx.y;
+    const int channel   = blockDim.z*blockIdx.z + threadIdx.z;
     const int channel_x = channel / channel_x_divisor;
 
-    const int nrows_y = ncols_x;
+    const int nrows_y   = ncols_x;
     const int nrows_dst = nrows_x;
-    const int row_dst = row_x;
+    const int row_dst   = row_x;
 
     const int idst = channel*nrows_dst + row_dst;
@@ -4345,13 +4347,13 @@ static __global__ void mul_mat_vec_nc_f16_f32( // nc == non-contiguous
             break;
         }
 
-        const int ix = channel_x*channel_stride_x + row_x*row_stride_x + col_x;
-        const float xi = __half2float(x[ix]);
-
         const int row_y = col_x;
+        const int ix = channel_x*channel_stride_x + row_x*row_stride_x + col_x;
         const int iy = channel*nrows_y + row_y;
 
+        const float xi = __half2float(x[ix]);
+
         tmp += xi * y[iy];
     }
@@ -7013,7 +7015,8 @@ static void ggml_cuda_mul_mat_vec_p021(const ggml_tensor * src0, const ggml_tens
 }
 
 static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst){
-    GGML_ASSERT(!ggml_is_contiguous(src0) && ggml_is_contiguous(src1));
+    GGML_ASSERT(!ggml_is_transposed(src0));
+    GGML_ASSERT(!ggml_is_transposed(src1));
     GGML_ASSERT(!ggml_is_permuted(src0));
     GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT);
     GGML_ASSERT(src0->type == GGML_TYPE_F16);
@@ -7023,11 +7026,11 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor
     const int64_t ne01 = src0->ne[1];
     const int64_t ne02 = src0->ne[2];
 
-    const int64_t ne12 = src1->ne[2];
-
     const int64_t nb01 = src0->nb[1];
     const int64_t nb02 = src0->nb[2];
 
+    const int64_t ne12 = src1->ne[2];
+
     CUDA_CHECK(ggml_cuda_set_device(g_main_device));
     cudaStream_t main_stream = g_cudaStreams[g_main_device][0];
@@ -7046,6 +7049,159 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor
     ggml_mul_mat_vec_nc_f16_f32_cuda(src0_ddq, src1_ddf, dst_ddf, ne00, ne01, row_stride_x, ne02, ne12, channel_stride_x, main_stream);
 }
 
+static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst){
+    GGML_ASSERT(!ggml_is_transposed(src0));
+    GGML_ASSERT(!ggml_is_transposed(src1));
+    GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT);
+    GGML_ASSERT(src0->type == GGML_TYPE_F16);
+    GGML_ASSERT(src1->type == GGML_TYPE_F32);
+
+    const int64_t ne00 = src0->ne[0]; GGML_UNUSED(ne00);
+    const int64_t ne01 = src0->ne[1];
+    const int64_t ne02 = src0->ne[2];
+    const int64_t ne03 = src0->ne[3];
+
+    const int64_t nb01 = src0->nb[1];
+    const int64_t nb02 = src0->nb[2]; GGML_UNUSED(nb02);
+    const int64_t nb03 = src0->nb[3]; GGML_UNUSED(nb03);
+
+    const int64_t ne10 = src1->ne[0];
+    const int64_t ne11 = src1->ne[1];
+    const int64_t ne12 = src1->ne[2];
+    const int64_t ne13 = src1->ne[3];
+
+    const int64_t nb11 = src1->nb[1];
+    const int64_t nb12 = src1->nb[2]; GGML_UNUSED(nb12);
+    const int64_t nb13 = src1->nb[3]; GGML_UNUSED(nb13);
+
+    const int64_t ne1 = ggml_nelements(src1);
+    const int64_t ne  = ggml_nelements(dst);
+
+    CUDA_CHECK(ggml_cuda_set_device(g_main_device));
+    cudaStream_t main_stream = g_cudaStreams[g_main_device][0];
+
+    int id;
+    CUDA_CHECK(cudaGetDevice(&id));
+    CUBLAS_CHECK(cublasSetStream(g_cublas_handles[id], main_stream));
+
+    ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra;
+    void * src0_ddq = src0_extra->data_device[g_main_device];
+    half * src0_as_f16 = (half *) src0_ddq;
+
+    ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra;
+    float * src1_ddf = (float *) src1_extra->data_device[g_main_device];
+
+    ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra;
+    float * dst_ddf = (float *) dst_extra->data_device[g_main_device];
+
+    // convert src1 to fp16
+    const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type);
+    GGML_ASSERT(to_fp16_cuda != nullptr);
+
+    size_t src1_as = 0;
+    half * src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne1 * sizeof(half), &src1_as);
+    to_fp16_cuda(src1_ddf, src1_as_f16, ne1, main_stream);
+
+    size_t dst_as = 0;
+    half * dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as);
+
+    GGML_ASSERT(ne12 % ne02 == 0);
+    GGML_ASSERT(ne13 % ne03 == 0);
+
+    // broadcast factors
+    const int64_t r2 = ne12/ne02;
+    const int64_t r3 = ne13/ne03;
+
+    const half alpha_f16 = 1.0f;
+    const half beta_f16  = 0.0f;
+
+#if 0
+    // use cublasGemmEx
+    {
+        for (int i13 = 0; i13 < ne13; ++i13) {
+            for (int i12 = 0; i12 < ne12; ++i12) {
+                int i03 = i13 / r3;
+                int i02 = i12 / r2;
+
+                CUBLAS_CHECK(
+                cublasGemmEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N,
+                    ne01, ne11, ne10,
+                    &alpha_f16, (const char *) src0_as_f16 + i02*src0->nb[2]   + i03*src0->nb[3]  , CUDA_R_16F, nb01/sizeof(half),
+                                (const char *) src1_as_f16 + i12*src1->nb[2]/2 + i13*src1->nb[3]/2, CUDA_R_16F, nb11/sizeof(float),
+                    &beta_f16,  (      char *)     dst_f16 + i12* dst->nb[2]/2 + i13* dst->nb[3]/2, CUDA_R_16F, ne01,
+                    CUBLAS_COMPUTE_16F,
+                    CUBLAS_GEMM_DEFAULT_TENSOR_OP));
+            }
+        }
+    }
+#else
+    if (r2 == 1 && r3 == 1 && src0->nb[2]*src0->ne[2] == src0->nb[3] && src1->nb[2]*src1->ne[2] == src1->nb[3]) {
+        // there is no broadcast and src0, src1 are contiguous across dims 2, 3
+        // use cublasGemmStridedBatchedEx
+        CUBLAS_CHECK(
+        cublasGemmStridedBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N,
+            ne01, ne11, ne10,
+            &alpha_f16, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half),  src0->nb[2]/sizeof(half),  // strideA
+                        (const char *) src1_as_f16, CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB
+            &beta_f16,  (      char *)     dst_f16, CUDA_R_16F, ne01,               dst->nb[2]/sizeof(float),  // strideC
+            ne12*ne13,
+            CUBLAS_COMPUTE_16F,
+            CUBLAS_GEMM_DEFAULT_TENSOR_OP));
+    } else {
+        // use cublasGemmBatchedEx
+        // TODO: https://github.com/ggerganov/llama.cpp/pull/3749#discussion_r1369997000
+        const int ne23 = ne12*ne13;
+
+        // TODO: avoid this alloc
+        void ** ptrs = (void **) malloc(3*ne23*sizeof(void *));
+
+        for (int i13 = 0; i13 < ne13; ++i13) {
+            for (int i12 = 0; i12 < ne12; ++i12) {
+                int i03 = i13 / r3;
+                int i02 = i12 / r2;
+
+                ptrs[0*ne23 + i12 + i13*ne12] = (char *) src0_as_f16 + i02*src0->nb[2]   + i03*src0->nb[3];
+                ptrs[1*ne23 + i12 + i13*ne12] = (char *) src1_as_f16 + i12*src1->nb[2]/2 + i13*src1->nb[3]/2;
+                ptrs[2*ne23 + i12 + i13*ne12] = (char *)     dst_f16 + i12* dst->nb[2]/2 + i13* dst->nb[3]/2;
+            }
+        }
+
+        // allocate device memory for pointers
+        void ** ptrs_as = nullptr;
+        CUDA_CHECK(cudaMalloc(&ptrs_as, 3*ne23*sizeof(void *)));
+
+        // TODO: this does not work for some reason -- not sure why?
+        //size_t ptrs_s = 0;
+        //ptrs_as = (void **) ggml_cuda_pool_malloc(3*ne23*sizeof(void *), &ptrs_s);
+
+        // copy pointers to device
+        CUDA_CHECK(cudaMemcpy(ptrs_as, ptrs, 3*ne23*sizeof(void *), cudaMemcpyHostToDevice));
+
+        free(ptrs);
+
+        CUBLAS_CHECK(
+        cublasGemmBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N,
+            ne01, ne11, ne10,
+            &alpha_f16, (const void **) (ptrs_as + 0*ne23), CUDA_R_16F, nb01/sizeof(half),
+                        (const void **) (ptrs_as + 1*ne23), CUDA_R_16F, nb11/sizeof(float),
+            &beta_f16,  (      void **) (ptrs_as + 2*ne23), CUDA_R_16F, ne01,
+            ne23,
+            CUBLAS_COMPUTE_16F,
+            CUBLAS_GEMM_DEFAULT_TENSOR_OP));
+
+        // free device memory for pointers
+        CUDA_CHECK(cudaFree(ptrs_as));
+        //ggml_cuda_pool_free(ptrs_as, ptrs_s);
+    }
+#endif
+
+    const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16);
+    to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream);
+
+    ggml_cuda_pool_free(src1_as_f16, src1_as);
+    ggml_cuda_pool_free(dst_f16, dst_as);
+}
+
 static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
     bool all_on_device = (src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT) &&
         src1->backend == GGML_BACKEND_GPU && dst->backend == GGML_BACKEND_GPU;
@@ -7058,10 +7214,22 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1
         }
     }
 
+    // debug helpers
+    //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]);
+    //printf("      %8d %8d %8d %8d\n", src0->nb[0], src0->nb[1], src0->nb[2], src0->nb[3]);
+    //printf("src1: %8d %8d %8d %8d\n", src1->ne[0], src1->ne[1], src1->ne[2], src1->ne[3]);
+    //printf("      %8d %8d %8d %8d\n", src1->nb[0], src1->nb[1], src1->nb[2], src1->nb[3]);
+    //printf("src0 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src0), ggml_is_transposed(src0), ggml_type_name(src0->type), src0->name);
+    //printf("src1 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src1), ggml_is_transposed(src1), ggml_type_name(src1->type), src1->name);
+
     if (all_on_device && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) {
+        // KQ
         ggml_cuda_mul_mat_vec_p021(src0, src1, dst);
-    } else if (all_on_device && !ggml_is_contiguous(src0) && ggml_is_contiguous(src1) && src1->ne[1] == 1) {
+    } else if (all_on_device && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) {
+        // KQV
        ggml_cuda_mul_mat_vec_nc(src0, src1, dst);
+    } else if (all_on_device && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) && src1->ne[2]*src1->ne[3] > 1) {
+        ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst);
     } else if (src0->type == GGML_TYPE_F32) {
         ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false);
     } else if (ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) {
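Note: the broadcast factors r2 = ne12/ne02 and r3 = ne13/ne03 in the new function above handle the case where src1 has more slices along dims 2 and 3 than src0 (for example grouped-query-style head sharing): src1 slice i12 then pairs with src0 slice i12/r2, which is why the pointer-array branch is needed when r2 or r3 is greater than 1. A tiny standalone illustration of that index mapping, with hypothetical head counts:

```c++
#include <cstdio>

int main() {
    const int ne02 = 8;            // src0 slices along dim 2 (e.g. KV heads)
    const int ne12 = 32;           // src1 slices along dim 2 (e.g. Q heads)
    const int r2   = ne12 / ne02;  // broadcast factor, 4 here

    // each src1 slice i12 reads src0 slice i12 / r2, exactly as in the loop above
    for (int i12 = 0; i12 < ne12; ++i12) {
        std::printf("src1 slice %2d -> src0 slice %d\n", i12, i12 / r2);
    }
    return 0;
}
```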
malloc(3*ne23*sizeof(void *)); + + for (int i13 = 0; i13 < ne13; ++i13) { + for (int i12 = 0; i12 < ne12; ++i12) { + int i03 = i13 / r3; + int i02 = i12 / r2; + + ptrs[0*ne23 + i12 + i13*ne12] = (char *) src0_as_f16 + i02*src0->nb[2] + i03*src0->nb[3]; + ptrs[1*ne23 + i12 + i13*ne12] = (char *) src1_as_f16 + i12*src1->nb[2]/2 + i13*src1->nb[3]/2; + ptrs[2*ne23 + i12 + i13*ne12] = (char *) dst_f16 + i12* dst->nb[2]/2 + i13* dst->nb[3]/2; + } + } + + // allocate device memory for pointers + void ** ptrs_as = nullptr; + CUDA_CHECK(cudaMalloc(&ptrs_as, 3*ne23*sizeof(void *))); + + // TODO: this does not work for some reason -- not sure why? + //size_t ptrs_s = 0; + //ptrs_as = (void **) ggml_cuda_pool_malloc(3*ne23*sizeof(void *), &ptrs_s); + + // copy pointers to device + CUDA_CHECK(cudaMemcpy(ptrs_as, ptrs, 3*ne23*sizeof(void *), cudaMemcpyHostToDevice)); + + free(ptrs); + + CUBLAS_CHECK( + cublasGemmBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, + ne01, ne11, ne10, + &alpha_f16, (const void **) (ptrs_as + 0*ne23), CUDA_R_16F, nb01/sizeof(half), + (const void **) (ptrs_as + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + &beta_f16, ( void **) (ptrs_as + 2*ne23), CUDA_R_16F, ne01, + ne23, + CUBLAS_COMPUTE_16F, + CUBLAS_GEMM_DEFAULT_TENSOR_OP)); + + // free device memory for pointers + CUDA_CHECK(cudaFree(ptrs_as)); + //ggml_cuda_pool_free(ptrs_as, ptrs_s); + } +#endif + + const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); + to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); + + ggml_cuda_pool_free(src1_as_f16, src1_as); + ggml_cuda_pool_free(dst_f16, dst_as); +} + static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { bool all_on_device = (src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT) && src1->backend == GGML_BACKEND_GPU && dst->backend == GGML_BACKEND_GPU; @@ -7058,10 +7214,22 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } } + // debug helpers + //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); + //printf(" %8d %8d %8d %8d\n", src0->nb[0], src0->nb[1], src0->nb[2], src0->nb[3]); + //printf("src1: %8d %8d %8d %8d\n", src1->ne[0], src1->ne[1], src1->ne[2], src1->ne[3]); + //printf(" %8d %8d %8d %8d\n", src1->nb[0], src1->nb[1], src1->nb[2], src1->nb[3]); + //printf("src0 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src0), ggml_is_transposed(src0), ggml_type_name(src0->type), src0->name); + //printf("src1 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src1), ggml_is_transposed(src1), ggml_type_name(src1->type), src1->name); + if (all_on_device && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { + // KQ ggml_cuda_mul_mat_vec_p021(src0, src1, dst); - } else if (all_on_device && !ggml_is_contiguous(src0) && ggml_is_contiguous(src1) && src1->ne[1] == 1) { + } else if (all_on_device && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { + // KQV ggml_cuda_mul_mat_vec_nc(src0, src1, dst); + } else if (all_on_device && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) && src1->ne[2]*src1->ne[3] > 1) { + ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); } else if 
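    // the branch below handles quantized and F16 src0 through the generic
    // ggml_cuda_op_mul_mat path, picking between the vector kernels (batch
    // size 1), the MMQ kernels and cuBLAS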
(ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) { diff --git a/ggml.c b/ggml.c index 49f3b7aba..17f0ce487 100644 --- a/ggml.c +++ b/ggml.c @@ -16602,6 +16602,10 @@ static void ggml_compute_forward_cross_entropy_loss_back( static void ggml_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { GGML_ASSERT(params); + if (tensor->op == GGML_OP_NONE) { + return; + } + #ifdef GGML_USE_CUBLAS bool skip_cpu = ggml_cuda_compute_forward(params, tensor); if (skip_cpu) { From abd21fc99f1d35e2081e4c01dc09c71a86bf3c5a Mon Sep 17 00:00:00 2001 From: John Smith <67539080+kingsidelee@users.noreply.github.com> Date: Wed, 25 Oct 2023 01:48:45 +0800 Subject: [PATCH 023/859] cmake : add missed dependencies (#3763) --- examples/main-cmake-pkg/CMakeLists.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/examples/main-cmake-pkg/CMakeLists.txt b/examples/main-cmake-pkg/CMakeLists.txt index 908131884..cb00edbbb 100644 --- a/examples/main-cmake-pkg/CMakeLists.txt +++ b/examples/main-cmake-pkg/CMakeLists.txt @@ -16,6 +16,8 @@ add_library(common OBJECT ${_common_path}/console.cpp ${_common_path}/grammar-parser.h ${_common_path}/grammar-parser.cpp + ${_common_path}/sampling.h + ${_common_path}/sampling.cpp ) # WARNING: because build-info.h is auto-generated, it will only From b2f7e04bd312eaf97eee0523aa09d950d585626b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 24 Oct 2023 21:51:20 +0300 Subject: [PATCH 024/859] sync : ggml (conv ops + cuda MSVC fixes) (#3765) ggml-ci --- ggml-cuda.cu | 10 +- ggml.c | 438 +++++++++++++++++++++++++++++++++++++++++---------- ggml.h | 15 +- 3 files changed, 369 insertions(+), 94 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index db053e3b8..d1e874b6c 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5664,10 +5664,10 @@ void ggml_init_cublas() { GGML_ASSERT(g_device_count <= GGML_CUDA_MAX_DEVICES); int64_t total_vram = 0; fprintf(stderr, "%s: found %d " GGML_CUDA_NAME " devices:\n", __func__, g_device_count); - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { cudaDeviceProp prop; CUDA_CHECK(cudaGetDeviceProperties(&prop, id)); - fprintf(stderr, " Device %ld: %s, compute capability %d.%d\n", id, prop.name, prop.major, prop.minor); + fprintf(stderr, " Device %d: %s, compute capability %d.%d\n", id, prop.name, prop.major, prop.minor); g_tensor_split[id] = total_vram; total_vram += prop.totalGlobalMem; @@ -5677,15 +5677,15 @@ void ggml_init_cublas() { g_compute_capabilities[id] = 100*prop.major + 10*prop.minor; #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) } - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { g_tensor_split[id] /= total_vram; } - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { CUDA_CHECK(ggml_cuda_set_device(id)); // create cuda streams - for (int64_t is = 0; is < MAX_STREAMS; ++is) { + for (int is = 0; is < MAX_STREAMS; ++is) { CUDA_CHECK(cudaStreamCreateWithFlags(&g_cudaStreams[id][is], cudaStreamNonBlocking)); } diff --git a/ggml.c b/ggml.c index 17f0ce487..6f66bab05 100644 --- a/ggml.c +++ b/ggml.c @@ -571,7 +571,6 @@ int64_t ggml_cycles_per_ms(void) { #define ggml_perf_cycles_per_ms() 0 #endif - // // cache line // @@ -1828,7 +1827,6 @@ ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type) { return type_traits[type]; } - // // simd mappings // @@ -4057,16 +4055,17 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { 
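    // NOTE: this table must stay in element-wise sync with enum ggml_op in
    // ggml.h; the static_asserts on GGML_OP_COUNT (now 73) catch a missed entry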
"ALIBI", "CLAMP", "CONV_1D", + "CONV_1D_STAGE_0", + "CONV_1D_STAGE_1", "CONV_TRANSPOSE_1D", "CONV_2D", + "CONV_2D_STAGE_0", + "CONV_2D_STAGE_1", "CONV_TRANSPOSE_2D", "POOL_1D", "POOL_2D", "UPSCALE", - "CONV_1D_STAGE_0", - "CONV_1D_STAGE_1", - "FLASH_ATTN", "FLASH_FF", "FLASH_ATTN_BACK", @@ -4092,7 +4091,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "CROSS_ENTROPY_LOSS_BACK", }; -static_assert(GGML_OP_COUNT == 71, "GGML_OP_COUNT != 71"); +static_assert(GGML_OP_COUNT == 73, "GGML_OP_COUNT != 73"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -4143,16 +4142,17 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "alibi(x)", "clamp(x)", "conv_1d(x)", + "conv_1d_stage_0(x)", + "conv_1d_stage_1(x)", "conv_transpose_1d(x)", "conv_2d(x)", + "conv_2d_stage_0(x)", + "conv_2d_stage_1(x)", "conv_transpose_2d(x)", "pool_1d(x)", "pool_2d(x)", "upscale(x)", - "conv_1d_stage_0(x)", - "conv_1d_stage_1(x)", - "flash_attn(x)", "flash_ff(x)", "flash_attn_back(x)", @@ -4178,7 +4178,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "cross_entropy_loss_back(x,y)", }; -static_assert(GGML_OP_COUNT == 71, "GGML_OP_COUNT != 71"); +static_assert(GGML_OP_COUNT == 73, "GGML_OP_COUNT != 73"); static_assert(GGML_OP_POOL_COUNT == 2, "GGML_OP_POOL_COUNT != 2"); @@ -4209,8 +4209,10 @@ static void ggml_setup_op_has_task_pass(void) { p[GGML_OP_CONV_1D ] = true; p[GGML_OP_CONV_1D_STAGE_0 ] = true; p[GGML_OP_CONV_1D_STAGE_1 ] = true; - p[GGML_OP_CONV_2D ] = true; p[GGML_OP_CONV_TRANSPOSE_1D ] = true; + p[GGML_OP_CONV_2D ] = true; + p[GGML_OP_CONV_2D_STAGE_0 ] = true; + p[GGML_OP_CONV_2D_STAGE_1 ] = true; p[GGML_OP_CONV_TRANSPOSE_2D ] = true; p[GGML_OP_FLASH_ATTN_BACK ] = true; p[GGML_OP_CROSS_ENTROPY_LOSS ] = true; @@ -5954,7 +5956,6 @@ struct ggml_tensor * ggml_sqrt_inplace( return ggml_sqrt_impl(ctx, a, true); } - // ggml_log static struct ggml_tensor * ggml_log_impl( @@ -6008,7 +6009,6 @@ struct ggml_tensor * ggml_sum( return result; } - // ggml_sum_rows struct ggml_tensor * ggml_sum_rows( @@ -6640,7 +6640,6 @@ struct ggml_tensor * ggml_set_2d_inplace( return ggml_set_impl(ctx, a, b, nb1, a->nb[2], a->nb[3], offset, false); } - // ggml_cpy static struct ggml_tensor * ggml_cpy_impl( @@ -6720,7 +6719,6 @@ struct ggml_tensor * ggml_cont_inplace( return ggml_cont_impl(ctx, a, true); } - // make contiguous, with new shape GGML_API struct ggml_tensor * ggml_cont_1d( struct ggml_context * ctx, @@ -7173,7 +7171,6 @@ struct ggml_tensor * ggml_diag( return result; } - // ggml_diag_mask_inf static struct ggml_tensor * ggml_diag_mask_inf_impl( @@ -7285,7 +7282,6 @@ struct ggml_tensor * ggml_soft_max_inplace( return ggml_soft_max_impl(ctx, a, true); } - // ggml_soft_max_back static struct ggml_tensor * ggml_soft_max_back_impl( @@ -7702,7 +7698,11 @@ GGML_API struct ggml_tensor * ggml_conv_transpose_1d( // ggml_conv_2d -struct ggml_tensor * ggml_conv_2d( +// im2col: [N, IC, IH, IW] => [N, OH, OW, IC*KH*KW] +// a: [OC,IC, KH, KW] +// b: [N, IC, IH, IW] +// result: [N, OH, OW, IC*KH*KW] +static struct ggml_tensor * ggml_conv_2d_stage_0( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, @@ -7721,17 +7721,21 @@ struct ggml_tensor * ggml_conv_2d( is_node = true; } + const int64_t OH = ggml_calc_conv_output_size(b->ne[1], a->ne[1], s1, p1, d1); + const int64_t OW = ggml_calc_conv_output_size(b->ne[0], a->ne[0], s0, p0, d0); + const int64_t ne[4] = { - ggml_calc_conv_output_size(b->ne[0], a->ne[0], s0, p0, d0), - ggml_calc_conv_output_size(b->ne[1], a->ne[1], s1, p1, d1), - 
a->ne[3], b->ne[3], + a->ne[2] * a->ne[1] * a->ne[0], + OW, + OH, + b->ne[3], }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F16, 4, ne); int32_t params[] = { s0, s1, p0, p1, d0, d1 }; ggml_set_op_params(result, params, sizeof(params)); - result->op = GGML_OP_CONV_2D; + result->op = GGML_OP_CONV_2D_STAGE_0; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; result->src[1] = b; @@ -7740,8 +7744,61 @@ struct ggml_tensor * ggml_conv_2d( } -// ggml_conv_2d_sk_p0 +// gemm: [N, OC, OH, OW] = [OC, IC * KH * KW] x [N*OH*OW, IC * KH * KW] +// a: [OC, IC, KH, KW] +// b: [N, OH, OW, IC * KH * KW] +// result: [N, OC, OH, OW] +static struct ggml_tensor * ggml_conv_2d_stage_1( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b) { + bool is_node = false; + + if (a->grad || b->grad) { + GGML_ASSERT(false); // TODO: implement backward + is_node = true; + } + + const int64_t ne[4] = { + b->ne[1], + b->ne[2], + a->ne[3], + b->ne[3], + }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); + + result->op = GGML_OP_CONV_2D_STAGE_1; + result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = a; + result->src[1] = b; + + return result; + +} + +// a: [OC,IC, KH, KW] +// b: [N, IC, IH, IW] +// result: [N, OC, OH, OW] +struct ggml_tensor * ggml_conv_2d( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s0, + int s1, + int p0, + int p1, + int d0, + int d1) { + + struct ggml_tensor * result = ggml_conv_2d_stage_0(ctx, a, b, s0, s1, p0, p1, d0, d1); // [N, OH, OW, IC * KH * KW] + result = ggml_conv_2d_stage_1(ctx, a, result); + + return result; + +} + +// ggml_conv_2d_sk_p0 struct ggml_tensor * ggml_conv_2d_sk_p0( struct ggml_context * ctx, struct ggml_tensor * a, @@ -8180,7 +8237,6 @@ static struct ggml_tensor * ggml_add_rel_pos_impl( return result; } - struct ggml_tensor * ggml_add_rel_pos( struct ggml_context * ctx, struct ggml_tensor * a, @@ -8625,8 +8681,6 @@ struct ggml_tensor * ggml_map_custom3_inplace( return ggml_map_custom3_impl(ctx, a, b, c, fun, n_tasks, userdata, true); } - - // ggml_cross_entropy_loss struct ggml_tensor * ggml_cross_entropy_loss( @@ -9828,7 +9882,6 @@ static void ggml_compute_forward_add1( } } - // ggml_compute_forward_acc static void ggml_compute_forward_acc_f32( @@ -9968,7 +10021,6 @@ static void ggml_compute_forward_sub_f32( const int i2 = (ir - i3*ne2*ne1)/ne1; const int i1 = (ir - i3*ne2*ne1 - i2*ne1); - #ifdef GGML_USE_ACCELERATE vDSP_vsub( (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11), 1, @@ -10149,7 +10201,6 @@ static void ggml_compute_forward_div_f32( const int i2 = (ir - i3*ne2*ne1)/ne1; const int i1 = (ir - i3*ne2*ne1 - i2*ne1); - #ifdef GGML_USE_ACCELERATE UNUSED(ggml_vec_div_f32); @@ -10287,7 +10338,6 @@ static void ggml_compute_forward_sqrt( } } - // ggml_compute_forward_log static void ggml_compute_forward_log_f32( @@ -12120,7 +12170,6 @@ static void ggml_compute_forward_out_prod_f32( } } - //int64_t t1 = ggml_perf_time_us(); //static int64_t acc = 0; //acc += t1 - t0; @@ -12316,7 +12365,6 @@ static void ggml_compute_forward_scale_f32( const size_t nb1 = dst->nb[1]; - for (int i1 = ir0; i1 < ir1; i1++) { if (dst->data != src0->data) { // src0 is same shape as dst => same indices @@ -12714,7 +12762,6 @@ static void ggml_compute_forward_get_rows_back_f32( } } - static void ggml_compute_forward_get_rows_back( const struct 
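    // two fixes land in the conv_transpose_1d hunks below: dst_data already
    // includes the i01*ne00*ne02 offset, so the old index applied it twice,
    // and dst is now zeroed up front because the kernel accumulates into it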
ggml_compute_params * params, const struct ggml_tensor * src0, @@ -13997,6 +14044,7 @@ static void ggml_compute_forward_conv_1d_f32( } } +// TODO: reuse ggml_mul_mat or implement ggml_im2col and remove stage_0 and stage_1 static void gemm_f16_out_f32(int64_t m, int64_t n, int64_t k, ggml_fp16_t * A, ggml_fp16_t * B, @@ -14298,6 +14346,9 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( } } + // need to zero dst since we are accumulating into it + memset(dst->data, 0, ggml_nbytes(dst)); + return; } @@ -14370,7 +14421,7 @@ static void ggml_compute_forward_conv_transpose_1d_f32( const float * const src = (float *)((char *) src0->data + i02*nb02 + i01*nb01); float * dst_data = wdata + i01*ne00*ne02; for (int64_t i00 = 0; i00 < ne00; i00++) { - dst_data[i01*ne00*ne02 + i00*ne02 + i02] = src[i00]; + dst_data[i00*ne02 + i02] = src[i00]; } } } @@ -14389,6 +14440,9 @@ static void ggml_compute_forward_conv_transpose_1d_f32( } } + // need to zero dst since we are accumulating into it + memset(dst->data, 0, ggml_nbytes(dst)); + return; } @@ -14450,6 +14504,144 @@ static void ggml_compute_forward_conv_transpose_1d( // ggml_compute_forward_conv_2d +// src0: kernel [OC, IC, KH, KW] +// src1: image [N, IC, IH, IW] +// dst: result [N, OH, OW, IC*KH*KW] +static void ggml_compute_forward_conv_2d_stage_0_f32( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + const struct ggml_tensor * src1, + struct ggml_tensor * dst) { + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F16); + + int64_t t0 = ggml_perf_time_us(); + UNUSED(t0); + + GGML_TENSOR_BINARY_OP_LOCALS; + + const int64_t N = ne13; + const int64_t IC = ne12; + const int64_t IH = ne11; + const int64_t IW = ne10; + + // const int64_t OC = ne03; + // const int64_t IC = ne02; + const int64_t KH = ne01; + const int64_t KW = ne00; + + const int64_t OH = ne2; + const int64_t OW = ne1; + + const int ith = params->ith; + const int nth = params->nth; + + const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t*)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t*)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t*)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t*)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t*)(dst->op_params))[5]; + + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb10 == sizeof(float)); + + if (params->type == GGML_TASK_INIT) { + memset(dst->data, 0, ggml_nbytes(dst)); + return; + } + + if (params->type == GGML_TASK_FINALIZE) { + return; + } + + // im2col: [N, IC, IH, IW] => [N, OH, OW, IC*KH*KW] + { + ggml_fp16_t * const wdata = (ggml_fp16_t *) dst->data; + + for (int64_t in = 0; in < N; in++) { + for (int64_t ioh = 0; ioh < OH; ioh++) { + for (int64_t iow = 0; iow < OW; iow++) { + for (int64_t iic = ith; iic < IC; iic+=nth) { + + // micro kernel + ggml_fp16_t * dst_data = wdata + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] + const float * const src_data = (float *)((char *) src1->data + in*nb13 + iic*nb12); // [IH, IW] + + for (int64_t ikh = 0; ikh < KH; ikh++) { + for (int64_t ikw = 0; ikw < KW; ikw++) { + const int64_t iiw = iow*s0 + ikw*d0 - p0; + const int64_t iih = ioh*s1 + ikh*d1 - p1; + + if (!(iih < 0 || iih >= IH || iiw < 0 || iiw >= IW)) { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = GGML_FP32_TO_FP16(src_data[iih*IW + iiw]); + } + } + } + } + } + } + } + } +} + +// gemm: [N, OC, OH, OW] = [OC, IC * KH * KW] x [N*OH*OW, IC * 
KH * KW] +// src0: [OC, IC, KH, KW] +// src1: [N, OH, OW, IC * KH * KW] +// result: [N, OC, OH, OW] +static void ggml_compute_forward_conv_2d_stage_1_f16( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + const struct ggml_tensor * src1, + struct ggml_tensor * dst) { + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F16); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + int64_t t0 = ggml_perf_time_us(); + UNUSED(t0); + + if (params->type == GGML_TASK_INIT) { + return; + } + + if (params->type == GGML_TASK_FINALIZE) { + return; + } + + GGML_TENSOR_BINARY_OP_LOCALS; + + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb10 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb0 == sizeof(float)); + + const int N = ne13; + const int OH = ne12; + const int OW = ne11; + + const int OC = ne03; + const int IC = ne02; + const int KH = ne01; + const int KW = ne00; + + const int ith = params->ith; + const int nth = params->nth; + + int64_t m = OC; + int64_t n = OH * OW; + int64_t k = IC * KH * KW; + + // [N, OC, OH, OW] = [OC, IC * KH * KW] x [N*OH*OW, IC * KH * KW] + for (int i = 0; i < N; i++) { + ggml_fp16_t * A = (ggml_fp16_t *)src0->data; // [m, k] + ggml_fp16_t * B = (ggml_fp16_t *)src1->data + i * m * k; // [n, k] + float * C = (float *)dst->data + i * m * n; // [m, n] + + gemm_f16_out_f32(m, n, k, A, B, C, ith, nth); + } +} + static void ggml_compute_forward_conv_2d_f16_f32( const struct ggml_compute_params * params, const struct ggml_tensor * src0, @@ -14462,16 +14654,40 @@ static void ggml_compute_forward_conv_2d_f16_f32( int64_t t0 = ggml_perf_time_us(); UNUSED(t0); - GGML_TENSOR_BINARY_OP_LOCALS; + GGML_TENSOR_BINARY_OP_LOCALS + + // src1: image [N, IC, IH, IW] + // src0: kernel [OC, IC, KH, KW] + // dst: result [N, OC, OH, OW] + // ne12: IC + // ne0: OW + // ne1: OH + // nk0: KW + // nk1: KH + // ne13: N + + const int N = ne13; + const int IC = ne12; + const int IH = ne11; + const int IW = ne10; + + const int OC = ne03; + // const int IC = ne02; + const int KH = ne01; + const int KW = ne00; + + const int OH = ne1; + const int OW = ne0; const int ith = params->ith; const int nth = params->nth; - const int nk0 = ne00; - const int nk1 = ne01; + // const int nk0 = ne00; + // const int nk1 = ne01; // size of the convolution row - the kernel size unrolled across all channels - const int ew0 = nk0*nk1*ne02; + // const int ew0 = nk0*nk1*ne02; + // ew0: IC*KH*KW const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; const int32_t s1 = ((const int32_t*)(dst->op_params))[1]; @@ -14487,24 +14703,27 @@ static void ggml_compute_forward_conv_2d_f16_f32( memset(params->wdata, 0, params->wsize); // prepare source data (src1) + // im2col: [N, IC, IH, IW] => [N*OH*OW, IC*KH*KW] + { ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0; - for (int i13 = 0; i13 < ne13; i13++) { - for (int i12 = 0; i12 < ne12; i12++) { - const float * const src = (float *)((char *) src1->data + i13*nb13 + i12*nb12); - ggml_fp16_t * dst_data = wdata + i13*(ne1*ne0*ew0); + for (int in = 0; in < N; in++) { + for (int iic = 0; iic < IC; iic++) { + for (int ioh = 0; ioh < OH; ioh++) { + for (int iow = 0; iow < OW; iow++) { - for (int i1 = 0; i1 < ne1; i1++) { - for (int i0 = 0; i0 < ne0; i0++) { - for (int ik1 = 0; ik1 < nk1; ik1++) { - for (int ik0 = 0; ik0 < nk0; ik0++) { - const int idx0 = i0*s0 + ik0*d0 - p0; - const int idx1 = i1*s1 + ik1*d1 - p1; + // micro kernel + ggml_fp16_t * dst_data = wdata + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] + 
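                    // im2col layout: output pixel (in, ioh, iow) owns one row
                    // of length IC*KH*KW; tap (iic, ikh, ikw) lands at column
                    // iic*KH*KW + ikh*KW + ikw, and taps falling outside the
                    // padded image keep the zero written by the earlier memset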
const float * const src_data = (float *)((char *) src1->data + in*nb13 + iic*nb12); // [IH, IW] - if (!(idx1 < 0 || idx1 >= ne11 || idx0 < 0 || idx0 >= ne10)) { - dst_data[(i1*ne0 + i0)*ew0 + i12*(nk0*nk1) + ik1*nk0 + ik0] = - GGML_FP32_TO_FP16(src[idx1*ne10 + idx0]); + for (int ikh = 0; ikh < KH; ikh++) { + for (int ikw = 0; ikw < KW; ikw++) { + const int iiw = iow*s0 + ikw*d0 - p0; + const int iih = ioh*s1 + ikh*d1 - p1; + + if (!(iih < 0 || iih >= IH || iiw < 0 || iiw >= IW)) { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = GGML_FP32_TO_FP16(src_data[iih*IW + iiw]); } } } @@ -14521,30 +14740,22 @@ static void ggml_compute_forward_conv_2d_f16_f32( return; } - // total patches in dst - const int np = ne2; - - // patches per thread - const int dp = (np + nth - 1)/nth; - - // patch range for this thread - const int ip0 = dp*ith; - const int ip1 = MIN(ip0 + dp, np); - ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0; + // wdata: [N*OH*OW, IC*KH*KW] + // dst: result [N, OC, OH, OW] + // src0: kernel [OC, IC, KH, KW] - for (int i3 = 0; i3 < ne3; i3++) { - for (int i2 = ip0; i2 < ip1; i2++) { - float * dst_data = (float *)((char *) dst->data + i3*nb3 + i2*nb2); + int64_t m = OC; + int64_t n = OH * OW; + int64_t k = IC * KH * KW; - for (int i1 = 0; i1 < ne1; ++i1) { - for (int i0 = 0; i0 < ne0; ++i0) { - ggml_vec_dot_f16(ew0, dst_data + i1*ne0 + i0, - (ggml_fp16_t *) ((char *) src0->data + i2*nb03), - (ggml_fp16_t *) wdata + i3*nb3 + (i1*ne0 + i0)*ew0); - } - } - } + // [N, OC, OH, OW] = [OC, IC * KH * KW] x [N*OH*OW, IC * KH * KW] + for (int i = 0; i < N; i++) { + ggml_fp16_t * A = (ggml_fp16_t *)src0->data; // [m, k] + ggml_fp16_t * B = (ggml_fp16_t *)wdata + i * m * k; // [n, k] + float * C = (float *)dst->data + i * m * n; // [m * k] + + gemm_f16_out_f32(m, n, k, A, B, C, ith, nth); } } @@ -14570,6 +14781,48 @@ static void ggml_compute_forward_conv_2d( } } +static void ggml_compute_forward_conv_2d_stage_0( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + const struct ggml_tensor * src1, + struct ggml_tensor * dst) { + switch (src0->type) { + case GGML_TYPE_F16: + { + ggml_compute_forward_conv_2d_stage_0_f32(params, src0, src1, dst); + } break; + case GGML_TYPE_F32: + { + GGML_ASSERT(false); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + +static void ggml_compute_forward_conv_2d_stage_1( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + const struct ggml_tensor * src1, + struct ggml_tensor * dst) { + switch (src0->type) { + case GGML_TYPE_F16: + { + ggml_compute_forward_conv_2d_stage_1_f16(params, src0, src1, dst); + } break; + case GGML_TYPE_F32: + { + GGML_ASSERT(false); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + // ggml_compute_forward_conv_transpose_2d static void ggml_compute_forward_conv_transpose_2d( @@ -14628,6 +14881,8 @@ static void ggml_compute_forward_conv_transpose_2d( } } + memset(dst->data, 0, ggml_nbytes(dst)); + return; } @@ -16126,7 +16381,6 @@ static void ggml_compute_forward_add_rel_pos_f32( const int ip0 = dp*ith; const int ip1 = MIN(ip0 + dp, np); - for (int64_t i13 = ip0; i13 < ip1; ++i13) { for (int64_t i12 = 0; i12 < ne12; ++i12) { for (int64_t i11 = 0; i11 < ne11; ++i11) { @@ -16193,7 +16447,6 @@ static void ggml_compute_forward_map_unary_f32( } } - static void ggml_compute_forward_map_unary( const struct ggml_compute_params * params, const struct ggml_tensor * src0, @@ -16241,7 +16494,6 @@ static void ggml_compute_forward_map_binary_f32( } } - static 
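// scratch for the rewritten conv_2d path is sized in ggml_graph_plan further
// down: the im2col buffer now carries the batch dimension, i.e.
// ne3*ne0*ne1*ew0 = N*OH*OW*(IC*KH*KW) fp16 elements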
void ggml_compute_forward_map_binary( const struct ggml_compute_params * params, const struct ggml_tensor * src0, @@ -16293,7 +16545,6 @@ static void ggml_compute_forward_map_custom2_f32( fun(dst, a, b); } - // ggml_compute_forward_map_custom3 static void ggml_compute_forward_map_custom3_f32( @@ -16568,7 +16819,6 @@ static void ggml_compute_forward_cross_entropy_loss_back_f32( ggml_vec_sub_f32(nc, ds0, ds0, s1); ggml_vec_scale_f32(nc, ds0, d[0] / (float) nr); - #ifndef NDEBUG for (int i = 0; i < nc; ++i) { assert(!isnan(ds0[i])); @@ -16596,7 +16846,6 @@ static void ggml_compute_forward_cross_entropy_loss_back( } } - ///////////////////////////////// static void ggml_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { @@ -16808,6 +17057,14 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm { ggml_compute_forward_conv_2d(params, tensor->src[0], tensor->src[1], tensor); } break; + case GGML_OP_CONV_2D_STAGE_0: + { + ggml_compute_forward_conv_2d_stage_0(params, tensor->src[0], tensor->src[1], tensor); + } break; + case GGML_OP_CONV_2D_STAGE_1: + { + ggml_compute_forward_conv_2d_stage_1(params, tensor->src[0], tensor->src[1], tensor); + } break; case GGML_OP_CONV_TRANSPOSE_2D: { ggml_compute_forward_conv_transpose_2d(params, tensor->src[0], tensor->src[1], tensor); @@ -17737,11 +17994,19 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { GGML_ASSERT(false); // TODO: not implemented } break; + case GGML_OP_CONV_TRANSPOSE_1D: + { + GGML_ASSERT(false); // TODO: not implemented + } break; case GGML_OP_CONV_2D: { GGML_ASSERT(false); // TODO: not implemented } break; - case GGML_OP_CONV_TRANSPOSE_1D: + case GGML_OP_CONV_2D_STAGE_0: + { + GGML_ASSERT(false); // TODO: not implemented + } break; + case GGML_OP_CONV_2D_STAGE_1: { GGML_ASSERT(false); // TODO: not implemented } break; @@ -18670,6 +18935,7 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { const int64_t ne0 = node->ne[0]; const int64_t ne1 = node->ne[1]; const int64_t ne2 = node->ne[2]; + const int64_t ne3 = node->ne[3]; const int64_t nk = ne00*ne01; const int64_t ew0 = nk * ne02; @@ -18680,7 +18946,8 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { if (node->src[0]->type == GGML_TYPE_F16 && node->src[1]->type == GGML_TYPE_F32) { - cur = sizeof(ggml_fp16_t)*(ne0*ne1*ew0); + // im2col: [N*OH*OW, IC*KH*KW] + cur = sizeof(ggml_fp16_t)*(ne3*ne0*ne1*ew0); } else if (node->src[0]->type == GGML_TYPE_F32 && node->src[1]->type == GGML_TYPE_F32) { cur = sizeof(float)* (ne10*ne11*ne12); @@ -18690,6 +18957,14 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { work_size = MAX(work_size, cur); } break; + case GGML_OP_CONV_2D_STAGE_0: + { + n_tasks = n_threads; + } break; + case GGML_OP_CONV_2D_STAGE_1: + { + n_tasks = n_threads; + } break; case GGML_OP_CONV_TRANSPOSE_2D: { n_tasks = n_threads; @@ -19878,7 +20153,6 @@ static enum ggml_opt_result ggml_opt_adam( opt->loss_after = fx; - // check convergence if (fabsf(fx - fx_prev[0])/fx < params.adam.eps_f) { GGML_PRINT_DEBUG("converged\n"); diff --git a/ggml.h b/ggml.h index 16aaf169e..08bff5511 100644 --- a/ggml.h +++ b/ggml.h @@ -401,15 +401,16 @@ extern "C" { GGML_OP_ALIBI, GGML_OP_CLAMP, GGML_OP_CONV_1D, - GGML_OP_CONV_2D, + GGML_OP_CONV_1D_STAGE_0, // internal + GGML_OP_CONV_1D_STAGE_1, // internal GGML_OP_CONV_TRANSPOSE_1D, + GGML_OP_CONV_2D, + GGML_OP_CONV_2D_STAGE_0, // internal + GGML_OP_CONV_2D_STAGE_1, 
// internal GGML_OP_CONV_TRANSPOSE_2D, GGML_OP_POOL_1D, GGML_OP_POOL_2D, - GGML_OP_CONV_1D_STAGE_0, // internal - GGML_OP_CONV_1D_STAGE_1, // internal - GGML_OP_UPSCALE, // nearest interpolate GGML_OP_FLASH_ATTN, @@ -1020,9 +1021,9 @@ extern "C" { struct ggml_tensor * b, float eps); - // A: n columns, m rows - // B: n columns, p rows (i.e. we transpose it internally) - // result is m columns, p rows + // A: k columns, n rows => [ne03, ne02, n, k] + // B: k columns, m rows (i.e. we transpose it internally) => [ne03 * x, ne02 * y, m, k] + // result is n columns, m rows => [ne03 * x, ne02 * y, m, n] GGML_API struct ggml_tensor * ggml_mul_mat( struct ggml_context * ctx, struct ggml_tensor * a, From 1717521cdb976a2219888b0e5cba36e210eee9df Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 24 Oct 2023 23:08:20 +0300 Subject: [PATCH 025/859] server : do not block system prompt update (#3767) * server : do not block system prompt update * server : update state machine logic to process system prompts * server : minor --- examples/server/server.cpp | 57 +++++++++++++------------------------- 1 file changed, 20 insertions(+), 37 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 693f9b773..f52a928c8 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -454,7 +454,7 @@ struct llama_client_slot } void release() { - if (state == PROCESSING) + if (state == IDLE || state == PROCESSING) { t_token_generation = (ggml_time_us() - t_start_genereration) / 1e3; command = RELEASE; @@ -754,6 +754,7 @@ struct llama_server_context } slot->params.antiprompt.clear(); + const auto &stop = data.find("stop"); if (stop != data.end() && stop->is_array()) { @@ -867,7 +868,7 @@ struct llama_server_context kv_cache_clear(); - for (int32_t i = 0; i < batch.n_tokens; ++i) + for (int i = 0; i < (int) system_tokens.size(); ++i) { llama_batch_add(batch, system_tokens[i], i, { 0 }, false); } @@ -894,16 +895,8 @@ struct llama_server_context { slot.release(); } - wait_all_are_idle(); - all_slots_are_idle = true; - // wait until system prompt load system_need_update = true; - while (system_need_update) - { - std::this_thread::sleep_for(std::chrono::milliseconds(5)); - } - // system prompt loaded, continue } void process_system_prompt_data(const json &sys_props) { @@ -915,26 +908,6 @@ struct llama_server_context { notify_system_prompt_changed(); } - else - { - system_need_update = true; - } - } - - void wait_all_are_idle() { - bool wait = true; - while (wait) - { - wait = false; - for (auto &slot : slots) - { - if (!slot.available()) - { - wait = true; - break; - } - } - } } static size_t find_stopping_strings(const std::string &text, const size_t last_token_size, @@ -965,7 +938,6 @@ struct llama_server_context slot.has_next_token = false; } stop_pos = pos; - } } @@ -1444,7 +1416,7 @@ struct llama_server_context process_tasks(); // update the system prompt wait until all slots are idle state - if (system_need_update) + if (system_need_update && all_slots_are_idle) { LOG_TEE("updating system prompt\n"); update_system_prompt(); @@ -1498,7 +1470,7 @@ struct llama_server_context for (auto & slot : slots) { // release the slot - if (slot.state == PROCESSING && slot.command == RELEASE) + if (slot.command == RELEASE) { slot.state = IDLE; slot.command = NONE; @@ -1509,7 +1481,7 @@ struct llama_server_context continue; } - if (slot.state == IDLE || slot.command == RELEASE) + if (slot.state == IDLE) { continue; } @@ -1530,6 +1502,17 @@ struct llama_server_context { for (auto & 
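            // slot lifecycle: `state` is IDLE or PROCESSING, `command` is NONE,
            // LOAD_PROMPT or RELEASE; with this change a system prompt update
            // no longer blocks -- it is deferred until all_slots_are_idle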
slot : slots) { + const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get().empty()); + + // empty prompt passed -> release the slot and send empty response + if (slot.state == IDLE && slot.command == LOAD_PROMPT && !has_prompt) + { + slot.release(); + slot.print_timings(); + send_final_response(slot); + continue; + } + // need process the prompt if (slot.state == IDLE && slot.command == LOAD_PROMPT) { @@ -1749,8 +1732,8 @@ struct llama_server_context if (!process_token(result, slot)) { slot.release(); - send_final_response(slot); slot.print_timings(); + send_final_response(slot); } slot.i_batch = -1; @@ -2285,7 +2268,7 @@ int main(int argc, char **argv) if (!json_value(data, "stream", false)) { std::string completion_text; task_result result = llama.next_result(task_id); - if(!result.error && result.stop) { + if (!result.error && result.stop) { res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json"); } else @@ -2312,7 +2295,7 @@ int main(int argc, char **argv) { return false; } - if(result.stop) { + if (result.stop) { break; } } else { From ad939626577cd25b462e8026cc543efb71528472 Mon Sep 17 00:00:00 2001 From: cebtenzzre Date: Tue, 24 Oct 2023 16:10:43 -0400 Subject: [PATCH 026/859] server : add parameter -tb N, --threads-batch N (#3584) (#3768) Co-authored-by: Michael Coppola Co-authored-by: Michael Coppola --- examples/server/server.cpp | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index f52a928c8..b4c4d0a20 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1749,15 +1749,16 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf("usage: %s [options]\n", argv0); printf("\n"); printf("options:\n"); - printf(" -h, --help show this help message and exit\n"); - printf(" -v, --verbose verbose output (default: %s)\n", server_verbose ? "enabled" : "disabled"); - printf(" -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); - printf(" -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); - printf(" --rope-freq-base N RoPE base frequency (default: loaded from model)\n"); - printf(" --rope-freq-scale N RoPE frequency scaling factor (default: loaded from model)\n"); - printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); - printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); - printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); + printf(" -h, --help show this help message and exit\n"); + printf(" -v, --verbose verbose output (default: %s)\n", server_verbose ? 
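    // -tb decouples the thread count used for prompt/batch processing from the
    // one used for single-token generation (default: same as --threads)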
"enabled" : "disabled"); + printf(" -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); + printf(" -tb N, --threads-batch N number of threads to use during batch and prompt processing (default: same as --threads)\n"); + printf(" -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); + printf(" --rope-freq-base N RoPE base frequency (default: loaded from model)\n"); + printf(" --rope-freq-scale N RoPE frequency scaling factor (default: loaded from model)\n"); + printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); + printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); + printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); if (llama_mlock_supported()) { printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); @@ -1907,6 +1908,15 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.n_threads = std::stoi(argv[i]); } + else if (arg == "--threads-batch" || arg == "-tb") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + params.n_threads_batch = std::stoi(argv[i]); + } else if (arg == "-b" || arg == "--batch-size") { if (++i >= argc) From cc448774866e6479c750bd7c135cd8f92cedee67 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 25 Oct 2023 10:09:16 +0300 Subject: [PATCH 027/859] log : disable pid in log filenames --- common/log.h | 35 ++++++++++++++++++----------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/common/log.h b/common/log.h index 70e7e4ca2..d2c864cea 100644 --- a/common/log.h +++ b/common/log.h @@ -97,22 +97,23 @@ #define LOG_TEE_TARGET stderr #endif +// NOTE: currently disabled as it produces too many log files // Utility to obtain "pid" like unique process id and use it when creating log files. -inline std::string log_get_pid() -{ - static std::string pid; - if (pid.empty()) - { - // std::this_thread::get_id() is the most portable way of obtaining a "process id" - // it's not the same as "pid" but is unique enough to solve multiple instances - // trying to write to the same log. - std::stringstream ss; - ss << std::this_thread::get_id(); - pid = ss.str(); - } - - return pid; -} +//inline std::string log_get_pid() +//{ +// static std::string pid; +// if (pid.empty()) +// { +// // std::this_thread::get_id() is the most portable way of obtaining a "process id" +// // it's not the same as "pid" but is unique enough to solve multiple instances +// // trying to write to the same log. +// std::stringstream ss; +// ss << std::this_thread::get_id(); +// pid = ss.str(); +// } +// +// return pid; +//} // Utility function for generating log file names with unique id based on thread id. 
// invocation with log_filename_generator( "llama", "log" ) creates a string "llama..log" @@ -126,8 +127,8 @@ inline std::string log_filename_generator_impl(const std::string & log_file_base std::stringstream buf; buf << log_file_basename; - buf << "."; - buf << log_get_pid(); + //buf << "."; + //buf << log_get_pid(); buf << "."; buf << log_file_extension; From 6961c4bd0b5176e10ab03b35394f1e9eab761792 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 25 Oct 2023 10:26:27 +0300 Subject: [PATCH 028/859] batched-bench : print params at start --- examples/batched-bench/batched-bench.cpp | 4 ++++ ggml-cuda.cu | 12 ++++++------ 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index c552eaa73..43f9c971d 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -154,6 +154,10 @@ int main(int argc, char ** argv) { } } + LOG_TEE("\n"); + LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, mmq = %d\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, mmq); + LOG_TEE("\n"); + LOG_TEE("|%6s | %6s | %4s | %6s | %8s | %8s | %8s | %8s | %8s | %8s |\n", "PP", "TG", "B", "N_KV", "T_PP s", "S_PP t/s", "T_TG s", "S_TG t/s", "T s", "S t/s"); LOG_TEE("|%6s-|-%6s-|-%4s-|-%6s-|-%8s-|-%8s-|-%8s-|-%8s-|-%8s-|-%8s-|\n", "------", "------", "----", "------", "--------", "--------", "--------", "--------", "--------", "--------"); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index d1e874b6c..ba0cd5a7d 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6254,16 +6254,15 @@ inline void ggml_cuda_op_mul_mat_cublas( const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, const int64_t src1_padded_row_size, const cudaStream_t & stream) { - GGML_ASSERT(src0_dd_i != nullptr); + GGML_ASSERT(src0_dd_i != nullptr); GGML_ASSERT(src1_ddf_i != nullptr); - GGML_ASSERT(dst_dd_i != nullptr); - + GGML_ASSERT(dst_dd_i != nullptr); const int64_t ne00 = src0->ne[0]; - const int64_t ne10 = src1->ne[0]; const int64_t ne0 = dst->ne[0]; + const int64_t row_diff = row_high - row_low; int id; @@ -7223,12 +7222,13 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 //printf("src1 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src1), ggml_is_transposed(src1), ggml_type_name(src1->type), src1->name); if (all_on_device && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { - // KQ + // KQ single-batch ggml_cuda_mul_mat_vec_p021(src0, src1, dst); } else if (all_on_device && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { - // KQV + // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); } else if (all_on_device && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) && src1->ne[2]*src1->ne[3] > 1) { + // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); From 34b2a5e1ee4fe6295fb4420eb91131d743694c65 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 26 Oct 2023 22:53:37 +0300 Subject: [PATCH 029/859] server : do not release slot on image input (#3798) --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/examples/server/server.cpp b/examples/server/server.cpp index b4c4d0a20..5b7e4139d 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1502,7 +1502,7 @@ struct llama_server_context { for (auto & slot : slots) { - const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get().empty()); + const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get().empty()) || !slot.images.empty(); // empty prompt passed -> release the slot and send empty response if (slot.state == IDLE && slot.command == LOAD_PROMPT && !has_prompt) From 2f9ec7e271220a78fe27c9e6ccbcc0dda31cda0f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 27 Oct 2023 17:01:23 +0300 Subject: [PATCH 030/859] cuda : improve text-generation and batched decoding performance (#3776) * cuda : prints wip * cuda : new cublas gemm branch for multi-batch quantized src0 * cuda : add F32 sgemm branch * cuda : fine-tune >= VOLTA params + use MMQ only for small batches * cuda : remove duplicated cuBLAS GEMM code * cuda : add CUDA_USE_TENSOR_CORES and GGML_CUDA_FORCE_MMQ macros * build : add compile option to force use of MMQ kernels --- CMakeLists.txt | 7 +++ Makefile | 3 ++ ggml-cuda.cu | 130 +++++++++++++++++++++++++++++++++++++++++++------ llama.cpp | 2 - llama.h | 2 +- 5 files changed, 125 insertions(+), 19 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 202f26049..d9fc86237 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -82,6 +82,7 @@ set(LLAMA_BLAS_VENDOR "Generic" CACHE STRING "llama: BLAS library vendor") option(LLAMA_CUBLAS "llama: use CUDA" OFF) #option(LLAMA_CUDA_CUBLAS "llama: use cuBLAS for prompt processing" OFF) option(LLAMA_CUDA_FORCE_DMMV "llama: use dmmv instead of mmvq CUDA kernels" OFF) +option(LLAMA_CUDA_FORCE_MMQ "llama: use mmq kernels instead of cuBLAS" OFF) set(LLAMA_CUDA_DMMV_X "32" CACHE STRING "llama: x stride for dmmv CUDA kernels") set(LLAMA_CUDA_MMV_Y "1" CACHE STRING "llama: y block size for mmv CUDA kernels") option(LLAMA_CUDA_F16 "llama: use 16 bit floats for some calculations" OFF) @@ -305,6 +306,9 @@ if (LLAMA_CUBLAS) if (LLAMA_CUDA_FORCE_DMMV) add_compile_definitions(GGML_CUDA_FORCE_DMMV) endif() + if (LLAMA_CUDA_FORCE_MMQ) + add_compile_definitions(GGML_CUDA_FORCE_MMQ) + endif() add_compile_definitions(GGML_CUDA_DMMV_X=${LLAMA_CUDA_DMMV_X}) add_compile_definitions(GGML_CUDA_MMV_Y=${LLAMA_CUDA_MMV_Y}) if (DEFINED LLAMA_CUDA_DMMV_Y) @@ -405,6 +409,9 @@ if (LLAMA_HIPBLAS) if (LLAMA_CUDA_FORCE_DMMV) target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_FORCE_DMMV) endif() + if (LLAMA_CUDA_FORCE_MMQ) + target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_FORCE_MMQ) + endif() target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_DMMV_X=${LLAMA_CUDA_DMMV_X}) target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_MMV_Y=${LLAMA_CUDA_MMV_Y}) target_compile_definitions(ggml-rocm PRIVATE K_QUANTS_PER_ITERATION=${LLAMA_CUDA_KQUANTS_ITER}) diff --git a/Makefile b/Makefile index 80179631f..68069f9ff 100644 --- a/Makefile +++ b/Makefile @@ -397,6 +397,9 @@ endif # CUDA_DOCKER_ARCH ifdef LLAMA_CUDA_FORCE_DMMV NVCCFLAGS += -DGGML_CUDA_FORCE_DMMV endif # LLAMA_CUDA_FORCE_DMMV +ifdef LLAMA_CUDA_FORCE_MMQ + NVCCFLAGS += -DGGML_CUDA_FORCE_MMQ +endif # LLAMA_CUDA_FORCE_MMQ ifdef LLAMA_CUDA_DMMV_X NVCCFLAGS += -DGGML_CUDA_DMMV_X=$(LLAMA_CUDA_DMMV_X) else diff --git a/ggml-cuda.cu b/ggml-cuda.cu index ba0cd5a7d..1ba951f68 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -87,6 +87,24 @@ #define CC_OFFSET_AMD 
1000000 #define CC_RDNA2 (CC_OFFSET_AMD + 1030) +// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication +// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant +// for large computational tasks. the drawback is that this requires some extra amount of VRAM: +// - 7B quantum model: +100-200 MB +// - 13B quantum model: +200-400 MB +// +//#define GGML_CUDA_FORCE_MMQ + +// TODO: improve this to be correct for more hardware +// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores +// probably other such cases, and not sure what happens on AMD hardware +#if !defined(GGML_CUDA_FORCE_MMQ) +#define CUDA_USE_TENSOR_CORES +#endif + +// max batch size to use MMQ kernels when tensor cores are available +#define MMQ_MAX_BATCH_SIZE 32 + #if defined(GGML_USE_HIPBLAS) #define __CUDA_ARCH__ 1300 @@ -470,7 +488,6 @@ static int g_device_count = -1; static int g_main_device = 0; static int g_compute_capabilities[GGML_CUDA_MAX_DEVICES]; static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0}; -static bool g_mul_mat_q = true; static void * g_scratch_buffer = nullptr; static size_t g_scratch_size = 0; // disabled by default @@ -3554,9 +3571,15 @@ static __device__ __forceinline__ void mul_mat_q( #define MMQ_X_Q4_0_RDNA1 64 #define MMQ_Y_Q4_0_RDNA1 64 #define NWARPS_Q4_0_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q4_0_AMPERE 4 +#define MMQ_Y_Q4_0_AMPERE 32 +#define NWARPS_Q4_0_AMPERE 4 +#else #define MMQ_X_Q4_0_AMPERE 64 #define MMQ_Y_Q4_0_AMPERE 128 #define NWARPS_Q4_0_AMPERE 4 +#endif #define MMQ_X_Q4_0_PASCAL 64 #define MMQ_Y_Q4_0_PASCAL 64 #define NWARPS_Q4_0_PASCAL 8 @@ -3615,9 +3638,15 @@ template static __global__ void #define MMQ_X_Q4_1_RDNA1 64 #define MMQ_Y_Q4_1_RDNA1 64 #define NWARPS_Q4_1_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q4_1_AMPERE 4 +#define MMQ_Y_Q4_1_AMPERE 32 +#define NWARPS_Q4_1_AMPERE 4 +#else #define MMQ_X_Q4_1_AMPERE 64 #define MMQ_Y_Q4_1_AMPERE 128 #define NWARPS_Q4_1_AMPERE 4 +#endif #define MMQ_X_Q4_1_PASCAL 64 #define MMQ_Y_Q4_1_PASCAL 64 #define NWARPS_Q4_1_PASCAL 8 @@ -3678,9 +3707,15 @@ template static __global__ void #define MMQ_X_Q5_0_RDNA1 64 #define MMQ_Y_Q5_0_RDNA1 64 #define NWARPS_Q5_0_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q5_0_AMPERE 4 +#define MMQ_Y_Q5_0_AMPERE 32 +#define NWARPS_Q5_0_AMPERE 4 +#else #define MMQ_X_Q5_0_AMPERE 128 #define MMQ_Y_Q5_0_AMPERE 64 #define NWARPS_Q5_0_AMPERE 4 +#endif #define MMQ_X_Q5_0_PASCAL 64 #define MMQ_Y_Q5_0_PASCAL 64 #define NWARPS_Q5_0_PASCAL 8 @@ -3739,9 +3774,15 @@ template static __global__ void #define MMQ_X_Q5_1_RDNA1 64 #define MMQ_Y_Q5_1_RDNA1 64 #define NWARPS_Q5_1_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q5_1_AMPERE 4 +#define MMQ_Y_Q5_1_AMPERE 32 +#define NWARPS_Q5_1_AMPERE 4 +#else #define MMQ_X_Q5_1_AMPERE 128 #define MMQ_Y_Q5_1_AMPERE 64 #define NWARPS_Q5_1_AMPERE 4 +#endif #define MMQ_X_Q5_1_PASCAL 64 #define MMQ_Y_Q5_1_PASCAL 64 #define NWARPS_Q5_1_PASCAL 8 @@ -3800,9 +3841,15 @@ mul_mat_q5_1( #define MMQ_X_Q8_0_RDNA1 64 #define MMQ_Y_Q8_0_RDNA1 64 #define NWARPS_Q8_0_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q8_0_AMPERE 4 +#define MMQ_Y_Q8_0_AMPERE 32 +#define NWARPS_Q8_0_AMPERE 4 +#else #define MMQ_X_Q8_0_AMPERE 128 #define MMQ_Y_Q8_0_AMPERE 64 #define NWARPS_Q8_0_AMPERE 4 +#endif #define MMQ_X_Q8_0_PASCAL 64 #define MMQ_Y_Q8_0_PASCAL 64 #define NWARPS_Q8_0_PASCAL 8 @@ 
-3861,9 +3908,15 @@ template static __global__ void #define MMQ_X_Q2_K_RDNA1 128 #define MMQ_Y_Q2_K_RDNA1 32 #define NWARPS_Q2_K_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q2_K_AMPERE 4 +#define MMQ_Y_Q2_K_AMPERE 32 +#define NWARPS_Q2_K_AMPERE 4 +#else #define MMQ_X_Q2_K_AMPERE 64 #define MMQ_Y_Q2_K_AMPERE 128 #define NWARPS_Q2_K_AMPERE 4 +#endif #define MMQ_X_Q2_K_PASCAL 64 #define MMQ_Y_Q2_K_PASCAL 64 #define NWARPS_Q2_K_PASCAL 8 @@ -3922,9 +3975,15 @@ mul_mat_q2_K( #define MMQ_X_Q3_K_RDNA1 32 #define MMQ_Y_Q3_K_RDNA1 128 #define NWARPS_Q3_K_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q3_K_AMPERE 4 +#define MMQ_Y_Q3_K_AMPERE 32 +#define NWARPS_Q3_K_AMPERE 4 +#else #define MMQ_X_Q3_K_AMPERE 128 #define MMQ_Y_Q3_K_AMPERE 128 #define NWARPS_Q3_K_AMPERE 4 +#endif #define MMQ_X_Q3_K_PASCAL 64 #define MMQ_Y_Q3_K_PASCAL 64 #define NWARPS_Q3_K_PASCAL 8 @@ -3985,9 +4044,15 @@ template static __global__ void #define MMQ_X_Q4_K_RDNA1 32 #define MMQ_Y_Q4_K_RDNA1 64 #define NWARPS_Q4_K_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q4_K_AMPERE 4 +#define MMQ_Y_Q4_K_AMPERE 32 +#define NWARPS_Q4_K_AMPERE 4 +#else #define MMQ_X_Q4_K_AMPERE 64 #define MMQ_Y_Q4_K_AMPERE 128 #define NWARPS_Q4_K_AMPERE 4 +#endif #define MMQ_X_Q4_K_PASCAL 64 #define MMQ_Y_Q4_K_PASCAL 64 #define NWARPS_Q4_K_PASCAL 8 @@ -4048,9 +4113,15 @@ template static __global__ void #define MMQ_X_Q5_K_RDNA1 32 #define MMQ_Y_Q5_K_RDNA1 64 #define NWARPS_Q5_K_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q5_K_AMPERE 4 +#define MMQ_Y_Q5_K_AMPERE 32 +#define NWARPS_Q5_K_AMPERE 4 +#else #define MMQ_X_Q5_K_AMPERE 64 #define MMQ_Y_Q5_K_AMPERE 128 #define NWARPS_Q5_K_AMPERE 4 +#endif #define MMQ_X_Q5_K_PASCAL 64 #define MMQ_Y_Q5_K_PASCAL 64 #define NWARPS_Q5_K_PASCAL 8 @@ -4109,9 +4180,15 @@ mul_mat_q5_K( #define MMQ_X_Q6_K_RDNA1 32 #define MMQ_Y_Q6_K_RDNA1 64 #define NWARPS_Q6_K_RDNA1 8 +#if defined(CUDA_USE_TENSOR_CORES) +#define MMQ_X_Q6_K_AMPERE 4 +#define MMQ_Y_Q6_K_AMPERE 32 +#define NWARPS_Q6_K_AMPERE 4 +#else #define MMQ_X_Q6_K_AMPERE 64 #define MMQ_Y_Q6_K_AMPERE 64 #define NWARPS_Q6_K_AMPERE 4 +#endif #define MMQ_X_Q6_K_PASCAL 64 #define MMQ_Y_Q6_K_PASCAL 64 #define NWARPS_Q6_K_PASCAL 8 @@ -5663,6 +5740,16 @@ void ggml_init_cublas() { CUDA_CHECK(cudaGetDeviceCount(&g_device_count)); GGML_ASSERT(g_device_count <= GGML_CUDA_MAX_DEVICES); int64_t total_vram = 0; +#if defined(GGML_CUDA_FORCE_MMQ) + fprintf(stderr, "%s: GGML_CUDA_FORCE_MMQ: yes\n", __func__); +#else + fprintf(stderr, "%s: GGML_CUDA_FORCE_MMQ: no\n", __func__); +#endif +#if defined(CUDA_USE_TENSOR_CORES) + fprintf(stderr, "%s: CUDA_USE_TENSOR_CORES: yes\n", __func__); +#else + fprintf(stderr, "%s: CUDA_USE_TENSOR_CORES: no\n", __func__); +#endif fprintf(stderr, "%s: found %d " GGML_CUDA_NAME " devices:\n", __func__, g_device_count); for (int id = 0; id < g_device_count; ++id) { cudaDeviceProp prop; @@ -6347,7 +6434,7 @@ inline void ggml_cuda_op_mul_mat_cublas( cublasSgemm(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, row_diff, src1_ncols, ne10, &alpha, src0_ddf_i, ne00, - src1_ddf_i, ne10, + src1_ddf_i, ne10, &beta, dst_dd_i, ldc)); if (src0_as != 0) { @@ -7048,9 +7135,10 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor ggml_mul_mat_vec_nc_f16_f32_cuda(src0_ddq, src1_ddf, dst_ddf, ne00, ne01, row_stride_x, ne02, ne12, channel_stride_x, main_stream); } -static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst){ +static void 
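// with CUDA_USE_TENSOR_CORES defined, the MMQ tile shapes above shrink to 4x32:
// MMQ is then only used for batches up to MMQ_MAX_BATCH_SIZE (32) and anything
// larger is routed to cuBLAS F16 GEMM, which can exploit the tensor cores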
ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); + GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7202,17 +7290,24 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const } static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - bool all_on_device = (src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT) && - src1->backend == GGML_BACKEND_GPU && dst->backend == GGML_BACKEND_GPU; + const bool all_on_device = + (src0->backend == GGML_BACKEND_GPU) && + (src1->backend == GGML_BACKEND_GPU) && + ( dst->backend == GGML_BACKEND_GPU); int64_t min_compute_capability = INT_MAX; for (int64_t id = 0; id < g_device_count; ++id) { - if (min_compute_capability > g_compute_capabilities[id] - && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { + if (min_compute_capability > g_compute_capabilities[id] && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { min_compute_capability = g_compute_capabilities[id]; } } +#ifdef CUDA_USE_TENSOR_CORES + const bool use_tensor_cores = true; +#else + const bool use_tensor_cores = false; +#endif + // debug helpers //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); //printf(" %8d %8d %8d %8d\n", src0->nb[0], src0->nb[1], src0->nb[2], src0->nb[3]); @@ -7221,20 +7316,19 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 //printf("src0 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src0), ggml_is_transposed(src0), ggml_type_name(src0->type), src0->name); //printf("src1 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src1), ggml_is_transposed(src1), ggml_type_name(src1->type), src1->name); - if (all_on_device && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { + if (all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { // KQ single-batch ggml_cuda_mul_mat_vec_p021(src0, src1, dst); - } else if (all_on_device && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { + } else if (all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (all_on_device && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) && src1->ne[2]*src1->ne[3] > 1) { + } else if (all_on_device && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); } else if (ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) { if (src1->ne[1] == 1 && src0->ne[0] % GGML_CUDA_DMMV_X == 0) { - #ifdef GGML_CUDA_FORCE_DMMV const bool use_mul_mat_vec_q = false; #else @@ -7247,7 +7341,15 @@ static void ggml_cuda_mul_mat(const 
ggml_tensor * src0, const ggml_tensor * src1 ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); } } else { - if (g_mul_mat_q && ggml_is_quantized(src0->type) && min_compute_capability >= MIN_CC_DP4A) { + bool use_mul_mat_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); + + // when tensor cores are available, use them for large batch size + // ref: https://github.com/ggerganov/llama.cpp/pull/3776 + if (use_tensor_cores && min_compute_capability >= CC_VOLTA && src1->ne[1] > MMQ_MAX_BATCH_SIZE) { + use_mul_mat_q = false; + } + + if (use_mul_mat_q) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); } else { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); @@ -7601,10 +7703,6 @@ void ggml_cuda_set_main_device(const int main_device) { } } -void ggml_cuda_set_mul_mat_q(const bool mul_mat_q) { - g_mul_mat_q = mul_mat_q; -} - void ggml_cuda_set_scratch_size(const size_t scratch_size) { // this is a hack to not completely break llama.cpp when using multiple models or contexts simultaneously // it still won't always work as expected, but it's better than nothing diff --git a/llama.cpp b/llama.cpp index 61f30c398..cc8669b0e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5959,8 +5959,6 @@ static int llama_decode_internal( } } - ggml_cuda_set_mul_mat_q(cparams.mul_mat_q); - // HACK: ggml-alloc may change the tensor backend when reusing a parent, so force output to be on the CPU here if needed if (!lctx.embedding.empty()) { embeddings->backend = GGML_BACKEND_CPU; diff --git a/llama.h b/llama.h index 2f2fee0e2..beac9a0ce 100644 --- a/llama.h +++ b/llama.h @@ -178,7 +178,7 @@ extern "C" { float rope_freq_scale; // RoPE frequency scaling factor, 0 = from model // Keep the booleans together to avoid misalignment during copy-by-value. 
-    bool mul_mat_q; // if true, use experimental mul_mat_q kernels
+    bool mul_mat_q; // if true, use experimental mul_mat_q kernels (DEPRECATED - always true)
     bool f16_kv;    // use fp16 for KV cache, fp32 otherwise
     bool logits_all; // the llama_eval() call computes all logits, not just the last one
     bool embedding; // embedding mode only

From c8d6a1f34ab6f1b6bd468d256e535a61f98f114c Mon Sep 17 00:00:00 2001
From: Thibault Terrasson
Date: Fri, 27 Oct 2023 16:37:41 +0200
Subject: [PATCH 031/859] simple : fix batch handling (#3803)

---
 examples/simple/simple.cpp | 18 ++++--------------
 1 file changed, 4 insertions(+), 14 deletions(-)

diff --git a/examples/simple/simple.cpp b/examples/simple/simple.cpp
index f376c0509..374aef6f1 100644
--- a/examples/simple/simple.cpp
+++ b/examples/simple/simple.cpp
@@ -95,13 +95,8 @@ int main(int argc, char ** argv) {
     llama_batch batch = llama_batch_init(512, 0, 1);

     // evaluate the initial prompt
-    batch.n_tokens = tokens_list.size();
-
-    for (int32_t i = 0; i < batch.n_tokens; i++) {
-        batch.token[i] = tokens_list[i];
-        batch.pos[i] = i;
-        batch.seq_id[i] = 0;
-        batch.logits[i] = false;
+    for (size_t i = 0; i < tokens_list.size(); i++) {
+        llama_batch_add(batch, tokens_list[i], i, { 0 }, false);
     }

     // llama_decode will output logits only for the last token of the prompt
@@ -148,15 +143,10 @@ int main(int argc, char ** argv) {
         fflush(stdout);

         // prepare the next batch
-        batch.n_tokens = 0;
+        llama_batch_clear(batch);

         // push this new token for next evaluation
-        batch.token [batch.n_tokens] = new_token_id;
-        batch.pos   [batch.n_tokens] = n_cur;
-        batch.seq_id[batch.n_tokens] = 0;
-        batch.logits[batch.n_tokens] = true;
-
-        batch.n_tokens += 1;
+        llama_batch_add(batch, new_token_id, n_cur, { 0 }, true);

         n_decode += 1;
     }

From 6d459cbfbe5a011dfca94f9550527a504b6f9aa1 Mon Sep 17 00:00:00 2001
From: cebtenzzre
Date: Fri, 27 Oct 2023 17:33:53 -0400
Subject: [PATCH 032/859] llama : correctly report GGUFv3 format (#3818)

---
 llama.cpp | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/llama.cpp b/llama.cpp
index cc8669b0e..408533d8a 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -1578,12 +1578,14 @@ static void llama_kv_cache_seq_shift(
 enum llama_fver {
     GGUF_FILE_VERSION_V1 = 1,
     GGUF_FILE_VERSION_V2 = 2,
+    GGUF_FILE_VERSION_V3 = 3,
 };

 static const char * llama_file_version_name(llama_fver version) {
     switch (version) {
         case GGUF_FILE_VERSION_V1: return "GGUF V1 (support until nov 2023)";
-        case GGUF_FILE_VERSION_V2: return "GGUF V2 (latest)";
+        case GGUF_FILE_VERSION_V2: return "GGUF V2";
+        case GGUF_FILE_VERSION_V3: return "GGUF V3 (latest)";
     }

     return "unknown";

From 41aee4df821854f37d90a45281f03b6db8d27de2 Mon Sep 17 00:00:00 2001
From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com>
Date: Fri, 27 Oct 2023 15:40:07 -0600
Subject: [PATCH 033/859] speculative : ensure draft and target model vocab
 matches (#3812)

* speculative: Ensure draft and target model vocab matches

* Tolerate small differences when checking dft vs tgt vocab

---
 examples/speculative/speculative.cpp | 33 +++++++++++++++++++++++++++-
 1 file changed, 32 insertions(+), 1 deletion(-)

diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp
index 92ad27e8e..f921b7845 100644
--- a/examples/speculative/speculative.cpp
+++ b/examples/speculative/speculative.cpp
@@ -8,6 +8,9 @@
 #include
 #include

+#define SPEC_VOCAB_MAX_SIZE_DIFFERENCE 100
+#define SPEC_VOCAB_CHECK_START_TOKEN_ID 5
+
 struct seq_draft {
     bool active = false;
     bool drafting = false;
@@ -64,6 +67,33
@@ int main(int argc, char ** argv) { params.n_gpu_layers = params.n_gpu_layers_draft; std::tie(model_dft, ctx_dft) = llama_init_from_gpt_params(params); + { + const int n_vocab_tgt = llama_n_vocab(model_tgt); + const int n_vocab_dft = llama_n_vocab(model_dft); + const int vocab_diff = n_vocab_tgt > n_vocab_dft + ? n_vocab_tgt - n_vocab_dft + : n_vocab_dft - n_vocab_tgt; + + if (vocab_diff > SPEC_VOCAB_MAX_SIZE_DIFFERENCE) { + fprintf(stderr, "%s: error: draft model vocab must closely match target model to use speculation but ", __func__); + fprintf(stderr, "target vocab size %d does not match draft vocab size %d - difference %d, max allowed %d\n", + n_vocab_tgt, llama_n_vocab(model_dft), vocab_diff, SPEC_VOCAB_MAX_SIZE_DIFFERENCE); + return 1; + } + + for (int i = SPEC_VOCAB_CHECK_START_TOKEN_ID; i < std::min(n_vocab_tgt, n_vocab_dft); ++i) { + const char * token_text_tgt = llama_token_get_text(model_tgt, i); + const char * token_text_dft = llama_token_get_text(model_dft, i); + if (std::strcmp(token_text_tgt, token_text_dft) != 0) { + fprintf(stderr, "%s: error: draft model vocab must match target model to use speculation but ", __func__); + fprintf(stderr, "token %d content differs - target '%s', draft '%s'\n", i, + llama_token_to_piece(ctx_tgt, i).c_str(), + llama_token_to_piece(ctx_dft, i).c_str()); + return 1; + } + } + } + // tokenize the prompt std::vector inp; inp = ::llama_tokenize(ctx_tgt, params.prompt, true); @@ -227,6 +257,7 @@ int main(int argc, char ** argv) { llama_batch_add (batch_dft, id, n_past_dft, { 0 }, true); llama_kv_cache_seq_rm(ctx_dft, 0, n_past_dft, -1); + // LOG("dft batch: %s\n", LOG_BATCH_TOSTR_PRETTY(ctx_dft, batch_dft).c_str()); llama_decode (ctx_dft, batch_dft); ++n_past_dft; @@ -370,7 +401,7 @@ int main(int argc, char ** argv) { llama_kv_cache_seq_cp(ctx_tgt, 0, s, -1, -1); } - //LOG("target batch: %s\n", LOG_BATCH_TOSTR_PRETTY(ctx_tgt, batch_tgt)); + // LOG("target batch: %s\n", LOG_BATCH_TOSTR_PRETTY(ctx_tgt, batch_tgt).c_str()); llama_decode(ctx_tgt, batch_tgt); ++n_past_tgt; } From fdee152e4eebb78c191df0b074857111d7f2aba7 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 28 Oct 2023 12:06:08 +0300 Subject: [PATCH 034/859] starcoder : add GPU offloading (#3827) * starcoder : do not GPU split 1D bias tensors * starcoder : offload layers to GPU ggml-ci --- llama.cpp | 106 +++++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 85 insertions(+), 21 deletions(-) diff --git a/llama.cpp b/llama.cpp index 408533d8a..6caa58960 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2695,8 +2695,8 @@ static void llm_load_tensors( } break; case LLM_ARCH_STARCODER: { - model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - model.pos_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); + model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.pos_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); // output { @@ -2747,19 +2747,19 @@ static void llm_load_tensors( layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, 
backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend_split); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); layer.w2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.b2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend_split); + layer.b2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); layer.w3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.b3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend_split); + layer.b3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); if (backend == GGML_BACKEND_GPU) { vram_weights += @@ -4616,6 +4616,8 @@ static struct ggml_cgraph * llm_build_starcoder( const float norm_eps = hparams.f_norm_eps; + const int n_gpu_layers = model.n_gpu_layers; + const int32_t n_tokens = batch.n_tokens; const int32_t n_kv = ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; @@ -4660,6 +4662,27 @@ static struct ggml_cgraph * llm_build_starcoder( } } + const int i_gpu_start = n_layer - n_gpu_layers; + (void) i_gpu_start; + + // offload functions set the tensor output backend to GPU + // tensors are GPU-accelerated if any input or the output has been offloaded + offload_func_t offload_func_nr = llama_nop; // nr = non-repeating + offload_func_t offload_func_kq = llama_nop; + offload_func_t offload_func_v = llama_nop; + +#ifdef GGML_USE_CUBLAS + if (n_gpu_layers > n_layer) { + offload_func_nr = ggml_cuda_assign_buffers_no_alloc; + } + if (n_gpu_layers > n_layer + 1) { + offload_func_v = ggml_cuda_assign_buffers_no_alloc; + } + if (n_gpu_layers > n_layer + 2) { + offload_func_kq = ggml_cuda_assign_buffers_no_alloc; + } +#endif // GGML_USE_CUBLAS + { // Compute position embeddings. 
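// [editor's sketch - not part of the original patch] The per-layer offload
// selection used above, reduced to a standalone helper. pick_offload_func is
// a hypothetical name; llama_nop mirrors the no-op callback from the patch.
// Layers below i_gpu_start keep the no-op callback and stay on the CPU.
struct ggml_tensor;
typedef void (*offload_func_t)(struct ggml_tensor *);

static void llama_nop(struct ggml_tensor *) { /* tensor stays on the CPU */ }

static offload_func_t pick_offload_func(int il, int n_layer, int n_gpu_layers,
                                        offload_func_t gpu_func) {
    const int i_gpu_start = n_layer - n_gpu_layers; // negative if all layers fit on the GPU
    return il >= i_gpu_start ? gpu_func : llama_nop;
}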
struct ggml_tensor * inp_positions = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); @@ -4685,6 +4708,7 @@ static struct ggml_cgraph * llm_build_starcoder( // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); ggml_set_name(KQ_mask, "KQ_mask"); + offload_func_kq(KQ_mask); ggml_allocr_alloc(lctx.alloc, KQ_mask); if (!ggml_allocr_is_measure(lctx.alloc)) { float * data = (float *) KQ_mask->data; @@ -4708,44 +4732,67 @@ static struct ggml_cgraph * llm_build_starcoder( ggml_set_name(inpL, "inpL"); for (int il = 0; il < n_layer; ++il) { + offload_func_t offload_func = llama_nop; + +#ifdef GGML_USE_CUBLAS + if (il >= i_gpu_start) { + offload_func = ggml_cuda_assign_buffers_no_alloc; + } +#endif // GGML_USE_CUBLAS + { // Norm cur = ggml_norm(ctx0, inpL, norm_eps); + offload_func(cur); + cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].attn_norm), model.layers[il].attn_norm_b); + offload_func(cur); } { // Self Attention - cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wqkv, cur), model.layers[il].bqkv); + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + offload_func_kq(cur); - struct ggml_tensor * tmpq = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*n_embd); - struct ggml_tensor * tmpk = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], sizeof(float)*n_embd); - struct ggml_tensor * tmpv = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], sizeof(float)*(n_embd + n_embd_gqa)); + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + offload_func_kq(cur); - struct ggml_tensor * Qcur = tmpq; + struct ggml_tensor * tmpq = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * tmpk = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * tmpv = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + ggml_set_name(tmpq, "tmpq"); + ggml_set_name(tmpk, "tmpk"); + ggml_set_name(tmpv, "tmpv"); + + offload_func_kq(tmpq); + offload_func_kq(tmpk); + offload_func_v (tmpv); + + struct ggml_tensor * Qcur = ggml_reshape_3d(ctx0, tmpq, n_embd_head, n_head, n_tokens); struct ggml_tensor * Kcur = tmpk; { - struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, ggml_cont(ctx0, tmpv), n_embd_gqa, n_tokens)); + struct ggml_tensor * Vcur = ggml_transpose(ctx0, tmpv); + offload_func_v(Vcur); ggml_set_name(Vcur, "Vcur"); struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, n_tokens*n_embd_gqa, (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); + offload_func_kq(k); ggml_set_name(k, "k"); struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, ( n_ctx)*ggml_element_size(kv_self.v), (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); + offload_func_v(v); + ggml_set_name(v, "v"); ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); } - struct ggml_tensor * Q = - ggml_permute(ctx0, - ggml_cpy(ctx0, - Qcur, - ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_embd_head, n_head, n_tokens)), - 0, 2, 1, 3); + struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); + offload_func_kq(Q); ggml_set_name(Q, "Q"); struct ggml_tensor * K = @@ -4754,23 +4801,28 @@ static struct ggml_cgraph * llm_build_starcoder( 
ggml_element_size(kv_self.k)*n_embd_gqa, ggml_element_size(kv_self.k)*n_embd_head, ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); + offload_func_kq(K); ggml_set_name(K, "K"); // K * Q struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); + offload_func_kq(KQ); ggml_set_name(KQ, "KQ"); // KQ_scaled = KQ / sqrt(n_embd_head) // KQ_scaled shape [n_past + n_tokens, n_tokens, n_head, 1] struct ggml_tensor * KQ_scaled = ggml_scale_inplace(ctx0, KQ, KQ_scale); + offload_func_kq(KQ_scaled); ggml_set_name(KQ_scaled, "KQ_scaled"); // KQ_masked = mask_past(KQ_scaled) struct ggml_tensor * KQ_masked = ggml_add(ctx0, KQ_scaled, KQ_mask); + offload_func_kq(KQ_masked); ggml_set_name(KQ_masked, "KQ_masked"); // KQ = soft_max(KQ_masked) struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); + offload_func_v(KQ_soft_max); ggml_set_name(KQ_soft_max, "KQ_soft_max"); // split cached V into n_head heads @@ -4783,22 +4835,25 @@ static struct ggml_cgraph * llm_build_starcoder( ggml_set_name(V, "V"); struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); + offload_func_v(KQV); ggml_set_name(KQV, "KQV"); - // KQV_merged = KQV.permute(0, 2, 1, 3) struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); + offload_func_v(KQV_merged); ggml_set_name(KQV_merged, "KQV_merged"); - // cur = KQV_merged.contiguous().view(n_embd, n_tokens) cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); + offload_func_v(cur); ggml_set_name(cur, "KQV_merged_contiguous"); } // Projection cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wo, cur), model.layers[il].bo); + offload_func(cur); // Add the input cur = ggml_add(ctx0, cur, inpL); + offload_func(cur); struct ggml_tensor * inpFF = cur; @@ -4807,27 +4862,36 @@ static struct ggml_cgraph * llm_build_starcoder( // Norm { cur = ggml_norm(ctx0, inpFF, norm_eps); + offload_func_nr(cur); + cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].ffn_norm), model.layers[il].ffn_norm_b); + offload_func_nr(cur); } cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].w3, cur), model.layers[il].b3); + offload_func(cur); // GELU activation cur = ggml_gelu(ctx0, cur); + offload_func(cur); // Projection cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].w2, cur), model.layers[il].b2); + offload_func(cur); } inpL = ggml_add(ctx0, cur, inpFF); + } // Output Norm { cur = ggml_norm(ctx0, inpL, norm_eps); + offload_func_nr(cur); + cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.output_norm), model.output_norm_b); + ggml_set_name(cur, "result_norm"); } - ggml_set_name(cur, "result_norm"); cur = ggml_mul_mat(ctx0, model.output, cur); ggml_set_name(cur, "result_output"); From 177461104b454163473dced2a5038f4e016cdb7e Mon Sep 17 00:00:00 2001 From: Henk Poley Date: Sat, 28 Oct 2023 12:16:33 +0200 Subject: [PATCH 035/859] common : print that one line of the syntax help *also* to standard output (#3823) --- common/common.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 44bb76618..c0d4924e2 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -743,7 +743,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { #endif // GGML_USE_CUBLAS #endif printf(" --verbose-prompt print prompt before generation\n"); - fprintf(stderr, " --simple-io use basic IO for better compatibility in subprocesses and limited consoles\n"); + printf(" --simple-io use basic IO for better compatibility in subprocesses and limited consoles\n"); printf(" --lora FNAME apply LoRA adapter 
(implies --no-mmap)\n");
     printf("  --lora-scaled FNAME S apply LoRA adapter with user defined scaling S (implies --no-mmap)\n");
     printf("  --lora-base FNAME     optional model to use as a base for the layers modified by the LoRA adapter\n");

From ee1a0ec9cb367ba41d138134795cbbbe93d2bf1c Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Sat, 28 Oct 2023 14:23:11 +0300
Subject: [PATCH 036/859] llama : add option for greedy sampling with probs
 (#3813)

* llama : add option for greedy sampling with probs

* llama : add comment about llama_sample_token_greedy() missing probs

* sampling : temp == 0.0 -> no probs, temp < 0.0 -> probs

---
 common/common.cpp                    | 1 +
 common/sampling.cpp                  | 8 ++++++--
 examples/speculative/speculative.cpp | 2 +-
 llama.h                              | 1 +
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/common/common.cpp b/common/common.cpp
index c0d4924e2..f81f4d354 100644
--- a/common/common.cpp
+++ b/common/common.cpp
@@ -224,6 +224,7 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) {
                 break;
             }
             sparams.temp = std::stof(argv[i]);
+            sparams.temp = std::max(sparams.temp, 0.0f);
         } else if (arg == "--tfs") {
             if (++i >= argc) {
                 invalid_param = true;
diff --git a/common/sampling.cpp b/common/sampling.cpp
index 5258d4e82..c4996c985 100644
--- a/common/sampling.cpp
+++ b/common/sampling.cpp
@@ -167,8 +167,12 @@ llama_token llama_sampling_sample(
         llama_sample_grammar(ctx_main, &cur_p, ctx_sampling->grammar);
     }

-    if (temp <= 0) {
-        // greedy sampling
+    if (temp < 0.0) {
+        // greedy sampling, with probs
+        llama_sample_softmax(ctx_main, &cur_p);
+        id = cur_p.data[0].id;
+    } else if (temp == 0.0) {
+        // greedy sampling, no probs
         id = llama_sample_token_greedy(ctx_main, &cur_p);
     } else {
         if (mirostat == 1) {
diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp
index f921b7845..323c74652 100644
--- a/examples/speculative/speculative.cpp
+++ b/examples/speculative/speculative.cpp
@@ -148,7 +148,7 @@ int main(int argc, char ** argv) {
     std::vector drafts(n_seq_dft);

     params.sparams.grammar.clear(); // the draft samplers will copy the target sampler's grammar
-    params.sparams.temp = std::max(0.01f, params.sparams.temp);
+    params.sparams.temp = -1.0f;    // force greedy sampling with probs for the draft model

     for (int s = 0; s < n_seq_dft; ++s) {
         drafts[s].ctx_sampling = llama_sampling_init(params.sparams);
diff --git a/llama.h b/llama.h
index beac9a0ce..d901dcd91 100644
--- a/llama.h
+++ b/llama.h
@@ -658,6 +658,7 @@ extern "C" {
                        float * mu);

     /// @details Selects the token with the highest probability.
+    /// Does not compute the token probabilities. Use llama_sample_softmax() instead.
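// [editor's note - usage sketch, not part of the original patch] The new
// temperature convention from the caller's side, using the
// llama_sampling_params struct that common/sampling.cpp above reads from:
//
//   sparams.temp =  0.8f; // ordinary temperature sampling
//   sparams.temp =  0.0f; // greedy argmax; candidate probs are NOT computed
//   sparams.temp = -1.0f; // greedy argmax after softmax, so probs stay valid,
//                         // which speculative decoding needs in order to
//                         // accept or reject draft tokens (see change above)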
LLAMA_API llama_token llama_sample_token_greedy( struct llama_context * ctx, llama_token_data_array * candidates); From bd6d9e205982b34e0ba2c3b22bbf31a1ef1a1bb5 Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Sat, 28 Oct 2023 05:54:24 -0600 Subject: [PATCH 037/859] llama : allow quantizing k-quants to fall back when tensor size incompatible (#3747) * Allow quantizing k-quants to fall back when tensor size incompatible * quantizing: Add warning when tensors were incompatible with k-quants Clean up k-quants state passing a bit --- llama.cpp | 108 ++++++++++++++++++++++++++++++++---------------------- 1 file changed, 65 insertions(+), 43 deletions(-) diff --git a/llama.cpp b/llama.cpp index 6caa58960..3d431ee7b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8049,6 +8049,24 @@ struct no_init { no_init() { /* do nothing */ } }; +struct quantize_state_internal { + const llama_model & model; + const llama_model_quantize_params * params; +#ifdef GGML_USE_K_QUANTS + int n_attention_wv = 0; + int n_feed_forward_w2 = 0; + int i_attention_wv = 0; + int i_feed_forward_w2 = 0; + + int n_k_quantized = 0; + int n_fallback = 0; +#endif + quantize_state_internal(const llama_model & model, const llama_model_quantize_params * params) + : model(model) + , params(params) + {} +}; + static void llama_convert_tensor_internal( struct ggml_tensor * tensor, std::vector> & output, std::vector & workers, const size_t nelements, const int nthread @@ -8109,12 +8127,13 @@ static void llama_convert_tensor_internal( #ifdef GGML_USE_K_QUANTS static ggml_type get_k_quant_type( - ggml_type new_type, const ggml_tensor * tensor, const llama_model & model, llama_ftype ftype, int * i_attention_wv, - int n_attention_wv, int * i_feed_forward_w2, int n_feed_forward_w2 + quantize_state_internal & qs, + ggml_type new_type, const ggml_tensor * tensor, llama_ftype ftype ) { const std::string name = ggml_get_name(tensor); // TODO: avoid hardcoded tensor names - use the TN_* constants - const auto tn = LLM_TN(model.arch); + const llm_arch arch = qs.model.arch; + const auto tn = LLM_TN(arch); auto use_more_bits = [](int i_layer, int num_layers) -> bool { return i_layer < num_layers/8 || i_layer >= 7*num_layers/8 || (i_layer - num_layers/8)%3 == 2; @@ -8122,7 +8141,7 @@ static ggml_type get_k_quant_type( if (name == tn(LLM_TENSOR_OUTPUT, "weight")) { int nx = tensor->ne[0]; - if (model.arch == LLM_ARCH_FALCON || nx % QK_K != 0) { + if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) { new_type = GGML_TYPE_Q8_0; } else if (new_type != GGML_TYPE_Q8_0) { @@ -8131,46 +8150,46 @@ static ggml_type get_k_quant_type( } else if (name.find("attn_v.weight") != std::string::npos) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { - new_type = *i_attention_wv < 2 ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; + new_type = qs.i_attention_wv < 2 ? 
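// [editor's note, derived from the use_more_bits lambda above]
// use_more_bits(i_layer, num_layers) upgrades the first num_layers/8 layers,
// the last num_layers/8 layers, and every third layer in between. For
// num_layers = 32 it is true for
//   i_layer in {0..3}, {6, 9, 12, 15, 18, 21, 24, 27} and {28..31},
// i.e. those layers get one step more precision in the Q4_K_M/Q5_K_M mixes.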
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; else if ((ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) && - use_more_bits(*i_attention_wv, n_attention_wv)) new_type = GGML_TYPE_Q6_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && *i_attention_wv < 4) new_type = GGML_TYPE_Q5_K; + use_more_bits(qs.i_attention_wv, qs.n_attention_wv)) new_type = GGML_TYPE_Q6_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && qs.i_attention_wv < 4) new_type = GGML_TYPE_Q5_K; else if (QK_K == 64 && (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S) && - (*i_attention_wv < n_attention_wv/8 || *i_attention_wv >= 7*n_attention_wv/8)) new_type = GGML_TYPE_Q6_K; - if (model.type == MODEL_70B) { + (qs.i_attention_wv < qs.n_attention_wv/8 || qs.i_attention_wv >= 7*qs.n_attention_wv/8)) new_type = GGML_TYPE_Q6_K; + if (qs.model.type == MODEL_70B) { // In the 70B model we have 8 heads sharing the same attn_v weights. As a result, the attn_v.weight tensor is // 8x smaller compared to attn_q.weight. Hence, we can get a nice boost in quantization accuracy with // nearly negligible increase in model size by quantizing this tensor with more bits: if (new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K) new_type = GGML_TYPE_Q5_K; } - ++*i_attention_wv; + ++qs.i_attention_wv; } else if (name.find("ffn_down.weight") != std::string::npos) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { - new_type = *i_feed_forward_w2 < 2 ? GGML_TYPE_Q5_K - : model.arch != LLM_ARCH_FALCON || use_more_bits(*i_feed_forward_w2, n_feed_forward_w2) ? GGML_TYPE_Q4_K + new_type = qs.i_feed_forward_w2 < 2 ? GGML_TYPE_Q5_K + : arch != LLM_ARCH_FALCON || use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) { - new_type = model.arch == LLM_ARCH_FALCON ? GGML_TYPE_Q4_K : GGML_TYPE_Q5_K; + new_type = arch == LLM_ARCH_FALCON ? GGML_TYPE_Q4_K : GGML_TYPE_Q5_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { - if (model.arch == LLM_ARCH_FALCON) { - new_type = *i_feed_forward_w2 < 2 ? GGML_TYPE_Q6_K : - use_more_bits(*i_feed_forward_w2, n_feed_forward_w2) ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; + if (arch == LLM_ARCH_FALCON) { + new_type = qs.i_feed_forward_w2 < 2 ? GGML_TYPE_Q6_K : + use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? 
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else { - if (use_more_bits(*i_feed_forward_w2, n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; + if (use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; } } - else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(*i_feed_forward_w2, n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && model.arch != LLM_ARCH_FALCON && *i_feed_forward_w2 < 4) { + else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && qs.i_feed_forward_w2 < 4) { new_type = GGML_TYPE_Q5_K; } - ++*i_feed_forward_w2; + ++qs.i_feed_forward_w2; } else if (name.find("attn_output.weight") != std::string::npos) { - if (model.arch != LLM_ARCH_FALCON) { + if (arch != LLM_ARCH_FALCON) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) new_type = GGML_TYPE_Q4_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; @@ -8197,20 +8216,23 @@ static ggml_type get_k_quant_type( int nx = tensor->ne[0]; int ny = tensor->ne[1]; if (nx % QK_K != 0) { - LLAMA_LOG_WARN("\n\n%s : tensor cols %d x %d are not divisible by %d, required for k-quants\n", __func__, nx, ny, QK_K); + LLAMA_LOG_WARN("\n\n%s : tensor cols %d x %d are not divisible by %d, required for %s", __func__, nx, ny, QK_K, ggml_type_name(new_type)); convert_incompatible_tensor = true; + } else { + ++qs.n_k_quantized; } } if (convert_incompatible_tensor) { - if (name == tn(LLM_TENSOR_OUTPUT, "weight")) { - new_type = GGML_TYPE_F16; //fall back to F16 instead of just failing. - LLAMA_LOG_WARN("F16 will be used for this tensor instead.\n"); - } else if (name == tn(LLM_TENSOR_TOKEN_EMBD, "weight")) { - new_type = GGML_TYPE_Q4_0; //fall back to Q4_0 instead of just failing. 
- LLAMA_LOG_WARN("Q4_0 will be used for this tensor instead.\n"); - } else { - throw std::runtime_error("Unsupported tensor size encountered\n"); + switch (new_type) { + case GGML_TYPE_Q2_K: new_type = GGML_TYPE_Q4_0; break; + case GGML_TYPE_Q3_K: new_type = GGML_TYPE_Q4_1; break; + case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; + case GGML_TYPE_Q5_K: new_type = GGML_TYPE_Q5_1; break; + case GGML_TYPE_Q6_K: new_type = GGML_TYPE_Q8_0; break; + default: throw std::runtime_error("\nUnsupported tensor size encountered\n"); } + LLAMA_LOG_WARN(" - using fallback quantization %s\n", ggml_type_name(new_type)); + ++qs.n_fallback; } return new_type; @@ -8268,6 +8290,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s llm_load_arch(ml, model); llm_load_hparams(ml, model); + struct quantize_state_internal qs(model, params); + if (params->only_copy) { ftype = model.ftype; } @@ -8281,9 +8305,6 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s gguf_set_val_u32(ctx_out, "general.file_type", ftype); #ifdef GGML_USE_K_QUANTS - int n_attention_wv = 0; - int n_feed_forward_w2 = 0; - for (int i = 0; i < ml.n_tensors; ++i) { struct ggml_tensor * meta = ml.get_tensor_meta(i); @@ -8291,19 +8312,16 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // TODO: avoid hardcoded tensor names - use the TN_* constants if (name.find("attn_v.weight") != std::string::npos || name.find("attn_qkv.weight") != std::string::npos) { - ++n_attention_wv; + ++qs.n_attention_wv; } else if (name.find("ffn_down.weight") != std::string::npos) { - ++n_feed_forward_w2; + ++qs.n_feed_forward_w2; } } - if (n_attention_wv != n_feed_forward_w2 || (uint32_t)n_attention_wv != model.hparams.n_layer) { + if (qs.n_attention_wv != qs.n_feed_forward_w2 || (uint32_t)qs.n_attention_wv != model.hparams.n_layer) { LLAMA_LOG_WARN("%s ============ Strange model: n_attention_wv = %d, n_feed_forward_w2 = %d, hparams.n_layer = %d\n", - __func__, n_attention_wv, n_feed_forward_w2, model.hparams.n_layer); + __func__, qs.n_attention_wv, qs.n_feed_forward_w2, model.hparams.n_layer); } - - int i_attention_wv = 0; - int i_feed_forward_w2 = 0; #endif size_t total_size_org = 0; @@ -8370,9 +8388,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (quantize) { new_type = quantized_type; #ifdef GGML_USE_K_QUANTS - new_type = get_k_quant_type( - new_type, tensor, model, ftype, &i_attention_wv, n_attention_wv, &i_feed_forward_w2, n_feed_forward_w2 - ); + new_type = get_k_quant_type(qs, new_type, tensor, ftype); #endif // If we've decided to quantize to the same type the tensor is already // in then there's nothing to do. 
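// [editor's sketch - not part of the original patch] The fallback table above
// as a standalone helper (assumes ggml.h for ggml_type): each k-quant maps to
// the legacy quant with the closest bits-per-weight when a row size is not a
// multiple of QK_K.
static enum ggml_type k_quant_fallback(enum ggml_type t) {
    switch (t) {
        case GGML_TYPE_Q2_K: return GGML_TYPE_Q4_0;
        case GGML_TYPE_Q3_K: return GGML_TYPE_Q4_1;
        case GGML_TYPE_Q4_K: return GGML_TYPE_Q5_0;
        case GGML_TYPE_Q5_K: return GGML_TYPE_Q5_1;
        case GGML_TYPE_Q6_K: return GGML_TYPE_Q8_0;
        default:             return t; // already a compatible type
    }
}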
@@ -8498,6 +8514,12 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s LLAMA_LOG_INFO("\n"); } } +#ifdef GGML_USE_K_QUANTS + if (qs.n_fallback > 0) { + LLAMA_LOG_WARN("%s: WARNING: %d of %d tensor(s) incompatible with k-quants and required fallback quantization\n", + __func__, qs.n_fallback, qs.n_k_quantized + qs.n_fallback); + } +#endif } static int llama_apply_lora_from_file_internal( From 8a2f2fea2914aaa3f4b2f82800c7de15f15bdb09 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 28 Oct 2023 15:25:15 +0300 Subject: [PATCH 038/859] convert : ignore tokens if their IDs are within [0, vocab_size) (#3831) --- convert.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/convert.py b/convert.py index 0680f71ea..bfbfab283 100755 --- a/convert.py +++ b/convert.py @@ -366,16 +366,19 @@ class SentencePieceVocab: added_tokens = {} vocab_size: int = self.sentencepiece_tokenizer.vocab_size() - expected_ids = list(range(vocab_size, vocab_size + len(added_tokens))) - actual_ids = sorted(added_tokens.values()) - if expected_ids != actual_ids: - raise Exception(f"Expected added token IDs to be sequential and start at {vocab_size}; got {actual_ids}") - items = sorted(added_tokens.items(), key=lambda text_idx: text_idx[1]) - self.added_tokens_list = [text for (text, idx) in items] - self.vocab_size_base: int = vocab_size - self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_list) - self.fname_tokenizer = fname_tokenizer + new_tokens = {id: piece for piece, id in added_tokens.items() if id >= vocab_size} + expected_new_ids = list(range(vocab_size, vocab_size + len(new_tokens))) + actual_new_ids = sorted(new_tokens.keys()) + + if expected_new_ids != actual_new_ids: + raise ValueError(f"Expected new token IDs {expected_new_ids} to be sequential; got {actual_new_ids}") + + # Token pieces that were added to the base vocabulary. + self.added_tokens_list = [new_tokens[id] for id in actual_new_ids] + self.vocab_size_base = vocab_size + self.vocab_size = self.vocab_size_base + len(self.added_tokens_list) + self.fname_tokenizer = fname_tokenizer self.fname_added_tokens = fname_added_tokens def sentencepiece_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: From ba231e8a6dd8ad82acfe0e4d492ff7cef6b3f0a1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 28 Oct 2023 15:25:33 +0300 Subject: [PATCH 039/859] issues : change label from bug to bug-unconfirmed (#3748) --- .github/ISSUE_TEMPLATE/bug.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index d7879b232..c003fe7c1 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -1,7 +1,7 @@ --- name: Bug template about: Used to report bugs in llama.cpp -labels: ["bug"] +labels: ["bug-unconfirmed"] assignees: '' --- From 82a6646e0221216c41edcdf99f5a44bb051391f5 Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Sat, 28 Oct 2023 15:43:01 +0300 Subject: [PATCH 040/859] metal : try cwd for ggml-metal.metal if bundle lookup fails (#3793) * Try cwd for ggml-metal if bundle lookup fails When building with `-DBUILD_SHARED_LIBS=ON -DLLAMA_METAL=ON -DLLAMA_BUILD_SERVER=ON`, `server` would fail to load `ggml-metal.metal` because `[bundle pathForResource:...]` returns `nil`. In that case, fall back to `ggml-metal.metal` in the cwd instead of passing `null` as a path. 
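A portable restatement of that fallback, for illustration only: the actual fix below is Objective-C in ggml-metal.m, and find_metal_source is a hypothetical name.

#include <filesystem>
#include <string>

// If the bundle lookup produced no usable path, assume the file sits in the
// current working directory and try that instead.
static std::string find_metal_source(const std::string & bundle_path) {
    if (!bundle_path.empty() && std::filesystem::exists(bundle_path)) {
        return bundle_path;
    }
    return "ggml-metal.metal"; // cwd fallback, mirroring the patch
}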
Follows up on #1782 * Update ggml-metal.m --------- Co-authored-by: Georgi Gerganov --- ggml-metal.m | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index c1901dca7..2380c4310 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -210,6 +210,10 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); NSString * sourcePath = [bundle pathForResource:@"ggml-metal" ofType:@"metal"]; + if (sourcePath == nil) { + GGML_METAL_LOG_WARN("%s: error: could not use bundle path to find ggml-metal.metal, falling back to trying cwd\n", __func__); + sourcePath = @"ggml-metal.metal"; + } GGML_METAL_LOG_INFO("%s: loading '%s'\n", __func__, [sourcePath UTF8String]); NSString * src = [NSString stringWithContentsOfFile:sourcePath encoding:NSUTF8StringEncoding error:&error]; if (error) { From ff3bad83e29e3009010cbc923bebd769055eaa7f Mon Sep 17 00:00:00 2001 From: Erik Scholz Date: Sat, 28 Oct 2023 16:41:07 +0200 Subject: [PATCH 041/859] flake : update flake.lock for newer transformers version + provide extra dev shell (#3797) * flake : update flake.lock for newer transformers version + provide extra dev shell with torch and transformers (for most convert-xxx.py scripts) --- flake.lock | 6 +++--- flake.nix | 7 +++++++ 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index a7777d05d..070f0e161 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1692913444, - "narHash": "sha256-1SvMQm2DwofNxXVtNWWtIcTh7GctEVrS/Xel/mdc6iY=", + "lastModified": 1698134075, + "narHash": "sha256-foCD+nuKzfh49bIoiCBur4+Fx1nozo+4C/6k8BYk4sg=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "18324978d632ffc55ef1d928e81630c620f4f447", + "rev": "8efd5d1e283604f75a808a20e6cde0ef313d07d4", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index cfc4776a4..fa34394b2 100644 --- a/flake.nix +++ b/flake.nix @@ -51,6 +51,9 @@ }; llama-python = pkgs.python3.withPackages (ps: with ps; [ numpy sentencepiece ]); + # TODO(Green-Sky): find a better way to opt-into the heavy ml python runtime + llama-python-extra = + pkgs.python3.withPackages (ps: with ps; [ numpy sentencepiece torchWithoutCuda transformers ]); postPatch = '' substituteInPlace ./ggml-metal.m \ --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";" @@ -126,5 +129,9 @@ buildInputs = [ llama-python ]; packages = nativeBuildInputs ++ osSpecific; }; + devShells.extra = pkgs.mkShell { + buildInputs = [ llama-python-extra ]; + packages = nativeBuildInputs ++ osSpecific; + }; }); } From d69d777c02b9ac405a95f3cbfba219a990caefff Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 29 Oct 2023 18:32:28 +0200 Subject: [PATCH 042/859] ggml : quantization refactoring (#3833) * ggml : factor all quantization code in ggml-quants ggml-ci * ggml-quants : fix Zig and Swift builds + quantize tool ggml-ci * quantize : --pure option for disabling k-quant mixtures --------- Co-authored-by: cebtenzzre --- CMakeLists.txt | 12 +- Makefile | 18 +- Package.swift | 3 +- build.zig | 21 +- examples/quantize/quantize.cpp | 9 +- k_quants.c => ggml-quants.c | 2248 ++++++++++++++++++++++++++++++- k_quants.h => ggml-quants.h | 103 +- ggml.c | 2301 +------------------------------- ggml.h | 7 + llama.cpp | 34 +- llama.h | 1 + 11 files changed, 2372 insertions(+), 2385 deletions(-) rename k_quants.c => ggml-quants.c (71%) rename k_quants.h => 
ggml-quants.h (63%) diff --git a/CMakeLists.txt b/CMakeLists.txt index d9fc86237..3659279e2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -94,7 +94,6 @@ option(LLAMA_CLBLAST "llama: use CLBlast" option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG "llama: disable Metal debugging" OFF) option(LLAMA_MPI "llama: use MPI" OFF) -option(LLAMA_K_QUANTS "llama: use k-quants" ON) option(LLAMA_QKK_64 "llama: use super-block size of 64 for k-quants" OFF) option(LLAMA_BUILD_TESTS "llama: build tests" ${LLAMA_STANDALONE}) @@ -278,13 +277,8 @@ if (LLAMA_BLAS) endif() endif() -if (LLAMA_K_QUANTS) - set(GGML_HEADERS_EXTRA k_quants.h) - set(GGML_SOURCES_EXTRA k_quants.c) - add_compile_definitions(GGML_USE_K_QUANTS) - if (LLAMA_QKK_64) - add_compile_definitions(GGML_QKK_64) - endif() +if (LLAMA_QKK_64) + add_compile_definitions(GGML_QKK_64) endif() if (LLAMA_CUBLAS) @@ -673,6 +667,8 @@ add_library(ggml OBJECT ggml-alloc.h ggml-backend.c ggml-backend.h + ggml-quants.c + ggml-quants.h ${GGML_SOURCES_CUDA} ${GGML_HEADERS_CUDA} ${GGML_SOURCES_OPENCL} ${GGML_HEADERS_OPENCL} ${GGML_SOURCES_METAL} ${GGML_HEADERS_METAL} diff --git a/Makefile b/Makefile index 68069f9ff..2cecc2216 100644 --- a/Makefile +++ b/Makefile @@ -342,13 +342,9 @@ else MK_CXXFLAGS += -march=rv64gcv -mabi=lp64d endif -ifndef LLAMA_NO_K_QUANTS - MK_CPPFLAGS += -DGGML_USE_K_QUANTS - OBJS += k_quants.o ifdef LLAMA_QKK_64 MK_CPPFLAGS += -DGGML_QKK_64 endif -endif ifndef LLAMA_NO_ACCELERATE # Mac OS - include Accelerate framework. @@ -365,7 +361,7 @@ ifdef LLAMA_MPI MK_CPPFLAGS += -DGGML_USE_MPI MK_CFLAGS += -Wno-cast-qual MK_CXXFLAGS += -Wno-cast-qual - OBJS += ggml-mpi.o + OBJS += ggml-mpi.o endif # LLAMA_MPI ifdef LLAMA_OPENBLAS @@ -382,7 +378,7 @@ endif # LLAMA_BLIS ifdef LLAMA_CUBLAS MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib - OBJS += ggml-cuda.o + OBJS += ggml-cuda.o NVCCFLAGS = --forward-unknown-to-host-compiler -use_fast_math ifdef LLAMA_CUDA_NVCC NVCC = $(LLAMA_CUDA_NVCC) @@ -497,11 +493,6 @@ ggml-mpi.o: ggml-mpi.c ggml-mpi.h $(CC) $(CFLAGS) -c $< -o $@ endif # LLAMA_MPI -ifndef LLAMA_NO_K_QUANTS -k_quants.o: k_quants.c k_quants.h - $(CC) $(CFLAGS) -c $< -o $@ -endif # LLAMA_NO_K_QUANTS - # combine build flags with cmdline overrides override CFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CFLAGS) $(CFLAGS) override CXXFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CXXFLAGS) $(CXXFLAGS) @@ -542,7 +533,10 @@ ggml-alloc.o: ggml-alloc.c ggml.h ggml-alloc.h ggml-backend.o: ggml-backend.c ggml.h ggml-backend.h $(CC) $(CFLAGS) -c $< -o $@ -OBJS += ggml-alloc.o ggml-backend.o +ggml-quants.o: ggml-quants.c ggml.h ggml-quants.h + $(CC) $(CFLAGS) -c $< -o $@ + +OBJS += ggml-alloc.o ggml-backend.o ggml-quants.o llama.o: llama.cpp ggml.h ggml-alloc.h ggml-backend.h ggml-cuda.h ggml-metal.h llama.h $(CXX) $(CXXFLAGS) -c $< -o $@ diff --git a/Package.swift b/Package.swift index 4ab055b19..5b3bd72ca 100644 --- a/Package.swift +++ b/Package.swift @@ -42,13 +42,12 @@ let package = Package( "llama.cpp", "ggml-alloc.c", "ggml-backend.c", - "k_quants.c", + "ggml-quants.c", ] + additionalSources, resources: resources, publicHeadersPath: "spm-headers", cSettings: [ .unsafeFlags(["-Wno-shorten-64-to-32", "-O3", "-DNDEBUG"]), - .define("GGML_USE_K_QUANTS"), .define("GGML_USE_ACCELERATE") // NOTE: 
NEW_LAPACK will required iOS version 16.4+ // We should consider add this in the future when we drop support for iOS 14 diff --git a/build.zig b/build.zig index dcfa3dd6b..9b58b74ca 100644 --- a/build.zig +++ b/build.zig @@ -116,15 +116,10 @@ pub fn build(b: *std.build.Builder) !void { var make = try Maker.init(b); make.enable_lto = b.option(bool, "lto", "Enable LTO optimization, (default: false)") orelse false; - if (b.option(bool, "k-quants", "Enable K-quants, (default: true)") orelse true) { - try make.addFlag("-DGGML_USE_K_QUANTS"); - const k_quants = make.obj("k_quants", "k_quants.c"); - try make.objs.append(k_quants); - } - const ggml = make.obj("ggml", "ggml.c"); const ggml_alloc = make.obj("ggml-alloc", "ggml-alloc.c"); const ggml_backend = make.obj("ggml-backend", "ggml-backend.c"); + const ggml_quants = make.obj("ggml-quants", "ggml-quants.c"); const llama = make.obj("llama", "llama.cpp"); const common = make.obj("common", "common/common.cpp"); const console = make.obj("console", "common/console.cpp"); @@ -133,14 +128,14 @@ pub fn build(b: *std.build.Builder) !void { const train = make.obj("train", "common/train.cpp"); const clip = make.obj("clip", "examples/llava/clip.cpp"); - _ = make.exe("main", "examples/main/main.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, sampling, console, grammar_parser }); - _ = make.exe("quantize", "examples/quantize/quantize.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common }); - _ = make.exe("perplexity", "examples/perplexity/perplexity.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common }); - _ = make.exe("embedding", "examples/embedding/embedding.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common }); - _ = make.exe("finetune", "examples/finetune/finetune.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, train }); - _ = make.exe("train-text-from-scratch", "examples/train-text-from-scratch/train-text-from-scratch.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, train }); + _ = make.exe("main", "examples/main/main.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, sampling, console, grammar_parser }); + _ = make.exe("quantize", "examples/quantize/quantize.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common }); + _ = make.exe("perplexity", "examples/perplexity/perplexity.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common }); + _ = make.exe("embedding", "examples/embedding/embedding.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common }); + _ = make.exe("finetune", "examples/finetune/finetune.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, train }); + _ = make.exe("train-text-from-scratch", "examples/train-text-from-scratch/train-text-from-scratch.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, train }); - const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, sampling, grammar_parser, clip }); + const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, sampling, grammar_parser, clip }); if (server.target.isWindows()) { server.linkSystemLibrary("ws2_32"); } diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index c7dd0d894..be0b2fe1e 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -18,7 +18,6 @@ static const std::vector QUANT_OPTIONS = { { "Q4_1", LLAMA_FTYPE_MOSTLY_Q4_1, " 3.90G, +0.1585 ppl @ LLaMA-v1-7B", }, 
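// [editor's sketch - not part of the original patch] Intended effect of the
// new --pure flag through the C API, assuming the llama_model_quantize_params
// field added to llama.h by this patch; the file names are placeholders.
#include "llama.h"

int quantize_pure_example(void) {
    llama_model_quantize_params qparams = llama_model_quantize_default_params();
    qparams.ftype = LLAMA_FTYPE_MOSTLY_Q4_K_M;
    qparams.pure  = true; // no k-quant mixture: every tensor becomes Q4_K
    return llama_model_quantize("model-f32.gguf", "model-q4_k.gguf", &qparams);
}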
{ "Q5_0", LLAMA_FTYPE_MOSTLY_Q5_0, " 4.33G, +0.0683 ppl @ LLaMA-v1-7B", }, { "Q5_1", LLAMA_FTYPE_MOSTLY_Q5_1, " 4.70G, +0.0349 ppl @ LLaMA-v1-7B", }, -#ifdef GGML_USE_K_QUANTS { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, { "Q3_K", LLAMA_FTYPE_MOSTLY_Q3_K_M, "alias for Q3_K_M" }, { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, @@ -31,7 +30,6 @@ static const std::vector QUANT_OPTIONS = { { "Q5_K_S", LLAMA_FTYPE_MOSTLY_Q5_K_S, " 4.33G, +0.0400 ppl @ LLaMA-v1-7B", }, { "Q5_K_M", LLAMA_FTYPE_MOSTLY_Q5_K_M, " 4.45G, +0.0122 ppl @ LLaMA-v1-7B", }, { "Q6_K", LLAMA_FTYPE_MOSTLY_Q6_K, " 5.15G, -0.0008 ppl @ LLaMA-v1-7B", }, -#endif { "Q8_0", LLAMA_FTYPE_MOSTLY_Q8_0, " 6.70G, +0.0004 ppl @ LLaMA-v1-7B", }, { "F16", LLAMA_FTYPE_MOSTLY_F16, "13.00G @ 7B", }, { "F32", LLAMA_FTYPE_ALL_F32, "26.00G @ 7B", }, @@ -70,13 +68,14 @@ static bool try_parse_ftype(const std::string & ftype_str_in, llama_ftype & ftyp } // usage: -// ./quantize [--allow-requantize] [--leave-output-tensor] models/llama/ggml-model.gguf [models/llama/ggml-model-quant.gguf] type [nthreads] +// ./quantize [--allow-requantize] [--leave-output-tensor] [--pure] models/llama/ggml-model.gguf [models/llama/ggml-model-quant.gguf] type [nthreads] // [[noreturn]] static void usage(const char * executable) { - printf("usage: %s [--help] [--allow-requantize] [--leave-output-tensor] model-f32.gguf [model-quant.gguf] type [nthreads]\n\n", executable); + printf("usage: %s [--help] [--allow-requantize] [--leave-output-tensor] [--pure] model-f32.gguf [model-quant.gguf] type [nthreads]\n\n", executable); printf(" --allow-requantize: Allows requantizing tensors that have already been quantized. Warning: This can severely reduce quality compared to quantizing from 16bit or 32bit\n"); printf(" --leave-output-tensor: Will leave output.weight un(re)quantized. 
Increases model size but may also increase quality, especially when requantizing\n"); + printf(" --pure: Disable k-quant mixtures and quantize all tensors to the same type\n"); printf("\nAllowed quantization types:\n"); for (auto & it : QUANT_OPTIONS) { if (it.name != "COPY") { @@ -103,6 +102,8 @@ int main(int argc, char ** argv) { params.quantize_output_tensor = false; } else if (strcmp(argv[arg_idx], "--allow-requantize") == 0) { params.allow_requantize = true; + } else if (strcmp(argv[arg_idx], "--pure") == 0) { + params.pure = true; } else { usage(argv[0]); } diff --git a/k_quants.c b/ggml-quants.c similarity index 71% rename from k_quants.c rename to ggml-quants.c index 801941fbe..fd4ee1be6 100644 --- a/k_quants.c +++ b/ggml-quants.c @@ -1,9 +1,10 @@ -#include "k_quants.h" +#include "ggml-quants.h" #include "ggml.h" #include #include #include +#include #ifdef __ARM_NEON @@ -65,6 +66,1024 @@ inline static int32_t vaddvq_s32(int32x4_t v) { #define MM256_SET_M128I(a, b) _mm256_insertf128_si256(_mm256_castsi128_si256(b), (a), 1) +#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) +// multiply int8_t, add results pairwise twice +static inline __m128i mul_sum_i8_pairs(const __m128i x, const __m128i y) { + // Get absolute values of x vectors + const __m128i ax = _mm_sign_epi8(x, x); + // Sign the values of the y vectors + const __m128i sy = _mm_sign_epi8(y, x); + // Perform multiplication and create 16-bit values + const __m128i dot = _mm_maddubs_epi16(ax, sy); + const __m128i ones = _mm_set1_epi16(1); + return _mm_madd_epi16(ones, dot); +} + +#if __AVX__ || __AVX2__ || __AVX512F__ +// horizontally add 8 floats +static inline float hsum_float_8(const __m256 x) { + __m128 res = _mm256_extractf128_ps(x, 1); + res = _mm_add_ps(res, _mm256_castps256_ps128(x)); + res = _mm_add_ps(res, _mm_movehl_ps(res, res)); + res = _mm_add_ss(res, _mm_movehdup_ps(res)); + return _mm_cvtss_f32(res); +} + +// horizontally add 8 int32_t +static inline int hsum_i32_8(const __m256i a) { + const __m128i sum128 = _mm_add_epi32(_mm256_castsi256_si128(a), _mm256_extractf128_si256(a, 1)); + const __m128i hi64 = _mm_unpackhi_epi64(sum128, sum128); + const __m128i sum64 = _mm_add_epi32(hi64, sum128); + const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); + return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); +} + +// horizontally add 4 int32_t +static inline int hsum_i32_4(const __m128i a) { + const __m128i hi64 = _mm_unpackhi_epi64(a, a); + const __m128i sum64 = _mm_add_epi32(hi64, a); + const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); + return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); +} + +#if defined(__AVX2__) || defined(__AVX512F__) +// spread 32 bits to 32 bytes { 0x00, 0xFF } +static inline __m256i bytes_from_bits_32(const uint8_t * x) { + uint32_t x32; + memcpy(&x32, x, sizeof(uint32_t)); + const __m256i shuf_mask = _mm256_set_epi64x( + 0x0303030303030303, 0x0202020202020202, + 0x0101010101010101, 0x0000000000000000); + __m256i bytes = _mm256_shuffle_epi8(_mm256_set1_epi32(x32), shuf_mask); + const __m256i bit_mask = _mm256_set1_epi64x(0x7fbfdfeff7fbfdfe); + bytes = _mm256_or_si256(bytes, bit_mask); + return _mm256_cmpeq_epi8(bytes, _mm256_set1_epi64x(-1)); +} + +// Unpack 32 4-bit fields into 32 bytes +// The output vector contains 32 bytes, each one in [ 0 .. 
15 ] interval +static inline __m256i bytes_from_nibbles_32(const uint8_t * rsi) +{ + const __m128i tmp = _mm_loadu_si128((const __m128i *)rsi); + const __m256i bytes = MM256_SET_M128I(_mm_srli_epi16(tmp, 4), tmp); + const __m256i lowMask = _mm256_set1_epi8( 0xF ); + return _mm256_and_si256(lowMask, bytes); +} + +// add int16_t pairwise and return as float vector +static inline __m256 sum_i16_pairs_float(const __m256i x) { + const __m256i ones = _mm256_set1_epi16(1); + const __m256i summed_pairs = _mm256_madd_epi16(ones, x); + return _mm256_cvtepi32_ps(summed_pairs); +} + +static inline __m256 mul_sum_us8_pairs_float(const __m256i ax, const __m256i sy) { +#if __AVXVNNI__ + const __m256i zero = _mm256_setzero_si256(); + const __m256i summed_pairs = _mm256_dpbusd_epi32(zero, ax, sy); + return _mm256_cvtepi32_ps(summed_pairs); +#else + // Perform multiplication and create 16-bit values + const __m256i dot = _mm256_maddubs_epi16(ax, sy); + return sum_i16_pairs_float(dot); +#endif +} + +// multiply int8_t, add results pairwise twice and return as float vector +static inline __m256 mul_sum_i8_pairs_float(const __m256i x, const __m256i y) { +#if __AVXVNNIINT8__ + const __m256i zero = _mm256_setzero_si256(); + const __m256i summed_pairs = _mm256_dpbssd_epi32(zero, x, y); + return _mm256_cvtepi32_ps(summed_pairs); +#else + // Get absolute values of x vectors + const __m256i ax = _mm256_sign_epi8(x, x); + // Sign the values of the y vectors + const __m256i sy = _mm256_sign_epi8(y, x); + return mul_sum_us8_pairs_float(ax, sy); +#endif +} + +static inline __m128i packNibbles( __m256i bytes ) +{ + // Move bits within 16-bit lanes from 0000_abcd_0000_efgh into 0000_0000_abcd_efgh +#if __AVX512F__ + const __m256i bytes_srli_4 = _mm256_srli_epi16(bytes, 4); // 0000_0000_abcd_0000 + bytes = _mm256_or_si256(bytes, bytes_srli_4); // 0000_abcd_abcd_efgh + return _mm256_cvtepi16_epi8(bytes); // abcd_efgh +#else + const __m256i lowByte = _mm256_set1_epi16( 0xFF ); + __m256i high = _mm256_andnot_si256( lowByte, bytes ); + __m256i low = _mm256_and_si256( lowByte, bytes ); + high = _mm256_srli_epi16( high, 4 ); + bytes = _mm256_or_si256( low, high ); + + // Compress uint16_t lanes into bytes + __m128i r0 = _mm256_castsi256_si128( bytes ); + __m128i r1 = _mm256_extracti128_si256( bytes, 1 ); + return _mm_packus_epi16( r0, r1 ); +#endif +} +#elif defined(__AVX__) +// spread 32 bits to 32 bytes { 0x00, 0xFF } +static inline __m256i bytes_from_bits_32(const uint8_t * x) { + uint32_t x32; + memcpy(&x32, x, sizeof(uint32_t)); + const __m128i shuf_maskl = _mm_set_epi64x(0x0101010101010101, 0x0000000000000000); + const __m128i shuf_maskh = _mm_set_epi64x(0x0303030303030303, 0x0202020202020202); + __m128i bytesl = _mm_shuffle_epi8(_mm_set1_epi32(x32), shuf_maskl); + __m128i bytesh = _mm_shuffle_epi8(_mm_set1_epi32(x32), shuf_maskh); + const __m128i bit_mask = _mm_set1_epi64x(0x7fbfdfeff7fbfdfe); + bytesl = _mm_or_si128(bytesl, bit_mask); + bytesh = _mm_or_si128(bytesh, bit_mask); + bytesl = _mm_cmpeq_epi8(bytesl, _mm_set1_epi64x(-1)); + bytesh = _mm_cmpeq_epi8(bytesh, _mm_set1_epi64x(-1)); + return MM256_SET_M128I(bytesh, bytesl); +} + +// Unpack 32 4-bit fields into 32 bytes +// The output vector contains 32 bytes, each one in [ 0 .. 
15 ] interval +static inline __m256i bytes_from_nibbles_32(const uint8_t * rsi) +{ + // Load 16 bytes from memory + __m128i tmpl = _mm_loadu_si128((const __m128i *)rsi); + __m128i tmph = _mm_srli_epi16(tmpl, 4); + const __m128i lowMask = _mm_set1_epi8(0xF); + tmpl = _mm_and_si128(lowMask, tmpl); + tmph = _mm_and_si128(lowMask, tmph); + return MM256_SET_M128I(tmph, tmpl); +} + +// add int16_t pairwise and return as float vector +static inline __m256 sum_i16_pairs_float(const __m128i xh, const __m128i xl) { + const __m128i ones = _mm_set1_epi16(1); + const __m128i summed_pairsl = _mm_madd_epi16(ones, xl); + const __m128i summed_pairsh = _mm_madd_epi16(ones, xh); + const __m256i summed_pairs = MM256_SET_M128I(summed_pairsh, summed_pairsl); + return _mm256_cvtepi32_ps(summed_pairs); +} + +static inline __m256 mul_sum_us8_pairs_float(const __m256i ax, const __m256i sy) { + const __m128i axl = _mm256_castsi256_si128(ax); + const __m128i axh = _mm256_extractf128_si256(ax, 1); + const __m128i syl = _mm256_castsi256_si128(sy); + const __m128i syh = _mm256_extractf128_si256(sy, 1); + // Perform multiplication and create 16-bit values + const __m128i dotl = _mm_maddubs_epi16(axl, syl); + const __m128i doth = _mm_maddubs_epi16(axh, syh); + return sum_i16_pairs_float(doth, dotl); +} + +// multiply int8_t, add results pairwise twice and return as float vector +static inline __m256 mul_sum_i8_pairs_float(const __m256i x, const __m256i y) { + const __m128i xl = _mm256_castsi256_si128(x); + const __m128i xh = _mm256_extractf128_si256(x, 1); + const __m128i yl = _mm256_castsi256_si128(y); + const __m128i yh = _mm256_extractf128_si256(y, 1); + // Get absolute values of x vectors + const __m128i axl = _mm_sign_epi8(xl, xl); + const __m128i axh = _mm_sign_epi8(xh, xh); + // Sign the values of the y vectors + const __m128i syl = _mm_sign_epi8(yl, xl); + const __m128i syh = _mm_sign_epi8(yh, xh); + // Perform multiplication and create 16-bit values + const __m128i dotl = _mm_maddubs_epi16(axl, syl); + const __m128i doth = _mm_maddubs_epi16(axh, syh); + return sum_i16_pairs_float(doth, dotl); +} + +static inline __m128i packNibbles( __m128i bytes1, __m128i bytes2 ) +{ + // Move bits within 16-bit lanes from 0000_abcd_0000_efgh into 0000_0000_abcd_efgh + const __m128i lowByte = _mm_set1_epi16( 0xFF ); + __m128i high = _mm_andnot_si128( lowByte, bytes1 ); + __m128i low = _mm_and_si128( lowByte, bytes1 ); + high = _mm_srli_epi16( high, 4 ); + bytes1 = _mm_or_si128( low, high ); + high = _mm_andnot_si128( lowByte, bytes2 ); + low = _mm_and_si128( lowByte, bytes2 ); + high = _mm_srli_epi16( high, 4 ); + bytes2 = _mm_or_si128( low, high ); + + return _mm_packus_epi16( bytes1, bytes2); +} +#endif +#elif defined(__SSSE3__) +// horizontally add 4x4 floats +static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 c, const __m128 d) { + __m128 res_0 =_mm_hadd_ps(a, b); + __m128 res_1 =_mm_hadd_ps(c, d); + __m128 res =_mm_hadd_ps(res_0, res_1); + res =_mm_hadd_ps(res, res); + res =_mm_hadd_ps(res, res); + + return _mm_cvtss_f32(res); +} +#endif // __AVX__ || __AVX2__ || __AVX512F__ +#endif // defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) + +#if defined(__ARM_NEON) + +#if !defined(__aarch64__) + +inline static int32_t vaddvq_s32(int32x4_t v) { + return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); +} + +inline static float vaddvq_f32(float32x4_t v) { + return vgetq_lane_f32(v, 0) + vgetq_lane_f32(v, 1) + vgetq_lane_f32(v, 
2) + vgetq_lane_f32(v, 3); +} + +inline static float vmaxvq_f32(float32x4_t v) { + return + MAX(MAX(vgetq_lane_f32(v, 0), vgetq_lane_f32(v, 1)), + MAX(vgetq_lane_f32(v, 2), vgetq_lane_f32(v, 3))); +} + +inline static int32x4_t vcvtnq_s32_f32(float32x4_t v) { + int32x4_t res; + + res[0] = roundf(vgetq_lane_f32(v, 0)); + res[1] = roundf(vgetq_lane_f32(v, 1)); + res[2] = roundf(vgetq_lane_f32(v, 2)); + res[3] = roundf(vgetq_lane_f32(v, 3)); + + return res; +} + +#endif +#endif + +#if defined(__ARM_NEON) || defined(__wasm_simd128__) +#define B1(c,s,n) 0x ## n ## c , 0x ## n ## s +#define B2(c,s,n) B1(c,s,n ## c), B1(c,s,n ## s) +#define B3(c,s,n) B2(c,s,n ## c), B2(c,s,n ## s) +#define B4(c,s,n) B3(c,s,n ## c), B3(c,s,n ## s) +#define B5(c,s,n) B4(c,s,n ## c), B4(c,s,n ## s) +#define B6(c,s,n) B5(c,s,n ## c), B5(c,s,n ## s) +#define B7(c,s,n) B6(c,s,n ## c), B6(c,s,n ## s) +#define B8(c,s ) B7(c,s, c), B7(c,s, s) + +// precomputed tables for expanding 8bits to 8 bytes: +static const uint64_t table_b2b_0[1 << 8] = { B8(00, 10) }; // ( b) << 4 +static const uint64_t table_b2b_1[1 << 8] = { B8(10, 00) }; // (!b) << 4 +#endif + +// reference implementation for deterministic creation of model files +void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k) { + static const int qk = QK4_0; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + float amax = 0.0f; // absolute max + float max = 0.0f; + + for (int j = 0; j < qk; j++) { + const float v = x[i*qk + j]; + if (amax < fabsf(v)) { + amax = fabsf(v); + max = v; + } + } + + const float d = max / -8; + const float id = d ? 1.0f/d : 0.0f; + + y[i].d = ggml_fp32_to_fp16(d); + + for (int j = 0; j < qk/2; ++j) { + const float x0 = x[i*qk + 0 + j]*id; + const float x1 = x[i*qk + qk/2 + j]*id; + + const uint8_t xi0 = MIN(15, (int8_t)(x0 + 8.5f)); + const uint8_t xi1 = MIN(15, (int8_t)(x1 + 8.5f)); + + y[i].qs[j] = xi0; + y[i].qs[j] |= xi1 << 4; + } + } +} + +void quantize_row_q4_0(const float * restrict x, void * restrict y, int k) { + quantize_row_q4_0_reference(x, y, k); +} + +void quantize_row_q4_1_reference(const float * restrict x, block_q4_1 * restrict y, int k) { + const int qk = QK4_1; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + float min = FLT_MAX; + float max = -FLT_MAX; + + for (int j = 0; j < qk; j++) { + const float v = x[i*qk + j]; + + if (v < min) min = v; + if (v > max) max = v; + } + + const float d = (max - min) / ((1 << 4) - 1); + const float id = d ? 1.0f/d : 0.0f; + + y[i].d = ggml_fp32_to_fp16(d); + y[i].m = ggml_fp32_to_fp16(min); + + for (int j = 0; j < qk/2; ++j) { + const float x0 = (x[i*qk + 0 + j] - min)*id; + const float x1 = (x[i*qk + qk/2 + j] - min)*id; + + const uint8_t xi0 = MIN(15, (int8_t)(x0 + 0.5f)); + const uint8_t xi1 = MIN(15, (int8_t)(x1 + 0.5f)); + + y[i].qs[j] = xi0; + y[i].qs[j] |= xi1 << 4; + } + } +} + +void quantize_row_q4_1(const float * restrict x, void * restrict y, int k) { + quantize_row_q4_1_reference(x, y, k); +} + +void quantize_row_q5_0_reference(const float * restrict x, block_q5_0 * restrict y, int k) { + static const int qk = QK5_0; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + float amax = 0.0f; // absolute max + float max = 0.0f; + + for (int j = 0; j < qk; j++) { + const float v = x[i*qk + j]; + if (amax < fabsf(v)) { + amax = fabsf(v); + max = v; + } + } + + const float d = max / -16; + const float id = d ? 
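// [editor's worked example for quantize_row_q4_0_reference above] Suppose the
// largest-magnitude value in a 32-element block is max = -1.6. Then
//   d  = max / -8 = 0.2        id = 1/d = 5
// and a source value x = 0.73 is encoded as
//   xi = (int8_t)(x*id + 8.5f) = (int8_t)12.15 = 12   (capped at 15 by MIN)
// Dequantization later recovers (12 - 8) * d = 0.8, close to the original 0.73.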
1.0f/d : 0.0f; + + y[i].d = ggml_fp32_to_fp16(d); + + uint32_t qh = 0; + + for (int j = 0; j < qk/2; ++j) { + const float x0 = x[i*qk + 0 + j]*id; + const float x1 = x[i*qk + qk/2 + j]*id; + + const uint8_t xi0 = MIN(31, (int8_t)(x0 + 16.5f)); + const uint8_t xi1 = MIN(31, (int8_t)(x1 + 16.5f)); + + y[i].qs[j] = (xi0 & 0x0F) | ((xi1 & 0x0F) << 4); + + // get the 5-th bit and store it in qh at the right position + qh |= ((xi0 & 0x10u) >> 4) << (j + 0); + qh |= ((xi1 & 0x10u) >> 4) << (j + qk/2); + } + + memcpy(&y[i].qh, &qh, sizeof(qh)); + } +} + +void quantize_row_q5_0(const float * restrict x, void * restrict y, int k) { + quantize_row_q5_0_reference(x, y, k); +} + +void quantize_row_q5_1_reference(const float * restrict x, block_q5_1 * restrict y, int k) { + const int qk = QK5_1; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + float min = FLT_MAX; + float max = -FLT_MAX; + + for (int j = 0; j < qk; j++) { + const float v = x[i*qk + j]; + + if (v < min) min = v; + if (v > max) max = v; + } + + const float d = (max - min) / ((1 << 5) - 1); + const float id = d ? 1.0f/d : 0.0f; + + y[i].d = ggml_fp32_to_fp16(d); + y[i].m = ggml_fp32_to_fp16(min); + + uint32_t qh = 0; + + for (int j = 0; j < qk/2; ++j) { + const float x0 = (x[i*qk + 0 + j] - min)*id; + const float x1 = (x[i*qk + qk/2 + j] - min)*id; + + const uint8_t xi0 = (uint8_t)(x0 + 0.5f); + const uint8_t xi1 = (uint8_t)(x1 + 0.5f); + + y[i].qs[j] = (xi0 & 0x0F) | ((xi1 & 0x0F) << 4); + + // get the 5-th bit and store it in qh at the right position + qh |= ((xi0 & 0x10u) >> 4) << (j + 0); + qh |= ((xi1 & 0x10u) >> 4) << (j + qk/2); + } + + memcpy(&y[i].qh, &qh, sizeof(y[i].qh)); + } +} + +void quantize_row_q5_1(const float * restrict x, void * restrict y, int k) { + quantize_row_q5_1_reference(x, y, k); +} + +// reference implementation for deterministic creation of model files +void quantize_row_q8_0_reference(const float * restrict x, block_q8_0 * restrict y, int k) { + assert(k % QK8_0 == 0); + const int nb = k / QK8_0; + + for (int i = 0; i < nb; i++) { + float amax = 0.0f; // absolute max + + for (int j = 0; j < QK8_0; j++) { + const float v = x[i*QK8_0 + j]; + amax = MAX(amax, fabsf(v)); + } + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 1.0f/d : 0.0f; + + y[i].d = ggml_fp32_to_fp16(d); + + for (int j = 0; j < QK8_0; ++j) { + const float x0 = x[i*QK8_0 + j]*id; + + y[i].qs[j] = roundf(x0); + } + } +} + +void quantize_row_q8_0(const float * restrict x, void * restrict vy, int k) { + assert(QK8_0 == 32); + assert(k % QK8_0 == 0); + const int nb = k / QK8_0; + + block_q8_0 * restrict y = vy; + +#if defined(__ARM_NEON) + for (int i = 0; i < nb; i++) { + float32x4_t srcv [8]; + float32x4_t asrcv[8]; + float32x4_t amaxv[8]; + + for (int j = 0; j < 8; j++) srcv[j] = vld1q_f32(x + i*32 + 4*j); + for (int j = 0; j < 8; j++) asrcv[j] = vabsq_f32(srcv[j]); + + for (int j = 0; j < 4; j++) amaxv[2*j] = vmaxq_f32(asrcv[2*j], asrcv[2*j+1]); + for (int j = 0; j < 2; j++) amaxv[4*j] = vmaxq_f32(amaxv[4*j], amaxv[4*j+2]); + for (int j = 0; j < 1; j++) amaxv[8*j] = vmaxq_f32(amaxv[8*j], amaxv[8*j+4]); + + const float amax = vmaxvq_f32(amaxv[0]); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 
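+        // the vmaxq tree above reduced 32 absolute values to one register max;
+        // d = amax/127 maps the block peak onto the int8 extreme, and the
+        // ternary keeps id = 0 for an all-zero block instead of dividing by 0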
1.0f/d : 0.0f; + + y[i].d = ggml_fp32_to_fp16(d); + + for (int j = 0; j < 8; j++) { + const float32x4_t v = vmulq_n_f32(srcv[j], id); + const int32x4_t vi = vcvtnq_s32_f32(v); + + y[i].qs[4*j + 0] = vgetq_lane_s32(vi, 0); + y[i].qs[4*j + 1] = vgetq_lane_s32(vi, 1); + y[i].qs[4*j + 2] = vgetq_lane_s32(vi, 2); + y[i].qs[4*j + 3] = vgetq_lane_s32(vi, 3); + } + } +#elif defined(__wasm_simd128__) + for (int i = 0; i < nb; i++) { + v128_t srcv [8]; + v128_t asrcv[8]; + v128_t amaxv[8]; + + for (int j = 0; j < 8; j++) srcv[j] = wasm_v128_load(x + i*32 + 4*j); + for (int j = 0; j < 8; j++) asrcv[j] = wasm_f32x4_abs(srcv[j]); + + for (int j = 0; j < 4; j++) amaxv[2*j] = wasm_f32x4_max(asrcv[2*j], asrcv[2*j+1]); + for (int j = 0; j < 2; j++) amaxv[4*j] = wasm_f32x4_max(amaxv[4*j], amaxv[4*j+2]); + for (int j = 0; j < 1; j++) amaxv[8*j] = wasm_f32x4_max(amaxv[8*j], amaxv[8*j+4]); + + const float amax = MAX(MAX(wasm_f32x4_extract_lane(amaxv[0], 0), + wasm_f32x4_extract_lane(amaxv[0], 1)), + MAX(wasm_f32x4_extract_lane(amaxv[0], 2), + wasm_f32x4_extract_lane(amaxv[0], 3))); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 1.0f/d : 0.0f; + + y[i].d = ggml_fp32_to_fp16(d); + + for (int j = 0; j < 8; j++) { + const v128_t v = wasm_f32x4_mul(srcv[j], wasm_f32x4_splat(id)); + const v128_t vi = wasm_i32x4_trunc_sat_f32x4(v); + + y[i].qs[4*j + 0] = wasm_i32x4_extract_lane(vi, 0); + y[i].qs[4*j + 1] = wasm_i32x4_extract_lane(vi, 1); + y[i].qs[4*j + 2] = wasm_i32x4_extract_lane(vi, 2); + y[i].qs[4*j + 3] = wasm_i32x4_extract_lane(vi, 3); + } + } +#elif defined(__AVX2__) || defined(__AVX__) + for (int i = 0; i < nb; i++) { + // Load elements into 4 AVX vectors + __m256 v0 = _mm256_loadu_ps( x ); + __m256 v1 = _mm256_loadu_ps( x + 8 ); + __m256 v2 = _mm256_loadu_ps( x + 16 ); + __m256 v3 = _mm256_loadu_ps( x + 24 ); + x += 32; + + // Compute max(abs(e)) for the block + const __m256 signBit = _mm256_set1_ps( -0.0f ); + __m256 maxAbs = _mm256_andnot_ps( signBit, v0 ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v1 ) ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v2 ) ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v3 ) ); + + __m128 max4 = _mm_max_ps( _mm256_extractf128_ps( maxAbs, 1 ), _mm256_castps256_ps128( maxAbs ) ); + max4 = _mm_max_ps( max4, _mm_movehl_ps( max4, max4 ) ); + max4 = _mm_max_ss( max4, _mm_movehdup_ps( max4 ) ); + const float maxScalar = _mm_cvtss_f32( max4 ); + + // Quantize these floats + const float d = maxScalar / 127.f; + y[i].d = ggml_fp32_to_fp16(d); + const float id = ( maxScalar != 0.0f ) ? 
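+        // the extractf128/movehl/movehdup sequence above is a three-step
+        // horizontal max over 8 lanes; id is formed directly as 127/max so
+        // the quantization below is a multiply and a round per element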
127.f / maxScalar : 0.0f; + const __m256 mul = _mm256_set1_ps( id ); + + // Apply the multiplier + v0 = _mm256_mul_ps( v0, mul ); + v1 = _mm256_mul_ps( v1, mul ); + v2 = _mm256_mul_ps( v2, mul ); + v3 = _mm256_mul_ps( v3, mul ); + + // Round to nearest integer + v0 = _mm256_round_ps( v0, _MM_ROUND_NEAREST ); + v1 = _mm256_round_ps( v1, _MM_ROUND_NEAREST ); + v2 = _mm256_round_ps( v2, _MM_ROUND_NEAREST ); + v3 = _mm256_round_ps( v3, _MM_ROUND_NEAREST ); + + // Convert floats to integers + __m256i i0 = _mm256_cvtps_epi32( v0 ); + __m256i i1 = _mm256_cvtps_epi32( v1 ); + __m256i i2 = _mm256_cvtps_epi32( v2 ); + __m256i i3 = _mm256_cvtps_epi32( v3 ); + +#if defined(__AVX2__) + // Convert int32 to int16 + i0 = _mm256_packs_epi32( i0, i1 ); // 0, 1, 2, 3, 8, 9, 10, 11, 4, 5, 6, 7, 12, 13, 14, 15 + i2 = _mm256_packs_epi32( i2, i3 ); // 16, 17, 18, 19, 24, 25, 26, 27, 20, 21, 22, 23, 28, 29, 30, 31 + // Convert int16 to int8 + i0 = _mm256_packs_epi16( i0, i2 ); // 0, 1, 2, 3, 8, 9, 10, 11, 16, 17, 18, 19, 24, 25, 26, 27, 4, 5, 6, 7, 12, 13, 14, 15, 20, 21, 22, 23, 28, 29, 30, 31 + + // We got our precious signed bytes, but the order is now wrong + // These AVX2 pack instructions process 16-byte pieces independently + // The following instruction is fixing the order + const __m256i perm = _mm256_setr_epi32( 0, 4, 1, 5, 2, 6, 3, 7 ); + i0 = _mm256_permutevar8x32_epi32( i0, perm ); + + _mm256_storeu_si256((__m256i *)y[i].qs, i0); +#else + // Since we don't have in AVX some necessary functions, + // we split the registers in half and call AVX2 analogs from SSE + __m128i ni0 = _mm256_castsi256_si128( i0 ); + __m128i ni1 = _mm256_extractf128_si256( i0, 1); + __m128i ni2 = _mm256_castsi256_si128( i1 ); + __m128i ni3 = _mm256_extractf128_si256( i1, 1); + __m128i ni4 = _mm256_castsi256_si128( i2 ); + __m128i ni5 = _mm256_extractf128_si256( i2, 1); + __m128i ni6 = _mm256_castsi256_si128( i3 ); + __m128i ni7 = _mm256_extractf128_si256( i3, 1); + + // Convert int32 to int16 + ni0 = _mm_packs_epi32( ni0, ni1 ); + ni2 = _mm_packs_epi32( ni2, ni3 ); + ni4 = _mm_packs_epi32( ni4, ni5 ); + ni6 = _mm_packs_epi32( ni6, ni7 ); + // Convert int16 to int8 + ni0 = _mm_packs_epi16( ni0, ni2 ); + ni4 = _mm_packs_epi16( ni4, ni6 ); + + _mm_storeu_si128((__m128i *)(y[i].qs + 0), ni0); + _mm_storeu_si128((__m128i *)(y[i].qs + 16), ni4); +#endif + } +#elif defined(__riscv_v_intrinsic) + + size_t vl = __riscv_vsetvl_e32m4(QK8_0); + + for (int i = 0; i < nb; i++) { + // load elements + vfloat32m4_t v_x = __riscv_vle32_v_f32m4(x+i*QK8_0, vl); + + vfloat32m4_t vfabs = __riscv_vfabs_v_f32m4(v_x, vl); + vfloat32m1_t tmp = __riscv_vfmv_v_f_f32m1(0.0f, vl); + vfloat32m1_t vmax = __riscv_vfredmax_vs_f32m4_f32m1(vfabs, tmp, vl); + float amax = __riscv_vfmv_f_s_f32m1_f32(vmax); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 
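+        // vfredmax reduced the whole block's |x| in one instruction; the code
+        // stays vector-length agnostic because vl was set from QK8_0 above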
1.0f/d : 0.0f; + + y[i].d = ggml_fp32_to_fp16(d); + + vfloat32m4_t x0 = __riscv_vfmul_vf_f32m4(v_x, id, vl); + + // convert to integer + vint16m2_t vi = __riscv_vfncvt_x_f_w_i16m2(x0, vl); + vint8m1_t vs = __riscv_vncvt_x_x_w_i8m1(vi, vl); + + // store result + __riscv_vse8_v_i8m1(y[i].qs , vs, vl); + } +#else + // scalar + quantize_row_q8_0_reference(x, y, k); +#endif +} + +// reference implementation for deterministic creation of model files +void quantize_row_q8_1_reference(const float * restrict x, block_q8_1 * restrict y, int k) { + assert(QK8_1 == 32); + assert(k % QK8_1 == 0); + const int nb = k / QK8_1; + + for (int i = 0; i < nb; i++) { + float amax = 0.0f; // absolute max + + for (int j = 0; j < QK8_1; j++) { + const float v = x[i*QK8_1 + j]; + amax = MAX(amax, fabsf(v)); + } + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 1.0f/d : 0.0f; + + y[i].d = d; + + int sum = 0; + + for (int j = 0; j < QK8_1/2; ++j) { + const float v0 = x[i*QK8_1 + j]*id; + const float v1 = x[i*QK8_1 + QK8_1/2 + j]*id; + + y[i].qs[ j] = roundf(v0); + y[i].qs[QK8_1/2 + j] = roundf(v1); + + sum += y[i].qs[ j]; + sum += y[i].qs[QK8_1/2 + j]; + } + + y[i].s = sum*d; + } +} + +void quantize_row_q8_1(const float * restrict x, void * restrict vy, int k) { + assert(k % QK8_1 == 0); + const int nb = k / QK8_1; + + block_q8_1 * restrict y = vy; + +#if defined(__ARM_NEON) + for (int i = 0; i < nb; i++) { + float32x4_t srcv [8]; + float32x4_t asrcv[8]; + float32x4_t amaxv[8]; + + for (int j = 0; j < 8; j++) srcv[j] = vld1q_f32(x + i*32 + 4*j); + for (int j = 0; j < 8; j++) asrcv[j] = vabsq_f32(srcv[j]); + + for (int j = 0; j < 4; j++) amaxv[2*j] = vmaxq_f32(asrcv[2*j], asrcv[2*j+1]); + for (int j = 0; j < 2; j++) amaxv[4*j] = vmaxq_f32(amaxv[4*j], amaxv[4*j+2]); + for (int j = 0; j < 1; j++) amaxv[8*j] = vmaxq_f32(amaxv[8*j], amaxv[8*j+4]); + + const float amax = vmaxvq_f32(amaxv[0]); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 1.0f/d : 0.0f; + + y[i].d = d; + + int32x4_t accv = vdupq_n_s32(0); + + for (int j = 0; j < 8; j++) { + const float32x4_t v = vmulq_n_f32(srcv[j], id); + const int32x4_t vi = vcvtnq_s32_f32(v); + + y[i].qs[4*j + 0] = vgetq_lane_s32(vi, 0); + y[i].qs[4*j + 1] = vgetq_lane_s32(vi, 1); + y[i].qs[4*j + 2] = vgetq_lane_s32(vi, 2); + y[i].qs[4*j + 3] = vgetq_lane_s32(vi, 3); + + accv = vaddq_s32(accv, vi); + } + + y[i].s = d * vaddvq_s32(accv); + } +#elif defined(__wasm_simd128__) + for (int i = 0; i < nb; i++) { + v128_t srcv [8]; + v128_t asrcv[8]; + v128_t amaxv[8]; + + for (int j = 0; j < 8; j++) srcv[j] = wasm_v128_load(x + i*32 + 4*j); + for (int j = 0; j < 8; j++) asrcv[j] = wasm_f32x4_abs(srcv[j]); + + for (int j = 0; j < 4; j++) amaxv[2*j] = wasm_f32x4_max(asrcv[2*j], asrcv[2*j+1]); + for (int j = 0; j < 2; j++) amaxv[4*j] = wasm_f32x4_max(amaxv[4*j], amaxv[4*j+2]); + for (int j = 0; j < 1; j++) amaxv[8*j] = wasm_f32x4_max(amaxv[8*j], amaxv[8*j+4]); + + const float amax = MAX(MAX(wasm_f32x4_extract_lane(amaxv[0], 0), + wasm_f32x4_extract_lane(amaxv[0], 1)), + MAX(wasm_f32x4_extract_lane(amaxv[0], 2), + wasm_f32x4_extract_lane(amaxv[0], 3))); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 
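+        // unlike q8_0, q8_1 keeps d as a plain float and, a few lines down,
+        // stores s = d * sum(quants); dot products against q4_1/q5_1 use s to
+        // fold in the other operand's min term without revisiting the quants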
1.0f/d : 0.0f; + + y[i].d = d; + + v128_t accv = wasm_i32x4_splat(0); + + for (int j = 0; j < 8; j++) { + const v128_t v = wasm_f32x4_mul(srcv[j], wasm_f32x4_splat(id)); + const v128_t vi = wasm_i32x4_trunc_sat_f32x4(v); + + y[i].qs[4*j + 0] = wasm_i32x4_extract_lane(vi, 0); + y[i].qs[4*j + 1] = wasm_i32x4_extract_lane(vi, 1); + y[i].qs[4*j + 2] = wasm_i32x4_extract_lane(vi, 2); + y[i].qs[4*j + 3] = wasm_i32x4_extract_lane(vi, 3); + + accv = wasm_i32x4_add(accv, vi); + } + + y[i].s = d * (wasm_i32x4_extract_lane(accv, 0) + + wasm_i32x4_extract_lane(accv, 1) + + wasm_i32x4_extract_lane(accv, 2) + + wasm_i32x4_extract_lane(accv, 3)); + } +#elif defined(__AVX2__) || defined(__AVX__) + for (int i = 0; i < nb; i++) { + // Load elements into 4 AVX vectors + __m256 v0 = _mm256_loadu_ps( x ); + __m256 v1 = _mm256_loadu_ps( x + 8 ); + __m256 v2 = _mm256_loadu_ps( x + 16 ); + __m256 v3 = _mm256_loadu_ps( x + 24 ); + x += 32; + + // Compute max(abs(e)) for the block + const __m256 signBit = _mm256_set1_ps( -0.0f ); + __m256 maxAbs = _mm256_andnot_ps( signBit, v0 ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v1 ) ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v2 ) ); + maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v3 ) ); + + __m128 max4 = _mm_max_ps( _mm256_extractf128_ps( maxAbs, 1 ), _mm256_castps256_ps128( maxAbs ) ); + max4 = _mm_max_ps( max4, _mm_movehl_ps( max4, max4 ) ); + max4 = _mm_max_ss( max4, _mm_movehdup_ps( max4 ) ); + const float maxScalar = _mm_cvtss_f32( max4 ); + + // Quantize these floats + const float d = maxScalar / 127.f; + y[i].d = d; + const float id = ( maxScalar != 0.0f ) ? 127.f / maxScalar : 0.0f; + const __m256 mul = _mm256_set1_ps( id ); + + // Apply the multiplier + v0 = _mm256_mul_ps( v0, mul ); + v1 = _mm256_mul_ps( v1, mul ); + v2 = _mm256_mul_ps( v2, mul ); + v3 = _mm256_mul_ps( v3, mul ); + + // Round to nearest integer + v0 = _mm256_round_ps( v0, _MM_ROUND_NEAREST ); + v1 = _mm256_round_ps( v1, _MM_ROUND_NEAREST ); + v2 = _mm256_round_ps( v2, _MM_ROUND_NEAREST ); + v3 = _mm256_round_ps( v3, _MM_ROUND_NEAREST ); + + // Convert floats to integers + __m256i i0 = _mm256_cvtps_epi32( v0 ); + __m256i i1 = _mm256_cvtps_epi32( v1 ); + __m256i i2 = _mm256_cvtps_epi32( v2 ); + __m256i i3 = _mm256_cvtps_epi32( v3 ); + +#if defined(__AVX2__) + // Compute the sum of the quants and set y[i].s + y[i].s = d * hsum_i32_8(_mm256_add_epi32(_mm256_add_epi32(i0, i1), _mm256_add_epi32(i2, i3))); + + // Convert int32 to int16 + i0 = _mm256_packs_epi32( i0, i1 ); // 0, 1, 2, 3, 8, 9, 10, 11, 4, 5, 6, 7, 12, 13, 14, 15 + i2 = _mm256_packs_epi32( i2, i3 ); // 16, 17, 18, 19, 24, 25, 26, 27, 20, 21, 22, 23, 28, 29, 30, 31 + // Convert int16 to int8 + i0 = _mm256_packs_epi16( i0, i2 ); // 0, 1, 2, 3, 8, 9, 10, 11, 16, 17, 18, 19, 24, 25, 26, 27, 4, 5, 6, 7, 12, 13, 14, 15, 20, 21, 22, 23, 28, 29, 30, 31 + + // We got our precious signed bytes, but the order is now wrong + // These AVX2 pack instructions process 16-byte pieces independently + // The following instruction is fixing the order + const __m256i perm = _mm256_setr_epi32( 0, 4, 1, 5, 2, 6, 3, 7 ); + i0 = _mm256_permutevar8x32_epi32( i0, perm ); + + _mm256_storeu_si256((__m256i *)y[i].qs, i0); +#else + // Since we don't have in AVX some necessary functions, + // we split the registers in half and call AVX2 analogs from SSE + __m128i ni0 = _mm256_castsi256_si128( i0 ); + __m128i ni1 = _mm256_extractf128_si256( i0, 1); + __m128i ni2 = _mm256_castsi256_si128( i1 ); + __m128i ni3 = 
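+        // the eight 128-bit halves below serve double duty: they are summed as
+        // int32 to produce y[i].s, then packed down i32 -> i16 -> i8 for qs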
_mm256_extractf128_si256( i1, 1); + __m128i ni4 = _mm256_castsi256_si128( i2 ); + __m128i ni5 = _mm256_extractf128_si256( i2, 1); + __m128i ni6 = _mm256_castsi256_si128( i3 ); + __m128i ni7 = _mm256_extractf128_si256( i3, 1); + + // Compute the sum of the quants and set y[i].s + const __m128i s0 = _mm_add_epi32(_mm_add_epi32(ni0, ni1), _mm_add_epi32(ni2, ni3)); + const __m128i s1 = _mm_add_epi32(_mm_add_epi32(ni4, ni5), _mm_add_epi32(ni6, ni7)); + y[i].s = d * hsum_i32_4(_mm_add_epi32(s0, s1)); + + // Convert int32 to int16 + ni0 = _mm_packs_epi32( ni0, ni1 ); + ni2 = _mm_packs_epi32( ni2, ni3 ); + ni4 = _mm_packs_epi32( ni4, ni5 ); + ni6 = _mm_packs_epi32( ni6, ni7 ); + // Convert int16 to int8 + ni0 = _mm_packs_epi16( ni0, ni2 ); + ni4 = _mm_packs_epi16( ni4, ni6 ); + + _mm_storeu_si128((__m128i *)(y[i].qs + 0), ni0); + _mm_storeu_si128((__m128i *)(y[i].qs + 16), ni4); +#endif + } +#elif defined(__riscv_v_intrinsic) + + size_t vl = __riscv_vsetvl_e32m4(QK8_1); + + for (int i = 0; i < nb; i++) { + // load elements + vfloat32m4_t v_x = __riscv_vle32_v_f32m4(x+i*QK8_1, vl); + + vfloat32m4_t vfabs = __riscv_vfabs_v_f32m4(v_x, vl); + vfloat32m1_t tmp = __riscv_vfmv_v_f_f32m1(0.0, vl); + vfloat32m1_t vmax = __riscv_vfredmax_vs_f32m4_f32m1(vfabs, tmp, vl); + float amax = __riscv_vfmv_f_s_f32m1_f32(vmax); + + const float d = amax / ((1 << 7) - 1); + const float id = d ? 1.0f/d : 0.0f; + + y[i].d = d; + + vfloat32m4_t x0 = __riscv_vfmul_vf_f32m4(v_x, id, vl); + + // convert to integer + vint16m2_t vi = __riscv_vfncvt_x_f_w_i16m2(x0, vl); + vint8m1_t vs = __riscv_vncvt_x_x_w_i8m1(vi, vl); + + // store result + __riscv_vse8_v_i8m1(y[i].qs , vs, vl); + + // compute sum for y[i].s + vint16m1_t tmp2 = __riscv_vmv_v_x_i16m1(0, vl); + vint16m1_t vwrs = __riscv_vwredsum_vs_i8m1_i16m1(vs, tmp2, vl); + + // set y[i].s + int sum = __riscv_vmv_x_s_i16m1_i16(vwrs); + y[i].s = sum*d; + } +#else + // scalar + quantize_row_q8_1_reference(x, y, k); +#endif +} + +void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k) { + static const int qk = QK4_0; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = ggml_fp16_to_fp32(x[i].d); + + for (int j = 0; j < qk/2; ++j) { + const int x0 = (x[i].qs[j] & 0x0F) - 8; + const int x1 = (x[i].qs[j] >> 4) - 8; + + y[i*qk + j + 0 ] = x0*d; + y[i*qk + j + qk/2] = x1*d; + } + } +} + +void dequantize_row_q4_1(const block_q4_1 * restrict x, float * restrict y, int k) { + static const int qk = QK4_1; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = ggml_fp16_to_fp32(x[i].d); + const float m = ggml_fp16_to_fp32(x[i].m); + + for (int j = 0; j < qk/2; ++j) { + const int x0 = (x[i].qs[j] & 0x0F); + const int x1 = (x[i].qs[j] >> 4); + + y[i*qk + j + 0 ] = x0*d + m; + y[i*qk + j + qk/2] = x1*d + m; + } + } +} + +void dequantize_row_q5_0(const block_q5_0 * restrict x, float * restrict y, int k) { + static const int qk = QK5_0; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = ggml_fp16_to_fp32(x[i].d); + + uint32_t qh; + memcpy(&qh, x[i].qh, sizeof(qh)); + + for (int j = 0; j < qk/2; ++j) { + const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; + const uint8_t xh_1 = ((qh >> (j + 12)) ) & 0x10; + + const int32_t x0 = ((x[i].qs[j] & 0x0F) | xh_0) - 16; + const int32_t x1 = ((x[i].qs[j] >> 4) | xh_1) - 16; + + y[i*qk + j + 0 ] = x0*d; + y[i*qk + j + qk/2] = x1*d; + } + } +} + +void dequantize_row_q5_1(const 
block_q5_1 * restrict x, float * restrict y, int k) { + static const int qk = QK5_1; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = ggml_fp16_to_fp32(x[i].d); + const float m = ggml_fp16_to_fp32(x[i].m); + + uint32_t qh; + memcpy(&qh, x[i].qh, sizeof(qh)); + + for (int j = 0; j < qk/2; ++j) { + const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; + const uint8_t xh_1 = ((qh >> (j + 12)) ) & 0x10; + + const int x0 = (x[i].qs[j] & 0x0F) | xh_0; + const int x1 = (x[i].qs[j] >> 4) | xh_1; + + y[i*qk + j + 0 ] = x0*d + m; + y[i*qk + j + qk/2] = x1*d + m; + } + } +} + +void dequantize_row_q8_0(const block_q8_0 * restrict x, float * restrict y, int k) { + static const int qk = QK8_0; + + assert(k % qk == 0); + + const int nb = k / qk; + + for (int i = 0; i < nb; i++) { + const float d = ggml_fp16_to_fp32(x[i].d); + + for (int j = 0; j < qk; ++j) { + y[i*qk + j] = x[i].qs[j]*d; + } + } +} + // // 2-6 bit quantization in super-blocks // @@ -1264,15 +2283,6 @@ void quantize_row_q8_K(const float * restrict x, void * restrict y, int k) { // #if __AVX__ || __AVX2__ || __AVX512F__ -// horizontally add 8 floats -static inline float hsum_float_8(const __m256 x) { - __m128 res = _mm256_extractf128_ps(x, 1); - res = _mm_add_ps(res, _mm256_castps256_ps128(x)); - res = _mm_add_ps(res, _mm_movehl_ps(res, res)); - res = _mm_add_ss(res, _mm_movehdup_ps(res)); - return _mm_cvtss_f32(res); -} - // shuffles to pick the required scales in dot products static inline __m256i get_scale_shuffle_q3k(int i) { static const uint8_t k_shuffle[128] = { @@ -1311,6 +2321,1224 @@ static inline __m128i get_scale_shuffle(int i) { } #endif +void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + const int qk = QK8_0; + const int nb = n / qk; + + assert(n % qk == 0); + + const block_q4_0 * restrict x = vx; + const block_q8_0 * restrict y = vy; + +#if defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + assert(nb % 2 == 0); // TODO: handle odd nb + + for (int i = 0; i < nb; i += 2) { + const block_q4_0 * restrict x0 = &x[i + 0]; + const block_q4_0 * restrict x1 = &x[i + 1]; + const block_q8_0 * restrict y0 = &y[i + 0]; + const block_q8_0 * restrict y1 = &y[i + 1]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + const int8x16_t s8b = vdupq_n_s8(0x8); + + const uint8x16_t v0_0 = vld1q_u8(x0->qs); + const uint8x16_t v0_1 = vld1q_u8(x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // sub 8 + const int8x16_t v0_0ls = vsubq_s8(v0_0l, s8b); + const int8x16_t v0_0hs = vsubq_s8(v0_0h, s8b); + const int8x16_t v0_1ls = vsubq_s8(v0_1l, s8b); + const int8x16_t v0_1hs = vsubq_s8(v0_1h, s8b); + + // load y + const int8x16_t v1_0l = vld1q_s8(y0->qs); + const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); + const int8x16_t v1_1l = vld1q_s8(y1->qs); + const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); + +#if defined(__ARM_FEATURE_DOTPROD) + // dot product into int32x4_t + const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); + const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), 
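+        // the combined scale d_x * d_y is applied once per 32-element block;
+        // processing two blocks per iteration (hence the nb % 2 assert above)
+        // lets the two float accumulators sumv0/sumv1 advance independently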
ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); +#else + const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0ls), vget_low_s8 (v1_0l)); + const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0ls), vget_high_s8(v1_0l)); + const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hs), vget_low_s8 (v1_0h)); + const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hs), vget_high_s8(v1_0h)); + + const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1ls), vget_low_s8 (v1_1l)); + const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1ls), vget_high_s8(v1_1l)); + const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hs), vget_low_s8 (v1_1h)); + const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hs), vget_high_s8(v1_1h)); + + const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); + const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); + const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); + const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); +#endif + } + + *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); +#elif defined(__AVX2__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + // Main loop + for (int i = 0; i < nb; ++i) { + /* Compute combined scale for the block */ + const __m256 d = _mm256_set1_ps( ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d) ); + + __m256i bx = bytes_from_nibbles_32(x[i].qs); + + // Now we have a vector with bytes in [ 0 .. 15 ] interval. Offset them into [ -8 .. +7 ] interval. 
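+        // e.g. nibble 0b0000 becomes -8 and 0b1111 becomes +7, matching the
+        // scalar dequantization (qs[j] & 0x0F) - 8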
+ const __m256i off = _mm256_set1_epi8( 8 ); + bx = _mm256_sub_epi8( bx, off ); + + __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + + const __m256 q = mul_sum_i8_pairs_float(bx, by); + + /* Multiply q with scale and accumulate */ + acc = _mm256_fmadd_ps( d, q, acc ); + } + + *s = hsum_float_8(acc); +#elif defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + // Main loop + for (int i = 0; i < nb; ++i) { + // Compute combined scale for the block + const __m256 d = _mm256_set1_ps( ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d) ); + + const __m128i lowMask = _mm_set1_epi8(0xF); + const __m128i off = _mm_set1_epi8(8); + + const __m128i tmp = _mm_loadu_si128((const __m128i *)x[i].qs); + + __m128i bx = _mm_and_si128(lowMask, tmp); + __m128i by = _mm_loadu_si128((const __m128i *)y[i].qs); + bx = _mm_sub_epi8(bx, off); + const __m128i i32_0 = mul_sum_i8_pairs(bx, by); + + bx = _mm_and_si128(lowMask, _mm_srli_epi64(tmp, 4)); + by = _mm_loadu_si128((const __m128i *)(y[i].qs + 16)); + bx = _mm_sub_epi8(bx, off); + const __m128i i32_1 = mul_sum_i8_pairs(bx, by); + + // Convert int32_t to float + __m256 p = _mm256_cvtepi32_ps(MM256_SET_M128I(i32_0, i32_1)); + + // Apply the scale, and accumulate + acc = _mm256_add_ps(_mm256_mul_ps( d, p ), acc); + } + + *s = hsum_float_8(acc); +#elif defined(__SSSE3__) + // set constants + const __m128i lowMask = _mm_set1_epi8(0xF); + const __m128i off = _mm_set1_epi8(8); + + // Initialize accumulator with zeros + __m128 acc_0 = _mm_setzero_ps(); + __m128 acc_1 = _mm_setzero_ps(); + __m128 acc_2 = _mm_setzero_ps(); + __m128 acc_3 = _mm_setzero_ps(); + + // First round without accumulation + { + _mm_prefetch(&x[0] + sizeof(block_q4_0), _MM_HINT_T0); + _mm_prefetch(&y[0] + sizeof(block_q8_0), _MM_HINT_T0); + + // Compute combined scale for the block 0 and 1 + const __m128 d_0_1 = _mm_set1_ps( ggml_fp16_to_fp32(x[0].d) * ggml_fp16_to_fp32(y[0].d) ); + + const __m128i tmp_0_1 = _mm_loadu_si128((const __m128i *)x[0].qs); + + __m128i bx_0 = _mm_and_si128(lowMask, tmp_0_1); + __m128i by_0 = _mm_loadu_si128((const __m128i *)y[0].qs); + bx_0 = _mm_sub_epi8(bx_0, off); + const __m128i i32_0 = mul_sum_i8_pairs(bx_0, by_0); + + __m128i bx_1 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_0_1, 4)); + __m128i by_1 = _mm_loadu_si128((const __m128i *)(y[0].qs + 16)); + bx_1 = _mm_sub_epi8(bx_1, off); + const __m128i i32_1 = mul_sum_i8_pairs(bx_1, by_1); + + _mm_prefetch(&x[1] + sizeof(block_q4_0), _MM_HINT_T0); + _mm_prefetch(&y[1] + sizeof(block_q8_0), _MM_HINT_T0); + + // Compute combined scale for the block 2 and 3 + const __m128 d_2_3 = _mm_set1_ps( ggml_fp16_to_fp32(x[1].d) * ggml_fp16_to_fp32(y[1].d) ); + + const __m128i tmp_2_3 = _mm_loadu_si128((const __m128i *)x[1].qs); + + __m128i bx_2 = _mm_and_si128(lowMask, tmp_2_3); + __m128i by_2 = _mm_loadu_si128((const __m128i *)y[1].qs); + bx_2 = _mm_sub_epi8(bx_2, off); + const __m128i i32_2 = mul_sum_i8_pairs(bx_2, by_2); + + __m128i bx_3 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_2_3, 4)); + __m128i by_3 = _mm_loadu_si128((const __m128i *)(y[1].qs + 16)); + bx_3 = _mm_sub_epi8(bx_3, off); + const __m128i i32_3 = mul_sum_i8_pairs(bx_3, by_3); + + // Convert int32_t to float + __m128 p0 = _mm_cvtepi32_ps(i32_0); + __m128 p1 = _mm_cvtepi32_ps(i32_1); + __m128 p2 = _mm_cvtepi32_ps(i32_2); + __m128 p3 = _mm_cvtepi32_ps(i32_3); + + // Apply the scale + acc_0 = _mm_mul_ps( d_0_1, p0 ); + acc_1 = _mm_mul_ps( d_0_1, p1 ); + acc_2 = _mm_mul_ps( d_2_3, p2 ); + acc_3 = _mm_mul_ps( 
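+        // the unrolled first round initializes the four accumulators with a
+        // plain multiply; only the main loop below pays for the extra add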
d_2_3, p3 ); + } + + assert(nb % 2 == 0); // TODO: handle odd nb + + // Main loop + for (int i = 2; i < nb; i+=2) { + _mm_prefetch(&x[i] + sizeof(block_q4_0), _MM_HINT_T0); + _mm_prefetch(&y[i] + sizeof(block_q8_0), _MM_HINT_T0); + + // Compute combined scale for the block 0 and 1 + const __m128 d_0_1 = _mm_set1_ps( ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d) ); + + const __m128i tmp_0_1 = _mm_loadu_si128((const __m128i *)x[i].qs); + + __m128i bx_0 = _mm_and_si128(lowMask, tmp_0_1); + __m128i by_0 = _mm_loadu_si128((const __m128i *)y[i].qs); + bx_0 = _mm_sub_epi8(bx_0, off); + const __m128i i32_0 = mul_sum_i8_pairs(bx_0, by_0); + + __m128i bx_1 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_0_1, 4)); + __m128i by_1 = _mm_loadu_si128((const __m128i *)(y[i].qs + 16)); + bx_1 = _mm_sub_epi8(bx_1, off); + const __m128i i32_1 = mul_sum_i8_pairs(bx_1, by_1); + + _mm_prefetch(&x[i] + 2 * sizeof(block_q4_0), _MM_HINT_T0); + _mm_prefetch(&y[i] + 2 * sizeof(block_q8_0), _MM_HINT_T0); + + // Compute combined scale for the block 2 and 3 + const __m128 d_2_3 = _mm_set1_ps( ggml_fp16_to_fp32(x[i + 1].d) * ggml_fp16_to_fp32(y[i + 1].d) ); + + const __m128i tmp_2_3 = _mm_loadu_si128((const __m128i *)x[i + 1].qs); + + __m128i bx_2 = _mm_and_si128(lowMask, tmp_2_3); + __m128i by_2 = _mm_loadu_si128((const __m128i *)y[i + 1].qs); + bx_2 = _mm_sub_epi8(bx_2, off); + const __m128i i32_2 = mul_sum_i8_pairs(bx_2, by_2); + + __m128i bx_3 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_2_3, 4)); + __m128i by_3 = _mm_loadu_si128((const __m128i *)(y[i + 1].qs + 16)); + bx_3 = _mm_sub_epi8(bx_3, off); + const __m128i i32_3 = mul_sum_i8_pairs(bx_3, by_3); + + // Convert int32_t to float + __m128 p0 = _mm_cvtepi32_ps(i32_0); + __m128 p1 = _mm_cvtepi32_ps(i32_1); + __m128 p2 = _mm_cvtepi32_ps(i32_2); + __m128 p3 = _mm_cvtepi32_ps(i32_3); + + // Apply the scale + __m128 p0_d = _mm_mul_ps( d_0_1, p0 ); + __m128 p1_d = _mm_mul_ps( d_0_1, p1 ); + __m128 p2_d = _mm_mul_ps( d_2_3, p2 ); + __m128 p3_d = _mm_mul_ps( d_2_3, p3 ); + + // Acummulate + acc_0 = _mm_add_ps(p0_d, acc_0); + acc_1 = _mm_add_ps(p1_d, acc_1); + acc_2 = _mm_add_ps(p2_d, acc_2); + acc_3 = _mm_add_ps(p3_d, acc_3); + } + + *s = hsum_float_4x4(acc_0, acc_1, acc_2, acc_3); +#elif defined(__riscv_v_intrinsic) + float sumf = 0.0; + + size_t vl = __riscv_vsetvl_e8m1(qk/2); + + for (int i = 0; i < nb; i++) { + // load elements + vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[i].qs, vl); + + vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[i].qs, vl); + vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[i].qs+16, vl); + + // mask and store lower part of x, and then upper part + vuint8mf2_t x_a = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); + vuint8mf2_t x_l = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); + + vint8mf2_t x_ai = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); + vint8mf2_t x_li = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); + + // subtract offset + vint8mf2_t v0 = __riscv_vsub_vx_i8mf2(x_ai, 8, vl); + vint8mf2_t v1 = __riscv_vsub_vx_i8mf2(x_li, 8, vl); + + vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); + vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); + + vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); + + vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); + vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); + + sumf += sumi*ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d); + } + + *s = sumf; +#else + // scalar + float sumf = 0.0; + + for (int i = 0; i < nb; i++) { + int sumi = 0; + + for 
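+        // scalar reference: low nibbles hold the first half of the block and
+        // high nibbles the second, each re-centered by -8 before the MAC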
(int j = 0; j < qk/2; ++j) { + const int v0 = (x[i].qs[j] & 0x0F) - 8; + const int v1 = (x[i].qs[j] >> 4) - 8; + + sumi += (v0 * y[i].qs[j]) + (v1 * y[i].qs[j + qk/2]); + } + + sumf += sumi*ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d); + } + + *s = sumf; +#endif +} + +void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + const int qk = QK8_1; + const int nb = n / qk; + + assert(n % qk == 0); + + const block_q4_1 * restrict x = vx; + const block_q8_1 * restrict y = vy; + + // TODO: add WASM SIMD +#if defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + float summs = 0; + + assert(nb % 2 == 0); // TODO: handle odd nb + + for (int i = 0; i < nb; i += 2) { + const block_q4_1 * restrict x0 = &x[i + 0]; + const block_q4_1 * restrict x1 = &x[i + 1]; + const block_q8_1 * restrict y0 = &y[i + 0]; + const block_q8_1 * restrict y1 = &y[i + 1]; + + summs += ggml_fp16_to_fp32(x0->m) * y0->s + ggml_fp16_to_fp32(x1->m) * y1->s; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + const uint8x16_t v0_0 = vld1q_u8(x0->qs); + const uint8x16_t v0_1 = vld1q_u8(x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // load y + const int8x16_t v1_0l = vld1q_s8(y0->qs); + const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); + const int8x16_t v1_1l = vld1q_s8(y1->qs); + const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); + +#if defined(__ARM_FEATURE_DOTPROD) + // dot product into int32x4_t + const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); + const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), ggml_fp16_to_fp32(x0->d)*y0->d); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), ggml_fp16_to_fp32(x1->d)*y1->d); +#else + const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0l), vget_low_s8 (v1_0l)); + const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0l), vget_high_s8(v1_0l)); + const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0h), vget_low_s8 (v1_0h)); + const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0h), vget_high_s8(v1_0h)); + + const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1l), vget_low_s8 (v1_1l)); + const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1l), vget_high_s8(v1_1l)); + const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1h), vget_low_s8 (v1_1h)); + const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1h), vget_high_s8(v1_1h)); + + const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); + const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); + const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); + const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), ggml_fp16_to_fp32(x0->d)*y0->d); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), ggml_fp16_to_fp32(x1->d)*y1->d); +#endif + } + + *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs; +#elif defined(__AVX2__) || defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + float summs = 0; + + // Main loop + for (int i = 0; i < nb; ++i) { + const float d0 = ggml_fp16_to_fp32(x[i].d); + const float d1 = y[i].d; + + 
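+        // per element x = d*q + m, so sum(x_j * y_j) splits into
+        // d_x*d_y * sum(qx*qy) + m_x * (d_y * sum(qy)); the second term is
+        // exactly m_x * y.s, which is what summs accumulates below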
summs += ggml_fp16_to_fp32(x[i].m) * y[i].s; + + const __m256 d0v = _mm256_set1_ps( d0 ); + const __m256 d1v = _mm256_set1_ps( d1 ); + + // Compute combined scales + const __m256 d0d1 = _mm256_mul_ps( d0v, d1v ); + + // Load 16 bytes, and unpack 4 bit fields into bytes, making 32 bytes + const __m256i bx = bytes_from_nibbles_32(x[i].qs); + const __m256i by = _mm256_loadu_si256( (const __m256i *)y[i].qs ); + + const __m256 xy = mul_sum_us8_pairs_float(bx, by); + + // Accumulate d0*d1*x*y +#if defined(__AVX2__) + acc = _mm256_fmadd_ps( d0d1, xy, acc ); +#else + acc = _mm256_add_ps( _mm256_mul_ps( d0d1, xy ), acc ); +#endif + } + + *s = hsum_float_8(acc) + summs; +#elif defined(__riscv_v_intrinsic) + float sumf = 0.0; + + size_t vl = __riscv_vsetvl_e8m1(qk/2); + + for (int i = 0; i < nb; i++) { + // load elements + vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[i].qs, vl); + + vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[i].qs, vl); + vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[i].qs+16, vl); + + // mask and store lower part of x, and then upper part + vuint8mf2_t x_a = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); + vuint8mf2_t x_l = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); + + vint8mf2_t v0 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); + vint8mf2_t v1 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); + + vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); + vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); + + vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); + + vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); + vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); + + sumf += (ggml_fp16_to_fp32(x[i].d)*y[i].d)*sumi + ggml_fp16_to_fp32(x[i].m)*y[i].s; + } + + *s = sumf; +#else + // scalar + float sumf = 0.0; + + for (int i = 0; i < nb; i++) { + int sumi = 0; + + for (int j = 0; j < qk/2; ++j) { + const int v0 = (x[i].qs[j] & 0x0F); + const int v1 = (x[i].qs[j] >> 4); + + sumi += (v0 * y[i].qs[j]) + (v1 * y[i].qs[j + qk/2]); + } + + sumf += (ggml_fp16_to_fp32(x[i].d)*y[i].d)*sumi + ggml_fp16_to_fp32(x[i].m)*y[i].s; + } + + *s = sumf; +#endif +} + +void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + const int qk = QK8_0; + const int nb = n / qk; + + assert(n % qk == 0); + assert(qk == QK5_0); + + const block_q5_0 * restrict x = vx; + const block_q8_0 * restrict y = vy; + +#if defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + uint32_t qh0; + uint32_t qh1; + + uint64_t tmp0[4]; + uint64_t tmp1[4]; + + assert(nb % 2 == 0); // TODO: handle odd nb + + for (int i = 0; i < nb; i += 2) { + const block_q5_0 * restrict x0 = &x[i]; + const block_q5_0 * restrict x1 = &x[i + 1]; + const block_q8_0 * restrict y0 = &y[i]; + const block_q8_0 * restrict y1 = &y[i + 1]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + // extract the 5th bit via lookup table ((!b) << 4) + memcpy(&qh0, x0->qh, sizeof(qh0)); + memcpy(&qh1, x1->qh, sizeof(qh1)); + + tmp0[0] = table_b2b_1[(qh0 >> 0) & 0xFF]; + tmp0[1] = table_b2b_1[(qh0 >> 8) & 0xFF]; + tmp0[2] = table_b2b_1[(qh0 >> 16) & 0xFF]; + tmp0[3] = table_b2b_1[(qh0 >> 24) ]; + + tmp1[0] = table_b2b_1[(qh1 >> 0) & 0xFF]; + tmp1[1] = table_b2b_1[(qh1 >> 8) & 0xFF]; + tmp1[2] = table_b2b_1[(qh1 >> 16) & 0xFF]; + tmp1[3] = table_b2b_1[(qh1 >> 24) ]; + + const int8x16_t qhl0 = vld1q_s8((const int8_t *)(tmp0 + 0)); + const int8x16_t qhh0 = vld1q_s8((const int8_t *)(tmp0 + 2)); + const int8x16_t 
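+        // table_b2b_1 expanded each byte of qh into eight bytes of (!bit) << 4,
+        // so tmp0/tmp1 hold one 0x10-or-0x00 mask byte per quantized element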
qhl1 = vld1q_s8((const int8_t *)(tmp1 + 0)); + const int8x16_t qhh1 = vld1q_s8((const int8_t *)(tmp1 + 2)); + + const uint8x16_t v0_0 = vld1q_u8(x0->qs); + const uint8x16_t v0_1 = vld1q_u8(x1->qs); + + // 4-bit -> 8-bit + int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // add high bit and sub 16 (equivalent to sub 0x10 when bit is zero) + const int8x16_t v0_0lf = vsubq_s8(v0_0l, qhl0); + const int8x16_t v0_0hf = vsubq_s8(v0_0h, qhh0); + const int8x16_t v0_1lf = vsubq_s8(v0_1l, qhl1); + const int8x16_t v0_1hf = vsubq_s8(v0_1h, qhh1); + + // load y + const int8x16_t v1_0l = vld1q_s8(y0->qs); + const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); + const int8x16_t v1_1l = vld1q_s8(y1->qs); + const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); + +#if defined(__ARM_FEATURE_DOTPROD) + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( + vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), + vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( + vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), + vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); +#else + const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); + const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); + const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hf), vget_low_s8 (v1_0h)); + const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hf), vget_high_s8(v1_0h)); + + const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1lf), vget_low_s8 (v1_1l)); + const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1lf), vget_high_s8(v1_1l)); + const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hf), vget_low_s8 (v1_1h)); + const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hf), vget_high_s8(v1_1h)); + + const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); + const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); + const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); + const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); +#endif + } + + *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); +#elif defined(__wasm_simd128__) + v128_t sumv = wasm_f32x4_splat(0.0f); + + uint32_t qh; + uint64_t tmp[4]; + + // TODO: check if unrolling this is better + for (int i = 0; i < nb; ++i) { + const block_q5_0 * restrict x0 = &x[i]; + const block_q8_0 * restrict y0 = &y[i]; + + const v128_t m4b = wasm_i8x16_splat(0x0F); + + // extract the 5th bit + memcpy(&qh, x0->qh, sizeof(qh)); + + tmp[0] = table_b2b_1[(qh >> 0) & 0xFF]; + tmp[1] = table_b2b_1[(qh >> 8) & 0xFF]; + tmp[2] = table_b2b_1[(qh >> 16) & 0xFF]; + tmp[3] = table_b2b_1[(qh >> 24) ]; + + const v128_t qhl = wasm_v128_load(tmp + 0); + const v128_t qhh = wasm_v128_load(tmp + 2); + + const v128_t v0 = wasm_v128_load(x0->qs); + + // 4-bit -> 8-bit + const v128_t v0l = wasm_v128_and (v0, m4b); + const v128_t v0h = wasm_u8x16_shr(v0, 4); + + // add high bit and sub 16 (equivalent to sub 0x10 when bit is zero) + const v128_t v0lf = wasm_i8x16_sub(v0l, qhl); + const v128_t v0hf = 
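+        // subtracting ((!b) << 4) applies the high bit and the -16 offset in
+        // one op: bit set means the value is nib+16 and 0 is subtracted; bit
+        // clear means the value is nib and 16 is subtracted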
wasm_i8x16_sub(v0h, qhh); + + // load y + const v128_t v1l = wasm_v128_load(y0->qs); + const v128_t v1h = wasm_v128_load(y0->qs + 16); + + // int8x16 -> int16x8 + const v128_t v0lfl = wasm_i16x8_extend_low_i8x16 (v0lf); + const v128_t v0lfh = wasm_i16x8_extend_high_i8x16(v0lf); + const v128_t v0hfl = wasm_i16x8_extend_low_i8x16 (v0hf); + const v128_t v0hfh = wasm_i16x8_extend_high_i8x16(v0hf); + + const v128_t v1ll = wasm_i16x8_extend_low_i8x16 (v1l); + const v128_t v1lh = wasm_i16x8_extend_high_i8x16(v1l); + const v128_t v1hl = wasm_i16x8_extend_low_i8x16 (v1h); + const v128_t v1hh = wasm_i16x8_extend_high_i8x16(v1h); + + // dot product + sumv = wasm_f32x4_add(sumv, wasm_f32x4_mul(wasm_f32x4_convert_i32x4( + wasm_i32x4_add( + wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0lfl, v1ll), + wasm_i32x4_dot_i16x8(v0lfh, v1lh)), + wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0hfl, v1hl), + wasm_i32x4_dot_i16x8(v0hfh, v1hh)))), + wasm_f32x4_splat(ggml_fp16_to_fp32(x0->d) * ggml_fp16_to_fp32(y0->d)))); + } + + *s = wasm_f32x4_extract_lane(sumv, 0) + wasm_f32x4_extract_lane(sumv, 1) + + wasm_f32x4_extract_lane(sumv, 2) + wasm_f32x4_extract_lane(sumv, 3); +#elif defined(__AVX2__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + // Main loop + for (int i = 0; i < nb; i++) { + /* Compute combined scale for the block */ + const __m256 d = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d)); + + __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i bxhi = bytes_from_bits_32(x[i].qh); + bxhi = _mm256_andnot_si256(bxhi, _mm256_set1_epi8((char)0xF0)); + bx = _mm256_or_si256(bx, bxhi); + + __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + + const __m256 q = mul_sum_i8_pairs_float(bx, by); + + /* Multiply q with scale and accumulate */ + acc = _mm256_fmadd_ps(d, q, acc); + } + + *s = hsum_float_8(acc); +#elif defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + __m128i mask = _mm_set1_epi8((char)0xF0); + + // Main loop + for (int i = 0; i < nb; i++) { + /* Compute combined scale for the block */ + const __m256 d = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d)); + + __m256i bx = bytes_from_nibbles_32(x[i].qs); + const __m256i bxhi = bytes_from_bits_32(x[i].qh); + __m128i bxhil = _mm256_castsi256_si128(bxhi); + __m128i bxhih = _mm256_extractf128_si256(bxhi, 1); + bxhil = _mm_andnot_si128(bxhil, mask); + bxhih = _mm_andnot_si128(bxhih, mask); + __m128i bxl = _mm256_castsi256_si128(bx); + __m128i bxh = _mm256_extractf128_si256(bx, 1); + bxl = _mm_or_si128(bxl, bxhil); + bxh = _mm_or_si128(bxh, bxhih); + bx = MM256_SET_M128I(bxh, bxl); + + const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + + const __m256 q = mul_sum_i8_pairs_float(bx, by); + + /* Multiply q with scale and accumulate */ + acc = _mm256_add_ps(_mm256_mul_ps(d, q), acc); + } + + *s = hsum_float_8(acc); +#elif defined(__riscv_v_intrinsic) + float sumf = 0.0; + + uint32_t qh; + + size_t vl = __riscv_vsetvl_e8m1(qk/2); + + // These tempory registers are for masking and shift operations + vuint32m2_t vt_1 = __riscv_vid_v_u32m2(vl); + vuint32m2_t vt_2 = __riscv_vsll_vv_u32m2(__riscv_vmv_v_x_u32m2(1, vl), vt_1, vl); + + vuint32m2_t vt_3 = __riscv_vsll_vx_u32m2(vt_2, 16, vl); + vuint32m2_t vt_4 = __riscv_vadd_vx_u32m2(vt_1, 12, vl); + + for (int i = 0; i < nb; i++) { + memcpy(&qh, x[i].qh, sizeof(uint32_t)); + + // ((qh & (1u << (j + 0 ))) >> (j + 0 )) << 4; + vuint32m2_t xha_0 = __riscv_vand_vx_u32m2(vt_2, qh, vl); + vuint32m2_t xhr_0 = 
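+        // vt_2 holds the per-lane masks (1u << j), so this and/srl/sll chain
+        // vectorizes the scalar bit extraction written out in the comments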
__riscv_vsrl_vv_u32m2(xha_0, vt_1, vl); + vuint32m2_t xhl_0 = __riscv_vsll_vx_u32m2(xhr_0, 4, vl); + + // ((qh & (1u << (j + 16))) >> (j + 12)); + vuint32m2_t xha_1 = __riscv_vand_vx_u32m2(vt_3, qh, vl); + vuint32m2_t xhl_1 = __riscv_vsrl_vv_u32m2(xha_1, vt_4, vl); + + // narrowing + vuint16m1_t xhc_0 = __riscv_vncvt_x_x_w_u16m1(xhl_0, vl); + vuint8mf2_t xh_0 = __riscv_vncvt_x_x_w_u8mf2(xhc_0, vl); + + vuint16m1_t xhc_1 = __riscv_vncvt_x_x_w_u16m1(xhl_1, vl); + vuint8mf2_t xh_1 = __riscv_vncvt_x_x_w_u8mf2(xhc_1, vl); + + // load + vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[i].qs, vl); + + vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[i].qs, vl); + vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[i].qs+16, vl); + + vuint8mf2_t x_at = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); + vuint8mf2_t x_lt = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); + + vuint8mf2_t x_a = __riscv_vor_vv_u8mf2(x_at, xh_0, vl); + vuint8mf2_t x_l = __riscv_vor_vv_u8mf2(x_lt, xh_1, vl); + + vint8mf2_t x_ai = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); + vint8mf2_t x_li = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); + + vint8mf2_t v0 = __riscv_vsub_vx_i8mf2(x_ai, 16, vl); + vint8mf2_t v1 = __riscv_vsub_vx_i8mf2(x_li, 16, vl); + + vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); + vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); + + vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); + + vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); + vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); + + sumf += (ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d)) * sumi; + } + + *s = sumf; +#else + // scalar + float sumf = 0.0; + + for (int i = 0; i < nb; i++) { + uint32_t qh; + memcpy(&qh, x[i].qh, sizeof(qh)); + + int sumi = 0; + + for (int j = 0; j < qk/2; ++j) { + const uint8_t xh_0 = ((qh & (1u << (j + 0 ))) >> (j + 0 )) << 4; + const uint8_t xh_1 = ((qh & (1u << (j + 16))) >> (j + 12)); + + const int32_t x0 = ((x[i].qs[j] & 0x0F) | xh_0) - 16; + const int32_t x1 = ((x[i].qs[j] >> 4) | xh_1) - 16; + + sumi += (x0 * y[i].qs[j]) + (x1 * y[i].qs[j + qk/2]); + } + + sumf += (ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d)) * sumi; + } + + *s = sumf; +#endif +} + +void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + const int qk = QK8_1; + const int nb = n / qk; + + assert(n % qk == 0); + assert(qk == QK5_1); + + const block_q5_1 * restrict x = vx; + const block_q8_1 * restrict y = vy; + +#if defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + float summs0 = 0.0f; + float summs1 = 0.0f; + + uint32_t qh0; + uint32_t qh1; + + uint64_t tmp0[4]; + uint64_t tmp1[4]; + + assert(nb % 2 == 0); // TODO: handle odd nb + + for (int i = 0; i < nb; i += 2) { + const block_q5_1 * restrict x0 = &x[i]; + const block_q5_1 * restrict x1 = &x[i + 1]; + const block_q8_1 * restrict y0 = &y[i]; + const block_q8_1 * restrict y1 = &y[i + 1]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + summs0 += ggml_fp16_to_fp32(x0->m) * y0->s; + summs1 += ggml_fp16_to_fp32(x1->m) * y1->s; + + // extract the 5th bit via lookup table ((b) << 4) + memcpy(&qh0, x0->qh, sizeof(qh0)); + memcpy(&qh1, x1->qh, sizeof(qh1)); + + tmp0[0] = table_b2b_0[(qh0 >> 0) & 0xFF]; + tmp0[1] = table_b2b_0[(qh0 >> 8) & 0xFF]; + tmp0[2] = table_b2b_0[(qh0 >> 16) & 0xFF]; + tmp0[3] = table_b2b_0[(qh0 >> 24) ]; + + tmp1[0] = table_b2b_0[(qh1 >> 0) & 0xFF]; + tmp1[1] = table_b2b_0[(qh1 >> 8) & 0xFF]; + 
tmp1[2] = table_b2b_0[(qh1 >> 16) & 0xFF]; + tmp1[3] = table_b2b_0[(qh1 >> 24) ]; + + const int8x16_t qhl0 = vld1q_s8((const int8_t *)(tmp0 + 0)); + const int8x16_t qhh0 = vld1q_s8((const int8_t *)(tmp0 + 2)); + const int8x16_t qhl1 = vld1q_s8((const int8_t *)(tmp1 + 0)); + const int8x16_t qhh1 = vld1q_s8((const int8_t *)(tmp1 + 2)); + + const uint8x16_t v0_0 = vld1q_u8(x0->qs); + const uint8x16_t v0_1 = vld1q_u8(x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // add high bit + const int8x16_t v0_0lf = vorrq_s8(v0_0l, qhl0); + const int8x16_t v0_0hf = vorrq_s8(v0_0h, qhh0); + const int8x16_t v0_1lf = vorrq_s8(v0_1l, qhl1); + const int8x16_t v0_1hf = vorrq_s8(v0_1h, qhh1); + + // load y + const int8x16_t v1_0l = vld1q_s8(y0->qs); + const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); + const int8x16_t v1_1l = vld1q_s8(y1->qs); + const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); + +#if defined(__ARM_FEATURE_DOTPROD) + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( + vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), + vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), ggml_fp16_to_fp32(x0->d)*y0->d); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( + vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), + vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), ggml_fp16_to_fp32(x1->d)*y1->d); +#else + const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); + const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); + const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hf), vget_low_s8 (v1_0h)); + const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hf), vget_high_s8(v1_0h)); + + const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1lf), vget_low_s8 (v1_1l)); + const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1lf), vget_high_s8(v1_1l)); + const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hf), vget_low_s8 (v1_1h)); + const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hf), vget_high_s8(v1_1h)); + + const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); + const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); + const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); + const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), ggml_fp16_to_fp32(x0->d)*y0->d); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), ggml_fp16_to_fp32(x1->d)*y1->d); +#endif + } + + *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs0 + summs1; +#elif defined(__wasm_simd128__) + v128_t sumv = wasm_f32x4_splat(0.0f); + + float summs = 0.0f; + + uint32_t qh; + uint64_t tmp[4]; + + // TODO: check if unrolling this is better + for (int i = 0; i < nb; ++i) { + const block_q5_1 * restrict x0 = &x[i]; + const block_q8_1 * restrict y0 = &y[i]; + + summs += ggml_fp16_to_fp32(x0->m) * y0->s; + + const v128_t m4b = wasm_i8x16_splat(0x0F); + + // extract the 5th bit + memcpy(&qh, x0->qh, sizeof(qh)); + + tmp[0] = table_b2b_0[(qh >> 0) & 0xFF]; + tmp[1] = table_b2b_0[(qh >> 8) & 0xFF]; + tmp[2] = table_b2b_0[(qh >> 16) & 0xFF]; + tmp[3] = table_b2b_0[(qh >> 24) ]; + + const v128_t qhl = wasm_v128_load(tmp + 0); + const v128_t qhh = wasm_v128_load(tmp + 2); + + const v128_t v0 = wasm_v128_load(x0->qs); + + // 4-bit -> 8-bit + const v128_t v0l = wasm_v128_and 
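+        // q5_1 keeps its quants unsigned (dequantization is q*d + m), so the
+        // high bit from table_b2b_0 (bit << 4) is OR-ed in with no -16 offset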
(v0, m4b); + const v128_t v0h = wasm_u8x16_shr(v0, 4); + + // add high bit + const v128_t v0lf = wasm_v128_or(v0l, qhl); + const v128_t v0hf = wasm_v128_or(v0h, qhh); + + // load y + const v128_t v1l = wasm_v128_load(y0->qs); + const v128_t v1h = wasm_v128_load(y0->qs + 16); + + // int8x16 -> int16x8 + const v128_t v0lfl = wasm_i16x8_extend_low_i8x16 (v0lf); + const v128_t v0lfh = wasm_i16x8_extend_high_i8x16(v0lf); + const v128_t v0hfl = wasm_i16x8_extend_low_i8x16 (v0hf); + const v128_t v0hfh = wasm_i16x8_extend_high_i8x16(v0hf); + + const v128_t v1ll = wasm_i16x8_extend_low_i8x16 (v1l); + const v128_t v1lh = wasm_i16x8_extend_high_i8x16(v1l); + const v128_t v1hl = wasm_i16x8_extend_low_i8x16 (v1h); + const v128_t v1hh = wasm_i16x8_extend_high_i8x16(v1h); + + // dot product + sumv = wasm_f32x4_add(sumv, + wasm_f32x4_mul(wasm_f32x4_convert_i32x4(wasm_i32x4_add( + wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0lfl, v1ll), + wasm_i32x4_dot_i16x8(v0lfh, v1lh)), + wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0hfl, v1hl), + wasm_i32x4_dot_i16x8(v0hfh, v1hh)))), + wasm_f32x4_splat(ggml_fp16_to_fp32(x0->d) * y0->d))); + } + + *s = wasm_f32x4_extract_lane(sumv, 0) + wasm_f32x4_extract_lane(sumv, 1) + + wasm_f32x4_extract_lane(sumv, 2) + wasm_f32x4_extract_lane(sumv, 3) + summs; +#elif defined(__AVX2__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + float summs = 0.0f; + + // Main loop + for (int i = 0; i < nb; i++) { + const __m256 dx = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d)); + + summs += ggml_fp16_to_fp32(x[i].m) * y[i].s; + + __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i bxhi = bytes_from_bits_32(x[i].qh); + bxhi = _mm256_and_si256(bxhi, _mm256_set1_epi8(0x10)); + bx = _mm256_or_si256(bx, bxhi); + + const __m256 dy = _mm256_set1_ps(y[i].d); + const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + + const __m256 q = mul_sum_us8_pairs_float(bx, by); + + acc = _mm256_fmadd_ps(q, _mm256_mul_ps(dx, dy), acc); + } + + *s = hsum_float_8(acc) + summs; +#elif defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + __m128i mask = _mm_set1_epi8(0x10); + + float summs = 0.0f; + + // Main loop + for (int i = 0; i < nb; i++) { + const __m256 dx = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d)); + + summs += ggml_fp16_to_fp32(x[i].m) * y[i].s; + + __m256i bx = bytes_from_nibbles_32(x[i].qs); + const __m256i bxhi = bytes_from_bits_32(x[i].qh); + __m128i bxhil = _mm256_castsi256_si128(bxhi); + __m128i bxhih = _mm256_extractf128_si256(bxhi, 1); + bxhil = _mm_and_si128(bxhil, mask); + bxhih = _mm_and_si128(bxhih, mask); + __m128i bxl = _mm256_castsi256_si128(bx); + __m128i bxh = _mm256_extractf128_si256(bx, 1); + bxl = _mm_or_si128(bxl, bxhil); + bxh = _mm_or_si128(bxh, bxhih); + bx = MM256_SET_M128I(bxh, bxl); + + const __m256 dy = _mm256_set1_ps(y[i].d); + const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + + const __m256 q = mul_sum_us8_pairs_float(bx, by); + + acc = _mm256_add_ps(_mm256_mul_ps(q, _mm256_mul_ps(dx, dy)), acc); + } + + *s = hsum_float_8(acc) + summs; +#elif defined(__riscv_v_intrinsic) + float sumf = 0.0; + + uint32_t qh; + + size_t vl = __riscv_vsetvl_e8m1(qk/2); + + // temporary registers for shift operations + vuint32m2_t vt_1 = __riscv_vid_v_u32m2(vl); + vuint32m2_t vt_2 = __riscv_vadd_vx_u32m2(vt_1, 12, vl); + + for (int i = 0; i < nb; i++) { + memcpy(&qh, x[i].qh, sizeof(uint32_t)); + + // load qh + vuint32m2_t vqh = __riscv_vmv_v_x_u32m2(qh, vl); + + // ((qh >> (j + 0)) << 4) & 0x10; + vuint32m2_t xhr_0 
= __riscv_vsrl_vv_u32m2(vqh, vt_1, vl); + vuint32m2_t xhl_0 = __riscv_vsll_vx_u32m2(xhr_0, 4, vl); + vuint32m2_t xha_0 = __riscv_vand_vx_u32m2(xhl_0, 0x10, vl); + + // ((qh >> (j + 12)) ) & 0x10; + vuint32m2_t xhr_1 = __riscv_vsrl_vv_u32m2(vqh, vt_2, vl); + vuint32m2_t xha_1 = __riscv_vand_vx_u32m2(xhr_1, 0x10, vl); + + // narrowing + vuint16m1_t xhc_0 = __riscv_vncvt_x_x_w_u16m1(xha_0, vl); + vuint8mf2_t xh_0 = __riscv_vncvt_x_x_w_u8mf2(xhc_0, vl); + + vuint16m1_t xhc_1 = __riscv_vncvt_x_x_w_u16m1(xha_1, vl); + vuint8mf2_t xh_1 = __riscv_vncvt_x_x_w_u8mf2(xhc_1, vl); + + // load + vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[i].qs, vl); + + vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[i].qs, vl); + vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[i].qs+16, vl); + + vuint8mf2_t x_at = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); + vuint8mf2_t x_lt = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); + + vuint8mf2_t x_a = __riscv_vor_vv_u8mf2(x_at, xh_0, vl); + vuint8mf2_t x_l = __riscv_vor_vv_u8mf2(x_lt, xh_1, vl); + + vint8mf2_t v0 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); + vint8mf2_t v1 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); + + vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); + vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); + + vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); + + vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); + vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); + + sumf += (ggml_fp16_to_fp32(x[i].d)*y[i].d)*sumi + ggml_fp16_to_fp32(x[i].m)*y[i].s; + } + + *s = sumf; +#else + // scalar + float sumf = 0.0; + + for (int i = 0; i < nb; i++) { + uint32_t qh; + memcpy(&qh, x[i].qh, sizeof(qh)); + + int sumi = 0; + + for (int j = 0; j < qk/2; ++j) { + const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; + const uint8_t xh_1 = ((qh >> (j + 12)) ) & 0x10; + + const int32_t x0 = (x[i].qs[j] & 0xF) | xh_0; + const int32_t x1 = (x[i].qs[j] >> 4) | xh_1; + + sumi += (x0 * y[i].qs[j]) + (x1 * y[i].qs[j + qk/2]); + } + + sumf += (ggml_fp16_to_fp32(x[i].d)*y[i].d)*sumi + ggml_fp16_to_fp32(x[i].m)*y[i].s; + } + + *s = sumf; +#endif +} + +void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + const int qk = QK8_0; + const int nb = n / qk; + + assert(n % qk == 0); + + const block_q8_0 * restrict x = vx; + const block_q8_0 * restrict y = vy; + +#if defined(__ARM_NEON) + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t sumv1 = vdupq_n_f32(0.0f); + + assert(nb % 2 == 0); // TODO: handle odd nb + + for (int i = 0; i < nb; i += 2) { + const block_q8_0 * restrict x0 = &x[i + 0]; + const block_q8_0 * restrict x1 = &x[i + 1]; + const block_q8_0 * restrict y0 = &y[i + 0]; + const block_q8_0 * restrict y1 = &y[i + 1]; + + const int8x16_t x0_0 = vld1q_s8(x0->qs); + const int8x16_t x0_1 = vld1q_s8(x0->qs + 16); + const int8x16_t x1_0 = vld1q_s8(x1->qs); + const int8x16_t x1_1 = vld1q_s8(x1->qs + 16); + + // load y + const int8x16_t y0_0 = vld1q_s8(y0->qs); + const int8x16_t y0_1 = vld1q_s8(y0->qs + 16); + const int8x16_t y1_0 = vld1q_s8(y1->qs); + const int8x16_t y1_1 = vld1q_s8(y1->qs + 16); + +#if defined(__ARM_FEATURE_DOTPROD) + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( + vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), + vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); + + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( + vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), + vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), 
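+        // with the ARMv8.2 dot-product extension each vdotq_s32 folds sixteen
+        // i8*i8 products into four i32 lanes, so one 32-element block needs
+        // just two such instructions before the per-block scaling here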
ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); + +#else + const int16x8_t p0_0 = vmull_s8(vget_low_s8 (x0_0), vget_low_s8 (y0_0)); + const int16x8_t p0_1 = vmull_s8(vget_high_s8(x0_0), vget_high_s8(y0_0)); + const int16x8_t p0_2 = vmull_s8(vget_low_s8 (x0_1), vget_low_s8 (y0_1)); + const int16x8_t p0_3 = vmull_s8(vget_high_s8(x0_1), vget_high_s8(y0_1)); + + const int16x8_t p1_0 = vmull_s8(vget_low_s8 (x1_0), vget_low_s8 (y1_0)); + const int16x8_t p1_1 = vmull_s8(vget_high_s8(x1_0), vget_high_s8(y1_0)); + const int16x8_t p1_2 = vmull_s8(vget_low_s8 (x1_1), vget_low_s8 (y1_1)); + const int16x8_t p1_3 = vmull_s8(vget_high_s8(x1_1), vget_high_s8(y1_1)); + + const int32x4_t p0 = vaddq_s32(vpaddlq_s16(p0_0), vpaddlq_s16(p0_1)); + const int32x4_t p1 = vaddq_s32(vpaddlq_s16(p0_2), vpaddlq_s16(p0_3)); + const int32x4_t p2 = vaddq_s32(vpaddlq_s16(p1_0), vpaddlq_s16(p1_1)); + const int32x4_t p3 = vaddq_s32(vpaddlq_s16(p1_2), vpaddlq_s16(p1_3)); + + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(p0, p1)), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(p2, p3)), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); +#endif + } + + *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); +#elif defined(__AVX2__) || defined(__AVX__) + // Initialize accumulator with zeros + __m256 acc = _mm256_setzero_ps(); + + // Main loop + for (int i = 0; i < nb; ++i) { + // Compute combined scale for the block + const __m256 d = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d)); + __m256i bx = _mm256_loadu_si256((const __m256i *)x[i].qs); + __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + + const __m256 q = mul_sum_i8_pairs_float(bx, by); + + // Multiply q with scale and accumulate +#if defined(__AVX2__) + acc = _mm256_fmadd_ps( d, q, acc ); +#else + acc = _mm256_add_ps( _mm256_mul_ps( d, q ), acc ); +#endif + } + + *s = hsum_float_8(acc); +#elif defined(__riscv_v_intrinsic) + float sumf = 0.0; + size_t vl = __riscv_vsetvl_e8m1(qk); + + for (int i = 0; i < nb; i++) { + // load elements + vint8m1_t bx = __riscv_vle8_v_i8m1(x[i].qs, vl); + vint8m1_t by = __riscv_vle8_v_i8m1(y[i].qs, vl); + + vint16m2_t vw_mul = __riscv_vwmul_vv_i16m2(bx, by, vl); + + vint32m1_t v_zero = __riscv_vmv_v_x_i32m1(0, vl); + vint32m1_t v_sum = __riscv_vwredsum_vs_i16m2_i32m1(vw_mul, v_zero, vl); + + int sumi = __riscv_vmv_x_s_i32m1_i32(v_sum); + + sumf += sumi*(ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d)); + } + + *s = sumf; +#else + // scalar + float sumf = 0.0; + + for (int i = 0; i < nb; i++) { + int sumi = 0; + + for (int j = 0; j < qk; j++) { + sumi += x[i].qs[j]*y[i].qs[j]; + } + + sumf += sumi*(ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d)); + } + + *s = sumf; +#endif +} + #if QK_K == 256 void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { diff --git a/k_quants.h b/ggml-quants.h similarity index 63% rename from k_quants.h rename to ggml-quants.h index 9de089e7a..d88f99e33 100644 --- a/k_quants.h +++ b/ggml-quants.h @@ -1,20 +1,14 @@ #pragma once +// This is a private API for quantization and dequantization +// Should not be used directly, use ggml.h instead + #include "ggml.h" #include #include #include -// Super-block size -#ifdef GGML_QKK_64 -#define QK_K 64 -#define K_SCALE_SIZE 4 -#else -#define QK_K 256 -#define K_SCALE_SIZE 12 -#endif - #ifndef static_assert #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) #define static_assert(cond, msg) _Static_assert(cond, 
msg) @@ -23,10 +17,66 @@ #endif #endif +#define QK4_0 32 +typedef struct { + ggml_fp16_t d; // delta + uint8_t qs[QK4_0 / 2]; // nibbles / quants +} block_q4_0; +static_assert(sizeof(block_q4_0) == sizeof(ggml_fp16_t) + QK4_0 / 2, "wrong q4_0 block size/padding"); + +#define QK4_1 32 +typedef struct { + ggml_fp16_t d; // delta + ggml_fp16_t m; // min + uint8_t qs[QK4_1 / 2]; // nibbles / quants +} block_q4_1; +static_assert(sizeof(block_q4_1) == 2 * sizeof(ggml_fp16_t) + QK4_1 / 2, "wrong q4_1 block size/padding"); + +#define QK5_0 32 +typedef struct { + ggml_fp16_t d; // delta + uint8_t qh[4]; // 5-th bit of quants + uint8_t qs[QK5_0 / 2]; // nibbles / quants +} block_q5_0; +static_assert(sizeof(block_q5_0) == sizeof(ggml_fp16_t) + sizeof(uint32_t) + QK5_0 / 2, "wrong q5_0 block size/padding"); + +#define QK5_1 32 +typedef struct { + ggml_fp16_t d; // delta + ggml_fp16_t m; // min + uint8_t qh[4]; // 5-th bit of quants + uint8_t qs[QK5_1 / 2]; // nibbles / quants +} block_q5_1; +static_assert(sizeof(block_q5_1) == 2 * sizeof(ggml_fp16_t) + sizeof(uint32_t) + QK5_1 / 2, "wrong q5_1 block size/padding"); + +#define QK8_0 32 +typedef struct { + ggml_fp16_t d; // delta + int8_t qs[QK8_0]; // quants +} block_q8_0; +static_assert(sizeof(block_q8_0) == sizeof(ggml_fp16_t) + QK8_0, "wrong q8_0 block size/padding"); + +#define QK8_1 32 +typedef struct { + float d; // delta + float s; // d * sum(qs[i]) + int8_t qs[QK8_1]; // quants +} block_q8_1; +static_assert(sizeof(block_q8_1) == 2*sizeof(float) + QK8_1, "wrong q8_1 block size/padding"); + // // Super-block quantization structures // +// Super-block size +#ifdef GGML_QKK_64 +#define QK_K 64 +#define K_SCALE_SIZE 4 +#else +#define QK_K 256 +#define K_SCALE_SIZE 12 +#endif + // 2-bit quantization // weight is represented as x = a * q + b // 16 blocks of 16 elements each @@ -127,6 +177,13 @@ static_assert(sizeof(block_q8_K) == sizeof(float) + QK_K + QK_K/16*sizeof(int16_ // Quantization +void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k); +void quantize_row_q4_1_reference(const float * restrict x, block_q4_1 * restrict y, int k); +void quantize_row_q5_0_reference(const float * restrict x, block_q5_0 * restrict y, int k); +void quantize_row_q5_1_reference(const float * restrict x, block_q5_1 * restrict y, int k); +void quantize_row_q8_0_reference(const float * restrict x, block_q8_0 * restrict y, int k); +void quantize_row_q8_1_reference(const float * restrict x, block_q8_1 * restrict y, int k); + void quantize_row_q2_K_reference(const float * restrict x, block_q2_K * restrict y, int k); void quantize_row_q3_K_reference(const float * restrict x, block_q3_K * restrict y, int k); void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict y, int k); @@ -134,6 +191,13 @@ void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k); void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k); +void quantize_row_q4_0(const float * restrict x, void * restrict y, int k); +void quantize_row_q4_1(const float * restrict x, void * restrict y, int k); +void quantize_row_q5_0(const float * restrict x, void * restrict y, int k); +void quantize_row_q5_1(const float * restrict x, void * restrict y, int k); +void quantize_row_q8_0(const float * restrict x, void * restrict y, int k); +void quantize_row_q8_1(const float * restrict x, void * restrict y, int k); + void 
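// Storage cost per 32 weights follows directly from the structs above:
// q4_0 18 B (4.5 bpw), q4_1 20 B (5.0), q5_0 22 B (5.5), q5_1 24 B (6.0),
// q8_0 34 B (8.5), q8_1 40 B (10.0). A minimal sketch of the row-size
// arithmetic (helper name hypothetical, not part of this API):
#if 0
static size_t q4_0_row_size(int k) {
    // k must be a multiple of QK4_0; 18 bytes per 32 weights = 4.5 bits/weight
    return (size_t)(k / QK4_0) * sizeof(block_q4_0);
}
#endif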
quantize_row_q2_K(const float * restrict x, void * restrict y, int k); void quantize_row_q3_K(const float * restrict x, void * restrict y, int k); void quantize_row_q4_K(const float * restrict x, void * restrict y, int k); @@ -142,6 +206,13 @@ void quantize_row_q6_K(const float * restrict x, void * restrict y, int k); void quantize_row_q8_K(const float * restrict x, void * restrict y, int k); // Dequantization +void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k); +void dequantize_row_q4_1(const block_q4_1 * restrict x, float * restrict y, int k); +void dequantize_row_q5_0(const block_q5_0 * restrict x, float * restrict y, int k); +void dequantize_row_q5_1(const block_q5_1 * restrict x, float * restrict y, int k); +void dequantize_row_q8_0(const block_q8_0 * restrict x, float * restrict y, int k); +//void dequantize_row_q8_1(const block_q8_1 * restrict x, float * restrict y, int k); + void dequantize_row_q2_K(const block_q2_K * restrict x, float * restrict y, int k); void dequantize_row_q3_K(const block_q3_K * restrict x, float * restrict y, int k); void dequantize_row_q4_K(const block_q4_K * restrict x, float * restrict y, int k); @@ -150,16 +221,14 @@ void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int void dequantize_row_q8_K(const block_q8_K * restrict x, float * restrict y, int k); // Dot product +void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); + void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); - -// Quantization with histogram collection -size_t ggml_quantize_q2_K(const float * src, void * dst, int n, int k, int64_t * hist); -size_t ggml_quantize_q3_K(const float * src, void * dst, int n, int k, int64_t * hist); -size_t ggml_quantize_q4_K(const float * src, void * dst, int n, int k, int64_t * hist); -size_t ggml_quantize_q5_K(const float * src, void * dst, int n, int k, int64_t * hist); -size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * hist); - diff --git a/ggml.c b/ggml.c index 6f66bab05..95f72c35e 100644 --- a/ggml.c +++ b/ggml.c @@ -1,10 +1,7 @@ #define _CRT_SECURE_NO_DEPRECATE // Disables ridiculous "unsafe" warnigns on Windows #include "ggml.h" - -#ifdef GGML_USE_K_QUANTS -#include "k_quants.h" -#endif +#include "ggml-quants.h" #if defined(_MSC_VER) || defined(__MINGW32__) #include // using malloc.h with MSC/MINGW @@ -443,21 +440,6 @@ static ggml_fp16_t table_exp_f16[1 << 16]; // precomputed f32 table for f16 (256 KB) static float table_f32_f16[1 << 16]; -#if defined(__ARM_NEON) || defined(__wasm_simd128__) -#define 
B1(c,s,n) 0x ## n ## c , 0x ## n ## s -#define B2(c,s,n) B1(c,s,n ## c), B1(c,s,n ## s) -#define B3(c,s,n) B2(c,s,n ## c), B2(c,s,n ## s) -#define B4(c,s,n) B3(c,s,n ## c), B3(c,s,n ## s) -#define B5(c,s,n) B4(c,s,n ## c), B4(c,s,n ## s) -#define B6(c,s,n) B5(c,s,n ## c), B5(c,s,n ## s) -#define B7(c,s,n) B6(c,s,n ## c), B6(c,s,n ## s) -#define B8(c,s ) B7(c,s, c), B7(c,s, s) - -// precomputed tables for expanding 8bits to 8 bytes: -static const uint64_t table_b2b_0[1 << 8] = { B8(00, 10) }; // ( b) << 4 -static const uint64_t table_b2b_1[1 << 8] = { B8(10, 00) }; // (!b) << 4 -#endif - // On ARM NEON, it's quicker to directly convert x -> x instead of calling into ggml_lookup_fp16_to_fp32, // so we define GGML_FP16_TO_FP32 and GGML_FP32_TO_FP16 elsewhere for NEON. // This is also true for POWER9. @@ -587,1071 +569,8 @@ int64_t ggml_cycles_per_ms(void) { static const size_t CACHE_LINE_SIZE_F32 = CACHE_LINE_SIZE/sizeof(float); -// -// quantization -// - -#define MM256_SET_M128I(a, b) _mm256_insertf128_si256(_mm256_castsi128_si256(b), (a), 1) - -#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) -// multiply int8_t, add results pairwise twice -static inline __m128i mul_sum_i8_pairs(const __m128i x, const __m128i y) { - // Get absolute values of x vectors - const __m128i ax = _mm_sign_epi8(x, x); - // Sign the values of the y vectors - const __m128i sy = _mm_sign_epi8(y, x); - // Perform multiplication and create 16-bit values - const __m128i dot = _mm_maddubs_epi16(ax, sy); - const __m128i ones = _mm_set1_epi16(1); - return _mm_madd_epi16(ones, dot); -} - -#if __AVX__ || __AVX2__ || __AVX512F__ -// horizontally add 8 floats -static inline float hsum_float_8(const __m256 x) { - __m128 res = _mm256_extractf128_ps(x, 1); - res = _mm_add_ps(res, _mm256_castps256_ps128(x)); - res = _mm_add_ps(res, _mm_movehl_ps(res, res)); - res = _mm_add_ss(res, _mm_movehdup_ps(res)); - return _mm_cvtss_f32(res); -} - -// horizontally add 8 int32_t -static inline int hsum_i32_8(const __m256i a) { - const __m128i sum128 = _mm_add_epi32(_mm256_castsi256_si128(a), _mm256_extractf128_si256(a, 1)); - const __m128i hi64 = _mm_unpackhi_epi64(sum128, sum128); - const __m128i sum64 = _mm_add_epi32(hi64, sum128); - const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); - return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); -} - -// horizontally add 4 int32_t -static inline int hsum_i32_4(const __m128i a) { - const __m128i hi64 = _mm_unpackhi_epi64(a, a); - const __m128i sum64 = _mm_add_epi32(hi64, a); - const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); - return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); -} - -#if defined(__AVX2__) || defined(__AVX512F__) -// spread 32 bits to 32 bytes { 0x00, 0xFF } -static inline __m256i bytes_from_bits_32(const uint8_t * x) { - uint32_t x32; - memcpy(&x32, x, sizeof(uint32_t)); - const __m256i shuf_mask = _mm256_set_epi64x( - 0x0303030303030303, 0x0202020202020202, - 0x0101010101010101, 0x0000000000000000); - __m256i bytes = _mm256_shuffle_epi8(_mm256_set1_epi32(x32), shuf_mask); - const __m256i bit_mask = _mm256_set1_epi64x(0x7fbfdfeff7fbfdfe); - bytes = _mm256_or_si256(bytes, bit_mask); - return _mm256_cmpeq_epi8(bytes, _mm256_set1_epi64x(-1)); -} - -// Unpack 32 4-bit fields into 32 bytes -// The output vector contains 32 bytes, each one in [ 0 .. 
15 ] interval -static inline __m256i bytes_from_nibbles_32(const uint8_t * rsi) -{ - const __m128i tmp = _mm_loadu_si128((const __m128i *)rsi); - const __m256i bytes = MM256_SET_M128I(_mm_srli_epi16(tmp, 4), tmp); - const __m256i lowMask = _mm256_set1_epi8( 0xF ); - return _mm256_and_si256(lowMask, bytes); -} - -// add int16_t pairwise and return as float vector -static inline __m256 sum_i16_pairs_float(const __m256i x) { - const __m256i ones = _mm256_set1_epi16(1); - const __m256i summed_pairs = _mm256_madd_epi16(ones, x); - return _mm256_cvtepi32_ps(summed_pairs); -} - -static inline __m256 mul_sum_us8_pairs_float(const __m256i ax, const __m256i sy) { -#if __AVXVNNI__ - const __m256i zero = _mm256_setzero_si256(); - const __m256i summed_pairs = _mm256_dpbusd_epi32(zero, ax, sy); - return _mm256_cvtepi32_ps(summed_pairs); -#else - // Perform multiplication and create 16-bit values - const __m256i dot = _mm256_maddubs_epi16(ax, sy); - return sum_i16_pairs_float(dot); -#endif -} - -// multiply int8_t, add results pairwise twice and return as float vector -static inline __m256 mul_sum_i8_pairs_float(const __m256i x, const __m256i y) { -#if __AVXVNNIINT8__ - const __m256i zero = _mm256_setzero_si256(); - const __m256i summed_pairs = _mm256_dpbssd_epi32(zero, x, y); - return _mm256_cvtepi32_ps(summed_pairs); -#else - // Get absolute values of x vectors - const __m256i ax = _mm256_sign_epi8(x, x); - // Sign the values of the y vectors - const __m256i sy = _mm256_sign_epi8(y, x); - return mul_sum_us8_pairs_float(ax, sy); -#endif -} - -static inline __m128i packNibbles( __m256i bytes ) -{ - // Move bits within 16-bit lanes from 0000_abcd_0000_efgh into 0000_0000_abcd_efgh -#if __AVX512F__ - const __m256i bytes_srli_4 = _mm256_srli_epi16(bytes, 4); // 0000_0000_abcd_0000 - bytes = _mm256_or_si256(bytes, bytes_srli_4); // 0000_abcd_abcd_efgh - return _mm256_cvtepi16_epi8(bytes); // abcd_efgh -#else - const __m256i lowByte = _mm256_set1_epi16( 0xFF ); - __m256i high = _mm256_andnot_si256( lowByte, bytes ); - __m256i low = _mm256_and_si256( lowByte, bytes ); - high = _mm256_srli_epi16( high, 4 ); - bytes = _mm256_or_si256( low, high ); - - // Compress uint16_t lanes into bytes - __m128i r0 = _mm256_castsi256_si128( bytes ); - __m128i r1 = _mm256_extracti128_si256( bytes, 1 ); - return _mm_packus_epi16( r0, r1 ); -#endif -} -#elif defined(__AVX__) -// spread 32 bits to 32 bytes { 0x00, 0xFF } -static inline __m256i bytes_from_bits_32(const uint8_t * x) { - uint32_t x32; - memcpy(&x32, x, sizeof(uint32_t)); - const __m128i shuf_maskl = _mm_set_epi64x(0x0101010101010101, 0x0000000000000000); - const __m128i shuf_maskh = _mm_set_epi64x(0x0303030303030303, 0x0202020202020202); - __m128i bytesl = _mm_shuffle_epi8(_mm_set1_epi32(x32), shuf_maskl); - __m128i bytesh = _mm_shuffle_epi8(_mm_set1_epi32(x32), shuf_maskh); - const __m128i bit_mask = _mm_set1_epi64x(0x7fbfdfeff7fbfdfe); - bytesl = _mm_or_si128(bytesl, bit_mask); - bytesh = _mm_or_si128(bytesh, bit_mask); - bytesl = _mm_cmpeq_epi8(bytesl, _mm_set1_epi64x(-1)); - bytesh = _mm_cmpeq_epi8(bytesh, _mm_set1_epi64x(-1)); - return MM256_SET_M128I(bytesh, bytesl); -} - -// Unpack 32 4-bit fields into 32 bytes -// The output vector contains 32 bytes, each one in [ 0 .. 
15 ] interval -static inline __m256i bytes_from_nibbles_32(const uint8_t * rsi) -{ - // Load 16 bytes from memory - __m128i tmpl = _mm_loadu_si128((const __m128i *)rsi); - __m128i tmph = _mm_srli_epi16(tmpl, 4); - const __m128i lowMask = _mm_set1_epi8(0xF); - tmpl = _mm_and_si128(lowMask, tmpl); - tmph = _mm_and_si128(lowMask, tmph); - return MM256_SET_M128I(tmph, tmpl); -} - -// add int16_t pairwise and return as float vector -static inline __m256 sum_i16_pairs_float(const __m128i xh, const __m128i xl) { - const __m128i ones = _mm_set1_epi16(1); - const __m128i summed_pairsl = _mm_madd_epi16(ones, xl); - const __m128i summed_pairsh = _mm_madd_epi16(ones, xh); - const __m256i summed_pairs = MM256_SET_M128I(summed_pairsh, summed_pairsl); - return _mm256_cvtepi32_ps(summed_pairs); -} - -static inline __m256 mul_sum_us8_pairs_float(const __m256i ax, const __m256i sy) { - const __m128i axl = _mm256_castsi256_si128(ax); - const __m128i axh = _mm256_extractf128_si256(ax, 1); - const __m128i syl = _mm256_castsi256_si128(sy); - const __m128i syh = _mm256_extractf128_si256(sy, 1); - // Perform multiplication and create 16-bit values - const __m128i dotl = _mm_maddubs_epi16(axl, syl); - const __m128i doth = _mm_maddubs_epi16(axh, syh); - return sum_i16_pairs_float(doth, dotl); -} - -// multiply int8_t, add results pairwise twice and return as float vector -static inline __m256 mul_sum_i8_pairs_float(const __m256i x, const __m256i y) { - const __m128i xl = _mm256_castsi256_si128(x); - const __m128i xh = _mm256_extractf128_si256(x, 1); - const __m128i yl = _mm256_castsi256_si128(y); - const __m128i yh = _mm256_extractf128_si256(y, 1); - // Get absolute values of x vectors - const __m128i axl = _mm_sign_epi8(xl, xl); - const __m128i axh = _mm_sign_epi8(xh, xh); - // Sign the values of the y vectors - const __m128i syl = _mm_sign_epi8(yl, xl); - const __m128i syh = _mm_sign_epi8(yh, xh); - // Perform multiplication and create 16-bit values - const __m128i dotl = _mm_maddubs_epi16(axl, syl); - const __m128i doth = _mm_maddubs_epi16(axh, syh); - return sum_i16_pairs_float(doth, dotl); -} - -static inline __m128i packNibbles( __m128i bytes1, __m128i bytes2 ) -{ - // Move bits within 16-bit lanes from 0000_abcd_0000_efgh into 0000_0000_abcd_efgh - const __m128i lowByte = _mm_set1_epi16( 0xFF ); - __m128i high = _mm_andnot_si128( lowByte, bytes1 ); - __m128i low = _mm_and_si128( lowByte, bytes1 ); - high = _mm_srli_epi16( high, 4 ); - bytes1 = _mm_or_si128( low, high ); - high = _mm_andnot_si128( lowByte, bytes2 ); - low = _mm_and_si128( lowByte, bytes2 ); - high = _mm_srli_epi16( high, 4 ); - bytes2 = _mm_or_si128( low, high ); - - return _mm_packus_epi16( bytes1, bytes2); -} -#endif -#elif defined(__SSSE3__) -// horizontally add 4x4 floats -static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 c, const __m128 d) { - __m128 res_0 =_mm_hadd_ps(a, b); - __m128 res_1 =_mm_hadd_ps(c, d); - __m128 res =_mm_hadd_ps(res_0, res_1); - res =_mm_hadd_ps(res, res); - res =_mm_hadd_ps(res, res); - - return _mm_cvtss_f32(res); -} -#endif // __AVX__ || __AVX2__ || __AVX512F__ -#endif // defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) - -#if defined(__ARM_NEON) - -#if !defined(__aarch64__) - -inline static int32_t vaddvq_s32(int32x4_t v) { - return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); -} - -inline static float vaddvq_f32(float32x4_t v) { - return vgetq_lane_f32(v, 0) + vgetq_lane_f32(v, 1) + vgetq_lane_f32(v, 
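// The fallbacks in this !__aarch64__ block emulate AArch64-only horizontal
// intrinsics (vaddvq_s32, vaddvq_f32, vmaxvq_f32, vcvtnq_s32_f32) lane by
// lane, so the quantization kernels also build on 32-bit ARM NEON where
// those instructions do not exist.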
2) + vgetq_lane_f32(v, 3); -} - -inline static float vmaxvq_f32(float32x4_t v) { - return - MAX(MAX(vgetq_lane_f32(v, 0), vgetq_lane_f32(v, 1)), - MAX(vgetq_lane_f32(v, 2), vgetq_lane_f32(v, 3))); -} - -inline static int32x4_t vcvtnq_s32_f32(float32x4_t v) { - int32x4_t res; - - res[0] = roundf(vgetq_lane_f32(v, 0)); - res[1] = roundf(vgetq_lane_f32(v, 1)); - res[2] = roundf(vgetq_lane_f32(v, 2)); - res[3] = roundf(vgetq_lane_f32(v, 3)); - - return res; -} - -#endif -#endif - -#define QK4_0 32 -typedef struct { - ggml_fp16_t d; // delta - uint8_t qs[QK4_0 / 2]; // nibbles / quants -} block_q4_0; -static_assert(sizeof(block_q4_0) == sizeof(ggml_fp16_t) + QK4_0 / 2, "wrong q4_0 block size/padding"); - -#define QK4_1 32 -typedef struct { - ggml_fp16_t d; // delta - ggml_fp16_t m; // min - uint8_t qs[QK4_1 / 2]; // nibbles / quants -} block_q4_1; -static_assert(sizeof(block_q4_1) == 2 * sizeof(ggml_fp16_t) + QK4_1 / 2, "wrong q4_1 block size/padding"); - -#define QK5_0 32 -typedef struct { - ggml_fp16_t d; // delta - uint8_t qh[4]; // 5-th bit of quants - uint8_t qs[QK5_0 / 2]; // nibbles / quants -} block_q5_0; -static_assert(sizeof(block_q5_0) == sizeof(ggml_fp16_t) + sizeof(uint32_t) + QK5_0 / 2, "wrong q5_0 block size/padding"); - -#define QK5_1 32 -typedef struct { - ggml_fp16_t d; // delta - ggml_fp16_t m; // min - uint8_t qh[4]; // 5-th bit of quants - uint8_t qs[QK5_1 / 2]; // nibbles / quants -} block_q5_1; -static_assert(sizeof(block_q5_1) == 2 * sizeof(ggml_fp16_t) + sizeof(uint32_t) + QK5_1 / 2, "wrong q5_1 block size/padding"); - -#define QK8_0 32 -typedef struct { - ggml_fp16_t d; // delta - int8_t qs[QK8_0]; // quants -} block_q8_0; -static_assert(sizeof(block_q8_0) == sizeof(ggml_fp16_t) + QK8_0, "wrong q8_0 block size/padding"); - -#define QK8_1 32 -typedef struct { - float d; // delta - float s; // d * sum(qs[i]) - int8_t qs[QK8_1]; // quants -} block_q8_1; -static_assert(sizeof(block_q8_1) == 2*sizeof(float) + QK8_1, "wrong q8_1 block size/padding"); - -// reference implementation for deterministic creation of model files -static void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k) { - static const int qk = QK4_0; - - assert(k % qk == 0); - - const int nb = k / qk; - - for (int i = 0; i < nb; i++) { - float amax = 0.0f; // absolute max - float max = 0.0f; - - for (int j = 0; j < qk; j++) { - const float v = x[i*qk + j]; - if (amax < fabsf(v)) { - amax = fabsf(v); - max = v; - } - } - - const float d = max / -8; - const float id = d ? 1.0f/d : 0.0f; - - y[i].d = GGML_FP32_TO_FP16(d); - - for (int j = 0; j < qk/2; ++j) { - const float x0 = x[i*qk + 0 + j]*id; - const float x1 = x[i*qk + qk/2 + j]*id; - - const uint8_t xi0 = MIN(15, (int8_t)(x0 + 8.5f)); - const uint8_t xi1 = MIN(15, (int8_t)(x1 + 8.5f)); - - y[i].qs[j] = xi0; - y[i].qs[j] |= xi1 << 4; - } - } -} - -static void quantize_row_q4_0(const float * restrict x, void * restrict y, int k) { - quantize_row_q4_0_reference(x, y, k); -} - -static void quantize_row_q4_1_reference(const float * restrict x, block_q4_1 * restrict y, int k) { - const int qk = QK4_1; - - assert(k % qk == 0); - - const int nb = k / qk; - - for (int i = 0; i < nb; i++) { - float min = FLT_MAX; - float max = -FLT_MAX; - - for (int j = 0; j < qk; j++) { - const float v = x[i*qk + j]; - - if (v < min) min = v; - if (v > max) max = v; - } - - const float d = (max - min) / ((1 << 4) - 1); - const float id = d ? 
1.0f/d : 0.0f; - - y[i].d = GGML_FP32_TO_FP16(d); - y[i].m = GGML_FP32_TO_FP16(min); - - for (int j = 0; j < qk/2; ++j) { - const float x0 = (x[i*qk + 0 + j] - min)*id; - const float x1 = (x[i*qk + qk/2 + j] - min)*id; - - const uint8_t xi0 = MIN(15, (int8_t)(x0 + 0.5f)); - const uint8_t xi1 = MIN(15, (int8_t)(x1 + 0.5f)); - - y[i].qs[j] = xi0; - y[i].qs[j] |= xi1 << 4; - } - } -} - -static void quantize_row_q4_1(const float * restrict x, void * restrict y, int k) { - quantize_row_q4_1_reference(x, y, k); -} - -static void quantize_row_q5_0_reference(const float * restrict x, block_q5_0 * restrict y, int k) { - static const int qk = QK5_0; - - assert(k % qk == 0); - - const int nb = k / qk; - - for (int i = 0; i < nb; i++) { - float amax = 0.0f; // absolute max - float max = 0.0f; - - for (int j = 0; j < qk; j++) { - const float v = x[i*qk + j]; - if (amax < fabsf(v)) { - amax = fabsf(v); - max = v; - } - } - - const float d = max / -16; - const float id = d ? 1.0f/d : 0.0f; - - y[i].d = GGML_FP32_TO_FP16(d); - - uint32_t qh = 0; - - for (int j = 0; j < qk/2; ++j) { - const float x0 = x[i*qk + 0 + j]*id; - const float x1 = x[i*qk + qk/2 + j]*id; - - const uint8_t xi0 = MIN(31, (int8_t)(x0 + 16.5f)); - const uint8_t xi1 = MIN(31, (int8_t)(x1 + 16.5f)); - - y[i].qs[j] = (xi0 & 0x0F) | ((xi1 & 0x0F) << 4); - - // get the 5-th bit and store it in qh at the right position - qh |= ((xi0 & 0x10u) >> 4) << (j + 0); - qh |= ((xi1 & 0x10u) >> 4) << (j + qk/2); - } - - memcpy(&y[i].qh, &qh, sizeof(qh)); - } -} - -static void quantize_row_q5_0(const float * restrict x, void * restrict y, int k) { - quantize_row_q5_0_reference(x, y, k); -} - -static void quantize_row_q5_1_reference(const float * restrict x, block_q5_1 * restrict y, int k) { - const int qk = QK5_1; - - assert(k % qk == 0); - - const int nb = k / qk; - - for (int i = 0; i < nb; i++) { - float min = FLT_MAX; - float max = -FLT_MAX; - - for (int j = 0; j < qk; j++) { - const float v = x[i*qk + j]; - - if (v < min) min = v; - if (v > max) max = v; - } - - const float d = (max - min) / ((1 << 5) - 1); - const float id = d ? 1.0f/d : 0.0f; - - y[i].d = GGML_FP32_TO_FP16(d); - y[i].m = GGML_FP32_TO_FP16(min); - - uint32_t qh = 0; - - for (int j = 0; j < qk/2; ++j) { - const float x0 = (x[i*qk + 0 + j] - min)*id; - const float x1 = (x[i*qk + qk/2 + j] - min)*id; - - const uint8_t xi0 = (uint8_t)(x0 + 0.5f); - const uint8_t xi1 = (uint8_t)(x1 + 0.5f); - - y[i].qs[j] = (xi0 & 0x0F) | ((xi1 & 0x0F) << 4); - - // get the 5-th bit and store it in qh at the right position - qh |= ((xi0 & 0x10u) >> 4) << (j + 0); - qh |= ((xi1 & 0x10u) >> 4) << (j + qk/2); - } - - memcpy(&y[i].qh, &qh, sizeof(y[i].qh)); - } -} - -static void quantize_row_q5_1(const float * restrict x, void * restrict y, int k) { - quantize_row_q5_1_reference(x, y, k); -} - -// reference implementation for deterministic creation of model files -static void quantize_row_q8_0_reference(const float * restrict x, block_q8_0 * restrict y, int k) { - assert(k % QK8_0 == 0); - const int nb = k / QK8_0; - - for (int i = 0; i < nb; i++) { - float amax = 0.0f; // absolute max - - for (int j = 0; j < QK8_0; j++) { - const float v = x[i*QK8_0 + j]; - amax = MAX(amax, fabsf(v)); - } - - const float d = amax / ((1 << 7) - 1); - const float id = d ? 
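// The reference quantizers above use two schemes (restating the code):
//   symmetric  (q4_0, q5_0): d = max / -(2^(b-1)),        x ~= d * (q - 2^(b-1))
//   asymmetric (q4_1, q5_1): d = (max - min) / (2^b - 1), x ~= d * q + min
// with b = 4 or 5; dividing by -2^(b-1) maps the value of largest magnitude
// exactly onto the far end of the signed range. The recurring
// `d ? 1.0f/d : 0.0f` guard keeps an all-zero block from dividing by zero,
// and the 5-bit formats pack the top bit of each quant separately into qh.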
1.0f/d : 0.0f; - - y[i].d = GGML_FP32_TO_FP16(d); - - for (int j = 0; j < QK8_0; ++j) { - const float x0 = x[i*QK8_0 + j]*id; - - y[i].qs[j] = roundf(x0); - } - } -} - -static void quantize_row_q8_0(const float * restrict x, void * restrict vy, int k) { - assert(QK8_0 == 32); - assert(k % QK8_0 == 0); - const int nb = k / QK8_0; - - block_q8_0 * restrict y = vy; - -#if defined(__ARM_NEON) - for (int i = 0; i < nb; i++) { - float32x4_t srcv [8]; - float32x4_t asrcv[8]; - float32x4_t amaxv[8]; - - for (int j = 0; j < 8; j++) srcv[j] = vld1q_f32(x + i*32 + 4*j); - for (int j = 0; j < 8; j++) asrcv[j] = vabsq_f32(srcv[j]); - - for (int j = 0; j < 4; j++) amaxv[2*j] = vmaxq_f32(asrcv[2*j], asrcv[2*j+1]); - for (int j = 0; j < 2; j++) amaxv[4*j] = vmaxq_f32(amaxv[4*j], amaxv[4*j+2]); - for (int j = 0; j < 1; j++) amaxv[8*j] = vmaxq_f32(amaxv[8*j], amaxv[8*j+4]); - - const float amax = vmaxvq_f32(amaxv[0]); - - const float d = amax / ((1 << 7) - 1); - const float id = d ? 1.0f/d : 0.0f; - - y[i].d = GGML_FP32_TO_FP16(d); - - for (int j = 0; j < 8; j++) { - const float32x4_t v = vmulq_n_f32(srcv[j], id); - const int32x4_t vi = vcvtnq_s32_f32(v); - - y[i].qs[4*j + 0] = vgetq_lane_s32(vi, 0); - y[i].qs[4*j + 1] = vgetq_lane_s32(vi, 1); - y[i].qs[4*j + 2] = vgetq_lane_s32(vi, 2); - y[i].qs[4*j + 3] = vgetq_lane_s32(vi, 3); - } - } -#elif defined(__wasm_simd128__) - for (int i = 0; i < nb; i++) { - v128_t srcv [8]; - v128_t asrcv[8]; - v128_t amaxv[8]; - - for (int j = 0; j < 8; j++) srcv[j] = wasm_v128_load(x + i*32 + 4*j); - for (int j = 0; j < 8; j++) asrcv[j] = wasm_f32x4_abs(srcv[j]); - - for (int j = 0; j < 4; j++) amaxv[2*j] = wasm_f32x4_max(asrcv[2*j], asrcv[2*j+1]); - for (int j = 0; j < 2; j++) amaxv[4*j] = wasm_f32x4_max(amaxv[4*j], amaxv[4*j+2]); - for (int j = 0; j < 1; j++) amaxv[8*j] = wasm_f32x4_max(amaxv[8*j], amaxv[8*j+4]); - - const float amax = MAX(MAX(wasm_f32x4_extract_lane(amaxv[0], 0), - wasm_f32x4_extract_lane(amaxv[0], 1)), - MAX(wasm_f32x4_extract_lane(amaxv[0], 2), - wasm_f32x4_extract_lane(amaxv[0], 3))); - - const float d = amax / ((1 << 7) - 1); - const float id = d ? 
1.0f/d : 0.0f; - - y[i].d = GGML_FP32_TO_FP16(d); - - for (int j = 0; j < 8; j++) { - const v128_t v = wasm_f32x4_mul(srcv[j], wasm_f32x4_splat(id)); - const v128_t vi = wasm_i32x4_trunc_sat_f32x4(v); - - y[i].qs[4*j + 0] = wasm_i32x4_extract_lane(vi, 0); - y[i].qs[4*j + 1] = wasm_i32x4_extract_lane(vi, 1); - y[i].qs[4*j + 2] = wasm_i32x4_extract_lane(vi, 2); - y[i].qs[4*j + 3] = wasm_i32x4_extract_lane(vi, 3); - } - } -#elif defined(__AVX2__) || defined(__AVX__) - for (int i = 0; i < nb; i++) { - // Load elements into 4 AVX vectors - __m256 v0 = _mm256_loadu_ps( x ); - __m256 v1 = _mm256_loadu_ps( x + 8 ); - __m256 v2 = _mm256_loadu_ps( x + 16 ); - __m256 v3 = _mm256_loadu_ps( x + 24 ); - x += 32; - - // Compute max(abs(e)) for the block - const __m256 signBit = _mm256_set1_ps( -0.0f ); - __m256 maxAbs = _mm256_andnot_ps( signBit, v0 ); - maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v1 ) ); - maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v2 ) ); - maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v3 ) ); - - __m128 max4 = _mm_max_ps( _mm256_extractf128_ps( maxAbs, 1 ), _mm256_castps256_ps128( maxAbs ) ); - max4 = _mm_max_ps( max4, _mm_movehl_ps( max4, max4 ) ); - max4 = _mm_max_ss( max4, _mm_movehdup_ps( max4 ) ); - const float maxScalar = _mm_cvtss_f32( max4 ); - - // Quantize these floats - const float d = maxScalar / 127.f; - y[i].d = GGML_FP32_TO_FP16(d); - const float id = ( maxScalar != 0.0f ) ? 127.f / maxScalar : 0.0f; - const __m256 mul = _mm256_set1_ps( id ); - - // Apply the multiplier - v0 = _mm256_mul_ps( v0, mul ); - v1 = _mm256_mul_ps( v1, mul ); - v2 = _mm256_mul_ps( v2, mul ); - v3 = _mm256_mul_ps( v3, mul ); - - // Round to nearest integer - v0 = _mm256_round_ps( v0, _MM_ROUND_NEAREST ); - v1 = _mm256_round_ps( v1, _MM_ROUND_NEAREST ); - v2 = _mm256_round_ps( v2, _MM_ROUND_NEAREST ); - v3 = _mm256_round_ps( v3, _MM_ROUND_NEAREST ); - - // Convert floats to integers - __m256i i0 = _mm256_cvtps_epi32( v0 ); - __m256i i1 = _mm256_cvtps_epi32( v1 ); - __m256i i2 = _mm256_cvtps_epi32( v2 ); - __m256i i3 = _mm256_cvtps_epi32( v3 ); - -#if defined(__AVX2__) - // Convert int32 to int16 - i0 = _mm256_packs_epi32( i0, i1 ); // 0, 1, 2, 3, 8, 9, 10, 11, 4, 5, 6, 7, 12, 13, 14, 15 - i2 = _mm256_packs_epi32( i2, i3 ); // 16, 17, 18, 19, 24, 25, 26, 27, 20, 21, 22, 23, 28, 29, 30, 31 - // Convert int16 to int8 - i0 = _mm256_packs_epi16( i0, i2 ); // 0, 1, 2, 3, 8, 9, 10, 11, 16, 17, 18, 19, 24, 25, 26, 27, 4, 5, 6, 7, 12, 13, 14, 15, 20, 21, 22, 23, 28, 29, 30, 31 - - // We got our precious signed bytes, but the order is now wrong - // These AVX2 pack instructions process 16-byte pieces independently - // The following instruction is fixing the order - const __m256i perm = _mm256_setr_epi32( 0, 4, 1, 5, 2, 6, 3, 7 ); - i0 = _mm256_permutevar8x32_epi32( i0, perm ); - - _mm256_storeu_si256((__m256i *)y[i].qs, i0); -#else - // Since we don't have in AVX some necessary functions, - // we split the registers in half and call AVX2 analogs from SSE - __m128i ni0 = _mm256_castsi256_si128( i0 ); - __m128i ni1 = _mm256_extractf128_si256( i0, 1); - __m128i ni2 = _mm256_castsi256_si128( i1 ); - __m128i ni3 = _mm256_extractf128_si256( i1, 1); - __m128i ni4 = _mm256_castsi256_si128( i2 ); - __m128i ni5 = _mm256_extractf128_si256( i2, 1); - __m128i ni6 = _mm256_castsi256_si128( i3 ); - __m128i ni7 = _mm256_extractf128_si256( i3, 1); - - // Convert int32 to int16 - ni0 = _mm_packs_epi32( ni0, ni1 ); - ni2 = _mm_packs_epi32( ni2, ni3 ); - ni4 = _mm_packs_epi32( ni4, 
ni5 ); - ni6 = _mm_packs_epi32( ni6, ni7 ); - // Convert int16 to int8 - ni0 = _mm_packs_epi16( ni0, ni2 ); - ni4 = _mm_packs_epi16( ni4, ni6 ); - - _mm_storeu_si128((__m128i *)(y[i].qs + 0), ni0); - _mm_storeu_si128((__m128i *)(y[i].qs + 16), ni4); -#endif - } -#elif defined(__riscv_v_intrinsic) - - size_t vl = __riscv_vsetvl_e32m4(QK8_0); - - for (int i = 0; i < nb; i++) { - // load elements - vfloat32m4_t v_x = __riscv_vle32_v_f32m4(x+i*QK8_0, vl); - - vfloat32m4_t vfabs = __riscv_vfabs_v_f32m4(v_x, vl); - vfloat32m1_t tmp = __riscv_vfmv_v_f_f32m1(0.0f, vl); - vfloat32m1_t vmax = __riscv_vfredmax_vs_f32m4_f32m1(vfabs, tmp, vl); - float amax = __riscv_vfmv_f_s_f32m1_f32(vmax); - - const float d = amax / ((1 << 7) - 1); - const float id = d ? 1.0f/d : 0.0f; - - y[i].d = GGML_FP32_TO_FP16(d); - - vfloat32m4_t x0 = __riscv_vfmul_vf_f32m4(v_x, id, vl); - - // convert to integer - vint16m2_t vi = __riscv_vfncvt_x_f_w_i16m2(x0, vl); - vint8m1_t vs = __riscv_vncvt_x_x_w_i8m1(vi, vl); - - // store result - __riscv_vse8_v_i8m1(y[i].qs , vs, vl); - } -#else - // scalar - quantize_row_q8_0_reference(x, y, k); -#endif -} - -// reference implementation for deterministic creation of model files -static void quantize_row_q8_1_reference(const float * restrict x, block_q8_1 * restrict y, int k) { - assert(QK8_1 == 32); - assert(k % QK8_1 == 0); - const int nb = k / QK8_1; - - for (int i = 0; i < nb; i++) { - float amax = 0.0f; // absolute max - - for (int j = 0; j < QK8_1; j++) { - const float v = x[i*QK8_1 + j]; - amax = MAX(amax, fabsf(v)); - } - - const float d = amax / ((1 << 7) - 1); - const float id = d ? 1.0f/d : 0.0f; - - y[i].d = d; - - int sum = 0; - - for (int j = 0; j < QK8_1/2; ++j) { - const float v0 = x[i*QK8_1 + j]*id; - const float v1 = x[i*QK8_1 + QK8_1/2 + j]*id; - - y[i].qs[ j] = roundf(v0); - y[i].qs[QK8_1/2 + j] = roundf(v1); - - sum += y[i].qs[ j]; - sum += y[i].qs[QK8_1/2 + j]; - } - - y[i].s = sum*d; - } -} - -static void quantize_row_q8_1(const float * restrict x, void * restrict vy, int k) { - assert(k % QK8_1 == 0); - const int nb = k / QK8_1; - - block_q8_1 * restrict y = vy; - -#if defined(__ARM_NEON) - for (int i = 0; i < nb; i++) { - float32x4_t srcv [8]; - float32x4_t asrcv[8]; - float32x4_t amaxv[8]; - - for (int j = 0; j < 8; j++) srcv[j] = vld1q_f32(x + i*32 + 4*j); - for (int j = 0; j < 8; j++) asrcv[j] = vabsq_f32(srcv[j]); - - for (int j = 0; j < 4; j++) amaxv[2*j] = vmaxq_f32(asrcv[2*j], asrcv[2*j+1]); - for (int j = 0; j < 2; j++) amaxv[4*j] = vmaxq_f32(amaxv[4*j], amaxv[4*j+2]); - for (int j = 0; j < 1; j++) amaxv[8*j] = vmaxq_f32(amaxv[8*j], amaxv[8*j+4]); - - const float amax = vmaxvq_f32(amaxv[0]); - - const float d = amax / ((1 << 7) - 1); - const float id = d ? 
1.0f/d : 0.0f; - - y[i].d = d; - - int32x4_t accv = vdupq_n_s32(0); - - for (int j = 0; j < 8; j++) { - const float32x4_t v = vmulq_n_f32(srcv[j], id); - const int32x4_t vi = vcvtnq_s32_f32(v); - - y[i].qs[4*j + 0] = vgetq_lane_s32(vi, 0); - y[i].qs[4*j + 1] = vgetq_lane_s32(vi, 1); - y[i].qs[4*j + 2] = vgetq_lane_s32(vi, 2); - y[i].qs[4*j + 3] = vgetq_lane_s32(vi, 3); - - accv = vaddq_s32(accv, vi); - } - - y[i].s = d * vaddvq_s32(accv); - } -#elif defined(__wasm_simd128__) - for (int i = 0; i < nb; i++) { - v128_t srcv [8]; - v128_t asrcv[8]; - v128_t amaxv[8]; - - for (int j = 0; j < 8; j++) srcv[j] = wasm_v128_load(x + i*32 + 4*j); - for (int j = 0; j < 8; j++) asrcv[j] = wasm_f32x4_abs(srcv[j]); - - for (int j = 0; j < 4; j++) amaxv[2*j] = wasm_f32x4_max(asrcv[2*j], asrcv[2*j+1]); - for (int j = 0; j < 2; j++) amaxv[4*j] = wasm_f32x4_max(amaxv[4*j], amaxv[4*j+2]); - for (int j = 0; j < 1; j++) amaxv[8*j] = wasm_f32x4_max(amaxv[8*j], amaxv[8*j+4]); - - const float amax = MAX(MAX(wasm_f32x4_extract_lane(amaxv[0], 0), - wasm_f32x4_extract_lane(amaxv[0], 1)), - MAX(wasm_f32x4_extract_lane(amaxv[0], 2), - wasm_f32x4_extract_lane(amaxv[0], 3))); - - const float d = amax / ((1 << 7) - 1); - const float id = d ? 1.0f/d : 0.0f; - - y[i].d = d; - - v128_t accv = wasm_i32x4_splat(0); - - for (int j = 0; j < 8; j++) { - const v128_t v = wasm_f32x4_mul(srcv[j], wasm_f32x4_splat(id)); - const v128_t vi = wasm_i32x4_trunc_sat_f32x4(v); - - y[i].qs[4*j + 0] = wasm_i32x4_extract_lane(vi, 0); - y[i].qs[4*j + 1] = wasm_i32x4_extract_lane(vi, 1); - y[i].qs[4*j + 2] = wasm_i32x4_extract_lane(vi, 2); - y[i].qs[4*j + 3] = wasm_i32x4_extract_lane(vi, 3); - - accv = wasm_i32x4_add(accv, vi); - } - - y[i].s = d * (wasm_i32x4_extract_lane(accv, 0) + - wasm_i32x4_extract_lane(accv, 1) + - wasm_i32x4_extract_lane(accv, 2) + - wasm_i32x4_extract_lane(accv, 3)); - } -#elif defined(__AVX2__) || defined(__AVX__) - for (int i = 0; i < nb; i++) { - // Load elements into 4 AVX vectors - __m256 v0 = _mm256_loadu_ps( x ); - __m256 v1 = _mm256_loadu_ps( x + 8 ); - __m256 v2 = _mm256_loadu_ps( x + 16 ); - __m256 v3 = _mm256_loadu_ps( x + 24 ); - x += 32; - - // Compute max(abs(e)) for the block - const __m256 signBit = _mm256_set1_ps( -0.0f ); - __m256 maxAbs = _mm256_andnot_ps( signBit, v0 ); - maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v1 ) ); - maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v2 ) ); - maxAbs = _mm256_max_ps( maxAbs, _mm256_andnot_ps( signBit, v3 ) ); - - __m128 max4 = _mm_max_ps( _mm256_extractf128_ps( maxAbs, 1 ), _mm256_castps256_ps128( maxAbs ) ); - max4 = _mm_max_ps( max4, _mm_movehl_ps( max4, max4 ) ); - max4 = _mm_max_ss( max4, _mm_movehdup_ps( max4 ) ); - const float maxScalar = _mm_cvtss_f32( max4 ); - - // Quantize these floats - const float d = maxScalar / 127.f; - y[i].d = d; - const float id = ( maxScalar != 0.0f ) ? 
127.f / maxScalar : 0.0f; - const __m256 mul = _mm256_set1_ps( id ); - - // Apply the multiplier - v0 = _mm256_mul_ps( v0, mul ); - v1 = _mm256_mul_ps( v1, mul ); - v2 = _mm256_mul_ps( v2, mul ); - v3 = _mm256_mul_ps( v3, mul ); - - // Round to nearest integer - v0 = _mm256_round_ps( v0, _MM_ROUND_NEAREST ); - v1 = _mm256_round_ps( v1, _MM_ROUND_NEAREST ); - v2 = _mm256_round_ps( v2, _MM_ROUND_NEAREST ); - v3 = _mm256_round_ps( v3, _MM_ROUND_NEAREST ); - - // Convert floats to integers - __m256i i0 = _mm256_cvtps_epi32( v0 ); - __m256i i1 = _mm256_cvtps_epi32( v1 ); - __m256i i2 = _mm256_cvtps_epi32( v2 ); - __m256i i3 = _mm256_cvtps_epi32( v3 ); - -#if defined(__AVX2__) - // Compute the sum of the quants and set y[i].s - y[i].s = d * hsum_i32_8(_mm256_add_epi32(_mm256_add_epi32(i0, i1), _mm256_add_epi32(i2, i3))); - - // Convert int32 to int16 - i0 = _mm256_packs_epi32( i0, i1 ); // 0, 1, 2, 3, 8, 9, 10, 11, 4, 5, 6, 7, 12, 13, 14, 15 - i2 = _mm256_packs_epi32( i2, i3 ); // 16, 17, 18, 19, 24, 25, 26, 27, 20, 21, 22, 23, 28, 29, 30, 31 - // Convert int16 to int8 - i0 = _mm256_packs_epi16( i0, i2 ); // 0, 1, 2, 3, 8, 9, 10, 11, 16, 17, 18, 19, 24, 25, 26, 27, 4, 5, 6, 7, 12, 13, 14, 15, 20, 21, 22, 23, 28, 29, 30, 31 - - // We got our precious signed bytes, but the order is now wrong - // These AVX2 pack instructions process 16-byte pieces independently - // The following instruction is fixing the order - const __m256i perm = _mm256_setr_epi32( 0, 4, 1, 5, 2, 6, 3, 7 ); - i0 = _mm256_permutevar8x32_epi32( i0, perm ); - - _mm256_storeu_si256((__m256i *)y[i].qs, i0); -#else - // Since we don't have in AVX some necessary functions, - // we split the registers in half and call AVX2 analogs from SSE - __m128i ni0 = _mm256_castsi256_si128( i0 ); - __m128i ni1 = _mm256_extractf128_si256( i0, 1); - __m128i ni2 = _mm256_castsi256_si128( i1 ); - __m128i ni3 = _mm256_extractf128_si256( i1, 1); - __m128i ni4 = _mm256_castsi256_si128( i2 ); - __m128i ni5 = _mm256_extractf128_si256( i2, 1); - __m128i ni6 = _mm256_castsi256_si128( i3 ); - __m128i ni7 = _mm256_extractf128_si256( i3, 1); - - // Compute the sum of the quants and set y[i].s - const __m128i s0 = _mm_add_epi32(_mm_add_epi32(ni0, ni1), _mm_add_epi32(ni2, ni3)); - const __m128i s1 = _mm_add_epi32(_mm_add_epi32(ni4, ni5), _mm_add_epi32(ni6, ni7)); - y[i].s = d * hsum_i32_4(_mm_add_epi32(s0, s1)); - - // Convert int32 to int16 - ni0 = _mm_packs_epi32( ni0, ni1 ); - ni2 = _mm_packs_epi32( ni2, ni3 ); - ni4 = _mm_packs_epi32( ni4, ni5 ); - ni6 = _mm_packs_epi32( ni6, ni7 ); - // Convert int16 to int8 - ni0 = _mm_packs_epi16( ni0, ni2 ); - ni4 = _mm_packs_epi16( ni4, ni6 ); - - _mm_storeu_si128((__m128i *)(y[i].qs + 0), ni0); - _mm_storeu_si128((__m128i *)(y[i].qs + 16), ni4); -#endif - } -#elif defined(__riscv_v_intrinsic) - - size_t vl = __riscv_vsetvl_e32m4(QK8_1); - - for (int i = 0; i < nb; i++) { - // load elements - vfloat32m4_t v_x = __riscv_vle32_v_f32m4(x+i*QK8_1, vl); - - vfloat32m4_t vfabs = __riscv_vfabs_v_f32m4(v_x, vl); - vfloat32m1_t tmp = __riscv_vfmv_v_f_f32m1(0.0, vl); - vfloat32m1_t vmax = __riscv_vfredmax_vs_f32m4_f32m1(vfabs, tmp, vl); - float amax = __riscv_vfmv_f_s_f32m1_f32(vmax); - - const float d = amax / ((1 << 7) - 1); - const float id = d ? 
1.0f/d : 0.0f; - - y[i].d = d; - - vfloat32m4_t x0 = __riscv_vfmul_vf_f32m4(v_x, id, vl); - - // convert to integer - vint16m2_t vi = __riscv_vfncvt_x_f_w_i16m2(x0, vl); - vint8m1_t vs = __riscv_vncvt_x_x_w_i8m1(vi, vl); - - // store result - __riscv_vse8_v_i8m1(y[i].qs , vs, vl); - - // compute sum for y[i].s - vint16m1_t tmp2 = __riscv_vmv_v_x_i16m1(0, vl); - vint16m1_t vwrs = __riscv_vwredsum_vs_i8m1_i16m1(vs, tmp2, vl); - - // set y[i].s - int sum = __riscv_vmv_x_s_i16m1_i16(vwrs); - y[i].s = sum*d; - } -#else - // scalar - quantize_row_q8_1_reference(x, y, k); -#endif -} - -static void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k) { - static const int qk = QK4_0; - - assert(k % qk == 0); - - const int nb = k / qk; - - for (int i = 0; i < nb; i++) { - const float d = GGML_FP16_TO_FP32(x[i].d); - - for (int j = 0; j < qk/2; ++j) { - const int x0 = (x[i].qs[j] & 0x0F) - 8; - const int x1 = (x[i].qs[j] >> 4) - 8; - - y[i*qk + j + 0 ] = x0*d; - y[i*qk + j + qk/2] = x1*d; - } - } -} - -static void dequantize_row_q4_1(const block_q4_1 * restrict x, float * restrict y, int k) { - static const int qk = QK4_1; - - assert(k % qk == 0); - - const int nb = k / qk; - - for (int i = 0; i < nb; i++) { - const float d = GGML_FP16_TO_FP32(x[i].d); - const float m = GGML_FP16_TO_FP32(x[i].m); - - for (int j = 0; j < qk/2; ++j) { - const int x0 = (x[i].qs[j] & 0x0F); - const int x1 = (x[i].qs[j] >> 4); - - y[i*qk + j + 0 ] = x0*d + m; - y[i*qk + j + qk/2] = x1*d + m; - } - } -} - -static void dequantize_row_q5_0(const block_q5_0 * restrict x, float * restrict y, int k) { - static const int qk = QK5_0; - - assert(k % qk == 0); - - const int nb = k / qk; - - for (int i = 0; i < nb; i++) { - const float d = GGML_FP16_TO_FP32(x[i].d); - - uint32_t qh; - memcpy(&qh, x[i].qh, sizeof(qh)); - - for (int j = 0; j < qk/2; ++j) { - const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; - const uint8_t xh_1 = ((qh >> (j + 12)) ) & 0x10; - - const int32_t x0 = ((x[i].qs[j] & 0x0F) | xh_0) - 16; - const int32_t x1 = ((x[i].qs[j] >> 4) | xh_1) - 16; - - y[i*qk + j + 0 ] = x0*d; - y[i*qk + j + qk/2] = x1*d; - } - } -} - -static void dequantize_row_q5_1(const block_q5_1 * restrict x, float * restrict y, int k) { - static const int qk = QK5_1; - - assert(k % qk == 0); - - const int nb = k / qk; - - for (int i = 0; i < nb; i++) { - const float d = GGML_FP16_TO_FP32(x[i].d); - const float m = GGML_FP16_TO_FP32(x[i].m); - - uint32_t qh; - memcpy(&qh, x[i].qh, sizeof(qh)); - - for (int j = 0; j < qk/2; ++j) { - const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; - const uint8_t xh_1 = ((qh >> (j + 12)) ) & 0x10; - - const int x0 = (x[i].qs[j] & 0x0F) | xh_0; - const int x1 = (x[i].qs[j] >> 4) | xh_1; - - y[i*qk + j + 0 ] = x0*d + m; - y[i*qk + j + qk/2] = x1*d + m; - } - } -} - -static void dequantize_row_q8_0(const void * restrict vx, float * restrict y, int k) { - static const int qk = QK8_0; - - assert(k % qk == 0); - - const int nb = k / qk; - - const block_q8_0 * restrict x = vx; - - for (int i = 0; i < nb; i++) { - const float d = GGML_FP16_TO_FP32(x[i].d); - - for (int j = 0; j < qk; ++j) { - y[i*qk + j] = x[i].qs[j]*d; - } - } -} - static void ggml_vec_dot_f32(const int n, float * restrict s, const float * restrict x, const float * restrict y); static void ggml_vec_dot_f16(const int n, float * restrict s, ggml_fp16_t * restrict x, ggml_fp16_t * restrict y); -static void ggml_vec_dot_q4_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy); -static void 
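// These forward declarations existed only so the type_traits table below
// could reference the dot-product kernels ahead of their definitions; now
// that the kernels move out of ggml.c, the declarations live in
// ggml-quants.h instead and this block is dropped.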
ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy); -static void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy); -static void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy); -static void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy); static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { [GGML_TYPE_I8] = { @@ -1740,7 +659,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .blck_size = QK8_0, .type_size = sizeof(block_q8_0), .is_quantized = true, - .to_float = dequantize_row_q8_0, + .to_float = (ggml_to_float_t) dequantize_row_q8_0, .from_float = quantize_row_q8_0, .from_float_reference = (ggml_from_float_t) quantize_row_q8_0_reference, .vec_dot = ggml_vec_dot_q8_0_q8_0, @@ -1755,7 +674,6 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q8_1_reference, .vec_dot_type = GGML_TYPE_Q8_1, }, -#ifdef GGML_USE_K_QUANTS [GGML_TYPE_Q2_K] = { .type_name = "q2_K", .blck_size = QK_K, @@ -1818,7 +736,6 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .is_quantized = true, .from_float = quantize_row_q8_K, } -#endif }; // For internal test use @@ -2442,1218 +1359,6 @@ static void ggml_vec_dot_f16(const int n, float * restrict s, ggml_fp16_t * rest *s = sumf; } -static void ggml_vec_dot_q4_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { - const int qk = QK8_0; - const int nb = n / qk; - - assert(n % qk == 0); - - const block_q4_0 * restrict x = vx; - const block_q8_0 * restrict y = vy; - -#if defined(__ARM_NEON) - float32x4_t sumv0 = vdupq_n_f32(0.0f); - float32x4_t sumv1 = vdupq_n_f32(0.0f); - - GGML_ASSERT(nb % 2 == 0); // TODO: handle odd nb - for (int i = 0; i < nb; i += 2) { - const block_q4_0 * restrict x0 = &x[i + 0]; - const block_q4_0 * restrict x1 = &x[i + 1]; - const block_q8_0 * restrict y0 = &y[i + 0]; - const block_q8_0 * restrict y1 = &y[i + 1]; - - const uint8x16_t m4b = vdupq_n_u8(0x0F); - const int8x16_t s8b = vdupq_n_s8(0x8); - - const uint8x16_t v0_0 = vld1q_u8(x0->qs); - const uint8x16_t v0_1 = vld1q_u8(x1->qs); - - // 4-bit -> 8-bit - const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); - const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); - const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); - const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); - - // sub 8 - const int8x16_t v0_0ls = vsubq_s8(v0_0l, s8b); - const int8x16_t v0_0hs = vsubq_s8(v0_0h, s8b); - const int8x16_t v0_1ls = vsubq_s8(v0_1l, s8b); - const int8x16_t v0_1hs = vsubq_s8(v0_1h, s8b); - - // load y - const int8x16_t v1_0l = vld1q_s8(y0->qs); - const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); - const int8x16_t v1_1l = vld1q_s8(y1->qs); - const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); - -#if defined(__ARM_FEATURE_DOTPROD) - // dot product into int32x4_t - const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); - const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#else - const int16x8_t pl0l 
= vmull_s8(vget_low_s8 (v0_0ls), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0ls), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hs), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hs), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1ls), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1ls), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hs), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hs), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif - } - - *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); -#elif defined(__AVX2__) - // Initialize accumulator with zeros - __m256 acc = _mm256_setzero_ps(); - - // Main loop - for (int i = 0; i < nb; ++i) { - /* Compute combined scale for the block */ - const __m256 d = _mm256_set1_ps( GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d) ); - - __m256i bx = bytes_from_nibbles_32(x[i].qs); - - // Now we have a vector with bytes in [ 0 .. 15 ] interval. Offset them into [ -8 .. +7 ] interval. - const __m256i off = _mm256_set1_epi8( 8 ); - bx = _mm256_sub_epi8( bx, off ); - - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); - - const __m256 q = mul_sum_i8_pairs_float(bx, by); - - /* Multiply q with scale and accumulate */ - acc = _mm256_fmadd_ps( d, q, acc ); - } - - *s = hsum_float_8(acc); -#elif defined(__AVX__) - // Initialize accumulator with zeros - __m256 acc = _mm256_setzero_ps(); - - // Main loop - for (int i = 0; i < nb; ++i) { - // Compute combined scale for the block - const __m256 d = _mm256_set1_ps( GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d) ); - - const __m128i lowMask = _mm_set1_epi8(0xF); - const __m128i off = _mm_set1_epi8(8); - - const __m128i tmp = _mm_loadu_si128((const __m128i *)x[i].qs); - - __m128i bx = _mm_and_si128(lowMask, tmp); - __m128i by = _mm_loadu_si128((const __m128i *)y[i].qs); - bx = _mm_sub_epi8(bx, off); - const __m128i i32_0 = mul_sum_i8_pairs(bx, by); - - bx = _mm_and_si128(lowMask, _mm_srli_epi64(tmp, 4)); - by = _mm_loadu_si128((const __m128i *)(y[i].qs + 16)); - bx = _mm_sub_epi8(bx, off); - const __m128i i32_1 = mul_sum_i8_pairs(bx, by); - - // Convert int32_t to float - __m256 p = _mm256_cvtepi32_ps(MM256_SET_M128I(i32_0, i32_1)); - - // Apply the scale, and accumulate - acc = _mm256_add_ps(_mm256_mul_ps( d, p ), acc); - } - - *s = hsum_float_8(acc); -#elif defined(__SSSE3__) - // set constants - const __m128i lowMask = _mm_set1_epi8(0xF); - const __m128i off = _mm_set1_epi8(8); - - // Initialize accumulator with zeros - __m128 acc_0 = _mm_setzero_ps(); - __m128 acc_1 = _mm_setzero_ps(); - __m128 acc_2 = _mm_setzero_ps(); - __m128 acc_3 = _mm_setzero_ps(); - - // First round without accumulation - { - _mm_prefetch(&x[0] + sizeof(block_q4_0), _MM_HINT_T0); - _mm_prefetch(&y[0] + sizeof(block_q8_0), _MM_HINT_T0); - - // Compute combined scale for the block 0 and 1 - const __m128 d_0_1 = _mm_set1_ps( 
GGML_FP16_TO_FP32(x[0].d) * GGML_FP16_TO_FP32(y[0].d) ); - - const __m128i tmp_0_1 = _mm_loadu_si128((const __m128i *)x[0].qs); - - __m128i bx_0 = _mm_and_si128(lowMask, tmp_0_1); - __m128i by_0 = _mm_loadu_si128((const __m128i *)y[0].qs); - bx_0 = _mm_sub_epi8(bx_0, off); - const __m128i i32_0 = mul_sum_i8_pairs(bx_0, by_0); - - __m128i bx_1 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_0_1, 4)); - __m128i by_1 = _mm_loadu_si128((const __m128i *)(y[0].qs + 16)); - bx_1 = _mm_sub_epi8(bx_1, off); - const __m128i i32_1 = mul_sum_i8_pairs(bx_1, by_1); - - _mm_prefetch(&x[1] + sizeof(block_q4_0), _MM_HINT_T0); - _mm_prefetch(&y[1] + sizeof(block_q8_0), _MM_HINT_T0); - - // Compute combined scale for the block 2 and 3 - const __m128 d_2_3 = _mm_set1_ps( GGML_FP16_TO_FP32(x[1].d) * GGML_FP16_TO_FP32(y[1].d) ); - - const __m128i tmp_2_3 = _mm_loadu_si128((const __m128i *)x[1].qs); - - __m128i bx_2 = _mm_and_si128(lowMask, tmp_2_3); - __m128i by_2 = _mm_loadu_si128((const __m128i *)y[1].qs); - bx_2 = _mm_sub_epi8(bx_2, off); - const __m128i i32_2 = mul_sum_i8_pairs(bx_2, by_2); - - __m128i bx_3 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_2_3, 4)); - __m128i by_3 = _mm_loadu_si128((const __m128i *)(y[1].qs + 16)); - bx_3 = _mm_sub_epi8(bx_3, off); - const __m128i i32_3 = mul_sum_i8_pairs(bx_3, by_3); - - // Convert int32_t to float - __m128 p0 = _mm_cvtepi32_ps(i32_0); - __m128 p1 = _mm_cvtepi32_ps(i32_1); - __m128 p2 = _mm_cvtepi32_ps(i32_2); - __m128 p3 = _mm_cvtepi32_ps(i32_3); - - // Apply the scale - acc_0 = _mm_mul_ps( d_0_1, p0 ); - acc_1 = _mm_mul_ps( d_0_1, p1 ); - acc_2 = _mm_mul_ps( d_2_3, p2 ); - acc_3 = _mm_mul_ps( d_2_3, p3 ); - } - - // Main loop - GGML_ASSERT(nb % 2 == 0); // TODO: handle odd nb - for (int i = 2; i < nb; i+=2) { - _mm_prefetch(&x[i] + sizeof(block_q4_0), _MM_HINT_T0); - _mm_prefetch(&y[i] + sizeof(block_q8_0), _MM_HINT_T0); - - // Compute combined scale for the block 0 and 1 - const __m128 d_0_1 = _mm_set1_ps( GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d) ); - - const __m128i tmp_0_1 = _mm_loadu_si128((const __m128i *)x[i].qs); - - __m128i bx_0 = _mm_and_si128(lowMask, tmp_0_1); - __m128i by_0 = _mm_loadu_si128((const __m128i *)y[i].qs); - bx_0 = _mm_sub_epi8(bx_0, off); - const __m128i i32_0 = mul_sum_i8_pairs(bx_0, by_0); - - __m128i bx_1 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_0_1, 4)); - __m128i by_1 = _mm_loadu_si128((const __m128i *)(y[i].qs + 16)); - bx_1 = _mm_sub_epi8(bx_1, off); - const __m128i i32_1 = mul_sum_i8_pairs(bx_1, by_1); - - _mm_prefetch(&x[i] + 2 * sizeof(block_q4_0), _MM_HINT_T0); - _mm_prefetch(&y[i] + 2 * sizeof(block_q8_0), _MM_HINT_T0); - - // Compute combined scale for the block 2 and 3 - const __m128 d_2_3 = _mm_set1_ps( GGML_FP16_TO_FP32(x[i + 1].d) * GGML_FP16_TO_FP32(y[i + 1].d) ); - - const __m128i tmp_2_3 = _mm_loadu_si128((const __m128i *)x[i + 1].qs); - - __m128i bx_2 = _mm_and_si128(lowMask, tmp_2_3); - __m128i by_2 = _mm_loadu_si128((const __m128i *)y[i + 1].qs); - bx_2 = _mm_sub_epi8(bx_2, off); - const __m128i i32_2 = mul_sum_i8_pairs(bx_2, by_2); - - __m128i bx_3 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp_2_3, 4)); - __m128i by_3 = _mm_loadu_si128((const __m128i *)(y[i + 1].qs + 16)); - bx_3 = _mm_sub_epi8(bx_3, off); - const __m128i i32_3 = mul_sum_i8_pairs(bx_3, by_3); - - // Convert int32_t to float - __m128 p0 = _mm_cvtepi32_ps(i32_0); - __m128 p1 = _mm_cvtepi32_ps(i32_1); - __m128 p2 = _mm_cvtepi32_ps(i32_2); - __m128 p3 = _mm_cvtepi32_ps(i32_3); - - // Apply the scale - __m128 p0_d = _mm_mul_ps( d_0_1, p0 ); 
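// mul_sum_i8_pairs (used above) is built on _mm_maddubs_epi16, which
// multiplies *unsigned* bytes by *signed* bytes. Taking |x| and copying the
// sign of x onto y preserves every product: |x| * (sign(x) * y) == x * y.
// A hedged scalar sketch of that identity (illustrative only):
#if 0
static int mul_sum_i8_pairs_scalar(const int8_t * x, const int8_t * y, int n) {
    int sum = 0;
    for (int i = 0; i < n; ++i) {
        const int ax = x[i] < 0 ? -x[i] : x[i]; // |x|, and 0 when x == 0
        const int sy = x[i] < 0 ? -y[i] : y[i]; // sign(x) * y
        sum += ax * sy;                         // == x[i] * y[i]
    }
    return sum;
}
#endif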
- __m128 p1_d = _mm_mul_ps( d_0_1, p1 ); - __m128 p2_d = _mm_mul_ps( d_2_3, p2 ); - __m128 p3_d = _mm_mul_ps( d_2_3, p3 ); - - // Acummulate - acc_0 = _mm_add_ps(p0_d, acc_0); - acc_1 = _mm_add_ps(p1_d, acc_1); - acc_2 = _mm_add_ps(p2_d, acc_2); - acc_3 = _mm_add_ps(p3_d, acc_3); - } - - *s = hsum_float_4x4(acc_0, acc_1, acc_2, acc_3); -#elif defined(__riscv_v_intrinsic) - float sumf = 0.0; - - size_t vl = __riscv_vsetvl_e8m1(qk/2); - - for (int i = 0; i < nb; i++) { - // load elements - vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[i].qs, vl); - - vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[i].qs, vl); - vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[i].qs+16, vl); - - // mask and store lower part of x, and then upper part - vuint8mf2_t x_a = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); - vuint8mf2_t x_l = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); - - vint8mf2_t x_ai = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); - vint8mf2_t x_li = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); - - // subtract offset - vint8mf2_t v0 = __riscv_vsub_vx_i8mf2(x_ai, 8, vl); - vint8mf2_t v1 = __riscv_vsub_vx_i8mf2(x_li, 8, vl); - - vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); - vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); - - vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); - - vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); - vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); - - int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); - - sumf += sumi*GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d); - } - - *s = sumf; -#else - // scalar - float sumf = 0.0; - - for (int i = 0; i < nb; i++) { - int sumi = 0; - - for (int j = 0; j < qk/2; ++j) { - const int v0 = (x[i].qs[j] & 0x0F) - 8; - const int v1 = (x[i].qs[j] >> 4) - 8; - - sumi += (v0 * y[i].qs[j]) + (v1 * y[i].qs[j + qk/2]); - } - - sumf += sumi*GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d); - } - - *s = sumf; -#endif -} - -static void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { - const int qk = QK8_1; - const int nb = n / qk; - - assert(n % qk == 0); - - const block_q4_1 * restrict x = vx; - const block_q8_1 * restrict y = vy; - - // TODO: add WASM SIMD -#if defined(__ARM_NEON) - float32x4_t sumv0 = vdupq_n_f32(0.0f); - float32x4_t sumv1 = vdupq_n_f32(0.0f); - - float summs = 0; - - GGML_ASSERT(nb % 2 == 0); // TODO: handle odd nb - for (int i = 0; i < nb; i += 2) { - const block_q4_1 * restrict x0 = &x[i + 0]; - const block_q4_1 * restrict x1 = &x[i + 1]; - const block_q8_1 * restrict y0 = &y[i + 0]; - const block_q8_1 * restrict y1 = &y[i + 1]; - - summs += GGML_FP16_TO_FP32(x0->m) * y0->s + GGML_FP16_TO_FP32(x1->m) * y1->s; - - const uint8x16_t m4b = vdupq_n_u8(0x0F); - - const uint8x16_t v0_0 = vld1q_u8(x0->qs); - const uint8x16_t v0_1 = vld1q_u8(x1->qs); - - // 4-bit -> 8-bit - const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); - const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); - const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); - const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); - - // load y - const int8x16_t v1_0l = vld1q_s8(y0->qs); - const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); - const int8x16_t v1_1l = vld1q_s8(y1->qs); - const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); - -#if defined(__ARM_FEATURE_DOTPROD) - // dot product into int32x4_t - const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); - const int32x4_t p_1 = 
vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*y1->d); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0l), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0l), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0h), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0h), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1l), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1l), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1h), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1h), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*y1->d); -#endif - } - - *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs; -#elif defined(__AVX2__) || defined(__AVX__) - // Initialize accumulator with zeros - __m256 acc = _mm256_setzero_ps(); - - float summs = 0; - - // Main loop - for (int i = 0; i < nb; ++i) { - const float d0 = GGML_FP16_TO_FP32(x[i].d); - const float d1 = y[i].d; - - summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; - - const __m256 d0v = _mm256_set1_ps( d0 ); - const __m256 d1v = _mm256_set1_ps( d1 ); - - // Compute combined scales - const __m256 d0d1 = _mm256_mul_ps( d0v, d1v ); - - // Load 16 bytes, and unpack 4 bit fields into bytes, making 32 bytes - const __m256i bx = bytes_from_nibbles_32(x[i].qs); - const __m256i by = _mm256_loadu_si256( (const __m256i *)y[i].qs ); - - const __m256 xy = mul_sum_us8_pairs_float(bx, by); - - // Accumulate d0*d1*x*y -#if defined(__AVX2__) - acc = _mm256_fmadd_ps( d0d1, xy, acc ); -#else - acc = _mm256_add_ps( _mm256_mul_ps( d0d1, xy ), acc ); -#endif - } - - *s = hsum_float_8(acc) + summs; -#elif defined(__riscv_v_intrinsic) - float sumf = 0.0; - - size_t vl = __riscv_vsetvl_e8m1(qk/2); - - for (int i = 0; i < nb; i++) { - // load elements - vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[i].qs, vl); - - vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[i].qs, vl); - vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[i].qs+16, vl); - - // mask and store lower part of x, and then upper part - vuint8mf2_t x_a = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); - vuint8mf2_t x_l = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); - - vint8mf2_t v0 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); - vint8mf2_t v1 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); - - vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); - vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); - - vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); - - vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); - vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); - - int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); - - sumf += (GGML_FP16_TO_FP32(x[i].d)*y[i].d)*sumi + GGML_FP16_TO_FP32(x[i].m)*y[i].s; - } - - *s = sumf; -#else - // scalar - float sumf = 0.0; - - for (int i = 0; i < nb; i++) { - int sumi = 0; - - for (int j = 
0; j < qk/2; ++j) { - const int v0 = (x[i].qs[j] & 0x0F); - const int v1 = (x[i].qs[j] >> 4); - - sumi += (v0 * y[i].qs[j]) + (v1 * y[i].qs[j + qk/2]); - } - - sumf += (GGML_FP16_TO_FP32(x[i].d)*y[i].d)*sumi + GGML_FP16_TO_FP32(x[i].m)*y[i].s; - } - - *s = sumf; -#endif -} - -static void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { - const int qk = QK8_0; - const int nb = n / qk; - - assert(n % qk == 0); - assert(qk == QK5_0); - - const block_q5_0 * restrict x = vx; - const block_q8_0 * restrict y = vy; - -#if defined(__ARM_NEON) - float32x4_t sumv0 = vdupq_n_f32(0.0f); - float32x4_t sumv1 = vdupq_n_f32(0.0f); - - uint32_t qh0; - uint32_t qh1; - - uint64_t tmp0[4]; - uint64_t tmp1[4]; - - GGML_ASSERT(nb % 2 == 0); // TODO: handle odd nb - for (int i = 0; i < nb; i += 2) { - const block_q5_0 * restrict x0 = &x[i]; - const block_q5_0 * restrict x1 = &x[i + 1]; - const block_q8_0 * restrict y0 = &y[i]; - const block_q8_0 * restrict y1 = &y[i + 1]; - - const uint8x16_t m4b = vdupq_n_u8(0x0F); - - // extract the 5th bit via lookup table ((!b) << 4) - memcpy(&qh0, x0->qh, sizeof(qh0)); - memcpy(&qh1, x1->qh, sizeof(qh1)); - - tmp0[0] = table_b2b_1[(qh0 >> 0) & 0xFF]; - tmp0[1] = table_b2b_1[(qh0 >> 8) & 0xFF]; - tmp0[2] = table_b2b_1[(qh0 >> 16) & 0xFF]; - tmp0[3] = table_b2b_1[(qh0 >> 24) ]; - - tmp1[0] = table_b2b_1[(qh1 >> 0) & 0xFF]; - tmp1[1] = table_b2b_1[(qh1 >> 8) & 0xFF]; - tmp1[2] = table_b2b_1[(qh1 >> 16) & 0xFF]; - tmp1[3] = table_b2b_1[(qh1 >> 24) ]; - - const int8x16_t qhl0 = vld1q_s8((const int8_t *)(tmp0 + 0)); - const int8x16_t qhh0 = vld1q_s8((const int8_t *)(tmp0 + 2)); - const int8x16_t qhl1 = vld1q_s8((const int8_t *)(tmp1 + 0)); - const int8x16_t qhh1 = vld1q_s8((const int8_t *)(tmp1 + 2)); - - const uint8x16_t v0_0 = vld1q_u8(x0->qs); - const uint8x16_t v0_1 = vld1q_u8(x1->qs); - - // 4-bit -> 8-bit - int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); - int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); - int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); - int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); - - // add high bit and sub 16 (equivalent to sub 0x10 when bit is zero) - const int8x16_t v0_0lf = vsubq_s8(v0_0l, qhl0); - const int8x16_t v0_0hf = vsubq_s8(v0_0h, qhh0); - const int8x16_t v0_1lf = vsubq_s8(v0_1l, qhl1); - const int8x16_t v0_1hf = vsubq_s8(v0_1h, qhh1); - - // load y - const int8x16_t v1_0l = vld1q_s8(y0->qs); - const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); - const int8x16_t v1_1l = vld1q_s8(y1->qs); - const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); - -#if defined(__ARM_FEATURE_DOTPROD) - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), - vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), - vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hf), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hf), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1lf), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1lf), vget_high_s8(v1_1l)); - const 
int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hf), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hf), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif - } - - *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); -#elif defined(__wasm_simd128__) - v128_t sumv = wasm_f32x4_splat(0.0f); - - uint32_t qh; - uint64_t tmp[4]; - - // TODO: check if unrolling this is better - for (int i = 0; i < nb; ++i) { - const block_q5_0 * restrict x0 = &x[i]; - const block_q8_0 * restrict y0 = &y[i]; - - const v128_t m4b = wasm_i8x16_splat(0x0F); - - // extract the 5th bit - memcpy(&qh, x0->qh, sizeof(qh)); - - tmp[0] = table_b2b_1[(qh >> 0) & 0xFF]; - tmp[1] = table_b2b_1[(qh >> 8) & 0xFF]; - tmp[2] = table_b2b_1[(qh >> 16) & 0xFF]; - tmp[3] = table_b2b_1[(qh >> 24) ]; - - const v128_t qhl = wasm_v128_load(tmp + 0); - const v128_t qhh = wasm_v128_load(tmp + 2); - - const v128_t v0 = wasm_v128_load(x0->qs); - - // 4-bit -> 8-bit - const v128_t v0l = wasm_v128_and (v0, m4b); - const v128_t v0h = wasm_u8x16_shr(v0, 4); - - // add high bit and sub 16 (equivalent to sub 0x10 when bit is zero) - const v128_t v0lf = wasm_i8x16_sub(v0l, qhl); - const v128_t v0hf = wasm_i8x16_sub(v0h, qhh); - - // load y - const v128_t v1l = wasm_v128_load(y0->qs); - const v128_t v1h = wasm_v128_load(y0->qs + 16); - - // int8x16 -> int16x8 - const v128_t v0lfl = wasm_i16x8_extend_low_i8x16 (v0lf); - const v128_t v0lfh = wasm_i16x8_extend_high_i8x16(v0lf); - const v128_t v0hfl = wasm_i16x8_extend_low_i8x16 (v0hf); - const v128_t v0hfh = wasm_i16x8_extend_high_i8x16(v0hf); - - const v128_t v1ll = wasm_i16x8_extend_low_i8x16 (v1l); - const v128_t v1lh = wasm_i16x8_extend_high_i8x16(v1l); - const v128_t v1hl = wasm_i16x8_extend_low_i8x16 (v1h); - const v128_t v1hh = wasm_i16x8_extend_high_i8x16(v1h); - - // dot product - sumv = wasm_f32x4_add(sumv, wasm_f32x4_mul(wasm_f32x4_convert_i32x4( - wasm_i32x4_add( - wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0lfl, v1ll), - wasm_i32x4_dot_i16x8(v0lfh, v1lh)), - wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0hfl, v1hl), - wasm_i32x4_dot_i16x8(v0hfh, v1hh)))), - wasm_f32x4_splat(GGML_FP16_TO_FP32(x0->d) * GGML_FP16_TO_FP32(y0->d)))); - } - - *s = wasm_f32x4_extract_lane(sumv, 0) + wasm_f32x4_extract_lane(sumv, 1) + - wasm_f32x4_extract_lane(sumv, 2) + wasm_f32x4_extract_lane(sumv, 3); -#elif defined(__AVX2__) - // Initialize accumulator with zeros - __m256 acc = _mm256_setzero_ps(); - - // Main loop - for (int i = 0; i < nb; i++) { - /* Compute combined scale for the block */ - const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); - - __m256i bx = bytes_from_nibbles_32(x[i].qs); - __m256i bxhi = bytes_from_bits_32(x[i].qh); - bxhi = _mm256_andnot_si256(bxhi, _mm256_set1_epi8((char)0xF0)); - bx = _mm256_or_si256(bx, bxhi); - - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); - - const __m256 q = mul_sum_i8_pairs_float(bx, by); - - /* Multiply q with scale and accumulate */ - acc = _mm256_fmadd_ps(d, q, acc); - } - - *s = hsum_float_8(acc); -#elif 
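The table_b2b_0/table_b2b_1 lookup tables used by the NEON and WASM paths above are defined elsewhere in ggml.c and do not appear in this diff. A sketch of how such tables can be built, assuming the byte order implied by the scalar fallbacks (index bit j lands in byte j): each entry expands 8 packed high bits into 8 bytes holding the bit at position 4, so a vector OR splices the 5th bit in for q5_1 ((b) << 4, table_b2b_0), while q5_0 stores the negated form ((!b) << 4, table_b2b_1) and subtracts it, which adds the high bit and applies the -16 offset in one step:

    static uint64_t b2b_0[256]; // ( b) << 4, per byte
    static uint64_t b2b_1[256]; // (!b) << 4, per byte

    static void init_b2b_tables(void) {
        for (int i = 0; i < 256; ++i) {
            uint64_t e0 = 0, e1 = 0;
            for (int b = 0; b < 8; ++b) {
                const uint64_t bit = (i >> b) & 1;
                e0 |= ( bit      << 4) << (8*b);
                e1 |= ((bit ^ 1) << 4) << (8*b);
            }
            b2b_0[i] = e0;
            b2b_1[i] = e1;
        }
    }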
defined(__AVX__) - // Initialize accumulator with zeros - __m256 acc = _mm256_setzero_ps(); - __m128i mask = _mm_set1_epi8((char)0xF0); - - // Main loop - for (int i = 0; i < nb; i++) { - /* Compute combined scale for the block */ - const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); - - __m256i bx = bytes_from_nibbles_32(x[i].qs); - const __m256i bxhi = bytes_from_bits_32(x[i].qh); - __m128i bxhil = _mm256_castsi256_si128(bxhi); - __m128i bxhih = _mm256_extractf128_si256(bxhi, 1); - bxhil = _mm_andnot_si128(bxhil, mask); - bxhih = _mm_andnot_si128(bxhih, mask); - __m128i bxl = _mm256_castsi256_si128(bx); - __m128i bxh = _mm256_extractf128_si256(bx, 1); - bxl = _mm_or_si128(bxl, bxhil); - bxh = _mm_or_si128(bxh, bxhih); - bx = MM256_SET_M128I(bxh, bxl); - - const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); - - const __m256 q = mul_sum_i8_pairs_float(bx, by); - - /* Multiply q with scale and accumulate */ - acc = _mm256_add_ps(_mm256_mul_ps(d, q), acc); - } - - *s = hsum_float_8(acc); -#elif defined(__riscv_v_intrinsic) - float sumf = 0.0; - - uint32_t qh; - - size_t vl = __riscv_vsetvl_e8m1(qk/2); - - // These tempory registers are for masking and shift operations - vuint32m2_t vt_1 = __riscv_vid_v_u32m2(vl); - vuint32m2_t vt_2 = __riscv_vsll_vv_u32m2(__riscv_vmv_v_x_u32m2(1, vl), vt_1, vl); - - vuint32m2_t vt_3 = __riscv_vsll_vx_u32m2(vt_2, 16, vl); - vuint32m2_t vt_4 = __riscv_vadd_vx_u32m2(vt_1, 12, vl); - - for (int i = 0; i < nb; i++) { - memcpy(&qh, x[i].qh, sizeof(uint32_t)); - - // ((qh & (1u << (j + 0 ))) >> (j + 0 )) << 4; - vuint32m2_t xha_0 = __riscv_vand_vx_u32m2(vt_2, qh, vl); - vuint32m2_t xhr_0 = __riscv_vsrl_vv_u32m2(xha_0, vt_1, vl); - vuint32m2_t xhl_0 = __riscv_vsll_vx_u32m2(xhr_0, 4, vl); - - // ((qh & (1u << (j + 16))) >> (j + 12)); - vuint32m2_t xha_1 = __riscv_vand_vx_u32m2(vt_3, qh, vl); - vuint32m2_t xhl_1 = __riscv_vsrl_vv_u32m2(xha_1, vt_4, vl); - - // narrowing - vuint16m1_t xhc_0 = __riscv_vncvt_x_x_w_u16m1(xhl_0, vl); - vuint8mf2_t xh_0 = __riscv_vncvt_x_x_w_u8mf2(xhc_0, vl); - - vuint16m1_t xhc_1 = __riscv_vncvt_x_x_w_u16m1(xhl_1, vl); - vuint8mf2_t xh_1 = __riscv_vncvt_x_x_w_u8mf2(xhc_1, vl); - - // load - vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[i].qs, vl); - - vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[i].qs, vl); - vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[i].qs+16, vl); - - vuint8mf2_t x_at = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); - vuint8mf2_t x_lt = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); - - vuint8mf2_t x_a = __riscv_vor_vv_u8mf2(x_at, xh_0, vl); - vuint8mf2_t x_l = __riscv_vor_vv_u8mf2(x_lt, xh_1, vl); - - vint8mf2_t x_ai = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); - vint8mf2_t x_li = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); - - vint8mf2_t v0 = __riscv_vsub_vx_i8mf2(x_ai, 16, vl); - vint8mf2_t v1 = __riscv_vsub_vx_i8mf2(x_li, 16, vl); - - vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); - vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); - - vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); - - vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); - vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); - - int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); - - sumf += (GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d)) * sumi; - } - - *s = sumf; -#else - // scalar - float sumf = 0.0; - - for (int i = 0; i < nb; i++) { - uint32_t qh; - memcpy(&qh, x[i].qh, sizeof(qh)); - - int sumi = 0; - - for (int j = 0; j < qk/2; ++j) { - const uint8_t xh_0 = ((qh & (1u << (j + 0 
))) >> (j + 0 )) << 4; - const uint8_t xh_1 = ((qh & (1u << (j + 16))) >> (j + 12)); - - const int32_t x0 = ((x[i].qs[j] & 0x0F) | xh_0) - 16; - const int32_t x1 = ((x[i].qs[j] >> 4) | xh_1) - 16; - - sumi += (x0 * y[i].qs[j]) + (x1 * y[i].qs[j + qk/2]); - } - - sumf += (GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d)) * sumi; - } - - *s = sumf; -#endif -} - -static void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { - const int qk = QK8_1; - const int nb = n / qk; - - assert(n % qk == 0); - assert(qk == QK5_1); - - const block_q5_1 * restrict x = vx; - const block_q8_1 * restrict y = vy; - -#if defined(__ARM_NEON) - float32x4_t sumv0 = vdupq_n_f32(0.0f); - float32x4_t sumv1 = vdupq_n_f32(0.0f); - - float summs0 = 0.0f; - float summs1 = 0.0f; - - uint32_t qh0; - uint32_t qh1; - - uint64_t tmp0[4]; - uint64_t tmp1[4]; - - GGML_ASSERT(nb % 2 == 0); // TODO: handle odd nb - for (int i = 0; i < nb; i += 2) { - const block_q5_1 * restrict x0 = &x[i]; - const block_q5_1 * restrict x1 = &x[i + 1]; - const block_q8_1 * restrict y0 = &y[i]; - const block_q8_1 * restrict y1 = &y[i + 1]; - - const uint8x16_t m4b = vdupq_n_u8(0x0F); - - summs0 += GGML_FP16_TO_FP32(x0->m) * y0->s; - summs1 += GGML_FP16_TO_FP32(x1->m) * y1->s; - - // extract the 5th bit via lookup table ((b) << 4) - memcpy(&qh0, x0->qh, sizeof(qh0)); - memcpy(&qh1, x1->qh, sizeof(qh1)); - - tmp0[0] = table_b2b_0[(qh0 >> 0) & 0xFF]; - tmp0[1] = table_b2b_0[(qh0 >> 8) & 0xFF]; - tmp0[2] = table_b2b_0[(qh0 >> 16) & 0xFF]; - tmp0[3] = table_b2b_0[(qh0 >> 24) ]; - - tmp1[0] = table_b2b_0[(qh1 >> 0) & 0xFF]; - tmp1[1] = table_b2b_0[(qh1 >> 8) & 0xFF]; - tmp1[2] = table_b2b_0[(qh1 >> 16) & 0xFF]; - tmp1[3] = table_b2b_0[(qh1 >> 24) ]; - - const int8x16_t qhl0 = vld1q_s8((const int8_t *)(tmp0 + 0)); - const int8x16_t qhh0 = vld1q_s8((const int8_t *)(tmp0 + 2)); - const int8x16_t qhl1 = vld1q_s8((const int8_t *)(tmp1 + 0)); - const int8x16_t qhh1 = vld1q_s8((const int8_t *)(tmp1 + 2)); - - const uint8x16_t v0_0 = vld1q_u8(x0->qs); - const uint8x16_t v0_1 = vld1q_u8(x1->qs); - - // 4-bit -> 8-bit - const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); - const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); - const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); - const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); - - // add high bit - const int8x16_t v0_0lf = vorrq_s8(v0_0l, qhl0); - const int8x16_t v0_0hf = vorrq_s8(v0_0h, qhh0); - const int8x16_t v0_1lf = vorrq_s8(v0_1l, qhl1); - const int8x16_t v0_1hf = vorrq_s8(v0_1h, qhh1); - - // load y - const int8x16_t v1_0l = vld1q_s8(y0->qs); - const int8x16_t v1_0h = vld1q_s8(y0->qs + 16); - const int8x16_t v1_1l = vld1q_s8(y1->qs); - const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); - -#if defined(__ARM_FEATURE_DOTPROD) - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), - vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), - vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*y1->d); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hf), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hf), vget_high_s8(v1_0h)); - - 
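The summs0/summs1 accumulators above exist because q5_1 stores value = d*q + m: each block's dot product against q8_1 splits into d_x*d_y*sum(q*p) plus m_x*(d_y*sum(p)), and block_q8_1 precomputes s = d*sum(p) at quantization time so the second term costs one multiply per block. A scalar sketch of one block, mirroring this function's scalar fallback:

    static float vec_dot_q5_1_q8_1_one_block(const block_q5_1 * restrict x, const block_q8_1 * restrict y) {
        uint32_t qh;
        memcpy(&qh, x->qh, sizeof(qh));

        int sumi = 0;
        for (int j = 0; j < QK5_1/2; ++j) {
            const uint8_t xh_0 = ((qh >> (j +  0)) << 4) & 0x10; // 5th bit of weight j
            const uint8_t xh_1 = ((qh >> (j + 12))     ) & 0x10; // 5th bit of weight j + 16
            sumi += ((x->qs[j] & 0x0F) | xh_0) * y->qs[j]
                  + ((x->qs[j] >>   4) | xh_1) * y->qs[j + QK5_1/2];
        }
        return GGML_FP16_TO_FP32(x->d)*y->d*sumi + GGML_FP16_TO_FP32(x->m)*y->s;
    }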
const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1lf), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1lf), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hf), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hf), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*y1->d); -#endif - } - - *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs0 + summs1; -#elif defined(__wasm_simd128__) - v128_t sumv = wasm_f32x4_splat(0.0f); - - float summs = 0.0f; - - uint32_t qh; - uint64_t tmp[4]; - - // TODO: check if unrolling this is better - for (int i = 0; i < nb; ++i) { - const block_q5_1 * restrict x0 = &x[i]; - const block_q8_1 * restrict y0 = &y[i]; - - summs += GGML_FP16_TO_FP32(x0->m) * y0->s; - - const v128_t m4b = wasm_i8x16_splat(0x0F); - - // extract the 5th bit - memcpy(&qh, x0->qh, sizeof(qh)); - - tmp[0] = table_b2b_0[(qh >> 0) & 0xFF]; - tmp[1] = table_b2b_0[(qh >> 8) & 0xFF]; - tmp[2] = table_b2b_0[(qh >> 16) & 0xFF]; - tmp[3] = table_b2b_0[(qh >> 24) ]; - - const v128_t qhl = wasm_v128_load(tmp + 0); - const v128_t qhh = wasm_v128_load(tmp + 2); - - const v128_t v0 = wasm_v128_load(x0->qs); - - // 4-bit -> 8-bit - const v128_t v0l = wasm_v128_and (v0, m4b); - const v128_t v0h = wasm_u8x16_shr(v0, 4); - - // add high bit - const v128_t v0lf = wasm_v128_or(v0l, qhl); - const v128_t v0hf = wasm_v128_or(v0h, qhh); - - // load y - const v128_t v1l = wasm_v128_load(y0->qs); - const v128_t v1h = wasm_v128_load(y0->qs + 16); - - // int8x16 -> int16x8 - const v128_t v0lfl = wasm_i16x8_extend_low_i8x16 (v0lf); - const v128_t v0lfh = wasm_i16x8_extend_high_i8x16(v0lf); - const v128_t v0hfl = wasm_i16x8_extend_low_i8x16 (v0hf); - const v128_t v0hfh = wasm_i16x8_extend_high_i8x16(v0hf); - - const v128_t v1ll = wasm_i16x8_extend_low_i8x16 (v1l); - const v128_t v1lh = wasm_i16x8_extend_high_i8x16(v1l); - const v128_t v1hl = wasm_i16x8_extend_low_i8x16 (v1h); - const v128_t v1hh = wasm_i16x8_extend_high_i8x16(v1h); - - // dot product - sumv = wasm_f32x4_add(sumv, - wasm_f32x4_mul(wasm_f32x4_convert_i32x4(wasm_i32x4_add( - wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0lfl, v1ll), - wasm_i32x4_dot_i16x8(v0lfh, v1lh)), - wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0hfl, v1hl), - wasm_i32x4_dot_i16x8(v0hfh, v1hh)))), - wasm_f32x4_splat(GGML_FP16_TO_FP32(x0->d) * y0->d))); - } - - *s = wasm_f32x4_extract_lane(sumv, 0) + wasm_f32x4_extract_lane(sumv, 1) + - wasm_f32x4_extract_lane(sumv, 2) + wasm_f32x4_extract_lane(sumv, 3) + summs; -#elif defined(__AVX2__) - // Initialize accumulator with zeros - __m256 acc = _mm256_setzero_ps(); - - float summs = 0.0f; - - // Main loop - for (int i = 0; i < nb; i++) { - const __m256 dx = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d)); - - summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; - - __m256i bx = bytes_from_nibbles_32(x[i].qs); - __m256i bxhi = bytes_from_bits_32(x[i].qh); - bxhi = _mm256_and_si256(bxhi, _mm256_set1_epi8(0x10)); - bx = _mm256_or_si256(bx, bxhi); - - const __m256 dy = _mm256_set1_ps(y[i].d); - const __m256i by = _mm256_loadu_si256((const __m256i 
*)y[i].qs); - - const __m256 q = mul_sum_us8_pairs_float(bx, by); - - acc = _mm256_fmadd_ps(q, _mm256_mul_ps(dx, dy), acc); - } - - *s = hsum_float_8(acc) + summs; -#elif defined(__AVX__) - // Initialize accumulator with zeros - __m256 acc = _mm256_setzero_ps(); - __m128i mask = _mm_set1_epi8(0x10); - - float summs = 0.0f; - - // Main loop - for (int i = 0; i < nb; i++) { - const __m256 dx = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d)); - - summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; - - __m256i bx = bytes_from_nibbles_32(x[i].qs); - const __m256i bxhi = bytes_from_bits_32(x[i].qh); - __m128i bxhil = _mm256_castsi256_si128(bxhi); - __m128i bxhih = _mm256_extractf128_si256(bxhi, 1); - bxhil = _mm_and_si128(bxhil, mask); - bxhih = _mm_and_si128(bxhih, mask); - __m128i bxl = _mm256_castsi256_si128(bx); - __m128i bxh = _mm256_extractf128_si256(bx, 1); - bxl = _mm_or_si128(bxl, bxhil); - bxh = _mm_or_si128(bxh, bxhih); - bx = MM256_SET_M128I(bxh, bxl); - - const __m256 dy = _mm256_set1_ps(y[i].d); - const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); - - const __m256 q = mul_sum_us8_pairs_float(bx, by); - - acc = _mm256_add_ps(_mm256_mul_ps(q, _mm256_mul_ps(dx, dy)), acc); - } - - *s = hsum_float_8(acc) + summs; -#elif defined(__riscv_v_intrinsic) - float sumf = 0.0; - - uint32_t qh; - - size_t vl = __riscv_vsetvl_e8m1(qk/2); - - // temporary registers for shift operations - vuint32m2_t vt_1 = __riscv_vid_v_u32m2(vl); - vuint32m2_t vt_2 = __riscv_vadd_vx_u32m2(vt_1, 12, vl); - - for (int i = 0; i < nb; i++) { - memcpy(&qh, x[i].qh, sizeof(uint32_t)); - - // load qh - vuint32m2_t vqh = __riscv_vmv_v_x_u32m2(qh, vl); - - // ((qh >> (j + 0)) << 4) & 0x10; - vuint32m2_t xhr_0 = __riscv_vsrl_vv_u32m2(vqh, vt_1, vl); - vuint32m2_t xhl_0 = __riscv_vsll_vx_u32m2(xhr_0, 4, vl); - vuint32m2_t xha_0 = __riscv_vand_vx_u32m2(xhl_0, 0x10, vl); - - // ((qh >> (j + 12)) ) & 0x10; - vuint32m2_t xhr_1 = __riscv_vsrl_vv_u32m2(vqh, vt_2, vl); - vuint32m2_t xha_1 = __riscv_vand_vx_u32m2(xhr_1, 0x10, vl); - - // narrowing - vuint16m1_t xhc_0 = __riscv_vncvt_x_x_w_u16m1(xha_0, vl); - vuint8mf2_t xh_0 = __riscv_vncvt_x_x_w_u8mf2(xhc_0, vl); - - vuint16m1_t xhc_1 = __riscv_vncvt_x_x_w_u16m1(xha_1, vl); - vuint8mf2_t xh_1 = __riscv_vncvt_x_x_w_u8mf2(xhc_1, vl); - - // load - vuint8mf2_t tx = __riscv_vle8_v_u8mf2(x[i].qs, vl); - - vint8mf2_t y0 = __riscv_vle8_v_i8mf2(y[i].qs, vl); - vint8mf2_t y1 = __riscv_vle8_v_i8mf2(y[i].qs+16, vl); - - vuint8mf2_t x_at = __riscv_vand_vx_u8mf2(tx, 0x0F, vl); - vuint8mf2_t x_lt = __riscv_vsrl_vx_u8mf2(tx, 0x04, vl); - - vuint8mf2_t x_a = __riscv_vor_vv_u8mf2(x_at, xh_0, vl); - vuint8mf2_t x_l = __riscv_vor_vv_u8mf2(x_lt, xh_1, vl); - - vint8mf2_t v0 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_a); - vint8mf2_t v1 = __riscv_vreinterpret_v_u8mf2_i8mf2(x_l); - - vint16m1_t vec_mul1 = __riscv_vwmul_vv_i16m1(v0, y0, vl); - vint16m1_t vec_mul2 = __riscv_vwmul_vv_i16m1(v1, y1, vl); - - vint32m1_t vec_zero = __riscv_vmv_v_x_i32m1(0, vl); - - vint32m1_t vs1 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul1, vec_zero, vl); - vint32m1_t vs2 = __riscv_vwredsum_vs_i16m1_i32m1(vec_mul2, vs1, vl); - - int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); - - sumf += (GGML_FP16_TO_FP32(x[i].d)*y[i].d)*sumi + GGML_FP16_TO_FP32(x[i].m)*y[i].s; - } - - *s = sumf; -#else - // scalar - float sumf = 0.0; - - for (int i = 0; i < nb; i++) { - uint32_t qh; - memcpy(&qh, x[i].qh, sizeof(qh)); - - int sumi = 0; - - for (int j = 0; j < qk/2; ++j) { - const uint8_t xh_0 = ((qh >> (j + 0)) << 4) & 0x10; - const uint8_t xh_1 = 
((qh >> (j + 12)) ) & 0x10; - - const int32_t x0 = (x[i].qs[j] & 0xF) | xh_0; - const int32_t x1 = (x[i].qs[j] >> 4) | xh_1; - - sumi += (x0 * y[i].qs[j]) + (x1 * y[i].qs[j + qk/2]); - } - - sumf += (GGML_FP16_TO_FP32(x[i].d)*y[i].d)*sumi + GGML_FP16_TO_FP32(x[i].m)*y[i].s; - } - - *s = sumf; -#endif -} - -static void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { - const int qk = QK8_0; - const int nb = n / qk; - - assert(n % qk == 0); - - const block_q8_0 * restrict x = vx; - const block_q8_0 * restrict y = vy; - -#if defined(__ARM_NEON) - float32x4_t sumv0 = vdupq_n_f32(0.0f); - float32x4_t sumv1 = vdupq_n_f32(0.0f); - - GGML_ASSERT(nb % 2 == 0); // TODO: handle odd nb - for (int i = 0; i < nb; i += 2) { - const block_q8_0 * restrict x0 = &x[i + 0]; - const block_q8_0 * restrict x1 = &x[i + 1]; - const block_q8_0 * restrict y0 = &y[i + 0]; - const block_q8_0 * restrict y1 = &y[i + 1]; - - const int8x16_t x0_0 = vld1q_s8(x0->qs); - const int8x16_t x0_1 = vld1q_s8(x0->qs + 16); - const int8x16_t x1_0 = vld1q_s8(x1->qs); - const int8x16_t x1_1 = vld1q_s8(x1->qs + 16); - - // load y - const int8x16_t y0_0 = vld1q_s8(y0->qs); - const int8x16_t y0_1 = vld1q_s8(y0->qs + 16); - const int8x16_t y1_0 = vld1q_s8(y1->qs); - const int8x16_t y1_1 = vld1q_s8(y1->qs + 16); - -#if defined(__ARM_FEATURE_DOTPROD) - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), - vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), - vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); - -#else - const int16x8_t p0_0 = vmull_s8(vget_low_s8 (x0_0), vget_low_s8 (y0_0)); - const int16x8_t p0_1 = vmull_s8(vget_high_s8(x0_0), vget_high_s8(y0_0)); - const int16x8_t p0_2 = vmull_s8(vget_low_s8 (x0_1), vget_low_s8 (y0_1)); - const int16x8_t p0_3 = vmull_s8(vget_high_s8(x0_1), vget_high_s8(y0_1)); - - const int16x8_t p1_0 = vmull_s8(vget_low_s8 (x1_0), vget_low_s8 (y1_0)); - const int16x8_t p1_1 = vmull_s8(vget_high_s8(x1_0), vget_high_s8(y1_0)); - const int16x8_t p1_2 = vmull_s8(vget_low_s8 (x1_1), vget_low_s8 (y1_1)); - const int16x8_t p1_3 = vmull_s8(vget_high_s8(x1_1), vget_high_s8(y1_1)); - - const int32x4_t p0 = vaddq_s32(vpaddlq_s16(p0_0), vpaddlq_s16(p0_1)); - const int32x4_t p1 = vaddq_s32(vpaddlq_s16(p0_2), vpaddlq_s16(p0_3)); - const int32x4_t p2 = vaddq_s32(vpaddlq_s16(p1_0), vpaddlq_s16(p1_1)); - const int32x4_t p3 = vaddq_s32(vpaddlq_s16(p1_2), vpaddlq_s16(p1_3)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(p0, p1)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(p2, p3)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif - } - - *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); -#elif defined(__AVX2__) || defined(__AVX__) - // Initialize accumulator with zeros - __m256 acc = _mm256_setzero_ps(); - - // Main loop - for (int i = 0; i < nb; ++i) { - // Compute combined scale for the block - const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); - __m256i bx = _mm256_loadu_si256((const __m256i *)x[i].qs); - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); - - const __m256 q = mul_sum_i8_pairs_float(bx, by); - - // Multiply q with scale and accumulate -#if defined(__AVX2__) - acc = _mm256_fmadd_ps( d, q, acc ); -#else - acc = 
_mm256_add_ps( _mm256_mul_ps( d, q ), acc ); -#endif - } - - *s = hsum_float_8(acc); -#elif defined(__riscv_v_intrinsic) - float sumf = 0.0; - size_t vl = __riscv_vsetvl_e8m1(qk); - - for (int i = 0; i < nb; i++) { - // load elements - vint8m1_t bx = __riscv_vle8_v_i8m1(x[i].qs, vl); - vint8m1_t by = __riscv_vle8_v_i8m1(y[i].qs, vl); - - vint16m2_t vw_mul = __riscv_vwmul_vv_i16m2(bx, by, vl); - - vint32m1_t v_zero = __riscv_vmv_v_x_i32m1(0, vl); - vint32m1_t v_sum = __riscv_vwredsum_vs_i16m2_i32m1(vw_mul, v_zero, vl); - - int sumi = __riscv_vmv_x_s_i32m1_i32(v_sum); - - sumf += sumi*(GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d)); - } - - *s = sumf; -#else - // scalar - float sumf = 0.0; - - for (int i = 0; i < nb; i++) { - int sumi = 0; - - for (int j = 0; j < qk; j++) { - sumi += x[i].qs[j]*y[i].qs[j]; - } - - sumf += sumi*(GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d)); - } - - *s = sumf; -#endif -} - // compute GGML_VEC_DOT_UNROLL dot products at once // xs - x row stride in bytes inline static void ggml_vec_dot_f16_unroll(const int n, const int xs, float * restrict s, void * restrict xv, ggml_fp16_t * restrict y) { @@ -21001,7 +18706,6 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i block_q8_0 * block = (block_q8_0*)dst + start / QK8_0; result = ggml_quantize_q8_0(src + start, block, n, n, hist); } break; -#ifdef GGML_USE_K_QUANTS case GGML_TYPE_Q2_K: { GGML_ASSERT(start % QK_K == 0); @@ -21032,7 +18736,6 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i block_q6_K * block = (block_q6_K*)dst + start / QK_K; result = ggml_quantize_q6_K(src + start, block, n, n, hist); } break; -#endif case GGML_TYPE_F16: { int elemsize = sizeof(ggml_fp16_t); diff --git a/ggml.h b/ggml.h index 08bff5511..8c954904e 100644 --- a/ggml.h +++ b/ggml.h @@ -1930,12 +1930,19 @@ extern "C" { // quantization // + // TODO: these would probably get removed in favor of the more general ggml_quantize_chunk GGML_API size_t ggml_quantize_q4_0(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q4_1(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q5_0(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q5_1(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q8_0(const float * src, void * dst, int n, int k, int64_t * hist); + GGML_API size_t ggml_quantize_q2_K(const float * src, void * dst, int n, int k, int64_t * hist); + GGML_API size_t ggml_quantize_q3_K(const float * src, void * dst, int n, int k, int64_t * hist); + GGML_API size_t ggml_quantize_q4_K(const float * src, void * dst, int n, int k, int64_t * hist); + GGML_API size_t ggml_quantize_q5_K(const float * src, void * dst, int n, int k, int64_t * hist); + GGML_API size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * hist); + GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist); // diff --git a/llama.cpp b/llama.cpp index 3d431ee7b..1d1db8fc9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -19,13 +19,11 @@ #ifdef GGML_USE_MPI # include "ggml-mpi.h" #endif -#ifdef GGML_USE_K_QUANTS -# ifndef QK_K -# ifdef GGML_QKK_64 -# define QK_K 64 -# else -# define QK_K 256 -# endif +#ifndef QK_K +# ifdef GGML_QKK_64 +# define QK_K 64 +# else +# define QK_K 256 # endif #endif @@ -8052,7 +8050,7 @@ struct no_init { struct quantize_state_internal 
{ const llama_model & model; const llama_model_quantize_params * params; -#ifdef GGML_USE_K_QUANTS + int n_attention_wv = 0; int n_feed_forward_w2 = 0; int i_attention_wv = 0; @@ -8060,7 +8058,7 @@ struct quantize_state_internal { int n_k_quantized = 0; int n_fallback = 0; -#endif + quantize_state_internal(const llama_model & model, const llama_model_quantize_params * params) : model(model) , params(params) @@ -8125,7 +8123,6 @@ static void llama_convert_tensor_internal( workers.clear(); } -#ifdef GGML_USE_K_QUANTS static ggml_type get_k_quant_type( quantize_state_internal & qs, ggml_type new_type, const ggml_tensor * tensor, llama_ftype ftype @@ -8237,7 +8234,6 @@ static ggml_type get_k_quant_type( return new_type; } -#endif static void llama_model_quantize_internal(const std::string & fname_inp, const std::string & fname_out, const llama_model_quantize_params * params) { ggml_type quantized_type; @@ -8252,7 +8248,6 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_F16: quantized_type = GGML_TYPE_F16; break; case LLAMA_FTYPE_ALL_F32: quantized_type = GGML_TYPE_F32; break; -#ifdef GGML_USE_K_QUANTS // K-quants case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; case LLAMA_FTYPE_MOSTLY_Q3_K_S: @@ -8263,7 +8258,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_Q5_K_S: case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break; case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; -#endif + default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } @@ -8304,7 +8299,6 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s gguf_set_val_u32(ctx_out, "general.quantization_version", GGML_QNT_VERSION); gguf_set_val_u32(ctx_out, "general.file_type", ftype); -#ifdef GGML_USE_K_QUANTS for (int i = 0; i < ml.n_tensors; ++i) { struct ggml_tensor * meta = ml.get_tensor_meta(i); @@ -8322,7 +8316,6 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s LLAMA_LOG_WARN("%s ============ Strange model: n_attention_wv = %d, n_feed_forward_w2 = %d, hparams.n_layer = %d\n", __func__, qs.n_attention_wv, qs.n_feed_forward_w2, model.hparams.n_layer); } -#endif size_t total_size_org = 0; size_t total_size_new = 0; @@ -8387,9 +8380,10 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (quantize) { new_type = quantized_type; -#ifdef GGML_USE_K_QUANTS - new_type = get_k_quant_type(qs, new_type, tensor, ftype); -#endif + if (!params->pure) { + new_type = get_k_quant_type(qs, new_type, tensor, ftype); + } + // If we've decided to quantize to the same type the tensor is already // in then there's nothing to do. 
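A minimal usage sketch for the new pure flag (file names here are placeholders): setting it skips get_k_quant_type(), so every quantized tensor uses the base type for the requested ftype instead of the per-tensor k-quant mixture.

    #include "llama.h"

    static int quantize_pure(const char * fname_inp, const char * fname_out) {
        llama_model_quantize_params params = llama_model_quantize_default_params();
        params.ftype = LLAMA_FTYPE_MOSTLY_Q4_K_M;
        params.pure  = true; // no mixture: all quantized tensors become GGML_TYPE_Q4_K
        return llama_model_quantize(fname_inp, fname_out, &params);
    }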
quantize = tensor->type != new_type; @@ -8514,12 +8508,11 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s LLAMA_LOG_INFO("\n"); } } -#ifdef GGML_USE_K_QUANTS + if (qs.n_fallback > 0) { LLAMA_LOG_WARN("%s: WARNING: %d of %d tensor(s) incompatible with k-quants and required fallback quantization\n", __func__, qs.n_fallback, qs.n_k_quantized + qs.n_fallback); } -#endif } static int llama_apply_lora_from_file_internal( @@ -8844,6 +8837,7 @@ struct llama_model_quantize_params llama_model_quantize_default_params() { /*.allow_requantize =*/ false, /*.quantize_output_tensor =*/ true, /*.only_copy =*/ false, + /*.pure =*/ false, }; return result; diff --git a/llama.h b/llama.h index d901dcd91..6927bd601 100644 --- a/llama.h +++ b/llama.h @@ -191,6 +191,7 @@ extern "C" { bool allow_requantize; // allow quantizing non-f32/f16 tensors bool quantize_output_tensor; // quantize output.weight bool only_copy; // only copy tensors - ftype, allow_requantize and quantize_output_tensor are ignored + bool pure; // disable k-quant mixtures and quantize all tensors to the same type } llama_model_quantize_params; // grammar types From 71a09da301705b9c5ad4ca3cf3fbd966dd3f1ec5 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 29 Oct 2023 18:32:51 +0200 Subject: [PATCH 043/859] llama : fix kv shift bug (#3835) ggml-ci --- llama.cpp | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/llama.cpp b/llama.cpp index 1d1db8fc9..d8510a5cf 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1552,14 +1552,14 @@ static void llama_kv_cache_seq_shift( for (uint32_t i = 0; i < cache.size; ++i) { if (cache.cells[i].has_seq_id(seq_id) && cache.cells[i].pos >= p0 && cache.cells[i].pos < p1) { - cache.cells[i].pos += delta; + cache.has_shift = true; + cache.cells[i].pos += delta; + cache.cells[i].delta += delta; + if (cache.cells[i].pos < 0) { cache.cells[i].pos = -1; cache.cells[i].seq_id.clear(); if (new_head == cache.size) new_head = i; - } else { - cache.has_shift = true; - cache.cells[i].delta = delta; } } } @@ -6073,11 +6073,20 @@ static int llama_decode_internal( #endif // update the kv ring buffer - lctx.kv_self.has_shift = false; - lctx.kv_self.head += n_tokens; - // Ensure kv cache head points to a valid index. - if (lctx.kv_self.head >= lctx.kv_self.size) { - lctx.kv_self.head = 0; + { + if (kv_self.has_shift) { + kv_self.has_shift = false; + for (uint32_t i = 0; i < kv_self.size; ++i) { + kv_self.cells[i].delta = 0; + } + } + + kv_self.head += n_tokens; + + // Ensure kv cache head points to a valid index. 
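In miniature, the shift fix above changes two things: a cell's delta now accumulates across multiple shifts issued before the next decode (the old code overwrote it with the last delta), and the deltas are zeroed only after the decode has applied the shift. A sketch with the cell type abbreviated:

    struct kv_cell { llama_pos pos; llama_pos delta; };

    static void cell_shift(struct kv_cell * c, llama_pos d) {
        c->pos   += d;
        c->delta += d; // accumulate -- a plain '=' here loses earlier shifts
    }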
+ if (kv_self.head >= kv_self.size) { + kv_self.head = 0; + } } #ifdef GGML_PERF From 2046eb4345e62c4575b3cdc0115a51db89f3fb70 Mon Sep 17 00:00:00 2001 From: cebtenzzre Date: Sun, 29 Oct 2023 12:33:47 -0400 Subject: [PATCH 044/859] make : remove unnecessary dependency on build-info.h (#3842) --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 2cecc2216..c53c1e726 100644 --- a/Makefile +++ b/Makefile @@ -541,10 +541,10 @@ OBJS += ggml-alloc.o ggml-backend.o ggml-quants.o llama.o: llama.cpp ggml.h ggml-alloc.h ggml-backend.h ggml-cuda.h ggml-metal.h llama.h $(CXX) $(CXXFLAGS) -c $< -o $@ -COMMON_H_DEPS = common/common.h common/sampling.h build-info.h common/log.h -COMMON_DEPS = $(COMMON_H_DEPS) common.o sampling.o grammar-parser.o +COMMON_H_DEPS = common/common.h common/sampling.h common/log.h +COMMON_DEPS = common.o sampling.o grammar-parser.o -common.o: common/common.cpp $(COMMON_H_DEPS) +common.o: common/common.cpp build-info.h $(COMMON_H_DEPS) $(CXX) $(CXXFLAGS) -c $< -o $@ sampling.o: common/sampling.cpp $(COMMON_H_DEPS) From 6e08281e588bbba1a5d180290a94a43f167f3a1a Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Sun, 29 Oct 2023 11:31:40 -0600 Subject: [PATCH 045/859] Extend llama_kv_cache_seq_rm to allow matching any sequence (#3843) * Extend llama_kv_cache_seq_rm to allow matichng any sequence * Replace llama_kv_cache_tokens_rm with llama_kv_cache_clear Use llama_kv_cache_clear for cache clearing Change calls to llama_kv_cache_tokens_rm that want to delete by position to use llama_kv_cache_seq_rm functionality --- common/common.cpp | 2 +- examples/batched-bench/batched-bench.cpp | 2 +- examples/llama-bench/llama-bench.cpp | 4 ++-- examples/main/main.cpp | 2 +- examples/perplexity/perplexity.cpp | 6 ++--- examples/server/server.cpp | 2 +- llama.cpp | 29 ++++++++++++------------ llama.h | 15 +++++------- 8 files changed, 30 insertions(+), 32 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index f81f4d354..c187128d6 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -889,7 +889,7 @@ std::tuple llama_init_from_gpt_par std::vector tmp = { llama_token_bos(model), llama_token_eos(model), }; llama_decode(lctx, llama_batch_get_one(tmp.data(), std::min(tmp.size(), (size_t) params.n_batch), 0, 0)); - llama_kv_cache_tokens_rm(lctx, -1, -1); + llama_kv_cache_clear(lctx); llama_reset_timings(lctx); } diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 43f9c971d..533c55c17 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -185,7 +185,7 @@ int main(int argc, char ** argv) { const auto t_pp_start = ggml_time_us(); - llama_kv_cache_tokens_rm(ctx, -1, -1); + llama_kv_cache_clear(ctx); if (!decode_helper(ctx, batch, ctx_params.n_batch)) { LOG_TEE("%s: llama_decode() failed\n", __func__); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 20767d555..780398184 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -1037,7 +1037,7 @@ int main(int argc, char ** argv) { test t(inst, lmodel, ctx); - llama_kv_cache_tokens_rm(ctx, -1, -1); + llama_kv_cache_clear(ctx); // warmup run if (t.n_prompt > 0) { @@ -1048,7 +1048,7 @@ int main(int argc, char ** argv) { } for (int i = 0; i < params.reps; i++) { - llama_kv_cache_tokens_rm(ctx, -1, -1); + llama_kv_cache_clear(ctx); uint64_t t_start = 
get_time_ns(); if (t.n_prompt > 0) { diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 3d9f670b9..8a43b6ab8 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -298,7 +298,7 @@ int main(int argc, char ** argv) { } // remove any "future" tokens that we might have inherited from the previous session - llama_kv_cache_tokens_rm(ctx, n_matching_session_tokens, -1); + llama_kv_cache_seq_rm(ctx, -1, n_matching_session_tokens, -1); } LOGLN( diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 3c2542e8c..bd2c73d87 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -210,7 +210,7 @@ static results_perplexity perplexity_v2(llama_context * ctx, const gpt_params & const auto t_start = std::chrono::high_resolution_clock::now(); // clear the KV cache - llama_kv_cache_tokens_rm(ctx, -1, -1); + llama_kv_cache_clear(ctx); for (int j = 0; j < num_batches; ++j) { const int batch_start = start + j * n_batch; @@ -339,7 +339,7 @@ static results_perplexity perplexity(llama_context * ctx, const gpt_params & par const auto t_start = std::chrono::high_resolution_clock::now(); // clear the KV cache - llama_kv_cache_tokens_rm(ctx, -1, -1); + llama_kv_cache_clear(ctx); for (int j = 0; j < num_batches; ++j) { const int batch_start = start + j * n_batch; @@ -573,7 +573,7 @@ static void hellaswag_score(llama_context * ctx, const gpt_params & params) { } // clear the KV cache - llama_kv_cache_tokens_rm(ctx, -1, -1); + llama_kv_cache_clear(ctx); auto logits = hellaswag_evaluate_tokens(ctx, query_embd, 0, params.n_batch, n_vocab); if (logits.empty()) { diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 5b7e4139d..c163c7f8e 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -857,7 +857,7 @@ struct llama_server_context void kv_cache_clear() { // clear the entire KV cache - llama_kv_cache_tokens_rm(ctx, -1, -1); + llama_kv_cache_clear(ctx); clean_kv_cache = false; } diff --git a/llama.cpp b/llama.cpp index d8510a5cf..a4340d527 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1466,17 +1466,12 @@ static int32_t llama_kv_cache_cell_max(const struct llama_kv_cache & cache) { return 0; } -static void llama_kv_cache_tokens_rm(struct llama_kv_cache & cache, int32_t c0, int32_t c1) { - if (c0 < 0) c0 = 0; - if (c1 < 0) c1 = cache.size; - - for (int32_t i = c0; i < c1; ++i) { +static void llama_kv_cache_clear(struct llama_kv_cache & cache) { + for (int32_t i = 0; i < cache.size; ++i) { cache.cells[i].pos = -1; cache.cells[i].seq_id.clear(); } - - // Searching for a free slot can start here since we know it will be empty. 
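A usage sketch of the reworked cache API (semantics as documented in the llama.h hunk below: seq_id < 0 matches any sequence, p0/p1 < 0 open the interval):

    #include "llama.h"

    // drop everything past n_keep for every sequence, e.g. after restoring a session
    static void truncate_cache(struct llama_context * ctx, llama_pos n_keep) {
        llama_kv_cache_seq_rm(ctx, /*seq_id=*/ -1, /*p0=*/ n_keep, /*p1=*/ -1);
    }

    // start over entirely
    static void reset_cache(struct llama_context * ctx) {
        llama_kv_cache_clear(ctx);
    }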
- cache.head = uint32_t(c0); + cache.head = 0; } static void llama_kv_cache_seq_rm( @@ -1490,8 +1485,14 @@ static void llama_kv_cache_seq_rm( if (p1 < 0) p1 = std::numeric_limits::max(); for (uint32_t i = 0; i < cache.size; ++i) { - if (cache.cells[i].has_seq_id(seq_id) && cache.cells[i].pos >= p0 && cache.cells[i].pos < p1) { - cache.cells[i].seq_id.erase(seq_id); + if (cache.cells[i].pos >= p0 && cache.cells[i].pos < p1) { + if (seq_id < 0) { + cache.cells[i].seq_id.clear(); + } else if (cache.cells[i].has_seq_id(seq_id)) { + cache.cells[i].seq_id.erase(seq_id); + } else { + continue; + } if (cache.cells[i].seq_id.empty()) { cache.cells[i].pos = -1; if (new_head == cache.size) new_head = i; @@ -9207,8 +9208,8 @@ int llama_get_kv_cache_token_count(const struct llama_context * ctx) { return ctx->kv_self.head; } -void llama_kv_cache_tokens_rm(struct llama_context * ctx, int32_t c0, int32_t c1) { - llama_kv_cache_tokens_rm(ctx->kv_self, c0, c1); +void llama_kv_cache_clear(struct llama_context * ctx) { + llama_kv_cache_clear(ctx->kv_self); } void llama_kv_cache_seq_rm(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1) { @@ -9654,7 +9655,7 @@ int llama_eval( llama_token * tokens, int32_t n_tokens, int n_past) { - llama_kv_cache_tokens_rm(ctx->kv_self, n_past, -1); + llama_kv_cache_seq_rm(ctx->kv_self, -1, n_past, -1); const int ret = llama_decode_internal(*ctx, llama_batch_get_one(tokens, n_tokens, n_past, 0)); if (ret < 0) { @@ -9669,7 +9670,7 @@ int llama_eval_embd( float * embd, int32_t n_tokens, int n_past) { - llama_kv_cache_tokens_rm(ctx->kv_self, n_past, -1); + llama_kv_cache_seq_rm(ctx->kv_self, -1, n_past, -1); llama_batch batch = { n_tokens, nullptr, embd, nullptr, nullptr, nullptr, nullptr, n_past, 1, 0, }; diff --git a/llama.h b/llama.h index 6927bd601..d727dbd9f 100644 --- a/llama.h +++ b/llama.h @@ -334,17 +334,14 @@ extern "C" { LLAMA_API DEPRECATED(int llama_get_kv_cache_token_count(const struct llama_context * ctx), "avoid using this, it will be removed in the future, instead - count the tokens in user code"); - // Remove all tokens data of cells in [c0, c1) - // c0 < 0 : [0, c1] - // c1 < 0 : [c0, inf) - LLAMA_API void llama_kv_cache_tokens_rm( - struct llama_context * ctx, - int32_t c0, - int32_t c1); + // Clear the KV cache + LLAMA_API void llama_kv_cache_clear( + struct llama_context * ctx); // Removes all tokens that belong to the specified sequence and have positions in [p0, p1) - // p0 < 0 : [0, p1] - // p1 < 0 : [p0, inf) + // seq_id < 0 : match any sequence + // p0 < 0 : [0, p1] + // p1 < 0 : [p0, inf) LLAMA_API void llama_kv_cache_seq_rm( struct llama_context * ctx, llama_seq_id seq_id, From 207b51900e15cc7f89763a3bb1c565fe11cbb45d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 30 Oct 2023 19:19:15 +0200 Subject: [PATCH 046/859] ggml : move FP16 <-> FP32 code to ggml-impl.h (#3861) * ggml : move FP16 <-> FP32 stuff to ggml-impl.h ggml-ci * tests : fix ARM build * ggml : explicitly initialize deprecated type traits * ggml : add math.h to ggml-impl.h * ggml : remove duplicate static assert macros * ggml : prefix lookup tables with ggml_ ggml-ci * ggml-impl : move extern "C" to start of file --- ggml-impl.h | 237 ++++++++++++++++++++++++ ggml-quants.c | 350 ++++++++++++++++++------------------ ggml-quants.h | 14 +- ggml.c | 282 +++++------------------------ llama.cpp | 2 +- tests/test-double-float.cpp | 2 +- tests/test-quantize-fns.cpp | 7 + 7 files changed, 470 insertions(+), 424 deletions(-) create mode 100644 ggml-impl.h diff 
--git a/ggml-impl.h b/ggml-impl.h new file mode 100644 index 000000000..5ec18a50c --- /dev/null +++ b/ggml-impl.h @@ -0,0 +1,237 @@ +#pragma once + +#include "ggml.h" + +// GGML internal header + +#include <assert.h> +#include <stddef.h> +#include <stdbool.h> +#include <string.h> // memcpy +#include <math.h> // fabsf + +#ifdef __cplusplus +extern "C" { +#endif + +// static_assert should be a #define, but if it's not, +// fall back to the _Static_assert C11 keyword. +// if C99 - static_assert is noop +// ref: https://stackoverflow.com/a/53923785/4039976 +#ifndef static_assert +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) +#define static_assert(cond, msg) _Static_assert(cond, msg) +#else +#define static_assert(cond, msg) struct global_scope_noop_trick +#endif +#endif + +// __FMA__ and __F16C__ are not defined in MSVC, however they are implied with AVX2/AVX512 +#if defined(_MSC_VER) && (defined(__AVX2__) || defined(__AVX512F__)) +#ifndef __FMA__ +#define __FMA__ +#endif +#ifndef __F16C__ +#define __F16C__ +#endif +#ifndef __SSE3__ +#define __SSE3__ +#endif +#endif + +#undef MIN +#undef MAX + +#define MIN(a, b) ((a) < (b) ? (a) : (b)) +#define MAX(a, b) ((a) > (b) ? (a) : (b)) + +// 16-bit float +// on Arm, we use __fp16 +// on x86, we use uint16_t +#if defined(__ARM_NEON) && !defined(_MSC_VER) + +// if YCM cannot find <arm_neon.h>, make a symbolic link to it, for example: +// +// $ ln -sfn /Library/Developer/CommandLineTools/usr/lib/clang/13.1.6/include/arm_neon.h ./src/ +// +#include <arm_neon.h> + +#define GGML_COMPUTE_FP16_TO_FP32(x) ((float) (x)) +#define GGML_COMPUTE_FP32_TO_FP16(x) (x) + +#define GGML_FP16_TO_FP32(x) ((float) (x)) +#define GGML_FP32_TO_FP16(x) (x) + +#else + +#ifdef __wasm_simd128__ +#include <wasm_simd128.h> +#else +#ifdef __POWER9_VECTOR__ +#include <altivec.h> +#undef bool +#define bool _Bool +#else +#if defined(_MSC_VER) || defined(__MINGW32__) +#include <intrin.h> +#else +#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) || defined(__SSE3__) +#if !defined(__riscv) +#include <immintrin.h> +#endif +#endif +#endif +#endif +#endif + +#ifdef __riscv_v_intrinsic +#include <riscv_vector.h> +#endif + +#ifdef __F16C__ + +#ifdef _MSC_VER +#define GGML_COMPUTE_FP16_TO_FP32(x) _mm_cvtss_f32(_mm_cvtph_ps(_mm_cvtsi32_si128(x))) +#define GGML_COMPUTE_FP32_TO_FP16(x) _mm_extract_epi16(_mm_cvtps_ph(_mm_set_ss(x), 0), 0) +#else +#define GGML_COMPUTE_FP16_TO_FP32(x) _cvtsh_ss(x) +#define GGML_COMPUTE_FP32_TO_FP16(x) _cvtss_sh(x, 0) +#endif + +#elif defined(__POWER9_VECTOR__) + +#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) +#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) +/* the inline asm below is about 12% faster than the lookup method */ +#define GGML_FP16_TO_FP32(x) GGML_COMPUTE_FP16_TO_FP32(x) +#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) + +static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { + register float f; + register double d; + __asm__( + "mtfprd %0,%2\n" + "xscvhpdp %0,%0\n" + "frsp %1,%0\n" : + /* temp */ "=d"(d), + /* out */ "=f"(f): + /* in */ "r"(h)); + return f; +} + +static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { + register double d; + register ggml_fp16_t r; + __asm__( /* xscvdphp can work on double or single precision */ + "xscvdphp %0,%2\n" + "mffprd %1,%0\n" : + /* temp */ "=d"(d), + /* out */ "=r"(r): + /* in */ "f"(f)); + return r; +} + +#else + +// FP16 <-> FP32 +// ref: https://github.com/Maratyszcza/FP16 + +static inline float fp32_from_bits(uint32_t w) { + union { + uint32_t as_bits; + float as_value; + } fp32; + fp32.as_bits = w; + return fp32.as_value; +} + +static
inline uint32_t fp32_to_bits(float f) { + union { + float as_value; + uint32_t as_bits; + } fp32; + fp32.as_value = f; + return fp32.as_bits; +} + +static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { + const uint32_t w = (uint32_t) h << 16; + const uint32_t sign = w & UINT32_C(0x80000000); + const uint32_t two_w = w + w; + + const uint32_t exp_offset = UINT32_C(0xE0) << 23; +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) || defined(__GNUC__) && !defined(__STRICT_ANSI__) + const float exp_scale = 0x1.0p-112f; +#else + const float exp_scale = fp32_from_bits(UINT32_C(0x7800000)); +#endif + const float normalized_value = fp32_from_bits((two_w >> 4) + exp_offset) * exp_scale; + + const uint32_t magic_mask = UINT32_C(126) << 23; + const float magic_bias = 0.5f; + const float denormalized_value = fp32_from_bits((two_w >> 17) | magic_mask) - magic_bias; + + const uint32_t denormalized_cutoff = UINT32_C(1) << 27; + const uint32_t result = sign | + (two_w < denormalized_cutoff ? fp32_to_bits(denormalized_value) : fp32_to_bits(normalized_value)); + return fp32_from_bits(result); +} + +static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { +#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) || defined(__GNUC__) && !defined(__STRICT_ANSI__) + const float scale_to_inf = 0x1.0p+112f; + const float scale_to_zero = 0x1.0p-110f; +#else + const float scale_to_inf = fp32_from_bits(UINT32_C(0x77800000)); + const float scale_to_zero = fp32_from_bits(UINT32_C(0x08800000)); +#endif + float base = (fabsf(f) * scale_to_inf) * scale_to_zero; + + const uint32_t w = fp32_to_bits(f); + const uint32_t shl1_w = w + w; + const uint32_t sign = w & UINT32_C(0x80000000); + uint32_t bias = shl1_w & UINT32_C(0xFF000000); + if (bias < UINT32_C(0x71000000)) { + bias = UINT32_C(0x71000000); + } + + base = fp32_from_bits((bias >> 1) + UINT32_C(0x07800000)) + base; + const uint32_t bits = fp32_to_bits(base); + const uint32_t exp_bits = (bits >> 13) & UINT32_C(0x00007C00); + const uint32_t mantissa_bits = bits & UINT32_C(0x00000FFF); + const uint32_t nonsign = exp_bits + mantissa_bits; + return (sign >> 16) | (shl1_w > UINT32_C(0xFF000000) ? UINT16_C(0x7E00) : nonsign); +} + +#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) +#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) + +#endif // __F16C__ + +#endif // __ARM_NEON + +// precomputed f32 table for f16 (256 KB) +// defined in ggml.c, initialized in ggml_init() +extern float ggml_table_f32_f16[1 << 16]; + +// On ARM NEON, it's quicker to directly convert x -> x instead of calling into ggml_lookup_fp16_to_fp32, +// so we define GGML_FP16_TO_FP32 and GGML_FP32_TO_FP16 elsewhere for NEON. +// This is also true for POWER9. +#if !defined(GGML_FP16_TO_FP32) || !defined(GGML_FP32_TO_FP16) + +inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { + uint16_t s; + memcpy(&s, &f, sizeof(uint16_t)); + return ggml_table_f32_f16[s]; +} + +#define GGML_FP16_TO_FP32(x) ggml_lookup_fp16_to_fp32(x) +#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) + +#endif + + // TODO: backend v2 PR + +#ifdef __cplusplus +} +#endif diff --git a/ggml-quants.c b/ggml-quants.c index fd4ee1be6..721594467 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -1,5 +1,5 @@ #include "ggml-quants.h" -#include "ggml.h" +#include "ggml-impl.h" #include #include @@ -352,7 +352,7 @@ void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict const float d = max / -8; const float id = d ? 
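A round-trip sanity sketch for the conversion code above (assuming ggml-impl.h is on the include path; the GGML_COMPUTE_* macros are used directly so no ggml_init() lookup-table setup is required):

    #include <assert.h>

    #include "ggml-impl.h"

    int main(void) {
        // values exactly representable in FP16 must survive FP32 -> FP16 -> FP32
        const float vals[] = { 0.0f, 1.0f, -0.5f, 65504.0f /* FP16 max */ };
        for (int i = 0; i < 4; ++i) {
            const ggml_fp16_t h = GGML_COMPUTE_FP32_TO_FP16(vals[i]);
            assert(GGML_COMPUTE_FP16_TO_FP32(h) == vals[i]);
        }
        return 0;
    }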
1.0f/d : 0.0f; - y[i].d = ggml_fp32_to_fp16(d); + y[i].d = GGML_FP32_TO_FP16(d); for (int j = 0; j < qk/2; ++j) { const float x0 = x[i*qk + 0 + j]*id; @@ -392,8 +392,8 @@ void quantize_row_q4_1_reference(const float * restrict x, block_q4_1 * restrict const float d = (max - min) / ((1 << 4) - 1); const float id = d ? 1.0f/d : 0.0f; - y[i].d = ggml_fp32_to_fp16(d); - y[i].m = ggml_fp32_to_fp16(min); + y[i].d = GGML_FP32_TO_FP16(d); + y[i].m = GGML_FP32_TO_FP16(min); for (int j = 0; j < qk/2; ++j) { const float x0 = (x[i*qk + 0 + j] - min)*id; @@ -434,7 +434,7 @@ void quantize_row_q5_0_reference(const float * restrict x, block_q5_0 * restrict const float d = max / -16; const float id = d ? 1.0f/d : 0.0f; - y[i].d = ggml_fp32_to_fp16(d); + y[i].d = GGML_FP32_TO_FP16(d); uint32_t qh = 0; @@ -481,8 +481,8 @@ void quantize_row_q5_1_reference(const float * restrict x, block_q5_1 * restrict const float d = (max - min) / ((1 << 5) - 1); const float id = d ? 1.0f/d : 0.0f; - y[i].d = ggml_fp32_to_fp16(d); - y[i].m = ggml_fp32_to_fp16(min); + y[i].d = GGML_FP32_TO_FP16(d); + y[i].m = GGML_FP32_TO_FP16(min); uint32_t qh = 0; @@ -524,7 +524,7 @@ void quantize_row_q8_0_reference(const float * restrict x, block_q8_0 * restrict const float d = amax / ((1 << 7) - 1); const float id = d ? 1.0f/d : 0.0f; - y[i].d = ggml_fp32_to_fp16(d); + y[i].d = GGML_FP32_TO_FP16(d); for (int j = 0; j < QK8_0; ++j) { const float x0 = x[i*QK8_0 + j]*id; @@ -559,7 +559,7 @@ void quantize_row_q8_0(const float * restrict x, void * restrict vy, int k) { const float d = amax / ((1 << 7) - 1); const float id = d ? 1.0f/d : 0.0f; - y[i].d = ggml_fp32_to_fp16(d); + y[i].d = GGML_FP32_TO_FP16(d); for (int j = 0; j < 8; j++) { const float32x4_t v = vmulq_n_f32(srcv[j], id); @@ -592,7 +592,7 @@ void quantize_row_q8_0(const float * restrict x, void * restrict vy, int k) { const float d = amax / ((1 << 7) - 1); const float id = d ? 1.0f/d : 0.0f; - y[i].d = ggml_fp32_to_fp16(d); + y[i].d = GGML_FP32_TO_FP16(d); for (int j = 0; j < 8; j++) { const v128_t v = wasm_f32x4_mul(srcv[j], wasm_f32x4_splat(id)); @@ -627,7 +627,7 @@ void quantize_row_q8_0(const float * restrict x, void * restrict vy, int k) { // Quantize these floats const float d = maxScalar / 127.f; - y[i].d = ggml_fp32_to_fp16(d); + y[i].d = GGML_FP32_TO_FP16(d); const float id = ( maxScalar != 0.0f ) ? 127.f / maxScalar : 0.0f; const __m256 mul = _mm256_set1_ps( id ); @@ -704,7 +704,7 @@ void quantize_row_q8_0(const float * restrict x, void * restrict vy, int k) { const float d = amax / ((1 << 7) - 1); const float id = d ? 
1.0f/d : 0.0f; - y[i].d = ggml_fp32_to_fp16(d); + y[i].d = GGML_FP32_TO_FP16(d); vfloat32m4_t x0 = __riscv_vfmul_vf_f32m4(v_x, id, vl); @@ -982,7 +982,7 @@ void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int const int nb = k / qk; for (int i = 0; i < nb; i++) { - const float d = ggml_fp16_to_fp32(x[i].d); + const float d = GGML_FP16_TO_FP32(x[i].d); for (int j = 0; j < qk/2; ++j) { const int x0 = (x[i].qs[j] & 0x0F) - 8; @@ -1002,8 +1002,8 @@ void dequantize_row_q4_1(const block_q4_1 * restrict x, float * restrict y, int const int nb = k / qk; for (int i = 0; i < nb; i++) { - const float d = ggml_fp16_to_fp32(x[i].d); - const float m = ggml_fp16_to_fp32(x[i].m); + const float d = GGML_FP16_TO_FP32(x[i].d); + const float m = GGML_FP16_TO_FP32(x[i].m); for (int j = 0; j < qk/2; ++j) { const int x0 = (x[i].qs[j] & 0x0F); @@ -1023,7 +1023,7 @@ void dequantize_row_q5_0(const block_q5_0 * restrict x, float * restrict y, int const int nb = k / qk; for (int i = 0; i < nb; i++) { - const float d = ggml_fp16_to_fp32(x[i].d); + const float d = GGML_FP16_TO_FP32(x[i].d); uint32_t qh; memcpy(&qh, x[i].qh, sizeof(qh)); @@ -1049,8 +1049,8 @@ void dequantize_row_q5_1(const block_q5_1 * restrict x, float * restrict y, int const int nb = k / qk; for (int i = 0; i < nb; i++) { - const float d = ggml_fp16_to_fp32(x[i].d); - const float m = ggml_fp16_to_fp32(x[i].m); + const float d = GGML_FP16_TO_FP32(x[i].d); + const float m = GGML_FP16_TO_FP32(x[i].m); uint32_t qh; memcpy(&qh, x[i].qh, sizeof(qh)); @@ -1076,7 +1076,7 @@ void dequantize_row_q8_0(const block_q8_0 * restrict x, float * restrict y, int const int nb = k / qk; for (int i = 0; i < nb; i++) { - const float d = ggml_fp16_to_fp32(x[i].d); + const float d = GGML_FP16_TO_FP32(x[i].d); for (int j = 0; j < qk; ++j) { y[i*qk + j] = x[i].qs[j]*d; @@ -1387,10 +1387,10 @@ void quantize_row_q2_K_reference(const float * restrict x, block_q2_K * restrict int l = nearest_int(iscale*scales[j]); y[i].scales[j] = l; } - y[i].d = ggml_fp32_to_fp16(max_scale/q4scale); + y[i].d = GGML_FP32_TO_FP16(max_scale/q4scale); } else { for (int j = 0; j < QK_K/16; ++j) y[i].scales[j] = 0; - y[i].d = ggml_fp32_to_fp16(0.f); + y[i].d = GGML_FP32_TO_FP16(0.f); } if (max_min > 0) { float iscale = q4scale/max_min; @@ -1398,14 +1398,14 @@ void quantize_row_q2_K_reference(const float * restrict x, block_q2_K * restrict int l = nearest_int(iscale*mins[j]); y[i].scales[j] |= (l << 4); } - y[i].dmin = ggml_fp32_to_fp16(max_min/q4scale); + y[i].dmin = GGML_FP32_TO_FP16(max_min/q4scale); } else { - y[i].dmin = ggml_fp32_to_fp16(0.f); + y[i].dmin = GGML_FP32_TO_FP16(0.f); } for (int j = 0; j < QK_K/16; ++j) { - const float d = ggml_fp16_to_fp32(y[i].d) * (y[i].scales[j] & 0xF); + const float d = GGML_FP16_TO_FP32(y[i].d) * (y[i].scales[j] & 0xF); if (!d) continue; - const float dm = ggml_fp16_to_fp32(y[i].dmin) * (y[i].scales[j] >> 4); + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * (y[i].scales[j] >> 4); for (int ii = 0; ii < 16; ++ii) { int l = nearest_int((x[16*j + ii] + dm)/d); l = MAX(0, MIN(3, l)); @@ -1436,8 +1436,8 @@ void dequantize_row_q2_K(const block_q2_K * restrict x, float * restrict y, int for (int i = 0; i < nb; i++) { - const float d = ggml_fp16_to_fp32(x[i].d); - const float min = ggml_fp16_to_fp32(x[i].dmin); + const float d = GGML_FP16_TO_FP32(x[i].d); + const float min = GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * q = x[i].qs; @@ -1526,16 +1526,16 @@ void quantize_row_q3_K_reference(const float * restrict x, block_q3_K * restrict l >>= 4; 
y[i].scales[j%4 + 8] |= (l << (2*(j/4))); } - y[i].d = ggml_fp32_to_fp16(1/iscale); + y[i].d = GGML_FP32_TO_FP16(1/iscale); } else { - y[i].d = ggml_fp32_to_fp16(0.f); + y[i].d = GGML_FP32_TO_FP16(0.f); } int8_t sc; for (int j = 0; j < QK_K/16; ++j) { sc = j < 8 ? y[i].scales[j] & 0xF : y[i].scales[j-8] >> 4; sc = (sc | (((y[i].scales[8 + j%4] >> (2*(j/4))) & 3) << 4)) - 32; - float d = ggml_fp16_to_fp32(y[i].d) * sc; + float d = GGML_FP16_TO_FP32(y[i].d) * sc; if (!d) { continue; } @@ -1555,16 +1555,16 @@ void quantize_row_q3_K_reference(const float * restrict x, block_q3_K * restrict l2 = 8 + MAX(-8, MIN(7, l2)); y[i].scales[j/2] = l1 | (l2 << 4); } - y[i].d = ggml_fp32_to_fp16(1/iscale); + y[i].d = GGML_FP32_TO_FP16(1/iscale); } else { for (int j = 0; j < QK_K/16; j+=2) { y[i].scales[j/2] = 0; } - y[i].d = ggml_fp32_to_fp16(0.f); + y[i].d = GGML_FP32_TO_FP16(0.f); } for (int j = 0; j < QK_K/16; ++j) { int s = j%2 == 0 ? y[i].scales[j/2] & 0xF : y[i].scales[j/2] >> 4; - float d = ggml_fp16_to_fp32(y[i].d) * (s - 8); + float d = GGML_FP16_TO_FP32(y[i].d) * (s - 8); if (!d) { continue; } @@ -1618,7 +1618,7 @@ void dequantize_row_q3_K(const block_q3_K * restrict x, float * restrict y, int for (int i = 0; i < nb; i++) { - const float d_all = ggml_fp16_to_fp32(x[i].d); + const float d_all = GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q = x[i].qs; const uint8_t * restrict hm = x[i].hmask; @@ -1663,7 +1663,7 @@ void dequantize_row_q3_K(const block_q3_K * restrict x, float * restrict y, int for (int i = 0; i < nb; i++) { - const float d_all = ggml_fp16_to_fp32(x[i].d); + const float d_all = GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q = x[i].qs; const uint8_t * restrict hm = x[i].hmask; @@ -1753,15 +1753,15 @@ void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict y[i].scales[j-0] |= ((lm >> 4) << 6); } } - y[i].d = ggml_fp32_to_fp16(max_scale/63.f); - y[i].dmin = ggml_fp32_to_fp16(max_min/63.f); + y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); + y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); uint8_t sc, m; for (int j = 0; j < QK_K/32; ++j) { get_scale_min_k4(j, y[i].scales, &sc, &m); - const float d = ggml_fp16_to_fp32(y[i].d) * sc; + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; if (!d) continue; - const float dm = ggml_fp16_to_fp32(y[i].dmin) * m; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; for (int ii = 0; ii < 32; ++ii) { int l = nearest_int((x[32*j + ii] + dm)/d); l = MAX(0, MIN(15, l)); @@ -1778,17 +1778,17 @@ void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict int m2 = nearest_int(inv_min*mins[1]); y[i].scales[0] = d1 | (m1 << 4); y[i].scales[1] = d2 | (m2 << 4); - y[i].d[0] = ggml_fp32_to_fp16(max_scale/s_factor); - y[i].d[1] = ggml_fp32_to_fp16(max_min/s_factor); + y[i].d[0] = GGML_FP32_TO_FP16(max_scale/s_factor); + y[i].d[1] = GGML_FP32_TO_FP16(max_min/s_factor); float sumlx = 0; int suml2 = 0; for (int j = 0; j < QK_K/32; ++j) { const uint8_t sd = y[i].scales[j] & 0xF; const uint8_t sm = y[i].scales[j] >> 4; - const float d = ggml_fp16_to_fp32(y[i].d[0]) * sd; + const float d = GGML_FP16_TO_FP32(y[i].d[0]) * sd; if (!d) continue; - const float m = ggml_fp16_to_fp32(y[i].d[1]) * sm; + const float m = GGML_FP16_TO_FP32(y[i].d[1]) * sm; for (int ii = 0; ii < 32; ++ii) { int l = nearest_int((x[32*j + ii] + m)/d); l = MAX(0, MIN(15, l)); @@ -1798,7 +1798,7 @@ void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict } } if (suml2) { - y[i].d[0] = ggml_fp32_to_fp16(sumlx/suml2); + 
y[i].d[0] = GGML_FP32_TO_FP16(sumlx/suml2); } #endif uint8_t * q = y[i].qs; @@ -1822,8 +1822,8 @@ void dequantize_row_q4_K(const block_q4_K * restrict x, float * restrict y, int #if QK_K == 256 - const float d = ggml_fp16_to_fp32(x[i].d); - const float min = ggml_fp16_to_fp32(x[i].dmin); + const float d = GGML_FP16_TO_FP32(x[i].d); + const float min = GGML_FP16_TO_FP32(x[i].dmin); int is = 0; uint8_t sc, m; @@ -1837,8 +1837,8 @@ void dequantize_row_q4_K(const block_q4_K * restrict x, float * restrict y, int q += 32; is += 2; } #else - const float dall = ggml_fp16_to_fp32(x[i].d[0]); - const float mall = ggml_fp16_to_fp32(x[i].d[1]); + const float dall = GGML_FP16_TO_FP32(x[i].d[0]); + const float mall = GGML_FP16_TO_FP32(x[i].d[1]); const float d1 = dall * (x[i].scales[0] & 0xF), m1 = mall * (x[i].scales[0] >> 4); const float d2 = dall * (x[i].scales[1] & 0xF), m2 = mall * (x[i].scales[1] >> 4); for (int l = 0; l < 32; ++l) { @@ -1924,15 +1924,15 @@ void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y[i].scales[j-0] |= ((lm >> 4) << 6); } } - y[i].d = ggml_fp32_to_fp16(max_scale/63.f); - y[i].dmin = ggml_fp32_to_fp16(max_min/63.f); + y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); + y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); uint8_t sc, m; for (int j = 0; j < QK_K/32; ++j) { get_scale_min_k4(j, y[i].scales, &sc, &m); - const float d = ggml_fp16_to_fp32(y[i].d) * sc; + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; if (!d) continue; - const float dm = ggml_fp16_to_fp32(y[i].dmin) * m; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; for (int ii = 0; ii < 32; ++ii) { int l = nearest_int((x[32*j + ii] + dm)/d); l = MAX(0, MIN(31, l)); @@ -1976,10 +1976,10 @@ void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict int l = nearest_int(iscale*scales[j]); y[i].scales[j] = MAX(-128, MIN(127, l)); } - y[i].d = ggml_fp32_to_fp16(1/iscale); + y[i].d = GGML_FP32_TO_FP16(1/iscale); for (int j = 0; j < QK_K/16; ++j) { - const float d = ggml_fp16_to_fp32(y[i].d) * y[i].scales[j]; + const float d = GGML_FP16_TO_FP32(y[i].d) * y[i].scales[j]; if (!d) continue; for (int ii = 0; ii < 16; ++ii) { int l = nearest_int(x[16*j + ii]/d); @@ -2023,8 +2023,8 @@ void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int #if QK_K == 256 - const float d = ggml_fp16_to_fp32(x[i].d); - const float min = ggml_fp16_to_fp32(x[i].dmin); + const float d = GGML_FP16_TO_FP32(x[i].d); + const float min = GGML_FP16_TO_FP32(x[i].dmin); int is = 0; uint8_t sc, m; @@ -2040,7 +2040,7 @@ void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int u1 <<= 2; u2 <<= 2; } #else - float d = ggml_fp16_to_fp32(x[i].d); + float d = GGML_FP16_TO_FP32(x[i].d); const int8_t * restrict s = x[i].scales; for (int l = 0; l < 8; ++l) { y[l+ 0] = d * s[0] * ((ql[l+ 0] & 0xF) - (qh[l] & 0x01 ? 
0 : 16)); @@ -2103,19 +2103,19 @@ void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict if (!max_abs_scale) { memset(&y[i], 0, sizeof(block_q6_K)); - y[i].d = ggml_fp32_to_fp16(0.f); + y[i].d = GGML_FP32_TO_FP16(0.f); x += QK_K; continue; } float iscale = -128.f/max_scale; - y[i].d = ggml_fp32_to_fp16(1/iscale); + y[i].d = GGML_FP32_TO_FP16(1/iscale); for (int ib = 0; ib < QK_K/16; ++ib) { y[i].scales[ib] = MIN(127, nearest_int(iscale*scales[ib])); } for (int j = 0; j < QK_K/16; ++j) { - float d = ggml_fp16_to_fp32(y[i].d) * y[i].scales[j]; + float d = GGML_FP16_TO_FP32(y[i].d) * y[i].scales[j]; if (!d) { continue; } @@ -2164,7 +2164,7 @@ void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int for (int i = 0; i < nb; i++) { - const float d = ggml_fp16_to_fp32(x[i].d); + const float d = GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict ql = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -2371,8 +2371,8 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); #else const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0ls), vget_low_s8 (v1_0l)); const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0ls), vget_high_s8(v1_0l)); @@ -2389,8 +2389,8 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); #endif } @@ -2402,7 +2402,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, // Main loop for (int i = 0; i < nb; ++i) { /* Compute combined scale for the block */ - const __m256 d = _mm256_set1_ps( ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d) ); + const __m256 d = _mm256_set1_ps( GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d) ); __m256i bx = bytes_from_nibbles_32(x[i].qs); @@ -2426,7 +2426,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, // Main loop for (int i = 0; i < nb; ++i) { // Compute combined scale for the block - const __m256 d = _mm256_set1_ps( ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d) ); + const __m256 d = _mm256_set1_ps( GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d) ); const __m128i lowMask = _mm_set1_epi8(0xF); const __m128i off = _mm_set1_epi8(8); @@ -2468,7 +2468,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, _mm_prefetch(&y[0] + sizeof(block_q8_0), _MM_HINT_T0); // 
Compute combined scale for the block 0 and 1 - const __m128 d_0_1 = _mm_set1_ps( ggml_fp16_to_fp32(x[0].d) * ggml_fp16_to_fp32(y[0].d) ); + const __m128 d_0_1 = _mm_set1_ps( GGML_FP16_TO_FP32(x[0].d) * GGML_FP16_TO_FP32(y[0].d) ); const __m128i tmp_0_1 = _mm_loadu_si128((const __m128i *)x[0].qs); @@ -2486,7 +2486,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, _mm_prefetch(&y[1] + sizeof(block_q8_0), _MM_HINT_T0); // Compute combined scale for the block 2 and 3 - const __m128 d_2_3 = _mm_set1_ps( ggml_fp16_to_fp32(x[1].d) * ggml_fp16_to_fp32(y[1].d) ); + const __m128 d_2_3 = _mm_set1_ps( GGML_FP16_TO_FP32(x[1].d) * GGML_FP16_TO_FP32(y[1].d) ); const __m128i tmp_2_3 = _mm_loadu_si128((const __m128i *)x[1].qs); @@ -2521,7 +2521,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, _mm_prefetch(&y[i] + sizeof(block_q8_0), _MM_HINT_T0); // Compute combined scale for the block 0 and 1 - const __m128 d_0_1 = _mm_set1_ps( ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d) ); + const __m128 d_0_1 = _mm_set1_ps( GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d) ); const __m128i tmp_0_1 = _mm_loadu_si128((const __m128i *)x[i].qs); @@ -2539,7 +2539,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, _mm_prefetch(&y[i] + 2 * sizeof(block_q8_0), _MM_HINT_T0); // Compute combined scale for the block 2 and 3 - const __m128 d_2_3 = _mm_set1_ps( ggml_fp16_to_fp32(x[i + 1].d) * ggml_fp16_to_fp32(y[i + 1].d) ); + const __m128 d_2_3 = _mm_set1_ps( GGML_FP16_TO_FP32(x[i + 1].d) * GGML_FP16_TO_FP32(y[i + 1].d) ); const __m128i tmp_2_3 = _mm_loadu_si128((const __m128i *)x[i + 1].qs); @@ -2606,7 +2606,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); - sumf += sumi*ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d); + sumf += sumi*GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d); } *s = sumf; @@ -2624,7 +2624,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, sumi += (v0 * y[i].qs[j]) + (v1 * y[i].qs[j + qk/2]); } - sumf += sumi*ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d); + sumf += sumi*GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d); } *s = sumf; @@ -2655,7 +2655,7 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri const block_q8_1 * restrict y0 = &y[i + 0]; const block_q8_1 * restrict y1 = &y[i + 1]; - summs += ggml_fp16_to_fp32(x0->m) * y0->s + ggml_fp16_to_fp32(x1->m) * y1->s; + summs += GGML_FP16_TO_FP32(x0->m) * y0->s + GGML_FP16_TO_FP32(x1->m) * y1->s; const uint8x16_t m4b = vdupq_n_u8(0x0F); @@ -2679,8 +2679,8 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), ggml_fp16_to_fp32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), ggml_fp16_to_fp32(x1->d)*y1->d); + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*y0->d); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*y1->d); #else const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0l), vget_low_s8 (v1_0l)); const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0l), vget_high_s8(v1_0l)); @@ -2697,8 +2697,8 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * 
restri const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), ggml_fp16_to_fp32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), ggml_fp16_to_fp32(x1->d)*y1->d); + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*y0->d); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*y1->d); #endif } @@ -2711,10 +2711,10 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri // Main loop for (int i = 0; i < nb; ++i) { - const float d0 = ggml_fp16_to_fp32(x[i].d); + const float d0 = GGML_FP16_TO_FP32(x[i].d); const float d1 = y[i].d; - summs += ggml_fp16_to_fp32(x[i].m) * y[i].s; + summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; const __m256 d0v = _mm256_set1_ps( d0 ); const __m256 d1v = _mm256_set1_ps( d1 ); @@ -2766,7 +2766,7 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); - sumf += (ggml_fp16_to_fp32(x[i].d)*y[i].d)*sumi + ggml_fp16_to_fp32(x[i].m)*y[i].s; + sumf += (GGML_FP16_TO_FP32(x[i].d)*y[i].d)*sumi + GGML_FP16_TO_FP32(x[i].m)*y[i].s; } *s = sumf; @@ -2784,7 +2784,7 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri sumi += (v0 * y[i].qs[j]) + (v1 * y[i].qs[j + qk/2]); } - sumf += (ggml_fp16_to_fp32(x[i].d)*y[i].d)*sumi + ggml_fp16_to_fp32(x[i].m)*y[i].s; + sumf += (GGML_FP16_TO_FP32(x[i].d)*y[i].d)*sumi + GGML_FP16_TO_FP32(x[i].m)*y[i].s; } *s = sumf; @@ -2864,10 +2864,10 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri #if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), - vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); + vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), - vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); + vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); #else const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); @@ -2884,8 +2884,8 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); #endif } @@ -2946,7 +2946,7 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri wasm_i32x4_dot_i16x8(v0lfh, v1lh)), wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0hfl, v1hl), wasm_i32x4_dot_i16x8(v0hfh, v1hh)))), - wasm_f32x4_splat(ggml_fp16_to_fp32(x0->d) * 
ggml_fp16_to_fp32(y0->d)))); + wasm_f32x4_splat(GGML_FP16_TO_FP32(x0->d) * GGML_FP16_TO_FP32(y0->d)))); } *s = wasm_f32x4_extract_lane(sumv, 0) + wasm_f32x4_extract_lane(sumv, 1) + @@ -2958,7 +2958,7 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri // Main loop for (int i = 0; i < nb; i++) { /* Compute combined scale for the block */ - const __m256 d = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d)); + const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); __m256i bx = bytes_from_nibbles_32(x[i].qs); __m256i bxhi = bytes_from_bits_32(x[i].qh); @@ -2982,7 +2982,7 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri // Main loop for (int i = 0; i < nb; i++) { /* Compute combined scale for the block */ - const __m256 d = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d)); + const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); __m256i bx = bytes_from_nibbles_32(x[i].qs); const __m256i bxhi = bytes_from_bits_32(x[i].qh); @@ -3066,7 +3066,7 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); - sumf += (ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d)) * sumi; + sumf += (GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d)) * sumi; } *s = sumf; @@ -3090,7 +3090,7 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri sumi += (x0 * y[i].qs[j]) + (x1 * y[i].qs[j + qk/2]); } - sumf += (ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d)) * sumi; + sumf += (GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d)) * sumi; } *s = sumf; @@ -3130,8 +3130,8 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri const uint8x16_t m4b = vdupq_n_u8(0x0F); - summs0 += ggml_fp16_to_fp32(x0->m) * y0->s; - summs1 += ggml_fp16_to_fp32(x1->m) * y1->s; + summs0 += GGML_FP16_TO_FP32(x0->m) * y0->s; + summs1 += GGML_FP16_TO_FP32(x1->m) * y1->s; // extract the 5th bit via lookup table ((b) << 4) memcpy(&qh0, x0->qh, sizeof(qh0)); @@ -3176,10 +3176,10 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri #if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), - vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), ggml_fp16_to_fp32(x0->d)*y0->d); + vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), - vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), ggml_fp16_to_fp32(x1->d)*y1->d); + vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*y1->d); #else const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); @@ -3196,8 +3196,8 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), ggml_fp16_to_fp32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), ggml_fp16_to_fp32(x1->d)*y1->d); + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*y0->d); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), 
GGML_FP16_TO_FP32(x1->d)*y1->d); #endif } @@ -3215,7 +3215,7 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri const block_q5_1 * restrict x0 = &x[i]; const block_q8_1 * restrict y0 = &y[i]; - summs += ggml_fp16_to_fp32(x0->m) * y0->s; + summs += GGML_FP16_TO_FP32(x0->m) * y0->s; const v128_t m4b = wasm_i8x16_splat(0x0F); @@ -3262,7 +3262,7 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri wasm_i32x4_dot_i16x8(v0lfh, v1lh)), wasm_i32x4_add(wasm_i32x4_dot_i16x8(v0hfl, v1hl), wasm_i32x4_dot_i16x8(v0hfh, v1hh)))), - wasm_f32x4_splat(ggml_fp16_to_fp32(x0->d) * y0->d))); + wasm_f32x4_splat(GGML_FP16_TO_FP32(x0->d) * y0->d))); } *s = wasm_f32x4_extract_lane(sumv, 0) + wasm_f32x4_extract_lane(sumv, 1) + @@ -3275,9 +3275,9 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri // Main loop for (int i = 0; i < nb; i++) { - const __m256 dx = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d)); + const __m256 dx = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d)); - summs += ggml_fp16_to_fp32(x[i].m) * y[i].s; + summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; __m256i bx = bytes_from_nibbles_32(x[i].qs); __m256i bxhi = bytes_from_bits_32(x[i].qh); @@ -3302,9 +3302,9 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri // Main loop for (int i = 0; i < nb; i++) { - const __m256 dx = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d)); + const __m256 dx = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d)); - summs += ggml_fp16_to_fp32(x[i].m) * y[i].s; + summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; __m256i bx = bytes_from_nibbles_32(x[i].qs); const __m256i bxhi = bytes_from_bits_32(x[i].qh); @@ -3385,7 +3385,7 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri int sumi = __riscv_vmv_x_s_i32m1_i32(vs2); - sumf += (ggml_fp16_to_fp32(x[i].d)*y[i].d)*sumi + ggml_fp16_to_fp32(x[i].m)*y[i].s; + sumf += (GGML_FP16_TO_FP32(x[i].d)*y[i].d)*sumi + GGML_FP16_TO_FP32(x[i].m)*y[i].s; } *s = sumf; @@ -3409,7 +3409,7 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri sumi += (x0 * y[i].qs[j]) + (x1 * y[i].qs[j + qk/2]); } - sumf += (ggml_fp16_to_fp32(x[i].d)*y[i].d)*sumi + ggml_fp16_to_fp32(x[i].m)*y[i].s; + sumf += (GGML_FP16_TO_FP32(x[i].d)*y[i].d)*sumi + GGML_FP16_TO_FP32(x[i].m)*y[i].s; } *s = sumf; @@ -3451,11 +3451,11 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri #if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), - vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); + vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), - vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); + vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); #else const int16x8_t p0_0 = vmull_s8(vget_low_s8 (x0_0), vget_low_s8 (y0_0)); @@ -3473,8 +3473,8 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri const int32x4_t p2 = vaddq_s32(vpaddlq_s16(p1_0), vpaddlq_s16(p1_1)); const int32x4_t p3 = vaddq_s32(vpaddlq_s16(p1_2), vpaddlq_s16(p1_3)); - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(p0, p1)), ggml_fp16_to_fp32(x0->d)*ggml_fp16_to_fp32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, 
vcvtq_f32_s32(vaddq_s32(p2, p3)), ggml_fp16_to_fp32(x1->d)*ggml_fp16_to_fp32(y1->d)); + sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(p0, p1)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(p2, p3)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); #endif } @@ -3486,7 +3486,7 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri // Main loop for (int i = 0; i < nb; ++i) { // Compute combined scale for the block - const __m256 d = _mm256_set1_ps(ggml_fp16_to_fp32(x[i].d) * ggml_fp16_to_fp32(y[i].d)); + const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); __m256i bx = _mm256_loadu_si256((const __m256i *)x[i].qs); __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); @@ -3517,7 +3517,7 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri int sumi = __riscv_vmv_x_s_i32m1_i32(v_sum); - sumf += sumi*(ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d)); + sumf += sumi*(GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d)); } *s = sumf; @@ -3532,7 +3532,7 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri sumi += x[i].qs[j]*y[i].qs[j]; } - sumf += sumi*(ggml_fp16_to_fp32(x[i].d)*ggml_fp16_to_fp32(y[i].d)); + sumf += sumi*(GGML_FP16_TO_FP32(x[i].d)*GGML_FP16_TO_FP32(y[i].d)); } *s = sumf; @@ -3562,8 +3562,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -3641,8 +3641,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -3708,8 +3708,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float dall = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float dall = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -3816,8 +3816,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const int8_t * q8 = y[i].qs; const uint8_t * sc = x[i].scales; - const float dall = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float dall = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); size_t vl = 16; @@ -3903,8 +3903,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri summs += y[i].bsums[j] * (sc[j] >> 4); } - const float dall = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float dall = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin); int isum = 0; int is = 0; @@ 
-4021,8 +4021,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -4073,8 +4073,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -4188,8 +4188,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri summs += y[i].bsums[j] * (sc[j] >> 4); } - const float dall = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float dall = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin); isum[0] = isum[1] = isum[2] = isum[3] = 0; for (int l = 0; l < 16; ++l) { @@ -4242,7 +4242,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q3 = x[i].qs; const uint8_t * restrict qh = x[i].hmask; @@ -4350,7 +4350,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q3 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -4455,7 +4455,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q3 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -4676,7 +4676,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri } - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; sumf += d*sum_t; @@ -4741,7 +4741,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri for (int l = 0; l < 8; ++l) aux32[l] += (scales[j] - 32) * aux16[l]; q8 += 8; a += 8; } - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l]; } for (int l = 0; l < 8; ++l) sumf += sums[l]; @@ -4843,7 +4843,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q3 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -4914,7 +4914,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q3 = x[i].qs; const int8_t * restrict q8 = 
y[i].qs; @@ -5099,7 +5099,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q8 += 8; a += 8; for (int l = 0; l < 8; ++l) aux32[l] += scales[j] * aux16[l]; } - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l]; } for (int l = 0; l < 8; ++l) sumf += sums[l]; @@ -5139,8 +5139,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const int16x8_t q8sums = vpaddq_s16(vld1q_s16(y[i].bsums), vld1q_s16(y[i].bsums + 8)); @@ -5222,8 +5222,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); memcpy(utmp, x[i].scales, 12); utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4); @@ -5288,8 +5288,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q4 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -5371,8 +5371,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri size_t vl = 8; - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin); vint16mf2_t q8sums_0 = __riscv_vlse16_v_i16mf2(y[i].bsums, 4, vl); vint16mf2_t q8sums_1 = __riscv_vlse16_v_i16mf2(y[i].bsums+1, 4, vl); @@ -5482,9 +5482,9 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l]; q8 += 8; a += 8; } - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l]; - const float dmin = ggml_fp16_to_fp32(x[i].dmin) * y[i].d; + const float dmin = GGML_FP16_TO_FP32(x[i].dmin) * y[i].d; sumf -= dmin * sumi; } for (int l = 0; l < 8; ++l) sumf += sums[l]; @@ -5586,8 +5586,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = ggml_fp16_to_fp32(x[i].d[0]) * y[i].d; - const float m = ggml_fp16_to_fp32(x[i].d[1]) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d[0]) * y[i].d; + const float m = GGML_FP16_TO_FP32(x[i].d[1]) * y[i].d; const __m256 vd = _mm256_set1_ps(d); const uint16_t * a = (const uint16_t *)x[i].scales; @@ -5632,8 +5632,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = ggml_fp16_to_fp32(x[i].d[0]) * y[i].d; - const float m = ggml_fp16_to_fp32(x[i].d[1]) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d[0]) * y[i].d; + const float m = 
GGML_FP16_TO_FP32(x[i].d[1]) * y[i].d; const __m256 vd = _mm256_set1_ps(d); const uint16_t * a = (const uint16_t *)x[i].scales; @@ -5689,8 +5689,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri s16[0] = b[0] & 0x0f0f; s16[1] = (b[0] >> 4) & 0x0f0f; - sumf -= y[i].d * ggml_fp16_to_fp32(x[i].d[1]) * (scales[2] * (y[i].bsums[0] + y[i].bsums[1]) + scales[3] * (y[i].bsums[2] + y[i].bsums[3])); - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d[0]); + sumf -= y[i].d * GGML_FP16_TO_FP32(x[i].d[1]) * (scales[2] * (y[i].bsums[0] + y[i].bsums[1]) + scales[3] * (y[i].bsums[2] + y[i].bsums[3])); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d[0]); size_t vl = 32; @@ -5739,9 +5739,9 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri s16[0] = b[0] & 0x0f0f; s16[1] = (b[0] >> 4) & 0x0f0f; - sumf -= y[i].d * ggml_fp16_to_fp32(x[i].d[1]) * (scales[2] * (y[i].bsums[0] + y[i].bsums[1]) + scales[3] * (y[i].bsums[2] + y[i].bsums[3])); + sumf -= y[i].d * GGML_FP16_TO_FP32(x[i].d[1]) * (scales[2] * (y[i].bsums[0] + y[i].bsums[1]) + scales[3] * (y[i].bsums[2] + y[i].bsums[3])); - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d[0]); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d[0]); for (int j = 0; j < QK_K/32; ++j) { for (int l = 0; l < 16; ++l) aux16[l] = q8[l] * a[l]; @@ -5789,8 +5789,8 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const int16x8_t q8sums = vpaddq_s16(vld1q_s16(y[i].bsums), vld1q_s16(y[i].bsums + 8)); @@ -5878,8 +5878,8 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const int8_t * restrict q8 = y[i].qs; #if QK_K == 256 - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); memcpy(utmp, x[i].scales, 12); utmp[3] = ((utmp[2] >> 4) & kmask2) | (((utmp[1] >> 6) & kmask3) << 4); @@ -5960,8 +5960,8 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); - const float dmin = -y[i].d * ggml_fp16_to_fp32(x[i].dmin); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q5 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -6065,8 +6065,8 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const uint8_t * restrict hm = x[i].qh; const int8_t * restrict q8 = y[i].qs; - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; - const float dmin = ggml_fp16_to_fp32(x[i].dmin) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const float dmin = GGML_FP16_TO_FP32(x[i].dmin) * y[i].d; vint16mf2_t q8sums_0 = __riscv_vlse16_v_i16mf2(y[i].bsums, 4, vl); vint16mf2_t q8sums_1 = __riscv_vlse16_v_i16mf2(y[i].bsums+1, 4, vl); @@ -6188,9 +6188,9 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l]; q8 += 8; a += 8; } - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * 
y[i].d; for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l]; - const float dmin = ggml_fp16_to_fp32(x[i].dmin) * y[i].d; + const float dmin = GGML_FP16_TO_FP32(x[i].dmin) * y[i].d; sumf -= dmin * sumi; } for (int l = 0; l < 8; ++l) sumf += sums[l]; @@ -6288,7 +6288,7 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const uint8_t * restrict q5 = x[i].qs; const int8_t * restrict q8 = y[i].qs; - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const __m256i q5bits = _mm256_loadu_si256((const __m256i*)q5); @@ -6334,7 +6334,7 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const uint8_t * restrict q5 = x[i].qs; const int8_t * restrict q8 = y[i].qs; - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const __m256i q5bits = _mm256_loadu_si256((const __m256i*)q5); @@ -6471,7 +6471,7 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri for (int l = 0; l < 8; ++l) a[8*is + l] -= (hm[l] & m ? 0 : 16); } - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const int8_t * restrict sc = x[i].scales; for (int j = 0; j < QK_K/16; ++j) { @@ -6514,7 +6514,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d_all = ggml_fp16_to_fp32(x[i].d); + const float d_all = GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q6 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -6646,7 +6646,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q4 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -6726,7 +6726,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q4 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -6838,7 +6838,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri float sumf = 0; for (int i = 0; i < nb; ++i) { - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; const uint8_t * restrict q6 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -6955,7 +6955,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l]; q8 += 8; a += 8; } - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l]; } for (int l = 0; l < 8; ++l) sumf += sums[l]; @@ -7053,7 +7053,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q4 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -7110,7 +7110,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - const float d = y[i].d * ggml_fp16_to_fp32(x[i].d); + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const 
uint8_t * restrict q4 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -7269,7 +7269,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri for (int l = 0; l < 8; ++l) aux32[l] += scale * aux16[l]; q8 += 8; a += 8; } - const float d = ggml_fp16_to_fp32(x[i].d) * y[i].d; + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; for (int l = 0; l < 8; ++l) sums[l] += d * aux32[l]; } for (int l = 0; l < 8; ++l) sumf += sums[l]; diff --git a/ggml-quants.h b/ggml-quants.h index d88f99e33..70c12c274 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -1,22 +1,12 @@ #pragma once -// This is a private API for quantization and dequantization -// Should not be used directly, use ggml.h instead +#include "ggml-impl.h" -#include "ggml.h" +// GGML internal header #include -#include #include -#ifndef static_assert -#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) -#define static_assert(cond, msg) _Static_assert(cond, msg) -#else -#define static_assert(cond, msg) struct global_scope_noop_trick -#endif -#endif - #define QK4_0 32 typedef struct { ggml_fp16_t d; // delta diff --git a/ggml.c b/ggml.c index 95f72c35e..84407b122 100644 --- a/ggml.c +++ b/ggml.c @@ -1,6 +1,6 @@ #define _CRT_SECURE_NO_DEPRECATE // Disables ridiculous "unsafe" warnigns on Windows -#include "ggml.h" +#include "ggml-impl.h" #include "ggml-quants.h" #if defined(_MSC_VER) || defined(__MINGW32__) @@ -27,18 +27,6 @@ #include #endif -// static_assert should be a #define, but if it's not, -// fall back to the _Static_assert C11 keyword. -// if C99 - static_assert is noop -// ref: https://stackoverflow.com/a/53923785/4039976 -#ifndef static_assert -#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) -#define static_assert(cond, msg) _Static_assert(cond, msg) -#else -#define static_assert(cond, msg) struct global_scope_noop_trick -#endif -#endif - #if defined(_MSC_VER) // disable "possible loss of data" to avoid hundreds of casts // we should just be careful :) @@ -106,23 +94,11 @@ typedef void * thread_ret_t; #include #endif + #ifdef GGML_USE_CPU_HBM #include #endif -// __FMA__ and __F16C__ are not defined in MSVC, however they are implied with AVX2/AVX512 -#if defined(_MSC_VER) && (defined(__AVX2__) || defined(__AVX512F__)) -#ifndef __FMA__ -#define __FMA__ -#endif -#ifndef __F16C__ -#define __F16C__ -#endif -#ifndef __SSE3__ -#define __SSE3__ -#endif -#endif - /*#define GGML_PERF*/ #define GGML_DEBUG 0 #define GGML_GELU_FP16 @@ -248,213 +224,27 @@ inline static void * ggml_aligned_malloc(size_t size) { #include "ggml-opencl.h" #endif -#undef MIN -#undef MAX -#define MIN(a, b) ((a) < (b) ? (a) : (b)) -#define MAX(a, b) ((a) > (b) ? 
(a) : (b)) - // floating point type used to accumulate sums typedef double ggml_float; -// 16-bit float -// on Arm, we use __fp16 -// on x86, we use uint16_t -#if defined(__ARM_NEON) && !defined(_MSC_VER) - -// if YCM cannot find , make a symbolic link to it, for example: -// -// $ ln -sfn /Library/Developer/CommandLineTools/usr/lib/clang/13.1.6/include/arm_neon.h ./src/ -// -#include - -#define GGML_COMPUTE_FP16_TO_FP32(x) ((float) (x)) -#define GGML_COMPUTE_FP32_TO_FP16(x) (x) - -#define GGML_FP16_TO_FP32(x) ((float) (x)) -#define GGML_FP32_TO_FP16(x) (x) - -#else - -#ifdef __wasm_simd128__ -#include -#else -#ifdef __POWER9_VECTOR__ -#include -#undef bool -#define bool _Bool -#else -#if defined(_MSC_VER) || defined(__MINGW32__) -#include -#else -#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) || defined(__SSE3__) -#if !defined(__riscv) -#include -#endif -#endif -#endif -#endif -#endif - -#ifdef __riscv_v_intrinsic -#include -#endif - -#ifdef __F16C__ - -#ifdef _MSC_VER -#define GGML_COMPUTE_FP16_TO_FP32(x) _mm_cvtss_f32(_mm_cvtph_ps(_mm_cvtsi32_si128(x))) -#define GGML_COMPUTE_FP32_TO_FP16(x) _mm_extract_epi16(_mm_cvtps_ph(_mm_set_ss(x), 0), 0) -#else -#define GGML_COMPUTE_FP16_TO_FP32(x) _cvtsh_ss(x) -#define GGML_COMPUTE_FP32_TO_FP16(x) _cvtss_sh(x, 0) -#endif - -#elif defined(__POWER9_VECTOR__) - -#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) -#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) -/* the inline asm below is about 12% faster than the lookup method */ -#define GGML_FP16_TO_FP32(x) GGML_COMPUTE_FP16_TO_FP32(x) -#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) - -static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { - register float f; - register double d; - __asm__( - "mtfprd %0,%2\n" - "xscvhpdp %0,%0\n" - "frsp %1,%0\n" : - /* temp */ "=d"(d), - /* out */ "=f"(f): - /* in */ "r"(h)); - return f; -} - -static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { - register double d; - register ggml_fp16_t r; - __asm__( /* xscvdphp can work on double or single precision */ - "xscvdphp %0,%2\n" - "mffprd %1,%0\n" : - /* temp */ "=d"(d), - /* out */ "=r"(r): - /* in */ "f"(f)); - return r; -} - -#else - -// FP16 <-> FP32 -// ref: https://github.com/Maratyszcza/FP16 - -static inline float fp32_from_bits(uint32_t w) { - union { - uint32_t as_bits; - float as_value; - } fp32; - fp32.as_bits = w; - return fp32.as_value; -} - -static inline uint32_t fp32_to_bits(float f) { - union { - float as_value; - uint32_t as_bits; - } fp32; - fp32.as_value = f; - return fp32.as_bits; -} - -static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { - const uint32_t w = (uint32_t) h << 16; - const uint32_t sign = w & UINT32_C(0x80000000); - const uint32_t two_w = w + w; - - const uint32_t exp_offset = UINT32_C(0xE0) << 23; -#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) || defined(__GNUC__) && !defined(__STRICT_ANSI__) - const float exp_scale = 0x1.0p-112f; -#else - const float exp_scale = fp32_from_bits(UINT32_C(0x7800000)); -#endif - const float normalized_value = fp32_from_bits((two_w >> 4) + exp_offset) * exp_scale; - - const uint32_t magic_mask = UINT32_C(126) << 23; - const float magic_bias = 0.5f; - const float denormalized_value = fp32_from_bits((two_w >> 17) | magic_mask) - magic_bias; - - const uint32_t denormalized_cutoff = UINT32_C(1) << 27; - const uint32_t result = sign | - (two_w < denormalized_cutoff ? 
fp32_to_bits(denormalized_value) : fp32_to_bits(normalized_value)); - return fp32_from_bits(result); -} - -static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { -#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) || defined(__GNUC__) && !defined(__STRICT_ANSI__) - const float scale_to_inf = 0x1.0p+112f; - const float scale_to_zero = 0x1.0p-110f; -#else - const float scale_to_inf = fp32_from_bits(UINT32_C(0x77800000)); - const float scale_to_zero = fp32_from_bits(UINT32_C(0x08800000)); -#endif - float base = (fabsf(f) * scale_to_inf) * scale_to_zero; - - const uint32_t w = fp32_to_bits(f); - const uint32_t shl1_w = w + w; - const uint32_t sign = w & UINT32_C(0x80000000); - uint32_t bias = shl1_w & UINT32_C(0xFF000000); - if (bias < UINT32_C(0x71000000)) { - bias = UINT32_C(0x71000000); - } - - base = fp32_from_bits((bias >> 1) + UINT32_C(0x07800000)) + base; - const uint32_t bits = fp32_to_bits(base); - const uint32_t exp_bits = (bits >> 13) & UINT32_C(0x00007C00); - const uint32_t mantissa_bits = bits & UINT32_C(0x00000FFF); - const uint32_t nonsign = exp_bits + mantissa_bits; - return (sign >> 16) | (shl1_w > UINT32_C(0xFF000000) ? UINT16_C(0x7E00) : nonsign); -} - -#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) -#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) - -#endif // __F16C__ - -#endif // __ARM_NEON - // // global data // // precomputed gelu table for f16 (128 KB) -static ggml_fp16_t table_gelu_f16[1 << 16]; +static ggml_fp16_t ggml_table_gelu_f16[1 << 16]; // precomputed quick gelu table for f16 (128 KB) -static ggml_fp16_t table_gelu_quick_f16[1 << 16]; +static ggml_fp16_t ggml_table_gelu_quick_f16[1 << 16]; // precomputed silu table for f16 (128 KB) -static ggml_fp16_t table_silu_f16[1 << 16]; +static ggml_fp16_t ggml_table_silu_f16[1 << 16]; // precomputed exp table for f16 (128 KB) -static ggml_fp16_t table_exp_f16[1 << 16]; +static ggml_fp16_t ggml_table_exp_f16[1 << 16]; -// precomputed f32 table for f16 (256 KB) -static float table_f32_f16[1 << 16]; - -// On ARM NEON, it's quicker to directly convert x -> x instead of calling into ggml_lookup_fp16_to_fp32, -// so we define GGML_FP16_TO_FP32 and GGML_FP32_TO_FP16 elsewhere for NEON. -// This is also true for POWER9. 
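For reference, here is the lookup scheme that the removed lines below implement (and that the new GGML_FP16_TO_FP32 call sites throughout this patch rely on), as a self-contained sketch. On ARM NEON and POWER9 the macro stays a plain cast or native conversion; elsewhere it resolves to a 64K-entry table precomputed once at init time using the exact bit-level converter shown above. This is a minimal sketch assuming C99 (for the hex-float literal); fp16_to_fp32_compute and init_fp16_table are illustrative stand-ins for the ggml_ internals:

    #include <stdint.h>
    #include <string.h>

    typedef uint16_t ggml_fp16_t;

    static float table_f32_f16[1 << 16]; // 256 KB: one f32 per f16 bit pattern

    static inline float fp32_from_bits(uint32_t w) {
        float f;
        memcpy(&f, &w, sizeof(f)); // bit cast without aliasing violations
        return f;
    }

    static inline uint32_t fp32_to_bits(float f) {
        uint32_t w;
        memcpy(&w, &f, sizeof(w));
        return w;
    }

    // exact fp16 -> fp32, same algorithm as the removed ggml_compute_fp16_to_fp32
    static inline float fp16_to_fp32_compute(ggml_fp16_t h) {
        const uint32_t w     = (uint32_t) h << 16;
        const uint32_t sign  = w & UINT32_C(0x80000000);
        const uint32_t two_w = w + w;

        const uint32_t exp_offset = UINT32_C(0xE0) << 23;
        const float    exp_scale  = 0x1.0p-112f; // C99 hex-float
        const float normalized_value =
            fp32_from_bits((two_w >> 4) + exp_offset) * exp_scale;

        const uint32_t magic_mask = UINT32_C(126) << 23;
        const float    magic_bias = 0.5f;
        const float denormalized_value =
            fp32_from_bits((two_w >> 17) | magic_mask) - magic_bias;

        const uint32_t denormalized_cutoff = UINT32_C(1) << 27;
        const uint32_t result = sign | (two_w < denormalized_cutoff
            ? fp32_to_bits(denormalized_value)
            : fp32_to_bits(normalized_value));
        return fp32_from_bits(result);
    }

    // filled once (ggml_init does this for the real table); afterwards every
    // GGML_FP16_TO_FP32 is a single indexed load
    static void init_fp16_table(void) {
        for (uint32_t i = 0; i < (1u << 16); ++i) {
            table_f32_f16[i] = fp16_to_fp32_compute((ggml_fp16_t) i);
        }
    }

    static inline float lookup_fp16_to_fp32(ggml_fp16_t h) {
        // the real code memcpy's h into a uint16_t first, since ggml_fp16_t
        // may be __fp16 on NEON; here it is already an integer index
        return table_f32_f16[h];
    }

Later in this patch the same table becomes the shared symbol ggml_table_f32_f16, declared in ggml-impl.h and defined in ggml.c, which is what lets ggml-quants.c switch to the macros in the hunks above.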
-#if !defined(GGML_FP16_TO_FP32) || !defined(GGML_FP32_TO_FP16) - -inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { - uint16_t s; - memcpy(&s, &f, sizeof(uint16_t)); - return table_f32_f16[s]; -} - -#define GGML_FP16_TO_FP32(x) ggml_lookup_fp16_to_fp32(x) -#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) - -#endif +// precomputed f32 table for f16 (256 KB) (ggml-impl.h) +float ggml_table_f32_f16[1 << 16]; // note: do not use these inside ggml.c // these are meant to be used via the ggml.h API @@ -632,6 +422,28 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot = ggml_vec_dot_q4_1_q8_1, .vec_dot_type = GGML_TYPE_Q8_1, }, + [4] = { // GGML_TYPE_Q4_2 + .type_name = "DEPRECATED", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, + .to_float = NULL, + .from_float = NULL, + .from_float_reference = NULL, + .vec_dot = NULL, + .vec_dot_type = GGML_TYPE_COUNT, + }, + [5] = { // GGML_TYPE_Q4_3 + .type_name = "DEPRECATED", + .blck_size = 0, + .type_size = 0, + .is_quantized = false, + .to_float = NULL, + .from_float = NULL, + .from_float_reference = NULL, + .vec_dot = NULL, + .vec_dot_type = GGML_TYPE_COUNT, + }, [GGML_TYPE_Q5_0] = { .type_name = "q5_0", .blck_size = QK5_0, @@ -1551,7 +1363,7 @@ inline static float ggml_gelu_f32(float x) { inline static void ggml_vec_gelu_f16(const int n, ggml_fp16_t * y, const ggml_fp16_t * x) { const uint16_t * i16 = (const uint16_t *) x; for (int i = 0; i < n; ++i) { - y[i] = table_gelu_f16[i16[i]]; + y[i] = ggml_table_gelu_f16[i16[i]]; } } @@ -1561,7 +1373,7 @@ inline static void ggml_vec_gelu_f32(const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) { ggml_fp16_t fp16 = GGML_FP32_TO_FP16(x[i]); memcpy(&t, &fp16, sizeof(uint16_t)); - y[i] = GGML_FP16_TO_FP32(table_gelu_f16[t]); + y[i] = GGML_FP16_TO_FP32(ggml_table_gelu_f16[t]); } } #else @@ -1579,7 +1391,7 @@ inline static float ggml_gelu_quick_f32(float x) { //inline static void ggml_vec_gelu_quick_f16(const int n, ggml_fp16_t * y, const ggml_fp16_t * x) { // const uint16_t * i16 = (const uint16_t *) x; // for (int i = 0; i < n; ++i) { -// y[i] = table_gelu_quick_f16[i16[i]]; +// y[i] = ggml_table_gelu_quick_f16[i16[i]]; // } //} @@ -1589,7 +1401,7 @@ inline static void ggml_vec_gelu_quick_f32(const int n, float * y, const float * for (int i = 0; i < n; ++i) { ggml_fp16_t fp16 = GGML_FP32_TO_FP16(x[i]); memcpy(&t, &fp16, sizeof(uint16_t)); - y[i] = GGML_FP16_TO_FP32(table_gelu_quick_f16[t]); + y[i] = GGML_FP16_TO_FP32(ggml_table_gelu_quick_f16[t]); } } #else @@ -1608,7 +1420,7 @@ inline static float ggml_silu_f32(float x) { //inline static void ggml_vec_silu_f16(const int n, ggml_fp16_t * y, const ggml_fp16_t * x) { // const uint16_t * i16 = (const uint16_t *) x; // for (int i = 0; i < n; ++i) { -// y[i] = table_silu_f16[i16[i]]; +// y[i] = ggml_table_silu_f16[i16[i]]; // } //} @@ -1618,7 +1430,7 @@ inline static void ggml_vec_silu_f32(const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) { ggml_fp16_t fp16 = GGML_FP32_TO_FP16(x[i]); memcpy(&t, &fp16, sizeof(uint16_t)); - y[i] = GGML_FP16_TO_FP32(table_silu_f16[t]); + y[i] = GGML_FP16_TO_FP32(ggml_table_silu_f16[t]); } } #else @@ -2334,11 +2146,11 @@ struct ggml_context * ggml_init(struct ggml_init_params params) { for (int i = 0; i < (1 << 16); ++i) { uint16_t ui = i; memcpy(&ii, &ui, sizeof(ii)); - const float f = table_f32_f16[i] = GGML_COMPUTE_FP16_TO_FP32(ii); - table_gelu_f16[i] = GGML_FP32_TO_FP16(ggml_gelu_f32(f)); - table_gelu_quick_f16[i] = 
GGML_FP32_TO_FP16(ggml_gelu_quick_f32(f)); - table_silu_f16[i] = GGML_FP32_TO_FP16(ggml_silu_f32(f)); - table_exp_f16[i] = GGML_FP32_TO_FP16(expf(f)); + const float f = ggml_table_f32_f16[i] = GGML_COMPUTE_FP16_TO_FP32(ii); + ggml_table_gelu_f16[i] = GGML_FP32_TO_FP16(ggml_gelu_f32(f)); + ggml_table_gelu_quick_f16[i] = GGML_FP32_TO_FP16(ggml_gelu_quick_f32(f)); + ggml_table_silu_f16[i] = GGML_FP32_TO_FP16(ggml_silu_f32(f)); + ggml_table_exp_f16[i] = GGML_FP32_TO_FP16(expf(f)); } const uint64_t t_end = ggml_time_us(); UNUSED(t_end); @@ -10701,7 +10513,7 @@ static void ggml_compute_forward_soft_max_f32( // const float val = (sp[i] == -INFINITY) ? 0.0 : exp(sp[i] - max); ggml_fp16_t s = GGML_FP32_TO_FP16(sp[i] - max); memcpy(&scvt, &s, sizeof(scvt)); - const float val = GGML_FP16_TO_FP32(table_exp_f16[scvt]); + const float val = GGML_FP16_TO_FP32(ggml_table_exp_f16[scvt]); sum += (ggml_float)val; dp[i] = val; } @@ -12990,7 +12802,7 @@ static void ggml_compute_forward_flash_attn_f32( #else ggml_fp16_t s = GGML_FP32_TO_FP16(SS[j] - max); memcpy(&scvt[j], &s, sizeof(uint16_t)); - const float val = GGML_FP16_TO_FP32(table_exp_f16[scvt[j]]); + const float val = GGML_FP16_TO_FP32(ggml_table_exp_f16[scvt[j]]); #endif sump[j] += (ggml_float)val; SS[j] = val; @@ -13192,7 +13004,7 @@ static void ggml_compute_forward_flash_attn_f16( } else { ggml_fp16_t s = GGML_FP32_TO_FP16(SS[j] - max); memcpy(&scvt[j], &s, sizeof(uint16_t)); - const float val = GGML_FP16_TO_FP32(table_exp_f16[scvt[j]]); + const float val = GGML_FP16_TO_FP32(ggml_table_exp_f16[scvt[j]]); sump[j] += (ggml_float)val; SS[j] = val; } @@ -13643,7 +13455,7 @@ static void ggml_compute_forward_flash_attn_back_f32( #else ggml_fp16_t s = GGML_FP32_TO_FP16(SR[j] - max); memcpy(&scvt[j], &s, sizeof(uint16_t)); - const float val = GGML_FP16_TO_FP32(table_exp_f16[scvt[j]]); + const float val = GGML_FP16_TO_FP32(ggml_table_exp_f16[scvt[j]]); #endif sump[j] += (ggml_float)val; SW[j] = val; @@ -14393,7 +14205,7 @@ static void ggml_compute_forward_cross_entropy_loss_f32( #else ggml_fp16_t s = GGML_FP32_TO_FP16(s0[i] - max); memcpy(&scvt, &s, sizeof(scvt)); - const float val = GGML_FP16_TO_FP32(table_exp_f16[scvt]); + const float val = GGML_FP16_TO_FP32(ggml_table_exp_f16[scvt]); #endif sum += (ggml_float)val; st[i] = val; @@ -14507,7 +14319,7 @@ static void ggml_compute_forward_cross_entropy_loss_back_f32( #else ggml_fp16_t s = GGML_FP32_TO_FP16(s0[i] - max); memcpy(&scvt, &s, sizeof(scvt)); - const float val = GGML_FP16_TO_FP32(table_exp_f16[scvt]); + const float val = GGML_FP16_TO_FP32(ggml_table_exp_f16[scvt]); #endif sum += (ggml_float)val; ds0[i] = val; diff --git a/llama.cpp b/llama.cpp index a4340d527..e599917a8 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1467,7 +1467,7 @@ static int32_t llama_kv_cache_cell_max(const struct llama_kv_cache & cache) { } static void llama_kv_cache_clear(struct llama_kv_cache & cache) { - for (int32_t i = 0; i < cache.size; ++i) { + for (int32_t i = 0; i < (int32_t) cache.size; ++i) { cache.cells[i].pos = -1; cache.cells[i].seq_id.clear(); } diff --git a/tests/test-double-float.cpp b/tests/test-double-float.cpp index afd7bf77f..753dae911 100644 --- a/tests/test-double-float.cpp +++ b/tests/test-double-float.cpp @@ -4,7 +4,7 @@ #undef NDEBUG #include -#if !defined(__riscv) && !defined(__s390__) +#if !defined(__riscv) && !defined(__s390__) && !defined(__ARM_NEON) #include #endif #include diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp index 884af4054..a2459a286 100644 --- 
a/tests/test-quantize-fns.cpp +++ b/tests/test-quantize-fns.cpp @@ -129,6 +129,13 @@ int main(int argc, char * argv[]) { ggml_type type = (ggml_type) i; ggml_type_traits_t qfns = ggml_internal_get_type_traits(type); + // deprecated - skip + if (qfns.blck_size == 0) { + continue; + } + + printf("Testing %s\n", ggml_type_name((ggml_type) i)); + if (qfns.from_float && qfns.to_float) { const float total_error = total_quantization_error(qfns, test_size, test_data.data()); const float max_quantization_error = From 07178c98e1b61a5e2af39d347add12e7eb9e08e1 Mon Sep 17 00:00:00 2001 From: Tungsten842 <886724vf@anonaddy.me> Date: Tue, 31 Oct 2023 18:24:03 +0100 Subject: [PATCH 047/859] flake.nix: fix for rocm 5.7 (#3853) --- flake.lock | 12 ++++++------ flake.nix | 10 ++++++---- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/flake.lock b/flake.lock index 070f0e161..0455f6561 100644 --- a/flake.lock +++ b/flake.lock @@ -5,11 +5,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1692799911, - "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=", + "lastModified": 1694529238, + "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", "owner": "numtide", "repo": "flake-utils", - "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44", + "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", "type": "github" }, "original": { @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1698134075, - "narHash": "sha256-foCD+nuKzfh49bIoiCBur4+Fx1nozo+4C/6k8BYk4sg=", + "lastModified": 1698318101, + "narHash": "sha256-gUihHt3yPD7bVqg+k/UVHgngyaJ3DMEBchbymBMvK1E=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "8efd5d1e283604f75a808a20e6cde0ef313d07d4", + "rev": "63678e9f3d3afecfeafa0acead6239cdb447574c", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index fa34394b2..4cf28d5c1 100644 --- a/flake.nix +++ b/flake.nix @@ -11,8 +11,7 @@ meta.mainProgram = "llama"; inherit (pkgs.stdenv) isAarch32 isAarch64 isDarwin; buildInputs = with pkgs; [ openmpi ]; - osSpecific = with pkgs; buildInputs ++ - ( + osSpecific = with pkgs; buildInputs ++ ( if isAarch64 && isDarwin then with pkgs.darwin.apple_sdk_11_0.frameworks; [ Accelerate @@ -96,12 +95,15 @@ }; packages.rocm = pkgs.stdenv.mkDerivation { inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = with pkgs; buildInputs ++ [ hip hipblas rocblas ]; + buildInputs = with pkgs.rocmPackages; buildInputs ++ [ clr hipblas rocblas ]; cmakeFlags = cmakeFlags ++ [ "-DLLAMA_HIPBLAS=1" "-DCMAKE_C_COMPILER=hipcc" "-DCMAKE_CXX_COMPILER=hipcc" - "-DCMAKE_POSITION_INDEPENDENT_CODE=ON" + # Build all targets supported by rocBLAS. When updating search for TARGET_LIST_ROCM + # in github.com/ROCmSoftwarePlatform/rocBLAS/blob/develop/CMakeLists.txt + # and select the line that matches the current nixpkgs version of rocBLAS. + "-DAMDGPU_TARGETS=gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx940;gfx941;gfx942;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102" ]; }; apps.llama-server = { From 238657db2364cfb728c694470a4a81702afea760 Mon Sep 17 00:00:00 2001 From: kalomaze <66376113+kalomaze@users.noreply.github.com> Date: Tue, 31 Oct 2023 14:44:49 -0500 Subject: [PATCH 048/859] samplers : Min-P sampler implementation [alternative to Top P/Top K] (#3841) * Introduce the new Min-P sampler by @kalomaze The Min-P sampling method was designed as an alternative to Top-P, and aims to ensure a balance of quality and variety. 
The parameter *p* represents the minimum probability for a token to be considered, relative to the probability of the most likely token. * Min-P enabled and set to 0.05 default --------- Co-authored-by: Georgi Gerganov Co-authored-by: cebtenzzre --- common/common.cpp | 8 ++++++++ common/sampling.cpp | 6 ++++-- common/sampling.h | 1 + examples/main/README.md | 8 ++++++++ llama.cpp | 26 ++++++++++++++++++++++++++ llama.h | 7 +++++++ 6 files changed, 54 insertions(+), 2 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index c187128d6..dc4865e80 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -218,6 +218,12 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { break; } sparams.top_p = std::stof(argv[i]); + } else if (arg == "--min-p") { + if (++i >= argc) { + invalid_param = true; + break; + } + sparams.min_p = std::stof(argv[i]); } else if (arg == "--temp") { if (++i >= argc) { invalid_param = true; @@ -679,6 +685,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); printf(" --top-k N top-k sampling (default: %d, 0 = disabled)\n", sparams.top_k); printf(" --top-p N top-p sampling (default: %.1f, 1.0 = disabled)\n", (double)sparams.top_p); + printf(" --min-p N min-p sampling (default: %.1f, 0.0 = disabled)\n", (double)sparams.min_p); printf(" --tfs N tail free sampling, parameter z (default: %.1f, 1.0 = disabled)\n", (double)sparams.tfs_z); printf(" --typical N locally typical sampling, parameter p (default: %.1f, 1.0 = disabled)\n", (double)sparams.typical_p); printf(" --repeat-last-n N last n tokens to consider for penalize (default: %d, 0 = disabled, -1 = ctx_size)\n", sparams.penalty_last_n); @@ -1275,6 +1282,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "threads: %d # default: %d\n", params.n_threads, std::thread::hardware_concurrency()); fprintf(stream, "top_k: %d # default: 40\n", sparams.top_k); fprintf(stream, "top_p: %f # default: 0.95\n", sparams.top_p); + fprintf(stream, "min_p: %f # default: 0.0\n", sparams.min_p); fprintf(stream, "typical_p: %f # default: 1.0\n", sparams.typical_p); fprintf(stream, "verbose_prompt: %s # default: false\n", params.verbose_prompt ? "true" : "false"); } diff --git a/common/sampling.cpp b/common/sampling.cpp index c4996c985..673d67a6d 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -89,10 +89,10 @@ std::string llama_sampling_print(const llama_sampling_params & params) { snprintf(result, sizeof(result), "\trepeat_last_n = %d, repeat_penalty = %.3f, frequency_penalty = %.3f, presence_penalty = %.3f\n" - "\ttop_k = %d, tfs_z = %.3f, top_p = %.3f, typical_p = %.3f, temp = %.3f\n" + "\ttop_k = %d, tfs_z = %.3f, top_p = %.3f, min_p = %.3f, typical_p = %.3f, temp = %.3f\n" "\tmirostat = %d, mirostat_lr = %.3f, mirostat_ent = %.3f", params.penalty_last_n, params.penalty_repeat, params.penalty_freq, params.penalty_present, - params.top_k, params.tfs_z, params.top_p, params.typical_p, params.temp, + params.top_k, params.tfs_z, params.top_p, params.min_p, params.typical_p, params.temp, params.mirostat, params.mirostat_eta, params.mirostat_tau); return std::string(result); @@ -110,6 +110,7 @@ llama_token llama_sampling_sample( const float temp = params.temp; const int32_t top_k = params.top_k <= 0 ? 
n_vocab : params.top_k; const float top_p = params.top_p; + const float min_p = params.min_p; const float tfs_z = params.tfs_z; const float typical_p = params.typical_p; const int32_t penalty_last_n = params.penalty_last_n < 0 ? params.n_prev : params.penalty_last_n; @@ -190,6 +191,7 @@ llama_token llama_sampling_sample( llama_sample_tail_free(ctx_main, &cur_p, tfs_z, min_keep); llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); + llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); llama_sample_temp (ctx_main, &cur_p, temp); id = llama_sample_token(ctx_main, &cur_p); diff --git a/common/sampling.h b/common/sampling.h index 62ea6d4cf..7c9b8dcf2 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -14,6 +14,7 @@ typedef struct llama_sampling_params { int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. int32_t top_k = 40; // <= 0 to use vocab size float top_p = 0.95f; // 1.0 = disabled + float min_p = 0.05f; // 0.0 = disabled float tfs_z = 1.00f; // 1.0 = disabled float typical_p = 1.00f; // 1.0 = disabled float temp = 0.80f; // 1.0 = disabled diff --git a/examples/main/README.md b/examples/main/README.md index a9561c383..a3428b487 100644 --- a/examples/main/README.md +++ b/examples/main/README.md @@ -208,6 +208,14 @@ Top-p sampling, also known as nucleus sampling, is another text generation metho Example usage: `--top-p 0.95` +### Min-P Sampling + +- `--min-p N`: Sets a minimum base probability threshold for token selection (default: 0.05). + +The Min-P sampling method was designed as an alternative to Top-P, and aims to ensure a balance of quality and variety. The parameter *p* represents the minimum probability for a token to be considered, relative to the probability of the most likely token. For example, with *p*=0.05 and the most likely token having a probability of 0.9, tokens with a probability less than 0.045 are filtered out. + +Example usage: `--min-p 0.05` + ### Tail Free Sampling (TFS) - `--tfs N`: Enable tail free sampling with parameter z (default: 1.0, 1.0 = disabled).
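For illustration, the Min-P rule described above fits in a few lines of standalone C++. The sketch below is only a rough, self-contained rendering of the same idea -- `candidate` and `min_p_filter` are hypothetical names, not part of the llama.cpp API -- and it assumes the probabilities were already normalized by a softmax, just as `llama_sample_min_p` in the llama.cpp hunk that follows guarantees by calling `llama_sample_softmax` first.

#include <algorithm>
#include <cstdio>
#include <vector>

struct candidate {
    int   id;
    float prob; // normalized probability (softmax output)
};

// keep every token whose probability is at least p times the probability of
// the most likely token, but never fewer than min_keep tokens
static void min_p_filter(std::vector<candidate> & cands, float p, size_t min_keep) {
    if (p <= 0.0f || cands.empty()) {
        return;
    }

    // sort so that the most likely token comes first
    std::sort(cands.begin(), cands.end(),
            [](const candidate & a, const candidate & b) { return a.prob > b.prob; });

    const float threshold = p * cands[0].prob; // scale by max prob

    size_t i = 1; // the most likely token always survives
    for (; i < cands.size(); ++i) {
        if (cands[i].prob < threshold && i >= min_keep) {
            break; // everything from here on is below the threshold
        }
    }

    cands.resize(i);
}

int main() {
    // p = 0.05 with a 0.9 top token -> threshold 0.045, matching the README example
    std::vector<candidate> cands = { {0, 0.90f}, {1, 0.06f}, {2, 0.03f}, {3, 0.01f} };

    min_p_filter(cands, 0.05f, 1);

    for (const candidate & c : cands) {
        std::printf("token %d p=%.2f\n", c.id, c.prob);
    }

    return 0; // tokens 2 and 3 (p = 0.03 and 0.01) were filtered out
}

Unlike Top-P, which keeps a fixed cumulative probability mass, this threshold adapts to the shape of the distribution: when the model is confident (one dominant token) few candidates survive, and when the distribution is flat many do.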
diff --git a/llama.cpp b/llama.cpp index e599917a8..7ee589298 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7368,6 +7368,32 @@ void llama_sample_top_p(struct llama_context * ctx, llama_token_data_array * can } } +void llama_sample_min_p(struct llama_context * ctx, llama_token_data_array * candidates, float p, size_t min_keep) { + if (p <= 0.0f || !candidates->size) { + return; + } + + llama_sample_softmax(ctx, candidates); + + const int64_t t_start_sample_us = ggml_time_us(); + + float scale = candidates->data[0].p; // scale by max prob + size_t i = 1; // first token always matches + + for (; i < candidates->size; ++i) { + if (candidates->data[i].p < p * scale && i >= min_keep) { + break; // prob too small + } + } + + // Resize the output vector to keep only the matching tokens + candidates->size = i; + + if (ctx) { + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + } +} + void llama_sample_tail_free(struct llama_context * ctx, llama_token_data_array * candidates, float z, size_t min_keep) { if (z >= 1.0f || candidates->size <= 2) { return; diff --git a/llama.h b/llama.h index d727dbd9f..75fe391ef 100644 --- a/llama.h +++ b/llama.h @@ -598,6 +598,13 @@ extern "C" { float p, size_t min_keep); + /// @details Minimum P sampling as described in https://github.com/ggerganov/llama.cpp/pull/3841 + LLAMA_API void llama_sample_min_p( + struct llama_context * ctx, + llama_token_data_array * candidates, + float p, + size_t min_keep); + /// @details Tail Free Sampling described in https://www.trentonbricken.com/Tail-Free-Sampling/. LLAMA_API void llama_sample_tail_free( struct llama_context * ctx, From 71e3718abdb2771b50c9606d3a7569623a0b0afe Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 1 Nov 2023 08:04:02 +0200 Subject: [PATCH 049/859] llama : refactor graph build code (#3837) * llama : factor out ggml-alloc from graph graph build functions ggml-ci * metal : disable kernel load log * llama : factor out tensor offloading outside the build call (wip) ggml-ci * llama : offload rest of the models ggml-ci * llama : update offload log messages to print node index * llama : comments * llama : support offloading result_norm + comments * llama : factor graph input into a function * llama : do tensor offload only with CUDA * llama : fix res_norm offloading * llama : try to optimize offloading code * llama : fix non-CUDA build * llama : try to fix build * llama : move refact in correct place + optimize graph input * llama : refactor tensor offloading as callback * llama : add layer index to all tensor names * llama : add functional header * llama : comment ggml-ci * llama : remove obsolete map for layer counting * llama : add llm_build helper functions (#3848) * llama : add llm_build_norm helper function ggml-ci * llama : add llm_build_ffn helper function (#3849) ggml-ci * llama : add llm_build_k_shift helper ggml-ci * llama : fix offloading after recent changes * llama : add llm_build_kv_store helper ggml-ci * llama : remove obsolete offload names * llama : fix llm_build_k_shift to use n_head_kv instead of n_head * llama : simplify falcon Q, K, V computation * llama : remove obsolete comments in build graphs * llama : add llm_build_kqv helper ggml-ci * llama : minor * llama : add LLAMA_OFFLOAD_DEBUG + fix starcoder offloading * llama : fix input allocation logic * llama : update offload functions for KQ tensors * llama : normalize tensor names ggml-ci * llama : enable warning about not offloaded tensors * llama : remove extra ; + deduplicate gate_b logic * llama : add llm_build_inp_embd 
helper --- ggml-metal.m | 11 +- ggml.h | 2 +- llama.cpp | 3655 ++++++++++++++++++++------------------------------ 3 files changed, 1477 insertions(+), 2191 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 2380c4310..bc881395a 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -238,14 +238,17 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { // load kernels { NSError * error = nil; -#define GGML_METAL_ADD_KERNEL(name) \ - ctx->function_##name = [ctx->library newFunctionWithName:@"kernel_"#name]; \ - ctx->pipeline_##name = [ctx->device newComputePipelineStateWithFunction:ctx->function_##name error:&error]; \ + + /* GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) ctx->pipeline_##name, \ (int) ctx->pipeline_##name.maxTotalThreadsPerThreadgroup, \ (int) ctx->pipeline_##name.threadExecutionWidth); \ + */ +#define GGML_METAL_ADD_KERNEL(name) \ + ctx->function_##name = [ctx->library newFunctionWithName:@"kernel_"#name]; \ + ctx->pipeline_##name = [ctx->device newComputePipelineStateWithFunction:ctx->function_##name error:&error]; \ if (error) { \ - GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ + GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ return NULL; \ } diff --git a/ggml.h b/ggml.h index 8c954904e..9d16c5a72 100644 --- a/ggml.h +++ b/ggml.h @@ -709,7 +709,7 @@ extern "C" { // Context tensor enumeration and lookup GGML_API struct ggml_tensor * ggml_get_first_tensor(struct ggml_context * ctx); GGML_API struct ggml_tensor * ggml_get_next_tensor (struct ggml_context * ctx, struct ggml_tensor * tensor); - GGML_API struct ggml_tensor * ggml_get_tensor(struct ggml_context * ctx, const char * name); + GGML_API struct ggml_tensor * ggml_get_tensor (struct ggml_context * ctx, const char * name); GGML_API struct ggml_tensor * ggml_set_zero(struct ggml_tensor * tensor); GGML_API struct ggml_tensor * ggml_set_i32 (struct ggml_tensor * tensor, int32_t value); diff --git a/llama.cpp b/llama.cpp index 7ee589298..ead1d421d 100644 --- a/llama.cpp +++ b/llama.cpp @@ -60,7 +60,9 @@ #include #include #include +#include #include +#include #include #include #include @@ -69,11 +71,10 @@ #include #include #include +#include #include #include #include -#include -#include #if defined(_MSC_VER) #pragma warning(disable: 4244 4267) // possible loss of data @@ -969,7 +970,7 @@ struct llama_mlock { typedef void (*offload_func_t)(struct ggml_tensor * tensor); -static void llama_nop(struct ggml_tensor * tensor) { // don't offload by default +static void ggml_offload_nop(struct ggml_tensor * tensor) { (void) tensor; } @@ -1113,13 +1114,13 @@ struct llama_layer { struct ggml_tensor * ffn_norm_b; // ff - struct ggml_tensor * w1; // ffn_gate - struct ggml_tensor * w2; // ffn_down - struct ggml_tensor * w3; // ffn_up + struct ggml_tensor * ffn_gate; // w1 + struct ggml_tensor * ffn_down; // w2 + struct ggml_tensor * ffn_up; // w3 // ff bias - struct ggml_tensor * b2; // ffn_down - struct ggml_tensor * b3; // ffn_up + struct ggml_tensor * ffn_down_b; // b2 + struct ggml_tensor * ffn_up_b; // b3 }; struct llama_kv_cell { @@ -1225,8 +1226,8 @@ struct llama_model { llama_hparams hparams = {}; llama_vocab vocab; - struct ggml_tensor * tok_embeddings; - struct ggml_tensor * pos_embeddings; + struct ggml_tensor * tok_embd; + struct ggml_tensor * pos_embd; struct ggml_tensor * tok_norm; struct ggml_tensor * tok_norm_b; @@ -2482,7 
+2483,7 @@ static void llm_load_tensors( case LLM_ARCH_LLAMA: case LLM_ARCH_REFACT: { - model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); // output { @@ -2536,21 +2537,21 @@ static void llm_load_tensors( layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.w1 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.w2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.w3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); if (backend == GGML_BACKEND_GPU) { vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.w1) + ggml_nbytes(layer.w2) + ggml_nbytes(layer.w3); + ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + + ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + + ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); } } } break; case LLM_ARCH_BAICHUAN: { - model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); { ggml_backend_type backend_norm; ggml_backend_type backend_output; @@ -2602,15 +2603,15 @@ static void llm_load_tensors( layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.w1 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.w2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.w3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); if (backend == GGML_BACKEND_GPU) { vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.w1) + ggml_nbytes(layer.w2) + ggml_nbytes(layer.w3); + ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + + ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + + ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); } } } break; @@ -2618,7 +2619,7 @@ static void llm_load_tensors( { // TODO: CPU-only for now - model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, 
GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); // output { @@ -2681,21 +2682,21 @@ static void llm_load_tensors( layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.w2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.w3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); if (backend == GGML_BACKEND_GPU) { vram_weights += ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.wo) + - ggml_nbytes(layer.w2) + ggml_nbytes(layer.w3); + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); } } } break; case LLM_ARCH_STARCODER: { - model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - model.pos_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.pos_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); // output { @@ -2754,11 +2755,11 @@ static void llm_load_tensors( layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); - layer.w2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.b2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); - layer.w3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.b3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); if (backend == GGML_BACKEND_GPU) { vram_weights += @@ -2766,14 +2767,14 @@ static void llm_load_tensors( ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_norm_b) + - ggml_nbytes(layer.w2) + ggml_nbytes(layer.b2) + - ggml_nbytes(layer.w3) + ggml_nbytes(layer.b3); + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b) + + ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b); } } } break; case LLM_ARCH_PERSIMMON: { - model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); { ggml_backend_type backend_norm; @@ -2814,31 +2815,31 @@ static 
void llm_load_tensors( const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend_split); - layer.w2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.b2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend_split); - layer.w3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.b3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend_split); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend_split); + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend_split); + layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); layer.attn_q_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q_NORM, "weight", i), {64}, backend); - layer.attn_q_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q_NORM, "bias", i), {64}, backend); + layer.attn_q_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q_NORM, "bias", i), {64}, backend); layer.attn_k_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K_NORM, "weight", i), {64}, backend); - layer.attn_k_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K_NORM, "bias", i), {64}, backend); + layer.attn_k_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K_NORM, "bias", i), {64}, backend); } } break; case LLM_ARCH_BLOOM: { // TODO: CPU-only for now - model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - 
model.tok_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}, GGML_BACKEND_CPU); - model.tok_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}, GGML_BACKEND_CPU); + model.tok_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}, GGML_BACKEND_CPU); // output { @@ -2897,11 +2898,11 @@ static void llm_load_tensors( layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); - layer.w2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.b2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend_split); - layer.w3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.b3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend_split); if (backend == GGML_BACKEND_GPU) { vram_weights += @@ -2909,14 +2910,14 @@ static void llm_load_tensors( ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_norm_b) + - ggml_nbytes(layer.w3) + ggml_nbytes(layer.b3) + - ggml_nbytes(layer.w2) + ggml_nbytes(layer.b2); + ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b) + + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b); } } } break; case LLM_ARCH_MPT: { - model.tok_embeddings = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); // output { @@ -2967,8 +2968,8 @@ static void llm_load_tensors( layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.w2 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.w3 = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); if (backend == GGML_BACKEND_GPU) { vram_weights += @@ -2976,8 +2977,8 @@ static void llm_load_tensors( ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.w2) + - ggml_nbytes(layer.w3); + ggml_nbytes(layer.ffn_down) + + ggml_nbytes(layer.ffn_up); } } } break; @@ -3007,10 +3008,10 @@ static void llm_load_tensors( #ifdef GGML_USE_CUBLAS const int max_backend_supported_layers = hparams.n_layer + 3; - const int max_offloadable_layers = hparams.n_layer + 3; -#elif defined(GGML_USE_CLBLAST) + const int 
max_offloadable_layers = hparams.n_layer + 3; +#elif GGML_USE_CLBLAST const int max_backend_supported_layers = hparams.n_layer + 1; - const int max_offloadable_layers = hparams.n_layer + 1; + const int max_offloadable_layers = hparams.n_layer + 1; #endif // GGML_USE_CUBLAS LLAMA_LOG_INFO("%s: offloaded %d/%d layers to GPU\n", __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers); @@ -3089,9 +3090,359 @@ static bool llama_model_load( return true; } +using llm_build_cb = std::function; + +enum llm_rope_type { + LLM_ROPE, + LLM_ROPE_NEOX, + LLM_ROPE_GLM, +}; + +static struct ggml_tensor * llm_build_inp_embd( + struct ggml_context * ctx, + const llama_batch & batch, + struct ggml_tensor * tok_embd, + int64_t n_embd, + int32_t n_tokens, + const llm_build_cb & cb) { + struct ggml_tensor * inpL; + + if (batch.token) { + struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_tokens); + cb(inp_tokens, "inp_tokens", -1); + + inpL = ggml_get_rows(ctx, tok_embd, inp_tokens); + } else { +#ifdef GGML_USE_MPI + GGML_ASSERT(false && "not implemented"); +#endif + + inpL = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_tokens); + } + + return inpL; +} + +// Persimmon: n_rot = n_embd_head/2 +// Other: n_rot = n_embd_head +static void llm_build_k_shift( + const llama_context & lctx, + struct ggml_context * ctx, + struct ggml_cgraph * graph, + int64_t n_rot, + llm_rope_type type, + const llm_build_cb & cb) { + const auto & model = lctx.model; + const auto & kv_self = lctx.kv_self; + const auto & cparams = lctx.cparams; + + const auto & hparams = model.hparams; + + const int64_t n_layer = hparams.n_layer; + const int64_t n_head_kv = hparams.n_head_kv; + const int64_t n_embd_gqa = hparams.n_embd_gqa(); + const int64_t n_embd_head = hparams.n_embd_head(); + + const int64_t n_ctx = lctx.cparams.n_ctx; + + const float freq_base = cparams.rope_freq_base; + const float freq_scale = cparams.rope_freq_scale; + + GGML_ASSERT(n_embd_head % n_rot == 0); + + struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_ctx); + cb(K_shift, "K_shift", -1); + + int rope_type = 0; + + switch (type) { + case LLM_ROPE: rope_type = 0; break; + case LLM_ROPE_NEOX: rope_type = 2; break; + case LLM_ROPE_GLM: rope_type = 4; break; + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * tmp = + // we rotate only the first n_rot dimensions + ggml_rope_custom_inplace(ctx, + ggml_view_3d(ctx, kv_self.k, + n_rot, n_head_kv, n_ctx, + ggml_element_size(kv_self.k)*n_embd_head, + ggml_element_size(kv_self.k)*n_embd_gqa, + ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il), + K_shift, n_rot, rope_type, 0, freq_base, freq_scale); + cb(tmp, "K_shifted", il); + ggml_build_forward_expand(graph, tmp); + } +} + +static void llm_build_kv_store( + const llama_context & lctx, + struct ggml_context * ctx, + struct ggml_cgraph * graph, + struct ggml_tensor * k_cur, + struct ggml_tensor * v_cur, + int32_t n_tokens, + int32_t kv_head, + const llm_build_cb & cb, + int64_t il) { + const auto & model = lctx.model; + const auto & kv_self = lctx.kv_self; + const auto & cparams = lctx.cparams; + + const auto & hparams = model.hparams; + + const int64_t n_ctx = cparams.n_ctx; + const int64_t n_embd_gqa = hparams.n_embd_gqa(); + + // compute the transposed [n_tokens, n_embd] V matrix + struct ggml_tensor * v_cur_t = ggml_transpose(ctx, ggml_reshape_2d(ctx, v_cur, n_embd_gqa, n_tokens)); + //struct ggml_tensor * v_cur_t = ggml_transpose(ctx, v_cur); // TODO: reshape above is likely not 
needed + cb(v_cur_t, "v_cur_t", il); + + struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv_self.k, n_tokens*n_embd_gqa, + (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); + cb(k_cache_view, "k_cache_view", il); + + struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv_self.v, n_tokens, n_embd_gqa, + ( n_ctx)*ggml_element_size(kv_self.v), + (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); + cb(v_cache_view, "v_cache_view", il); + + // important: storing RoPE-ed version of K in the KV cache! + ggml_build_forward_expand(graph, ggml_cpy(ctx, k_cur, k_cache_view)); + ggml_build_forward_expand(graph, ggml_cpy(ctx, v_cur_t, v_cache_view)); +} + +enum llm_norm_type { + LLM_NORM, + LLM_NORM_RMS, +}; + +static struct ggml_tensor * llm_build_norm( + struct ggml_context * ctx, + struct ggml_tensor * cur, + struct ggml_tensor * mw, + struct ggml_tensor * mb, + llm_norm_type type, + float eps, + const llm_build_cb & cb, + int il) { + switch (type) { + case LLM_NORM: cur = ggml_norm (ctx, cur, eps); break; + case LLM_NORM_RMS: cur = ggml_rms_norm(ctx, cur, eps); break; + } + + if (mw || mb) { + cb(cur, "norm", il); + } + + if (mw) { + cur = ggml_mul(ctx, cur, mw); + if (mb) { + cb(cur, "norm_w", il); + } + } + + if (mb) { + cur = ggml_add(ctx, cur, mb); + } + + return cur; +} + +enum llm_ffn_op_type { + LLM_FFN_SILU, + LLM_FFN_GELU, + LLM_FFN_RELU, + LLM_FFN_RELU_SQR, +}; + +enum llm_ffn_gate_type { + LLM_FFN_SEQ, + LLM_FFN_PAR, // ffn_gate is parallel to ffn_up +}; + +static struct ggml_tensor * llm_build_ffn( + struct ggml_context * ctx, + struct ggml_tensor * cur, + struct ggml_tensor * up, + struct ggml_tensor * up_b, + struct ggml_tensor * gate, + struct ggml_tensor * gate_b, + struct ggml_tensor * down, + struct ggml_tensor * down_b, + llm_ffn_op_type type_op, + llm_ffn_gate_type type_gate, + const llm_build_cb & cb, + int il) { + struct ggml_tensor * tmp = ggml_mul_mat(ctx, up, cur); + cb(tmp, "ffn_up", il); + + if (up_b) { + tmp = ggml_add(ctx, tmp, up_b); + cb(tmp, "ffn_up_b", il); + } + + if (gate) { + switch (type_gate) { + case LLM_FFN_SEQ: + { + cur = ggml_mul_mat(ctx, gate, tmp); + cb(cur, "ffn_gate", il); + } break; + case LLM_FFN_PAR: + { + cur = ggml_mul_mat(ctx, gate, cur); + cb(cur, "ffn_gate", il); + } break; + } + + if (gate_b) { + cur = ggml_add(ctx, cur, gate_b); + cb(cur, "ffn_gate_b", il); + } + } else { + cur = tmp; + } + + switch (type_op) { + case LLM_FFN_SILU: + { + cur = ggml_silu(ctx, cur); + cb(cur, "ffn_silu", il); + } break; + case LLM_FFN_GELU: + { + cur = ggml_gelu(ctx, cur); + cb(cur, "ffn_gelu", il); + } break; + case LLM_FFN_RELU: + { + cur = ggml_relu(ctx, cur); + cb(cur, "ffn_relu", il); + } break; + case LLM_FFN_RELU_SQR: + { + cur = ggml_relu(ctx, cur); + cb(cur, "ffn_relu", il); + + cur = ggml_sqr(ctx, cur); + cb(cur, "ffn_sqr(relu)", il); + } break; + } + + if (type_gate == LLM_FFN_PAR) { + cur = ggml_mul(ctx, cur, tmp); + cb(cur, "ffn_gate_par", il); + } + + cur = ggml_mul_mat(ctx, down, cur); + if (down_b) { + cb(cur, "ffn_down", il); + } + + if (down_b) { + cur = ggml_add(ctx, cur, down_b); + } + + return cur; +} + +// if max_alibi_bias > 0 then apply ALiBi +static struct ggml_tensor * llm_build_kqv( + const llama_context & lctx, + struct ggml_context * ctx, + struct ggml_tensor * cur, + struct ggml_tensor * wo, + struct ggml_tensor * wo_b, + struct ggml_tensor * q_cur, + struct ggml_tensor * kq_scale, + struct ggml_tensor * kq_mask, + int32_t n_tokens, + int32_t n_kv, + float 
alibi_bias_max, + const llm_build_cb & cb, + int il) { + const auto & model = lctx.model; + const auto & kv_self = lctx.kv_self; + const auto & cparams = lctx.cparams; + + const auto & hparams = model.hparams; + + const int64_t n_ctx = cparams.n_ctx; + const int64_t n_embd = hparams.n_embd; + const int64_t n_head = hparams.n_head; + const int64_t n_head_kv = hparams.n_head_kv; + const int64_t n_embd_head = hparams.n_embd_head(); + const int64_t n_embd_gqa = hparams.n_embd_gqa(); + + struct ggml_tensor * q = ggml_permute(ctx, q_cur, 0, 2, 1, 3); + cb(q, "q", il); + + struct ggml_tensor * k = + ggml_view_3d(ctx, kv_self.k, + n_embd_head, n_kv, n_head_kv, + ggml_element_size(kv_self.k)*n_embd_gqa, + ggml_element_size(kv_self.k)*n_embd_head, + ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); + cb(k, "k", il); + + struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); + cb(kq, "kq", il); + + kq = ggml_scale(ctx, kq, kq_scale); + cb(kq, "kq_scaled", il); + + if (alibi_bias_max > 0.0f) { + // TODO: n_head or n_head_kv + // TODO: K-shift is likely not working + // TODO: change to ggml_add + kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, alibi_bias_max); + cb(kq, "kq_scaled_alibi", il); + } + + kq = ggml_add(ctx, kq, kq_mask); + cb(kq, "kq_masked", il); + + kq = ggml_soft_max(ctx, kq); + cb(kq, "kq_soft_max", il); + + // split cached v into n_head heads + struct ggml_tensor * v = + ggml_view_3d(ctx, kv_self.v, + n_kv, n_embd_head, n_head_kv, + ggml_element_size(kv_self.v)*n_ctx, + ggml_element_size(kv_self.v)*n_ctx*n_embd_head, + ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); + cb(v, "v", il); + + struct ggml_tensor * kqv = ggml_mul_mat(ctx, v, kq); + cb(kqv, "kqv", il); + + struct ggml_tensor * kqv_merged = ggml_permute(ctx, kqv, 0, 2, 1, 3); + cb(kqv_merged, "kqv_merged", il); + + cur = ggml_cont_2d(ctx, kqv_merged, n_embd, n_tokens); + cb(cur, "kqv_merged_cont", il); + + cur = ggml_mul_mat(ctx, wo, cur); + if (wo_b) { + cb(cur, "kqv_wo", il); + } + + if (wo_b) { + cur = ggml_add(ctx, cur, wo_b); + } + + return cur; +} + static struct ggml_cgraph * llm_build_llama( - llama_context & lctx, - const llama_batch & batch) { + llama_context & lctx, + const llama_batch & batch, + const llm_build_cb & cb, + bool worst_case) { const auto & model = lctx.model; const auto & hparams = model.hparams; const auto & cparams = lctx.cparams; @@ -3106,7 +3457,6 @@ static struct ggml_cgraph * llm_build_llama( const int64_t n_head = hparams.n_head; const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); GGML_ASSERT(n_embd_head == hparams.n_rot); @@ -3114,13 +3464,11 @@ static struct ggml_cgraph * llm_build_llama( const float freq_scale = cparams.rope_freq_scale; const float norm_rms_eps = hparams.f_norm_rms_eps; - const int n_gpu_layers = model.n_gpu_layers; - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; - const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; + const int32_t n_kv = worst_case ? n_ctx : kv_self.n; + const int32_t kv_head = worst_case ? 
n_ctx - n_tokens : kv_self.head; - const bool do_rope_shift = ggml_allocr_is_measure(lctx.alloc) || kv_self.has_shift; + const bool do_rope_shift = worst_case || kv_self.has_shift; //printf("n_kv = %d\n", n_kv); @@ -3139,314 +3487,81 @@ static struct ggml_cgraph * llm_build_llama( struct ggml_tensor * cur; struct ggml_tensor * inpL; - if (batch.token) { - struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); + cb(inpL, "inp_embd", -1); - ggml_allocr_alloc(lctx.alloc, inp_tokens); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inp_tokens->data, batch.token, n_tokens*ggml_element_size(inp_tokens)); - } - ggml_set_name(inp_tokens, "inp_tokens"); - - inpL = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens); - } else { -#ifdef GGML_USE_MPI - GGML_ASSERT(false && "not implemented"); -#endif - - inpL = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, n_tokens); - - ggml_allocr_alloc(lctx.alloc, inpL); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inpL->data, batch.embd, n_tokens * n_embd * ggml_element_size(inpL)); - } - } - - const int i_gpu_start = n_layer - n_gpu_layers; - (void) i_gpu_start; - - // offload functions set the tensor output backend to GPU - // tensors are GPU-accelerated if any input or the output has been offloaded - offload_func_t offload_func_nr = llama_nop; // nr = non-repeating - offload_func_t offload_func_kq = llama_nop; - offload_func_t offload_func_v = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (n_gpu_layers > n_layer) { - offload_func_nr = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 1) { - offload_func_v = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 2) { - offload_func_kq = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); // KQ_scale struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)"); - ggml_allocr_alloc(lctx.alloc, KQ_scale); - if (!ggml_allocr_is_measure(lctx.alloc)) { - ggml_set_f32(KQ_scale, 1.0f/sqrtf(float(n_embd_head))); - } + cb(KQ_scale, "KQ_scale", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - offload_func_kq(KQ_mask); - ggml_set_name(KQ_mask, "KQ_mask"); - ggml_allocr_alloc(lctx.alloc, KQ_mask); - if (!ggml_allocr_is_measure(lctx.alloc)) { - float * data = (float *) KQ_mask->data; - memset(data, 0, ggml_nbytes(KQ_mask)); - - for (int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; - - for (int i = 0; i < n_kv; ++i) { - if (!kv_self.cells[i].has_seq_id(seq_id) || kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; - } - } - } - } - } - - // KQ_pos - contains the positions - struct ggml_tensor * KQ_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - offload_func_kq(KQ_pos); - ggml_set_name(KQ_pos, "KQ_pos"); - ggml_allocr_alloc(lctx.alloc, KQ_pos); - if (!ggml_allocr_is_measure(lctx.alloc)) { - int * data = (int *) KQ_pos->data; - for (int i = 0; i < n_tokens; ++i) { - data[i] = batch.pos[i]; - } - } + cb(KQ_mask, "KQ_mask", -1); // shift the entire K-cache if needed if (do_rope_shift) { - 
struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_ctx); - offload_func_kq(K_shift); - ggml_set_name(K_shift, "K_shift"); - ggml_allocr_alloc(lctx.alloc, K_shift); - if (!ggml_allocr_is_measure(lctx.alloc)) { - int * data = (int *) K_shift->data; - for (int i = 0; i < n_ctx; ++i) { - data[i] = kv_self.cells[i].delta; - } - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * tmp = - ggml_rope_custom_inplace(ctx0, - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_head_kv, n_ctx, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il), - K_shift, n_embd_head, 0, 0, freq_base, freq_scale); - offload_func_kq(tmp); - ggml_build_forward_expand(gf, tmp); - } + llm_build_k_shift(lctx, ctx0, gf, n_embd_head, LLM_ROPE, cb); } for (int il = 0; il < n_layer; ++il) { - ggml_format_name(inpL, "layer_inp_%d", il); - - offload_func_t offload_func = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (il >= i_gpu_start) { - offload_func = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS - struct ggml_tensor * inpSA = inpL; // norm - { - cur = ggml_rms_norm(ctx0, inpL, norm_rms_eps); - offload_func(cur); - ggml_set_name(cur, "rms_norm_0"); - - // cur = cur*attn_norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.layers[il].attn_norm); - offload_func(cur); - ggml_set_name(cur, "attention_norm_0"); - } + cur = llm_build_norm(ctx0, inpL, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, il); + cb(cur, "attn_norm", il); // self-attention { // compute Q and K and RoPE them - struct ggml_tensor * tmpk = ggml_mul_mat(ctx0, model.layers[il].wk, cur); - offload_func_kq(tmpk); - ggml_set_name(tmpk, "tmpk"); + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); - struct ggml_tensor * tmpq = ggml_mul_mat(ctx0, model.layers[il].wq, cur); - offload_func_kq(tmpq); - ggml_set_name(tmpq, "tmpq"); + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); - struct ggml_tensor * Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, tmpk, n_embd_head, n_head_kv, n_tokens), KQ_pos, n_embd_head, 0, 0, freq_base, freq_scale); - offload_func_kq(Kcur); - ggml_set_name(Kcur, "Kcur"); + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); - struct ggml_tensor * Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, tmpq, n_embd_head, n_head, n_tokens), KQ_pos, n_embd_head, 0, 0, freq_base, freq_scale); - offload_func_kq(Qcur); - ggml_set_name(Qcur, "Qcur"); + Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + cb(Qcur, "Qcur", il); - // store key and value to memory - { - // compute the transposed [n_tokens, n_embd] V matrix + Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + cb(Kcur, "Kcur", il); - struct ggml_tensor * tmpv = ggml_mul_mat(ctx0, model.layers[il].wv, cur); - offload_func_v(tmpv); - ggml_set_name(tmpv, "tmpv"); + llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, tmpv, n_embd_gqa, n_tokens)); - offload_func_v(Vcur); - ggml_set_name(Vcur, "Vcur"); - - struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, n_tokens*n_embd_gqa, 
(ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); - offload_func_kq(k); - ggml_set_name(k, "k"); - - struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); - offload_func_v(v); - ggml_set_name(v, "v"); - - // important: storing RoPE-ed version of K in the KV cache! - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); - } - - struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); - offload_func_kq(Q); - ggml_set_name(Q, "Q"); - - struct ggml_tensor * K = - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); - offload_func_kq(K); - ggml_set_name(K, "K"); - - // K * Q - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - offload_func_kq(KQ); - ggml_set_name(KQ, "KQ"); - - // KQ_scaled = KQ / sqrt(n_embd_head) - // KQ_scaled shape [n_kv, n_tokens, n_head, 1] - struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, KQ_scale); - offload_func_kq(KQ_scaled); - ggml_set_name(KQ_scaled, "KQ_scaled"); - - // KQ_masked = mask_past(KQ_scaled) - struct ggml_tensor * KQ_masked = ggml_add(ctx0, KQ_scaled, KQ_mask); - offload_func_kq(KQ_masked); - ggml_set_name(KQ_masked, "KQ_masked"); - - // KQ = soft_max(KQ_masked) - struct ggml_tensor * KQ_soft_max = ggml_soft_max(ctx0, KQ_masked); - offload_func_v(KQ_soft_max); - ggml_set_name(KQ_soft_max, "KQ_soft_max"); - - // split cached V into n_head heads - struct ggml_tensor * V = - ggml_view_3d(ctx0, kv_self.v, - n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); - offload_func_v(V); - ggml_set_name(V, "V"); - -#if 1 - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - offload_func_v(KQV); - ggml_set_name(KQV, "KQV"); -#else - // make V contiguous in memory to speed up the matmul, however we waste time on the copy - // on M1 this is faster for the perplexity computation, but ~5% slower for the single-token generation - // is there a better way? 
- struct ggml_tensor * V_cont = ggml_cpy(ctx0, V, ggml_new_tensor_3d(ctx0, kv_self.v->type, n_ctx, n_embd_head, n_head)); - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V_cont, KQ_soft_max); -#endif - - // KQV_merged = KQV.permute(0, 2, 1, 3) - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - offload_func_v(KQV_merged); - ggml_set_name(KQV_merged, "KQV_merged"); - - // cur = KQV_merged.contiguous().view(n_embd, n_tokens) - cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); - offload_func_v(cur); - ggml_set_name(cur, "KQV_merged_contiguous"); - - // projection (no bias) - cur = ggml_mul_mat(ctx0, - model.layers[il].wo, - cur); - offload_func(cur); - ggml_set_name(cur, "result_wo"); + cur = llm_build_kqv(lctx, ctx0, cur, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); } - struct ggml_tensor * inpFF = ggml_add(ctx0, cur, inpSA); - offload_func(inpFF); - ggml_set_name(inpFF, "inpFF"); + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); // feed-forward network { - // norm - { - cur = ggml_rms_norm(ctx0, inpFF, norm_rms_eps); - offload_func(cur); - ggml_set_name(cur, "rms_norm_1"); + cur = llm_build_norm(ctx0, ffn_inp, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, il); + cb(cur, "ffn_norm", il); - // cur = cur*ffn_norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.layers[il].ffn_norm); - offload_func(cur); - ggml_set_name(cur, "ffn_norm"); - } - - struct ggml_tensor * tmp = ggml_mul_mat(ctx0, - model.layers[il].w3, - cur); - offload_func(tmp); - ggml_set_name(tmp, "result_w3"); - - cur = ggml_mul_mat(ctx0, - model.layers[il].w1, - cur); - offload_func(cur); - ggml_set_name(cur, "result_w1"); - - // SILU activation - cur = ggml_silu(ctx0, cur); - offload_func(cur); - ggml_set_name(cur, "silu"); - - cur = ggml_mul(ctx0, cur, tmp); - offload_func(cur); - ggml_set_name(cur, "silu_x_result_w3"); - - cur = ggml_mul_mat(ctx0, - model.layers[il].w2, - cur); - offload_func(cur); - ggml_set_name(cur, "result_w2"); + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); } - cur = ggml_add(ctx0, cur, inpFF); - offload_func(cur); - ggml_set_name(cur, "inpFF_+_result_w2"); + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); // input for next layer inpL = cur; @@ -3454,21 +3569,14 @@ static struct ggml_cgraph * llm_build_llama( cur = inpL; - // norm - { - cur = ggml_rms_norm(ctx0, cur, norm_rms_eps); - offload_func_nr(cur); - ggml_set_name(cur, "rms_norm_2"); - - // cur = cur*norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.output_norm); - // offload_func_nr(cur); // TODO CPU + GPU mirrored backend - ggml_set_name(cur, "result_norm"); - } + cur = llm_build_norm(ctx0, cur, + model.output_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, -1); + cb(cur, "result_norm", -1); // lm_head cur = ggml_mul_mat(ctx0, model.output, cur); - ggml_set_name(cur, "result_output"); + cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); @@ -3479,7 +3587,9 @@ static struct ggml_cgraph * llm_build_llama( static struct ggml_cgraph * llm_build_baichaun( llama_context & lctx, - const llama_batch & batch) { + const llama_batch & batch, + const llm_build_cb & cb, + bool worst_case) { const auto & model = lctx.model; const auto & hparams = model.hparams; const auto & cparams = lctx.cparams; @@ -3494,7 +3604,6 @@ static 
struct ggml_cgraph * llm_build_baichaun( const int64_t n_head = hparams.n_head; const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); GGML_ASSERT(n_embd_head == hparams.n_rot); @@ -3502,13 +3611,11 @@ static struct ggml_cgraph * llm_build_baichaun( const float freq_scale = cparams.rope_freq_scale; const float norm_rms_eps = hparams.f_norm_rms_eps; - const int n_gpu_layers = model.n_gpu_layers; - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; - const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; + const int32_t n_kv = worst_case ? n_ctx : kv_self.n; + const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; - const bool do_rope_shift = ggml_allocr_is_measure(lctx.alloc) || kv_self.has_shift; + const bool do_rope_shift = worst_case || kv_self.has_shift; auto & buf_compute = lctx.buf_compute; @@ -3525,331 +3632,91 @@ static struct ggml_cgraph * llm_build_baichaun( struct ggml_tensor * cur; struct ggml_tensor * inpL; - if (batch.token) { - struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); + cb(inpL, "inp_embd", -1); - ggml_allocr_alloc(lctx.alloc, inp_tokens); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inp_tokens->data, batch.token, n_tokens*ggml_element_size(inp_tokens)); - } - ggml_set_name(inp_tokens, "inp_tokens"); - - inpL = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens); - } else { -#ifdef GGML_USE_MPI - GGML_ASSERT(false && "not implemented"); -#endif - - inpL = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, n_tokens); - - ggml_allocr_alloc(lctx.alloc, inpL); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inpL->data, batch.embd, n_tokens * n_embd * ggml_element_size(inpL)); - } - } - - const int i_gpu_start = n_layer - n_gpu_layers; - (void) i_gpu_start; - - // offload functions set the tensor output backend to GPU - // tensors are GPU-accelerated if any input or the output has been offloaded - offload_func_t offload_func_nr = llama_nop; // nr = non-repeating - offload_func_t offload_func_kq = llama_nop; - offload_func_t offload_func_v = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (n_gpu_layers > n_layer) { - offload_func_nr = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 1) { - offload_func_v = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 2) { - offload_func_kq = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); // KQ_scale struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)"); - ggml_allocr_alloc(lctx.alloc, KQ_scale); - if (!ggml_allocr_is_measure(lctx.alloc)) { - ggml_set_f32(KQ_scale, 1.0f/sqrtf(float(n_embd)/n_head)); - } + cb(KQ_scale, "KQ_scale", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - offload_func_kq(KQ_mask); - ggml_set_name(KQ_mask, "KQ_mask"); - ggml_allocr_alloc(lctx.alloc, KQ_mask); - if (!ggml_allocr_is_measure(lctx.alloc)) { - float * data = (float *) KQ_mask->data; - memset(data, 0, ggml_nbytes(KQ_mask)); - - for 
(int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; - - for (int i = 0; i < n_kv; ++i) { - if (!kv_self.cells[i].has_seq_id(seq_id) || kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; - } - } - } - } - } - - // KQ_pos - contains the positions - struct ggml_tensor * KQ_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - offload_func_kq(KQ_pos); - ggml_set_name(KQ_pos, "KQ_pos"); - ggml_allocr_alloc(lctx.alloc, KQ_pos); - if (!ggml_allocr_is_measure(lctx.alloc)) { - int * data = (int *) KQ_pos->data; - for (int i = 0; i < n_tokens; ++i) { - data[i] = batch.pos[i]; - } - } + cb(KQ_mask, "KQ_mask", -1); // shift the entire K-cache if needed if (do_rope_shift) { - struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_ctx); - offload_func_kq(K_shift); - ggml_set_name(K_shift, "K_shift"); - ggml_allocr_alloc(lctx.alloc, K_shift); - if (!ggml_allocr_is_measure(lctx.alloc)) { - int * data = (int *) K_shift->data; - for (int i = 0; i < n_ctx; ++i) { - data[i] = kv_self.cells[i].delta; - } - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * tmp = - ggml_rope_custom_inplace(ctx0, - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_head_kv, n_ctx, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il), - K_shift, n_embd_head, 0, 0, freq_base, freq_scale); - offload_func_kq(tmp); - ggml_build_forward_expand(gf, tmp); - } + llm_build_k_shift(lctx, ctx0, gf, n_embd_head, LLM_ROPE, cb); } for (int il = 0; il < n_layer; ++il) { - ggml_format_name(inpL, "layer_inp_%d", il); - - offload_func_t offload_func = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (il >= i_gpu_start) { - offload_func = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS - struct ggml_tensor * inpSA = inpL; - // norm - { - cur = ggml_rms_norm(ctx0, inpL, norm_rms_eps); - offload_func(cur); - ggml_set_name(cur, "rms_norm_0"); - - // cur = cur*attn_norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.layers[il].attn_norm); - offload_func(cur); - ggml_set_name(cur, "attention_norm_0"); - } + cur = llm_build_norm(ctx0, inpL, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, il); + cb(cur, "attn_norm", il); // self-attention { - // compute Q and K and RoPE them - struct ggml_tensor * tmpk = ggml_mul_mat(ctx0, model.layers[il].wk, cur); - offload_func_kq(tmpk); - ggml_set_name(tmpk, "tmpk"); + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); - struct ggml_tensor * tmpq = ggml_mul_mat(ctx0, model.layers[il].wq, cur); - offload_func_kq(tmpq); - ggml_set_name(tmpq, "tmpq"); + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); - struct ggml_tensor * Kcur; - struct ggml_tensor * Qcur; switch (model.type) { case MODEL_7B: - Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, tmpk, n_embd_head, n_head_kv, n_tokens), KQ_pos, n_embd_head, 0, 0, freq_base, freq_scale); - Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, tmpq, n_embd_head, n_head, n_tokens), KQ_pos, n_embd_head, 0, 0, freq_base, freq_scale); + Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + Kcur = 
ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); break; case MODEL_13B: - Kcur = ggml_reshape_3d(ctx0, tmpk, n_embd/n_head, n_head, n_tokens); - Qcur = ggml_reshape_3d(ctx0, tmpq, n_embd/n_head, n_head, n_tokens); + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd/n_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd/n_head, n_head, n_tokens); break; default: GGML_ASSERT(false); } + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); - offload_func_kq(Kcur); - ggml_set_name(Kcur, "Kcur"); + llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - offload_func_kq(Qcur); - ggml_set_name(Qcur, "Qcur"); + // apply ALiBi for 13B model + const float alibi_bias_max = model.type == MODEL_13B ? 8.0f : -1.0f; - // store key and value to memory - { - // compute the transposed [n_tokens, n_embd] V matrix - - struct ggml_tensor * tmpv = ggml_mul_mat(ctx0, model.layers[il].wv, cur); - offload_func_v(tmpv); - ggml_set_name(tmpv, "tmpv"); - - struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, tmpv, n_embd_gqa, n_tokens)); - offload_func_v(Vcur); - ggml_set_name(Vcur, "Vcur"); - - struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, n_tokens*n_embd_gqa, (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); - offload_func_kq(k); - ggml_set_name(k, "k"); - - struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); - offload_func_v(v); - ggml_set_name(v, "v"); - - // important: storing RoPE-ed version of K in the KV cache! - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); - } - - struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); - offload_func_kq(Q); - ggml_set_name(Q, "Q"); - - struct ggml_tensor * K = - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); - offload_func_kq(K); - ggml_set_name(K, "K"); - - // K * Q - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - offload_func_kq(KQ); - ggml_set_name(KQ, "KQ"); - - // KQ_scaled = KQ / sqrt(n_embd_head) - // KQ_scaled shape [n_past + n_tokens, n_tokens, n_head, 1] - struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, KQ_scale); - offload_func_kq(KQ_scaled); - ggml_set_name(KQ_scaled, "KQ_scaled"); - - struct ggml_tensor * KQ_masked; - struct ggml_tensor * KQ_scaled_alibi; - - switch (model.type) { - case MODEL_7B: - KQ_masked = ggml_add(ctx0, KQ_scaled, KQ_mask); - break; - case MODEL_13B: - // TODO: replace with ggml_add() - KQ_scaled_alibi = ggml_alibi(ctx0, KQ_scaled, /*n_past*/ 0, n_head, 8); - ggml_set_name(KQ_scaled_alibi, "KQ_scaled_alibi"); - KQ_masked = ggml_add(ctx0, KQ_scaled_alibi, KQ_mask); - break; - default: - GGML_ASSERT(false); - } - - // KQ = soft_max(KQ_masked) - struct ggml_tensor * KQ_soft_max = ggml_soft_max(ctx0, KQ_masked); - offload_func_v(KQ_soft_max); - ggml_set_name(KQ_soft_max, "KQ_soft_max"); - - // split cached V into n_head heads - struct ggml_tensor * V = - ggml_view_3d(ctx0, kv_self.v, - n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); - offload_func_v(V); - ggml_set_name(V, 
"V"); - - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - offload_func_v(KQV); - ggml_set_name(KQV, "KQV"); - - // KQV_merged = KQV.permute(0, 2, 1, 3) - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - offload_func_v(KQV_merged); - ggml_set_name(KQV_merged, "KQV_merged"); - - // cur = KQV_merged.contiguous().view(n_embd, n_tokens) - cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); - offload_func_v(cur); - ggml_set_name(cur, "KQV_merged_contiguous"); - - // projection (no bias) - cur = ggml_mul_mat(ctx0, - model.layers[il].wo, - cur); - offload_func(cur); - ggml_set_name(cur, "result_wo"); + cur = llm_build_kqv(lctx, ctx0, cur, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, alibi_bias_max, cb, il); + cb(cur, "kqv_out", il); } - struct ggml_tensor * inpFF = ggml_add(ctx0, cur, inpSA); - offload_func(inpFF); - ggml_set_name(inpFF, "inpFF"); + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); // feed-forward network { - // norm - { - cur = ggml_rms_norm(ctx0, inpFF, norm_rms_eps); - offload_func(cur); - ggml_set_name(cur, "rms_norm_1"); + cur = llm_build_norm(ctx0, ffn_inp, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, il); + cb(cur, "ffn_norm", il); - // cur = cur*ffn_norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.layers[il].ffn_norm); - offload_func(cur); - ggml_set_name(cur, "ffn_norm"); - } - - struct ggml_tensor * tmp = ggml_mul_mat(ctx0, - model.layers[il].w3, - cur); - offload_func(tmp); - ggml_set_name(tmp, "result_w3"); - - cur = ggml_mul_mat(ctx0, - model.layers[il].w1, - cur); - offload_func(cur); - ggml_set_name(cur, "result_w1"); - - // SILU activation - cur = ggml_silu(ctx0, cur); - offload_func(cur); - ggml_set_name(cur, "silu"); - - cur = ggml_mul(ctx0, cur, tmp); - offload_func(cur); - ggml_set_name(cur, "silu_x_result_w3"); - - cur = ggml_mul_mat(ctx0, - model.layers[il].w2, - cur); - offload_func(cur); - ggml_set_name(cur, "result_w2"); + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); } - cur = ggml_add(ctx0, cur, inpFF); - offload_func(cur); - ggml_set_name(cur, "inpFF_+_result_w2"); + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); // input for next layer inpL = cur; @@ -3857,366 +3724,14 @@ static struct ggml_cgraph * llm_build_baichaun( cur = inpL; - // norm - { - cur = ggml_rms_norm(ctx0, cur, norm_rms_eps); - offload_func_nr(cur); - ggml_set_name(cur, "rms_norm_2"); - - // cur = cur*norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.output_norm); - // offload_func_nr(cur); // TODO CPU + GPU mirrored backend - ggml_set_name(cur, "result_norm"); - } + cur = llm_build_norm(ctx0, cur, + model.output_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, -1); + cb(cur, "result_norm", -1); // lm_head cur = ggml_mul_mat(ctx0, model.output, cur); - ggml_set_name(cur, "result_output"); - - ggml_build_forward_expand(gf, cur); - - ggml_free(ctx0); - - return gf; -} - -static struct ggml_cgraph * llm_build_refact( - llama_context & lctx, - const llama_batch & batch) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - const auto & cparams = lctx.cparams; - - const auto & kv_self = lctx.kv_self; - - GGML_ASSERT(!!kv_self.ctx); - - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t 
n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - - const float norm_rms_eps = hparams.f_norm_rms_eps; - - const int n_gpu_layers = model.n_gpu_layers; - - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; - const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; - - // printf("n_kv = %d\n", n_kv); - - auto & buf_compute = lctx.buf_compute; - - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ true, - }; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - if (batch.token) { - struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - - ggml_allocr_alloc(lctx.alloc, inp_tokens); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inp_tokens->data, batch.token, n_tokens*ggml_element_size(inp_tokens)); - } - ggml_set_name(inp_tokens, "inp_tokens"); - - inpL = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens); - } else { -#ifdef GGML_USE_MPI - GGML_ASSERT(false && "not implemented"); -#endif - - inpL = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, n_tokens); - - ggml_allocr_alloc(lctx.alloc, inpL); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inpL->data, batch.embd, n_tokens * n_embd * ggml_element_size(inpL)); - } - } - - const int i_gpu_start = n_layer - n_gpu_layers; - (void) i_gpu_start; - - // offload functions set the tensor output backend to GPU - // tensors are GPU-accelerated if any input or the output has been offloaded - offload_func_t offload_func_nr = llama_nop; // nr = non-repeating - offload_func_t offload_func_kq = llama_nop; - offload_func_t offload_func_v = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (n_gpu_layers > n_layer) { - offload_func_nr = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 1) { - offload_func_v = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 2) { - offload_func_kq = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)"); - ggml_allocr_alloc(lctx.alloc, KQ_scale); - if (!ggml_allocr_is_measure(lctx.alloc)) { - ggml_set_f32(KQ_scale, 1.0f/sqrtf(float(n_embd_head))); - } - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - offload_func_kq(KQ_mask); - ggml_set_name(KQ_mask, "KQ_mask"); - ggml_allocr_alloc(lctx.alloc, KQ_mask); - if (!ggml_allocr_is_measure(lctx.alloc)) { - float * data = (float *) KQ_mask->data; - memset(data, 0, ggml_nbytes(KQ_mask)); - - for (int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; - - for (int i = 0; i < n_kv; ++i) { - if (!kv_self.cells[i].has_seq_id(seq_id) || kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; - } - } - } - } - } - - for (int il = 0; il < n_layer; ++il) { - ggml_format_name(inpL, "layer_inp_%d", il); - - offload_func_t offload_func = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (il >= i_gpu_start) { - offload_func = 
ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS - - struct ggml_tensor * inpSA = inpL; - - // norm - { - cur = ggml_rms_norm(ctx0, inpL, norm_rms_eps); - offload_func(cur); - ggml_set_name(cur, "rms_norm_0"); - - // cur = cur*attn_norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.layers[il].attn_norm); - offload_func(cur); - ggml_set_name(cur, "attention_norm_0"); - } - - // self-attention - { - // compute Q and K - struct ggml_tensor * tmpk = ggml_mul_mat(ctx0, model.layers[il].wk, cur); - offload_func_kq(tmpk); - ggml_set_name(tmpk, "tmpk"); - - struct ggml_tensor * tmpq = ggml_mul_mat(ctx0, model.layers[il].wq, cur); - offload_func_kq(tmpq); - ggml_set_name(tmpq, "tmpq"); - - struct ggml_tensor * Kcur = ggml_reshape_3d(ctx0, tmpk, n_embd_head, n_head_kv, n_tokens); - offload_func_kq(Kcur); - ggml_set_name(Kcur, "Kcur"); - - struct ggml_tensor * Qcur = ggml_reshape_3d(ctx0, tmpq, n_embd_head, n_head, n_tokens); - offload_func_kq(Qcur); - ggml_set_name(Qcur, "Qcur"); - - // store key and value to memory - { - // compute the transposed [n_tokens, n_embd] V matrix - - struct ggml_tensor * tmpv = ggml_mul_mat(ctx0, model.layers[il].wv, cur); - offload_func_v(tmpv); - ggml_set_name(tmpv, "tmpv"); - - struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, tmpv, n_embd_gqa, n_tokens)); - offload_func_v(Vcur); - ggml_set_name(Vcur, "Vcur"); - - struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, n_tokens*n_embd_gqa, (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); - offload_func_kq(k); - ggml_set_name(k, "k"); - - struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); - offload_func_v(v); - ggml_set_name(v, "v"); - - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); - } - - struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); - offload_func_kq(Q); - ggml_set_name(Q, "Q"); - - struct ggml_tensor * K = - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); - offload_func_kq(K); - ggml_set_name(K, "K"); - - // K * Q - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - offload_func_kq(KQ); - ggml_set_name(KQ, "KQ"); - - // KQ_scaled = KQ / sqrt(n_embd_head) - // KQ_scaled shape [n_kv, n_tokens, n_head, 1] - struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, KQ_scale); - offload_func_kq(KQ_scaled); - ggml_set_name(KQ_scaled, "KQ_scaled"); - - // KQ_masked = mask_past(KQ_scaled) - struct ggml_tensor * KQ_scaled_alibi = ggml_alibi(ctx0, KQ_scaled, /*n_past*/ 0, n_head, 8); - ggml_set_name(KQ_scaled_alibi, "KQ_scaled_alibi"); - - struct ggml_tensor * KQ_masked = ggml_add(ctx0, KQ_scaled_alibi, KQ_mask); - offload_func_kq(KQ_masked); - ggml_set_name(KQ_masked, "KQ_masked"); - - // KQ = soft_max(KQ_masked) - struct ggml_tensor * KQ_soft_max = ggml_soft_max(ctx0, KQ_masked); - offload_func_v(KQ_soft_max); - ggml_set_name(KQ_soft_max, "KQ_soft_max"); - - // split cached V into n_head heads - struct ggml_tensor * V = - ggml_view_3d(ctx0, kv_self.v, - n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); - offload_func_v(V); - ggml_set_name(V, "V"); - -#if 1 - 
struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - offload_func_v(KQV); - ggml_set_name(KQV, "KQV"); -#else - // make V contiguous in memory to speed up the matmul, however we waste time on the copy - // on M1 this is faster for the perplexity computation, but ~5% slower for the single-token generation - // is there a better way? - struct ggml_tensor * V_cont = ggml_cpy(ctx0, V, ggml_new_tensor_3d(ctx0, kv_self.v->type, n_ctx, n_embd_head, n_head)); - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V_cont, KQ_soft_max); -#endif - - // KQV_merged = KQV.permute(0, 2, 1, 3) - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - offload_func_v(KQV_merged); - ggml_set_name(KQV_merged, "KQV_merged"); - - // cur = KQV_merged.contiguous().view(n_embd, n_tokens) - cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); - offload_func_v(cur); - ggml_set_name(cur, "KQV_merged_contiguous"); - - // projection (no bias) - cur = ggml_mul_mat(ctx0, - model.layers[il].wo, - cur); - offload_func(cur); - ggml_set_name(cur, "result_wo"); - } - - struct ggml_tensor * inpFF = ggml_add(ctx0, cur, inpSA); - offload_func(inpFF); - ggml_set_name(inpFF, "inpFF"); - - // feed-forward network - { - // norm - { - cur = ggml_rms_norm(ctx0, inpFF, norm_rms_eps); - offload_func(cur); - ggml_set_name(cur, "rms_norm_1"); - - // cur = cur*ffn_norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.layers[il].ffn_norm); - offload_func(cur); - ggml_set_name(cur, "ffn_norm"); - } - - struct ggml_tensor * tmp = ggml_mul_mat(ctx0, - model.layers[il].w3, - cur); - offload_func(tmp); - ggml_set_name(tmp, "result_w3"); - - cur = ggml_mul_mat(ctx0, - model.layers[il].w1, - cur); - offload_func(cur); - ggml_set_name(cur, "result_w1"); - - // SILU activation - cur = ggml_silu(ctx0, cur); - offload_func(cur); - ggml_set_name(cur, "silu"); - - cur = ggml_mul(ctx0, cur, tmp); - offload_func(cur); - ggml_set_name(cur, "silu_x_result_w3"); - - cur = ggml_mul_mat(ctx0, - model.layers[il].w2, - cur); - offload_func(cur); - ggml_set_name(cur, "result_w2"); - } - - cur = ggml_add(ctx0, cur, inpFF); - offload_func(cur); - ggml_set_name(cur, "inpFF_+_result_w2"); - - // input for next layer - inpL = cur; - } - - cur = inpL; - - // norm - { - cur = ggml_rms_norm(ctx0, cur, norm_rms_eps); - offload_func_nr(cur); - ggml_set_name(cur, "rms_norm_2"); - - // cur = cur*norm(broadcasted) - cur = ggml_mul(ctx0, cur, model.output_norm); - // offload_func_nr(cur); // TODO CPU + GPU mirrored backend - ggml_set_name(cur, "result_norm"); - } - - // lm_head - cur = ggml_mul_mat(ctx0, model.output, cur); - ggml_set_name(cur, "result_output"); + cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); @@ -4227,7 +3742,9 @@ static struct ggml_cgraph * llm_build_refact( static struct ggml_cgraph * llm_build_falcon( llama_context & lctx, - const llama_batch & batch) { + const llama_batch & batch, + const llm_build_cb & cb, + bool worst_case) { const auto & model = lctx.model; const auto & hparams = model.hparams; const auto & cparams = lctx.cparams; @@ -4250,13 +3767,11 @@ static struct ggml_cgraph * llm_build_falcon( const float freq_scale = cparams.rope_freq_scale; const float norm_eps = hparams.f_norm_eps; - const int n_gpu_layers = model.n_gpu_layers; - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; - const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; + const int32_t n_kv = worst_case ? 
n_ctx : kv_self.n; + const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; - const bool do_rope_shift = ggml_allocr_is_measure(lctx.alloc) || kv_self.has_shift; + const bool do_rope_shift = worst_case || kv_self.has_shift; //printf("kv_head = %d, n_kv = %d, n_tokens = %d, n_ctx = %d, is_measure = %d, has_shift = %d\n", // kv_head, n_kv, n_tokens, n_ctx, ggml_allocr_is_measure(lctx.alloc), kv_self.has_shift); @@ -4276,294 +3791,94 @@ static struct ggml_cgraph * llm_build_falcon( struct ggml_tensor * cur; struct ggml_tensor * inpL; - if (batch.token) { - struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); + cb(inpL, "inp_embd", -1); - ggml_allocr_alloc(lctx.alloc, inp_tokens); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inp_tokens->data, batch.token, n_tokens*ggml_element_size(inp_tokens)); - } - ggml_set_name(inp_tokens, "inp_tokens"); - - inpL = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens); - } else { -#ifdef GGML_USE_MPI - GGML_ASSERT(false && "not implemented"); -#endif - - inpL = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, n_tokens); - - ggml_allocr_alloc(lctx.alloc, inpL); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inpL->data, batch.embd, n_tokens * n_embd * ggml_element_size(inpL)); - } - } - - const int i_gpu_start = n_layer - n_gpu_layers; - (void) i_gpu_start; - - // offload functions set the tensor output backend to GPU - // tensors are GPU-accelerated if any input or the output has been offloaded - offload_func_t offload_func_nr = llama_nop; // nr = non-repeating - offload_func_t offload_func_kq = llama_nop; - offload_func_t offload_func_v = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (n_gpu_layers > n_layer) { - offload_func_nr = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 1) { - offload_func_v = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 2) { - offload_func_kq = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); // KQ_scale struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)"); - ggml_allocr_alloc(lctx.alloc, KQ_scale); - if (!ggml_allocr_is_measure(lctx.alloc)) { - ggml_set_f32(KQ_scale, 1.0f/sqrtf(float(n_embd)/n_head)); - } + cb(KQ_scale, "KQ_scale", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - offload_func_kq(KQ_mask); - ggml_set_name(KQ_mask, "KQ_mask"); - ggml_allocr_alloc(lctx.alloc, KQ_mask); - if (!ggml_allocr_is_measure(lctx.alloc)) { - float * data = (float *) KQ_mask->data; - memset(data, 0, ggml_nbytes(KQ_mask)); - - for (int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; - - for (int i = 0; i < n_kv; ++i) { - if (!kv_self.cells[i].has_seq_id(seq_id) || kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; - } - } - } - } - } - - // KQ_pos - contains the positions - struct ggml_tensor * KQ_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - offload_func_kq(KQ_pos); - ggml_set_name(KQ_pos, "KQ_pos"); - ggml_allocr_alloc(lctx.alloc, KQ_pos); - if 
(!ggml_allocr_is_measure(lctx.alloc)) { - int * data = (int *) KQ_pos->data; - for (int i = 0; i < n_tokens; ++i) { - data[i] = batch.pos[i]; - } - } + cb(KQ_mask, "KQ_mask", -1); // shift the entire K-cache if needed if (do_rope_shift) { - struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_ctx); - offload_func_kq(K_shift); - ggml_set_name(K_shift, "K_shift"); - ggml_allocr_alloc(lctx.alloc, K_shift); - if (!ggml_allocr_is_measure(lctx.alloc)) { - int * data = (int *) K_shift->data; - for (int i = 0; i < n_ctx; ++i) { - data[i] = kv_self.cells[i].delta; - } - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * tmp = - ggml_rope_custom_inplace(ctx0, - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_head_kv, n_ctx, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il), - K_shift, n_embd_head, 2, 0, freq_base, freq_scale); - offload_func_kq(tmp); - ggml_build_forward_expand(gf, tmp); - } + llm_build_k_shift(lctx, ctx0, gf, n_embd_head, LLM_ROPE_NEOX, cb); } for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * attn_norm; - offload_func_t offload_func = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (il >= i_gpu_start) { - offload_func = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS + attn_norm = llm_build_norm(ctx0, inpL, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(attn_norm, "attn_norm", il); // self-attention - // TODO: refactor into common function (shared with LLaMA) { - attn_norm = ggml_norm(ctx0, inpL, norm_eps); - offload_func(attn_norm); - - attn_norm = ggml_add(ctx0, - ggml_mul(ctx0, attn_norm, model.layers[il].attn_norm), - model.layers[il].attn_norm_b); - offload_func(attn_norm->src[0]); - offload_func(attn_norm); - - if (model.layers[il].attn_norm_2) { // Falcon-40B - cur = ggml_norm(ctx0, inpL, norm_eps); - offload_func(cur); - - cur = ggml_add(ctx0, - ggml_mul(ctx0, cur, model.layers[il].attn_norm_2), - model.layers[il].attn_norm_2_b); - offload_func(cur->src[0]); - offload_func(cur); - } else { // Falcon 7B + if (model.layers[il].attn_norm_2) { + // Falcon-40B + cur = llm_build_norm(ctx0, attn_norm, + model.layers[il].attn_norm_2, + model.layers[il].attn_norm_2_b, + LLM_NORM, norm_eps, cb, il); + cb(cur, "attn_norm_2", il); + } else { cur = attn_norm; } - // compute QKV - cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - offload_func_kq(cur); + cb(cur, "wqkv", il); - // Note that the strides for Kcur, Vcur are set up so that the - // resulting views are misaligned with the tensor's storage - // (by applying the K/V offset we shift the tensor's original - // view to stick out behind the viewed QKV tensor's allocated - // memory, so to say). This is ok because no actual accesses - // happen to that out-of-range memory, but it can require some - // trickery when trying to accurately dump these views for - // debugging. 
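            // For orientation: each row of the fused QKV matmul result below is assumed to
            // be laid out as [ Q : n_embd | K : n_embd_gqa | V : n_embd_gqa ] in f32, so the
            // three views differ only in their byte offset into the same row (stride cur->nb[1]):
            //
            //   offs_q = 0;
            //   offs_k = sizeof(float) *  n_embd;
            //   offs_v = sizeof(float) * (n_embd + n_embd_gqa);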
+ struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); - const size_t wsize = ggml_type_size(cur->type); + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); - // TODO: these 2 ggml_conts are technically not needed, but we add them until CUDA support for - // non-contiguous views is added for the rope operator - struct ggml_tensor * tmpq = ggml_cont(ctx0, ggml_view_3d( - ctx0, cur, n_embd_head, n_head, n_tokens, - wsize * n_embd_head, - wsize * n_embd_head * (n_head + 2 * n_head_kv), - 0)); - offload_func_kq(tmpq); - - struct ggml_tensor * tmpk = ggml_cont(ctx0, ggml_view_3d( - ctx0, cur, n_embd_head, n_head_kv, n_tokens, - wsize * n_embd_head, - wsize * n_embd_head * (n_head + 2 * n_head_kv), - wsize * n_embd_head * n_head)); - offload_func_kq(tmpk); - - struct ggml_tensor * tmpv = ggml_view_3d( - ctx0, cur, n_embd_head, n_head_kv, n_tokens, - wsize * n_embd_head, - wsize * n_embd_head * (n_head + 2 * n_head_kv), - wsize * n_embd_head * (n_head + n_head_kv)); - offload_func_v(tmpv); + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); // using mode = 2 for neox mode - struct ggml_tensor * Qcur = ggml_rope_custom(ctx0, tmpq, KQ_pos, n_embd_head, 2, 0, freq_base, freq_scale); - offload_func_kq(Qcur); - struct ggml_tensor * Kcur = ggml_rope_custom(ctx0, tmpk, KQ_pos, n_embd_head, 2, 0, freq_base, freq_scale); - offload_func_kq(Kcur); + Qcur = ggml_rope_custom(ctx0, Qcur, inp_pos, n_embd_head, 2, 0, freq_base, freq_scale); + cb(Qcur, "Qcur", il); - { - struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, ggml_cont(ctx0, tmpv), n_embd_gqa, n_tokens)); - offload_func_v(Vcur); - offload_func_v(Vcur->src[0]->src[0]); - ggml_set_name(Vcur, "Vcur"); + Kcur = ggml_rope_custom(ctx0, Kcur, inp_pos, n_embd_head, 2, 0, freq_base, freq_scale); + cb(Kcur, "Kcur", il); - struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, n_tokens*n_embd_gqa, (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); - offload_func_kq(k); - ggml_set_name(k, "k"); + llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); - offload_func_v(v); - - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); - } - - struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); - offload_func_kq(Q); - ggml_set_name(Q, "Q"); - - struct ggml_tensor * K = - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); - offload_func_kq(K); - ggml_set_name(K, "K"); - - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - offload_func_kq(KQ); - ggml_set_name(KQ, "KQ"); - - struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, KQ_scale); - offload_func_kq(KQ_scaled); - ggml_set_name(KQ_scaled, "KQ_scaled"); - - struct ggml_tensor * 
KQ_masked = ggml_add(ctx0, KQ_scaled, KQ_mask); - offload_func_kq(KQ_masked); - ggml_set_name(KQ_masked, "KQ_masked"); - - struct ggml_tensor * KQ_soft_max = ggml_soft_max(ctx0, KQ_masked); - offload_func_v(KQ_soft_max); - ggml_set_name(KQ_soft_max, "KQ_soft_max"); - - struct ggml_tensor * V = - ggml_view_3d(ctx0, kv_self.v, - n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); - offload_func_v(V); - ggml_set_name(V, "V"); - - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - offload_func_v(KQV); - ggml_set_name(KQV, "KQV"); - - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - offload_func_v(KQV_merged); - ggml_set_name(KQV_merged, "KQV_merged"); - - cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); - offload_func_v(cur); - ggml_set_name(cur, "KQV_merged_contiguous"); - - cur = ggml_mul_mat(ctx0, model.layers[il].wo, cur); - offload_func(cur); - ggml_set_name(cur, "result_wo"); + cur = llm_build_kqv(lctx, ctx0, attn_norm, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); } - struct ggml_tensor * attn_out = cur; + struct ggml_tensor * ffn_inp = cur; // feed forward { - struct ggml_tensor * inpFF = attn_norm; - - cur = ggml_mul_mat(ctx0, model.layers[il].w3, inpFF); - offload_func(cur); - - cur = ggml_gelu(ctx0, cur); - offload_func(cur); - cur = ggml_mul_mat(ctx0, model.layers[il].w2, cur); - offload_func(cur); + cur = llm_build_ffn(ctx0, attn_norm, // !! use the attn norm, not the result + model.layers[il].ffn_up, NULL, + NULL, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); } - cur = ggml_add(ctx0, cur, attn_out); - offload_func(cur); + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + cur = ggml_add(ctx0, cur, inpL); - offload_func(cur); + cb(cur, "l_out", il); // input for next layer inpL = cur; @@ -4572,18 +3887,14 @@ static struct ggml_cgraph * llm_build_falcon( cur = inpL; // norm - { - cur = ggml_norm(ctx0, cur, norm_eps); - offload_func_nr(cur); - - cur = ggml_add(ctx0, - ggml_mul(ctx0, cur, model.output_norm), - model.output_norm_b); - ggml_set_name(cur, "result_norm"); - } + cur = llm_build_norm(ctx0, cur, + model.output_norm, + model.output_norm_b, + LLM_NORM, norm_eps, cb, -1); + cb(cur, "result_norm", -1); cur = ggml_mul_mat(ctx0, model.output, cur); - ggml_set_name(cur, "result_output"); + cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); @@ -4594,7 +3905,9 @@ static struct ggml_cgraph * llm_build_falcon( static struct ggml_cgraph * llm_build_starcoder( llama_context & lctx, - const llama_batch & batch) { + const llama_batch & batch, + const llm_build_cb & cb, + bool worst_case) { const auto & model = lctx.model; const auto & hparams = model.hparams; const auto & cparams = lctx.cparams; @@ -4607,7 +3920,6 @@ static struct ggml_cgraph * llm_build_starcoder( const int64_t n_layer = hparams.n_layer; const int64_t n_ctx = cparams.n_ctx; const int64_t n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_head = hparams.n_embd_head(); const int64_t n_embd_gqa = hparams.n_embd_gqa(); @@ -4615,11 +3927,9 @@ static struct ggml_cgraph * llm_build_starcoder( const float norm_eps = hparams.f_norm_eps; - const int n_gpu_layers = model.n_gpu_layers; - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = 
ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; - const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; + const int32_t n_kv = worst_case ? n_ctx : kv_self.n; + const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; auto & buf_compute = lctx.buf_compute; @@ -4634,266 +3944,95 @@ static struct ggml_cgraph * llm_build_starcoder( ggml_cgraph * gf = ggml_new_graph(ctx0); struct ggml_tensor * cur; - struct ggml_tensor * token; - struct ggml_tensor * position; + struct ggml_tensor * pos; struct ggml_tensor * inpL; - if (batch.token) { - struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); + cb(inpL, "inp_embd", -1); - ggml_allocr_alloc(lctx.alloc, inp_tokens); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inp_tokens->data, batch.token, n_tokens*ggml_element_size(inp_tokens)); - } - ggml_set_name(inp_tokens, "inp_tokens"); - - token = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens); - } else { -#ifdef GGML_USE_MPI - GGML_ASSERT(false && "not implemented"); -#endif - - token = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, n_tokens); - - ggml_allocr_alloc(lctx.alloc, token); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(token->data, batch.embd, n_tokens * n_embd * ggml_element_size(token)); - } - } - - const int i_gpu_start = n_layer - n_gpu_layers; - (void) i_gpu_start; - - // offload functions set the tensor output backend to GPU - // tensors are GPU-accelerated if any input or the output has been offloaded - offload_func_t offload_func_nr = llama_nop; // nr = non-repeating - offload_func_t offload_func_kq = llama_nop; - offload_func_t offload_func_v = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (n_gpu_layers > n_layer) { - offload_func_nr = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 1) { - offload_func_v = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 2) { - offload_func_kq = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS - - { - // Compute position embeddings. 
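        // Both the block removed here and the pos_embd lookup that replaces it further down
        // implement the same idea, sketched roughly as:
        //
        //   pos  = ggml_get_rows(ctx0, model.pos_embd, inp_pos); // one learned row per position
        //   inpL = ggml_add(ctx0, inpL, pos);                    // tok_embd[token] + pos_embd[pos]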
- struct ggml_tensor * inp_positions = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - ggml_allocr_alloc(lctx.alloc, inp_positions); - if (!ggml_allocr_is_measure(lctx.alloc)) { - for (int i = 0; i < n_tokens; ++i) { - ((int32_t *) inp_positions->data)[i] = batch.pos[i]; - } - } - ggml_set_name(inp_positions, "inp_positions"); - - position = ggml_get_rows(ctx0, model.pos_embeddings, inp_positions); - } + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); // KQ_scale struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)"); - ggml_allocr_alloc(lctx.alloc, KQ_scale); - if (!ggml_allocr_is_measure(lctx.alloc)) { - ggml_set_f32(KQ_scale, 1.0f/sqrtf(float(n_embd)/n_head)); - } + cb(KQ_scale, "KQ_scale", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - ggml_set_name(KQ_mask, "KQ_mask"); - offload_func_kq(KQ_mask); - ggml_allocr_alloc(lctx.alloc, KQ_mask); - if (!ggml_allocr_is_measure(lctx.alloc)) { - float * data = (float *) KQ_mask->data; - memset(data, 0, ggml_nbytes(KQ_mask)); + cb(KQ_mask, "KQ_mask", -1); - for (int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; + pos = ggml_get_rows(ctx0, model.pos_embd, inp_pos); + cb(pos, "pos_embd", -1); - for (int i = 0; i < n_kv; ++i) { - if (!kv_self.cells[i].has_seq_id(seq_id) || kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; - } - } - } - } - } - - inpL = ggml_add(ctx0, token, position); - ggml_set_name(inpL, "inpL"); + inpL = ggml_add(ctx0, inpL, pos); + cb(inpL, "inpL", -1); for (int il = 0; il < n_layer; ++il) { - offload_func_t offload_func = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (il >= i_gpu_start) { - offload_func = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS + cur = llm_build_norm(ctx0, inpL, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(cur, "attn_norm", il); + // self-attention { - // Norm - cur = ggml_norm(ctx0, inpL, norm_eps); - offload_func(cur); - - cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].attn_norm), model.layers[il].attn_norm_b); - offload_func(cur); - } - - { - // Self Attention cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - offload_func_kq(cur); + cb(cur, "wqkv", il); cur = ggml_add(ctx0, cur, model.layers[il].bqkv); - offload_func_kq(cur); + cb(cur, "bqkv", il); - struct ggml_tensor * tmpq = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - struct ggml_tensor * tmpk = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - struct ggml_tensor * tmpv = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); - ggml_set_name(tmpq, "tmpq"); - ggml_set_name(tmpk, 
"tmpk"); - ggml_set_name(tmpv, "tmpv"); + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); - offload_func_kq(tmpq); - offload_func_kq(tmpk); - offload_func_v (tmpv); + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - struct ggml_tensor * Qcur = ggml_reshape_3d(ctx0, tmpq, n_embd_head, n_head, n_tokens); - struct ggml_tensor * Kcur = tmpk; + llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - { - struct ggml_tensor * Vcur = ggml_transpose(ctx0, tmpv); - offload_func_v(Vcur); - ggml_set_name(Vcur, "Vcur"); - - struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, n_tokens*n_embd_gqa, (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); - offload_func_kq(k); - ggml_set_name(k, "k"); - - struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); - offload_func_v(v); - ggml_set_name(v, "v"); - - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); - } - - struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); - offload_func_kq(Q); - ggml_set_name(Q, "Q"); - - struct ggml_tensor * K = - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); - offload_func_kq(K); - ggml_set_name(K, "K"); - - // K * Q - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - offload_func_kq(KQ); - ggml_set_name(KQ, "KQ"); - - // KQ_scaled = KQ / sqrt(n_embd_head) - // KQ_scaled shape [n_past + n_tokens, n_tokens, n_head, 1] - struct ggml_tensor * KQ_scaled = ggml_scale_inplace(ctx0, KQ, KQ_scale); - offload_func_kq(KQ_scaled); - ggml_set_name(KQ_scaled, "KQ_scaled"); - - // KQ_masked = mask_past(KQ_scaled) - struct ggml_tensor * KQ_masked = ggml_add(ctx0, KQ_scaled, KQ_mask); - offload_func_kq(KQ_masked); - ggml_set_name(KQ_masked, "KQ_masked"); - - // KQ = soft_max(KQ_masked) - struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); - offload_func_v(KQ_soft_max); - ggml_set_name(KQ_soft_max, "KQ_soft_max"); - - // split cached V into n_head heads - struct ggml_tensor * V = - ggml_view_3d(ctx0, kv_self.v, - n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); - ggml_set_name(V, "V"); - - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - offload_func_v(KQV); - ggml_set_name(KQV, "KQV"); - - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - offload_func_v(KQV_merged); - ggml_set_name(KQV_merged, "KQV_merged"); - - cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); - offload_func_v(cur); - ggml_set_name(cur, "KQV_merged_contiguous"); + cur = llm_build_kqv(lctx, ctx0, cur, + model.layers[il].wo, model.layers[il].bo, + Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); } - // Projection - cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wo, cur), model.layers[il].bo); - offload_func(cur); - - // Add the input - cur = ggml_add(ctx0, cur, inpL); - offload_func(cur); - - struct ggml_tensor * inpFF = cur; + // add the input + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); + cb(ffn_inp, "ffn_inp", il); // FF { - // Norm - { - cur = ggml_norm(ctx0, inpFF, 
norm_eps); - offload_func_nr(cur); + cur = llm_build_norm(ctx0, ffn_inp, + model.layers[il].ffn_norm, + model.layers[il].ffn_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(cur, "ffn_norm", il); - cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].ffn_norm), model.layers[il].ffn_norm_b); - offload_func_nr(cur); - } - - cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].w3, cur), model.layers[il].b3); - offload_func(cur); - - // GELU activation - cur = ggml_gelu(ctx0, cur); - offload_func(cur); - - // Projection - cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].w2, cur), model.layers[il].b2); - offload_func(cur); + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); } - inpL = ggml_add(ctx0, cur, inpFF); - + inpL = ggml_add(ctx0, cur, ffn_inp); + cb(inpL, "l_out", il); } - // Output Norm - { - cur = ggml_norm(ctx0, inpL, norm_eps); - offload_func_nr(cur); - - cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.output_norm), model.output_norm_b); - ggml_set_name(cur, "result_norm"); - } + cur = llm_build_norm(ctx0, inpL, + model.output_norm, + model.output_norm_b, + LLM_NORM, norm_eps, cb, -1); + cb(cur, "result_norm", -1); cur = ggml_mul_mat(ctx0, model.output, cur); - ggml_set_name(cur, "result_output"); + cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); ggml_free(ctx0); @@ -4903,7 +4042,9 @@ static struct ggml_cgraph * llm_build_starcoder( static struct ggml_cgraph * llm_build_persimmon( llama_context & lctx, - const llama_batch & batch) { + const llama_batch & batch, + const llm_build_cb & cb, + bool worst_case) { const auto & model = lctx.model; const auto & hparams = model.hparams; @@ -4912,29 +4053,27 @@ static struct ggml_cgraph * llm_build_persimmon( GGML_ASSERT(!!kv_self.ctx); const auto & cparams = lctx.cparams; + const int64_t n_embd = hparams.n_embd; const int64_t n_layer = hparams.n_layer; const int64_t n_ctx = cparams.n_ctx; const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_head = hparams.n_head; const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - const size_t n_rot = n_embd_head / 2; + const int64_t n_rot = n_embd_head / 2; const float freq_base = cparams.rope_freq_base; const float freq_scale = cparams.rope_freq_scale; - const float norm_eps = hparams.f_norm_eps; - - const int n_gpu_layers = model.n_gpu_layers; - + const float norm_eps = hparams.f_norm_eps; const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; - const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; + const int32_t n_kv = worst_case ? n_ctx : kv_self.n; + const int32_t kv_head = worst_case ? 
n_ctx - n_tokens : kv_self.head;
 
-    const bool do_rope_shift = ggml_allocr_is_measure(lctx.alloc) || kv_self.has_shift;
+    const bool do_rope_shift = worst_case || kv_self.has_shift;
 
     auto & buf_compute = lctx.buf_compute;
 
+
     struct ggml_init_params params = {
         /*.mem_size   =*/ buf_compute.size,
         /*.mem_buffer =*/ buf_compute.data,
@@ -4948,146 +4087,77 @@ static struct ggml_cgraph * llm_build_persimmon(
     struct ggml_tensor * cur;
     struct ggml_tensor * inpL;
 
-    if (batch.token) {
-        struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens);
+    inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb);
+    cb(inpL, "inp_embd", -1);
+
+    struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens);
+    cb(inp_pos, "inp_pos", -1);
 
-        ggml_allocr_alloc(lctx.alloc, inp_tokens);
-        if (!ggml_allocr_is_measure(lctx.alloc)) {
-            memcpy(inp_tokens->data, batch.token, n_tokens*ggml_element_size(inp_tokens));
-        }
-        ggml_set_name(inp_tokens, "inp_tokens");
-        inpL = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens);
-    } else {
-        inpL = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, n_tokens);
-        ggml_allocr_alloc(lctx.alloc, inpL);
-        if (!ggml_allocr_is_measure(lctx.alloc)) {
-            memcpy(inpL->data, batch.embd, n_tokens * n_embd * ggml_element_size(inpL));
-        }
-    }
-    const int i_gpu_start = n_layer - n_gpu_layers;
-    (void) i_gpu_start;
-    offload_func_t offload_func_nr = llama_nop; // nr = non-repeating
-    offload_func_t offload_func_kq = llama_nop;
-    offload_func_t offload_func_v  = llama_nop;
 
     // KQ_scale
     struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1);
-    ggml_allocr_alloc(lctx.alloc, KQ_scale);
-    if (!ggml_allocr_is_measure(lctx.alloc)) {
-        ggml_set_f32(KQ_scale, 1.0f/sqrtf(float(n_embd_head)));
-    }
-    ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)");
+    cb(KQ_scale, "KQ_scale", -1);
+
     struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1);
-    offload_func_kq(KQ_mask);
-    ggml_set_name(KQ_mask, "KQ_mask");
-    ggml_allocr_alloc(lctx.alloc, KQ_mask);
+    cb(KQ_mask, "KQ_mask", -1);
 
-    if (!ggml_allocr_is_measure(lctx.alloc)) {
-        float * data = (float *) KQ_mask->data;
-        memset(data, 0, ggml_nbytes(KQ_mask));
-        for (int h = 0; h < 1; ++h) {
-            for (int j = 0; j < n_tokens; ++j) {
-                const llama_pos pos = batch.pos[j];
-                const llama_seq_id seq_id = batch.seq_id[j][0];
-                for (int i = 0; i < n_kv; ++i) {
-                    if (!kv_self.cells[i].has_seq_id(seq_id) || kv_self.cells[i].pos > pos) {
-                        data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY;
-                    }
-                }
-            }
-        }
-    }
-
-    struct ggml_tensor * KQ_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens);
-    offload_func_kq(KQ_pos);
-    ggml_set_name(KQ_pos, "KQ_pos");
-    ggml_allocr_alloc(lctx.alloc, KQ_pos);
-    if (!ggml_allocr_is_measure(lctx.alloc)) {
-        int * data = (int *) KQ_pos->data;
-        for (int i = 0; i < n_tokens; ++i) {
-            data[i] = batch.pos[i];
-        }
-    }
 
     if (do_rope_shift) {
-        struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_ctx);
-        offload_func_kq(K_shift);
-        ggml_set_name(K_shift, "K_shift");
-        ggml_allocr_alloc(lctx.alloc, K_shift);
-        if (!ggml_allocr_is_measure(lctx.alloc)) {
-            int * data = (int *) K_shift->data;
-            for (int i = 0; i < n_ctx; ++i) {
-                data[i] = kv_self.cells[i].delta;
-            }
-        }
-        for (int il = 0; il < n_layer; ++il) {
-            struct ggml_tensor * tmp =
-                    // we rotate only the first n_rot dimensions.
- ggml_rope_custom_inplace(ctx0, - ggml_view_3d(ctx0, kv_self.k, - n_rot, n_head, n_ctx, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*(n_embd_head*n_ctx*il) - ), - K_shift, n_rot, 2, 0, freq_base, freq_scale); - offload_func_kq(tmp); - ggml_build_forward_expand(gf, tmp); - } + llm_build_k_shift(lctx, ctx0, gf, n_rot, LLM_ROPE_NEOX, cb); } - for (int il=0; il < n_layer; ++il) { + + for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * residual = inpL; - offload_func_t offload_func = llama_nop; - { - cur = ggml_norm(ctx0, inpL, norm_eps); - offload_func(cur); - cur = ggml_mul(ctx0, cur, model.layers[il].attn_norm); - offload_func(cur); - cur = ggml_add(ctx0, cur, model.layers[il].attn_norm_b); - offload_func(cur); - ggml_format_name(cur, "input_layernorm_%d", il); - } + + cur = llm_build_norm(ctx0, inpL, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(cur, "attn_norm", il); + // self attention { cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - offload_func_kq(cur); + cb(cur, "wqkv", il); + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); - offload_func_kq(cur); + cb(cur, "bqkv", il); // split qkv GGML_ASSERT(n_head_kv == n_head); - ggml_set_name(cur, format("qkv_%d", il).c_str()); + struct ggml_tensor * tmpqkv = ggml_reshape_4d(ctx0, cur, n_embd_head, 3, n_head, n_tokens); - offload_func_kq(tmpqkv); + cb(tmpqkv, "tmpqkv", il); + struct ggml_tensor * tmpqkv_perm = ggml_cont(ctx0, ggml_permute(ctx0, tmpqkv, 0, 3, 1, 2)); - offload_func_kq(tmpqkv_perm); - ggml_format_name(tmpqkv_perm, "tmpqkv_perm_%d", il); + cb(tmpqkv_perm, "tmpqkv", il); + struct ggml_tensor * tmpq = ggml_view_3d( ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, ggml_element_size(tmpqkv_perm) * n_embd_head, ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, 0 ); - offload_func_kq(tmpq); + cb(tmpq, "tmpq", il); + struct ggml_tensor * tmpk = ggml_view_3d( ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, ggml_element_size(tmpqkv_perm) * n_embd_head, ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, ggml_element_size(tmpqkv_perm) * n_embd_head * n_head * n_tokens ); - offload_func_kq(tmpk); - // Q/K Layernorm - tmpq = ggml_norm(ctx0, tmpq, norm_eps); - offload_func_kq(tmpq); - tmpq = ggml_mul(ctx0, tmpq, model.layers[il].attn_q_norm); - offload_func_kq(tmpq); - tmpq = ggml_add(ctx0, tmpq, model.layers[il].attn_q_norm_b); - offload_func_kq(tmpq); + cb(tmpk, "tmpk", il); - tmpk = ggml_norm(ctx0, tmpk, norm_eps); - offload_func_v(tmpk); - tmpk = ggml_mul(ctx0, tmpk, model.layers[il].attn_k_norm); - offload_func_v(tmpk); - tmpk = ggml_add(ctx0, tmpk, model.layers[il].attn_k_norm_b); - offload_func_v(tmpk); + // Q/K Layernorm + tmpq = llm_build_norm(ctx0, tmpq, + model.layers[il].attn_q_norm, + model.layers[il].attn_q_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(tmpq, "tmpq", il); + + tmpk = llm_build_norm(ctx0, tmpk, + model.layers[il].attn_k_norm, + model.layers[il].attn_k_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(tmpk, "tmpk", il); // RoPE the first n_rot of q/k, pass the other half, and concat. 
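            // Rough picture of the split below, per head vector of size n_embd_head
            // (here n_rot = n_embd_head/2):
            //
            //   tmpq = [ q_rot : n_rot | q_pass : n_embd_head - n_rot ]
            //   tmpk = [ k_rot : n_rot | k_pass : n_embd_head - n_rot ]
            //
            // only the *_rot halves are rotated by ggml_rope_custom; the *_pass halves are
            // carried through unchanged and concatenated back on afterwards.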
struct ggml_tensor * qrot = ggml_view_3d( @@ -5096,16 +4166,15 @@ static struct ggml_cgraph * llm_build_persimmon( ggml_element_size(tmpq) * n_embd_head * n_head, 0 ); - offload_func_kq(qrot); - ggml_format_name(qrot, "qrot_%d", il); + cb(qrot, "qrot", il); + struct ggml_tensor * krot = ggml_view_3d( ctx0, tmpk, n_rot, n_head, n_tokens, ggml_element_size(tmpk) * n_embd_head, ggml_element_size(tmpk) * n_embd_head * n_head, 0 ); - offload_func_kq(krot); - ggml_format_name(krot, "krot_%d", il); + cb(krot, "krot", il); // get the second half of tmpq, e.g tmpq[n_rot:, :, :] struct ggml_tensor * qpass = ggml_view_3d( @@ -5114,193 +4183,117 @@ static struct ggml_cgraph * llm_build_persimmon( ggml_element_size(tmpq) * n_embd_head * n_head, ggml_element_size(tmpq) * n_rot ); - offload_func_kq(qpass); - ggml_format_name(qpass, "qpass_%d", il); + cb(qpass, "qpass", il); + struct ggml_tensor * kpass = ggml_view_3d( ctx0, tmpk, n_rot, n_head, n_tokens, ggml_element_size(tmpk) * n_embd_head, ggml_element_size(tmpk) * n_embd_head * n_head, ggml_element_size(tmpk) * n_rot ); - offload_func_kq(kpass); - ggml_format_name(kpass, "kpass_%d", il); + cb(kpass, "kpass", il); - struct ggml_tensor * qrotated = ggml_rope_custom( - ctx0, qrot, KQ_pos, n_rot, 2, 0, freq_base, freq_scale + struct ggml_tensor * qrotated = ggml_rope_custom( + ctx0, qrot, inp_pos, n_rot, 2, 0, freq_base, freq_scale ); - offload_func_kq(qrotated); + cb(qrotated, "qrotated", il); + struct ggml_tensor * krotated = ggml_rope_custom( - ctx0, krot, KQ_pos, n_rot, 2, 0, freq_base, freq_scale + ctx0, krot, inp_pos, n_rot, 2, 0, freq_base, freq_scale ); - offload_func_kq(krotated); + cb(krotated, "krotated", il); + // ggml currently only supports concatenation on dim=2 // so we need to permute qrot, qpass, concat, then permute back. 
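            // Sketch of the shape flow (ne order as in ggml, innermost dimension first):
            //
            //   qrot/krot : [n_rot, n_head, n_tokens] --permute--> [n_tokens, n_head, n_rot]
            //   concat with the *_pass half on dim 2  --> [n_tokens, n_head, n_embd_head]
            //   permute back                          --> the usual [n_embd_head, ...] attention layout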
qrotated = ggml_cont(ctx0, ggml_permute(ctx0, qrotated, 2, 1, 0, 3)); - offload_func_kq(qrotated); + cb(qrotated, "qrotated", il); + krotated = ggml_cont(ctx0, ggml_permute(ctx0, krotated, 2, 1, 0, 3)); - offload_func_kq(krotated); + cb(krotated, "krotated", il); qpass = ggml_cont(ctx0, ggml_permute(ctx0, qpass, 2, 1, 0, 3)); - offload_func_kq(qpass); + cb(qpass, "qpass", il); + kpass = ggml_cont(ctx0, ggml_permute(ctx0, kpass, 2, 1, 0, 3)); - offload_func_kq(kpass); + cb(kpass, "kpass", il); struct ggml_tensor * Qcur = ggml_concat(ctx0, qrotated, qpass); - offload_func_kq(Qcur); + cb(Qcur, "Qcur", il); + struct ggml_tensor * Kcur = ggml_concat(ctx0, krotated, kpass); - offload_func_kq(Kcur); + cb(Kcur, "Kcur", il); struct ggml_tensor * Q = ggml_cont(ctx0, ggml_permute(ctx0, Qcur, 1, 2, 0, 3)); - offload_func_kq(Q); + cb(Q, "Q", il); Kcur = ggml_cont(ctx0, ggml_permute(ctx0, Kcur, 2, 1, 0, 3)); - offload_func_kq(Kcur); - { - struct ggml_tensor * tmpv = ggml_view_3d( - ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, - ggml_element_size(tmpqkv_perm) * n_embd_head, - ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, - ggml_element_size(tmpqkv_perm) * n_embd_head * n_head * n_tokens * 2 + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_view_3d( + ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, + ggml_element_size(tmpqkv_perm) * n_embd_head, + ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, + ggml_element_size(tmpqkv_perm) * n_embd_head * n_head * n_tokens * 2 ); - offload_func_v(tmpv); - // store K, V in cache - struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, tmpv, n_embd_gqa, n_tokens)); - offload_func_v(Vcur); - ggml_set_name(Vcur, "Vcur"); + cb(Vcur, "Vcur", il); - struct ggml_tensor * k = ggml_view_1d( - ctx0, kv_self.k, n_tokens*n_embd_gqa, - (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head) - ); - offload_func_kq(k); - ggml_set_name(k, "k"); + llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); - offload_func_v(v); - ggml_set_name(v, "v"); - - // important: storing RoPE-ed version of K in the KV cache! 
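            // (context) since the cache holds post-RoPE keys, cached entries cannot simply be
            // relocated; the do_rope_shift path at the top of the graph re-rotates the rotary
            // part of every cached K row by its position delta (kv_self.cells[i].delta) first.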
- ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); - } - struct ggml_tensor * K = ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); - - offload_func_kq(K); - ggml_format_name(K, "K_%d", il); - - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - offload_func_kq(KQ); - ggml_set_name(KQ, "KQ"); - - struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, KQ_scale); - offload_func_kq(KQ_scaled); - ggml_set_name(KQ_scaled, "KQ_scaled"); - - struct ggml_tensor * KQ_masked = ggml_add(ctx0, KQ_scaled, KQ_mask); - offload_func_kq(KQ_masked); - ggml_set_name(KQ_masked, "KQ_masked"); - - struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); - offload_func_kq(KQ_soft_max); - ggml_set_name(KQ_soft_max, "KQ_soft_max"); - - struct ggml_tensor * V = - ggml_view_3d(ctx0, kv_self.v, - n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); - offload_func_v(V); - ggml_set_name(V, "V"); - - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - offload_func_v(KQV); - ggml_set_name(KQV, "KQV"); - - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - offload_func_v(KQV_merged); - ggml_set_name(KQV_merged, "KQV_merged"); - - cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); - offload_func_v(cur); - ggml_set_name(cur, "KQV_merged_contiguous"); - - cur = ggml_mul_mat(ctx0, model.layers[il].wo, cur); - offload_func(cur); - cur = ggml_add(ctx0, cur, model.layers[il].bo); - offload_func(cur); - ggml_set_name(cur, "result_wo"); + // TODO: not tested, could be broken + cur = llm_build_kqv(lctx, ctx0, Q, + model.layers[il].wo, model.layers[il].bo, + Q, KQ_scale, KQ_mask, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); } - struct ggml_tensor * inpFF = ggml_add(ctx0, residual, cur); - offload_func(inpFF); - ggml_set_name(inpFF, "inpFF"); + struct ggml_tensor * ffn_inp = ggml_add(ctx0, residual, cur); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network { - // MLP - { - // Norm - cur = ggml_norm(ctx0, inpFF, norm_eps); - offload_func(cur); - cur = ggml_add(ctx0, - ggml_mul(ctx0, cur, model.layers[il].ffn_norm), - model.layers[il].ffn_norm_b - ); - ggml_set_name(cur, "ffn_norm"); - offload_func(cur); - } - cur = ggml_mul_mat(ctx0, model.layers[il].w3, cur); - offload_func(cur); + cur = llm_build_norm(ctx0, ffn_inp, + model.layers[il].ffn_norm, + model.layers[il].ffn_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(cur, "ffn_norm", il); - cur = ggml_add(ctx0, cur, model.layers[il].b3); - offload_func(cur); - ggml_set_name(cur, "result_ffn_up"); - - cur = ggml_sqr(ctx0, ggml_relu(ctx0, cur)); - ggml_set_name(cur, "result_ffn_act"); - offload_func(cur); - offload_func(cur->src[0]); - - cur = ggml_mul_mat(ctx0, model.layers[il].w2, cur); - offload_func(cur); - cur = ggml_add(ctx0, - cur, - model.layers[il].b2); - offload_func(cur); - ggml_set_name(cur, "outFF"); + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + LLM_FFN_RELU_SQR, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); } - cur = ggml_add(ctx0, cur, inpFF); - offload_func(cur); - ggml_set_name(cur, "inpFF_+_outFF"); + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, 
"l_out", il); + inpL = cur; } + cur = inpL; - { - cur = ggml_norm(ctx0, cur, norm_eps); - offload_func_nr(cur); - cur = ggml_mul(ctx0, cur, model.output_norm); - offload_func_nr(cur); - cur = ggml_add(ctx0, cur, model.output_norm_b); - // offload_func_nr(cur); + cur = llm_build_norm(ctx0, cur, + model.output_norm, + model.output_norm_b, + LLM_NORM, norm_eps, cb, -1); + cb(cur, "result_norm", -1); - ggml_set_name(cur, "result_norm"); - } cur = ggml_mul_mat(ctx0, model.output, cur); - ggml_set_name(cur, "result_output"); + cb(cur, "result_output", -1); + ggml_build_forward_expand(gf, cur); + ggml_free(ctx0); + return gf; } -static struct ggml_cgraph * llm_build_bloom( +static struct ggml_cgraph * llm_build_refact( llama_context & lctx, - const llama_batch & batch) { + const llama_batch & batch, + const llm_build_cb & cb, + bool worst_case) { const auto & model = lctx.model; const auto & hparams = model.hparams; const auto & cparams = lctx.cparams; @@ -5315,6 +4308,133 @@ static struct ggml_cgraph * llm_build_bloom( const int64_t n_head = hparams.n_head; const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_head = hparams.n_embd_head(); + + const float norm_rms_eps = hparams.f_norm_rms_eps; + + const int32_t n_tokens = batch.n_tokens; + const int32_t n_kv = worst_case ? n_ctx : kv_self.n; + const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; + + auto & buf_compute = lctx.buf_compute; + + struct ggml_init_params params = { + /*.mem_size =*/ buf_compute.size, + /*.mem_buffer =*/ buf_compute.data, + /*.no_alloc =*/ true, + }; + + struct ggml_context * ctx0 = ggml_init(params); + + ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); + cb(inpL, "inp_embd", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + cur = llm_build_norm(ctx0, inpL, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + cb(Kcur, "Kcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + cb(Qcur, "Qcur", il); + + llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(lctx, ctx0, Qcur, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, 8.0f, cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + { + cur = llm_build_norm(ctx0, ffn_inp, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + 
LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, + model.output_norm, NULL, + LLM_NORM_RMS, norm_rms_eps, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + ggml_free(ctx0); + + return gf; +} + +static struct ggml_cgraph * llm_build_bloom( + llama_context & lctx, + const llama_batch & batch, + const llm_build_cb & cb, + bool worst_case) { + const auto & model = lctx.model; + const auto & hparams = model.hparams; + const auto & cparams = lctx.cparams; + + const auto & kv_self = lctx.kv_self; + + GGML_ASSERT(!!kv_self.ctx); + + const int64_t n_embd = hparams.n_embd; + const int64_t n_layer = hparams.n_layer; + const int64_t n_ctx = cparams.n_ctx; + const int64_t n_head = hparams.n_head; + const int64_t n_embd_head = hparams.n_embd_head(); const int64_t n_embd_gqa = hparams.n_embd_gqa(); GGML_ASSERT(n_embd_head == hparams.n_rot); @@ -5322,8 +4442,8 @@ static struct ggml_cgraph * llm_build_bloom( const float norm_eps = hparams.f_norm_eps; const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; - const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; + const int32_t n_kv = worst_case ? n_ctx : kv_self.n; + const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; auto & buf_compute = lctx.buf_compute; @@ -5340,198 +4460,90 @@ static struct ggml_cgraph * llm_build_bloom( ggml_cgraph * gf = ggml_new_graph(ctx0); struct ggml_tensor * cur; - struct ggml_tensor * token; struct ggml_tensor * inpL; - if (batch.token) { - struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - - ggml_allocr_alloc(lctx.alloc, inp_tokens); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inp_tokens->data, batch.token, n_tokens*ggml_element_size(inp_tokens)); - } - ggml_set_name(inp_tokens, "inp_tokens"); - - token = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens); - } else { -#ifdef GGML_USE_MPI - GGML_ASSERT(false && "not implemented"); -#endif - - token = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, n_tokens); - - ggml_allocr_alloc(lctx.alloc, token); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(token->data, batch.embd, n_tokens * n_embd * ggml_element_size(token)); - } - } + inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); + cb(inpL, "inp_embd", -1); // KQ_scale struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)"); - ggml_allocr_alloc(lctx.alloc, KQ_scale); - if (!ggml_allocr_is_measure(lctx.alloc)) { - ggml_set_f32(KQ_scale, 1.0f/sqrtf(float(n_embd)/n_head)); - } + cb(KQ_scale, "KQ_scale", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - ggml_set_name(KQ_mask, "KQ_mask"); - ggml_allocr_alloc(lctx.alloc, KQ_mask); - if (!ggml_allocr_is_measure(lctx.alloc)) { - float * data = (float *) KQ_mask->data; - memset(data, 0, ggml_nbytes(KQ_mask)); + cb(KQ_mask, "KQ_mask", -1); - for (int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; - - for (int i = 
0; i < n_kv; ++i) { - if (!kv_self.cells[i].has_seq_id(seq_id) || kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; - } - } - } - } - } - - // norm - { - inpL = ggml_norm(ctx0, token, norm_eps); - inpL = ggml_add(ctx0, ggml_mul(ctx0, inpL, model.tok_norm), model.tok_norm_b); - } - - ggml_set_name(inpL, "inpL"); + inpL = llm_build_norm(ctx0, inpL, + model.tok_norm, + model.tok_norm_b, + LLM_NORM, norm_eps, cb, -1); + cb(inpL, "inp_norm", -1); for (int il = 0; il < n_layer; ++il) { + cur = llm_build_norm(ctx0, inpL, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(cur, "attn_norm", il); + + // self-attention { - // Norm - cur = ggml_norm(ctx0, inpL, norm_eps); - cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].attn_norm), model.layers[il].attn_norm_b); + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + + llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(lctx, ctx0, Qcur, + model.layers[il].wo, model.layers[il].bo, + Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, 8.0f, cb, il); + cb(cur, "kqv_out", il); } - { - // Self Attention - cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wqkv, cur), model.layers[il].bqkv); - - struct ggml_tensor * tmpq = ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*n_embd); - struct ggml_tensor * tmpk = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], sizeof(float)*n_embd); - struct ggml_tensor * tmpv = ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], sizeof(float)*(n_embd + n_embd_gqa)); - - struct ggml_tensor * Qcur = tmpq; - struct ggml_tensor * Kcur = tmpk; - - // store key and value to memory - { - struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, ggml_cont(ctx0, tmpv), n_embd_gqa, n_tokens)); - ggml_set_name(Vcur, "Vcur"); - - struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, n_tokens*n_embd_gqa, (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); - ggml_set_name(k, "k"); - - struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); - - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); - } - - struct ggml_tensor * Q = - ggml_permute(ctx0, - ggml_cpy(ctx0, - Qcur, - ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_embd_head, n_head, n_tokens)), - 0, 2, 1, 3); - ggml_set_name(Q, "Q"); - - struct ggml_tensor * K = - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); - ggml_set_name(K, "K"); - - // K * Q - struct ggml_tensor * KQ = 
ggml_mul_mat(ctx0, K, Q); - ggml_set_name(KQ, "KQ"); - - // KQ_scaled = KQ / sqrt(n_embd_head) - // KQ_scaled shape [n_past + n_tokens, n_tokens, n_head, 1] - struct ggml_tensor * KQ_scaled = ggml_scale_inplace(ctx0, KQ, KQ_scale); - ggml_set_name(KQ_scaled, "KQ_scaled"); - - struct ggml_tensor * KQ_scaled_alibi = ggml_alibi(ctx0, KQ_scaled, /*n_past*/ kv_head, n_head, 8); - ggml_set_name(KQ_scaled_alibi, "KQ_scaled_alibi"); - - // KQ_masked = mask_past(KQ_scaled) - struct ggml_tensor * KQ_masked = ggml_add(ctx0, KQ_scaled_alibi, KQ_mask); - ggml_set_name(KQ_masked, "KQ_masked"); - - // KQ = soft_max(KQ_masked) - struct ggml_tensor * KQ_soft_max = ggml_soft_max_inplace(ctx0, KQ_masked); - ggml_set_name(KQ_soft_max, "KQ_soft_max"); - - // split cached V into n_head heads - struct ggml_tensor * V = - ggml_view_3d(ctx0, kv_self.v, - n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); - ggml_set_name(V, "V"); - - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - ggml_set_name(KQV, "KQV"); - - // KQV_merged = KQV.permute(0, 2, 1, 3) - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - ggml_set_name(KQV_merged, "KQV_merged"); - - // cur = KQV_merged.contiguous().view(n_embd, n_tokens) - cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); - ggml_set_name(cur, "KQV_merged_contiguous"); - } - - // Projection - cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wo, cur), model.layers[il].bo); - // Add the input - cur = ggml_add(ctx0, cur, inpL); - - struct ggml_tensor * inpFF = cur; + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); + cb(ffn_inp, "ffn_inp", il); // FF { - // Norm - { - cur = ggml_norm(ctx0, inpFF, norm_eps); - cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].ffn_norm), model.layers[il].ffn_norm_b); - } + cur = llm_build_norm(ctx0, ffn_inp, + model.layers[il].ffn_norm, + model.layers[il].ffn_norm_b, + LLM_NORM, norm_eps, cb, il); + cb(cur, "ffn_norm", il); - cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].w3, cur), model.layers[il].b3); - - // GELU activation - cur = ggml_gelu(ctx0, cur); - - // Projection - cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].w2, cur), model.layers[il].b2); + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); } - inpL = ggml_add(ctx0, cur, inpFF); + inpL = ggml_add(ctx0, cur, ffn_inp); + cb(inpL, "l_out", il); } - // Output Norm - { - cur = ggml_norm(ctx0, inpL, norm_eps); - cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.output_norm), model.output_norm_b); - } - ggml_set_name(cur, "result_norm"); + cur = llm_build_norm(ctx0, inpL, + model.output_norm, + model.output_norm_b, + LLM_NORM, norm_eps, cb, -1); + cb(cur, "result_norm", -1); cur = ggml_mul_mat(ctx0, model.output, cur); - ggml_set_name(cur, "result_output"); + cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); @@ -5542,7 +4554,9 @@ static struct ggml_cgraph * llm_build_bloom( static struct ggml_cgraph * llm_build_mpt( llama_context & lctx, - const llama_batch & batch) { + const llama_batch & batch, + const llm_build_cb & cb, + bool worst_case) { const auto & model = lctx.model; const auto & hparams = model.hparams; const auto & cparams = lctx.cparams; @@ -5555,7 +4569,6 @@ static struct ggml_cgraph * llm_build_mpt( 
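// [editor's note] The llm_build_mpt hunks below repeat the refactor already
// applied to the persimmon, refact and bloom builders above: per-builder
// allocator queries and offload_func_* calls are dropped, and each builder
// instead receives the shared cb callback plus a worst_case flag that
// llama_build_graph computes once as
//
//     const bool worst_case = ggml_allocr_is_measure(lctx.alloc);
//
// so that
//
//     const int32_t n_kv    = worst_case ? n_ctx : kv_self.n;
//     const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head;
//
// build the graph during the measurement pass as if the KV cache were full,
// letting ggml-alloc reserve enough memory for the worst case at runtime.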
const int64_t n_layer = hparams.n_layer; const int64_t n_ctx = cparams.n_ctx; const int64_t n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_head = hparams.n_embd_head(); const int64_t n_embd_gqa = hparams.n_embd_gqa(); @@ -5563,11 +4576,9 @@ static struct ggml_cgraph * llm_build_mpt( const float clamp_kqv = hparams.f_clamp_kqv; const float max_alibi_bias = hparams.f_max_alibi_bias; - const int n_gpu_layers = model.n_gpu_layers; - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = ggml_allocr_is_measure(lctx.alloc) ? n_ctx : kv_self.n; - const int32_t kv_head = ggml_allocr_is_measure(lctx.alloc) ? n_ctx - n_tokens : kv_self.head; + const int32_t n_kv = worst_case ? n_ctx : kv_self.n; + const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; auto & buf_compute = lctx.buf_compute; @@ -5586,269 +4597,93 @@ static struct ggml_cgraph * llm_build_mpt( struct ggml_tensor * cur; struct ggml_tensor * inpL; - //int warmup = 0; - if (batch.token) { - struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - - ggml_allocr_alloc(lctx.alloc, inp_tokens); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inp_tokens->data, batch.token, n_tokens*ggml_element_size(inp_tokens)); - //warmup = ((uint32_t*) inp_tokens->data)[0] == 0; - } - - ggml_set_name(inp_tokens, "inp_tokens"); - - inpL = ggml_get_rows(ctx0, model.tok_embeddings, inp_tokens); - } else { -#ifdef GGML_USE_MPI - GGML_ASSERT(false && "not implemented"); -#endif - - inpL = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_embd, n_tokens); - - ggml_allocr_alloc(lctx.alloc, inpL); - if (!ggml_allocr_is_measure(lctx.alloc)) { - memcpy(inpL->data, batch.embd, n_tokens * n_embd * ggml_element_size(inpL)); - } - } - - const int i_gpu_start = n_layer - n_gpu_layers; - (void) i_gpu_start; - - // offload functions set the tensor output backend to GPU - // tensors are GPU-accelerated if any input or the output has been offloaded - offload_func_t offload_func_nr = llama_nop; // nr = non-repeating - offload_func_t offload_func_kq = llama_nop; - offload_func_t offload_func_v = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (n_gpu_layers > n_layer) { - offload_func_nr = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 1) { - offload_func_v = ggml_cuda_assign_buffers_no_alloc; - } - if (n_gpu_layers > n_layer + 2) { - offload_func_kq = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS + inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); + cb(inpL, "inp_embd", -1); // KQ_scale struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_set_name(KQ_scale, "1/sqrt(n_embd_head)"); - ggml_allocr_alloc(lctx.alloc, KQ_scale); - if (!ggml_allocr_is_measure(lctx.alloc)) { - ggml_set_f32(KQ_scale, 1.0f/sqrtf(float(n_embd)/n_head)); - } + cb(KQ_scale, "KQ_scale", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - offload_func_kq(KQ_mask); - ggml_set_name(KQ_mask, "KQ_mask"); - ggml_allocr_alloc(lctx.alloc, KQ_mask); - if (!ggml_allocr_is_measure(lctx.alloc)) { - float * data = (float *) KQ_mask->data; - memset(data, 0, ggml_nbytes(KQ_mask)); - - for (int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; - - for (int i = 0; i < n_kv; ++i) { - if (!kv_self.cells[i].has_seq_id(seq_id) || 
kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; - } - } - } - } - } + cb(KQ_mask, "KQ_mask", -1); for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * attn_norm; - offload_func_t offload_func = llama_nop; - -#ifdef GGML_USE_CUBLAS - if (il >= i_gpu_start) { - offload_func = ggml_cuda_assign_buffers_no_alloc; - } -#endif // GGML_USE_CUBLAS + attn_norm = llm_build_norm(ctx0, inpL, + model.layers[il].attn_norm, + NULL, + LLM_NORM, norm_eps, cb, il); + cb(attn_norm, "attn_norm", il); // self-attention - // TODO: refactor into common function (shared with LLaMA) { - attn_norm = ggml_norm(ctx0, inpL, norm_eps); - offload_func(attn_norm); - - attn_norm = ggml_mul(ctx0, attn_norm, model.layers[il].attn_norm); - offload_func(attn_norm); - - if (1) { - cur = attn_norm; - } - - // compute QKV + cur = attn_norm; cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - offload_func_kq(cur); + cb(cur, "wqkv", il); if (clamp_kqv > 0.0f) { cur = ggml_clamp(ctx0, cur, -clamp_kqv, clamp_kqv); - offload_func_kq(cur); + cb(cur, "wqkv_clamped", il); } - const size_t wsize = ggml_type_size(cur->type); + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); - struct ggml_tensor * Qcur = ggml_view_3d( - ctx0, cur, n_embd_head, n_head, n_tokens, - wsize * n_embd_head, - wsize * n_embd_head * (n_head + 2 * n_head_kv), - 0); - offload_func_kq(Qcur); + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); - struct ggml_tensor * Kcur = ggml_view_3d( - ctx0, cur, n_embd_head, n_head_kv, n_tokens, - wsize * n_embd_head, - wsize * n_embd_head * (n_head + 2 * n_head_kv), - wsize * n_embd_head * n_head); - offload_func_kq(Kcur); + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - struct ggml_tensor * tmpv = ggml_view_3d( - ctx0, cur, n_embd_head, n_head_kv, n_tokens, - wsize * n_embd_head, - wsize * n_embd_head * (n_head + 2 * n_head_kv), - wsize * n_embd_head * (n_head + n_head_kv)); - offload_func_kq(Kcur); + llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - ggml_set_name(Qcur, "Qcur"); - ggml_set_name(Kcur, "Kcur"); - - { - struct ggml_tensor * Vcur = ggml_transpose(ctx0, ggml_reshape_2d(ctx0, ggml_cont(ctx0, tmpv), n_embd_gqa, n_tokens)); - offload_func_v(Vcur); - offload_func_v(Vcur->src[0]->src[0]); - ggml_set_name(Vcur, "Vcur"); - - struct ggml_tensor * k = ggml_view_1d(ctx0, kv_self.k, n_tokens*n_embd_gqa, (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); - offload_func_kq(k); - ggml_set_name(k, "k"); - - struct ggml_tensor * v = ggml_view_2d(ctx0, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); - offload_func_v(v); - - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Kcur, k)); - ggml_build_forward_expand(gf, ggml_cpy(ctx0, Vcur, v)); - } - - struct ggml_tensor * Q = ggml_permute(ctx0, Qcur, 0, 2, 1, 3); - offload_func_kq(Q); - ggml_set_name(Q, "Q"); - - struct ggml_tensor * K = - ggml_view_3d(ctx0, kv_self.k, - n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - 
ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); - offload_func_kq(K); - ggml_set_name(K, "K"); - - struct ggml_tensor * KQ = ggml_mul_mat(ctx0, K, Q); - offload_func_kq(KQ); - ggml_set_name(KQ, "KQ"); - - struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, KQ_scale); - offload_func_kq(KQ_scaled); - ggml_set_name(KQ_scaled, "KQ_scaled"); - - // TODO: replace with ggml_add() - struct ggml_tensor * KQ_scaled_alibi = - ggml_alibi(ctx0, KQ_scaled, 0, n_head, max_alibi_bias); - offload_func_kq(KQ_scaled_alibi); - ggml_set_name(KQ_scaled_alibi, "KQ_scaled_alibi"); - - struct ggml_tensor * KQ_masked = ggml_add(ctx0, KQ_scaled_alibi, KQ_mask); - offload_func_kq(KQ_masked); - ggml_set_name(KQ_masked, "KQ_masked"); - - struct ggml_tensor * KQ_soft_max = ggml_soft_max(ctx0, KQ_masked); - offload_func_v(KQ_soft_max); - ggml_set_name(KQ_soft_max, "KQ_soft_max"); - - struct ggml_tensor * V = - ggml_view_3d(ctx0, kv_self.v, - n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); - offload_func_v(V); - ggml_set_name(V, "V"); - - struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ_soft_max); - offload_func_v(KQV); - ggml_set_name(KQV, "KQV"); - - struct ggml_tensor * KQV_merged = ggml_permute(ctx0, KQV, 0, 2, 1, 3); - offload_func_v(KQV_merged); - ggml_set_name(KQV_merged, "KQV_merged"); - - cur = ggml_cont_2d(ctx0, KQV_merged, n_embd, n_tokens); - offload_func_v(cur); - ggml_set_name(cur, "KQV_merged_contiguous"); - - cur = ggml_mul_mat(ctx0, model.layers[il].wo, cur); - offload_func(cur); - ggml_set_name(cur, "result_wo"); + cur = llm_build_kqv(lctx, ctx0, Qcur, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, max_alibi_bias, cb, il); + cb(cur, "kqv_out", il); } // Add the input - cur = ggml_add(ctx0, cur, inpL); - offload_func(cur); - - struct ggml_tensor * attn_out = cur; + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); + cb(ffn_inp, "ffn_inp", il); // feed forward { - // Norm - { - cur = ggml_norm(ctx0, attn_out, norm_eps); - offload_func(cur); + cur = llm_build_norm(ctx0, ffn_inp, + model.layers[il].ffn_norm, + NULL, + LLM_NORM, norm_eps, cb, il); + cb(cur, "ffn_norm", il); - cur = ggml_mul(ctx0, cur, model.layers[il].ffn_norm); - offload_func(cur); - } - - cur = ggml_mul_mat(ctx0, model.layers[il].w3, cur); - offload_func(cur); - - cur = ggml_gelu(ctx0, cur); - offload_func(cur); - cur = ggml_mul_mat(ctx0, model.layers[il].w2, cur); - offload_func(cur); + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + NULL, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); } - cur = ggml_add(ctx0, cur, attn_out); - offload_func(cur); + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + // input for next layer inpL = cur; } cur = inpL; - // norm - { - cur = ggml_norm(ctx0, cur, norm_eps); - offload_func_nr(cur); - - cur = ggml_mul(ctx0, cur, model.output_norm); - ggml_set_name(cur, "result_norm"); - } + cur = llm_build_norm(ctx0, cur, + model.output_norm, + NULL, + LLM_NORM, norm_eps, cb, -1); + cb(cur, "result_norm", -1); cur = ggml_mul_mat(ctx0, model.output, cur); - ggml_set_name(cur, "result_output"); + cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); @@ -5857,50 +4692,494 @@ static struct ggml_cgraph * llm_build_mpt( return gf; } +// +// tensor offloading helpers +// +// TODO: will be removed with backend v2 + +enum llm_offload_func_e { + OFFLOAD_FUNC_NOP, + 
OFFLOAD_FUNC,
+    OFFLOAD_FUNC_KQ,
+    OFFLOAD_FUNC_V,
+    OFFLOAD_FUNC_NR,
+    OFFLOAD_FUNC_EMB,
+    OFFLOAD_FUNC_OUT,
+};
+
+// TODO: will be removed with backend v2
+struct llm_offload_trie {
+    struct node {
+        ~node() {
+            for (int i = 0; i < 256; ++i) {
+                if (children[i]) {
+                    delete children[i];
+                }
+            }
+        }
+
+        node * children[256] = { nullptr };
+        llm_offload_func_e func = OFFLOAD_FUNC_NOP;
+    };
+
+    llm_offload_trie() {
+        root = new node;
+    }
+
+    llm_offload_trie(const std::unordered_map<const char *, llm_offload_func_e> & map) {
+        root = new node;
+
+        for (const auto & kv : map) {
+            add(kv.first, kv.second);
+        }
+    }
+
+    ~llm_offload_trie() {
+        delete root;
+    }
+
+    void add(const char * name, llm_offload_func_e func) {
+        node * cur = root;
+
+        for (int i = 0; ; ++i) {
+            const uint8_t c = name[i];
+
+            if (!c) {
+                break;
+            }
+
+            if (!cur->children[c]) {
+                cur->children[c] = new node;
+            }
+
+            cur = cur->children[c];
+        }
+
+        cur->func = func;
+    }
+
+    llm_offload_func_e find(const char * name) const {
+        const node * cur = root;
+
+        for (int i = 0; ; ++i) {
+            const uint8_t c = name[i];
+
+            if (!c) {
+                break;
+            }
+
+            if (!cur->children[c]) {
+                return OFFLOAD_FUNC_NOP;
+            }
+
+            cur = cur->children[c];
+        }
+
+        return cur->func;
+    }
+
+    node * root = nullptr;
+};
+
+// TODO: will be removed with backend v2
+static const std::unordered_map<const char *, llm_offload_func_e> k_offload_map = {
+  //{ "inp_tokens",                 OFFLOAD_FUNC_NR  }, // TODO: missing K-quants get_rows kernel
+  //{ "inp_embd",                   OFFLOAD_FUNC_NR  }, // TODO: missing K-quants get_rows kernel
+    { "pos_embd",                   OFFLOAD_FUNC_NR  },
+
+    { "inp_pos",                    OFFLOAD_FUNC_KQ  }, // this is often used for KQ ops (e.g. rope)
+    { "KQ_scale",                   OFFLOAD_FUNC_KQ  },
+    { "KQ_mask",                    OFFLOAD_FUNC_KQ  },
+    { "K_shift",                    OFFLOAD_FUNC_KQ  },
+    { "K_shifted",                  OFFLOAD_FUNC_KQ  },
+
+    { "inp_norm",                   OFFLOAD_FUNC_NR  },
+    { "inp_norm_w",                 OFFLOAD_FUNC_NR  },
+    { "inp_norm_wb",                OFFLOAD_FUNC_NR  },
+
+    { "norm",                       OFFLOAD_FUNC     },
+    { "norm_w",                     OFFLOAD_FUNC     },
+    { "norm_wb",                    OFFLOAD_FUNC     },
+
+    { "attn_norm",                  OFFLOAD_FUNC     },
+    { "attn_norm_2",                OFFLOAD_FUNC     },
+
+    { "wqkv",                       OFFLOAD_FUNC_KQ  },
+    { "bqkv",                       OFFLOAD_FUNC_KQ  },
+    { "wqkv_clamped",               OFFLOAD_FUNC_KQ  },
+
+    { "tmpk",                       OFFLOAD_FUNC_KQ  },
+    { "tmpq",                       OFFLOAD_FUNC_KQ  },
+    { "tmpv",                       OFFLOAD_FUNC_V   },
+    { "Kcur",                       OFFLOAD_FUNC_KQ  },
+    { "Qcur",                       OFFLOAD_FUNC_KQ  },
+    { "Vcur",                       OFFLOAD_FUNC_V   },
+
+    { "krot",                       OFFLOAD_FUNC_KQ  },
+    { "qrot",                       OFFLOAD_FUNC_KQ  },
+    { "kpass",                      OFFLOAD_FUNC_KQ  },
+    { "qpass",                      OFFLOAD_FUNC_KQ  },
+    { "krotated",                   OFFLOAD_FUNC_KQ  },
+    { "qrotated",                   OFFLOAD_FUNC_KQ  },
+
+    { "q",                          OFFLOAD_FUNC_KQ  },
+    { "k",                          OFFLOAD_FUNC_KQ  },
+    { "kq",                         OFFLOAD_FUNC_KQ  },
+    { "kq_scaled",                  OFFLOAD_FUNC_KQ  },
+    { "kq_scaled_alibi",            OFFLOAD_FUNC_KQ  },
+    { "kq_masked",                  OFFLOAD_FUNC_KQ  },
+    { "kq_soft_max",                OFFLOAD_FUNC_V   },
+    { "v",                          OFFLOAD_FUNC_V   },
+    { "kqv",                        OFFLOAD_FUNC_V   },
+    { "kqv_merged",                 OFFLOAD_FUNC_V   },
+    { "kqv_merged_cont",            OFFLOAD_FUNC_V   },
+    { "kqv_wo",                     OFFLOAD_FUNC_V   },
+    { "kqv_out",                    OFFLOAD_FUNC_V   },
+
+    { "ffn_inp",                    OFFLOAD_FUNC     },
+    { "ffn_norm",                   OFFLOAD_FUNC     },
+
+    { "ffn_up",                     OFFLOAD_FUNC     },
+    { "ffn_up_b",                   OFFLOAD_FUNC     },
+    { "ffn_gate",                   OFFLOAD_FUNC     },
+    { "ffn_gate_b",                 OFFLOAD_FUNC     },
+    { "ffn_gate_par",               OFFLOAD_FUNC     },
+    { "ffn_down",                   OFFLOAD_FUNC     },
+    { "ffn_down_b",                 OFFLOAD_FUNC     },
+    { "ffn_out",                    OFFLOAD_FUNC     },
+
+    { "ffn_silu",                   OFFLOAD_FUNC     },
+    { "ffn_gelu",                   OFFLOAD_FUNC     },
+    { "ffn_relu",                   OFFLOAD_FUNC     },
+    { "ffn_sqr(relu)",              OFFLOAD_FUNC     },
+
+    { "l_out",                      OFFLOAD_FUNC     },
+
+    { "result_norm",                OFFLOAD_FUNC_EMB },
+    { "result_output",              OFFLOAD_FUNC_OUT },
+};
+
+static llm_offload_trie 
k_offload_func_trie(k_offload_map); + static struct ggml_cgraph * llama_build_graph( llama_context & lctx, const llama_batch & batch) { const auto & model = lctx.model; + // check if we should build the worst-case graph (for memory measurement) + const bool worst_case = ggml_allocr_is_measure(lctx.alloc); + + // keep track of the input that has already been allocated + bool alloc_inp_tokens = false; + bool alloc_inp_embd = false; + bool alloc_inp_pos = false; + bool alloc_inp_KQ_scale = false; + bool alloc_inp_KQ_mask = false; + bool alloc_inp_K_shift = false; + +#ifdef GGML_USE_CUBLAS + const bool do_offload = true; +#else + const bool do_offload = true; // TODO: set to false after finishing refactoring +#endif + + int n_non_view = 0; // number of non-view tensors that have been processed by the callback + + // this callback allows us to apply custom logic to each tensor (e.g. ggml-alloc, offloading, etc.) + // TODO: will be removed with backend v2 + llm_build_cb cb = [&](struct ggml_tensor * cur, const char * name, int il) { + if (il >= 0) { + ggml_format_name(cur, "%s-%d", name, il); + } else { + ggml_set_name(cur, name); + } + + // + // allocate input tensors and set input data + // + // TODO: will be removed with backend v2 + + if (!alloc_inp_tokens && strcmp(name, "inp_tokens") == 0) { + ggml_allocr_alloc(lctx.alloc, cur); + + if (!ggml_allocr_is_measure(lctx.alloc) && batch.token) { + const int64_t n_tokens = cur->ne[0]; + + memcpy(cur->data, batch.token, n_tokens*ggml_element_size(cur)); + } + + alloc_inp_tokens = true; + } + + if (!alloc_inp_embd && strcmp(name, "inp_embd") == 0) { + ggml_allocr_alloc(lctx.alloc, cur); + + if (!ggml_allocr_is_measure(lctx.alloc) && batch.embd) { + const int64_t n_embd = cur->ne[0]; + const int64_t n_tokens = cur->ne[1]; + + memcpy(cur->data, batch.embd, n_tokens*n_embd*ggml_element_size(cur)); + } + + alloc_inp_embd = true; + } + + if (!alloc_inp_pos && strcmp(name, "inp_pos") == 0) { + ggml_allocr_alloc(lctx.alloc, cur); + + if (!ggml_allocr_is_measure(lctx.alloc) && batch.pos) { + const int64_t n_tokens = cur->ne[0]; + + int32_t * data = (int32_t *) cur->data; + + for (int i = 0; i < n_tokens; ++i) { + data[i] = batch.pos[i]; + } + } + + alloc_inp_pos = true; + } + + if (!alloc_inp_KQ_scale && strcmp(name, "KQ_scale") == 0) { + ggml_allocr_alloc(lctx.alloc, cur); + + if (!ggml_allocr_is_measure(lctx.alloc)) { + const int64_t n_embd_head = model.hparams.n_embd_head(); + ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); + } + + alloc_inp_KQ_scale = true; + } + + if (!alloc_inp_KQ_mask && strcmp(name, "KQ_mask") == 0) { + ggml_allocr_alloc(lctx.alloc, cur); + + if (!ggml_allocr_is_measure(lctx.alloc)) { + const int64_t n_kv = cur->ne[0]; + const int64_t n_tokens = cur->ne[1]; + + float * data = (float *) cur->data; + memset(data, 0, ggml_nbytes(cur)); + + for (int h = 0; h < 1; ++h) { + for (int j = 0; j < n_tokens; ++j) { + const llama_pos pos = batch.pos[j]; + const llama_seq_id seq_id = batch.seq_id[j][0]; + + for (int i = 0; i < n_kv; ++i) { + if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { + data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; + } + } + } + } + } + + alloc_inp_KQ_mask = true; + } + + if (!alloc_inp_K_shift && strcmp(name, "K_shift") == 0) { + ggml_allocr_alloc(lctx.alloc, cur); + + if (!ggml_allocr_is_measure(lctx.alloc)) { + const int64_t n_ctx = cur->ne[0]; + + int32_t * data = (int32_t *) cur->data; + + for (int i = 0; i < n_ctx; ++i) { + data[i] = lctx.kv_self.cells[i].delta; + } + } + + 
alloc_inp_K_shift = true;
+        }
+
+        // view tensors are not processed further
+        if (cur->view_src != nullptr) {
+            return;
+        }
+
+        if (cur->op != GGML_OP_NONE) {
+            n_non_view++;
+        }
+
+        //
+        // offload layers
+        //
+        // TODO: will be removed with backend v2
+
+//#define LLAMA_OFFLOAD_DEBUG
+
+        if (!do_offload) {
+            return;
+        }
+
+        const int n_layer = model.hparams.n_layer;
+
+        const int n_gpu_layers = model.n_gpu_layers;
+        const int i_gpu_start  = n_layer - n_gpu_layers;
+
+        // should we offload the final norm? yes if we are not computing embeddings
+        const bool offload_emb = lctx.embedding.empty();
+
+        static const std::unordered_map<llm_offload_func_e, std::string, std::hash<int>> k_offload_func_name = {
+            { OFFLOAD_FUNC_NOP, "CPU" },
+            { OFFLOAD_FUNC_OUT, "CPU" },
+#ifdef GGML_USE_CUBLAS
+            { OFFLOAD_FUNC,     "GPU (CUDA)" },
+            { OFFLOAD_FUNC_KQ,  "GPU (CUDA) KQ" },
+            { OFFLOAD_FUNC_V,   "GPU (CUDA) V" },
+            { OFFLOAD_FUNC_NR,  "GPU (CUDA) NR" },
+            { OFFLOAD_FUNC_EMB, "GPU (CUDA) EMB" },
+#else
+            { OFFLOAD_FUNC,     "CPU" },
+            { OFFLOAD_FUNC_KQ,  "CPU" },
+            { OFFLOAD_FUNC_V,   "CPU" },
+            { OFFLOAD_FUNC_NR,  "CPU" },
+            { OFFLOAD_FUNC_EMB, "CPU" },
+#endif // GGML_USE_CUBLAS
+        };
+
+        // check the global map for what offload function to use for this tensor
+        llm_offload_func_e func_e = k_offload_func_trie.find(name);
+
+        if (func_e == OFFLOAD_FUNC_NOP) {
+#ifdef LLAMA_OFFLOAD_DEBUG
+            // if a tensor hasn't been offloaded, we warn the user
+            if (worst_case) {
+                LLAMA_LOG_WARN("%s: %32s: not offloaded (ref: %s)\n", __func__,
+                        cur->name, "https://github.com/ggerganov/llama.cpp/pull/3837");
+            }
+#endif
+
+            return;
+        }
+
+        // count the number of layers and respect the provided n_gpu_layers
+        switch (func_e) {
+            case OFFLOAD_FUNC_NOP:
+            case OFFLOAD_FUNC_OUT:
+                break;
+            case OFFLOAD_FUNC:
+                if (n_gpu_layers < n_layer) {
+                    if (il < i_gpu_start) {
+                        func_e = OFFLOAD_FUNC_NOP;
+                    }
+                }
+                break;
+            case OFFLOAD_FUNC_NR:
+                if (n_gpu_layers <= n_layer + 0) {
+                    func_e = OFFLOAD_FUNC_NOP;
+                }
+                break;
+            case OFFLOAD_FUNC_V:
+                if (n_gpu_layers <= n_layer + 1) {
+                    func_e = OFFLOAD_FUNC_NOP;
+                }
+                break;
+            case OFFLOAD_FUNC_KQ:
+                if (n_gpu_layers <= n_layer + 2) {
+                    func_e = OFFLOAD_FUNC_NOP;
+                }
+                break;
+            case OFFLOAD_FUNC_EMB:
+                if (!offload_emb || n_gpu_layers < n_layer) {
+                    func_e = OFFLOAD_FUNC_NOP;
+                }
+                break;
+            default: GGML_ASSERT(false);
+        }
+
+        offload_func_t func = ggml_offload_nop;
+
+        // this is needed for compatibility with Metal for example
+#ifdef GGML_USE_CUBLAS
+        static offload_func_t ggml_offload_gpu = ggml_cuda_assign_buffers_no_alloc;
+#else
+        static offload_func_t ggml_offload_gpu = ggml_offload_nop;
+#endif
+
+        switch (func_e) {
+            case OFFLOAD_FUNC_NOP:
+            case OFFLOAD_FUNC_OUT: func = ggml_offload_nop; break;
+            case OFFLOAD_FUNC:
+            case OFFLOAD_FUNC_KQ:
+            case OFFLOAD_FUNC_V:
+            case OFFLOAD_FUNC_NR:
+            case OFFLOAD_FUNC_EMB: func = ggml_offload_gpu; break;
+            default: GGML_ASSERT(false);
+        }
+
+        // apply offload function to the tensor
+        func(cur);
+
+#ifdef LLAMA_OFFLOAD_DEBUG
+        if (worst_case) {
+            LLAMA_LOG_INFO("%s: %32s: %s\n", __func__, cur->name, k_offload_func_name.at(func_e).c_str());
+        }
+#endif
+    };
+
     struct ggml_cgraph * result = NULL;
 
     switch (model.arch) {
         case LLM_ARCH_LLAMA:
             {
-                result = llm_build_llama(lctx, batch);
+                result = llm_build_llama(lctx, batch, cb, worst_case);
             } break;
         case LLM_ARCH_BAICHUAN:
             {
-                result = llm_build_baichaun(lctx, batch);
+                result = llm_build_baichaun(lctx, batch, cb, worst_case);
             } break;
         case LLM_ARCH_FALCON:
             {
-                result = llm_build_falcon(lctx, batch);
+                result = llm_build_falcon(lctx, batch, cb, worst_case);
             } 
break; case LLM_ARCH_STARCODER: { - result = llm_build_starcoder(lctx, batch); + result = llm_build_starcoder(lctx, batch, cb, worst_case); } break; case LLM_ARCH_PERSIMMON: { - result = llm_build_persimmon(lctx, batch); + result = llm_build_persimmon(lctx, batch, cb, worst_case); } break; case LLM_ARCH_REFACT: { - result = llm_build_refact(lctx, batch); + result = llm_build_refact(lctx, batch, cb, worst_case); } break; case LLM_ARCH_BLOOM: { - result = llm_build_bloom(lctx, batch); + result = llm_build_bloom(lctx, batch, cb, worst_case); } break; case LLM_ARCH_MPT: { - result = llm_build_mpt(lctx, batch); + result = llm_build_mpt(lctx, batch, cb, worst_case); } break; default: GGML_ASSERT(false); } + if (worst_case) { + int n_non_view_total = 0; + + for (int i = 0; i < result->n_nodes; ++i) { + if (result->nodes[i]->view_src == nullptr) { + n_non_view_total++; + } + } + + LLAMA_LOG_INFO("%s: non-view tensors processed: %d/%d\n", __func__, n_non_view, n_non_view_total); + + if (n_non_view != n_non_view_total) { + LLAMA_LOG_WARN("%s: ****************************************************************\n", __func__); + LLAMA_LOG_WARN("%s: not all non-view tensors have been processed with a callback\n", __func__); + LLAMA_LOG_WARN("%s: this can indicate an inefficiency in the graph implementation\n", __func__); + LLAMA_LOG_WARN("%s: build with LLAMA_OFFLOAD_DEBUG for more info\n", __func__); + LLAMA_LOG_WARN("%s: ref: https://github.com/ggerganov/llama.cpp/pull/3837\n", __func__); + LLAMA_LOG_WARN("%s: ****************************************************************\n", __func__); + } + } + return result; } @@ -6043,11 +5322,13 @@ static int llama_decode_internal( } // If all tensors can be run on the GPU then using more than 1 thread is detrimental. - const bool full_offload_supported = model.arch == LLM_ARCH_LLAMA || + const bool full_offload_supported = + model.arch == LLM_ARCH_LLAMA || model.arch == LLM_ARCH_BAICHUAN || - model.arch == LLM_ARCH_FALCON || - model.arch == LLM_ARCH_REFACT || + model.arch == LLM_ARCH_FALCON || + model.arch == LLM_ARCH_REFACT || model.arch == LLM_ARCH_MPT; + const bool fully_offloaded = model.n_gpu_layers >= (int) hparams.n_layer + 3; if (ggml_cpu_has_cublas() && full_offload_supported && fully_offloaded) { n_threads = 1; @@ -6102,6 +5383,8 @@ static int llama_decode_internal( //} // extract logits + // TODO: do not compute and extract logits if only embeddings are needed + // need to update the graphs to skip "result_output" { auto & logits_out = lctx.logits; @@ -8713,8 +7996,8 @@ static int llama_apply_lora_from_file_internal( ggml_tensor * dest_t = model_tensors[base_name]; - offload_func_t offload_func = llama_nop; - offload_func_t offload_func_force_inplace = llama_nop; + offload_func_t offload_func = ggml_offload_nop; + offload_func_t offload_func_force_inplace = ggml_offload_nop; #ifdef GGML_USE_CUBLAS if (dest_t->backend == GGML_BACKEND_GPU || dest_t->backend == GGML_BACKEND_GPU_SPLIT) { From ca190bca8e844d171020d6147687e71472d71734 Mon Sep 17 00:00:00 2001 From: Adrian Hesketh Date: Wed, 1 Nov 2023 09:28:28 +0000 Subject: [PATCH 050/859] server : re-enable completion and embedded at the same time (#3876) --- .gitignore | 1 + examples/server/server.cpp | 16 ++++++++++------ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 545c28726..5d7c5479e 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ .DS_Store .build/ .cache/ +.ccls-cache/ .direnv/ .envrc .swiftpm diff --git a/examples/server/server.cpp 
b/examples/server/server.cpp
index c163c7f8e..47ae0d558 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -149,6 +149,7 @@ struct task_server {
     task_type type;
     json data;
     bool infill_mode = false;
+    bool embedding_mode = false;
 };
 
 struct task_result {
@@ -371,6 +372,7 @@ struct llama_client_slot
     std::vector<completion_token_output> generated_token_probs;
 
     bool infill = false;
+    bool embedding = false;
     bool has_next_token = true;
     bool truncated = false;
    bool stopped_eos = false;
@@ -1244,13 +1246,14 @@ struct llama_server_context
         queue_results.push_back(res);
     }
 
-    int request_completion(json data, bool infill)
+    int request_completion(json data, bool infill, bool embedding)
     {
         std::lock_guard<std::mutex> lock(mutex_tasks);
         task_server task;
         task.id = id_gen++;
         task.data = data;
         task.infill_mode = infill;
+        task.embedding_mode = embedding;
         task.type = COMPLETION_TASK;
         queue_tasks.push_back(task);
         return task.id;
@@ -1376,7 +1379,7 @@ struct llama_server_context
                 {
                     LOG_TEE("slot unavailable\n");
                     // send error result
-                    send_error(task.id, "slot unavaliable");
+                    send_error(task.id, "slot unavailable");
                     return;
                 }
 
@@ -1388,6 +1391,7 @@ struct llama_server_context
             slot->reset();
 
             slot->infill = task.infill_mode;
+            slot->embedding = task.embedding_mode;
             slot->task_id = task.id;
 
             if (!launch_slot_with_data(slot, task.data))
@@ -1695,7 +1699,7 @@ struct llama_server_context
             }
 
             // prompt evaluated for embedding
-            if (params.embedding)
+            if (slot.embedding)
             {
                 send_embedding(slot);
                 slot.release();
@@ -2274,7 +2278,7 @@ int main(int argc, char **argv)
     svr.Post("/completion", [&llama](const httplib::Request &req, httplib::Response &res)
             {
                 json data = json::parse(req.body);
-                const int task_id = llama.request_completion(data, false);
+                const int task_id = llama.request_completion(data, false, false);
                 if (!json_value(data, "stream", false)) {
                     std::string completion_text;
                     task_result result = llama.next_result(task_id);
@@ -2329,7 +2333,7 @@ int main(int argc, char **argv)
     svr.Post("/infill", [&llama](const httplib::Request &req, httplib::Response &res)
             {
                 json data = json::parse(req.body);
-                const int task_id = llama.request_completion(data, true);
+                const int task_id = llama.request_completion(data, true, false);
                 if (!json_value(data, "stream", false)) {
                     std::string completion_text;
                     task_result result = llama.next_result(task_id);
@@ -2433,7 +2437,7 @@ int main(int argc, char **argv)
         {
             prompt = "";
         }
-        const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false);
+        const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false, true);
         task_result result = llama.next_result(task_id);
         return res.set_content(result.result_json.dump(), "application/json");
     });

From f0e209324a7f663225791897877bf610f1af152d Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Wed, 1 Nov 2023 11:29:07 +0200
Subject: [PATCH 051/859] scripts : add server-llm.sh (#3868)

* scripts : add deploy-server.sh

* scripts : rename to server-llm.sh

* scripts : working curl pipe
---
 scripts/server-llm.sh | 391 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 391 insertions(+)
 create mode 100644 scripts/server-llm.sh

diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh
new file mode 100644
index 000000000..7bf0929bb
--- /dev/null
+++ b/scripts/server-llm.sh
@@ -0,0 +1,391 @@
+#!/bin/bash
+#
+# Helper script for deploying llama.cpp server with a single Bash command
+#
+# - Works on Linux and macOS
+# - Supports: CPU, CUDA, Metal, OpenCL
+# - Can run all GGUF models from HuggingFace
+# 
- Can serve requests in parallel +# - Always builds latest llama.cpp from GitHub +# +# Limitations +# +# - Chat templates are poorly supported (base models recommended) +# - Might be unstable! +# +# Usage: +# ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] +# +# --port: port number, default is 8888 +# --repo: path to a repo containing GGUF model files +# --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input +# --backend: cpu, cuda, metal, opencl, depends on the OS +# --gpu-id: gpu id, default is 0 +# --n-parallel: number of parallel requests, default is 8 +# --n-kv: KV cache size, default is 4096 +# --verbose: verbose output +# +# Example: +# +# bash -c "$(curl -s https://ggml.ai/server-llm.sh)" +# + +set -e + +# required utils: curl, git, make +if ! command -v curl &> /dev/null; then + printf "[-] curl not found\n" + exit 1 +fi +if ! command -v git &> /dev/null; then + printf "[-] git not found\n" + exit 1 +fi +if ! command -v make &> /dev/null; then + printf "[-] make not found\n" + exit 1 +fi + +# parse arguments +port=8888 +repo="" +wtype="" +backend="cpu" + +# if macOS, use metal backend by default +if [[ "$OSTYPE" == "darwin"* ]]; then + backend="metal" +elif command -v nvcc &> /dev/null; then + backend="cuda" +fi + +gpu_id=0 +n_parallel=8 +n_kv=4096 +verbose=0 + +function print_usage { + printf "Usage:\n" + printf " ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose]\n\n" + printf " --port: port number, default is 8888\n" + printf " --repo: path to a repo containing GGUF model files\n" + printf " --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input\n" + printf " --backend: cpu, cuda, metal, opencl, depends on the OS\n" + printf " --gpu-id: gpu id, default is 0\n" + printf " --n-parallel: number of parallel requests, default is 8\n" + printf " --n-kv: KV cache size, default is 4096\n" + printf " --verbose: verbose output\n\n" + printf "Example:\n\n" + printf ' bash -c "$(curl -s https://ggml.ai/server-llm.sh)"\n\n' +} + +while [[ $# -gt 0 ]]; do + key="$1" + case $key in + --port) + port="$2" + shift + shift + ;; + --repo) + repo="$2" + shift + shift + ;; + --wtype) + wtype="$2" + shift + shift + ;; + --backend) + backend="$2" + shift + shift + ;; + --gpu-id) + gpu_id="$2" + shift + shift + ;; + --n-parallel) + n_parallel="$2" + shift + shift + ;; + --n-kv) + n_kv="$2" + shift + shift + ;; + --verbose) + verbose=1 + shift + ;; + --help) + print_usage + exit 0 + ;; + *) + echo "Unknown argument: $key" + print_usage + exit 1 + ;; + esac +done + +# available weights types +wtypes=("F16" "Q8_0" "Q4_0" "Q4_1" "Q5_0" "Q5_1" "Q6_K" "Q5_K_M" "Q5_K_S" "Q4_K_M" "Q4_K_S" "Q3_K_L" "Q3_K_M" "Q3_K_S" "Q2_K") + +wfiles=() +for wt in "${wtypes[@]}"; do + wfiles+=("") +done + +# sample repos +repos=( + "https://huggingface.co/TheBloke/Llama-2-7B-GGUF" + "https://huggingface.co/TheBloke/Llama-2-13B-GGUF" + "https://huggingface.co/TheBloke/Llama-2-70B-GGUF" + "https://huggingface.co/TheBloke/CodeLlama-7B-GGUF" + "https://huggingface.co/TheBloke/CodeLlama-13B-GGUF" + "https://huggingface.co/TheBloke/CodeLlama-34B-GGUF" + "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF" + "https://huggingface.co/TheBloke/zephyr-7B-beta-GGUF" + "https://huggingface.co/TheBloke/OpenHermes-2-Mistral-7B-GGUF" + "https://huggingface.co/TheBloke/CausalLM-7B-GGUF" +) + +printf "\n" +printf "[I] This is a helper script for deploying llama.cpp's server on this machine.\n\n" +printf " Based 
on the options that follow, the script might download a model file\n"
+printf "    from the internet, which can be a few GBs in size. The script will also\n"
+printf "    build the latest llama.cpp source code from GitHub, which can be unstable.\n"
+printf "\n"
+printf "    Upon success, an HTTP server will be started and it will serve the selected\n"
+printf "    model using llama.cpp for demonstration purposes.\n"
+printf "\n"
+printf "    Please note:\n"
+printf "\n"
+printf "    - All new data will be stored in the current folder\n"
+printf "    - The server will be listening on all network interfaces\n"
+printf "    - The server will run with default settings which are not always optimal\n"
+printf "    - Do not judge the quality of a model based on the results from this script\n"
+printf "    - Do not use this script to benchmark llama.cpp\n"
+printf "    - Do not use this script in production\n"
+printf "    - This script is only for demonstration purposes\n"
+printf "\n"
+printf "    If you don't know what you are doing, please press Ctrl-C to abort now\n"
+printf "\n"
+printf "    Press Enter to continue ...\n\n"
+
+read
+
+if [[ -z "$repo" ]]; then
+    printf "[+] No repo provided from the command line\n"
+    printf "    Please select a number from the list below or enter an URL:\n\n"
+
+    is=0
+    for r in "${repos[@]}"; do
+        printf "    %2d) %s\n" $is "$r"
+        is=$((is+1))
+    done
+
+    # ask for repo until index of sample repo is provided or an URL
+    while [[ -z "$repo" ]]; do
+        printf "\n    Or choose one from: https://huggingface.co/models?sort=trending&search=gguf\n\n"
+        read -p "[+] Select repo: " repo
+
+        # check if the input is a number
+        if [[ "$repo" =~ ^[0-9]+$ ]]; then
+            if [[ "$repo" -ge 0 && "$repo" -lt ${#repos[@]} ]]; then
+                repo="${repos[$repo]}"
+            else
+                printf "[-] Invalid repo index: %s\n" "$repo"
+                repo=""
+            fi
+        elif [[ "$repo" =~ ^https?:// ]]; then
+            repo="$repo"
+        else
+            printf "[-] Invalid repo URL: %s\n" "$repo"
+            repo=""
+        fi
+    done
+fi
+
+# remove suffix
+repo=$(echo "$repo" | sed -E 's/\/tree\/main$//g')
+
+printf "[+] Checking for GGUF model files in %s\n" "$repo"
+
+# find GGUF files in the source
+# TODO: better logic
+model_tree="${repo%/}/tree/main"
+model_files=$(curl -s "$model_tree" | grep -i "\\.gguf" | sed -E 's/.*<span class="truncate group-hover:underline">(.*)<\/span><\/a>/\1/g')
+
+# list all files in the provided git repo
+printf "[+] Model files:\n\n"
+for file in $model_files; do
+    # determine iw by grepping the filename with wtypes
+    iw=-1
+    is=0
+    for wt in "${wtypes[@]}"; do
+        # uppercase
+        ufile=$(echo "$file" | tr '[:lower:]' '[:upper:]')
+        if [[ "$ufile" =~ "$wt" ]]; then
+            iw=$is
+            break
+        fi
+        is=$((is+1))
+    done
+
+    if [[ $iw -eq -1 ]]; then
+        continue
+    fi
+
+    wfiles[$iw]="$file"
+
+    have=" "
+    if [[ -f "$file" ]]; then
+        have="*"
+    fi
+
+    printf "    %2d) %s %s\n" $iw "$have" "$file"
+done
+
+# ask for weights type until provided and available
+while [[ -z "$wtype" ]]; do
+    printf "\n"
+    read -p "[+] Select weight type: " wtype
+    wfile="${wfiles[$wtype]}"
+
+    if [[ -z "$wfile" ]]; then
+        printf "[-] Invalid weight type: %s\n" "$wtype"
+        wtype=""
+    fi
+done
+
+printf "[+] Selected weight type: %s (%s)\n" "$wtype" "$wfile"
+
+url="${repo%/}/resolve/main/$wfile"
+
+# check file if the model has been downloaded before
+chk="$wfile.chk"
+
+# check if we should download the file
+# - if $wfile does not exist
+# - if $wfile exists but $chk does not exist
+# - if $wfile exists and $chk exists but $wfile is newer than $chk
+# TODO: better logic using git lfs info
+
+do_download=0
+
+if [[ ! -f "$wfile" ]]; then
+    do_download=1
+elif [[ ! 
-f "$chk" ]]; then + do_download=1 +elif [[ "$wfile" -nt "$chk" ]]; then + do_download=1 +fi + +if [[ $do_download -eq 1 ]]; then + printf "[+] Downloading weights from %s\n" "$url" + + # download the weights file + curl -o "$wfile" -# -L "$url" + + # create a check file if successful + if [[ $? -eq 0 ]]; then + printf "[+] Creating check file %s\n" "$chk" + touch "$chk" + fi +else + printf "[+] Using cached weights %s\n" "$wfile" +fi + +# get latest llama.cpp and build + +printf "[+] Downloading latest llama.cpp\n" + +llama_cpp_dir="__llama_cpp_port_${port}__" + +if [[ -d "$llama_cpp_dir" && ! -f "$llama_cpp_dir/__ggml_script__" ]]; then + # if the dir exists and there isn't a file "__ggml_script__" in it, abort + printf "[-] Directory %s already exists\n" "$llama_cpp_dir" + printf "[-] Please remove it and try again\n" + exit 1 +elif [[ -d "$llama_cpp_dir" ]]; then + printf "[+] Directory %s already exists\n" "$llama_cpp_dir" + printf "[+] Using cached llama.cpp\n" + + cd "$llama_cpp_dir" + git reset --hard + git fetch + git checkout origin/master + + cd .. +else + printf "[+] Cloning llama.cpp\n" + + git clone https://github.com/ggerganov/llama.cpp "$llama_cpp_dir" +fi + +# mark that that the directory is made by this script +touch "$llama_cpp_dir/__ggml_script__" + +if [[ $verbose -eq 1 ]]; then + set -x +fi + +# build +cd "$llama_cpp_dir" + +make clean + +log="--silent" +if [[ $verbose -eq 1 ]]; then + log="" +fi + +if [[ "$backend" == "cuda" ]]; then + printf "[+] Building with CUDA backend\n" + LLAMA_CUBLAS=1 make -j server $log +elif [[ "$backend" == "cpu" ]]; then + printf "[+] Building with CPU backend\n" + make -j server $log +elif [[ "$backend" == "metal" ]]; then + printf "[+] Building with Metal backend\n" + make -j server $log +elif [[ "$backend" == "opencl" ]]; then + printf "[+] Building with OpenCL backend\n" + LLAMA_CLBLAST=1 make -j server $log +else + printf "[-] Unknown backend: %s\n" "$backend" + exit 1 +fi + +# run the server + +printf "[+] Running server\n" + +args="" +if [[ "$backend" == "cuda" ]]; then + export CUDA_VISIBLE_DEVICES=$gpu_id + args="-ngl 999" +elif [[ "$backend" == "cpu" ]]; then + args="-ngl 0" +elif [[ "$backend" == "metal" ]]; then + args="-ngl 999" +elif [[ "$backend" == "opencl" ]]; then + args="-ngl 999" +else + printf "[-] Unknown backend: %s\n" "$backend" + exit 1 +fi + +if [[ $verbose -eq 1 ]]; then + args="$args --verbose" +fi + +./server -m "../$wfile" --host 0.0.0.0 --port "$port" -c $n_kv -np "$n_parallel" $args + +exit 0 From 73bdcb395ef9a997d9c02950c7cd4249546162cd Mon Sep 17 00:00:00 2001 From: Andrew Godfrey Date: Wed, 1 Nov 2023 04:49:04 -0700 Subject: [PATCH 052/859] finetune : add -ngl parameter (#3762) * Add '-ngl' support to finetune.cpp * Add fprintf in ggml_cuda_op_add When I tried CUDA offloading during finetuning following the readme, I got an assert here. 
This probably isn't an important case because inference later gives a warning saying you should use f16 or f32 instead when using lora * Add 'finetune.sh', which currently fails when using GPU "error: operator (): Finetuning on tensors with type 'f16' is not yet supported" * tweak finetune.sh * Suppress some warnings in ggml.c * Add f16 implementation to ggml_compute_forward_add_f16_f32 * Add an f16 case to ggml_add_cast_impl and llama_build_lora_finetune_graphs * finetune.sh: Edit comments * Add "add_f16_f32_f32_cuda" * Tweak an error message * finetune.sh: Add an optional LLAMA_MODEL_DIR variable * finetune.sh: Add an optional LLAMA_TRAINING_DIR variable * train : minor * tabs to spaces --------- Co-authored-by: Georgi Gerganov Co-authored-by: cebtenzzre --- common/train.cpp | 2 ++ common/train.h | 1 + examples/finetune/finetune.cpp | 14 +++++++++- examples/finetune/finetune.sh | 34 +++++++++++++++++++++++ ggml-cuda.cu | 17 ++++++++++++ ggml-quants.c | 2 ++ ggml.c | 49 +++++++++++++++++++++++++--------- llama.cpp | 2 +- 8 files changed, 106 insertions(+), 15 deletions(-) create mode 100644 examples/finetune/finetune.sh diff --git a/common/train.cpp b/common/train.cpp index 3cce5da26..bc15b7a03 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -1045,6 +1045,7 @@ struct train_params_common get_default_train_params_common() { params.n_batch = 8; params.n_gradient_accumulation = 1; params.n_epochs = -1; + params.n_gpu_layers = 0; params.custom_n_ctx = false; @@ -1080,6 +1081,7 @@ struct train_params_common get_default_train_params_common() { params.adam_beta2 = 0.999f; params.adam_gclip = 1.0f; params.adam_eps_f = 0.0f; + return params; } diff --git a/common/train.h b/common/train.h index 42fa704b8..d86c93cc4 100644 --- a/common/train.h +++ b/common/train.h @@ -44,6 +44,7 @@ struct train_params_common { int n_batch; int n_gradient_accumulation; int n_epochs; + int n_gpu_layers; bool custom_n_ctx; diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index 35824cd2d..60c7faa79 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -652,7 +652,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( GGML_ASSERT(tokens_input->type == GGML_TYPE_I32); auto add_to_f32 = [] (struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b) { - if (ggml_is_quantized(a->type)) { + if (ggml_is_quantized(a->type) || a->type == GGML_TYPE_F16) { return ggml_add_cast(ctx, a, b, GGML_TYPE_F32); } else if (a->type == GGML_TYPE_F32) { return ggml_add(ctx, a, b); @@ -1459,6 +1459,17 @@ static bool train_params_parse(int argc, char ** argv, struct train_params * par } params->n_rank_w3 = std::stoi(argv[i]); params->custom_n_rank_w3 = true; + } else if (arg == "--gpu-layers" || arg == "-ngl" || arg == "--n-gpu-layers") { + if (++i >= argc) { + invalid_param = true; + break; + } +#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD + params->common.n_gpu_layers = std::stoi(argv[i]); +#else + fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); + fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); +#endif } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); train_print_usage(argc, argv, &default_params); @@ -1545,6 +1556,7 @@ int main(int argc, char ** argv) { srand(params.common.seed); struct llama_model_params llama_mparams = llama_model_default_params(); + llama_mparams.n_gpu_layers = params.common.n_gpu_layers; llama_mparams.vocab_only = 
false; printf("%s: model base = '%s'\n", __func__, params.fn_model_base); diff --git a/examples/finetune/finetune.sh b/examples/finetune/finetune.sh new file mode 100644 index 000000000..079bfa113 --- /dev/null +++ b/examples/finetune/finetune.sh @@ -0,0 +1,34 @@ +#!/bin/bash +cd `dirname $0` +cd ../.. + +EXE="./finetune" + +if [[ ! $LLAMA_MODEL_DIR ]]; then LLAMA_MODEL_DIR="./models"; fi +if [[ ! $LLAMA_TRAINING_DIR ]]; then LLAMA_TRAINING_DIR="."; fi + +# MODEL="$LLAMA_MODEL_DIR/openllama-3b-v2-q8_0.gguf" # This is the model the readme uses. +MODEL="$LLAMA_MODEL_DIR/openllama-3b-v2.gguf" # An f16 model. Note in this case with "-g", you get an f32-format .BIN file that isn't yet supported if you use it with "main --lora" with GPU inferencing. + +while getopts "dg" opt; do + case $opt in + d) + DEBUGGER="gdb --args" + ;; + g) + EXE="./build/bin/Release/finetune" + GPUARG="--gpu-layers 25" + ;; + esac +done + +$DEBUGGER $EXE \ + --model-base $MODEL \ + $GPUARG \ + --checkpoint-in chk-ol3b-shakespeare-LATEST.gguf \ + --checkpoint-out chk-ol3b-shakespeare-ITERATION.gguf \ + --lora-out lora-ol3b-shakespeare-ITERATION.bin \ + --train-data "$LLAMA_TRAINING_DIR\shakespeare.txt" \ + --save-every 10 \ + --threads 10 --adam-iter 30 --batch 4 --ctx 64 \ + --use-checkpointing diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 1ba951f68..4e6e7cd94 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -513,6 +513,15 @@ static __global__ void add_f16_f32_f16(const half * x, const float * y, half * d dst[i] = __hadd(x[i], __float2half(y[i])); } +static __global__ void add_f16_f32_f32(const half * x, const float * y, float * dst, const int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + if (i >= k) { + return; + } + dst[i] = __half2float(x[i]) + y[i]; +} + static __global__ void mul_f32(const float * x, const float * y, float * dst, const int kx, const int ky) { const int i = blockDim.x*blockIdx.x + threadIdx.x; @@ -4693,6 +4702,11 @@ static void add_f16_f32_f16_cuda(const half * x, const float * y, half * dst, co add_f16_f32_f16<<>>(x, y, dst, k); } +static void add_f16_f32_f32_cuda(const half * x, const float * y, float * dst, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_ADD_BLOCK_SIZE - 1) / CUDA_ADD_BLOCK_SIZE; + add_f16_f32_f32<<>>(x, y, dst, k); +} + static void mul_f32_cuda(const float * x, const float * y, float * dst, const int kx, const int ky, cudaStream_t stream) { const int num_blocks = (kx + CUDA_MUL_BLOCK_SIZE - 1) / CUDA_MUL_BLOCK_SIZE; mul_f32<<>>(x, y, dst, kx, ky); @@ -5996,7 +6010,10 @@ inline void ggml_cuda_op_add( add_f32_cuda(src0_dd, src1_dd, dst_dd, ggml_nelements(src0), ne10*ne11, main_stream); } else if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { add_f16_f32_f16_cuda((const half *) src0_dd, src1_dd, (half *) dst_dd, ggml_nelements(src0), main_stream); + } else if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F32) { + add_f16_f32_f32_cuda((const half *) src0_dd, src1_dd, dst_dd, ggml_nelements(src0), main_stream); } else { + fprintf(stderr, "src0->type: %d dst->type: %d\n", src0->type, dst->type); GGML_ASSERT(false); } diff --git a/ggml-quants.c b/ggml-quants.c index 721594467..255c89b6a 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -716,6 +716,7 @@ void quantize_row_q8_0(const float * restrict x, void * restrict vy, int k) { __riscv_vse8_v_i8m1(y[i].qs , vs, vl); } #else + UNUSED(nb); // scalar quantize_row_q8_0_reference(x, y, k); #endif @@ -969,6 +970,7 @@ void quantize_row_q8_1(const float * restrict x, void * 
restrict vy, int k) { y[i].s = sum*d; } #else + UNUSED(nb); // scalar quantize_row_q8_1_reference(x, y, k); #endif diff --git a/ggml.c b/ggml.c index 84407b122..80d682255 100644 --- a/ggml.c +++ b/ggml.c @@ -3153,7 +3153,7 @@ static struct ggml_tensor * ggml_add_cast_impl( // TODO: support less-strict constraint // GGML_ASSERT(ggml_can_repeat(b, a)); GGML_ASSERT(ggml_can_repeat_rows(b, a)); - GGML_ASSERT(ggml_is_quantized(a->type)); // currently only supported for quantized input + GGML_ASSERT(ggml_is_quantized(a->type) || a->type == GGML_TYPE_F16); // currently only supported for quantized input and f16 bool is_node = false; @@ -6927,9 +6927,15 @@ static void ggml_compute_forward_add_f16_f32( GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT(dst->type == GGML_TYPE_F16); - GGML_ASSERT( nb0 == sizeof(ggml_fp16_t)); + if (dst->type == GGML_TYPE_F32) { + GGML_ASSERT( nb0 == sizeof(float)); + } + else { + GGML_ASSERT(dst->type == GGML_TYPE_F16); + GGML_ASSERT( nb0 == sizeof(ggml_fp16_t)); + } + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); // rows per thread @@ -6940,18 +6946,35 @@ static void ggml_compute_forward_add_f16_f32( const int ir1 = MIN(ir0 + dr, nr); if (nb10 == sizeof(float)) { - for (int ir = ir0; ir < ir1; ++ir) { - // src0, src1 and dst are same shape => same indices - const int i3 = ir/(ne2*ne1); - const int i2 = (ir - i3*ne2*ne1)/ne1; - const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + if (dst->type == GGML_TYPE_F16) { + for (int ir = ir0; ir < ir1; ++ir) { + // src0, src1 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); - ggml_fp16_t * dst_ptr = (ggml_fp16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); - ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); - float * src1_ptr = (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11); + ggml_fp16_t * dst_ptr = (ggml_fp16_t *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); + ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11); - for (int i = 0; i < ne0; i++) { - dst_ptr[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(src0_ptr[i]) + src1_ptr[i]); + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP32_TO_FP16(GGML_FP16_TO_FP32(src0_ptr[i]) + src1_ptr[i]); + } + } + } else { + for (int ir = ir0; ir < ir1; ++ir) { + // src0, src1 and dst are same shape => same indices + const int i3 = ir/(ne2*ne1); + const int i2 = (ir - i3*ne2*ne1)/ne1; + const int i1 = (ir - i3*ne2*ne1 - i2*ne1); + + float * dst_ptr = (float *) ((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1); + ggml_fp16_t * src0_ptr = (ggml_fp16_t *) ((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01); + float * src1_ptr = (float *) ((char *) src1->data + i3*nb13 + i2*nb12 + i1*nb11); + + for (int i = 0; i < ne0; i++) { + dst_ptr[i] = GGML_FP16_TO_FP32(src0_ptr[i]) + src1_ptr[i]; + } } } } diff --git a/llama.cpp b/llama.cpp index ead1d421d..42cedc7a1 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8003,7 +8003,7 @@ static int llama_apply_lora_from_file_internal( if (dest_t->backend == GGML_BACKEND_GPU || dest_t->backend == GGML_BACKEND_GPU_SPLIT) { if (dest_t->type != GGML_TYPE_F16) { throw std::runtime_error(format( - "%s: error: the simultaneous use of LoRAs and GPU acceleration is only supported for f16 models", __func__)); + "%s: error: the simultaneous use of 
LoRAs and GPU acceleration is only supported for f16 models. dest_t->type: %d", __func__, dest_t->type)); } offload_func = ggml_cuda_assign_buffers; offload_func_force_inplace = ggml_cuda_assign_buffers_force_inplace; From 9a3b4f6c86503c9cfc049d4d0fdeafef12806f5e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 1 Nov 2023 13:50:45 +0200 Subject: [PATCH 053/859] ggml : fix UNUSED macro (#3762) --- ggml-quants.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 255c89b6a..740be6dc5 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -716,7 +716,7 @@ void quantize_row_q8_0(const float * restrict x, void * restrict vy, int k) { __riscv_vse8_v_i8m1(y[i].qs , vs, vl); } #else - UNUSED(nb); + GGML_UNUSED(nb); // scalar quantize_row_q8_0_reference(x, y, k); #endif @@ -970,7 +970,7 @@ void quantize_row_q8_1(const float * restrict x, void * restrict vy, int k) { y[i].s = sum*d; } #else - UNUSED(nb); + GGML_UNUSED(nb); // scalar quantize_row_q8_1_reference(x, y, k); #endif From e75dfdd31b6a3dfa0627ba4ac3bb4b36e9db588e Mon Sep 17 00:00:00 2001 From: l3utterfly Date: Wed, 1 Nov 2023 21:40:43 +0800 Subject: [PATCH 054/859] sampling : null grammar field after reset (#3885) --- common/sampling.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/common/sampling.cpp b/common/sampling.cpp index 673d67a6d..1317024c2 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -39,6 +39,7 @@ void llama_sampling_free(struct llama_sampling_context * ctx) { void llama_sampling_reset(llama_sampling_context * ctx) { if (ctx->grammar != NULL) { llama_grammar_free(ctx->grammar); + ctx->grammar = NULL; } if (!ctx->parsed_grammar.rules.empty()) { From a2758d08e44ce3624d233af4d23c6843e2e735b5 Mon Sep 17 00:00:00 2001 From: staviq Date: Wed, 1 Nov 2023 15:18:27 +0100 Subject: [PATCH 055/859] log : make generating separate log files optional (#3787) * impl --log-new, --log-append * Update common/log.h Co-authored-by: cebtenzzre * Update common/log.h Co-authored-by: cebtenzzre * Apply suggestions from code review Co-authored-by: cebtenzzre --------- Co-authored-by: cebtenzzre --- common/log.h | 122 ++++++++++++++++++++++++++++++++++----------------- 1 file changed, 82 insertions(+), 40 deletions(-) diff --git a/common/log.h b/common/log.h index d2c864cea..c0e814861 100644 --- a/common/log.h +++ b/common/log.h @@ -97,38 +97,56 @@ #define LOG_TEE_TARGET stderr #endif -// NOTE: currently disabled as it produces too many log files +// Utility for synchronizing log configuration state +// since std::optional was introduced only in c++17 +enum LogTriState +{ + LogTriStateSame, + LogTriStateFalse, + LogTriStateTrue +}; + // Utility to obtain "pid" like unique process id and use it when creating log files. -//inline std::string log_get_pid() -//{ -// static std::string pid; -// if (pid.empty()) -// { -// // std::this_thread::get_id() is the most portable way of obtaining a "process id" -// // it's not the same as "pid" but is unique enough to solve multiple instances -// // trying to write to the same log. -// std::stringstream ss; -// ss << std::this_thread::get_id(); -// pid = ss.str(); -// } -// -// return pid; -//} +inline std::string log_get_pid() +{ + static std::string pid; + if (pid.empty()) + { + // std::this_thread::get_id() is the most portable way of obtaining a "process id" + // it's not the same as "pid" but is unique enough to solve multiple instances + // trying to write to the same log. 
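+        // [editorial note, not part of the original patch] std::this_thread::get_id()
+        // is only guaranteed unique within one process run; the value changes on every
+        // restart, so log files tagged with it cannot be correlated across runs.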
+ std::stringstream ss; + ss << std::this_thread::get_id(); + pid = ss.str(); + } + + return pid; +} // Utility function for generating log file names with unique id based on thread id. // invocation with log_filename_generator( "llama", "log" ) creates a string "llama..log" // where the number is a runtime id of the current thread. -#define log_filename_generator(log_file_basename, log_file_extension) log_filename_generator_impl(log_file_basename, log_file_extension) +#define log_filename_generator(log_file_basename, log_file_extension) log_filename_generator_impl(LogTriStateSame, log_file_basename, log_file_extension) // INTERNAL, DO NOT USE -inline std::string log_filename_generator_impl(const std::string & log_file_basename, const std::string & log_file_extension) +inline std::string log_filename_generator_impl(LogTriState multilog, const std::string & log_file_basename, const std::string & log_file_extension) { + static bool _multilog = false; + + if (multilog != LogTriStateSame) + { + _multilog = multilog == LogTriStateTrue; + } + std::stringstream buf; buf << log_file_basename; - //buf << "."; - //buf << log_get_pid(); + if (_multilog) + { + buf << "."; + buf << log_get_pid(); + } buf << "."; buf << log_file_extension; @@ -213,15 +231,6 @@ inline std::string log_filename_generator_impl(const std::string & log_file_base #define LOG_TEE_FLF_VAL ,"" #endif -// Utility for synchronizing log configuration state -// since std::optional was introduced only in c++17 -enum LogTriState -{ - LogTriStateSame, - LogTriStateFalse, - LogTriStateTrue -}; - // INTERNAL, DO NOT USE // USE LOG() INSTEAD // @@ -315,16 +324,23 @@ enum LogTriState #endif // INTERNAL, DO NOT USE -inline FILE *log_handler1_impl(bool change = false, LogTriState disable = LogTriStateSame, const std::string & filename = LOG_DEFAULT_FILE_NAME, FILE *target = nullptr) +inline FILE *log_handler1_impl(bool change = false, LogTriState append = LogTriStateSame, LogTriState disable = LogTriStateSame, const std::string & filename = LOG_DEFAULT_FILE_NAME, FILE *target = nullptr) { - static bool _initialized{false}; - static bool _disabled{(filename.empty() && target == nullptr)}; + static bool _initialized = false; + static bool _append = false; + static bool _disabled = filename.empty() && target == nullptr; static std::string log_current_filename{filename}; static FILE *log_current_target{target}; static FILE *logfile = nullptr; if (change) { + if (append != LogTriStateSame) + { + _append = append == LogTriStateTrue; + return logfile; + } + if (disable == LogTriStateTrue) { // Disable primary target @@ -377,7 +393,7 @@ inline FILE *log_handler1_impl(bool change = false, LogTriState disable = LogTri } } - logfile = fopen(filename.c_str(), "w"); + logfile = fopen(filename.c_str(), _append ? "a" : "w"); } if (!logfile) @@ -398,9 +414,9 @@ inline FILE *log_handler1_impl(bool change = false, LogTriState disable = LogTri } // INTERNAL, DO NOT USE -inline FILE *log_handler2_impl(bool change = false, LogTriState disable = LogTriStateSame, FILE *target = nullptr, const std::string & filename = LOG_DEFAULT_FILE_NAME) +inline FILE *log_handler2_impl(bool change = false, LogTriState append = LogTriStateSame, LogTriState disable = LogTriStateSame, FILE *target = nullptr, const std::string & filename = LOG_DEFAULT_FILE_NAME) { - return log_handler1_impl(change, disable, filename, target); + return log_handler1_impl(change, append, disable, filename, target); } // Disables logs entirely at runtime. 
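// [Editorial note, not part of the patch] A minimal usage sketch of the new
// tri-state plumbing added above. Both knobs latch static state inside
// log_filename_generator_impl()/log_handler1_impl(), so per their doc comments
// they must be invoked before the first log use:
//
//     log_multilog(true);   // name files "<basename>.<thread-id>.log"
//     log_append(true);     // open the log with fopen(..., "a") instead of "w"
//     LOG("log configured\n");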
@@ -411,7 +427,7 @@ inline FILE *log_handler2_impl(bool change = false, LogTriState disable = LogTri // INTERNAL, DO NOT USE inline FILE *log_disable_impl() { - return log_handler1_impl(true, LogTriStateTrue); + return log_handler1_impl(true, LogTriStateSame, LogTriStateTrue); } // Enables logs at runtime. @@ -420,19 +436,31 @@ inline FILE *log_disable_impl() // INTERNAL, DO NOT USE inline FILE *log_enable_impl() { - return log_handler1_impl(true, LogTriStateFalse); + return log_handler1_impl(true, LogTriStateSame, LogTriStateFalse); } // Sets target fir logs, either by a file name or FILE* pointer (stdout, stderr, or any valid FILE*) #define log_set_target(target) log_set_target_impl(target) // INTERNAL, DO NOT USE -inline FILE *log_set_target_impl(const std::string & filename) { return log_handler1_impl(true, LogTriStateSame, filename); } -inline FILE *log_set_target_impl(FILE *target) { return log_handler2_impl(true, LogTriStateSame, target); } +inline FILE *log_set_target_impl(const std::string & filename) { return log_handler1_impl(true, LogTriStateSame, LogTriStateSame, filename); } +inline FILE *log_set_target_impl(FILE *target) { return log_handler2_impl(true, LogTriStateSame, LogTriStateSame, target); } // INTERNAL, DO NOT USE inline FILE *log_handler() { return log_handler1_impl(); } +// Enable or disable creating separate log files for each run. +// can ONLY be invoked BEFORE first log use. +#define log_multilog(enable) log_filename_generator_impl((enable) ? LogTriStateTrue : LogTriStateFalse, "", "") +// Enable or disable append mode for log file. +// can ONLY be invoked BEFORE first log use. +#define log_append(enable) log_append_impl(enable) +// INTERNAL, DO NOT USE +inline FILE *log_append_impl(bool enable) +{ + return log_handler1_impl(true, enable ? LogTriStateTrue : LogTriStateFalse, LogTriStateSame); +} + inline void log_test() { log_disable(); @@ -494,6 +522,18 @@ inline bool log_param_single_parse(const std::string & param) return true; } + if (param == "--log-new") + { + log_multilog(true); + return true; + } + + if (param == "--log-append") + { + log_append(true); + return true; + } + return false; } @@ -523,7 +563,9 @@ inline void log_print_usage() printf(" --log-disable Disable trace logs\n"); printf(" --log-enable Enable trace logs\n"); printf(" --log-file Specify a log filename (without extension)\n"); - printf(" Log file will be tagged with unique ID and written as \"..log\"\n"); /* */ + printf(" --log-new Create a separate new log file on start. 
" + "Each log file will have unique name: \"..log\"\n"); + printf(" --log-append Don't truncate the old log file.\n"); } #define log_dump_cmdline(argc, argv) log_dump_cmdline_impl(argc, argv) From 0e40806c1cb3bdf9955ed807ffbe212be85b4c67 Mon Sep 17 00:00:00 2001 From: bandoti <141645996+bandoti@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:42:01 -0300 Subject: [PATCH 056/859] common : allow caller to handle help/argument exceptions (#3715) * Allow caller to handle help/argument exceptions * Prepend newline to usage output * Add new gpt_params_parse_ex function to hide arg-parse impl * Fix issue blocking success case * exit instead of returning false * Update common/common.h Co-authored-by: Georgi Gerganov * Update common/common.cpp Co-authored-by: Georgi Gerganov --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 41 ++++++++++++++++++++++++++--------------- common/common.h | 2 ++ 2 files changed, 28 insertions(+), 15 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index dc4865e80..89be41261 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -103,9 +103,24 @@ void process_escapes(std::string& input) { } bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { + bool result = true; + try { + if (!gpt_params_parse_ex(argc, argv, params)) { + gpt_print_usage(argc, argv, gpt_params()); + exit(0); + } + } + catch (const std::invalid_argument& ex) { + fprintf(stderr, ex.what()); + gpt_print_usage(argc, argv, gpt_params()); + exit(1); + } + return result; +} + +bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { bool invalid_param = false; std::string arg; - gpt_params default_params; const std::string arg_prefix = "--"; llama_sampling_params & sparams = params.sparams; @@ -554,11 +569,8 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { break; } } else if (arg == "-h" || arg == "--help") { - gpt_print_usage(argc, argv, default_params); -#ifndef LOG_DISABLE_LOGS - log_print_usage(); -#endif // LOG_DISABLE_LOGS - exit(0); + return false; + } else if (arg == "--random-prompt") { params.random_prompt = true; } else if (arg == "--in-prefix-bos") { @@ -617,22 +629,17 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { // End of Parse args for logging parameters #endif // LOG_DISABLE_LOGS } else { - fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); - gpt_print_usage(argc, argv, default_params); - exit(1); + throw std::invalid_argument("error: unknown argument: " + arg); } } if (invalid_param) { - fprintf(stderr, "error: invalid parameter for argument: %s\n", arg.c_str()); - gpt_print_usage(argc, argv, default_params); - exit(1); + throw std::invalid_argument("error: invalid parameter for argument: " + arg); } if (params.prompt_cache_all && (params.interactive || params.interactive_first || params.instruct)) { - fprintf(stderr, "error: --prompt-cache-all not supported in interactive mode yet\n"); - gpt_print_usage(argc, argv, default_params); - exit(1); + + throw std::invalid_argument("error: --prompt-cache-all not supported in interactive mode yet\n"); } if (params.escape) { @@ -651,6 +658,7 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { const llama_sampling_params & sparams = params.sparams; + printf("\n"); printf("usage: %s [options]\n", argv[0]); printf("\n"); printf("options:\n"); @@ -762,6 +770,9 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & 
params) { printf(" -ld LOGDIR, --logdir LOGDIR\n"); printf(" path under which to save YAML logs (no logging if unset)\n"); printf("\n"); +#ifndef LOG_DISABLE_LOGS + log_print_usage(); +#endif // LOG_DISABLE_LOGS } std::string get_system_info(const gpt_params & params) { diff --git a/common/common.h b/common/common.h index 84523a4fb..343b27217 100644 --- a/common/common.h +++ b/common/common.h @@ -110,6 +110,8 @@ struct gpt_params { std::string image = ""; // path to an image file }; +bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params); + bool gpt_params_parse(int argc, char ** argv, gpt_params & params); void gpt_print_usage(int argc, char ** argv, const gpt_params & params); From 50337961a678fce4081554b24e56e86b67660163 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 1 Nov 2023 20:11:02 +0200 Subject: [PATCH 057/859] llm : add llm_build_context (#3881) * llm : add llm_build_context * llm : deduce norm eps based on type + explict max_alibi_bias, clamp_kqv * llm : restore the non-graph llm_build_ functional API ggml-ci * llm : cleanup + comments --- llama.cpp | 2338 ++++++++++++++++++++++++----------------------------- 1 file changed, 1042 insertions(+), 1296 deletions(-) diff --git a/llama.cpp b/llama.cpp index 42cedc7a1..d0c4ef101 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3090,6 +3090,10 @@ static bool llama_model_load( return true; } +// +// llm_build +// + using llm_build_cb = std::function; enum llm_rope_type { @@ -3098,17 +3102,35 @@ enum llm_rope_type { LLM_ROPE_GLM, }; +enum llm_ffn_op_type { + LLM_FFN_SILU, + LLM_FFN_GELU, + LLM_FFN_RELU, + LLM_FFN_RELU_SQR, +}; + +enum llm_ffn_gate_type { + LLM_FFN_SEQ, + LLM_FFN_PAR, // ffn_gate is parallel to ffn_up +}; + +enum llm_norm_type { + LLM_NORM, + LLM_NORM_RMS, +}; + static struct ggml_tensor * llm_build_inp_embd( struct ggml_context * ctx, + const llama_hparams & hparams, const llama_batch & batch, struct ggml_tensor * tok_embd, - int64_t n_embd, - int32_t n_tokens, const llm_build_cb & cb) { + const int64_t n_embd = hparams.n_embd; + struct ggml_tensor * inpL; if (batch.token) { - struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_tokens); + struct ggml_tensor * inp_tokens = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, batch.n_tokens); cb(inp_tokens, "inp_tokens", -1); inpL = ggml_get_rows(ctx, tok_embd, inp_tokens); @@ -3117,7 +3139,7 @@ static struct ggml_tensor * llm_build_inp_embd( GGML_ASSERT(false && "not implemented"); #endif - inpL = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_tokens); + inpL = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, batch.n_tokens); } return inpL; @@ -3126,28 +3148,21 @@ static struct ggml_tensor * llm_build_inp_embd( // Persimmon: n_rot = n_embd_head/2 // Other: n_rot = n_embd_head static void llm_build_k_shift( - const llama_context & lctx, - struct ggml_context * ctx, - struct ggml_cgraph * graph, - int64_t n_rot, - llm_rope_type type, - const llm_build_cb & cb) { - const auto & model = lctx.model; - const auto & kv_self = lctx.kv_self; - const auto & cparams = lctx.cparams; - - const auto & hparams = model.hparams; - + struct ggml_context * ctx, + const llama_hparams & hparams, + const llama_kv_cache & kv, + struct ggml_cgraph * graph, + llm_rope_type type, + int64_t n_ctx, + int64_t n_rot, + float freq_base, + float freq_scale, + const llm_build_cb & cb) { const int64_t n_layer = hparams.n_layer; const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_gqa = hparams.n_embd_gqa(); const int64_t n_embd_head = hparams.n_embd_head(); - 
const int64_t n_ctx = lctx.cparams.n_ctx; - - const float freq_base = cparams.rope_freq_base; - const float freq_scale = cparams.rope_freq_scale; - GGML_ASSERT(n_embd_head % n_rot == 0); struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_ctx); @@ -3165,11 +3180,11 @@ static void llm_build_k_shift( struct ggml_tensor * tmp = // we rotate only the first n_rot dimensions ggml_rope_custom_inplace(ctx, - ggml_view_3d(ctx, kv_self.k, + ggml_view_3d(ctx, kv.k, n_rot, n_head_kv, n_ctx, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il), + ggml_element_size(kv.k)*n_embd_head, + ggml_element_size(kv.k)*n_embd_gqa, + ggml_element_size(kv.k)*n_embd_gqa*n_ctx*il), K_shift, n_rot, rope_type, 0, freq_base, freq_scale); cb(tmp, "K_shifted", il); ggml_build_forward_expand(graph, tmp); @@ -3177,22 +3192,17 @@ static void llm_build_k_shift( } static void llm_build_kv_store( - const llama_context & lctx, struct ggml_context * ctx, + const llama_hparams & hparams, + const llama_kv_cache & kv, struct ggml_cgraph * graph, struct ggml_tensor * k_cur, struct ggml_tensor * v_cur, + int64_t n_ctx, int32_t n_tokens, int32_t kv_head, const llm_build_cb & cb, int64_t il) { - const auto & model = lctx.model; - const auto & kv_self = lctx.kv_self; - const auto & cparams = lctx.cparams; - - const auto & hparams = model.hparams; - - const int64_t n_ctx = cparams.n_ctx; const int64_t n_embd_gqa = hparams.n_embd_gqa(); // compute the transposed [n_tokens, n_embd] V matrix @@ -3200,13 +3210,13 @@ static void llm_build_kv_store( //struct ggml_tensor * v_cur_t = ggml_transpose(ctx, v_cur); // TODO: reshape above is likely not needed cb(v_cur_t, "v_cur_t", il); - struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv_self.k, n_tokens*n_embd_gqa, - (ggml_element_size(kv_self.k)*n_embd_gqa)*(il*n_ctx + kv_head)); + struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k, n_tokens*n_embd_gqa, + (ggml_element_size(kv.k)*n_embd_gqa)*(il*n_ctx + kv_head)); cb(k_cache_view, "k_cache_view", il); - struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv_self.v, n_tokens, n_embd_gqa, - ( n_ctx)*ggml_element_size(kv_self.v), - (il*n_ctx)*ggml_element_size(kv_self.v)*n_embd_gqa + kv_head*ggml_element_size(kv_self.v)); + struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v, n_tokens, n_embd_gqa, + ( n_ctx)*ggml_element_size(kv.v), + (il*n_ctx)*ggml_element_size(kv.v)*n_embd_gqa + kv_head*ggml_element_size(kv.v)); cb(v_cache_view, "v_cache_view", il); // important: storing RoPE-ed version of K in the KV cache! 
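// [Editorial note, not part of the patch] The change in this hunk is purely
// mechanical: llm_build_kv_store() now takes hparams, the kv cache and n_ctx as
// explicit arguments instead of reaching into a llama_context, which is what
// lets the per-architecture builders below move into the self-contained
// llm_build_context struct. The cache layout is untouched: K is appended
// row-contiguous at row offset il*n_ctx + kv_head, while V is stored transposed
// so the per-head views later taken in llm_build_kqv() stay contiguous in n_kv.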
@@ -3214,23 +3224,18 @@ static void llm_build_kv_store( ggml_build_forward_expand(graph, ggml_cpy(ctx, v_cur_t, v_cache_view)); } -enum llm_norm_type { - LLM_NORM, - LLM_NORM_RMS, -}; - static struct ggml_tensor * llm_build_norm( struct ggml_context * ctx, struct ggml_tensor * cur, + const llama_hparams & hparams, struct ggml_tensor * mw, struct ggml_tensor * mb, llm_norm_type type, - float eps, const llm_build_cb & cb, int il) { switch (type) { - case LLM_NORM: cur = ggml_norm (ctx, cur, eps); break; - case LLM_NORM_RMS: cur = ggml_rms_norm(ctx, cur, eps); break; + case LLM_NORM: cur = ggml_norm (ctx, cur, hparams.f_norm_eps); break; + case LLM_NORM_RMS: cur = ggml_rms_norm(ctx, cur, hparams.f_norm_rms_eps); break; } if (mw || mb) { @@ -3251,18 +3256,6 @@ static struct ggml_tensor * llm_build_norm( return cur; } -enum llm_ffn_op_type { - LLM_FFN_SILU, - LLM_FFN_GELU, - LLM_FFN_RELU, - LLM_FFN_RELU_SQR, -}; - -enum llm_ffn_gate_type { - LLM_FFN_SEQ, - LLM_FFN_PAR, // ffn_gate is parallel to ffn_up -}; - static struct ggml_tensor * llm_build_ffn( struct ggml_context * ctx, struct ggml_tensor * cur, @@ -3351,26 +3344,21 @@ static struct ggml_tensor * llm_build_ffn( // if max_alibi_bias > 0 then apply ALiBi static struct ggml_tensor * llm_build_kqv( - const llama_context & lctx, struct ggml_context * ctx, struct ggml_tensor * cur, + const llama_hparams & hparams, + const llama_kv_cache & kv, struct ggml_tensor * wo, struct ggml_tensor * wo_b, struct ggml_tensor * q_cur, struct ggml_tensor * kq_scale, struct ggml_tensor * kq_mask, + int64_t n_ctx, int32_t n_tokens, int32_t n_kv, - float alibi_bias_max, + float max_alibi_bias, const llm_build_cb & cb, - int il) { - const auto & model = lctx.model; - const auto & kv_self = lctx.kv_self; - const auto & cparams = lctx.cparams; - - const auto & hparams = model.hparams; - - const int64_t n_ctx = cparams.n_ctx; + int il) { const int64_t n_embd = hparams.n_embd; const int64_t n_head = hparams.n_head; const int64_t n_head_kv = hparams.n_head_kv; @@ -3381,11 +3369,11 @@ static struct ggml_tensor * llm_build_kqv( cb(q, "q", il); struct ggml_tensor * k = - ggml_view_3d(ctx, kv_self.k, + ggml_view_3d(ctx, kv.k, n_embd_head, n_kv, n_head_kv, - ggml_element_size(kv_self.k)*n_embd_gqa, - ggml_element_size(kv_self.k)*n_embd_head, - ggml_element_size(kv_self.k)*n_embd_gqa*n_ctx*il); + ggml_element_size(kv.k)*n_embd_gqa, + ggml_element_size(kv.k)*n_embd_head, + ggml_element_size(kv.k)*n_embd_gqa*n_ctx*il); cb(k, "k", il); struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); @@ -3394,11 +3382,11 @@ static struct ggml_tensor * llm_build_kqv( kq = ggml_scale(ctx, kq, kq_scale); cb(kq, "kq_scaled", il); - if (alibi_bias_max > 0.0f) { + if (max_alibi_bias > 0.0f) { // TODO: n_head or n_head_kv // TODO: K-shift is likely not working // TODO: change to ggml_add - kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, alibi_bias_max); + kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, max_alibi_bias); cb(kq, "kq_scaled_alibi", il); } @@ -3410,11 +3398,11 @@ static struct ggml_tensor * llm_build_kqv( // split cached v into n_head heads struct ggml_tensor * v = - ggml_view_3d(ctx, kv_self.v, + ggml_view_3d(ctx, kv.v, n_kv, n_embd_head, n_head_kv, - ggml_element_size(kv_self.v)*n_ctx, - ggml_element_size(kv_self.v)*n_ctx*n_embd_head, - ggml_element_size(kv_self.v)*n_ctx*n_embd_gqa*il); + ggml_element_size(kv.v)*n_ctx, + ggml_element_size(kv.v)*n_ctx*n_embd_head, + ggml_element_size(kv.v)*n_ctx*n_embd_gqa*il); cb(v, "v", il); struct ggml_tensor * kqv = ggml_mul_mat(ctx, v, kq); @@ 
-3438,1259 +3426,1011 @@ static struct ggml_tensor * llm_build_kqv( return cur; } -static struct ggml_cgraph * llm_build_llama( +struct llm_build_context { + const llama_model & model; + const llama_hparams & hparams; + const llama_cparams & cparams; + const llama_batch & batch; + const llama_kv_cache & kv_self; + + const int64_t n_embd; + const int64_t n_layer; + const int64_t n_ctx; // user-specified context size (can be different from n_ctx_train) + const int64_t n_head; + const int64_t n_head_kv; + const int64_t n_embd_head; + const int64_t n_embd_gqa; + + const float freq_base; + const float freq_scale; + const float norm_eps; + const float norm_rms_eps; + + const int32_t n_tokens; + const int32_t n_kv; // size of KV cache to consider (n_kv <= n_ctx) + const int32_t kv_head; // index of where we store new KV data in the cache + + const bool do_rope_shift; + + const llm_build_cb & cb; + + llama_buffer & buf_compute; + + struct ggml_context * ctx0 = nullptr; + + // TODO: consider making the entire interface noexcept + llm_build_context( llama_context & lctx, const llama_batch & batch, const llm_build_cb & cb, - bool worst_case) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - const auto & cparams = lctx.cparams; + bool worst_case) : + model (lctx.model), + hparams (model.hparams), + cparams (lctx.cparams), + batch (batch), + kv_self (lctx.kv_self), + n_embd (hparams.n_embd), + n_layer (hparams.n_layer), + n_ctx (cparams.n_ctx), + n_head (hparams.n_head), + n_head_kv (hparams.n_head_kv), + n_embd_head (hparams.n_embd_head()), + n_embd_gqa (hparams.n_embd_gqa()), + freq_base (cparams.rope_freq_base), + freq_scale (cparams.rope_freq_scale), + norm_eps (hparams.f_norm_eps), + norm_rms_eps (hparams.f_norm_rms_eps), + n_tokens (batch.n_tokens), + n_kv (worst_case ? n_ctx : kv_self.n), + kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), + do_rope_shift (worst_case || kv_self.has_shift), + cb (cb), + buf_compute (lctx.buf_compute) { + GGML_ASSERT(!!kv_self.ctx); - const auto & kv_self = lctx.kv_self; + // all initializations should be done in init() + } - GGML_ASSERT(!!kv_self.ctx); + void init() { + struct ggml_init_params params = { + /*.mem_size =*/ buf_compute.size, + /*.mem_buffer =*/ buf_compute.data, + /*.no_alloc =*/ true, + }; - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_head = hparams.n_embd_head(); - - GGML_ASSERT(n_embd_head == hparams.n_rot); - - const float freq_base = cparams.rope_freq_base; - const float freq_scale = cparams.rope_freq_scale; - const float norm_rms_eps = hparams.f_norm_rms_eps; - - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = worst_case ? n_ctx : kv_self.n; - const int32_t kv_head = worst_case ? 
n_ctx - n_tokens : kv_self.head; - - const bool do_rope_shift = worst_case || kv_self.has_shift; - - //printf("n_kv = %d\n", n_kv); - - auto & buf_compute = lctx.buf_compute; - - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ true, - }; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); - cb(inpL, "inp_embd", -1); - - // inp_pos - contains the positions - struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - cb(inp_pos, "inp_pos", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - cb(KQ_mask, "KQ_mask", -1); - - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(lctx, ctx0, gf, n_embd_head, LLM_ROPE, cb); + ctx0 = ggml_init(params); } - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * inpSA = inpL; + void free() { + if (ctx0) { + ggml_free(ctx0); + ctx0 = nullptr; + } + } + + struct ggml_cgraph * build_llama() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + GGML_ASSERT(n_embd_head == hparams.n_rot); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + cb(Kcur, "Kcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, cur, hparams, kv_self, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * 
ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + + struct ggml_cgraph * build_baichuan() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + switch (model.type) { + case MODEL_7B: + Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + break; + case MODEL_13B: + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd/n_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd/n_head, n_head, n_tokens); + break; + default: + GGML_ASSERT(false); + } + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + // apply ALiBi for 13B model + const float max_alibi_bias = model.type == MODEL_13B ? 
8.0f : -1.0f; + + cur = llm_build_kqv(ctx0, cur, hparams, kv_self, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + + struct ggml_cgraph * build_falcon() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * attn_norm; + + attn_norm = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(attn_norm, "attn_norm", il); + + // self-attention + { + if (model.layers[il].attn_norm_2) { + // Falcon-40B + cur = llm_build_norm(ctx0, attn_norm, hparams, + model.layers[il].attn_norm_2, + model.layers[il].attn_norm_2_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm_2", il); + } else { + cur = attn_norm; + } + + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + + // using mode = 2 for neox mode + Qcur = ggml_rope_custom(ctx0, Qcur, inp_pos, n_embd_head, 2, 0, freq_base, freq_scale); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom(ctx0, Kcur, inp_pos, n_embd_head, 2, 0, freq_base, freq_scale); + cb(Kcur, "Kcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, 
kv_head, cb, il); + + cur = llm_build_kqv(ctx0, attn_norm, hparams, kv_self, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = cur; + + // feed forward + { + cur = llm_build_ffn(ctx0, attn_norm, // !! use the attn norm, not the result + model.layers[il].ffn_up, NULL, + NULL, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + cur = ggml_add(ctx0, cur, inpL); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; // norm - cur = llm_build_norm(ctx0, inpL, - model.layers[il].attn_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, il); - cb(cur, "attn_norm", il); + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, + model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); - // self-attention - { - // compute Q and K and RoPE them - struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); - cb(Qcur, "Qcur", il); + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); - struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); - cb(Kcur, "Kcur", il); + ggml_build_forward_expand(gf, cur); - struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); - cb(Vcur, "Vcur", il); - - Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); - cb(Qcur, "Qcur", il); - - Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); - cb(Kcur, "Kcur", il); - - llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - - cur = llm_build_kqv(lctx, ctx0, cur, - model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, -1.0f, cb, il); - cb(cur, "kqv_out", il); - } - - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); - cb(ffn_inp, "ffn_inp", il); - - // feed-forward network - { - cur = llm_build_norm(ctx0, ffn_inp, - model.layers[il].ffn_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, NULL, - model.layers[il].ffn_gate, NULL, - model.layers[il].ffn_down, NULL, - LLM_FFN_SILU, LLM_FFN_PAR, cb, il); - cb(cur, "ffn_out", il); - } - - cur = ggml_add(ctx0, cur, ffn_inp); - cb(cur, "l_out", il); - - // input for next layer - inpL = cur; + return gf; } - cur = inpL; + struct ggml_cgraph * build_starcoder() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); - cur = llm_build_norm(ctx0, cur, - model.output_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, -1); - cb(cur, "result_norm", -1); + struct ggml_tensor * cur; + struct ggml_tensor * pos; + struct ggml_tensor * inpL; - // lm_head - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); - ggml_build_forward_expand(gf, cur); + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); - ggml_free(ctx0); + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); - return gf; -} + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct 
ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); -static struct ggml_cgraph * llm_build_baichaun( - llama_context & lctx, - const llama_batch & batch, - const llm_build_cb & cb, - bool worst_case) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - const auto & cparams = lctx.cparams; + pos = ggml_get_rows(ctx0, model.pos_embd, inp_pos); + cb(pos, "pos_embd", -1); - const auto & kv_self = lctx.kv_self; + inpL = ggml_add(ctx0, inpL, pos); + cb(inpL, "inpL", -1); - GGML_ASSERT(!!kv_self.ctx); + for (int il = 0; il < n_layer; ++il) { + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm", il); - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_head = hparams.n_embd_head(); + // self-attention + { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); - GGML_ASSERT(n_embd_head == hparams.n_rot); + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); - const float freq_base = cparams.rope_freq_base; - const float freq_scale = cparams.rope_freq_scale; - const float norm_rms_eps = hparams.f_norm_rms_eps; + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = worst_case ? n_ctx : kv_self.n; - const int32_t kv_head = worst_case ? 
n_ctx - n_tokens : kv_self.head; + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); - const bool do_rope_shift = worst_case || kv_self.has_shift; + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - auto & buf_compute = lctx.buf_compute; + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ true, - }; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); - cb(inpL, "inp_embd", -1); - - // inp_pos - contains the positions - struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - cb(inp_pos, "inp_pos", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - cb(KQ_mask, "KQ_mask", -1); - - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(lctx, ctx0, gf, n_embd_head, LLM_ROPE, cb); - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * inpSA = inpL; - - cur = llm_build_norm(ctx0, inpL, - model.layers[il].attn_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, il); - cb(cur, "attn_norm", il); - - // self-attention - { - struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); - cb(Qcur, "Qcur", il); - - struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); - cb(Kcur, "Kcur", il); - - struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); - cb(Vcur, "Vcur", il); - - switch (model.type) { - case MODEL_7B: - Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); - Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); - break; - case MODEL_13B: - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd/n_head, n_head, n_tokens); - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd/n_head, n_head, n_tokens); - break; - default: - GGML_ASSERT(false); - } - cb(Qcur, "Qcur", il); - cb(Kcur, "Kcur", il); - - llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - - // apply ALiBi for 13B model - const float alibi_bias_max = model.type == MODEL_13B ? 
8.0f : -1.0f; - - cur = llm_build_kqv(lctx, ctx0, cur, - model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, alibi_bias_max, cb, il); - cb(cur, "kqv_out", il); - } - - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); - cb(ffn_inp, "ffn_inp", il); - - // feed-forward network - { - cur = llm_build_norm(ctx0, ffn_inp, - model.layers[il].ffn_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, NULL, - model.layers[il].ffn_gate, NULL, - model.layers[il].ffn_down, NULL, - LLM_FFN_SILU, LLM_FFN_PAR, cb, il); - cb(cur, "ffn_out", il); - } - - cur = ggml_add(ctx0, cur, ffn_inp); - cb(cur, "l_out", il); - - // input for next layer - inpL = cur; - } - - cur = inpL; - - cur = llm_build_norm(ctx0, cur, - model.output_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, -1); - cb(cur, "result_norm", -1); - - // lm_head - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - - ggml_free(ctx0); - - return gf; -} - -static struct ggml_cgraph * llm_build_falcon( - llama_context & lctx, - const llama_batch & batch, - const llm_build_cb & cb, - bool worst_case) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - const auto & cparams = lctx.cparams; - - const auto & kv_self = lctx.kv_self; - - GGML_ASSERT(!!kv_self.ctx); - - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - - GGML_ASSERT(n_embd_head == hparams.n_rot); - - const float freq_base = cparams.rope_freq_base; - const float freq_scale = cparams.rope_freq_scale; - const float norm_eps = hparams.f_norm_eps; - - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = worst_case ? n_ctx : kv_self.n; - const int32_t kv_head = worst_case ? 
n_ctx - n_tokens : kv_self.head; - - const bool do_rope_shift = worst_case || kv_self.has_shift; - - //printf("kv_head = %d, n_kv = %d, n_tokens = %d, n_ctx = %d, is_measure = %d, has_shift = %d\n", - // kv_head, n_kv, n_tokens, n_ctx, ggml_allocr_is_measure(lctx.alloc), kv_self.has_shift); - - auto & buf_compute = lctx.buf_compute; - - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ true, - }; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); - cb(inpL, "inp_embd", -1); - - // inp_pos - contains the positions - struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - cb(inp_pos, "inp_pos", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - cb(KQ_mask, "KQ_mask", -1); - - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(lctx, ctx0, gf, n_embd_head, LLM_ROPE_NEOX, cb); - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * attn_norm; - - attn_norm = llm_build_norm(ctx0, inpL, - model.layers[il].attn_norm, - model.layers[il].attn_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(attn_norm, "attn_norm", il); - - // self-attention - { - if (model.layers[il].attn_norm_2) { - // Falcon-40B - cur = llm_build_norm(ctx0, attn_norm, - model.layers[il].attn_norm_2, - model.layers[il].attn_norm_2_b, - LLM_NORM, norm_eps, cb, il); - cb(cur, "attn_norm_2", il); - } else { - cur = attn_norm; + cur = llm_build_kqv(ctx0, cur, hparams, kv_self, + model.layers[il].wo, model.layers[il].bo, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); } - cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - cb(cur, "wqkv", il); - - struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); - - cb(Qcur, "Qcur", il); - cb(Kcur, "Kcur", il); - cb(Vcur, "Vcur", il); - - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - - // using mode = 2 for neox mode - Qcur = ggml_rope_custom(ctx0, Qcur, inp_pos, n_embd_head, 2, 0, freq_base, freq_scale); - cb(Qcur, "Qcur", il); - - Kcur = ggml_rope_custom(ctx0, Kcur, inp_pos, n_embd_head, 2, 0, freq_base, freq_scale); - cb(Kcur, "Kcur", il); - - llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - - cur = llm_build_kqv(lctx, ctx0, attn_norm, - model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, -1.0f, cb, il); - cb(cur, "kqv_out", il); - } - - struct ggml_tensor * ffn_inp = cur; - - // feed forward - { - cur = llm_build_ffn(ctx0, attn_norm, // !! 
use the attn norm, not the result - model.layers[il].ffn_up, NULL, - NULL, NULL, - model.layers[il].ffn_down, NULL, - LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); - cb(cur, "ffn_out", il); - } - - cur = ggml_add(ctx0, cur, ffn_inp); - cb(cur, "l_out", il); - - cur = ggml_add(ctx0, cur, inpL); - cb(cur, "l_out", il); - - // input for next layer - inpL = cur; - } - - cur = inpL; - - // norm - cur = llm_build_norm(ctx0, cur, - model.output_norm, - model.output_norm_b, - LLM_NORM, norm_eps, cb, -1); - cb(cur, "result_norm", -1); - - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - - ggml_free(ctx0); - - return gf; -} - -static struct ggml_cgraph * llm_build_starcoder( - llama_context & lctx, - const llama_batch & batch, - const llm_build_cb & cb, - bool worst_case) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - const auto & cparams = lctx.cparams; - - const auto & kv_self = lctx.kv_self; - - GGML_ASSERT(!!kv_self.ctx); - - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t n_head = hparams.n_head; - const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - - GGML_ASSERT(n_embd_head == hparams.n_rot); - - const float norm_eps = hparams.f_norm_eps; - - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = worst_case ? n_ctx : kv_self.n; - const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; - - auto & buf_compute = lctx.buf_compute; - - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ true, - }; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * pos; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); - cb(inpL, "inp_embd", -1); - - // inp_pos - contains the positions - struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - cb(inp_pos, "inp_pos", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - cb(KQ_mask, "KQ_mask", -1); - - pos = ggml_get_rows(ctx0, model.pos_embd, inp_pos); - cb(pos, "pos_embd", -1); - - inpL = ggml_add(ctx0, inpL, pos); - cb(inpL, "inpL", -1); - - for (int il = 0; il < n_layer; ++il) { - cur = llm_build_norm(ctx0, inpL, - model.layers[il].attn_norm, - model.layers[il].attn_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(cur, "attn_norm", il); - - // self-attention - { - cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - cb(cur, "wqkv", il); - - cur = ggml_add(ctx0, cur, model.layers[il].bqkv); - cb(cur, "bqkv", il); - - struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); - - cb(Qcur, "Qcur", il); - cb(Kcur, "Kcur", il); - cb(Vcur, "Vcur", il); - - Qcur = ggml_reshape_3d(ctx0, 
Qcur, n_embd_head, n_head, n_tokens); - - llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - - cur = llm_build_kqv(lctx, ctx0, cur, - model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, -1.0f, cb, il); - cb(cur, "kqv_out", il); - } - - // add the input - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); - cb(ffn_inp, "ffn_inp", il); - - // FF - { - cur = llm_build_norm(ctx0, ffn_inp, - model.layers[il].ffn_norm, - model.layers[il].ffn_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, model.layers[il].ffn_up_b, - NULL, NULL, - model.layers[il].ffn_down, model.layers[il].ffn_down_b, - LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); - cb(cur, "ffn_out", il); - } - - inpL = ggml_add(ctx0, cur, ffn_inp); - cb(inpL, "l_out", il); - } - - cur = llm_build_norm(ctx0, inpL, - model.output_norm, - model.output_norm_b, - LLM_NORM, norm_eps, cb, -1); - cb(cur, "result_norm", -1); - - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - ggml_free(ctx0); - - return gf; -} - -static struct ggml_cgraph * llm_build_persimmon( - llama_context & lctx, - const llama_batch & batch, - const llm_build_cb & cb, - bool worst_case) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - - const auto & kv_self = lctx.kv_self; - - GGML_ASSERT(!!kv_self.ctx); - - const auto & cparams = lctx.cparams; - - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_head = hparams.n_head; - const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_rot = n_embd_head / 2; - - const float freq_base = cparams.rope_freq_base; - const float freq_scale = cparams.rope_freq_scale; - const float norm_eps = hparams.f_norm_eps; - - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = worst_case ? n_ctx : kv_self.n; - const int32_t kv_head = worst_case ? 
n_ctx - n_tokens : kv_self.head; - - const bool do_rope_shift = worst_case || kv_self.has_shift; - - auto & buf_compute = lctx.buf_compute; - - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ true, - }; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); - cb(inpL, "imp_embd", -1); - - struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); - cb(inp_pos, "inp_pos", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - cb(KQ_mask, "KQ_mask", -1); - - if (do_rope_shift) { - llm_build_k_shift(lctx, ctx0, gf, n_rot, LLM_ROPE_NEOX, cb); - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * residual = inpL; - - cur = llm_build_norm(ctx0, inpL, - model.layers[il].attn_norm, - model.layers[il].attn_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(cur, "attn_norm", il); - - // self attention - { - cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - cb(cur, "wqkv", il); - - cur = ggml_add(ctx0, cur, model.layers[il].bqkv); - cb(cur, "bqkv", il); - - // split qkv - GGML_ASSERT(n_head_kv == n_head); - - struct ggml_tensor * tmpqkv = ggml_reshape_4d(ctx0, cur, n_embd_head, 3, n_head, n_tokens); - cb(tmpqkv, "tmpqkv", il); - - struct ggml_tensor * tmpqkv_perm = ggml_cont(ctx0, ggml_permute(ctx0, tmpqkv, 0, 3, 1, 2)); - cb(tmpqkv_perm, "tmpqkv", il); - - struct ggml_tensor * tmpq = ggml_view_3d( - ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, - ggml_element_size(tmpqkv_perm) * n_embd_head, - ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, - 0 - ); - cb(tmpq, "tmpq", il); - - struct ggml_tensor * tmpk = ggml_view_3d( - ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, - ggml_element_size(tmpqkv_perm) * n_embd_head, - ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, - ggml_element_size(tmpqkv_perm) * n_embd_head * n_head * n_tokens - ); - cb(tmpk, "tmpk", il); - - // Q/K Layernorm - tmpq = llm_build_norm(ctx0, tmpq, - model.layers[il].attn_q_norm, - model.layers[il].attn_q_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(tmpq, "tmpq", il); - - tmpk = llm_build_norm(ctx0, tmpk, - model.layers[il].attn_k_norm, - model.layers[il].attn_k_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(tmpk, "tmpk", il); - - // RoPE the first n_rot of q/k, pass the other half, and concat. 
- struct ggml_tensor * qrot = ggml_view_3d( - ctx0, tmpq, n_rot, n_head, n_tokens, - ggml_element_size(tmpq) * n_embd_head, - ggml_element_size(tmpq) * n_embd_head * n_head, - 0 - ); - cb(qrot, "qrot", il); - - struct ggml_tensor * krot = ggml_view_3d( - ctx0, tmpk, n_rot, n_head, n_tokens, - ggml_element_size(tmpk) * n_embd_head, - ggml_element_size(tmpk) * n_embd_head * n_head, - 0 - ); - cb(krot, "krot", il); - - // get the second half of tmpq, e.g tmpq[n_rot:, :, :] - struct ggml_tensor * qpass = ggml_view_3d( - ctx0, tmpq, n_rot, n_head, n_tokens, - ggml_element_size(tmpq) * n_embd_head, - ggml_element_size(tmpq) * n_embd_head * n_head, - ggml_element_size(tmpq) * n_rot - ); - cb(qpass, "qpass", il); - - struct ggml_tensor * kpass = ggml_view_3d( - ctx0, tmpk, n_rot, n_head, n_tokens, - ggml_element_size(tmpk) * n_embd_head, - ggml_element_size(tmpk) * n_embd_head * n_head, - ggml_element_size(tmpk) * n_rot - ); - cb(kpass, "kpass", il); - - struct ggml_tensor * qrotated = ggml_rope_custom( - ctx0, qrot, inp_pos, n_rot, 2, 0, freq_base, freq_scale - ); - cb(qrotated, "qrotated", il); - - struct ggml_tensor * krotated = ggml_rope_custom( - ctx0, krot, inp_pos, n_rot, 2, 0, freq_base, freq_scale - ); - cb(krotated, "krotated", il); - - // ggml currently only supports concatenation on dim=2 - // so we need to permute qrot, qpass, concat, then permute back. - qrotated = ggml_cont(ctx0, ggml_permute(ctx0, qrotated, 2, 1, 0, 3)); - cb(qrotated, "qrotated", il); - - krotated = ggml_cont(ctx0, ggml_permute(ctx0, krotated, 2, 1, 0, 3)); - cb(krotated, "krotated", il); - - qpass = ggml_cont(ctx0, ggml_permute(ctx0, qpass, 2, 1, 0, 3)); - cb(qpass, "qpass", il); - - kpass = ggml_cont(ctx0, ggml_permute(ctx0, kpass, 2, 1, 0, 3)); - cb(kpass, "kpass", il); - - struct ggml_tensor * Qcur = ggml_concat(ctx0, qrotated, qpass); - cb(Qcur, "Qcur", il); - - struct ggml_tensor * Kcur = ggml_concat(ctx0, krotated, kpass); - cb(Kcur, "Kcur", il); - - struct ggml_tensor * Q = ggml_cont(ctx0, ggml_permute(ctx0, Qcur, 1, 2, 0, 3)); - cb(Q, "Q", il); - - Kcur = ggml_cont(ctx0, ggml_permute(ctx0, Kcur, 2, 1, 0, 3)); - cb(Kcur, "Kcur", il); - - struct ggml_tensor * Vcur = ggml_view_3d( - ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, - ggml_element_size(tmpqkv_perm) * n_embd_head, - ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, - ggml_element_size(tmpqkv_perm) * n_embd_head * n_head * n_tokens * 2 - ); - cb(Vcur, "Vcur", il); - - llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - - // TODO: not tested, could be broken - cur = llm_build_kqv(lctx, ctx0, Q, - model.layers[il].wo, model.layers[il].bo, - Q, KQ_scale, KQ_mask, n_tokens, n_kv, -1.0f, cb, il); - cb(cur, "kqv_out", il); - } - - struct ggml_tensor * ffn_inp = ggml_add(ctx0, residual, cur); - cb(ffn_inp, "ffn_inp", il); - - // feed-forward network - { - cur = llm_build_norm(ctx0, ffn_inp, - model.layers[il].ffn_norm, - model.layers[il].ffn_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, model.layers[il].ffn_up_b, - NULL, NULL, - model.layers[il].ffn_down, model.layers[il].ffn_down_b, - LLM_FFN_RELU_SQR, LLM_FFN_SEQ, cb, il); - cb(cur, "ffn_out", il); - } - - cur = ggml_add(ctx0, cur, ffn_inp); - cb(cur, "l_out", il); - - inpL = cur; - } - - cur = inpL; - - cur = llm_build_norm(ctx0, cur, - model.output_norm, - model.output_norm_b, - LLM_NORM, norm_eps, cb, -1); - cb(cur, "result_norm", -1); - - cur = ggml_mul_mat(ctx0, model.output, cur); - 
cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - - ggml_free(ctx0); - - return gf; -} - -static struct ggml_cgraph * llm_build_refact( - llama_context & lctx, - const llama_batch & batch, - const llm_build_cb & cb, - bool worst_case) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - const auto & cparams = lctx.cparams; - - const auto & kv_self = lctx.kv_self; - - GGML_ASSERT(!!kv_self.ctx); - - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_head = hparams.n_embd_head(); - - const float norm_rms_eps = hparams.f_norm_rms_eps; - - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = worst_case ? n_ctx : kv_self.n; - const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; - - auto & buf_compute = lctx.buf_compute; - - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ true, - }; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); - cb(inpL, "inp_embd", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - cb(KQ_mask, "KQ_mask", -1); - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * inpSA = inpL; - - cur = llm_build_norm(ctx0, inpL, - model.layers[il].attn_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, il); - cb(cur, "attn_norm", il); - - // self-attention - { - struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); - cb(Qcur, "Qcur", il); - - struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); - cb(Kcur, "Kcur", il); - - struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); - cb(Vcur, "Vcur", il); - - Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); - cb(Kcur, "Kcur", il); - - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - cb(Qcur, "Qcur", il); - - llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - - cur = llm_build_kqv(lctx, ctx0, Qcur, - model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, 8.0f, cb, il); - cb(cur, "kqv_out", il); - } - - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); - cb(ffn_inp, "ffn_inp", il); - - // feed-forward network - { - cur = llm_build_norm(ctx0, ffn_inp, - model.layers[il].ffn_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, NULL, - model.layers[il].ffn_gate, NULL, - model.layers[il].ffn_down, NULL, - LLM_FFN_SILU, LLM_FFN_PAR, cb, il); - cb(cur, "ffn_out", il); - } - - cur = ggml_add(ctx0, cur, ffn_inp); - cb(cur, "l_out", il); - - // input for next layer - inpL = cur; - } - - cur = inpL; - - cur = llm_build_norm(ctx0, cur, - model.output_norm, NULL, - LLM_NORM_RMS, norm_rms_eps, cb, -1); - cb(cur, "result_norm", -1); - - // lm_head - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - 
- ggml_free(ctx0); - - return gf; -} - -static struct ggml_cgraph * llm_build_bloom( - llama_context & lctx, - const llama_batch & batch, - const llm_build_cb & cb, - bool worst_case) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - const auto & cparams = lctx.cparams; - - const auto & kv_self = lctx.kv_self; - - GGML_ASSERT(!!kv_self.ctx); - - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t n_head = hparams.n_head; - const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - - GGML_ASSERT(n_embd_head == hparams.n_rot); - - const float norm_eps = hparams.f_norm_eps; - - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = worst_case ? n_ctx : kv_self.n; - const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; - - auto & buf_compute = lctx.buf_compute; - - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ false, - }; - - params.no_alloc = true; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); - cb(inpL, "inp_embd", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - cb(KQ_mask, "KQ_mask", -1); - - inpL = llm_build_norm(ctx0, inpL, - model.tok_norm, - model.tok_norm_b, - LLM_NORM, norm_eps, cb, -1); - cb(inpL, "inp_norm", -1); - - for (int il = 0; il < n_layer; ++il) { - cur = llm_build_norm(ctx0, inpL, - model.layers[il].attn_norm, - model.layers[il].attn_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(cur, "attn_norm", il); - - // self-attention - { - cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - cb(cur, "wqkv", il); - - cur = ggml_add(ctx0, cur, model.layers[il].bqkv); - cb(cur, "bqkv", il); - - struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); - - cb(Qcur, "Qcur", il); - cb(Kcur, "Kcur", il); - cb(Vcur, "Vcur", il); - - Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - - llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - - cur = llm_build_kqv(lctx, ctx0, Qcur, - model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, 8.0f, cb, il); - cb(cur, "kqv_out", il); - } - - // Add the input - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); - cb(ffn_inp, "ffn_inp", il); - - // FF - { - cur = llm_build_norm(ctx0, ffn_inp, - model.layers[il].ffn_norm, - model.layers[il].ffn_norm_b, - LLM_NORM, norm_eps, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, model.layers[il].ffn_up_b, - NULL, NULL, - model.layers[il].ffn_down, model.layers[il].ffn_down_b, - LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); - cb(cur, "ffn_out", il); - } - - inpL = 
ggml_add(ctx0, cur, ffn_inp); - cb(inpL, "l_out", il); - } - - cur = llm_build_norm(ctx0, inpL, - model.output_norm, - model.output_norm_b, - LLM_NORM, norm_eps, cb, -1); - cb(cur, "result_norm", -1); - - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - - ggml_free(ctx0); - - return gf; -} - -static struct ggml_cgraph * llm_build_mpt( - llama_context & lctx, - const llama_batch & batch, - const llm_build_cb & cb, - bool worst_case) { - const auto & model = lctx.model; - const auto & hparams = model.hparams; - const auto & cparams = lctx.cparams; - - const auto & kv_self = lctx.kv_self; - - GGML_ASSERT(!!kv_self.ctx); - - const int64_t n_embd = hparams.n_embd; - const int64_t n_layer = hparams.n_layer; - const int64_t n_ctx = cparams.n_ctx; - const int64_t n_head = hparams.n_head; - const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - - const float norm_eps = hparams.f_norm_eps; - const float clamp_kqv = hparams.f_clamp_kqv; - const float max_alibi_bias = hparams.f_max_alibi_bias; - - const int32_t n_tokens = batch.n_tokens; - const int32_t n_kv = worst_case ? n_ctx : kv_self.n; - const int32_t kv_head = worst_case ? n_ctx - n_tokens : kv_self.head; - - auto & buf_compute = lctx.buf_compute; - - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ false, - }; - - params.no_alloc = true; - - struct ggml_context * ctx0 = ggml_init(params); - - ggml_cgraph * gf = ggml_new_graph(ctx0); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, batch, model.tok_embd, n_embd, n_tokens, cb); - cb(inpL, "inp_embd", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); - cb(KQ_mask, "KQ_mask", -1); - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * attn_norm; - - attn_norm = llm_build_norm(ctx0, inpL, - model.layers[il].attn_norm, - NULL, - LLM_NORM, norm_eps, cb, il); - cb(attn_norm, "attn_norm", il); - - // self-attention - { - cur = attn_norm; - - cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); - cb(cur, "wqkv", il); - - if (clamp_kqv > 0.0f) { - cur = ggml_clamp(ctx0, cur, -clamp_kqv, clamp_kqv); - cb(cur, "wqkv_clamped", il); + // add the input + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); + cb(ffn_inp, "ffn_inp", il); + + // FF + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, + model.layers[il].ffn_norm_b, + LLM_NORM, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); } - struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); - - cb(Qcur, "Qcur", il); - cb(Kcur, "Kcur", il); - cb(Vcur, "Vcur", il); - - Qcur = 
ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); - - llm_build_kv_store(lctx, ctx0, gf, Kcur, Vcur, n_tokens, kv_head, cb, il); - - cur = llm_build_kqv(lctx, ctx0, Qcur, - model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_tokens, n_kv, max_alibi_bias, cb, il); - cb(cur, "kqv_out", il); + inpL = ggml_add(ctx0, cur, ffn_inp); + cb(inpL, "l_out", il); } - // Add the input - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); - cb(ffn_inp, "ffn_inp", il); + cur = llm_build_norm(ctx0, inpL, hparams, + model.output_norm, + model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); - // feed forward - { - cur = llm_build_norm(ctx0, ffn_inp, - model.layers[il].ffn_norm, + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + + struct ggml_cgraph * build_persimmon() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + const int64_t n_rot = n_embd_head / 2; + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * residual = inpL; + + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm", il); + + // self attention + { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + // split qkv + GGML_ASSERT(n_head_kv == n_head); + + struct ggml_tensor * tmpqkv = ggml_reshape_4d(ctx0, cur, n_embd_head, 3, n_head, n_tokens); + cb(tmpqkv, "tmpqkv", il); + + struct ggml_tensor * tmpqkv_perm = ggml_cont(ctx0, ggml_permute(ctx0, tmpqkv, 0, 3, 1, 2)); + cb(tmpqkv_perm, "tmpqkv", il); + + struct ggml_tensor * tmpq = ggml_view_3d( + ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, + ggml_element_size(tmpqkv_perm) * n_embd_head, + ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, + 0 + ); + cb(tmpq, "tmpq", il); + + struct ggml_tensor * tmpk = ggml_view_3d( + ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, + ggml_element_size(tmpqkv_perm) * n_embd_head, + ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, + ggml_element_size(tmpqkv_perm) * n_embd_head * n_head * n_tokens + ); + cb(tmpk, "tmpk", il); + + // Q/K Layernorm + tmpq = llm_build_norm(ctx0, tmpq, hparams, + model.layers[il].attn_q_norm, + model.layers[il].attn_q_norm_b, + LLM_NORM, cb, il); + cb(tmpq, "tmpq", il); + + tmpk = llm_build_norm(ctx0, tmpk, hparams, + model.layers[il].attn_k_norm, + model.layers[il].attn_k_norm_b, + LLM_NORM, cb, il); + cb(tmpk, "tmpk", il); + + // RoPE the first n_rot of q/k, pass the other half, and concat. 
+ struct ggml_tensor * qrot = ggml_view_3d( + ctx0, tmpq, n_rot, n_head, n_tokens, + ggml_element_size(tmpq) * n_embd_head, + ggml_element_size(tmpq) * n_embd_head * n_head, + 0 + ); + cb(qrot, "qrot", il); + + struct ggml_tensor * krot = ggml_view_3d( + ctx0, tmpk, n_rot, n_head, n_tokens, + ggml_element_size(tmpk) * n_embd_head, + ggml_element_size(tmpk) * n_embd_head * n_head, + 0 + ); + cb(krot, "krot", il); + + // get the second half of tmpq, e.g. tmpq[n_rot:, :, :] + struct ggml_tensor * qpass = ggml_view_3d( + ctx0, tmpq, n_rot, n_head, n_tokens, + ggml_element_size(tmpq) * n_embd_head, + ggml_element_size(tmpq) * n_embd_head * n_head, + ggml_element_size(tmpq) * n_rot + ); + cb(qpass, "qpass", il); + + struct ggml_tensor * kpass = ggml_view_3d( + ctx0, tmpk, n_rot, n_head, n_tokens, + ggml_element_size(tmpk) * n_embd_head, + ggml_element_size(tmpk) * n_embd_head * n_head, + ggml_element_size(tmpk) * n_rot + ); + cb(kpass, "kpass", il); + + struct ggml_tensor * qrotated = ggml_rope_custom( + ctx0, qrot, inp_pos, n_rot, 2, 0, freq_base, freq_scale + ); + cb(qrotated, "qrotated", il); + + struct ggml_tensor * krotated = ggml_rope_custom( + ctx0, krot, inp_pos, n_rot, 2, 0, freq_base, freq_scale + ); + cb(krotated, "krotated", il); + + // ggml currently only supports concatenation on dim=2 + // so we need to permute qrot, qpass, concat, then permute back. + qrotated = ggml_cont(ctx0, ggml_permute(ctx0, qrotated, 2, 1, 0, 3)); + cb(qrotated, "qrotated", il); + + krotated = ggml_cont(ctx0, ggml_permute(ctx0, krotated, 2, 1, 0, 3)); + cb(krotated, "krotated", il); + + qpass = ggml_cont(ctx0, ggml_permute(ctx0, qpass, 2, 1, 0, 3)); + cb(qpass, "qpass", il); + + kpass = ggml_cont(ctx0, ggml_permute(ctx0, kpass, 2, 1, 0, 3)); + cb(kpass, "kpass", il); + + struct ggml_tensor * Qcur = ggml_concat(ctx0, qrotated, qpass); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_concat(ctx0, krotated, kpass); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Q = ggml_cont(ctx0, ggml_permute(ctx0, Qcur, 1, 2, 0, 3)); + cb(Q, "Q", il); + + Kcur = ggml_cont(ctx0, ggml_permute(ctx0, Kcur, 2, 1, 0, 3)); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_view_3d( + ctx0, tmpqkv_perm, n_embd_head, n_head, n_tokens, + ggml_element_size(tmpqkv_perm) * n_embd_head, + ggml_element_size(tmpqkv_perm) * n_embd_head * n_head, + ggml_element_size(tmpqkv_perm) * n_embd_head * n_head * n_tokens * 2 + ); + cb(Vcur, "Vcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + // TODO: not tested, could be broken + cur = llm_build_kqv(ctx0, Q, hparams, kv_self, + model.layers[il].wo, model.layers[il].bo, + Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, residual, cur); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, + model.layers[il].ffn_norm_b, + LLM_NORM, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + LLM_FFN_RELU_SQR, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, + model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + cur = 
ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + + struct ggml_cgraph * build_refact() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + cb(Kcur, "Kcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + cb(Qcur, "Qcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, Qcur, hparams, kv_self, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + + struct ggml_cgraph * build_bloom() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + inpL = llm_build_norm(ctx0, inpL, hparams, + model.tok_norm, + model.tok_norm_b, + LLM_NORM, cb, -1); + cb(inpL, "inp_norm", -1); + + for (int il = 0; il < n_layer; ++il) { + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + cur = ggml_mul_mat(ctx0, 
model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, Qcur, hparams, kv_self, + model.layers[il].wo, model.layers[il].bo, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, cb, il); + cb(cur, "kqv_out", il); + } + + // Add the input + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); + cb(ffn_inp, "ffn_inp", il); + + // FF + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, + model.layers[il].ffn_norm_b, + LLM_NORM, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); + } + + inpL = ggml_add(ctx0, cur, ffn_inp); + cb(inpL, "l_out", il); + } + + cur = llm_build_norm(ctx0, inpL, hparams, + model.output_norm, + model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + + struct ggml_cgraph * build_mpt() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * attn_norm; + + attn_norm = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, - LLM_NORM, norm_eps, cb, il); - cb(cur, "ffn_norm", il); + LLM_NORM, cb, il); + cb(attn_norm, "attn_norm", il); - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, NULL, - NULL, NULL, - model.layers[il].ffn_down, NULL, - LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); - cb(cur, "ffn_out", il); + // self-attention + { + cur = attn_norm; + + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + if (hparams.f_clamp_kqv > 0.0f) { + cur = ggml_clamp(ctx0, cur, -hparams.f_clamp_kqv, hparams.f_clamp_kqv); + cb(cur, "wqkv_clamped", il); + } + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, 
"Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, Qcur, hparams, kv_self, + model.layers[il].wo, NULL, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, cb, il); + cb(cur, "kqv_out", il); + } + + // Add the input + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL); + cb(ffn_inp, "ffn_inp", il); + + // feed forward + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, + NULL, + LLM_NORM, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + NULL, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; } - cur = ggml_add(ctx0, cur, ffn_inp); - cb(cur, "l_out", il); + cur = inpL; - // input for next layer - inpL = cur; + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, + NULL, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; } - - cur = inpL; - - cur = llm_build_norm(ctx0, cur, - model.output_norm, - NULL, - LLM_NORM, norm_eps, cb, -1); - cb(cur, "result_norm", -1); - - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - - ggml_free(ctx0); - - return gf; -} +}; // // tensor offloading helpers @@ -5122,43 +4862,49 @@ static struct ggml_cgraph * llama_build_graph( struct ggml_cgraph * result = NULL; + struct llm_build_context llm(lctx, batch, cb, worst_case); + + llm.init(); + switch (model.arch) { case LLM_ARCH_LLAMA: { - result = llm_build_llama(lctx, batch, cb, worst_case); + result = llm.build_llama(); } break; case LLM_ARCH_BAICHUAN: { - result = llm_build_baichaun(lctx, batch, cb, worst_case); + result = llm.build_baichuan(); } break; case LLM_ARCH_FALCON: { - result = llm_build_falcon(lctx, batch, cb, worst_case); + result = llm.build_falcon(); } break; case LLM_ARCH_STARCODER: { - result = llm_build_starcoder(lctx, batch, cb, worst_case); + result = llm.build_starcoder(); } break; case LLM_ARCH_PERSIMMON: { - result = llm_build_persimmon(lctx, batch, cb, worst_case); + result = llm.build_persimmon(); } break; case LLM_ARCH_REFACT: { - result = llm_build_refact(lctx, batch, cb, worst_case); + result = llm.build_refact(); } break; case LLM_ARCH_BLOOM: { - result = llm_build_bloom(lctx, batch, cb, worst_case); + result = llm.build_bloom(); } break; case LLM_ARCH_MPT: { - result = llm_build_mpt(lctx, batch, cb, worst_case); + result = llm.build_mpt(); } break; default: GGML_ASSERT(false); } + llm.free(); + if (worst_case) { int n_non_view_total = 0; From ff8f9a88da0018972dfdf6fe64b5c8992caabd9c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 1 Nov 2023 21:15:55 +0200 Subject: [PATCH 058/859] common : minor (#3715) --- common/common.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 89be41261..7a48e9d11 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -110,8 +110,8 @@ bool gpt_params_parse(int argc, char ** argv, gpt_params & params) { exit(0); } } - catch (const std::invalid_argument& ex) { - fprintf(stderr, ex.what()); + 
catch (const std::invalid_argument & ex) { + fprintf(stderr, "%s\n", ex.what()); gpt_print_usage(argc, argv, gpt_params()); exit(1); } From e16b9fa4baa8a09c6619b116159830e898050942 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 1 Nov 2023 21:25:00 +0200 Subject: [PATCH 059/859] metal : multi-simd softmax (#3710) ggml-ci --- ggml-metal.m | 9 +++- ggml-metal.metal | 129 +++++++++++++++++++++++++++++++++++++---------- 2 files changed, 108 insertions(+), 30 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index bc881395a..1f0341507 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1001,11 +1001,15 @@ void ggml_metal_graph_compute( } break; case GGML_OP_SOFT_MAX: { - const int nth = MIN(32, ne00); + int nth = 32; // SIMD width if (ne00%4 == 0) { [encoder setComputePipelineState:ctx->pipeline_soft_max_4]; } else { + do { + nth *= 2; + } while (nth <= ne00 && nth <= 1024); + nth /= 2; [encoder setComputePipelineState:ctx->pipeline_soft_max]; } [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1013,8 +1017,9 @@ void ggml_metal_graph_compute( [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setThreadgroupMemoryLength:nth/32*sizeof(float) atIndex:0]; - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; case GGML_OP_DIAG_MASK_INF: { diff --git a/ggml-metal.metal b/ggml-metal.metal index f4b460564..f3152778a 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -184,36 +184,73 @@ kernel void kernel_soft_max( constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, - uint3 tgpig[[threadgroup_position_in_grid]], - uint3 tpitg[[thread_position_in_threadgroup]], - uint3 ntg[[threads_per_threadgroup]]) { - const int64_t i03 = tgpig[2]; - const int64_t i02 = tgpig[1]; - const int64_t i01 = tgpig[0]; + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t i03 = (tgpig) / (ne02*ne01); + const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; + const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); device const float * psrc0 = src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; device float * pdst = dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; // parallel max - float lmax = tpitg[0] < ne00 ? psrc0[tpitg[0]] : -INFINITY; - for (int i00 = tpitg[0] + ntg[0]; i00 < ne00; i00 += ntg[0]) { + float lmax = tpitg < ne00 ? 
psrc0[tpitg] : -INFINITY; + + for (int i00 = tpitg + ntg; i00 < ne00; i00 += ntg) { lmax = MAX(lmax, psrc0[i00]); } - const float max = simd_max(lmax); + + float max = simd_max(lmax); + if (tiisg == 0) { + buf[sgitg] = max; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // broadcast, simd group number is ntg / 32 + for (uint i = ntg / 32 / 2; i > 0; i /= 2) { + if (tpitg < i) { + buf[tpitg] = MAX(buf[tpitg], buf[tpitg + i]); + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max = buf[0]; // parallel sum float lsum = 0.0f; - for (int i00 = tpitg[0]; i00 < ne00; i00 += ntg[0]) { + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { const float exp_psrc0 = exp(psrc0[i00] - max); lsum += exp_psrc0; // Remember the result of exp here. exp is expensive, so we really do not - // whish to compute it twice. + // wish to compute it twice. pdst[i00] = exp_psrc0; } - const float sum = simd_sum(lsum); + float sum = simd_sum(lsum); + if (tiisg == 0) { + buf[sgitg] = sum; + } - for (int i00 = tpitg[0]; i00 < ne00; i00 += ntg[0]) { + threadgroup_barrier(mem_flags::mem_threadgroup); + + // broadcast, simd group number is ntg / 32 + for (uint i = ntg / 32 / 2; i > 0; i /= 2) { + if (tpitg < i) { + buf[tpitg] += buf[tpitg + i]; + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + sum = buf[0]; + + for (int i00 = tpitg; i00 < ne00; i00 += ntg) { pdst[i00] /= sum; } } @@ -224,37 +261,73 @@ kernel void kernel_soft_max_4( constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, - uint3 tgpig[[threadgroup_position_in_grid]], - uint3 tpitg[[thread_position_in_threadgroup]], - uint3 ntg[[threads_per_threadgroup]]) { - const int64_t i03 = tgpig[2]; - const int64_t i02 = tgpig[1]; - const int64_t i01 = tgpig[0]; + threadgroup float * buf [[threadgroup(0)]], + uint tgpig[[threadgroup_position_in_grid]], + uint tpitg[[thread_position_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint ntg[[threads_per_threadgroup]]) { + const int64_t i03 = (tgpig) / (ne02*ne01); + const int64_t i02 = (tgpig - i03*ne02*ne01) / ne01; + const int64_t i01 = (tgpig - i03*ne02*ne01 - i02*ne01); device const float4 * psrc4 = (device const float4 *)(src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); device float4 * pdst4 = (device float4 *)(dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); // parallel max - float4 lmax4 = tpitg[0] < ne00/4 ? psrc4[tpitg[0]] : -INFINITY; - for (int i00 = tpitg[0] + ntg[0]; i00 < ne00/4; i00 += ntg[0]) { + float4 lmax4 = tpitg < ne00/4 ? 
psrc4[tpitg] : -INFINITY; + + for (int i00 = tpitg + ntg; i00 < ne00/4; i00 += ntg) { lmax4 = fmax(lmax4, psrc4[i00]); } - float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3])); - const float max = simd_max(lmax); + const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3])); + float max = simd_max(lmax); + if (tiisg == 0) { + buf[sgitg] = max; + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // broadcast, simd group number is ntg / 32 + for (uint i = ntg / 32 / 2; i > 0; i /= 2) { + if (tpitg < i) { + buf[tpitg] = MAX(buf[tpitg], buf[tpitg + i]); + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + max = buf[0]; // parallel sum float4 lsum4 = 0.0f; - for (int i00 = tpitg[0]; i00 < ne00/4; i00 += ntg[0]) { + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { const float4 exp_psrc4 = exp(psrc4[i00] - max); lsum4 += exp_psrc4; pdst4[i00] = exp_psrc4; } - float lsum = lsum4[0] + lsum4[1] + lsum4[2] + lsum4[3]; - const float sum = simd_sum(lsum); + const float lsum = lsum4[0] + lsum4[1] + lsum4[2] + lsum4[3]; + float sum = simd_sum(lsum); + if (tiisg == 0) { + buf[sgitg] = sum; + } - for (int i00 = tpitg[0]; i00 < ne00/4; i00 += ntg[0]) { + threadgroup_barrier(mem_flags::mem_threadgroup); + + // broadcast, simd group number is ntg / 32 + for (uint i = ntg / 32 / 2; i > 0; i /= 2) { + if (tpitg < i) { + buf[tpitg] += buf[tpitg + i]; + } + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + sum = buf[0]; + + for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { pdst4[i00] /= sum; } } @@ -274,7 +347,7 @@ kernel void kernel_diag_mask_inf( dst[i02*ne01*ne00 + i01*ne00 + i00] = -INFINITY; } else { dst[i02*ne01*ne00 + i01*ne00 + i00] = src0[i02*ne01*ne00 + i01*ne00 + i00]; - } + } } kernel void kernel_diag_mask_inf_8( From 523e49b11174368cd73460fa5eae7b39d856f300 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 1 Nov 2023 23:00:50 +0200 Subject: [PATCH 060/859] llm : fix falcon norm after refactoring (#3837) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index d0c4ef101..17cf364bb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3763,7 +3763,7 @@ struct llm_build_context { { if (model.layers[il].attn_norm_2) { // Falcon-40B - cur = llm_build_norm(ctx0, attn_norm, hparams, + cur = llm_build_norm(ctx0, inpL, hparams, model.layers[il].attn_norm_2, model.layers[il].attn_norm_2_b, LLM_NORM, cb, il); From c43c2da8afacaddfe51c09b21dbd9922cd0ea46b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 1 Nov 2023 23:08:30 +0200 Subject: [PATCH 061/859] llm : fix llm_build_kqv taking unused tensor (benign, #3837) --- llama.cpp | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/llama.cpp b/llama.cpp index 17cf364bb..1c6d482f8 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3345,7 +3345,6 @@ static struct ggml_tensor * llm_build_ffn( // if max_alibi_bias > 0 then apply ALiBi static struct ggml_tensor * llm_build_kqv( struct ggml_context * ctx, - struct ggml_tensor * cur, const llama_hparams & hparams, const llama_kv_cache & kv, struct ggml_tensor * wo, @@ -3411,7 +3410,7 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * kqv_merged = ggml_permute(ctx, kqv, 0, 2, 1, 3); cb(kqv_merged, "kqv_merged", il); - cur = ggml_cont_2d(ctx, kqv_merged, n_embd, n_tokens); + struct ggml_tensor * cur = ggml_cont_2d(ctx, kqv_merged, n_embd, n_tokens); cb(cur, "kqv_merged_cont", il); cur = ggml_mul_mat(ctx, wo, cur); @@ -3565,7 +3564,7 @@ struct llm_build_context { 
llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, cur, hparams, kv_self, + cur = llm_build_kqv(ctx0, hparams, kv_self, model.layers[il].wo, NULL, Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); cb(cur, "kqv_out", il); @@ -3677,7 +3676,7 @@ struct llm_build_context { // apply ALiBi for 13B model const float max_alibi_bias = model.type == MODEL_13B ? 8.0f : -1.0f; - cur = llm_build_kqv(ctx0, cur, hparams, kv_self, + cur = llm_build_kqv(ctx0, hparams, kv_self, model.layers[il].wo, NULL, Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, cb, il); cb(cur, "kqv_out", il); @@ -3795,7 +3794,7 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, attn_norm, hparams, kv_self, + cur = llm_build_kqv(ctx0, hparams, kv_self, model.layers[il].wo, NULL, Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); cb(cur, "kqv_out", il); @@ -3895,7 +3894,7 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, cur, hparams, kv_self, + cur = llm_build_kqv(ctx0, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); cb(cur, "kqv_out", il); @@ -4100,7 +4099,7 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); // TODO: not tested, could be broken - cur = llm_build_kqv(ctx0, Q, hparams, kv_self, + cur = llm_build_kqv(ctx0, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); cb(cur, "kqv_out", il); @@ -4191,7 +4190,7 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, Qcur, hparams, kv_self, + cur = llm_build_kqv(ctx0, hparams, kv_self, model.layers[il].wo, NULL, Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, cb, il); cb(cur, "kqv_out", il); @@ -4288,7 +4287,7 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, Qcur, hparams, kv_self, + cur = llm_build_kqv(ctx0, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, cb, il); cb(cur, "kqv_out", il); @@ -4382,7 +4381,7 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, Qcur, hparams, kv_self, + cur = llm_build_kqv(ctx0, hparams, kv_self, model.layers[il].wo, NULL, Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, cb, il); cb(cur, "kqv_out", il); From 898aeca90a9bb992f506234cf3b8b7f7fa28a1df Mon Sep 17 00:00:00 2001 From: cebtenzzre Date: Wed, 1 Nov 2023 18:04:33 -0400 Subject: [PATCH 062/859] llama : implement YaRN RoPE scaling (#2268) Co-authored-by: cebtenzzre Co-authored-by: Jeffrey Quesnelle --- common/common.cpp | 79 +++++- common/common.h | 7 + convert-baichuan-hf-to-gguf.py | 3 +- convert.py | 97 ++++--- examples/finetune/finetune.cpp | 5 +- examples/server/server.cpp | 59 ++++- .../train-text-from-scratch.cpp | 6 +- ggml-cuda.cu | 153 ++++++++--- ggml-metal.m | 22 +- ggml-metal.metal | 61 ++++- ggml.c | 241 +++++++++++++----- ggml.h | 20 +- gguf-py/gguf/gguf.py | 29 ++- 
llama.cpp | 220 ++++++++++++---- llama.h | 18 +- 15 files changed, 763 insertions(+), 257 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 7a48e9d11..b182ffaae 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -219,12 +219,52 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.rope_freq_scale = std::stof(argv[i]); + } else if (arg == "--rope-scaling") { + if (++i >= argc) { + invalid_param = true; + break; + } + std::string value(argv[i]); + /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_NONE; } + else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_LINEAR; } + else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_YARN; } + else { invalid_param = true; break; } } else if (arg == "--rope-scale") { if (++i >= argc) { invalid_param = true; break; } params.rope_freq_scale = 1.0f/std::stof(argv[i]); + } else if (arg == "--yarn-orig-ctx") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_orig_ctx = std::stoi(argv[i]); + } else if (arg == "--yarn-ext-factor") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_ext_factor = std::stof(argv[i]); + } else if (arg == "--yarn-attn-factor") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_attn_factor = std::stof(argv[i]); + } else if (arg == "--yarn-beta-fast") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_beta_fast = std::stof(argv[i]); + } else if (arg == "--yarn-beta-slow") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_beta_slow = std::stof(argv[i]); } else if (arg == "--memory-f32") { params.memory_f16 = false; } else if (arg == "--top-p") { @@ -716,9 +756,16 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --cfg-negative-prompt-file FNAME\n"); printf(" negative prompt file to use for guidance. 
(default: empty)\n"); printf(" --cfg-scale N strength of guidance (default: %f, 1.0 = disable)\n", sparams.cfg_scale); - printf(" --rope-scale N RoPE context linear scaling factor, inverse of --rope-freq-scale\n"); + printf(" --rope-scaling {none,linear,yarn}\n"); + printf(" RoPE frequency scaling method, defaults to linear unless specified by the model\n"); + printf(" --rope-scale N RoPE context scaling factor, expands context by a factor of N\n"); printf(" --rope-freq-base N RoPE base frequency, used by NTK-aware scaling (default: loaded from model)\n"); - printf(" --rope-freq-scale N RoPE frequency linear scaling factor (default: loaded from model)\n"); + printf(" --rope-freq-scale N RoPE frequency scaling factor, expands context by a factor of 1/N\n"); + printf(" --yarn-orig-ctx N YaRN: original context size of model (default: 0 = model training context size)\n"); + printf(" --yarn-ext-factor N YaRN: extrapolation mix factor (default: 1.0, 0.0 = full interpolation)\n"); + printf(" --yarn-attn-factor N YaRN: scale sqrt(t) or attention magnitude (default: 1.0)\n"); + printf(" --yarn-beta-slow N YaRN: high correction dim or alpha (default: %.1f)\n", params.yarn_beta_slow); + printf(" --yarn-beta-fast N YaRN: low correction dim or beta (default: %.1f)\n", params.yarn_beta_fast); printf(" --ignore-eos ignore end of stream token and continue generating (implies --logit-bias 2-inf)\n"); printf(" --no-penalize-nl do not penalize newline token\n"); printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); @@ -826,17 +873,23 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params & struct llama_context_params llama_context_params_from_gpt_params(const gpt_params & params) { auto cparams = llama_context_default_params(); - cparams.n_ctx = params.n_ctx; - cparams.n_batch = params.n_batch; - cparams.n_threads = params.n_threads; - cparams.n_threads_batch = params.n_threads_batch == -1 ? params.n_threads : params.n_threads_batch; - cparams.mul_mat_q = params.mul_mat_q; - cparams.seed = params.seed; - cparams.f16_kv = params.memory_f16; - cparams.logits_all = params.logits_all; - cparams.embedding = params.embedding; - cparams.rope_freq_base = params.rope_freq_base; - cparams.rope_freq_scale = params.rope_freq_scale; + cparams.n_ctx = params.n_ctx; + cparams.n_batch = params.n_batch; + cparams.n_threads = params.n_threads; + cparams.n_threads_batch = params.n_threads_batch == -1 ? params.n_threads : params.n_threads_batch; + cparams.mul_mat_q = params.mul_mat_q; + cparams.seed = params.seed; + cparams.f16_kv = params.memory_f16; + cparams.logits_all = params.logits_all; + cparams.embedding = params.embedding; + cparams.rope_scaling_type = params.rope_scaling_type; + cparams.rope_freq_base = params.rope_freq_base; + cparams.rope_freq_scale = params.rope_freq_scale; + cparams.yarn_ext_factor = params.yarn_ext_factor; + cparams.yarn_attn_factor = params.yarn_attn_factor; + cparams.yarn_beta_fast = params.yarn_beta_fast; + cparams.yarn_beta_slow = params.yarn_beta_slow; + cparams.yarn_orig_ctx = params.yarn_orig_ctx; return cparams; } diff --git a/common/common.h b/common/common.h index 343b27217..7be69f925 100644 --- a/common/common.h +++ b/common/common.h @@ -9,6 +9,7 @@ #define LOG_NO_FILE_LINE_FUNCTION #include "log.h" +#include #include #include #include @@ -54,6 +55,12 @@ struct gpt_params { int32_t n_beams = 0; // if non-zero then use beam search of given width. 
float rope_freq_base = 0.0f; // RoPE base frequency float rope_freq_scale = 0.0f; // RoPE frequency scaling factor + float yarn_ext_factor = NAN; // YaRN extrapolation mix factor + float yarn_attn_factor = 1.0f; // YaRN magnitude scaling factor + float yarn_beta_fast = 32.0f;// YaRN low correction dim + float yarn_beta_slow = 1.0f; // YaRN high correction dim + int32_t yarn_orig_ctx = 0; // YaRN original context length + int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // // sampling parameters struct llama_sampling_params sparams; diff --git a/convert-baichuan-hf-to-gguf.py b/convert-baichuan-hf-to-gguf.py index 5ee99be73..67ccbe99f 100755 --- a/convert-baichuan-hf-to-gguf.py +++ b/convert-baichuan-hf-to-gguf.py @@ -163,7 +163,8 @@ gguf_writer.add_layer_norm_rms_eps(hparams["rms_norm_eps"]) if "rope_scaling" in hparams and hparams["rope_scaling"] != None and "factor" in hparams["rope_scaling"]: if "type" in hparams["rope_scaling"]: if hparams["rope_scaling"]["type"] == "linear": - gguf_writer.add_rope_scale_linear(hparams["rope_scaling"]["factor"]) + gguf_writer.add_rope_scaling_type(gguf.RopeScalingType.LINEAR) + gguf_writer.add_rope_scaling_factor(hparams["rope_scaling"]["factor"]) # TOKENIZATION diff --git a/convert.py b/convert.py index bfbfab283..9110f1580 100755 --- a/convert.py +++ b/convert.py @@ -151,8 +151,11 @@ class Params: n_head_kv: int f_norm_eps: float + rope_scaling_type: gguf.RopeScalingType | None = None f_rope_freq_base: float | None = None f_rope_scale: float | None = None + n_orig_ctx: int | None = None + rope_finetuned: bool | None = None ftype: GGMLFileType | None = None @@ -198,20 +201,20 @@ class Params: def loadHFTransformerJson(model: LazyModel, config_path: Path) -> Params: config = json.load(open(config_path)) - n_vocab = config["vocab_size"] - n_embd = config["hidden_size"] - n_layer = config["num_hidden_layers"] - n_ff = config["intermediate_size"] - n_head = config["num_attention_heads"] - n_head_kv = config["num_key_value_heads"] if "num_key_value_heads" in config else n_head - f_norm_eps = config["rms_norm_eps"] - f_rope_freq_base = config["rope_theta"] if "rope_theta" in config else None - + rope_scaling_type = f_rope_scale = n_orig_ctx = rope_finetuned = None rope_scaling = config.get("rope_scaling") - if isinstance(rope_scaling, dict) and rope_scaling.get("type") == "linear": - f_rope_scale = config["rope_scaling"].get("factor") - else: - f_rope_scale = None + + if rope_scaling is not None and (typ := rope_scaling.get("type")): + rope_factor = rope_scaling.get("factor") + f_rope_scale = rope_factor + if typ == "linear": + rope_scaling_type = gguf.RopeScalingType.LINEAR + elif typ == "yarn": + rope_scaling_type = gguf.RopeScalingType.YARN + n_orig_ctx = rope_scaling['original_max_position_embeddings'] + rope_finetuned = rope_scaling['finetuned'] + else: + raise NotImplementedError(f'Unknown rope scaling type: {typ}') if "max_sequence_length" in config: n_ctx = config["max_sequence_length"] @@ -222,16 +225,19 @@ class Params: "Suggestion: provide 'config.json' of the model in the same directory containing model files.") return Params( - n_vocab = n_vocab, - n_embd = n_embd, - n_layer = n_layer, - n_ctx = n_ctx, - n_ff = n_ff, - n_head = n_head, - n_head_kv = n_head_kv, - f_norm_eps = f_norm_eps, - f_rope_freq_base = f_rope_freq_base, - f_rope_scale = f_rope_scale, + n_vocab = config["vocab_size"], + n_embd = config["hidden_size"], + n_layer = config["num_hidden_layers"], + n_ctx = n_ctx, + n_ff = config["intermediate_size"], + n_head = (n_head 
:= config["num_attention_heads"]), + n_head_kv = config.get("num_key_value_heads", n_head), + f_norm_eps = config["rms_norm_eps"], + f_rope_freq_base = config.get("rope_theta"), + rope_scaling_type = rope_scaling_type, + f_rope_scale = f_rope_scale, + n_orig_ctx = n_orig_ctx, + rope_finetuned = rope_finetuned, ) # LLaMA v2 70B params.json @@ -240,17 +246,8 @@ class Params: def loadOriginalParamsJson(model: LazyModel, config_path: Path) -> Params: config = json.load(open(config_path)) - n_vocab = config["vocab_size"] if "vocab_size" in config else -1 - n_embd = config["dim"] - n_layer = config["n_layers"] - n_ff = -1 - n_head = config["n_heads"] - n_head_kv = config["n_kv_heads"] if "n_kv_heads" in config else n_head - f_norm_eps = config["norm_eps"] - f_rope_freq_base = config["rope_theta"] if "rope_theta" in config else None - # hack to determine LLaMA v1 vs v2 vs CodeLlama - if f_rope_freq_base == 1000000: + if config.get("rope_theta") == 1000000: # CodeLlama n_ctx = 16384 elif config["norm_eps"] == 1e-05: @@ -260,22 +257,16 @@ class Params: # LLaMA v1 n_ctx = 2048 - if n_vocab == -1: - n_vocab = model["tok_embeddings.weight"].shape[0] - - if n_ff == -1: - n_ff = model["layers.0.feed_forward.w1.weight"].shape[0] - return Params( - n_vocab = n_vocab, - n_embd = n_embd, - n_layer = n_layer, + n_vocab = config.get("vocab_size", model["tok_embeddings.weight"].shape[0]), + n_embd = config["dim"], + n_layer = config["n_layers"], n_ctx = n_ctx, - n_ff = n_ff, - n_head = n_head, - n_head_kv = n_head_kv, - f_norm_eps = f_norm_eps, - f_rope_freq_base = f_rope_freq_base, + n_ff = model["layers.0.feed_forward.w1.weight"].shape[0], + n_head = (n_head := config["n_heads"]), + n_head_kv = config.get("n_kv_heads", n_head), + f_norm_eps = config["norm_eps"], + f_rope_freq_base = config.get("rope_theta"), ) @staticmethod @@ -831,8 +822,16 @@ class OutputFile: if params.f_rope_freq_base is not None: self.gguf.add_rope_freq_base(params.f_rope_freq_base) - if params.f_rope_scale is not None: - self.gguf.add_rope_scale_linear(params.f_rope_scale) + if params.rope_scaling_type: + assert params.f_rope_scale is not None + self.gguf.add_rope_scaling_type(params.rope_scaling_type) + self.gguf.add_rope_scaling_factor(params.f_rope_scale) + + if params.n_orig_ctx is not None: + self.gguf.add_rope_scaling_orig_ctx_len(params.n_orig_ctx) + + if params.rope_finetuned is not None: + self.gguf.add_rope_scaling_finetuned(params.rope_finetuned) if params.ftype is not None: self.gguf.add_file_type(params.ftype) diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index 60c7faa79..649a3b7c1 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -642,8 +642,9 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( const int rope_mode = 0; return ggml_rope_custom(ctx, - t, KQ_pos, n_rot, rope_mode, n_ctx, - rope_freq_base, rope_freq_scale); + t, KQ_pos, n_rot, rope_mode, n_ctx, 0, + rope_freq_base, rope_freq_scale, 0.0f, 0.0f, 0.0f, 0.0f + ); }; set_name(tokens_input, "tokens_input"); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 47ae0d558..84b04d5a0 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1755,12 +1755,18 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf("options:\n"); printf(" -h, --help show this help message and exit\n"); printf(" -v, --verbose verbose output (default: %s)\n", server_verbose ? 
"enabled" : "disabled"); - printf(" -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); + printf(" -t N, --threads N number of threads to use during computation (default: %d)\n", params.n_threads); printf(" -tb N, --threads-batch N number of threads to use during batch and prompt processing (default: same as --threads)\n"); - printf(" -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); + printf(" -c N, --ctx-size N size of the prompt context (default: %d)\n", params.n_ctx); + printf(" --rope-scaling {none,linear,yarn}\n"); + printf(" RoPE frequency scaling method, defaults to linear unless specified by the model\n"); printf(" --rope-freq-base N RoPE base frequency (default: loaded from model)\n"); - printf(" --rope-freq-scale N RoPE frequency scaling factor (default: loaded from model)\n"); - printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); + printf(" --rope-freq-scale N RoPE frequency scaling factor, expands context by a factor of 1/N\n"); + printf(" --yarn-ext-factor N YaRN: extrapolation mix factor (default: 1.0, 0.0 = full interpolation)\n"); + printf(" --yarn-attn-factor N YaRN: scale sqrt(t) or attention magnitude (default: 1.0)\n"); + printf(" --yarn-beta-slow N YaRN: high correction dim or alpha (default: %.1f)\n", params.yarn_beta_slow); + printf(" --yarn-beta-fast N YaRN: low correction dim or beta (default: %.1f)\n", params.yarn_beta_fast); + printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); if (llama_mlock_supported()) @@ -1881,6 +1887,19 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.n_ctx = std::stoi(argv[i]); } + else if (arg == "--rope-scaling") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + std::string value(argv[i]); + /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_NONE; } + else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_LINEAR; } + else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_YARN; } + else { invalid_param = true; break; } + } else if (arg == "--rope-freq-base") { if (++i >= argc) @@ -1899,6 +1918,38 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.rope_freq_scale = std::stof(argv[i]); } + else if (arg == "--yarn-ext-factor") + { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_ext_factor = std::stof(argv[i]); + } + else if (arg == "--yarn-attn-factor") + { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_attn_factor = std::stof(argv[i]); + } + else if (arg == "--yarn-beta-fast") + { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_beta_fast = std::stof(argv[i]); + } + else if (arg == "--yarn-beta-slow") + { + if (++i >= argc) { + invalid_param = true; + break; + } + params.yarn_beta_slow = std::stof(argv[i]); + } else if (arg == "--memory-f32" || arg == "--memory_f32") { params.memory_f16 = false; diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index 1ce6cef29..2a257e632 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ 
b/examples/train-text-from-scratch/train-text-from-scratch.cpp
@@ -349,9 +349,9 @@ static struct ggml_tensor * llama_build_train_graphs(
     // not capturing these, to silence warnings
     const int rope_mode = 0;
 
-        return ggml_rope_custom(ctx,
-            t, KQ_pos, n_rot, rope_mode, n_ctx,
-            rope_freq_base, rope_freq_scale);
+        return ggml_rope_custom(
+            ctx, t, KQ_pos, n_rot, rope_mode, n_ctx, 0, rope_freq_base, rope_freq_scale, 0.0f, 1.0f, 0.0f, 0.0f
+        );
     };
 
     set_name(tokens_input, "tokens_input");
diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 4e6e7cd94..12ee10e3d 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -4493,11 +4493,41 @@ static __global__ void cpy_f32_f16(const char * cx, char * cdst, const int ne,
     cpy_1(cx + x_offset, cdst + dst_offset);
 }
 
-// rope == RoPE == rotary positional embedding
+static __device__ float rope_yarn_ramp(const float low, const float high, const int i0) {
+    const float y = (i0 / 2 - low) / max(0.001f, high - low);
+    return 1.0f - min(1.0f, max(0.0f, y));
+}
+struct rope_corr_dims {
+    float v[4];
+};
+
+// YaRN algorithm based on LlamaYaRNScaledRotaryEmbedding.py from https://github.com/jquesnelle/yarn
+// MIT licensed. Copyright (c) 2023 Jeffrey Quesnelle and Bowen Peng.
+static __device__ void rope_yarn(
+    float theta_extrap, float freq_scale, rope_corr_dims corr_dims, int64_t i0, float ext_factor, float mscale,
+    float * cos_theta, float * sin_theta
+) {
+    // Get n-d rotational scaling corrected for extrapolation
+    float theta_interp = freq_scale * theta_extrap;
+    float theta = theta_interp;
+    if (ext_factor != 0.0f) {
+        float ramp_mix = rope_yarn_ramp(corr_dims.v[0], corr_dims.v[1], i0) * ext_factor;
+        theta = theta_interp * (1 - ramp_mix) + theta_extrap * ramp_mix;
+
+        // Get n-d magnitude scaling corrected for interpolation
+        mscale *= 1.0f + 0.1f * logf(1.0f / freq_scale);
+    }
+    *cos_theta = cosf(theta) * mscale;
+    *sin_theta = sinf(theta) * mscale;
+}
+
+// rope == RoPE == rotary positional embedding
 template<typename T, bool has_pos>
-static __global__ void rope(const T * x, T * dst, const int ncols, const int32_t * pos, const float freq_scale,
-                            const int p_delta_rows, const float theta_scale) {
+static __global__ void rope(
+    const T * x, T * dst, int ncols, const int32_t * pos, float freq_scale, int p_delta_rows, float freq_base,
+    float ext_factor, float attn_factor, rope_corr_dims corr_dims
+) {
     const int col = 2*(blockDim.y*blockIdx.y + threadIdx.y);
 
     if (col >= ncols) {
@@ -4509,10 +4539,10 @@ static __global__ void rope(const T * x, T * dst, const int ncols, const int32_t
     const int i2 = row/p_delta_rows;
 
     const int p = has_pos ? pos[i2] : 0;
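+    // note: theta_base below is the *unscaled* RoPE angle p * freq_base^(-col/ncols);
+    // rope_yarn() is what applies freq_scale together with the YaRN corrections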
-    const float p0 = p*freq_scale;
-    const float theta = p0*powf(theta_scale, col/2);
-    const float sin_theta = sinf(theta);
-    const float cos_theta = cosf(theta);
+    const float theta_base = p*powf(freq_base, -col/ncols);
+
+    float cos_theta, sin_theta;
+    rope_yarn(theta_base, freq_scale, corr_dims, col, ext_factor, attn_factor, &cos_theta, &sin_theta);
 
     const float x0 = x[i + 0];
     const float x1 = x[i + 1];
@@ -4522,8 +4552,10 @@ static __global__ void rope(const T * x, T * dst, const int ncols, const int32_t
 }
 
 template<typename T, bool has_pos>
-static __global__ void rope_neox(const T * x, T * dst, const int ncols, const int32_t * pos, const float freq_scale,
-                                 const int p_delta_rows, const float theta_scale) {
+static __global__ void rope_neox(
+    const T * x, T * dst, int ncols, const int32_t * pos, float freq_scale, int p_delta_rows, float freq_base,
+    float ext_factor, float attn_factor, rope_corr_dims corr_dims
+) {
     const int col = 2*(blockDim.y*blockIdx.y + threadIdx.y);
 
     if (col >= ncols) {
@@ -4534,11 +4566,14 @@ static __global__ void rope_neox(const T * x, T * dst, const int ncols, const in
     const int i = row*ncols + col/2;
     const int i2 = row/p_delta_rows;
 
+    // simplified from `(row * ncols + col) * (-1 / ncols)`
+    const float cur_rot = -col/ncols - row;
+
     const int p = has_pos ? pos[i2] : 0;
-    const float p0 = p*freq_scale;
-    const float theta = p0*powf(theta_scale, col/2);
-    const float sin_theta = sinf(theta);
-    const float cos_theta = cosf(theta);
+    const float theta_base = p*powf(freq_base, cur_rot);
+
+    float cos_theta, sin_theta;
+    rope_yarn(theta_base, freq_scale, corr_dims, cur_rot, ext_factor, attn_factor, &cos_theta, &sin_theta);
 
     const float x0 = x[i + 0];
     const float x1 = x[i + ncols/2];
@@ -4547,8 +4582,10 @@ static __global__ void rope_neox(const T * x, T * dst, const int ncols, const in
     dst[i + ncols/2] = x0*sin_theta + x1*cos_theta;
 }
 
-static __global__ void rope_glm_f32(const float * x, float * dst, const int ncols, const int32_t * pos, const float freq_scale,
-                                    const int p_delta_rows, const float theta_scale, const int n_ctx) {
+static __global__ void rope_glm_f32(
+    const float * x, float * dst, int ncols, const int32_t * pos, float freq_scale, int p_delta_rows, float freq_base,
+    int n_ctx
+) {
     const int col = blockDim.x*blockIdx.x + threadIdx.x;
     const int half_n_dims = ncols/4;
 
@@ -4560,7 +4597,7 @@ static __global__ void rope_glm_f32(const float * x, float * dst, const int ncol
     const int i = row*ncols + col;
     const int i2 = row/p_delta_rows;
 
-    const float col_theta_scale = powf(theta_scale, col);
+    const float col_theta_scale = powf(freq_base, -2.0f*col/ncols); // FIXME: this is likely wrong
 
     const int p = pos != nullptr ? pos[i2] : 0;
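+    // the GLM branch keeps plain RoPE: it only trades theta_scale for an explicit
+    // power of freq_base and receives no YaRN corrections (it is also unreachable
+    // for now -- see the GGML_ASSERT(false) on the is_glm path in ggml_cuda_op_rope)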
@@ -5584,40 +5621,54 @@ static void clamp_f32_cuda(const float * x, float * dst, const float min, const
 }
 
 template<typename T>
-static void rope_cuda(const T * x, T * dst, const int ncols, const int nrows, const int32_t * pos, const float freq_scale,
-                      const int p_delta_rows, const float theta_scale, cudaStream_t stream) {
+static void rope_cuda(
+    const T * x, T * dst, int ncols, int nrows, const int32_t * pos, float freq_scale, int p_delta_rows,
+    float freq_base, float ext_factor, float attn_factor, rope_corr_dims corr_dims, cudaStream_t stream
+) {
     GGML_ASSERT(ncols % 2 == 0);
     const dim3 block_dims(1, CUDA_ROPE_BLOCK_SIZE, 1);
     const int num_blocks_x = (ncols + 2*CUDA_ROPE_BLOCK_SIZE - 1) / (2*CUDA_ROPE_BLOCK_SIZE);
     const dim3 block_nums(nrows, num_blocks_x, 1);
     if (pos == nullptr) {
-        rope<T, false><<<block_nums, block_dims, 0, stream>>>(x, dst, ncols, pos, freq_scale, p_delta_rows, theta_scale);
+        rope<T, false><<<block_nums, block_dims, 0, stream>>>(
+            x, dst, ncols, pos, freq_scale, p_delta_rows, freq_base, ext_factor, attn_factor, corr_dims
+        );
     } else {
-        rope<T, true><<<block_nums, block_dims, 0, stream>>>(x, dst, ncols, pos, freq_scale, p_delta_rows, theta_scale);
+        rope<T, true><<<block_nums, block_dims, 0, stream>>>(
+            x, dst, ncols, pos, freq_scale, p_delta_rows, freq_base, ext_factor, attn_factor, corr_dims
+        );
     }
 }
 
 template<typename T>
-static void rope_neox_cuda(const T * x, T * dst, const int ncols, const int nrows, const int32_t * pos, const float freq_scale,
-                           const int p_delta_rows, const float theta_scale, cudaStream_t stream) {
+static void rope_neox_cuda(
+    const T * x, T * dst, int ncols, int nrows, const int32_t * pos, float freq_scale, int p_delta_rows,
+    float freq_base, float ext_factor, float attn_factor, rope_corr_dims corr_dims, cudaStream_t stream
+) {
     GGML_ASSERT(ncols % 2 == 0);
     const dim3 block_dims(1, CUDA_ROPE_BLOCK_SIZE, 1);
     const int num_blocks_x = (ncols + 2*CUDA_ROPE_BLOCK_SIZE - 1) / (2*CUDA_ROPE_BLOCK_SIZE);
     const dim3 block_nums(nrows, num_blocks_x, 1);
     if (pos == nullptr) {
-        rope_neox<T, false><<<block_nums, block_dims, 0, stream>>>(x, dst, ncols, pos, freq_scale, p_delta_rows, theta_scale);
+        rope_neox<T, false><<<block_nums, block_dims, 0, stream>>>(
+            x, dst, ncols, pos, freq_scale, p_delta_rows, freq_base, ext_factor, attn_factor, corr_dims
+        );
     } else {
-        rope_neox<T, true><<<block_nums, block_dims, 0, stream>>>(x, dst, ncols, pos, freq_scale, p_delta_rows, theta_scale);
+        rope_neox<T, true><<<block_nums, block_dims, 0, stream>>>(
+            x, dst, ncols, pos, freq_scale, p_delta_rows, freq_base, ext_factor, attn_factor, corr_dims
+        );
     }
 }
 
-static void rope_glm_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, const int32_t * pos, const float freq_scale,
-                              const int p_delta_rows, const float theta_scale, const int n_ctx, cudaStream_t stream) {
+static void rope_glm_f32_cuda(
+    const float * x, float * dst, int ncols, int nrows, const int32_t * pos, float freq_scale, int p_delta_rows,
+    float freq_base, int n_ctx, cudaStream_t stream
+) {
     GGML_ASSERT(ncols % 4 == 0);
     const dim3 block_dims(CUDA_ROPE_BLOCK_SIZE/4, 1, 1);
     const int num_blocks_x = (ncols + CUDA_ROPE_BLOCK_SIZE - 1) / CUDA_ROPE_BLOCK_SIZE;
     const dim3 block_nums(num_blocks_x, nrows, 1);
-    rope_glm_f32<<<block_nums, block_dims, 0, stream>>>(x, dst, ncols, pos, freq_scale, p_delta_rows, theta_scale, n_ctx);
+    rope_glm_f32<<<block_nums, block_dims, 0, stream>>>(x, dst, ncols, pos, freq_scale, p_delta_rows, freq_base, n_ctx);
 }
 
 static void alibi_f32_cuda(const float * x, float * dst, const int ncols, const int nrows,
@@ -6477,17 +6528,20 @@ inline void ggml_cuda_op_rope(
     const int64_t ne2 = dst->ne[2];
     const int64_t nrows = ggml_nrows(src0);
 
-    //const int n_past = ((int32_t *) dst->op_params)[0];
-    const int n_dims = ((int32_t *) dst->op_params)[1];
-    const int mode = ((int32_t *) dst->op_params)[2];
-    const int n_ctx = ((int32_t *) dst->op_params)[3];
+    //const int n_past = ((int32_t *) 
dst->op_params)[0]; + const int n_dims = ((int32_t *) dst->op_params)[1]; + const int mode = ((int32_t *) dst->op_params)[2]; + const int n_ctx = ((int32_t *) dst->op_params)[3]; + const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; + // RoPE alteration for extended context - - float freq_base, freq_scale; - memcpy(&freq_base, (int32_t *) dst->op_params + 4, sizeof(float)); - memcpy(&freq_scale, (int32_t *) dst->op_params + 5, sizeof(float)); - - const float theta_scale = powf(freq_base, -2.0f/n_dims); + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; + memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) dst->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) dst->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) dst->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); const int32_t * pos = nullptr; if ((mode & 1) == 0) { @@ -6499,24 +6553,39 @@ inline void ggml_cuda_op_rope( const bool is_neox = mode & 2; const bool is_glm = mode & 4; + rope_corr_dims corr_dims; + ggml_rope_yarn_corr_dims(n_dims, n_orig_ctx, freq_base, beta_fast, beta_slow, corr_dims.v); + // compute if (is_glm) { GGML_ASSERT(false); - rope_glm_f32_cuda(src0_dd, dst_dd, ne00, nrows, pos, freq_scale, ne01, theta_scale, n_ctx, main_stream); + rope_glm_f32_cuda(src0_dd, dst_dd, ne00, nrows, pos, freq_scale, ne01, freq_base, n_ctx, main_stream); } else if (is_neox) { GGML_ASSERT(ne00 == n_dims && "ne00 != n_dims is not implemented for CUDA yet"); if (src0->type == GGML_TYPE_F32) { - rope_neox_cuda((const float *)src0_dd, (float *)dst_dd, ne00, nrows, pos, freq_scale, ne01, theta_scale, main_stream); + rope_neox_cuda( + (const float *)src0_dd, (float *)dst_dd, ne00, nrows, pos, freq_scale, ne01, freq_base, ext_factor, + attn_factor, corr_dims, main_stream + ); } else if (src0->type == GGML_TYPE_F16) { - rope_neox_cuda((const half *)src0_dd, (half *)dst_dd, ne00, nrows, pos, freq_scale, ne01, theta_scale, main_stream); + rope_neox_cuda( + (const half *)src0_dd, (half *)dst_dd, ne00, nrows, pos, freq_scale, ne01, freq_base, ext_factor, + attn_factor, corr_dims, main_stream + ); } else { GGML_ASSERT(false); } } else { if (src0->type == GGML_TYPE_F32) { - rope_cuda((const float *)src0_dd, (float *)dst_dd, ne00, nrows, pos, freq_scale, ne01, theta_scale, main_stream); + rope_cuda( + (const float *)src0_dd, (float *)dst_dd, ne00, nrows, pos, freq_scale, ne01, freq_base, ext_factor, + attn_factor, corr_dims, main_stream + ); } else if (src0->type == GGML_TYPE_F16) { - rope_cuda((const half *)src0_dd, (half *)dst_dd, ne00, nrows, pos, freq_scale, ne01, theta_scale, main_stream); + rope_cuda( + (const half *)src0_dd, (half *)dst_dd, ne00, nrows, pos, freq_scale, ne01, freq_base, ext_factor, + attn_factor, corr_dims, main_stream + ); } else { GGML_ASSERT(false); } diff --git a/ggml-metal.m b/ggml-metal.m index 1f0341507..611d5e173 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1400,14 +1400,18 @@ void ggml_metal_graph_compute( const int nth = MIN(1024, ne00); - const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_dims = ((int32_t *) dst->op_params)[1]; - const int mode = ((int32_t *) dst->op_params)[2]; + const int n_past = ((int32_t *) dst->op_params)[0]; + const int n_dims = ((int32_t *) dst->op_params)[1]; + const int mode = ((int32_t *) dst->op_params)[2]; + const int n_orig_ctx = ((int32_t *) 
dst->op_params)[3]; - float freq_base; - float freq_scale; - memcpy(&freq_base, (int32_t *) dst->op_params + 4, sizeof(float)); - memcpy(&freq_scale, (int32_t *) dst->op_params + 5, sizeof(float)); + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; + memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) dst->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) dst->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) dst->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); switch (src0->type) { case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_rope_f32]; break; @@ -1439,6 +1443,10 @@ void ggml_metal_graph_compute( [encoder setBytes:&mode length:sizeof( int) atIndex:21]; [encoder setBytes:&freq_base length:sizeof(float) atIndex:22]; [encoder setBytes:&freq_scale length:sizeof(float) atIndex:23]; + [encoder setBytes:&ext_factor length:sizeof(float) atIndex:24]; + [encoder setBytes:&attn_factor length:sizeof(float) atIndex:25]; + [encoder setBytes:&beta_fast length:sizeof(float) atIndex:26]; + [encoder setBytes:&beta_slow length:sizeof(float) atIndex:27]; [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; diff --git a/ggml-metal.metal b/ggml-metal.metal index f3152778a..471d7d390 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -1061,6 +1061,45 @@ kernel void kernel_alibi_f32( } } +static float rope_yarn_ramp(const float low, const float high, const int i0) { + const float y = (i0 / 2 - low) / max(0.001f, high - low); + return 1.0f - min(1.0f, max(0.0f, y)); +} + +// YaRN algorithm based on LlamaYaRNScaledRotaryEmbedding.py from https://github.com/jquesnelle/yarn +// MIT licensed. Copyright (c) 2023 Jeffrey Quesnelle and Bowen Peng. 
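+//
+// For each rotary dimension i0, the final angle blends the position-interpolated
+// angle theta_interp = freq_scale * theta_extrap with the original extrapolated one:
+//
+//   theta = theta_interp * (1 - ramp_mix) + theta_extrap * ramp_mix
+//
+// where ramp_mix = rope_yarn_ramp(low, high, i0) * ext_factor is ~1 for the
+// fast-rotating low dims (left extrapolated) and ~0 for the slow high dims
+// (fully interpolated); attn_factor rescales the magnitude via mscale.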
+static void rope_yarn(
+    float theta_extrap, float freq_scale, float corr_dims[2], int64_t i0, float ext_factor, float mscale,
+    thread float * cos_theta, thread float * sin_theta
+) {
+    // Get n-d rotational scaling corrected for extrapolation
+    float theta_interp = freq_scale * theta_extrap;
+    float theta = theta_interp;
+    if (ext_factor != 0.0f) {
+        float ramp_mix = rope_yarn_ramp(corr_dims[0], corr_dims[1], i0) * ext_factor;
+        theta = theta_interp * (1 - ramp_mix) + theta_extrap * ramp_mix;
+
+        // Get n-d magnitude scaling corrected for interpolation
+        mscale *= 1.0f + 0.1f * logf(1.0f / freq_scale);
+    }
+    *cos_theta = cosf(theta) * mscale;
+    *sin_theta = sinf(theta) * mscale;
+}
+
+// Apparently solving `n_rot = 2pi * x * base^((2 * max_pos_emb) / n_dims)` for x, we get
+// `corr_fac(n_rot) = n_dims * log(max_pos_emb / (n_rot * 2pi)) / (2 * log(base))`
+static float rope_yarn_corr_factor(int n_dims, int n_orig_ctx, float n_rot, float base) {
+    return n_dims * log(n_orig_ctx / (n_rot * 2 * M_PI_F)) / (2 * log(base));
+}
+
+static void rope_yarn_corr_dims(
+    int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2]
+) {
+    // start and end correction dims
+    dims[0] = max(0.0f, floor(rope_yarn_corr_factor(n_dims, n_orig_ctx, beta_fast, freq_base)));
+    dims[1] = min(n_dims - 1.0f, ceil(rope_yarn_corr_factor(n_dims, n_orig_ctx, beta_slow, freq_base)));
+}
+
 typedef void (rope_t)(
         device const void * src0,
         device const int32_t * src1,
@@ -1116,6 +1155,10 @@ kernel void kernel_rope(
         constant int & mode,
         constant float & freq_base,
         constant float & freq_scale,
+        constant float & ext_factor,
+        constant float & attn_factor,
+        constant float & beta_fast,
+        constant float & beta_slow,
         uint tiitg[[thread_index_in_threadgroup]],
         uint3 tptg[[threads_per_threadgroup]],
         uint3 tgpig[[threadgroup_position_in_grid]]) {
@@ -1125,19 +1168,22 @@ kernel void kernel_rope(
 
     const bool is_neox = mode & 2;
 
+    float corr_dims[2];
+    rope_yarn_corr_dims(n_dims, n_orig_ctx, freq_base, beta_fast, beta_slow, corr_dims);
+
     device const int32_t * pos = src1;
 
     const int64_t p = pos[i2];
 
-    const float theta_0 = freq_scale * (float)p;
+    const float theta_0 = (float)p;
     const float inv_ndims = -1.f/n_dims;
 
     if (!is_neox) {
         for (int64_t i0 = 2*tiitg; i0 < ne0; i0 += 2*tptg.x) {
 
             const float theta = theta_0 * pow(freq_base, inv_ndims*i0);
-            const float cos_theta = cos(theta);
-            const float sin_theta = sin(theta);
+            float cos_theta, sin_theta;
+            rope_yarn(theta, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta);
 
             device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00);
             device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0);
@@ -1152,9 +1198,12 @@ kernel void kernel_rope(
         for (int64_t ib = 0; ib < ne0/n_dims; ++ib) {
             for (int64_t ic = 2*tiitg; ic < n_dims; ic += 2*tptg.x) {
 
-                const float theta = theta_0 * pow(freq_base, inv_ndims*ic - ib);
-                const float cos_theta = cos(theta);
-                const float sin_theta = sin(theta);
+                // simplified from `(ib * n_dims + ic) * inv_ndims`
+                const float cur_rot = inv_ndims*ic - ib;
+
+                const float theta = theta_0 * pow(freq_base, cur_rot);
+                float cos_theta, sin_theta;
+                rope_yarn(theta, freq_scale, corr_dims, cur_rot, ext_factor, attn_factor, &cos_theta, &sin_theta);
 
                 const int64_t i0 = ib*n_dims + ic/2;
 
diff --git a/ggml.c b/ggml.c
index 80d682255..2c7fe476b 100644
--- a/ggml.c
+++ b/ggml.c
@@ -1,4 +1,5 @@
 #define _CRT_SECURE_NO_DEPRECATE // Disables ridiculous "unsafe" warnings on 
Windows +#define _USE_MATH_DEFINES // For M_PI on MSVC #include "ggml-impl.h" #include "ggml-quants.h" @@ -4845,8 +4846,13 @@ static struct ggml_tensor * ggml_rope_impl( int n_dims, int mode, int n_ctx, + int n_orig_ctx, float freq_base, float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow, float xpos_base, bool xpos_down, bool inplace) { @@ -4862,11 +4868,15 @@ static struct ggml_tensor * ggml_rope_impl( struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); - int32_t params[8] = { /*n_past*/ 0, n_dims, mode, n_ctx }; - memcpy(params + 4, &freq_base, sizeof(float)); - memcpy(params + 5, &freq_scale, sizeof(float)); - memcpy(params + 6, &xpos_base, sizeof(float)); - memcpy(params + 7, &xpos_down, sizeof(bool)); + int32_t params[13] = { /*n_past*/ 0, n_dims, mode, n_ctx, n_orig_ctx }; + memcpy(params + 5, &freq_base, sizeof(float)); + memcpy(params + 6, &freq_scale, sizeof(float)); + memcpy(params + 7, &ext_factor, sizeof(float)); + memcpy(params + 8, &attn_factor, sizeof(float)); + memcpy(params + 9, &beta_fast, sizeof(float)); + memcpy(params + 10, &beta_slow, sizeof(float)); + memcpy(params + 11, &xpos_base, sizeof(float)); + memcpy(params + 12, &xpos_down, sizeof(bool)); ggml_set_op_params(result, params, sizeof(params)); result->op = GGML_OP_ROPE; @@ -4884,7 +4894,9 @@ struct ggml_tensor * ggml_rope( int n_dims, int mode, int n_ctx) { - return ggml_rope_impl(ctx, a, b, n_dims, mode, n_ctx, 10000.0f, 1.0f, 0.0f, false, false); + return ggml_rope_impl( + ctx, a, b, n_dims, mode, n_ctx, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, false, false + ); } struct ggml_tensor * ggml_rope_inplace( @@ -4894,7 +4906,9 @@ struct ggml_tensor * ggml_rope_inplace( int n_dims, int mode, int n_ctx) { - return ggml_rope_impl(ctx, a, b, n_dims, mode, n_ctx, 10000.0f, 1.0f, 0.0f, false, true); + return ggml_rope_impl( + ctx, a, b, n_dims, mode, n_ctx, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, false, true + ); } struct ggml_tensor * ggml_rope_custom( @@ -4904,9 +4918,17 @@ struct ggml_tensor * ggml_rope_custom( int n_dims, int mode, int n_ctx, + int n_orig_ctx, float freq_base, - float freq_scale) { - return ggml_rope_impl(ctx, a, b, n_dims, mode, n_ctx, freq_base, freq_scale, 0.0f, false, false); + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow) { + return ggml_rope_impl( + ctx, a, b, n_dims, mode, n_ctx, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow, 0.0f, false, false + ); } struct ggml_tensor * ggml_rope_custom_inplace( @@ -4916,9 +4938,17 @@ struct ggml_tensor * ggml_rope_custom_inplace( int n_dims, int mode, int n_ctx, + int n_orig_ctx, float freq_base, - float freq_scale) { - return ggml_rope_impl(ctx, a, b, n_dims, mode, n_ctx, freq_base, freq_scale, 0.0f, false, true); + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow) { + return ggml_rope_impl( + ctx, a, b, n_dims, mode, n_ctx, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow, 0.0f, false, true + ); } struct ggml_tensor * ggml_rope_xpos_inplace( @@ -4928,7 +4958,7 @@ struct ggml_tensor * ggml_rope_xpos_inplace( int n_dims, float base, bool down) { - return ggml_rope_impl(ctx, a, b, n_dims, 0, 0, 10000.0f, 1.0f, base, down, true); + return ggml_rope_impl(ctx, a, b, n_dims, 0, 0, 0, 10000.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, base, down, true); } // ggml_rope_back @@ -10901,6 +10931,45 @@ static void 
ggml_compute_forward_clamp( // ggml_compute_forward_rope +static float rope_yarn_ramp(const float low, const float high, const int i0) { + const float y = (i0 / 2 - low) / MAX(0.001f, high - low); + return 1 - MIN(1, MAX(0, y)); +} + +// YaRN algorithm based on LlamaYaRNScaledRotaryEmbedding.py from https://github.com/jquesnelle/yarn +// MIT licensed. Copyright (c) 2023 Jeffrey Quesnelle and Bowen Peng. +static void rope_yarn( + float theta_extrap, float freq_scale, float corr_dims[2], int64_t i0, float ext_factor, float mscale, + float * cos_theta, float * sin_theta +) { + // Get n-d rotational scaling corrected for extrapolation + float theta_interp = freq_scale * theta_extrap; + float theta = theta_interp; + if (ext_factor != 0.0f) { + float ramp_mix = rope_yarn_ramp(corr_dims[0], corr_dims[1], i0) * ext_factor; + theta = theta_interp * (1 - ramp_mix) + theta_extrap * ramp_mix; + + // Get n-d magnitude scaling corrected for interpolation + mscale *= 1.0f + 0.1f * logf(1.0f / freq_scale); + } + *cos_theta = cosf(theta) * mscale; + *sin_theta = sinf(theta) * mscale; +} + +// Apparently solving `n_rot = 2pi * x * base^((2 * max_pos_emb) / n_dims)` for x, we get +// `corr_dim(n_rot) = n_dims * log(max_pos_emb / (n_rot * 2pi)) / (2 * log(base))` +static float ggml_rope_yarn_corr_dim(int n_dims, int n_orig_ctx, float n_rot, float base) { + return n_dims * logf(n_orig_ctx / (n_rot * 2 * (float)M_PI)) / (2 * logf(base)); +} + +void ggml_rope_yarn_corr_dims( + int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2] +) { + // start and end correction dims + dims[0] = MAX(0, floorf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_fast, freq_base))); + dims[1] = MIN(n_dims - 1, ceilf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_slow, freq_base))); +} + static void ggml_compute_forward_rope_f32( const struct ggml_compute_params * params, const struct ggml_tensor * src0, @@ -10910,21 +10979,26 @@ static void ggml_compute_forward_rope_f32( return; } - float freq_base; - float freq_scale; + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; // these two only relevant for xPos RoPE: float xpos_base; bool xpos_down; - //const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_dims = ((int32_t *) dst->op_params)[1]; - const int mode = ((int32_t *) dst->op_params)[2]; - const int n_ctx = ((int32_t *) dst->op_params)[3]; - memcpy(&freq_base, (int32_t *) dst->op_params + 4, sizeof(float)); - memcpy(&freq_scale, (int32_t *) dst->op_params + 5, sizeof(float)); - memcpy(&xpos_base, (int32_t *) dst->op_params + 6, sizeof(float)); - memcpy(&xpos_down, (int32_t *) dst->op_params + 7, sizeof(bool)); + //const int n_past = ((int32_t *) dst->op_params)[0]; + const int n_dims = ((int32_t *) dst->op_params)[1]; + const int mode = ((int32_t *) dst->op_params)[2]; + const int n_ctx = ((int32_t *) dst->op_params)[3]; + const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; + + memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) dst->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) dst->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) dst->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); + memcpy(&xpos_base, (int32_t *) dst->op_params + 11, sizeof(float)); + memcpy(&xpos_down, (int32_t *) dst->op_params + 12, sizeof(bool)); 
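+    // op_params layout (must stay in sync with ggml_rope_impl):
+    //   [0] n_past (unused), [1] n_dims, [2] mode, [3] n_ctx, [4] n_orig_ctx,
+    //   [5] freq_base, [6] freq_scale, [7] ext_factor, [8] attn_factor,
+    //   [9] beta_fast, [10] beta_slow, [11] xpos_base, [12] xpos_down
+    // e.g. with (assumed, typical LLaMA-style) values n_dims = 128,
+    // n_orig_ctx = 2048, freq_base = 10000, beta_fast = 32, beta_slow = 1,
+    // ggml_rope_yarn_corr_dims() yields corr_dims ~= [16, 41]: rotary pairs
+    // below dim 16 stay extrapolated, those above 41 are fully interpolated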
GGML_TENSOR_UNARY_OP_LOCALS @@ -10952,6 +11026,9 @@ static void ggml_compute_forward_rope_f32( int ir = 0; const float theta_scale = powf(freq_base, -2.0f/n_dims); + const float inv_ndims = -1.f/n_dims; + float corr_dims[2]; + ggml_rope_yarn_corr_dims(n_dims, n_orig_ctx, freq_base, beta_fast, beta_slow, corr_dims); const bool is_neox = mode & 2; const bool is_glm = mode & 4; @@ -10965,18 +11042,18 @@ static void ggml_compute_forward_rope_f32( if (ir++ < ir0) continue; if (ir > ir1) break; - float theta = freq_scale * (float)p; + float theta_base = (float)p; if (is_glm) { - theta = MIN(p, n_ctx - 2); + theta_base = MIN(p, n_ctx - 2); float block_theta = MAX(p - (n_ctx - 2), 0); for (int64_t i0 = 0; i0 < ne0 / 4; i0++) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + const float cos_theta = cosf(theta_base); + const float sin_theta = sinf(theta_base); const float cos_block_theta = cosf(block_theta); const float sin_block_theta = sinf(block_theta); - theta *= theta_scale; + theta_base *= theta_scale; block_theta *= theta_scale; const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); @@ -10994,13 +11071,16 @@ static void ggml_compute_forward_rope_f32( } } else if (!is_neox) { for (int64_t i0 = 0; i0 < ne0; i0 += 2) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + float cos_theta, sin_theta; + rope_yarn( + theta_base, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta + ); + // zeta scaling for xPos only: float zeta = xpos_base != 0.0f ? powf((i0 + 0.4f * ne0) / (1.4f * ne0), p / xpos_base) : 1.0f; if (xpos_down) zeta = 1.0f / zeta; - theta *= theta_scale; + theta_base *= theta_scale; const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); float * dst_data = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); @@ -11014,12 +11094,19 @@ static void ggml_compute_forward_rope_f32( } else { // TODO: this might be wrong for ne0 != n_dims - need double check // ref: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt_neox/modeling_gpt_neox.py#LL251C1-L294C28 + theta_base *= freq_scale; for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { for (int64_t ic = 0; ic < n_dims; ic += 2) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + // simplified from `(ib * n_dims + ic) * inv_ndims` + float cur_rot = inv_ndims * ic - ib; - theta *= theta_scale; + float cos_theta, sin_theta; + rope_yarn( + theta_base, freq_scale, corr_dims, cur_rot, ext_factor, attn_factor, + &cos_theta, &sin_theta + ); + + theta_base *= theta_scale; const int64_t i0 = ib*n_dims + ic/2; @@ -11048,15 +11135,19 @@ static void ggml_compute_forward_rope_f16( return; } - float freq_base; - float freq_scale; + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; - //const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_dims = ((int32_t *) dst->op_params)[1]; - const int mode = ((int32_t *) dst->op_params)[2]; - const int n_ctx = ((int32_t *) dst->op_params)[3]; - memcpy(&freq_base, (int32_t *) dst->op_params + 4, sizeof(float)); - memcpy(&freq_scale, (int32_t *) dst->op_params + 5, sizeof(float)); + //const int n_past = ((int32_t *) dst->op_params)[0]; + const int n_dims = ((int32_t *) dst->op_params)[1]; + const int mode = ((int32_t *) dst->op_params)[2]; + const int n_ctx = ((int32_t *) dst->op_params)[3]; + const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; + 
memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) dst->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) dst->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) dst->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); GGML_TENSOR_UNARY_OP_LOCALS @@ -11084,6 +11175,9 @@ static void ggml_compute_forward_rope_f16( int ir = 0; const float theta_scale = powf(freq_base, -2.0f/n_dims); + const float inv_ndims = -1.f/n_dims; + float corr_dims[2]; + ggml_rope_yarn_corr_dims(n_dims, n_orig_ctx, freq_base, beta_fast, beta_slow, corr_dims); const bool is_neox = mode & 2; const bool is_glm = mode & 4; @@ -11097,18 +11191,18 @@ static void ggml_compute_forward_rope_f16( if (ir++ < ir0) continue; if (ir > ir1) break; - float theta = freq_scale * (float)p; + float theta_base = (float)p; if (is_glm) { - theta = MIN(p, n_ctx - 2); + theta_base = MIN(p, n_ctx - 2); float block_theta = MAX(p - (n_ctx - 2), 0); for (int64_t i0 = 0; i0 < ne0 / 4; i0++) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + const float cos_theta = cosf(theta_base); + const float sin_theta = sinf(theta_base); const float cos_block_theta = cosf(block_theta); const float sin_block_theta = sinf(block_theta); - theta *= theta_scale; + theta_base *= theta_scale; block_theta *= theta_scale; const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); @@ -11126,10 +11220,12 @@ static void ggml_compute_forward_rope_f16( } } else if (!is_neox) { for (int64_t i0 = 0; i0 < ne0; i0 += 2) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + float cos_theta, sin_theta; + rope_yarn( + theta_base, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta + ); - theta *= theta_scale; + theta_base *= theta_scale; const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); ggml_fp16_t * dst_data = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); @@ -11143,12 +11239,19 @@ static void ggml_compute_forward_rope_f16( } else { // TODO: this might be wrong for ne0 != n_dims - need double check // ref: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt_neox/modeling_gpt_neox.py#LL251C1-L294C28 + theta_base *= freq_scale; for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { for (int64_t ic = 0; ic < n_dims; ic += 2) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + // simplified from `(ib * n_dims + ic) * inv_ndims` + float cur_rot = inv_ndims * ic - ib; - theta *= theta_scale; + float cos_theta, sin_theta; + rope_yarn( + theta_base, freq_scale, corr_dims, cur_rot, ext_factor, attn_factor, + &cos_theta, &sin_theta + ); + + theta_base *= theta_scale; const int64_t i0 = ib*n_dims + ic/2; @@ -11256,17 +11359,18 @@ static void ggml_compute_forward_rope_back_f32( if (ir++ < ir0) continue; if (ir > ir1) break; - float theta = freq_scale * (float)p; + float theta_base = freq_scale * (float)p; if (!is_neox) { for (int64_t i0 = 0; i0 < ne0; i0 += 2) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + const float cos_theta = cosf(theta_base); + const float sin_theta = sinf(theta_base); + // zeta scaling for xPos only: float zeta = xpos_base != 0.0f ? 
powf((i0 + 0.4f * ne0) / (1.4f * ne0), p / xpos_base) : 1.0f; if (xpos_down) zeta = 1.0f / zeta; - theta *= theta_scale; + theta_base *= theta_scale; const float * const dy = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); float * dx = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); @@ -11280,10 +11384,10 @@ static void ggml_compute_forward_rope_back_f32( } else { for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { for (int64_t ic = 0; ic < n_dims; ic += 2) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + const float cos_theta = cosf(theta_base); + const float sin_theta = sinf(theta_base); - theta *= theta_scale; + theta_base *= theta_scale; const int64_t i0 = ib*n_dims + ic/2; @@ -11356,14 +11460,14 @@ static void ggml_compute_forward_rope_back_f16( if (ir++ < ir0) continue; if (ir > ir1) break; - float theta = (float)p; + float theta_base = (float)p; if (!is_neox) { for (int64_t i0 = 0; i0 < ne0; i0 += 2) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + const float cos_theta = cosf(theta_base); + const float sin_theta = sinf(theta_base); - theta *= theta_scale; + theta_base *= theta_scale; const ggml_fp16_t * const dy = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); ggml_fp16_t * dx = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); @@ -11377,10 +11481,10 @@ static void ggml_compute_forward_rope_back_f16( } else { for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { for (int64_t ic = 0; ic < n_dims; ic += 2) { - const float cos_theta = cosf(theta); - const float sin_theta = sinf(theta); + const float cos_theta = cosf(theta_base); + const float sin_theta = sinf(theta_base); - theta *= theta_scale; + theta_base *= theta_scale; const int64_t i0 = ib*n_dims + ic/2; @@ -15505,9 +15609,14 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor src1, n_dims, mode, + 0, n_ctx, freq_base, freq_scale, + 0.0f, + 1.0f, + 0.0f, + 0.0f, xpos_base, xpos_down, false), diff --git a/ggml.h b/ggml.h index 9d16c5a72..70eb25a6b 100644 --- a/ggml.h +++ b/ggml.h @@ -219,7 +219,7 @@ #define GGML_MAX_CONTEXTS 64 #define GGML_MAX_SRC 6 #define GGML_MAX_NAME 64 -#define GGML_MAX_OP_PARAMS 32 +#define GGML_MAX_OP_PARAMS 64 #define GGML_DEFAULT_N_THREADS 4 #if UINTPTR_MAX == 0xFFFFFFFF @@ -1326,8 +1326,13 @@ extern "C" { int n_dims, int mode, int n_ctx, + int n_orig_ctx, float freq_base, - float freq_scale); + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow); // in-place, returns view(a) GGML_API struct ggml_tensor * ggml_rope_custom_inplace( @@ -1337,8 +1342,17 @@ extern "C" { int n_dims, int mode, int n_ctx, + int n_orig_ctx, float freq_base, - float freq_scale); + float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow); + + // compute correction dims for YaRN RoPE scaling + void ggml_rope_yarn_corr_dims( + int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2]); // xPos RoPE, in-place, returns view(a) GGML_API struct ggml_tensor * ggml_rope_xpos_inplace( diff --git a/gguf-py/gguf/gguf.py b/gguf-py/gguf/gguf.py index 6b7d65429..727b4e554 100644 --- a/gguf-py/gguf/gguf.py +++ b/gguf-py/gguf/gguf.py @@ -7,7 +7,7 @@ import shutil import struct import sys import tempfile -from enum import IntEnum, auto +from enum import Enum, IntEnum, auto from io import BufferedWriter from pathlib import Path from typing import IO, 
Any, BinaryIO, Callable, Sequence @@ -53,9 +53,12 @@ KEY_ATTENTION_LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon" KEY_ATTENTION_LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon" # RoPE -KEY_ROPE_DIMENSION_COUNT = "{arch}.rope.dimension_count" -KEY_ROPE_FREQ_BASE = "{arch}.rope.freq_base" -KEY_ROPE_SCALE_LINEAR = "{arch}.rope.scale_linear" +KEY_ROPE_DIMENSION_COUNT = "{arch}.rope.dimension_count" +KEY_ROPE_FREQ_BASE = "{arch}.rope.freq_base" +KEY_ROPE_SCALING_TYPE = "{arch}.rope.scaling.type" +KEY_ROPE_SCALING_FACTOR = "{arch}.rope.scaling.factor" +KEY_ROPE_SCALING_ORIG_CTX_LEN = "{arch}.rope.scaling.original_context_length" +KEY_ROPE_SCALING_FINETUNED = "{arch}.rope.scaling.finetuned" # tokenization KEY_TOKENIZER_MODEL = "tokenizer.ggml.model" @@ -577,6 +580,11 @@ class TokenType(IntEnum): UNUSED = 5 BYTE = 6 +class RopeScalingType(Enum): + NONE = 'none' + LINEAR = 'linear' + YARN = 'yarn' + # # implementation # @@ -948,8 +956,17 @@ class GGUFWriter: def add_rope_freq_base(self, value: float): self.add_float32(KEY_ROPE_FREQ_BASE.format(arch=self.arch), value) - def add_rope_scale_linear(self, value: float): - self.add_float32(KEY_ROPE_SCALE_LINEAR.format(arch=self.arch), value) + def add_rope_scaling_type(self, value: RopeScalingType): + self.add_string(KEY_ROPE_SCALING_TYPE.format(arch=self.arch), value.value) + + def add_rope_scaling_factor(self, value: float): + self.add_float32(KEY_ROPE_SCALING_FACTOR.format(arch=self.arch), value) + + def add_rope_scaling_orig_ctx_len(self, value: int): + self.add_uint32(KEY_ROPE_SCALING_ORIG_CTX_LEN.format(arch=self.arch), value) + + def add_rope_scaling_finetuned(self, value: bool): + self.add_bool(KEY_ROPE_SCALING_FINETUNED.format(arch=self.arch), value) def add_tokenizer_model(self, model: str): self.add_string(KEY_TOKENIZER_MODEL, model) diff --git a/llama.cpp b/llama.cpp index 1c6d482f8..685882c20 100644 --- a/llama.cpp +++ b/llama.cpp @@ -54,6 +54,7 @@ #include #include #include +#include #include #include #include @@ -235,6 +236,10 @@ enum llm_kv { LLM_KV_ROPE_DIMENSION_COUNT, LLM_KV_ROPE_FREQ_BASE, LLM_KV_ROPE_SCALE_LINEAR, + LLM_KV_ROPE_SCALING_TYPE, + LLM_KV_ROPE_SCALING_FACTOR, + LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, + LLM_KV_ROPE_SCALING_FINETUNED, LLM_KV_TOKENIZER_MODEL, LLM_KV_TOKENIZER_LIST, @@ -276,9 +281,13 @@ static std::map LLM_KV_NAMES = { { LLM_KV_ATTENTION_LAYERNORM_EPS, "%s.attention.layer_norm_epsilon" }, { LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, "%s.attention.layer_norm_rms_epsilon" }, - { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" }, - { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" }, - { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" }, + { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" }, + { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" }, + { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" }, + { LLM_KV_ROPE_SCALING_TYPE, "%s.rope.scaling.type" }, + { LLM_KV_ROPE_SCALING_FACTOR, "%s.rope.scaling.factor" }, + { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, "%s.rope.scaling.original_context_length" }, + { LLM_KV_ROPE_SCALING_FINETUNED, "%s.rope.scaling.finetuned" }, { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" }, { LLM_KV_TOKENIZER_LIST, "tokenizer.ggml.tokens" }, @@ -552,6 +561,22 @@ do { \ } \ } while (0) +static std::map LLAMA_ROPE_SCALING_TYPES = { + { LLAMA_ROPE_SCALING_NONE, "none" }, + { LLAMA_ROPE_SCALING_LINEAR, "linear" }, + { LLAMA_ROPE_SCALING_YARN, "yarn" }, +}; + +static int8_t llama_rope_scaling_type_from_string(const std::string & name) { + for (const auto & kv : 
LLAMA_ROPE_SCALING_TYPES) { + if (kv.second == name) { + return kv.first; + } + } + + return LLAMA_ROPE_SCALING_UNSPECIFIED; +} + // // ggml helpers // @@ -1035,8 +1060,11 @@ struct llama_hparams { float f_norm_eps; float f_norm_rms_eps; - float rope_freq_base_train; - float rope_freq_scale_train; + float rope_freq_base_train; + float rope_freq_scale_train; + uint32_t n_yarn_orig_ctx; + int8_t rope_scaling_type_train : 3; + bool rope_finetuned : 1; float f_clamp_kqv; float f_max_alibi_bias; @@ -1051,6 +1079,8 @@ struct llama_hparams { if (this->n_layer != other.n_layer) return true; if (this->n_rot != other.n_rot) return true; if (this->n_ff != other.n_ff) return true; + if (this->rope_finetuned != other.rope_finetuned) return true; + if (this->n_yarn_orig_ctx != other.n_yarn_orig_ctx) return true; const float EPSILON = 1e-9; @@ -1081,8 +1111,16 @@ struct llama_cparams { uint32_t n_threads; // number of threads to use for generation uint32_t n_threads_batch; // number of threads to use for batch processing - float rope_freq_base; - float rope_freq_scale; + float rope_freq_base; + float rope_freq_scale; + + uint32_t n_yarn_orig_ctx; + // These hyperparameters are not exposed in GGUF, because all + // existing YaRN models use the same values for them. + float yarn_ext_factor; + float yarn_attn_factor; + float yarn_beta_fast; + float yarn_beta_slow; bool mul_mat_q; }; @@ -2014,14 +2052,30 @@ static void llm_load_hparams( hparams.n_head_kv = hparams.n_head; GGUF_GET_KEY(ctx, hparams.n_head_kv, gguf_get_val_u32, GGUF_TYPE_UINT32, false, kv(LLM_KV_ATTENTION_HEAD_COUNT_KV)); + hparams.rope_finetuned = false; + GGUF_GET_KEY(ctx, hparams.rope_finetuned, gguf_get_val_bool, GGUF_TYPE_BOOL, false, + kv(LLM_KV_ROPE_SCALING_FINETUNED)); + + hparams.n_yarn_orig_ctx = hparams.n_ctx_train; + GGUF_GET_KEY(ctx, hparams.n_yarn_orig_ctx, gguf_get_val_u32, GGUF_TYPE_UINT32, false, + kv(LLM_KV_ROPE_SCALING_ORIG_CTX_LEN)); + // rope_freq_base (optional) hparams.rope_freq_base_train = 10000.0f; GGUF_GET_KEY(ctx, hparams.rope_freq_base_train, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_FREQ_BASE)); + std::string rope_scaling("linear"); + GGUF_GET_KEY(ctx, rope_scaling, gguf_get_val_str, GGUF_TYPE_STRING, false, kv(LLM_KV_ROPE_SCALING_TYPE)); + hparams.rope_scaling_type_train = llama_rope_scaling_type_from_string(rope_scaling); + GGML_ASSERT(hparams.rope_scaling_type_train != LLAMA_ROPE_SCALING_UNSPECIFIED); + // rope_freq_scale (inverse of the kv) is optional - float ropescale = 1.0f; - GGUF_GET_KEY(ctx, ropescale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALE_LINEAR)); - hparams.rope_freq_scale_train = 1.0f/ropescale; + float ropescale = 0.0f; + GGUF_GET_KEY(ctx, ropescale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALING_FACTOR)); + if (ropescale == 0.0f) { // try the old key name + GGUF_GET_KEY(ctx, ropescale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALE_LINEAR)); + } + hparams.rope_freq_scale_train = ropescale == 0.0f ? 
1.0f : 1.0f/ropescale; // sanity check for n_rot (optional) { @@ -2371,6 +2425,8 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { const auto & hparams = model.hparams; const auto & vocab = model.vocab; + const auto rope_scaling_type = LLAMA_ROPE_SCALING_TYPES.at(hparams.rope_scaling_type_train); + // hparams LLAMA_LOG_INFO("%s: format = %s\n", __func__, llama_file_version_name(ml.fver)); LLAMA_LOG_INFO("%s: arch = %s\n", __func__, LLM_ARCH_NAMES.at(model.arch).c_str()); @@ -2389,8 +2445,11 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: f_clamp_kqv = %.1e\n", __func__, hparams.f_clamp_kqv); LLAMA_LOG_INFO("%s: f_max_alibi_bias = %.1e\n", __func__, hparams.f_max_alibi_bias); LLAMA_LOG_INFO("%s: n_ff = %u\n", __func__, hparams.n_ff); + LLAMA_LOG_INFO("%s: rope scaling = %s\n", __func__, rope_scaling_type.c_str()); LLAMA_LOG_INFO("%s: freq_base_train = %.1f\n", __func__, hparams.rope_freq_base_train); LLAMA_LOG_INFO("%s: freq_scale_train = %g\n", __func__, hparams.rope_freq_scale_train); + LLAMA_LOG_INFO("%s: n_yarn_orig_ctx = %u\n", __func__, hparams.n_yarn_orig_ctx); + LLAMA_LOG_INFO("%s: rope_finetuned = %s\n", __func__, hparams.rope_finetuned ? "yes" : "unknown"); LLAMA_LOG_INFO("%s: model type = %s\n", __func__, llama_model_type_name(model.type)); LLAMA_LOG_INFO("%s: model ftype = %s\n", __func__, llama_model_ftype_name(model.ftype).c_str()); LLAMA_LOG_INFO("%s: model params = %.2f B\n", __func__, ml.n_elements*1e-9); @@ -3047,21 +3106,11 @@ static void llm_load_tensors( model.t_load_us = ggml_time_us() - model.t_start_us; } -static bool llama_model_load( - const std::string & fname, - llama_model & model, - int n_gpu_layers, - int main_gpu, - const float * tensor_split, - bool use_mmap, - bool use_mlock, - bool vocab_only, - llama_progress_callback progress_callback, - void *progress_callback_user_data) { +static bool llama_model_load(const std::string & fname, llama_model & model, const llama_model_params & params) { try { - llama_model_loader ml(fname, use_mmap); + llama_model_loader ml(fname, params.use_mmap); - model.hparams.vocab_only = vocab_only; + model.hparams.vocab_only = params.vocab_only; llm_load_arch (ml, model); llm_load_hparams(ml, model); @@ -3073,15 +3122,15 @@ static bool llama_model_load( throw std::runtime_error("vocab size mismatch"); } - if (vocab_only) { + if (params.vocab_only) { LLAMA_LOG_INFO("%s: vocab only - skipping tensors\n", __func__); return true; } llm_load_tensors( - ml, model, n_gpu_layers, - main_gpu, tensor_split, - use_mlock, progress_callback, progress_callback_user_data); + ml, model, params.n_gpu_layers, params.main_gpu, params.tensor_split, params.use_mlock, + params.progress_callback, params.progress_callback_user_data + ); } catch (const std::exception & err) { LLAMA_LOG_ERROR("error loading model: %s\n", err.what()); return false; @@ -3150,6 +3199,7 @@ static struct ggml_tensor * llm_build_inp_embd( static void llm_build_k_shift( struct ggml_context * ctx, const llama_hparams & hparams, + const llama_cparams & cparams, const llama_kv_cache & kv, struct ggml_cgraph * graph, llm_rope_type type, @@ -3162,6 +3212,11 @@ static void llm_build_k_shift( const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_gqa = hparams.n_embd_gqa(); const int64_t n_embd_head = hparams.n_embd_head(); + const int32_t n_orig_ctx = cparams.n_yarn_orig_ctx; + const float ext_factor = cparams.yarn_ext_factor; + const float attn_factor = cparams.yarn_attn_factor; + const 
float beta_fast = cparams.yarn_beta_fast; + const float beta_slow = cparams.yarn_beta_slow; GGML_ASSERT(n_embd_head % n_rot == 0); @@ -3185,7 +3240,8 @@ static void llm_build_k_shift( ggml_element_size(kv.k)*n_embd_head, ggml_element_size(kv.k)*n_embd_gqa, ggml_element_size(kv.k)*n_embd_gqa*n_ctx*il), - K_shift, n_rot, rope_type, 0, freq_base, freq_scale); + K_shift, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow); cb(tmp, "K_shifted", il); ggml_build_forward_expand(graph, tmp); } @@ -3442,12 +3498,17 @@ struct llm_build_context { const float freq_base; const float freq_scale; + const float ext_factor; + const float attn_factor; + const float beta_fast; + const float beta_slow; const float norm_eps; const float norm_rms_eps; const int32_t n_tokens; const int32_t n_kv; // size of KV cache to consider (n_kv <= n_ctx) const int32_t kv_head; // index of where we store new KV data in the cache + const int32_t n_orig_ctx; const bool do_rope_shift; @@ -3477,11 +3538,16 @@ struct llm_build_context { n_embd_gqa (hparams.n_embd_gqa()), freq_base (cparams.rope_freq_base), freq_scale (cparams.rope_freq_scale), + ext_factor (cparams.yarn_ext_factor), + attn_factor (cparams.yarn_attn_factor), + beta_fast (cparams.yarn_beta_fast), + beta_slow (cparams.yarn_beta_slow), norm_eps (hparams.f_norm_eps), norm_rms_eps (hparams.f_norm_rms_eps), n_tokens (batch.n_tokens), n_kv (worst_case ? n_ctx : kv_self.n), kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), + n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), cb (cb), buf_compute (lctx.buf_compute) { @@ -3532,7 +3598,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -3556,10 +3622,18 @@ struct llm_build_context { struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); cb(Vcur, "Vcur", il); - Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); cb(Qcur, "Qcur", il); - Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); cb(Kcur, "Kcur", il); llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); @@ -3634,7 +3708,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -3658,8 +3732,16 @@ struct llm_build_context { switch (model.type) { case MODEL_7B: - Qcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Qcur, 
n_embd_head, n_head, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); - Kcur = ggml_rope_custom(ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, n_embd_head, 0, 0, freq_base, freq_scale); + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); break; case MODEL_13B: Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd/n_head, n_head, n_tokens); @@ -3746,7 +3828,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -3786,10 +3868,16 @@ struct llm_build_context { Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); // using mode = 2 for neox mode - Qcur = ggml_rope_custom(ctx0, Qcur, inp_pos, n_embd_head, 2, 0, freq_base, freq_scale); + Qcur = ggml_rope_custom( + ctx0, Qcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); cb(Qcur, "Qcur", il); - Kcur = ggml_rope_custom(ctx0, Kcur, inp_pos, n_embd_head, 2, 0, freq_base, freq_scale); + Kcur = ggml_rope_custom( + ctx0, Kcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); cb(Kcur, "Kcur", il); llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); @@ -3960,7 +4048,7 @@ struct llm_build_context { cb(KQ_mask, "KQ_mask", -1); if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -4053,13 +4141,15 @@ struct llm_build_context { cb(kpass, "kpass", il); struct ggml_tensor * qrotated = ggml_rope_custom( - ctx0, qrot, inp_pos, n_rot, 2, 0, freq_base, freq_scale - ); + ctx0, qrot, inp_pos, n_rot, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); cb(qrotated, "qrotated", il); struct ggml_tensor * krotated = ggml_rope_custom( - ctx0, krot, inp_pos, n_rot, 2, 0, freq_base, freq_scale - ); + ctx0, krot, inp_pos, n_rot, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); cb(krotated, "krotated", il); // ggml currently only supports concatenation on dim=2 @@ -7883,8 +7973,13 @@ struct llama_context_params llama_context_default_params() { /*.n_batch =*/ 512, /*.n_threads =*/ GGML_DEFAULT_N_THREADS, // TODO: better default /*.n_threads_batch =*/ GGML_DEFAULT_N_THREADS, + /*.rope_scaling_type =*/ LLAMA_ROPE_SCALING_UNSPECIFIED, /*.rope_freq_base =*/ 0.0f, /*.rope_freq_scale =*/ 0.0f, + /*.yarn_ext_factor =*/ NAN, + /*.yarn_attn_factor =*/ 1.0f, + /*.yarn_beta_fast =*/ 32.0f, + /*.yarn_beta_slow =*/ 1.0f, /*.mul_mat_q =*/ true, /*.f16_kv =*/ true, /*.logits_all =*/ false, @@ -7971,10 +8066,7 @@ struct llama_model * llama_load_model_from_file( }; } - 
if (!llama_model_load(path_model, *model, params.n_gpu_layers, - params.main_gpu, params.tensor_split, - params.use_mmap, params.use_mlock, params.vocab_only, - params.progress_callback, params.progress_callback_user_data)) { + if (!llama_model_load(path_model, *model, params)) { LLAMA_LOG_ERROR("%s: failed to load model\n", __func__); delete model; return nullptr; @@ -8000,13 +8092,35 @@ struct llama_context * llama_new_context_with_model( const auto & hparams = model->hparams; auto & cparams = ctx->cparams; - cparams.n_batch = params.n_batch; - cparams.n_ctx = params.n_ctx == 0 ? hparams.n_ctx_train : params.n_ctx; - cparams.rope_freq_base = params.rope_freq_base == 0 ? hparams.rope_freq_base_train : params.rope_freq_base; - cparams.rope_freq_scale = params.rope_freq_scale == 0 ? hparams.rope_freq_scale_train : params.rope_freq_scale; - cparams.n_threads = params.n_threads; - cparams.n_threads_batch = params.n_threads_batch; - cparams.mul_mat_q = params.mul_mat_q; + cparams.n_batch = params.n_batch; + cparams.n_threads = params.n_threads; + cparams.n_threads_batch = params.n_threads_batch; + cparams.yarn_ext_factor = params.yarn_ext_factor; + cparams.yarn_attn_factor = params.yarn_attn_factor; + cparams.yarn_beta_fast = params.yarn_beta_fast; + cparams.yarn_beta_slow = params.yarn_beta_slow; + cparams.mul_mat_q = params.mul_mat_q; + + cparams.n_ctx = params.n_ctx == 0 ? hparams.n_ctx_train : params.n_ctx; + cparams.rope_freq_base = params.rope_freq_base == 0.0f ? hparams.rope_freq_base_train : params.rope_freq_base; + cparams.rope_freq_scale = params.rope_freq_scale == 0.0f ? hparams.rope_freq_scale_train : params.rope_freq_scale; + + cparams.n_yarn_orig_ctx = params.yarn_orig_ctx != 0 ? params.yarn_orig_ctx : + hparams.n_yarn_orig_ctx != 0 ? hparams.n_yarn_orig_ctx : + hparams.n_ctx_train; + + auto rope_scaling_type = params.rope_scaling_type; + if (rope_scaling_type == LLAMA_ROPE_SCALING_UNSPECIFIED) { + rope_scaling_type = hparams.rope_scaling_type_train; + } + + if (rope_scaling_type == LLAMA_ROPE_SCALING_NONE) { + cparams.rope_freq_scale = 1.0f; // never scale if scaling type is none + } + + if (std::isnan(cparams.yarn_ext_factor)) { // NaN indicates 'not set' + cparams.yarn_ext_factor = rope_scaling_type == LLAMA_ROPE_SCALING_YARN ? 
1.0f : 0.0f; + } if (params.seed == LLAMA_DEFAULT_SEED) { params.seed = time(NULL); diff --git a/llama.h b/llama.h index 75fe391ef..3f1becd76 100644 --- a/llama.h +++ b/llama.h @@ -106,6 +106,14 @@ extern "C" { LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; + enum llama_rope_scaling_type { + LLAMA_ROPE_SCALING_UNSPECIFIED = -1, + LLAMA_ROPE_SCALING_NONE = 0, + LLAMA_ROPE_SCALING_LINEAR = 1, + LLAMA_ROPE_SCALING_YARN = 2, + LLAMA_ROPE_SCALING_MAX_VALUE = LLAMA_ROPE_SCALING_YARN, + }; + typedef struct llama_token_data { llama_token id; // token id float logit; // log-odds of the token @@ -172,10 +180,16 @@ extern "C" { uint32_t n_batch; // prompt processing maximum batch size uint32_t n_threads; // number of threads to use for generation uint32_t n_threads_batch; // number of threads to use for batch processing + int8_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type` // ref: https://github.com/ggerganov/llama.cpp/pull/2054 - float rope_freq_base; // RoPE base frequency, 0 = from model - float rope_freq_scale; // RoPE frequency scaling factor, 0 = from model + float rope_freq_base; // RoPE base frequency, 0 = from model + float rope_freq_scale; // RoPE frequency scaling factor, 0 = from model + float yarn_ext_factor; // YaRN extrapolation mix factor, NaN = from model + float yarn_attn_factor; // YaRN magnitude scaling factor + float yarn_beta_fast; // YaRN low correction dim + float yarn_beta_slow; // YaRN high correction dim + uint32_t yarn_orig_ctx; // YaRN original context size // Keep the booleans together to avoid misalignment during copy-by-value. bool mul_mat_q; // if true, use experimental mul_mat_q kernels (DEPRECATED - always true) From d02e98cde035d91ed8032ab943d1d504fe9da394 Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 1 Nov 2023 23:10:09 +0100 Subject: [PATCH 063/859] ggml-cuda : compute ptrs for cublasGemmBatchedEx in a kernel (#3891) * ggml-cuda : compute ptrs for cublasGemmBatchedEx in a kernel * fix warnings --- ggml-cuda.cu | 78 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 45 insertions(+), 33 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 12ee10e3d..61cd1747c 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6696,8 +6696,10 @@ inline void ggml_cuda_op_clamp( GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - const float min = ((float *) dst->op_params)[0]; - const float max = ((float *) dst->op_params)[1]; + float min; + float max; + memcpy(&min, dst->op_params, sizeof(float)); + memcpy(&max, (float *) dst->op_params + 1, sizeof(float)); clamp_f32_cuda(src0_dd, dst_dd, min, max, ggml_nelements(src0), main_stream); CUDA_CHECK(cudaGetLastError()); @@ -7221,6 +7223,30 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor ggml_mul_mat_vec_nc_f16_f32_cuda(src0_ddq, src1_ddf, dst_ddf, ne00, ne01, row_stride_x, ne02, ne12, channel_stride_x, main_stream); } +__global__ void k_compute_batched_ptrs( + const half * src0_as_f16, const half * src1_as_f16, half * dst_f16, + void ** ptrs, + int ne12, int ne13, + int ne23, + int nb02, int nb03, + int nb12, int nb13, + int nb2, int nb3, + int r2, int r3) { + int i13 = blockIdx.x * blockDim.x + threadIdx.x; + int i12 = blockIdx.y * blockDim.y + threadIdx.y; + + if (i13 >= ne13 || i12 >= ne12) { + return; + } + + int i03 = i13 / r3; + int i02 = i12 / r2; + + ptrs[0*ne23 + i12 + i13*ne12] = (char *) src0_as_f16 + i02*nb02 + i03*nb03; + ptrs[1*ne23 + i12 + i13*ne12] = (char *) src1_as_f16 + 
i12*nb12/2 + i13*nb13/2; + ptrs[2*ne23 + i12 + i13*ne12] = (char *) dst_f16 + i12* nb2/2 + i13* nb3/2; +} + static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); @@ -7322,49 +7348,35 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_GEMM_DEFAULT_TENSOR_OP)); } else { // use cublasGemmBatchedEx - // TODO: https://github.com/ggerganov/llama.cpp/pull/3749#discussion_r1369997000 const int ne23 = ne12*ne13; - // TODO: avoid this alloc - void ** ptrs = (void **) malloc(3*ne23*sizeof(void *)); - - for (int i13 = 0; i13 < ne13; ++i13) { - for (int i12 = 0; i12 < ne12; ++i12) { - int i03 = i13 / r3; - int i02 = i12 / r2; - - ptrs[0*ne23 + i12 + i13*ne12] = (char *) src0_as_f16 + i02*src0->nb[2] + i03*src0->nb[3]; - ptrs[1*ne23 + i12 + i13*ne12] = (char *) src1_as_f16 + i12*src1->nb[2]/2 + i13*src1->nb[3]/2; - ptrs[2*ne23 + i12 + i13*ne12] = (char *) dst_f16 + i12* dst->nb[2]/2 + i13* dst->nb[3]/2; - } - } - - // allocate device memory for pointers void ** ptrs_as = nullptr; - CUDA_CHECK(cudaMalloc(&ptrs_as, 3*ne23*sizeof(void *))); + size_t ptrs_s = 0; + ptrs_as = (void **) ggml_cuda_pool_malloc(3*ne23*sizeof(void *), &ptrs_s); - // TODO: this does not work for some reason -- not sure why? - //size_t ptrs_s = 0; - //ptrs_as = (void **) ggml_cuda_pool_malloc(3*ne23*sizeof(void *), &ptrs_s); - - // copy pointers to device - CUDA_CHECK(cudaMemcpy(ptrs_as, ptrs, 3*ne23*sizeof(void *), cudaMemcpyHostToDevice)); - - free(ptrs); + dim3 block_dims(ne13, ne12); + k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( + src0_as_f16, src1_as_f16, dst_f16, + ptrs_as, + ne12, ne13, + ne23, + nb02, nb03, + nb12, nb13, + dst->nb[2], dst->nb[3], + r2, r3); + CUDA_CHECK(cudaGetLastError()); CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - &alpha_f16, (const void **) (ptrs_as + 0*ne23), CUDA_R_16F, nb01/sizeof(half), - (const void **) (ptrs_as + 1*ne23), CUDA_R_16F, nb11/sizeof(float), - &beta_f16, ( void **) (ptrs_as + 2*ne23), CUDA_R_16F, ne01, + &alpha_f16, (const void * const *) (ptrs_as + 0*ne23), CUDA_R_16F, nb01/sizeof(half), + (const void * const *) (ptrs_as + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + &beta_f16, ( void ** ) (ptrs_as + 2*ne23), CUDA_R_16F, ne01, ne23, CUBLAS_COMPUTE_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); - // free device memory for pointers - CUDA_CHECK(cudaFree(ptrs_as)); - //ggml_cuda_pool_free(ptrs_as, ptrs_s); + ggml_cuda_pool_free(ptrs_as, ptrs_s); } #endif From 0eb332a10f3f14a3746c391bf80ff5e7bdf29d5d Mon Sep 17 00:00:00 2001 From: cebtenzzre Date: Wed, 1 Nov 2023 19:29:14 -0400 Subject: [PATCH 064/859] llama : fix llama_context_default_params after #2268 (#3893) --- llama.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/llama.cpp b/llama.cpp index 685882c20..32d7d23de 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7980,6 +7980,7 @@ struct llama_context_params llama_context_default_params() { /*.yarn_attn_factor =*/ 1.0f, /*.yarn_beta_fast =*/ 32.0f, /*.yarn_beta_slow =*/ 1.0f, + /*.yarn_orig_ctx =*/ 0, /*.mul_mat_q =*/ true, /*.f16_kv =*/ true, /*.logits_all =*/ false, From 2fffa0d61fa10e4b466e78cabcc6a4e16717b580 Mon Sep 17 00:00:00 2001 From: cebtenzzre Date: Thu, 2 Nov 2023 01:49:44 -0400 Subject: [PATCH 065/859] cuda : fix RoPE after #2268 (#3897) --- ggml-cuda.cu | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/ggml-cuda.cu b/ggml-cuda.cu index 61cd1747c..57a528ede 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -4539,7 +4539,7 @@ static __global__ void rope( const int i2 = row/p_delta_rows; const int p = has_pos ? pos[i2] : 0; - const float theta_base = p*powf(freq_base, -col/ncols); + const float theta_base = p*powf(freq_base, -float(col)/ncols); float cos_theta, sin_theta; rope_yarn(theta_base, freq_scale, corr_dims, col, ext_factor, attn_factor, &cos_theta, &sin_theta); @@ -4566,8 +4566,8 @@ static __global__ void rope_neox( const int i = row*ncols + col/2; const int i2 = row/p_delta_rows; - // simplified from `(row * ncols + col) * (-1 / ncols)` - const float cur_rot = -col/ncols - row; + // simplified from `(ib * ncols + col) * (-1 / ncols)`, where ib is assumed to be zero + const float cur_rot = -float(col)/ncols; const int p = has_pos ? pos[i2] : 0; const float theta_base = p*powf(freq_base, cur_rot); From 183b3fac6c28e65d23ac0230c1dd6fb84bf0154d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 2 Nov 2023 08:33:37 +0200 Subject: [PATCH 066/859] metal : fix build errors and kernel sig after #2268 (#3898) --- ggml-metal.m | 57 ++++++++++++++++++++++++------------------------ ggml-metal.metal | 16 +++++++++----- 2 files changed, 40 insertions(+), 33 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 611d5e173..b33a3cb8f 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1419,34 +1419,35 @@ void ggml_metal_graph_compute( default: GGML_ASSERT(false); }; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:8]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:10]; - [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:11]; - [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:12]; - [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:13]; - [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:14]; - [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:15]; - [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:16]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:17]; - [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:18]; - [encoder setBytes:&n_past length:sizeof( int) atIndex:19]; - [encoder setBytes:&n_dims length:sizeof( int) atIndex:20]; - [encoder setBytes:&mode length:sizeof( int) atIndex:21]; - [encoder setBytes:&freq_base length:sizeof(float) atIndex:22]; - [encoder setBytes:&freq_scale length:sizeof(float) atIndex:23]; - [encoder setBytes:&ext_factor length:sizeof(float) atIndex:24]; - [encoder setBytes:&attn_factor length:sizeof(float) atIndex:25]; - [encoder setBytes:&beta_fast length:sizeof(float) atIndex:26]; - [encoder setBytes:&beta_slow length:sizeof(float) atIndex:27]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne02 
length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:10]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:14]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:17]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:18]; + [encoder setBytes:&n_past length:sizeof( int) atIndex:19]; + [encoder setBytes:&n_dims length:sizeof( int) atIndex:20]; + [encoder setBytes:&mode length:sizeof( int) atIndex:21]; + [encoder setBytes:&n_orig_ctx length:sizeof( int) atIndex:22]; + [encoder setBytes:&freq_base length:sizeof( float) atIndex:23]; + [encoder setBytes:&freq_scale length:sizeof( float) atIndex:24]; + [encoder setBytes:&ext_factor length:sizeof( float) atIndex:25]; + [encoder setBytes:&attn_factor length:sizeof( float) atIndex:26]; + [encoder setBytes:&beta_fast length:sizeof( float) atIndex:27]; + [encoder setBytes:&beta_slow length:sizeof( float) atIndex:28]; [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; diff --git a/ggml-metal.metal b/ggml-metal.metal index 471d7d390..7c35f23a7 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -1070,20 +1070,20 @@ static float rope_yarn_ramp(const float low, const float high, const int i0) { // MIT licensed. Copyright (c) 2023 Jeffrey Quesnelle and Bowen Peng. 
static void rope_yarn( float theta_extrap, float freq_scale, float corr_dims[2], int64_t i0, float ext_factor, float mscale, - float * cos_theta, float * sin_theta + thread float * cos_theta, thread float * sin_theta ) { // Get n-d rotational scaling corrected for extrapolation float theta_interp = freq_scale * theta_extrap; float theta = theta_interp; if (ext_factor != 0.0f) { - ramp_mix = rope_yarn_ramp(corr_dims[0], corr_dims[1], i0) * ext_factor; + float ramp_mix = rope_yarn_ramp(corr_dims[0], corr_dims[1], i0) * ext_factor; theta = theta_interp * (1 - ramp_mix) + theta_extrap * ramp_mix; // Get n-d magnitude scaling corrected for interpolation - mscale *= 1.0f + 0.1f * logf(1.0f / freq_scale); + mscale *= 1.0f + 0.1f * log(1.0f / freq_scale); } - *cos_theta = cosf(theta) * mscale; - *sin_theta = sinf(theta) * mscale; + *cos_theta = cos(theta) * mscale; + *sin_theta = sin(theta) * mscale; } // Apparently solving `n_rot = 2pi * x * base^((2 * max_pos_emb) / n_dims)` for x, we get @@ -1123,8 +1123,13 @@ typedef void (rope_t)( constant int & n_past, constant int & n_dims, constant int & mode, + constant int & n_orig_ctx, constant float & freq_base, constant float & freq_scale, + constant float & ext_factor, + constant float & attn_factor, + constant float & beta_fast, + constant float & beta_slow, uint tiitg[[thread_index_in_threadgroup]], uint3 tptg[[threads_per_threadgroup]], uint3 tgpig[[threadgroup_position_in_grid]]); @@ -1153,6 +1158,7 @@ kernel void kernel_rope( constant int & n_past, constant int & n_dims, constant int & mode, + constant int & n_orig_ctx, constant float & freq_base, constant float & freq_scale, constant float & ext_factor, From 4d719a6d4e74b9a98e75f826f865f3153717d54b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 2 Nov 2023 08:35:10 +0200 Subject: [PATCH 067/859] cuda : check if this fixes Pascal card regression (#3882) --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 57a528ede..e46295126 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7420,7 +7420,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } else if (all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (all_on_device && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { From b12fa0d1c13596869c512f49a526b979c94787cc Mon Sep 17 00:00:00 2001 From: cebtenzzre Date: Thu, 2 Nov 2023 02:50:16 -0400 Subject: [PATCH 068/859] build : link against build info instead of compiling against it (#3879) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * cmake : fix build when .git does not exist * cmake : simplify BUILD_INFO target * cmake : add missing dependencies on BUILD_INFO * build : link against build info instead of compiling against it * zig : make build info a .cpp source instead of a header Co-authored-by: Matheus C. França * cmake : revert change to CMP0115 --------- Co-authored-by: Matheus C. 
França --- .gitignore | 2 +- CMakeLists.txt | 33 --------- Makefile | 71 ++++++++++---------- build.zig | 38 +++++------ common/CMakeLists.txt | 42 +++++++++++- common/build-info.cpp.in | 4 ++ common/common.cpp | 5 +- common/common.h | 12 +++- examples/benchmark/CMakeLists.txt | 5 +- examples/benchmark/benchmark-matmult.cpp | 1 - examples/embedding/CMakeLists.txt | 3 - examples/embedding/embedding.cpp | 1 - examples/infill/CMakeLists.txt | 3 - examples/infill/infill.cpp | 5 +- examples/llama-bench/CMakeLists.txt | 3 - examples/llama-bench/llama-bench.cpp | 5 +- examples/llava/CMakeLists.txt | 6 -- examples/main/CMakeLists.txt | 3 - examples/main/main.cpp | 5 +- examples/parallel/CMakeLists.txt | 3 - examples/parallel/parallel.cpp | 2 - examples/perplexity/CMakeLists.txt | 3 - examples/perplexity/perplexity.cpp | 1 - examples/quantize-stats/CMakeLists.txt | 2 +- examples/quantize-stats/quantize-stats.cpp | 1 - examples/quantize/CMakeLists.txt | 5 +- examples/quantize/quantize.cpp | 1 - examples/save-load-state/CMakeLists.txt | 3 - examples/save-load-state/save-load-state.cpp | 1 - examples/server/CMakeLists.txt | 3 - examples/server/server.cpp | 5 +- examples/speculative/CMakeLists.txt | 3 - examples/speculative/speculative.cpp | 2 - scripts/build-info.cmake | 30 +++++---- scripts/build-info.h.in | 9 --- scripts/build-info.sh | 13 ++-- 36 files changed, 143 insertions(+), 191 deletions(-) create mode 100644 common/build-info.cpp.in delete mode 100644 scripts/build-info.h.in diff --git a/.gitignore b/.gitignore index 5d7c5479e..50cbd0b47 100644 --- a/.gitignore +++ b/.gitignore @@ -65,7 +65,7 @@ models-mnt /parallel /train-text-from-scratch /vdot -build-info.h +/common/build-info.cpp arm_neon.h compile_commands.json CMakeSettings.json diff --git a/CMakeLists.txt b/CMakeLists.txt index 3659279e2..611ed3f4d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -100,39 +100,6 @@ option(LLAMA_BUILD_TESTS "llama: build tests" ${LLAMA_STANDALO option(LLAMA_BUILD_EXAMPLES "llama: build examples" ${LLAMA_STANDALONE}) option(LLAMA_BUILD_SERVER "llama: build server example" ON) -# -# Build info header -# - -# Generate initial build-info.h -include(${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake) - -if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/.git") - set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/.git") - - # Is git submodule - if(NOT IS_DIRECTORY "${GIT_DIR}") - file(READ ${GIT_DIR} REAL_GIT_DIR_LINK) - string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" REAL_GIT_DIR ${REAL_GIT_DIR_LINK}) - set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/${REAL_GIT_DIR}") - endif() - - # Add a custom target for build-info.h - add_custom_target(BUILD_INFO ALL DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/build-info.h") - - # Add a custom command to rebuild build-info.h when .git/index changes - add_custom_command( - OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/build-info.h" - COMMENT "Generating build details from Git" - COMMAND ${CMAKE_COMMAND} -DMSVC=${MSVC} -DCMAKE_C_COMPILER_VERSION=${CMAKE_C_COMPILER_VERSION} -DCMAKE_C_COMPILER_ID=${CMAKE_C_COMPILER_ID} -DCMAKE_VS_PLATFORM_NAME=${CMAKE_VS_PLATFORM_NAME} -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} -P "${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake" - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} - DEPENDS "${GIT_DIR}/index" - VERBATIM - ) -else() - message(WARNING "Git repository not found; to enable automatic generation of build info, make sure Git is installed and the project is a Git repository.") -endif() - # # Compile flags # diff --git a/Makefile b/Makefile index c53c1e726..300c1e6c7 100644 --- 
a/Makefile +++ b/Makefile @@ -542,9 +542,9 @@ llama.o: llama.cpp ggml.h ggml-alloc.h ggml-backend.h ggml-cuda.h ggml-metal.h l $(CXX) $(CXXFLAGS) -c $< -o $@ COMMON_H_DEPS = common/common.h common/sampling.h common/log.h -COMMON_DEPS = common.o sampling.o grammar-parser.o +COMMON_DEPS = common.o sampling.o grammar-parser.o build-info.o -common.o: common/common.cpp build-info.h $(COMMON_H_DEPS) +common.o: common/common.cpp $(COMMON_H_DEPS) $(CXX) $(CXXFLAGS) -c $< -o $@ sampling.o: common/sampling.cpp $(COMMON_H_DEPS) @@ -563,46 +563,46 @@ libllama.so: llama.o ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -shared -fPIC -o $@ $^ $(LDFLAGS) clean: - rm -vrf *.o tests/*.o *.so *.dll benchmark-matmult build-info.h *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) + rm -vrf *.o tests/*.o *.so *.dll benchmark-matmult common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) # # Examples # -main: examples/main/main.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) +main: examples/main/main.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) @echo @echo '==== Run ./main -h for help. ====' @echo -infill: examples/infill/infill.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) +infill: examples/infill/infill.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -simple: examples/simple/simple.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +simple: examples/simple/simple.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -batched: examples/batched/batched.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +batched: examples/batched/batched.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -batched-bench: examples/batched-bench/batched-bench.cpp build-info.h ggml.o llama.o common.o $(OBJS) +batched-bench: examples/batched-bench/batched-bench.cpp build-info.o ggml.o llama.o common.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -quantize: examples/quantize/quantize.cpp build-info.h ggml.o llama.o $(OBJS) +quantize: examples/quantize/quantize.cpp build-info.o ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -quantize-stats: examples/quantize-stats/quantize-stats.cpp build-info.h ggml.o llama.o $(OBJS) +quantize-stats: examples/quantize-stats/quantize-stats.cpp build-info.o ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -perplexity: examples/perplexity/perplexity.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +perplexity: examples/perplexity/perplexity.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -embedding: examples/embedding/embedding.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +embedding: examples/embedding/embedding.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -save-load-state: examples/save-load-state/save-load-state.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp 
examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h build-info.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) -Wno-cast-qual gguf: examples/gguf/gguf.cpp ggml.o llama.o $(OBJS) @@ -614,7 +614,7 @@ train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratc convert-llama2c-to-ggml: examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -llama-bench: examples/llama-bench/llama-bench.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +llama-bench: examples/llama-bench/llama-bench.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) llava: examples/llava/llava.cpp examples/llava/llava-utils.h examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) @@ -623,19 +623,19 @@ llava: examples/llava/llava.cpp examples/llava/llava-utils.h examples/llava/clip baby-llama: examples/baby-llama/baby-llama.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -beam-search: examples/beam-search/beam-search.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +beam-search: examples/beam-search/beam-search.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -finetune: examples/finetune/finetune.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) +finetune: examples/finetune/finetune.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -export-lora: examples/export-lora/export-lora.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +export-lora: examples/export-lora/export-lora.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -speculative: examples/speculative/speculative.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +speculative: examples/speculative/speculative.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -parallel: examples/parallel/parallel.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +parallel: examples/parallel/parallel.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) ifdef LLAMA_METAL @@ -648,7 +648,7 @@ swift: examples/batched.swift (cd examples/batched.swift; make build) endif -build-info.h: $(wildcard .git/index) scripts/build-info.sh +common/build-info.cpp: $(wildcard .git/index) scripts/build-info.sh @sh scripts/build-info.sh $(CC) > $@.tmp @if ! 
cmp -s $@.tmp $@; then \ mv $@.tmp $@; \ @@ -656,13 +656,16 @@ build-info.h: $(wildcard .git/index) scripts/build-info.sh rm $@.tmp; \ fi +build-info.o: common/build-info.cpp + $(CXX) $(CXXFLAGS) -c $(filter-out %.h,$^) -o $@ + # # Tests # tests: $(TEST_TARGETS) -benchmark-matmult: examples/benchmark/benchmark-matmult.cpp build-info.h ggml.o $(OBJS) +benchmark-matmult: examples/benchmark/benchmark-matmult.cpp build-info.o ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) run-benchmark-matmult: benchmark-matmult @@ -676,40 +679,40 @@ vdot: pocs/vdot/vdot.cpp ggml.o $(OBJS) q8dot: pocs/vdot/q8dot.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -tests/test-llama-grammar: tests/test-llama-grammar.cpp build-info.h ggml.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +tests/test-llama-grammar: tests/test-llama-grammar.cpp ggml.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-grammar-parser: tests/test-grammar-parser.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +tests/test-grammar-parser: tests/test-grammar-parser.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-double-float: tests/test-double-float.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-double-float: tests/test-double-float.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-grad0: tests/test-grad0.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-grad0: tests/test-grad0.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-opt: tests/test-opt.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-opt: tests/test-opt.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-quantize-fns: tests/test-quantize-fns.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-quantize-fns: tests/test-quantize-fns.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-quantize-perf: tests/test-quantize-perf.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-quantize-perf: tests/test-quantize-perf.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-sampling: tests/test-sampling.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-sampling: tests/test-sampling.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-tokenizer-0-falcon: tests/test-tokenizer-0-falcon.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-tokenizer-0-falcon: tests/test-tokenizer-0-falcon.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-tokenizer-0-llama: tests/test-tokenizer-0-llama.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-tokenizer-0-llama: tests/test-tokenizer-0-llama.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-tokenizer-1-bpe: tests/test-tokenizer-1-bpe.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-tokenizer-1-bpe: tests/test-tokenizer-1-bpe.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -tests/test-tokenizer-1-llama: 
tests/test-tokenizer-1-llama.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +tests/test-tokenizer-1-llama: tests/test-tokenizer-1-llama.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) tests/test-c.o: tests/test-c.c llama.h diff --git a/build.zig b/build.zig index 9b58b74ca..699738f3d 100644 --- a/build.zig +++ b/build.zig @@ -10,7 +10,6 @@ const Maker = struct { builder: *std.build.Builder, target: CrossTarget, optimize: Mode, - config_header: *ConfigHeader, enable_lto: bool, include_dirs: ArrayList([]const u8), @@ -41,26 +40,24 @@ const Maker = struct { const commit_hash = try std.ChildProcess.exec( .{ .allocator = builder.allocator, .argv = &.{ "git", "rev-parse", "HEAD" } }, ); - const config_header = builder.addConfigHeader( - .{ .style = .blank, .include_path = "build-info.h" }, - .{ - .BUILD_NUMBER = 0, - .BUILD_COMMIT = commit_hash.stdout[0 .. commit_hash.stdout.len - 1], // omit newline - .BUILD_COMPILER = builder.fmt("Zig {s}", .{zig_version}), - .BUILD_TARGET = try target.allocDescription(builder.allocator), - }, - ); + try std.fs.cwd().writeFile("common/build-info.cpp", builder.fmt( + \\int LLAMA_BUILD_NUMBER = {}; + \\char const *LLAMA_COMMIT = "{s}"; + \\char const *LLAMA_COMPILER = "Zig {s}"; + \\char const *LLAMA_BUILD_TARGET = "{s}"; + \\ + , .{ 0, commit_hash.stdout[0 .. commit_hash.stdout.len - 1], zig_version, try target.allocDescription(builder.allocator) })); var m = Maker{ .builder = builder, .target = target, .optimize = builder.standardOptimizeOption(.{}), - .config_header = config_header, .enable_lto = false, .include_dirs = ArrayList([]const u8).init(builder.allocator), .cflags = ArrayList([]const u8).init(builder.allocator), .cxxflags = ArrayList([]const u8).init(builder.allocator), .objs = ArrayList(*Compile).init(builder.allocator), }; + try m.addCFlag("-std=c11"); try m.addCxxFlag("-std=c++11"); try m.addProjectInclude(&.{}); @@ -72,7 +69,7 @@ const Maker = struct { const o = m.builder.addObject(.{ .name = name, .target = m.target, .optimize = m.optimize }); if (o.target.getAbi() != .msvc) o.defineCMacro("_GNU_SOURCE", null); - o.addConfigHeader(m.config_header); + if (std.mem.endsWith(u8, src, ".c")) { o.addCSourceFiles(&.{src}, m.cflags.items); o.linkLibC(); @@ -85,7 +82,6 @@ const Maker = struct { o.linkLibCpp(); } } - o.addConfigHeader(m.config_header); for (m.include_dirs.items) |i| o.addIncludePath(.{ .path = i }); o.want_lto = m.enable_lto; return o; @@ -105,7 +101,6 @@ const Maker = struct { // linkLibCpp already add (libc++ + libunwind + libc) e.linkLibCpp(); } - e.addConfigHeader(m.config_header); m.builder.installArtifact(e); e.want_lto = m.enable_lto; return e; @@ -121,6 +116,7 @@ pub fn build(b: *std.build.Builder) !void { const ggml_backend = make.obj("ggml-backend", "ggml-backend.c"); const ggml_quants = make.obj("ggml-quants", "ggml-quants.c"); const llama = make.obj("llama", "llama.cpp"); + const buildinfo = make.obj("common", "common/build-info.cpp"); const common = make.obj("common", "common/common.cpp"); const console = make.obj("console", "common/console.cpp"); const sampling = make.obj("sampling", "common/sampling.cpp"); @@ -128,14 +124,14 @@ pub fn build(b: *std.build.Builder) !void { const train = make.obj("train", "common/train.cpp"); const clip = make.obj("clip", "examples/llava/clip.cpp"); - _ = make.exe("main", "examples/main/main.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, sampling, console, grammar_parser }); - _ = make.exe("quantize", 
"examples/quantize/quantize.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common }); - _ = make.exe("perplexity", "examples/perplexity/perplexity.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common }); - _ = make.exe("embedding", "examples/embedding/embedding.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common }); - _ = make.exe("finetune", "examples/finetune/finetune.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, train }); - _ = make.exe("train-text-from-scratch", "examples/train-text-from-scratch/train-text-from-scratch.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, train }); + _ = make.exe("main", "examples/main/main.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, sampling, console, grammar_parser }); + _ = make.exe("quantize", "examples/quantize/quantize.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo }); + _ = make.exe("perplexity", "examples/perplexity/perplexity.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo }); + _ = make.exe("embedding", "examples/embedding/embedding.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo }); + _ = make.exe("finetune", "examples/finetune/finetune.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, train }); + _ = make.exe("train-text-from-scratch", "examples/train-text-from-scratch/train-text-from-scratch.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, train }); - const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, sampling, grammar_parser, clip }); + const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, sampling, grammar_parser, clip }); if (server.target.isWindows()) { server.linkSystemLibrary("ws2_32"); } diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index fbb0ff095..0150114e3 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -1,8 +1,46 @@ # common + +# Build info header +# + +if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/../.git") + set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../.git") + + # Is git submodule + if(NOT IS_DIRECTORY "${GIT_DIR}") + file(READ ${GIT_DIR} REAL_GIT_DIR_LINK) + string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" REAL_GIT_DIR ${REAL_GIT_DIR_LINK}) + set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/${REAL_GIT_DIR}") + endif() + + set(GIT_INDEX "${GIT_DIR}/index") +else() + message(WARNING "Git repository not found; to enable automatic generation of build info, make sure Git is installed and the project is a Git repository.") + set(GIT_INDEX "") +endif() + +# Add a custom command to rebuild build-info.cpp when .git/index changes +add_custom_command( + OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/build-info.cpp" + COMMENT "Generating build details from Git" + COMMAND ${CMAKE_COMMAND} -DMSVC=${MSVC} -DCMAKE_C_COMPILER_VERSION=${CMAKE_C_COMPILER_VERSION} + -DCMAKE_C_COMPILER_ID=${CMAKE_C_COMPILER_ID} -DCMAKE_VS_PLATFORM_NAME=${CMAKE_VS_PLATFORM_NAME} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} -P "${CMAKE_CURRENT_SOURCE_DIR}/../scripts/build-info.cmake" + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/.." 
+ DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/build-info.cpp.in" ${GIT_INDEX} + VERBATIM +) +set(TARGET build_info) +add_library(${TARGET} OBJECT build-info.cpp) +if (BUILD_SHARED_LIBS) + set_target_properties(${TARGET} PROPERTIES POSITION_INDEPENDENT_CODE ON) +endif() + + set(TARGET common) -add_library(${TARGET} OBJECT +add_library(${TARGET} STATIC common.h common.cpp sampling.h @@ -21,4 +59,4 @@ endif() target_include_directories(${TARGET} PUBLIC .) target_compile_features(${TARGET} PUBLIC cxx_std_11) -target_link_libraries(${TARGET} PRIVATE llama) +target_link_libraries(${TARGET} PRIVATE llama build_info) diff --git a/common/build-info.cpp.in b/common/build-info.cpp.in new file mode 100644 index 000000000..0b945aa68 --- /dev/null +++ b/common/build-info.cpp.in @@ -0,0 +1,4 @@ +int LLAMA_BUILD_NUMBER = @BUILD_NUMBER@; +char const *LLAMA_COMMIT = "@BUILD_COMMIT@"; +char const *LLAMA_COMPILER = "@BUILD_COMPILER@"; +char const *LLAMA_BUILD_TARGET = "@BUILD_TARGET@"; diff --git a/common/common.cpp b/common/common.cpp index b182ffaae..e938dee16 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1,5 +1,4 @@ #include "common.h" -#include "build-info.h" #include "llama.h" #include @@ -1199,8 +1198,8 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l const std::string & timestamp, const std::vector & prompt_tokens, const char * model_desc) { const llama_sampling_params & sparams = params.sparams; - fprintf(stream, "build_commit: %s\n", BUILD_COMMIT); - fprintf(stream, "build_number: %d\n", BUILD_NUMBER); + fprintf(stream, "build_commit: %s\n", LLAMA_COMMIT); + fprintf(stream, "build_number: %d\n", LLAMA_BUILD_NUMBER); fprintf(stream, "cpu_has_arm_fma: %s\n", ggml_cpu_has_arm_fma() ? "true" : "false"); fprintf(stream, "cpu_has_avx: %s\n", ggml_cpu_has_avx() ? "true" : "false"); fprintf(stream, "cpu_has_avx2: %s\n", ggml_cpu_has_avx2() ? "true" : "false"); diff --git a/common/common.h b/common/common.h index 7be69f925..72a49b890 100644 --- a/common/common.h +++ b/common/common.h @@ -26,11 +26,17 @@ #define die(msg) do { fputs("error: " msg "\n", stderr); exit(1); } while (0) #define die_fmt(fmt, ...) 
do { fprintf(stderr, "error: " fmt "\n", __VA_ARGS__); exit(1); } while (0) -#define print_build_info() do { \ - fprintf(stderr, "%s: build = %d (%s)\n", __func__, BUILD_NUMBER, BUILD_COMMIT); \ - fprintf(stderr, "%s: built with %s for %s\n", __func__, BUILD_COMPILER, BUILD_TARGET); \ +#define print_build_info() do { \ + fprintf(stderr, "%s: build = %d (%s)\n", __func__, LLAMA_BUILD_NUMBER, LLAMA_COMMIT); \ + fprintf(stderr, "%s: built with %s for %s\n", __func__, LLAMA_COMPILER, LLAMA_BUILD_TARGET); \ } while(0) +// build info +extern int LLAMA_BUILD_NUMBER; +extern char const *LLAMA_COMMIT; +extern char const *LLAMA_COMPILER; +extern char const *LLAMA_BUILD_TARGET; + // // CLI argument parsing // diff --git a/examples/benchmark/CMakeLists.txt b/examples/benchmark/CMakeLists.txt index 14916d831..2bb47bab5 100644 --- a/examples/benchmark/CMakeLists.txt +++ b/examples/benchmark/CMakeLists.txt @@ -1,9 +1,6 @@ set(TARGET benchmark) add_executable(${TARGET} benchmark-matmult.cpp) install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TARGET} PRIVATE llama ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE llama build_info ${CMAKE_THREAD_LIBS_INIT}) target_include_directories(${TARGET} PRIVATE ../../common) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/benchmark/benchmark-matmult.cpp b/examples/benchmark/benchmark-matmult.cpp index f1c382aa9..76e3f57cc 100644 --- a/examples/benchmark/benchmark-matmult.cpp +++ b/examples/benchmark/benchmark-matmult.cpp @@ -1,4 +1,3 @@ -#include "build-info.h" #include "common.h" #include "ggml.h" diff --git a/examples/embedding/CMakeLists.txt b/examples/embedding/CMakeLists.txt index 0c752c7bb..8ffc33868 100644 --- a/examples/embedding/CMakeLists.txt +++ b/examples/embedding/CMakeLists.txt @@ -3,6 +3,3 @@ add_executable(${TARGET} embedding.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp index 14075609e..3295cd240 100644 --- a/examples/embedding/embedding.cpp +++ b/examples/embedding/embedding.cpp @@ -1,4 +1,3 @@ -#include "build-info.h" #include "common.h" #include "llama.h" diff --git a/examples/infill/CMakeLists.txt b/examples/infill/CMakeLists.txt index 57d01cb0b..e4e8028da 100644 --- a/examples/infill/CMakeLists.txt +++ b/examples/infill/CMakeLists.txt @@ -3,6 +3,3 @@ add_executable(${TARGET} infill.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/infill/infill.cpp b/examples/infill/infill.cpp index 9c52b7bba..62f5ce3c1 100644 --- a/examples/infill/infill.cpp +++ b/examples/infill/infill.cpp @@ -2,7 +2,6 @@ #include "console.h" #include "llama.h" -#include "build-info.h" #include "grammar-parser.h" #include @@ -184,8 +183,8 @@ int main(int argc, char ** argv) { LOG_TEE("%s: warning: scaling RoPE frequency by %g.\n", __func__, params.rope_freq_scale); } - LOG_TEE("%s: build = %d (%s)\n", __func__, BUILD_NUMBER, BUILD_COMMIT); - LOG_TEE("%s: built with %s for %s\n", __func__, BUILD_COMPILER, BUILD_TARGET); + LOG_TEE("%s: build = %d (%s)\n", 
__func__, LLAMA_BUILD_NUMBER, LLAMA_COMMIT); + LOG_TEE("%s: built with %s for %s\n", __func__, LLAMA_COMPILER, LLAMA_BUILD_TARGET); if (params.seed == LLAMA_DEFAULT_SEED) { params.seed = time(NULL); diff --git a/examples/llama-bench/CMakeLists.txt b/examples/llama-bench/CMakeLists.txt index 7e395afd0..5bdbea4e2 100644 --- a/examples/llama-bench/CMakeLists.txt +++ b/examples/llama-bench/CMakeLists.txt @@ -3,6 +3,3 @@ add_executable(${TARGET} llama-bench.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 780398184..9bd82d565 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -19,7 +19,6 @@ #include "ggml.h" #include "llama.h" #include "common.h" -#include "build-info.h" #include "ggml-cuda.h" // utils @@ -641,8 +640,8 @@ struct test { } }; -const std::string test::build_commit = BUILD_COMMIT; -const int test::build_number = BUILD_NUMBER; +const std::string test::build_commit = LLAMA_COMMIT; +const int test::build_number = LLAMA_BUILD_NUMBER; const bool test::cuda = !!ggml_cpu_has_cublas(); const bool test::opencl = !!ggml_cpu_has_clblast(); const bool test::metal = !!ggml_cpu_has_metal(); diff --git a/examples/llava/CMakeLists.txt b/examples/llava/CMakeLists.txt index 2d7979ecd..03d32c26e 100644 --- a/examples/llava/CMakeLists.txt +++ b/examples/llava/CMakeLists.txt @@ -5,9 +5,6 @@ target_link_libraries(${TARGET} PRIVATE common ggml ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) if (NOT MSVC) target_compile_options(${TARGET} PRIVATE -Wno-cast-qual) # stb_image.h - endif() -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) endif() set(TARGET llava) @@ -15,6 +12,3 @@ add_executable(${TARGET} llava.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama clip ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/main/CMakeLists.txt b/examples/main/CMakeLists.txt index cc1888948..d532980b7 100644 --- a/examples/main/CMakeLists.txt +++ b/examples/main/CMakeLists.txt @@ -3,6 +3,3 @@ add_executable(${TARGET} main.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 8a43b6ab8..8d985c82a 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -2,7 +2,6 @@ #include "console.h" #include "llama.h" -#include "build-info.h" #include #include @@ -153,8 +152,8 @@ int main(int argc, char ** argv) { LOG_TEE("%s: warning: scaling RoPE frequency by %g.\n", __func__, params.rope_freq_scale); } - LOG_TEE("%s: build = %d (%s)\n", __func__, BUILD_NUMBER, BUILD_COMMIT); - LOG_TEE("%s: built with %s for %s\n", __func__, BUILD_COMPILER, BUILD_TARGET); + LOG_TEE("%s: build = %d (%s)\n", __func__, LLAMA_BUILD_NUMBER, LLAMA_COMMIT); + LOG_TEE("%s: built with %s for %s\n", __func__, LLAMA_COMPILER, LLAMA_BUILD_TARGET); if (params.seed == LLAMA_DEFAULT_SEED) { params.seed = time(NULL); diff --git 
a/examples/parallel/CMakeLists.txt b/examples/parallel/CMakeLists.txt index 0bbf89eae..319535a6e 100644 --- a/examples/parallel/CMakeLists.txt +++ b/examples/parallel/CMakeLists.txt @@ -3,6 +3,3 @@ add_executable(${TARGET} parallel.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/parallel/parallel.cpp b/examples/parallel/parallel.cpp index 9a0b9c183..a78df305f 100644 --- a/examples/parallel/parallel.cpp +++ b/examples/parallel/parallel.cpp @@ -1,8 +1,6 @@ // A basic application simulating a server with multiple clients. // The clients submite requests to the server and they are processed in parallel. -#include "build-info.h" - #include "common.h" #include "llama.h" diff --git a/examples/perplexity/CMakeLists.txt b/examples/perplexity/CMakeLists.txt index af00b4e16..3c76d3221 100644 --- a/examples/perplexity/CMakeLists.txt +++ b/examples/perplexity/CMakeLists.txt @@ -3,6 +3,3 @@ add_executable(${TARGET} perplexity.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index bd2c73d87..de60c5227 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -1,4 +1,3 @@ -#include "build-info.h" #include "common.h" #include "llama.h" diff --git a/examples/quantize-stats/CMakeLists.txt b/examples/quantize-stats/CMakeLists.txt index db182e263..e31cf5e38 100644 --- a/examples/quantize-stats/CMakeLists.txt +++ b/examples/quantize-stats/CMakeLists.txt @@ -1,6 +1,6 @@ set(TARGET quantize-stats) add_executable(${TARGET} quantize-stats.cpp) install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TARGET} PRIVATE llama ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE llama build_info ${CMAKE_THREAD_LIBS_INIT}) target_include_directories(${TARGET} PRIVATE ../../common) target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/quantize-stats/quantize-stats.cpp b/examples/quantize-stats/quantize-stats.cpp index dd76b1cee..271282477 100644 --- a/examples/quantize-stats/quantize-stats.cpp +++ b/examples/quantize-stats/quantize-stats.cpp @@ -1,5 +1,4 @@ #define LLAMA_API_INTERNAL -#include "build-info.h" #include "common.h" #include "ggml.h" #include "llama.h" diff --git a/examples/quantize/CMakeLists.txt b/examples/quantize/CMakeLists.txt index 4a8eed544..6f374a2bd 100644 --- a/examples/quantize/CMakeLists.txt +++ b/examples/quantize/CMakeLists.txt @@ -1,9 +1,6 @@ set(TARGET quantize) add_executable(${TARGET} quantize.cpp) install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TARGET} PRIVATE llama ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE llama build_info ${CMAKE_THREAD_LIBS_INIT}) target_include_directories(${TARGET} PRIVATE ../../common) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index be0b2fe1e..d27ea5e91 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -1,4 +1,3 @@ -#include "build-info.h" #include "common.h" #include 
"llama.h" diff --git a/examples/save-load-state/CMakeLists.txt b/examples/save-load-state/CMakeLists.txt index eadd13cdf..cc6ed8554 100644 --- a/examples/save-load-state/CMakeLists.txt +++ b/examples/save-load-state/CMakeLists.txt @@ -3,6 +3,3 @@ add_executable(${TARGET} save-load-state.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/save-load-state/save-load-state.cpp b/examples/save-load-state/save-load-state.cpp index 38d05f4d3..48d801110 100644 --- a/examples/save-load-state/save-load-state.cpp +++ b/examples/save-load-state/save-load-state.cpp @@ -1,4 +1,3 @@ -#include "build-info.h" #include "common.h" #include "llama.h" diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt index a23ddcc55..1f0d26f77 100644 --- a/examples/server/CMakeLists.txt +++ b/examples/server/CMakeLists.txt @@ -11,6 +11,3 @@ if (WIN32) TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32) endif() target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 84b04d5a0..fd755327a 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1,6 +1,5 @@ #include "common.h" #include "llama.h" -#include "build-info.h" #include "grammar-parser.h" #include "../llava/clip.h" @@ -2264,8 +2263,8 @@ int main(int argc, char **argv) llama_backend_init(params.numa); - LOG_INFO("build info", {{"build", BUILD_NUMBER}, - {"commit", BUILD_COMMIT}}); + LOG_INFO("build info", {{"build", LLAMA_BUILD_NUMBER}, + {"commit", LLAMA_COMMIT}}); LOG_INFO("system info", { {"n_threads", params.n_threads}, diff --git a/examples/speculative/CMakeLists.txt b/examples/speculative/CMakeLists.txt index 6c5c9456e..810f3c46a 100644 --- a/examples/speculative/CMakeLists.txt +++ b/examples/speculative/CMakeLists.txt @@ -3,6 +3,3 @@ add_executable(${TARGET} speculative.cpp) install(TARGETS ${TARGET} RUNTIME) target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) -if(TARGET BUILD_INFO) - add_dependencies(${TARGET} BUILD_INFO) -endif() diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 323c74652..798684f66 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -1,5 +1,3 @@ -#include "build-info.h" - #include "common.h" #include "llama.h" diff --git a/scripts/build-info.cmake b/scripts/build-info.cmake index c86ab4379..73853dfa4 100644 --- a/scripts/build-info.cmake +++ b/scripts/build-info.cmake @@ -1,5 +1,5 @@ -set(TEMPLATE_FILE "${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.h.in") -set(HEADER_FILE "${CMAKE_CURRENT_SOURCE_DIR}/build-info.h") +set(TEMPLATE_FILE "${CMAKE_CURRENT_SOURCE_DIR}/common/build-info.cpp.in") +set(OUTPUT_FILE "${CMAKE_CURRENT_SOURCE_DIR}/common/build-info.cpp") set(BUILD_NUMBER 0) set(BUILD_COMMIT "unknown") set(BUILD_COMPILER "unknown") @@ -24,15 +24,21 @@ if(Git_FOUND) WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} OUTPUT_VARIABLE HEAD OUTPUT_STRIP_TRAILING_WHITESPACE + RESULT_VARIABLE RES ) + if (RES EQUAL 0) + set(BUILD_COMMIT ${HEAD}) + endif() execute_process( COMMAND ${GIT_EXECUTABLE} rev-list --count HEAD WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} OUTPUT_VARIABLE COUNT 
OUTPUT_STRIP_TRAILING_WHITESPACE + RESULT_VARIABLE RES ) - set(BUILD_COMMIT ${HEAD}) - set(BUILD_NUMBER ${COUNT}) + if (RES EQUAL 0) + set(BUILD_NUMBER ${COUNT}) + endif() endif() if(MSVC) @@ -53,22 +59,22 @@ else() set(BUILD_TARGET ${OUT}) endif() -# Only write the header if it's changed to prevent unnecessary recompilation -if(EXISTS ${HEADER_FILE}) - file(READ ${HEADER_FILE} CONTENTS) - string(REGEX MATCH "BUILD_COMMIT \"([^\"]*)\"" _ ${CONTENTS}) +# Only write the build info if it changed +if(EXISTS ${OUTPUT_FILE}) + file(READ ${OUTPUT_FILE} CONTENTS) + string(REGEX MATCH "LLAMA_COMMIT = \"([^\"]*)\";" _ ${CONTENTS}) set(OLD_COMMIT ${CMAKE_MATCH_1}) - string(REGEX MATCH "BUILD_COMPILER \"([^\"]*)\"" _ ${CONTENTS}) + string(REGEX MATCH "LLAMA_COMPILER = \"([^\"]*)\";" _ ${CONTENTS}) set(OLD_COMPILER ${CMAKE_MATCH_1}) - string(REGEX MATCH "BUILD_TARGET \"([^\"]*)\"" _ ${CONTENTS}) + string(REGEX MATCH "LLAMA_BUILD_TARGET = \"([^\"]*)\";" _ ${CONTENTS}) set(OLD_TARGET ${CMAKE_MATCH_1}) if ( NOT OLD_COMMIT STREQUAL BUILD_COMMIT OR NOT OLD_COMPILER STREQUAL BUILD_COMPILER OR NOT OLD_TARGET STREQUAL BUILD_TARGET ) - configure_file(${TEMPLATE_FILE} ${HEADER_FILE}) + configure_file(${TEMPLATE_FILE} ${OUTPUT_FILE}) endif() else() - configure_file(${TEMPLATE_FILE} ${HEADER_FILE}) + configure_file(${TEMPLATE_FILE} ${OUTPUT_FILE}) endif() diff --git a/scripts/build-info.h.in b/scripts/build-info.h.in deleted file mode 100644 index e996faef0..000000000 --- a/scripts/build-info.h.in +++ /dev/null @@ -1,9 +0,0 @@ -#ifndef BUILD_INFO_H -#define BUILD_INFO_H - -#define BUILD_NUMBER @BUILD_NUMBER@ -#define BUILD_COMMIT "@BUILD_COMMIT@" -#define BUILD_COMPILER "@BUILD_COMPILER@" -#define BUILD_TARGET "@BUILD_TARGET@" - -#endif // BUILD_INFO_H diff --git a/scripts/build-info.sh b/scripts/build-info.sh index 3c8b1fb85..32682afbd 100755 --- a/scripts/build-info.sh +++ b/scripts/build-info.sh @@ -24,12 +24,7 @@ if out=$($CC -dumpmachine); then build_target=$out fi -echo "#ifndef BUILD_INFO_H" -echo "#define BUILD_INFO_H" -echo -echo "#define BUILD_NUMBER $build_number" -echo "#define BUILD_COMMIT \"$build_commit\"" -echo "#define BUILD_COMPILER \"$build_compiler\"" -echo "#define BUILD_TARGET \"$build_target\"" -echo -echo "#endif // BUILD_INFO_H" +echo "int LLAMA_BUILD_NUMBER = ${build_number};" +echo "char const *LLAMA_COMMIT = \"${build_commit}\";" +echo "char const *LLAMA_COMPILER = \"${build_compiler}\";" +echo "char const *LLAMA_BUILD_TARGET = \"${build_target}\";" From 1efae9b7dca2a5cc5aa21c1997b538022964ea19 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 2 Nov 2023 09:54:18 +0200 Subject: [PATCH 069/859] llm : prevent from 1-D tensors being GPU split (#3697) --- llama.cpp | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/llama.cpp b/llama.cpp index 32d7d23de..bb60044b4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1837,6 +1837,12 @@ struct llama_model_loader { throw std::runtime_error(format("%s: tensor '%s' not found", __func__, name.c_str())); } + if (backend == GGML_BACKEND_GPU_SPLIT) { + if (ne.size() == 1) { + throw std::runtime_error(format("%s: 1-dimensional tensor '%s' cannot be split on the GPU", __func__, name.c_str())); + } + } + { bool is_ok = true; for (size_t i = 0; i < ne.size(); ++i) { @@ -2817,8 +2823,8 @@ static void llm_load_tensors( layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, 
backend); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); if (backend == GGML_BACKEND_GPU) { vram_weights += @@ -2877,13 +2883,13 @@ static void llm_load_tensors( layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend_split); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); layer.attn_q_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q_NORM, "weight", i), {64}, backend); @@ -2949,19 +2955,19 @@ static void llm_load_tensors( layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend_split); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); - 
layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); if (backend == GGML_BACKEND_GPU) { vram_weights += From 2756c4fbffab097736d5116007872d86456a544a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 2 Nov 2023 11:20:21 +0200 Subject: [PATCH 070/859] gguf : remove special-case code for GGUFv1 (#3901) ggml-ci --- ggml.c | 58 +++-------------------------------- models/ggml-vocab-llama.gguf | Bin 595423 -> 723676 bytes 2 files changed, 5 insertions(+), 53 deletions(-) diff --git a/ggml.c b/ggml.c index 2c7fe476b..d5a49d8e4 100644 --- a/ggml.c +++ b/ggml.c @@ -18811,8 +18811,7 @@ static bool gguf_fread_el(FILE * file, void * dst, size_t size, size_t * offset) return n == size; } -// NOTE: temporary handling of GGUFv1 >> remove after Oct 2023 -static bool gguf_fread_str_cur(FILE * file, struct gguf_str * p, size_t * offset) { +static bool gguf_fread_str(FILE * file, struct gguf_str * p, size_t * offset) { p->n = 0; p->data = NULL; @@ -18824,19 +18823,6 @@ static bool gguf_fread_str_cur(FILE * file, struct gguf_str * p, size_t * offset return ok; } -static bool gguf_fread_str_v1(FILE * file, struct gguf_str * p, size_t * offset) { - p->n = 0; - p->data = NULL; - - bool ok = true; - - uint32_t n = 0; - ok = ok && gguf_fread_el(file, &n, sizeof(n), offset); p->data = calloc(n + 1, 1); p->n = n; - ok = ok && gguf_fread_el(file, p->data, p->n, offset); - - return ok; -} - struct gguf_context * gguf_init_empty(void) { struct gguf_context * ctx = GGML_ALIGNED_MALLOC(sizeof(struct gguf_context)); @@ -18895,21 +18881,8 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p ctx->data = NULL; ok = ok && gguf_fread_el(file, &ctx->header.version, sizeof(ctx->header.version), &offset); - - if (ctx->header.version == 1) { - // NOTE: temporary handling of GGUFv1 >> remove after Oct 2023 - uint32_t n_tensors = 0; - uint32_t n_kv = 0; - - ok = ok && gguf_fread_el(file, &n_tensors, sizeof(n_tensors), &offset); - ok = ok && gguf_fread_el(file, &n_kv, sizeof(n_kv), &offset); - - ctx->header.n_tensors = n_tensors; - ctx->header.n_kv = n_kv; - } else { - ok = ok && gguf_fread_el(file, &ctx->header.n_tensors, sizeof(ctx->header.n_tensors), &offset); - ok = ok && gguf_fread_el(file, &ctx->header.n_kv, sizeof(ctx->header.n_kv), &offset); - } + ok = ok && gguf_fread_el(file, &ctx->header.n_tensors, sizeof(ctx->header.n_tensors), &offset); + ok = ok && gguf_fread_el(file, &ctx->header.n_kv, sizeof(ctx->header.n_kv), &offset); if (!ok) { fprintf(stderr, "%s: failed to read header\n", __func__); @@ -18919,12 +18892,6 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p } } - // NOTE: temporary handling of GGUFv1 >> remove after Oct 2023 - bool (* gguf_fread_str)(FILE *, struct gguf_str *, size_t *) = gguf_fread_str_cur; - if (ctx->header.version == 1) { - gguf_fread_str = gguf_fread_str_v1; - } - // read the kv pairs { ctx->kv = malloc(ctx->header.n_kv * sizeof(struct gguf_kv)); @@ -18955,15 +18922,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p case GGUF_TYPE_ARRAY: { ok = ok && gguf_fread_el(file, &kv->value.arr.type, sizeof(kv->value.arr.type), &offset); - - if 
(ctx->header.version == 1) {
-                        // NOTE: temporary handling of GGUFv1 >> remove after Oct 2023
-                        uint32_t n = 0;
-                        ok = ok && gguf_fread_el(file, &n, sizeof(n), &offset);
-                        kv->value.arr.n = n;
-                    } else {
-                        ok = ok && gguf_fread_el(file, &kv->value.arr.n, sizeof(kv->value.arr.n), &offset);
-                    }
+                    ok = ok && gguf_fread_el(file, &kv->value.arr.n, sizeof(kv->value.arr.n), &offset);

                    switch (kv->value.arr.type) {
                        case GGUF_TYPE_UINT8:
@@ -19022,14 +18981,7 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p
         ok = ok && gguf_fread_str(file, &info->name,                          &offset);
         ok = ok && gguf_fread_el (file, &info->n_dims, sizeof(info->n_dims),  &offset);
         for (uint32_t j = 0; j < info->n_dims; ++j) {
-            if (ctx->header.version == 1) {
-                // NOTE: temporary handling of GGUFv1 >> remove after Oct 2023
-                uint32_t t = 0;
-                ok = ok && gguf_fread_el(file, &t, sizeof(t), &offset);
-                info->ne[j] = t;
-            } else {
-                ok = ok && gguf_fread_el(file, &info->ne[j], sizeof(info->ne[j]), &offset);
-            }
+            ok = ok && gguf_fread_el(file, &info->ne[j], sizeof(info->ne[j]), &offset);
         }
         ok = ok && gguf_fread_el (file, &info->type,   sizeof(info->type),    &offset);
         ok = ok && gguf_fread_el (file, &info->offset, sizeof(info->offset),  &offset);
diff --git a/models/ggml-vocab-llama.gguf b/models/ggml-vocab-llama.gguf
index 63bfaf672f382c0f5bbcffe54736e2698ef3ac55..549eed8c53f438a61f1b00c9bd3b7d02325f2479 100644
GIT binary patch
[binary patch data omitted: models/ggml-vocab-llama.gguf regenerated, Bin 595423 -> 723676 bytes]
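
For reference, a minimal sketch of the build-info scheme the patches above converge on: the generators (scripts/build-info.sh and the reworked scripts/build-info.cmake template) now emit definitions of four LLAMA_* globals into a generated common/build-info.cpp, and consumers such as server.cpp use those symbols directly instead of the old BUILD_* macros from the deleted build-info.h. In the real tree the definitions live in the generated file and callers would see extern declarations from a common header (an assumption on my part; the diffs above only show the definitions and one use site), so this sketch collapses both sides into a single file to compile standalone. The values are the 0/"unknown" fallbacks that build-info.cmake starts from.

    #include <stdio.h>

    /* stand-in for the generated common/build-info.cpp; a real build fills
     * these in from `git rev-parse HEAD`, `git rev-list --count HEAD`, and
     * `$CC -dumpmachine`, falling back to the defaults below without git */
    int         LLAMA_BUILD_NUMBER = 0;
    char const *LLAMA_COMMIT       = "unknown";
    char const *LLAMA_COMPILER     = "unknown";
    char const *LLAMA_BUILD_TARGET = "unknown";

    int main(void) {
        /* usage mirrors the LOG_INFO("build info", ...) call in server.cpp */
        printf("build info: build=%d commit=%s (%s, %s)\n",
               LLAMA_BUILD_NUMBER, LLAMA_COMMIT, LLAMA_COMPILER, LLAMA_BUILD_TARGET);
        return 0;
    }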
z9EV0wCC}Pq8vdHPEib+?ixI0rJrVUxxpmZ+cZOy7XxMxHxeTs_{h6zHTO^21V@qaixX{%rO*R>6kv?w8o1f$c}1x zZZ;u(&sV2x3l`?)@d4^%Rca=pLUc;iIxdE(jKSzrPf5Rb6Hr%+5 zgSRX~0yM7AQ2LA5x=19a%&d()=@1s_3}BjxI?HZ`we>Ln9DqsK3F+W34ni|VE_|5A zR?+vfxBj{RP$sr5IXUq;8oM+*|%C5>IA0 z&OMi{5aq{l7!5+7d;SptW+!L~WaUK0tcJ;+XvZ+c9od?n``qVbsQC{PaBAwd;^6P6 z@X-8+>EI{r)Zd&WeGqZ;sepqd3in|$p6BnS^Bn{pkr%p{%mQ6T~a3p3A#R8aTFs?>!F;9 zlb#*kPO#EkhXR&qTk^H&PgL|y!X!fKDrmo6hdq1xzUsk;v2>S~z95q4H)RsE(>T5- z6FG;!G`r7soaphxiJ=r+jU8VPGu*7{iE1h1{`zzbu>{>c@iyNHpo?xe4`xX{KS&OWc9ZmeuG6EaWk>cNveM_9W#)Ef zN%pdU-EDsZ1b0;l>GVPpp$LzZKW^bp6qH9PW`?gM6pCs;9R=eu7WeL5A;T0;Krrhl zX*QpneN&#=uaYmY7$5p7RGp$pL_a6T{(U+JZa~3p{!iu0)qp06q?v63how$?uHWnq z|6yEoWSl~mv|A^S*h!otV^a7163fOSlmj8{a296IQ5j)QWOeB9HS$4T z=%z>|o`^cOI0J8}5;qy8*>$hOQu-Ad*8kgokY*t;pqa?GY8onBIZ4b>_PZjFMvdYi zl%4;X4AD)|Cd7G)XxIC~Bt{6AhYv@3J$S=C&M{Us;-9Nwwi+ozpcv$i&SME+cEX{= zLHDT-h5i^gR>^*xb$X(tVg@A9F#kBIGW9ruC)l$i<%OfL5YS8#a~`P_Ks`(rA$2Z7 z#LWn>eK~w4<8t5Gu`0t~y0#;foJ><>X1@x>dZ5zbs8&TCHc3)*T&Mg398_vD!d;be zYr9lAQdV)U-wk8yf-dD`ruI`kQXWapoT=K`)6DAL&aD3FxKOlV4ricJhn^m5{MLBL zDOe|ky`P~nNvRr{-djY3vYnU#zjSC)<%zHi-N}M!MJH+6C&7Q5KMvh&92;U% zWy7LL#zRSH-Hn*~%T;+Sw8axCT-8=;SbRX9#Hx~siZ(t8-z0rv`#$L)JGc6h0Dwpr zu}}@n-RN&&S_v4#VD9wkx#t9yxQb~V8Yiv_VTg8_^x)Hb%D3ZL zXfzyt0#qlI`UfABKhOWzXPfnlP%jF+bpr3W)YIf}SnCo@N`16fMI>#Q>lO1KCH6Bp~m;^AEo&Oi-L@u+bX z-m+d0=j^vK=SEA9lrnkZt$I(j?)keyQOe?+caZk}agu4m=z{n;<)z)E+KqAt{mCzY`o|=aP%YCEa7KH2Jtxh5KrK>@SSM(R%h( z4JXR#)X6#0Db+xl;`o*wNkWiX+29`Th*_jRo_%Bfq1X=%{^Efi7o+f85H)r(Arj_) zDs&LnGKRPFj%Xsh&@JQzQ9o)O^@XT#9H;!0SywmOdaW!x4jp43ZPV8ojp1F{K!uqd zPT{+6%8Cvez0ZHTu;iel*2Kq&daT%`@;VBn1Vpc2^MQ&m6gdBqW=&`?d0AE@!Z!`k zb4?CRJbyy6b{Qv_SQ`*_Efj&`mLT0!3Ohg>*JSBL<2cB)Pi!!MVLyB(ZI$Jdlqi^M z2eYBNE9O6T>Xj+-->)M=QS(_)o1~Cwk@P!IOWv--@F!e))yE!G%HRDeMYVVp_j~2+ zKyJ>r{iUiCw4Q%*_HBOzhK#rZPDWYdx^1CJr^|-!+vS;_fE}Xrvi%0dp|UM!wfs23 zSL;&)-M5?==*ANH9hNcgWeu3j-*;?IzZ+)&cFqS5{QE1#GAkU=uj{w!D>MtFuA4Ga94YN3#s6`MdrKfE@*Wb{mV|A7t8o9kS|J zT`?qE;QP=Av#4VV{ITm))z}YYE20NMrr?lDPT2ZbQARD$MOM3H%xNgZVTHqX8qYE> z!l(`}Vv+gq+ePxM^P6B&=(-t>geD`01IFA?_eV$dGxNg8+M#%d&})>E6jw+w-_140q~xsiNeRKkpOGE@c~bU6 z0YMH0FIU}^?K_Li*ju_CC-@13Gd6-Ar}u&$`!n5^MAI^ZECPgtw@o_m5toU{i15T-J`SxaPk+4`vgt51S$beN;R=GYPFoz! z!X&8C{=?~Chpw|f7TvnM-8gFm8)RWftYT0p)hzQ;hX2`He=0{Jia7E_*{MeggY(>; z>PwQAK^~>dy$%P;kofL$+hmetGuzg26Mxnq&g*{StbUzJJYIM~rb8DF|IM93sDfH` zpJ_?>%TY-#B~5tVSaont|Fv4pxPr#IabXLEpzMcPFjbcB^s+Rpk&+J;$LE?R&m3{! 
z*zpz;w156snX$#73aMRotB|krAD_ML42y)>4Q?dF@3_DA1$kor`_6(DCW1ebOswQ5 z>z22LG1xCzHpu*6_&5jSLc?s=Rg@p39MLysM2#G`ei>Pp^;mUdB(zW(FM}>aw+y3U-MCPBQ5q?IDO#F()h^Zff{Y6t(IBgI z(YvSj1Pi>S*|Xi0boFvly*p)-U?uw>xnMq6|4XBiG)yOJnT~@jxRUK+)bH@^p|lkT zu|om8I3PRU@h!sGUkTOZ81I=(IREEPH>lxM?o=s_ZbUaCe}uFY|5#;4_I9e197co` zD_Ql~$t(quuwP;uSq2BZBb%g)#^Cc5RUAVo!OpZGI!!*QRng~Q@16+z`QKPVu#8P3 zr33kWQle+YiAo?S1$a>xCbH`LnrPbet#T@#gaMB>BM@SF`XjT*bR^|VssHEx#6kF& zI3pbSx*JjejOBpG>+pd;RF31-qhuQ4k{93`-Pu?72rsg1)P<)F5#yjd5Wa~EOJ5Mu zO^T%D&;L4QJEPJ@*b7PXwgXmAV+*6(1iZyKbd!shea-{58QQVrb+TqoQmhdns$dZ~ zb&~c=ILqgUi)E#|ncK5lClAiMXKy(e166bmsv}v=@^;AfH$vvkf0$gXSTUORwH$D) zr1}a@)6kf9)?(IRAY3?(3JbmyX4+OuAX!QNb`;LcWq(X*hYXN1SPGKGLvWgI7iJZ< zaaxc&Cqmf!(ra*FIb~<5cIqT$CGO!Abm0c{PQM?$>YA-&?l^efF*>?0^&qA6Ou`p7n<#g5 zGF57#(ODBYK5#Kij*xOc)czxla81Wa(YbWd-z*2Jx(%>K{Fl>r6p(A>v9LL6kd&0r zz|Uz4>#1;(4M1ohB+gzh>=DTHE6#O|k`O7i`>MVZu@0M=06?|RkA2On{g5ZsB*K#I zIGV>rqy|=5IC7AT-g=w`_bq2N6+0elutkzg34{oA^M58^w)AvDfpS)}_>vH#nCZ%Q z$@laz$cIh*f>`qb_C)(rztzQugilULf%F$R4}&{Ll@AX!uf!v`8Z_85PO5r4BJal; z5sqhSDM&g%6IK@lS1Z-P+QiG_EWL2qf$3ZPNx<)jo9mV>Ze^q_)MP7b%+h`s)%# znbiFzr^u%P=lQ>sFZ&v|B?yfVFIXzow{nd5I|%d?E;w(iX_+Pk>W70*B*H2ud>`$u z2{DH0?IPO~gll}c3K0{pzZqWkoh+|%AZ4cb_&$8_KPcY?{Nq2_bjY=_Dh(q7Bo6-0 zb^r90ha5s;-{Jgeyz)+-jwO{i9hj*`wG#2WnA^!=Imk)Q)ww^pmt5|%j*N&|9ZU4L zKe;TwV0lI1ugJ7XB^7k$M0Qv)kWw7*^j;i`xtOhmm;s zAxDtsx0V&G}dI+DZ|6Abn_Mgx%1W2)zh>%vA9O zq~=A}+u{K}wB@JuPei3KS1ZisGZ5sRl!E*sWZ=8z^qtrSAk!x9;J?t0)zNS5_+*x= zp6P`fYQ$7IFO3qjF!qw5c9XB? zizquCigq4m!tbonDx67V70MnnzqERKQ_3Ne|DqFw&!Xv7)6Ao6YC#wcHlHsij2z1F z?y^&yObD5OpDLYr;YgIb-7Jp7&KFovF9m(mo#aPJfFZeDo*N*+{5jK6Qao&=AuZH- zU$j&wRV3o;8DQBq{_tJ|)u7g1Abu;9IdpbCVFyVUZPLyhG~WK%TmRBWz3>J3J65Kz zGfyqYPOJO6Z3e^&{4vwI^GVgZH>1**1=oep3Vkjz@<>*BSAIIe9U@;3>N?hM{)2R) zuk6V~gsMa7X_vYeE%i?xw30^I#gisQ(Lt<+wd@s?bkjuSWujrhhvu_$B2<0q#GRmp zW^WBFJmrs(h?`|x!O0n8y`0Rz-CZBe9 zJq%Td)ZTg|U}D3d*{a03FO)%Dl%lnVggjKVMs@fQ?uP55d_zXGWaW?r}8WdHPI-SG(VA32|OBi4d&|Mb2LI~G}%Wk^AQut+t;jjGXx`JV`S=4ye! 
zvfj-h>HxB<6e9da!Iq>ze8+izmQ+&+bVZ5VLkP*})sRwbAH6e7$Y(b`Mjr0GMUc8 z&+Nm>&$O>IZlv+}(Nyu4*H9^D1WD|055~oBLfL)H*TN4{m7TE~r&-ERL z>FgV4-x2Cz$~hK;VkBFYM0L&mJxYGRQBMy#>;~3a(BFr1}H=RM4608ERk>mK#b#Fu-Ocvhc9pcQ z(d1NZphT&Pa|1&16B%=uI?Fupnv-D~fy63R0C({1J0amH(~e}JsP;%bJ(GcMsaNWt zhmm6tqM{a(6=1fr_{cG7Au_}LOhf*LHeZA&W?Wn%+iLXdqA*g*cAxy5hcgvvU-8gW3)5S^~UYzWMxb|#v!B3 z`x*PHfy5W+juOUV_~B~n+;OT@M;MVZay85{H33A+sZxd^8zh{Pc2QAC=$bOUFvtjo z$01gY^>94>o)gbRffSzLC=A$4W%MsRHvdC^r%{DJ@->+xVLU=02{u$m#R16x(^^mA zmTDeXX0kB*_h(?t)V8+(luwP9z(1aTFT^5Ku`2#UyN-&Vakvh#QBn?XW0DKlgEDoC z#(WCRtwJjn(a?B8BhGuQd4ym1taBiotyMLUH`Ap_I!cv%y>44WvguO!cRig5EG`clX2yO*4i48&X~{=u6NuKN6c9WB;{Fse!N9xKb@ zyuT7|7`kgEbowC$tJpthznv1ah(b9yU)q#yNp6+@eJ56@lNDc1RNPV=(X2qq=Ov3U=IvzAT_qr90F+18U#6)7Fx1*UOEb2*E9sRm4$LB z+7NUa#**)5XN~0hWN^Rg74$V#{_IT{h>iaCTMTnJQdNL7W z&eYWiQ$AJSvB@3Yql)Ygp_u7rJ}`Of;Zs@*4+o*4Z_YpR_nZ7dd4K*dGQSZ=xw8pr zB~lGRK13+D{aQ&gDPytkuijYKZPlEA4hDEnLUZQ$Nx3@wau|D?NO=SVRO!Rhfi1MW z4iP^PJ|ImlYy5!s#{ob4os?*C_jTupwlrf(H>6OdmmeHBzhmE6x{trkw)XKWTqlAh zd)^-^4`PFYeywLD9`)D|QUtY6n=L<%k(Ql9*Arqg$l&y4%=sV4%tPrb{*5dXYr|Ul zgnrhJ5<^Cq%nsBuP49~!+!!TG=DK)E=*{_)k}!GD!LU7X_I3Zp_}j^a&bMy5ao~jq zeC*j2cfZq&+r4EP^BJn-u7(lfA>5#qXy`@qo~3ze-#JUTGxN!3DG!3nfoeB%w9uVp z5ZrxwPpLDM6w{LA499>mu7$9`ixNC!ZPOZOgP&7tx6ey_ghZ>^KuPMD!r=f*3(EGm zNOfL|ez?vZ^lym6qO!BdH`pL_cB{UXxw}P3oT_o(`h=(NC{xoL*D(NB%Mm*W;^Syw zrg-oBvHxiUol?{A@97W=e}ia=ix_xAD_KWNCDP2?SEHRPe*TdAB<)0Nf!2x>qeGBY z6^3e(rTP){^(XhFab1-PX_l70;8;zwu7dq4g7qA`>Ds5$dm$?jgP{#VF$*I2Y*zl~ zv=E8nRKSP6T!o#C5Qr# zZ!GWyw9}6r`hIy^B(gPTmaletI1YH7VG;jkt&ZoP$lfU;YpLoa6}BlKc+X#AV(bEW zY5ra3NZW9Y`^w=$2lbEm!JO`tyM{O``XoX5V4T1yqQ*SCQROomR+?l>3zyhYH{#Qs zh^|PnODpqM&=Jq^u%zdO-yf5>v4pN_b{O=Z48n@)}U| z7IJEa;~{8+yVkc|!*CzYKXTb|5cgKB?Ku`e(@qs+WK;ybN1=h?>H}P#fyC^PzVufT zZlz>iy|y?jL+fXnZW(3lxGux0y|fgUUm0_y>?77Fh4H>vhV5Go)gezyH#BpHjli7d z9#W*EXU@$sY_$tZmmSd;pMOLYd20?~iLXMRYpLz>MwVV;Wd}YAg{l6rotPFBd@+cA zHZswlnj8kVRjH^Mf#eTd$y(PTw;V}UK6XVrYnl8~9&fcCQo$Ic5dE&hVk(!l<9EjB z22&e5q)P5K6^OpKT2^-KFAT!>**MG*3BujYc7NB=s8hZsJ&c5`R658GXi57F7$?-o zr8ni3tt@$}%L%BP2JE2P6Wnb=TK*6M#z_Eie(9I| z@BQ*faK^z)5~+fka?uSCC)~rY3eJSeLX^>U|BPN29v2r$7~45v&Z96z0Z z zjx{22yy+UqqfT%>#wpSS8a2FN;Wdg69r*_$BGqK_3+Xnl`RL?WiH1t&KOw_`i33N9 zyi%tK;l2AVyD%3^I6!CNrKWrYG<^gTVg2LjJJY{^?gd@6z)1gB=BMjPa)VmIf;zk> z=?>M64Tp?HCSfQftcCQ0a^yV+7xNq;qaf6cJz>%=W+Z9KP9Pp#OUZ(hI%QGNYRK`~ z&AiP;c$=-EPTG}y%pzY$j#xFqtmxAWB3YxPST-#^{TEQ`E8~+A5eM8XbKeA?6ix3U zhn2o@4g~pQ>R)TvKF*0MzG*<9aYL&#b0x!5J<6#jC6k)f5rUn84|E+NFCOK6j=wUo~aZeke$}sJZQpFFJ;vM6V9b%<`*fW2SZx+!?E_zY^g5*S9p@i z9Ld4-#YyHFs!6kPVDX#znnAy&%|((_FYr&~@&prA-;6~lMwR$%9Dl!LB>Q~X^?1gD$saB2_8SlO{j1i?i$J)AJHnMY1@I#^C zM3z4g5RmQ=44;{A@11?qW$db+W|oxfKTM7V&xv{H{Fm$dqCqt}6{{i?BkV7bP`5$? 
zgYP9$wo_u~?0~8d$GhssNBB7YI=$nv5j$A*fMHf+d(%ppg&i;g%>D1vo(#zUak5&O zzYhxMP%9Ou@~$Q3$S{_(fxr?DE%8r)#S|wW)NfI&Gl)`EOYH(?~h& zkagof0M?s&@^5DH`4?Xh5XoTDK8twHe2}KCm3tf+kz>#Fe`Jd<7QQ*M>YOfg##Uyy zncsrD7HWgV8+-(Yr8AO$Ycj$<4n7Hmq8P`^xFteAHJEz$Wm*>Z=jY)y$jk!b`)V`n zma%glSkj!#MQO9ng&IPMic^PNk*tqc$2u+r+ok#+0-V8^_*jG$=&YEf>6vp;_^MBZ zeny!0WLH=OGF`oe8#|KZ(Zqd0CUr+od`ru60HKN_(#T$)7cIkD3i)GqzpNc+%O-Kl zVS@Vm<*{&*WgTK|`|o&b50cSCFxCg1yb+q}adxvHH5~p7DceFRZ14nAD_P8o?44M^ zS@L}X+$7zQOER+_Os)H_^5~|d2;QUI-ce|zz8+iG>PX*eWxa?@B@}?kg-n%AoT_D4 zL>@|A))z?YvuS_jBp9c|lNhWzNRe;2sANR!)J7EetD|06UQjOGLrWoy^9H)Jsu+J8 zcZ$urRU8_vWgoFfsoAK@NrO83$0^sxnoauNZ7IB+rh3td3W+go{lT?e|8zY(Nc|>B zLCDG3l#!R#30Z&W6@GROX84u7D_yOjJAMk`V{KKkE1e&C(SbRPf-T0g96DvkNlBTA zKY0?{4Q^568$DWJD)LJrvi6~EoL)j5N-ScPNQswSZ*jJ)7v^L{{+z#x>VbH6xR&(# zm6BRFe~dFpE_>)jDiVlt;^hU{mW}Fig5R%1J{03k&g=5+4yY6 zHBbBE@q2!~r?t;_fUd{kl=Zr|GJq6f=pc+7Rf=Ik=`jhmsB`sSX>AkB$h^0LgZR=- z@v>QV``hWiaa8b?C9gsKb9|fvP#Y^w2pok%Ftb%07P6$Z)DnS^d2tZ%LZNXHa5rfr zGUiQ=Fn`j#1xd!50_?L{x`@2QMTjawhPgs}9u% zsI07;Vj_PTXlLQ$#(ftDb1~DNgw_X3A*73}I;}B?W^zs=04xjE$pC=mG=ho6O#2P2 zB!$JYF3kEDx^#fuA#KVz1uwhSLXW5>T8Gi^uUl#Zdr97)<>AzTWEQk6_RvPBqN5zMX+!`yYhh;euRroyrouk!9;=oTW zZ+=1?;0LGG2cr`mQ$xbb)3{ zOm4DNKFC?aJX8V}YWhXm-vPsNx^RMvTAn&bW zVWI%;olvZ+vh$Bps;F@mk{#NCpdF z?PhA_ugmS-JBwOFAIkI)$CLMdBc$oVNR-7a`ms@VuV0gQieMiY+WD+#Hw{628iHyc z3!Zc;X$a*@SN6lGy0o+uFa028m(eyGeOA)$Ywzz%L-)yT&2OMpyBM>aEXZK}T{j7u`{3VYKY zP&|{P^!ovx#=-M5t1EM7kT9P;JoIF6y1X=hQcI z!#`jC*;~=A+}g*>3={*s?a(tJrnaoWW<=rn$r__-563|!I`DtUW+pV*599y!$iy-g zUWQ-M%64=43tXS$pJRTIOcdhCVGL5*oJ=vw7_;lQ8W(d?B-nU9F+r2mNA9&Zokn&% z2_g1UHjq6pxS8OJbIiI;`Uquv96Ncn-1qt5?bJG`DN5IUz*RfG!&a^evl36jc2!fL zQB{&(W+}?dXD30jT40sGgOyXS0S|+K}xu70~20xmZjP=QYo?MA+ z^*JM~n9Qf&*p*FY^}`t(ZiRGx@v|b6tTE`KyjzGSbu{Q0DeN^sRdRUn9(>Dkw`5K$ zBTF}tG&v$ERRK>x@W8eT87hh#gM&EPNyGGsK>P}QVO3^Mqa%AO1jmIJpLbs5jY}5= zu0i83@n5IbEzgl%IJ|wmSt|>S`&ZKDu-251_md4W8bKIewdz*v4^r8`Dsz*EL79+` zIAZPbtT?{ccl`%Iwg{elwOcNH_+qFo?H=g(ZlqtCcGcnL>v8bINWBH@=kO2El{3kj z4rn+v$nDNGQaCInED+=+QPF4KT+15Dad%O$>ir2q7aqFcm|R+VDAvKQwDFmZyK6lY z5b{5CGPQQ)u}P6BSeCvUqZ~A@1D~MLwd5RJ6qjTN!D3|aW?WB>eUYNZWwN4a*;{A9zoBIlJMuSlS1-Z~J{Rh5B;2sh*; ze4;Gmo1b;8_%i=Lob*>z=CpLLYL|N3->R=th{gEfZ!hjx>_CJW86=f6+9$J(m=!o{&E48|O1zbyoPiPT}C_MKT2 zOwWSXM*%F(et(z?YVa}mqM@_IzG8}-DFF|C-5;_m^&G<49sl%-w!Eb^nLk}g78T7B zHG_pjI+#bWuyi$l&tgPA_6@DAYuXCROIw)t&MGIVYM@=CAp2mZM*3$9NefwN?U-&i zq_RTuyCH$}pLqQtCmMfWO~%8*2`XB3<4UliP=WF#;rOj&Op~4>T9t(ZaI!fzy`-2~ zjZtckgWH5Vh6D+#OpSB5$smV!9bxMvEs)Irc%G6|)0Gx0XsAx_yI=5kH)!Z5oNJZ% zsdvX_t$t^VN(h0-U+_WLCu1Q`lWAiuWMp8PG0}q45vO0JgR&Xs6z;WY;K<c9)54 z|G)bug5;FyYx%7Vrxcq$7n6X*ita8iHC}twUAr&Ae`ak zKF3+|{8^veIQm4Q`h=F@RoupeyhbRta znd*|?=d@dCqeIReQ-6i3m%?%?$^DlVZfpw6rjr%6YsgYk61hh*(B4&HoRd=bO_ATD z(4nP|Jlon12TGZW`sVMYesG&jQlo4Yq7PYoWSh8;Q=Ksszhzb4xF9S{#=}TsL3XG| z3A7Ml3Sg?9K+d8X2_z6C^^xVamSCKIrTL6sEcEnWVHGe1eRa`tXP8>fP^549N19mq zLol@6ROP==F(T`73|{H5YO*4df2fm=UIt0Yp=CeJdfSOv6lR#(mt4h|mu1eb(?La7 zV`4um@iDw<<9jjNB!69XzSAiqh&QnW={5_ zQH;Idd_j4NN>D57kDsJRBuSAUW)(4`A`Y&mD1Z*l{+{Jw$@(CpV}H}>HaWR><>Ewa z=O4>E^FK|VQvj|e3S)82@_AUICxW+B7{Dw zL{%rFuqi+Gsm;)pWwL8$>aOb9L&`^!I&rP#FLn;I@(<2fU&GBh!hd)e1dYOHsnEGzIbOEkW=3=Y`~?*{NV9HYN<2avPxQ)pU;qhC~2% zj5_3lXClLfS06bHdS0yJpAH+7$||MyG$_-*%hl z1WAm=sHg^$SD*K@_|hc@oj<3$8S7Lj1@$Ceh4;^2-P?8){`Myi`CP2MYFnHdFr$OT z6kA1{?|a&3<~l4aOvUBRR%_rdB0IPeSII;(gH$6|9UwCum?||ETKbA3$HiUATTW9I zwZ^UOqk|8GRv;u_tpkF5McU*(2$fjm!dOQeSuf;ric>N8gAY3OwzeHP{)~gj!9Pi! 
zndh@?Xs4pm-8Yjl}~g>YL5 zMABfKRjm}+rK*s8M28Rwm4#1sN{OF96>k0XxU(CV!pr=ZEZ*4n=IeL zj8~Cqidd~?!@{i}Isr&*=6R+HaLAJbd-7$ zcIC3Fkkn;Hohr3Vdc@=Y61)&z+7qT$XFH}}tn4C1Dpao%RSqeI8iz zHFt%)xDEmLf%n{9~7>mgw!A!pZzeb|X09JR$c>wGi1` z{pgb7Ajq?JEHER)>Qw_f!DKD$B5j9+9?=^8A_2?iCc(81Cm{$;ra{O;S8bmVmPjIU zxRXsoyT%kirDvG)hMax-r}x5B$}HT-6ow2A`UpQ607-apq&t}WkbyZH1P{@ZLanRo zv3*WIPUSO`H!_h0J!I7gzO&OD>eIgn;ZMP%;EWwm>0T(kzkv+JRj2ti1TUu=2XNR; z_B!97nlBLHo%oC>j&#@BytYn+sta>DL-9UFr*Htt6y~oIVChJ!@;b;fF^uLIPvhA56Tv=7`vRTdAG+aN8c9khw1!`gyzTIP$1^B6nL*Mfip(0+567My_wjQ`_{c z@^$2?8yk)aUzVj8#IPfCTRs8Ks5nHfr(U~--IL+R;V3kmdaCEb0V#a`%pDcrH+w+} z8lU96eE&4K5!`mawCSsXlSmV2w{Rpla3`7v zjlRW2uEK$qjFo9PM_Cx5+w?Q`Ih(lZd;Sg3xR1*@zRE6uD*Chn5+*=yXX2@wN(C6^ zW~C0YbbTUqG(LRYS4Md75hosRXDw2Ra)rD3k+>2Paq5%;k}%<~C5N6>Y&%PxUwtA} zf)6Rp6?|G2U6=1xArh9+0}nl*G)kAty%2#9&c#gA+sd?)?Uc@G7Y_ztggB@M(d9E^ zBe6%ne8YhToW*YPKR_t!+n8L%{6Q`p8J6lu?DQ!(c5?6_#nj5;k|My5l_r!ey5^=z zcz(wc`ANDtG^_UcfB#66=rOudL$mroT?qH}SixH#HO0(;GNljGtUh?)wO51yrmfDx z{^bUSa7*9cAxv<=r$;*7I`-6;^^%44YMhe)kj>c%v6@_8=S}yWYtFqe?}|FM zXy1tN5$^uY-;{O0;}!=FDLh0hgweJ#N9!)pNB@y+3Q2ro2u9Q-b?Tbs&7p|js_wCi zS>ME)1q|Hw+EA4^du&cbUMd1;P<(g(o{OSMM1APF;^Wz>=S;mYzWt^8mZQ;x7vt)~ zU%cR3dWPpy&cK_E7cq>!wNNvkyX$X2f;W6rK681Q|3auxWpQ&=sINwocHs|FPOqo% zr6u7r%>MiWG(seyU^a~kd`Kq0w(FGO3w;UIoj~Xu!y(R@tFfXy`s+!L<95&BSz67d zh3B!=zS(TAeDk}n!(T3#RApM?T-RW(XhF!Z21sqN6P3U|}WBB@)bvSJpq z=u>QtVV5wc!?AX4Pe{=;Xo{|fgzl1B%B`hN%lVH|*ly9aTULHoqu5#%y?I@n#$;ig zTj}!ZcAWEA&8Z~=@#e4j)ac6 z3-5?#A7I1=`_P6+tjlG7&N~cm(~c_ZN9m?UYk7HJ z!q(|u3liPzmawMtk^D-dR{6G!!Fcy>v|(v>n@lqQgZaC%84>5BsLtX= zDlTN?wZ`RE^#7OCKALC#t^W!7i&TQJ%z6CA&ZbZ`zo*K|st zu*}FKMI-<+D`SjzS7Mt#3@y9W+nk3w;ty}W^f*l7A zG!G(Q1UVJ9DL=GQz7N77dG5KWSJr|K(M5##Ed(3zb-5YdVco+g>~mT)I#v(qfoPZU zNWU-i46WN{xB56#Z-j>pzjdM{Zov0s0f_pCiU2BiGyp?IhR_@ji*sy|a&B&=J_y!M zP}zb6DtVpcK>;2)T1S2l^Bz}msI4>^{_-sOF0h z%(Mv3*;bFj1_D4{8P|uAxD8)5)t@d59=t2lHn-A-Vge>~hpW^@MxL!uVN$3IgyO&K zMhap}SP4GQ>asHe!)|G${0=FhJ}4b`aP0awo&-Z@0`dXEvkDWCIPbq$WJgACDP!TW zPPZM5Vt}nCF6sl|@rfh-^qt66{pr`V*48#Zr>=TBu`uYN;H%5E<2YioJY>snNQWCR z3u@#N`m31S%TPgm%PN?y-&i%-8+!8Wp2bM6oHJR=w%u<-!Q%&M&f%&i45KoI_^HOcPL0 z7r{Vfly6tY8d;^USti~a7IqTUsh*wV zG_gjtC~gx@$415oVPPloN7IV)Iybr%Ijb3)I09sE7FuJxh*o8-KC=GhMJKkZRhtNM zqo0QV-PpbIIoj{phbMf*P@P5>?_nb*{KW5H0j@T^bb<@%5)kc_R=~*0 zio=E{vAV_~muPF?yM2323}ldAB$fNpacrh;IjyPk=%kjL}*#Y8w^|4b%isiR6F*FY$Shd1w6|`)@csirY?lCbb@^C_G?^)F!jXe@m9Ze*F-{qn+8>sgicy4JUm#UQXXi3Veh0X7{r zb#VyxjDK_wF zuaS%GE58bZX3`SE&NexzJ4nP!9@0ud8?KB9U2V?X`$PqMCCM8VXQFu!JG zxY=z;B86y!D0Rp5!|?s}!izrB{U-7-dCTc?NrzE23$9V4Ba7Db*vwr%>5i^WvI-%= ztCxK0qmb=IsbgcKrN$ZA-H_%8$F>q0?&^IE<_%5G+|$1d6K-NT;js?1Ea|F&YbOXj z<%L>jzhx~moiNQsu>OUpX4Z&#E$LvylEbI|z-4C}+IssoC@?m9!7D(Nj`a<@%G5dP zxu3oDJAz}^dK>v~h?T@ABnCJ!e^YxkA6LoE$n9V0-PCdkM{{yuH5#Y`ypuYJ7i~2< zNZa+1hSCLQTpUzqm*gg(|z=0~=Oe+bTM8 z_@P=2Cw8WbZfc#bP``4AH0f%N%ifxZE6z%-M&OE6O$?^{Re#}JbUH3>*^nDNz;iDp zPC`VrvaI6Cmzvz<_-v6c+4a^N@i`locb z&xU$D*wIGB$C-7SOk`FqoLQ4}moOzTDJA_lvU$M&?CU~MVz3{|a)GJuc&1Cv172Gm zXKB5CE7z5Qwz2b_c4Q;OgfMD0%p;FH;)EfI`D>=x$o!-{`20<#srpm;yD1cc4<*joio$9iuE`$Ak|69F0v~~3%2%DH zcVF?pT8h5_z@2d+WGO6A4aM|he~ZsiV-FdwsDbiQb~DyBule3`TG#3wlk==Pm_e@=FT$U$~yyil4P0>b)i4*?Dn$KN#nl^du=v$&FI);R2O8 zhSO-J^s4rP=tjcx`NbtN;QXJaCc@Q3?>$*9vuoIjUhGpkYWj-7X4zhtq<$A19U;(` zPd0+z0Q+DZhrvV(x~(EQlWoLB+H+ISJxkYkIE>5KRY1b@os}h&?QmPdef3!Z)bDY^DkU z)N)6XXsweDa?sN%`BLfrA;hu1ha6VU5va(KbY%$(!o7{;{~$FhS&giSg@09`-AX3Y zjUD-WJ&MdgprTW_4Bu!jKTd52%noy?NOXzKdis5J8ooq~z;VWZCM=K$RxqTLO;evFFX@UNF-HbO zCCOUGT4XFx+9`uItu~l#;}~Lr*RgA?Fj}P%?Hj&1|B2(wB%5^>g1Oz~qM!{pfw|8; z?2wbB0`ca>OAiR6PPn5f)Bd@Kj)0uw&hvXzEy-8v9YVx4HKeMWcPz*!4_i`{FJ^HV 
z`A}w%htHTcvJUi$WcM2d!dwK5&qp;gxBmK=lf!t|?(_6JdU(7Ea4mXSWY}}dflyI4 z5#`ek28}Az1=zN0hJ~-GLxM(0=;MCU$7$f6oQsy%=<{LuV2=uQHn}lCmV&YXG;H|; zdt_XNLeEJVVr=O(9~4wa=@O*tkl(I)bxg)5BOTCoO13`KaqHG0dXd^KlQNxqpoUhR z=^KrtL%Z(!L)jJ9(k8EW+ew{>#yvIg7u~lVhl)nYmHHzP=x~l6;k3~lE29mHyj=x* zE-Ve2E(%oKCR`Dr^tmu6aJmYSG5V&dT@^)V{v?#84ndadCHjFnS8G%$JA9&r*w@>)az%`;RxT0%VKuB!>I1MK1)l4q)yr4waOL39r_s?^n2Y|6 z5f4+#p1YT#pQingYSvKoL*2OW$cuu?K7EsMPsn(!RD#es)Z=Bb1yjtDDJs3A3CiP# z*HxuacWUCo<8-D;O=44vj8WE}xtK(NnOD|T{8&MO(~``-6u#lX0jYu6T4$MKb`i9* zIuBCs3!i|M34WO-{&tE0q!u zUsG0T-SOcrJg2RQimXh5^3)Iy*5q5()_OJ}f_yU0Zzwd+a-h)d{D6R<+Ft*SU9sedt z`eT2|f}U#d-8l|!AqxD7K)h39kj3G~WY?4Z#eH->@vZDOA2OfxtnA@L3bHZ8bI>13$d}x{$|m)MWP;M_jSv zKbtobP2SO?4Y3cuNn|H1Z8p23LigTq;^Jdes}H9@a^eC70mZ1TA-W(nIae@Z)ymm# znllRGi#x(pV|qFKDvZ3bvEgu1qH~~0WENC+bm%nT?0FI<1Y1BA=G7_!XJ&KDQtZ_l zx9Be15JE^~$W*gkTtD;KDHM~@imb%TJPf#w;*_TnS)P?LN07adoC>tnQbCRg9!J^! z&0uZn_mm>^nTz#aL2z@=h7*s`SFSrR@Wr}VI{0K>$YWm&qLxDybqJ1d1X)CM{;>>a zNkp>8I%r_Y8Mhtjy3Kp<`DB$jgypTmu71d+aApvXdMC=>81_kFXg+L}r6%P@4p#()Y71y=(@Kq=ZrRab`T=50=xuk=CkmcSDmDDc7kcaL zZHIwCw~GZR9no;6ycD-T@TG_(i|vKqJ!1E>iEgn8dpU2>NRH&V_9-Ao7=+diRme!| zq^N1D!WR4d=QYv7iojHBFK=^j>bZe)T9{)I^1sh*qpSgmuHt#OX;OAJJjdbxXK`GrcS0}k}*&G0idJMn8Jb5#V=j`#)dJ;+jupmu_%M}U+e)e2Vqe)^0wO$+S|)A+ zn!U9hAtpg`y44-*)RHo+2EExXRiA-w+z zsUG230eCWN+zO8Sg{6mt)=lSdq>_&XI(I)SUv||L+s6_S80LSTHL$g?Pr5biPN|2e z^}9R_4le#)GH|<`Bi@nTA4r!bTc;5Ryt<=(p>EX?$G1u--$D#V*@VAx`c6!`cisP3 z%rroT7Dz3O%Bd^qmU)1)16}B4K^42~+P4EIZm`1QreRkG27c?7Q~|9E4|u^>FX zCLCjRhwy|iHTisY!U{KDp}aA!qeSifw*2-I(Ky|Xt82bCwpB(Ua@gh8?9hSUoyHpS z+$0&-d@I6RY-G~ZRj_6SoJRb5BOE}4WYWDKTrE2k4^0y&Ym%eo)vsI>K=+F{Ocs~e zD~If)q#sK<*iQxn?qKczL)w_*hcPO{FDJ`m>5}BFSs$>bd%dp=-QLyS8l~E3q`NQV z-3vnh<`D6vGg8t=+_40Rv~J`?eMg>q32WbhUCXW=8IAAgIm_|v<5g(R!T!v(eRbPM z_M&3@ah`Q8=cNQO>N?w#UwI`UA-9SDN;L0^KfI!a~Vj_(IH}wU(dG-MiW| z)lx)=h4_r8f0Z57osfatMId_us%4^fcg^m7@CjP$0>_Il3eaZ8WDS@{I3i><5R#T4 z?=$V3UtsFgVMW%faGh9OLZVWB?MkzrLKp5rQ$>xOH_LMLbvXSCmBR$x#`@ik7!AUX zQcn4KhfOCOo?7LN<#LP`;cz#5E4rSJOT4}Q-r4VGjz3Fz%t9DwbFin7kXFW=ukwd0 zr8UP8gh^ge_MOWVB5&SnQn7Sv^gjIZjbs|DrV*+?d|TZ_2E zFY-!m5n2-#TqMK9wnMA$tAQ{oR6chyi`M$6mt7%J)4HNFO(kdMf)Y?XbJh6-{)Xda zDOs-2j7oai$k^82$|P4D_$1@W&qbRcw>9sU(UwvEgv_tJ=1hputWuBOTJjdAvYt8{ z8mqBxleF`b<=QOo>-uhyco}Vv(qCVtYOJom^$>g2>Ai3=Tb`qao%Y3-1qaH^iej4x_iA3mb@U4wQi6?CrC+d|@~q-=BXf(EJTxAQpd@0b9PX*r6MTmg(xb^4KzjQui@sV(ikY2YD`V8ba7(M6GJ59BmCOlkaMG9OCmm-SY5Pm%O#Y%P2R_e>w+>9%f zpykgSGoN5`LIQDUFio~*xMdl1SFM*TDG|XO5=qA+p#Vk3*JW&B_U5*0A>RC*l+N+O zgShz`-BNZP*v0}Wk-%kWH5zw0Z`$rhjK$F9+3`mgGJ*;o<8PqViClao*y z2to;kuo$!bK)V+WMbP^~!ArO6ZrhDW0;?r9C!uTE2$8PanW+exFr&P;2rD>_r{C@A z=ClS*^6Ylgcv*lqSFgqb=Km^lh+~nfsNE@5`PxPh`%oq~P5Y8X9F!R(H}gbTk}p&U zfxN1!2dVjs)m=@~m8ms(*eV&nm>tC!=(zWN^Uw$&D?y}`j`Cm!8D`nGyo;dRCz%MK zLa-VL=dEx%i``c07@pri;L?o@P+8`iDP3v>o!`&L*IskbnEZ||qBU|TBCmaOLx8+t zindQf=SUx7)#`WsOXKtPFJmj~_cv%iD}Zr!%eUd_J^yC?=u^7RBGLRrWSm(#V4oNi z44!{M!0ce{X#0Zm29N2aJT64+DlvmQniY zA#W2_3Kft`0w3|6#;p6;zVa0oQR=ca$GDChmA&ke>&Yx!E97}5zYYAHf*8mF$dSAQ zS2Hi*E}K&pEKfSl749p|WgZU6tQ&KK#A=+Qd?DiILz+;^v{q<_F&BemJ=sav9fzh@#L-Qx_`Ls^j92ya z>L)N+_?BcZ;BoAbHX}<DZ$ZyE%I2*Ht@gR))buipCyfsd;^D<5}0rK=lhBqC$m7=^vA|9 zix}?fmmPT`mOj89g;gRTSL~u(;WA5fN`5ZT!wuy4VQ{817Uz$@wutZ}RDBN3jbVth zf1EX=#34(YZsB$7lE6ARu#%uxS~GP2dpXnWXG{l7Ne6*hIC;p`cMwkCLw}LWBK+el zPuYWyNN*GU#i9`AZI^M0`bjgBv+tz4rYV04aa#0(OxEgLl`ncx`fBfPIDB?Zn}5bM zjFe?WcqmUGHO*DaE9tUgE+-HYT1ty33$(xnWm?$Dd=16q zcxD$D2nTQ4uJiRJqrvAVVv~U3$W3l7c%0s^4#|oZ?^r>jHJ_64kMGNpc!^WTB`-G* z9i%>TfZj@1_29nb&Ln>ljexWh|HhMFN_Whai>r|xhhZJ8#D>ePsHR7{KgazrLZ+8y 
z{m@$&zG-2n{wlkI#IEx8w@=>>k|Q}joJY1FFTWglVW)ou+|CT97zhT(DYNZ?rHAB) zL8@K!Vp8EDbEKDuj2M=PmgY#!zu_RFcngB;?l7ZKCk~V;(0Pdk`+KL~52Y4le(h!F z2n!~w*VeA2!N8SmG^6Pd7YK+zK)oaZFH`8ZB46UF-a-t!tG9p?Q}!PQHeBj}pa^%P zW-irk0_;l42!H4Sf1JP1|4*UBXf}zguSJTVuslWo&HrE+e;UmE4ZyR_R=E{DjO84zF0w1Mp=}sriZ*>`HHqkHwl^6 zw4EjBw@8spWl+_Dnrr1jRD#>(Aeb>+?5@FH+{svparHp z6;?W)YHpwRg@F~3AF>R1BJ~KzN9&JL;DT}C!|zT{vNUolj4HwjPiQs^O3$NjFvbj; zjZ8@9OXvf8<5}#e%MbcgO}`ai0cO?Bg}FJLDBs{4jK26JE}MPq5FWoJ;xTIP(0}M~8f`jnWuZ8i_y(3h&0HcGCd=dCmqgl-ccJbb1?724?pY!Rv5GysdhVg#fB*wF=SbY z3WBh+&cz8(5^}%opaAH`wxEwXrh3$(O+Z!;fk#KyaDcvmt}P=>5kd~gTIxQ{M0+pG zi`Ir8sGLMbFj}^TPSH4zzvH}j93;uFtQn~98TYaJ`FA<#6VBFR)@jK-0`Q5Cy|*L4 z69W!|ax>aF032dP9wUt-Amzv1tfk0==!orBCju1?KG(*}{>M9~?+7tkLmF{pH`VRV zgcwXQu7l}G6Aa+5D*9%oI;UrJPF;GH@|YtzHmAQz4ByxL7QVfgSzk%Pl9`VX`eZw*8NHOUj&D=BBhVUCqFbN;t;#OM0iwp?gBj(=>=}qcEerk78pbP@+hA8C=?`7@= zYv569h{-Y+9FK8vBy?{os34~Q+D`h2NeBm&BS~vFQ-kAzPK+{~RV79#ImGOW%FPh6i)_@iqy>O&ZGOD3s zhWd2=BN=LCaf^Vw-sG(M(Dyalyp}wirYmOY@D{9%r3VR&ULh8@d~&{Jm7I1tBC4&H z&Dx+@Wmg|>hOf(_8H&J;OrYswY)?|;;#ubBKyWPJ2;E*UXQdSMxQAu@Q4Y0GQjbV# z5W-mZ(5UiR`!#=qTNfhdoM{OgXPWH^?F)S8l#vN&^udD=sQu8C1m{aq_hj0emQooH z!U*#ZeCjK(pZ~&$w`QuqVO41!nKOUnK_HZ>@Xj_B06wgxrHEjjkgF!brEv*sD#R8s zecD0_$W8puv01wKkPL=Q4qp) zOBa0dy)yK8n@zU@h4Ezg`hzr4m5I4eHuS4OR@OwpToND`mO`6abiELukjRB@fO8{R zXQ71PL#H_{$G+oa!1b2Gd7L@QySphe3PMB`BA?2VrhNo-w&ct4x|T31cyX+?V^%XH zkguTXYs83L2RPp;zaH7o_4vM4Ufo5Tc_%5^UcxrFIKa~dWb{i@k#&<0cK{D~6K&{i zXPNf=TSq6!w;YKL!X~?AFcd=_hSUC9Yimu=#D#e;R3JFmSK^rc?4N7tv`!KTVGbul zPhDkA1S4x))ExUh%GulVPb34!q`RoMNQ;I0G9rD37GKt60yF@HISex&blQGgSqtyGB94f}FD*Qn8AG=&b*!-B7SWH(O#uSXYa@DDTWKE?CHXj|QoK{tM zy>QotR6PA_y&`FG4TDiwP$v!C(X2|1@RQ<0w`~87Mx*i)5pn;vkQlNU^yk$aL(5pf z^_A(t-SJFh4xCFwP5#i_?HfK|5Z&GN+0cwWOyU@yb*KS!3Xh%Bx2TX)jjo5ODGH1j zVh1`T!;w@nSq=+P#!_xdGA2@58IKB(NCpN@x;It&-J%AnBwROB)luR3PP*Xf^jOr%aL{(i}R~o|d)eG_)VuWnkP*zPC3(ImD3o!a- z8C9BgY?TgKXSS=!z*@2NU7>v^P5zKjmR@c}I1J7^5yxSKA}WbnIKeHqa;<4P zKGp2V>-*Y^25$y;zd0zU-$N0QCA%dZoZy5i?q1=`RrMCEW@TVa#Mrrs5~A3l6S4$# zr_|P}*}=wOz(4Jv+(0MOM=5WcgxGsgQ?a?khr~r+gxN3zDq$G^D?K=T;2FuIy6K_u zQCOX2D%1x>(otox+x3*91ZV9Hy}@nK4Jh1E)qy6F?BNP_9Idr(rZ#WZdJoeIZEI!y zEutD6T@^q5n$wgp^&dDs*R8jMaBLHkOI4LO#_DZdn~Z!6dQ*iz_v{;I-}3RStx?7U zq-gp(P%4g%qtxlCUq!NKa}SpqW>f3KDLC0}Z<%?GQ!KtW0apxeCl_l#*L&IYr0#K1 z`+;7$z)2G_l;@WtGwbx@h%O-Wf4+X4V1v)IaGeR`qh`mpzP7gKY_xLyny-I1>`MGr z)+ATnE=S3TwN7zOvn%1Ga)@&?cmkl=1-^n*xvniz$Q(F+=Z+j-f}j4;>G$OqD(3x7 z%Pm}lnuXj@C#Bt>sHD{J5wV%H-D=v;=gT%r_q_qZD|*v3y0(*HEi}vKa)ZPkD_IL7 zs-QHFw7I&j%36m28lqHpd~@HFpvpPpMBoHfHGDKvlf-IMOfPnZZWE0OHObOK9ro*C zvF^1*4{WpmsHQ7w+LO~6YFNPt&IkdHv;qG{M`d=9f8#zk@RAilc}VpRmJhG<%zuCW zu7fuY@Vb@HZ5_WXo`3djQFT!-28)DpnP&OtTG%zoTRNHDMj~80o@E{9uU+4BNWQ35 zqp-VOvf$%EtCGfE@in8for;xJwv3OlXEGw5qrBbGWjuUPb~Yj?p<9I%nqn^5h|0=<#?7FU?X=t_9R>w#ORKk7=$&x z)g%q$Cvakb#%`)~`^)eFtVuk{TrSzYlJh^wuDjaJHxR%>yTC#e+^o9O zo`VJ$D?N&exD-5?&-73S^QI;IIdAYV##(ytxK7FiE=#ACG0^-+{*9L|rp~&SdM6~N zM^s59YFv%~%Vc~4Bsv(UYCUF{sGsrjB`1BpwOpCr30_2_z>707@$MX*|$AIOc zH72_Z!5wRIIS*}QRU5|{SoWiRHk3kYhYyh|7++X^`Dt9Wgcru4r2T#b_4IhP}Q$pS5!(zq!N zyMFQEhn-Tl(!HM?t@wm}U;y48*V?MVT9%Nn;hc8&IiUhhg-m&GdMP*up;kq_p|!lJ z@{F!)Co&98(bIur(HaL6x0OG}My9h~>cHE$Vl8g>a3Z%!i5IjN`hEmDa*t$>h@-6YGMwVng$f*cS)6@2t6^;Q_Zr$lyHcDRgv(615X%c_(6S4 zLN*Q%8gXC4H&#IxGR*P;hlBL<8W7- zfsrucvg9-=>E5^^FhK3BCeHFYU=j8Ew@;+31~AOQ9zK>Phfv0&4^39<@p4jCd9_bk zyg$5pej$g`rc6eVC4nV2T!~x(3W`PD!H3w~=%Jg^rgt7;r}WiTL{Qk*d>WrFg-kyv z-*C`?biE)^P&N{#OysEXGg&ffFAcNcKN+)dUpDvFIAkqvZ)IoN1Q$X0bBKYYD?_sm zKfBrO4l6ybm5G4d-7mlSs?!lisH6GFW&gCR`S-NRMKw}^M1M#%Lw09+#*%xZ@`#J2 ze5#**9CCx|GTngOF(ucI?5^!d^CVNCyc 
zCCxZG3`9n|z9a8n)l?27S)`;{(|kt-zpVmCGb0}XziAz>+;@Q`Zn58u5hMe(n>xA{ z@PaRBuWE}Df8MqPUlZAtg5qRU1%Hr=L03WbWH+~X=B3zc5Zb9n847+!k=5)LyhOUM z?b2y;?l|+dS~9t{Y?Bjv#TGI3_92j0hjI6nB_|O^K?<%xex!tW9DraSB4Xe=semXx z-+lefSA|U*wI3g}V1Q~56e2trFy)fammg|4BxI4N=*A8y`99?dwydCOH=g^d;EtAG z4u|Fc$Jd)RM|NHJp8W;<&_hH&t3;Vq5?OqwMADdnDojSAswAsu@nj+a93YVg$^j^V zDhEYMo}?>Uw-xjt-ta zz1Lpjf1L`o=*tS>6tV{}voe~lUHZ}h&S-U!q3Pn+)fO$Ie7K(_D+DNIh1={udzzeMWlMag zC3W-foxXrjsrmbWl$IPLK&7+bWJEMYh6kW8h2hVzf_zn)90a`It;2ADcosL1`NYF^ zLCj*@0TyF$@u?4#`*!l4a!&+XPKhkpDeK`AnLNs%X;eRxm0-&lD4}mPQ#({PY&s-x zd!JJ@zP;dtDX&1@Cl)Q{$-C{>!{8^6p#o;caBkD)snSC87?OZ z1-aB}1el@Enht9cWp&uZ3ZD353w~vqulr6p|NMvk&&gXF!k93zeMM-rQ_2OS@$Rmi zOHR`ChLm|f@v@J$!o4PY;dneSBy^$QBK|1(azkO7$;joBmt`3f=~%@XCAtuIjSrNf zhkujZ*O(bJcEXCNLSW&pP@L#Jr=(4i(nBBVrm_VpPTn|23P&xq7kujrR5#c9) zFqh(S@GZf_D!)ALWV;Ojt$k?E-}gL!Y^d z!m`9b%>nQm-<2kbC~48|BW~9mrrAYT?d1!V7?rU29b{+y#+q}Hg^M%-za zf18s=S20)>z*`pGW|>OfnavvGlpaFk4TR$jgk8Wo$&9whBHv+rx0!EYJ~7Q#NH%~| z$q2`NyLEl>UwT32W;g|A;ggR!DpAw+VPJK&p|K1bnYRxokLLiv_!=*y=t)xI+qc5F!jo#m$ZaLgcbR zd-5n8)MSd;kqprXEih)XvZz%0*+rO`n|(Eq!do;(f16TuP62ZR`4E{1yCEY9eEF$3 z^vLfjT^iP_Fdt)~Wlg5LS-z@aK4Dwg8W-j`fEtw(eRFM9iMp?L#da4>$8YGPBI&-` zIk60Te|R?wp0Hg7kcEzgwoX8j&ycx)@koAbwB*Oj54~-?PeSo!4w^p!hDF9cd~BY0 z$>r(6IHE*|K#>Nmh==t1mFMLdV21Dbs`a_-rM-Rz2V)*^Ie%}1f6IpJBzuH^Xexe@ zM!=PCE_peNc8*!wq3D|l$7z)1l`R9Kn)LFT!KNRNMIF@B;%G4{uemaG(<&iNl=Rh-I5l2nedo{U9 z3x7E@Ro(xhC%<1a#k(Iut)x@|Nz0cixhXPDK!SN_& zPIVCO8h?!`v~n$Ae$5uQ2&Vc`1TZd9NJDdt>|PBN$`z0)gf6y=y$_7{^?5Mt6+~-4 zkjQu;-(!F*AcmHt9t`6G15QPsd1OA{txttL6q}}SZwzZWth)P{F)L(CJa9fxy4mfv z<=4|sKkq}ebR21W5u3=4hyYic8yA#(PCB)J4z#0^r-heWUTk%PcQA%_FPuWBT!TK% zg;l37sXXST_6DXUSmfZCfs4$m0@u&Vj@8HiTL#L?BR;i)z%sXV5&OVj_`@sz!=Eiu zsn0t0-;9-s6p)4fitNxAlx@!v4k4c#KT$&*rzxeZd+&qsv6Af+bV{E!^6Xz&=XMU#;oINGCA797qXSe)E?C}cCLhXUo=yHuEqu6t9!JXhL4gZ73ns6x5Gg#e z)@by59H3wjxE_2u*dq!X*YSQKplqx=QAn&}>x}|J6rVo-oG)}LC0wL2 z9TG0$OyD>eZW~_Xi+|2p_sNWlR7zGcZpF)gC<1saSIawvpcns?4J4Z)P!X9*tgCMY zUc8%w=lk8TcUGn)YjdAneB@-Sxfxa1sP<@I!&j_GcLtH8k3SNzXA1WkvoKp-G|aqX zm+~2oa@iEg{u05c<0HLhsd9-qj3GPOx7_fV1SZY-5`TC)y`!v9#nXNvBIG z9K#35m}bA_FZ1JoZRtINf+D-l28$OtP6%R^HfvGhMYtm+oP71!tAW1o{PW)YdQKIw zQ4|)RITa7#ZT+D{fg2rLE#JxfqKHe8if(0S{V^8e|4CVRbK%RZ0dtSbiuO1ha#Go!He zS!|aN=E}6>$M427{(yOGmKygx>|MGUspaEy;N>AUM%|njzc8PivoU4O*M zzbg3G2HJ^l09c9YpEHz}!vW@F?r26S=4B#K{Lxzc>&DaG*cyZqk{XZ%$3%Ac9>{<> z;$SY!q7R>mCa}MF7hn34&+*FCFD>;d1d6x}iJr#tEst+CXsGHqLv^+oEZ^aS$$Q?T z0U-FFsT$**&qN(M+Lo5vSgA8FW_1d5+N1h`yh|J(^-W|YO?`_)%&`>Kc$kw#QVIx& zT0X)#lrX0Rn+i`%9QB&uP9OY1)~2kaJiNtw-nz1)$=r%&3 zBw9q8n1Oe6f?ges-CLWyshZs$+0;InHH1mwT?Wq0e8LWMr~rr~TQZ+H25gIfk^u}; zFdRE?lDsow1on3=*os+y)Kk&ocI< zZrcZYNVOUNH@-ZzlU)2iJ|>GPGBL!PFqV^jc+7F!4^_hiV3}`-(%zQwNDHEtccw}6 zINF6@>`&bqvce#Va+mq+ffDh}VCyJJ#yG2doIKgf!Q+#Eugby$q+d#p0QTc#iWAg1 zkBi){jvVxLGk}rybD$l#^)exI8<>eF1Z4)oL4M=nH%Jg*Azr+X_QCHLetYKh0+29C|eF-MV`^H}xi=)A}o*UmfU4GiRxCIdm!3%QuNd zM))F3-ro}ff(?)TN=-FQ8<@0loadA96kB0KA;lg>Wxd%+`Sj^n1yQKt-CRXeep;kW zOt+NL&cA|o3xpH-IGa6<8Qr9KPS}DlXsIpFiYgrk_~kXNBWIkK@>O!OEI0)5IaEn{ zl`@l^VcQ_0YvO`rQo zH-Ng&HMkhxwoq%4`aAvw&O!g@S?64~wIyj1NniK?xmhEph54uHWKQ(0%>|zW52+I? 
zlT2B{ALqdA=0p)a5Z(H^xoEgUombzpw002Nnjc$TF5+omufKweGsl+k%EhNLm8&e& zY;>C~O2{uG$~q0;XEKMi>OeLwr5=hRmdhA>{jkv`jJHjdY_?nQ68^f`#0_r0*cLwI zBE@cVT2(QQDerED_07U(vfzeIDWsWRrDxZqE=g(!MOQ{|%5CsUJon!(^g30PEhHz$ zF)@7hts6JgP{oZtSFOSC%IY3>BVUI}UcBMsC58=m(Qa@Juv>!p zUj9Q)*j?@#rht!i@=?LmiaQ^>S{1aXFN~>vsJSu^o!{{UD;!qe5TR;Je#}n>J)FGd zpDu5&X^O&DbCv_1J%Q))@8#TEfU8Tnn=N@Tq{^qaiZr6Ox+SFNQscjTO;*fOrqJcW z^SoVyneQao&U>HzZ5Cu-wR$vpQ;+vc&&b`+uEKurtC1qB9t+$;bwTkh5>97xy)uNg z95h$~9yD|oOn3(#!YgN>Q{9-lMGoZPPYcb-{$5TBVFo^O_C;PETQEwd2tBOwr==Z7n+$s)GnP-rx!c|y>Wu8R_L@mgNE5zi*-rviXI+W*Jd4QE6IK#YL@+EPMqflviW4^2T+yd%SHFOhMrNYK2RgLyW89@%cqs)#^w;7-Mq2U$q z6s?A$tjuPk?!p$CquHF4RND`P?o`qP!IYYK-?UA4}0{6&;fSd$Adsb97 zWBuSep$QWO5#Jw>r!QxRiy{n8^oIoIM|kp9|4c?=+A|qs@IfJAvJYwMQBnu-a(E*q zf5`hO%{;b=C1MdcqQ%7czJO2V>>;U%Xk|6G67ILG0(@tWG;5g^4)5?OFslvq=5RCY zsNHXo`$QNj!L9F@70bE8wr;q!9Nlf!R^N*34~!iCiw?_J2TYU5@)5RRrM2RQ;AQHm zkk)H&NDCC%l^LS)bx}~AsufIA&RXB(0)KWm%C2a`wvqR!I9V$v+CjX`ht~Lv-QvDn zY-|&G^UcOkR~DqXA>wExTqjq(jF|d4cF*}qMlq3x$A*8ItslfSis)#EjRnvsrK1e~ z%iU2_;Xs!5hRte#Mdstn9}69+c_E8S#k0D1kg=2|@2R=n(`uFX8N!%P0^6)p*`PaR zR}x|}^=0}whgSEWa!T?ex%lF1cAT0Jt%`Iu4fLx>=-&^1v{tyEPU4bj&B^Rg2;c~# z2(qT%$v?}2*;T4K5>m_1;>LV_@Gc*}S?!`*{TuRkae~s+OjIARc2ii4KQ&*7T9Jm9*BQPaUZ-3{$npG~eBWnd zI!2Ds8K3&!*{kDzK%fyF(NPN>hUtRo&xf@KQ=U`8^dTQ0eO`R%5AI-IQ?cOzhd6<> z`x5hZbt*Y*-N0Q%EGwcqaAA^qAjc%(702OAH5t?ZxYy*lGvc{QY9*R&ED#Kb8<}vfuu#`@)7Plck<=a*Ety)n85G(Kg>SRFSUODq*x>?kX^&KJ;rWG;C-=~nvCf3cQ8*~_oJCg3cpNPOBd7V5Jn zfpzkh9LSwB%qMk6w!0O$Gpn`l5t-zdul|!B7YBOp6QjX;3z4QNLrzexWesFnBoirI z54byHFEFoN@E&{whdE;b$1`5Nq60HE`zGQ;)Xte4-@Cg_?EwdRYYqD@EM5X}qLD2>FpB$+RtONK<3F^TX@k zkxh;?n00PiBi2Z`{}PqL$P?SoX28Gr3!xoOs?okuVASZnhBzOVkI6?N7Mj0Ms5ak? z3IJ5+*48@XxT2Ev!57KgAL6rS)F@EhzT5&+xMa9=FYLyf5!lH47ynnG?#*W2eEE+V z+I!p^tC$aN2REzGjykwSRBf^&9%JJ0fzlHsQj*LPASVO_Jq@ui7JYOcm)66lrK24y zjH17raa)(160F?0bw~sVv!}|wiIdfFW7XV?F`B{wAe)b2jLF+~XM)d`xoi@cKLKe)Eke^{j{vDDYHXOlvwO9UR8QvTah)Yh=GV(-lUQo z7{sDw z!p{Y9!0Shi#m+~kgZ^2Migg~3n` zzV=L{P8}}vA>^7`HHQdp1&`8rQ+r*I2XvoFrUx1aU4$%OD}(RRBV=gMx! 
z%b)n@Qp&6IoRQ*tEEb-@7K%;ibJh8|>=*PZL;sf@x`M?#=D$J3%`9u+KZxE={a+8i zCOd&hLOzhOFXU>6Gk4|hFTM1V>=$wnQ}snT>gJC2H_VJN#!$!2EKQUtYBOeXm+u$x zVEYii8kPW5^C}0qSOAR--b{hn-a4pvRpgqy8H=;M>;q$Q0EnS#Vo!^kV}>fsl>h+w zx05&3@Qv@HaC=()D4)VK3SMW_!D!~Y*bvpd&8-Z7Vp@jEp4`28`6@6PgnWcdd0z&B z4s~S{z}Jg9e{*O$s^dO!{mC!;*ASs!<*ecJI*ueyo_P7bT;gyqtlBAHMd*&Zi?WnH zoi9Go$QlZVb)fQO+&YztWNQHPfd@{*VP{7!% z6*n|c<5|dNeuV!GA~NCOQ}<=4W{n&v)AZ>np`MQq^ieM;GzYgZwJ(ciK zS}4)z=e`vc#yAFYX6N_}o}wQnTYZ?g>BziRCGHN(r)7Ysx~qadDekZ^Ax8@(DJyLj zpBQSzbJzVGlisA(Zrhug>RkQao^2gu*?gGU7`lzx4Gh5>B}^?sd;sXk5e!iCx0iuD z0V3MF7w`BnV9K3=Ix#nh?KOX>BV=4 z0}>Om(=v$TKpR$eSh`q6f(*&OGdvOf7lRhoPfN%3)k89_di$8%rSkqkNZyp|^209hU4lNM<))FFCF#!S)xL)P++d0Vbshdt0nx)83q@D8f z^wfvWQ4R|+_9=zL!fOLg4Mjs?)k4;q&^+SM;j<5(2~%q3cQ`STykGcKU8E6!`B_9={Y!T%Pk-Y^vB*_1;ehjm9$*=|1l zv~=*=la$=nWYY!W0m60z`_cbc#2f?u15`aK@26nMJnw1P9;KyrC-yX z{ioroAe6-BMQDhq##Q4aaww+9@+3k{2zh%eL}ZxKR&CoXaJhu0(Y!C zrIne-2n$M)^)MI4F-EZ%mOJ9Ym1A-+iojo-3*oikyXLqE;jy$6pA0ka6sS>F4<{c7 z>om)(a3O<}P`Die!?-hiV-C9!w6g!0?eb2aeJMQQnXhWm_ml3q+__UI!6`V46PJ;+ zh*$3xcz+h+--N^Li${~UecLoDU)jyj*9&yJTW1BIN4_`+(Z40R;`{!aPO&Z2x>eAg zMjrC})yMvm>2<2MjC^exIN@gn%*SABbTN5L^Lj?7AsbYCVYK75ex=dx6kr@Q7R)Jk zViG(UUzWVkI8;)#u%PS(wKfa9M+|LA2Z}qRmF@61HTzlc#X%Rp_(V<$$Z5e$=>RcI zh}i}>CqhPgMh1}sEUYoZ8?GSEN|=Wk;iMtFs#*1+o0$S~jX7FLdRdO`(+J?i%pk|} z$`Po-;R3g`e+nd*@L&`Oz2@h0)t=*B-+1?0(mKL)Wp|;_2ScnL13+H_kVc3j%K>yNY5>Pk58#Iy`;jkk+eV<% zgJIBY@+f;>X74ypdMelcW}}?cJ7QBX8}t>#rfja3AyoPfXz+OFyNZZZJ-Hy0$#-?1 zKmj4kaaq1?@*icT5)e}27FbRR6y7HY)TIJpSrgLHZgr9~ps)d`u+2u1d#{<4$Pyx-?@Z{n6-OdzCmIDuC_G`Y922KFj!_vbVNqk%etLB& z{%>`MRiZ@ z$y>40dGyE6IT=usvp4n%zg?(%74KCf*I=r4rauE>skcNtO>T)kD0~zHXb2KIj{F%S zH@yMU>cW>Y#_A~~J3&r%LQu}k_Yu)kM4S5(-~XQf-6AG0KK0W+=k@^~9lBGl1o^@Z zZ@5)}h^1Iie>79Lr7096W8)gvLjll`-Ve)_t|-!9_MAhPlU9C=iaruQw z9u;3U>JG6uR<6u@uTuL?Z$^UmdKGKjUw zCn1>QLu}%zp%=EKw9LNV36tQ9nip#G#JDQ0E|ZYz^G-$hf^H0H4&LkS+DZwTss z%5FX(|NkhacU3dPUuApxWM~5byNr}YrIfG;Y;Mc>B|HWlM%fAoY?@<%O^z+?CzsHe z$it%#3J2!o1*Gk;%i%H)s`K7y5%e-!KqfEFiAVw~O=-DoR-e`QKwBcYM#urLAZgW? zmyXCB_4Rw^zW-|}w8&E{y6t~{`MM|%M0;6D666Ob;bwviZ~Ct8(0bA^*p2x4ayUMJ z$1@>g7*tx&^DORY>dE<>*2hs_kk7AdhSJ1J5zz_$-Y9fm{Ms5va7uuLw@QYN&utu> zhP~HlAe$kt8j_(<5!c?R5H!H@@e`DSI|~OFF~0@!OS{pl1aCkQWYr-^wsgv6RU$x4Oqx1whp` zvJn95%_s!ZKo5(A_ko#XS(@BH;uel)$|OjxKG%8}R)%E1`i)cXOt&>?E;%EKWopgK z@)Peh4iH>>*rBGaKexkW9u4pBu+i{k^G+GQfXfAZy|E~afO}qk&GdCc+XZ^@8*jH) z1t1HJtYb-Ya-i|3v<&!gnD==zD-K+0K4>gK0P1JYFYoPH)~+|X+ATc%Jw;|VlV5>l zZ=P9in1(ulAjMK0BNX@yxXqjlGutag<^X}#7KZL`E@W*B(LmPr%537jK_SdDm#_J6 z>mA6+xLWEAFm&U_O>Z4xNjC0U7dw0yR?WN=pJUYX#W~^jFDeXKB}1nce)LymKT>m1 zGf@o09C=$_Ah%Xlrp)KJKZfUFc&- zQEJ>c1n0pCGt*j-!Emu)PUxyGJk!QzHQp_@+Y*A4_r{=RdSNSS&B))^x*BWF1HgV$hb(jbS-Hb6{&fn{c=`8!hS!7NN{^VZ+ zRcG?6epiG){0f+^P$+!8ZxJc(|28;WZ{Jwm#fL&{%s?4I9pW^Ra>9?*y6q$G_TxUb z2S6qG6J!+ci5s#I|17D~pBej#Q5e9~Z)E&dO280sHBxqHakXJ`OzZ&jzU)!J8Dd8n z@JU=4llQ)ORZb-L2N|^SD#PE7_@{5YAhbyFQa(mr+t2Z725;6xaz#qFmLALkG-x!4 z42Bv6+Rk-YXcV|B*};7)DNNjw6LT)7i*u=P#Z=*Z-HA9dTEX-zl%T}xx}Pl`K5^5D zP8)&qy1w9Ba(P20d6>_ALp3Apiar)jfS>YR=xZA#xG%y*I3riKyzfS{GZ@Gg)pCSP zSLKj=aApFLfox=4`Q+q~y0X14+K6{TySdQi=yS|gl-8aM zfH^IB_tX=j6Ikez;RtiWWbv#f75g$EN_Qup>=!bb1tU5M88#-PvQi2`>5+;JLA@-h z6KEETun=f9hiN_eqFM{5^&yK)mX?J=`|Qa+^M7`ZBdLV}-BwuasV2vk^X_X*BsR?4M8? 
zQ^-Vgy(~GlUSVR!qKyJ_bdj%nveBmbROu}r2EMZ_Ys_rV_}?u@BTSR0Qlt6qobWV5 zbqG-hz$M+kW-o=W>2Qdv4z<6Xk-8RxjUTlel{4{YWm$&g2T?g|?DT3`MQlgN)TjJz&HBMvcYh7fThostlpZ`rbp3F)?nx=?rW<6FW4s0yVnbppi-?8nVP zoQap8@U>?cch+UoZB;6|SS@hbgLm4fh9QSNfBnYSrFV)W=@~SyL%8j#$-g6V_)1I< zm6@RYFN}gP0!;bw1%Fv=f=#5{eeH|gBIDv0(YJnzKp60696aJ%*#x4i;TbhdZ?4Mh zV)}N_!$9a%--1C!fB>fcP_L?US`CSr(aw(5Ju^dYDRM*|mJZA15i)NNRJdK}1?3P` z4lOU=*ZpX?qnvxDr)P|)*kH{wTiZTsMguP*vtDM+y^{>Z#x%TbME=mLNG4EqY&bS! zun*WAXN3%_l@2=xVO2(AwHjTocn39tm~XVMxs~L6?LZLa{L3Mc8kCC*7)N^Bk~$(5J_T!*s<&oU>d9810exOxAksWRd~G$XAJhG$W|Z#hlLQLnrHLkL)nJIaVJ7@ zY6EPQXO`e4KRZN)`s^*t8RKlg%FJ&`C*aRZ{Gny2n;^Ap6 z6CmHs#%U4@;8+=fgz|uI$Yns<%`{CQ1R{2S7HRO02HRptvso@fhSYoPUN%YqSL;)% zN$OFv9Chp2AuvFLrpI~k=Ma23jnKl6mIk=zSx2r)YYI7?h(9L8H?n}dMTo@{m5lLH;rD1En z=Hs2!Ae4Cu{1mH7bXA*|&}Q&AfjGC~FvLvIp2CPgX8*H>;SJ?DgI z7dh3bHdXNd8;Y}UptUIfK6{AuwXeyKxg}rh+Ae1`LQxndZdg9UC|-D|j%!7>1X*k} z4$sTH1_4b~tG0@n32uu6AuEn3%*_#%c4s$)3C^0^B@Re?Ttw_)$41rEsFoyjPlM2X za*Ths-t{I^9a{7B_|h2=ithj`l8bp?b{j>}dU~Z%!VHx~h`6LIalPt5$U^f*d+l(0 zdJi@jBB^{d`~;B#YKY`vmn0tVWOSDIcJTuhA10|V>;$xAai5u!Z)|P*6m1{KoRY}q z&%SuIicsw8;PW2})n*2dRY+!_G7plM0^IPKFFR`}9i1t}QYK-pY)^)4|4Ke z$RPk<^(tW$07?=to{`PSdJ0u&8F!3K$wndwFzcLrA0=&e)B&?-E;b}Zf6jzgT28*I zf37pS{r4^WthtP8fznDYZe7Dx}c@L51xc zNDp`(Bsa1-4&~^q$ON~*2rC-nD?bL}Uq(qXv?^Ix6#}N23YwH<^Dz?!CKI->iO28X zHQ>l)fw9RpVQIp=qY47axyn^ZO^>*pg?`l38ORQ?N_NutZbKD#8IL7n6H8M7n^a{~ zMOrpFbyt^rz>cv~x(Z6o;Vlpwj~PN7OAM$=HY#O=m7aDsD`nXUd{9=oQvo^`(*6`B zRaRuXTLniUhg*fMC$3)$UR`$sqH3izyY9;bw1$$&f_VKAJ|8(_tK$f2nP%Nj~S10`_f7RVkuI2SBPHUw&@u|6ZSvs7-8=8ZqwtvgU<0XLhnob8-c ziT_3x{n!xb2?z^LfRkV^_+i=-O7?odk&(}hl@_5k@awS7FDl;PRTW$%-%M7C@XA&` zrMU)6+8!ZBHY-_(A;l4urGOKxzH=>@vbm*xhIU3@hLkVA>^utKi^(r!l|b0>6OIrc zdviQYd0@T;x{3D$q<@kj=)@l<-w`K~G`wFp!@Pg2M z>!~GIUO*GH@{qDl({yzo@nLl?V=+m?Sie)&>;;B{>AxY7rCWs%h7 z7nTWR5dUChK^kiYL>kqh+7itn=i40YRyIEP9MdKXCI`inpRA>?rdT`3l|rOE7uV4@U!= zpcQMy4+Gf>B2--ZGEu{dmm#3U>E0tnNj6KTS04`_t6E|C9`Gwwm)Vf)aXkh0fXZ+V8A_mfTx8Gm9b#3i`*!NkQF{Om9bfs{mX|r9(js|Cj&SbE0q==5Gj^D z?6i8F%A7(5c;Fd?m2JRHx2IO}sa-;jPKa0@xZS`1eSlzq=`EI5MdC-r`~OYwwzC-54=RXf3iZE+ir<^4Io zDeE`dmvJVO{U;Zn2}cD|bb2`zj$Z2ToEG8Vb6FK*aDk<{H0M(qCuf)wYh?IyzN(Y| zh=1Pvi{Oa6T*Jb{4U9blw7#Hs9^s901zF}PSXrZ8-~++uC4J7PTgdzDoONW%NFbKE!QT)99ts?Q0#lRPG1Q zw@Te}#4vYq9?JPhl^k}4@ye{+_3MzHT|kmqtY#?O+6FCxu1V6tp-OuCbkrfMs0Zp!lr_H-B~%=3`xC-3h@nBu(!Y1*UC zi@@oZuSGva+A0xvL2tjT_n zaOsy%=#gwomNVRos5h`QKgD19y8r9thyJhceNSg;NA@v|cy3e1Slq?lF>_;s9tmGuSxWT${#1RhkE#;h8h2>UVpSXoEY<`N`qOBOrD zi)08HQlvu$9(1FO?#1se6=>vJP(oJ7D`G1XH%+akJcqJXvxJ1zU@j?GK66ZyMNr`q zt`cFuh8V6t2~3goHRL*r=<&ut2+kFN(Q3o)k9#Q8U3Sf#kc9bBtL8Q{3NDJz1gE7F zpBHDFaOsg0fWmT1W-FyhS#R|MTw5 zsu03b_Pm8QLX$o!19vIN^qK5s^tswx=ojZY<^Q|OJBgV3U$-Q3h1M`{}tKr9TL?sqx7`i=)wc5jfmmS z$I#81;ZSPJqr>jSxq1vt2dj4w5I==XkSQRNWWz5guh<`aaNCPAID=B^U=xpp>Q~2d z+00$#(HL5a+)+5l2CAT5VPg2^LYUJl!lXbri9a<%tSr{WFS9RwB`Srm5O*VFgdczS z!5e;H`L(d+ja|EOSUG8L(ZGkiuj)Y& zX3-jllFN|5Z;N&lT7!QDRQRX9%GFTq0S7?b=~aY;0~~?~5?3e@Y!hF*Z)=s2spuQq z`15~^OOx?z<~pN;uBvV+h>OZL=U${8pT%$p7AoNS z5VHPOPOL@sbEn1YG&8d$1KbotyM_9fd3sO<{WZr=-z`i_8wl6Fz+1u;3QfnxZBZ|B z$iDcc4}f?>_+bpLWrBiGhiF#Z%HG9EzS~w>+rDu(G&|r>Q&{j>2|pU5X%i})X=P&U zZXG5H`hK`M0n@HS0RQ*m|BwU9c0OtcZ^JlW+vwg4B3?AT4T0xmY?XGvW2@pAkM zUkhF%wwLcHwj00&A<9~<&@ixAx?iUEPcL7QDWU6k6$^MsSW|3nO1#LBVFpHD`O9t> z!}cKXpszjj1xCk+`)_4?!KYv%eb_1y8m?l9{VR@y!h!)L*g4D+>uPn>&KBYarRkIi z$q?32)VxKCcBoi~6h;(`%-+zfL%2-3cIXXX`f2nhpRdszC*?a!s@GMGO?dXn!A!Bi=w)BG`2;7bA%#;#ZsfRm)Y*lmUm-e_$XDr5E7WYM6;o&HRdO|x_x9&SUi#C zbw%%i>>zZ@k!mtRb05UUN6L!PTTr|e{&B)Ihf(2qm&&qwk35VMY^&|GH@;+ez$W5L 
zhalz^;iQPqp*@VE3&?QoDeszW$zp%RP4dmPRY!by?y>*mEZnUJL=X9%1e=^(fRL}I z@G^n_g%#EJ(K$Ryy1ku!&Ic{4R?0riB)g!P`pEZ-owNfc ziqpl?)%}PZS>BF+a{8Dy9ynY!wOl3xigUBbJW0lg#))7HUU2zCTvqSu1~y$Si3GBr zD6mwdc)}ZEf9&Q2kU8crUu{{vQ2D)KZe{ts2>?(;xXvxgXw7ybFE$23DVq)C3Y-h_ z0-(o*fsnhUJ{oJNrp0?^8zpg48T_Z-HJ_JLelX4;ZqsndV!r^L5VG;zH5pSX8W3b< zH3$Xp8J&SjiG=P8YPqXwX~If`(yrf@rAqK2>r^a{u~=J+26ZNPGlv`^gT%!PK(lXS zhIW*Vux!z!`}LyL`qwyO&I9;HX|0A5bZR*{L$qPg;bavu=C2v zv5-z|qfQ^%nTw<+a$rK=vInM(d-vm%ujl)~PMg{AWhz|1Y6HBRfYpWTGZHiJC}<{E z*@-!5vUCG97Fwx{KdEM)dAAH|?X2~ut3KIXJU{+#?o_o~lNGO*C}Ga3vI7sr*~``? zYU#*SEO@}VK3z`JIm&1#LccTb!sQM_cD6Adw4G>tywX=N^fmmJ{$AbIbxD_5zSL*S z8@V%iTiV=e)YFVz$HIzzMLWHw2)Nn_8+ppZQTy<5{lLFU7*n!Z=nMuWuKQ%;wycXm z4h(fIK{qsHJu%rMl$fhP=!%}A*Etr9k7QPd#k&;B46O>WHguuM`(ZcfV>ghUabR(w zz8kD8qqM!5^;7D&A!zy;o@@CRob)(Eh8gQETX2>|5ld@#i!HjaV>L{Kt*0WG2@1@6 z=|{|sf%ax|D@gW2o=r3x9}Q8HYTY2~WFWRg-0;6P#XPxGBI4UJe?r?~>s|l0&w(F! z+J`ag^(F6z`0}xj@*CfXzjC@}mE3(B z{6(J#phBRX$VBi3AM0{$V0!hJFMndm%Ki&-6H6n%2w;eVW_e@&gBKT9eD0I~z+#$X za<6L1`{KO#guiFD0fr`RoW&(EkxzLaG<8y+$Q@{4!sOZL^T{I_<<8`-P8y}`QwkYjs*_K;163Jb zdRiW>W~-o01Fo0h1SY=;VQT6S&9N%mnz{r&e~QSF`5DNH{Irk)>2xQTfuU2B=~%AI zH!i>LWKHR!P9!T~?GMmN_ta=iusoa0-$d996J8h81f|_-x9yE!;2g)5LNZbTy#|gF zUNGO!*?cy6I|fm;(b2py*sR z61e^ye~o;JP(&H~PU*up@O#82>39~s~X0k8972-7m;*a;4UGBydiCf1%ca-bLK;$eKO6n&cSca zXR%0waXaoU7epY0R%|`D7E=af|`ek+L3iIp4c8>+`-TEn?rfJv`QsdO(5m&BS;&9{Azej_^S2 z|Kiv|4bd53Y1~&~9XsDHYQHy0lxTrlHcNp~kp~Uh1=~Letv=r*E`V&}lVnWuIcv$x zIR(~+xsECH>+8bX9XvHYhounU)@J1u3UsOYbE@}cwVXx{BfQ}9hbFYiXxeAt0tTE% z*b#VLM2b7nsTtx^qP-&+S@067-}+ni=l>X#GU3|?zOT|1hh$ez^yH%)-)9(HI=E5j zS|o+b1K^#6>?u;a5aKLI*{pXm9|%jFh7S+;@yS~;>_ct4HP+D@yg*!zE!28%!CV8% z&6f$B^k)ocCtlzUecu zTH*0^BNhY`Nqk$HbIP@I zAgeo!rQ7s+-6|sNIX%v~ zZ-;I{ELqrS2NAYeHBwkl32j zuo_Js4MRX84@hQ+z#k?K`rYCR^^Jl{l59W!#lzc`G9u0s-sRxvJimC9F=ZW{QHFO_ z0@NF1^(764&sXb^>yrgUV+m%kBTJ|YZ;wJfGJ~IUs2WqB(|o}|-DIlxL{-%WbI81% zz6jQ&A!G_m7$)FzKPlV@^32#p6IZ4ozN(LdFuja}w@oOHPnur1th`S<(k1{(yQ%7q z^6oz?>SkbEQw9LOuFw49{ma+BlwO;cPNKC6_oBBzB$9G>4E+5=S-#!bld$*tqHnu{mJ}+Pb@*wa_>P<_$ZVIvzO^Q7qa=cG}cothX9rct& zD2?%%gIJYP{vDkf82JSlCh$*0xG|(m@_=X_NFI$O^P@m=OZ*^V(b5vEG_zQ-&q)5$bfgd-GRWW4hiDEQo>H98uUNZU-evr^?7( zJkeMjst2&AGH3ansZAH}$WCRSTJade{#GM+*NlKZ$o4qQ4g1|XI%XzYdP{Di#tLIS zs`oUo3NXNuxYfWiQGE6^1}!ttmlX*Zgo8i%#(X(>^>zZ8lW)zW9(#qNZ0uQix?GKq zgOFp6zA#=wr2UW^4ofJ3?JB}EIV2bzoEo6H%L#EjMQQQ&IMn~qev!<4T%E=866R<^ zv=Hp#r$vPqgw5di2^KmmT9$LJAHpTVdX!`+t-3fZ>?Fe*RuK?mmK7NQ;#sWuVRIt% z#yZ=p)Zz|^L%fOu-Vd_X!}EV1bPv_vHd~t~1k?`If%@Ik*qkiuaW#26W;rI&HkOo! zC@;RG=H9Y>EQdGM&p8e$h4&|a6Ew20snUL4@5eP_oZv8c1s^6=Bjlxky#CH`bVk`iZ{4DrV%1PlpYpY- zfdqgl?Z$T|I25XG(&e(Hn4rQK2Xo!5I-)Gim~qfbPRrQQ)=}tGrNRR@b$k0*4Lm8w z@u6n=I?;hCZlN(<{#6k)x`$gMqdb8uS642uZ!G!@X~Xi`vUgw&geUL7Qb{d3&*{|c z?G*}ps)sQQ@5{zLA2O}ZR$+&F;il-HXj4~yO8KK@5)j@@adkN_ELU)qeJpOi^P!j0 zm@7o+Iqo0!0r2y8XTa+4+1Q{+xeZP;v$`sa!={1_vgKqB?NZfia^Oo6A^~M`$}A8? z5&58tsID@PeO6q?Fd|7yr#aglsQV2=5>j_)0O|S&QxH=a_G=-W>89``j;rtYxgrk8 zLe*8bI>+SUW5)L0Xol@~0YTV_u+`1*!pvC#44KcI0C;h~y=F1ImjN}Ea=D!RhR5LD zEzo{25XMY?o0FQD(ASFOIKVtp_@=i*&X|t0+Oen|8B&cfCp(&NF_QRZpADZr8%Oye zsWh1yJqmEdScm)F;tHs_=zT0!%E#w~G&pMHx+0h$0bcmK09OCR-`%vl2H6K@Zrbti zJ{FdO+lh@4p`R1Quz|%b#XIwr3PGLPN@>aF?-*E3ci;n>&q(spZV_ByuyfovG`>`6#xREzXRb{Cpu5xL zG?OVG3xm;@?#r8&<6DUFgsQ3rZLBi}3|JastIvvnLTQM?Xvj#uLynAb3N3L*Mi!O9 z3P+R7r34f-pK=CCC$KOWC4PYvndg19kyyhBQ|}#OiS(fnp5R2L19FS}n~LJMVVFpZ zB!iso_LSA%E-bVbmZL0u7nUbSV22=u$u_&%XYQhKsfi z6Ga!rFuSs*#sB;m^DY($90EC_BpHa~V_vwGQGr_vTie1D?B>LF?<1`ab*1nntG%!T zpx9WIjCrn0r)CN>@$HYmpxQ?QfD3Vo!Pj62*|{ZekV+=5&HrrnMkhkDO? 
zgjV`t5It3$oE!D~zCMF8sw&w!fYNq5?X@$I}_bfhIjIyIhkK%m=}~xF6ZU zV=}dT3M9TTzleIj{A)<|L)nGbu3hsspja-=#HPY_?()Q9OLiQwo^V1ZJJ}Gi*YNOW z6sMJZyWq`J&?`11{M<*#(A(W?l1;nwBk3-MtY}?GhX+u?WT$L&JwzSGwLd@JwEohu_nxCB>8`kK5IIGm(NltJTW zxra$f&zoVmp=Sa)tI!y~_O(cym^@M~+75RX(?AD!-FMAlqbGVuyxTtBcVDz>XD56_ zlYdcmV$qNVo4S8J>d!K)`L3Ggxy$gThETA|3CzBy|M%zWHJ_%K$^l>%0aq_1^;OEs zI`!DJ$b@ot0L)&1Gi_GEe7W@MIGFce_9p0(0)4*C@TwRlgSW`%L%Z^1FK{tn)YJ6? znb%Li%#u@3_#|R`n2$BOXC;CIT6UdN z5L*fXBO?W|8DL^VPVmmw@2sv05zu!nBv=(e3nKF=0D-O}t5wCtT4a00$7JCxDh`fL z8Dwl7=CxMfuaQGfr5|q`7Kl{hOKAE7E&Cv<&bjCbQJPIR#a37KvQ?8!mHnThzj$jv z96Plb6#FZ)O-2!d=2@1c*K!Tnt}4A|L9(ETLIvDyYVDbrr|6lS2VaHDpI*M||E7w2 zTqca@js}Y`JgX)FF`QDovD>J{%}U1taoyzFb2hC2GhRh!360_y04-U3c7Hn2PIo@U zO&=96`AN)pVYryQkCDia-@})LkVsR-n`W<8A4AJZ1fyyCcEN|1>-?D*9~&JFs=$at zB1M83%ebexhMm~cun_e*0pD!k@!k_iD_Ihryww&F5MEFCECvXnz)_V20K!t`$vS3C zU$ly%y#8+x*$OkX$&}tXH2hX6U;P2NCS}-Xo|d#?9~gC=_>tg!jgIVx<9Oyg7~$sX$w-n#sp{5b(uZWm(Bqv7}Bk zUiI|C<1-rZ!zZ1V^t^u-2@$>KtQ%I6sqw1-!$nef5Pl1rp4KQPJaZc1pw3-HSn7{C zfg5Nh6huQGA&dHQ-*^}oCT(vvs`Sw_*(}2>8((|=k9-bbO-W4m`FX$x6=OUWz~)0>yfayXvT*fylSjzVvHgfX3TdH6 z3DMngGz{H!Wqn)3IfI?aS2jn%yeqO>PJ)drgH|Q`Z;5*_rU?msCd012MKDK>31z%y z@EKt*%r(BAS3kjV6Y7IZS(R$5yJuSGUg(+;?MNjh1yM>fxzG9bIQOl6dVt~ z^SM~$T6Bi666xJ6lPntinz%Pv3?MZ+hrWT*EHb`QN=FR|k%#-up4!;vxbKQ|#F-sT z9U>r1!|AQ)hy0N`R(9;U7MwO1Lf-RD0NjILYV+awq3gYD3!}tJ%oJrZI205v?te>w zl^ceB4e_p{&P1f#WSwy;T3}VQhm=@^g;V>+Qi_FGi0E++4~jb$3%EhAMztws@X|G( z*lGkKPu`sT!e>6;)zWB$NEZqFsEAH3BJ5PIS?v}m>?lTqkys;)DZr$VTME{j@VRAU zOx=0DG15iD1e`7m|ITe6{`$IXWSs6kF5*zLe$@@bKHvV~m46Yc0qT{RuPEQVcr=O3Hfi+2pO@CkLPZql|8C@|NrXB59^zy*^Do-9}W7yPbNpJ09yWFyN=DXe~x6;FUz(uYJ?fxZ~Y9#2F@`c;9IqG3f$YKzDg zpI1shi3(~k!CaeZBPEMFF}6U&^M7CbUB2J&gDBIYZX|KPGK*GYQNg|?lIJZssC@uT z`wxHl-+kTY3oRP-hln7-0`J|Uwuar?MEx;RlQg$F)~K^YMdfMe{S4VIKpEwoW4mB< zuaV25Czs@A?PIgihgeI3HdbCWp1Yy95@ul$-h&eFPJ8Y3|Zc<>=tvj(bd%Sm%i;yA!&!s%y@85 zj+6}F_|nr)3-2Jv`nR%GF?@UVT_-XeC}DZV{>nNPC_cHAPG(nT9U3h z=!O`MO_%Z+Jy7#5ivkR4C{;}5##3_ZEtyY0iAcU}d1Gc8-|QXUDdekU@{f{|v&v6V zbbY#)e|`Cybd%&`I}7j2enBB5_-+(oxLb3=*9*jrYKI(YmF#hr@5Ogte@S*X<_d8v zGMQVITEWrvcash7d{=`{Bx1uafmkSGnzv% zRU`3;9RA5$1xgEuqL_?g70exB(ulVJMT-)btJ%IfY7Dd8Uxau|v9|oc`dAeCf$xxf zs2Ydig6REEa2#>W1t0;$j6F)5gi8t){wK*p{7G{6P9|^4mc|cC6uJE2s6o&P#q)6T zmh8kWGf>OBd)X*16q;r5} zhnWf77j{D!ZYoojP3OzhuoC`?%R`6FIYfk(=l7bPrYQF(A4$L8!nhIp^RXNV!$r}V zqS4#jrRiI6BcG;8S_7*i;k$#P~@N^mF9?}ip#s%(%+Mo1{5jIJA!F4v-w+xC|=5YtT(@p;B>05S@f^} zNPa8t12$~=32HHKB?KvhtU}MdS(gNfS6K z3bg3zyAiv*xh0*m6m|qjsS%7oP+bG7^|4yP&s)1n-C1!$>*(&8(?PpSmHFFzl1K_)T7j|p!t zX7!L?cWaT~h+*cMMC_2)XS?C3#*nMB={HxtDZMTt+sbDrMR>s082fL5C}TN>&O*9K z7Sa;l)DW8)AsiB&P0pjGK3@Dzb^{kLV>Z(J?KPtjV(`@e^sY?g@_(Cy%x})|FyuDu zh%yf;WsFP{MpTY*?1^3#xIZn6(U!w$!M5yN89Ymek|f)U2<*aIfKq5c{>6H4ktXG1 zGG3GcPqswna&SB6HPCKlLk5$QI4YLK!EW!df}5y`1WAl#2^ zv&joELu#A|1+WW8Y$l1459D2g&F-p;$?0q!40+Guj5DGQ>(R4PK|Tn6lTkWU!H=I- za@9BYmKlP=S2>64w~esiOO01iI5%uI74nf8Ebz5c?avlu*Abe{)uA2Jjoq@|^mEr< z_77jwVEH*}obBnFljf*$2cy@sxW%?%M&4?3RG_D&&|Y744yc|P0KW!(_2EI48oW{~ z?Bp{<0X-4CrK%kciX`Qr{e-at&1euDY}v4P;{|<-Q1>tubn;$UrBgICpF_X< zU3qfxFS&F&g)V$DLrhn`hJ0W3^Z*j)8TRv~mb|vsXtWhWyyD){A6^+%4^O!*bvx~i zxff)6fk{|Iq~5E023jV&25?y{x!E9c?cr0-zQI&(dIG3v^_5_(=mRF`NE<2K7E88& z$c$GOgI-M_Vyw)_WM_*iN~R2D-HQgm5tRK(T!h`O@|P(Bf4D7?&t%%e5xFfgn$6&j zGZ|VId0yI^I#HCX;$SK;7+_Zn(N~ygrqFM8z#)uDUVlNBtA)!!ey(!FWLS6fS(UVBA-68ZdpRJaud^+tDOzy-DP)Q0X29FPBq!$_?8eS}17|iD1bU zqWGK4pK?-tD3gB)CQ2J~n|EY;AiQVn|JL@F^o>%tD?fqp(FL0c;8WY7g;-?;p&Bg} zXsABaQXp?4TZYl4W5?cJ_?8UGxPE8=(5_UvFMfXcny@H1eX2gMCms2-ueYY*saX!1 zQnJ$k*<=M)vd0}vz#luKfAXQs9JUbrlF7EtRQoY64R9Id{kf59YRL&7H0qY&gnyg? 
z8K3>Op945pE_0dHDrpC_7Copb##Naf46F(pR!bE(tHVnw=6}lPRv;0&2tCCE0)ASdE}Hglv3fGL-5h>kb}P7`>}F_%kkIc0kAC>IC@;(@R$b_@h%USM zCpcr&y{1lNgujuDnvDzW-sLK)>+lOu6sHbXtRA9!1`;le7U2@c9RF1AX~7kPVX%CeLTeEl-hpd1%S^G6FmJ(U@%=>Jc3V4}9W=e7V;v za$KjD4&RgJnF{-%1w+IV!lLxzY;F3`>5YpTzTOF61(K$v!rEK|b*LrHx6ATOZ$ecrKKgnxV263ou68BAgrl z-e_yFtWMg=Cpq2YIH&GUKZ>KmQbi^aE##7yvrQAA7d*ICbs7y{!Ixlw_p-3HndO5$ zK(!BwYE)!G3=l|Sbnl(2T`JYX)ip4KdSQ`R!NKo@V?5on*kP54G(xHtE@UfGBX6%3 zR|kzFS3YcC@8XrHOndTkNv1h%G&bziyR|9bgJIoKGDbe7n? zvH(h(3pqElrcR{O#!NUe!bQuQ5mIn+4QN@Gg4Cx}LfywOh$!@X`=Q9wA7W2PGgAiP zia46xHS87`wrkZvqgN%G zUIB3+ir?X>&#@CZo$GMvV)M)15Bb#Eh`Xb-5E(lmr_RT3l4S@C ziv(|lxB^jIodTWn(f-*FvzN2{dJs%3(e zH*^OL_#!hn4Cj^RyII@ZeIV0ZW~&(}qa3mYtwTRwfc2SEaYtQ|)(rr7gy{Kn z=<8q{XNzC!%8Cg0s)AmW3zlvXp^mgZ3`_!VZh^UdBvo9{);_vW{I-O#7-AA0Bi{jx z0n;kz#J*v+7G|~=WDSSi6TRMMQTGbecLdksRR8nkYu;4S>wVvEe`|BWAJA!Wa}gpK zQZmo3eaHFklP~$_?P|YYi0n8O%^2cM&S9&G`H>psdyRtMLPlVViikrb9=5Ufaq;Qp zPyD-T5GV8UZ%kN}CA`(>4T;+h)1lH}hEpY-rgA$zqq!yT?ecFgJ{CG#NUL0lZ6pjh zFFpw$QEMC_{yXi+X@^{!fYiodu_6%ID66|luwo9t{L?$3G#SGB?E(ib*7dO{&H_gb_8Tk>^d(Jn>6vRWZZveHxT;to6u{82 zDD4r_;z%>$u&8Q?*`DE9WcLfLSXCq66a4HnrK5b*7@U`rZJ9`1;mxQQXaphjO}CV6 z@gp=D+2RH<1Ixt9ARW$O&(>LOleX%Vp5K%g4O<~Rgp72ZoQahC=zGy(w=O>Pq1slE zp`h}IXTPl?16k23Z?)^fa=mj#A%&?0*t?O=bD^C}F~mdIITuizTO#w<(`BzP8wYgs0`IG+4MxB)}~VabVt#XXce`a0+y z#2M^pSm`(={lu%u1pAS{di}{~q(5{|RYH~!V?xpdcSUx~E-`J4)yGor^#T@Rw+iE_ z`F_n^;;*$OncMO|h)MiEGcvV$icJ(ZCzK41P&bWyvk-83x`Po`NM+LBIFjSW7}R|C znS`WpO9>Uj2Z$f~4Q~{)2&p-0u+J_)z1`Q$LhN0q*wa<+HgD1Vm$KUy3Q&2}jUW2M z6oD8_cHBoCchV_X+n2H>EM~J@X{%Mw)&60&s?EJ|Oo!<11R9JUp^I#G*%= zx=KuI8zkgl+Frb1Lz>B*q$oDGmQ?9EfQB3Rgo<%4(w%=oy8O@Uh4 z33C*u6(7UptG?zpul{oy^XZfKy|3TDd(%$~e>a=uAm56_cF85fDaM%im`#nIbL|e? zv?B6f{6pd)VaKNZ>QGH$sjh>vaiUXYFa{2ijgVlwM$o1$Z$x7p7T{|a|2>Hm3X|Ax z^b(;yoRA&(@PPV3=0VMHsxexM6IEd;#r}vJONKYccFAts$@@7|8;w)Q)+cLXFG)_a z0Gr{upr6x9WSfg$rqv_(P;H&DAqyBW`cuqjKpkOfWAI1xh#0*-Q< z+nBt492m6o8G*(=CNgd$`Nfcor1g4eGKY@|NL6`8fC9La-V9mgRQB@Dc8MwSi9s5~ z6t!mJ$F4>y1LI&spq92%@VIh!vQLEjCjiHcj0rNRcTDw}kS1-b7+dinl3)#noo^(D zwSs>Bz$Z5|ciUGM$x`@92?2?x5C4VC4R4CVM zJd0{RWFG-+qf&JjJ=O3I3d7|QF`c?;7c8jQ=SpA8m1@3g1Xl#-<>}?$`!GKL$55jA z`!(gP23TB_Za0-`kYe||BbE-=n=(*BX-!}gxzrp1BQp-l-;4$G$HIKZ0q3$iK>^_KVlr9TnHg|)CKPlbTs}O>?gKjvP>T}iU zJ7;C6jb$nZZ<$K0LK7Bq$7Effo^iA?-sc#xDOdrs==KldP;YJAQN;(%r2r+h3p)p- z;D=LlDF_(s=A0}`)AwJLgJwF~Jo^0SP8CL< z(d-f6zb{*b1^LWU24zRj>|yD0BJwS$MDTUFtjUic=+9&vA35StkgIo?-((Z_78-<{?N^CTAFj*?MZ{E8t3c}M&c9x5E05n7EHnHAZ2$DvU%u`;&040G|jeMPU_0Pty;5Ux2_-*h0t|%jf}yT~e6A7UoyB zec*&X2X5=ywzHX38l(`|(JA73v|TtR?Z_uQ&l(}}g+M=U$Jyz3!>|s3vus6}G_^^5 z*WKA#@Tsu1TQT&40!3aa98J}eR z&9)OZWt?hG3jP9u%}8+Q?- zxU$$-MFfMT)#V6-D>tD+IA5h`_=@7ajvJ30evh6s=D_Xx&gBo_8?OrwnEJor^ePf? 
z7t{ec2l8o=w8Y*|wTrw*1F{ox?LZx;y03Fxjc0(bL`)2~KrRWzGYk<#8~Lt)PCW}#kRg*AS+Z{ndoFVN`=sf6 z1Bo(N-hELz`;)i)-?(j6-=KuG2LB-jSYnI%a_6WFZye4Wz>lcG2ptNp#PKo`94Yg| z(KXVHN0XzdU7|vY!F>t|=f`2*a|22dGkEQZmwi>KmLnuW$SgB6XNc|Z2EfDAj%>zU z*`DpQFCw>vqEM#e%YGvpqNw3a#Y3lXvVamV=0lmiv*ddS!72x?+S-z-B|;E?dF&>} zlUOoz ziicgC;`)cf(d50td)@u6EC8JFC?}~~I$2#h(q{-ik&$gp6Pl!j`G%yV*c-{#){Qjc z^*c`NwfRT@CVf9PCJU$$V+5F(Um%?N4{t>HC^aC|e?j7cg(&(a-0EUbRGDp=tVVwb z<+tjP(fPGjE5BQk0J`brff2&TJsN~gK$Xm_O<3h2-;}uOV6lpMPM#BX1Yc~+6B3H0 zA{8O2vMb-0fi6!J>InX2=ms&GY8akC&<-AlQYy?6E0`+^ z-D>uOh43ws01|l!BwxHb_KwYD5lq+K9D;lz^t-no3yrhl^^6G4y8wP}$U1Cx8#ua1 znPk*hRW&snQNu!6I*Uavxpz0k^KgREftA^m_U_gLvlJ-}x47NqgE`!vzSAB)d`g}y zQA|?CN^mY>w08{CWEk=`CS$Dyy}5rD;t0c<-*D>jC9fCN&CqEIwn$6eS!Cxtg*1e6e z2Xa&N6h?;xWk34nAn<>$tS@eOTezpViF{d1zz+!uX(o&2jK@L}&t*tt3PuZ^^y}Fsk-m&BJ#Y=5LFJ_2hiJx&xlV#9eOcWnVvcgy5DuGqP~!Z z>{|jmM3{?fK;h!=q`~;VH;^IxYkHYH(o;pGexc^gVLEX#4ZHht)G<5ZFDMO2gj8|= z`wFIrE5tF`Yc1qe=RyWu@OWC#b`>`;+XHi;_dOc){`c&hz4$w(9lwR zwR4Y+W0`fY-WA4r@!`}|B8yHnR>KA1;6hYoTs^G*vf#(7g5@%nbLrq2d z29msa@vjML`<=_bH|g0m+XQcXm3PnI1Kbh|L5R;5C|c(Hu83eZKFW1y&Wa4)HrJM( zi`k|qRc6rTDdSt(+F@gb}F>&w3tNygHNSV&WxRLDO97=cyu zZN!%rCavP%%uW&Qf{Ag{ubxrnj z2^D%eYIhKF>L>fLSSZYp#N12Q6c+P+!HD6l!Cd*-8Ah@U;*km|#4boc&>mD4LK)8G z@c+0vng&w)__mS$&+7v@bY31qkAuN{XNCES!MMeOLCX3`yW3g+rv507Wm%WI7%j6_ zO%rT@obJHy?Gh@>Dv)3o_j7OQv}(9R-DK}L!O5a5wh_hOMN9|6LxE6`22}#Xz027b zdQ79iE`Kb8$CKKt&xJr>PTJO$S75k+LCGJu*9c)(MvZMFWz);rV`D9X5FVL$@4U7d zL>(GR+bp9)`38&7wc*$*BXQ4+^0`cu4I#s^*n!4XcQalS*R z5IFuOp|jl)IYrs~ffpeH9PdAT!e3+(=C~^0Ls}ToJ5eti6To90Y`!tZBh+WLm1ksW z3nTvIy^vV|Paj7V0Lc4r8oyPpe@XTr*MF*WO`?s{j2w;`7soq?T3Ib5^~-y(*ASsJv!Kd_2t zG7jv;-XcInapaiX9R*d8%J zK?bip)m*Op$;p!v`g|Ct0gaP(>Eqt6W29mAv|{BiY){jWHRThygYtP(LVqTR=ALhR(cO$V?}- z3hoU-YC*R(t1p`BSNmM-WOe(4SZMB^&}VTH5~C(>l08r>fQ=bru&lmk5rV#o7_0rC zE8E|aE84a^E_sNel)X)%%>9t2Q{IH{Kx$bDz#9W4KF^==3$Pcb4aUG2bmC0E_HE94 zYnZ2fM7Nqa=Jo_0BP?fH$=ur{VIx>I@7__X8}mRg^sNVyNmjH#SyXeX3xLUSh=S{3Ca@QW*K zc&27$_?!iye;^y$l4d#80UX9yp7^$qdUK<-Eh{vaO*s~`l99NsUwcO6kSQ)DVz%@I z^dE=502>0Y3LCtt>c^;=(j8$k{%iA$>q>7P6RK3kHPmh2*cM-6_BQDrPavkBA~t|9 z$4!|kp#!0R4bFX~AjnSev#O(6Ept>s%N!d#gB}iuu*psSBdm4PL`FpgnTEt9XJPOX zdU`tY@A$DY98@_rr{8?>^NvbrN#Vh%S0K};{jlwL}?(M~=3 z9#L6PkH_xVD!|CevI1|Xx*UXW=3*N_48u(Qh(7tGCY-jA$7HM6O=e**VGO{;1TKsW z-vUShtd7IU@anL5eV07Q2sPWdqkDC=z!&_p*y4q$iN@}BmANJ?a=G+L<1jyw?325M z9LNhvQqD1&tJz_RQ4Q;(e2J?jBr`fDbK7?}ziy59gtd|u2F6vg8cG9;;FUX7@l4Bl z;QSzDP+qnC5LwVI3jp#@zw@2zG67OdNF`OaIZq<-UJM-9g@Iwp7cJ2I<)Su0wurbe z5GNv1WHgUO6WtCkO+ERAyf&zZ5!(w%X01}M^m~GGQ%2u|kr-fKd_Y($iiA(|>?r2I zvn3+FM1s0r-P8L=1Ey z#1Fr3E-QH!NhC>*RPm~XvKm+EoJj%BE~^f@h#cNc3zxwB@c*&(Zq1Qh$DQx_0`qv{ zJiA4?yCt&uE{fC?fNB8U02)G}yD5>Bxa$I-fI@AkEZCA_TKB7D_36rnJK%7&W80iq*B-c zUOO3)owtIqpB2b8%t=*7t4HKOCP#~# zVXWV`gqXs+>b7}S7_M*JynhnKn~O1SBmO%32TiS#>jEJEXf%V#QXx(<6Kp!Na43K# zB0G7LKF&#!3=jGuJdpx%0hUP?KQ&CE{K{e=(;sO}R0?%7q@j}#;~(|{SF*a;6l#Js zMnAkq)f8zNm3H=?yo}r$`dUJssSTiE7zzIvJNL0&A;RG(F^A0LGX~3+7vezg(1QR> zCMRXy1^~npvsrr+U@^l3VR5lX+?&$cB`{sx zrc~C#o2K_1miPCB*i#NmGkgj~wk+%;dkyMbnbbBAY`gC#GX}$cs$6OdjW)*385G4} z6WMND=wfD@_ylXyd%lE;#D%oT`!JDHxRt{u{jZ4;T5k4a<%o?Y_)u0e6$lRz?%~60 z@(kcN%PW7NLEirj-CV#67D5`I+YRnS1-gptoic2HeH50}!zAMx~h-khv#`yOmUUO|x2#g$=NUo943-@NNYw=h` zjxI%NRC1h09rgOELf;}epb)v0t(pCklfo)V5ek6v6W3mp;hokg;zIx4@g>H^ACWT+OBb1XLc(IE#Rr824`L;9CtQq%OzW85!))#mc5yfg^ zo8+qJ9bXG7l#uok#ZKhs3RoTa35@bJ`FS||Bo53rur^r$&iN<<=^+` z(!DG-yNj1xwvQrC7G_YunNuAaHpQn;@OUZdn>9%FlY`ku7$V~0#2y$1D8vm7GbL9f z0-zzn^*~pBt@CMi5}yHE1Hk)8ev;=QlwO`vW>4rV)SCTEgs1G!-VDgYMm`jzASBmn*ihLq^jJT{^uVeT3Ia+| 
zDfDm}gP6Smn$>T7-bbRDr0lDcNI8JQ2boy3gd9#NiAx8vmV(kyo_c#$;Lr~ z#UfX@C9XmLlr@0mJa#%MgLbGy*%*+SCK#sW0P)3U^rVJR;hGym9$L0{r#wIn5opbK zSH2)q8={g~#asPT4>^u#3m>VOy1An(b{-IgWV*O1tie#jZ83iDb3VJwt_>sr0WR;43nD?iHln%4hx*Gp5+O3-Ud(Y9JFiAiVqT(lkYDhTIr383ji_3Cy%V*|9tQu0+3nl<))hQ z203hui2Ix<^9-Ap_rtpB&4jr*?&brKbUrT}MFkw4asKGCEe4XidE_K_aL4Uhlzf9+TXKP+^4= zf|GoDTNa<;!&@@*dRT(!xHsJK<+4B^p%HF=jm+zZ?T4?%pTFjtv|m7s&n<;5No?hE zG4DtoO=90Zc4p9Gc@>ox;g76!#l-%81!VwA^*@8A^B3{2IeofGX&WASrv+CRVKKq+O&Rc2A!nOr5}KDAGLC!xo-fZlkFzhv zosFjN4fc>+f1F&|{>rF8zgys!(-3j#s)-yDQmvGX_Cxiu!q-D&igKwv(u}I+2$x{6 z97hA8=|*5#U_pEeb8*0*8)xI_0?~7pJW)-Wz3`dr7dA(mj0m%8TB>f(~*8 zo4)sqti=e{Tx;Ta`ju?gw=qDSNS4b*6C87>gSNIzUV)3#7 zvfjr|f+F)!@Tj6mHtLUu^ zYi=p{RkJ=wdS=TFTTUr#M0)_Wj?)8Ypg?Me`@*nTv~^?ZXqG;RV|% ztFRDE3rPjndbYjw53`$w@9*^sz!2n-mehN@i9X?M#iVF9M?HJ(`cydw8MefuLhFmz zFv4!c@B3BI>Fl2iJn}t-Ii$oC?}|`M;0O@vVr0;k`m%3V;SeOct*F63b{Qwg{_Hnc z1e{QxeOb1g!WtBKa)L16j?Qe>vK>>VJs!UZj*FX+nD9OW?G42ZJHzK zSzRDpLc@!o6bUfPz8l`Ca}pyWioI1fMVZMwR=p^DwqWK!Un<&xxhO$*(O-OROd%`c z))s-8$)})$rhaiO^+T6I6&-~x0}`6s(OaaydyO?8YzyCl(8VfeB{EnSMy_qm7{}fo zi(~I5W(9SWRO8t@+Nc$#m!&k6MXFQpgP0NmC&Sz5DK?*K(&Xxq{a@+zf%#>iaxY`S zR|zP$tIrs+3;@SQQ3V|@8W5E*#8qRJyQl^cJ_2LzzPcw_#Xzu(^pnLQ-9EizLqk0nMcL^^|l2#^3X0mmta15f(O zpsrkb(mP>XjWaBYa&M*b_Bj%{6)rncQu+2IRLCD>auhqF76S{LltmjN8lB3HU;L%V`S}u&g`VE!jAG;t_4?u$7q9t?3}k8cwqFD* zYBwk5tvd8xyaZ%gR&N(JfWRH{PIFyuDudaX!H84feVH4sQvPnLNse_+Vk}aRzs}G$ zdf{0%h08>r`8RTs@pM#O0E9u@mQ>a}F}@F#xqtiw9p?mVhu}wjCiJyPNfg~@hR+2O zt#As9*nbi=glazNMM22fn{gT1l_CUq54=g1!nha-eG20fL>Gq~8Am=t_$Td>oDhRf zZa24_Z{d#$-Fdp(j*FY|6%lW@3(u4hA2rTozp$0zfWp_3ldCL+{A$@vb8#zD2?d{E z;nr9sB{c(p`*QF14t*Q&?q!7LdKFqEEv)w^B$F+~brchc?_A}z=baGjP7p4TAu5m_8;pw6hOA;dsXk;)xR32yJZV%8H200_v88B5Ho;SG~n zDGS(Xg+fJuts3`~{X`KMqism%Ph7itLr%M)6GEt=wI*D`-@cPzP&D~|QyX!dK_s~%EnSgWo-6J-HVqx75^!o-L2=YQ^8sWDaK<_(D2CIZa2x?sUt5yztK2UBS{O0 z%U15b2ROmMvwW8lm~vQ9izO#TF6zsvwy;2a4YDB!E$pzb(>*k^EKai{RF^-{8>)S5 zS7vrB5i%9%QmlF2)f`rLuiraya{LCOu5{ako75^mQPKlH$V4e~5mQ9CFc{0l-}?q4 zE01EgS8da*VpYsy_7+xbH7prA!Z?PXOd4zMo*t>?q%%>E|2<^beqf(?eI?A;*upps z-V*y{r5itVYNA4>?~Sb<&I?~!es1RcSytdeo(!wdBgrPE_u(cP#SpipQ6PMghRZ6) zdoy#1*Vp^v`xif#^-eh8Q2T+BTQjHIBk$;!g@P_4Y_lzt_YdM$TU^}mJqF!=s$>s`jy(YKDT+Aau&F`*Aq~Fx zUwj$JB@{v&s_-YM^gofguGg9tX3=urQWZ{j=5{{>-yEB5JAVGD%xZ4vge-mK%A$i? 
zy-4_G+Pg2>Fhm5#ya@rz6kd8phIzB_sa(mh?8D>GpJ-wS5w!5#%yh;=o*$cS~LJ&Al53y!yv`UbZQ+ZmQCU&g*x%K z5#i@+hcM<#yCHrU={CVSIfe2k+gh`Gc%Mm3^=25Bc`}eJry*IAoISsb9a>!VM5-YW zxc|qOrlvd?0+?(|Yw5O7{R(BM723zub(u-oTVNRH|5$q1xxM2IEp0p1&fdxiM)>v)-U>goJ-%RVmWSf;Pi(?qKah!p5H5qEf|37;_PW5WFYlXHf&LKg zl8c|_>MLcpNMkQDu)Rh1XT%yf&Sz5ULQ9_>xFUS?~~ zN2I|y{vh_M4k&&pOPVO3KmHUh3}=2HsHYl} zSiNJ2LNQ?&HI3W)S>d>*05@M2i>i&4yWI~W=%3|80+vENJTXaND$Xd^qLR{-6YoTJ zP^84+R3J$hKuIRtA0i3$mBVn2STvZroOgQdkzSCK!?3tnis0BM&K?dZpQ{CtKNgd< zQ5_3!GRHV}zObhy4OYg+Nw6ph`JzsFzlz+DL}e)cbiM_cr+f>U@n`3s`ik_s0eC>| zAE;e`>&#-<;zEOp79c}!ho8;t)lD2GMWo2+l5H7NQ9j8zg&R6haX0Of*TRXCTf2w% zRXHZXYDom}L%OLceldYSXu41dTNm4qisP!#LZ0oQ2{2_oJ_414yD&ZQ5i;4sI+RBu zgnUnV&FCYVAa8)4+f<3Z3baFAc_*LW@NjZ>U&fb;XNIoruE3T}I@B93%ghRfbnDp$|dZ?{3HZqOB z&2wZe-%Wmjql%M^fhv?B9udg~Diqc!uLGY8TP$Vg3frY6`tnui?<*f8A}4^g#8F^4 z_9l_I?Lu+ddFff_Hf=4&xo2i&xdibbb8-5dh&m@Xov&4cJR07JB|4AUk?$wGJ46d; zgy*Gsc62H>KI(@cDe#2L-A9{rw2Yt!Ck+g|NRVTWG0v9S=nn~w_W5mn!~eCm>BDEG zCI1HL=>TK1=)T`PUIt(iCT4!4g&-r^z~n@mm)?X|Ihux@FoblBpKnS2r)fJ$ubmM( zH~zvGju&6N;q2{6P+ca&vRYaTa!cotH( zp)TOeE-CXKj_FHZaj1tU^q%PwNgRwdjwL?>D97HZ<)u{DE?1)sG!P*jH30R%V zmh}A~mkQd;A`cfB#3u`fD=&wxSG#p!7EH1p0p5)1F`t_N)8$B64eU`Qc}IZmsHDpE zhJy{`ho)?}-)mdSNufcEhByZDRPc`kUmSk^3twmi4TVNpiqOL2cK#`euhovwt&U}y zAWw3}d3<@;W#xitK47lH@QO}$gcRZADg!mF;0bk(Tw4<&>Wj?v zsZ1{fBTe0AMV%CSozOOKHtw!%%WQzw6fk~1)4SsQ9}Hrcw>cdITwc=AQHgMF_1ex_ zj8C4w=PalB4cTu}|1&w+J95}@Q5B~tU7rAO3TCv~!{6@n+Q~OGR{w}gJ?l*$SL_hG zwKpQHxJt&M2F%_je~pahzE#Xw%=QZiw)6RGPC!WdL07FhORP|hG6+6eveHGq9M(v# zBTFZp*sDF+MZ%jIX5C((`{? zj%i_d%m$9fWmKFQtA1oAPydJ-cd_k2dgHdG(uF4ZNwTgvNStzxd*M$o@V^Z4CVa|} zCqk^J*ahkn`9mvZSeB0xEVg9W^64N-#m$?fZ1iZk zQ4a}~#3nV?uva8+!b^gcTl~KtZ)EfYFPHA9M1{5}BPTUls{Q3xg(cAmtRCA(q=%hA zlulo|{#Zj-I7d!RPe0*AV4453d`T4oVNLp4=dSb@Ew*1T6?lG-m?X9>P|AV?eWh&;3fF}>eSKW@ zG}29C)z)uFr6EMoM2*erTgn~!Z}Pa20IyD>t<^gl!Ye3WImQv?X%)Wk*ndL2mzgOt zU6=-Z{(oV&Kbg)~6P=a}}qmT@Qc&eKYL#SUu`tZqErs$CJlFcj*$=8Gk7I4t|S`zQb{Q>j* zvF|N)D^dQuNdmjGu*b*_r&efDPO{10CN%ozymN1L)qh#}=ZvI5PW5(?swxfbTI&@E zVS^j1oDUEM7xg4qKpJCQ701DQ=p7E;jn!L(hlWCFNj4$>GQxiXY z7B;7hn6?OfChU~CJ#DLm2@)|_!RESe_pl!cnp&+5A*Dsq2ZTIuIzXO~P8W(yAD4c$ zu%eNMys%ghg%{h(h!fAviNmm0%&Drf@<$rq_U2^e~#$UsSku3h!if3ufrPs#Qr*uB1z zvIXTJrT%ayOd^|@dSrl@Jr$C-+ixr30iq+zakdY#u&L&UuMg|>Ge1i_+rie&!pg`j z1}JO(TzkX*G$7WiEP%Ik0A54{5xbnY6m6Si&F+|A^h9b@RoyV=wDv-%;RwXP&EDuV zHBTk)zZfsKu8fuBQ|+n#!Tfg_Lu5o1Cy*a^Cc0ACEzPbUo++{d_|$ZhXYa%nV`+?p zlOUyi!gBsZ7%A5ZzAqGfJz-1J1Ld7Wh7``Ab-ago=T-aeB5UQ)5JtX16qbrZY!X2) zvd&%0gsfmD;T%D$8i4NIi42A;iE2nu9X0y$JXIHMj3x?R2Y}=xuT)Y0% zbJCot%fV{;V4G!tlzPI;Aur?g%&E5eRY&&4lu$z>`5b0%ndQlUF>Z7;X^Gg1tclv? 
zk&U(-^`GX|PlTq4MvuBXvUBAB!jTa+s1;<6d~G%?@r{$XT}g6~%@LUju6twiyzV5- z*Ia{_hPLEB%x-38a}dOW^I9g#6^Zd$ZV@}&vI<~&eL&TtS4016Y-~H-)`QvmYB+)R zlx3OAp@>t4J6clTsINq*zj_d;x)p~E-=TL7s7E6mVeqj#b4+kC+bdU{Iea;4t~AU& zX19NVQV6&aWw1*42)efSnOmT`1eVI+H9t(<+<$sibz zSWaHF)DKXyp{e}=CWS(vhmvu>`3~_}!-9YOZGKn#&b$%Uf(R0{hb#YM`7%kI^1DTx zylxdLV*yw)%}LEqDXEucR9xV8^Y(@{fj=lFA|hEKOtbW(^!rW3PAOeK3~q6<(G@-~?v*_rcBnmjG4J662WB-`5EaR|8Ng#~U;0fBt^Edt@*STW#@Nu1VNwP|lxZLEAOHJb^FM!|L71O_EsyhqH`B+C zd{e~ZY*eVx4DY|_33<+YfxiTgyh+TmjK`9#NZ=7^^7}_) zwdCWFj|CAXf8FY)g4i82yNkaTF0fEj=zxbs8>&}u4`&Jckm)Bl- z(P@<64Ga8{9U{*kcdj{j{2k^zp4NRi3}8ojgQ(?fBITv6en8>*h5v>5$CmOJKv{-< zaPlL8q6`n+&$okDZ->amk0s5tJBR$o4ZK<9Fte{c^nGawh>ZqN{&JGQPB(7L10dfr zgz6+B@GVIC;=8_%n>T&T(;{Kr7WPkBBxI`4#y3K4LQ4&!z;*tUr6z;JUINv`3uf99 zBEk>Px)=xbHadR2S!;0IX;p!gS{zZyF`{>$Dw|3s#Fb2|H#m4%bxw7}f!xksi5a1Xs-S5tBeP*${UCVtH_fc=}8 z3EwTF!x}%L@)_gt5Hs{D{gX*!7zwWVivHD%j&KN@SHTEj1Fl(Y)!yk*ozvqz`rD}% zr=zTn@>v{va@NQuy0KEMI z+KH45&t7@jU!e(N8GV-3Bs*^bus@KF3@eR@kWtBe3Hi=bvS;i!*R#aW9rrYFYE0?8 zIyed6gKZGvvSU=7tDD3Z6SXOWNRfLifZ77qshE`A+hkJ+_D&C$Qorzva^%pjifAv) zemRS%Em*F8^H}bqna)|IKq_$-6E}i&@6g*&<;yDg3z6WSBF$RnV;o2 z=S3ADt{Zrh3PAHh3~B(zsu_N3!*&;@5-@56w)l2i+2kzi+zKcn)BgD42eKK$dYzpMHIE<{>sjy!W`i7z2SBmzy2+5?7 zfo=S$qh>3P1}8~kb)e6=7cgCbvRg>saX}=Cr~^n$qc~O+du6Q#6ptyIX+gx;bOmvJ zlsCZ5!Z>OUW?9?@aXW8?p^zRM%ASc8lFN)bZr!*8Hkw=VXo0ZE6M@3zAnVK!4H`EJ zz3r)|l;c`tl~dj9f5kP-cqR|xmnG-PfEs(Y7as1#wTAD#MVhKPI?4ZB%nC$m40;cs zX5nY^M0I)CZDf{mxgraiy5~wlHXXQ_V~Wu)Ro>nJT!8d(eoW-4<5Y z#d0z*^Ng=BZ(RA7Orm8eDdaa+teh~4mvKJ&skc~(Y<#2^&Td4MA`L6G5o43@=h-_7 ztv1hqVy~&zK=6()(IzN=Sg$WV+pH3x)j*bQ2aRAJ68VcM@1Rb+H6t@9S7vt)yxN)Y z0*3-93ADLBs6)ZV>DhYpskcg@8B5h72a(d3(@}6KWMLA;`P+Dv;Wfcn8B*(1zqK@! zM_MNgCbzRAL!>}%8jJ4&EsV#3ujSXq@?spn{}~*H2Nv8Wi_^tniN#3%a-Xcq*t}#W z-L#}WcG}pl3eEVBKlmFRn%OAjSObRrLk=t(R2Uhg?R$v#y%6DO``TB0pGVkBR1%IS`gbDfV+xeVW~v7Ma}t$K2M=Mf0xH)W+R)nLJ+J z43A0kFh$XM*efsidNxy4b>ClzBrK?Wgop;&$8`eB@did(-2`FiAS50>v;4n8y_U_1 zYgE<{aX2uT!`T}$?A8Q6C~uOfB`75sfRSG<=UErLQEf z+)fTht)>~Dj!h;fr=7it87)SOyU16LiJlF$D*POZtBN}8r;61Z_8y3?WJUCLk$3ae zXBL^_)RW`1k5f@5u}k96Xz!?}x#Uv$K7Td&sz|{S+e(puct%Y~PXD_>@Q*X%{=TA2 zFjpyXs2bRyEyuf@*aTGL;`YrTm6_aZls>0&2#80~FERlQ@c0IeF~bsLV4OCoxsH8!5QdtZrsvL41%yxDTcJ!5>88;1hy%ecS{K zgpC2CLC%b03J8c@|N5}wyMX}lKt+bSGd+^a-6+)kc)rx^%xEgsKC^kqC`pVE<3R~h znV=ccAIp=y^d#bqU_^M>K(G5Inr^LU513v~AHh(p{)y^oi}Pe4KEJhQqt)~?$Lo09 z`w?&Q)n|R&mNOX`fqz9nU_K`)kDQ5MzRK8V!of_Ft)2={Kx!34@39$vCJBW0KKXRO zsrF@s5m8RWM?fDL48)t3X3P7(cKwL4T9rd78U&6hqUVy}gR&9T@ISzlXxus?2&VPOWnVc2evK=*PpZ_&7 z;Oj+cche^fwB@sBF&=%-e~|8R7m-iY_7R#l z1`)=;!Y&aDC-!{Pujtj4Io zZ(_&{r3D9d6Bo4KtoRN_RC*o9-8K(tj5+cuJ~;2yZl>M9WZcWSp6(9K@EG%8GpZmA zAgXZ;uoxc(8)2X~`>`Jwev5SP1_VIC%7(Z@*rj3Ibla_Pu~02Z`?!zD4&wvfdpujl z7I|3IN{&L#{G8b^A~V*YdV42Cu$hvl$`*jLLyncNAm40Z(kv2pQ>i=hS{?Lb!N$F) z>0~Qe6r44&btgw%m>Ek%gtox~Wc%q9kE#ses6GC`&=H%kdfeu zD8M;H8j)9~x&*kj6Um2~@p)_7VHWX|F*MPOe57Pfio4!aC8rQxTgWvT_ABiv>6Kv| z;LR0Mg?NKj55$izc<5Hzg*XZ zs`+X|gB~eB?;X-UPY;OmiPXpxHf)cNgt@sTK#$ezC&8N!Q~ZEXbCK+|hwn)m?SVs<1%g{LER}-I_Hyw(-v@yAKb83riz5Nv z-L?Ptq}Uw=DS_#%V{~+Q{+BZSF7+rePGq1KFD#CtF@9MFJVnT#|E-}? 
zd`!GRb`_;`^N!P^3LKb{gHKHj9~rlQ=PXAo;38FdvW<<=dm+%nva|RSBM;Bs_h*ct zbN2Z!RM-0$7RZ_%SyKY!gvu+hR#S66*f!z?9A)g zr)8#_#6pvcPYy(tf)NFDNatU8!8eiBeb+F(LKoNl z`KF=7iHytQujWn){}jYQanIkk)Coe2n{{NoeBmjd4ESVO zE2}P;{2nrgTPR@Og^fAXZ}5l=AoEiIFUYU{syzuS0$~JU>S89wBR=*QQ*ssAQ}mjY zNJ;kekY&u;#l#^R*~Y;7b2MIstLPO+8CY^`IZ?PzY+AB;W1OYF)w55uD|0&8R{NA@Z_{yDZx{rfGXj+Wm;1MkjV;d-&qR07mw?9s9(s2bECeD99_?yd z+>j~VUuM*i_3RBE{Y=&qlM@0AD!E#sn*HI4I(Ko+2xo0>ZQa?D-<1KY<@at~4TAPq zXe^U4M}Ce!e_e(H>~!LKgIzvm^fb}j$k30+O_g!)S=vi!fGiiGUjydNh9Lu^I*kId z3{Gu&{rT^mdKge+O}GeRJ-(-MHPj4l_r5Pbd=RU;%zTN1rWvP6#2?+#(M}O@Hpmf` zyClbwX5eHwCUP#CWj}>2DAq~Ybfb!FV&v985E$<&evqE!6Zb*${YH)`n(lx8x-Sw| zeH9@Qtb=6$#0*moF*MQ^;p?)Lvv>TF37F@<_epUkF}~#7)@@uMG6PcB5Vh!kXyMXL z_yTY1?&eZsyD6K5Q4#y>KuuF877kG04l!L95?ei_CTjKorHelm2zLIxc4yg#V79tc zp3(ons;zFcnp@j`wl03i{ESwA>Yq~q%~z6`oN%I$WiMYSR*6D+5Gs&Q5*|x)zF_o6 z`jlhURSY>CjDzP7LS?{Vycc~_jzW)13S#m>^HmosCdGxMAG@~0IXZijS`Y|X!r|sg zopGSaTHNBE?J2@NDVp*Tl2VlQ#eFT@iqAh| zp(yz39KcM}xMXC8SRBkTLrvHXKD9Mm8*7W(o`0X|PlWD?IBx z`lT!!jgrlj#rr^x5xj1PBcW0Y!2-#c;;RS5*8e&CD1v?Ib)^WHI;uP2b0#NAkS*2+ zawTTJi{%Fo8m8DoX>&BKG_3U`JT>!$_uk8CFX?@8PgkHb`#^ZmwHyAUcS6E0*+H^1 z^fZ+gyRai`1kmR)$iO>AGV;3)!nx_cSA6EIvwExGzuQWD@Eal4X)3 zWoUc&4Kxo2HS{dpApjg`ZtzdyHG{}oG3LuP-9dV|AsKE`GFPvg@rt;Vx^F| zlQb{nf|zPq{?MjnLD;1%bO0)H-eNXzCres>^shcLK>6fkIDcOjA_a29msV5O*ALZ8 z(xxTkMb6wsv*52Rua?-+kWQ6N?^|V9>1q{eI*YFLLB^_g%$}oi_)Vk(`Eqfq$(`)8 zpzjmpbR|$IHqJmRk|>?*MOY#*5>w~XvidSvN?M;R{Fi#U{)-;QRXFtRXk_(-XMoha zjT?Xb?mx&mwxzcZuRM}b~p!c?e0Y!AcF6W^9$?pL8%Zm;>W zJm5bc3vt=IDsYlsy%0-o7isoVMFCv~ln$Bry?T%VZJ?SlVFZgH`;t;4!Q;WVt>jqN zeHQJ$4BH0tTWrdy0uuhdoK^Uc#ZQhx4Pc>>jpDHKIn}=gimhB?cR&mq!TEwfK_AGX zjSAs>ycfX?l=Ya(vA;|ufrKG{v}$!$Ql`kx+a!w`GOzD>!vaBV06wwHs1a#~B2Naz zBSVJ}(wNjGkZrWX>~2m#*LN>7nhONYgfyB-w?SO*`z?cpL!w{xAq+ z+?x~uKPD-Z3Bz`B9-wDm)Z!Z5*{^+qn6j}QD)K$}lT#wv%|u6(&~eH}Ho0+R>un%Y z%jfG2FpG1&<4HwVIOl*T_9IG>bmK_?%zg$`)9{nx2+^L$C>XL07Ae{lM?dOCJ_Zd= zlqF@AQ5^=WV2BIZzhm2%Q33qgXgWulhih4OLZ?gW+=3&QR=@R%KWZRJ6yctWPk{Sk zp+X0giCDORe*w6Yth7RC2o(ga6`W+;Bj&Ulth04`hyzJjFi4i18Z7C(@~{iDP!Zn@ z_=iV@K>>Gi+(i2YA4Ig2riiya3^l~!#C;Oxy(_$8K$vBbNT<89y}l;wXo5RsXtz$R zgYC#cP#%HK#cc||8*u6asqw9-bbxsy^Yp9NuWR`yQ6*`f>RPhQmY5$?P*^p2VTV$R zPJqchY>7rAnMTqkCZjWdgAdkcZ(9_cuL6G&3gp-9hY<^gCnoB_ndt#dmUxP6A>r*l ziyLPhEh>ALxacFr2$p&TGuaf9FlFNm$ZUCp$DpY7l=5d_VD=;0P#4_MAS|;C#)4fs zq5ZHMT-MH49{nuTvPW-(I>$$Mo4yoH6OF^lKvu()o6=c{RhkVEB3*g-D}3AuBJo+V zGD)iN!}{nivw?sbB`FKVf-6N*yEK3?X9_8uDBiz_}ff){_6^W754 zr|~Wre9YwTE6*a_tV3+JD>w&R5)K-$#3|M??}Kj^=h*;#+N}6UZMrQWDT-pG9M`M9 zTo1qX$~9+hB>4x75WQOkUzj>S%m#zOFK&Yp1X$`^c`%y>NeG$P>cEO@g#fSVSmw-dAKW|@{Zz{4>7h^XIAr8#^I@E)sb6d-!=Eg&8xGRBluq8(JU z#)_|(+ey~y9zOhb2n~#V`EE8j{+k)!WPdDkxViX+miDUP$$+vy|Gv*^P))Tyb5qmO zK9;Cug)5G*I=NEUtyo|vJWF@Izd35ikHtz$E{x0EcO(Os@nYMq&ftdrJ=0q+Fbm{_ z=~((GA2nBn8b^~=MLs5{g8bT>z2U!-N8(3LtJY1!n`F$fLgUHt>>b}l-ok^M9f17a z?FHtVU#$T000F~`OCmg_X+Mt6<<{ zPKV~BmP$R>u3h)Fr&>ZGb;2A|{xWeja=O^-reVf9Ht;Ja&kQ6v!^ zgW!|VE|70KSQ>g>XiW8PSUPC~YeE(GxBXV^X|flf*!i~90v*nz$YiG5Qv|^}0+Tsc zguz3eg=&f6GrMTk*<#uMd$YHFK=6o+!LU`uzpYL^>W#*&&dm#{%1WdJGv2QGDK1`> zBSWz`Lm&A2P3)O8<)ICV3rBJu}naMXj~r^GhIDq41hI+Xn+-I7Drj2S)@#(h_1 z%#@HMblqNeiseYS03%JA*JMlCVHH}YN3ff&N*DxQn^2&rp=2!S2Z>Q6!O!3tse+_& zxS5iV`HyAf_v6$Lj})%ess_jMT1WM~^S{Z_YA4lRcuFx{Wro6>6gsxq57CeHLM$61 z=lJ>f{IB%9?{RDRw)5_~MG5%aj56gJ5Zz%|Pv^xKwtdl{o2-fVIU(}h`CqG*E>Va|rQ61_d~KF; z)kO}zE-4@#AN~@SG$;OVe8UGcCsS{zDug?W8pLtq4NS=!DKvTstxdd9B6cn!KQ$CFk1%Gifmvn zKKY{1Knn|&nqP>}$IGtHg0&v1P-yiuMfOWXY+pqFu-FR|E{CbB&2c%Bc{`+cdzlJQ zwV3wKT)~z>S<C%2o8M;ec{ZNk)Q=rikgs zvcXZ3p&g~nCb+Jb_G&+{`W~H!&77M0AHRtvvV>+g;OZ>T8}K?LRM(Y 
zC}08#hN0_m9vf^%2AEjoh%|%VJ_UnSEnYFJs6WGt7EGJjuN8TSuo0q2$ub)iq=X;Y z!enSeJ%+eK>$nnW@qUqp3`qm=lvX47p7O=QcFi&3KkW`d78VY01ahaq^w|wNRogpC zEjA!CCCLH;;Td&6-^h7FiX*#F$QMW?*u`WG=ZVg^j(sr9L?PTu7=V5M@t^p<@)p!0 zzqYH86^WJ717zsWB6b?>bQ*VQcbe^KmM8Z7_cHi#GWo^L#Z|2+UI??;wu%U%%{yD${@U!LfuuRPjP~&M`n&nK zn@#89M;EWl(T2XW184TpE}B~SIT`UvA${)kq6i7L$}m9!I@U|2(~FC8bF1m&Ahbqr zNou|4G_|b+vBj|HGot6SDwyo}NG-@1*ahLin`2-APAuC5?Jd*~WXqcKgrq(Ul9$G~ z>yuM;RzZr{e#3PyKq-tEp)&R`p@)+ARGAO_(JwOe3g8J&V;>mf+~m%3$~_$@j>k43 zhcfai)-=62&!qVIm^$N55p)Txb$vCq;nocXt=6*>Bk-Z5Qc z*uczd`8a&UfNp&ajVsCuW6i=aqA8oLJOk@I)G&P#d>YHJ)X=gf3)Nn1I@Bq->+$*) z3bxrn4HD(!VM;!pO<0rvyUpe3Q%U^a0Og>Y>;m$4xDQ@;N=wyOLuzO8Ko<(ZgX(7Z zVpK_8-nc6VXbTNSslRckW`nO|e-$>`;+iT7tG9^Z@}baqV{J{E2GE+hJx~ArUwj59 z{m*|P&9W>&wdD_6n5vFd>jH$=%_$D=YA#4qKhNKnL9Axtpi^^?_vJ2MoQuMchYw~4 z{Hyf6ntYV9$4x2YM_766;6gi+lp8YAJns*{@YMVp4h#4{^zoLco^|%YPt&0C^RH|I)oB?~%dn z)^V@wkjYhMNe%5u;eW*~nxa$W9WvH}i<4V_fpK6q(iOg4MPwcQ5gjbwn}}P~Bqm1{ z%2r{xSfM!0B&z+u+p}{wtQ$$ZxA`qisa&8D0KusoGLwFkDFeU`~o~?sB zBB_OR186?`PPVX2nW^4a{^Gw~{6y9RC@7}4e7A7oxY*@|R6rPp>AbBz2S%ReW>p|@o$SA;70Jgz($AQ~Qm5cih(wYqWhj<>^4OfdfI`~$;tORtnkhi|G) z0!20jZu`toKMK$&K+Rz-`tYUMnwZOr@5#DjJ-49f0=0LEj%A0^y^%Es21yEu=#luP zSSTJbSDyuwz*RC=Q z5GhFmSj^6t>w*B0prV#g0!Ef&t&?P)4a+4iT)yh;SE^lEQ5sYfREi%JwI?`Q;mlno zr2w{RUkS4UU;JFw|5j}ql^K&6ii=7&GzM0=C}<5h$KcCR5s7?th4w&XhMDGlpV*79 zKIa3t_`i*^{FJi$A>H({M{f4!o+i43SBG3hZCykG(aJ2Y!|GbHPBJZ~K~nDwADQRn zk{-54{9@={3Idh={^FUaY&GpaK-<9`VS9A`W<^-`S1_j`)=goWC*T%<&~dPC-9 z1q(FAl2Ker1TmiH)_92B%%l&mJJ zQjxEB?%y1#D-u3wi5{g*J5f3H8*W_TZ`8{*d$hdWst!pQEbF~Qo*;RSdEn>ge~=~+ zoeat6RF8LaFXSv@H2m*e$~iS2X)xko{J;#wzDe!gnH(wdv2Yth;nKh9L;X-gFdgJf zFvtgtSDK>onHt(l3{^mT;95Kchg5kRqF!#zTA(LLF4X$pQeST zY@snjA#K?8j>6u@Q?ePQf)w6I!tKc(lVqHuqGn&KKORS(45}f3P$4K#natPnKgcR2 zgm-96<7tL8VIsx)@1w-z1L7oShMavm$j73wwYbvQk{f}uiMKCV+c>3iIuA)hWzGO! 
zat3skYXMMCJ+m?6AzW4As+hFvqBtJqz>Lwnq37Tu+V9!vQ~%W0Lg;c!xx|Q+ryAG^4?B$43~zZJIC&%%0_`(JuE|7B4S(-L_aJIQ+EN|<2)dZZHKJ_eS>;(=LV z-#q@ZNQP!QEL?K%hi*vyj2wwCSN0~PBCJ`+05(L7^Ix=ludu?S?Baoti7d=M zE!8umpsapP_5xz~%oS%wY>g~Fp=!*=I~=9l_5LOomUbnTCd{ei-!RC7Y2Y=7sFb_O z8RV@H$|1+gL>=w|4Pi+VbXZr(Mc0qGZKkZ zox-qFSfU(nV&Q6%A3i3-r%I97AiOtdI&*RC@F`8JtO4;p@{2e^={A)=vaF(dcN1sT z36S;^U-N#L)ma4i856(wx$~*!rm$7QI_uD0rn_OAB@j~=?0zrjm!#=0M&&4ubQp@j zIH(bFtJ;?xGvg|@ekFxP1U{i5MB8>%n0j-TbAQb+NeR5_e_$|xlt67dog7V-E)o1e z`Rj^43ZUcd-f0Lg02zZta{sDT?Fc82uPX4f+1o-#_j|nqSxiv2hp`vW-S8JzF;;Wo z%3{E608-?5KxT2ZsCZ2j8CGl=HwoRH6Jq5GLA+z>YK5n46TA;xM?P#922FG2k9XqA zdZ19oBvV*6qQLGltfT$|xg~)!`J{Me*iau7<>=z@B7I#3Q~SE{a$`GQeQP(QFCMIO zH%Oz0r{nMVo0JY!F+JrB{+>VNQ0skx!#h>a@$H}lXW^}|A>42I#~1%KXDPNPok*kp z#T&Am1WV&&JpI3NG-gU4|1pbXGOFbrLt6D&J}{Q#bT~Z>xHTP&iwvq z>ZnkXNMW8eL@NvdY*Q(XgIHPAuMe3LkXdl7$lRIpoRMKJuykhqP#2zs)~Oi6;Xr(s z1D*;!hFQ-RrcyipJO!E!Sx%TmB0ZK)wpG;1h1-yKU=Z<(YnYm=5g9zdwn*P5iI!n1 z=Kubw)6x0goxdv_8Uq4CxSdH~6Qr?h&$UwNPM=I;YxZd;Y7EZaj8vUG!kjTwGzD@d zXgH)SZ`VH8MX)mmiD1u=ohSAit5ev3`KCM4k3et3lTF;08RI`ecav6ZQdna}PDvT_uA8`(XBR zE_ZbMcTKkSc4&10Sj&O1wB#%|sB&~>B{faUl$XqtPY@bw>W7M!Ca1tRvz_5+JPiOQ zkz7Tj`}s#+uh#c}Gd3LOWJg2r03KsFHNRd=D5w0xutdu;9a1(QD`;nKHW5O&*H|Op zwNn?vUX9G|H(}R$ZEkPfZF~hvEpaQMR?c{$c0`e_;rqDo**{B01z%K^IMeg%kz;Gh#1WPXxEn+Je zrhrp_--Keh0?a7Sykal75d!9j+7k=35@9CyNggLh#jbZ8C9?Lt2b zN9@Zgc1KgixgFyLkl}GO2bR-JlvUT;MH9lUyZdHS_j%IJ=*e|vND1Fu)$sI4ec|9j$p`uJ!_s}Q9^kL`%+38mvJ|k<3 zUC{78vb|>A3zdVaE^Pf7otCAG%q@s1g~1lx0VeapM<45OD@NR8{Ex9RC^<&cDmsQEfsIS%FCS|QMvz-%Cd5(uV)w-zz>}Sk1Zm^t1N-N# zq=b2DzE8OwAsJs=0|TZ!&i)75L+L4ZM8Op5Kp)fF#pIkp8|T3 zH0|Pa!f1{Igh&(3L7qK7t*mIoWZ3Vd&T}{Wtnpk#Q1#s<--w$aQ{(!dy&q+4W^WzG zD$v?i9oAVyL)G+2o}dS4jRbcQ@7F3EiWc7bq3FwqXGupk^^7!tmK-ONebFVC(5Jpc zjkq6tEl8*6o5V@@aZ!vJ*X57w3JVGfhKU}KwSrWHP4*3&(=Z32CSXB7Qd5I=lVj1C zib)d#+Rx7a;7>2*rnoO#rkJDRY`1$_iSPWsvoJUmiYcR-h4$SalhNN7c=mqq(>BW{AD#cqR3^E146;LZ zKomizH3XVCnT*UJJ4!R;z*5TK43%6MS)>!ov0elMAt?fv2x45v-5?{m40{*R(xA#d zrS?==+08p!PC?vK7#jRFOyIuA>*)9*w=tE5LKKK?(O}(fBpuP~`jsb~H2547LI#p@ zvBitqwPe5M#pNn8^;xgb+o5m=4vw+^6nG%5)H!vf}b-&=0PI77U6s|uv@(LuCzq5v`M^TByGRQ)x;u4v}lBemn(U4-*R zQwcBBp(@k!iDhLDV-}TcX9zWQ)$l>G;Oo`yX@<(OF2@<|+MoSPpj~~0CXy>Fgv z)nT$T-RV>$4*DFOp}oo46VQgFp@GPt^Yi9mAS)!|4%qpqJa;$5D7v*0DOzOE%oXNY zQSET!GK0S#`y=&~SyrZwGW-7Um=v|e0$I8^r-g>4^Y$OM(Lqw&G;<~ufzWM6;u|u* z?=OBNi(qLrIh($Svo}nM_Fv4c&e;kQYxqD0#VAX>v!MX}+c{F*e*v!}uH2a!VAi(q z2TMC)-b!DaD_4BL3?Lz{e89V@P)&8zkJC4MU!#=I|7D)eLk78$3ZM8mmTJqz9^>eZ z0uTk6r+{s&FM*dL4W_qbMnb&#*>A|%u2t^_FsnceQVt|+m0xcl4`jxP9|7a=flPOE zift~3B(&?Tk&?xj-^&+*g&}Pml6a^#m+9Gk4ZrlHbhlDlaA0Asy-fdEkbjeixVVo` z_SD|F49D}PuYTJ}51U3eh>{__@QP1&Tx+nzB6o%=Qy;+MEOh|!DMfPHBTJts^PXq5 z>P^m6hzEfivJ-u7XdE6kzmeJiQz9JQ7P@>5Sy3;71N;uY52OBmq!pt=4^-(Bx-$yV zXsg+Bs-XW(%iN>fV8IfmJH7G3avgdLzlc=jzz z-=+WfWte$PgO_bGi$f7C*zWD3dH59AnZ|cC4dFOB#SE3<%XLsa`b&S^AXw+0_!9~W z1&gI2lFj5KyjC+MU=Oea-1xE-jj5^R;i0~MRaV?W?=@7xd^}}^If}XM+5-InQs zOPLgiIE3v%++K~QpWv2V_ChS1)pQVn_&5IjUwvU8e)Vae?arY0K!#}^E|XWhMPmuE ztmdgY(!Hm=-TpY|qae$%FKNloG1{C4XcVfAOFF5NS$dOV_zOkhgq+ zW-5yf;0Rl|`8bP?+6ho^Hv%O?B&)@BkbR!=`A}wesN+(!fLAuEOHb zc+gjq6+6j2=%*36#MC}t87Z zoNM~pC?vvLMH2EILzon%E=$^v2I`B0M`y3rWYnzYzc~r_dyhU9${{EE#2okGs-p8h ze)s>CNm*8)Fl1R!zCS}IvQ)WIH-#?|&N5sd;UlzPNR#+W-)o9j2_pcNWG)n^5zF5c zQVLRQkVtfbhwhHLW6x7NN zSOT&|V>y4r#F>0hWLmUoj#F9dOXi=`;8X?LHi_+VJr2vv^X9)1)q^qL=>&TQgQF(H z9c0QjRdS{4xtEGv@Kxq{CPRTmqo2;*ei_|KexSDHRO$3Y^>a9*aTk zlCq*GG7BAok&`W&5M#Heo4LpM*mTWW$ zC~irGbE%S7UU|`HWKorI+p}Ib$-~o!$xGGe(Q6I-HAq)GOGTN7~I^FVvnpgR@#4)U% zku8g#k|yzZYecgx&G|<~%r1UR!(b^JF=T+uwaRz;z5W=Ntv_o`+vGfvky)_5idQ%b 
zy1UxcLaoMEcE+ljg+Gy-Kgba^PxH%hGC0T?1Hv*G@~`bHNk+X$Qfj-~5A5;a^rWss zncu$eUyS-04$`OAPZ$nwk_f?9aI2Y$k;@bbD9O??JrP`La=N^6NWnoJIzsA^m3Q$~ zn7Njb&0sVyN5+n|VO9oOS<7ta5w#qm>?Hp(m zJ5*?YXV6#EUdlJ1AR2V*(7;}LS~|l=QXs~#Z^eu4a4wF=K|G05=x|B4Gzx13$L-^S zOyNhC(=}-2i~Q$XN!j%{H!nRI1t{5_c1`UqVJ}s>n0;A!p*09&D*(tM#T3~^Ecjnm z-B^?CwHa18&59Hya~}N6sfYF#sE+5|F{iRPQ40O`NqqUs*I$vo3?RW4%h;){1D7x8 z#!w&0U+rCxnuU`f&2aq<@Yy;mL47HJ1UPQ6a$xC*>%xG6C;jduLJrAS7)CcHWH=FT zjh{E3^{)9bF0z#_8q7|{-!cI;XyUo6{^1@?)J$2LQsgW7gus1 z(hXqZ%`uL(oP81YSRjz7nvgBehS9A-i3L$ngk;IvvHA-|1_Eqp8Y+BQZb2pPEz6H> z?sQ)um_q?JS|@j+5C=Fsm4d%H)zko&GUxAnH8vZ8MP-Utzu@EK$I_?-RfU___GHq_ z;k0PC2P!Avbqa(SjTm~&Wx!|SbO9985Nw2Oi{_J!om2;pK2~4TpDBfpax$+^w1(ub_z940*T7^W)wIeqk!5RTTpBjHnQ0pnpnb)93qGl z5gQN~VNgFKl@P|cG9Mi=4*v*1bu6Mqm(Sh^70VV!r?*a8<)K~%$*F+A>$}H=4Cqw& zIC9lbu=M;?mgEpv%}7NQlYi{CkVPiSV;QC0WoCjFBMr;>W&o_qSHJoBjdohbcg&bQ8x} z)@cAQgyO)fjb)h_x0Jadd>TC7!0{(GtxL;3>6l~{?^tSzen)S=E*X2|4Xdt#@DWyy zMa*f^s{J%@awm8Lo}suZ7{&UgOs-u=Su=7r_-d}euYEbNy%tKsC5W1XBy%OMkxkS) z8uBJwO@@8R&%UP1~D?8VpSgrJz@vhF~5IDcWvsJ zHeZU~Ko1$Q$b2CKMuZ|L$vdT!PCcIee?B~xR2?#_V{o5BoQE3KZ*+tcKFp`9BtseCxfjP!1X=d6Vs)k%^G5rb}sXp$+O> z;FWelLizF~vVGMUL$e&Kgb8MMK#6%@#C`a>G~4PAIMUK{3X{p2cyCBP{rH%>GX9O= z(v`c&3^It%uM(M%+^9Z0fI>MFF)#eMNZF8pvIt~8FDl{shp85*46H!`A#%g`t;V!( zB357)ww!=IyZ&%bFJkWQ?^2-oV_9xnXzoi?1N8voJCH^~=o8WZ#?^UiMs^x?q?CWL~<%GyAERDjvo}TO_ z>8IHQ85pU%;h#a~kmEzs0hEK_d${;1XiR(XV;NY3*xK3K-bV(9`W730kuH78_~U19`^sPZFq@c| z#+@ShHaQf$4W4aJN1EGy^*AfC@JNJ{Wtcq=n4D_rq=UlAyF=pv*c6JdDf@Bh8u z9~=M~VG*;*%og`T5AW=muIF~*M&yw zvdvg8U#sL+mT%@X`CvIVPNZBuB#iT447GiB;y#zE+ihB?mg5wC(8@Hbv$Jr(RAlov zw3oMLj=~~k+0rOPDzlsWdl=yf4-wL(P@Sle3@Q~IIYipZ!Qzz@NFSDUDi;&wL zq%%1hjZNYAZ$zOZDcwS6h(cu5TPxRF8DZ=5b0y zEi=JjOjx&f%nRnD^5)=xh)lx1bJST*0+$SQKE6#3ijxUrEiybTVDs^3bhLFxhCx37 ze*JL$#S?ybwtLU}U?6a;{xVY1E(P_QQkHKNQlVt)Ujt!Bg&R9IND6XqSsGkC#o^6f zyQ9GMq(s%bz6yq#owOo#kWc*D_3Jl$a%gvj8G!5OuxEk6KC9;9ZJ9>}Jj<6NUiGVl zg~GC6E&};tff>p8VJjx?{=t{;D`7Mw7f^6Y;$4!xN?HbutzHH4c<4fr&6Tqgaz=Zk z^(;xj#!8Gs*ZbDDeD)S(6lO?Dc3@WWd0&0_r0-^nN}*Mxe<$iM!~shPtx~r6j;-34 ze~E-62!rZK=olprc*=A zwQ`R!bMZwp=Srs86xb2AGa@-GGEwssb0-c76N|H|m#%%`x^!7OdnRqqBQxa>V=8tJmC1l^utO*)jFy)A@8I7cwoef6Cz zQx$!;uPEat(nvSpZg-{={E;nCYgNFxE(pJrwzVh;(`gjofbmIWia8`icRu?^pXE9q%zo$5&wPyh0_vOV#`4=LM4V&_X6{(I zWXwRwFpd&#=iDu|5gFYOw?nt$iIpb?C!CAAm@ClM)4$#_3|Xj6vp2)#*n&VO6m(Qc zYIpWFn1~!kpEMeh0L0#5jkhKD6H$VJmZrs78o?T5UUOdx{rSik{eG>vu_zDJ`IF50 zOLh*tzpC&>Ey8Mj3vj7KBrXjv6q)RkHxST(_lzA7YncB^=73Lx8jcn3cUT8Bl6Jpx z%?X7cw1MR)*7oYN&k6Zn{FMcL+D-`F15FLZiw3Y7sX6(?VA4hOn3=4Vx3U)afmP(# zp8^e%9u^4kmY=r4K2JJa&aw;HWOHid!w1fcQ%-af=93#zWCsh>#-4tyM z?x|#mwh=*W`+DlPs-xokTmtZ9(T|9AD%JrLf_ILCW;X=I=dL^>w0Zt1X9T~?x>rxT)Atzu`C!?GhS$eha`F<>x2VohjMQ{VCK7@4KiIwI@q)7Jn*e0@K`F2pCMAn&oo~Pp#I>3W< zAO|1DgJd0xwT;MI=yyoG>hnulMWInnkV={VAR+>4+#}$xblxk|xP>uLtH_;9o1_*I zxy%d+IW_3!vhNBjeWH^~xkqh5`~@!JQ8=j%@al!>mMG1jrJ9@=#f6PGjQAGv0{L!$ zqlgHgep|2X7QQ|qB#u4naj0*=TK=od1@|=yy>HHQ*RBg09rpGMhb~)bK~nxZ1xjHb>R#Jio=?*Q&6_V;L#%VHEgQ1&Zuie2yhVhD2;@*q#Xa z!FS@bHuRQ|#5Py5f|yCu0dL6_HPIJ#bxRNqagI2zxCKvbC2d!l#N=7PC6+Pi=Gadt z8b`Cdjx%o*er+bc1s5)2n6`RAaI*K8Yx-0FJQ)BVB>S&!NUzPW)7B@D(4X|-^BObD}*@dA%9 zFpK=<5>4x85DTenETLukF7XK5u8Ng!96(^fow*27j@$DOg*D78jbhT0c9)<_UsZ-| zLA84E0uok6(m`$r9rN1tm!I^(j_R;ZfBsqk9BFVpU>Rhpg@XK)!?o~siB*bs_j`L< z&BzGS)5QxHgc99dD;(^y`a;}}T*Jj)j0NS&m9EU?zSK(Q*67EXZfq^d_S8qf4JYAT zy-$)(Sx3Cz3jf4ESV(}t)SMI8F(?2TEQ~iqV$ZQ?f;Z7}H?T6u23c~((3z}epMT2w z9g{c0djZJGVExwuN3p<&Y#X}UpITt$c^nX;`vP=OALxCCS5?{H^W+4S-n(+gw%{&) z;HpQj#(>8Mvp1Xq7;wtooNpQ#@II)6g1kvdIXEi7NtQZRh`Y<-Lx(FeMxA671HGAk 
zt949ChKAK_Pl)JJxB1i;26H+RlFzX|9OEI@(?RY}g%Tx$S+?Hc-9k6>6GEzQZabL|JU=xa{f>ld0Peh(~$ym3W{sVhUwwCdPI^c#3_IWHfw`4t@ zU%e+sZPh}AbGQSciiR!oeW0xs%9Y519ZvTjO8{(=7V)z_#j7t}e@1$%rXk)F?V3^N z%fYPZgQU1V)=3>MlHEEi!racGBVT{w2^qgR|6nrSJ1C%^2$ij2>Yn0)k_n{CKwbg) zLW1r>_anwPAkEc0!DlQ@!bF$0hj|?7wK`>+lE06+dh7S4V zYff4VT3?>Av~sLv11TWnepp~HBI{0J58i-nZ?yYB@52Sow7m80Cw&tse4{`$sA7UxCaMGhQ*WD=Uer7z1ZPm7lTx@i1ps~Hclb}p zWZMMsxE3zful}mWgcS%~%7`P!g0(O-DBTPwAT3K#;el2~!ZdV_ zB=@Skgo&xFX<#KaQWeOYaftwWpC9&rl~hs^SmY&H50X`Uc=4)|pQVya*SVj%gzJ<& zB6qEV3sLBClZ4`gMS}}Jj9v3w9@z+3AE7G)4Z)6?8N91tV0njaPhz#gT{ad{;F&zq=MY`(}s2TSd^yen zD`laGgD6mmmhr+YRwP02QS!~b!p4s?6sWYGsO?c8(5ON9m<)`eD>(j2uCsNESXtj; z1Ux-vFn+V$Am{qptkfr_#0#|~(2OrMaXF)K8!=w1?J~&QXu)MlFPC+4ym3CT;3IF0 zlQU9#V9PRHFbC=UHYxgJPB*^&ZD|Q_?|0;9V>y&$2?J*nyR)qM^9j1^joKuiQ_smw4tP%Ic7_Zgoj0gw6Y-;8S2cP< zz@YGWOQTpCh7jJOCB!B3fko|1Xr#b0SzNS*!~1HRKv=riAQK-JinbMg4{eamcs7z% zj4|*Bf$(J5i#$;P&O^5ZhpY}DqIp+f z)?)fHMW6*lC1~H-JEq7+3Dk$r`R>!}Eh007ype~+o0~R0 zU3{HBNSI`T zf?=SjiAC}|U=OLblB_&+u{B(peBL?#IxV1x6~WRL4wR8wyl}e?L3MkN_$A+e9s#qg z08aS0`3^2Dwk{(MQZ~g!bbuc~4tDRsrJusrti~93EH5-I0F;>QP%O-uSV;c;;Ys27 zH`lEX2kH0!XYAdYBfG9U&-ns=ndq2#m?zRyv&trm@6^RkB(7DsH4;@NiA9qKi31=D zi9AqVfCAX;kVw%Uw{9(4L0gt*tg`8r${mWP#EU836yan=%+mz$*7NF5GQYJGYwiE8 zo|zc799@abv(MgZuf6uV{MTBOT+$S;vCsUKK?q zI93iVjvlM%J03==ihWns3f!+xxvzvySJjWBt@RF-1Oh)rELBqoTXy$CRbhP8bj)4( z1GW+#dS5ORpb1+;^F84TWQZDQ00XU+o1g;Bt4{K*nVv-@NG`5-vj<>>2xu-cGV=+B z)1h~o#H(D=n4PYS&vG-LF=RHnAQe96ot#jLEB zw2#TGR-02HJ>D!m!fnDnK^C%_3gr7f%%2g3INf`dNSMZ23xJ0xHF%P*LuR_r7K$CA@)BAoe8il0vtOf}Eziv2o%mu3hzNk>DA#iS#ap2a zW2wi6870CWOWLF##}{Ar?WN{Y$Z%e(K+&T91&IZKA)V6<{1w+RH%-1DDT$Lg%>+5h zv_F-qsK`E1AuvYl?SmK)#sTS_k|2DZ|Hse1c+)8kNyxhrRqWtwH(P$erkE@{^>T_dWKLK_aiRfm%$e?l6`y0nZWu3K1nAg zC;P3Aa5UjN?8VUW$4_X2sd!-_bXuv8tg_-5Zc>xlSoRT(8wj`3we@6Yx$m zw3^^fKz_@!bSD#Up=sub=h(rS4E5FB&=hr&9f?>gq;}0BjJSowMLR2X50apN=BvZU zLS{Ap_o+}3-IU$-i88Z6b)c@^xY7FIrcYglo=Et)1E|smsZ+PxaH)8FFYsp+zjD5_ zE21#h=d915^MA4m!0h@0m>uN~&RDxxmDtkO59{5_zq?q~QMaBcFd)%qaLH4jbSbJkF7CQgoQ;ZbkHUq6UsHk=K}-6GbQ zP#Yh~N~x3F9OtZe-nQLf=w*NX^u68QzT%oJ+b2XKiCr>p+;^w4>7DKYI<(KL6MFoJ zGA%`E6HWATfxXQ}xh8o^U(w!QY>V6!b*3=#H z(Z{)fZBm1ObX|_EQ39@U=W_HnMMXl~Y@= z=Y!65rwFE=CM<1#EBfyOp2U=Eznw$rtYxluG5m_9T|s%(9g zj4P-0pS<>b_&FH7Q{VyX!9EjQ;UU9h06Tzet`{*=}t+-P#AFf)^WXmwX|vkKjh( z-IPn<8$xI-Hs{&vU;aJ+`<|$&%w^mVHuEvsCRCf^P+5I#1FicJ9!`!$qNWmiAEH#@ zVeuFA#$UYY0}?dmip))@FaGA@T{#bUXw8t0d(&Jo;UlBp!%$V6i-#D+=Ig+p{FUV zmO}+_0EJnxV7yUIz2(z(rHqTl)5HFh0q)W@>JSH;&p&~OfF(L2qdz%}Zx!T8YgZn&LXBVz3vGFY>FNHp13 z!dyGO`k=UM=_gh=_(x5|0%r+g@-VKn^Vke9ce-~oymUUV&7ch?VXO!(+N{!YtLSC1 zRVnm#j%aqSILZzFiLdzj_3K~ue|M?M?2{t+k+}3d6NI+f-Q%Pb&n#zMz)BP-JvkMU(5f8VsvEPb)?Xyeow%LtTJ$$J2i* z^ajEUy|~jYVk)P82SrpbLKG|DT0R%lUh`qb>Z~$ugdv&vLPyTw7S{wyf}HS$rIiTp zO+Q5|@o{;yJ)8uT2a$;K?>oYmPyexy&?=G_q85_K*Gs9dxKELX=bG>V^T~7_ndCOP zD+3F622|+jKXlfy4ofnLqN&AVaosOV($s_~iI315Pv7x>Q&l*;SQ61FU`W#I$jd$Ni})`0%hNCbJ#qS!NAyQezvpxSrq3vcTXC=D0_ zOE?m)H57unRc~)=U=ok5ak=0Ez&>Ry0K9#8!=VyY_+{ihtL-ezwA+L$n6}M$oBTVZ zE~q1UlD?Q3uW8gL$uYbuo10Fk0T!uFeKW`8c&Klc-2A5_ONAHX)k_9Ou#_7{agR$| zem%VWv{)o51ta@RdI#B0fmQ22yn?H83H$c;>8kAi1|$?=E2eqxrF(Lefg6PO#+RId z&@(VojZ@<-wnW6JxUf(HX0BeJ2tOH|ncGv%tz9+PL5x~go45jbVmWz!6_q^;YlN7@ zc1n7z@Fbl(lhe-0z1IM#bP9GOr1ShU&a~;WF@)P^_-4MBtL-B60*l35PR|0A$tMIV zI1|jj|K)#}{m0+bBw?^XwfRZo8N0KKlemKmKqk-SXK6Iwc6Mac%|?1f<`hDkKqa_s zp1i{8nN4*0*TTXUPLGR%Hcg>WVNn%r(rrr(H|B4rv*@^|{qMFDUPOl8v^+Zhx%Db4 zy1d>Y=S>(2SDKzIq<@hC!pbQrC%-2$8L*A&hzQY46biWrWW#2KtB+BwcPrsjCp$Dm zrv-%VT;CoK)%Sr2w@eAw4yv3Lf?WLkLZdc&#L}~ugn>uj68>JW4!`+Y<$UaJ5;>H_vLw2uR5GNRUmjB6f1Ux(H|$S01r?g&Qr3=Bsk=n 
zPdngdxfooAkhav1s)P4l_g7y*A9c3$UI(!)n?oSj?E1K1jA;&aYg>ae%GSyHEL=ig zI8g(xY6~Eb{ird@R%^{a%_>v+toW|7fU-=2kg?@#scV*EA?8tW!FXrI*J}M#b*E{P z5T_P#OAF;9U*TqiZFnUnt~LzCz99D`AM%u#W+gTJU1&kMIhd8>%mp4u?l#PIT}v?(IpKf?0+l+j9>;~e%b^LPxS>Iv@Q7_3ucfpWOLY4I8MdghEx3A zbiycfD35hOapbZxxc(73P8yt=84AUc)+eZ#F8SDapZFnp5K8(wVl$_``?8`Xf^p8e z=&IOq<{k6D@B(7Ylz5*gRuf(HnKuA3ssWS`< zR*ASrKvdmyTMJ7vr;}9gNzYRIpy$eInseG(3h#ud=;vfavz0D?RES5IXn{Q>E-idO1Gjh5*g`*o1bm~=w!kOtxnt3CLUZceZe&HC-QHsI%pJo!ge-L+WWqp zob^fe~} z1>T*e{C)a2@}bsRqR&42WmzIZvwJ9F(%eajhNxa5CMg+#h2tDxV7ej?VwpscSTvg* z;s`*L7RK72oT>F~F=EJN!Y2ew0=R?-WF@V>x)AA{CRMfpt)Ry{1tvqy<>dOcMr@2N zXPbg^J3Z>lIume*=*`Fp&4A|NWMA7;V)+8G1ez*@{mN555bf7OwP(_UNhk}ieMvZf zvXdnQ3BlZhPSd3A>CI#%*NSiqN^Swt!y-`pr`eRbm?Z&Tc`dOdt?9XubvNdU{v6iu zHua-@MtrrNi*kCHLP(w z(!$@6eG2r%W)de61*}s2(&u7o_Zlb#tRdhPk z9*p{${Gi+G%Fjpj1K_nj7(Nj_(T>>CV{UK38RrYuXRGYupZxuikdZYaln0sW%73~ejDo~M?$TM zBg|#>jxDjg>cpu{GNW?~XWak z5s|NNUIr5KoC{49pH&JZO@A@{pa^(kV)y%v^9r}jRRs>AmcisthLpFS*MJQ>c8slMTKfv+ZmoEB?|>$w=!ji;}F+2_X(xu~fF5pkbgoHa(O zgfEC$3=T_Fgxw#q0;)U?!h8otB=gv!kgptG(t&C!Zsk@i)IA6u9a-~bc@~eQy7Ld} z9Z)7t0H(gu_I={RT&2!y>vA)R8)N#g8}1i6(rYvh=apulQ7c$>I9*&MR~BW=F>lRt zo@FjC?-Zxl3YQo1>Gt-bENh-fx>I)gtL)_7ftCI5y9Dt>E>LKcBOw$wtvz;N<}$6$ zguU-do`FB+xpaPzc5|vUQF0CLP&SK5)j$>4HL4IXKaF=0lt5edEwFUFafrnBYTK7W z!IrRH&PL%%qX=0XGbp&1^^ru+2R<&JZWs23Ig(HoKpqH9w+S!#y1J8)2>7=r<8$Hm zt8648El@WaH~HJrjTr^VOs(QNdfR!9R$9ZbQ|y|^J~5e#b$papa>JRC;Ird8kn?~O z(U63(m8Q;J{&fydvU=`2&4HRho_t1+e@2xNpFBU_ne6$CCK>fjI+3N5$)v1!4K)&U z*556P$q{0 z=L4slMOka~fvYoSRcg;)RvwpKOP#a^#sWz)lBzr=;R71F{s*1El zS@nYioe6180+6v{=vrzq$@gpZZZ18!22>Qgyyy)232nUJ&xQN3@`zUXvX@G_T>e>d zT>kpdCU#!x$f&5!z(e7QU?^Pphz z#9k6)!Uok%U6SlGp52a<$_H+_6t2dj;BT8gqUX`E;z3Mrq85by3BNQ^h)S!I;657z zT^8mCq}#OlI!0AlEF=ga4^Quz$tdx-Ll3y*A71DGDdR-a7bF)XmsdqDAPeO^r9K;9Ee@86+Uesf#J|0yIbCDm;{W6 zh(_Mol%rs$9vGzWKC?+1!V)?doZu${=HnrT_ct_(hVAq_p~s`4i?JnUua9kYhiV z-@^}9rX_fSnt}*Bf2G-v7a#g3#+qN~$itmxZ%nKuW{B(-49~2RTE56wN)`lG@z&>f zB}9v0z&=k;LcV}euwa8zmP!SqxR)?D)NQ;ovBF_KG*hWt_uThm8-2-H-iPhLUE1|^wA#hheJvUAIskHr#M zl@}P;u!^=inAyK4qjpM7%wnj$-EdMf@M6<1^1H}MFVd(^qnrrGht9y4-|+)vRdD>@ zXbEvyFsP}B{#EZNr(CCs@4t1@B`5Ola>I-JlUOl%1UGKT8c7}n?<9Y)(C;`L9!4Mw zl;>Cg*C8wjs6NhQXJPBk!j?$H)Hp7S*1GIm1VSm&qZxjH(&0P6M{*y1GG>9Mf-v%R zS*iwMsh~1kD6+Bt@;fIKC2S&(npf&R?$l)n%+8{# z!cuRj<_Q6o++LHq38AC%=o+4HSBEMSy&hivZ6{Apa?*?e8@No=lpSD%sv9VMqve@i(S!$f$?Y56`C`7A~PK z^_Juprh)dR?}gPa-6j2o1SKUWeN<0>EhHnyi4b?#|7S-5>bImknf6JIvLK|no2dbv zNte7Y3jv;jrYUbV4bA7X-_z~^oX~zx4L!rsCOppN_i`By*rvGWp|oC07DSS{R@Rx+ zU$?ljGh@jMfu1O24=yjkIn_nPT1*a zY9U_s2a&BuH`y^~NZw(YUpTzFg*OTd5#d7il`AgK{-G}|c~%sZWT|0vj5?DsEdvnE z<2?3`a@t{kGv~W@hd7~0b<2|Qnd>+FD3+z;+gw&dRR=b4>8TJ&g&t%t>A0@$8f(c3 z1BQ@dc?s|0PncnXX!BJv$@xhD?8l9WH*@@G7Ds#I_WHKSlR07_8Zx$Q5}rwJ1OHjd z@x_s-2hy@8cNKSUyB`KM6G9~9aL!!4hfd|Q=UeLv@7zT(!MKX1A9AhB-q?N*G1^o! 
zrGhXuI3<^h}Q)6+sK3 zOfp=^v6S8|dwo2iAyVMTY&H|gvZr!PjxQuV9#4QgZK-sHBc__{TbM{_gwBzmpuxX* zANBj)dh(z*{bg=uG?Qb6a66(#4Ej`v#+mEfnZ9#Ok5K0Zb5oiX>C6Yr3Wtr8^ZGr_ z)SWN7vE~Yj{w&l0J}x&=8r(#nYgv+Yb>FNgu0pz;9KJRRGT=gQlPnpP~ z8r)G?cIx+^dO>)T(!AkqAPF>>9RKM@Ed5=PgbM?!x=2w-y5{(MQ9453WAe{F?O&Xu z-HMPMO)xQW-^!aWe{k`!A2LWhlWVy>1^vmL%&V<$xUgFcwPqqcBe9C@g!&HJL^K7Z zwW}>9jNTU?$V8}h5Eda9`RBd_<9naNPA-HdNg?!|>DA$BV$E+R=$2O%P8 zi>oc!rKR31OTu{+F*rQyP|QW^uQA~l?)mxPLx<}+G9~t-Z6n2Wsx$F05hF-QPi89* zQ}886kQr>b3258D{N_112$nI>JCV6^GLF6sQlku?S_x#?9+TalqE*o9+7b z*MuGR!LrJWD{AByiuc9OF5h-~KmMtfjkfA490(vmvO=!#S`jQai=IeG?-KzX;bjR| z#m)plr=rdJGJ*=qDCdfsK<^KfID{ovrJt5-FE}rs@t-r+ufe&%05R*g*K85dHZ zn2lr$XDLVK$cUp!W~&J2S;=66D>6p6gaUDy7YB7q`Oj8}pI!~|?St$+TDYnz z)PrJ0J6)cz+g1mfi^^Hq{z4wKC0mx!f3R_~+bulhMH_BP^3@vtIUwrjT-2ufP%f;* z8|I;Xqh&Bq1Qah|ak5;cjRy|U{OeE*569E@e3STceE57$I^pyG&;JSU(PgSgEhNu) z7?7Fg{`K2GmCc>m2Mu-oLub8TYn@X&=CZG?o2{=4;aC{uOXS^FyCZmclv?9D-B`$- zp1LY$)stj2I-y}0${(n4%3*fCzW1_B#E|MvHT1iqxXv1CRml)@5WkxE5ZwpPw@B)g zh;H=8SXR>eCKvm zJXB0TCO2;Y1m#j-vnR5t)XOQl^{mmNkLCGH$kY#r5^I?Q1B$sQks$tv>t9X{Y6rRp zS<|`B*VBLWHGgBWEP$XnMJ%BQiu>1$c5L1DWC2!|puE%SF$S7XaQbK*j!2$PK8}AD zPB<)GJ=)lp%p$v*{G?b7P8IdIfmFj&ln4WCq>c zhYETKvr+envV;&ee-+WTmgqKwhq7~`fl?Xq(7+XST99|iexupni5dt_uGxe(brK1 zWrxVF@sThkiX4gc`jmd@^4sx_9_&t2$VV)fXnkR3Fx(+??o)H5ITj*aJWL{u25|lY zq#wvKXF=w@(WeA;uPYBxA!1A~6u^Hn^cD>;`l>G=NpouR`PoP#@>lusS?&CnPp&)J z4Mps)hlmYIczJc6VELVE;^23%Htu)PM-Qfs-zfd*P2AT@2s>Jd@c-S zJps4@owWP~e~?iB;ywIvzcvS_NNIX`B`1(f5ja4Rls=b=;D)xD##I}vhrcIqbbO1x zSycV9C@IbfXDRNV*Zgg`dy;$5kK1HS9T)#VzDNcE;Atc1psS@c8`4gevWIqL-3FWh z>jtaPm=bWlerz5Sy~;2Nj>v9L^KOe&Ws(_I_qLF99!-+|T*R!!lVjkxtAzzf4p9!| zK@JMMQ*Ok_k_uRmjJ|0F5dn{9o0Lg!huX}CJ}x{wg3f*n;EU(W){#b{R#rN`1B)A5 zYyJrJll&2MQ=;KA$5hfqn>WmwR-EV1!xulG`iAcpuWSPHYbro-QUX3h z2Kn*{!);7;hGC0Nb@ph3 zC(U`rs+#&+7Qt^oB-w!@JX@1!GIUa)BiG3?ECM&f`hKH_%R&TJwSWx!0*XQRZ0>Y9 z59kXPa(q9lgwuD9)cXUrIG7pD;^I?Z>Ydi=?FCsBSLg0p?*tPRiC zX}p0VRt5dCpA+bVGXW2^+ddCI99L&ggg+>s+dn%6xGlfBGYcFFv(q;P|A~+pH=}lr zGRKtxHB~Ryb|WXno*18U3pz^y?~Mx(j9H`nfAJ4shAey+eG}34_w^jkkc7gs=zVO9 zA+G%e0D08|k#us^q-SVD86wqMBS@2*TQi_~nK#(02w7OQIbn~zBfZdQ%8)ST-Ehir z>BU)GOSlw&*t}NR#l?1wa@fr9ix|Jo4IbI^&-}RF!n6H} zaf7q(!Z0{_`k^9;`qqCzM()6+{3Yb;Q(yHJfbK>yaJrLwqrMWlIQF^{KQLDZr*&p? 
z3U68aGFMQZSENL$FJfRk`SEdGsk_yR`R3lo^!kh#4t~^L^mKw;%+&h&36oQXhE+K= zuD|}A5O)tRuh!bYuZ(LaIPAau;{W=t@J}k{kunlwhz>WRRwBCt7E=~tW#IhS4kxHZ zav;;f3-khaa)b~~tGayCpGAY&i;{(gqsUji1#HY)EhDeutio!Rd8{rhZu?k}>GasQ z;)U0QR={|uuHavN?cd5ex%(df*gxarHK=pkxyIDyqjna-7pB0AmigFo-4A|k!JvFK z@CVo>3x6a>oF%@)E|Rsi0oIZ3<7-1BuxPOe`qZ#h9Y|PmVRiI-d!ZAN;g?b3)H!h; znH)y+(0VY-(1cmZL&8WCen&uBG#VEm2hRzxy09K7Bm~3bxW7}rp}%N!7Wx{?Tmo^W z2KlXN(cT5bLUt?j^VP|YQ-vdklgw&Y`-$zHpM(B!8u4pfict=okNK|S!?QPD@EKtg z%F>cJ^7dac*b?f6d_=b?wyV}REzS9y4uL}Nt1?-o+)jV>-IaOa#F;hf9D7U@H+GyE z<8}-ue`noCXsvC^tmc}b-Uds=D0B8vGI&${C1+_ywHqYLQj#;}n`xG;F9Jv>;2VF4 znv2Ke>xFBLK^of8lqvL&e=0(DmI_*wx*y@Ck(J|?I;b?WHvu4$dFhkjHOEB?TW>%F zY34Omu;5)>C&)9+7ec*72*q)*;Jwi7W>e`@%%@qDielqcxZR>2jc*ar?p&Ic=v_Aa zneDBba+6NZ$)MTT?UdR z`?cRwcPa-K9oxi)to)pylzcP^E^I+IB9cb_FspszGh+qeoY^8+fGjHPe(G)|7zz7* z`KG*u8Zin3;--W>d{8J){9%@`SqRUhFz36Qj(A173_2er zZ&U=*xyfk=UZ@_cLbh0knRbwC;!#mZX?lp_;H$lAU5*TA_gSaU$?L zV&@`GD3VHf`@Oh#ct`wu`m3Yx16j_ksvh(>G#{=UE;6I0kB%blM#VGd|E-02dH=1f zAy-mZjhXHBClM^=qoDxM1PdDh7JT#a&o4f3dh)YZU-9=e-N-30rPkP0R$6sY8Mu&q zH+huyb*l6nlfMSaPe=~73T;jXIl=@Tj3-gjs|SQzO|f=^KgHhwJhgtXq0WxorXCG6 zR%2123vE1NK~M=-Fv;oP`}*3Qjt|Y>HJ-RloirhFHHcdXh?%U1$fsv58rVS|J&&Dh|!DVE`FV%nqVo1C1jt}5R6iq9XPA!QqV zBE9K<@qbo_*~38pqYyIaRH3%;g6-ap$PBEJK*i z2D2CU#)uk%MGqNV*+VmjgtpZ93RW=rM(&EdcAHxthh`K#F`$r;vFf7(yr@b2XkNd2( zu0_%C$Q?_>9$Wf*xg>Cpl7+slq)EBZ!|!GJ)M}I;ST1jL)0Gng4>q&pQ4m;QKTjaDq2YZ%yAF+t=gjtW3HZa`>LK_ypx3@2+wJLan?w?y2ClSdx|=0@%U<@?B?j<{#e4 zSKE7!!%m)l4C*6}h`Ifa)m*e>LFnm(p^~8sOf22Nx)~BU;0Uz&$UWF_=y8Y0x=^6L z=M#gRPyv>cKcG3Xa$)K%B7x8a*f|@P!5@ld49C zAyBP)gM4+rK2^ush;Va(9Kr7fuxu;ZpS#^DU+YVTBO)R{t#*F6MhfQuGLm z$RmfnzB$Kzp_Ir})xQf`veN=0Ji3Cu7s#O2#>R<j`&jg}m|vAC?oNN@pE@azNW5*N z8(B$h#GaD{Ng88vqvy(7MB?#M>)KJ|d|;;BuOIkjv@^z@ywQ^(F^q{U-KW73nHFwEaN z9>_=371jE&x5$4ibGnW*;sH}7ZZTr~mcA)lZfe#HyX$M1J}mM)TC-LGzO^_=-@f>* zYz<+qeyx*FKu`ZUlRs+0%Xj1H=lm^S=06$5>gnaSbc05zh`K?iuafssDNv+*FkGD) zXuUgq$DFkhnIj*bawBq{Qn31yqKW{9jL*3_;(;^GS^NvU8{*-}r3qLb(NRnikX(wn0hRvx3 z2;Ec+$v2PU@d5pPK_dlbAVhvP4);dW2myob8=}-tlohHBCzjPN6S^BOgz~)Kji?F+ zceu~{$Tpu*k7`~1xB>rYnV?4o%42a_@XUV%^(@#QpiNV}q&CE9?~Aw2op?V^-(T<;d5 z^cEMVY@TJMS;;f`6HnvqE;JZ(z*kYV$1IKzh}Km!1P;O&+e>fO1S8U@F^)6tQWtMr z-A|PJ>|qDO*s5PG<7f8uB&{`!$eB3P40+n_v7Npe@!AS*>?-q84+s0FVB+3j*4rMn z%{TI7qELys`egc{X^kQb2oRhUQOJI-nF|mBIqHirIntcU-W#Yab&zo@a&y^tlM)99 z5l`X4=dX!Y0|z{ju}gy(2#N@miA^7&+N+bs4EDM=)Vc~ldgaDW>T?SBW)Z`w;0!qP z(#BP{nZjhOJ-$52;0mU^lN&tnsh^ck7zuBVA1e&o)DFaXgWm#f(6?BEK@+Ct?iK1* zeou%=IltZy|JzlA9Q0eCWYy1r4D0*2;7r4N6M~HBxeslH)?g8#{7Frbivl5L@_3SK z<$zzV3LUcp_9jCkXxSYzef-83Z*}#;N=XN0qAe7a9Zg=b&ty6nuQ;+421!eGZMp?& ziqz|(NX2jhsLfZn#Z^YzKPS!@*NnVx!!k`nFhL;DBL33}I8GmhZf?-XcIj-*4PDHs5n;^~4a6#ovt+U9 zBO&IgRZ1$7!|qV1fFmj=1YvGUf?+yUx?#ux`t-BAMsv@&WqC0yqBW^@sXul^r6fN9 z`~nw1Za`>V*=1T>Gv=TdTHhiD;PUCK&s-C-&&0X~xaH@&P3iYSp%W&cq1g*fmH z%^hSGcfiGhZt~s7hoEAFib=9f6rho{jVBbL6rs0B0L7pgaVbB3BR5~bBY?CCiDCsM zovP`Qlqq6v_x9t$fA#uv&N93AWiFe!#W-~Sl4djlVm+#~uG_4DZeP`0Y^Tz{iM@Cj z$AuuJI&RQ$blbZC3v?S$%4PaD4QdeA%<*y5M_`(#XW?eUnIYgkHVNH+q|mro+U^H1 zCY?Ivb(`4UD%uRW2|&mBroFKylOgCyHh?@$X_1gqOo;HL>>zrN)rrn=TwFDAo~NJo z!S~2eCPaiC5GW&Mozi2w)K@yJjw2Y|RKn)yKiq7{)rcQaf4j)|$1*cZ<+_}Ju@p+Qoi@dfUIGsO&?jQ=!MP?AS+qKA^-%3EP-m>qYsSFM z!sfTFG6URS_jU;g_)Eye9*h-vb&R_wY+_z|UrUo=4^ngf@+}d5un;38mxPNMG}D5U z=DFXUeWt(23Jx2Au_U{LvXGoAvY`pFmD;rlNdY1bBfE|pGR5O^K>e)jdkRHR(}faA zM>A$B94Xiy&h8~@3LsWk$O$a2+QAjVCR8yf#=t%GiP{oSAhOwHr~+Bdsk-=hp0g!+NAK;te$H~ zYSJL%QrRO9Syn*p=z;Qaiw^gN&NmC?g{f%}18YMg6S_+&sq^YIV!WhG`(@M%<%}7d zAJLb}r2kK?A7!;`M#V`!uMYG&8C6|%>{B!OP?u42Z$|Kj_vGMEJN0|=3B3?u;ogG) 
zHlWVfxeEfKXkLY!1OlRMkodW}hrl24O4*N=iN3hR9{=U!q(CyQj3W0cg&8SoUqrW3 zMTfXVP!>!e`wEse<%r=3f7?Lcmp_vI;CLjxpf^s~lqKs8$7Z2*b;UJy&t}G5wTmV1h_^0(woS`2!iXrQeWvq$;kK z5j;9@YV;Lpx`<*)YLG&WPEEcmqa1_7k93ggyz+rwx$Y9Awe=05Fp}K31I&*|^EK`f zC!n4BChP)Y?5aqBYtrh6W6OpP!g3z`I8@fF!$+hb5m6)ml*pkgXm8wEnU~32{PX48 zLO&CkMSYRWKfV0E{Cu_E3k>heFGqiG%5Q()#R9BuYQ6?sjma)PmtlVg_*h+hs|A$oHYKrq zS?tuYwcGOzfLX$|*3hCRyh^vD`lapt<#+y#_ZgH%M5c)tSIAVY@Xfwql;wp0$ug#t z2|R(rKWajA3Cf6XUt?J0d5h}N0s$vFin!lpT0%=1}T&&g`@M|m5G zO?s{D*9SP{$6-?Rjv_#I{q@)G$v)og9rkE3hj-H7%xFki8C#u^ID~vo{BEi3=Zste ze;&UcZ0^6hJwV(EZz0Sn#qI}ndt1ITS5Thv!StbUV6p~3$$bsXH=)BW7sW3o{Q~qu zfjXQt5t20BE7U|*@zTrBi5_)KqI)WQQmD++zlAmR;9819xE|UwzIF;Eg;5O{GzS$V z9AZ6g@W#<3j&XOtCoI<`qeEUZz_%ML&rPDvdMc4;Bdj`&);XRs>pRlm-i)iez5R{N z1%F=>={mfo>q-|Cn^EC|1mTdG z5ap1dwk+9H_@Um?gBq$h5xI)-hx;d({T#$+EN;rNFF7giw_V zD5|3o`l4a3^D}|7FecCxK9{AE$40(Ffl3(Ascr~(%$cI;hHToZsR&WzmyBRyh$tg= z52k-ll`(}x&-y>RXd-Df3strU0D~TP%LkF+5XgAFGA0(aCUp)L?k4HYoPWOBi@(d& zW~^<&#b^GFW&6t(ZdNBgr;fn@{X>co2)iMUsje!2pn}KS7a#i1r;mu($yT5~Yjc=& z2_3Ln>O>dnF18*2^T(5usBju%@)rJbj_qeco|PoICu2Fx;3S;LZ^~7ImEmj=tI@+eRjO4s0bF$tl2_HTfHMEEykD>KxEC`CL|pBf08wU?0FY;?G2ex z8`NxW%YN?_xs*8c$%_^W7~Y?cE3r6#o>qIS~fJXtyFtPHKwV$1Y#~P0my1mz%@$v&bgm^EWHCjLlLpTSoZ5 zL!_NfN!fKx>ON+b{KAaslzk>MwtcjcL(cfT>1J~<7-%XlWq)Qp;V~j}Ci>;ia(*o3 z1r+&YU0ZTeZ)27I<&_L)tyMpkKP62Xn^68l$fieXa2!Ca;vePMGZP}CLXH4;N`{J~ z0tGT2aG3KQV#!)(_(zrshlKBvk24Ul{sp7U^aat&Zm(?HU2l#+XMyWa$pIe(@SSRl zg$0djjxAXf;^$l$AWn$SIRH*0f4F$a7KQL622)-bsB^gwo2{saYf5w&C$`}ugcXRB zCMQ(L6{PYdzC2&xbs>LB^_72q>>4+#cCCa9iQsI7I)w z@55MTj_`%m&nRF8WNwo7_CMwlk6=({Ai3mPS8+Cs#Fpu!L9J+OfJX9;QFr>w-1=!9 zxbuDyT!7#H<>&Hyb5~q^rl-=fAN=!eSivCAvOTDq8va4sH$SVhO4AG9+9_s%kHcGc zDU_jA$YDZ6GX5!l^tETAlCo7Wx%{?s2oDd7-U_RgK|N(oH!(T=Jg;&%;(edxgMNrN248A4kktZ|r?0N>r#fUZ_NWG#?(4!I zjMVdQ&EB_*Mve-`49(ns8Ei15U_cmw>Eu#*?c!Y@VFl4-O$|k6P_F_2a|sSY^N@SK zJAH4L)JHuh7FPP)@&}xxEcD(p@=Qw8?uB-}Ze>1YH;yTEEaR=10HwB4o;u?_p~~?W zeBgom=OZox8L}-N18ha03)~$AT;Sp%qDP%Kb6C-S4k^5&xIz4SnDm+RUVQ#}Wr%;o zr=iTZTB$l<$S-E&4_xJc6kaFrSR8yIf$0}^M_j%kB%KQwIrD)L!Ii@2Vrj&8Ofv6VA_9*pY%Yu$4wFCj%5xlS}$-W z`QdQ--YJ2zTuuh6n6tqT%~=t6C9!7u%dUdjI^6@h#sH@A>v?_K|J7Oa$$=sYt(=&0 zE|0TGzD!1^(9_3!e!{M1YC~U92te6^|45Y;gvk~#Ol47BTs0u~3!kc`h|5nX<;97E z);`;N&-!dP*%1HhT7=-CuSWDI$Ck3>=a-ahQxM}kvsoLaJIEC8R=F-cUjtRm5jVt1 zaTBEy2tI}DuZUbpPNFioaWElNTE(Tz*tFVq=Ho^vydKL2m<F_<(bVFdBHU7h&3;*+78hXL5+58PBA`{dtIce|GRXzHkmN1p54j~+0^Z6l(p3TLH{)UX@wFG^duEaDI^kC={8`$C z?7?<(corq0a0hFsCY7p_Aq^I6H;gkm_qTSjwqgp1#`3I9KQ!N7D&-+>UGJ7RL1lw>Urje5g&q^q4eS+rMygZFslR{+*9i(bY zP7VX;2FX#j6eE6hlnI>6!5QJ+DO^s3Dns9~@nEN0ei7rS}U-mvch*!h=*nxrp0t99H34!KWXFHCqltd3GIc zi)g)sgn3)ucwDEZr@k%{ErmHQ-pTq6O)vWD*hwZ4I*^>4lp`?xWf3Q(&>GkgS6vYYKa5zd>UTy_1>!e#B(0(YEY8iZ`}<=nKX_ zsuH9!$ffTB8=*PsM-tk7%k7B+j8P^#n`(*awf_gN`lfOc#tl8neFfL9U%&ok-xHD{ zWzom<#g#*gxTaf(aMNiMJwRFMLFuI@4UzQ)z1dMS*kw42ZBA5{Tc!6C$7CG^cyo`49Q6dYZCGt!}a7D2tT1Z>P%@DynV+~<^d_L~gdj41VxsVHe2BGG^ z-U4(#3GKDp*EFPaG*$TD6{}aD6_VgX`&OJPUb(m?OPIs__ZQf7`l?CdL$Rc(lZX;q z%_n)-Y&-eb!3iVBf0xQvX3!MjJp_01xH%Jpc2o#$Yd~Otyn%pfTDi>KDl{XeXk~wS zue=E4=78+mGD~KwMB9?v8LI&J8n-+Hq7&~&y@I(&*PDJCr@v1YjYy5O)ohF(C*U1L z_Q%EFm^JTIHIp!xU&1H_DjiU2^J&?cbsl~B<>&lJwcFuNjaIP+(iT(Ui!r+^hP(6vAG8S>#)r9k!bTKl`VtrC#R2!K*?8T0HqaC7yk z*p`N0Xv_&vfnpX&9kbe@@DZEs%V01(5!q3aHa3BdePLI3KdlSon1EIX>+8Eggnbh3 zo=s^#h10r#d90RrflP$al(=_96W#Wmtst~L<_wyvhWGHQ_ycUib_ zQK~?I(k~N0!3;3PB7vjAa4yPUZFP;oe8+Oiku0mZs zP|K36#vI_Gu#nT7!N*h{a^B+Ow&BtJYYdf!u!+0|p;Hg2>EXYy`T__H`32WDD5#?6 zySbD=EEE+x@ogVlp)Yy$<`5T&Z~q@W?F8ng=V_N^OcA}(Zj)SYe)>y|Lv13|X;-g) zh%P?0B1`2DZa9k@d8un(WHp^Mb$D$z)|Y&({ItaZm9-+0%n%A$NWS@};W^{yl+u|T 
z>CwYgn6ZI-Tr@s5S2+ZV3%oPgxsS+|SMeg;AN@1un2lU11T&b^U`oA@W;4Y}B#(oI zNzzZqy-ghA36T+q6b7Khqwl~BQtod2bnq;b`7<+}Eav5N>fE05^8)ln)tgPytmV}= zDPE=fbMX_a7wTJr7>AQlLS#9F>w?PQD0^`$P`{w78A7x+*8G!18QcUHpEzUv+pj8F zETY*Por!Eobw6;3>oe^TmjuUvkLu4;dyo`)QS|Qeq^OZIU*~S5+grC6e5ufR7pHeo zBosKYS*)`7CGq99Zg0CJYiJLrY9cfKT?zmZ2=X;b`i0#5W*~7KCkePF`vN5RNc~l~ z*!lg0F?O}-_M`j4^cPsUUvoTm5ds!XBMy|#{;dI>=N{ZB7)0Lo$>>q4J140Th z`8KhdxdngaLIR}8aEq226sa>vFcOkP7dMe;o{M}(7+wZ__^*VM(~mIqP4f*_x&QcK zF3!RnR%`95n0oP+1+vK%bs2WvQ@sN}W)$?H89MdkT#P*RtTPcWFfX}G>Z-C~*%7Wy zm~j}mPvi2`VmthR#25nV!;mdv$o*XrG&_h8&Y1OIQiR@~F|dY8NOUb?hx51wd~xF{ zw-^g!ke4ktQB!$o-5H9%CxuFO?Bd57I$A*yIMnNqhSb>51*Z6f)`G$zr|3uKxX?Rs zWqo~N%jH$l(`Q*;^4{I)qx;jJ``xn0_d*zW;$6fwA*jo5%d0r`l#d(U5C19anLdmQ zmroVFe`K+!9|ee(I_b4u<|FGH7|Ts41A}K3XBD*Tj#`+M$KA*6;zxj&u1)k-=h8N8 zVA&v;TEl^e>ON6kz|^k{86xxDsbn!r=#m|&;7C-5G(;R}iF&6}!&{plHuxij92FgK zqQVCTPB;RI&!5!+<2Ej#U_-Dm{Hf5sD?L5s1zdb2ryE7m%A!FZk-p#-fl8|*V4Y&T zE=^V~vqebx^u4dhr-Le#bp)j9tkkODddf`^@Z9;3%zj@3lgTq`85kcpC6E?Hwa8I8 zt*FxkN!3Gi^2N_YC=h2aE{$K|b5~C@rc+Wgd<}50#AQ3%TPy2J{vGq2eth5Ol=^r9 zr0~5bOfgQVY(GW(yk}ZE#)eFYeq5pWgD|$RwI3t|P)JemRAsZOuDVl{n6?wH}H6lNPbd5dUs{v`c9;u^C` z$Yr%zicy;o_FLg=`5c)hbMaeYBbYLQmt|8dJ~Ez=g6=PS{q<|=4&4H~B1EH>IWkfX zxoHTL`SYijH#*zS<}d=+htF$Nm?sy%$Qx<8CRT!5B+w97SY_gYz`2UgW@kqTzs-yF z=)d%$zlyUeD@+gw?4<}9UH?Pv25n7G!nbP3)C5OR7YUxpnL!3Uzi%={CXt#U%gfxj z4}u&JL38;;s&H|HNvsVy%&NC0+VYD8(V_}Bw`Lsh=JboVoZ?5vwJO9He|ixBJD86e z@W(Q}-oOk|v@3tQDq}(myKz^&^4c}uNanU&hf{6a(@vr|*oT$GA0A>Zhwk$$k%8k*vUc@O2_z09)I4`c+O3HnbZyPZvlw?NCrwOlU`|S zROVnYdSCAlG*i`rm0}fT*D^TUq7)|rfV@RIvC8}wCaM*V8Z|{{1*=|0%?6H7T|t!pr3tD1{tuSeT)x_zv(9&x22b({GuZ zVJq2LJ*!m4Sk;JB?n|y+L}lD0l!6Hf&ePnfpEyOysf0#g#N^R7H7fjje!hrUXBtr7^_CGeRfq+q>+%FSCPLXEafh20h;0*y${@GL1HGN05F z+wXWXtqcZdwyhI!`VMe`5V>d<%82GN@gFkR^-8}yZXRo94E7Sa(9Seyq=1RzwiwL? z`hh;-DphwT7Oa&CZ-S$byZE4f;5^%IC_w59VKOsMaX#MXXBybFRY-=go)h_F^DH9 z*n&MlL)X<^_4e@Brv-8uY`K zG+GGLapw?Fqk5(bZHi(C&oBZ2PMPghCA^psKR~XkmxSHhBh!2a>+ogpvz;6vyDEd^ z6yyo+jIlB}{vTuBadwP9_0&`Tdj?}eD-DxSiG)Ap(vju}HGq)J94L$x`U{A|c57}m za*-L5uxk^pCN%setYi+VY8#+F!SV$E^QgVasmIJ!3p;G?M~%+uFFD0Rk%eI-SyHUn zxu5YmTh4uaI5oAt;BS&|2VMDjb>J_@MH%XUa~MP75y*AYaUht{=f{^fpna-(LzQ&o zpAefR6^S3f26Nk8lI)>N6vZA_RGWio2bj;XD- zOd&q$*G^BLkY|Ik0N?$3m%lh&`qO_3!drcTTwsM)As2<1cjkolU37Ldbsq*abF|?C zcA%|s05nd7*+7~aW+0?uzo@X>B@{o7y+1orU5Mz>iC^h!TmEL;03kWOPpTU^UFoSp z$+%E9nXS6T>>d>6J8UAa?3ovR{Qw~go33R&BgT!-=P>XN;6ekGW2GN_0Lb~$^FqK` zjlvM}y(+-t{BnDuV31j`%PMvG!X~G|g|^ig6T#dRWOCAUaH9+5X}EwEk|Ecy#ZVn) zfFj_W1UH^@a$I%`(V~>XH+lK)e?o$pNJYbMq`Ut`Da&xHqbY{-?@=jf>i_9<7n~CP! 
zbG2dj_~t{)o5g*Oc5Jl~G*7@}q`5%UOM92pN1p^^jJ{_a% zYC;{Q%Q2z+SnW-mkIAx-hsdT;E(Uv%6NN3`Q%TH$&Al2ZcNTtBs?rrTM8W-lgysem zveM;IIPu~y&A%#t*s^U`jS)XB@`^*{BA9r!?k}M#>!PD!FUykJQyDX*{3p#n(RwCCdtl`O7Kj92fM$gX6H!l0`-kR?H@^lveWn?ZIRfAflqDmP=^HOO^V)OT+rEL5Ku}?K)HacT{%e~D9;P!s}R4WOWM}!dJm8cL?qa4Jc_*6o4p-flzc+4JJxw zpv>kbMB_^HweDBv=PsV^t1p`LsK;;kD_1QRD|d~JX^8`}A~Fe>RMEnz1EXf{Fw_yz z9GpW=TQvWknj-|eFb#Y*H~v`Ge|P$+%CWsd)}@I(!zD zMN*Y`OT3r0P+uXXkceo!g?QL6{9Or?gc6$yg7}hn`OD9_a7*54E$&@Cj*P4A#_B!D z3tGDr63{K7#^!28$D4K*6L8cm12?BVe>m#jp{X#2F}b1e5u*CzP;^q0e6Fh z$tG6x;U?;XuZn?A4^+U<8-uAr!{_iUai|4aRM#}k;|SeR7Po0NC4CBaNUOs8=O1~~cw$bd$A@D6Wzs2mba0&b)z3<$_F3_7v)skucVAn28`lY?wjqNAjtc zoX^n%vRXF3k*dAPR#~2%31zo`SRXS028uhbF0RoWwN8$Ih8k&ZWaa71#vEx%QebbX%adU0 zreDxP=_eE7k7Wooa|U0gYVW>@>>+B6b!U{MKjZO-F5Ee)2pv&$)YuK zJi5K(DwKj<()J7SxKl?FDxRKi`HR;mt+#?#U53U%B%jC{7ml@rd4Ck9OqqC2j_c1| zb5hg>xC< zrmwyDqA%t0f4%(i#V5j?_&O;r3C)URE&^fTIpn>V8F7M{6Huv$Gx4$A3o8LTG;`ol zbw4K#@jL4$?sVfL=!0$`&M9a#)mj%|SN6lXXEn_Rj#4w2gaxOsz`UQ!*~Xqq**0C3 zp8W1@`@JmZ0At*V)_WY8PKY$=NkL8)&|fF$!A=vWkpA1@7-1dfxgg0Qz65`wS5N;= zB*T>!pf!dT?BZ+Do!z&G!B%nvk56~zf=Bt(;R09X8#GzyjeNbhD6FwnouTiB#4!x) z<)8Y-o5V?|o0N7VlcX`TB{{3!*qy#l(?frf*CeCWz>S56v0aG#?h`3*vyJhC!62_a zFWb>Q&kC2VgB+DOe3pCGQ!z&As{_?+b|`;mWub(Is|E55{nS!2H7wPcq+3DAMa)oD z#X&EeDNN`N=sKD1A!fIKoE)PBXH}0M11+>zk#Hp$43tSfsEVcuRRP-vJEN`wtYECk zN8t*jOrM_{(^#->L(#Xu5y4$qiiuY&8J{8Ls;zjhlLvYAS<$@gqzP~qvro`AKN{7( z657w3u#1^<5F=b96;qL!ROZnvszaV1kh>8C%d)p0qN;3jW04o8_T7X`a3Qs&g~e zb}G{3t@BErL8eFDtWUfJl4O7tYzyLw|@S=&xo?Pdz1^X1;+L zw~JDgf`Ybh` zUgtb@ScT2PZI}z5v!(cN{8x1@*f@++&HaYU)926HmaFJx$S3@c&P|J38*9$!XfEh? zmp}5){1Qo#J4^HVE-k4PTVd)np<;g%&VG_rkOuLxn`;_U<)C4XN=_-N8NIvLItJ8&!&@ewWuu88CO*p#LRnruH`ee z955Na!O7MSx5ZO8oU3_gbHksRzMp-X#9Kxs*znzix%%9ZPyWzJQ&dGreoj)wga}DW zQf1HAFzHWxL%1TjmJsNgO~t)upk-hOa<0kstVvkxg-u~;8kaxvSPKP@z$Jy+glHWG$l(3jfC-~I;!iKXA{^6cB99G{wzJ8W;2Uyk zyKs3Z3opNR&&RlN;|2cbbb-M^bQ((57f5%t@G;}B4^)c*qDAAgc;S$n1x3fnUEL41 zqNG9-2(52)d~ED#Rk?`&k))u1x$>JirC?4&^5_=-omrcg-xvB?{|NNjB6kdB$WXm@ z07EN+T;~*+bvl@9zPw{GZ_xF%wj6?_3d|{DPW7>>i|vePlWoM!0Bp`D&-<3htM-pv zJ*+maMv9<^BA$M!>5b+=)#c7A`<%J(GJ~UvHU+z%nI`gqB4K8r3+q`<5z;O@4zmalexjoy#X$j9+Q4(TNnyTDtcRoVuBr`o5NReb+hvRB-nB?GQh1+X%{MmF3&kk9<$5u z`ql#3E$bcN^vk8hQGk#bT(rdnSfWWNKBadFU`sl_`|wN-oS=N_=L#mfJK;2_EW=SI zO^t?p-8EnYxJ$+p35lERHXJLXGi1hXvsDGXsGkh1_Ps^T8`Il1rt)9E`X%E64S zL53S2TQ0tznseJk7w8*(jb^1Xvqf#)S#??6u~{)%KM!2Ev+^S%z*9ygOYq+?OQbBN zxaBwK@OX=lD)B8FKE@m|C{6BR)Q{ivQ5GFXb+lUET3GNEE^KW% zs}L)A)ADQl;ZeP>CG?roUQbiOuCkU4qbj2HFu}rPet79s<$xD=QSC}V-0flHC9|T{ z3Z#W^8=ptju`rJ*$LK9jmf)YO$EWnR^U3dVHwi(VW98PIB~-ByGS@XCRPqyarqp{# zE6OCLEcNu=;<$CmSt`I?DH0&ONQBjN((ES1uRww zn@a9`fAZIC$GM+LVWHvSrj9-0gkU1!-PD%#c3J*6*uE`b=P=?N&{W0tZ z{1y>K73U1Q{(t8X%!o19tM4aWxbr@i$;Se0={sL!>+epRg6-%?L7!aP>|C zsYsAn-7H+M23PS%ncwQlx)Tqan0Y6K(afuXhrhT==Bl%7f?0c^%OCnFFsWpR(wEEd zGu(s!fe6to@yK7vFA(NOvce_t(YBfdZFDlA3X%BTMAaFA}yDbb(cPt357A4cl{!_ef@l-D^%Qb|wdr0zv4x8UKU)8)+R4WKdCD z{O1HSQo&6jBqlE=VC*OiqM_9ucj_t z7iBf`bscd}9h|6lKn)&*9{_<)cLslq zC8F{bRZ$34#-7u6mKTXUvE&@n`a)Pz?^<&)_9_3>)niTNT7reLyGQi{dHGE%sVocK z0~;@T2th!+%JMx~GAvs61CVo3;cls~?yCgTDUvm56{+mCMYu-}&*sDs(=N`T&b{mY z;p_-#g74i`T9Q23!x$T==)H@&GBye3ssYZrP|+EA1r@3qMCPki{|ivzosI}p1N;#r z#6y+!;G{(UR$fov0qMkUmAO;|Q0*+I@=brmj;P;#js$9<@dDBH{fq=)s@wrO^SgB) zcGF#+XIr}A$K9{@#O}6QWmX)!Xz`Wu50hU*n4z`XjpT90eh177|2LNNThXl4H|k=JMl%i@Vpjlp|53n%!q8Wf1n9iqYU{eZP9 zl+5blni3F4X8QLzmyPjF*H*tSl__AHqJQRGRA!e2v~eNl1PT&f;%Xwt(Ui^)a}l_& zzNV?tTryo%)i;4$7w~NyD|VkU23DkV#+6=zvE~eY`5!Mn^P7_k+0O=R!f~y6;6w;# zWiZT%Q)Ea|LN8$MJhui9`}_Eak}x3`$=`R%HSYgLkBe&*fq^|Kq*+U$URNB}B0^y8 
z&V*%d0c{S)ApTx`^$vSbAE=MiHihKSZ0r(Fm{fYlwFcp-Tr28pC)WLqOK%oN(Xhev zQBgP(7CT03QpjlsM}>q}D0Y|iHU`-|h-!w1MW=!GRSkKOA?d(yIRPNoV#P$4flh`v zrFdhK?#{~~|8add#lYF6p7F9l9r8Z`b;{14b3OXw%#rFK{&T#hpbaevL%#1f;mo4e zYIaF&bIfS$KNNg334?3M;Ys^ zt|I1Sr`j0FX+k=7l(01U(C>>`Lv_>ymS)0E?ODwLzs;E0v(W+=uZphV9F;}JT9`HE z>VvxVr>74==J^N9CRXJ8hzv8IB$kKX6K2O2Jd6X!*^3z&FhHAlT>N#q=Wt}nK3HBm z@=K_+iU9C!8HH=D>SF_s)TI3jQ_mn{ipu7)M3`{1!Vvhke3+M>lN)TlrZsgc)aT2x zLewt#nrum|lu_B=m=Klay>#QojhE$6Cb3|eFrhgoCRAA3khM+!vFIc_F&~piIlZ=Gg(sFk0Lj0QCU<_DQkG7g38nR1o9DfOslkiVt@Zi#kT4F5%{FZ=~YjiNt zI6(WzAk{k)N;A}CbW&1hd=Hq_9=;x-u<15ds=07ub}3I9+s1N%V%s$5El zbCgUmFcg70Lk4N7Xn zyHUssC7Ud&%neY8MiJVw!A?8Z@CTPay?j$t0qjFODh2kPcm)%YQ{GF<6cKU0s#asIU;qRaFmgJ=zv-*QTqLmUgB#LI$YGvwSxUdM zJ2G$GnJIgI23O`6e9qA3_-vnz6S1<%E}zc&=Z{fTh5~3na*8ok^5$~oi!$XjJ)F9h6pf} zA2F&uoq+jL6cMmfNB!$(zU)h8m2=^V0&|WvCj2sw2X|x_u}_O?dUu zdr>8UOv?r@Q$Gtm##h8!zxY1c7P9!GvvK&yQDulN`4op0oG!nt2j2>fk%5kj5^%hH zGKkyDzJ@U4zMgLa<35_T( z!4A1aDD4b#lb%KXb8dR}#b=#HR=F43K`%^{p4woTwI&r<{aF4i8?aXQcg$0w0RlX* zqy#6{6;iIA6>h^7)Y^zy5bKu(k+o>Ou3a!d5{k6>tnVHu{&xYK#;z#dyVg>4)>2+x zJ^gTRPj&zS4j9mfN4w>EbmH>nOk8AS0)kz>El+RIyR#R{^W7C6$1H+EQE%t^u`Amv z8|zMN*0VSLa92$yyi3qu0VG=>#$*L2+!FpGoy?@Wx}2DS%!4aTuB$d@n7SjK)Ls0+w_msy8UM9jJ76ZKB4)I zHUH;i6vamd)q%=E`u|wuCwW%&MS{@s| zfOId@30B6TzAd0lCk7fK9-hnZJHuG%>2M)inO`R1-=WME-n4_Tw4P7@$p+9BnU1`w z{G=HA)de7;o-iaDRuOIkiG!lPERGp$#S2j&D0IkJh3WXP?D@CCVdZ_Sf;n=ncx*xx5U}% z@~z9a{oho$P>aq4Y;NL~;6RFgrM`$T&Y;msiU5NP(pfg5#Rz<7(r#mx6|mSf+L{L0 zcC31NQFs)CGq{9R)6)Xx!4U#`tvCllMP&NcPnsO-%_@k>>ac0t|c z6sJ@_TO>14*yI1ccK>F@5p@s33(xw@S;|@!7jJ449qB(EuCq|Y_7W<K*6pxD5HN)aMqAhZQpXO2_ z6LW0K$-;qOpj(GDqctT**0eVL)Dmpuy@OoLgf&wdqUh$pJ|mNInYlsu3ie3c%&y%J zi~8`KXCOrxH#6v|8OImejdp%vv2}ZOTMX=DJmRqZ@qvatR&iqE4H10-uk(H?P3N1w zaK|Z4KNz9(?s*Z@GzY&+3YNSQa;m+x;DkxQ3q^6J;=cYZPYtH;$fxs4{qLQA6JlaQ z5UN0`TY;#aUN{NSo$7W9>m)cWSA#ETWj*@H)ZRBwetyfkbFxpq=v{ETcqoN8aPDOT z4iiJq>c~9Zp(C+$sxHfRT!ESpU2?wL$mv40Cj^0Cw~4Am6wzCl<$_VwPIe6p)VQ&T zs!Z;FMcGaNoXQ^I?G&y}j>~-w9?(U5SDlc3;0!_!@+xR~IQ=`PhjBKCQRWX%W7x?S zmiNGssjGS*nKmkU#F_A`&=)|)+XVJhB%Ak%DCS#@08C^kCX<;K-2U=4L^PS zWAP$c@DCQTY$BrBvQ4=5jFWu24SN#y=8O04$uEftSW#zP3z>HEgH%=^OU~=BUz6Wb z;yn^<330qC^1C)7APmP4PaZM|!qY+O?^FPNNT3JZ*CvV=pM<%Q^(-AKAd zO~aL2sL|$18S16i$~i#^{Dk@JVu0aVP)b2e1~)t`+&;T!EI!`IBLuPPaInc4(ryB4 zcy)@%;eKuo0q6Q(07hHS^hk#kd+fxT?1t-r=#ue<|-%CKUZ>YH>$yL6wwES!;`uba-M1JHkzSl zq6Tg91E10>MG`!WO=%ijNWwACYn`D)9jq&PVMQ^y{b-K3u{EEYTJZKZ4i`%=@^$kk z%O8%IFo2E-r zpO(F$o#K@yi54qQraiJA9)3o#QILCTw;rBWM=nUtL+F{mi3947v3jy?6wPD7q@xJJ z(pXmI8bK0OqS`naCgQW8;BVq z^PrCjcabwqdm|T}qskM21E&zUwcMnU7g{ctKXv0@zx@-DF57I8`7#?dQZIgDoa>`8 zz=^5>uwv0D^x|*r2Eo(tTMPsvN(}CewFv-Yym0G7t-?cy$PZo6z@Nuqynj zf#PSfP8-T|)B*uSE}Kj6*xb}4r@P2SFLu)KidZX`aY|V8pEbLx$9=QXLBW`$B+z_u zW5%jyd7W?rrmKrD^aAS}86hVYeAL8cV}dTE2#$yEGZE=-*Xl3?-5O7*_= zY@|G&4`lIpO7UfwdgCi*e>J7|7TRQ``_kgH={Bxfa6v}os90Q+*sX60Ynfk(gX{04NuG-~<}>!XY(AgHe1q5{49}!cM(2 z36%hM(`SW7WFTY)7A{&CXaj6#lwzjX-`NS7ScD%3TT8%2DyTWd+`-Gd!|?I zQq*3~+z1dWxHH)*U=N1v&TTZNpBAZ>>|+QfaE%mpP<7^GE0cBkhH&lOfP+n77ZsZG zSV&Pd1oU-*d%?USETi;IA%17V{vL(y z#W=5i$xj~sZ~0|4F`%|ezSCZW!SAljZ!ZfAS}_(~F0I{rMbTC&L>3iIwwuTn*g~;~ zN5jxzdK!qnYb_(KNU@M+3-^6dYTJwh<8xzLmzytq$yoyRcBn&QXrOx#cz5EOX`CA+ zK$QErMmnEv4}tajU^EUsQM&{l>eaE6{F-~2z19OkEGT>h0iVYmCe$YV+{$bVNof{S z>zZl5Koryk4evRilB!RNk7Hfo8JtsFFMIj@#eceZUp|te8pG4Ia+VC|-!~^f`=-D0 z15O3QaBknNYi&)oQULlE3`!=;qBM{-iSS{W=0s{CegfV<1f`=P`yGKH-mw!>5i3-kYX=}c$W@fBPpZ9~V|1|?0fs1x?llyN6I(@-CfyU7M7rejNTUi zH(PJgB-<)k2x}9Vt6D{00DtMMYxB1_~-B;A|t>o zg8)Km2^>TXkSIz;f<+nyS)m#Ph=Cvgnzd(cyCZhpTm6Cl3;mqyan5~jR>LS+didw} zyTiF>e9t)+JTyx7i4!M9fx3-xeNe4!C@EkE@W 
zxoa4l&_1a98;#W17e%zKH2ShA+#j%PmsF44eB7tVOX%IMsCJ4A6kTax^q3ydzAuBH z(D;s8BY|^!;^yPN0t=_B8AVzJghs*rU`a1Ne$$t45l)th(vE*S*!Uu8OS;7t$c+DED+^rOE5xz%cKf$7dZ#^V|mY#HP9)F?9|5U=e%1% zcEZZ9uXd^@=<`8D6LRyFn6Qtlxb&R=nQ7KHWhX9wLg+|qk=?&>Q~0W@y0Vx19dcph zf^}01>r$NWQD4)~5a)rizLn0T9WG_$(GRcQkoP5hj5C(4A^F{!x%(Owkz@r)I0xsUE;>2b1!1h&hby3b`8 z-&xW!LNl@|&5A2DLoiw3ABs+mm#Q-&>`r!tFWTZ}MWo{!CSl}IsP>!FZ{}|wM@pz= z5EYRH5Y!}Bhps{c$Y3?_x%i#Y$(?O~Kc4_g`-wjMv>jzF=I?}i&q4uEAYc>WJj-xI z`FL9O?g{N&ho(<8AHhsm^GF`W#Rl`zER^w9;c5-)lWU-K6~`G+@T2>|Uuf%sNxH+rZ1{@6z(0?iib)DXjSOZGSzzWz4);*;)zTs4OdY(gc}}%U z|9JvBD;$WBClhyC#J7X$a`~wfZPiv&9V-Th7W~~ek&a_wYw;#J?sil;qF3>?Uem@xQ8GUxg#~=Mp+}Lc$(&_SUXgj@f0egMg4%{ z`Zl9KK+Refgi>~vCxwH{m%qKc<4eU7(#Rxj_^b?d%b(=4|MS5c(c5*X7j7MdzRcg$ z!lHqYaShD2P^s9ET7b3rFlhuphfm#^h$VOc>jki;l3^Ppl+XUj8()ofwJ5lli>~Kv zaqpAfggR0<-%$xwf7qOi8-Am|`KP|tVWrT}$$93I1lBFXMi#u22*J0XCG0_YPI!Fm z6Wa<)lh3HLMm?A(@*8MJa}g-jIUz^EiG*Jf_mX!4W<@3R+3wBseTd_6=uG@_Hk{Fv zAOO?gQ-levcbfB`!?$EAlo{9KC21z=5i%O-7r4(A&EKUKm@F5|p%?Pc%26K1w_r}l zzy3gb)4?}$f6<^}-g5+2Oeg`8w({G+9(>W{^~j`&r^aPpl$AOSuC;m(=bCKZ-9#6D z=r)dy5fysniN?MG_$x>KKN-G*KJJ9+YunB6xvoJ?k_l$TeB_LRFW;r_;HW{8N|nG0q3FxqXvQ zAN<(=*)Iyu*9%AKS#{hgo2XdaT`(5dA*Q@jn&0@a&;Q|Tvxm`2*ET9gtyZW`IH+>^ zT5qJu4rrTE{Dy6`2%|-$WoLJK{VD<$JVJC_c20 z%L_=xDcu=n5BHkBTeMyEfA*^jc~OH55?bm72cw=%?Dp=RegDy{pGBL2FST+tafnXz%XZMnfRnChR%@<7C0usfzE+lnRZ%FkamNN_+C^qm}Ftce`?i5 zW!@{Rbqy^|D=JqOO8YJ3cz850jzefCKki>%hM&s1exgTRtJ$*u2D6N17;r(0B`pw& zIfVwaWwWDMMwdTCRDe?vV)W9M8C^*~UX`2FEF+pI91@S=A{WlL*$V8qK2g8xf2~d% zx>-l#`OjMFh`Ri1bkR33OWxPnyp4=ZxK}?aN<|l4UWiqZ0zJ?e-v*@~wi|)eYQP)( z%;uk2g9X24-+b|gkP_&8tL){E($|qNhHo|ACG$f@zMn-iBQABYt-Qco@T9vzd{M;X zK@+hT!#Gfo27Pb5jr;6w@Z!O#Sh7ZBijx1G3KA++;3TQ=Tm&8xcR@aLN$;1C$ z5tt(+fjXHH9Jrygz1zDeV*A^0&)hrD5OD=mb8l>IwpAJ0HeaOdz4?h2ctk(I)E=g< zR%yZ`CeW>jXMGubZp*(cfA2FL={_x%<CRW)5T)ofa z;dfdWiY~scDri2tu)_NtB?kg+RUz6ezFc58=6~5vrHW#J)+qQdihkH;h}>?UXjtqq z&;_5@lEp`aZ-9fs!yLF2f~P2q{yha{Iz&Z?0r*g0nF>7wH=pW_8@;I{03rv-ofT1T zoTQ9tShmk^&7~oSnyOrbsdh%!Y=kOv^RcMe$f0d3qLNyB;YZ}VZ@wfWTCACS5_WT^ ztwuG9EhS^s3qDD?m0T*nkT}hW&iS@5{B4UKx8t-eva4rF)s}J8lTHDdXce@(Asb?> z_J8Db*3Koc@=1$)!^v;lIkRF!GLu!N-^k|7qPSTG3|JD4o>0mmk7!B5kODHF)7b#E z)VVS1r2Zjog;=WZ#Gg>0 zn!9I{3qO&aJKIaE`#u4(1h85nQ}52^fqADaRotJqPITF-w3-q{52mf0g>G$03((Z% zFU|1xQ#nw-+EJ%cDRpr=>ub5C`V8kpFss_m@~W&FkihsHML1&@*eG8RJK84wQ3z-Y zt`r)z`DTRPi25i0sEm%#8O==LaZP{`sow-gNX|a_YXTq}g%ki!5+dmIPbKSvD_Y6x#NUxZ7#kG0IOD1Y~LYWlq}2LF9stWSLD!6LRo zICQKz6Lm{Bu0pI@i0SGdGsvLR3jh&eM&iB7L5NBd+>#Y+Y}aM;X!JyR8#dQv>7aA$ zD{8}M{P>xpgTq&zBH6&0CT#9ECHek*pFK$r$(wssCx5vm{uUL z4PfKbLN+TNd3jNBCJdVt9MZCP)(%>W5L};zoHH3vjkYvRV6L0{@w&-H_};sf;=oVl z?`FJkN}&%^l@a<&zIVw#Oyo!IR{a}WrMnhIc-RTE@`amz9J}MT-1}M^Mqs|jbbDkx zGzj1b@`nvm4dbdaoo9RD#$O>oBSB;$Kp0=b>5TF+`vps8GBpq2iKU^;izCwOPwsppx;U-7ME zvrNu4=D+eKA3u0QeVOpNDKI8(opWUb(@aISwF}7bhM~d*37MAa??Qft-Hr5+k^{_T zSfnhFU|c|2pzw$Qko9Oq3Vo^kmL7k8G<;gaM=11#?~MY~ev}Yj3c31{j&V5zDVzOW z*t$)m#$nC?rVnul5z0o00*}ijX^OJB_)t+YL*GXU&1J*OubA`7Enkj-9&+VF97H%V z?9N<$*Nn zYv?zkB>v*wQE|)gwiIo>Aew$hU32D5Gz^t6Sqd%~h#+_p9%*S|!`udCf5vw2o?HoI zX9;$s{ypkDTF*#@l{7nDVje;dgN9Xg^JSrWSk|U*qTL#xP-?gJ0f91cIoau10lw-r z)cJ88LdRp|uedHK;}qAkHl4y;;Us9MY*?W}rqTo<;JN*&Ri__J1FJ+PL|sTr0~aoo zn7wr{^}oSHQ#T5$HJa*WRkREmrQwkZU3O$AmQQo-MEQV%c zIZXrVD;$M9j$5x$vghigcL(FHX6By?E zHy*z!tS#jxTS*($-xkRazH$7Xt)FRm4U3m&W@-x_Ym}VI^5{F+EoY#3nV`MRuA01H zn-F6Z-Ko>JLcJJGgX^&%d7jSUjWFQoH(RudxlIYJ1WgiZCN@KpWI6lo-aVHuhbMaR zD2RR53D+8H+gu4!#wIm!ssRx*$blTm+^kbfP*18p@VP7=xu43YID1{L)mLSnsREQv zXVIci20&u;t-r10ajwngKe2YFDSau^5p%zMD_7vbzyYR#;p=q9_jO9w#Y{+b6pldp 
zi*_j=8XD7gtiDl`tprXhpZYmJW6*hdLaL<7=2Wzg+M2Rt+6$oz%LrsE$$CwfvFrqS zVD#`c5o*_NdsTVO&Nkn=FU{%%`Knpguw=E9#$j2J4-M!CF3i{@sH@dAe1jY^OV3xA$9(g2dN00_LW^A;FXh^-Vr%^%4MUbgYDlNU6yx;{mn7x|@ z8)-D@cZ$RJ6-3^$oF3}=A0HJl2)Z?2|d8s_eh^Fae_ zVVK~(RCFy87gyAv0oSNXKy%hc*2`@)kFsHHgi%n1cZ`6K(;Oaat}@D?Srt&3OxC#n z<)^aYD@5=8RE}nv;6>WXiDDWN9#+!kgY5GpN}c^}n@Lso*&Wb*uhHJ4t)tJ*IyL)? z=I{2M%>Ljp|6^wuARm%bWj~wrGY!p(G5~TS`-`y0K3be__;f~QtJ4Ryka2Rz`e7zu zlKffA_zaIR$+rrOMiasSMKkk(GDXI}{3%=Xu`lU~r+!b?v3CfpL_S-0 z^?_;xsV@lK8NctZBxFzkkLaR&05p?+9R7yY{kk0HD&;Z#8**)QUcWG=Cy$QEdeD#H zSA5eF(R}LXHp3An4=#bm@Nttr+I$ppZ}$A&_-NeXMp1|YFbxW}njL$!gAIS3_27}$ z?UyyLy0GccD;b&m_Ch|*rcqlC2j~A!+D&m-AR|Q4wh(g0;291ExvST$m6Tl#QIY0s zP9V9Zv`;c0s&IUq|UoFp?S}bD`zQA89v~6ISwiyV1KPOWh^v?2_=6Y{nrl{=O5vG&D zqvfGwk`+WlOxQZxX7GttxSNdd%My8#5!(3Nd5`Ztj2WHF=R`2Et-jK_S%Cg1bG*p= z+&7&xBSlxp!Z#E^HXkUuHt=!z>DHc)QyDxh{J(r$|~Jn}!KBuddMm~SuPkz&fGhxngjn~2w!Wn%q+Q$ z$|XMpljj1xVh>H(kO|T3(~_e^*)-G0{LfcE%0LdP{=Phj8wwGivB`Q5%W|4P&VG9t zklM}Kl5o+xxbkynhd))+T9cx0GAJageth!Y2wlkc-QIC{;%#RUEAs8L(u#*~x5;bi z$-Oh|h;;~ua)F3jy(@Bpz3t4jZ9LWULzkxveWnmAh2a;IU!Bo9Tv>7*?E$me z*T)mkAGIp4Uh@sGuA~DcnKeQ?zEG-8C5txR%>ZlrLQn?Im1V3zfM^o>0DcX5`)K~D zWgDspJji|%Mn}pN=^KsfFuTu)N2s`2dUx3T$t(i=<5?t2tRED#O>&C4+GV?VTlUCa z0dtY>wy9UGl4Ju1X%!-D^T?oqvNGbG1|1C2w29IS@bYunp0(sVSFL{=T~^*Xgz1LI z8bkyR>4paXwr2NG&3t=D4NC77J;I;IqYvs?R-GEwTcpZ=#JrYKeB|TStkrWPbr`NA zo&x|~p-pvkg`nJv7%a+c_- z)*I9hk1Xt@fcXme6*;PZB?MxnBzo|6d4g&+N}7naVJVZ`!WyJ9}WvqedtbWShGv zfN_R9g`!-ts=HX5^1kP=qm^jt=O@(a^kJ;ZsK1LK<1lwY;mvorv8ra=0XYr3^5=P? z-cSf0fpI0S%YV5lQq<}2g;MelzP+_S*_2uD5#se*P}48E7>=?WJX39zbEC#c%>$y@ z6ICea>(^Gb=wCuDisTTM0m9Ld{PL<}v{O~%dqXa+c@&(0`6X_yFD`#0KRP6xE;hje zIh?*wVRaqW@=a*=A|f?^d!m{VE}0m|ikMd85Usp7ftzIeXs#3Y$`+L1!agnhEFYrG z1p@emugbMVx*A3+18liBL|2_aR`CwXdkjw~w{QuAk~onpaG_;i{@O1KzWAQbjMxil zsB}5wA-!Sxh039}dW)$VCaw*e8*$kO{bOXrxo{UR02v{W^Em z-$sQ>Uly-1P!VWES$7{{;196BBv1oLC>j7;&-g{r$5cbu_nwEBt^1w)m~#1Mg!@t| za(UjyGaQ-YU;m2eOaM4iy;=B(42cl$gdE}(_;mClPG%WpgFw$|_iLpmf;tm&bSVUg zb|CKo`%*a2tY4MvN_YNIxG1bRyXsz832s7p8#J`r$qgfclws5tM~mFt2_tJ@zYrB=et7Sg!SpCgp|y+AZsndnh0%}(N?sj z;1Ab{ZL%mbjScLl?AfyLCF7M2d%j!y__i}KE!Pg5uEs|i>fEZLTK>swY5^G^fW_Y} zytbQG2`S^kSGW+qYw*C_R-RY@48UJUc`#Xm8AyYp!Um>hW!!vXA{JKvLYPTCb)F37 z#jLnzmWc1Ph}u^0=Tt(iXyldBnh+0itO!0;DJHXgguG$B$7PV?Sp{2K6!N-%KKX*z%W|F8TbefS^5&377jrq_0B#93TZJa%f3dR6-8hV{*q9_lp zRT>LxpTS|}t#?fCPKcBUOx@Yvk^Q*VjX88xbN&m{=XzT&BYzeD=8XLlQ?KVh%sOp- zM&We>102rlYC^X!CnYkmVittTd@OP<0s8o-wVriwk;5^7;cOgTZomAr^LXZP%e@4? zi5s~Fg$WQkWax!kpM|IArB`0jmP??}4L{;)#FrlT>97eP)%+sz%HIzhE=3oZS`}e* zi?)ZQWG(xYhHnojhc7g5u9~5O1xd)7&RYFQF@c3DA6(&G6Qyl{8Y${0_hdBx;GudG z%(oQ_CMXm{WR^;HAtbgG#Lp6UunK+seB5r`)|U_*Qr=2l0j_y@V3kDPAaSnhudm+p zZ_S%)jw9brLb^poA*hgTlLW(F0mI$LouTITDHvr9{94O^$)Q=xP7a$pYqICud*Tnm zA4r`>!4`xmaVYsZr9lvl^wCDsXok1BwWSYQaxaP8zD_o+LCoUxe-?=1wU8%)HI*I7 zS)g}&ec~BbOIGP+WByS$v`gk0MF01H{qO!i-$v@q6h&~>Hn&#g{J5HO;jg1_s0Plj zw@IjK6t%X{PKVVQm(t%reT>;X*-79mY@P25`L=$=!O18s+%tPmUe4j0q62UAmtU`3 z#_kP|x$e5V=JNXN>P_JZ$Y}(A=!e^gZH)8CD?w*nw2c!^*qxouD8zxa5vVXr?^XXR z{tzP`nM;5E(P?L@Wz-0pgYPgx`VIfzvOj<0YwwXh;>&^p+C&fiBbnSl@**LcZjEw2 zwIkKNslci^KIY^w5hjzWCtryVZ3X+K;x56Tk>^DD6 zx+Mx7x5m`}mBYBMR$K{cw@wd>-r+Psmy7x?`3IxiAO5-kRLEZ<0az6R z%t`|K;}jq>EStiTu>f$YQuRpofh3W!;vcT74;1;?C&VvO#9QtS3xKQIyEU@=ycH|X z$|{z06gy+HNchC9Z_!4bL4P1d4^KhIL@r}Qz6*nc#DoK)13@h%eglT)pmVg6VWrBbw6fsz_WVV^*yp;tD?yWt+=t z23PX)3Q>Shnv(J2>#@W-EBWn&3%DN80|HDY1|PQoq^5E0)ao_=2|L`h7)PjtRJF?q zL2PDtN|N_pt(!Upm#lq6EZMNGel1%$CG&YmOkQ_Dd^!Ua@TPE?ope^MhNfzr`_ea* zVlaa!YDMJ(HEAr^ZsJrl7*<9Xs4agpeWj*k>{h`oZoKf6b2Lkb%cU4)zt=EMXxZs! 
literal 595423
[base85-encoded GIT binary patch data omitted: two literal blobs of 595423 bytes; binary content, not representable as text]
zbLCLk5DVbKu5B3vGg~}g^%NgELU=UQM0S2zXf+I9OBgd6cKQ`gI-6$X;N-XXZ#Yjl zCz4B(!YJYUN)7$w3Z>!uhK=;k*PrfSW6`sJOg~#ykas%rK#WUphF*@ z+)0s#@pmv?hT5)|J}Jyy<&!tpL=%O<-va6J`+aS~CuJO<9|{y^F7Y-DEiHU3M?m^i z;ho^Gyikt6R{h;nAXE~Vq>SUx3>Eyl6Dl2?2Ye3}gidZh=>~)~9DhC8n8T47y(wVO-S$@Qa8!d`Bo$+q&fo9S<0x-ex&WO z=Q_s(O(VjSMa}LmE24q-g4oy!SC;nx?bT3ZVl1sgN)q@cQcist}(Kn8BQ$7%^J!qnF$(#?S4x;0raOOl^+sm(UEo4;&d^;j6 z{M~TzK#BFTa+6QjJJH;HrB=T|&_I-@D7iOjq#IaVzUmEzKMFc#svZ&zVQ1_{zRip_ z3P%S&`H^QvtF|yNmaiMSP9zzC0yZRI5mKnt^#(3#z1>2OCx#q=;6QS2g6r^e)})a5 z*QI#gdqo()k;*IxD1A^*7s6=?!B!FVP#Q#B4q0#mr=eE}e!>JX4!yROha954Y~zMb z^79|}*w$1y-{Ke8S9<3Bk=;><_-dGLfqj4^8hO7| zexNgJG-My8rVVI2&DnLgaEu~A$x+u=OQ9QjBZ*$l24BuS zaE{Pii2EO);gvJfZ1=sVCsh$l3}fq*?MdAxVkMu<^CHQnFp!n#8V#ci zlt}2Xbde!0Ate|$*Vny`BWa4AO+ExOG>;hcRd>M0b+3G*4$T$i)@^a#4H+pXz&Dd( z7uUdEaFF`*QAoTS{(_}eIVZ?Yt6nj&QH@z$8N`a0Wi`7xol*<@Xsh} z9AIviD-K@!xG^~YS46>;JQtm$#q1nGdQJI@#4L;<|3h_`{t(j>A_PrVa~_vAZ)Rz0O1j zBSjM6*x--zACu9FdiuTO@V6qU2c3aGuuU6RKhsSypUX05lxYdijvk@FF(VHM)L}CW z1;b|=9^Mv_QRcU)jL;8l_yURJ$nMx_A;|d>#U=mJ1?#2i9vi-fOqI+ZAhVAg^;aJ7 zidRA@2?N&BUg|az8WTaLhK*2M5n?c(rmF*n8Hr*)1Q&Y_Mrj930`xpGFwB?~-&BGK z_L{)5irzEK|0LRLyKn0voQ0V7fW>#|K->lJ&?n4Ew5+XirotXY3o%NPP7M-IR^4#n*-#(-g# z`W89HP_0z&jU1lI_<1~5b)MRR90rKhOMZO*HMNkM4KhEBO)0d=-!H71=2toyqXj40 zPb!HlG+!iu7;vc4Y0z#253lxSMJz1(K5aEwtOy=Nk?qx9kOoyFf5RA+j>2f2G0L`H z5$-|E#xHGO^@beRNki45KS&qlCH}p$DZ`-ltpaaHz1q40m?jR=R4oz_!czQ$f+M4wovHXuzqDvL0$jnc#9wSsF!1+m_rLPuoi!V2HHGVDHyDe6V{^QfI(BiNp0aa`K90a`NxQ<|g}17FMFsH+tjv z;B3mg=2&QGBb|^!^7M6s_w2KYu0>B8XiDDH5bVB%8mS|LtPX3agw)9)>6>j4(&@Vkpo_gVe{~0~^ z+Q_!T>g>aUCCJCNHwnhy{zSf2;5VfdI1Dz^6K+)E#oP2|5SHuMa&HitpE56m4}$j5 zE;E;dMC{0AYxZzfIY|@6EEm5(z*_hj&(JW+^M zu!5s-Dz{mH5V&9WNp5U<703!A7{_5*JW~oY|EkcMa{LG#N`Z?O*DGy8Ad<20M9NAu z2d{ei;)Wt6oN1A+2@rN>(njv&Vh~Ie&+3HdbJ1HKr>MAYZq&(|uSNw7oQTH2F4{&K z!)U1p5$o@97l!B`#t7l(MrH67drPp;1jEJi&T>0>GZR~64|yPDD=9q!G4#Ybss)uQ z&hQpVW|F0KNL(}OLM_f?O5xR}dJ-;-6dvn~STCHsFj7sVx0NZlmN&nGd4%tb8wTCY zzzt5`2w^>uZn%^p6uF4XqeRYBvSgT6h7AV`cDL-zpr;tPvQHJg# zl~?Zd=CH|_17dOE3_SU63XEZRwbnAMbs7E}Zxcc;h^_O~FtLC;0S@~?=~_FBq5+V~ z3$2BFmZc;*pMAK;X)b-p9LHCj+QBBaj91WMEIxcBBzu0 zBRy#HYB^GX-B^R87}UlQPySNO0||nL6Z%J!bW&%IwqHYQ$3^Ro7fZjxts#A~6*;b{5uN8 zY~VJNO5FxPEDyHExM(Ce=&7O)WQAT{^8z+ek7Rnhxn|rfIR|~%wN?U-&yb8}yY6O) z?9DbU!bEQ z9)@e{GwZ|(jG7ESZ$Js>WXBo5hrDXr>~_GQQ;{Zz#XZt%ag~Xb9QiBX&e7xQqoy>O zs`@BfF|xr6$}#Fi+6Ol9R+ENUwA@3k5Q(boC$jH<$ha+~z3_U;tba%?0C9x5B8)CI z`C1r&%)sULz%@_p!}N$fL>j2!>CiLm&y;`CqV9X(zFbQ*TYFB<8SuX%1viRh*mO@G zuscl#cMP&<5_ME&0{#8^RRVx1f}#u!YcNX;!3}7Jiz`9UVm*&D5@(YX?4}0hnhbzN zMRjBz<9pPjxx0JGPWpfUI1?+Nj72e6yO(PCnHmRpFtW%Tf+-(B0}A2$<tc`X#II58tI3wU00-hyY4Uv43@x!4Tw>O>{gtlTh%o$93=*n;XT->VNd zTp3vm3}3N`62~x-JOt)jahL5x-X+vM@I7LAK?tD$^Nh8(365NWYN_`N=SaG<#bw90 zWL-+pTa|cyCU5F})vH5;F}?#gU!HfhV-yphdtL*=y=hckxad8DO1)@E23L-DLCM*E z^4gYHjJm~C$>9oyvLaq|RhR^M{c6ZNbXU|~qin8R-Ln$ba5GU#9Eg^{)cxkRM;=2( zAR6v}$cwqW2<=8T0EC$!mksZ?rid7M5$JG)A`w$Xw7IvCGzGqP91mB(6w9m6y(xEIG;PuS5UP>q=Bz3a!va)38Jeh zNr64dm%C9GGi+oIEk{CuSFlVC)-nn4oM8_xM~=UGZQGzTpb{03aLTv*tR+V*MOU2qU7MqONM zXc9$+mp@^*2`!u(`OQjH=ipi(F0g?oQsjMA0S7o#^0}?5Ur>V9bh3>325zs6gsB5r zLpM@+IE=1-EHgb5lsfr=0qK>FU#hc?EccG*Npev#7zS$*HaaRHi7Yp zY#b&28P~a&xZy2n_L_Bn9Hx^u_!J1xyA_3#CMINUif?+a@2MO@)aGL}|HE>B4OqjO z?)#adu_`mJ>i_SU1hf7B{mrENt9dAro~ED;`GgSk{sQ*$QapE+yG}P zNvjj_77%=RHP95)hZGg~mE6*Fb!xsIIudiYTNi8@2}G_9Nr= zK#2s{BV{~F!rV&|RfNhjqe`wxlWbCM-7ybXtMd2n~f}tt@#1H_|J77ol4& z8&xMTz>{98C_Th(@Edl*4^d68l1J#|M!_=>T|`EDctgHP?iH*SvE!3Blq>Ynmon#* z#0+klnk=*!A~PIzd=JR-M_LiWbW8jBs?I~q#m0+!TDen)z7nj zUm|6{pd08)*02l7!cAao37m|xZ7Cv@n+HduKcWw3D)hUucB)Zr#zCRWEJiLL4O4Q5 
zKMh2ZJYX+!7iPIh6M&K`_NsCta^*NjX=@x&6*md&fibxrWf&brpe&~S4C6Jfg*i^O z0tOEb9sCEP{Hfm1g_oWNX&@!My;15C;ELz-%|U8qTR41TDPWTN=^%8SMfwi-+fuOK zexbmshq#A#?P~n!^c6h4B^9$<66|hpuS{9vm2rACQA<|Xg|?nx zw~=EEb{XhzB8kmZ0sR$<4iGx=7nJmid^S|raO=1g1O^e=DsxrC>s3ksO0R{gD-va< zjsYBfB!2}~i9@_F4Gv00D56~9uiw+g+>Vd|^5~;N0(Xm~xdQ7%0XVR|T>gG@+r)qX zG&mL44F4&?W)dX-!p2Ag7;vqge;CY)(L%xzK)BVzJql=qHPD4} z`f#H}UK>j-$OB&sOe1^JU*IZmk}3PJ@c$^uZR&xKnt*9srlHr<;0Qg7%$225@CLa@ zrb9$?ABfcppW}ZoU*<^c7=y9Dp;I|f*}%9`O*7O@)Hs_jftjd~jCXSJQ6!i67o;U5 z!qIx()BvTRG_VA%!mj5042|>U$|d(~5b0445$)kmYuBz2femU8lTInp`5lab>W-Bf z)(V4x^9^V;92V;x#da+!g}eN6QKRZ6W^?ta_IB9kCr_ z(%J^tq@RF(Eq$Ol)NY}!o@i*|mNX@laL8-OB_P&-{bw$kWGZm+4Dnyjt0ub05)=?{ zv*<66Am5}1M0EAhw@OtNJZ7oB(4JLPjvbk$c3*XA#CbW9P_&)h7 zsc{pQm4_c1y?0X~CAUFu3a{$)H(1ijg{VQMR>UGx)4*!TFZf^OkGSs=w*xH>_;UrN zCc0>2mSDwe*;)CSpS}KDhOfsO9PS!KcI*M1*u>wjXMO!q6Xqwng~fAG>H|YRj4DZ6 zPJAOeCE)6b!b`;E*+H7v#@$Zzb0=cx3dslKm?5#D@RfB@*k1H#xOe`N5yJCQgcTjO zVmk;Dl+-cAp%HfD3XY|f3{v5scRmI8PPw@bG?;|W?0f^%nJt<+nSnQZiFN3mer%)a zn%O}#4JYKomcMy9R*4!T>_(WjS50@|hna1HHk27iM-`NBb>e!XQ;a*!;52PV5EY9} zk;V)xZVKY``b#gFsTsM9RICd_U>ob?0SHKY7tmy@)aa@A8Wu-5H@5iSV9L1L)1#2R z!P~^mo`wnw?8&g2ME_Ex3Pmc!QMPnA{fZY1pXYl<8=xd5W2n-1U};=J>JDdyN9wZu>EMdAjP%x1C0ncJG04Rfz)#0va zUKx5Rq{jnDGrK!Oj5lDMcyK0ZM<^au6AGQ4e~?Il59oUyjrscoYFVW{{*spr@=GKE zK$`N6)87eq4r~bVPc+UJNyy7xobGHNqVa!x`bDny-siLhuroX``nTy|Bbg*61LZrP z!r!cgyOIje3#BX9HJopOsix|D9!=%6!Of&p+Ao8nCe^a@SII?!i%@+9_rue~grfJ2 zA|z;omJHr5!pVgDjs)GwyO9Fm&N>KYrcr~Y@u{AmW?5uh?X{^2Sh!Xt7n#SeYif~TNl;W*%U z*EBRcQbsM7>;IHg#NdtJV9qLmQf-;qJI!{EQy+x3#&#V!IBXOF$-ZDFGj#pH13D2c z?w|3u;i}O2IMb(Lkm(6WxXG3q(S-jBz@jxmunGqKPQOwu_{I6}N3nS`$v!(Pb!n|u?Ta>1@ft+w%mY9)iTrx78Y$sR$7{LG zdt)(@Nhb3$leW-6b@Dx~Y^$4BG9W4#t>untWIK^~a4~!elR=#xOva|dqY7&tE6Y>N z%RVA!wHc*VuX2Bm1TbIbY{)qzLhw)6b3P+XAPM(tPqC}Z9d9aI;(;G&%OLJkY_At* zDqu&1RFDq$(zUY9dm(cx>=?m(bl?(#ewCdBB2SucGq1^Dx$Rf5dXMrVE~nwoEMnYI ztL5|3)8U5y8o!T(fwC?g4@A2INiYeg`|3)jHrxaOG)J;wIyUcI^DI}lH`ckea#;Cf zY~P*80*?!PjJ@}+6g9~CKc9Ff`Mt@;FRHC zI?%oA(KbCKs-;~J!~&M|A~>Y-WJ!;9mOQT zy@>?$G?t!oTtepOFWi?@gx*y}3qg;#MF!e7GOkQ2$Y0U%Q*LE~+NDF!4=})_jXzLq z2}ln+9;MI(VAj?$|GbtZIV~H?483*Nf(43%KdL`@V2$J*<=WuDw3;IC^c+Ljy zsH1lAWdk#chEmso4z|?fGvnh?x`;_d1y9KDO_G9FCQ9m&9OhSX^#Rd8by)oD`> zOGg&Da5tjTX1t#Yb#yWyJPiuuo7CmOEU+`lj#2tL&dnQCi`071pEMj4ZhAI!L2ck& zRv!CE;n=5BpKp*rZe4i>fv3F&(<eKpBs z{>yoEx=1kLS`lR|^*IBCBny5^#0&{{nJ9x))wt6blc9MTYtpRK%3jtrTwcyb5FfwC z=_1}-2S~iNlhHrMAB^&M|L6B_M~VsjaJeO&yL|QBGg*pLl@t}EZHoNa93e@G4!n?s z;LGnu8X7Kp4HNQ4hf7EFtHe2P-~v}~?xZc<1~y|)rd84!E|yKj5J^;T8P>KG*GvH} zp&XY^#pp=GlZo0T@p~jH6ypp@xnO%9W4CQg?ec!;C76opIfibE;=r3(Q3|XfXv8(Z z{Sst#`=?(q{4C?4jk3Jz0ei@Z_caLYv1QqU?w2#!vq;B`SP*tUjArWO^LKm_?!U+d zq3w-_-+&w?%t{n^pAXSVnabbYBkcsdtRHyx8!Z&K)S|A3u7-Yc9Q1bS#Du6)@=5eD zFR850JVV@}B1?d7k?@T!v;goYxCgxF=o<|do8&V8t{1Q)r2&?V=pCqg<(1rN#NmvM zDzEvYWebf%Yn}?l>+m{B3kD%;E3=9lyXUu|e3$YHI195EuXC2Ju)n#nv8y6a104Oy zQZl`mm3wPzYXlwXOM4kWg9l9@A)}jHdn`2u)7mH*xzlmGEEy<|4GdqgEBO}(oJN*!dycr5R%CM3_;Hbw{4!1&;cM=MoMXjE^ z;>%zN=sDxPS-$SAK-59^x52CLpFXVx*TXz|ApWQhfY);VzF%WN$C%~4l41C5Dl|^- zW^_SnXu$iaitSbwC%q`Lqy6I~1t``bsL_TimDUdYrim1^M?e{VKE0YC+2Ntq5;steG!Dd!+=5K}{ zBkpCGB)Rdr>_zz};;C`v43;3Jkmbd>=YKj^9$cf?%O^jX4t*w_rG}x429w{wS>R0w zSN0j_gxd9&A5l07*D9e$WH5J|k$8Ru6*)+2Pjuc{-2Y$#M`4E{q&?+x!_*{kfp#3d z2N7UMzQYRUvO%~F)=5&ZA2PO4t(U%c!op1)@Ftn+23=kE89c8$X z7beDe@%$z31v_Q(|2PxM&9sU+8cy=xNODNc5-e93h0?OPP<dcIL2y)|J-V!tfe*5C7*~tvNl~Cs0nj}#i^rv9{(;5htnKB~Lrz$}HfbMb;> zb*vFnHw|J5ohwp58%0hoxCW7U?_5$onNLza`Z5qg3?8i~5qtWke|rHMHYZ*!Ep&`$YjB|w8Od)W0BIUvunanbCf$8|(eyCC3qo1D7 z;G7{P%urp0(|}N&ydlf1ok=FZ@=q;jJ3L6RdEx_uh7yUPs~N6H2ZWp4IFv9wQ_X}b 
zm&^`9$A?fk(eXJ^npdbX{_?YH}~7pR!Be3z84 zmG#MLjS;C-Cx2ZSZ&$h%`jVJlaD>az-dbf}77o(6Y)n4?yaAwkREqUSQ0R>r98@M} zaI_n4Y-#LVZe&u>Cu2`owgeI`y%zLYOJRq&9wVp3&B**th@BEM#GzX3*YBbC>4 zdFaq}E*frLhU~|g@q0>Ej)sS`Okn-l>wi>;61WH3%vp|riSmz4Ia+*NM83dH`KPX= zN+a*W1y5IzV5Je$3&3sM%jG~Fz=0uVN4-ou?#)IXklm)xXHW5o+i=f$Ya>7+QdF?r z`#Oq?VL;uux6D(s-$dQ>!jz_;)pj(eJWWQWJr1bFCg5O+%&@`of?qzN+l-)d?sN?ceP;--Gt!4czD_K$MX!zIbGhHv}srH-ONN*(s zP23)eHDA^dZBE{l63K-3bbsyI2E(p;)F=Vf``x?#4izf!C_5Xj9+rAaA4fR1f*qZH z`Sf)WuQd@yQD*b1u4|a+#!8`vk>0mG^Hyq2SHj6og+F8^oaYcQ#1pP% z+6UI)$MYiRZ$voZxpTi_8dc&F5Xtc)ao2#O(8D_jD+{@+;yB3aSD3@f53=X^Va!x*K7kT$PXylVPs5Wp`H6iuB2*Iz6~{h@)UE(=2`)YR zoTu$V!^)I$7okQHF*sRhYj;hzI7$fC5iM|4NP$9gtdzlEFp3FF1`BF9b(9wmA0c5z z+i-r}oP4Lm5mrlid|a%)S~lP&{kx{A@ZNF=M1u`9H1IYvmvDPpn+^QJPpv5V^UoSF zoog$6xjIZ#DA579P>kyQYl#Q2x2&y=snP8C<%9b?*|vBlM&5^5y!H2t|XxhkPz}&!izC zlwoEKvK+%);&j`k>F#7Ea|AKsuxCIA;n)FXgy&Yq&RnL%+CsgMaobv19mcUAyM@n*D6c1!%&CGR9!VqTPK;Yt;0(5((F9Cy&~y z6@MK^`8Q)xlH4c_G!T7fA41L2jn#;A2q;{i#YNplNH@CHW;DCqakvLk9%Sxe=*uMK z+>_r0#(-@2rDYi0W1>|7or$F%abPH?_wWvq6dlz2U|tDFb4zk zYf`45XA9f`1rf~cF5#Z-Xsv)LFZ=@EgB4wNH#)3{#VP(ly@mg}lOY&f%&fpVeCnJV zsYzew-!ln}=(f+zpj?$ACqL^emk2PtlYDp^Nn}1>%WP`ugTdfaxds3x5MhIo89Wrm z4<8FpHx4q#$+!tt!pjdl3=DC!TrfQ_LmhbG^OT?u5maZ6@KNZ=#@%CkmsV)5D|g2>L0ZFIO>xC6#Q2oh-dZ%pf5; znz12@cYd8zN4&x-&#~csib=pq!z2Sepyrx0EN6lgym9ab6`K^d2^Ek23*bs*4{Sjn zwSORsPPBi5*^7Qy{53)tW+R`8{&jV4e`Kz{Fy-i zUqu5MX=xuHAA5;32`VO*Wo+)Yq8&p z=ZJZ9%M@Kbl$SE2a4I22f>&8Fgq8Yg7{bqG8w#G(z~!QeHt!aURIp|*Rk(UD3L>!! zL_D&oIXg8gZ!-#bIWNB99VMag0Tqy1e2^Nl!J5BACEDA9NH#s_{%KFGygeXn7(awQ z88j-4Q}m@vvu>p=e$babN9-b@i-q@@bW8_MWI-a+3fD0c5=0BW;c=~HEOIX>jgV;T zeL_}rB$=VylW!tIl96f@z(*?Emh#0Q929||2q$*BVKBf{l52!G*KM9&8MJ3Z0ArNi zeLjHx2O$d4zgPdJeBcL2$%u0R!}92;7(2#bbB+(;>Q7zF{EXgO#$lo|*SJ z4*m&YgOn~?rPN`e!FNBO( zVIHq{*?QdDjcY$5rTrPvp*OB=@*gi>{FGPAf)~g>K6jk1E2{opJ`g^UEf^h1 z4I8}mq#xR~;tFu1Tb%^B-Egqs(^l4( zZz!t92|8DT-<3>DHZOz{k?E!!pk$`?VF7S_h-VaD7#7exFMj6y2nnY20)yc<*nuRu z7fV_VdU{}M_%oQDgz_Pr@Lr6XYm;DBKg%c3ttGR8;Fc|t`xvR_e5l;bHil*{(7sQ` z&izIB(L>&1h*m!W@zKWr3bxajx;;yw2Q4h)Zb>Ne(c?fhvb2?DqJ{QLaBzb7oezmj z_jJ>4Y?OK3%t3qC^8g2&Kx)9S{aIE zoSyVn!4D77Mr>SG7C}2cQ%c6WHNe2QWJJxpEw{mxqX`@8=<B)^^39zj2_)DUqNQSCGq0h^6*8YTr$Op~FG+>T6x!UM zY4sJ|peBtY1gkkY8mgsE2p!XED|prq^Eosy5hy94MvEK^ggL=bIm(iqa&=snhtA_g zZvv!bH0Ykd?@(;)hu|&dr#Jyd4(ir3-pNP?OtK>Hd*hkU40at}xD@{E&04}$TO1M6 z$j!ImI}i&;#|KeVpI$1i4i%tLh=C2&kpb+XI3mPm)X?oHG_E^{3vQ5`i0qzB=tE^w zlb`8LNydJis;HNW-uqnp$(u4Py0}LvqjUDu0qqEP!K=y_qPrl$WVApF7~<*)5-zQM z{(u8hVmEQ|IKRaxpZfwIqbuA~FIT!fTQBIKd385nqa_d<)g5l{GZ(xIyad|%=iFyO$(}x(lbLQ1^e=!HN>%i{68Dt)z_764*(q%Xsef5aM~h7z+?USzJMb1n z!X>DA_`Tg|P=tAj%u$meerCp$3cahLpe=)iW(>O<13%!E^I~Bs=&rKYIfFoU&BYZ;~C#x=d3 zqJXCQ=u2EL+`NBCn65Kt-a9S7U1Uez9H(WyIvMeK)LzLpKmF?I*R_^2WxTf$!sRnq zk=V^)R6X?@1%1`uEETsYYkCb6JJJ<37{;1D%Ml8t*i?#+A0}vo7}30WFWa_&E2VQ_ zNs+CS_vL8;|FEv$5#V&;r!eKpxeC&~wz|#jS-yUaxb8*K4IJGlH20G=Zu{Y;unJHG zTi&612p%EagRVd_+9S0kk6snNfQX!+=mPrmDKkV(xa8Poi^+rO}y2(3V1MEV0S zmI;z;(W+yCKHN%1^pnG6=UwOv1bEKE4AOAFoZSXPRAV|9s9N+;7B6MvxGdqTLv z`3cqkU~v5ObN5<;(MK)}b2W0T`R~Q8FJn#FQ`2x(n$drK`VEV8uzz-I2W^_}mVU1E zP{FSpkX?Vg^SI_G1^>0{${PE0W^lqBXOkT7!~Fm80tO|!7rK$o1H_(3TLDS&`EAm_ zenV=s(EvLNxe{V8H&}S-;ZOua;>7)}9D|qoz@x-5;9rQ_2UxIxqtB-clm&Yb(>vfd%2r9;s)VQ z04-9DEY4IpvU6=iF;uP?Mr`f?lWf$kx1TZF?W?PXZV$Of#sC6hgtJa39r7VWlh{!$ zpf}Na^D^|5j-~wuJ*=%7BqkOQx;9ng#~?sEN#P z@m6i)Yk42ZS7=zQjify0w6_?L^f*xF;>V*l1OUuht_cd!Tk{`!b?2ixo#DgW7?@qo zCFIdSJ!d#th|3I2o_^h!Wls`dF}b6?G6n#*!!{`eP>dz9F+F+5^XxlsAKcWijN0re ziC7Snf1-UQ-8%coD~ch&vq)^?lQ&FYQ48QnxAT&c(?6NNTNLuRiU!2xSI|e!4Wm%v 
zo!SHp&Bc8R;1IG$k%{E}{77xLV3lxoWKVf0h8Lpp27xF3ggQk2!Sy%%gK9lP#?CyV zx8-$zgC=}YH?nn|D1hpUYdQVf73M?wGZN1tT=+~TL)H|wM%&e5y5E4)Be!w>yOCFb zo;aB(fnFd<``7zO?dL{xdb;|KsJ^6V5*RepU{fcGy?6E}8ZGUDGj}S2O#N zvk&XrMVW=R0StR-&;!P4eA0avmE({PI`25nl1Erf8eP$ehX~j~s)j)hy+jV4g$270 zZ=>J&gBE8}=@>!F z1%j2NID#Jnb4Zc%wKZ@zarYqcqjwm_WbSVY8Msr!sefSPnj_<`1yW>{1d9eDYD#&%|b?Ps83rb$;cja{AwJRPU7$`Y*j3Xc-nf{WUW2Y?v2Xtb{?oeFQ@1icH zhm zu&MrgLA+wGe$}vCX7_Ht3{@%n@*`@JI@ho`y%FigE|zgO#;N>l6dpH$S}r>V{+?)) zS*mfiN|%`BeO{bKeBbkRpXi`U5Ffh4|88ae2d=S%_{jr=`qor?wr0sk5p&7&gGOOQ z*+=h-_L1F|#D{)iZvC^hdcy@sj0BA_D>wIJo_UVxPn z^?d%w5eDq|Qn_}?z2Qu3h~xSb;xcA*ahJUu^15;auRr_C-VA6^iu)X-J{L`@@yPWX ze*J2O4{*|=^bHL}2o7xVoD2m+0vf%I^~jm#kTBPNM^8J_jEg0-cHo?d+;4WQXt<05 zHWQ+=$$&F2S{6CjG^|%P)DGrH;Tykg5i1 zJWHW9UXHlL;u=6?NrYGD2GT(|JV*G z<_?sjVpy`4hgfPOAdma=q@Y8@g)TN)oVhjL1VPEhEVirB5ROXnjj#qsZQTYTKq8j_ul>KSGXxBZ)ONp{S-{A zxZugdOs2ElUDG5eD>$TKKCipXE4b*4d)#crt`%MhgFmGlh+Y+WqM!mQl;B@wp5gl1WM&|SCha0rbK}H&`J*;+sP09E zS8)NL6Mi%%(FENje`gVLLiksuOL!nSa+P{OwnNL+z{;s3-go*hB8hVy##bp8W7Lls zU0wmNsZF8;M;IaOn0E|-Qz5gLks?#`W^i9TM$OjC9OJZk=Gl;3mx%wuFB?YYm(#lR z!yqa3PQ9m^vK`>TU_jUCCs&1!ug*psBMrB_!Zm_{mlz*fa1kihlulD$K`D$JCfK8u zC4;;i1_!oJN1+0Fa7Ix7$#>wgH{cVbuZFV)p~7%DoV+2YFtXf8mSOnVJ+YQoUd;a> zx^lPsX*}bcWFE;j+U*f)y05%QU_P?LKkC?8bU7ct35`of;YzPxqiCVpl!fEExGeFNRP?PSQ6Dbc-5y1eDB`UvFGjIzNA=>Rr-Sz`p6aHVu+qfzyfgPE}E|@`H7Kd=;sVPBMnRikq7 zLBSTpXpfoe?LxEwC0yLe4@A1eA^%xVRW#njy)$gXdiuH^dtWy5=kNEDkbn+Ht^a|Q zLccI5Na)n&-;Jmzsvy36`a8z>pcpRqWn5vS%|avyS(dVd5A9lpq|B&{;d9x&4cw7q z?xJ^pc7;;h4IO^Kmpt88^oRwMSSs8H?vUtaeuZ7teICe1RXw|gY<{nUyU%gp81dEF zYfu|Vl#q|oOJK~FGM_8%qlCFo^OvG$1%?TC`T^O@u!GBdu&+2&jZ`cz?Yd+xymA>bOz$AJLraZ*=nDXVyo^N*{2-vg$(zGp|) z8EnwPHoT*rBuF;HE6ZsPDoAlW& zssw8A{5z9sX`c0`+=?$1iHft!eb5(P1s~j7(b19C`lN7az(_kAnNH zcCdzb8m_)Lerwy2!&#(CD?hCO9&vyDU3Vwn2am?a3@icbz5a^cLRt-Q-n;=3646EJ zHcH!6lla{B-+$w$xsLrSbp);e{DTBj42cgl3-}MW`r@73?N_Q(_LA@I(t($cP40iLN5F<<|A(2@;!?hi zM5Sb^{B1b?Wq!yZjM_+_3}#707XJ zS181mVpowM|w;c4gLk3AKQ(wuI>~ORik<5%Hd$~ymBU2Bet87mpCtCQGvR~S5 zShSJxUv#tBd##4uRI_-dOO&!6+AOpIrdL<M%T2(sB2RGviCMuDjL zO7u-o98E8guZlJgBEcIkB@46<8RaZ!LdHMz z__WREr!V+%Y!iL~#?UW{4mY@J{5%cZO))<*Y;Xt)@RmsTDWS(@J#=wNRw0=G#&BZX`&?N+iV?SR9g#Mz)|RStgaVydYrSa1HgXp{HM zDpx4?MyTb|o7S6moUuw@!0(%TMxpm4t>eA%o0{GB_=ECsqZZ`Vau%aX=@tzO9iZRE zPcVkdr~qCTx8Y0L;`QbVMG*V20Ac0IxqwGQDih>9Zui&q@?9!4c@ii*x$_tQOYaf9 zUm`+}>&mpQfTIJeJ`(mh*QO$Pe%x6B6!LEYz{YW%_h38RKDonWBg5-7bJ8VpTMB%O zGcBjO6`+dozjY@CRe)LMh$^T}HlYUTotr@`Y8C;un@&pFj$k7-Kyp%19|{>=g!fx; z2k+-j(|M_1nTeWR5i3WIBd1wubHQ!r*{75(LziYtMO=F+d-zyUc9p%K4lh_1ASc^v zr>IZP(Yle!_K8S=y#89#_=BxnD0Mlo#ma;ku9JEfh3Zf3(N$5zT*4NPf#TM{H?^^< z#>q-FR7P}v-QYK3v-X!1Gn^%SQ3nioQnGKq<5YqAs{B`kmf&xs~W4syxDas6L>?uvOJqK zIo&O15Q-ttgaA1VPb;$lQFu@X#{fNQnCA7`F-b7)ikclpuwH+{&DoIb7r)MFC1Ny; z!l_oidJeI!`Ev(zK8xL*JbcQ_wEAmv82AbE;PS7lEz1O-xjd|R^fJewDKzNca&*Zq zFVTvvLxo^M7~+p`jB2l2bSvqWKx#7Bb0$(?eTVUUf3xXzqKFB ztNFn()iw2PBu3eebk1xE5f}e-`Fif0AZDshukBbHP-jHTSPL3~9^@JDPMwV3EK+{u zg_4wg92XXbj(}YWq!EQfv29?{sgXf+>UQGMx+DX+!aX4fJc{W=)Y8N_gP<{taV#USHBt2${OxGW;?xOskIAu86p=ACoXK^^LILAgGdeYh>XnzT=hSDK>T&VUEp1_y4#5Vb!ru17OMF*Dp+@ED z9qx^=a&mDP0pVu?f_PAx5X$_oqN5HT`h0Mn*raJ>D=bD7;Hav?Ruf2&w=JI}_8RRO z_{&Vc@XkGSuF_&QsbHjaBW}Z@#A)RFAva!*a)-3t(x&lik%sOgK5=zleVNfC97#DT zEpRwG%d15`s%kIhE-eqswYUoUGt4KeuvO@TihvY6$<0{-i_?mS+>Jg6bOWjbdo40c zzxpU&MdA#4MXg*dxT+c=W%My(g6g@AWiw$oRP-ZiBs4*ob%VSQC=w@PqFM>k* zSOiuha8X5#c)q1uLyJhj6l1ER=9&>XW9|=~{7q_YEat<*sT7N-=N@Y&Js0IU6Zu> zE|)p!@IX9XpY5K6YgqX)`8O&}4(e8IAckX?ba%w5wS5qWM1F8TD0Y4}zp5tG=8blD z=M1PJB|a!>MoG#K4=1fTh|8w9Wzi(Tw+PgJB-_J_T6G?D1`Qxh9xH|pce!3_=j7&6 
zVQ#QT8)Cg~{9d_03gTLKRoQWR0()J%pfuw=WkMa{=giyq=(!>oA;SGMCt0SeDglSu ze$X{IeA_C(Vsn|Dnpv$*=j>2mp#D}$fBet6abl;Xiy>81soPARyQg``tKom&o?UA< zBJz0+Zn0XNf1t)`#nu*879`&wFM6>I)nN!FNDeoX1}y68<*^t*Y&CeHBXDf40r(8;T$8-{)qL2eqg&;^Dy5>{E5N9RbHMuQncU)SdS1zf(v~zGl{D}+E$F{D zKBxb-WNsxQCTU}f=ueMs4iCLhG zYD>VuPF;?5vV4;IJJo6z7DSlE8?DPn9~6=*@H>BP02S!+0RV{Vi0QqJ^lbbN$AP zMfrapNTFdJ;H)D-BsNx{3(tCVcUtK?M;me1A`iGUfyLz6I`7{^=sGfGd~OY6&Rl}& z8L?2>!sItx3nQSTLJsBjO+_sW&76SKmVz>1#>rXl7epx!h=OHJy;+R*q4G|#m0H{? zT#;Y%*#*wNtdLB0>#>WJf^bBVU@3b?T6#V%cbcP`q_>HrIHBfbqA^ZgHAdAm?`7{| zM3l7OT%B~{hQH&AHSg8j+S+oS$ML8fRgfoEL3(sr_0pM^rU_z9-!TXKWqzCTBKg}^ zw0`dzT(%-}OtS`r+Bzq=h4Y}7|r1}yTL3uD?D@SDjBWXduwo1_9;1DCd{y1Ys8^;vRyMj85Yk&4f#;pGXBV7RQ|>sai;GIDpWqM zZ}mojYn0Zn&4QjF=-MO{vi?DGmMAL#J@45suM4>%r3kELIk4JHhet={`?KQ;+uJAntFkV{ zxcU~Eqmy${)Z${YI@Bd4@apkTUaNIb6tE5hpT9Y`Bu7IHLbO`(vKdAWB&IxxNKGLu zb16VOym!kGdJ$SvufqA!gz15O;)EFri=bW8hED-?D1YYR(Ei2l2Ifm~Iu*trI68Op znVgVgGO;p}hI2C1iM3S)m++*xiREp$-SlyR;edCRP!=9S!LJ+_OeH8~b6wQLI)hu% z9K7jolqxG5uL`YO7$$Fh_T}elcd0pGhr<0SeRZp4bw$(yc|k#^A`q7XY7oz7zLwK* z)#FjcHeiX4@RI#yvxgg~&~o6Nu!o?aNK)nx(IJQICE|XfwJxB9q%{H%_;~Wo3P(uU z&%@g|D$sGSd#HYX`5od#SW;TY(ZGBJpL&(pSZrw(IJPtnIx_t#b+Tag`qmK?%ZWSO zzP8e;!c&Y2+n278$z#`?n--pVy>f7Dt%Ox=7gV1t(zYD*hnU(%bLCQh(Dd@v7xjG* zIJvy_e!wx5C;MH!%`jEnnUl(mxYaz1t0+>||GMYZ7gDhTs*U6c&*RbXoR?IFU&|wE zV&DDAnZ|tjx!gS>r96;xn2pMcod@~vS6+FgNlnT*PSO$N8rENA8jLU);lBBm2@J0z z_eYZdEj-A2?Psl-qegclh=LpDhdB|Mond<)4WfMxn>v2eo3u8U(;#t$ij1u_JqATH zcS#7+vs53?Ex^~)+@8cT&EWz8QnR86xwl4@HT2z;NcE7$Q=$eQ8{5|fd!I`rcA{me zJO(Szx4|!3eZpxps?uNS7W?VkgoAD%anGBM^USM{k}=EkBDVyAk=zelaFnFfv_AQ` zqB3fjQf2*p!sT;b8`jrW*`d~9EBc^u(~M*(UZ7q6mE%+)H-OV@q9aeL%5I_KC8*tkB^@gUEW z51{BJ&8`n2d~;v~jaUwe58(u7#XrAvh5#9SG`UL3?;VwMbBQ=I7eHT@BP*AKu!n-{ zo&Y(iWg^8UB?2%0<>EKFH^X&L%*5MRtzI_%hyYGy?GVE&{Y1K@UOGe}%2o8hR_tY- z87pNeQA{eSjxv136z@Ywo}4JEwUkh4+Z<{EiEC}8cYOK>*0W%;+DWv5?jHSnO03Tk zzSVSCxT|#zPseZQ-{_hyZoPk8Sh;_hVFk+H z_%;5g#c)#<)y-3;4Bl=FdTd(&fdeD&e z1YmzvdKum+)&3@!D5AjC0Eke(8L3x%g;IvBg_nFTWwQ9BP#$obeQ|1K#G^lfWdgEsDDm+vN#wqAyT5%*HsV<5cpgM*&m)#aR zZLaut-^q!G*`aGP|E0b=9yZL+nuu4jn}|YghmS$T6h{>sRHBAUu5VFn!mHU(l^c z!VR)KtfpgG$g>V3E7Tk<;sL*(ByQP~1Y=P|1mU#6K2Dkn z-VYC3s7H__O^R9fTK1M|PPLXqgi*d)#+`Wc!L8eE)#+O?rRf zvsKvow11vFHc~_h=yLqIRcZ~;G(~=Gxf8~36)x-K%>2E(WkV9m zIOrB4gu;)s)g_pmJkmmf_lA^18!8E|Fi}g3IB2=G7=_lo%!5|LCvvmU!dcUTiWaTy zFIj&R-(+RN(2uI`FvgyypFCY5+DJ*AN3>@+LWW}Q6kgQkIq9_YK;|eul3TQP=0Gy) zev)ZA7E7?s!j1JPscXptP;`oQ&kP+9Kit$|K3pvsCAaV-VI1SU$c7{pk>&X9UbNHe zau;(g)1H>bxwV#bFn_RRTV}9VlI85okdmF_NyY*-$n&+Xh(w$s2UMVB!eO&*)`{H3 zIXv?!aN6;RC?s;g+CJs;a>R4e{fMdh-XS%J-9^B0zMK^}z47Rma|Jn!JJ!Lg$Hz7y z$8Uu9Y3jc2)N@gX8RDWKe(Lw~CuE9o9@?rhd$r?3JkG(}h;SFhl6jZU4GwUq=TZ(Q z<@8+qKR+fb#2iK?^yAE^!Z1;Agw)Bw=oT4hGxYOPBsdU2HJUh@YKpZr;c8J@jV+m( z(Q$qL^SN7i>ztvP+;GKY<1#1}K?E8?2puvqhiPUJ;jK|=TJ%P0TEa?SHS2RN>aU{P zi-)_YkCJLR{Y4d7)}Uum6KJ zh;zkq^%;9>shGGMG^I{)Mu)~o5jobw^ry2=Nj=AMX{YD}nmi@y zbnZmHKx$}fAN_^?t%z>)37jr{-E%xi)ctT zDn2vUt(`=Aw%pFSRtTu;1|LOLEKRSH=LYFfU8~_6sFhI+mc0$iIo>T3m12PbN|6!; zkJ8HiP=)G0&O-u8K$&@;CvqlN1znEc(5jw3eMb98_|;q?Ou!UDT+nCC6AGwoxhz94Jm<+a$fRd7R%Wtz}NmU zf2_VRxo3`$d8yGzch*{>oNGC+)DYF)*^PZz5tF7E7OCwGpvZ9`6 z3J$s;P|HQL$r?WTGsm2ZA`vpO&ZwAV4Sjhn>6VZS`cnx`vI7gxLr@P=5Y-ZLF?Nvm zQY0||HE0mF71dKDsqE}ZE=QFnxHX+3gKKT5EC;Tcjdto=fTwZ^+yj89IT&bB?cocL z^xxaoD$i6bS^s?XdMY(U-NfqP`C8_(%U28!5;bX!hm*y3*nsSKb#nZpZWZl(;yX(X zKh5jj$n_IhY1*gx_4^i5xPHr6SsxZV!rRh99pG@t!)&Pp0Ldu3p47OKHY65=O=0S3SY4?o$RkuT)s>&Fg>pT`l;N?|TrTzG@QRkC+;m7Q<=}xKZVa;7VTcX3C-Tjbz_A zB!5HN57O@9=Q+?BmR`P^?i}tFng0YW=|O@2t%?)C*t-K{xc*+0fiz@wumI zc6d7Ka{BS*>oqdzyJk-`KGRok+_6yj9~;RYt9f 
z_|a=cTR(2dBo)d)m&iX5{58{f`bR|~oQCd5z7lbkK%m=YQ0xp1il)hjkF2Bkm*x;& z>|Beox5{YP1;4L_bXI5cB`_!Tw<;aC^PlR2)M@YadUai!!L1HupHNy0~PuJ=B9EvXUy zqXxAiVCJ^wc-9xbl=FAIEB9zSVtz{18ydi42y%`h3+Sk|9N{v1L2FTeiE&9K%acD) z`CDRBTJUthp0>BQ+`4x>hER+v(T$yxp%u--_nHX!nU@2;c*!h#SSohSVIVTrI zF~@H=69q=|j)v%QpqkEJx5UjM$C=-H*2#-iI`Fw?u7t3Z;}4bP#-RoMtzy6W-sbq@ z?Izz8P)!QJJepKa0<>W$G7Bb1qXK`YEoV)02pt*mgg^7e1C0eZyZ`^oJ+o?cB zaD-h8`L%T3>Jdt3=8`~=t-v`sV=|bvTyi8*kHlFNSIQ1HE3|7BsC#o9|JS7!=U@@8KAGQV>?>N#XHP$GDKCg5BC+9T4eSw}a_NOhT6$$_XZ zlI}$z3PVuf0k)om!lf6ZNYKYOuO=ETsDP#<-}T0fKR6gT`l)(LmC&NXbd&vBQx~l~ zb>iJ-X{`h>FRc1PX&UA1KpsypvG`)RWBilx=`N8t|m?d9L65NursvBGGpQf#6O;>}E+yEs=FS4i^t!${Re_Ef zie$B2C8~fPr29KHOoYmfXZ}!2Dq9U6AIWp*VyQdX8=p_7yR2whedtP=c6~JSl^azH zMG5RE;+i=k<#V6QU#4&SRKNSeXcbMpDA&zI{<7EDrVAC$Du4dFbx^?%TP_&e4^My}Uf<1Zd2$w--Jw zUNLcUI^>&^k;^vbF6U_(U`&17A{tbu*OX_Df{>n+qV$7r z{Qd{-I?R|$6;wmLsLD}P;BX8f4f1xW>Ztx(Gr^T>roBJt+H_r5O_IP^&9XlfO+!+* z>=4MnsC_&&iK`qQS^;~~5YBS+)2_&vj}hfoipYtK<*#V=E^$D0P0H!%{e<0evU2}l z<++QLz{t(TE+2YBFnLOu0VG>D8z{BoIkN|@V|A1#p-O={z$Rtj%eNyCGBKEz(9xYl z1-yNK$hfP+Nxup*K(sAG_K;VYG%>PkiB$6`SJ85}Sc8*C>(bHjAoTLc)XuG0>vw%a zz$=$SYRBnP z#m6~<+K#~yB$bcrTDQ;){2|QA${EcHv9;7rs9$(iyPDec5bJM?aFS-1=YC9a$GRru z^p`)n3MnnGl&@wzDjBdx(cgijgYi#tOUcwRi$uZFZj6qNiPN_CkBg%$IPzv)$i*sv zzZD$`a6nd24u-UmA~C88I!Jh#M2+1N8;tIQgRtZq!*f@v+_QjKYX-aB^L&q_g4l{y zF8y6O!MO{9sWmqoD#mwmF66gpVKA(l`I;d}POz668HY(eMnzYBaDQPfDFDC?u5kIy zlR6ezg+@Z~m0L|x6LD^Z<=U5Wq%TI1vpI&(_wdO8X!xh8~QPp5?4odF={bf^^EBrp>bz@M+oMDsaB-n*6B1hb< zu%=^grQWzIL)}bSV1r_}_}I&Oxd(9CH)4}{XgI+ktqr6qm2bNH-@ zVT+F`-W={S6~~oux4}F9;3N!zb2@eB?$A0;#7HJxUGY7NW9y^-hH*wDuxVSyZyeZZ z1Ffu9Tu4c2o_H4~o|9CqkQ^GNug(%USRP2vEmD&ZP$-2B;+~Zn%;7>CMLhg{lbEb# zbI`5e-+Eu-J4YtTLva}Gnrc`SNy>F$B@3FRVqTnefafu#et7ZAoZ`yR=5crtdsg=={+JEL0FY*cN}ExJy{&K#C| zsxFkd^>D(i@@U}cmSyye|B)RhHjMAk!MxGp{iFQ2A&hmUt4?RD<4>BN%Z}wI7PRknXOxn|`C%^>oKQ*bf7|bTOHJg<;M|6dB=h~)2tcRY%LwHPE&T4dLZ=%jIjYx3;Mr1tPE@O(^lRzYr#j$Wpf)hQXZ%(S=6rt zNOEtL4XG%QzBsG)R-zBIS^HUVeU-^@zl^LwX?~p(t0MT3aB$|nO%3A)56p#8Lbb`C zykWmZRn^lA2)0zNDS}9!5J)RcZq~)%^i2*$KOS@za^i^GkM*?LWYA#OLy2rPZwC z4A7e%eI|)R&Au>2_Nun%?BgC-5e!PbgQgCe9vyTnlh9eWe7IMVbMRDnq4;=`9GDIw zBMkmYo9VS%`9h^b_3&SK(+;|a$)u(e7p2UQ6g@cWCfyPEJ@Eqw#$q>;pp?zh9A%`} zasD*h6S@i!8}y)1Ouzru4@IPWG`pfNUAuAP^ZByn!e9``JE;9O|HUJuBjl_=K9%T> zSYv+omXeP&UiWbeA*_xn=0iTAzi%YIo}z@sueH0~Z2NRKSGg`@!%p8_Be{v3x6 z?WC`7)=;CDZ&n9g$45h9q}Iv=LH0LE zt;)QI*ZC3XqO+1ORZ#~0D(K1tN61RhR|Ap!pqW%lC=S}Rl_dJ6S&7gTVM(I<{{W#hSxVSd`mnc<%qE4OjW_b!q*22P8)Yu&xAvwZ26)N@iRi~XL z1=?G8Fv7;liBU8pY2$F40x~#B@=0Vs))UgHjFkAT-IP9%w=kt|i@QNwmG?dc2vzq3 zWV*ghRhft&{_&@4;c(Kt23fyDvBEr=O>TyjD6HT@Nz6eeqM6O&qpsRh%uK*h%m7Vn zC?8UCuY4I+GkQtw>_d?IxyDIEFyCxsiHVXRy*?9QM1{3{*>_!0*ey=-;yyJ6Yp4Bc zR;he`sVX@jS(Yt?jmltc=cEb~Uwcl&g1T&HVa`OeD0sofK{DUv8IG;lA6gI7`dW9{_k#UGY?-j!0M#Is<&uRIBf~uKpx0Bi+A4F>r z%vu}1FFaOk*+i3e-Wn67Vus$$PQamZo&bM508v8I`{lQBvAtV|FMZE!KFis_?Pm&0+rv_iBDm^C?JC^EnztVqt`N+qnRrw_fmIG2i9lEp^p#rE6cFkkw1} zlr~h@)N&+lAV1JFlV&C*kMRB0l!TjXd#gyamU}ZotjG*AT?!uL5c^@EbIjJy^GH^E z+m+wWJZe)9heUPrduKyCN;uNUI+fC(HDCVi<+pQLsQuXaNmU9jV^8OeoHpPLEL7oJ z@Qwr*9vN?_eHZ1)f09#z?Uo(-z(qQetMC??Kb1%fyL0LOkUW7n%lTNb6v($e zXgNW%8>)p=@=?V#ogSx=4jjKxLVlR!R4d7)ETdWJEkt|EiU6r}lr}6b?w?7qwu{Nm zbD_AbX))pJU(MA}3?-VX-Mj4>8#+%DZH50%T{KAZZsb#7weNuMZ*@OWmI{*091>a3t~Iy5S{8tN9$TQgK8lw@FDQ`Kw&-Ip{`fDu$9A_QVe_zo}&#LaIfnRTkh~~7bGkHP{*B-E;9Xkpc*V$* zsJ^(>Kl5A1{~rFPUTZLg0D#1E0WrEr{#9( zo{~qHg8=rb@*iLVRFBEhVB zRFnMGs*+?rH)YpOW>hB4jq1(yJ`XDEFSOg1@oDiU(sZv$h~#|Gq0`>`k&6eb8kj}z%uZu zaL1MXLl$D5|I%aKqBVy_@wcE!nI6!k^JQ4H+of<|Z*HQJ>#IbiTVCE21w~Vis)REB 
z7P7(hb+dUgKXONB0!1b>r{z1y?K^2Q(8Eim`P{ZZT%5{Np#WT&&!Hc-P|~hl?`odc zA{y&Y?8+tSCod;0$w6RfGpJ9LtJ>${`EF1-R(~1%?nhpglnG5>yF&aB&}LEa_@jt( zDt9fHM8Ru)9WZo$KuT6wBtl0>GI=Ng3}i{>oLDq52ECH<7g5T8uv=rSj_U1#t(!rv zGMJv0>!mmbeQvDL!Rt2sN4RHrZso4L_?2dafX15EZ$kLBghHgrsQ5U1u;eczX~)UK zBn{hc7QvShJRz7?QSmj2+Wy>V6>!DX$*9 zaQ1OEq6cLOb1HbJbSYFDSApK6Gy=(Rd7=*@1Vl8XuEZ??wk8LO<|wLC%uvUzC6X~% zmZTaPd07f46lYZdb=3JHP{KP~!z3Gf866f(O^u8za4jwFjsfmHs7x>$4I*^rp}+N12uX{U z#R~xhCiRJH58S-*V(#3XejVe<(fHr}yucz?I%Lc=#{w6k0%2I;dI!w$@&`H6YWGxc zKOMiVm#%`|fh6l5;KHtgAPTlvHC!o%4$InUrllJl<Ls&A04B6urNav2F#`3Wnr8BB4`Y7I+$r@3p;?;wd>xx8F0 z?!QQxr9olIMk`>af<>G>> z;r+z~K$@E)77S>=0I0|xpwc$o=?(0j_{VdvW(`TJ>DLcIt*N6{I*Gk_NGa% zgtYY#Ri8CPBJ66fS)8RDs(8AyZC;q^n`FWcX5dYTW-Qx*Mr;wTT_-}RZTJqndK|# zXr$@OO3A?~7Uq549iHYsV7b#GBmWGT-+#u@tuLw$p?>c4cgx>5HIUbctclXY4(OYZ z`*qkIzwKF$)N>@~fI_7|%M~XYNTPL}t3yi_&UpM^y(WdkCccirw%L-XeYcpnRU*r+ zn>Wf=LRkd83cBayEXBDH)+CGt%okFsO(&!$ku~hL&T4;X05}X+pC6w*N&8q)EbkmD z>vP&C>a_!ZrstualK^OHu64rj5)f1^Dp~IK%4AmGLzObld7yQp7?#?G2QXaF8dr%^ z5%WY1%`KG@XzlFszqG*h7A~R4*m5y^DI!a^@-&6(3s1}<*;*Pz&{9P-wKKylnttX) zT-WE=o~WgVdOhbb)wTWgrJJdfs@a=w>xcP)&@n(1D0ZC$!`AtRY-25oQdnE<0c?{!jTdlWHU2{I$ z>~xM#vy%oFnCc&u`wzoPeIN7&d*yPSWS#a$FXd$)U*ovXWY;ARu!_Jc|*c8BN(o&f(Nu1xghw*rQmG-+jDIJH{G= zn55pJQd*i9lJulneByX_l7jwDfrszv@5|d0pS?`3jVxkD5oTXIP$~heBVbed(oDAm zQE#BKZb>Za>WX%w1kK~zpj(JS2HbD{GfBfpE<^mmG`$7`bnUj(sC`}ewSp9e# z=q%*dF1O4rwZ8$1&c;o=RKcvkPvUOJJkG7$8?{h- z_%N7F{^Ojj)}^Z3`x7Mber6)U;htaycYiq1E=mPslbIPUGm)9{^;B$E3MRWGgY_%5nA zlPxdK8wNYDc|aHVicGtBV%}91NLN;uzGw$Q*%Ei60>J9*%39LAxF`M;FKoAV3}0~h zj(a5CweBu6uuU!0;SZ4s48OFYo{|+Uf+5myl|U+jhUY5X6ID^2+`Y|igaA|%063>+ z@buB6q)!f|V(_b6`8;35AEBay_;uEQo&hPeMVW+_%xJO3p((%IN4IqlS869%@*DUMX zd7{^@UDNZdD$0{_Sok?418%M6iitOKL$wVcbbNMrZX+veJ~$HI!%8^gP~jJ~@P3%< zKHZPn1`;|c&(csOXG_SqkfJs+ln7#fubalt>hcbjR&$@H(Yt{G=Tg_~-)7Bsn>um- zpfneiyoJB!+X?xRN0o~ozvX;o;5rjP1|%kDWsbbuAa00SYm%S)oB=%JH%cQGA&2+_ zk^VhRcz0Ovs_0P}`kvbC>96B|sjET!h}La6mq*uK^OZ%+^vhKOpTnzaroI3LhX3sU z{c(A$18$mx!cJ1uqiGxqly%j}96GWzg%JfnTGACgg0)5%Vf4Uy^r{|bND3b6`0zFf zpRaGKov_$o?mdhK1GDy!41G~c=eE{HxwSlcv=HAMdLg?0985kZI*mGo`=n?DfRtU2 zG;5lit)hRL9qgi===9LV3ts-5H}L}-cN`$s&451hncs*?@xYdh-|srN8oy5XEb*;+ zSa!uXQm8?#5c`9m#z`gD(7#iGSq*mY{=z~|JVXDb%PN?ssSM7`FTOu06&5Q=XQ6i~ zq*CIO3ibyLu)ElD2&p|!8)|JoR}+b$EtFov2zo)ccb+;7AqwXKHv(Simrl1JIbH1J z486%y+bTx^XBJnOH&_5a2@PQ}YPj}GnU%$)U_et3H)+I>M@me~qk_ZH&Rl-~@-_XW zT6Rg&`xVsSb*iqz0+PBnTjN2MZlwVRJ=V25ko%Df4=kW3S-UwTK`l2NDlzru`*DA} zswUqdo39xL;}0;9M1oIWNis)jwZF>OP=1HA{gvj zu~sROe1}T7Ii5X+GqDC5Gbs+$l`meM@46>>J$pA#Ou`XZ&HQXSfPU>hkLedg*Tz zC(+LF4h0HxshyBkf_GFn1O33qNd(GVo_0>{6zFTDq2}+crASnoOt51}MxLcSkyD?@ z9NA)dmbc-Ho+3-s5Xb%c3xSA;L$eh!cuufOqn6wk%kOg7^-Ylzu;d>cl%@_E>FJ-l z*@iA_nam2!Yk>|@zMID`w~)ebx$>z9sqs?0I>XvFd^Z5swQVB=Xg<2r;tv!&0J~cr zlg{3HQrs;e!nd+1bE!NG2c95)~rK53*`MtVV0Xf_hI|2E<=XZ>A7{TVcadnkiJZ zw?aE&2O2LIptULX92m}}ay}yAZM|r<3XY|iB)pqcPaP}4c=O((YM35(8gIoMP%5~q zsIkyju4!@$1{I9o9REZ!p6OS~JRl4WG>+=1Wf74qIkV7j6+Te9j!shJ7twREkSXoj zy9Py4akvN@qhc~i#`o`PD);UsA>hQ;`-pRguGV^a{{9>PlB**bE-rm4%&ccW8XF8Y zeXZ8Xz|4KoX=NEX9vi>q`?2%8ALZK?$8Y716OoAo{iKyO)B^Jem}OBnEfL^Tkq!K7 z*NZ^`tZ@GXiA`d8vqtPhGF?!qS1+B{1v3fYB`+myE!;$ZfI_CBb%v}lHyZ&b)v^|X zG^I1EcX=VW3DjBVN`-m729WZod^t_cik+251vH zN>`jtS0UP4=#qX_UqOQ?0}PktNlpC=pK9&N06qLXj~fG7*`3nt^xXkv)pG+U`KY*v zsP2+?37xB0ZC3?Hv(dxn(?bH^3H`W`a|C z%OU*FzmimeX@CgLk?wSb{|emDwU;aBvj2E{P@S$=G$x~+3`AjZ$b|Bb#EE9j%$UX^ zST>q8STM_QK8$S9sQ%Ov*X&O!^>hVt@|Q@~5Rq=+{ZWO^Bv_EM`zE77|5QtKe*zuJ z@ouROoujV3njjP-guebHeloe&;`F=^T)cO*iMdl9J5H|uNm9dp7|X+rDgw|~PFjVh z!Ip%UF$=RQN?9D7Sin6bE|SNKTSV7SN4Z#)EL6jDc$QnOGIIE-| 
zq&$n_=L8hNA2d)qi!N6tVclH8e*%2*&%_*_j^9k;!UqZ}Bmc2KMEF?w+#SD_r@A8S zUH*NmzfA^0UN4ozQFam(mOH8>N<1L>FZG_mLdmc0;vyWCZblVOY@U1p3G2r}@Q?EP z5SDkDyKrsHQH!lK1ANFi9yE!{ke@#s?iCF9VEj>z`xT4`pZOJRj&en6wrJm)dk%4j zTcx2T;f_>aN2fA8d(zv^<<3NXGbC-vy}QkpT$havEG+5s_PrU=^1Y!^sk{AcTaed3 zD^h#u`Fx1>qxl=*c>U(mVD`IqZqMc_*@)!1REkDMrMM3`7Icv#Oto|nBTFKOwtH5| z9_xK-9$iWHR=FDdh`CRHIs#FwYdyq)qj?hwHb48PtR{pg)q9}cNv}aX0tM!B2EhAC zaSAEIf2^p`4J?f$7efZD`P`OP# z%3gHKpP-_|vS;)ZMejL$t%w+!gLL!wqjN8)dL3Y!m#=?1iFpo3PHyHj_ikd4;Mt7k zp78ra4>Uqe(hjVC-LbjP-pr}e?;!coS}jW<&YTStqc1TpP3jzVDc^i+{BiMzNyfdU zKI~Fkp*wKHZOvdFSr7F*(sP;98C9bp>)a1a)N&Uum!?a85(HH(aU$7kJdrn%WaoE|E??lI}CDL20S9{*Uw$7~u>+EFDr=Q6W zP=zeJ*Hk~M-9Lp0S{@k6uOY4Z4PvlPa>73O+oYcP11t<3B8OAuBKFioHotQ5KU$5r zLyTta$fAI$`o+FBjROl1hNe6ltIqUl1gJCcZPsZ|*C`^XSRl@x zR)wMab>j|}S8`ysM6{ z?eL6o;WZDcZMXVaOQVxK8E$hwC8MESW{F%_Z|T+W zdGivKoiY^CBVqR&r-1%a$^->IbFF>qnV6otTLpsx6jl*2xKMd_5GbuT7#WL_pVouE z!F_hlM9x>3H;Am`{gOrir5MZB( zjC^FOZ^tr*e#!4I&wn{by@kBb$4;A@ePx6{pk>Jj|EO!yM?&Pc)DkSolADY^nEw#u z^5PeH3NT;hpsf+->r~W^`~F)ECsC)La^i`jj*cxGv1fBWS;iN+V-yr*>@(QwI;Ut= zIElIeL|3t#auRGG%2^&Ah=``qqfn|qOWWraplN!S?o}j+tfZ`~WJ=v1c6NR#Uu4iw zi1?%Kj<(z4Qtf;IRpq=_pA?YuWMFN8LN)oPl~IXJc(Xb$<+v)+jY6jF7=JuzUHSF$ z%O3)osAq|asjOQq7N=6QFNMQZDn5=DYIAB!au>im2JaTtN5l7f)d zRJz3vtQko%J;VW<)89-)rOjK!!e!0?WFXSQ0?+a59**MVAp2~OS zR)RJ?F5$|rDzz#iS&;!{zvncr-gSh5wi~Zyat^M(3vcC&W_emG{j=FTM<5fhrW%-` z#CbKuOV?i2gD)4TWSV+;tuQ$d%aR|=q-b=7V`66dHoaVPK@sNH(L%6AUnJ5U9h5(j zl?>Ebi^6A`B}(dP=3RBy?F_O+LL0N8frb{nPv(@PZ|z|Pah8NavFBqKJLv zuB{RLCy$Q&)D{JQIM`ESU-5MT*x_rM;iYql`7%sDw&kC%Wvez{W!U9-4(N`&95VhL|{c^;2S{68h%roHqjXg}Oq^x#sH|n^Zk3aBxT>&SadLjR( zr@HGl^x$!(8qAH7XNM|1QBz(fTAwR=h;diuKaU?PwB5Y;0{o=%E-6&dy7-hQ2)ko#CC4UoznQ<1K2#gmsU~XcO|%iz-y2p*Mr9JY>sF@6qC5M<@6LXu zojB!LL@KOvqF7)2`119_uM5h> zi{5jqr4*{(EiljD=Ht^iJy=g}jZAU#ggfP+L&vuQ@}-xr=QBv&ER{o;kdC$6|4`& zf6z0Jc=|b)co15^z|0Fs~URO3XRv4U=2@Pz5pa z;v<~7Oxi+v{phtK03b5kwU->6z78OzF6lnjXzbg?8~mS3qKS9nXil!PU%uVNRej<- z(tICWm;+3z1(R*VCHhzEHMrnP)}i&2Qk7FC|1p3<{nkxk1HXCx73kG=GaL@w_Fx*< z%ND@Y-0I#@g(waDvMO#AvZJm{gB9uh<2Uze;b=uUJk{0Pigt}dAX&ki3aY#wTI)h$kKu)z%VOo5!5*hw!rbm9H1E1o{%zB(valXLLS}aiJT44$&djwQ%?8N z$-c9VmZjC?)=rTnsx4na##-Rz=bZ_O97Y9nj!xk5S}R0e2aQ9xx9fZQ%#Hl|)?UL5 zhJ~T?mFF$sm>BckMy^*e($(cWc^EVIN1IH;gSwMy-G?e{&Ew+VFaA!mkQu9pVcC-^ zHOV8*TTFDC>`ZO-7C5X>lT{NzJpVid5ae5nnCc@}y?5#%CT~HK#LS@F+W?4Lts2;Z zy_-^!TjRDl@WJ{PBcT^+jy`0{^68zU<7#LS=2&>=NVe5+Kv%i5VAX@Sj6V*@5$(6N ztMQP@iET4Nj3bFMol~>|%&*_+_J$Q|!JnhzD0PM9N-_DX9m{2PeNodB1c<5ny|c2< zT%We~MvjqYA24V_3xmD74( zgeX!ZxQu8cdvZt$pSsaMw_c!1skubLy5qM^JBI*Zb4TDqQA2)x;OU~n?iD(lDGiFn z5ds4}Cvo|i8~X37&n7WrP}MB4@mi2%He8+q>SAgwGJW!mw}h|C=|i- zHU5#&9}l++Q->I`l!*(V4Rp@_`$^v>OBX!Pw`}ggkMw4INQHzW{xS*3V^rwcb)QTs zvK{wZ-uz;BXIN|#EwRDpQ(@e7d)l5cqSU-c@Z=s(IVnVQ&?Qm+p?((@on@BAiwkLD zIq8akFVtKliOuwH5tuVsL&NG>tv~W>XC@cN{pmYk`^UD0)JUYUC)l{0*1A)m4s&PG zONU2PwA4UsNYB|i&wf$=Bn;bXsGq1upVByzKxdC@%1e*AwnbY|yhY|oRvh5pxXd)z z0&v-q)9kiT{}sdo&1>feB_xw_6kgT|LA0hyxN~XBv-f5+oa!8!7jyb5mZGQWZs*Nn zKNe=&@ihd&Mp3?;_<9>Xi^Cvbyp8iGd z$?W640aVwY`I=8K8W*pd{pM7Zu0!xbMe4JC9 zgzUOr8zpyhSj5(T)suqw@#MDJ6!ZWJkd#WzR!*YxW-X%dkmV6zif6BYmD$ePZ0_T6 z|FqcN6slpXG=d;+RQ%7Euj@RmgNshqzOp=<6x6sIAysPyEackPbY9QCVj}ErSEnQC z$2Kzb)oE-S&ky*THl9M!xl~e}S|k?913XmY(B$L0daiq9%Dnuq60W(0cZS6!qaAq7 zyF==84(ifbn$`4ejduAr7r)B&D~z73aUDs(jmO8MH?58gdD&Puew$@2lVjwUqsbG@ z&0Sp}SR29QKfP~-Ow~Uwzo+-2Rgl$o`8}gB=q); zp>Md0B{=m?tg3h?EDBJ&x1-^$(X835vdM8F7NGc*KhcV50kDxy8`sb$DNJg0enj$8 z|0pYe>1mJ;CBc@q?Z#Z#U zpA3~Mr_t3gB>4eq586uv#C&8q#y%QUk_PT%-1h42orBWsq$$5^Kv}gDGt2o$Ve>00 zeGjaUF_~sV#N#f0sn^5xDwv2$Zm;=zv3pR+CDYai%@eT^t!M-aO;VX@7~rYVQK?EB 
zQM{LW1!Y+rL_@7v19GN7qLjJRHQx|Ow4tDIec_!<+mMPQY!>1;_v6-R{F9`50*ho? zX(4z+$`;z`@0ccg72+Nl=;c4#JpV|4zW(eBb$FD0$Q`?lc(EiVCnZmX_P&B$=^cZu zJ*Nq5c5P_Orz>}o{zN+@@1t->#Ap96KH{og_4-P^kRvT$N~p1-j)G+5r?d;HODPFD zibiu7ftsHrRHaSvYx>9M`YBT(*K+ZX7r)kbU@pxXpxHnI zHIEKajx0{JN) zojNozOTd)JRi>dtcr-o`Cuiy3(=q@9l(-_=%5Bk1unQ8Nt>9;F8=Vd7a+$MU|#K9&Q%vU0&2%p3sa$UxpFM_mJbv=Gjcobyl=RFFvbh0kBxE;nr?$ zT*@-5Wr00#&%S&^Gr~MYJlOQtk)s|FuGl<^i+}XJDCBo{w@f>FX^tdgV;=5KRaXq9|Qo-}I43v6FjvQqv>`qc8i-ExMBOSqoH$*rD zWs3*EG+Mio&A{|euV@injDSjIfxxVHVCVVkSQz=X*)XcO;`qQd84?lqphl6!dGOrL z!2A;q69_0b(-9rVQ$cOjZMlnkLgV9v2pVGTdJo8}1V}0KDMuL4NIcRNNCPjb7%1;o zUjb=N#W@YONqi{}8}U(=Fh1q(Ny&Yjkthr~h5;LOlR+^h%XiXkWE+zut%0dV70wiu zmgS0grozUSaGyt?eyok?Oy4!HXm?|hqKnrMUr){tk5c*OY$2Q1Q8uSV7y6G$zR`#` zo<0&4%&?*I_xij}h@_=-L^lkz<_RZxRbzT>^#QB?NOea-Tr->{NyO$Sjm0n-cH$hK5L$_}d8#+W>l}_6~*fBA$EZr|f3(ct|Vv8vUloG#Rnbu@9C`woC z|Iy1yfKtsbzby(|MuE_N;AIXP9v(bV$->;O+k>G|zO19wa=!V3zA(G8vb3V%q%d$w z;UGJ~>4c#b?2?8HfqU&y4Wvd?a;Q#|5rH7+w>g);(xwrG^xec2rGGBAjDczJ6~K|o zP!`-RBAI&ZcvPArYHFz!Q_VRw0G8O|C}JH=hVK9F$4LrN@dPRS%{&r$LmaJ zOOMofkUi1t46t~CY*&6rGw)_+=GQb4;0w+(UtH5{!NOo+mlA^BQ^v~S2nd-bJj1>r zLam&P)=gsveLFrVsF24CgP>lLPV@fCtfm9Wgd=vwX^^=uP^0~~$0y@An=chnYC7Ox zb!7O0_^OVK0zk};%W9c3I&IjPE`9@?dO0XrIVV}i>hqIs5<5iYzDo!W|&4*qGjzHtJ25OS>16WNnxTS-64WS30-O^lY9bESla2CRs{w>M$uh6)_VbE%G9S?l`$lG%Jph zX-uWl%6Y6b*aQhHj=#WdT3Kd{0isVj3D!l&=7w$sRy|=IBG7h24uG2H(cP1GMAPhT zkKfWyT(hMYQChizqHc#mC)d2*(^T=+t<)FrOO~U)M^UHZf}Q=ks5s_YUsHFLm;e}* zE}iUZW@Ph|A}cmZTI}@NGp}lAsf?97i;%`>sVk#aQBDX5myeA6Jnr=;pS-4DDViqz zJkJv3PODQ)1xf9KEox$Udd<}R+d3YWuErRz`6)~vpA^xsb1NdR4^ut0?py% z$hr-cUP22}H84#US0P0ms%{5(H1vFJvanydG8gm-7v}pHALNVY#+5R~-4m+bZ|NS} z<49Jk*{)RYvTL$Mx-62sGJ7MOGs%!>Yc2l=)S{OwV^EVDf26o?9X9<%d}*Cr$eNIrSZeR*U)6hk~bCZio0?pUY2fQ#>=@KRDPymC(RkXkL*|;5MjQ$IJA0(t$AVnWg;Y+Vjaf zGqE^BWH^I-{z^~O8*%sKF3gOa5e76TcoR@U{4;VhY!n2IxK9Ay#vj}3rDt{6(1EjF zb=cF=5>(rcfIzWt5{^(v4x1vr6c`nzt6QD#pN^7!d`O;9l8|(ZvOmV(&4tSspzq-8 zOr`GOl5JJEPxCwi8WyF=F{7XJ_iinD2R~Av+Qf^pVhM({X;?aiV9)b4pd1})LNW0W z6!a~J^*xWpOi78-SSXGMMT@|b=_RbKy?*U8e&xG(+wC=>TFPq}MP*{5=5QEwStO z^J86^%>Nkl%WF-jxAfk-4bidAYfI*BQ|KZo?bNE+%TXOU95wx~Yvrg^z=M4Zs2~1G z!lQozkJ8U-0m3@QZxoe*RB0;WG-ipGblk6Y1DIa;XeLG#p)z>6cF_?uL|&fXDso&m zNN{<#|J0Ot(OTCWm!rT`)z~LTd;drL>EiG309+~m>`~(rXAT+zp&PMOrBU|=n|Zk} zXoDhiBzKi+tirr&i@?&fa)GyxhU1?ERS+-wp8l+$x#^h?h%x6Z(SmX$=!rN(-O|%; z1$*|pZ~#r_B`5geSZumlxHnCN#{o@s@uQZU5>1DOty9gvx5!4SqPlY~MRR=DW( zC`GJfxXq%!sfZ1yz=rDYrL61&zGoNnDhb|DqO`NoqAv&^*@4MaF{M!Ga)+3@y zkl@?TVA2-5kc(38@$vu3;1_>Kpu)}0CdZTGpAUAD7^{R(k&sGZgmRphC$Xb~*5zL2 z7;9OI7{jKOwSR!I+_F#jLaPY5Hc4+&B_(_vB4|Np%iu&E<(#xBpf}rbQOG;h5YjCw zxdh2t5)Zd7G@omUNZ#dl^5F2c(snx+M&7WW0WSm=k}@#L`%kP>Z1(2F ze?uTd@qyuyYgkb#E0q?P%P{N6V^ba793Iai9Td#V&`*g78S2_l_5LZSzUrOHs?I=@4%0)Ydq zYZO8;(t7k)!twpq-kNY@;I1D%eI~yu$6QP4Sj8#QbNyyi_bXm#?yoLX?hLzK_amep z4wvzJmNgUv8F_(JY~dD(V_pALo-M52^~bbo6usm(n)F`(Tu&}rOdKMgatGPpsP4u4 zZ@J1czhDgJ)K{UhW(8hJSjJ_#Urb&_U&uSm2;94R&5h5dEbG^hzyHB&c_X&ErcmdH zh0B*3W#ynn>4Qc3(nIL_c^3#w9wG>}U1p(^k)l}KjDVCtXaB3wEOrKe7eI576AwwLd~hoEnh1Xar7{Yt_mZ(Fbb z;JtO-LMb&6Ez(?Nu|m`I z2fC^tOPpwT29KN1j+CxV>&Tl;j-3ulNf{DA^g2~I9^RJ~ts%y_{=tzq8Xjid7OenN zJr~y_s!<6#GH$Ta-&0goJBousKjzoIlI&Tef}5Ph)DGOG=;yYz3S)kBvebIZBu8FO z>@!cBBp9s+KH^FEcKs}G>mn`*z3I-dnC~L@Ca+Lbyw-OC5*1+Pep!&r2VpBJB~v7n zJC#k#?+ULO4zymu6}~Hfrk-apEn(CUH+=}Yq9M+2HSh-MVkdwf%HTzyJl1@UXh@-- z03Av}h6Pa_GhBMq^Uu@gP73_-Vg6HKSt#0xdhF3NU(Ib;URqtt-I@+FNb#1)lDmHG zg(Q#2DMr1>(MUCg&|6ZI+NNhZsrDb$G>0UNqu-XFz~YK~NwAd;po$hE3d9fo_KSCN zcs!DrOU)NgsCPN1i;Ld+-qCq7i4EVwZm-Z$(^TUNZUKIN4~LI!O1fNYfB;)ownl_) z?5Hee@O6F~TykCk 
zS^)KmoHi+2-ttLQH1MppijZ`a){4F)BG+f1Eubo`=u5PV1G>YvCX5+IzD!O>0dLiu z&)pi>+WR^fO~;3q?$_Bbw4BvHGl|FnUE0}$*5S+bIg+shGm@_?&*cH`{QQG8hKN5e zgd-HA5lc9q#GO^KXVB0|CwE3$dEKMYi>=!H!Y%4{XytgF>vkUY3A^T>g;UB+T~VM( zkACfI*K;yJn}DO%wVnY0yyc1DB2vO~J_Tm9n6w0ai$o}~iwH};=}Ls^uDw}<@lWzk zM}y;nWK+GprR&k;1Rbx4ojlsAyhMl{B}h|!vR@l>o2XtnlH9HeQ~AF{fP8D2l$OZU)LGob#+y`|uw13I==%TPx7t^F7Rxz5=cE~qGN9wm*j zH0=EJY@Qg`x4`{WeaaC~Z^!TE14tF?f{q-P{@zhMK@a2CIF20N_ZT)Jul51e8E51KT4!3Ati*lR5Y6$S`*BZ5)yt!@e7 zBEpQg_^>vY0D$1Beq7vIheni*Ea}{cZAxQmy7yr>Wd*EuPpUz&y45$m8NjMhia^Io;gLvB#p1f{A%Ur|iP6Hp~M z95%TgboP>bTdH}Q+X@u63(SANtePlE$ihu7{XwbBcW^2sVNfakWc*WY|2&}l23~mP z`lEamnh-U>jTcg7uiPRGZ_>S@uluvt^FAL&Yy~p8YbYtd;FWGEXo2F(XM54{g-0H& zYGXe0lI)>*M2c!VOR^VjzO8q@4&8XeRcC}H4 zxG4B4dB6x(p*f16yMwA%D%&Ji09fNNeugr=PF&HR7&^l*CBWI z$kOqpRvt&v1!)GQSiqHvSg;axq67&HeC7nMeLAOiKrd@myRCn!$1X@BFGVnNa>iRR zm$QH#E;pSA#kpU$6lan_z4dN6<5s70*un^EhbfSWNKHBRa4xsnrH5LnHD-A6FO7#D z|Djtg%C&|=k(6T5H`)Q(H@OMx?_G^ugyTx6KbGNb4>7y>f{yLznNb<&s((n7M)F#ghu@O@Tzn6Tux_ zp5hCt8b#F(niFb>vucCHJ7!(kLq;Z+q}L6LcyzqXWu3NyEYr;^e@||EZMl3-dJcNPA*0_! znwPKpE=sRpW22z-Mr1v+oF|yGJ}*V2irDRUHP_{dy8%tmEzfBhC$8U@uNIv4fM+nAs;&L$ir-4oR=jGM_$u^^qQ)su#~yuMmp)U4A3CysHwk z8spl_&*_z%%V)m$nPiUVl$XVJsx=vp3o0#1jZ!ZY;E@%n@`@;4t5ez+h&X>&+_Kz1^HljAdn14t#*W5p#(p&g|p)@3|&XkHxC1e3oPJk|l^i9ln z!prk~K%AgS>TJ=y>&#Szx-Q_sVFktPA+1|@8?D-G6>|1UgsbjX)v}A9aC)wLsgVzU z_w786dsTA}oM%a1^c7I{`M{rC(-+_~TF5-iCy7j9#$qcR>+Nt_zi`7LAMi3ql_Jvd z++#DeSV3aLxA*jEmABa$?U#Og@!v0gme>DKMT-U#$RjN@NpH|Rs0Pu*-QQyanIz|> zB~pVhcl0>BmIAP7;AmO^X<6u26Q|>#UkdkaY-7>?;P$ax4ppgNLRdIBTv- zkO#Digu(rtIfMJ*&Vm>o3|cUXBQc}Zh_}#zbY-MPuxaIOTpvEE+duD)^wn1WrwSCj~#?CZH4TjhtcmrSEp_m;^o8YUq%eqk|rd{|zl(2mex) zMyEIt1=e&33L4eyj{5QuCif#A(Y4QuH2ZR45~OOewPzl^nor7?R@yhpyE6XrR9*VA z?)V}#(R-zKgSjemd9#a4L8p62bs|DXniZ334pO&DH6KRg;?dC9RBeaHbLaH%_-#V^ zNi8ZNK*ZVV0kNGjy{1~B)b*&djU(kd921c|9bErpTvdP;Mu9Zvdo0e zV$u*=Oat_=zO6Pl`TD?pw=#WGGFg*wxj01-G;y$`lz!5@x6_0Q^HCL0*Z4C7>shF( zN7l1ILBKil`akF_=;f>UcJdtNp(j_DXhz*yWl*xOr|D?Ch00$Jr|$9i?fSBclIL<2 zd2oGlyxqzARTVgbG<^KRv*xlE;RcTExU33n)qhth-YcaW-!kjX!pc;)QmtpJaBNZyJ z$`H?yQmMWLlBmu0D|x}x;40cFSt$a|IFuv9MBXBwBDb&j;X>DH4%b|_ zy0letQqhib@Mo?+dow9qUjFiK%yJX-1Px-jWyQ)-w5Ung#2pDVb$^u;OFmz&?+$6l z5%SZF*xuU<`3`xqZIt*zjz5SVcUXTXzS|TUh@g|@r;Z(*VBMpg@J3RuKdHR4x~z+J zyOV=swYL=H^{y*?%yjMA!7A008zo;|h+Ra|A*p0I*jPa>ato3KtzLygzp%|%iw6!R ztXjg5ByAYeZuh+M?9`Xw|KsXyzazh@JKw(`FYmoC?kgHI-3@m09%GY|RC?%^)Iz21 z#@GgzN+qfEP$jBVQcF&=0OJc8h#is%fshID#7yj&VHVI}j4{qyDfe1=Dg6ulCHM2) z=etjdaaQP3)$ie)efHUBKYaJzu_>ZOmT3)`giw4)vVtGbl(d!e_i>aS7L{;vPMy32 z;Wa0PiKXd!T>7&#&u%fq!|3dMJu))H_9pdg!&jqbVr_ZT3-+e!A)#ar4u_MUnPy#1 z@bEp|d^SqSpspp~DN-V;{ipw=-SFs{coQa9>MJ8OD02}lLqk`i;$sF+#-vlC>Dc+Eqw*riwoX|R+167Wku>Vs3V4CB~|UC za<*hLL=G8Y;i**~g%~ZF`R$NyO@U?qkKBm@ZfS32lo#%XHKezhTO0(7b$e9BF1^#Zj16h3OFa8rzc}$LQJS zb~u1nF!w~2dqRA=Q%h!O90I}@q_W^40>_RjC6Z9Z;pHFThj<7pWZ*qm>69wFIQVs) z@|lSOkp_ytq|KeoxP8RS8x3hvT0ylH1K5fX}U`6Ce$onaW1UJ?OS5iUkFc?YjW z8eEDXXovVA&7HHOJdr!c>21HyIo}J<1njjKI1Ow*I4JmAURs>eq7D^9P1grFi7a8( zO0Jv=4f&gXJ6yPAo~7Je85|hU!94nBHO-Y+PWhI4M1>LB3n9 z$EKD3f2vq$@YNt3Hmg^BRep4Yr{P5ZDbmW$&~xKRH^t;3;$$Aj$xLyQ@;iP9yUc<7 zBk0o)rfCf*jmP0-X^H}i_35N>%DdW+}Kb>x2L_U9`K-Ha>9xo`j3hC^d_ZD>i4J9 z@qVY|<->?{%JwW!%Epm&L*w)0H(Ck$jbK_#Pi#vZeg;+py|2|;Wf=PE&4VyO?loIh zf~4DGo|-unQ}whSunSACUYtU{8`-1_G1GS83g3gl5?s%iH>+QHSZiia|sMB7D`Q>S8Kgg3W2y%@1iPWDkG&~!uR-p{0tfzq@cKX zyF@azsq%o3TOK6CFTGr=#Y|&ih=upXfylG>r^l-1dw(hNvV;#2@u|F*#+KcS_74!- z;9N5C4QeX^m#doW48qRWNc%zG2{+T_n+3(cw6W+tK2lpzDOR4?GNM}Vr*lmh#{qFR z|0_Nf97ql_o?bseQ5-bzie7!d(?SC5bgA*1N(*PO6h}RJn^b!*DA7MoS$(Oh;#T&A 
zjtWiiw-$8ctRX725PQTo7hitRZV)h*>Deo_q+sr%3x%#ZEaex0>trp1T@}#KreELC z1=Jle4TRU%m>6@6O)&$lq~g{!aI{IqUVSO4ycvfU)?W{M|30s2&F=>O;k&vnCZ?Te4kLXn^gfH@7Px!Q^4 zK4O+??vZgl)#r6R1J30D32Hh**~TpI^opQ3FCci8;wWA~_7&Afa$IDr`r{bR*1}O; zKRHL>B(kf_FyqYUa6^@LH7xG}9iJXl3qtflHw-vzv)boSX+at6x1#-`@ zyk3H4@#MyC3FMQ<7JdbUY2o6M@`&zLWtwWRi;75RI+mm^)dT<-qFv0omZb$SF<8i) zT*NVhwW2s~YID@Oh_xUNsCNP|PVOOI;K3daLb(JTW_hCZh|9sgC+|k(@peQ42GvUr z(c>f%W9#uM)~kO9L@>nxoCW%4t+{SSIHG_DkR`JFOc0Vyi3$5ijsoibA@Z#d3VNKR zSR6yYk$AiOj>fIBJXN8Y{4AN7dbeAgH=Y@AN%JIev*2Qkv5c011|*8jmi8rtD+`*Q z2w{eu}?V;-xGz$2K_aRUw1Id90zoif5 z5U_Fx)fe=?%WEc)b1O;>e28HdSRg2pPE)ia&RBU3yf*TT>rnSWqZon^5J@zWQ8S&I z5*H;3bnEMysLY3JCigM5aQrelyBGf18dPbtU5LS7^{tub324oPNg{N&sKLsj22HULipHF1s%8dU!<{@5+ogu-iWm4MPzJ zB!hKToXhqFhprh-D`UNpjNm44q=|P>gf(~N9;Z7Ol|(O(hzFpGf*+#Boj0?Jnb1Nd zZZ5|W=Ozp5iQ-Z$Fz@!xAbkANB%6R;}yqD85TaK*u(vge;hlUc&{I=^z3@(UwlZRiq!G9+9w_=5tC9W1-~NpyXV)W zxiSG$@!@fH3qpeA`P5LcC+eDcHjpy;fbA6W1)q0_x?@n~wYo}_<7IJt+Z|>u3U)i8 zjm|gjEpK>Kgmo(Hh09VFUHqd$u!YRQJh<-su%juep;e00Yd>H7N>gaQV1iIah5?#} z9fi`S+!i90fKT0EYhC=`8XpZDMOeHqfo(nSj;|_+s2cf15OIj8QK&SNcgf}Pn`q~B zur*s_%AJp2)AP_?V%=JUh?i@1awEXily9%KTj(;Uadf%~3Zal=dg$1{w2H9s&TN7T zq{6Xy#7gSt7-Bj|<88UlV zqKaMrty1%X;V*4??(B+>ScgAo#;U`pe^ooe8q2TL2FZZ} zg!HB2P$npa4j{*pD09*c|Bb*geyv(fDq`Y=KSPfnoca$6tHYkYFhcNCfkFNXzPBg| z=c@@)P=(ZgU}Dj2UPI$(3rQRr20F7P?IG|^?6XnAWYIW)IE5Gywq5Lv@e*(kKaz@Y zXq+AmsfSS?PvR(w?3?D(s&krdZkZQbhcj# zJ<{%MC$`j4Nvx3&(K`wxyI|;Nmi??1WT<`=ZasyRCe{mTayW#UupywEfRR2eK|=d} zIe0~_!8p>3@mS66MTm&guhy6(q?)=sG1acH~elMUZt=6iSX%^^v$oR7Z&ICVjre}zBHmaBPOR7% zrvU?L)GT8%>vqyZ{$)Dcv63cZz*h37$Z9G8v|fKZ&c-!NB)=3BJ42s4z0F+A-2sih zTq=@n5cQKZC+7D9@2_qA7!bg36c(&#?})^HZyb<`B?hm%_KAJ?0FgjLUl-{-$Gh^Q zp1L+14X0z9?8dxe8U?X5rzK_MLjuhZoHrS8I)3WimCiLUh>qiw>akt zOo<__f|ASQ38Dd{G99*5nI%*DxNsrFc>%?hnuP~v$-S(fcFu_I3qc?bv9Ra3jU7@7 z52LQsvSb9nvS~@qp-L+xGcXz=xz&wwkcyCj>faWT)=C6TM0 z22q8m4X0)48W-S}-tjaUT8D2fNUf%rDr%|q-Wdd9PU})ra^@Re1PCd|XUK{HH^hwl zREgBaG|w12`qT9>WUEU?G+=Q^7|qs@zpp+!*^lT>Bh}xp7(81 zjbq#Nz%zJGlxD+wTIkSj*gMa`RMdF}qiV1fpJ= zdR|-NQN1ph4qv4nOwcHinFx}yFo7SaBfWNAjQ^RaA=Qv;FJUjGQJUhLVukj3ZON_Z zpN8Ly<3bA{XTQ_{k71KU4KFRa?X+yG;xs-R*vkZDl9yD|CKsFBtv0O$;>-reG1$tQ zL;zV!{cy!5QCj~Sp!?r)Y24-#JkMGok0TX`u+fPBubNy zD-{owCk92`PeT#f3O#oht&CbDc@*H|iezjC@`W#$V&?nbYxs1DERYJ$O>=fI1BBF8|J2j^-@9{Or`zqo zf!=uaCGB2-MgoJGV&t46cjfd-=`tSl%sleuBNc_8@RQbSGaUR?j6GE}wR|IDpk<}W zvoA(}6p5h0ojB_FFqhf?1r8HZc#Zzl8txY!iB2sC7`Yhbu_UN=wnBpL1Ih5C+oypz z7jXIV8@hQdI*X5MLgxCkTNUS7XxFR7k%|LA@43l;CPD3OShRY~I4=Wh@(M|n znQ=2z2#%C7O%NK@Dk{Lv$BB|+3E8=|&l}*F;{Kb~lib1QVkt_-91URnN0${Z$MZ%n z6j3y`AOVL%=2SBnV|Mz3lOVi1sBubN{>Yp}H(n~6?S#Z2;NLo#Eew8HC7=C3Z7@o1q@FH#K-E1~x)tQ6qIhz&rki4SKsZb3P^Kjv$oBIy z+j|g(l^80w0C^g5N_oA>xYUN$5i1KtK|}vS96Ml+P!BJI(@#lpsylC~jeCnx5p^`dxiOFGH#GcjuG0w2)%A$xl<9 zL*aBxXse1rSVqqRJ|KI>vu~{WTeM}DKVkE=R{ZqFFb}*Z*?;iGm!H)wxF3s8+xLTI zbn6Q4Z`LSt_>R5R0`DjXa!7cTq~ei%?pnqGsDK-rbv9J;771n2oV8(BnyVXLRkR}7 zpBKp;8I-A&+_tXA``Of*K;%rP5#AbS0-@|kq@JFOjTX|Cv7jHOwXYJVX+=oi)F_hN zwH5>?VjKcgjw)V?b1cWu3e{1(o!Kq>{eeo^`$VEXr-W+pj5JsWJ(kV#htk<%J7E9M zUHh!x1|CzRY#$dVF2Iq8evmK`i2Q<5(2QkWO0z3i4MehWh9}(wx{WBiILMYC#00!> zVzJ6^beJWVCp)8D}^Dx*k5@tl_ERoAi#Zg;V`OKM3Kh|8N{KIB3lX^8ojQYC zA!k;b>nY}eaGqv4feL&s8#hv{LLYqQncV$k$p+4H^1st~I(`_R&`lAb-V(>Vkw`GR z+z^d~Tum25=rWv~8N_NFbRWVEKF&)*W{P2)2CAYH4^`3Z+83}!3O~Ah(_*5PH`0uXv&ldi0XkXjJ4r)hC^m1b9SguMjq9emrNNN^#i=F(s3~8WvwpGmOK& z8)*8d6mHifOWN4ntTI3-S+rAg%eAj~KrN*(_%i^yTHXc+r>H0#P%(z7yR{Q&RkVa$ zNpM<4E>C`*5DIx}5?etP9)Mn&(i-R(#6SqB{#Kr>s!?o`1CxT*PkcpNf zyiaT~o4lv{JxRZbXrfjSL`>d$@}ZtU0W77!m^mpQIv&Q3Lq{NSQdi(TW*=q1sdbju 
z()kW0#>8QRZfDVT3hBms%x9xfCgQIy%u2{i{%3**1R6{w5oajYmH&D`)dlQ5k7_HX z@A>XgX!0%` zkPA(^Rb@Wo@S&T3-3Q|wVvXYr8^{AFDpl$G67^$$?JjQxC;-g~R>zLTx>@UGdn~7-H$sd$*KwLD>!4YC_K$prCP9f>Y zCzx&`fRI_8i^UVltXGP<@QBrMtzPn9i|<6xJ+0m9y%i50uWxPjQ2>qFhp)bKSDxGEHSdx;;zgv)?b`#^(GJ(HZXBosnhL?VUI(vY&;R9fO| zZ$A=}H>(y`GJq<)i>li}VvJOcizaR19X`)vbD63$ek|CJt;n@Ad*6;5&(Gr0=EG=B zYcddY=%#Z#yraJp&HLS5B#}O`2eIUG>#3>qA6#e;DOt?aTN(GZqyJZz--`j#f5z)a zhMxHhn;VCF_#d=3n^XwbFJYvCralhVq{`D3k<^x#H8>M(SQb}x6a|*xN-K~(R$k10 z5CsZO0r6@a0Bph@h$hNnVJS@@0(9kJM-<{vc$i1Qpff(o^PkmDf1@Wd97kg!#r!UQ zbOl1ib|vv1sd_Rmb5qe0|aD}oj}uyAa8mJo}GZ>_WRQ~8H4KGeOp z@fqpf)9&VeIpqnBxyJR^`On-8L2zB5{(4|Yw^CqM@K%laWRM;YnXpHC&dfw3^DmNsu%R_0=8A?NmIzLjf)adtJ5{O4+xV@6LO6lL z!2SFuo=GG>79kw1=R!VQA-YbXz7)BQVU0T!dV_{$^~z-GgZ8=e*Dqk$mjfE z6jlJgc6zCC<5JfyO;r+e1!G0K>*Y`TaK=j&>9Qr+oSUydCDu{-Qt|JNYvJzYd(4|k z=ytHz_~0lstv!TL3Ig?^@RMa8VkvO!d#G~+**lRG)r`wqW6Jf{fK(GbOm8MLD`dA+ zvSi*;XJ@aSPYQiA6nF-Q&U^h7-=#J}apN$=jkAz|RPvmf zmG)x!R^Dy4B+s1^yQ{s4;)3GNWfwn#0|uI@|grzerTqrW+111=x86mo<&V))|DcwB|HzjY-&YR z{h?xo2Hpj{@ejdR*FR~t7=bP-E7;#09eG2h#-1!Uc_U=QVPI;!$_MS!WC$?IXhCK? zY;QXMiATig#~uCe^O5Y{#mDBGyI>QhJDJy|VJG|L@|)rjKrC;1jNrEnu2y$qKLTDl z7uDub*S2ybn9-z<1S9X)Zq}#0RB`d0w+}o=T!JOq;+Fs-1pDcSUr8?{cKO@O@A>;a zsKAi8`^XGWK!JDzFFW)Vs{l4qb9@&XO!C*4KsPPv>mgaJ7(^N>f-O9TfJ?=Nf$z$o z)(^kAUwS>m#~0uB(6_qMB#?Q@Kcx{dmnf?F?zpWZqD_{XUuzE4C)Js@BK@2Ob|Gv?b+ zLQ*~Fiti=ugI~O-?A1vLzC7MKt?5;(@V_`*WP9?hk6+gtR$KTGr^0!$onqkPM+^k7 ztVRoMn(2UFI*g)9#9n{s@i(x!M9VPXJ}^}mdwC$M!&N3!LVIjU9XPouhlK17($_am zso6Rzt_%!>%Wr77i0Qlq+YyjgOadQs+63p7wP&W*HF+PW6{EM;PTJj}{Hn&j8Nigx zgKCVXji)B>s?38n-*cb!NY}tFlRuh=Zh$QvsD(VAd{M-N0+AHxwZO(5p1~Ggr@}fZ zB*>LO6dz_u9~PksBJCCOgc?QirwUce4LzYBDaP|JMDI{eI(C{#xgS#Yxo%PZHnCqL(q@s*dvkn4w*(t>m! zukJ!=->mn^x82UCDFLZ>g58@)_Ew~_*Rw;hg|m{Tf-(Oa4-WO6zL_aW@M(sEzpOMi zy$`uS-nxmeQ>N;5a`slN5stZHZZoQ4MEmj1F7n*F2Ty+Bxnq`b0HK1XrPl2APt)Ox zJIlMjxxRikLlSKPD*XM`Zd6TVq7Vhu60-Y1Cc^H`^E3HQcSY3>@9#ng3oO_i*$8J*2i7Hmxsioo`Vsy zWe){mQW$MlrTCK+9_=UkR>3hQNIh$DyxbMI zvpiZX1v$(_U3w+@F;Qj2yMyrF`=TNwNlSi3nHD*}0Z|oQx~9fJ)e1i%526?}@@rD* zAfIOK3>sDv5k)(Q{m)dUB@KZoE9Ox2Qyhin%$efUnH$8GJ?}pGiNy*xyNk_qe{R!_ zC{G*!H(|_ZriA(R&*Z%q^IoxrmPJ&Eei z-q2)G+5e#c5D+8{?-hXP98!Lq5<|{=3DiieN;sAn3I?8s(C?m(j`ZL!Jne5<)TBv+ zZ3~}L?#y6V>sd_tyd5(`-w2JxOM`btnW|{ho`@V@+9wKB>0kLiyxvcKuQ#e!Fe7^G zNdzbu>$>8~Y_Fg z^_p7!AdCWnr4@)DRuX8IED=;fQYG)%PKQ$GT|qMV#*lR`oIP-p>r{a6&5>{XB$vPE!fPi(v7Ha+)J7Bo#)ePQ#-3b z?UW@eLQp3c?1%6E%B%WH$=NkK2iQpuXbXT?5%tEOXWwd;?N^ttnJ5oqmF^9D`WyD> zB)s9;i+=JK^CZ&7$e^#(2HMGmVEM#ny)ywdzy#hIRLwn(=F*J*g~3=Ub`}npZUXsO zgf*do?Vf3mm;U6jihgbv!~|!I!)f z6_XmDdG#=3+NVGVu-4EcJt#qJbQM8s3a)t(qY)h^gQqd-f_E+L7+YtDIMzIIA~Pku zFLYmjNwfM|P({v{L)t$2UAUA(n(ftn3nJCGfz>?>RY@3a4M^U`>hU8q;E7QVU~yw* z*;|X+>tr5l=X$22{a951%vHLwKM&yah+=#tD&$~Z6j^fp=FKeKX0;PyQ?<@yP@x!s zay|TQ%3_Klvd3g(mt>33g6M5!TCGgp71Ged@yhbl$zPT%x$R!mDwJ7(RtsjRSS4IR z=h?#JWCRya8v;Eb@OWArZ`bft%tLGr>cX_cn^SAGMScZN2OSrUbRQlaWI#tCt4?TP zPzXV-2R->gsC-Y}N;BbJQ^RjcJqR8rlQwdbwM|sbHjac4i};B&h6piyRA&+=gc$b8 zw`-9g40?}tiGx18nUi-)MaoAnWy!l!iVB&It_z{3G&W`z^=c|{lprH&;%-VZbTp+< zmVnPr#aTm;WQkTgv8UH=J}a_xcfGz*9ScvWUWu;&E%Ng3OAbG<$HZ1)B*|ss1)U$1 z0371CfJA|&MksfTCFr5gmdsTR8A=S(?r#xl$!ISn?Y^a{J^IS4H^f~OmLbRz{=;3o z8aclHY=q;qNY8%$vwGkrcz`Hg(0W1N9#**Z9Jh(U_z9!en8pj6vMAmv^oSS|heeX< z#v9i@nF3()VW0}{&Uk&;)ipACDyzTxi-_^1NZ??hFlH!c>&?J5XgWMRT{=nu+MyI& zMVxO$Ii%ng<=(g(Ch-{}!W9y#`b$-%z{`^7`)Sb(A3geN2!FQ0!o6bl{+mo|(@PaP z|ro0)!pB{7D6P=X~|tgfhNWGG#8_w>9HnG*Xfwo)~a zY4edHRDM5xi}v|6>bX*50OZ+plzOI7AbQg14r|4O1@E83p1h?`;r02R)|}a{G=jWG z#lJH;KB+AV@ak6V!cI3Z6{;nj;{vVMXJqEXTC<=P}7Dph`5gUPWoO%VzD5B5U6y1y4{H4l9pc 
znx+)K4f?ce;A-}$7oH5Dj-Gk6lVY>oPb7b7hi9Jbrl)>|14|Ama~R8=imqat#f7y3 zc}?^ClXvw5_CZZ@+QdJd&PD2~7KMbg-)eqGl8)|&&XmhEYf;2dpd`(*sfIsrv&Z(z z=k+15F@7sNXKDlhLO*XGAQJVZ0M80D=}U^Y)J$Yl@vlYu;q62|!>$FuZ7I!hA|@izq# zc-r0#&njkH*(8iBjg_@tVv0Bd)rwDi^zmOMfA1oVyap>m))et_oO=r>KtCXErSOig z6s$WR0G=u=v`#U8bU(C3sbp!nl%&$U9635oXV{(bT&JOXU zzQLX$a+g*&8><*;(7Q^lrMadaJb7!+55`aE3BYCf`d>XEpj3)>@h{pwsbsr;}#6iwc&eFPY7#6y9I6-7xiGu&ulmVM#TNRPZUy zaxk#@d7etM7g@<#u?~GCu9bn==w1*js8^f>#wt%k} zs?rk|>L8Wf2Q^I)!-eCVCj=0kGQ{58axm$mWE1*M=BK?F!!!+vhnP;VImJ1t+mn+gvnw5@E$>8B{$FVc*4e)L_Z+B6 zEIbDK#fREJsJf+X+S=Xq4gEBE@4%@jMk?vP-i}+b0JpmQ-sS6NZFr+NxF+w>F{WTP z;L*i;V()gNa1*w1oWp<tK>Yx0Ld~Ar8OI@?oCG`wzh)G zLU=AS?cLxGEMC`K!P8rnXxY{Zk?CCwPw!e|Tj}3T1gC~&=Rw2-CB*a_vgUQIU{r}s z;{bPmJvgZ7hrcS7)`iu3X0CO#s^pf&VXF9F#BTStZXw=kf$tW$J2OgSdD)L3uwEnz z4D#>)qS2i8KK`+9XDUE1{Kq<`+DU45>Y!25Y42s8uvF8F$9}<5Ro&#m?pbbIEnrFX zqU~w>qowe9n#N6+d<3=~l$_a&q92L~%Of7@N2H?YAy6jqqewi}N+IXNgIKtN*i&>j z@WcXIb4nGA0!Y}TA1mwIUg*X-KC`aBsa}eDZxTd-o@51;0P%<~{z zYqidjv7%?q^o|c}`uA+2ArB`%OP!2?C1W1qo&_4ODRQz$WJ%pOa!mgFk7QJncXAS^r9{;Tld1FXBT~4x(N;7#nFuQ8Fh%ori>}KLk3s`MH)HJ4R zPyd;I=1UM9e&N#U{9S*ENuy5yoWNoDsg&_%Pp6H5QX61ujSy9-2Dk}4_XinoFK!C3 zqL<$^UGRhm*j%Hcj>tnfV--9A(i7sz;MVd4WhlSRUnlSR)7rSM`oY;@9Pf?wRn2hn zII*1^-P6OD67LDT0Q5Gk>I3$|&0aaV5;!FnY^k?I2!Xz0UIh%8ldO-%F%@+?ELqx} zz3f2&hfUiyXU0A(7*5>35U~p2&9Fl5> zGPiuK=JcZy8YwwQzLd(SxZl}BMPE^Fv?Z@jwm|PD^%!DB6jEcvG?nchvKeWaRSQVE zp$Kz$Ghp^oDe0__jFGW34A2i+MoO~{}PCJVhteHv};Yfv*ylTRDgEP<%l zPMFDPOr}*76w-4dHk#rxdQPT`0Qpf=4>Hl?5TVjog6NcU;28xvBeb^BxP-kh!|}}M z&6r6>J|KQFassPkR6p7sV} zE?h&$G-t`mN0iXh%p;6YP#oNR1Y{;kUD2&mi$WwqBO{0mroX0-sYG}lQeYtB^`74$ zIUnUveDu=Ol1Pg5__yu$2{APf6JM9sYOa9XqzI{+t4WK9fHUKQ;m1GL9pHnJdK!y} zsl)|6oYv?W)L~;}IzM6)eNoFb9&r+-oKVE=mjxHbp>up3Gs{kbdfGmyQr!g|O7%nH zQK;4_2#$^dlxiN?p}J@KF7Kiv9HgQj$QjjH)+13qWA?Zk%R9|%;89`%2h)fQeugjy zyzSN~HhWBRajMAR2`LRMj=lN3M-=G?o4vLrboG4nYsz>W&n4dNiIoly9VM}OA-2Y0 z^^E%^vs$lLj;U1QnX{!ln=#eH!=QqEdQ_h}C&|+|V=z==mGq&wDJh((>!eWR676wF z+dodOHH>3hYAg+%xwEjPyBZa9v!7!m_h!&^LGei%poy~T17mJEoP40-RVg!+6_dz$ zInFeUFlKc*Tf4Krkq$3_f6g@p*$_TjwqLI(CR@mNuhzv+YQa$@U2Dq}{c|x)9fBQE zD%-;m9vCWk9cQWHz(cDmk^Qh08Vw`iL@kxAfeVF2N8`BI#sq0OF1mvaPWK??KFlw>rR|H-@%9VrUVDn4&#}SY2qmJ%` zqfB=IK{D&<7*e*cX&zccSnlL%_ToKAcc$$ZY`A?XJL_SBs;TN7`RG~xO2(BnihxH4 z^wO*iwvf(wF%AzPDtTfr#Ny)q3XZ6YL+!WbnzuYq)6x`Yyc4!QV9}i51FH7>_D@CCH&{6gEaC)d7_YbYJa`%I z_+*m&vmB%*BuD;cySfObl|uBh;#(0|_q7DOh)!!=j>ah^P-Z~LCr zGnXmOt(Q?AF_@Q=&wytyO=BZ|erG4W{wv5G-+XvB`kH=K&vFwu>J@x5Zh7}GWe^VR zsNf|MQNpx(iXcJbK}#23{^wEwLEm(u%*dy1dCCBracG|XKU~GHQ65F@OmwCzPmt4H zBYmi?rF9)22AqORM-^|gRucJ86IlvfbZ>ph(;KE=m4~@Ch$e?M#{P^F4=>+(QMXXV z)B{?^0nb@$8h|&5P!IJv+k{N$?qNGG$X_x+(#}zXNpk{BNt=Y@SYS$Z78R{c=5@(E zBqOurLdya-XCcI^N(3XJqL}7UlrtUw!*=>#N;;y3;dcs&b|tmGRqO7ugTT@s1U_>a z7esqV)&{}*+%%lxQ>c{u*l%jhF8==F7nTqx{3svd)7TkZ5c0tnl=k-E*C!u#+=sjt zHp2Ch5eUv{B9@$VUY7Nog!qxA!L~5B~!G!G&UGF8J)!amJ;z6GB~Gn5wte9fl(+%o|#I)Kkf`_|ZnuB`QBN^~R7XdYp6VE;8;mK24 zT;|)g`RuIh$29dPKa|?St}Sr_sh6EJ(S+P9!Tkp%??gg^G8LvYlvJ_i`qs^Hc52E> z#>lUTXhD>@+H{2qdLn1x_{$$@@XY`edS-$c=tWev*LCRZzmN`KR+=F^v!D8a9N&UA z!~<2fRN86oS3K?j0hzz|2mKTkh|{J$8jT|m4Fazs#xLH>I=Q0#S`ZxPXsx~uXaNIV zzQ`C{Nnusl+?tL3+jc2D_2hqqfsw;3oa1}_O>=VrWDaum4L?C zV;X}$I|zea2gbz*o&`ZU^dryW+Gq3-987NnPjpiI#5f5j&DyFT4k0Lfg+dToIbjf5 z6M~t~xzA6*YEX1^eKN79pXvYws2S-WSwqah5*<2 zW-wShClwINq+ay=et=zdjT8op8#z`OThIr|6X9W0vY60fLl-|F>C!`yIL8TPdOUkm zKXZ;BdfsbP1TTmfukzM#oN>t$JT-nLEP9|k!|Hh3Qk6BKAbne=^C*+T?|SMP{YD@h zw@vDV;?3eEh)qN7&Cut4$5I0Aj#P3!cM_UH=%Cb3v5Z(L$uLyM3M&xTRBAG3kt-tNiX`)kBcv)No`CQrR;OHf6Bn>+zYFFiSsG?+3*d!i}M9 
(GIT binary patch data omitted — base85-encoded payload)
zgNNBiI1;3=$l%~%9wHnGQdneg@GxH{90^ibWN`2>4-<|ADJ(KLc$h~BM}ia<85}&! zql6sICz-Dgd;%;iwq7P<_O_PkisH^gNKM+rxQ z6c!mAJj^k|ksyUd1_uvwoNy#aVUfYX!<---2~t>OaPTmWa3n}!k-@>k{1xFykisH^ zgNHduI1;3=$l%~%rU*xZ6c!mAJj^M=ksyUd1_uvwns6jYVUfYX!+eEsBuHVA!NJ3P zm2f0TVUfYX!<-=;2~t>OaPTls6OIHaEHXHFm}dw_f)o}R96Zd|2uFex78x8o%(H|e zK?;iu4j$&O2}gny78x8o%vr*bAcaK+2M_Zc;Yg6eB7=j6DF{b`6c!mAJk0ZiBS8v_ z3=SUV1;UXag+&Gj5A!#KBS8v_3=SUVZwW_&6c!mAJj^-5ksyUd1_uxGBH>7o!Xkr% zhdEC;5~Q%m;NW4V2}gny78x8o%u9qLK?;iu4j$&~gd;%;iwq7P<{N|~K?;iu4j$(3 z2uFex78x8o%zq^u2~t>OaPTmHPdE~!u*l%xVg7+|BuHVA!NG$@W-3UK!Xkr%hiMXy z1Su>sICz*A;Yg6eB7=j6X%mhFDJ(KLc$gW&ksyUd1_uw*Ash)(SY&YUFkQltAcaK+ z2M;q#I1;3=$l%~%dW0iE3X2R59%hbkBuHVA!NJ44OgIvxu*l%xVJ;Ak1Su>sICz+O z!jT|_MFs~C^9tcekisH^gNOM?!jT|_MFs~C^D5y;kisH^gNNx8jsz(zGB|jc|3)|x zq_D`~;9>rWa3n}!k-@>k{CC2UAcaK+2M_blgd;%;iwq7P=6?{51Su>sICz+UAsh)( zSY&YUF#nTqBuHVA!NJ44MmQ3ru*l%xVg484NRYxJgM)|pSHh7Xg+&Gj5A(kXM}ia< z85}&!zY&fEDJ(KLc$oh~I1;3=$l%~%{+)0nNMVt|!NdGt!jT|_MFs~CHn3(9DySks z4JqodXdpulhXNjbm~SFJDySks4JqodXdpulhXNjbm~SRNDySks4JqodXdpulhXNjb zm~SCIDySks4JqodXdpulhXNjbm~SOMDySks4JqodXdpulhXNjbm~SIKDySks4Jqod zXdpulhXNjbm~SUODySks4JqodXdpulhXNjbnC~DyDySks4JqodXdpulhXNjbm^Tm~ z6;zR+h7@&JG>{>OLjjLI%y$wW6;zR+h7@&JG>{>OLjjLI%y$tV6;zR+h7@&JG>{>O zLjjLI%y$zX6;zR+h7@&JG>{>OLjjLI%=Zu<6;zR+h7@&JG>{>OLjjLI%=Z!>6;zR+ zh7@&JG>{>OLjjLI%=Zx=6;zR+h7@&JG>{>OLjjLI%=Z%?6;zR+h7@&JG>{>OLjjLI z%p&5Wf+`Z!kfIKY1~TMuDB#hD`2pgif+`Z!kfIKY1~TMuDB#hD`9b2Nf+`Z!kfIKY z1~TMuDB#hD`61$?f+`Z!kfIKY1~TMuDB#hD`C;Otf+`Z!kfIKY1~TMuDB#hD`4Qry zf+`Z!kfIKY1~TMuDB#hD`BCDdf+`Z!kfIKY1~TMuDB#hD`7z?7f+`Z!kfIKY1~TMu zDB#hDSxkIXP(^|oQq*D5K!zL+1w8sNKTdp9P(^|oQq*D5K!zL+1w8sNKS6v{P(^|o zQq*D5K!zL+1w8sNKS_L4P(^|oQq*D5K!zL+1w8sNKSg|0P(^|oQq*D5K!zL+1w8sN zKTUj8P(^|oQq*D5K!zL+1w8sNKSO*}P(^|oQq*D5K!zL+1w8sN|AqLdpo#=Fq^QH9 zfebku3V8Iv!L?b03aUs@Ly9^q8px2tp@2sp=4XkI3aUs@Ly9^q8px2tp@2sp=8eQh z1yv-dAw?Y)4P?mSP{5-P^K-;U1yv-dAw?Y)4P?mSP{5-P^Cse>f+`Z!kfIKY1~TMu zDB#hDc{A}*K@|yVNKuDH0~vBS6!7T7{5bq)M3#;h8zwBJo+$I;-i8p64a2Q4vPjdd{j_Hf*MlPVbMT_91aCM`Y@Lg9~D%QpoSE6STv9!heH96KFqs_ zj|!?tP(zA3EE>p=!=ZpjALiY}M+H?Rs3Aoi77b*`;ZVS%5Az=4qk<|D)R3YMiv}{} za46u>hj}mYQ9%_6YDiIsMFSafI27>c!z?2{DySks4JqodXdpulhXNjbn9GQd3aUs@ zLy9^q8px2tp@2sp=6%FR1yv-dAw?Y)4P?mSP{5-P^ZUd{1yv-dAw?Y)4P?mSP{5-P z^M2x^f+`Z!kfIKY1~TMuDB#hDxt#c@po#=Fq^QH9febku3V8Hkt{^@ts3JiPDeACj zAVUs^0v>&sD~XQ^sz^{niaIPB$dJRKfJYyuMtoFIMS>bq)M3#;h8zwBJo+$yKzvkC zMS>bq)M3#;h8zwBJo+$K5g!#)k)VbYbyzfzA%{Z&k3P)R#76~HB&Z=p9Tp8_$l*}H zqYv`|;-i8p64a2Q4vPjdhxrKcQ9%_6YDiIsMFSafI27>c!~6;HQ9%_6 zYDiIsMFSafI27>c!~7}nQ9%_6YDiIsMFSafI27>cgNy2B5h|!6K@BPDuxKDd4u=9B zeVA*Bj|!?tP(zA3EE>p=!=ZpjALcsZqk<|D)R3YMiv}{}a46u>hq<2ksGy1jHKeG+ zqJa!K913{!VLnQHR8U2N8dB6@(LjbA4h1~=FgFk%6;zR+h7@&JG>{>OLjjLI%*Tk2 z3aUs@Ly9^q8px2tp@2sp=HtXi1yv-dAw?Y)4P?mSP{5-Pvx4}jpo#=Fq^QH9febku z3iuVLr@YNKiwHIxHHs#76~H zB&Z=p9Tp8_$l*}HqYrZv@lin)32I1DheZP!ayS(5=)-)9_^6bq)M3#;h8zwBJo+%7BR(pqB0&u)>ab`aLk@=m9(|ZU zCq62uB0&u)>ab`aLk@=m9(|b46CV{+k)VbYbyzfzA%{Z&k3P&7h>r@YNKiwHIxHH< zki(&XM<3=dh>r@YNKiwHIxHHK*B(ZCoo>_d(TIGjO&X?XO|$7?V{EFX)o7!@o*6-$v| z8EROL6f022YFLb-fiYy*ha3}dID-Py@aUnB*I&Ai36`OTj_;hJDB}0f#dvFb$6$ z`gjdyEz8FuEJg)OP{mRtScV#wBgG2Tu^JYmXkZK(_94dv9L}J?G(39f<29JuSw0qF zF)CPsDwZO_GSsjfDORA4)vy>v17par4>=~_a0UgY;n71Mufg2G^05euQNa>au@niG zp@!v1u>y6hhQ%lv7(<4A$T0zjGbk_(j~@DX4Q7<(V-Xgkf+eV8DH1G04a<>Y1?pH0 zi%~Q%h79|VV*(CmP+%G!J@oM!%sQ5jMOcgqmY|BINU#hwEJunJsADxOM$y0+GVDW+ z2{@cVfoXX3(8p^q>sdY)VKFLLf-06G!7|ja94S_yj@7UjMFV5Vun##V;BW>7rs2^; ze|@?Cd?(AtA}mG)OHjp9Bv^(TmLtUq)Ug^CqiA3Z8TKK^1RTzwz%)F1=;Jk*4J;pv 
zuox9AK^04pU>Ryyjub0U$7)!NqJc4F*oPbwa5#ek)9~n_kJn)CV)NWx4U8efKIE8y!xby#Zn|#h8mV5#R}B1 z8Wy8yU z920Ojg96j==%J6-U>YnRi?A3KEI}1Zkzg5WSdJ7cP{(RmjG}=tWY~us6L2_#0@Lv5 zp^w*K?qT^@gvF>}3949%1j|sva->*+I#$DC6b+0a!#?DgfWsLSn1)9WeY^(qMV606 zSd0pmpo*nPunaXUM~W4wV>SLij{YD#g7|^|`L_HJgl;w3LFm%ZLDbM-5Ufhu?epZY zhaC2h!ya|vkX*`4nuq-5k2lvLELY1pu(Wk*NP;BVAtNWh4Yh?p@Grp%bLV9APv zl#HB$l8Tx&4I8$!?C9tjJm@ndU_?kn%$NyNX3SZzWJN+sMovLVMa`Oq4O?1vbo31V zCVhqkj0lN{88czZj5!OItVl@7$SEkPs9DpnVN1)7j-J8)LZ2Z4BSIo##!Q$pW6pvl zD-u#NatcZ+YSuJt*wV72qi66p>oX)^L`X!;m z^bB%+h6IcViHI39Vakj-3zn=%NXf`4D5#2#JUp zGhxb%ISZDoNJz=ZDJZF^S<|p#OUsUq9yiks_81Z{A|xVa%!Da3<}6sUA|WLsr=X;w zW=+F}EiF4bdIo>HK0^XVgha%QnJ{I>oCQl(B&1~I6qHodtZCS=rDaD)&*1OSXGp+^ zkcgNu6Q<0VvtY@Jgp`b&f|81wH4PiKwCw2U8T_643<($!5)m_I!ju_v7A#qjkdl#8 zP*PE|reVXDmK_~EgC~841dIrYh#50s%8WS+maIrf$;c@vsi;}guwhHfj*gze-=)ux zfDs`PF=Hl7nK5U(bwF%zcDn6qHXiiDJmoPv^ynl%j@wzTZ%=o$P&`V0ve5fTwIX2O&i za~3RFk&u#+Q&3V-v!-FgmX;kIJ%dW0Aps*oB4Wl&m@;F|f+Z^wQZjN1N-ApBG;G+? zvZJGC@DJ-VBw$2HM9i28Q)bLruw+F-N=8mWNkz??h7DU(bwF%zcDn6qHXiiDJmoPv^ynl%j@wzTZ%=o$QD`V0ve5fTwI zX2O&ia~3RFk&u#+Q&3V-v!-FgmX;kIJ%fK-pCJJwLLy?uOqeob&VnT?5>hg93Q8(! z)--I`(z2tYXYfzxGbCU{NJPw-2~%dwS+Hb9LP|zXK}ki;nuZNqT6T2w3~GIb1dIrY zh#50s%8WS+maIrf$;c@vsi;}guwhHfj*gze|3;r70V6^pV#Z9EGGoqyB`XqAGI9z^ zDr(j=Y}nGWqoZf=ztv|*z=)8Dm@yNk%$T!a$%=%OjGTg!ikdYI8@9CU=;#^z@AMfG zFd`%(X3T^sGv+Khg93Q8(!)--I`(z2tYXYfzy zGbCU{NJPw-2~%dwS+Hb9LP|zXK}ki;nuZNqT6T2w4E||-h6IcViHI39Vakj-3zn=% zNXf`4D5Wq-5k2lvLELY1pu(Wk*NP;GflJNWh4Yh?p@Grp%bL zV9APvl#HB$l8Tx&4I8$!?C9tj{O|P{5-=hpB4*5lDKq9QSh6A^B_pSxq@rd`!-g#_ zJ34v>Z~6=g7!eW?GiJh+8FLmaS&@*EkyB7oQM0CD!L`X!;m^bG!w`V0ve5fTwIX2O&ia~3RFk&u#+Q&3V-v!-FgmX;kIJ)WGw9zy~~gha%Q znJ{I>oCQl(B&1~I6qHodtZCS=rDaD)&*1;0&yau-ArUcSCQO+zXTg#c2`L#l1tk?V zYZ^9eY1z@yGx$I2GbCU{NJPw-2~%dwS+Hb9LP|zXK}ki;nuZNqT6T2w4E`_r3<($! z5)m_I!ju_v7A#qjkdl#8P*PE|reVXDmK_~EgLi#~1dIrYh#50s%8WS+maIrf$;c@v zsi;}guwhHfj*gze|5cwM0V6^pV#Z9EGGoqyB`XqAGI9z^Dr(j=Y}nGWqoZf=f7543 zz=)8Dm@yNk%$T!a$%=%OjGTg!ikdYI8@9CU=;#^zbNUPk7!eW?GiJh+8FLmaS&@*E zkyB7oQM0CD!L`X!;m^bGz*eTD>#2#JUpGhxb%ISZDoNJz=ZDJZF^S<|p#OUsUq zp23GcLjp#GM8u4lFlEM^1xr>Wq-5k2lvLELY1pu(Wk*NP;9t^bNWh4Yh?p@Grp%bL zV9APvl#HB$l8Tx&4I8$!?C9tj{LA_b2^bL)5i@4Olo@jtELo9|l95wTQc<&}VZ)Y| z9UVP`e?^}m0V6^pV#Z9EGGoqyB`XqAGI9z^Dr(j=Y}nGWqoc>0qQM?R0!D;H#Eh9R zWyYKZOI9SLWaJc-RMf0#*s!H#M@P@#U)5(wz=)8Dm@yNk%$T!a$%=%OjGTg!ikdYI z8@9CU=;#^zYx)cc7!eW?GiJh+8FLmaS&@*EkyB7oQM0CD!pCJJwLLy?uOqeob&VnT?5>hg93Q8(! z)--I`(z2tYXYl{fXGp+^kcgNu6Q<0VvtY@Jgp`b&f|81wH4PiKwCw2U8T{+|3<($! 
z5)m_I!ju_v7A#qjkdl#8P*PE|reVXDmK_~EgMUMxAps*oB4Wl&m@;F|f+Z^wQZjN1 zN-ApBG;G+?vZJHN2cp3qLjp#GM8u4lFlEM^1xr>Wq-5k2lvLELY1pu(Wk*NP;NR3| zNWh4Yh?p@Grp%bLV9APvl#HB$l8Tx&4I8$!?C9tj{9F1A2^bL)5i@4Olo@jtELo9| zl95wTQc<&}VZ)Y|9UVP`|EE4f0!D;H#Eh9RWyYKZOI9SLWaJc-RMf0#*s!H#M@P@# zOP?VDBSIo##!Q$pW6pvlD-u#NatcZ+YSuJt*wV72qi68{(q~A(h>(bwF%zcDn6qHX ziiDJmoPv^ynl%j@wzTZ%=o$RK^%)W{A|xVa%!Da3<}6sUA|WLsr=X;wW=+F}EiF4b zdItZtK0^XVgha%QnJ{I>oCQl(B&1~I6qHodtZCS=rDaD)kBcFLJ%$8~2#JUpGhxb% zISZDoNJz=ZDJZF^S<|p#OUsUqp25GP&yau-ArUcSCQO+zXTg#c2`L#l1tk?VYZ^9e zY1z@yGx&G)84@reBqCj0lN{88czZj5!OItVl@7$SEkPs9DpnVN1)7j-J7PpwEzi z5g`#VVoCQl(B&1~I6qHodtZCS=rDaD)&)`R& zAps*oB4Wl&m@;F|f+Z^wQZjN1N-ApBG;G+?vZJGC@E_|lBw$2HM9i28Q)bLruw+F- zN=8mWNkz??h7DUhg93Q8(!)--I` z(z2tYXYl{mXGp+^kcgNu6Q<0VvtY@Jgp`b&f|81wH4PiKwCw2U8T=Re3<($!5)m_I z!ju_v7A#qjkdl#8P*PE|reVXDmK_~Ega1;WAps*oB4Wl&m@;F|f+Z^wQZjN1N-ApB zG;G+?vZJHN7odYbu*W__4hT48#1SFKM4S+F%9t}IoHONu8JEntV!<^_Zdh?k!W}91 zWIT}bNWl{&&s4ln^U9hx8s6FP!In>2zS!|i#}7Tf4E`JTv&TL|4hT48#1SFKM4S+F z%9t}IoHONu8JEntV!<^_Zdh?k!W}91WIT}bNWl{&&s4ln^U9hx8s6FP!In>2zS!|i z#}7Tf4E`wl*<+s}2Lv24;)sxAB2I`oWy~29&Y5z-j7#QRvEZ5|H>|iN;f|DhG9Jiz zq~M8?XDVK(d1cKT4exCDV9O^hU+nm%~0|E{iaYV>55hui)GUkj4 z=S;a^#wByESa8jf8&=$sa7W5L84u(@k7rq zgFo2M9{UVAAmES@M}!;`aYD=~W6qdx&XfygTr%g11=lROVZ|*8cck2t@j%Wa1y7Vb zQ}II0D{J0pcxS^0TRv&|V#hZfKlJ=E_+#v6k9~$55OBzdBSMaeI3ebgF=tFTXUYXL zE}3)1f@_xCu;P}4J5uh+cp&GIf+tFzsd%C0l{IfPytCnhEuXY}vE!SLA9{Wn{IT}4 z$38<22smWK5h2G!oDg%$m@_7vGv$IAm(00h!8J>6SaD0j9Vz!@JdpE9!4oCVRJ>60 z%9=MC-r4ZMmQPx~*zrxr4?VvO{y6*DW1k@h1ROHrh>&9c9>{s5;E9rFDqg60Wz8E6?`-&B%O@>g?D(eRhn`>j_-XJ5_Sk30 z0Re}MI3nbjh!bK?8FR*jbEaG{Q!5?owd+amhfPh0r91(I%#0fE{j5%Y%Ia4l}amk!37F@IB zh84FY+>vrm#sfKz6g*M#OvMW|udI2a;hha1Z26?+iyhx|{Lu5u;7_ohJ@y%LK)@j* zjtDs>;)Iw}#+)(XoGBN~xMa>13$9sm!-`uH?nt>Oy;)R-5*1Xa1&V~=ReA4p8 zj&C}C==o*vC)v*)`wTfC;E)kVgd7ubLd+>+&X{n{lnZ8DGUti~*DSeV#VrYUq}-G7 zK+YotPn0}U@j}fjYu;#hXTt|uK56-4$2T26^!zgTlkI1ZeTEzmaL9-wLXL?zA?B1Z zXG}O}$^|nnnRCU0YnI%w;+BLvQtrukAm@>SCrX~Fc%kN%HE%S$v*Cj+pR|0jS zCrX~Fc%kN%HE%S$v*Cj+pR|0j&9c9>{s5;E9rFDqg60Wz8E6?`-&B%O@>g?D(eRhn`;s z|1JC3W1k@h1ROHrh>&9c9>{s5;E9rF zDqg60Wz8E6?`-&B%O@>g?D(eRhn`;se~SI=vCohL0uC8*M947_C&ZjG=8OsFOu1mj zC3CJ=aLtk%R@{&rd$~_qmgdomu#d8FWpl4mMjsCi}08x8Ml_+ZN?Enn>TrsId6Uj~1w z{p_*NkOKk^8F56&F%c)koHFK&3Fl0?V8$hLu2^u*k{edsl5j`LJsA(=JW}vP$uku% z)V#9hjfQtNe6Zz{mM?aE)A2*kFN6P%{p_*NkOKk^8F56&F%c)koHFK&3Fl0?V8$hL zu2^u*k{edsl5j`LJsA(=JW}vP$uku%)V#9hjfQtNe6Zz{mM?aE)A2*kFN6QC{p_*N zkOKk^8F56&F%c)koHFK&3Fl0?V8$hLu2^u*k{edsl5j`LJsA(=JW}vP$uku%)V#9h zjfQtNe6Zz{mM?aE)A2*kFK&7o{DD388FE0tAtR0mIVR$Sm{Z1_G2xsk7tFY1&J_!; zS#raQTN3U_xhLa+oJR_tD0!yhg_>8^ywUK^h7Y!U((=WQZ#sVH`DO5@+0P#P3^^d+ zkP%0O920Ru%qe5em~hUN3uat0=ZXc_EV*IDEeUs|+>`M@&Lag+lsr@MLd`2{-e`Dd z!v|YFY58KuHyuCp{4)5{?PrgDh8z%Z$cQ6Cj)^!S=9DpKOgLxC1v4(0bH##dmfW!7 zmV`S}?#Xx{=aGUZN}j2Bq2`q}Z#2BK;e#!ow0yDSn~ooPei{53_Or)6Lk=+_2)7gga91 z$#@{=k%A{mo~d}D=9M*XG`zFngDs!5e6i!3jvsn{8T^^{v&TL|4hT48#1SFKM4S+F z%9t}IoHONu8JEntV!<^_Zdh?k!W}91WIT}bNWl{&&s4ln^U9hx8s6FP!In>2zS!|i z#}7Tf4E`+p*<+s}2Lv24;)sxAB2I`oWy~29&Y5z-j7#QRvEZ5|H>|iN;f|DhG9Jiz zq~M8?XDVK(d1cKT4exCDV9O^hU+nm%|iN;f|DhG9Jizq~M8?XDVK(d1cKT4exCDV9O^hU+nm%r;Isc!Z}kem~qLRD;8X{&azMZ#BaR3;CgOycQ^uSz;hZTK z%(!IE6$`Fea>I&S67ER3C*y&fM+%-Od8Xopnpf7m(eTcO54L>L^2LsCI)3Q+W$@?N z&mQ{>IUwMW5l4g^6LCV!DPzuCJX7&P z%`0o(Xn1GC2U|XA`C`X69Y6H^GWhS?&mQ{>IUwMW5l4g^6LCV!DPzuCJX7&P%`0o(Xn1GC2U|XA`C`X69Y6H^GB~oIJ@y%L zK)@j*jtDs>;)Iw}#+)(XoGBN~xMa>13$9sm!-`uH?nt>O~0|E{iaYV>5 z5hui)GUkj4=S;a^#wByESa8jf8&=$sa7W5L84u(@k7rqgFoMX_Sk300Re}MI3nbjh!bK?8FR*jbEaG{Q!CzoMd+amhfPh0r91(I%#0fE{ 
zj5%Y%Ia4l}amk!37F@IBh84FY+>vrm#sfKz6g*M#OvMW|udI2a;hha1Z26?+iyhx| z{Lu5u;4ieFJ@y%LK)@j*jtDs>;)Iw}#+)(XoGBN~xMa>13$9sm!-`uH?nt>O@(zmfI~(c5pqn#2{EUPIb*^( zQ!bcs$($<|T(jhc6}KeZk#bMQ138ZrJW=vY#S1mBta+p1oedvs`K0BG9p7~P(DTdS zFS4IK_8D?Oz#$`!2stL=gqTytoH5~?DHqJRWX=@}u32)!idz!yNVzBDft*JQo+x>y z;)R-5*1Xa1&V~=ReA4p8j&C}C==o*vKeV4c_8D?Oz#$`!2stL=gqTytoH5~?DHqJR zWX=@}u32)!idz!yNVzBDft*JQo+x>y;)R-5*1Xa1&V~=ReA4p8j&C}C==o*v7u(Ms z`wTfC;E)kVgd7ubLd+>+&X{n{lnZ8DGUti~*DSeV#VrYUq}-G7K+YotPn0}U@j}fj zYu;#hXTt|uK56-4$2T26^!(zWv%w$OW1k@h1ROHrh>&9c9>{s5;E9rFDqg60Wz8E6?`-&B%O@>g?D(eRhn`;se~JCord%-Nk~vo_xMs-> zD{e`+Bjuiq2XY=Mc%tN)iWh2LS@TB2I~zXO@=41VJHF}oq34&u|Hyv!*k{NA0f&q@ zBIKBe6JkyobH;>ord%-Nk~vo_xMs->D{e`+Bjuiq2XY=Mc%tN)iWh2LS@TB2I~zXO z@=41VJHF}oq34&uiT&)c&yWKG4jFMo$T1No#GEqbj0xvVxnRa6bFNr$&5|2d+>&rd z$~_qmZa;hMGvt7PLq;4Ca!kYtF{g|8^ywUK^h7Y!U((=WQZ#sVH z`NbipgFmpxK0^)&IAp{TA;(0V5Od0yGbWrf<$@WP%(-I0HA`+-aZAD-DfeVNkn>2v z6D7}7yioJXnl~EW+3>-ZPg=g%@lD4MJ--b8O8ePkpCJbX95UjFkYgfFh&g4<857Q# za>0yC=3KGhnk6@^xFz9^lzTEB$a$pTiIQh3UZ{Cx%^MBxZ1`ZyCoNy>_@?8Bo?ixk zmHq6o&yWKG4jFMo$T1No#GEqbj0xvVxnRa6bFNr$&5|2d+>&rd$~_qmR|j5s3Xn1~Z%P8oB?gmb1`FyoRrS1h<@ z$qg%RNw_2Bo{R@_9w~UDWYF=6MM#DQBKG^a}%NIMo>G+}Nm%(3aKYQ#mr;Isc!Z}kem~qLRD;8X{&azMZ#BaR3;CgOycQ^uSz;hZTK%(!IE6$`Fea>I&S z67ER3C*y&fM+%-Od8Xopnpf7m(eTcO54L>L^2LsCI)3Q+#R2DoKd{F>Lkg zdomu#d8FWpl4mMjsCi}08x8Ml_+ZN?Enn>TrsId6Uk3kU``KfkAqNB;GUAAkVgdomu#d8FWpl4mMjsCi}08x8Ml_+ZN?Enn>T zrsId6Uj~1J{p_*NkOKk^8F56&F%c)koHFK&3Fl0?V8$hLu2^u*k{edsl5j`LJsA(= zJW}vP$uku%)V#9hjfQtNe6Zz{mM?aE)A2*kFM~7t*<+s}2Lv24;)sxAB2I`oWy~29 z&Y5z-j7#QRvEZ5|H>|iN;f|DhG9Jizq~M8?XDVK(d1cKT4exCDV9O^hU+nm%|iN;f|DhG9Jizq~M8? zXDVK(d1cKT4exCDV9O^hU+nm%|iN;f|DhG9Jizq~M8?XDVK(d1cKT4exCDV9O^hU+nm%|iN;f|DhG9Jizq~M8?XDVK( zd1cKT4exCDV9O^hU+nm%n~-QjB!037G%Fhq(J2~wm;kRnBb6e&`qNRc2xg3GzgWnRu@ zE^|3A=VdPEWnRwZyv$`Tb2%^PGMBl`WiCjOB1MW6DH5beks?Kk6bVwKNRc8%iWDhw z&x;3sZ$EthfFBW}#E6q1Ns2TXvgF89ph$@_6{^&z)1XOc87z#u_}7$(FB zql6h_oCzkGB0`iHaS|j+ktRcy9C->9DN&|El^S&#G-=VMLzii0m}QQ67U;3a63eWx z${Kyv*ric(FMw|pmQl!a{B}bkDMM{*Z zP^Ctl22EPD>Ck1G8D^Pdo&|a=vcxhgtg=R*bvD>!i*0t;Wskr=(9Zya1Q}wO5F?Bd zW{hzrm}H6wQDVeNkR(N#3|VsIDNv+DnF>{E)M?P9MVk&?rkP=uIp$fQ$0AECv%)HC z^jT+vO}5x(hh6pvT-471g9I63m=Gh35@w8XCYWT32vK6hNsuH(nhaTTd7d8DNkgLktsQgi*qb zG0p^&Oc5bUj5rCBq)3w?OO8ARij*i*p-PQ94VtuQ)1k{WGt4r_JPY(#WQk=~SY?eq z>uj*e7TfHw%N`z^1@b+H~kL%?z{5G0y@$7FlAM6;@fJ&pI1yvc)z#?6ODTKhw_ug9I63m=Gh35@w8X zCYWT32vK6hNsuH(nhaTT9DN&|El^S&# zG-=VMLzii0m}QQ67U;3a63eWx${Kyv*b+H~kL%?z{5G0y@$7FlAM6;@fJ&pI1y zvc)z#?6ODTvVH~_B*+lMgcxCzFk_4}!6Z{eh!P`Cf+Q)@WXO^uPk|yO%2cRQqfUb+ zE!uSGGR+LL%rVabJr-GFnH5%9qt7}UY_i2RJM6MY;2-K|fI)%`F-(XNMhP>{E)M?P9 zMVk&?rkP=uIp$fQ$0AECv%)HC^jT+vO}5x(hh6pv{3HDgFi4Ogh6ypkC}GAJXM#zl zh!7=4oCHZyq{)yaN1g&jN|dQkrAD0wOric(FMw|pmQl!a{B}bkDMM{*ZP^Ctl22EPD z>Ck1G8D^Pdo&|a=vcxhgtg=R*bvD>!i*0t;Wsksrp`QT;2{ObmAx0P_%oyWLFv%1V zqQr=kAW4ce8M5TaQ=mwRG8L-SsMDZHi#8p)Of$nQbIh|qk42VPW`$MO=(ElSn{2Vo z4!i6TNb6^SL4pi1Oo$Oi2{Xnx6HGEigeWoMBuJ7XO@=Hv@)RgiqD+Mric(F zMw|pmQl!a{B}bkDMM{*ZP^Ctl22EPD>Ck1G8D^Pdo&|a=vcxhgtg=R*bvD>!i*0t; zWsksB{R}WjkRgT%F~TTe#u#UUNv4PpB}SYCNm8WAkR?Z+0!2!csZgaxod!)>wCT`g zni*!9W1awwCT`gni*!9W1a9DN&|El^S&#G-=VMLzii0m}QQ67U;3a63eWx${Kyv*FqrbCx$W|(D; zc^2rg$P&w}u*w>J*4bc_Ewet13@}KLA%+Pt!YEric(FMw|pmQl!a{ zB}bkDMM{*ZP^Ctl22EPD>Ck1G8D^Pdo&|a=vcxhgtg=R*bvD>!i*0t;WskrO{R}Wj zkRgT%F~TTe#u#UUNv4PpB}SYCNm8WAkR?Z+0!2!csZgaxod!)>wCT`gni*!9W1a9DN&|El^S&#G-=VMLzii0m}QQ67U;3a z63eWx${Kyv*b+H~kL%?z{5G0y@$7FlAM6;@fJ&pI1yvc)z#?6ODTrhWz(B*+lM zgcxCzFk_4}!6Z{eh!P`Cf+Q)@WXO^uPk|yO%2cRQqfUb+E!uSGGR+LL%rVabJr-GF 
znH5%9qt7}UY_i2RJM6MY;J?$)0D}Y>Vweykj1p#yaVD5#iU?6+#7U4OMVbs*a^xvc zq(qqtRch2}(4FqrbCx$W|(D;c^2rg$P&w} zu*w>J*4bc_EwsZpmvlNN0{beU#`S>~8$fgX!2vCImq ztkGwk4K~?gn;mx9Bk(WuGr%B0h8QNq2&04FqrbCx$W|(D;c^2rg$P&w}u*w>J*4bc_EwwCT`gni*!9W1amvp|nUmRM$mRo3XU z&IX%ovCR&<>=F2v`Wax5AVUljVuVq`j4{pxlS~mIN{l!OlB7tJAxn-t1&Wj?Q=v+Y zIt`k%Xw#v~G&9UH$2<%4SY(N1R#;_?KI?3-$rjt}u*)8Sl70plB*+lMgcxCzFk_4} z!6Z{eh!P`Cf+Q)@WXO^uPk|yO%2cRQqfUb+E!uSGGR+LL%rVabJr-GFnH5%9qt7}U zY_i2RJM6MY;9u!yfI)%`F-(XNMhP>dMvWUGApdIMxS*y*kp@scGzW)Kv_Qn3=(9BVM2^BN|-UmnP8GB zB1DN1Cqa@FX)sZpmvlNN0{beU#`S>~8$fgX!2vCImqtkGwk4K~?g zn;mx9Bk-^FGr%B0h8QNq2&04Fq zrbCx$W|(D;c^2rg$P&w}u*w>J*4bc_EwsZpmvlNN0{ zbeU#`S>~8$fgX!2vCImqtkGwk4K~?gn;mx9BT#uC_>b7feg-(eAO{I@h#?L$%n?Ey zWrSmla-1+H7~>@4oMM90Omc=P&Jy7qQO*nNU7j<|3ubu9EU%d3HS@e-fw%N{ z$0F}p;seWkWQ9+x@|iWh(B~`bd}D*}Z1RIGezMIkcKFRMf7s(Mf&bC|>|;Lz9AJ=x z1UbYIhZ*JwA&xS_F-AE~m=la~l5tKk!D%Kr!xU$UaE>VFiE)8A7fEo5B$r8Xg)~>m zaE&b2$#H``Hz{z7BDX1Vhcb7maE~hYsquh14{7j-CXZ?Hgf>s<@Qg0cndSvEykwSF z%<-Ce-mt)1dc0$i_bl;&Wj?aPCsz5)8ei!1m36+c!FM+K!4^N+<`+BsW|u$g@t43o z`?HVz3~+!!4ie-LLmXz9BZN502*()ZIAKmO#!1FG#RR9BO7>uBbq#>#S_{*rNcA2 zJZG90%glBd}M`Btn!&PzR>3@>wIH_ z?`-meEq=1iFLwCNE`QkLFM+E4*~fkcIKUtW337-b4l~RVLL6m;V~lc~Feez}B;%Z7 zg40ZLhAGYx;T%!U6XODLE|TC9NiLJ(3TdvA;Tl=4lj8RC<0RvpVuI65a)v3+65$+C&J*JTaW0bJ5=kzT z;tFZ5lHnRzu9M>id2UkR7DaAT;tpl*QsEv|?o;Cdbso~-5ltS`;t6e@(%~6ho-@r0 zW_ZagubAUC^Soh!xAb_&BJWw^1Iv75g-@*VnKi!9=PT=cV}tK(@`EjYvdu4c_{}bV z*yAsOn*G_weg-(eAO{I@h#?L$%n?EyWrSmla-1+H7~>@4oMM90Omc=P&Jy7qQO*nNU7j<|3ubu9EU%d3HS@e-fw%N{$0F}p;seWkWQ9+x@|iWh(B~`bd}D*}Z1RIG zezMIkcKFRMf7s(Mf&bP1>|;Lz9AJ=x1UbYIhZ*JwA&xS_F-AE~m=la~l5tKk!D%Kr z!xU$UaE>VFiE)8A7fEo5B$r8Xg)~>maE&b2$#H``Hz{z7BDX1Vhcb7maE~hYsquh1 z4{7j-CXZ?Hgf>s<@Qg0cndSvEykwSF%<-Ce-mt)1dc0$i_bl;&Wj?aPCsz5)8ei!1 zm36+c!FM+K!4^N+<`+BsW|u$g@t42@`?HVz3~+!!4ie-LLmXz9BZN502*()ZIAKmO z#!1FG#RR9BO7>uBbq#>#S_{*rNcA2JZG90%glBd}M`Btn!&PzR>3@>wIH_?`-meEq=1iFLwCNE`QkLFM+!K*~fkcIKUtW z337-b4l~RVLL6m;V~lc~Feez}B;%Z7g40ZLhAGYx;T%!U6XODLE|TC9NiLJ(3TdvA z;Tl=4lj8i%PZ!1%{*^d;4MAgvB-Ot_`otBS>Y3_d}fU=^!ds<-`LglBd}M`Btn!&PzR>3@>wIH_ z?`-meEq=1iFLwCNE`QkLFM)q&fA+DT0S++8L4q7&h{FtXgb+s=;TWSFC(H@PILSDt znBX*%oMDQyL^wy3^TfD7oQov5M3T#-xI&t%WVl9_>*TmWo|_c7MUmT-xI>w{RJcc# z`_yi%PZ!1%{*^d;4MAgvB-Ot z_`otBS>Y3_d}fU=^!ds<-`L$dBXy4>G6(5-m}C9mifpEpIGHHYkZ;4SJwH)2H)A_2V4AP zn_ukkn_d2}$6o^fyZzb6eg-(eAO{I@h#?L$%n?EyWrSmla-1+H7~>@4oMM90Omc=P z&Jy7qQO*nNU7j<|3ubu9EU%d3HS@e-fw%N{$0F}p;seWkWQ9+x@|iWh(B~`b zd}D*}Z1RIGezMIkcKFRMf7s(Mfu{Z0$9@Jlz#s<+a)==gGt3b}9A$)KjB=bXCm7=- zglBd}M`B ztn!&PzR>3@>wIH_?`-meEq=1iFLwCNE`QkLFMF|s$&za^0GrVM$SIqI6dET(VTY9`> zk@qa|fn`3j!Y5Yw%o<#&J!6lMhCdC!fTqVOb zvRo&}4f5Qiz%7d0ro1L{1a!6TYHro|K5Jf*`kx;$r^7tHXISza;6 zYvy^w0&nT@jz!+H#0Qr7$O@lWRC<0RvpVuI65a)v3+65$+C z&J*JTaW0bJ5=kzT;tFZ5lHnRzu9M>id2UkR7DaAT;tpl*QsEv|?o;Cdbso~-5ltS` z;t6e@(%~6ho-@r0W_ZagubAUC^Soh!xAb_&BJWw^1Iv75g-@*VnKi!9=PT=cV}tK( z@`EjYvdu4c_{}bV*yAsO|I7aDV?P5NV330ZIm8f$8RiHfjxxeAMmbKH6O3_^aZWM8 zX(ldBXy4>G6(5-m}C9 zmifpEpIGHHYkZ;4SJwH)2H)A_2V4APn_ukkn_d2}$6o^fxBc12eg-(eAO{I@h#?L$ z%n?EyWrSmla-1+H7~>@4oMM90Omc=P&Jy7qQO*nNU7j<|3ubu9EU%d3HS@e- zfw%N{$0F}p;seWkWQ9+x@|iWh(B~`bd}D*}Z1RIGezMIkcKFRMf7s(MfwukG$9@Jl zz#s<+a)==gGt3b}9A$)KjB=bXCm7=-glBd}M`Btn!&PzR>3@>wIH_?`-meEq=1iFLwCNE`QkL zFMF|s$&za^0GrVM$SIqI6dET(VTY9`>k@qa|fn`3j!Y5Yw%o<#&J!6lMhCdC!fTqVObvRo&}4f5Qiz%7d0ro1L{1a z!6TYHro|K5Jf*`kx;$r^7tHXISza;6Yvy^w0&nT@jz!+H#0Qr7$O@lWRC<0RvpVuI65a)v3+65$+C&J*JTaW0bJ5=kzT;tFZ5lHnRzu9M>id2UkR z7DaAT;tpl*QsEv|?o;Cdbso~-5ltS`;t6e@(%~6ho-@r0W_ZagubAUC^Soh!xAb_& zBJWw^1Iv75g-@*VnKi!9=PT=cV}tK(@`EjYvdu4c_{}bV*yAsO|IhyHV?P5NV330Z 
zIm8f$8RiHfjxxeAMmbKH6O3_^aZWM8X(ldBXy4>G6(5-m}C9mifpEpIGHHYkZ;4SJwH)2H)A_2V4APn_ukk zn_d2}$6o^fzx~@4oMM90Omc=P&Jy7q zQO*nNU7j<|3ubu9EU%d3HS@e-fw%N{$0F}p;seWkWQ9+x@|iWh(B~`bd}D*} zZ1RIGezMIkcKFRMf7s(Mfv)}8$9@Jlz#s<+a)==gGt3b}9A$)KjB=bXCm7=-5^4sHKj28fc`6W?E>ajdnWdq>FBP=%tT-1{h?BVMZ8bjBzHIWQu8Km}QQ67FcA7 zWmZ^ajdeEIWQ%Qf*kzA>4mjk9V@^2bjB_ryyl*d7$7FlAM6;@eeoeehGVw)Xy*<+sr4mskO6HYnfoC_|w;+h+7x#OM(9(m%K z7hZYeoew_w;+r3S`6FOie*y_2m=HnT31QJOinG{k*l~0tzXjm=a1UqnrvVsiK-1YN?~11{!IinHE}Uqn!>q>7tt+dg-H|0R|ajm=Q)9 zW1I;lnPQq5W|?E21r}LinH5%9W1S5)*!6Z{mGs7%%%(K8EODwa(Dr>B>!6sX5v%@ZX>~p{&M;vp)DQBE>!6jE* zbHgon-1ERAPdxL&D{s8>!6#pQ^TRKH1gz*!AVCBZLMUN`6G0?VL=!_Sam15AB1t5Z zLMmyblR+j~WRpWKdE`?-Aw?8ZLMdgGQ$ZzFR8vDOb=1>9BTY2ZLMv^w(?KU)bkjpG zee^THAVUl@!YE^mGr=TNOf$nQbIh~AB15^4sHKj28fc`6W?E>ajdnWd zq>FBP=%tT-1{h?BVMZ8bjBzHIWQu8Km}QQ67FcA7WmZ^ajdeEIWQ%Qf*kzA>4mjk9 zV@^2bjB_ryyl*dg0fiJ%ObMlwQBDPwR8dV0 zwbW5h1C2D%Obe~F(M|`QbkR)@z4Xz~0D}xM%m|~5G0p^&Ofk(2v&=Ei0*frM%nGZl zvCamYY_ZJ_yX>*g0f!uM%n7HQan1#oTyf0}x7=~h1CKoM%nPr)@y-XIeDTc>zx)yK zuk|O8Ac6@YlrX}HAd)Dei6NFa;z=NpB$7!Xl{C`HAd@Vz$sw0K@+qK@B8n-YlrqYx zppq)8siBrS>S>^nCYouXl{VVxpp!1T>7kcC`Wax5A%+=YlrhGcV3H}OnPHYW=2>8o zC6-xXl{MDcV3RGj*kRXByA(Sw}i6D|FqKP4vIO0hlktC8yA(b@J$sm&~vdJNrJn|`^kRpmH zp_DSpsi2Z7s;QxtI_hblktUjHp_Mk;>7bJ?y6K^pKKdD8kRgT{VU#h(nP8GBrkP=u zIp$elktLQ{VU;!3*8zMmP~f5=AsI#1cn52_%w4GAX2zMmiZ}l0`N-X?_+;Yb~4?Ob3 zGcUaI#ycN;^2Ikl{PIV@hW-Q+L@*(Q5=J-?L=r_bF~kx_JP9O{L^3I)l14fiWRgWT zIpmT@J_Qs~L@_0lQbsuyR8mDXHPli^Jqh9qTyn)VH{5c^ zJr6wc#4|6v^2R$KeDcLNKm77Xz`xa>K!OM+giyi=CxS?#h$ewe^2n!vLW(G+gi^{Vr-DkVsHTQm>ZqrIMw)1*g;v^Vr-M$q=%$BW`sinX zL53J+gi*#AXM#zlm}Z7q=9p)JMV44*g;myAXM;_)*k*@a_SoluLykD+gj3Eq=YmVF zxaNji?zrcHN1k}*g;(Bq=Yvna_~wUS{s`FApFn~LCWKJJ2q%I_qKGDjSmKB$fkcu> zCWTbeNGF3#vdAWfT=K}LfI^BWri4<;D5ru-s;H)hTI#5$fkv8WriE78Xs3fty6C2d zUi#=~fI)^BW`t437-xb>rkG}iS>~8$fkl>BW`$MOSZ9Mxw%BHeUG~`LfJ2Tr=7dwu zIOl>(uDIrgTkg2$fk&Qr=7m?@c;|ypzWC;cU;YUAAM_`XAc6@YlrX}HAd)Dei6NFa z;z=NpB$7!Xl{C`HAd@Vz$sw0K@+qK@B8n-YlrqYxppq)8siBrS>S>^nCYouXl{VVx zpp!1T>7kcC`Wax5A%+=YlrhGcV3H}OnPHYW=2>8oC6-xXl{MDcV3RGj*7bJ?y6K^pKKdD8kRgT{VU#h(nP8GBrkP=uIp$elktLQ{VU;!3*5^4sHKj2 z8fc`6W?E>ajdnWdq>FBP=%tT-1{h?BVMZ8bjBzHIWQu8Km}QQ67FcA7WmZ^ajdeEI zWQ%Qf*kzA>4mjk9V@^2bjB_ryyl*d7$ z7FlAM6;@eeoeehGVw)Xy*<+sr4mskO6HYnfoC_|w;+h+7x#OM(9(m%K7hZYeoew_w z;+r3S`6FOQe*y_2m=HnT31QJOinG{k*l~0tzXj zm=a1UqnrvVsiK-1YN?~11{!IinHE}Uqn!>q>7tt+dg-H|0R|ajm=Q)9W1I;lnPQq5 zW|?E21r}LinH5%9W1S5)*CWctzh$n$Wl1L_nRMJQ%gG{o>CWl<| z$ftlpiYTUpQpzZ&f=a5WriNPTsHcHOnrNnlR@!K%gHF2WriWho=x2aIh8SjqQN|c& zf=Q;BW`we^2n!vLW(G+gi^{Vr-DkVsHTQm>ZqrIMw)1*g;v^Vr-M$q=%$BW`sinXL53J+ zgi*#AXM#zlm}Z7q=9p)JMV44*g;myAXM;_)*k*@a_SoluLykD+gj3Eq=YmVFxaNji z?zrcHN1k}*g;(Bq=Yvna_~wUS{s{PA^e2!Yf(ap%Fv5u-k|?5yA(lAeNg$CVl1U+z zG}6f+lPt2yA(uSzDWH%diYcL#GRmo-k}9gHp_V%8X`qoNnrWexHrnZ+lP%G_Vg!^Ac6@YlrX}HAd)Dei6NFa;z=Np zB$7!Xl{C`HAd@Vz$sw0K@+qK@B8n-YlrqYxppq)8siBrS>S>^nCYouXl{VVxpp!1T z>7kcC`Wax5A%+=YlrhGcV3H}OnPHYW=2>8oC6-xXl{MDcV3RGj*{a3Y8#ifCep zC60I!NF<45Qb;9@bTY^!i)?bpC69axD5QvDN+_j_aw@2#ifU@8rH* zb~@;!i*9=8rH_6F7-WcHMi^y`aVD5#ifLw;WsZ3kSY(N1R#;_?bvD>!i*0t;WsiLh zIOK?9PB`U^b1t~#ifeAT<&JwEc;ty^UU=n=cRu*!i*J7T<&S`U{Rt$9U_uBbjBp}| zB#LNah$W7A5=bP8WKu{ajdU`|B#Ufv$R&?_3MizAVoE5bjB+Zdq>5^4sHKj28fc`6 zW?E>ajdnWdq>FBP=%tT-1{h?BVMZ8bjBzHIWQu8Km}QQ67FcA7WmZ^ajdeEIWQ%Qf z*kzA>4mjk9V@^2bjB_ry7$7FlAM z6;@eeoeehGVw)Xy*<+sr4mskO6HYnfoC_|w;+h+7x#OM(9(m%K7hZYeoew_w;+r3S z`6J+e*PlRw2quJ3!U!jVNTP@)hFIc=CxJwgNG63;(nu$ROtQ!(hg|Z=r+`9=D5iu` z$|$FTN~)-)hFa>Vr-4SAXr_f$+GwYPPP*u(hhF;VXMjP57-ob~#u#UUNv4=)hFRvA 
zXMshQSZ0M))>vnQO}5x(hh6sA=YT_wIOc>?&N%0SORl))hFk8q=YdC_c;CWctzh$n$Wl1L_nRMJQ%gG{o>CWl<|$ftlp ziYTUpQpzZ&f=a5WriNPTsHcHOnrNnlR@!K%gHF2WriWho=x2aIh8SjqQN|c&f=Q;B zW`3trCy*e52_cj)!igY~D58lWmN?=`Adw`JNg%G{!jf0B#2-_2qlbgB8Vi4Xkv&Zj(8GCB#C5F zNF|MQGRP#0Y;wpYk9-O!q=;flD5Z>YDyXE2YHFyZj(Qqsq={x)Xr+yII_RW}ZhGjY zkA4OiWQbu#7-fucCYWT3X=a#Zj(HYXWQk=~SY?fMHrQl~ZFbmYk9`g}8zMmP~f5=AsI#1cn5 z2_%w4GAX2zMmiZ}l0`N-X?_+;Yb~4?Ob3GcUaI#ycN;^2Ikl{PIV@|D`{H1QARKp@b1m1d&7$ zO$@Qb5l;e%B#}%Csicuk2AO1$O%A!_kxv1I6j4kGrIb-l1(j4$O%1iwQBMPnG|@~8 zt+dfj2c2}$O%J{F(a!*b3^B|Iql_`m1d~iL%?z{5G0y^vEV0ZAtE{ok2AgcL%?`Wl zvCjdA9C6GEr<`%l1(#fL%?-EQanA#fJn_s6ue|Zj2cLZL%@4o);bjx>4}k;`ObDTb z5l#e=L=jC4vBVKi0*NG%ObV%_kxmAgWRXn{x#W>g0fiJ%ObMlwQBDPwR8dV0wbW5h z1C2D%Obe~F(M|`QbkR)@z4Xz~0D}xM%m|~5G0p^&Ofk(2v&=Ei0*frM%nGZlvCamY zY_ZJ_yX>*g0f!uM%n7HQan1#oTyf0}x7=~h1CKoM%nPr)@y-XIeDTc>zx)yKf9p>m zK?D;*C}D&XK_pQ`6GJR<#FIcGNhFg(DruyXK_*#blS3|f!6Z{mGs7%%%(K8EODwa( zDr>B>!6sX5v%@ZX>~p{&M;vp)DQBE>!6jE*bHgon-1ERAPdxL&D{s8>!6#pQ^TRKH z_|Odahd_b|CWKJJ2q%I_qKGDjSmKB$fkcu>CWTbeNGF3#vdAWfT=K}LfI^BWri4<; zD5ru-s;H)hTI#5$fkv8WriE78Xs3fty6C2dUi#=~fI)^BW`t437-xb>rkG}iS>~8$ zfkl>BW`$MOSZ9Mxw%BHeUG~`LfJ2Tr=7dwuIOl>(uDIrgTkg2$fk&Qr=7m?@c;|yp zzWC;cU;YUAfAlAiAc6@YlrX}HAd)Dei6NFa;z=NpB$7!Xl{C`HAd@Vz$sw0K@+qK@ zB8n-YlrqYxppq)8siBrS>S>^nCYouXl{VVxpp!1T>7kcC`Wax5A%+=YlrhGcV3H}O znPHYW=2>8oC6-xXl{MDcV3RGj*Kq5&blR_$Kq?17=S!9z# zE_virKp{mGQ$i_alv6<^Ra8?$Ep^n>KqE~w(?Tn4w9`Q+U3Ak!FMaegz#u~mGr}li zj5EO`Q%p0%EOX4Wz#>a5v%)HCth2!;TWqt#E_>{Az#&H*bHXWSoO8h?S6p+$EqC1W zz#~sQ^TI1{yz{{)Uwre!FMkC5zxoqM5W$2HN*Lip5J?o##1Kmy@g$H)63L{HN*d{8 zkVzKVk4wwS|A&?+~2_cj)!igY~D58lWmN?=`Adw`J zNgh9qTyn)VH{5c^Jr6wc#4|6v^2R$KeDcLNKm78C!{vZ~2qcJLLI@>{a3Y8# zifCepC60I!NF<45Qb;9@bTY^!i)?bpC69axD5QvDN+_j_aw@2#ifU@8rH*b~@;!i*9=8rH_6F7-WcHMi^y`aVD5#ifLw;WsZ3kSY(N1R#;_?bvD>!i*0t; zWsiLhIOK?9PB`U^b1t~#ifeAT<&JwEc;ty^UU=n=cRu*!i*J7T<&S`WuRnnV5ljf7 zgb_{zkwg(q46(!!PXdV~kxUAyq>)YrnPibo4!Pu!PXUD#QA`P?lu=Fvl~hqp4Ykx! 
zPXmoK(M$`iw9!rnopjMn554r!&j5oAG0X^~j4{pxlT0zq471EJ&jO1qvCImqtg+4p zn{2Vo4!i8J&jE)Vam)#)oN>+tmt1kp4Y%BJ&jXJ<@yrXayz$NlpM3Gn55N53VkzJs z0tq6R5JCwfoCqR`BAOUti6fo_5=kPN6jDheoeVO`BAXm?$s?Zv3MrzP5=tqfoC+$b zqM90NsiU3-8fl`L7FubeoenzbqMIIi>7$7FlAM z6;@eeoeehGVw)Xy*<+sr4mskO6HYnfoC_|w;+h+7x#OM(9(m%K7hZYeoew_w;+r3S z`6J*z=uaR)1QS9iVT2PwBvC{YLo9K`lRzR#B$GlaX{3`uCRt>YLoRvbQ$Qg_6jMSe zWt3AvB~?^YLoIdG(?BClG}A&WZM4%tCtY;YLoa>wGr%B23^T$gV~jJwBvVW?!z^>m zv%n%tEVIHYYpk=uCR=Q?!!CR5bHE`-9CN}cXPk4vC0AT?!!38*^S~odJoCaUZ@lxt zCtrN?!!LihR1WxuK!OM+giyi=CxS?#h$ewe^2n!v zLW(G+gi^{Vr-DkVsHTQm>ZqrIMw)1*g;v^Vr-M$q=%$BW`sinXL53J+gi*#AXM#zl zm}Z7q=9p)JMV44*g;myAXM;_)*k*@a_SoluLykD+gj3Eq=YmVFxaNji?zrcHN1k}* zg;(Bq=Yvna_~wUS{s{R0^e2!Yf(ap%Fv5u-k|?5yA(lAeNg$CVl1U+zG}6f+lPt2y zA(uSzDWH%diYcL#GRmo-k}9gHp_V%8X`qoNnrWexHrnZ+lP%Qlmz@kAVCBZLMUN`6G0?VL=!_Sam15AB1t5ZLMmyb zlR+j~WRpWKdE`?-Aw?8ZLMdgGQ$ZzFR8vDOb=1>9BTY2ZLMv^w(?KU)bkjpGee^TH zAVUl@!YE^mGr=TNOf$nQbIh~AB17~w<^Nfgn<5KA2KB#=lF z$)u1<8tG(^Nfz1UkV_u<6i`SJ#gtG=8Rb+^Nfp)9P)i;4G|)&B&9u-;8|`$^Nf+Jp z&`Tfv3^2$L!;CP>7~@PZ$rRJfFv}eCEU?HD%dD`<8tZJZ$rjt}u*)9%9B{}H$DDA= z8RuMZ$rab!aLXO{Jn+a9&%E%;8}EGZ$rs=J@XH^D$pZc%kRXByA(Sw}i6D|FqKP4v zIO0hlktC8yA(b@J$sm&~vdJNrJn|`^kRpmHp_DSpsi2Z7s;QxtI_hblktUjHp_Mk; z>7bJ?y6K^pKKdD8kRgT{VU#h(nP8GBrkP=uIp$elktLQ{VU;!3*8zMmP~f z5=AsI#1cn52_%w4GAX2zMmiZ}l0`N-X?_+;Yb~4?Ob3GcUaI#ycN;^2Ikl{PKrE(tv*mB#2-_ z2qlbgB8Vi4Xkv&Zj(8GCB#C5FNF|MQGRP#0Y;wpYk9-O!q=;flD5Z>YDyXE2YHFyZ zj(Qqsq={x)Xr+yII_RW}ZhGjYkA4OiWQbu#7-fucCYWT3X=a#Zj(HYXWQk=~SY?fM zHrQl~ZFbmYk9`g}g0fiJ%ObMlwQBDPw zR8dV0wbW5h1C2D%Obe~F(M|`QbkR)@z4Xz~0D}xM%m|~5G0p^&Ofk(2v&=Ei0*frM z%nGZlvCamYY_ZJ_yX>*g0f!uM%n7HQan1#oTyf0}x7=~h1CKoM%nPr)@y-XIeDTc> zzx-i{Jm4P!2_l#fLJ1?B2qK9hniyhKq5&blR_$Kq?17=S!9z#E_vir zKp{mGQ$i_alv6<^Ra8?$Ep^n>KqE~w(?Tn4w9`Q+U3Ak!FMaegz#u~mGr}lij5EO` zQ%p0%EOX4Wz#>a5v%)HCth2!;TWqt#E_>{Az#&H*bHXWSoO8h?S6p+$EqC1Wz#~sQ z^TI1{yz{{)Uwre!FMn7d3HXOVf(RyrP{If&f=Hr>CWctzh$n$Wl1L_nRMJQ%gG{o> zCWl<|$ftlpiYTUpQpzZ&f=a5WriNPTsHcHOnrNnlR@!K%gHF2WriWho=x2aIh8Sjq zQN|c&f=Q;BW`37h6G#xjgb+#?;Y1Kg6w$;GOC0eekVq2Aq>xG) z>12>e7TM&GOCI?YP)HHQlu$|;uj*e7TfHw%O3k2aL5tIoN&q+=Ui~f z71!Kw%N_ST@W>O-yzt5!?|ksd7vKEw%O94<0{$V8Ac6@YlrX}HAd)Dei6NFa;z=Np zB$7!Xl{C`HAd@Vz$sw0K@+qK@B8n-YlrqYxppq)8siBrS>S>^nCYouXl{VVxpp!1T z>7kcC`Wax5A%+=YlrhGcV3H}OnPHYW=2>8oC6-xXl{MDcV3RGj*qRnJo3adFTC=`J0E=V#Wz3v@`uIIfPV-ih+skpC5&() zh$M<=Vu&Syl*dk|arzBuSDaNs=Td!&)heKN=-i)?bph9qTyn)VH~iri2O|>xB9SDLNg(TYdE`?-ArE;(5sxY62~T;(b6)V05?=9|HrkG}iS>~8$fkl>BW`$MOSZ9Mxw%BHeUG~`LfJ2Tr=7dwuIOl>(uDIrg zKirb=KkQE;NhFg(DtEZcJ<>?$J{e?^MK(F)@_;<@DWH&tJfeul6!V0qJmWbpcu5Jb zc+DG1DWjYUDtXI0-c!W~s;QxtkJM351C2D%Obe~F@riai_{EbKj_|6Z0@{8Ye z(?c(P^fSO9Lku&*C}WH>!6Z{mGs7%%%(K8EODwa(Dr>B>!6sX5v%@ZX>~p{&M;vp) zDQBE>!6jE*bHg8QaVRR`FA_;2nG{mF!(Hx?MmqP&Ad@Vz$sv~qHb7<2yh2$uEA>O%J{F(a!*b3^B|Iql_`m1d~iL%?z{5G0y^vEV0ZA ztE{ok2AgcL%?`WlvCjdA9C6GEr<`%l1(#fL%?*FJ#gVv#zepsBWKu}w4tKdn8tL38 zgG{o>CWl-ekVifR6!MTq6!Dm1p74}sJm&>3Dd82bc|$2>lv6<^Z+XXis`x-PHPrHv zI_hblktUjHp_Mj1(M|`S`9ddMeB~S8`N24mjk9V@^2bjB_ryOwwoN~rF7hH11H8=d> z76&2|{vweil1U+zJKW_SX{2+X3^K_gn;dd^Kpy!NP{>0bQN&}4dBRhk@thaDq=Z+z z<_)EkQBDPwyyYG5sp13G)KJSu>ZqrIMw)1*g;v`5L^~aP<_n#4@s)3U=LbLe#c#Ul zp_e}T8DNkhh8ba$F~*r-k}0N{VU{`OSzwVRmRVtyHP+c+lP$K{VV6DjIpB~Zjyd6! 
zGtRl-k}Iyc;SaYY{BQe{ND|4UkjfqIa*s69xlaa}WRXn{xjZ0`dq4$PmMfFv=L?Ofbn5)66i-9P=!&$P&w}u*w?iY_Q1|+w8E* z9{U_{$Pve!aLO6yTyV)1*WB=jTO5Z?_=`l6NG63;?r@iTq>;{jGRP#0Y;wru0eR$8 zKp_u#L=lfE<_S-E#&cfqk`i9=nm3eEMmZH!@|JhJr-~0$Q$sBusiU3-8fl`L7FucJ z6YX^HnJ;wG#aF)Zoge(<7r*JIhhF;VXMjP57-ob~#u#UUNv4=)hFRvAXMshQSZ0M) z)>vnQO}5x(hh6sA=YT_wIOc>?&N%0SORl))hCker@PF)2B1t5ZLMnH-%RSOa=RO%^ zl0`N-w8Rb+^$y?s> zo+>_2O%1htq>g$TXrzf|T4<$>Pqfp)XTH!$7hn0tcYg4bU;L(<9(w7cp8*CLVwe#| z8DpFYCYfTI8D^Pdo&^?JVwn|IS!10IHrZmE9d_Acp92m#;+PXoIpdrQF1g~G8~$)h z!vD2Di6oIs3aQ-TF84?yo%>{vNfz1Ukjn$|$ftlp9`cAH9#hN{p7M<6yx=7zyy7)) zD5Z>YDyZZw?|4rYAE>5=T0T-oJqEJV8=%kCUeB(Pm_{lGR(@hV( z^wG}%gA6gu2&0TK&IFT8G0hCK%rVabi!8Cs3ahNK&IX%ovCR&we9*{>q1r+j-M-=gxVxI7n zXFTTxFDc;_uX#f$Wt3AvC2x7hd#d7$7FlAM6;@ee zoeehGVw)Xy*<+sr4mskO6HYnfoC_|w;`*PK{zF1SLi2z5`@c2(r?$T}75?4d{pb3^ ze|h^KoBwO$-~PQ1{~ZbckdW|~$G`noNuD9TgJ|IY&j$XD-+zejAR34UqJd~28i)p> zfoLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~2 z8i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34U zqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvz zAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh z2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8 zXdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEa zhz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p> zfoLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~2 z8i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34U zqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvz zAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh z2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8 zXdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEa zhz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p> zfoLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~2 z8i)p>foLEahz6p8XdoJh2BLvzAR34UqJd~28i)p>foLEahz6p8XdoJh2BLvzAR34U WqJd~28i)p>foLEahz9;&4g535-DSc6 From 21958bb393a654591ed26f339791b752d58f5c8b Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 2 Nov 2023 13:10:33 +0100 Subject: [PATCH 071/859] cmake : disable LLAMA_NATIVE by default (#3906) --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 611ed3f4d..3c49d645c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -44,7 +44,7 @@ endif() # general option(LLAMA_STATIC "llama: static link libraries" OFF) -option(LLAMA_NATIVE "llama: enable -march=native flag" ON) +option(LLAMA_NATIVE "llama: enable -march=native flag" OFF) option(LLAMA_LTO "llama: enable link time optimization" OFF) # debug From 4ff1046d75e64f0e556d8dcd930ea25c23eb8b18 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 2 Nov 2023 16:22:30 +0200 Subject: [PATCH 072/859] gguf : print error for GGUFv1 files (#3908) --- ggml.c | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/ggml.c b/ggml.c index d5a49d8e4..605a27940 100644 --- a/ggml.c +++ b/ggml.c @@ -18884,6 +18884,13 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p ok = ok && gguf_fread_el(file, &ctx->header.n_tensors, sizeof(ctx->header.n_tensors), &offset); ok = ok && gguf_fread_el(file, &ctx->header.n_kv, sizeof(ctx->header.n_kv), &offset); + if (ctx->header.version == 1) { + fprintf(stderr, "%s: GGUFv1 is no longer supported. 
please use a more up-to-date version\n", __func__); + fclose(file); + gguf_free(ctx); + return NULL; + } + if (!ok) { fprintf(stderr, "%s: failed to read header\n", __func__); fclose(file); From d6069051de7165a4e06662c89257f5d2905bb156 Mon Sep 17 00:00:00 2001 From: Oleksii Maryshchenko Date: Thu, 2 Nov 2023 18:10:39 +0100 Subject: [PATCH 073/859] cuda : use CUDA memory pool with async memory allocation/deallocation when available (#3903) * Using cuda memory pools for async alloc/dealloc. * If cuda device doesnt support memory pool than use old implementation. * Removed redundant cublasSetStream --------- Co-authored-by: Oleksii Maryshchenko --- ggml-cuda.cu | 130 ++++++++++++++++++++++++++++++--------------------- 1 file changed, 78 insertions(+), 52 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e46295126..58b58f331 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -181,11 +181,11 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); do { \ cudaError_t err_ = (err); \ if (err_ != cudaSuccess) { \ - int id; \ - cudaGetDevice(&id); \ + int dev_id; \ + cudaGetDevice(&dev_id); \ fprintf(stderr, "\nCUDA error %d at %s:%d: %s\n", err_, __FILE__, __LINE__, \ cudaGetErrorString(err_)); \ - fprintf(stderr, "current device: %d\n", id); \ + fprintf(stderr, "current device: %d\n", dev_id); \ exit(1); \ } \ } while (0) @@ -195,11 +195,11 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); do { \ cublasStatus_t err_ = (err); \ if (err_ != CUBLAS_STATUS_SUCCESS) { \ - int id; \ - cudaGetDevice(&id); \ + int dev_id; \ + cudaGetDevice(&dev_id); \ fprintf(stderr, "\ncuBLAS error %d at %s:%d: %s\n", \ err_, __FILE__, __LINE__, cublasGetStatusString(err_)); \ - fprintf(stderr, "current device: %d\n", id); \ + fprintf(stderr, "current device: %d\n", dev_id); \ exit(1); \ } \ } while (0) @@ -465,6 +465,7 @@ static_assert(K_QUANTS_PER_ITERATION == 1 || K_QUANTS_PER_ITERATION == 2, "K_QUA #define MAX_STREAMS 8 static cudaStream_t g_cudaStreams[GGML_CUDA_MAX_DEVICES][MAX_STREAMS] = { nullptr }; +static cudaMemPool_t g_cudaMemPools[GGML_CUDA_MAX_DEVICES] = { nullptr }; struct ggml_tensor_extra_gpu { void * data_device[GGML_CUDA_MAX_DEVICES]; // 1 pointer for each device for split tensors @@ -5772,6 +5773,16 @@ static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { return ptr; } +static void * ggml_cuda_pool_malloc_async(size_t size, size_t * actual_size, int id, cudaStream_t stream) { + if (g_cudaMemPools[id] == nullptr) { + return ggml_cuda_pool_malloc(size, actual_size); + } + void *ptr; + CUDA_CHECK(cudaMallocFromPoolAsync(&ptr, size, g_cudaMemPools[id], stream)); + *actual_size = size; + return ptr; +} + static void ggml_cuda_pool_free(void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); int id; @@ -5790,6 +5801,13 @@ static void ggml_cuda_pool_free(void * ptr, size_t size) { } +static void ggml_cuda_pool_free_async(void * ptr, size_t actual_size, int id, cudaStream_t stream) { + if (g_cudaMemPools[id] == nullptr) { + return ggml_cuda_pool_free(ptr, actual_size); + } + CUDA_CHECK(cudaFreeAsync(ptr, stream)); +} + void ggml_init_cublas() { static bool initialized = false; @@ -5844,6 +5862,13 @@ void ggml_init_cublas() { // create cublas handle CUBLAS_CHECK(cublasCreate(&g_cublas_handles[id])); CUBLAS_CHECK(cublasSetMathMode(g_cublas_handles[id], CUBLAS_TF32_TENSOR_OP_MATH)); + + // configure memory pool + cudaError_t err = cudaDeviceGetMemPool(&g_cudaMemPools[id], id); + if (err == cudaSuccess) { + size_t treshold = 
UINT64_MAX; + CUDA_CHECK(cudaMemPoolSetAttribute(g_cudaMemPools[id], cudaMemPoolAttrReleaseThreshold, &treshold)); + } } // configure logging to stdout @@ -6437,7 +6462,7 @@ inline void ggml_cuda_op_mul_mat_cublas( const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src0->type); GGML_ASSERT(to_fp16_cuda != nullptr); size_t ne = row_diff*ne00; - src0_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src0_as); + src0_as_f16 = (half *) ggml_cuda_pool_malloc_async(ne * sizeof(half), &src0_as, id, stream); to_fp16_cuda(src0_dd_i, src0_as_f16, ne, stream); } const half * src0_ptr = src0->type == GGML_TYPE_F16 ? (const half *) src0_dd_i : src0_as_f16; @@ -6448,13 +6473,12 @@ inline void ggml_cuda_op_mul_mat_cublas( const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); GGML_ASSERT(to_fp16_cuda != nullptr); size_t ne = src1_ncols*ne10; - src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src1_as); + src1_as_f16 = (half *) ggml_cuda_pool_malloc_async(ne * sizeof(half), &src1_as, id, stream); to_fp16_cuda(src1_ddf_i, src1_as_f16, ne, stream); } const half * src1_ptr = src1->type == GGML_TYPE_F16 ? (const half *) src1_ddq_i : src1_as_f16; - - size_t dst_as = 0; - half * dst_f16 = (half *) ggml_cuda_pool_malloc(row_diff*src1_ncols * sizeof(half), &dst_as); + size_t dst_f16_as = 0; + half * dst_f16 = (half *) ggml_cuda_pool_malloc_async(row_diff*src1_ncols * sizeof(half), &dst_f16_as, id, stream); const half alpha_f16 = 1.0f; const half beta_f16 = 0.0f; @@ -6472,14 +6496,15 @@ inline void ggml_cuda_op_mul_mat_cublas( const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); to_fp32_cuda(dst_f16, dst_dd_i, row_diff*src1_ncols, stream); - ggml_cuda_pool_free(dst_f16, dst_as); - - if (src0_as != 0) { - ggml_cuda_pool_free(src0_as_f16, src0_as); + if (dst_f16_as != 0) { + ggml_cuda_pool_free_async(dst_f16, dst_f16_as, id, stream); } + if (src0_as != 0) { + ggml_cuda_pool_free_async(src0_as_f16, src0_as, id, stream); + } if (src1_as != 0) { - ggml_cuda_pool_free(src1_as_f16, src1_as); + ggml_cuda_pool_free_async(src1_as_f16, src1_as, id, stream); } } else { @@ -6489,7 +6514,7 @@ inline void ggml_cuda_op_mul_mat_cublas( if (src0->type != GGML_TYPE_F32) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(src0->type); GGML_ASSERT(to_fp32_cuda != nullptr); - src0_ddq_as_f32 = (float *) ggml_cuda_pool_malloc(row_diff*ne00 * sizeof(float), &src0_as); // NOLINT + src0_ddq_as_f32 = (float *) ggml_cuda_pool_malloc_async(row_diff*ne00 * sizeof(float), &src0_as, id, stream); // NOLINT to_fp32_cuda(src0_dd_i, src0_ddq_as_f32, row_diff*ne00, stream); } const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? (const float *) src0_dd_i : src0_ddq_as_f32; @@ -6506,7 +6531,7 @@ inline void ggml_cuda_op_mul_mat_cublas( &beta, dst_dd_i, ldc)); if (src0_as != 0) { - ggml_cuda_pool_free(src0_ddq_as_f32, src0_as); + ggml_cuda_pool_free_async(src0_ddq_as_f32, src0_as, id, stream); } } @@ -6929,21 +6954,22 @@ static void ggml_cuda_op_mul_mat( src0_dd[id] = (char *) src0_extra->data_device[id]; } else { const size_t size_src0_ddq = split ? 
(row_high[id]-row_low[id])*ne00 * src0_ts/src0_bs : ggml_nbytes(src0); - src0_dd[id] = (char *) ggml_cuda_pool_malloc(ggml_nbytes(src0), &src0_as[id]); + src0_dd[id] = (char *) ggml_cuda_pool_malloc_async(ggml_nbytes(src0), &src0_as[id], id, stream); } if (src1_on_device && src1_is_contiguous) { src1_ddf[id] = (float *) src1_extra->data_device[id]; } else { - src1_ddf[id] = (float *) ggml_cuda_pool_malloc(ggml_nbytes(src1), &src1_asf[id]); + src1_ddf[id] = (float *) ggml_cuda_pool_malloc_async(ggml_nbytes(src1), &src1_asf[id], id, stream); } if (convert_src1_to_q8_1) { - src1_ddq[id] = (char *) ggml_cuda_pool_malloc(nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs, &src1_asq[id]); + const size_t size_dst_ddq = nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs; + src1_ddq[id] = (char *) ggml_cuda_pool_malloc_async(size_dst_ddq, &src1_asq[id], id, stream); if (src1_on_device && src1_is_contiguous) { quantize_row_q8_1_cuda(src1_ddf[id], src1_ddq[id], ne10, nrows1, src1_padded_col_size, stream); - CUDA_CHECK(cudaGetLastError()); + // CUDA_CHECK(cudaGetLastError()); } } @@ -6951,7 +6977,7 @@ static void ggml_cuda_op_mul_mat( dst_dd[id] = (float *) dst_extra->data_device[id]; } else { const size_t size_dst_ddf = split ? (row_high[id]-row_low[id])*ne1*sizeof(float) : ggml_nbytes(dst); - dst_dd[id] = (float *) ggml_cuda_pool_malloc(size_dst_ddf, &dst_as[id]); + dst_dd[id] = (float *) ggml_cuda_pool_malloc_async(size_dst_ddf, &dst_as[id], id, stream); } } @@ -7077,24 +7103,6 @@ static void ggml_cuda_op_mul_mat( } } - for (int64_t id = 0; id < g_device_count; ++id) { - CUDA_CHECK(ggml_cuda_set_device(id)); - - // free buffers again when done - if (src0_as[id] > 0) { - ggml_cuda_pool_free(src0_dd[id], src0_as[id]); - } - if (src1_asf[id] > 0) { - ggml_cuda_pool_free(src1_ddf[id], src1_asf[id]); - } - if (src1_asq[id] > 0) { - ggml_cuda_pool_free(src1_ddq[id], src1_asq[id]); - } - if (dst_as[id] > 0) { - ggml_cuda_pool_free(dst_dd[id], dst_as[id]); - } - } - // main device waits for all other devices to be finished if (split && g_device_count > 1) { int64_t is_max = (ne11 + MUL_MAT_SRC1_COL_STRIDE - 1) / MUL_MAT_SRC1_COL_STRIDE; @@ -7112,6 +7120,21 @@ static void ggml_cuda_op_mul_mat( CUDA_CHECK(ggml_cuda_set_device(g_main_device)); CUDA_CHECK(cudaDeviceSynchronize()); } + + for (int64_t id = 0; id < g_device_count; ++id) { + if (src0_as[id] > 0) { + ggml_cuda_pool_free_async(src0_dd[id], src0_as[id], id, g_cudaStreams[id][0]); + } + if (src1_asf[id] > 0) { + ggml_cuda_pool_free_async(src1_ddf[id], src1_asf[id], id, g_cudaStreams[id][0]); + } + if (src1_asq[id] > 0) { + ggml_cuda_pool_free_async(src1_ddq[id], src1_asq[id], id, g_cudaStreams[id][0]); + } + if (dst_as[id] > 0) { + ggml_cuda_pool_free_async(dst_dd[id], dst_as[id], id, g_cudaStreams[id][0]); + } + } } static void ggml_cuda_repeat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -7298,11 +7321,11 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const GGML_ASSERT(to_fp16_cuda != nullptr); size_t src1_as = 0; - half * src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne1 * sizeof(half), &src1_as); + half * src1_as_f16 = (half *) ggml_cuda_pool_malloc_async(ne1 * sizeof(half), &src1_as, id, main_stream); to_fp16_cuda(src1_ddf, src1_as_f16, ne1, main_stream); size_t dst_as = 0; - half * dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); + half * dst_f16 = (half *) ggml_cuda_pool_malloc_async(ne * sizeof(half), &dst_as, id, main_stream); GGML_ASSERT(ne12 % ne02 == 0); GGML_ASSERT(ne13 % 
ne03 == 0); @@ -7349,10 +7372,9 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const } else { // use cublasGemmBatchedEx const int ne23 = ne12*ne13; - - void ** ptrs_as = nullptr; + // allocate device memory for pointers size_t ptrs_s = 0; - ptrs_as = (void **) ggml_cuda_pool_malloc(3*ne23*sizeof(void *), &ptrs_s); + void ** ptrs_as = (void **)ggml_cuda_pool_malloc_async(3*ne23*sizeof(void *), &ptrs_s, id, main_stream); dim3 block_dims(ne13, ne12); k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( @@ -7365,7 +7387,6 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const dst->nb[2], dst->nb[3], r2, r3); CUDA_CHECK(cudaGetLastError()); - CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, @@ -7375,16 +7396,21 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ne23, CUBLAS_COMPUTE_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); - - ggml_cuda_pool_free(ptrs_as, ptrs_s); + // free device memory for pointers + if (ptrs_s != 0) { + ggml_cuda_pool_free_async(ptrs_as, ptrs_s, id, main_stream); + } } #endif const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); - - ggml_cuda_pool_free(src1_as_f16, src1_as); - ggml_cuda_pool_free(dst_f16, dst_as); + if (src1_as != 0) { + ggml_cuda_pool_free_async(src1_as_f16, src1_as, id, main_stream); + } + if (dst_as != 0) { + ggml_cuda_pool_free_async(dst_f16, dst_as, id, main_stream); + } } static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { From c7743fe1c1cbda5a886362aa371480360580fdf0 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 2 Nov 2023 20:32:11 +0200 Subject: [PATCH 074/859] cuda : fix const ptrs warning causing ROCm build issues (#3913) --- ggml-cuda.cu | 37 +++++++++++++++++++++++-------------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 58b58f331..06c28f565 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7248,7 +7248,7 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor __global__ void k_compute_batched_ptrs( const half * src0_as_f16, const half * src1_as_f16, half * dst_f16, - void ** ptrs, + const void ** ptrs_src, void ** ptrs_dst, int ne12, int ne13, int ne23, int nb02, int nb03, @@ -7265,9 +7265,9 @@ __global__ void k_compute_batched_ptrs( int i03 = i13 / r3; int i02 = i12 / r2; - ptrs[0*ne23 + i12 + i13*ne12] = (char *) src0_as_f16 + i02*nb02 + i03*nb03; - ptrs[1*ne23 + i12 + i13*ne12] = (char *) src1_as_f16 + i12*nb12/2 + i13*nb13/2; - ptrs[2*ne23 + i12 + i13*ne12] = (char *) dst_f16 + i12* nb2/2 + i13* nb3/2; + ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03; + ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_as_f16 + i12*nb12/2 + i13*nb13/2; + ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst_f16 + i12* nb2/2 + i13* nb3/2; } static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -7372,14 +7372,20 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const } else { // use cublasGemmBatchedEx const int ne23 = ne12*ne13; - // allocate device memory for pointers - size_t ptrs_s = 0; - void ** ptrs_as = (void **)ggml_cuda_pool_malloc_async(3*ne23*sizeof(void *), &ptrs_s, id, main_stream); + + const void ** ptrs_src = nullptr; + void ** ptrs_dst = nullptr; + + 
size_t ptrs_src_s = 0; + size_t ptrs_dst_s = 0; + + ptrs_src = (const void **) ggml_cuda_pool_malloc_async(2*ne23*sizeof(void *), &ptrs_src_s, id, main_stream); + ptrs_dst = ( void **) ggml_cuda_pool_malloc_async(1*ne23*sizeof(void *), &ptrs_dst_s, id, main_stream); dim3 block_dims(ne13, ne12); k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( src0_as_f16, src1_as_f16, dst_f16, - ptrs_as, + ptrs_src, ptrs_dst, ne12, ne13, ne23, nb02, nb03, @@ -7390,15 +7396,18 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - &alpha_f16, (const void * const *) (ptrs_as + 0*ne23), CUDA_R_16F, nb01/sizeof(half), - (const void * const *) (ptrs_as + 1*ne23), CUDA_R_16F, nb11/sizeof(float), - &beta_f16, ( void ** ) (ptrs_as + 2*ne23), CUDA_R_16F, ne01, + &alpha_f16, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, nb01/sizeof(half), + (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + &beta_f16, ( void **) (ptrs_dst + 0*ne23), CUDA_R_16F, ne01, ne23, CUBLAS_COMPUTE_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); - // free device memory for pointers - if (ptrs_s != 0) { - ggml_cuda_pool_free_async(ptrs_as, ptrs_s, id, main_stream); + + if (ptrs_src_s != 0) { + ggml_cuda_pool_free_async(ptrs_src, ptrs_src_s, id, main_stream); + } + if (ptrs_dst_s != 0) { + ggml_cuda_pool_free_async(ptrs_dst, ptrs_dst_s, id, main_stream); } } #endif From 224e7d5b14cbabab7ae45c64db2cfde979c8455d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 2 Nov 2023 20:44:12 +0200 Subject: [PATCH 075/859] readme : add notice about #3912 --- README.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/README.md b/README.md index b56ecaec7..9c9e36ad0 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,6 @@ ![llama](https://user-images.githubusercontent.com/1991296/230134379-7181e485-c521-4d23-a0d6-f7b3b61ba524.png) -[![Actions Status](https://github.com/ggerganov/llama.cpp/workflows/CI/badge.svg)](https://github.com/ggerganov/llama.cpp/actions) [![License: MIT](https://img.shields.io/badge/license-MIT-blue.svg)](https://opensource.org/licenses/MIT) [Roadmap](https://github.com/users/ggerganov/projects/7) / [Project status](https://github.com/ggerganov/llama.cpp/discussions/3471) / [Manifesto](https://github.com/ggerganov/llama.cpp/discussions/205) / [ggml](https://github.com/ggerganov/ggml) @@ -11,8 +10,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics -- LLaVA support: https://github.com/ggerganov/llama.cpp/pull/3436 -- ‼️ BPE tokenizer update: existing Falcon and Starcoder `.gguf` models will need to be reconverted: [#3252](https://github.com/ggerganov/llama.cpp/pull/3252) +- ⚠️ **Upcoming change that might break functionality. 
Help with testing is needed:** https://github.com/ggerganov/llama.cpp/pull/3912 ---- From 51b2fc11f7f605fff49725a4540e9a6ef7b51b70 Mon Sep 17 00:00:00 2001 From: Andrei Date: Thu, 2 Nov 2023 15:40:31 -0400 Subject: [PATCH 076/859] cmake : fix relative path to git submodule index (#3915) --- common/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index 0150114e3..ac594b2ca 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -11,7 +11,7 @@ if(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/../.git") if(NOT IS_DIRECTORY "${GIT_DIR}") file(READ ${GIT_DIR} REAL_GIT_DIR_LINK) string(REGEX REPLACE "gitdir: (.*)\n$" "\\1" REAL_GIT_DIR ${REAL_GIT_DIR_LINK}) - set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/${REAL_GIT_DIR}") + set(GIT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../${REAL_GIT_DIR}") endif() set(GIT_INDEX "${GIT_DIR}/index") From 629f917cd6b96ba1274c49a8aab163b1b189229d Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Thu, 2 Nov 2023 13:58:22 -0600 Subject: [PATCH 077/859] cuda : add ROCM aliases for CUDA pool stuff (#3918) --- ggml-cuda.cu | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 06c28f565..baf02df2b 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -39,6 +39,10 @@ #define cudaDeviceCanAccessPeer hipDeviceCanAccessPeer #define cudaDeviceDisablePeerAccess hipDeviceDisablePeerAccess #define cudaDeviceEnablePeerAccess hipDeviceEnablePeerAccess +#define cudaDeviceGetMemPool hipDeviceGetMemPool +#define cudaMemPoolAttrReleaseThreshold hipMemPoolAttrReleaseThreshold +#define cudaMemPoolSetAttribute hipMemPoolSetAttribute +#define cudaMemPool_t hipMemPool_t #define cudaDeviceProp hipDeviceProp_t #define cudaDeviceSynchronize hipDeviceSynchronize #define cudaError_t hipError_t @@ -48,6 +52,7 @@ #define cudaEvent_t hipEvent_t #define cudaEventDestroy hipEventDestroy #define cudaFree hipFree +#define cudaFreeAsync hipFreeAsync #define cudaFreeHost hipHostFree #define cudaGetDevice hipGetDevice #define cudaGetDeviceCount hipGetDeviceCount @@ -55,6 +60,7 @@ #define cudaGetErrorString hipGetErrorString #define cudaGetLastError hipGetLastError #define cudaMalloc hipMalloc +#define cudaMallocFromPoolAsync hipMallocFromPoolAsync #define cudaMallocHost(ptr, size) hipHostMalloc(ptr, size, hipHostMallocDefault) #define cudaMemcpy hipMemcpy #define cudaMemcpy2DAsync hipMemcpy2DAsync From 3fdbe6b66b7b5c6ad3b2f245cbad1517c27ff776 Mon Sep 17 00:00:00 2001 From: cebtenzzre Date: Fri, 3 Nov 2023 02:31:58 -0400 Subject: [PATCH 078/859] llama : change yarn_ext_factor placeholder to -1 (#3922) --- llama.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index bb60044b4..cc0211ceb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7982,7 +7982,7 @@ struct llama_context_params llama_context_default_params() { /*.rope_scaling_type =*/ LLAMA_ROPE_SCALING_UNSPECIFIED, /*.rope_freq_base =*/ 0.0f, /*.rope_freq_scale =*/ 0.0f, - /*.yarn_ext_factor =*/ NAN, + /*.yarn_ext_factor =*/ -1.0f, /*.yarn_attn_factor =*/ 1.0f, /*.yarn_beta_fast =*/ 32.0f, /*.yarn_beta_slow =*/ 1.0f, @@ -8125,7 +8125,7 @@ struct llama_context * llama_new_context_with_model( cparams.rope_freq_scale = 1.0f; // never scale if scaling type is none } - if (std::isnan(cparams.yarn_ext_factor)) { // NaN indicates 'not set' + if (cparams.yarn_ext_factor < 0.0f) { // negative indicates 'not set' cparams.yarn_ext_factor = rope_scaling_type == LLAMA_ROPE_SCALING_YARN ? 
1.0f : 0.0f; } From 05816027d649f977468fc804cdb54e99eac246d1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 3 Nov 2023 09:24:00 +0200 Subject: [PATCH 079/859] common : YAYF (yet another YARN fix) (#3925) ggml-ci --- common/common.h | 44 ++++++++++++++++++++++---------------------- llama.h | 10 +++++----- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/common/common.h b/common/common.h index 72a49b890..9ad625633 100644 --- a/common/common.h +++ b/common/common.h @@ -43,29 +43,29 @@ extern char const *LLAMA_BUILD_TARGET; int32_t get_num_physical_cores(); struct gpt_params { - uint32_t seed = -1; // RNG seed + uint32_t seed = -1; // RNG seed int32_t n_threads = get_num_physical_cores(); - int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads) - int32_t n_predict = -1; // new tokens to predict - int32_t n_ctx = 512; // context size - int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) - int32_t n_keep = 0; // number of tokens to keep from initial prompt - int32_t n_draft = 16; // number of tokens to draft during speculative decoding - int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) - int32_t n_parallel = 1; // number of parallel sequences to decode - int32_t n_sequences = 1; // number of sequences to decode - int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) - int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) - int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors - float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs - int32_t n_beams = 0; // if non-zero then use beam search of given width. - float rope_freq_base = 0.0f; // RoPE base frequency - float rope_freq_scale = 0.0f; // RoPE frequency scaling factor - float yarn_ext_factor = NAN; // YaRN extrapolation mix factor - float yarn_attn_factor = 1.0f; // YaRN magnitude scaling factor - float yarn_beta_fast = 32.0f;// YaRN low correction dim - float yarn_beta_slow = 1.0f; // YaRN high correction dim - int32_t yarn_orig_ctx = 0; // YaRN original context length + int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads) + int32_t n_predict = -1; // new tokens to predict + int32_t n_ctx = 512; // context size + int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) + int32_t n_keep = 0; // number of tokens to keep from initial prompt + int32_t n_draft = 16; // number of tokens to draft during speculative decoding + int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) + int32_t n_parallel = 1; // number of parallel sequences to decode + int32_t n_sequences = 1; // number of sequences to decode + int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) + int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) + int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors + float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs + int32_t n_beams = 0; // if non-zero then use beam search of given width. 
+ float rope_freq_base = 0.0f; // RoPE base frequency + float rope_freq_scale = 0.0f; // RoPE frequency scaling factor + float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor + float yarn_attn_factor = 1.0f; // YaRN magnitude scaling factor + float yarn_beta_fast = 32.0f; // YaRN low correction dim + float yarn_beta_slow = 1.0f; // YaRN high correction dim + int32_t yarn_orig_ctx = 0; // YaRN original context length int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // // sampling parameters diff --git a/llama.h b/llama.h index 3f1becd76..e8dc04bb5 100644 --- a/llama.h +++ b/llama.h @@ -175,11 +175,11 @@ extern "C" { }; struct llama_context_params { - uint32_t seed; // RNG seed, -1 for random - uint32_t n_ctx; // text context, 0 = from model - uint32_t n_batch; // prompt processing maximum batch size - uint32_t n_threads; // number of threads to use for generation - uint32_t n_threads_batch; // number of threads to use for batch processing + uint32_t seed; // RNG seed, -1 for random + uint32_t n_ctx; // text context, 0 = from model + uint32_t n_batch; // prompt processing maximum batch size + uint32_t n_threads; // number of threads to use for generation + uint32_t n_threads_batch; // number of threads to use for batch processing int8_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type` // ref: https://github.com/ggerganov/llama.cpp/pull/2054 From 8f961abdc4e134c83bf8c2ad618ab256b4cae0f9 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 3 Nov 2023 09:41:17 +0200 Subject: [PATCH 080/859] speculative : change default p_accept to 0.5 + CLI args (#3919) ggml-ci --- common/common.cpp | 14 ++++++++++++++ common/common.h | 8 ++++++-- examples/speculative/speculative.cpp | 8 +++++--- 3 files changed, 25 insertions(+), 5 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index e938dee16..20cc4a081 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -403,6 +403,18 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.n_sequences = std::stoi(argv[i]); + } else if (arg == "--p-accept" || arg == "-pa") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.p_accept = std::stof(argv[i]); + } else if (arg == "--p-split" || arg == "-ps") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.p_split = std::stof(argv[i]); } else if (arg == "-m" || arg == "--model") { if (++i >= argc) { invalid_param = true; @@ -778,6 +790,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --chunks N max number of chunks to process (default: %d, -1 = all)\n", params.n_chunks); printf(" -np N, --parallel N number of parallel sequences to decode (default: %d)\n", params.n_parallel); printf(" -ns N, --sequences N number of sequences to decode (default: %d)\n", params.n_sequences); + printf(" -pa N, --p-accept N speculative decoding accept probability (default: %.1f)\n", (double)params.p_accept); + printf(" -ps N, --p-split N speculative decoding split probability (default: %.1f)\n", (double)params.p_split); printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA. see examples/llava/README.md\n"); printf(" --image IMAGE_FILE path to an image file. 
use with multimodal models\n"); diff --git a/common/common.h b/common/common.h index 9ad625633..dd6b002eb 100644 --- a/common/common.h +++ b/common/common.h @@ -44,6 +44,7 @@ int32_t get_num_physical_cores(); struct gpt_params { uint32_t seed = -1; // RNG seed + int32_t n_threads = get_num_physical_cores(); int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads) int32_t n_predict = -1; // new tokens to predict @@ -54,6 +55,8 @@ struct gpt_params { int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) int32_t n_parallel = 1; // number of parallel sequences to decode int32_t n_sequences = 1; // number of sequences to decode + float p_accept = 0.5f; // speculative decoding accept probability + float p_split = 0.1f; // speculative decoding split probability int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors @@ -66,7 +69,8 @@ struct gpt_params { float yarn_beta_fast = 32.0f; // YaRN low correction dim float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; // YaRN original context length - int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; + int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // TODO: better to be int32_t for alignment + // pinging @cebtenzzre // // sampling parameters struct llama_sampling_params sparams; @@ -90,7 +94,7 @@ struct gpt_params { int ppl_output_type = 0; // = 0 -> ppl output is as usual, = 1 -> ppl output is num_tokens, ppl, one per line // (which is more convenient to use for plotting) // - bool hellaswag = false; // compute HellaSwag score over random tasks from datafile supplied in prompt + bool hellaswag = false; // compute HellaSwag score over random tasks from datafile supplied in prompt size_t hellaswag_tasks = 400; // number of tasks to use when computing the HellaSwag score bool mul_mat_q = true; // if true, use mul_mat_q kernels instead of cuBLAS diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 798684f66..3a8e27811 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -37,9 +37,11 @@ int main(int argc, char ** argv) { // max number of parallel drafting sequences (i.e. 
tree branches) const int n_seq_dft = params.n_parallel; - // TODO: make this configurable - const float p_accept = 0.80f; - const float p_split = 0.10f; + // probability threshold for accepting a token from the draft model + const float p_accept = params.p_accept; + + // probability threshold for splitting a draft branch (only for n_seq_dft > 1) + const float p_split = params.p_split; #ifndef LOG_DISABLE_LOGS log_set_target(log_filename_generator("speculative", "log")); From abb77e7319aabc0b5cfb7c22da690a692489b6b7 Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 3 Nov 2023 12:13:09 +0100 Subject: [PATCH 081/859] ggml-cuda : move row numbers to x grid dim in mmv kernels (#3921) --- ggml-cuda.cu | 53 ++++++++++++++++++++++++++-------------------------- 1 file changed, 27 insertions(+), 26 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index baf02df2b..bdbcca0ca 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -989,7 +989,7 @@ static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); - const int row = blockIdx.y*blockDim.y + threadIdx.y; + const int row = blockIdx.x*blockDim.y + threadIdx.y; if (row > nrows) return; const int num_blocks_per_row = ncols / QK_K; @@ -1093,7 +1093,7 @@ static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, static __global__ void dequantize_mul_mat_vec_q3_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { - const int row = blockIdx.y*blockDim.y + threadIdx.y; + const int row = blockIdx.x*blockDim.y + threadIdx.y; if (row > nrows) return; const int num_blocks_per_row = ncols / QK_K; @@ -1197,7 +1197,7 @@ static __global__ void dequantize_mul_mat_vec_q3_k(const void * __restrict__ vx, static __global__ void dequantize_mul_mat_vec_q4_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { - const int row = blockIdx.y*blockDim.y + threadIdx.y; + const int row = blockIdx.x*blockDim.y + threadIdx.y; if (row > nrows) return; const int num_blocks_per_row = ncols / QK_K; const int ib0 = row*num_blocks_per_row; @@ -1451,7 +1451,7 @@ static __global__ void dequantize_mul_mat_vec_q6_k(const void * __restrict__ vx, static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); - const int row = blockIdx.y*blockDim.y + threadIdx.y; + const int row = blockIdx.x*blockDim.y + threadIdx.y; if (row > nrows) return; const int num_blocks_per_row = ncols / QK_K; @@ -4261,7 +4261,7 @@ template static __global__ void template static __global__ void mul_mat_vec_q(const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols, const int nrows) { - const int row = blockIdx.y*blockDim.y + threadIdx.y; + const int row = blockIdx.x*blockDim.y + threadIdx.y; if (row >= nrows) { return; @@ -4301,7 +4301,7 @@ template static __global__ void dequantize_mul_mat_vec(const void * __restrict__ vx, const dfloat * __restrict__ y, float * __restrict__ dst, const int ncols, const int nrows) { // qk = quantized weights per x block // qr = number of quantized weights per data value in x block - const int row = blockIdx.y*blockDim.y + threadIdx.y; + const int row = blockIdx.x*blockDim.y + threadIdx.y; if (row >= nrows) { return; @@ -4874,7 +4874,8 @@ static void dequantize_row_q6_K_cuda(const void * vx, dst_t * y, const int k, cu static void 
dequantize_mul_mat_vec_q4_0_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + // the number of rows may exceed maximum grid size in the y or z dimensions, use the x dimension instead + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); dequantize_mul_mat_vec <<>>(vx, y, dst, ncols, nrows); @@ -4883,7 +4884,7 @@ static void dequantize_mul_mat_vec_q4_0_cuda(const void * vx, const dfloat * y, static void dequantize_mul_mat_vec_q4_1_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); dequantize_mul_mat_vec <<>>(vx, y, dst, ncols, nrows); @@ -4892,7 +4893,7 @@ static void dequantize_mul_mat_vec_q4_1_cuda(const void * vx, const dfloat * y, static void dequantize_mul_mat_vec_q5_0_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); dequantize_mul_mat_vec <<>>(vx, y, dst, ncols, nrows); @@ -4901,7 +4902,7 @@ static void dequantize_mul_mat_vec_q5_0_cuda(const void * vx, const dfloat * y, static void dequantize_mul_mat_vec_q5_1_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); dequantize_mul_mat_vec <<>>(vx, y, dst, ncols, nrows); @@ -4910,7 +4911,7 @@ static void dequantize_mul_mat_vec_q5_1_cuda(const void * vx, const dfloat * y, static void dequantize_mul_mat_vec_q8_0_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); dequantize_mul_mat_vec <<>>(vx, y, dst, ncols, nrows); @@ -4920,7 +4921,7 @@ static void dequantize_mul_mat_vec_q2_K_cuda(const void * vx, const float * y, f GGML_ASSERT(ncols % QK_K == 0); const int ny = 2; // very slightly faster than 1 even when K_QUANTS_PER_ITERATION = 2 const int block_num_y = (nrows + ny - 1) / ny; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(32, ny, 1); dequantize_mul_mat_vec_q2_k<<>>(vx, y, dst, ncols, nrows); } @@ -4929,7 +4930,7 @@ static void dequantize_mul_mat_vec_q3_K_cuda(const void * vx, const float * y, f GGML_ASSERT(ncols % QK_K == 0); const int ny = 2 / K_QUANTS_PER_ITERATION; const int block_num_y = (nrows + ny - 1) / ny; - const dim3 block_nums(1, block_num_y, 1); + const 
dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(32, ny, 1); dequantize_mul_mat_vec_q3_k<<>>(vx, y, dst, ncols, nrows); } @@ -4938,7 +4939,7 @@ static void dequantize_mul_mat_vec_q4_K_cuda(const void * vx, const float * y, f GGML_ASSERT(ncols % QK_K == 0); const int ny = 2 / K_QUANTS_PER_ITERATION; const int block_num_y = (nrows + ny - 1) / ny; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(32, ny, 1); dequantize_mul_mat_vec_q4_k<<>>(vx, y, dst, ncols, nrows); } @@ -4953,7 +4954,7 @@ static void dequantize_mul_mat_vec_q6_K_cuda(const void * vx, const float * y, f GGML_ASSERT(ncols % QK_K == 0); const int ny = 2 / K_QUANTS_PER_ITERATION; const int block_num_y = (nrows + ny - 1) / ny; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(32, ny, 1); dequantize_mul_mat_vec_q6_k<<>>(vx, y, dst, ncols, nrows); } @@ -4961,7 +4962,7 @@ static void dequantize_mul_mat_vec_q6_K_cuda(const void * vx, const float * y, f static void mul_mat_vec_q4_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK4_0 == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -4970,7 +4971,7 @@ static void mul_mat_vec_q4_0_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q4_1_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK4_1 == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -4979,7 +4980,7 @@ static void mul_mat_vec_q4_1_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q5_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK5_0 == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -4988,7 +4989,7 @@ static void mul_mat_vec_q5_0_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q5_1_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK5_1 == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -4997,7 +4998,7 @@ static void mul_mat_vec_q5_1_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q8_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK8_0 == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); 
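// Background for the repeated dim3 change in this patch: CUDA limits
// gridDim.y and gridDim.z to 65535 blocks each, while gridDim.x may be as
// large as 2^31 - 1 on compute capability >= 3.0 (see
// cudaDeviceProp::maxGridSize). block_num_y grows with the number of matrix
// rows, so a sufficiently tall tensor would overflow the old
// dim3(1, block_num_y, 1) launch configuration; dim3(block_num_y, 1, 1)
// keeps the same block count under the far larger x-dimension limit, and
// the kernels switch from blockIdx.y to blockIdx.x to match.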
const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -5006,7 +5007,7 @@ static void mul_mat_vec_q8_0_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q2_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK_K == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -5015,7 +5016,7 @@ static void mul_mat_vec_q2_K_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q3_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK_K == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -5024,7 +5025,7 @@ static void mul_mat_vec_q3_K_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q4_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK_K == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -5033,7 +5034,7 @@ static void mul_mat_vec_q4_K_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q5_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK_K == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -5042,7 +5043,7 @@ static void mul_mat_vec_q5_K_q8_1_cuda(const void * vx, const void * vy, float * static void mul_mat_vec_q6_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % QK_K == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); mul_mat_vec_q <<>>(vx, vy, dst, ncols, nrows); @@ -5061,7 +5062,7 @@ static void convert_fp32_to_fp16_cuda(const void * vx, half * y, const int k, cu static void convert_mul_mat_vec_f16_cuda(const void * vx, const dfloat * y, float * dst, const int ncols, const int nrows, cudaStream_t stream) { GGML_ASSERT(ncols % GGML_CUDA_DMMV_X == 0); const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(1, block_num_y, 1); + const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); dequantize_mul_mat_vec<1, 1, convert_f16> <<>>(vx, y, dst, ncols, nrows); From 5ba37461711095c0284233dbd14f0d9010cdbf56 Mon Sep 17 00:00:00 2001 From: Xiao-Yong Jin Date: Fri, 3 Nov 2023 13:00:31 -0500 Subject: [PATCH 082/859] ggml-metal: fix 
yarn rope (#3937) --- ggml-metal.m | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ggml-metal.m b/ggml-metal.m index b33a3cb8f..acdb83843 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1403,7 +1403,8 @@ void ggml_metal_graph_compute( const int n_past = ((int32_t *) dst->op_params)[0]; const int n_dims = ((int32_t *) dst->op_params)[1]; const int mode = ((int32_t *) dst->op_params)[2]; - const int n_orig_ctx = ((int32_t *) dst->op_params)[3]; + // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal + const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); From d9b33fe95bd257b36c84ee5769cc048230067d6f Mon Sep 17 00:00:00 2001 From: Peter Sugihara Date: Fri, 3 Nov 2023 12:18:18 -0700 Subject: [PATCH 083/859] metal : round up to 16 to fix MTLDebugComputeCommandEncoder assertion (#3938) --- ggml-metal.m | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index acdb83843..78ae4485d 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1017,7 +1017,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setThreadgroupMemoryLength:nth/32*sizeof(float) atIndex:0]; + [encoder setThreadgroupMemoryLength:MAX(16, nth/32*sizeof(float)) atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -1348,7 +1348,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; [encoder setBytes:&eps length:sizeof( float) atIndex:4]; - [encoder setThreadgroupMemoryLength:nth*sizeof(float) atIndex:0]; + [encoder setThreadgroupMemoryLength:MAX(16, nth*sizeof(float)) atIndex:0]; const int64_t nrows = ggml_nrows(src0); From f28af0d81aa1010afa5de74cf627dcb04bea3157 Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Sat, 4 Nov 2023 16:20:34 -0600 Subject: [PATCH 084/859] gguf-py: Support 01.AI Yi models (#3943) --- gguf-py/gguf/gguf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gguf-py/gguf/gguf.py b/gguf-py/gguf/gguf.py index 727b4e554..a2271d225 100644 --- a/gguf-py/gguf/gguf.py +++ b/gguf-py/gguf/gguf.py @@ -393,6 +393,7 @@ class TensorNameMap: "layers.{bid}.attention_norm", # llama-pth "encoder.layer.{bid}.attention.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.input_layernorm", # persimmon + "model.layers.{bid}.ln1", # yi ), # Attention norm 2 @@ -464,6 +465,7 @@ class TensorNameMap: "layers.{bid}.ffn_norm", # llama-pth "encoder.layer.{bid}.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon + "model.layers.{bid}.ln2", # yi ), # Feed-forward up From 48ade94538fa509465d71023e49d07aab0ec8cd5 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 5 Nov 2023 08:12:13 +0100 Subject: [PATCH 085/859] cuda : revert CUDA pool stuff (#3944) * Revert "cuda : add ROCM aliases for CUDA pool stuff (#3918)" This reverts commit 629f917cd6b96ba1274c49a8aab163b1b189229d. * Revert "cuda : use CUDA memory pool with async memory allocation/deallocation when available (#3903)" This reverts commit d6069051de7165a4e06662c89257f5d2905bb156. 
ggml-ci --- ggml-cuda.cu | 131 ++++++++++++++++++++------------------------------- 1 file changed, 50 insertions(+), 81 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index bdbcca0ca..dc14f2f5d 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -39,10 +39,6 @@ #define cudaDeviceCanAccessPeer hipDeviceCanAccessPeer #define cudaDeviceDisablePeerAccess hipDeviceDisablePeerAccess #define cudaDeviceEnablePeerAccess hipDeviceEnablePeerAccess -#define cudaDeviceGetMemPool hipDeviceGetMemPool -#define cudaMemPoolAttrReleaseThreshold hipMemPoolAttrReleaseThreshold -#define cudaMemPoolSetAttribute hipMemPoolSetAttribute -#define cudaMemPool_t hipMemPool_t #define cudaDeviceProp hipDeviceProp_t #define cudaDeviceSynchronize hipDeviceSynchronize #define cudaError_t hipError_t @@ -52,7 +48,6 @@ #define cudaEvent_t hipEvent_t #define cudaEventDestroy hipEventDestroy #define cudaFree hipFree -#define cudaFreeAsync hipFreeAsync #define cudaFreeHost hipHostFree #define cudaGetDevice hipGetDevice #define cudaGetDeviceCount hipGetDeviceCount @@ -60,7 +55,6 @@ #define cudaGetErrorString hipGetErrorString #define cudaGetLastError hipGetLastError #define cudaMalloc hipMalloc -#define cudaMallocFromPoolAsync hipMallocFromPoolAsync #define cudaMallocHost(ptr, size) hipHostMalloc(ptr, size, hipHostMallocDefault) #define cudaMemcpy hipMemcpy #define cudaMemcpy2DAsync hipMemcpy2DAsync @@ -187,11 +181,11 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); do { \ cudaError_t err_ = (err); \ if (err_ != cudaSuccess) { \ - int dev_id; \ - cudaGetDevice(&dev_id); \ + int id; \ + cudaGetDevice(&id); \ fprintf(stderr, "\nCUDA error %d at %s:%d: %s\n", err_, __FILE__, __LINE__, \ cudaGetErrorString(err_)); \ - fprintf(stderr, "current device: %d\n", dev_id); \ + fprintf(stderr, "current device: %d\n", id); \ exit(1); \ } \ } while (0) @@ -201,11 +195,11 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); do { \ cublasStatus_t err_ = (err); \ if (err_ != CUBLAS_STATUS_SUCCESS) { \ - int dev_id; \ - cudaGetDevice(&dev_id); \ + int id; \ + cudaGetDevice(&id); \ fprintf(stderr, "\ncuBLAS error %d at %s:%d: %s\n", \ err_, __FILE__, __LINE__, cublasGetStatusString(err_)); \ - fprintf(stderr, "current device: %d\n", dev_id); \ + fprintf(stderr, "current device: %d\n", id); \ exit(1); \ } \ } while (0) @@ -471,7 +465,6 @@ static_assert(K_QUANTS_PER_ITERATION == 1 || K_QUANTS_PER_ITERATION == 2, "K_QUA #define MAX_STREAMS 8 static cudaStream_t g_cudaStreams[GGML_CUDA_MAX_DEVICES][MAX_STREAMS] = { nullptr }; -static cudaMemPool_t g_cudaMemPools[GGML_CUDA_MAX_DEVICES] = { nullptr }; struct ggml_tensor_extra_gpu { void * data_device[GGML_CUDA_MAX_DEVICES]; // 1 pointer for each device for split tensors @@ -5780,16 +5773,6 @@ static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { return ptr; } -static void * ggml_cuda_pool_malloc_async(size_t size, size_t * actual_size, int id, cudaStream_t stream) { - if (g_cudaMemPools[id] == nullptr) { - return ggml_cuda_pool_malloc(size, actual_size); - } - void *ptr; - CUDA_CHECK(cudaMallocFromPoolAsync(&ptr, size, g_cudaMemPools[id], stream)); - *actual_size = size; - return ptr; -} - static void ggml_cuda_pool_free(void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); int id; @@ -5808,13 +5791,6 @@ static void ggml_cuda_pool_free(void * ptr, size_t size) { } -static void ggml_cuda_pool_free_async(void * ptr, size_t actual_size, int id, cudaStream_t stream) { - if (g_cudaMemPools[id] == nullptr) { - return 
ggml_cuda_pool_free(ptr, actual_size); - } - CUDA_CHECK(cudaFreeAsync(ptr, stream)); -} - void ggml_init_cublas() { static bool initialized = false; @@ -5869,13 +5845,6 @@ void ggml_init_cublas() { // create cublas handle CUBLAS_CHECK(cublasCreate(&g_cublas_handles[id])); CUBLAS_CHECK(cublasSetMathMode(g_cublas_handles[id], CUBLAS_TF32_TENSOR_OP_MATH)); - - // configure memory pool - cudaError_t err = cudaDeviceGetMemPool(&g_cudaMemPools[id], id); - if (err == cudaSuccess) { - size_t treshold = UINT64_MAX; - CUDA_CHECK(cudaMemPoolSetAttribute(g_cudaMemPools[id], cudaMemPoolAttrReleaseThreshold, &treshold)); - } } // configure logging to stdout @@ -6469,7 +6438,7 @@ inline void ggml_cuda_op_mul_mat_cublas( const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src0->type); GGML_ASSERT(to_fp16_cuda != nullptr); size_t ne = row_diff*ne00; - src0_as_f16 = (half *) ggml_cuda_pool_malloc_async(ne * sizeof(half), &src0_as, id, stream); + src0_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src0_as); to_fp16_cuda(src0_dd_i, src0_as_f16, ne, stream); } const half * src0_ptr = src0->type == GGML_TYPE_F16 ? (const half *) src0_dd_i : src0_as_f16; @@ -6480,12 +6449,13 @@ inline void ggml_cuda_op_mul_mat_cublas( const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); GGML_ASSERT(to_fp16_cuda != nullptr); size_t ne = src1_ncols*ne10; - src1_as_f16 = (half *) ggml_cuda_pool_malloc_async(ne * sizeof(half), &src1_as, id, stream); + src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src1_as); to_fp16_cuda(src1_ddf_i, src1_as_f16, ne, stream); } const half * src1_ptr = src1->type == GGML_TYPE_F16 ? (const half *) src1_ddq_i : src1_as_f16; - size_t dst_f16_as = 0; - half * dst_f16 = (half *) ggml_cuda_pool_malloc_async(row_diff*src1_ncols * sizeof(half), &dst_f16_as, id, stream); + + size_t dst_as = 0; + half * dst_f16 = (half *) ggml_cuda_pool_malloc(row_diff*src1_ncols * sizeof(half), &dst_as); const half alpha_f16 = 1.0f; const half beta_f16 = 0.0f; @@ -6503,15 +6473,14 @@ inline void ggml_cuda_op_mul_mat_cublas( const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); to_fp32_cuda(dst_f16, dst_dd_i, row_diff*src1_ncols, stream); - if (dst_f16_as != 0) { - ggml_cuda_pool_free_async(dst_f16, dst_f16_as, id, stream); - } + ggml_cuda_pool_free(dst_f16, dst_as); if (src0_as != 0) { - ggml_cuda_pool_free_async(src0_as_f16, src0_as, id, stream); + ggml_cuda_pool_free(src0_as_f16, src0_as); } + if (src1_as != 0) { - ggml_cuda_pool_free_async(src1_as_f16, src1_as, id, stream); + ggml_cuda_pool_free(src1_as_f16, src1_as); } } else { @@ -6521,7 +6490,7 @@ inline void ggml_cuda_op_mul_mat_cublas( if (src0->type != GGML_TYPE_F32) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(src0->type); GGML_ASSERT(to_fp32_cuda != nullptr); - src0_ddq_as_f32 = (float *) ggml_cuda_pool_malloc_async(row_diff*ne00 * sizeof(float), &src0_as, id, stream); // NOLINT + src0_ddq_as_f32 = (float *) ggml_cuda_pool_malloc(row_diff*ne00 * sizeof(float), &src0_as); // NOLINT to_fp32_cuda(src0_dd_i, src0_ddq_as_f32, row_diff*ne00, stream); } const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? 
(const float *) src0_dd_i : src0_ddq_as_f32; @@ -6538,7 +6507,7 @@ inline void ggml_cuda_op_mul_mat_cublas( &beta, dst_dd_i, ldc)); if (src0_as != 0) { - ggml_cuda_pool_free_async(src0_ddq_as_f32, src0_as, id, stream); + ggml_cuda_pool_free(src0_ddq_as_f32, src0_as); } } @@ -6961,22 +6930,21 @@ static void ggml_cuda_op_mul_mat( src0_dd[id] = (char *) src0_extra->data_device[id]; } else { const size_t size_src0_ddq = split ? (row_high[id]-row_low[id])*ne00 * src0_ts/src0_bs : ggml_nbytes(src0); - src0_dd[id] = (char *) ggml_cuda_pool_malloc_async(ggml_nbytes(src0), &src0_as[id], id, stream); + src0_dd[id] = (char *) ggml_cuda_pool_malloc(ggml_nbytes(src0), &src0_as[id]); } if (src1_on_device && src1_is_contiguous) { src1_ddf[id] = (float *) src1_extra->data_device[id]; } else { - src1_ddf[id] = (float *) ggml_cuda_pool_malloc_async(ggml_nbytes(src1), &src1_asf[id], id, stream); + src1_ddf[id] = (float *) ggml_cuda_pool_malloc(ggml_nbytes(src1), &src1_asf[id]); } if (convert_src1_to_q8_1) { - const size_t size_dst_ddq = nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs; - src1_ddq[id] = (char *) ggml_cuda_pool_malloc_async(size_dst_ddq, &src1_asq[id], id, stream); + src1_ddq[id] = (char *) ggml_cuda_pool_malloc(nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs, &src1_asq[id]); if (src1_on_device && src1_is_contiguous) { quantize_row_q8_1_cuda(src1_ddf[id], src1_ddq[id], ne10, nrows1, src1_padded_col_size, stream); - // CUDA_CHECK(cudaGetLastError()); + CUDA_CHECK(cudaGetLastError()); } } @@ -6984,7 +6952,7 @@ static void ggml_cuda_op_mul_mat( dst_dd[id] = (float *) dst_extra->data_device[id]; } else { const size_t size_dst_ddf = split ? (row_high[id]-row_low[id])*ne1*sizeof(float) : ggml_nbytes(dst); - dst_dd[id] = (float *) ggml_cuda_pool_malloc_async(size_dst_ddf, &dst_as[id], id, stream); + dst_dd[id] = (float *) ggml_cuda_pool_malloc(size_dst_ddf, &dst_as[id]); } } @@ -7110,6 +7078,24 @@ static void ggml_cuda_op_mul_mat( } } + for (int64_t id = 0; id < g_device_count; ++id) { + CUDA_CHECK(ggml_cuda_set_device(id)); + + // free buffers again when done + if (src0_as[id] > 0) { + ggml_cuda_pool_free(src0_dd[id], src0_as[id]); + } + if (src1_asf[id] > 0) { + ggml_cuda_pool_free(src1_ddf[id], src1_asf[id]); + } + if (src1_asq[id] > 0) { + ggml_cuda_pool_free(src1_ddq[id], src1_asq[id]); + } + if (dst_as[id] > 0) { + ggml_cuda_pool_free(dst_dd[id], dst_as[id]); + } + } + // main device waits for all other devices to be finished if (split && g_device_count > 1) { int64_t is_max = (ne11 + MUL_MAT_SRC1_COL_STRIDE - 1) / MUL_MAT_SRC1_COL_STRIDE; @@ -7127,21 +7113,6 @@ static void ggml_cuda_op_mul_mat( CUDA_CHECK(ggml_cuda_set_device(g_main_device)); CUDA_CHECK(cudaDeviceSynchronize()); } - - for (int64_t id = 0; id < g_device_count; ++id) { - if (src0_as[id] > 0) { - ggml_cuda_pool_free_async(src0_dd[id], src0_as[id], id, g_cudaStreams[id][0]); - } - if (src1_asf[id] > 0) { - ggml_cuda_pool_free_async(src1_ddf[id], src1_asf[id], id, g_cudaStreams[id][0]); - } - if (src1_asq[id] > 0) { - ggml_cuda_pool_free_async(src1_ddq[id], src1_asq[id], id, g_cudaStreams[id][0]); - } - if (dst_as[id] > 0) { - ggml_cuda_pool_free_async(dst_dd[id], dst_as[id], id, g_cudaStreams[id][0]); - } - } } static void ggml_cuda_repeat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -7328,11 +7299,11 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const GGML_ASSERT(to_fp16_cuda != nullptr); size_t src1_as = 0; - half * src1_as_f16 = (half *) ggml_cuda_pool_malloc_async(ne1 * 
sizeof(half), &src1_as, id, main_stream); + half * src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne1 * sizeof(half), &src1_as); to_fp16_cuda(src1_ddf, src1_as_f16, ne1, main_stream); size_t dst_as = 0; - half * dst_f16 = (half *) ggml_cuda_pool_malloc_async(ne * sizeof(half), &dst_as, id, main_stream); + half * dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); GGML_ASSERT(ne12 % ne02 == 0); GGML_ASSERT(ne13 % ne03 == 0); @@ -7386,8 +7357,8 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const size_t ptrs_src_s = 0; size_t ptrs_dst_s = 0; - ptrs_src = (const void **) ggml_cuda_pool_malloc_async(2*ne23*sizeof(void *), &ptrs_src_s, id, main_stream); - ptrs_dst = ( void **) ggml_cuda_pool_malloc_async(1*ne23*sizeof(void *), &ptrs_dst_s, id, main_stream); + ptrs_src = (const void **) ggml_cuda_pool_malloc(2*ne23*sizeof(void *), &ptrs_src_s); + ptrs_dst = ( void **) ggml_cuda_pool_malloc(1*ne23*sizeof(void *), &ptrs_dst_s); dim3 block_dims(ne13, ne12); k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( @@ -7400,6 +7371,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const dst->nb[2], dst->nb[3], r2, r3); CUDA_CHECK(cudaGetLastError()); + CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, @@ -7411,22 +7383,19 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_GEMM_DEFAULT_TENSOR_OP)); if (ptrs_src_s != 0) { - ggml_cuda_pool_free_async(ptrs_src, ptrs_src_s, id, main_stream); + ggml_cuda_pool_free(ptrs_src, ptrs_src_s); } if (ptrs_dst_s != 0) { - ggml_cuda_pool_free_async(ptrs_dst, ptrs_dst_s, id, main_stream); + ggml_cuda_pool_free(ptrs_dst, ptrs_dst_s); } } #endif const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); - if (src1_as != 0) { - ggml_cuda_pool_free_async(src1_as_f16, src1_as, id, main_stream); - } - if (dst_as != 0) { - ggml_cuda_pool_free_async(dst_f16, dst_as, id, main_stream); - } + + ggml_cuda_pool_free(src1_as_f16, src1_as); + ggml_cuda_pool_free(dst_f16, dst_as); } static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { From a7fac013cf1cc7bbc0160a226aa2412e9f22e78a Mon Sep 17 00:00:00 2001 From: Eve <139727413+netrunnereve@users.noreply.github.com> Date: Sun, 5 Nov 2023 07:46:44 +0000 Subject: [PATCH 086/859] ci : use intel sde when ci cpu doesn't support avx512 (#3949) --- .github/workflows/build.yml | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5af497a3c..bc295d52d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -288,6 +288,7 @@ jobs: OPENBLAS_VERSION: 0.3.23 OPENCL_VERSION: 2023.04.17 CLBLAST_VERSION: 1.6.0 + SDE_VERSION: 9.21.1-2023-04-24 strategy: matrix: @@ -383,11 +384,23 @@ jobs: - name: Test id: cmake_test - if: ${{ matrix.build != 'clblast' && (matrix.build != 'avx512' || env.HAS_AVX512F == '1') }} # Test AVX-512 only when possible + if: ${{ matrix.build != 'clblast' && (matrix.build != 'avx512' || env.HAS_AVX512F == '1') }} # not all machines have native AVX-512 run: | cd build ctest -C Release --verbose --timeout 900 + - name: Test (Intel SDE) + id: cmake_test_sde + if: ${{ matrix.build == 'avx512' && env.HAS_AVX512F == '0' }} # use Intel SDE for AVX-512 emulation + run: | + curl.exe -o $env:RUNNER_TEMP/sde.tar.xz -L 
"https://downloadmirror.intel.com/777395/sde-external-${env:SDE_VERSION}-win.tar.xz" + # for some weird reason windows tar doesn't like sde tar.xz + 7z x "-o${env:RUNNER_TEMP}" $env:RUNNER_TEMP/sde.tar.xz + 7z x "-o${env:RUNNER_TEMP}" $env:RUNNER_TEMP/sde.tar + $sde = $(join-path $env:RUNNER_TEMP sde-external-${env:SDE_VERSION}-win/sde.exe) + cd build + & $sde -future -- ctest -C Release --verbose --timeout 900 + - name: Determine tag name id: tag shell: bash From c41ea36eaa3548776de4cb3d5d49b925cd3fc0f2 Mon Sep 17 00:00:00 2001 From: Eve <139727413+netrunnereve@users.noreply.github.com> Date: Sun, 5 Nov 2023 08:03:09 +0000 Subject: [PATCH 087/859] cmake : MSVC instruction detection (fixed up #809) (#3923) * Add detection code for avx * Only check hardware when option is ON * Modify per code review sugguestions * Build locally will detect CPU * Fixes CMake style to use lowercase like everywhere else * cleanup * fix merge * linux/gcc version for testing * msvc combines avx2 and fma into /arch:AVX2 so check for both * cleanup * msvc only version * style * Update FindSIMD.cmake --------- Co-authored-by: Howard Su Co-authored-by: Jeremy Dunn --- CMakeLists.txt | 8 +++- cmake/FindSIMD.cmake | 100 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 106 insertions(+), 2 deletions(-) create mode 100644 cmake/FindSIMD.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt index 3c49d645c..7b4eb1840 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -10,7 +10,7 @@ endif() set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) -if(CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR) +if (CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR) set(LLAMA_STANDALONE ON) # configure project version @@ -44,7 +44,7 @@ endif() # general option(LLAMA_STATIC "llama: static link libraries" OFF) -option(LLAMA_NATIVE "llama: enable -march=native flag" OFF) +option(LLAMA_NATIVE "llama: enable -march=native flag" ON) option(LLAMA_LTO "llama: enable link time optimization" OFF) # debug @@ -510,6 +510,10 @@ if ((${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm") OR (${CMAKE_SYSTEM_PROCESSOR} MATC elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "^(x86_64|i686|AMD64)$" OR "${CMAKE_GENERATOR_PLATFORM_LWR}" MATCHES "^(x86_64|i686|amd64|x64)$" ) message(STATUS "x86 detected") if (MSVC) + # instruction set detection for MSVC only + if (LLAMA_NATIVE) + include(cmake/FindSIMD.cmake) + endif () if (LLAMA_AVX512) add_compile_options($<$:/arch:AVX512>) add_compile_options($<$:/arch:AVX512>) diff --git a/cmake/FindSIMD.cmake b/cmake/FindSIMD.cmake new file mode 100644 index 000000000..33377ec44 --- /dev/null +++ b/cmake/FindSIMD.cmake @@ -0,0 +1,100 @@ +include(CheckCSourceRuns) + +set(AVX_CODE " + #include + int main() + { + __m256 a; + a = _mm256_set1_ps(0); + return 0; + } +") + +set(AVX512_CODE " + #include + int main() + { + __m512i a = _mm512_set_epi8(0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0); + __m512i b = a; + __mmask64 equality_mask = _mm512_cmp_epi8_mask(a, b, _MM_CMPINT_EQ); + return 0; + } +") + +set(AVX2_CODE " + #include + int main() + { + __m256i a = {0}; + a = _mm256_abs_epi16(a); + __m256i x; + _mm256_extract_epi64(x, 0); // we rely on this in our AVX2 code + return 0; + } +") + +set(FMA_CODE " + #include + int main() + { + __m256 acc = _mm256_setzero_ps(); + const __m256 d = _mm256_setzero_ps(); + const __m256 p = _mm256_setzero_ps(); + acc = _mm256_fmadd_ps( 
d, p, acc ); + return 0; + } +") + +macro(check_sse type flags) + set(__FLAG_I 1) + set(CMAKE_REQUIRED_FLAGS_SAVE ${CMAKE_REQUIRED_FLAGS}) + foreach (__FLAG ${flags}) + if (NOT ${type}_FOUND) + set(CMAKE_REQUIRED_FLAGS ${__FLAG}) + check_c_source_runs("${${type}_CODE}" HAS_${type}_${__FLAG_I}) + if (HAS_${type}_${__FLAG_I}) + set(${type}_FOUND TRUE CACHE BOOL "${type} support") + set(${type}_FLAGS "${__FLAG}" CACHE STRING "${type} flags") + endif() + math(EXPR __FLAG_I "${__FLAG_I}+1") + endif() + endforeach() + set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_SAVE}) + + if (NOT ${type}_FOUND) + set(${type}_FOUND FALSE CACHE BOOL "${type} support") + set(${type}_FLAGS "" CACHE STRING "${type} flags") + endif() + + mark_as_advanced(${type}_FOUND ${type}_FLAGS) +endmacro() + +# flags are for MSVC only! +check_sse("AVX" " ;/arch:AVX") +if (NOT ${AVX_FOUND}) + set(LLAMA_AVX OFF) +else() + set(LLAMA_AVX ON) +endif() + +check_sse("AVX2" " ;/arch:AVX2") +check_sse("FMA" " ;/arch:AVX2") +if ((NOT ${AVX2_FOUND}) OR (NOT ${FMA_FOUND})) + set(LLAMA_AVX2 OFF) +else() + set(LLAMA_AVX2 ON) +endif() + +check_sse("AVX512" " ;/arch:AVX512") +if (NOT ${AVX512_FOUND}) + set(LLAMA_AVX512 OFF) +else() + set(LLAMA_AVX512 ON) +endif() From 3d48f42efcd05381221654376e9f6f69d76af739 Mon Sep 17 00:00:00 2001 From: Meng Zhang Date: Sun, 5 Nov 2023 04:40:08 -0800 Subject: [PATCH 088/859] llama : mark LLM_ARCH_STARCODER as full offload supported (#3945) as done in https://github.com/ggerganov/llama.cpp/pull/3827 --- llama.cpp | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/llama.cpp b/llama.cpp index cc0211ceb..e16539000 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5164,11 +5164,12 @@ static int llama_decode_internal( // If all tensors can be run on the GPU then using more than 1 thread is detrimental. 
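// (Context: with every layer offloaded, host threads mostly block on CUDA
//  synchronization instead of doing useful compute, so the check below gates
//  the fully-offloaded fast path to architectures verified to support it.)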
const bool full_offload_supported = - model.arch == LLM_ARCH_LLAMA || - model.arch == LLM_ARCH_BAICHUAN || - model.arch == LLM_ARCH_FALCON || - model.arch == LLM_ARCH_REFACT || - model.arch == LLM_ARCH_MPT; + model.arch == LLM_ARCH_LLAMA || + model.arch == LLM_ARCH_BAICHUAN || + model.arch == LLM_ARCH_FALCON || + model.arch == LLM_ARCH_REFACT || + model.arch == LLM_ARCH_MPT || + model.arch == LLM_ARCH_STARCODER; const bool fully_offloaded = model.n_gpu_layers >= (int) hparams.n_layer + 3; if (ggml_cpu_has_cublas() && full_offload_supported && fully_offloaded) { From 132d25b8a62ea084447e0014a0112c1b371fb3f8 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sun, 5 Nov 2023 10:08:57 -0500 Subject: [PATCH 089/859] cuda : fix disabling device with --tensor-split 1,0 (#3951) Co-authored-by: slaren --- ggml-cuda.cu | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index dc14f2f5d..9f873035a 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6893,6 +6893,8 @@ static void ggml_cuda_op_mul_mat( int64_t row_low[GGML_CUDA_MAX_DEVICES]; int64_t row_high[GGML_CUDA_MAX_DEVICES]; + int used_devices = 0; + for (int64_t id = 0; id < g_device_count; ++id) { // by default, use all rows row_low[id] = 0; @@ -6920,6 +6922,8 @@ static void ggml_cuda_op_mul_mat( continue; } + used_devices++; + const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device; const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; @@ -6958,12 +6962,12 @@ static void ggml_cuda_op_mul_mat( // if multiple devices are used they need to wait for the main device // here an event is recorded that signals that the main device has finished calculating the input data - if (split && g_device_count > 1) { + if (split && used_devices > 1) { CUDA_CHECK(ggml_cuda_set_device(g_main_device)); CUDA_CHECK(cudaEventRecord(src0_extra->events[g_main_device][0], g_cudaStreams[g_main_device][0])); } - const int64_t src1_col_stride = split && g_device_count > 1 ? MUL_MAT_SRC1_COL_STRIDE : ne11; + const int64_t src1_col_stride = split && used_devices > 1 ? MUL_MAT_SRC1_COL_STRIDE : ne11; for (int64_t src1_col_0 = 0; src1_col_0 < ne11; src1_col_0 += src1_col_stride) { const int64_t is = split ? (src1_col_0/src1_col_stride) % MAX_STREAMS : 0; const int64_t src1_ncols = src1_col_0 + src1_col_stride > ne11 ? 
ne11 - src1_col_0 : src1_col_stride; @@ -7079,6 +7083,9 @@ static void ggml_cuda_op_mul_mat( } for (int64_t id = 0; id < g_device_count; ++id) { + if ((!split && id != g_main_device) || row_low[id] == row_high[id]) { + continue; + } CUDA_CHECK(ggml_cuda_set_device(id)); // free buffers again when done @@ -7103,6 +7110,9 @@ static void ggml_cuda_op_mul_mat( CUDA_CHECK(ggml_cuda_set_device(g_main_device)); for (int64_t id = 0; id < g_device_count; ++id) { + if (row_low[id] == row_high[id]) { + continue; + } for (int64_t is = 0; is < is_max; ++is) { CUDA_CHECK(cudaStreamWaitEvent(g_cudaStreams[g_main_device][0], src0_extra->events[id][is], 0)); } @@ -7400,7 +7410,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { const bool all_on_device = - (src0->backend == GGML_BACKEND_GPU) && + (src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT) && (src1->backend == GGML_BACKEND_GPU) && ( dst->backend == GGML_BACKEND_GPU); From bb60fd0bf6bb270744d86dd45b3a95af01b7de45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A1i=20Ho=C3=A0ng=20T=C3=A2m?= <75922889+RoyalHeart@users.noreply.github.com> Date: Sun, 5 Nov 2023 23:15:27 +0700 Subject: [PATCH 090/859] server : fix typo for --alias shortcut from -m to -a (#3958) --- examples/server/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/README.md b/examples/server/README.md index 715007735..089ebe2d1 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -7,7 +7,7 @@ Command line options: - `--threads N`, `-t N`: Set the number of threads to use during generation. - `-tb N, --threads-batch N`: Set the number of threads to use during batch and prompt processing. If not specified, the number of threads will be set to the number of threads used for generation. - `-m FNAME`, `--model FNAME`: Specify the path to the LLaMA model file (e.g., `models/7B/ggml-model.gguf`). -- `-m ALIAS`, `--alias ALIAS`: Set an alias for the model. The alias will be returned in API responses. +- `-a ALIAS`, `--alias ALIAS`: Set an alias for the model. The alias will be returned in API responses. - `-c N`, `--ctx-size N`: Set the size of the prompt context. The default is 512, but LLaMA models were built with a context of 2048, which will provide better results for longer input/inference. The size may differ in other models, for example, baichuan models were build with a context of 4096. - `-ngl N`, `--n-gpu-layers N`: When compiled with appropriate support (currently CLBlast or cuBLAS), this option allows offloading some layers to the GPU for computation. Generally results in increased performance. - `-mg i, --main-gpu i`: When using multiple GPUs this option controls which GPU is used for small tensors for which the overhead of splitting the computation across all GPUs is not worthwhile. The GPU in question will use slightly more VRAM to store a scratch buffer for temporary results. By default GPU 0 is used. Requires cuBLAS. 
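A note on the --tensor-split fix above (commit 132d25b8): the following is a minimal, self-contained sketch, not the actual ggml-cuda code, of how normalized split fractions map to per-device row ranges. It assumes the cumulative-prefix convention of g_tensor_split; the helper name and the 32000-row figure are hypothetical, and the rounding of range boundaries done by the real code is omitted. With a split of 1,0 the second device's range collapses to row_low == row_high, which is exactly the empty range the patched loops now skip.

#include <cstdint>
#include <cstdio>
#include <vector>

// Sketch: map normalized --tensor-split fractions (stored as cumulative
// prefixes, as in g_tensor_split) to half-open row ranges per device.
static void split_rows(const std::vector<float> & prefix, int64_t nrows) {
    const int n_dev = (int) prefix.size();
    for (int id = 0; id < n_dev; ++id) {
        const int64_t row_low  = (int64_t) (prefix[id] * nrows);
        const int64_t row_high = id + 1 < n_dev ? (int64_t) (prefix[id + 1] * nrows) : nrows;
        if (row_low == row_high) {
            std::printf("device %d: unused (empty range)\n", id); // must be skipped
            continue;
        }
        std::printf("device %d: rows [%lld, %lld)\n", id, (long long) row_low, (long long) row_high);
    }
}

int main() {
    // user passed --tensor-split 1,0 -> cumulative prefixes {0, 1}
    split_rows({0.0f, 1.0f}, 32000); // 32000 rows is a hypothetical size
    return 0;
}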
From d9ccce2e339ca0396560d18b8637f2c848d72a08 Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Sun, 5 Nov 2023 10:06:06 -0700 Subject: [PATCH 091/859] Allow common process_escapes to handle \x sequences (#3928) * Allow common process_escapes to handle \x sequences * Fix edge case when second hex digit is NUL --- common/common.cpp | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/common/common.cpp b/common/common.cpp index 20cc4a081..37e3ace8a 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -90,6 +90,19 @@ void process_escapes(std::string& input) { case '\'': input[output_idx++] = '\''; break; case '\"': input[output_idx++] = '\"'; break; case '\\': input[output_idx++] = '\\'; break; + case 'x': + // Handle \x12, etc + if (input_idx + 2 < input_len) { + const char x[3] = { input[input_idx + 1], input[input_idx + 2], 0 }; + char *err_p = nullptr; + const long val = std::strtol(x, &err_p, 16); + if (err_p == x + 2) { + input_idx += 2; + input[output_idx++] = char(val); + break; + } + // Intentionally fall through to default. + } default: input[output_idx++] = '\\'; input[output_idx++] = input[input_idx]; break; } From 2833a6f63c1b87c7f4ac574bcf7a15a2f3bf3ede Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 5 Nov 2023 18:45:16 +0100 Subject: [PATCH 092/859] ggml-cuda : fix f16 mul mat (#3961) * ggml-cuda : fix f16 mul mat ggml-ci * silence common.cpp warning (bonus) --- common/common.cpp | 2 +- ggml-cuda.cu | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 37e3ace8a..6a7114200 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -101,8 +101,8 @@ void process_escapes(std::string& input) { input[output_idx++] = char(val); break; } - // Intentionally fall through to default. } + // fall through default: input[output_idx++] = '\\'; input[output_idx++] = input[input_idx]; break; } diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 9f873035a..2d9ffffbf 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7414,6 +7414,8 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 (src1->backend == GGML_BACKEND_GPU) && ( dst->backend == GGML_BACKEND_GPU); + const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + int64_t min_compute_capability = INT_MAX; for (int64_t id = 0; id < g_device_count; ++id) { if (min_compute_capability > g_compute_capabilities[id] && g_tensor_split[id] < (id + 1 < g_device_count ? 
g_tensor_split[id + 1] : 1.0f)) { @@ -7435,13 +7437,13 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 //printf("src0 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src0), ggml_is_transposed(src0), ggml_type_name(src0->type), src0->name); //printf("src1 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src1), ggml_is_transposed(src1), ggml_type_name(src1->type), src1->name); - if (all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { + if (!split && all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { // KQ single-batch ggml_cuda_mul_mat_vec_p021(src0, src1, dst); - } else if (all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { + } else if (!split && all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (!split && all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { From 381efbf480959bb6d1e247a8b0c2328f22e350f8 Mon Sep 17 00:00:00 2001 From: Damian Stewart Date: Mon, 6 Nov 2023 22:36:23 +0100 Subject: [PATCH 093/859] llava : expose as a shared library for downstream projects (#3613) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * wip llava python bindings compatibility * add external llava API * add base64 in-prompt image support * wip refactor image loading * refactor image load out of llava init * cleanup * further cleanup; move llava-cli into its own file and rename * move base64.hpp into common/ * collapse clip and llava libraries * move llava into its own subdir * wip * fix bug where base64 string was not removed from the prompt * get libllava to output in the right place * expose llava methods in libllama.dylib * cleanup memory usage around clip_image_* * cleanup and refactor *again* * update headerdoc * build with cmake, not tested (WIP) * Editorconfig * Editorconfig * Build with make * Build with make * Fix cyclical depts on Windows * attempt to fix build on Windows * attempt to fix build on Windows * Upd TODOs * attempt to fix build on Windows+CUDA * Revert changes in cmake * Fix according to review comments * Support building as a shared library * address review comments --------- Co-authored-by: M. 
Yusuf Sarıgöz Co-authored-by: Jared Van Bortel --- .gitignore | 2 +- Makefile | 7 +- common/CMakeLists.txt | 1 + common/base64.hpp | 392 +++++++++++++++++++++++++++++++++ examples/llava/CMakeLists.txt | 46 +++- examples/llava/README.md | 7 +- examples/llava/clip.cpp | 86 +++++--- examples/llava/clip.h | 41 +++- examples/llava/llava-cli.cpp | 315 ++++++++++++++++++++++++++ examples/llava/llava-utils.h | 147 ------------- examples/llava/llava.cpp | 280 ++++++++++++----------- examples/llava/llava.h | 50 +++++ examples/server/CMakeLists.txt | 2 +- 13 files changed, 1022 insertions(+), 354 deletions(-) create mode 100644 common/base64.hpp create mode 100644 examples/llava/llava-cli.cpp delete mode 100644 examples/llava/llava-utils.h create mode 100644 examples/llava/llava.h diff --git a/.gitignore b/.gitignore index 50cbd0b47..708e8582e 100644 --- a/.gitignore +++ b/.gitignore @@ -46,7 +46,7 @@ models-mnt /infill /libllama.so /llama-bench -/llava +/llava-cli /main /metal /perplexity diff --git a/Makefile b/Makefile index 300c1e6c7..f2d4fd031 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ # Define the default target now so that it is always the first target BUILD_TARGETS = \ main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ - simple batched batched-bench save-load-state server gguf llama-bench llava baby-llama beam-search \ + simple batched batched-bench save-load-state server gguf llama-bench libllava.a llava-cli baby-llama beam-search \ speculative infill benchmark-matmult parallel finetune export-lora tests/test-c.o # Binaries only useful for tests @@ -617,7 +617,10 @@ convert-llama2c-to-ggml: examples/convert-llama2c-to-ggml/convert-llama2c-to-ggm llama-bench: examples/llama-bench/llama-bench.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -llava: examples/llava/llava.cpp examples/llava/llava-utils.h examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) +libllava.a: examples/llava/llava.cpp examples/llava/llava.h examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h common/base64.hpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) -static -fPIC -c $< -o $@ $(LDFLAGS) -Wno-cast-qual + +llava-cli: examples/llava/llava-cli.cpp examples/llava/clip.h examples/llava/clip.cpp examples/llava/llava.h examples/llava/llava.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -Wno-cast-qual baby-llama: examples/baby-llama/baby-llama.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index ac594b2ca..4f930bdc5 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -41,6 +41,7 @@ endif() set(TARGET common) add_library(${TARGET} STATIC + base64.hpp common.h common.cpp sampling.h diff --git a/common/base64.hpp b/common/base64.hpp new file mode 100644 index 000000000..563247a6e --- /dev/null +++ b/common/base64.hpp @@ -0,0 +1,392 @@ +/* +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. 
We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to https://unlicense.org
+*/
+
+#ifndef PUBLIC_DOMAIN_BASE64_HPP_
+#define PUBLIC_DOMAIN_BASE64_HPP_
+
+#include <cstdint>
+#include <iterator>
+#include <stdexcept>
+#include <string>
+
+class base64_error : public std::runtime_error
+{
+public:
+    using std::runtime_error::runtime_error;
+};
+
+class base64
+{
+public:
+    enum class alphabet
+    {
+        /** the alphabet is detected automatically */
+        auto_,
+        /** the standard base64 alphabet is used */
+        standard,
+        /** like `standard` except that the characters `+` and `/` are replaced by `-` and `_` respectively */
+        url_filename_safe
+    };
+
+    enum class decoding_behavior
+    {
+        /** if the input is not padded, the remaining bits are ignored */
+        moderate,
+        /** if a padding character is encountered, decoding is finished */
+        loose
+    };
+
+    /**
+     Encodes all the elements from `in_begin` to `in_end` to `out`.
+
+     @warning The source and destination cannot overlap. The destination must be able to hold at least
+     `required_encode_size(std::distance(in_begin, in_end))`, otherwise the behavior depends on the output iterator.
+
+     @tparam Input_iterator the source; the returned elements are cast to `std::uint8_t` and should not be greater than
+     8 bits
+     @tparam Output_iterator the destination; the elements written to it are from the type `char`
+     @param in_begin the beginning of the source
+     @param in_end the ending of the source
+     @param out the destination iterator
+     @param alphabet which alphabet should be used
+     @returns the iterator to the next element past the last element copied
+     @throws see `Input_iterator` and `Output_iterator`
+    */
+    template<typename Input_iterator, typename Output_iterator>
+    static Output_iterator encode(Input_iterator in_begin, Input_iterator in_end, Output_iterator out,
+                                  alphabet alphabet = alphabet::standard)
+    {
+        constexpr auto pad = '=';
+        const char* alpha  = alphabet == alphabet::url_filename_safe
+                                 ?
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_" + : "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; + + while (in_begin != in_end) { + std::uint8_t i0 = 0, i1 = 0, i2 = 0; + + // first character + i0 = static_cast(*in_begin); + ++in_begin; + + *out = alpha[i0 >> 2 & 0x3f]; + ++out; + + // part of first character and second + if (in_begin != in_end) { + i1 = static_cast(*in_begin); + ++in_begin; + + *out = alpha[((i0 & 0x3) << 4) | (i1 >> 4 & 0x0f)]; + ++out; + } else { + *out = alpha[(i0 & 0x3) << 4]; + ++out; + + // last padding + *out = pad; + ++out; + + // last padding + *out = pad; + ++out; + + break; + } + + // part of second character and third + if (in_begin != in_end) { + i2 = static_cast(*in_begin); + ++in_begin; + + *out = alpha[((i1 & 0xf) << 2) | (i2 >> 6 & 0x03)]; + ++out; + } else { + *out = alpha[(i1 & 0xf) << 2]; + ++out; + + // last padding + *out = pad; + ++out; + + break; + } + + // rest of third + *out = alpha[i2 & 0x3f]; + ++out; + } + + return out; + } + /** + Encodes a string. + + @param str the string that should be encoded + @param alphabet which alphabet should be used + @returns the encoded base64 string + @throws see base64::encode() + */ + static std::string encode(const std::string& str, alphabet alphabet = alphabet::standard) + { + std::string result; + + result.reserve(required_encode_size(str.length()) + 1); + + encode(str.begin(), str.end(), std::back_inserter(result), alphabet); + + return result; + } + /** + Encodes a char array. + + @param buffer the char array + @param size the size of the array + @param alphabet which alphabet should be used + @returns the encoded string + */ + static std::string encode(const char* buffer, std::size_t size, alphabet alphabet = alphabet::standard) + { + std::string result; + + result.reserve(required_encode_size(size) + 1); + + encode(buffer, buffer + size, std::back_inserter(result), alphabet); + + return result; + } + /** + Decodes all the elements from `in_begin` to `in_end` to `out`. `in_begin` may point to the same location as `out`, + in other words: inplace decoding is possible. + + @warning The destination must be able to hold at least `required_decode_size(std::distance(in_begin, in_end))`, + otherwise the behavior depends on the output iterator. 
+ + @tparam Input_iterator the source; the returned elements are cast to `char` + @tparam Output_iterator the destination; the elements written to it are from the type `std::uint8_t` + @param in_begin the beginning of the source + @param in_end the ending of the source + @param out the destination iterator + @param alphabet which alphabet should be used + @param behavior the behavior when an error was detected + @returns the iterator to the next element past the last element copied + @throws base64_error depending on the set behavior + @throws see `Input_iterator` and `Output_iterator` + */ + template + static Output_iterator decode(Input_iterator in_begin, Input_iterator in_end, Output_iterator out, + alphabet alphabet = alphabet::auto_, + decoding_behavior behavior = decoding_behavior::moderate) + { + //constexpr auto pad = '='; + std::uint8_t last = 0; + auto bits = 0; + + while (in_begin != in_end) { + auto c = *in_begin; + ++in_begin; + + if (c == '=') { + break; + } + + auto part = _base64_value(alphabet, c); + + // enough bits for one byte + if (bits + 6 >= 8) { + *out = (last << (8 - bits)) | (part >> (bits - 2)); + ++out; + + bits -= 2; + } else { + bits += 6; + } + + last = part; + } + + // check padding + if (behavior != decoding_behavior::loose) { + while (in_begin != in_end) { + auto c = *in_begin; + ++in_begin; + + if (c != '=') { + throw base64_error("invalid base64 character."); + } + } + } + + return out; + } + /** + Decodes a string. + + @param str the base64 encoded string + @param alphabet which alphabet should be used + @param behavior the behavior when an error was detected + @returns the decoded string + @throws see base64::decode() + */ + static std::string decode(const std::string& str, alphabet alphabet = alphabet::auto_, + decoding_behavior behavior = decoding_behavior::moderate) + { + std::string result; + + result.reserve(max_decode_size(str.length())); + + decode(str.begin(), str.end(), std::back_inserter(result), alphabet, behavior); + + return result; + } + /** + Decodes a string. + + @param buffer the base64 encoded buffer + @param size the size of the buffer + @param alphabet which alphabet should be used + @param behavior the behavior when an error was detected + @returns the decoded string + @throws see base64::decode() + */ + static std::string decode(const char* buffer, std::size_t size, alphabet alphabet = alphabet::auto_, + decoding_behavior behavior = decoding_behavior::moderate) + { + std::string result; + + result.reserve(max_decode_size(size)); + + decode(buffer, buffer + size, std::back_inserter(result), alphabet, behavior); + + return result; + } + /** + Decodes a string inplace. + + @param[in,out] str the base64 encoded string + @param alphabet which alphabet should be used + @param behavior the behavior when an error was detected + @throws base64::decode_inplace() + */ + static void decode_inplace(std::string& str, alphabet alphabet = alphabet::auto_, + decoding_behavior behavior = decoding_behavior::moderate) + { + str.resize(decode(str.begin(), str.end(), str.begin(), alphabet, behavior) - str.begin()); + } + /** + Decodes a char array inplace. 
+ + @param[in,out] str the string array + @param size the length of the array + @param alphabet which alphabet should be used + @param behavior the behavior when an error was detected + @returns the pointer to the next element past the last element decoded + @throws base64::decode_inplace() + */ + static char* decode_inplace(char* str, std::size_t size, alphabet alphabet = alphabet::auto_, + decoding_behavior behavior = decoding_behavior::moderate) + { + return decode(str, str + size, str, alphabet, behavior); + } + /** + Returns the required decoding size for a given size. The value is calculated with the following formula: + + $$ + \lceil \frac{size}{4} \rceil \cdot 3 + $$ + + @param size the size of the encoded input + @returns the size of the resulting decoded buffer; this the absolute maximum + */ + static std::size_t max_decode_size(std::size_t size) noexcept + { + return (size / 4 + (size % 4 ? 1 : 0)) * 3; + } + /** + Returns the required encoding size for a given size. The value is calculated with the following formula: + + $$ + \lceil \frac{size}{3} \rceil \cdot 4 + $$ + + @param size the size of the decoded input + @returns the size of the resulting encoded buffer + */ + static std::size_t required_encode_size(std::size_t size) noexcept + { + return (size / 3 + (size % 3 ? 1 : 0)) * 4; + } + +private: + static std::uint8_t _base64_value(alphabet& alphabet, char c) + { + if (c >= 'A' && c <= 'Z') { + return c - 'A'; + } else if (c >= 'a' && c <= 'z') { + return c - 'a' + 26; + } else if (c >= '0' && c <= '9') { + return c - '0' + 52; + } + + // comes down to alphabet + if (alphabet == alphabet::standard) { + if (c == '+') { + return 62; + } else if (c == '/') { + return 63; + } + } else if (alphabet == alphabet::url_filename_safe) { + if (c == '-') { + return 62; + } else if (c == '_') { + return 63; + } + } // auto detect + else { + if (c == '+') { + alphabet = alphabet::standard; + + return 62; + } else if (c == '/') { + alphabet = alphabet::standard; + + return 63; + } else if (c == '-') { + alphabet = alphabet::url_filename_safe; + + return 62; + } else if (c == '_') { + alphabet = alphabet::url_filename_safe; + + return 63; + } + } + + throw base64_error("invalid base64 character."); + } +}; + +#endif // !PUBLIC_DOMAIN_BASE64_HPP_ diff --git a/examples/llava/CMakeLists.txt b/examples/llava/CMakeLists.txt index 03d32c26e..8ea3e5c83 100644 --- a/examples/llava/CMakeLists.txt +++ b/examples/llava/CMakeLists.txt @@ -1,14 +1,36 @@ -set(TARGET clip) -add_library(${TARGET} clip.cpp clip.h) -install(TARGETS ${TARGET} LIBRARY) -target_link_libraries(${TARGET} PRIVATE common ggml ${CMAKE_THREAD_LIBS_INIT}) -target_compile_features(${TARGET} PRIVATE cxx_std_11) -if (NOT MSVC) - target_compile_options(${TARGET} PRIVATE -Wno-cast-qual) # stb_image.h +add_library(llava OBJECT + llava.cpp + llava.h + clip.cpp + clip.h + ) + +target_link_libraries(llava PRIVATE ggml llama ${CMAKE_THREAD_LIBS_INIT}) + +target_include_directories(llava PUBLIC .) +target_include_directories(llava PUBLIC ../..) 
+target_include_directories(llava PUBLIC ../../common)
+
+target_compile_features(llava PRIVATE cxx_std_11)
+
+add_library(llava_static STATIC $<TARGET_OBJECTS:llava>)
+if (BUILD_SHARED_LIBS)
+    set_target_properties(llava PROPERTIES POSITION_INDEPENDENT_CODE ON)
+    target_compile_definitions(llava PRIVATE LLAMA_SHARED LLAMA_BUILD)
+    add_library(llava_shared SHARED $<TARGET_OBJECTS:llava>)
+    target_link_libraries(llava_shared PRIVATE ggml llama ${CMAKE_THREAD_LIBS_INIT})
+    install(TARGETS llava_shared LIBRARY)
+endif()
-set(TARGET llava)
-add_executable(${TARGET} llava.cpp)
-install(TARGETS ${TARGET} RUNTIME)
-target_link_libraries(${TARGET} PRIVATE common llama clip ${CMAKE_THREAD_LIBS_INIT})
-target_compile_features(${TARGET} PRIVATE cxx_std_11)
+if (NOT MSVC)
+    target_compile_options(llava PRIVATE -Wno-cast-qual) # stb_image.h
+endif()
+if(TARGET BUILD_INFO)
+    add_dependencies(llava BUILD_INFO)
+endif()
+
+set(TARGET llava-cli)
+add_executable(llava-cli llava-cli.cpp)
+install(TARGETS llava-cli RUNTIME)
+target_link_libraries(llava-cli PRIVATE common llama llava ${CMAKE_THREAD_LIBS_INIT})
+target_compile_features(llava-cli PRIVATE cxx_std_11)
diff --git a/examples/llava/README.md b/examples/llava/README.md
index fc3446b60..323c5fdd0 100644
--- a/examples/llava/README.md
+++ b/examples/llava/README.md
@@ -9,12 +9,12 @@ models are available.
 After API is confirmed, more models will be supported / uploaded.
 
 ## Usage
-Build with cmake or run `make llava` to build it.
+Build with cmake or run `make llava-cli` to build it.
 
-After building, run: `./llava` to see the usage. For example:
+After building, run: `./llava-cli` to see the usage. For example:
 
 ```sh
-./llava -m llava-v1.5-7b/ggml-model-q5_k.gguf --mmproj llava-v1.5-7b/mmproj-model-f16.gguf --image path/to/an/image.jpg
+./llava-cli -m llava-v1.5-7b/ggml-model-q5_k.gguf --mmproj llava-v1.5-7b/mmproj-model-f16.gguf --image path/to/an/image.jpg
 ```
 
 **note**: A lower temperature like 0.1 is recommended for better quality. Add `--temp 0.1` to the command to do so.
@@ -51,7 +51,6 @@ Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` director
 ## TODO
 
-- [ ] Support server mode.
 - [ ] Support non-CPU backend for the image encoding part.
 - [ ] Support different sampling methods.
 - [ ] Support more model variants.
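Taken together, the libllava surface a downstream project needs is small: validate the projector, build an image embed, write it into the context, free it. A minimal sketch of a consumer, assuming a `llama_context` and a `clip_ctx` (from `clip_model_load`) already exist; the thread and batch counts here are illustrative, not prescribed by the patch:

```cpp
#include "clip.h"
#include "llava.h"
#include "llama.h"

// Sketch only: embeds one image file into an ongoing llama context.
static bool embed_image_into_context(llama_context * ctx_llama, clip_ctx * ctx_clip,
                                     const char * image_path, int * n_past) {
    if (!llava_validate_embed_size(ctx_llama, ctx_clip)) {
        return false; // mmproj and LLaMA model disagree on embedding dim
    }
    llava_image_embed * embed =
        llava_image_embed_make_with_filename(ctx_clip, /*n_threads=*/4, image_path);
    if (!embed) {
        return false; // not a decodable image file
    }
    // decode the image patches; on success *n_past points just past the image
    const bool ok = llava_eval_image_embed(ctx_llama, embed, /*n_batch=*/512, n_past);
    llava_image_embed_free(embed);
    return ok;
}
```

Text before and after the image is then evaluated with ordinary `llama_decode` calls, which is exactly how the new `llava-cli.cpp` below brackets its `llava_eval_image_embed` call.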
diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 61932e659..3c909c7d3 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -680,26 +680,44 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { return new_clip; } -clip_image_u8 * make_clip_image_u8() { return new clip_image_u8(); } - +clip_image_u8 * make_clip_image_u8() { + auto img = new clip_image_u8(); + return img; +} clip_image_f32 * make_clip_image_f32() { return new clip_image_f32(); } -bool clip_image_load_from_file(const char * fname, clip_image_u8 * img) { - int nx, ny, nc; - auto data = stbi_load(fname, &nx, &ny, &nc, 3); - if (!data) { - fprintf(stderr, "%s: failed to load '%s'\n", __func__, fname); - return false; - } +void clip_image_u8_free(clip_image_u8 * img) { if (img->data) { delete[] img->data; } delete img; } +void clip_image_f32_free(clip_image_f32 * img) { if (img->data) { delete[] img->data; } delete img; } +static void build_clip_img_from_data(const stbi_uc * data, int nx, int ny, clip_image_u8 * img) { img->nx = nx; img->ny = ny; img->size = nx * ny * 3; img->data = new uint8_t[img->size](); memcpy(img->data, data, img->size); +} +bool clip_image_load_from_file(const char * fname, clip_image_u8 * img) { + int nx, ny, nc; + auto data = stbi_load(fname, &nx, &ny, &nc, 3); + if (!data) { + fprintf(stderr, "%s: failed to load image '%s'\n", __func__, fname); + return false; + } + build_clip_img_from_data(data, nx, ny, img); stbi_image_free(data); + return true; +} +bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img) { + int nx, ny, nc; + auto data = stbi_load_from_memory(bytes, bytes_length, &nx, &ny, &nc, 3); + if (!data) { + fprintf(stderr, "%s: failed to decode image bytes\n", __func__); + return false; + } + build_clip_img_from_data(data, nx, ny, img); + stbi_image_free(data); return true; } @@ -714,39 +732,40 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip // the logic below is to pad the shorter side to the longer side with a background color: rgb(122, 116, 104) // see https://github.com/haotian-liu/LLaVA/blob/e854a2bf85118c504f6f16bf5c3c7c92f8fa8c6b/llava/conversation.py#L113-L156 - clip_image_u8 temp; // we will keep the input image data here temporarily + clip_image_u8 * temp = make_clip_image_u8(); // we will keep the input image data here temporarily if (pad2square && img->nx != img->ny) { int longer_side = std::max(img->nx, img->ny); - temp.nx = longer_side; - temp.ny = longer_side; - temp.size = 3 * longer_side * longer_side; - temp.data = new uint8_t[temp.size](); + temp->nx = longer_side; + temp->ny = longer_side; + temp->size = 3 * longer_side * longer_side; + temp->data = new uint8_t[temp->size](); uint8_t bc[3] = {122, 116, 104}; // bakground color in RGB from LLaVA // fill with background color - for (size_t i = 0; i < temp.size; i++) { - temp.data[i] = bc[i % 3]; + for (size_t i = 0; i < temp->size; i++) { + temp->data[i] = bc[i % 3]; } // copy from the input image for (int y = 0; y < img->ny; y++) { for (int x = 0; x < img->nx; x++) { const int i = 3 * (y * img->nx + x); - const int j = 3 * (y * temp.nx + x); - temp.data[j] = img->data[i]; - temp.data[j+1] = img->data[i+1]; - temp.data[j+2] = img->data[i+2]; + const int j = 3 * (y * temp->nx + x); + temp->data[j] = img->data[i]; + temp->data[j+1] = img->data[i+1]; + temp->data[j+2] = img->data[i+2]; } } } else { - temp.nx = img->nx; - temp.ny = img->ny; - temp.size = img->size; - temp.data 
= img->data; + temp->nx = img->nx; + temp->ny = img->ny; + temp->size = img->size; + temp->data = new uint8_t[temp->size](); + *temp->data = *img->data; // copy } - const int nx = temp.nx; - const int ny = temp.ny; + const int nx = temp->nx; + const int ny = temp->ny; const int nx2 = ctx->vision_model.hparams.image_size; const int ny2 = ctx->vision_model.hparams.image_size; @@ -785,10 +804,10 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip const int j10 = 3 * (y1 * nx + x0) + c; const int j11 = 3 * (y1 * nx + x1) + c; - const float v00 = temp.data[j00]; - const float v01 = temp.data[j01]; - const float v10 = temp.data[j10]; - const float v11 = temp.data[j11]; + const float v00 = temp->data[j00]; + const float v01 = temp->data[j01]; + const float v10 = temp->data[j10]; + const float v11 = temp->data[j11]; const float v0 = v00 * (1.0f - dx) + v01 * dx; const float v1 = v10 * (1.0f - dx) + v11 * dx; @@ -803,6 +822,7 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip } } } + clip_image_u8_free(temp); return true; } @@ -1049,16 +1069,16 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i return true; } -int clip_n_mmproj_embd(struct clip_ctx * ctx) { +int clip_n_mmproj_embd(const struct clip_ctx * ctx) { return ctx->vision_model.mm_2_b->ne[0]; } -int clip_n_patches(struct clip_ctx * ctx) { +int clip_n_patches(const struct clip_ctx * ctx) { auto & params = ctx->vision_model.hparams; return (params.image_size / params.patch_size) * (params.image_size / params.patch_size); } -size_t clip_embd_nbytes(struct clip_ctx * ctx) { +size_t clip_embd_nbytes(const struct clip_ctx * ctx) { return clip_n_patches(ctx) * clip_n_mmproj_embd(ctx) * sizeof(float); } diff --git a/examples/llava/clip.h b/examples/llava/clip.h index 3d7261e29..f11df85de 100644 --- a/examples/llava/clip.h +++ b/examples/llava/clip.h @@ -1,7 +1,22 @@ #ifndef CLIP_H #define CLIP_H -#include "ggml.h" +#include +#include + +#ifdef LLAMA_SHARED +# if defined(_WIN32) && !defined(__MINGW32__) +# ifdef LLAMA_BUILD +# define CLIP_API __declspec(dllexport) +# else +# define CLIP_API __declspec(dllimport) +# endif +# else +# define CLIP_API __attribute__ ((visibility ("default"))) +# endif +#else +# define CLIP_API +#endif struct clip_ctx; @@ -20,19 +35,20 @@ struct clip_vision_hparams { float eps; }; -struct clip_ctx * clip_model_load(const char * fname, const int verbosity); +/** load mmproj model */ +CLIP_API struct clip_ctx * clip_model_load(const char * fname, const int verbosity); +/** free mmproj model */ +CLIP_API void clip_free(struct clip_ctx * ctx); -void clip_free(struct clip_ctx * ctx); - -size_t clip_embd_nbytes(struct clip_ctx * ctx); -int clip_n_patches(struct clip_ctx * ctx); -int clip_n_mmproj_embd(struct clip_ctx * ctx); +size_t clip_embd_nbytes(const struct clip_ctx * ctx); +int clip_n_patches(const struct clip_ctx * ctx); +int clip_n_mmproj_embd(const struct clip_ctx * ctx); // RGB uint8 image struct clip_image_u8 { int nx; int ny; - uint8_t * data; + uint8_t * data = NULL; size_t size; }; @@ -41,7 +57,7 @@ struct clip_image_u8 { struct clip_image_f32 { int nx; int ny; - float * data; + float * data = NULL; size_t size; }; @@ -57,7 +73,12 @@ struct clip_image_f32_batch { struct clip_image_u8 * make_clip_image_u8(); struct clip_image_f32 * make_clip_image_f32(); -bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img); +CLIP_API void clip_image_u8_free(clip_image_u8 * img); +CLIP_API void 
clip_image_f32_free(clip_image_f32 * img);
+CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img);
+/** interpret bytes as an image file with length bytes_length, and use the result to populate img */
+CLIP_API bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img);
+
 bool clip_image_preprocess(const struct clip_ctx * ctx, const struct clip_image_u8 * img, struct clip_image_f32 * res, const bool pad2square);
 bool clip_image_encode(const struct clip_ctx * ctx, const int n_threads, struct clip_image_f32 * img, float * vec);
diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp
new file mode 100644
index 000000000..19374c67f
--- /dev/null
+++ b/examples/llava/llava-cli.cpp
@@ -0,0 +1,315 @@
+#include "ggml.h"
+#include "common.h"
+#include "clip.h"
+#include "llava.h"
+#include "llama.h"
+
+#include "base64.hpp"
+
+#include <cstdio>
+#include <cstdlib>
+#include <vector>
+
+static bool eval_tokens(struct llama_context * ctx_llama, std::vector<llama_token> tokens, int n_batch, int * n_past) {
+    int N = (int) tokens.size();
+    for (int i = 0; i < N; i += n_batch) {
+        int n_eval = (int) tokens.size() - i;
+        if (n_eval > n_batch) {
+            n_eval = n_batch;
+        }
+        if (llama_decode(ctx_llama, llama_batch_get_one(&tokens[i], n_eval, *n_past, 0))) {
+            fprintf(stderr, "%s : failed to eval. token %d/%d (batch size %d, n_past %d)\n", __func__, i, N, n_batch, *n_past);
+            return false;
+        }
+        *n_past += n_eval;
+    }
+    return true;
+}
+
+static bool eval_id(struct llama_context * ctx_llama, int id, int * n_past) {
+    std::vector<llama_token> tokens;
+    tokens.push_back(id);
+    return eval_tokens(ctx_llama, tokens, 1, n_past);
+}
+
+static bool eval_string(struct llama_context * ctx_llama, const char* str, int n_batch, int * n_past, bool add_bos){
+    std::string str2 = str;
+    std::vector<llama_token> embd_inp = ::llama_tokenize(ctx_llama, str2, add_bos);
+    eval_tokens(ctx_llama, embd_inp, n_batch, n_past);
+    return true;
+}
+
+// TODO: use common/sampling.h
+static llama_token sample_id(llama_context * ctx_llama, gpt_params & params) {
+    auto & sparams = params.sparams;
+
+    // out of user input, sample next token
+    const float   temp      = sparams.temp;
+    const int32_t top_k     = sparams.top_k <= 0 ? llama_n_vocab(llama_get_model(ctx_llama)) : sparams.top_k;
+    const float   top_p     = sparams.top_p;
+    const float   tfs_z     = sparams.tfs_z;
+    const float   typical_p = sparams.typical_p;
+    // const int32_t repeat_last_n   = sparams.repeat_last_n < 0 ? n_ctx : sparams.repeat_last_n;
+    // const float   repeat_penalty  = sparams.repeat_penalty;
+    // const float   alpha_presence  = sparams.presence_penalty;
+    // const float   alpha_frequency = sparams.frequency_penalty;
+    const int     mirostat     = sparams.mirostat;
+    const float   mirostat_tau = sparams.mirostat_tau;
+    const float   mirostat_eta = sparams.mirostat_eta;
+    // const bool    penalize_nl = sparams.penalize_nl;
+
+    llama_token id = 0;
+    {
+        auto logits  = llama_get_logits(ctx_llama);
+        auto n_vocab = llama_n_vocab(llama_get_model(ctx_llama));
+
+        // Apply params.logit_bias map
+        for (auto it = sparams.logit_bias.begin(); it != sparams.logit_bias.end(); it++) {
+            logits[it->first] += it->second;
+        }
+
+        std::vector<llama_token_data> candidates;
+        candidates.reserve(n_vocab);
+        for (llama_token token_id = 0; token_id < n_vocab; token_id++) {
+            candidates.emplace_back(llama_token_data{token_id, logits[token_id], 0.0f});
+        }
+
+        llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false };
+
+        if (temp <= 0) {
+            // Greedy sampling
+            id = llama_sample_token_greedy(ctx_llama, &candidates_p);
+        } else {
+            if (mirostat == 1) {
+                static float mirostat_mu = 2.0f * mirostat_tau;
+                const int mirostat_m = 100;
+                llama_sample_temp(ctx_llama, &candidates_p, temp);
+                id = llama_sample_token_mirostat(ctx_llama, &candidates_p, mirostat_tau, mirostat_eta, mirostat_m, &mirostat_mu);
+            } else if (mirostat == 2) {
+                static float mirostat_mu = 2.0f * mirostat_tau;
+                llama_sample_temp(ctx_llama, &candidates_p, temp);
+                id = llama_sample_token_mirostat_v2(ctx_llama, &candidates_p, mirostat_tau, mirostat_eta, &mirostat_mu);
+            } else {
+                // Temperature sampling
+                llama_sample_top_k(ctx_llama, &candidates_p, top_k, 1);
+                llama_sample_tail_free(ctx_llama, &candidates_p, tfs_z, 1);
+                llama_sample_typical(ctx_llama, &candidates_p, typical_p, 1);
+                llama_sample_top_p(ctx_llama, &candidates_p, top_p, 1);
+                llama_sample_temp(ctx_llama, &candidates_p, temp);
+                id = llama_sample_token(ctx_llama, &candidates_p);
+            }
+        }
+    }
+
+    return id;
+}
+
+static const char * sample(struct llama_context * ctx_llama, gpt_params & params, int * n_past) {
+    int id = sample_id(ctx_llama, params);
+    static std::string ret;
+    if (id == llama_token_eos(llama_get_model(ctx_llama))) {
+        ret = "</s>";
+    } else {
+        ret = llama_token_to_piece(ctx_llama, id);
+    }
+    eval_id(ctx_llama, id, n_past);
+    return ret.c_str();
+}
+
+static const char* IMG_BASE64_TAG_BEGIN = "<img src=\"data:image/jpeg;base64,";
+static const char* IMG_BASE64_TAG_END   = "\">";
+
+static void find_image_tag_in_prompt(const std::string& prompt, size_t& begin_out, size_t& end_out) {
+    begin_out = prompt.find(IMG_BASE64_TAG_BEGIN);
+    end_out = prompt.find(IMG_BASE64_TAG_END, (begin_out == std::string::npos) ? 0UL : begin_out);
+}
+
+static bool prompt_contains_image(const std::string& prompt) {
+    size_t begin, end;
+    find_image_tag_in_prompt(prompt, begin, end);
+    return (begin != std::string::npos);
+}
+
+// replaces the base64 image tag in the prompt with `replacement`
+static llava_image_embed * llava_image_embed_make_with_prompt_base64(struct clip_ctx * ctx_clip, int n_threads, const std::string& prompt) {
+    size_t img_base64_str_start, img_base64_str_end;
+    find_image_tag_in_prompt(prompt, img_base64_str_start, img_base64_str_end);
+    if (img_base64_str_start == std::string::npos || img_base64_str_end == std::string::npos) {
+        fprintf(stderr, "%s: invalid base64 image tag. must be %s<base64 byte string>%s\n", __func__, IMG_BASE64_TAG_BEGIN, IMG_BASE64_TAG_END);
+        return NULL;
+    }
+
+    auto base64_bytes_start = img_base64_str_start + strlen(IMG_BASE64_TAG_BEGIN);
+    auto base64_bytes_count = img_base64_str_end - base64_bytes_start;
+    auto base64_str = prompt.substr(base64_bytes_start, base64_bytes_count);
+
+    auto required_bytes = base64::required_encode_size(base64_str.size());
+    auto img_bytes = std::vector<unsigned char>(required_bytes);
+    base64::decode(base64_str.begin(), base64_str.end(), img_bytes.begin());
+
+    auto embed = llava_image_embed_make_with_bytes(ctx_clip, n_threads, img_bytes.data(), img_bytes.size());
+    if (!embed) {
+        fprintf(stderr, "%s: could not load image from base64 string.\n", __func__);
+        return NULL;
+    }
+
+    return embed;
+}
+
+static std::string remove_image_from_prompt(const std::string& prompt, const char * replacement = "") {
+    size_t begin, end;
+    find_image_tag_in_prompt(prompt, begin, end);
+    if (begin == std::string::npos || end == std::string::npos) {
+        return prompt;
+    }
+    auto pre = prompt.substr(0, begin);
+    auto post = prompt.substr(end + strlen(IMG_BASE64_TAG_END));
+    return pre + replacement + post;
+}
+
+struct llava_context {
+    struct clip_ctx * ctx_clip = NULL;
+    struct llama_context * ctx_llama = NULL;
+    struct llama_model * model = NULL;
+};
+
+static void show_additional_info(int /*argc*/, char ** argv) {
+    printf("\n example usage: %s -m <llava-v1.5-7b/ggml-model-q5_k.gguf> --mmproj <llava-v1.5-7b/mmproj-model-f16.gguf> --image <path/to/an/image.jpg> [--temp 0.1] [-p \"describe the image in detail.\"]\n", argv[0]);
+    printf("  note: a lower temperature value like 0.1 is recommended for better quality.\n");
+}
+
+static struct llava_image_embed * load_image(llava_context * ctx_llava, gpt_params * params) {
+
+    // load and preprocess the image
+    llava_image_embed * embed = NULL;
+    auto prompt = params->prompt;
+    if (prompt_contains_image(prompt)) {
+        if (!params->image.empty()) {
+            printf("using base64 encoded image instead of command line image path\n");
+        }
+        embed = llava_image_embed_make_with_prompt_base64(ctx_llava->ctx_clip, params->n_threads, prompt);
+        if (!embed) {
+            fprintf(stderr, "%s: can't load image from prompt\n", __func__);
+            return NULL;
+        }
+        params->prompt = remove_image_from_prompt(prompt);
+    } else {
+        embed = llava_image_embed_make_with_filename(ctx_llava->ctx_clip, params->n_threads, params->image.c_str());
+        if (!embed) {
+            fprintf(stderr, "%s: is %s really an image file?\n", __func__, params->image.c_str());
+            return NULL;
+        }
+    }
+
+    return embed;
+}
+
+static void process_prompt(struct llava_context * ctx_llava, struct llava_image_embed * image_embed, gpt_params * params, const std::string & prompt) {
+    int n_past = 0;
+
+    const int max_tgt_len = params->n_predict < 0 ? 256 : params->n_predict;
+
+    // llava chat format is "<system_prompt>\nUSER:<image>\n<prompt>\nASSISTANT:"
+    eval_string(ctx_llava->ctx_llama, "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\nUSER:", params->n_batch, &n_past, true);
+    llava_eval_image_embed(ctx_llava->ctx_llama, image_embed, params->n_batch, &n_past);
+    eval_string(ctx_llava->ctx_llama, (prompt + "\nASSISTANT:").c_str(), params->n_batch, &n_past, false);
+
+    // generate the response
+
+    printf("\n");
+
+    for (int i = 0; i < max_tgt_len; i++) {
+        const char * tmp = sample(ctx_llava->ctx_llama, *params, &n_past);
+        if (strcmp(tmp, "</s>") == 0) break;
+
+        printf("%s", tmp);
+        fflush(stdout);
+    }
+
+    printf("\n");
+}
+
+
+static struct llava_context * llava_init(gpt_params * params) {
+    const char * clip_path = params->mmproj.c_str();
+
+    auto prompt = params->prompt;
+    if (prompt.empty()) {
+        prompt = "describe the image in detail.";
+    }
+
+    auto ctx_clip = clip_model_load(clip_path, /*verbosity=*/ 1);
+
+    llama_backend_init(params->numa);
+
+    llama_model_params model_params = llama_model_default_params();
+    llama_model * model = llama_load_model_from_file(params->model.c_str(), model_params);
+    if (model == NULL) {
+        fprintf(stderr , "%s: error: unable to load model\n" , __func__);
+        return NULL;
+    }
+
+    llama_context_params ctx_params = llama_context_default_params();
+
+    ctx_params.n_ctx           = params->n_ctx < 2048 ? 2048 : params->n_ctx; // we need a longer context size to process image embeddings
+    ctx_params.n_threads       = params->n_threads;
+    ctx_params.n_threads_batch = params->n_threads_batch == -1 ? params->n_threads : params->n_threads_batch;
+
+    llama_context * ctx_llama = llama_new_context_with_model(model, ctx_params);
+
+    if (ctx_llama == NULL) {
+        fprintf(stderr , "%s: error: failed to create the llama_context\n" , __func__);
+        return NULL;
+    }
+
+    auto ctx_llava = (struct llava_context *)malloc(sizeof(llava_context));
+
+    ctx_llava->ctx_llama = ctx_llama;
+    ctx_llava->ctx_clip = ctx_clip;
+    ctx_llava->model = model;
+    return ctx_llava;
+}
+
+static void llava_free(struct llava_context * ctx_llava) {
+    if (ctx_llava->ctx_clip) {
+        clip_free(ctx_llava->ctx_clip);
+        ctx_llava->ctx_clip = NULL;
+    }
+
+    llama_free(ctx_llava->ctx_llama);
+    llama_free_model(ctx_llava->model);
+    llama_backend_free();
+}
+
+int main(int argc, char ** argv) {
+    ggml_time_init();
+
+    gpt_params params;
+
+    if (!gpt_params_parse(argc, argv, params)) {
+        show_additional_info(argc, argv);
+        return 1;
+    }
+    if (params.mmproj.empty() || (params.image.empty() && !prompt_contains_image(params.prompt))) {
+        gpt_print_usage(argc, argv, params);
+        show_additional_info(argc, argv);
+        return 1;
+    }
+
+    auto ctx_llava = llava_init(&params);
+    if (ctx_llava == NULL) {
+        fprintf(stderr, "%s: error: failed to init llava\n", __func__);
+        return 1;
+    }
+
+    auto image_embed = load_image(ctx_llava, &params);
+
+    // process the prompt
+    process_prompt(ctx_llava, image_embed, &params, params.prompt);
+
+    llama_print_timings(ctx_llava->ctx_llama);
+
+    llava_image_embed_free(image_embed);
+    llava_free(ctx_llava);
+    return 0;
+}
diff --git a/examples/llava/llava-utils.h b/examples/llava/llava-utils.h
deleted file mode 100644
index 320c71967..000000000
--- a/examples/llava/llava-utils.h
+++ /dev/null
@@ -1,147 +0,0 @@
-#pragma once
-
-// this one and clip lib will be eventually merged to a single lib, let's keep it this way for now
-
-#include "common.h"
-#include "llama.h"
-
-#include <cstdio>
-#include <cstdlib>
-#include <vector>
-
-inline bool eval_image_embd(llama_context * ctx_llama, float * embd, int N, int n_batch, int * n_past) {
-    int n_embd = llama_n_embd(llama_get_model(ctx_llama));
-
-    for (int i = 0; i
< N; i += n_batch) { - int n_eval = N - i; - if (n_eval > n_batch) { - n_eval = n_batch; - } - llama_batch batch = {int32_t(n_eval), nullptr, (embd+i*n_embd), nullptr, nullptr, nullptr, nullptr, *n_past, 1, 0, }; - if (llama_decode(ctx_llama, batch)) { - fprintf(stderr, "%s : failed to eval\n", __func__); - return false; - } - *n_past += n_eval; - } - return true; -} - -inline bool eval_tokens(struct llama_context * ctx_llama, std::vector tokens, int n_batch, int * n_past) { - int N = (int) tokens.size(); - for (int i = 0; i < N; i += n_batch) { - int n_eval = (int) tokens.size() - i; - if (n_eval > n_batch) { - n_eval = n_batch; - } - if (llama_decode(ctx_llama, llama_batch_get_one(&tokens[i], n_eval, *n_past, 0))) { - fprintf(stderr, "%s : failed to eval\n", __func__); - return false; - } - *n_past += n_eval; - } - return true; -} - -inline bool eval_id(struct llama_context * ctx_llama, int id, int * n_past) { - std::vector tokens; - tokens.push_back(id); - return eval_tokens(ctx_llama, tokens, 1, n_past); -} - -inline bool eval_string(struct llama_context * ctx_llama, const char* str, int n_batch, int * n_past, bool add_bos){ - std::string str2 = str; - std::vector embd_inp = ::llama_tokenize(ctx_llama, str2, add_bos); - eval_tokens(ctx_llama, embd_inp, n_batch, n_past); - return true; -} - -// TODO: use common/sampling.h -inline llama_token sample_id(llama_context * ctx_llama, gpt_params & params) { - auto & sparams = params.sparams; - - // out of user input, sample next token - const float temp = sparams.temp; - const int32_t top_k = sparams.top_k <= 0 ? llama_n_vocab(llama_get_model(ctx_llama)) : sparams.top_k; - const float top_p = sparams.top_p; - const float tfs_z = sparams.tfs_z; - const float typical_p = sparams.typical_p; - // const int32_t repeat_last_n = sparams.repeat_last_n < 0 ? 
n_ctx : sparams.repeat_last_n; - // const float repeat_penalty = sparams.repeat_penalty; - // const float alpha_presence = sparams.presence_penalty; - // const float alpha_frequency = sparams.frequency_penalty; - const int mirostat = sparams.mirostat; - const float mirostat_tau = sparams.mirostat_tau; - const float mirostat_eta = sparams.mirostat_eta; - // const bool penalize_nl = sparams.penalize_nl; - - llama_token id = 0; - { - auto logits = llama_get_logits(ctx_llama); - auto n_vocab = llama_n_vocab(llama_get_model(ctx_llama)); - - // Apply params.logit_bias map - for (auto it = sparams.logit_bias.begin(); it != sparams.logit_bias.end(); it++) { - logits[it->first] += it->second; - } - - std::vector candidates; - candidates.reserve(n_vocab); - for (llama_token token_id = 0; token_id < n_vocab; token_id++) { - candidates.emplace_back(llama_token_data{token_id, logits[token_id], 0.0f}); - } - - llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; - - // TODO: Apply penalties - // float nl_logit = logits[llama_token_nl(ctx)]; - // auto last_n_repeat = std::min(std::min((int)last_n_tokens.size(), repeat_last_n), n_ctx); - // llama_sample_repetition_penalty(ctx, &candidates_p, - // last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, - // last_n_repeat, repeat_penalty); - // llama_sample_frequency_and_presence_penalties(ctx, &candidates_p, - // last_n_tokens.data() + last_n_tokens.size() - last_n_repeat, - // last_n_repeat, alpha_frequency, alpha_presence); - // if (!penalize_nl) { - // logits[llama_token_nl(ctx)] = nl_logit; - // } - - if (temp <= 0) { - // Greedy sampling - id = llama_sample_token_greedy(ctx_llama, &candidates_p); - } else { - if (mirostat == 1) { - static float mirostat_mu = 2.0f * mirostat_tau; - const int mirostat_m = 100; - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token_mirostat(ctx_llama, &candidates_p, mirostat_tau, mirostat_eta, mirostat_m, &mirostat_mu); - } else if (mirostat == 2) { - static float mirostat_mu = 2.0f * mirostat_tau; - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token_mirostat_v2(ctx_llama, &candidates_p, mirostat_tau, mirostat_eta, &mirostat_mu); - } else { - // Temperature sampling - llama_sample_top_k(ctx_llama, &candidates_p, top_k, 1); - llama_sample_tail_free(ctx_llama, &candidates_p, tfs_z, 1); - llama_sample_typical(ctx_llama, &candidates_p, typical_p, 1); - llama_sample_top_p(ctx_llama, &candidates_p, top_p, 1); - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token(ctx_llama, &candidates_p); - } - } - } - - return id; -} - -inline const char * sample(struct llama_context * ctx_llama, gpt_params & params, int * n_past) { - int id = sample_id(ctx_llama, params); - static std::string ret; - if (id == llama_token_eos(llama_get_model(ctx_llama))) { - ret = ""; - } else { - ret = llama_token_to_piece(ctx_llama, id); - } - eval_id(ctx_llama, id, n_past); - return ret.c_str(); -} diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index f0974d5bc..d10bcf2d2 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -1,164 +1,156 @@ #include "clip.h" -#include "llava-utils.h" #include "common.h" #include "llama.h" +#include "llava.h" #include #include #include -static void show_additional_info(int /*argc*/, char ** argv) { - printf("\n example usage: %s -m --mmproj --image [--temp 0.1] [-p \"describe the image in detail.\"]\n", argv[0]); - printf(" note: a lower temperature value like 0.1 is 
recommended for better quality.\n"); -} +#include "base64.hpp" -int main(int argc, char ** argv) { - ggml_time_init(); - - gpt_params params; - - if (!gpt_params_parse(argc, argv, params)) { - show_additional_info(argc, argv); - return 1; +static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float * image_embd, int * n_img_pos) { + clip_image_f32 * img_res = make_clip_image_f32(); + if (!clip_image_preprocess(ctx_clip, img, img_res, /*pad2square =*/ true)) { + fprintf(stderr, "%s: unable to preprocess image\n", __func__); + clip_image_f32_free(img_res); + return false; } - if (params.mmproj.empty() || params.image.empty()) { - gpt_print_usage(argc, argv, params); - show_additional_info(argc, argv); - return 1; - } - - const char * clip_path = params.mmproj.c_str(); - const char * img_path = params.image.c_str(); - - if (params.prompt.empty()) { - params.prompt = "describe the image in detail."; - } - - auto ctx_clip = clip_model_load(clip_path, /*verbosity=*/ 1); - - // load and preprocess the image - clip_image_u8 img; - clip_image_f32 img_res; - - if (!clip_image_load_from_file(img_path, &img)) { - fprintf(stderr, "%s: is %s really an image file?\n", __func__, img_path); - - clip_free(ctx_clip); - return 1; - } - - if (!clip_image_preprocess(ctx_clip, &img, &img_res, /*pad2square =*/ true)) { - fprintf(stderr, "%s: unable to preprocess %s\n", __func__, img_path); - - clip_free(ctx_clip); - return 1; - } - - int n_img_pos = clip_n_patches(ctx_clip); - int n_img_embd = clip_n_mmproj_embd(ctx_clip); - - float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)); - - if (!image_embd) { - fprintf(stderr, "Unable to allocate memory for image embeddings\n"); - - return 1; - } + *n_img_pos = clip_n_patches(ctx_clip); const int64_t t_img_enc_start_us = ggml_time_us(); - if (!clip_image_encode(ctx_clip, params.n_threads, &img_res, image_embd)) { + bool encoded = clip_image_encode(ctx_clip, n_threads, img_res, image_embd); + clip_image_f32_free(img_res); + if (!encoded) { fprintf(stderr, "Unable to encode image\n"); - return 1; + return false; } + const int64_t t_img_enc_end_us = ggml_time_us(); + float t_img_enc_ms = (t_img_enc_end_us - t_img_enc_start_us) / 1000.0; - // we get the embeddings, free up the memory required for CLIP - clip_free(ctx_clip); + printf("\n%s: image encoded in %8.2f ms by CLIP (%8.2f ms per image patch)\n", __func__, t_img_enc_ms, t_img_enc_ms / *n_img_pos); - llama_backend_init(params.numa); - - llama_model_params model_params = llama_model_default_params(); - model_params.n_gpu_layers = params.n_gpu_layers; - model_params.main_gpu = params.main_gpu; - model_params.tensor_split = params.tensor_split; - model_params.use_mmap = params.use_mmap; - model_params.use_mlock = params.use_mlock; - - llama_model * model = llama_load_model_from_file(params.model.c_str(), model_params); - if (model == NULL) { - fprintf(stderr , "%s: error: unable to load model\n" , __func__); - return 1; - } - - llama_context_params ctx_params = llama_context_default_params(); - - ctx_params.n_ctx = params.n_ctx < 2048 ? 2048 : params.n_ctx; // we need a longer context size to process image embeddings - ctx_params.n_threads = params.n_threads; - ctx_params.n_threads_batch = params.n_threads_batch == -1 ? 
params.n_threads : params.n_threads_batch; - ctx_params.seed = params.seed; - - llama_context * ctx_llama = llama_new_context_with_model(model, ctx_params); - - if (ctx_llama == NULL) { - fprintf(stderr , "%s: error: failed to create the llama_context\n" , __func__); - return 1; - } - - // make sure that the correct mmproj was used, i.e., compare apples to apples - const int n_llama_embd = llama_n_embd(llama_get_model(ctx_llama)); - - if (n_img_embd != n_llama_embd) { - printf("%s: embedding dim of the multimodal projector (%d) is not equal to that of LLaMA (%d). Make sure that you use the correct mmproj file.\n", __func__, n_img_embd, n_llama_embd); - - llama_free(ctx_llama); - llama_free_model(model); - llama_backend_free(); - free(image_embd); - - return 1; - } - - // process the prompt - // llava chat format is "USER: \n\nASSISTANT:" - - int n_past = 0; - - const int max_tgt_len = params.n_predict < 0 ? 256 : params.n_predict; - - eval_string(ctx_llama, "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\nUSER:", params.n_batch, &n_past, true); - eval_image_embd(ctx_llama, image_embd, n_img_pos, params.n_batch, &n_past); - eval_string(ctx_llama, (params.prompt + "\nASSISTANT:").c_str(), params.n_batch, &n_past, false); - - // generate the response - - printf("\n"); - printf("prompt: '%s'\n", params.prompt.c_str()); - printf("\n"); - - for (int i = 0; i < max_tgt_len; i++) { - const char * tmp = sample(ctx_llama, params, &n_past); - if (strcmp(tmp, "") == 0) break; - - printf("%s", tmp); - fflush(stdout); - } - - printf("\n"); - - { - const float t_img_enc_ms = (t_img_enc_end_us - t_img_enc_start_us) / 1000.0; - - printf("\n%s: image encoded in %8.2f ms by CLIP (%8.2f ms per image patch)\n", __func__, t_img_enc_ms, t_img_enc_ms / n_img_pos); - } - - llama_print_timings(ctx_llama); - - llama_free(ctx_llama); - llama_free_model(model); - llama_backend_free(); - free(image_embd); - - return 0; + return true; +} + +bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * ctx_clip) { + // make sure that the correct mmproj was used, i.e., compare apples to apples + int n_llama_embd = llama_n_embd(llama_get_model(ctx_llama)); + auto n_image_embd = clip_n_mmproj_embd(ctx_clip); + if (n_image_embd != n_llama_embd) { + printf("%s: embedding dim of the multimodal projector (%d) is not equal to that of LLaMA (%d). 
Make sure that you use the correct mmproj file.\n", __func__, n_image_embd, n_llama_embd);
+        return false;
+    }
+    return true;
+}
+
+static bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out) {
+    float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip));
+    if (!image_embd) {
+        fprintf(stderr, "Unable to allocate memory for image embeddings\n");
+        return false;
+    }
+
+    int n_img_pos;
+    if (!encode_image_with_clip(ctx_clip, n_threads, img, image_embd, &n_img_pos)) {
+        fprintf(stderr, "%s: cannot encode image, aborting\n", __func__);
+        free(image_embd);
+        return false;
+    }
+    *image_embd_out = image_embd;
+    *n_img_pos_out = n_img_pos;
+
+    return true;
+}
+
+bool llava_eval_image_embed(llama_context * ctx_llama, const struct llava_image_embed * image_embed, int n_batch, int * n_past) {
+    int n_embd = llama_n_embd(llama_get_model(ctx_llama));
+
+    for (int i = 0; i < image_embed->n_image_pos; i += n_batch) {
+        int n_eval = image_embed->n_image_pos - i;
+        if (n_eval > n_batch) {
+            n_eval = n_batch;
+        }
+        llama_batch batch = {int32_t(n_eval), nullptr, (image_embed->embed+i*n_embd), nullptr, nullptr, nullptr, nullptr, *n_past, 1, 0, };
+        if (llama_decode(ctx_llama, batch)) {
+            fprintf(stderr, "%s : failed to eval\n", __func__);
+            return false;
+        }
+        *n_past += n_eval;
+    }
+    return true;
+}
+
+LLAVA_API struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length) {
+    clip_image_u8 * img = make_clip_image_u8();
+    if (!clip_image_load_from_bytes(image_bytes, image_bytes_length, img)) {
+        clip_image_u8_free(img);
+        fprintf(stderr, "%s: can't load image from bytes, is it a valid image?", __func__);
+        return NULL;
+    }
+
+    float * image_embed = NULL;
+    int n_image_pos = 0;
+    bool image_embed_result = llava_image_embed_make_with_clip_img(ctx_clip, n_threads, img, &image_embed, &n_image_pos);
+    if (!image_embed_result) {
+        clip_image_u8_free(img);
+        fprintf(stderr, "%s: couldn't embed the image\n", __func__);
+        return NULL;
+    }
+
+    clip_image_u8_free(img);
+    auto result = (llava_image_embed*)malloc(sizeof(llava_image_embed));
+    result->embed = image_embed;
+    result->n_image_pos = n_image_pos;
+    return result;
+}
+
+static bool load_file_to_bytes(const char* path, unsigned char** bytesOut, long *sizeOut) {
+    auto file = fopen(path, "rb");
+    if (file == NULL) {
+        fprintf(stderr, "%s: can't read file %s\n", __func__, path);
+        return false;
+    }
+
+    fseek(file, 0, SEEK_END);
+    auto fileSize = ftell(file);
+    fseek(file, 0, SEEK_SET);
+
+    auto buffer = (unsigned char *)malloc(fileSize); // Allocate memory to hold the file data
+    if (buffer == NULL) {
+        fprintf(stderr, "%s: failed to alloc %ld bytes for file %s\n", __func__, fileSize, path);
+        perror("Memory allocation error");
+        fclose(file);
+        return false;
+    }
+    fread(buffer, 1, fileSize, file); // Read the file into the buffer
+    fclose(file); // Close the file
+
+    *bytesOut = buffer;
+    *sizeOut = fileSize;
+    return true;
+}
+
+LLAVA_API struct llava_image_embed * llava_image_embed_make_with_filename(struct clip_ctx * ctx_clip, int n_threads, const char * image_path) {
+    unsigned char* image_bytes;
+    long image_bytes_length;
+    auto loaded = load_file_to_bytes(image_path, &image_bytes, &image_bytes_length);
+    if (!loaded) {
+        fprintf(stderr, "%s: failed to load %s\n", __func__, image_path);
+        return NULL;
+    }
+
+    auto embed = llava_image_embed_make_with_bytes(ctx_clip, n_threads, image_bytes, image_bytes_length);
+    free(image_bytes);
+
+    return embed;
+}
+
+LLAVA_API void llava_image_embed_free(struct llava_image_embed * embed) {
+    free(embed->embed);
+    free(embed);
+}
diff --git a/examples/llava/llava.h b/examples/llava/llava.h
new file mode 100644
index 000000000..e08ce7883
--- /dev/null
+++ b/examples/llava/llava.h
@@ -0,0 +1,50 @@
+#ifndef LLAVA_H
+#define LLAVA_H
+
+#include "ggml.h"
+
+
+#ifdef LLAMA_SHARED
+#    if defined(_WIN32) && !defined(__MINGW32__)
+#        ifdef LLAMA_BUILD
+#            define LLAVA_API __declspec(dllexport)
+#        else
+#            define LLAVA_API __declspec(dllimport)
+#        endif
+#    else
+#        define LLAVA_API __attribute__ ((visibility ("default")))
+#    endif
+#else
+#    define LLAVA_API
+#endif
+
+struct clip_ctx;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+struct llava_image_embed {
+    float * embed;
+    int n_image_pos;
+};
+
+/** sanity check for clip <-> llava embed size match */
+LLAVA_API bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * ctx_clip);
+
+/** build an image embed from image file bytes */
+LLAVA_API struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length);
+/** build an image embed from a path to an image filename */
+LLAVA_API struct llava_image_embed * llava_image_embed_make_with_filename(struct clip_ctx * ctx_clip, int n_threads, const char * image_path);
+/** free an embedding made with llava_image_embed_make_* */
+LLAVA_API void llava_image_embed_free(struct llava_image_embed * embed);
+
+/** write the image represented by embed into the llama context with batch size n_batch, starting at context pos n_past. on completion, n_past points to the next position in the context after the image embed. */
+LLAVA_API bool llava_eval_image_embed(struct llama_context * ctx_llama, const struct llava_image_embed * embed, int n_batch, int * n_past);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt
index 1f0d26f77..859cd12c6 100644
--- a/examples/server/CMakeLists.txt
+++ b/examples/server/CMakeLists.txt
@@ -6,7 +6,7 @@ install(TARGETS ${TARGET} RUNTIME)
 target_compile_definitions(${TARGET} PRIVATE
     SERVER_VERBOSE=$<BOOL:${LLAMA_SERVER_VERBOSE}>
 )
-target_link_libraries(${TARGET} PRIVATE common llama clip ${CMAKE_THREAD_LIBS_INIT})
+target_link_libraries(${TARGET} PRIVATE common llama llava ${CMAKE_THREAD_LIBS_INIT})
 if (WIN32)
     TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32)
 endif()

From 46876d2a2c92e60579dc732cdb8cbd243b06f317 Mon Sep 17 00:00:00 2001
From: Meng Zhang
Date: Mon, 6 Nov 2023 22:49:08 -0800
Subject: [PATCH 094/859] cuda : supports running on CPU for GGML_USE_CUBLAS=ON
 build (#3946)

* prototyping the idea that supports running on CPU for a GGML_USE_CUBLAS=on build

* doc: add comments to ggml_cublas_loaded()

* fix defined(...)
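The diff below applies one pattern throughout: `ggml_init_cublas()` no longer fails hard when `cudaGetDeviceCount` reports no device, and every CUDA-specific path is gated on the new `ggml_cublas_loaded()` query, so a CUBLAS-enabled binary can still run on a GPU-less machine. A minimal sketch of the idiom, assuming a `GGML_USE_CUBLAS` build (the wrapper name `host_alloc` is illustrative, not part of the patch):

```cpp
#include <cstdlib>
#include "ggml-cuda.h"

// Sketch of the runtime-fallback idiom this patch introduces.
static void * host_alloc(size_t n) {
    ggml_init_cublas();                  // always succeeds, even with no GPU present
    if (ggml_cublas_loaded()) {          // true only if CUDA devices were found
        return ggml_cuda_host_malloc(n); // pinned host memory for fast transfers
    }
    return malloc(n);                    // plain allocation on CPU-only machines
}
```

The `llama_host_malloc` change in llama.cpp further down is exactly this shape; the same guard is repeated for freeing buffers, KV-cache offload, and backend selection.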
--- ggml-cuda.cu | 17 ++++- ggml-cuda.h | 5 ++ llama.cpp | 181 ++++++++++++++++++++++++++++++--------------------- 3 files changed, 127 insertions(+), 76 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 2d9ffffbf..f87f18802 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5790,6 +5790,11 @@ static void ggml_cuda_pool_free(void * ptr, size_t size) { CUDA_CHECK(cudaFree(ptr)); } +static bool g_cublas_loaded = false; + +bool ggml_cublas_loaded(void) { + return g_cublas_loaded; +} void ggml_init_cublas() { static bool initialized = false; @@ -5803,7 +5808,12 @@ void ggml_init_cublas() { CUDA_CHECK(cudaDeviceSynchronize()); #endif - CUDA_CHECK(cudaGetDeviceCount(&g_device_count)); + if (cudaGetDeviceCount(&g_device_count) != cudaSuccess) { + initialized = true; + g_cublas_loaded = false; + return; + } + GGML_ASSERT(g_device_count <= GGML_CUDA_MAX_DEVICES); int64_t total_vram = 0; #if defined(GGML_CUDA_FORCE_MMQ) @@ -5851,6 +5861,7 @@ void ggml_init_cublas() { // CUBLAS_CHECK(cublasLoggerConfigure(1, 1, 0, nullptr)); initialized = true; + g_cublas_loaded = true; } } @@ -7158,6 +7169,8 @@ static void ggml_cuda_rms_norm(const ggml_tensor * src0, const ggml_tensor * src } bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { + if (!g_cublas_loaded) return false; + const int64_t ne10 = src1->ne[0]; const int64_t ne0 = dst->ne[0]; @@ -7843,6 +7856,8 @@ void ggml_cuda_free_scratch() { } bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { + if (!g_cublas_loaded) return false; + ggml_cuda_func_t func; const bool any_on_device = tensor->backend == GGML_BACKEND_GPU || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) diff --git a/ggml-cuda.h b/ggml-cuda.h index 57adc9cf3..528e66c33 100644 --- a/ggml-cuda.h +++ b/ggml-cuda.h @@ -17,7 +17,12 @@ extern "C" { #define GGML_CUDA_MAX_DEVICES 16 +// Always success. To check if CUDA is actually loaded, use `ggml_cublas_loaded`. GGML_API void ggml_init_cublas(void); + +// Returns `true` if there are available CUDA devices and cublas loads successfully; otherwise, it returns `false`. 
+GGML_API bool ggml_cublas_loaded(void); + GGML_API void * ggml_cuda_host_malloc(size_t size); GGML_API void ggml_cuda_host_free(void * ptr); diff --git a/llama.cpp b/llama.cpp index e16539000..d220ff3e9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -596,19 +596,37 @@ static void ggml_graph_compute_helper(std::vector & buf, ggml_cgraph * // llama helpers // +inline void * llama_host_malloc(size_t n) { #ifdef GGML_USE_CUBLAS -# define llama_host_malloc(n) ggml_cuda_host_malloc(n) -# define llama_host_free(data) ggml_cuda_host_free(data) + if (ggml_cublas_loaded()) { + return ggml_cuda_host_malloc(n); + } else { + return malloc(n); + } #elif GGML_USE_METAL -# define llama_host_malloc(n) ggml_metal_host_malloc(n) -# define llama_host_free(data) ggml_metal_host_free(data) + return ggml_metal_host_malloc(n); #elif GGML_USE_CPU_HBM -# define llama_host_malloc(n) hbw_malloc(n) -# define llama_host_free(data) if (data != NULL) hbw_free(data) + return hbw_malloc(n); #else -# define llama_host_malloc(n) malloc(n) -# define llama_host_free(data) free(data) + return malloc(n); #endif +} + +inline void llama_host_free(void * ptr) { +#ifdef GGML_USE_CUBLAS + if (ggml_cublas_loaded()) { + return ggml_cuda_host_free(ptr); + } else { + return free(ptr); + } +#elif GGML_USE_METAL + return ggml_metal_host_free(ptr); +#elif GGML_USE_CPU_HBM + return hbw_free(ptr); +#else + return free(ptr); +#endif +} #if defined(_WIN32) static std::string llama_format_win_err(DWORD err) { @@ -1200,9 +1218,11 @@ struct llama_kv_cache { } #ifdef GGML_USE_CUBLAS - ggml_cuda_free_data(k); - ggml_cuda_free_data(v); -#endif // GGML_USE_CUBLAS + if (ggml_cublas_loaded()) { + ggml_cuda_free_data(k); + ggml_cuda_free_data(v); + } +#endif } }; @@ -1302,11 +1322,15 @@ struct llama_model { } #ifdef GGML_USE_CUBLAS - for (size_t i = 0; i < tensors_by_name.size(); ++i) { - ggml_cuda_free_data(tensors_by_name[i].second); + if (ggml_cublas_loaded()) { + for (size_t i = 0; i < tensors_by_name.size(); ++i) { + ggml_cuda_free_data(tensors_by_name[i].second); + } + ggml_cuda_free_scratch(); } - ggml_cuda_free_scratch(); -#elif defined(GGML_USE_CLBLAST) +#endif + +#if defined(GGML_USE_CLBLAST) for (size_t i = 0; i < tensors_by_name.size(); ++i) { ggml_cl_free_data(tensors_by_name[i].second); } @@ -1418,23 +1442,26 @@ static bool llama_kv_cache_init( ggml_set_name(cache.v, "cache_v"); (void) n_gpu_layers; -#ifdef GGML_USE_CUBLAS - size_t vram_kv_cache = 0; - if (n_gpu_layers > (int)n_layer + 1) { - ggml_cuda_assign_buffers_no_scratch(cache.v); - LLAMA_LOG_INFO("%s: offloading v cache to GPU\n", __func__); - vram_kv_cache += ggml_nbytes(cache.v); +#ifdef GGML_USE_CUBLAS + if (ggml_cublas_loaded()) { + size_t vram_kv_cache = 0; + + if (n_gpu_layers > (int)n_layer + 1) { + ggml_cuda_assign_buffers_no_scratch(cache.v); + LLAMA_LOG_INFO("%s: offloading v cache to GPU\n", __func__); + vram_kv_cache += ggml_nbytes(cache.v); + } + if (n_gpu_layers > (int)n_layer + 2) { + ggml_cuda_assign_buffers_no_scratch(cache.k); + LLAMA_LOG_INFO("%s: offloading k cache to GPU\n", __func__); + vram_kv_cache += ggml_nbytes(cache.k); + } + if (vram_kv_cache > 0) { + LLAMA_LOG_INFO("%s: VRAM kv self = %.2f MB\n", __func__, vram_kv_cache / 1024.0 / 1024.0); + } } - if (n_gpu_layers > (int)n_layer + 2) { - ggml_cuda_assign_buffers_no_scratch(cache.k); - LLAMA_LOG_INFO("%s: offloading k cache to GPU\n", __func__); - vram_kv_cache += ggml_nbytes(cache.k); - } - if (vram_kv_cache > 0) { - LLAMA_LOG_INFO("%s: VRAM kv self = %.2f MB\n", __func__, vram_kv_cache / 1024.0 / 1024.0); - 
} -#endif // GGML_USE_CUBLAS +#endif return true; } @@ -2521,18 +2548,22 @@ static void llm_load_tensors( } (void) main_gpu; + + enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; + enum ggml_backend_type llama_backend_offload_split = GGML_BACKEND_CPU; + #ifdef GGML_USE_CUBLAS - LLAMA_LOG_INFO("%s: using " GGML_CUDA_NAME " for GPU acceleration\n", __func__); - ggml_cuda_set_main_device(main_gpu); -#define LLAMA_BACKEND_OFFLOAD GGML_BACKEND_GPU -#define LLAMA_BACKEND_OFFLOAD_SPLIT GGML_BACKEND_GPU_SPLIT + if (ggml_cublas_loaded()) { + LLAMA_LOG_INFO("%s: using " GGML_CUDA_NAME " for GPU acceleration\n", __func__); + ggml_cuda_set_main_device(main_gpu); + + llama_backend_offload = GGML_BACKEND_GPU; + llama_backend_offload_split = GGML_BACKEND_GPU_SPLIT; + } #elif defined(GGML_USE_CLBLAST) - LLAMA_LOG_INFO("%s: using OpenCL for GPU acceleration\n", __func__); -#define LLAMA_BACKEND_OFFLOAD GGML_BACKEND_GPU -#define LLAMA_BACKEND_OFFLOAD_SPLIT GGML_BACKEND_GPU -#else -#define LLAMA_BACKEND_OFFLOAD GGML_BACKEND_CPU -#define LLAMA_BACKEND_OFFLOAD_SPLIT GGML_BACKEND_CPU + LLAMA_LOG_INFO("%s: using OpenCL for GPU acceleration\n", __func__); + llama_backend_offload = GGML_BACKEND_GPU; + llama_backend_offload_split = GGML_BACKEND_GPU; #endif // prepare memory for the weights @@ -2559,12 +2590,12 @@ static void llm_load_tensors( // norm is not performance relevant on its own but keeping it in VRAM reduces data copying // on Windows however this is detrimental unless everything is on the GPU #ifndef _WIN32 - backend_norm = LLAMA_BACKEND_OFFLOAD; + backend_norm = llama_backend_offload; #else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; #endif // _WIN32 - backend_output = LLAMA_BACKEND_OFFLOAD_SPLIT; + backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; backend_output = GGML_BACKEND_CPU; @@ -2588,8 +2619,8 @@ static void llm_load_tensors( model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; // NOLINT + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT auto & layer = model.layers[i]; @@ -2625,12 +2656,12 @@ static void llm_load_tensors( // norm is not performance relevant on its own but keeping it in VRAM reduces data copying // on Windows however this is detrimental unless everything is on the GPU #ifndef _WIN32 - backend_norm = LLAMA_BACKEND_OFFLOAD; + backend_norm = llama_backend_offload; #else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; #endif // _WIN32 - backend_output = LLAMA_BACKEND_OFFLOAD_SPLIT; + backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; backend_output = GGML_BACKEND_CPU; @@ -2654,8 +2685,8 @@ static void llm_load_tensors( model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? 
GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; // NOLINT + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT auto & layer = model.layers[i]; @@ -2695,12 +2726,12 @@ static void llm_load_tensors( // norm is not performance relevant on its own but keeping it in VRAM reduces data copying // on Windows however this is detrimental unless everything is on the GPU #ifndef _WIN32 - backend_norm = LLAMA_BACKEND_OFFLOAD; + backend_norm = llama_backend_offload; #else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; #endif // _WIN32 - backend_output = LLAMA_BACKEND_OFFLOAD_SPLIT; + backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; backend_output = GGML_BACKEND_CPU; @@ -2726,8 +2757,8 @@ static void llm_load_tensors( model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; // NOLINT + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT auto & layer = model.layers[i]; @@ -2772,12 +2803,12 @@ static void llm_load_tensors( // norm is not performance relevant on its own but keeping it in VRAM reduces data copying // on Windows however this is detrimental unless everything is on the GPU #ifndef _WIN32 - backend_norm = LLAMA_BACKEND_OFFLOAD; + backend_norm = llama_backend_offload; #else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; #endif // _WIN32 - backend_output = LLAMA_BACKEND_OFFLOAD_SPLIT; + backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; backend_output = GGML_BACKEND_CPU; @@ -2803,8 +2834,8 @@ static void llm_load_tensors( model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; // NOLINT + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT auto & layer = model.layers[i]; @@ -2849,12 +2880,12 @@ static void llm_load_tensors( // norm is not performance relevant on its own but keeping it in VRAM reduces data copying // on Windows however this is detrimental unless everything is on the GPU #ifndef _WIN32 - backend_norm = LLAMA_BACKEND_OFFLOAD; + backend_norm = llama_backend_offload; #else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? 
GGML_BACKEND_CPU : llama_backend_offload; #endif // _WIN32 - backend_output = LLAMA_BACKEND_OFFLOAD_SPLIT; + backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; backend_output = GGML_BACKEND_CPU; @@ -2877,8 +2908,8 @@ static void llm_load_tensors( const int i_gpu_start = n_layer - n_gpu_layers; model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; auto & layer = model.layers[i]; layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); @@ -2915,12 +2946,12 @@ static void llm_load_tensors( // norm is not performance relevant on its own but keeping it in VRAM reduces data copying // on Windows however this is detrimental unless everything is on the GPU #ifndef _WIN32 - backend_norm = LLAMA_BACKEND_OFFLOAD; + backend_norm = llama_backend_offload; #else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; #endif // _WIN32 - backend_output = LLAMA_BACKEND_OFFLOAD_SPLIT; + backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; backend_output = GGML_BACKEND_CPU; @@ -2946,8 +2977,8 @@ static void llm_load_tensors( model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; // NOLINT + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT auto & layer = model.layers[i]; @@ -2993,12 +3024,12 @@ static void llm_load_tensors( // norm is not performance relevant on its own but keeping it in VRAM reduces data copying // on Windows however this is detrimental unless everything is on the GPU #ifndef _WIN32 - backend_norm = LLAMA_BACKEND_OFFLOAD; + backend_norm = llama_backend_offload; #else - backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? GGML_BACKEND_CPU : llama_backend_offload; #endif // _WIN32 - backend_output = LLAMA_BACKEND_OFFLOAD_SPLIT; + backend_output = llama_backend_offload_split; } else { backend_norm = GGML_BACKEND_CPU; backend_output = GGML_BACKEND_CPU; @@ -3022,8 +3053,8 @@ static void llm_load_tensors( model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : LLAMA_BACKEND_OFFLOAD_SPLIT; // NOLINT + const ggml_backend_type backend = int(i) < i_gpu_start ? 
GGML_BACKEND_CPU : llama_backend_offload; // NOLINT
+            const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT
 
             auto & layer = model.layers[i];
 

From 54b4df8886103b436a4bb3b60f4d84824f9e8868 Mon Sep 17 00:00:00 2001
From: Matthew Tejo
Date: Mon, 6 Nov 2023 23:43:59 -0800
Subject: [PATCH 095/859] Use params when loading models in llava-cli (#3976)

llava-cli was loading models with default params and ignoring settings
from the cli. This switches to a generic function to load the params
from the cli options.
---
 examples/llava/llava-cli.cpp | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp
index 19374c67f..633afd1da 100644
--- a/examples/llava/llava-cli.cpp
+++ b/examples/llava/llava-cli.cpp
@@ -242,18 +242,16 @@ static struct llava_context * llava_init(gpt_params * params) {
 
     llama_backend_init(params->numa);
 
-    llama_model_params model_params = llama_model_default_params();
+    llama_model_params model_params = llama_model_params_from_gpt_params(*params);
+
     llama_model * model = llama_load_model_from_file(params->model.c_str(), model_params);
     if (model == NULL) {
         fprintf(stderr , "%s: error: unable to load model\n" , __func__);
         return NULL;
     }
 
-    llama_context_params ctx_params = llama_context_default_params();
-
+    llama_context_params ctx_params = llama_context_params_from_gpt_params(*params);
     ctx_params.n_ctx = params->n_ctx < 2048 ? 2048 : params->n_ctx; // we need a longer context size to process image embeddings
-    ctx_params.n_threads = params->n_threads;
-    ctx_params.n_threads_batch = params->n_threads_batch == -1 ? params->n_threads : params->n_threads_batch;
 
     llama_context * ctx_llama = llama_new_context_with_model(model, ctx_params);
 

From e9c1cecb9d7d743d30b4a29ecd56a411437def0a Mon Sep 17 00:00:00 2001
From: xaedes
Date: Tue, 7 Nov 2023 09:04:51 +0100
Subject: [PATCH 096/859] ggml : fix backward rope after YaRN (#3974)

* fix backward process of rope

rope backward process was broken after YaRN RoPE (#2268) implementation,
due to missing changes in backward functions.

the code for the backward process is nearly identical to the forward
process: the only difference is the sign of the sin-values.

to avoid future regressions remove the near-duplicate backward functions
and reuse the forward code: for this a new function argument `bool forward`
was added to `ggml_compute_forward_rope_f32` and
`ggml_compute_forward_rope_f16`. the sin-values will be negated when
forward is false.

* fix finetune rope call to use correct default attn_factor of 1.0f

* remove unused `ggml_rope_xpos_back`

it is better to have only one `ggml_rope_back` function that accepts
all rope parameters, so that `ggml_compute_backward` can propagate all
parameters without having to switch between different rope_back variants.

* fix comments explaining the sine sign in ggml_forward_rope

* add missing function arguments in declaration

* fix function argument type in declaration
---
 examples/finetune/finetune.cpp |   2 +-
 ggml.c                         | 330 ++++++++------------------------
 ggml.h                         |   5 +
 3 files changed, 84 insertions(+), 253 deletions(-)

diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp
index 649a3b7c1..fa7dbe496 100644
--- a/examples/finetune/finetune.cpp
+++ b/examples/finetune/finetune.cpp
@@ -643,7 +643,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs(
         return ggml_rope_custom(ctx,
             t, KQ_pos, n_rot, rope_mode, n_ctx, 0,
-            rope_freq_base, rope_freq_scale, 0.0f, 0.0f, 0.0f, 0.0f
+            rope_freq_base, rope_freq_scale, 0.0f, 1.0f, 0.0f, 0.0f
         );
     };

diff --git a/ggml.c b/ggml.c
index 605a27940..009d5b398 100644
--- a/ggml.c
+++ b/ggml.c
@@ -4970,8 +4970,13 @@ struct ggml_tensor * ggml_rope_back(
         int                   n_dims,
         int                   mode,
         int                   n_ctx,
+        int                   n_orig_ctx,
         float                 freq_base,
         float                 freq_scale,
+        float                 ext_factor,
+        float                 attn_factor,
+        float                 beta_fast,
+        float                 beta_slow,
         float                 xpos_base,
         bool                  xpos_down) {
     GGML_ASSERT(ggml_is_vector(b));
@@ -4988,11 +4993,15 @@ struct ggml_tensor * ggml_rope_back(

     struct ggml_tensor * result = ggml_dup_tensor(ctx, a);

-    int32_t params[8] = { /*n_past*/ 0, n_dims, mode, n_ctx };
-    memcpy(params + 4, &freq_base, sizeof(float));
-    memcpy(params + 5, &freq_scale, sizeof(float));
-    memcpy(params + 6, &xpos_base, sizeof(float));
-    memcpy(params + 7, &xpos_down, sizeof(bool));
+    int32_t params[13] = { /*n_past*/ 0, n_dims, mode, n_ctx, n_orig_ctx };
+    memcpy(params + 5, &freq_base, sizeof(float));
+    memcpy(params + 6, &freq_scale, sizeof(float));
+    memcpy(params + 7, &ext_factor, sizeof(float));
+    memcpy(params + 8, &attn_factor, sizeof(float));
+    memcpy(params + 9, &beta_fast, sizeof(float));
+    memcpy(params + 10, &beta_slow, sizeof(float));
+    memcpy(params + 11, &xpos_base, sizeof(float));
+    memcpy(params + 12, &xpos_down, sizeof(bool));
     ggml_set_op_params(result, params, sizeof(params));

     result->op = GGML_OP_ROPE_BACK;
@@ -10974,7 +10983,8 @@ static void ggml_compute_forward_rope_f32(
         const struct ggml_compute_params * params,
         const struct ggml_tensor * src0,
         const struct ggml_tensor * src1,
-        struct ggml_tensor * dst) {
+        struct ggml_tensor * dst,
+        const bool forward) {
     if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) {
         return;
     }
@@ -11033,6 +11043,11 @@ static void ggml_compute_forward_rope_f32(
     const bool is_neox = mode & 2;
     const bool is_glm  = mode & 4;

+    // backward process uses inverse rotation by cos and sin.
+    // cos and sin build a rotation matrix, where the inverse is the transpose.
+    // this essentially just switches the sign of sin.
+    const float sin_sign = forward ? 
1.0f : -1.0f; + const int32_t * pos = (const int32_t *) src1->data; for (int64_t i3 = 0; i3 < ne3; i3++) { @@ -11049,9 +11064,9 @@ static void ggml_compute_forward_rope_f32( float block_theta = MAX(p - (n_ctx - 2), 0); for (int64_t i0 = 0; i0 < ne0 / 4; i0++) { const float cos_theta = cosf(theta_base); - const float sin_theta = sinf(theta_base); + const float sin_theta = sinf(theta_base) * sin_sign; const float cos_block_theta = cosf(block_theta); - const float sin_block_theta = sinf(block_theta); + const float sin_block_theta = sinf(block_theta) * sin_sign; theta_base *= theta_scale; block_theta *= theta_scale; @@ -11075,6 +11090,7 @@ static void ggml_compute_forward_rope_f32( rope_yarn( theta_base, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta ); + sin_theta *= sin_sign; // zeta scaling for xPos only: float zeta = xpos_base != 0.0f ? powf((i0 + 0.4f * ne0) / (1.4f * ne0), p / xpos_base) : 1.0f; @@ -11105,6 +11121,7 @@ static void ggml_compute_forward_rope_f32( theta_base, freq_scale, corr_dims, cur_rot, ext_factor, attn_factor, &cos_theta, &sin_theta ); + sin_theta *= sin_sign; theta_base *= theta_scale; @@ -11130,7 +11147,8 @@ static void ggml_compute_forward_rope_f16( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, - struct ggml_tensor * dst) { + struct ggml_tensor * dst, + const bool forward) { if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -11182,6 +11200,11 @@ static void ggml_compute_forward_rope_f16( const bool is_neox = mode & 2; const bool is_glm = mode & 4; + // backward process uses inverse rotation by cos and sin. + // cos and sin build a rotation matrix, where the inverse is the transpose. + // this essentially just switches the sign of sin. + const float sin_sign = forward ? 
1.0f : -1.0f; + const int32_t * pos = (const int32_t *) src1->data; for (int64_t i3 = 0; i3 < ne3; i3++) { @@ -11198,9 +11221,9 @@ static void ggml_compute_forward_rope_f16( float block_theta = MAX(p - (n_ctx - 2), 0); for (int64_t i0 = 0; i0 < ne0 / 4; i0++) { const float cos_theta = cosf(theta_base); - const float sin_theta = sinf(theta_base); + const float sin_theta = sinf(theta_base) * sin_sign; const float cos_block_theta = cosf(block_theta); - const float sin_block_theta = sinf(block_theta); + const float sin_block_theta = sinf(block_theta) * sin_sign; theta_base *= theta_scale; block_theta *= theta_scale; @@ -11224,6 +11247,7 @@ static void ggml_compute_forward_rope_f16( rope_yarn( theta_base, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta ); + sin_theta *= sin_sign; theta_base *= theta_scale; @@ -11250,6 +11274,7 @@ static void ggml_compute_forward_rope_f16( theta_base, freq_scale, corr_dims, cur_rot, ext_factor, attn_factor, &cos_theta, &sin_theta ); + sin_theta *= sin_sign; theta_base *= theta_scale; @@ -11279,11 +11304,11 @@ static void ggml_compute_forward_rope( switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_rope_f16(params, src0, src1, dst); + ggml_compute_forward_rope_f16(params, src0, src1, dst, true); } break; case GGML_TYPE_F32: { - ggml_compute_forward_rope_f32(params, src0, src1, dst); + ggml_compute_forward_rope_f32(params, src0, src1, dst, true); } break; default: { @@ -11294,216 +11319,6 @@ static void ggml_compute_forward_rope( // ggml_compute_forward_rope_back -static void ggml_compute_forward_rope_back_f32( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { - return; - } - - // y = rope(x, src1) - // dx = rope_back(dy, src1) - // src0 is dy, src1 contains options - - float freq_base; - float freq_scale; - - // these two only relevant for xPos RoPE: - float xpos_base; - bool xpos_down; - - //const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_dims = ((int32_t *) dst->op_params)[1]; - const int mode = ((int32_t *) dst->op_params)[2]; - const int n_ctx = ((int32_t *) dst->op_params)[3]; UNUSED(n_ctx); - memcpy(&freq_base, (int32_t *) dst->op_params + 4, sizeof(float)); - memcpy(&freq_scale, (int32_t *) dst->op_params + 5, sizeof(float)); - memcpy(&xpos_base, (int32_t *) dst->op_params + 6, sizeof(float)); - memcpy(&xpos_down, (int32_t *) dst->op_params + 7, sizeof(bool)); - - GGML_TENSOR_UNARY_OP_LOCALS - - //printf("ne0: %d, ne1: %d, ne2: %d, ne3: %d\n", ne0, ne1, ne2, ne3); - //printf("n_past = %d, ne2 = %d\n", n_past, ne2); - - assert(nb0 == sizeof(float)); - - const int ith = params->ith; - const int nth = params->nth; - - const int nr = ggml_nrows(dst); - - // rows per thread - const int dr = (nr + nth - 1)/nth; - - // row range for this thread - const int ir0 = dr*ith; - const int ir1 = MIN(ir0 + dr, nr); - - // row index used to determine which thread to use - int ir = 0; - - const float theta_scale = powf(freq_base, -2.0f/n_dims); - - const bool is_neox = mode & 2; - - const int32_t * pos = (const int32_t *) src1->data; - - for (int64_t i3 = 0; i3 < ne3; i3++) { - for (int64_t i2 = 0; i2 < ne2; i2++) { - const int64_t p = pos[i2]; - for (int64_t i1 = 0; i1 < ne1; i1++) { - if (ir++ < ir0) continue; - if (ir > ir1) break; - - float theta_base = freq_scale * (float)p; - - if (!is_neox) { - for (int64_t i0 = 0; i0 < ne0; i0 += 2) { 
- const float cos_theta = cosf(theta_base); - const float sin_theta = sinf(theta_base); - - // zeta scaling for xPos only: - float zeta = xpos_base != 0.0f ? powf((i0 + 0.4f * ne0) / (1.4f * ne0), p / xpos_base) : 1.0f; - if (xpos_down) zeta = 1.0f / zeta; - - theta_base *= theta_scale; - - const float * const dy = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); - float * dx = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); - - const float dy0 = dy[0]; - const float dy1 = dy[1]; - - dx[0] = dy0*cos_theta*zeta + dy1*sin_theta*zeta; - dx[1] = - dy0*sin_theta*zeta + dy1*cos_theta*zeta; - } - } else { - for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { - for (int64_t ic = 0; ic < n_dims; ic += 2) { - const float cos_theta = cosf(theta_base); - const float sin_theta = sinf(theta_base); - - theta_base *= theta_scale; - - const int64_t i0 = ib*n_dims + ic/2; - - const float * const dy = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); - float * dx = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); - - const float dy0 = dy[0]; - const float dy1 = dy[n_dims/2]; - - dx[0] = dy0*cos_theta + dy1*sin_theta; - dx[n_dims/2] = - dy0*sin_theta + dy1*cos_theta; - } - } - } - } - } - } -} - -static void ggml_compute_forward_rope_back_f16( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { - return; - } - - // y = rope(x, src1) - // dx = rope_back(dy, src1) - // src0 is dy, src1 contains options - - //const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_dims = ((int32_t *) dst->op_params)[1]; - const int mode = ((int32_t *) dst->op_params)[2]; - - GGML_TENSOR_UNARY_OP_LOCALS - - //printf("ne0: %d, ne1: %d, ne2: %d, ne3: %d\n", ne0, ne1, ne2, ne3); - //printf("n_past = %d, ne2 = %d\n", n_past, ne2); - - assert(nb0 == sizeof(ggml_fp16_t)); - - const int ith = params->ith; - const int nth = params->nth; - - const int nr = ggml_nrows(dst); - - // rows per thread - const int dr = (nr + nth - 1)/nth; - - // row range for this thread - const int ir0 = dr*ith; - const int ir1 = MIN(ir0 + dr, nr); - - // row index used to determine which thread to use - int ir = 0; - - const float theta_scale = powf(10000.0, -2.0f/n_dims); - - const bool is_neox = mode & 2; - - const int32_t * pos = (const int32_t *) src1->data; - - for (int64_t i3 = 0; i3 < ne3; i3++) { - for (int64_t i2 = 0; i2 < ne2; i2++) { - const int64_t p = pos[i2]; - for (int64_t i1 = 0; i1 < ne1; i1++) { - if (ir++ < ir0) continue; - if (ir > ir1) break; - - float theta_base = (float)p; - - if (!is_neox) { - for (int64_t i0 = 0; i0 < ne0; i0 += 2) { - const float cos_theta = cosf(theta_base); - const float sin_theta = sinf(theta_base); - - theta_base *= theta_scale; - - const ggml_fp16_t * const dy = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); - ggml_fp16_t * dx = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); - - const float dy0 = GGML_FP16_TO_FP32(dy[0]); - const float dy1 = GGML_FP16_TO_FP32(dy[1]); - - dx[0] = GGML_FP32_TO_FP16( dy0*cos_theta + dy1*sin_theta); - dx[1] = GGML_FP32_TO_FP16(-dy0*sin_theta + dy1*cos_theta); - } - } else { - for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { - for (int64_t ic = 0; ic < n_dims; ic += 2) { - const float cos_theta = cosf(theta_base); - const float sin_theta = sinf(theta_base); - - theta_base *= 
theta_scale; - - const int64_t i0 = ib*n_dims + ic/2; - - const ggml_fp16_t * const dy = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); - ggml_fp16_t * dx = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); - - const float dy0 = GGML_FP16_TO_FP32(dy[0]); - const float dy1 = GGML_FP16_TO_FP32(dy[n_dims/2]); - - dx[0] = GGML_FP32_TO_FP16( dy0*cos_theta + dy1*sin_theta); - dx[n_dims/2] = GGML_FP32_TO_FP16(-dy0*sin_theta + dy1*cos_theta); - } - } - } - } - } - } -} - static void ggml_compute_forward_rope_back( const struct ggml_compute_params * params, const struct ggml_tensor * src0, @@ -11512,11 +11327,11 @@ static void ggml_compute_forward_rope_back( switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_rope_back_f16(params, src0, src1, dst); + ggml_compute_forward_rope_f16(params, src0, src1, dst, false); } break; case GGML_TYPE_F32: { - ggml_compute_forward_rope_back_f32(params, src0, src1, dst); + ggml_compute_forward_rope_f32(params, src0, src1, dst, false); } break; default: { @@ -15559,17 +15374,20 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor // necessary for llama if (src0->grad) { //const int n_past = ((int32_t *) tensor->op_params)[0]; - const int n_dims = ((int32_t *) tensor->op_params)[1]; - const int mode = ((int32_t *) tensor->op_params)[2]; - const int n_ctx = ((int32_t *) tensor->op_params)[3]; - float freq_base; - float freq_scale; - float xpos_base; - bool xpos_down; - memcpy(&freq_base, (int32_t *) tensor->op_params + 4, sizeof(float)); - memcpy(&freq_scale, (int32_t *) tensor->op_params + 5, sizeof(float)); - memcpy(&xpos_base, (int32_t *) tensor->op_params + 6, sizeof(float)); - memcpy(&xpos_down, (int32_t *) tensor->op_params + 7, sizeof(bool)); + const int n_dims = ((int32_t *) tensor->op_params)[1]; + const int mode = ((int32_t *) tensor->op_params)[2]; + const int n_ctx = ((int32_t *) tensor->op_params)[3]; + const int n_orig_ctx = ((int32_t *) tensor->op_params)[4]; + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow, xpos_base, xpos_down; + + memcpy(&freq_base, (int32_t *) tensor->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) tensor->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) tensor->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) tensor->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) tensor->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) tensor->op_params + 10, sizeof(float)); + memcpy(&xpos_base, (int32_t *) tensor->op_params + 11, sizeof(float)); + memcpy(&xpos_down, (int32_t *) tensor->op_params + 12, sizeof(bool)); src0->grad = ggml_add_or_set(ctx, src0->grad, @@ -15579,8 +15397,13 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor n_dims, mode, n_ctx, + n_orig_ctx, freq_base, freq_scale, + ext_factor, + attn_factor, + beta_fast, + beta_slow, xpos_base, xpos_down), zero_table); @@ -15590,17 +15413,20 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { if (src0->grad) { //const int n_past = ((int32_t *) tensor->op_params)[0]; - const int n_dims = ((int32_t *) tensor->op_params)[1]; - const int mode = ((int32_t *) tensor->op_params)[2]; - const int n_ctx = ((int32_t *) tensor->op_params)[3]; - float freq_base; - float freq_scale; - float xpos_base; - bool xpos_down; - memcpy(&freq_base, (int32_t *) tensor->op_params + 4, sizeof(float)); - memcpy(&freq_scale, (int32_t *) 
tensor->op_params + 5, sizeof(float)); - memcpy(&xpos_base, (int32_t *) tensor->op_params + 6, sizeof(float)); - memcpy(&xpos_down, (int32_t *) tensor->op_params + 7, sizeof(bool)); + const int n_dims = ((int32_t *) tensor->op_params)[1]; + const int mode = ((int32_t *) tensor->op_params)[2]; + const int n_ctx = ((int32_t *) tensor->op_params)[3]; + const int n_orig_ctx = ((int32_t *) tensor->op_params)[4]; + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow, xpos_base, xpos_down; + + memcpy(&freq_base, (int32_t *) tensor->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) tensor->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) tensor->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) tensor->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) tensor->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) tensor->op_params + 10, sizeof(float)); + memcpy(&xpos_base, (int32_t *) tensor->op_params + 11, sizeof(float)); + memcpy(&xpos_down, (int32_t *) tensor->op_params + 12, sizeof(bool)); src0->grad = ggml_add_or_set(ctx, src0->grad, @@ -15609,14 +15435,14 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor src1, n_dims, mode, - 0, n_ctx, + n_orig_ctx, freq_base, freq_scale, - 0.0f, - 1.0f, - 0.0f, - 0.0f, + ext_factor, + attn_factor, + beta_fast, + beta_slow, xpos_base, xpos_down, false), diff --git a/ggml.h b/ggml.h index 70eb25a6b..26654fc8e 100644 --- a/ggml.h +++ b/ggml.h @@ -1372,8 +1372,13 @@ extern "C" { int n_dims, int mode, int n_ctx, + int n_orig_ctx, float freq_base, float freq_scale, + float ext_factor, + float attn_factor, + float beta_fast, + float beta_slow, float xpos_base, bool xpos_down); From 413503d4b92500d82b002d03c580a71a54747138 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 7 Nov 2023 19:25:32 +0200 Subject: [PATCH 097/859] make : do not add linker flags when compiling static llava lib (#3977) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f2d4fd031..d6be254a0 100644 --- a/Makefile +++ b/Makefile @@ -618,7 +618,7 @@ llama-bench: examples/llama-bench/llama-bench.cpp ggml.o llama.o $(COMMON_DEPS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) libllava.a: examples/llava/llava.cpp examples/llava/llava.h examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h common/base64.hpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) -static -fPIC -c $< -o $@ $(LDFLAGS) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -static -fPIC -c $< -o $@ -Wno-cast-qual llava-cli: examples/llava/llava-cli.cpp examples/llava/clip.h examples/llava/clip.cpp examples/llava/llava.h examples/llava/llava.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -Wno-cast-qual From 0a7c980b6f94a049cb804573df2d8092a34df8e4 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Tue, 7 Nov 2023 12:43:04 -0500 Subject: [PATCH 098/859] gguf : track writer state, free unneeded tensors, cleanup (#3871) --- gguf-py/gguf/gguf.py | 82 +++++++++++++++++++++++++++--------------- gguf-py/pyproject.toml | 2 +- 2 files changed, 54 insertions(+), 30 deletions(-) diff --git a/gguf-py/gguf/gguf.py b/gguf-py/gguf/gguf.py index a2271d225..7e495cb19 100644 --- a/gguf-py/gguf/gguf.py +++ b/gguf-py/gguf/gguf.py @@ -646,18 +646,17 @@ class GGUFValueType(IntEnum): sys.exit() +class WriterState(Enum): + EMPTY = auto() + HEADER = auto() + KV_DATA = auto() + TI_DATA = auto() + + 
class GGUFWriter: fout: BufferedWriter - arch: str - offset_tensor = 0 - data_alignment = GGUF_DEFAULT_ALIGNMENT - kv_data = b"" - kv_data_count = 0 - ti_data = b"" - ti_data_count = 0 - use_temp_file: bool - temp_file: tempfile.SpooledTemporaryFile[bytes] | None = None - tensors: list[tuple[np.ndarray[Any, Any], int]] + temp_file: tempfile.SpooledTemporaryFile[bytes] | None + tensors: list[np.ndarray[Any, Any]] @property def pack_prefix(self): @@ -683,27 +682,47 @@ class GGUFWriter: GGUFValueType.FLOAT64: f"{self.pack_prefix}d", GGUFValueType.BOOL: "?" , } - self.add_architecture() + self.offset_tensor = 0 + self.data_alignment = GGUF_DEFAULT_ALIGNMENT + self.kv_data = b"" + self.kv_data_count = 0 + self.ti_data = b"" + self.ti_data_count = 0 self.use_temp_file = use_temp_file + self.temp_file = None self.tensors = [] endianess_str = "Big Endian" if self.endianess == GGUFEndian.BIG else "Little Endian" print(f"This gguf file is for {endianess_str} only") + self.state = WriterState.EMPTY + + self.add_architecture() def write_header_to_file(self): + if self.state is not WriterState.EMPTY: + raise ValueError(f'Expected output file to be empty, got {self.state}') + self.fout.write(struct.pack(" None: diff --git a/gguf-py/pyproject.toml b/gguf-py/pyproject.toml index f0741a7c2..c6cb2c37a 100644 --- a/gguf-py/pyproject.toml +++ b/gguf-py/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gguf" -version = "0.4.5" +version = "0.4.6" description = "Write ML models in GGUF for GGML" authors = ["GGML "] packages = [ From 875fb42871a0f5a88fbe31a0b5edd697b84038e4 Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 8 Nov 2023 13:15:14 +0100 Subject: [PATCH 099/859] ggml-alloc : fix backend assignments of views (#3982) --- ggml-alloc.c | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/ggml-alloc.c b/ggml-alloc.c index 34eba3f83..b553eb7c1 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -378,9 +378,13 @@ static bool ggml_op_can_inplace(enum ggml_op op) { } } -static void init_view(struct ggml_allocr * alloc, struct ggml_tensor * view) { +static void init_view(struct ggml_allocr * alloc, struct ggml_tensor * view, bool update_backend) { assert(view->view_src != NULL && view->view_src->data != NULL); - view->backend = view->view_src->backend; + + if (update_backend) { + view->backend = view->view_src->backend; + } + view->buffer = view->view_src->buffer; view->data = (char *)view->view_src->data + view->view_offs; @@ -394,7 +398,7 @@ static void allocate_node(struct ggml_allocr * alloc, struct ggml_tensor * node) struct hash_node * ht = alloc->hash_table; if (node->data == NULL) { if (ggml_is_view(node)) { - init_view(alloc, node); + init_view(alloc, node, true); } else { // see if we can reuse a parent's buffer (inplace) if (ggml_op_can_inplace(node->op)) { @@ -424,15 +428,14 @@ static void allocate_node(struct ggml_allocr * alloc, struct ggml_tensor * node) AT_PRINTF("reusing view parent %s (%s) for %s\n", parent->name, view_src->name, node->name); node->view_src = view_src; view_src_hn->n_views += 1; - init_view(alloc, node); + init_view(alloc, node, false); return; } - } - else { + } else { AT_PRINTF("reusing parent %s for %s\n", parent->name, node->name); node->view_src = parent; p_hn->n_views += 1; - init_view(alloc, node); + init_view(alloc, node, false); return; } } @@ -463,7 +466,7 @@ size_t ggml_allocr_alloc_graph_n( hash_get(ht, view_src)->n_views += 1; if (node->buffer == NULL && node->data != NULL) { // view of a pre-allocated tensor, didn't call 
init_view() yet - init_view(alloc, node); + init_view(alloc, node, true); } } @@ -474,7 +477,7 @@ size_t ggml_allocr_alloc_graph_n( } hash_get(ht, parent)->n_children += 1; if (ggml_is_view(parent) && parent->buffer == NULL && parent->data != NULL) { - init_view(alloc, parent); + init_view(alloc, parent, true); } } } From 57ad015dc3011b046ed5a23186c86ea55f987c54 Mon Sep 17 00:00:00 2001 From: Mihai Date: Thu, 9 Nov 2023 04:00:34 +0200 Subject: [PATCH 100/859] server : add min_p param (#3877) * Update server.cpp with min_p after it was introduced in https://github.com/ggerganov/llama.cpp/pull/3841 * Use spaces instead of tabs * Update index.html.hpp after running deps.sh * Fix test - fix line ending --- examples/server/README.md | 2 + examples/server/index.html.hpp | 4396 +++++++++++++++-------------- examples/server/public/index.html | 2 + examples/server/server.cpp | 2 + 4 files changed, 2211 insertions(+), 2191 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 089ebe2d1..a6eda3b32 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -122,6 +122,8 @@ node index.js `top_p`: Limit the next token selection to a subset of tokens with a cumulative probability above a threshold P (default: 0.95). + `min_p`: The minimum probability for a token to be considered, relative to the probability of the most likely token (default: 0.05). + `n_predict`: Set the maximum number of tokens to predict when generating text. **Note:** May exceed the set limit slightly if the last token is a partial multibyte character. When 0, no tokens will be generated but the prompt is evaluated into the cache. (default: -1, -1 = infinity). `n_keep`: Specify the number of tokens from the prompt to retain when the context size is exceeded and tokens need to be discarded. 
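For readers who want to try the new option end-to-end, a minimal request against a running server might look like the sketch below (assumptions: the server is listening on the default `localhost:8080`, `top_p` is set to 1.0 so that only the min-p filter shapes the candidate set, and the prompt is just a placeholder):

```sh
# sketch: exercise the new min_p sampler parameter via the completion endpoint
curl --request POST \
    --url http://localhost:8080/completion \
    --header "Content-Type: application/json" \
    --data '{
        "prompt": "Building a website can be done in 10 simple steps:",
        "n_predict": 64,
        "top_p": 1.0,
        "min_p": 0.05
    }'
```

With `min_p: 0.05`, any token whose probability falls below 5% of the probability of the most likely token is dropped before sampling, pruning the low-quality tail without capping the candidate list at a fixed size.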
diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp
index 5d3bdfbdd..207412513 100644
--- a/examples/server/index.html.hpp
+++ b/examples/server/index.html.hpp
@@ -374,1189 +374,1161 @@ unsigned char index_html[] = {
 [~4,400 lines of auto-generated byte-array diff elided: index.html.hpp is the embedded copy of examples/server/public/index.html and, per the commit message, is regenerated by running deps.sh; the only substantive change it encodes is the min_p addition made in public/index.html]
0x61, + 0x74, 0x65, 0x73, 0x20, 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, + 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x49, 0x6e, + 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x69, 0x6e, 0x67, 0x20, 0x4c, + 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, + 0x61, 0x6e, 0x64, 0x20, 0x73, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x22, 0x64, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x22, 0x3a, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, + 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, + 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x52, 0x65, 0x73, 0x65, 0x74, 0x69, 0x6e, 0x67, + 0x20, 0x74, 0x68, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x74, + 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, + 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5b, 0x27, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x27, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x74, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x65, 0x73, 
0x73, + 0x69, 0x6f, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x3a, 0x20, 0x27, 0x27, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, + 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, + 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, + 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, - 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x52, 0x65, 0x73, 0x65, 0x74, 0x69, 0x6e, - 0x67, 0x20, 0x74, 0x68, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, - 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, - 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x73, 0x61, 0x76, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, + 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5b, 0x27, 0x64, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x74, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 
0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, - 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, - 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, - 0x6c, 0x79, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, - 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, - 0x76, 0x65, 0x64, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x67, 0x65, 0x74, 0x20, 0x61, 0x75, 0x74, - 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, - 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, - 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, - 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x29, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6c, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x67, 0x65, 0x74, 0x20, 0x61, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, + 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, 0x61, 
0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, - 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, - 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x72, 0x65, 0x73, 0x74, 0x6f, - 0x72, 0x69, 0x6e, 0x67, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x61, 0x73, - 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x4e, 0x6f, 0x20, - 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, - 0x2c, 0x20, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, - 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x77, 0x61, - 0x73, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x73, 0x6f, 0x20, - 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x64, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, + 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6c, 0x61, + 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, + 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, + 0x69, 0x6e, 0x67, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x61, 0x73, 0x74, + 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x4e, 0x6f, 0x20, 0x61, + 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, + 0x70, 
0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, + 0x20, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x77, 0x61, 0x73, + 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, + 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, + 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, + 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, - 0x27, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x65, + 0x27, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x41, 0x75, + 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, + 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, + 0x66, 0x61, 
0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x77, 0x65, 0x20, + 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, 0x74, + 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x65, 0x74, + 0x27, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, + 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, 0x2b, 0x20, 0x44, 0x61, 0x74, 0x65, + 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x27, 0x6e, + 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, + 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x73, 0x20, 0x27, 0x20, + 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, + 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, + 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, + 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, + 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, + 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x69, 0x74, + 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x70, + 0x70, 0x6c, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, + 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, + 0x4f, 0x62, 
0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, + 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, + 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, + 0x65, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, + 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x2c, + 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, + 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, + 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, - 0x6c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, - 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, - 0x76, 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, - 0x28, 0x27, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x41, - 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x2e, 0x2e, 0x2e, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x77, 0x65, - 0x20, 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, - 0x74, 0x6f, 
0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x6f, 0x76, 0x65, 0x72, - 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x65, - 0x74, 0x27, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, - 0x20, 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, 0x65, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, - 0x20, 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, 0x2b, 0x20, 0x44, 0x61, 0x74, - 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x27, - 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, - 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, - 0x27, 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x73, 0x20, 0x27, - 0x20, 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, - 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, 0x75, 0x74, 0x6f, - 0x73, 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, - 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, - 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, - 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, - 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x69, - 0x74, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, - 0x70, 0x70, 0x6c, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, - 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, - 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, - 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, - 0x6d, 0x65, 0x27, 
0x3a, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, - 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, - 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, - 0x28, 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x20, 0x66, - 0x6f, 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, - 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, - 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x45, 0x4e, - 0x44, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, - 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x72, - 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, - 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, - 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x73, - 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, - 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, - 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, - 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, 0x73, 0x20, 0x74, - 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, 0x74, 0x3f, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, - 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, - 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 
0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, - 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x74, - 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, - 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x74, 0x72, - 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x74, 0x74, - 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x78, 0x74, 0x72, - 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x74, - 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, - 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x2c, 0x20, 0x2e, - 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x73, - 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x41, - 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, 0x2e, 0x2a, 0x3f, - 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, 0x28, 0x5f, 0x2c, - 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x61, 0x73, - 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, 0x61, 0x72, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x3d, 0x20, 0x73, - 0x65, 0x73, 0x73, 
0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x45, 0x4e, 0x44, + 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x72, 0x6f, + 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, + 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, 0x73, 0x20, 0x74, 0x68, + 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, 0x74, 0x3f, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, + 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, + 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 
0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x74, 0x72, 0x2c, + 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x78, 0x74, 0x72, 0x61, + 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x74, 0x74, + 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x2c, 0x20, 0x2e, 0x2e, + 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x73, 0x74, + 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x41, 0x6c, + 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, 0x2e, 0x2a, 0x3f, 0x29, + 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, 0x28, 0x5f, 0x2c, 0x20, + 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, + 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x61, 0x73, 0x79, + 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, 0x61, 0x72, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x3d, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, + 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x22, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, + 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, + 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, + 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, + 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, + 0x70, 0x72, 0x6f, 
0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3a, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, + 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, + 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, + 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, + 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x24, 0x2f, + 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, + 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, + 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x3a, + 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, + 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, + 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, 0x27, 0x2c, 0x20, 0x73, + 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x64, + 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, + 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x6c, 0x6f, 0x74, 
0x5f, 0x69, 0x64, 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, 0x20, 0x21, 0x64, 0x61, 0x74, 0x61, + 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x22, 0x54, 0x68, + 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x20, 0x77, 0x61, 0x73, + 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, + 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, + 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, 0x6e, 0x27, 0x74, 0x20, 0x62, 0x65, + 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x2e, 0x22, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, + 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, + 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, + 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, 0x6e, 0x64, 0x20, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x65, 0x72, 0x76, + 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, + 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, - 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, 0x6c, 0x72, 0x65, - 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x22, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 
0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, - 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, - 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, - 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3a, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, - 0x65, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x73, 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x24, - 0x2f, 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, - 0x6c, 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, - 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6d, 0x61, - 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, - 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, 0x27, 0x2c, 0x20, - 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, 0x22, 0x2c, 0x20, - 0x64, 0x61, 0x74, 0x61, 
0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, 0x3d, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, 0x20, 0x21, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x22, 0x54, - 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x20, 0x77, 0x61, - 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, - 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x75, 0x6c, 0x74, 0x69, - 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, 0x72, 0x20, 0x74, 0x68, 0x65, - 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x70, 0x72, 0x6f, 0x6a, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, 0x6e, 0x27, 0x74, 0x20, 0x62, - 0x65, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x2e, 0x22, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, - 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, - 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, - 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, - 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, - 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, 0x6e, 0x64, 0x20, 0x6d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x65, 0x72, - 0x76, 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, - 0x6e, 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 
0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, - 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, - 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x7b, - 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x22, 0x2c, 0x20, 0x6d, 0x73, - 0x67, 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6c, 0x65, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, - 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, - 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, - 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, - 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, - 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2f, 0x2c, 0x20, - 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, - 0x69, 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, 0x29, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x20, 0x3d, 0x20, 0x60, 0x41, 0x20, 0x63, 0x68, 0x61, 0x74, - 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, 0x61, 0x20, 0x63, - 0x75, 0x72, 0x69, 0x6f, 0x75, 0x73, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, - 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x6e, 0x20, 0x61, 0x72, 0x74, 0x69, - 0x66, 0x69, 0x63, 0x69, 0x61, 0x6c, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x6c, - 0x6c, 0x69, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, - 0x73, 0x74, 0x61, 0x6e, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x65, 0x20, 0x61, - 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x20, 0x67, 0x69, 0x76, - 0x65, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, - 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x2c, 0x20, 0x61, 0x6e, - 0x64, 0x20, 0x70, 0x6f, 0x6c, 0x69, 0x74, 0x65, 0x20, 0x61, 0x6e, 0x73, - 0x77, 0x65, 0x72, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, - 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x27, 0x73, 0x20, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x5c, 0x6e, 0x55, 0x53, 0x45, 0x52, - 0x3a, 0x5b, 0x69, 0x6d, 0x67, 0x2d, 0x31, 0x30, 0x5d, 0x24, 0x7b, 0x6d, - 0x73, 0x67, 0x7d, 0x5c, 0x6e, 0x41, 0x53, 0x53, 0x49, 0x53, 0x54, 0x41, - 0x4e, 0x54, 0x3a, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, - 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, - 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, - 0x3a, 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, 0x2c, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x63, - 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x2c, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x75, 0x73, - 0x65, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x5d, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x7b, 0x7b, 0x63, 0x68, - 0x61, 0x72, 0x7d, 0x7d, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, - 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, - 0x67, 0x2e, 0x2e, 0x2e, 
0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, - 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, - 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, - 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x22, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x62, 0x6f, - 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, - 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, - 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x6c, - 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x28, - 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x67, 0x65, 0x74, 
0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, - 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x28, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, - 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, 0x64, 0x64, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, - 0x72, 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2c, 0x20, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, 0x3d, 0x20, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, 0x6c, 0x65, - 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, - 0x65, 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x3d, 0x20, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, - 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, - 0x74, 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, - 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7b, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, 0x62, 0x61, - 0x73, 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x2c, - 0x20, 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, 0x44, 0x61, - 0x74, 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, - 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, - 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, - 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, + 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, + 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x7b, 0x7b, + 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x22, 0x2c, 0x20, 0x6d, 0x73, 0x67, + 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x22, - 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x68, - 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, 0x20, 0x26, - 0x26, 0x20, 0x21, 0x65, 
0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x68, 0x69, - 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, - 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, - 0x6d, 0x20, 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x3d, 0x24, - 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x0a, + 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, + 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, + 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, + 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, + 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, - 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, - 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x6c, - 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, 0x6e, 0x75, - 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, - 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, 0x24, 0x7b, - 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, - 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, 0x73, 0x6f, - 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x22, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, + 0x20, 0x7b, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, 0x22, 0x3e, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, + 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, + 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, + 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, + 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, + 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, + 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, + 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x20, 0x3d, 0x20, 0x60, 0x41, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, + 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, 0x61, 0x20, 0x63, 0x75, + 0x72, 0x69, 0x6f, 0x75, 0x73, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x20, + 0x61, 0x6e, 0x64, 0x20, 0x61, 0x6e, 0x20, 0x61, 0x72, 0x74, 0x69, 0x66, + 0x69, 0x63, 0x69, 0x61, 0x6c, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x6c, 0x6c, + 0x69, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, + 0x74, 0x61, 0x6e, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x65, 0x20, 0x61, 0x73, + 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x20, 0x67, 0x69, 0x76, 0x65, + 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x64, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x2c, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x70, 0x6f, 0x6c, 0x69, 0x74, 0x65, 0x20, 0x61, 0x6e, 0x73, 0x77, + 0x65, 0x72, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x68, + 0x75, 0x6d, 0x61, 0x6e, 0x27, 0x73, 0x20, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x5c, 0x6e, 0x55, 0x53, 0x45, 0x52, 0x3a, + 0x5b, 0x69, 0x6d, 0x67, 0x2d, 0x31, 0x30, 0x5d, 0x24, 0x7b, 0x6d, 0x73, + 0x67, 0x7d, 0x5c, 0x6e, 0x41, 0x53, 0x53, 0x49, 0x53, 0x54, 0x41, 0x4e, + 0x54, 0x3a, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, + 0x20, 0x72, 0x75, 0x6e, 
0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, + 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, + 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, 0x2c, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x63, 0x68, + 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x2c, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, + 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, + 0x72, 0x7d, 0x7d, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, + 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, + 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, + 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, + 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, + 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x22, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x44, 0x65, 
0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x62, 0x6f, 0x72, + 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, + 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x5d, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x6c, 0x6f, + 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, + 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x79, + 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, + 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, + 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, + 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, 0x64, 0x64, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, + 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, 0x3d, 0x20, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, + 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, + 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, 0x6c, 0x65, 0x52, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x3d, 0x20, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x62, 0x75, 
0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, - 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, + 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, + 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, + 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, 0x62, 0x61, 0x73, + 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x2c, 0x20, + 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, 0x44, 0x61, 0x74, + 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, + 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, + 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, + 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x2e, 0x76, 0x61, 
0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x22, 0x22, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, + 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x68, 0x69, + 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, 0x20, 0x26, 0x26, + 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x68, 0x69, 0x66, + 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, + 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, 0x6d, + 0x20, 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x3d, 0x24, 0x7b, + 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x3d, + 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x6c, 0x6f, + 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, + 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, 0x24, 0x7b, 0x65, + 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, + 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, 0x73, 0x6f, 0x6d, + 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x22, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, + 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, 0x20, 0x64, 0x69, + 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x75, + 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x7d, 0x3e, + 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, 0x61, 0x67, 0x65, + 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, + 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, + 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, 0x64, 0x69, + 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, 0x62, 0x75, + 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x7d, - 0x3e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, 0x61, 0x67, - 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, + 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, + 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, + 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, - 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, - 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, - 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, - 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, - 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, - 0x69, 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, - 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, - 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, - 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, - 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 
0x69, 0x6e, 0x67, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, - 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, - 0x4c, 0x6f, 0x67, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, - 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, - 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x63, 0x72, - 0x6f, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, 0x74, 0x6f, - 0x6d, 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, 0x65, 0x64, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x3d, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x2e, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x70, 0x20, - 0x2b, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, 0x66, 0x66, - 0x73, 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x2b, 0x20, - 0x33, 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, - 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, 0x2c, 0x20, - 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, - 0x6c, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x73, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, - 0x65, 0x20, 0x3d, 0x20, 0x28, 0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x5d, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 
0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, - 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, - 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, - 0x64, 0x61, 0x74, 0x61, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, - 0x62, 0x73, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x20, 0x69, 0x73, - 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, - 0x61, 0x74, 0x61, 0x3d, 0x24, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, - 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, - 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, - 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, - 0x27, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, - 0x5e, 0x5c, 0x73, 0x2b, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, - 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, - 0x74, 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, - 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, - 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, - 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, - 0x3a, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, - 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x72, + 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, 0x3c, + 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x60, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 
0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, 0x4c, + 0x6f, 0x67, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, + 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x63, 0x72, 0x6f, + 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, + 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, 0x65, 0x64, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x70, 0x20, 0x2b, + 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, 0x66, 0x66, 0x73, + 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x2b, 0x20, 0x33, + 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, + 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, + 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, + 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, + 0x20, 0x3d, 0x20, 0x28, 0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, 0x64, + 0x61, 0x74, 0x61, 0x5d, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, 0x72, + 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, + 0x61, 0x74, 0x61, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, + 0x73, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 
0x26, 0x20, 0x69, 0x73, 0x41, + 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, + 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x3d, 0x24, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, + 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, + 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, + 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, + 0x5c, 0x73, 0x2b, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, + 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, 0x61, + 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, 0x74, + 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x75, 0x73, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, - 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, + 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, + 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, - 0x3d, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, - 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x69, 0x6d, 0x67, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, - 0x77, 0x69, 0x64, 0x74, 0x68, 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, - 0x7b, 0x21, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x20, 0x3f, 0x20, 
0x60, 0x64, 0x69, - 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, - 0x60, 0x20, 0x3a, 0x20, 0x60, 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, - 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x66, 0x6c, - 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, - 0x6e, 0x65, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x60, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, - 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, - 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, - 0x65, 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, - 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, - 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, - 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, - 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, - 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, - 0x5d, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, - 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, 0x3d, + 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 
0x70, 0x3e, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, + 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, + 0x7b, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x69, 0x6d, 0x67, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x77, + 0x69, 0x64, 0x74, 0x68, 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, 0x7b, + 0x21, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x60, + 0x20, 0x3a, 0x20, 0x60, 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, 0x3d, + 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x66, 0x6c, 0x61, + 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, + 0x65, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x60, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, + 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, + 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, - 0x5d, 0x3a, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, - 0x72, 0x28, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, + 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, + 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 
0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, + 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x29, 0x29, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, - 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x27, 0x27, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, - 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, - 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x67, 0x72, 0x61, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, + 0x3a, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, + 0x28, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, + 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x29, 0x29, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x6c, 0x2e, 0x74, - 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, - 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, - 0x3d, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 
0x65, - 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, - 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x28, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x2c, 0x27, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2e, 0x72, 0x65, 0x64, 0x75, 0x63, 0x65, 0x28, 0x28, 0x61, 0x63, - 0x63, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x28, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, - 0x20, 0x5b, 0x63, 0x75, 0x72, 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, - 0x5d, 0x3a, 0x20, 0x69, 0x20, 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, - 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, - 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, + 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x27, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, + 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x67, 0x72, 0x61, 0x6d, + 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, + 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, 0x3d, + 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x29, 0x0a, 0x20, 
0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, - 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, - 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, - 0x63, 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, - 0x28, 0x60, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, - 0x69, 0x6c, 0x65, 0x64, 0x3a, 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, - 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, - 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, - 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, - 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, - 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, - 0x61, 0x78, 0x7d, 0x22, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3d, 0x22, 0x24, - 0x7b, 0x73, 0x74, 0x65, 0x70, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, + 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x73, + 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x2c, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
- 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, - 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, - 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, - 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x2e, 0x72, 0x65, 0x64, 0x75, 0x63, 0x65, 0x28, 0x28, 0x61, 0x63, 0x63, + 0x2c, 0x20, 0x63, 0x75, 0x72, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x28, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, + 0x5b, 0x63, 0x75, 0x72, 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, + 0x3a, 0x20, 0x69, 0x20, 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, + 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x65, 0x72, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, + 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, + 0x60, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, + 0x6c, 0x65, 0x64, 0x3a, 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, @@ -1571,1109 +1543,1151 @@ unsigned char index_html[] = { 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, - 0x78, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, - 0x6e, 0x61, 0x6d, 0x65, 
0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, - 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, - 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, - 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, - 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, - 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, - 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, - 0x74, 0x42, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, - 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3e, 0x55, 0x73, - 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x62, 0x75, - 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x78, 0x7d, 0x22, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, + 0x73, 0x74, 0x65, 0x70, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, + 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, + 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, + 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, + 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, + 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, + 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, + 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, + 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, + 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, + 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, + 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, + 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, + 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, + 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, + 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x42, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 
0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, + 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, + 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, + 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, + 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, + 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, + 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, + 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, + 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, + 0x65, 0x72, 0x79, 0x20, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, + 0x61, 0x76, 0x65, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x2c, 0x20, 0x5b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, + 0x6c, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, - 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x7d, - 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x74, - 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x3c, 0x2f, 0x62, + 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x22, 0x3e, 
0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, + 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, + 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, + 0x73, 0x65, 0x20, 0x67, 0x62, 0x6e, 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, + 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, + 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, + 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, + 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, + 0x72, 0x3d, 0x22, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, + 0x6f, 0x70, 0x31, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, + 0x6f, 0x70, 0x33, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, + 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x22, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, + 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x7d, 0x3e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, - 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x6f, 0x6e, 0x20, 0x65, - 0x76, 0x65, 0x72, 0x79, 0x20, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, - 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5d, 0x29, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, + 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x3e, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, - 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, - 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x67, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, - 0x55, 0x73, 0x65, 0x20, 0x67, 0x62, 0x6e, 0x66, 0x20, 0x6f, 0x72, 0x20, - 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2b, - 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, - 0x61, 0x72, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, - 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, - 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, - 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, - 0x65, 0x72, 0x3d, 0x22, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, - 0x72, 0x6f, 0x70, 0x31, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, - 0x72, 0x6f, 0x70, 0x33, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, - 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x22, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, - 0x7b, 0x63, 0x6f, 0x6e, 0x76, 
0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, - 0x72, 0x7d, 0x3e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, - 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, + 0x6c, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x46, 0x6f, 0x72, 0x3d, 0x22, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, + 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x46, 0x6f, 0x72, 0x3d, 0x22, - 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x74, 0x79, 0x70, - 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, - 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, - 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, - 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, - 0x73, 0x65, 0x72, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, - 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, - 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, + 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, + 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, - 0x6f, 0x72, 0x3d, 0x22, 0x62, 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, - 0x61, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 
0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, - 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, - 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, - 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, - 0x61, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, + 0x6f, 0x72, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, + 0x65, 0x72, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, - 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, - 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, - 0x77, 0x73, 0x3d, 0x31, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 
0x74, 0x65, 0x78, 0x74, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x75, 0x73, 0x65, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, + 0x72, 0x3d, 0x22, 0x62, 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, + 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, + 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, + 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x24, 0x7b, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, - 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, - 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, - 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, - 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, - 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x24, 0x7b, 0x47, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, - 0x6c, 0x28, 0x29, 0x7d, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 
0x61, 0x73, 0x73, 0x3d, - 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x55, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, - 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, - 0x6c, 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, - 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x74, - 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, - 0x63, 0x68, 0x61, 0x74, 0x22, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x43, - 0x68, 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, - 0x73, 0x73, 0x3d, 0x22, 0x73, 0x6c, 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, - 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, - 0x74, 0x79, 0x70, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, - 0x22, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, - 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, - 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x7d, 0x20, - 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, - 0x20, 0x2f, 0x3e, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, + 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, - 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, - 0x20, 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, - 0x6f, 0x72, 0x6d, 0x28, 0x29, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x46, 0x6f, 0x72, 0x6d, 0x28, 0x29, 0x7d, 0x0a, 
0x0a, 0x20, 0x20, 0x20, + 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, + 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, + 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, + 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, + 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, + 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, + 0x73, 0x3d, 0x31, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, + 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, + 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 
0x6c, 0x60, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x24, 0x7b, 0x47, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x28, 0x29, 0x7d, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, + 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, - 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, - 0x34, 0x38, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, - 0x65, 0x64, 0x69, 0x63, 0x74, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, - 0x74, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, - 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, - 0x31, 0x2e, 0x35, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, - 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x65, - 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, - 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, - 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x20, 0x7d, 0x29, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, + 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, - 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x20, 0x72, 0x65, 0x70, 0x65, - 0x61, 0x74, 0x20, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x22, - 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, - 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, - 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, - 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 
0x20, 0x73, 0x74, - 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, - 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, - 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x43, 0x6f, - 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x20, 0x4e, 0x20, 0x74, 0x6f, 0x6b, - 0x65, 0x6e, 0x73, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x65, 0x6e, 0x61, - 0x6c, 0x69, 0x7a, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, - 0x32, 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, - 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, - 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x22, 0x2c, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, - 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x20, 0x7d, - 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, - 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, - 0x54, 0x6f, 0x70, 0x2d, 0x4b, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, - 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, - 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x6b, - 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, - 0x6f, 0x70, 0x5f, 0x6b, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, - 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, - 0x50, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, - 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, - 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x22, 0x2c, 0x20, - 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, - 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, + 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, 0x6c, + 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, + 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 
0x24, 0x7b, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, + 0x68, 0x61, 0x74, 0x22, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x43, 0x68, + 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x4d, 0x6f, 0x72, - 0x65, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x3c, 0x2f, 0x73, - 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, - 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, - 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x46, 0x53, 0x2d, - 0x5a, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, - 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x66, 0x73, 0x5f, 0x7a, - 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, - 0x66, 0x73, 0x5f, 0x7a, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, + 0x73, 0x3d, 0x22, 0x73, 0x6c, 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, + 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, + 0x79, 0x70, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, + 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x20, + 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x7d, 0x20, 0x6f, + 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, + 0x2f, 0x3e, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, + 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, + 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, + 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, + 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, + 0x72, 0x6d, 0x28, 0x29, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, + 0x6f, 0x72, 0x6d, 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, 
0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, + 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, + 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, + 0x20, 0x22, 0x50, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, + 0x38, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x65, + 0x64, 0x69, 0x63, 0x74, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, + 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, + 0x2e, 0x35, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, + 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, + 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x73, + 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x65, + 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, - 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x79, - 0x70, 0x69, 0x63, 0x61, 0x6c, 0x20, 0x50, 0x22, 0x2c, 0x20, 0x6d, 0x61, - 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, - 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, - 0x22, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x22, 0x2c, - 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, - 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, - 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, - 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x70, 0x65, 0x6e, - 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, - 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, - 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x70, 0x72, - 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, - 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, - 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, - 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 
0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x46, - 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x20, 0x70, 0x65, 0x6e, - 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, - 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, - 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x66, 0x72, - 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, - 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, - 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, + 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x65, + 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, + 0x74, 0x20, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x22, 0x2c, + 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, + 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, + 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x68, 0x72, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, - 0x74, 0x68, 0x72, 0x65, 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, - 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, - 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x30, 0x22, - 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, - 0x30, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, - 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x6e, 0x6f, 0x20, - 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 
0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, - 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x31, - 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, - 0x20, 0x31, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x4d, 0x69, - 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, 0x31, 0x3c, 0x2f, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, - 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, - 0x32, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, - 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, - 0x3d, 0x20, 0x32, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x4d, - 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, 0x32, 0x3c, 0x2f, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, - 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, - 0x74, 0x20, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, - 0x20, 0x31, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, + 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x43, 0x6f, 0x6e, + 0x73, 0x69, 0x64, 0x65, 0x72, 0x20, 0x4e, 0x20, 0x74, 0x6f, 0x6b, 0x65, + 0x6e, 0x73, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, + 0x69, 0x7a, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, + 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, + 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x22, 0x2c, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, + 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x20, 0x7d, 0x29, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, + 0x6f, 0x70, 0x2d, 0x4b, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, + 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x30, + 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 
0x22, + 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, + 0x70, 0x5f, 0x6b, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, + 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x50, + 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, + 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x70, + 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x6e, 0x2d, 0x50, 0x20, 0x73, 0x61, + 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, + 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, - 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, - 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, - 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x20, - 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, - 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, - 0x74, 0x20, 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, - 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, - 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, - 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x22, + 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x20, 0x7d, 0x29, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x75, 0x6d, + 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x4d, 0x6f, 0x72, 0x65, 0x20, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x3c, 0x2f, 0x73, 0x75, 0x6d, 0x6d, 0x61, + 0x72, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, + 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, + 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x46, 0x53, 0x2d, 0x5a, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3a, 0x20, 0x22, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x22, 0x2c, 0x20, 0x73, + 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x66, 0x73, 0x5f, 0x7a, + 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x79, 0x70, 0x69, 0x63, 0x61, + 0x6c, 0x20, 0x50, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, + 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, + 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x79, 0x70, + 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, + 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, + 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, + 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x73, + 0x65, 0x6e, 0x63, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, + 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, + 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, + 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, + 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, + 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, + 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, + 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x46, 0x72, 0x65, 0x71, 0x75, + 0x65, 0x6e, 0x63, 0x79, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, + 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, + 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, - 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x20, 0x7d, - 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 
- 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x66, 0x72, + 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, + 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x72, 0x20, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x68, 0x72, 0x65, + 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x30, 0x22, 0x20, 0x63, 0x68, 0x65, + 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, + 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x30, 0x7d, 0x20, 0x6f, + 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, + 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x6e, 0x6f, 0x20, 0x4d, 0x69, 0x72, 0x6f, + 0x73, 0x74, 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x31, 0x22, 0x20, 0x63, 0x68, + 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x31, 0x7d, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, + 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, + 0x61, 0x74, 0x20, 0x76, 0x31, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x32, 0x22, 0x20, 0x63, + 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, + 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x32, 0x7d, + 
0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, + 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, + 0x74, 0x61, 0x74, 0x20, 0x76, 0x32, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, - 0x22, 0x53, 0x68, 0x6f, 0x77, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, - 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x22, 0x2c, 0x20, 0x6d, 0x61, - 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, - 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6e, 0x5f, - 0x70, 0x72, 0x6f, 0x62, 0x73, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, - 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, - 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, - 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x72, 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, - 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, - 0x28, 0x31, 0x20, 0x2d, 0x20, 0x70, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x20, - 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, - 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x60, 0x72, 0x67, 0x62, 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, 0x2c, 0x24, - 0x7b, 0x67, 0x7d, 0x2c, 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, 0x60, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, - 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x6d, - 0x73, 0x67, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 
0x20, 0x20, 0x20, 0x21, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x22, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x74, 0x61, + 0x75, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2e, + 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, + 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, 0x73, + 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, + 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x20, 0x7d, 0x29, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, + 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, + 0x22, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x65, 0x74, + 0x61, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, + 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, + 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, 0x73, 0x74, + 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, + 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x53, 0x68, 0x6f, + 0x77, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, + 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, + 0x73, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, 0x7d, 0x29, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, + 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, + 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, + 0x3d, 0x20, 0x28, 0x70, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, + 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, + 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x28, 0x31, 0x20, 0x2d, + 0x20, 0x70, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x20, 0x3d, 0x20, 0x4d, 0x61, + 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, + 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x60, 0x72, 0x67, 0x62, + 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, 0x2c, 0x24, 0x7b, 0x67, 0x7d, 0x2c, + 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, + 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, - 0x31, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x62, 0x79, 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, 0x72, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x21, + 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, + 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, + 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x2e, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, 0x74, 0x68, - 0x28, 0x27, 0x62, 0x79, 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, 0x27, 0x29, + 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3d, 0x3d, + 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x20, - 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x62, 0x20, - 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, - 0x69, 0x74, 0x69, 0x65, 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, 0x6f, 0x62, - 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x3d, 0x24, 0x7b, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, - 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, - 0x62, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, - 0x7d, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x66, 0x6f, - 0x75, 0x6e, 0x64, 0x20, 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, - 0x66, 0x69, 0x6e, 0x64, 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x2e, - 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, - 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, - 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2e, - 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, - 0x72, 0x6f, 0x62, 0x2d, 0x73, 0x65, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, - 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, 0x70, - 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, + 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6c, + 0x65, 
0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x31, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x62, 0x79, + 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, + 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5b, 0x30, + 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x62, 0x79, + 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, 0x27, 0x29, 0x29, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x70, + 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, + 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6d, + 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x70, + 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, 0x6f, 0x62, 0x5d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, + 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, + 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, 0x7b, 0x73, + 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2c, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, + 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, + 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, + 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, + 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, + 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, + 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x2e, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, + 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x66, 0x6f, 0x75, 0x6e, + 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, + 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x62, + 0x29, 0x20, 
[hunk elided: auto-generated hex dump. The remainder of this hunk rewrites the byte
array `unsigned char index_html[]` in examples/server/index.html.hpp with the
re-encoded contents of examples/server/public/index.html (the token-probability
popover, the "poor mans markdown" renderer, the Popover/Portal/PortalProxy
components, and the App layout); only the raw bytes change, matching the readable
index.html diff below.]
0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, + 0x72, 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, + 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, + 0x67, 0x65, 0x72, 0x67, 0x61, 0x6e, 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, + 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, + 0x61, 0x69, 0x22, 0x3e, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, + 0x2f, 0x61, 0x3e, 0x2e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, + 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, + 0x28, 0x41, 0x70, 0x70, 0x29, 0x2c, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, + 0x61, 0x69, 0x6e, 0x65, 0x72, 0x27, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x3c, 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, + 0x68, 0x65, 0x61, 0x64, 0x3e, 0x0a, 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, + 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, + 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, + 0x69, 0x64, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, + 0x74, 0x22, 0x20, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x3d, 0x22, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x2f, 0x2a, 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, + 0x65, 0x3d, 0x22, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, + 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, + 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, + 0x69, 0x64, 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, + 0x79, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, + 0x0a }; -unsigned int index_html_len = 32105; +unsigned int index_html_len = 32269; diff --git a/examples/server/public/index.html b/examples/server/public/index.html index 39d7bb93d..60659c147 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -219,6 +219,7 @@ repeat_penalty: 1.18, // 1.0 = disabled top_k: 40, // <= 0 to use vocab size top_p: 0.5, // 1.0 = disabled + min_p: 0.05, // 0 = disabled tfs_z: 1.0, // 1.0 = disabled typical_p: 1.0, // 1.0 = disabled presence_penalty: 0.0, // 0.0 = disabled @@ -744,6 +745,7 @@ ${IntField({ label: "Consider N tokens for penalize", max: 2048, min: 0, name: "repeat_last_n", value: params.value.repeat_last_n })} ${IntField({ label: "Top-K sampling", max: 100, min: -1, name: "top_k", value: params.value.top_k })} ${FloatField({ label: "Top-P sampling", max: 1.0, min: 0.0, name: "top_p", step: 0.01, value: 
params.value.top_p })} + ${FloatField({ label: "Min-P sampling", max: 1.0, min: 0.0, name: "min_p", step: 0.01, value: params.value.min_p })}
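For orientation: min-p keeps only the tokens whose probability is at least min_p times the probability of the single most likely token, which is why 0 disables it. Below is a toy NumPy sketch of that rule; the function name and example values are made up for illustration, and this is not the C++ sampler code llama.cpp actually runs:

import numpy as np

def min_p_filter(probs: np.ndarray, min_p: float) -> np.ndarray:
    # min_p <= 0 disables the filter, matching the "0 = disabled" comment above
    if min_p <= 0.0:
        return probs
    keep = probs >= min_p * probs.max()   # cutoff scales with the top token's probability
    filtered = np.where(keep, probs, 0.0)
    return filtered / filtered.sum()      # renormalize over the surviving tokens

probs = np.array([0.60, 0.30, 0.05, 0.03, 0.02])
print(min_p_filter(probs, 0.05))  # drops only 0.02, which falls below 0.05 * 0.60 = 0.03

The same min_p value is what the server reads from the request JSON in the server.cpp hunk below.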

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index fd755327a..cbf36ad67 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -679,6 +679,7 @@ struct llama_server_context
         slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict);
         slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k);
         slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p);
+        slot->sparams.min_p = json_value(data, "min_p", default_sparams.min_p);
         slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z);
         slot->sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p);
         slot->sparams.temp = json_value(data, "temperature", default_sparams.temp);
@@ -1113,6 +1114,7 @@ struct llama_server_context
             {"temp", slot.sparams.temp},
             {"top_k", slot.sparams.top_k},
             {"top_p", slot.sparams.top_p},
+            {"min_p", slot.sparams.min_p},
             {"tfs_z", slot.sparams.tfs_z},
             {"typical_p", slot.sparams.typical_p},
             {"repeat_last_n", slot.sparams.penalty_last_n},

From a75fa576abba9d37f463580c379e4bbf1e1ad03c Mon Sep 17 00:00:00 2001
From: Galunid
Date: Thu, 9 Nov 2023 11:09:29 +0100
Subject: [PATCH 101/859] scripts: Generalize convert scripts (#3838)

* Replace convert-*-hf-to-gguf.py files with convert-hf-to-gguf.py
---
 convert-bloom-hf-to-gguf.py     | 247 ---------
 convert-falcon-hf-to-gguf.py    | 253 ---------
 convert-gptneox-hf-to-gguf.py   | 221 --------
 convert-hf-to-gguf.py           | 890 ++++++++++++++++++++++++++++++++
 convert-mpt-hf-to-gguf.py       | 227 --------
 convert-refact-hf-to-gguf.py    | 272 ----------
 convert-starcoder-hf-to-gguf.py | 210 --------
 convert.py                      |   4 +-
 mypy.ini                        |   1 +
 9 files changed, 893 insertions(+), 1432 deletions(-)
 delete mode 100755 convert-bloom-hf-to-gguf.py
 delete mode 100755 convert-falcon-hf-to-gguf.py
 delete mode 100755 convert-gptneox-hf-to-gguf.py
 create mode 100755 convert-hf-to-gguf.py
 delete mode 100755 convert-mpt-hf-to-gguf.py
 delete mode 100755 convert-refact-hf-to-gguf.py
 delete mode 100755 convert-starcoder-hf-to-gguf.py

diff --git a/convert-bloom-hf-to-gguf.py b/convert-bloom-hf-to-gguf.py
deleted file mode 100755
index 6e866d943..000000000
--- a/convert-bloom-hf-to-gguf.py
+++ /dev/null
@@ -1,247 +0,0 @@
-#!/usr/bin/env python3
-# HF bloom --> gguf conversion
-
-from __future__ import annotations
-
-import argparse
-import json
-import os
-import re
-import struct
-import sys
-from pathlib import Path
-from typing import Any
-
-import numpy as np
-import torch
-from transformers import AutoTokenizer  # type: ignore[import]
-
-if 'NO_LOCAL_GGUF' not in os.environ:
-    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf'))
-import gguf
-
-
-def count_model_parts(dir_model: Path) -> int:
-    num_parts = 0
-    for filename in os.listdir(dir_model):
-        if filename.startswith("pytorch_model-"):
-            num_parts += 1
-
-    if num_parts > 0:
-        print("gguf: found " + str(num_parts) + " model parts")
-    return num_parts
-
-
-# Supported Models:
-# https://huggingface.co/bigscience/bloom-1b7
-# https://huggingface.co/bigscience/bloom-3b
-# https://huggingface.co/bigscience/bloom-7b1
-# https://huggingface.co/Langboat/bloom-1b4-zh
-def parse_args() -> argparse.Namespace:
-    parser = argparse.ArgumentParser(description="Convert a Bloom model to a GGML compatible file")
-    parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab")
-    parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input")
-    parser.add_argument("model", type=Path,
help="directory containing model file, or model file itself (*.bin)") - parser.add_argument("ftype", type=int, help="output format - use 0 for float32, 1 for float16", choices=[0, 1], default = 1) - return parser.parse_args() - -args = parse_args() - -dir_model = args.model -ftype = args.ftype -if not dir_model.is_dir(): - print(f'Error: {args.model} is not a directory', file = sys.stderr) - sys.exit(1) - -# possible tensor data types -# ftype == 0 -> float32 -# ftype == 1 -> float16 - -# map from ftype to string -ftype_str = ["f32", "f16"] - -if args.outfile is not None: - fname_out = args.outfile -else: - # output in the same directory as the model by default - fname_out = dir_model / f'ggml-model-{ftype_str[ftype]}.gguf' - -print("gguf: loading model "+dir_model.name) - -with open(dir_model / "config.json", "r", encoding="utf-8") as f: - hparams = json.load(f) - -if hparams["architectures"][0] != "BloomForCausalLM": - print("Model architecture not supported: " + hparams["architectures"][0]) - sys.exit(1) - -# get number of model parts -num_parts = count_model_parts(dir_model) - -ARCH=gguf.MODEL_ARCH.BLOOM -gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) - -print("gguf: get model metadata") - -block_count = hparams["n_layer"] - -gguf_writer.add_name("Bloom") -n_embed = hparams.get("hidden_size", hparams.get("n_embed")) -n_head = hparams.get("n_head", hparams.get("num_attention_heads")) -gguf_writer.add_context_length(hparams.get("seq_length", n_embed)) -gguf_writer.add_embedding_length(n_embed) -gguf_writer.add_feed_forward_length(4 * n_embed) -gguf_writer.add_block_count(block_count) -gguf_writer.add_head_count(n_head) -gguf_writer.add_head_count_kv(n_head) -gguf_writer.add_layer_norm_eps(hparams["layer_norm_epsilon"]) -gguf_writer.add_file_type(ftype) - -# TOKENIZATION - -print("gguf: get tokenizer metadata") - -tokens: list[bytearray] = [] -scores: list[float] = [] -toktypes: list[int] = [] - -# gpt2 tokenizer -gguf_writer.add_tokenizer_model("gpt2") - -print("gguf: get gpt2 tokenizer vocab") - -# ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py -tokenizer = AutoTokenizer.from_pretrained(dir_model) - -# The number of tokens in tokenizer.json can differ from the expected vocab size. 
-# This causes downstream issues with mismatched tensor sizes when running the inference -vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) -assert max(tokenizer.vocab.values()) < vocab_size - -added_vocab = tokenizer.get_added_vocab() -reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} - -for i in range(vocab_size): - if i not in reverse_vocab: - tokens.append(f"[PAD{i}]") - toktypes.append(gguf.TokenType.USER_DEFINED) - elif reverse_vocab[i] in added_vocab: - tokens.append(reverse_vocab[i]) - if tokenizer.added_tokens_decoder[i].special: - toktypes.append(gguf.TokenType.CONTROL) - else: - toktypes.append(gguf.TokenType.USER_DEFINED) - else: - tokens.append(reverse_vocab[i]) - toktypes.append(gguf.TokenType.NORMAL) - -gguf_writer.add_token_list(tokens) -gguf_writer.add_token_types(toktypes) - -special_vocab = gguf.SpecialVocab(dir_model, load_merges=True, n_vocab = len(tokens)) -special_vocab.add_to_gguf(gguf_writer) - -# TENSORS - -tensor_map = gguf.get_tensor_name_map(ARCH, block_count) - -# params for qkv transform -n_head_kv = hparams.get("n_head_kv", n_head) -head_dim = n_embed // n_head - -# tensor info -print("gguf: get tensor metadata") - -if num_parts == 0: - part_names = iter(("pytorch_model.bin",)) -else: - part_names = ( - f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) - ) - -for part_name in part_names: - if args.vocab_only: - break - print("gguf: loading model part '" + part_name + "'") - model_part = torch.load(dir_model / part_name, map_location="cpu") - - has_lm_head = True - if "lm_head.weight" not in model_part.keys() and "output.weight" not in model_part.keys(): - has_lm_head = False - - for original_name in model_part.keys(): - data = model_part[original_name] - name = re.sub(r'transformer\.', '', original_name) - - old_dtype = data.dtype - - # convert any unsupported data types to float32 - if data.dtype != torch.float16 and data.dtype != torch.float32: - data = data.to(torch.float32) - - data = data.squeeze().numpy() - - if re.match(r"h\.\d+\.self_attention\.query_key_value\.weight", name): - # Map bloom-style qkv_linear to gpt-style qkv_linear - # bloom: https://github.com/huggingface/transformers/blob/main/src/transformers/models/bloom/modeling_bloom.py#L238-L252 # noqa - # gpt-2: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py#L312 # noqa - qkv_weights = data.reshape((n_head, 3, n_embed // n_head, n_embed)) - data = np.concatenate( - (qkv_weights[:, 0, :, :].reshape((-1, n_embed)), - qkv_weights[:, 1, :, :].reshape((-1, n_embed)), - qkv_weights[:, 2, :, :].reshape((-1, n_embed))), - axis=0 - ) - print("re-format attention.linear_qkv.weight") - elif re.match(r"h\.\d+\.self_attention\.query_key_value\.bias", name): - qkv_bias = data.reshape((n_head, 3, n_embed // n_head)) - data = np.concatenate( - (qkv_bias[:, 0, :].reshape((n_embed,)), - qkv_bias[:, 1, :].reshape((n_embed,)), - qkv_bias[:, 2, :].reshape((n_embed,))), - axis=0 - ) - print("re-format attention.linear_qkv.bias") - - # map tensor names - new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) - if new_name is None: - print("Can not map tensor '" + name + "'") - sys.exit() - - n_dims = len(data.shape) - data_dtype = data.dtype - - # if f32 desired, convert any float16 to float32 - if ftype == 0 and data_dtype == np.float16: - data = data.astype(np.float32) - - # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 - if ftype == 1 and data_dtype == np.float16 and n_dims == 1: - data = data.astype(np.float32) - - # if f16 desired, convert any float32 2-dim weight tensors to float16 - if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - - print(name, "=>", new_name + ", shape = " + str(data.shape) + ", " + str(old_dtype) + " --> " + str(data.dtype)) - - gguf_writer.add_tensor(new_name, data) - - if not has_lm_head and name == "word_embeddings.weight": - gguf_writer.add_tensor("output.weight", data) - print(name, "=>", "output.weight" + ", shape = " + str(data.shape) + ", " + str(old_dtype) + " --> " + str(data.dtype)) # noqa - - -print("gguf: write header") -gguf_writer.write_header_to_file() -print("gguf: write metadata") -gguf_writer.write_kv_data_to_file() -if not args.vocab_only: - print("gguf: write tensors") - gguf_writer.write_tensors_to_file() - -gguf_writer.close() - -print(f"gguf: model successfully exported to '{fname_out}'") -print("") diff --git a/convert-falcon-hf-to-gguf.py b/convert-falcon-hf-to-gguf.py deleted file mode 100755 index 8e8f3c3f8..000000000 --- a/convert-falcon-hf-to-gguf.py +++ /dev/null @@ -1,253 +0,0 @@ -#!/usr/bin/env python3 -# HF falcon--> gguf conversion - -from __future__ import annotations - -import argparse -import contextlib -import json -import os -import struct -import sys -from pathlib import Path -from typing import Any - -import numpy as np -import torch -from transformers import AutoTokenizer # type: ignore[import] - -if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf')) -import gguf - - -def count_model_parts(dir_model: Path, prefix: str) -> int: - num_parts = 0 - for filename in os.listdir(dir_model): - if filename.startswith(prefix): - num_parts += 1 - - if num_parts > 0: - print("gguf: found " + str(num_parts) + " model parts") - return num_parts - - -def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Convert a Falcon model to a GGML compatible file") - parser.add_argument( - "--vocab-only", action="store_true", - help="extract only the vocab", - ) - parser.add_argument( - "--outfile", type=Path, - help="path to write to; default: based on input", - ) - parser.add_argument( - "model", type=Path, - help="directory containing model file, or model file itself (*.bin)", - ) - parser.add_argument( - "ftype", type=int, choices=[0, 1], default=1, nargs='?', - help="output format - use 0 for float32, 1 for float16", - ) - return parser.parse_args() - -args = parse_args() - -dir_model = args.model -ftype = args.ftype -if not dir_model.is_dir(): - print(f'Error: {args.model} is not a directory', file = sys.stderr) - sys.exit(1) - -# possible tensor data types -# ftype == 0 -> float32 -# ftype == 1 -> float16 - -# map from ftype to string -ftype_str = ["f32", "f16"] - -if args.outfile is not None: - fname_out = args.outfile -else: - # output in the same directory as the model by default - fname_out = dir_model / f'ggml-model-{ftype_str[ftype]}.gguf' - -print("gguf: loading model "+dir_model.name) - -with open(dir_model / "config.json", "r", encoding="utf-8") as f: - hparams = json.load(f) - -if hparams["architectures"][0] not in ("RWForCausalLM", "FalconForCausalLM"): - print("Model architecture not supported: " + hparams["architectures"][0]) - - sys.exit(1) - -# get number of model parts -num_parts = count_model_parts(dir_model, "model-00") -if 
num_parts: - is_safetensors = True - from safetensors import safe_open -else: - is_safetensors = False - num_parts = count_model_parts(dir_model, "pytorch_model-") - -ARCH=gguf.MODEL_ARCH.FALCON -gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) - -print("gguf: get model metadata") - -block_count = hparams.get("num_hidden_layers") -if block_count is None: - block_count = hparams["n_layer"] # old name - -n_head = hparams.get("num_attention_heads") -if n_head is None: - n_head = hparams["n_head"] # old name - -n_head_kv = hparams.get("num_kv_heads") -if n_head_kv is None: - n_head_kv = hparams.get("n_head_kv", 1) # old name - -gguf_writer.add_name("Falcon") -gguf_writer.add_context_length(2048) # not in config.json -gguf_writer.add_tensor_data_layout("jploski") # qkv tensor transform -gguf_writer.add_embedding_length(hparams["hidden_size"]) -gguf_writer.add_feed_forward_length(4 * hparams["hidden_size"]) -gguf_writer.add_block_count(block_count) -gguf_writer.add_head_count(n_head) -gguf_writer.add_head_count_kv(n_head_kv) -gguf_writer.add_layer_norm_eps(hparams["layer_norm_epsilon"]) -gguf_writer.add_file_type(ftype) - -# TOKENIZATION - -print("gguf: get tokenizer metadata") - -tokens: list[bytearray] = [] -scores: list[float] = [] -toktypes: list[int] = [] - -# gpt2 tokenizer -gguf_writer.add_tokenizer_model("gpt2") - -print("gguf: get gpt2 tokenizer vocab") - -# ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py -tokenizer = AutoTokenizer.from_pretrained(dir_model) - -# The number of tokens in tokenizer.json can differ from the expected vocab size. -# This causes downstream issues with mismatched tensor sizes when running the inference -vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) -assert max(tokenizer.vocab.values()) < vocab_size - -reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} - -for i in range(vocab_size): - tokens.append(reverse_vocab[i]) - scores.append(0.0) # dummy - toktypes.append(gguf.TokenType.NORMAL) - -gguf_writer.add_token_list(tokens) -gguf_writer.add_token_scores(scores) -gguf_writer.add_token_types(toktypes) - -special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) -special_vocab.add_to_gguf(gguf_writer) - -# TENSORS - -tensor_map = gguf.get_tensor_name_map(ARCH,block_count) - -head_dim = hparams["hidden_size"] // n_head - -# tensor info -print("gguf: get tensor metadata") - -if num_parts == 0: - part_names = iter(("pytorch_model.bin",)) -elif is_safetensors: - part_names = ( - f"model-{n:05}-of-{num_parts:05}.safetensors" for n in range(1, num_parts + 1) - ) -else: - part_names = ( - f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) - ) - -for part_name in part_names: - if args.vocab_only: - break - print("gguf: loading model part '" + part_name + "'") - if is_safetensors: - ctx = safe_open(dir_model / part_name, framework="pt", device="cpu") - else: - ctx = contextlib.nullcontext(torch.load(dir_model / part_name, map_location="cpu")) - - with ctx as model_part: - for name in model_part.keys(): - data = model_part.get_tensor(name) if is_safetensors else model_part[name] - - old_dtype = data.dtype - - # convert any unsupported data types to float32 - if data.dtype != torch.float16 and data.dtype != torch.float32: - data = data.to(torch.float32) - - # QKV tensor transform - # The original query_key_value tensor contains n_head_kv "kv groups", - # each consisting of n_head/n_head_kv query weights followed by one key - # and 
one value weight (shared by all query heads in the kv group). - # This layout makes it a big pain to work with in GGML. - # So we rearrange them here,, so that we have n_head query weights - # followed by n_head_kv key weights followed by n_head_kv value weights, - # in contiguous fashion. - # ref: https://github.com/jploski/ggml/blob/falcon40b/examples/falcon/convert-hf-to-ggml.py - - if "query_key_value" in name: - qkv = data.view(n_head_kv, n_head // n_head_kv + 2, head_dim, head_dim * n_head) - q = qkv[:, :-2 ].reshape(n_head * head_dim, head_dim * n_head) - k = qkv[:, [-2]].reshape(n_head_kv * head_dim, head_dim * n_head) - v = qkv[:, [-1]].reshape(n_head_kv * head_dim, head_dim * n_head) - data = torch.cat((q,k,v)).reshape_as(data) - - data = data.squeeze().numpy() - - # map tensor names - new_name = tensor_map.get_name(name, try_suffixes = (".weight", ".bias")) - if new_name is None: - print("Can not map tensor '" + name + "'") - sys.exit() - - n_dims = len(data.shape) - data_dtype = data.dtype - - # if f32 desired, convert any float16 to float32 - if ftype == 0 and data_dtype == np.float16: - data = data.astype(np.float32) - - # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 - if ftype == 1 and data_dtype == np.float16 and n_dims == 1: - data = data.astype(np.float32) - - # if f16 desired, convert any float32 2-dim weight tensors to float16 - if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - - print(new_name + ", n_dims = " + str(n_dims) + ", " + str(old_dtype) + " --> " + str(data.dtype)) - - gguf_writer.add_tensor(new_name, data) - - -print("gguf: write header") -gguf_writer.write_header_to_file() -print("gguf: write metadata") -gguf_writer.write_kv_data_to_file() -if not args.vocab_only: - print("gguf: write tensors") - gguf_writer.write_tensors_to_file() - -gguf_writer.close() - -print(f"gguf: model successfully exported to '{fname_out}'") -print("") diff --git a/convert-gptneox-hf-to-gguf.py b/convert-gptneox-hf-to-gguf.py deleted file mode 100755 index 02d1fdf16..000000000 --- a/convert-gptneox-hf-to-gguf.py +++ /dev/null @@ -1,221 +0,0 @@ -#!/usr/bin/env python3 -# HF gptneox--> gguf conversion - -from __future__ import annotations - -import argparse -import json -import os -import struct -import sys -from pathlib import Path -from typing import Any - -import numpy as np -import torch -from transformers import AutoTokenizer # type: ignore[import] - -if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf')) -import gguf - - -def count_model_parts(dir_model: Path) -> int: - num_parts = 0 - for filename in os.listdir(dir_model): - if filename.startswith("pytorch_model-"): - num_parts += 1 - - if num_parts > 0: - print("gguf: found " + str(num_parts) + " model parts") - return num_parts - - -def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Convert a GPT-NeoX model to a GGML compatible file") - parser.add_argument( - "--vocab-only", action="store_true", - help="extract only the vocab", - ) - parser.add_argument( - "--outfile", type=Path, - help="path to write to; default: based on input", - ) - parser.add_argument( - "model", type=Path, - help="directory containing model file, or model file itself (*.bin)", - ) - parser.add_argument( - "ftype", type=int, choices=[0, 1], default=1, nargs='?', - help="output format - use 0 for float32, 1 for float16", - ) - return 
parser.parse_args() - -args = parse_args() - -dir_model = args.model -ftype = args.ftype -if not dir_model.is_dir(): - print(f'Error: {args.model} is not a directory', file = sys.stderr) - sys.exit(1) - -# possible tensor data types -# ftype == 0 -> float32 -# ftype == 1 -> float16 - -# map from ftype to string -ftype_str = ["f32", "f16"] - -if args.outfile is not None: - fname_out = args.outfile -else: - # output in the same directory as the model by default - fname_out = dir_model / f'ggml-model-{ftype_str[ftype]}.gguf' - -print("gguf: loading model "+dir_model.name) - -with open(dir_model / "config.json", "r", encoding="utf-8") as f: - hparams = json.load(f) - -if hparams["architectures"][0] != "GPTNeoXForCausalLM": - print("Model architecture not supported: " + hparams["architectures"][0]) - - sys.exit() - -# get number of model parts -num_parts = count_model_parts(dir_model) - -ARCH=gguf.MODEL_ARCH.GPTNEOX -gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) - -print("gguf: get model metadata") - -block_count = hparams["num_hidden_layers"] - -gguf_writer.add_name(dir_model.name) -gguf_writer.add_context_length(hparams["max_position_embeddings"]) -gguf_writer.add_embedding_length(hparams["hidden_size"]) -gguf_writer.add_block_count(block_count) -gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) -gguf_writer.add_rope_dimension_count(int(hparams["rotary_pct"]*(hparams["hidden_size"]//hparams["num_attention_heads"]))) -gguf_writer.add_head_count(hparams["num_attention_heads"]) -gguf_writer.add_parallel_residual(hparams["use_parallel_residual"] if "use_parallel_residual" in hparams else True) -gguf_writer.add_layer_norm_eps(hparams["layer_norm_eps"]) - -# TOKENIZATION - -print("gguf: get tokenizer metadata") - -tokens: list[bytearray] = [] -scores: list[float] = [] -toktypes: list[int] = [] - -# gpt2 tokenizer -gguf_writer.add_tokenizer_model("gpt2") - -print("gguf: get gpt2 tokenizer vocab") - -# ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py -tokenizer = AutoTokenizer.from_pretrained(dir_model) - -# The number of tokens in tokenizer.json can differ from the expected vocab size. 
-# This causes downstream issues with mismatched tensor sizes when running the inference -vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) -assert max(tokenizer.vocab.values()) < vocab_size - -added_vocab = tokenizer.get_added_vocab() -reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} - -for i in range(vocab_size): - if i not in reverse_vocab: - tokens.append(f"[PAD{i}]") - toktypes.append(gguf.TokenType.USER_DEFINED) - elif reverse_vocab[i] in added_vocab: - tokens.append(reverse_vocab[i]) - if tokenizer.added_tokens_decoder[i].special: - toktypes.append(gguf.TokenType.CONTROL) - else: - toktypes.append(gguf.TokenType.USER_DEFINED) - else: - tokens.append(reverse_vocab[i]) - toktypes.append(gguf.TokenType.NORMAL) - -gguf_writer.add_token_list(tokens) -gguf_writer.add_token_types(toktypes) - -special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) -special_vocab.add_to_gguf(gguf_writer) - -# TENSORS - -tensor_map = gguf.get_tensor_name_map(ARCH,block_count) - -# tensor info -print("gguf: get tensor metadata") - -if num_parts == 0: - part_names = iter(("pytorch_model.bin",)) -else: - part_names = ( - f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) - ) - -for part_name in part_names: - if args.vocab_only: - break - print("gguf: loading model part '" + part_name + "'") - model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") - - for name in model_part.keys(): - data = model_part[name] - - # we don't need these - if name.endswith(".attention.masked_bias") or name.endswith(".attention.bias") or name.endswith(".attention.rotary_emb.inv_freq"): - continue - - old_dtype = data.dtype - - # convert any unsupported data types to float32 - if data.dtype != torch.float16 and data.dtype != torch.float32: - data = data.to(torch.float32) - - data = data.squeeze().numpy() - - # map tensor names - new_name = tensor_map.get_name(name, try_suffixes = (".weight", ".bias")) - if new_name is None: - print("Can not map tensor '" + name + "'") - sys.exit() - - n_dims = len(data.shape) - data_dtype = data.dtype - - # if f32 desired, convert any float16 to float32 - if ftype == 0 and data_dtype == np.float16: - data = data.astype(np.float32) - - # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 - if ftype == 1 and data_dtype == np.float16 and n_dims == 1: - data = data.astype(np.float32) - - # if f16 desired, convert any float32 2-dim weight tensors to float16 - if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - - print(new_name + ", n_dims = " + str(n_dims) + ", " + str(old_dtype) + " --> " + str(data.dtype)) - - gguf_writer.add_tensor(new_name, data) - - -print("gguf: write header") -gguf_writer.write_header_to_file() -print("gguf: write metadata") -gguf_writer.write_kv_data_to_file() -if not args.vocab_only: - print("gguf: write tensors") - gguf_writer.write_tensors_to_file() - -gguf_writer.close() - -print(f"gguf: model successfully exported to '{fname_out}'") -print("") diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py new file mode 100755 index 000000000..f7fe29fd4 --- /dev/null +++ b/convert-hf-to-gguf.py @@ -0,0 +1,890 @@ +#!/usr/bin/env python3 + +from __future__ import annotations + +import argparse +import contextlib +import json +import os +import re +import sys +from enum import IntEnum +from pathlib import Path +from typing import TYPE_CHECKING, Any, ContextManager, Iterator, cast + +import numpy as np +import torch + +if TYPE_CHECKING: + from torch import Tensor + +if 'NO_LOCAL_GGUF' not in os.environ: + sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) +import gguf + + +###### MODEL DEFINITIONS ###### + +class SentencePieceTokenTypes(IntEnum): + NORMAL = 1 + UNKNOWN = 2 + CONTROL = 3 + USER_DEFINED = 4 + UNUSED = 5 + BYTE = 6 + + +class Model: + def __init__(self, dir_model: Path, ftype: int, fname_out: Path, is_big_endian: bool): + self.dir_model = dir_model + self.ftype = ftype + self.fname_out = fname_out + self.is_big_endian = is_big_endian + self.endianess = gguf.GGUFEndian.BIG if is_big_endian else gguf.GGUFEndian.LITTLE + self.is_safetensors = self._is_model_safetensors() + self.num_parts = Model.count_model_parts(self.dir_model, ".safetensors" if self.is_safetensors else ".bin") + self.part_names = self._get_part_names() + self.hparams = Model.load_hparams(self.dir_model) + self.model_arch = self._get_model_architecture() + self.gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[self.model_arch], endianess=self.endianess) + + def set_vocab(self): + self._set_vocab_gpt2() + + def get_tensors(self) -> Iterator[tuple[str, Tensor]]: + for part_name in self.part_names: + print(f"gguf: loading model part '{part_name}'") + ctx: ContextManager[Any] + if self.is_safetensors: + from safetensors import safe_open + ctx = cast(ContextManager[Any], safe_open(self.dir_model / part_name, framework="pt", device="cpu")) + else: + ctx = contextlib.nullcontext(torch.load(self.dir_model / part_name, map_location="cpu")) + + with ctx as model_part: + for name in model_part.keys(): + data = model_part.get_tensor(name) if self.is_safetensors else model_part[name] + yield name, data + + def set_gguf_parameters(self): + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_block_count(self.hparams.get( + "n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer")), + )) + if (n_ctx := self.hparams.get("max_position_embeddings")) is not None: + self.gguf_writer.add_context_length(n_ctx) + if (n_embd := self.hparams.get("hidden_size")) is not None: + self.gguf_writer.add_embedding_length(n_embd) + if (n_ff := self.hparams.get("intermediate_size")) is not None: + 
self.gguf_writer.add_feed_forward_length(n_ff) + if (n_head := self.hparams.get("num_attention_head")) is not None: + self.gguf_writer.add_head_count(n_head) + self.gguf_writer.add_parallel_residual(self.hparams.get("use_parallel_residual", True)) + + def write_tensors(self): + block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + for name, data_torch in self.get_tensors(): + # we don't need these + if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq")): + continue + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + def write(self): + self.write_tensors() + self.gguf_writer.write_header_to_file() + self.gguf_writer.write_kv_data_to_file() + self.gguf_writer.write_tensors_to_file() + self.gguf_writer.close() + + def write_vocab(self): + self.gguf_writer.write_header_to_file() + self.gguf_writer.write_kv_data_to_file() + self.gguf_writer.close() + + @staticmethod + def count_model_parts(dir_model: Path, prefix: str) -> int: + num_parts = 0 + for filename in os.listdir(dir_model): + if filename.endswith(prefix): + num_parts += 1 + + return num_parts + + @staticmethod + def load_hparams(dir_model): + with open(dir_model / "config.json", "r", encoding="utf-8") as f: + return json.load(f) + + @staticmethod + def from_model_architecture(model_architecture): + if model_architecture == "StableLMEpochForCausalLM": + return StableLMModel + if model_architecture == "GPTNeoXForCausalLM": + return GPTNeoXModel + if model_architecture == "BloomForCausalLM": + return BloomModel + if model_architecture == "MPTForCausalLM": + return MPTModel + if model_architecture in ("BaichuanForCausalLM", "BaiChuanForCausalLM"): + return BaichuanModel + if model_architecture in ("FalconForCausalLM", "RWForCausalLM"): + return FalconModel + if model_architecture == "GPTBigCodeForCausalLM": + return StarCoderModel + if model_architecture == "GPTRefactForCausalLM": + return RefactModel + if model_architecture == "PersimmonForCausalLM": + return PersimmonModel + return Model + + def _is_model_safetensors(self) -> bool: + return Model.count_model_parts(self.dir_model, ".safetensors") > 0 + + def _get_part_names(self): + if self.is_safetensors: + if self.num_parts == 1: # there's only one .safetensors file + return ("model.safetensors",) + return (f"model-{n:05}-of-{self.num_parts:05}.safetensors" for n 
in range(1, self.num_parts + 1)) + + if self.num_parts == 1: # there's only one .bin file + return ("pytorch_model.bin",) + return (f"pytorch_model-{n:05}-of-{self.num_parts:05}.bin" for n in range(1, self.num_parts + 1)) + + def _get_model_architecture(self) -> gguf.MODEL_ARCH: + arch = self.hparams["architectures"][0] + if arch == "GPTNeoXForCausalLM": + return gguf.MODEL_ARCH.GPTNEOX + if arch == "BloomForCausalLM": + return gguf.MODEL_ARCH.BLOOM + if arch == "MPTForCausalLM": + return gguf.MODEL_ARCH.MPT + if arch in ("BaichuanForCausalLM", "BaiChuanForCausalLM"): + return gguf.MODEL_ARCH.BAICHUAN + if arch == "FalconForCausalLM": + return gguf.MODEL_ARCH.FALCON + if arch == "GPTBigCodeForCausalLM": + return gguf.MODEL_ARCH.STARCODER + if arch == "GPTRefactForCausalLM": + return gguf.MODEL_ARCH.REFACT + if arch == "PersimmonForCausalLM": + return gguf.MODEL_ARCH.PERSIMMON + + raise NotImplementedError(f'Architecture "{arch}" not supported!') + + def _set_vocab_gpt2(self): + dir_model = self.dir_model + hparams = self.hparams + tokens: list[bytearray] = [] + toktypes: list[int] = [] + + from transformers import AutoTokenizer # type: ignore[attr-defined] + tokenizer = AutoTokenizer.from_pretrained(dir_model) + vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) + assert max(tokenizer.vocab.values()) < vocab_size + + reverse_vocab = {id_: encoded_tok for encoded_tok, id_ in tokenizer.vocab.items()} + added_vocab = tokenizer.get_added_vocab() + + for i in range(vocab_size): + if i not in reverse_vocab: + pad_token = f"[PAD{i}]".encode('utf-8') + tokens.append(bytearray(pad_token)) + toktypes.append(gguf.TokenType.USER_DEFINED) + elif reverse_vocab[i] in added_vocab: + tokens.append(reverse_vocab[i]) + if tokenizer.added_tokens_decoder[i].special: + toktypes.append(gguf.TokenType.CONTROL) + else: + toktypes.append(gguf.TokenType.USER_DEFINED) + else: + tokens.append(reverse_vocab[i]) + toktypes.append(gguf.TokenType.NORMAL) + + self.gguf_writer.add_tokenizer_model("gpt2") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_types(toktypes) + + special_vocab = gguf.SpecialVocab(dir_model, load_merges=True) + special_vocab.add_to_gguf(self.gguf_writer) + + def _set_vocab_sentencepiece(self): + from sentencepiece import SentencePieceProcessor + + tokenizer_path = self.dir_model / 'tokenizer.model' + + tokens: list[bytes] = [] + scores: list[float] = [] + toktypes: list[int] = [] + + if not tokenizer_path.is_file(): + print(f'Error: Missing {tokenizer_path}', file=sys.stderr) + sys.exit(1) + + tokenizer = SentencePieceProcessor(str(tokenizer_path)) + vocab_size = self.hparams.get('vocab_size', tokenizer.vocab_size()) + + for token_id in range(vocab_size): + piece = tokenizer.id_to_piece(token_id) + text = piece.encode("utf-8") + score = tokenizer.get_score(token_id) + + toktype = SentencePieceTokenTypes.NORMAL + if tokenizer.is_unknown(token_id): + toktype = SentencePieceTokenTypes.UNKNOWN + elif tokenizer.is_control(token_id): + toktype = SentencePieceTokenTypes.CONTROL + elif tokenizer.is_unused(token_id): + toktype = SentencePieceTokenTypes.UNUSED + elif tokenizer.is_byte(token_id): + toktype = SentencePieceTokenTypes.BYTE + + tokens.append(text) + scores.append(score) + toktypes.append(toktype) + + added_tokens_file = self.dir_model / 'added_tokens.json' + if added_tokens_file.is_file(): + with open(added_tokens_file, "r", encoding="utf-8") as f: + added_tokens_json = json.load(f) + + for key in added_tokens_json: + tokens.append(key.encode("utf-8")) + 
scores.append(-1000.0) + toktypes.append(SentencePieceTokenTypes.USER_DEFINED) + + self.gguf_writer.add_tokenizer_model("llama") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + + special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + special_vocab.add_to_gguf(self.gguf_writer) + + +class StableLMModel(Model): + def set_gguf_parameters(self): + super().set_gguf_parameters() + self.gguf_writer.add_rope_dimension_count( + int(self.hparams["rope_pct"] * (self.hparams["hidden_size"] // self.hparams["num_attention_heads"])), + ) + self.gguf_writer.add_layer_norm_eps(1e-5) + + +class GPTNeoXModel(Model): + def set_gguf_parameters(self): + block_count = self.hparams["num_hidden_layers"] + + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_rope_dimension_count( + int(self.hparams["rotary_pct"] * (self.hparams["hidden_size"] // self.hparams["num_attention_heads"])), + ) + self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) + self.gguf_writer.add_parallel_residual(self.hparams.get("use_parallel_residual", True)) + self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) + + +class BloomModel(Model): + def set_gguf_parameters(self): + self.gguf_writer.add_name("Bloom") + n_embed = self.hparams.get("hidden_size", self.hparams.get("n_embed")) + n_head = self.hparams.get("n_head", self.hparams.get("num_attention_heads")) + self.gguf_writer.add_context_length(self.hparams.get("seq_length", n_embed)) + self.gguf_writer.add_embedding_length(n_embed) + self.gguf_writer.add_feed_forward_length(4 * n_embed) + self.gguf_writer.add_block_count(self.hparams["n_layer"]) + self.gguf_writer.add_head_count(n_head) + self.gguf_writer.add_head_count_kv(n_head) + self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"]) + self.gguf_writer.add_file_type(self.ftype) + + def write_tensors(self): + block_count = self.hparams["n_layer"] + tensors = dict(self.get_tensors()) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + has_lm_head = True + n_head = self.hparams.get("n_head", self.hparams.get("num_attention_heads")) + n_embed = self.hparams.get("hidden_size", self.hparams.get("n_embed")) + + for name, data_torch in tensors.items(): + if "lm_head.weight" not in tensors.keys() and "output.weight" not in tensors.keys(): + has_lm_head = False + + name = re.sub(r'transformer\.', '', name) + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + if re.match(r"h\.\d+\.self_attention\.query_key_value\.weight", name): + # Map bloom-style qkv_linear to gpt-style qkv_linear + # bloom: https://github.com/huggingface/transformers/blob/main/src/transformers/models/bloom/modeling_bloom.py#L238-L252 # noqa + # gpt-2: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py#L312 # noqa + qkv_weights = data.reshape((n_head, 3, n_embed // n_head, n_embed)) + data = np.concatenate( + ( + qkv_weights[:, 0, :, :].reshape((-1, n_embed)), + qkv_weights[:, 1, :, :].reshape((-1, 
n_embed)), + qkv_weights[:, 2, :, :].reshape((-1, n_embed)), + ), + axis=0, + ) + print("re-format attention.linear_qkv.weight") + elif re.match(r"h\.\d+\.self_attention\.query_key_value\.bias", name): + qkv_bias = data.reshape((n_head, 3, n_embed // n_head)) + data = np.concatenate( + ( + qkv_bias[:, 0, :].reshape((n_embed,)), + qkv_bias[:, 1, :].reshape((n_embed,)), + qkv_bias[:, 2, :].reshape((n_embed,)), + ), + axis=0, + ) + print("re-format attention.linear_qkv.bias") + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"=> {new_name}, shape = {data.shape}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + if not has_lm_head and name == "word_embeddings.weight": + self.gguf_writer.add_tensor("output.weight", data) + print(name, f"=> output.weight, shape = {data.shape}, {old_dtype} --> {data.dtype}") + + +class MPTModel(Model): + def set_gguf_parameters(self): + block_count = self.hparams["n_layers"] + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_context_length(self.hparams["max_seq_len"]) + self.gguf_writer.add_embedding_length(self.hparams["d_model"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_feed_forward_length(4 * self.hparams["d_model"]) + self.gguf_writer.add_head_count(self.hparams["n_heads"]) + if kv_n_heads := self.hparams["attn_config"].get("kv_n_heads"): + self.gguf_writer.add_head_count_kv(kv_n_heads) + self.gguf_writer.add_layer_norm_eps(1e-5) + if self.hparams["attn_config"]["clip_qkv"] is not None: + self.gguf_writer.add_clamp_kqv(self.hparams["attn_config"]["clip_qkv"]) + self.gguf_writer.add_max_alibi_bias(self.hparams["attn_config"]["alibi_bias_max"]) + + def write_tensors(self): + block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers")) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + for name, data_torch in self.get_tensors(): + # we don't need these + if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq")): + continue + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + # note: MPT output is tied to (same as) wte in original model; + # for easier implementation in llama.cpp it's duplicated in GGUF, though :/ + if new_name == "token_embd.weight": + self.gguf_writer.add_tensor("output.weight", data) + + +class BaichuanModel(Model): + def set_vocab(self): + self._set_vocab_sentencepiece() + + def set_gguf_parameters(self): + block_count = self.hparams["num_hidden_layers"] + head_count = self.hparams["num_attention_heads"] + head_count_kv = self.hparams.get("num_key_value_heads", head_count) + hf_repo = self.hparams.get("_name_or_path", "") + + ctx_length = 0 + if "max_sequence_length" in self.hparams: + ctx_length = self.hparams["max_sequence_length"] + elif "max_position_embeddings" in self.hparams: + ctx_length = self.hparams["max_position_embeddings"] + elif "model_max_length" in self.hparams: + ctx_length = self.hparams["model_max_length"] + else: + print("gguf: can not find ctx length parameter.") + sys.exit() + + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_source_hf_repo(hf_repo) + self.gguf_writer.add_tensor_data_layout("Meta AI original pth") + self.gguf_writer.add_context_length(ctx_length) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) + self.gguf_writer.add_head_count(head_count) + self.gguf_writer.add_head_count_kv(head_count_kv) + self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) + + if self.hparams.get("rope_scaling") is not None and "factor" in self.hparams["rope_scaling"]: + if self.hparams["rope_scaling"].get("type") == "linear": + self.gguf_writer.add_rope_scaling_type(gguf.RopeScalingType.LINEAR) + self.gguf_writer.add_rope_scaling_factor(self.hparams["rope_scaling"]["factor"]) + + def write_tensors(self): + # Collect tensors from generator object + model_kv = dict(self.get_tensors()) + block_count = self.hparams["num_hidden_layers"] + head_count = self.hparams["num_attention_heads"] + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + head_count_kv = self.hparams.get("num_key_value_heads", head_count) + + for i in range(block_count): + if (w := model_kv.get(f"model.layers.{i}.self_attn.W_pack.weight")) is not None: + print(f"Unpacking and permuting layer {i}") + model_kv[f"model.layers.{i}.self_attn.q_proj.weight"] = \ + self._reverse_hf_permute_part(w, 0, head_count, head_count) + model_kv[f"model.layers.{i}.self_attn.k_proj.weight"] = \ + self._reverse_hf_permute_part(w, 1, head_count, head_count_kv) + model_kv[f"model.layers.{i}.self_attn.v_proj.weight"] = \ + self._reverse_hf_part(w, 2) + del model_kv[f"model.layers.{i}.self_attn.W_pack.weight"] + + for name, data_torch in model_kv.items(): + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue + + old_dtype = data_torch.dtype + + # convert any unsupported data types 
to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{name} -> {new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + self.gguf_writer.add_tensor(new_name, data) + + def _reverse_hf_permute(self, weights: Tensor, n_head: int, n_kv_head: int | None = None) -> Tensor: + if n_kv_head is not None and n_head != n_kv_head: + n_head //= n_kv_head + + return ( + weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:]) + .swapaxes(1, 2) + .reshape(weights.shape) + ) + + def _reverse_hf_permute_part( + self, weights: Tensor, n_part: int, n_head: int, n_head_kv: int | None = None, + ) -> Tensor: + r = weights.shape[0] // 3 + return self._reverse_hf_permute(weights[r * n_part:r * n_part + r, ...], n_head, n_head_kv) + + def _reverse_hf_part(self, weights: Tensor, n_part: int) -> Tensor: + r = weights.shape[0] // 3 + return weights[r * n_part:r * n_part + r, ...] 
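For readers skimming the new converter: BaichuanModel.write_tensors above splits the fused W_pack weight into separate Q/K/V projections before name mapping, and _reverse_hf_permute undoes the rotary-embedding permutation that the Hugging Face export applies to the query and key blocks. A toy-sized sketch of just the slicing step (an editor's illustration with made-up shapes, not code from the patch):

import torch

n_embd = 8                                # toy size; real models use thousands
w_pack = torch.randn(3 * n_embd, n_embd)  # stand-in for model.layers.{i}.self_attn.W_pack.weight

r = w_pack.shape[0] // 3                  # rows per projection, as in _reverse_hf_permute_part
q_block = w_pack[0 * r:1 * r, ...]        # n_part = 0 -> query rows (then rotary-unpermuted)
k_block = w_pack[1 * r:2 * r, ...]        # n_part = 1 -> key rows (then rotary-unpermuted)
v_block = w_pack[2 * r:3 * r, ...]        # n_part = 2 -> value rows, taken as-is
assert q_block.shape == k_block.shape == v_block.shape == (n_embd, n_embd)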
+
+
+class FalconModel(Model):
+    def set_gguf_parameters(self):
+        block_count = self.hparams.get("num_hidden_layers")
+        if block_count is None:
+            block_count = self.hparams["n_layer"]  # old name
+
+        n_head = self.hparams.get("num_attention_heads")
+        if n_head is None:
+            n_head = self.hparams["n_head"]  # old name
+
+        n_head_kv = self.hparams.get("num_kv_heads")
+        if n_head_kv is None:
+            n_head_kv = self.hparams.get("n_head_kv", 1)  # old name
+
+        self.gguf_writer.add_name("Falcon")
+        self.gguf_writer.add_context_length(2048)  # not in config.json
+        self.gguf_writer.add_tensor_data_layout("jploski")  # qkv tensor transform
+        self.gguf_writer.add_embedding_length(self.hparams["hidden_size"])
+        self.gguf_writer.add_feed_forward_length(4 * self.hparams["hidden_size"])
+        self.gguf_writer.add_block_count(block_count)
+        self.gguf_writer.add_head_count(n_head)
+        self.gguf_writer.add_head_count_kv(n_head_kv)
+        self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"])
+        self.gguf_writer.add_file_type(self.ftype)
+
+    def write_tensors(self):
+        block_count = self.hparams.get("num_hidden_layers")
+        if block_count is None:
+            block_count = self.hparams["n_layer"]  # old name
+
+        n_head = self.hparams.get("num_attention_heads")
+        if n_head is None:
+            n_head = self.hparams["n_head"]  # old name
+
+        n_head_kv = self.hparams.get("num_kv_heads")
+        if n_head_kv is None:
+            n_head_kv = self.hparams.get("n_head_kv", 1)  # old name
+
+        head_dim = self.hparams["hidden_size"] // n_head
+        tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count)
+
+        for name, data_torch in self.get_tensors():
+            old_dtype = data_torch.dtype
+
+            # convert any unsupported data types to float32
+            if data_torch.dtype not in (torch.float16, torch.float32):
+                data_torch = data_torch.to(torch.float32)
+
+            # QKV tensor transform
+            # The original query_key_value tensor contains n_head_kv "kv groups",
+            # each consisting of n_head/n_head_kv query weights followed by one key
+            # and one value weight (shared by all query heads in the kv group).
+            # This layout makes it a big pain to work with in GGML.
+            # So we rearrange them here, so that we have n_head query weights
+            # followed by n_head_kv key weights followed by n_head_kv value weights,
+            # in contiguous fashion.
+            # ref: https://github.com/jploski/ggml/blob/falcon40b/examples/falcon/convert-hf-to-ggml.py
+
+            if "query_key_value" in name:
+                qkv = data_torch.view(n_head_kv, n_head // n_head_kv + 2, head_dim, head_dim * n_head)
+                q = qkv[:, :-2].reshape(n_head * head_dim, head_dim * n_head)
+                k = qkv[:, [-2]].reshape(n_head_kv * head_dim, head_dim * n_head)
+                v = qkv[:, [-1]].reshape(n_head_kv * head_dim, head_dim * n_head)
+                data_torch = torch.cat((q, k, v)).reshape_as(data_torch)
+
+            data = data_torch.squeeze().numpy()
+
+            # map tensor names
+            new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias"))
+            if new_name is None:
+                print(f"Can not map tensor {name!r}")
+                sys.exit()
+
+            n_dims = len(data.shape)
+            data_dtype = data.dtype
+
+            # if f32 desired, convert any float16 to float32
+            if self.ftype == 0 and data_dtype == np.float16:
+                data = data.astype(np.float32)
+
+            # TODO: Why can't we use these float16 as-is? There should be no reason to store float16 as float32
+            if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1:
+                data = data.astype(np.float32)
+
+            # if f16 desired, convert any float32 2-dim weight tensors to float16
+            if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2:
+                data = data.astype(np.float16)
+
+            print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}")
+
+            self.gguf_writer.add_tensor(new_name, data)
+
+
+class StarCoderModel(Model):
+    def set_gguf_parameters(self):
+        block_count = self.hparams["n_layer"]
+
+        self.gguf_writer.add_name("StarCoder")
+        self.gguf_writer.add_context_length(self.hparams["n_positions"])
+        self.gguf_writer.add_embedding_length(self.hparams["n_embd"])
+        self.gguf_writer.add_feed_forward_length(4 * self.hparams["n_embd"])
+        self.gguf_writer.add_block_count(block_count)
+        self.gguf_writer.add_head_count(self.hparams["n_head"])
+        self.gguf_writer.add_head_count_kv(1)
+        self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"])
+        self.gguf_writer.add_file_type(self.ftype)
+
+
+class RefactModel(Model):
+    def set_gguf_parameters(self):
+        hidden_dim = self.hparams["n_embd"]
+        inner_dim = 4 * hidden_dim
+        hidden_dim = int(2 * inner_dim / 3)
+        multiple_of = 256
+        ff_dim = multiple_of * ((hidden_dim + multiple_of - 1) // multiple_of)
+
+        block_count = self.hparams["n_layer"]
+
+        self.gguf_writer.add_name("Refact")
+        # Refact uses ALiBi, so this value from config.json may only reflect the training context.
+        self.gguf_writer.add_context_length(self.hparams["n_positions"])
+        self.gguf_writer.add_embedding_length(self.hparams["n_embd"])
+
+        self.gguf_writer.add_feed_forward_length(ff_dim)
+        self.gguf_writer.add_block_count(block_count)
+        self.gguf_writer.add_head_count(self.hparams["n_head"])
+        self.gguf_writer.add_head_count_kv(1)
+        self.gguf_writer.add_layer_norm_rms_eps(self.hparams["layer_norm_epsilon"])
+        self.gguf_writer.add_file_type(self.ftype)
+
+    def write_tensors(self):
+        hidden_dim = self.hparams["n_embd"]
+        inner_dim = 4 * hidden_dim
+        hidden_dim = int(2 * inner_dim / 3)
+        multiple_of = 256
+        ff_dim = multiple_of * ((hidden_dim + multiple_of - 1) // multiple_of)
+        n_head = self.hparams["n_head"]
+        n_head_kv = 1
+        head_dim = self.hparams["n_embd"] // n_head
+        block_count = self.hparams["n_layer"]
+
+        tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count)
+
+        tensors = dict(self.get_tensors())
+        for i in range(block_count):
+            if (w := tensors.get(f"transformer.h.{i}.attn.kv.weight")) is not None:
+                tensors[f"model.layers.{i}.self_attn.k_proj.weight"] = w[:n_head_kv * head_dim]
+                tensors[f"model.layers.{i}.self_attn.v_proj.weight"] = w[n_head_kv * head_dim:]
+                del tensors[f"transformer.h.{i}.attn.kv.weight"]
+            if (w := tensors.get(f"transformer.h.{i}.attn.q.weight")) is not None:
+                tensors[f"model.layers.{i}.self_attn.q_proj.weight"] = w
+                del tensors[f"transformer.h.{i}.attn.q.weight"]
+            if (w := tensors.get(f"transformer.h.{i}.mlp.gate_up_proj.weight")) is not None:
+                tensors[f"model.layers.{i}.mlp.gate_proj.weight"] = w[:ff_dim]
+                tensors[f"model.layers.{i}.mlp.up_proj.weight"] = w[ff_dim:]
+                del tensors[f"transformer.h.{i}.mlp.gate_up_proj.weight"]
+
+        for name, data_torch in tensors.items():
+            old_dtype = data_torch.dtype
+
+            # convert any unsupported data types to float32
+            if data_torch.dtype not in (torch.float16, torch.float32):
+                data_torch = data_torch.to(torch.float32)
+
+            data = data_torch.squeeze().numpy()
+
+            # map tensor names
+            new_name = tensor_map.get_name(name, try_suffixes=(".weight",))
+            if new_name is None:
+                print(f"Can not map tensor {name!r}")
+                sys.exit()
+
+            n_dims = len(data.shape)
+            data_dtype = data.dtype
+
+            # if f32 desired, convert any float16 to float32
+            if self.ftype == 0 and data_dtype == np.float16:
+                data = data.astype(np.float32)
+
+            # TODO: Why can't we use these float16 as-is? There should be no reason to store float16 as float32
+            if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1:
+                data = data.astype(np.float32)
+
+            # if f16 desired, convert any float32 2-dim weight tensors to float16
+            if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2:
+                data = data.astype(np.float16)
+
+            print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}")
+
+            self.gguf_writer.add_tensor(new_name, data)
+
+
+class PersimmonModel(Model):
+    def set_gguf_parameters(self):
+        block_count = self.hparams.get("num_layers", self.hparams.get("num_hidden_layers"))
+        head_count = self.hparams["num_attention_heads"]
+        head_count_kv = head_count
+        hidden_size = self.hparams["hidden_size"]
+
+        self.gguf_writer.add_name('persimmon-8b-chat')
+        self.gguf_writer.add_embedding_length(hidden_size)
+        self.gguf_writer.add_block_count(block_count)
+        self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"])
+        self.gguf_writer.add_rope_dimension_count(hidden_size // head_count)
+        self.gguf_writer.add_head_count(head_count)
+        self.gguf_writer.add_head_count_kv(head_count_kv)
+        self.gguf_writer.add_rope_freq_base(self.hparams["rope_theta"])
+        self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"])
+        self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"])
+
+    def set_vocab(self):
+        self._set_vocab_sentencepiece()
+        # self.gguf_writer.add_bos_token_id(71013)
+        # self.gguf_writer.add_eos_token_id(71013)
+
+    def write_tensors(self):
+        block_count = self.hparams.get("num_layers", self.hparams.get("num_hidden_layers"))
+        tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count)
+
+        for name, data_torch in self.get_tensors():
+            if name.endswith(".self_attention.rotary_emb.inv_freq"):
+                continue
+            old_dtype = data_torch.dtype
+            # TODO: FP16 conversion produces garbage outputs. (Q8_0 does not, so..?)
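
# ----------------------------------------------------------------------------
# [editor's sketch] For reference, the per-tensor dtype policy repeated in the
# write_tensors methods above can be summarized as one function; convert_dtype
# is a hypothetical helper, not part of this patch. PersimmonModel's loop
# continues right below and deliberately forces float32 instead, because of
# the FP16 TODO just above.
import numpy as np

def convert_dtype(data: np.ndarray, ftype: int, name: str) -> np.ndarray:
    # ftype 0 -> f32 output: widen any f16; ftype 1 -> f16 output: only 2-D
    # ".weight" tensors are stored as f16, everything else stays f32
    if ftype == 0 and data.dtype == np.float16:
        return data.astype(np.float32)
    if ftype == 1 and data.dtype == np.float16 and data.ndim == 1:
        return data.astype(np.float32)
    if ftype == 1 and data.dtype == np.float32 and name.endswith(".weight") and data.ndim == 2:
        return data.astype(np.float16)
    return data

assert convert_dtype(np.zeros((4, 4), np.float32), 1, "blk.0.attn_q.weight").dtype == np.float16
assert convert_dtype(np.zeros(4, np.float16), 1, "blk.0.attn_norm.bias").dtype == np.float32
# ----------------------------------------------------------------------------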
+ data = data_torch.to(torch.float32).squeeze().numpy() + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + n_dims = len(data.shape) + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + self.gguf_writer.add_tensor(new_name, data) + + +###### CONVERSION LOGIC ###### + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Convert a huggingface model to a GGML compatible file") + parser.add_argument( + "--vocab-only", action="store_true", + help="extract only the vocab", + ) + parser.add_argument( + "--outfile", type=Path, + help="path to write to; default: based on input", + ) + parser.add_argument( + "--outtype", type=str, choices=["f32", "f16"], default="f16", + help="output format - use f32 for float32, f16 for float16", + ) + parser.add_argument("--bigendian", action="store_true", help="model is executed on big endian machine") + parser.add_argument( + "model", type=Path, + help="directory containing model file", + ) + + return parser.parse_args() + + +args = parse_args() + +dir_model = args.model +if not dir_model.is_dir(): + print(f'Error: {args.model} is not a directory', file=sys.stderr) + sys.exit(1) + +ftype_map = { + "f32": gguf.GGMLQuantizationType.F32, + "f16": gguf.GGMLQuantizationType.F16, +} + +if args.outfile is not None: + fname_out = args.outfile +else: + # output in the same directory as the model by default + fname_out = dir_model / f'ggml-model-{args.outtype}.gguf' + +print(f"Loading model: {dir_model.name}") + +hparams = Model.load_hparams(dir_model) + +model_class = Model.from_model_architecture(hparams["architectures"][0]) +model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian) + +print("Set model parameters") +model_instance.set_gguf_parameters() + +print("Set model tokenizer") +model_instance.set_vocab() + +if args.vocab_only: + print(f"Exporting model vocab to '{fname_out}'") + model_instance.write_vocab() +else: + print(f"Exporting model to '{fname_out}'") + model_instance.write() + +print(f"Model successfully exported to '{fname_out}'") diff --git a/convert-mpt-hf-to-gguf.py b/convert-mpt-hf-to-gguf.py deleted file mode 100755 index 70d154b3f..000000000 --- a/convert-mpt-hf-to-gguf.py +++ /dev/null @@ -1,227 +0,0 @@ -#!/usr/bin/env python3 -# HF mpt--> gguf conversion - -from __future__ import annotations - -import argparse -import json -import os -import struct -import sys -from pathlib import Path -from typing import Any - -import numpy as np -import torch -from transformers import AutoTokenizer # type: ignore[import] - -if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf')) -import gguf - - -def count_model_parts(dir_model: Path) -> int: - num_parts = 0 - for filename in os.listdir(dir_model): - if filename.startswith("pytorch_model-"): - num_parts += 1 - - if num_parts > 0: - print("gguf: found " + str(num_parts) + " model parts") - return num_parts - - -def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Convert an MPT model to a GGML compatible file") - parser.add_argument( - "--vocab-only", action="store_true", - help="extract only the vocab", - ) - parser.add_argument( - "--outfile", type=Path, - help="path to write to; default: based on input", - ) - parser.add_argument( - "model", type=Path, - help="directory containing model file, or model file itself (*.bin)", - ) - 
parser.add_argument( - "ftype", type=int, choices=[0, 1], default=1, nargs='?', - help="output format - use 0 for float32, 1 for float16", - ) - return parser.parse_args() - -args = parse_args() - -dir_model = args.model -ftype = args.ftype -if not dir_model.is_dir(): - print(f'Error: {args.model} is not a directory', file = sys.stderr) - sys.exit(1) - -# possible tensor data types -# ftype == 0 -> float32 -# ftype == 1 -> float16 - -# map from ftype to string -ftype_str = ["f32", "f16"] - -if args.outfile is not None: - fname_out = args.outfile -else: - # output in the same directory as the model by default - fname_out = dir_model / f'ggml-model-{ftype_str[ftype]}.gguf' - -print("gguf: loading model "+dir_model.name) - -with open(dir_model / "config.json", "r", encoding="utf-8") as f: - hparams = json.load(f) - -if hparams["architectures"][0] != "MPTForCausalLM": - print("Model architecture not supported: " + hparams["architectures"][0]) - - sys.exit() - -# get number of model parts -num_parts = count_model_parts(dir_model) - -ARCH=gguf.MODEL_ARCH.MPT -gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) - -print("gguf: get model metadata") - -block_count = hparams["n_layers"] - -gguf_writer.add_name(dir_model.name) -gguf_writer.add_context_length(hparams["max_seq_len"]) -gguf_writer.add_embedding_length(hparams["d_model"]) -gguf_writer.add_block_count(block_count) -gguf_writer.add_feed_forward_length(4 * hparams["d_model"]) -gguf_writer.add_head_count(hparams["n_heads"]) -if kv_n_heads := hparams["attn_config"].get("kv_n_heads"): - gguf_writer.add_head_count_kv(kv_n_heads) -gguf_writer.add_layer_norm_eps(1e-05) -if hparams["attn_config"]["clip_qkv"] is not None: - gguf_writer.add_clamp_kqv(hparams["attn_config"]["clip_qkv"]) -gguf_writer.add_max_alibi_bias(hparams["attn_config"]["alibi_bias_max"]) - -# TOKENIZATION - -print("gguf: get tokenizer metadata") - -tokens: list[bytearray] = [] -scores: list[float] = [] -toktypes: list[int] = [] - -# gpt2 tokenizer -gguf_writer.add_tokenizer_model("gpt2") - -print("gguf: get gpt2 tokenizer vocab") - -# MPT token embedding tensors have dimension 50432 (hparams["vocab_size"]), but -# there are only 50254 (len(tokenizer.vocab)) tokens in the vocab, presumably to -# accomodate some "reserved" tokens; this is causing problems down the line in -# llama.cpp, so we pad the vocab with dummy tokens: - -vocab_size = hparams["vocab_size"] - -# ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py -tokenizer = AutoTokenizer.from_pretrained(dir_model) - -added_vocab = tokenizer.get_added_vocab() -reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} - -for i in range(vocab_size): - if i not in reverse_vocab: - tokens.append(f"[PAD{i}]") - toktypes.append(gguf.TokenType.USER_DEFINED) - elif reverse_vocab[i] in added_vocab: - tokens.append(reverse_vocab[i]) - if tokenizer.added_tokens_decoder[i].special: - toktypes.append(gguf.TokenType.CONTROL) - else: - toktypes.append(gguf.TokenType.USER_DEFINED) - else: - tokens.append(reverse_vocab[i]) - toktypes.append(gguf.TokenType.NORMAL) - -gguf_writer.add_token_list(tokens) -gguf_writer.add_token_types(toktypes) - -special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) -special_vocab.add_to_gguf(gguf_writer) - -# TENSORS - -tensor_map = gguf.get_tensor_name_map(ARCH,block_count) - -# tensor info -print("gguf: get tensor metadata") - -if num_parts == 0: - part_names = iter(("pytorch_model.bin",)) -else: - part_names = ( - 
f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) - ) - -for part_name in part_names: - if args.vocab_only: - break - print("gguf: loading model part '" + part_name + "'") - model_part = torch.load(f"{dir_model}/{part_name}", map_location="cpu") - - for name in model_part.keys(): - data = model_part[name] - - old_dtype = data.dtype - - # convert any unsupported data types to float32 - if data.dtype != torch.float16 and data.dtype != torch.float32: - data = data.to(torch.float32) - - data = data.squeeze().numpy() - - # map tensor names - new_name = tensor_map.get_name(name, try_suffixes = (".weight", ".bias")) - if new_name is None: - print("Cannot map tensor '" + name + "'") - continue # for the sake of compatibility with some old published models, don't quit - sys.exit() - - n_dims = len(data.shape) - data_dtype = data.dtype - - # if f32 desired, convert any float16 to float32 - if ftype == 0 and data_dtype == np.float16: - data = data.astype(np.float32) - - # TODO: Why cant we use these float16 as-is? There should be not reason to store float16 as float32 - if ftype == 1 and data_dtype == np.float16 and n_dims == 1: - data = data.astype(np.float32) - - # if f16 desired, convert any float32 2-dim weight tensors to float16 - if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - - print(new_name + ", n_dims = " + str(n_dims) + ", " + str(old_dtype) + " --> " + str(data.dtype)) - - gguf_writer.add_tensor(new_name, data) - - # note: MPT output is tied to (same as) wte in original model; - # for easier implementation in llama.cpp it's duplicated in GGUF, though :/ - if new_name == "token_embd.weight": - gguf_writer.add_tensor("output.weight", data) - -print("gguf: write header") -gguf_writer.write_header_to_file() -print("gguf: write metadata") -gguf_writer.write_kv_data_to_file() -if not args.vocab_only: - print("gguf: write tensors") - gguf_writer.write_tensors_to_file() - -gguf_writer.close() - -print(f"gguf: model successfully exported to '{fname_out}'") -print("") diff --git a/convert-refact-hf-to-gguf.py b/convert-refact-hf-to-gguf.py deleted file mode 100755 index f0cfe84d8..000000000 --- a/convert-refact-hf-to-gguf.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python3 -# HF refact--> gguf conversion - -from __future__ import annotations - -import argparse -import json -import os -import sys -from pathlib import Path - -import numpy as np -import torch -from transformers import AutoTokenizer # type: ignore[import] - -if "NO_LOCAL_GGUF" not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / "gguf-py" / "gguf")) -import gguf - -def count_model_parts(dir_model: Path) -> int: - num_parts = 0 - for filename in os.listdir(dir_model): - if filename.startswith("pytorch_model-"): - num_parts += 1 - - if num_parts > 0: - print("gguf: found " + str(num_parts) + " model parts") - return num_parts - - -def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser( - description="Convert a Refact model to a GGML compatible file" - ) - parser.add_argument( - "--vocab-only", - action="store_true", - help="extract only the vocab", - ) - parser.add_argument( - "--outfile", - type=Path, - help="path to write to; default: based on input", - ) - parser.add_argument( - "model", - type=Path, - help="directory containing model file, or model file itself (*.bin)", - ) - parser.add_argument( - "ftype", - type=int, - choices=[0, 1], - default=1, - nargs="?", - help="output format - use 
0 for float32, 1 for float16", - ) - return parser.parse_args() - - -args = parse_args() - -dir_model = args.model -ftype = args.ftype -if not dir_model.is_dir(): - print(f"Error: {args.model} is not a directory", file=sys.stderr) - sys.exit(1) - -# possible tensor data types -# ftype == 0 -> float32 -# ftype == 1 -> float16 - -# map from ftype to string -ftype_str = ["f32", "f16"] - -if args.outfile is not None: - fname_out = args.outfile -else: - # output in the same directory as the model by default - fname_out = dir_model / f"ggml-model-{ftype_str[ftype]}.gguf" - -print("gguf: loading model " + dir_model.name) - -with open(dir_model / "config.json", "r", encoding="utf-8") as f: - hparams = json.load(f) - -if hparams["architectures"][0] != "GPTRefactForCausalLM": - print("Model architecture not supported: " + hparams["architectures"][0]) - - sys.exit(1) - -# get number of model parts -num_parts = count_model_parts(dir_model) - -ARCH = gguf.MODEL_ARCH.REFACT -gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) - -print("gguf: get model metadata") - -# Get refact feed forward dimension -hidden_dim = hparams["n_embd"] -inner_dim = 4 * hidden_dim -hidden_dim = int(2 * inner_dim / 3) -multiple_of = 256 -ff_dim = multiple_of * ((hidden_dim + multiple_of - 1) // multiple_of) - -block_count = hparams["n_layer"] - -gguf_writer.add_name("Refact") -# refact uses Alibi. So this is from config.json which might be used by training. -gguf_writer.add_context_length(hparams["n_positions"]) -gguf_writer.add_embedding_length(hparams["n_embd"]) - -gguf_writer.add_feed_forward_length(ff_dim) -gguf_writer.add_block_count(block_count) -gguf_writer.add_head_count(hparams["n_head"]) -gguf_writer.add_head_count_kv(1) -gguf_writer.add_layer_norm_rms_eps(hparams["layer_norm_epsilon"]) -gguf_writer.add_file_type(ftype) - -# TOKENIZATION - -print("gguf: get tokenizer metadata") - -tokens: list[bytearray] = [] -scores: list[float] = [] -toktypes: list[int] = [] - -# gpt2 tokenizer -gguf_writer.add_tokenizer_model("gpt2") - -print("gguf: get gpt2 tokenizer vocab") - -# ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py -tokenizer = AutoTokenizer.from_pretrained(dir_model) - -# The number of tokens in tokenizer.json can differ from the expected vocab size. 
-# This causes downstream issues with mismatched tensor sizes when running the inference -vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) -assert max(tokenizer.vocab.values()) < vocab_size - -added_vocab = tokenizer.get_added_vocab() -reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} - -for i in range(vocab_size): - if i not in reverse_vocab: - tokens.append(f"[PAD{i}]") - toktypes.append(gguf.TokenType.USER_DEFINED) - elif reverse_vocab[i] in added_vocab: - tokens.append(reverse_vocab[i]) - if tokenizer.added_tokens_decoder[i].special: - toktypes.append(gguf.TokenType.CONTROL) - else: - toktypes.append(gguf.TokenType.USER_DEFINED) - else: - tokens.append(reverse_vocab[i]) - toktypes.append(gguf.TokenType.NORMAL) - -gguf_writer.add_token_list(tokens) -gguf_writer.add_token_types(toktypes) - -special_vocab = gguf.SpecialVocab(dir_model, load_merges=True, n_vocab = len(tokens)) -special_vocab.add_to_gguf(gguf_writer) - -# TENSORS - -tensor_map = gguf.get_tensor_name_map(ARCH, block_count) - -# params for qkv transform -n_head = hparams["n_head"] -n_head_kv = 1 - -head_dim = hparams["n_embd"] // n_head - -# tensor info -print("gguf: get tensor metadata") - -if num_parts == 0: - part_names = iter(("pytorch_model.bin",)) -else: - part_names = ( - f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) - ) -for part_name in part_names: - if args.vocab_only: - break - print("gguf: loading model part '" + part_name + "'") - model_part = torch.load(dir_model / part_name, map_location="cpu") - - for i in range(block_count): - if f"transformer.h.{i}.attn.kv.weight" in model_part: - data = model_part[f"transformer.h.{i}.attn.kv.weight"] - model_part[f"model.layers.{i}.self_attn.k_proj.weight"] = data[ - : n_head_kv * head_dim - ] - model_part[f"model.layers.{i}.self_attn.v_proj.weight"] = data[ - n_head_kv * head_dim : - ] - del model_part[f"transformer.h.{i}.attn.kv.weight"] - if f"transformer.h.{i}.attn.q.weight" in model_part: - model_part[f"model.layers.{i}.self_attn.q_proj.weight"] = model_part[ - f"transformer.h.{i}.attn.q.weight" - ] - del model_part[f"transformer.h.{i}.attn.q.weight"] - if f"transformer.h.{i}.mlp.gate_up_proj.weight" in model_part: - data = model_part[f"transformer.h.{i}.mlp.gate_up_proj.weight"] - model_part[f"model.layers.{i}.mlp.gate_proj.weight"] = data[:ff_dim] - model_part[f"model.layers.{i}.mlp.up_proj.weight"] = data[ff_dim:] - del model_part[f"transformer.h.{i}.mlp.gate_up_proj.weight"] - - for name in model_part.keys(): - data = model_part[name] - - old_dtype = data.dtype - - # convert any unsupported data types to float32 - if data.dtype != torch.float16 and data.dtype != torch.float32: - data = data.to(torch.float32) - - data = data.squeeze().numpy() - - # map tensor names - new_name = tensor_map.get_name(name, try_suffixes=(".weight",)) - if new_name is None: - print("Can not map tensor '" + name + "'") - sys.exit() - - n_dims = len(data.shape) - data_dtype = data.dtype - - # if f32 desired, convert any float16 to float32 - if ftype == 0 and data_dtype == np.float16: - data = data.astype(np.float32) - - # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 - if ftype == 1 and data_dtype == np.float16 and n_dims == 1: - data = data.astype(np.float32) - - # if f16 desired, convert any float32 2-dim weight tensors to float16 - if ( - ftype == 1 - and data_dtype == np.float32 - and name.endswith(".weight") - and n_dims == 2 - ): - data = data.astype(np.float16) - - print( - new_name - + ", n_dims = " - + str(n_dims) - + ", " - + str(old_dtype) - + " --> " - + str(data.dtype) - ) - - gguf_writer.add_tensor(new_name, data) - - -print("gguf: write header") -gguf_writer.write_header_to_file() -print("gguf: write metadata") -gguf_writer.write_kv_data_to_file() -if not args.vocab_only: - print("gguf: write tensors") - gguf_writer.write_tensors_to_file() - -gguf_writer.close() - -print(f"gguf: model successfully exported to '{fname_out}'") -print("") diff --git a/convert-starcoder-hf-to-gguf.py b/convert-starcoder-hf-to-gguf.py deleted file mode 100755 index a9bfed85e..000000000 --- a/convert-starcoder-hf-to-gguf.py +++ /dev/null @@ -1,210 +0,0 @@ -#!/usr/bin/env python3 -# HF starcoder --> gguf conversion - -from __future__ import annotations - -import argparse -import json -import os -import struct -import sys -from pathlib import Path -from typing import Any - -import numpy as np -import torch -from transformers import AutoTokenizer # type: ignore[import] - -if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf')) -import gguf - - -def count_model_parts(dir_model: Path) -> int: - num_parts = 0 - for filename in os.listdir(dir_model): - if filename.startswith("pytorch_model-"): - num_parts += 1 - - if num_parts > 0: - print("gguf: found " + str(num_parts) + " model parts") - return num_parts - - -def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Convert a StarCoder model to a GGML compatible file") - parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") - parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") - parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.bin)") - parser.add_argument("ftype", type=int, help="output format - use 0 for float32, 1 for float16", choices=[0, 1], default = 1) - return parser.parse_args() - -args = parse_args() - -dir_model = args.model -ftype = args.ftype -if not dir_model.is_dir(): - print(f'Error: {args.model} is not a directory', file = sys.stderr) - sys.exit(1) - -# possible tensor data types -# ftype == 0 -> float32 -# ftype == 1 -> float16 - -# map from ftype to string -ftype_str = ["f32", "f16"] - -if args.outfile is not None: - fname_out = args.outfile -else: - # output in the same directory as the model by default - fname_out = dir_model / f'ggml-model-{ftype_str[ftype]}.gguf' - -print("gguf: loading model "+dir_model.name) - -with open(dir_model / "config.json", "r", encoding="utf-8") as f: - hparams = json.load(f) - -if hparams["architectures"][0] != "GPTBigCodeForCausalLM": - print("Model architecture not supported: " + hparams["architectures"][0]) - - sys.exit(1) - -# get number of model parts -num_parts = count_model_parts(dir_model) - -ARCH=gguf.MODEL_ARCH.STARCODER -gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH]) - -print("gguf: get model metadata") - -block_count = hparams["n_layer"] - -gguf_writer.add_name("StarCoder") -gguf_writer.add_context_length(hparams["n_positions"]) 
-gguf_writer.add_embedding_length(hparams["n_embd"]) -gguf_writer.add_feed_forward_length(4 * hparams["n_embd"]) -gguf_writer.add_block_count(block_count) -gguf_writer.add_head_count(hparams["n_head"]) -gguf_writer.add_head_count_kv(1) -gguf_writer.add_layer_norm_eps(hparams["layer_norm_epsilon"]) -gguf_writer.add_file_type(ftype) - -# TOKENIZATION - -print("gguf: get tokenizer metadata") - -tokens: list[bytearray] = [] -scores: list[float] = [] -toktypes: list[int] = [] - -# gpt2 tokenizer -gguf_writer.add_tokenizer_model("gpt2") - -print("gguf: get gpt2 tokenizer vocab") - -# ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py -tokenizer = AutoTokenizer.from_pretrained(dir_model) - -# The number of tokens in tokenizer.json can differ from the expected vocab size. -# This causes downstream issues with mismatched tensor sizes when running the inference -vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) -assert max(tokenizer.vocab.values()) < vocab_size - -added_vocab = tokenizer.get_added_vocab() -reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.vocab.items()} - -for i in range(vocab_size): - if i not in reverse_vocab: - tokens.append(f"[PAD{i}]") - toktypes.append(gguf.TokenType.USER_DEFINED) - elif reverse_vocab[i] in added_vocab: - tokens.append(reverse_vocab[i]) - if tokenizer.added_tokens_decoder[i].special: - toktypes.append(gguf.TokenType.CONTROL) - else: - toktypes.append(gguf.TokenType.USER_DEFINED) - else: - tokens.append(reverse_vocab[i]) - toktypes.append(gguf.TokenType.NORMAL) - -gguf_writer.add_token_list(tokens) -gguf_writer.add_token_types(toktypes) -special_vocab = gguf.SpecialVocab(dir_model, load_merges = True, n_vocab = len(tokens)) -special_vocab.add_to_gguf(gguf_writer) - -# TENSORS - -tensor_map = gguf.get_tensor_name_map(ARCH,block_count) - -# params for qkv transform -n_head = hparams["n_head"] -n_head_kv = hparams["n_head_kv"] if "n_head_kv" in hparams else 1 - -head_dim = hparams["n_embd"] // n_head - -# tensor info -print("gguf: get tensor metadata") - -if num_parts == 0: - part_names = iter(("pytorch_model.bin",)) -else: - part_names = ( - f"pytorch_model-{n:05}-of-{num_parts:05}.bin" for n in range(1, num_parts + 1) - ) - -for part_name in part_names: - if args.vocab_only: - break - print("gguf: loading model part '" + part_name + "'") - model_part = torch.load(dir_model / part_name, map_location="cpu") - - for name in model_part.keys(): - data = model_part[name] - - old_dtype = data.dtype - - # convert any unsupported data types to float32 - if data.dtype != torch.float16 and data.dtype != torch.float32: - data = data.to(torch.float32) - - data = data.squeeze().numpy() - - # map tensor names - new_name = tensor_map.get_name(name, try_suffixes = (".weight", ".bias")) - if new_name is None: - print("Can not map tensor '" + name + "'") - sys.exit() - - n_dims = len(data.shape) - data_dtype = data.dtype - - # if f32 desired, convert any float16 to float32 - if ftype == 0 and data_dtype == np.float16: - data = data.astype(np.float32) - - # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 - if ftype == 1 and data_dtype == np.float16 and n_dims == 1: - data = data.astype(np.float32) - - # if f16 desired, convert any float32 2-dim weight tensors to float16 - if ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: - data = data.astype(np.float16) - - print(name, "=>", new_name + ", shape = " + str(data.shape) + ", " + str(old_dtype) + " --> " + str(data.dtype)) - - gguf_writer.add_tensor(new_name, data) - - -print("gguf: write header") -gguf_writer.write_header_to_file() -print("gguf: write metadata") -gguf_writer.write_kv_data_to_file() -if not args.vocab_only: - print("gguf: write tensors") - gguf_writer.write_tensors_to_file() - -gguf_writer.close() - -print(f"gguf: model successfully exported to '{fname_out}'") -print("") diff --git a/convert.py b/convert.py index 9110f1580..b0f44dbef 100755 --- a/convert.py +++ b/convert.py @@ -26,7 +26,7 @@ from pathlib import Path from typing import IO, TYPE_CHECKING, Any, Callable, Generator, Iterable, Literal, Sequence, TypeVar import numpy as np -from sentencepiece import SentencePieceProcessor # type: ignore[import] +from sentencepiece import SentencePieceProcessor import os if 'NO_LOCAL_GGUF' not in os.environ: @@ -328,7 +328,7 @@ class BpeVocab: def bpe_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: tokenizer = self.bpe_tokenizer - from transformers.models.gpt2 import tokenization_gpt2 # type: ignore[import] + from transformers.models.gpt2 import tokenization_gpt2 reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.items()} for i, _ in enumerate(tokenizer): diff --git a/mypy.ini b/mypy.ini index 55c168f2d..7215a05dd 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3,3 +3,4 @@ strict = true allow_untyped_calls = true allow_untyped_defs = true allow_incomplete_defs = true +disable_error_code = import-untyped From df9d1293defe783f42bc83af732d3c670552c541 Mon Sep 17 00:00:00 2001 From: Galunid Date: Fri, 10 Nov 2023 14:24:54 +0100 Subject: [PATCH 102/859] Unbreak persimmon after #3837 (#4010) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index d220ff3e9..d682d2864 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4209,7 +4209,7 @@ struct llm_build_context { struct ggml_tensor * Kcur = ggml_concat(ctx0, krotated, kpass); cb(Kcur, "Kcur", il); - struct ggml_tensor * Q = ggml_cont(ctx0, ggml_permute(ctx0, Qcur, 1, 2, 0, 3)); + struct ggml_tensor * Q = ggml_cont(ctx0, ggml_permute(ctx0, Qcur, 2, 1, 0, 3)); cb(Q, "Q", il); Kcur = ggml_cont(ctx0, ggml_permute(ctx0, Kcur, 2, 1, 0, 3)); From 4a4fd3eefad5bd17ab6bcd8e2181b4f62eae76cf Mon Sep 17 00:00:00 2001 From: Jhen-Jie Hong Date: Sat, 11 Nov 2023 06:49:33 +0800 Subject: [PATCH 103/859] server : allow continue edit on completion mode (#3950) * server : allow continue edit on completion mode * server : handle abort case in runCompletion * server : style improvement --- examples/server/index.html.hpp | 4807 +++++++++++++++-------------- examples/server/public/index.html | 38 +- 2 files changed, 2468 insertions(+), 2377 deletions(-) diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp index 207412513..f22b77e7f 100644 --- a/examples/server/index.html.hpp +++ b/examples/server/index.html.hpp @@ -229,850 +229,860 @@ unsigned char index_html[] = { 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x31, 0x30, 0x65, 0x6d, 0x3b, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x40, 0x6b, 0x65, 0x79, 0x66, 0x72, - 0x61, 0x6d, 0x65, 0x73, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, - 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, 0x70, 0x65, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x30, 0x25, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, - 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, - 0x6e, 0x3a, 0x20, 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x31, 0x30, - 0x30, 0x25, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, - 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x31, 0x30, - 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x3a, 0x20, 0x23, - 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x30, 0x30, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, - 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, - 0x6e, 0x64, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x3a, 0x20, 0x35, 0x30, 0x25, - 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, - 0x69, 0x6d, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, - 0x72, 0x2d, 0x67, 0x72, 0x61, 0x64, 0x69, 0x65, 0x6e, 0x74, 0x28, 0x39, - 0x30, 0x64, 0x65, 0x67, 0x2c, 0x20, 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x5b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x65, 0x64, 0x69, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5d, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x2d, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x77, 0x68, 0x69, 0x74, 0x65, 0x2d, 0x73, 0x70, 0x61, 0x63, 0x65, 0x3a, + 0x20, 0x70, 0x72, 0x65, 0x2d, 0x77, 0x72, 0x61, 0x70, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x75, 0x74, 0x6c, 0x69, 0x6e, 0x65, + 0x3a, 0x20, 0x30, 0x70, 0x78, 0x20, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x20, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x40, 0x6b, 0x65, 0x79, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x73, 0x20, 0x6c, + 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, + 0x70, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x30, + 0x25, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x31, 0x30, 0x30, 0x25, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, + 0x72, 0x6f, 0x75, 0x6e, 0x64, 
0x2d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, - 0x72, 0x2d, 0x31, 0x29, 0x2c, 0x20, 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, + 0x72, 0x2d, 0x31, 0x3a, 0x20, 0x23, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, + 0x30, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, - 0x72, 0x2d, 0x32, 0x29, 0x2c, 0x20, 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, + 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, + 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, + 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x73, 0x69, 0x7a, + 0x65, 0x3a, 0x20, 0x35, 0x30, 0x25, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, + 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x3a, + 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x2d, 0x67, 0x72, 0x61, 0x64, + 0x69, 0x65, 0x6e, 0x74, 0x28, 0x39, 0x30, 0x64, 0x65, 0x67, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x29, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x32, 0x29, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x29, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6e, 0x69, 0x6d, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, 0x70, 0x65, 0x20, 0x32, 0x73, + 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x20, 0x69, 0x6e, 0x66, 0x69, + 0x6e, 0x69, 0x74, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x40, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x20, + 0x28, 0x70, 0x72, 0x65, 0x66, 0x65, 0x72, 0x73, 0x2d, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x2d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x3a, 0x20, 0x64, + 0x61, 0x72, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, + 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, + 0x31, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, 0x30, 0x30, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, - 0x72, 0x2d, 0x31, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x61, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, - 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, - 0x69, 0x70, 0x65, 0x20, 0x32, 0x73, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, - 0x72, 0x20, 0x69, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x65, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x40, - 0x6d, 0x65, 0x64, 0x69, 0x61, 0x20, 0x28, 0x70, 0x72, 0x65, 0x66, 0x65, - 0x72, 0x73, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x65, 0x3a, 0x20, 0x64, 
0x61, 0x72, 0x6b, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, - 0x69, 0x6e, 0x67, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, - 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x3a, 0x20, 0x23, 0x32, 0x32, - 0x32, 0x32, 0x32, 0x32, 0x30, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, - 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, - 0x6e, 0x64, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x62, 0x6c, - 0x61, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, - 0x74, 0x79, 0x6c, 0x65, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x3c, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x6d, - 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x20, 0x68, 0x2c, 0x20, - 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x20, 0x65, 0x66, 0x66, 0x65, - 0x63, 0x74, 0x2c, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, - 0x2c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x20, 0x75, 0x73, - 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x20, 0x75, 0x73, 0x65, - 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x52, - 0x65, 0x66, 0x2c, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, - 0x20, 0x27, 0x2f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x2e, 0x6a, 0x73, 0x27, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, - 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, - 0x74, 0x65, 0x72, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, - 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x2d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x2d, 0x74, 0x6f, 0x2d, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x2e, - 0x6d, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x72, 0x20, 0x73, 0x6c, - 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, 0x3d, 0x20, 0x2d, 0x31, 0x3b, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x22, 0x54, 0x68, 0x69, - 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, - 0x72, 0x73, 0x61, 0x74, 0x69, 0x6f, 
0x6e, 0x20, 0x62, 0x65, 0x74, 0x77, - 0x65, 0x65, 0x6e, 0x20, 0x55, 0x73, 0x65, 0x72, 0x20, 0x61, 0x6e, 0x64, - 0x20, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x61, 0x20, 0x66, 0x72, - 0x69, 0x65, 0x6e, 0x64, 0x6c, 0x79, 0x20, 0x63, 0x68, 0x61, 0x74, 0x62, - 0x6f, 0x74, 0x2e, 0x20, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x69, 0x73, - 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x6b, 0x69, - 0x6e, 0x64, 0x2c, 0x20, 0x68, 0x6f, 0x6e, 0x65, 0x73, 0x74, 0x2c, 0x20, - 0x67, 0x6f, 0x6f, 0x64, 0x20, 0x61, 0x74, 0x20, 0x77, 0x72, 0x69, 0x74, - 0x69, 0x6e, 0x67, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6e, 0x65, 0x76, - 0x65, 0x72, 0x20, 0x66, 0x61, 0x69, 0x6c, 0x73, 0x20, 0x74, 0x6f, 0x20, - 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x72, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x20, 0x69, 0x6d, 0x6d, 0x65, - 0x64, 0x69, 0x61, 0x74, 0x65, 0x6c, 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, - 0x77, 0x69, 0x74, 0x68, 0x20, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, - 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, - 0x6e, 0x7b, 0x7b, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x7d, 0x7d, - 0x5c, 0x6e, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, - 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, - 0x20, 0x22, 0x7b, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x7d, 0x3a, 0x20, - 0x7b, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x7d, 0x22, - 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x20, - 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x2c, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x7c, 0x20, 0x22, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x72, 0x3a, 0x20, 0x22, 0x4c, - 0x6c, 0x61, 0x6d, 0x61, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x75, 0x73, 0x65, 0x72, 0x3a, 0x20, 0x22, 0x55, 0x73, 0x65, 0x72, - 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, - 0x20, 0x27, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, - 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, - 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x3a, 0x20, 0x34, 0x30, 0x30, - 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x3a, 0x20, 0x30, 0x2e, 0x37, - 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, - 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x3a, 0x20, 0x32, - 0x35, 0x36, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, - 0x74, 0x79, 0x2c, 0x20, 0x2d, 0x31, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, - 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 
0x3a, 0x20, 0x31, 0x2e, 0x31, 0x38, + 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, + 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x70, 0x6f, 0x70, 0x6f, + 0x76, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, + 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x62, 0x6c, 0x61, 0x63, 0x6b, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3e, 0x0a, + 0x0a, 0x20, 0x20, 0x3c, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x22, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x2c, 0x20, 0x68, 0x2c, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x2c, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x20, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, + 0x6c, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x2c, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x2c, 0x20, 0x43, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x69, 0x6e, 0x64, + 0x65, 0x78, 0x2e, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, + 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7d, 0x20, + 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x2d, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2d, 0x74, 0x6f, 0x2d, 0x67, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x2e, 0x6d, 0x6a, 0x73, 0x27, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, + 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x76, 0x61, 0x72, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, + 0x3d, 0x20, 0x2d, 0x31, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x3a, 0x20, 0x22, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, + 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, 0x55, 0x73, + 0x65, 0x72, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x2c, 0x20, 0x61, 0x20, 0x66, 0x72, 0x69, 0x65, 0x6e, 0x64, 0x6c, 0x79, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x62, 0x6f, 0x74, 0x2e, 0x20, 0x4c, 0x6c, + 0x61, 0x6d, 0x61, 0x20, 0x69, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, + 0x75, 0x6c, 0x2c, 0x20, 0x6b, 0x69, 0x6e, 0x64, 0x2c, 0x20, 0x68, 0x6f, + 0x6e, 0x65, 0x73, 0x74, 0x2c, 0x20, 
0x67, 0x6f, 0x6f, 0x64, 0x20, 0x61, + 0x74, 0x20, 0x77, 0x72, 0x69, 0x74, 0x69, 0x6e, 0x67, 0x2c, 0x20, 0x61, + 0x6e, 0x64, 0x20, 0x6e, 0x65, 0x76, 0x65, 0x72, 0x20, 0x66, 0x61, 0x69, + 0x6c, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, + 0x20, 0x61, 0x6e, 0x79, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x73, 0x20, 0x69, 0x6d, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x74, 0x65, 0x6c, + 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x77, 0x69, 0x74, 0x68, 0x20, 0x70, + 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x22, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, 0x6e, 0x7b, 0x7b, 0x68, 0x69, 0x73, + 0x74, 0x6f, 0x72, 0x79, 0x7d, 0x7d, 0x5c, 0x6e, 0x7b, 0x7b, 0x63, 0x68, + 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, 0x6e, 0x61, + 0x6d, 0x65, 0x7d, 0x7d, 0x3a, 0x20, 0x7b, 0x7b, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x7d, 0x7d, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3a, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, + 0x2c, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, + 0x20, 0x7c, 0x20, 0x22, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, + 0x61, 0x72, 0x3a, 0x20, 0x22, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x22, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x3a, + 0x20, 0x22, 0x55, 0x73, 0x65, 0x72, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, + 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, + 0x74, 0x3a, 0x20, 0x34, 0x30, 0x30, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x3a, 0x20, 0x30, 0x2e, 0x37, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, + 0x74, 0x5f, 0x6e, 0x3a, 0x20, 0x32, 0x35, 0x36, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x2c, 0x20, 0x2d, 0x31, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x73, + 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, + 0x3a, 0x20, 0x31, 0x2e, 0x31, 0x38, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, + 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, + 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, + 0x6b, 0x3a, 0x20, 0x34, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x3c, 0x3d, + 0x20, 0x30, 0x20, 0x74, 0x6f, 0x20, 0x75, 0x73, 0x65, 0x20, 0x76, 0x6f, + 0x63, 0x61, 0x62, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 
[This hunk rewrites the auto-generated hex dump of the server's embedded
web UI (apparently examples/server/index.html.hpp, generated from
examples/server/public/index.html). The removed and added byte runs decode
to the same JavaScript text; only the wrapping of the generated array
shifts, evidently because bytes were inserted earlier in the dump. For
readability, the decoded text of the affected region is shown once below,
verbatim, including the source's own typos.]

      top_p: 0.5, // 1.0 = disabled
      min_p: 0.05, // 0 = disabled
      tfs_z: 1.0, // 1.0 = disabled
      typical_p: 1.0, // 1.0 = disabled
      presence_penalty: 0.0, // 0.0 = disabled
      frequency_penalty: 0.0, // 0.0 = disabled
      mirostat: 0, // 0/1/2
      mirostat_tau: 5, // target entropy
      mirostat_eta: 0.1, // learning rate
      grammar: '',
      n_probs: 0, // no completion_probabilities,
      image_data: [],
      cache_prompt: true
    })
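[Note: these are the web UI's default sampling parameters; the UI sends
them as the JSON body of each request to the server's /completion
endpoint. A minimal sketch of such a request, assuming a server listening
on localhost:8080; the prompt and n_predict values are illustrative and
not part of the patch.]

    // Sketch: POST the defaults above to llama.cpp's server.
    async function completeOnce(prompt) {
      const response = await fetch('http://localhost:8080/completion', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          prompt,                  // illustrative input
          n_predict: 64,           // illustrative token budget
          top_p: 0.5,
          min_p: 0.05,
          tfs_z: 1.0,
          typical_p: 1.0,
          presence_penalty: 0.0,
          frequency_penalty: 0.0,
          mirostat: 0,
          mirostat_tau: 5,
          mirostat_eta: 0.1,
          cache_prompt: true,
        }),
      });
      const data = await response.json(); // non-streaming response
      return data.content;
    }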
    /* START: Support for storing prompt templates and parameters in borwser LocalStorage */

    const local_storage_storageKey = "llamacpp_server_local_storage";

    function local_storage_setDataFromObject(tag, content) {
      localStorage.setItem(local_storage_storageKey + '/' + tag, JSON.stringify(content));
    }

    function local_storage_setDataFromRawText(tag, content) {
      localStorage.setItem(local_storage_storageKey + '/' + tag, content);
    }

    function local_storage_getDataAsObject(tag) {
      const item = localStorage.getItem(local_storage_storageKey + '/' + tag);
      if (!item) {
        return null;
      } else {
        return JSON.parse(item);
      }
    }

    function local_storage_getDataAsRawText(tag) {
      const item = localStorage.getItem(local_storage_storageKey + '/' + tag);
      if (!item) {
        return null;
      } else {
        return item;
      }
    }
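[Note: the helpers namespace every entry under a common prefix, so a tag
lands at the localStorage key "llamacpp_server_local_storage/<tag>". A
quick round-trip of the helpers above (browser context assumed):]

    // Store an object, then read it back both raw and parsed.
    local_storage_setDataFromObject('example', { temperature: 0.7 });
    local_storage_getDataAsRawText('example');  // '{"temperature":0.7}'
    local_storage_getDataAsObject('example');   // { temperature: 0.7 }
    local_storage_getDataAsObject('missing');   // null (no such key)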
    // create a container for user templates and settings

    const savedUserTemplates = signal({})
    const selectedUserTemplate = signal({ name: '', template: { session: {}, params: {} } })

    // let's import locally saved templates and settings if there are any
    // user templates and settings are stored in one object
    // in form of { "templatename": "templatedata" } and { "settingstemplatename":"settingsdata" }

    console.log('Importing saved templates')

    let importedTemplates = local_storage_getDataAsObject('user_templates')

    if (importedTemplates) {
      // saved templates were successfuly imported.

      console.log('Processing saved templates and updating default template')
      params.value = { ...params.value, image_data: [] };

      //console.log(importedTemplates);
      savedUserTemplates.value = importedTemplates;
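[Note: for reference, the persisted 'user_templates' entry is one object
keyed by template name; the concrete fields depend on session.value and
params.value at save time. An illustrative shape:]

    // Illustrative contents of localStorage key
    // "llamacpp_server_local_storage/user_templates".
    {
      "default": {
        "session": { /* snapshot of session.value */ },
        "params":  { /* snapshot of params.value */ }
      }
      /* further named templates follow the same { session, params } shape */
    }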
      //override default template
      savedUserTemplates.value.default = { session: session.value, params: params.value }
      local_storage_setDataFromObject('user_templates', savedUserTemplates.value)
    } else {
      // no saved templates detected.

      console.log('Initializing LocalStorage and saving default template')

      savedUserTemplates.value = { "default": { session: session.value, params: params.value } }
      local_storage_setDataFromObject('user_templates', savedUserTemplates.value)
    }

    function userTemplateResetToDefault() {
      console.log('Reseting themplate to default')
      selectedUserTemplate.value.name = 'default';
      selectedUserTemplate.value.data = savedUserTemplates.value['default'];
    }
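[Note: the reset helper mutates fields of the signal's current value in
place. With Preact-style signals, which this UI appears to use given its
signal()/computed() calls, only assigning a new object to .value notifies
subscribers; the code copes by re-reading the value explicitly in the
apply step below. The contrast, as a sketch:]

    selectedUserTemplate.value.name = 'default'; // silent: subscribers not notified
    selectedUserTemplate.value = {               // notifies subscribers
      ...selectedUserTemplate.value,
      name: 'default',
    };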
    function userTemplateApply(t) {
      session.value = t.data.session;
      session.value = { ...session.value, image_selected: '' };
      params.value = t.data.params;
      params.value = { ...params.value, image_data: [] };
    }

    function userTemplateResetToDefaultAndApply() {
      userTemplateResetToDefault()
      userTemplateApply(selectedUserTemplate.value)
    }

    function userTemplateLoadAndApplyAutosaved() {
      // get autosaved last used template
      let lastUsedTemplate = local_storage_getDataAsObject('user_templates_last')

      if (lastUsedTemplate) {

        console.log('Autosaved template found, restoring')

        selectedUserTemplate.value = lastUsedTemplate
      }
      else {

        console.log('No autosaved template found, using default template')
        // no autosaved last used template was found, so load from default.

        userTemplateResetToDefault()
      }

      console.log('Applying template')
      // and update internal data from templates

      userTemplateApply(selectedUserTemplate.value)
    }
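[Note: the autosave slot 'user_templates_last' stores a single record
wrapping a template name together with its data; userTemplateApply(t)
reads it back through t.data. Illustrative shape, with a timestamped name
as produced by the autosave function below:]

    // Illustrative contents of localStorage key
    // "llamacpp_server_local_storage/user_templates_last".
    {
      "name": "UserTemplate-1697800000000",
      "data": {
        "session": { /* snapshot of session.value */ },
        "params":  { /* snapshot of params.value */ }
      }
    }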
    //console.log(savedUserTemplates.value)
    //console.log(selectedUserTemplate.value)

    function userTemplateAutosave() {
      console.log('Template Autosave...')
      if (selectedUserTemplate.value.name == 'default') {
        // we don't want to save over default template, so let's create a new one
        let newTemplateName = 'UserTemplate-' + Date.now().toString()
        let newTemplate = { 'name': newTemplateName, 'data': { 'session': session.value, 'params': params.value } }

        console.log('Saving as ' + newTemplateName)

        // save in the autosave slot
        local_storage_setDataFromObject('user_templates_last', newTemplate)

        // and load it back and apply
        userTemplateLoadAndApplyAutosaved()
      } else {
        local_storage_setDataFromObject('user_templates_last', { 'name': selectedUserTemplate.value.name, 'data': { 'session': session.value, 'params': params.value } })
      }
    }
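[Note: nothing in the decoded region invokes userTemplateAutosave(); any
call site lies outside this excerpt. One plausible wiring, assuming the
UI's signal library exposes Preact-style effect(), is to re-run the
autosave whenever the tracked signals change. A sketch, not the patch's
own code:]

    // Re-run autosave whenever session or params are replaced.
    effect(() => {
      session.value; // reading .value inside effect() subscribes to the signal
      params.value;
      userTemplateAutosave();
    });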
0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, + 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, + 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x2f, 0x2a, 0x20, 0x45, 0x4e, 0x44, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, + 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, + 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, + 0x69, 0x6e, 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, + 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, + 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, + 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, + 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, + 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, + 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, + 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, - 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x0a, + 0x3e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, + 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x68, 0x61, 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, + 0x20, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, + 0x68, 0x61, 0x74, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, + 0x65, 0x64, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, + 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6c, 0x65, 0x6e, + 
0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x20, 0x3d, 0x20, 0x28, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x73, 0x74, 0x72, 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, + 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, + 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, + 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x2c, 0x20, 0x2e, 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, + 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, - 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x74, 0x72, 0x2c, - 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x78, 0x74, 0x72, 0x61, - 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x74, 0x74, - 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, - 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x2c, 0x20, 0x2e, 0x2e, - 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x73, 0x74, - 
0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x41, 0x6c, - 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, 0x2e, 0x2a, 0x3f, 0x29, - 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, 0x28, 0x5f, 0x2c, 0x20, - 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, - 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x61, 0x73, 0x79, - 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, 0x61, 0x72, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x3d, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x28, 0x73, 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x41, 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, + 0x7b, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, + 0x2c, 0x20, 0x28, 0x5f, 0x2c, 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, + 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, + 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, + 0x63, 0x68, 0x61, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, + 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, + 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, + 0x79, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, + 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, + 0x6e, 0x69, 0x6e, 0x67, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 
0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, - 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x22, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, - 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, - 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, - 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, - 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3a, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, - 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, + 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, + 0x26, 0x26, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, - 
0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, - 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, - 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x24, 0x2f, - 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, - 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x3a, - 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, - 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, - 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, - 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, 0x27, 0x2c, 0x20, 0x73, - 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x64, - 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, - 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, - 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, 0x20, 0x21, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x22, 0x54, 0x68, - 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x20, 0x77, 0x61, 0x73, - 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, - 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, - 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, - 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, 0x6e, 0x27, 0x74, 0x20, 0x62, 0x65, - 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x2e, 0x22, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 
0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, - 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, - 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, - 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, 0x6e, 0x64, 0x20, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, - 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, - 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, - 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x7b, 0x7b, - 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x22, 0x2c, 0x20, 0x6d, 0x73, 0x67, - 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, - 0x65, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, - 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, - 
0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, - 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, - 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, - 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, - 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, - 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, - 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, - 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, - 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, - 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x20, 0x3d, 0x20, 0x60, 0x41, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, - 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, 0x61, 0x20, 0x63, 0x75, - 0x72, 0x69, 0x6f, 0x75, 0x73, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x20, - 0x61, 0x6e, 0x64, 0x20, 0x61, 0x6e, 0x20, 0x61, 0x72, 0x74, 0x69, 0x66, - 0x69, 0x63, 0x69, 0x61, 0x6c, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x6c, 0x6c, - 0x69, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, - 0x74, 0x61, 0x6e, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x65, 0x20, 0x61, 0x73, - 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x20, 0x67, 0x69, 0x76, 0x65, - 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x64, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x2c, 0x20, 0x61, 0x6e, 0x64, - 0x20, 0x70, 0x6f, 0x6c, 0x69, 0x74, 0x65, 0x20, 0x61, 0x6e, 0x73, 0x77, - 0x65, 0x72, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x68, - 0x75, 0x6d, 0x61, 0x6e, 0x27, 0x73, 0x20, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x5c, 0x6e, 0x55, 0x53, 0x45, 0x52, 0x3a, - 0x5b, 0x69, 0x6d, 0x67, 0x2d, 0x31, 0x30, 0x5d, 0x24, 0x7b, 0x6d, 0x73, - 
0x67, 0x7d, 0x5c, 0x6e, 0x41, 0x53, 0x53, 0x49, 0x53, 0x54, 0x41, 0x4e, - 0x54, 0x3a, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, - 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x2f, 0x5c, 0x6e, 0x24, 0x2f, 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, + 0x75, 0x6c, 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, + 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, + 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, + 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, + 0x69, 0x73, 0x68, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, + 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, + 0x20, 0x22, 0x27, 0x2c, 0x20, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, + 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, - 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, - 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, 0x2c, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x63, 0x68, - 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x2c, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, - 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, - 0x72, 0x7d, 0x7d, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 
0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, + 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, + 0x5f, 0x69, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, + 0x20, 0x21, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, + 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, + 0x72, 0x74, 0x28, 0x22, 0x54, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, + 0x65, 0x72, 0x20, 0x77, 0x61, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, + 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, + 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, + 0x6e, 0x27, 0x74, 0x20, 0x62, 0x65, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, + 0x64, 0x2e, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, + 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, + 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, + 0x6e, 0x64, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, + 0x6f, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, + 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, + 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, + 
0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x2c, 0x20, 0x5b, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, + 0x22, 0x2c, 0x20, 0x6d, 0x73, 0x67, 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, + 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, + 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, + 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, + 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, + 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, + 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, + 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, + 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, + 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x60, 0x41, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, + 0x6e, 0x20, 0x61, 0x20, 0x63, 0x75, 0x72, 0x69, 0x6f, 0x75, 0x73, 0x20, + 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x6e, + 0x20, 0x61, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x69, 0x61, 0x6c, 0x20, + 0x69, 0x6e, 0x74, 0x65, 0x6c, 0x6c, 0x69, 0x67, 0x65, 0x6e, 0x63, 0x65, + 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x2e, 0x20, + 0x54, 0x68, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, + 0x74, 0x20, 0x67, 0x69, 0x76, 0x65, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, + 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, + 0x64, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x6f, 0x6c, 0x69, 0x74, + 0x65, 0x20, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x73, 0x20, 0x74, 0x6f, + 0x20, 0x74, 0x68, 0x65, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x27, 0x73, + 0x20, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x5c, + 0x6e, 0x55, 0x53, 0x45, 0x52, 0x3a, 0x5b, 0x69, 0x6d, 0x67, 0x2d, 0x31, + 0x30, 0x5d, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x7d, 0x5c, 0x6e, 0x41, 0x53, + 0x53, 0x49, 0x53, 0x54, 0x41, 0x4e, 0x54, 0x3a, 0x60, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, + 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, + 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, + 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, + 0x3e, 0x22, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x28, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, + 0x29, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, + 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, + 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, + 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x22, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, @@ -1091,494 +1101,524 @@ unsigned char index_html[] = { 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, - 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, - 0x2e, 0x2e, 0x70, 0x61, 0x72, 
0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, - 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x22, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x20, - 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x62, 0x6f, 0x72, - 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, - 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x5d, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x6c, 0x6f, - 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x65, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, - 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x79, - 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x28, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, - 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, - 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, 0x64, 0x64, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, - 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, - 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, 0x3d, 0x20, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, - 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, 0x6c, 0x65, 0x52, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x3d, 0x20, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, - 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x61, 0x64, - 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, - 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, - 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, - 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, 0x62, 0x61, 0x73, - 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x2c, 0x20, - 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, - 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, 0x44, 0x61, 0x74, - 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x20, 0x3d, 0x20, 0x75, 0x73, 
0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, - 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, + 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, - 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x22, 0x22, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, - 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, 0x20, - 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x68, 0x69, - 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, 0x20, 0x26, 0x26, - 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x68, 0x69, 0x66, - 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, - 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x2c, 0x20, 0x22, 0x22, 0x29, 0x2e, 0x66, 0x69, 0x6e, 0x61, 0x6c, + 0x6c, 0x79, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, + 0x5b, 0x5f, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, + 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, + 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x20, + 0x3a, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, + 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x29, 0x0a, 0x20, 0x20, 
0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, + 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x62, 0x6f, + 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, + 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x6c, + 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x28, + 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, + 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, + 0x75, 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, + 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, 0x64, 0x64, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, + 0x72, 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2c, 0x20, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, 0x3d, 0x20, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, 0x6c, 0x65, + 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x3d, 0x20, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, + 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, + 0x74, 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7b, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, + 0x63, 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, 0x62, 0x61, + 0x73, 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x2c, + 0x20, 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, 0x44, 0x61, + 0x74, 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, 0x6d, - 0x20, 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x3d, 0x24, 0x7b, - 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, + 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, + 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x20, 0x3d, 0x20, 0x75, 
0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, + 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, + 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x22, + 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, + 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x68, + 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, 0x20, 0x26, + 0x26, 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x68, 0x69, + 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, + 0x6d, 0x20, 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x3d, 0x24, + 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, + 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, + 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x6c, + 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, 0x6e, 0x75, + 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, + 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, 0x24, 0x7b, + 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, + 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, 0x73, 0x6f, + 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x22, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x3d, - 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x6c, 0x6f, - 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, - 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, 0x24, 0x7b, 0x65, - 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, 0x7d, + 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, + 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, - 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, 0x73, 0x6f, 0x6d, - 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x22, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, - 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, 0x22, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, - 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, 0x20, 0x64, 0x69, - 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, - 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 
0x64, 0x3c, 0x2f, 0x62, 0x75, 0x74, - 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x75, - 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x7d, 0x3e, - 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, 0x61, 0x67, 0x65, - 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, - 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, - 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, 0x64, 0x69, - 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, 0x67, 0x65, + 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, + 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, 0x20, 0x64, + 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, 0x62, 0x75, + 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, - 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x43, - 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, - 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, - 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, - 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, 0x74, - 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, - 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, - 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x7d, + 0x3e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 
0x20, 0x49, 0x6d, 0x61, 0x67, + 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x72, - 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, 0x3c, - 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x60, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, 0x4c, - 0x6f, 0x67, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, - 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, - 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x63, 0x72, 0x6f, - 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, - 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, 0x65, 0x64, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, - 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, 0x72, - 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, - 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x70, 0x20, 0x2b, - 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, 0x66, 0x66, 0x73, - 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x2b, 0x20, 0x33, - 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, - 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, 0x2c, 0x20, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, - 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, 
0x67, 0x65, 0x73, - 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, - 0x20, 0x3d, 0x20, 0x28, 0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, 0x64, - 0x61, 0x74, 0x61, 0x5d, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, 0x72, - 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, - 0x61, 0x74, 0x61, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, - 0x73, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x20, 0x69, 0x73, 0x41, - 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x3d, 0x24, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, - 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, - 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, - 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, - 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, - 0x5c, 0x73, 0x2b, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, - 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, 0x61, - 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, 0x74, - 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x75, 0x73, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, - 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, - 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, - 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 
0x2f, 0x70, 0x3e, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, - 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, 0x3d, - 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, - 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, - 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, - 0x7b, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x69, 0x6d, 0x67, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x77, - 0x69, 0x64, 0x74, 0x68, 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, 0x7b, - 0x21, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, 0x73, - 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x60, - 0x20, 0x3a, 0x20, 0x60, 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, 0x3d, - 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x66, 0x6c, 0x61, - 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, - 0x65, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x60, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, - 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, - 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, - 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, - 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, - 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, - 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 
0x2e, 0x74, 0x61, - 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, - 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x20, - 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, - 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, - 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, - 0x3a, 0x20, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, - 0x28, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, - 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, - 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, - 0x3a, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, - 0x28, 0x70, 0x61, 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, - 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x29, 0x29, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x72, 0x61, - 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, - 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x27, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, - 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, - 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x67, 0x72, 0x61, 0x6d, - 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, - 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, - 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, - 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, 0x3d, - 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 
0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, - 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x28, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, - 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x73, - 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x2c, 0x27, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x72, 0x65, 0x64, 0x75, 0x63, 0x65, 0x28, 0x28, 0x61, 0x63, 0x63, - 0x2c, 0x20, 0x63, 0x75, 0x72, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x28, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, - 0x5b, 0x63, 0x75, 0x72, 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, - 0x3a, 0x20, 0x69, 0x20, 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, - 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, - 0x72, 0x74, 0x65, 0x72, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, - 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, - 0x60, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, - 0x6c, 0x65, 0x64, 0x3a, 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, - 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, - 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 
0x7d, 0x22, 0x3e, - 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, - 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, - 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, - 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, - 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, - 0x78, 0x7d, 0x22, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, - 0x73, 0x74, 0x65, 0x70, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, - 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, - 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, - 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, - 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, + 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, + 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x3c, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, + 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, + 0x69, 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, + 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, + 0x74, 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, - 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, - 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, - 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, - 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, - 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, - 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, - 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, - 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, + 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, + 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, + 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, + 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, + 0x4c, 0x6f, 0x67, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, + 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 
0x73, 0x63, 0x72, + 0x6f, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, 0x74, 0x6f, + 0x6d, 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, 0x65, 0x64, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x3d, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x2e, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, + 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, + 0x67, 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x70, 0x20, + 0x2b, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, 0x66, 0x66, + 0x73, 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x2b, 0x20, + 0x33, 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, + 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, 0x2c, 0x20, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, + 0x6c, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x73, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3d, 0x20, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, + 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x27, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x20, 0x3d, 0x20, 0x28, + 0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, + 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, + 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, + 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, 0x3e, 0x20, + 0x30, 0x20, 0x26, 0x26, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, + 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 
0x65, 0x6c, 0x73, + 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, + 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, + 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, + 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, + 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2b, 0x2f, + 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x69, 0x73, + 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, + 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, 0x61, 0x72, 0x6b, + 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, 0x74, 0x65, 0x78, + 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x75, 0x73, + 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, + 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, 0x74, 0x72, + 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, 0x3c, 0x2f, + 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x73, + 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, + 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, + 0x70, 0x61, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x60, 0x20, + 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, + 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, + 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, + 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 
0x6c, 0x65, 0x74, + 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, + 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x69, + 0x6e, 0x6e, 0x65, 0x72, 0x54, 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x68, + 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x63, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6b, 0x65, 0x79, + 0x3d, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6d, 0x67, 0x20, + 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, + 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, 0x7b, 0x21, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x60, 0x20, 0x3a, 0x20, 0x60, + 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, 0x3d, 0x22, 0x24, 0x7b, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x65, 0x64, 0x69, 0x74, 0x61, + 0x62, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x7d, 0x20, + 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, + 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x3d, 0x24, 0x7b, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, 0x74, 0x7d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, + 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, 0x68, 0x61, + 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, + 0x64, 0x69, 0x76, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, + 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 
0x53, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, + 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, + 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x6e, + 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x73, + 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x4d, 0x61, 0x74, 0x68, + 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, 0x61, 0x72, 0x73, 0x65, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x29, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, + 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, + 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, + 0x61, 0x6c, 0x28, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, + 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 
0x6f, 0x70, 0x4f, + 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, + 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, + 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, + 0x74, 0x65, 0x72, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, + 0x2c, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x64, 0x75, 0x63, + 0x65, 0x28, 0x28, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x2c, + 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x20, 0x2e, 0x2e, + 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x5b, 0x63, 0x75, 0x72, 0x2e, 0x74, + 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, 0x3a, 0x20, 0x69, 0x20, 0x7d, 0x29, + 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, + 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, + 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x66, + 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x60, 0x43, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x3a, 0x20, 0x24, + 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 
0x65, 0x7d, 0x60, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, + 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, + 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, + 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, + 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x73, 0x74, + 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x74, 0x65, 0x70, 0x7d, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, + 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, @@ -1586,1108 +1626,1137 @@ unsigned char index_html[] = { 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, - 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, - 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, - 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, - 0x42, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 
0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, - 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, + 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, - 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, - 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, - 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, - 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, - 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, - 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, - 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, - 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, - 0x65, 0x72, 0x79, 0x20, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, - 0x61, 0x76, 0x65, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x2c, 0x20, 0x5b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, - 0x6c, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 
0x22, 0x3e, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, - 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, - 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, - 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, - 0x73, 0x65, 0x20, 0x67, 0x62, 0x6e, 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, - 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, - 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, - 0x72, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, - 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, - 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, - 0x72, 0x3d, 0x22, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, - 0x6f, 0x70, 0x31, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, - 0x6f, 0x70, 0x33, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, - 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, - 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, - 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x22, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, - 0x7d, 0x3e, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, - 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, - 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, - 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, - 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x29, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, + 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, + 0x61, 0x6e, 
0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, + 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, + 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, + 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, + 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, + 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, + 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, + 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x75, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, + 0x65, 0x73, 
0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, + 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, + 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x72, 0x79, 0x20, 0x63, 0x68, + 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, - 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x46, 0x6f, 0x72, 0x3d, 0x22, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, - 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x74, 0x79, 0x70, 0x65, - 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, - 0x65, 0x6c, 0x64, 
0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, - 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, - 0x6f, 0x72, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, - 0x65, 0x72, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x75, 0x73, 0x65, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, - 0x72, 0x3d, 0x22, 0x62, 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, - 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, - 0x6d, 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, - 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, - 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 
0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, - 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, - 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, - 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, - 0x65, 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, - 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, - 0x73, 0x3d, 0x31, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, - 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, - 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x24, 0x7b, 0x47, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x28, 0x29, 0x7d, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, - 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, - 0x0a, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, - 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x55, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, - 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, - 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, 0x6c, - 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, - 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, - 0x68, 0x61, 0x74, 0x22, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x43, 0x68, - 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x3d, 0x22, 0x73, 0x6c, 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, - 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, - 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, - 0x79, 0x70, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, - 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x20, - 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x7d, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, - 0x2f, 0x3e, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, - 0x6e, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, - 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, - 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, - 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, - 0x72, 0x6d, 0x28, 
0x29, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, - 0x6f, 0x72, 0x6d, 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, - 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, - 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, - 0x20, 0x22, 0x50, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, - 0x38, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x65, - 0x64, 0x69, 0x63, 0x74, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, - 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, - 0x2e, 0x35, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, - 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x73, - 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x65, - 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, - 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, - 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x65, - 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, - 0x74, 0x20, 0x73, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x22, 0x2c, - 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, 0x6d, - 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, - 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, - 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, - 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, - 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x43, 0x6f, 0x6e, - 0x73, 0x69, 0x64, 0x65, 0x72, 0x20, 0x4e, 0x20, 0x74, 0x6f, 0x6b, 0x65, - 0x6e, 0x73, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, - 0x69, 0x7a, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, - 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, - 0x61, 0x74, 0x5f, 
0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x22, 0x2c, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, - 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x20, 0x7d, 0x29, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, - 0x6f, 0x70, 0x2d, 0x4b, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, - 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x30, - 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x22, - 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, - 0x70, 0x5f, 0x6b, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x50, - 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, - 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3a, 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, - 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x70, - 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x6e, 0x2d, 0x50, 0x20, 0x73, 0x61, - 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, - 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, - 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, - 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, - 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x20, 0x7d, 0x29, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x75, 0x6d, - 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x4d, 0x6f, 0x72, 0x65, 0x20, 0x6f, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x3c, 0x2f, 0x73, 0x75, 0x6d, 0x6d, 0x61, - 0x72, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, - 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, - 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3a, 0x20, 
0x22, 0x54, 0x46, 0x53, 0x2d, 0x5a, 0x22, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, - 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3a, 0x20, 0x22, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x22, 0x2c, 0x20, 0x73, - 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x66, 0x73, 0x5f, 0x7a, - 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, - 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x79, 0x70, 0x69, 0x63, 0x61, - 0x6c, 0x20, 0x50, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, - 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, - 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x79, 0x70, - 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, - 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, - 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, - 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x73, - 0x65, 0x6e, 0x63, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, - 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, - 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, - 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, - 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, - 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, - 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x46, 0x72, 0x65, 0x71, 0x75, - 0x65, 0x6e, 0x63, 0x79, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, - 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, - 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, - 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, - 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, - 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x66, 0x72, - 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, - 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x72, 0x20, - 0x2f, 0x3e, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, - 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x68, 0x72, 0x65, - 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, - 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, - 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x30, 0x22, 0x20, 0x63, 0x68, 0x65, - 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, - 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x30, 0x7d, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, - 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x6e, 0x6f, 0x20, 0x4d, 0x69, 0x72, 0x6f, - 0x73, 0x74, 0x61, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, - 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, - 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x31, 0x22, 0x20, 0x63, 0x68, - 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, - 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x31, 0x7d, 0x20, - 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, - 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, - 0x61, 0x74, 0x20, 0x76, 0x31, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x47, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, - 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, - 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x32, 0x22, 0x20, 0x63, - 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, - 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x32, 0x7d, - 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, - 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x20, 0x76, 0x32, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 
0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, - 0x22, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x74, 0x61, - 0x75, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2e, - 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, - 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, 0x73, - 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x20, 0x7d, 0x29, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, - 0x22, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x65, 0x74, - 0x61, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, - 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, 0x73, 0x74, - 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, - 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, - 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x53, 0x68, 0x6f, - 0x77, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, - 0x69, 0x65, 0x73, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, - 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, - 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, - 0x73, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, + 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, + 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, + 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, 0x73, 0x65, 0x20, 0x67, 0x62, 0x6e, + 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, + 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, 0x7d, 0x29, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x66, 0x69, 
0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, - 0x3d, 0x20, 0x28, 0x70, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, - 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, - 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x28, 0x31, 0x20, 0x2d, - 0x20, 0x70, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x20, 0x3d, 0x20, 0x4d, 0x61, - 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, - 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x60, 0x72, 0x67, 0x62, - 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, 0x2c, 0x24, 0x7b, 0x67, 0x7d, 0x2c, - 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, - 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, - 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x21, - 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, - 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, - 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3d, 0x3d, - 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, - 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x31, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x62, 0x79, - 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 
0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, - 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5b, 0x30, - 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x62, 0x79, - 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, 0x27, 0x29, 0x29, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x70, - 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, - 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6d, - 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x3d, 0x3e, 0x20, 0x28, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x70, - 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, - 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, 0x6f, 0x62, 0x5d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, - 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, 0x7b, 0x73, - 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2c, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, - 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, - 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, - 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, - 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, - 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, - 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x2e, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, - 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x66, 0x6f, 0x75, 0x6e, - 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, - 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x62, - 0x29, 0x20, 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, - 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, - 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x62, 0x2d, - 0x73, 0x65, 0x74, 0x22, 
0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x62, - 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, 0x70, 0x2c, 0x20, 0x69, 0x6e, - 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x69, 0x74, 0x6c, 0x65, - 0x3d, 0x24, 0x7b, 0x60, 0x70, 0x72, 0x6f, 0x62, 0x3a, 0x20, 0x24, 0x7b, - 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x7d, 0x60, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, - 0x20, 0x27, 0x30, 0x2e, 0x33, 0x65, 0x6d, 0x27, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, - 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, - 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, - 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, 0x3a, - 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, - 0x24, 0x7b, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x7d, - 0x3a, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, - 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, - 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x2a, 0x20, 0x31, 0x30, 0x30, 0x29, - 0x7d, 0x25, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x7d, + 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x22, 0x20, 0x72, 0x6f, + 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, + 0x22, 0x20, 0x6e, 0x61, 
0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x70, + 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x31, 0x2c, 0x70, 0x72, + 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x33, 0x22, 0x20, 0x6f, + 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, + 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, + 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, + 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x6f, 0x6e, 0x63, 0x6c, + 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, + 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x47, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x3e, 0x43, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x60, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x20, - 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x62, 0x61, - 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, - 0x72, 0x3a, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x7d, 0x7d, - 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, - 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x3e, + 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, + 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, + 0x74, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, - 0x2f, 0x67, 0x69, 0x6d, 0x29, 0x20, 0x3f, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x60, 0x20, 0x3a, 0x20, 0x6d, - 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x70, 0x6f, 0x6f, 0x72, 0x20, 
0x6d, 0x61, 0x6e, 0x73, 0x20, 0x6d, 0x61, - 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, - 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x64, 0x20, - 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x74, 0x65, 0x78, - 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, - 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x26, 0x2f, 0x67, 0x2c, - 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x46, 0x6f, 0x72, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, + 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3c, 0x2f, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, + 0x61, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, + 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x22, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, + 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, 0x74, + 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x75, 0x73, + 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, 0x65, 0x72, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x24, 0x7b, 0x73, 
[Hex dump elided for readability. This hunk belongs to the regenerated, machine-generated header that embeds the server's web UI as a comma-separated `0xNN` byte array (apparently `examples/server/index.html.hpp`). Decoded, the bytes cover the UI's settings form (user/bot name, prompt and chat-history templates, the chat/completion toggle, and the sampling fields `n_predict`, `temperature`, `repeat_penalty`, `repeat_last_n`, `top_k`, `top_p`, `min_p`, `tfs_z`, `typical_p`, `presence_penalty`, `frequency_penalty`, the Mirostat radios with `mirostat_tau`/`mirostat_eta`, and `n_probs`), the token-probability popover, the "poor mans markdown replacement" `Markdownish` renderer, `ModelGenerationInfo`, a simple `Popover` component, and the inlined preact-portal `Portal` class. The functional change visible in the interleaved -/+ runs is the insertion of `.replace(/&/g, '&amp;')` ahead of the existing `<`/`>` escapes in `Markdownish`; every byte after the insertion point is re-emitted at a shifted offset, which accounts for the long removed/re-added runs. The byte array continues below.]
0x61, 0x63, 0x74, 0x2d, + 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x28, 0x68, 0x74, 0x74, 0x70, + 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x69, 0x74, + 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, 0x2f, 0x6d, 0x61, 0x73, 0x74, + 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, + 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x6a, 0x73, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x2a, 0x20, 0x52, 0x65, 0x64, + 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, + 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x65, 0x73, 0x63, 0x65, + 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, 0x65, 0x6e, 0x20, 0x43, 0x53, + 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x2a, + 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, + 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, + 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, + 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x73, + 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, + 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, + 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, + 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, + 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, + 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, + 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, + 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x26, 0x26, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x2e, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x2e, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x2e, 0x72, 0x65, + 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, + 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x20, 0x3d, + 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x20, + 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, + 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x3a, 0x20, 0x6e, 0x6f, 0x64, + 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, + 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x3d, + 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, + 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x6c, + 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, 0x6f, 0x6c, 0x64, 0x20, 0x6e, + 0x6f, 0x64, 0x65, 0x20, 0x69, 0x66, 0x20, 0x6d, 0x6f, 0x76, 0x69, 0x6e, + 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, 0x3a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, + 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, + 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, - 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, - 0x6e, 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, - 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, - 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, - 0x74, 0x6f, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, - 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, - 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x3d, 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x78, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, - 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, - 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, - 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, + 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x26, + 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, + 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, + 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 
0x69, 0x6e, 0x74, 0x6f, + 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, + 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, + 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x7d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, + 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, + 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, + 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, 0x2d, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, + 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73, + 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, 0x72, 0x73, 0x74, 0x20, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, 0x20, 0x69, 0x74, 0x20, 0x65, + 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x61, 0x73, 0x20, 0x61, 0x20, + 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x20, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, + 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, + 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, + 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, - 0x68, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x68, 
0x61, 0x74, 0x20, 0x72, - 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73, 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, - 0x69, 0x72, 0x73, 0x74, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, - 0x66, 0x20, 0x69, 0x74, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, - 0x20, 0x61, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, - 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, - 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, - 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, - 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, - 0x78, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x28, 0x7b, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, - 0x6e, 0x20, 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, - 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, - 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, - 0x22, 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, - 0x70, 0x65, 0x7d, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x68, 0x31, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, - 0x70, 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, - 0x65, 0x72, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, - 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x24, 0x7b, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, - 0x64, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, - 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x6d, - 0x61, 0x69, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x69, 0x64, 0x3d, 0x22, 0x77, 0x72, 0x69, 
0x74, 0x65, 0x22, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, - 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, - 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, - 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, - 0x6e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, + 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x7b, 0x20, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7d, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, + 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, + 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x65, 0x2d, + 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x7d, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x31, 0x3e, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, - 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, - 0x7b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, - 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, - 0x72, 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, - 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, - 0x67, 0x65, 0x72, 0x67, 0x61, 0x6e, 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, - 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, - 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, - 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, - 0x61, 0x69, 0x22, 0x3e, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, - 0x2f, 0x61, 0x3e, 0x2e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, - 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 
0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, - 0x28, 0x41, 0x70, 0x70, 0x29, 0x2c, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, - 0x61, 0x69, 0x6e, 0x65, 0x72, 0x27, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x3c, 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, - 0x68, 0x65, 0x61, 0x64, 0x3e, 0x0a, 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, - 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, - 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, - 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, - 0x69, 0x64, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x22, 0x20, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x3d, 0x22, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x2f, 0x2a, 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, - 0x65, 0x3d, 0x22, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, - 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, - 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, - 0x69, 0x64, 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, - 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, - 0x79, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, - 0x0a + 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6d, 0x61, + 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x63, 0x68, 0x61, 0x74, + 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, + 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, + 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x6d, 0x61, 0x69, 0x6e, 0x3e, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x77, + 0x72, 0x69, 0x74, 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, + 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, + 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, + 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x3c, + 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, 0x72, 0x65, 0x64, 0x20, 0x62, 0x79, + 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, + 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x67, 0x65, 0x72, 0x67, 0x61, 0x6e, + 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, + 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, + 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, + 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, + 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x22, 0x3e, 0x67, 0x67, + 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, 0x2f, 0x61, 0x3e, 0x2e, 0x3c, 0x2f, + 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x28, 0x41, 0x70, 0x70, 0x29, 0x2c, + 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, + 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, + 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x27, + 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x3e, 0x0a, + 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, + 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x66, 0x69, + 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x20, 0x61, 0x63, 0x63, + 0x65, 0x70, 0x74, 0x3d, 0x22, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x2f, 0x2a, + 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x22, + 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x70, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, + 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a }; -unsigned int index_html_len = 32269; +unsigned int index_html_len = 33103; diff --git a/examples/server/public/index.html b/examples/server/public/index.html index 60659c147..175c52478 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -160,6 +160,11 @@ height: 10em; } + [contenteditable] { + display: inline-block; + white-space: pre-wrap; + outline: 0px solid transparent; + } @keyframes loading-bg-wipe { 0% { @@ -462,18 +467,23 @@ }, "{{char}}"); } - const runCompletion = async () => { + const runCompletion = () => { if (controller.value) { console.log('already running...'); return; } const { prompt } = session.value; transcriptUpdate([...session.value.transcript, ["", prompt]]); - await runLlama(prompt, { + runLlama(prompt, { ...params.value, slot_id: slot_id, stop: [], - }, ""); + }, "").finally(() => { + session.value.prompt = session.value.transcript.map(([_, data]) => + Array.isArray(data) ? 
data.map(msg => msg.content).join('') : data
+            ).join('');
+            session.value.transcript = [];
+        })
     }
 
     const stop = (e) => {
@@ -573,6 +583,7 @@
         }
       }, [messages])
 
+      const isCompletionMode = session.value.type === 'completion'
       const chatLine = ([user, data], index) => {
         let message
         const isArrayMessage = Array.isArray(data)
@@ -582,20 +593,31 @@
           const text = isArrayMessage ?
             data.map(msg => msg.content).join('').replace(/^\s+/, '') :
             data;
-          message = html`<${Markdownish} text=${template(text)} />`
+          message = isCompletionMode ?
+            text :
+            html`<${Markdownish} text=${template(text)} />`
         }
         if (user) {
           return html`<p key=${index}><strong>${template(user)}:</strong> ${message}</p>`
         } else {
-          return html`<p key=${index}>${message}</p>`
+          return isCompletionMode ?
+            html`<span key=${index}>${message}</span>` :
+            html`<p key=${index}>${message}</p>`
         }
       };
 
+      const handleCompletionEdit = (e) => {
+        session.value.prompt = e.target.innerText;
+        session.value.transcript = [];
+      }
+
       return html`
-        <section id="chat" ref=${container}>
-          ${messages.flatMap(chatLine)}
-        </section>`;
+        <div id="chat" ref=${container}>
+          <span contenteditable=${isCompletionMode} ref=${container} oninput=${handleCompletionEdit}>
+            ${messages.flatMap(chatLine)}
+          </span>
+        </div>`;
     };
 
     const ConfigForm = (props) => {

From 34b0a082074b073eb14c2bd93c0c070e20ddcd16 Mon Sep 17 00:00:00 2001
From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com>
Date: Fri, 10 Nov 2023 22:04:50 -0700
Subject: [PATCH 104/859] gguf-py: Refactor and allow reading/modifying
 existing GGUF files (#3981)

* gguf-py: Refactor and add file reading support
* Replay changes from #3871
  Credit to @cebtenzzre for that pull
* Various type annotation fixes.
* sort imports with isort (again)
* Fix missing return statement in add_tensor
* style cleanup with flake8
* fix NamedTuple and Enum usage
* Fix an issue with state init in GGUFReader
  Move examples to an examples/ directory
  Clean up examples
  Add an example of modifying keys in a GGUF file
  Update documentation with info on examples
  Try to support people importing gguf/gguf.py directly
* Damagage is not a word.
* Clean up gguf-py/examples/modify_gguf.py whitespace
  Co-authored-by: Jared Van Bortel
* Update gguf-py/examples/modify_gguf.py formatting
  Co-authored-by: Jared Van Bortel
* Update gguf-py/gguf/gguf_reader.py type hint
  Co-authored-by: Jared Van Bortel
* Make examples executable, formatting changes
* Add more information to GGUFReader and examples comments
* Include a gguf Python package version bump
* Add convert-gguf-endian.py script
* cleanup
* gguf-py : bump minor version
* Reorganize scripts
* Make GGUFReader endian detection less arbitrary
* Add JSON dumping support to gguf-dump.py
  Which I kind of regret now
* A few for gguf-dump.py cleanups
* Murder accidental tuple in gguf-py/scripts/gguf-dump.py
  Co-authored-by: Jared Van Bortel
* cleanup
* constants : remove unneeded type annotations
* fix python 3.8 compat
* Set up gguf- scripts in pyproject.toml
* And include scripts/__init__.py, derp
* convert.py: We can't currently support Q8_0 on big endian.
* gguf-py: SpecialVocab: Always try available sources for special token ids
  gguf-py: SpecialVocab: Try to load merges from merges.txt if not in tokenizer.json
  gguf-py: SpecialVocab: Add 'add_bos_token' type bools to GGUF metadata u
* cleanup
* Promote add_X_token to GGUF metadata for BOS and EOS

---------

Co-authored-by: Jared Van Bortel
Co-authored-by: Jared Van Bortel
---
 convert-baichuan-hf-to-gguf.py                  |    2 +-
 convert-llama-ggml-to-gguf.py                   |   24 +-
 convert-persimmon-to-gguf.py                    |    2 +-
 convert.py                                      |   16 +-
 .../convert-train-checkpoint-to-gguf.py         |    2 +-
 gguf-py/README.md                               |   10 +
 gguf-py/examples/writer.py                      |   40 +
 gguf-py/gguf/__init__.py                        |    6 +-
 gguf-py/gguf/constants.py                       |  470 +++++++
 gguf-py/gguf/gguf.py                            | 1149 +----------------
 gguf-py/gguf/gguf_reader.py                     |  264 ++++
 gguf-py/gguf/gguf_writer.py                     |  409 ++++++
 gguf-py/gguf/tensor_mapping.py                  |  257 ++++
 gguf-py/gguf/vocab.py                           |  164 +++
 gguf-py/pyproject.toml                          |    8 +-
 gguf-py/scripts/__init__.py                     |   12 +
 gguf-py/scripts/gguf-convert-endian.py          |  113 ++
 gguf-py/scripts/gguf-dump.py                    |  116 ++
 gguf-py/scripts/gguf-set-metadata.py            |   90 ++
 gguf-py/tests/test_gguf.py                      |    4 +-
 20 files changed, 1982 insertions(+), 1176 deletions(-)
 create mode 100755 gguf-py/examples/writer.py
 create mode 100644 gguf-py/gguf/constants.py
 create mode 100644 gguf-py/gguf/gguf_reader.py
 create mode 100644 gguf-py/gguf/gguf_writer.py
 create mode 100644 gguf-py/gguf/tensor_mapping.py
 create mode 100644 gguf-py/gguf/vocab.py
 create mode 100644 gguf-py/scripts/__init__.py
 create mode 100755 gguf-py/scripts/gguf-convert-endian.py
 create mode 100755 gguf-py/scripts/gguf-dump.py
 create mode 100755 gguf-py/scripts/gguf-set-metadata.py

diff --git a/convert-baichuan-hf-to-gguf.py b/convert-baichuan-hf-to-gguf.py
index 67ccbe99f..789602351 100755
--- a/convert-baichuan-hf-to-gguf.py
+++ b/convert-baichuan-hf-to-gguf.py
@@ -16,7 +16,7 @@ import torch
 from sentencepiece import SentencePieceProcessor  # type: ignore[import]
 
 if 'NO_LOCAL_GGUF' not in os.environ:
-    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf'))
+    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py'))
 import gguf
 
diff --git a/convert-llama-ggml-to-gguf.py b/convert-llama-ggml-to-gguf.py
index 871add64d..d898d81c4 100755
--- a/convert-llama-ggml-to-gguf.py
+++ b/convert-llama-ggml-to-gguf.py
@@ -12,29 +12,9 @@ import numpy as np
 
 import os
 if 'NO_LOCAL_GGUF' not in os.environ:
-    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf'))
+    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py'))
 import gguf
 
-# Note: Does not support GGML_QKK_64
-QK_K = 256
-# Items here are (block size, type size)
-GGML_QUANT_SIZES = {
-    gguf.GGMLQuantizationType.F32  : (1, 4),
-    gguf.GGMLQuantizationType.F16  : (1, 2),
-    gguf.GGMLQuantizationType.Q4_0 : (32, 2 + 16),
-    gguf.GGMLQuantizationType.Q4_1 : (32, 2 + 2 + 16),
-    gguf.GGMLQuantizationType.Q5_0 : (32, 2 + 4 + 16),
-    gguf.GGMLQuantizationType.Q5_1 : (32, 2 + 2 + 4 + 16),
-    gguf.GGMLQuantizationType.Q8_0 : (32, 2 + 32),
-    gguf.GGMLQuantizationType.Q8_1 : (32, 4 + 4 + 32),
-    gguf.GGMLQuantizationType.Q2_K : (256, 2 + 2 + QK_K // 16 + QK_K // 4),
-    gguf.GGMLQuantizationType.Q3_K : (256, 2 + QK_K // 4 + QK_K // 8 + 12),
-    gguf.GGMLQuantizationType.Q4_K : (256, 2 + 2 + QK_K // 2 + 12),
-    gguf.GGMLQuantizationType.Q5_K : (256, 2 + 2 + QK_K // 2 + QK_K // 8 + 12),
-    gguf.GGMLQuantizationType.Q6_K : (256, 2 + QK_K // 2 + QK_K // 4 + QK_K // 16),
-    gguf.GGMLQuantizationType.Q8_K : (256, 4 + QK_K + QK_K // 8),
-}
-
 class GGMLFormat(IntEnum):
     GGML = 0
     GGMF = 1
@@ -125,7 +105,7 
@@ class Tensor: (n_dims, name_len, dtype) = struct.unpack('<3I', data[offset:offset + 12]) assert n_dims >= 0 and n_dims <= 4, f'Invalid tensor dimensions {n_dims}' assert name_len < 4096, 'Absurd tensor name length' - quant = GGML_QUANT_SIZES.get(dtype) + quant = gguf.GGML_QUANT_SIZES.get(dtype) assert quant is not None, 'Unknown tensor type' (blksize, tysize) = quant offset += 12 diff --git a/convert-persimmon-to-gguf.py b/convert-persimmon-to-gguf.py index e022ffe46..240f87306 100644 --- a/convert-persimmon-to-gguf.py +++ b/convert-persimmon-to-gguf.py @@ -6,7 +6,7 @@ import argparse from pathlib import Path from sentencepiece import SentencePieceProcessor if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf')) + sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) import gguf def _flatten_dict(dct, tensors, prefix=None): diff --git a/convert.py b/convert.py index b0f44dbef..a4b87e088 100755 --- a/convert.py +++ b/convert.py @@ -3,11 +3,9 @@ from __future__ import annotations import argparse import concurrent.futures -import copy import enum import faulthandler import functools -import io import itertools import json import math @@ -23,14 +21,14 @@ from abc import ABCMeta, abstractmethod from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor from dataclasses import dataclass from pathlib import Path -from typing import IO, TYPE_CHECKING, Any, Callable, Generator, Iterable, Literal, Sequence, TypeVar +from typing import IO, TYPE_CHECKING, Any, Callable, Iterable, Literal, TypeVar import numpy as np from sentencepiece import SentencePieceProcessor import os if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf')) + sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) import gguf if TYPE_CHECKING: @@ -851,7 +849,7 @@ class OutputFile: elif isinstance(vocab, BpeVocab): self.gguf.add_tokenizer_model("gpt2") else: - raise ValueError(f'Unknown vocab type: Not BpeVocab or SentencePieceVocab') + raise ValueError('Unknown vocab type: Not BpeVocab or SentencePieceVocab') self.gguf.add_token_list(tokens) self.gguf.add_token_scores(scores) self.gguf.add_token_types(toktypes) @@ -905,7 +903,7 @@ class OutputFile: return dt.quantize(arr) @staticmethod - def write_all(fname_out: Path, ftype: GGMLFileType, params: Params, model: LazyModel, vocab: Vocab, svocab: gguf.SpecialVocab, concurrency: int = DEFAULT_CONCURRENCY, endianess=gguf.GGUFEndian.LITTLE) -> None: + def write_all(fname_out: Path, ftype: GGMLFileType, params: Params, model: LazyModel, vocab: Vocab, svocab: gguf.SpecialVocab, concurrency: int = DEFAULT_CONCURRENCY, endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE) -> None: check_vocab_size(params, vocab) of = OutputFile(fname_out, endianess=endianess) @@ -1114,11 +1112,15 @@ def do_dump_model(model_plus: ModelPlus) -> None: def main(args_in: list[str] | None = None) -> None: + output_choices = ["f32", "f16"] + if np.uint32(1) == np.uint32(1).newbyteorder("<"): + # We currently only support Q8_0 output on little endian systems. 
+ output_choices.append("q8_0") parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") - parser.add_argument("--outtype", choices=["f32", "f16", "q8_0"], help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") + parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") diff --git a/examples/train-text-from-scratch/convert-train-checkpoint-to-gguf.py b/examples/train-text-from-scratch/convert-train-checkpoint-to-gguf.py index 887ed2e21..ed93673bc 100644 --- a/examples/train-text-from-scratch/convert-train-checkpoint-to-gguf.py +++ b/examples/train-text-from-scratch/convert-train-checkpoint-to-gguf.py @@ -9,7 +9,7 @@ import numpy as np from pathlib import Path if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / '..' / '..' / 'gguf-py' / 'gguf')) + sys.path.insert(1, str(Path(__file__).parent / '..' / '..' / 'gguf-py')) import gguf # gguf constants diff --git a/gguf-py/README.md b/gguf-py/README.md index a28d8c57a..502b6a510 100644 --- a/gguf-py/README.md +++ b/gguf-py/README.md @@ -11,6 +11,16 @@ as an example for its usage. pip install gguf ``` +## API Examples/Simple Tools + +[examples/writer.py](https://github.com/ggerganov/llama.cpp/blob/master/gguf-py/examples/writer.py) — Generates `example.gguf` in the current directory to demonstrate generating a GGUF file. Note that this file cannot be used as a model. + +[scripts/gguf-dump.py](https://github.com/ggerganov/llama.cpp/blob/master/gguf-py/scripts/gguf-dump.py) — Dumps a GGUF file's metadata to the console. + +[scripts/gguf-set-metadata.py](https://github.com/ggerganov/llama.cpp/blob/master/gguf-py/scripts/gguf-set-metadata.py) — Allows changing simple metadata values in a GGUF file by key. + +[scripts/gguf-convert-endian.py](https://github.com/ggerganov/llama.cpp/blob/master/gguf-py/scripts/gguf-convert-endian.py) — Allows converting the endianness of GGUF files. 
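
As a quick taste of the reading side this patch adds, a script along these lines could sit next to `examples/writer.py` (an editorial sketch, not one of the bundled examples: the `model.gguf` filename is assumed, and it relies only on the `fields` and `tensors` attributes that the new `gguf_reader.py` provides):

```python
#!/usr/bin/env python3
# Sketch: print the metadata keys and tensor summary of an existing GGUF file.
import sys
from pathlib import Path

# Same local-package trick as examples/writer.py
sys.path.insert(0, str(Path(__file__).parent.parent))

from gguf import GGUFReader  # noqa: E402

reader = GGUFReader("model.gguf")  # memory-maps the file rather than copying it

# Each metadata field records its key and the GGUF value type(s) it was stored as.
for key, field in reader.fields.items():
    print(f"{key}: {field.types}")

# Tensor descriptors expose name, shape and quantization type without
# materializing the tensor data.
for tensor in reader.tensors:
    print(tensor.name, tensor.shape, tensor.tensor_type.name)
```

`scripts/gguf-dump.py` in this patch is essentially a fleshed-out version of this loop.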
+ ## Development Maintainers who participate in development of this package are advised to install it in editable mode: diff --git a/gguf-py/examples/writer.py b/gguf-py/examples/writer.py new file mode 100755 index 000000000..f39eed1af --- /dev/null +++ b/gguf-py/examples/writer.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 +import sys +from pathlib import Path + +import numpy as np + +# Necessary to load the local gguf package +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from gguf import GGUFWriter # noqa: E402 + + +# Example usage: +def writer_example() -> None: + # Example usage with a file + gguf_writer = GGUFWriter("example.gguf", "llama") + + gguf_writer.add_architecture() + gguf_writer.add_block_count(12) + gguf_writer.add_uint32("answer", 42) # Write a 32-bit integer + gguf_writer.add_float32("answer_in_float", 42.0) # Write a 32-bit float + gguf_writer.add_custom_alignment(64) + + tensor1 = np.ones((32,), dtype=np.float32) * 100.0 + tensor2 = np.ones((64,), dtype=np.float32) * 101.0 + tensor3 = np.ones((96,), dtype=np.float32) * 102.0 + + gguf_writer.add_tensor("tensor1", tensor1) + gguf_writer.add_tensor("tensor2", tensor2) + gguf_writer.add_tensor("tensor3", tensor3) + + gguf_writer.write_header_to_file() + gguf_writer.write_kv_data_to_file() + gguf_writer.write_tensors_to_file() + + gguf_writer.close() + + +if __name__ == '__main__': + writer_example() diff --git a/gguf-py/gguf/__init__.py b/gguf-py/gguf/__init__.py index f9b70a85b..110ab342c 100644 --- a/gguf-py/gguf/__init__.py +++ b/gguf-py/gguf/__init__.py @@ -1 +1,5 @@ -from .gguf import * +from .constants import * +from .gguf_reader import * +from .gguf_writer import * +from .tensor_mapping import * +from .vocab import * diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py new file mode 100644 index 000000000..bf1ccf669 --- /dev/null +++ b/gguf-py/gguf/constants.py @@ -0,0 +1,470 @@ +from __future__ import annotations + +import sys +from enum import Enum, IntEnum, auto +from typing import Any + +# +# constants +# + +GGUF_MAGIC = 0x46554747 # "GGUF" +GGUF_VERSION = 3 +GGUF_DEFAULT_ALIGNMENT = 32 + +# +# metadata keys +# + + +class Keys: + class General: + ARCHITECTURE = "general.architecture" + QUANTIZATION_VERSION = "general.quantization_version" + ALIGNMENT = "general.alignment" + NAME = "general.name" + AUTHOR = "general.author" + URL = "general.url" + DESCRIPTION = "general.description" + LICENSE = "general.license" + SOURCE_URL = "general.source.url" + SOURCE_HF_REPO = "general.source.huggingface.repository" + FILE_TYPE = "general.file_type" + + class LLM: + CONTEXT_LENGTH = "{arch}.context_length" + EMBEDDING_LENGTH = "{arch}.embedding_length" + BLOCK_COUNT = "{arch}.block_count" + FEED_FORWARD_LENGTH = "{arch}.feed_forward_length" + USE_PARALLEL_RESIDUAL = "{arch}.use_parallel_residual" + TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout" + + class Attention: + HEAD_COUNT = "{arch}.attention.head_count" + HEAD_COUNT_KV = "{arch}.attention.head_count_kv" + MAX_ALIBI_BIAS = "{arch}.attention.max_alibi_bias" + CLAMP_KQV = "{arch}.attention.clamp_kqv" + LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon" + LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon" + + class Rope: + DIMENSION_COUNT = "{arch}.rope.dimension_count" + FREQ_BASE = "{arch}.rope.freq_base" + SCALING_TYPE = "{arch}.rope.scaling.type" + SCALING_FACTOR = "{arch}.rope.scaling.factor" + SCALING_ORIG_CTX_LEN = "{arch}.rope.scaling.original_context_length" + SCALING_FINETUNED = "{arch}.rope.scaling.finetuned" + + class 
Tokenizer: + MODEL = "tokenizer.ggml.model" + LIST = "tokenizer.ggml.tokens" + TOKEN_TYPE = "tokenizer.ggml.token_type" + SCORES = "tokenizer.ggml.scores" + MERGES = "tokenizer.ggml.merges" + BOS_ID = "tokenizer.ggml.bos_token_id" + EOS_ID = "tokenizer.ggml.eos_token_id" + UNK_ID = "tokenizer.ggml.unknown_token_id" + SEP_ID = "tokenizer.ggml.seperator_token_id" + PAD_ID = "tokenizer.ggml.padding_token_id" + ADD_BOS = "tokenizer.ggml.add_bos_token" + ADD_EOS = "tokenizer.ggml.add_eos_token" + HF_JSON = "tokenizer.huggingface.json" + RWKV = "tokenizer.rwkv.world" + + +# +# recommended mapping of model tensor names for storage in gguf +# + + +class MODEL_ARCH(IntEnum): + LLAMA = auto() + FALCON = auto() + BAICHUAN = auto() + GPT2 = auto() + GPTJ = auto() + GPTNEOX = auto() + MPT = auto() + STARCODER = auto() + PERSIMMON = auto() + REFACT = auto() + BERT = auto() + BLOOM = auto() + + +class MODEL_TENSOR(IntEnum): + TOKEN_EMBD = auto() + TOKEN_EMBD_NORM = auto() + TOKEN_TYPES = auto() + POS_EMBD = auto() + OUTPUT = auto() + OUTPUT_NORM = auto() + ROPE_FREQS = auto() + ATTN_Q = auto() + ATTN_K = auto() + ATTN_V = auto() + ATTN_QKV = auto() + ATTN_OUT = auto() + ATTN_NORM = auto() + ATTN_NORM_2 = auto() + ATTN_ROT_EMBD = auto() + FFN_GATE = auto() + FFN_DOWN = auto() + FFN_UP = auto() + FFN_NORM = auto() + ATTN_Q_NORM = auto() + ATTN_K_NORM = auto() + + +MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { + MODEL_ARCH.LLAMA: "llama", + MODEL_ARCH.FALCON: "falcon", + MODEL_ARCH.BAICHUAN: "baichuan", + MODEL_ARCH.GPT2: "gpt2", + MODEL_ARCH.GPTJ: "gptj", + MODEL_ARCH.GPTNEOX: "gptneox", + MODEL_ARCH.MPT: "mpt", + MODEL_ARCH.STARCODER: "starcoder", + MODEL_ARCH.PERSIMMON: "persimmon", + MODEL_ARCH.REFACT: "refact", + MODEL_ARCH.BERT: "bert", + MODEL_ARCH.BLOOM: "bloom", +} + +TENSOR_NAMES: dict[MODEL_TENSOR, str] = { + MODEL_TENSOR.TOKEN_EMBD: "token_embd", + MODEL_TENSOR.TOKEN_EMBD_NORM: "token_embd_norm", + MODEL_TENSOR.TOKEN_TYPES: "token_types", + MODEL_TENSOR.POS_EMBD: "position_embd", + MODEL_TENSOR.OUTPUT_NORM: "output_norm", + MODEL_TENSOR.OUTPUT: "output", + MODEL_TENSOR.ROPE_FREQS: "rope_freqs", + MODEL_TENSOR.ATTN_NORM: "blk.{bid}.attn_norm", + MODEL_TENSOR.ATTN_NORM_2: "blk.{bid}.attn_norm_2", + MODEL_TENSOR.ATTN_QKV: "blk.{bid}.attn_qkv", + MODEL_TENSOR.ATTN_Q: "blk.{bid}.attn_q", + MODEL_TENSOR.ATTN_K: "blk.{bid}.attn_k", + MODEL_TENSOR.ATTN_V: "blk.{bid}.attn_v", + MODEL_TENSOR.ATTN_OUT: "blk.{bid}.attn_output", + MODEL_TENSOR.ATTN_ROT_EMBD: "blk.{bid}.attn_rot_embd", + MODEL_TENSOR.ATTN_Q_NORM: "blk.{bid}.attn_q_norm", + MODEL_TENSOR.ATTN_K_NORM: "blk.{bid}.attn_k_norm", + MODEL_TENSOR.FFN_NORM: "blk.{bid}.ffn_norm", + MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate", + MODEL_TENSOR.FFN_DOWN: "blk.{bid}.ffn_down", + MODEL_TENSOR.FFN_UP: "blk.{bid}.ffn_up", +} + +MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { + MODEL_ARCH.LLAMA: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.GPTNEOX: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.FALCON: [ + MODEL_TENSOR.TOKEN_EMBD, + 
MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_NORM_2, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.BAICHUAN: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.STARCODER: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.POS_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.BERT: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.TOKEN_TYPES, + MODEL_TENSOR.POS_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.MPT: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.GPTJ: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.PERSIMMON: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.ATTN_Q_NORM, + MODEL_TENSOR.ATTN_K_NORM, + MODEL_TENSOR.ATTN_ROT_EMBD, + ], + MODEL_ARCH.REFACT: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.BLOOM: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.TOKEN_EMBD_NORM, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], + MODEL_ARCH.GPT2: [ + # TODO + ], + # TODO +} + +# tensors that will not be serialized +MODEL_TENSOR_SKIP: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { + MODEL_ARCH.LLAMA: [ + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_ROT_EMBD, + ], + MODEL_ARCH.BAICHUAN: [ + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_ROT_EMBD, + ], + MODEL_ARCH.PERSIMMON: [ + MODEL_TENSOR.ROPE_FREQS, + ], +} + +# +# types +# + + +class TokenType(IntEnum): + NORMAL = 1 + UNKNOWN = 2 + CONTROL = 3 + USER_DEFINED = 4 + UNUSED = 5 + BYTE = 6 + + +class RopeScalingType(Enum): + NONE = 'none' + LINEAR = 'linear' + YARN = 'yarn' + + +class GGMLQuantizationType(IntEnum): + F32 = 0 + F16 = 1 + Q4_0 = 2 + Q4_1 = 3 + Q5_0 = 6 + Q5_1 = 7 + Q8_0 = 8 + Q8_1 = 9 + Q2_K = 10 + Q3_K = 11 + Q4_K = 12 + Q5_K = 13 + Q6_K = 14 + Q8_K = 15 + + +class GGUFEndian(IntEnum): + LITTLE = 0 + BIG = 1 + + +class 
GGUFValueType(IntEnum): + UINT8 = 0 + INT8 = 1 + UINT16 = 2 + INT16 = 3 + UINT32 = 4 + INT32 = 5 + FLOAT32 = 6 + BOOL = 7 + STRING = 8 + ARRAY = 9 + UINT64 = 10 + INT64 = 11 + FLOAT64 = 12 + + @staticmethod + def get_type(val: Any) -> GGUFValueType: + if isinstance(val, (str, bytes, bytearray)): + return GGUFValueType.STRING + elif isinstance(val, list): + return GGUFValueType.ARRAY + elif isinstance(val, float): + return GGUFValueType.FLOAT32 + elif isinstance(val, bool): + return GGUFValueType.BOOL + elif isinstance(val, int): + return GGUFValueType.INT32 + # TODO: need help with 64-bit types in Python + else: + print("Unknown type:", type(val)) + sys.exit() + + +# Note: Does not support GGML_QKK_64 +QK_K = 256 +# Items here are (block size, type size) +GGML_QUANT_SIZES = { + GGMLQuantizationType.F32: (1, 4), + GGMLQuantizationType.F16: (1, 2), + GGMLQuantizationType.Q4_0: (32, 2 + 16), + GGMLQuantizationType.Q4_1: (32, 2 + 2 + 16), + GGMLQuantizationType.Q5_0: (32, 2 + 4 + 16), + GGMLQuantizationType.Q5_1: (32, 2 + 2 + 4 + 16), + GGMLQuantizationType.Q8_0: (32, 2 + 32), + GGMLQuantizationType.Q8_1: (32, 4 + 4 + 32), + GGMLQuantizationType.Q2_K: (256, 2 + 2 + QK_K // 16 + QK_K // 4), + GGMLQuantizationType.Q3_K: (256, 2 + QK_K // 4 + QK_K // 8 + 12), + GGMLQuantizationType.Q4_K: (256, 2 + 2 + QK_K // 2 + 12), + GGMLQuantizationType.Q5_K: (256, 2 + 2 + QK_K // 2 + QK_K // 8 + 12), + GGMLQuantizationType.Q6_K: (256, 2 + QK_K // 2 + QK_K // 4 + QK_K // 16), + GGMLQuantizationType.Q8_K: (256, 4 + QK_K + QK_K // 8), +} + + +# Aliases for backward compatibility. + +# general +KEY_GENERAL_ARCHITECTURE = Keys.General.ARCHITECTURE +KEY_GENERAL_QUANTIZATION_VERSION = Keys.General.QUANTIZATION_VERSION +KEY_GENERAL_ALIGNMENT = Keys.General.ALIGNMENT +KEY_GENERAL_NAME = Keys.General.NAME +KEY_GENERAL_AUTHOR = Keys.General.AUTHOR +KEY_GENERAL_URL = Keys.General.URL +KEY_GENERAL_DESCRIPTION = Keys.General.DESCRIPTION +KEY_GENERAL_LICENSE = Keys.General.LICENSE +KEY_GENERAL_SOURCE_URL = Keys.General.SOURCE_URL +KEY_GENERAL_SOURCE_HF_REPO = Keys.General.SOURCE_HF_REPO +KEY_GENERAL_FILE_TYPE = Keys.General.FILE_TYPE + +# LLM +KEY_CONTEXT_LENGTH = Keys.LLM.CONTEXT_LENGTH +KEY_EMBEDDING_LENGTH = Keys.LLM.EMBEDDING_LENGTH +KEY_BLOCK_COUNT = Keys.LLM.BLOCK_COUNT +KEY_FEED_FORWARD_LENGTH = Keys.LLM.FEED_FORWARD_LENGTH +KEY_USE_PARALLEL_RESIDUAL = Keys.LLM.USE_PARALLEL_RESIDUAL +KEY_TENSOR_DATA_LAYOUT = Keys.LLM.TENSOR_DATA_LAYOUT + +# attention +KEY_ATTENTION_HEAD_COUNT = Keys.Attention.HEAD_COUNT +KEY_ATTENTION_HEAD_COUNT_KV = Keys.Attention.HEAD_COUNT_KV +KEY_ATTENTION_MAX_ALIBI_BIAS = Keys.Attention.MAX_ALIBI_BIAS +KEY_ATTENTION_CLAMP_KQV = Keys.Attention.CLAMP_KQV +KEY_ATTENTION_LAYERNORM_EPS = Keys.Attention.LAYERNORM_EPS +KEY_ATTENTION_LAYERNORM_RMS_EPS = Keys.Attention.LAYERNORM_RMS_EPS + +# RoPE +KEY_ROPE_DIMENSION_COUNT = Keys.Rope.DIMENSION_COUNT +KEY_ROPE_FREQ_BASE = Keys.Rope.FREQ_BASE +KEY_ROPE_SCALING_TYPE = Keys.Rope.SCALING_TYPE +KEY_ROPE_SCALING_FACTOR = Keys.Rope.SCALING_FACTOR +KEY_ROPE_SCALING_ORIG_CTX_LEN = Keys.Rope.SCALING_ORIG_CTX_LEN +KEY_ROPE_SCALING_FINETUNED = Keys.Rope.SCALING_FINETUNED + +# tokenization +KEY_TOKENIZER_MODEL = Keys.Tokenizer.MODEL +KEY_TOKENIZER_LIST = Keys.Tokenizer.LIST +KEY_TOKENIZER_TOKEN_TYPE = Keys.Tokenizer.TOKEN_TYPE +KEY_TOKENIZER_SCORES = Keys.Tokenizer.SCORES +KEY_TOKENIZER_MERGES = Keys.Tokenizer.MERGES +KEY_TOKENIZER_BOS_ID = Keys.Tokenizer.BOS_ID +KEY_TOKENIZER_EOS_ID = Keys.Tokenizer.EOS_ID +KEY_TOKENIZER_UNK_ID = Keys.Tokenizer.UNK_ID 
+KEY_TOKENIZER_SEP_ID = Keys.Tokenizer.SEP_ID +KEY_TOKENIZER_PAD_ID = Keys.Tokenizer.PAD_ID +KEY_TOKENIZER_HF_JSON = Keys.Tokenizer.HF_JSON +KEY_TOKENIZER_RWKV = Keys.Tokenizer.RWKV diff --git a/gguf-py/gguf/gguf.py b/gguf-py/gguf/gguf.py index 7e495cb19..651a81eb8 100644 --- a/gguf-py/gguf/gguf.py +++ b/gguf-py/gguf/gguf.py @@ -1,1146 +1,15 @@ -#!/usr/bin/env python3 -from __future__ import annotations +# This file left for compatibility. If you want to use the GGUF API from Python +# then don't import gguf/gguf.py directly. If you're looking for examples, see the +# examples/ directory for gguf-py -import json -import os -import shutil -import struct +import importlib import sys -import tempfile -from enum import Enum, IntEnum, auto -from io import BufferedWriter from pathlib import Path -from typing import IO, Any, BinaryIO, Callable, Sequence -import numpy as np +sys.path.insert(0, str(Path(__file__).parent.parent)) -# -# constants -# +# Compatibility for people trying to import gguf/gguf.py directly instead of as a package. +importlib.invalidate_caches() +import gguf # noqa: E402 -GGUF_MAGIC = 0x46554747 -GGUF_VERSION = 3 -GGUF_DEFAULT_ALIGNMENT = 32 - - -# general -KEY_GENERAL_ARCHITECTURE = "general.architecture" -KEY_GENERAL_QUANTIZATION_VERSION = "general.quantization_version" -KEY_GENERAL_ALIGNMENT = "general.alignment" -KEY_GENERAL_NAME = "general.name" -KEY_GENERAL_AUTHOR = "general.author" -KEY_GENERAL_URL = "general.url" -KEY_GENERAL_DESCRIPTION = "general.description" -KEY_GENERAL_LICENSE = "general.license" -KEY_GENERAL_SOURCE_URL = "general.source.url" -KEY_GENERAL_SOURCE_HF_REPO = "general.source.huggingface.repository" -KEY_GENERAL_FILE_TYPE = "general.file_type" - -# LLM -KEY_CONTEXT_LENGTH = "{arch}.context_length" -KEY_EMBEDDING_LENGTH = "{arch}.embedding_length" -KEY_BLOCK_COUNT = "{arch}.block_count" -KEY_FEED_FORWARD_LENGTH = "{arch}.feed_forward_length" -KEY_USE_PARALLEL_RESIDUAL = "{arch}.use_parallel_residual" -KEY_TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout" - -# attention -KEY_ATTENTION_HEAD_COUNT = "{arch}.attention.head_count" -KEY_ATTENTION_HEAD_COUNT_KV = "{arch}.attention.head_count_kv" -KEY_ATTENTION_MAX_ALIBI_BIAS = "{arch}.attention.max_alibi_bias" -KEY_ATTENTION_CLAMP_KQV = "{arch}.attention.clamp_kqv" -KEY_ATTENTION_LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon" -KEY_ATTENTION_LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon" - -# RoPE -KEY_ROPE_DIMENSION_COUNT = "{arch}.rope.dimension_count" -KEY_ROPE_FREQ_BASE = "{arch}.rope.freq_base" -KEY_ROPE_SCALING_TYPE = "{arch}.rope.scaling.type" -KEY_ROPE_SCALING_FACTOR = "{arch}.rope.scaling.factor" -KEY_ROPE_SCALING_ORIG_CTX_LEN = "{arch}.rope.scaling.original_context_length" -KEY_ROPE_SCALING_FINETUNED = "{arch}.rope.scaling.finetuned" - -# tokenization -KEY_TOKENIZER_MODEL = "tokenizer.ggml.model" -KEY_TOKENIZER_LIST = "tokenizer.ggml.tokens" -KEY_TOKENIZER_TOKEN_TYPE = "tokenizer.ggml.token_type" -KEY_TOKENIZER_SCORES = "tokenizer.ggml.scores" -KEY_TOKENIZER_MERGES = "tokenizer.ggml.merges" -KEY_TOKENIZER_BOS_ID = "tokenizer.ggml.bos_token_id" -KEY_TOKENIZER_EOS_ID = "tokenizer.ggml.eos_token_id" -KEY_TOKENIZER_UNK_ID = "tokenizer.ggml.unknown_token_id" -KEY_TOKENIZER_SEP_ID = "tokenizer.ggml.seperator_token_id" -KEY_TOKENIZER_PAD_ID = "tokenizer.ggml.padding_token_id" -KEY_TOKENIZER_HF_JSON = "tokenizer.huggingface.json" -KEY_TOKENIZER_RWKV = "tokenizer.rwkv.world" - - -# -# recommended mapping of model tensor names for storage in gguf -# - - -class MODEL_ARCH(IntEnum): - LLAMA 
: int = auto() - FALCON : int = auto() - BAICHUAN : int = auto() - GPT2 : int = auto() - GPTJ : int = auto() - GPTNEOX : int = auto() - MPT : int = auto() - STARCODER : int = auto() - PERSIMMON : int = auto() - REFACT : int = auto() - BERT : int = auto() - BLOOM : int = auto() - - -class MODEL_TENSOR(IntEnum): - TOKEN_EMBD : int = auto() - TOKEN_EMBD_NORM : int = auto() - TOKEN_TYPES : int = auto() - POS_EMBD : int = auto() - OUTPUT : int = auto() - OUTPUT_NORM : int = auto() - ROPE_FREQS : int = auto() - ATTN_Q : int = auto() - ATTN_K : int = auto() - ATTN_V : int = auto() - ATTN_QKV : int = auto() - ATTN_OUT : int = auto() - ATTN_NORM : int = auto() - ATTN_NORM_2 : int = auto() - ATTN_ROT_EMBD : int = auto() - FFN_GATE : int = auto() - FFN_DOWN : int = auto() - FFN_UP : int = auto() - FFN_NORM : int = auto() - ATTN_Q_NORM : int = auto() - ATTN_K_NORM : int = auto() - - -MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { - MODEL_ARCH.LLAMA: "llama", - MODEL_ARCH.FALCON: "falcon", - MODEL_ARCH.BAICHUAN: "baichuan", - MODEL_ARCH.GPT2: "gpt2", - MODEL_ARCH.GPTJ: "gptj", - MODEL_ARCH.GPTNEOX: "gptneox", - MODEL_ARCH.MPT: "mpt", - MODEL_ARCH.STARCODER: "starcoder", - MODEL_ARCH.PERSIMMON: "persimmon", - MODEL_ARCH.REFACT: "refact", - MODEL_ARCH.BERT: "bert", - MODEL_ARCH.BLOOM: "bloom", -} - -TENSOR_NAMES: dict[MODEL_TENSOR, str] = { - MODEL_TENSOR.TOKEN_EMBD: "token_embd", - MODEL_TENSOR.TOKEN_EMBD_NORM: "token_embd_norm", - MODEL_TENSOR.TOKEN_TYPES: "token_types", - MODEL_TENSOR.POS_EMBD: "position_embd", - MODEL_TENSOR.OUTPUT_NORM: "output_norm", - MODEL_TENSOR.OUTPUT: "output", - MODEL_TENSOR.ROPE_FREQS: "rope_freqs", - MODEL_TENSOR.ATTN_NORM: "blk.{bid}.attn_norm", - MODEL_TENSOR.ATTN_NORM_2: "blk.{bid}.attn_norm_2", - MODEL_TENSOR.ATTN_QKV: "blk.{bid}.attn_qkv", - MODEL_TENSOR.ATTN_Q: "blk.{bid}.attn_q", - MODEL_TENSOR.ATTN_K: "blk.{bid}.attn_k", - MODEL_TENSOR.ATTN_V: "blk.{bid}.attn_v", - MODEL_TENSOR.ATTN_OUT: "blk.{bid}.attn_output", - MODEL_TENSOR.ATTN_ROT_EMBD: "blk.{bid}.attn_rot_embd", - MODEL_TENSOR.ATTN_Q_NORM: "blk.{bid}.attn_q_norm", - MODEL_TENSOR.ATTN_K_NORM: "blk.{bid}.attn_k_norm", - MODEL_TENSOR.FFN_NORM: "blk.{bid}.ffn_norm", - MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate", - MODEL_TENSOR.FFN_DOWN: "blk.{bid}.ffn_down", - MODEL_TENSOR.FFN_UP: "blk.{bid}.ffn_up", -} - -MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { - MODEL_ARCH.LLAMA: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ROPE_FREQS, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_Q, - MODEL_TENSOR.ATTN_K, - MODEL_TENSOR.ATTN_V, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.ATTN_ROT_EMBD, - MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_GATE, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.GPTNEOX: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_QKV, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.FALCON: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_NORM_2, - MODEL_TENSOR.ATTN_QKV, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.BAICHUAN: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ROPE_FREQS, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_Q, - MODEL_TENSOR.ATTN_K, - MODEL_TENSOR.ATTN_V, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.ATTN_ROT_EMBD, - 
MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_GATE, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.STARCODER: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.POS_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_QKV, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.BERT: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.TOKEN_TYPES, - MODEL_TENSOR.POS_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_Q, - MODEL_TENSOR.ATTN_K, - MODEL_TENSOR.ATTN_V, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.MPT: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_QKV, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.GPTJ: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_Q, - MODEL_TENSOR.ATTN_K, - MODEL_TENSOR.ATTN_V, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.PERSIMMON: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_QKV, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - MODEL_TENSOR.ATTN_Q_NORM, - MODEL_TENSOR.ATTN_K_NORM, - MODEL_TENSOR.ATTN_ROT_EMBD, - ], - MODEL_ARCH.REFACT: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_Q, - MODEL_TENSOR.ATTN_K, - MODEL_TENSOR.ATTN_V, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_GATE, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.BLOOM: [ - MODEL_TENSOR.TOKEN_EMBD, - MODEL_TENSOR.TOKEN_EMBD_NORM, - MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.OUTPUT, - MODEL_TENSOR.ATTN_NORM, - MODEL_TENSOR.ATTN_QKV, - MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, - MODEL_TENSOR.FFN_DOWN, - MODEL_TENSOR.FFN_UP, - ], - MODEL_ARCH.GPT2: [ - # TODO - ], - # TODO -} - -# tensors that will not be serialized -MODEL_TENSOR_SKIP: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { - MODEL_ARCH.LLAMA: [ - MODEL_TENSOR.ROPE_FREQS, - MODEL_TENSOR.ATTN_ROT_EMBD, - ], - MODEL_ARCH.BAICHUAN: [ - MODEL_TENSOR.ROPE_FREQS, - MODEL_TENSOR.ATTN_ROT_EMBD, - ], - MODEL_ARCH.PERSIMMON: [ - MODEL_TENSOR.ROPE_FREQS, - ] -} - - -class TensorNameMap: - mappings_cfg: dict[MODEL_TENSOR, tuple[str, ...]] = { - # Token embeddings - MODEL_TENSOR.TOKEN_EMBD: ( - "gpt_neox.embed_in", # gptneox - "transformer.wte", # gpt2 gpt-j mpt refact - "transformer.word_embeddings", # falcon - "word_embeddings", # bloom - "model.embed_tokens", # llama-hf - "tok_embeddings", # llama-pth - "embeddings.word_embeddings", # bert - "language_model.embedding.word_embeddings", # persimmon - ), - - # Token type embeddings - MODEL_TENSOR.TOKEN_TYPES: ( - "embeddings.token_type_embeddings", # bert - ), - - # Normalization of token embeddings - MODEL_TENSOR.TOKEN_EMBD_NORM: ( - "word_embeddings_layernorm", # bloom - ), - - # Position embeddings - MODEL_TENSOR.POS_EMBD: ( - "transformer.wpe", # gpt2 - "embeddings.position_embeddings", # bert - ), - - # Output - MODEL_TENSOR.OUTPUT: ( - "embed_out", # gptneox - "lm_head", # gpt2 mpt falcon llama-hf baichuan - "output", # llama-pth bloom - "word_embeddings_for_head", # 
persimmon - ), - - # Output norm - MODEL_TENSOR.OUTPUT_NORM: ( - "gpt_neox.final_layer_norm", # gptneox - "transformer.ln_f", # gpt2 gpt-j falcon - "model.norm", # llama-hf baichuan - "norm", # llama-pth - "embeddings.LayerNorm", # bert - "transformer.norm_f", # mpt - "ln_f", # refact bloom - "language_model.encoder.final_layernorm", # persimmon - ), - - # Rope frequencies - MODEL_TENSOR.ROPE_FREQS: ( - "rope.freqs", # llama-pth - ), - } - - block_mappings_cfg: dict[MODEL_TENSOR, tuple[str, ...]] = { - # Attention norm - MODEL_TENSOR.ATTN_NORM: ( - "gpt_neox.layers.{bid}.input_layernorm", # gptneox - "transformer.h.{bid}.ln_1", # gpt2 gpt-j refact - "transformer.blocks.{bid}.norm_1", # mpt - "transformer.h.{bid}.input_layernorm", # falcon7b - "h.{bid}.input_layernorm", # bloom - "transformer.h.{bid}.ln_mlp", # falcon40b - "model.layers.{bid}.input_layernorm", # llama-hf - "layers.{bid}.attention_norm", # llama-pth - "encoder.layer.{bid}.attention.output.LayerNorm", # bert - "language_model.encoder.layers.{bid}.input_layernorm", # persimmon - "model.layers.{bid}.ln1", # yi - ), - - # Attention norm 2 - MODEL_TENSOR.ATTN_NORM_2: ( - "transformer.h.{bid}.ln_attn", # falcon40b - ), - - # Attention query-key-value - MODEL_TENSOR.ATTN_QKV: ( - "gpt_neox.layers.{bid}.attention.query_key_value", # gptneox - "transformer.h.{bid}.attn.c_attn", # gpt2 - "transformer.blocks.{bid}.attn.Wqkv", # mpt - "transformer.h.{bid}.self_attention.query_key_value", # falcon - "h.{bid}.self_attention.query_key_value", # bloom - "language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon - ), - - # Attention query - MODEL_TENSOR.ATTN_Q: ( - "model.layers.{bid}.self_attn.q_proj", # llama-hf - "layers.{bid}.attention.wq", # llama-pth - "encoder.layer.{bid}.attention.self.query", # bert - "transformer.h.{bid}.attn.q_proj", # gpt-j - ), - - # Attention key - MODEL_TENSOR.ATTN_K: ( - "model.layers.{bid}.self_attn.k_proj", # llama-hf - "layers.{bid}.attention.wk", # llama-pth - "encoder.layer.{bid}.attention.self.key", # bert - "transformer.h.{bid}.attn.k_proj", # gpt-j - ), - - # Attention value - MODEL_TENSOR.ATTN_V: ( - "model.layers.{bid}.self_attn.v_proj", # llama-hf - "layers.{bid}.attention.wv", # llama-pth - "encoder.layer.{bid}.attention.self.value", # bert - "transformer.h.{bid}.attn.v_proj", # gpt-j - ), - - # Attention output - MODEL_TENSOR.ATTN_OUT: ( - "gpt_neox.layers.{bid}.attention.dense", # gptneox - "transformer.h.{bid}.attn.c_proj", # gpt2 refact - "transformer.blocks.{bid}.attn.out_proj", # mpt - "transformer.h.{bid}.self_attention.dense", # falcon - "h.{bid}.self_attention.dense", # bloom - "model.layers.{bid}.self_attn.o_proj", # llama-hf - "layers.{bid}.attention.wo", # llama-pth - "encoder.layer.{bid}.attention.output.dense", # bert - "transformer.h.{bid}.attn.out_proj", # gpt-j - "language_model.encoder.layers.{bid}.self_attention.dense" # persimmon - ), - - # Rotary embeddings - MODEL_TENSOR.ATTN_ROT_EMBD: ( - "model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf - "layers.{bid}.attention.inner_attention.rope.freqs", # llama-pth - ), - - # Feed-forward norm - MODEL_TENSOR.FFN_NORM: ( - "gpt_neox.layers.{bid}.post_attention_layernorm", # gptneox - "transformer.h.{bid}.ln_2", # gpt2 refact - "h.{bid}.post_attention_layernorm", # bloom - "transformer.blocks.{bid}.norm_2", # mpt - "model.layers.{bid}.post_attention_layernorm", # llama-hf - "layers.{bid}.ffn_norm", # llama-pth - "encoder.layer.{bid}.output.LayerNorm", # bert - 
"language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon - "model.layers.{bid}.ln2", # yi - ), - - # Feed-forward up - MODEL_TENSOR.FFN_UP: ( - "gpt_neox.layers.{bid}.mlp.dense_h_to_4h", # gptneox - "transformer.h.{bid}.mlp.c_fc", # gpt2 - "transformer.blocks.{bid}.ffn.up_proj", # mpt - "transformer.h.{bid}.mlp.dense_h_to_4h", # falcon - "h.{bid}.mlp.dense_h_to_4h", # bloom - "model.layers.{bid}.mlp.up_proj", # llama-hf refact - "layers.{bid}.feed_forward.w3", # llama-pth - "encoder.layer.{bid}.intermediate.dense", # bert - "transformer.h.{bid}.mlp.fc_in", # gpt-j - "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon - ), - - # Feed-forward gate - MODEL_TENSOR.FFN_GATE: ( - "model.layers.{bid}.mlp.gate_proj", # llama-hf refact - "layers.{bid}.feed_forward.w1", # llama-pth - ), - - # Feed-forward down - MODEL_TENSOR.FFN_DOWN: ( - "gpt_neox.layers.{bid}.mlp.dense_4h_to_h", # gptneox - "transformer.h.{bid}.mlp.c_proj", # gpt2 refact - "transformer.blocks.{bid}.ffn.down_proj", # mpt - "transformer.h.{bid}.mlp.dense_4h_to_h", # falcon - "h.{bid}.mlp.dense_4h_to_h", # bloom - "model.layers.{bid}.mlp.down_proj", # llama-hf - "layers.{bid}.feed_forward.w2", # llama-pth - "encoder.layer.{bid}.output.dense", # bert - "transformer.h.{bid}.mlp.fc_out", # gpt-j - "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon - ), - - MODEL_TENSOR.ATTN_Q_NORM: ( - "language_model.encoder.layers.{bid}.self_attention.q_layernorm", - ), - - MODEL_TENSOR.ATTN_K_NORM: ( - "language_model.encoder.layers.{bid}.self_attention.k_layernorm", - ), - - MODEL_TENSOR.ROPE_FREQS: ( - "language_model.encoder.layers.{bid}.self_attention.rotary_emb.inv_freq", # persimmon - ) - } - - mapping: dict[str, tuple[MODEL_TENSOR, str]] - - def __init__(self, arch: MODEL_ARCH, n_blocks: int): - self.mapping = {} - for tensor, keys in self.mappings_cfg.items(): - if tensor not in MODEL_TENSORS[arch]: - continue - tensor_name = TENSOR_NAMES[tensor] - self.mapping[tensor_name] = (tensor, tensor_name) - for key in keys: - self.mapping[key] = (tensor, tensor_name) - for bid in range(n_blocks): - for tensor, keys in self.block_mappings_cfg.items(): - if tensor not in MODEL_TENSORS[arch]: - continue - tensor_name = TENSOR_NAMES[tensor].format(bid = bid) - self.mapping[tensor_name] = (tensor, tensor_name) - for key in keys: - key = key.format(bid = bid) - self.mapping[key] = (tensor, tensor_name) - - def get_type_and_name(self, key: str, try_suffixes: Sequence[str] = ()) -> tuple[MODEL_TENSOR, str] | None: - result = self.mapping.get(key) - if result is not None: - return result - for suffix in try_suffixes: - if key.endswith(suffix): - result = self.mapping.get(key[:-len(suffix)]) - if result is not None: - return (result[0], result[1] + suffix) - return None - - def get_name(self, key: str, try_suffixes: Sequence[str] = ()) -> str | None: - result = self.get_type_and_name(key, try_suffixes = try_suffixes) - if result is None: - return None - return result[1] - - def get_type(self, key: str, try_suffixes: Sequence[str] = ()) -> MODEL_TENSOR | None: - result = self.get_type_and_name(key, try_suffixes = try_suffixes) - if result is None: - return None - return result[0] - - def __getitem__(self, key: str) -> str: - try: - return self.mapping[key][1] - except KeyError: - raise KeyError(key) - - def __contains__(self, key: str) -> bool: - return key in self.mapping - - def __repr__(self) -> str: - return repr(self.mapping) - -def get_tensor_name_map(arch: MODEL_ARCH, n_blocks: int) -> TensorNameMap: 
- return TensorNameMap(arch, n_blocks) - -class TokenType(IntEnum): - NORMAL = 1 - UNKNOWN = 2 - CONTROL = 3 - USER_DEFINED = 4 - UNUSED = 5 - BYTE = 6 - -class RopeScalingType(Enum): - NONE = 'none' - LINEAR = 'linear' - YARN = 'yarn' - -# -# implementation -# - - -class GGMLQuantizationType(IntEnum): - F32 = 0 - F16 = 1 - Q4_0 = 2 - Q4_1 = 3 - Q5_0 = 6 - Q5_1 = 7 - Q8_0 = 8 - Q8_1 = 9 - Q2_K = 10 - Q3_K = 11 - Q4_K = 12 - Q5_K = 13 - Q6_K = 14 - Q8_K = 15 - -class GGUFEndian(IntEnum): - LITTLE = 0 - BIG = 1 - - -class GGUFValueType(IntEnum): - UINT8 = 0 - INT8 = 1 - UINT16 = 2 - INT16 = 3 - UINT32 = 4 - INT32 = 5 - FLOAT32 = 6 - BOOL = 7 - STRING = 8 - ARRAY = 9 - UINT64 = 10 - INT64 = 11 - FLOAT64 = 12 - - @staticmethod - def get_type(val): - if isinstance(val, str) or isinstance(val, bytes) or isinstance(val, bytearray): - return GGUFValueType.STRING - elif isinstance(val, list): - return GGUFValueType.ARRAY - elif isinstance(val, float): - return GGUFValueType.FLOAT32 - elif isinstance(val, bool): - return GGUFValueType.BOOL - elif isinstance(val, int): - return GGUFValueType.INT32 - # TODO: need help with 64-bit types in Python - else: - print("Unknown type: "+str(type(val))) - sys.exit() - - -class WriterState(Enum): - EMPTY = auto() - HEADER = auto() - KV_DATA = auto() - TI_DATA = auto() - - -class GGUFWriter: - fout: BufferedWriter - temp_file: tempfile.SpooledTemporaryFile[bytes] | None - tensors: list[np.ndarray[Any, Any]] - - @property - def pack_prefix(self): - if self.endianess==GGUFEndian.LITTLE: - return "<" - else: - return ">" - - def __init__(self, path: os.PathLike[str] | str, arch: str, use_temp_file = True, endianess=GGUFEndian.LITTLE): - self.fout = open(path, "wb") - self.arch = arch - self.endianess = endianess - self._simple_value_packing = { - GGUFValueType.UINT8: f"{self.pack_prefix}B", - GGUFValueType.INT8: f"{self.pack_prefix}b", - GGUFValueType.UINT16: f"{self.pack_prefix}H", - GGUFValueType.INT16: f"{self.pack_prefix}h", - GGUFValueType.UINT32: f"{self.pack_prefix}I", - GGUFValueType.INT32: f"{self.pack_prefix}i", - GGUFValueType.FLOAT32: f"{self.pack_prefix}f", - GGUFValueType.UINT64: f"{self.pack_prefix}Q", - GGUFValueType.INT64: f"{self.pack_prefix}q", - GGUFValueType.FLOAT64: f"{self.pack_prefix}d", - GGUFValueType.BOOL: "?" 
,
-        }
-        self.offset_tensor = 0
-        self.data_alignment = GGUF_DEFAULT_ALIGNMENT
-        self.kv_data = b""
-        self.kv_data_count = 0
-        self.ti_data = b""
-        self.ti_data_count = 0
-        self.use_temp_file = use_temp_file
-        self.temp_file = None
-        self.tensors = []
-        endianess_str = "Big Endian" if self.endianess == GGUFEndian.BIG else "Little Endian"
-        print(f"This gguf file is for {endianess_str} only")
-        self.state = WriterState.EMPTY
-
-        self.add_architecture()
-
-    def write_header_to_file(self):
-        if self.state is not WriterState.EMPTY:
-            raise ValueError(f'Expected output file to be empty, got {self.state}')
-
-        self.fout.write(struct.pack("<I", GGUF_MAGIC))
-        self.fout.write(struct.pack(f"{self.pack_prefix}I", GGUF_VERSION))
-        self.fout.write(struct.pack(f"{self.pack_prefix}Q", self.ti_data_count))
-        self.fout.write(struct.pack(f"{self.pack_prefix}Q", self.kv_data_count))
-        self.flush()
-        self.state = WriterState.HEADER
-
-    def write_kv_data_to_file(self):
-        if self.state is not WriterState.HEADER:
-            raise ValueError(f'Expected output file to contain the header, got {self.state}')
-
-        self.fout.write(self.kv_data)
-        self.flush()
-        self.state = WriterState.KV_DATA
-
-    def write_ti_data_to_file(self):
-        if self.state is not WriterState.KV_DATA:
-            raise ValueError(f'Expected output file to contain KV data, got {self.state}')
-
-        self.fout.write(self.ti_data)
-        self.flush()
-        self.state = WriterState.TI_DATA
-
-    def add_key(self, key: str):
-        self.add_val(key, GGUFValueType.STRING, add_vtype=False)
-
-    def add_uint8(self, key: str, val: int):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.UINT8)
-
-    def add_int8(self, key: str, val: int):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.INT8)
-
-    def add_uint16(self, key: str, val: int):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.UINT16)
-
-    def add_int16(self, key: str, val: int):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.INT16)
-
-    def add_uint32(self, key: str, val: int):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.UINT32)
-
-    def add_int32(self, key: str, val: int):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.INT32)
-
-    def add_float32(self, key: str, val: float):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.FLOAT32)
-
-    def add_uint64(self, key: str, val: int):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.UINT64)
-
-    def add_int64(self, key: str, val: int):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.INT64)
-
-    def add_float64(self, key: str, val: float):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.FLOAT64)
-
-    def add_bool(self, key: str, val: bool):
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.BOOL)
-
-    def add_string(self, key: str, val: str):
-        if len(val) == 0:
-            return
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.STRING)
-
-    def add_array(self, key: str, val: Sequence[Any]):
-        if not isinstance(val, Sequence):
-            raise ValueError("Value must be a sequence for array type")
-
-        self.add_key(key)
-        self.add_val(val, GGUFValueType.ARRAY)
-
-    def add_val(self, val: Any, vtype: GGUFValueType | None = None, add_vtype: bool = True):
-        if vtype is None:
-            vtype = GGUFValueType.get_type(val)
-
-        if add_vtype:
-            self.kv_data += struct.pack(f"{self.pack_prefix}I", vtype)
-            self.kv_data_count += 1
-
-        pack_fmt = self._simple_value_packing.get(vtype)
-        if pack_fmt is not None:
-            self.kv_data += struct.pack(pack_fmt, val)
-        elif vtype == GGUFValueType.STRING:
-            encoded_val = val.encode("utf8") if isinstance(val, str) else val
-            self.kv_data += struct.pack(f"{self.pack_prefix}Q", len(encoded_val))
-            self.kv_data += encoded_val
-        elif vtype == GGUFValueType.ARRAY and isinstance(val, Sequence) and len(val) > 0:
-            ltype = GGUFValueType.get_type(val[0])
-            if not all(GGUFValueType.get_type(i) is ltype for i in val[1:]):
-                raise ValueError("All items in a GGUF array should be of the same type")
-            self.kv_data += struct.pack(f"{self.pack_prefix}I", ltype)
-            self.kv_data += struct.pack(f"{self.pack_prefix}Q", len(val))
-            for item in val:
-                self.add_val(item, add_vtype=False)
-        else:
-            raise ValueError("Invalid GGUF metadata value type or value")
-
-    @staticmethod
-    def ggml_pad(x: int, n: int) -> int:
-        return ((x + n - 1) // n) * n
-
-    def add_tensor_info(self, name: str, tensor_shape: Sequence[int], tensor_dtype: np.dtype[np.float16] | np.dtype[np.float32], tensor_nbytes: int, raw_dtype: GGMLQuantizationType | None = None):
-        if self.state is not WriterState.EMPTY:
-            raise ValueError(f'Expected output file to be empty, got {self.state}')
-
-        assert raw_dtype is not None or tensor_dtype in (np.float32, np.float16), "Only F32 and F16 tensors are supported for now"
-
-        encoded_name = name.encode("utf8")
-        self.ti_data += struct.pack(f"{self.pack_prefix}Q", len(encoded_name))
-        self.ti_data += encoded_name
-        n_dims = len(tensor_shape)
-        self.ti_data += struct.pack(f"{self.pack_prefix}I", n_dims)
-        for i in range(n_dims):
-            self.ti_data += struct.pack(f"{self.pack_prefix}Q", tensor_shape[n_dims - 1 - i])
-        if raw_dtype is None:
-            dtype = GGMLQuantizationType.F32 if tensor_dtype == np.float32 else GGMLQuantizationType.F16
-        else:
-            dtype = raw_dtype
-        self.ti_data += struct.pack(f"{self.pack_prefix}I", dtype)
-        self.ti_data += struct.pack(f"{self.pack_prefix}Q", self.offset_tensor)
-        self.offset_tensor += GGUFWriter.ggml_pad(tensor_nbytes, self.data_alignment)
-        self.ti_data_count += 1
-
-    def add_tensor(self, name: str, tensor: np.ndarray[Any, Any], raw_shape: Sequence[int] | None = None, raw_dtype: GGMLQuantizationType | None = None):
-        if self.endianess == GGUFEndian.BIG:
-            tensor.byteswap(inplace=True)
-        if self.use_temp_file and self.temp_file is None:
-            fp = tempfile.SpooledTemporaryFile(mode="w+b", max_size=256*1024*1024)
-            fp.seek(0)
-            self.temp_file = fp
-
-        shape: Sequence[int] = raw_shape if raw_shape is not None else tensor.shape
-        self.add_tensor_info(name, shape, tensor.dtype, tensor.nbytes, raw_dtype = raw_dtype)
-
-        if self.temp_file is None:
-            self.tensors.append(tensor)
-            return
-
-        tensor.tofile(self.temp_file)
-        self.write_padding(self.temp_file, tensor.nbytes)
-
-    def write_padding(self, fp: IO[bytes], n: int, align: int | None = None):
-        pad = GGUFWriter.ggml_pad(n, align if align is not None else self.data_alignment) - n
-        if pad != 0:
-            fp.write(bytes([0] * pad))
-
-    def write_tensor_data(self, tensor: np.ndarray[Any, Any]):
-        if self.state is not WriterState.TI_DATA:
-            raise ValueError(f'Expected output file to contain tensor info, got {self.state}')
-
-        if self.endianess==GGUFEndian.BIG:
-            tensor.byteswap(inplace=True)
-        
self.write_padding(self.fout, self.fout.tell()) - tensor.tofile(self.fout) - self.write_padding(self.fout, tensor.nbytes) - - def write_tensors_to_file(self): - self.write_ti_data_to_file() - - self.write_padding(self.fout, self.fout.tell()) - - if self.temp_file is None: - while True: - try: - tensor = self.tensors.pop(0) - except IndexError: - break - tensor.tofile(self.fout) - self.write_padding(self.fout, tensor.nbytes) - return - - self.temp_file.seek(0) - - shutil.copyfileobj(self.temp_file, self.fout) - self.flush() - self.temp_file.close() - - def flush(self): - self.fout.flush() - - def close(self): - self.fout.close() - - def add_architecture(self): - self.add_string(KEY_GENERAL_ARCHITECTURE, self.arch) - - def add_author(self, author: str): - self.add_string(KEY_GENERAL_AUTHOR, author) - - def add_tensor_data_layout(self, layout: str): - self.add_string(KEY_TENSOR_DATA_LAYOUT.format(arch=self.arch), layout) - - def add_url(self, url: str): - self.add_string(KEY_GENERAL_URL, url) - - def add_description(self, description: str): - self.add_string(KEY_GENERAL_DESCRIPTION, description) - - def add_source_url(self, url: str): - self.add_string(KEY_GENERAL_SOURCE_URL, url) - - def add_source_hf_repo(self, repo: str): - self.add_string(KEY_GENERAL_SOURCE_HF_REPO, repo) - - def add_file_type(self, ftype: int): - self.add_uint32(KEY_GENERAL_FILE_TYPE, ftype) - - def add_name(self, name: str): - self.add_string(KEY_GENERAL_NAME, name) - - def add_quantization_version(self, quantization_version: GGMLQuantizationType): - self.add_uint32( - KEY_GENERAL_QUANTIZATION_VERSION, quantization_version) - - def add_custom_alignment(self, alignment: int): - self.data_alignment = alignment - self.add_uint32(KEY_GENERAL_ALIGNMENT, alignment) - - def add_context_length(self, length: int): - self.add_uint32( - KEY_CONTEXT_LENGTH.format(arch=self.arch), length) - - def add_embedding_length(self, length: int): - self.add_uint32( - KEY_EMBEDDING_LENGTH.format(arch=self.arch), length) - - def add_block_count(self, length: int): - self.add_uint32( - KEY_BLOCK_COUNT.format(arch=self.arch), length) - - def add_feed_forward_length(self, length: int): - self.add_uint32( - KEY_FEED_FORWARD_LENGTH.format(arch=self.arch), length) - - def add_parallel_residual(self, use: bool): - self.add_bool( - KEY_USE_PARALLEL_RESIDUAL.format(arch=self.arch), use) - - def add_head_count(self, count: int): - self.add_uint32( - KEY_ATTENTION_HEAD_COUNT.format(arch=self.arch), count) - - def add_head_count_kv(self, count: int): - self.add_uint32( - KEY_ATTENTION_HEAD_COUNT_KV.format(arch=self.arch), count) - - def add_max_alibi_bias(self, bias: float): - self.add_float32( - KEY_ATTENTION_MAX_ALIBI_BIAS.format(arch=self.arch), bias) - - def add_clamp_kqv(self, value: float): - self.add_float32( - KEY_ATTENTION_CLAMP_KQV.format(arch=self.arch), value) - - def add_layer_norm_eps(self, value: float): - self.add_float32( - KEY_ATTENTION_LAYERNORM_EPS.format(arch=self.arch), value) - - def add_layer_norm_rms_eps(self, value: float): - self.add_float32( - KEY_ATTENTION_LAYERNORM_RMS_EPS.format(arch=self.arch), value) - - def add_rope_dimension_count(self, count: int): - self.add_uint32( - KEY_ROPE_DIMENSION_COUNT.format(arch=self.arch), count) - - def add_rope_freq_base(self, value: float): - self.add_float32(KEY_ROPE_FREQ_BASE.format(arch=self.arch), value) - - def add_rope_scaling_type(self, value: RopeScalingType): - self.add_string(KEY_ROPE_SCALING_TYPE.format(arch=self.arch), value.value) - - def add_rope_scaling_factor(self, value: 
float): - self.add_float32(KEY_ROPE_SCALING_FACTOR.format(arch=self.arch), value) - - def add_rope_scaling_orig_ctx_len(self, value: int): - self.add_uint32(KEY_ROPE_SCALING_ORIG_CTX_LEN.format(arch=self.arch), value) - - def add_rope_scaling_finetuned(self, value: bool): - self.add_bool(KEY_ROPE_SCALING_FINETUNED.format(arch=self.arch), value) - - def add_tokenizer_model(self, model: str): - self.add_string(KEY_TOKENIZER_MODEL, model) - - def add_token_list(self, tokens: Sequence[str] | Sequence[bytes] | Sequence[bytearray]): - self.add_array(KEY_TOKENIZER_LIST, tokens) - - def add_token_merges(self, merges: Sequence[str] | Sequence[bytes] | Sequence[bytearray]): - self.add_array(KEY_TOKENIZER_MERGES, merges) - - def add_token_types(self, types: Sequence[TokenType] | Sequence[int]): - self.add_array(KEY_TOKENIZER_TOKEN_TYPE, types) - - def add_token_scores(self, scores: Sequence[float]): - self.add_array(KEY_TOKENIZER_SCORES, scores) - - def add_bos_token_id(self, id: int): - self.add_uint32(KEY_TOKENIZER_BOS_ID, id) - - def add_eos_token_id(self, id: int): - self.add_uint32(KEY_TOKENIZER_EOS_ID, id) - - def add_unk_token_id(self, id: int): - self.add_uint32(KEY_TOKENIZER_UNK_ID, id) - - def add_sep_token_id(self, id: int): - self.add_uint32(KEY_TOKENIZER_SEP_ID, id) - - def add_pad_token_id(self, id: int): - self.add_uint32(KEY_TOKENIZER_PAD_ID, id) - - -class SpecialVocab: - merges: list[str] - special_token_ids: dict[str, int] - - def __init__( - self, path: str | os.PathLike[str], load_merges: bool = False, - special_token_types: tuple[str, ...] | None = None, - n_vocab: int | None = None, - ): - self.special_token_ids = {} - self.n_vocab = n_vocab - self.load_merges = load_merges - self.merges = [] - if special_token_types is not None: - self.special_token_types = special_token_types - else: - self.special_token_types = ('bos', 'eos', 'unk', 'sep', 'pad') - self._load(Path(path)) - - def _load(self, path: Path) -> None: - if not self._try_load_from_tokenizer_json(path): - self._try_load_from_config_json(path) - - def _set_special_token(self, typ: str, tid: Any): - if not isinstance(tid, int) or tid < 0: - return - if self.n_vocab is None or tid < self.n_vocab: - self.special_token_ids[typ] = tid - return - print(f'gguf: WARNING: Special token type {typ}, id {tid} out of range, must be under {self.n_vocab} - skipping', - file = sys.stderr) - - - def _try_load_from_tokenizer_json(self, path: Path) -> bool: - tokenizer_file = path / 'tokenizer.json' - if not tokenizer_file.is_file(): - return False - with open(tokenizer_file, encoding = 'utf-8') as f: - tokenizer = json.load(f) - if self.load_merges: - merges = tokenizer.get('model', {}).get('merges') - if isinstance(merges, list) and len(merges) > 0 and isinstance(merges[0], str): - self.merges = merges - tokenizer_config_file = path / 'tokenizer_config.json' - added_tokens = tokenizer.get('added_tokens') - if added_tokens is None or not tokenizer_config_file.is_file(): - return True - with open(tokenizer_config_file, encoding = 'utf-8') as f: - tokenizer_config = json.load(f) - for typ in self.special_token_types: - entry = tokenizer_config.get(f'{typ}_token') - if isinstance(entry, str): - tc_content = entry - elif isinstance(entry, dict): - entry_content = entry.get('content') - if not isinstance(entry_content, str): - continue - tc_content = entry_content - else: - continue - # We only need the first match here. 
-            maybe_token_id = next((
-                atok.get('id') for atok in added_tokens
-                if atok.get('content') == tc_content), None)
-            self._set_special_token(typ, maybe_token_id)
-        return True
-
-    def _try_load_from_config_json(self, path: Path) -> bool:
-        config_file = path / 'config.json'
-        if not config_file.is_file():
-            return False
-        with open(config_file, encoding = 'utf-8') as f:
-            config = json.load(f)
-        for typ in self.special_token_types:
-            self._set_special_token(typ, config.get(f'{typ}_token_id'))
-        return True
-
-    def add_to_gguf(self, gw: GGUFWriter, quiet: bool = False) -> None:
-        if len(self.merges) > 0:
-            if not quiet:
-                print(f'gguf: Adding {len(self.merges)} merge(s).')
-            gw.add_token_merges(self.merges)
-        for typ, tokid in self.special_token_ids.items():
-            handler: Callable[[int], None] | None = getattr(gw, f'add_{typ}_token_id', None)
-            if handler is None:
-                print(f'gguf: WARNING: No handler for special token type {typ} with id {tokid} - skipping', file = sys.stderr)
-                continue
-            if not quiet:
-                print(f'gguf: Setting special token type {typ} to {tokid}')
-            handler(tokid)
-
-    def __repr__(self) -> str:
-        return f'<SpecialVocab with {len(self.merges)} merges and special tokens {self.special_token_ids or "unset"}>'
-
-
-# Example usage:
-if __name__ == "__main__":
-    # Example usage with a file
-    gguf_writer = GGUFWriter("example.gguf", "llama")
-
-    gguf_writer.add_architecture()
-    gguf_writer.add_block_count(12)
-    gguf_writer.add_uint32("answer", 42)  # Write a 32-bit integer
-    gguf_writer.add_float32("answer_in_float", 42.0)  # Write a 32-bit float
-    gguf_writer.add_custom_alignment(64)
-
-    tensor1 = np.ones((32,), dtype=np.float32) * 100.0
-    tensor2 = np.ones((64,), dtype=np.float32) * 101.0
-    tensor3 = np.ones((96,), dtype=np.float32) * 102.0
-
-    gguf_writer.add_tensor("tensor1", tensor1)
-    gguf_writer.add_tensor("tensor2", tensor2)
-    gguf_writer.add_tensor("tensor3", tensor3)
-
-    gguf_writer.write_header_to_file()
-    gguf_writer.write_kv_data_to_file()
-    gguf_writer.write_tensors_to_file()
-
-    gguf_writer.close()
+importlib.reload(gguf)
diff --git a/gguf-py/gguf/gguf_reader.py b/gguf-py/gguf/gguf_reader.py
new file mode 100644
index 000000000..8682765ed
--- /dev/null
+++ b/gguf-py/gguf/gguf_reader.py
@@ -0,0 +1,264 @@
+#
+# GGUF file reading/modification support. For API usage information,
+# please see the files in the scripts/ directory for some fairly simple examples.
+#
+from __future__ import annotations
+
+import os
+from collections import OrderedDict
+from typing import Any, Literal, NamedTuple, TypeVar, Union
+
+import numpy as np
+import numpy.typing as npt
+
+if __name__ == "__main__":
+    import sys
+    from pathlib import Path
+
+    # Allow running file in package as a script.
+    sys.path.insert(0, str(Path(__file__).parent.parent))
+
+from gguf.constants import (
+    GGML_QUANT_SIZES,
+    GGUF_DEFAULT_ALIGNMENT,
+    GGUF_MAGIC,
+    GGUF_VERSION,
+    GGMLQuantizationType,
+    GGUFValueType,
+)
+
+
+READER_SUPPORTED_VERSIONS = [2, GGUF_VERSION]
+
+
+class ReaderField(NamedTuple):
+    # Offset to start of this field.
+    offset: int
+
+    # Name of the field (not necessarily from file data).
+    name: str
+
+    # Data parts. Some types have multiple components, such as strings
+    # that consist of a length followed by the string data.
+    parts: list[npt.NDArray[Any]] = []
+
+    # Indexes into parts that we can call the actual data. For example
+    # an array of strings will be populated with indexes to the actual
+    # string data.
+ data: list[int] = [-1] + + types: list[GGUFValueType] = [] + + +class ReaderTensor(NamedTuple): + name: str + tensor_type: GGMLQuantizationType + shape: npt.NDArray[np.uint32] + n_elements: int + n_bytes: int + data_offset: int + data: npt.NDArray[Any] + field: ReaderField + + +class GGUFReader: + # I - same as host, S - swapped + byte_order: Literal['I' | 'S'] = 'I' + alignment: int = GGUF_DEFAULT_ALIGNMENT + + # Note: Internal helper, API may change. + gguf_scalar_to_np: dict[GGUFValueType, type[np.generic]] = { + GGUFValueType.UINT8: np.uint8, + GGUFValueType.INT8: np.int8, + GGUFValueType.UINT16: np.uint16, + GGUFValueType.INT16: np.int16, + GGUFValueType.UINT32: np.uint32, + GGUFValueType.INT32: np.int32, + GGUFValueType.FLOAT32: np.float32, + GGUFValueType.UINT64: np.uint64, + GGUFValueType.INT64: np.int64, + GGUFValueType.FLOAT64: np.float64, + GGUFValueType.BOOL: np.bool_, + } + + def __init__(self, path: os.PathLike[str] | str, mode: Literal['r' | 'r+' | 'c'] = 'r'): + self.data = np.memmap(path, mode = mode) + offs = 0 + if self._get(offs, np.uint32, override_order = '<')[0] != GGUF_MAGIC: + raise ValueError('GGUF magic invalid') + offs += 4 + temp_version = self._get(offs, np.uint32) + if temp_version[0] & 65535 == 0: + # If we get 0 here that means it's (probably) a GGUF file created for + # the opposite byte order of the machine this script is running on. + self.byte_order = 'S' + temp_version = temp_version.newbyteorder(self.byte_order) + version = temp_version[0] + if version not in READER_SUPPORTED_VERSIONS: + raise ValueError(f'Sorry, file appears to be version {version} which we cannot handle') + self.fields: OrderedDict[str, ReaderField] = OrderedDict() + self.tensors: list[ReaderTensor] = [] + offs += self._push_field(ReaderField(offs, 'GGUF.version', [temp_version], [0], [GGUFValueType.UINT32])) + temp_counts = self._get(offs, np.uint64, 2) + offs += self._push_field(ReaderField(offs, 'GGUF.tensor_count', [temp_counts[:1]], [0], [GGUFValueType.UINT64])) + offs += self._push_field(ReaderField(offs, 'GGUF.kv_count', [temp_counts[1:]], [0], [GGUFValueType.UINT64])) + tensor_count, kv_count = temp_counts + offs = self._build_fields(offs, kv_count) + offs, tensors_fields = self._build_tensors_fields(offs, tensor_count) + new_align = self.fields.get('general.alignment') + if new_align is not None: + if new_align.types != [GGUFValueType.UINT64]: + raise ValueError('Bad type for general.alignment field') + self.alignment = new_align.parts[-1][0] + padding = offs % self.alignment + if padding != 0: + offs += self.alignment - padding + self._build_tensors(offs, tensors_fields) + + _DT = TypeVar('_DT', bound = npt.DTypeLike) + + # Fetch a key/value metadata field by key. + def get_field(self, key: str) -> Union[ReaderField, None]: + return self.fields.get(key, None) + + # Fetch a tensor from the list by index. 
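+    # Tensors are listed in the same order as their tensor-info records
+    # appear in the file header.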
+ def get_tensor(self, idx: int) -> ReaderTensor: + return self.tensors[idx] + + def _get( + self, offset: int, dtype: npt.DTypeLike, count: int = 1, override_order: None | Literal['I' | 'S' | '<'] = None, + ) -> npt.NDArray[Any]: + count = int(count) + itemsize = int(np.empty([], dtype = dtype).itemsize) + end_offs = offset + itemsize * count + return ( + self.data[offset:end_offs] + .view(dtype = dtype)[:count] + .newbyteorder(override_order or self.byte_order) + ) + + def _push_field(self, field: ReaderField, skip_sum: bool = False) -> int: + if field.name in self.fields: + raise KeyError(f'Duplicate {field.name} already in list at offset {field.offset}') + self.fields[field.name] = field + return 0 if skip_sum else sum(int(part.nbytes) for part in field.parts) + + def _get_str(self, offset: int) -> tuple[npt.NDArray[np.uint64], npt.NDArray[np.uint8]]: + slen = self._get(offset, np.uint64) + return slen, self._get(offset + 8, np.uint8, slen[0]) + + def _get_field_parts( + self, orig_offs: int, raw_type: int, + ) -> tuple[int, list[npt.NDArray[Any]], list[int], list[GGUFValueType]]: + offs = orig_offs + types: list[GGUFValueType] = [] + gtype = GGUFValueType(raw_type) + types.append(gtype) + # Handle strings. + if gtype == GGUFValueType.STRING: + sparts: list[npt.NDArray[Any]] = list(self._get_str(offs)) + size = sum(int(part.nbytes) for part in sparts) + return size, sparts, [1], types + # Check if it's a simple scalar type. + nptype = self.gguf_scalar_to_np.get(gtype) + if nptype is not None: + val = self._get(offs, nptype) + return int(val.nbytes), [val], [0], types + # Handle arrays. + if gtype == GGUFValueType.ARRAY: + raw_itype = self._get(offs, np.uint32) + offs += int(raw_itype.nbytes) + alen = self._get(offs, np.uint64) + offs += int(alen.nbytes) + aparts: list[npt.NDArray[Any]] = [raw_itype, alen] + data_idxs: list[int] = [] + for idx in range(alen[0]): + curr_size, curr_parts, curr_idxs, curr_types = self._get_field_parts(offs, raw_itype[0]) + if idx == 0: + types += curr_types + idxs_offs = len(aparts) + aparts += curr_parts + data_idxs += (idx + idxs_offs for idx in curr_idxs) + offs += curr_size + return offs - orig_offs, aparts, data_idxs, types + # We can't deal with this one. 
+        raise ValueError(f'Unknown/unhandled field type {gtype}')
+
+    def _get_tensor(self, orig_offs: int) -> ReaderField:
+        offs = orig_offs
+        name_len, name_data = self._get_str(offs)
+        offs += int(name_len.nbytes + name_data.nbytes)
+        n_dims = self._get(offs, np.uint32)
+        offs += int(n_dims.nbytes)
+        dims = self._get(offs, np.uint64, n_dims[0])
+        offs += int(dims.nbytes)
+        raw_dtype = self._get(offs, np.uint32)
+        offs += int(raw_dtype.nbytes)
+        offset_tensor = self._get(offs, np.uint64)
+        offs += int(offset_tensor.nbytes)
+        return ReaderField(
+            orig_offs,
+            str(bytes(name_data), encoding = 'utf-8'),
+            [name_len, name_data, n_dims, dims, raw_dtype, offset_tensor],
+            [1, 3, 4, 5],
+        )
+
+    def _build_fields(self, offs: int, count: int) -> int:
+        for _ in range(count):
+            orig_offs = offs
+            kv_klen, kv_kdata = self._get_str(offs)
+            offs += int(kv_klen.nbytes + kv_kdata.nbytes)
+            raw_kv_type = self._get(offs, np.uint32)
+            offs += int(raw_kv_type.nbytes)
+            parts: list[npt.NDArray[Any]] = [kv_klen, kv_kdata, raw_kv_type]
+            idxs_offs = len(parts)
+            field_size, field_parts, field_idxs, field_types = self._get_field_parts(offs, raw_kv_type[0])
+            parts += field_parts
+            self._push_field(ReaderField(
+                orig_offs,
+                str(bytes(kv_kdata), encoding = 'utf-8'),
+                parts,
+                [idx + idxs_offs for idx in field_idxs],
+                field_types,
+            ), skip_sum = True)
+            offs += field_size
+        return offs
+
+    def _build_tensors_fields(self, offs: int, count: int) -> tuple[int, list[ReaderField]]:
+        tensor_fields = []
+        for _ in range(count):
+            field = self._get_tensor(offs)
+            offs += sum(int(part.nbytes) for part in field.parts)
+            tensor_fields.append(field)
+        return offs, tensor_fields
+
+    def _build_tensors(self, start_offs: int, fields: list[ReaderField]) -> None:
+        tensors = []
+        for field in fields:
+            _name_len, name_data, _n_dims, dims, raw_dtype, offset_tensor = field.parts
+            ggml_type = GGMLQuantizationType(raw_dtype[0])
+            n_elems = np.prod(dims)
+            block_size, type_size = GGML_QUANT_SIZES[ggml_type]
+            n_bytes = n_elems * type_size // block_size
+            data_offs = int(start_offs + offset_tensor[0])
+            item_type: npt.DTypeLike
+            if ggml_type == GGMLQuantizationType.F32:
+                item_count = n_elems
+                item_type = np.float32
+            elif ggml_type == GGMLQuantizationType.F16:
+                item_count = n_elems
+                item_type = np.float16
+            else:
+                item_count = n_bytes
+                item_type = np.uint8
+            tensors.append(ReaderTensor(
+                name = str(bytes(name_data), encoding = 'utf-8'),
+                tensor_type = ggml_type,
+                shape = dims,
+                n_elements = n_elems,
+                n_bytes = n_bytes,
+                data_offset = data_offs,
+                data = self._get(data_offs, item_type, item_count),
+                field = field,
+            ))
+        self.tensors = tensors
diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py
new file mode 100644
index 000000000..75fb6976f
--- /dev/null
+++ b/gguf-py/gguf/gguf_writer.py
@@ -0,0 +1,409 @@
+from __future__ import annotations
+
+import os
+import shutil
+import struct
+import tempfile
+from enum import Enum, auto
+from io import BufferedWriter
+from typing import IO, Any, Sequence
+
+import numpy as np
+
+from .constants import (
+    GGUF_DEFAULT_ALIGNMENT,
+    GGUF_MAGIC,
+    GGUF_VERSION,
+    GGMLQuantizationType,
+    GGUFEndian,
+    GGUFValueType,
+    Keys,
+    RopeScalingType,
+    TokenType,
+)
+
+
+class WriterState(Enum):
+    EMPTY   = auto()
+    HEADER  = auto()
+    KV_DATA = auto()
+    TI_DATA = auto()
+
+
+class GGUFWriter:
+    fout: BufferedWriter
+    temp_file: tempfile.SpooledTemporaryFile[bytes] | None
+    tensors: list[np.ndarray[Any, Any]]
+    _simple_value_packing = {
+        GGUFValueType.UINT8:   "B",
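+        # struct format characters for fixed-size scalars; _pack() prepends
+        # the "<" or ">" endianness prefix unless skip_pack_prefix is set.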
+        GGUFValueType.INT8:    "b",
+        GGUFValueType.UINT16:  "H",
+        GGUFValueType.INT16:   "h",
+        GGUFValueType.UINT32:  "I",
+        GGUFValueType.INT32:   "i",
+        GGUFValueType.FLOAT32: "f",
+        GGUFValueType.UINT64:  "Q",
+        GGUFValueType.INT64:   "q",
+        GGUFValueType.FLOAT64: "d",
+        GGUFValueType.BOOL:    "?",
+    }
+
+    def __init__(
+        self, path: os.PathLike[str] | str, arch: str, use_temp_file: bool = True,
+        endianess: GGUFEndian = GGUFEndian.LITTLE,
+    ):
+        self.fout = open(path, "wb")
+        self.arch = arch
+        self.endianess = endianess
+        self.offset_tensor = 0
+        self.data_alignment = GGUF_DEFAULT_ALIGNMENT
+        self.kv_data = b""
+        self.kv_data_count = 0
+        self.ti_data = b""
+        self.ti_data_count = 0
+        self.use_temp_file = use_temp_file
+        self.temp_file = None
+        self.tensors = []
+        print("gguf: This GGUF file is for {0} Endian only".format(
+            "Big" if self.endianess == GGUFEndian.BIG else "Little",
+        ))
+        self.state = WriterState.EMPTY
+
+        self.add_architecture()
+
+    def write_header_to_file(self) -> None:
+        if self.state is not WriterState.EMPTY:
+            raise ValueError(f'Expected output file to be empty, got {self.state}')
+
+        self._write_packed("<I", GGUF_MAGIC, skip_pack_prefix = True)
+        self._write_packed("I", GGUF_VERSION)
+        self._write_packed("Q", self.ti_data_count)
+        self._write_packed("Q", self.kv_data_count)
+        self.flush()
+        self.state = WriterState.HEADER
+
+    def write_kv_data_to_file(self) -> None:
+        if self.state is not WriterState.HEADER:
+            raise ValueError(f'Expected output file to contain the header, got {self.state}')
+
+        self.fout.write(self.kv_data)
+        self.flush()
+        self.state = WriterState.KV_DATA
+
+    def write_ti_data_to_file(self) -> None:
+        if self.state is not WriterState.KV_DATA:
+            raise ValueError(f'Expected output file to contain KV data, got {self.state}')
+
+        self.fout.write(self.ti_data)
+        self.flush()
+        self.state = WriterState.TI_DATA
+
+    def add_key(self, key: str) -> None:
+        self.add_val(key, GGUFValueType.STRING, add_vtype=False)
+
+    def add_uint8(self, key: str, val: int) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.UINT8)
+
+    def add_int8(self, key: str, val: int) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.INT8)
+
+    def add_uint16(self, key: str, val: int) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.UINT16)
+
+    def add_int16(self, key: str, val: int) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.INT16)
+
+    def add_uint32(self, key: str, val: int) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.UINT32)
+
+    def add_int32(self, key: str, val: int) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.INT32)
+
+    def add_float32(self, key: str, val: float) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.FLOAT32)
+
+    def add_uint64(self, key: str, val: int) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.UINT64)
+
+    def add_int64(self, key: str, val: int) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.INT64)
+
+    def add_float64(self, key: str, val: float) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.FLOAT64)
+
+    def add_bool(self, key: str, val: bool) -> None:
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.BOOL)
+
+    def add_string(self, key: str, val: str) -> None:
+        if not val:
+            return
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.STRING)
+
+    def add_array(self, key: str, val: Sequence[Any]) -> None:
+        if not isinstance(val, Sequence):
+            raise ValueError("Value must be a sequence for array type")
+
+        self.add_key(key)
+        self.add_val(val, GGUFValueType.ARRAY)
+
+    def add_val(self, val: Any, vtype: GGUFValueType | None = None, add_vtype: bool = True) -> None:
+        if vtype is None:
+            vtype = GGUFValueType.get_type(val)
+
+        if add_vtype:
+            self.kv_data += self._pack("I", vtype)
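+            # The value-type tag was just written; the increment below counts
+            # this as one top-level KV entry. Array elements are emitted via
+            # add_val(..., add_vtype=False) and are not tagged or counted.
+            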
self.kv_data_count += 1 + + pack_fmt = self._simple_value_packing.get(vtype) + if pack_fmt is not None: + self.kv_data += self._pack(pack_fmt, val, skip_pack_prefix = vtype == GGUFValueType.BOOL) + elif vtype == GGUFValueType.STRING: + encoded_val = val.encode("utf8") if isinstance(val, str) else val + self.kv_data += self._pack("Q", len(encoded_val)) + self.kv_data += encoded_val + elif vtype == GGUFValueType.ARRAY and isinstance(val, Sequence) and val: + ltype = GGUFValueType.get_type(val[0]) + if not all(GGUFValueType.get_type(i) is ltype for i in val[1:]): + raise ValueError("All items in a GGUF array should be of the same type") + self.kv_data += self._pack("I", ltype) + self.kv_data += self._pack("Q", len(val)) + for item in val: + self.add_val(item, add_vtype=False) + else: + raise ValueError("Invalid GGUF metadata value type or value") + + @staticmethod + def ggml_pad(x: int, n: int) -> int: + return ((x + n - 1) // n) * n + + def add_tensor_info( + self, name: str, tensor_shape: Sequence[int], tensor_dtype: np.dtype[np.float16] | np.dtype[np.float32], + tensor_nbytes: int, raw_dtype: GGMLQuantizationType | None = None, + ) -> None: + if self.state is not WriterState.EMPTY: + raise ValueError(f'Expected output file to be empty, got {self.state}') + + if raw_dtype is None and tensor_dtype not in (np.float32, np.float16): + raise ValueError("Only F32 and F16 tensors are supported for now") + + encoded_name = name.encode("utf8") + self.ti_data += self._pack("Q", len(encoded_name)) + self.ti_data += encoded_name + n_dims = len(tensor_shape) + self.ti_data += self._pack("I", n_dims) + for i in range(n_dims): + self.ti_data += self._pack("Q", tensor_shape[n_dims - 1 - i]) + if raw_dtype is None: + dtype = GGMLQuantizationType.F32 if tensor_dtype == np.float32 else GGMLQuantizationType.F16 + else: + dtype = raw_dtype + self.ti_data += self._pack("I", dtype) + self.ti_data += self._pack("Q", self.offset_tensor) + self.offset_tensor += GGUFWriter.ggml_pad(tensor_nbytes, self.data_alignment) + self.ti_data_count += 1 + + def add_tensor( + self, name: str, tensor: np.ndarray[Any, Any], raw_shape: Sequence[int] | None = None, + raw_dtype: GGMLQuantizationType | None = None, + ) -> None: + if self.endianess == GGUFEndian.BIG: + tensor.byteswap(inplace=True) + if self.use_temp_file and self.temp_file is None: + fp = tempfile.SpooledTemporaryFile(mode="w+b", max_size=256*1024*1024) + fp.seek(0) + self.temp_file = fp + + shape: Sequence[int] = raw_shape if raw_shape is not None else tensor.shape + self.add_tensor_info(name, shape, tensor.dtype, tensor.nbytes, raw_dtype = raw_dtype) + + if self.temp_file is None: + self.tensors.append(tensor) + return + + tensor.tofile(self.temp_file) + self.write_padding(self.temp_file, tensor.nbytes) + + def write_padding(self, fp: IO[bytes], n: int, align: int | None = None) -> None: + pad = GGUFWriter.ggml_pad(n, align if align is not None else self.data_alignment) - n + if pad != 0: + fp.write(bytes([0] * pad)) + + def write_tensor_data(self, tensor: np.ndarray[Any, Any]) -> None: + if self.state is not WriterState.TI_DATA: + raise ValueError(f'Expected output file to contain tensor info, got {self.state}') + + if self.endianess == GGUFEndian.BIG: + tensor.byteswap(inplace=True) + self.write_padding(self.fout, self.fout.tell()) + tensor.tofile(self.fout) + self.write_padding(self.fout, tensor.nbytes) + + def write_tensors_to_file(self) -> None: + self.write_ti_data_to_file() + + self.write_padding(self.fout, self.fout.tell()) + + if self.temp_file is None: + 
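# No temp file in use: tensors were buffered in memory by add_tensor,
+            # so stream them out (with their padding) directly.
+            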
while True: + try: + tensor = self.tensors.pop(0) + except IndexError: + break + tensor.tofile(self.fout) + self.write_padding(self.fout, tensor.nbytes) + return + + self.temp_file.seek(0) + + shutil.copyfileobj(self.temp_file, self.fout) + self.flush() + self.temp_file.close() + + def flush(self) -> None: + self.fout.flush() + + def close(self) -> None: + self.fout.close() + + def add_architecture(self) -> None: + self.add_string(Keys.General.ARCHITECTURE, self.arch) + + def add_author(self, author: str) -> None: + self.add_string(Keys.General.AUTHOR, author) + + def add_tensor_data_layout(self, layout: str) -> None: + self.add_string(Keys.LLM.TENSOR_DATA_LAYOUT.format(arch=self.arch), layout) + + def add_url(self, url: str) -> None: + self.add_string(Keys.General.URL, url) + + def add_description(self, description: str) -> None: + self.add_string(Keys.General.DESCRIPTION, description) + + def add_source_url(self, url: str) -> None: + self.add_string(Keys.General.SOURCE_URL, url) + + def add_source_hf_repo(self, repo: str) -> None: + self.add_string(Keys.General.SOURCE_HF_REPO, repo) + + def add_file_type(self, ftype: int) -> None: + self.add_uint32(Keys.General.FILE_TYPE, ftype) + + def add_name(self, name: str) -> None: + self.add_string(Keys.General.NAME, name) + + def add_quantization_version(self, quantization_version: GGMLQuantizationType) -> None: + self.add_uint32( + Keys.General.QUANTIZATION_VERSION, quantization_version) + + def add_custom_alignment(self, alignment: int) -> None: + self.data_alignment = alignment + self.add_uint32(Keys.General.ALIGNMENT, alignment) + + def add_context_length(self, length: int) -> None: + self.add_uint32(Keys.LLM.CONTEXT_LENGTH.format(arch=self.arch), length) + + def add_embedding_length(self, length: int) -> None: + self.add_uint32(Keys.LLM.EMBEDDING_LENGTH.format(arch=self.arch), length) + + def add_block_count(self, length: int) -> None: + self.add_uint32(Keys.LLM.BLOCK_COUNT.format(arch=self.arch), length) + + def add_feed_forward_length(self, length: int) -> None: + self.add_uint32(Keys.LLM.FEED_FORWARD_LENGTH.format(arch=self.arch), length) + + def add_parallel_residual(self, use: bool) -> None: + self.add_bool(Keys.LLM.USE_PARALLEL_RESIDUAL.format(arch=self.arch), use) + + def add_head_count(self, count: int) -> None: + self.add_uint32(Keys.Attention.HEAD_COUNT.format(arch=self.arch), count) + + def add_head_count_kv(self, count: int) -> None: + self.add_uint32(Keys.Attention.HEAD_COUNT_KV.format(arch=self.arch), count) + + def add_max_alibi_bias(self, bias: float) -> None: + self.add_float32(Keys.Attention.MAX_ALIBI_BIAS.format(arch=self.arch), bias) + + def add_clamp_kqv(self, value: float) -> None: + self.add_float32(Keys.Attention.CLAMP_KQV.format(arch=self.arch), value) + + def add_layer_norm_eps(self, value: float) -> None: + self.add_float32(Keys.Attention.LAYERNORM_EPS.format(arch=self.arch), value) + + def add_layer_norm_rms_eps(self, value: float) -> None: + self.add_float32(Keys.Attention.LAYERNORM_RMS_EPS.format(arch=self.arch), value) + + def add_rope_dimension_count(self, count: int) -> None: + self.add_uint32(Keys.Rope.DIMENSION_COUNT.format(arch=self.arch), count) + + def add_rope_freq_base(self, value: float) -> None: + self.add_float32(Keys.Rope.FREQ_BASE.format(arch=self.arch), value) + + def add_rope_scaling_type(self, value: RopeScalingType) -> None: + self.add_string(Keys.Rope.SCALING_TYPE.format(arch=self.arch), value.value) + + def add_rope_scaling_factor(self, value: float) -> None: + 
self.add_float32(Keys.Rope.SCALING_FACTOR.format(arch=self.arch), value) + + def add_rope_scaling_orig_ctx_len(self, value: int) -> None: + self.add_uint32(Keys.Rope.SCALING_ORIG_CTX_LEN.format(arch=self.arch), value) + + def add_rope_scaling_finetuned(self, value: bool) -> None: + self.add_bool(Keys.Rope.SCALING_FINETUNED.format(arch=self.arch), value) + + def add_tokenizer_model(self, model: str) -> None: + self.add_string(Keys.Tokenizer.MODEL, model) + + def add_token_list(self, tokens: Sequence[str] | Sequence[bytes] | Sequence[bytearray]) -> None: + self.add_array(Keys.Tokenizer.LIST, tokens) + + def add_token_merges(self, merges: Sequence[str] | Sequence[bytes] | Sequence[bytearray]) -> None: + self.add_array(Keys.Tokenizer.MERGES, merges) + + def add_token_types(self, types: Sequence[TokenType] | Sequence[int]) -> None: + self.add_array(Keys.Tokenizer.TOKEN_TYPE, types) + + def add_token_scores(self, scores: Sequence[float]) -> None: + self.add_array(Keys.Tokenizer.SCORES, scores) + + def add_bos_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.BOS_ID, id) + + def add_eos_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.EOS_ID, id) + + def add_unk_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.UNK_ID, id) + + def add_sep_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.SEP_ID, id) + + def add_pad_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.PAD_ID, id) + + def add_add_bos_token(self, value: bool) -> None: + self.add_bool(Keys.Tokenizer.ADD_BOS, value) + + def add_add_eos_token(self, value: bool) -> None: + self.add_bool(Keys.Tokenizer.ADD_EOS, value) + + def _pack(self, fmt: str, value: Any, skip_pack_prefix: bool = False) -> bytes: + pack_prefix = '' + if not skip_pack_prefix: + pack_prefix = '<' if self.endianess == GGUFEndian.LITTLE else '>' + return struct.pack(f'{pack_prefix}{fmt}', value) + + def _write_packed(self, fmt: str, value: Any, skip_pack_prefix: bool = False) -> None: + self.fout.write(self._pack(fmt, value, skip_pack_prefix)) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py new file mode 100644 index 000000000..22ad8b8fc --- /dev/null +++ b/gguf-py/gguf/tensor_mapping.py @@ -0,0 +1,257 @@ +from __future__ import annotations + +from typing import Sequence + +from .constants import MODEL_ARCH, MODEL_TENSOR, MODEL_TENSORS, TENSOR_NAMES + + +class TensorNameMap: + mappings_cfg: dict[MODEL_TENSOR, tuple[str, ...]] = { + # Token embeddings + MODEL_TENSOR.TOKEN_EMBD: ( + "gpt_neox.embed_in", # gptneox + "transformer.wte", # gpt2 gpt-j mpt refact + "transformer.word_embeddings", # falcon + "word_embeddings", # bloom + "model.embed_tokens", # llama-hf + "tok_embeddings", # llama-pth + "embeddings.word_embeddings", # bert + "language_model.embedding.word_embeddings", # persimmon + ), + + # Token type embeddings + MODEL_TENSOR.TOKEN_TYPES: ( + "embeddings.token_type_embeddings", # bert + ), + + # Normalization of token embeddings + MODEL_TENSOR.TOKEN_EMBD_NORM: ( + "word_embeddings_layernorm", # bloom + ), + + # Position embeddings + MODEL_TENSOR.POS_EMBD: ( + "transformer.wpe", # gpt2 + "embeddings.position_embeddings", # bert + ), + + # Output + MODEL_TENSOR.OUTPUT: ( + "embed_out", # gptneox + "lm_head", # gpt2 mpt falcon llama-hf baichuan + "output", # llama-pth bloom + "word_embeddings_for_head", # persimmon + ), + + # Output norm + MODEL_TENSOR.OUTPUT_NORM: ( + "gpt_neox.final_layer_norm", # gptneox + "transformer.ln_f", # gpt2 gpt-j falcon + 
"model.norm", # llama-hf baichuan + "norm", # llama-pth + "embeddings.LayerNorm", # bert + "transformer.norm_f", # mpt + "ln_f", # refact bloom + "language_model.encoder.final_layernorm", # persimmon + ), + + # Rope frequencies + MODEL_TENSOR.ROPE_FREQS: ( + "rope.freqs", # llama-pth + ), + } + + block_mappings_cfg: dict[MODEL_TENSOR, tuple[str, ...]] = { + # Attention norm + MODEL_TENSOR.ATTN_NORM: ( + "gpt_neox.layers.{bid}.input_layernorm", # gptneox + "transformer.h.{bid}.ln_1", # gpt2 gpt-j refact + "transformer.blocks.{bid}.norm_1", # mpt + "transformer.h.{bid}.input_layernorm", # falcon7b + "h.{bid}.input_layernorm", # bloom + "transformer.h.{bid}.ln_mlp", # falcon40b + "model.layers.{bid}.input_layernorm", # llama-hf + "layers.{bid}.attention_norm", # llama-pth + "encoder.layer.{bid}.attention.output.LayerNorm", # bert + "language_model.encoder.layers.{bid}.input_layernorm", # persimmon + "model.layers.{bid}.ln1", # yi + ), + + # Attention norm 2 + MODEL_TENSOR.ATTN_NORM_2: ( + "transformer.h.{bid}.ln_attn", # falcon40b + ), + + # Attention query-key-value + MODEL_TENSOR.ATTN_QKV: ( + "gpt_neox.layers.{bid}.attention.query_key_value", # gptneox + "transformer.h.{bid}.attn.c_attn", # gpt2 + "transformer.blocks.{bid}.attn.Wqkv", # mpt + "transformer.h.{bid}.self_attention.query_key_value", # falcon + "h.{bid}.self_attention.query_key_value", # bloom + "language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon + ), + + # Attention query + MODEL_TENSOR.ATTN_Q: ( + "model.layers.{bid}.self_attn.q_proj", # llama-hf + "layers.{bid}.attention.wq", # llama-pth + "encoder.layer.{bid}.attention.self.query", # bert + "transformer.h.{bid}.attn.q_proj", # gpt-j + ), + + # Attention key + MODEL_TENSOR.ATTN_K: ( + "model.layers.{bid}.self_attn.k_proj", # llama-hf + "layers.{bid}.attention.wk", # llama-pth + "encoder.layer.{bid}.attention.self.key", # bert + "transformer.h.{bid}.attn.k_proj", # gpt-j + ), + + # Attention value + MODEL_TENSOR.ATTN_V: ( + "model.layers.{bid}.self_attn.v_proj", # llama-hf + "layers.{bid}.attention.wv", # llama-pth + "encoder.layer.{bid}.attention.self.value", # bert + "transformer.h.{bid}.attn.v_proj", # gpt-j + ), + + # Attention output + MODEL_TENSOR.ATTN_OUT: ( + "gpt_neox.layers.{bid}.attention.dense", # gptneox + "transformer.h.{bid}.attn.c_proj", # gpt2 refact + "transformer.blocks.{bid}.attn.out_proj", # mpt + "transformer.h.{bid}.self_attention.dense", # falcon + "h.{bid}.self_attention.dense", # bloom + "model.layers.{bid}.self_attn.o_proj", # llama-hf + "layers.{bid}.attention.wo", # llama-pth + "encoder.layer.{bid}.attention.output.dense", # bert + "transformer.h.{bid}.attn.out_proj", # gpt-j + "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon + ), + + # Rotary embeddings + MODEL_TENSOR.ATTN_ROT_EMBD: ( + "model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf + "layers.{bid}.attention.inner_attention.rope.freqs", # llama-pth + ), + + # Feed-forward norm + MODEL_TENSOR.FFN_NORM: ( + "gpt_neox.layers.{bid}.post_attention_layernorm", # gptneox + "transformer.h.{bid}.ln_2", # gpt2 refact + "h.{bid}.post_attention_layernorm", # bloom + "transformer.blocks.{bid}.norm_2", # mpt + "model.layers.{bid}.post_attention_layernorm", # llama-hf + "layers.{bid}.ffn_norm", # llama-pth + "encoder.layer.{bid}.output.LayerNorm", # bert + "language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon + "model.layers.{bid}.ln2", # yi + ), + + # Feed-forward up + MODEL_TENSOR.FFN_UP: ( + 
"gpt_neox.layers.{bid}.mlp.dense_h_to_4h", # gptneox + "transformer.h.{bid}.mlp.c_fc", # gpt2 + "transformer.blocks.{bid}.ffn.up_proj", # mpt + "transformer.h.{bid}.mlp.dense_h_to_4h", # falcon + "h.{bid}.mlp.dense_h_to_4h", # bloom + "model.layers.{bid}.mlp.up_proj", # llama-hf refact + "layers.{bid}.feed_forward.w3", # llama-pth + "encoder.layer.{bid}.intermediate.dense", # bert + "transformer.h.{bid}.mlp.fc_in", # gpt-j + "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon + ), + + # Feed-forward gate + MODEL_TENSOR.FFN_GATE: ( + "model.layers.{bid}.mlp.gate_proj", # llama-hf refact + "layers.{bid}.feed_forward.w1", # llama-pth + ), + + # Feed-forward down + MODEL_TENSOR.FFN_DOWN: ( + "gpt_neox.layers.{bid}.mlp.dense_4h_to_h", # gptneox + "transformer.h.{bid}.mlp.c_proj", # gpt2 refact + "transformer.blocks.{bid}.ffn.down_proj", # mpt + "transformer.h.{bid}.mlp.dense_4h_to_h", # falcon + "h.{bid}.mlp.dense_4h_to_h", # bloom + "model.layers.{bid}.mlp.down_proj", # llama-hf + "layers.{bid}.feed_forward.w2", # llama-pth + "encoder.layer.{bid}.output.dense", # bert + "transformer.h.{bid}.mlp.fc_out", # gpt-j + "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon + ), + + MODEL_TENSOR.ATTN_Q_NORM: ( + "language_model.encoder.layers.{bid}.self_attention.q_layernorm", + ), + + MODEL_TENSOR.ATTN_K_NORM: ( + "language_model.encoder.layers.{bid}.self_attention.k_layernorm", + ), + + MODEL_TENSOR.ROPE_FREQS: ( + "language_model.encoder.layers.{bid}.self_attention.rotary_emb.inv_freq", # persimmon + ), + } + + mapping: dict[str, tuple[MODEL_TENSOR, str]] + + def __init__(self, arch: MODEL_ARCH, n_blocks: int): + self.mapping = {} + for tensor, keys in self.mappings_cfg.items(): + if tensor not in MODEL_TENSORS[arch]: + continue + tensor_name = TENSOR_NAMES[tensor] + self.mapping[tensor_name] = (tensor, tensor_name) + for key in keys: + self.mapping[key] = (tensor, tensor_name) + for bid in range(n_blocks): + for tensor, keys in self.block_mappings_cfg.items(): + if tensor not in MODEL_TENSORS[arch]: + continue + tensor_name = TENSOR_NAMES[tensor].format(bid = bid) + self.mapping[tensor_name] = (tensor, tensor_name) + for key in keys: + key = key.format(bid = bid) + self.mapping[key] = (tensor, tensor_name) + + def get_type_and_name(self, key: str, try_suffixes: Sequence[str] = ()) -> tuple[MODEL_TENSOR, str] | None: + result = self.mapping.get(key) + if result is not None: + return result + for suffix in try_suffixes: + if key.endswith(suffix): + result = self.mapping.get(key[:-len(suffix)]) + if result is not None: + return result[0], result[1] + suffix + return None + + def get_name(self, key: str, try_suffixes: Sequence[str] = ()) -> str | None: + result = self.get_type_and_name(key, try_suffixes = try_suffixes) + if result is None: + return None + return result[1] + + def get_type(self, key: str, try_suffixes: Sequence[str] = ()) -> MODEL_TENSOR | None: + result = self.get_type_and_name(key, try_suffixes = try_suffixes) + if result is None: + return None + return result[0] + + def __getitem__(self, key: str) -> str: + try: + return self.mapping[key][1] + except KeyError: + raise KeyError(key) + + def __contains__(self, key: str) -> bool: + return key in self.mapping + + def __repr__(self) -> str: + return repr(self.mapping) + + +def get_tensor_name_map(arch: MODEL_ARCH, n_blocks: int) -> TensorNameMap: + return TensorNameMap(arch, n_blocks) diff --git a/gguf-py/gguf/vocab.py b/gguf-py/gguf/vocab.py new file mode 100644 index 000000000..71192a928 --- /dev/null 
+++ b/gguf-py/gguf/vocab.py
@@ -0,0 +1,164 @@
+from __future__ import annotations
+
+import json
+import os
+import sys
+from pathlib import Path
+from typing import Any, Callable
+
+from .gguf_writer import GGUFWriter
+
+
+class SpecialVocab:
+    merges: list[str]
+    add_special_token: dict[str, bool]
+    special_token_ids: dict[str, int]
+
+    def __init__(
+        self, path: str | os.PathLike[str], load_merges: bool = False,
+        special_token_types: tuple[str, ...] | None = None,
+        n_vocab: int | None = None,
+    ):
+        self.special_token_ids = {}
+        self.add_special_token = {}
+        self.n_vocab = n_vocab
+        self.load_merges = load_merges
+        self.merges = []
+        if special_token_types is not None:
+            self.special_token_types = special_token_types
+        else:
+            self.special_token_types = ('bos', 'eos', 'unk', 'sep', 'pad')
+        self._load(Path(path))
+
+    def __repr__(self) -> str:
+        return '<SpecialVocab with {} merges, special tokens {}, add special tokens {}>'.format(
+            len(self.merges), self.special_token_ids or "unset", self.add_special_token or "unset",
+        )
+
+    def add_to_gguf(self, gw: GGUFWriter, quiet: bool = False) -> None:
+        if self.merges:
+            if not quiet:
+                print(f'gguf: Adding {len(self.merges)} merge(s).')
+            gw.add_token_merges(self.merges)
+        elif self.load_merges:
+            print(
+                'gguf: WARNING: Adding merges requested but no merges found, output may be non-functional.',
+                file = sys.stderr,
+            )
+        for typ, tokid in self.special_token_ids.items():
+            id_handler: Callable[[int], None] | None = getattr(gw, f'add_{typ}_token_id', None)
+            if id_handler is None:
+                print(
+                    f'gguf: WARNING: No handler for special token type {typ} with id {tokid} - skipping',
+                    file = sys.stderr,
+                )
+                continue
+            if not quiet:
+                print(f'gguf: Setting special token type {typ} to {tokid}')
+            id_handler(tokid)
+        for typ, value in self.add_special_token.items():
+            add_handler: Callable[[bool], None] | None = getattr(gw, f'add_add_{typ}_token', None)
+            if add_handler is None:
+                print(
+                    f'gguf: WARNING: No handler for add_{typ}_token with value {value} - skipping',
+                    file = sys.stderr,
+                )
+                continue
+            if not quiet:
+                print(f'gguf: Setting add_{typ}_token to {value}')
+            add_handler(value)
+
+    def _load(self, path: Path) -> None:
+        self._try_load_from_tokenizer_json(path)
+        self._try_load_from_config_json(path)
+        if self.load_merges and not self.merges:
+            self._try_load_merges_txt(path)
+
+    def _try_load_merges_txt(self, path: Path) -> bool:
+        merges_file = path / 'merges.txt'
+        if not merges_file.is_file():
+            return False
+        with open(merges_file, 'r') as fp:
+            first_line = next(fp, '').strip()
+            if not first_line.startswith('#'):
+                fp.seek(0)
+                line_num = 0
+            else:
+                line_num = 1
+            merges = []
+            for line in fp:
+                line_num += 1
+                line = line.strip()
+                if not line:
+                    continue
+                parts = line.split(None, 3)
+                if len(parts) != 2:
+                    print(
+                        f'gguf: WARNING: {merges_file.name}: Line {line_num}: Entry malformed, ignoring',
+                        file = sys.stderr,
+                    )
+                    continue
+                merges.append(f'{parts[0]} {parts[1]}')
+        self.merges = merges
+        return True
+
+    def _set_special_token(self, typ: str, tid: Any) -> None:
+        if not isinstance(tid, int) or tid < 0:
+            return
+        if self.n_vocab is None or tid < self.n_vocab:
+            if typ in self.special_token_ids:
+                return
+            self.special_token_ids[typ] = tid
+            return
+        print(
+            f'gguf: WARNING: Special token type {typ}, id {tid} out of range, must be under {self.n_vocab} - skipping',
+            file = sys.stderr,
+        )
+
+    def _try_load_from_tokenizer_json(self, path: Path) -> bool:
+        tokenizer_file = path / 'tokenizer.json'
+        if not tokenizer_file.is_file():
+            return False
+        with open(tokenizer_file, encoding = 'utf-8') as f:
+            tokenizer = json.load(f)
+        if self.load_merges:
+            merges = tokenizer.get('model', {}).get('merges')
+            if isinstance(merges, list) and merges and isinstance(merges[0], str):
+                self.merges = merges
+        tokenizer_config_file = path / 'tokenizer_config.json'
+        added_tokens = tokenizer.get('added_tokens')
+        if added_tokens is None or not tokenizer_config_file.is_file():
+            return True
+        with open(tokenizer_config_file, encoding = 'utf-8') as f:
+            tokenizer_config = json.load(f)
+        for typ in self.special_token_types:
+            add_entry = tokenizer_config.get(f'add_{typ}_token')
+            if isinstance(add_entry, bool):
+                self.add_special_token[typ] = add_entry
+            entry = tokenizer_config.get(f'{typ}_token')
+            if isinstance(entry, str):
+                tc_content = entry
+            elif isinstance(entry, dict):
+                entry_content = entry.get('content')
+                if not isinstance(entry_content, str):
+                    continue
+                tc_content = entry_content
+            else:
+                continue
+            # We only need the first match here.
+            maybe_token_id = next(
+                (atok.get('id') for atok in added_tokens if atok.get('content') == tc_content),
+                None,
+            )
+            self._set_special_token(typ, maybe_token_id)
+        return True
+
+    def _try_load_from_config_json(self, path: Path) -> bool:
+        config_file = path / 'config.json'
+        if not config_file.is_file():
+            return False
+        with open(config_file, encoding = 'utf-8') as f:
+            config = json.load(f)
+        for typ in self.special_token_types:
+            self._set_special_token(typ, config.get(f'{typ}_token_id'))
+        return True
diff --git a/gguf-py/pyproject.toml b/gguf-py/pyproject.toml
index c6cb2c37a..624e1cda6 100644
--- a/gguf-py/pyproject.toml
+++ b/gguf-py/pyproject.toml
@@ -1,11 +1,12 @@
 [tool.poetry]
 name = "gguf"
-version = "0.4.6"
+version = "0.5.0"
 description = "Write ML models in GGUF for GGML"
 authors = ["GGML <ggml@ggml.ai>"]
 packages = [
     {include = "gguf"},
     {include = "gguf/py.typed"},
+    {include = "scripts"},
 ]
 readme = "README.md"
 homepage = "https://ggml.ai"
@@ -27,3 +28,8 @@ pytest = "^5.2"

 [build-system]
 requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
+
+[tool.poetry.scripts]
+gguf-convert-endian = "scripts:gguf_convert_endian_entrypoint"
+gguf-dump = "scripts:gguf_dump_entrypoint"
+gguf-set-metadata = "scripts:gguf_set_metadata_entrypoint"
diff --git a/gguf-py/scripts/__init__.py b/gguf-py/scripts/__init__.py
new file mode 100644
index 000000000..77132db7a
--- /dev/null
+++ b/gguf-py/scripts/__init__.py
@@ -0,0 +1,12 @@
+import os
+
+from importlib import import_module
+
+
+os.environ["NO_LOCAL_GGUF"] = "TRUE"
+
+gguf_convert_endian_entrypoint = import_module("scripts.gguf-convert-endian").main
+gguf_dump_entrypoint = import_module("scripts.gguf-dump").main
+gguf_set_metadata_entrypoint = import_module("scripts.gguf-set-metadata").main
+
+del import_module, os
diff --git a/gguf-py/scripts/gguf-convert-endian.py b/gguf-py/scripts/gguf-convert-endian.py
new file mode 100755
index 000000000..b79d86e07
--- /dev/null
+++ b/gguf-py/scripts/gguf-convert-endian.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+import argparse
+import os
+import sys
+from pathlib import Path
+
+import numpy as np
+
+# Necessary to load the local gguf package
+if "NO_LOCAL_GGUF" not in os.environ and (Path(__file__).parent.parent.parent / 'gguf-py').exists():
+    sys.path.insert(0, str(Path(__file__).parent.parent))
+
+import gguf
+
+
+def convert_byteorder(reader: gguf.GGUFReader, args: argparse.Namespace) -> None:
+    if np.uint32(1) == np.uint32(1).newbyteorder("<"):
+        # Host is little endian
+        host_endian = 
"little" + swapped_endian = "big" + else: + # Sorry PDP or other weird systems that don't use BE or LE. + host_endian = "big" + swapped_endian = "little" + if reader.byte_order == "S": + file_endian = swapped_endian + else: + file_endian = host_endian + if args.order == "native": + order = host_endian + print(f"* Host is {host_endian.upper()} endian, GGUF file seems to be {file_endian.upper()} endian") + if file_endian == order: + print(f"* File is already {order.upper()} endian. Nothing to do.") + sys.exit(0) + print("* Checking tensors for conversion compatibility") + for tensor in reader.tensors: + if tensor.tensor_type not in ( + gguf.GGMLQuantizationType.F32, + gguf.GGMLQuantizationType.F16, + gguf.GGMLQuantizationType.Q8_0, + ): + raise ValueError(f"Cannot handle type {tensor.tensor_type.name} for tensor {repr(tensor.name)}") + print(f"* Preparing to convert from {file_endian.upper()} to {order.upper()}") + if args.dry_run: + return + print("\n*** Warning *** Warning *** Warning **") + print("* This conversion process may damage the file. Ensure you have a backup.") + if order != host_endian: + print("* Requested endian differs from host, you will not be able to load the model on this machine.") + print("* The file will be modified immediately, so if conversion fails or is interrupted") + print("* the file will be corrupted. Enter exactly YES if you are positive you want to proceed:") + response = input("YES, I am sure> ") + if response != "YES": + print("You didn't enter YES. Okay then, see ya!") + sys.exit(0) + print(f"\n* Converting fields ({len(reader.fields)})") + for idx, field in enumerate(reader.fields.values()): + print(f"- {idx:4}: Converting field {repr(field.name)}, part count: {len(field.parts)}") + for part in field.parts: + part.byteswap(inplace=True) + print(f"\n* Converting tensors ({len(reader.tensors)})") + for idx, tensor in enumerate(reader.tensors): + print( + f" - {idx:4}: Converting tensor {repr(tensor.name)}, type={tensor.tensor_type.name}, " + f"elements={tensor.n_elements}... ", + end="", + ) + tensor_type = tensor.tensor_type + for part in tensor.field.parts: + part.byteswap(inplace=True) + if tensor_type != gguf.GGMLQuantizationType.Q8_0: + tensor.data.byteswap(inplace=True) + print() + continue + # A Q8_0 block consists of a f16 delta followed by 32 int8 quants, so 34 bytes + block_size = 34 + n_blocks = len(tensor.data) // block_size + for block_num in range(n_blocks): + block_offs = block_num * block_size + # I know I said f16, but it doesn't matter here - any simple 16 bit type works. 
+ delta = tensor.data[block_offs:block_offs + 2].view(dtype=np.uint16) + delta.byteswap(inplace=True) + if block_num % 100000 == 0: + print(f"[{(n_blocks - block_num) // 1000}K]", end="") + sys.stdout.flush() + print() + print("* Completion") + + +def main() -> None: + parser = argparse.ArgumentParser(description="Convert GGUF file byte order") + parser.add_argument( + "model", type=str, + help="GGUF format model filename", + ) + parser.add_argument( + "order", type=str, choices=['big', 'little', 'native'], + help="Requested byte order", + ) + parser.add_argument( + "--dry-run", action="store_true", + help="Don't actually change anything", + ) + args = parser.parse_args(None if len(sys.argv) > 1 else ["--help"]) + print(f'* Loading: {args.model}') + reader = gguf.GGUFReader(args.model, 'r' if args.dry_run else 'r+') + convert_byteorder(reader, args) + + +if __name__ == "__main__": + main() diff --git a/gguf-py/scripts/gguf-dump.py b/gguf-py/scripts/gguf-dump.py new file mode 100755 index 000000000..5141873de --- /dev/null +++ b/gguf-py/scripts/gguf-dump.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +import os +import sys +from pathlib import Path +from typing import Any + +import numpy as np + +# Necessary to load the local gguf package +if "NO_LOCAL_GGUF" not in os.environ and (Path(__file__).parent.parent.parent / 'gguf-py').exists(): + sys.path.insert(0, str(Path(__file__).parent.parent)) + +from gguf import GGUFReader, GGUFValueType # noqa: E402 + + +def get_file_host_endian(reader: GGUFReader) -> tuple[str, str]: + host_endian = 'LITTLE' if np.uint32(1) == np.uint32(1).newbyteorder("<") else 'BIG' + if reader.byte_order == 'S': + file_endian = 'BIG' if host_endian == 'LITTLE' else 'LITTLE' + else: + file_endian = host_endian + return (host_endian, file_endian) + + +# For more information about what field.parts and field.data represent, +# please see the comments in the modify_gguf.py example. 
+def dump_metadata(reader: GGUFReader, args: argparse.Namespace) -> None: + host_endian, file_endian = get_file_host_endian(reader) + print(f'* File is {file_endian} endian, script is running on a {host_endian} endian host.') + print(f'\n* Dumping {len(reader.fields)} key/value pair(s)') + for n, field in enumerate(reader.fields.values(), 1): + if not field.types: + pretty_type = 'N/A' + elif field.types[0] == GGUFValueType.ARRAY: + nest_count = len(field.types) - 1 + pretty_type = '[' * nest_count + str(field.types[-1].name) + ']' * nest_count + else: + pretty_type = str(field.types[-1].name) + print(f' {n:5}: {pretty_type:10} | {len(field.data):8} | {field.name}', end = '') + if len(field.types) == 1: + curr_type = field.types[0] + if curr_type == GGUFValueType.STRING: + print(' = {0}'.format(repr(str(bytes(field.parts[-1]), encoding='utf8')[:60])), end = '') + elif field.types[0] in reader.gguf_scalar_to_np: + print(' = {0}'.format(field.parts[-1][0]), end = '') + print() + if args.no_tensors: + return + print(f'\n* Dumping {len(reader.tensors)} tensor(s)') + for n, tensor in enumerate(reader.tensors, 1): + prettydims = ', '.join('{0:5}'.format(d) for d in list(tensor.shape) + [1] * (4 - len(tensor.shape))) + print(f' {n:5}: {tensor.n_elements:10} | {prettydims} | {tensor.tensor_type.name:7} | {tensor.name}') + + +def dump_metadata_json(reader: GGUFReader, args: argparse.Namespace) -> None: + import json + host_endian, file_endian = get_file_host_endian(reader) + metadata: dict[str, Any] = {} + tensors: dict[str, Any] = {} + result = { + "filename": args.model, + "endian": file_endian, + "metadata": metadata, + "tensors": tensors, + } + for idx, field in enumerate(reader.fields.values()): + curr: dict[str, Any] = { + "index": idx, + "type": field.types[0].name if field.types else 'UNKNOWN', + "offset": field.offset, + } + metadata[field.name] = curr + if field.types[:1] == [GGUFValueType.ARRAY]: + curr["array_types"] = [t.name for t in field.types][1:] + if not args.json_array: + continue + itype = field.types[-1] + if itype == GGUFValueType.STRING: + curr["value"] = [str(bytes(field.parts[idx]), encoding="utf-8") for idx in field.data] + else: + curr["value"] = [pv for idx in field.data for pv in field.parts[idx].tolist()] + elif field.types[0] == GGUFValueType.STRING: + curr["value"] = str(bytes(field.parts[-1]), encoding="utf-8") + else: + curr["value"] = field.parts[-1].tolist()[0] + for idx, tensor in enumerate(reader.tensors): + tensors[tensor.name] = { + "index": idx, + "shape": tensor.shape.tolist(), + "type": tensor.tensor_type.name, + "offset": tensor.field.offset, + } + json.dump(result, sys.stdout) + + +def main() -> None: + parser = argparse.ArgumentParser(description="Dump GGUF file metadata") + parser.add_argument("model", type=str, help="GGUF format model filename") + parser.add_argument("--no-tensors", action="store_true", help="Don't dump tensor metadata") + parser.add_argument("--json", action="store_true", help="Produce JSON output") + parser.add_argument("--json-array", action="store_true", help="Include full array values in JSON output (long)") + args = parser.parse_args(None if len(sys.argv) > 1 else ["--help"]) + if not args.json: + print(f'* Loading: {args.model}') + reader = GGUFReader(args.model, 'r') + if args.json: + dump_metadata_json(reader, args) + else: + dump_metadata(reader, args) + + +if __name__ == '__main__': + main() diff --git a/gguf-py/scripts/gguf-set-metadata.py b/gguf-py/scripts/gguf-set-metadata.py new file mode 100755 index 
000000000..3ebdfa898 --- /dev/null +++ b/gguf-py/scripts/gguf-set-metadata.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +import argparse +import os +import sys +from pathlib import Path + +# Necessary to load the local gguf package +if "NO_LOCAL_GGUF" not in os.environ and (Path(__file__).parent.parent.parent / 'gguf-py').exists(): + sys.path.insert(0, str(Path(__file__).parent.parent)) + +from gguf import GGUFReader # noqa: E402 + + +def minimal_example(filename: str) -> None: + reader = GGUFReader(filename, 'r+') + field = reader.fields['tokenizer.ggml.bos_token_id'] + if field is None: + return + part_index = field.data[0] + field.parts[part_index][0] = 2 # Set tokenizer.ggml.bos_token_id to 2 + # + # So what's this field.data thing? It's helpful because field.parts contains + # _every_ part of the GGUF field. For example, tokenizer.ggml.bos_token_id consists + # of: + # + # Part index 0: Key length (27) + # Part index 1: Key data ("tokenizer.ggml.bos_token_id") + # Part index 2: Field type (4, the id for GGUFValueType.UINT32) + # Part index 3: Field value + # + # Note also that each part is an NDArray slice, so even a part that + # is only a single value like the key length will be a NDArray of + # the key length type (numpy.uint32). + # + # The .data attribute in the Field is a list of relevant part indexes + # and doesn't contain internal GGUF details like the key length part. + # In this case, .data will be [3] - just the part index of the + # field value itself. + + +def set_metadata(reader: GGUFReader, args: argparse.Namespace) -> None: + field = reader.get_field(args.key) + if field is None: + print(f'! Field {repr(args.key)} not found', file = sys.stderr) + sys.exit(1) + # Note that field.types is a list of types. This is because the GGUF + # format supports arrays. For example, an array of UINT32 would + # look like [GGUFValueType.ARRAY, GGUFValueType.UINT32] + handler = reader.gguf_scalar_to_np.get(field.types[0]) if field.types else None + if handler is None: + print( + f'! This tool only supports changing simple values, {repr(args.key)} has unsupported type {field.types}', + file = sys.stderr, + ) + sys.exit(1) + current_value = field.parts[field.data[0]][0] + new_value = handler(args.value) + print(f'* Preparing to change field {repr(args.key)} from {current_value} to {new_value}') + if current_value == new_value: + print(f'- Key {repr(args.key)} already set to requested value {current_value}') + sys.exit(0) + if args.dry_run: + sys.exit(0) + if not args.force: + print('*** Warning *** Warning *** Warning **') + print('* Changing fields in a GGUF file can make it unusable. Proceed at your own risk.') + print('* Enter exactly YES if you are positive you want to proceed:') + response = input('YES, I am sure> ') + if response != 'YES': + print("You didn't enter YES. Okay then, see ya!") + sys.exit(0) + field.parts[field.data[0]][0] = new_value + print('* Field changed. 
Successful completion.')
+
+
+def main() -> None:
+    parser = argparse.ArgumentParser(description="Set a simple value in GGUF file metadata")
+    parser.add_argument("model", type=str, help="GGUF format model filename")
+    parser.add_argument("key", type=str, help="Metadata key to set")
+    parser.add_argument("value", type=str, help="Metadata value to set")
+    parser.add_argument("--dry-run", action="store_true", help="Don't actually change anything")
+    parser.add_argument("--force", action="store_true", help="Change the field without confirmation")
+    args = parser.parse_args(None if len(sys.argv) > 1 else ["--help"])
+    print(f'* Loading: {args.model}')
+    reader = GGUFReader(args.model, 'r' if args.dry_run else 'r+')
+    set_metadata(reader, args)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/gguf-py/tests/test_gguf.py b/gguf-py/tests/test_gguf.py
index 512531dd2..0adeb7d55 100644
--- a/gguf-py/tests/test_gguf.py
+++ b/gguf-py/tests/test_gguf.py
@@ -1,7 +1,7 @@
-import gguf
+import gguf  # noqa: F401

 # TODO: add tests


-def test_write_gguf():
+def test_write_gguf() -> None:
     pass

From d96ca7ded77df764db797b68b4a29e34c5b56285 Mon Sep 17 00:00:00 2001
From: Alexey Parfenov
Date: Sat, 11 Nov 2023 05:48:21 +0000
Subject: [PATCH 105/859] server : fix crash when prompt exceeds context size
 (#3996)

---
 examples/server/server.cpp | 58 +++++++++++++++++++-------------------
 1 file changed, 29 insertions(+), 29 deletions(-)

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index cbf36ad67..46862a84b 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -1557,6 +1557,35 @@ struct llama_server_context

         slot.num_prompt_tokens = prompt_tokens.size();

+        if (slot.params.n_keep < 0)
+        {
+            slot.params.n_keep = slot.num_prompt_tokens;
+        }
+        slot.params.n_keep = std::min(slot.n_ctx - 4, slot.params.n_keep);
+
+        // if input prompt is too big, truncate it
+        if (slot.num_prompt_tokens >= slot.n_ctx)
+        {
+            const int n_left = slot.n_ctx - slot.params.n_keep;
+            const int n_block_size = n_left / 2;
+            const int erased_blocks = (slot.num_prompt_tokens - slot.params.n_keep - n_block_size) / n_block_size;
+
+            std::vector<llama_token> new_tokens(prompt_tokens.begin(), prompt_tokens.begin() + slot.params.n_keep);
+            new_tokens.insert(new_tokens.end(), prompt_tokens.begin() + slot.params.n_keep + erased_blocks * n_block_size, prompt_tokens.end());
+
+            LOG_VERBOSE("input truncated", {
+                {"n_ctx", slot.n_ctx},
+                {"n_keep", slot.params.n_keep},
+                {"n_left", n_left},
+                {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())},
+            });
+            slot.truncated = true;
+            prompt_tokens = new_tokens;
+
+            slot.num_prompt_tokens = prompt_tokens.size();
+            GGML_ASSERT(slot.num_prompt_tokens < slot.n_ctx);
+        }
+
         if (!slot.params.cache_prompt)
         {
             llama_sampling_reset(slot.ctx_sampling);
@@ -1566,35 +1595,6 @@ struct llama_server_context
         }
         else
         {
-            if (slot.params.n_keep < 0)
-            {
-                slot.params.n_keep = slot.num_prompt_tokens;
-            }
-            slot.params.n_keep = std::min(slot.n_ctx - 4, slot.params.n_keep);
-
-            // if input prompt is too big, truncate it
-            if (slot.num_prompt_tokens >= slot.n_ctx)
-            {
-                const int n_left = slot.n_ctx - slot.params.n_keep;
-                const int n_block_size = n_left / 2;
-                const int erased_blocks = (slot.num_prompt_tokens - slot.params.n_keep - n_block_size) / n_block_size;
-
-                std::vector<llama_token> new_tokens(prompt_tokens.begin(), prompt_tokens.begin() + slot.params.n_keep);
-                new_tokens.insert(new_tokens.end(), prompt_tokens.begin() + slot.params.n_keep + erased_blocks * n_block_size, prompt_tokens.end());
-
-                LOG_VERBOSE("input truncated", {
-                    {"n_ctx", slot.n_ctx},
-                    {"n_keep", slot.params.n_keep},
-                    {"n_left", n_left},
-                    {"new_tokens", tokens_to_str(ctx, new_tokens.cbegin(), new_tokens.cend())},
-                });
-                slot.truncated = true;
-                prompt_tokens = new_tokens;
-
-                slot.num_prompt_tokens = prompt_tokens.size();
-                GGML_ASSERT(slot.num_prompt_tokens < slot.n_ctx);
-            }
-
             // push the prompt into the sampling context (do not apply grammar)
             for (auto &token : prompt_tokens)
             {

From e86fc56f7521ca4b18d1d9939e82abd40c2f1c01 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?=
Date: Sat, 11 Nov 2023 18:35:31 +0300
Subject: [PATCH 106/859] Fix gguf-convert-endian script (#4037)

* Fix gguf-convert-endian script

* Bump version and update description

---
 gguf-py/pyproject.toml                 | 4 ++--
 gguf-py/scripts/gguf-convert-endian.py | 3 +--
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/gguf-py/pyproject.toml b/gguf-py/pyproject.toml
index 624e1cda6..e21c3cd94 100644
--- a/gguf-py/pyproject.toml
+++ b/gguf-py/pyproject.toml
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "gguf"
-version = "0.5.0"
-description = "Write ML models in GGUF for GGML"
+version = "0.5.1"
+description = "Read and write ML models in GGUF for GGML"
 authors = ["GGML <ggml@ggml.ai>"]
 packages = [
     {include = "gguf"},
diff --git a/gguf-py/scripts/gguf-convert-endian.py b/gguf-py/scripts/gguf-convert-endian.py
index b79d86e07..10a16ad06 100755
--- a/gguf-py/scripts/gguf-convert-endian.py
+++ b/gguf-py/scripts/gguf-convert-endian.py
@@ -28,8 +28,7 @@ def convert_byteorder(reader: gguf.GGUFReader, args: argparse.Namespace) -> None
         file_endian = swapped_endian
     else:
         file_endian = host_endian
-    if args.order == "native":
-        order = host_endian
+    order = host_endian if args.order == "native" else args.order
     print(f"* Host is {host_endian.upper()} endian, GGUF file seems to be {file_endian.upper()} endian")
     if file_endian == order:
         print(f"* File is already {order.upper()} endian. Nothing to do.")

From 532dd74e38c29e16ea1cfc4e7eedb4f2fab3f3cd Mon Sep 17 00:00:00 2001
From: Richard Kiss
Date: Sat, 11 Nov 2023 22:04:58 -0800
Subject: [PATCH 107/859] Fix some documentation typos/grammar mistakes
 (#4032)

* typos

* Update examples/parallel/README.md

Co-authored-by: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com>

---------

Co-authored-by: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com>
---
 README.md                                 | 2 +-
 docs/token_generation_performance_tips.md | 2 +-
 examples/main/README.md                   | 2 +-
 examples/parallel/README.md               | 2 +-
 grammars/README.md                        | 4 ++--
 5 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index 9c9e36ad0..af39e8c0e 100644
--- a/README.md
+++ b/README.md
@@ -424,7 +424,7 @@ Building the program with BLAS support may lead to some performance improvements
   ```
   The environment variable [`HIP_VISIBLE_DEVICES`](https://rocm.docs.amd.com/en/latest/understand/gpu_isolation.html#hip-visible-devices) can be used to specify which GPU(s) will be used.
-  If your GPU is not officialy supported you can use the environment variable [`HSA_OVERRIDE_GFX_VERSION`] set to a similar GPU, for example 10.3.0 on RDNA2 or 11.0.0 on RDNA3.
+  If your GPU is not officially supported you can use the environment variable [`HSA_OVERRIDE_GFX_VERSION`] set to a similar GPU, for example 10.3.0 on RDNA2 or 11.0.0 on RDNA3.
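   For example, an unsupported RDNA2 card can often be driven by reporting it as gfx1030 (illustrative values, adjust the override, model path and layer count for your setup):
   ```sh
   HSA_OVERRIDE_GFX_VERSION=10.3.0 ./main -m ./models/7B/ggml-model-q4_0.gguf -ngl 32
   ```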
The following compilation options are also available to tweak performance (yes, they refer to CUDA, not HIP, because it uses the same code as the cuBLAS version above): | Option | Legal values | Default | Description | diff --git a/docs/token_generation_performance_tips.md b/docs/token_generation_performance_tips.md index c9acff7d4..d7e863dff 100644 --- a/docs/token_generation_performance_tips.md +++ b/docs/token_generation_performance_tips.md @@ -17,7 +17,7 @@ llama_model_load_internal: [cublas] total VRAM used: 17223 MB If you see these lines, then the GPU is being used. ## Verifying that the CPU is not oversaturated -llama accepts a `-t N` (or `--threads N`) parameter. It's extremely important that this parameter is not too large. If your token generation is extremely slow, try setting this number to 1. If this significantly improves your token generation speed, then your CPU is being oversaturated and you need to explicitly set this parameter to the number of the physicial CPU cores on your machine (even if you utilize a GPU). If in doubt, start with 1 and double the amount until you hit a performance bottleneck, then scale the number down. +llama accepts a `-t N` (or `--threads N`) parameter. It's extremely important that this parameter is not too large. If your token generation is extremely slow, try setting this number to 1. If this significantly improves your token generation speed, then your CPU is being oversaturated and you need to explicitly set this parameter to the number of the physical CPU cores on your machine (even if you utilize a GPU). If in doubt, start with 1 and double the amount until you hit a performance bottleneck, then scale the number down. # Example of runtime flags effect on inference speed benchmark These runs were tested on the following machine: diff --git a/examples/main/README.md b/examples/main/README.md index a3428b487..c7997f665 100644 --- a/examples/main/README.md +++ b/examples/main/README.md @@ -142,7 +142,7 @@ The `--ctx-size` option allows you to set the size of the prompt context used by ### Extended Context Size -Some fine-tuned models have extened the context length by scaling RoPE. For example, if the original pretrained model have a context length (max sequence length) of 4096 (4k) and the fine-tuned model have 32k. That is a scaling factor of 8, and should work by setting the above `--ctx-size` to 32768 (32k) and `--rope-scale` to 8. +Some fine-tuned models have extended the context length by scaling RoPE. For example, if the original pre-trained model have a context length (max sequence length) of 4096 (4k) and the fine-tuned model have 32k. That is a scaling factor of 8, and should work by setting the above `--ctx-size` to 32768 (32k) and `--rope-scale` to 8. - `--rope-scale N`: Where N is the linear scaling factor used by the fine-tuned model. diff --git a/examples/parallel/README.md b/examples/parallel/README.md index 4d0fe5cef..df0456733 100644 --- a/examples/parallel/README.md +++ b/examples/parallel/README.md @@ -1,3 +1,3 @@ # llama.cpp/example/parallel -Simplified simluation for serving incoming requests in parallel +Simplified simulation of serving incoming requests in parallel diff --git a/grammars/README.md b/grammars/README.md index 7f3b11ca5..e1383fa5c 100644 --- a/grammars/README.md +++ b/grammars/README.md @@ -55,7 +55,7 @@ The order of symbols in a sequence matter. For example, in `"1. " move " " move Alternatives, denoted by `|`, give different sequences that are acceptable. 
For example, in `move ::= pawn | nonpawn | castle`, `move` can be a `pawn` move, a `nonpawn` move, or a `castle`. -Parentheses `()` can be used to group sequences, which allows for embedding alternatives in a larger rule or applying repetition and optptional symbols (below) to a sequence. +Parentheses `()` can be used to group sequences, which allows for embedding alternatives in a larger rule or applying repetition and optional symbols (below) to a sequence. ## Repetition and Optional Symbols @@ -67,7 +67,7 @@ Parentheses `()` can be used to group sequences, which allows for embedding alte Comments can be specified with `#`: ``` -# defines optional whitspace +# defines optional whitespace ws ::= [ \t\n]+ ``` From 21fd874c8d2a14dea2d56724e4357c0824aee6a8 Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Sun, 12 Nov 2023 16:39:37 -0700 Subject: [PATCH 108/859] gguf-py: gguf_writer: Use bytearray to build metadata (#4051) * gguf-py: gguf_writer: Use BytesIO to build metadata * Use bytearray instead Bump gguf-py package version --- gguf-py/gguf/gguf_writer.py | 4 ++-- gguf-py/pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 75fb6976f..c3b8c588f 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -57,9 +57,9 @@ class GGUFWriter: self.endianess = endianess self.offset_tensor = 0 self.data_alignment = GGUF_DEFAULT_ALIGNMENT - self.kv_data = b"" + self.kv_data = bytearray() self.kv_data_count = 0 - self.ti_data = b"" + self.ti_data = bytearray() self.ti_data_count = 0 self.use_temp_file = use_temp_file self.temp_file = None diff --git a/gguf-py/pyproject.toml b/gguf-py/pyproject.toml index e21c3cd94..af777c3e0 100644 --- a/gguf-py/pyproject.toml +++ b/gguf-py/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "gguf" -version = "0.5.1" +version = "0.5.2" description = "Read and write ML models in GGUF for GGML" authors = ["GGML "] packages = [ From bb50a792ec2a49944470c82694fa364345e95170 Mon Sep 17 00:00:00 2001 From: Kerfuffle <44031344+KerfuffleV2@users.noreply.github.com> Date: Mon, 13 Nov 2023 01:58:15 -0700 Subject: [PATCH 109/859] Add ReLU and SQR CUDA ops to (partially) fix Persimmon offloading (#4041) * Add ReLU and SQR CUDA ops to fix Persimmon offloading * Persimmon loader: More helpful error on CUDA/ROCM when offloading too many layers --- ggml-cuda.cu | 72 ++++++++++++++++++++++++++++++++++++++++++++++++++++ llama.cpp | 7 +++++ 2 files changed, 79 insertions(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f87f18802..8d03ba664 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -433,6 +433,8 @@ static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_ #define CUDA_MUL_BLOCK_SIZE 256 #define CUDA_GELU_BLOCK_SIZE 256 #define CUDA_SILU_BLOCK_SIZE 256 +#define CUDA_RELU_BLOCK_SIZE 256 +#define CUDA_SQR_BLOCK_SIZE 256 #define CUDA_CPY_BLOCK_SIZE 32 #define CUDA_SCALE_BLOCK_SIZE 256 #define CUDA_CLAMP_BLOCK_SIZE 256 @@ -553,6 +555,24 @@ static __global__ void silu_f32(const float * x, float * dst, const int k) { dst[i] = x[i] / (1.0f + expf(-x[i])); } +static __global__ void relu_f32(const float * x, float * dst, const int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + if (i >= k) { + return; + } + dst[i] = fmaxf(x[i], 0); +} + +static __global__ void sqr_f32(const float * x, float * dst, const int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + if (i >= k) { + return; + } + 
+    dst[i] = x[i] * x[i];
+}
+
 static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) {
 #pragma unroll
     for (int mask = 16; mask > 0; mask >>= 1) {
@@ -4759,6 +4779,16 @@ static void silu_f32_cuda(const float * x, float * dst, const int k, cudaStream_
     silu_f32<<<num_blocks, CUDA_SILU_BLOCK_SIZE, 0, stream>>>(x, dst, k);
 }

+static void relu_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) {
+    const int num_blocks = (k + CUDA_RELU_BLOCK_SIZE - 1) / CUDA_RELU_BLOCK_SIZE;
+    relu_f32<<<num_blocks, CUDA_RELU_BLOCK_SIZE, 0, stream>>>(x, dst, k);
+}
+
+static void sqr_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) {
+    const int num_blocks = (k + CUDA_SQR_BLOCK_SIZE - 1) / CUDA_SQR_BLOCK_SIZE;
+    sqr_f32<<<num_blocks, CUDA_SQR_BLOCK_SIZE, 0, stream>>>(x, dst, k);
+}
+
 static void norm_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, cudaStream_t stream) {
     GGML_ASSERT(ncols % WARP_SIZE == 0);
     if (ncols < 1024) {
@@ -6128,6 +6158,34 @@ inline void ggml_cuda_op_silu(
     (void) src1_dd;
 }

+inline void ggml_cuda_op_relu(
+    const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst,
+    const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) {
+
+    GGML_ASSERT(src0->type == GGML_TYPE_F32);
+    GGML_ASSERT( dst->type == GGML_TYPE_F32);
+
+    relu_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), main_stream);
+
+    (void) src1;
+    (void) dst;
+    (void) src1_dd;
+}
+
+inline void ggml_cuda_op_sqr(
+    const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst,
+    const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) {
+
+    GGML_ASSERT(src0->type == GGML_TYPE_F32);
+    GGML_ASSERT( dst->type == GGML_TYPE_F32);
+
+    sqr_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), main_stream);
+
+    (void) src1;
+    (void) dst;
+    (void) src1_dd;
+}
+
 inline void ggml_cuda_op_norm(
     const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst,
     const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) {
@@ -7160,6 +7218,14 @@ static void ggml_cuda_silu(const ggml_tensor * src0, const ggml_tensor * src1, g
     ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_silu);
 }

+static void ggml_cuda_relu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
+    ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_relu);
+}
+
+static void ggml_cuda_sqr(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
+    ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_sqr);
+}
+
 static void ggml_cuda_norm(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
     ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_norm);
 }
@@ -7891,6 +7957,9 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_
             case GGML_UNARY_OP_SILU:
                 func = ggml_cuda_silu;
                 break;
+            case GGML_UNARY_OP_RELU:
+                func = ggml_cuda_relu;
+                break;
             default:
                 return false;
         } break;
@@ -7909,6 +7978,9 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_
         case GGML_OP_SCALE:
             func = ggml_cuda_scale;
             break;
+        case GGML_OP_SQR:
+            func = ggml_cuda_sqr;
+            break;
         case GGML_OP_CLAMP:
             if (!any_on_device) {
                 return false;
diff --git a/llama.cpp b/llama.cpp
index d682d2864..a5f3876cc 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -2877,6 +2877,13 @@ static void llm_load_tensors(
         ggml_backend_type backend_output;

         if (n_gpu_layers > int(n_layer)) {
+#ifdef GGML_USE_CUBLAS
+            if (n_gpu_layers > int(n_layer + 1)) {
+                LLAMA_LOG_ERROR("%s: CUDA backend missing Persimmon CUDA ops, can offload at most %ld layers. See: https://github.com/ggerganov/llama.cpp/issues/4038\n",
+                    __func__, n_layer + 1);
+                throw std::runtime_error("Persimmon CUDA offload failed");
+            }
+#endif
             // norm is not performance relevant on its own but keeping it in VRAM reduces data copying
             // on Windows however this is detrimental unless everything is on the GPU
 #ifndef _WIN32

From 4760e7cc0b68570d58f55e8dda469805d1759d0d Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 13 Nov 2023 14:16:23 +0200
Subject: [PATCH 110/859] sync : ggml (backend v2) (#3912)

* sync : ggml (backend v2) (wip)

* sync : migrate examples and llama.cpp to dynamic graphs (wip)

* sync : update tests + fix max op params to 64

ggml-ci

* sync : ggml-cuda

ggml-ci

* llama : fix save/load state context size

ggml-ci

* sync : try to fix build on tvOS

* sync : pass custom graph sizes in training examples

* sync : update graph copies to new ggml API

* sync : update sync-ggml.sh with new files

* scripts : fix header in sync script

* train : fix context size calculations

* llama : increase inference graph size up to 4096 nodes

* train : allocate grads for backward graphs

* train : allocate grads for gb_tmp
---
 common/train.cpp                          |    1 +
 common/train.h                            |    2 +
 examples/benchmark/benchmark-matmult.cpp  |   21 +-
 examples/export-lora/export-lora.cpp      |    4 +-
 examples/finetune/finetune.cpp            |   23 +-
 examples/llava/clip.cpp                   |    2 +-
 examples/metal/metal.cpp                  |   10 +-
 .../train-text-from-scratch.cpp           |   23 +-
 ggml-alloc.c                              |  594 ++++++----
 ggml-alloc.h                              |   80 +-
 ggml-backend-impl.h                       |   87 ++
 ggml-backend.c                            |  591 ++++++++-
 ggml-backend.h                            |  151 ++-
 ggml-cuda.cu                              |   16 +-
 ggml-impl.h                               |   14 +-
 ggml-metal.m                              |   25 +-
 ggml.c                                    | 1055 ++++++++++-------
 ggml.h                                    |   89 +-
 llama.cpp                                 |   40 +-
 scripts/sync-ggml.sh                      |   12 +-
 tests/test-grad0.cpp                      |    7 +-
 tests/test-opt.cpp                        |   11 +-
 22 files changed, 1994 insertions(+), 864 deletions(-)
 create mode 100644 ggml-backend-impl.h

diff --git a/common/train.cpp b/common/train.cpp
index bc15b7a03..964b156b5 100644
--- a/common/train.cpp
+++ b/common/train.cpp
@@ -32,6 +32,7 @@ struct train_state * init_train_state() {
     state->opt = new struct ggml_opt_context;
     state->opt->ctx = NULL;
     state->opt->params = ggml_opt_default_params(GGML_OPT_ADAM);
+    state->opt->params.graph_size = LLAMA_TRAIN_MAX_NODES;
     state->opt->loss_after = 0.0f;

     return state;
diff --git a/common/train.h b/common/train.h
index d86c93cc4..263d940c0 100644
--- a/common/train.h
+++ b/common/train.h
@@ -9,6 +9,8 @@
 #include "ggml.h"
 #include "llama.h"

+#define LLAMA_TRAIN_MAX_NODES 16384
+
 typedef std::string mt19937_state;

 struct train_state {
diff --git a/examples/benchmark/benchmark-matmult.cpp b/examples/benchmark/benchmark-matmult.cpp
index 76e3f57cc..284733b10 100644
--- a/examples/benchmark/benchmark-matmult.cpp
+++ b/examples/benchmark/benchmark-matmult.cpp
@@ -171,7 +171,8 @@ int main(int argc, char ** argv) {
     struct ggml_tensor * m11xm2 = ggml_mul_mat(ctx, m11, m2);

     // printf("Creating compute graph\n");
-    struct ggml_cgraph gf = ggml_build_forward(m11xm2);
+    struct ggml_cgraph * gf = ggml_new_graph(ctx);
+    ggml_build_forward_expand(gf, m11xm2);

     printf("n_threads=%i\n", benchmark_params.n_threads);

@@ -180,9 +181,9 @@ int main(int argc, char ** argv) {

     std::vector<uint8_t> work_buffer;

-    ggml_graph_compute_helper(work_buffer, &gf, benchmark_params.n_threads);
+    ggml_graph_compute_helper(work_buffer, gf, benchmark_params.n_threads);

-    TENSOR_DUMP(gf.nodes[0]);
+    TENSOR_DUMP(gf->nodes[0]);

     printf("\n------ Test 2 - Matrix Mult via %s code\n", ggml_type_name(qtype));

@@ -200,7 +201,8 @@ int main(int argc, char ** argv)
{ struct ggml_tensor * q31 = ggml_mul_mat(ctx, q11, m2); // printf("Creating compute graph\n"); - struct ggml_cgraph gf31 = ggml_build_forward(q31); + struct ggml_cgraph * gf31 = ggml_new_graph(ctx); + ggml_build_forward_expand(gf31, q31); // Set up a second graph computation to make sure we override the CPU cache lines // printf("Creating new tensor q12 & Running quantize\n"); @@ -211,7 +213,8 @@ int main(int argc, char ** argv) { struct ggml_tensor * q32 = ggml_mul_mat(ctx, q12, m2); //printf("Creating compute graph\n"); - struct ggml_cgraph gf32 = ggml_build_forward(q32); + struct ggml_cgraph * gf32 = ggml_new_graph(ctx); + ggml_build_forward_expand(gf32, q32); printf("n_threads=%i\n", benchmark_params.n_threads); const int dimx = sizex; @@ -223,7 +226,7 @@ int main(int argc, char ** argv) { // Let's use the F32 result from above as a reference for the quantized multiplication - float sum_of_F32_reference = tensor_sum_elements(gf.nodes[0]); + float sum_of_F32_reference = tensor_sum_elements(gf->nodes[0]); printf("Iteration;NThreads; SizeX; SizeY; SizeZ; Required_FLOPS; Elapsed_u_Seconds; gigaFLOPS\n"); printf("=====================================================================================\n"); @@ -233,7 +236,7 @@ int main(int argc, char ** argv) { long long int start = ggml_time_us(); //printf("Running ggml_graph_compute\n"); - ggml_graph_compute_helper(work_buffer, &gf31, benchmark_params.n_threads); + ggml_graph_compute_helper(work_buffer, gf31, benchmark_params.n_threads); long long int stop = ggml_time_us(); long long int usec = stop-start; @@ -251,7 +254,7 @@ int main(int argc, char ** argv) { // Check that the matrix multiplication result is in the right ballpark // We cannot use the exact value from the F32 multiplication because the quantizuation will be slightly different - float sum_of_Q4_result = tensor_sum_elements(gf31.nodes[0]); + float sum_of_Q4_result = tensor_sum_elements(gf31->nodes[0]); float delta = std::abs(sum_of_Q4_result - sum_of_F32_reference); float allowed_delta = (sum_of_F32_reference) / 1000 / 1000; // Let's accept an epsilon of 10^-6 @@ -266,7 +269,7 @@ int main(int argc, char ** argv) { } // Running a different graph computation to make sure we override the CPU cache lines - ggml_graph_compute_helper(work_buffer, &gf32, benchmark_params.n_threads); + ggml_graph_compute_helper(work_buffer, gf32, benchmark_params.n_threads); } printf("\n"); printf("Average%78.2f\n",gflops_sum/((double)benchmark_params.n_iterations)); diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index d803cfd5c..c8754ce70 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -240,7 +240,7 @@ static struct lora_data * load_lora(struct lora_info * info) { } struct ggml_init_params params_ggml; - params_ggml.mem_size = ggml_tensor_overhead() * GGML_MAX_NODES; + params_ggml.mem_size = ggml_tensor_overhead() * GGML_DEFAULT_GRAPH_SIZE; params_ggml.mem_buffer = NULL; params_ggml.no_alloc = true; result->ctx = ggml_init(params_ggml); @@ -334,7 +334,7 @@ static bool apply_lora(struct ggml_tensor * tensor, struct lora_data * lora, int float scaling = lora->info.scale * (float)lora->lora_alpha / (float)lora->lora_r; struct ggml_init_params params; - params.mem_size = GGML_OBJECT_SIZE + GGML_GRAPH_SIZE + ggml_tensor_overhead()*4 + GGML_MEM_ALIGN*5; + params.mem_size = GGML_OBJECT_SIZE + ggml_graph_overhead() + ggml_tensor_overhead()*4 + GGML_MEM_ALIGN*5; params.mem_buffer = NULL; params.no_alloc = true; struct 
ggml_context * ctx = NULL; diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index fa7dbe496..5a6cf22ce 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -772,7 +772,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( if (enable_checkpointing) { ggml_build_backward_gradient_checkpointing(ctx, gf, gb, gb_tmp, checkpoints.data(), (int) checkpoints.size()); } else { - *gb = *gf; + ggml_graph_cpy(gf, gb); ggml_build_backward_expand(ctx, gf, gb, true); } @@ -1615,6 +1615,7 @@ int main(int argc, char ** argv) { opt->params = ggml_opt_default_params(GGML_OPT_ADAM); opt->params.print_forward_graph = false; opt->params.print_backward_graph = false; + opt->params.graph_size = LLAMA_TRAIN_MAX_NODES; opt->params.n_threads = params.common.n_threads; opt->params.past = params.common.opt_past; opt->params.delta = params.common.opt_delta; @@ -1741,11 +1742,9 @@ int main(int argc, char ** argv) { ggml_allocr_free(alloc); // context for compute tensors without their data - size_t estimated_compute_size_wo_data = ( - ggml_tensor_overhead()*GGML_MAX_NODES*2 - + (GGML_OBJECT_SIZE+GGML_GRAPH_SIZE)*( - params.common.use_checkpointing ? 3 : 2 - ) + const size_t estimated_compute_size_wo_data = ( + 2*LLAMA_TRAIN_MAX_NODES*ggml_tensor_overhead() + + (params.common.use_checkpointing ? 3 : 2)*(GGML_OBJECT_SIZE+ggml_graph_overhead_custom(LLAMA_TRAIN_MAX_NODES, true)) ); struct ggml_init_params ctx_compute_params = { estimated_compute_size_wo_data, // mem_size @@ -1768,11 +1767,11 @@ int main(int argc, char ** argv) { for (unsigned order = 0; order < (unsigned) GGML_CGRAPH_EVAL_ORDER_COUNT; ++order) { ctx_compute = ggml_init(ctx_compute_params); alloc = ggml_allocr_new_measure(tensor_alignment); - gf = ggml_new_graph(ctx_compute); + gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = (enum ggml_cgraph_eval_order) order; - gb = ggml_new_graph(ctx_compute); + gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gb_tmp = params.common.use_checkpointing - ? ggml_new_graph(ctx_compute) + ? ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true) : NULL; loss = llama_build_lora_finetune_graphs( &model, &lora, alloc, ctx_compute, @@ -1801,11 +1800,11 @@ int main(int argc, char ** argv) { mem_compute_data.resize(max_compute_size); ctx_compute = ggml_init(ctx_compute_params); alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); - gf = ggml_new_graph(ctx_compute); + gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = best_order; - gb = ggml_new_graph(ctx_compute); + gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gb_tmp = params.common.use_checkpointing - ? ggml_new_graph(ctx_compute) + ? 
ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true) : NULL; loss = llama_build_lora_finetune_graphs( &model, &lora, alloc, ctx_compute, diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 3c909c7d3..c26ee4957 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -664,7 +664,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { // measure mem requirement and allocate { static const size_t tensor_alignment = 32; - new_clip->buf_compute.resize(ggml_tensor_overhead()*GGML_MAX_NODES + ggml_graph_overhead()); + new_clip->buf_compute.resize(ggml_tensor_overhead()*GGML_DEFAULT_GRAPH_SIZE + ggml_graph_overhead()); new_clip->alloc = ggml_allocr_new_measure(tensor_alignment); clip_image_f32_batch batch; batch.size = 1; diff --git a/examples/metal/metal.cpp b/examples/metal/metal.cpp index c05a4fa93..16c1146f9 100644 --- a/examples/metal/metal.cpp +++ b/examples/metal/metal.cpp @@ -34,7 +34,7 @@ int main(int argc, char ** argv) { struct ggml_context * ctx_data = NULL; struct ggml_context * ctx_eval = NULL; - struct ggml_cgraph gf = ggml_graph_import(fname_cgraph, &ctx_data, &ctx_eval); + struct ggml_cgraph * gf = ggml_graph_import(fname_cgraph, &ctx_data, &ctx_eval); // this allocates all Metal resources and memory buffers auto * ctx_metal = ggml_metal_init(1); @@ -46,13 +46,13 @@ int main(int argc, char ** argv) { // main { - struct ggml_tensor * input = ggml_graph_get_tensor(&gf, "embd"); + struct ggml_tensor * input = ggml_graph_get_tensor(gf, "embd"); *(int32_t *) input->data = 1; // BOS ggml_metal_set_tensor(ctx_metal, input); // warmup - ggml_metal_graph_compute(ctx_metal, &gf); + ggml_metal_graph_compute(ctx_metal, gf); const int n_iter = 16; @@ -60,7 +60,7 @@ int main(int argc, char ** argv) { // the actual inference happens here for (int i = 0; i < n_iter; ++i) { - ggml_metal_graph_compute(ctx_metal, &gf); + ggml_metal_graph_compute(ctx_metal, gf); } const int64_t t1 = ggml_time_us(); @@ -70,7 +70,7 @@ int main(int argc, char ** argv) { // debug output { - struct ggml_tensor * logits = gf.nodes[gf.n_nodes - 1]; + struct ggml_tensor * logits = gf->nodes[gf->n_nodes - 1]; ggml_metal_get_tensor(ctx_metal, logits); float * ptr = (float *) ggml_get_data(logits); diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index 2a257e632..f049a3923 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -436,7 +436,7 @@ static struct ggml_tensor * llama_build_train_graphs( if (enable_checkpointing) { ggml_build_backward_gradient_checkpointing(ctx, gf, gb, gb_tmp, checkpoints.data(), (int) checkpoints.size()); } else { - *gb = *gf; + ggml_graph_cpy(gf, gb); ggml_build_backward_expand(ctx, gf, gb, true); } @@ -1006,6 +1006,7 @@ int main(int argc, char ** argv) { opt->params = ggml_opt_default_params(GGML_OPT_ADAM); opt->params.print_forward_graph = false; opt->params.print_backward_graph = false; + opt->params.graph_size = LLAMA_TRAIN_MAX_NODES; opt->params.n_threads = params.common.n_threads; opt->params.past = params.common.opt_past; opt->params.delta = params.common.opt_delta; @@ -1108,11 +1109,9 @@ int main(int argc, char ** argv) { ggml_allocr_free(alloc); // context for compute tensors without their data - size_t estimated_compute_size_wo_data = ( - ggml_tensor_overhead()*GGML_MAX_NODES*2 - + (GGML_OBJECT_SIZE+GGML_GRAPH_SIZE)*( - 
params.common.use_checkpointing ? 3 : 2 - ) + const size_t estimated_compute_size_wo_data = ( + 2*LLAMA_TRAIN_MAX_NODES*ggml_tensor_overhead() + + (params.common.use_checkpointing ? 3 : 2)*(GGML_OBJECT_SIZE+ggml_graph_overhead_custom(LLAMA_TRAIN_MAX_NODES, true)) ); struct ggml_init_params ctx_compute_params = { estimated_compute_size_wo_data, // mem_size @@ -1135,11 +1134,11 @@ int main(int argc, char ** argv) { for (unsigned order = 0; order < (unsigned) GGML_CGRAPH_EVAL_ORDER_COUNT; ++order) { ctx_compute = ggml_init(ctx_compute_params); alloc = ggml_allocr_new_measure(tensor_alignment); - gf = ggml_new_graph(ctx_compute); + gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = (enum ggml_cgraph_eval_order) order; - gb = ggml_new_graph(ctx_compute); + gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gb_tmp = params.common.use_checkpointing - ? ggml_new_graph(ctx_compute) + ? ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true) : NULL; loss = llama_build_train_graphs( &model, alloc, ctx_compute, @@ -1168,11 +1167,11 @@ int main(int argc, char ** argv) { mem_compute_data.resize(max_compute_size); ctx_compute = ggml_init(ctx_compute_params); alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); - gf = ggml_new_graph(ctx_compute); + gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = best_order; - gb = ggml_new_graph(ctx_compute); + gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gb_tmp = params.common.use_checkpointing - ? ggml_new_graph(ctx_compute) + ? ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true) : NULL; loss = llama_build_train_graphs( &model, alloc, ctx_compute, diff --git a/ggml-alloc.c b/ggml-alloc.c index b553eb7c1..cdfe4caf6 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -1,51 +1,21 @@ #include "ggml-alloc.h" -#include "ggml-backend.h" +#include "ggml-backend-impl.h" #include "ggml.h" +#include "ggml-impl.h" #include +#include #include #include #include #include - -#define UNUSED(x) (void)(x) #define MAX(a, b) ((a) > (b) ? (a) : (b)) -#define GGML_MAX_CONCUR (2*GGML_MAX_NODES) +#define MAX_FREE_BLOCKS 256 //#define GGML_ALLOCATOR_DEBUG -//#define AT_PRINTF printf -#define AT_PRINTF(...) ((void)0) - -struct hash_node { - struct ggml_tensor * t; - int n_children; - int n_views; -}; - -static size_t hash(void * p) { - return (size_t)p % GGML_GRAPH_HASHTABLE_SIZE; -} - -static struct hash_node * hash_get(struct hash_node hash_table[], struct ggml_tensor * t) { - size_t h = hash(t); - - // linear probing - size_t i = h; - while (hash_table[i].t != NULL) { - if (hash_table[i].t == t) { - return &hash_table[i]; - } - i = (i + 1) % GGML_GRAPH_HASHTABLE_SIZE; - if (i == h) { - // hash table is full - GGML_ASSERT(false); - } - } - - hash_table[i].t = t; - return &hash_table[i]; -} +//#define AT_PRINTF(...) fprintf(stderr, __VA_ARGS__) +#define AT_PRINTF(...) // TODO: GGML_PAD ? 
static size_t aligned_offset(const void * buffer, size_t offset, size_t alignment) { @@ -59,20 +29,18 @@ struct free_block { size_t size; }; -#define MAX_FREE_BLOCKS 256 - -struct ggml_allocr { +struct ggml_tallocr { struct ggml_backend_buffer * buffer; bool buffer_owned; - void * data; + void * base; size_t alignment; + int n_free_blocks; struct free_block free_blocks[MAX_FREE_BLOCKS]; - struct hash_node hash_table[GGML_GRAPH_HASHTABLE_SIZE]; + size_t max_size; + bool measure; - int parse_seq[GGML_MAX_CONCUR]; - int parse_seq_len; #ifdef GGML_ALLOCATOR_DEBUG struct ggml_tensor * allocated_tensors[1024]; @@ -80,7 +48,7 @@ struct ggml_allocr { }; #ifdef GGML_ALLOCATOR_DEBUG -static void add_allocated_tensor(struct ggml_allocr * alloc, struct ggml_tensor * tensor) { +static void add_allocated_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { for (int i = 0; i < 1024; i++) { if (alloc->allocated_tensors[i] == NULL) { alloc->allocated_tensors[i] = tensor; @@ -89,7 +57,7 @@ static void add_allocated_tensor(struct ggml_allocr * alloc, struct ggml_tensor } GGML_ASSERT(!"out of allocated_tensors"); } -static void remove_allocated_tensor(struct ggml_allocr * alloc, struct ggml_tensor * tensor) { +static void remove_allocated_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { for (int i = 0; i < 1024; i++) { if (alloc->allocated_tensors[i] == tensor || (alloc->allocated_tensors[i] != NULL && alloc->allocated_tensors[i]->data == tensor->data)) { @@ -103,7 +71,7 @@ static void remove_allocated_tensor(struct ggml_allocr * alloc, struct ggml_tens #endif // check if a tensor is allocated by this buffer -static bool ggml_allocr_is_own(struct ggml_allocr * alloc, const struct ggml_tensor * tensor) { +static bool ggml_tallocr_is_own(ggml_tallocr_t alloc, const struct ggml_tensor * tensor) { return tensor->buffer == alloc->buffer; } @@ -111,7 +79,7 @@ static bool ggml_is_view(struct ggml_tensor * t) { return t->view_src != NULL; } -void ggml_allocr_alloc(struct ggml_allocr * alloc, struct ggml_tensor * tensor) { +void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { GGML_ASSERT(!ggml_is_view(tensor)); // views generally get data pointer from one of their sources GGML_ASSERT(tensor->data == NULL); // avoid allocating tensor which already has memory allocated @@ -162,9 +130,10 @@ void ggml_allocr_alloc(struct ggml_allocr * alloc, struct ggml_tensor * tensor) } tensor->data = addr; - AT_PRINTF("%s: allocated data at %p\n", __func__, tensor->data); tensor->buffer = alloc->buffer; - ggml_backend_buffer_init_tensor(alloc->buffer, tensor); + if (!alloc->measure) { + ggml_backend_buffer_init_tensor(alloc->buffer, tensor); + } #ifdef GGML_ALLOCATOR_DEBUG add_allocated_tensor(alloc, tensor); @@ -180,16 +149,16 @@ void ggml_allocr_alloc(struct ggml_allocr * alloc, struct ggml_tensor * tensor) } #endif - alloc->max_size = MAX(alloc->max_size, (char*)addr - (char*)alloc->data + size); + alloc->max_size = MAX(alloc->max_size, (char*)addr - (char*)alloc->base + size); } // this is a very naive implementation, but for our case the number of free blocks should be very small -static void ggml_allocr_free_tensor(struct ggml_allocr * alloc, struct ggml_tensor * tensor) { - if (ggml_allocr_is_own(alloc, tensor) == false) { +static void ggml_tallocr_free_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { + if (ggml_tallocr_is_own(alloc, tensor) == false) { // the tensor was not allocated in this buffer // this can happen because the graph allocator will try to free weights and 
other tensors from different buffers // the easiest way to deal with this is just to ignore it - AT_PRINTF("ignoring %s (their buffer: %p, our buffer: %p)\n", tensor->name, (void *)tensor->buffer, (void *)alloc->buffer); + // AT_PRINTF("ignoring %s (their buffer: %p, our buffer: %p)\n", tensor->name, (void *)tensor->buffer, (void *)alloc->buffer); return; } @@ -199,7 +168,9 @@ static void ggml_allocr_free_tensor(struct ggml_allocr * alloc, struct ggml_tens size = aligned_offset(NULL, size, alloc->alignment); AT_PRINTF("%s: freeing %s at %p (%zu bytes) - n_free_blocks = %d\n", __func__, tensor->name, ptr, size, alloc->n_free_blocks); - ggml_backend_buffer_free_tensor(alloc->buffer, tensor); + if (!alloc->measure) { + ggml_backend_buffer_free_tensor(alloc->buffer, tensor); + } #ifdef GGML_ALLOCATOR_DEBUG remove_allocated_tensor(alloc, tensor); @@ -253,91 +224,180 @@ static void ggml_allocr_free_tensor(struct ggml_allocr * alloc, struct ggml_tens alloc->n_free_blocks++; } -void ggml_allocr_set_parse_seq(struct ggml_allocr * alloc, const int * list, int n) { - for (int i = 0; i < n; i++) { - alloc->parse_seq[i] = list[i]; - } - alloc->parse_seq_len = n; -} - -void ggml_allocr_reset(struct ggml_allocr * alloc) { +void ggml_tallocr_reset(ggml_tallocr_t alloc) { alloc->n_free_blocks = 1; - size_t align_offset = aligned_offset(alloc->data, 0, alloc->alignment); - alloc->free_blocks[0].addr = (char *)alloc->data + align_offset; - alloc->free_blocks[0].size = ggml_backend_buffer_get_size(alloc->buffer) - align_offset; + size_t align_offset = aligned_offset(alloc->base, 0, alloc->alignment); + alloc->free_blocks[0].addr = (char *)alloc->base + align_offset; + + if (alloc->measure) { + alloc->free_blocks[0].size = SIZE_MAX/2; // restrict maximum size of a measure allocator to half size_t max to avoid overflows + } else { + alloc->free_blocks[0].size = ggml_backend_buffer_get_size(alloc->buffer) - align_offset; + } } -struct ggml_allocr * ggml_allocr_new(void * data, size_t size, size_t alignment) { +ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment) { struct ggml_backend_buffer * buffer = ggml_backend_cpu_buffer_from_ptr(NULL, data, size); - struct ggml_allocr * alloc = (struct ggml_allocr *)malloc(sizeof(struct ggml_allocr)); + ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); - *alloc = (struct ggml_allocr){ + *alloc = (struct ggml_tallocr) { /*.buffer = */ buffer, /*.buffer_owned = */ true, /*.base = */ ggml_backend_buffer_get_base(buffer), /*.alignment = */ alignment, /*.n_free_blocks = */ 0, /*.free_blocks = */ {{0}}, - /*.hash_table = */ {{0}}, /*.max_size = */ 0, /*.measure = */ false, - /*.parse_seq = */ {0}, - /*.parse_seq_len = */ 0, #ifdef GGML_ALLOCATOR_DEBUG /*.allocated_tensors = */ {0}, #endif }; - ggml_allocr_reset(alloc); + ggml_tallocr_reset(alloc); return alloc; } -struct ggml_allocr * ggml_allocr_new_measure(size_t alignment) { - struct ggml_allocr * alloc = ggml_allocr_new((void *)0x1000, (size_t)-0x1001, alignment); +ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment) { + ggml_tallocr_t alloc = ggml_tallocr_new((void *)0x1000, SIZE_MAX/2, alignment); alloc->measure = true; return alloc; } -struct ggml_allocr * ggml_allocr_new_from_buffer(struct ggml_backend_buffer * buffer) { - struct ggml_allocr * alloc = (struct ggml_allocr *)malloc(sizeof(struct ggml_allocr)); +ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend) { + // create a backend buffer to get the correct tensor allocation sizes + 
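+    // (a 1-byte buffer suffices: a measure allocator never writes to the buffer; it
+    //  only walks offsets to record the high-water mark in max_size, and
+    //  ggml_tallocr_reset below swaps in a single SIZE_MAX/2 sized free block)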
ggml_backend_buffer_t buffer = ggml_backend_alloc_buffer(backend, 1); - *alloc = (struct ggml_allocr){ + // TODO: move alloc initialization to a common ggml_tallocr_new_impl function + ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); + alloc->buffer_owned = true; + alloc->measure = true; + ggml_tallocr_reset(alloc); + return alloc; +} + +ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size) { + ggml_backend_buffer_t buffer = ggml_backend_alloc_buffer(backend, size); + ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); + alloc->buffer_owned = true; + return alloc; +} + +ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer) { + ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); + + *alloc = (struct ggml_tallocr) { /*.buffer = */ buffer, /*.buffer_owned = */ false, /*.base = */ ggml_backend_buffer_get_base(buffer), /*.alignment = */ ggml_backend_buffer_get_alignment(buffer), /*.n_free_blocks = */ 0, /*.free_blocks = */ {{0}}, - /*.hash_table = */ {{0}}, /*.max_size = */ 0, /*.measure = */ false, - /*.parse_seq = */ {0}, - /*.parse_seq_len = */ 0, #ifdef GGML_ALLOCATOR_DEBUG /*.allocated_tensors = */ {0}, #endif }; - ggml_allocr_reset(alloc); + ggml_tallocr_reset(alloc); return alloc; } -void ggml_allocr_free(struct ggml_allocr * alloc) { +struct ggml_backend_buffer * ggml_tallocr_get_buffer(ggml_tallocr_t alloc) { + return alloc->buffer; +} + +void ggml_tallocr_free(ggml_tallocr_t alloc) { + if (alloc == NULL) { + return; + } + if (alloc->buffer_owned) { ggml_backend_buffer_free(alloc->buffer); } free(alloc); } -bool ggml_allocr_is_measure(struct ggml_allocr * alloc) { +bool ggml_tallocr_is_measure(ggml_tallocr_t alloc) { return alloc->measure; } -//////////// compute graph allocator +size_t ggml_tallocr_max_size(ggml_tallocr_t alloc) { + return alloc->max_size; +} + +// graph allocator + +struct hash_node { + int n_children; + int n_views; +}; + +struct ggml_gallocr { + ggml_tallocr_t talloc; + struct ggml_hash_set hash_set; + struct hash_node * hash_values; + size_t hash_values_size; + ggml_tallocr_t * hash_allocs; + int * parse_seq; + int parse_seq_len; +}; + +ggml_gallocr_t ggml_gallocr_new(void) { + ggml_gallocr_t galloc = (ggml_gallocr_t)malloc(sizeof(struct ggml_gallocr)); + + *galloc = (struct ggml_gallocr) { + /*.talloc = */ NULL, + /*.hash_set = */ {0}, + /*.hash_values = */ NULL, + /*.hash_values_size = */ 0, + /*.hash_allocs = */ NULL, + /*.parse_seq = */ NULL, + /*.parse_seq_len = */ 0, + }; + + return galloc; +} + +void ggml_gallocr_free(ggml_gallocr_t galloc) { + if (galloc == NULL) { + return; + } + + if (galloc->hash_set.keys != NULL) { + free(galloc->hash_set.keys); + } + if (galloc->hash_values != NULL) { + free(galloc->hash_values); + } + if (galloc->hash_allocs != NULL) { + free(galloc->hash_allocs); + } + if (galloc->parse_seq != NULL) { + free(galloc->parse_seq); + } + free(galloc); +} + +void ggml_gallocr_set_parse_seq(ggml_gallocr_t galloc, const int * list, int n) { + free(galloc->parse_seq); + galloc->parse_seq = malloc(sizeof(int) * n); + + for (int i = 0; i < n; i++) { + galloc->parse_seq[i] = list[i]; + } + galloc->parse_seq_len = n; +} + +static struct hash_node * hash_get(ggml_gallocr_t galloc, struct ggml_tensor * t) { + size_t i = ggml_hash_find_or_insert(galloc->hash_set, t); + return &galloc->hash_values[i]; +} static bool ggml_are_same_layout(const struct ggml_tensor * a, const struct ggml_tensor * b) { if (a->type != b->type) { @@ -378,27 +438,40 
@@ static bool ggml_op_can_inplace(enum ggml_op op) { } } -static void init_view(struct ggml_allocr * alloc, struct ggml_tensor * view, bool update_backend) { - assert(view->view_src != NULL && view->view_src->data != NULL); +static ggml_tallocr_t node_tallocr(ggml_gallocr_t galloc, struct ggml_tensor * node) { + if (galloc->talloc != NULL) { + return galloc->talloc; + } + return galloc->hash_allocs[ggml_hash_find_or_insert(galloc->hash_set, node)]; +} + +static void init_view(ggml_gallocr_t galloc, struct ggml_tensor * view, bool update_backend) { + ggml_tallocr_t alloc = node_tallocr(galloc, view); + + //printf("init_view: %s from src %s\n", view->name, view->view_src->name); + GGML_ASSERT(view->view_src != NULL && view->view_src->data != NULL); if (update_backend) { view->backend = view->view_src->backend; } - view->buffer = view->view_src->buffer; view->data = (char *)view->view_src->data + view->view_offs; // FIXME: the view should be initialized by the owning buffer, but currently this breaks the CUDA backend // due to the ggml_tensor_extra_gpu ring buffer overwriting the KV cache extras - assert(ggml_allocr_is_measure(alloc) || !view->buffer || view->buffer->backend == alloc->buffer->backend); - ggml_backend_buffer_init_tensor(alloc->buffer, view); + assert(ggml_tallocr_is_measure(alloc) || !view->buffer || view->buffer->backend == alloc->buffer->backend); + + if (!alloc->measure) { + ggml_backend_buffer_init_tensor(alloc->buffer, view); + } } -static void allocate_node(struct ggml_allocr * alloc, struct ggml_tensor * node) { - struct hash_node * ht = alloc->hash_table; +static void allocate_node(ggml_gallocr_t galloc, struct ggml_tensor * node) { + ggml_tallocr_t alloc = node_tallocr(galloc, node); + if (node->data == NULL) { if (ggml_is_view(node)) { - init_view(alloc, node, true); + init_view(galloc, node, true); } else { // see if we can reuse a parent's buffer (inplace) if (ggml_op_can_inplace(node->op)) { @@ -409,16 +482,16 @@ static void allocate_node(struct ggml_allocr * alloc, struct ggml_tensor * node) } // if the node's data is external, then we cannot re-use it - if (ggml_allocr_is_own(alloc, parent) == false) { + if (ggml_tallocr_is_own(alloc, parent) == false) { AT_PRINTF("not reusing parent %s for %s as %p is external\n", parent->name, node->name, parent->data); continue; } - struct hash_node * p_hn = hash_get(ht, parent); + struct hash_node * p_hn = hash_get(galloc, parent); if (parent->data != NULL && p_hn->n_children == 1 && p_hn->n_views == 0 && ggml_are_same_layout(node, parent)) { if (ggml_is_view(parent)) { struct ggml_tensor * view_src = parent->view_src; - struct hash_node * view_src_hn = hash_get(ht, view_src); + struct hash_node * view_src_hn = hash_get(galloc, view_src); if (view_src_hn->n_views == 1 && view_src_hn->n_children == 0 && view_src->data == parent->data) { // TODO: the offset of the view parent must be kept to ensure that the op doesn't overwrite // the parent's data that it will need later (same layout requirement). 
the problem is that then @@ -428,170 +501,267 @@ static void allocate_node(struct ggml_allocr * alloc, struct ggml_tensor * node) AT_PRINTF("reusing view parent %s (%s) for %s\n", parent->name, view_src->name, node->name); node->view_src = view_src; view_src_hn->n_views += 1; - init_view(alloc, node, false); + init_view(galloc, node, false); return; } } else { AT_PRINTF("reusing parent %s for %s\n", parent->name, node->name); node->view_src = parent; p_hn->n_views += 1; - init_view(alloc, node, false); + init_view(galloc, node, false); return; } } } } - ggml_allocr_alloc(alloc, node); + ggml_tallocr_alloc(alloc, node); } } } -size_t ggml_allocr_alloc_graph_n( - struct ggml_allocr * alloc, - struct ggml_cgraph ** graphs, int n_graphs, - struct ggml_tensor *** inputs, struct ggml_tensor *** outputs) { +static void free_node(ggml_gallocr_t galloc, struct ggml_tensor * node) { + ggml_tallocr_t alloc = node_tallocr(galloc, node); - // reset hash table - struct hash_node * ht = alloc->hash_table; - memset(ht, 0, sizeof(struct hash_node) * GGML_GRAPH_HASHTABLE_SIZE); + ggml_tallocr_free_tensor(alloc, node); +} + +static void ggml_tallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgraph * gf) { + const int * parse_seq = galloc->parse_seq; + int parse_seq_len = galloc->parse_seq_len; // count number of children and views - for (int g = 0; g < n_graphs; g++) { - struct ggml_cgraph * gf = graphs[g]; - for (int i = 0; i < gf->n_nodes; i++) { + for (int i = 0; i < gf->n_nodes; i++) { + struct ggml_tensor * node = gf->nodes[i]; + + if (ggml_is_view(node)) { + struct ggml_tensor * view_src = node->view_src; + hash_get(galloc, view_src)->n_views += 1; + if (node->buffer == NULL && node->data != NULL) { + // view of a pre-allocated tensor, didn't call init_view() yet + init_view(galloc, node, true); + } + } + + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + break; + } + hash_get(galloc, parent)->n_children += 1; + if (ggml_is_view(parent) && parent->buffer == NULL && parent->data != NULL) { + init_view(galloc, parent, true); + } + } + } + + // allocate tensors + // if we have parse_seq then we allocate nodes following the list, and we only free nodes at barriers + int last_barrier_pos = 0; + int n_nodes = parse_seq_len ? parse_seq_len : gf->n_nodes; + + for (int ind = 0; ind < n_nodes; ind++) { + // allocate a node if there is no parse_seq or this is not a barrier + if (parse_seq_len == 0 || parse_seq[ind] != -1) { + int i = parse_seq_len ? 
parse_seq[ind] : ind; struct ggml_tensor * node = gf->nodes[i]; - if (ggml_is_view(node)) { - struct ggml_tensor * view_src = node->view_src; - hash_get(ht, view_src)->n_views += 1; - if (node->buffer == NULL && node->data != NULL) { - // view of a pre-allocated tensor, didn't call init_view() yet - init_view(alloc, node, true); - } - } - + // allocate parents (leafs) for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * parent = node->src[j]; if (parent == NULL) { break; } - hash_get(ht, parent)->n_children += 1; - if (ggml_is_view(parent) && parent->buffer == NULL && parent->data != NULL) { - init_view(alloc, parent, true); + allocate_node(galloc, parent); + } + + // allocate node + allocate_node(galloc, node); + + AT_PRINTF("exec: %s (%s) <= ", ggml_op_name(node->op), node->name); + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + break; + } + AT_PRINTF("%s", parent->name); + if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { + AT_PRINTF(", "); } } + AT_PRINTF("\n"); } - } - // allocate tensors - for (int g = 0; g < n_graphs; g++) { - struct ggml_cgraph * gf = graphs[g]; - AT_PRINTF("####### graph %d/%d\n", g, n_graphs); - // graph inputs are allocated first to ensure that they are not overwritten by each other - if (inputs != NULL && inputs[g] != NULL) { - for (int i = 0; inputs[g][i] != NULL; i++) { - struct ggml_tensor * input = inputs[g][i]; - AT_PRINTF("input: %s\n", input->name); - allocate_node(alloc, input); - } - } - // if we have parse_seq then we allocate nodes following the list, and we only free nodes at barriers - int last_barrier_pos = 0; - int n_nodes = alloc->parse_seq_len ? alloc->parse_seq_len : gf->n_nodes; + // update parents + // update immediately if there is no parse_seq + // update only at barriers if there is parse_seq + if ((parse_seq_len == 0) || parse_seq[ind] == -1) { + int update_start = parse_seq_len ? last_barrier_pos : ind; + int update_end = parse_seq_len ? ind : ind + 1; + for (int i = update_start; i < update_end; i++) { + int node_i = parse_seq_len ? parse_seq[i] : i; + struct ggml_tensor * node = gf->nodes[node_i]; - for (int ind = 0; ind < n_nodes; ind++) { - // allocate a node if there is no parse_seq or this is not a barrier - if ((alloc->parse_seq_len==0) || alloc->parse_seq[ind] != -1) { - int i = alloc->parse_seq_len ? alloc->parse_seq[ind] : ind; - struct ggml_tensor * node = gf->nodes[i]; - - // allocate parents (leafs) for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * parent = node->src[j]; if (parent == NULL) { break; } - allocate_node(alloc, parent); - } + struct hash_node * p_hn = hash_get(galloc, parent); + p_hn->n_children -= 1; - // allocate node - allocate_node(alloc, node); + //AT_PRINTF("parent %s: %d children, %d views\n", parent->name, parent->n_children, parent->n_views); - AT_PRINTF("exec: %s (%s) <= ", ggml_op_name(node->op), node->name); - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - AT_PRINTF("%s", parent->name); - if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { - AT_PRINTF(", "); - } - } - AT_PRINTF("\n"); - } - - // update parents - // update immediately if there is no parse_seq - // update only at barriers if there is parse_seq - if ((alloc->parse_seq_len == 0) || alloc->parse_seq[ind] == -1) { - int update_start = alloc->parse_seq_len ? last_barrier_pos : ind; - int update_end = alloc->parse_seq_len ? 
ind : ind + 1; - for (int i = update_start; i < update_end; i++) { - int node_i = alloc->parse_seq_len ? alloc->parse_seq[i] : i; - struct ggml_tensor * node = gf->nodes[node_i]; - - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; + if (p_hn->n_children == 0 && p_hn->n_views == 0) { + if (ggml_is_view(parent)) { + struct ggml_tensor * view_src = parent->view_src; + struct hash_node * view_src_hn = hash_get(galloc, view_src); + view_src_hn->n_views -= 1; + AT_PRINTF("view_src %s: %d children, %d views\n", view_src->name, view_src_hn->n_children, view_src_hn->n_views); + if (view_src_hn->n_views == 0 && view_src_hn->n_children == 0) { + free_node(galloc, view_src); + } } - struct hash_node * p_hn = hash_get(ht, parent); - p_hn->n_children -= 1; - - //AT_PRINTF("parent %s: %d children, %d views\n", parent->name, parent->n_children, parent->n_views); - - if (p_hn->n_children == 0 && p_hn->n_views == 0) { - if (ggml_is_view(parent)) { - struct ggml_tensor * view_src = parent->view_src; - struct hash_node * view_src_hn = hash_get(ht, view_src); - view_src_hn->n_views -= 1; - AT_PRINTF("view_src %s: %d children, %d views\n", view_src->name, view_src_hn->n_children, view_src_hn->n_views); - if (view_src_hn->n_views == 0 && view_src_hn->n_children == 0 && view_src->data != node->data) { - ggml_allocr_free_tensor(alloc, view_src); - } - } - else { - if (parent->data != node->data) { - ggml_allocr_free_tensor(alloc, parent); - } - } + else { + free_node(galloc, parent); } } } - AT_PRINTF("\n"); - if (alloc->parse_seq_len) { - last_barrier_pos = ind + 1; - } } - } - // free graph outputs here that wouldn't be freed otherwise because they have no children - if (outputs != NULL && outputs[g] != NULL) { - for (int i = 0; outputs[g][i] != NULL; i++) { - struct ggml_tensor * output = outputs[g][i]; - AT_PRINTF("output: %s\n", output->name); - ggml_allocr_free_tensor(alloc, output); + AT_PRINTF("\n"); + if (parse_seq_len) { + last_barrier_pos = ind + 1; } } } - - return alloc->max_size; } -size_t ggml_allocr_alloc_graph(struct ggml_allocr * alloc, struct ggml_cgraph * graph) { - return ggml_allocr_alloc_graph_n(alloc, &graph, 1, NULL, NULL); +size_t ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, ggml_tallocr_t talloc, struct ggml_cgraph * graph) { + size_t hash_size = graph->visited_hash_table.size; + + // check if the hash table is initialized and large enough + if (galloc->hash_set.size < hash_size) { + if (galloc->hash_set.keys != NULL) { + free(galloc->hash_set.keys); + } + if (galloc->hash_values != NULL) { + free(galloc->hash_values); + } + galloc->hash_set.keys = malloc(sizeof(struct ggml_tensor *) * hash_size); + galloc->hash_set.size = hash_size; + galloc->hash_values = malloc(sizeof(struct hash_node) * hash_size); + } + + // reset hash table + memset(galloc->hash_set.keys, 0, sizeof(struct ggml_tensor *) * hash_size); + memset(galloc->hash_values, 0, sizeof(struct hash_node) * hash_size); + + galloc->talloc = talloc; + ggml_tallocr_alloc_graph_impl(galloc, graph); + galloc->talloc = NULL; + + size_t max_size = ggml_tallocr_max_size(talloc); + + return max_size; } -size_t ggml_allocr_max_size(struct ggml_allocr * alloc) { - return alloc->max_size; +void ggml_gallocr_alloc_graph_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, struct ggml_hash_set hash_set, ggml_tallocr_t * hash_node_talloc) { + const size_t hash_size = hash_set.size; + + GGML_ASSERT(hash_size >= (size_t)(graph->n_nodes + graph->n_leafs)); + + 
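+    // unlike ggml_gallocr_alloc_graph above, no single tensor allocator is set here:
+    // with talloc left NULL, node_tallocr() resolves each node through the per-node
+    // allocators passed in via hash_node_talloc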
galloc->talloc = NULL; + + // alloc hash_values if needed + if (galloc->hash_values == NULL || galloc->hash_values_size < hash_size) { + free(galloc->hash_values); + galloc->hash_values = malloc(sizeof(struct hash_node) * hash_size); + galloc->hash_values_size = hash_size; + } + + // free hash_set.keys if needed + if (galloc->hash_set.keys != NULL) { + free(galloc->hash_set.keys); + } + galloc->hash_set = hash_set; + + // reset hash values + memset(galloc->hash_values, 0, sizeof(struct hash_node) * hash_size); + + galloc->hash_allocs = hash_node_talloc; + + ggml_tallocr_alloc_graph_impl(galloc, graph); + + // remove unowned resources + galloc->hash_set.keys = NULL; + galloc->hash_allocs = NULL; +} + +// legacy API wrapper + +struct ggml_allocr { + ggml_tallocr_t talloc; + ggml_gallocr_t galloc; +}; + +static ggml_allocr_t ggml_allocr_new_impl(ggml_tallocr_t talloc) { + ggml_allocr_t alloc = (ggml_allocr_t)malloc(sizeof(struct ggml_allocr)); + *alloc = (struct ggml_allocr) { + /*.talloc = */ talloc, + /*.galloc = */ ggml_gallocr_new(), + }; + return alloc; +} + +ggml_allocr_t ggml_allocr_new(void * data, size_t size, size_t alignment) { + return ggml_allocr_new_impl(ggml_tallocr_new(data, size, alignment)); +} + +ggml_allocr_t ggml_allocr_new_measure(size_t alignment) { + return ggml_allocr_new_impl(ggml_tallocr_new_measure(alignment)); +} + +ggml_allocr_t ggml_allocr_new_from_buffer(struct ggml_backend_buffer * buffer) { + return ggml_allocr_new_impl(ggml_tallocr_new_from_buffer(buffer)); +} + +ggml_allocr_t ggml_allocr_new_from_backend(struct ggml_backend * backend, size_t size) { + return ggml_allocr_new_impl(ggml_tallocr_new_from_backend(backend, size)); +} + +ggml_allocr_t ggml_allocr_new_measure_from_backend(struct ggml_backend * backend) { + return ggml_allocr_new_impl(ggml_tallocr_new_measure_from_backend(backend)); +} + +struct ggml_backend_buffer * ggml_allocr_get_buffer(ggml_allocr_t alloc) { + return ggml_tallocr_get_buffer(alloc->talloc); +} + +void ggml_allocr_set_parse_seq(ggml_allocr_t alloc, const int * list, int n) { + ggml_gallocr_set_parse_seq(alloc->galloc, list, n); +} + +void ggml_allocr_free(ggml_allocr_t alloc) { + ggml_gallocr_free(alloc->galloc); + ggml_tallocr_free(alloc->talloc); + free(alloc); +} + +bool ggml_allocr_is_measure(ggml_allocr_t alloc) { + return ggml_tallocr_is_measure(alloc->talloc); +} + +void ggml_allocr_reset(ggml_allocr_t alloc) { + ggml_tallocr_reset(alloc->talloc); +} + +void ggml_allocr_alloc(ggml_allocr_t alloc, struct ggml_tensor * tensor) { + ggml_tallocr_alloc(alloc->talloc, tensor); +} + +size_t ggml_allocr_max_size(ggml_allocr_t alloc) { + return ggml_tallocr_max_size(alloc->talloc); +} + +size_t ggml_allocr_alloc_graph(ggml_allocr_t alloc, struct ggml_cgraph * graph) { + return ggml_gallocr_alloc_graph(alloc->galloc, alloc->talloc, graph); } diff --git a/ggml-alloc.h b/ggml-alloc.h index e38758878..dde2a06bf 100644 --- a/ggml-alloc.h +++ b/ggml-alloc.h @@ -6,27 +6,79 @@ extern "C" { #endif +struct ggml_backend; struct ggml_backend_buffer; -GGML_API struct ggml_allocr * ggml_allocr_new(void * data, size_t size, size_t alignment); -GGML_API struct ggml_allocr * ggml_allocr_new_measure(size_t alignment); -GGML_API struct ggml_allocr * ggml_allocr_new_from_buffer(struct ggml_backend_buffer * buffer); +// +// Legacy API +// + +typedef struct ggml_allocr * ggml_allocr_t; + +// initialize allocator for use with CPU backend only +GGML_API ggml_allocr_t ggml_allocr_new(void * data, size_t size, size_t alignment); +GGML_API ggml_allocr_t 
ggml_allocr_new_measure(size_t alignment);
+
+// initialize allocator for use with ggml-backend
+GGML_API ggml_allocr_t ggml_allocr_new_from_buffer(struct ggml_backend_buffer * buffer);
+GGML_API ggml_allocr_t ggml_allocr_new_from_backend(struct ggml_backend * backend, size_t size); // allocates an owned buffer
+GGML_API ggml_allocr_t ggml_allocr_new_measure_from_backend(struct ggml_backend * backend);
+
+GGML_API struct ggml_backend_buffer * ggml_allocr_get_buffer(ggml_allocr_t alloc);

// tell the allocator to parse nodes following the order described in the list
// you should call this if your graphs are optimized to execute out-of-order
-GGML_API void ggml_allocr_set_parse_seq(struct ggml_allocr * alloc, const int * list, int n);
+GGML_API void ggml_allocr_set_parse_seq(ggml_allocr_t alloc, const int * list, int n);

-GGML_API void ggml_allocr_free (struct ggml_allocr * alloc);
-GGML_API bool ggml_allocr_is_measure (struct ggml_allocr * alloc);
-GGML_API void ggml_allocr_reset (struct ggml_allocr * alloc);
-GGML_API void ggml_allocr_alloc (struct ggml_allocr * alloc, struct ggml_tensor * tensor);
-GGML_API size_t ggml_allocr_alloc_graph(struct ggml_allocr * alloc, struct ggml_cgraph * graph);
-GGML_API size_t ggml_allocr_max_size (struct ggml_allocr * alloc);
+GGML_API void ggml_allocr_free (ggml_allocr_t alloc);
+GGML_API bool ggml_allocr_is_measure (ggml_allocr_t alloc);
+GGML_API void ggml_allocr_reset (ggml_allocr_t alloc);
+GGML_API void ggml_allocr_alloc (ggml_allocr_t alloc, struct ggml_tensor * tensor);
+GGML_API size_t ggml_allocr_max_size (ggml_allocr_t alloc);

-GGML_API size_t ggml_allocr_alloc_graph_n(
-    struct ggml_allocr * alloc,
-    struct ggml_cgraph ** graphs, int n_graphs,
-    struct ggml_tensor *** inputs, struct ggml_tensor *** outputs);
+GGML_API size_t ggml_allocr_alloc_graph(ggml_allocr_t alloc, struct ggml_cgraph * graph);
+
+//
+// ggml-backend v2 API
+//
+
+// Separate tensor and graph allocator objects
+// This is necessary for multi-backend allocation because the graph allocator needs to use multiple tensor allocators
+// The original API is kept as a wrapper around the new API
+
+// Tensor allocator
+typedef struct ggml_tallocr * ggml_tallocr_t;
+
+GGML_API ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment);
+GGML_API ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment);
+GGML_API ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer);
+GGML_API ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size); // allocates an owned buffer
+GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend);
+
+GGML_API struct ggml_backend_buffer * ggml_tallocr_get_buffer(ggml_tallocr_t talloc);
+
+GGML_API void ggml_tallocr_free (ggml_tallocr_t talloc);
+GGML_API bool ggml_tallocr_is_measure (ggml_tallocr_t talloc);
+GGML_API void ggml_tallocr_reset (ggml_tallocr_t talloc);
+GGML_API void ggml_tallocr_alloc (ggml_tallocr_t talloc, struct ggml_tensor * tensor);
+GGML_API size_t ggml_tallocr_max_size (ggml_tallocr_t talloc);
+
+
+// Graph allocator
+typedef struct ggml_gallocr * ggml_gallocr_t;
+
+GGML_API ggml_gallocr_t ggml_gallocr_new(void);
+GGML_API void ggml_gallocr_free(ggml_gallocr_t galloc);
+
+GGML_API void ggml_gallocr_set_parse_seq(ggml_gallocr_t galloc, const int * list, int n);
+GGML_API size_t ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, ggml_tallocr_t talloc, struct ggml_cgraph * graph);
+
+// Allocate tensors from the allocators given by the hash table
+GGML_API void ggml_gallocr_alloc_graph_n(
+    ggml_gallocr_t galloc,
+    struct ggml_cgraph * graph,
+    struct ggml_hash_set hash_set,
+    ggml_tallocr_t * hash_node_talloc);

#ifdef __cplusplus
}
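To illustrate the split API above, a minimal sketch of the intended measure-then-allocate flow follows. Here build_graph() and ctx are hypothetical stand-ins for graph construction, and the 32-byte alignment is only an example value; none of this is part of the patch itself:

    ggml_gallocr_t galloc = ggml_gallocr_new();

    // measure pass: fake addresses only, records the worst-case buffer size
    ggml_tallocr_t measure = ggml_tallocr_new_measure(/*alignment*/ 32);
    size_t buf_size = ggml_gallocr_alloc_graph(galloc, measure, build_graph(ctx));
    ggml_tallocr_free(measure);

    // allocation pass: rebuild the same graph, now backed by real memory
    void * buf = malloc(buf_size);
    ggml_tallocr_t talloc = ggml_tallocr_new(buf, buf_size, /*alignment*/ 32);
    ggml_gallocr_alloc_graph(galloc, talloc, build_graph(ctx));

    // ... compute ...

    ggml_tallocr_free(talloc);
    ggml_gallocr_free(galloc);
    free(buf);

The same ggml_gallocr_t can be reused for both passes, since its hash tables are reset on every ggml_gallocr_alloc_graph call.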
diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h
new file mode 100644
index 000000000..211e3d424
--- /dev/null
+++ b/ggml-backend-impl.h
@@ -0,0 +1,87 @@
+#pragma once
+
+// ggml-backend internal header
+
+#include "ggml-backend.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+    //
+    // Backend buffer
+    //
+
+    typedef void * ggml_backend_buffer_context_t;
+
+    struct ggml_backend_buffer_i {
+        void   (*free_buffer)   (ggml_backend_buffer_t buffer);
+        void * (*get_base)      (ggml_backend_buffer_t buffer); // get base pointer
+        size_t (*get_alloc_size)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // pre-allocation callback
+        void   (*init_tensor)   (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // post-allocation callback
+        void   (*free_tensor)   (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // pre-free callback
+    };
+
+    struct ggml_backend_buffer {
+        struct ggml_backend_buffer_i iface;
+
+        ggml_backend_t backend;
+        ggml_backend_buffer_context_t context;
+
+        size_t size;
+    };
+
+    GGML_API ggml_backend_buffer_t ggml_backend_buffer_init(
+        struct ggml_backend * backend,
+        struct ggml_backend_buffer_i iface,
+        ggml_backend_buffer_context_t context,
+        size_t size);
+
+    //
+    // Backend
+    //
+
+    typedef void * ggml_backend_context_t;
+
+    struct ggml_backend_i {
+        const char * (*get_name)(ggml_backend_t backend);
+
+        void (*free)(ggml_backend_t backend);
+
+        // buffer allocation
+        ggml_backend_buffer_t (*alloc_buffer)(ggml_backend_t backend, size_t size);
+
+        // get buffer alignment
+        size_t (*get_alignment)(ggml_backend_t backend);
+
+        // tensor data access
+        // these functions can be asynchronous, helper functions are provided for synchronous access that automatically call synchronize
+        void (*set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size);
+        void (*get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size);
+        void (*synchronize)     (ggml_backend_t backend);
+
+        // (optional) copy tensor between different backends, allow for single-copy transfers
+        void (*cpy_tensor_from)(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst);
+        void (*cpy_tensor_to)  (ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst);
+
+        // compute graph with a plan
+        ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, struct ggml_cgraph * cgraph);
+        void (*graph_plan_free)   (ggml_backend_t backend, ggml_backend_graph_plan_t plan);
+        void (*graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan);
+
+        // compute graph without a plan
+        void (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph);
+
+        // check if the backend supports an operation
+        bool (*supports_op)(ggml_backend_t backend, const struct ggml_tensor * op);
+    };
+
+    struct ggml_backend {
+        struct ggml_backend_i iface;
+
+        ggml_backend_context_t context;
+    };
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/ggml-backend.c b/ggml-backend.c
index ca8d83daf..f6e5fceed 100644
--- a/ggml-backend.c
+++ b/ggml-backend.c
@@ -1,7 +1,9 @@
-#include "ggml-backend.h"
+#include "ggml-backend-impl.h"
 #include "ggml-alloc.h"
+#include "ggml-impl.h"

 #include
+#include
 #include
 #include
 #include
@@ -33,6 +35,10 @@ ggml_backend_buffer_t
ggml_backend_buffer_init( } void ggml_backend_buffer_free(ggml_backend_buffer_t buffer) { + if (buffer == NULL) { + return; + } + if (buffer->iface.free_buffer != NULL) { buffer->iface.free_buffer(buffer); } @@ -43,15 +49,20 @@ size_t ggml_backend_buffer_get_alignment(ggml_backend_buffer_t buffer) { return ggml_backend_get_alignment(buffer->backend); } -void * ggml_backend_buffer_get_base(ggml_backend_buffer_t buffer) { - return buffer->iface.get_base(buffer); -} - size_t ggml_backend_buffer_get_size(ggml_backend_buffer_t buffer) { return buffer->size; } +void * ggml_backend_buffer_get_base(ggml_backend_buffer_t buffer) { + void * base = buffer->iface.get_base(buffer); + + GGML_ASSERT(base != NULL && "backend buffer base cannot be NULL"); + + return base; +} + size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + // get_alloc_size is optional, defaults to ggml_nbytes if (buffer->iface.get_alloc_size) { return buffer->iface.get_alloc_size(buffer, tensor); } @@ -59,12 +70,14 @@ size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct g } void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + // init_tensor is optional if (buffer->iface.init_tensor) { buffer->iface.init_tensor(buffer, tensor); } } void ggml_backend_buffer_free_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { + // free_tensor is optional if (buffer->iface.free_tensor) { buffer->iface.free_tensor(buffer, tensor); } @@ -73,14 +86,21 @@ void ggml_backend_buffer_free_tensor(ggml_backend_buffer_t buffer, struct ggml_t // backend ggml_backend_t ggml_get_backend(const struct ggml_tensor * tensor) { - return tensor->buffer->backend; + return tensor->buffer ? tensor->buffer->backend : NULL; } const char * ggml_backend_name(ggml_backend_t backend) { + if (backend == NULL) { + return "NULL"; + } return backend->iface.get_name(backend); } void ggml_backend_free(ggml_backend_t backend) { + if (backend == NULL) { + return; + } + backend->iface.free(backend); } @@ -101,13 +121,23 @@ void ggml_backend_tensor_get_async(const struct ggml_tensor * tensor, void * dat } void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - ggml_get_backend(tensor)->iface.set_tensor_async(ggml_get_backend(tensor), tensor, data, offset, size); - ggml_get_backend(tensor)->iface.synchronize(ggml_get_backend(tensor)); + ggml_backend_t backend = ggml_get_backend(tensor); + + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(backend != NULL && "tensor backend not set"); + + backend->iface.set_tensor_async(backend, tensor, data, offset, size); + backend->iface.synchronize(backend); } void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - ggml_get_backend(tensor)->iface.get_tensor_async(ggml_get_backend(tensor), tensor, data, offset, size); - ggml_get_backend(tensor)->iface.synchronize(ggml_get_backend(tensor)); + ggml_backend_t backend = ggml_get_backend(tensor); + + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); + GGML_ASSERT(backend != NULL && "tensor backend not set"); + + backend->iface.get_tensor_async(backend, tensor, data, offset, size); + backend->iface.synchronize(backend); } void ggml_backend_synchronize(ggml_backend_t backend) { @@ -156,7 +186,7 @@ void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst //printf("dst: %s ne: [%d %d %d %d] nb: [%d %d %d 
%d]\n", dst->name, (int)dst->ne[0], (int)dst->ne[1], (int)dst->ne[2], (int)dst->ne[3], (int)dst->nb[0], (int)dst->nb[1], (int)dst->nb[2], (int)dst->nb[3]); GGML_ASSERT(ggml_are_same_layout(src, dst) && "cannot copy tensors with different layouts"); - // printf("cpy tensor %s from %s to %s (%lu bytes)\n", src->name, ggml_backend_name(src->backend), ggml_backend_name(dst->backend), ggml_nbytes(src)); + // fprintf(stderr, "cpy tensor %s from %s to %s (%lu bytes)\n", src->name, ggml_backend_name(src->backend), ggml_backend_name(dst->backend), ggml_nbytes(src)); if (src == dst) { return; @@ -234,6 +264,8 @@ static ggml_backend_buffer_t ggml_backend_cpu_alloc_buffer(ggml_backend_t backen size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? + GGML_ASSERT(data != NULL && "failed to allocate buffer"); + return ggml_backend_buffer_init(backend, cpu_backend_buffer_i, data, size); } @@ -271,8 +303,7 @@ static void ggml_backend_cpu_cpy_tensor_from(ggml_backend_t backend, struct ggml } static void ggml_backend_cpu_cpy_tensor_to(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst) { - // for a backend such as CUDA that can queue async calls, it is ok to do this asynchronously, but it may not be the case for other backends - ggml_backend_tensor_set_async(dst, src->data, 0, ggml_nbytes(src)); + ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); UNUSED(backend); } @@ -383,3 +414,537 @@ void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(ggml_backend_t backend_cpu, void * ptr, size_t size) { return ggml_backend_buffer_init(backend_cpu, cpu_backend_buffer_i_from_ptr, ptr, size); } + +// scheduler + +#define GGML_MAX_BACKENDS 4 +#define GGML_MAX_SPLITS 256 +#define GGML_MAX_SPLIT_INPUTS 16 + +struct ggml_backend_sched_split { + ggml_tallocr_t tallocr; + int i_start; + int i_end; + struct ggml_tensor * inputs[GGML_MAX_SPLIT_INPUTS]; + int n_inputs; + struct ggml_cgraph * graph; +}; + +struct ggml_backend_sched { + int n_backends; + ggml_backend_t backends[GGML_MAX_BACKENDS]; + ggml_tallocr_t tallocs[GGML_MAX_BACKENDS]; + + ggml_gallocr_t galloc; + + struct ggml_hash_set hash_set; + ggml_tallocr_t * node_talloc; // [hash_set.size] + struct ggml_tensor * (* node_copies)[GGML_MAX_BACKENDS]; // [hash_set.size][GGML_MAX_BACKENDS] + + struct ggml_cgraph * graph; + struct ggml_backend_sched_split splits[GGML_MAX_SPLITS]; + int n_splits; + + struct ggml_context * ctx; + + // align context_buffer to GGML_MEM_ALIGN + #ifdef _MSC_VER + __declspec(align(GGML_MEM_ALIGN)) + #else + __attribute__((aligned(GGML_MEM_ALIGN))) + #endif + char context_buffer[GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS*sizeof(struct ggml_tensor) + GGML_MAX_SPLITS*sizeof(struct ggml_cgraph)]; +}; + +#define hash_id(node) ggml_hash_find_or_insert(sched->hash_set, node) +#define node_allocr(node) sched->node_talloc[hash_id(node)] + +static bool ggml_is_view_op(enum ggml_op op) { + return op == GGML_OP_VIEW || op == GGML_OP_RESHAPE || op == GGML_OP_PERMUTE || op == GGML_OP_TRANSPOSE; +} + +// returns the priority of the backend, lower is better +static int sched_backend_prio(ggml_backend_sched_t sched, ggml_backend_t backend) { + for (int i = 0; i < sched->n_backends; i++) { + if (sched->backends[i] == backend) { + return i; + } + } + return INT_MAX; +} + +static int sched_allocr_prio(ggml_backend_sched_t sched, ggml_tallocr_t allocr) { + for (int i = 
0; i < sched->n_backends; i++) { + if (sched->tallocs[i] == allocr) { + return i; + } + } + return INT_MAX; +} + +// returns the backend that should be used for the node based on the current locations +char causes[GGML_DEFAULT_GRAPH_SIZE*4 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug, remove +static ggml_backend_t sched_backend_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * node) { + // if the dst tensor is already allocated in a buffer, we must assume that it is critical to keep it there + // ie. kv cache updates + // note that this doesn't allow fallback to CPU. need to add output tensors to the splits to copy the data back to the original backend. + // dst + ggml_backend_t cur_backend = ggml_get_backend(node); + if (cur_backend != NULL) { + sprintf(causes[hash_id(node)], "1.dst"); + return cur_backend; + } + + // view_src + if (node->view_src != NULL && ggml_get_backend(node->view_src) != NULL) { + sprintf(causes[hash_id(node)], "1.vsrc"); + return ggml_get_backend(node->view_src); + } + + // src + int cur_prio = INT_MAX; + size_t cur_size = 0; + + for (int i = 0; i < GGML_MAX_SRC; i++) { + const struct ggml_tensor * src = node->src[i]; + if (src == NULL) { + break; + } + ggml_backend_t src_backend = ggml_get_backend(src); + if (src_backend != NULL) { + int src_prio = sched_backend_prio(sched, src_backend); + size_t src_size = ggml_nbytes(src); + if (src_prio < cur_prio && src_size >= cur_size) { + cur_prio = src_prio; + cur_size = src_size; + cur_backend = src_backend; + sprintf(causes[hash_id(node)], "1.src%d", i); + } + } + } + return cur_backend; +} + +static char * fmt_size(size_t size) { + static char buffer[128]; + if (size >= 1024*1024) { + sprintf(buffer, "%zuM", size/1024/1024); + } else { + sprintf(buffer, "%zuK", size/1024); + } + return buffer; +} + +static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { + int cur_split = 0; + for (int i = 0; i < graph->n_nodes; i++) { + if (cur_split < sched->n_splits && i == sched->splits[cur_split].i_start) { + ggml_backend_t split_backend = ggml_tallocr_get_buffer(sched->splits[cur_split].tallocr)->backend; + fprintf(stderr, "\n## SPLIT #%d: %s # %d inputs: ", cur_split, ggml_backend_name(split_backend), sched->splits[cur_split].n_inputs); + for (int j = 0; j < sched->splits[cur_split].n_inputs; j++) { + fprintf(stderr, "[%s (%5.5s)] ", sched->splits[cur_split].inputs[j]->name, fmt_size(ggml_nbytes(sched->splits[cur_split].inputs[j]))); + } + fprintf(stderr, "\n"); + cur_split++; + } + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + ggml_tallocr_t node_allocr = node_allocr(node); + ggml_backend_t node_backend = node_allocr ? ggml_tallocr_get_buffer(node_allocr)->backend : NULL; + fprintf(stderr, "node #%3d (%10.10s): %20.20s (%4.4s) [%4.4s %8.8s]:", i, ggml_op_name(node->op), node->name, fmt_size(ggml_nbytes(node)), node_allocr ? ggml_backend_name(node_backend) : "NULL", causes[hash_id(node)]); + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_tallocr_t src_allocr = node_allocr(src); + ggml_backend_t src_backend = src_allocr ? ggml_tallocr_get_buffer(src_allocr)->backend : NULL; + fprintf(stderr, " %20.20s (%4.4s) [%4.4s %8.8s]", src->name, fmt_size(ggml_nbytes(src)), src_backend ? 
ggml_backend_name(src_backend) : "NULL", causes[hash_id(src)]); + } + fprintf(stderr, "\n"); + } +} + +// creates a copy of the tensor with the same memory layout +static struct ggml_tensor * ggml_dup_tensor_layout(struct ggml_context * ctx, const struct ggml_tensor * tensor) { + struct ggml_tensor * dup = ggml_dup_tensor(ctx, tensor); + for (int i = 0; i < GGML_MAX_DIMS; i++) { + dup->nb[i] = tensor->nb[i]; + } + return dup; +} + +// assigns backends to ops and splits the graph into subgraphs that can be computed on the same backend +// TODO: merge passes +static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { + // reset state + size_t hash_size = sched->hash_set.size; + memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); + memset(sched->node_talloc, 0, sizeof(sched->node_talloc[0]) * hash_size); + memset(sched->node_copies, 0, sizeof(sched->node_copies[0]) * hash_size); + sched->n_splits = 0; + + struct ggml_init_params params = { + /*.mem_size = */ sizeof(sched->context_buffer), + /*.mem_buffer = */ sched->context_buffer, + /*.no_alloc = */ true + }; + + if (sched->ctx != NULL) { + ggml_free(sched->ctx); + } + + sched->ctx = ggml_init(params); + + // pass 1: assign backends to ops with allocated inputs + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + if (node_allocr(leaf) != NULL) { + // do not overwrite user assignments + continue; + } + ggml_backend_t leaf_backend = ggml_get_backend(leaf); + if (leaf_backend == NULL && leaf->view_src != NULL) { + leaf_backend = ggml_get_backend(leaf->view_src); + } + if (leaf_backend != NULL) { + node_allocr(leaf) = ggml_backend_sched_get_tallocr(sched, leaf_backend); + } + } + + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (node_allocr(node) != NULL) { + // do not overwrite user assignments + continue; + } + ggml_backend_t node_backend = sched_backend_from_cur(sched, node); + if (node_backend != NULL) { + node_allocr(node) = ggml_backend_sched_get_tallocr(sched, node_backend); + } + } + //printf("PASS 1 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); + + // pass 2: assign backends to ops from current assignments + // TODO: + // - reuse sched_backend_from_cur + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + ggml_tallocr_t node_allocr = node_allocr(node); + if (node_allocr == NULL) { + int cur_prio = INT_MAX; + size_t cur_size = 0; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_tallocr_t src_allocr = node_allocr(src); + if (src_allocr != NULL) { + int src_prio = sched_allocr_prio(sched, src_allocr); + size_t src_size = ggml_nbytes(src); + if (src_prio < cur_prio && src_size >= cur_size) { + cur_prio = src_prio; + cur_size = src_size; + node_allocr = src_allocr; + sprintf(causes[hash_id(node)], "2.src%d", j); + } + } + } + if (node_allocr != NULL) { + node_allocr(node) = node_allocr; + } + } + } + //printf("PASS 2 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); + + // pass 3: assign backends to remaining src from dst (should only be leafs) + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + ggml_tallocr_t node_allocr = node_allocr(node); + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_tallocr_t src_allocr = node_allocr(src); + if (src_allocr == 
NULL) { + node_allocr(src) = node_allocr; + } + } + } + //printf("PASS 3 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); + + // pass 4: split graph, find tensors that need to be copied + // TODO: + // - when switching from a less preferred backend to a more preferred backend, check if it is possible to move the switch to an earlier point for the same cost + // find first backend + int cur_split = 0; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (node->view_src == NULL) { + sched->splits[0].tallocr = node_allocr(node); + break; + } + } + sched->splits[0].i_start = 0; + sched->splits[0].n_inputs = 0; + memset(sched->splits[0].inputs, 0, sizeof(sched->splits[0].inputs)); //HACK + ggml_tallocr_t cur_allocr = sched->splits[0].tallocr; + size_t cur_backend_id = sched_allocr_prio(sched, cur_allocr); + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + + if (ggml_is_view_op(node->op)) { + continue; + } + + ggml_tallocr_t node_allocr = node_allocr(node); + + if (node_allocr != cur_allocr) { + sched->splits[cur_split].i_end = i; + cur_split++; + GGML_ASSERT(cur_split < GGML_MAX_SPLITS); + sched->splits[cur_split].tallocr = node_allocr; + sched->splits[cur_split].i_start = i; + sched->splits[cur_split].n_inputs = 0; + memset(sched->splits[cur_split].inputs, 0, sizeof(sched->splits[cur_split].inputs)); //HACK + cur_allocr = node_allocr; + cur_backend_id = sched_allocr_prio(sched, cur_allocr); + } + + // find inputs that are not on the same backend + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_tallocr_t src_allocr = node_allocr(src); + if (src_allocr != node_allocr) { + int n_inputs = sched->splits[cur_split].n_inputs++; + GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); + sched->splits[cur_split].inputs[n_inputs] = (struct ggml_tensor *)src; + + // create copies + size_t id = hash_id(src); + if (sched->node_copies[id][cur_backend_id] == NULL) { + struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); + sched->node_copies[id][cur_backend_id] = tensor_copy; + node_allocr(tensor_copy) = cur_allocr; + ggml_backend_t backend = ggml_tallocr_get_buffer(cur_allocr)->backend; + ggml_format_name(tensor_copy, "%s#%s", ggml_backend_name(backend), src->name); + } + node->src[j] = sched->node_copies[id][cur_backend_id]; + } + } + } + sched->splits[cur_split].i_end = graph->n_nodes; + sched->n_splits = cur_split + 1; + + //fprintf(stderr, "PASS 4 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); fflush(stdout); + +#if 1 + // sanity check: all sources should have the same backend as the node + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + ggml_tallocr_t node_allocr = node_allocr(node); + if (node_allocr == NULL) { + fprintf(stderr, "!!!!!!! %s has no backend\n", node->name); + } + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_tallocr_t src_allocr = node_allocr(src); + if (src_allocr != node_allocr /* && src_backend != NULL */) { // ignore nulls for now + fprintf(stderr, "!!!! %s has backend %s, src %d (%s) has backend %s\n", + node->name, node_allocr ? ggml_backend_name(ggml_tallocr_get_buffer(node_allocr)->backend) : "NULL", + j, src->name, src_allocr ? 
ggml_backend_name(ggml_tallocr_get_buffer(src_allocr)->backend) : "NULL"); + } + } + } +#endif + + // create copies of the graph for each split + // FIXME: avoid this copy, pass split inputs to ggml_gallocr_alloc_graph_n in some other way + struct ggml_cgraph * graph_copy = ggml_new_graph_custom(sched->ctx, graph->n_nodes + sched->n_splits*GGML_MAX_SPLIT_INPUTS, false); + for (int i = 0; i < sched->n_splits; i++) { + struct ggml_backend_sched_split * split = &sched->splits[i]; + split->graph = ggml_graph_view(sched->ctx, graph, split->i_start, split->i_end); + + // add inputs to the graph copy so that they are allocated by ggml-alloc at the start of the split + for (int j = 0; j < split->n_inputs; j++) { + struct ggml_tensor * input = split->inputs[j]; + struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][sched_allocr_prio(sched, split->tallocr)]; + input_cpy->src[0] = input; + graph_copy->nodes[graph_copy->n_nodes++] = input_cpy; + } + + for (int j = split->i_start; j < split->i_end; j++) { + graph_copy->nodes[graph_copy->n_nodes++] = graph->nodes[j]; + } + } + sched->graph = graph_copy; +} + +static void sched_alloc_splits(ggml_backend_sched_t sched) { + ggml_gallocr_alloc_graph_n( + sched->galloc, + sched->graph, + sched->hash_set, + sched->node_talloc); +} + +static void sched_compute_splits(ggml_backend_sched_t sched) { + uint64_t copy_us[GGML_MAX_BACKENDS] = {0}; + uint64_t compute_us[GGML_MAX_BACKENDS] = {0}; + + struct ggml_backend_sched_split * splits = sched->splits; + + for (int i = 0; i < sched->n_splits; i++) { + struct ggml_backend_sched_split * split = &splits[i]; + ggml_backend_t split_backend = ggml_tallocr_get_buffer(split->tallocr)->backend; + int split_backend_id = sched_backend_prio(sched, split_backend); + + // copy the input tensors to the split backend + uint64_t copy_start_us = ggml_time_us(); + for (int j = 0; j < split->n_inputs; j++) { + struct ggml_tensor * input_cpy = sched->node_copies[hash_id(split->inputs[j])][sched_backend_prio(sched, split_backend)]; + if (split->inputs[j]->buffer == NULL) { + if (split->inputs[j]->view_src == NULL) { + fprintf(stderr, "input %s has no buffer and no view_src\n", split->inputs[j]->name); + exit(1); + } + struct ggml_tensor * view = split->inputs[j]; + view->backend = view->view_src->backend; + view->buffer = view->view_src->buffer; + view->data = (char *)view->view_src->data + view->view_offs; + ggml_backend_buffer_init_tensor(ggml_backend_sched_get_buffer(sched, view->buffer->backend), view); + } + if (input_cpy->buffer == NULL) { + fprintf(stderr, "input_cpy %s has no buffer\n", input_cpy->name); + exit(1); + } + GGML_ASSERT(split->inputs[j]->buffer->backend != input_cpy->buffer->backend); + GGML_ASSERT(input_cpy->buffer->backend == split_backend); + ggml_backend_tensor_copy(split->inputs[j], input_cpy); + } + // ggml_backend_synchronize(split_backend); + int64_t copy_end_us = ggml_time_us(); + copy_us[split_backend_id] += copy_end_us - copy_start_us; + +#if 0 + char split_filename[GGML_MAX_NAME]; + snprintf(split_filename, GGML_MAX_NAME, "split_%i_%s.dot", i, ggml_backend_name(split_backend)); + ggml_graph_dump_dot(split->graph, NULL, split_filename); +#endif + + uint64_t compute_start_us = ggml_time_us(); + ggml_backend_graph_compute(split_backend, split->graph); + // ggml_backend_synchronize(split_backend); + uint64_t compute_end_us = ggml_time_us(); + compute_us[split_backend_id] += compute_end_us - compute_start_us; + } + +#if 0 + // per-backend timings + fprintf(stderr, "sched_compute_splits times 
(%d splits):\n", sched->n_splits); + for (int i = 0; i < sched->n_backends; i++) { + if (copy_us[i] > 0 || compute_us[i] > 0) { + fprintf(stderr, "\t%5.5s: %lu us copy, %lu us compute\n", ggml_backend_name(sched->backends[i]), copy_us[i], compute_us[i]); + } + } +#endif +} + +static void sched_reset(ggml_backend_sched_t sched) { + for (int i = 0; i < sched->n_backends; i++) { + ggml_tallocr_reset(sched->tallocs[i]); + } +} + +ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, int n_backends) { + GGML_ASSERT(n_backends <= GGML_MAX_BACKENDS); + + struct ggml_backend_sched * sched = malloc(sizeof(struct ggml_backend_sched)); + memset(sched, 0, sizeof(struct ggml_backend_sched)); + + fprintf(stderr, "ggml_backend_sched size: %lu KB\n", sizeof(struct ggml_backend_sched)/1024); + + sched->n_backends = n_backends; + for (int i = 0; i < n_backends; i++) { + sched->backends[i] = backends[i]; + } + + sched->galloc = ggml_gallocr_new(); + + // init measure allocs for each backend + for (int i = 0; i < n_backends; i++) { + sched->tallocs[i] = ggml_tallocr_new_measure_from_backend(backends[i]); + } + + return sched; +} + +void ggml_backend_sched_free(ggml_backend_sched_t sched) { + if (sched == NULL) { + return; + } + for (int i = 0; i < sched->n_backends; i++) { + ggml_tallocr_free(sched->tallocs[i]); + } + ggml_gallocr_free(sched->galloc); + free(sched->hash_set.keys); + free(sched->node_talloc); + free(sched->node_copies); + free(sched); +} + +void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph) { + // initialize hash tables + size_t hash_size = measure_graph->visited_hash_table.size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS; + sched->hash_set.size = hash_size; + sched->hash_set.keys = malloc(sizeof(sched->hash_set.keys[0]) * hash_size); + sched->node_talloc = malloc(sizeof(sched->node_talloc[0]) * hash_size); + sched->node_copies = malloc(sizeof(sched->node_copies[0]) * hash_size); + + sched_split_graph(sched, measure_graph); + sched_alloc_splits(sched); + + // allocate buffers and reset allocators + for (int i = 0; i < sched->n_backends; i++) { + size_t size = ggml_tallocr_max_size(sched->tallocs[i]); + ggml_tallocr_free(sched->tallocs[i]); + sched->tallocs[i] = ggml_tallocr_new_from_backend(sched->backends[i], size); + } + + sched_reset(sched); +} + +void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { + GGML_ASSERT(sched->hash_set.size >= graph->visited_hash_table.size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + + sched_split_graph(sched, graph); + sched_alloc_splits(sched); + sched_compute_splits(sched); + sched_reset(sched); +} + +ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend) { + int backend_index = sched_backend_prio(sched, backend); + return sched->tallocs[backend_index]; +} + +ggml_backend_buffer_t ggml_backend_sched_get_buffer(ggml_backend_sched_t sched, ggml_backend_t backend) { + int backend_index = sched_backend_prio(sched, backend); + return ggml_tallocr_get_buffer(sched->tallocs[backend_index]); +} + +void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend) { + int backend_index = sched_backend_prio(sched, backend); + GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); + node_allocr(node) = sched->tallocs[backend_index]; +} diff --git a/ggml-backend.h b/ggml-backend.h index da134b0db..966687320 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ 
@@ -1,51 +1,20 @@
 #pragma once
 
 #include "ggml.h"
+#include "ggml-alloc.h"
 
 #ifdef __cplusplus
 extern "C" {
 #endif
-    struct ggml_backend;
+
+    //
+    // Backend buffer
+    //
+
     struct ggml_backend_buffer;
-
-    // type-erased backend-specific types / wrappers
-    typedef void * ggml_backend_context_t;
-    typedef void * ggml_backend_graph_plan_t;
-    typedef void * ggml_backend_buffer_context_t;
-
-    // avoid accessing internals of these types
-    typedef struct ggml_backend * ggml_backend_t;
     typedef struct ggml_backend_buffer * ggml_backend_buffer_t;
 
-    //
-    // backend buffer
-    //
-
-    struct ggml_backend_buffer_i {
-        void   (*free_buffer)   (ggml_backend_buffer_t buffer);
-        void * (*get_base)      (ggml_backend_buffer_t buffer); // get base pointer
-        size_t (*get_alloc_size)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // pre-allocation callback
-        void   (*init_tensor)   (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // post-allocation callback
-        void   (*free_tensor)   (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); // pre-free callback
-    };
-
-    // TODO: hide behind API
-    struct ggml_backend_buffer {
-        struct ggml_backend_buffer_i iface;
-
-        ggml_backend_t                backend;
-        ggml_backend_buffer_context_t context;
-
-        size_t size;
-    };
-
     // backend buffer functions
-    GGML_API ggml_backend_buffer_t ggml_backend_buffer_init(
-            struct ggml_backend           * backend,
-            struct ggml_backend_buffer_i    iface,
-            ggml_backend_buffer_context_t   context,
-            size_t                          size);
-
     GGML_API void   ggml_backend_buffer_free          (ggml_backend_buffer_t buffer);
     GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer);
     GGML_API void * ggml_backend_buffer_get_base      (ggml_backend_buffer_t buffer);
@@ -55,50 +24,13 @@ extern "C" {
     GGML_API void   ggml_backend_buffer_free_tensor   (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor);
 
     //
-    // backend
+    // Backend
     //
 
-    struct ggml_backend_i {
-        const char * (*get_name)(ggml_backend_t backend);
+    struct ggml_backend;
+    typedef struct ggml_backend * ggml_backend_t;
+    typedef void * ggml_backend_graph_plan_t;
 
-        void (*free)(ggml_backend_t backend);
-
-        // buffer allocation
-        ggml_backend_buffer_t (*alloc_buffer)(ggml_backend_t backend, size_t size);
-
-        // get buffer alignment
-        size_t (*get_alignment)(ggml_backend_t backend);
-
-        // tensor data access
-        // these functions can be asynchronous, helper functions are provided for synchronous access that automatically call synchronize
-        void (*set_tensor_async)(ggml_backend_t backend,       struct ggml_tensor * tensor, const void * data, size_t offset, size_t size);
-        void (*get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor,       void * data, size_t offset, size_t size);
-        void (*synchronize)     (ggml_backend_t backend);
-
-        // (optional) copy tensor between different backends, allow for single-copy transfers
-        void (*cpy_tensor_from)(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst);
-        void (*cpy_tensor_to)  (ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst);
-
-        // compute graph with a plan
-        ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, struct ggml_cgraph * cgraph);
-        void                      (*graph_plan_free)   (ggml_backend_t backend, ggml_backend_graph_plan_t plan);
-        void                      (*graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan);
-
-        // compute graph without a plan
-        void (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph);
-
-        // check if the backend supports an operation
-        bool (*supports_op)(ggml_backend_t backend, const struct ggml_tensor * op);
-    };
-
-    // TODO: hide behind API
-    struct ggml_backend {
-        struct ggml_backend_i iface;
-
-        ggml_backend_context_t context;
-    };
-
-    // backend helper functions
     GGML_API ggml_backend_t ggml_get_backend(const struct ggml_tensor * tensor);
 
     GGML_API const char * ggml_backend_name(ggml_backend_t backend);
@@ -133,11 +65,72 @@ extern "C" {
     GGML_API ggml_backend_t ggml_backend_cpu_init(void);
 
     GGML_API bool ggml_backend_is_cpu(ggml_backend_t backend);
-
     GGML_API void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads);
 
+    // Create a backend buffer from an existing pointer
     GGML_API ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(ggml_backend_t backend_cpu, void * ptr, size_t size);
+
+    //
+    // Backend scheduler
+    //
+
+    // The backend scheduler allows for multiple backends to be used together
+    // Handles compute buffer allocation, assignment of tensors to backends, and copying of tensors between backends
+    // The backends are selected based on:
+    // - the backend that supports the operation
+    // - the location of the pre-allocated tensors (e.g. the weights)
+    /*
+      Example usage:
+
+        sched = ggml_backend_sched_new({backend_gpu, backend_gpu2, backend_cpu}, num_backends);
+        // sched is initialized with measure allocators and cannot be used until allocated with a measure graph
+
+        // initialize buffers from a measure graph
+        measure_graph = build_graph(sched); // use the allocr to allocate inputs as needed
+
+        // in build_graph:
+        build_graph(...) {
+            // allocating tensors in a specific backend (optional, recommended: pre-allocate inputs in a different buffer)
+            alloc_cpu = ggml_backend_sched_get_tallocr(sched, backend_cpu);
+            ggml_allocr_alloc(alloc_cpu, tensor);
+
+            // manually assigning nodes to a backend (optional, shouldn't be needed in most cases)
+            struct ggml_tensor * node = ggml_mul_mat(ctx, ...);
+            ggml_backend_sched_set_node_backend(sched, node, backend_gpu);
+        }
+
+        // allocate backend buffers from measure graph
+        ggml_backend_sched_init_measure(sched, measure_graph);
+
+        // the scheduler is now ready to compute graphs
+
+        // compute
+        graph = build_graph(sched);
+        ggml_backend_sched_graph_compute(sched, graph);
+    */
+
+    struct ggml_backend_sched;
+    typedef struct ggml_backend_sched * ggml_backend_sched_t;
+
+    // Initialize a backend scheduler
+    GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, int n_backends);
+
+    GGML_API void ggml_backend_sched_free(ggml_backend_sched_t sched);
+
+    // Initialize backend buffers from a measure graph
+    GGML_API void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph);
+
+    GGML_API ggml_tallocr_t        ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend);
+    GGML_API ggml_backend_buffer_t ggml_backend_sched_get_buffer (ggml_backend_sched_t sched, ggml_backend_t backend);
+
+    GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend);
+
+    // Allocate and compute a graph on the backend scheduler
+    GGML_API void ggml_backend_sched_graph_compute(
+            ggml_backend_sched_t sched,
+            struct ggml_cgraph * graph);
+
 #ifdef __cplusplus
 }
 #endif
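The "Example usage" block in the header comment above is schematic rather than compilable C. A minimal end-to-end sketch of the same lifecycle, assuming a single CPU backend; build_graph() is an application-defined placeholder that constructs the compute graph and allocates its inputs through the scheduler's allocators, and is not part of this patch:

    #include "ggml.h"
    #include "ggml-backend.h"

    // Placeholder: build the compute graph, allocating inputs via the
    // scheduler's per-backend allocators (ggml_backend_sched_get_tallocr).
    extern struct ggml_cgraph * build_graph(ggml_backend_sched_t sched);

    static void run(void) {
        ggml_backend_t backend_cpu = ggml_backend_cpu_init();
        ggml_backend_t backends[1] = { backend_cpu };

        ggml_backend_sched_t sched = ggml_backend_sched_new(backends, 1);

        // measure pass: split a worst-case graph and size the backend buffers from it
        struct ggml_cgraph * measure_graph = build_graph(sched);
        ggml_backend_sched_init_measure(sched, measure_graph);

        // steady state: graphs of the same shape can now be computed repeatedly
        struct ggml_cgraph * graph = build_graph(sched);
        ggml_backend_sched_graph_compute(sched, graph);

        ggml_backend_sched_free(sched);
        ggml_backend_free(backend_cpu);
    }

With more than one backend in the array, the same code holds; the scheduler assigns each split to a backend based on operation support and the location of pre-allocated tensors, as described above.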
diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 8d03ba664..163402446 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -81,6 +81,7 @@
 
 #include "ggml-cuda.h"
 #include "ggml.h"
+#include "ggml-backend-impl.h"
 
 #define MIN_CC_DP4A   610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products
 #define CC_VOLTA      700
@@ -7751,11 +7752,11 @@ static size_t g_temp_tensor_extra_index = 0;
 
 static ggml_tensor_extra_gpu * ggml_cuda_alloc_temp_tensor_extra() {
     if (g_temp_tensor_extras == nullptr) {
-        g_temp_tensor_extras = new ggml_tensor_extra_gpu[GGML_MAX_NODES];
+        g_temp_tensor_extras = new ggml_tensor_extra_gpu[GGML_DEFAULT_GRAPH_SIZE];
     }
 
     size_t alloc_index = g_temp_tensor_extra_index;
-    g_temp_tensor_extra_index = (g_temp_tensor_extra_index + 1) % GGML_MAX_NODES;
+    g_temp_tensor_extra_index = (g_temp_tensor_extra_index + 1) % GGML_DEFAULT_GRAPH_SIZE;
     ggml_tensor_extra_gpu * extra = &g_temp_tensor_extras[alloc_index];
     memset(extra, 0, sizeof(*extra));
 
@@ -8070,11 +8071,11 @@ struct ggml_backend_buffer_context_cuda {
 
     ggml_tensor_extra_gpu * ggml_cuda_alloc_temp_tensor_extra() {
        if (temp_tensor_extras == nullptr) {
-           temp_tensor_extras = new ggml_tensor_extra_gpu[GGML_MAX_NODES];
+           temp_tensor_extras = new ggml_tensor_extra_gpu[GGML_DEFAULT_GRAPH_SIZE];
        }
 
        size_t alloc_index = temp_tensor_extra_index;
-       temp_tensor_extra_index = (temp_tensor_extra_index + 1) % GGML_MAX_NODES;
+       temp_tensor_extra_index = (temp_tensor_extra_index + 1) % GGML_DEFAULT_GRAPH_SIZE;
        ggml_tensor_extra_gpu * extra = &temp_tensor_extras[alloc_index];
        memset(extra, 0, sizeof(*extra));
 
@@ -8160,7 +8161,12 @@ static ggml_backend_buffer_t ggml_backend_cuda_alloc_buffer(ggml_backend_t backe
     ggml_cuda_set_device(g_main_device);
 
     ggml_backend_buffer_context_cuda * ctx = new ggml_backend_buffer_context_cuda;
+
+    size = std::max(size, (size_t)1); // cudaMalloc returns null for size 0
+
+    ggml_cuda_set_device(g_main_device);
     CUDA_CHECK(cudaMalloc(&ctx->device, size));
+
     return ggml_backend_buffer_init(backend, cuda_backend_buffer_interface, ctx, size);
 }
 
@@ -8227,6 +8233,8 @@ static void ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph
     for (int i = 0; i < cgraph->n_nodes; i++) {
         ggml_tensor * node = cgraph->nodes[i];
+        if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE)
+            continue;
         assert(node->backend == GGML_BACKEND_GPU);
         for (int j = 0; j < GGML_MAX_SRC; j++) {
             if (node->src[j] != nullptr) {
diff --git a/ggml-impl.h b/ggml-impl.h
index 5ec18a50c..d88f26144 100644
--- a/ggml-impl.h
+++ b/ggml-impl.h
@@ -230,7 +230,19 @@ inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) {
 
 #endif
 
-// TODO: backend v2 PR
+#define GGML_HASHTABLE_FULL ((size_t)-1)
+#define GGML_HASHTABLE_ALREADY_EXISTS ((size_t)-2)
+
+bool   ggml_hash_contains      (const struct ggml_hash_set hash_set, struct ggml_tensor * key);
+
+// returns GGML_HASHTABLE_FULL if table is full, otherwise the current index of the key or where it should be inserted
+size_t ggml_hash_find          (const struct ggml_hash_set hash_set, struct ggml_tensor * key);
+
+// returns GGML_HASHTABLE_ALREADY_EXISTS if key already exists, index otherwise, asserts if table is full
+size_t ggml_hash_insert        (      struct ggml_hash_set hash_set, struct ggml_tensor * key);
+
+// return index, asserts if table is full
+size_t ggml_hash_find_or_insert(      struct ggml_hash_set hash_set, struct ggml_tensor * key);
 
 #ifdef __cplusplus
 }
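The return conventions of this internal hash API are easy to misread: ggml_hash_find can return a usable empty slot for a key that is not present yet, and ggml_hash_insert signals a duplicate through its return value rather than failing. A small sketch of the intended calling convention (hash_insert_once is a hypothetical helper, and the hash set is assumed to have been created large enough, as the graph allocation code later in this patch guarantees):

    #include "ggml-impl.h"

    // Insert `t` unless it is already present; returns true on a fresh insert.
    // ggml_hash_insert() asserts if the table is full, so the caller must have
    // sized the set generously; the allocator rounds the size up to a prime.
    static bool hash_insert_once(struct ggml_hash_set hs, struct ggml_tensor * t) {
        if (ggml_hash_contains(hs, t)) {
            return false;
        }
        size_t i = ggml_hash_insert(hs, t); // slot index on success
        GGML_ASSERT(i != GGML_HASHTABLE_ALREADY_EXISTS);
        return true;
    }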
diff --git a/ggml-metal.m b/ggml-metal.m
index 78ae4485d..c2cda0bf5 100644
--- a/ggml-metal.m
+++ b/ggml-metal.m
@@ -1,5 +1,6 @@
 #import "ggml-metal.h"
 
+#import "ggml-backend-impl.h"
 #import "ggml.h"
 
 #import <Foundation/Foundation.h>
@@ -23,7 +24,7 @@
 
 #define UNUSED(x) (void)(x)
 
-#define GGML_MAX_CONCUR (2*GGML_MAX_NODES)
+#define GGML_MAX_CONCUR (2*GGML_DEFAULT_GRAPH_SIZE)
 
 struct ggml_metal_buffer {
     const char * name;
@@ -744,6 +745,20 @@ void ggml_metal_graph_compute(
                 struct ggml_tensor * src1 = gf->nodes[i]->src[1];
                 struct ggml_tensor * dst  = gf->nodes[i];
 
+                switch (dst->op) {
+                    case GGML_OP_NONE:
+                    case GGML_OP_RESHAPE:
+                    case GGML_OP_VIEW:
+                    case GGML_OP_TRANSPOSE:
+                    case GGML_OP_PERMUTE:
+                        {
+                            // noop -> next node
+                        } continue;
+                    default:
+                        {
+                        } break;
+                }
+
                 const int64_t ne00 = src0 ? src0->ne[0] : 0;
                 const int64_t ne01 = src0 ? src0->ne[1] : 0;
                 const int64_t ne02 = src0 ? src0->ne[2] : 0;
@@ -797,14 +812,6 @@ void ggml_metal_graph_compute(
                 //}
 
                 switch (dst->op) {
-                    case GGML_OP_NONE:
-                    case GGML_OP_RESHAPE:
-                    case GGML_OP_VIEW:
-                    case GGML_OP_TRANSPOSE:
-                    case GGML_OP_PERMUTE:
-                        {
-                            // noop
-                        } break;
                     case GGML_OP_CONCAT:
                         {
                             const int64_t nb = ne00;
diff --git a/ggml.c b/ggml.c
index 009d5b398..da78e6de9 100644
--- a/ggml.c
+++ b/ggml.c
@@ -100,6 +100,49 @@ typedef void * thread_ret_t;
 #include <unistd.h>
 
 #endif
+
+#if defined(__APPLE__)
+#include <TargetConditionals.h>
+#endif
+
+#if (defined(__linux__) || defined(__APPLE__) || defined(__FreeBSD__) || defined(__NetBSD__) || defined(__OpenBSD__)) && \
+    (!defined(TARGET_OS_TV) && !defined(TARGET_OS_WATCH))
+
+#include <sys/wait.h>
+
+void ggml_print_backtrace(void) {
+    /*
+    #include <execinfo.h>
+    #include <unistd.h>
+
+    void * trace[100];
+
+    int nptrs = backtrace(trace, sizeof(trace)/sizeof(trace[0]));
+
+    backtrace_symbols_fd(trace, nptrs, STDERR_FILENO);
+    */
+
+    // backtrace_symbols does not show line numbers, use gdb instead
+    char attach[32];
+    snprintf(attach, sizeof(attach), "attach %d", getpid());
+    int pid = fork();
+    if (pid == 0) {
+        execlp("gdb", "gdb", "--batch",
+            "-ex", "set style enabled on",
+            "-ex", attach,
+            "-ex", "bt -frame-info source-and-location",
+            "-ex", "detach",
+            "-ex", "quit",
+            NULL);
+    } else {
+        waitpid(pid, NULL, 0);
+    }
+}
+#else
+void ggml_print_backtrace(void) {
+    // platform not supported
+}
+#endif
+
 /*#define GGML_PERF*/
 #define GGML_DEBUG 0
 #define GGML_GELU_FP16
@@ -1352,6 +1395,7 @@ inline static void ggml_vec_step_f32 (const int n, float * y, const float * x) {
 inline static void ggml_vec_tanh_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = tanhf(x[i]);  }
 inline static void ggml_vec_elu_f32  (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? x[i] : expf(x[i])-1; }
 inline static void ggml_vec_relu_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? x[i] : 0.f; }
+inline static void ggml_vec_leaky_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = (x[i] > 0.f) ? 
x[i] : 0.1f*x[i]; } static const float GELU_COEF_A = 0.044715f; static const float GELU_QUICK_COEF = -1.702f; @@ -3769,6 +3813,14 @@ struct ggml_tensor * ggml_relu_inplace( return ggml_unary_inplace(ctx, a, GGML_UNARY_OP_RELU); } +// ggml_leaky + +struct ggml_tensor * ggml_leaky( + struct ggml_context * ctx, + struct ggml_tensor * a) { + return ggml_unary(ctx, a, GGML_UNARY_OP_LEAKY); +} + // ggml_gelu struct ggml_tensor * ggml_gelu( @@ -5411,7 +5463,7 @@ struct ggml_tensor * ggml_conv_transpose_2d_p0( // ggml_pool_* -static int64_t ggml_calc_pool_output_size(int64_t ins, int ks, int s, int p) { +static int64_t ggml_calc_pool_output_size(int64_t ins, int ks, int s, float p) { return (ins + 2 * p - ks) / s + 1; } @@ -5458,8 +5510,8 @@ struct ggml_tensor * ggml_pool_2d( int k1, int s0, int s1, - int p0, - int p1) { + float p0, + float p1) { bool is_node = false; @@ -8921,6 +8973,48 @@ static void ggml_compute_forward_silu( } } +// ggml_compute_forward_leaky + +static void ggml_compute_forward_leaky_f32( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + struct ggml_tensor * dst) { + assert(params->ith == 0); + assert(ggml_are_same_shape(src0, dst)); + + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + return; + } + + const int n = ggml_nrows(src0); + const int nc = src0->ne[0]; + + assert(dst->nb[0] == sizeof(float)); + assert(src0->nb[0] == sizeof(float)); + + for (int i = 0; i < n; i++) { + ggml_vec_leaky_f32(nc, + (float *) ((char *) dst->data + i*( dst->nb[1])), + (float *) ((char *) src0->data + i*(src0->nb[1]))); + } +} + +static void ggml_compute_forward_leaky( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + struct ggml_tensor * dst) { + switch (src0->type) { + case GGML_TYPE_F32: + { + ggml_compute_forward_leaky_f32(params, src0, dst); + } break; + default: + { + GGML_ASSERT(false); + } break; + } +} + // ggml_compute_forward_silu_back static void ggml_compute_forward_silu_back_f32( @@ -12454,14 +12548,11 @@ static void ggml_compute_forward_pool_1d( ggml_compute_forward_pool_1d_sk_p0(params, op, src0, k0, dst); } -// ggml_compute_forward_pool_2d_sk_p0 +// ggml_compute_forward_pool_2d -static void ggml_compute_forward_pool_2d_sk_p0( +static void ggml_compute_forward_pool_2d( const struct ggml_compute_params * params, - const enum ggml_op_pool op, const struct ggml_tensor * src, - const int k0, - const int k1, struct ggml_tensor * dst) { assert(src->type == GGML_TYPE_F32); assert(params->ith == 0); @@ -12470,6 +12561,14 @@ static void ggml_compute_forward_pool_2d_sk_p0( return; } + const int32_t * opts = (const int32_t *)dst->op_params; + enum ggml_op_pool op = opts[0]; + const int k0 = opts[1]; + const int k1 = opts[2]; + const int s0 = opts[3]; + const int s1 = opts[4]; + const int p0 = opts[5]; + const int p1 = opts[6]; const char * cdata = (const char*)src->data; const char * const data_end = cdata + ggml_nbytes(src); @@ -12480,6 +12579,8 @@ static void ggml_compute_forward_pool_2d_sk_p0( float * dplane = (float *)dst->data; const int ka = k0 * k1; + const int offset0 = -p0; + const int offset1 = -p1; while (cdata < data_end) { for (int oy = 0; oy < py; ++oy) { @@ -12492,13 +12593,15 @@ static void ggml_compute_forward_pool_2d_sk_p0( case GGML_OP_POOL_COUNT: GGML_ASSERT(false); break; } - const int ix = ox * k0; - const int iy = oy * k1; + const int ix = offset0 + ox * s0; + const int iy = offset1 + oy * s1; for (int ky = 0; ky < k1; ++ky) { + if (iy + ky < 0 || iy + ky >= src->ne[1]) 
continue; const float * const srow = (const float *)(cdata + src->nb[1] * (iy + ky)); for (int kx = 0; kx < k0; ++kx) { int j = ix + kx; + if (j < 0 || j >= src->ne[0]) continue; switch (op) { case GGML_OP_POOL_AVG: *out += srow[j]; break; case GGML_OP_POOL_MAX: if (srow[j] > *out) *out = srow[j]; break; @@ -12519,29 +12622,6 @@ static void ggml_compute_forward_pool_2d_sk_p0( } } -// ggml_compute_forward_pool_2d - -static void ggml_compute_forward_pool_2d( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - struct ggml_tensor * dst) { - - const int32_t * opts = (const int32_t *)dst->op_params; - enum ggml_op_pool op = opts[0]; - const int k0 = opts[1]; - const int k1 = opts[2]; - const int s0 = opts[3]; - const int s1 = opts[4]; - const int p0 = opts[5]; - const int p1 = opts[6]; - GGML_ASSERT(p0 == 0); - GGML_ASSERT(p1 == 0); // padding not supported - GGML_ASSERT(k0 == s0); - GGML_ASSERT(k1 == s1); // only s = k supported - - ggml_compute_forward_pool_2d_sk_p0(params, op, src0, k0, k1, dst); -} - // ggml_compute_forward_upscale static void ggml_compute_forward_upscale_f32( @@ -13743,6 +13823,10 @@ static void ggml_compute_forward_unary( { ggml_compute_forward_silu(params, src0, dst); } break; + case GGML_UNARY_OP_LEAKY: + { + ggml_compute_forward_leaky(params, src0, dst); + } break; default: { GGML_ASSERT(false); @@ -14651,62 +14735,109 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm //////////////////////////////////////////////////////////////////////////////// -static_assert(GGML_GRAPH_HASHTABLE_SIZE > GGML_MAX_NODES * 2, "GGML_GRAPH_HT_SIZE is too small"); +static size_t ggml_hash_size(size_t min_sz) { + // next primes after powers of two + static const size_t primes[] = { + 2, 3, 5, 11, 17, 37, 67, 131, 257, 521, 1031, + 2053, 4099, 8209, 16411, 32771, 65537, 131101, + 262147, 524309, 1048583, 2097169, 4194319, 8388617, + 16777259, 33554467, 67108879, 134217757, 268435459, + 536870923, 1073741827, 2147483659 + }; + static const size_t n_primes = sizeof(primes)/sizeof(primes[0]); -static size_t hash(void * p) { - return (size_t)p % GGML_GRAPH_HASHTABLE_SIZE; + // find the smallest prime that is larger or equal to min_sz + size_t l = 0; + size_t r = n_primes; + while (l < r) { + size_t m = (l + r)/2; + if (primes[m] < min_sz) { + l = m + 1; + } else { + r = m; + } + } + size_t sz = l < n_primes ? 
primes[l] : min_sz | 1; + return sz; } -static size_t hash_find(void * hash_table[], void * p) { - size_t h = hash(p); +static size_t ggml_hash(const void * p) { + return (size_t)p; +} + +size_t ggml_hash_find(const struct ggml_hash_set hash_set, struct ggml_tensor * key) { + size_t h = ggml_hash(key) % hash_set.size; // linear probing size_t i = h; - while (hash_table[i] != NULL && hash_table[i] != p) { - i = (i + 1) % GGML_GRAPH_HASHTABLE_SIZE; + while (hash_set.keys[i] != NULL && hash_set.keys[i] != key) { + i = (i + 1) % hash_set.size; if (i == h) { // visited all hash table entries -> not found - return GGML_GRAPH_HASHTABLE_SIZE; + return GGML_HASHTABLE_FULL; } } return i; } -static bool hash_insert(void * hash_table[], void * p) { - size_t i = hash_find(hash_table, p); +bool ggml_hash_contains(struct ggml_hash_set hash_set, struct ggml_tensor * key) { + size_t i = ggml_hash_find(hash_set, key); + return i != GGML_HASHTABLE_FULL && hash_set.keys[i] == key; +} - GGML_ASSERT(i < GGML_GRAPH_HASHTABLE_SIZE); // assert that not full +size_t ggml_hash_insert(struct ggml_hash_set hash_set, struct ggml_tensor * key) { + size_t i = ggml_hash_find(hash_set, key); - if (hash_table[i] == p) { - return true; + GGML_ASSERT(i != GGML_HASHTABLE_FULL); + + if (hash_set.keys[i] == key) { + return GGML_HASHTABLE_ALREADY_EXISTS; } // insert - GGML_ASSERT(hash_table[i] == NULL); - hash_table[i] = p; - return false; + GGML_ASSERT(hash_set.keys[i] == NULL); + hash_set.keys[i] = key; + return i; } -static bool hash_contains(void * hash_table[], void * p) { - size_t i = hash_find(hash_table, p); - return (i < GGML_GRAPH_HASHTABLE_SIZE) && (hash_table[i] == p); +size_t ggml_hash_find_or_insert(struct ggml_hash_set hash_set, struct ggml_tensor * key) { + size_t i = ggml_hash_find(hash_set, key); + + GGML_ASSERT(i != GGML_HASHTABLE_FULL); + + hash_set.keys[i] = key; + return i; } -struct hash_map { - void * keys[GGML_GRAPH_HASHTABLE_SIZE]; - void * vals[GGML_GRAPH_HASHTABLE_SIZE]; -}; - -static struct hash_map * new_hash_map(void) { - struct hash_map * result = malloc(sizeof(struct hash_map)); - for (int i=0; ikeys[i] = NULL; - result->vals[i] = NULL; - } +static struct ggml_hash_set ggml_hash_set_new(size_t size) { + size = ggml_hash_size(size); + struct ggml_hash_set result; + result.size = size; + result.keys = malloc(sizeof(struct ggml_tensor *) * size); + memset(result.keys, 0, sizeof(struct ggml_tensor *) * size); return result; } -static void free_hash_map(struct hash_map * map) { +static void ggml_hash_set_free(struct ggml_hash_set hash_set) { + free(hash_set.keys); +} + +struct hash_map { + struct ggml_hash_set set; + struct ggml_tensor ** vals; +}; + +static struct hash_map * ggml_new_hash_map(size_t size) { + struct hash_map * result = malloc(sizeof(struct hash_map)); + result->set = ggml_hash_set_new(size); + result->vals = malloc(sizeof(struct ggml_tensor *) * result->set.size); + memset(result->vals, 0, sizeof(struct ggml_tensor *) * result->set.size); + return result; +} + +static void ggml_hash_map_free(struct hash_map * map) { + ggml_hash_set_free(map->set); + free(map->vals); free(map); } @@ -14726,7 +14857,7 @@ static struct ggml_tensor * ggml_recompute_graph_node( return node; } - if (!hash_contains(graph->visited_hash_table, node)) { + if (!ggml_hash_contains(graph->visited_hash_table, node)) { return node; } @@ -14741,17 +14872,17 @@ static struct ggml_tensor * ggml_recompute_graph_node( return node; } - size_t i = hash_find(replacements->keys, node); - GGML_ASSERT(i < 
GGML_GRAPH_HASHTABLE_SIZE); // assert that not full - if (replacements->keys[i] == node) { - return (struct ggml_tensor *) replacements->vals[i]; + size_t i = ggml_hash_find(replacements->set, node); + GGML_ASSERT(i != GGML_HASHTABLE_FULL); // assert that not full + if (replacements->set.keys[i] == node) { + return replacements->vals[i]; } struct ggml_tensor * clone = ggml_new_tensor(ctx, node->type, node->n_dims, node->ne); // insert clone into replacements - GGML_ASSERT(replacements->keys[i] == NULL); // assert that we don't overwrite - replacements->keys[i] = node; + GGML_ASSERT(replacements->set.keys[i] == NULL); // assert that we don't overwrite + replacements->set.keys[i] = node; replacements->vals[i] = clone; clone->op = node->op; @@ -14788,26 +14919,26 @@ void ggml_build_backward_gradient_checkpointing( struct ggml_cgraph * gb_tmp, struct ggml_tensor * * checkpoints, int n_checkpoints) { - *gb_tmp = *gf; + ggml_graph_cpy(gf, gb_tmp); ggml_build_backward_expand(ctx, gf, gb_tmp, true); if (n_checkpoints <= 0) { - *gb = *gb_tmp; + ggml_graph_cpy(gb_tmp, gb); return; } - struct hash_map * replacements = new_hash_map(); + struct hash_map * replacements = ggml_new_hash_map(gf->n_nodes + gf->n_leafs + n_checkpoints); // insert checkpoints in replacements for (int i = 0; i < n_checkpoints; ++i) { - size_t k = hash_find(replacements->keys, checkpoints[i]); - GGML_ASSERT(k < GGML_GRAPH_HASHTABLE_SIZE); // assert that not full - GGML_ASSERT(replacements->keys[k] == NULL); // assert that we don't overwrite - replacements->keys[k] = checkpoints[i]; - replacements->vals[k] = checkpoints[i]; + size_t k = ggml_hash_find(replacements->set, checkpoints[i]); + GGML_ASSERT(k != GGML_HASHTABLE_FULL); // assert that not full + GGML_ASSERT(replacements->set.keys[k] == NULL); // assert that we don't overwrite + replacements->set.keys[k] = checkpoints[i]; + replacements->vals[k] = checkpoints[i]; } - *gb = *gf; + ggml_graph_cpy(gf, gb); // rewrite gb_tmp->nodes[gf->n_nodes:gb_tmp->n_nodes], // replacing references to gb_tmp->nodes[0:gf->n_nodes] ( == gf->nodes[0:gf->n_nodes]), // by recomputing them from checkpoints @@ -14824,21 +14955,21 @@ void ggml_build_backward_gradient_checkpointing( ggml_build_forward_expand(gb, node); } - free_hash_map(replacements); + ggml_hash_map_free(replacements); } // functions to change gradients considering the case that input a might be initial gradient with zero value -static struct ggml_tensor * ggml_add_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, void * zero_table[]) { - if (hash_contains(zero_table, a)) { +static struct ggml_tensor * ggml_add_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, struct ggml_hash_set zero_table) { + if (ggml_hash_contains(zero_table, a)) { return b; } else { return ggml_add_impl(ctx, a, b, false); } } -static struct ggml_tensor * ggml_acc_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, size_t nb1, size_t nb2, size_t nb3, size_t offset, void * zero_table[]) { - if (hash_contains(zero_table, a)) { +static struct ggml_tensor * ggml_acc_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, size_t nb1, size_t nb2, size_t nb3, size_t offset, struct ggml_hash_set zero_table) { + if (ggml_hash_contains(zero_table, a)) { struct ggml_tensor * a_zero = ggml_scale(ctx, a, ggml_new_f32(ctx, 0)); return ggml_acc_impl(ctx, a_zero, b, nb1, nb2, nb3, offset, false); } else { @@ -14846,23 +14977,23 @@ static struct ggml_tensor * 
ggml_acc_or_set(struct ggml_context * ctx, struct gg } } -static struct ggml_tensor * ggml_add1_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, void * zero_table[]) { - if (hash_contains(zero_table, a)) { +static struct ggml_tensor * ggml_add1_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, struct ggml_hash_set zero_table) { + if (ggml_hash_contains(zero_table, a)) { return ggml_repeat(ctx, b, a); } else { return ggml_add1_impl(ctx, a, b, false); } } -static struct ggml_tensor * ggml_sub_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, void * zero_table[]) { - if (hash_contains(zero_table, a)) { +static struct ggml_tensor * ggml_sub_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, struct ggml_hash_set zero_table) { + if (ggml_hash_contains(zero_table, a)) { return ggml_neg(ctx, b); } else { return ggml_sub_impl(ctx, a, b, false); } } -static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor * tensor, void * zero_table[]) { +static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor * tensor, struct ggml_hash_set zero_table) { struct ggml_tensor * src0 = tensor->src[0]; struct ggml_tensor * src1 = tensor->src[1]; @@ -15695,7 +15826,7 @@ static void ggml_visit_parents(struct ggml_cgraph * cgraph, struct ggml_tensor * } // check if already visited - if (hash_insert(cgraph->visited_hash_table, node)) { + if (ggml_hash_insert(cgraph->visited_hash_table, node) == GGML_HASHTABLE_ALREADY_EXISTS) { return; } @@ -15711,7 +15842,7 @@ static void ggml_visit_parents(struct ggml_cgraph * cgraph, struct ggml_tensor * if (node->op == GGML_OP_NONE && node->grad == NULL) { // reached a leaf node, not part of the gradient graph (e.g. 
a constant)
-    GGML_ASSERT(cgraph->n_leafs < GGML_MAX_NODES);
+    GGML_ASSERT(cgraph->n_leafs < cgraph->size);
 
     if (strlen(node->name) == 0) {
         ggml_format_name(node, "leaf_%d", cgraph->n_leafs);
@@ -15720,22 +15851,24 @@ static void ggml_visit_parents(struct ggml_cgraph * cgraph, struct ggml_tensor *
         cgraph->leafs[cgraph->n_leafs] = node;
         cgraph->n_leafs++;
     } else {
-        GGML_ASSERT(cgraph->n_nodes < GGML_MAX_NODES);
+        GGML_ASSERT(cgraph->n_nodes < cgraph->size);
 
         if (strlen(node->name) == 0) {
             ggml_format_name(node, "node_%d", cgraph->n_nodes);
         }
 
         cgraph->nodes[cgraph->n_nodes] = node;
-        cgraph->grads[cgraph->n_nodes] = node->grad;
+        if (cgraph->grads) {
+            cgraph->grads[cgraph->n_nodes] = node->grad;
+        }
         cgraph->n_nodes++;
     }
 }
 
 static void ggml_build_forward_impl(struct ggml_cgraph * cgraph, struct ggml_tensor * tensor, bool expand) {
     if (!expand) {
-        cgraph->n_nodes = 0;
-        cgraph->n_leafs = 0;
+        // TODO: this branch isn't accessible anymore, maybe move this to ggml_build_forward_expand
+        ggml_graph_clear(cgraph);
     }
 
     const int n0 = cgraph->n_nodes;
@@ -15756,25 +15889,6 @@ void ggml_build_forward_expand(struct ggml_cgraph * cgraph, struct ggml_tensor *
     ggml_build_forward_impl(cgraph, tensor, true);
 }
 
-struct ggml_cgraph ggml_build_forward(struct ggml_tensor * tensor) {
-    struct ggml_cgraph result = {
-        /*.n_nodes      =*/ 0,
-        /*.n_leafs      =*/ 0,
-        /*.nodes        =*/ { NULL },
-        /*.grads        =*/ { NULL },
-        /*.leafs        =*/ { NULL },
-        /*.hash_table   =*/ { NULL },
-        /*.order        =*/ GGML_CGRAPH_EVAL_ORDER_LEFT_TO_RIGHT,
-        /*.perf_runs    =*/ 0,
-        /*.perf_cycles  =*/ 0,
-        /*.perf_time_us =*/ 0,
-    };
-
-    ggml_build_forward_impl(&result, tensor, false);
-
-    return result;
-}
-
 void ggml_build_backward_expand(struct ggml_context * ctx, struct ggml_cgraph * gf, struct ggml_cgraph * gb, bool keep) {
     GGML_ASSERT(gf->n_nodes > 0);
 
@@ -15791,11 +15905,10 @@ void ggml_build_backward_expand(struct ggml_context * ctx, struct ggml_cgraph *
     }
 
     // remember original gradients which start with zero values
-    void ** zero_table = malloc(sizeof(void *) * GGML_GRAPH_HASHTABLE_SIZE);
-    memset(zero_table, 0, sizeof(void*) * GGML_GRAPH_HASHTABLE_SIZE);
+    struct ggml_hash_set zero_table = ggml_hash_set_new(gf->size);
     for (int i = 0; i < gf->n_nodes; i++) {
         if (gf->grads[i]) {
-            hash_insert(zero_table, gf->grads[i]);
+            ggml_hash_insert(zero_table, gf->grads[i]);
         }
     }
 
@@ -15818,26 +15931,54 @@ void ggml_build_backward_expand(struct ggml_context * ctx, struct ggml_cgraph *
         }
     }
 
-    free(zero_table);
+    ggml_hash_set_free(zero_table);
 }
 
-struct ggml_cgraph ggml_build_backward(struct ggml_context * ctx, struct ggml_cgraph * gf, bool keep) {
-    struct ggml_cgraph result = *gf;
-    ggml_build_backward_expand(ctx, gf, &result, keep);
-    return result;
+static size_t ggml_graph_nbytes(size_t size, bool grads) {
+    size_t nbytes = sizeof(struct ggml_cgraph);
+    nbytes += size * sizeof(struct ggml_tensor *) * 2; // leafs + nodes
+    if (grads) {
+        nbytes += size * sizeof(struct ggml_tensor *); // grads
+    }
+    nbytes += ggml_hash_size(size * 2) * sizeof(struct ggml_tensor *); // hash set
+    return nbytes;
 }
 
-struct ggml_cgraph * ggml_new_graph(struct ggml_context * ctx) {
-    struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_GRAPH, GGML_GRAPH_SIZE);
+size_t ggml_graph_overhead_custom(size_t size, bool grads) {
+    return GGML_OBJECT_SIZE + GGML_PAD(ggml_graph_nbytes(size, grads), GGML_MEM_ALIGN);
+}
+
+size_t ggml_graph_overhead(void) {
+    return ggml_graph_overhead_custom(GGML_DEFAULT_GRAPH_SIZE, false);
+}
+
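Together with ggml_tensor_overhead(), the overhead helpers above let callers size a context for graphs larger than GGML_DEFAULT_GRAPH_SIZE. A minimal sketch of the intended usage (the node budget of 4096 and the no_alloc context are illustrative, not part of this patch):

    // Reserve space for a graph of up to 4096 nodes (no gradients) plus its
    // tensor metadata, then allocate the graph inside that context.
    size_t n = 4096;
    struct ggml_init_params params = {
        /*.mem_size   =*/ ggml_graph_overhead_custom(n, false) + n*ggml_tensor_overhead(),
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ true, // tensor data lives elsewhere, e.g. in backend buffers
    };
    struct ggml_context * ctx = ggml_init(params);
    struct ggml_cgraph  * gf  = ggml_new_graph_custom(ctx, n, /*grads =*/ false);
    // ... build and compute, then ggml_free(ctx) releases graph and metadata together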
+struct ggml_cgraph * ggml_new_graph_custom(struct ggml_context * ctx, size_t size, bool grads) {
+    const size_t obj_size = ggml_graph_nbytes(size, grads);
+    struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_GRAPH, obj_size);
     struct ggml_cgraph * cgraph = (struct ggml_cgraph *) ((char *) ctx->mem_buffer + obj->offs);
 
+    struct ggml_tensor ** data_start = (struct ggml_tensor **) (cgraph + 1);
+
+    size_t hash_size = ggml_hash_size(size * 2);
+    struct ggml_tensor ** nodes_ptr = data_start;
+    struct ggml_tensor ** leafs_ptr = nodes_ptr + size;
+    struct ggml_tensor ** hash_keys_ptr = leafs_ptr + size;
+    struct ggml_tensor ** grads_ptr = grads ? hash_keys_ptr + hash_size : NULL;
+
+    // check that we allocated the correct amount of memory
+    assert(obj_size == (size_t) (
+        (grads ? (char *)(grads_ptr + size) : (char *)(hash_keys_ptr + hash_size)) - (char *)cgraph));
+
+    memset(hash_keys_ptr, 0, hash_size * sizeof(struct ggml_tensor *));
+
     *cgraph = (struct ggml_cgraph) {
+        /*.size         =*/ size,
         /*.n_nodes      =*/ 0,
         /*.n_leafs      =*/ 0,
-        /*.nodes        =*/ { NULL },
-        /*.grads        =*/ { NULL },
-        /*.leafs        =*/ { NULL },
-        /*.hash_table   =*/ { NULL },
+        /*.nodes        =*/ nodes_ptr,
+        /*.grads        =*/ grads_ptr,
+        /*.leafs        =*/ leafs_ptr,
+        /*.hash_table   =*/ { hash_size, hash_keys_ptr },
         /*.order        =*/ GGML_CGRAPH_EVAL_ORDER_LEFT_TO_RIGHT,
         /*.perf_runs    =*/ 0,
         /*.perf_cycles  =*/ 0,
@@ -15847,14 +15988,85 @@ struct ggml_cgraph * ggml_new_graph(struct ggml_context * ctx) {
     return cgraph;
 }
 
-struct ggml_cgraph * ggml_build_forward_ctx(struct ggml_context * ctx, struct ggml_tensor * tensor) {
-    struct ggml_cgraph * cgraph = ggml_new_graph(ctx);
-    ggml_build_forward_impl(cgraph, tensor, false);
+struct ggml_cgraph * ggml_new_graph(struct ggml_context * ctx) {
+    return ggml_new_graph_custom(ctx, GGML_DEFAULT_GRAPH_SIZE, false);
+}
+
+struct ggml_cgraph * ggml_graph_view(struct ggml_context * ctx, struct ggml_cgraph * cgraph0, int i0, int i1) {
+    const size_t obj_size = sizeof(struct ggml_cgraph);
+    struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_GRAPH, obj_size);
+    struct ggml_cgraph * cgraph = (struct ggml_cgraph *) ((char *) ctx->mem_buffer + obj->offs);
+
+    *cgraph = (struct ggml_cgraph) {
+        /*.size         =*/ 0,
+        /*.n_nodes      =*/ i1 - i0,
+        /*.n_leafs      =*/ 0,
+        /*.nodes        =*/ cgraph0->nodes + i0,
+        /*.grads        =*/ cgraph0->grads ? 
cgraph0->grads + i0 : NULL, + /*.leafs =*/ NULL, + /*.hash_table =*/ { 0, NULL }, + /*.order =*/ cgraph0->order, + /*.perf_runs =*/ 0, + /*.perf_cycles =*/ 0, + /*.perf_time_us =*/ 0, + }; + return cgraph; } -size_t ggml_graph_overhead(void) { - return GGML_OBJECT_SIZE + GGML_PAD(GGML_GRAPH_SIZE, GGML_MEM_ALIGN); +void ggml_graph_cpy(struct ggml_cgraph * src, struct ggml_cgraph * dst) { + GGML_ASSERT(dst->size >= src->n_leafs); + GGML_ASSERT(dst->size >= src->n_nodes); + GGML_ASSERT(dst->visited_hash_table.size >= src->visited_hash_table.size); + + dst->n_leafs = src->n_leafs; + dst->n_nodes = src->n_nodes; + dst->order = src->order; + + for (int i = 0; i < src->n_leafs; ++i) { + dst->leafs[i] = src->leafs[i]; + } + + for (int i = 0; i < src->n_nodes; ++i) { + dst->nodes[i] = src->nodes[i]; + } + + if (src->grads) { + GGML_ASSERT(dst->grads != NULL); + for (int i = 0; i < src->n_nodes; ++i) { + dst->grads[i] = src->grads[i]; + } + } + + for (size_t i = 0; i < src->visited_hash_table.size; ++i) { + if (src->visited_hash_table.keys[i]) { + ggml_hash_insert(dst->visited_hash_table, src->visited_hash_table.keys[i]); + } + } +} + +struct ggml_cgraph * ggml_graph_dup(struct ggml_context * ctx, struct ggml_cgraph * cgraph) { + struct ggml_cgraph * result = ggml_new_graph_custom(ctx, cgraph->size, cgraph->grads != NULL); + ggml_graph_cpy(cgraph, result); + return result; +} + +void ggml_graph_reset(struct ggml_cgraph * cgraph) { + GGML_ASSERT(cgraph->grads != NULL); + + for (int i = 0; i < cgraph->n_nodes; i++) { + struct ggml_tensor * grad = cgraph->grads[i]; + + if (grad) { + ggml_set_zero(grad); + } + } +} + +void ggml_graph_clear(struct ggml_cgraph * cgraph) { + cgraph->n_leafs = 0; + cgraph->n_nodes = 0; + memset(cgraph->visited_hash_table.keys, 0, cgraph->visited_hash_table.size * sizeof(struct ggml_tensor *)); } // @@ -16007,13 +16219,252 @@ static void ggml_graph_compute_perf_stats_node(struct ggml_tensor * node, const node->perf_time_us += time_us_cur; } +static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { + int n_tasks = 0; + + switch (node->op) { + case GGML_OP_CPY: + case GGML_OP_DUP: + case GGML_OP_ADD: + case GGML_OP_ADD1: + case GGML_OP_ACC: + { + n_tasks = n_threads; + } break; + case GGML_OP_SUB: + case GGML_OP_DIV: + case GGML_OP_SQR: + case GGML_OP_SQRT: + case GGML_OP_LOG: + case GGML_OP_SUM: + case GGML_OP_SUM_ROWS: + case GGML_OP_MEAN: + case GGML_OP_ARGMAX: + case GGML_OP_REPEAT: + case GGML_OP_REPEAT_BACK: + { + n_tasks = 1; + } break; + case GGML_OP_UNARY: + switch (ggml_get_unary_op(node)) { + case GGML_UNARY_OP_ABS: + case GGML_UNARY_OP_SGN: + case GGML_UNARY_OP_NEG: + case GGML_UNARY_OP_STEP: + case GGML_UNARY_OP_TANH: + case GGML_UNARY_OP_ELU: + case GGML_UNARY_OP_RELU: + case GGML_UNARY_OP_LEAKY: + { + n_tasks = 1; + } break; + + case GGML_UNARY_OP_GELU: + case GGML_UNARY_OP_GELU_QUICK: + case GGML_UNARY_OP_SILU: + { + n_tasks = n_threads; + } break; + } + break; + case GGML_OP_SILU_BACK: + case GGML_OP_MUL: + case GGML_OP_NORM: + case GGML_OP_RMS_NORM: + case GGML_OP_RMS_NORM_BACK: + case GGML_OP_GROUP_NORM: + case GGML_OP_CONCAT: + { + n_tasks = n_threads; + } break; + case GGML_OP_MUL_MAT: + { + n_tasks = n_threads; + + // TODO: use different scheduling for different matrix sizes + //const int nr0 = ggml_nrows(node->src[0]); + //const int nr1 = ggml_nrows(node->src[1]); + + //n_tasks = MIN(n_threads, MAX(1, nr0/128)); + //printf("nr0 = %8d, nr1 = %8d, nr0*nr1 = %8d, n_tasks%d\n", nr0, nr1, nr0*nr1, n_tasks); + +#if defined(GGML_USE_CUBLAS) + if 
(ggml_cuda_can_mul_mat(node->src[0], node->src[1], node)) { + n_tasks = 1; // TODO: this actually is doing nothing + // the threads are still spinning + } +#elif defined(GGML_USE_CLBLAST) + if (ggml_cl_can_mul_mat(node->src[0], node->src[1], node)) { + n_tasks = 1; // TODO: this actually is doing nothing + // the threads are still spinning + } +#endif +#if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) + if (ggml_compute_forward_mul_mat_use_blas(node->src[0], node->src[1], node)) { + n_tasks = 1; // TODO: this actually is doing nothing + // the threads are still spinning + } +#endif + } break; + case GGML_OP_OUT_PROD: + { + n_tasks = n_threads; + } break; + case GGML_OP_SCALE: + case GGML_OP_SET: + case GGML_OP_CONT: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_PERMUTE: + case GGML_OP_TRANSPOSE: + case GGML_OP_GET_ROWS: + case GGML_OP_GET_ROWS_BACK: + case GGML_OP_DIAG: + { + n_tasks = 1; + } break; + case GGML_OP_DIAG_MASK_ZERO: + case GGML_OP_DIAG_MASK_INF: + case GGML_OP_SOFT_MAX: + case GGML_OP_SOFT_MAX_BACK: + case GGML_OP_ROPE: + case GGML_OP_ROPE_BACK: + case GGML_OP_ADD_REL_POS: + { + n_tasks = n_threads; + } break; + case GGML_OP_ALIBI: + { + n_tasks = 1; //TODO + } break; + case GGML_OP_CLAMP: + { + n_tasks = 1; //TODO + } break; + case GGML_OP_CONV_1D: + { + n_tasks = n_threads; + } break; + case GGML_OP_CONV_1D_STAGE_0: + { + n_tasks = n_threads; + } break; + case GGML_OP_CONV_1D_STAGE_1: + { + n_tasks = n_threads; + } break; + case GGML_OP_CONV_TRANSPOSE_1D: + { + n_tasks = n_threads; + } break; + case GGML_OP_CONV_2D: + { + n_tasks = n_threads; + } break; + case GGML_OP_CONV_2D_STAGE_0: + { + n_tasks = n_threads; + } break; + case GGML_OP_CONV_2D_STAGE_1: + { + n_tasks = n_threads; + } break; + case GGML_OP_CONV_TRANSPOSE_2D: + { + n_tasks = n_threads; + } break; + case GGML_OP_POOL_1D: + case GGML_OP_POOL_2D: + { + n_tasks = 1; + } break; + case GGML_OP_UPSCALE: + { + n_tasks = n_threads; + } break; + case GGML_OP_FLASH_ATTN: + { + n_tasks = n_threads; + } break; + case GGML_OP_FLASH_FF: + { + n_tasks = n_threads; + } break; + case GGML_OP_FLASH_ATTN_BACK: + { + n_tasks = n_threads; + } break; + case GGML_OP_WIN_PART: + case GGML_OP_WIN_UNPART: + case GGML_OP_GET_REL_POS: + case GGML_OP_MAP_UNARY: + case GGML_OP_MAP_BINARY: + case GGML_OP_MAP_CUSTOM1_F32: + case GGML_OP_MAP_CUSTOM2_F32: + case GGML_OP_MAP_CUSTOM3_F32: + { + n_tasks = 1; + } break; + case GGML_OP_MAP_CUSTOM1: + { + struct ggml_map_custom1_op_params * p = (struct ggml_map_custom1_op_params *) node->op_params; + if (p->n_tasks == GGML_N_TASKS_MAX) { + n_tasks = n_threads; + } else { + n_tasks = MIN(p->n_tasks, n_threads); + } + } break; + case GGML_OP_MAP_CUSTOM2: + { + struct ggml_map_custom2_op_params * p = (struct ggml_map_custom2_op_params *) node->op_params; + if (p->n_tasks == GGML_N_TASKS_MAX) { + n_tasks = n_threads; + } else { + n_tasks = MIN(p->n_tasks, n_threads); + } + } break; + case GGML_OP_MAP_CUSTOM3: + { + struct ggml_map_custom3_op_params * p = (struct ggml_map_custom3_op_params *) node->op_params; + if (p->n_tasks == GGML_N_TASKS_MAX) { + n_tasks = n_threads; + } else { + n_tasks = MIN(p->n_tasks, n_threads); + } + } break; + case GGML_OP_CROSS_ENTROPY_LOSS: + { + n_tasks = n_threads; + } break; + case GGML_OP_CROSS_ENTROPY_LOSS_BACK: + { + n_tasks = n_threads; + } break; + case GGML_OP_NONE: + { + n_tasks = 1; + } break; + case GGML_OP_COUNT: + { + GGML_ASSERT(false); + } break; + default: + { + GGML_ASSERT(false); + } break; + } + + assert(n_tasks > 0); + + return 
n_tasks; +} + static thread_ret_t ggml_graph_compute_thread(void * data) { struct ggml_compute_state * state = (struct ggml_compute_state *) data; const struct ggml_cgraph * cgraph = state->shared->cgraph; const struct ggml_cplan * cplan = state->shared->cplan; - const int * n_tasks_arr = cplan->n_tasks; const int n_threads = state->shared->n_threads; set_numa_thread_affinity(state->ith, n_threads); @@ -16038,9 +16489,9 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { if (node_n != -1) { /* FINALIZE */ - struct ggml_tensor * node = state->shared->cgraph->nodes[node_n]; + struct ggml_tensor * node = cgraph->nodes[node_n]; if (GGML_OP_HAS_FINALIZE[node->op]) { - params.nth = n_tasks_arr[node_n]; + params.nth = ggml_get_n_tasks(node, n_threads); ggml_compute_forward(¶ms, node); } ggml_graph_compute_perf_stats_node(node, state->shared); @@ -16051,7 +16502,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { GGML_PRINT_DEBUG_5("%s: %d/%d\n", __func__, node_n, cgraph->n_nodes); struct ggml_tensor * node = cgraph->nodes[node_n]; - const int n_tasks = n_tasks_arr[node_n]; + const int n_tasks = ggml_get_n_tasks(node, n_threads); state->shared->perf_node_start_cycles = ggml_perf_cycles(); state->shared->perf_node_start_time_us = ggml_perf_time_us(); @@ -16109,7 +16560,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { /* COMPUTE */ struct ggml_tensor * node = cgraph->nodes[node_n]; - const int n_tasks = n_tasks_arr[node_n]; + const int n_tasks = ggml_get_n_tasks(node, n_threads); struct ggml_compute_params params = { /*.type =*/ GGML_TASK_COMPUTE, @@ -16143,121 +16594,46 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { struct ggml_tensor * node = cgraph->nodes[i]; + size_t cur = 0; + switch (node->op) { case GGML_OP_CPY: case GGML_OP_DUP: { n_tasks = n_threads; - size_t cur = 0; if (ggml_is_quantized(node->type)) { cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; } - - work_size = MAX(work_size, cur); } break; case GGML_OP_ADD: case GGML_OP_ADD1: { n_tasks = n_threads; - size_t cur = 0; - if (ggml_is_quantized(node->src[0]->type)) { cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; } - - work_size = MAX(work_size, cur); } break; case GGML_OP_ACC: { n_tasks = n_threads; - size_t cur = 0; - if (ggml_is_quantized(node->src[0]->type)) { cur = ggml_type_size(GGML_TYPE_F32) * node->src[1]->ne[0] * n_tasks; } - - work_size = MAX(work_size, cur); } break; - case GGML_OP_SUB: - case GGML_OP_DIV: - case GGML_OP_SQR: - case GGML_OP_SQRT: - case GGML_OP_LOG: - case GGML_OP_SUM: - case GGML_OP_SUM_ROWS: - case GGML_OP_MEAN: - case GGML_OP_ARGMAX: - case GGML_OP_REPEAT: - case GGML_OP_REPEAT_BACK: - { - n_tasks = 1; - } break; - - case GGML_OP_UNARY: - { - switch (ggml_get_unary_op(node)) { - case GGML_UNARY_OP_ABS: - case GGML_UNARY_OP_SGN: - case GGML_UNARY_OP_NEG: - case GGML_UNARY_OP_STEP: - case GGML_UNARY_OP_TANH: - case GGML_UNARY_OP_ELU: - case GGML_UNARY_OP_RELU: - { - n_tasks = 1; - } break; - - case GGML_UNARY_OP_GELU: - case GGML_UNARY_OP_GELU_QUICK: - case GGML_UNARY_OP_SILU: - { - n_tasks = n_threads; - } break; - } - } break; - case GGML_OP_SILU_BACK: - case GGML_OP_MUL: - case GGML_OP_NORM: - case GGML_OP_RMS_NORM: - case GGML_OP_RMS_NORM_BACK: - case GGML_OP_GROUP_NORM: - { - n_tasks = n_threads; - } break; - case GGML_OP_CONCAT: case GGML_OP_MUL_MAT: { - n_tasks = n_threads; - - // TODO: use different scheduling for different matrix sizes - //const int nr0 = ggml_nrows(node->src[0]); - 
//const int nr1 = ggml_nrows(node->src[1]); - - //n_tasks = MIN(n_threads, MAX(1, nr0/128)); - //printf("nr0 = %8d, nr1 = %8d, nr0*nr1 = %8d, n_tasks%d\n", nr0, nr1, nr0*nr1, n_tasks); - - size_t cur = 0; const enum ggml_type vec_dot_type = type_traits[node->src[0]->type].vec_dot_type; -#if defined(GGML_USE_CUBLAS) - if (ggml_cuda_can_mul_mat(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning - } else -#elif defined(GGML_USE_CLBLAST) +#if defined(GGML_USE_CLBLAST) if (ggml_cl_can_mul_mat(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning cur = ggml_cl_mul_mat_get_wsize(node->src[0], node->src[1], node); } else #endif #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) if (ggml_compute_forward_mul_mat_use_blas(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning if (node->src[0]->type != GGML_TYPE_F32) { // here we need memory just for single 2D matrix from src0 cur = ggml_type_size(GGML_TYPE_F32)*(node->src[0]->ne[0]*node->src[0]->ne[1]); @@ -16266,62 +16642,18 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { #endif if (node->src[1]->type != vec_dot_type) { cur = ggml_type_size(vec_dot_type)*ggml_nelements(node->src[1])/ggml_blck_size(vec_dot_type); - } else { - cur = 0; } - - work_size = MAX(work_size, cur); } break; case GGML_OP_OUT_PROD: { n_tasks = n_threads; - size_t cur = 0; - if (ggml_is_quantized(node->src[0]->type)) { cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; } - - work_size = MAX(work_size, cur); - } break; - case GGML_OP_SCALE: - { - n_tasks = 1; - } break; - case GGML_OP_SET: - case GGML_OP_CONT: - case GGML_OP_RESHAPE: - case GGML_OP_VIEW: - case GGML_OP_PERMUTE: - case GGML_OP_TRANSPOSE: - case GGML_OP_GET_ROWS: - case GGML_OP_GET_ROWS_BACK: - case GGML_OP_DIAG: - { - n_tasks = 1; - } break; - case GGML_OP_DIAG_MASK_ZERO: - case GGML_OP_DIAG_MASK_INF: - case GGML_OP_SOFT_MAX: - case GGML_OP_SOFT_MAX_BACK: - case GGML_OP_ROPE: - case GGML_OP_ROPE_BACK: - case GGML_OP_ADD_REL_POS: - { - n_tasks = n_threads; - } break; - case GGML_OP_ALIBI: - { - n_tasks = 1; //TODO - } break; - case GGML_OP_CLAMP: - { - n_tasks = 1; //TODO } break; case GGML_OP_CONV_1D: { - n_tasks = n_threads; - GGML_ASSERT(node->src[0]->ne[3] == 1); GGML_ASSERT(node->src[1]->ne[2] == 1); GGML_ASSERT(node->src[1]->ne[3] == 1); @@ -16342,8 +16674,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { UNUSED(ne10); UNUSED(ne11); - size_t cur = 0; - if (node->src[0]->type == GGML_TYPE_F16 && node->src[1]->type == GGML_TYPE_F32) { cur = sizeof(ggml_fp16_t)*(ne0*ne1*ew0); @@ -16353,21 +16683,9 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } else { GGML_ASSERT(false); } - - work_size = MAX(work_size, cur); - } break; - case GGML_OP_CONV_1D_STAGE_0: - { - n_tasks = n_threads; - } break; - case GGML_OP_CONV_1D_STAGE_1: - { - n_tasks = n_threads; } break; case GGML_OP_CONV_TRANSPOSE_1D: { - n_tasks = n_threads; - GGML_ASSERT(node->src[0]->ne[3] == 1); GGML_ASSERT(node->src[1]->ne[2] == 1); GGML_ASSERT(node->src[1]->ne[3] == 1); @@ -16379,7 +16697,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { const int64_t ne10 = node->src[1]->ne[0]; // L const int64_t ne11 = node->src[1]->ne[1]; // Cin - size_t cur = 0; if 
(node->src[0]->type == GGML_TYPE_F16 && node->src[1]->type == GGML_TYPE_F32) { cur += sizeof(ggml_fp16_t)*ne00*ne01*ne02; @@ -16391,13 +16708,9 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } else { GGML_ASSERT(false); } - - work_size = MAX(work_size, cur); } break; case GGML_OP_CONV_2D: { - n_tasks = n_threads; - const int64_t ne00 = node->src[0]->ne[0]; // W const int64_t ne01 = node->src[0]->ne[1]; // H const int64_t ne02 = node->src[0]->ne[2]; // C @@ -16417,8 +16730,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { UNUSED(ne03); UNUSED(ne2); - size_t cur = 0; - if (node->src[0]->type == GGML_TYPE_F16 && node->src[1]->type == GGML_TYPE_F32) { // im2col: [N*OH*OW, IC*KH*KW] @@ -16429,21 +16740,9 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } else { GGML_ASSERT(false); } - - work_size = MAX(work_size, cur); - } break; - case GGML_OP_CONV_2D_STAGE_0: - { - n_tasks = n_threads; - } break; - case GGML_OP_CONV_2D_STAGE_1: - { - n_tasks = n_threads; } break; case GGML_OP_CONV_TRANSPOSE_2D: { - n_tasks = n_threads; - const int64_t ne00 = node->src[0]->ne[0]; // W const int64_t ne01 = node->src[0]->ne[1]; // H const int64_t ne02 = node->src[0]->ne[2]; // Channels Out @@ -16453,141 +16752,66 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { const int64_t ne11 = node->src[1]->ne[1]; // H const int64_t ne12 = node->src[1]->ne[2]; // Channels In - size_t cur = 0; cur += sizeof(ggml_fp16_t)*ne00*ne01*ne02*ne03; cur += sizeof(ggml_fp16_t)*ne10*ne11*ne12; - - work_size = MAX(work_size, cur); - } break; - case GGML_OP_POOL_1D: - case GGML_OP_POOL_2D: - { - n_tasks = 1; - } break; - case GGML_OP_UPSCALE: - { - n_tasks = n_threads; } break; case GGML_OP_FLASH_ATTN: { n_tasks = n_threads; - size_t cur = 0; - const int64_t ne11 = ggml_up(node->src[1]->ne[1], GGML_SOFT_MAX_UNROLL); if (node->src[1]->type == GGML_TYPE_F32) { cur = sizeof(float)*ne11*n_tasks; // TODO: this can become (n_tasks-1) cur += sizeof(float)*ne11*n_tasks; // this is overestimated by x2 - } - - if (node->src[1]->type == GGML_TYPE_F16) { + } else if (node->src[1]->type == GGML_TYPE_F16) { cur = sizeof(float)*ne11*n_tasks; // TODO: this can become (n_tasks-1) cur += sizeof(float)*ne11*n_tasks; // this is overestimated by x2 } - - work_size = MAX(work_size, cur); } break; case GGML_OP_FLASH_FF: { n_tasks = n_threads; - size_t cur = 0; - if (node->src[1]->type == GGML_TYPE_F32) { cur = sizeof(float)*node->src[1]->ne[1]*n_tasks; // TODO: this can become (n_tasks-1) cur += sizeof(float)*node->src[1]->ne[1]*n_tasks; // this is overestimated by x2 - } - - if (node->src[1]->type == GGML_TYPE_F16) { + } else if (node->src[1]->type == GGML_TYPE_F16) { cur = sizeof(float)*node->src[1]->ne[1]*n_tasks; // TODO: this can become (n_tasks-1) cur += sizeof(float)*node->src[1]->ne[1]*n_tasks; // this is overestimated by x2 } - - work_size = MAX(work_size, cur); } break; case GGML_OP_FLASH_ATTN_BACK: { n_tasks = n_threads; - size_t cur = 0; - const int64_t D = node->src[0]->ne[0]; const int64_t ne11 = ggml_up(node->src[1]->ne[1], GGML_SOFT_MAX_UNROLL); const int64_t mxDn = MAX(D, ne11) * 2; // *2 because of S and SM in ggml_compute_forward_flash_attn_back if (node->src[1]->type == GGML_TYPE_F32) { cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 - } - - if (node->src[1]->type == GGML_TYPE_F16) { + } else if (node->src[1]->type 
== GGML_TYPE_F16) { cur = sizeof(float)*mxDn*n_tasks; // TODO: this can become (n_tasks-1) cur += sizeof(float)*mxDn*n_tasks; // this is overestimated by x2 } + } break; - work_size = MAX(work_size, cur); - } break; - case GGML_OP_WIN_PART: - case GGML_OP_WIN_UNPART: - case GGML_OP_GET_REL_POS: - case GGML_OP_MAP_UNARY: - case GGML_OP_MAP_BINARY: - case GGML_OP_MAP_CUSTOM1_F32: - case GGML_OP_MAP_CUSTOM2_F32: - case GGML_OP_MAP_CUSTOM3_F32: - { - n_tasks = 1; - } break; - case GGML_OP_MAP_CUSTOM1: - { - struct ggml_map_custom1_op_params * p = (struct ggml_map_custom1_op_params *) node->op_params; - if (p->n_tasks == GGML_N_TASKS_MAX) { - n_tasks = n_threads; - } else { - n_tasks = MIN(p->n_tasks, n_threads); - } - } break; - case GGML_OP_MAP_CUSTOM2: - { - struct ggml_map_custom2_op_params * p = (struct ggml_map_custom2_op_params *) node->op_params; - if (p->n_tasks == GGML_N_TASKS_MAX) { - n_tasks = n_threads; - } else { - n_tasks = MIN(p->n_tasks, n_threads); - } - } break; - case GGML_OP_MAP_CUSTOM3: - { - struct ggml_map_custom3_op_params * p = (struct ggml_map_custom3_op_params *) node->op_params; - if (p->n_tasks == GGML_N_TASKS_MAX) { - n_tasks = n_threads; - } else { - n_tasks = MIN(p->n_tasks, n_threads); - } - } break; case GGML_OP_CROSS_ENTROPY_LOSS: { n_tasks = n_threads; - size_t cur = ggml_type_size(node->type)*(n_tasks + node->src[0]->ne[0]*n_tasks); - - work_size = MAX(work_size, cur); - } break; - case GGML_OP_CROSS_ENTROPY_LOSS_BACK: - { - n_tasks = n_threads; - } break; - case GGML_OP_NONE: - { - n_tasks = 1; + cur = ggml_type_size(node->type)*(n_tasks + node->src[0]->ne[0]*n_tasks); } break; case GGML_OP_COUNT: { GGML_ASSERT(false); } break; + default: + break; } - cplan.n_tasks[i] = n_tasks; + work_size = MAX(work_size, cur); } if (work_size > 0) { @@ -16609,12 +16833,6 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { if (cplan->work_size > 0) { GGML_ASSERT(cplan->work_data); } - - for (int i = 0; i < cgraph->n_nodes; ++i) { - if (cgraph->nodes[i]->op != GGML_OP_NONE) { - GGML_ASSERT(cplan->n_tasks[i] > 0); - } - } } const int n_threads = cplan->n_threads; @@ -16687,16 +16905,6 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { return compute_status; } -void ggml_graph_reset(struct ggml_cgraph * cgraph) { - for (int i = 0; i < cgraph->n_nodes; i++) { - struct ggml_tensor * grad = cgraph->grads[i]; - - if (grad) { - ggml_set_zero(grad); - } - } -} - void ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads) { struct ggml_cplan cplan = ggml_graph_plan(cgraph, n_threads); @@ -16823,12 +17031,12 @@ void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname) { const uint32_t magic = GGML_FILE_MAGIC; const uint32_t version = GGML_FILE_VERSION; const uint32_t n_leafs = cgraph->n_leafs; - const uint32_t nodes = cgraph->n_nodes; + const uint32_t n_nodes = cgraph->n_nodes; fwrite(&magic, sizeof(uint32_t), 1, fout); fwrite(&version, sizeof(uint32_t), 1, fout); fwrite(&n_leafs, sizeof(uint32_t), 1, fout); - fwrite(&nodes, sizeof(uint32_t), 1, fout); + fwrite(&n_nodes, sizeof(uint32_t), 1, fout); fwrite(&size_eval, sizeof(uint64_t), 1, fout); } @@ -16916,7 +17124,7 @@ void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname) { if (idx == -1) { for (int k = 0; k < cgraph->n_nodes; ++k) { if (args[j] == cgraph->nodes[k]) { - idx = GGML_MAX_NODES + k; + idx = cgraph->n_leafs + k; break; } } @@ -16943,11 +17151,11 @@ void 
ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname) { } } -struct ggml_cgraph ggml_graph_import(const char * fname, struct ggml_context ** ctx_data, struct ggml_context ** ctx_eval) { +struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context ** ctx_data, struct ggml_context ** ctx_eval) { assert(*ctx_data == NULL); assert(*ctx_eval == NULL); - struct ggml_cgraph result = { 0 }; + struct ggml_cgraph * result = NULL; struct ggml_tensor * data = NULL; @@ -17019,13 +17227,11 @@ struct ggml_cgraph ggml_graph_import(const char * fname, struct ggml_context ** const uint32_t n_leafs = *(const uint32_t *) ptr; ptr += sizeof(n_leafs); const uint32_t n_nodes = *(const uint32_t *) ptr; ptr += sizeof(n_nodes); const uint64_t size_eval = *(const uint64_t *) ptr; ptr += sizeof(size_eval); - - result.n_leafs = n_leafs; - result.n_nodes = n_nodes; + const int graph_size = MAX(n_leafs, n_nodes); // create the data context { - const size_t overhead = (n_leafs + n_nodes)*ggml_tensor_overhead(); + const size_t overhead = (n_leafs + n_nodes)*ggml_tensor_overhead() + ggml_graph_overhead_custom(graph_size, false); struct ggml_init_params params = { .mem_size = size_eval + overhead, @@ -17041,6 +17247,12 @@ struct ggml_cgraph ggml_graph_import(const char * fname, struct ggml_context ** } } + result = ggml_new_graph_custom(*ctx_eval, graph_size, false); + + result->n_leafs = n_leafs; + result->n_nodes = n_nodes; + + // leafs { uint32_t type; @@ -17079,7 +17291,7 @@ struct ggml_cgraph ggml_graph_import(const char * fname, struct ggml_context ** tensor->nb[j] = nb[j]; } - result.leafs[i] = tensor; + result->leafs[i] = tensor; ptr += ggml_nbytes(tensor); @@ -17131,10 +17343,10 @@ struct ggml_cgraph ggml_graph_import(const char * fname, struct ggml_context ** continue; } - if (arg_idx < GGML_MAX_NODES) { - args[j] = result.leafs[arg_idx]; + if (arg_idx < result->n_leafs) { + args[j] = result->leafs[arg_idx]; } else { - args[j] = result.nodes[arg_idx - GGML_MAX_NODES]; + args[j] = result->nodes[arg_idx - result->n_leafs]; } } @@ -17186,7 +17398,7 @@ struct ggml_cgraph ggml_graph_import(const char * fname, struct ggml_context ** tensor->src[j] = args[j]; } - result.nodes[i] = tensor; + result->nodes[i] = tensor; fprintf(stderr, "%s: loaded node %d: '%16s', %3d dims, %9zu bytes\n", __func__, i, tensor->name, n_dims, ggml_nbytes(tensor)); } @@ -18091,10 +18303,11 @@ struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { case GGML_OPT_ADAM: { result = (struct ggml_opt_params) { - .type = GGML_OPT_ADAM, - .n_threads = 1, - .past = 0, - .delta = 1e-5f, + .type = GGML_OPT_ADAM, + .graph_size = GGML_DEFAULT_GRAPH_SIZE, + .n_threads = 1, // FIXME: GGML_DEFAULT_N_THREADS ? + .past = 0, + .delta = 1e-5f, .max_no_improvement = 100, @@ -18121,10 +18334,11 @@ struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { case GGML_OPT_LBFGS: { result = (struct ggml_opt_params) { - .type = GGML_OPT_LBFGS, - .n_threads = 1, - .past = 0, - .delta = 1e-5f, + .type = GGML_OPT_LBFGS, + .graph_size = GGML_DEFAULT_GRAPH_SIZE, + .n_threads = 1, + .past = 0, + .delta = 1e-5f, .max_no_improvement = 0, @@ -18266,14 +18480,11 @@ enum ggml_opt_result ggml_opt_resume( struct ggml_tensor * f) { // build forward + backward compute graphs - struct ggml_tensor * gfbuf = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, sizeof(struct ggml_cgraph) / ggml_type_size(GGML_TYPE_I32)+ (sizeof(struct ggml_cgraph) % ggml_type_size(GGML_TYPE_I32) ? 
1 : 0)); - struct ggml_tensor * gbbuf = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, sizeof(struct ggml_cgraph) / ggml_type_size(GGML_TYPE_I32)+ (sizeof(struct ggml_cgraph) % ggml_type_size(GGML_TYPE_I32) ? 1 : 0)); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx, opt->params.graph_size, true); + ggml_build_forward_expand(gf, f); - struct ggml_cgraph * gf = (struct ggml_cgraph *) gfbuf->data; - struct ggml_cgraph * gb = (struct ggml_cgraph *) gbbuf->data; - - *gf = ggml_build_forward (f); - *gb = ggml_build_backward(ctx, gf, true); + struct ggml_cgraph * gb = ggml_graph_dup(ctx, gf); + ggml_build_backward_expand(ctx, gf, gb, true); return ggml_opt_resume_g(ctx, opt, f, gf, gb, NULL, NULL); } diff --git a/ggml.h b/ggml.h index 26654fc8e..0118c99db 100644 --- a/ggml.h +++ b/ggml.h @@ -58,7 +58,8 @@ // { // ... // -// struct ggml_cgraph gf = ggml_build_forward(f); +// struct ggml_cgraph * gf = ggml_new_graph(ctx); +// ggml_build_forward_expand(gf, f); // // // set the input variable and parameter values // ggml_set_f32(x, 2.0f); @@ -213,15 +214,14 @@ #define GGML_QNT_VERSION 2 // bump this on quantization format changes #define GGML_QNT_VERSION_FACTOR 1000 // do not change this -#define GGML_MAX_DIMS 4 -#define GGML_MAX_NODES 16384 -#define GGML_MAX_PARAMS 1024 -#define GGML_MAX_CONTEXTS 64 -#define GGML_MAX_SRC 6 -#define GGML_MAX_NAME 64 -#define GGML_MAX_OP_PARAMS 64 -#define GGML_DEFAULT_N_THREADS 4 - +#define GGML_MAX_DIMS 4 +#define GGML_MAX_PARAMS 1024 +#define GGML_MAX_CONTEXTS 64 +#define GGML_MAX_SRC 6 +#define GGML_MAX_NAME 64 +#define GGML_MAX_OP_PARAMS 64 +#define GGML_DEFAULT_N_THREADS 4 +#define GGML_DEFAULT_GRAPH_SIZE 2048 #if UINTPTR_MAX == 0xFFFFFFFF #define GGML_MEM_ALIGN 4 #else @@ -245,7 +245,10 @@ do { \ if (!(x)) { \ fprintf(stderr, "GGML_ASSERT: %s:%d: %s\n", __FILE__, __LINE__, #x); \ - abort(); \ + fflush(stderr); \ + fflush(stdout); \ + ggml_print_backtrace(); \ + exit(1); \ } \ } while (0) @@ -451,6 +454,7 @@ extern "C" { GGML_UNARY_OP_GELU, GGML_UNARY_OP_GELU_QUICK, GGML_UNARY_OP_SILU, + GGML_UNARY_OP_LEAKY }; enum ggml_object_type { @@ -531,37 +535,33 @@ extern "C" { int n_threads; - // the `n_tasks` of nodes, 1:1 mapping to cgraph nodes - int n_tasks[GGML_MAX_NODES]; - // abort ggml_graph_compute when true bool (*abort_callback)(void * data); void * abort_callback_data; }; - // next prime after GGML_MAX_NODES - // #define GGML_GRAPH_HASHTABLE_SIZE 4099 - // next prime after GGML_MAX_NODES * 2 (nodes + leafs) - // #define GGML_GRAPH_HASHTABLE_SIZE 8273 - // #define GGML_GRAPH_HASHTABLE_SIZE 16411 - #define GGML_GRAPH_HASHTABLE_SIZE 32771 - enum ggml_cgraph_eval_order { GGML_CGRAPH_EVAL_ORDER_LEFT_TO_RIGHT = 0, GGML_CGRAPH_EVAL_ORDER_RIGHT_TO_LEFT, GGML_CGRAPH_EVAL_ORDER_COUNT }; + struct ggml_hash_set { + size_t size; + struct ggml_tensor ** keys; + }; + // computation graph struct ggml_cgraph { + int size; int n_nodes; int n_leafs; - struct ggml_tensor * nodes[GGML_MAX_NODES]; - struct ggml_tensor * grads[GGML_MAX_NODES]; - struct ggml_tensor * leafs[GGML_MAX_NODES]; + struct ggml_tensor ** nodes; + struct ggml_tensor ** grads; + struct ggml_tensor ** leafs; - void * visited_hash_table[GGML_GRAPH_HASHTABLE_SIZE]; + struct ggml_hash_set visited_hash_table; enum ggml_cgraph_eval_order order; @@ -571,8 +571,6 @@ extern "C" { int64_t perf_time_us; }; - static const size_t GGML_GRAPH_SIZE = sizeof(struct ggml_cgraph); - // scratch buffer struct ggml_scratch { size_t offs; @@ -617,6 +615,8 @@ extern "C" { GGML_API int64_t ggml_cycles(void); GGML_API int64_t 
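/*
 * A minimal sketch of the abort hook on struct ggml_cplan shown above:
 * ggml_graph_compute is expected to poll the callback between ops and stop
 * early when it returns true. The deadline logic below is illustrative,
 * not part of ggml.
 */
static bool abort_when_past_deadline(void * data) {
    const int64_t deadline_us = *(const int64_t *) data; // caller-supplied
    return ggml_time_us() > deadline_us;                 // true => stop compute
}
// usage: cplan.abort_callback      = abort_when_past_deadline;
//        cplan.abort_callback_data = &deadline_us;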
ggml_cycles_per_ms(void); + GGML_API void ggml_print_backtrace(void); + GGML_API void ggml_numa_init(void); // call once for better performance on NUMA systems GGML_API bool ggml_is_numa(void); // true if init detected that system has >1 NUMA node @@ -709,7 +709,7 @@ extern "C" { // Context tensor enumeration and lookup GGML_API struct ggml_tensor * ggml_get_first_tensor(struct ggml_context * ctx); GGML_API struct ggml_tensor * ggml_get_next_tensor (struct ggml_context * ctx, struct ggml_tensor * tensor); - GGML_API struct ggml_tensor * ggml_get_tensor (struct ggml_context * ctx, const char * name); + GGML_API struct ggml_tensor * ggml_get_tensor(struct ggml_context * ctx, const char * name); GGML_API struct ggml_tensor * ggml_set_zero(struct ggml_tensor * tensor); GGML_API struct ggml_tensor * ggml_set_i32 (struct ggml_tensor * tensor, int32_t value); @@ -943,6 +943,10 @@ extern "C" { struct ggml_context * ctx, struct ggml_tensor * a); + GGML_API struct ggml_tensor * ggml_leaky( + struct ggml_context * ctx, + struct ggml_tensor * a); + GGML_API struct ggml_tensor * ggml_relu_inplace( struct ggml_context * ctx, struct ggml_tensor * a); @@ -1482,6 +1486,8 @@ extern "C" { int s0, // stride int p0); // padding + // the result will have 2*p0 padding for the first dimension + // and 2*p1 padding for the second dimension GGML_API struct ggml_tensor * ggml_pool_2d( struct ggml_context * ctx, struct ggml_tensor * a, @@ -1490,8 +1496,8 @@ extern "C" { int k1, int s0, int s1, - int p0, - int p1); + float p0, + float p1); // nearest interpolate // used in stable-diffusion @@ -1732,19 +1738,22 @@ extern "C" { GGML_API void ggml_build_forward_expand (struct ggml_cgraph * cgraph, struct ggml_tensor * tensor); GGML_API void ggml_build_backward_expand(struct ggml_context * ctx, struct ggml_cgraph * gf, struct ggml_cgraph * gb, bool keep); - GGML_API struct ggml_cgraph ggml_build_forward (struct ggml_tensor * tensor); - GGML_API struct ggml_cgraph ggml_build_backward(struct ggml_context * ctx, struct ggml_cgraph * gf, bool keep); - // graph allocation in a context - GGML_API struct ggml_cgraph * ggml_new_graph (struct ggml_context * ctx); - GGML_API struct ggml_cgraph * ggml_build_forward_ctx(struct ggml_context * ctx, struct ggml_tensor * tensor); + GGML_API struct ggml_cgraph * ggml_new_graph (struct ggml_context * ctx); // size = GGML_DEFAULT_GRAPH_SIZE, grads = false + GGML_API struct ggml_cgraph * ggml_new_graph_custom (struct ggml_context * ctx, size_t size, bool grads); + GGML_API struct ggml_cgraph * ggml_graph_dup (struct ggml_context * ctx, struct ggml_cgraph * cgraph); + GGML_API struct ggml_cgraph * ggml_graph_view (struct ggml_context * ctx, struct ggml_cgraph * cgraph, int i0, int i1); + GGML_API void ggml_graph_cpy (struct ggml_cgraph * src, struct ggml_cgraph * dst); + GGML_API void ggml_graph_reset (struct ggml_cgraph * cgraph); // zero grads + GGML_API void ggml_graph_clear (struct ggml_cgraph * cgraph); + GGML_API size_t ggml_graph_overhead(void); + GGML_API size_t ggml_graph_overhead_custom(size_t size, bool grads); // ggml_graph_plan() has to be called before ggml_graph_compute() // when plan.work_size > 0, caller must allocate memory for plan.work_data GGML_API struct ggml_cplan ggml_graph_plan (struct ggml_cgraph * cgraph, int n_threads /*= GGML_DEFAULT_N_THREADS*/); - GGML_API int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan); - GGML_API void ggml_graph_reset (struct ggml_cgraph * cgraph); + GGML_API int ggml_graph_compute(struct ggml_cgraph * cgraph, 
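/*
 * A minimal sketch of the graph lifecycle these declarations define: the
 * graph is now allocated inside a context, and the caller owns the plan's
 * work buffer. Assumes <stdlib.h>, a context `ctx` sized via
 * ggml_graph_overhead_custom(), and a result tensor `out`; error handling
 * omitted.
 */
struct ggml_cgraph * gf = ggml_new_graph_custom(ctx, GGML_DEFAULT_GRAPH_SIZE, /*grads =*/ false);
ggml_build_forward_expand(gf, out);

struct ggml_cplan plan = ggml_graph_plan(gf, /*n_threads =*/ 4);
if (plan.work_size > 0) {
    plan.work_data = malloc(plan.work_size); // caller-allocated scratch
}
ggml_graph_compute(gf, &plan);
free(plan.work_data);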
struct ggml_cplan * cplan); // same as ggml_graph_compute() but the work data is allocated as a part of the context // note: the drawback of this API is that you must have ensured that the context has enough memory for the work data @@ -1752,8 +1761,8 @@ extern "C" { GGML_API struct ggml_tensor * ggml_graph_get_tensor(struct ggml_cgraph * cgraph, const char * name); - GGML_API void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname); - GGML_API struct ggml_cgraph ggml_graph_import(const char * fname, struct ggml_context ** ctx_data, struct ggml_context ** ctx_eval); + GGML_API void ggml_graph_export(const struct ggml_cgraph * cgraph, const char * fname); + GGML_API struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context ** ctx_data, struct ggml_context ** ctx_eval); // print info and performance information for the graph GGML_API void ggml_graph_print(const struct ggml_cgraph * cgraph); @@ -1816,6 +1825,8 @@ extern "C" { struct ggml_opt_params { enum ggml_opt_type type; + size_t graph_size; + int n_threads; // delta-based convergence test diff --git a/llama.cpp b/llama.cpp index a5f3876cc..76ee4ea23 100644 --- a/llama.cpp +++ b/llama.cpp @@ -91,6 +91,8 @@ #define LLAMA_ATTRIBUTE_FORMAT(...) #endif +#define LLAMA_MAX_NODES 4096 + // // logging // @@ -3618,7 +3620,7 @@ struct llm_build_context { } struct ggml_cgraph * build_llama() { - struct ggml_cgraph * gf = ggml_new_graph(ctx0); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); GGML_ASSERT(n_embd_head == hparams.n_rot); @@ -3730,7 +3732,7 @@ struct llm_build_context { } struct ggml_cgraph * build_baichuan() { - struct ggml_cgraph * gf = ggml_new_graph(ctx0); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -3850,7 +3852,7 @@ struct llm_build_context { } struct ggml_cgraph * build_falcon() { - struct ggml_cgraph * gf = ggml_new_graph(ctx0); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -3972,7 +3974,7 @@ struct llm_build_context { } struct ggml_cgraph * build_starcoder() { - struct ggml_cgraph * gf = ggml_new_graph(ctx0); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); struct ggml_tensor * cur; struct ggml_tensor * pos; @@ -4071,7 +4073,7 @@ struct llm_build_context { } struct ggml_cgraph * build_persimmon() { - struct ggml_cgraph * gf = ggml_new_graph(ctx0); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); const int64_t n_rot = n_embd_head / 2; @@ -4281,7 +4283,7 @@ struct llm_build_context { } struct ggml_cgraph * build_refact() { - struct ggml_cgraph * gf = ggml_new_graph(ctx0); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4372,7 +4374,7 @@ struct llm_build_context { } struct ggml_cgraph * build_bloom() { - struct ggml_cgraph * gf = ggml_new_graph(ctx0); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4466,7 +4468,7 @@ struct llm_build_context { } struct ggml_cgraph * build_mpt() { - struct ggml_cgraph * gf = ggml_new_graph(ctx0); + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -8208,7 +8210,7 @@ struct llama_context * llama_new_context_with_model( { 
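/*
 * A minimal sketch of the sizing rule applied in the hunks that follow:
 * with graphs bounded by the new local LLAMA_MAX_NODES instead of the
 * removed GGML_MAX_NODES, the compute buffer needs one tensor struct per
 * node plus the graph header, and the throwaway KV-copy context needs
 * exactly six tensor structs (kout3d, vout3d, k3d, v3d and the two
 * ggml_cpy results) plus one graph. Helper names are illustrative.
 */
static size_t llama_compute_buf_size(void) {
    return ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead();
}
static size_t llama_kv_copy_ctx_size(void) {
    return 6*ggml_tensor_overhead() + ggml_graph_overhead();
}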
static const size_t tensor_alignment = 32; // the compute buffer is used to store the tensor and graph structs, while the allocator buffer is used for the tensor data - ctx->buf_compute.resize(ggml_tensor_overhead()*GGML_MAX_NODES + ggml_graph_overhead()); + ctx->buf_compute.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); // create measure allocator ctx->alloc = ggml_allocr_new_measure(tensor_alignment); @@ -8597,8 +8599,8 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat if (kv_buf_size) { const size_t elt_size = ggml_element_size(kv_self.k); - ggml_context * cpy_ctx = ggml_init({ 4096, NULL, /* no_alloc */ true }); - ggml_cgraph gf{}; + ggml_context * cpy_ctx = ggml_init({ 6*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); + ggml_cgraph * gf = ggml_new_graph(cpy_ctx); ggml_tensor * kout3d = ggml_new_tensor_3d(cpy_ctx, kv_self.k->type, n_embd, kv_head, n_layer); std::vector kout3d_data(ggml_nbytes(kout3d), 0); @@ -8616,9 +8618,9 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat kv_head, n_embd, n_layer, elt_size*n_ctx, elt_size*n_ctx*n_embd, 0); - ggml_build_forward_expand(&gf, ggml_cpy(cpy_ctx, k3d, kout3d)); - ggml_build_forward_expand(&gf, ggml_cpy(cpy_ctx, v3d, vout3d)); - ggml_graph_compute_helper(ctx->work_buffer, &gf, /*n_threads*/ 1); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k3d, kout3d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v3d, vout3d)); + ggml_graph_compute_helper(ctx->work_buffer, gf, /*n_threads*/ 1); ggml_free(cpy_ctx); @@ -8725,8 +8727,8 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { const size_t elt_size = ggml_element_size(kv_self.k); - ggml_context * cpy_ctx = ggml_init({ 4096, NULL, /* no_alloc */ true }); - ggml_cgraph gf{}; + ggml_context * cpy_ctx = ggml_init({ 6*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); + ggml_cgraph * gf = ggml_new_graph(cpy_ctx); ggml_tensor * kin3d = ggml_new_tensor_3d(cpy_ctx, kv_self.k->type, n_embd, kv_head, n_layer); kin3d->data = (void *) inp; @@ -8744,9 +8746,9 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { kv_head, n_embd, n_layer, elt_size*n_ctx, elt_size*n_ctx*n_embd, 0); - ggml_build_forward_expand(&gf, ggml_cpy(cpy_ctx, kin3d, k3d)); - ggml_build_forward_expand(&gf, ggml_cpy(cpy_ctx, vin3d, v3d)); - ggml_graph_compute_helper(ctx->work_buffer, &gf, /*n_threads*/ 1); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin3d, k3d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin3d, v3d)); + ggml_graph_compute_helper(ctx->work_buffer, gf, /*n_threads*/ 1); ggml_free(cpy_ctx); } diff --git a/scripts/sync-ggml.sh b/scripts/sync-ggml.sh index 4311268bd..4024531b1 100755 --- a/scripts/sync-ggml.sh +++ b/scripts/sync-ggml.sh @@ -2,14 +2,20 @@ cp -rpv ../ggml/src/ggml.c ./ggml.c cp -rpv ../ggml/src/ggml-alloc.c ./ggml-alloc.c +cp -rpv ../ggml/src/ggml-backend-impl.h ./ggml-backend-impl.h cp -rpv ../ggml/src/ggml-backend.c ./ggml-backend.c -cp -rpv ../ggml/src/ggml-cuda.h ./ggml-cuda.h cp -rpv ../ggml/src/ggml-cuda.cu ./ggml-cuda.cu -cp -rpv ../ggml/src/ggml-opencl.h ./ggml-opencl.h -cp -rpv ../ggml/src/ggml-opencl.cpp ./ggml-opencl.cpp +cp -rpv ../ggml/src/ggml-cuda.h ./ggml-cuda.h +cp -rpv ../ggml/src/ggml-impl.h ./ggml-impl.h cp -rpv ../ggml/src/ggml-metal.h ./ggml-metal.h cp -rpv ../ggml/src/ggml-metal.m ./ggml-metal.m cp -rpv ../ggml/src/ggml-metal.metal ./ggml-metal.metal +cp -rpv 
../ggml/src/ggml-mpi.h ./ggml-mpi.h +cp -rpv ../ggml/src/ggml-mpi.c ./ggml-mpi.c +cp -rpv ../ggml/src/ggml-opencl.cpp ./ggml-opencl.cpp +cp -rpv ../ggml/src/ggml-opencl.h ./ggml-opencl.h +cp -rpv ../ggml/src/ggml-quants.c ./ggml-quants.c +cp -rpv ../ggml/src/ggml-quants.h ./ggml-quants.h cp -rpv ../ggml/include/ggml/ggml.h ./ggml.h cp -rpv ../ggml/include/ggml/ggml-alloc.h ./ggml-alloc.h cp -rpv ../ggml/include/ggml/ggml-backend.h ./ggml-backend.h diff --git a/tests/test-grad0.cpp b/tests/test-grad0.cpp index 0a559b27a..7fe9154dd 100644 --- a/tests/test-grad0.cpp +++ b/tests/test-grad0.cpp @@ -231,9 +231,10 @@ static bool check_gradient( printf("GGML_N_THREADS = %d\n", n_threads); } - struct ggml_cgraph * gf = ggml_build_forward_ctx(ctx0, f); - struct ggml_cgraph * gb = ggml_new_graph(ctx0); - *gb = *gf; + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, GGML_DEFAULT_GRAPH_SIZE, true); + struct ggml_cgraph * gb = ggml_new_graph_custom(ctx0, GGML_DEFAULT_GRAPH_SIZE, true); + ggml_build_forward_expand(gf, f); + ggml_graph_cpy(gf, gb); ggml_build_backward_expand(ctx0, gf, gb, false); ggml_graph_compute_with_ctx(ctx0, gf, n_threads); diff --git a/tests/test-opt.cpp b/tests/test-opt.cpp index bb8af5962..2c9997fca 100644 --- a/tests/test-opt.cpp +++ b/tests/test-opt.cpp @@ -109,10 +109,11 @@ int main(void) { struct ggml_tensor * d = ggml_sub(ctx, c, ab); struct ggml_tensor * e = ggml_sum(ctx, ggml_sqr(ctx, d)); - struct ggml_cgraph ge = ggml_build_forward(e); - ggml_graph_reset(&ge); + struct ggml_cgraph * ge = ggml_new_graph_custom(ctx, GGML_DEFAULT_GRAPH_SIZE, true); + ggml_build_forward_expand(ge, e); + ggml_graph_reset(ge); - ggml_graph_compute_with_ctx(ctx, &ge, /*n_threads*/ 1); + ggml_graph_compute_with_ctx(ctx, ge, /*n_threads*/ 1); const float fe = ggml_get_f32_1d(e, 0); printf("%s: e = %.4f\n", __func__, fe); @@ -121,9 +122,9 @@ int main(void) { ggml_opt(ctx, opt_params, e); - ggml_graph_reset(&ge); + ggml_graph_reset(ge); - ggml_graph_compute_with_ctx(ctx, &ge, /*n_threads*/ 1); + ggml_graph_compute_with_ctx(ctx, ge, /*n_threads*/ 1); const float fe_opt = ggml_get_f32_1d(e, 0); printf("%s: original e = %.4f\n", __func__, fe); From c049b37d7baf558944501705b91ac89b26ee3e41 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 13 Nov 2023 14:18:08 +0200 Subject: [PATCH 111/859] readme : update hot topics --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index af39e8c0e..c7d232778 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics -- ⚠️ **Upcoming change that might break functionality. Help with testing is needed:** https://github.com/ggerganov/llama.cpp/pull/3912 +- *No hot topics atm. 
Open to suggestions about what is hot today* ---- From 3d68f364f15778dc326f5024f2e5af1ad6dfddef Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 13 Nov 2023 16:55:52 +0200 Subject: [PATCH 112/859] ggml : sync (im2col, GPU conv, 32-bit arm compat) (#4060) ggml-ci --- ggml-cuda.cu | 106 ++++- ggml-impl.h | 6 - ggml-metal.h | 2 +- ggml-metal.m | 106 ++++- ggml-metal.metal | 108 ++++- ggml-quants.c | 241 +++++++---- ggml.c | 1073 +++++----------------------------------------- ggml.h | 19 +- 8 files changed, 586 insertions(+), 1075 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 163402446..7be63925f 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -4489,6 +4489,13 @@ static __device__ void cpy_1_f32_f16(const char * cxi, char * cdsti) { *dsti = __float2half(*xi); } +static __device__ void cpy_1_f16_f16(const char * cxi, char * cdsti) { + const half * xi = (const half *) cxi; + half * dsti = (half *) cdsti; + + *dsti = *xi; +} + template static __global__ void cpy_f32_f16(const char * cx, char * cdst, const int ne, const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, @@ -4742,6 +4749,25 @@ static __global__ void clamp_f32(const float * x, float * dst, const float min, dst[i] = x[i] < min ? min : (x[i] > max ? max : x[i]); } +static __global__ void im2col_f32_f16( + const float * x, half * dst, + int ofs0, int ofs1, int IW, int IH, int CHW, + int s0, int s1, int p0, int p1, int d0, int d1) { + const int iiw = blockIdx.z * s0 + threadIdx.z * d0 - p0; + const int iih = blockIdx.y * s1 + threadIdx.y * d1 - p1; + + const int offset_dst = + (threadIdx.x * gridDim.y * gridDim.z + blockIdx.y * gridDim.z + blockIdx.z) * CHW + + (blockIdx.x * (blockDim.y * blockDim.z) + threadIdx.y * blockDim.z + threadIdx.z); + + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { + dst[offset_dst] = __float2half(0.0f); + } else { + const int offset_src = threadIdx.x * ofs0 + blockIdx.x * ofs1; + dst[offset_dst] = __float2half(x[offset_src + iih * IW + iiw]); + } +} + template static void get_rows_cuda(const void * x, const int32_t * y, float * dst, const int nrows, const int ncols, cudaStream_t stream) { const dim3 block_dims(CUDA_GET_ROWS_BLOCK_SIZE, 1, 1); @@ -5642,6 +5668,16 @@ static void ggml_cpy_f32_f16_cuda( (cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12); } +static void ggml_cpy_f16_f16_cuda( + const char * cx, char * cdst, const int ne, + const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, + const int ne10, const int ne11, const int nb10, const int nb11, const int nb12, cudaStream_t stream) { + + const int num_blocks = (ne + CUDA_CPY_BLOCK_SIZE - 1) / CUDA_CPY_BLOCK_SIZE; + cpy_f32_f16<<>> + (cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12); +} + static void scale_f32_cuda(const float * x, float * dst, const float scale, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_SCALE_BLOCK_SIZE - 1) / CUDA_SCALE_BLOCK_SIZE; scale_f32<<>>(x, dst, scale, k); @@ -5725,6 +5761,15 @@ static void soft_max_f32_cuda(const float * x, float * dst, const int ncols_x, c soft_max_f32<<>>(x, dst, ncols_x); } +static void im2col_f32_f16_cuda(const float * x, half * dst, + int OH, int IW, int IH, int OW, int IC, + int KH, int KW, int N, int ofs0, int ofs1, + int s0, int s1, int p0, int p1, int d0, int d1, cudaStream_t stream) { + dim3 block_nums(IC, OH, OW); + dim3 block_dims(N, KH, KW); + im2col_f32_f16<<>>(x, dst, ofs0, ofs1, IW, IH, (IC * KH * KW), s0, s1, p0, p1, d0, d1); +} + // buffer 
pool for cuda #define MAX_CUDA_BUFFERS 256 @@ -6522,8 +6567,7 @@ inline void ggml_cuda_op_mul_mat_cublas( src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src1_as); to_fp16_cuda(src1_ddf_i, src1_as_f16, ne, stream); } - const half * src1_ptr = src1->type == GGML_TYPE_F16 ? (const half *) src1_ddq_i : src1_as_f16; - + const half * src1_ptr = src1->type == GGML_TYPE_F16 ? (const half *) src1_ddf_i : src1_as_f16; size_t dst_as = 0; half * dst_f16 = (half *) ggml_cuda_pool_malloc(row_diff*src1_ncols * sizeof(half), &dst_as); @@ -6698,6 +6742,45 @@ inline void ggml_cuda_op_alibi( (void) src1_dd; } +inline void ggml_cuda_op_im2col( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F16); + + const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t*)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t*)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t*)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t*)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t*)(dst->op_params))[5]; + + const bool is_2D = ((const int32_t*)(dst->op_params))[6] == 1; + + const int64_t N = src1->ne[is_2D ? 3 : 2]; + const int64_t IC = src1->ne[is_2D ? 2 : 1]; + const int64_t IH = is_2D ? src1->ne[1] : 1; + const int64_t IW = src1->ne[0]; + + const int64_t KH = is_2D ? src0->ne[1] : 1; + const int64_t KW = src0->ne[0]; + + const int64_t OH = is_2D ? dst->ne[2] : 1; + const int64_t OW = dst->ne[1]; + + const size_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; // nb is byte offset, src is type float32 + const size_t ofs1 = src1->nb[is_2D ? 
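/*
 * A reference sketch (plain C, one batch element) of the indexing the
 * im2col kernels in this patch implement; names mirror the variables in
 * the CUDA code around this point. Illustrative only -- the real work is
 * done on the GPU with (ic, oh, ow) as the block grid and (n, kh, kw) as
 * the thread block.
 */
static void im2col_ref(const float * src, float * dst,
                       int IC, int IH, int IW,
                       int KH, int KW, int OH, int OW,
                       int s0, int s1, int p0, int p1, int d0, int d1) {
    const int CHW = IC*KH*KW;
    for (int oh = 0; oh < OH; ++oh) {
        for (int ow = 0; ow < OW; ++ow) {
            for (int ic = 0; ic < IC; ++ic) {
                for (int kh = 0; kh < KH; ++kh) {
                    for (int kw = 0; kw < KW; ++kw) {
                        const int ih = oh*s1 + kh*d1 - p1; // input row of this tap
                        const int iw = ow*s0 + kw*d0 - p0; // input col of this tap
                        const int o  = (oh*OW + ow)*CHW + (ic*KH + kh)*KW + kw;
                        dst[o] = (ih < 0 || ih >= IH || iw < 0 || iw >= IW)
                               ? 0.0f                       // zero padding
                               : src[(ic*IH + ih)*IW + iw];
                    }
                }
            }
        }
    }
}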
2 : 1] / 4; // nb is byte offset, src is type float32 + + im2col_f32_f16_cuda(src1_dd, (half*) dst_dd, + OH, IW, IH, OW, IC, KH, KW, N, + ofs0, ofs1, s0, s1, p0, p1, d0, d1, main_stream); + + (void) src0; + (void) src0_dd; +} + inline void ggml_cuda_op_diag_mask_inf( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { @@ -7610,6 +7693,9 @@ static void ggml_cuda_cpy(const ggml_tensor * src0, const ggml_tensor * src1, gg } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F16) { ggml_cpy_f32_f16_cuda(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + } else if (src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F16) { + ggml_cpy_f16_f16_cuda(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, + ne10, ne11, nb10, nb11, nb12, main_stream); } else { fprintf(stderr, "%s: unsupported type combination (%s to %s)\n", __func__, ggml_type_name(src0->type), ggml_type_name(src1->type)); @@ -7641,6 +7727,10 @@ static void ggml_cuda_alibi(const ggml_tensor * src0, const ggml_tensor * src1, ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_alibi); } +void ggml_cuda_im2col(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_im2col); +} + static void ggml_cuda_nop(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { (void) src0; (void) src1; @@ -7934,6 +8024,15 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ return false; } + if (tensor->op == GGML_OP_MUL_MAT) { + if (tensor->src[0]->ne[3] != tensor->src[1]->ne[3]) { +#ifndef NDEBUG + fprintf(stderr, "%s: cannot compute %s: src0->ne[3] = %d, src1->ne[3] = %d - fallback to CPU\n", __func__, tensor->name, tensor->src[0]->ne[3], tensor->src[1]->ne[3]); +#endif + return false; + } + } + switch (tensor->op) { case GGML_OP_REPEAT: func = ggml_cuda_repeat; @@ -8012,6 +8111,9 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ case GGML_OP_ALIBI: func = ggml_cuda_alibi; break; + case GGML_OP_IM2COL: + func = ggml_cuda_im2col; + break; default: return false; } diff --git a/ggml-impl.h b/ggml-impl.h index d88f26144..06c07339e 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -39,12 +39,6 @@ extern "C" { #endif #endif -#undef MIN -#undef MAX - -#define MIN(a, b) ((a) < (b) ? (a) : (b)) -#define MAX(a, b) ((a) > (b) ? 
(a) : (b)) - // 16-bit float // on Arm, we use __fp16 // on x86, we use uint16_t diff --git a/ggml-metal.h b/ggml-metal.h index 096b844e3..be2731f8b 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -26,7 +26,7 @@ #include // max memory buffers that can be mapped to the device -#define GGML_METAL_MAX_BUFFERS 16 +#define GGML_METAL_MAX_BUFFERS 64 #define GGML_METAL_MAX_COMMAND_BUFFERS 32 struct ggml_tensor; diff --git a/ggml-metal.m b/ggml-metal.m index c2cda0bf5..3d22b0b27 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -86,6 +86,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(rms_norm); GGML_METAL_DECL_KERNEL(norm); GGML_METAL_DECL_KERNEL(mul_mv_f32_f32); + GGML_METAL_DECL_KERNEL(mul_mv_f16_f16); GGML_METAL_DECL_KERNEL(mul_mv_f16_f32); GGML_METAL_DECL_KERNEL(mul_mv_f16_f32_1row); GGML_METAL_DECL_KERNEL(mul_mv_f16_f32_l4); @@ -114,6 +115,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(rope_f32); GGML_METAL_DECL_KERNEL(rope_f16); GGML_METAL_DECL_KERNEL(alibi_f32); + GGML_METAL_DECL_KERNEL(im2col_f16); GGML_METAL_DECL_KERNEL(cpy_f32_f16); GGML_METAL_DECL_KERNEL(cpy_f32_f32); GGML_METAL_DECL_KERNEL(cpy_f16_f16); @@ -126,7 +128,7 @@ struct ggml_metal_context { // MSL code // TODO: move the contents here when ready // for now it is easier to work in a separate file -static NSString * const msl_library_source = @"see metal.metal"; +//static NSString * const msl_library_source = @"see metal.metal"; // Here to assist with NSBundle Path Hack @interface GGMLMetalClass : NSObject @@ -142,7 +144,8 @@ void ggml_metal_log_set_callback(ggml_log_callback log_callback, void * user_dat ggml_metal_log_user_data = user_data; } -static void ggml_metal_log(enum ggml_log_level level, const char* format, ...){ +GGML_ATTRIBUTE_FORMAT(2, 3) +static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ if (ggml_metal_log_callback != NULL) { va_list args; va_start(args, format); @@ -210,7 +213,13 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { } else { GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); - NSString * sourcePath = [bundle pathForResource:@"ggml-metal" ofType:@"metal"]; + NSString * sourcePath; + NSString * ggmlMetalPathResources = [[NSProcessInfo processInfo].environment objectForKey:@"GGML_METAL_PATH_RESOURCES"]; + if (ggmlMetalPathResources) { + sourcePath = [ggmlMetalPathResources stringByAppendingPathComponent:@"ggml-metal.metal"]; + } else { + sourcePath = [bundle pathForResource:@"ggml-metal" ofType:@"metal"]; + } if (sourcePath == nil) { GGML_METAL_LOG_WARN("%s: error: could not use bundle path to find ggml-metal.metal, falling back to trying cwd\n", __func__); sourcePath = @"ggml-metal.metal"; @@ -281,6 +290,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(rms_norm); GGML_METAL_ADD_KERNEL(norm); GGML_METAL_ADD_KERNEL(mul_mv_f32_f32); + GGML_METAL_ADD_KERNEL(mul_mv_f16_f16); GGML_METAL_ADD_KERNEL(mul_mv_f16_f32); GGML_METAL_ADD_KERNEL(mul_mv_f16_f32_1row); GGML_METAL_ADD_KERNEL(mul_mv_f16_f32_l4); @@ -311,6 +321,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(rope_f32); GGML_METAL_ADD_KERNEL(rope_f16); GGML_METAL_ADD_KERNEL(alibi_f32); + GGML_METAL_ADD_KERNEL(im2col_f16); GGML_METAL_ADD_KERNEL(cpy_f32_f16); GGML_METAL_ADD_KERNEL(cpy_f32_f32); GGML_METAL_ADD_KERNEL(cpy_f16_f16); @@ -329,7 +340,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { // https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf for (int i = MTLGPUFamilyApple1 
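/*
 * A plain-C sketch of the lookup the Metal init code above adds: when the
 * GGML_METAL_PATH_RESOURCES environment variable is set, ggml-metal.metal
 * is loaded from that directory instead of the app bundle, which helps
 * command-line builds that have no bundle. Assumes <stdio.h> and
 * <stdlib.h>; illustrative only.
 */
static const char * metal_source_path(char * buf, size_t buf_size) {
    const char * res = getenv("GGML_METAL_PATH_RESOURCES");
    if (res != NULL) {
        snprintf(buf, buf_size, "%s/ggml-metal.metal", res);
        return buf;
    }
    return NULL; // fall back to the bundle lookup
}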
+ 20; i >= MTLGPUFamilyApple1; --i) { if ([ctx->device supportsFamily:i]) { - GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - MTLGPUFamilyApple1 + 1, i); + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - (int) MTLGPUFamilyApple1 + 1, i); break; } } @@ -380,6 +391,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(rms_norm); GGML_METAL_DEL_KERNEL(norm); GGML_METAL_DEL_KERNEL(mul_mv_f32_f32); + GGML_METAL_DEL_KERNEL(mul_mv_f16_f16); GGML_METAL_DEL_KERNEL(mul_mv_f16_f32); GGML_METAL_DEL_KERNEL(mul_mv_f16_f32_1row); GGML_METAL_DEL_KERNEL(mul_mv_f16_f32_l4); @@ -410,6 +422,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(rope_f32); GGML_METAL_DEL_KERNEL(rope_f16); GGML_METAL_DEL_KERNEL(alibi_f32); + GGML_METAL_DEL_KERNEL(im2col_f16); GGML_METAL_DEL_KERNEL(cpy_f32_f16); GGML_METAL_DEL_KERNEL(cpy_f32_f32); GGML_METAL_DEL_KERNEL(cpy_f16_f16); @@ -467,6 +480,10 @@ static id ggml_metal_get_buffer(struct ggml_metal_context * ctx, stru const int64_t tsize = ggml_nbytes(t); + if (t->buffer && t->buffer->backend && t->buffer->backend->context) { + ctx = t->buffer->backend->context; + } + // find the view that contains the tensor fully for (int i = 0; i < ctx->n_buffers; ++i) { const int64_t ioffs = (int64_t) t->data - (int64_t) ctx->buffers[i].data; @@ -567,7 +584,7 @@ bool ggml_metal_add_buffer( ctx->device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); if (ctx->device.currentAllocatedSize > ctx->device.recommendedMaxWorkingSetSize) { - GGML_METAL_LOG_WARN(", warning: current allocated size is greater than the recommended max working set size\n", __func__); + GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); } else { GGML_METAL_LOG_INFO("\n"); } @@ -1024,7 +1041,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setThreadgroupMemoryLength:MAX(16, nth/32*sizeof(float)) atIndex:0]; + [encoder setThreadgroupMemoryLength:GGML_PAD(nth/32*sizeof(float), 16) atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -1133,6 +1150,7 @@ void ggml_metal_graph_compute( switch (src0t) { case GGML_TYPE_F32: { + GGML_ASSERT(src1t == GGML_TYPE_F32); [encoder setComputePipelineState:ctx->pipeline_mul_mv_f32_f32]; nrows = 4; } break; @@ -1140,13 +1158,18 @@ void ggml_metal_graph_compute( { nth0 = 32; nth1 = 1; - if (ne11 * ne12 < 4) { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32_1row]; - } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32_l4]; - nrows = ne11; + if (src1t == GGML_TYPE_F32) { + if (ne11 * ne12 < 4) { + [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32_1row]; + } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { + [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32_l4]; + nrows = ne11; + } else { + [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32]; + nrows = 4; + } } else { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32]; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f16]; nrows = 4; } } break; @@ -1336,7 +1359,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; [encoder 
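/*
 * The hunks above replace MAX(16, len) with GGML_PAD(len, 16) for the
 * threadgroup memory length: MAX only clamps small values from below,
 * while GGML_PAD rounds every length up to a 16-byte multiple, which
 * Metal requires. A sketch of the rounding (GGML_PAD in ggml.h should
 * have this shape):
 */
#define PAD_UP(x, n) (((x) + (n) - 1) / (n) * (n)) // round x up to a multiple of n
// e.g. PAD_UP(40, 16) == 48, whereas MAX(16, 40) == 40 (unaligned)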
setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; [encoder setBytes:&eps length:sizeof( float) atIndex:4]; - [encoder setThreadgroupMemoryLength:nth/32*sizeof(float) atIndex:0]; + [encoder setThreadgroupMemoryLength:GGML_PAD(nth/32*sizeof(float), 16) atIndex:0]; const int64_t nrows = ggml_nrows(src0); @@ -1355,7 +1378,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; [encoder setBytes:&eps length:sizeof( float) atIndex:4]; - [encoder setThreadgroupMemoryLength:MAX(16, nth*sizeof(float)) atIndex:0]; + [encoder setThreadgroupMemoryLength:GGML_PAD(nth*sizeof(float), 16) atIndex:0]; const int64_t nrows = ggml_nrows(src0); @@ -1410,8 +1433,7 @@ void ggml_metal_graph_compute( const int n_past = ((int32_t *) dst->op_params)[0]; const int n_dims = ((int32_t *) dst->op_params)[1]; const int mode = ((int32_t *) dst->op_params)[2]; - // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal - const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; + const int n_orig_ctx = ((int32_t *) dst->op_params)[3]; float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); @@ -1459,6 +1481,58 @@ void ggml_metal_graph_compute( [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; + case GGML_OP_IM2COL: + { + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F16); + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int32_t N = src1->ne[is_2D ? 3 : 2]; + const int32_t IC = src1->ne[is_2D ? 2 : 1]; + const int32_t IH = is_2D ? src1->ne[1] : 1; + const int32_t IW = src1->ne[0]; + + const int32_t KH = is_2D ? src0->ne[1] : 1; + const int32_t KW = src0->ne[0]; + + const int32_t OH = is_2D ? dst->ne[2] : 1; + const int32_t OW = dst->ne[1]; + + const int32_t CHW = IC * KH * KW; + + const int32_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; + const int32_t ofs1 = src1->nb[is_2D ? 
2 : 1] / 4; + + switch (src0->type) { + case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; + case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_im2col_f16]; break; + default: GGML_ASSERT(false); + }; + + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ofs0 length:sizeof( int32_t) atIndex:2]; + [encoder setBytes:&ofs1 length:sizeof( int32_t) atIndex:3]; + [encoder setBytes:&IW length:sizeof( int32_t) atIndex:4]; + [encoder setBytes:&IH length:sizeof( int32_t) atIndex:5]; + [encoder setBytes:&CHW length:sizeof( int32_t) atIndex:6]; + [encoder setBytes:&s0 length:sizeof( int32_t) atIndex:7]; + [encoder setBytes:&s1 length:sizeof( int32_t) atIndex:8]; + [encoder setBytes:&p0 length:sizeof( int32_t) atIndex:9]; + [encoder setBytes:&p1 length:sizeof( int32_t) atIndex:10]; + [encoder setBytes:&d0 length:sizeof( int32_t) atIndex:11]; + [encoder setBytes:&d1 length:sizeof( int32_t) atIndex:12]; + + [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; + } break; case GGML_OP_DUP: case GGML_OP_CPY: case GGML_OP_CONT: diff --git a/ggml-metal.metal b/ggml-metal.metal index 7c35f23a7..5d1357cd7 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -792,7 +792,7 @@ kernel void kernel_mul_mv_f32_f32( constant int64_t & ne0, constant int64_t & ne1, uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]]) { + uint tiisg[[thread_index_in_simdgroup]]) { const int64_t r0 = tgpig.x; const int64_t rb = tgpig.y*N_F32_F32; @@ -844,6 +844,79 @@ kernel void kernel_mul_mv_f32_f32( } } +#define N_F16_F16 4 + +kernel void kernel_mul_mv_f16_f16( + device const char * src0, + device const char * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]]) { + + const int64_t r0 = tgpig.x; + const int64_t rb = tgpig.y*N_F16_F16; + const int64_t im = tgpig.z; + + device const half * x = (device const half *) (src0 + r0*nb01 + im/(ne12/ne02)*nb02); + + if (ne00 < 128) { + for (int row = 0; row < N_F16_F16; ++row) { + int r1 = rb + row; + if (r1 >= ne11) { + break; + } + + device const half * y = (device const half *) (src1 + r1*nb11 + im*nb12); + + float sumf = 0; + for (int i = tiisg; i < ne00; i += 32) { + sumf += (half) x[i] * (half) y[i]; + } + + float all_sum = simd_sum(sumf); + if (tiisg == 0) { + dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum; + } + } + } else { + device const half4 * x4 = (device const half4 *)x; + for (int row = 0; row < N_F16_F16; ++row) { + int r1 = rb + row; + if (r1 >= ne11) { + break; + } + + device const half * y = (device const half *) (src1 + r1*nb11 + im*nb12); + device const half4 * y4 = (device const half4 *) y; + + float sumf = 0; + for (int i = tiisg; i < ne00/4; i += 32) { + for (int k = 0; k < 4; ++k) sumf += (half) x4[i][k] * y4[i][k]; + } + + float all_sum = simd_sum(sumf); + if (tiisg == 0) { + for (int i = 4*(ne00/4); i < ne00; ++i) all_sum += (half) x[i] * y[i]; + dst[im*ne1*ne0 + r1*ne0 + r0] = all_sum; + } + } + } +} + kernel void kernel_mul_mv_f16_f32_1row( 
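/*
 * A scalar sketch of the SIMD-group pattern kernel_mul_mv_f16_f16 above
 * uses: each of the 32 lanes (tiisg) strides through the row, simd_sum()
 * folds the 32 partial sums, and lane 0 writes the result. The plain-C
 * analogue below is illustrative; on the GPU the lanes run concurrently.
 */
static float dot_row_lanewise(const float * x, const float * y, int n) {
    float partial[32] = {0};                 // one accumulator per lane
    for (int lane = 0; lane < 32; ++lane) {
        for (int i = lane; i < n; i += 32) { // the per-lane strided loop
            partial[lane] += x[i]*y[i];
        }
    }
    float sum = 0.0f;                        // simd_sum(): fold the partials
    for (int lane = 0; lane < 32; ++lane) {
        sum += partial[lane];
    }
    return sum;
}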
device const char * src0, device const char * src1, @@ -1229,6 +1302,39 @@ kernel void kernel_rope( template [[host_name("kernel_rope_f32")]] kernel rope_t kernel_rope; template [[host_name("kernel_rope_f16")]] kernel rope_t kernel_rope; +kernel void kernel_im2col_f16( + device const float * x, + device half * dst, + constant int32_t & ofs0, + constant int32_t & ofs1, + constant int32_t & IW, + constant int32_t & IH, + constant int32_t & CHW, + constant int32_t & s0, + constant int32_t & s1, + constant int32_t & p0, + constant int32_t & p1, + constant int32_t & d0, + constant int32_t & d1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]) { + const int32_t iiw = tgpig[2] * s0 + tpitg[2] * d0 - p0; + const int32_t iih = tgpig[1] * s1 + tpitg[1] * d1 - p1; + + const int32_t offset_dst = + (tpitg[0] * tgpg[1] * tgpg[2] + tgpig[1] * tgpg[2] + tgpig[2]) * CHW + + (tgpig[0] * (ntg[1] * ntg[2]) + tpitg[1] * ntg[2] + tpitg[2]); + + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { + dst[offset_dst] = 0.0f; + } else { + const int32_t offset_src = tpitg[0] * ofs0 + tgpig[0] * ofs1; + dst[offset_dst] = x[offset_src + iih * IW + iiw]; + } +} + kernel void kernel_cpy_f16_f16( device const half * src0, device half * dst, diff --git a/ggml-quants.c b/ggml-quants.c index 740be6dc5..a48eda732 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -14,26 +14,6 @@ // #include -#if !defined(__aarch64__) -inline static int32_t vaddvq_s16(int16x8_t v) { - return - (int32_t)vgetq_lane_s16(v, 0) + (int32_t)vgetq_lane_s16(v, 1) + - (int32_t)vgetq_lane_s16(v, 2) + (int32_t)vgetq_lane_s16(v, 3) + - (int32_t)vgetq_lane_s16(v, 4) + (int32_t)vgetq_lane_s16(v, 5) + - (int32_t)vgetq_lane_s16(v, 6) + (int32_t)vgetq_lane_s16(v, 7); -} - -inline static int16x8_t vpaddq_s16(int16x8_t a, int16x8_t b) { - int16x4_t a0 = vpadd_s16(vget_low_s16(a), vget_high_s16(a)); - int16x4_t b0 = vpadd_s16(vget_low_s16(b), vget_high_s16(b)); - return vcombine_s16(a0, b0); -} - -inline static int32_t vaddvq_s32(int32x4_t v) { - return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); -} -#endif - #else #ifdef __wasm_simd128__ @@ -47,13 +27,15 @@ inline static int32_t vaddvq_s32(int32x4_t v) { #if defined(_MSC_VER) || defined(__MINGW32__) #include #else -#if !defined(__riscv) && !defined(__s390__) +#if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) || defined(__SSE3__) +#if !defined(__riscv) #include #endif #endif #endif #endif #endif +#endif #ifdef __riscv_v_intrinsic #include @@ -61,6 +43,7 @@ inline static int32_t vaddvq_s32(int32x4_t v) { #undef MIN #undef MAX + #define MIN(a, b) ((a) < (b) ? (a) : (b)) #define MAX(a, b) ((a) > (b) ? 
(a) : (b)) @@ -283,9 +266,31 @@ static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 #endif // defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) #if defined(__ARM_NEON) - #if !defined(__aarch64__) +// 64-bit compatibility + +// vaddvq_s16 +// vpaddq_s16 +// vaddvq_s32 +// vaddvq_f32 +// vmaxvq_f32 +// vcvtnq_s32_f32 + +inline static int32_t vaddvq_s16(int16x8_t v) { + return + (int32_t)vgetq_lane_s16(v, 0) + (int32_t)vgetq_lane_s16(v, 1) + + (int32_t)vgetq_lane_s16(v, 2) + (int32_t)vgetq_lane_s16(v, 3) + + (int32_t)vgetq_lane_s16(v, 4) + (int32_t)vgetq_lane_s16(v, 5) + + (int32_t)vgetq_lane_s16(v, 6) + (int32_t)vgetq_lane_s16(v, 7); +} + +inline static int16x8_t vpaddq_s16(int16x8_t a, int16x8_t b) { + int16x4_t a0 = vpadd_s16(vget_low_s16(a), vget_high_s16(a)); + int16x4_t b0 = vpadd_s16(vget_low_s16(b), vget_high_s16(b)); + return vcombine_s16(a0, b0); +} + inline static int32_t vaddvq_s32(int32x4_t v) { return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); } @@ -311,6 +316,96 @@ inline static int32x4_t vcvtnq_s32_f32(float32x4_t v) { return res; } +// vld1q_s16_x2 +// vld1q_u8_x2 +// vld1q_u8_x4 +// vld1q_s8_x2 +// vld1q_s8_x4 +// TODO: double-check these work correctly + +typedef struct ggml_int16x8x2_t { + int16x8_t val[2]; +} ggml_int16x8x2_t; + +inline static ggml_int16x8x2_t ggml_vld1q_s16_x2(const int16_t * ptr) { + ggml_int16x8x2_t res; + + res.val[0] = vld1q_s16(ptr + 0); + res.val[1] = vld1q_s16(ptr + 8); + + return res; +} + +typedef struct ggml_uint8x16x2_t { + uint8x16_t val[2]; +} ggml_uint8x16x2_t; + +inline static ggml_uint8x16x2_t ggml_vld1q_u8_x2(const uint8_t * ptr) { + ggml_uint8x16x2_t res; + + res.val[0] = vld1q_u8(ptr + 0); + res.val[1] = vld1q_u8(ptr + 16); + + return res; +} + +typedef struct ggml_uint8x16x4_t { + uint8x16_t val[4]; +} ggml_uint8x16x4_t; + +inline static ggml_uint8x16x4_t ggml_vld1q_u8_x4(const uint8_t * ptr) { + ggml_uint8x16x4_t res; + + res.val[0] = vld1q_u8(ptr + 0); + res.val[1] = vld1q_u8(ptr + 16); + res.val[2] = vld1q_u8(ptr + 32); + res.val[3] = vld1q_u8(ptr + 48); + + return res; +} + +typedef struct ggml_int8x16x2_t { + int8x16_t val[2]; +} ggml_int8x16x2_t; + +inline static ggml_int8x16x2_t ggml_vld1q_s8_x2(const int8_t * ptr) { + ggml_int8x16x2_t res; + + res.val[0] = vld1q_s8(ptr + 0); + res.val[1] = vld1q_s8(ptr + 16); + + return res; +} + +typedef struct ggml_int8x16x4_t { + int8x16_t val[4]; +} ggml_int8x16x4_t; + +inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { + ggml_int8x16x4_t res; + + res.val[0] = vld1q_s8(ptr + 0); + res.val[1] = vld1q_s8(ptr + 16); + res.val[2] = vld1q_s8(ptr + 32); + res.val[3] = vld1q_s8(ptr + 48); + + return res; +} + +#else + +#define ggml_int16x8x2_t int16x8x2_t +#define ggml_uint8x16x2_t uint8x16x2_t +#define ggml_uint8x16x4_t uint8x16x4_t +#define ggml_int8x16x2_t int8x16x2_t +#define ggml_int8x16x4_t int8x16x4_t + +#define ggml_vld1q_s16_x2 vld1q_s16_x2 +#define ggml_vld1q_u8_x2 vld1q_u8_x2 +#define ggml_vld1q_u8_x4 vld1q_u8_x4 +#define ggml_vld1q_s8_x2 vld1q_s8_x2 +#define ggml_vld1q_s8_x4 vld1q_s8_x4 + #endif #endif @@ -3557,7 +3652,7 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t vzero = vdupq_n_s32(0); #endif - int8x16x2_t q2bytes; + ggml_int8x16x2_t q2bytes; uint8_t aux[16]; float sum = 0; @@ -3576,8 +3671,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri vst1q_u8(aux, scales); 
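/*
 * A minimal usage sketch for the ggml_vld1q_* wrappers introduced above:
 * on AArch64 they alias the native vld1q_*_x2/_x4 intrinsics, while on
 * 32-bit ARM they expand to the explicit two-load fallbacks, so kernels
 * like the ones around this point read identically on both. The helper
 * is illustrative, not part of ggml.
 */
static int32_t sum_32_int8(const int8_t * p) {
    const ggml_int8x16x2_t v = ggml_vld1q_s8_x2(p);  // one name, both ABIs
    const int16x8_t s = vaddq_s16(vpaddlq_s8(v.val[0]), vpaddlq_s8(v.val[1]));
    return vaddvq_s16(s);                            // shimmed on armv7 above
}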
const uint8x16_t mins = vshrq_n_u8(mins_and_scales, 4); - const int16x8x2_t q8sums = vld1q_s16_x2(y[i].bsums); - const int16x8x2_t mins16 = {vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(mins))), vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(mins)))}; + const ggml_int16x8x2_t q8sums = ggml_vld1q_s16_x2(y[i].bsums); + const ggml_int16x8x2_t mins16 = {vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(mins))), vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(mins)))}; const int32x4_t s0 = vaddq_s32(vmull_s16(vget_low_s16 (mins16.val[0]), vget_low_s16 (q8sums.val[0])), vmull_s16(vget_high_s16(mins16.val[0]), vget_high_s16(q8sums.val[0]))); const int32x4_t s1 = vaddq_s32(vmull_s16(vget_low_s16 (mins16.val[1]), vget_low_s16 (q8sums.val[1])), @@ -3605,7 +3700,7 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri #endif #define SHIFT_MULTIPLY_ACCUM_WITH_SCALE(shift, index)\ - q8bytes = vld1q_s8_x2(q8); q8 += 32;\ + q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32;\ q2bytes.val[0] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits.val[0], (shift)), m3));\ q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits.val[1], (shift)), m3));\ MULTIPLY_ACCUM_WITH_SCALE((index)); @@ -3613,9 +3708,9 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri for (int j = 0; j < QK_K/128; ++j) { - const uint8x16x2_t q2bits = vld1q_u8_x2(q2); q2 += 32; + const ggml_uint8x16x2_t q2bits = ggml_vld1q_u8_x2(q2); q2 += 32; - int8x16x2_t q8bytes = vld1q_s8_x2(q8); q8 += 32; + ggml_int8x16x2_t q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q2bytes.val[0] = vreinterpretq_s8_u8(vandq_u8(q2bits.val[0], m3)); q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(q2bits.val[1], m3)); MULTIPLY_ACCUM_WITH_SCALE(0); @@ -3949,7 +4044,7 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t vzero = vdupq_n_s32(0); #endif - int8x16x4_t q2bytes; + ggml_int8x16x4_t q2bytes; uint32_t aux32[2]; const uint8_t * scales = (const uint8_t *)aux32; @@ -3974,7 +4069,7 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const uint8x16_t q2bits = vld1q_u8(q2); - const int8x16x4_t q8bytes = vld1q_s8_x4(q8); + const ggml_int8x16x4_t q8bytes = ggml_vld1q_s8_x4(q8); q2bytes.val[0] = vreinterpretq_s8_u8(vandq_u8(q2bits, m3)); q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 2), m3)); @@ -4238,7 +4333,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri const uint8x16_t m3 = vshlq_n_u8(m0, 3); const int8_t m32 = 32; - int8x16x4_t q3bytes; + ggml_int8x16x4_t q3bytes; float sum = 0; @@ -4250,9 +4345,9 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri const uint8_t * restrict qh = x[i].hmask; const int8_t * restrict q8 = y[i].qs; - uint8x16x2_t qhbits = vld1q_u8_x2(qh); + ggml_uint8x16x2_t qhbits = ggml_vld1q_u8_x2(qh); - uint8x16x4_t q3h; + ggml_uint8x16x4_t q3h; int32_t isum = 0; @@ -4268,9 +4363,9 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri for (int j = 0; j < QK_K/128; ++j) { - const uint8x16x2_t q3bits = vld1q_u8_x2(q3); q3 += 32; - const int8x16x4_t q8bytes_1 = vld1q_s8_x4(q8); q8 += 64; - const int8x16x4_t q8bytes_2 = vld1q_s8_x4(q8); q8 += 64; + const ggml_uint8x16x2_t q3bits = ggml_vld1q_u8_x2(q3); q3 += 32; + const ggml_int8x16x4_t q8bytes_1 = ggml_vld1q_s8_x4(q8); q8 += 64; + const ggml_int8x16x4_t q8bytes_2 = ggml_vld1q_s8_x4(q8); q8 += 64; q3h.val[0] = vshlq_n_u8(vbicq_u8(m0, qhbits.val[0]), 2); q3h.val[1] = 
vshlq_n_u8(vbicq_u8(m0, qhbits.val[1]), 2); @@ -4772,7 +4867,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri const uint8x16_t m3b = vdupq_n_u8(0x3); const uint8x16_t mh = vdupq_n_u8(4); - int8x16x4_t q3bytes; + ggml_int8x16x4_t q3bytes; uint16_t aux16[2]; int8_t * scales = (int8_t *)aux16; @@ -4781,11 +4876,11 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri for (int i = 0; i < nb; ++i) { - uint8x16x4_t q3h; + ggml_uint8x16x4_t q3h; const uint8x8_t hbits = vld1_u8(x[i].hmask); const uint8x16_t q3bits = vld1q_u8(x[i].qs); - const int8x16x4_t q8bytes = vld1q_s8_x4(y[i].qs); + const ggml_int8x16x4_t q8bytes = ggml_vld1q_s8_x4(y[i].qs); const uint16_t a = *(const uint16_t *)x[i].scales; aux16[0] = a & 0x0f0f; @@ -5134,8 +5229,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t mzero = vdupq_n_s32(0); #endif - int8x16x2_t q4bytes; - int8x16x2_t q8bytes; + ggml_int8x16x2_t q4bytes; + ggml_int8x16x2_t q8bytes; float sumf = 0; @@ -5170,17 +5265,17 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri for (int j = 0; j < QK_K/64; ++j) { - const uint8x16x2_t q4bits = vld1q_u8_x2(q4); q4 += 32; + const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); q4 += 32; #ifdef __ARM_FEATURE_DOTPROD - q8bytes = vld1q_s8_x2(q8); q8 += 32; + q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); const int32x4_t p1 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); sumi1 += vaddvq_s32(p1) * scales[2*j+0]; - q8bytes = vld1q_s8_x2(q8); q8 += 32; + q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); @@ -5188,7 +5283,7 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri sumi2 += vaddvq_s32(p2) * scales[2*j+1]; #else - q8bytes = vld1q_s8_x2(q8); q8 += 32; + q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), @@ -5197,7 +5292,7 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[1]))); sumi1 += vaddvq_s16(vaddq_s16(p0, p1)) * scales[2*j+0]; - q8bytes = vld1q_s8_x2(q8); q8 += 32; + q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), @@ -5512,8 +5607,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri float sumf = 0; - int8x16x2_t q4bytes; - int8x16x4_t q8bytes; + ggml_int8x16x2_t q4bytes; + ggml_int8x16x4_t q8bytes; float sum_mins = 0.f; @@ -5534,10 +5629,10 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const float d = y[i].d * (float)x[i].d[0]; - const uint8x16x2_t q4bits = vld1q_u8_x2(q4); + const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); #ifdef __ARM_FEATURE_DOTPROD - q8bytes = vld1q_s8_x4(q8); + q8bytes = ggml_vld1q_s8_x4(q8); 
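/*
 * A side-by-side sketch of the two accumulation paths these q*_K kernels
 * select between: with the dot-product extension a single vdotq_s32 does
 * 16 int8 multiply-accumulates, while the fallback widens through
 * vmull_s8/vmlal_s8 before reducing. Illustrative helper, assumes
 * <arm_neon.h>.
 */
static int32_t dot_16_int8(const int8x16_t a, const int8x16_t b) {
#if defined(__ARM_FEATURE_DOTPROD)
    return vaddvq_s32(vdotq_s32(vdupq_n_s32(0), a, b));
#else
    int16x8_t p = vmull_s8(vget_low_s8(a),  vget_low_s8(b));
    p = vmlal_s8(p, vget_high_s8(a), vget_high_s8(b));
    return vaddvq_s32(vpaddlq_s16(p)); // widen to 32-bit before the sum
#endif
}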
q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); @@ -5551,7 +5646,7 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const int32_t sumi2 = vaddvq_s32(p2) * scales[1]; #else - q8bytes = vld1q_s8_x4(q8); + q8bytes = ggml_vld1q_s8_x4(q8); q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), @@ -5785,7 +5880,7 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t mzero = vdupq_n_s32(0); #endif - int8x16x4_t q5bytes; + ggml_int8x16x4_t q5bytes; float sumf = 0; @@ -5815,16 +5910,16 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const uint8_t * restrict qh = x[i].qh; const int8_t * restrict q8 = y[i].qs; - uint8x16x2_t qhbits = vld1q_u8_x2(qh); + ggml_uint8x16x2_t qhbits = ggml_vld1q_u8_x2(qh); - uint8x16x4_t q5h; + ggml_uint8x16x4_t q5h; int32_t sumi = 0; for (int j = 0; j < QK_K/64; ++j) { - const uint8x16x2_t q5bits = vld1q_u8_x2(q5); q5 += 32; - const int8x16x4_t q8bytes = vld1q_s8_x4(q8); q8 += 64; + const ggml_uint8x16x2_t q5bits = ggml_vld1q_u8_x2(q5); q5 += 32; + const ggml_int8x16x4_t q8bytes = ggml_vld1q_s8_x4(q8); q8 += 64; q5h.val[0] = vshlq_n_u8(vandq_u8(mone, qhbits.val[0]), 4); q5h.val[1] = vshlq_n_u8(vandq_u8(mone, qhbits.val[1]), 4); @@ -6218,8 +6313,8 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t mzero = vdupq_n_s32(0); #endif - int8x16x4_t q5bytes; - uint8x16x4_t q5h; + ggml_int8x16x4_t q5bytes; + ggml_uint8x16x4_t q5h; float sumf = 0; @@ -6234,8 +6329,8 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const uint8x8_t qhbits = vld1_u8(qh); - const uint8x16x2_t q5bits = vld1q_u8_x2(q5); - const int8x16x4_t q8bytes = vld1q_s8_x4(q8); + const ggml_uint8x16x2_t q5bits = ggml_vld1q_u8_x2(q5); + const ggml_int8x16x4_t q8bytes = ggml_vld1q_s8_x4(q8); const uint8x16_t htmp = vcombine_u8(qhbits, vshr_n_u8(qhbits, 1)); q5h.val[0] = vbicq_u8(mh, vshlq_n_u8(htmp, 4)); @@ -6511,8 +6606,8 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const uint8x16_t mone = vdupq_n_u8(3); - int8x16x4_t q6bytes; - uint8x16x4_t q6h; + ggml_int8x16x4_t q6bytes; + ggml_uint8x16x4_t q6h; for (int i = 0; i < nb; ++i) { @@ -6524,9 +6619,9 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const int8_t * restrict scale = x[i].scales; - const int16x8x2_t q8sums = vld1q_s16_x2(y[i].bsums); + const ggml_int16x8x2_t q8sums = ggml_vld1q_s16_x2(y[i].bsums); const int8x16_t scales = vld1q_s8(scale); - const int16x8x2_t q6scales = {vmovl_s8(vget_low_s8(scales)), vmovl_s8(vget_high_s8(scales))}; + const ggml_int16x8x2_t q6scales = {vmovl_s8(vget_low_s8(scales)), vmovl_s8(vget_high_s8(scales))}; const int32x4_t prod = vaddq_s32(vaddq_s32(vmull_s16(vget_low_s16 (q8sums.val[0]), vget_low_s16 (q6scales.val[0])), vmull_s16(vget_high_s16(q8sums.val[0]), vget_high_s16(q6scales.val[0]))), @@ -6538,9 +6633,9 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri for (int j = 0; j < QK_K/128; ++j) { - uint8x16x2_t qhbits = vld1q_u8_x2(qh); qh += 32; - uint8x16x4_t q6bits = vld1q_u8_x4(q6); q6 += 64; - int8x16x4_t q8bytes = vld1q_s8_x4(q8); q8 += 64; + 
ggml_uint8x16x2_t qhbits = ggml_vld1q_u8_x2(qh); qh += 32; + ggml_uint8x16x4_t q6bits = ggml_vld1q_u8_x4(q6); q6 += 64; + ggml_int8x16x4_t q8bytes = ggml_vld1q_s8_x4(q8); q8 += 64; q6h.val[0] = vshlq_n_u8(vandq_u8(mone, qhbits.val[0]), 4); q6h.val[1] = vshlq_n_u8(vandq_u8(mone, qhbits.val[1]), 4); @@ -6583,7 +6678,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri scale += 2; #endif - q8bytes = vld1q_s8_x4(q8); q8 += 64; + q8bytes = ggml_vld1q_s8_x4(q8); q8 += 64; shifted = vshrq_n_u8(qhbits.val[0], 4); q6h.val[0] = vshlq_n_u8(vandq_u8(mone, shifted), 4); @@ -6987,8 +7082,8 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const uint8x16_t mone = vdupq_n_u8(3); - int8x16x4_t q6bytes; - uint8x16x4_t q6h; + ggml_int8x16x4_t q6bytes; + ggml_uint8x16x4_t q6h; for (int i = 0; i < nb; ++i) { @@ -7002,9 +7097,9 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri int32_t isum = 0; - uint8x16_t qhbits = vld1q_u8(qh); - uint8x16x2_t q6bits = vld1q_u8_x2(q6); - int8x16x4_t q8bytes = vld1q_s8_x4(q8); + uint8x16_t qhbits = vld1q_u8(qh); + ggml_uint8x16x2_t q6bits = ggml_vld1q_u8_x2(q6); + ggml_int8x16x4_t q8bytes = ggml_vld1q_s8_x4(q8); q6h.val[0] = vshlq_n_u8(vandq_u8(mone, qhbits), 4); uint8x16_t shifted = vshrq_n_u8(qhbits, 2); diff --git a/ggml.c b/ggml.c index da78e6de9..3202a517b 100644 --- a/ggml.c +++ b/ggml.c @@ -271,6 +271,12 @@ inline static void * ggml_aligned_malloc(size_t size) { // floating point type used to accumulate sums typedef double ggml_float; +#undef MIN +#undef MAX + +#define MIN(a, b) ((a) < (b) ? (a) : (b)) +#define MAX(a, b) ((a) > (b) ? (a) : (b)) + // // global data // @@ -604,6 +610,18 @@ ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type) { // simd mappings // +#if defined(__ARM_NEON) +#if !defined(__aarch64__) + +// 64-bit compatibility + +inline static float vaddvq_f32(float32x4_t v) { + return vgetq_lane_f32(v, 0) + vgetq_lane_f32(v, 1) + vgetq_lane_f32(v, 2) + vgetq_lane_f32(v, 3); +} + +#endif +#endif + // we define a common set of C macros which map to specific intrinsics based on the current architecture // we then implement the fundamental computation operations below using only these macros // adding support for new architectures requires to define the corresponding SIMD macros @@ -1616,13 +1634,8 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "ROPE_BACK", "ALIBI", "CLAMP", - "CONV_1D", - "CONV_1D_STAGE_0", - "CONV_1D_STAGE_1", "CONV_TRANSPOSE_1D", - "CONV_2D", - "CONV_2D_STAGE_0", - "CONV_2D_STAGE_1", + "IM2COL", "CONV_TRANSPOSE_2D", "POOL_1D", "POOL_2D", @@ -1653,7 +1666,7 @@ static const char * GGML_OP_NAME[GGML_OP_COUNT] = { "CROSS_ENTROPY_LOSS_BACK", }; -static_assert(GGML_OP_COUNT == 73, "GGML_OP_COUNT != 73"); +static_assert(GGML_OP_COUNT == 68, "GGML_OP_COUNT != 68"); static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "none", @@ -1703,13 +1716,8 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "rope_back(x)", "alibi(x)", "clamp(x)", - "conv_1d(x)", - "conv_1d_stage_0(x)", - "conv_1d_stage_1(x)", "conv_transpose_1d(x)", - "conv_2d(x)", - "conv_2d_stage_0(x)", - "conv_2d_stage_1(x)", + "im2col(x)", "conv_transpose_2d(x)", "pool_1d(x)", "pool_2d(x)", @@ -1740,7 +1748,7 @@ static const char * GGML_OP_SYMBOL[GGML_OP_COUNT] = { "cross_entropy_loss_back(x,y)", }; -static_assert(GGML_OP_COUNT == 73, "GGML_OP_COUNT != 73"); +static_assert(GGML_OP_COUNT == 68, "GGML_OP_COUNT != 68"); static_assert(GGML_OP_POOL_COUNT == 2, 
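/*
 * The bookkeeping behind the new op count asserted above:
 *   removed: GGML_OP_CONV_1D, CONV_1D_STAGE_0, CONV_1D_STAGE_1,
 *            GGML_OP_CONV_2D, CONV_2D_STAGE_0, CONV_2D_STAGE_1   (6 ops)
 *   added:   GGML_OP_IM2COL                                      (1 op)
 * which is why both name-table asserts move from 73 to 68; the hard-coded
 * count forces a revisit of GGML_OP_NAME/GGML_OP_SYMBOL whenever the enum
 * changes. Worked out as a (purely illustrative) compile-time check:
 */
static_assert(73 - 6 + 1 == 68, "conv ops folded into the single IM2COL op");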
"GGML_OP_POOL_COUNT != 2"); @@ -1768,13 +1776,7 @@ static void ggml_setup_op_has_task_pass(void) { p[GGML_OP_GET_ROWS_BACK ] = true; p[GGML_OP_DIAG_MASK_INF ] = true; p[GGML_OP_DIAG_MASK_ZERO ] = true; - p[GGML_OP_CONV_1D ] = true; - p[GGML_OP_CONV_1D_STAGE_0 ] = true; - p[GGML_OP_CONV_1D_STAGE_1 ] = true; p[GGML_OP_CONV_TRANSPOSE_1D ] = true; - p[GGML_OP_CONV_2D ] = true; - p[GGML_OP_CONV_2D_STAGE_0 ] = true; - p[GGML_OP_CONV_2D_STAGE_1 ] = true; p[GGML_OP_CONV_TRANSPOSE_2D ] = true; p[GGML_OP_FLASH_ATTN_BACK ] = true; p[GGML_OP_CROSS_ENTROPY_LOSS ] = true; @@ -5128,82 +5130,6 @@ static int64_t ggml_calc_conv_output_size(int64_t ins, int64_t ks, int s, int p, return (ins + 2 * p - d * (ks - 1) - 1) / s + 1; } -// im2col: [N, IC, IL] => [N, OL, IC*K] -// a: [OC,IC, K] -// b: [N, IC, IL] -// result: [N, OL, IC*K] -static struct ggml_tensor * ggml_conv_1d_stage_0( - struct ggml_context * ctx, - struct ggml_tensor * a, - struct ggml_tensor * b, - int s0, - int p0, - int d0) { - GGML_ASSERT(a->ne[1] == b->ne[1]); - bool is_node = false; - - if (a->grad || b->grad) { - GGML_ASSERT(false); // TODO: implement backward - is_node = true; - } - - const int64_t OL = ggml_calc_conv_output_size(b->ne[0], a->ne[0], s0, p0, d0); - - const int64_t ne[4] = { - a->ne[1] * a->ne[0], - OL, - b->ne[2], - 1, - }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F16, 4, ne); - - int32_t params[] = { s0, p0, d0 }; - ggml_set_op_params(result, params, sizeof(params)); - - result->op = GGML_OP_CONV_1D_STAGE_0; - result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; - result->src[0] = a; - result->src[1] = b; - - return result; -} - -// ggml_conv_1d_stage_1 - -// gemm: [N, OC, OL] = [OC, IC * K] x [N*OL, IC * K] -// a: [OC, IC, K] -// b: [N, OL, IC * K] -// result: [N, OC, OL] -static struct ggml_tensor * ggml_conv_1d_stage_1( - struct ggml_context * ctx, - struct ggml_tensor * a, - struct ggml_tensor * b) { - - bool is_node = false; - - if (a->grad || b->grad) { - GGML_ASSERT(false); // TODO: implement backward - is_node = true; - } - - const int64_t ne[4] = { - b->ne[1], - a->ne[2], - b->ne[2], - 1, - }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); - - result->op = GGML_OP_CONV_1D_STAGE_1; - result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; - result->src[0] = a; - result->src[1] = b; - - return result; -} - -// ggml_conv_1d - GGML_API struct ggml_tensor * ggml_conv_1d( struct ggml_context * ctx, struct ggml_tensor * a, @@ -5211,44 +5137,18 @@ GGML_API struct ggml_tensor * ggml_conv_1d( int s0, int p0, int d0) { - struct ggml_tensor * result = ggml_conv_1d_stage_0(ctx, a, b, s0, p0, d0); - result = ggml_conv_1d_stage_1(ctx, a, result); + struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, 0, p0, 0, d0, 0, false); // [N, OL, IC * K] + + struct ggml_tensor * result = + ggml_mul_mat(ctx, + ggml_reshape_2d(ctx, im2col, im2col->ne[0], (im2col->ne[2] * im2col->ne[1])), // [N, OL, IC * K] => [N*OL, IC * K] + ggml_reshape_2d(ctx, a, (a->ne[0] * a->ne[1]), a->ne[2])); // [OC,IC, K] => [OC, IC * K] + + result = ggml_reshape_3d(ctx, result, im2col->ne[1], a->ne[2], im2col->ne[2]); // [N, OC, OL] + return result; } -// GGML_API struct ggml_tensor * ggml_conv_1d( -// struct ggml_context * ctx, -// struct ggml_tensor * a, -// struct ggml_tensor * b, -// int s0, -// int p0, -// int d0) { -// GGML_ASSERT(ggml_is_matrix(b)); -// GGML_ASSERT(a->ne[1] == b->ne[1]); -// bool is_node = false; - -// if (a->grad || b->grad) { -// GGML_ASSERT(false); // TODO: implement backward -// is_node = true; -// } - -// const int64_t ne[4] = { -// ggml_calc_conv_output_size(b->ne[0], a->ne[0], s0, p0, d0), -// a->ne[2], 1, 1, -// }; -// struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 2, ne); - -// int32_t params[] = { s0, p0, d0 }; -// ggml_set_op_params(result, params, sizeof(params)); - -// result->op = GGML_OP_CONV_1D; -// result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; -// result->src[0] = a; -// result->src[1] = b; - -// return result; -// } - // ggml_conv_1d_ph struct ggml_tensor* ggml_conv_1d_ph( @@ -5310,7 +5210,7 @@ GGML_API struct ggml_tensor * ggml_conv_transpose_1d( // a: [OC,IC, KH, KW] // b: [N, IC, IH, IW] // result: [N, OH, OW, IC*KH*KW] -static struct ggml_tensor * ggml_conv_2d_stage_0( +struct ggml_tensor * ggml_im2col( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, @@ -5319,9 +5219,14 @@ static struct ggml_tensor * ggml_conv_2d_stage_0( int p0, int p1, int d0, - int d1) { + int d1, + bool is_2D) { - GGML_ASSERT(a->ne[2] == b->ne[2]); + if(is_2D) { + GGML_ASSERT(a->ne[2] == b->ne[2]); + } else { + GGML_ASSERT(a->ne[1] == b->ne[1]); + } bool is_node = false; if (a->grad || b->grad) { @@ -5329,81 +5234,51 @@ static struct ggml_tensor * ggml_conv_2d_stage_0( is_node = true; } - const int64_t OH = ggml_calc_conv_output_size(b->ne[1], a->ne[1], s1, p1, d1); - const int64_t OW = ggml_calc_conv_output_size(b->ne[0], a->ne[0], s0, p0, d0); + const int64_t OH = is_2D ? ggml_calc_conv_output_size(b->ne[1], a->ne[1], s1, p1, d1) : 0; + const int64_t OW = ggml_calc_conv_output_size(b->ne[0], a->ne[0], s0, p0, d0); const int64_t ne[4] = { - a->ne[2] * a->ne[1] * a->ne[0], + is_2D ? (a->ne[2] * a->ne[1] * a->ne[0]) : a->ne[1] * a->ne[0], OW, - OH, - b->ne[3], + is_2D ? OH : b->ne[2], + is_2D ? b->ne[3] : 1, }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F16, 4, ne); - int32_t params[] = { s0, s1, p0, p1, d0, d1 }; + struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F16, 4, ne); + int32_t params[] = { s0, s1, p0, p1, d0, d1, (is_2D ? 1 : 0) }; ggml_set_op_params(result, params, sizeof(params)); - result->op = GGML_OP_CONV_2D_STAGE_0; + result->op = GGML_OP_IM2COL; result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; result->src[1] = b; return result; - -} - -// gemm: [N, OC, OH, OW] = [OC, IC * KH * KW] x [N*OH*OW, IC * KH * KW] -// a: [OC, IC, KH, KW] -// b: [N, OH, OW, IC * KH * KW] -// result: [N, OC, OH, OW] -static struct ggml_tensor * ggml_conv_2d_stage_1( - struct ggml_context * ctx, - struct ggml_tensor * a, - struct ggml_tensor * b) { - - bool is_node = false; - - if (a->grad || b->grad) { - GGML_ASSERT(false); // TODO: implement backward - is_node = true; - } - - const int64_t ne[4] = { - b->ne[1], - b->ne[2], - a->ne[3], - b->ne[3], - }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 4, ne); - - result->op = GGML_OP_CONV_2D_STAGE_1; - result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; - result->src[0] = a; - result->src[1] = b; - - return result; - } // a: [OC,IC, KH, KW] // b: [N, IC, IH, IW] // result: [N, OC, OH, OW] struct ggml_tensor * ggml_conv_2d( - struct ggml_context * ctx, - struct ggml_tensor * a, - struct ggml_tensor * b, - int s0, - int s1, - int p0, - int p1, - int d0, - int d1) { + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s0, + int s1, + int p0, + int p1, + int d0, + int d1) { + struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, s1, p0, p1, d0, d1, true); // [N, OH, OW, IC * KH * KW] - struct ggml_tensor * result = ggml_conv_2d_stage_0(ctx, a, b, s0, s1, p0, p1, d0, d1); // [N, OH, OW, IC * KH * KW] - result = ggml_conv_2d_stage_1(ctx, a, result); + struct ggml_tensor * result = + ggml_mul_mat(ctx, + ggml_reshape_2d(ctx, im2col, im2col->ne[0], im2col->ne[3] * im2col->ne[2] * im2col->ne[1]), // [N, OH, OW, IC * KH * KW] => [N*OH*OW, IC * KH * KW] + ggml_reshape_2d(ctx, a, (a->ne[0] * a->ne[1] * a->ne[2]), a->ne[3])); // [OC,IC, KH, KW] => [OC, IC * KH * KW] + + result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], a->ne[3], im2col->ne[3]); // [N, OC, OH, OW] return result; - } // ggml_conv_2d_sk_p0 @@ -9498,6 +9373,8 @@ static bool ggml_compute_forward_mul_mat_use_blas( // TODO: find the optimal values for these if (ggml_is_contiguous(src0) && ggml_is_contiguous(src1) && + src0->type == GGML_TYPE_F32 && + src1->type == GGML_TYPE_F32 && (ne0 >= 32 && ne1 >= 32 && ne10 >= 32)) { /*printf("BLAS: %d %d %d %d %d\n", ne0, ne1, ne10, ne00, ne01);*/ @@ -9536,7 +9413,7 @@ static void ggml_compute_forward_mul_mat( // we don't support permuted src0 or src1 GGML_ASSERT(nb00 == ggml_type_size(type)); - GGML_ASSERT(nb10 == sizeof(float)); + GGML_ASSERT(nb10 == ggml_type_size(src1->type)); // dst cannot be transposed or permuted GGML_ASSERT(nb0 == sizeof(float)); @@ -11434,416 +11311,6 @@ static void ggml_compute_forward_rope_back( } } -// ggml_compute_forward_conv_1d - -static void ggml_compute_forward_conv_1d_f16_f32( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F32); - - int64_t t0 = ggml_perf_time_us(); - UNUSED(t0); - - GGML_TENSOR_BINARY_OP_LOCALS - - const int ith = params->ith; - const int nth = params->nth; - - const int nk = ne00; - - // size of the convolution row - the kernel size unrolled across all input channels - const int ew0 = nk*ne01; - - const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; - const int32_t p0 = ((const int32_t*)(dst->op_params))[1]; - const int32_t d0 = ((const 
int32_t*)(dst->op_params))[2]; - - GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); - GGML_ASSERT(nb10 == sizeof(float)); - - if (params->type == GGML_TASK_INIT) { - memset(params->wdata, 0, params->wsize); - - ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0; - - for (int64_t i11 = 0; i11 < ne11; i11++) { - const float * const src = (float *)((char *) src1->data + i11*nb11); - ggml_fp16_t * dst_data = wdata; - - for (int64_t i0 = 0; i0 < ne0; i0++) { - for (int64_t ik = 0; ik < nk; ik++) { - const int idx0 = i0*s0 + ik*d0 - p0; - - if(!(idx0 < 0 || idx0 >= ne10)) { - dst_data[i0*ew0 + i11*nk + ik] = GGML_FP32_TO_FP16(src[idx0]); - } - } - } - } - - return; - } - - if (params->type == GGML_TASK_FINALIZE) { - return; - } - - // total rows in dst - const int nr = ne2; - - // rows per thread - const int dr = (nr + nth - 1)/nth; - - // row range for this thread - const int ir0 = dr*ith; - const int ir1 = MIN(ir0 + dr, nr); - - ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0; - - for (int i2 = 0; i2 < ne2; i2++) { - for (int i1 = ir0; i1 < ir1; i1++) { - float * dst_data = (float *)((char *) dst->data + i2*nb2 + i1*nb1); - - for (int i0 = 0; i0 < ne0; i0++) { - ggml_vec_dot_f16(ew0, dst_data + i0, - (ggml_fp16_t *) ((char *) src0->data + i1*nb02), - (ggml_fp16_t *) wdata + i2*nb2 + i0*ew0); - } - } - } -} - -static void ggml_compute_forward_conv_1d_f32( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - GGML_ASSERT(src0->type == GGML_TYPE_F32); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F32); - - int64_t t0 = ggml_perf_time_us(); - UNUSED(t0); - - GGML_TENSOR_BINARY_OP_LOCALS - - const int ith = params->ith; - const int nth = params->nth; - - const int nk = ne00; - - const int ew0 = nk*ne01; - - const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; - const int32_t p0 = ((const int32_t*)(dst->op_params))[1]; - const int32_t d0 = ((const int32_t*)(dst->op_params))[2]; - - GGML_ASSERT(nb00 == sizeof(float)); - GGML_ASSERT(nb10 == sizeof(float)); - - if (params->type == GGML_TASK_INIT) { - memset(params->wdata, 0, params->wsize); - - float * const wdata = (float *) params->wdata + 0; - - for (int64_t i11 = 0; i11 < ne11; i11++) { - const float * const src = (float *)((char *) src1->data + i11*nb11); - float * dst_data = wdata; - - for (int64_t i0 = 0; i0 < ne0; i0++) { - for (int64_t ik = 0; ik < nk; ik++) { - const int idx0 = i0*s0 + ik*d0 - p0; - - if(!(idx0 < 0 || idx0 >= ne10)) { - dst_data[i0*ew0 + i11*nk + ik] = src[idx0]; - } - } - } - } - - return; - } - - if (params->type == GGML_TASK_FINALIZE) { - return; - } - - // total rows in dst - const int nr = ne02; - - // rows per thread - const int dr = (nr + nth - 1)/nth; - - // row range for this thread - const int ir0 = dr*ith; - const int ir1 = MIN(ir0 + dr, nr); - - float * const wdata = (float *) params->wdata + 0; - - for (int i2 = 0; i2 < ne2; i2++) { - for (int i1 = ir0; i1 < ir1; i1++) { - float * dst_data = (float *)((char *) dst->data + i2*nb2 + i1*nb1); - - for (int i0 = 0; i0 < ne0; i0++) { - ggml_vec_dot_f32(ew0, dst_data + i0, - (float *) ((char *) src0->data + i1*nb02), - (float *) wdata + i2*nb2 + i0*ew0); - } - } - } -} - -// TODO: reuse ggml_mul_mat or implement ggml_im2col and remove stage_0 and stage_1 -static void gemm_f16_out_f32(int64_t m, int64_t n, int64_t k, - ggml_fp16_t * A, - ggml_fp16_t * B, - float * C, - const int ith, const int nth) { - // does not seem to 
make a difference - int64_t m0, m1, n0, n1; - // patches per thread - if (m > n) { - n0 = 0; - n1 = n; - - // total patches in dst - const int np = m; - - // patches per thread - const int dp = (np + nth - 1)/nth; - - // patch range for this thread - m0 = dp*ith; - m1 = MIN(m0 + dp, np); - } else { - m0 = 0; - m1 = m; - - // total patches in dst - const int np = n; - - // patches per thread - const int dp = (np + nth - 1)/nth; - - // patch range for this thread - n0 = dp*ith; - n1 = MIN(n0 + dp, np); - } - - // block-tiling attempt - int64_t blck_n = 16; - int64_t blck_m = 16; - - // int64_t CACHE_SIZE = 2 * 1024 * 1024; // 2MB - // int64_t blck_size = CACHE_SIZE / (sizeof(float) + 2 * sizeof(ggml_fp16_t) * K); - // if (blck_size > 0) { - // blck_0 = 4; - // blck_1 = blck_size / blck_0; - // if (blck_1 < 0) { - // blck_1 = 1; - // } - // // blck_0 = (int64_t)sqrt(blck_size); - // // blck_1 = blck_0; - // } - // // printf("%zd %zd %zd %zd\n", blck_size, K, blck_0, blck_1); - - for (int j = n0; j < n1; j+=blck_n) { - for (int i = m0; i < m1; i+=blck_m) { - // printf("i j k => %d %d %d\n", i, j, K); - for (int ii = i; ii < i + blck_m && ii < m1; ii++) { - for (int jj = j; jj < j + blck_n && jj < n1; jj++) { - ggml_vec_dot_f16(k, - C + ii*n + jj, - A + ii * k, - B + jj * k); - } - } - } - } -} - -// src0: kernel [OC, IC, K] -// src1: signal [N, IC, IL] -// dst: result [N, OL, IC*K] -static void ggml_compute_forward_conv_1d_stage_0_f32( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F16); - - int64_t t0 = ggml_perf_time_us(); - UNUSED(t0); - - GGML_TENSOR_BINARY_OP_LOCALS; - - const int64_t N = ne12; - const int64_t IC = ne11; - const int64_t IL = ne10; - - const int64_t K = ne00; - - const int64_t OL = ne1; - - const int ith = params->ith; - const int nth = params->nth; - - const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; - const int32_t p0 = ((const int32_t*)(dst->op_params))[1]; - const int32_t d0 = ((const int32_t*)(dst->op_params))[2]; - - GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); - GGML_ASSERT(nb10 == sizeof(float)); - - if (params->type == GGML_TASK_INIT) { - memset(dst->data, 0, ggml_nbytes(dst)); - return; - } - - if (params->type == GGML_TASK_FINALIZE) { - return; - } - - // im2col: [N, IC, IL] => [N, OL, IC*K] - { - ggml_fp16_t * const wdata = (ggml_fp16_t *) dst->data; - - for (int64_t in = 0; in < N; in++) { - for (int64_t iol = 0; iol < OL; iol++) { - for (int64_t iic = ith; iic < IC; iic+=nth) { - - // micro kernel - ggml_fp16_t * dst_data = wdata + (in*OL + iol)*(IC*K); // [IC, K] - const float * const src_data = (float *)((char *) src1->data + in*nb12 + iic*nb11); // [IL] - - for (int64_t ik = 0; ik < K; ik++) { - const int64_t iil = iol*s0 + ik*d0 - p0; - - if (!(iil < 0 || iil >= IL)) { - dst_data[iic*K + ik] = GGML_FP32_TO_FP16(src_data[iil]); - } - } - } - } - } - } -} - -// gemm: [N, OC, OL] = [OC, IC * K] x [N*OL, IC * K] -// src0: [OC, IC, K] -// src1: [N, OL, IC * K] -// result: [N, OC, OL] -static void ggml_compute_forward_conv_1d_stage_1_f16( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F16); - GGML_ASSERT( dst->type == GGML_TYPE_F32); - - int64_t t0 = 
ggml_perf_time_us(); - UNUSED(t0); - - if (params->type == GGML_TASK_INIT) { - return; - } - - if (params->type == GGML_TASK_FINALIZE) { - return; - } - - GGML_TENSOR_BINARY_OP_LOCALS; - - GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); - GGML_ASSERT(nb10 == sizeof(ggml_fp16_t)); - GGML_ASSERT(nb0 == sizeof(float)); - - const int N = ne12; - const int OL = ne11; - - const int OC = ne02; - const int IC = ne01; - const int K = ne00; - - const int ith = params->ith; - const int nth = params->nth; - - int64_t m = OC; - int64_t n = OL; - int64_t k = IC * K; - - // [N, OC, OL] = [OC, IC * K] x [N*OL, IC * K] - for (int i = 0; i < N; i++) { - ggml_fp16_t * A = (ggml_fp16_t *)src0->data; // [m, k] - ggml_fp16_t * B = (ggml_fp16_t *)src1->data + i * m * k; // [n, k] - float * C = (float *)dst->data + i * m * n; // [m, n] - - gemm_f16_out_f32(m, n, k, A, B, C, ith, nth); - } -} - -static void ggml_compute_forward_conv_1d( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - switch(src0->type) { - case GGML_TYPE_F16: - { - ggml_compute_forward_conv_1d_f16_f32(params, src0, src1, dst); - } break; - case GGML_TYPE_F32: - { - ggml_compute_forward_conv_1d_f32(params, src0, src1, dst); - } break; - default: - { - GGML_ASSERT(false); - } break; - } -} - -static void ggml_compute_forward_conv_1d_stage_0( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - switch(src0->type) { - case GGML_TYPE_F16: - { - ggml_compute_forward_conv_1d_stage_0_f32(params, src0, src1, dst); - } break; - default: - { - GGML_ASSERT(false); - } break; - } -} - -static void ggml_compute_forward_conv_1d_stage_1( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - switch(src0->type) { - case GGML_TYPE_F16: - { - ggml_compute_forward_conv_1d_stage_1_f16(params, src0, src1, dst); - } break; - default: - { - GGML_ASSERT(false); - } break; - } -} - // ggml_compute_forward_conv_transpose_1d static void ggml_compute_forward_conv_transpose_1d_f16_f32( @@ -12055,12 +11522,10 @@ static void ggml_compute_forward_conv_transpose_1d( } } -// ggml_compute_forward_conv_2d - // src0: kernel [OC, IC, KH, KW] // src1: image [N, IC, IH, IW] // dst: result [N, OH, OW, IC*KH*KW] -static void ggml_compute_forward_conv_2d_stage_0_f32( +static void ggml_compute_forward_im2col_f16( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, @@ -12074,34 +11539,35 @@ static void ggml_compute_forward_conv_2d_stage_0_f32( GGML_TENSOR_BINARY_OP_LOCALS; - const int64_t N = ne13; - const int64_t IC = ne12; - const int64_t IH = ne11; - const int64_t IW = ne10; - - // const int64_t OC = ne03; - // const int64_t IC = ne02; - const int64_t KH = ne01; - const int64_t KW = ne00; - - const int64_t OH = ne2; - const int64_t OW = ne1; + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; const int ith = params->ith; const int nth = params->nth; - const int32_t s0 = ((const 
int32_t*)(dst->op_params))[0]; - const int32_t s1 = ((const int32_t*)(dst->op_params))[1]; - const int32_t p0 = ((const int32_t*)(dst->op_params))[2]; - const int32_t p1 = ((const int32_t*)(dst->op_params))[3]; - const int32_t d0 = ((const int32_t*)(dst->op_params))[4]; - const int32_t d1 = ((const int32_t*)(dst->op_params))[5]; + const int64_t N = is_2D ? ne13 : ne12; + const int64_t IC = is_2D ? ne12 : ne11; + const int64_t IH = is_2D ? ne11 : 1; + const int64_t IW = ne10; + + const int64_t KH = is_2D ? ne01 : 1; + const int64_t KW = ne00; + + const int64_t OH = is_2D ? ne2 : 1; + const int64_t OW = ne1; + + int ofs0 = is_2D ? nb13 : nb12; + int ofs1 = is_2D ? nb12 : nb11; GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); if (params->type == GGML_TASK_INIT) { - memset(dst->data, 0, ggml_nbytes(dst)); return; } @@ -12114,20 +11580,22 @@ static void ggml_compute_forward_conv_2d_stage_0_f32( ggml_fp16_t * const wdata = (ggml_fp16_t *) dst->data; for (int64_t in = 0; in < N; in++) { - for (int64_t ioh = 0; ioh < OH; ioh++) { + for (int64_t ioh = 0; ioh < OH; ioh++) { // 1 for (int64_t iow = 0; iow < OW; iow++) { - for (int64_t iic = ith; iic < IC; iic+=nth) { + for (int64_t iic = ith; iic < IC; iic += nth) { // micro kernel ggml_fp16_t * dst_data = wdata + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] - const float * const src_data = (float *)((char *) src1->data + in*nb13 + iic*nb12); // [IH, IW] + const float * const src_data = (float *)((char *) src1->data + in*ofs0 + iic*ofs1); // [IH, IW] - for (int64_t ikh = 0; ikh < KH; ikh++) { + for (int64_t ikh = 0; ikh < KH; ikh++) { // 1 for (int64_t ikw = 0; ikw < KW; ikw++) { const int64_t iiw = iow*s0 + ikw*d0 - p0; const int64_t iih = ioh*s1 + ikh*d1 - p1; - if (!(iih < 0 || iih >= IH || iiw < 0 || iiw >= IW)) { + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = 0; + } else { dst_data[iic*(KH*KW) + ikh*KW + ikw] = GGML_FP32_TO_FP16(src_data[iih*IW + iiw]); } } @@ -12139,180 +11607,7 @@ static void ggml_compute_forward_conv_2d_stage_0_f32( } } -// gemm: [N, OC, OH, OW] = [OC, IC * KH * KW] x [N*OH*OW, IC * KH * KW] -// src0: [OC, IC, KH, KW] -// src1: [N, OH, OW, IC * KH * KW] -// result: [N, OC, OH, OW] -static void ggml_compute_forward_conv_2d_stage_1_f16( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F16); - GGML_ASSERT( dst->type == GGML_TYPE_F32); - - int64_t t0 = ggml_perf_time_us(); - UNUSED(t0); - - if (params->type == GGML_TASK_INIT) { - return; - } - - if (params->type == GGML_TASK_FINALIZE) { - return; - } - - GGML_TENSOR_BINARY_OP_LOCALS; - - GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); - GGML_ASSERT(nb10 == sizeof(ggml_fp16_t)); - GGML_ASSERT(nb0 == sizeof(float)); - - const int N = ne13; - const int OH = ne12; - const int OW = ne11; - - const int OC = ne03; - const int IC = ne02; - const int KH = ne01; - const int KW = ne00; - - const int ith = params->ith; - const int nth = params->nth; - - int64_t m = OC; - int64_t n = OH * OW; - int64_t k = IC * KH * KW; - - // [N, OC, OH, OW] = [OC, IC * KH * KW] x [N*OH*OW, IC * KH * KW] - for (int i = 0; i < N; i++) { - ggml_fp16_t * A = (ggml_fp16_t *)src0->data; // [m, k] - ggml_fp16_t * B = (ggml_fp16_t *)src1->data + i * m * k; // [n, k] - float * C = (float *)dst->data + i * m * n; // [m, n] - - gemm_f16_out_f32(m, n, k, A, B, C, 
ith, nth); - } -} - -static void ggml_compute_forward_conv_2d_f16_f32( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F32); - - int64_t t0 = ggml_perf_time_us(); - UNUSED(t0); - - GGML_TENSOR_BINARY_OP_LOCALS - - // src1: image [N, IC, IH, IW] - // src0: kernel [OC, IC, KH, KW] - // dst: result [N, OC, OH, OW] - // ne12: IC - // ne0: OW - // ne1: OH - // nk0: KW - // nk1: KH - // ne13: N - - const int N = ne13; - const int IC = ne12; - const int IH = ne11; - const int IW = ne10; - - const int OC = ne03; - // const int IC = ne02; - const int KH = ne01; - const int KW = ne00; - - const int OH = ne1; - const int OW = ne0; - - const int ith = params->ith; - const int nth = params->nth; - - // const int nk0 = ne00; - // const int nk1 = ne01; - - // size of the convolution row - the kernel size unrolled across all channels - // const int ew0 = nk0*nk1*ne02; - // ew0: IC*KH*KW - - const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; - const int32_t s1 = ((const int32_t*)(dst->op_params))[1]; - const int32_t p0 = ((const int32_t*)(dst->op_params))[2]; - const int32_t p1 = ((const int32_t*)(dst->op_params))[3]; - const int32_t d0 = ((const int32_t*)(dst->op_params))[4]; - const int32_t d1 = ((const int32_t*)(dst->op_params))[5]; - - GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); - GGML_ASSERT(nb10 == sizeof(float)); - - if (params->type == GGML_TASK_INIT) { - memset(params->wdata, 0, params->wsize); - - // prepare source data (src1) - // im2col: [N, IC, IH, IW] => [N*OH*OW, IC*KH*KW] - - { - ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0; - - for (int in = 0; in < N; in++) { - for (int iic = 0; iic < IC; iic++) { - for (int ioh = 0; ioh < OH; ioh++) { - for (int iow = 0; iow < OW; iow++) { - - // micro kernel - ggml_fp16_t * dst_data = wdata + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] - const float * const src_data = (float *)((char *) src1->data + in*nb13 + iic*nb12); // [IH, IW] - - for (int ikh = 0; ikh < KH; ikh++) { - for (int ikw = 0; ikw < KW; ikw++) { - const int iiw = iow*s0 + ikw*d0 - p0; - const int iih = ioh*s1 + ikh*d1 - p1; - - if (!(iih < 0 || iih >= IH || iiw < 0 || iiw >= IW)) { - dst_data[iic*(KH*KW) + ikh*KW + ikw] = GGML_FP32_TO_FP16(src_data[iih*IW + iiw]); - } - } - } - } - } - } - } - } - - return; - } - - if (params->type == GGML_TASK_FINALIZE) { - return; - } - - ggml_fp16_t * const wdata = (ggml_fp16_t *) params->wdata + 0; - // wdata: [N*OH*OW, IC*KH*KW] - // dst: result [N, OC, OH, OW] - // src0: kernel [OC, IC, KH, KW] - - int64_t m = OC; - int64_t n = OH * OW; - int64_t k = IC * KH * KW; - - // [N, OC, OH, OW] = [OC, IC * KH * KW] x [N*OH*OW, IC * KH * KW] - for (int i = 0; i < N; i++) { - ggml_fp16_t * A = (ggml_fp16_t *)src0->data; // [m, k] - ggml_fp16_t * B = (ggml_fp16_t *)wdata + i * m * k; // [n, k] - float * C = (float *)dst->data + i * m * n; // [m * k] - - gemm_f16_out_f32(m, n, k, A, B, C, ith, nth); - } -} - -static void ggml_compute_forward_conv_2d( +static void ggml_compute_forward_im2col( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, @@ -12320,50 +11615,7 @@ static void ggml_compute_forward_conv_2d( switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_conv_2d_f16_f32(params, src0, src1, dst); - } break; - case GGML_TYPE_F32: - { - 
//ggml_compute_forward_conv_2d_f32(params, src0, src1, dst); - GGML_ASSERT(false); - } break; - default: - { - GGML_ASSERT(false); - } break; - } -} - -static void ggml_compute_forward_conv_2d_stage_0( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - switch (src0->type) { - case GGML_TYPE_F16: - { - ggml_compute_forward_conv_2d_stage_0_f32(params, src0, src1, dst); - } break; - case GGML_TYPE_F32: - { - GGML_ASSERT(false); - } break; - default: - { - GGML_ASSERT(false); - } break; - } -} - -static void ggml_compute_forward_conv_2d_stage_1( - const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { - switch (src0->type) { - case GGML_TYPE_F16: - { - ggml_compute_forward_conv_2d_stage_1_f16(params, src0, src1, dst); + ggml_compute_forward_im2col_f16(params, src0, src1, dst); } break; case GGML_TYPE_F32: { @@ -14580,33 +13832,13 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm { ggml_compute_forward_clamp(params, tensor->src[0], tensor); } break; - case GGML_OP_CONV_1D: - { - ggml_compute_forward_conv_1d(params, tensor->src[0], tensor->src[1], tensor); - } break; - case GGML_OP_CONV_1D_STAGE_0: - { - ggml_compute_forward_conv_1d_stage_0(params, tensor->src[0], tensor->src[1], tensor); - } break; - case GGML_OP_CONV_1D_STAGE_1: - { - ggml_compute_forward_conv_1d_stage_1(params, tensor->src[0], tensor->src[1], tensor); - } break; case GGML_OP_CONV_TRANSPOSE_1D: { ggml_compute_forward_conv_transpose_1d(params, tensor->src[0], tensor->src[1], tensor); } break; - case GGML_OP_CONV_2D: + case GGML_OP_IM2COL: { - ggml_compute_forward_conv_2d(params, tensor->src[0], tensor->src[1], tensor); - } break; - case GGML_OP_CONV_2D_STAGE_0: - { - ggml_compute_forward_conv_2d_stage_0(params, tensor->src[0], tensor->src[1], tensor); - } break; - case GGML_OP_CONV_2D_STAGE_1: - { - ggml_compute_forward_conv_2d_stage_1(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_im2col(params, tensor->src[0], tensor->src[1], tensor); } break; case GGML_OP_CONV_TRANSPOSE_2D: { @@ -15588,31 +14820,11 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { GGML_ASSERT(false); // TODO: not implemented } break; - case GGML_OP_CONV_1D: - { - GGML_ASSERT(false); // TODO: not implemented - } break; - case GGML_OP_CONV_1D_STAGE_0: - { - GGML_ASSERT(false); // TODO: not implemented - } break; - case GGML_OP_CONV_1D_STAGE_1: - { - GGML_ASSERT(false); // TODO: not implemented - } break; case GGML_OP_CONV_TRANSPOSE_1D: { GGML_ASSERT(false); // TODO: not implemented } break; - case GGML_OP_CONV_2D: - { - GGML_ASSERT(false); // TODO: not implemented - } break; - case GGML_OP_CONV_2D_STAGE_0: - { - GGML_ASSERT(false); // TODO: not implemented - } break; - case GGML_OP_CONV_2D_STAGE_1: + case GGML_OP_IM2COL: { GGML_ASSERT(false); // TODO: not implemented } break; @@ -16341,31 +15553,11 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { { n_tasks = 1; //TODO } break; - case GGML_OP_CONV_1D: - { - n_tasks = n_threads; - } break; - case GGML_OP_CONV_1D_STAGE_0: - { - n_tasks = n_threads; - } break; - case GGML_OP_CONV_1D_STAGE_1: - { - n_tasks = n_threads; - } break; case GGML_OP_CONV_TRANSPOSE_1D: { n_tasks = n_threads; } break; - case GGML_OP_CONV_2D: - { - n_tasks = n_threads; - } break; - case GGML_OP_CONV_2D_STAGE_0: - { - n_tasks = n_threads; - } 
break; - case GGML_OP_CONV_2D_STAGE_1: + case GGML_OP_IM2COL: { n_tasks = n_threads; } break; @@ -16450,6 +15642,7 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { } break; default: { + printf("%s: op %s not implemented\n", __func__, ggml_op_name(node->op)); GGML_ASSERT(false); } break; } @@ -16652,38 +15845,6 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { cur = ggml_type_size(GGML_TYPE_F32) * node->src[0]->ne[0] * n_tasks; } } break; - case GGML_OP_CONV_1D: - { - GGML_ASSERT(node->src[0]->ne[3] == 1); - GGML_ASSERT(node->src[1]->ne[2] == 1); - GGML_ASSERT(node->src[1]->ne[3] == 1); - - const int64_t ne00 = node->src[0]->ne[0]; - const int64_t ne01 = node->src[0]->ne[1]; - const int64_t ne02 = node->src[0]->ne[2]; - - const int64_t ne10 = node->src[1]->ne[0]; - const int64_t ne11 = node->src[1]->ne[1]; - - const int64_t ne0 = node->ne[0]; - const int64_t ne1 = node->ne[1]; - const int64_t nk = ne00; - const int64_t ew0 = nk * ne01; - - UNUSED(ne02); - UNUSED(ne10); - UNUSED(ne11); - - if (node->src[0]->type == GGML_TYPE_F16 && - node->src[1]->type == GGML_TYPE_F32) { - cur = sizeof(ggml_fp16_t)*(ne0*ne1*ew0); - } else if (node->src[0]->type == GGML_TYPE_F32 && - node->src[1]->type == GGML_TYPE_F32) { - cur = sizeof(float)*(ne0*ne1*ew0); - } else { - GGML_ASSERT(false); - } - } break; case GGML_OP_CONV_TRANSPOSE_1D: { GGML_ASSERT(node->src[0]->ne[3] == 1); @@ -16709,37 +15870,9 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { GGML_ASSERT(false); } } break; - case GGML_OP_CONV_2D: + case GGML_OP_IM2COL: { - const int64_t ne00 = node->src[0]->ne[0]; // W - const int64_t ne01 = node->src[0]->ne[1]; // H - const int64_t ne02 = node->src[0]->ne[2]; // C - const int64_t ne03 = node->src[0]->ne[3]; // N - - const int64_t ne10 = node->src[1]->ne[0]; // W - const int64_t ne11 = node->src[1]->ne[1]; // H - const int64_t ne12 = node->src[1]->ne[2]; // C - - const int64_t ne0 = node->ne[0]; - const int64_t ne1 = node->ne[1]; - const int64_t ne2 = node->ne[2]; - const int64_t ne3 = node->ne[3]; - const int64_t nk = ne00*ne01; - const int64_t ew0 = nk * ne02; - - UNUSED(ne03); - UNUSED(ne2); - - if (node->src[0]->type == GGML_TYPE_F16 && - node->src[1]->type == GGML_TYPE_F32) { - // im2col: [N*OH*OW, IC*KH*KW] - cur = sizeof(ggml_fp16_t)*(ne3*ne0*ne1*ew0); - } else if (node->src[0]->type == GGML_TYPE_F32 && - node->src[1]->type == GGML_TYPE_F32) { - cur = sizeof(float)* (ne10*ne11*ne12); - } else { - GGML_ASSERT(false); - } + n_tasks = n_threads; } break; case GGML_OP_CONV_TRANSPOSE_2D: { diff --git a/ggml.h b/ggml.h index 0118c99db..8e6b64606 100644 --- a/ggml.h +++ b/ggml.h @@ -403,13 +403,8 @@ extern "C" { GGML_OP_ROPE_BACK, GGML_OP_ALIBI, GGML_OP_CLAMP, - GGML_OP_CONV_1D, - GGML_OP_CONV_1D_STAGE_0, // internal - GGML_OP_CONV_1D_STAGE_1, // internal GGML_OP_CONV_TRANSPOSE_1D, - GGML_OP_CONV_2D, - GGML_OP_CONV_2D_STAGE_0, // internal - GGML_OP_CONV_2D_STAGE_1, // internal + GGML_OP_IM2COL, GGML_OP_CONV_TRANSPOSE_2D, GGML_OP_POOL_1D, GGML_OP_POOL_2D, @@ -1403,6 +1398,18 @@ extern "C" { float min, float max); + GGML_API struct ggml_tensor * ggml_im2col( + struct ggml_context * ctx, + struct ggml_tensor * a, + struct ggml_tensor * b, + int s0, + int s1, + int p0, + int p1, + int d0, + int d1, + bool is_2D); + GGML_API struct ggml_tensor * ggml_conv_1d( struct ggml_context * ctx, struct ggml_tensor * a, From bd90eca237b498dd106d315dcb9ad3e6fae3906f Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?M=2E=20Yusuf=20Sar=C4=B1g=C3=B6z?= Date: Mon, 13 Nov 2023 18:20:52 +0300 Subject: [PATCH 113/859] llava : fix regression for square images in #3613 (#4056) --- examples/llava/clip.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index c26ee4957..fc0656c23 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -761,7 +761,7 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip temp->ny = img->ny; temp->size = img->size; temp->data = new uint8_t[temp->size](); - *temp->data = *img->data; // copy + memcpy(&temp->data[0], &img->data[0], temp->size); // copy } const int nx = temp->nx; From b46d12f86d56bef3dc8b596dfb3d22f3b08102be Mon Sep 17 00:00:00 2001 From: afrideva <95653597+afrideva@users.noreply.github.com> Date: Mon, 13 Nov 2023 17:03:40 -0800 Subject: [PATCH 114/859] convert.py: also look for plain model.safetensors (#4043) * add safetensors to convert.py help message * Check for single-file safetensors model * Update convert.py "model" option help message * revert convert.py help message change --- convert.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/convert.py b/convert.py index a4b87e088..3d6216f1d 100755 --- a/convert.py +++ b/convert.py @@ -1036,7 +1036,8 @@ def load_some_model(path: Path) -> ModelPlus: # Be extra-friendly and accept either a file or a directory: if path.is_dir(): # Check if it's a set of safetensors files first - files = list(path.glob("model-00001-of-*.safetensors")) + globs = ["model-00001-of-*.safetensors", "model.safetensors"] + files = [file for glob in globs for file in path.glob(glob)] if not files: # Try the PyTorch patterns too, with lower priority globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt", "pytorch_model.bin"] @@ -1123,7 +1124,7 @@ def main(args_in: list[str] | None = None) -> None: parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") - parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") + parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin, *.safetensors)") parser.add_argument("--vocabtype", choices=["spm", "bpe"], help="vocab format (default: spm)", default="spm") parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default = DEFAULT_CONCURRENCY) From 36eed0c42c5b0bf74af81fb9243d262014f9382f Mon Sep 17 00:00:00 2001 From: Galunid Date: Tue, 14 Nov 2023 11:17:12 +0100 Subject: [PATCH 115/859] stablelm : StableLM support (#3586) * Add support for stablelm-3b-4e1t * Supports GPU offloading of (n-1) layers --- README.md | 1 + convert-hf-to-gguf.py | 30 ++- gguf-py/gguf/constants.py | 17 ++ llama.cpp | 284 +++++++++++++++++++++++- models/ggml-vocab-stablelm-3b-4e1t.gguf | Bin 0 -> 1768581 bytes tests/CMakeLists.txt | 2 + 6 files changed, 322 insertions(+), 12 deletions(-) create mode 100644 models/ggml-vocab-stablelm-3b-4e1t.gguf diff --git a/README.md b/README.md index 
c7d232778..4de064765 100644 --- a/README.md +++ b/README.md @@ -93,6 +93,7 @@ as the main playground for developing new features for the [ggml](https://github - [X] [Persimmon 8B](https://github.com/ggerganov/llama.cpp/pull/3410) - [X] [MPT](https://github.com/ggerganov/llama.cpp/pull/3417) - [X] [Bloom](https://github.com/ggerganov/llama.cpp/pull/3553) +- [X] [StableLM-3b-4e1t](https://github.com/ggerganov/llama.cpp/pull/3586) **Bindings:** diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index f7fe29fd4..e7db75912 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -150,8 +150,6 @@ class Model: @staticmethod def from_model_architecture(model_architecture): - if model_architecture == "StableLMEpochForCausalLM": - return StableLMModel if model_architecture == "GPTNeoXForCausalLM": return GPTNeoXModel if model_architecture == "BloomForCausalLM": @@ -168,6 +166,8 @@ class Model: return RefactModel if model_architecture == "PersimmonForCausalLM": return PersimmonModel + if model_architecture in ("StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): + return StableLMModel return Model def _is_model_safetensors(self) -> bool: @@ -201,6 +201,8 @@ class Model: return gguf.MODEL_ARCH.REFACT if arch == "PersimmonForCausalLM": return gguf.MODEL_ARCH.PERSIMMON + if arch in ("StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): + return gguf.MODEL_ARCH.STABLELM raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -294,15 +296,6 @@ class Model: special_vocab.add_to_gguf(self.gguf_writer) -class StableLMModel(Model): - def set_gguf_parameters(self): - super().set_gguf_parameters() - self.gguf_writer.add_rope_dimension_count( - int(self.hparams["rope_pct"] * (self.hparams["hidden_size"] // self.hparams["num_attention_heads"])), - ) - self.gguf_writer.add_layer_norm_eps(1e-5) - - class GPTNeoXModel(Model): def set_gguf_parameters(self): block_count = self.hparams["num_hidden_layers"] @@ -824,6 +817,21 @@ class PersimmonModel(Model): self.gguf_writer.add_tensor(new_name, data) +class StableLMModel(Model): + def set_gguf_parameters(self): + hparams = self.hparams + block_count = hparams["num_hidden_layers"] + + self.gguf_writer.add_name(dir_model.name) + self.gguf_writer.add_context_length(hparams["max_position_embeddings"]) + self.gguf_writer.add_embedding_length(hparams["hidden_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) + self.gguf_writer.add_rope_dimension_count(int(hparams["rope_pct"]*(hparams["hidden_size"] // hparams["num_attention_heads"]))) + self.gguf_writer.add_head_count(hparams["num_attention_heads"]) + self.gguf_writer.add_parallel_residual(hparams["use_parallel_residual"] if "use_parallel_residual" in hparams else True) + self.gguf_writer.add_layer_norm_eps(1e-5) + ###### CONVERSION LOGIC ###### def parse_args() -> argparse.Namespace: diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index bf1ccf669..7f63361bd 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -90,6 +90,7 @@ class MODEL_ARCH(IntEnum): REFACT = auto() BERT = auto() BLOOM = auto() + STABLELM = auto() class MODEL_TENSOR(IntEnum): @@ -129,6 +130,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.REFACT: "refact", MODEL_ARCH.BERT: "bert", MODEL_ARCH.BLOOM: "bloom", + MODEL_ARCH.STABLELM: "stablelm", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -299,6 +301,21 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, 
MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.STABLELM: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], MODEL_ARCH.GPT2: [ # TODO ], diff --git a/llama.cpp b/llama.cpp index 76ee4ea23..01522fdb4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -192,6 +192,7 @@ enum llm_arch { LLM_ARCH_PERSIMMON, LLM_ARCH_REFACT, LLM_ARCH_BLOOM, + LLM_ARCH_STABLELM, LLM_ARCH_UNKNOWN, }; @@ -207,6 +208,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_PERSIMMON, "persimmon" }, { LLM_ARCH_REFACT, "refact" }, { LLM_ARCH_BLOOM, "bloom" }, + { LLM_ARCH_STABLELM, "stablelm" }, }; enum llm_kv { @@ -495,6 +497,25 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, }, }, + { + LLM_ARCH_STABLELM, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ROPE_FREQS, "rope_freqs" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, + { LLM_ARCH_UNKNOWN, { @@ -2216,6 +2237,16 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_STABLELM: + { + GGUF_GET_KEY(ctx, hparams.f_norm_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, kv(LLM_KV_ATTENTION_LAYERNORM_EPS)); + + switch (hparams.n_layer) { + case 32: model.type = e_model::MODEL_3B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; + default: (void)0; } @@ -3087,6 +3118,81 @@ static void llm_load_tensors( } } } break; + case LLM_ARCH_STABLELM: + { + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + + // output + { + ggml_backend_type backend_norm; + ggml_backend_type backend_output; + + if (n_gpu_layers > int(n_layer)) { + // norm is not performance relevant on its own but keeping it in VRAM reduces data copying + // on Windows however this is detrimental unless everything is on the GPU +#ifndef _WIN32 + backend_norm = llama_backend_offload; +#else + backend_norm = n_gpu_layers <= (int) n_layer + 2 ? 
GGML_BACKEND_CPU : llama_backend_offload; +#endif // _WIN32 + + backend_output = llama_backend_offload_split; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); + model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); + model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + + if (backend_norm == GGML_BACKEND_GPU) { + vram_weights += ggml_nbytes(model.output_norm); + } + if (backend_output == GGML_BACKEND_GPU_SPLIT) { + vram_weights += ggml_nbytes(model.output); + } + } + + const uint32_t n_ff = hparams.n_ff; + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + + for (uint32_t i = 0; i < n_layer; ++i) { + /* + llama_model_loader: - tensor 4: blk.0.attn_output.weight f16 [ 2560, 2560, 1, 1 ] + */ + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + + layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); + layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); + layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + + layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + + layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + + if (backend == GGML_BACKEND_GPU) { + vram_weights += + ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + + ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + + ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); + } + } + } break; + default: throw std::runtime_error("unknown architecture"); } @@ -4565,6 +4671,177 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_stablelm() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, 
n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, hparams.n_rot, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * tmpq = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(tmpq, "tmpq", il); + + struct ggml_tensor * tmpk = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(tmpk, "tmpk", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + // RoPE the first n_rot of q/k, pass the other half, and concat. + struct ggml_tensor * qrot = ggml_cont(ctx0, ggml_view_3d( + ctx0, tmpq, hparams.n_rot, n_head, n_tokens, + ggml_element_size(tmpq) * n_embd_head, + ggml_element_size(tmpq) * n_embd_head * n_head, + 0 + )); + cb(qrot, "qrot", il); + + struct ggml_tensor * krot = ggml_cont(ctx0, ggml_view_3d( + ctx0, tmpk, hparams.n_rot, n_head, n_tokens, + ggml_element_size(tmpk) * n_embd_head, + ggml_element_size(tmpk) * n_embd_head * n_head_kv, + 0 + )); + cb(krot, "krot", il); + + // get the second half of tmpq, e.g tmpq[n_rot:, :, :] + struct ggml_tensor * qpass = ggml_view_3d( + ctx0, tmpq, (n_embd_head - hparams.n_rot), n_head, n_tokens, + ggml_element_size(tmpq) * n_embd_head, + ggml_element_size(tmpq) * n_embd_head * n_head, + ggml_element_size(tmpq) * hparams.n_rot + ); + cb(qpass, "qpass", il); + + struct ggml_tensor * kpass = ggml_view_3d( + ctx0, tmpk, (n_embd_head - hparams.n_rot), n_head_kv, n_tokens, + ggml_element_size(tmpk) * (n_embd_head), + ggml_element_size(tmpk) * (n_embd_head) * n_head_kv, + ggml_element_size(tmpk) * hparams.n_rot + ); + cb(kpass, "kpass", il); + + struct ggml_tensor * qrotated = ggml_rope_custom( + ctx0, qrot, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(qrotated, "qrotated", il); + + struct ggml_tensor * krotated = ggml_rope_custom( + ctx0, krot, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(krotated, "krotated", il); + + // ggml currently only supports concatenation on dim=2 + // so we need to permute qrot, qpass, concat, then permute back. 
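+                // ggml_permute(ctx0, t, 2, 1, 0, 3) swaps dims 0 and 2, e.g. qrotated goes from
+                // [n_rot, n_head, n_tokens] to [n_tokens, n_head, n_rot], so the rotated and
+                // pass-through halves can be joined along dim=2 and then permuted back to
+                // [n_embd_head, n_head, n_tokens]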
+                qrotated = ggml_cont(ctx0, ggml_permute(ctx0, qrotated, 2, 1, 0, 3));
+                cb(qrotated, "qrotated", il);
+
+                krotated = ggml_cont(ctx0, ggml_permute(ctx0, krotated, 2, 1, 0, 3));
+                cb(krotated, "krotated", il);
+
+                qpass = ggml_cont(ctx0, ggml_permute(ctx0, qpass, 2, 1, 0, 3));
+                cb(qpass, "qpass", il);
+
+                kpass = ggml_cont(ctx0, ggml_permute(ctx0, kpass, 2, 1, 0, 3));
+                cb(kpass, "kpass", il);
+
+                struct ggml_tensor * Qcur = ggml_concat(ctx0, qrotated, qpass);
+                cb(Qcur, "Qcur", il);
+
+                struct ggml_tensor * Kcur = ggml_concat(ctx0, krotated, kpass);
+                cb(Kcur, "Kcur", il);
+
+                struct ggml_tensor * Q = ggml_cont(ctx0, ggml_permute(ctx0, Qcur, 2, 1, 0, 3));
+                cb(Q, "Q", il);
+
+                Kcur = ggml_cont(ctx0, ggml_permute(ctx0, Kcur, 2, 1, 0, 3));
+                cb(Kcur, "Kcur", il);
+
+                llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il);
+
+                cur = llm_build_kqv(ctx0, hparams, kv_self,
+                        model.layers[il].wo, NULL,
+                        Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il);
+                cb(cur, "kqv_out", il);
+            }
+
+            struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA);
+            cb(ffn_inp, "ffn_inp", il);
+
+            // feed-forward network
+            {
+                cur = llm_build_norm(ctx0, ffn_inp, hparams,
+                        model.layers[il].ffn_norm,
+                        model.layers[il].ffn_norm_b,
+                        LLM_NORM, cb, il);
+                cb(cur, "ffn_norm", il);
+
+                cur = llm_build_ffn(ctx0, cur,
+                        model.layers[il].ffn_up,   NULL,
+                        model.layers[il].ffn_gate, NULL,
+                        model.layers[il].ffn_down, NULL,
+                        LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
+                cb(cur, "ffn_out", il);
+            }
+
+            cur = ggml_add(ctx0, cur, ffn_inp);
+            cb(cur, "l_out", il);
+
+            // input for next layer
+            inpL = cur;
+        }
+
+        cur = inpL;
+
+        cur = llm_build_norm(ctx0, cur, hparams,
+                model.output_norm,
+                model.output_norm_b,
+                LLM_NORM, cb, -1);
+        cb(cur, "result_norm", -1);
+
+        // lm_head
+        cur = ggml_mul_mat(ctx0, model.output, cur);
+        cb(cur, "result_output", -1);
+
+        ggml_build_forward_expand(gf, cur);
+
+        return gf;
+    }
 };
 
 //
@@ -5034,6 +5311,10 @@ static struct ggml_cgraph * llama_build_graph(
             {
                 result = llm.build_mpt();
             } break;
+        case LLM_ARCH_STABLELM:
+            {
+                result = llm.build_stablelm();
+            } break;
         default:
             GGML_ASSERT(false);
     }
@@ -5209,7 +5490,8 @@ static int llama_decode_internal(
         model.arch == LLM_ARCH_FALCON ||
         model.arch == LLM_ARCH_REFACT ||
         model.arch == LLM_ARCH_MPT ||
-        model.arch == LLM_ARCH_STARCODER;
+        model.arch == LLM_ARCH_STARCODER ||
+        model.arch == LLM_ARCH_STABLELM;
 
     const bool fully_offloaded = model.n_gpu_layers >= (int) hparams.n_layer + 3;
     if (ggml_cpu_has_cublas() && full_offload_supported && fully_offloaded) {
diff --git a/models/ggml-vocab-stablelm-3b-4e1t.gguf b/models/ggml-vocab-stablelm-3b-4e1t.gguf
new file mode 100644
index 0000000000000000000000000000000000000000..ebb0cdb7d6a4ac313f45758010a6fda6ac530443
GIT binary patch
literal 1768581
[1,768,581 bytes of base85-encoded binary patch data omitted]
z%L-w@e;f5)mXZK#DdB4g?Di&_vXEXHmPYplo74f4O{-^+#-LdkVe9$GXKgfI8$D;jo? zTz}=d3I7R8Mrfh;ADs0GG;_iflAC_UeW9znmi$G6))MCenn@(L61`zz8@8e|*H=bI zUl`@*vm{@xWYy-;3vAT)cvE&SZbS|{(RC}f=+6+P<_iE-IrZuc{|-(He~>&#Uk67m zZ0+Zc{#HEN#+#EIa)Z52LtX01QDj})1G}}+L80`gujbL91vAJ9+4)LiEJ|-@1g|ME{#ohvd(PN3g^2Oh- zlbzkNYt@^Vu2coQy&GWB=>E(_cx;#>X{ZZsB^3vatro+pCh7Oz<&hG{ zLJ~jF_|ug!o8i;KpOLS#-iHunBtXcv@&~!&oeDKYGsMi|AF3XHXg^pbG!Cge2P!jr z(#F-<-b!V6YIf%>UmBLWxsDZF6m>L1PYbz!d+i>La?k{aX9`17C3)J!{cKwi zfmE+aWi5J!uLtJc1UxMBZ#Ng-K*g~zGMyiaFhd=*mU69Gu}eF)5N%-+5wuvVyM_v? z-?`e#=ycLPQTmyZo}iXfp(j2`=A}|{#+CE~c6W`#X+HmP7yJK~JRynvj;54UPQn7> z+Njnd4FN5!L+Usg}Sq3OvI^e&T7X`}>RYmrAIhz}{zSgc2rj7jLkld@W)qBpJn17egX-9i z4O<`c`E-ltuu2Em;;(|OH&kR0Edj#}IzCxy;e=dI;!I$!)nJWdiIBRV_&$C!5XwOX zEMZ-7R$DSy_0wcVDGK4!K(}&*zhd_LDz?Rykwgvn*2E zeXEg)myyN!EG}7tjqSt>lMFSfB$$}}RU!ydtPR9yjq%{RfuuQXQN+TIR=lL)h=f!& z5&8jis~z!sye(K9=6TOnDoIi&w_o=$NcoL1>?*qq5xXortb*^quOZBqTWGWQ(k<_Q z+IPu}0u$lO&Z+P7YWIr z`U(uRp!ty1t3^u5vye>?A-M`|6LCW;OYdRCGNAYc!gHHIogmpK3tH&I)c#5kIJ$c+ zP#)?Q3qH_uxzdQ1Ozw}G-mlS*f+7;uTHsjX_|-HF ztQepP2J@Y=*mVmA)UYN3r>v%84);;f_I^|I;j;8(Bq21!E=|zSR)#}UJPNf-%Q26( zhAkXOvRrEf$$^A`IqK;|5=)f*z9FsX33JUS;$(&j;Re-)omjrMrFVKQ5qSa&zD1H& z{IlIcZbFG3x0($J-Z;XttJP`|<#0e#jZN_J{w!wE?3P*~F?8yjYD?2J)|f-9oo^NL zOfr!(i@t89J$X_+A<9gX5z1~+U~^+XfBl(TYbZ0Cid-Fq6=+wi@&0)1-@JNPiS4+7JsadTvICFU@vY!hNha; zE%A8HaG0J~48|vjik<~iiVL9o8R>QixA{uU-Xa2awzq}lh#7EMrKMp$O+M~^GwtU{ zbZlEC{za?D$ZnQKp$TFTih)Dpt>hepMUcd*p^XMiakJ@N?z~U#x~4?B2xQbEY?G0R z)LJP|TreU_Rf}m>t4ni9pRwjC`ls+ws@DmBSR{SjyPjm^#fj4j7 zbbLYo(o&EJf~&ss2}jsFQG=0c6#nJq3*LGZmYFRTik{0%0Bue+I5F+|@nHdzy+*vd zDD?=?aca!MjU5vSFm%5MVQCiRfAlG*6T@Hxj;ns$hKN~?Y>*-G( z=^d~99e+ANmWUnJNUW~f=W?$>GV8$Gn#UT^rpA~K6M}`33}l`k3|syq@l0WZIQ5){ zfDR?u2&x|%`Q?lLE^*jtW5}vRkxJ-rPTmIt6`O>?kIcK|9eGqG z2M)YDN{v=ZHz6P_i72Zgt+D@xz@f8K0Wv0FHVonUO;l0JPdSK*)^|8wyQ!y2!s9f& zMQ`5p=O5So<3F}BAm2(x`8;@=MzE`))3mpd3#sdn()~Q+Xvi+ibGlX8Z>zMoXy#rdoenP zIr4QadNi&>SjCFZbM#W7q&c9cTaM9XGfrCV}Qb9q>G`SSYp#w_5J~F*y(`zZ-b!D41AvSHH-7seMD1YJX+nUKJ zU(#|XWw0V$8g)`gU=e@Js%VOugQncnogzfItjr?8lZ}I7JihHuAAS%SW78^^%!da# z+E?RLf#ha!_4#1t>(0?=7tWniTA{itdYz9+7H-t6KX>kek8<|-afiXw95~+~ZmM?nQ17jTC_qKfnn8^@Kh3MFN+x@iBE4QauL=m^4n<97j zv|gwtd%_ETX{Kdi$U><6L{xy{`;e3WVJB!HGCxu z{80jA$;n0B6YVWF?Nkj9lOM8KULYuc*>RZkH-}USAlQjAKcHE@WTf9F)VE-;@QI&5X5zy;03tmsMdT&24?s8=y2z5=u|4Qys9j5ykD z$R0KpX6rAPsHvBQN;ogq%D@^s$n^&7bfH&}Ozh~kjn1Ur7SMh5h6V*+_b0pM!-qc2 z$vjv;)FG>|uXCG5Hc0MOVYM%Hv!H}qz7gES1Xcx7mbR^MsA~J2Y2#1MzEVVrwUeQZm zu>#CZYm{M?wWyi*$OqBFEI%|kX`jCHLELk7>K$Jo5_O#j^bupqlDq{JCKR>{-I;^~ zx|g#(5H0+B3~-v)3Z4n@#nmPsb6DsEWNT02fZI9C^ zd5P=mj%(P7MTLb)n@R`{0=urab++KPcC$bV_JD;&FV@R`fJHf1fM<-pY9VHJr2d0% zFaV4t0Y@C?Q%E~F>^r76AuUDkQp@5Rg@?cNM8|I23l}jbXvHv zi<>%zQMn(JLKw7(edPXmhYiA*_WInpivp*4OtLq~5>vF zpzfe#>00$ur%!9Qt6mD9GAmBcHw%*mCY_ax=VM!iv3W8_a4DR+vU=`G)LWGq7w|14 zmi4Nu*HM^#@}pTYB;rnGmjuz7**95J9*V08@-A4aITX~p!x!PJH0V% zqI64M{_LYBm-NFl5?gsH&cJZppBA>ROV346(<1qwswX(p#)Mz zr`__P;f^=`jsZEB?koY{*_DUsJ){r{j|6xrPm=F6d2LlK+vh{5PC6NeOr+R4K2H^a zh5VPQ!C5sf&5V>No1DXyC1znC8>7`5^FH7C;H2P4#2b4M$hdrzvVWdFO+F`!lu!1) z*+ITnVHV+S_@ZR-rDsv6d*&rD29V5S1%zyz3Jq8ivoFH}4pgxAZ;3zmm631Z?`{$% zk#^EFz7UZ1@CxPQ`xpr?`}Is-f#T|jF0YcA;`?Pqp1j>H4}Z-^gePVv_dcl+!bf9m zNqV99ftB^p1_IkDK_lLmX+_DyQa?dK3${i!DE!>F%%t_4XoDymcmR5fZEkE|bvV7X zm#kxRR5js)1%Ec_oXxOjD$v2|A?R7GR~T`$2nwAcIVZH=S|q1kGk$lW3LRJOISdVR zy^R4jRd6ci$8|`U`#C30Qr9`G3df$F(XJxZVo=GLokzcO$44Z3IME67ulp|*am7vp z2VpsN)ykjZRK%Ib{Pm~4gND0NBh9pq*juKYlSB|J1D&ikkaukLczusc;7BMHPoF*% z<3vowamGR=j1V<^q*ZG&TlszuD*)9-Fp!m((SyApCkMx`6>zt zvU}USw{ne$&;v~n0vRmf?Gur~&h;Z!tE3@(W1db48ME^eCkvI^c0$ 
z$YNR{W{2*$5&3MUCkSWW3X7>me@EyyAbtf{K$7qR1!$!rBnY5op#bTba-mf=@mWrF z3}4pk%eTvD?axWntw00}a~i_xa$pG3O7}^S)=5AxsZH+pUhbwVWxxX?rMfbi>&!TtDl_{*j2{TX03CNsWqo@#jai3Bn6k?K5h*E@UQerDMFmE&TdVLrG53o zKXTBrGY5&v-7#TXz03~Ae4)>9Zi1`*Nag*-&*fi#@|a`jiBq47F~8k~0}b@5#UYJc zbApPZsU;T6o9(jqg_x;%+?bgM=JdZJyt7_m#UatV9dk(se_Fb-=LY<_gyP zq&Mhsi$=1^dUsY;rTdm(+*>3;bakJ(1BQaEac`1o4{X)E=`L|1}#{3Vo+bm$&o z-}7I%ndY>(3QAKkQxtnI6 z7ufKSWE1#ZGD>`K2x+9xy{lzg;c^{OtiLAg>I=eQAnTd?TJXJMA6{#M6C!#ES>Jv~ zlfuH#53>`~6*6M(E9hv3B;N=to<0$j3}0c`Nq0c7`mR3=PDxC(w1i}-u)3}NWRUr} zpL0;K!jy=)k>qyRQnJgIAb%Z$8D8I9$BL=pV@h;^c%Nd+(9#W!dpc$XB?ieIoAYi6 zMAl}bj3qk3x)kEsEuVW;502|a(z487mM_1eQfbKNC~>#^cq8k3b1eYe7(qnBSxwhn z?n7?+aswV2ukG}yOvk-+GXYMg$P%`^bM#GN69<=Te51U_gHH-l)P>4*1UH5=`6LMB zN()A#)oay&I*+0ZW=u4*q(6QL9n6&BZjLE|&*CWw2KxR%csn-Jc=-2H7}-i`2;J`5 z+GH^5G)b+Uaqnv8jb_ZEeuAIv8wqM;KBlUV1m$J&hSr7&*pb)LRDF4f{YYWHla)0wHETiRhGJR{yOgi%=iie=WjNh70f4)14KlzX1?5n5e_T?Lmk)6C4v2@^bb zbU)-O5J3G~1bc`ox2}qSucQrVUI>NvA0!F+)3FiLT)d0S{qUIo)EVE76Q2}_#|mlh zM=Cw^MpJ@nwn?sL1}nRg=7v%aza^bLR?9UyS+~P*A;cr3gg8Q<%jR8SD}nI|cOiix z_U}^y-QRdO)sjbY&*Sls}9Q4lRH=p8v-o zzajma^#joXZr@LmuUX2o>uk1m2?a8D5LV;0`(c84ecNZzEib>InHTB7CxLOKNe1Fw zjHi0*(hJTdu9ouC@E89Axr*6i@)Nw@pcIKlAzgYWofq5GpfFb7D$Z=VD%Qs|Z?aE~ zyoC%rlkl?3*u>K$F@YgI_`54gEga#3sibh4J0W{aFUGx4nE1Y%76{{joWyjXG(qpX z<*84JK$SuX$>Id2B#g%eFbheG(yTsBtLcOs<%oFtWQ>GkGpUWYWy;`{lnT#v~S)VG?#C z17@oC4S>X($x$OwA3V?!1fQmAFDsE-;2A7}ZM|^roP^0zLc_w}ec{00Qh7tNBcjNc zfDcqJns=2YNaQ9oM%D~VicBQo1`?GF$cVhe6KWYq#3_>+I~D6PRMWbvA%V&cEmVXQ znDTkZvATk4#!YD`7zsKzs~q;mPaviY8d!tP*KUhaXQVI0ZZA-yeFC`4Yqc3Uus*i#Dul(bPLin!`1>JCUl>FCb{=RxF z#U!Jj<-kI!(|!`e9YGi@6;S&oV2k`M4YDfH0nVr^i1u3HFzyo5uvWP&7H$TTkQ28o zq|5iSR9gLQ(Ex&7ODR&H1XL_l2j)(!Qe(m2Hd#`)tnZK|=I@cQ6k|ZT@P$WCa@=g> z`RwYA(ANm|LUP3W6~=|Ti0=v(oW7Ruox z!TG&+yHP-Ru`!!b4^Rt7^q?%rjM@P)9S-`pGzTKpV@4dVwZzLII;xBeNO0wX$71Bu z*HDpCj+0ZKbKN=FcITm^2N;!Qw|ka$tlwU*lAeuzZk3uGE3btVi?=7agGf~|6)fls z_VApAWS4yr(Ch#^ckTrZ*IPsc#<_Es{Cikz%GIW}m%obv(I@lfEeE^ZC*y4j1SBLv zBkiZ0bBe4cG&0S@2ebs25C$=@acx>i)0Sx@fwr+qsmd@|@iu(bsS5SQ7uD@B%Xo+$ zL=gB|h(FSyJKCeXF;+@d3$GIUnl1W>AhT4z>gk!t1t7{Q5E94BF`o-tG)VV&-bB9A zX@lN2ku~$vL3~I))XE9FAR{(dmIdZz%bEp;W#i8EHs1tS>V*i^DTnmPRp!Gl{oqUa zU!SlY%CMo7N}@Vjlv)JE^yHnL2r*|}&>sgfb!s%K6&lf+K}kgvbjz2{i5GfMs2*-9 z*&PK-%%3XV#G;GImGa50?pu*>)NmtPAQ0_0g{|A4)AV5U5n?gfoA?s�XD_>n;^B z;?z8U@;RR+3tAWQp!MmBjV+WdANcSWKKzbDZ-`wfOb^7!`!n70&ChC;W!Pz|EO=DV zG-&j@)#EhSE%>E(1X)Ts^e{w|fqmI4bH2}oV9rHyyA^7i#P=2d(!;l^WSXX@92|hf z0iu`91p5~BAW+tp+GS_iDaus|eHauik|fc6E3{Nn<$U5ua~uvmVc{ju*4ev9!k|A- z1k*@M_9xMUA4D#9~&7eL7WVkNz}4)=o6#_-q()T*&-!{_7|;v!r@oBi@SF9V$?6 zKW12V@N#7R<_W5HFqTD+fbIyBJ@)*o4$ZCCHQ6JD+j}Ra(g(%;6BMh2@rBrVjp%}; zO7AWL$mAOg;skHxJBKagEPf$8MI;V(uoiwn;7c`><4LJ+*@(+H*$jlwCDl+Uvna_A z8U+(cNpW=eT2x|^K4D^}YRhIgUJGO65C)GTl-w%wdq>y~6tIfBHpT995Gm`=J_;vS zI41YLU6pZNZt~N9x+W4Y%B>nqKHs#@Dhj%?Y4C3^Rvr(g0Q@q5_3>J zkr~q5uA_)(%S)%pI&c@MF;ER@rU)NCZ<5}xdK=;Qd!lU^FyTt*n?4^LE1B6{dRf>! 
zBK%r#wbV1m*qa-AiZhT#$^Rx%(giWWr}3MlgZV@!yC zI!~TD6U`6q3nX0VJbomkvOHUW=PR&;eqrF2`747XT4f~lQ&~k00TYL1nA1#OAzVlM zgNIzIHm>aCJKRU5WsedPta2qX`hrdxhzP^~{~aLpQ`(-FZ~+heMeMvn6U!X(;z44y2!D7-FWf*KjiMnDiOA2pdHP&h5QOf04_3=DW7 zV%8h?LZLDxTKK*RS+#?&Z(xXa;c3`)y5;tMN((YKat^F6WoEDu`2=rPquwBiXE4`v zd}|`i{YkswQxwN6reszIc|h{0LO20A6TQ24JEboG4&R!^Fv_@`eQ?r;A%u0E0Dn(w zU6r!Qk{cM7%M9E_xYRabVMWlRH^;XsNt@6a3(wNi#XxKIdHiYJBY98PzY;%CuQ*G4I%+Swa2E_y~!}j6V zy03lsH9fqZ5yqViN!KfdQm4U?@%CDRZX(|7`ky&Qa6W?VY~9dO$BT5@u<9p&AcmTJ z(0r@&pE*1Pt$ai5gb>VoVdVMoP0@w$F^NpVNiz_+r8fo8Uhrd z#6iw2$y6Pc91bZEm<|&uGS!{RTk6R29FxcI&53M<^os4UY=U|pG`r>m)e$$N1W_9) z#ioQ=QtVX+701=uo#X(`qV?3t4(S)>PSxpKSwG;e!Q!`JTE2PN6~?XHIRqNB8428} z?~0O)=HS&DEF)-5Odd5)VzR&3m45`G{sB1?K5$KNzi?uQB5S_ao451|!9S9guu&}% zyjDq2ie-SCEs_fsLZ_|J+aL)hDU1RW!K&^uCPLbQfn1 zU2ivO0vPwJl~zIn&L zq*$AfwIsu}N*cgklvAHe5h~Mk1Vl`4DX)bS=t$sb#iTC^f|ivPY|U;j3fjKHWOYm$ zSyJmjJIk{bqI0-S*>w~>8~M%HD2$bANSK{cR-QinKqTC&1qH&v(z$zCGvB_GK(h+M zPV}Wu#fCH^+@W^Oi1x%Jgpxm#+5bY;xke^ONGkwcm-ry*4^#B;k>icSt4-lVV%BI!*6xp_ccF#PWz4%At?~Ori8w0H|@gG*!8=2e3cfoo03Hs7G*8g zVcBqr5TK-_@?AJX!lhxQ-l5fLMHW1mzd#AJOn;yGL1IAWJ!jw`w}fMxl}+e$>4LxZ z@%v&2M-}`4q!x3l8czNm;+|x)zkf^9XDczT>+@`3yvdhv;;c{COd+A6K1$^BHLF>x zt+N|26LR-Vn@%O0N*2|~ybH<3di|z@&n`uCnuS-@r}5fNpGGOtEtpd2{lFsC0O6X-L6{j6TGj~n@RDcsczlEoQJ zE>x*;I1?u7BXrBp#@A<+ZFG|R1BD@$_a;|8bwD`pTAh_o5+Oq7!;US@jNoD7>;@Wn}|G9(v&8vFkH3bckIhkFeFe>O!Lbln+^7?%-)bLH#05fPpTEjNRNlezGcQHGr z?b}eLtP>A3l}&{%m%31MAw{7ACE!WQY)fQ|5b&PL7_z!jjzbh!c#-YmNOWH*kqS!bXlfNsgCy_nLp?MEm6b3~?vsd^WEgSFLv_KIFo#PS+T#tH*`!N44fn2+IeMkY7>QViWkT@6abtpZ1C2#lyDaBXP}k z8%1IFn0x>H(7-@>i>SeAeQF><0KI zj5>EijP8~s8~dQhq;=0fkf4xlPnhS(?(wsQ!^@Ip{i7AzA(+%L?jV#bYQO3kg06BR zEJ0Yla<)I64%UP6DduSFvphJzvvO4n{>T4+KTa4|2$%Iq<^TfhVwZ&F#Fw+l{@iOf z{1^8RID}9&+Aw+(X#oiQ5EHSar;y?N!y^k%5JcWICC0lq=;Nb~KKAOs21|qs@7mak zn22AZ#}C^B0O(_oy`VTGll&EAiwTi}DgGcyUb$Gw$~PDZQKx>|;b!PJOKQkI(pNI8 zT(fdITEsQ<8d;@C!b;);zQ<-q8%xyLc*sTM4mFI^>Su<4q^ul**TQScr3$`;Z3c^D zPbs{VeuPQgpCSra#|J?yb9yWtV#=1NmDG8{e-j#-`es_T_gd~T@DE$f?kH`nh2F=5 zMxK-6bLFZ4dlMs_9|F6j!k2FaBaHTIo6YG`;v4jGAMc!SiMtRb(=peydg_hpIs8LS ziW}ad1Yv}Yz;u(jk&Mlxu383xPnBZHVc)-g&u3(O^auaUpRqE$+UimS0`*-BGI;{f zXMTfVW#va59lrR*FKXH|5;+pbz&6na-XLB0sFddkxZ`cmF}@X!1}cngSSB|Zq2_dT}kt(>sY zAodMYwZxKfVF>55g?6Nu&3Z3$0s*Y_?@uBM$F7vpo0olBXmbanF!7kjP1M2UknVT1 zXGa#egadQRSV`?F*J5W~vRvho=$T1967vD#pZYXTTbb}P0_l}1q=uvWlQfN0Hos7@ zh-)dooQpYp-48$bAlVX7zwjnnsEM#{b!Ew#VQ}RZ)6m5T?J;l`-+5rf162>aG%R*ajTZY6^U{A z;WzZIneZ^-Xi{qtC!9B!e8xE!=t5}Q3rV5G5^Ef&Rgi<%QGc{w<=&8FZz9S>(hUw6`l_C_gPr5$qJl)yX6zW#QJ{Ab|!9phtab8-SGF?-*f(z`DFF_ zw&RVMh>h(T%BZUU<(xcu^5o^oOm#j1#o}*|PsCmOXN6Yq6DYD2YT#qP z8JD8hkwO;|c{Dk;X~QFr8@dv)X;LXU&m`rNT7p}#|KgI@5szk(N6GwLC|&QT2Y7YO zB*H(lQ}hs+8;MvpXkQhN3k@hMM%}s?{EHua#T$c?9V1*uv84FZzL`blnrqe~qlrO; zNbVwdq1g1y*EE^pRGYaM&)yqx(#3}}e66`=O_F`rw38F?wDhlQ?d-sP`3iDDfRzXH zlEmEe38Nkkz)<1xpyKbW#4;*$VZZe^|Ed|YrMC#AYLr*G05l!$xoENyJXW2b0>Sl|9` z>aMr*ut(CG250n>k58@NQsEV)Gt?^ZU$F+Z|pcav}#OeOGo@fh_8jh4s@<3V<}Xqd-9 zR9*SE9>X4%K|GHn{JO2Sn?EsD4~e$s+W?Rx)j#fS`&`jtk=Fi_bk5PhTxnVmS!&e#Mq&+!>hVqx&9-OnKW>HwL4syDT!y>gw2@ zP)#r8CkSLjWH26@n<~Tj$qUV`V(Q{P1SnKsq8)4qb<~fL{#LG7IB5CbCV=`$+3(dv z#Sep0VCiO*VQsLW&TbK~l%WHiTBFjtYykyc%}mM+RNDXxl4)HK4$IhDGkt!rj&+#C z6Wn#k9CB1D|Ep}ZwAjMG{Re0?91Jn!_2feVgI$YlGRjEG&6`2FTg(YRRlHtSvwGK+ z$lxBbC$0v%CJ2T8M*X~<} zQD!$2N(s0TFChViq`5oY{ZPuLQsxu>1&61y#-GzSqkAn(El}eOWBm4EQrN% zo#|D#iv$=q%K&Z6Qzb5v-h3r$7LkWk&eKRZLBEeQ8Kkyw>tLo(3`3$TDZ;9_PAZ^1Q3>0=6%qH))E<& ze024SDNS=BSAa%=otrsBE4uG~DaOv}bUj#Zxu&XY6uasjbcJ|7%)8>3;>tYA;czC# zqi|*aq)cA1c~^;XY87Yd8xHFx&bS73|GB?oTpXI|I&DtAU%wd3G{A|hlx7cG2_mT= 
zk@*rE`it4$<@Si(Iq41^wPEK zug2f3qfdvo_Ri#qVXILb3-bkQ-(ty4yX{*gr_}C(C$jC^HexE@Ar$L zL!|vaMGL%y7$5s-6*X1QH#Zn>+3!=pRn;m9PLpD6k)XI25#=-wAIOUxv&JQr`wzd; zrbhvud{|O=Tl@mn2ogQ|LTm^klY3=4sC2d5CsH->1#Ga5pwVhCla|5V#IyEU#;;sE{HQ~zS5A6|XlaCVNr z)&sD)A-!`bKT0Py4)q|<`mLa(}NjX&9{n18-qVg`(>PqF%+knlG~V(_RZm7 zjrmc0D;{%RV)MHkN{8ypRyqlw`N_<71EXdo|AogSWI%jU*xVK^26W64*WR!=8dm!c z%=&NlRM^U$KmlFEZI>7CR;LRkMox-mcMIJt-i)yfkk;(>G3peGjo%{sKrj+NdbJk( zZK;2`s-OsKy}+l6rOEAW&Fb>vwO4}Waav?>N-?ZnzD_BBIW5RkOlfd+DU^mgDQeGJ zE14KB@p%U~2%Mn_Ibn}T=68eDNGMQlQLHpIOH34Dyzr#OXoY`>A3G>FCT8%(Vkyul z+%cu_@|l%;H);~(=8~C@Gj}Ep357Jqs1PtlwLh+H(;?AorA=1df2b}Q$T{8~ms+`j z58p{s8dTcVUqBNn9J9r1QjUeS^Od7uW1GuP(unT zBp(!792Dt)V+!Q(>!?K=bVq0GBVru*ASa-@dok(f60HpjOTEM=9OK2NIz9jubnb0;0rqmV&ukGEKTV-d`MAhA7aQ*u;!{%@6%&$_TE6~8hIGyAuo5Gu(I(yW(n_`d$>2aZ^@e5z} zOnO97%5Rjt3-YYyX*{T1sZ^0xHbm^2mQ>p!2*HHUTz%18rUb6SSZEE7IIX%(>!?p+ zkmn|(x@Ufax)8p!bw-aDv^A=@0ByYj@_$<(`nXuXjq#BtwqT`}0wwQjn7aB#zijpf zj34U&H{HlKsvZ)RoWc!M!*Ig#dbL}&L0cQoi}Y9;0|v9G%x$Kv5$pX=u@I+OTX3^h zKwPcqC?z)K=OH?Sq{xUFV_3PBgMSi>9 zp}z#5G*Lx6^xNQFj!gmq>SO6|6cbbV)Zk_SyYVN5dtxfY+yysxa3iYZ_qUO}pgM+b1_3_9sB1&Ns1wa~R z#9MWx-=kokAIBLZ$z#lMt26mW<$`JMVad!?PE&^n_KPW+UiLwY;;`&~?=gkFhw1rR zI4DM`l#ndGIXHpqf+{M3#)8GdtAbnnjYV0w1&<1DLz9|C!TXW{mnZ-Eai)-MZ3dB5 zj3lM`^=sE--LHNSq+Qrv4V!GzH-BAU!nEI3NJpMt9^(eXI93{w{4q~N6=Ht4XS<~_EC-EHmihbhgeLOiz zIQah^tAbTr(o!G`UmJx@{dS_*t>0w4sfR-)XiB&?z9yvdW-JCSK?@T6Gs)h= zaiY?}xmW9l_P*qI$8)K3ZCkp3jAL)uG8|o(l~t;h)};%lB7!i-o1khuK@pGz2VM@s z0VNv8^o5S%>KMN6FH-1GnMPhd{3Om6r1MsPx56kQi`{3IV%NS~nVBhwIa_0d4gH)) zCq@`io>=eARTFtN<(RDC%OAuqG{4DJp=^YAPjj_-G-%MT#Yxz(w^-JAo~uJyB0d&^ zLJmO{RM=WdkQ2;}({_=}4Or>2OOvwFd+xf8i%x#6oZ6lE>!?(ro`yiHGfYJ-l05Lv z;9ik?(=OG6JfA5)VG0ni#ZXEu28oZCmpS?h(&*5N%8`zq*DI#HobJP;vPY1`uvkz= zXoo!LImBFcmdZ?YgDDbXVv~T=%fRP5xR}f&twKj~hMKOYXhQ*xA7E1gf4&PuiHljO z_;oTDtn!`5e`mRc=p}l=|irPpDasSa36?#@R3?_vsmyanMkzSlQ+4psmu^LYjjsUJ4tGxHa!} zO0^X=9p5JLKt);0Tkuf(R
    E_VQ{h>>uNY`F7mhlUoO$j|tWedJJqoLZ$7Atow_W z%3e|@Yw_#fnUo#KmM9=$ttb+}&*@yKC4+`&c;~DSk~L*EKJ1VVm?u11cd{-n5Ozac zrcb4&OT#9j2i8(Ul-o;R){Q}lZA*TUlV&4Z>ZXf#E>R*8NoYGQqV#kV#GNx^c9o|Qa&$M3 z_U3gV!@$IQub&<7@dn0-a8ne}>*gc@af6fMbWo8wm&XI!@;C9u8|%SwV1O~BYQTdJ zfHmVy7+xvJdgJr|Wg7UJnl=C3{!a|=^5P3`g#aQ}U}lz~i2)E3{_unNYbQ;l%@QmG z50HVO4_|n}^uJw8aZt>*X!qa;2QmcGkfcoK5504z=|8)6^C;D;JGpi@U(Uqc|@cdqcWN&;v>j3;o92S_I$z z;iLdWiPljPo?H-Ha~&9g?_%{YFJ5^i{sy*^d+nJ=>4%%!QNya@%ATr}UpNSJc#s_Q9tL*VaxLr# z|5e2V&DF1NT^>SuJ$f+~PTf%b_vYMPC1;qw# zno7?RKc5&)8XL=7x6FHxxZ`)P2PgZ%nIVguZ@)IPw5?jmG^AKgF5T6)vT)~Uy<11KclJ zn<2W)6^yeOKkIob{+{EXS7(FnCPdP_b3nSl2vyO+Z`mVO^IOufFPsDcv}D9`a3!uBY&VotQe+%_g1A&VGr* z;nAay%nDt#dg*H6XoK)f8|_Y#HI|b$sXvK#6G_?6CNkS@xeX^IVnAiQ>sDjCS9DV+ zn9~ilsl1yBt$oyR#L`v0v(4P1K&zjMz@y zu8wjs6ktSL&iN)f)Pl@!=8h#t%ce zO|kkjcLGW*n4x9VE%e!x7I@YjLXr7l$$^W29I{etQq=+A;aYZ_c&cZ#KP21H#6wYN zri;IkHi`aR2#!*Qw2Eh<#Hcs!?2#CJ;%Do6O-aayIy#mjvIexg?7!q_pp5hz2xdsCA; z*sxBF%S2PwvLXbr`gs3JZ0l*@tMY(YQQyuBa2VUEhm#i&nUQM^4pWANk>w!gQGqq% zpDqXS$85}?%w!~s&#zR?MOtaJwDilm}z%k5ZmV%j60Xc@O3;`T}ZC+-jRRqz{d zr>iJdiaxewCbBKuh({yfY7}mBF@E5DwkC(U(fS ze`-Aq3P-h+q83;#E^MOmrHro&C6|O`ZH<0DPAmq(Ak;&w7VABF&(1cdN4-TVZs5QoM*ec%gui zIbKRUKKT5jFMQ<3fL_3PQqgF86CW_RVbG;Lh}JPW)hq&{qnNF#BTdRljf_V{-Eo9l z9TP_g$fr^Z@3z7!Azlb*(nzJa>OlZ?D5^s8n-?{1{F#TSZO6We^;9(=&u6k=Z5@98nA#={x&WTCF8f zR189C|4wm%Uxnfdee-M2$HdrC_+wM89^yUmo2V_8iWbM1Y!^>Rem2Dw{S7Ub37l3$ z5Rn0VIey?8Do`vUUg7Wg7mS!fU84FP;N}i$gg>H}oRminA5>S7?BrtD9;o4P!bdjx znF501p5R_$avNqC5};&h$H>6{&%|ZHoGA4&R&i-sy48%aJxQhY+4T7+4(f=(G$Q+H z%n3yRKt&q3H_|`@&`@gcQgZAxt--@V?LgP5&Q;cRkkslJBWzGw2u+(qd)&8NxOm~6 zn7tGxXTv{HiKt&c?;i!p3WkMlei*JqvxO9UaxHU{cOJF4cnoABB?Sxw+&? zV;z4l22Uu|4iK()OluSOZ&}AzUiSm8zxn2y@u?|wR>2PiK8?{l^XRIlgMN#^cBTfZ zT6Nwco$`>xo~@j)jewAr9dkKNgdQ?1-h~S*hqP0@i7N;jZ;lp^ckJb6iC$3K=w6=1 z>ea@ z;_w~hMsQNZK%`=gOKs2QF^cIP;}EW{0>a@BOdC)TlVYnq#F_3Nm4ASkizi{g?v~;_ zEMYm2yYgMIi|Ntw2?;sI>7h~*F-aJ}JRuLaELXO9@B}qARN%r7V-bN6SLI^MXap#o zKAOSDN68(V8y@>=r2@)`MF^QWeoxo%q*uK4%BF0-D3f0y{@udA3q2C#W9T}b8G2mJ z$?;QFhb%u+IILJ)tUyf=Ii?$jmjJbi60hf`)yeQ=>OW5rgoQ_URX_g#E_@h-e)QH| zD^j3=yS6dDm^j(s_}@(78_)eS|9qQ@MUMZc>20;GlC2<7mCL*)NVfHDVE1EjH;&InQno67xd@ zlJTcG+}W`efJv|U?^j;-RVQ$Hunrh0X@IF@$u*Pb+tyrMw%KY0@0IQr&50m)Qr@Z( zc!kTs_9zfg)GoBX;{Wfo8)U9|M@nmh8+i2yT-G$nmTWUjIV*2xez?52S*tpB)vS;f z!KQOJm;78R_Do7B;zl2LyEBZgKKE>pZWS+{*=(nmg%%F8#VUu79}GF|fE9uy7v#ww zUViFR2l_#HDoV{z06;#yp%7AjQ^vwQcp0>K+QGxm>0!zkpJb}9C$Hm?hNN0rcW%0kEQlhRne0lM2%c-McHRS|G$!W06 z;ysB{bbcTp7J5&X;k-8-oims;X*Bfz0jm?A^>Axqkgx zP=61ltMJz{He`sSQcyq`y5f!A=oOR$0rSIBE(19%%(8?)umxV1S-8RtkN~)>JakeU zg`SFe;b2OlZ_-GjpYd4~iN7^N6CA}eGz)55$KO+D1etT|BkjIbWNM$r4=jN-o;d?m+j>l;7Mm2NPU_FSJ?wV&FdI)5>74Fwy1|aBsG1mC*@}27?gAIZCx` z{!T2l%E>X+_y~_t&Y|oDl?J1_LhOBXlb9bUwGv%xbv{{VGrGLhOwUi|*!=pWD(lLY z#JD>w2#;n&j=CP2ulzTws+SkBK#Iwdy-|jB(mStYB_{~^>rspB80e)3%|D_j2N~EY zRVQuv9wF#=QvIMZft4T7ZYg?jcRQ9vQ2bV`SMMRbUY2tjZ47c!uuuw^xuf@BkSA9s z>*7hAR?b2G>}O3HaI0vo@d$k5ESGOucHJ1#$}C(+1KNyxWPo{IB;zFJf?_S{##g1p z)ci;id^6~?vS~@6Ld!FjWN&AHT>hIHzaFGhr$J3V_OgK=93$J}!8bNzc^2R=&lqER zrgs!4KFUvAJ3Ki^ge?b(`VbeG?Cv=wzAMgGR(+CO?XbO&dQZ>@g$@?-{Qe-qN|fgQa$mb#}_O_hdJ*>&!$Xe0d1e0--8+aeA0zE}Lk=31LNl=p^DcFCv|o}TqyrO!kW0v}S$rzzxd6cMahb%p z5xKd)u`<~qW#H!oFIeNa7SL8X24rcK(m~o@s>Ne+A@C8Th0EUt7!+Kq0(~lu6VgiM zI>nxLGsQvR_A5USX!)E|yPO^zJyxOVMzk6yt)glee_jc-UHrx9h7<2_heg!#NL zRJDK-SBNY#an`Ex!q5!S6WMFk^8Fd>lpw4>?@Fagr`r66-%Ux?1Dmc=*GOap8%Jo% zWW={eO^mWnIRRUAp{$#jCI~VbR2bz}m)2m`+^ix}EiH05{{ctTANjpIsBx%_@mO3S z#pkk@VHtSYmn`fZjqh#d>QeOsoT70}M5>z4^O~Y=9qkgn< z1U!7onF^%j=gkj_bCr_pQfg8U#ARLfC-E90UgcSwG)N_Xft07_Po_R>PssR;nN#HC 
zQx9qnl&0ldhlql^OahvBk*8ozC(&`>HKBdUap8ql*=SHi8b%BnT-jA!)HB91JU%Zj z=Q$#@o`Z0Q#S2bN0?ulW3tNmjb+Lpi3nnB1YY!JFSnYTO#%`3XL8#CduGg9e?}CCf zjc%||rct2v3aY2um0gNaOzraGnP)>7F5lkri{$Fg&2aa_t1re?R0Q?(c9hRij>uL* z2L;3QfdJAtV%{uT-Ie&T5uL@u-nQ1B5N-Nue7n9J59*!NTD(gIh>c)JASQc* z8-C4XJnWISQ|c16#IR(Kt^snMJoo~jqE`xq!*MHwjel7lANC4Hai>;cog+B{PK|XT z>!RwU`@PazQ+y*mr|ckAyqdXTp_c_BP=kfuD_6J5y7hqMq`v>!zryOcyohlt(V&O{ z4%!k(8h|E`HTunxqq5!=x%SrCCRiOu-n23>j?ehLYq%6|Bx~6kH_jDUcK=gg%x z7iHwi9yWAI0bZa+8US*i-4vx$Y9ij{Jn{!S!epA)$U$?AfX!jVDy;re><_B@<~dix zjU?;&QDbeuAU+@q^e2;U85&nsf`>EnSl#sEO>{K+Q$G^zx!Q2!ue-xGx(iOM!b7cc ze_1+07#4&deDIZcgU%$63w;pvB$x;SOG~D-iNE1o9&8VHlY*PX0eYp1!4WDHM6EhL zP?v891J9M64AaL_wi5iFD9!)}apxA^39&~MB*sn6_kv#QTBEYGmVYfSbFZ+D$~JhW zq~!t~^W|O8CRGlwKfS`*3Wi~h%m@qGTE$?B9>Gq=E(&71I@}GOl{BVSjjoarD zeQ^&~q^bfIOR@ucl)Rtjokqj1SuK>dr9w@uzpg%#_Nk-Z$l~=nqF^3F&ZG~vJsMt& zNWslm8*S{ODo9N%JBWQZldu{AOsml-(Ws8dPD!m5j_Y$?;mTRKCJJ6_mzL38LrmpTJy&h~Xb z_+XMQ3W3D^uF600C(0Bg0R1Fw(94Up+rhxM9(q|bu$P(*2$*tQw)57f z8t(m8d?=&LA;o8xR;4hcHXzR8>sGI#NR5~ z<4H~n8WckqJKD7=Q7di7D>pOuD+SbEig^pQwwuXFtf_qX@uOC)w)Pk(c!5S|_Mzxw za59N3UM@;;8^$`-yL{mr-rMrb$yiL$LK47I*JJj0y=#qS1Z|f{XC7l> z80AHhDLH{Pc3K%)!yT{a7TyU#C_QGd3)XKqD+NoiNJ6+`q{n{y?ca`(sTf@LFz6H+ z4{F@sucBkM%SQIC;J)Jn#I!dVrD0NfG7y|QOVNS+D^V8u>Yn+1yRd(WtzBL`9J;)9 zhwC1sJ>2@g1iPbLrq39zt4Tmql%ByiDk5PQ*NeaPMU$}A4vZPKBFMoxYa5xjf$8WG zHw*8u1R%B%J`xbV1a+ENeX)@1sjM_ZI^q_m!&s#b+ZmGsA!JzO^%nc6oO77c1hc)X0%ovPQKO6U5w2|Yv6xXAucsprL`b|oV^iRv9 z514ncw!yw9?}M$RIju>Au2gDx2_0>JCYb^SA8>Y7ItDA}I<2Z(n-&%e(Urk=L+NAh z_fn}RkN*WV{|NSMkg|&1BP7{VXe&1LIEAse>O435^Ir=lK(#2o2es+Mvt#WBokD<< zXw@qn_Pt}!%GbHb))Mei@INvv5QdWxTn1)K2@ z-pVQ$I5Is*v=cCWwd=~ND!?ODW7uRIoAxDjHt2U;10ZVsqpYi#C>qT#gI_raD>zK zh7*gA;S^kh6u{$~n;R#6E5#gZ)(Di7RHm6ZmiiB@3};NdY03uWkdmd|GK+oJiQLNy zBE%~sUtIW$PY2|g#v>UlrK#Ov1_Z4q$`J?>qkuP-n^Mtq@1Pxh6sgX^mF#<4G_?@VZ>%Qh;3hE=Vo zc@YVV_p3$jVxWt5U?Yfr$$!7H@~2|&=nI0mRl5QxR|d#wQD|;mwRMXUvFL@NuGn9j z-13MZPJAD*kHSVu?(wa2qgrKBslh3s@iW}=RFdld3yWzD3V@1-PEGlXm$@XXcc{WN&DmI4E)>TS~1R5kn&1xs-z`cN8gwSZGC z*z(EmUw#_QMEfbPvg%ynm5%FWLsxz+q;hkMC6ni;if(Z{{1Ki_X_%Rdnv9!9BkkYeCvDrj~C*d9)i6K28;Vxa(#O zFO&+UVV%!l?(&7U61U?9nDqu)}!8?bg0NhzTic`qus7ah0fSur9+X2#2GF zU`9ne0INtUasZRcD6bXoi;aJI@wHFlZ5wh1#19|iSPCYYCjd25Vu*?{sd%V@)~sno zmwJr;m=W6>1t=cNsZdlDy3-Y*vNX;uBG=jFPrCn&QK*s}=%^Si#Jra}i6{$RB3162 z$xQqL!&?s_tqnhr9|AJPF95$+O%9$(fIB9lRAF0;09gEx^@WU=%@?Phs8%%d=ZcMa z6HsDN)M=SeCRW@TcgkUVpQv4M<)V9|K;T<3Mx82q#V0rBEI0%2@rixF4~e zDFKfjJs;QJzHRY|W{ex`m==if>)iXEA%MC2o=9`Dpwh(et&i2O;$;%y@wdR9-rKDQ zr4d#92y)Mt?&Nr(cM-yBc$+61N11cyp7IvU??l);wk|r@uL)#)*ZW&p97h}xC0ZtU zlMf7eJ~*6e$(Pj%?V1J^QmrSct8}84$%aFsq^Z2SXoe<{BSywaBHJS$GvUr4rUXWR zv=|EAQ(aE~Ta+@tRrZ5(1kS3#j-H&u0hpM&D)Mlc=ke!j9~>IM$-5itH`wo$e3_}O zVmGVug}v{wy+%hRWv8oAEVVlxw@Kbv3_Nm<%t?S-;MHO-DX9ca*v8Lk}?FnR`(4ZSGW2ci9*Z@C0e}EtfcxD35O!OK`6x)Qzv*#OPW}0u=#X+ zefZIfalOn)JeZy;&%?=FnH?`h9o`P~l!|JAhayg0~fJkN zF7!dpcYzm;3~WAQi)@_;%+ZGl3g3aGQp|CEJP4{0Zge;lrDH<3x43NNQg|vr9bX^x zN)bLfkER@J=c|`)3R|#=LPNjchb!5P2!>7Xnv-+7<2H$Szc-ER%}2U z#Q-1O&ExmQ7vf9mS;Wg94t*wLtxStE`MbgK0bWFfMq-=HOEpZsBVDtkFQCUDaC&9u zH@CX6eXFZpYc)lf*{8sLV7_>d5)drU;RFCHu~G+UWkuNnj27(1&if0G{)i;P-cwnG zEB%5Vn;K{}DkCEZ#K+Qqk=$T17#D?|0#A!N!SCgZ|a&dr=r zXcsE?R?fyX?T&aGTK#bAvSy>gkYO^9lJDGYY>+gZ@yV3aZ-GQJ3|a?lL!c7>Pu7&!gm>C zTtT0=FkP3j7))@xU+zJFND3mH4_hPNSW3-8$dS17(I*M|!xT(Ob>IpXmlyA^-HFR- z1?B}u2I1^(?!Or04t0tADnj6>5b+y`l`^hY%B=#V1k4~xZ}wOfD5LI5+(3Cs()7dB zo;o}K(p22U;_#<~-fn*C&V(2fam4~19_Gn~c<>>*vf-tp>51ou`-2_k5l>x1arMDD zp-KPR8i`eJ=na4qF%UV>;%|6$$$x*G-`{=i?sM@t{6k8TIL)@3$^h_;QJqRneX@h? 
zjiWrJonm-=?aNjKvYgK<=&_RyVb1Q}$O2#ryn@v{(v1Wxpvet0lJw4a-f@y~; zcsMM!{VuTPAe{0hOSo4^PA@ifhFko>kJ=unv~+BZ&azJ%OSMn1Xyehc{4@4-R&bqk zx~ecwnN#JZsz*qT;?&FtfSZUMWc|YRonmED@mYmU%CW{EsORko91P?l^AM?d9viOb zs<6@-eGmg~C4R$>3F6*Zi@%=x$d_$1d0d6HPQ&U>K}?_3C=X{lS*l6M2^1M+gjR(< zJ!I82wed7LDU{i>h0w~S893FMHXfXeT4vz26OB=_%*WKcDhxJP4RxGnY6xaJZ}d;7 zcp`sQuq1NfuRLT_V?2dsBn%@!iXNS`u{L=EXmFq)_TZM=z`k*pv4 z;T4ZxkdSx+q8CWlAcJhM6Y-cL`a$j#h$iv4;tbCDlofgCg6o}+Al#V1>X>g(&1vTk3Rb3 zS&xPiChp$2o-zt;42e>zY{D8!cM&Kr*!rxT4eB6NJneGcXZE(ne}0r*bhgLo6DK9_ z?}`pI9vXxy&sU6iG~UZa(+cUci7+_+SV~R>H;RvIDme+~@vJ@m#oyaBj00HKjZ=WJqig^&{@-i@yMbF<|Fv+|l8(}dK( z?4vOh^QCf}#UOv@jx8buh%wIbp~z!zr9ePC`kIJ9aaIA6K%TS2@tkKr@bmA(Q637) zP*rkT>B5)-W_(H+p=Hwk!9_b$;QXM6TAAyNFJLCWTMX)w#b+QV;FcJ{?XfUCmjzT1Pkx76zvt+Arnza36LDk{)Dz=auA;krJ zVvKv~qh`H~rN^?{N&9ShVaXF~wre@Hc7;JH8U&$>SMv~hm^RovP}s`)v`)t<806DN zK+TD9Jq6#!R78jH*@pcPHwtz{L_Zj0;ayV)em`^+yHgt?y_qa~U4IJ-QmkVOB?5<<3ZD|;Q!1)LFo z_O`Q>&0Ojsi}PL=xvwmX*+<0~-y{{ozs=3B6aX*0RcHok#gBZO8TV5NLaSl+6$D99 zOQvx|3dakLUISV$i`^ab3@I!hy2l zue0DeA=sJt-U z_!^TUx3juhrO2f}7Rwh;#lz#XtpoJ<39r6s89=^!9_mczDc5MLQ>#Ndp1XWppo;4l ziRNGAS*wZf#o}pL2L8-4Fe-mu?LQdoq~ikTOsTj@wSzRXIiA*+&3h7k49SKd+Sg7g zxIzM|P`Va@mlt=x>=`qPk}r+JmZM+6@>m13iQs|xrML~{ct)W`0M68D6S$W*s}3O}p-LhrTPDjOHGn`;Vhe3Hg@yzeu)slU^r zYK2siNZuUUn**lt->b zmWa%S#`?vl{smLaZDs!JVzIYirLSkqmWHCm_8@&=OaLZGimWu66+MPxc`+`|!Stw2 zdTFUIuExEbA=Up$X{-qm<}|rM*OYovLPpTu%mKB;L=BI0{4f|q%0(txP)1eZ% zOESmiu*bG&ll>-jvdOzR1oj{h2erdcwbxypIpjYC7=mW9h-APe488ykA z=`cJc&9}@QiQe$Pp5W0iV~UdZ#wc*Iei9T}UyV)8F~Mn7;_!d?_*ed|?dY^a8Ob0~ zA<9dVFUJ?7cCrCX7&JYpIcDx@C&^si?U`60&;^DPj6k;&`WnQ;IciY2Ek@BlN+q=u zSuw?*gUL>kL79}eC?19Tg|pPV1yc_IPcQ=KSEzIV@Z4PhlRYNNp0gR)GuoQ^s7!)ZT<>aKAr(dZA>$t3UY!n>P%_e2ywXqC|X47NK#8fXoef)hd%Q~La@`wtuZ zvUo^3s(>NU{i@B{p{P&A^+yqlMrjC-}WQ%j& z89#uBHqG&?VY8ca@H34H_biqFBVhDMNKsgx<=a!mcOPBxq4v1XQelbCRV9JlV~#4a6!npWmYccJSTJ zrLC9(l5j-hp?Dbf>F;}K*T{K|?SMwX5!bp+IiY7bC<>*P0<$n?Bs^lNBh!D-07(`& zIE?Y_n9+MXB&e3CKA5C&q6Apz^b1e|FH4D^_8!EfP=;etx6mhWka}Gd6-P~sOOcUr zDViZ^oXOmBhoBr~z73jEZ9pJ7j`Ypt*eB^DueJZ^k1i)+Dl zwZIHT88{E)*_Rh@yy_wDN#+Nc@q^ZCWQ#N_m6aMKzM0|{>|1>K;WP1IbtUqHK`Oe= z>AziyA3B|=#5n0tEiB5o3cj;Z22seLhzU|^C-xjV^rx3Uh!-Oxidoda?=d5Rq7q)B zu)bY-m^~?#sNv4WN^#FbiaIQ?y^3(8de}-gykag^n6WS=Rk&lwrOH3XdQG{+tz^*| zUe{u(v}1v+t#TYJ4oaqL3k`P#|NR^tTO7yLkB-Jy%inZaAK3OuyP4h{3z)Zl9KlsA zXyj%r9`cDma4Fd?F9?T8VdI8Ck7dFsMn@rkLTVIg-c{xDv5NvSO>*`7FsSd9bDRJq zR7}-kt@PtLLC#i-1ur>%sTBvsAAO=0t;S9a3KVqK2^Xti;gu-I2 zPtOt)rLGi3U7~G7svZsWlGLYgNP4+&kRRodz90`@VYn3-7Rr42QDH^mYfE9&+gK1E#HXTaP#Z&stt9}rU0}#mm zJl8UF{c5a;wslN|xmq-^xg=vjR3fEnB7{Bs6P5i^r>0HjpIdCSESzy_Fazz~7%V2u zE5U8I7h(ayVR(dW&O@Q1;>Wqor5LRG1Qe?ex^t3Cl9#*LNCePwpdyxeQE4XW7-`xP zZaZ@oP}Qm`pg~RmX~ftsFFu!5{RNh)yE2-EdUHw7Xnhp@ct9{duJCabvAA#`5DEm2 zSpy~Tv#0`kPCWRas476I23a!ijD!x$8(@iOE+UK9@riOg?nnDAR62~Z)z{loiA;fZ+ye_(RNiUjOr&5#U}Z#CXHdi;5$mw92SH- z@F`*(H7+oQgA;A9LY%3U9MY#QW;3_3QAa^Tjy?-l6i#8jhIuP>!crSUBck1XuG0fm zck$}>KtRwd&;&;csCZKsmy22O-LT~0Zc2=Bh{kpt9e8kHrv;5iyNa>j>K_*MznMxi z5xPfU*oP7YsA@7$q+r;RhJ`Qe9NK?ZD7mF70T3hvwF2&_$*jxdUm zzcE&lU@Q=(5~2v_sk10^zz8S~QD|fI3TnjP*d;}m-DW$~{76jSzY5|uW72QE1-eF^ z1s>R`9bngvAlN2ffC2cwU4F;Qft2YV{^q~Zb#r;)2jaA=5#hPfz#KRR zfY&+KOw?t$SxRu|_T1`HumlJDa8wfi_DJw49w|JBXKCdS7xmtuegmvtY+I%534#l$ zJ=7JIvoR5Y(eBV4XcAsqLyu&ceI8}^SMQnlTKrk+l5&`|FREkMn+JZS(r zVBf2*I;P}czPP!byffw6P>w5d)}uwW1!gi6gnGMwhDjaFt8(!v{4gyDvD8}42wE)k zqOR_MYUSVzep3&+c$B&x6uL{V$(;^cGp@L6pKjSfqq&8$kn=3DDL<0Z>A5h$*8eTH zsDK?I!tN~lw7~gNEFUC#Q87rHfs+OW6ojj2KjjDT3DduGsn0~970gnTy%GviqoiK> 
zypK?`+QF{3GekC3@kc-~5-{>K9&${ZJr)ip+d(gig(d9}y+!=D@Q-T2y(o2x%#o(Sl_KjScZol`RyP4Z=feumOk$Z2~fML7`3fAU{&6Vp^; zS^(ZK&Q1Nwc}N5tng%t1xh4%5kuu@Wg<+xH}>1_=3nK@iUxg=2Nw>589wb zU@Md(Ljs{_$keK#?n%`-_ed;rM0{ysD;R`h_s?UHs$H!%1o0c*fSeZ{MONZ_&i|L| zz&!hv^5n~l=dOCdbC>}o@4LlzPXFiNihrcIYZrxy*W!gKvJ9A?3aZABPyX=l|D7fG zYU}C%upp%MZn5W~POS~h9^)HNn&56cbO#RfLnY1{upnkd(OY@`;vLUE7Yk2*lO#9hib$Z0&2fR2SB$A_<(xZV$}thAZG2|~fE34UYa0vQRS%$0%(^Ju;m z1=U;+O`G;yX=*;>zQ<#Xko1hCc^TD&rP(h&z$r@88RwWa>da}m(A)hOHY(8HZ$19G zUrzL}whdfk#e-VglVX(N9;BeC-3D8?ib{djnBJmQ*9Mgx;c1Kq_$3P%VhWr>l6Y~c z`OmWmZ3LB132nuA?rg01O46qAi_Zi>mPSQ0YN?@~CXh)_jI%iQGI|Py2)*D}p3TZ& zBaX!w25Dg>;f3@0mLqJvMsHCdO!81rU z1T*%J(t2W$DlU3Y=E&~#?jz3&)hXVV8IMAiVVuU; zfj9reODQwj7_f%2i3>^RP2B3rV$h_;n|~bJ@AFsuQW>>rY;pkPE>abjQf_<9bBp%^ zv%Z96?}@$*c_S|9&^n!--FpgGZ&=m72W5C{(=P54KA!sH=Tr?lsX)#-N@u6X3C5jyF{6}8r5#fx& zr3a~|a7vJD=!GY%({)~3&Qzg;GN@)_k4J+IZ$)O_C6~vj&tL$*b*q-wg;(WstHudy zGE6AMlTRQ2)HXFJ8+kQ?RAA;jH3idy1%59*pC3G9az*Zct)T)J-@^mp#~_UgBdcnL zf7?eQdYZ&@TC_ZU4?Qy8#RV~KtkNQ`TgCJCyO)P@BoZ zVxz}fkUV_C@bVX`SVvsQUu>Q$KYhN~fyV7SkxI8XW)pO#_@J~BNTI|G0Me0Q$ ziR>3Gg9su@;vFd)w+?L%MV$rn%NAx#Y;?rIaJM{gy(`QRyhubg{(N-xOThv>B9+4r zKY7IekBiuDaS;aObuXZNq6t_bwBRYUk^_}_X>vdTl^pW^aJcVw461%g(95^C&N9_? zBH@myTw?8NBIdJ}lFy@J=x^gfzZb*5{@S%zvsqq9UQ%w#ItvoBlIq4_PM}sX z*&}8=EUN9XI-RjNhJ$Bg3_srJuk-$+xnxFI98^K=}V| zX^6RaQk$#s_me-k{PURL<;CQ9QmD)=6rJ&}a{a|i@0OY)(!8Gb@l7FqaruXLV=Y07 z4TneNEf~za$Mop@kBFhd8;30mGEWiYc#K~?DdNJ^&R&RL*{83YtnaoWY(8CD*?nQ$ z%VE4Ybt}ej(QQcv}_Zyy&-`0vpTC)%F3Lltcb$(Hz)Z@l0UN`JkV zo*d#&>;A24^B6AyI?T&$Bk=J*x6!w+7*d8-7cX>yRhlP^CXq?>Tc zdg*nz#V>tY8Th?qtJGo00h?+4^J^wHwxa?iRQodNx^}c!03oBm?&n)b1&9qR0v9~! zKHJwU-{R%n62$cou9pEQbHxib<#1JZ#vFX+Spq}LWx4XT7}QxUk}MzdwA@9KXS1Dq z9uc&U+%D5eai7BVd>rhh@4TzB*Mg6}1K>+9%@Qs42w{eQIW2;J z%nR}_`965$V3OP*1La){lv3W%iq^sc@df6$G7oBPHHuiONRgS1m_~XE2n*)loM68&&$0K^ z7l@aY&4#$8Ih-FgPlcEu6V)CI*8n^4n3T_mMpAe;8kr>0behc_kG|0m@s`=t-kptK zk`_mLc|LJk$wHjwo1g^5^+GJH-HE|6yFBd>>Zs5}k+P@)Dw8q{{;p6%(P{is#9w2G z^a3r;4HCG18V|=gmq-+{bz;*^&-NTM6{VUL1&QO}Wh1iH@zN9;L-V2(68&mT!h*Qy zS?W|&7ll$lWsB0IB@dh@|HtJIOdhpvKd~R>n~%R2Q|lr5PRhAUERo8{!id|Of0Fa}c-3_GN9GOzP0K#a)z)mxzSVAz;6%R%7gY(|e z!A1a-LT`HZYq1Oz@0vaE7XW4#f^83v3X)MKNv#Tb)<0x{DihpV-i(*rnhOf5vQ5RM z-CB%aWVTp(sB&{AlB)ESM|_@9Jrz4;>Or;3i!Z+w%dd0+xr}n>u`IpJ=S0Z@vDzPd zky3e3EF~UDf>05F$;NM951=EvK4nd^XS88<08=S{qvM-oJ~jDHqODkfG^%o z(yMWCGG>RMiI3!I34CeFZ=)cqc$FznZbD?fH~?-34Q5Z#Jb?&Vaw#Q5bnmA z6(RwV_bVl$nmi!&pH8IPu1f0Khyw2(4^cB=`}_qeB?fHLZ`_V%Pw+iJy&qLs7 z%*o&02r|4?`0wPFwcO#pml;bVv6A#kkC!SkkZcL! 
z9I%;xn)iyMpTTQ>@)-Eu0ovhA)PO)oK_v8ON!I59BQK>_GKZA z1Y=Q=cZTBLAQT13k$AxjpA;Vf!i2Sr>(s&bxOkH`2ZXlhDwLv{-C5RZoL|CQ zxYwRg@(&NPGuhfZK?!xeEIvmTkzjqW=SKBxED5M++23J}jTP7K!WCB(l~WZnN|KYl z|7yttR{%D61s!vj6W!Yx6T1f@CD`Ei_~#c-t^0DhdWvxhd6jS&PCz%a$g_tNYlQT- zAZ%7Zs|64)7`jo{-+TfJmNgvej<-_Oi#s9od2d^g!Bg(*0m;9O;_ zd-PfsIbMCh>_0JiuM9#n{7ZmXhFtFq0{rbam&7@y zb&$G6ax)DehI&U2hweRYm5lZocX{#PDEXH}$QTZAJ=UTOM{z|} zzVaN|k@11LW{1Uty3iFTj?qcOV+R!;T(PGbql@GLj-)C9T9f%1IYZ(T2cx+IKv9to z#C~{(S6jLoOL4%5yA#Gz)_>6EZ-r(G}g2>3_P#6bzRJ@3I#368(O83o7 zkOi!42BpA>3(5Bn(_p#!sGwLTat6i%P_dbSV@Pe_O7T)0;nG-Y$h3Fd(!FB3*wbl# zW1FWt1FpcP@aD&e+COHO)i47%|13A~?%YbepV7o>Lcmrt-;-NYk%jn(`lO&-KkDRfV$KYjpO7fV3y9l5uf|VC|}9?0w3!%DC)rpjiHDE;f;*(O+F6Nkbf97Q)bL@(H9inA5BZ1 zdVETb=@0kfrWS_&aZr~CXTtUInZ--yHWp0})og-PX;5Ppqj|-pn@d~43wTlf1jhTh znX9Rq6n8ZyV$uL3et6TCd3p?QvFX!h@>6xnG4=3zNuJu*L81De#bhRwA`GVJ&l;H; zL;Qr2ZFsDF&$S)6%Bjx8F;*t&9zm5iS?9XiY5rn-G?d=$smWYa1S;O!3WNurDvE@2 zZ-+fxEo};`Si!^q^wX5-joO$t-ETe{WbZIiw(^l#wa^MN;hlTlL5h!e7N_A}63pJP ztuI#4^z!2M5(4iEep~Tx=mh+4{dS-QOT6a1Z<+T4YU{;<%GhT?<0I-TXL?as$1+?C zJ8Tc}=M@or)(Ub+?-rco@R!x^p9STZ%LjeH^T zOoVxP=NPbwN#45#s zxoU<$Jxr>Rs*+)lt@qcLf||roe(9NP_bulz`tFzQEDVS8_A~C+d*F8Qn6+~r80>^{ z{ZwS8Ly%aVI+|=c}FZYFLRd^MBv3f72 zhMe~gVQTz~JINn+vb1i~4NMjGexAoOfIzORVGkV86*xNXPm1W>RNn>Ekf6bX6ndC& zY}Pbr2myo1H-UQ9NR>JnHtBs__#oqI)p~d;q%Xb-h4x#vh+&^ zME||Gixc+%fURFL?TcV4k+2k$-OS*ZDyEc#!#=nK-?1`CagF$1GQmhC+VeJSfxww< zcr@OxCOI84>XC}oG2BV@8cQbQc~8FeIB_D>#VcAY7;~JON!cg=>GE%5=1^H`Oc0&w zOR2>ZWHd9QCS&b5j=1M8qjwX(Ph24$w7PVA%kP+1c9%Eow9u>5I$nA;yU$R#J!tqI zNu+5%;j>VJP%Oe(Mn%XvcTooRdkXLQmdYFtJ3qW%qkb(@U{H+VJtWmn3R`conLL4J zmDW*ownp*yLy_c2CRCr)iI8-(( ztL!0)KBbnYaOEX&LYebHyZ2H$%N7VVpz{MG^IPAJSVw&Dxu%p$D>fNYi)(Y^p%H*F zEQM7#F3??ZNJwg$k=X8NWT$1L+;p+1C*OJem+|H&-}(D*nq>f@_jY%HXUBTdvDn28 zQB7X)D`;-ieCYn>s1=5&RnVs0XNwo<;_+VwG4a3Yyo~j}{?f~_ruZCir^F99K;xE1 zy`7SCaW*(TP}ITeuN0C=3i_*8ujYDe_$f%DDv!ZB7VnO{z4oBdW%TtJ$s4s4dZGrP zd|a?l>qbq0rGRWrP8D!vm=!QjK?qqA(rlEtZYhq2YSYeXlSJqkuN2j}@hhS3dFEZ! 
zg;=>(rrdV}!O8Q12H^L2SX>EDIVg;$vd~EzvM}b0FZzMxy5_EF#issqu#|v+@i+DM zzh!FVEtio=F;h~@uX5XVQc(H@j+B^}OSSG2VlDM8C3`KKtSmSYc0Ltph zQn96tVBT0;jnT4EOTrHBwh2bCb)}QxFkO&J4^OoZsnMFOn4-6bS*8=Ph6Ao$weBR( z<;BB^*K`g=d{@f2`GU%%YTs+ zIa_n1YOp3gF1x}H5-BkyIuh2jV~PQFJ>O$w)NA2g@kzPp_6@;cMo}Tqu!=mxJq&Ri zPqBmEt+A@+tD%!ZOo1{@%x{>19KwRmY+OSLjHlf4?jFK)H5CB2EZV!sLKrql?0wg9B&&N`VU`{;$c9u;jdKv4Sg zHqMEI4s~yN6NZT zNXF<)jP)6`u2j2z$#K}jK;l^zj=BJO3(uEF&a1|q2i&qiir~I-_mkkV^`(uVc=OCA zmBGE|z9cPzG~(FPxYzm_=hd>JTf{Tq0Jdp7{H0^l+M?p0&lbmJ~1s2C}~EW z9+jkU%6+JHp7c7)Wmctib8uGO`rL-+ExVf%y|KOxzG#|Fi@oQ@1o{nj)=PF1ITEY9 zSiLhh@L|LcSD(*v4@%%R15le6*P2PNqp*xmC%pBnCsNUqAwodvlO%6Ed~@d@{=BoX z+-r3Y zZ1K~%g^nw^@IBHfi`uk|-c*-DY$Cq$$no`%*?~CnD{!Lj&RWFsV|nQJ)g&oc4Sxz6?J4vD^41Q5%`bTVyFJX)o|xI$G*)U3}IIh*Q8)jc|= zUwr@KQ~$g&s@N4z;|O@F#2R(WEflEk6z9}?$$iZ|Q7cYQTi=>)y8vuQ9(qJ3+%up7tJj>G8rBvmWWx zRa)t-*th3GFClhH&`a4yj4;*Gxvg9NskI;|=P&Ed2(|NK6qM;C{1D>#?xQQ-k{U5N z86L>A7Eh8!WfVw2^XqQl39R0Bk`!l(S>ZRRa&cICZj4XafAOSd)BNvt$PwuL+v;hd zh7o=%$vX3JYv31yXYY2zKLtO{oBz`)+ME$pZ}CPh(X`b17n&*K~zv;%CfT)SD0`nrr+*#Vl;6 zpd((|Z(n}<@wekjxj;;Y#vFy`hgthzegY837jpbHKUDDQ)?$!mV|W&{AgxUonWg|7 z;)a`RF+QPhs*Tn{cX;h$getog*9szXxEBxP*%rqtQ5%=tXLHySrLI+;zM5NCT+JJA z#N(8Op8#0oTUy`p|HPb#nyOy5Kkg1^x_WW`suf!v(3ck<4-2+oxdSPb+oeXt2re() zdh4y2_h9EBM?f8})bgMF`Q^7`qR@k4nejAjTg0?~#Br1;uP%qy!MC;6{w7xKq?gL# z_KcfX{_J$h@dDvTP0VwE?=DAj?Rxsp1~uEIDey^=b#$w=Qt`Jh`Ekm|m}F>zs+MFn z;(f7drX=335M*p?l2^JGIYGlA>3OBP?BTE!zsDKaj;WwfGg!YDEO~3i{N3VHtV~>q zpOT6^u{30me&4?UlP~kR`;E8?1v2ko9>tfbL=e#MxulmDzm>~=^|@zXhy_LO)In3` zuL{Zp`l|Xio(8I7DIeV}8LbOiF+Xl=2q&4|w_D>sKfbq3Mcb$HrZKU-7p407Y1)BM<5l7kZLn!<~9cpyto3%0oe5ovT;YlbDO5?H-p}N z2I|4Jo=9HMUTa2B0cnWHqMxIPjS9R(0W9H0OskzCHci?uIMMtQ1?UgY%gq8)3f2)? zj+|L4Hxufp2_7!0ae9=L8-}@S!d-tkllbx2uEu0Rd1XN-6vvmXceFFQRd+{-dXQZq z2;aV3PCw4{R?H^esw{^Tra~$jApm?OB{R6vqDa8c669zl!HIi=2VMX%tv0jQ76+hE zXx>QZ!X+LHFt_fj*Gq|*b{Lr_I0;)|ByHN>!-s_t2zuo{#K-?i&JvJ9p4*5v1W96> z9)0a@T!xITWulZI6B6Sj+i?vZJ~XKhduQiW=a~Z=RYOT#p@w3oDSFTv=EXBs(8uIi zFsCT*2b>Zq8gUKI28wgvD@PHvglfcuiaf;p?Qp$*Ju3|El|lp0K0zV4{8ezqRm@LM zeX)i)nxTr@DlVn%(JQgmgVDXB4PDMC`(xcDw#FODT8tG)ou5=Jpp=1 z-ko7F0zsrHZ`?^?5uUz?&C5arB8lt<9+W>A>(m|`wQ?b(&vKvKlhnn1nnK~ z^e!vSxp()|5CIzI%i?tRU{9$q9Q4>7q*raVcfOq~avFQxei| zRNM(Tv*J07;qPJ$TZ%Ww#MS=-&Il6z?(hC?+(PH-BoGVwFhoV|5xAgiXtN!24yeDBg5TOTUWcN>Ig8>V?9>TbL_>w8NB}bfCt4 zaz0;W@VxVB)n`vh!*LtOM2!qb@8SGe&{n5j2wr+p0BA}PZpL8{4dWz14Y${AUXw|d z&4+^{u3vj4UIPLO7gRaA=j0sMN1*#a4aFQ9;;kcl`U2{rpc>KmV7M(fQY26C+2*EbbV>L4#B| zQJsqZf+v?EA_eJ-n)OnwxKfCUdW(`$JBoGUwutV9miK2;uNTPP;1p0%7Fg@b3ivfk zN7e!E;8X5)3uwH2!*eJ32gT5gBsPxBi7*AXJQ(3nP zN@opC15B$S#;s{i8Qi2DyL2O#j&2h%o<|?VAX+H|lWGxqrsgZ%q3C328hDWU1#aGt z6y$o0P%@%~Dtm|fNVU_0qw-Xc5HY6f*Io(=-<}Z>A3kp#3NI;DF^{WGLjgV)EmExv z6JPMYR`e_$ti#jOy?#AyfYZ|~|Jpk%6zw15mYX?871s5MN4!HNe<%sHQ{Em^!Od7GXSTM?8+RI*m9IBeYw&S>JEN&l^4&hr z%^=X$@Fd>$8=sE>-NYph&(!^RUj@nR>U|T|t0&=k+&=Bk9Xu z_G{?!UDKmCZqF4aB3)yIA13Xx=RYa%p=F9^SoiI?E4W>f#iWtV9+bz8Mf#y_WO?x| z2|6ruU1wZ=;H=8VV%e0={&$`N!Af~s{RSy`)ir`=*=FPLEQ2FdgXKI467|kf!GYBZ zYtA+AVR5~z0#m34h8%ew&Yn|#@(wY=G;uewaQQcG^io|Jfy!{#@12lig@5w#&a z{Wo8Z%e2p=x5P+sy#3_z+wtq=f4Kbjmw$BmN2Vo#uP(OX0ZxPevxkNXYL#FxjzO3| zA1n7Q?g1~onzy7-!2$47Gj4NRyO{1(tKyEWf)HOg>+*nfsGsij#t#;vOH2z^BDeC;xKpfKHvr z9RSLPERh+ul~ITHHP()Xs&luuyvlE+l7mQDi=mCH)d{L-P~6||un9P#E%^jztPb&( z?37yhwd?7)rdps&c2T_P4Lko^LRKqLIb-n>C1mn3T6XhZuqsVja!L(+8b91#+KRn~ zv#CZr3Hepgw~EX%gRZx9_vR;zC%sr(REbkZ#FTO{+;HCmd#Z(d*EPZ~tCZrekUq@_Numxu&a6!&0ot9$R#QgU=7S7>4jw)Qw-U0mn2N{4 zU8H_Ydu&@rR%;0*9Z-+4IvApwVmSpN5+RMMOR|gi2FbqnyP6p>^G#4u=GM%`D1x=s zc;~c^(cP(FQ<9HZ(sxzfnt7uadbVqBoNZJARRMcmk*k$(6LMpgtuK-!89I#8@_ZlZ^;L;N9 
z1r^H!_%BZky3R1$nBCg3<&kiKwWga2m-xl+b>b>EG#c0-T^ddMLyPq`;oJfAO2H>z zPZu`N`eZ=aYfJQ%fzX{XC{F*PhjpE7JP;+P5M7L$n>4`x;uoz{1SQFbz7?xgk-WH- zpz1^+F@Hu)nNdsLydFzGzvZixJ8cK{9Uv-(QK4v}Y{pEsi|cgjfVg`RJM%O6JU#@! zVB&fs5(gfTTU<^@Ksqfq(c@9!w8~P;pi4^C-81FvRfCfNL_h1#Fy)M33VH=Qnv~bC zr-$}8T8!`4CML3O590ZYX`W}yU8yW)h_+0sCGP8m%|%%H3z9~Zj~9nYi+QK^*|L3$ z7~m)!pwD`Xs|kiw<{14@v1JeDjn_=D%?4OAk+Duxw26)7Yj&}Ot;XCh?nfbeQ(9eK zeBp~Pc*F!YG9-a%T~rz-i{r~z_V@R5bSPIv&Pue)HRThLN`zDuRZs2WHu#;%3?P4n z?7LK`>B0HKcojKr^xB9|n8ff;(1^qDdU{!v^qWd2q%0QBHVv5V*;ps5HUdH9JX2UJ5SVrI+Fa&5?DJl=2xrHrXN$*5wZeZ5^l4Jb8n)j6Q zaZX*S;sJyPxDru29KNTS8>KDE)`M_ei@_?_x{fv0`T!4cjJyDk>*Ia=X7?^SX6#Ogx8@)%`N2o~QGSy9k* zXSKWrj>tTT`VRz^E*8-iF>Oz3317~B24#E44?|9V{;pRahTH+cYu1xW;u*8$5Tb!J=jzVT63PV!MwlbR9LIS z6?^o%3ZmPhCEagpb;WNXB*P(4k!&Y4Y4=;#u^PQNP5h~gGkh2HKtT1zCAWkOG2(Y- z+tx`)4k_no}>CtxgG^33v%%Wvi10?mBkUQ~WkcqBO;L~qEl$JpsAZeR?boiq=EdoX zhfvlv-U~{zUws133c##K4fh%ljyOC$AXhp!n*xq7hPM|1KcbVtD4e92oqoUx740bb zdvcIlXKp7&`be=R4{F~zNkrP0)?y(V;>1)V$Xf}45O1W3sV~;vl}yHEQIxNgtN#a= zrLYL~ZAw}RaPmnxy;Vt=H=3T1Uasxh>qIF?Y*R1K85>UTHcL)3*PFc+qd?nDOx13$ zNS`gTR$}@$$bTij$P^}bHxK7lPB~@K9h8a$G0bSK>l~OBw!_Kl+ShY|DD$eqjl9P1 zL~Lb#J2=hi@(?z!G5{4P5}NCW{iEk&#X+RT!*R_2@#S~CRMM6>JgOi6jKd*LfYtQ=A0&1GSjR)Z5G~LjRtG7l zZ-Hru_pi=o!KX2P442r%_1-CcJ7v@i2anmMU;kTa$}S5c%m zb5CIVe0)1|AV7>0U_t&({`{ZDL#R_5m!MhX?Ha9`pvGdoyXuv=e*NXRMTqp6z#chJ zZuGusa5aLrIpK?&SA3y8TQv|JK&fXmVNI)>q6Pqb5~3j$;B7pQT~|JzBplo1scPZ# z1fWk}kX+}Ueu~j6pkfBuJcS8jGKedRJX)kE_guHe@0^URL_8QM97QM{e!rS6s0R1a z&5vR*j5m`~^1UD^ak2`_-#w}(XONtva9OPV+D*^%CLs4-iQ$$Vz%Bp$R{?T<83$9j%QYEfT#mP~za!Br{v9Tksi&1C#2c_Y$rqucAx;yPSOR%Vn|R5Lr! z&Qm_G^!LilDX8j)Z)y?D1}*%MXCs=)q?BM*SU9Ng-0wZ-*+}h1jQI;dE%#Xnf1!k6 z!XI9IGM@!iBR3^CC#MKpnY88dnCKnZ?(qZl=8gj}@tLRonHdPwp>&~5#DLMkJZsRB zB}eqn9{(`jL?-2+c<-KBQ*DO;FDo?-@E%|PY-th1ZkxoLiR{D+JuMd3I zhMj!?&P%l7)?ikNQZT#LTSTt}G|j@qTNF%r{IeLy0#2O4J{L#|#wvqP|FLSXdv!qX zk$sW^_Q`*_{F5Na^2+ko`*EjsAe}OkuP!g{lqeD$511ju&%wmcUqID^&tfSi<2-$& zyo`>@vTb9kO44|5BG6|5Kgyy;MIPrU|??sW~=*hH_#l^Uc=;lV5)MOjs z6`$p45xIhfu;mWb?r<&gWsn)LO<_rTvLtooW+i91=(SVSR&p~|QNNVORbbfOL1bi* z39pRDJ6Adoinohg1w~7hjJOLtN6bWd86?RT8+3=xvEIYiUwYlN!uC`ytCs>i)8Vc< zu*94x5kZflV1A;a$}x_QybHfsdgr!5Smur;LoDms;lBAvJP>J)RERl#eKIUUYGasE zh_1RjS&KqCUhUjIUxmu9n)Q+t&O&)&_@riSrkl63zZ*7q>i)~OH*I-Dc`#XbMUf0XpH{0Z+f+?JgyfB#NS&JF)WU!F zd5`J_j;x`2ZN!sd2$be{GFnbctgZ_00%FTtvaS+1gA^iv)lE83f~1gT&AYYRO%Xs}xWMe_uk%jyD%Oe0}q>82^!a>EBFo>hx!&&>Mz zo3CAqSxpfje;c3ieN!2C#^c4>TY;HYf}f$kdL&jL6p-Q|@so?kpFaMT2SZ+*042%5 z==F{eefY_X!2pz7`&B$c3ifg82n<*t;h*_eC7*lvbOf3?->I9MDj|)ZuEa)ce*B+2 z;~QIipsWEUh`Ww8eiJdg;s6&Wt+*IA4Jlc=?|blBlpxxBTjoa;rY`q9L=q#7`!Q1wY^`KcH02zt@ECfCr_TdJejH6xV9uiE4f#KRIS@E zRUnYKo_bHt`g#Ln^XU}pPKZg}Bmy)t0W=kYaW>)SF&&wVCyU!bko(D7{03eG%K5~B zlez(lE#ATLKg%5=pQB*PL9&oy6fCrP+mnY{GDyutY8-5I#-m49f&;MofBodg{$@+( zeW!SRPz= zgKeH?S`tL1APPd#^C1>axsn)LdQ_>DIon(;G3dqPXG%$rKgOZW26-g%t;e;;7?&S! 
z^49dw$P3mHh%_g@*qrMmI#22mayO!4r=CQ6nxZa~NEG|GlVI&Fa}ZubHJ*zJat%{L z#DyC0!~P~dW7&Fn-s4;e^gZ{deVV--7s&y5=v6Olj|vjiycq0AWj0CYMn&uD0SBYW zYEhrJO1*KxO1<++&r__-f5D_yygM~3F?_4knaflf9A-e8Qn~``{;t<>NW+`9Bjs0c z2HQam+U{pC9%TstO{{y19^jN+2C@ z^=eQT35$t_{Z@nQ7?R8Skxw9AKVe_ufNrWcYvJ{IsRsp?kLO3efa>5nP(a;|&*`a` z%>_Vqz%Io4*{U&=j%~g(+^GfRw#TKvbnDL0^{tGlk*~v%jwAZ@VbTuRM>-jAF3!K< z*EQ5{yl#_x*1l%BNbFl{0si~PD0fw}fDqvf&PdU^$X8bWt;S8go z)=w$(ppxjVR7XskNM|10JA`jXr8G0Rm&xn`VwV8VCIfdW;v(8sqn2=g)yEJAnK&{W z4!tehdUA+z?*;9lu^7nlgn=V_T-DUbAR8q;V^wN)3?YvZ%n*a%h&^)EK+NqQm=$QeK%I!7oB~QYOXS`AX;@L7{}X>} zWuzQ@lb8yv<992%&j#j2JgHV6lRn;7hYc2rQMWQzRpwTD zdet#KhTlb9G!x<<=#q=P7_=kh9>qe}Cvr2P!718Bj;7AV@#LQCb>odbD}L;jO zT)lkF#|S+KIJyH~_~E#B%+WW|&pbcXXk^J=19u2@q~SOJiRR97oL9x@vI@8rqdEL% z-kMF{71+IWUJ9bC7>qib$D<838go0RZgd685;s8p`>YW1u60w%A5K-RfUEqfd&3=C z%E#!9PI)mt{qW-NpL}FGKn7L)@uCq6W;qgf`i0W!@OLouiZ~T5 zK`7;ZzL5kjKNW{ zaDefKtRaKBopRY9T>RMIJnfVA>mBCmVM4)7?C?12xHg|#`He%EZYaD?=(EV-;Eknu zB@>ub52#Sruu1+C$3n}s%jl?Ocl(AX1?>5ku1CJm<9}&!1ARZ%-9ThMYt-X;WRi2< zFR=+GIc?ipMUB`y*siOI6Qmk)8EkuT@EyuLE}^-c|EH4A|GR&UH{n=6OaX@x{C?0J zQ9$+bcuc;|>{zF`Apd^t4^eqyEoz9O`>+oBLoZ#>6Gd9QIBJ4?8qXtFa|XJbis#W$ z>G3q?=zt;1t2qMl*2F>i${*Tc1e@aw{?t|7rHv#nf9J_>3&Ss_0&2qxQ-~dh zxGe}E9C_c*uB@BDh>#OfwS9EQM>2(!99{I(4*bo8CmIIN_jAmZLC&78TL}Wc4Bh-~ ztXnNW1XGFallfa(EE3vG*9YPH51xFY{~PNi2OT!6Qzs^1KSlq%c%PN&(XY|-&MF!d zbB7Fhp@PsjgK>Kg;-_1Ae+i9}_6i+bvVsazfBo~4m{X_yjgJX#W%!CC zIgxR@%+`2OgqT$AJP5>!ZFFOx$sZh_0lXdOK_JR7E-DnB^T~uC07q0k@K>j&UyaFa zjcdJm_^>MoSZ;0VL&47RJVEQF_!)3iDf3MTopu>!bNNo?Z}j#b4j=M)OrzN~W<9wc zaJ(UUVtA~Zzw?(jq{|l&EH(#yLM8?8GV(jV%{hA2vJgW9)opjOB8@KGeyY@pBNxqF zl?UVP4Soo4){+7ctPqsNW1%=dKEUJRV;=xV9i*0yud#G$$2iFbs=?z30_v{>8{T~s zuje5Bl7FAXX}O01{)u9twD{nK7p{6b_xt6t@2nsA-p3pHIZM(hx(p`Drf{~?_4C^jO$=gawqE`_?H<1sZ3cLEWVB*XfG;iqQObG_Y99-#A7@Cr&EX#?^2 z;3&(yvIpPgUpCnyB9Sk|PxWKJ{0m<&74SM)nb?Hg|eI5t)w0l{g}km)YME2jV9Vp{|_xp z^2)&#G2S%;3A6x;IfmFHIGSq&%!ay>qrCIZK?xOdGUUQiH>F@QdJ%j|2l;i3E1yRE ztVDdj<9HufQf>z4F$9Av=UrxK3(08+2(l)DPK6D)f&7XkDy{pFfzDM0ui`A? z$Sdt2$%@(|!5s6&ru}LR4XKcN{$!Z@j8OCWqDJ^j!G?Q->SfcEPnsq+=SrQ=1OYgO^1Y zMTq-L(^Idx%QTGm~4pDI>?u+k5od=N)UQ^ez9E@@!} zRu=COp2kzc%Sws3q9L+ds5(6Fxm*XzV7lmn>lD?>UqHe?b@6@g;q%R5mdd zhI%pS3YY0zkQv;jmu;fGI#Ap8ztalENr{|~yheq;gx`y1Cxohr!em?hG|y{!A}@#x z#FS!KVdb0JVbuXKK`s@N#wkH$;^|Aiwin^lR$C+pa(mGS4N^0irl-d$Z<3mgBJTlc zgxR&B^Tu&llqMZtK}gH?^a_=wt2!k9%fWD$(AnW&A3W|GBqf(XagiLio$Wvb@ssxg`e!MK5%}-N`^dwW(E*?M3Lx4*P zmwzza&0+lX*>CyF!h#oOLTA@yecEt zUdOno3oJoNZ4CzFExdPxl=i?Y!%{T>BMGgv0b2cq(Fx_%eRS}m0=+2J19!O0z2~<| z179>}ltS}3%GiV-8*7MmxNHY%K3@iUg z^+9X!6c%@su48JSk$krOVqBoVr?y?aUSo-i!p6lf@~NjG1xzP(QE7%18bY2fW!cQ? 
z9CJZ;>s11!kWeM;xC6w5Yc9yA?Cj>?kE4g$_};~bamduwA%12iMFf!QpPY_zTU|O` z25xR2X28tnO*@dHMHj~os_F?`+0^$S?Pbk|kVjhu)~Q=*urn7#{}+b{W0|rcOj3HF zvh(Cahdje{FzsYXg8PWC;>t=CFxF&zW#1ib=zv;;7Wr0(Su=%h-@kbJ-%KY!B%vHj*7m-e6UdSSkM8p9$M;x<`I&H+>)rxfHYf*Eg+7A+MDa28)%q&dRAjPD#7ia<79DxK>L%4VQjOjO)v@ zd2I_~Vc81Kl;hQUbjB1pf8!zy$jg*q3=ZEwSTxhQuD4Yn=AoD+(Hx|CtPSC+o5e5P zIX_kXs_M`Yw7`r#H#i}D;WsF6zH4FOV|Zy?x&u6I^5Xr|O6LaX*3ud@Ei| zG-<-iMX;}e>QL6xy^9kX^5pxdpI+e@9gK(v{IW#Jm`?N9{wn4bgu=cvt?O3P!ZS0iGa-~+Pqf2!VZZ~818#Q>kU9p$K8d;mU``Fy(^Ho`&F-`+em~>()s^1$kt8!OtlsO%PP#Hxr z?{B^oY;|YCoqv6pLoZ+5(dt7SU>-xfffK3te>@6-Y&Tq!zsXF-C_!&q_ z*@{+vf8SnXP8>ovL$4TbMVbO+9GrCNxE2wSq2+X>r5me36!v*gGO^V1Po+*FO-z&| zvHkM`C?-R(3mV{@FjJ^!SYdX0T3`jlCNHlCoxk`@7MYi;1wmm52!LUaM-rWx3}4D& zQMr|yB&8#-ND{Ig`9l^~T+6a>&9RmyE0e1?`bB{{L~j07&iJKL>!P_%tNy*(e<$|p z>7QTx#FWNPfcca-8J`2#%RnmSYN` zgi@>`_eCzne|Kpd&|mi-{bv%9XO!>5J?u(6Z@pW>^du9-4iU32ohEk_yz)m!Lyt@) zlvJf~z@ya=+6n4WCo;bE^gliMZu|^CO&0$X!j)5Gk`WYaaF3vUgZalVX9C=B{Hqy7 z)mGKdG0TgOF8(eSLUv318AYt@B(7#l4ERbokfN(2*b1(IAPMd`-!GL7B6^YLY z+hscNrxq6qde67&6~VWc1yYg;2iz+0qzWp#fpW{J`kO-F<{&^m#;P!UGs1lLGj~4| z!+=GGb8r4D6FYViyUWP8t*WH=k3LlzwdpEdL_g*xnRAf3z&1$?saPpbq`Ch9R2-dKv}aWPZr6dqcboaC{lih= z$(~9DD2v+T&9*Y;8I{GVn(bK=TBo__5EalM6d7!}zShJ%Sm<~_;oO2(3x9xo1;x*# zy0mH4ONposYphTrs%EP#lvJx*Pd_p?it_(|5EMTiXS^1bvH%-S$Ed(NB_7_6DsZIO zAQ8SyW|q37emOJ2$K|VNi&_CFVyQ+fZ7!{?`s2$~=#;?RIDhfvXJ!&`C4E4I0nB`faDtpSk3I89$E7{j@KNgd5tZv+Z0KFPi#rK^*lo-zRR0lnwUifRtc zoyKACSJ6LlWw9sy<8p3<_7@Z!wu=>`N{t`J3Q(n>jyGmqX^ydig|*3SdhA;a`LjdN!p$h=_tA%vlLUV( z@}+AeguBH1h-pnrsAOdPAj>ho{bBLJA#C#y%k+!Q;uO7iQBp|XpzNo6Xy5J^4skVp zzg~PrRM3Z7>-YSL8TAuE#%<5j@hE=z16>>*3tv5al*K^kjTlU`nKf}KIPzXl4`gn{ z#myPu6UG#(kggCXko(d1{(BeSi=}s2HSyFYQ1+M`Gkq|iYh+MzVQC|Fkc(YOd4BS1 zc#BN1shO99vsTs?dzt78AZnP)dGt&?L$~Se0!1PP%mY(102RRG=^C|_Kag~>Pj!zN zOUV?Uk7!N9dRVS_=jE~x31Fdf`};QuIqEJ#0V6%?B@X_#EYb{!$B3aSh6*Xcg)X8r86(Brh_}8dB2=fZ0fp_+M9qKTg`@1GMf0b4D{k>Pkt@jB!Z|8j z;$Qn)V9ZQkT>nK*w&I@>y?&lT;?>LCm0JsnDSxkABnrrJX%_IrGmpM#5reM@E= z<$S@9+%Pd9&iiU)LqDpW>|`}>L`H+^#-hg+?s?Wvrk60aFQ2S>JeppTTuO(&z-2Ua z5uYH7I#5}o2HF>t1vvjV@iSwl2tmI*OQCo5%GG#gcu=tnYRqgqRCx?}5-%Pf`1&$Jx8DyCE zMFC!IpbEckc2|J!AOP;nUk8KCA7mB;sCs5U*i9GM-M8M1$EIg8s|z@6N#IluU}R#j zC*isAR*;JV*oggdqlvHjC16LH(ZizS%c&%Mw#l+|deUc39-SCZPHFm@A7tZ2o}eF# zolu6`6*QUO3I8VXNE!H14m(qZ<-DTO6qK3-cXR|d+sWRLKP<)|z!EcQ*?iMM5lD>2 znB0f+xGWY38K~Z3AaW+DmnOiVU)5EhGw5k^NQKRKFFK;1gl>B^<&8FmX)F%U-Wini z{}E>d>Nqxno;+de&7-?{6Mz>6N4umBMLl`U+h@8^aJUfCQsBAG7p@FHG1D>0LJQ29j-{MAwT`Z$GQGkg#;`fXH3MeyO z1!z9mw8D4=wHdF5V3t4W$@iL9>3czmOJ`I$7^IibOCi3~JhiP(_I9~YTRg|~ns){@ zuJ2Fc-DEkZCZKVOI8#sV$`7siZMq(JTNorL)AcFw3|YNm-N7BF_cBon^{Qc2s$SQs zFToKpDbi093#s zI(|cZ@S@n|rypE=-?Ie2>QYb7QV`D{H7{^{OPW;dj@=qI^!pdzw~u<3=DAJ$HBng(;7Jl~ z@o3vb>+oE%Yl2J|+5gwYhe6?Ys!3172^=h3sPS%mehFP7L8HVW2M3It5r(e9Z3w^7 z+a?)yZAIYUr>98_u3pt9CTR>Or7y2As~#Wq8bZYv@oSbttJ3-a;k;|(Ru8D0LCH!w z(qwM^7rvY}RBQO4Zq4yOK~edDa(Td`@nr{5@lnw3x%Wx zzEEJTau3yU!6U8IuEu2hH{Vcv4EV(0+OOP=SIRCRR&2VAwq&-l7;liW{9Ni+ibJMi zb04P9LK|B=vVwSnP)+X=>g#>GygIe39g4yVSZU}^CL@*5&n8Mf%q}mNOI4_PoXA9h zVwG(~gaQ4V;m@tugJx^k5u#fFg zWHv-Cj*4xPPpMbrg{}~PkNIM7R9TPG_rdzV$&GaJ*b`PlE`XQXXoWPB3=usjWftTI zOo_CyIJ;okta4V|#7Q&wy%;_M^Rh2q^&6$y1I~y0K>?-P7A5GW1C?k*UCRw09+0QC z7??oqHM2{=w_`BUek)lhHuSMt)A21Q-&Wzid8|FJ4dX}Byy&IuBKNijCIFHGmuoP8 zCVe!uC$(&jZ*Uj?@^^opzkP7=n< z4_uoMSOB=a{N%={$gJh_f}NaxITNHPm-3Mrgi3au@*MMp7!ca~aC%65Jw62CjG5*A zT%YUtqrC%nZbaw8_2W@)5Kt}>Os)oyljSdp&DOI|O_e%HLY?BV5E|5?_0eD42!V0y zq~wMx9ZJa&F_k{M zntZHS1^UG~=b2%Ho|`upH!Ry*xKNr`P5JCyV(a8y3@T83&5)($owt}}?lQ>ZQA)M- zAI2|N)hjmMP2T;w7x2c>s05s-5bpDWzX&9MeC=7l?Eczn|9%G4X?*Eva7C#oN4#0t 
zeZ`^x1AlVQC$RN$SL#LCA!RkhJ4fKIJfB&DJeB~|x-TIEn9mQYb8w{-Rkma17@+Sz zc|XP{KvD|rp{nhs*A-G{9wZ=ess)=Pz@H%DF@XI_9>J=Qaqr-xj-RhDEid{xI%jaQ z;xjJFoEb_zy@}#vEez9XiwA+?L&ujOs$RZ^cY=3kZ!dalDP_J}s>OlV#?LCg@(nlQ z_d_zMCH8}!)DkH0Cjz%b4Ngiv-DM4AukQ?{zrlO`C)^yg7NZDxEb?r0&pHytQ~$xT zVS9Ko;gDNj+t~CXV7r<|geqR`Wjrze*@Tzxr+PX!nwQLY9A(}+_;|52}yiThfg${Ag4?vHtxQ(8805c^`fc0d!nFToSXg@#|^~ukIPo`hZ>Oi`|(lYHUQkkdg zI~U)J;ZZUAyPnPpFDq&DySis&rv`E6ax0f+mp#UDKOIQ>6oob)qjOTA-sJ(w!EZ$y zyjetBA#bn{x57ddm8k4Nh^3R4-i*-EnYOXrr>tF-Wv&xd5^{+*Tg1dKJjpm5+}osf zd?xgmnFe(_GuednLHc=ac-i=}Jz{Sxe#0!%iWkSASUIJ5xSX3PMtLWTy$>z7xaXy3 z$Yk(vt&v9zhxc)&OrtAtnrrRy9O7@x8F2cSQk z+sipgc={r_sqqv7ifb_5Cb5y{U&sn}$4~B~jF*ku-KCgE@p=Nj(k&8m5~304z(~-E zI9M-yIcR|B|4EE2XT^kVM7_HBjIx2ayyM{Cca;`}uU{!$n+bX}c&0_IlIj7q9Hm9Q zHrXNRYrVBqPZmVdr(oQ(>2aN!p7v?PBZr^LTifE?u`D@m{gJBgq&%WFUbc%lI8W~Y zow(B72jcUT3&J5J;x^fHVPnlSxjCxwBBjfuXaRQe2W1B649QtCEgX`+a`(j;a74M~ zS~x9mv9;`1kFctuv1Z)iQPQYggw<#{bnXF(?6`rcy zkjcaw7HBdAnY^HnsJI>LP@JqnY?`ly>y|q_Ht6lqFhu|wQ-AMBfhh0p;vsdUi!26G zjj|t0)B(&cE&DVS&yyqkL}Ih3$nf#g|7KEPGANg!Kx`G9uQ}Bq?!h(KJw}00pERyv z?uSX0TH#GE94Rq@vKi74)BsuK#RZ8Jl>gm#%pk08SZ=bGfz74`xh@`m=1o7$wRBdr z+vXVo99lA-h^1uBG?n{PzHhu*Db21$%h{X^7lH`PUD}(7Aw0U`DJ+ajo*gZyOeNAO zsRuCO;8S(Hc00rr@SNh5v8+BF^CmPje1PQ@1d7uMfERH&zTLt)nFje>I@x-3N-@D) z@#D0nThH&8LaP0pw+Rtx{eGkAS5MlYC|d&}{qnWTWg9mO zp!VQ0A3OCR80Xq9Zw-rb6)}dF2F3kzS#YA)ou<0UDIo zdQmqg66i;%K+jxD@C=b^60W@2pDnePaf#5uJcgqPq77To$Vf@9uBp;pc4v5E2Oado z=Y_%{#I>1QeXb!`46a)-XK|fF=MNTYEKsSDCNc#W?pMIg{qcd&NhrRtAaE@O4b*uo zg5j8#7|DD=_fnkQs+s9m2;Npp9W)tWRY^XYa)a=KExkTwPuTAJC3-{(6hIXVBN?CZ zC`#wrdQ2OyR`C-Zb2+xQ@bN5Jif363VVJJ$uNikJoN){WY~lSNJiZIze7pe7Fg?Hc zV)Ax=kiva|I~>$<3`bS8v?g%c~KmnFj%e?#84tN1Zefl^D6cm=%ZO~J|;=ea|P8|Dtt z_J=f3e&4h*Uw z2Zgc9PzCxa^1)H+r$2u3$;A(23@|aC&1{zo>Ftuc3)iIQeeOMem+;bFW z;mB6(qy$+VjQTB27&UvD0_QwvzkV2^yWugs=(2kGUr(sD+ zID*911v_nBp$Y4+QctSdnB*8M2(E4*pxX^6c$+5UofT1%wOo z@OC9h#>RmU@TTBmN$XqPoA^CB0;41q2Eqgjv3sSC z#vIbN9EzsdPXyD5Y^k)^Fa17(PTK|ZOO0q#s5$NNxdk~!HNw?pU5+w1*tC4hL-w~2 z>yA+OW_+yXyn+C0rM9NplFni#Je=&pQ^b@hpIOA>DLfCanZ;V8&1%RH1FV90p=M|3 zA|^bK=HmfcXu;)QHcR89W1(K{(EQSuf+(}S0&^Bf#I)@R&KEq5+F7E3>r{J`2iX#= ztER1*d>-HdHg{g)^BJH;SN-6kv{ps0(fP1MR;}dbAUP}7AnCSHBSggKu6o&BU^Y)P zHK6imyg}1(&{UJ92)KjMdDR*}KQ$E(w)Ql!vL)`@Xir&KOISWq;Vn^SA>FvJI^_fa zji+fyKxE*s_Ln~MQWlV3yFF`)sWH~+&G0Awo-DZE5Ap4uh^?Nxbt8C~JZ-in_I7b~ zal>K+t`nlI`m&NrIVya= z0WJLol~!>#$acB^z|aspL9*ZQbv9$nD?t$>TX$j?)Ty-0l&~I*Z2BLLIQM=p;t5sk zh5zpT2u`|q{K5+_cyz8Wi+VCD3(Wv}S4~cBcD69DeVCuisyE2tenHhxJ)z5a^s=Zx zF4aEH%EpEn7m~D~r`I@pfQ-(YU}mPY{i0f?diNMMgQui6%f(|e#F8p53e`gO;&+OI z_`$_T!F)h*xyHRsx{6vJPjC#%mrVzNyF)uQ-^{SiLK!j=mXSL*-i5qAC?64$u z%?iaD$ABw7&#Rzt7eBUAfSo>&4_01)m>Y+{zV$EtkryNlf)!a>Effrf0AYpM*%0c8 z)ZqFd`O-{z*NLlOgC{C7sFtF+N+rOGZi+g~E!+Gb-ek(jPdo&8j`GCpLQOeeC$bV* z)FJ+`i-2OunPS_rkGB6I|Gqm4O6(IY%#JN|68E>Qd3rJ!LAb)rt~a~Bex~0n5iGQ( zqjYr8&dDr%1CS$0O{J{BKFJxV|1HzYbm6)+EOdxH=#L0CRxdrVx6j96>3zd&3Z$TT zFE_{3A}lVj&Ij8>j=!=DO@k^_D5|JY3IuwIY`cnjTAhAMBKec|dj?2M|0>*T>y9n`hdP(3WCV|&ESKq{aT`yaI`>!cr0RlAmq^u{2QTx0y0|>aKek{ zRH?FrO2x!XjuGD{!)(uj6;5u$@A2e^9z--6YNZ0GApZXr4Sh zpbjwK4Z2^4frFgmvrH*_m`FHTPc`nfr2_AyI|r$NR1S+rid^FuZhFS?Eu}uRjG&nF zaO?#Ug5w=n7anugC`4QoMfYu4Q!)8D2pK<h-63P!wTXH;@35evmmqY)l*R-QAf?c9ZBkHO`rm%1g5ysh_# z8J(Y9y%Fph~HfNFlI~oPF8~B5TvBT#*cY|6pW{Z?+J3?OB`m>39_`6 z&=vU7+k~r#c}_Gwr?IJ7R4bd)LjOKw&v2D}{G`}-ct-^$^W7nTrWN6*|M7Q!7VD+*#>es8 zC4eb|8VL9Fg5u3NExr(zdp1B}G68hCX7R2~SlAy)#ut^p2R=TpoGzuidMZ)2F_xNT zVb;CrOGc$0jz-5pYqD@mi5A?ka8eSpS&3V+YcCS~*D09sn<@eLfNlZLjMoD%JShQ8 z+)~m(;!{EfHh)1el_y0Lb}Xj4c~TOx_t960HxzeI7l7pXpd8!!-jkoc6fZ(j*Rqmx 
z0@}u-3jP3JH>Y*=sVkZfrGLBPW%29d&yB%VOC;|U{7}b5UF3{aWXWvrjd&0Ic$qou zaaJ}`{Q%J0FY1I4_1+GI73I7+*};CiwWU<~5nVV`e%(&FOv$-%USkhksr%{oe)s1w z1HvaYrHn%}kPkB%{_&Ha#^wFxuf$)shI=J(kkX%wg;E1HHXD}QNWKwec+v3CHbgwU5+5pz;FR^^^n&}1kAyF(`@vvNGpHeV> z5UGRJ9m{^W8!v^!-hc9=U?SlS3S-NqUfNj8jH-btyfHeZ_lxur-XE8(-%Bn40jGFd z>kpsa)r-eVM}>dMHaW`~UVT31#I;FPnv|?-D~rK85Fo1U`Vc7uoh9S{DX8bjHEok9b*i!GN4Py#UX`HrIo+c(J(GdUM8v{rv!|`d+Tb2am4Zb1$ z{-~&IM1AFh31&GfO1Jz5f{>QYq0VRS+We~5Ee~T%|H@nMdScx8$AetidOwXTG}Nur zqODNkI0ksug=|1DjN{MK*Ix6+Qos}+8k2KFaf-dfD%QU48T49j_1OI-iho+-5XBLC zv0pUx7VD40vI<8SxfO0Qe ziYL&%M3gDnVpR88>Bs(dUu8*@mK4l-g-kZnQF<|TcZaPGelW`1(xw*<7&Z5RPVc!} zM8nSZJ*&u7uAz-3EH^agc(Ve5W|7T)c39hf$A89bkLStTE~V^%JAabiZS}xq==d7P zt!8)17D5jzK^7#Aw#L(y=NXfZzr~`7(8q9URZzmLz0OA+pW1zJ&)` zHW>rQCByYI-fI14f)j46#1bD+x!WULggd0t14 zUVQf1cm?i+qM=dDAU=S+ugA}4+`5)M3p&q!ao3RrOihq#ZQ+(F39(6d7r8A+eXl0# z$k7ttpYNx*LX;bxquxn1cD7s77&k^K)>Nb7q{QIVM~LBlygFmXb7ZHPe$B&QrU_gu z9_d+PLMj1KK?SWYiu{`1r>*gBsy_-0pJfksJVYm8(IJL+UycHy732FTqr4z~qZl$% zkEuNU-jkoj&jcFF`4V)iHJdJs*-D4m-q`OSB&44gYRLwN@iUd;eqdsv`&_m%GK*XuC+jd>r}|*{!Y^Q=ku8>1z-9$<(-QmjtBVt6a+M-4nEe!d00>g`z}mH9#^LGNOkj9 zV60#HF2ZDj$v>68AnbE|fq~c_6A8Tog|!?XQLKg?4>%n+NnKthSF z5MimYLjqJ)h!D>-dI$tVP~+)$o_sf6Fy1Mqg)o3g796G|pPE5&Wg;P+ML1<(LQ3Gf|4#ge{LJLm3A~jPTjdelfm;U|x%#Pe#?) zSto%tJ|TZ&&Kstp`?lPc>;xmgjm$3={3S42|1aK+%h2G+xGZ&sOV*odR_Z#Ey^k4@Xz&CJe}mUuKe z_uFyXo~R|8YZ*wH9T1oB>Qokwh(NqqCGkDMJE}8}s-pv}x=VVbF_na`W;aa$p$tMS62)14TL+)6ot?8LY?Lr|d`bKbI#l%D1r%9l=8 zf%bsHU;Zxg4|OGE2WjniO>$p=6y(n+99cU}Z3LSV89oFrV^$eSs6a6yJq-yj#cy0b z6hjEEe*VfA%oFIWRRcH~WcBIZbvrHuH;oF>ekpl%6Um;@N-H*(Q@VOR*39kpEB^hh zzYdCq!pPS9iCp>0=ewh$!e5{RiZ>XeqEtaSvrJ}>W;Zs2dMj*5&;Z+E-{_HbbXl|H zJhywPYXF&rGyy*8^VO*!lMsJhA|7&7(l+q=V$u{Cgj7H|**u^VZekvHke2|;V)A&^ z(>$JC-(1{?53t{o^$8l6lEMTfFtvl?O7T%R#1sJMpFWXJhf>agcSJa9SS-zDYVLlQ zfA6Ct{M4g%*>1s2Th%;U8a+sfNsR}>3bmC3f9RqbW{}=EL!plvZ zZHX2>av)^{b#&T>90LAWTAMP3$`KYWBnSz!l=x43@}%Tuk1*)Flp*h$u;i_5Zr*;} z(lH(lZIWX;wERe{|&9Qi^m~K=jUv(b)JIOsM^%+G%Cdbihxpu zwi-JaZ7JCj<$NY5U9Ridqs)s`JbW03%L*?Ui(~w}bXnMiQ%m}J&C%a+Lq;qxl>mKzDRE^Bq z!_3KqSYZDj`LOdrOQVSO)A-a1K=T0C02S}&o*2VA9(#CFv2nEVZZ=bpVrFV)%G*d> zokD37?iOkAo=yu9l|omPX)8s!+96MF-We9#0Fj3bFwxNrUh{ho&r#w|iXiXo%*isd zJzQu-3|?30hm!IZd0WV_y;MvPNE9-!#_?hQ+^rNgv4@4^@%33C4h8#%wig{tJl}?iea@&zovMklX$3Gt_lwz(<rPO!#^Bt#@OS@$#kc;*)ZHbEo{+8@59)pI|~|93+F2E>aZK zz6$YrOZu%hk6%G!>3gYx%EMb0*{TpIp(`HeCIW>}s>Gl=;mph(R_u*Gl*krEl2td* z@9!yIn_p;keDTwGBkwG2-dwwts>sc=v-EEZF<*8-$VXL~4;m2MyuKO)egKXxX5IBm zPB)WMw}!=Kp>8|?fMXbrs!fr(Z0{Cx71m!x-Al}rK{;qGr zKa|`NjpqD0;uNJkwo3WONpRFQ;N|!;-MnL|hBP1x;Ez-a)CMuN4prT*^;~e1D2VQC z`svq-OdcO|2r8awXV|c9!Ni!R9U#i{rjQr_zR)V>7B~D6h9UUA*c+8>FOH7sSF+;6 zV5jeEtuvY1y%D5DT5zFyTy}%?2B|P$5M&U-%)=E=h*y?+(D~x=2D4+A z;?O_+>x*Cbm~fpmAF%(T#GMLYg&opjymi4A>Y5Wj=~t0tl$BE|`aze&;kdYIJFp}tCx*pxc`e?yGP~)y9rPd2O)#}8tXK3> zCnc)!dQ3+evXf{Vv;0OjO6(4k5-vVoNgtJu{Z-CGO5q#yZjYx__9!?^Xr@xchRvpF zm5>WXK+hRxd+}KLLFSdMhQ)}15Ak%B*Hfs#^IX!Pz6VI!L~#SuWU^3nMlFCmICWgP zrebj9WwFtBX$*k;Z*FTJFiDF(Auy)zVUC9nNy_(o@LlV$#<*-#@x;z@&BZ6=85LMM zB}=$(LO-~XVqxvhs);#k%en#Af2IFx4547bzDvMgao+$8qqs>?$U4K(=L5s-4HK?i z0yRbbFq+nMQD3To~|F3R{A+n@P|WI<{wy7M#r z|DeS5%b$-IgZ3?32k)1!(q4`7Kto*9juN9nl~6EhBPugrG5a>GTx|YIkt<9J@4Pj^ zmD$x`y$+0AF;y~(aHL8H3395aR0bkuUbz+oAQK7id8}uSvhwlvY6+_{5A*akw?kAf z&~J>!o0QLsFE7@EA%AiHE)zsXk`?)OBO=zsqcL4>y~>&MrR8nI4#(ORK-0fiz|+T1 zejL=itk~q0GK;)mI#b?W>Of;(06^K#v3uSs;iN9C~r^EH7 z7KZ5sf7>o3VxssegS&e3Nh%cPgE3b_ERN~S#TLlf-(V*3;nV+k@pHd)%~vTJEgqX7 zm~g4&&=jZiu2{nt7zgEWjNdt^h*O+aP-(mza2fd{QmD2p$9DQfHz1RF`<|aXqlZob z#w$%FA=!Xv?#A*_cw@sj8w7Ygaow2}>NWs^P&~&CxSd-haD5Vc@a<%*i^nssykaq@ 
[GIT binary patch payload omitted: truncated base85-encoded binary data, not human-readable]
zn#`2-@#_KpADsTLe`IvPMb7DVKz1yc?4-s6|5rXNVwbeONZP(sc>E>0e{P_qP!)N=?%hrhySDz21*{U`Cw_>H^+RJ+>cs#%SWg)O)D8C&M*w4k7mEqO`Zj8Fpy#40-&Rc zY$BXy=`04WUxcl{{P(=@{0Oh+W+f#29LGGbFdrXkBQ=Lck8{=2?_GX>@TpH6lLpZp zho|Lz9H$f-ket8Qh6|9>nTf5_U;xrrV@-!fB2Gyk{G@Wo4><&*A_#&J%4v}G_FE`) zVP#lQn__1{u4?PG^_oBP*=JsK8e0y2mPHmIyK>n1)i=HwA>i4u5rsG#)iLi0K^EGyuIf8j&(G5GwdhY{qU9#d=`TS*Jq_*2US&7b4?-U$#RY>``IUmFV>}lJSBQ4W)FHqUM~>#5BTGQ$l~im~$Br+@#&xAk1a@P4lI0Y=Hm|N86CJMGLBavaiPW-7R%-mGeI zYY>OpRv90Up7{e8p13SV^&n02Dl8Le#&+br!2diHCOv?|ktst-{UOiJN}k<9pxq`z)O* z9XE7K)-*~}3SuQYp%vV``o|wjA}EHJwpORVp8&5<|If>}MWEw!hEReLTwsdifPKK4 z6xe{Xjs<+r-(#`fi)fJg`inPw__nNR_|vmvIuERp3IfBKgesp4ReivE_OUfkPJF-B zV@YW(5GKBrNKJS=Y&8p9TMDf|1J7dGa)grsZ=->TO3b#*aF*=)bl;Kf!9z-tSIi>l+h+!8qMyY7FME)gG#Pn$wrtyi- zk-Y4`5Fq^3WlCG+@rgO;_mVOkN9nnsc`IZ@ zP(VvqolAkmVd_)*{q9n&5z#*?S(tcA_yg=)GfFb*m`a>hF3tp@n4G{ok@Z*&RHB#x zWlUnK6Iu*w|#$fMkPHV8iUNtU=as3P>DTrC9S0$WdWo9BWD;a^R#hM2NrQ z@#DwO`;V+H+6zfr{sk;f*c)DU_9Z8azH$DEs(K#`e(D!jyz7w&RR!F?pLAMAmP>+8 zj7QDcVYe1%pn5;ar1eU;@HfC!=I5;gWy_#mB$hcyGVG%SDUn1WM`{mnNu4EpDZ&OH zE$i-!W0OP?@xgTW6EmIm32mIllorrmiA?CwylNsOfZL6GuRDQQ!R?ZxsV_Q|(;Trd zA9iR_s(}#9IgXO-B&p`A0K};++>wV`IY?5#>QP$1Bt4BJ@lP&)ISORQ5U}p0g@hMv4egE>iDW#R!y`zpA)!K#6N@&ilO!j8vcMGz6}4v8{%#UEf|c8Z zdBR0vwTdI^Jejlo$)do|DtTg33!M@Q*j$oztMQ8F*O0y^DleJdMXMyEy z9`DaJQFHTK-PPo)#=JzcD%o0opE*UWqN#tz2z?9XJAv{`YeS36CBc6TCXA&O50yMF zi^}N$XXqbGvw3&${uh6%iM`e)6z=atUm+*$jPDu+gb{&92#_7g;w>n zGt+goW7fMVfN03{gA1~15*axNEU2cX82Ug*+p+U5WZW4Ymjkau`o4BGX+bGFe*D&q zUSJ3l%bD+50Gz$r2-5YAYNJj|_N}Cdw>8t1+=7NU zL->1WD7l`5#$FGfsMrDk9lgLVzZW=*JIl4Zy1e8q#yfF*_jZ!%#<_xURZ+7A4X1OI z+I`b6n!02Dqlu@4LbHI@@28{^0n#eAK)I$cWDSisd5UoW0jqQ-DH7e<4SXkOFtv-5 z*xz3;Arg-q!pO#>4&h3*8!&|xH8O<;ssoRGQx`);1?HnMF5(%R%G*1n;( zabb9$B4IHqEhP{1WKL&(wNDc+NeYh46L1QeQR!f7tgpwN^(mjAkEcDnuObQ3db1q> zFLLEN=;b+{33d-;k>9}D{?2lBHH1cil*Yz>=JiL9Q)dLK`R!t@`nj^$jsQ=s8mw*p zC^?}GDI%%CTC}y^{jRhM7FB#>Ch)b*xa$Aj>T$ej;Kn-w3l_^ zU{l}jJJTVbnxYwcq59RJYQ^fXa;Fg*H+eiP3K8SQs8`-7&T_11r=-C&Y&U6iDDdW8 z%_8nDO?5&`Cq_m*T1EysU@4k2idHjF1MHd-8Sp)VD-@|Xr-~R*($(X~U-u1K3dU=L zn(GR7wJLYHb1ZyUioxHHgZ5#Z8-|P&8IQv|!5YPpW}rfmmu+|)BsFLCgcAuQK?%?M zXEPeG8Il*3q$Yy3>ebrb*-r-NIkF=>s;^eWwd*AHvaj_;5phiV3!{{ES%geo^jND^UIw9x#e z;-PvsdS+Wo#^T5EUnx8)K>6#hzamixSCzVbTJ5{mR3ma~>fx%fpyYP4k-A6L+J(ba zw^CXOgrK+{Ig2r45ge_g1xxXFNCY6ML-FI%z0N}sz2x^>wMxB%Dv}91Xc^Ipz)jy8 z*NaM~U*-_BrC|q6e;6x={=OY?0x$<25{I*S zYm7#w9BW*gmQJPai3@9udLvD0qED=}g!U^-{yn_!KbnU1(0@dxwn>E! 
z7XYF_KM@ebMD}&wKx`t zofw<(M-84}+L6}nRDfqTT1$Q#D#;tZgzVrgO6E%=EU{%;z(Pa62LANhb1f}6yEC7D#l_B8aEyqg48)@WCz?Pul7v>BVbopqM+8&5D;6b32$}S z;aV$*TPu!;$;nhXh{&*Sk;=+#QRrL=5O2QlN_sa*4VI%Q&xZ+JAUQh54FC`!9z%X_ zs&6NDQb;QM3D>rQFi*dK`L^DKp-M$6RNZW4SbC`Rp@RO3SZH-^wbjs+;iuqr*BBELN?W%h1}L$HtP~HkX3RDaqkwpB9|UaQz%c}4d|(xW z{)YPdjZ`nMe`F!LkUafOY4lbO1T#a#!MR`<7ZgH&u9aG8r{Pl`$f5`j1|ptpqYULo9we> z^jU^{-Jv;_L5Trb=2>119^B!T(oQ z^kIA|xm+6@<`(twVc1fca;C!r*pj9Hn0e>#GGM!p(;#i*semfcIHVM2nrH}0-h`6o z)~YlXYvCD$)yyJwy5J_jAr=)PaX6t3w-5gJSTt{z@&En*>LkY@@lX8w8_xHiiqaF~ zftRb6nmtayB}zg28d8N`r||HYvO1rMbPm9dPAd|4_AW!N!(FRl~PmTEdlL!j1HiCsaWgOCqS19 zG7$U+9^uqrV-;GF=@03Mc1x59Y1AT;4>C!v+8oe+A};$7zv8m;@E+ucau zTENZh!@CF6(Fee?!=8(+Ivj7~DhU0`csX-cb zu%US~a4Sm898q0JpY9SA_ZO45mKevp2rMaU$p20k?p6P&|Df6^DcL8q^coJF&Xr2BIF8ojAgM&Hf>sY5W;z zviLni)oM1Pe0`e=KzT?+@>ZmaqvSM4&hFAKB}Ytv zCmHs`Y1u4(bV5~WYu(KX=Q&<9LlFDABHc|ZO^O9kyD|Tjpsy7<2;S%)r0$^>?Nhm) zc~h5C{|DyuzN$6SCGxrF{HdnX5)z1T1L~R%FHXZ{r<~c{odznbB$Gs=+BMShBK9%ij1>DuMLjxYL-A}M zYulhcBH0{doVOeO2dV5&x;>sAy>c;o4}E`RifwuAZI$|;%NZZ0QGa(kK< z7tLzKEFx8#z6QZz1wNYO^I|(XpkVd=!{8&W0J}HEF>BT{+=asJT4PnjgbFw%A)ThZ z0w5#Y<_5vL3a|SEy12WwSY#>MB%CW=xqd_QLE~YE_zuG&Mz^)|-!Go4G^CDM+Bq45 zIOLO;?LJsJ?$LPX**XQq-gWZgLbMv_3@$$77UwkS5ztiguQ3jGF zm_Vi|@-b9vLpi`}pZdz+O$hPCIq*J;+i#sGtc+P+Hk%kaPB2u<*tOuNNZ8UgFf%8B9?#x*#vg6c zNEruh3YhyZ!%S3vHl133>A#@EU?cfx9l91Wk|NiVzs0zXo@r#UJO18%xz+?nqkXY3;V)+ZAJ9nrlYC(541TguX9+xAf)L!2pwmM zR&BA?@YfXV-p6{)xwU%m!M%9x>$(noGo6Y6ygBThBbaLYkeNC;S-}FtY4Mm6(~$fY38sM)x1ja=*A-lH6h#SISVQ^mj-KqGE@RhU}(zgB?y}ZrN@XgilL2#d16r* zALuIjf@o^TW-sZU-&n84y7dk&M(QQjDWK?2K?`~8R7I=Y4B8K#EJwX@1k72S3~j6D zhW9AB{V<)(Un8HnYLM%(<(H8rQ75?7 zIm}i5(6{^+kc4SGP3xxNoO(U^TUXbZrIN;5_gk zp!#UiZ z7}nO)FSAOH8okLJWwoQ6r6COSQCI^lvq8Wu3o8@^kZxeAoi@~>{%G(+jd)^gT6Dz$ zf;W6l+KSL)o@=xlbjwvsZ7 zQe^EdoFK$@ANd4^;Zgro0#Cp3>ELI`DU;2i(jMEINO(g;)Vw;hbXRTb*72MH^UKAzV>vwxTR9D&a?icbxn0L?Ri6C}9MDC7O>VdL*NVObW_W zv2re@k;@vvmAg1KuV5*@Gx&v=>2^uhOoG`!FhLH}m5C~@vD+v%iYlekik{&m4D*(J8g4jy_9H{vD z;CucF-5HI?2{UF!-UaUs=9g(wfOX(4cwXN%_OA{@84El_N?QgGL@NZ*YzhgCFQ zF(UJ@!*)b{Im2nDz11nrEqt-=w$q7Qug&}DaAa+0bhh@rq{4D|lJf7*Xyu=9VN;4t zx2~h(s~Ai19Qfw@@|%7cZZhCnxs;)~qn#T-Vb9*u6pP!2{ohN$R?|gsBKh+gonbq0 zoy8Od$m#|J8iovfzUVl=@%h}En)O#Bc)C%Y;+4PDim&u~Nu9My$vSg5d<}J_Fb6uf z$#30Fy-V%pzZ+crpO3{VuIXBk@?n6H_7=MF^1^FQ$ip4Kl_KO}3Rv&G5kElLImv#) zJ)99T=KhaB8J>X-9zCbS;K^UUaKk?w;;u@kJQa-nm$};F@~V?@Vq#p2hAlwECA%rm z992yfjYc&WOmu$nEv+5xF?*?EfCXn!veQs4%RXG6OYGZhUg0SS{ka@yM`?F?V82U>m>QcPc5B|bW1(J^bpFr-EX zhT+lwauo^v=K|O1J1s2MnuSmiP3v&@lp)2W7$fYUfnR+so5@ZLbU(Pe*ldS zlDCQ4I~M=);lm?8_~LJ6=CetNn{h%3qX$oz!s<(~o-xgv)b{cpu%iCf|0Xt*PDh-f zNIo$lNabGeQh@+=k`@BY5;ubB?wUBRqFrm|t#5dP1+hb!>%z+Z_Pfi?I8?xw{rEDX zD$hPnMF0ugSVt#~s|K<%ZOFOYnwF|+1}$>B4ddPZ!s^CyOOK(D@_9N0`)~;UNs4o5 z^tOoRIQ4Hzh0dSZe|@_E!t96GAgd&!bt&71Mk?MR74E-EO|p0pr z^hXfX*+w;V0$PfbznGSDhlyc#N(x_M*ao7)CHCTmF9+}l|JXaG3x`tD;c|j9{v!mp5Vam1sqK^@ zxX{|sN{L#IX36-STOHbl=S($@J;RuZTC^KT_3cn%&$B@eX8Lp$<;7=cvQ%tS@m(&)NvX%HKsadg}tQoJ5kTP$q-+;rbN%KHw zq4{7>%x;YgSfCzBq1SclzWJ}HMay3tY)`}#BQdURe`1O0Qzu{B)%JU)>`KHr1eZ{qZ5d2C$zkQ z|D9B*6QGg;mPEmXNB6Wt2;y7e2rIkbY~JdoEoe|}Cz%EI;|RIKk>o$XNII@TUjKmN za>*cCT3@O9XiWVBrRwA|`z0$Vhpn}S&wq`?SWXDaLHN~G${}nA{@F4|Zi@5JsyY_5 zu|vn-Ml2bxNNje)&~u16PteA=b3$eBwDGDvj&LJ~4}wUE)r!NU_sJ3fl+UDP(U6d9 zEz=u9RX0ODoUn|;4}5fX{MtD$;TwI6Z-oVGtQb~aNzfVLfhy;90)3U#0!nrymHO4* zW}MtaV}8O0{Hc=!=!*3q^^X)kvK9-@u&3xR1{38xadv3*k`er>Anx!LO(rAuhZ164 zB){uqg4T!O-Fkz6v;j)HU0h&Cu@bBk8Vh9il_pfTyBTI5N2Mj(1|}F8uJK#l-jq;! 
z7ZcFX(=Iuwzj50yHoYrpiXc4uR1i}3GI$mZ!076qaQR_v`(s!i$|uC7JEgq0&)bgU zr1X@G#B?}Ms^1RvbE5+5&;aun7_@!6n+kQpSTJ3fbPV7<7(MGEB%SY)VBm{(mfdjl zyH7XocD_MyGDkJ6o3}H7)e^Y!jrojt+dFAtK$VR9YbIx>v{8tr!^QnWEi}P~?l4T{ zypNo+67wOPhE_&ul+1PqCBYVUdDvX^m8_nf9Qe0qsqhD&)yxjm1`EyO6kXc@?6E_U zRGdr_8lV*3DV3Tjmgvu}f6d=m2j}L5A+TgF-7JotemMB-@>fm^IK*e34?&4q+xH}} za;%+ttes6_GKhq5JleD+QSMqgU0!X6tpMEDghP@}FzGfv;SkF>wU%7I*{&ifsGVd9 z=txo+wHg=}M5%QH`Q?DYLe_yPYR)?!&;ckD@-CLI{{--BrQzIL1#6TL7`JXs`!6V& z)-=_3RezJPdYoFrtQ5cW)!v@9%XW%_)u3bd&^fHpcka^(eOs$(N|v2uriuBoob+Sg z;>O3bmAv||)e`rXRN!vxpjsou2D7uswBEak3sSnCRkOkbk6BTQF9YDrLEwXV^UWx1 zRwWun^*twjm4m(wFyl|GH&TNKP`mN5sPuV=91`6be5xbyVS=h^H z5sIaONsy&J0j!SS4SpytaVqH{B^FCosvY1?kC~Y$`%{6Xx0a-sbH`cGf~ySF2|mxzuwvh^h* z{^5rX&W`uA2J^k7y&`wcB6x$OudLtJW2mKM>aTM=QJdoYgSVZT5u&(~{XHk${RC;+ z88&SQU7sYlUOHo~O|)~R1J>*))Z6AjuUaOK(kH%q`L4eU|Ni&WoK8ZcM}r)xcY21; zR^SzY2SYy2(oxdKm0JOYe0ds?S4pm&^%r?hiUai~q}u;zp2 zC<&Fk2Vgwu%t%-D__Y(0y=8OP9P3-F8rwEC69@!I4zgBOn&7Gmt;EZ!d8i0k`Cd9L z89C|2n>|LQ^)zRY-Z?Wi?mw}wNs@QY+Z12`H68oRf1sGQ6AU}S>N7=Bh2<8d%tmHN z^KOz|bRKL|3?o+wM7|!;Eg^X&tborDcI<=fqMD~hG^P?+)XgNYkLmHf)K&|K2Z&9- z8(+h8#M7)HV>%c}zUzr|;QoFp-_+j3f{RoH3%^poFb)lpvW`RZ`m6A{aIUAY4{9r% z>AX={pO4K9{6^cr3W);e$ewZYF56M>ltu@}rj#1Qk%yEQw`?3)6g`12YSdjhmX8)} zUici5>Q~2zk8F+|39ylir740ATFU@{3>L$h~HGvZX?u4T=m%*zEHD#SCI5c#} z@{uKl!QcKayuoIV`iSVK6(JnDO>O5ArPfsv)Ko0~wQJY38!Sc2GS+?hSFkP~h)#wN zIXO-#n5&&L^T~#K3p;;irmr@=MOsULwY(WkP9izr_fcpgssY-0XlN`G;*|@1I5ehG zf&%bP;EcZgI7!!4*L}_s+~a&i->_h+h}A+U2QLz+T1!<2*&G22!6~~QZXA5Ci>oe? zi>xPu#nc;d*b_J~Q*_jr#zt^as&|^xO1=K2&z2fMok*(cL(I^X(*fMBo zHapg62Bc2#CEQ`DUE!jjhFUVg%p`-$q+yn*K9(bTU^pajT43~vu{{@n8^wd`wD<9W zT4pi0SD&R7(CM%f(<91j4k+-PGs#HnoB_u!A;Wg2i)Sm(QxxCCmRh*ChKP(C7;@rX*8P%Mt}v9XDW ziFA%0c9Y|&*xe&Qt+89;87u4ky?9LLs26`jiR3o~jZBmQ72Jg$mTerr^R;z7aisur zHCN0R%P2_9VU>+?L9!?&z7_ff?GRgQzOgKDJ7kWKC_Mm^NgCbnJ@DhC`&M|vINc~v;PY!WyjitVfSib3%7CS{$Fff>!=YYM zqgZIC<>1M)*ZfV#?X)x4IK89p!(!Hn8q{&UpMo>(O}5IWxs;sUo__EJppp3W_T{IS zf$1zTNVqXkb9_Q)-5d%cE_ew5%5_7#AU;~t)_y$Ms%5|LPvQ_)4Ir}GDRbf0l^Ww< zFn6gN;FF;uH065ZU?p;3?jnD_rMVr4AT)=o%KlmLsp$Awk5$_;ewGlx+lRIirU$6U z&}weH6zxz*cJN7X`!8bYUS({@VK4TvV&a2U=8EV}@$Tl%&K$G}F2B0ATJz`Qj?Q_V zEqpmFollq)=$V-@Nrfx*xTxOFEY^6{nWc8Rr3Wu8ulVHcule#h-2F906LMBJ9}SvOJROT|<6m823SRz0|M=3gG*(zsYgH#= zcGy3ZaAhNLo}W>iDtQD>$#Bu!$czd{Y9o#~6aE~T7l*M~SI zK}n}83xK=L5{3N%l@@`)riNZIkFv`nYNGT>!-8Y{&9A@Ymk#^AB&Yf^6&Den0cSs^ zf{iaU!KcaSbg{Ov>JKr47SZLGJ%0Sgqw790TBavCixrZyO8~LZ;?gfA0>EJ_PH$NS z_ERe1j~wvS>y0&Pn4{<<-D16e`1kv`w?7j{VvNuoMW3Ng>a2zMA$nmo4Rr;6R~{@8 zNa71tUJ|EPPC+9lbacNRA@ixRo4)igYnc6blG$mR>^AT4&H12v->HYP)n4@vB{DA) z+9J#)Qf1m!Y+A#a>anepUOEU+&5lk1vSzE%3R#?`Yd{AA@?df?gN5$SzmAFsf6!0m zgIrjyt)!*^Ra06l>)n4x^=l{Pv1a1jSqI|Nt0&wnILpEDSgF{&zp3F7YlE6(#W&;Z zD%+I!o{TN_%0`RC~-Xr`Xn>9S_=Er+o@Vu>+JgTx+h!dg7<>H5<## z=&d*oZ0f%~zV`gT{g(;&!V8jgRI9eKh#KdrHFstdK28KF>afPSOg7lCf4dKkXw95o zT{OoOGP&P7u>MxD!uLCAMfS{9?oa8_zCyz?TsNo}*F@!E|S{jTmql7*w1FGk#V zF5mGHadLu~311 z*I0($R>1lTW*)@6mH@m7Ua=5tR?tEZWRid=EzBjBTdzg_SH0F+TAO$BVJljPQYXhM zhM!(@m9gcgVf!2Y*TaNPs?d`py@?c1r@m=qvw@0w`n|#LG{c@j=)>MeoLQTf`+D_N z89+U1wbxG7b-{iCO(B9S1?uMPba*(;F(mdyZ%m3XzUWkTi;luXZqYo#1=FKI6;jnH zg5efBYjR>-GdLy(NJ~JpG1;iwv>gS+k;u~Oq~+v|;6J3SQK{V0Z%D<82qXSVt!P0e zoYUeW*xZa8VHHn14><6gR<+g2+)B;IkFcxwpv~GjKojsQEqf#bh`=@Gc8Uk|L&3oJ zML8m3&(7>@k6lS@953$-KKbMSD>cCwz8GfW(D_>ASD__fGW!Iq?3@OS(x2!I;F$(; z85;)fJ0}@h4KsfbKDtex3(}BaHb=St*xbwAT}#!mN;Tx@^kkBwlWMcqwt~i*78`+* z3;14qccIg9LWZ4T2WYAci%+5lQdYc1)2XnZ8ieSQm!Mv8xkOn*)%eEO!-ipDsm9Q%R{p4_;nQLxCV7@fH^Q!&;hDl%g1YL8<89jI34{cG^qabRW=pcCiiZ#3JtWwdwY>W*XLx 
zbXGGR{WwfuYSpE5$@x+fW0VaI;*osz`1(-Ie1(<-cC8zn#3xjMAvqOeuBeYSSjdWonrtP`mVelwf!e%XPr^{ zC$Le{U*_A2g|7EEhb^w0aUVZ^@tJ3QD8y7O7dHG<*Pj_lRUqZjuVfce%SNs!{ORl3 zch;!51iJ-?H{jC0aVz#e4$##9`Tx^Oo^G{wEbT$GmR7*729+XCY*KTz*hqceXV!njKg{)z(#{r4 znTU~wNy9aej9DXZ4BJV?&U$>~%}35@D_`}8;9iIn_Hn1~OtS=QmrpHZ3;_hp%37+Wo`eJG%Q#=k$|-_Im0hh^08OZjaxW6FS{mgr#w zOFY@5Ye&>!qYO{-K4MN;%3g}+EW{Ej>hv}9YEcN`7 zG_$AKv6!I4g-Z`N_R@x2cnx)GF#ITa{yi%WzSF`sI`_* zX@`L0aostU2>#&?*3I|Z2FvnaYk+NzQ-#ASPIqbz6qQ%^?2MHE!ENx7-@kz1%)lQL z`;0Y0PF8)5=_hfDz$^p}h3xegUUAZ!<3G6vH^Dq5K$3|rv$;=QCNdBJB!B#WwZ*A< z2aV7`{mmTA0;v$|iNgwIxLT2th6Fhx&BB5c@?XLEztpq$IA{$^nHmZ$8_LeqAAg-m z{*b`9VNZIb?i=*OH_4y9AnM~(VWiFthQh)G)tSN8HEfBm$S|o|ERL4JacUL*vQulm z*Y~YYxZxuNpqZGz@`sql`Y>q&G!x&lo$BK^s7pA!W~Qb!`(ao z-hXcYz5J0LO<2&$@KyrcK%~;$CTltD3>2gk+e61IVIDngwU(R~o4xQOSuCQ!J2mI} z2=|p_NBwxaCw#imv~Or0f6ZxcC)Z-4(L_NDrHOZ2ecTTII6x)fTKsNwm+{n?2$P){ zSkf>n+X?-SdsK^ClGv`05?X`p&L$C_G`t?@jAYN@gYJfOF)04UQHm*?bZ~?CKn)I) ziy}|loR+kZ2kV`<=^AwIlE`WuJSkm_iF z&Us=%v&Jll)eChG@n4TEq z*+U6fNChqd%Q#DZpN#>}#QN=UX6OAej~Kx15x=9tE!9@VI*1`6awG6;n4QxomyQ|c zL0NoOGheu3dHKP=@|Can3JtqlB@+QR%*k0v6^vux*=L@2eQWUK`k(sWv_1`1T3%gQ zUad*;?RTkOb`jb(zRI(xR155psukKPM$)TuEmWG&oqf42^>k9tuY(#wn)|P~ae4=0 z+9JbCNrylcvE>9Q#BGZz7;@QC)K2~jdvEp}_jTlZ#{3C0F)=X@^J2BSxmcn=Vj*B( zd0Z$I77?4O08+9jsS7{>C}JrXg(3*bywPpnI}vkZZnyiPqi@H3p|x7_CR?q=k|o~& zOaIq=@)W+AS?*<~WBPXV!%zmzLj9I=^5n^r%QrKNI>NH0>JY>hl%TzSX7ZkYh)g*{R}&zT2`{sUT8i{ zep}Un$N;b777^lt!H7ZMT3+#D=_ApTm{I-`bLeeR79+6dup)E-lMb4ZxiHyx&aq^z zzkB{wECY^rEBwi!>kCD2ButIjIG3~sHcb+bBtD|-xZ4R)<+Mce!JkQlJ3=kyK8dna%~LnF zG1HE%$fu^mNLClpaXiRa)1VP(Q6p-F`lM00=rKiHh}rKqgX6Z4-&R@^|82+@1O>2h z!^!5vt7RIV$9$1Nc3Y?ooQ8ALK8Q`YJ}!n*R37C-)D+0fd++=$T@9TlQUc~YnIUO< z)5mmQ@;DsHd5Oe0#cnUm)Mm8uRtCw66~~k37iP60OR(c`^;4^26}Qd-DQh_ruCZ2M z*JHM0xt`%JTLy0B62+ zm>ZzuqH7k6rArL#-e9AN@#`A*Jo=@Z)^u*Lj^&aJwP-@|?6X&|#(bzPf`co(;Qhx` z`;Soc#N_o5LttXiL6V%MERlm$7*p40}&f!zdDOG&RLw2pYDXV^g|xgEz#2 z;i_fN#f21`h%e_*_5yTeC64qEa<|~?WGcU#PsDugwQ>8EWhgk?VyNJ@DWnpgr(0yF zB(%X!+gpeds&u80T>?~-G`U01%dip_rdYAjT}EI^CN{tsb&#ZgIiB@q(m!Z^nEe#{ zv>DoU3kAiNf$3`88X(q=J%$JtT$8=~gV*%W>5eZMx=waFRJJV0tL@1V{!ae`vDSQL z*)c6BAtytSAjGNbHl`&IzIJKT;g22yYQ~Ql+~n6^sgdlr9!jBvQxKPHjg049}n2k|T_lA&z0T49E>2`8Wckxrf z%TYFe16iew)FAN#hkl%o)Q(3jOx#Wkb?Ht_$6V<{E|0rExEpsjDR*sloMS|b6XIv( z?+=0mG+{#2Nw663n*-pa^jFis49xm4o6|E?_Dudj>aKUu-9)Pf)}Bi_IQlFx>TT8t zXOEuT7=+ApCq9`@atf^C#ZM7A119-y7#X=f6_cPO!BD}0If}?`$DFJtNe5A%28p)R zO~-N7yk+zV3T_3W&-QkIj8&z3Qj~z+c2dj1DK>ufb$usYJ-f%#v#CJ z(dI}4C4S?_R}%$2jYO_qac}|H=))%EYZ4Zb+}NBRuuA8IVdo4=VjDdAP>8xzF#Al) z00PJLqY7Oe#B}x4ZDY<#3X2CjC1=yE_eYN&J$+5XLz(%V2a;9?K+i}RaVgfewOH)D-ER)&M4QC)QC7F6;~s6rcoP*d>_Yt7NbVfD%#rHAHqeS$ZB(f?HBg3OeN z@#<}cv`VLrc@$vND5+!Q%w^(GzK*(_A+a#^o~v66tG6|>E7zXVB~Sk2`CHyA5zW&t zKiN+s=B&vZ7lW6~06lGTa!OA_5wAEJ6#eAwuYVL%!;T71SEcH3VFSscgoQVq5EBVk zDcg!R@}gIvUaIxTrRKRTput#GByy=ZuDcMBA4D4#>%nAbFQ=3Bu}NkL$8g%7_)pqM z19WgrxObGi@V2ysK-~!hS|gu8DykI}sx7T)Se9XR9Xk(W}kT+7ay*n3w}p%?H#>uh5irW&an{NzQf(n z1c~yQ_dVA0XtN!ncpii%h2TO0B73maMZbH^WX&85x_bQ4|7+*nvl=3Pij7>u01av1 z<)dwlRx12%w(SsvuXcn#FY4*Rxo5alL({a{!qmsLu4tn1rd5sR0Mf2L;G_)p-X+ME zd?RR~9?>70a2IWNZzF^oEg>yG0Wt>dN-l{Nq|uX+2g8M<*J#B~d`@J5ONK&X?C9U6 zy+k@`nYkDcB`G-h=J=!*WpXOjESQN_!)#2~`DirkXF2Ydq>mcxZzw`t52KsPpBifu z@sv;e{hrrX#A$%uo~4u&6@ zm@p}Hd!*IqyB8^2oD`GIW+@tg9;;;<8M!g0ZS?5wH*}xvXK#0UoYaO}xFlkvR5eNU zCe1+71Hcpo(2i|^eiO;A=E8-ybS-HhQy%|9mo&RmW1_fe%zT?ou#0k~FK^yVt}z2w z?l=>bB$zl##wTuQ##NAz@koM4Eix|?p987GZ9>O_xX7G2T;m#AZTWV<$VziQs}F1W zib0w~tna9kLEkH|{p1ddMY<)MFxI$DMN8j_3yc64nMMp!GC9QYyt)GkU5E5<4`Tsd z*Yj;NcikK}C*W>M7+EWBPtkwVB#~a$e|+tEJp>`2^5W)J7gl0^RUD5k*TwOLCrAiU 
zUCTHEXyRLkjfjbmz`fL0^2OrIb}L#wudi!g|M6Xo(fG%icBZmcQ}q7t^}@Y&?YR(f zJ002|bRCK}GZz%!08NvQe88i`E10gP*f3{2t1e=cD2i{anz_!zhYO9W?!p6v>6D9t zSP|`RK8^8B_AEb9cY{y}iG|orUh;2Mw1y0?jN*@n4Zc)Tx{U~3G!Nq)1?7BeDZpbh=47) zTg|^-Ta2#`DOb~S%=jf|MFVv%rocUiL}??9r#oh9@oV#$X>QicNi^a(3oDeetLZkY z2*Y|Lxo?4e{-_kp#1f#wh{Wp5aU}Pjzr|Ss??wXai9f1BiWa2n6jsh{YSU zJq@@BLN__aEXj^ixS9pwt*EvLq%Qf_=9Axg&TcCW#54#@Z4P67V$952L)z+qEsw{d zhkg{2gtcN5CB>p3Jv>(=BBC?J**)?g&Yqk~E$(*__}(n-`IMTB-g}61J^QKltP^eW zaXq^1#ZbsNFk>cw7{xUn+NxnLt&~_`gr^B^IDf1~tfAcs%%jCDx=z93)*FpyvY3{F zo4oz~$75lt4FEAZM8Ek1?HURt?fQ0vWvQqit71*SmtwTvKQG)i0+ zxxoRa_zqcjNer3gRGKx$yPF_&hN9!E2CPlHZjRid^btK_Cn?c`@{^_-#2_d}qQPyn zytwP?{LA4msKN)(*P>p!kN8V=y8`e(hZjLKLFRp*s=m_=xuVA5`@ozC0_VXTlOppAh?hnY4u<%g7kMiW_4 zZCejkypuS6Sz`J#ZL6nbF2f{qcGw{sXC4y(@@@)q$X+`HABrXbDPxTYk6N$#_Gail zNM@5)MRRni71{vbPmti9{j;M~ zKWAlQ0AvTMAnBy^=|tS>dVN6*U=im-*DguM(ie+0Qpe`Cz@_P%hQH8tP8>87h{1D? z;D9}FFEdSMzu@Qh;*ARGQz|}&2?WSNFX7l;S=I-_kVgrzYnf@0GK}VW?B!CrhY40d z(o2cy1n-(-z|wsS+9OtlD$6vH;n47Hd|_$%q43((t0KlAh^kNB1FHraK(zXkg8#!lsl=nPtsY~+`r$lXuIA3B`BI5pDLId!Fi?7L6?w)_N@i}^>4zz ziM>s~BWEPti=sqnq+DwlbHNa`#e))qr)vbqvkS)x7DUg@vrciUuv4`1TLe}tU8c1a zgg4-{J+y2GUC4eS%PSk6SRym2^Uc#wUyeZ>;J4}>B-{>1ITW?7Iia<2c)-K)nFMX- zCq60uL4CQBPGWPgrwUH?_><(!p+91Q&&I=X@FnNi48C#>*kFZH*`6AF18h-D z8o_gbM(0A2B5;*O4Y6P=wYiRt9L;J`z*0drO}4v7oX@op=nDp?B%>vwQOW@V#LG`H zj!nnV``B{1s15>p1CO6`4SEH;G|iF~IswHe4znssdKBoku#QpYyrjM7Y-#6PX6nX9 zvL?C)u<^6pItvAtQqa}>Sy4cp4PjkN^jbbPyap*A$I+Pm9#DHwP$85F-CY#83ITle znX9p+OcuvlJbG-@O%UFLSss_JnF*6TA&6hhujtOV-$`0a!;m|%hrzt3xY$~F-T|KZ zPF`PJYsBkH73h=^-9EH)B<5)a1AbM~Q8TLZ@kL(}c-m*?ICzH_`o7@IOEvChC=trt&{ zWJ8X8Sj3L^BnWY?9>p-}v4L+l#;|1d3m}o5mO&I66PWkIeRdCOnoWIW#jydVpM~Xw zli$xcLMiPGt^^|zx_tqi?I*UT)*v<3^@iGmwx1^>6XUTN0DeVSxK>Z@J`f3eEmswA ztm!6PG?oLc=`VLqW-a0rM;aUVV*PauYNJ&%|478Zg>q3)UF>cWI`Muj!}+Z48KShg zA2QwILIk1*L3HspDbBy|f38oI+Z{?Gm~C&w{cs6tB#$*TrHBNC>LrcGBE~5&UORL& z8B%C^WqmEC#)A&O-}9-RZSG^kVy{V(o@}~vl|!I?Uw77v8zXPy$`-v^7?7qgy*@iz zt7~|;Z9dU6k9x_7p9iwANod6t7)bq-RP68$_9%u;uscVEmCWk6ivgzy`9!d%rf)z} z-(ZY78P_uajl>F23=B>3W)ak+4SAAGq#)yP$Ua&W)1Ic(e|&pIlcj|eT1$MheD=R6 z;B4hPKQCyNi{ktsR^ANpGi#3I2#L_@A*Mhbc~6-;IU<^MKTm-+5y+lJ_^$SHHn+c z(ugc5DnyxLD2SAj2&C_8%WDgj#aIx~2pxCkVOC{qG5)HrzW#Gv%TZhd2B(i!nH#Wa zY~xY@zQ}RC3((pJ!Gj=pM1p$FPeDT{>w3gKR)qMS*_+PdcH6^zDlI_?(V>dxeowhE zvCP|ToK44W^pott;h=l4W7&oTlFk37DK)H-@8*zK68a{X-CDJ-DSZ1+{z$9aq5IvD zo`Eg854t2KQNa+3bU_2V^?w!1gMqkX2Z^3l1=8T=d+C>*p|Ga%cDugciHPn-16<18 ze0KhY9)v&|pAu8y&~lBqn2Jsn{2GuUr~+2Wv}`=9t~WjT;n#l_R9!MOZb$-}zZX3@ z$>Qbk+^n)nL5T)(=B-XJjCBk)7ihL9*tJ%);Y(2n`XhQ+Mt8fITZLj&A)5!bf{7>S z7_3xdoEmoNo^DSOK?v_RV;}};G4DcjbcaX|VkIStK$h11(Ur?u>S@bRYaRcuE_VI_ z=nZfk-HCpt#c5izPfd{wj3XSCM5lsOTf|U9LeZp7?=&kPLqrd^e~n=sl@Gc!WebHjaCqp*)8`wC<*8YD}R^(W2<9 z&52QvUm^J@reiG|1N99QaWB>cRSsd44X$;=9mV=KsRbLCmo5QiOKYGn;>e2ceWLC{ z0;t776NCgSrw?5N(&9vO>aVYpzToCaZ81!tN&^D83wLWATF5u^VY zodai1JciP3+vYWnE6mjwoqY^%=~9mDa$VOs?17t(=z9SiV+|#_Ku;P}5}4JK;)n`( zwry~nH>JuRnWZKbXOhZr2Twa3Z0`1ukL2u`HtAC(H0QB|`GwmtOG8mu@LilUHgQ7} z!Cvps%GE^%Q4FuU-+%l)_cqjtG>O|z9l3z9lVCBlCVQX_M`-K6oPVUZY+2y}D){O% zp1x)ae_ULRVd`A7u=POf+JF4r@5V(``8*B66`8vU)Hq}k*$MDadJO>&e3tP9j)H?A zU{S8a8}sblXT6)x!iwu#oS1q*YZIHvX=Qn_!QO=I(U6)HfzS&Y(jc@#hQS!svnul}qtr5S)53OU?loyN+0a z`OhQlPao0`B)>>w8LrQE+fXN(AsP`13cMN5M(f`eqx^{@;n{hub^;^LV|aVyp2D$z z*8fF}b*FV2AL?z70Np3T!zm>$p1e5~%zz@rDUXysNs5@&8xT2Q4VMEHoZ%Xyu$w%_f)(|=FT z=|6t@mAkQ)G@c|Z66ySVgmVs0+!&?(Y67MLMZ_b76w^Dc%VO-b6YFu2eBpI$1QJ+t zY#nb+iB~)NtzLU^z1DD_B?eb7B}6S*z4rZ71p&}1Ie9Uc#ZI#60j2MtVg}1JNKrrm zj1iW4E-uMeR~DAnboKF|wd?+wSn6A^YWZJ%<`qr&(yLd#9jtKSmJMz{z?nD8ueSdSt0ZCwb4kO4C_4-+<4s@hz| 
zUET3zee$#Z$6;I?x7zd&!qI9`0!{k(Bpt*rP|lVEK~JlLx+VxL(p5^R4HHZE;%5`V zKxH@p2bbeJ=U?iZ^;8Z98UCaGyRocuxdWlB?IO5gJ%i^BO&i#8U-B&jbJAD01Z z4`uaa5@y@mu4avoxy3{$r~m=%Tid~y-olNsCDiU!nV{>JbM1JW>20P zx%g&4YOHXj#S(Pw^G{t8>zQ+s$~3N!voCRBX$B_Ob3OJ!_GsGq^F35KjBY<@$1)<2 zm(YSg$m%n;VJkcG^_+G57wt(xGz^Go~io=ezL zaNM~QdgtQ_C(==Nry$8IOErz|%C)ECop`bfN?%{1ed+b)h%*ykQ`xo{qys(Znc`Jy zUcLM36;E77E_M?M3lxJo@JaafPI5~LYZw}jwYoSyfWP?qM`+`3Yk!s$3~R8g3&>yF zB!9s-xPfr+@9eua+*q559n$r(^yY85sbwJH9J2eKuZAjXe&VTkITME21@CU#cJ0Br55Wy9n#*Vc=!>JLZmFk}hZ3=wn{ zH61Z=m~ai1V}cJMg6iwbL8Ibsf#1_B$TUAm&R~}3q?96vo6Ii-TFokgrN`Esu0$80 zz0T!D2Vbo;d~bF(lFto42*T5BB+;y1Cy=0WXwQ!&~5TfOy)rxKy;#NTO|##IQKX%vgByE#V%D z@0*9<`81DBB2TzK^YzvmHy43gwM?F_`Nm_!Cg*xhD?6hx;8?q@mk#pr@ly`nU@jqm z%rOw0IJxwVH@OJ$(|=5*Z1-3JC?D!T0N}us%-95MY6w{{ZyMIh>S9g9LRYYqpXi2c zK{G#Vp8Re91AcxQYxLSH-xaaf8!t;FK6&r!KR#L-OhZ1M& z7}Lj_o^fA%EFGTN!(RIAJI}`CO!jbt%~&s|a^-rE_yro#0n}+9C5f>`wjcKe1t|H0 zOiu_j1oWUCc3fkZKqZazNhyMTP4l-%(4O;;c{QQAH`AuHYd4O=Y0I(N+@-5_>PSX0 zA7q6nOPo8NybLZy^k;u&?q3ia&;ATL&Qgj(chNs-Ka{x}Q%_&)bT1?MPy8b& zU`@tQDn$Vcf@0zasmazNG({vSc?GvkR^vF-;(OSZ;`nB|_wO80>lk z^6ezE>VhS3@#G}S3?80TsFJO*l~B=eik*5XMb)}KYKi)fzu`s|;niRFf1^1g-hsRU zvDSF$UK7X8IvMduNzuMmUlFH(TfkGF=!%!!D(E@$BKapqhL!@Ss0CJnctFwI?4!ug zVf4(ZgG?|Xuu&?hxY^Ku{M2`2C4poUrHH;dq?))N;ze_l1XIGsSgdjDLGPIVWv*P^ zZTVq)F;Efj`ozdc=nNGCcAy3$bdC+3qWm?BQ=Yi?0Tr&Ns*sSD0mry%1#2aWU@X7i z|CNYc3C|kG)=sPtwq7sUAk>pY%4o2x=lAt*5HJbvQkqK^sj4?LB1eSVIOJowj$7My zz9eKUAzPf@hrNUnsaAX^92S*8JcJUSeIyk0pe|ZsX7WZJrWhEtUNz*cp_ss036>Kd z3i1Odf+t|}U__L%o}2P`;W)+hWep57U{HN5UAzyI`%G|O7l4KZpu|^j%5B`G#UTmLq?Ws9!!L#~}$SRndl29X~OS@4_bGaUy~Q8KusbrfR(Hjy)0 zKmvT;|7ARDalu+}22boBB(AL;W?4{cSg0nbgGT?M2H0-(9*ARCqXPk9@TRK|I|mV8 zMv7Zd&VX%Lom$Bbo+pGVrBk(%0GLg8m#hcXgliO+Zg#o7@6&!35@8zCqMegpa` z3{I6`3p6n6Q`$M!OYgYf8eMbEs}ftw6l)Wd#mo2&ZwN$6t|(~g5NFv?q9)#EGm_&H zlq__Jtz;41;us2^AmGI5rzJSW_m>2?C0>deXngm@I);IF%bM1NLwg^8vZnaW=k?%= z+zUMbDLj@yuZD3)O2nQ;JLkBtx~eggZtoCRR1VcIt#~p9!Z4{g=5?XgkJtj;U$da% zxSuMWP2#sX0NVosO0dCacIC>`*Yu3Zn_l{+UYm(|G&&ne-S9kpIsh_Rg)pb0kf zXXkJFlRr5B=<6Q_qr%!W987ewQZ|^E8W!NY5cb;u=erp^KuzsbLXX)3=SWf(pjFa0 zzK0PV3k)OfS=20EG~M!W6YOC*#z96D{H{mi7CRKC2&Se-4g91D8Y({5Qv){iKpgsw zl;&tkD2(mT860-;PAfCRu@SE!VBgz!JRmMA@SH)%Ra^ihr)HSrd=byijDO0Xm@HTd zeSq9~$I_PU8Lj-?M~}4H=4@lFa%m^1`oKXjmExNj~UafMn=_C2uPkwm*p2mif zp9-~+{cGaJ?=@#eI=(Cc?m;=xL>iK{%t%NI9!e42i_C8(CdaFG&|nsao*rdb1)`gQ z{4-pFv+kfLug1Gd5eZ(B=*9a`5HteQ20)lQWC>Y9LnpnPfG0>h&Qv$Aji8-QszHHP zo}x17#vI!CQpA>~lO}f!XBpkSGRaszV~()GSP1PjM;4Q<@K@q)4OadG=b;5 zG2%;4$3qQ2kZ>^zFVH>PxJXl3A#KIiAxPkwU#<@qQ6=&$-8$L;2y)N)LX z-wcxC%OM{yIO~2JOjhzu!#Ppw*}#eH3(dL#54%SJp%<^iUQOry%mOVJ7OHD8Q*8bX zo*Rc-XqNd1+5+nnuqL!5T0m$Eu{vgmYAiDpF!C^Cz2ojJ65aMRyETpvYT&|wBE{rb zs-ur~>cWpwsxev&=6(D_qU@umuUyk8&|2|xdl-cDq#!LoSQ>cfN(4X;=@9(5R)l6& ziC0$^HL&XJTu{?m2E}8nW=_)fS&<>^8R5nv%yce3?V6h<*BoYV^X3`th@nY7sH-dP z(YNA7sn2@d+#vA0hIp&x*#oo!G*DKCP@1(#s9qPN_iClEV5@bR2DttP&8oQB?J_fJ z{m!MxTRDmR^ZB;pvk?e$WmBZqv;z zHmd&JfBdb>@$c52p0jNS3&*zWnVdt3+_Q**B%zGN;ERRmHFl1^ds;`zC-{B)?)@pez2k0IgL_A_{c4y)dKH?FQ)_aDrXjQgbq2-t)7n41&08- zhBFHxV27hT`QZG+{+nq;9Xj4S1vlD`$XKZ_i0--`KTL`MfWQ0ASoNdsnK^JeJNPU` zAhQbqB)D2r>3Y&Crs!S!2?n=1_PC%lRP!{m^Fm=UY{`Jt;&;HiTBY7!N<78zqjsjI zU`{KynwTN1MYK2@9~99>X=IJ{ zq)o1t%pix;J^2s0Pawr0;AyaUx!BW&RlPHRMyyr6q`rCp^x7-0iFM&Vn249VY7A1e znc9O6wmJ>(lA8@Do3+-{GcYhY?7#{+@1Z*$%%|8rQUakfEGJNt@rWgl|HwHt1C#ocR}EAxzY1 z)l1+t)Rz9^yRTf;>JiBZZlorb?y(9&lz@mq9>Lt0wrQEj4I=97j&Rr?jmvmtD6o2C z@?z-QR&YB~6z@JQ(L_a* zFLjyanG)vAfC~W3CG|Q8LpPMzo6zFOH5*(SB&Wwb=AsV?07Z_YkV&OXiKkRlt7}0~ zs>4S60o2OR&)A{()s 
zk&%g!kQf`SU6dWnwCwq#H4=e+-}9XyOP92Y7Cvhqy9S}vBh5^Ca_{SnQPL{rgdNBOG~)uO@mxd5K^mXvZ@H)_hw)(ZVAHleSOcUHTSDt$JBo&q z@$lku{4`$0vxC%cpMTl^R6M|5jxz`TZzj=opI$`q3cRVfk$h)?25)+0{+SdS*BzEZ zQxf{cVMyrY$;5kYXD<6OC{#lo-AGCCgVur$Sv8B*9KInraP^@X#NkdBrj?EnXG~0t zYMu{vPYx0gV5XC*`r!9<0n4ge>c;(0fruztb|`fRc~LCMX))0_@$?4s4W`bbtVl7J z<3X0#LLg#*ABYF73d$-vtnQMc2VG)Atx~0_F+j0~3IT80K0xn@b)KVuelQUV#(^&< zPfSP$Ycic3`NfDy=M>G6sc!*KIk*TD{SDFe@++@u{KX(P_RLNUtHTP6DJFn^Mm(i> zlQme>#-E@6#Gm-hD`FE|hsh!`SXFOdPCH8-{R)g%$v61v0SY|t53%i772UWi~DgHYIfu*{J8(Q-pHrF zj)h~vt?njI6(d_N{Q0N+{Z4QT+8}$gU^~MMwgWJ0sSV=vZQt1({%FZ!BNheII&7uM z$q6l*!8I__Ei@Ap>>Z_I2=pyl(Fj*5&)!DQ1Wvq&7nd- zMc5ZJ@X?kUR=8*Z(~t_c<}$5q^1 z@40z`!>a8YYXd>9dr-H4zG-ll`fE-NeLHYO>eG{-o&Ti&Rj?K5e+4?+N$%l6Ol$Cz zuz0A^58`Z4o?Q3Cd~coqto-fCxrdt@Y5ZeTBeAk*k!&14f^SaU)Ue=ns>#Ha%d}WV zGY=xoSz?FCLLL7sLFbyzX8p%2%Zo2ZC6j^rO~ZEcz2c=nSx-jm=;Va>qK4A1x8abt zGbM9|ExPnvOD*9!qI!}YGB+C$F9n4MYn&>rvTWn*B1my31c>bZL zh@6g6cQ>aJklwvA%Rm7(ps)FH_kFjOTZ4QQ-Z69~hwqu)C@MVU&Z6A==WO8smX+jWSNXFnYFl^ zDn>}9q;**wl0E2K^$y95IVYh-QFs!Ya|X<5doPs(Ejj!Y?(Tm?RWSS zk6CV=GX7wzrB>9SZ$9j}dePeFGQF+1lkO)sAOV1kh-4ogyHYhZHL7iZss159zvIXA z=J2+`AsP^ER9f(Z!`kjF$9}N06~vtfp$IGzhQg69QWBZa2#rgUwtTzP!V+UWy8Ol) zo*X@}7C^&!EQ_->(V3!s7_31c)DI^mK5B1Q*QTgbq^qnfH?$2*Y!+KPQW+>^;NDk| z@NSZAjZ+(4#drL&PYp;CdF5fv=MAODH?52h~FF^fnP(H2{d z*;%wC^`Q3v#25M}#*dbXqpY~z^&(XEiy+c5B#GASXf?NU(*0;wbi(A zpd`5CqE?!6r~Q8E(KD_F4LVz1{H;gh!D}RiZ}i5L#>h92*WK>a6>?*i6-Nx4Co_5N z{$u}bM|j}C|Bcp@bitOT7oY?OSFFv~Ue<)B#;0^RuO{7c(LBH!Ovmu~9cQ!FAiN zrP)&nxyhY!7vI7+JrCwa}G1BCEK7m4G71syudO1c{(`0a4tOOoIXMXH7rSTe*G zIG&(PX(XEKaHFD|sTw8AY9KXt(t&HHG644EDB2(bmbvBfm1p#Voz{ujT{(%45~Xx@ zFyEe*`)D&ceAcOI8r46xtcO`rb}Ze-FoZFsQiTEkj2HQ{`$`~FdS>Bud#<<5TqfdlrAvSBA121qi3n% zLB`e2k;!Y(W_-en-!ClRt}HIh=+L9iu0>J@CbS-pHV(_g({MYY0lPlDiccu?Fq>oS zW^y3#P_m?ck`S$b5Rcl0Ixw%^M4=i=o@tmf&176qZ=bLOMuWeX>|o*@qB{u{?v4V{ zlC+}Ai@IiEEmy!~i-j^_=hOo~EQzp2xi$@_+w^h^I7+NYubvAAGv%LNl$1yo^2+oJ zwd$H^Tzsy^*oImY588{xvSsMceWD?83KSObd{5zOqghsmBVAOWy@t+a_2IN2Z-exlf-^D7=uLbWVPWX)hMm@Qy34IuUvU1W=dHjJeIL1 z=uvZ=_El$4tP{T1!^n|%v;X^;iWMg|sBeIO8w4Rrkut3cXB7i*@^^cb{t8XqoD>#$ z($yvI6cVSXV(+E_lQxV8JrS<5mUK>Q2gKPf9^S)tkFl~`h-j$pr}T0qML01U4^qt7 z!su+=^d=H3Vw5z_Idzz&B2j6JPrtAl7vpBnv5>_U$Rm%J(i%wPB_i!odzaFJu?cD> zic>*g7@_AJI{@sPbP7UkSy4m^uEijxS?PQMmMreFbQa%2JO@kB24;|tE=6_8s~wSi znB8L+L2ge#SWOsRYeQAaU1=Nz@tzaXpiHC@-EX@hgM$i9-yJE9NMoTyoK-AMRa^&V zSdO#cIww^&=*N)p^u(TLZB6OZsI1Ne?j(IA(HGU)NrPHW7*2H9zZaJ^5k`)p;*{BI zooX!&Kp4wG?BGp{G3Ztbv6yQw1~`LjC@Zn3{wF&<-*-5r0iehD*>o7N?h@dTc4lNp zp{`T+As&uFh`5cN0NAkT{8`08yCF`i{evM!YBL zfOhr?97SY-orI9ph#Gj-Op1wC1WA_l6mv4V3zIV&%_kB*aLjzn&qw@xrBP5Lb+Z{| zA8>X>VyOYtpxdwy0*#nQ>ULI*9s_=^ldbaMGa9F%qF5!@a*DqqNram>+^>nXOGYlL zSgYw#m^qXp)rAijH)?+xIPvkCvJ{v z(ukYQg3*X&A*GKKsc>}6mX2D?UJsaSZv>lpJ$Eho)u&>AlO-NrqeqV(J^hU4y8%5ELs*R5!!=GV32@$svo3e!~FBXv_FsRXY9 ze#7U`v0^J`ENe1LJJNXn{U7}CCEJQ)2TV~8e2?a==YuxEg#cV)8*e4v>vFs_bMV&6 zsMQyy*VjZagXy+YZBN__w3$mni-%_z6{Q03Q4+Y;K%W80E8ZXruAPz+sTf0KKIJkN zKt_>S@%rnNHyyHVC*!Et;nmmQh`q(>I#>YH0Y$}%M>xiEr*|ssr&Gy1alKS(;vvb7 z&24}*VyBA}SPN1if4Fm6ln-+lCSr0S`kKo+LyVzx_$C7s;Rk1H*ts%I3Jc;b;(>Oy zV8}_Du+IU;ne`pJlyYu|TPEV&(}Uqa<(tSmo1@ia@^ZY8YEO_}u~dD2@bMkDR1H{^*BBxv)mt1lf&0ka5w10HOMZ1 zH`*@wx(EV&+1iSp#E8qviy9QC_k2}C2EkOEsxCh!gLhVDUe?1p56;||M@wl>v9t&o z@6@CqQah)iUbJ{l9x{vJAtCN3e?PUyO~=;oR%z6hw60}b`L36tDD1XMklpEsiq?y= zNEE3E;X)%!IEY7w0Ht$tX*mRw#F)($rc4S8>0zk6Uo^-8D%a6d*n}QQ0oIk2Dce{H zhJjB6zBq}E2V+{7UMmWj^Dx$0yKJ)HR(!c__7!G$EKl*Nhl{yX3XE@YNsyxGZp($G z%3Murwz^n(S#sGjWmsp2J1th0XM#|0Hkpm65~2wu<`A}V^TSdNPT#W1n~@=plxQbO zXL%RhaM!WfE)WBUNv3V|uu4wczOHvtEi!1ijVhj9J0x$yWHD=!L6~;({eY+?Jwy`+ 
z{QX*t{RrdZxUKJSAvmYa5A#OJ5E+1xH2#Wphiq@r(Tm2Lu4v&+YoUXJjg$<%=9K_2 z1$sLs@?x~6Ctw){6SnRHyk?oiTE3o>Iz>s6cfLNYsl4zc3x-o9Ah4@|fgiWob3WYfIEh4s{BE=A2_Y zGdB0M`VCsl$7s!^VD@rzU+c}`0NxZ`0Hbs>dE#gzLl&p~7sCM2l9tbUs}@+P#2A&! z-*_Wvg-I1p;nI*!{UA& zVn_pMvqBm0OPf5X7ASh~y2hs(Z_j+sA}gLp+flA*{mIiY&Qifi1RWU}Pr!0E3wBRj zx`?Kju=KRzB?ufWU$w3)Z7@~wt43=B!3kycg8pacf9L;x{L-Q>T^%Ifnw--63K0A> z9W0z2s0*%RQCQfUN|yX0a$Q_zRLOaY)fybPVd?<>bwVNiPCKzGoKA?5-7{Q2iHi)> zfXq1TWl;{0gRSLu38C}DB*PBK@d$0zdI&lw|A`e}d^=bVP){1pZ0?pu8Z=BzG)AK7 z)^0F71)0ezKIsI~&j9|G4G}|U+ynqD&n_R41})Mu5PWuT1!fK&D)A6(({fFIeB5IU zc(ttA%11$9@*u(VpZYRm1bSq+d72CaBV(bOSQEd69r)>l+_19yhYm!VZ91ZU8pAMZ z?N|_``A`gGdSJnqMNb$Q#Im><(bIIZj45Wvzt8jy$7=(tvLS^HBN5 z%{3O~3w}Npv(!@|Eaadje&yOL+GOy$HGgj zLg~Snho(x$z0@-Ij9hDXfQQHxhOvoJQHFq@ozB^T1B-N%m`4RcGd5d7_mCJqD1oM} zv1})=l@~?vhyj6t%`OO3T?}JiQD9rxX}j6Bsao<&qw;ariy=f^T zv8ZUbkU}emKKzkbVPUnVH>2uUS7kdy^zC%dWtJUH8LLTO$)U81a$njnH9$)gPSwNN z(F0Spa~!gkLZ8+T#uQ|YwYS-3svl*>kCy0`w`RD1Ei;@zt^qJTT&l5C8ns$bjPetN&^j&c$VL+Q> zPO+swrp-iGfQJChl1_dSeZ?mjx15O@GF!ov>0){43Zl8FJTczoxm>xFz`YlKH>D&y zdTOhU7y_oZ)y~jU>7ztXuOK9L{sAt7B(Q)QP)PIG)o;#@&bHPY*M6x`LFi2*!`awP zO^Jx;;rmKQ147aoH)QK`kJCc8<9586(4#A>3(G@w6zR9>UI!fZj(9FjZ+@$VfmQ(* zZBjZh`;abQ@ropt@YO#QV7l2T3rX3{H+?vW36+`&T=V>{!w+fVnQJ5M)Xmm@1@avVQF&59{Jd2b7%i)T{$VF4o8v zBKb7LyMZ&3C69j7d3d&y9Lq-P8G>TL}0 zrk{+JnMMo>dPW-!LP^C+4ponmDQ&{!#YGY;xmrzzPjM-6SY)pS41^~>77pw7xaEYj zo~Qkv>03Y)<^b?hp5DT!r_My=*5u^jS@d;paRy{F1!@fQJ8)FRNE_?7MbN47lgBwvv^<-QEFBmK_ghKBjw$X|U?SR|UPhPk!|E zk7BWAP+3Zy;JAVc+NOyd< z)byNLR-rt~eS;{!?rXFWfZdDTO6-Oe&q?nb0+#M?2a6UKCn8H5;=lAndMm!gM#6_4 z6Q?3V^vy=){4GC@1eh7>pE^1@6*S}+0gIQrn~jfodso9u7pr{u?|AF&)?sLH#eF)| zRQ^ZjZ^!8Ytm(_If2bJ%b~UJGngMMka~NsO3Lv_)N4$X-33&s&=v2+%~R_6QJUCjX43bfI~FlPQam%PE|(gMU$s; z1@cmDAnl-q)fCdf<4O0Fq&BWCHe$~#b#|=@ko&D)98^yWPQ1r}e-cxB0p5slmU6^a z#*N_y&C#og0^>7^3km(ukq)1mKZyw1V)W)6A+SX9I76qkBK?1B@G&&@ht;gH(Jf=I)lB`v7 zYb)Goh(POshl*i1k@$VFV|l(+GmGWN_+h=@iRbO3ju7h(VwSv7o)r6#>>Mk2gW!#; z3_*n!>Gp%A3b~?s)CQX6tFf{^H!tBeDD!xd3Z2xpH~~a{`}_P|&5urE3`L(h2WPBT>~AnYgBm)zfGFL~6CDe|_bL z?mrkl0RQzr1%2zdj$>n5%c9su1l25W?$`W$g;U|}VDv%0R1|Kz7lBPq%6m}hJYBn=~-WEt|PJ+IXygK=layxxMs%b zfn&^Fg=06y^ejU2q227!@lbqT0OX&<1MW~V$b(p9J$>H)B>1V;Gk=JlSrD{wXJsFE zP;gNLOVDBvBJLp$CDGFW#O66JOBkDw6BP#$u&MZ!l|*J^wziUfL7hV!FeogjqMF9$ z@!3(XEp|JQ3D6Z`n2d7{9P z*StZ|2E#&##j+L=DtJdVWDcdbBV7=Z49ZxO!=x+tU333Pe0p$SPoekm&tp-R!Hb2d zjY9c?256-rD8*jHe$2LWyph&{!^d@V|G)8Hk^-^P8?A@2J+T@i zwHwYrTKQ=|sQCU?umxPMmckO%SMI(NJV!m0-cX++tFs8R>*DF~sR5|_ZERk=zGWa~ zR|KCn@I?|K1OY8>fBu@dok&n}%5)E{pI|D$WmQQHX-NiY%>2&6@~+O~f6y%bmn8V$ zS$a7t(5{!{;CL9?|HvX&1PA#y4z}pAJQvkMW9CnNJ7{EHJNoqv*@j2N7QX(GlfAm+ zF{3V+Mo;}cJ##S(7A6@R+r6P$BP1wT_Lw_0Fb1NdkmX2m8jA_g8@@2zLGvaxY-L#^ zzkKQ1b0Nf=Te#9JP!QZ$Mc{Z!F4`-_R6zIpv;V693!eR3J-bvQ(6iy)+o8;t{PXO@ z8XdhJZw8T14tc66eSDS+6EvyJrgu=1CX!HqlDSTfH|2-tzthl5;g+qQMrsm1>md!y z#CSb!?ScG{UlC{L>|&`k9Wxv>9iS;ClPY<|=12hBOFA_Y9cDjCE22RB(&*&|gwn$u<-o4Ou%-!Gsio07h!CzcK*e86~zbjE{nLx@;O1QEv$91M3| zU_Lzj8C;|b1`$i=FI*0t6jeAOqL*~^;&eOGFc#Zh$K# zCs>@0rz}Qwo5Lj$rU2g~Mrd^f%vg(Q;d|S$M5v@|T)TP|_@5r!M7d8qi=ma2v2Bbq z{7MN$MuY;^)y?-ndAMZ=xk~z8kPqJRHKWf>(_@!t8+FeaMsPI47-zu6oo)V}0Irh> z4q^)LO>o@NAqp4q_-0xSGcS=BqVH^_n!n93%Z0nETzlDkeC1WZzc92OA|PC{KAr#=vxt6!H`jicsGV; z2Gvz`-c`L*K-GFS$Ad1Kv}$tHsw!HuJ`?yo-9=}N8z@(kB~t(K%WkGlH{Ag4KfXRW z;V@-$h$USC08S|G=(ujS+C;Rw9Syk@g=aWQOvg%Y4}`S^(IcPbspo>R z7IAMzj1KX5NbS5(<#2gD>>%AsKX^^v6wBSDt^xNjy+xEH%9qP1fVslWy6J&MdbYUn z+S%F=B{&GY5K3caQ4h0Rma!miaTjPC6n8edmuGA~{m3OH>PGu0PGnS4CYpqyg>n{x z3>73mlwae(8N!|1IV)A2Vi*QX-_@>OyAy1UV#xAGbdWxL8pQKY4FHJztqCB6ZMlw8 
z2>s<=g5BZ{wB7_UOk^Mf6~jDeyAFDxsC^h`$wktFIZyRE)DAQoJiem2dDyk6mC{O7 zgSA86TGHb})k-X4f+n>`u{LMlwJ5GNllw^Q(oZHRH%&G|0>2KLsl?YWba2_ld^Zq` z`OKoWFmfV&Z$2P*){#9l7h4=3RhORx!iC(OkopDpoz#aK{%0-e%F7y(K@Ay#s60W&vsW zdr^{i)yn;8p)3u`1qVQZkx|_-F_F5{un?3rigRy!INSz`7ssxi$#z+p5!>Jt&sbu- z+okJ}zNUV&E0gA|YB|FC_O$5OJUnae_C#dB^6iZcCkDfZ*EV;<^^BVd#(h}KEIk4d z{QLep+IqOl2E3h)CJwy!{NS>JR#<9!B_;)Mk(@C=Rb%iZT>=pSX#sYhi-{2B7!C(7 z4kt{bwd?tmi$(u;*Hw)i-q6HQ7H?OBGpa?pXkJ^b4frIjyEVE52a?$QsVlk|$`R(t zyPmf51%Biq6GX$u=KJO6Eq;C-OS;kWYI6j09MA-y;LE<30tHl-6cr8#1BAuvXF!c; z4G!21(A3y?j|eZ2&8mqGV8PA+)GigiSjeL9lg!P&-m0t zkO8>`6R~{){y8yzBSt;TsS)o*X*1VCGHHmF|C*oFqKC@zsTmN_M)+}nfDt{LW)5h) zsM=Vo)Yn9Xk&&@+4Z`%? z>yr1t=TNKYED^t;Wr)R)MTusQec(+u$wrRm$L!oBL~eD`n%drM#pblw)&RmE%KS82 zO|)od{cy9FszCxLD-+X|BUuB&mu>xBHM9L5`!e=Sso0s0quwFarS?yD{kQ_!mFqO% z^6j+Knrya@k2xUP&Oaj)9*wlx2;Gyj{nU=f3L79U2%iz>5a9u?{anMduvqdSq*+2cnSV&AoaBgBq%Z}?g=PVq!Qb`|`TyUqt<|e3mmZG2LB!VbR zRHyj2Q$Ma5(oAqaZn|b}CsXm#xRVBNX)0f)sDNl{y|;(Z$#({@fAQ{m)lne`<$YIT zIE3DZ$z9xBfK6Ew&6$JjmtX&xnCb;W@cITOrD(CdLFv;&t8DDXc8+!|~d|@YY9j2!p+hH6zr9FN_t^>nL>&cqlHg z=w_*btM^V$UDrN2e5kw7J#Z4mF-0E9yLJ}Fu$FKphn9ohiJUn|3{?<#xF#|(d9HK# z5bve?9-1WnQ9ML1F5)QaXahoq-Q9hS&T9MBtXGRy@OP zAQAz#?%Uf$N)AbM7WY72zMOO`Jc^#H-8$$6(~VBJPCYd;8uY@<#3?!j17e+~HPt zZ%DJ9r;HG%X6vEGV01}ZjVFv5|nMW_MF>9%G9=HUG0u%s~IICmAjKny7=8$Xdr=t7F*s)3N zyUI*TWzo>OK*-{54qbe%VgwFnU>;(!=P%#YO}m|=w4b(HCxHP)J``E*Vt%o9ySAuj z90r`nl+{kxO1~WVX7!Q=K@h8EorDC|WdU|M6JQdUCs9)<%GY1@Kh&D@mad~Jth#mh zvIjQiK&KCnn1QbFx(5mB%N5sPS+0wGR!yk7_EKCr-`nwbSxi*lR89W4TMCUlAUNG!oUR*#HFQ$gbxvzQFWTMk8xk0*O=VaBP5j?wX;BT6LI2@jkP z%zWS=v5@KS;&nJvv4otLK*PNgIV%Gf9fz@oivXc9N-HTT0C8YHZ(|pdJ6H{V1XBjf z5|xT+S%}mMTQ|1GLpDn!f+Kj)#VeisLvmFmc7V(ZnC0-LyU5R#$WmguDOEInwsol_tk#T1O> zD1+7HTW`ukiwkoa_!*_+RY!)?_el5em1f%kv*hyDvrVN1EnWR=;54DTlXBg^|5rVetFG#f2AXu5P@F0S0JY4Ekf zts}q#t}CM6ehNbM-Ttr6e;ET<0qP^xTb)iBkSHhMf|75vODh~Z?pp%X^x4snvfcMa9@IdPOG9uA~??_@5ZLO%8zTv!Jb}?#16|VT`yFjJV zxU>r0<5QQmlo~h3B<{>~WNlhn3wmGTvC8od(3x>9SV8_GZDavaQ6a4pVBX=~kx~^hEmxjwk5=c+d z-n1l9^W#XpqV^Yg{w2X^Gkc3N>seA_n3&;JD0%?1I4nRvQ$SjYY;O=#?3O>8#Xm>Fx?8CYVj zu2Zt>yf#(6vC@d~FLb-)_Um<5=rs}FSzVKTU!o`P_utO}nfuLj&eE%ltgfMBZ=QV> zyQ0)x552yWi%il0kzz4i&wj73uX++2I(gUCiPALo9jyi`H6a6yzEOK|UH@jS(+)oF zFqIe#Cx8{tiJ4+M5SooOT8PZG=bwH4SzUNhspR9>j~BT;oX_G`j5e}n4Z#qPYb-T_ zby@OO>RWBiSc(5&WTkUqnns3r)Wp%vBTpn@(b2fBYMYKlC4q0VzjlzN&QiQ|ye>E2_+6HE1H1bo=#>qBSAD z(U18V>P#x*-~|^C9Ct~#JV3GkNi5|m0EOi9;(Uty7i=mjI3y_V7Bve|_hMtfYQ^5v z06E#u|PN3}P)7L8-E1%ECyxpV7=z#8Y1TR@^bO;IL$L ztlgyRjE|&#A%~W%5T`a3BZ3sS(FvSrYxT-ow7ZXP}w%_FXR- zyOB{{)bLEZga#k@f<<2(Td`RE#}i{CvCimQy9VXa%37z*{+luIC3asnR6#QtU+8kv z4cbXlpe!5Vtavf^#-)Y|(*Ey1e&dhzZv?X|Cs)4p{M}IbaC)HFP4Mz5+~(AO z^8Wc3@gj2XX*AVxb+2)f#9_reh-u4?-x!ZOt&*0V`*HkS#AwoC-JsNE()F$IrKMYG z)|R@SX?mwkb+HGUWu1`EkO1s*@TQZk%At*OQ>$l1pCrO?u9Uo}1aq%f+;l@ob&rBN zR;SIM(tmvQ^_XL$w{Z{c!z4P8_YJ zw!dC$UG`EzoaRGzO*{4k;Xa&&N#U8H(B3}bxs=*5HZiWr+L(_WU@|^67Nn}mpUn$m zI0)Y%AP8b0va9gg9J`YhS3K(Eelp8|nj8*rV(z%|omgw~pLta@5{STZ>{-$#rS2DRe3)$RV9*24zC^ zpu^U#81?zT_ecCR%{Vo}18RD>`eF`p)137sNhCc_DzJuA?F2!?h($f!+>MJzC#FL1 zQ7RX7t@yB1ZO^qgZ%$3dn$*ZUJp?SB^ie`sC=5Y5vVU^4)0~>c#Kt8e)l zb4EE+JV+0iY@#&bFzc-nv?Qa7TFGJgSZ-acD3w)ECsY0(;?WR^Nslw(Q~G&p1Sb3w z7MB9hoKns}IC1t9OddaQ4hk==`mAu`wNqgz4E`;%QqzOx)>cC-tsIkch71g=mxGro zoaHg1YN>TW?r=;H-ONgmNwS=ElegVcHV?6>QoaJNcgtl?CK%`dwzSp-buXnvQQc@S z(VoFso7Ye!QS1F)w#{+#G$}|sxc*&nTh6-#`zD`(961(%(;#tnNB2$ohqT7Sr%HZonN5~EWP#!3|5mc--lFmHK#wVPe7zAyS0x4Sf`C4TrOOi@; zjk2a9Ke6JRDa*rRAWG5>p@f;;PRHCfN2=Q|TN~+oI-oi_XMwMBfszJ$m4v<~=aEK@ z3^q+JoGsQC;kX&<93FJ!9ZE8rJeCc6#ZEQnyYF23b};C!IV!~ja2tufI- 
zKU(C2s|Opn)f|-qjfSf0+3Lhdmk>sg*Q-kC@o>fnnQUK+DJtu0UU{qCI(ca8r*RfP z3%*RP9)2)9ra`$A(vXpmE%$eAQo$w~6q-ZPIv9!ymfHlZS7LFw?I#bZu8sraP%@Mj zu2)?+IW;D+@KO@aSc{v)?uS(B2IwHm$%ru$Yl0zJeJzwZKMBWyEYb7ls|n-x>#n1X)=F2qWNaQ10&dH+{ zYZtt$2jz>2kgFcga?H_#Gi$Q6mJ(g*KcscLH6x-?2l(CC@1v79b@g=5Em(6g=2-md z1#C~3QWa4w35p3MogQfURPL7JbaGYi`pESO-F+{-l?G~yluld+%`>yDHR?IH#Yu2? z#4-@jXeUJ4iWdT1Xsmk8_r^sfD;)*^Yc^1Ut%6hT9z<8{=2nzHx`S> z4+CCk3QK2>$v|C(<#&_z1IBDy3)nbm;YRZP2{`0~zGqrLICE=t{m{kM$b|cIwwqqh zwz0OdD*1?5*fZ#J;tSw)Ie1RJOOEtdBkTbOh3~fR#}{1Y>t*eyHEwG-g#N9Cc(Lj$ z49+X{udp(%<~M_=y0!nfG=b3$#)|@d45}D!U09x72^lx=G{)pWkaUi^&WxBXYzcim zO<7gEO<@M`I)b5kcJuTQ>7!2=^f>n7)2+kKgQs0MjQl_T3Favx(eRa7ZSEcJTSZ;B zLCy7r$aSW_4-hM;!kUFUmlKU(7$PCJVr(}HIH1kN%idt@`*4XcO+h%a6R$A~&em;(oQkNPOWPC2YL!Ej6EZ3LyrPT#6 z^HX%#KBjPhKA@uZlDxoN$LXb+tv0HnEp#e2S1zV;y9LNU!j_bJ$ayqdIB%1VW-)Ib zK?s<3q2O-lRF-K^%8>v251x~Rbq^l-x!N>AgUH@kww zj$u2-ep>{s8(7p2IgGuwW;JQ;mh61SXHDOYheJ3n9#pboMdt>7U$?J_yd>rHgd#*q zRWuMewm~8t6`$C-mJk{2NEZvbow)oUv_rOVY-q#S`!W&`3#wVY#&!~jXi7WDg0!4A zFPS=i^Ljk8j!)I)F#+%0(4#JhmUI5r3H%vKSOSiy#^?R_^l9Le`5#@}u{mr{D-bUJ zyY$T6m@##?k(enH^<>x7Rd_0vnUL2|6#>rhmBksom9XEh`fsJH$s0lVd#nVVjr=;j ze0ARakx$+`|D6_+aL8&k$XS#?XDgJTqK+VpsUjFxRf}wsnzM@gC>~dd9)CwCo<)z= z3$yzpDy2G9+*ZnSXkO&4W4}wzF3#yaL>j%V%33d&*f{@!P}&bT<1G{WFZ=JAUq~cC zAnRF>;3NH-Dq8wyWwqfr*ZDPp0Tm9I5bQ->Yi)^OXcz)wACBhbRBJJKFGWhpe!zl4t?#1X>%a5cw8{H`Q|A) z8v6{53b&h{3XYh59xob;HoPS}dwfxJiQ`WJhy#x*sEU0KAR8@Z@hPTJX31pU1e!M)hYFVf?^WO0oVeu zv!(s^tt;^?c2;ybDX39b4!tpM&>Zg$p2{QFg>ikda%m`2sghO|OEpsPtal^GIbDln zZ0^P`s2p~Q=EO7@mQo+a>u~-E$)(n2hEEH;1Uj3t~?(SiGHXgKMZ<}h}aKOsUccb60{>XJtkT~ zX~lzTD!+z_UvoPk1r)sMXgIL@`>^VJ`V5syt*=;4azO$diJA(tTK2IXO&AWJpl*jA zZ2pXgv1mJ`8vT619WvBZYaWjWR;88!5$jb}kTdLs9(<-HHp|2DRsfjI=<{2zPmZL9 zKrPa^IDUd-kJxg!HZ@W!8%IpXRUB+0_tIKj0AT)c>0+C`=99F#V>F0K?tJPNmRjJk z?nDhNe3tdDb)DuIXSz;6YGGq4bGfOy4ms7c~k zb&TIW$$J_H!vegnt0)uiwEzr=o>CtTty#QsXux{&3=Qb4N5g)~0hl}32&;V?>Z;pu=BFolugOj~zX&a)Lr8T50kpN!98XfBQ7dq72 zVi7jfS=zT|wS~t3vy$K`K7BpBft3^wpM~1#q`Jelb!1!P$UUSWXxu-I@s6p05~39= zudtdWZq{~olLQ0qoT>^WXoW@reM#eRzHQ!1-X!MxR?rmH*CJP;qgep4oS%jn)C{S{ z<)vc~VUd4OEBzN;SemZTJBV3qk$GmF;v^sLeq->Tp4-@MyK|$lT63_`b8jU4B(59M zJHT4QAL6u1Jvpzoh-~iV@V1FE3&@F^ERl|V!U2=`<8}L^1-A%$3LVd!A^M0ti=LPW z*c=Zv4V0JOF&$Q7q9;?i<@LT_dz+iX-f&y8#JLr?$*l%ECOAZgdZ2QMf$X&@ zg+Q=Q92!0W=(R>HYJJe3QrFhOP`8^RhUpb3cviGlw_9J*|cam-HMx_9poL^}4EM*UK z(ZZ>E0g84~qkCX|<6^@#fRi~%=PDAU1HM;pMVD`-g=AxT|(!4u^sHhMhS!)F!tQsNPV{asGsTdTyMrwK1qfA$rQ7kKTU?T zj1%`{U3uy|dLi&lJ0~>Ad(91fL+->G;m+V4Y!bs6#rI3-sqR7BZh60#>TZYppx6Sq z@%B)i9D!?2S`SL0wtXnNn8m`+PlJH4ghLq6n5;0FE@pYi(9iXiyL%j>F}%7}iW1+^ z1LKo8)&L(N!|)8HeO&Aw(Sai;9)!WR=>Q)by%d0rK`hnQ=2zS&ftv^vxQI^lI34)C zw$$F*otWWsSFip%eIkNJy}Ed|&HgBP0hc@8TCfCmeyiE_ANL+==r_l&#}i5;W#7#f z#J{iWy89{Dpk7;yL66al&cQ8J4gGTzl_;r49?k*#3&z#*q$q8OyJ z{S-g&J{vZdPhM(z(m4l)D?Hb)kBr2-vB1%YlZ%_4MkxCz9l?&xlVnE>@{mnPCaW2< z+%V9`&3wX9W{*}uX4CrlIyN;GtZ*Nh_5N)+y8gRV&ku_U7x8aO^+8<4u?^R-97)=Y zUl0c-r!hgv4Pk8Vx=q)7*P0|91t9iEq@Dke3>^MHb}0=9Ig$es{*71ez9OEe)a#X( z}`mVk)L~Q@7APb48=G{vBo88$ARf_VU)3KX?_Mw!d zRmKO5%+kY43gk!ngig!)>eCYZzMD4vO!RD_f+hoQdVin0|32P`!iPc@+iXP}M3dv3 zaO()g6qwOjoaOKK-~IZ}W1}oB*DDQ)nxlj0n`YgegT;&M3Ug_gT~^8t3``l&BiH{g zfVswgK^y}*f;|-h;71%q@iozU2B z1~l8d7n5umKqALR*XC>Wn(n*ylxwv8$Lnj=aMoMw#QF`ji)$<;uppSN%3TP;8>8`R ze9rn{K33-gVvR#O66*B{9-QoDd7-!ETEkQZL~faI_1B#px|z!FKe4S7!U?eVn=$nR z+yTUXG$7;6l=Rsks_8!D-4tFl!@*1z-B%jMi-$T7unavSho(>(gy~yc3Dj@XCp(PK zKNP8FYju4=$MS^WnKtW|dM;v#4nj`v#uKwTr|n~gA%1B+aD03+_-=s^ZCJ0nTg}Ah z$XIKSF76>pi74LM&b`iO7iKFlI?9cD({N6jpK{`S#{d617VMSR<3-$g~9k3|Z?(_YOChfEHQfw^mu@}MgT6D0(dA#wJ5 
zIoooWfr;ZONqrz-vE4!@AA7W@vbZ)c`jg^vS>wTChHe>M>C3v75Ao**PyzJUKS15hCZhlz=g57aWZ4ZE>XJhA=%Hi-KPww__^X#V=IC zP1-C5!C@9oYtGEi`oGW+5kou4&vNl*O^v3yXp8!^uyBAAR(JO7Fwv|HE-02DyEEq8 z1g(-5(lpIvYaX&q;OdyD!S(PO`iXCzrCnZ{4mEQi>Q(LSB<^Ns>RNa6p_Ts18ea>k z8QZxD)_c&HB5oiv&>Tuqw*KUsnkV&wR+qIpL<@+X*^2u@VxDXzg`nG^azO%i7eEq` zbf?=r98W$!!*bNB3zfxC2AZ4bI4rj@l_+mVWydW`LcYK0f25JV;19F*;B7;z#;kz! z@-*MR%_HD$)^ammH^WVcno@8@Q#LGOGPp)3N8^czWV>3Iq8%(W#A=qiKsY-EX=)EZ zXBx~R+Z<-xo4d&?3b3PL4EQu$$?J_7(VT;skO-B@bF&*VzF?)YO*A&!BKU2>mr>of zP$$u+V8^QMd@%**Qc@!tx$VG{ANM~B8kNH2=s5_QM3&Z$TNd~uI~jg5lQRu19R+At z7+A0n=zl|>Cy(9DsWFj}^7j8XbMO8f_j#Rp{t-L7wLi^H6geVgi3SOP0Oi<8qtUpC zxU|p!DOs|d2G9VSxHQy_CI~8YO{7WHBvqM8wzg_&_b1vCB}=j;JB}`vEU5}?{a^bz zZ{wWjv^|rVWG0(cBAFn%zu)(A-t(TzbDmS^r+l3A?QUE&O_HV|O62x)7K6Ak#GtNILo_kSune}2qtQGifZiki?!_IDNfaIii7pO z%f}FJ!t;Ecd0ofi)c%1|mC7{`3a`0o!|{)^{p1b~0+VpMr7z-)TaAMRje?2om_3CA zp&=~u#5BUJx%8(`_({H>Mq(8wWY_gTf?z5$#MFA{{FnMAA)b+-2#63LH5!Kc8j+D) zw(3k5i~crkVx)a(JkQDsCp>n2O7A$rk9#DkAwja>YIwd<)#vt@TskGSwTk4Xf6PfX z8`fo^-P@1VW~5{!^x-OTl7ccTJ9^YxD=3s?XCZ!$V`||OETK#J93m@<5 zVsc@ucCUZvf`e>#=|_XZp~5I2JV6LRoZF~fkivNM2kgbqeL{5E4$sMonRHjJrJ&Ak zKUN6WAX{n`a}_ig`j8cDu!>y`gC!ML?_BWW`uj;lgsxyNS=cK?NThSQ$*kg^W3A}m zPY1v7=NtmP(7MBVj0(q!aKYi8-itT|$v0B;^flA_8(hxq4E$c<+ zQgMBJ%->R3oNf4Lh~leT!s2>$L8l)1gcLL|BrFltv;Hd7g%wrk(|ZJfvElq33nX&= z&+FI(df)iOb!oTZ#c1^>s>w2D7C0O?JKJ#s8SFEs)n4mUx+)#8Znt*$9aR0{&E%9u z16`d%3EaqrNcQn9Z!9G+#SCy#jds72I$dEBZd;yIv!+p)bxx6q+9`Yp_~-{g`&NPD zh}Yv#WhZ~RlX`f9i?GzK`CNMT0*Eh^iN@01OK#sRZss=EWnj600MmTB*gv+K5*+i5 zt>c^?RGFRi*I0*v(xX|QuaJthk?Ql~Q>jGN;spe2PEwTjqE;dq)41o`cT$Ngp~z3{ zU%h47vQU6E^cGOsQVo>F0VU0-kS2LgRqI;jJTc)83-4p!E?@r6rDy#h(JlC=gAaYP z;R27UNK?V8>wBQ`XL9KN?bVI-`o_9{+nOZ$h)cPQ|3Feu;dO?t1UKCfNh_7$MapX! z5tb?w7L!J~^yDS2(sl<+5a-^v3xkxhZDQk+I0LzWV;KcJQOs`{dO!R<)nOqUDm#z({KO1X_ znnIwnKtAcBTc9>U&oj>uNE79#zDO2!?wr~BYA+_PPWg@v9{%z7{TI#qBXq0vUaKEh zACX*Z0z??zciuV#PKMHKbas+52Ai;l8Jm`1hBS2SVzbxWPsybh2#$r0-($KyxC4%v zuY=j{x2WmYR#zZ6p8Sz*pg$c$N>Q6JV)ZuZESl0r7|HAB=l@}RYY z?gwaA5HH%k%2MOsdQRkT2LDJRO7pkAXed^nEz$fE9`WH`e()f;ONis4N?dII>^Yq= zPN1=dki7OuueA&&PR-we&hiwc)X2QhQ zDc3IXDSskXrV$bZ#QKQ`(4^`>G-&R;&$SSDPpRsBFD4ACYA0^$A*)Udmc`@g8SrZ1 zgpJ<(e$6*6v-LTCE%1@Ql;XzYNSozr(6B z{s$KUFer(p$8zCD4VZ%BWI*zdOQu`=%Y>!FX0Bs;#4mVbT)Y;v^YuP8pPp+}ON^6z z+%!C_uS$o!nH-E^f>rst$*GAnm3>oJv3b?irIBpj;)}ijlvwzX7i+b2{EBPUi{jAf zvDBDyr#*B6{hJktJS1wCA|3V?kTgOcy%lWhM#3~=*GCoY;txXlua5*o5eJMnKTXz% zDh7aqU`9x$VuypsVel&j*BvLk#nqZ5)f!ZHM-Xi^G(jcnOFiIu-C>|f z5s#Q9aA3sR}Az@4RTgZdhkuzRlwlGcJm| zpmNZ)J_M%HP1%&~(mpH2Iv{bDrNiA$EJ^WUY}~W^IvF@XrOrp*UVA@^Y0E{uvF!7^ z$;?~HrEglL_b?dmA9?)lH*XF;@Sn^C&lN12Myg33IKvZarB8H6cU2uyw~LTK3h5BV zG733aN}K$cp&e9X!cF7Hd)*{RwO6kU-t^zFEj=ReaWH8_?!(pWP21+j4m+t!2@E7q>Zuwu&WVmnLy7BtcR!S>6AC-X+UHZwU<^%^McsfqR z{7UgS%EKRI6;(NIHE}H=1i8=fI3yZY?r~QK*2Ej&h3rr?NHclUb$^ zfrssGWw^<>IBK0*Y=UFpgCwy3D97%(t47F=DhIoC?TTi*7DQ?u>c|v2qQ#kw_0_ew z2uiod1m2s&_&F2srHXKKbg@o2C|e=d<{D4?sy8Vda6QejL79@mNcyZ~hfGnsNR$c& zFdjZFf|0;Tv^}*_4_HDz()P&j(Ka^t0eFoB{+_X5ygzuW3s*0WmdgN!#$qH2jjnIk zPC!nb@AZ3|JU5kqlu~G_3GC|d=i+Bz7f1-{Pi2mZkAI8e#H~G7~OMTe@SG+QFQ`eZW;34yf4+&Co7HeZC_4V~q%ZtH&9%iz=M$@G@D(3yzw* zDXr8$0uf8N*^gJZ1$B`)OxhSY5jE#f+rDTuv51cOu2$1r0fJ#R74076nW-5$M*Kpm zr8*=I73LDL&R_UIsHz2k9&s{rHo?MUru)WHz4oaDG_@+0_k>Pqh0lCrGBMi~GwQE> zd+|4Oi3~Y+D89mcUM7&(4u1t))e)z`hiML7isde1(Gp);UGXgjsq7!ElB^{K;&O{B z<}&oz3?6>#Tl!7yu$>GgNFoqoTht`DRb+q+s2!+y*Z8$+|?0Fy%;K-|Jq{5yYTkvsI4K`1Rhte`B7B;wv` zaEwNfr}Nh4Dee4IV`F=QxwBS_)RRFo*OgFP%^vq|}fhzo9|OAoRfm!=`KT=x)m6_Qj|Boqo#j1{5x 
z(|$SEDa?|JivXD!wp5*RGy1!0#1x`T3NWa3bWR}$&^xA(wL_bd^wlijg`fG%p|#ETRONu{EO3CmjxJrol~#lp{=;@B$r8cTV?vPHB1&AbPKN*s z*R~f~nAUa#RUiI-Un2dD>jZij`id>iEG{jszvMqskKG_@ycFC>N!$NUU>5^XS*s>Fo!381P)b?Fn9xtDSW6I0jkptr|K$$Lp7 zYBQ#NAE@(^!~j&yO)BP^F)1Tc$S5HKuNu-e+;LX&F0){efh)d4aY zp&5JC4Gp_dTl0~=kSd7)jz4mFwn!>VDqoTNkbYhIHIyMcolb1W#Eo=)Q=&-iLHcax zY?I-pzkrdJEWC!h2DCKORlzIJ$6gK2F|Y#u?Mh=|ablR4eaoM(Y3OEx zYT?*#>&5IkhpWjdu>8|7oSOp`Ag;~Ggc&=50Vq!r_6=B<;Z+b)*UczQ4w(PMo!0Dy zfU@~paYq^Mk)(67E+!@>eMc^T`;xEWO!H_zQv8qqpt=d&Eje5Br@#+7G|M(I>N7o`HnU-j&32#^IIo4qx-hpH;?9u%W zH7w0ZO32(lvz&a7u3wMq2=685I~o>}2;b0*b=Bp9Tu)&WqX!G|uyotF&U+nCm&S$C z_l5qTAYAl!2tK5*b`AL!97My2VOqbHnsA}*$YPaCy_2r`5<4u(301SPVqSJPB=o6Cr5J#fSYuyyHDB z$y&+PC+`KaiwKR7{|2J%BVf2>Am~>W`?Z_7ahnu^g=Qir_|w7H!A)y69SiD{EpxFd zJo@Lj(y$N~Kv>X#9X+M#2Ys~!IaG0h&(2LA0FntaJ;Q8Zhv>r zr@RSEPAhQs^~$;*3%CG%5AgMo%>GF7qqec;<43Q5VaZpQ+?yd09jer%?aSHgj}%`J zEXI0u# zF3P|+f`vSZTlo90#}x%E)jz@)vf(l@HSVv&0kGc6{z2QviTpe8e7;r}fWKq_O{hX} zNVS*eMQjH1C1DM$zbg(yj$0Z6B4Of*=-v!m&OL8Q{fW?Ce(b4#oKHC#5U>Pu&VU@T4JMBt9fGf!AwAmqQ5h}K{(%D7ON zv(Y3wBjCo`+O5TEt>JTP(ZwbQZvo8`76EAO@31N#vX;I{O^bB+4_q5#Q<9cVtSgpH ze?efcg;S=#SjMCWC94?@+pn=P{yIztjMA`^$oXYA3kzG{Mc>&q)jV*O^5g>-7mJa# zc4XWd#6fRZwl+ihvBDxJWpaHovG=XbRsHF!2@ke_&*Upu?uxfGEOJ@EmqrvAcW2=- zR;e;~a|%!p#5Jf10_p*p1$U*w*8uRe+@GW zCtrWD+}=LrsA=uU44#-l9tWlR;*0t?<_rs!B9a#;Z!~m2XYvy%G8h@OY>ieT0b{Ps zWE8Ro6n|(a1;xgpO<$k9dQ~+NXc9*RFEoJJ%1ip;sFQd_ z4!PjcAppfC3Wx^}E&wYU;a%D)Mb!!8D|Y<=rKL4iy3N!}8P}Mw3xGa<=?rW_{-{?TTzT?I|Am$9`KS*%p|xjQy?rRQ^+Etd z0$4u&+i%|Rr# zKunoE3jyaU;Dy;rRkPno0q;Alm}QZHykA_#DeSq@PK}sw{J4m%ZG~^kgIB+;Lqv%Y z<8{sT%pYlG;e$JYe8LAqm+LPEzxL&$Wyqzdp~4f+aYN1m${**_Kiq`TH)1Xio_Qtp z_+m2!mbV~VcpN4BF{K?EqISh>1aX*IdRj^p0VSW??$*CL4d*EF%nae8F)OmWD0?c0F2l?l(N zAg65r{MJ5*wt&}{+im-Oa8~nsf4YVk55yl zovI_MLmSFpvfqOGG6%F)Z;80U44rS@@Q-4=8~ci{x5Y2o38iV?Ab~ZNi*uP!zU}K@ zG5my2fXgdf5X;?xf$-1m_Rn&)H(|(R1x=0xEdh-eoBO()hZX$A;@9!sYSRlM&ID=X zegq=SH^vjw0T`0e|B?gno=

    I{twzcwRx)iTXUq>b*33J|G<4S7abF^ zMbajVqKc$=mQ1UwP)Di`aG^+A!y?%UAOR!`Kth?wI*_I#+U~w^$9O!x8uuM6QDR&6 zNVe?up>4@AEa-pD^X@O#dRLZO4j(R?4%-DF^ZWK*d+l|6*ILiFXXBq&+j?vYa?z$J z{&2h9vi!oL=-DzwtmEA(5-h-0rKSyHpuRI6D}&D?x;Vx3YB~gQdtoZg$CCW9{vLm? z?O-8Ny5|sz#@O7~JnQJuAEGw>KuU)8Ch7gdTh@W^ zU}mOU!C$)F7GzPJ;3wuvRhu2{yKbD)Z5t0g)RR`d?#OP^A0-roP=1wn_(x>0+rU3$ zp>b$0w-^2D$$=6Gm4JfZ)?|J+fMVfX6{~OwM}uCaSloCBcec8syab`QY1;&fRGoC8 z6>J}hwg%Sjg$Yd? z*}dZfw?U=EElbVOh~{#nuEhmMO-q3|ho!sdx8ST0Tup7$oix94awz|xHb#*<1)eFM zhuB66q-y6`6=jLmk8M9BxRJSAiQQw|9xns(S1kIYA0fs=urP8X*FRKd0ix1#@?WSzs%kqD`&xm#Tg71 zS!>tgpRd?!o#2$Z z8&qP*hWVq<2T*T4A4`{iRI5*px%`{9X0!pZqSpEg<#B475A^*|T_QMZjmA892fWJPS5qTEw z+rN^Wkk?V5a)Da1BVEc+32CoTW#vx0ixZ*4=@y zGgDB408V-KaE|e`Ar156c*KU|l~>z4Hr>>YWSQ;{stKc*oTqY@C*m(E)hhj;5GFDz zkUX&=Fz86@_zC-i2y!gw^yDrmxBI?=J#AvQKrN0IYjNw`_|^CApZ71HqS9l8Dyj}+ zhsjR_8;U^22s}J?oC95`?BZrG1P)?eYL~n#*~r<5=4QJV#FF9uXYqpyIZ~w{a+Q$q zA;4z6Qy>D#_6zt3h2TfZI`}ca-m)TqC@%JZYqCC*2 z)e-PFH?0J#6sd6MP%Oc6j1oaJ;bbki=;HC5xfj)_ zRB)UOZv8X|1mU1;^=7B>SJ|CkHAQYKI$>M-kboh!Hpz37cL-P`D}0`aVmLJ(DNpCS z{`?IX1@^HKqrZRdonAme)Wy?vt|mb^`+TzEDo@4fl&*Rx4q-XsDNDG7D+J$ex4cDs zrCI8G!910r_g;v?0H=+;VDcRoJ_eX-U`@XQ>?{`awr72*o8zSD^c_G$2&7n`Yz1AB zG_TIK56$r1v?&o)MJ~O3F2f?TV?#Y*`rX&tM!cfuI9ObPBO#NKHn5F@cw9vN7(3Xk zt$&D}8y$9f@)pw6Yh;pWo5N&2MrsSs`X@D_i>HokYl+)l_oslpB==)`#@dM)K=&E!?MSpRe?lh{pkrq=gfY57{hJ1yz1u!TMzLEt0BDs6`DZU zhze)KO5|n!H~)30TP`10k=9Dn9=bWDRFE2;Y~@OMti!EvN6Bo$)hvX%n>e(mYtg@Q zRmC#qjc#v4Z@A#q9(VNoZhG2P<5%V0HI#LP;p#F|sh*V#6wW8HYwvu(Q)jSY=*nSc z1rDfk($%cqTzQ+MB*R($&K#shA60A&2u<5?nhumMn*8D-hYBo&BE6{E&xR>*eMb8b*Sbz>)kjum>4Z0DG47v%;1XkQn*&aHP+N4rcqOU3? zh6cQ(!Lh&reoPVdTCX@-tgkW(PBX8vDEIV)NTe5{BC+D)5aD%=*{yctoIlyYZRVDn z?UI+P#)2pTU{;pC>X3sRDX6O6H%xwZJXBMiF{?}m!e{a?<1L8m`=uaAT}{#^zIda~ zg69rk3dD1J@!3)F_PapMZeet`kHeo*gEu*@s)wCmj!ER@^?Th=1N48Tj&6}3ar}^8 zT^Z5nD=gY!ooiHbOub)>?U)h84kafwgBcHtV}o14B{Z=q)N(hjnRfRFP*vlShV zvx_V2sfrPfKP~={+Hc!k6mV>CQ!RdH!oTLH4(m zJ{d7JL=Au|-!RmS^5TUDP*D5n(1Kqm=f7TzUb3_ym0qi3kcpe8$mg8WqvDoUjJyIh zu!9*uc*_#GB;o{3%Ae)h21)w(qy(&S9`2|~K%my{FOl#jzwB+q2j^psTF#O*SVdTG zlV}MdE}=#w)p5}iRQqxx{XCitvy4(%^qQwt^)O0(yPCQTL7lS5DZzox;}&ZK=768% zm3Xy^7&MqE1))aSX{2k3v_6Q6uJ|X*YG`s4ag@JlC_4ZGtQnI*;!$O#-e(Wl5L8P2 zGF5xfI3y+jP-Peoex>^U640eX5<({rQ(32C3~0zOZ`d#VE(Bzq3w0muQ&Orb@$^*V zEyIdsJjQBXZ!g9_FW0KCafer$=+c2ZD|KpeuShvpr6&LAfi}raWidO0AV&^2& z>h9Z>K;qG*GZq4l#om+-qgV@|BD!g3o;_6luf*LItB5D5B&@_NqWpU$MAM~XkwR=sSjd!T$)zI?5Yg|G2OfJn!l>8O*g^H}}e2!(VGJnhHo2-(mS`ArD@0J20e^ zWdl+>H9NAs2$&6Iu0f5De&>tt*4Q@^c~%Z$c-_OqVNe9Kk%lXxhfVvWnZ;jG2?3#=!bu!9Jh@`C-h%c?{Enf=J_?F*bl-@7s~Ev-gJ1&| zMz7-FN!jHHM0^@A53}N-jVT_m?_ic}>4aWEQ3R~1sBZIqFlDyAqoz#+!jT=DyMj$T zM-xtesf$mpqdAWYD2!I_hmFS@d#jrwD|u8k^a(;MduRTJA?+na`hVBM+9;j{RzM}3`YmkjdFg^j;qnA8z1qGS4AjA1UKm=$&=mz&bGxEPdpW0J|T=g8l(C~ zQnbtjm4h;Afn3^(MtcaCR5{sihjY9FOsKkzTjN3xs)}=UigA=v6w5!Q z)%y-BUvKB)C>7_a-qQfY@a>pVPRdPnw*{`X2Yjm1zLkrEJIBXs!mGzz-4h7{^LIZ! 
zDsakOXW6~ULQq>Obk~_V%unZfuC`0&1RRg}K@;(k4d~faKpdJkTo40r85%=z;-JXs ziSR@5cCi5nBL7GV!Kkr|ZC0!X1c4^P`Lp2PNB1qdB_fe?#8>0y5}__a21x{0cpa=Q zubjM>nsMT93Tm3435Da5`paM+i=MfOiinqG72kL9DQs@W!E%ATy^ zeA9CAhRYLQB@Ien>txm~qJG8X=q4V)MazH}V`)4z-#>cX!rkiQl;a&lHjs&FCNUK% z_CyZxsNh0lh)b=xyTm?`M5z}ELmH9R5nY}yeR}dWJ6pZ=LJHdM0F!OV)smcKa-o#1rG{6+kVT!W~0$C7Aq@02}Gy{jIQXB@wd~HUunPM zuP)QkNwoFaa+`A9@s9VRRBREKoBUYs&C!Cw9Q2-};ROexuox;K9@dB7da!g)Y`We@ zeWbn@3nr>zmlWo7qyLQ>b(9+8^d05ZNt7g-dTymK5Uwr*|Cqyn;U1AxN2+UR8~{?I z$R;ptf`6DJ1vCZfAD<>8o0%8gjvMDJ*FIVhzEs@6w_m}N!BF$1RXw;VNP$e8_{Db3 zI#Yi1ZjJm3Tj^7cfmXp{O19p4FII`SK1;>2ieW12P}Cr~M#D|gpi#`q6*oe_OiQek zr5$By4g#Za%)2-Jr6h0_6H-;dZ)ca9T!bP0$6}G1t z7k-(~S*JhGvS~XJTjuY(ISqw%mB=|MrByIDCcOt}P~VnJiGh+qldjq)#aUlz?~F;i zGKy9s62xfbNQL*ReJ?c69)1eq2vdi<7#9(C#O#YR`=GR+mCXBYu+SUr<@m)8uN7~|pKE_jtd#1Bi@RG_}q{t+rTmh~BHM?9>c zbQHZUvX37#wMAUwxZLIFp6pV*o*nMZksw`q8)D0chojUPC}Ala6Xpt6!&|&W9?uiW zaPE-nf9wxa^j%JwP89Bv*fD&?yu&SUr^EAZ2gg($AN(BOfW?}1H)SVdZv}x+{Kc7- zSa$~`YztMHdhzH1VnlJ6cwlvk@O+F1gYti9NioD6)xY7%#N=e2;uk?st&4D;WfIL2 zOoRwC;zY^(73UKlu<+6q_UYdAgow>W7%$c30RmLg=8ANc(~~g~-_)=cte)6E!7E$$ zaH8`}a!t_k@jc`#E@6@!d$?37;qok{X*?H+Q1OK7_YpVlOnWm9PTobl08$8f5udtQ zYexv}yWZtmO^O^UBcLWt<__r1=WTIQo0jm!k5^(3{b(Lc%H`Tdc!BVaH31WO;l=i< zf8_3Z2K}H_W5>cvB$biu3XjA-&wz#X0`HR*5ZlFH6-@LiP_MPfj7FC61_by~@oLX= zui{Sx0;};PBcIiIB2J$MoY0B4+YPS1r%OS|kM6vuhN4^`dDet8fW$D@($#o3Da10NNsq|?Zdg#hB|Jt1YGXsoVBz+qm? zBm`;=!%I0-A&tt$4K2&k@^^H%jM$)pqKu$Z zRL^@N*5laV5O(n&f%iQurX{M|YV}}b6ptc8MJM+*3_L3i3gKqvK-5X0mm4(nkt$J? zBTl(`iHhK^o0N)zI0>V;pF?H4IAK&)`on-bRaI| znQH^eEZ9e#Bg&aE|14<4s2fpZb+_OFI&8^TUzrUTAjsv}l%ySAM?HUW+$G3sK7|P|K%3IQ>ii0tq2|A>Pxi>Uhsh z5{SDs%fJZPUV!1RZ@|5j)c6;>_|}__X>{1WPii@0v6@_rox#uu-a15r#F<@gmtrHj z`51piUSaaO&Qy@N)M27d4eZkZI=`ooA2K7ZpMVC;sEEz$bbc5=Xsm5^>8bZvRqMW@Z`8?HaNIOWH)JD&I!YYU{=z@DM}Kho-JlQG^dR}1$S${bQ6eW> zV7o_K@k&xg!BpKTO82lg;E7MY(OxyHu;n2k;rH_^Pku(zz$<<#QmI^tO|XKVl1p$w zswdv_X?rtvA(yS>eu1*l%Rc@H2do4(&&E;^(zFgJP!En*2rU)<$Ct4mW<_U;%q6&? 
zFUQA(VAf2P_4|MUn9cEbS(9@c9y^p03>>Y5Ke5EWL(*j7d)O-j#aeUf+^SCo@M%6}4$B}c$lq`8#5Y8}decMMqzSxM z_odhwp zGE$!Nj(+9+6UTh#Zmi5x&$qYzH}OEvGQKM8zaC+^xI0BU<}XXaRrU@8BG0Gn16t?J z#n_e;u#9m9^}A!1TQO4JM~!@`%zz6RzNV0y7e=HBF+445@Nn#9tfQ3}emPTDOPg4H!jMU)GrtHv)CzgPKzzg7u? zc%KMKUcpUjVUq%ITDpMWXIE!3^wGTqG7)>n%{%ZA5h3in5L6m}#GBrK|0yWn@zY4G z(t;QqP9%Jwuoi~{1<{LzxQAef-@>B>hLm?VZ9JLG(hLuhkN*5SDJeDBb5D^A!va)g}@vnb_!ua#KKWde~a zg{_zi|M*WHeZQsKOf$|`VieMg)~uohuFiQ6?r>KTFpCc>dF!BX?4*qlr9^ks9-t=fYa(>Mm@ zRz61iVLLH1{montk*d>;+F{;P*O0I+)oZIC5c6)B5X3VzLoM zVHlZ91~@>5Ha*vE25+e-I<(56Hb7ntozsMh!Tt0&rpMwBou{nT0^~%vL2;R5Nj1{fqsT%>z}1EF(c5Apz7BDPS%)UIn7Js56WD4aEX$ zrWErBTOz8He_L>}n*!hI(jK>>8I*jI(DITvwy_EX*})xSLi|BwCa#6_%yF&rMpG0e z)fNY+pxLJra_#wRS>6w~PyAk-lioAi*~zLf2qUw`M`8bnYUM9H{?qp87hcCpAIo>P z(wTA1(e`n4G;j?eoOea(U2hj*oR*I|cui+_lsEQQjrh?BdH5fn*WSPK&pB z=Ky`dy2k4<0@&HfgOFx*a$bxdfck5p*&fzIndN=j3F=jDR_E4esWBfrgFVul+hg^| z^yKcrpNu!);@A2Fy>JJki;Whk#WvY-ySwKTt#BH`LzKA3Edaa)=63qK`+WW~vXdGES5Du-4{dWGMKa z?H-HKNojFLfG`SKcA(7_9CoExcA3QGMMl9N6=Ro}998$dM2m_7tSR`Dmc=rM) zJGMX?=OlgiA?d|`8*!}tSnw(-D-UCy2bG%$ixC){$e$ko(EE{C4<)RMPvY+r`#LEG zGf2&Lo(z=yHa?5;=QpIKzoyh3MC@0aiyF1IPx}fw8?k;W2{VJJJ9smZ(%U5H3 zI!*^|zjT$Poit%gP(rW4eT4|N6EUJUS%Hb)2VC+0t^wwWnq;~^t#npZ1jb`nJ!}TN8%Km#R<84A_+kM*N#YU zVqO4YPoT`uqB<22yza7Y_lmr`kC57avxn-fmY4gOAk(r4C>G<=3d)E`kgJWPXG?GSqt0x- z^xUjt2-|f_)>>l&xMN4~n|~z-v)ri4fibEAr$dD_DeKp*`20 ziFvByK>Z{IUeTZFlA~C5{TiiGD3hlKiJ?9lmjvK@#Z!Zy&>M=!YnNL|Jwy?cPT(*e zgdtA&K|w%zADK#}pQ^=tFHBjmI*4$c#1b+>0Z#D8Z+H}hBXP%#3n}=0oQPD9GN(i- zQOPtSDi8{ zm)DZ^gFoiL*8c-H<~JH{9_{KIYi;$8YyB|sp%ZfqqnCLvC2nB- zm5|qgk|fP|Rf4)}bKzIa@@oMWeR&H2oXOhaP;*C^a`5=;bfdZWu@zqr^{Fi&z_W+9 zf!`fs0A_SD^5oRq7e}bnspOUc`d3Po6|Ac36*`!z_>Aru_F&tx*=xcR@3noqTrimT zhr~lJ&-)_QK0 zL_P54e-a`e8V9CQq6(^uawnurbYTl7#tM}=i}eBqRSP=9K!4G@I6Bya=>tZVS1=Z* zinEk~5*t$^=~QRB!d7B2z}3%7Jdv)sps^}Gm+(9y%T2*d4hk>wbwWmmMa8|@=3+X! 
z%$8O{87f8qFDgWk)X8$>&_U2IDHA+B{G)$2D4u^CKF@ESLhH`$khEBfKFd1Ej^RT)7 zq&HQ?8>`DtLWj9B1KGO|YUu42tA#p@Rlwfs=QK{;85To@FbQqROe`V5`=G`AGJQf7 zKf+o&F6v%6mb<$iA~m&8K}NYh+n$MI_quZ}hsr=@9{7ze8^wjaUy_|mhV9f(7Lq=QoazZ~}ebvRpxWgw!WV?3;2wNy* zYTE)@??*z3?;UAidzVdHW63Cf@5!ty-?=o9-%%9{H;~k`7qo6*rVspUe{?T2R3fh1LbO z9$yp`@U!@Vo7xHGK-UT$bnyNzU;bg%8!i+-ohtBUW1@B=gX!4izDZDT&~+YMnu)aSUZF z>ax$^KyrUVoKbHju}R+_rBSIkw3GI((B>#4P#V^!E5p4pcts_T(P!1tIia7l1J7M$ zsCauk5fW61Dz*=NAP0zRU6WSFMm9ut<<2zf>9WrJAIz;Hz7BJ&%_AdYIou_ zU3SlkN)T*R!gZZ-x3leIraEdqWU}4WEA4tb5$q8kB8~Gk^(w=mtjV3*{Ww(d zXTPcis;CcQnz2p9GJAdUa5of!EF>C;F_2oQ|3)a%ZrIT%o+2T=xXvj?L=MBzf5*%K;;ZpEe9OpRb ze!NTp3+Lu!Sebav#lsJ^PsHgZtE$B8Q4X~LQE$hEqnRijVhcW-kgUWA?=oJz186tN zzDtNnuLTzaSrGm;BLc8*z7T>v*#afqD*fWF*D?~F`oZOAbdnI}H{-qdCi|nj|HJ@2 z!EB1@QY6-Nwz(rSVM*E}anDEeHl;W2*G^x(`TEXju`>y*B8QfQn`pSqseerS#vfL( z=;{N&BhC=8eGFM%t_$d_^3a}>m0DnlO8>ENXs1hZT1-jYx@`Rh&*-c4>x@D5(oZmP_ZXCrt;uHmw9dV0PR14LOvYby*Gx(RKPOKl+j2%^Y@?;T zw(G*?Yv`bJVb0z#%i;1TKq%QGusRr;>+Tu&Kx^EgIFBzBa~p>A>XTO z?M}Q!_b`fX*Ty2ZhyMuApW<4jViRhJ197yMUM&`r^Gitm71reMuan1s@jN2|YE%Ho zeEq)-@rpyDOga#!^t&%B+_Fzz6;X4R=Gz;~b8FV<$d|y=zE<%t@hW5h##iTW5x8%2 zHoRcB>g0rA}l7U2Qgl>?2w;%P?$K}Lb;=Q`+v6VBavi?`{8fS zEw?uo*DaSS9}EVGj))yWZqxtlDL}3@hOifE7PgQIaayNB-ps3>ipI82Fh%Tsa2epn z7w3)&LWt}=s_~khztEO8mhRk+XN$Wu%3El!y_bW%?(iQG1z?LpmA}^7kiJ8qm1^w&Mo8q*soL}2 z0ybPh9=5<=A*)_g+^_mK*l61OypcFkoJtNu$gRTPiI&PDTwDvqh`H8#3*))EnV2Fj zqqKGKC^&tt@zD$MC8{+AA#w={<+Igq@G4ZxuS zynrD#O7*6MkOWS-mRv&RbM1r0;^A^4fwf3KRklNDL=atDkCzZ5`|=|i4Do-w^jKxP zSkqWIDNtA2tK}A{XC{F#+YAoZEyDqJphJ7hoA1K3x31r@eB%azSCQUP+mAXEd#d-%vdAGEpR+Howu0ra|e5d!EFca*stR zKw4{Zw{UV&lS;}NPW>?$5KHB!!3xHkh@Wrp(`*|QwA4YmfW-2y#W+=_U=!xr$PO?G z2w(G`FDup;_SH>JgqmFYGejIEqbX>;oCW(IMII#U*3(FU&sjz#W?j5N3fUbl5f`De99`e)k*v-6Zn{N~A2ajf#3?NPg544Tba zr7;^Wo|UEK#U)b{{>9KK%9HOFzjTld)b0SE^&(FG+_rQ(WU;iIdqF+Vm)VyWZJm&z z3X+f);tI+rmiO`;#&^+n*v}y)^UG~6wpbY0_u^w}TjIDB$Jj*<&-gbPumt>76-OuwkdL35b`_h zRLpoElaYKZzo=JWEJix=;OEL=NT-}smzQi64rQ#c^Aa{xHWkJDeR8STytlu}!Q}C= zc0+_B#{G1oEVosOIGuugukmR~>Hvq7K5fGCu{7iT zBF7a%%CyJZ;;{s zZ085;5J$(IuNEnt9zzpDNw%EFaz5Vanmz{GFP$1SX5BDYIV1T(PFMG?Ec3G0p?})q zhbO@5a}mU2dvN07yMC|A|ELXnT%T{W8}Uz?Oxv&pBfkXh7#@*Ts zW?4b1+De;vI+3GvyC454MbkVM4l9tvum0FlLEi+VqF5{Q?bES_Dhnc1$)8?O1m8C^ zQdugxEkN(?9BA)0W-4NYgH&!7`cSUL8-V6|_i(>KC zJ{`aR7=5|3?e&21l@tm=_cLZCU=>-uWnhbKn$as(oq9#_lGhBBCJ`qT4kD8365Ex$ zv)C;CgmDqF3}Q7gk!%k5=$5CwLFOUb5bH9({08XR?brMPPR-fjQQ9qvLP2M9D;r1^ zEj1azf0fJjKsLB3OLdo?F9#Y{(JE|9?y%}^eo7uYep*c6;^7lie~fvvT4`#|(b6P# z;qm|3E`AXQMqJy=UJ@3d>1YZ>k7(YNC`6K{&6D!X<_w+=%?dQ&KnyuF{r zaOjlqN(@1Y`XjM)N-FFaTdeH{oC+z-`1#`DUBPt!5I|`!~VujJGSRAi0ZpO+aWln2U$_h^f(#3i;aJKop?jhUJj% zJlLnik$fPJ4RqjuiYmPohL3ZPn ztO`_QGuJnu{d0w1_8y9G#wBb+`916#747l$BU!?wF6USzfC5ik zb~*M2zjUo#52?g+zaA?9I&tv}Z%tNr;)e9Z<~XY}WW@nk%f%64fIfq9PB;W3wR+8W zV08Z|hQcz=AZZwK9cGNb!zn93mXKn$D0X38WX8jcO~JtGg@4j2?kuB9*cqr!qCe&0 ztgA>1hcq2Kt{T(3*A`}`82@|9Q-n!?{KK_gL=qG+Hm69<*OYT)qO}qG(KY?ykfdz*UM6#;| z0?-Wt3igU6)cfDD5Pr&{iY>@qiv3Yu$h1OMmvQEQA0gz2D}+LOu=X-m8`_z5MkjHb z=%q311##+X=E=d%7upn{Ca(~bVQ$Qa+LQ0cCao{Fc?)h&@aV_K`r~u$+4%77_Qp!Q z660`F86<@vFx03znM_g{V4M~%E&(OA5vi#(-2=~vDQa&oZmo~E&-x@B>IV5*oP_&0 zbm{y_gAvhBi|me;(Uh?K{jwx)@tMR0c=I~BR`Slf@ zF)6&%fZdrWNe=6s1yFfaou$K#nwB@J57%(BC>scYTz)<&^P)S)#?NiE4R5>>{YWW` z?Fr6TEJyFr7=P6EVDuM&l0bm66!t82OL9s?xMLyXA)=NP_7xW~90OEdUc(q9SG~l7 zl@w!%@sAG=^_zGaZ=94M0toz=7$mAz{`yxi&d>p1{Lwihi-rwmkd`6IS+36+#%jBP@IxU9O_WG5q&G*ys=&KhZyh+dPE*>8IN>&%~e9&Ark! 
z1w-LqVeQ4jt+wUG_;fcKG7yOEf%+-A+e7l+Ns(KwPs)g;m5Mr&TTqj#Odu9{3 z&0(GemNN!IPQrCdxFSg1KC1=KdqAoZl)%~RgTSWtQx>A_qk)2lZ7bUY%Tt>80p!LWGX`ZE+m$2He1Nq842$Z}!gBSHpJ9jqNAox% zWBd)Da4WAI>U`Ja9eg2lNT>b80*GWeEn?mg+7l3Yz8DKimI)D%taR2@xtHva*^=;I zbC0%(X|pI8ht_bs7s>r!(-`b*Dbc2W{r`Dsg3m<(!&ut~l1MSR;uld`>X-gnPe;@( zFiqhOn&$W8Q^N372Ua-#-^MSy3rz%C{Uy^9!GOcet-DJ^USsgo!~vhAoq>38k|tK% z+h@@hA&$9dKI8IMc&%M|RV_(OlBh#D1S27wt+EXk^`qrsOI6mp|weEt1Ferls1HKkq2A`)MMXaHU!)Hpb zK8r(Efn|^=v$J*G?xcYO=7Wt>EGQtwQVCW>_7QQcR25iYfBLIGYGWU2z*;r!8c^n# zJ4lJjWyAm<|4C^arzlU@WzpIFFa?tf5(%iq@0WU79nh%{XIH}_8+nPktKm0_l~%x2 zn#Q34V{}Y{3Il)T=c)cWcuEaEDeM~}g%IRaE*}1F`#ryw(W0t0;}_Xq(XRqPJxhdd zyA?8n8X%5S4ts`lM&l@_0>aSVOKajSG*^rVN91ZO?L1CXx=iw2opN#YT6;Aljjtat z(n9G`=D3uuM~YURrZ0mOxpD#}yx)r{VzzX3ncR=xU<(y70Y=hAaPJQu?V)$29LLCLT*B8`9Va+M>3b+U@Y^e~8QAU#b%xQvrv1^wpR-xDF@S zp09%WJ4M`k{R^Fo7^vclqk`MYlRe98*9`{%Lrlll1sR+!VmPMU$0t1k=wmIY!h<-vO2}Z z!}r@4eSRS@xqsOEPq)u|N@7lXHN*<|ZphL+TA<1j!REc42XW7%d}aLi`)KY`upfW< zS6}{Z{~ucFZ~uT*$>!NugwwLy!pGEO-DBt01P1j-7BL@awuqM7#8v}Cr9b7b7yYn_jI>Z$8zAfV_tbHc4%R`={piHWZL_{ z=vI!UReCoS&*j(Wqh$vk6qT&iNR`YNYV%GpaeDmNR-LHP%F6Kqi~Eh?=8=iWkdnsT zR2I6I;x7jMmT?dc6a!6qkFeNs(GdK+>iyx5x>u@&PEl%I*)kzxBp}f0ffB2akkgen ze(D%=A~IlF^GQ3#@A+ywEm=KsNpLoyv&8vPz}}Y##C{l1CwYRDv612pTOR(Q(t|6Y z4S^eC9wt^1OzB|Sx9+Ca?HN(T_`TM+^Kvm_e?gouW!2r&>TV>%$f@_EyDb-_H|{t& z47H;l*J7~%U&Vp-8mom253)(1(ZuR0%y(1{%mMwn+I>pu`b>SQSNIPs=)>&;@z2k+ z&%}ft{~w}|{)NlGsg_QfnFu3y$1%lZaz$jPDs~s+P;|lt;wbY*+ktO;N_^Ov>k7VO z_Ire{%djpdcZrK3(duW~=bwpX1|S9KSNEQsWEPyZ_qrk}=4heyyv zBu3tatbqOU*{RDhQqg5gEEXQgP=+`S`PJn%A9IkF8l6fd#mX(;X|Kh zCu$f|r5M~H1s}YuYwTGKZjV6q+$`lrdo8wu3+D9PhD)y~NEQn@Q}JwN-r#h{7al*P z5L*0jv#q+|J$A7SLUzqxCi);Vk>T!pV<{OUo3(GG~T`A z{gw^0SvAci))LH|Dp>W79F1Atyt&YfXGxBeTF50)ge5P5j+MRYQ%O4SY-5t`F$$D2 zeMP2TYpd(A1hP8-VB$y6cy9RY_NrA5ZN)^VV0E1>*g`nm{ zpky6t%Z4|OfWNyV>4N}eKn=Dz)n3Y_c4H-e>bXhjAim%Jw^;ovl<1BR^)$m0*xB4+ z{d9rwexo%= zTX8)C&OnS?D$GKzkP*%8wn0te(RdgDAr``2nIw1Pud6KeRuM@YhMkjAl9jN>6j2Mzt)93>4Hip|UMS`uL(`o$hezDpQ|sN2haPS50$fW)$^ZHt&Z@mg%SB5j}w z_u`WrZnmUW0^2?r(Ly-%63yw?-n1Mk=mc~TeEPFKg2Jkme=slqgUyXIXsS6?W{YTqpJ-<$Sz=KP*jxsB?FF3ggUn$7nYq!PuHK0u9 zTL>D&25^DgVMr8KveM_rxrtA<&pl_!&}B^9;#kztS=)_2Vr%|&w>9Gz8t^ zG>=CRS=dZWrDn-B4~mJnq@FU14#!5BQA#PW@-;G~1UkX7DSD7wK}dXTH~2KKRCbY1 z-CLkoUSDuX%QLjD_@p*;G2|UV*vs0--K!Ird#_bU*XhPO(tn@P!yduP%^h81eZ80uP-0gc18(ClfiYi2%C{z%CMXy+cvM1e8 zZMpAM>MAHESJJ^8R@%jG22oT2=AL zJ@l=8I!wywv9jrgF~pVA5=$EfkCQSkbxu_lW9OjwmzV8L&8HSa`5fCJ#R8g{aIW$* zwb}rxJmi;VLLWD9U_(wiQVWPepX*K>~miGx{w;Xo-4ZAx(nn~;<737V z`WYsS=n&OoU+)>eA&u)8NT>Vp8e#dOl!;E0N?9f2OK{+Hq72s(3xn`Zh;ksN#_&f6 zJx1VR>}tcSuDGK^Q433MtK31qkMmvvcge5=J+*m-^WHkt#X~DcLG|~xn+``zk8{VC z=7K5mX{nS$esnkgctl}|7{92aa5&>{+N$+fdnFuie6)og{Cr#dYTKdRh z2N;+1Sy>Nhhr9Tc)j3~N+{I7kRF+&iAVCVTr}vLJ^EV{3`qR_)h4NjUAWyYx>Jqi; zv3!%cfsg*FrdAWwVyP|Px;#2WW)-?NAt0=7i6zW%uM^R5IK7N(T=3;bb$Km*F^5q$aNr=(L;{CjarD?GK5#dUtGl(8868H2o;W0g>F36q@H}X2 zzf-PTgSIlpJ)Ot@h4d7!OXvLvI`l3qb96%Nhe5VSdDY^db!5@^`cb#z=*-jw#E&6h zJ@#S~j;AVf@ut#IL=r; zb+@iO0+=rxPjx>JmHT{q!57L2c|M8}#7bei9T1j^g-G=s*8^8n$v`0AF;Qfd=KES~ z=qkD^Wm=qoyQ#oM+hs(6w~RjoU7Q`c@bR-Ebb^mO=LO`LdTA>A5{o@h=whf@n#a$; zTcZe|p8;S+DMMYVsb?-8UTZI2^H-**$m6sOMRtmrqw`BSn@^Gp1!Na5zj*i-MkbkX zu>dOOIf{X5Qzq$vBEbc!*l5dZ@g<=kJ_ql@vk zi-)+qqcj`(l?7^>z78HI$)FYsz(#ttx+b6!@*(H=aa8Pa&{J;BLQG-~>o8R=h&>j9 z$E#8|v7p3)3oD*;L)n=!LNsv0_>EUy_G*p=SEhaEdB?NiYYy9msGe%HN>>w6< z@xxd}-IeUJo2MxYU~tX-kU3}r6u2EF`VW*4n@v55(^5%ui`cT-w^iH}2aiO+FUM{S z`py~#Cgb3q#Unv}s&<4o$5O^0-+#M3_jZh)v^NL~H?~v z9~-{b-mvOMM0?>V=baoP%Kx*|FYN;u8~<#sXZQkyU1U&f!5+B`Z-?V=1yx@7q_B^1DX 
z30#Wy5X*gMquue;!AbeZL0L0)_DV>q65`&8FFv4ZvcEX5%bQv;p1s;=!G+WkYxcge zaDIn%w@`z$q^&hk_7myXiTx`l@&|Erkzuhkb=~=iQ<5r98zApdMzPWTgA-}VnGnyk z*bUL?!wh@DZ+P@|%bh~c1c1G?06vcT+@5R8u_#ZA!|X2(InKG+0&d4?dZ^|Qhi)%z zgaz42a+Ioayc}PX zd$Uta$9c3`B5Q*)u8?VHmv`Dn{mqrR_UZV;huhOJj~T8`r(+>tv1L8uQ5I%M3i8Rk zYtT(Kemcd_=O6jX=mRnAQRTYLYmEaKLIn9a<9?)3WgvBOGw%P1ZrVC9Rdq|cCU zDuI%5P(IO~j1eg@21m{jJ)#AF)e3HncF}z{E)jLAK468TEBz2|Y??2p{4KXTSy<(~ z)pEOic^vZPmTjXaE{_d$ooD`2q34uq7(J41?$bYHp0sB@WaLZlIXte`+6&S#`txibqBuk^A$s1P5-3x=0ype}le|#b+eGhH97pmKTcz zWm3q8kTuBc7RD8$S*4^xtTRWLT|bY(J=Z=IuR>sc($WM`#m*U8(cCs7k58Oy4!+;= zd=l3>DUIw`h>%jlha^0&FW8=rP_?GX&A}%0(A4Wbkzj5o*%C3lPSLyPpJ<+#kx_OD z5XtMhx=#w((Ksox7Y{$%p7KZz#etWMP) zm;s6NbB7sQj#s##HYxR5dzn*^gL)jgw=87}5|IxU4~pE2SgOw5JD4F3A*IqSrKz0? znSS4o>{Q%Z=Yl3q^NY9F7vm*ts5EE_G1W?+i__L`k62(GcIJRJ7k}<>h@Z2;tyiZWM- zM#nZJmz1W#t&#Hckb39bGoPXL9VJhx4~^$X=CMHunF7inNMG@-w&KT$%>}=!GU(VA zF%Z12EA5s`A-9*V`7fl5lrR?tNI_0vxVmz`y%dj? z`fV5z`BrfTLpX5-V3}xa1@a~oKxY*WLV>#f3lc4U8&viJUco9TM zetbbG-u^#6m7`s%fb>=K?_Q(i9r9YR{!6FQbH@vw?Hn|B&=Tqsa@S9tcZagADjLLT zEr!VuWJdgpLjbCW(BN{`*AoIJ>|H8hy>*#(6FdFRJMF1Z+?3Gb0x+)J8<4=x%2*My zqOw(kTz7UrBj3@Y|ESzv;`(zT$;~nr@!*(>!0(` zshoRqH&%=)&3WuViMR@}QY-=;ZR*fb3{qCMGiSw__-c%JqD7ev{LHr->)w6pUG2*m zf3LxW?}zvRR6e6SiLi$V&$Ep;Qc7*O}W9sEYUZ1glI1!UkPlwTJ10m zufz+f;`40~!59q1a$S3i7XknKRZzmRk3_#MlTJs&_*h|8W)%DoPbBL1=tqW4<;^Ba z9NP;@om9i;i$(bX{%arbbkMQ8q~&iZcpMYJ;(MmO{Y< zrT4gF-C|LYK~5M)A{~#pQfYah#8?K28=~N2jfnrgQj`X#4RQ5wKLhJD18*dVBF`>E zM?J>Z7UMypf5pN;@H6U0T6{I+Z?{_o?!3ZX5Z_Xac+O+lgXT-PI@TYp8_^OEZw_fG zszoi~g-^u?phA7SJ^Ehkx;##lc?O|LGUWKE+!qTT{`_)ViB~Gfs{4GrTf8n^Z5Kak z>DS$)LDPz-u~umdXj&XjLfvm<2pIoE;^4>(OoPz;Gv%GbFuqf zqaKj2ozTw!JjA<|7nCB4RdBt;J-e|5Q7@!zd_;~-xkS*_!Ueo3iUqa94EZ8GY5F6l zORF34QT%N@v|=UD!v!BPqXBXZI;$l13brn)rbOh$Dwy0&A?HR@-4T-HeHd2vsm6~? 
za>7#d*{%Zd_s|rIF(#<4Aa(+=`Fq>0fAAx*qp@azr)HeH1DTvNrxJ|#T|ZPK4*??V z7K;F!Ke|66rD*1eGk^{``vSwQR-agms5nh?lwgm?_Z|m@ZZQ;|V0`92a=U-K!D-E$ zg16iIS*=MHO^G3&j@VQcVX~Oh21wAhwZct^bup9;z8-U1Y-w+7p!}SofsM#5;=P>N+IqXvDI<4dsR6#Mc=gwO8*TOJqFaxH)pUr~c4k9xpvP4^G04V73{ z2yYvGk3AmBfClnA7?}n3Lwl%M0{*J^j~2tqI*Tu1oNwKoi8lO4C5tBn1J;2!?wopb zSecV8f^)L+lL~T&tyaEFJQh<1yn~yz zFl!k|ue$7v9R`Z0nPSx)@4xqQ96> z!@Zj%e71$`(+Zj$qu0Suim>4E?-d^^T^7z&XtF)w{2Bzm)Qdf|?aRXW_}%0PPlYU}~29U@1Tz>HA!lNryLe3KVroU00@ z0zT!9N;_08pfdMOwk_c>;;_H^a!dnG%VfJ)4ulZRgVb5crx4TxvMG-2&{K=+t}dtXAE|yh0cCsOzRfY8iyP4-zSIa;$ggeZLw{{zq!O;ij`NRyf3~Tf7#k?xpDx+ z7^v8y@yA8Da~5QD9ZsGvCYs#JyVSks55d?&ug3#OuF2b8#+rknq*c*dF60L$=+`O8x>o$!IDvvai=*-ssX*}#wx!HK>Yh3P@x9komBxagZZ5@OKn8R~te9jnvgvyT z%pCF!;$`qj(vPPA7bnqmYZN=Fo=G%Zi%H5J={Qs@VWxkpn19TiYX{f(Aa&1J!R43^ zB*V56Vpkn@j>zp{@hj@6-kkZ?(Dgc%lSZ!QYfof(Hg9 zv6!Hd<&@rkU!6SSxoFMFp|hr+YJU)a0XmmcRo;Pgmd)T2rP%i9z-k;DIM^w9CNZZ7 zf{au5;|R&Yid4?mMri8AxIxg@5gy6YzG#&)qPohZFNBtyoLVA^fd%S6H#IxO84@PWII-i-*- zhBQYdW`A{gkyzbg3D$K5z~gPc3QEW9EcW!99P6F;=r9wfXpXw`e&#k&at69Q2-DqOeUH};JZ9I{>sThAVI>4Q;WSQY9}%gOVOFEA=YpKL5vBFc=QM= zWF%{Bp4uM&$;H<~0b@l+FHx&f0oni~v23!m39b~SxTW@0FeUurhKSB4ZfEGsM0;-6gq4#UU`Z>1VABlfce=J>Y zZN{WfE(m}YKWfb3rre^JSmsfotE~_`4Z&1N!Lrmcp%ud{4yb-5qq;c%b8fs#Xgf{m z*Eq!l$Ya3hi}b{Xe#5?$Ndf3dSf=obWf?+yzhx=vt23n>Ae}vkg$xp- zSBC!gW9PZB@wkgkrV>`u;xW;WSN_hHDs*`96IfX1@;)u}EI?2ilZw^0l{tD8D4CQ9 z%ytKOu>A5Y%~8XHh|p1jt?*p&tx4NB!cOw~z!}@EOe?U1m;G02PR!`REI15Ix z6us@n1V<;fW}VfGf4{Nb=KQQ>X|{6f-)?s@CJ$ojh|8*~m959ZP}edRR;jCW%kf^3 z<05v5WpfoDyEd3zkrR>Vky5!&NIdtLwEGBQVlv>-Tu1Rdxg2P~bl_In+LB+6LqNbU z%keUnox+BubZ}=xl^a*E%IKh07&_|Qix`fw(BO~)gT;g|{`}(42$#iD{C;~P1Wp@~ zCB}we^OgA6U3cXX<-#?KUdNC^mQv6#XRwLmk*W%i;`q__yRnO^u;Bf_z1ZH0^|^Sc zq&>Ga>5(G=3Na&{6@tUeI+ji74HSF>mwPIEn9-1B5r0h5T-cZ${sd>k>hg4Q#;=qa6>Bj^t!~f$ zzHYz5#9pOwWjqVs&U{;n7jX$g@&B2>I&`P;(0a)atxJ5P^FYp%KGzk-~V2_5h}<2j1}!kU(~pt-1o4LCheY=;0Uu)7`{0~yyRS2?3JT| z8l^dE8U^S~sNS_wAJvICXWm(*9`MuNv#nbo*7d z8Pwyoh1k!t1Kh;Q(W5Gt7hr*-SLA#SN*|G)^!j?Ky>hCKxd~X$;wTV?gI`|0@ELSy zNG+WeON?<#ZD-731s^}c8YzpUt0hTs@3wbiHO^(<#1ALJZer$xG)GHkB1xL?*AEtp5AjJoouy~^SQRA zz8e^yK5>*$6xXKW+(gSrkI@0~fI!x8>SETa{#r`W5R3al2=)H?Hd997dT_U7<@r&% zzpMAOOory@D7@n#MOt~>s)1ScMHj`rg0{{wc|W5Wki)cr?!9cHDk>- zfHm{X=`uJi{x4@>h)u<(Z*nN}{Hevu>mD<5``&w*1WcyMT3hpm?mXDq5tT1JA6+0a zcPINZs3e)8;yAhk-i_acYSwY$^aj1{F`^|e?qFnf^FJVl+g z_UOkRo$xSS#>M@)(K~_HHb;X6pzve}`-j`_$3K_aCp}0mqBObY+DSoSH!xu+Lmgrn zWZ9A$dklm%nF_p-7WXqZ$gB5s#9vEz0lj${G>y6IQjdQVqD!0NIi*TG7mrc%4myVW zJX3sKMHYo}n>$I_!^;BjY)c6ot#{tz?rscGF-s$H1C&s%%w-jSar8`yu%NAqLt2C& zUoNfB#m_^=OT9p+-6D&{}CR0fZ>4wRO!7UWPRh2mKG)A6S^SRJk-Q)ZV@_ zy>v=um*h`)yTr~@4tt+bj+u8FSUKi(j1T~d)Zr?#i|mCM;JqHsb{rjmNYNQg95BXs zrj%oP@QVHvUNzOXI)BJcvPccv|MGSBUoO61rp)cJS6~E%(WtH!BU2&3sy${wQpLQj6;rPX{~3YPU-2 z-p|EPV*~HJ0i$lw8?}ohqWFt0I3T4py>>Yp>VM+NEFy(m4W7E_#APMar=qx#xA+lf z-5(dMmdPpj+oXiCJjh1O7I^M7TYf}Ah1yjOP_{;ir2A+wX;GKER$zvC){8*J(P#eE z>g_kFz4-laP=5~RXasxmArRqM_zgvt5_(r?Rsvq#b$3+Ad?JJfRZzM;{@$Rkc~(WYIjUI6fz*Nq`h7@oLDv zOpw3vzEOAsL}2kG-QaA079zn${-Bq0i&dgKJ$SSqF(*ovnwWUuxbCKD$Sc-9O_xR> zg~(bZZx|q)o9*C$GZ&-ez>iM;r<4SmKF|E8F3>SPM0QIY>-5Ap@rZuyejr=-C6A6D zKk~B0c~0$PaP4c#?23*FHA;C->dU9wlh1j>2^9dg#+vYPwl7`*UGST1Tl@%8&Kb&M z$|s4``AMj+PmeP=LCK5~W5m6=Y-AX+H0FT%(9DC$3%`lq$a`e9C%Z|`2ZntsM#*0b zJwX^_a$bJ7eePrNkdBFu1eAD<7j`xktB5CI>mZgfCbCb;N7s7PCmjzi-pN_21@4X> zb1k?j^$#&LxS8_l0G=s~(Tk%Zqn8hZ8Bkd%vuPqr8Q!w1{%`FpoVZN8vqlf6$K ztr`PJi&bi4Lzd5-wTwZG@V{*3Fes8!`)qC6s^YVV*xlTVQ7mZ%l%?)vTVS9HipNJ5 z)U;2*R)6FM1d=@pg!mrD7iay4qIO>IdD%O0IzIlSo8lay={85n0Od4rZ+w9q?b67O 
zGA<~jI+C#th?0~228AOE5+XJZ%IY30l`zCA;u&$6br05xYEZ5)4CzcAQ^4Th9x1v>Hdj%j%hcX=;l0Tj*Urv% ztxQR@`l!_NS5ek(vN~B@bo;&^<&qWdI4$%{>6993yn*(6FfKOKE{PfJY-}Qt1a}np zbUQ3DszV4&oIT(72fAfj3OW$dVJW4yi!p>)#d`F{mj;YTrR1$;d4&Fy@XeKW<(lPP z@=f{QlsfF3*a)q&QGX7%bg-|0cU24~Gtc~canhwpHp%>OdBCgJYZXh{vE+6Ry#EC6 zKi1xli4mr2*bp(;4&I_4a#?y5JfuoEmuMU@dYK7lxklo6j`CtL9{DH(3cm^O?!BB7z9 z;0EmMjnyJ(*3q7x<8`?8sf?0B#~f&jaR4|UmDmQ?6-0^sjHR8MZ|i=aJe)yu*gaM8 zt@$@&gc|T!DaHO8k!#HM(NWasQMP0Kuhk#)?|Ab9NL#4a4eCKoJ82;P%7zxm`O={C@r{GX#o!Unwv@|p( zhJch#fQ{`<PGWtI-D5+QbcG>X z!8{WLb?3wpdHaPsTbv%NBUa3Xi#pWR0v0#( z-L8Ymp#wA!%;)^|f#I-OL|s9(8Fv^%w1hEM_uboA=KbnotZqjfMu)y*-X$YzuY-p% zxAjHm!@N!`$zO%)YSOQ0UGndJG~q}V-xF~$rHc{^xxoV?;UD9%%F4u+jcr4%;y_V4 zE`~!Bs?y?Vg8_L%k)?e+KHzugLm~b-@)YyFLYa z$=I}9@X|KxHdHWZ9yy6lMy#%^#WyD?BbNHc*D|#KOYRPKx0w z*U^J>?>Byr9@e>degg=-s$kTr7aMrpDevc?C;LRBLiDiHR5CVRxXk*-;(|k*ZTZl# zJeNV3bSVi+7Dz|vL#7_XE#QEkWPlTr5u-5kjPp)KwVeclZ>UUAaadT{?Tjuz{d9Z! zW1gd$?m_*SrcAA3b`-G&LjZkGcc%l!S7L@6Ywa`fZ}7Gxz>`N3zXnvf9gBc9(I)W+ z8p2(RVd%_KYL@@`Nqm<4tXfZ9_{a=4iQQN`3gFXPmr%=}OVU421+k{@2l0q5eI;an z>B`u^#ilu_f(DhX=X}LJbI|wp9B+P+|L|pA^!5(WY47z$yWLrk3Fj7wo1d1c>`*O66(q@qcxYf^=m2q~0y!gh7+M)I8p>aZ* z+sA0jbc3I`|@Z?YlSU9Z4XDKKCx^qH|!n2p!!>+^Cc4jxD<(3S9t*`)ceNOm!Ru+Y6L}M#>dI zDfluq;JXJO4_I!C3mz&|pW_bh#WO`EYhQ?yL!|=PBCZ!r=3;jNTnG{aGgs!)`SsJe z0w~K(?u!?K>;=V66WMv!Eq+~79OW@r;K4WI5iYG#-7$G`DZmPu z=AoBj4+NUb>-q^9q9u<+X2DT$miGlPZja8(Gv~=1D2AXSKc>;M_vQX2DemX((a$}A zOEa>f)^E}#?q*CFJaN}MLMjQjs2bFVC~%&MuecD_#lz3Eax)N-|6z~Fd&_oV?CM~6 z8Hf4VES{969hROZfB0nk=B_a~F^Di_*j%n}1J4Gl_t zJ^D__z0zWS^~WK8x_(Q}0?au@5K&KFjn}yRZ3>8pR#1zf-)ijyN&F~00yFYuNR`ls z@{kK341LEW?&Ac7^b)2%PV3|G|4x}Jul5<23(>I@XDU4)kY0PG&;dEbd_}_i!dW~R zTdX3I02koxGd-{|;AYb77TYozYd!?KAYq-)&9cQ)af_TIDX$V^p=>_Qm1?OcazeO| z_R8MFp@4$Ix#>QBY#~EE_BPDPbT4XSk0UYzm7n%Hz=&adpXhAnF>+;aN#&s*y=S_j zfJ=b2_}2B8+f^^NV%2i^Jr2bIT?iOhStJNFpM{&uB4uaQI}g2wb?yCC;37Q(kQwv- zSv#x}Z{8A16+@#4LSc?@52JNi$DGyLOetWcPnT;Bop)ZmIlv7l1!Qc^WV^1TRriX4 zg{BfuD)&C)gX73Z8Hji8PUJ2o8E zs3l2Jzef|L#DWm@0aNj;hdL#PaaVtyigMqkW#@;7|BZd6Jky_Cd@J6;W2mopQm6## zg2hOx&Nya9DWvP|>^N2(Z2wiS?AZ~J4b>Npn7vno!}H!y5i8z#m!1HsT@+7Qf`Jmp zStU3Nf9OEn#=Hp!J7;L&!*Mp4fqyv4BB!V_gXX<&&sKVvDq31Q9MRCKHlmS#cijdA zC*~M@fumLK%2l+ET=){QhKehOWdV%ft6Yt_5PfQQLfhjMDswe0P{R=B5_JyUWS4wNE++Kb!xi|tFX3pXf9lB}62a76rJSy`g-g^#v(JhWc9)UR7Q z+9s6@wVI%i-W87-joK)q-0*i5SRCiXL=7nx%)#V7s(^u7rZH-I*VtcJ^P^Nm*T^lA zET z65o4oX8hk5Pwl%N|MFzg)A-`8;GHk~`*MiWuSRxIswb8=__Rz(HT_cW5l$Qz1+|CX z7ZkK_iJ^|z&xkj~qFg-Wjx8cCLMeQheBhgSxf}j4-bp49JpS6R{}dP~pzdC0PlbSpsP|0#RRlNH4*e(382nr-ySmYlYjpk z{dPYP6Iwos=R;53!qHZG01!ZRk0`gQhz=^=UqUeQZXv)*Mjvbvzhs$U4Js<;M8IN{ zS0!dC@AqeiuF55K>ppN0i4H!{gx5$u4w2}k(Q7CgL!>}nL=8}Zh3iB4YQ)k{yN#mW zO8VDZZQesU-3KtMwW#)KF#&%uR-euI$^|vW`v0W3zFKV^v1-)|)ErRx?fVHULLhzG z%Lv~rAvF;6)al51TUztL-W-XTR%m2uyQF3GDY}UT!oIxFO8KT<2fh}(yD>*(cio>~P{|4p zSS{yjhAnQvG%q(5(&C60Jxz{l_V*E=WIZN^IiQ=@VS7$3wzpqIekEXP9OE@q$8nqm z0v6Jf{D#&T*6-?pDyn@!MPxFN7-UpTi4jOkDp<9YosXA*epfQk0CSWJa2qi@(`Lk7 zOwB(p6ffG@{tv$;O#QrvDyKx4Y_;omD1FXQC;pW3c)4?GN#FyPi5Z6A$t^R13yKz1 zkUx1;NJ{J8^ z(YB=iNtk1Boy~Zuu!_V$46xVEhF0VEV2td=ZfOluJ~&p_uXry=n5fYeq0MQD6TLdj z>`Ri%*EjNs-0x+)%g73ztykc(q^?y15<-rdkOluPouicHyM*B>?fYxUY^~|Lt_r%8 z(H+OWDweWUtT8-F)*i6YgTXg!R7Ue^E^;1r)U~xH)DJzVgvELL=Mb}cD|OCn`p0uX zqW&zt?f7srhSh^Oug31a(BA%R?7g6a(Ue*NRY?x@hU3eCHN{z=lAB8z^foB5d*Hj@ zN2$cucqc*V*rJ}`<2j29sq%jq{qcF!N|C_#+Y8C{05?%>u6R$+Qn*%He`98+Z8A|~ zmPDFx<=iVxRYNO30jYKjH`t#fPYznKcG}TA{1H$kdUJmuUVUOJP=I(di)-@{dy?dXWIv3-OgA> z5Vn1<$pX-?Hx{IDRBj9CvP55jH7Y1c6(&pyyDqFtt$agleR6QGNV6a;S|`OS37ve) zQ`(>_u>d}&L 
zYLj}BUII#=jf)*Cey#&Jlufv95!qIfOeZtuhoK-{&H4&^_Q*GhDzbahM`A|kS!KZ$Ey%u+gr zVnLr44^BOJiowABDiG4NC0U+GrU|0Ze}U&bqZK}!(R2=*S=3hME`)+rxi}7?UiMn*~MYEX73KdeE_3maIFwwAbe9EkG3e zi4}EPXkXZ9rny_>R#{sYf~QXQZ6?SRm5#k zyc^uE$peNQJ9e30;c@lG6=^@H)&_aJR6`C(_Lh!s`=UiHqjxT@sZ zV@~)*`yeb(cl{zD%gwxGSMi}17h*PVY_vshx{A#MKbp=$S=7`5LK7v+l(P zsVMzR@duoy@@^7Jrp1i_@AT_V&j>_G#^P7v$l^NGuIajw!SFmgl1D*HO4RzUD%;g` zUr2{~Y`HsoClIv)hzn`2rBkc?k#PYbgtQ#*6t{^0B25rNHbex(D9ol3lx&GrKxR%# zAcc~P%{?9Ho5BfWfi1^u6MjF43P|I0nT%mD6{Oy7Z%O@%>l}IUL2Z zRuXY9l8^|Oa!^W21#5+l^RwpKTH5ySfflFc=6aiHEb*2SC2$MktF&9-NtXTlYQqvg zzR^|}<~9O80Q|148s=X?{=N&_>wDvi#=o+fJ)}`n*wStp)@$g?{C3+b_ZhOL0Zmxir&U1?K5E_Cqk}JAV6T;BeOhUF+;$j`ZT;huZsIDKLrrlPI*Q-?aqrr(bJJmLPmP3W;#J7Px`S$u(Dp z7HGxcV)(YJ=+A`qNAbo2isOi#WuYZPrRtWY5m0xBu=P4aJA`Po-IHspC8BCKFg#O{ z3R&WHc`6_ONxS%|XCl%D@yxEDz5n;snc`r)n*-F%k69MGuV7T}6FJhYFv&?om5ZQi zvZ=j^_f>#frTfmPIFm@zV~VXNdqs+c;6c1gfBSAMluFQ3ML*`R>RI4qb=PS8#LW`w z9;4>EZ@dHAejKU|L)unnyW*V1+)wf5oa8LILbhpsI-`w^kXcW z2vGQ+@u&4$wV3y99wTEY9;l?Fd)r(FV;{i{1IJ7f9i`R0DE~n(Ezd^ZO z8CZ^^DPJgy7O{h)JojpEq1csYn!Z4aGne+b!2b|7*r}=Rli1>qw-1F-=?>)%8Wm&z z@n2BEIToka_pC+hdV8ycNp%w$UtFi3dmIeZk8=UR5zB})3GyShPXBM}-u=1m^EmUY zt^E&n_lMn?t=cz=9E-9fOCm){c5DX(KoSx_!T>1RF)i)EjW{BJ0|PjOKt?il)PuiRB=GF0UsU-dn8}E9lcciyO-tYoWGO3V_ z^1D9C9tnt%JTZw0|172_CTcPnJ7vl$eqNO@D6BTz*P_Q0y3!>}Cw`8yBmX*MSyA(> z{%_CIjjs?!Q$dh)uoMENE=N8xO7F!W?<$ZMJ4x6IGdVS_e#~uD=?&+vs8Ax03IVtzXYRc%kc`$p(fL=K?cZfalV|^*%Zt72FiENH#Mf^d{RxYA>Ys zr|Zb0 zzsa$AyR<=p-9^cTm@b+Gs>AV4s4DJ{SFFp)fx#&i*{&ecj03uPEZy5JYy8vs;uT-C z=U4TV(b?0O#zjWf+i~fdNqZLMS`6-AkuB*2%u;i- zQ^bT0|Ml5-{Gjt@A+dWLX%`q*45*fV1~#HDcpViEer=od8Mn(E^-{AygfR%xc|FBF z3|jyb$*HF(J0~q0^lA+(ietH3P2*FNVN{V2Bi{Rtv!$CVbbU7MIjO z9ooXyQQfOSscgI{DmlJV>NBWR?{HB4R^%GmsVaq(x1ebFUc42rg+o>geh`@BO7BK) z$R(fOFRrvgV`yydOY-(CfPfU-45{{dL4{G_rBGWQ6Smi9@-2nLjSf4uEFd{X1)k5M zIvj9KbcvFN$q4vs9)(rPA3IA^l{Vji9IE-NE` zK}rw-e~Ryfp#4Gn)E&z_%gLG_=U52Zk&76w7MuBjPb|4T$)dm{H*taXB+xg?LBVM& zk6a-oEkyo78^=g!Li@ajDbF09yk%9*7J5i*-?uj+^-b_SN(uk<+57&k;Gz9tLf^tu zI8Rgd_25BaG*4o;U6sdF(H`zgfh4%B3;B*15s8JNX%uyO*q=4Vhm#njull_@S3Bh(){8p7@02Z1q63QR0I=RBPef7k0g_ zs+K$CDaMyb4yTPqB@3uN+s3_A_o(q=TBA(l-NssjDq>Phg~AWWlvO%FeB|oZI*gD~ z!}{#u=i28iMS{(|Sh%HcFuc*KxT$diQu{Y7yf;F;LBwTCojX9(f`_T4e*) z^e{1!hZeLehcsS5Rl6wV!7dvGWZ~B>g1qNuO(NSDV?~9F14<-vL%yH0cW~9xC4iP+ zcv|yca+2+&u=&sZ#9NK**%aV;d3ZH@3g`!vk4ML`db2dxMJXI6P4MjDpSDtNewG`o zn3yw^NAe`EaHdNl(RFn10mWwyGqgdANe>Z^mJ@2(18<5SR1hE&n#HMec$^?qMLKWe z)=3wJc;j$A?*XmAC`cA2da&FB&{PRjg;c3Mlb4G?0O7=6$1|7kmSbd8?rh6ee@7?| z7P`pQ6*00rN%iczS9l1drl=r^qIWUOj^I4{MSMpfq1^E}4s+NsZH;UlJA+iiL%(nb z;hDtOIOM$`qsKOj4&y>9(S7bQU;u0Y(mbDV78@5pGjRxkO%=vglVi@NYl* z>Qs%9nY+sWul2o^jty8HgJMYBT)OIFc6`1EIxDs#k3zjfQ)%=rQ$piITK~k!KPWzo ztncQSzIXP0>Ji&I*&DWZR7cVRkwkZpLO-Cf$EaBc_>05=yr+fXcnH;^8` znIiN$J2o;^Zi24w$zswHzWH>}H6ATs2=kQ$)TBA8@x{dUis39ylD|Tn$jEYpP(Iqhae>4U&x175Ah-WQ zdG^1EM+tZ&Y1c~^;nolgu!EZxg~c|Pe1t=cAr*IwPg2$-Esm$3YEOGg<6Ti53qcNf zU9XaG1pA4zYzovYACKpfL%L=O84%?FNj%*3a^3q@{9CKTug+KG88?TKiWXM*+l zvK6H&oe7N>G!*Y8DJ8v6c(oVfQA*~ge{ifm;m~*A^Y}&-u_Um7*BH-HUpxC61Bilf zou-}p1Ieo;zLz;O7!UEGaCMiY3E%x^f=PcD9{Rg}Utrakzk(uju{LM|Rr8CfsK~u? zDc%oFDD!Dr$D-RX{il2y@{G|F@f9lWsg+kEbE?3X0ll+_Uu;jtNdDkc?GFmE6O3g< zFzQI)UhD-p>m~2{?76Ec$1XlGgM$&b$NhFc1cqKuqutZ-|C`GmYpfg=vO8nIv`Wkj zg9VyW`dDxk<3`p7*}~+6zZAu$ikfLc01eAe=? 
zFxS?uTZ~j^-62=PW%w&T#!q~ry_?Yp(cfw4RMa9O2_)x_D@vNPTb@~IGfS~%D6;*+ zKVFyWnW}dn`bhiWE3ttb{?ecYT&R`Pejsv^=kPWb$>1Gpn^|-Tds^z~D+b6?$6y|R zr=5KxBnM79xOFNfuH*amkGix7i~#D*k34h7EFuiYcQ9o3^Vw_QXz4jz9c~fJPfJYE zNOs3|3xz{}f!`UL0pvm`XN?<5Pv~y?A^-hPklifFHf3@mbw z)Vsbn+tNs^gAsy+2oYze4BDHd9>mm1N;=px4N83BaowT{weR5g(iwF2xwcYjD%my#XPog6Y03mmCkgn6#TvUy)p2 zE-#g;Un(nUM+=tU{uBk6Q#&pj%tZ{jKdY?0-ezL?#*}#WvIvSEx#ZLH?Q%A&#LSYv zFbIXaTM2i{{4BoR7upw}i9Ke$u+RJxmwEP*hTRn7cnwjoI-K8DOuC9FDPbxs7{^Q1 zD%KX<9KP3%Nt0%23X)Z5{pWlCOxHoVRe#sT>)-oiYyuSSaxvwByn1t`E&1=!XQZ(8 z6|a@873u+vSOE;Nr{OdEqzGPwtf$&hkw-Bs=KBo$uXR zE#`d~q<BbX8efS_$@j4p48#i#NN+Ii@x=}*h0RI{xnNeDNAPQyg{O_<)Krn z(5^AD(S%9divxxPk)3OV`&%gO7}wduKNkBSe(Z70v!BNtbuU%dM?EYG$y6l`ejBe< zNQ5guA%nzqIa*}RRriFF;=8*g)ss`T>h$w%buB)pgf9+!jPK%0wMP}2 z^x)X?h2KMAr;x8$!N>pR?3Zom^La$(Zg&gg11fPcD%SfK+MW0lf1ts=-*C|Dx5+gU z%HoYX6vmARvVk!pRQ2Lw%;G%A9im_C-IEM@{9wl z!P?D8R04^O6$>j>0%Ih8!>^0Mb<*nDFXF+M=)fG4c=O%%RA$>^6%s=GMttsCTb#F? zQNuv=!}ue~zzH;hCD~qM17D37EAe&kpkUiHIYIU}Q*)j@{A9ZjpI${^7H*UZ_1j#I zc(MhdU*Y`iZ)LK%GUSd6E1=ug_Ud7kz|rcluh?(Se*tpTtPSA8lT;H-PFZ{!WkdRk zrC_+fLnbi?(9YTteYJ8;uuZLMmKBU2Y~*0cqZ+nxeHgUIo|TXsPQnqnsK~;oBpRNe z0p+E{SRen#N8f7vzaLo{B3aZv^B`;FRLTF!|#HU?PqftRW9^aC@GR5W?>3vi4 z#H#SAh{iEYXy%W8bo9^UIM^%rFZ&mchR3;xvamTf34AWpq}rF;3x-=djG+&4fy7c^ zgXXeOw;`b+uDoBj5563NsFacCo+v70%(CL(#!vvHI$be@laS691?xvCe~>H_;*tRu z?}rAJ>#Q@rTegXk)0aX5rAE0u!^@nhnLLyV*E%@Hqw4+u%rTfAuhss?1`_YDWl3*$ z%RZ(mvd6gioTCH5+O>uTO?JN8v>P!7h(cr1R`Y2X!Nm&L-P*q^`hW$Y>oyP>^kBjp z`p$eQ=eofy{Xo|q4MlwCauWf#b?7@*7Y2~h18Y4pBCM=Q%GijQzeQ(T!AzKjG5Bpc zPB@n_F|sN1G9(#fCvR-lOl_DpD-=RC14wMuoLALlVk=eT|H*4&0JZWx1WfD6vGIo&kLe57K$tT zA>^-OMHMRViIBLXkyu3`!&IvE4Swk*my6M*n}J>IUdn>S=-{j{|A5WDR0tK zks^Tp1^sN$F9pY-{upW*-BAlz<9=-B1^5yE)Aaw!<`Ys9Vwz%(hQW~Y3aEO8Bb8ts zXnM#TegehBU%2T9lxQ?q&w8n3h60YH$Dng0I5sLMoxH3-FHj z%|Nz{f8#3@z(ftUIy~WpW%x`{?9BMeh4$WC9`1I}$SviKiT3vhhg`oHwJdU_Wyk!9 z-%o{e2^?&w40YZgTt(}7yZnQ~K6p5JFA`sY0^(DvY6hi#8Zv&G*0R(>dl0L_vH_`V z{C=1jfAwMWV5(`H{ur}9nWP~bAHGO2DT@UgLOWqBzTLtyWZ#JPmvw^gfG5&xC1-$~ z!&b~D>iA=uP8|#-dU07!BR-tBlR~YSpvBWRntW5*izLx{zz2Yam^Frxryg$~Ge zyX<4UF*=yS(AeWn#PtkNg%l6}SLZwXk7cM%e&=t4>JteqU-9Af z>Mh)*=;~q5zv7MvO$MJ}m{}s*9LkN7p(tK1+TW`xqe1lf*e4+m%eTTt!talNKGi-F zZ7wB7_PlH_DqbG3gvznG-WF#p7Nx9xOx@(5*4!0P~Th7-htp+LUk*ozSonaE@h-C>?-H_&sH^_x7 zlYx+!5#kf5@5M)W?-`o=#fm@$c-dll6*)*2y$DXDsRk(AO5BhbXV&iMxFfhaQU>uz zjE`NP&x!Jr?GYnYceb3D)^9!jg7tWsi_R^0oV)<)nA*qYov;q5cpzD_z@yc{>ix5d zB(=PB2}Sb|6Y4Oj9k-MRl(((^ok#?f%VRjYq0%uh*oh4gHcnGI9Jf~5jSyPg6DG^$ zZ3z*V+&_^yRSIUWD8P<|lrLnQqE^O|khVE2NC~m8AB!FH_4xbkHnVcWf(5pKK~0pa>YTb?Q;jyBZo38s!^yN6Fp=M&UvDQ?gA-8=HR_EvTkz}jHNlfy9IGCTi` zb~C2OS?BuWtc&I1SCUIMI4u{K%1*jQfTUCfNF5=Q9}D~`xU7(o*6~Xo_*lp|BF!Rf zkb_o8P-+I&@}8kUOZ!wT*yF!!XTP+ZEw!21`qL=A3(>pTNgoaIv+zC*L?awXq`wU; zT}<*zu*kdw6mgWrmd^|69gmM<3Nq(n?4Dq5Fk3GH$$uj-(vGOcw;q3k zCZA`&3W)&fk4-1#b(o^l%?_bm{_Bl4_Nehj$P)iN`Kmn{Gu#$N3gvVb{EEQjv63Aon7PFs)pb*w8zOBC7=t=L~e7hQd6UvIBiC3eS zg=E1Y^)mIlQ{^K3A-d5sf_g=4p`h`kc7eF#J@Yr9oLcy*HRt265Sv$KMU@@%juhlQ zE-)>0bYUK0#k|{|Ou7IuTo@stQ9EyDl3;J|pE~Ab*LhkK1CJ{$o`+6r#%Yk8>vpV=lr zxX=LV8V^|q4B_8;1#@5XS6 ze+ro?p5$xf@Aj14m@m){Nx>nr(HHE^7#2w}L}E+m^~?}5gCW}TSjuA-tLSfR#$Y; zPLPOs>W*{5AAqJwKgu5hb&9cGzEsP*pd`DSU7n3KGHDfMDFcYbtYZhqMSZBWDTaGK z7az~Ed+vc0dEw*2SZuNMzJsp)-$MpPHBV|-#sZ3sr3LU_Zi!dSHmv&skPax9vgy}x8_MfUx~3Jo~{U*0IKZ{-7tGFgQ` z7P%A5y=)3b-yd$B_&0k41=?IzZdMul^^)7;b^s(Lpyd66DKJu!9lwl)&{^jUXK)VZ zx%-Fdu|gXq_wPF)P@F9I+ZM>#cDj+LaN&qM!3Dk-=>QOt*_)|C4?Xevz=GvmLM4i^ z$dgy*bF3G;SgS&F=i)p=lkqCZUQ^5`p3v*WptwCgvXN`zBU*zuK)Wgd+htHxf{*1P zd&E!OR{)C}0ARRT?Ldkr6LgD#s(xIOyucpvD&IM7<+RFF5luOi>ZyCsuWBA$E-Uc5`ZwC 
zOoSyX`>TBE*Zmr4thDeTMG);uhZ#qbM(z_&=GQT6dKLS$ZsLSZPO#-dP7K9Is+4hZ zj9Bd<*Jx4?Vfj|OvgnDikk~IJ4F~@R$fGNTyp#Jae9;5JuVB8~R6j^p8v*$E1tqhE zJs8!N%}RBRI2sB8k4k+8+&OR$54k^GgM958dZ~7Neto`uz9@oT>}90v#k^&RF)5gxP~|nHnIiBZ3WVtlVUc3E(Lsxt zCc#X?9U+61m>(W7zxb{^h{6@26;GyIfcCxUU6y(Ri*m3V@K>E~yikg~rkynI$&$7I zEY-dl=Qu~LXn)COC*ZCEHYwlAoLG!eo;`$Z9Kt<=x8S#Sq$>Ud@8EKLhut0I)82y@ z`!VaG&yHo|+2uHU_|f)}5ctow&&Kn>3SP;7(+9=!Hz8@MJ-j!YOA)iBBflXM99g_P z>=l2CZjDNSP`u0o5~izekAA^6(o3liR>XUNvhVbgQcGw~HPffc!%D0vKI71iY>IJO z(eN||<;B9V=w;{ zRmlS1zbX)qr^RB$`cSrs{=EFsqXE3bBZ(IZZpyp&ws4j>YR9egwF1#gY!u)_H;SKl zMC{y^fhP<|bE8o`FTddJq1I1P;B->iFXgm2L@YhT>b&)p;uQNZv$|UQeX=l96T>7R zE^Kv8zEpQzD}x+$oB=<8iHv)J6I_fXIZ%dbyc1!>lq%v(7>YZ_^SRNk%~}hbfEZ$n z+R~*tI@V@9S|v-8C%#Cy!YsvwmO*tFU3A$8)!<`X03cn7#nE1Xa1%p4r34&kr3bUM zkIe@_QG}nH0vxH32|)!C1L^f<1Z!E-^+1zM%k*wvui-K%{FJ6EEi#bT0m-j~EoS2817CXFf+Mq0f2VUm61A%LhkHuCMR*);7vjfxsr@)Wk+~Lz` z{o_PkXulV)!?+vndQrGbCcc?2sF)Wq7l86OW)oq7DBdFD?d5DndRO+~VOldVX+;{#K@dXSQF(JOS-Z-lLSI+Ola_P!y;WjlF5Dyq5nsN-@r^o++x-Wi( znz5=wQuzQieSRJrBQ`~$>Y*=qzB569suVFFZhN0q91w#A*EJqV665#U+1Fy{o}2Jq ze&#K}m&L#-^FGoYxwVys!0ckXwHiZ0r|%Z4VnZb{*g>ekw)bVSS3c8VawRbD;)ZAb#O9nd0Y00 z*y}LIhjkEAb^LMr#LdT3c9}&r9#Gs0q&hiM-hVFIE6miCr8Ufg?|xRj%BSY&auO(-=8`f_TanTcY>YJPS93*u$Dxi@ zZnNJvc19+@>s+i*CEeHBznl7T73N+Hk!{OD))y{(ocadO#>Mk=`=odNKwaOff_(B$ z;S`ZHm?Nhm0Kvs_O({KGJx6*GV?VKH02xYB08UESiME%q9%BkDIw0pTiK1|{%aK@Z zx0Yf;6pKoe2!X})5r@UA28)A!opPCL6Nnvwk%|XPVwXcgfF48e#g<%J z^G_7Wx#6kF#b5}&{(6lWU?+zq6mKMAJIb<|H2lE#Hd?-jp+1!osV042x8TcX>)EJV zn?)BqLddXhH9tz;!b$v@ z%kdOXqyWTo2x;L%y@qKADLl4*nTw#9?id-k!kaX{i|3Py{qz@~`u$iA!7QUf6n;(4 zrgLH`E;PRqXOS3q*>k#t3uy#k@oDUiHj<&c!t!|vUseT(@+{!y6ovb4d+(F}@GQnB zgoB!7Oeth=-<+cQs-)!#Y|P1z*#j?{$RJwLzyifxjh@&SB<98KnFP5uP$p;y^M0I% z>+OoirHmfbjDdaCG2x~&iG`}7rcsHyxV0DKf~s0;m*G$L2#W9hNdl#jds%RNKc{({ zMD4Zte4J`G#3at+#0BOezlNCMm^7}mv1H1*b`j?Hlt>X)tNSIQz*q`3HvW}@RIZ4yG#_Ili^^xdtNuV1V|StW%1Mj-haO>Y#BWCL#hOW-k85q1!J=4XikTdV>K@h zKpJf?G^&G^PBL6n=X3$TVQ`=fy%F5^I0S?vct&WGB?Uq^@e5=67cR7?7^OU%6r>LuVBR)5}5kJo2ws2kwG?fu*JH<%^ zo+oDs5T|`A zKI4<#&MU-%D5_U72Nj^fQ<=hCG4R1;N^D;ipC3Z;s;sw+GLGF62gf6{7u7ot>a6OKk(nfw58%k-7qF7ij_*W>0 zdgo40iO3~_vQbEDs!+wwbc5Wxu1F33d#&fRfh{A5`fhw`js{oQ{?#0(EJpjhSExpb zetr`9KqUP9Oa5P*yeyUsgJC>N%rsq`Eo@z6j)zl$Ap)=+<$X`DmpFv6_o0153I{+qxlZ+*|cZP&4?x zG{%lo(kVC6R$gU7x3q9)ys=W?7QX?v@V&V~<_TaP;gD)gb3+^&ID7o9_UH%J(VCgv z$8NS`Rcw?K{aSl11|_hjqxYUr^!2Qi9BM}5Hi^)e5C%?}q zUMEf7OIoG352dmsWf0r(DvL>irydjKwif0VqTjvdK`peY;y1$N%=qGxA7fMI@Px-< zam6FxDe;O*B*u@ygp6edrAYKgxwkWO<(pUwwZ6UF7!SRdMxLfJpA!A&3!=f|SFed$k z`FD8Bgi2lwd8&Y@e(P!Y;vj9m=D)wm{D#;v0kD++@sT~k;KT^uPX7&P@-+1yF`<_IWUzVg#y~d3`jRrg(5TBptq6fH#L_R+lw!z#9eAH#cDxb z?HM|RcVlSkcahNg z+qnkZP)K$?ItvZDs@lbcr7o?p;Jy4KueS7xtFL+v6kQ5+56p2NZbbJCVoDSyO|B>G zmW#12D%^{iwFz@R)@6H(O7?MH91keD-= zud+M+320uOu${Dh3HflN6kNyS8U5AYd`Z_n6DO zu~n>;WF~Ke&enVMGs8{yN4q@}`w$Lej1gZNLRB$&>B|fKJI@n$U9EuiHhJAZSl(+2 zXX2kw>%gxWZ1;9q-C7BWyNzvrtnw;;?<~=yoXBHJK7Buykfd*aOlWNO)vfp7dGpBYO15tAu``<(eg62pAd*s>?0s&hf7uMHkxq{Cs>34|>Xso!O;; z^=XMH-6lLpX{~gUxYt{fl0VV@=!tlec+dvIlwn}16EV#qk@KYbWWez}^kEtj*1JaF z#W;a>eqNg>^%DVjkznK9opD&uQCXjCZ>$Ey+*Ys=YMu@Xql+51V$c=f3j^%sIAsc` zdCfN=IJE5;+em=3j1{85jt+9S2?jg?p0~<<19IUp${}_WYgz811rSq~6HIv@e}{Ag z?uF{Mf%X$TE_Vu64V0O9C}&bU$?^m_J0SAb$nrm_x-k82euHM2l4{QK{h}-hXQB)F zL@X(;-v@sl%a3}1Ng1Q)9S`le^awe&{qLnG@VoJd4)J@itST6?0(H6Z0(02yco({X z2>x$lv#7XP5@v6pd``CRlBFGgr+?~Yug!X(OCaOe*4>HyYbEtD!6(nScV-Wm4vm~5<<%)7&4=I|k1 zpK08&64gDR4k4FHykHHNcrV_J@&Ag~L)i$dsMkZ+T73`U!^&^Q9R9mb&W?dC@%h8d zUj)W{{5O`l_2C9>r79&uewp%5Vl>ZRYOh)fdYxPzh_3K2adDXqaJECsa<}*q8_oki 
z(ajV;Dv6sC_TD7iGZ|-&-OWmSpj!$s3TmIhwSlUFl<$xCxnX%{ra*93sKe$quoTyn znu&SIqx3ouL#GZwt&#%rqRM@#w9g(s(cX+NBa?a4N#^%*nu@Ts(YEKcN658XVgK!a zGtoF=5&Vl{p#R@{TL^)2Loss0K-q336Xp$WHWelbF@`yUsS^iA%-bYmc8uJGfFNJq z8*+1*u)aifB=g`GITDiVAAe z_v*RFS=q!Ymz9Ru)vIk`#gcopNBX+J&RBFA2_w${c9-^u4x|1KHX<(t35)NXJ$$1r z`#T(Jd}>mAa#FOf8lBXhs@E{o68S?Tu5K@E?AI1Yvr_4zKRPi)@pVIbJwv- z>RMczkJC#B^S#smg6sePiG!18-wX8FmY2lY!>8L*MnNVzQOGwL1aXmL&&isuW|d-5 z*j@2RppII>iH{c=EEbEhE)&py=bhRv41*;__6@FlG*=uz*8A`3NaqZnK~}n4;*qNE zi^YjU;Zue)d5n0+FESG}#>~C+#ZSiO(I1x5E@?$ficy4>tIJzN@=55;hX?O}d2wNW zF2>tafzsiYk0_koqDmuz$us;2At)?u>Kr}dauQPK_yUp}Q-E7S^cE68OhGqb#ote5N3nRf3Q=BuLu>qx=K}n@>af-2IzMye_M2&k zX{IZDJe9!+s!O~NQ?%fMun==CC5_VCG4b9;!Q!bP_tOFGcQUMl^W%;nC}H z2LC~k!2jBlm!~R@IQGbc7@nl_m3pXD_f-|336*+K#OM`K%io++nhyqP5lXwqGnJ8e zGsG32RFN+W#)`0q%E$&~K+%?9nYh^9+9|HX3Ob{;V?G&bHyA5*6-*W54J8+4VpJJJwr<##} zBCjEV!qR!+`Iq8>q~5@k zVS$V11X-DRCmcJ8LRP}bHY&7JpJVQ$^$sB>1jdQS1HPB)0nCQ7O0h%WJykhe!S36w zzzoRfehC1elmR$B74F`v@#0nfsGan5w&hvlgv4r0k`NJLZ#PBPF_^QDhp1`8#yyLgwHvLv^({JQIc~%_S57#RrVhhm`NF^2q#H}XoX^7mnDi;)n?M=5? zLnaSPV-;5Mpvc#34<^LnB3k=CwJc)ptnoW-Cw}S+QDCDP1(bGcD7i~`rWh@AsN=hM z$Mk{pEEUx`9zmas9ir$jp>@nVFB$yLY9RjTFQv`*)7v1%& z$UugVu9?Qo@m{-d!DA%~)d3^I`(>M;Iv_o!yrVOyhV9t4$TOSTFFdvu}E07%pNB=Z*ZSC;q5?EW`*KUgiA!>8m=Z z+oMuwRp)V(fm-4}TonJ)KVe`z>q+V+bEypS*Ir0Xb55G?4sR?mLoM%K1r#$?OxcW6 zyUyX}T8z1T9tPsI>ud32=gGfN`c6Q&O&trGLs($HgfownAGuMg5zU;(B>g;MEQ540 z6hU83JWiERCsdg<`IN%?TArhSE7zX?XG*XF@)SE7<`+h?FTJIUZJa6{yB@>6moByt zP(unOoL?uj=$vq6i0|KvZK7J=&(6L!`TxrD$sfE;tCFa6r{<9^z~g^h1^&|Cg6WI# z5i0~Ltg{f-CuQ6|^qe)j)+q~&sxJa6)7vKix zyXqmR_jr~*PO2EBHHkS7VQj}a1*ICMK_*>523!11?L>-?L*1UFFQ{6r(p7%#5mQ13k>U;YHFAk>YLClV% z*@V7?i9PwQR|0gfW2mvQ@>A(K>`5s8240ybhQ@6w?au0X2pX1JAILn92MI;k9TdD) zqZXUFRt+WocYc1dGvsP5Jw|F~@BZR>-E7AR^gK;3<@THfi<}#BOvFFV>qJb9E!T|* zd@@!BI$Yjo!l%~a4|tx%YupC^plX9hLDc$2NL#;nu^nQ`SS%r{MjH`|U$v$9xY~=T z|9}ti`VhzSbe~`?lmsu|6WD^!#X}Xk#dNKShuAQOi9%3i_^mYER1(l-d%D8C7$S$R zG@YKzyvtrboDz%-oL&PL#B`2T_%mjHMOEaAA0iV_r?NQBOB-#D?gP7}*v-0n3L-S+ zK|KfenV4*^gln^S22v*^Uyp2&Gl@P>946+WFzGM6rq_mtP!(+lRD;1 z)ETLtt3Tl$<>=@IoZo=_FEa7>nur3Jk zEQJiRoBp&lzbol{vG8Oz*+w&U2!PKQAC(ir|rczQ2 z2(e}o-0ex}Y`+8ydd&iN1+&z`0YZ0v1=e83Rh4X?kHwgj*~lvgt0ilsDzyj&6Kg^k zq@`UCC;J=XO8qcAlizD^#V`oF!Qk|LLZKC39~d$S+=gLA*rq6MpZ+&s|HX!5QCqut^CL7pI?fcOz=3%EJ|&xs?& z3KJK_a4yYkc%P{JO7Mz_&b00Amj67mWGN&^HGnejhf95e^S^TsuRgs-A{JJp4YvGS!2jOAF{$L5E%ySFX<^jEobK;&z*+>pe z%_q<-CscUfzTG{CB?*e|v4PFe1j~w3=@+C5^+Jdn|675XdmqWna! zHEIL{1Dr*;ua5=}AErOZZu!+D&Cwhmlz{H$q7-hZ$ceuJm#)_$$^(2r$xP>V3g|8A ztGz~l=c>o5(X6qM@_S>-qf+iC^7}Qpkao7!u{2$YTcxHPlfufELCD@%`SF|1j_Tr; z%H*@R)JHkCn5e;!YLCUZRDe+AYNcny+fltQRQwg=asRaBGhXRS9l`~K#_El-_6q3S zRPO)Vbd{VgtvExf=P>P2UJgncPwU?H(Q!Ua_6>MrsU>zyWV{WLsqdtp#7EF&K$0PG zXAhrgPd*uEPSsoR_Ku6bm|+KTR^szq-z<1&3N=mNI?U+Y$`(hZvf_7%i4JRjVq`zY z&sugFjGuv%b7A_gya)KDGRb40J-5lLglUZB6XSXO%@6*po&CmV@HH$CDD4iB^nln# zc2PJInc44B9u0m~tn)JzjEz?vpyH>MgaBGjQYBoy+fe2|i?4ttqw9`#V(9uQFJ#&p ziEC}19)Res743PLq@TH%U9a0_n^qymQU#EcxV_z*7{%~kCKJZ`eJ;$AO2Hkpcam5? 
zM)Pw^cr|v1#4znzE2vL^sO%n{z{%Q685UK1uv_CbMUrs0Xb;^*GBKCBngUL^4(MkU_Ts;wc28o0)xA;;(~M*Ee%wGpO{Qu74bX4h80@% ziJCahb>_>&P-^3@aEcnzbyByHJVlS`Vtf9j_(|F82WjdlT6M+O;L%nuyDgjC2xW)9 zLY5GXQcPW!6*!k+#*6d~x)`sLzH#M#SV&@Kdl$MXBM&F!y4NtQ0XOb-Z3}RxCyh3z zlv|MiDX9_#zJZNut+%z&n`x9>!tUrp&$HYe!S)&#PoDF1(G@mda<$Fnxpvh9xyFmA ztl;NhwR1;Ue!18LOQCUhi)Y7KSGv4>ubB5Dp)M#2eiG}3x{N=I*Glv%MBsDn^Zuj^ zIy1%=S#Q5zq55-+CShQ8LV3<#S6z@n#ugucmjIxT1glSQzN3<{`%kxzKOMVEYU<#y zAS8oL#@Wq(AE2=+@f{o_%JTScvE)AZ^B4e^@VA9s#n0rtbfR?#%wArjSP+Na!(5{!Wg|5!+sT0WcbYA6dl%5c zC`e0845k;8b^&GaYwhgY76xEqEYIU9gn`_H-XAk&*U{oMB;mSjC9E|Gzg@C?9&WMC z#J)Fc*GFo!0=SkjzYWm>5S3+2x}FkQ>G$l*;i`)T^|xHMq+P`Wkbk^U#z98KAng#r zx!?AoJLgJ3@U8ZM2w``}F-fWbp`hdSgp_J456lkE%>HQC6Wt!rvPl&sYETJ=$KjS4 ztMub-_lnhfd#VT|^yP8L`|IsWyqfr)TikKSJ9N0V(rzXGr}zRdw}j6SPG?ChE2P~| zDkb|fL|;&21=Z~lAPy}zQydK}2`quK{*}eVx%ZR!Xmo6N<|HJrKnl; z{@jOo#hS%z)huK#_F$~K`NiUckflZh@#oIxd-+4#%1{LF$LJ3^RS-+!EAlhmNu(Kl zo9=%w6S1Ee&C`EcV3s8n8!e!{uT0iDc)yC-En%P9SV$%1uRQ;OF z+-?#0d-Bw9=WD5AA#T3!bq)N`pI4=d{)kvd;8jaacjJA=@h#CmRu5EmE&v3o67SJ) zA=DO%Cd7K8yx7C-xN9y`y1V>IzK4HYgr%4T5INoWu5oFob|*$k$0ooO{Oe%d{8Yt4 zy!NtT2ye#P1520iMqg8bZ!vkuk!=|F6{?2RMLT|zLK3B!o}%~wK-JuXp2&I%^Ngn) zftqad4lg;GA#!T>ONH**e-Uu-XAgGlwEyStYKF*I4AnUdM{ z4XhI@e`P@eh9a^tC=D^`0g6FYV1?c8l#On#`Al#mD8p4FbgKv4AS9%C9i#_ySwP*n z0aE0K+y^BTPibN`mChkuZ^wJPhMetA^!q$+5$IiC$Jo;UoP9GSuiL^?WlNU8(D0;l zWFT3(w!Ndv1gSbzDmJY%)-vF5vu&g(JtlMe`~D`A#*^5t`ikC!QWoB1S}afZuAie6 zQ%VH!{8}ELF2-I!3JdI|O1z9&{Z2kVkXr!ye4}R@bAQ+P$oL& z8|7l!Q&RcK4vwh5G(!cHokI56BNW12k#`%xy_!>~YGp%ggpAIRfm?2Gp$RS7l|*V( zuEsKWquq`lC<64spT#DF{~jID0YMUb^j?DS|Fs0?|9hWr_3N>U3YOyU`5-N<04v5M zIM|2Dc0L|J#SQ7|W=(ckSiatt<1b`da!BF{sv(jN6H1%&l@KOmg!aX!e5%osd{dn8 zc$Ug-y?e*P#QE}Z75hUATAjE7^^F7&As1eI0*%L92GguXQqGl&5O3-QWy*V%ZWn8W zHi8)k+dkCTO1t87jd22GtEcyK9bdY!+Ey8co#+u&QuwUT-5v$OICze2Qx8VT0r8f- z_Kw$PhO{)q78Jq1yPYXc)@fo-Y*3oO2LR8B81?^fE_C6!#sA8K^S7Z}ck)cE3ib}x ztKSGCQve~Ln!iKaRB{?>P6e(`C%=|Ep+_}gj(S+fIAi;%1u!^KB3x%7H!vS+q&-5O zQ>v3wb(hNRihCMdy#OdKlb)X&bU&tdbwc5QM?f!g(NT9x(5Rk3Q7y8 z;*>6)KFaM|7Q#}g!+Z@@AN^_-D2<}AV53my%NABIYE@at9trjVmSU`cT$xuh&x)gljwv1(bvr7Wre0q8b_y_Ec*bKB z#f6J7enFx#3J>aJ?5A*FviM>ky^}Oc?LW`AYgbqNqjDchoNQTEkH{J+)*q{d7U5$E z>d0GBrVQ!tejLW#IE|+HH>l$3yI$dc5%MI_)|IeVkmYpcDZK2wwyjgvB@PuBN)1Ka z1_VZ^W1>pV65q@}ugVWQuyxoUA#Tk@9g1gBf0uTSKr$wH%f^fv3UY-j?)_B zVpAqddG7FVgDNqE;Lrv|$dz1D`b(>6AO{&GV;v>CM#T{HO|PLU{+4;HQ9#W$_w1O? 
zz*8=W3cI}FtA@XFB=!comUyLi?E+NSly>^B(m!odfTrAqc+>C1vnZI>4NWYh7@Uq` z>39H$!!Jl|ihJx@OJbvN1z7;GB8aV=6dvZa$G+O!G>;rdEonpwN8u9lofwX&kK0n| zwv0O{V!!Em%CM%dMGcyjjG03BRb^&2-cQ5dr##e@OsUcWD1ZmJzS{$zF$Y0=LJln( z9jdw%%R{czuiM#AEgNJ zHH!+Q6#otU@6{a8SD}BM{Wy*gVKbX+(iB%b)gA!bEv`B<-yfY2=n?iGXF(PC@BE=} zi@`3I%H)cQcP{B*I;B}0TD*|+_Iw#LnzvIX3au6;uTT)WBvEp)iCBbC{Yuo1O(p(8{!SL`awQRO z6@&)L+Sk#3E`DI~Yp~~7Y}rA)Mu4NmkX=Em=lqFz#`cOyGc0XbiD{?`h4fL3QkD(p z*W1^7BVuR!`FEl6k`3jLP@M*g=5BtZH`Wv@BL?3iXqI?|leQ9=%S2qoIU(vd%}C#Z+1v6?!*07sLmetmPymK?u4jW*-Hlj zMsKD-cP81RpL%Tbpi(s<`v&DLXZ&ZbDo9i@9LD^^o~{u^g&u$9>>K>|{rF$ti|Jp) z3^CSxibt$*W{wcc8z%gun+@KSCjdoO&Wz2(T^b#sqWuZoHK>-#le(b}4WxJExVgXp zLBK*SuB%}1+&786_5jjF|6K}@tc&E2Jx{k5pEsP2mW7v~zf2dAMzpzKAzR0~DW{L~ zf>=x0QJ>`V}N{T`UX#iefG-7XsrqQ#enKO?*b7 z0tNh$=lGR!TCtRG_bM$P`uXYbGJw2=-uAkJ=R4%+2Q{=AsPYl&wp^kXlTMyAwqGG~5PjP{i zA9?3gXJV`9 zCt;2)gMyq+v-w01N=_v}n$jtXB}?^N*{UI*U|;E@YRBA;1_t=c9^?@Z0)tM4Dw(}o zPAg^cfh_avU8XdX*Hm4=cP=Et`1s2od|CfhNwUemKmp^Q{+C}q1JhczSIEbW71HQp z287El#eVe&R?0=^tGZ9wHDO}xg`0OQfmJ2sBB^p%OV!Itg+q)Tl`SWL<~eS({et53 ztRhmvbbSeRljTp0s+?G#9F(h%nXdNO8w z@ul`ceaX=Q4yaG7f~%CV^d7=!5yddJLY#cmZXPRQ{K&~X`{mjDUadVC`=oTI zT)&1fHZC-IOvp*cR;!Vl6E>WT_?TeNP>{nDo+uYG3)l}j0vECk#tV!Qa>EsVf5;`S z0C|j0QzaY`+wv;8PVpz87Wj3=uU=)_F;>%+zs`kKQh?+q*L1yOJxE}l4%8nY@93b;@;=52 z?Ona2t8!}6FF_q->q@NDS7Vh~oT0Y<@BPI!?iANSm{6;-2>gUN=y6|fbBq4!HeYq4 zTUphFP|D#O_5x4^saxo*F`D0b!Dhwk9nkwP?l{HOU-uhP3rUa$U8U6~(h!>~+bBT4 zjrO)5@rCwgOc}=fq`69rdsaEfMWCgJ`9*982leK%*+FvV2Pg5u_u9wr#Bw$WOx=O*h7;V#py70t$Ecx6Gt!( zb*B@&FIK_(XMYm|7+{X>VpVx#Znv43Cr(4DpfTHStVYQ2?BQGOtq=Vtwv+up^iOI- zvAcAkI)FJBZxutXLKY-SJo;LU6Yq*-#2uC5x|e1VP)Rwk8PZL_c^QHs)fi$LSOt|` zJGaHnPe)m;Xnf>3f`VU&nchZc#g3x&E!>}nsu16huYJWrsFq9L^rX3xpm^u)(A_ax zt3(N3jL$-EU>pAZqaVhz26P-HHWsS`BSPj>KB}k5?2lrst7|R&+f^Du#bcMb96I*l zg(S-!P)qn_A)zud7ja#HrV|flyGv?<%Hoof1yMJKsl;l|VGMH}q+Gf<-p{aEWP%6& z#SM=CXfp%P%j(O#vUT6u`&boTP#4xVR|fb*>uCHK6_8f}+Eq0JHh@1r8>RJQSa`3v zU3C(}PG{c9IGCv67!jzPAzDtMgJU$$E3|)a_1!}t+R1)db)kLOYzo?? 
zkSJV&(mO0;rvp9J!TZ@LA_pRHkb^@B!U*xpsC1yBxPp;l**g#k5D=&6s;>x^rtPH} z0r&$hvR$ixAN69@)evW~MSU!SA`GkCD^Gp2J!iQD5_jEb*>NkS$_vH)S$f4P%LGU( z!-Ax^STVUU8}?7r(pcp-AUqL^DL0P|?#mRrM5X*2vAqWz*^;cj-}^UdfhrYc8#61%bb|vs7Bb;4`*ZE5}5U^P%f&-w-wpo2!rBkEbs3 z{jJTY6ZE?(_B?BCh|jR{;R{aJPBF682g9e6Dn$b-%-1LnR(0)h2QXi?vUtCjFcv2!q*KI>7g@3KBR@0K@sI$G`Y z9qxRjJsF~)eIZZ;3|qs5R9NeDyu75ra|Cpo`Kt0ONw_1a(}gG%`&#s@uVJlI`8GC+ zNY?>XLP{(ekw)>Swjize#rW{7hDEkO%#`%L>+$ca3+;2BB3H7Uv2yj_Y*(ykVB8%E zl$j`o5c5Sre-;RS%q9ap0|G8eN8*m-ofq1B7h-H2{chTNDOKh=`8QzJa^2{bOqTXCFp-1>B$^mg+z*u7SPGbj1?H-_vN($6X7Pb zq4?X?t8gbQnTwRfy0Wmiu=e>_boga_-3he}Qr5*FR?(0rga%Lj#h4)a#76;o z7qc0s&2a@kR^px)7gef=hm@68wy@NutD^QUZbbitX{r`d9f=BNa9`G&pF=#}OJ`ZU zA!y{Am{l=fOnS;!T=E`_f;W^$Yhkv{uUb-Wa8<-qRU^>CzYNtt5Mco!@;zz5CB*RM6T*=*b*qN8E?|7N~cYso>J2e#Sq_kidXnbaQZz z`i71#!QJ@a%dutf-ts0yr<1KiarSs%_w<4!7UB$5dr}c0_L1A1k;VhLno~)mM%7Zt zj{_0bEURC<08>&c6El5-i=QT>@i-F9Ux+_-*z#hn0|=!=UEmSH_zXpQzz+(lMLdD( zDhire@({e^VZL!)CbnzfD}=)VDMz1xHT%RFEjT`OMkF&vsm`b&BER3B2`#eH%LP_d zL`^JfV$~@6X$&0DmMt$1a_Zub z@S%;3!lP!dsx-<29;LxZ;GMnzC9Xj|l&!X(ci-8=&-XGWu?C-S^RWgZ9KlFT4{FW) zQq`R9w9De}tU^|H*Rq@>q07NQdFBy?Ll{e-3;C`=L{K5>F;Cy2 z*V$EltZ47$Wn9!X>r$e%Tn9ipF(u3^JR0BhXAj?PAB{5)n$?OrLGPv)LRZj1K7`53 zd?=iOt#>xIM7*h98n+d3>)a*^bW%wCuh0J0r~3pqomOgj=6W*BupHqfHSoAQf+|tG z1ITKqz1`Ta<#r`x4A%>mjP*96PsCQnVXUgiffy~J4>&IfF4O^!38#-Q)S_{4GP3Bc zWGpwh1b)zNawP=GmjL^GT1Igi*QWXlZcqgabWT-#a|&=~ zboY3~o$Co^?BCp1Dn8h`f1{+Ob!zg*N-0QMS@~CijOOEy0(yt3+wrwA@M3)MGA{YC zfSHIvO>>^TNo8ouU$0Uku?P|Cv~s`bPJ8PM@h2cghlmwIo}KlZDCI=Gpu@bL@WaL# zphDNHD{4r@T>ESc7cqUu54ox+7soAjPrSUT&>_EAU2f@Io;`f7eLRLI3UppKh`LzX zTZ#9**Zwd*cmc}L1up^a5NpKIvRvM}hgt!WbL~eM;&_Q>&R`Wj{x@g8EKt(yq!E|# zX7wxX;6$wEI>gV>iRX5#l$DxPJfj*L=O|ciC<~d?(Id3(M!G;$xIiDQ=OyF2}uPs3h{$I_(>h|930Jj!tD@3bOa#R>+>n#YcZfb zC&F}E6!78J5Tu#xqf|TtypYN^MB)TZhdkg;qw%vt%_9C@T4$GDagQ;sF;Q~;?tW8% z%lkKQZ0Pf1EVP@bA!t_wF{Yc0ux6a1;%rPL~Omjy)nHQ@3XQ zsE~m=0>;!If8~R(po&8P(I0|dOA3%CqU&9=2(hGZ^b0&3??!Aw6MZ>A@Vz4N2aefB7>fsw=E zOKU*B>M!FNN_v3p)GO-6vUK0OqQj&j$z-hP+a|>#i*_ZgC3Ab`=G*LIi2N#R;IJYc zShHc@)BK03aq&U!Oll5!)aRH^Vu0MXxU##dHyU5TD+h&)pWakOxgW$X(Vb&8<}>kv zD!R~%>*aA!L5?KN3O21wDUyR*Co9I&matDjc0JNL*6(0!NKB@4dr#r9;ZvQ&)`Je+Ki=B`-2 z@!hkB^k6Jd0V~o&*Zw<{p3;hBOoB-(nDr)oRg-JApTcoz;rgId4abNzNJ@sck@KQ@_Z|eUSKqUEf%|0UM9G2;vdS1s-+Mq_;85v9kb9ryqopo4C!r${UMfRM3^rbPO&oJF)TjKZt zt9v1k6uV0Yci2j=BmAxOIo@vT)_pfIMzNMN&ZHX{4u44I;z%x`y%H`zmJt-zupHiU zW>xxw%bdA&p|PIm4B`Si!NPhd%3&VxE{zGC1xu>)rMyLKwYbNusJBRtcFCCmZ`vsF z#Z}Ck*dK5i*T2Fv!FGv1Nz4{v8LyG@8!Mnl<|wf_BAHqJjq27vXlK9hS}ty}6p%zb zG6*E@=OduF~73l+_p!h!NUlwC{_!43*9`t#RqaG zGoKv_fUcKV9~SWYD1yfsCz-0NQfsXI|K;B?vtA~^>{_a*O|FsHbEOW6s_2+2;@_Z;I%I(Wr}*3uyhY`^#NoWM z#EGG&Ne562g)XG=@%P)KZ~5Dx=jh~GWmg{ev`>yTM-e8E64uW+R-0fJNL#H0zkuyw z3i54Xxm+kQt8U5DPa~0HWmxgT*n47+6dNfH)+(h7c;ew^$^ItxMaeOKt;;r)`bXNQ zVu-9gr)o;cQh>0B)QZAHP!8kEx7+J6Bkq)Y*qY_*q+?SlN=B{$LE$ub@G)%7f>cER zNUO2^L@h`qz^uJjHmzrPA6t!SS&*XL@)6p<_F#y|wRGB6Kf4$ki4fPGNh)=in63y> z_y#e%kGHfzx_RbdUSVa6LnR8=SoPb>J+qJWo8L=0KbRrKHULn^0Vsv4))4a;BR7;w ziSX*T-hKCu#ra?d3wVS_THT2gg!$;$$u^uB9_Zut2KSTj_u71AVpqtw@M87QYq@)M z(ROFrl89eJ^~h>rbCOhp>R+Gh)DQQeD9tMD?H@s~+EG7_4;5ZQ?`$A7Hk>Eft+Dph z2SAVfDnxd-aT4=Ta3m>DnAf?&YB*iss0e&KKsP>RJpAJvwee-gG~Gy7e6Ad#nRYcRZt1cg=4Gam+Lb>IO-`UcxjSJf z;CPfi-9(^O|AZlg^iaCYrR*2T5B2UUQ^he!Bv^M|e)hl0iQJtDr=x0KAC7#;{h}j%BE) z+(CY0`W-Q{>RR98_8M*@sg4rse!SrmLE-*h*T15I>s^~TLv(h&Ew6dB^74;&;K&R& z{G6@*2NW&S;kOWVlOA8Wk{PsRQH0%g^`b7}6HI|Z>Xbmc2Y=!zAs0XL zH~>1Z>A{6cT_d8@v>=4TF3yGL!NtOobuiPbzSw5xJOiGldS*2v^5_KBS$LPd%nc$F za!HNKkA)24m{q&K*YM^lqQQa+t z%M~H0ndmbcy|M0aBj&k`tL3-D0(!aZ5v9xzF|i=&dso)QtX{`z@~^O?!D*?B?SUVO 
zx%m?xh9e3#prD0QCfpiDO?u3pz8*FI5sS-pTj#_p%oYm=Al1%pU^%>;v*MXxX!$qm z0H+-649Y+;xabK6N?V+2k`f^8#YY~$|L7NT8~C3`-}N%BaRx&hJkdVtm6(ONB?P}X z(nPs!tk9|;#FJFD{bVW=KTvLjg{wEv#g0{ zvx$jMZfZpLkILrHa&^TH;?R#VDGEnnQqV*#P!lv$-c8NEJKEaq7*xpC3bps+f%w-r zq(=$c39C=!Nnp-t{^_oo^>_=kU! zNYtxGB#JHaw#@NzS>T6u>wZcT@H?6+fk~E^5+ZRl3o{w3$I%bE3+&(!bn-Viv2n5CJU@fTtNPGb|ho;TK=t05bGdN7ByFK48 z1Z4#B`Ad>7ta#mgq3}M@J`$5vdim^J44HTXkrr#L+wH0a6~`Wggva>Zs|{>2zO2Ze zhUT@M!<-lW;eOukP_Gg!#_dv_@OYYnUHBv(*lhd!a_qvV+Y_IR4}y0gj~N$y8&T^C zQr0(DsCa_{N;9V?W>E#uPoPuHULF#uHP_WcKk{@FnZ)#)RCSF_!izuqmEXoeAP1t_ zIK9wmvBjxNuX&&Zl0jk5YY>SLGa^O~ zfx%Df)TcaHREW7BD@q1wTPvo9ItFtO{R3a=q6a@Oa+oY5Pm@B6_msh?dpHLuE+!WK zPRP|n?Zk`s5a}YY`#oC=e4q&ReO$lVFiy)a_IP>2JbK8fe!NX!6L&zo1%B1DonGwGA7_V}n6uL}9VX)Il!#DR3! z+Y?pE2Gu$75PKz%LN=jJ38AUIVpn0>KomVJRC9`P;OQe^#q|XwMBuzV)F7V({Vg%e zVEqK95bRRi7pv;cJFzNy_t2LLcWP)w3kcPGb46!XTH&(o^YI5X-SZKjxvP|L*cT`h zujMudZUOFej_frEuquacQJFb;sszXkO7wSjk&;ELamr{5;Fn-*j!Pq^v?G|4x8rES zJ1Ft9bw)bC{mv9Fv_#vrq|paj45#z9o8HFj%DK#{;d`yz=LA)@DDo=(>FqghK~b-G z%#2|&55P0=`F-IluVsqtteU}w=b@92?#CXX8c{_abFa!ZIy?~Ux0fE)%^Phl_8l1M zaOeXtDLx@0L=0eNLZ;eE76>4vK;xr~uZq~X>iHd=fZZy)(b9BY6=|AVMZyo_ZcU8w zgLG`v196@rTy~9#h+O3XgPvf_;Q^#F1{X3Zjk)YD+X)}qdr$mYZa>5x0h2aZTb*SF; z=)|XXL)Fkl(cz;W|7~4A-UPZK;J;M&j*L3ch8T(BwA=n+uFZuc%xs<)mzmbpJGxN| zaZ9hK87uIsFRk({WF!iq!IwRJy_f8@tzZ`cIaC1V^4$HEV$E(tmtZU zz=7s6iqAj(-r3)@v+u@_y&GsfVfVKvTRs_btUPzU6N)M7b~$zdpXAs0B!APr6`!PD z$ZjebMBRn3xQbM5PWp|+FxI*K=-eFgIwP+(*P0*>w}Rl!kYL4eddo4Jm$MKzRXHm* z2oF`gp^&$Dhf2kig#ZTUz})vnQ1gi)0{$Li)B8;X4sb|8GeU?uo#?>>M?U+FqOZki znwfE-lqC#`SV$6{pM1{e^>u9b8rQ&BR>z8RA+whl_x{F_v{o62C|>tkY6ScpMmmgv z0@29w9hH(8oXJVinKD1JFJX*M!>6wn0#xs)%yz^u6y83;(DH_B?tq~%8Dj;g!SHeK z#1hQb6M&jfdvCPF^svib3UTTPr=p@n)XJPz=G#o(GNW3O8KRnp#Se$l=$;t~a4I(A zY|s~9cD4dFMLAWj`Le{n6X!d1fhQFlqfw+CKc;BzcS6cAS_O5-VofNK z6`{ISbCh&N!y$|nc>J}qUz~lnP5viV>r1UTi0!%OJ|3r}H%S3s?2arujv*ebYrtxcCSi50W!W)xJVDxznC0?Xf1#F0vQLz8z;AEyjCFq)`54g%m+rYE(i@A<9FAY`xJ| z{iJI+oP~oYl(XJ}gALVP;)b>SDPR+iI<5U@Ce|OQUG5?Tx2>a6GzD{IU-3D4AhCHY zMqUwII+OG4@uA!Z_-E?t^OxES|69;%9{$U+XFf#Y@roR@gT2_jMJO%>G7!B53>+R-kAMH&xuyF_YXwUZc zYgh>7-oubdFeHXX@KS|DcC&~Nx39dzJOco`WDdqSI{m!5vmj+4E|s7_o-7wcjP%MP z_+Iw6xvEMrcHQNd)&P@$6qEhAZm+b@`KSTNV=XYaYCatxKB=IU6IsfHTv1ISfQJ}M z57C!37W{> zhb&UKgk->R?WVM#k>n5=0Kt}BPdq^Zo^i?NLZf3SIvB=9o20&8`_?95zl4l?$qTE@ zqHA4bygT9#EG)&DC3IRUpWfu1UZ2}ips^9;c z-)?8$3Ngx!gxT@Mq*p^oMM~F3lsD$bu?jII!z2~t=h@K z3?2V^OPn1SY1jK2gv+lhlQotH&g$h%q<|VBu0SXZvP)6^BYR8aFmi^Y5S3+U6%dmc zaJtM_+qICJ?)T^56;z;TK-yBQIo8j4vGG_|8U5Tj39>xhKJ6i397E@c_4UD3o&*)N zV-a@Rf%U0^;l0^WjQE@jUCa)#ZfBQcQIs5>)&2U(b3=l6(B&Q}OZlr>y#(S(tq8t? zg0tDAAfOyK!?k2x784wMgdOl!%NgPYKaRERTc5oeRIyFLib++lVx64mw((o(%EoaR z(&{)C=cMh0U~`$B?%}1WBgt^%fBvV*TF=HBEmJ*h)(fF~)64&v_G$k~ZQ$xnaRjV? zP#j0L2en`?QPtW9V2R$qRXieIQ2hp!6xa*?ygx-5FdQu|u#jKG1Vgv1%w1Nucxvj2 zWygc-CY?Z?ikG}b46f%%Oko6HKcgwlS7SLANt{@V7x#)0Z_LO^%NGm)z^=R`C-61d<#$HxvDtLF3 z$~xZ0-rG(4<^{r@RopHO6S%&_!Tst5NG7TOyB^=l5SDI2Ks28RayrlHd=^oVNCpy1 zz?u80nCb7wFH|ZT*SLE9Bw(_D9^%0+iTW@MJP+y~A952po#Z}7qv0Y_&PwjWq;gNs zHc!+DvX3q{k5Z9M^Hc%N>X9G>(#tEL)Vv7_PDn%Gy?HJpax7WQSiYsn+48_~Xa>hJ z0KDf&%s!Fn^D@(AL^!GSC=>Befv*B)ER7j7@Lq{(DD^~NGZXId*Ur9%Rd(T-7?X%? 
zEN{F>_mX0CA(ARU^>XHNQ$Ftn-*RM$G39(FH#pS_7D|^ZQu<*9&0H`Ju|6!H-J7doNZbwPKmKd7d?;G z+6$Oc^?ERTjuLoBA^$=^>(|8s@kje*CF&$H?B5ci+v}3e`o`Y8_a3N4i~`jq#XtBR zxAKQ6)&JDbg>wCme>7wwLP#D5@^i6N$UEUeggwZ({gKzm%3q5MWfm7V{{6{2?UV8E zS>h)#)w746Y}wk32U5+Sc$?|*xCinUthczmRY@iGc1`)@88zq$(fj*@(n{kPD*5;> zmf$!s!{^$kLbz4BlJ?CXx}b=4jI2HdhA9mXE$nmGIXed~hCLzD9VX%;JM{4QUnS`A z;kAx!h ztF{!Q03UGhiG6Td3IvJ>yCFPLf%v-zV&R^GgRw1E` zUi3;+**N9w;cbkWWV8}1fAZN_?#ExHPTixQ#g9rI!yfVy0)-E?B>k3+PqD6;!>Qi1 zN(GOF^!keRfEa^{!0VRwB9PHt&|cX;B7G&x>FlALdf(7;&E#54xJ_Ozj1W?kM|%am zg~lyE4>cVJ9ubz9XQ^A&vt76zW9h(;F9`ak`0?-`lo+^CQq~8ZsMk$MKmWey$M47L zKqIFZbm`-&N5)Ov0|I=15fJJbX z`^6DZa?^)$ab#b4-+{Fe9XWUspD1!NbN-(e|FC+whTVS7MfI2&#xL75Uedd$K`Ok& zk|QiKxP7p1wY5+dxbt8=6g4dvDCYEOFWQ|P#p~_!@i3)apitvsB$=N5Jhn%{-N4fZk)P&*_)y||MMW-2L;ieRj8dBpT%tk0%Xr;s9F3DMUle zPsMF(u2>H?4CM#KcZt`)4dV{*(9nbs(FR8)3P@R@K2%Et?{u-o?!1;#Lu5N};kfn} zun5XBUa4|oh^rIfu@1*+$laRg;rMX3TSTah^r>f>pF3madq(?S9?`=K$SjUiolZWeSGeuH+t>p;Hn%N9XQ^*?H@MBSCA0y|gC}UHa3{y~jr|!$2p> z{R$9(oB_xSLBZv`H+>^na$U=?XUBw}!NuSSGvWc=s*t5+$^ zzcimjfR1a$W{O_|k=E-1)lKVCTYLfd7rcWezeLmG;pAfU9N1WI%DE+Q-Anwq3=6q; z#b76uK~=^RO~9?{rG#R$D6to(8+zTaIEZt|OkPy)z4>l?ItGIIcB(g0|i3M3!w(X0xDPB zaE}1X`CAfjZnLZtTz4@UHu5YYML&_xoYo;7=f2sV{M_@##-Y`p0-GUADk)p+6Ow6u z(@wwT%>hy_DIf%OkG%vJ0H~Y*yBYx*kxV3$`*B54^AUg$r86uy=;WIEjegc) zgB|1zlmWmUj@2Gx7ITC9Ft4_M(=ON{;z}J#jKr3Yb$*2e=NN(@U0}O11kNH;iutl& z=;fFKT}IdfB@y^vX3q*Adx(R3RARy_szu&5dyO4i7EdV>@@JmGt^QI!bb(dpuxB6R z+wCNk-CwGrFg#QJznF%&A{1DOKX+hmki8MldlRb%6db=cruBI~U%wj*GP}|i;}iP2 z+*wHQg|l%tUkux9jDlbkS#wi)m%J5*d-1i4>ZR6^xN`_ zf8<@D>KFqozo$)5JPbOwD=F!FCwnnbL_vxUA3h${6+HzUii0@rTTvK)J()!k6~o~% zJfr|pGtxsIKlsu(h7>;-*T5^+I|<39g8DL!n14O)M2eXa6MCSCjX!7g}W|TU=v$7!?};=sguWdy(APQQhUxc%4PGSH)|sv30t10-Z%$KK9&$w)V1g@{11rh z=;WYik|Roo$YwO^FzW2@kSgvLtQ0>$Ss>?ncPT{Gf%#ZcLh%5a2QiHWva%#+>m|@Z zfRy&-{h$=ZtA>DYEzjP%oo=P-G$1ByaI6;Tv?)%I9}Y_dk(;VFzlgCCyTLG_v`~x$ zzefduaFSxEqJtHstIUvV@g`USH{wzJlDaWO2;zt1Vi+l_P*}&&JO6-F!nIPgjVkEs zp5^7lzJ5nDF~F*qTUE%gKL@8G^k zm7V2U%?Z2_c#ng`gJq$Y6GbA0XenNJf=}x89&g{o_Ex${p2(hm5<^+J-DYQ2)MYL(M#OThK zJ#jeNHP?k$?aT3qx4+Wfi$`#rSn&PaE|kEM$sLsZxDvm5`Q?_$T!I~vexYgsuDn5F z)HUlYLfXWP&{?5_q@HHuk7=&ZGVN7XJ3e~)m{Rv)J-^Y)GEa`%_ml-ypg^^v(~Mum z_J$_D7r&oscYH3kaKNPHVqO^Fg=}$?!QL%9vjmYgOG{pbHyl5suV}QrRm&qMI z{d1p8$ey^^APG)&dl#?md#8W@!ubERcfD(aHEs<_ZAd7J{^#wn1o;CcV$RD@i z*t5JmzysyX#N@a!C<{S)JU?@wse=IWh+L|-Hc{&hs02!up>d9Dfmzx*B39tag1lP$VF2ew{mBY3nS|A zFezw!_iFqO@lDlBcwm5|X7k+v!(Xy$6O!sCiw-xT3r^bYSFQre3q)ZnNv8_|Yj z1%*=Y$nmDJZ*Gtf1gKvjI{3i;Ll#b<%3;rl;_L@qAuR`7ultAYe#k^}GZtsuGa# z30>I=ZpKp2@cpu0czy`07guW?@`EW*DlL%RMkx}H6#6QaA{~h70R{9*{8~*ntuL2Q z0Gw*sOn%v9dzjaik^)?JePN=_+Y(T!+&Md)JmZsnf*2Kx z9DxzGofrQM9sRu168}x>31P0X-qP9qT)X6PFSMl;T@>(sA%rWt((H14?*t;JjF zRD^ip1RxR-+}IR=*!84M13^GeI-t|w>B!a$)vn*zx%H8)yk6&_Z^US6O6$yNZQJOu;TUV6h297+z`~(Q&Y zUwI{7I_`oPPg+4?Vsz&^>$1bvD9hDT2K+2oM72;I7( z5HjcADxC()a*$vKIzSs3Llk%>BRc*DDe1?+$9$fXMmREb5c-lRaYx|dJCq^kjivf8 zCr7m;>Wu{-ewZ7-4jdH{efs!Lo3p^cz!1@RcE?RnOCj_X78_I zPL9u=C(rU@$8}lBeZ!lQUti>NeXvR0YhO=7umyIE3ZH^lGWw(#3o9I;;qEy-L1cNP76FUvH{A1_;?mi^Yo8SyIyM$J-B9$8AnNjLY9Mf}??d9jroqR|!FU64* zCT9SUtCJS<3qmtP5$(ZTZgLtV^19|O2d2<7s%_}K`{ z5IRxJc1p2NRjKt)Nr#NNKGQyUJAMd20o;<~W7*XB_aJ^JPrZC7!)8BIDTM0K+Z|M< zGaoGe!~I_CSY)Qy>kr%K;`J{QfsBEUxtgGS!PW%t?~(og_y4>7KmXL5e1I>a)T;D7 zbVdd{X>U%nllby1y=->?F0cc0Ww&;LKq}Nbj{X)!H`CYPsP~`=5muBIVfzcq6qbe2KGq925(T z4HWV^G5LC|I{NWwfEr9i)OE>tD?ZMDq-p7p2C-f5h4jx)wAZI&?|YH2G;=XTs8A9z zLR3C4Y*oFdny3e(K7)Bd$owBA(A2r%1s_M6K~poq{4?#n7|MchL7RK3EVN4;IX+&V zZIi2hB@0`(4{i=em*|hrZZFj$dFxAReu6hQsj6I z^Qf@c1V^2tUaZKV?Y2=ojev9s{LewsNsIJ4xLHh-LL82_P*!!i-LkBpU^w>Z%Xlb= 
zcKY$Bj~CrdwZ|QX$y3p16TW)R&aMAp4x0@VIpm2#T-^B^#mX2HS!+hkBn3RYIx8rY z_=|8BLyWCj8#^1GM9*J;h%FdXe){e9M1`33gQ2WPe3@$x|?ei!WYy&&m7K>sSSr(IUAr^hApfLsDC$aqiuGeGF z2rhL0%nR8eQDu>&>|dnsSuHNr4^1kx6@lcKa`ALkgZx+j62_NTL7B5zFZJoilsJ9z z)A*EHE+>ThJU)@Ih{Hmmh>eC+zgu<*+9njc+J_qjb^>`be}I&@bj67}-6nuaGzwjp z^oY~Pud(U)83jik9I1q}8VFeJ^a@Zgdt+}1{y;vm4!y~_$0Pwg&*%&!&6ch+mlBVK z`C55G;{toq^08YSNVr@l2Y#?R1n!O7ZO4jX>D(w#JT>iZ`V@?tXQz5x!YZ@=r&cWK zuG;%*PH(i&t$DsmNkIqA%he zmYYRCP+;4eJ~iEDChp9w#@xvXY$`hxf6J%RGdzTY(p+L()g)wCo@lO;a5~9hY#ZUi zUx;Y%u?M>(N81mkUSHZ}v+P$KMydfn@_^@&F#(zJQpI`hSo=^-L@EDf$czaNfA!=C ze(!6<;fno=OE9S4}exH1J4*Ey^^oSOqPVxt=noH2d>1a zalA~$@U~!x#BbDu=f;hglh~VHxW9qt4C(Kq`0nCjyEQlM4U*-4oK&i3+okZoxQyaA z#21)Ld~mV-QG9Uv_zUd|Ux+zQsTJr(DW=jDDaKQ0`};Q&ZE`w((^dFJ9&b7I_<-43 z2Q4S&jCG$tBSAHLi5%(1#&h2}-!Wrwu56jaiChdvkYSqa9A`KNgOjA?mDBoT5>|jFeVIB2@@K zUDPptx!k-A7{ny*S0z5!R8T2oKv!DHY8q_3-%h0DW8Ycl&Mc2jQI_?v+~E5NQ8tef zAE^*aQITWzc!)1MQq*`)B@wXV(UNtfI%X_aj88P6>+yRKk5q>gecOpa>Tu#drc28@ zh}n;RiC@A+Nbp^C!vD`o?E;|`n)}`OGBWTe{u{rqs8H@}K*2P<5eKYHU!T;a0cpel z&*7thTS;bG#%ijR8uiHPIDu!fL2lT_@Kz)%{kpKl|X|c=Ba2WM-n4}GmjQ0W*b zpw4yCR8xClC6=XklK?J`zmcc!Q34_L)fAl*IdQ}b*OiM1#@SKC7dLM=XH~rIvTWVD+oQ>LynxW_$>hydRcfR9lcUG#A__T7AP*( z5nJSXJd!2;l~QuPefqN}nI=Oj77^e0I_|{_emnA{5|<(1Z*4>PQFrNq|4AN|YB3b;c0?stS*fLRm5}HIuA7+aYI_ia zR9gIwGpIJ&xKOKSC6t(9criYH@(GEM@%yew(ot1>Gv|-k4638~n&KzKnS9O`uo#^K@0>I*e&e{-TsM#yX(0|4pT*F-A9QZ>;I%_PQ4*ig$bzlFU^-N}mYb{Yc>) z%bHFdTG9K=YgGbH*Frj4f4UGr6lsTCK+A?N+c398wU; z0`lI%q#!O^bndK#Ar6YTY)DLj!3g7XjgrZjS`p6AC}*(((_HtCM?`p;+Q1dnQ|N4m z3>YuNv+dt`lH*NM_mX&s&VcA%hj~R$%(Yeje52j;&pEOx+sbVp`#yuK)WlD=;1JVS zvhYT3x@xT1G*$q^#aFs7eEQRP9J{umY6azS6N>PWYM8dQ96Bc+F1I@iahyQdrrM;R zwaDQc+%F^z(k1s3LKvq*=FvnCsM1F$Kd|5He(p5wPGU{e*et(>g_01}JIYh^Z{WM$ zcSW({G)Azn%Ut{$vGFSEYIb zU^&0Xh^@sr5v2WLsxf>h4zG8RJiuPjxa=UIk2TAIDyTxk1S$_P5?f>39@~#0Cy|+* z4_8vMitDq2Ni#UOMl_qEOs@~G=1^gmcojBXr!F3U92hWA5)u0ay^Q*Uw$a5BAy zZb%=Ko2Fw+9^9UWYE^;cV6n#EBpw}YGwAr6O~2o_*Wn+{NqTj=@Oszp?U%G)6>e#Ml)l%Vc15P^x8`-RPtE9*D*1w_^D~ z2md1h*pL`jM5#mCL%(VYcY&sAfpVm{0v3T%HuhY-@S!h9LY{v`n4AMX7J z5AE?HJmeT4XyNG4zZ)E`5$40<+^KccdzggTmkV|4gt7O^60xo`s)&RvdM;xhQ&Fp! 
zGA<8Q@0EpD3=V5|A?yBhF#GboF(33~ zB{PiRaH^elN}$pKv7KWdA(Uj`p6OtSS>zPksr4N?0NVp=i7&6VxfQR*%9u!$n1GN9 z7O~)Y6C3S)OaZ@RTsJ8e9Z%|)s(n$~qpqv8Kln`uD~!zjVNv84Xzb<@PsrZrb@1&dy9qtt8TAmcY;s6Z#A z9seTU1~y(Z6JhEpteN3n77lNn8T$Twj0^rJrXrb^^Maz$=Tx;_8vR)bWZMT^_?)50x(2CWJ4M+e!gnQQ%eV~=22+X&iODjt8jmG^sfLpY6hpq)$r8X% zzeOHh{Bj+TycJUy-aALmaX{4=*ioaxnENwlys6le>r{5PdVv7P`7PG_>EpNCr))qV zx`N+>+9Y1yIQyH@&&lRoe2tV zat!D!0PO<|=!PkLvTG%$nt@z6r22U+dSIgHS)l%&1r}mJKTtbwiM=p(z27K1KjlRrczUvDJ+;(>=`4)j+P(UNbZc3| zf*g!^o4?A&&`!Ku0Sin#SNv(OFH(?~X{t2oK1q&;__FvggY@X zLU26Ae2&90HYws4$YHOi8R8%*y=55_TL8o_J$NjBdM(D6Z-Rf9>v4hu#A0Bb#TTF% z9p+w9CNgchag+DI^NWDwWbftyP^Wb$72KHRpSDhVy^29Dz8qN9P%IPfcDt&%CBCFl z<{&QoN&AxDdJ6|rE}XY}kuX~M6C34l7@M7h6f6W9Jq0% zih}|D`t7^JEs<3Vi-fWC`@sk55#r=uEK|4}2qrH?mM z!8@lJp`u<{{BKKEh$M@U0^M4Lk`6g_qh|e9LD&zJhADR)3GWQlgnQ07Ru7V z;&8T&DA})MA*=Bj%7S5|HTDG6?Q99Saf3&Q<+;o)2+qIb8l3OMq~4&1bqwVELe?y7 zr30Gr2&g|!ea$(O@L16)W_X|?_X(aUc!EgyWO75?kEvtpcb7I}3RG4{@7Ny{NsoF% zy3^wcT-}3;PRm!@Q3a6lP-?!ALJ_~vptm#NM?n68HG@q5!wp|?9Umb^(_t(hnHSot zfBHfwWNzBr$rCk|^!J4RCBb2^gWSZQ_|(F8yaU`E%y=b^AqwM((^SdBST zZsi)=qPxPl>ZmUPa4KR5Ud?$bA9-GP1Te;jH|20=sGVM4KL?NYcXfA{!%Z&9&VG^% zAGK1EfZz%H`kk_1grisBVPHH`q}dkpd%2z9GsN<57azJWvfuoC6uB0^Fj zczUR>AsA^gLIfZasqD)U@n(aODug`cYoD{rqximc@lCa2=uj%`*2&db_rUDfQow#bE0y!w~zBr6(NPB>1QtK62n=FF_0xg zT!@3_uN)n#7oE@S)33GDU&Y&SjJY}E)QW0UqI&?+C?)5AEVFC1U&dhV@PvJ-a;$|I>W))Fo_fi!iy1;btUNX?zqgZ1V8iw~1fA$_2hk4xOh83@!ZNs%q8%5>J zdKHm)@`Ol+p6u0jwQh*?MJc^P!050reRvXiC=4@`)_8X*I0HC=6azoT;;*4KBvlz< zniSIs?Nat{lIywrxzZ+C44b}3UGejB`HP&Jc$ZK>rbReZmCPV6RG}=AS9K+6T zCtgJH8qrB`EvfG18$O5D{h6Qz+rpaLp>1JCjRsBTz92Szv1V$N{ zn^n^kH7}rLIxP11EEUW}oL1Ul$&P#W?e=QStJmnSVG1l8^pS7(?GusH#D9gxWQhd) z#9{H@_%*nS@zjG}t~yn?@b^R*K$MckG>dNy5jZBU6+cPsrWSCc`|(*ns=1&#Ehv@v zPpQua5J?W2eQ6O3V_-wEPZys@wweRI6a)GQ>aHc(@N`iQ)j> zd|(v<6Kif%rVX6W&L&6>QT-U0>Pdg_c8r6f0BfnvmHL%{k~s17W)?#UiK90`Oy z9#}4Bl-+AVz0)%-CpvZ9jWmUhKTJk$XwV(B?zw?$_k!UCk^fRW3JmcY2wZ79Bqa?DY`Mg5+5$f zcXNjwg{%M81HjMhJ%%bxW~rYmSe#+vQYXmICdPF&K6tf#)*Gd6Xj^H*sSX_z+b=%C zpmvyy;epq&0Hp>nwbe+UsolAuTDMM8uK}i&w!Ks0IS<=t%O)t=YG+gRIl9y5TedKI z3qBK;Us0z=+ahK-*d%0@RqtdFXvpt^D#H`D!} z{z6+?TwaZl)4$sCOv0V80+ED=B^pK7oi1Wh$9Xnx(G>K~lI2YNgf8d>lq4TOTV4A% zQu4flQCRitA(9x%QMtO$$8=@SPAq$VLKb@6R&UcJ7rVi6UGsNc?*jR-xx}@j#BSGf z-&kvzv2}tH!k4Ok*R+uI^cacJT94o;vC@@*Nqub)lk_*KD>Y!#i}OKKl#OCrNhcJ)VnDbYGqv{d3N# zj9V}mxPM5m_Z1{Z6(^`6Wly%>0VpvFUSU6H@sZ^k>Jz(Q7uK~icm#kjIzRgJYI`A^ zK7re(;P-fkHmyrKX80e%YIr+kP(3)0+WshZ{#$Xu2mb zJ=s5q3>le*rFQP7q|Ak*xH5;9r!AGQfwE&Uqz49P`a!6QUYT^1#X*>OHLF&p+oD&U za*G=rGgj5-kn(=#TD*@dwXmBP{uxr=@3+G9nPtZ@D}3T-+D&V4OdgcuxB!F4BvY@u zvl$KSoisx|ud2qTFok$1AZSIj@5DsWq5cJ!h@*GMeHVpeLf;f$#X`Zkfpf|m(RGQz!bEwH*imN zC*q|~|3f?doDstuqn12azo?u##ag3H+Z{tE@Umg5@NUlHjrRsax7 zbcq03(%lc5PCt8c<)!7@mFF|=hzyT@`R;C9O1C!M2?B`R9A>G$yqKgIK%^3 zWY+TBtN5b!(I&9LxAyBjOLrdvDFstpA8~{4b-sJNWQL0+Zv(MZZh4!L-{GP1{`w(` z1!nS^J7eEU8&DxJHzw#2oATPh8guJceo=M!5It8y=B^KZMsSS1h`JkVxPzCwlL$E$ zqk2@w!0?UKo|hLnfHkXw<1{%DNe_!)FN)ieJ|@Lq=ZaNDj_3IF#({^ln)hgDa}7%^ zMJgr82|8z`-$Y>y68&JyRqWD}<#hqZ?vN#rU9hc_QH+ib3qtH3r1sx|k05<#l{i4fefsFY=CgNjZ_ z6H4n9bYh%6T6oE0#3&Tuo?3__2xTjK`tdPHYV193CVTIdp_fZ{Xvnkj&pP9`zdQu| z^zo(kcFdDDN7v$=RD|pPx1WfmO&S;iWU@1{3?`Fz0-o+VzAx98UrBIZHw_qFjC7Jm z943c?o;h%DKe1kbPa;e&#v$AbtK?gGM|^yW@%x?7)33L;Vk!&RUaNlb*g(2>ZrP6R z1HJb|wwN;bR&g+?DaVZi2Peg65&wnr9A8(_+!zUQEKWzpafCe}S20T@1bf`Gm_gW2 z(k>C4wQp5|=cqvLU`=VZ1!ABV<8nN(XVG5wZ}1o>zQz^9bDhZwm%PR@RKE?M%W9Dz zF6D{qH3A5%3MJ>|i;%)0wP`W>JzFp&_sD__!L7@URj#N5tP&|$@8vYYQaHMV#1+*$ zS9a^HYJ8qGa_wI6CT=Ke9}d9X@|q=X#=8iiuX~alh_GQT>xPN-!gc^0xA+2CsUok~ 
zNAVTzU<#ebUn)d3C|-G-y7XjLaeeo%nrrbF|CMr-!J~-(8!rMngR2%|yQaJuFBw(? zHeg26&oZ1@sW z)6KP(5-+P2jTh=A$%SZ#d_#3~2uYCLFn?4o){ zIG3`qVyKJrZDH0kJvyteC0`{&F~S0vc*-Iggm6i`fn?k2vZxmlEV39H@iU@ImRt7f zSSgy|2r;IxNWbUbtW34n{TmDlQ5jBHzgTS*)%VvY+M6peA#~A60X@b&JUCGCHddaS zWbiOIZl3!r=A!FxSx{PMy?d^)Z-nOYC1!MMOWmeZcs(eMNd=YdM;_JGM zOd>3C(G7%>$HFZuwT9mlYsqOo+e!ol@xEp61E((_>;SW0Tdx5d*nZh-PMgK*5@CQ)bPRGFHmM0^ctETJny8vy>M0@!De80R_ zwjt0)^5r-%s<&6IG6-#=)M0RX@{~An>I9L14wHI=bS0{d+}#jWsYh|K3;hHSXm_V0 zi)?)Yv@{(F8BmnokCYnH*-zp7d|L|PBlLJu7EYNToWnSH9Z5~)vMf|TJx2my)nWpv zZ088inE2{?i6P;cs8)L+FwJnl*3$x;gZ&sUhDFvcUy1z#xK@}e-TTSOHodZ9gNcsd zV{%wGzT+93lme`xpf)#cEy*s&{3{p;C|G8uPalHnB-fvug41u5v2^GnmRbe9{n)|Y zLyVK1yxRa5zAr}mKltZ_j5x+252EbgY{t_7+4G|@Ve%I?nFxq*k;CLjX%dz|AjuEM zp^YmGxR zu?r%BUN4oyd&%>=iI~6EraVEwG@*@L6V{#@x(t_YZ&;FEry~f7j4{MXTt9GnZ^YgS zyN^@khj*9(dAwWX@fHjOD)ITTSU1poig%m#3WM@811y95dx~VCZH2D-OnV_lrP7Od zX|cK2$cyqbHnn%9K-(cb#G!<7!}vQ~4bTQWs}cwuwg%D^Z&;di&+%=Mom7q> zj`~H#)?>`_?zU1Pe5(C%d<$3IENrv&+5fJLgip$!$a>1Pjl7OwC05nS-;B@K@HiL*Tgl{2Q5UsYN@N zq?pw>st09udsxoMw>J&)$Yl#35<(S7uMW$~zvRPZ^AA!JDMt*I;a@Lv=dY*6Rvgp8 zp{KO%`$s=`i9qTbvs3;pU>Nj#Z$Dvn-Mttno+zf>aRwP!dVt)&dh)ASljU~X&%dca zckMGqp-==x8j<*dMTH176a(y<(@Bh}AISR(ZtF1(*=vCwa} za?k};sltzs{wiLv4s3{nAU_O-ypgyFsamt_F_>nhXZ9Qt2pe7r>Lm&w z0zLG%ARNegO@Kk+mh&T-$Jrg^VY@@Q99=(Ns;iXOu{Z)aBBC2R>9tI;3fURv&9=Dw zdWa0*>rcz2eXqy*Luj%SV@wX8Fzc>NFb}Hyd1hE?V=_cmk7fly!_Jj$>_{lf<-xM( zq#L6dy&iN*WBN|sI7=@7hnVjM0=boydk7<7(Vg2$pg(2RrP;G6z$T`A`uLe=L67{p z!5R(eSg_c5VYTN=*vXpA~R@;5bj)OQ^ zbm9?~uW)4FJspU28wC8tPM-9db9lLpK~-y8M$!v|_7!5sEuwIR1s{kEod{s+`7344DMAJh33WlXxmU8^KkY#&Jj1Bg2zYWY@vsyhoKV$|`B{vK2oEMy zvDjdj(CaHcKA{vI5JQYb;l;yZk`aMbDWrmI$lm~mt=lDk4%E5u#Gd}|Pkz$q>GjKa z4R;y}E3Y_I($`xu33^ae8xDvN=k8%UvE$#dP;P0&?VOF|R4soOPo`&7>o8cI3abPe zNg(qSTFKYqtPnLg%*vi%XGoq4Okg`#dfNfGGmPM*HB)`%nP)61Q1*K^FH#UwAJOd? 
zI4@pyyDiR7$7c#ib9Go?Hil0NXI2v62oMIB6X%}AoQ%_<_w?v2#cW9fKi?@qw zAVbQZLiR2RwQ3D_fa;1R=&jM%I2MqF7Wu%%gq^h?Ye6xe7vp5zYips6 zX7^=atq_JBSa=}+ZsfRtRCfy8LeL56H@vUjU-tLJA1d{mNoE-<@fgMLhDG!-f_+>< z#rzA|W3dq9%Lt~B)!>R8^+FeW`rRj#T>T_g0@q6SLIOt0WkTPDbP4nNy^fU7>F8UH zfE=YUv7_#jerb1!L40E(ggCRR#Gbl{XDy;0Pp|r7e(*j(US%IZZlEY|qDqsi?X9Kh zc=D&(GnYap*z=u2N&@>L*2%lNK^#B0*uIj{f7&+{1a65Mbew3PQc@h8xUSmNn_e|k zBf|_4dA-fW77$!npHwzoJg3K_Z3|Dfm6iAi#2*H3RxSZsjj4#inZo$&r`jLJ+V;C% z89I3(t>^}F)DYMOdj#wyCOp~pQj5dLI?O|-t!9QtMO-!GV-9cpwx;7qG;0T3>Mr|M zT#&6^`;h`eJgnTtHDr*WU$ysW%T=mYEC9`?+qB5(4sE7r)#OafAL|Q1P*qtiqTrn* z2XiIGcKm7k11~Y5&Z}3f%K@En!vFfoulb*!#ks~b-91@Hffp-7YI%?WrEwehE=hj0 zI=>e3_a39t6nz!(Pyl!cswUmZ3qIM41%A+GW-K}Dog|SLA;%p;N{RtG>5AVi22VOz z@ZGM&df|)`;)xf$2;dn1{Ri#hrC7B|@XQ^U`TJ&1$?&1thhG7~Y`+jG6f7tBlo-BB zCCNQZCqi*iu7`LX<9#Xqdx*{JdCrSvT)U(kfG<-bIHaF)2h~Ep*#-~1Q{3Z#4Nz=~ ztM?%gZk){(l#@J_cs1Qc(u)B`>09WbppYN%w0QFuS?{641o#eEC%DvOno2AUYscOc zFdhSe>#MKVuvmIW%BLSH-PsfuuQhMEw0$ zcpD6c<44HL^Wv#6_P2}Gxj~)@Xo9i{;_ZU5cd4MoZ$o-|fGLwU<)US&2C_$3sdz;< z@QKssJ8{7{Sbz1~uleam%6+KSg~W)9eW5A+bFrX8ix8do=*CMeC1;+DOmF@)3x;2M zE^&lZ>?c2n-RE-o9V<;$F0E}iccZ-m z#n3J)Y5gF^2Z=1`h?l~Tou#oK8#iok#O9tpzR>>I%3?aQc6~dZ)gOozg@wLJ9*)wH z;-Ao5z2wfh1QqIJ-*0(2-EN7YU)G+2AO+F-55H4(x1y4BsF?D ze*59u?JMzHl}T4$t@!LxyL7>m>W#GV2%z7F6M+(JU%OT+RUW?8i8}iFgdI1c^}Pt&xv*T4kWdV8h1XU8}jIIeUvR94^p4n za$w;VD81dODdO4qb`u16Ms0TT;wk{X;x``YO@jF$ke_d9N^~*U-~V6#Wu8Mk;8uGh zmR2_?u$6^#5~~T#Xy2qevIX?H1-UhTEm)y+Vb?Ljm5YU7%r;Yr0sp)`DS7G2Xe|r4 z?Q*jOkO#`#;*|Lt6dRvjo{l4@)+SPT1;~I)ftt}_uzXSy8Aok>TW+wAg@_x_E^?z} zu;p~_Bc6rM)XT$*@fmZ1A9N+wxwHQ(-m+Bt(KWy%-G0Sq)al_c;b1yh13Vc@p zQmpSJU)aYVju&#b-0(78Z8u(w*Hd!$T9({JNvvZKY6+^*OWD3^-;LSbe52-E2$v@9 z!wdA~lb!>Zmjw{WhtI{w+I_4lDC+Sq*#2DlM_V=IIj0V>aZV^zOIjnQ% zrJ7XM@ZI7kvis=wI`J!sT66ys>Pm$OZ>*@eC3+Iai|>dUT%WoG&9YSjJ1A;TAgc7C z9*~xMkOBPZ2ZHpD3uhX=C0HE6h<&40kq8fI$nH=wI%bSU$FPPHkrU(;?(7mjvZWAR z^31-x*xr7}PZ(_A>>Q0sTb&^#jpYD%1el?;75Co?!yRNxT$Yq=^XY)KfZpgCU^(gu zcGB8#S5TY!1NxC%H*8gGoC3Un{Kmq@C0D=WVc*Bn(5di4p_-4ofcT&2lbt^a$ zL`sGsv9jy1VdKwUqStmNgr$K$T6svHrBDW!(FQMv&bWobj-``sw_c@*i7a!~@9=5QE+iQ91Ra_27kU5N2lN+G4A~RGzZ2#WN ziKz=AU8lyf096Cq6d>3S#_a>~{YM#6>Ku_+5rm8J;&5X87ARZ$D)E&di5 zipYKJ6nprMS6fUrwQR0#BtA7}9}1I6#ZLttQH|WB9Gvg> z6;mils+8W}qkVw-9mOHU5Q3d}{#NfYa>6C#)r#A++~9wI@ebR!A3GU5 zi`=7_!X&8#^RdzbX3XQdB`eD7+N;10_es<7qc2|}SvUS1p&uwG?4i6)stO^1w6bv^ zUD#22=!T_j6Jc#-%jbwg#{N?C+r~MYKjc1+A#*Ig?H*pl`hZXU)cGFmkGW1LB;#EX z#epr5V;WC`0!jxqi`tPc1D#EO0a+^{O=v*^-13ylU%NdWJBS_NXVE%Nnbme>xkZzE z(}Vfv?k!#T#prvTt?`+?gBtFEhaM=74M5Y%TZ`#~n>B2o#5p!=;g}TwFpH9I$@L+9 zPWQJGZC4X_=IWWxool3~SWqHl>g>E^ML4-lhqGEe;#jmY-TZs87jLxL={Nn0aprNp z8{iWdzF3#K!qD5L&f|;+hP}1vN9~SIRD>};Mh~J2yz4zU`MWviu#-!#$W>9e^TeD! 
zo(A%(Rn>%;d)yj$gs51isi5sDa?2cJygC#N~PSc z0vhz8r|QMI7|?GvK1^{BK;nV3-hxDt7#U{fI`zlN_< zOBHifb8NnRVx>ji*%^~5PKB7xCsbnb-$Z;sx{Ck_t{N0Kg!1$~pQe0;Z6VV;{z1Wu zo}KTlSRQkvQ!zaA&Icd1(u7A9QMYi>t>Xp>(@(>VjzyB|f$+tjoug+e-9wJm$P7a` zL%@LXNcfsJLPjxzK)tv5Hpy=uW$!hCk>LeXlwLiFU4LH9;^XzIBSnTsNT;;jiWd_5 zm17hi3VY9;ljb{0wgg$L#APsrN{u4W4fJTd2mqL*(p+E}NphEq5sVum9Q&NH|0aP` zmx&V16=s5=II&cbe-*c=6)$=xWNwC%2PAw6u9sI&QA^oP9D!I~FoQ3};T|^{@!=Ne zCnHyBjJ1rYB4s*B{Bzt9k6NM-xKM2;mX9gh_mLHw*C>I#O5uVAF*wMD?$Ls@nOVjW zq2Zk@Wh4iPknB-Gt7*8PK)t)I4>sG~;$njr!KaDgzSbrzK1(PhX$Jqx>LHpFDANnW z9b0bCE6Ak3y#ltox3f`x-w!KAbu5tgaM!5p3I8GlO}S;dM?+R{VEHp0%ua=J4-#yu zojhfWtL=8|ABpBy{hHp55HGBT7{pLfG3S~W^g9LI6bnhAohGwmcl6L=fGVXXJB zAA9N3P+U5|RIqUR8c36ei8kIS4&ctd+LGyF_s>y`swMA(A|2ifQ?3_7u$R*X3-Y5G z^;Rl1Vwx01dF0LLl|i`Lj*~svo4^2FWMos-5jguXPfFcfi8bM{C?k)7`iU5F2)?>o z$Jp6b{16HV#v|@P`LV9h31%Y4T}Q=lfFOD4o&&U<-%!~lWL>~9sxIGPqpz9_dk=7} zXEwTT@yy~f1u4_*;4LU4zja+pO#ZIb@F4thE5@{xd18sgoMmNn>0<)r zi9+buSt}K4WA`t{D|$G;8d2k-WV(FQRlg2Y?Dg&mLcJ25rBC&8R5a&1F|^NKXwQV$ z!e~NpL*}rt|g^D4VfiPJJ=^5#S41oUAnjOKSiFf;KDiw zqwcZMP&8D{z<@f+eKY6ZgE|5pRCb;>YZ51fdm`RDyV7PCZpB~J^&l`EUmh3jzzT?u zyZQGhK1NwXa-)iHEErHw0+YZEki$U;Vw3Jj`M3ngGFNd80&*ALi`fEKfA5rPnBRQz z%O^kZyDzjqxZro6D6`eAvlW8@DMQng6LXk(_+z6g20As}3Vluqmv>W3Dm@!uHeS>X zeY!s-ap~#bw9_B^AkNhcn7ACDe!V^Uxd#rf{6;+Q^s&5HDn7)z^jw71+woIur1oy? zFdBdh?4TfVon3&RN;$ba9xOEna&yzisPn?`Cdc=?2ST34wRpDLs&Lz<_1?2Zh>XOz zNVdfkPoqygp*;ukO`*e;Fd*&KT(S_`0KLDJh68r=eqyh0w7h*gO}<2DM1JUKw_kJ< zncpf6JRf5Q@uk6f@@%mA$m55s?>c2Y7)`FoM{!=oI8~gs6dmtu7=I4#+Q|+XW)k}2 zh2tK{3la8WGCy@7;N%r$hl#gUR3l^KU&LFz$)xW!wsU{x{{qdFeY=^+`FQx*har^L48 z1*YcHhinVCY8<1d--iL<7vi7e9|HC|Hw(Gp+)Fol9q^nT^qyQ?XsgTqP-2&B)+KdV zNJh%X9&izzvn$AN#5XV4`Ww5z$vb;H#o-qds7HaSJ%5&(CO^k#fmk>8ePJoA5!x}5 zHqlA$X}Ev6RvBe=*MRy{i4m4EnxiUXsFPwPfwvI1!C803P1S(HarE)5m0Mh4x(V0_?5ClcFx`AkUmySeoA z=W$w}-DeoCdk9kdBZ|oE#u6Ru2fLH@h8zJsY{!3d z17;fKjMzRg)VJG*?|l$MK`;cIu*W5!!vdT1d;0jj_G~;9;$1;*AXTxn4#5#lNUHcO zq`MGVDIEmBaH|5y%(uL9pHeibY&#&~hTY01EdnMyMSIQ;#w^I~Us3wUIWt2p#CKxS zstWYT$^f)emsQ71(v!K_=4WF+SeJ7-2hyZT%)!1xMlv>l>(?)bFtV_da6pyJW|1ML zFDe*4)^b&lYA|LRt9jQ|vB0e%R+igjj31#lcS?0=aF+5^c9d{4UM6(-lvo7+=gE&^ z*e9ECqjZJk6uCyEK(QntnRh#Bdf7(A6VUvbWv7w@3 zn|mO5aMtTSv5QNdloO%Sh5CZ_T9~M05i)(+@hCy4v35!u{1r&j@nr0Mq|5--?3BT6 zWKU+0FY8IPRBq`UO2=#xSRwJqvn;Iyps&;W!GgEZtwkH#O6>sau^~0Ce=9u8he4n_ z=pVHt4yqNc76$|3H5uT!iToB8eb07eka|I^GN2Ruu^6iwgu+Y97@6V^Tgph$^2See zIZS4E11_Xkk$0bd^~sOgyCK#Hxq!=lLzhtqq63lWJLb*D;v*xg_S_yLxK;>{_tJvczEhK%G1nHaDaJCdx!;n#N|GNq?j3v=F z`JIP8r4!eQa^Y>vwBnwtqKkJ!F`%;?t|Cf3R|{m32Eyy{kPGeEP$Oz%h05D6_6$ZX zraRVxrDmxgz?l0r5?10-Z&Y{dOo`_aUsij7f>?B*Lugo7LZ7{G>Cy}N|2O_C#`Ok) zA8wiROdghbK(XsscOuu{q)f<@Ph!tDQ2VLuuqr>6dHGKvHB^byz2{kNikFbif$~Yq zSSgJvs?UBoJ6~dem)^02PPa;GfM;1~!}v41gGBa}L6!|_V(DlA?q_We+P#>U5}G;0 zuE4+vJx&mo_!-TXu^ms!X$qvoibu)ldn6&n>=kw+-0wHO0WK5^N3oe{$l7!pezcB=B26K zap*Azv&E<^9?n_(8cVNJLv0-*2jJV|Jhs125q`Oq=xOiw6T=bSL73fQvWnN>P|O$F zT*xTzQuA?PDL&fD(>_xrsi&7j`ihlboo=_L6EYZ6O26l~q2lS~663sdp?xU^*3&UY z4&^Ui;xpaml z(PI;O4Rx7og~Vkf{m2|UGqU<9tX@~hDjus>4`%F0isvs-D@ME^WY7z(>l)+0JM-nX zG?{xUChECrw0H;mB|4A2A4@GOcrEwj^zm%V9u(**_^h$GgQL6bHc(puQ1K2axHBl` zDx3*2dMuwXV{t&v!C91D80GQ3_WwN-rBQi+5J zXsK=>B9g@#0xyXbA=YoV3(v(8KrQ7)@TO44Xmak(6ww(k?`?7A;>Sd!()W(6u3}Kl zVtc4m@hJUh@v-+eP6~;34H2qE4<@K`LcAe{iz~vhHp6F*0ZPfllFUHkEfF8lH;Sx~ zAy#9mg7?y_5lA-NjF~)a8#zb}!{hTLn@uh(>+h!@|MsgihOF29xTYEBD#qPm`GaAe zR?VrB8ZNZ2#1tshdstFd;ZW+zNl8?Jwcl9Y=LvnD472hymi?#O+n;{>)3!4Zm~G0$ zD9G)mHww8Om-_a{R0EK}R%Q!dz;#qd4X*AD(`% z>Hj?WrdMG?##|AJc^ruQgOj_%lJ-E1=~&`xF%h>~eR|`_;Z(Bi=MAA_wxpT_f_JOS 
z)qB|Pp^Q8j+O$7hr&{ks$y2l$6<9g=8E`3^t4|>IU!MrhB_&JVj+uGUVu z{stoS{d82E+A$kEeLnU1VXM8l=ueUp?uA+Mpk*41JOT1KW59TTt$;2ZG=*z%FUo9X zb?g*A(3q-GnGh=Nc)6qxjTz8Hp^T2Gu>W36uqTye_XrYTx&IT=gGf_PV%J};eIX07 zjO8r3i`}udQ4T~;gG^K9aI~&Og4h_E5Kz7KhcOz6Jk&z4h=|JvG?9=6NlFoIzwIV9bAb#?)8u6u-VWE-3w9@aP5qT( z*A&kGAZBwG1&==!F>I&5jL8Wit4Yt?kK-(zc$Nqe(;lTzwDl8?jM$R#%t`2GII!_d zRUCCAW?7*ROIVN$iy>5%N_?WnLobq)LM7w_K5+isl{4Bnju_gS=!Z3>v>^4GVyVg2 zL%(d-cAC1qcaD9mh(z}O+-;OpX9U0WzLll5o`ubyen%qlQ5={zoYqP8E7&aMYUsSY z_2SWyfkZ(*RfLaAfT5IQKSta8Z8CkM6@pyAM8g*A;`1Ny-Jh;3(&YC;96;|hGlhAWM6a9kA5yl8oDx<-Z2 z?mEY_#f5u0>FJQokbry`Yx|pa`dvSp6fILQ0nM(&V|dAVM4I9U@mcT0Th0Wys8%em zVQMUM3&r#hdv{e3&Iqn?RCELyo=g*+SKFqLCj_Z?jK(`NBe-SPh0EEx|JY7{?b+Z7 z)Jk7+9TXts&W!J{0N+C2#GeTBY zp#G*sK*|2a$Wx`TM9Y<{fJMGu;`u!>{2N)&T5SM}(27X{ZIo3iQoyNHQT7-)%O|Tc zq#moNT=?^CdMXAgB=C+0%C=#$7TcSPn}WjWfvleUkVpco$#S_Z;jCcWgFlsLJ6 z+TM#732xjYl9p-=%TghM_#yE?iZ5Gbbd*b4gR}*90%PI ze!_vE-c$&Ge%k+EJN@$*m8i7of~y1}CPaL;#|EWoKReUjiZRmYEWUGi($>?pkxOwN z92qB478g;x{K~je>u!%U!j(&9`390*D z<03_`@0U|}$bGx!ItZ7ZQhVa`m#zPw-^A<(oL;xm5NEN3xwsfGE6<$}yl(+xv1c!| z4`R>M{6RSZdsaD<7?0e&#UG=LEDlc|IB{x*n_iP%^NFm=G+`FkU<#trYFF|j zmE}M+`aO}h9BaRW4mjrvujrbW+CPhV|FaZ^vk$!MxTPvQ!@}g4IHv0Dz!l+vQ)G6W zA{d8T*B-_220Q)^fnbg(D6wS%w~i8Xo(M(51YZ?3^YjN~4Ekc>ToDM>m1y!PBGb+6 z*ceVv;qYT-y1p3N)>q|!)kp6fDCW?nak zLn_XA4>A48J8gE=&mxRGxSy`0Y8oR$d&CMW&#u?p6+|7gQBn)x0OCNj z;xFW#OjJt+ozM`SLSZFM_4o4u)iy{NM7#!@jlRPYxl*qazk-MFZwONhm318gNxT|w znwohrd-=^AoS5*N+Z@*YVY<#-Xw;U<=4!&=M!RY6D#AOFRlwh+EPp%)qUHvADYbo1 zE{qY(u+w`*yfh3o7H6Yp>+rC z1J#?h_IKh7JLfygaoOS{st#8?h2uReWkF?q->=X_@6We@MU_9$tz=O_&kau$%2_mo6hn{r8eZotCZ5OLLyQBm%t)#>GsXQekfr%>7)H0AF~$ZfVwPTI1KxvtXrzEu44^*-cB`{_S)*=a;(TA z@8xR3GDx*ICi5rnzb9f{wvYmbfB_on!lM16Jm~eB3+>BJMo1)N1<s;(vwM=XotyM~rld*mv0Y9@LCb1U`WQQQiuSXp%vDl+)8G}sfv|WbJEe85AI(-q3|+Hy?6#}sHUdj@i+D_ ztMAnGBUx2qJ9B%5#8B+3#GGD3Nc`&+9O257l~jED>3@o)V-LWULZaS%x4m142ynz9 zMuQ~o=N_?#*ijDOdlb{%;DsQ3e#e5Sr zWUs~`F$Y~L`^?dd_Yjm;Sfi9oMgk?D>Ay0PZ;GE@B1-qENwKA}QL&|{n}TX?k5QSB z)UDx{+mWacG1Z>v=%z~4A&CfFVl%w-BUjYuRFBJ7;%(R3D=+zci4TC(pX^zEDn)5$ z9Bqt2u-d&pN$R>wV~^?AR>UX_acfWkp=MXwtysfJ+K24Mzk`!`E}~&qk)cAOyF{?X zT*{QZ$OA6T$6aXU1E=L9DnUN^jcU=x8|K>=;?2M!D=Qx5?6?PfQEpGaefld>Zen!= zMaNQ{yM3+4(H#)nx_5CDEDQyf(LGQWC`Ziy*(qtICpLWgLHDf^;6uS~^Bz>I@2WEINwP2!g>J!0r5mS~&gAR#d(TpW4Jx&ta#7arcMnoT_p?l6RQ*S`BD`Sb| zOGq1f9|@0kK-u5Gz@t_}y#JO8cpv1SXK1Y@A37Cb1jteal?Bd4khQ6P55XLCyk$aH z21Ui=Jc)Mlw=qdt;jG5{pMH!La{8d21t4DyRign&5`7-rXkb*rcM0iyES z!wRXLP?~n(iAMd8}*3>UU1=wpo z3bYY{7&JdhbAY=eac@kLYJY+V;Y3@+6r~&0{2|Sq!OB_jf-Z3(P)$1pNiHz~=1>^8 zvLy6|qJ5$Ld+*(xDn}G=oFwzR2wWw>O+migc=+G{cVmPJ;?D14O#grTI|Sm*_Ijr8 z&!HQTLtYRV6ea8ZIN*t;2Rr$zvz$$n+k^qiaU+H{STbR)8gz7elf znC1I*hKK2ioJd`{478k#6VeUSAlApbMFfEk>04I})YUe*GV*qozq85g2nKZsaC(>QEqS% z;wNobm$Hw&n8{hGaOF}cB|wL5ElVQ~0Ec!ewSl-? 
zKCeeTmd71d0uliETnN|~9@vo>$u`FnGvns4NZp}dB~*&S)0xI_-8nD_F1l!L2Ny+L zP%%bOgv6bB20x6?R_Lu2%PZvI$~DjCa0n9>bFRvxFsVY&JAl|AG}J$u z-50iq@14LEq#pf4>MS*uOh^|--Nr^O6Rp0^BIu=jT)5CKSnzsM;u_zHL8FfMPab&f z2y;>!@SxP0SwKtl%7?K59D!eTN8fq_bwMUVZ!J|0veYj(unx%irkA2s;ZdZ?$PNm{ z=DRU0^<;Ddk#)>8Qhej&>e60v?~j|9hA`7&i)8I6ULJqL+ESo4#a6M!#Zs|VKK@?3 zSD^P&VqBO5x7vcwC=;TrL}$FjV(14w``Bv?aEux+Asi`$QsWa#!V~3$LRpBhvh@H6 zwZ#SJ_o@ds(YCTD#^9`2G4TmQ6!bkWgx>do7;ImT_YM6OZ##WFgE}3iWRdFgQl|Qd z`E&ZSbGI?zOy0iugFL!ubxJ5?x)STQ+DbQ19Klc8Zd!G#ZJ|_w1C_lM@(0@o_8-@0 z8fcw}QlGW^!!xHoy*Z(n zpCyehmk`!ckTF&8IN~vCe2$#MQHXy*(AAKEu4#TS=qSr|+zX(3fl#|>jh-CqK||Si zSXk6L@!u3nzAgzj5Vs3&#|nLOtsqcFDIX(ody8fRaCi{+76G1w4?EbnemADO%Alw2Os*0uiRsb>W2;g4 zFy&sDMtfM~N+|rEG7_PE#R{K3{#3gVFGNhrVWM^`G~Jy?-Cgzi+~%UYSE6TwHFCQ# z%z)iOE}wpj_G3@Jk$-+2&GICP$hufYhuF|w5$@!xzFu$2lqD0;8e~V63JFETFJ#ZH zXS-`6T69PFI6j1ei+vm~KNeH=g$@9K4wEFLL@=rXsd>Z}+Ty}=JW1@Anxnlcq>Gx|0_H)tVX z?Gna)_Z~u~5F-~!o2HYqA3xJ(=R(>b5MT3y0h>md#f74`62LuaC7eEwO5GpI%h)-x zc2qez7)%%_DI{YX%@fClpqNI!L%`;p|LKfq^4Wjp|7)KO!57G_O2*#EdBr>TPWG%` zIDsf3P!l2I}qRoEtihbLD<}>XxpNWAkwrS5uN~^3r+&=1* zbB7(Ku7<9B$(zI02&hzXfOyD+R9CHUIe`$5?uULOY{gheC>SJW|4w@$_TsENSqW&& z?&E}UUNX4@L)XC;8U5F?eDva-wh&M1-aP6sB$T?7?|$%X43cV`wKj^JtD2~#O4oFo zj&G~>7VIh?j#V}1ID%y(CV{_&^X(m6?r_>+`IVa4DNpwMgcN>d>BYrD03Y}tv|yBw z@QRU@qMmc>JN^!Brc`>)s#VEkcmPFL_kt{*ggD3d1{sBVsl9kPB!ygC!yuwia7H~V zQbc{|IT-h1NkO#-Z6W@`@WZk&jv!jWIKFrelkHq-KYSI_yj(fs6lwZbRIFS##PGO$ z3*st|)?9NsGra15IXm*F|9pX*=VP=p^sbpJjb$b=rC6YnAu2a#P%PbPCBWsCA_$tM zbBRU~qyTrPmiLHChMt~^1?8?`!hJ)Tj(K0AvQb?GYC~x8;`*KJeO{$Y0$pdTEXYT} z#f+CAyWAn)^_e_6*@BJFk-3+Lb{dZ@H(L-+Xli3WwF^V!OC{f8#L~}L@*eRlMgUC{B_}aTkz;Pg55{(GiDlgzk)X{QnqiBDyQ&OpVO2g4GeVVC7vgv8W8DGDB9)ZN9E1WD5L^4PNMNMEq{BW)< zI^c|a5WIDkruf|DhFMeA46bIZ^kiFDjsIp(5;N*a&$%cg(QramgryR~Di{`BjljLI z?5y^IJAeB%Z%)rhMU%zCc0AU(MryD?xqTNE6nvPZa&lcSUW#AtJW_RlEX3Y(Af9{_ zW8==|hkIG^NdqsS63tNXxacPt-^XL+I8_Om#KFB6FB7H!E^2}4p^LG{6sr$>*2>P1 zk+`2y{^1Ai@`iDT=k6HC${ov@45;C5>0^dr}fvExZ#SLL>e*xp9xY&_18D_wseL4Q~Q7h!P32F`3Hxf819VajvUrF-a)9;c?;R#Y3 zUl^xm|9i~e z_%D25bEO}Uey}XeQ*C-OMhD8SME%&8UXn9DQz9rnx$nfSL!#3W}+qJ9Xl z>t$~scd`QQuI2ewTgtM|J&JNU77$WDim<{Q?c&=ZN4<#KBQ3W4%qxzs zi_t%nF7sLn-CxhQgTg3`JM!mvSm#U(v4pbJL{BY1U&Pm8*us=LM_6EMa?=5*Fxo9e zc;Q>{-t*4xCFH1Pqk?ijwku4FMws?QuljwTGA9^Uz<3_ZaT>+-#72yBvz7Dcmb@bZK&O&2WZ#oC$pD#$_xsxvI-=#pAH9 z$(0lhyTB4XHF$&kBa7e3KDoIYs^u>o9ix-~2Ig6d9AhIB;W;WiS-95%^{bqO*7+x& zrkmtO_j&-iC#xtj-#^n2goVTE+-E-VehCHUnyHUlToXk^=)T5@@V53wXH;d5^Zcy4 z!(45zyb`C1yBuLF2G!J{IqbdEQ%glW2q_1Gy7bI5F(@uoDHI0}pMBc~ved2aH~?5ZaLeB3XJa!@Ha&W;;-X02SKAEc*K2ZvjEx$5X;44--v#n_}$m|}hV zC0@w+78r%2w4X|mC*s@0s@%k{slb7vo5jSWD=8y}Deg%uQ@BD|?MdAD@M3%KgOJF% z_W5`;5a^5T6$|eK7LNR`kWvNQmnN3|Cp@{mV&>0)VGM~{_<741cs_BPJF;*{!Yff7 z%J1g>9UKZ^DHfta^bV6Ex%@gdTj-}C&~JL8s#;0Z@?qJnUigKGY-lUuGdtmP{4;QV z{&UO!I{Ob#d=eeG=7~)Lz;imOT^=qMG}E6x`SG~HLk696op=`zRr_0%o1;nmrI;MN z+TjpZVoK56bX&~`j0D1*Th41IW>`JH`+R#QB%YF9!{pEGVk#g#T-mYvpKc%eognlG zN(Kt9lh73FrdX1vk3}{t3=T}}fupJ`#LKowg z(7IkMmG{xl88zqrQKkP1-mWT|K%YuJv68_>VO=d0>REb@ih2w3spou`cR)e%_m$dm zj=^&J_=OkRAH>Fvxrlh&YB(Qn<*r9JRs4Qc`7u5R;9Z~bUw4P?jd*INk?0KvdaC<(r*NVuzR zp|RUNHd+hiV!3QvaIt-X4s(}IAmV8O^G@Z$B#}47h&=PGQoKm(G8+zLXW>e5d=;|u zQr%(8t&lLSw|CZd%3a1)rUqZJ5$Jb3uv*wj#1634%&5Xr;yOO(m2XqR8w+_*_KSfO zyzFER`~uJgtsgH^m`7!@z2w^psX7^rqGqJ46?)_zIU>S9V_ga9iX-F)+#7U0V0_M8 zRn*HXG|A1UHn`nP_s_^|7T+qiT2+3&6VI5DI z*#zb}iAS3kqi(0k2^)|-0L1+-lfjsu4$43J4cofl#s4j>@n$>mQ}$TAwO=f$+YzC| z(WYQo?7%ox|E4|fJyC#+L3@?(SxRCBtF}Y>Da@qUsIyWE-fXNL?(kk0)mEP%{Lx8h z4q`yxL7#SK`seW=S=a4Rv4z)hX?lun%tgVdgG?F@kKZ>gj^%~eStX`!g^f8NrnpB4 
zS)~+fpH;k+-LybV5aDzF$9lV4Fq?5M=^3ZSo3k-o!wu6t8?PZvXt!2g+$FralPXH2 zz=v(WM0vO@0f%Cl5Pb5j%JI2Mi)P=0%1*|*b`6rQIiG$FXA#h`P&lbPK70^DDXrFx zMx8o470cYfSfxkW)qKTYsPv2?7k?ht!^A;*Erv%KhL75lPrPK5HzTQEKLdE)zB z6`i*%*VDC-qmGb_^4*|FD9-(3|3H4*{UWRKG~QnyW)_xXS%j{HWE zWvYPSd2<2z)s*Q}Z>jan$>lq|Nt({qp$=YQzVezA(;6Xg{hrLjj4+{TQ}#kVQd5{u z4Jasg9}Dw#n_jSZq0rE@h-t}7;B4sDt97i-k`3jCc_C&=&_qq1D*gKe9 z0#1sX144zH(zAH;mibhB=DE1VP9Hzp{xJT4bnb?K@XY6C7p83!+(k)J`yzD(=r8qz zLMkBWLdY!1d0ObWMAN_uL%)4Cj#f_|zvj2hi;%`;5uXEdVYOMnxgm6qo~v%jN#)(K z2qK8X)Jc_3N&^N%RuBY8E1=z*x3TwcPcO$3pV`h707Bu%+)f{V5gEzg^y%ZfW`8L# zP7YI?7G<6){q&dqEug~g?*C|?{;UwxxEz$u<)6pM1^H1jszk$74X{K=Nf&)oknyY~ zQ)z{VLs0@cS;u2|y)7)h84^RV`f3^PkGgXg>#dZ@lO(Lk29dnQ*98hU4s1#{Y`<@{ zC%^XoaMV?7SrL=mLxrvEG{JG&iOc+t+XZjE(oz+w3UMTxqQ)>_s9laJ;02WJ*k}j2 zzJJ*xv=u2ieH;%|37sqz0ihBG85IXM%xo0Z12vL0x|<*gcBPiKgq69 za&(P^lc{o;(HT|6K|EuDZ-d2{xg(r}io$-!^fD+>dj9gLs9hAxk2V_TNyW~>Dw|!M zp8CTx>H4o_%aL9e!|16O8KEISP9s6v%yLe6vy~Q1XR#DGX;!a9@Vd#E@l|0gxxV-# zkVePwRty(2Qix9OBH{9w8f}HZ!r*AdcNx-Bn@z|<_7=L?TwwL`b-lYqJc2zJTYF40 zE|)L`hb@^KTn;#sYD`cFUg(-#Wk4O^;uem@!s2S1_ViI^{g*ZgBr3OV#%khGa7z4r z5sN6$%_SVup$6f%Vd2IPwEWjgy7X=3R+Y)#q%k8$DFjh7FDt*ay!ZxfHl_=sf$+o{ zoHIY4-XAluSMzfC&X=I(+B+x+A%%-5eVqaOmDS~TC!V#z-CL-&NI~5t;rv(PMPP8` zEu$C2+zV34@{Atc!p>F1xP-%5;IEeCqc)4_++$23qQzduC!`i)A? zf>^~m*U*H@+hr}Wr(Y#O+EZG=fH}MyTizGRZzi3Xeow_F?HA`O@olPa4>IqpM<KBB#jaRS)oB=$?{)OUB1`^b z1#N5~#0aQUOz(#qEK;Ypx*w#nMJ~PIUF!&^RTJ0a?^ItEB%PZ^ zb)6J_;^vP$h#hHbomY$P(Fi;n)mLH=6Eq*f*({Q+x|@5BB218>Sgjz+_Wt(IV%bDo zI!gLy_Y^2a5p#afW;`4Ua&eaukaBPV!poX3+apQ{N&1ILx&)ateP;!;WQX92e{i2; zj}co=2)j7n-t;aK&45*30in0sSKjs$2J4*+MLJ)Mvxkw1lc*ut_q-?&P9!QsbuQNxZJ+d#d`J(3l(zlX1f`OTyuC7|9;TETxNr4Jt~AQ8o+Z=kW!s0 zmhzGWzdWTaZXQzA;+Sz|gDF$ zx={ywgAbp%6eGpR`lpuhKlm4CS|=ox%CA2m*DeHVqhH(DL@}g!%EYex-^{&RbEMaG zrWx}WW-cb?Vj{L^+ALBeRb(All$shPfUE*YoQOnKiDr>fW+DM33qXQMWL2SPcG%K5 zCc0-@J-X=VcJvKI@gPdFM42Wf>KI*>{9p6D`@=dU+wDV4*kRd}2q5$O_FjAKb$r)a zAw@`8)8;c*({GI?1<*&j&6v!TP1zIYH33=Jh*TJ+n z_=~jkZ_0xo*>aG7b*QXq3;GzUNBK@wv++wVsv&^ln`3;pD6i}t@zy|v(sb*JZAvcb z+KMgb0O#DdPn4=(=ADz&L!y}n6lA|6FHe?>JwVN&9CQWDyTf-J>kB#c*?9FebhNRh zGzuv?+k)I{)s7i)xu66BsGv^RKFYF3#OBk2=@XGHBeKhlhmgxuI#}AZ_}A8q)4cBj zlx6TxcdF@})TDXrNC}98uK~{=L{RrtB@5m9W26XM5hUw^MezQv2-9r5>5D&^(7VQH z0bA)FUP;=myy$0)elX8plRPu}L3={|N8ZH}V})BB;=RUMaC@TJ7Zg@JBKFlow;qns zRdHYlCAS5a*2@iBTZ=c?9t6c%gbR> z@aY;EK9*e-X$_9(*wCP#`MMbkigBLL>IvKep0Ryyg@SeSPZM!D0?Z=ug-E-6> zCMyk4!0^MjZh0KikvQ0T5md3dVn(W8v39~1#;I?hIb%`#oV^+`J^GK2g+67A>Xum%ze9ig#77eg;+A=E9AcQxS|je?s3mk4^}bj*&wna}5nO!efV0AB3v2 zgWH#Sj%VyDUz+0oGMaa6`xh@4kB8I5q!((0G0d8oBB>SR)c92RKcUklQ|`hBv9HY| z++)(MMi=<#g!=AS5CY-1C0#&t*?}#QQ5#q6->*;u6zsgf`fSru&8|h~ z2@AL}p*U4q9+faC&5Ti?zluj5)N9!m-UZn{qnw6DAvt^J-gey|E_mo(WLP&Yk?jhY&L=ZcS#j?MD-4q%c z@c-Zc{T(1x+a#8YMl53{1t%(;LKXH*RE&ZFG7jmwEK%dmxV9QMG;&;px7catpKsxI zQW_jLbSrp^2*glrRYHa~I1+Ad{A!FYk~s4qH2`u&-IJO`fgQRHDxYHPn(O)Vd3q{F zcgN#j6}FN3ZM4XLj_%rZCs7%&Kf6+|Gba3XI|K5SS_$@=5t2ZX0($mo_3-@2WD8A+#ZiUlLLxQNnyk0gpV)xu1$}f;Op>HX}F^M-$BV4xCdrI2In2}qQOmXEZG_y3; zLt;uzF5_Js4KG8S#jn=8HGU8|N9HyR1}Q>9s_Bhj!8W}e&@LGH@fvlPo&mQ24A@KQ zEKF2EV9-cLH*%rdDK))?qpUF(x@hs(t)^Ve4nsd3e4z=68joW>dqRQ^SpZG*H+-nT9s@vhG3aC zm58;a`m3-2D5a-8O|2Opc}>>2TrYJ=#5D?j;+D%GR82d*UnQ$)JrAkRd+XxHKbFUgT+(t-sGAEU7`L(;m*bSL9CfRFOk zsN*958cvdM=#SJP4gR%9wpYuIz%P>c?Bjkar$Je9I%6U=w4}6~t2vKUPSPvV_-Nd8 zs5c_WsVDK}*LX=rBsmUSJg}xVyn|Hjw&h>KLxb%<9EnBaE!(s9*l-6e3&>a8$EPKY;+j)>EL#A+% z92&Y%uEnc+0%j$vV~Zd!849*n*O2T1g1w+23HaI*1YijXM-ZnXnFdF}iNeD-HI6F9G6JE8QuT31;3q!;WCEoGWQ(i(k00lB@qMiM;GT3ZiC|2z+p1=W!v5H-I2 zEcx)D8VPz9>+ZouyDY~a4+dZ`iWf{g7HkU^8ACZiInYO-l#U_GopjAbxf*P=t*&SN 
zAtf3fH&;6s6H(TWhyZ0A2dZ8}Vh&rN8efsR$pJuyi7*<;)#z()A(daZy;lIIJ0~r0 zwejqZ&WxUnaq>+Q9XfIo-#bX{g(-qkc_+tXz`Ms@Q9e1cX2`+-lblwXE!UPITUbKk4RQZb(PgLP6=8vC*fXdOj z7P*}hO^Z2M0PcnF9w;JYtI_tb2kb|M0w9DfZVS%oQDkf@k?s6(oGj;vn9qhzyLQm9 zr`74D4?eVS6Q4YEtwe18jd&8e7fGH^oh-(|Nnr8hZH(n#*wO^Pk;n>Ct0*0Tmddg& zqfF!T#HeoP%TMDRv4)){TD_KurI@$y6?`Zj(U(t_OsO$d3dx;Udc#cq9n81v;6_{ z#{}ZQjOtA%Ror+-3=>0UIeRr5WOso`#+4wzE?TAd2+!RCfbDgjL1W*d(T+%o>qY61+ zh$q(+2-hYhUTXKyD|Sj}=ql6TUoL-Ui$vd`DD87<1UKK)C|&KJo0AQwNM*>}B*GV^byW;V+z+TQu0_y7smN!}+v;E9JnyCe z3F(n21+Hk#XJa=U+Mb)|NMLz(_8>JLJWP4B8yLt#S04bVA>qFJWDmciaYf&mz8ia< z95APY`<Mxjr)2HWurK;WovnJ=+uxm9U{?S-=0is$VQ>B^Bd zu~Fvc?M&v0Mg?Eh@ExrM6b-SbQ_~?4V23Y{<*I#~0prhvp@RttVQV-FFt2@Pomf&3 zkjr*HR;KLXZI>MjnDIOAQZ2X3RkuXw0Gj*0!DPdfk|bByPxBsyte+h9!Y8G@f@?;R zO0|Br2<&orYHiz=3qm2)++gFVSB8SHNnY*fX7x_eWxL63!~;ne2K9g6o(U#3X&sW_ zCn}d6_QNnk_E!m56+{T`>jJ-_xv0*xXVLA*t!^$Fe@EejK<4CNN%m)gfVV04%ZE(5 zznO9GJFMj}wfI`=ot1U~X0b=CGn5;f4qMfDLW#cX8%y!y7ShKJVq$vULt`0#{MzQC zF+ejL)%LP9j>%=R@vvNDCnzl(B|-}(VjuqJ>2EY2{ttF^EqO;+NwJK1QV+>PS)t{w zy_pnqIZTlBS}7X+0kFAAnb^WuSBAmhk<=Vqu)5H?s_GYMb0<6oQcx#HVmXIq8i&%o z&DV+E*^89X5mR>tf?n8r zq){1_5I?q%C<%s9JEQ=3iuOjhP{(c<_vLRJcaZ(V6h*@xhKxe*)p3ToWjy{5sbfL= z6L;e13-N&(v$eDcW9?4d5gj$*Cp>E7jIB|BBAXs<73aZYnoQs|8EAltlAIA;EoKR0)O?fjul;4cj3Ts{9j2gv_$I^WAnz1SQ(2Z3v za`7`Fa;Ep7UQ&)z@M~}~sHJr2orpZ-fF~dpl((S$OR!2EhN6z84RS4Lfc(NnOXU^I zYnmNpi*4}0#`#2%9Gb1XZqwsCt%GzVd0V%Rpt>xFI<+>@QFFyO=2*mCX3Ytzy>G7- zt~a;+lJM`5WN^O1#?k#wd*TJB==R>wh`#(CO#S>Pj>IhF;H>xvlN>cIM3qg+Cs!jrK7xJD&ES+)Wg@XY% z+CTARc_NsMy8seg;z|?nxWNkS8L0_B8nx8k!1XWj z&$O*pkGps5p;T3{Nsh$=`cyt+8W;%~wsyrHD+f3HCAu};3HqaNiPC6Ba%wm5@Vy5= zD-S-kwU3T=kg00$8H|9P4DfnJFwW*z#(3n4c_-OQE`D$rE7pi7BI>c06*a#%_cX|~ zP1s7|ArFDxD{b4_##&i($jAm3B1k6t8xkyM0|gICCMvCB5`G#tLgsF|2a8fQ(aw(s z3PV?1=gZ|w!T%r*`K}S3@ro&g3Sb2n&lwv%*|RUMtDcH6w?>dvvJqF6HVm1|%{LaY zPa$)Ulv(@O=tHClNmXZU7}r(>knRgH-sGLD_oJNl?p|mOW%W25X>x9&U51B0o&JGd zUiPB~`_9zc5P|E(GvFiwL$${iCX*e4RKhoHVRA7pB!EPlWPyecp!tyDp-PwmMx zRZS#Y6s)CBHq1>bPT&V6b&S0!H=NRexFDz`B$2MST_gNw%J(=c@6K1T(9W;gK?*Qn zap$lX-VtDecoqq4uFoWGkj4W-h{pM;_jegjY=hI&8)eN7Q)3dbqvCI?<>Y@g%)u%7 z_FpUK?$ccWJ-rj}=HRRAVAOhpwZ+cwsLfifK(wP%YF1x{a5th2*#`Y_-0 zfO=>~CoCuAMzTm)O;dAcpJ=ZF9&RlM9}BE9a{GjA8m>tIB#8C>zI^@{_HitM(gu-f zKWiofDZ0H70#tS|B%~?|dnMS(A17wTDEAmP=>}K6S24yCLv3@z)Any-$)Z_}nD>$8 z_IxQ-Xk(O_C!cg15KmKe7HqIREcb-Sv#+UQ3WwdkhW}oZC)-qY1Q5Q1?2#8~H&t~K zB6~u9GOuvMmi4vrCx2w8KSCvr4uewHpmQ%hh=va!1U&mlnaaE5+k*)KKfqwAVHka$ zyF(9S;4t>Jve=;d?;%Vb@J#4Rxii}@6H=K1^BY^9q}!$E?M8JmHQkoiWLtuR_C-RR zh&HD$%6tG&kW>soY&Qldoc#RHV=ee8tKxp7L^gF@W-+5zTw_A8Tar@M?v?&Jz@h{dsMfl7LP#O*kl`v$NMdHz?3qQGPrgLqr$9gfGoy}*65S1ZGbCE?jaFpz@?R9~bJz`jN7Bgrw!2K~-H z4}vh<7tTfQfpog6(~O*&zKshI3?Ve1OqL+laq*}>qD-2mziV$_KrY0FaIeWCrX4~& zF=&WmdVcXMd{U2hVY_s1$8lkA%IVK8<@It%L9Z>CiWjWEWB*Jh%ilfn%{3KKzg|7Sm6Rb34cm?V0Xnh58tu`ex(g4?hkL{=FDJ!@V?t~o6YCj{r+?mFsOMwjv+ zAPLKhu|WJWWijr$hG9eSnSGU(KWp|?fFo%z-1Jm75CT7*MLSD&#@dX8FdBi=G^Gyn zooJywT#YB}qg=lU!yday_~{no>LuxdjRb*#+~%H+sirjdN>9fQY@py|_6K887{@y={xTTU)Q&FLcids$`oFeG-#-mbS`0 zWA5qcH_A8S!L(N{cY~abk=F-RA0n;}%m4%vZ&Oz4%}Ki0gmjMfgRYHt5OJ%gZk4Be zF5ME{VvNMn*0vTxe>jCs!CLno7oyczZ(VxJ*n2t{n%FZ41Uo+b{$ua#FHi?7q2IT2gBv1?;!^3RuYC7%V-KK2U|b8mp@PN^FlXP+2vS zGF3^|P)r-ad1Rp6hjyw8yemPn?_i;aKYj2)nZA=3*STRoDU26g2l2|A)`<9NZcBke z$p9~1KwBvO8%-QOu!j*iFR$K;v+mHBiU!)z0)!}Wk1q7?g_9H+C%|=|#Q^griEUd) zOYS>2;X~EoN+~6=fzQJ{J@v5w>usc#;;^1bx>>t9sp0B+NlVR z&6LB~*fN2__`)2?Pwa%`9d*=p0&CTd3GYaKrlL8*`dV39-Lyv>olRxmJ2BS#YI)6W z^=nU-SB(JNj>o?i+@fSEehRbva{TcX+=ScAkIo?%lGL&}FgfBp7*7iH5>_<1&_h+@ zM>W*4rFXHGp=xCMFyjRyM$p@%07BH6HZ#x>tm><_g-%(UH~y~)18hdy(!-x%m`14V 
zXfWKz86`j|sbCKvdyq_l7a1W+?Tp6}HdK260>H@Gs)&3V>AX+bE85Gd@n23U^U7XX(8M?mJ>xs$1v@ zDIz1HzRW#SNGG26N97T_T*|4FW40e_eRKpS1q z)UN!_j`{5u$~mJ41*9nEI@re6Ef!X;$3D(n%g2mO75r5XE%z*RtBC>HeiXN1x}vd& zxbRpIi6lK1VQUiZ;4ke37iZ9`G$hjONc-cLUM=VB-@p0I@=bfw8p50Ap|NoSu6Jz< z;QFEK%q?U?)VFUXkF&OA-E+3W=Xt}$_aSkf1mWi(O4f+P-*`a^JV{%$R@%1E$Db~b z8iiGZiNB@5g8fLFz7}} zrlOtICc>yjXm5b$W~`6JdE}nx!|#3m-P)uuPLDJe;oNZ3RT&J3KO6>mC19Bti(q@} zY?cLkJesZaY_&Bw^_-m$zPS#>Nup}y{K*TcZ@ zWSCc^mJgQ{hs=de2~80J7G#sssCHIe^1DbrBP@duQ~Pbk*l_q^y=cZo+7+P(Y6fb| z$PNRWxtl_ltL!-oHaaDcpI23-bH~fqQ>|y6UUA?Oy~Y!Bo*c)K@Zrq%$pEGXyiKxg zhRo~oI0UIk-5*;-5~Ambk~h~^gKP>-A#bYuHFiO{GCm*PRvFSm!#* znrNc36!A-mO)6~E7t7Y75akU!p0nN^#DQnNBd1{t1b-}usua#JxV@RJMAT}=3A+xJ zJl;o0VRobqu#{>O$5W695Eo*w6I&lKz2&y84{d-vAg$tc3m9Se2u7KQe?=;b|N2p! zUU~i1Z`c`J@03?eB0v1#^S_`Gki7jWzFwY+y;dQ{cjOh=A~eeKdQhI;#f-8h7IK(`&K5Pga{7WK>Q#oyNKnWv zF7e?m7`_w|sUL!Hc7_k%nZ7gqv^;I}kP@J)x=~VbWA@bDx3$q!x;qX9Jf}P+L*EW& zCUs(x5N~yBQ;>&v{;Cj8b}+h@+uQbV1qS_3f=c9I588W3pBww8dgSkgy;+`p{*TWD z#Xq8#Z=T_-(VH@VjUqMOO|T;lfZ8?{;*bSQf!1vT_;6?-L-4sX-L8>ZS)BCZe2{oR zpt700spp2Zfz5TdFi_I(BwvWQ>>uO58V4MIQJdW=3uRU#XI(S6e!{`!hX#q-!VTIOS|(QR|fPxsJCr+tZ)9jJ^(q-WrdAkz=Yjdp{(8@J`UU zjS+<~aWss`@=R*IOVnIeS*#HBlt6yA95Qu;3Vv=p3?$V5gf)a{^wX0^J4p@|?x~C; zTew`DH|+=EsS494@JEq_a6MdA=P8|(zBz4+bSfFTh?EGBOL3D{eh zVQ7?P+K3ud7)!Uo?92bRpM_^c&87HbOaqM1ATK$QU?*B_(#94Zw~r`}f(S_HLn?!F zkWCH{TiXSbp7*5t1Yck!+j`}tr6GM}31Cy>8w=OCB3VF@#;7uVm*5AsXHcktzDYxe z2;4o{Vna+i?M04s5gF=4lYSyMX9ux>VV1ON)08^>V!yhEiA9Oy9meIL5rBe4{&Y|x zzb@0C*`^2{k@aF4c5n@C0}_h^ip%n(jrmS$EeKZVBk_@~mD2OcJ!5ZjsUGhYB%mbC0lU1I^7pZWN< zECT(>bUycZdF`C195Lc}U=VNrg!~$TB2%BY$~SDQa)B4^hfVH^_c3B8N8>bctH$?T z+tGFX*$}|YAy6L#u`h_T8Rk5wJrZDJd-=`cIfLmbonXAxJYhF9Vy(T8Qv~X9-nI<> zEoFmmq7antNLsH?+hSPIqsvNRsE86^b)57|(t5!+$``U^JDj@$mL(OzWV9paVcDg3 zY;8E9h*TpTk%(yBUF8SxF5B{#%M14W>blOri!PRn7viW?`5|Ew+7SOj%Ki&etO4Z` zr_T@$s!_b22@#t|p4YGxUCNA*AcR~8qEy079rcyB&pG!AKxQ1{*eG+TAAQ;$LWmJ8ZQ=YTuj$b=Qf8V0Rp5XzpU80Gx5Z{q6GYZ^!20 z>A0G>n_Fq|n%nuL@y4z4_S?n|idZ6?(M+*ZQO;#2b~J~oVQBUNJW++S?8Qh6`n1Hc z)RGn;%gIH2gBIjtOqL3oaw@5HF+w^w0%;vk^-FJ0TPaZILz4q?Qy;uvs<-+FV{M@m z9O?cBS{sJg%kppp6{#x;CfLMq9wuy^+#H4Wh!#C zm3<-b)ZBStK_g)bc+J^d?~mb+y81UpW2-kvQv8n2b7o}}Bt5~;9_rTNX+ zIS~-1QFaePpC(H<>pM!oWZcBFsT}Be-3}*L{cl?oXy9QW_BH@w7-7&h{o8isH-V9A z;0WAm8@NI+Azp&siCn&#T*u0Z8IV%B?e6&7kCKNpAw~cf!4L&VEg}a&a!p}osFZzJ zuFeD@W5SvfZ-j9+=-12IvaNCcYPl9`#Qi=>HE5g`{SV`1s?zFZn@mvCDjrQAKN|L> zbSwQYa#V?yz0A@?Hl}+&zn1HV=*RYqQIrf4&VsT^9?n5Tdbthd$Mnvdh5v4WhQu~W z8UJTJ<1g=he#2ZHqN!&S8+p95XZ7g)+bcZE=tO6@+xG7`myQpV5FffNbFFN&+b4ugs9_#b}f^Y2uUeXO|Fy6#K}FdJ#k zm*>tKFAzD8j}8xXUJ{{puVgYzZpUS<>PsQhtkFqE<3e_Q*50e-$#^EP0doIR#zq#h zgS})xRY4ZqO~yAM`tp)Orq=vx|Ms8!Q{vW7#ieV;fZNJYAQBB?&17U9l54?xiR~}b zB`y}nP_5}NgZiW^Fd=f=6`-3C7Gu#0P|NX%hX{Q18{;5uAbT42ohZsK zVcqAZo~x`R?;7t?5o9}H<-lDLqUk7l`Ovmbo1?PUv3JAMVkG-xl~k$U1!-(D80D_& z1!8@TFER*n)v+JUp+qjuX#egO8-elct2GkNQd8vZ9;Ux=65B3A9zz1yMOiG}yY^)) z4012H@FARNETgH6)dUQZW1k)(-{Q6CDl{%SGtK~mPe-NEimM(B$nh}|O9!4H?Q4r=H7>>>)iyEIO3;$)u^si+g|<02 zfZ3?LpOTjGo)*q2xpo4b*cb6^%0!KJ|9ok0QdlNNX`$FXxErYP3 zay{@43DcQKs%*5|PAD0uB)pxSkWzOX&-C!e4?Y5pnwCa`wZJ|tVU{?8XKf>Zub=^_ zSxx4Vazw3`9l#p4JAWVCHKw7^I~nF1rFAtpbx3$*d@Ky700~Bw8a$Y~DW|DlKZpYm z!lIBHcm1_bj6Dxg0tQAPE%rs#Q1aF1>-8mMVeCWLIKC}MTVN(%%6A|92uy#xozSBk z-qF4hb-8+b_Ic%`NOi@_g-5;94YoKOpVU~_Xor-G(M&E5c$IKnNj`W@`qNK8eXHod z;<}76?yxLV1RVq;cQy>DicD%c+{Z74EF91%;dnQdlv5~vNH=WT4({H}z)K}AVX;7~ zvsQGXGV39xHfzL0;?|?h6+wq>B0lsRyp4 zcI<^{u{BzXb=*a_2J&7~BNcE`;8m>-!{REZ(?`3~+9@|xOkZg^eay=Qo>l`sL zMX!W-_u5m=sUsRB$A)to9u2Q96~P335P(jK^Yvb0BGinw~8)m3dDj~OG%Zc`$Jlk)LT4*AYG0xpjpUq 
z=idl3BiU(O(d5~~gqoh-dbK=ZC#y`8ZHSN(K~ei2G2q%-CJ1?m-J*7-=>)076Xw8k z5j=uSgpGiR2h)u8+4yOGBSu!P5Y4f_&?StYYys7%oCzj_3#Gsd6GyTp!sRD9kvok=?z59_V{;N@JP?`;Pn@ePJ%255dzL>;gNMCz|7^9WE1y50@VJQcxmT zl;!`1PXmq(3{hOng}wVCUh4vPQtzfij^MxY64=M$aV%PN-U7m z9txkE6+~;xeD0;*8E3t8+xES(d%W*g zSyt?*7X%Z4+J!f4q3Wk{CoD5itjt&#k;vBjf*0-r?1rAGS$hv(1|}AZHV&KLDZP8m{kAUC>d>=}}gGLh;Z$3u`4dMVUy%#i{&3 zBJKN8BbIXt8(5ICXZB(wqTI5Q)Zz;FiKeh{-o72ILKJTqvCoxPU$eb5VQg`7G(*6+ zN5r7=lLv?S2zlt%ho-c3KYahePqA?0v0r}N-lrs2^7NEE!Bnx}r)quq=XWal{RRJg z7DocpCKL&zLRfE~B&TVitleC-*X$xV+{YKjCI;VQ%uz?k7j36X3Il~g4fj3tM@l{? zJlJ7h-8pPW^!P{>p^6#&+TN^|wg1U(R~;LNjHXmArs}9+#)DsKyz6_xP#CS;YO1I> z!tmUwSi`+fR&@uo2X>7S8Vf|$y&XBH)pYx`byC=TIkvfiA*XR-p$?fLi>Ykao(!W5 zqBNC+KxEX{12BLY)4KsbHT)a@Isa1g>Iy-aUU+gA%d zrJ66C4Do}?Hs0Z%pV`;tDLSByPy9i>g2g(yHVSB^665K@QyH)L1sN`*8^?5gZW(N= zNg?5(WKIP8Nm0d5tC-KRv5uhm6qk-&ib*6-=(BG7LGnp8#{+0HZ zQ;H@arma(#uL_)aZVm3*j}L$H;0I;;``|v6>SCkoJ3&ijBsu2hN%8BwuJX-{uF&KP zdqkZMtMJJo_R?N1(qc8v9fMcSm23a!fMeSF=vab zp6nBJ@&hHMxxP-$CO!kORCYa-jomG3&Sc1WNkI8tZHaQzvH(49hrvX6685$g+@Jt~XtC{5>A<9X$=< zKa>MJ@FI~^#M~|uv%_~vvVRz*0&+5FQ~R9HfR~mCfZDF;j(fpYjI*HW)wX?U-bma~ zGM5ZXSxm@AGVgY9HDXSL$JX>n3<8m7nH47xYcPhvcvpu<5OX$12C>wKz?Qj4#PhL# zQXu*%+lC_Iw_P%^DQYT+nanba;^|4UYgpxsp}XBx%gPiWXgrR)aF6@ffUdkbuS{bi zQzMTe5fLOfVw6S%3Ds#AyIxb82~-0nn>y-QgT-xIsReD%#s}K!H9|1a^T(9#T}l_6 zu+6}OY&5q|C|(Y*vM}rxC%9bO@W%7Sr1a^EgzilWkda}*T!TZUVUPB6Nf}K)Zl{LK z%_w3RAnqa;YjC7MAO922!tddAa`NreNi3!tBB&ae9ZoI)9x6*+n>YGAUfzEFt#etA zE1yN|AEPf4X4qY+6)<=h#qaD9RWy0hojyD%b1O?9ffl`*HkkUBk`Ex{x#aluY$4s@ zCRcgv({k3!E5ZBnsb;~UxKL5g@bCkAo@K(RF96CI$b=CI<#;cS5N{S~YC;G5B-g4d zGMi2_su<<)s9DzagJLf{$-wz`y0K5zgEm zu%qs~Ven;;I4=UMpTH+FhMdCks(jq*P~GaH`U`#AvAZK_+(6`_^hd2wJjSiwWzOO_ zzbSbSNHD3j?I9?7D)drCAHs<;UsPf8tau?38)w$_HcLf;9r!FCZb2Oe)8y2Qp_buJ zV}fPw;eKj-cm^*GSV0d!wy~GAXU%}vIgmNUGz+^yf?&i5uVB|})XIs??0wWNu3z7@ zm9*PsJwAID>-2C=`<_INRD^y0vGVqlZk{n;+Gb4iFZKdecXpCp3gYH@Tdw{F)g7R+ zh@f*99#c>1MnU2f|NP78yTt|jLwX!9Bn#;``M#&2%Y!&=b=dr%jr zJ*_nxrAv^_1eUzF-^V^>y8xn`IcFS3d=0z64D=fjl2xN#+b@9y+aCc_`Frks%4m)8 zpJx>sq26lu#0Ni)b=K=?=i9*&3o+3~%rXghwB=T99arHpj=K3?$&TD}yswVrsQJoVHQPXsO3V33V+ zbE(tzg?xY?_uw}sOF{gSN~PFLAsM@P7&1)^$i7rxc-uLKd$0UghG%TEtqN5}*lpLi0+*544SbyQA&j0R zACQMJjNx@T>CwQktqxl|N#|p>F{~D#eGsIlL}uoxbLH_T?M!w6#Wf(uAS+#?873qi zb$gbH*eN`A9W9)i4uQp@z%K>nC}7EYnlvX8B9F(WwM%1r4fmQpm}dhYSFL_{GTLzl~hDO~9iUZ?M%{hfkff~ocz z!8V8U;VhM4Au+yL;{H(1wN^Mh^tsQK~DYdWh+?I&oK9LYm);=OKIg@)%=Rh=S1+e#V_<}jOCEX z#IWg=nz)6I&UiOJwPTe4Tea`=_TMttINNu86K08%?M2!n8p7^&Q&mSbx@Kl=RIBl} zOW}Jb<-WZUa+9+_y~Hnw%q#6;he+`a{$2LQzw$)+lKr(F7k!h%`&9jiLv%cg#|T0$8N$rKP>i(hUu z?yT&+IMo|vY2B^~%_zci=swT*J2EM%`^P#qDwBtfGY9W3s z=rP9hhQq;e()^T<*cV7lu#}Kpjji~8T05Nr1P@HaY?aMqB-cTAX;#B+;iY2PoTL;0 zr{Vlbho{YWKiYY`pNb4XRbY}+9TTUgqNMo3HYlFEuFpM>m_!7UY&|{?KcI$!|Cjm2 z-q3NZc>k^{gi3I&uuGqi?f6=|%K6mY9dEolarRb03v|WONWx)Doo3aCGFCGuPRJjT z8&B>gC4ygKYV=-k9<$4E#>GBnTdzdhHlhHBImBpa2QSxI&-n8?i?O!$vjq{ z>r?+4++8neqq3j$-oUBFFj)TTp2A*v!@eme3Bl&bJrvq)&cd!}1-aVlNg9y%sTo20 zy4pe=a@T?Z78Fs-mN$X1#FgHE*7}<0cz(e%E_UbOuRzDCTy9w zJ?A_Jqn5KT0v-Twf%Lp$7&Mb zR(h8phb^t6Af=?Dkx!QGsy(U(a6L$j(V%cp)J+k@`hTVFVRaz1x-;e zE~F$r%WP9NI#!XA4b8=gk|7D0IrU#3<=MUv7fyE5t0wcaSb2^zUn4cHKDGDqaRyz_ znrYf)TucBGpQUjR7%Q-D1S~IK3KW6rd2`de9$kNFS9|DjLpp(fd8X3(3`>@rU?S}k zO;U=x(T2gO$MO>;vnmWtGOdpY~4_rGdTEvtc+|U1a5G9T-gIg5u;WE}OA+VTLWiACfRu z>W>P*xYWvI`K?pN4y68$->dx^V+@1;esR;YOB#gt*{WNbGub7=o3RTIe@-Oh!5{2Q z$vgkHESnnmQXcb$3j%OGrz40}0!5K?7feL?Fzuv(9eID;Cl%Q>(+C9ZZt&ZYQlrUg zsl7oQkvG5GDoZQ2dO6+H{0N~DrXuo$o^-6>V8DHj2X|^lA%cRnKEy@5Vhw8h5PZl< z9Cn8;xu}StxIX^8A^=`c4V{ZQ#tV&V$~HxjwVIHT-Z4RjQ@fKo(P91RgjuzJz2fDb 
z4QA^o-H`J7$W3V}dz)~((cwz~!cUC6zz;4M6_~np7K;skX=@TzL~KveQxfMlbQa)9^93q^a1~TlEsT6Vzw?N zitA0abrA-;V_#SK2aaK#I1<0(9+Uh(;M&=Do@b_&Dx1T42}skz={>v)>g`n zS?d4x>_5f+#m^IaHgSqD5_U3DQ@QQIK6xRKN{+xlACZZ$17baBGQ?&|5YSd~lUk{{ z>zhVIp<`gU^d&A0gIbde)D6hUz43r}1+23sF{aJdH&d z%XsDr^;q!?=GR;>D!)cEg1D4Oc?hlE>9SRgw5_Ol!>_p#he8ZbG_n`r(yOgV9P0f+ z4-a@7aF%hM>3=(_R{!?F=x$>QDF&M;%tvKWy7&})SQe1qP9QYJ9RYeX%*3DO)DD)+ zYUFykC}SKo7vV?>2Ha3Pm_cx3v3-#i@t~OCSwv4_DA+Ykd`~Y#gGZGF)rTtf|!5RP_{>Hd6>(~o1qxwHG;dy%}QtEMa4=qkU}nkt#loiV&dTp zMasyn_@$M_vO72y?82!F7lGAq4-mTf$KeD+!?js7x&z69ZSSJBN;o$+Uh}>w(_2X)84v23B z+=9!)WdIYWNJNn9n3Gd<)X3HJS&hGUF%3!2I2&L?wyw49IM*p%d?j9ajrW0!u)p%h z)rB}ME&>I~GHQ%SY#TzRECYEHwaT==!vlQ8lcy4u!z4Nh)et|3;Fu646;L9D7+dH~ zNRi@M8b1=xPp#`9Fph)r46zSNuxB4ZO%-}0e*~B~QFT|3R6vy1;{k^!swEm4Arw+^ zh6L3!!#z!@RRemny|ie%p;~_iT^<8YHZw}IgE5%77~aR{>EfM3u#hM8f8wUc-mAHS z4;;)&fVoSMtt#g@n;s$GT2133e?3}CcQp<;IZDesBcbm=bL_^6!G>GetKKD^8-JY_ zCewZ-C-_^c2-JtrUac40W3-(2*lh?`U~xZmlxLG^pwFO!Aam|RIuuTXP$szI+Qa- z9}XP1J?s|g)VrfNFBGAK(e4RBvcze^z94sKhnO`57qX5cvy%{t;vfml{Sz1@jOz)7 zR|79tX^KnU4Hg}NHS$BP9$o^)VfU%&+411O&Xu5N@I0ON-<%AodQmA3^Ha9 zwJXjS0tOYnX6GZ|fg7&`FcsCa{fV~*?u&p4@x%|&z&aG{`4Gnfds z0exj}q(+ClHS@4d05TiGM{R3JU73EGcE{#a&XCxY5^!;Yr_x1!bT>AJJrVMGO*SbG z^kITER^fb>9=1(0t8GA#4e=Il#$w^tP|0Sef-OtvFMcB5=f!I3t@>857%9$hNQsU7 zMiJakjH5Pk&p(M)TQ@@`UNsrQCjxjEf>a$d4xc&AH}#R6FQ5nVWdlc^3gFp<*6Wz@Y+dLe*h3jafoLetfV|81& zB*jBt)Ew-gad!UqGibr|^ou>Bf%4dYf@t^(NQ5S8!l{N7r_nN5yvDG%Wl!*HQD+~|+?0zR zgx5?KeDVmw!zp}&_>zDu#@x#2wA(>VrsSfm={jLTZV_hRHa?O!t=_+~={+n>ali<4 z=L}@$(rj1<(Mfv+d#57%0aj6)f?wL(uJae?WAeIn%ejdD>JXj7wt__gL=i>{>q8~| zb{=x+(ZsfV-JNZ205>CW-SjcR+j4lDyvJ*6!O{5r2M1}g875K=h=I23#9D|vpcBGF zWZ-6>IhR3R!X5hrPjX*sP$;aDDi0%%j%2hQEVm9k2US24)jpxb&3HdvOM;esW6WczsGb3{y-Kh8Jp{HzO zyay1OcS0gorjj~J&+UXpwG=b&eYI@Fm#?xadyi@Wp11E&0xX*{e(WrjKW)d4>P7Rb zpb*wKHqhXH{N@PV2ziogNk|$fGBlEp9SkrQc;5r4HG=qjJVxD!N%FaRz^KLO^n@vK zXfDf+@9$NxDYu@8{SiWOdvzPa?8pudcA=C`)?^MqYA}2w+)WNtb=k$ci(k--4#UZF zO|_uYXtUH5`bRjh=@O(uQOzDSYLnDBPEql7?6umxB$H+z3oU?694=*lp2sKB1p|1^y!MS zwdkLkLev(A%{J4vA`o3y$E}6ZiRDq}HZy4%!s3}E9#WgYA#jNrJ&I#qpU)*WH3Fwd zSPtdj393{mXd{p@F-SYMI-h*{6Z^Z+HVTC}qjv8IRuCbG>FL@`vw*8|IeZEzCeA3O z)C6i*SceM{i>4awhLIWAb*j4DR?Y8w#SLMBxPN6|+$00mO-AT4`x+kjKI*iitrYIq zCI(OIk{r#!8dG%6oo^YY!1>}F#vJVD-whGDp#DrMS8#IRECA)m2Ma_>&Y#g!aLdui z!5ic&h{dwd42Tubl=s@sB$(&DdQ(b|Q9FrXE5Hp*GU$AGAb+U+^gvr{Bcn!v>Df5%DP0PzqertkyY_~SiSs1V}a66O)m zx!=+7*cRIc@C3p*2L!KjXgT$_SzF1vK0#dKWToHjvW1dqsZV{OQ9Uo&OYOcIhe-%{?4)?M z*6gkDDL+i}D4Gr>>DpnSr|erFj81ni8@QWgxfAznWkke?4mzGLsAKDAAgym`?LUac zyr^`4#7=5wc{S`)PB{pM;wJ5{09$+%{ER|J8mSk6(^_pdavz#rx9&sl)QnKNy8;zP z4;1o>ZgnHD(6Fo`2@J2fMi9o11-ZiL5BElf)D=6dtFkX@XGQxBf(o$*m$>Mf6xu!- zJ>>p^FEp_xSJPVo;YOkRjA7<$11tLpO!mEOqb$f%j3RUfZ>ypuB!{9agKb;XSZQtn z6%%ICMhSCx_L6EGT(mC{3*(do)rfE;LbXnj#n=(LDE`oJB5yfBlLULHAv)~vUOaP8 zrV&_&pfH7g95PRf`)duk5a{taU{M?>;whHPSOB-H4++onN2Bc8^>S^|B%!+JDpi{L zEVV(F#mH~*7;y_bC1|VAjp^@6c_T>(0l{)l4-!sP$GG1G*~qbA)q50Q!FZ^ZwkHt+ zAHXEG!nI~jXxw6j9B7_lC)&{f6)uVW41VkjP5AKKfL^8BBU@bXL^MXCVjc7ffIkIA zTICL@6kCbbFN47;Nm%&V%jf=SRn||%Iv136c1T_=jCjA+M?84n$jwkqT^RduM4*(D z4B-H%?5xk1eS~=E(zVhFl9Dd2*j@y)ae#)ad4O^HU(2iC^+3w3zWtZ}38f#z-rypG zGO~?9h#g8H>|&7wRUs>VH8eW+`_p&hI-C_JM*GS)QmR@EFO)>IX;1_?`v!X_lpkTC zfe5#=!t^9`o-Z#B2^Z6@0T(P~{Fz;fec=uZau4NL@r8Cj1=oze5fv?#jz^J;U)!tk zkdL~w{R%m;jpPB#;L@D16K%~$9`Amtyk!SX5NPZ&jJrnu&tgBdt+gx8pn6-Q5%yeV zDy+d)gPrQB?A20FI~iG^C)p&f7ck;25Ey?<#)R!(;`BMMx7-Z-mCGyDw z6!Y-ykw;r8sJLiP1e(Di+P={;(O<7AoA@-Ia+8>F@w6kEL`Ls?nkYaf6d7($-coMU zA!If5NW?6-t%ILi6GHZK?7~3`v7H6zt7Q}y7Yz=0U<+7Y{0_i9CIq0stz_S9$rO_s 
zkhN?S?;o8Y6;m_C8(5ODK_;4Z4^m7*3XLFp`g|uo=d4JkNKUH|5Ks?Y(z$^6?FPJ1 zp0*nxe=h~3#z-83&1s;cE>lJPQso_#xXC((rs>ZT|9>A1|Ng)IBU!{=ukhEd_wzx+ z%*qETVYflgD%BimGl4=F1GW8f(BP5swYcXsoXIp>q^AiXNf1oz&%f(xdVVDmcgQC? zT$lpCTTWb5p{vHp9oojtL3MfLiG?$w0Ylw(CPyX4-qlJ&q8r8_m?tC;u~F>fN(8K} z+BVKTR-XP!sDSR4qKo)J?ndH|;j=~=Jw_;Png~63=mz8Gul0t{> z{_57m=u%aLq6PNl4z_gyp7dq<)>{#414SWK2s7B1s%K*dek`*KSs!Ob`vQt8nI94A zS_+}gGq#1`h$%W3z1S2F;U!eTu)O%<;Si5m#<-E0lQ}HcjDzS4^-QqiodzwZ{n-cb zQd|KF;tT-T9u9m`VrV$SM*CFFM=XW#CZR|r6W#7ROE5bhs?KGUvNy+Sm71J~8j383 zww`73DfaJFPJ2B^Q{Rh^7eY`LGb*yyNGAPE9CRJ-cjqItQhw-g&mR0t(C6w zGGmj3B*|b96HR8#(=>$uX=;z}@_|UnfP>(Ok|nr}Q%PJdM|zbFLKD~n-oUpF&lUmI z6wC%q#bJJ+)6*Mx_4ZvEHXB~?kNhcJ^M!a2yiv0Q3U@_Oh8h-uw*%K2=X7{5pt;|{ zkW7wU95{pTOX?Weua?$)%HCe((${V>acTCLxCWEhFVxtUJlAnbGzde1ptK_ZfC$z# zW1aBG)`(#+9o=ZbEv4ASX+flb9H{Z@x$=K}H5QMWtQ7(A8x{Y7OUN#mLIAoA zwwwjjwpurtdYz96G5{<1Lp+T+n5-sRZ=4jxZD&T zxl2Di1z&&J@%4%jW_v{#t+1kHc)M#Rl}P~6HZ&n=phhD0GZ9hs-FI=(6n97W75zL& zO(W{PGiIwpG2S!zL_bj?a5U8o;YYvP8BzGB{MkHonWfIt+Cpm=ZfrGPY*ZmVz)Xg6A~ziUVAPLDGhAo}e|8V8BGe#2fFL zVAw1-gC}Lh!Xc5z)u38i%?pn-xL3hnZ8wc*rqnheBwnF(KK-4bsm9mDidABU?6d6y zd~mxNMy}T6N1?Z@W=BzHdn%$-lM2Wf+hNq~h0&m;m`;Ca>xKuol9h@cELv7C-DHj} z)I=yE-no4fxnxURn3+OEg0$~*ch#kWdeDTVkcX-sEXY8b_odCZTY*zmN|_3aCLuJgZaI7L2Qdg2wo(^T9pyWaSbsZVh^3ro1pbST8X z=QQT7m#fC-kCo^ur)F3749v@@>$!x<5+YT`HTQ;V7m#F%@Vy}&mY2d8u>g!qwm`eG z_!UqHauam(j(wvk$7xpG%MI%`04XCgM~HW0Ygf^CobT|UiE_kUvh-2 zhU`O#c)aTMw~EKB<|S~r4#_O+&>m`I)5t*6I_O^wq(V_HS?0%;_FI8JshbGEhw z3b0*v!J^Xu%N{^e0s8XTvvu(kcfW*4jx%THNX)p|3d`2On|5bUVBE}}+nHG9uCLJA zB$I>uz!aN(8Y$1u7-xXNR4Cz#*<)H~?%@aAl0;w9f0(GW!`ow2lWh&8AX(|SokY=b z2h(x&het=81Sk_<13j#an6+Wb`1oWm4T@8v6NHZPn(Vad~%Z^j@ zf+TniJ^ZCb8&CiKSmHnTUu&!~j!I5307~OoQiAG&Wv8@M_8n$QfeivWu3!skri}Q> zILr-+QK%!)Y%1+*dBWtvV81p`>bqYd0ydE>4`ivFCUs-<9E{zM8xUNrOX3V}BI{M= zV#{W1wZntdwsS86gaU=3-d$iB0GeYm8lW zIe*30aD$<+u|ni09cv>LszfM+j}8fkL=f(T*hQzv2mC_N@2rY+rnE!Q6G#-n?X|ng zsK%?;)EN&!YWvwnUHLU6TbCmKhzYJ?Fb5q~q@V15n1dDA0(A+SroFB=``JH|E z%+n4uw^p~1N(?AUL!Zg@4myc%)n+%&a~mP%rrEzKU$>*h9C8BU^gJD}CF*+@-nX3+ z-zJ~d_nj*OuMBEjA~K>F-?ClowaR}GV~>wCLqOGt+}?kM_^Ms6gL2=#3D0O;b4V`= ztN>ArJ#sD%aV&nMK$~X7(1e5=l`6S<0BP)v0ZN(b(ieia+d63%p zvUW5YBLZN|ex&W(&j6I#8D1@m@kXk#)uTOn2&o+F+qOD-?zo+xr73-E`;gO;XKbi3 zto@A!(Wqbhsb1tOwwl{&2C?IRTqgJsaN>+G zq6r|olLktZMa0EKDHFyF7VJnqBYfhK#Q9J}OxCk65k=hXG4L_=2EI+1a;R=niI`jt zEAp0!{fW@ae)2}X8G{7Y6rmNS!QKQ7@bXW*hEjopc4`_5Duug_&Lci=bQ?)Hs)?KZ z0=gmM(hL@z`o$}DV?o_F#a^}6%N98nC^4QOl}}{bI0fqfF7jy13+3D``y%f3VTxmH z!!)6xM*ys=ws2Ll+-5TFvT-%#ACCRv68A^r6XIClF715O+XO1pfFMXj>;&9V9LD@E z-{4LJo5-!na~~U{@cYw`j1zem(I4AFR~p5fzv=0jJ8eZ9@nUvD_9JMrvC7yV;~O0% zHMJ@_x82QemT$(V7lG9HD|zGC6%Mg>1c=A9b-gjTj08lcI8L?Kw+px4w00%1w~CKxc=(^Eza(e>;K#uqq}xGU zIahPXqC*YIGes9&uHa@+DsB%(QzkI;7u<2ek@qtb5KD>&ekKbLzTW&IB zCUm4}a;Nk*cxc%F#H@icRLzDDgAgc_i8su9qr6Octp`lnWq!d{&Jn^?_LCyb^Vi}jYUQ%`_?ee$703Uh z|Mj9|TOp2A3hXw+PZUP5Iv6@1{Brs?&3~8tgAt|H0>vI-UQ+9(?5aXN!2+biPX@r( z`CutdO{7aIB&_i!@9f+SUXCY7%_J*;#=0n9WVOQ~j%0^=q5>(UbYx_-5XJ)mUEQ9+ zJUkH=JlLk`Y?8z6Txy=_Zh|}-vcMtVw*kFVSw8YIN{zDXdIw(|jS8&DTkC4@SzLgo z{b*Peg4c+uZesLscd7iIBqNDvq{gd+_bF55dM9{9z*IXT5p&ZO?7y4V|0~V^O#fYQ z5`9+!!!9+i(uzczE!3teDQD?bk(d$h<+6rl6jr06Q8`gXo3tlgw-JB8iG7Ah7{LxXK8e0kfRgt01& zPyC3QKS+tQMXGkPdhDM-X|k_Ji5FW(2$7tTsLI9T9>SNTs;G7DIZxxJAS=bR$wqAGZ)Gu5?MDT+guF zAH}9{N|B$wCJv5hPqc#D)AF&=Va${lYyq5vDlD%7SEzT zWkR3yp>f8*yoBRDOr}>;#M-q7Z;;jO#AcS@Uq&a}UjhE+z!&3x1-Woid`OSJoG{~# zB=ycfPRc+yo}|teRhkyp2*ecbcK9VtPu(O=5KOY8qD{;OlRdpeRO8~uZqqb#+uyU* zNgI&5v8{C4<>l-41CY~v{fUpUvshLG#L017xj^LKgXl~2OR}fE@Kkx>DWjqFJNhJU z@>waIj00nCd{T?ZlmTIDUuRpKx*9~?P(YR8JzT3exOD^yVk+gN5N_Np 
zdaR|allr$1o;>qZ>=cC%@yKD9h9mLVIkF-rfWhpH)u5rGu!7f+^jps7yJ&Gd%Kz(M zQ!*$D&8`6$Q}IVKJZRY{9U0?909dCiYXd*2u_p-6vx={Y_QRh}Nfo^l7j^|Q5U`8Y z?C^Z?j(tOZ5Hyc(bR+{}2)Nq62jE5%F#s7G5vHeal|Qy0>ckcM;qkAQ7jA{8PhE|? z65@L^mALo|0=I;woErer3sAV8|7dI%y{DSt2qjl+&uHjJc#Br_K4Vl_NTNAtJTEck zFx1vR0_Uq@0*6%S8x<2dBj`2XIOx>^xdSxP_BlPJJ+ZMm?jwg4>#cZV0l0#?WZZMi znQZLu%QQJ&(XVVb}ZjgetO;WZ63#lWf z-vAIcm*U&C?;lCQ)SlduXmEzDxr2T?^29d!S9x+3FWQo2bKyb;sc*vi{n0&pzy_Kx z1Lg%NMc*eaI*(XYwAjuTlOeotqMS{AVXE{%I=N#iUw7=yXQ3(WVj{>pJ$<5lE4Iwk zr8i^+Ipbm?96IRzEheglkvw8R6Npa&m2n!F2;|VhD3A+Q$2phevft)*e5wYU2mkS3 zmqIoWi0wx{{DZ$KPliCJ<=>Fzk-F+mB1%_wJwc*E_nhjbY6hPu3~G>SzX42RVsG~Z zC_r_of@bQ2B_0CIWeXt#M5JnzSYnj`!S-m+?W6NFR}2p05nq5kOyIGB_Z^|t%`sOn*Ii zyKj81JXjJHM2@>&2TqaXFZGYAC4XsGNhbSp(r_{?xt)$vUAkJ@b}jdnW$dTQ9?q1b z`}73oMCuJN$#m4o4Hi|2rx1@ zFZb%C2zw=k%#WMyv`(3aA5K4;Y1_L{5g*iHyryBoX)#M2IqZos+po-SZpx~7NOoqx1Ebt@j+5-!}E z{?!n?tpMfeaiqf`nEVh}Bs*uPoRRRx&~q9cfJ!_ovb3l1iCyFB7ryYH^1=(Y1InkO zdlbQ`d!gR(FtB1^!C-BwEj+V^^NS6eu#N(f0p9JWfw0NAAdQiEnYZ2ZLlq;`$-w zdqWRJa7+|v1v?qeDd{mO&wfH3AfXk*qS3u`C6=&kGNWJZN9BJcxKBUNwP1XDzpF2y zn{XIU6LWp{%bP|R40?g0xR?DDtf_|u{5>rhh8_zRZAV=J@$q-MHC6HLSwVFeOF`nn zJzb!BeT|No(jS#+3Q(%4>x}JBP@K!_k8ERVJA#Iy%Te(Fn~<}Y&&SCk+{He2Mr=_f zj~;xN5_0fZQ%jbIaZsPjeGlnHsO-HW_!{wpU&c%BVqp@4wU?09tN1me&G zrSS+U1ZRHw8K;)e1lfR-L*O_*r7tzJD=4hGMLO4qFIQ_^;~Vs6lF5`5H$u09WRr>6 zzQ>XeWSbfrspRWI0+Q34A8j9OhAJS9WZP&^r8(_iR@Uv0L*-P4saGBNX2d&Axic+n zS~l0i)LDZ`xtEga#*w&pNIgAk==C{GOB}ia#CFv|T+I_rLY<6rlA~5wAK)tDz1o2( z<)MBB$+-jvB#B@rrcRZq#nWZ z`4EFqfHht~&Ivz;(a3?1K3BeN2Z3>|c3m^o#n))(jv((R$<;ribz(BxcCaM|ZdtDK z;kY0;YfvyYgE_+s?s$$s*@c*2jerk#vZwUGzHLk~^OkRnD1kzBMgo2;_svM4vt7-? zL2ig^ydW%EMaIPa!l!O};Ki?cgzb+;qy2H~%pd#8Kk%RO5B&Wrw#0}3wM@MQ_92qW zcSiCa64^dwUBCWToETwa>=Y;wT#VG*1EL#xuT6jNAn{dVMclwdbTA=cZi}mq#Wvu( z_J)y0fy4Pt8bjL}gci-k?s{UZ$U8EGa3iP!lsry-TOl#`$hLYVslxrykr*nPI-xm1 zJqHig-iy3HO1P2j)zXY0l!9DPAOto!3z7*`PpD-;oG0$r2@sy(&aaopz834;t#MzY zgG_CDc^`X%Sob{keKrCf0pk`*Paq11FIA9j9FuHWPA(R`f@O#Fu-DP!pkdK?1fg-T1*C0sgH7Ryd}?fJE-V!-;)T*;_HMccjSd z5#KSgAlrm^a5>9`PSM1TfEre$ESyOy$7C|d@QY@1e6u96FSoc4a)X`4>44S78-TOp z@adJPtQ+iMA5%aX^Go<6d&^>(cSIJ|gQv8Yeeg5;JaFMAO&W);j+;hL^-Nv~ij!Wz z3YF(~^8J6EA($8l#72KrrtjNPlg`Cb^^@n+*tm;nYy*Nz?>Y{n@o;j_`Ku^Xq#g0; zeD61wjLMX)xc&H2Sug^s74iG=4RUJF**6%I;J&(SOpex0h1VNe)ILr3fCPP{5OM4a z>4Kw-=!0BWw?dye|4g}TkH8mqwietom0c4krkGpNN8`t9#e)#^wg~t=2<0@)P8D9> zHnVb=4P=+*G2;d%G45J6V1DmB2_fpUStT5`0(p;`|%;8?a z;5YGr;iTKKn=CQnt9#paO*?4TL#i^29l#F#81ST#h5>!+z9rXdL)Roc2!qNuTs!61REm!bUUiBW~-PUnNpd9Tv9(R>A2^k4DPK+;JXKZy)ltJWoG-1+3 zD3T_lm=zd@5$pu-m6n~LoP)`JBnOFskB)kjiq^sb`&0u?w4Y!cgqof2O+l|w5VkT! 
zwPIzx>T2wb+;~#dbA8DkML(iec_n1MoHc?5h=i1aXa;e)vU`MbDm!>sy6WS;E1p}g zm8WbeJLNEy-Z53H>)C-DGXbd@dIk4xdlD2L3M9x4Zmu&ETfmsQW2*)^t=6i)Q?bt{ z!NXVyBXGI#i)I9O^17~;>#O!>l40n0$Y_+=W-@FZ+XaIqxuakOkwV+(Ivw$i<_geO zXEOKXxiOG~5S2gV2XTC3yIX|E^K1k`9E2&RsMuDB35r)mUO9bBA+&y+h0K`QOE@oB zL_C2!s|q>e2|~Vaxu;RB+-2LynFAq!s~M3P8gh>aBUA&-PC!n+P`QHYxx>M2=^n%_ z-DGK7H8z+-{Y3hyUnF~x!8JS#vl7Ukokd5%b$1(YB6bs}9FK6PlRZmi3ZJGxHC9KP zI(ek3QVwEmE56CzOax;-)5RnOC`o-GDD8-q!wf>!Cbq*QV=>-{U0 zO#E#_@oJa#!f5Y?bq682{R+RwIb2un7G>zswukRbX#)F!E&VKB^*_7(l;rItz z$7AKmTR{PhNI-$5vhm|x^l}Y`#LG!9!8VCdvLe=&7wqNaZGTRoT>63vtWYnhUS&;F zRBRFJpyAw7`hcE$cFbxuxIQ05R#h9J1!F$9k5t`}M}*8b!qT3(PScYl?pj<_do_yG zduY&5N(#bDX%`yp|*IhB{#V&-oQQ+(Y z1-Jx@?vh;W?TDxBg$Xy-%zKw`;`40P(T zSNt{=z!~!iL9or$TRkpIeFBM%c%sOV z+ex=4((`@lf1CkG(+Z%9Ic%=j z=YanFQ6Gvr+G2KQiptW0??P}dFmELks-CgQ}9w3H)drP(3M!^a+7G z>8^PGd>qk$Qb|C82}gOz9u+UPMGHT{#-lMU+5ZTHt5*nyNjlDDd{T^|`1%Gu zGD<&|OnX=2#xtUr;$Z5EbgSZf>+lvW225=%EtgkpkFp~pT&t#}=l;mP*z^^c$!=dP zmoD04I;4K%1UD#d>Uq`OsC44wXjB!(dINLzBVZuGhq0D2t-bkANYhq+evdtqSHeoqkvMtmmh81sVQy`a+$9$5^;5Z~^|lEeD1`r0Ul zPvhcJ;o^aNfduQ`&yB=;gI%=>N`^jDktLq*aK}yD+~a4iY&GBFUiF1FPI#e3?l9g< zES4l0mmq$o7a5x|BGOo6n2de=uH&*}(O zgn&TmN2*qBRyc&@xY14rmNA-jXe4cGBTajlB5hT{V-v;wmP)H}Q2X|1H+N|jjz2XU zsR$M(aAg}oWJgIjlXnR+2D?4{UU~4t*cd@gIgI15A_+3|q`2kG27Zeop9Ak!MaU0N z*2v#(W)r*JQLUEAer#!Ls0azBh)J1SU%K_BqW?0gbkHRpAnmME7Hhlz3SisNUc|WY zcHnpE3HwNEqolYW<mT?3&9hW-xus~0D zIx^=h;=0h;_iazuQ&^n4-L&sx*i|7 zgM%OlYLlH&x2pZcA;ToOk;+op?8K3yQkYsE!14`^I~jJ3I_elK5Y#kX!jCls4cOEh#n`U4)cyj+g;Z=@`MNJ zoLMAqm{G}NhRZ>dy*%QuBV#=s5)rpi&vtjMv|7PR>|A*_v(H{GL%VEeY~qHy3OVwu zJs%M1hS>s^y<7c?6&$BwE_eA^MYwi$j3T4!i!JJL&>;3=L5*`g;2T)?_BYCl$_Wip zgU1k4>L-@sH4L&O0Buj+rp=Qb{5*XFD7uTg)>Bp+%g(XnH6rc_#fN`YrXLxtzg52%k^=K%!MlIUoh zI@_B{Y2s;Q%IV@6tK-m*+)mb{oA&17nulu?Tch^DjzgG%Y&}aa>P!+)b zElmX?$xVLnsGBt_zCfR$by~j1SuU36pAUAqqon7DH!W89&ofvh^anxtzfp768l8{p zyUE(o1F1}p#6NVNr1}Zr=Y@Cf_T66Jq^68yh8@?#@6vDLJE~{nU*dW*CSHlvsDbzr z1E*g#wm_S2v|fo%A95KhaqR=@kFc^HA3zZ)f0@CGo66{@Wty*qzAVDS{c!s3gP$8C zkOYfXV9YzfEk5x3^->xC-*yNI3vJ=Nf64z%aYVbdXJLTY$z$ak_BVASF4Nx!fp$?t zqE8YD)cuaTVceFs7v9k~Hrs*pAq#B?)wexc7jYk)#kdMJ*e4HA7F$ryCq~)=)JDSnmxVgdZFDH}E3Qw);4+!OC?b|6 zpcWqhvLO-WCOR=B*Zxgm6)MXXs;*P)ThL)un;$KaFV#K+sBDF>s8-bZdv=K|YS|)GA zrV+~oE^_L7a}f|%+rInsBgM_{>FM>dIUgw#rM8&&LuZK^QcT$wRh6mHXOgGSUWg?i zah(%uAl+F-*h)@1cnzoExEyF&hf!~O`XcSn-Dsh$^ZB6kR+ZHLT5MU55+lMttyeB& zoT?k%30mw^C5+pZTz2kULLGKcM0Y%5zgtqySk4buAov4whAtDfX4>5Y0rSKvc8XBM z@ppi`n5(n;|G`Ig1{Bq7*pt{j64ABs9C-RJ&GRFRPJn$z>91e2dnfM~JI8Hz*yu`7 zZi&4?dE>ubcqrWs1h?!Aj|>tsM#R&3ZIaIAD&;?|SSZ(PZ`+t0efFc;T9SYHh5Zuj z^+AQE$xgRrl1H!cqF@*LJzTNB)}nd9W${-O&w~BL8NVBmJL?VhiET>Q1n<|lFKwE( z*gvAFc|wVUUC%f@LAtmrD(11bEvaxIS<{D1I4OP$D37mM0-SQAGSLl``hT1{u2!Jw zOPB3=;?P`&bVOKwf+J*nfLZFWAr3(H&rG_dPC1P8ec^@jSddM#7We(0A^dQUoFNfG z>>c)x&u`*{M5b3jyMrjd))9zmEYrDqJ&@NEW&b|V=*peRs z`WoEbU)$loSSIv=Gzw~Vbh7~4=pi})sDS;rL=S5M-S%ztM?xdgB_c)3n=Qyt;&GVnw z*^T`)r`57W$`U2k$zNJ^G$cNMAt6j4AyC=^LhO~W8{*isgWBp*BZYLh&7J0{nvr7=N~W&gQM!TWc$}anLUafa2w{QuYa;J2u^R z0Y?;|cQ+Tfg7J0$;!iNyzCttMnH-9WeJ!?@!Yt`-QbrSLsIq5UQg7+8d(Z^=e~LY3 zCe>>Z0qK)ZgNj2N#T>;^=b4^y);&;7sxkImymg-MK@#A#$q*r+OItV_y3CSmm*TN< zwDpbn`^~c0iPfm}sdF%G;Xy6+sf#c8(Ndw?da`VgT1K&d0^DhjiUQe$3`0)aRdtxu=oU;SO=qS#&6BUQ?6GQ1^9jr^ z^6%3+4a&B=NL4B_a|w7R?G3%rDw{BzM#PLPNM$K&D@;F=D0bPFR((!r6%vAegAk=# zk1Qr3sn`@MvLz7(q!&tMe|~U``p;wqqbkWRl0T2~FD-gy(D}oXCT+^OD{>$>0wjb-|I7d%N$v$>TR@4HCMci$e zLfmG=f|bPi=7Iv5jg70?7D`$5%xwnMTl_~kjnqD{iJnoFc&05sFC~4R*noWm6(v=J z2-qly9poBzhzHfjNN2DEd%q-_821!uoB4iJj#A%OL%q{{2Wz;HIk}~EuKX`g*<&A; zG^W7=#SJrRCFn5nfB}L)x_%tTK*W<7oJc&Pm~@BJ1u|libN*2wrP_X)_SLN{6#F_0 
z!h#0@{zb~*$E)^*5PV>(g*!^)*TWOlj=u`Hi(k|f7*bo=z%F=@Ut<^7g=}j8)Ui-1g z0*fiDqb1=UWw+Zc}Vev9bxTQn)a%j3f$Wd8?sK~y%ayc8alJNlGzn;T4SCIXvkx-{rWCD z6ogapavOu{f&=Qh3L}bKEeD6EsQs#VN@g>3bRA@bMK*z$_cqeya%(B^Zm41ts!0v5 z$Eg(qxC^E2W-=6MjEPY+N_O?QDV_pcG4hg^Eq0K61yn=5MC_aq*+|{8w+AW!CeS}D zteV+TU8q=0Vui`YoyW6gP~%iRoxah+x@DJXg+sW&28b}sLkM>Qe``S8NXVt!NWlo9O?q4=aQq! zhBKVt5|J*W(g8L)NLJm$U7HZv1U~^LohvXXO)s2#-cH|>@6bx(^hYKN>h|C{%vHZf z#_gk2??6-%d&KIkjk0!Y!yZxl4BKE(JTM6CC(-62xWSR-kefoMpRh`^$e;~JoI6L6 zfXL!m%#o6FPnUDCbp5f6R)CI}`b`yWm>wp>Rv}+eq13t{K1Zz&X=DfPxZ0g0D2y~x zbdVK~uJ3>{gvmVJ@2VRh%9_{w7MxP6FKC`?eT_K&Y{b>!#<0 z*|3Tki_;aqP_i{a}B0 zcT+s@4J1F3`G_0E9mvu~y074ia+|UrvBxddngY$YwSvm>Yt;(;MJQ#ugh^E%P87Yj zOKq#%v)R{fsjt|=AQ$%>2?VDR_#Ur=Xn<`rN~xF)?}86G1?T`xz3JFYXQQ=I7Crx| z!gfi?O%!a_Hrdi*Eh)NcKaOD0V*S|YKq`x)^}2pPLd6z*{{dx3xh$zV`>mK$X=DitW%I+ zX4z$3TNT|0bQ-QM; zBSmYg%A+hF`z2KZ@LK7+;}DQTHj6=>ucj3p&sFWuc}PtyeaFm3b$OL>eOor0Z^(t+Jlg462Zno z9yRe4-IOWYgh^BR9@VCrfA}w{8-hxq78mrm&@0&ghk!;LeWb%ai#0%at`V1Hw_beF zUcbSZ+bOYMWgC?AXTo!UC_(3p;Yimi}18;h%D;lbIFkF}h~ zxWRgHK~;^d_$$~giYwNP`5h&%hO_~0x-GWcrYJW?Oz-%4sS*lp{E7&;Yqus0+kGsN z>D=)ZLgwbs{R(ezf5*~;5gcG1bIq6tfi`g~C$Xs!^;j9w*)FGv?KY*eOj6scG;&CQ zCff2%TT^T!S5b>)VIdve#&eu(+D9S6r-sj4!}35u6rUn?z`}Y6WetdL!mSI|D2Smj z0dKY;!fCkh0+(O*EcwOq1wS!Yua!$NJC3=yt}cf9-qkcP6b?oNKCW%c5VDsqH)G(6 zo*wpMq&T0AoaN?yJz+vvt&aH#e~>Wxe5X`>a|~{svhTiGijU-Ai^2wo_TYd`y#X=tA_GxAC0Sg|Q#O5oGF_I;{vC21r8x)XkMkj;)O$Z$bQpV~w(k z$|^lv+E|);Csbya6u1qg^1>63w9m3(StS6768zlqHXIBK;#mof$0R>44?{R7UEZNG zG2zi7&z>c}x{ifVZY|qvUuTsmHy0D>QtiQFBKDytAJI|s^!xUsd0Vl>*Y4psZpF1Jmpi6jJr%J(LjJ&224N%j)Y&KVfw+AM1iDAIAyb#5W}^E}IsIzr z*aK{;LoX7jtKMOzZR*}7uTaJgIFHODdlc!`t|&0|QYm6A8E%l&MjS!W6!V4k1x$io zw|C!xc+8=JhgoFA#h;|1-L@x@a%@AmmYO9~{sc#MoPgJA*OHFJ^i5wue2KIw^py(y zL==#~o?N`3g5!1qy4zIM z4o6muDYAVsFa_6ec5J+aS?Ca#;wa?!V_PL>2QPnnKqq$lhA=lgt~hG7eA?n1kk#tC z-!n~>yF?_tjw(4i4hEGHnSi}fj!keQCJ0n=Es0BRPKC7bjhP~%l`~V1xh+c1h(R`D z?o{dkrs@I@MCL~JXXpE$$ zs{@;{DVcRIg#y9GV044-+o3tcd_D{j9*&pZ90{ZZw}X0v}bl~N_Z)y|3XNPiWgd;lM+Ce6y$ z@jFHcK(tiHi+nLgCrb@{#)Mj~ba6`XL`wGngsD9+=M`QpX` zuD2@UnFNT0Z=WV1N9FhY;i6d0gxfz;K4S-f2rD$%WWP~vMwCmfbuy9KvTBKOXAcdU z)1{a&7ACsg!95V{oWOTq0f8OE1>P-AVCDVvxm~e!BCgDtiNQ%7rx2aGYF&+!K)x3G zA3d|@Hh{_qSyfBdXLB&rY7aPg&Oa=3P>OKwH0-aXjM0&5=qt8o(rWRSBWJ#-`I{ zqEd&7Aer3CLi4q_xloqu1$-Bp3N_1YZ50IvO|mbdB)8nCc!@OCD<)0ULx<@V!5JyC zxt15gK(;8J(`1usj<%B|MPgvtPnCq`3hu0TV_bRId{t-Z0gkrHp@f*NpHu(HOj(Iy%+^y!?dVn z5w7{tLfMG%u8Y>1KnAW>wI1Q&w(6&kpDACCEvMv)T9l`^Z{7vTKEkE35w1v6ZC7cs zb6^pT-=eyIZQCny3$4~{C8l5!`C4p^0ND5AH~>r~uc^ruc3yNJyE^nUEDwC6VMF6kU&3?vU zM&hGKN#!HODMwvgcCeBVHIuYOlTK8Y%BKvAF18RZSx)dj!0iD#vkjmnBwb#sA`B5x zQ!t#i*w#B`E&j@l2Ev`V0@Sv_4woR<{^)#!P*EHQFfRUv5t2^D4RotZqdc1f_x+g_ z))F38l!u_oOWN5pi#1lEMXL#!X?RB_Iz4%4h#=l@2hJNhW?L3#!u? 
zE58P=ST=WKjO2-8eFHYeXV%%C;b1F%u}PlV_|SnXc?hq(5~HS+?YfQnjhv;Y+=< zG>8dr1}sP$hx0H^T)+u3ur(gLO-Q0xG9Z*oJjz8IC&WN|3_ad3p-easKniNDlNdG5 z^|mRGGsV6xd$wX?kJuAhSWJs6H#SyR>^IyJ6$;C+rY2TIl6=JT-&YrqDRgB@2Me^0RMpO1LiAZW*Nm~(RB zp>Q1RuN^h6OBV`$2;QTe+Kb8+3{kP@>rX;IH!0*wBe#zY8jh<{v`r>nD+@7;YBI-a zA`hs^2xjRbRK}xy0aL&FhRz)96^~-7nn`8*9wiPY35_BPU6o=nF%wnZEYSh2?wOH>kR z#tSjZDF~592Xr235YlG5p1jHFUfi&GwD!8?VcfL%h#R&Ei|8+^PV`f}m59uXwjuPO z2#v(+NdOOd-|F@RVY)kSGZXl1Z1gZEX2Zg#QT$@qX9dd$A@{Hm!NR1|GJWlkn zu`0&N*r-r!6EBLicJ*4gc*UNkz*Ti%;`N6+G^J-$6alw2sUe7qCPS44OjH!J$s|s< z@Qew23z;)H2NoZ?VNO5}CI$2Y`|Fd()WC@i<2k}zlS8(|4!S8W?gMrHY$%vbgwVTF zy3N`_#8!8Inlpk29d(q6?|zepeO@%YTAeJjO}@^o=Eg#Ej)Tw_3Ch7$dR@`;?~k!I z^JO)$a%<9|n>Vr-QpJ(EA(*zW{rl8LgfRB8J&MDHpBuIVc+;8Pm2GVLtmziEFDX56 zeK8jB4!EUE%uvO0Q)gqRxcmwr=OVb0+=P3eCO3B+d8m!+r`ujXt*823183I~#@(>x zSM5~W9b8I;xJ+k)QVVcbdTi6IAjU^ z64L>^bjgMT^hUMK+W**orPGY5(Y+U}Hq^sR6>Th4n)3*CB%6m3O;z5xl#&>4yD0-G&O7q2V8z*;}J-$5iS4vshJjA)H zO=xWZ5{WA>+nWes()W6l5JlVElEj@p#f^BqernUPpd_T ziYJgXrCPsjrc#sZwSQwggZ2nQ%(rIfw+rjHA-OGdY^DRKmv}YtEgA<2Fq*`h7r_yT zAG`kIrU3wo?ZGmBIkZ`iKkFil-_VX?Ag?oivn;O!vUj}otPv?&7HhJFvA6Fq6#?8C zCsNKqd>{Q;9rEtTB=za?$Ye$UBW+8=8W&$UK`qN%-FYEfyttQC(p4xlG2QiYy=^by zYO{q@2nmPKe$>T8s3b;+)I!@6ifDHiI-RwurRbwN0hR&3a?A#_TsEzsBU4X+sX?`J z$Kh{dfXJK&x!M~UvmqBG!-O(UZM;}wWxBOxhg+#glmF48{Evrwd(-{#4#5}VJ&ZV; zV>_#M~%vtu5F?sk)-J@U{=q!nIb1P379Pa?zHuU)~J?S(gn+rSSO8G7R(koYGiO zj1HCt9p{EbyS({kd_#OjRxjbac4%jVDA#e*$U?Uo#GMHp?N7R1jtN;vX0oqKva6NU_&Y>?N3q`xAG*Z%=yxKE1tPOE~$PI z=aT~)!&D!oC?LaicK+!L@%Y{0ND3C~A)z2dXe7P12;>~NO$-MZ^UefANlV_zizpDd zW0q*DDyRS)YS2v&mI*E7m^hJ*em^o6$7k^%d%kK2zGM^S%wPb-(Dy}H23o@D`gbmy zWMp$f2vm~!)8sa!r63MLd@_WlUT$TJUT!wk*6q>?p}7r}MDB+T2QJ}>H+2v#+lHof z$Q<4q)~G2ypm29=$`VR-H?4Rr*4Z1z(AH{N`ebM9)yU__kLlR+h$dfn`e_?i&8f91 zU@`l(Fb|-+do*_VAJD97H~S(lwFq|Dg1{x>UrHOr`gG}033TDg zbDbbDO#+g!!>m<>%#QWz(NSVW&T+aPxs2rjjnWB~x`Ro|l@#0PPoFj2%&z z`|jbka}9Bvv(#}OQova3M$qSNzh~)eD)y~Cjw$0@tGR*g5FrwN3Q4D$#2!8|eGF_o zE30LG!5%;~exH6!tU>%n57qr*oAu-yryrb>>BK+cLw)cztRSE9Ujp4k+zWeGIcuCb z4rn$Y$^F&cl`jO)mDp22M+DAx3P*dzhE3<7i|l_(-|8O86u{!v+DqL+6n}Qu)f6s$~th)lFbkW<}|P zxDoLyP$uq$nMn3Aki+!QkB)L~vQx0iLEY!8O+;qpFwGheqcOtpz2brQ2J-dEwTvk5 zjT#0vxsu+RrYm%f`xz{9*bPxwhLFfA#p}Dck-{d!gIFocsnQ5XT#kuL{T3G5VB{VM z(msixbg}++UYxe{IJ}&381Z=r6KDx2sE`eKe(lR4S#k&iD_-BgEIxbWMZPI z3g<1VLSo(|tCpkQr1^6e03l#3i~p?x+qifgWH z*ft3E82i$nJ6F!x!w@=sHHWjvR>d4M)lAWt-xIuPJJgow;$2ln~cQP zac(i7wa@XNqW=rL5Mi$Oajn)Lb5YViDLuLrW5b5~qCTNhO-i3CZ^slknv^zA6lxK% zS@)D@H8{Yn1v8HKY=LZ|E~J!fa@bYe{-&a8^?}qr>N+3Z-;Dc%x&(yqalBK5>9LqJ z(x{H%D}(Qby>$t22Rmh{hGuM;2wf$ULRT@R92^vp-TCMp`z(I^B^w|2EHtG^EbZoy4Jv`S>q-&Y7@K|KfGQ#_5%QD@q)jUmAPmmY8_>|oGSh0 z;;$ICUQ3C4@)!IMTpu&jRzwnx*LsBVJxuWa0VNPNhq!f^ME0f^UnyQ7cD<~a&^`H> zu3;zN2)SZM29&>-_cU}d=5~~gW79dWJWYSX2PZT`{9ztaUlU=sT`TT}=|#Rh7-+Cw z-lgBsdfJp)r6h#21{mU^YpzNY8j(yz)UJp{}79 z+@KcWWG7@nm`ClZsx>u1!dJNy2Dp+FyFlSE7BbjIhu4cHAVM66hvWE;igKQ@q#vSl zgFZdjr_1;luIJ^3isSRzIwb|G@%;|unyVSn1ggO+TPVNbVe+j zSW*}|WzP#mU>eE{v_IpDA}!gu>l`EAU}J8FQ9wb~D3_7VnoZdvZfrl|_+Zc{ARjKR zYz0PXuTgzoq6{fCiGzcW9}^>du2UT=|9T?LvUT81u}NUhx>sWjD6Pu#bM22x)oLpI z#7pj?eTBa#M59G3RQJSje$?m$o5)VI=I%dGIU#t8SVTZ2JgoI$KP7~L4)3t2^+jQ^BYGhQ;@MNl%cdoBqT{Js`XhnGV3;>0 zi>w=qeQ8%QuWzS`u#K#p{aMdk^rW0uUMVky0wrxuHDt8!S(V>m9wBzz9OHMV@uk{u zNl^hH`g8-Uh1pMH6!7`kuR(kb;{e0+$K6*0Ik(DCIEprSQZHu<2s4?~ zI2nY%;@H3(oO-aRxyL*J&FUp0A(WYe&;2)tqrE89Mmc!*U-<2RF34@ak6^d`=D+gK zIDHHIiXbAUrd&Zlu7=FxDX4tD@5=T>vhhJI7uWca_m6%;WbM}`CN}}@a#6q)E+`Ih z+**q&ML?BQslIq4qK2l-r$8a#2t%{k5FG#=Loyo8|y%-&og=-vyNheu9g3u>I;N+Ji)NJRcz@a(Gz5@NmjK*9O`|87n*X(InU 
z|6Bg^mz((YFwYW1B32R*Nv=sWC*+paF(Jch>QR&s5b6|*h=Q55W=Q%R8I0^~7}s|Z z=1<ip-f9=P-T3T<@~l1k$%hz7pL{jO(j<{U zJ_#$4F>hhvaT=;0W9O*4^bcGFz$?@8*#qZ6TuB4YldBBkM43Z|0a3?$b^!^BcD6VD zvj7@w6COQX{>)yFB?B03KM|KnbxK}%q5Mf~7T6lvK;ySpV1W!BjtA@-A3pal)OcB= z#gS48B-)ypnfZ2k&(^nwd;^J6EvMi~3=gnzxsmL9H*)vur7<0IsT_`(W78K)F}<`N zxUjQi>c!;?S!Csgtqyfx$-1;X!Qmzyt{N*b%zLyEn}(c%xFX+GZek&cwVjEQJ@`7E z_JSHBkm)#+UKY^@B8wz6Lf+BNUe|4A%0s7Xn%3_d>#Os(+Dhh%7vsf}4No_ybU(Iw zO4g0f4ENQpQy`V?vMQNewn;Z>f}cuMp!5yr$j5Sl7>>d0Uz?1C3aU0}Og^X9AjNbOjzP}R zkKBIJ9&Eb9a^XqC$2@<*{=N+PF57I2+hxHf_*!|*{`K9AU3SX5DXfEy3}fIy8tOe@ zmmJL?VN>>(s3SkK4I`>BbneZ1mkpb`^?>ZFVy%6YX_p5!@7*%*xga$oeBx*;z|LXn zyaz3men}>9c(BB|u~7nRZ!T6OG;EE@RChk1=G}o??6lKUHz~h0#PAs(f&@v6+MP_h z&@RQbUrXl{Bb5lK{&hlq%2e(U7>P057s*)VArFtmFctoST9ttxwa)dsVggoUA?+|q)L zj6t#}sx=V{hUJ54qa?@KI6=Ftt;MF0r@Pom?3tV6tu3TKIWJ&97vmawovbuvlOd?_ zkR{xLzMnG`vrKd4oP@lr9ggd5koKQKomC-*k_2BzP7l_6$2m5*^;PV=FPic!|po+9Y&zy=4;H({TosWD@(5JWF|jNKYviruS` z+;NgbQM}n0;rV|45S)=EGnUV(=;6-}_k z=4L@h;}L~njUh@J{d(d*jwugfx^00lF|jWDQKicmp#8|*^J98y*)Gko5lQ!uQ3t)< zKC9?DfiU~*OP44U7Jqx`Dj9UP!F2G6pn(#>&{=r)_G#2M*md&KdrG1S>#Lorm<=gh z_ONRYzKtx}0BX}0`$1{z*X;SMh%nUHn>mbm?d=XJm!Zwhy3Qx~wvu8_e`Z>+p1rgC zXqmPr*GpT*jzJ^dfnrHc*Pa%XV~J2!6`>^QL>>EUj1`_G5oi*>Hi)%0`A9 zKYbA6!bqCHU5$C2UD+fg#k1PAx&`b8j%JL6LM+`$$`g}npK=-T)p-6oBtfbK+5xr1 zg~Jph77>>l+$)yolBhL!w1V+EbSQ;NW1y-YTkXzt!e9=jl#$(GA4lcjfMXq4K$}%J zYz#9ao=0h9IAyx6?49U)=+;qeMuBV8t-p`U=6lgUhHT;Ql_&q{B;}WiIaX9e)4^~q zEKFrHUAWM;32{g@ikVDs!z>n@|1m)KGE*C#(b{S|EtZRcOQeB~vQ zSYC7%p-Ll{Wg>*0uCSXMsHy*Fm@yB^oN0WymoHrjRoOb)K~~TNwo5pI)nqdfBdP(L zTnf>Syt3%IS?a~$uJS`}G8?q!8MC;WYfJreZ}rnPbN4L`g1HLFC4<@gk%tIC2kbj{GM33+*%%I=Ejba1&~9rYGc_s-1i2@ob6atYMF2(tI=({E@mHVlvCI@3Da(X79)5SKnwhMIjf?`$&)Z7R^Nf4xV3S6w#CYn1x_*EWj}#n^W6`K2!3o&& z(~2=kfo|J!b@IjSh$F=2oxw#hnQdS;nk~mK+OU>MIkbmxDbT0uck#~K9Ni&V&2g^a z##Mgr%-pcPlD4CpQ{}rPHBqH*BUb-9MZFkq-%&-i#>*vKD172kXqC3Km1DE}-IVOV zK|Iv1i_aB@G-?vC_az~&j|=8X90Tj;GdV2RSt&aa^1NBhP2yz5G#Zou z4Qne}xxjAR5IH zUbo?r=Nin3N7V0r$NnPk)^`F!5!9XwBi?};;P!MatORR<6|v+CQ$hF%>ss9?Hy091 zX0e7|{dM@oZj@9qdxUa8g1Imq4Y*cbzG7cfT+-b%F-m7VOqx)h#Vu{YsW5{oaa~Ny zAZI|UgpMa%vg5~&1XSt<8;e;TadV{_naXG-B)67IoTOx7>@A8O#d+sem9awc23stA z9=5Bp$n1&OzsVflhYi=WrRP*svhhdu_3GQA*Z@pynoqt%Gv$+C*;ka90uGe3r>IZE z%8h#w-EI(w!)ZzySU}l_p^ee7SB{-)0Q*4Jlh5jxs^l7*C#=}t< zn16TrcjfeNoB!Gv4=SOPT$Wg)Nt@OKO~4egX~+hq0=+%y z2|clY8Dl%Zo#XY%YNQJG2*v{sjoV-!e9BYY&O!sP#tRVG{q2|z&ITHR0`G_LYRm_z zrZJXrv4633-D0rus1081#bN+^WaTLjFh;CwVKq^V_gUYzPc3^j?%5{$s!+0~9=wSi zfW01;rYUfqmk>BSi0j?C#nPRX3xIOs>C`Fu4 zWja<$gvnrg=@wZ(HbS`@jQiVeY;Z92z&Q2b8dR&fmwCiu*-t$qj^I`mss>KEEtC^MXM&b1AS!dd=N)atZ6^zO!d?)bo0fcuIS zVum11*#DfVJWFl3QKKk7Qz1i0<52Mzlrc8~c` z_JV;t^b46_RjoZuRX3X^oc>zs(_Y(xKuNJd`uW6@wz`=9ssuOa$Hf7*I;{yB6eiqs z=V+nqxs`a_C^CB!P$ITajyI}kN!Rnd7J(UUjmR_V*~DtbOl&!JX=}sACcu}{E)|0M zk(reDO0rU5=-_aeybKOGVLnx=sW%X#y-IlacDZLqNUg(bfG!A|q{_)D+B9~`zgjz2-G~A!r6RN+I8jtgY?Y&S4FP9V# zuZibg99XP5Wfu6sZGEaU0sh9+Z`azPjI@xjcVFjB*+oKxxMqrCivCm%+8<0XaL}lN znNH$O40p#VN%Bc8IJsHQ^3K9Gp5S$eOgc^wbqUDaQ!J*XgH*CM3e7~Ij#6ecO)4(cQx!x`rl~ek z8~B@$K}rDv=+%mhF^9bx-*WXp@a@2nRjxYB0ok{~f`7j{?)33O4Ggb=MzyqG|oH@G^rhJi_FM@3l{*6r|W zMQqsD0R2Vj;*+NCUZo@#L(i((G+OrklGJ)b5-*+vvWamM~u z+5pQGQ$7O3y-S8tavq`7_M09=rk(Nas<)rP(`8DV=Erb9%WH{W5zmL1JjflI=7#cl zm8S2i@e9!xZKBCUWr-KJdql{j&8NEl48-fC_*Pa9c!w_7ueAegQy9u{9o!aP%o8;5z1vI&%*{l z=GFYhP5TLfqe@K3Gm2w0q}+ES(6+2sk=Ym(L6u6^j#X>w$UtLHc=5c>3K})0%2}Z_ zt($~5Da;#(kq7~&6Zx@swkR1MqkNNX3Jp|kWpC66asp!PF9Pji>CqJSZMDJ}II|tR zC^h;$ef-(-r!lo9Y%S0o_XW?Dx1X^Awa2Ap8?W3T!ZkLc?a>s(Co1M~pMvv!*QT!U 
zDf<@pEj2fEQ<(NbH!5T8_m!S(qoyAR3u0pTMS1V`gMb9*4t7B<<5d%a`*E>;6=(6d>sE|*jUmRO)PL%Pj-xF-B2 zb8;8T=2Xc(@mjPdZ3F0686d|QItA%r9G382N%1u%OjNjLbBbSM%TgE8#@bQ+#3Ze{ zxi&rggeP=*(CoM-$NauZw?ctq3TC!)NV_U8?j{*#Xafe-Z>x&GR@*~GKj?BiQlZQ z&%-fVm^WMM^)d|Ep%lvR%IP<4Y|5H-_)7e;Bp?SW_Q2igZe4Xvy>SRjT88mfsBTBv zu6Wh9k_y6)S?5hDxKI2+D5T9<)$MHc85BNTqWMu+Og&vD|AU)aaP7ssKzd;a@5o`d zFLP(1`}Vb#rf&`2%+d;TKkWj1kAL=ha}^9RFj?oCv2Z&Pb4c#Hx!S^gS+fE zrXUN>rYX)XbxYw<_A^MV2pj%r)oyZCR1xyX4qkY(tl6AdSoEO#(Sr7Qsq2?vIXevx%DfFzb~ME-Oa$9y&e_Dup5~6WPXgN`V+jVlqi9de2VAHYEJ|7& z0gK90LTZ*`w>HY1)%b}Z_bavvb8nT61^W|if6B^22+|S<2DYP{O{^m&&D?AfLgwmw zA9AQ2MHKMwq=o+&Fdq>Xn7g&JQcjJeiLjh%x3+Zg*;8n$j+VsGwUq`puDVRVSU|S~ zW^e~boJx3oGZ}_l$Qb@L#n7I)Ot51@?GP8nP8p5w#|toC{xbr~1ezUH*X$9f>MkWk zN-~Nk#2J8M!%NvDC5XD`wwuDeQua?%rRj3KmEQ1`x)%gP$lVD?yUoK`2YX!IhHmq6)U_>t%c#+3i;;+B{SCCORL&ah=JEUnI*kDOj*I zf-z1%;x#h4g@2Tn}@JdQK#7jxSy_QP+OjP%Y zJJJr{8>JQdraoPO`JtCKYA=&`lU0g{g;vt5uO!aLCc(YX4=t49sEZ>JTS<9O`42M_ z*f~b3=^bYau42m*kEVyANbigoEJ7!+B8Kwxxo66S7(0IN@n{gopq7EJ^+ROl2Pxs% z=TqX*2PYpBs*Y#M6HXT}wU@d|Ep~7F^zjnlOQo^zq%G~W(ByMel z(B_K;|J!o%o7gOF1?&ZFmd)ZO%xQZKDA1mw1<8)ii>&o$4 z`M%Os#2rY12O<%~C#0jeI!p?On^G>iU}8+UvlfgHT5%~IJ>+l&Ta+!297S;?Q?Fd` za``67z-h|>!3v1=eiIKWq_0idFw?hRpv@#xp(W_2aPnJQ=XJt7gA^q}d2J zw$@e~CrDT+LZ!=UCm9>qD|{i;?kqf9IdGr=c8DkRP*-5QrTY9mI>a6i^t)@}GhR%>jbN z;uCrd#C7_ieV070k`2-9SgoEHhJ)tWTk^p5NK)0c417ynpf=`$eTC5vorbnBek6qq0SN ziPkSp+#yepz@6U!+(h?m-mT>|vV!alZSo>W1@8L$0-6x zMi6HdABN31d2&m-IFkjk0s@_THB$bz+)vv@!K?C4yg*Sro)DNMzzUpaz&V=N4iG@=?=f$$jbYO}Yy!*m-elt;W*9-S>t z1#vQ08tKP?v1yA1JKeP@a?k-^s5EUN3*tPy8}sFhn1Ef{!Y4-7+1>uBqD@k)ftiDF z)#9oRu4#K9Yz4FFfgQMW<A(`P8JGYi>|61ZAt*s8WG zjnC=|itZ@uOVP53Iw7x^xT8jpcyaAwDve`Dy~^|mYIWa2W_!o@}~r#mqLsv_O*8)>84&})?&&ypX}Mj%FPNC1_OkXWfND5m)LLi zSzSPp$ffzaW2LJ<#VMP90{FNN4%f_vM7nLl;}pV>E~R~heO6@(F4+&KkD+qd7sX@0wH`8n88sX~AdiNB zaUG>g;BdGb%C{(nu=US_&_rDME2RFfTM9}i(L@i4e<{%--%Q%lY8i~z`gkfZxlDkkoYv%=n z<-6Sb_R=bD$Ywg}mBidb%iAPiZmig#QJ-+r#=Tfw;x#`~sQW|k0N=YxGjoGSKP}8h zJSUL?(&}_7v%ofyW+s$=u%8H8CI$h0rpSQ!vCZybNyiYXN-s8*64^}<%5RsOy1!!f z*tSBvY^u8?Ys5Ge7OxN(_;F}L%pvv-MP@Qkiz+x>y=a(|*=UJ=g`T$2MV0bD`X z)tq3YAlZ3q3j9unA&ZP&N=a^+?MeqXt?O+7xyU)OGbrEn(rVec0Nm}~w{_m);jsxF zsEr_mHZ(eeR@NyPc$=u4L3#ajVj=LfXD5H3ZsI4MdE%bP%>90)NQP? 
[base85-encoded git binary patch payload omitted — not human-readable]
zwyvHqnAbrUD(n^Mr1jzMuA5*OhTBQWP`;$YVnVjncMN z^zltu$Zwv%7!Om(aAU*P{*9JNC4$&Xt!JbUN?P581eNL564s4s-TZPUxpssMnLM?y zcI~-^0XPkEKenH7$AlZ!9 z`QC0kWo_K4D;+Q|Xi5WJtU9j!!2_pg5(Drj_IifLZ4ruq>flX?$0ImJLnK$m>LryM zMFP1bu#lADO{!^9InCI9OIG7r!FZYhB)T$%HvKpg4NgPB0ALS@o|y#TnM~Vy(5vRK zXOozzE!0{y`xxZ1vmptU17bDyEoM7&+r9-=#7<(&L~)N?zme3Iwc`Ft1-zW^Zhdh% zR03@ql<7;=S86SXaldy65r9Y?{0ZZXA)H@p?;C9^G{Ab zhy?}Fcm#u-O=Ef5*~UrTd_4Mne6rN-JsjE)}&?Repf7p|t3u{uL zX^T*()jaY-RR$xB{nNDsaYuNp1P+D=iM>imytJDDBuK3Dd3zfNZO8=?b1no@oQD{= z5+gBKmOdeZXR!w3lSJvcYixNYCeFWTv(Ua|HWw-w@bCzG1+$4_EBn6p?A>Z3F?Ooi zpvD~=z1B^_K25{N+xELNY5!TFtYg^H205&oD z7LT;yf+?N6VRy_KlC5HcySAk9LHD734mxMEK)8RE2q{==`}NdyXS!<}@HOcJGdutLT+)pN=R zH%?91SCM$4ZQFj}SqbmqyG}qe*KJ?Fm7I%hE)RPMZaum=lL29#heE?EVx}H`WD|uv z4*&GQ)B7$u?KM{-I*gkM&qrNP;W~DJd!N_BAHhDyRGa)={M2BJVnyF*V%|nfn&+d~ z6;g=+9NP6XTHt$6ajCNY{xjbp;t_c!flb6^XY#lkq+IYz+(@OPN>}CYy$&VPuA5vp zuUxnFB(C&W*SfC?IC|SZDK?lNV^+&Rzmg(UC>{GaK~EO@1qp4slPL<}!j71Lsv_Sz z3O=dCUlX;D*Q)T;6%2hav_P-BFay|X zCG$!htBnWm@za5oE|$WdkM-Zt7v;|%#&av2On)QTf>_qgcCLf(fggxTOk;fK%eib_(BCtnRsw~XC-xi?FwS!Wv31Wh9PXHEaSLd|Ce9^u z#)uxKrV1PCMoF{bwBS2zt)GY;0@xWJU!Sj4_=Ky;IQJd#%k>jHls;Zt!B+2u-RsQI;5IF}pabWhhAmlZ@*(oF#=5zU}S-^2QaO zGL36$KO508a+KJFxVhOlrccav&WHhy)C#1#wn1XcMheQxeVB0r%GdyQtFT?#Fq_K> zYO>h&{_Iz7UGt8aB(*Z*9fl6jQO(xGXJILH0rwMyW1gE9ABcyAMZSbeCX3C$!~2aAmzm7Yzuk$W@7F-JVKGDTR;I5@yXbS*N)u`JNK<~ z_QY?No5-osM$eAuk`q%w--_SUzj?6lAhQj?s3!MBa8h_{Pkdh_$pVtXt@9qYjyH8d ztFMa;YWleD5C)<#M2Bs^Q8uNz+}_Z|8L}-aEvr7WpX4g#;0^jL90<%~AUGLusj#)M z9qF>x6{YwnH<^QTiPR1`_HrzE z6el0sN`llT%w<1eQCFIYduQA!G&-B$fbk-zv22U5#*zmn^hGizF@w1p5)fPoly0A{ z_3;3Z1)f9{S~#6#r1ay_=Qcw!m;GeHO;?h`#D-);U&M+YIlycrWFTy}Z4CNVStVj{7~qi0`#IV1x6hExe#Wv`_A z55_+GGR7QNb3|Yi6-GOGjj*sDNUn_?q9PCEp#ccOrZWIIo`|wrQwnwV6YgF@o3&sQ zMdYEQj@UzNh?p@DN!k#>X5i}VxwhsGRZX+~dWm$79-CHYtAFGyT34`K}8wGV zmsX7Mk?@hBjP zU;dAN7dy`VY*!vo@3ELoYQo(lMmW>`BRm0T+?^|8MBO7fG9k|;J<9>HovnSRT3bog z;s@Q5=86U22A5+Oe}PUB&#bL7q-I3419&DfN-s>3z}cI)RzEv=*OcVep+n<2eguYW z4UzTyaL5z>=S-#F_g<$**bBb$iZLI>8B>Lcs@`1&SND2c`;N6j&SS*Hf^wR*Bp&>@ zHM<4LV6!dIY)-!YER9SS$pKFs6ttFNro*%gTr_~7T^jZweXw=tZ~O5i6At{*)&U3o zOepNCH)8C=oe-azo~wpztkGA*P5JHKeyqrF4-O8s1-2LW9uDn~&F?u9zrI*$+>3`( z-EJaGg!{<8Fm=tDQ%2A3f8BN%qBZd5bVD4SN2=S_^quc}Gxynr z<$S#oC)jaqx4KB7fqiGI@5+s#a13K?O>t+1lq&oa%^G%`F%=>e?6UY8R$^$@OP5}} zbSXfEXFJ-)vqkpOPKH)N^rXEe@N@{eC|^v|q&c{oV(o;QSx!WxL@hzk*O%C&7* z3BC@~gU6W+c?PjY>Ju92xB4BTc=m13ouTkJ64dENPzY&#Ux>^1?b84aYmGP_i)UIO z-O86`F>1V z{n@$4?6(-czZiXC>I55UEn;_w9gJ-NhzS##FoO0MJwZ%-vAz%sI@2Y;kS=bahBx|; z?bC|LxIrQXFQ3Ax=eU>>a&L^qBpif?SV-U>qH@-M<<9H>bAl*+ciw*av5=h5!Gj z|4*%e$rUDy5@aTcSRQ*q2urbsc}GLj!d4z(V~8WkyTSY`pdC(0I5OyMfqTy~FU%A0 z3-!YlwMrd9<6a!R_MY7N^s`tdJu1NylM4w4EV|z&aOw)1;#g`sN`u!dFV^DbZEnFZ ze~@C>dz^{iC)=U|3nEZ@d$RDY#DC)fY8a9ZyfQVOmLZ4kU9+aHOqx&GN{Mz_Bg<6U zNym~96p^h|M!LX(<_ZgYM}tUbob2V*6e_|v=&t)EiNg7%Qfj6fas08J?5aJ@Vkny- zp0r=YH95|rA_umJQ#Zpb#Bhym!MVamis!>4em#~;J;xI~g#MQFugX}kgh^keDe zL@xy@In?+;0t}JT!=$sZ9UM9y1nS{8FWGk>RZNYX2h`gp8jH!BlQHN;NCD%gJ^8Zo4 z=N`n;Y|G&g&p#RcHuw*ehj+9F-u~ELj-4mF7L_%4P4H*NYabt{5(v_$J4&PaOYzfO zfaPxyZ%Zc7TqZlE5cePgb~xVI7RA)yvX9IEG*bB|qLp99Sk~GJCk-(sTp+fLYXjeF zz{EIL_KRkKnenWDdh%{4OND$7)MzQ0-&Jr9_Civl2dNB~JspF+q(*FGIi=+FVlAs% zJNM3h>|XS131)ifuMA`5lS4G==7f1=&)4Y=_C2U-*5c%oaNOyu0kREPs<{zk zS%BTtJ;_qF=Z_^kLk|a2E_kR4gzlD#D1cV+2n>;+h7^O$-T?8pQ@enH821c{uk;$X zsZh95B+YhWAsEzzX3;9fm&ZaGp=vg<4=UXg4Gd9j5Y$A$ttyeMK@MjdIu4vNs7Rnn=DP_2-DMVHOM&0b|tA& zF2Odb4Xs4FGtjDV6#?L~X>g2)KaJVvVk6+P&n)$|b&OC(Z(^e5+UeQ7b$1x!C948y z1(ENrXI;LtXwRjSqH>#Q8%WUZvoW?R&22AnUVj+J6V$6prFV0r5Dm3b4^>8NAP32K 
zQOW}&@`|k6lBnv_H=^3C$LQfo1y8LV@B`hbIxJ)}*fa$`pndM6%?&+LuEO`7IQ*1;|8exG z9ZV3aYVqpx6Wju&o)oAEv0_SyGFnWAOpFb-r!q;CB$Okb^?{{eik530ahS2AhG$O zceX?BBmIde6Pvc@lh^vw=si0DuV><0N?uS)+CcmDgQw@Sw8{=H>tfraOrEkzfW>W- zafv;MO~B}G94l`0Pd0(X3$7UtkS!sO%@ld(#`c&R`r`TDjy{O>Qq_g3L}GrZthq1=X$HQ81m2|PX@4U%zIp*n{Wl zP5UZUV<<%XY4q%Fx^fGzo`Xubh1d-n5I5|sbKE{WZSELbg+(D==C=CuoeUU_nD}>88ug00*1ho;h zI9rFQuzR&q3-M9iAS@cr%|VYfdzA(P+QAOFlEX^K5&K~;QzlpGUi5o`MA`tX)mwlJ z#^5DtiA$aq2PVd#Kp??Ow~|yBufSY_|#!<-2>Gvz<@Ccj>}IR zYJf9UVm)%K;v}o=4G_P8)V4XG<>O^9U3w{gK=s+`Gos`N&D^oSheG2G+w#Ygbg#w5^j>-H5p zw*i9k=$Rr?_PkI+m#k!*@wUnYQEz>BUM}DC)>)@f^!PNz&V#KZZCm`^K7u;Nd)cZ^ z+;<-kDA(0EN)&5T^z$Sn_SOwQ97OK?s+f53e5~9OtvLzuB}+pH2uvkgmN^2sxmbBk zu!IBjb3(n-?|iI+iz2s8SjEOu%0ITfzFCQru-M)kZs73YGiT>umz<@2L#YH`Z!(*H z-*)$&9Pg&xF5y&Hoy+V7&iGO6B@`R78f9bHvkBNorG;>4X(c&l92Tg9Y|SUHUAI|r zq6Gy^5bQ#(o$+@v!>NN$`4vh{;I4M_SFvs4tU;nbJL??HFrEMCn>4hTab1G5w}tm$D>Hhw znR1TYIq?|o816sM816fTH?>cIGfAAR`(hN>iTA1JCKqOwij43Rlw``|o zKSKao2Ryhv40ScWEa=n_yTg%CzKU%O*_bal4x8AySI?dEEhp_J`_F!dAVxPwaYwtl zQ2wQzGs(MLpquG#Z2ezi3OK$PY&Mhj!A7V|5V9;muM^{5-Y?w>(Jm%ctcy}bHnhb` zt2!UQTcfRpgAZ=roV;c7wwCg!qUd*!t%^;Y<%>!G?jYU$z}D0n!bkhyz9&IpcHB9% zS&)MXu@u37j0pYF_j`M`o(f6ldE0MdswCwlCHQ0UFD;)6uA z11*Dk^thH{iOGMO>92=#RUGBC8jFCj-S7~Sit<#GgIVs@EgR~3+bOJf@7MvYwdTht zdi&H~7^jym{lFeHdNy^Y-x-dfg)&;JCQUM}?azQqQXW&>=K1z(wZ9inZti=%D>O0b z;v0bC3I%xchKE|!8QRM?IXU<>I29=b3@HwnkD`n1X43^E2tDhL=*HW*JmRR$3P*&n z+3H9Oa&u(bQ*~1p=EK&j()3tO4Ih#~bqJ~m-=h}??Q%p=8P8rG6RU-=8w#bJGBqpE z@5f}>U#Nyb2)Y0exDtJMJQbEakZ*oCtBXmHR$$Oe+ro3sB}xo_&SjLY=P3l}ErlOnKs+kU7k(P9HA zcY0F2gy8o5p1yh2Q8whx|6mUwr}oIRB3xm(WU(M}ueeY9sSt04+ZC(WzFym1%f8kO zd)!Ypw9sFv74mK37j~nucb~D3zqBWj!hhsDg?}!I&vE^dk{|!2@6YbguQmZ257W>o zq7=No>pm5(+wW|lxX-DLGL_J#2Ys9kth}DroPw}n5OIMnh#ubkXAA*=P_FTb-)wl= zrWgd?plYR!nVw?-*Qb)14RCsFu!c_PfW+Yb=dYs*ii}Nq^#IUky88gDl##|n4yc+= z#}7_7e2|nwgW=ZJUX%_VPun_E|1*4yhrSl7Kxc}ARXjP{Nc=Kvp&a@LTa&Asrm@|_ z8N0Vovu#-2Bc+s3Mlxbj%h%2R`g(iUqq|Fe_m#P|Jn70w34vw52rcurPy&_N*?MC! zK$0D@CLtAAL|(dd`SP_XQ&;N8jk=2bNqx5N!Q%&SUa*y8n`e^Y2Cj85C`?~ANBC0+ zM470q;KJIv5lbx-w%>upaq=96OL7$F#?{H#f+MKdHrHWROhGir5lUb^`icnbHh@6g zW~4feB4svo5V_GjrMRu3>*VvJZ_l7i)O{kBRbIsb+h_6ylvR`tF-l~bUa2w)>?5SK zdrzLEf*`4NnNaamgjhrxu3$;-zBl^7uC!XPXw58mqz~_|=j?7|6?1{>I;3m1zUWQY zp^QEAvlX1HVpsO2>r+$S95P70)gV&+W52Q`q)ltj_x5#273>#GbzvcLWJId|$w~xn zD_r1M_U>aM-XeOBuyI6*~K?6U5cU2 zHPaIg%uEN;)7lo#H|CV zgUQS2*@KHVBC;UA7=3OVaqj6`_LBBE)L{ESGL0@^8A8Lv4wUf^9kr<-8T7e+i`AO* z@!R3vDKU_}TB20B7wgsUbcUFU_*iagt@lcN$xgbui^A;?z6RB6?T7YTTw>1k8F~~f zd?kZyPPpm|m8Qc&>F{pfReG_!I(-y)e#%m_9pfyAed24lF7J&z6A#}+CVpNXWSBLE zaEV=*1z&eonaS%nVvnoy%eF_{;x|GpMv;XQYMpEo!g?V%f|~9xyBf~ zy$ps%rN?bi_~5Uy0E6Ki5IxBPQvomOUzeVzO2*CvRx^;#LwEPiJ(#c;t-$2MsBOJT zt$3!CZ>13_tb;@3AEb)kDxVdaFrEeWns3)FA(! 
zdD5_&*4SB}42~D)FY0_z!~#|6R`@dXed!5Iu6}$>ZKT5inK+xmdgZVZ`^S-C;S@u?>x2!=Suqm|nkme?Qchg_b_b+% z{8ar;)uX-*gu{LV@|SZO1A_7;8nZtsnD1EE$!oX%Tc22|Nj%Md1C$I1WfzYMv7)gI zD+6IeNC8a>*a^~E@yKS+bzK8aAkz}sLre;bw%KU8z?N6Npc|p$7_Sjx^Ps?TwG{$X z3L^_--Ht*lJF~UgvRk}c-6I>5T7)Jow<8{waX2y0xejyo4=)5i zA(rJGP#5;zlCv#TD>g9X5JHHgwTPK@x}@$psd9om2%gj)4cP~SK@TdtN)`7 zw5R^?_Cx9EX`;MZuiD>(8XD}|-@ka_TpW*aR9Cn=`}I7%>JWPU zyc2-wJodE9%HM!sXIU7-)DTu`*+Wwn+8ZLaK4J{rUNS9DN=OMQsyX65|vVg976*?`lwj zBR@FojVv-@$w4BMm%(EUj&KN#lNz_pd7qQpjZ$R*WUAgIQe5d0ym=rrKyMh z2U&}8!g>yNQy|Djk5Y_`>#ZwJKMD&#FG-Wo)N zMbXu-x%2eWzxk)i%%+bxz*iC2EY;WdA0|@RsyCjBL8td zg~5jaKZ_Zl37RH`7MmHO7H!u6dLOtij)r$mKy1-%O(%YPo@Av{1n4wq$Z(B69@ zJIp3Bx3U@sa&l_Y9waAa9nPd!MZlH7IED#eGgH535`Y@JXUosQZq9hW5eNX%3ns7G z4|BLqNxQ)_=~|y#^V8&jbF`F_N=Ed6B2A1(8!MqVO2oQu+>43KPnTMWXLSf$Fiuux zKOR>$`Vta1g@`tk7J=!`mIqA#p1O2cY3zR?Dhf{kgs?>*aZ8fRM)O>c+WS9c=uzh>NH~(z`io^ zjr(ztY=|9HY~CSz%x)dLR8pu!J8{Hfg%#@MC}%LK>Y2t7qgWT}VU&^!?WznHX%#R$ zaa{5K&cNA1rF*JlC*Px*VoRoL<9~8DXB6&wV3C zN-z|`_bSBKQHZwmgagUR*-r|r4-?@{MfnIWVn@ldG(?GFdM#RkyIv*7h9#9mtpJZe z`@~f;pjmT!+>gtI0Urh>a+scib&U8Y+YqShHIt5Q+^4v_U~+Xn8ygkmgm3Q$f( z)hjrL_7qXg#sJtfx}cf>v8(p6E4Qw=yUvhQUtj%vXvNRAyF8PITOo>Dg%G@z^)#fd=ge9%Lg1#(Nmy^8LXvVRt&1 zaV7dtZr7Uj>C2a=CgWt7{F*cw9gX!hv1)C3*@OG2E@YIe#IqQY8d-P#E1oI$8 z4w(&F!hTyhpv8)Vm55(?!V_r6p{u0kA(6l2=F%XIdpWJPEDNOH=0y(8Wg7YFwP>q7~_Z#YicV8lJ1u$@7e;OLhw^PXFE_UhIk~t#khG= zcH))XNUo6#GKMCsSUmJloEs)?`}-b9m?=?-v?6?)TmEw!-00cW$;*kEpI9C7726Lm zpy?e7sim8)vRG@>qrKUy=O%3H9&Lxtc^xH5R9ltyz0x6^f~Pup+>x z@cx-Lp8rV7?;{&k4{BNa^TBZE*bWrdnpj#72;H4w4cO=1@fl1EEO?4vJ;*8Eidnmy?g;S;?NDrOZ$?Hv;-plKxgN2-SY1hXbA(as@yy*XL_1@x&$vB?JZOQnFTG@uMRr zcGs6y3BG_75^`=IuS75n3<-5HCV~sj$yn}L0x7n|GKUla5~wUT<6-HTV#|v6nwVY+ zHKc7mU*9WLJ+_F13goEAqiT;SAWKS!rjx~rwuj?Ey7#TK_=%g(*mV)SFOf7Gw4ek0 zD+qz3a_1TVENt(Ncbz|wRqDa~Wxr|G5_F;9sQSHxh8(~zf&Ur>>ZmXjtdJt1=M;@t z11h>1scT4n$fa`(#w-lBs|O|Z7ji-i_*fa3^C15T`-b2MwWjL|CfmMECm92}V2{N! z{`BOxu?K{!P$B7}921Sks}$dBCxmDBU|8~NF*5kz;(2f<%5zPp510^z&uk7T^Z3$m z&wZuWzIAcp8#e3<-!^d==WY@nWAa8SOqq|o%l?kXMaACwa2V_*gdS!cn2+vJfIF%1 z!65^CH|5h?;Ej_2ZsD&)^SzBmOq1*75xj#cF6uWGaN*$1=8ab8+Ddq&j z4H>e{4Cnz;7F6(JD8q~O2lF$7w1sS`sw%{bbC1zU^EBIT>@vdHCc)6-+Bx9R1C?Ve zygH_(?-K}!Zv*I0me}hAXzX{oSx!E*3Er*LTd^2TQEk#)%SI_Qc`62Det>%X<{@VM zUdY?Nu(gTG1~rB^kkXrkSKVqMQ(Ku1fl;SOH%VT7=YAXz%s+Bgy03k=>97L^C`Af6 zsMxFFRjL%~YOKv&m7Mk6n;{2v*Jh9V>2u{`8M<*UZVk7CRE7=vM|z|166mmGe@|UY z#dVe0GYta>LyZ9|_S%W3uiTIEt?r8&DV=1sxZUtoCKvC_%LK4hAnYd)i+gj;wm>&> zM(3_~y8$4wpn?BCfrJbY2&?Y18)5faDS z)ZgL5z}T!1pHA8TrXYLic%fJD0Bl{0pEZb(9M)$iaxYJwL`_4qc6~bWqN7D%N#c3! 
zo!E;~O@`H&pnIJyXX1t8G+sS}z2A4w5h~s>`pnjfws}R_j3vy(<<0XB{=r|s|I*cK zoA9j%_NS)%Jh~gmPlVzv2|iq!xDn*?6n8up7295o zz(z_@U3|UKs@k?v4xJnZ)$?qnpMhMR$#xSWq)Bq9Z>qM|y}oCG!ZjMW6ko?LHEFU9r{PlfR%TmRFIntc=`@_x*Mh%QhTTV!E?4`Nf{ zyr9IE$Y5BJLNm%DIgN7J7r|w&IxYmo2th6Hx6w}@x?7?gK|Wji{q+m zI4!}@Lv|wGI60M2;#6;D{=@4d;kz9(2O(9{oaeiVC5ZOnV6baS^9?JxW z!R?`BYyenc5xKx1z!Lzf>UV4*0kE$YozUNp7p+laQczY&ji`NTj*ZOV;Y1-WYK9C; zJfW(bn@*i?-kr)q2;pvf%j;Yz?{KS-T zO-Lbt2rx0ZO=nZyCCI%nSTReI<3)5Gqb&N1FF=)(i6+!fC~-9)t8(3KP0EU>I+yLU zQD}kI5V7tccxJY^T1r5{&UBnC27u--*RdTDMIOXhY1>Y?r<;iK?H?ozS^=U3(}DxkavOV2QLOr^>Ltwt6Rf8w@k2g z+aB_SW5)Am8(p{kz@w^Jvpd6UJuj8O{3+I5*@SQr7E$V%F2BL;v(`VdnNt>#I``e} z?U0;L$;oLj5L`w!=UX=u084v5L=}IMZio%;Esq{Kzh`Jcws&*fo&vB)Mzu8y#?)^G zR7R{PH1f%apgbFheW*8ZyyBBJv7?#kkig2sO z#6qud#Hhis_wrM23@IAe(rd+jPpYHYuooZiCZCn2HK~46-IfWZpiS*n9Q$$Al&t{{ z=+AO>&M`oT&^^Si3%5YZSAm-U-$y@*kDvSOt44q8^C~ybjtc+G7p5l6$H`75AA@z{bZ!amfv7Q^q9mQL8U(VpeXPsoE0n zdqR!00qr9$Pcn1!u84nR!o(zQAEeU<(++Gk$v1262LM2(h8P7hKO)8mvyL#pOb<@% zlD3SM;Q%4%9O;M!-n^1#(dn;Y?%Q;yK^vQ5FCGI2RM?6*W9@ZiYyo7NQX!MhUPp|W zLfoI6{L;7cL5g()72@u+4QiCNus7EzB(e)us1{By9BLt?Tx+M|9gsM$)GAdI{OuN)gE^&bf>P zdo%q>zW=t<4c@rseBa;wPKV2i;06 zg&8&u%q3DUw#pzgTeda0KIYO1cZmafTK;7pSgg#&Q1RuvNwI*mL=3nuvvmyODL9_R z78lh~9CDlj+u#FuO~yc08yMYJ_tp*6_s>{?*&_VE`{$V0Mt3)N)L5Bm*}68U+SHcD zHltu***JK@gSO_IrEKe-uwr=vw9f0m$g6Kbq~D0&#n8a1x9s zN}@W-*f6CC&DOLHgZSP4czl>n#7r+zWy*Qk1eL`Qti>iHGpUpkrg1#~yU|ynO@@04 zv2q#4MJRYe%xlqc~0tYKQgwuteub(14b0T~vd*i`zcr~Mm7Rm$0Ilo;S__;+)&YVqw z0J(@TFHLyn+G`=Xdvr~Oda>9z=&D~&DWXrmd#BH73yK}F;dCp{sm^KkP|0H(VF={+U22-)^1Q80AmHFb3 z{vH=*Q63Q+j=G^CQPgdZSyeaAood`~{ulqTk+FVMQ9D`|J%vC!+_VPbL}=hRr7|Bw z!YjBJM8)l47-eVxznmTuj0q{MU}*3#cEmPeUP%m(6hHDM&}s=>9@CY_OJz-3!1Jn3 zjQ!%eLqABlrvPMii)mVZM{XwPR4?-`hxu$aDGG>B2Kkjt3T+ zHZ07sZ=ZY7B=*t|F4{Hn;#2!NsLuq)Rv-2l-~zD=H_k{3A(T~&_GB8|9xB%m-w@YY znlmwGac-wCaHH)EAKQrj`o+Hv+wb(irsj|6QV?-?I@h0QGP+-<+rgH3q-~l@}ZaFv53oemuxnD2&Mg3%2=KJ-j)|P zxO5YI|volJ;$KV(qia@__|+!GHel$(N%a+w4fMSJUNG6OJb`g3tf%zw3q|pr-^YBKx~{rsMHi zdov+_Fqst&Im@#+U9H(v2VFNRbtBO1G3HqxKsL%P9+Lr-%}IB+r&*eU+N0)H>#;vo zhGpwh;w49cX$5#r; zZrn%>Knn3<5>LU9umEi2WFnGNwV;Y+jA0>LHtj(g+9Y1!cSV#>{l zhHA-FyJ{=0Bf2h7-QR_c&W1-|Cb~Nj|D~y(_)(JE2s3)JHfcX<%ee?Wf<2~0uWg`` zK{b*edfl-ivi3C`A%|g*g>g3#{M|>fJ5ABF`m2cbyBi?uiE>kJAH;MJ5(cT?&eMC4a|AdRI7_#bi&J-T^ zJsrdhL`)NVR9RfPZSw(G3$yeETN>a3;}lk)*4cHM{}BYq3}Czc{$9E8Vhd)#5ghVw zOsqQVMR2?sZufm_Mel7NFN)uDVS1w>xCrGbhN>hh=~m#v1GyH1_zW!)zj%~JUM-R!L;8Muwjf`J*@2#tq6p@_-#K?#xI0T7oHLK-bvdC*MV?vq1 zfl7^rf_R-h0E_N~DTub_y8^o%syrImdH_`qf^!e2Xz6BBRQU-uy> zjlr!T$Y)Ya#Q6>qVjtT`DECkYx7$zJ+gtH-OLbN5os6D6eLH@~hQRC>)-!&{DV{4V z%K2f+*0%b7y>}=sMHpkGl)@dI8Oagx-zZ)G=3mDOZEy^2g}}JCy`Ah`m_NJ8)L^c> z1P?beJ5Jra9FGDS;*(b)v2>%Ycezov({Z=!)HPtexe_NI=9)q_n#f$f zdENeq4l%Ptikx^4JOU}4Hb*$Em$(_^c7hC1qBZufvfGwYw;4a8JFq?f!o;hw;?Mt& z(Qi$Jio5lrlh5r-*hufkCmXoY@l$nqb;&>X@#xDK@dk5*YBp*L=u4cwxWI|Ck=%dr zygiYwJD;66WQ%YRK~MjkoSZL14w?{|yD3g%uo5-!!TkK>r}o8yz)G1Q#nqrxPU`~M zcrsI|(^4ZhwfD9)Q7l)yZMLgxibdJ;i-no?v$ixB--8pE&5lu5*10b2F-K(g{EIA5 zrq5+RAN5mc;B-`Opk0`F>034eT$ry;erGS`{_7}*X1^tq-rrAVV?{`ZT-VQSrBiL6 zmwOuINmEy+Y@=Eg$IFvIecyeP8&_`HM;=8JH}V>O)M$HCjXZ@YVQB9w6VJv>bo1Tk zfBWaZwJ{7!ug_5b_$QcaiVMY34ydL`z5)sbt!M**WzJT4LYM`6FiZ8tpMPjSkxgZ_ z?1Sn_o^%)~C;JUKfW7ViDQEJP&HrcqwGooo%_M*gavU~YPIMn0(F`C4c(y|?9yc3E zqvF-rvjAfZ$?C=VA_OJ5%UO?^!sZa}_CjT8&X(i*=UkMDVu2cHyn^073vX-DiZC@Z6y!ehc zFCy{Onh#mPKojxrvSc$6@M^VDi`#)-tLl#EM-V8@t;_Li zDoV;V6)V@IoIJYt0IsP!;0*;~%75YDArxZm1JxD#23M{~cNp(AK4Psd7^7FUs@IUsXp%Xn+bFX0BGi#$E*QOK4M zEl!v373u}u6ff9Bep1j3H+6BjXF@pN{6Fu5OXnZ?5bfri_a$+ 
zqqCE3GTv6I5Eq`A>Np)Imx#Asc-x+eZ^rDC_*JTIyNtscVtutw&(wf>G-@^&QNiBH6NvBBmlM4vG4-zZA30^vjg6#ATeRl!-RbZx+iR37pQ$>9nPiYm zR46RzSQIiTipuaM7ot*+vd7fNSM1`uLW`xlK`Bwy?!?wp_~$9v`cidabtd)etVyNA za<&zC>g{&oTA9gG(A^_b5f9$p>#x~qAgv$6N7*7Eu@qCe+qB9*F&7dmjd)_Sw!X{a z94cKBPgq&0+neg0v{2w70mRsz6Mi~t{mv~f&opCPRH9SnY7-bA0_)sSNQM3abr$b` zlF@%eHW0*Y5^=s#N`=Fs{7a}tXnXc#V}$)oXhifutUNiVI7!pTFhjAM9)=Y?dd!Xv zM(gfgw()#Zn6iC@Zqk@cB$0}7MRv?z&;FhleBHQm-Ou6fvB#aTZc?qQtdGs1duhb0 z!ZI0mcPL8V*0erXTdF4ib=}7Lll*xvLH)S+h`HNAy4Q7>)NzD66pe7pi+XDC?z#DmU^QI4-F3^U zKreo_Xd%lz*kk zMco?QOAgaN0<>le^15imEtI>GfQ95Z&SawTyD4FdBfQ?`18J2N=kgWbh|o%XZ=KwL zP59=N7la__?~nem9Dt8wI!o2*db~v3!zwio5reP_5n~7JiBnf^#c$=l;(Y`+s3=@) zstaAflJV@?E)Zr+1sgPHsOq|JY@Iu$nvk{eO*Kc8$76~GEi2NX0L_6PTYZWFlC-13YK*J3(FoQV^J4AMdug@Lb~cw3yK%8RQ(y8}6~-O8@CfHL zh=P_|9!Ova>>1!NDnAq?@pme}VjwarfJDdPA|I33H|epI&i-bi=>}xaEpQvUNbI#w4V4cEscr z8#vz;cT))$C&?mph2a&LO5{T^gLvH2*Io%} z!h!lD|9)kE1CkSaR&WJ-{TlvLj2{05$D^CWUu(JGsC3RiR&1pd=HB;bDSm3JHs)Ci zv;5Wb=bdnqkSC>P@r+r(?Y3QaYD;#e_*kW7(v8k~Y=C(idzXLW!z;7N_#j`0;S+gZMC&0_F0`(w;!;M)@)Ww?MI-4?PH`O zDrt1{eZec@m;3G1i-T)`Bnv4SP6C^Vl|-lPesRB3x->CnvAOL9HpI9S`mhS&5t;IA>hZ_^rS(VbZb3KO+tk zO9qtOHRO%+_f~~F6NWFkl0$xF@}R1Tgr|*F1gj^C@sH@aD+yf5eYp_ z5!{e(vy7jmi~%L2Ye`R+oA-kdr2(+IGjP|oXmma?tKe%%BLk#)KL)9bkn~}?4v1?e z^IYtlY1Nfk>q}wUQQ7M~wgoGF)i!j+#kgR))wSn5rywZQ3{H7bQBTAIpb|@Qu$$?< zy@%tnNqbYhRWbQ|@p;MGn{1d0eVMUc#3lFsPSRQm}HM2@up{jpaOL90xM zqm4&LfSw$*qDuBQen@YlFoHI)4h(r63sMp6SVoXE+CR^-5tdU9)cgLD5yj1leN$JX zRCqz>9TJ#>WHE_>ftE}uVS+(M^;@VK5v&Mc7R0H{3(0@dO(FM6t!W2-CPU{*hLw;| zEXmAF!+szfJV-ZLwc*?;7&k!aQZZ_syYGNhT5^MkYjX8=d?f*^zG_ca+Q&r^rTf9? zr?%h(UXsQrbj~S~SHfd)2a-g@pDlK~iTJs+T^fCPUE{ z@ptU?p}HIFBn-~o?oP*3(w81Y&xG3CY$K$bU3D94Z-4h;Qh1#{U@)m68yQUW((U%6 zuttdwn}9Qcs8CXr6K4B)_Vl0lzyh?Vh;JOmS?bPMqCJ-~=t&jN1Q~4$rIeyQq4;5n zS;ZpDvyoQfQ?`F9P4d%Bl8?`C6f>IC1N0q<7}TaqA{sg?GgIx&fft zijLO}tn{chOea3Qm9Y2=y@JXryiJXi*2jjtc2+6Vpv zH7C<`TUjCXQ*!F|9@}px_vzCM_QxmfWDQithI6b*qwscRATsF4Eu=fw?cM3qIP{26 zyc^cMIwd>E=I&+L-yNccLdMU%{Bo*|nU>JTD%QyqrPMvzL2hUa5F7^4t?&{W$Y*@J zqTs!~i9VBqhr`L6iY~5TS)mF|U}ReWqGz+AF|kF&Nu{ zHWY+TXN96tlcngXJ9*Qyw)9Z4hs`am+F>GOooGOz`P?~-vFLJw>w|t(9Qi1F!`{KB zwc+3_Vmii*kKjK_`79A@b46eeWWW}UTY0kL523*-8=U6Clxcq_`}X8dQn)97u(5Av zTm$(fFtA>=!1OCsQw>C}k)p-6vk{5^5;MRSj@stn^(H6xg1wdNUhT`dICJ8^*@6kU zWusVVdzySh@x8=xt0b8(4(9iJ0b1#d)7drx(u|Y0LdwW_2>+aNIRtq3`!wo_h&%Cl z;10U+ZTX-7XOxXJ+E$3`}GMrdT%E#->qsV4KkGB`VEqXcOn}+tQ-< zh6mfp)?u5mf5P&gCDR?kS0X4`6F~5F=%lzH$)273G8Pyfx(zaCZe$7oGx~ivOh9Ib zaHolrAem#xdyx1J_b~x--}~7hD)Fx$3Dus~aYb2*gH!Pi}D z8%!z>iHRx8y~oKnGn!pSGxqkb{p;QESRmw&TtFOka1Y^3shEvrv2n?Y%`4A9`c7@6 zU^?;klY9 zN!gX#+^u`G@Nv@&@;5JtpelD~CnCBWh+mZ&$#G9(kDC!R3cVsL>~TanS1S#tdBEZ% z02&rnF@7}Ks2wSsIUnG=e2CxjTmJpZHh=2M6+0DjKITk7h=;fZCJ4)!@E-j3u`^7Z z)pq9?RT+v4ZlBQ?_NvKiH$#w6p~Ib=bnk%v^FG`vK^u6wn(GIkyuSe$Z9-~e;j2Zhjw*9fQN1`Pc3~s@yRm4BOR&vjN>=ndZ>)JyKoecm3$$t!`UD&4KA2UB^ zIo*(`ksMT6UMn%4M>&izw!nmkcjKv5Ylb>lTzZ;_Zu# zSg6gmY;RY%s^W`KW<;K49X|}a0VH6rV^dC-RH8?&Dj(#JQDsd68!WU_991q;&&#?f z=7?FGmeBslypw+;|Ks1sfEoDnf(sdy|O$XIjMsJaZzY;sEOGF%n zwkBo>yE4`>AYN|29|M62+owH4d`rV`!Yc8x>F&DhOB+}X_VbViJmnPb1gv@BZpM&e zb2|#`yo$4a5i{DL0?QA+xQbJ9VO-W3DVzImz4=nQ(yOrZUboM}Q+7;5EaoV@N)T^xskIht!rya-w#jTi#b3%dWa0Ry$BB6rqCK`!ERl>8nOxfjT|i7Qw;SjSK>qZyR_H6 zxb&|s#MeqVET%WEjPI^boAx-r=LH9G&0CBFX#1;pqEGyF*yY-uQ-i>fDte_)WOmcB)aiYb!-UYo%u2X+MO3`=fY) z&L0V)_~L_?orIpSIa^wKbEmK! 
zNAoPzTJfYmvV~G8KCmnGX)nLOb>o&j5F`3Yj2+NpRE49{1L3fg(C#f#ns|{Wi zp%@~uvL)UxxKR96DH{+ag?$Ol3BJQGAwg@E^h=t4p>j7q+?0nIKQ7Ur+|O^H)C`Bm zb`CdEo7eGZbXhr^4)(Y7X%M{N|`FEyenajil>k z?x34pcG;VQ&Gx|YXLC9wegpwnrs@ou?K%I2EXz;<%mQ?8z5rI6O%t7k1Yd6t;Z4nk zP;(~K2E1uI{F`ro6sOXVwi2Uf4~LH(*7WiVCK#O1>W=lv{V(!6#Tzja2Uz4EgsG3m zTEfd{uXrhn>USX>5LXJTbeO6z)M8caP4Q%Iy^faB76Ck6ViY7!OYvO+0mmj0%q4Lg zk2r#e6;O0K@zrW2hC#exxTBWvG0rR3E+?FNeQBoNs8!wfL*;;J3;jB~x{z!!=yf7> zfuh->rn1+`?u3enb!6jx6*x6w{2QgwXbcdFDLD>%DImNkfr?GLop}O45wg=h+r8d) z|53c^E}(7u;w%w~6y!Txa~lSDZF0LoeidHg7!r0rlwNGQJxUn$q^CBhbPG+0F=7KK zjhd1ID;&AXr=^1|ua6nTf;5LuhTFbChzE4rVYTgbhbiBLpRGLMS9Z#wcTrM2b35TD zx#ig=T#4uER}^J1ntBkf75gn?*TG!zKrAPlI%l^1(B>z$n4u_M`Z$&!Q6XgGp>*r*<@X4ljjom0 zZBWW&#%y2{6YtpgapJFBwQ=z+kP<0jMsB(9!A2-cYaUdtR%YyJi>;M-QLCJ@c>2^8 zug}0pz&VXyZRL)Wo#O&#w2`P9b3g7S0Sd?t$A*pRaq=Bl?IzxU)w%h2l6p>jU>m_& z!}J(GE${Rk5xBgcL;{yxk=Arm77`RwnLwDm()al%qd$(`BUS(5$QwW$^#JH*Vh!3g z4t>M$K}uu46h7w^#Ip~j} zTD?w4>FcUJvlrp~QhH*OKrB8+EdFF-K{SoHt5`?l>l?I>@Wty7IsB*)+kPQ8{bB#W z!+vz~a~mSq0(FExG9~Cm>$COOH80j_tXGqy4$cL`lcTr|~I?E@nh(3tGZ# z%7WwJ;sI1?BY6G+an_&NPrKdS&b|xJqh2p$5r~lZp)&qtd1kfRiUmQAbBdKLX)lFc zG&xm^DV$*|!*B<%AG+JKBHJ9oAfGO^yB>Ivd6dL_vyBT8N3$~A1X_f)v&B>w@zMs+ zK**MPkJ_RFzs5>W6~>9VM-t+oA-bHcu3>kmNRFI{|~EpX~-j%3^0`&-9@ykkHj zJjY0ER7}Sz6l#vdUyW}_*FoH>_shh6MCj4@*%i%?dwprkd?AY4+WzH>urNj*DpcV;!4H@ z2eQ_2vr%XIC?-nJ4f}!G>*!L$F00})p4)^zh9*YNi$bdIcNlBWPQDm@YES5~i+d5w zOTiUj_iAG8fu8AMUoaR_ z+*(VO7gklg&Wlvuq*5bMl2_8&mTbwAWm&2Mn*Ucm=ht)2_cS4G*-oW4l{)ZJJKg>J zJ(u%*pL2iC!x>zsl~snne94<0Q(na)Vh-xl&%GQMEIs}9$6tN&Lv4F*kG4a3CYSy3 zo#8OaL_!9w;S!tLR>>;j_{fI1*&-HZ$^dQ`HOA%Cu^$K)S>51lYOUbn7wI3E=e2hf zJ99`rtEOmG@w`(uyDnj#k|i_;q?%&qe!^(O5xIprmXvUEf%eI%*>Ul6GnvqM@7SwO zl5_m?zfJG=CAOF>{M&!Rw~$Kba08sB`-F*ptHf!}AZ=s2<%HEG145A| zbiJP=QDR6W6|Jq`bRI`ci?Ye++RoF+oG?j>7V}t(b?Hoa0WD+OfokbaJROF`?D+S8 z`?a90l?2b`=jvdboK6(TMY9g5a8=$%&O;w`QiTkRa*l#}qxa{fd!1w?lw)Z6(DCz( zrVI#hJGsV@bkD_u)W^wTV77(bRxV2P@MzMU2s{^BIUmv_;xGcvC#8$Hg2^hzB)7-C zUJh|kK*KJ3(K6vv;#zHxw{?^*Z?LS`{{A$bh`)bt#o<5!wcxC*8Nyq`B(Bj`Om+G# z-@fJqx`Td(3^n8YZ{>pzr!H zaXk35)FOnBqObY0#L}mn7fHbfYl$=9PA#^^sPn#)eEOghrAeLtrcNzVK58sg74=2{xT$11NnX@+!LbTdd>w!Ki6E-9w zpeeC|EHNv1;2ZaKI+$x9u7wkU11z8|J7WlHvx}Voh@WjWOPj50&i4UJJwIaUq1;7%nby4*7xLWke`8ZC>`NI2=ZqCgl8tGU!0&*hvoL2AYA1z$;`WWuq zIwJMz%JPcO4Cd`*w)|Z^vvldA_9>py4t7N+T=9s8h`ZvQO~kMgP?2Z=TyC#uWMejw zBbiHX@7!4pmoLFvTXOMRl$Dd?SN=Zy4J*k<_}X(XJpZEq#JL`#5dMj#Fo%7HOG_91 z9efiKGaM>M7Ww2!t_sVFiW0U~A*DG1&@w4Cvvl^NzqrXs2mgD(p%@>8!EzAvWDk8e z%=_7~UHz*~&B;RC6Un85%8I2Mt`nU1saDImvto`b=12)ln1eg51W^Ea0C#jA@ABDm zTG_3>xxKKQ)hH3{l%&m}quw+{qmcA_War6G#*IRDX3*Ewv}oeR`gmuR@}Q#1HjWql zxM^ns{6`AA|MJOqd?3)I^&C)v!pfgMs7AONc{#j73yYq;W+Qr=1Y5DCPGFiJ5Wgj!XUKNfiO6( z4ni~Ow!s+@ln`WV#>-KJcpypJ3B}42y-2soUK@Vmj=F380}Y1!JXAj%^-jFo+%u>k zCmxpJVDdivCHULM-MhE^VJvx&D9Eg=gxb2ce)o<)w$etZoV1q7skILOzBVgGE1v&~ z;LREs&G!1+;%lH2;&m8&xXx`b}%_4|Wvm^TlaDgm|w(Z%VlNt?9rhj?rw1de! zCKsY$-L#v^zZkQbM^?66kGs7o`r#w%A^pZM@j(j8^VS%~4Zo3IwL;lgb+`uyb&!(| zA$6=(q%k`6=-JClF}9Wa%R=#l%aZWX`C{V*qax2F-8-_Nw^U#zcmT4o_hY2m;mQi;MzbhCb@&`6wldU=<{p%e6tw; z^1sb_tfj>>u^*E5fFciUEB$%{Y5!N=i{*1i+yjMk)FDudej*xi^VmYgnl_!@{L~@? 
zGT3gM(+Uj8()OWoJmk|3p|u;=#dTIlN^jNokK&*{ijpp1z=Mz0G)zgmciKy9)dC)j z_QD!w>pgIYMMLlQr`!%K8rr=CcBxxV6S#Qkg1^vvY=PKuT0d^I;?fXOcyraz72?~u z@Duk$lSXYiofGwu1ujHzJ1`xZ-0b*qEDdP4pqwbxjf$_h0=t6$pl8=OioTys7SM;3 z_4ptz;CWpa>}>Ym_W||ycBUhFT=4{;fB@VbV`S?7#ZL>5oHDYiNZG$-NmK0}O(<NOwI(?6d5wfATzTv1ITJL9Px zpZ~?%{xbrFjx-5|l$W_(qD^z1~UNtG%a1iL?kt zYA(vT&pCl{r{CFic6n@+$YG%0AAS}}171NYJ3Ftad)4`Jzi@Noo&H&2{M z(a@#y7yMpx+T6N{ z+fl!=eyKtO*QD5b+U4-@52w(LTX(Pe-*CBbjS|WLgF5=vLhCQRZ8jlPR?q9L%UqKDL7x>~P;mZ`xhN&^H(r*;#vuzmoEV+U)bccqw8c__@r%lrYJy z5**e{du`ilD)^}T#N4~@i&1+*2vhN~(Bxxdwn=zC;!s8MTfsG9Kniz0{okK_+X*;3 zesk)b;Z$W#&V$TD$#R(ILS1!HJLub=G5x${eVM0TUtU>%*>$+TNA|C;1uV52`Ig8f z?mHE$ft&4<2i#_H`tEeMof32|pTF#rBDD`0qIJL8$JR>{0CnwxHv~HEcp_yYd@Su< zexlU}DG)|2V(;VAFTIqmOSTJ0381UdeIm*hK^GZs;~?iCh{!mw;?jf}(pxKbhKXa; zY3Ss!^}Bj${irRUHQf^nr11G^OGOzh>9uDEpetlplRI}1R%H!nh==1}2GCZ`o| z3qtnxP26cZH~ZX-BIoV$B+ev};dS@s9W}@#EuU}UOawVvoTW%0J3y2VJF&OOe1Di! zA`-vD`N%Y-*?2hV%O3!ysWW8_Bm&*7m;?3Vj3)IQ}Nb^9hK1s)5@8)n{| z7$3fg?%rO#?E^ruAPN7!^UeD!3TV!beRv$d>Fw>wTr@RHX1TP9EX$w3mqui>jaT#NEaxaK&09y|pg^LA*>`&w8-X;ziPcW<3FGUeNhd9bWl~*Dd^LwY z03zK(D^`h|h(P4i?>_mS|Bmn_B*X;?_y^TCSshXL(DcEK(0ePngyW+QW{EkY#Guw$ z`iE$6J~&LPJQ%U1U1;h-+A&4O%Jiw#t1a}2?e7b zN`F{2erfu+fMu50<6+p6{Why2j{1@_Z5J}MNs->q7Qsi{NlDJwJU2-DQ%CLnzLP1j zm)OcMkfS|s1MLX&%4*MQtTfwA*Ik-fIWqSW59u|FTUij>zrm@^buUaR6th+1eV<5q zfE_%Bp1uxP-_!jEonW2o8{Duf>JRq2_wOu+O9zW4;bHmOn`i~*=ofolO{}>&nDqGO@oRV ztWwynx8ds|0A?aSG;OQ5cu+vh^)Y||fW z6E*E!?th$&t>T z0m;6hr!9hwT6bb7p_#P)N>j1IyM5{0WgqnH_>27mO=)vT>|E)Al$5y>gXWqYr~1XE z%Ra01FZy3^e_8EpaIzV1lE2FdrVZ_f)+oe5?BCpLuj{lG7F6HZH_w58rv6WR@YCD- zt>;eto|rYrWm9m$kT=MoAzWKIYXU>+qnVXl`e^oVeClgf@z4jqeAkJGHwiH>98Efp zW!*phhM8rWCq?{Gmy_{-2e*z!&4~i4cX`Y3WOBdtX``P59-LEyooH5DN0y&T>V%JX zYuFn{(DinkwUp4YaA_f@7VrC7;ek|_mRYDz>+l+%%j#pI31))&=;7t$Zgj%|2*X1~ zjT0c07os>uKA`$muXQ_$awIbiLt)sW*tXYyY5i#5&i|`1E#nnc6X-elKsN4H%U%s! 
zJzzji9=3Qd(&SIQ^ZThuu4nRTQWA5wJwD{>$nql`_9q7+y{Y~tXW_L$NhACv|NVmF zlizacnE0}kKL2bH5vy@42+r?!kje%LqrIV_ zG3A!PZdW1^i||xiPsH1^0-CW8VDl7Z%GbnSrChDtLIF)s#+m88?BFIc6{Z^6@9>Sst?QeA_9qqW&YTm7>L~#>|5w)KkCBR z(meTIyFfqGTHDniR`%DwX;Eei9M;{vPFlcm??Hqs2%h5*OAENN+ffF%X#?h}R^2ab zlhtJ|g!i#sybw;q_DEh5A-HUjoAPa089j3f$_~HR!Fukb;Krph7qoAg|$ zq-sTXh`DH`md(6NNooi+bofa>n}e&}l$(2%{2j^|B}?yKKZV%H50BGRharC_h$r94 z9IL}21jF+6Bs`jHx!<|J>@*@oqLEUKj| z%JMkXEGD^{GA6N;htuukQ8Hs^;hqpv6xH8b{XrK+J&lhrFSPSnw!+A4ckSX8M0A)9 zOXmaJcCIASefRs3B15e`WU;#3HbWI@DZLIoWwJ{e(8Gl^qL?~#VL$oGiqPGvv>-J2}FYwwQ!vp@(_^pP#-#a8HXg}@xR#$AT zg!E-wEgFXfO0*`2I~O2YcqO){)jcqxo3f>C9z~ z!<6n}R8sh2?lK=RMC}>wahJVI+pYdq9PF@6aWcfH`W!5JUT{$(zK*w*Iye3{0&P=6 zSm8=Roj*Q>TJycxkMyq9VPuB>?sf_qko?h}(F$ht-PzyF{=%OGvB_bxbpCui_U$L% zntfm5nVS)jc$T39H`ES&)M7s6KD1#GZ7LGNd%3g&rCPq@U6Alk5ev_tl=|NH;bux`U@y=Oa+p5ma(WGLzs@cm7vBf;m8 z5g;*hUz|QafBHuse@*;nZUvj?A?xCseUvP=KvMMaoPXP)P6~*|w9$#J)E3K3XCU`G z0fSZk`K%7vH&Xt3`{tY$e(rPd3}2b|b?Mx>t0%O4_0_*WB^TfDQIpUzjjE1Z+m}du zvzNl*c%nFCC|GPy(M>|okY9)s)MsUxRg*X-VQY24yJP=!B1TM`)RmEVqwuV<4rj-& z{f@uI?F)O!E+b{6HwlZvEW4kemVImXXQrHe{0IKlhABY~1+F`}=A*Yu-&mUv+a#5S z*J_iZrUa|~QB&?MYVFNXL_Ka?Wv5WW*?+N0u}*;NZ3xgmJ^jwde;~wgdzi;8uqr+> z%L0jKHDZ6@P0{d8e;KtN_m}pSZCk1yaQTSi1pnE*nxFRnNjddgXx<9D!XrWq+&W5W z#as&n42@}oc7&~gd&)^kVc*Mf-0jp?o`2DY+Ii5~cSHL67yMfbQEN2&z1D!(3m$7X z#Od&z8P-pJDxPLXj8PELeba$prkE>FbvYLp9Fo$;%6o@)tKs>h#@6)U4#&wg7!Ct+ z@STieZG5h4v!YsUSz-s{e8j25sX5!)+-M==7T3Qo-&fO=n?RhBRG>$=$`jLCRJ?I_ zBRx+A6l2~yuCdt6xli4Ad5%mnFxrh)SCMGupY)+ZO$)U#3?IH7I`h z@xRh=uCKo7DB2*A8ykC&+z|N9=2fnJK0OB9CF{td*MO#%7=VRGr#!O-BO-J;JB<>c8+l>?}IMAnkp-7Q#Dt zm`+@L7ysJ#eWBhbBW@tzS{9j3;owi{4MS*Amo2XMpz`kzKK_H*&!p-gtUuyvpi~y; zHg_)Hy_P)wq*f4d!)EnST~y^BhTlzx0+vBnn;Y+B!NW=c^M0Gd;)FmLKK?%yZTvf? zJN*3tbi;Y)-F`ZnU{XFh)8N}3;aPfm&jI(f!hAteZf{%rO`;bDWkIPFh6Xmn&b zxcUmC25XLkyZXukS_?;Bxjbfj+kyW&vLKmPxBKW~#vsK#&?3wc>$5SJ(_@&fAYkW1 zEu-3pNv${}fPb*REQ`c%vBv(% zq+2drLMfsxBYI0EmvKZ5G!Bx^=#d%x1K+CTfkO55g3ATGy#!rIjZmYaE9(@!#|l+PlMO z7&R7$n!<5r)|*uoe#6o_nQ@anSIYc@gw;lKK@neHRnXPAyF%w z#6i`gpsGEGmg&D-Rrm;Thc=G=?fcM*7N(L0m=^QXo*Msf6a5cbM*b;rLBF0;I|*kL ze%&^)%FzxNHuoL!I%{3`Zmp>AYwNeP8XU608zzm%^vNgRIepsaf8*Bt7Hp>uwEEtY zU&JS~Ori$X<+CYt`rPSPH6|;53O9mX_%N=mUSGSDuIydZ0Tv4CtXj!T?~ww!WNfQt zN-FFgwO7Bna6WV9V(4-JE#xF8;>0#^_kZ4-96rX^8pfj4)FGfeJfYm1mF`#4$9Z-2_!DQv1WJj1qWvjr)vtNmh^ zHwRbX0B4)w7x?9Q*i%2f|2ncx+Xg-tK8jX%{h_bWI;Rdc90umDvQ&B>7oW*pA>&h4 z>Amq0Lf-;+WX}j-B#~mAsZ@+b+|3js14@nWbsiAn^Ep|iRp#U$Nk<}yD1t(!zU(W( zh>tf{@817{uO{+k`Sn$ea~n4szFsW|V`&xC%z^Nt{-rh_J7U_k(>2~E-oJk{`*SCZ zRfUY7=x%2v>I!}#oT$bpoCxIg!_r3mgLG}zTiBBj#0KEF=0ue zx;|x7LM+%ZoM9q3oWgik5-aPJde{t3c$?tQx^EV?bCgL$99a-E6=8rAgOBjbQStEi zG(4`dr1DJ)1w)m2Z2^=XUpr2T^bW_#KG>uZoW-UBpq%z-Z3};63wsf z2)^;A51(jFa0V<=04qG&iB1k2nd>)?XJhsA3;8WKR_@-qdq39JI&<$6;l$qVJpA2i z)V_iDj}jQiWZ)!A6=O%z;okjQ?)teUpO)z;=qIh7f?rE_Z&@|I4y=eFTXf00qZ*GR z3FfrFWtS;`SKrvl5i%Y_C$}m)q8`>Pw@)T8JpH$`Uq1Pf&zF7NNxWp$n7t#CUG#A>P7WZ)j`{BFub=#x2DyTfVi6a8GryOl6PFrb7^yzz7h@ld zrn&~oqM`wNvNb5^EvCGCbU+Bj>P68A z(?!}Sv(KwdSW6z@)$w}{Tpp$VcBHTnJpGedh{oMcHqltXpEH+O6>wo=GhDt<8)+uEETmDAL3 zqsnRl+c9$1!C^Yxi&}f+6y!|$NIi?D2v*c>uoKr=;A$993(Xt7bSX-L zz%xQ(+=of@QlUUgVdI{hl-u!ECnjka$5rCvA#W@EiETV@ZtLZ=rrSg$Q~o2zI;7XF zcOD5Ux9;B3hey41BQm?&Jl3Hpaqk(_5~cc{~x zKVkyYH2>^|M+~v^7J>+bntc(>-1WcDUO9W#|HkqPhRzQ9o7oSYmA%0pY3oh+FfH?O zG)QI2HZ2I>oks9&{eVs6yqvZ8=}9EPV(5S4bb%d^T`QJHwR39I`EbsiJ$pv`$22ue za~8qJ(57JZK2h`??ZndSybbni)cm4mXW9$%gS{x1-kha>Pj`~%XI^)d7JPs=h_l({ zr~*lM@X$v{NR3st_*{-sa9}G1WIs=hY%w0omzt7NA!^)OyS}PV%iK`y3jkxJ)JXyu znisi;8ap>=ZN>cqU;f~o4&=_B!cgA6nuA%9q67G-L#KeSDM> 
z89CtI-a78{$5}lu7oAus?vh*UtIkfYbySMQPWw7pjSo;5C)`&ro_g*xzJ7N%mgB;h z&Vy+P^atK3W&1<~4(D~Us5e)Hg|AxBsk^yB=xyEmoKJx#|CaxL!GC|@?RRz}qpd}& zQmH6e(%V5f78AbSJJfnjIM$c^epZNtU7YBT6q%ogP%?(49W8hx?QVKf8GWc6$-q;1M}g8Fey)LrAMhlE<4kXO!z0%Ju$gEyl1avDag_GXV6;EoDeZgoSJ z(wW2|A`FRQy0W8Wk27gL>ktv>;Z`f%|;TjBu#IB?-Icq^w9rPm|?cUm*8&3G$9Lpg-p!K4dW70T~VO#KQ&HR?R zy|voQFc=GZ=mFG;?U?Je-Zl=i#VY0doQHv7OUcHILe7q5Kx0|w8_6M>Z6(2AeY!cn zx7??(G6+U~lnP#hLNwdG5E^IBTsb4r4t;*af1kMc;pW}9tbms~<7mFcO_XVG<;=(+ z*i0xTxBmye50F!;ad+)?Uja^9#r+Cr&Yp|Q9B#K&-V&ExyR))(ZS~HEzlf2LAaMg) z^pjY6G~9Za6B>!W4x^~z#xOSbriOcq+)1&Obz<;g$zHj9Il4t}@F_js=AUH77C{R0 z?sPEz(q=GgF#;9lS<}g__qq5h4H_LLmw4tbBBYnp<`pg-^R9`OlY@Rp5B+{~Esj|Z$ui7o6}WFv@a!Bx&dwmt={K;4^A#tS z00}QMozI6mo^UiW&I^*P$e8>$Bo9>t)%<#!%cEaEmk9Eum!5ywf3f24*@usE-v(7& zZNW`{hyb)hJ?VJXHA^EIr#K^pmQg42eK*(17!j>5E=9twc{-8_vO*ZaYFP?)pQT1p z=xaQkvK!xd=WX8`#4+^>2Y(Jj8Cbrt;j_cN_mkNV9SEfBN1hglLXfM4`yAZ~=Ky9Q z0lj2RFJHUH{Wnw>z z_#XR+fRFYOj?YMHMzPvU`uhHGey5hxld~FD2@Sbslhcac1U23!z}ajvEB;Q86ov`HyU<2K^*@X69?6ZG3GV1Fq`QV z-toLPzPvX?VHXsOW$bWvx>NSEZ>(?la#Q<1!|RR(52eoCI}rj4+8P=U-f4}8GZI+d zTkLXglq+(yQD6)fY}4~xZOU!(9EA+e0RgWLR#-Ruyt}YmUnAIgHfBlmd0hi+Llcv> zRtRZ6jWHKQgmSK$jLOsp(A>-k@k`nZj-?ysn*(?0noa#E75Az5dV>e*VaMw z$#+ivd4G1k-uqj(au>=)e)GDpIX=WuH?(_?ydj`K#z7X3H>aWg8~>nThlyP)H2a}z z6kqv_e{T_|-E_8s>7c%~Avw))YdGDUWZ8r|0DW^C%2R*(e?0kySOi#s%O*bepym9% zND{kzHdVntNnl8UdW+XGWv5jaFN?OAMTzW~6PAH!vdj%qd&!Q+?XgSAcO#q(9Xn8u zn6~f)5rk0c%;kfPC92t}&G0p>ucW-%+xGxjNR>FYhMI)ky(P{}|B25eMeeV0o-NT!KkxG8!ZYc>1ZiDET{H}TqPS&ruB zlfw=!M**kg@v*DJ#KgOxlCrF6`0KZ?YV~2FA@RVIc~%pb>+kOW_b2IWFa4T-J}w3h zuLT4?aI4WaqDqapz>9Ccq#p?*KI{bxvx);)xq|InXD3I(6`otUXdXwx26#o)G|m@a z1DH#?GYD0b%e<_Xmgu#CD>2*>mQUCwBzR-%D3T$2Eyj$`CQRVnj;aceVR0;!(-JzT zbGu|(UCp&7`!>6idIE)k1m|um; z2``@pm8b?u+%F$eaJ_>+0#e{TXFI|r41a%!Ee~V*JlneA8$i2qK(KL9jR0BTe`%T!+a_ zdbkxG?Eob)z_H~7azzwdP7J|Bx!D#z9`2lSn!qcDI*fnnRH_ZSKC!k?r$ye*d)yxS z@z=Z^4ifpFuRWipc=6JCpV6Dp2=Bf>wsbBXxH{~RM;S7D?f&OZqOT4G@?ce1@hXSf ze9(m9zd!q~vqU?j@uWSf@#If5=T&OJ?sM?S8-tT<#*ylkwm^k3QO&Kb)g-YoZ0|Gf zcReCvmnDo*S;ls|33d5>f|eYGrK}Y&iN#sO1D4gXK)&#w>Upkck@I8u^$nlL{4^WN zDcj%a_37tc_6bmQev;G+Z_y`ti0M^$f2qjMy?m{AZ^4JyN41f_qQ1hgHT|-xa$LAmDDfx2crQD^J_2 z;(ZM*L^qGWYRht|0t$WW>*)+6yvc?~X1DbwEb-sz&Pl8AcAn&KemActZP{)C2AzRZ zeChIK9{}u&W58B|CV~gXqQjn^+QEjiW|Nt=Mo_wWJ@GP;X4CSKF`_l+b_P8Zqvfy-IjOP(6>ou zR6vbOGjY#2rlJ3CZ#x(Sr$bPqIaVS~@xmp4mcm~o@`Tc77MtZ^s-tm_Et1MIAuMsN zBq2=v^c%hv?Z_>_XyXw69iuVt`Sg?75es(N1}E%9XEM>PJf-M-b>)T+c$ZKTC=lOh zV?(wdbTWR_dw|Qt@iW{^2~ivzhU)JhrN)=n?t9nfDmx<2A*m;K2XJ!D1(y^)lIHgH*50){U-W@O@hn}caO%fAmnc3Q*F8^YC| zV!S=B5VK%Ia?5fy?WAIwF3Q@QK6W}{@>p7v6X~-#QVO&|JF0nrgtHo0o+9NZbu(%;z`nYxUD0gN|Qo&w42MQV7qMk=Ey3c$N=YLE^ z?+oyXC>M*!xr50(2*^tfmO8&kz+5MC$O^Ql0X2?W7F+XGxqSA#)5u~F-EdKx`+6&K zFqkOUxcN&LFP!u3YZXDV-Up{Q5c$vM0U-_>Y9Vd{bg_hX0|F0VlWQ18Q|$*ZMnuVg z>)l^f^{_Tp|6_0bT(B|!O3_K!=hvj1N9*c zhv^^^3=8MB76qLSD}3@lo>)@9zIxVcJ`ft%=YmM)D2oWM>^uh|5hkVQW3q1c(rM9q z;M=5S&JcC6e5Vu&}A*MFP8W?>T7Rv@HpZCVyI9h99H;!WN92{t=f%}3zv~oW5+q+7g#J#s(`g(xGY_t|ja?d)oDW=w z!(ErD9rSf64x2~t8*;A&3bskkZ>Hnv9!nBZqW_RuLQdv8H&(O(xEycg)*I_p%G%dI}+dlGNoX8P3#!vFKpt_%;fVf|`=1*a_D3ORgkVW~=E zonK!RjAngG-b2=jIjE3pgDhLlFW1>SG_B;2e1P1SEh4V1T+{f+$(v(oO31JP0h0KK zCW*D^_vYv61V)-|FA5G`w%>~gA)c|$RLgGheXb5*Q)>wk$-_`it)vCT&C-4{c?+Y% zt)8?OZq#CwTuF5h8|b>uecsqu5r)z7e=+;9PnPS6BX#rI+ImEp@B<)PebKrPQ|-$g zjx?myR!%1kCh4A|6Ne>vTML#v-b#)h%0EnGFj$m@+l?e#h!i$-{h^$A!plra&JosWKxogd0vOGoS$bF2D z$QH%Q`ia1WOJvf+`Kn7RJA~*$zkiD>JqeWh+#e9VReXWFF8v1%>(ZwK2FGnm-=80X zoTU3{Ejx&kuxvEHJr@6Nu-(5-!}Zr@zK&sC83HQk$J?D*&rFx-I9iEQP276@z4IJu4f zUQ$nZ>3&FP$03j*wuisN2XD?q`(kJs|I=8_j_BQbH;Mp(_q=sYpXF8IE*ZQG9 
z-Nw1vqF?%km^a2*w#g~2om#Wv%(>heP=M(yG7)SDA_~L3guSaI6j=U-I@&;iq8LWf z%F$hbmCw;K$#M(a=hK?ULNDcFv~4Y0G78PFL_N2D>)vomb++RNlVs}5w$=WQoIO|s z>@)5$KA_!nB^i>`5}nPq{iQfO8v^4Q#syFGu3mWIMFECs+AWZdxd=Pu*`xY!;8f^+ zJqK{F6SGls2pdy|oyF~Nk%E3az*m!;w;R*om|VJb`t++>fg7Al#mPB|2TYVJ_bXa`3rxN~jwwoer=Oy9f`$UcYv!{j0&Jwt7*j@(%OI17 zRpFRaunX%EeK0Trh3HBIibSAX@HoIzu(z z6~CCDx@pxs%n+Z$?bW~=!G-KixQ6AZrq{U|&qJJTc;ch>f4^h@JG1}&*6dH6M)%>) zA}O_)wK&*%l;hK_bU04nV%hNmH#5Iozblg_veXx0Zng&dCp)P6+uNP}a2pfIz;++Y ze{CLq9N^)tw}P;Q6-9AexporQiF37|0&#ftrf?shv;Y23w7b`f8l#-+?oakQ2cqSP+M2+PbCuw7+;_cv z)Kk*9xj*rtT3CV2DWPPk3z6Kv7pEio$cu>G{)iaTyfJ@(cGwd~c8uP4#>R<@hf#~H9Lh{Pe$$dh7P3A!`{9h>aM1OGU$FxQHDhQPWm$P0wTO60Y{KSjG@n8rp=!((ZTmhG9Ohym9|NN!4nOa|0>& zqZj#S4zWuYE{G0xwg-Hud%N}fVe(lY&Z|gAcjo3qD66{_ij;VJ6o{Os+xZs=I$W|C z-o6^B2tVLIJ8<%&^xcpB$chiL(@n`v>+H|{ovU00h2O^4ojZ@V(x~pEYaEKo56lNh z`m;~WinC#*gvZt!#o@T*?^$G`qXXB^MI<}u1=esNLr$se4Nf8u+Wvx{H57PIiJ zV>5s0%aFcvvg<2HethEy^e-H8C!ZZv^N$Yh_LK8{``{SjPZt4AF@3I_ogVuyZ%Fe zzuD0b!)DvN>NF#L-}HlM!)EI5LZ_|t_n!aVZ-()Bzv-%(Ni+232crEe%`5(Grx}at zo&9F(zl}ApWjR?G`@`QDHJQM>xvyOxPj6pz?ds27{&S!=n^Di-q{EoWSdR~zm@7|Y z;s;Fn&A^A)*ZK|BM!(t913Rh19sl}~NHkqQ+D3YMEseV$KpLv|&caw<^u`Cxk-zy0 z3lJ+X%?RuIVss`=Uu!yS4*lKP@n#<7D2;60U!EOrH^a1Pw*2L%-)v^z{7+(qC%a8& z;=e&xbvPAX`F=TdvZCi%AO8W$rT&im>puI~=RXyNx=mj1v;IV1dttor#F2)~XlBRy zc-ZtO(L;Y>1MT)ijxJO4G106!v5p-ihra?v0^P$b7B(Ag(Sg|M zD6v^49c|L}TRx&;bKrkI)QYpwqs8GO^SCdv?l;4{D{g5zFe3l3*Yx%${!X`LgFYXq z5o1EcgphuzdCA|N9q(&Yj60CR-*>zn{(i+@===xDKeYY8pE=c>_OCnX5y%}>nTlBs z(<7ZkqJFcfF&!qpIc<6x0y4(?f^#R$bE36{UkA7BY^L>N(C5|ETC?wqK07{yZ3kw$ z!p7`4;ga`1t1+6mP;0#71Dg({Vf2%H=_IZX=3*`3x6u$^_eL647ixu=F=66;&{k3u zGMZ<{TT*p9O*gm%uS7S@q#vSUI898c(>&CqI^q~-nzNd^F$rJ&PIHjQnl`|Ka<$q>6@Qo#D_{?<_Y z?{st8znuzxNClKwwagF`hir^PEYJI)qks4Ro$RJx|4^D%p6=T|tb<0AI7Y{K+4`O68 z+9T7HHv#vyxolfZcEpGt$EII*O{}-2h?EPVkb7se$wkr%U5e-sevXWW2#BVXaOn& zz%&oy{XcAGf9M}Z=7SJ*_t@Amq8FOP;A}+FMuvx3m>n>s*gs}d2dh8eL;Ny39yEPl zmdCl5Elpz&)j^-I5z`7{7-~r#2C!)$Ng-5g5gDh)g7DkyC5AU_ zh7Y~HUY2U0-gkAoIkY?F{Mi;enH}#mBlQXAoJxKzt{dnXdjfx=~o!Z#VzKUo|y-tdW`*D8wPOf5D&Q?@_o97nHmqc0p3E;6!_A zm4;1tKKt7&fe(kN#QvEbKh!$ms)%)Zr+HVa0UHV7*8w3uGGHjKY)&k_0>6%ZY5G|D zX^h(EEtrce0Rn^ucVPHFVq{134>f+rzh||=(QC9&_b|H{WuF}yYl`+8TbHK3Ao}-_ z9U&pi^_>QZFHvoq-svOgCKmCa=}9@I?Ipc}hOzBa?(gqNw13zX$J}u9J_6Jk(+i`pv_v zN*Flx>lp7$8q&=SLFkyUioFs02BSRoCx}c9pn|Azl%YXVPrztb+{!GIw3uuKy=b;k ztl86#n%PHA&dzkfI+`?x!4JOFd?0#c>xey@=7HP|j{p|3X_xK@U;~V@_gY zrPsxcyfBPfGW+liSREP!3DEa0*}DDA zmH=s*0$bD{G_!md91BLgA6}QU%{k!*K0X?!$sRSmPVCGB+nH){3+|{-tm%&0nc(={ zAL}#XDya>qx1p_Kwu&wlTH7<5~a_%DCBua*+ zp7>(3}AH zbSHVzUlt+WY2J3mGCMZc&2$iO>A# zkV?i=`N5!MlnuU=*ZmspE8i+oCzhWT zF0nVW<6vvJu(&uCeDXL|(rb_+OJs9c`-3=ov!$xN@e-Jyu_v}CH|36VO{$VfMTM0W!F~Fd4(!g*# ziP|Vv`(f9^^SeoQj4%bJqp$?tZC-X_oNX@qD&UVw%1Jk=ftFL0dcxT~Nya5snQWJH z%|#y{W5}*$K2Bn#;P!zg#Irmft@Rfh`bg%I?SJ2Rv*RN?4$pdze(bZtuGX_COv7=~ zRLxs1;^HGtJQX-38lWSkEr8sg`czKWyVD3G>NR{%Jf$U^K-cklUE+j_$cj?XNUfghX)Eo{=T+VmTv8IXpo5 ziqOaG*n$G_=u2c+z2ulN6T|<8KH;*$cnj0-x#lyzE_a(d8V$)M9j%ffb`)HIh@VAY zoB$|z*^$UtLrUSwRU`0&|--t=#5#hDm0CjzbD#aip7J*=6>MBG!wly3K@%~ zDKEt%beUl+BZ}1H=omTkvc?C^`}+?S`q{=6+iK0~1$1sDuo90A7?{k;4S(8a2dN(V z-#qj=C->RrlE}^Chw}JJ^BJdxd6z=h>paZ#BOhHuH%Er@dXbkJGCy^aA8Q9;6EK?{ zuMiOOQNWa^V%PK>(=qv??X*ALYc^#)y!(No-pucPb!~Qa0Px-Bop<#U@Z-wut zNe}lw<(xjN3cs3P`+IGy%c*-L7RI2SepR&U9GKt9CvD-Set0F8n+^TI7U=sY3sQH3 zK@PWadck#MnP#@vOuON8zil*0VPwev=)}LBxgsP91%}~{V?l*CT5+_lzw0J;hbtt`Jd;TQ0Xk7FbHzv36C%cV$NQN?|diz*gwvW z-+8-vJ1}7?L(q-QjV-T(2#9K*H4Z=lY=`)UK3M?T$8NTndfEc}DGWBAOxa{wBy+6v zk^|fHno-D^9c`8MWa)P$MOZMGgi{E6Ab!EA5B>cY@@vDUYkhRnQS^-%#vPC}U=^n`Xz4 
zAl)>Y;{)LW#|e&sc3(S9(8Gft-)WZBQ_8v9BkLrm3p7u^=C9-FNs$sf5Mg@|=hjM( zr%1|_JhcrkO3D~OPNcWl?`F*cgEBNk3nQcqB=#IO8HY{ra`TR}1ooM^Y_*tX zPdh~rX{2T4M!Pr*MOzqQ2t4b}r*7g-3yo2|i6hZRg6}wc`Ov7$fDOIuTyw#nY(ql+ z0r4@`qJmxLgU`!*`hw#2crp)?vu4atMGT` zU2Pd8s2J%ZhH|I{HI6PDbcm+yQwM3`dtW@dVXhco2Ga^cEqJc^w0}=G3YEX0DYZ@@ zb#L))S!Zpc;C_-fI~GhzdXuSh-h-)RKNb1SHy#u* zK0{+=8XtarB&jdKR*n{Ip%Wx;U~B#OgR9LQA0`BnZHUnt3)E>AVwlJbf16$Hi@^ND z??X#jVIk=^Im}F*+Ek2kwOJQS>Fk)YqtFJ%NbryqTmBuSq3B~~%|PfKn`G#3_tRXl z`sKE=plygni#VO}!J|gSYS|V@Ex=Ve$sn>-@Y$bh&S}S49+$7i={6&N_RDATo83vUEwrqp zWWQ$tq)?SED+CPZkFN=r`?Pyaip84a$^m3YT@vTOIg%|7^pHS3=h7jvH%PEm^Mk>K zxphHdyiYe_Fhil>-8Oc?2Q>`-Jq*zS4xj#!FM_S3{|$}PUcJ-w!pCkZ@kJr*5u*+x z&$#+DnR4djPDZvdsQsiD8qlGD_lXxT8AH2rx@VnkWTAO~1D{^~v2OQI$3FQSlhh3!b^bu-i zh~opiV_I#Ls%_==;#oL}2ssUhgW--C1-kSovV$!2o~)X6HnrNdh+M2HA;{<+Qhd=@ z+Ysc^>L5oNFUq$4{sjtKAhD%1_=S4KEZnDR8X>| zqJN$p)b#geem&CRfks-&~w#2_ofN7!F9gqNE;4&0^Yf|JaWZ~bpe&d?k%q)9#zW{}LGu42E0aS3E{ zq57_N`4y|yIln=Nrwz4_u$tNXZshNN1pc=XRUqU^sC2*pcEk1(I<{ zWn^ix->B89e1a%QaVnosVE7o(aab~ciphQ6|7NELH-!0wDm35Njt7`_W(UU4lBGhV z#MVv07(4Kn^b&#z;br}>dC^e<;kK00IXk|VW9>8%&Tcpr8n%zcg7V>PGV0|xBxgCS<9PunEPcmu{+lGHU7Dd;p$vkL~Y(royKu;g2{he4WhiF%LC z!zD3^_B$M=Xp1?5CbA&WbVg|ayXMW8Ea|e0mL=lDefJ$$oR5O?0U^yzbrqm2qL}Bw zw6?-E$A{SE?Bw*_s%T$EIKd~L{lcgGc-Lq8^$}Pjz^9OJkfls-X1JPMGek3 z7k#N#vlrL=_`K~5x46ZaKDZ(4F0%v{}B ztW}@T(9)RNs#WivyZSp8uydz#tsP1$u1P#&3`e_N!;_zB_{z=r_J-w zv#imBBsa|X*Rg_DOoiga_#&GKO#$gE!`>|DQJ9$T&whUk=e zYdDs&K%7Es1COuCi%q9BG60F5d{vu-0QyM~aNJ;LNAHiPJC;+Fn5dmc!FX_zy!;6d zCw9a`hXDvl?%tiD?X)m~Op}g)3S-iVkMqrvuQ3x8C5Rw+%O+JFOzpY{Gvb79^1RIj zY)tRzA3ym8M-sesxIizOX-oKol*3t(GCx@5B(*W#Ab<}A*L;}9=-VLx=ccv~vTIS} z2;z}gHnotIFYH>g?#)>s&Hu(cil$$HK;D?~bBWVWabE6I{!?dfv)M{2?Ust0tu@!Y zzw_BhfRV8>!qD9KN*jgk5JKTrvzn-x_f7g9DO{5-se#8NG?QREQ51TkNY7Yu(m@TINw}Jz4Ve+54G(WNQix0_EP9x$j27V2eTj| zNr-Vuk_`KMPD$f4S*ax3X5bro?z=JzUZaNAgrR(Rv*YVc@;TYjMDv@8<|uE2a5ceh z1Vq#FQ>2_M8}GK|I<1Sr2aFCFYyuBtYTp!>5J0nIba-Dz1eu0`4be{~s-@Z|?QK14 z!rfg+LM)`0<^>ld^wjK0LXg)0@8(POVWGR&fXf)(3s293sRBI$=8)$_SX2A5f!5~U zWiJmW?owxXxf8dX+tPpKbBE!nr(L4=EU`ud@EAK$Zht&weSk^jXW2nm$B8-86%4jh zGr%I&CvC=@Lu}5@RF(wIoaNFaEQcrQK~}6mA$3c5)g%lp9gaB%27vcgwE~a7mUp~JNn=gIf5Hu(9rqVy)AWqOWL)~XhB-Fp}JYdUfHCY9?xBoah zzM%-l5_4)vMU#|Lx`IySsXUcmd(K{xYs<&;7gWmGG>tg}FD@+3$(RbhbDSeIq=A2fZ9vo*P4#_(dN z6v3gyECoZ(<*~xo3>`!)thJIBB z1wIo;>x<0`sdac4L!@AW%fxJ zfinyZ9}i)Mzy%5$X+UHaY5mCtq4V*CBo-~l9{SpT3Md%sjxZcYoFQ)aVG0JBW3fN# z`l!v^4`&>1a-)+L`6x8|DrQ zdRVfukAiDERLkN_b3urO&m9bg!EB7&!+hk3k~(Qm9-=j*FkU z7+y{YAct|6$d`IFHb*>FdXEK*&U=U03b9#DNenvYd`pg>W?P$VpraIW?Czquu|$Cf zwqUf0*iei;OJMV!e$!_J%gp)i#^^(wvDpxtLaFHoqh#u1Wy#Y*C?$*th8d@C?UbW; zX7crdwiIVS5Ivx{M zYcnx@F-^7%57k?He1l>D`UYcTG(iQ?WZ;(GIw?LOS}ZQGpQ4PGhTR_~mc`U~AbN(& zEhr}z@Nw)B@bW+JHgAY@tc0RMw&lWSmkO-W@K#dqP^e%kK4L?M4}n|))?Kupr}78e zdUv5<@NYQ*@N8rr1NIcRWya%0(bD>({Z-DQ=9E%4$^#vzMt77>jIU$T@iCi*=zrrS zN#n=-GS6a{P(CE;4OR_Ms2`7f$;c;v(NvUqOJ&i{l$ zY$sPB|4#Cp(4~3Qw1#$QKP?Y~80XOZ6qjpqhgrQHIa01P7qu^kR-`giWBe*kBNUA! 
zRPghpgpA9%1(F)s+71Wu>5(hmK8=jCYG@gD3tERI-TAA=;c{UDtirTPC}a_8KHwZ)Vq-oApw&-fE(g)@PGA4@EfqAq8GX}^I3~Usj zoSp>DkrnYu^Nu#kiFeAk=#r|!(XS$_UzUks@%pqThZZ!B18~?tREAj$+@J}hp&(3E zxsNDY2z6$?x49L`w3az5z2j8#X;E)wLGzf93Yj!rf(Dx17^?@4;j>$7)`{A|M^qU=33iP(2?vO6#OUuccyC?B_AIEaO zF~|%P|ACw=ks6F}Pg_VhYgCrT?D)2#Irvw6Ds-IW`2eE|)87aEiJB2pxzj()1vCtt z;gC1h;#>*#!Pd|0&8)qJ6~})cOpKjnhCravjIKdS%!hg7P9BHIqr9L`1IJ?MKtylb z(+<)KV9Q-Qjq4z=%WUGq?D(omk1zy_>v^+gE@@LD3xX43Yha;BfPrJOLkz#c4>93` z1Cpf&_sZ6U6*uTCinnPHex6#LoNDGN_d(^lptuEtJ)(Q%R5v0Bt$#HsIhKMj+DXdU zR*zguos}}aoFbQ&YUQ0miEx1}sFXKT0;#p1EL}bZ$60Q!IU!B;nnYqLibUEKBwPk7 zTW?nV732=;moHx{+I$OKYA*SoOU-3RF3>^yN<17d8G$mX{dkZquOKTpZ5gMueJHdZ zyw60Wx6K|ijaF^p_~*Oi&YK-SpTY_p6F1@KqeR2-X_+fTnhHhM5=Q{Uv9fJevcFHV zrT>XX#A#xl^kDU5h6u)7*sUep4*g{ z@Pa%?A24~x4NUNqPZ#}lL3;pfId(ih{1*GtN-3T8S8-A^@~95$~8P>O^Y!?85|4oI3J#o^Q^I z8!};1c^yY{&vw%2P4{Qe{(QH0(1u6+d2{zsN!I7YE*fP&CXL4q*~LzaTpenv$X9o_ z*-(2NhUtemy_j{)xl7AOV&z|OR0b=A;bZ+fnHW@<;lzs_k(Q^B%-~UvG$&5`q&O(D ze9jBh5BT7`avot)EV?;}itE$7@40M&cj4P(t-0aEYNPeCx?v}Qv)0MzgEr?x3YKDj zncVPJ<^>7;lZk-synL2#G9K%|$CgP`EENmv_>@kPfE4>VK)2&}91v<3K{J)~;v7j^ zlf{2;Q!~Tcyxc7Nd@22@oxR*(Gkta?u3pH#9`?PPYa(NIF@{#eNM=h$Ps-~C^GAUsqzwLL^`e`cZcW8G` zfz!KV*;wvQD-8Puj7>fK@0+kgcxV;+FY` z>jtNc)8&E=8K8bGh7-1iI0};E7Aw80wcSD?z$Gd=P^}Mn;G?x1IPc`LE)=jZjLuk+ z`t5_?>){C0^kDk)LmQbdezqCz#?B@}(DRG4ygKEt`~ROQWBJJkeOgby`tet(X!P+{ zeOTrY3s^Galxf3EI>~irN7WRF!&jVn$sYQ3$ebl*>)|TOwx6l42pNILk`YZG$pXZK zA)#~PIss3<@PL=Cq-8;G#hu~5TH0hMW%aZ>bfDWpp2NmD?dx`-xuQ+eYil*;A`Y%b ziTw^YE&Vp>mnV;CsIa>{Vdz4xQSvg_Cxed0yV_(&Z)Z%9A-wcvDkG|Vu{N5cgEVu? zg?KN?wXU&VrHh-{mC%hklxm2NX?c6`;D!}BadcZ@Y^_qOc#XC|zsR-MLw#o<^o==N zy9hnK2Ol!GFp1CJE=!$MY^Z0YsJoe?VEJlq5 z&y1asml4NFSB?7y3Qc~Nh#ChJ)m4|hu6=0UTUm|fkKIcGie+ze%hdWh48+BCYck}u z#yASi=&~$BQu>_qPycl@`_97XTK!Akz*OM={Se5w1W0<0kS8C}&PK_iBISG}B;$`` zvW9o>5tg)IEl^%yH}i6HGK6Y6Zr&S(9f!e`(z#BOw)-KmNGx_jw9G80iEP4kbD=gO1i~(CDJQ!F;))DM?BsVth8uokePy3Vk0|4 zOtvTmb85jgy-PaJrWk7*Sq47IYt`;c1+dWe`hnXV3cv#sXH3yt{$~--WO0fau{$02 zYj>I(zK|5q7f{V?kkxpmIp+lA0D{I&;4eATyU;8Pq{Cg~q~5%7s?cC?9`%y?h^=V6 z$op7nR{T$FYX9?JHviJwqCSX(fqA&%RU8@q5Ht{>EKI0z-^gykuUjecbeCTLaS}YO zFFzNopg_3=K?Dq|>D-;hVZ+`*n}V+DaxnhGSYPjJ_J=+gbJNJ34$4BYvvlKpa0!j! 
zVJ`N@IsF_#hqX+CpFT36$VGkM8#WMuBzF=4Z0X_z6LC?7#$Wb z2Ui5M6|?WXKV*QaJM6L57c#UlH5J7~*r{{_321v_H&D8*$R|y}wkf6igQCZ_*U83F zDL_k??}XRY%GR2N>WIf+jrfotszuYXPiML+r(4@~OOP$Q9?c7{wzst=Hr_HMwbXnHRF8~`xb4*im?2Jfqxv`k1@lv1 zTHM*OI<(U*nT~r_K%vheTFHvGunV3M?--;fA7f{qlTrCBvibD%@;toViTrwe6m2$5 z)|SbJ%9Zxe*&1swHF9J(0%R8|94{Zbh3P59h|=$x-`Ey#oDB`*PU8dY4L!8kV+fD* zd5g~S0N-LtaWW1$nKHC+*S9L2Z{!y6ftSzd;zz3)nM}fE(4!-^@H-@M;j;$^rjPQ$R!5wLTEl#zO~gEmy&RL{UYgkZ*2+_{<{Q zg_i1`G8s5p#aO8J@3bH{6rgJvTh|q;soJ*}I94FaZ<=>K{Jm@jU5H{H%%nGRO+>)m zLh~VEERGp_W`5%F>plU{2T~Fk_gHB-Vr<^aDl-C(Q&a)a?-558!?7xllo@!``_12+ z>#BKw{rmX4W!e8eI`%$&*~!j=q|SuxVa%lHC_pgu~U;xrIlJ85ReUunLg?J;jaA?8cED0bRB6h*M9A7+dB z9_UXKry2$uKwlhKsr1YONwS5E1haFgv(_@5gjsP{*_5;67n_%a`{AbffyjK*fXp+Z z1O`}_id&f>(N9D=#OPQ!asHJ1(3{!MG~5%dJ=P=L0s;BoQ?DrhY|?lEfl)T^;GZ!<)+8FgubrkeHB;Vi^0dk34+$@?axR&OSi zL@Z`Co+!+9oy@yrhMI(m(3V9me6NwcA=L>Xn9C)=$5Sa9vY&_oIcacDq2J2i+wS_E z2u^-UH@r;pv0B2{6NaTp5Uo##@vxYtvBIX;VNL5<6KDUEJ3xMwV(uJQm$}xazNw%mx})(U?`th*Be8D7eJ#2f#z}9&DY~QR9XcDQ;Tgq39H*@q zwUa!PV^3}qSF%e@!vR51H~B$0TNw(~ndY))F^`R?bUNh;yur5CXLfsg&?X%yGW&7L z83YF`u5BHjdA@nYwS4#Xw_yN(xV^kp2;iy__JaS|=5Fgp=*OP)A76cyYPdd5D+#=S zI#ZD$p0DY)-2fLZL)6B|I!QS2dKyWk*?I9w~H8Gvj1F0zo#S zzm1{L7Q)g3SSpOkZp~)J;<7#rJ5X~+zZo-)L)};}N{%r_C}Iq4zH|@(%bY|=&D2WD zV3nvpV!`oS;4M;KzV?9^b=AJ#e;Rpl}FSd5(Ox&1|_e|F%-L zc;~2x#|<+=dkOaM!^XLjGtu#m8LBe?>>i!4h~xn@k2j)WV0 zd;2@8U0`WeVvl^{VP8+Oi5>E-c!qtcoesJOdZHd+j(fS{6b_fXJTSw(|c&hhy8PAx40sbqpFTy&w#1?3RbHVau zd~)r{0rPpO&$hgY=4zwgN_cs#`Hc4itdo|o(cE11R?GuMhM&V3NjcEfpVZH1NdBm0 z>%i~4O-r-&*9HW5v)XhU@fAYwVsq)gi4El?L+ySnk=JtAW8~mGA6b9;)#k~c`08_J z*ZXHI;Y$F=3!YC)S9{TMe8`%I{y`v|3ZT(fdz-yCj==H8aZH4cX1Q3Z8DQ~Re_>5m zRj_zb%G%T^W4+#(d9(a)tEc2tn;qX|Xj(u76ZzBn6(?tCb5{Bxv)u(ZaT|R~i6@rZ zq^c2AnNt2IkG>~7vigGo0?kLqIs`V!R(sV+XzrM<6bX0HcCad)78mop#tWs<&G8#s z1^A;WrWo*T2c|YNd z^+;&HB|;q92G&lvpaW(fa^1B$N2IoD3G1qL+9S7gxR51FfIi_D5w@$eOynCG7m=sj zDpR4`ni7OR2e^NOy)Nk4OPC;E*{rbMBJsYIW;-4Jo6

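A minimal client-side sketch of the API-key flow introduced in the server.cpp diff below (an illustration, not part of the patch series; the key, prompt, and model path are hypothetical, and the default host/port from server_params are assumed):

import json
import urllib.request

# Assumes the server was started with something like:
#   ./server -m model.gguf --api-key sk-local-demo
# where "sk-local-demo" is a placeholder; any string passed to --api-key works.
req = urllib.request.Request(
    "http://127.0.0.1:8080/completion",
    data=json.dumps({"prompt": "Hello", "n_predict": 16}).encode("utf-8"),
    headers={
        "Content-Type": "application/json",
        "Authorization": "Bearer sk-local-demo",  # must match the --api-key value
    },
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))
# A request with a missing or mismatched key is rejected with
# 401 "Unauthorized: Invalid API Key" instead of being processed.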
diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 39d1e83d1..5f93dcb66 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -36,6 +36,7 @@ using json = nlohmann::json;
 struct server_params
 {
     std::string hostname = "127.0.0.1";
+    std::string api_key;
     std::string public_path = "examples/server/public";
     int32_t port = 8080;
     int32_t read_timeout = 600;
@@ -1953,6 +1954,7 @@ static void server_print_usage(const char *argv0, const gpt_params &params,
     printf("  --host                ip address to listen (default  (default: %s)\n", sparams.hostname.c_str());
     printf("  --port PORT           port to listen (default  (default: %d)\n", sparams.port);
     printf("  --path PUBLIC_PATH    path from which to serve static files (default %s)\n", sparams.public_path.c_str());
+    printf("  --api-key API_KEY     optional api key to enhance server security. If set, requests must include this key for access.\n");
     printf("  -to N, --timeout N    server read/write timeout in seconds (default: %d)\n", sparams.read_timeout);
     printf("  --embedding           enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled");
     printf("  -np N, --parallel N   number of slots for process requests (default: %d)\n", params.n_parallel);
@@ -2002,6 +2004,15 @@ static void server_params_parse(int argc, char **argv, server_params &sparams,
            }
            sparams.public_path = argv[i];
        }
+        else if (arg == "--api-key")
+        {
+            if (++i >= argc)
+            {
+                invalid_param = true;
+                break;
+            }
+            sparams.api_key = argv[i];
+        }
        else if (arg == "--timeout" || arg == "-to")
        {
            if (++i >= argc)
@@ -2669,6 +2680,32 @@ int main(int argc, char **argv)
 
    httplib::Server svr;
 
+    // Middleware for API key validation
+    auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool {
+        // If API key is not set, skip validation
+        if (sparams.api_key.empty()) {
+            return true;
+        }
+
+        // Check for API key in the header
+        auto auth_header = req.get_header_value("Authorization");
+        std::string prefix = "Bearer ";
+        if (auth_header.substr(0, prefix.size()) == prefix) {
+            std::string received_api_key = auth_header.substr(prefix.size());
+            if (received_api_key == sparams.api_key) {
+                return true; // API key is valid
+            }
+        }
+
+        // API key is invalid or not provided
+        res.set_content("Unauthorized: Invalid API Key", "text/plain");
+        res.status = 401; // Unauthorized
+
+        LOG_WARNING("Unauthorized: Invalid API Key", {});
+
+        return false;
+    };
+
    svr.set_default_headers({{"Server", "llama.cpp"},
                             {"Access-Control-Allow-Origin", "*"},
                             {"Access-Control-Allow-Headers", "content-type"}});
@@ -2711,8 +2748,11 @@ int main(int argc, char **argv)
            res.set_content(data.dump(), "application/json");
        });
 
-    svr.Post("/completion", [&llama](const httplib::Request &req, httplib::Response &res)
+    svr.Post("/completion", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res)
            {
+                if (!validate_api_key(req, res)) {
+                    return;
+                }
                json data = json::parse(req.body);
                const int task_id = llama.request_completion(data, false, false, -1);
                if (!json_value(data, "stream", false)) {
@@ -2799,8 +2839,11 @@ int main(int argc, char **argv)
        });
 
    // TODO: add mount point without "/v1" prefix -- how?
- svr.Post("/v1/chat/completions", [&llama](const httplib::Request &req, httplib::Response &res) + svr.Post("/v1/chat/completions", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + if (!validate_api_key(req, res)) { + return; + } json data = oaicompat_completion_params_parse(json::parse(req.body)); const int task_id = llama.request_completion(data, false, false, -1); @@ -2869,8 +2912,11 @@ int main(int argc, char **argv) } }); - svr.Post("/infill", [&llama](const httplib::Request &req, httplib::Response &res) + svr.Post("/infill", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + if (!validate_api_key(req, res)) { + return; + } json data = json::parse(req.body); const int task_id = llama.request_completion(data, true, false, -1); if (!json_value(data, "stream", false)) { @@ -3005,11 +3051,15 @@ int main(int argc, char **argv) svr.set_error_handler([](const httplib::Request &, httplib::Response &res) { + if (res.status == 401) + { + res.set_content("Unauthorized", "text/plain"); + } if (res.status == 400) { res.set_content("Invalid request", "text/plain"); } - else if (res.status != 500) + else if (res.status == 404) { res.set_content("File Not Found", "text/plain"); res.status = 404; @@ -3032,11 +3082,15 @@ int main(int argc, char **argv) // to make it ctrl+clickable: LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port); - LOG_INFO("HTTP server listening", { - {"hostname", sparams.hostname}, - {"port", sparams.port}, - }); + std::unordered_map log_data; + log_data["hostname"] = sparams.hostname; + log_data["port"] = std::to_string(sparams.port); + if (!sparams.api_key.empty()) { + log_data["api_key"] = "api_key: ****" + sparams.api_key.substr(sparams.api_key.length() - 4); + } + + LOG_INFO("HTTP server listening", log_data); // run the HTTP server in a thread - see comment below std::thread t([&]() { From 8a5be3bd5885d79ad84aadf32bb8c1a67bd43c19 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Fri, 15 Dec 2023 22:16:15 -0500 Subject: [PATCH 244/859] llama : sanity checks for access to logits (#4274) Co-authored-by: Georgi Gerganov --- llama.cpp | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/llama.cpp b/llama.cpp index eddb70859..58fe7492e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1505,6 +1505,10 @@ struct llama_context { // decode output (2-dimensional array: [n_tokens][n_vocab]) std::vector logits; +#ifndef NDEBUG + // guard against access to unset logits + std::vector logits_valid; +#endif bool logits_all = false; // input embedding (1-dimensional array: [n_embd]) @@ -6150,6 +6154,14 @@ static int llama_decode_internal( { auto & logits_out = lctx.logits; +#ifndef NDEBUG + auto & logits_valid = lctx.logits_valid; + logits_valid.clear(); + logits_valid.resize(n_tokens); + + logits_out.clear(); +#endif + if (batch.logits) { logits_out.resize(n_vocab * n_tokens); for (uint32_t i = 0; i < n_tokens; i++) { @@ -6157,13 +6169,22 @@ static int llama_decode_internal( continue; } memcpy(logits_out.data() + (n_vocab*i), (float *) ggml_get_data(res) + (n_vocab*i), sizeof(float)*n_vocab); +#ifndef NDEBUG + logits_valid[i] = true; +#endif } } else if (lctx.logits_all) { logits_out.resize(n_vocab * n_tokens); memcpy(logits_out.data(), (float *) ggml_get_data(res), sizeof(float)*n_vocab*n_tokens); +#ifndef NDEBUG + std::fill(logits_valid.begin(), logits_valid.end(), true); +#endif } else { logits_out.resize(n_vocab); memcpy(logits_out.data(), (float *) 
---
 llama.cpp | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/llama.cpp b/llama.cpp
index eddb70859..58fe7492e 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -1505,6 +1505,10 @@ struct llama_context {
 
    // decode output (2-dimensional array: [n_tokens][n_vocab])
    std::vector<float> logits;
+#ifndef NDEBUG
+    // guard against access to unset logits
+    std::vector<bool>  logits_valid;
+#endif
    bool logits_all = false;
 
    // input embedding (1-dimensional array: [n_embd])
@@ -6150,6 +6154,14 @@ static int llama_decode_internal(
    {
        auto & logits_out = lctx.logits;
 
+#ifndef NDEBUG
+        auto & logits_valid = lctx.logits_valid;
+        logits_valid.clear();
+        logits_valid.resize(n_tokens);
+
+        logits_out.clear();
+#endif
+
        if (batch.logits) {
            logits_out.resize(n_vocab * n_tokens);
            for (uint32_t i = 0; i < n_tokens; i++) {
@@ -6157,13 +6169,22 @@ static int llama_decode_internal(
                    continue;
                }
                memcpy(logits_out.data() + (n_vocab*i), (float *) ggml_get_data(res) + (n_vocab*i), sizeof(float)*n_vocab);
+#ifndef NDEBUG
+                logits_valid[i] = true;
+#endif
            }
        } else if (lctx.logits_all) {
            logits_out.resize(n_vocab * n_tokens);
            memcpy(logits_out.data(), (float *) ggml_get_data(res), sizeof(float)*n_vocab*n_tokens);
+#ifndef NDEBUG
+            std::fill(logits_valid.begin(), logits_valid.end(), true);
+#endif
        } else {
            logits_out.resize(n_vocab);
            memcpy(logits_out.data(), (float *) ggml_get_data(res) + (n_vocab*(n_tokens - 1)), sizeof(float)*n_vocab);
+#ifndef NDEBUG
+            logits_valid[n_tokens - 1] = true;
+#endif
        }
    }
 
@@ -10052,6 +10073,7 @@ float * llama_get_logits(struct llama_context * ctx) {
 }
 
 float * llama_get_logits_ith(struct llama_context * ctx, int32_t i) {
+    assert(ctx->logits_valid.at(i));
    return ctx->logits.data() + i*ctx->model.hparams.n_vocab;
 }
 

From c6c4fc081c1df1c60a9bfe3e6a3fd086f1a29ec7 Mon Sep 17 00:00:00 2001
From: slaren
Date: Sat, 16 Dec 2023 18:58:46 +0100
Subject: [PATCH 245/859] lora : add support for non-llama models (#3333)

* lora : add support for non-llama models

ggml-ci

* avoid leaking ggml_context on failure cleanup

ggml-ci

* lora : allow 1d tensors

* lora : include embd and output layers in size calculation

* fix style
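A usage sketch for the new optional arch argument (an illustration with a
hypothetical adapter directory, not from the original commit message; the
arch value must be one of the names in gguf.MODEL_ARCH_NAMES, e.g. "falcon"):

    python convert-lora-to-ggml.py ./my-falcon-lora falcon
    # writes ./my-falcon-lora/ggml-adapter-model.bin, loadable at runtime
    # through the existing --lora flag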
---
 convert-lora-to-ggml.py |  86 +++++++++++++-------------
 llama.cpp               | 133 ++++++++++++++++++++--------------------
 llama.h                 |   1 +
 3 files changed, 114 insertions(+), 106 deletions(-)

diff --git a/convert-lora-to-ggml.py b/convert-lora-to-ggml.py
index a937410dd..53bb8a3d9 100755
--- a/convert-lora-to-ggml.py
+++ b/convert-lora-to-ggml.py
@@ -3,7 +3,6 @@ from __future__ import annotations
 
 import json
 import os
-import re
 import struct
 import sys
 from typing import Any, BinaryIO, Sequence
@@ -11,43 +10,15 @@ from typing import Any, BinaryIO, Sequence
 import numpy as np
 import torch
 
+from pathlib import Path
+if 'NO_LOCAL_GGUF' not in os.environ:
+    sys.path.insert(1, str(Path(__file__).parent / 'gguf-py' / 'gguf'))
+import gguf
+
+
 NUMPY_TYPE_TO_FTYPE: dict[str, int] = {"float32": 0, "float16": 1}
 
-HF_SUBLAYER_TO_GGML = {
-    "self_attn.q_proj": "attn_q",
-    "self_attn.k_proj": "attn_k",
-    "self_attn.v_proj": "attn_v",
-    "self_attn.o_proj": "attn_output",
-    "mlp.gate_proj": "ffn_gate",
-    "mlp.down_proj": "ffn_down",
-    "mlp.up_proj": "ffn_up",
-    "input_layernorm": "attn_norm",
-    "post_attention_layernorm": "ffn_norm",
-}
-
-
-def translate_tensor_name(t: str) -> str:
-    match = re.match(r".*layers\.(\d+)\.(\w+\.\w+)\.lora_(A|B)\.weight", t)
-    if match:
-        nn = match.group(1)
-        sub_layer = match.group(2)
-        lora_type = match.group(3)
-
-        sub_layer_renamed = HF_SUBLAYER_TO_GGML.get(sub_layer)
-        if sub_layer_renamed is None:
-            print(f"Error: unrecognized sub-layer {sub_layer} in tensor {t}")
-            sys.exit(1)
-
-        output_string = (
-            f"blk.{nn}.{HF_SUBLAYER_TO_GGML[sub_layer]}.weight.lora{lora_type}"
-        )
-        return output_string
-    else:
-        print(f"Error: unrecognized tensor {t}")
-        sys.exit(1)
-
-
 def write_file_header(fout: BinaryIO, params: dict[str, Any]) -> None:
     fout.write(b"ggla"[::-1])  # magic (ggml lora)
     fout.write(struct.pack("i", 1))  # file version
@@ -61,9 +32,7 @@ def write_file_header(fout: BinaryIO, params: dict[str, Any]) -> None:
     fout.write(struct.pack("i", int(params["lora_alpha"])))
 
 
-def write_tensor_header(
-    self, name: str, shape: Sequence[int], data_type: np.dtype[Any]
-) -> None:
+def write_tensor_header(fout: BinaryIO, name: str, shape: Sequence[int], data_type: np.dtype[Any]) -> None:
     sname = name.encode("utf-8")
     fout.write(
         struct.pack(
@@ -78,11 +47,12 @@ def write_tensor_header(
     fout.seek((fout.tell() + 31) & -32)
 
 
-if len(sys.argv) != 2:
-    print(f"Usage: python {sys.argv[0]} <path>")
+if len(sys.argv) < 2:
+    print(f"Usage: python {sys.argv[0]} <path> [arch]")
     print(
         "Path must contain HuggingFace PEFT LoRA files 'adapter_config.json' and 'adapter_model.bin'"
     )
+    print(f"Arch must be one of {list(gguf.MODEL_ARCH_NAMES.values())} (default: llama)")
     sys.exit(1)
 
 input_json = os.path.join(sys.argv[1], "adapter_config.json")
@@ -90,6 +60,14 @@ input_model = os.path.join(sys.argv[1], "adapter_model.bin")
 output_path = os.path.join(sys.argv[1], "ggml-adapter-model.bin")
 
 model = torch.load(input_model, map_location="cpu")
 
+arch_name = sys.argv[2] if len(sys.argv) == 3 else "llama"
+
+if arch_name not in gguf.MODEL_ARCH_NAMES.values():
+    print(f"Error: unsupported architecture {arch_name}")
+    sys.exit(1)
+
+arch = list(gguf.MODEL_ARCH_NAMES.keys())[list(gguf.MODEL_ARCH_NAMES.values()).index(arch_name)]
+name_map = gguf.TensorNameMap(arch, 200)  # 200 layers ought to be enough for anyone
 
 with open(input_json, "r") as f:
     params = json.load(f)
@@ -117,6 +95,7 @@ with open(output_path, "wb") as fout:
     write_file_header(fout, params)
 
     for k, v in model.items():
+        orig_k = k
         if k.endswith(".default.weight"):
             k = k.replace(".default.weight", ".weight")
         if k in ["llama_proj.weight", "llama_proj.bias"]:
@@ -129,7 +108,32 @@ with open(output_path, "wb") as fout:
             v = v.float()
 
         t = v.detach().numpy()
-        tname = translate_tensor_name(k)
+
+        prefix = "base_model.model."
+        if k.startswith(prefix):
+            k = k[len(prefix) :]
+
+        lora_suffixes = (".lora_A.weight", ".lora_B.weight")
+        if k.endswith(lora_suffixes):
+            suffix = k[-len(lora_suffixes[0]):]
+            k = k[: -len(lora_suffixes[0])]
+        else:
+            print(f"Error: unrecognized tensor name {orig_k}")
+            sys.exit(1)
+
+        tname = name_map.get_name(k)
+        if tname is None:
+            print(f"Error: could not map tensor name {orig_k}")
+            print(" Note: the arch parameter must be specified if the model is not llama")
+            sys.exit(1)
+
+        if suffix == ".lora_A.weight":
+            tname += ".weight.loraA"
+        elif suffix == ".lora_B.weight":
+            tname += ".weight.loraB"
+        else:
+            assert False
+
         print(f"{k} => {tname} {t.shape} {t.dtype} {t.nbytes/1024/1024:.2f}MB")
         write_tensor_header(fout, tname, t.shape, t.dtype)
         t.tofile(fout)
diff --git a/llama.cpp b/llama.cpp
index 58fe7492e..f49214c13 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -8647,53 +8647,60 @@ static int llama_apply_lora_from_file_internal(
 
    const int64_t t_start_lora_us = ggml_time_us();
 
-    auto fin = std::ifstream(path_lora, std::ios::binary);
-    if (!fin) {
-        LLAMA_LOG_ERROR("%s: failed to open '%s'\n", __func__, path_lora);
-        return 1;
-    }
+    llama_file fin(path_lora, "rb");
 
    // verify magic and version
    {
-        uint32_t magic;
-        fin.read((char *) &magic, sizeof(magic));
-        uint32_t format_version;
-        fin.read((char *) &format_version, sizeof(format_version));
+        uint32_t magic = fin.read_u32();
+        if (magic != LLAMA_FILE_MAGIC_GGLA) {
+            LLAMA_LOG_ERROR("%s: bad file magic\n", __func__);
+            return 1;
+        }
 
+        uint32_t format_version = fin.read_u32();
        if (format_version != 1) {
            LLAMA_LOG_ERROR("%s: unsupported file version\n", __func__ );
            return 1;
        }
    }
 
-    int32_t lora_r;
-    int32_t lora_alpha;
-    fin.read((char *) &lora_r, sizeof(lora_r));
-    fin.read((char *) &lora_alpha, sizeof(lora_alpha));
+    int32_t lora_r = fin.read_u32();
+    int32_t lora_alpha = fin.read_u32();
    float scaling = scale * (float)lora_alpha / (float)lora_r;
 
    LLAMA_LOG_INFO("%s: r = %d, alpha = %d, scaling = %.2f\n", __func__, lora_r, lora_alpha, scaling);
 
+    // create a name -> tensor map of the model to accelerate lookups
+    // find the max tensor size to estimate the required temporary buffer size
+    size_t max_tensor_size = 0;
+    std::unordered_map<std::string, struct ggml_tensor *> model_tensors;
+    for (const auto & kv : model.tensors_by_name) {
+        model_tensors.insert(kv);
+        size_t f32_size = ggml_nelements(kv.second) * sizeof(float);
+        max_tensor_size = std::max(max_tensor_size, f32_size);
+    }
+
    // create a temporary ggml context to store the lora tensors
calculate size from biggest possible tensor - std::vector lora_buf(1024ull * 1024ull * 1024ull); + // TODO: use ggml-alloc + size_t lora_ctx_size = max_tensor_size * 3; + LLAMA_LOG_INFO("%s: allocating %.f MB for lora temporary buffer\n", __func__, lora_ctx_size / 1024.0 / 1024.0); + std::vector lora_buf(lora_ctx_size); + struct ggml_init_params params; params.mem_size = lora_buf.size(); params.mem_buffer = lora_buf.data(); params.no_alloc = false; - ggml_context * lora_ctx = ggml_init(params); - std::unordered_map lora_tensors; + using unique_context = std::unique_ptr; - // create a name -> tensor map of the model to accelerate lookups - std::unordered_map model_tensors; - for (const auto & kv : model.tensors_by_name) { - model_tensors.insert(kv); - } + unique_context lora_ctx(nullptr, ggml_free); + lora_ctx.reset(ggml_init(params)); + std::unordered_map lora_tensors; // load base model std::unique_ptr ml; - ggml_context * base_ctx = NULL; + + unique_context base_ctx(nullptr, ggml_free); std::vector base_buf; if (path_base_model) { LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model); @@ -8702,6 +8709,7 @@ static int llama_apply_lora_from_file_internal( size_t ctx_size; size_t mmapped_size; ml->calc_sizes(ctx_size, mmapped_size); + base_buf.resize(ctx_size); ggml_init_params base_params; @@ -8709,9 +8717,9 @@ static int llama_apply_lora_from_file_internal( base_params.mem_buffer = base_buf.data(); base_params.no_alloc = ml->use_mmap; - base_ctx = ggml_init(base_params); + base_ctx.reset(ggml_init(base_params)); - // maybe this should in llama_model_loader + // maybe this should be in llama_model_loader if (ml->use_mmap) { ml->mapping.reset(new llama_mmap(&ml->file, /* prefetch */ 0, ggml_is_numa())); } @@ -8724,27 +8732,35 @@ static int llama_apply_lora_from_file_internal( std::vector work_buffer; while (true) { + if (fin.tell() == fin.size) { + // eof + break; + } + int32_t n_dims; - int32_t length; + int32_t name_len; int32_t ftype; - fin.read(reinterpret_cast(&n_dims), sizeof(n_dims)); - fin.read(reinterpret_cast(&length), sizeof(length)); - fin.read(reinterpret_cast(&ftype), sizeof(ftype)); - if (fin.eof()) { - break; + fin.read_raw(&n_dims, sizeof(n_dims)); + fin.read_raw(&name_len, sizeof(name_len)); + fin.read_raw(&ftype, sizeof(ftype)); + + if (n_dims != 1 && n_dims != 2) { + LLAMA_LOG_ERROR("%s: unsupported tensor dimension %d\n", __func__, n_dims); + return 1; } int32_t ne[2] = { 1, 1 }; for (int i = 0; i < n_dims; ++i) { - fin.read(reinterpret_cast(&ne[i]), sizeof(ne[i])); + fin.read_raw(&ne[i], sizeof(ne[i])); } std::string name; { + GGML_ASSERT(name_len <= 1024); char buf[1024]; - fin.read(buf, length); - name = std::string(buf, length); + fin.read_raw(buf, name_len); + name = std::string(buf, name_len); } // check for lora suffix and get the type of tensor @@ -8758,7 +8774,7 @@ static int llama_apply_lora_from_file_internal( std::string lora_type = name.substr(pos + lora_suffix.length()); std::string base_name = name; base_name.erase(pos); - // LLAMA_LOG_INFO("%s: %s => %s (lora type %s) \n", __func__, name.c_str(),base_name.c_str(), lora_type.c_str()); + // LLAMA_LOG_INFO("%s: %s => %s (lora type %s) \n", __func__, name.c_str(), base_name.c_str(), lora_type.c_str()); if (model_tensors.find(base_name) == model_tensors.end()) { LLAMA_LOG_ERROR("%s: unknown tensor '%s' in lora adapter\n", __func__, name.data()); @@ -8777,22 +8793,15 @@ static int llama_apply_lora_from_file_internal( return false; } } - ggml_tensor * lora_tensor; - if (n_dims == 2) { 
- lora_tensor = ggml_new_tensor_2d(lora_ctx, wtype, ne[0], ne[1]); - } - else { - LLAMA_LOG_ERROR("%s: unsupported tensor dimension %d\n", __func__, n_dims); - return 1; - } - ggml_set_name(lora_tensor, "lora_tensor"); + ggml_tensor * lora_tensor = ggml_new_tensor_2d(lora_ctx.get(), wtype, ne[0], ne[1]); + ggml_set_name(lora_tensor, name.c_str()); // load tensor data - size_t offset = fin.tellg(); + size_t offset = fin.tell(); size_t tensor_data_size = ggml_nbytes(lora_tensor); offset = (offset + 31) & -32; - fin.seekg(offset); - fin.read((char*)lora_tensor->data, tensor_data_size); + fin.seek(offset, SEEK_SET); + fin.read_raw(lora_tensor->data, tensor_data_size); lora_tensors[name] = lora_tensor; @@ -8822,13 +8831,11 @@ static int llama_apply_lora_from_file_internal( // load from base model if (gguf_find_tensor(ctx_gguf, base_name.c_str()) < 0) { - // TODO: throw LLAMA_LOG_ERROR("%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str()); return 1; } - // TODO: not tested!! maybe not working! - base_t = ml->create_tensor(base_ctx, base_name, { (uint32_t)dest_t->ne[0], (uint32_t)dest_t->ne[1] }, GGML_BACKEND_CPU); + base_t = ml->create_tensor(base_ctx.get(), base_name, { dest_t->ne[0], dest_t->ne[1] }, GGML_BACKEND_CPU); ml->load_data_for(base_t); } else { base_t = dest_t; @@ -8857,43 +8864,45 @@ static int llama_apply_lora_from_file_internal( } // w = w + BA*s - ggml_tensor * BA = ggml_mul_mat(lora_ctx, loraA, loraB); + ggml_tensor * BA = ggml_mul_mat(lora_ctx.get(), loraA, loraB); offload_func(BA); ggml_set_name(BA, "BA"); if (scaling != 1.0f) { - ggml_tensor * scale_tensor = ggml_new_f32(lora_ctx, scaling); + ggml_tensor * scale_tensor = ggml_new_f32(lora_ctx.get(), scaling); ggml_set_name(scale_tensor, "scale_tensor"); - BA = ggml_scale_inplace(lora_ctx, BA, scale_tensor); + BA = ggml_scale_inplace(lora_ctx.get(), BA, scale_tensor); offload_func(BA); ggml_set_name(BA, "BA_scaled"); } ggml_tensor * r; if (base_t == dest_t) { - r = ggml_add_inplace(lora_ctx, dest_t, BA); + r = ggml_add_inplace(lora_ctx.get(), dest_t, BA); offload_func_force_inplace(r); ggml_set_name(r, "r_add_inplace"); } else { - r = ggml_add(lora_ctx, base_t, BA); + r = ggml_add(lora_ctx.get(), base_t, BA); offload_func(r); ggml_set_name(r, "r_add"); - r = ggml_cpy(lora_ctx, r, dest_t); + r = ggml_cpy(lora_ctx.get(), r, dest_t); offload_func(r); ggml_set_name(r, "r_cpy"); } - struct ggml_cgraph * gf = ggml_new_graph(lora_ctx); + struct ggml_cgraph * gf = ggml_new_graph(lora_ctx.get()); ggml_build_forward_expand(gf, r); ggml_graph_compute_helper(work_buffer, gf, n_threads); + // the tensors in the adapter must be sorted such that loraA and loraB of the same tensor are next to each other + GGML_ASSERT(lora_tensors.size() == 2); + // we won't need these tensors again, reset the context to save memory - ggml_free(lora_ctx); - lora_ctx = ggml_init(params); + lora_ctx.reset(ggml_init(params)); lora_tensors.clear(); n_tensors++; @@ -8903,12 +8912,6 @@ static int llama_apply_lora_from_file_internal( } } - // TODO: this should be in a destructor, it will leak on failure - ggml_free(lora_ctx); - if (base_ctx) { - ggml_free(base_ctx); - } - const int64_t t_lora_us = ggml_time_us() - t_start_lora_us; LLAMA_LOG_INFO(" done (%.2f ms)\n", t_lora_us / 1000.0); diff --git a/llama.h b/llama.h index 45a65cacb..15ab4f80e 100644 --- a/llama.h +++ b/llama.h @@ -39,6 +39,7 @@ #define LLAMA_MAX_RNG_STATE (64*1024) +#define LLAMA_FILE_MAGIC_GGLA 0x67676c61u // 'ggla' #define LLAMA_FILE_MAGIC_GGSN 0x6767736eu // 'ggsn' 
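The LLAMA_FILE_MAGIC_GGLA constant added just above is the loader-side counterpart of write_file_header() in convert-lora-to-ggml.py, which emits the magic as b"ggla"[::-1], i.e. the raw bytes 'a','l','g','g'; read_u32() then reassembles those bytes into a single 32-bit value. A self-contained C++ sketch of that arithmetic (not part of the patch; it assumes a little-endian host, which is what the loader's raw read effectively relies on):

#include <cstdint>
#include <cstdio>

int main() {
    // the bytes the converter writes to disk: b"ggla"[::-1] == "algg"
    const uint8_t on_disk[4] = { 'a', 'l', 'g', 'g' };

    uint32_t magic = 0;
    for (int i = 3; i >= 0; --i) {
        magic = (magic << 8) | on_disk[i]; // little-endian reassembly, as read_u32 does
    }

    printf("0x%08x\n", magic); // prints 0x67676c61 == LLAMA_FILE_MAGIC_GGLA
    return 0;
}

Reversing the byte string in the writer and reading little-endian in the loader cancel out, which is why the constant spells 'g','g','l','a' when viewed from the most significant byte down.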
#define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN From 5daa5f54fdcd2b5228add1a4c43a1897b2168f35 Mon Sep 17 00:00:00 2001 From: Bach Le Date: Sun, 17 Dec 2023 18:57:33 +0800 Subject: [PATCH 246/859] Link to cublas dynamically on Windows even with LLAMA_STATIC (#4506) --- CMakeLists.txt | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 57b43c136..e3cd43ab3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -291,7 +291,12 @@ if (LLAMA_CUBLAS) add_compile_definitions(GGML_CUDA_PEER_MAX_BATCH_SIZE=${LLAMA_CUDA_PEER_MAX_BATCH_SIZE}) if (LLAMA_STATIC) - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart_static CUDA::cublas_static CUDA::cublasLt_static) + if (WIN32) + # As of 12.3.1 CUDA Toolkit for Windows does not offer a static cublas library + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart_static CUDA::cublas CUDA::cublasLt) + else () + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart_static CUDA::cublas_static CUDA::cublasLt_static) + endif() else() set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart CUDA::cublas CUDA::cublasLt) endif() From 62bd52b7bf90819e75f427a95a484cd5eee0b3c7 Mon Sep 17 00:00:00 2001 From: mzcu Date: Sun, 17 Dec 2023 15:54:37 +0100 Subject: [PATCH 247/859] server : allow requests larger than 8K (#4500) --- examples/server/server.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 5f93dcb66..a9f8b3747 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -10,7 +10,8 @@ // crash the server in debug mode, otherwise send an http 500 error #define CPPHTTPLIB_NO_EXCEPTIONS 1 #endif - +// increase max payload length to allow use of larger context size +#define CPPHTTPLIB_FORM_URL_ENCODED_PAYLOAD_MAX_LENGTH 1048576 #include "httplib.h" #include "json.hpp" From eb16dae7e70ca97396190698b29c0f9ee3388e88 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Sun, 17 Dec 2023 14:56:09 +0000 Subject: [PATCH 248/859] server : fix possible ambiguity in content type charset (#4501) --- examples/server/server.cpp | 44 +++++++++++++++++++------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index a9f8b3747..be7b5b95e 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2699,7 +2699,7 @@ int main(int argc, char **argv) } // API key is invalid or not provided - res.set_content("Unauthorized: Invalid API Key", "text/plain"); + res.set_content("Unauthorized: Invalid API Key", "text/plain; charset=utf-8"); res.status = 401; // Unauthorized LOG_WARNING("Unauthorized: Invalid API Key", {}); @@ -2714,28 +2714,28 @@ int main(int argc, char **argv) // this is only called if no index.html is found in the public --path svr.Get("/", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast<const char*>(&index_html), index_html_len, "text/html"); + res.set_content(reinterpret_cast<const char*>(&index_html), index_html_len, "text/html; charset=utf-8"); return false; }); // this is only called if no index.js is found in the public --path svr.Get("/index.js", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast<const char*>(&index_js), index_js_len, "text/javascript"); + res.set_content(reinterpret_cast<const char*>(&index_js), index_js_len, "text/javascript; charset=utf-8"); return false; }); // this is only called if no index.html is found in the public --path svr.Get("/completion.js", [](const
httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast<const char*>(&completion_js), completion_js_len, "application/javascript"); + res.set_content(reinterpret_cast<const char*>(&completion_js), completion_js_len, "application/javascript; charset=utf-8"); return false; }); // this is only called if no index.html is found in the public --path svr.Get("/json-schema-to-grammar.mjs", [](const httplib::Request &, httplib::Response &res) { - res.set_content(reinterpret_cast<const char*>(&json_schema_to_grammar_mjs), json_schema_to_grammar_mjs_len, "application/javascript"); + res.set_content(reinterpret_cast<const char*>(&json_schema_to_grammar_mjs), json_schema_to_grammar_mjs_len, "application/javascript; charset=utf-8"); return false; }); @@ -2746,7 +2746,7 @@ int main(int argc, char **argv) { "user_name", llama.name_user.c_str() }, { "assistant_name", llama.name_assistant.c_str() } }; - res.set_content(data.dump(), "application/json"); + res.set_content(data.dump(), "application/json; charset=utf-8"); }); svr.Post("/completion", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) @@ -2760,12 +2760,12 @@ int main(int argc, char **argv) std::string completion_text; task_result result = llama.next_result(task_id); if (!result.error && result.stop) { - res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json"); + res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); } else { res.status = 404; - res.set_content(result.result_json["content"], "text/plain"); + res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); return; } } else { @@ -2836,7 +2836,7 @@ int main(int argc, char **argv) }} }; - res.set_content(models.dump(), "application/json"); + res.set_content(models.dump(), "application/json; charset=utf-8"); }); // TODO: add mount point without "/v1" prefix -- how?
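Patch 248's change is mechanical but deliberate: without an explicit charset parameter, some HTTP clients fall back to a legacy default for text/* bodies (historically ISO-8859-1 under RFC 2616), so UTF-8 completions could be mis-decoded; spelling out "; charset=utf-8" removes the ambiguity. A self-contained cpp-httplib sketch of the convention (the /note route and port are made up for illustration; Server::Get, set_content, and listen are the same calls server.cpp already uses):

#include "httplib.h" // cpp-httplib, the single-header library used by server.cpp
#include <string>

int main() {
    httplib::Server svr;

    // hypothetical endpoint: always declare the charset explicitly so a
    // non-ASCII payload cannot be interpreted under a legacy default
    svr.Get("/note", [](const httplib::Request &, httplib::Response & res) {
        const std::string body = "caf\xC3\xA9"; // "café" as raw UTF-8 bytes
        res.set_content(body, "text/plain; charset=utf-8");
    });

    svr.listen("127.0.0.1", 8080);
    return 0;
}

For application/json the parameter is technically redundant (RFC 8259 mandates UTF-8), but applying it everywhere keeps the server's responses uniform and is harmless to parsers.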
@@ -2858,10 +2858,10 @@ int main(int argc, char **argv) res.set_content(oaicompat_result.dump(-1, ' ', false, json::error_handler_t::replace), - "application/json"); + "application/json; charset=utf-8"); } else { res.status = 500; - res.set_content(result.result_json["content"], "text/plain"); + res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); return; } } else { @@ -2925,12 +2925,12 @@ int main(int argc, char **argv) task_result result = llama.next_result(task_id); if (!result.error && result.stop) { - res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json"); + res.set_content(result.result_json.dump(-1, ' ', false, json::error_handler_t::replace), "application/json; charset=utf-8"); } else { res.status = 404; - res.set_content(result.result_json["content"], "text/plain"); + res.set_content(result.result_json["content"], "text/plain; charset=utf-8"); return; } } else { @@ -2979,11 +2979,11 @@ int main(int argc, char **argv) svr.Get("/model.json", [&llama](const httplib::Request &, httplib::Response &res) { const json data = llama.get_model_props(); - return res.set_content(data.dump(), "application/json"); + return res.set_content(data.dump(), "application/json; charset=utf-8"); }); svr.Options(R"(/.*)", [](const httplib::Request &, httplib::Response &res) - { return res.set_content("", "application/json"); }); + { return res.set_content("", "application/json; charset=utf-8"); }); svr.Post("/tokenize", [&llama](const httplib::Request &req, httplib::Response &res) { @@ -2994,7 +2994,7 @@ int main(int argc, char **argv) tokens = llama.tokenize(body["content"], false); } const json data = format_tokenizer_response(tokens); - return res.set_content(data.dump(), "application/json"); + return res.set_content(data.dump(), "application/json; charset=utf-8"); }); svr.Post("/detokenize", [&llama](const httplib::Request &req, httplib::Response &res) @@ -3008,7 +3008,7 @@ int main(int argc, char **argv) } const json data = format_detokenized_response(content); - return res.set_content(data.dump(), "application/json"); + return res.set_content(data.dump(), "application/json; charset=utf-8"); }); svr.Post("/embedding", [&llama](const httplib::Request &req, httplib::Response &res) @@ -3025,7 +3025,7 @@ int main(int argc, char **argv) } const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false, true, -1); task_result result = llama.next_result(task_id); - return res.set_content(result.result_json.dump(), "application/json"); + return res.set_content(result.result_json.dump(), "application/json; charset=utf-8"); }); svr.set_logger(log_server_request); @@ -3046,7 +3046,7 @@ int main(int argc, char **argv) { snprintf(buf, sizeof(buf), fmt, "Unknown Exception"); } - res.set_content(buf, "text/plain"); + res.set_content(buf, "text/plain; charset=utf-8"); res.status = 500; }); @@ -3054,15 +3054,15 @@ int main(int argc, char **argv) { if (res.status == 401) { - res.set_content("Unauthorized", "text/plain"); + res.set_content("Unauthorized", "text/plain; charset=utf-8"); } if (res.status == 400) { - res.set_content("Invalid request", "text/plain"); + res.set_content("Invalid request", "text/plain; charset=utf-8"); } else if (res.status == 404) { - res.set_content("File Not Found", "text/plain"); + res.set_content("File Not Found", "text/plain; charset=utf-8"); res.status = 404; } }); From 8edd2b40fdbcafbf630f2cf29306b29d5cb48c42 Mon Sep 17 00:00:00 2001 From: AdithyanI Date: Sun, 17 Dec 2023 
15:57:56 +0100 Subject: [PATCH 249/859] server : fix grammar being ignored (#4494) Fix bug in identifying the grammar. --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index be7b5b95e..c97efe97d 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2414,7 +2414,7 @@ json oaicompat_completion_params_parse( llama_params["ignore_eos"] = json_value(body, "ignore_eos", false); llama_params["tfs_z"] = json_value(body, "tfs_z", 0.0); - if (llama_params.count("grammar") != 0) { + if (body.count("grammar") != 0) { llama_params["grammar"] = json_value(body, "grammar", json::object()); } From 0ffc92d2d23a789625f018840469af045be1e3c0 Mon Sep 17 00:00:00 2001 From: olexiyb Date: Sun, 17 Dec 2023 17:02:16 +0200 Subject: [PATCH 250/859] server : disable llm logs if SERVER_VERBOSE is off (#3792) --- examples/server/server.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c97efe97d..04038530f 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2645,6 +2645,9 @@ static void append_to_generated_text_from_generated_token_probs(llama_server_con int main(int argc, char **argv) { +#if SERVER_VERBOSE != 1 + log_disable(); +#endif // own arguments required by this example gpt_params params; server_params sparams; From 45668633fdb522a925c3dafc1ecf426f539efb27 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 17 Dec 2023 16:05:56 +0100 Subject: [PATCH 251/859] finetune : keep allocs alive until all allocations are done (#4486) --- examples/finetune/finetune.cpp | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index b9849e8c9..6a668d764 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1620,8 +1620,6 @@ int main(int argc, char ** argv) { opt->params.adam.gclip = params.common.adam_gclip; opt->params.adam.eps_f = params.common.adam_eps_f; - ggml_allocr * alloc = NULL; - printf("%s: init model\n", __func__); bool existed = load_checkpoint_lora_file(params.common.fn_checkpoint_in, &model, &lora, train); @@ -1725,10 +1723,9 @@ int main(int argc, char ** argv) { // allocate input tensors mem_input_data.resize(max_input_size); - alloc = ggml_allocr_new(mem_input_data.data(), mem_input_data.size(), tensor_alignment); - ggml_allocr_alloc(alloc, tokens_input); - ggml_allocr_alloc(alloc, target_probs); - ggml_allocr_free(alloc); + ggml_allocr_t alloc_inps = ggml_allocr_new(mem_input_data.data(), mem_input_data.size(), tensor_alignment); + ggml_allocr_alloc(alloc_inps, tokens_input); + ggml_allocr_alloc(alloc_inps, target_probs); // context for compute tensors without their data const size_t estimated_compute_size_wo_data = ( @@ -1755,7 +1752,7 @@ int main(int argc, char ** argv) { // find best evaluation order for (unsigned order = 0; order < (unsigned) GGML_CGRAPH_EVAL_ORDER_COUNT; ++order) { ctx_compute = ggml_init(ctx_compute_params); - alloc = ggml_allocr_new_measure(tensor_alignment); + ggml_allocr_t alloc = ggml_allocr_new_measure(tensor_alignment); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = (enum ggml_cgraph_eval_order) order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1788,7 +1785,7 @@ int main(int argc, char ** argv) { // allocate compute tensors mem_compute_data.resize(max_compute_size); ctx_compute = 
ggml_init(ctx_compute_params); - alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); + ggml_allocr_t alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = best_order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1804,6 +1801,8 @@ int main(int argc, char ** argv) { params.common.use_checkpointing ); ggml_allocr_free(alloc); + ggml_allocr_free(alloc_inps); + // tokenize data std::vector train_tokens; From 919c40660fd27157b391b5832d2a577d5afef4cb Mon Sep 17 00:00:00 2001 From: Matheus Gabriel Alves Silva Date: Sun, 17 Dec 2023 12:23:33 -0300 Subject: [PATCH 252/859] build : Check the ROCm installation location (#4485) * build : Check the ROCm installation location * more generic approach * fixup! It was returning the path instead of the command output * fixup! Trailing whitespace --- Makefile | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index fb775ae5b..8273f8400 100644 --- a/Makefile +++ b/Makefile @@ -439,9 +439,15 @@ ggml-opencl.o: ggml-opencl.cpp ggml-opencl.h endif # LLAMA_CLBLAST ifdef LLAMA_HIPBLAS - ROCM_PATH ?= /opt/rocm - HIPCC ?= $(ROCM_PATH)/bin/hipcc - GPU_TARGETS ?= $(shell $(ROCM_PATH)/llvm/bin/amdgpu-arch) + + ifeq ($(wildcard /opt/rocm),) + ROCM_PATH ?= /usr + GPU_TARGETS ?= $(shell $(shell which amdgpu-arch)) + else + ROCM_PATH ?= /opt/rocm + GPU_TARGETS ?= $(shell $(ROCM_PATH)/llvm/bin/amdgpu-arch) + endif + HIPCC ?= $(ROCM_PATH)/bin/hipcc LLAMA_CUDA_DMMV_X ?= 32 LLAMA_CUDA_MMV_Y ?= 1 LLAMA_CUDA_KQUANTS_ITER ?= 2 From f7f468a97dceec2f8fe8b1ed7a2091083446ebc7 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sun, 17 Dec 2023 10:45:46 -0500 Subject: [PATCH 253/859] gguf-py : fail fast on nonsensical special token IDs (#4489) --- gguf-py/gguf/vocab.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/gguf-py/gguf/vocab.py b/gguf-py/gguf/vocab.py index de3e5edb5..76924d8f2 100644 --- a/gguf-py/gguf/vocab.py +++ b/gguf-py/gguf/vocab.py @@ -109,8 +109,10 @@ class SpecialVocab: return True def _set_special_token(self, typ: str, tid: Any) -> None: - if not isinstance(tid, int) or tid < 0: + if not isinstance(tid, int): return + if tid < 0: + raise ValueError(f'invalid value for special token type {typ}: {tid}') if self.n_vocab is None or tid < self.n_vocab: if typ in self.special_token_ids: return From 800a489e4a8be199122259a995b1ee9dd7fae320 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 17 Dec 2023 19:38:41 +0200 Subject: [PATCH 254/859] llama.swiftui : add bench functionality (#4483) * llama.swiftui : add bench button * llama.swiftui : initial bench functionality * force to use n_gpu_layers on simulator * add download buttons & expose llamaState.loadModel * update project.pbxproj * comment #Preview & fix editorconfig check * gitignore : xcode stuff * llama.swiftui : UX improvements * llama.swiftui : avoid data copy via "downloadTask" * llama.swiftui : remove model from project * llama : remove "mostly" from model infos * llama.swiftui : improve bench --------- Co-authored-by: jhen --- .editorconfig | 3 + examples/llama.swiftui/.gitignore | 1 + .../llama.cpp.swift/LibLlama.swift | 182 +++- .../llama.swiftui.xcodeproj/project.pbxproj | 898 +++++++++--------- .../llama.swiftui/Models/LlamaState.swift | 52 +- .../llama.swiftui/UI/ContentView.swift | 114 ++- .../llama.swiftui/UI/DownloadButton.swift | 122 +++ 
llama.cpp | 33 +- 8 files changed, 895 insertions(+), 510 deletions(-) create mode 100644 examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift diff --git a/.editorconfig b/.editorconfig index a56e9ccc8..16d16b3b5 100644 --- a/.editorconfig +++ b/.editorconfig @@ -23,3 +23,6 @@ insert_final_newline = unset [examples/server/public/*] indent_size = 2 + +[examples/llama.swiftui/llama.swiftui.xcodeproj/*] +indent_style = tab diff --git a/examples/llama.swiftui/.gitignore b/examples/llama.swiftui/.gitignore index 9bce6af39..e585a2a4f 100644 --- a/examples/llama.swiftui/.gitignore +++ b/examples/llama.swiftui/.gitignore @@ -1 +1,2 @@ xcuserdata +xcshareddata diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 3754f0551..272e1fd8a 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -6,16 +6,34 @@ enum LlamaError: Error { case couldNotInitializeContext } +func llama_batch_clear(_ batch: inout llama_batch) { + batch.n_tokens = 0 +} + +func llama_batch_add(_ batch: inout llama_batch, _ id: llama_token, _ pos: llama_pos, _ seq_ids: [llama_seq_id], _ logits: Bool) { + batch.token [Int(batch.n_tokens)] = id + batch.pos [Int(batch.n_tokens)] = pos + batch.n_seq_id[Int(batch.n_tokens)] = Int32(seq_ids.count) + for i in 0.. LlamaContext { + static func create_context(path: String) throws -> LlamaContext { llama_backend_init(false) - let model_params = llama_model_default_params() + var model_params = llama_model_default_params() +#if targetEnvironment(simulator) + model_params.n_gpu_layers = 0 + print("Running on simulator, force use n_gpu_layers = 0") +#endif let model = llama_load_model_from_file(path, model_params) guard let model else { print("Could not load model at \(path)") throw LlamaError.couldNotInitializeContext } + + let n_threads = max(1, min(8, ProcessInfo.processInfo.processorCount - 2)) + print("Using \(n_threads) threads") + var ctx_params = llama_context_default_params() - ctx_params.seed = 1234 + ctx_params.seed = 1234 ctx_params.n_ctx = 2048 - ctx_params.n_threads = 8 - ctx_params.n_threads_batch = 8 + ctx_params.n_threads = UInt32(n_threads) + ctx_params.n_threads_batch = UInt32(n_threads) let context = llama_new_context_with_model(model, ctx_params) guard let context else { @@ -56,6 +83,26 @@ actor LlamaContext { return LlamaContext(model: model, context: context) } + func model_info() -> String { + let result = UnsafeMutablePointer.allocate(capacity: 256) + result.initialize(repeating: Int8(0), count: 256) + defer { + result.deallocate() + } + + // TODO: this is probably very stupid way to get the string from C + + let nChars = llama_model_desc(model, result, 256) + let bufferPointer = UnsafeBufferPointer(start: result, count: Int(nChars)) + + var SwiftString = "" + for char in bufferPointer { + SwiftString.append(Character(UnicodeScalar(UInt8(char)))) + } + + return SwiftString + } + func get_n_tokens() -> Int32 { return batch.n_tokens; } @@ -79,16 +126,11 @@ actor LlamaContext { print(String(cString: token_to_piece(token: id) + [0])) } - // batch = llama_batch_init(512, 0) // done in init() - batch.n_tokens = Int32(tokens_list.count) + llama_batch_clear(&batch) - for i1 in 0.. String { + var pp_avg: Double = 0 + var tg_avg: Double = 0 + + var pp_std: Double = 0 + var tg_std: Double = 0 + + for r in 0.. 
1 { + pp_std = sqrt(pp_std / Double(nr - 1) - pp_avg * pp_avg * Double(nr) / Double(nr - 1)) + tg_std = sqrt(tg_std / Double(nr - 1) - tg_avg * tg_avg * Double(nr) / Double(nr - 1)) + } else { + pp_std = 0 + tg_std = 0 + } + + let model_desc = model_info(); + let model_size = String(format: "%.2f GiB", Double(llama_model_size(model)) / 1024.0 / 1024.0 / 1024.0); + let model_n_params = String(format: "%.2f B", Double(llama_model_n_params(model)) / 1e9); + let backend = "Metal"; + let pp_avg_str = String(format: "%.2f", pp_avg); + let tg_avg_str = String(format: "%.2f", tg_avg); + let pp_std_str = String(format: "%.2f", pp_std); + let tg_std_str = String(format: "%.2f", tg_std); + + var result = "" + + result += String("| model | size | params | backend | test | t/s |\n") + result += String("| --- | --- | --- | --- | --- | --- |\n") + result += String("| \(model_desc) | \(model_size) | \(model_n_params) | \(backend) | pp \(pp) | \(pp_avg_str) ± \(pp_std_str) |\n") + result += String("| \(model_desc) | \(model_size) | \(model_n_params) | \(backend) | tg \(tg) | \(tg_avg_str) ± \(tg_std_str) |\n") + + return result; + } + func clear() { tokens_list.removeAll() temporary_invalid_cchars.removeAll() + llama_kv_cache_clear(context) } private func tokenize(text: String, add_bos: Bool) -> [llama_token] { let utf8Count = text.utf8.count - let n_tokens = utf8Count + (add_bos ? 1 : 0) + let n_tokens = utf8Count + (add_bos ? 1 : 0) + 1 let tokens = UnsafeMutablePointer.allocate(capacity: n_tokens) let tokenCount = llama_tokenize(model, text, Int32(utf8Count), tokens, Int32(n_tokens), add_bos, false) diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index bc1fd15ce..2e6159928 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -1,481 +1,483 @@ // !$*UTF8*$! 
{ - archiveVersion = 1; - classes = { - }; - objectVersion = 56; - objects = { + archiveVersion = 1; + classes = { + }; + objectVersion = 56; + objects = { /* Begin PBXBuildFile section */ - 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; }; - 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; }; - 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; - 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; - 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; }; - 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; - 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; - 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; }; - 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; - 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; - 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; }; - 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; }; - 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; }; - 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; - 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; - 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; + 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; + 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; + 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; + 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; + 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = 
PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; + 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; + 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; + 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; }; + 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */; }; + 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; + 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; + 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; }; + 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; }; + 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; }; + 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; + 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; + 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ - 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; - 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; - 542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = ""; }; - 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = ""; }; - 542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = ""; }; - 542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = ""; }; - 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = ""; }; - 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType 
= sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = ""; }; - 542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = ""; }; - 542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = ""; }; - 549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = ""; }; - 549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = ""; }; - 549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = ""; }; - 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; - 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = ""; }; - 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; - 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; - 8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; - 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; - 8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; - 8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */ = {isa = PBXFileReference; lastKnownFileType = file; path = "llama-2-7b-chat.Q2_K.gguf"; sourceTree = ""; }; - 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; - 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; - 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; + 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; + 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = 
"ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; + 542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = ""; }; + 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = ""; }; + 542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = ""; }; + 542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = ""; }; + 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = ""; }; + 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = ""; }; + 542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = ""; }; + 542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = ""; }; + 549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = ""; }; + 549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = ""; }; + 549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = ""; }; + 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; + 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = ""; }; + 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = ""; }; + 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; + 8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; + 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType 
= folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; + 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; + 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; + 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ - 8A1C83702AC328BD0096AF73 /* Frameworks */ = { - isa = PBXFrameworksBuildPhase; - buildActionMask = 2147483647; - files = ( - 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */, - 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; + 8A1C83702AC328BD0096AF73 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */, + 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ - 8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = { - isa = PBXGroup; - children = ( - 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */, - 542376092B0D9C40008E6A1C /* ggml-backend.h */, - 542376062B0D9BEA008E6A1C /* ggml-quants.h */, - 542376072B0D9BFB008E6A1C /* ggml-quants.c */, - 549479C82AC9E10B00E0F78B /* ggml-metal.metal */, - 549479C62AC9E0F200E0F78B /* ggml-metal.h */, - 549479C52AC9E0F200E0F78B /* ggml-metal.m */, - 542EA09B2AC8723900A8AEE9 /* ggml.c */, - 542EA09C2AC8723900A8AEE9 /* ggml.h */, - 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */, - 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */, - 542EA0A12AC8729100A8AEE9 /* llama.cpp */, - 542EA0A22AC8729100A8AEE9 /* llama.h */, - ); - name = llama.cpp; - sourceTree = ""; - }; - 8A1C836A2AC328BD0096AF73 = { - isa = PBXGroup; - children = ( - 8A08D1F62AC7383900FE6CD4 /* llama.cpp */, - 8A907F312AC7134E006146EA /* llama.cpp.swift */, - 8A3F84232AC4C891005E2EE8 /* models */, - 8A1C83752AC328BD0096AF73 /* llama.swiftui */, - 8A1C83742AC328BD0096AF73 /* Products */, - 8A39BE082AC7601000BFEB40 /* Frameworks */, - ); - sourceTree = ""; - }; - 8A1C83742AC328BD0096AF73 /* Products */ = { - isa = PBXGroup; - children = ( - 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */, - ); - name = Products; - sourceTree = ""; - }; - 8A1C83752AC328BD0096AF73 /* llama.swiftui */ = { - isa = PBXGroup; - children = ( - 8A3F84102AC4BD85005E2EE8 /* Resources */, - 8A9F7C4B2AC332DC008AE1EA /* Models */, - 8A9F7C4A2AC332BF008AE1EA /* UI */, - 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */, - 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */, - 8A1C837C2AC328BE0096AF73 /* Preview Content */, - ); - path = llama.swiftui; - sourceTree = ""; - }; - 8A1C837C2AC328BE0096AF73 /* Preview Content */ = { - isa = PBXGroup; - children = ( - 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */, - ); - path = "Preview Content"; - sourceTree = ""; - }; - 8A39BE082AC7601000BFEB40 /* Frameworks */ = { - isa = PBXGroup; - children = ( - 
549479CA2AC9E16000E0F78B /* Metal.framework */, - 8A39BE092AC7601000BFEB40 /* Accelerate.framework */, - ); - name = Frameworks; - sourceTree = ""; - }; - 8A3F84102AC4BD85005E2EE8 /* Resources */ = { - isa = PBXGroup; - children = ( - 8A3F84112AC4BD8C005E2EE8 /* models */, - ); - path = Resources; - sourceTree = ""; - }; - 8A3F84112AC4BD8C005E2EE8 /* models */ = { - isa = PBXGroup; - children = ( - 8A3F841F2AC4C824005E2EE8 /* llama-2-7b-chat.Q2_K.gguf */, - ); - path = models; - sourceTree = ""; - }; - 8A907F312AC7134E006146EA /* llama.cpp.swift */ = { - isa = PBXGroup; - children = ( - 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */, - 8A907F322AC7134E006146EA /* LibLlama.swift */, - ); - path = llama.cpp.swift; - sourceTree = ""; - }; - 8A9F7C4A2AC332BF008AE1EA /* UI */ = { - isa = PBXGroup; - children = ( - 8A1C83782AC328BD0096AF73 /* ContentView.swift */, - ); - path = UI; - sourceTree = ""; - }; - 8A9F7C4B2AC332DC008AE1EA /* Models */ = { - isa = PBXGroup; - children = ( - 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */, - ); - path = Models; - sourceTree = ""; - }; + 8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = { + isa = PBXGroup; + children = ( + 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */, + 542376092B0D9C40008E6A1C /* ggml-backend.h */, + 542376062B0D9BEA008E6A1C /* ggml-quants.h */, + 542376072B0D9BFB008E6A1C /* ggml-quants.c */, + 549479C82AC9E10B00E0F78B /* ggml-metal.metal */, + 549479C62AC9E0F200E0F78B /* ggml-metal.h */, + 549479C52AC9E0F200E0F78B /* ggml-metal.m */, + 542EA09B2AC8723900A8AEE9 /* ggml.c */, + 542EA09C2AC8723900A8AEE9 /* ggml.h */, + 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */, + 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */, + 542EA0A12AC8729100A8AEE9 /* llama.cpp */, + 542EA0A22AC8729100A8AEE9 /* llama.h */, + ); + name = llama.cpp; + sourceTree = ""; + }; + 8A1C836A2AC328BD0096AF73 = { + isa = PBXGroup; + children = ( + 8A08D1F62AC7383900FE6CD4 /* llama.cpp */, + 8A907F312AC7134E006146EA /* llama.cpp.swift */, + 8A3F84232AC4C891005E2EE8 /* models */, + 8A1C83752AC328BD0096AF73 /* llama.swiftui */, + 8A1C83742AC328BD0096AF73 /* Products */, + 8A39BE082AC7601000BFEB40 /* Frameworks */, + ); + sourceTree = ""; + }; + 8A1C83742AC328BD0096AF73 /* Products */ = { + isa = PBXGroup; + children = ( + 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */, + ); + name = Products; + sourceTree = ""; + }; + 8A1C83752AC328BD0096AF73 /* llama.swiftui */ = { + isa = PBXGroup; + children = ( + 8A3F84102AC4BD85005E2EE8 /* Resources */, + 8A9F7C4B2AC332DC008AE1EA /* Models */, + 8A9F7C4A2AC332BF008AE1EA /* UI */, + 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */, + 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */, + 8A1C837C2AC328BE0096AF73 /* Preview Content */, + ); + path = llama.swiftui; + sourceTree = ""; + }; + 8A1C837C2AC328BE0096AF73 /* Preview Content */ = { + isa = PBXGroup; + children = ( + 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 8A39BE082AC7601000BFEB40 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 549479CA2AC9E16000E0F78B /* Metal.framework */, + 8A39BE092AC7601000BFEB40 /* Accelerate.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; + 8A3F84102AC4BD85005E2EE8 /* Resources */ = { + isa = PBXGroup; + children = ( + 8A3F84112AC4BD8C005E2EE8 /* models */, + ); + path = Resources; + sourceTree = ""; + }; + 8A3F84112AC4BD8C005E2EE8 /* models */ = { + isa = PBXGroup; + children = ( + ); + path = models; + sourceTree = ""; + }; + 8A907F312AC7134E006146EA /* llama.cpp.swift 
*/ = { + isa = PBXGroup; + children = ( + 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */, + 8A907F322AC7134E006146EA /* LibLlama.swift */, + ); + path = llama.cpp.swift; + sourceTree = ""; + }; + 8A9F7C4A2AC332BF008AE1EA /* UI */ = { + isa = PBXGroup; + children = ( + 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */, + 8A1C83782AC328BD0096AF73 /* ContentView.swift */, + ); + path = UI; + sourceTree = ""; + }; + 8A9F7C4B2AC332DC008AE1EA /* Models */ = { + isa = PBXGroup; + children = ( + 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */, + ); + path = Models; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ - 8A1C83722AC328BD0096AF73 /* llama.swiftui */ = { - isa = PBXNativeTarget; - buildConfigurationList = 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */; - buildPhases = ( - 8A1C836F2AC328BD0096AF73 /* Sources */, - 8A1C83702AC328BD0096AF73 /* Frameworks */, - 8A1C83712AC328BD0096AF73 /* Resources */, - ); - buildRules = ( - ); - dependencies = ( - ); - name = llama.swiftui; - packageProductDependencies = ( - ); - productName = llama.swiftui; - productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */; - productType = "com.apple.product-type.application"; - }; + 8A1C83722AC328BD0096AF73 /* llama.swiftui */ = { + isa = PBXNativeTarget; + buildConfigurationList = 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */; + buildPhases = ( + 8A1C836F2AC328BD0096AF73 /* Sources */, + 8A1C83702AC328BD0096AF73 /* Frameworks */, + 8A1C83712AC328BD0096AF73 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = llama.swiftui; + packageProductDependencies = ( + ); + productName = llama.swiftui; + productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */; + productType = "com.apple.product-type.application"; + }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ - 8A1C836B2AC328BD0096AF73 /* Project object */ = { - isa = PBXProject; - attributes = { - BuildIndependentTargetsInParallel = 1; - LastSwiftUpdateCheck = 1500; - LastUpgradeCheck = 1500; - TargetAttributes = { - 8A1C83722AC328BD0096AF73 = { - CreatedOnToolsVersion = 15.0; - LastSwiftMigration = 1500; - }; - }; - }; - buildConfigurationList = 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */; - compatibilityVersion = "Xcode 14.0"; - developmentRegion = en; - hasScannedForEncodings = 0; - knownRegions = ( - en, - Base, - ); - mainGroup = 8A1C836A2AC328BD0096AF73; - packageReferences = ( - ); - productRefGroup = 8A1C83742AC328BD0096AF73 /* Products */; - projectDirPath = ""; - projectRoot = ""; - targets = ( - 8A1C83722AC328BD0096AF73 /* llama.swiftui */, - ); - }; + 8A1C836B2AC328BD0096AF73 /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 1500; + LastUpgradeCheck = 1500; + TargetAttributes = { + 8A1C83722AC328BD0096AF73 = { + CreatedOnToolsVersion = 15.0; + LastSwiftMigration = 1500; + }; + }; + }; + buildConfigurationList = 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */; + compatibilityVersion = "Xcode 14.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 8A1C836A2AC328BD0096AF73; + packageReferences = ( + ); + productRefGroup = 8A1C83742AC328BD0096AF73 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 8A1C83722AC328BD0096AF73 /* 
llama.swiftui */, + ); + }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ - 8A1C83712AC328BD0096AF73 /* Resources */ = { - isa = PBXResourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */, - 8A3F84242AC4C891005E2EE8 /* models in Resources */, - 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, - 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; + 8A1C83712AC328BD0096AF73 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */, + 8A3F84242AC4C891005E2EE8 /* models in Resources */, + 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, + 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXResourcesBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ - 8A1C836F2AC328BD0096AF73 /* Sources */ = { - isa = PBXSourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, - 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, - 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, - 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, - 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, - 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */, - 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, - 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, - 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */, - 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; + 8A1C836F2AC328BD0096AF73 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, + 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, + 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, + 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, + 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, + 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */, + 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, + 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, + 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */, + 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */, + 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXSourcesBuildPhase section */ /* Begin XCBuildConfiguration section */ - 8A1C837F2AC328BE0096AF73 /* Debug */ = { - isa = XCBuildConfiguration; - buildSettings = { - ALWAYS_SEARCH_USER_PATHS = NO; - ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; - CLANG_ANALYZER_NONNULL = YES; - CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; - CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; - CLANG_ENABLE_MODULES = YES; - CLANG_ENABLE_OBJC_ARC = YES; - CLANG_ENABLE_OBJC_WEAK = YES; - CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; - CLANG_WARN_BOOL_CONVERSION = YES; - CLANG_WARN_COMMA = YES; - CLANG_WARN_CONSTANT_CONVERSION = YES; - CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; - CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; - CLANG_WARN_DOCUMENTATION_COMMENTS = YES; - CLANG_WARN_EMPTY_BODY = YES; - CLANG_WARN_ENUM_CONVERSION = YES; - CLANG_WARN_INFINITE_RECURSION = YES; - 
CLANG_WARN_INT_CONVERSION = YES; - CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; - CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; - CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; - CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; - CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; - CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; - CLANG_WARN_STRICT_PROTOTYPES = YES; - CLANG_WARN_SUSPICIOUS_MOVE = YES; - CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; - CLANG_WARN_UNREACHABLE_CODE = YES; - CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; - COPY_PHASE_STRIP = NO; - DEBUG_INFORMATION_FORMAT = dwarf; - ENABLE_STRICT_OBJC_MSGSEND = YES; - ENABLE_TESTABILITY = YES; - ENABLE_USER_SCRIPT_SANDBOXING = YES; - GCC_C_LANGUAGE_STANDARD = gnu17; - GCC_DYNAMIC_NO_PIC = NO; - GCC_NO_COMMON_BLOCKS = YES; - GCC_OPTIMIZATION_LEVEL = 0; - GCC_PREPROCESSOR_DEFINITIONS = ( - "DEBUG=1", - "$(inherited)", - ); - GCC_WARN_64_TO_32_BIT_CONVERSION = YES; - GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; - GCC_WARN_UNDECLARED_SELECTOR = YES; - GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; - GCC_WARN_UNUSED_FUNCTION = YES; - GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 17.0; - LOCALIZATION_PREFERS_STRING_CATALOGS = YES; - MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; - MTL_FAST_MATH = YES; - ONLY_ACTIVE_ARCH = YES; - SDKROOT = iphoneos; - SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; - SWIFT_OPTIMIZATION_LEVEL = "-Onone"; - }; - name = Debug; - }; - 8A1C83802AC328BE0096AF73 /* Release */ = { - isa = XCBuildConfiguration; - buildSettings = { - ALWAYS_SEARCH_USER_PATHS = NO; - ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; - CLANG_ANALYZER_NONNULL = YES; - CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; - CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; - CLANG_ENABLE_MODULES = YES; - CLANG_ENABLE_OBJC_ARC = YES; - CLANG_ENABLE_OBJC_WEAK = YES; - CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; - CLANG_WARN_BOOL_CONVERSION = YES; - CLANG_WARN_COMMA = YES; - CLANG_WARN_CONSTANT_CONVERSION = YES; - CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; - CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; - CLANG_WARN_DOCUMENTATION_COMMENTS = YES; - CLANG_WARN_EMPTY_BODY = YES; - CLANG_WARN_ENUM_CONVERSION = YES; - CLANG_WARN_INFINITE_RECURSION = YES; - CLANG_WARN_INT_CONVERSION = YES; - CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; - CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; - CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; - CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; - CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; - CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; - CLANG_WARN_STRICT_PROTOTYPES = YES; - CLANG_WARN_SUSPICIOUS_MOVE = YES; - CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; - CLANG_WARN_UNREACHABLE_CODE = YES; - CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; - COPY_PHASE_STRIP = NO; - DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; - ENABLE_NS_ASSERTIONS = NO; - ENABLE_STRICT_OBJC_MSGSEND = YES; - ENABLE_USER_SCRIPT_SANDBOXING = YES; - GCC_C_LANGUAGE_STANDARD = gnu17; - GCC_NO_COMMON_BLOCKS = YES; - GCC_WARN_64_TO_32_BIT_CONVERSION = YES; - GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; - GCC_WARN_UNDECLARED_SELECTOR = YES; - GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; - GCC_WARN_UNUSED_FUNCTION = YES; - GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 17.0; - LOCALIZATION_PREFERS_STRING_CATALOGS = YES; - MTL_ENABLE_DEBUG_INFO = NO; - MTL_FAST_MATH = YES; - SDKROOT = iphoneos; - SWIFT_COMPILATION_MODE = wholemodule; - VALIDATE_PRODUCT = YES; - }; - name = Release; - }; - 8A1C83822AC328BE0096AF73 /* Debug */ = 
{ - isa = XCBuildConfiguration; - buildSettings = { - ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; - CLANG_ENABLE_MODULES = YES; - CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; - DEVELOPMENT_TEAM = STLSG3FG8Q; - ENABLE_PREVIEWS = YES; - GENERATE_INFOPLIST_FILE = YES; - INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; - INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; - INFOPLIST_KEY_UILaunchScreen_Generation = YES; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - IPHONEOS_DEPLOYMENT_TARGET = 16.0; - LD_RUNPATH_SEARCH_PATHS = ( - "$(inherited)", - "@executable_path/Frameworks", - ); - MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; - PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; - SWIFT_OPTIMIZATION_LEVEL = "-Onone"; - SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; - }; - name = Debug; - }; - 8A1C83832AC328BE0096AF73 /* Release */ = { - isa = XCBuildConfiguration; - buildSettings = { - ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; - CLANG_ENABLE_MODULES = YES; - CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; - DEVELOPMENT_TEAM = STLSG3FG8Q; - ENABLE_PREVIEWS = YES; - GENERATE_INFOPLIST_FILE = YES; - INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; - INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; - INFOPLIST_KEY_UILaunchScreen_Generation = YES; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - IPHONEOS_DEPLOYMENT_TARGET = 16.0; - LD_RUNPATH_SEARCH_PATHS = ( - "$(inherited)", - "@executable_path/Frameworks", - ); - MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; - PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; - SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; - }; - name = Release; - }; + 8A1C837F2AC328BE0096AF73 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + 
CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 8A1C83802AC328BE0096AF73 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + 
SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 8A1C83822AC328BE0096AF73 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; + DEVELOPMENT_TEAM = STLSG3FG8Q; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 8A1C83832AC328BE0096AF73 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; + DEVELOPMENT_TEAM = STLSG3FG8Q; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 16.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ - 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - 8A1C837F2AC328BE0096AF73 /* Debug */, - 8A1C83802AC328BE0096AF73 /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; - 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - 
8A1C83822AC328BE0096AF73 /* Debug */, - 8A1C83832AC328BE0096AF73 /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; + 8A1C836E2AC328BD0096AF73 /* Build configuration list for PBXProject "llama.swiftui" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 8A1C837F2AC328BE0096AF73 /* Debug */, + 8A1C83802AC328BE0096AF73 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 8A1C83812AC328BE0096AF73 /* Build configuration list for PBXNativeTarget "llama.swiftui" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 8A1C83822AC328BE0096AF73 /* Debug */, + 8A1C83832AC328BE0096AF73 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; /* End XCConfigurationList section */ - }; - rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */; + }; + rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */; } diff --git a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift index babc60cdc..3393eb242 100644 --- a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift +++ b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift @@ -3,24 +3,26 @@ import Foundation @MainActor class LlamaState: ObservableObject { @Published var messageLog = "" + @Published var cacheCleared = false private var llamaContext: LlamaContext? - private var modelUrl: URL? { - Bundle.main.url(forResource: "q8_0", withExtension: "gguf", subdirectory: "models") + private var defaultModelUrl: URL? { + Bundle.main.url(forResource: "ggml-model", withExtension: "gguf", subdirectory: "models") // Bundle.main.url(forResource: "llama-2-7b-chat", withExtension: "Q2_K.gguf", subdirectory: "models") } + init() { do { - try loadModel() + try loadModel(modelUrl: defaultModelUrl) } catch { messageLog += "Error!\n" } } - private func loadModel() throws { + func loadModel(modelUrl: URL?) 
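// Note: loadModel is no longer private and now takes an explicit URL so the UI
// (e.g. DownloadButton below) can load a model it has just fetched; init() falls
// back to the bundled defaultModelUrl when nothing has been downloaded yet.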
throws { messageLog += "Loading model...\n" if let modelUrl { - llamaContext = try LlamaContext.createContext(path: modelUrl.path()) + llamaContext = try LlamaContext.create_context(path: modelUrl.path()) messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" } else { messageLog += "Could not locate model\n" @@ -31,7 +33,7 @@ class LlamaState: ObservableObject { guard let llamaContext else { return } - messageLog += "Attempting to complete text...\n" + await llamaContext.completion_init(text: text) messageLog += "\(text)" @@ -42,4 +44,42 @@ class LlamaState: ObservableObject { await llamaContext.clear() messageLog += "\n\ndone\n" } + + func bench() async { + guard let llamaContext else { + return + } + + messageLog += "\n" + messageLog += "Running benchmark...\n" + messageLog += "Model info: " + messageLog += await llamaContext.model_info() + "\n" + + let t_start = DispatchTime.now().uptimeNanoseconds + await llamaContext.bench(pp: 8, tg: 4, pl: 1) // heat up + let t_end = DispatchTime.now().uptimeNanoseconds + + let t_heat = Double(t_end - t_start) / 1_000_000_000.0 + messageLog += "Heat up time: \(t_heat) seconds, please wait...\n" + + // if more than 5 seconds, then we're probably running on a slow device + if t_heat > 5.0 { + messageLog += "Heat up time is too long, aborting benchmark\n" + return + } + + let result = await llamaContext.bench(pp: 512, tg: 128, pl: 1, nr: 3) + + messageLog += "\(result)" + messageLog += "\n" + } + + func clear() async { + guard let llamaContext else { + return + } + + await llamaContext.clear() + messageLog = "" + } } diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index 0bd16a806..219bf4dc1 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -5,24 +5,97 @@ struct ContentView: View { @State private var multiLineText = "" + private static func cleanupModelCaches() { + // Delete all models (*.gguf) + let fileManager = FileManager.default + let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] + do { + let fileURLs = try fileManager.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil) + for fileURL in fileURLs { + if fileURL.pathExtension == "gguf" { + try fileManager.removeItem(at: fileURL) + } + } + } catch { + print("Error while enumerating files \(documentsUrl.path): \(error.localizedDescription)") + } + } + var body: some View { VStack { - ScrollView(.vertical) { + ScrollView(.vertical, showsIndicators: true) { Text(llamaState.messageLog) + .font(.system(size: 12)) + .frame(maxWidth: .infinity, alignment: .leading) + .padding() + .onTapGesture { + UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) + } } TextEditor(text: $multiLineText) - .frame(height: 200) + .frame(height: 80) .padding() .border(Color.gray, width: 0.5) - Button(action: { - sendText() - }) { - Text("Send") - .padding() - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) + + HStack { + Button("Send") { + sendText() + } + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + + Button("Bench") { + bench() + } + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + + Button("Clear") { + clear() + } + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + + Button("Copy") { + 
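// Note: the button actions here stay synchronous; sendText(), bench() and clear()
// (defined at the bottom of this view) hop into a Task to call the async
// @MainActor LlamaState methods, so the SwiftUI action closures never block.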
UIPasteboard.general.string = llamaState.messageLog + } + .padding(8) + .background(Color.blue) + .foregroundColor(.white) + .cornerRadius(8) + } + + VStack { + DownloadButton( + llamaState: llamaState, + modelName: "TinyLlama-1.1B (Q4_0)", + modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true", + filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf" + ) + .font(.system(size: 12)) + .padding(.top, 4) + + DownloadButton( + llamaState: llamaState, + modelName: "TinyLlama-1.1B (Q8_0)", + modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q8_0.gguf?download=true", + filename: "tinyllama-1.1b-1t-openorca.Q8_0.gguf" + ) + .font(.system(size: 12)) + + Button("Clear downloaded models") { + ContentView.cleanupModelCaches() + llamaState.cacheCleared = true + } + .padding(8) + .font(.system(size: 12)) } } .padding() @@ -34,9 +107,20 @@ struct ContentView: View { multiLineText = "" } } + + func bench() { + Task { + await llamaState.bench() + } + } + + func clear() { + Task { + await llamaState.clear() + } + } } -/* -#Preview { - ContentView() -} -*/ + +//#Preview { +// ContentView() +//} diff --git a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift new file mode 100644 index 000000000..4bd75cb69 --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift @@ -0,0 +1,122 @@ +import SwiftUI + +struct DownloadButton: View { + @ObservedObject private var llamaState: LlamaState + private var modelName: String + private var modelUrl: String + private var filename: String + + @State private var status: String + + @State private var downloadTask: URLSessionDownloadTask? + @State private var progress = 0.0 + @State private var observation: NSKeyValueObservation? + + private static func getFileURL(filename: String) -> URL { + FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent(filename) + } + + private func checkFileExistenceAndUpdateStatus() { + } + + init(llamaState: LlamaState, modelName: String, modelUrl: String, filename: String) { + self.llamaState = llamaState + self.modelName = modelName + self.modelUrl = modelUrl + self.filename = filename + + let fileURL = DownloadButton.getFileURL(filename: filename) + status = FileManager.default.fileExists(atPath: fileURL.path) ? "downloaded" : "download" + } + + private func download() { + status = "downloading" + print("Downloading model \(modelName) from \(modelUrl)") + guard let url = URL(string: modelUrl) else { return } + let fileURL = DownloadButton.getFileURL(filename: filename) + + downloadTask = URLSession.shared.downloadTask(with: url) { temporaryURL, response, error in + if let error = error { + print("Error: \(error.localizedDescription)") + return + } + + guard let response = response as? 
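// Note: URLSession discards the temporary file once this completion handler
// returns, so the download has to be copied into the Documents directory here,
// before the status flips to "downloaded".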
HTTPURLResponse, (200...299).contains(response.statusCode) else { + print("Server error!") + return + } + + do { + if let temporaryURL = temporaryURL { + try FileManager.default.copyItem(at: temporaryURL, to: fileURL) + print("Writing to \(filename) completed") + + llamaState.cacheCleared = false + + status = "downloaded" + } + } catch let err { + print("Error: \(err.localizedDescription)") + } + } + + observation = downloadTask?.progress.observe(\.fractionCompleted) { progress, _ in + self.progress = progress.fractionCompleted + } + + downloadTask?.resume() + } + + var body: some View { + VStack { + if status == "download" { + Button(action: download) { + Text("Download " + modelName) + } + } else if status == "downloading" { + Button(action: { + downloadTask?.cancel() + status = "download" + }) { + Text("\(modelName) (Downloading \(Int(progress * 100))%)") + } + } else if status == "downloaded" { + Button(action: { + let fileURL = DownloadButton.getFileURL(filename: filename) + if !FileManager.default.fileExists(atPath: fileURL.path) { + download() + return + } + do { + try llamaState.loadModel(modelUrl: fileURL) + } catch let err { + print("Error: \(err.localizedDescription)") + } + }) { + Text("\(modelName) (Downloaded)") + } + } else { + Text("Unknown status") + } + } + .onDisappear() { + downloadTask?.cancel() + } + .onChange(of: llamaState.cacheCleared) { newValue in + if newValue { + downloadTask?.cancel() + let fileURL = DownloadButton.getFileURL(filename: filename) + status = FileManager.default.fileExists(atPath: fileURL.path) ? "downloaded" : "download" + } + } + } +} + +// #Preview { +// DownloadButton( +// llamaState: LlamaState(), +// modelName: "TheBloke / TinyLlama-1.1B-1T-OpenOrca-GGUF (Q4_0)", +// modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true", +// filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf" +// ) +// } diff --git a/llama.cpp b/llama.cpp index f49214c13..fd9fd6ed9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2397,25 +2397,25 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { switch (ftype) { case LLAMA_FTYPE_ALL_F32: return "all F32"; - case LLAMA_FTYPE_MOSTLY_F16: return "mostly F16"; - case LLAMA_FTYPE_MOSTLY_Q4_0: return "mostly Q4_0"; - case LLAMA_FTYPE_MOSTLY_Q4_1: return "mostly Q4_1"; + case LLAMA_FTYPE_MOSTLY_F16: return "F16"; + case LLAMA_FTYPE_MOSTLY_Q4_0: return "Q4_0"; + case LLAMA_FTYPE_MOSTLY_Q4_1: return "Q4_1"; case LLAMA_FTYPE_MOSTLY_Q4_1_SOME_F16: - return "mostly Q4_1, some F16"; - case LLAMA_FTYPE_MOSTLY_Q5_0: return "mostly Q5_0"; - case LLAMA_FTYPE_MOSTLY_Q5_1: return "mostly Q5_1"; - case LLAMA_FTYPE_MOSTLY_Q8_0: return "mostly Q8_0"; + return "Q4_1, some F16"; + case LLAMA_FTYPE_MOSTLY_Q5_0: return "Q5_0"; + case LLAMA_FTYPE_MOSTLY_Q5_1: return "Q5_1"; + case LLAMA_FTYPE_MOSTLY_Q8_0: return "Q8_0"; // K-quants - case LLAMA_FTYPE_MOSTLY_Q2_K: return "mostly Q2_K"; - case LLAMA_FTYPE_MOSTLY_Q3_K_S: return "mostly Q3_K - Small"; - case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "mostly Q3_K - Medium"; - case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "mostly Q3_K - Large"; - case LLAMA_FTYPE_MOSTLY_Q4_K_S: return "mostly Q4_K - Small"; - case LLAMA_FTYPE_MOSTLY_Q4_K_M: return "mostly Q4_K - Medium"; - case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "mostly Q5_K - Small"; - case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "mostly Q5_K - Medium"; - case LLAMA_FTYPE_MOSTLY_Q6_K: return "mostly Q6_K"; + case LLAMA_FTYPE_MOSTLY_Q2_K: return "Q2_K"; + case LLAMA_FTYPE_MOSTLY_Q3_K_S: 
return "Q3_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "Q3_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "Q3_K - Large"; + case LLAMA_FTYPE_MOSTLY_Q4_K_S: return "Q4_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q4_K_M: return "Q4_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "Q5_K - Small"; + case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium"; + case LLAMA_FTYPE_MOSTLY_Q6_K: return "Q6_K"; default: return "unknown, may not work"; } @@ -2533,6 +2533,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); switch (hparams.n_layer) { + case 22: model.type = e_model::MODEL_1B; break; case 26: model.type = e_model::MODEL_3B; break; case 32: model.type = e_model::MODEL_7B; break; case 40: model.type = e_model::MODEL_13B; break; From b1306c439490c7fa4ec33594500d980d1e9e15e6 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 17 Dec 2023 20:16:23 +0200 Subject: [PATCH 255/859] readme : update hot topics --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index edbe6ba57..01aef2afc 100644 --- a/README.md +++ b/README.md @@ -10,11 +10,11 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics +- Collecting Apple Silicon performance stats: + - M-series: https://github.com/ggerganov/llama.cpp/discussions/4167 + - A-series: https://github.com/ggerganov/llama.cpp/discussions/4508 - Added Mixtral support: https://github.com/ggerganov/llama.cpp/pull/4406 -- **llama.h API change for handling KV cache offloading and data type: https://github.com/ggerganov/llama.cpp/pull/4309** -- Using `llama.cpp` with AWS instances: https://github.com/ggerganov/llama.cpp/discussions/4225 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 -- Collecting Apple Silicon performance stats: https://github.com/ggerganov/llama.cpp/discussions/4167 ---- From 2994f0c5a2e8c96955b422dedc93ec2595d16b82 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sun, 17 Dec 2023 19:39:02 -0500 Subject: [PATCH 256/859] decode : fix logits_valid for legacy API (#4516) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index fd9fd6ed9..d6d575f9e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -6184,7 +6184,7 @@ static int llama_decode_internal( logits_out.resize(n_vocab); memcpy(logits_out.data(), (float *) ggml_get_data(res) + (n_vocab*(n_tokens - 1)), sizeof(float)*n_vocab); #ifndef NDEBUG - logits_valid[n_tokens - 1] = true; + logits_valid[0] = true; #endif } } From 3c04bf6da89eaf4c7d317e0518f0687dfcbf2de7 Mon Sep 17 00:00:00 2001 From: hankcs Date: Mon, 18 Dec 2023 05:14:58 -0800 Subject: [PATCH 257/859] llama : fix try_override for bool_value which always return true (#4519) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index d6d575f9e..99facbf77 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1937,7 +1937,7 @@ namespace GGUFMeta { target = override->bool_value; return true; } - return true; + return false; } template From b9e74f9bca5fdf7d0a22ed25e7a9626335fdfa48 Mon Sep 17 00:00:00 2001 From: Ebey Abraham Date: Mon, 18 Dec 2023 17:27:47 +0000 Subject: [PATCH 258/859] llama : add phi-2 + fix NeoX rope + ggml_mul_mat_set_prec (#4490) * phi2 implementation * fix breaking change * phi-2 : various fixes * phi-2 : use layer norm eps * py : whitespaces * llama : fix meta KV override bug * convert : phi don't add 
* convert : revert "added_tokens_decoder" change
* phi-2 : scale Q instead of KQ for better precision
* ggml : fix NeoX rope to rotate just first n_dims
* cuda : less diff in the rope_neox kernel
* ggml : add ggml_mul_mat_set_prec

ggml-ci

* Update ggml-cuda.cu

Co-authored-by: slaren

* Update ggml-cuda.cu

Co-authored-by: slaren

* cuda : ggml_cuda_op_mul_mat_cublas support F32 precision
* cuda : remove obsolete comment

---------

Co-authored-by: Ebey Abraham
Co-authored-by: Georgi Gerganov
Co-authored-by: slaren
---
 convert-hf-to-gguf.py          |  22 +++
 ggml-cuda.cu                   | 117 +++++++++----
 ggml-metal.metal               |  13 +-
 ggml.c                         |  46 ++++-
 ggml.h                         |  12 ++
 gguf-py/gguf/constants.py      |  13 ++
 gguf-py/gguf/tensor_mapping.py |   8 +
 llama.cpp                      | 307 +++++++++++++++++++++++++++++----
 tests/test-backend-ops.cpp     |   1 +
 9 files changed, 463 insertions(+), 76 deletions(-)

diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py
index e46a7813a..e71a96c48 100755
--- a/convert-hf-to-gguf.py
+++ b/convert-hf-to-gguf.py
@@ -182,6 +182,8 @@ class Model:
             return QwenModel
         if model_architecture == "MixtralForCausalLM":
             return MixtralModel
+        if model_architecture == "PhiForCausalLM":
+            return Phi2Model
         return Model

     def _is_model_safetensors(self) -> bool:
@@ -221,6 +223,8 @@ class Model:
             return gguf.MODEL_ARCH.QWEN
         if arch == "MixtralForCausalLM":
             return gguf.MODEL_ARCH.LLAMA
+        if arch == "PhiForCausalLM":
+            return gguf.MODEL_ARCH.PHI2

         raise NotImplementedError(f'Architecture "{arch}" not supported!')

@@ -980,6 +984,24 @@ class QwenModel(Model):
             print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}")
             self.gguf_writer.add_tensor(new_name, data)

+
+class Phi2Model(Model):
+    def set_gguf_parameters(self):
+        block_count = self.hparams["n_layer"]
+
+        self.gguf_writer.add_name("Phi2")
+        self.gguf_writer.add_context_length(self.hparams["n_positions"])
+        self.gguf_writer.add_embedding_length(self.hparams["n_embd"])
+        self.gguf_writer.add_feed_forward_length(4 * self.hparams["n_embd"])
+        self.gguf_writer.add_block_count(block_count)
+        self.gguf_writer.add_head_count(self.hparams["n_head"])
+        self.gguf_writer.add_head_count_kv(self.hparams["n_head"])
+        self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"])
+        self.gguf_writer.add_rope_dimension_count(self.hparams["rotary_dim"])
+        self.gguf_writer.add_file_type(self.ftype)
+        self.gguf_writer.add_add_bos_token(False)
+
+
 ###### CONVERSION LOGIC ######

diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 0a63c1ecf..d0f3d8034 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -4998,7 +4998,16 @@ static __global__ void rope_neox(
     const int ib = col / n_dims;
     const int ic = col % n_dims;

-    const int i = row*ncols + ib*n_dims + ic/2;
+    if (ib > 0) {
+        const int i = row*ncols + ib*n_dims + ic;
+
+        dst[i + 0] = x[i + 0];
+        dst[i + 1] = x[i + 1];
+
+        return;
+    }
+
+    const int i = row*ncols + ib*n_dims + ic/2;
     const int i2 = row/p_delta_rows;

     float cur_rot = inv_ndims * ic - ib;
@@ -7057,6 +7066,7 @@ inline void ggml_cuda_op_upscale(

     (void) src1;
     (void) dst;
+    (void) src1_dd;
 }

 inline void ggml_cuda_op_pad(
@@ -7073,6 +7083,7 @@ inline void ggml_cuda_op_pad(

     (void) src1;
     (void) dst;
+    (void) src1_dd;
 }

 inline void ggml_cuda_op_rms_norm(
@@ -7376,7 +7387,7 @@ inline void ggml_cuda_op_mul_mat_cublas(

     const int compute_capability = g_compute_capabilities[id];

-    if (compute_capability >= CC_VOLTA && (src0->type ==
GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { // convert src0 and src1 to fp16, multiply as fp16, convert dst to fp32 half * src0_as_f16 = nullptr; size_t src0_as = 0; @@ -8300,27 +8311,27 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor } static __global__ void k_compute_batched_ptrs( - const half * src0_as_f16, const half * src1_as_f16, half * dst_f16, + const half * src0_as_f16, const half * src1_as_f16, char * dst, const void ** ptrs_src, void ** ptrs_dst, - int ne12, int ne13, - int ne23, - int nb02, int nb03, - int nb12, int nb13, - int nb2, int nb3, - int r2, int r3) { - int i13 = blockIdx.x * blockDim.x + threadIdx.x; - int i12 = blockIdx.y * blockDim.y + threadIdx.y; + int64_t ne12, int64_t ne13, + int64_t ne23, + size_t nb02, size_t nb03, + size_t nb12, size_t nb13, + size_t nbd2, size_t nbd3, + int64_t r2, int64_t r3) { + int64_t i13 = blockIdx.x * blockDim.x + threadIdx.x; + int64_t i12 = blockIdx.y * blockDim.y + threadIdx.y; if (i13 >= ne13 || i12 >= ne12) { return; } - int i03 = i13 / r3; - int i02 = i12 / r2; + int64_t i03 = i13 / r3; + int64_t i02 = i12 / r2; ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03; ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_as_f16 + i12*nb12/2 + i13*nb13/2; - ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst_f16 + i12* nb2/2 + i13* nb3/2; + ptrs_dst[0*ne23 + i12 + i13*ne12] = ( char *) dst + i12*nbd2 + i13*nbd3; } static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -8376,7 +8387,41 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const to_fp16_cuda(src1_ddf, src1_as_f16, ne1, main_stream); size_t dst_as = 0; - half * dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); + + half * dst_f16 = nullptr; + char * dst_t = nullptr; + + cublasComputeType_t cu_compute_type = CUBLAS_COMPUTE_16F; + cudaDataType_t cu_data_type = CUDA_R_16F; + + // dst strides + size_t nbd2 = dst->nb[2]; + size_t nbd3 = dst->nb[3]; + + const half alpha_f16 = 1.0f; + const half beta_f16 = 0.0f; + + const float alpha_f32 = 1.0f; + const float beta_f32 = 0.0f; + + const void * alpha = &alpha_f16; + const void * beta = &beta_f16; + + if (dst->op_params[0] == GGML_PREC_DEFAULT) { + dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); + dst_t = (char *) dst_f16; + + nbd2 /= sizeof(float) / sizeof(half); + nbd3 /= sizeof(float) / sizeof(half); + } else { + dst_t = (char *) dst_ddf; + + cu_compute_type = CUBLAS_COMPUTE_32F; + cu_data_type = CUDA_R_32F; + + alpha = &alpha_f32; + beta = &beta_f32; + } GGML_ASSERT(ne12 % ne02 == 0); GGML_ASSERT(ne13 % ne03 == 0); @@ -8385,9 +8430,6 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const int64_t r2 = ne12/ne02; const int64_t r3 = ne13/ne03; - const half alpha_f16 = 1.0f; - const half beta_f16 = 0.0f; - #if 0 // use cublasGemmEx { @@ -8397,12 +8439,12 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const int i02 = i12 / r2; CUBLAS_CHECK( - cublasGemmEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, + cublasGemmEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - &alpha_f16, (const char *) src0_as_f16 + i02*src0->nb[2] + i03*src0->nb[3] , CUDA_R_16F, nb01/sizeof(half), - (const char *) src1_as_f16 + i12*src1->nb[2]/2 + 
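// Precision selection in this batched path: with GGML_PREC_DEFAULT the GEMM
// accumulates in F16 into a temporary half buffer (note the dst strides halved via
// nbd2/nbd3) and the result is converted back to F32 afterwards; with GGML_PREC_F32
// it runs with CUBLAS_COMPUTE_32F and writes straight into the F32 dst, skipping
// the conversion step entirely.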
i13*src1->nb[3]/2, CUDA_R_16F, nb11/sizeof(float), - &beta_f16, ( char *) dst_f16 + i12* dst->nb[2]/2 + i13* dst->nb[3]/2, CUDA_R_16F, ne01, - CUBLAS_COMPUTE_16F, + alpha, (const char *) src0_as_f16 + i02*src0->nb[2] + i03*src0->nb[3] , CUDA_R_16F, nb01/sizeof(half), + (const char *) src1_as_f16 + i12*src1->nb[2]/2 + i13*src1->nb[3]/2, CUDA_R_16F, nb11/sizeof(float), + beta, ( char *) dst_t + i12*nbd2 + i13*nbd3, cu_data_type, ne01, + cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); } } @@ -8414,11 +8456,11 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmStridedBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - &alpha_f16, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA - (const char *) src1_as_f16, CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB - &beta_f16, ( char *) dst_f16, CUDA_R_16F, ne01, dst->nb[2]/sizeof(float), // strideC + alpha, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA + (const char *) src1_as_f16, CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB + beta, ( char *) dst_t, cu_data_type, ne01, dst->nb[2]/sizeof(float), // strideC ne12*ne13, - CUBLAS_COMPUTE_16F, + cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); } else { // use cublasGemmBatchedEx @@ -8435,24 +8477,24 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const dim3 block_dims(ne13, ne12); k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( - src0_as_f16, src1_as_f16, dst_f16, + src0_as_f16, src1_as_f16, dst_t, ptrs_src, ptrs_dst, ne12, ne13, ne23, nb02, nb03, nb12, nb13, - dst->nb[2], dst->nb[3], + nbd2, nbd3, r2, r3); CUDA_CHECK(cudaGetLastError()); CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - &alpha_f16, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, nb01/sizeof(half), - (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, nb11/sizeof(float), - &beta_f16, ( void **) (ptrs_dst + 0*ne23), CUDA_R_16F, ne01, + alpha, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, nb01/sizeof(half), + (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + beta, ( void **) (ptrs_dst + 0*ne23), cu_data_type, ne01, ne23, - CUBLAS_COMPUTE_16F, + cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); if (ptrs_src_s != 0) { @@ -8464,11 +8506,14 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const } #endif - const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); - to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); + if (dst->op_params[0] == GGML_PREC_DEFAULT) { + const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); + to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); + + ggml_cuda_pool_free(dst_f16, dst_as); + } ggml_cuda_pool_free(src1_as_f16, src1_as); - ggml_cuda_pool_free(dst_f16, dst_as); } static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { diff --git a/ggml-metal.metal b/ggml-metal.metal index fe0ada445..d5b54e112 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -1702,8 +1702,9 @@ kernel void kernel_rope( dst_data[1] = x0*sin_theta + x1*cos_theta; } } else { - for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { - for (int64_t ic = 2*tiitg; ic < n_dims; ic += 2*tptg.x) { + for (int64_t ic = 2*tiitg; ic < ne0; ic += 2*tptg.x) { + if (ic < n_dims) { + const int64_t ib = 
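// As in the CUDA rope_neox kernel above, columns at or beyond n_dims now fall
// through to a plain copy: only the first n_dims elements of each row are rotated,
// and the rest of the head dimension passes through unchanged (mirroring the CPU
// fix in ggml.c below).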
0; // simplified from `(ib * n_dims + ic) * inv_ndims` const float cur_rot = inv_ndims*ic - ib; @@ -1722,6 +1723,14 @@ kernel void kernel_rope( dst_data[0] = x0*cos_theta - x1*sin_theta; dst_data[n_dims/2] = x0*sin_theta + x1*cos_theta; + } else { + const int64_t i0 = ic; + + device const T * const src = (device T *)((device char *) src0 + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + device T * dst_data = (device T *)((device char *) dst + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_data[0] = src[0]; + dst_data[1] = src[1]; } } } diff --git a/ggml.c b/ggml.c index ad546a731..6da65bd92 100644 --- a/ggml.c +++ b/ggml.c @@ -4098,6 +4098,14 @@ struct ggml_tensor * ggml_mul_mat( return result; } +void ggml_mul_mat_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec) { + const int32_t prec_i32 = (int32_t) prec; + + ggml_set_op_params_i32(a, 0, prec_i32); +} + // ggml_mul_mat_id struct ggml_tensor * ggml_mul_mat_id( @@ -9168,6 +9176,8 @@ static void ggml_compute_forward_norm_f32( float eps; memcpy(&eps, dst->op_params, sizeof(float)); + GGML_ASSERT(eps > 0.0f); + // TODO: optimize for (int64_t i03 = 0; i03 < ne03; i03++) { for (int64_t i02 = 0; i02 < ne02; i02++) { @@ -9237,6 +9247,8 @@ static void ggml_compute_forward_rms_norm_f32( float eps; memcpy(&eps, dst->op_params, sizeof(float)); + GGML_ASSERT(eps > 0.0f); + // TODO: optimize for (int64_t i03 = 0; i03 < ne03; i03++) { for (int64_t i02 = 0; i02 < ne02; i02++) { @@ -11562,10 +11574,13 @@ static void ggml_compute_forward_rope_f32( } } else { // TODO: this might be wrong for ne0 != n_dims - need double check - // ref: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt_neox/modeling_gpt_neox.py#LL251C1-L294C28 + // it seems we have to rope just the first n_dims elements and do nothing with the rest + // ref: https://github.com/ml-explore/mlx/blob/dc2edc762c797e3b8de50b1dad4dc0a131691033/benchmarks/python/llama_jax_bench.py#L11-L26 theta_base *= freq_scale; - for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { - for (int64_t ic = 0; ic < n_dims; ic += 2) { + for (int64_t ic = 0; ic < ne0; ic += 2) { + if (ic < n_dims) { + const int64_t ib = 0; + // simplified from `(ib * n_dims + ic) * inv_ndims` float cur_rot = inv_ndims * ic - ib; @@ -11588,6 +11603,14 @@ static void ggml_compute_forward_rope_f32( dst_data[0] = x0*cos_theta - x1*sin_theta; dst_data[n_dims/2] = x0*sin_theta + x1*cos_theta; + } else { + const int64_t i0 = ic; + + const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + float * dst_data = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_data[0] = src[0]; + dst_data[1] = src[1]; } } } @@ -11715,10 +11738,13 @@ static void ggml_compute_forward_rope_f16( } } else { // TODO: this might be wrong for ne0 != n_dims - need double check - // ref: https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt_neox/modeling_gpt_neox.py#LL251C1-L294C28 + // it seems we have to rope just the first n_dims elements and do nothing with the rest + // ref: https://github.com/ml-explore/mlx/blob/dc2edc762c797e3b8de50b1dad4dc0a131691033/benchmarks/python/llama_jax_bench.py#L11-L26 theta_base *= freq_scale; - for (int64_t ib = 0; ib < ne0/n_dims; ++ib) { - for (int64_t ic = 0; ic < n_dims; ic += 2) { + for (int64_t ic = 0; ic < ne0; ic += 2) { + if (ic < n_dims) { + const int64_t ib = 0; + // simplified from `(ib * n_dims + ic) * inv_ndims` float cur_rot = inv_ndims * ic - ib; @@ -11741,6 +11767,14 @@ static void 
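// A minimal standalone sketch of the fixed NeoX rope behavior for one contiguous
// row of length ne0 (a hypothetical helper, not part of this patch): pairs
// (i, i + n_dims/2) within the first n_dims elements are rotated, everything past
// n_dims is copied through. It ignores freq_scale, ext_factor and the other tuning
// parameters the real implementation applies.
#include <math.h>

static void rope_neox_row_ref(float * dst, const float * src,
                              int ne0, int n_dims, int pos, float freq_base) {
    for (int ic = 0; ic < ne0; ic += 2) {
        if (ic < n_dims) {
            // theta = pos * freq_base^(-ic/n_dims), i.e. base^(-2i/d) with i = ic/2
            const float theta = pos * powf(freq_base, -(float) ic / n_dims);
            const float x0 = src[ic/2];
            const float x1 = src[ic/2 + n_dims/2];
            dst[ic/2]            = x0*cosf(theta) - x1*sinf(theta);
            dst[ic/2 + n_dims/2] = x0*sinf(theta) + x1*cosf(theta);
        } else {
            // pass-through for the un-rotated tail, as in the hunks above and below
            dst[ic + 0] = src[ic + 0];
            dst[ic + 1] = src[ic + 1];
        }
    }
}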
ggml_compute_forward_rope_f16( dst_data[0] = GGML_FP32_TO_FP16(x0*cos_theta - x1*sin_theta); dst_data[n_dims/2] = GGML_FP32_TO_FP16(x0*sin_theta + x1*cos_theta); + } else { + const int64_t i0 = ic; + + const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); + ggml_fp16_t * dst_data = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); + + dst_data[0] = src[0]; + dst_data[1] = src[1]; } } } diff --git a/ggml.h b/ggml.h index 68f7833b6..f1003984f 100644 --- a/ggml.h +++ b/ggml.h @@ -343,6 +343,12 @@ extern "C" { GGML_TYPE_COUNT, }; + // precision + enum ggml_prec { + GGML_PREC_DEFAULT, + GGML_PREC_F32, + }; + enum ggml_backend_type { GGML_BACKEND_CPU = 0, GGML_BACKEND_GPU = 10, @@ -1057,6 +1063,12 @@ extern "C" { struct ggml_tensor * a, struct ggml_tensor * b); + // change the precision of a matrix multiplication + // set to GGML_PREC_F32 for higher precision (useful for phi-2) + GGML_API void ggml_mul_mat_set_prec( + struct ggml_tensor * a, + enum ggml_prec prec); + // indirect matrix multiplication // ggml_mul_mat_id(ctx, as, ids, id, b) ~= ggml_mul_mat(as[ids[id]], b) GGML_API struct ggml_tensor * ggml_mul_mat_id( diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 12133882b..390dca049 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -95,6 +95,7 @@ class MODEL_ARCH(IntEnum): BLOOM = auto() STABLELM = auto() QWEN = auto() + PHI2 = auto() class MODEL_TENSOR(IntEnum): @@ -140,6 +141,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.BLOOM: "bloom", MODEL_ARCH.STABLELM: "stablelm", MODEL_ARCH.QWEN: "qwen", + MODEL_ARCH.PHI2: "phi2", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -350,6 +352,17 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_ARCH.GPT2: [ # TODO ], + MODEL_ARCH.PHI2: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ] # TODO } diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 0115ea1c6..6fcbdbc1c 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -17,6 +17,7 @@ class TensorNameMap: "tok_embeddings", # llama-pth "embeddings.word_embeddings", # bert "language_model.embedding.word_embeddings", # persimmon + "transformer.embd.wte", # phi2 ), # Token type embeddings @@ -41,6 +42,7 @@ class TensorNameMap: "lm_head", # gpt2 mpt falcon llama-hf baichuan qwen "output", # llama-pth bloom "word_embeddings_for_head", # persimmon + "lm_head.linear", # phi2 ), # Output norm @@ -53,6 +55,7 @@ class TensorNameMap: "transformer.norm_f", # mpt "ln_f", # refact bloom qwen "language_model.encoder.final_layernorm", # persimmon + "lm_head.ln", # phi2 ), # Rope frequencies @@ -75,6 +78,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.input_layernorm", # persimmon "model.layers.{bid}.ln1", # yi + "transformer.h.{bid}.ln", # phi2 ), # Attention norm 2 @@ -90,6 +94,7 @@ class TensorNameMap: "transformer.h.{bid}.self_attention.query_key_value", # falcon "h.{bid}.self_attention.query_key_value", # bloom "language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon + "transformer.h.{bid}.mixer.Wqkv", # phi2 ), # Attention query @@ -128,6 +133,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.dense", # bert 
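# Each MODEL_TENSOR entry lists the per-framework Hugging Face tensor names that
# map onto one GGUF tensor; "{bid}" placeholders are instantiated per block index
# when the map is built, so e.g. phi2's "transformer.h.{bid}.mixer.Wqkv" resolves
# to the fused attn_qkv tensor of every layer.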
"transformer.h.{bid}.attn.out_proj", # gpt-j "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon + "transformer.h.{bid}.mixer.out_proj", # phi2 ), # Rotary embeddings @@ -167,6 +173,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc_in", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon "transformer.h.{bid}.mlp.w1", # qwen + "transformer.h.{bid}.mlp.fc1", # phi2 ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -198,6 +205,7 @@ class TensorNameMap: "encoder.layer.{bid}.output.dense", # bert "transformer.h.{bid}.mlp.fc_out", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon + "transformer.h.{bid}.mlp.fc2", # phi2 ), MODEL_TENSOR.FFN_DOWN_EXP: ( diff --git a/llama.cpp b/llama.cpp index 99facbf77..edd2910b3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -195,6 +195,7 @@ enum llm_arch { LLM_ARCH_BLOOM, LLM_ARCH_STABLELM, LLM_ARCH_QWEN, + LLM_ARCH_PHI2, LLM_ARCH_UNKNOWN, }; @@ -212,6 +213,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_BLOOM, "bloom" }, { LLM_ARCH_STABLELM, "stablelm" }, { LLM_ARCH_QWEN, "qwen" }, + { LLM_ARCH_PHI2, "phi2" }, }; enum llm_kv { @@ -550,6 +552,19 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_PHI2, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, @@ -1420,6 +1435,7 @@ struct llama_model { struct ggml_tensor * output_norm; struct ggml_tensor * output_norm_b; struct ggml_tensor * output; + struct ggml_tensor * output_b; std::vector layers; @@ -2635,6 +2651,15 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_PHI2: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + + switch (hparams.n_layer) { + case 32: model.type = e_model::MODEL_3B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -2987,7 +3012,7 @@ static void llm_load_tensors( (void) main_gpu; - enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; + enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; enum ggml_backend_type llama_backend_offload_split = GGML_BACKEND_CPU; #ifdef GGML_USE_CUBLAS @@ -3630,7 +3655,73 @@ static void llm_load_tensors( } } } break; + case LLM_ARCH_PHI2: + { + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + // output + { + ggml_backend_type backend_norm; + ggml_backend_type backend_output; + + if (n_gpu_layers > int(n_layer)) { + backend_norm = llama_backend_offload; + backend_output = llama_backend_offload; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); + model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); + model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}, backend_output); + + if (backend_norm == GGML_BACKEND_GPU) { + vram_weights += ggml_nbytes(model.output_norm); + vram_weights += 
ggml_nbytes(model.output_norm_b); + vram_weights += ggml_nbytes(model.output); + vram_weights += ggml_nbytes(model.output_b); + } + } + + const uint32_t n_ff = hparams.n_ff; + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + + for (uint32_t i = 0; i < n_layer; ++i) { + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + + layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + + if (backend == GGML_BACKEND_GPU) { + vram_weights += + ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + + ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + + ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + + ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b) + + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b); + } + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -3991,6 +4082,7 @@ static struct ggml_tensor * llm_build_ffn( // if max_alibi_bias > 0 then apply ALiBi static struct ggml_tensor * llm_build_kqv( struct ggml_context * ctx, + const llama_model & model, const llama_hparams & hparams, const llama_kv_cache & kv, struct ggml_tensor * wo, @@ -4002,6 +4094,7 @@ static struct ggml_tensor * llm_build_kqv( int32_t n_tokens, int32_t n_kv, float max_alibi_bias, + float scale, const llm_build_cb & cb, int il) { const int64_t n_embd = hparams.n_embd; @@ -4024,6 +4117,12 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); cb(kq, "kq", il); + if (model.arch == LLM_ARCH_PHI2) { + // for this arch, we need to perform the KQ multiplication with F32 precision, otherwise we get NaNs + // ref: https://github.com/ggerganov/llama.cpp/pull/4490#issuecomment-1859055847 + ggml_mul_mat_set_prec(kq, GGML_PREC_F32); + } + if (max_alibi_bias > 0.0f) { // temporary branch until we figure out how to handle ggml_alibi through ggml_add kq = ggml_scale(ctx, kq, kq_scale); @@ -4043,7 +4142,7 @@ static struct ggml_tensor * llm_build_kqv( kq = ggml_soft_max(ctx, kq); cb(kq, "kq_soft_max", il); } else { - kq = ggml_soft_max_ext(ctx, kq, kq_mask, 1.0f/sqrtf(float(n_embd_head))); + kq = ggml_soft_max_ext(ctx, kq, kq_mask, scale); cb(kq, "kq_soft_max_ext", il); } @@ -4250,9 +4349,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = 
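// ggml_mul_mat_set_prec (added in ggml.h above) tags the *result* tensor of a
// matmul so that backends which would otherwise run the product in F16 accumulate
// this one op in F32; for phi-2 the KQ product produces NaNs otherwise, per the
// comment in llm_build_kqv. A minimal usage sketch, assuming tensors a and b in an
// existing ggml context:
//     struct ggml_tensor * c = ggml_mul_mat(ctx, a, b);
//     ggml_mul_mat_set_prec(c, GGML_PREC_F32);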
llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4433,9 +4532,9 @@ struct llm_build_context { // apply ALiBi for 13B model const float max_alibi_bias = model.type == MODEL_13B ? 8.0f : -1.0f; - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4557,9 +4656,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4657,9 +4756,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4866,9 +4965,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); // TODO: not tested, could be broken - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4957,9 +5056,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5054,9 +5153,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5148,9 +5247,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, cb, il); + Qcur, KQ_scale, KQ_mask, 
n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5261,9 +5360,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5320,15 +5419,15 @@ struct llm_build_context { cb(inpL, "inp_embd", -1); // inp_pos - contains the positions - struct ggml_tensor * inp_pos= ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); // KQ_scale - struct ggml_tensor * KQ_scale= ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); cb(KQ_scale, "KQ_scale", -1); // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask= ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); // shift the entire K-cache if needed @@ -5378,9 +5477,9 @@ struct llm_build_context { llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); - cur = llm_build_kqv(ctx0, hparams, kv_self, + cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, cb, il); + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5422,6 +5521,122 @@ struct llm_build_context { ggml_build_forward_expand(gf, cur); + return gf; + } + struct ggml_cgraph * build_phi2() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + struct ggml_tensor * cur; + struct ggml_tensor * attn_norm_output; + struct ggml_tensor * ffn_output; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // Q_scale + struct ggml_tensor * Q_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(Q_scale, "Q_scale", -1); + + // KQ_scale + struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); + cb(KQ_scale, "KQ_scale", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + attn_norm_output = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, + model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(attn_norm_output, "attn_norm", il); + + // self-attention + { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, attn_norm_output); + cb(cur, "wqkv", il); + + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + struct ggml_tensor * Qcur = 
ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + Kcur = ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens); + + Qcur = ggml_rope_custom( + ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Qcur = ggml_scale(ctx0, Qcur, Q_scale); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, + freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, model, hparams, kv_self, + model.layers[il].wo, model.layers[il].bo, + Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f, cb, il); + cb(cur, "kqv_out", il); + } + + // FF + { + ffn_output = llm_build_ffn(ctx0, attn_norm_output, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(ffn_output, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, ffn_output); + cb(cur, "l_out", il); + + cur = ggml_add(ctx0, cur, inpL); + cb(cur, "l_out", il); + + inpL = cur; + } + + cur = llm_build_norm(ctx0, inpL, hparams, + model.output_norm, + model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output_no_bias", -1); + + cur = ggml_add(ctx0, cur, model.output_b); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + return gf; } }; @@ -5437,7 +5652,7 @@ enum llm_offload_func_e { OFFLOAD_FUNC_FRC, // force offload OFFLOAD_FUNC_KQV, OFFLOAD_FUNC_NR, - OFFLOAD_FUNC_EMB, + OFFLOAD_FUNC_EMB, // embeddings OFFLOAD_FUNC_OUT, }; @@ -5522,6 +5737,7 @@ static const std::unordered_map k_offload_map { "pos_embd", OFFLOAD_FUNC_NR }, { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. 
rope) + { "Q_scale", OFFLOAD_FUNC_FRC }, { "KQ_scale", OFFLOAD_FUNC_FRC }, { "KQ_mask", OFFLOAD_FUNC_FRC }, { "K_shift", OFFLOAD_FUNC_FRC }, @@ -5606,6 +5822,7 @@ static const std::unordered_map k_offload_map { "l_out", OFFLOAD_FUNC }, { "result_norm", OFFLOAD_FUNC_EMB }, + { "result_output_no_bias", OFFLOAD_FUNC_EMB }, { "result_output", OFFLOAD_FUNC_OUT }, }; @@ -5623,6 +5840,7 @@ static struct ggml_cgraph * llama_build_graph( bool alloc_inp_tokens = false; bool alloc_inp_embd = false; bool alloc_inp_pos = false; + bool alloc_inp_Q_scale = false; bool alloc_inp_KQ_scale = false; bool alloc_inp_KQ_mask = false; bool alloc_inp_K_shift = false; @@ -5690,7 +5908,7 @@ static struct ggml_cgraph * llama_build_graph( alloc_inp_pos = true; } - if (!alloc_inp_KQ_scale && strcmp(name, "KQ_scale") == 0) { + if (!alloc_inp_Q_scale && strcmp(name, "Q_scale") == 0) { ggml_allocr_alloc(lctx.alloc, cur); if (!ggml_allocr_is_measure(lctx.alloc)) { @@ -5698,6 +5916,23 @@ static struct ggml_cgraph * llama_build_graph( ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); } + alloc_inp_Q_scale = true; + } + + if (!alloc_inp_KQ_scale && strcmp(name, "KQ_scale") == 0) { + ggml_allocr_alloc(lctx.alloc, cur); + + if (!ggml_allocr_is_measure(lctx.alloc)) { + const int64_t n_embd_head = model.hparams.n_embd_head(); + if (model.arch == LLM_ARCH_PHI2) { + // with phi2, we scale the Q to avoid precision issues + // ref: https://github.com/ml-explore/mlx-examples/blob/08e862336ade809bc37d1035f94b359e7d1a5152/phi2/phi2.py#L64-L66 + ggml_set_f32(cur, 1.0f); + } else { + ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); + } + } + alloc_inp_KQ_scale = true; } @@ -5922,6 +6157,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_qwen(); } break; + case LLM_ARCH_PHI2: + { + result = llm.build_phi2(); + } break; default: GGML_ASSERT(false); } @@ -6055,12 +6294,16 @@ static int llama_decode_internal( ggml_allocr_alloc_graph(lctx.alloc, gf); - struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; + // the output is always the last tensor in the graph + struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; + GGML_ASSERT(strcmp(res->name, "result_output") == 0); + + // the embeddings could be the second to last tensor, or the third to last tensor struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 2]; - - GGML_ASSERT(strcmp(res->name, "result_output") == 0); - GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); - + if (strcmp(embeddings->name, "result_norm") != 0) { + embeddings = gf->nodes[gf->n_nodes - 3]; + GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); + } #ifdef GGML_USE_CUBLAS for (int i = 0; i < gf->n_leafs; i++) { diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index df2c3fb6e..f04b9438a 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1555,6 +1555,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_rope(type, { 64, 8, 10, 1}, 64, 2, 512)); // neox (falcon 40B) test_cases.emplace_back(new test_rope(type, { 64, 128, 10, 1}, 64, 2, 512)); // neox (falcon 40B) test_cases.emplace_back(new test_rope(type, { 80, 32, 10, 1}, 20, 2, 512)); // neox (stablelm) + test_cases.emplace_back(new test_rope(type, { 80, 32, 10, 1}, 32, 2, 512)); // neox (phi-2) } test_cases.emplace_back(new test_alibi()); From 6ff39b129d0281d045f83d515e51b7197b44b253 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 18 Dec 2023 20:05:12 +0200 Subject: [PATCH 259/859] llama.swiftui : add 
more models
---
 .../llama.cpp.swift/LibLlama.swift            |  2 +-
 .../llama.swiftui/UI/ContentView.swift        | 31 +++++++++++++++++--
 2 files changed, 30 insertions(+), 3 deletions(-)

diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift
index 272e1fd8a..464fb3277 100644
--- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift
+++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift
@@ -203,7 +203,7 @@ actor LlamaContext {
         var pp_std: Double = 0
         var tg_std: Double = 0

-        for r in 0..<nr {
+        for _ in 0..<nr {
[...]

From: Georgi Gerganov
Date: Mon, 18 Dec 2023 20:17:43 +0200
Subject: [PATCH 260/859] llama.swiftui : add tinyllama 1.1B F16

---
 .../llama.swiftui/llama.swiftui/UI/ContentView.swift | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift
index 9cbe8efd6..c78f107b3 100644
--- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift
+++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift
@@ -91,6 +91,15 @@ struct ContentView: View {
                     )
                     .font(.system(size: 12))

+                    DownloadButton(
+                        llamaState: llamaState,
+                        modelName: "TinyLlama-1.1B (F16, 2.2 GiB)",
+                        modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true",
+                        filename: "tinyllama-1.1b-f16.gguf"
+                    )
+                    .font(.system(size: 12))
+                    .frame(maxWidth: .infinity, alignment: .leading)
+
                     DownloadButton(
                         llamaState: llamaState,
                         modelName: "Phi-2.7B (Q4_0, 1.6 GiB)",
                         filename: "phi-2-q4_0.gguf"
                     )
                     .font(.system(size: 12))
-                    .frame(maxWidth: .infinity, alignment: .leading)

                     DownloadButton(
                         llamaState: llamaState,
                         modelName: "Phi-2.7B (Q8_0, 2.8 GiB)",
                         filename: "phi-2-q8_0.gguf"
                     )
                     .font(.system(size: 12))
+                    .frame(maxWidth: .infinity, alignment: .leading)

                     DownloadButton(
                         llamaState: llamaState,
                         modelName: "Mistral-7B-v0.1 (Q4_0, 3.8 GiB)",
                         filename: "mistral-7b-v0.1.Q4_0.gguf"
                     )
                     .font(.system(size: 12))
-                    .frame(maxWidth: .infinity, alignment: .leading)

                     Button("Clear downloaded models") {
                         ContentView.cleanupModelCaches()

From a7aee47b98e45539d491071b25778b833b77e387 Mon Sep 17 00:00:00 2001
From: arlo-phoenix <140345165+arlo-phoenix@users.noreply.github.com>
Date: Mon, 18 Dec 2023 22:33:45 +0100
Subject: [PATCH 261/859] ggml-cuda: Fix HIP build (#4528)

Regression of #4490. Adds defines for the two new datatypes,
cublasComputeType_t and cudaDataType_t. Currently uses the deprecated
hipblasDatatype_t, since the newer replacements are too recent for ROCm 5.6.
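For context, a sketch of why both names must be mapped: ggml-cuda.cu is written
against the CUDA API, and the ROCm build works purely through #define aliases,
so any newly used CUDA identifier needs a HIP equivalent. The helper below is
hypothetical (not part of this patch) and only illustrates how the two aliased
types flow into an existing cuBLAS-style call:

```c
// hypothetical wrapper, assuming the alias block at the top of ggml-cuda.cu
// (and cuda_fp16.h for `half`): every CUDA name below is #define-aliased to
// its hipBLAS/HIP equivalent when building for ROCm, so one definition serves
// both backends. cudaDataType_t describes the A/B/C storage types and
// cublasComputeType_t selects the accumulator precision.
static cublasStatus_t gemm_f16_f32(cublasHandle_t handle, int m, int n, int k,
                                   const half * A, const half * B, float * C) {
    const float alpha = 1.0f;
    const float beta  = 0.0f;
    return cublasGemmEx(handle, CUBLAS_OP_N, CUBLAS_OP_N, m, n, k,
                        &alpha, A, CUDA_R_16F, m,
                                B, CUDA_R_16F, k,
                        &beta,  C, CUDA_R_32F, m,
                        CUBLAS_COMPUTE_32F, CUBLAS_GEMM_DEFAULT);
}
```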
--- ggml-cuda.cu | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index d0f3d8034..f20846fef 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -31,6 +31,7 @@ #define CUDA_R_16F HIPBLAS_R_16F #define CUDA_R_32F HIPBLAS_R_32F #define __shfl_xor_sync(mask, var, laneMask, width) __shfl_xor(var, laneMask, width) +#define cublasComputeType_t hipblasDatatype_t //deprecated, new hipblasComputeType_t not in 5.6 #define cublasCreate hipblasCreate #define cublasGemmEx hipblasGemmEx #define cublasGemmBatchedEx hipblasGemmBatchedEx @@ -40,6 +41,7 @@ #define cublasSetStream hipblasSetStream #define cublasSgemm hipblasSgemm #define cublasStatus_t hipblasStatus_t +#define cudaDataType_t hipblasDatatype_t //deprecated, new hipblasDatatype not in 5.6 #define cudaDeviceCanAccessPeer hipDeviceCanAccessPeer #define cudaDeviceDisablePeerAccess hipDeviceDisablePeerAccess #define cudaDeviceEnablePeerAccess hipDeviceEnablePeerAccess From 328b83de23b33240e28f4e74900d1d06726f5eb1 Mon Sep 17 00:00:00 2001 From: Eric Sommerlade Date: Tue, 19 Dec 2023 16:17:01 +0000 Subject: [PATCH 262/859] ggml : fixed check for _MSC_VER (#4535) Co-authored-by: Eric Sommerlade --- ggml.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml.h b/ggml.h index f1003984f..beacdc8be 100644 --- a/ggml.h +++ b/ggml.h @@ -303,7 +303,7 @@ extern "C" { #if defined(__ARM_NEON) && defined(__CUDACC__) typedef half ggml_fp16_t; -#elif defined(__ARM_NEON) +#elif defined(__ARM_NEON) && !defined(_MSC_VER) typedef __fp16 ggml_fp16_t; #else typedef uint16_t ggml_fp16_t; From 799fc2268989482054944c902874cca76337580f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 20 Dec 2023 15:41:22 +0100 Subject: [PATCH 263/859] CUDA: Faster Mixtral prompt processing (#4538) * CUDA: make MoE tensors contiguous for batch size>1 * Update ggml-cuda.cu Co-authored-by: slaren --------- Co-authored-by: slaren --- ggml-cuda.cu | 118 ++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 93 insertions(+), 25 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f20846fef..9f4b188cb 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7830,6 +7830,11 @@ static void ggml_cuda_set_peer_access(const int n_tokens) { } #ifdef NDEBUG + for (int id = 0; id < g_device_count; ++id) { + CUDA_CHECK(ggml_cuda_set_device(id)); + CUDA_CHECK(cudaDeviceSynchronize()); + } + for (int id = 0; id < g_device_count; ++id) { CUDA_CHECK(ggml_cuda_set_device(id)); @@ -7881,8 +7886,6 @@ static void ggml_cuda_op_mul_mat( const int nb2 = dst->nb[2]; const int nb3 = dst->nb[3]; - ggml_cuda_set_peer_access(ne11); - GGML_ASSERT(dst->backend != GGML_BACKEND_GPU_SPLIT); GGML_ASSERT(src1->backend != GGML_BACKEND_GPU_SPLIT); @@ -8781,16 +8784,21 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + const int64_t nb11 = src1->nb[1]; + const int64_t nb1 = dst->nb[1]; + const struct ggml_tensor * ids = src0; const int32_t id = ((int32_t *) dst->op_params)[0]; const int32_t n_as = ((int32_t *) dst->op_params)[1]; std::vector ids_host(ggml_nbytes(ids)); + const cudaStream_t stream = g_cudaStreams[g_main_device][0]; + if (ids->backend == GGML_BACKEND_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; - CUDA_CHECK(cudaMemcpyAsync(ids_host.data(), ids_dev, ggml_nbytes(ids), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); - 
CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); + CUDA_CHECK(cudaMemcpyAsync(ids_host.data(), ids_dev, ggml_nbytes(ids), cudaMemcpyDeviceToHost, stream)); + CUDA_CHECK(cudaStreamSynchronize(stream)); } else { memcpy(ids_host.data(), ids->data, ggml_nbytes(ids)); } @@ -8804,37 +8812,93 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_tensor src1_row = *src1; ggml_tensor dst_row = *dst; - src1_row.ne[1] = 1; - dst_row.ne[1] = 1; - - src1_row.nb[2] = src1_row.nb[1]; - dst_row.nb[2] = dst_row.nb[1]; - - src1_row.nb[3] = src1_row.nb[1]; - dst_row.nb[3] = dst_row.nb[1]; - src1_row.extra = &src1_row_extra; dst_row.extra = &dst_row_extra; + char * src1_original = (char *) src1_extra->data_device[g_main_device]; + char * dst_original = (char *) dst_extra->data_device[g_main_device]; - for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { - //int32_t row_id; - //CUDA_CHECK(cudaMemcpyAsync(&row_id, ids_dev + i01*ids->nb[1] + id*ids->nb[0], sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); - //CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); + if (src1->ne[1] == 1) { + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + //int32_t row_id; + //CUDA_CHECK(cudaMemcpyAsync(&row_id, ids_dev + i01*ids->nb[1] + id*ids->nb[0], sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); + //CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[g_main_device][0])); - const int32_t row_id = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); + const int32_t row_id = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); - GGML_ASSERT(row_id >= 0 && row_id < n_as); + GGML_ASSERT(row_id >= 0 && row_id < n_as); - const struct ggml_tensor * src0_row = dst->src[row_id + 2]; + const struct ggml_tensor * src0_row = dst->src[row_id + 2]; - src1_row_extra.data_device[g_main_device] = (char *) src1_extra->data_device[g_main_device] + i01*src1->nb[1]; - src1_row.data = (char *) src1->data + i01*src1->nb[1]; + src1_row_extra.data_device[g_main_device] = src1_original + i01*src1->nb[1]; + src1_row.data = (char *) src1->data + i01*src1->nb[1]; // TODO why is this set? - dst_row_extra.data_device[g_main_device] = (char *) dst_extra->data_device[g_main_device] + i01*dst->nb[1]; - dst_row.data = (char *) dst->data + i01*dst->nb[1]; + dst_row_extra.data_device[g_main_device] = dst_original + i01*dst->nb[1]; + dst_row.data = (char *) dst->data + i01*dst->nb[1]; // TODO why is this set? 
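            // [note] ids maps each src1 row i01 to an expert index, read from
            // column `id` of that row; in this single-token path every row is
            // multiplied directly against its selected expert's weights
            // (dst->src[row_id + 2]). The batched path below instead gathers
            // all rows routed to the same expert into a contiguous buffer,
            // runs one larger matmul per expert, and scatters the results
            // back into dst.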
- ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); + ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); + } + } else { + size_t as_src1, as_dst; + char * src1_contiguous = (char *) ggml_cuda_pool_malloc(sizeof(float)*ggml_nelements(src1), &as_src1); + char * dst_contiguous = (char *) ggml_cuda_pool_malloc(sizeof(float)*ggml_nelements(dst), &as_dst); + + src1_row_extra.data_device[g_main_device] = src1_contiguous; + dst_row_extra.data_device[g_main_device] = dst_contiguous; + + for (int32_t row_id = 0; row_id < n_as; ++row_id) { + const struct ggml_tensor * src0_row = dst->src[row_id + 2]; + + int64_t num_src1_rows = 0; + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + const int32_t row_id_i = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); + + if (row_id_i != row_id) { + continue; + } + + GGML_ASSERT(row_id >= 0 && row_id < n_as); + + CUDA_CHECK(cudaMemcpyAsync(src1_contiguous + num_src1_rows*nb11, src1_original + i01*nb11, + nb11, cudaMemcpyDeviceToDevice, stream)); + num_src1_rows++; + } + + if (num_src1_rows == 0) { + continue; + } + + src1_row.ne[1] = num_src1_rows; + dst_row.ne[1] = num_src1_rows; + + src1_row.nb[1] = nb11; + src1_row.nb[2] = num_src1_rows*nb11; + src1_row.nb[3] = num_src1_rows*nb11; + + dst_row.nb[1] = nb1; + dst_row.nb[2] = num_src1_rows*nb1; + dst_row.nb[3] = num_src1_rows*nb1; + + ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); + + num_src1_rows = 0; + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { + const int32_t row_id_i = *(const int32_t *) (ids_host.data() + i01*ids->nb[1] + id*ids->nb[0]); + + if (row_id_i != row_id) { + continue; + } + + GGML_ASSERT(row_id >= 0 && row_id < n_as); + + CUDA_CHECK(cudaMemcpyAsync(dst_original + i01*nb1, dst_contiguous + num_src1_rows*nb1, + nb1, cudaMemcpyDeviceToDevice, stream)); + num_src1_rows++; + } + } + + ggml_cuda_pool_free(src1_contiguous, as_src1); + ggml_cuda_pool_free(dst_contiguous, as_dst); } } @@ -9370,6 +9434,10 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ return false; } + if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT) { + ggml_cuda_set_peer_access(tensor->src[1]->ne[1]); + } + if (params->ith != 0) { return true; } From 1d7a1912cea2227f9a1a449758ed622c560542f9 Mon Sep 17 00:00:00 2001 From: LoganDark Date: Thu, 21 Dec 2023 01:59:27 -0800 Subject: [PATCH 264/859] Fix access violation in ggml_cuda_free_data if tensor->extra is NULL (#4554) --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 9f4b188cb..28d378784 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -9091,7 +9091,7 @@ void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { } void ggml_cuda_free_data(struct ggml_tensor * tensor) { - if (!tensor || (tensor->backend != GGML_BACKEND_GPU && tensor->backend != GGML_BACKEND_GPU_SPLIT) ) { + if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_GPU && tensor->backend != GGML_BACKEND_GPU_SPLIT) ) { return; } From d3223afdad0ed2821a8ddf739c291cd410c92a11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 21 Dec 2023 17:34:17 +0100 Subject: [PATCH 265/859] llama : disable per-tensor info prints on model load (#4562) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index edd2910b3..90d860eb9 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2083,7 +2083,7 @@ struct llama_model_loader { type_max = meta->type; } - LLAMA_LOG_INFO("%s: - tensor 
%4d: %32s %-8s [ %s ]\n", __func__, i, name, ggml_type_name(meta->type), llama_format_tensor_shape(meta).c_str()); + // LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, name, ggml_type_name(meta->type), llama_format_tensor_shape(meta).c_str()); } switch (type_max) { From 139882392258671ffe5acdfcadc0bc08572d6eef Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 21 Dec 2023 18:02:30 +0100 Subject: [PATCH 266/859] cuda : replace asserts in wrong architecture checks with __trap (#4556) * cuda : replace asserts in wrong architecture checks with __trap * make bad_arch noreturn, remove returns --- ggml-cuda.cu | 82 +++++++++++++++++++++++----------------------------- 1 file changed, 36 insertions(+), 46 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 28d378784..e7c9dee45 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -512,6 +512,14 @@ static size_t g_scratch_offset = 0; static cublasHandle_t g_cublas_handles[GGML_CUDA_MAX_DEVICES] = {nullptr}; +[[noreturn]] +static __device__ void bad_arch() { + printf("ERROR: ggml-cuda was compiled without support for the current GPU architecture.\n"); + __trap(); + + (void) bad_arch; // suppress unused function warning +} + static __device__ __forceinline__ float warp_reduce_sum(float x) { #pragma unroll for (int mask = 16; mask > 0; mask >>= 1) { @@ -1972,8 +1980,7 @@ template static __device__ __forceinline__ float vec_dot_q4_0_q8_1_imp // second part effectively subtracts 8 from each quant value return d4 * (sumi * ds8f.x - (8*vdr/QI4_0) * ds8f.y); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2010,8 +2017,7 @@ template static __device__ __forceinline__ float vec_dot_q4_1_q8_1_imp // scale second part of sum by QI8_1/(vdr * QR4_1) to compensate for multiple threads adding it return sumi * d4d8 + m4s8 / (QI8_1 / (vdr * QR4_1)); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2046,8 +2052,7 @@ template static __device__ __forceinline__ float vec_dot_q5_0_q8_1_imp // second part effectively subtracts 16 from each quant value return d5 * (sumi * ds8f.x - (16*vdr/QI5_0) * ds8f.y); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2092,8 +2097,7 @@ template static __device__ __forceinline__ float vec_dot_q5_1_q8_1_imp return sumi*d5d8 + m5s8 / (QI5_1 / vdr); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2114,8 +2118,7 @@ template static __device__ __forceinline__ float vec_dot_q8_0_q8_1_imp return d8_0*d8_1 * sumi; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2145,8 +2148,7 @@ template static __device__ __forceinline__ float vec_dot_q8_1_q8_1_imp // scale second part of sum by QI8_1/ vdr to compensate for multiple threads adding it return sumi*d8d8 + m8s8 / (QI8_1 / vdr); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2181,8 +2183,7 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1_impl_mmvq( return dm2f.x*sumf_d - dm2f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2219,8 +2220,7 @@ static __device__ __forceinline__ float vec_dot_q2_K_q8_1_impl_mmq( 
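// [note] the same mechanical replacement repeats across the remaining
// vec_dot_* and mul_mat_q* kernels in this file: the old fallback used
// assert(false), which compiles to a no-op under NDEBUG and then silently
// returned 0.0f on unsupported architectures; the noreturn bad_arch()
// always fires __trap(), aborting the kernel instead of computing garbage.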
return d8 * (dm2f.x*sumi_d - dm2f.y*sumi_m); #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2260,8 +2260,7 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1_impl_mmvq( return d3 * sumf; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2286,8 +2285,7 @@ static __device__ __forceinline__ float vec_dot_q3_K_q8_1_impl_mmq( return d3*d8 * sumi; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2320,8 +2318,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1_impl_vmmq( return dm4f.x*sumf_d - dm4f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2354,8 +2351,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1_impl_mmq( return dm4f.x*sumf_d - dm4f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2395,8 +2391,7 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1_impl_vmmq( return dm5f.x*sumf_d - dm5f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2429,8 +2424,7 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1_impl_mmq( return dm4f.x*sumf_d - dm4f.y*sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2460,8 +2454,7 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_impl_mmvq( return d*sumf; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -2492,8 +2485,7 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_impl_mmq( return d6 * sumf_d; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A } @@ -3359,8 +3351,7 @@ static __device__ __forceinline__ float vec_dot_q4_K_q8_1( return dall * sumf_d - dmin * sumf_m; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A #endif @@ -3543,8 +3534,7 @@ static __device__ __forceinline__ float vec_dot_q5_K_q8_1( return d * sumf_d; #else - assert(false); - return 0.0f; // only to satisfy the compiler + bad_arch(); #endif // __CUDA_ARCH__ >= MIN_CC_DP4A #endif @@ -3954,7 +3944,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q4_0_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4023,7 +4013,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q4_1_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4090,7 +4080,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q5_0_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4157,7 +4147,7 @@ mul_mat_q5_1( (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q5_1_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4224,7 +4214,7 @@ template static __global__ void (vx, vy, dst, 
ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q8_0_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4291,7 +4281,7 @@ mul_mat_q2_K( (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q2_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4360,7 +4350,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q3_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4429,7 +4419,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q4_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4496,7 +4486,7 @@ mul_mat_q5_K( (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q5_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } @@ -4565,7 +4555,7 @@ template static __global__ void (vx, vy, dst, ncols_x, nrows_x, ncols_y, nrows_y, nrows_dst); #else (void) vec_dot_q6_K_q8_1_mul_mat; - assert(false); + bad_arch(); #endif // __CUDA_ARCH__ >= CC_VOLTA } From 66f35a2f48e1965a13835a523e677223dbf148be Mon Sep 17 00:00:00 2001 From: bobqianic <129547291+bobqianic@users.noreply.github.com> Date: Thu, 21 Dec 2023 17:06:44 +0000 Subject: [PATCH 267/859] cuda : better error message for ggml_get_rows (#4561) * Update ggml-cuda.cu * Update ggml-cuda.cu * Update ggml-cuda.cu --------- Co-authored-by: Georgi Gerganov --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e7c9dee45..1ca071d90 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6815,6 +6815,7 @@ static void ggml_cuda_op_get_rows( break; default: // TODO: k-quants + fprintf(stderr, "%s: unsupported type: %s\n", __func__, ggml_type_name(src0->type)); GGML_ASSERT(false); break; } From 880e352277fc017df4d5794f0c21c44e1eae2b84 Mon Sep 17 00:00:00 2001 From: howlger Date: Thu, 21 Dec 2023 18:07:34 +0100 Subject: [PATCH 268/859] py : open merges file as 'utf-8' (#4566) Otherwise, on Windows converting bling-phi-2-v0 () via convert-hf-to-gguf.py will fail with the following error: ``` Traceback (most recent call last): File "C:\Users\User\git\gguf\convert-hf-to-gguf.py", line 1061, in model_instance.set_vocab() File "C:\Users\User\git\gguf\convert-hf-to-gguf.py", line 52, in set_vocab self._set_vocab_gpt2() File "C:\Users\User\git\gguf\convert-hf-to-gguf.py", line 264, in _set_vocab_gpt2 special_vocab = gguf.SpecialVocab(dir_model, load_merges=True) File "C:\Users\User\git\gguf\gguf\vocab.py", line 33, in __init__ self._load(Path(path)) File "C:\Users\User\git\gguf\gguf\vocab.py", line 81, in _load self._try_load_merges_txt(path) File "C:\Users\User\git\gguf\gguf\vocab.py", line 95, in _try_load_merges_txt for line in fp: File "C:\Users\User\miniconda3\envs\gguf\lib\encodings\cp1252.py", line 23, in decode return codecs.charmap_decode(input,self.errors,decoding_table)[0] UnicodeDecodeError: 'charmap' codec can't decode byte 0x81 in position 1415: character maps to ``` --- gguf-py/gguf/vocab.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gguf-py/gguf/vocab.py b/gguf-py/gguf/vocab.py index 76924d8f2..cd1942975 100644 --- a/gguf-py/gguf/vocab.py +++ b/gguf-py/gguf/vocab.py @@ -84,7 +84,7 @@ class SpecialVocab: merges_file = path / 'merges.txt' if not merges_file.is_file(): return False - with open(merges_file, 
'r') as fp: + with open(merges_file, 'r', encoding = 'utf-8') as fp: first_line = next(fp, '').strip() if not first_line.startswith('#'): fp.seek(0) From c083718c895b7c8c7fb2a4660643fb78d0c64dfd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 21 Dec 2023 19:27:14 +0200 Subject: [PATCH 269/859] readme : update coding guidelines --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 01aef2afc..80ce194ca 100644 --- a/README.md +++ b/README.md @@ -982,6 +982,8 @@ docker run --gpus all -v /path/to/models:/models local/llama.cpp:light-cuda -m / - There are no strict rules for the code style, but try to follow the patterns in the code (indentation, spaces, etc.). Vertical alignment makes things more readable and easier to batch edit - Clean-up any trailing whitespaces, use 4 spaces for indentation, brackets on the same line, `void * ptr`, `int & a` - See [good first issues](https://github.com/ggerganov/llama.cpp/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) for tasks suitable for first contributions +- Tensors store data in row-major order. We refer to dimension 0 as columns, 1 as rows, 2 as matrices +- Matrix multiplication is unconventional: [`z = ggml_mul_mat(ctx, x, y)`](https://github.com/ggerganov/llama.cpp/blob/880e352277fc017df4d5794f0c21c44e1eae2b84/ggml.h#L1058-L1064) means `zT = x @ yT` ### Docs From 9154494808dc865475c59022c29060b4947a803b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 21 Dec 2023 18:42:59 +0100 Subject: [PATCH 270/859] CUDA: mul_mat_id always on GPU for batches >= 32 (#4553) --- ggml-cuda.cu | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 1ca071d90..036668bfd 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8773,8 +8773,6 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s // TODO: mmq/mmv support #endif - GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); - const int64_t nb11 = src1->nb[1]; const int64_t nb1 = dst->nb[1]; @@ -8803,13 +8801,21 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_tensor src1_row = *src1; ggml_tensor dst_row = *dst; + src1_row.backend = GGML_BACKEND_GPU; + dst_row.backend = GGML_BACKEND_GPU; + src1_row.extra = &src1_row_extra; dst_row.extra = &dst_row_extra; - char * src1_original = (char *) src1_extra->data_device[g_main_device]; - char * dst_original = (char *) dst_extra->data_device[g_main_device]; + char * src1_original = src1->backend == GGML_BACKEND_CPU ? + (char *) src1->data : (char *) src1_extra->data_device[g_main_device]; + char * dst_original = dst->backend == GGML_BACKEND_CPU ? + (char *) dst->data : (char *) dst_extra->data_device[g_main_device]; if (src1->ne[1] == 1) { + GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { //int32_t row_id; //CUDA_CHECK(cudaMemcpyAsync(&row_id, ids_dev + i01*ids->nb[1] + id*ids->nb[0], sizeof(int32_t), cudaMemcpyDeviceToHost, g_cudaStreams[g_main_device][0])); @@ -8837,6 +8843,11 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s src1_row_extra.data_device[g_main_device] = src1_contiguous; dst_row_extra.data_device[g_main_device] = dst_contiguous; + const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_CPU ? 
+ cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; + const cudaMemcpyKind dst_kind = dst->backend == GGML_BACKEND_CPU ? + cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; + for (int32_t row_id = 0; row_id < n_as; ++row_id) { const struct ggml_tensor * src0_row = dst->src[row_id + 2]; @@ -8851,7 +8862,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(row_id >= 0 && row_id < n_as); CUDA_CHECK(cudaMemcpyAsync(src1_contiguous + num_src1_rows*nb11, src1_original + i01*nb11, - nb11, cudaMemcpyDeviceToDevice, stream)); + nb11, src1_kind, stream)); num_src1_rows++; } @@ -8883,7 +8894,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(row_id >= 0 && row_id < n_as); CUDA_CHECK(cudaMemcpyAsync(dst_original + i01*nb1, dst_contiguous + num_src1_rows*nb1, - nb1, cudaMemcpyDeviceToDevice, stream)); + nb1, dst_kind, stream)); num_src1_rows++; } } @@ -8891,6 +8902,10 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_cuda_pool_free(src1_contiguous, as_src1); ggml_cuda_pool_free(dst_contiguous, as_dst); } + + if (dst->backend == GGML_BACKEND_CPU) { + CUDA_CHECK(cudaStreamSynchronize(stream)); + } } static void ggml_cuda_scale(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -9289,7 +9304,7 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); - if (!any_on_device && tensor->op != GGML_OP_MUL_MAT) { + if (!any_on_device && tensor->op != GGML_OP_MUL_MAT && tensor->op != GGML_OP_MUL_MAT_ID) { return false; } From 8fe03ffddaaa0ab5d48feaafe398151c9f22d4f6 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Thu, 21 Dec 2023 12:55:34 -0500 Subject: [PATCH 271/859] common : remove incorrect --model-draft default (#4568) --- common/common.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 93d5483e4..b3425ab09 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -920,7 +920,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -m FNAME, --model FNAME\n"); printf(" model path (default: %s)\n", params.model.c_str()); printf(" -md FNAME, --model-draft FNAME\n"); - printf(" draft model for speculative decoding (default: %s)\n", params.model.c_str()); + printf(" draft model for speculative decoding\n"); printf(" -ld LOGDIR, --logdir LOGDIR\n"); printf(" path under which to save YAML logs (no logging if unset)\n"); printf(" --override-kv KEY=TYPE:VALUE\n"); From 562cf222b5129e40b312877e928eac3a02e4ec33 Mon Sep 17 00:00:00 2001 From: arlo-phoenix <140345165+arlo-phoenix@users.noreply.github.com> Date: Thu, 21 Dec 2023 20:13:25 +0100 Subject: [PATCH 272/859] ggml-cuda: Fix HIP build by adding define for __trap (#4569) Regression of 139882392258671ffe5acdfcadc0bc08572d6eef HIP doesn't have trap, only abort --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 036668bfd..61d92d7ef 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -80,6 +80,7 @@ #define cudaStreamWaitEvent(stream, event, flags) hipStreamWaitEvent(stream, event, flags) #define cudaStream_t hipStream_t #define cudaSuccess hipSuccess +#define __trap abort #else #include #include From 
0f630fbc924aaabeea6eaf466bb4b47d13015c3e Mon Sep 17 00:00:00 2001 From: Erik Garrison Date: Thu, 21 Dec 2023 13:45:32 -0600 Subject: [PATCH 273/859] cuda : ROCm AMD Unified Memory Architecture (UMA) handling (#4449) * AMD ROCm: handle UMA memory VRAM expansions This resolves #2797 by allowing ROCm AMD GPU users with a UMA to dynamically expand the VRAM allocated to the GPU. Without this, AMD ROCm users with shared CPU/GPU memory usually are stuck with the BIOS-set (or fixed) framebuffer VRAM, making it impossible to load more than 1-2 layers. Note that the model is duplicated in RAM because it's loaded once for the CPU and then copied into a second set of allocations that are managed by the HIP UMA system. We can fix this later. * clarify build process for ROCm on linux with cmake * avoid using deprecated ROCm hipMallocHost * keep simplifying the change required for UMA * cmake: enable UMA-compatible allocation when LLAMA_HIP_UMA=ON --- CMakeLists.txt | 4 ++++ README.md | 16 +++++++++------- ggml-cuda.cu | 5 +++++ 3 files changed, 18 insertions(+), 7 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index e3cd43ab3..6fc6508c5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -91,6 +91,7 @@ set(LLAMA_CUDA_KQUANTS_ITER "2" CACHE STRING "llama: iters./thread per block for set(LLAMA_CUDA_PEER_MAX_BATCH_SIZE "128" CACHE STRING "llama: max. batch size for using peer access") option(LLAMA_HIPBLAS "llama: use hipBLAS" OFF) +option(LLAMA_HIP_UMA "llama: use HIP unified memory architecture" OFF) option(LLAMA_CLBLAST "llama: use CLBlast" OFF) option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG "llama: disable Metal debugging" OFF) @@ -377,6 +378,9 @@ if (LLAMA_HIPBLAS) if (${hipblas_FOUND} AND ${hip_FOUND}) message(STATUS "HIP and hipBLAS found") add_compile_definitions(GGML_USE_HIPBLAS GGML_USE_CUBLAS) + if (LLAMA_HIP_UMA) + add_compile_definitions(GGML_HIP_UMA) + endif() add_library(ggml-rocm OBJECT ggml-cuda.cu ggml-cuda.h) if (BUILD_SHARED_LIBS) set_target_properties(ggml-rocm PROPERTIES POSITION_INDEPENDENT_CODE ON) diff --git a/README.md b/README.md index 80ce194ca..73fe59bb4 100644 --- a/README.md +++ b/README.md @@ -432,14 +432,15 @@ Building the program with BLAS support may lead to some performance improvements ```bash make LLAMA_HIPBLAS=1 ``` - - Using `CMake` for Linux: + - Using `CMake` for Linux (assuming a gfx1030-compatible AMD GPU): ```bash - mkdir build - cd build - CC=/opt/rocm/llvm/bin/clang CXX=/opt/rocm/llvm/bin/clang++ cmake .. -DLLAMA_HIPBLAS=ON - cmake --build . + CC=/opt/rocm/llvm/bin/clang CXX=/opt/rocm/llvm/bin/clang++ \ + cmake -H. -Bbuild -DLLAMA_HIPBLAS=ON -DAMDGPU_TARGETS=gfx1030 -DCMAKE_BUILD_TYPE=Release \ + && cmake --build build -- -j 16 ``` - - Using `CMake` for Windows (using x64 Native Tools Command Prompt for VS): + On Linux it is also possible to use unified memory architecture (UMA) to share main memory between the CPU and integrated GPU by setting `-DLLAMA_HIP_UMA=ON"`. + However, this hurts performance for non-integrated GPUs. + - Using `CMake` for Windows (using x64 Native Tools Command Prompt for VS, and assuming a gfx1100-compatible AMD GPU): ```bash set PATH=%HIP_PATH%\bin;%PATH% mkdir build @@ -448,10 +449,11 @@ Building the program with BLAS support may lead to some performance improvements cmake --build . ``` Make sure that `AMDGPU_TARGETS` is set to the GPU arch you want to compile for. The above example uses `gfx1100` that corresponds to Radeon RX 7900XTX/XT/GRE. 
You can find a list of targets [here](https://llvm.org/docs/AMDGPUUsage.html#processors) + Find your gpu version string by matching the most significant version information from `rocminfo | grep gfx | head -1 | awk '{print $2}'` with the list of processors, e.g. `gfx1035` maps to `gfx1030`. The environment variable [`HIP_VISIBLE_DEVICES`](https://rocm.docs.amd.com/en/latest/understand/gpu_isolation.html#hip-visible-devices) can be used to specify which GPU(s) will be used. - If your GPU is not officially supported you can use the environment variable [`HSA_OVERRIDE_GFX_VERSION`] set to a similar GPU, for example 10.3.0 on RDNA2 or 11.0.0 on RDNA3. + If your GPU is not officially supported you can use the environment variable [`HSA_OVERRIDE_GFX_VERSION`] set to a similar GPU, for example 10.3.0 on RDNA2 (e.g. gfx1030, gfx1031, or gfx1035) or 11.0.0 on RDNA3. The following compilation options are also available to tweak performance (yes, they refer to CUDA, not HIP, because it uses the same code as the cuBLAS version above): | Option | Legal values | Default | Description | diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 61d92d7ef..32603a8d1 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -60,8 +60,13 @@ #define cudaGetDeviceProperties hipGetDeviceProperties #define cudaGetErrorString hipGetErrorString #define cudaGetLastError hipGetLastError +#ifdef GGML_HIP_UMA +#define cudaMalloc hipMallocManaged +#define cudaMallocHost(ptr, size) hipHostMalloc(ptr, size) +#else #define cudaMalloc hipMalloc #define cudaMallocHost(ptr, size) hipHostMalloc(ptr, size, hipHostMallocDefault) +#endif #define cudaMemcpy hipMemcpy #define cudaMemcpy2DAsync hipMemcpy2DAsync #define cudaMemcpyAsync hipMemcpyAsync From 56fa50819f7a3ca2128f63b81c17c08a4454479e Mon Sep 17 00:00:00 2001 From: Finn Voorhees Date: Thu, 21 Dec 2023 14:55:02 -0500 Subject: [PATCH 274/859] metal : fix `ggml_metal_log` vargs (#4373) From 31f27758faf4a4bd08101a57c7ec3a473f771f86 Mon Sep 17 00:00:00 2001 From: Marcus Dunn <51931484+MarcusDunn@users.noreply.github.com> Date: Thu, 21 Dec 2023 11:57:48 -0800 Subject: [PATCH 275/859] llama : allow getting n_batch from llama_context in c api (#4540) * allowed getting n_batch from llama_context in c api * changed to use `uint32_t` instead of `int` * changed to use `uint32_t` instead of `int` in `llama_n_ctx` * Update llama.h --------- Co-authored-by: Georgi Gerganov --- llama.cpp | 6 +++++- llama.h | 4 +++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 90d860eb9..63ebe581b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9532,10 +9532,14 @@ const llama_model * llama_get_model(const struct llama_context * ctx) { return &ctx->model; } -int llama_n_ctx(const struct llama_context * ctx) { +uint32_t llama_n_ctx(const struct llama_context * ctx) { return ctx->cparams.n_ctx; } +uint32_t llama_n_batch(const struct llama_context * ctx) { + return ctx->cparams.n_batch; +} + enum llama_vocab_type llama_vocab_type(const struct llama_model * model) { return model->vocab.type; } diff --git a/llama.h b/llama.h index 15ab4f80e..0be4b1337 100644 --- a/llama.h +++ b/llama.h @@ -314,7 +314,9 @@ extern "C" { LLAMA_API const struct llama_model * llama_get_model(const struct llama_context * ctx); - LLAMA_API int llama_n_ctx (const struct llama_context * ctx); + // TODO: become more consistent with returned int types across the API + LLAMA_API uint32_t llama_n_ctx (const struct llama_context * ctx); + LLAMA_API uint32_t llama_n_batch (const struct llama_context * ctx); LLAMA_API enum 
llama_vocab_type llama_vocab_type(const struct llama_model * model);

From d232aca5a73b290e218a2e48b91023d5e994203f Mon Sep 17 00:00:00 2001
From: slaren
Date: Thu, 21 Dec 2023 21:07:46 +0100
Subject: [PATCH 276/859] llama : initial ggml-backend integration (#4520)

* llama : initial ggml-backend integration
* add ggml-metal
* cuda backend can be used through ggml-backend with LLAMA_GGML_BACKEND_CUDA_TEST
  access all tensor data with ggml_backend_tensor_get/set
* add ggml_backend_buffer_clear
  zero-init KV cache buffer
* add ggml_backend_buffer_is_host, used to avoid copies if possible when accessing tensor data
* disable gpu backends with ngl 0
* more accurate mlock
* unmap offloaded part of the model
* use posix_fadvise64(.., POSIX_FADV_SEQUENTIAL) to improve performance with mmap
* update quantize and lora
* update session copy/set to use ggml-backend
  ggml-ci
* use posix_fadvise instead of posix_fadvise64
* ggml_backend_alloc_ctx_tensors_from_buft : remove old print
* llama_mmap::align_offset : use pointers instead of references for out parameters
* restore progress_callback behavior
* move final progress_callback call to load_all_data
* cuda : fix fprintf format string (minor)
* do not offload scales
* llama_mmap : avoid unmapping the same fragments again in the destructor
* remove unnecessary unmap
* metal : add default log function that prints to stderr, cleanup code
  ggml-ci
---------

Co-authored-by: Georgi Gerganov
---
 Makefile            |    2 +-
 ggml-alloc.c        |   16 +-
 ggml-backend-impl.h |   20 +-
 ggml-backend.c      |   80 ++-
 ggml-backend.h      |    7 +
 ggml-cuda.cu        |   89 ++--
 ggml-metal.h        |    3 +
 ggml-metal.m        |  228 +++++++--
 ggml.c              |   24 +-
 ggml.h              |   13 +-
 llama.cpp           | 1196 ++++++++++++++++++++-----------------
 11 files changed, 926 insertions(+), 752 deletions(-)

diff --git a/Makefile b/Makefile
index 8273f8400..512407a1d 100644
--- a/Makefile
+++ b/Makefile
@@ -65,7 +65,7 @@ test: $(TEST_TARGETS)
 		./$$test_target; \
 	fi; \
 	if [ $$?
-ne 0 ]; then \ - printf 'Test $$test_target FAILED!\n\n' $$test_target; \ + printf 'Test %s FAILED!\n\n' $$test_target; \ failures=$$(( failures + 1 )); \ else \ printf 'Test %s passed.\n\n' $$test_target; \ diff --git a/ggml-alloc.c b/ggml-alloc.c index d3049efb4..a97436b17 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -449,11 +449,10 @@ static void init_view(ggml_gallocr_t galloc, struct ggml_tensor * view, bool upd if (update_backend) { view->backend = view->view_src->backend; } - view->buffer = view->view_src->buffer; + // views are initialized in the alloc buffer rather than the view_src buffer + view->buffer = alloc->buffer; view->data = (char *)view->view_src->data + view->view_offs; - // FIXME: the view should be initialized by the owning buffer, but currently this breaks the CUDA backend - // due to the ggml_tensor_extra_gpu ring buffer overwriting the KV cache extras assert(ggml_tallocr_is_measure(alloc) || !view->buffer || view->buffer->buft == alloc->buffer->buft); if (!alloc->measure) { @@ -736,6 +735,10 @@ void ggml_allocr_set_parse_seq(ggml_allocr_t alloc, const int * list, int n) { } void ggml_allocr_free(ggml_allocr_t alloc) { + if (alloc == NULL) { + return; + } + ggml_gallocr_free(alloc->galloc); ggml_tallocr_free(alloc->talloc); free(alloc); @@ -775,7 +778,7 @@ ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte } if (nbytes == 0) { - fprintf(stderr, "%s: no tensors to allocate\n", __func__); + // all the tensors in the context are already allocated return NULL; } @@ -789,6 +792,11 @@ ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte } else { ggml_backend_view_init(buffer, t); } + } else { + if (t->view_src != NULL) { + // view of a pre-allocated tensor + ggml_backend_view_init(buffer, t); + } } } diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index f588af602..05859935a 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -20,6 +20,9 @@ extern "C" { size_t (*get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding bool (*supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend + // check if tensor data is in host memory + // should be equivalent to supports_backend(buft, ggml_backend_cpu_init()) + bool (*is_host) (ggml_backend_buffer_type_t buft); }; struct ggml_backend_buffer_type { @@ -31,15 +34,16 @@ extern "C" { typedef void * ggml_backend_buffer_context_t; struct ggml_backend_buffer_i { - void (*free_buffer)(ggml_backend_buffer_t buffer); + void (*free_buffer) (ggml_backend_buffer_t buffer); //void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras - void * (*get_base) (ggml_backend_buffer_t buffer); - void (*init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + void * (*get_base) (ggml_backend_buffer_t buffer); + void (*init_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t 
size); + void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); // (optional) copy tensor between different buffer-type, allow for single-copy tranfers - void (*cpy_tensor_from)(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*cpy_tensor_to) (ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); + void (*cpy_tensor_from)(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); + void (*cpy_tensor_to) (ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); + void (*clear) (ggml_backend_buffer_t buffer, uint8_t value); }; struct ggml_backend_buffer { @@ -78,7 +82,7 @@ extern "C" { void (*cpy_tensor_from_async)(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); void (*cpy_tensor_to_async) (ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*synchronize) (ggml_backend_t backend); + void (*synchronize)(ggml_backend_t backend); // compute graph with a plan ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, struct ggml_cgraph * cgraph); diff --git a/ggml-backend.c b/ggml-backend.c index 3a22cd085..0c8c9ec43 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -35,6 +35,13 @@ bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_ba return buft->iface.supports_backend(buft, backend); } +bool ggml_backend_buft_is_host(ggml_backend_buffer_type_t buft) { + if (buft->iface.is_host) { + return buft->iface.is_host(buft); + } + return false; +} + // backend buffer ggml_backend_buffer_t ggml_backend_buffer_init( @@ -94,6 +101,14 @@ size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct g return ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type(buffer), tensor); } +void ggml_backend_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + buffer->iface.clear(buffer, value); +} + +bool ggml_backend_buffer_is_host(ggml_backend_buffer_t buffer) { + return ggml_backend_buft_is_host(ggml_backend_buffer_type(buffer)); +} + ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer) { return buffer->buft; } @@ -378,7 +393,6 @@ static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { free(buffer->context); - GGML_UNUSED(buffer); } static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { @@ -411,6 +425,10 @@ static void ggml_backend_cpu_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer, GGML_UNUSED(buffer); } +static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + memset(buffer->context, value, buffer->size); +} + static struct ggml_backend_buffer_i cpu_backend_buffer_i = { /* .free_buffer = */ ggml_backend_cpu_buffer_free_buffer, /* .get_base = */ ggml_backend_cpu_buffer_get_base, @@ -419,6 +437,7 @@ static struct ggml_backend_buffer_i cpu_backend_buffer_i = { /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, + /* .clear = */ ggml_backend_cpu_buffer_clear, }; // for buffers from ptr, free is not called @@ -430,6 +449,7 @@ static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { /* 
.get_tensor = */ ggml_backend_cpu_buffer_get_tensor, /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, + /* .clear = */ ggml_backend_cpu_buffer_clear, }; static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 @@ -455,20 +475,70 @@ static bool ggml_backend_cpu_buffer_type_supports_backend(ggml_backend_buffer_ty GGML_UNUSED(buft); } +static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft) { + return true; + + GGML_UNUSED(buft); +} + ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { - static struct ggml_backend_buffer_type ggml_backend_buffer_type_cpu = { + static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type = { /* .iface = */ { /* .alloc_buffer = */ ggml_backend_cpu_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes /* .supports_backend = */ ggml_backend_cpu_buffer_type_supports_backend, + /* .is_host = */ ggml_backend_cpu_buffer_type_is_host, }, /* .context = */ NULL, }; - return &ggml_backend_buffer_type_cpu; + return &ggml_backend_cpu_buffer_type; } +#ifdef GGML_USE_CPU_HBM + +// buffer type HBM + +#include + +static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { + hbw_free(buffer->context); +} + +static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + //void * ptr = hbw_malloc(size); + void * ptr; + int result = hbw_posix_memalign(&ptr, ggml_backend_cpu_buffer_type_get_alignment(buft), size); + if (result != 0) { + fprintf(stderr, "failed to allocate HBM buffer of size %zu\n", size); + return NULL; + } + + // FIXME: this is a hack to avoid having to implement a new buffer type + ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); + buffer->buft = buft; + buffer->iface.free_buffer = ggml_backend_cpu_hbm_buffer_free_buffer; + + return buffer; +} + +ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type() { + static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_hbm = { + /* .iface = */ { + /* .alloc_buffer = */ ggml_backend_cpu_hbm_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, + /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes + /* .supports_backend = */ ggml_backend_cpu_buffer_type_supports_backend, + /* .is_host = */ ggml_backend_cpu_buffer_type_is_host, + }, + /* .context = */ NULL, + }; + + return &ggml_backend_cpu_buffer_type_hbm; +} +#endif + struct ggml_backend_cpu_context { int n_threads; void * work_data; @@ -505,7 +575,7 @@ static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend struct ggml_backend_plan_cpu * cpu_plan = malloc(sizeof(struct ggml_backend_plan_cpu)); cpu_plan->cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads); - cpu_plan->cgraph = *cgraph; + cpu_plan->cgraph = *cgraph; // FIXME: deep copy if (cpu_plan->cplan.work_size > 0) { cpu_plan->cplan.work_data = malloc(cpu_plan->cplan.work_size); @@ -1180,7 +1250,7 @@ void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml // utils void ggml_backend_view_init(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { GGML_ASSERT(tensor->buffer == NULL); - GGML_ASSERT(tensor->data == NULL); + //GGML_ASSERT(tensor->data == NULL); // views of pre-allocted tensors may have the data set, but still need to be initialized 
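    // [note] a view never owns storage: it aliases view_src's buffer at
    // view_offs, so only the parent tensor must already be fully allocated
    // and initialized - which is exactly what the asserts below verify.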
GGML_ASSERT(tensor->view_src != NULL); GGML_ASSERT(tensor->view_src->buffer != NULL); GGML_ASSERT(tensor->view_src->data != NULL); diff --git a/ggml-backend.h b/ggml-backend.h index 58d5ccae6..a9d2fddd7 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -21,6 +21,7 @@ extern "C" { GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); GGML_API size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); + GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); // buffer GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); @@ -29,6 +30,8 @@ extern "C" { GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); + GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer); // @@ -76,6 +79,10 @@ extern "C" { GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void); +#ifdef GGML_USE_CPU_HBM + GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void); +#endif + // // Backend registry // diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 32603a8d1..f5e060d32 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -9081,7 +9081,7 @@ void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { char * buf; CUDA_CHECK(cudaMalloc(&buf, size)); - char * buf_host = (char*)data + offset_split; + char * buf_host = (char *)data + offset_split; // set padding to 0 to avoid possible NaN values if (size > original_size) { @@ -9226,11 +9226,10 @@ void ggml_cuda_assign_scratch_offset(struct ggml_tensor * tensor, size_t offset) ggml_tensor_extra_gpu * extra = ggml_cuda_alloc_temp_tensor_extra(); - const bool inplace = (tensor->src[0] != nullptr && tensor->src[0]->data == tensor->data) || - tensor->op == GGML_OP_VIEW; + const bool inplace = tensor->view_src != nullptr; - if (inplace && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) { - ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; + if (inplace && (tensor->view_src->backend == GGML_BACKEND_GPU || tensor->view_src->backend == GGML_BACKEND_GPU_SPLIT)) { + ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->view_src->extra; char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; size_t view_offset = 0; if (tensor->op == GGML_OP_VIEW) { @@ -9317,7 +9316,7 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ if (tensor->op == GGML_OP_MUL_MAT) { if (tensor->src[0]->ne[3] != tensor->src[1]->ne[3]) { #ifndef NDEBUG - fprintf(stderr, "%s: cannot compute %s: src0->ne[3] = " PRId64 ", src1->ne[3] = " PRId64 " - fallback to CPU\n", __func__, tensor->name, tensor->src[0]->ne[3], tensor->src[1]->ne[3]); + fprintf(stderr, "%s: cannot compute %s: src0->ne[3] = %" PRId64 ", src1->ne[3] = %" PRId64 " - fallback to CPU\n", __func__, tensor->name, tensor->src[0]->ne[3], tensor->src[1]->ne[3]); #endif return false; } @@ -9523,7 +9522,7 @@ static void 
ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, g ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; if (tensor->view_src != NULL && tensor->view_offs == 0) { - assert(tensor->view_src->buffer->buft == buffer->buft); // TODO + assert(tensor->view_src->buffer->buft == buffer->buft); tensor->backend = tensor->view_src->backend; tensor->extra = tensor->view_src->extra; return; @@ -9554,23 +9553,34 @@ static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, g } static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - CUDA_CHECK(cudaMemcpy((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice)); + ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; - UNUSED(buffer); + ggml_cuda_set_device(ctx->device); + CUDA_CHECK(cudaDeviceSynchronize()); + + CUDA_CHECK(cudaMemcpy((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice)); } static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - CUDA_CHECK(cudaMemcpy(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost)); + ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; - UNUSED(buffer); + ggml_cuda_set_device(ctx->device); + CUDA_CHECK(cudaDeviceSynchronize()); + + CUDA_CHECK(cudaMemcpy(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost)); +} + +static void ggml_backend_cuda_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + + ggml_cuda_set_device(ctx->device); + CUDA_CHECK(cudaDeviceSynchronize()); + + CUDA_CHECK(cudaMemset(ctx->dev_ptr, value, buffer->size)); } static struct ggml_backend_buffer_i cuda_backend_buffer_interface = { @@ -9581,6 +9591,7 @@ static struct ggml_backend_buffer_i cuda_backend_buffer_interface = { /* .get_tensor = */ ggml_backend_cuda_buffer_get_tensor, /* .cpy_tensor_from = */ NULL, /* .cpy_tensor_to = */ NULL, + /* .clear = */ ggml_backend_cuda_buffer_clear, }; // cuda buffer type @@ -9632,35 +9643,36 @@ static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_t UNUSED(buft); } -static ggml_backend_buffer_type_i cuda_backend_buffer_type_interface = { +static ggml_backend_buffer_type_i ggml_backend_cuda_buffer_type_interface = { /* .alloc_buffer = */ ggml_backend_cuda_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cuda_buffer_type_get_alignment, /* .get_alloc_size = */ ggml_backend_cuda_buffer_type_get_alloc_size, /* .supports_backend = */ ggml_backend_cuda_buffer_type_supports_backend, + /* .is_host = */ nullptr, }; ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { - static struct ggml_backend_buffer_type ggml_backend_buffer_type_cuda[GGML_CUDA_MAX_DEVICES]; - static bool ggml_backend_buffer_type_cuda_initialized = false; - if 
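[editor's note] For context on the cudaDeviceSynchronize() calls added to the blocking copies above: ggml-cuda launches kernels on its own non-blocking streams, which a cudaMemcpy on the default stream does not wait for, so the tensor being copied could still be in flight. The pattern in isolation (illustrative only, plain CUDA runtime API; the helper name is made up):

#include <stddef.h>
#include <cuda_runtime.h>

// wait for all queued device work, then do a blocking host->device copy
static cudaError_t copy_to_device_synced(void * dst_dev, const void * src_host, size_t n) {
    // kernels on the backend's non-default streams may still be writing
    cudaError_t err = cudaDeviceSynchronize();
    if (err != cudaSuccess) {
        return err;
    }
    return cudaMemcpy(dst_dev, src_host, n, cudaMemcpyHostToDevice);
}
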
(!ggml_backend_buffer_type_cuda_initialized) { + static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_types[GGML_CUDA_MAX_DEVICES]; + + static bool ggml_backend_cuda_buffer_type_initialized = false; + + if (!ggml_backend_cuda_buffer_type_initialized) { for (int i = 0; i < GGML_CUDA_MAX_DEVICES; i++) { - ggml_backend_buffer_type_cuda[i] = { - /* .iface = */ cuda_backend_buffer_type_interface, + ggml_backend_cuda_buffer_types[i] = { + /* .iface = */ ggml_backend_cuda_buffer_type_interface, /* .context = */ (ggml_backend_buffer_type_context_t) (intptr_t) i, }; } - ggml_backend_buffer_type_cuda_initialized = true; + ggml_backend_cuda_buffer_type_initialized = true; } - return &ggml_backend_buffer_type_cuda[device]; + return &ggml_backend_cuda_buffer_types[device]; } // host buffer type static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; - CUDA_CHECK(cudaFreeHost(ctx->dev_ptr)); - delete ctx; + CUDA_CHECK(cudaFreeHost(buffer->context)); } static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { @@ -9673,24 +9685,21 @@ static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggm buffer->iface.free_buffer = ggml_backend_cuda_host_buffer_free_buffer; return buffer; - - UNUSED(buft); } -struct ggml_backend_buffer_type_i cuda_backend_host_buffer_type_interface = { - /* .alloc_buffer = */ ggml_backend_cuda_host_buffer_type_alloc_buffer, - /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, - /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, - /* .supports_backend = */ ggml_backend_cpu_buffer_type()->iface.supports_backend, -}; - ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { - static struct ggml_backend_buffer_type ggml_backend_buffer_type_cuda_host = { - /* .iface = */ cuda_backend_host_buffer_type_interface, + static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_type_host = { + /* .iface = */ { + /* .alloc_buffer = */ ggml_backend_cuda_host_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, + /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, + /* .supports_backend = */ ggml_backend_cpu_buffer_type()->iface.supports_backend, + /* .is_host = */ ggml_backend_cpu_buffer_type()->iface.is_host, + }, /* .context = */ nullptr, }; - return &ggml_backend_buffer_type_cuda_host; + return &ggml_backend_cuda_buffer_type_host; } // backend @@ -9722,8 +9731,6 @@ static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tens ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); CUDA_CHECK(cudaMemcpyAsync((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice, g_cudaStreams[cuda_ctx->device][0])); @@ -9733,8 +9740,6 @@ static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggm ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) 
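[editor's note] The host buffer type above hands out pinned (page-locked) memory, and after this change its buffer context is the raw pointer itself, which is why free_buffer now calls cudaFreeHost(buffer->context) directly. A hedged sketch of allocating a pinned staging buffer through it; alloc_pinned is an illustrative name, and this assumes ggml_backend_cuda_host_buffer_type is declared in ggml-cuda.h:

#include <stddef.h>
#include "ggml-backend.h"
#include "ggml-cuda.h"

// pinned host memory via the CUDA host buffer type; the buffer is
// host-visible (is_host is inherited from the CPU buffer type), but
// copies from it to the GPU can be DMA'd without an extra staging copy
static ggml_backend_buffer_t alloc_pinned(size_t n) {
    return ggml_backend_buft_alloc_buffer(ggml_backend_cuda_host_buffer_type(), n);
}
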
&& "unsupported buffer type"); - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); diff --git a/ggml-metal.h b/ggml-metal.h index bf52d9cd3..b5e02b668 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -98,7 +98,10 @@ GGML_API ggml_backend_t ggml_backend_metal_init(void); GGML_API bool ggml_backend_is_metal(ggml_backend_t backend); +GGML_API ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size); + GGML_API void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb); + GGML_API ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); // helper to check if the device supports a specific family diff --git a/ggml-metal.m b/ggml-metal.m index 465679a6b..e60b93b36 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -180,7 +180,15 @@ struct ggml_metal_context { @implementation GGMLMetalClass @end -ggml_log_callback ggml_metal_log_callback = NULL; + +static void ggml_metal_default_log_callback(enum ggml_log_level level, const char * msg, void * user_data) { + fprintf(stderr, "%s", msg); + + UNUSED(level); + UNUSED(user_data); +} + +ggml_log_callback ggml_metal_log_callback = ggml_metal_default_log_callback; void * ggml_metal_log_user_data = NULL; void ggml_metal_log_set_callback(ggml_log_callback log_callback, void * user_data) { @@ -607,12 +615,24 @@ int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx) { } // temporarily defined here for compatibility between ggml-backend and the old API -struct ggml_backend_metal_buffer_context { - void * data; + +struct ggml_backend_metal_buffer { + void * data; + size_t size; id metal; }; +struct ggml_backend_metal_buffer_context { + void * all_data; + size_t all_size; + bool owned; + + // multiple buffers are used only to avoid the maximum buffer size limitation when using mmap + int n_buffers; + struct ggml_backend_metal_buffer buffers[GGML_METAL_MAX_BUFFERS]; +}; + // finds the Metal buffer that contains the tensor data on the GPU device // the assumption is that there is 1-to-1 mapping between the host and device memory buffers, so we can find the // Metal buffer based on the host memory pointer @@ -622,17 +642,29 @@ static id ggml_metal_get_buffer(struct ggml_metal_context * ctx, stru const int64_t tsize = ggml_nbytes(t); + ggml_backend_buffer_t buffer = t->view_src ? 
t->view_src->buffer : t->buffer; + // compatibility with ggml-backend - if (t->buffer && t->buffer->buft == ggml_backend_metal_buffer_type()) { - struct ggml_backend_metal_buffer_context * buf_ctx = (struct ggml_backend_metal_buffer_context *) t->buffer->context; + if (buffer && buffer->buft == ggml_backend_metal_buffer_type()) { + struct ggml_backend_metal_buffer_context * buf_ctx = (struct ggml_backend_metal_buffer_context *) buffer->context; - const int64_t ioffs = (int64_t) t->data - (int64_t) buf_ctx->data; + // find the view that contains the tensor fully + for (int i = 0; i < buf_ctx->n_buffers; ++i) { + const int64_t ioffs = (int64_t) t->data - (int64_t) buf_ctx->buffers[i].data; - GGML_ASSERT(ioffs >= 0 && ioffs + tsize <= (int64_t) t->buffer->size); + //GGML_METAL_LOG_INFO("ioffs = %10ld, tsize = %10ld, sum = %10ld, buf_ctx->buffers[%d].size = %10ld\n", ioffs, tsize, ioffs + tsize, i, buf_ctx->buffers[i].size); + if (ioffs >= 0 && ioffs + tsize <= (int64_t) buf_ctx->buffers[i].size) { + *offs = (size_t) ioffs; - *offs = (size_t) ioffs; + //GGML_METAL_LOG_INFO("%s: tensor '%16s', offs = %8ld\n", __func__, t->name, *offs); - return buf_ctx->metal; + return buf_ctx->buffers[i].metal; + } + } + + GGML_METAL_LOG_ERROR("%s: error: tensor '%s' buffer is nil\n", __func__, t->name); + + return nil; } // find the view that contains the tensor fully @@ -2361,6 +2393,7 @@ void ggml_metal_graph_compute( // backend interface +// default buffer static id g_backend_device = nil; static int g_backend_device_ref_count = 0; @@ -2388,34 +2421,31 @@ static void ggml_backend_metal_free_device(void) { static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; - return ctx->data; + return ctx->all_data; } static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; - [ctx->metal release]; + for (int i = 0; i < ctx->n_buffers; i++) { + [ctx->buffers[i].metal release]; + } ggml_backend_metal_free_device(); - free(ctx->data); - free(ctx); + if (ctx->owned) { + free(ctx->all_data); + } - UNUSED(buffer); + free(ctx); } static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - memcpy((char *)tensor->data + offset, data, size); UNUSED(buffer); } static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - memcpy(data, (const char *)tensor->data + offset, size); UNUSED(buffer); @@ -2433,7 +2463,13 @@ static void ggml_backend_metal_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer UNUSED(buffer); } -static struct ggml_backend_buffer_i metal_backend_buffer_i = { +static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + memset(ctx->all_data, value, ctx->all_size); +} + +static struct ggml_backend_buffer_i 
ggml_backend_metal_buffer_i = { /* .free_buffer = */ ggml_backend_metal_buffer_free_buffer, /* .get_base = */ ggml_backend_metal_buffer_get_base, /* .init_tensor = */ NULL, @@ -2441,8 +2477,11 @@ static struct ggml_backend_buffer_i metal_backend_buffer_i = { /* .get_tensor = */ ggml_backend_metal_buffer_get_tensor, /* .cpy_tensor_from = */ ggml_backend_metal_buffer_cpy_tensor_from, /* .cpy_tensor_to = */ ggml_backend_metal_buffer_cpy_tensor_to, + /* .clear = */ ggml_backend_metal_buffer_clear, }; +// default buffer type + static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); @@ -2453,13 +2492,46 @@ static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_ba size_aligned += (size_page - (size_aligned % size_page)); } - ctx->data = ggml_metal_host_malloc(size); - ctx->metal = [ggml_backend_metal_get_device() newBufferWithBytesNoCopy:ctx->data + id device = ggml_backend_metal_get_device(); + + ctx->all_data = ggml_metal_host_malloc(size_aligned); + ctx->all_size = size_aligned; + ctx->owned = true; + ctx->n_buffers = 1; + + ctx->buffers[0].data = ctx->all_data; + ctx->buffers[0].size = size; + ctx->buffers[0].metal = [device newBufferWithBytesNoCopy:ctx->all_data length:size_aligned options:MTLResourceStorageModeShared deallocator:nil]; - return ggml_backend_buffer_init(buft, metal_backend_buffer_i, ctx, size); + if (ctx->buffers[0].metal == nil) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_aligned / 1024.0 / 1024.0); + free(ctx); + ggml_backend_metal_free_device(); + return NULL; + } + + GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB", __func__, size_aligned / 1024.0 / 1024.0); + + +#if TARGET_OS_OSX + GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", + device.currentAllocatedSize / 1024.0 / 1024.0, + device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); + + if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { + GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); + } else { + GGML_METAL_LOG_INFO("\n"); + } +#else + GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); +#endif + + + return ggml_backend_buffer_init(buft, ggml_backend_metal_buffer_i, ctx, size); } static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { @@ -2470,7 +2542,13 @@ static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_t static bool ggml_backend_metal_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { return ggml_backend_is_metal(backend) || ggml_backend_is_cpu(backend); - GGML_UNUSED(buft); + UNUSED(buft); +} + +static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t buft) { + return true; + + UNUSED(buft); } ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { @@ -2480,6 +2558,7 @@ ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { /* .get_alignment = */ ggml_backend_metal_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes /* .supports_backend = */ ggml_backend_metal_buffer_type_supports_backend, + /* .is_host = */ ggml_backend_metal_buffer_type_is_host, }, /* .context = */ NULL, }; @@ -2487,6 +2566,87 @@ ggml_backend_buffer_type_t 
ggml_backend_metal_buffer_type(void) { return &ggml_backend_buffer_type_metal; } +// buffer from ptr + +ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size) { + struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); + + ctx->all_data = data; + ctx->all_size = size; + ctx->owned = false; + ctx->n_buffers = 0; + + const size_t size_page = sysconf(_SC_PAGESIZE); + size_t size_aligned = size; + if ((size_aligned % size_page) != 0) { + size_aligned += (size_page - (size_aligned % size_page)); + } + + id device = ggml_backend_metal_get_device(); + + // the buffer fits into the max buffer size allowed by the device + if (size_aligned <= device.maxBufferLength) { + ctx->buffers[ctx->n_buffers].data = data; + ctx->buffers[ctx->n_buffers].size = size; + + ctx->buffers[ctx->n_buffers].metal = [device newBufferWithBytesNoCopy:data length:size_aligned options:MTLResourceStorageModeShared deallocator:nil]; + + if (ctx->buffers[ctx->n_buffers].metal == nil) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_aligned / 1024.0 / 1024.0); + return false; + } + + GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB", __func__, size_aligned / 1024.0 / 1024.0); + + ++ctx->n_buffers; + } else { + // this overlap between the views will guarantee that the tensor with the maximum size will fully fit into + // one of the views + const size_t size_ovlp = ((max_size + size_page - 1) / size_page + 1) * size_page; // round-up 2 pages just in case + const size_t size_step = device.maxBufferLength - size_ovlp; + const size_t size_view = device.maxBufferLength; + + for (size_t i = 0; i < size; i += size_step) { + const size_t size_step_aligned = (i + size_view <= size) ? 
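[editor's note] The overlap arithmetic above is the crux of the multi-view scheme: views of length view = maxBufferLength are placed every step = view - size_ovlp bytes, with size_ovlp at least the largest tensor rounded up to a page, so every tensor lies entirely inside the view it starts in. A worked check of that invariant (illustrative only; the function name is made up):

#include <assert.h>
#include <stddef.h>

// returns the index of the view fully containing [offs, offs + tsize),
// given views of length `view` starting every `step` bytes, step < view
static size_t view_index(size_t offs, size_t tsize, size_t step, size_t view) {
    const size_t i = offs / step; // the view starting at or before offs
    // slack left in view i is more than view - step = size_ovlp >= tsize,
    // so the tensor cannot spill past the end of this view
    assert(offs + tsize <= i*step + view);
    return i;
}
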
size_view : (size_aligned - i); + + ctx->buffers[ctx->n_buffers].data = (void *) ((uint8_t *) data + i); + ctx->buffers[ctx->n_buffers].size = size_step_aligned; + + ctx->buffers[ctx->n_buffers].metal = [device newBufferWithBytesNoCopy:(void *) ((uint8_t *) data + i) length:size_step_aligned options:MTLResourceStorageModeShared deallocator:nil]; + + if (ctx->buffers[ctx->n_buffers].metal == nil) { + GGML_METAL_LOG_ERROR("%s: error: failed to allocate buffer, size = %8.2f MiB\n", __func__, size_step_aligned / 1024.0 / 1024.0); + return false; + } + + GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB, offs = %12ld", __func__, size_step_aligned / 1024.0 / 1024.0, i); + if (i + size_step < size) { + GGML_METAL_LOG_INFO("\n"); + } + + ++ctx->n_buffers; + } + } + +#if TARGET_OS_OSX + GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", + device.currentAllocatedSize / 1024.0 / 1024.0, + device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); + + if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { + GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); + } else { + GGML_METAL_LOG_INFO("\n"); + } +#else + GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); +#endif + + return ggml_backend_buffer_init(ggml_backend_metal_buffer_type(), ggml_backend_metal_buffer_i, ctx, size); +} + +// backend + static const char * ggml_backend_metal_name(ggml_backend_t backend) { return "Metal"; @@ -2499,10 +2659,6 @@ static void ggml_backend_metal_free(ggml_backend_t backend) { free(backend); } -static void ggml_backend_metal_synchronize(ggml_backend_t backend) { - UNUSED(backend); -} - static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggml_backend_t backend) { return ggml_backend_metal_buffer_type(); @@ -2529,25 +2685,15 @@ static struct ggml_backend_i metal_backend_i = { /* .get_tensor_async = */ NULL, /* .cpy_tensor_from_async = */ NULL, /* .cpy_tensor_to_async = */ NULL, - /* .synchronize = */ ggml_backend_metal_synchronize, - /* .graph_plan_create = */ NULL, // the metal implementation does not require creating graph plans atm + /* .synchronize = */ NULL, + /* .graph_plan_create = */ NULL, /* .graph_plan_free = */ NULL, /* .graph_plan_compute = */ NULL, /* .graph_compute = */ ggml_backend_metal_graph_compute, /* .supports_op = */ ggml_backend_metal_supports_op, }; -// TODO: make a common log callback for all backends in ggml-backend -static void ggml_backend_log_callback(enum ggml_log_level level, const char * msg, void * user_data) { - fprintf(stderr, "%s", msg); - - UNUSED(level); - UNUSED(user_data); -} - ggml_backend_t ggml_backend_metal_init(void) { - ggml_metal_log_set_callback(ggml_backend_log_callback, NULL); - struct ggml_metal_context * ctx = ggml_metal_init(GGML_DEFAULT_N_THREADS); if (ctx == NULL) { diff --git a/ggml.c b/ggml.c index 6da65bd92..236148514 100644 --- a/ggml.c +++ b/ggml.c @@ -2383,20 +2383,8 @@ size_t ggml_get_mem_size(const struct ggml_context * ctx) { size_t ggml_get_max_tensor_size(const struct ggml_context * ctx) { size_t max_size = 0; - struct ggml_object * obj = ctx->objects_begin; - - while (obj != NULL) { - if (obj->type == GGML_OBJECT_TENSOR) { - struct ggml_tensor * tensor = (struct ggml_tensor *) ((char *) ctx->mem_buffer + obj->offs); - - const size_t size = ggml_nbytes(tensor); - - if (max_size < size) { - max_size = size; - } - } - - obj = obj->next; + for (struct ggml_tensor * tensor = ggml_get_first_tensor(ctx); tensor != 
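[editor's note] The rewrite of ggml_get_max_tensor_size here is the first user of the tensor-enumeration helpers as a plain for-loop; the same idiom works for any per-context walk once the const qualifiers below land. A minimal sketch (count_tensors is an illustrative name):

#include <stddef.h>
#include "ggml.h"

// count the tensors in a context using the enumeration helpers
static size_t count_tensors(const struct ggml_context * ctx) {
    size_t n = 0;
    for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL;
         t = ggml_get_next_tensor(ctx, t)) {
        n++;
    }
    return n;
}
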
NULL; tensor = ggml_get_next_tensor(ctx, tensor)) { + max_size = MAX(max_size, ggml_nbytes(tensor)); } return max_size; @@ -3093,7 +3081,7 @@ struct ggml_tensor * ggml_view_tensor( return result; } -struct ggml_tensor * ggml_get_first_tensor(struct ggml_context * ctx) { +struct ggml_tensor * ggml_get_first_tensor(const struct ggml_context * ctx) { struct ggml_object * obj = ctx->objects_begin; char * const mem_buffer = ctx->mem_buffer; @@ -3109,7 +3097,7 @@ struct ggml_tensor * ggml_get_first_tensor(struct ggml_context * ctx) { return NULL; } -struct ggml_tensor * ggml_get_next_tensor(struct ggml_context * ctx, struct ggml_tensor * tensor) { +struct ggml_tensor * ggml_get_next_tensor(const struct ggml_context * ctx, struct ggml_tensor * tensor) { struct ggml_object * obj = (struct ggml_object *) ((char *)tensor - GGML_OBJECT_SIZE); obj = obj->next; @@ -19213,6 +19201,10 @@ char * gguf_get_tensor_name(const struct gguf_context * ctx, int i) { return ctx->infos[i].name.data; } +enum ggml_type gguf_get_tensor_type(const struct gguf_context * ctx, int i) { + return ctx->infos[i].type; +} + // returns the index static int gguf_get_or_add_key(struct gguf_context * ctx, const char * key) { const int idx = gguf_find_key(ctx, key); diff --git a/ggml.h b/ggml.h index beacdc8be..b17314897 100644 --- a/ggml.h +++ b/ggml.h @@ -735,8 +735,8 @@ extern "C" { GGML_API struct ggml_tensor * ggml_view_tensor(struct ggml_context * ctx, struct ggml_tensor * src); // Context tensor enumeration and lookup - GGML_API struct ggml_tensor * ggml_get_first_tensor(struct ggml_context * ctx); - GGML_API struct ggml_tensor * ggml_get_next_tensor (struct ggml_context * ctx, struct ggml_tensor * tensor); + GGML_API struct ggml_tensor * ggml_get_first_tensor(const struct ggml_context * ctx); + GGML_API struct ggml_tensor * ggml_get_next_tensor (const struct ggml_context * ctx, struct ggml_tensor * tensor); GGML_API struct ggml_tensor * ggml_get_tensor(struct ggml_context * ctx, const char * name); GGML_API struct ggml_tensor * ggml_set_zero(struct ggml_tensor * tensor); @@ -2135,10 +2135,11 @@ extern "C" { GGML_API const void * gguf_get_arr_data(const struct gguf_context * ctx, int key_id); GGML_API const char * gguf_get_arr_str (const struct gguf_context * ctx, int key_id, int i); - GGML_API int gguf_get_n_tensors (const struct gguf_context * ctx); - GGML_API int gguf_find_tensor (const struct gguf_context * ctx, const char * name); - GGML_API size_t gguf_get_tensor_offset(const struct gguf_context * ctx, int i); - GGML_API char * gguf_get_tensor_name (const struct gguf_context * ctx, int i); + GGML_API int gguf_get_n_tensors (const struct gguf_context * ctx); + GGML_API int gguf_find_tensor (const struct gguf_context * ctx, const char * name); + GGML_API size_t gguf_get_tensor_offset(const struct gguf_context * ctx, int i); + GGML_API char * gguf_get_tensor_name (const struct gguf_context * ctx, int i); + GGML_API enum ggml_type gguf_get_tensor_type (const struct gguf_context * ctx, int i); // overrides existing values or adds a new one GGML_API void gguf_set_val_u8 (struct gguf_context * ctx, const char * key, uint8_t val); diff --git a/llama.cpp b/llama.cpp index 63ebe581b..ba970ce8d 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1,11 +1,12 @@ #define LLAMA_API_INTERNAL +//#define LLAMA_GGML_BACKEND_CUDA_TEST // for testing only - enables ggml-cuda through ggml-backend, disables partial offloading #include "llama.h" #include "unicode.h" #include "ggml.h" - #include "ggml-alloc.h" +#include "ggml-backend.h" #ifdef 
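[editor's note] The new gguf_get_tensor_type() lets tooling (and, further down, the model loader) inspect tensor types straight from GGUF metadata without materializing ggml tensors. A short sketch built only from the gguf API shown in this patch plus ggml_type_name():

#include <stdio.h>
#include "ggml.h"

// print the name and type of every tensor recorded in a GGUF file
static void print_tensor_types(const struct gguf_context * ctx) {
    const int n = gguf_get_n_tensors(ctx);
    for (int i = 0; i < n; i++) {
        printf("%-48s %s\n",
            gguf_get_tensor_name(ctx, i),
            ggml_type_name(gguf_get_tensor_type(ctx, i)));
    }
}
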
GGML_USE_CUBLAS # include "ggml-cuda.h" @@ -32,6 +33,7 @@ #include #if defined(_POSIX_MAPPED_FILES) #include + #include #endif #if defined(_POSIX_MEMLOCK_RANGE) #include @@ -712,38 +714,6 @@ static void ggml_graph_compute_helper(std::vector & buf, ggml_cgraph * // llama helpers // -inline void * llama_host_malloc(size_t n) { -#ifdef GGML_USE_CUBLAS - if (ggml_cublas_loaded()) { - return ggml_cuda_host_malloc(n); - } else { - return malloc(n); - } -#elif GGML_USE_METAL - return ggml_metal_host_malloc(n); -#elif GGML_USE_CPU_HBM - return hbw_malloc(n); -#else - return malloc(n); -#endif -} - -inline void llama_host_free(void * ptr) { -#ifdef GGML_USE_CUBLAS - if (ggml_cublas_loaded()) { - return ggml_cuda_host_free(ptr); - } else { - return free(ptr); - } -#elif GGML_USE_METAL - return ggml_metal_host_free(ptr); -#elif GGML_USE_CPU_HBM - return hbw_free(ptr); -#else - return free(ptr); -#endif -} - #if defined(_WIN32) static std::string llama_format_win_err(DWORD err) { LPSTR buf; @@ -758,40 +728,10 @@ static std::string llama_format_win_err(DWORD err) { } #endif -struct llama_buffer { - void * data = NULL; - size_t size = 0; - - // fallback to malloc / free - // useful in cases where CUDA can try to allocate PINNED memory - bool fallback = false; - - void resize(size_t n) { - llama_host_free(data); - - data = llama_host_malloc(n); - if (!data) { - fallback = true; - data = malloc(n); - } else { - fallback = false; - } - - GGML_ASSERT(data); - size = n; - } - - ~llama_buffer() { - if (data) { - if (fallback) { // NOLINT - free(data); - } else { - llama_host_free(data); - } - } - - data = NULL; - } +template +struct no_init { + T value; + no_init() { /* do nothing */ } }; struct llama_file { @@ -879,6 +819,9 @@ struct llama_mmap { #ifdef _POSIX_MAPPED_FILES static constexpr bool SUPPORTED = true; + // list of mapped fragments (first_offset, last_offset) + std::vector> mapped_fragments; + llama_mmap(struct llama_file * file, size_t prefetch = (size_t) -1 /* -1 = max value */, bool numa = false) { size = file->size; int fd = fileno(file->fp); @@ -886,17 +829,22 @@ struct llama_mmap { // prefetch/readahead impairs performance on NUMA systems if (numa) { prefetch = 0; } #ifdef __linux__ + // advise the kernel to read the file sequentially (increases readahead) + if (posix_fadvise(fd, 0, 0, POSIX_FADV_SEQUENTIAL)) { + LLAMA_LOG_WARN("warning: posix_fadvise(.., POSIX_FADV_SEQUENTIAL) failed: %s\n", + strerror(errno)); + } if (prefetch) { flags |= MAP_POPULATE; } #endif addr = mmap(NULL, file->size, PROT_READ, flags, fd, 0); - if (addr == MAP_FAILED) { + if (addr == MAP_FAILED) { // NOLINT throw std::runtime_error(format("mmap failed: %s", strerror(errno))); } if (prefetch > 0) { - // Advise the kernel to preload the mapped memory + // advise the kernel to preload the mapped memory if (posix_madvise(addr, std::min(file->size, prefetch), POSIX_MADV_WILLNEED)) { - fprintf(stderr, "warning: posix_madvise(.., POSIX_MADV_WILLNEED) failed: %s\n", + LLAMA_LOG_WARN("warning: posix_madvise(.., POSIX_MADV_WILLNEED) failed: %s\n", strerror(errno)); } } @@ -904,14 +852,81 @@ struct llama_mmap { // advise the kernel not to use readahead // (because the next page might not belong on the same node) if (posix_madvise(addr, file->size, POSIX_MADV_RANDOM)) { - fprintf(stderr, "warning: posix_madvise(.., POSIX_MADV_RANDOM) failed: %s\n", + LLAMA_LOG_WARN("warning: posix_madvise(.., POSIX_MADV_RANDOM) failed: %s\n", strerror(errno)); } } + + // initialize list of mapped_fragments + mapped_fragments.emplace_back(0, 
file->size); + } + + static void align_range(size_t * first, size_t * last, size_t page_size) { + // align first to the next page + size_t offset_in_page = *first & (page_size - 1); + size_t offset_to_page = offset_in_page == 0 ? 0 : page_size - offset_in_page; + *first += offset_to_page; + + // align last to the previous page + *last = *last & ~(page_size - 1); + + if (*last <= *first) { + *last = *first; + } + } + + // partially unmap the file in the range [first, last) + void unmap_fragment(size_t first, size_t last) { + // note: this function must not be called multiple times with overlapping ranges + // otherwise, there is a risk of invalidating addresses that have been repurposed for other mappings + int page_size = sysconf(_SC_PAGESIZE); + align_range(&first, &last, page_size); + size_t len = last - first; + + if (len == 0) { + return; + } + + GGML_ASSERT(first % page_size == 0); + GGML_ASSERT(last % page_size == 0); + GGML_ASSERT(last > first); + + void * next_page_start = (uint8_t *) addr + first; + + // unmap the range + if (munmap(next_page_start, len)) { + LLAMA_LOG_WARN("warning: munmap failed: %s\n", strerror(errno)); + } + + // update the list of mapped fragments to avoid unmapping the same range again in the destructor + std::vector> new_mapped_fragments; + for (const auto & frag : mapped_fragments) { + if (frag.first < first && frag.second > last) { + // the range is in the middle of the fragment, split it + new_mapped_fragments.emplace_back(frag.first, first); + new_mapped_fragments.emplace_back(last, frag.second); + } else if (frag.first < first && frag.second > first) { + // the range starts in the middle of the fragment + new_mapped_fragments.emplace_back(frag.first, first); + } else if (frag.first < last && frag.second > last) { + // the range ends in the middle of the fragment + new_mapped_fragments.emplace_back(last, frag.second); + } else if (frag.first >= first && frag.second <= last) { + // the range covers the entire fragment + } else { + // the range is outside the fragment + new_mapped_fragments.push_back(frag); + } + } + mapped_fragments = std::move(new_mapped_fragments); } ~llama_mmap() { - munmap(addr, size); + for (const auto & frag : mapped_fragments) { + if (munmap((char *) addr + frag.first, frag.second - frag.first)) { + LLAMA_LOG_WARN("warning: munmap failed: %s\n", strerror(errno)); + } + } } #elif defined(_WIN32) static constexpr bool SUPPORTED = true; @@ -959,6 +974,12 @@ struct llama_mmap { } } + void unmap_fragment(size_t first, size_t last) { + // not supported + GGML_UNUSED(first); + GGML_UNUSED(last); + } + ~llama_mmap() { if (!UnmapViewOfFile(addr)) { fprintf(stderr, "warning: UnmapViewOfFile failed: %s\n", @@ -975,6 +996,13 @@ struct llama_mmap { throw std::runtime_error(std::string("mmap not supported")); } + + void unmap(size_t offset, size_t len) { + (void) offset; + (void) len; + + throw std::runtime_error(std::string("mmap not supported")); + } #endif }; @@ -1148,6 +1176,26 @@ static std::string llama_token_to_piece(const struct llama_context * ctx, llama_ return std::string(result.data(), result.size()); } +static ggml_backend_buffer_type_t llama_default_buffer_type(int n_gpu_layers) { +#ifdef GGML_USE_METAL + if (n_gpu_layers > 0) { + return ggml_backend_metal_buffer_type(); + } +#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) + if (n_gpu_layers > 0) { + return ggml_backend_cuda_buffer_type(0); + } +#elif defined(GGML_USE_CUBLAS) + return ggml_backend_cuda_host_buffer_type(); +#elif defined(GGML_USE_CPU_HBM) + 
return ggml_backend_cpu_hbm_buffer_type(); +#endif + + return ggml_backend_cpu_buffer_type(); + + GGML_UNUSED(n_gpu_layers); +} + // // globals // @@ -1348,14 +1396,10 @@ struct llama_kv_cache { struct ggml_context * ctx = NULL; - llama_buffer buf; + ggml_backend_buffer_t buf = NULL; ~llama_kv_cache() { - if (ctx) { - ggml_free(ctx); - } - -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (ggml_cublas_loaded()) { for (size_t i = 0; i < k_l.size(); ++i) { ggml_cuda_free_data(k_l[i]); @@ -1363,6 +1407,11 @@ struct llama_kv_cache { } } #endif + if (ctx) { + ggml_free(ctx); + } + + ggml_backend_buffer_free(buf); } }; @@ -1402,11 +1451,11 @@ struct llama_vocab { id special_suffix_id = 32008; id special_eot_id = 32010; - int find_bpe_rank(std::string token_left, std::string token_right) const { - GGML_ASSERT(token_left.find(" ") == std::string::npos); - GGML_ASSERT(token_left.find("\n") == std::string::npos); - GGML_ASSERT(token_right.find(" ") == std::string::npos); - GGML_ASSERT(token_right.find("\n") == std::string::npos); + int find_bpe_rank(const std::string & token_left, const std::string & token_right) const { + GGML_ASSERT(token_left.find(' ') == std::string::npos); + GGML_ASSERT(token_left.find('\n') == std::string::npos); + GGML_ASSERT(token_right.find(' ') == std::string::npos); + GGML_ASSERT(token_right.find('\n') == std::string::npos); auto it = bpe_ranks.find(std::make_pair(token_left, token_right)); if (it == bpe_ranks.end()) { @@ -1448,7 +1497,7 @@ struct llama_model { struct ggml_context * ctx = NULL; // the model memory buffer - llama_buffer buf; + ggml_backend_buffer_t buf = NULL; // model memory mapped file std::unique_ptr mapping; @@ -1464,11 +1513,7 @@ struct llama_model { int64_t t_start_us = 0; ~llama_model() { - if (ctx) { - ggml_free(ctx); - } - -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (ggml_cublas_loaded()) { for (size_t i = 0; i < tensors_by_name.size(); ++i) { ggml_cuda_free_data(tensors_by_name[i].second); @@ -1482,24 +1527,26 @@ struct llama_model { ggml_cl_free_data(tensors_by_name[i].second); } #endif + if (ctx) { + ggml_free(ctx); + } + + ggml_backend_buffer_free(buf); } }; struct llama_context { llama_context(const llama_model & model) : model(model), t_start_us(model.t_start_us), t_load_us(model.t_load_us) {} ~llama_context() { -#ifdef GGML_USE_METAL - if (ctx_metal) { - ggml_metal_free(ctx_metal); - } -#endif - if (alloc) { - ggml_allocr_free(alloc); - } + ggml_allocr_free(alloc); + ggml_backend_buffer_free(buf_alloc); + ggml_backend_free(backend); } llama_cparams cparams; + ggml_backend_t backend = nullptr; + const llama_model & model; // key + value cache for the self attention @@ -1530,18 +1577,13 @@ struct llama_context { // input embedding (1-dimensional array: [n_embd]) std::vector embedding; - // reusable buffer for `struct ggml_graph_plan.work_data` - std::vector work_buffer; - // memory buffers used to evaluate the model - llama_buffer buf_compute; - - llama_buffer buf_alloc; + std::vector buf_compute_meta; + ggml_backend_buffer_t buf_alloc = NULL; ggml_allocr * alloc = NULL; -#ifdef GGML_USE_METAL - ggml_metal_context * ctx_metal = NULL; -#endif + // temporary buffer for copying data to/from the backend + std::vector> buf_copy; #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -1563,9 +1605,6 @@ static bool llama_kv_cache_init( const uint32_t n_embd = hparams.n_embd_gqa(); const uint32_t n_layer = hparams.n_layer; - const int64_t n_mem 
= n_layer*n_ctx; - const int64_t n_elements = n_embd*n_mem; - cache.has_shift = false; cache.head = 0; @@ -1575,13 +1614,10 @@ static bool llama_kv_cache_init( cache.cells.clear(); cache.cells.resize(n_ctx); - cache.buf.resize(ggml_row_size(ktype, n_elements) + ggml_row_size(vtype, n_elements) + 2u*n_layer*ggml_tensor_overhead()); - memset(cache.buf.data, 0, cache.buf.size); - struct ggml_init_params params; - params.mem_size = cache.buf.size; - params.mem_buffer = cache.buf.data; - params.no_alloc = false; + params.mem_size = 2u*n_layer*ggml_tensor_overhead(); + params.mem_buffer = NULL; + params.no_alloc = true; cache.ctx = ggml_init(params); @@ -1595,9 +1631,7 @@ static bool llama_kv_cache_init( cache.k_l.reserve(n_layer); cache.v_l.reserve(n_layer); - const int i_gpu_start = (int) n_layer - n_gpu_layers; GGML_UNUSED(i_gpu_start); - - GGML_UNUSED(offload); + const int i_gpu_start = (int) n_layer - n_gpu_layers; for (int i = 0; i < (int) n_layer; i++) { ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd*n_ctx); @@ -1606,23 +1640,35 @@ static bool llama_kv_cache_init( ggml_format_name(v, "cache_v_l%d", i); cache.k_l.push_back(k); cache.v_l.push_back(v); -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (i >= i_gpu_start) { if (offload) { ggml_cuda_assign_buffers_no_scratch(k); - vram_kv_cache += ggml_nbytes(k); ggml_cuda_assign_buffers_no_scratch(v); + vram_kv_cache += ggml_nbytes(k); vram_kv_cache += ggml_nbytes(v); + // HACK: mark tensor as allocated + k->data = v->data = (void *)(uintptr_t)1; } } #endif // GGML_USE_CUBLAS } + // allocate tensors + cache.buf = ggml_backend_alloc_ctx_tensors_from_buft(cache.ctx, llama_default_buffer_type(n_gpu_layers)); + + // buf may be NULL with full offload + if (cache.buf) { + // initialize the buffer to avoid NaNs in the padding + ggml_backend_buffer_clear(cache.buf, 0); + } + if (vram_kv_cache > 0) { LLAMA_LOG_INFO("%s: VRAM kv self = %.2f MB\n", __func__, vram_kv_cache / 1024.0 / 1024.0); } - GGML_UNUSED(n_gpu_layers); + GGML_UNUSED(i_gpu_start); + GGML_UNUSED(offload); return true; } @@ -2073,14 +2119,13 @@ struct llama_model_loader { enum ggml_type type_max = GGML_TYPE_F32; for (int i = 0; i < n_tensors; i++) { - const char * name = gguf_get_tensor_name(ctx_gguf, i); - struct ggml_tensor * meta = ggml_get_tensor(ctx_meta, name); + enum ggml_type type = gguf_get_tensor_type(ctx_gguf, i); - n_type[meta->type]++; + n_type[type]++; - if (n_type_max < n_type[meta->type]) { - n_type_max = n_type[meta->type]; - type_max = meta->type; + if (n_type_max < n_type[type]) { + n_type_max = n_type[type]; + type_max = type; } // LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, name, ggml_type_name(meta->type), llama_format_tensor_shape(meta).c_str()); @@ -2221,34 +2266,19 @@ struct llama_model_loader { return gguf_get_tensor_name(ctx_gguf, i); } - struct ggml_tensor * get_tensor_meta(int i) const { - return ggml_get_tensor(ctx_meta, get_tensor_name(i)); + struct ggml_tensor * get_tensor_meta(const char * name) const { + return ggml_get_tensor(ctx_meta, name); } - void calc_sizes(size_t & ctx_size_p, size_t & mmapped_size_p) const { - ctx_size_p = 0; - mmapped_size_p = 0; - - for (int i = 0; i < n_tensors; i++) { - struct ggml_tensor * meta = get_tensor_meta(i); - ctx_size_p += sizeof(struct ggml_tensor) + GGML_OBJECT_SIZE; - (use_mmap ? 
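[editor's note] The KV-cache change above is an instance of a general pattern: build tensors in a metadata-only (no_alloc) context, then place them all in one backend buffer with a single call. A minimal sketch, assuming ggml_backend_alloc_ctx_tensors_from_buft comes from ggml-alloc.h as used above; alloc_two_tensors is an illustrative name:

#include "ggml.h"
#include "ggml-alloc.h"
#include "ggml-backend.h"

// create two tensors without data, then allocate both in one backend buffer;
// the caller keeps *out_ctx alive for as long as the buffer is in use
static ggml_backend_buffer_t alloc_two_tensors(ggml_backend_buffer_type_t buft, struct ggml_context ** out_ctx) {
    struct ggml_init_params params = {
        /*.mem_size   =*/ 2u*ggml_tensor_overhead(),
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ true, // metadata only; data comes from the backend buffer
    };
    struct ggml_context * ctx = ggml_init(params);

    ggml_new_tensor_1d(ctx, GGML_TYPE_F16, 1024);
    ggml_new_tensor_1d(ctx, GGML_TYPE_F16, 1024);

    ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, buft);
    if (buf != NULL) {
        ggml_backend_buffer_clear(buf, 0); // avoid NaNs in padding, as above
    }

    *out_ctx = ctx;
    return buf;
}
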
mmapped_size_p : ctx_size_p) += ggml_nbytes_pad(meta); - } + struct ggml_tensor * get_tensor_meta(int i) const { + return get_tensor_meta(get_tensor_name(i)); } struct ggml_tensor * create_tensor_for(struct ggml_context * ctx, struct ggml_tensor * meta, ggml_backend_type backend) { - if (backend != GGML_BACKEND_CPU) { - ggml_set_no_alloc(ctx, true); - } - struct ggml_tensor * tensor = ggml_dup_tensor(ctx, meta); tensor->backend = backend; // TODO: ggml_set_backend ggml_set_name(tensor, ggml_get_name(meta)); - if (backend != GGML_BACKEND_CPU) { - ggml_set_no_alloc(ctx, use_mmap); - } - n_created++; return tensor; @@ -2306,90 +2336,137 @@ struct llama_model_loader { return gguf_get_data_offset(ctx_gguf) + gguf_get_tensor_offset(ctx_gguf, idx); } + void init_mapping(bool prefetch = true) { + /* + // prefetch only CPU tensors + if (use_mmap) { + size_t size_pref = 0; // prefetch + + for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { + struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); + if (cur->backend == GGML_BACKEND_CPU) { + size_t tensor_end = gguf_get_tensor_offset(ctx_gguf, i) + ggml_nbytes(cur); + size_pref = std::max(size_pref, tensor_end); + } + } + mapping.reset(new llama_mmap(&file, gguf_get_data_offset(ctx_gguf) + size_pref, ggml_is_numa())); + } + */ + // prefetch the whole file - all the data is needed anyway + if (use_mmap) { + mapping.reset(new llama_mmap(&file, prefetch ? -1 : 0, ggml_is_numa())); + } + } + + // for backwards compatibility, does not support ggml-backend void load_data_for(struct ggml_tensor * cur) const { const size_t offs = file_offset(ggml_get_name(cur)); - if (use_mmap) { - cur->data = (uint8_t *) mapping->addr + offs; + if (use_mmap && mapping) { + GGML_ASSERT(cur->data == nullptr); + cur->data = (uint8_t *)mapping->addr + offs; } else { + GGML_ASSERT(cur->data != nullptr); file.seek(offs, SEEK_SET); file.read_raw(cur->data, ggml_nbytes(cur)); } } - void load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, llama_mlock * lmlock) { + void load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) const { size_t size_data = 0; - size_t size_lock = 0; - size_t size_pref = 0; // prefetch for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); size_data += ggml_nbytes(cur); - if (cur->backend == GGML_BACKEND_CPU) { - size_pref += ggml_nbytes(cur); - } } - if (use_mmap) { - mapping.reset(new llama_mmap(&file, size_pref, ggml_is_numa())); + if (use_mmap && buf_mmap) { if (lmlock) { lmlock->init(mapping->addr); } } - size_t done_size = 0; +#if (defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST)) || defined(GGML_USE_CLBLAST) + const bool legacy_offload = true; +#else + const bool legacy_offload = false; +#endif + + std::vector> read_buf; + + size_t size_done = 0; + + size_t mmap_first = -1; + size_t mmap_last = 0; + for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); GGML_ASSERT(cur); // unused tensors should have been caught by load_data already if (progress_callback) { - progress_callback((float) done_size / size_data, progress_callback_user_data); + progress_callback((float) size_done / size_data, progress_callback_user_data); } - // allocate temp buffer if 
not using mmap - if (!use_mmap && cur->data == NULL) { - GGML_ASSERT(cur->backend != GGML_BACKEND_CPU); - #ifdef GGML_USE_CPU_HBM - cur->data = (uint8_t*)hbw_malloc(ggml_nbytes(cur)); - #else - cur->data = (uint8_t*)malloc(ggml_nbytes(cur)); - #endif - } + const size_t offs = file_offset(ggml_get_name(cur)); - load_data_for(cur); - - switch (cur->backend) { - case GGML_BACKEND_CPU: - if (use_mmap && lmlock) { - size_lock += ggml_nbytes(cur); - lmlock->grow_to(size_lock); + if (!legacy_offload || cur->backend == GGML_BACKEND_CPU) { + if (use_mmap && mapping) { + if (buf_mmap) { + ggml_backend_tensor_alloc(buf_mmap, cur, (uint8_t *) mapping->addr + offs); + if (lmlock) { + lmlock->grow_to(offs + ggml_nbytes(cur)); + } + mmap_first = std::min(mmap_first, offs); + mmap_last = std::max(mmap_last, offs + ggml_nbytes(cur)); + } else { + ggml_backend_tensor_set(cur, (uint8_t *) mapping->addr + offs, 0, ggml_nbytes(cur)); } - break; -#ifdef GGML_USE_CUBLAS - case GGML_BACKEND_GPU: - case GGML_BACKEND_GPU_SPLIT: - // old code: - //ggml_cuda_transform_tensor(lt.data, lt.ggml_tensor); - - // TODO: test if this works !! - ggml_cuda_transform_tensor(cur->data, cur); - if (!use_mmap) { - free(cur->data); + } else { + if (ggml_backend_buffer_is_host(cur->buffer)) { + file.seek(offs, SEEK_SET); + file.read_raw(cur->data, ggml_nbytes(cur)); + } else { + read_buf.resize(ggml_nbytes(cur)); + file.seek(offs, SEEK_SET); + file.read_raw(read_buf.data(), ggml_nbytes(cur)); + ggml_backend_tensor_set(cur, read_buf.data(), 0, ggml_nbytes(cur)); } - break; + } + } else { + // HACK: mark tensor as allocated + cur->data = (void *)(uintptr_t)1; + void * data; + if (use_mmap && mapping) { + data = (uint8_t *) mapping->addr + offs; + } else { + read_buf.resize(ggml_nbytes(cur)); + file.seek(offs, SEEK_SET); + file.read_raw(read_buf.data(), ggml_nbytes(cur)); + data = read_buf.data(); + } + +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) + ggml_cuda_transform_tensor(data, cur); #elif defined(GGML_USE_CLBLAST) - case GGML_BACKEND_GPU: - ggml_cl_transform_tensor(cur->data, cur); - if (!use_mmap) { - free(cur->data); - } - break; + GGML_ASSERT(cur->backend == GGML_BACKEND_GPU); + ggml_cl_transform_tensor(data, cur); +#else + GGML_ASSERT(!"GPU tensor without a GPU backend"); + GGML_UNUSED(data); #endif - default: - continue; } - done_size += ggml_nbytes(cur); + size_done += ggml_nbytes(cur); + } + + // unmap offloaded tensors and metadata + if (use_mmap && mapping) { + mapping->unmap_fragment(0, mmap_first); + mapping->unmap_fragment(mmap_last, mapping->size); + } + + if (progress_callback) { + progress_callback(1.0f, progress_callback_user_data); } } }; @@ -2983,25 +3060,16 @@ static void llm_load_tensors( model.n_gpu_layers = n_gpu_layers; - size_t ctx_size; - size_t mmapped_size; + size_t ctx_size = ggml_tensor_overhead() * ml.n_tensors; - ml.calc_sizes(ctx_size, mmapped_size); - - LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MiB\n", __func__, ctx_size/1024.0/1024.0); + LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MiB\n", __func__, ctx_size/1024.0/1024.0); // create the ggml context { - model.buf.resize(ctx_size); - if (use_mlock) { - model.mlock_buf.init (model.buf.data); - model.mlock_buf.grow_to(model.buf.size); - } - struct ggml_init_params params = { - /*.mem_size =*/ model.buf.size, - /*.mem_buffer =*/ model.buf.data, - /*.no_alloc =*/ ml.use_mmap, + /*.mem_size =*/ ctx_size, + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ true, }; model.ctx = ggml_init(params); @@ -3015,22 +3083,21 @@ static void 
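[editor's note] Condensing the two loading paths introduced above: with mmap, the tensor is pointed directly into the mapping (zero-copy via ggml_backend_tensor_alloc); otherwise the bytes are read from the file and uploaded with ggml_backend_tensor_set, which works for host and device buffers alike. A sketch of the core decision; place_tensor is an illustrative name and the declarations are assumed to come from ggml-backend.h/ggml-alloc.h:

#include <stdint.h>
#include "ggml.h"
#include "ggml-alloc.h"
#include "ggml-backend.h"

// place one tensor, either zero-copy from an mmap'd file or by copying
// `file_data` (already read from disk) into the tensor's backend buffer
static void place_tensor(struct ggml_tensor * cur, ggml_backend_buffer_t buf_mmap,
                         uint8_t * mmap_addr, size_t offs, const void * file_data) {
    if (mmap_addr != NULL && buf_mmap != NULL) {
        // tensor data lives inside the memory-mapped file
        ggml_backend_tensor_alloc(buf_mmap, cur, mmap_addr + offs);
    } else {
        // copy path: upload the bytes into the tensor's buffer
        ggml_backend_tensor_set(cur, file_data, 0, ggml_nbytes(cur));
    }
}
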
llm_load_tensors( enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; enum ggml_backend_type llama_backend_offload_split = GGML_BACKEND_CPU; -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (ggml_cublas_loaded()) { LLAMA_LOG_INFO("%s: using " GGML_CUDA_NAME " for GPU acceleration\n", __func__); ggml_cuda_set_main_device(main_gpu); - llama_backend_offload = GGML_BACKEND_GPU; + llama_backend_offload = GGML_BACKEND_GPU; llama_backend_offload_split = GGML_BACKEND_GPU_SPLIT; } #elif defined(GGML_USE_CLBLAST) LLAMA_LOG_INFO("%s: using OpenCL for GPU acceleration\n", __func__); - llama_backend_offload = GGML_BACKEND_GPU; + llama_backend_offload = GGML_BACKEND_GPU; llama_backend_offload_split = GGML_BACKEND_GPU; #endif - // prepare memory for the weights - size_t vram_weights = 0; + // create tensors for the weights { const int64_t n_embd = hparams.n_embd; const int64_t n_embd_gqa = hparams.n_embd_gqa(); @@ -3059,13 +3126,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3115,28 +3175,6 @@ static void llm_load_tensors( layer.ffn_up_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP_EXP, "weight", i, x), {n_embd, n_ff}, backend_split); } } - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + - (layer.bq ? ggml_nbytes(layer.bq) : 0) + - (layer.bk ? ggml_nbytes(layer.bk) : 0) + - (layer.bv ? ggml_nbytes(layer.bv) : 0) + - (layer.bo ? 
ggml_nbytes(layer.bo) : 0) + - ggml_nbytes(layer.ffn_norm); - - if (layer.ffn_gate_inp == nullptr) { - vram_weights += - ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } else { - vram_weights += ggml_nbytes(layer.ffn_gate_inp); - for (uint32_t x = 0; x < hparams.n_expert; ++x) { - vram_weights += - ggml_nbytes(layer.ffn_gate_exp[x]) + ggml_nbytes(layer.ffn_down_exp[x]) + ggml_nbytes(layer.ffn_up_exp[x]); - } - } - } } } break; case LLM_ARCH_BAICHUAN: @@ -3156,13 +3194,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3189,19 +3220,10 @@ static void llm_load_tensors( layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_FALCON: { - // TODO: CPU-only for now - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); // output @@ -3220,14 +3242,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3248,11 +3262,6 @@ static void llm_load_tensors( if (gguf_find_tensor(ml.ctx_gguf, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i).c_str()) >= 0) { layer.attn_norm_2 = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i), {n_embd}, backend); layer.attn_norm_2_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM_2, "bias", i), {n_embd}, backend); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(layer.attn_norm_2); - vram_weights += ggml_nbytes(layer.attn_norm_2_b); - } } layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); @@ -3260,13 +3269,6 @@ static void llm_load_tensors( layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + - 
ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.wo) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_STARCODER: @@ -3290,14 +3292,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3329,16 +3323,6 @@ static void llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + - ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + - ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + - ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_norm_b) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b) + - ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b); - } } } break; case LLM_ARCH_PERSIMMON: @@ -3360,14 +3344,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3397,8 +3373,6 @@ static void llm_load_tensors( } break; case LLM_ARCH_BLOOM: { - // TODO: CPU-only for now - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); model.tok_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}, GGML_BACKEND_CPU); model.tok_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}, GGML_BACKEND_CPU); @@ -3419,14 +3393,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3458,16 +3424,6 @@ static void llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); - - if (backend == GGML_BACKEND_GPU) 
{ - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + - ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + - ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + - ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_norm_b) + - ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b); - } } } break; case LLM_ARCH_MPT: @@ -3489,13 +3445,6 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3518,16 +3467,6 @@ static void llm_load_tensors( layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + - ggml_nbytes(layer.wqkv) + - ggml_nbytes(layer.wo) + - ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.ffn_down) + - ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_STABLELM: @@ -3550,13 +3489,6 @@ static void llm_load_tensors( model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } } const uint32_t n_ff = hparams.n_ff; @@ -3588,13 +3520,6 @@ static void llm_load_tensors( layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wq) + ggml_nbytes(layer.wk) + - ggml_nbytes(layer.wv) + ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + - ggml_nbytes(layer.ffn_gate) + ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_QWEN: @@ -3614,14 +3539,7 @@ static void llm_load_tensors( model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - } - if (backend_output == GGML_BACKEND_GPU_SPLIT) { - vram_weights += ggml_nbytes(model.output); - } - } + } const uint32_t n_ff = hparams.n_ff / 2; @@ -3646,13 +3564,6 @@ static void llm_load_tensors( layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, 
backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + - ggml_nbytes(layer.wo) + ggml_nbytes(layer.ffn_norm) + ggml_nbytes(layer.ffn_gate) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_up); - } } } break; case LLM_ARCH_PHI2: @@ -3676,13 +3587,6 @@ static void llm_load_tensors( model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); model.output_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}, backend_output); - - if (backend_norm == GGML_BACKEND_GPU) { - vram_weights += ggml_nbytes(model.output_norm); - vram_weights += ggml_nbytes(model.output_norm_b); - vram_weights += ggml_nbytes(model.output); - vram_weights += ggml_nbytes(model.output_b); - } } const uint32_t n_ff = hparams.n_ff; @@ -3711,15 +3615,6 @@ static void llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); - - if (backend == GGML_BACKEND_GPU) { - vram_weights += - ggml_nbytes(layer.attn_norm) + ggml_nbytes(layer.attn_norm_b) + - ggml_nbytes(layer.wqkv) + ggml_nbytes(layer.bqkv) + - ggml_nbytes(layer.wo) + ggml_nbytes(layer.bo) + - ggml_nbytes(layer.ffn_up) + ggml_nbytes(layer.ffn_up_b) + - ggml_nbytes(layer.ffn_down) + ggml_nbytes(layer.ffn_down_b); - } } } break; default: @@ -3729,16 +3624,78 @@ static void llm_load_tensors( ml.done_getting_tensors(); + ml.init_mapping(); + + // allocate tensors + size_t vram_weights = 0; + size_t buf_size = 0; + + ggml_backend_buffer_type_t buft = llama_default_buffer_type(n_gpu_layers); + + for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { + // GGML_BACKEND_GPU tensors are for CUDA and OpenCL only, which are handled separately without ggml-backend + if (t->backend == GGML_BACKEND_CPU) { + buf_size += GGML_PAD(ggml_backend_buft_get_alloc_size(buft, t), ggml_backend_buft_get_alignment(buft)); + } else { + vram_weights += ggml_nbytes(t); + } + } + + // create backend buffer + ggml_backend_buffer_t buf_mmap = nullptr; + +#ifdef GGML_USE_METAL + if (n_gpu_layers > 0) { + if (ml.use_mmap) { + const size_t max_size = ggml_get_max_tensor_size(ctx); + model.buf = ggml_backend_metal_buffer_from_ptr(ml.mapping->addr, ml.mapping->size, max_size); + buf_mmap = model.buf; + } else { + model.buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_metal_buffer_type()); + } + } +#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) + // for testing only + if (n_gpu_layers > 0) { + model.buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cuda_buffer_type(0)); + } +#endif + + if (model.buf == nullptr) { + // CPU backend, and indirectly CUDA and OpenCL + if (ml.use_mmap) { + model.buf = ggml_backend_cpu_buffer_from_ptr(ml.mapping->addr, ml.mapping->size); + buf_mmap = model.buf; + } else { + // allocate only CPU tensors + model.buf = ggml_backend_buft_alloc_buffer(buft, buf_size); + ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(model.buf); + for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { + if (t->backend 
== GGML_BACKEND_CPU) { + ggml_tallocr_alloc(alloc, t); + } + } + ggml_tallocr_free(alloc); + } + } + + if (use_mlock && ggml_backend_buffer_is_host(model.buf)) { + model.mlock_buf.init (ggml_backend_buffer_get_base(model.buf)); + model.mlock_buf.grow_to(ggml_backend_buffer_get_size(model.buf)); + } + // print memory requirements { - // this is the total memory required to run the inference - size_t mem_required = - ctx_size + - mmapped_size - vram_weights; // weights in VRAM not in memory + size_t sys_mem_required = ctx_size + buf_size; - LLAMA_LOG_INFO("%s: mem required = %7.2f MiB\n", __func__, mem_required / 1024.0 / 1024.0); + if (sys_mem_required > 0) { + LLAMA_LOG_INFO("%s: system memory used = %7.2f MiB\n", __func__, sys_mem_required / 1024.0 / 1024.0); + } + if (vram_weights > 0) { + LLAMA_LOG_INFO("%s: VRAM used = %7.2f MiB\n", __func__, vram_weights / 1024.0 / 1024.0); + } -#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) +#if (defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST)) || defined(GGML_USE_CLBLAST) const int n_gpu = std::min(n_gpu_layers, int(hparams.n_layer)); LLAMA_LOG_INFO("%s: offloading %d repeating layers to GPU\n", __func__, n_gpu); @@ -3746,39 +3703,26 @@ static void llm_load_tensors( LLAMA_LOG_INFO("%s: offloading non-repeating layers to GPU\n", __func__); } -#ifdef GGML_USE_CUBLAS const int max_backend_supported_layers = hparams.n_layer + 1; const int max_offloadable_layers = hparams.n_layer + 1; -#elif GGML_USE_CLBLAST - const int max_backend_supported_layers = hparams.n_layer + 1; - const int max_offloadable_layers = hparams.n_layer + 1; -#endif // GGML_USE_CUBLAS LLAMA_LOG_INFO("%s: offloaded %d/%d layers to GPU\n", __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers); - LLAMA_LOG_INFO("%s: VRAM used: %.2f MiB\n", __func__, vram_weights / 1024.0 / 1024.0); -#else - (void) n_gpu_layers; #endif // defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) } - // populate `tensors_by_name` +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) + ggml_cuda_set_tensor_split(tensor_split); +#else + GGML_UNUSED(tensor_split); +#endif // GGML_USE_CUBLAS + + // populate tensors_by_name for (int i = 0; i < ml.n_tensors; ++i) { struct ggml_tensor * cur = ggml_get_tensor(ctx, ml.get_tensor_name(i)); model.tensors_by_name.emplace_back(ggml_get_name(cur), cur); } - (void) tensor_split; -#ifdef GGML_USE_CUBLAS - { - ggml_cuda_set_tensor_split(tensor_split); - } -#endif - - ml.load_all_data(ctx, progress_callback, progress_callback_user_data, use_mlock ? &model.mlock_mmap : NULL); - - if (progress_callback) { - progress_callback(1.0f, progress_callback_user_data); - } + ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, use_mlock ? 
&model.mlock_mmap : NULL); model.mapping = std::move(ml.mapping); @@ -4211,7 +4155,7 @@ struct llm_build_context { const llm_build_cb & cb; - llama_buffer & buf_compute; + std::vector<uint8_t> & buf_compute_meta; struct ggml_context * ctx0 = nullptr; @@ -4221,35 +4165,35 @@ struct llm_build_context { const llama_batch & batch, const llm_build_cb & cb, bool worst_case) : - model (lctx.model), - hparams (model.hparams), - cparams (lctx.cparams), - batch (batch), - kv_self (lctx.kv_self), - n_embd (hparams.n_embd), - n_layer (hparams.n_layer), - n_ctx (cparams.n_ctx), - n_head (hparams.n_head), - n_head_kv (hparams.n_head_kv), - n_embd_head (hparams.n_embd_head()), - n_embd_gqa (hparams.n_embd_gqa()), - n_expert (hparams.n_expert), - n_expert_used (hparams.n_expert_used), - freq_base (cparams.rope_freq_base), - freq_scale (cparams.rope_freq_scale), - ext_factor (cparams.yarn_ext_factor), - attn_factor (cparams.yarn_attn_factor), - beta_fast (cparams.yarn_beta_fast), - beta_slow (cparams.yarn_beta_slow), - norm_eps (hparams.f_norm_eps), - norm_rms_eps (hparams.f_norm_rms_eps), - n_tokens (batch.n_tokens), - n_kv (worst_case ? n_ctx : kv_self.n), - kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), - n_orig_ctx (cparams.n_yarn_orig_ctx), - do_rope_shift (worst_case || kv_self.has_shift), - cb (cb), - buf_compute (lctx.buf_compute) { + model (lctx.model), + hparams (model.hparams), + cparams (lctx.cparams), + batch (batch), + kv_self (lctx.kv_self), + n_embd (hparams.n_embd), + n_layer (hparams.n_layer), + n_ctx (cparams.n_ctx), + n_head (hparams.n_head), + n_head_kv (hparams.n_head_kv), + n_embd_head (hparams.n_embd_head()), + n_embd_gqa (hparams.n_embd_gqa()), + n_expert (hparams.n_expert), + n_expert_used (hparams.n_expert_used), + freq_base (cparams.rope_freq_base), + freq_scale (cparams.rope_freq_scale), + ext_factor (cparams.yarn_ext_factor), + attn_factor (cparams.yarn_attn_factor), + beta_fast (cparams.yarn_beta_fast), + beta_slow (cparams.yarn_beta_slow), + norm_eps (hparams.f_norm_eps), + norm_rms_eps (hparams.f_norm_rms_eps), + n_tokens (batch.n_tokens), + n_kv (worst_case ? n_ctx : kv_self.n), + kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), + n_orig_ctx (cparams.n_yarn_orig_ctx), + do_rope_shift (worst_case || kv_self.has_shift), + cb (cb), + buf_compute_meta (lctx.buf_compute_meta) { GGML_ASSERT(!!kv_self.ctx); // all initializations should be done in init() @@ -4257,8 +4201,8 @@ struct llm_build_context { void init() { struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, + /*.mem_size =*/ buf_compute_meta.size(), + /*.mem_buffer =*/ buf_compute_meta.data(), /*.no_alloc =*/ true, }; @@ -5737,8 +5681,8 @@ static const std::unordered_map<const char *, llm_offload_func_e> k_offload_map { "pos_embd", OFFLOAD_FUNC_NR }, { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. 
rope) - { "Q_scale", OFFLOAD_FUNC_FRC }, - { "KQ_scale", OFFLOAD_FUNC_FRC }, + { "Q_scale", OFFLOAD_FUNC_NOP }, + { "KQ_scale", OFFLOAD_FUNC_NOP }, { "KQ_mask", OFFLOAD_FUNC_FRC }, { "K_shift", OFFLOAD_FUNC_FRC }, @@ -5845,7 +5789,7 @@ static struct ggml_cgraph * llama_build_graph( bool alloc_inp_KQ_mask = false; bool alloc_inp_K_shift = false; -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) const bool do_offload = true; #else const bool do_offload = true; // TODO: set to false after finishing refactoring @@ -5873,7 +5817,7 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc) && batch.token) { const int64_t n_tokens = cur->ne[0]; - memcpy(cur->data, batch.token, n_tokens*ggml_element_size(cur)); + ggml_backend_tensor_set(cur, batch.token, 0, n_tokens*ggml_element_size(cur)); } alloc_inp_tokens = true; @@ -5886,7 +5830,7 @@ static struct ggml_cgraph * llama_build_graph( const int64_t n_embd = cur->ne[0]; const int64_t n_tokens = cur->ne[1]; - memcpy(cur->data, batch.embd, n_tokens*n_embd*ggml_element_size(cur)); + ggml_backend_tensor_set(cur, batch.embd, 0, n_tokens*n_embd*ggml_element_size(cur)); } alloc_inp_embd = true; @@ -5898,11 +5842,8 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc) && batch.pos) { const int64_t n_tokens = cur->ne[0]; - int32_t * data = (int32_t *) cur->data; - - for (int i = 0; i < n_tokens; ++i) { - data[i] = batch.pos[i]; - } + static_assert(std::is_same<llama_pos, int32_t>::value, "llama_pos must be int32_t"); + ggml_backend_tensor_set(cur, batch.pos, 0, n_tokens*ggml_element_size(cur)); } alloc_inp_pos = true; @@ -5913,7 +5854,8 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc)) { const int64_t n_embd_head = model.hparams.n_embd_head(); - ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); + float f = 1.0f/sqrtf(float(n_embd_head)); + ggml_backend_tensor_set(cur, &f, 0, sizeof(f)); } alloc_inp_Q_scale = true; @@ -5924,13 +5866,15 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc)) { const int64_t n_embd_head = model.hparams.n_embd_head(); + float f; if (model.arch == LLM_ARCH_PHI2) { // with phi2, we scale the Q to avoid precision issues // ref: https://github.com/ml-explore/mlx-examples/blob/08e862336ade809bc37d1035f94b359e7d1a5152/phi2/phi2.py#L64-L66 - ggml_set_f32(cur, 1.0f); + f = 1.0f; } else { - ggml_set_f32(cur, 1.0f/sqrtf(float(n_embd_head))); + f = 1.0f/sqrtf(float(n_embd_head)); } + ggml_backend_tensor_set(cur, &f, 0, sizeof(f)); } alloc_inp_KQ_scale = true; @@ -5943,8 +5887,13 @@ static struct ggml_cgraph * llama_build_graph( const int64_t n_kv = cur->ne[0]; const int64_t n_tokens = cur->ne[1]; - float * data = (float *) cur->data; - memset(data, 0, ggml_nbytes(cur)); + float * data; + if (ggml_backend_buffer_is_host(cur->buffer)) { + data = (float *) cur->data; + } else { + lctx.buf_copy.resize(ggml_nbytes(cur)); + data = (float *) lctx.buf_copy.data(); + } for (int h = 0; h < 1; ++h) { for (int j = 0; j < n_tokens; ++j) { const llama_pos pos = batch.pos[j]; const llama_seq_id seq_id = batch.seq_id[j][0]; for (int i = 0; i < n_kv; ++i) { + float f; if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { - data[h*(n_kv*n_tokens) + j*n_kv + i] = -INFINITY; + f = -INFINITY; + } else { + f = 0; } + data[h*(n_kv*n_tokens) + j*n_kv + i] = f; } } } + + if (data != cur->data) { + ggml_backend_tensor_set(cur, data, 0, ggml_nbytes(cur)); + } } alloc_inp_KQ_mask = true; @@ -5969,11 +5926,21 @@ static struct ggml_cgraph * llama_build_graph( if (!ggml_allocr_is_measure(lctx.alloc)) { const int64_t n_ctx = cur->ne[0]; - int32_t * data = (int32_t *) cur->data; + int32_t * data; + if (ggml_backend_buffer_is_host(cur->buffer)) { + data = (int32_t *) cur->data; + } else { + lctx.buf_copy.resize(ggml_nbytes(cur)); + data = (int32_t *) lctx.buf_copy.data(); + } for (int i = 0; i < n_ctx; ++i) { data[i] = lctx.kv_self.cells[i].delta; } + + if (data != cur->data) { + ggml_backend_tensor_set(cur, data, 0, ggml_nbytes(cur)); + } } alloc_inp_K_shift = true;
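[Editor's aside, not part of the patch: the KQ_mask and K_shift hunks above share one staging pattern. When the tensor's buffer is host-visible, the code fills cur->data in place; otherwise it builds the values in the lctx.buf_copy scratch vector and uploads them with ggml_backend_tensor_set. A minimal sketch of the pattern, assuming a tensor cur and a std::vector<uint8_t> buf_copy:

    int32_t * data;
    if (ggml_backend_buffer_is_host(cur->buffer)) {
        data = (int32_t *) cur->data;        // fill the tensor directly
    } else {
        buf_copy.resize(ggml_nbytes(cur));
        data = (int32_t *) buf_copy.data();  // fill a host-side staging buffer
    }
    // ... write the values into data ...
    if ((void *) data != cur->data) {
        ggml_backend_tensor_set(cur, data, 0, ggml_nbytes(cur)); // upload staged bytes
    }
]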
@@ -6010,7 +5977,7 @@ static struct ggml_cgraph * llama_build_graph( static const std::unordered_map<llm_offload_func_e, std::string, std::hash<int>> k_offload_func_name = { { OFFLOAD_FUNC_NOP, "CPU" }, { OFFLOAD_FUNC_OUT, "CPU" }, -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) { OFFLOAD_FUNC, "GPU (CUDA)" }, { OFFLOAD_FUNC_FRC, "GPU (CUDA) FRC" }, { OFFLOAD_FUNC_KQV, "GPU (CUDA) KQV" }, @@ -6083,7 +6050,7 @@ static struct ggml_cgraph * llama_build_graph( offload_func_t func = ggml_offload_nop; // this is needed for compatibility with Metal for example -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) static offload_func_t ggml_offload_gpu = ggml_cuda_assign_buffers_no_alloc; #else static offload_func_t ggml_offload_gpu = ggml_offload_nop; @@ -6305,11 +6272,12 @@ static int llama_decode_internal( GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); } -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) + char * buf_alloc_base = (char *)ggml_backend_buffer_get_base(lctx.buf_alloc); for (int i = 0; i < gf->n_leafs; i++) { ggml_tensor * node = gf->leafs[i]; if (node->backend == GGML_BACKEND_GPU && node->extra == NULL) { - ggml_cuda_assign_scratch_offset(node, (char*)node->data - (char *) lctx.buf_alloc.data); + ggml_cuda_assign_scratch_offset(node, (char *)node->data - buf_alloc_base); ggml_cuda_copy_to_device(node); } } @@ -6317,7 +6285,7 @@ static int llama_decode_internal( for (int i = 0; i < gf->n_nodes; i++) { ggml_tensor * node = gf->nodes[i]; if (node->backend == GGML_BACKEND_GPU && node->extra == NULL) { - ggml_cuda_assign_scratch_offset(node, (char*)node->data - (char *) lctx.buf_alloc.data); + ggml_cuda_assign_scratch_offset(node, (char *)node->data - buf_alloc_base); } } @@ -6344,23 +6312,23 @@ static int llama_decode_internal( n_threads = 1; } -#if GGML_USE_MPI +#ifdef GGML_USE_MPI const int64_t n_layer = hparams.n_layer; ggml_mpi_graph_compute_pre(lctx.ctx_mpi, gf, n_layer); #endif #ifdef GGML_USE_METAL - if (lctx.ctx_metal) { - ggml_metal_set_n_cb (lctx.ctx_metal, n_threads); - ggml_metal_graph_compute(lctx.ctx_metal, gf); - } else { - ggml_graph_compute_helper(lctx.work_buffer, gf, n_threads); + if (ggml_backend_is_metal(lctx.backend)) { + ggml_backend_metal_set_n_cb(lctx.backend, n_threads); } -#else - ggml_graph_compute_helper(lctx.work_buffer, gf, n_threads); #endif -#if GGML_USE_MPI + if (ggml_backend_is_cpu(lctx.backend)) { + ggml_backend_cpu_set_n_threads(lctx.backend, n_threads); + } + ggml_backend_graph_compute(lctx.backend, gf); + +#ifdef GGML_USE_MPI ggml_mpi_graph_compute_post(lctx.ctx_mpi, gf, n_layer); #endif @@ -6412,20 +6380,20 @@ static int llama_decode_internal( if (batch.logits[i] == 0) { continue; } - memcpy(logits_out.data() + (n_vocab*i), (float *) ggml_get_data(res) + (n_vocab*i), sizeof(float)*n_vocab); + ggml_backend_tensor_get(res, logits_out.data() + (n_vocab*i), (n_vocab*i)*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[i] = true; #endif } } else if (lctx.logits_all) { logits_out.resize(n_vocab * n_tokens); - memcpy(logits_out.data(), (float *) ggml_get_data(res), sizeof(float)*n_vocab*n_tokens); + ggml_backend_tensor_get(res, logits_out.data(), 0, n_vocab*n_tokens*sizeof(float)); #ifndef NDEBUG std::fill(logits_valid.begin(), logits_valid.end(), true); #endif } else { logits_out.resize(n_vocab); - memcpy(logits_out.data(), (float *) ggml_get_data(res) + (n_vocab*(n_tokens - 1)), sizeof(float)*n_vocab); + ggml_backend_tensor_get(res, logits_out.data(), (n_vocab*(n_tokens - 1))*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[0] = true; #endif @@ -6437,7 +6405,7 @@ static int llama_decode_internal( auto & embedding_out = lctx.embedding; embedding_out.resize(n_embd); - memcpy(embedding_out.data(), (float *) ggml_get_data(embeddings) + (n_embd*(n_tokens - 1)), sizeof(float)*n_embd); + ggml_backend_tensor_get(embeddings, embedding_out.data(), (n_embd*(n_tokens - 1))*sizeof(float), n_embd*sizeof(float)); }
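[Editor's aside, not part of the patch: ggml_backend_tensor_get and ggml_backend_tensor_set copy a byte range out of, or into, a tensor whose data may live in device memory, which is why they replace the raw memcpy calls on the logits and embeddings above. The arguments are (tensor, host pointer, byte offset, byte count). A minimal sketch, assuming res is an F32 tensor holding at least row*n + n elements:

    std::vector<float> out(n);
    // read n floats starting at element row*n, regardless of where res lives
    ggml_backend_tensor_get(res, out.data(), (row*n)*sizeof(float), n*sizeof(float));
]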
// measure the performance only for the single-token evals @@ -8395,12 +8363,6 @@ void llama_beam_search(llama_context * ctx, // quantization // -template <typename T> -struct no_init { - T value; - no_init() { /* do nothing */ } -}; - struct quantize_state_internal { const llama_model & model; const llama_model_quantize_params * params; @@ -8643,9 +8605,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s #endif llama_model_loader ml(fname_inp, use_mmap, NULL); - if (ml.use_mmap) { - ml.mapping.reset(new llama_mmap(&ml.file, /* prefetch */ 0, ggml_is_numa())); - } + ml.init_mapping(false); // no prefetching? llama_model model; llm_load_arch(ml, model); @@ -8944,29 +8904,10 @@ static int llama_apply_lora_from_file_internal( // load base model std::unique_ptr<llama_model_loader> ml; - unique_context base_ctx(nullptr, ggml_free); - std::vector<uint8_t> base_buf; - if (path_base_model) { + if (path_base_model) { LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model); - ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*kv_overrides*/ NULL)); - - size_t ctx_size; - size_t mmapped_size; - ml->calc_sizes(ctx_size, mmapped_size); - - base_buf.resize(ctx_size); - - ggml_init_params base_params; - base_params.mem_size = base_buf.size(); - base_params.mem_buffer = base_buf.data(); - base_params.no_alloc = ml->use_mmap; - - base_ctx.reset(ggml_init(base_params)); - - // maybe this should be in llama_model_loader - if (ml->use_mmap) { - ml->mapping.reset(new llama_mmap(&ml->file, /* prefetch */ 0, ggml_is_numa())); - } + ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*kv_overrides*/ nullptr)); + ml->init_mapping(false); // no prefetching } // read tensors and apply @@ -9058,7 +8999,7 @@ static int llama_apply_lora_from_file_internal( offload_func_t offload_func = ggml_offload_nop; offload_func_t offload_func_force_inplace = ggml_offload_nop; -#ifdef GGML_USE_CUBLAS +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (dest_t->backend == GGML_BACKEND_GPU || dest_t->backend == GGML_BACKEND_GPU_SPLIT) { if (dest_t->type != GGML_TYPE_F16) { throw std::runtime_error(format( @@ -9079,7 +9020,7 @@ static int llama_apply_lora_from_file_internal( return 1; } - base_t = ml->create_tensor(base_ctx.get(), base_name, { dest_t->ne[0], dest_t->ne[1] }, GGML_BACKEND_CPU); + base_t = ml->get_tensor_meta(base_name.c_str());
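// [Editor's aside, not part of the patch: with the loader's mmap-based
// init_mapping() above, get_tensor_meta() only looks the tensor up by name;
// the load_data_for() call on the next line is what makes base_t->data valid,
// by pointing it into the file mapping (or reading the bytes when not mmapped).]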
ml->load_data_for(base_t); } else { base_t = dest_t; @@ -9364,7 +9305,39 @@ struct llama_context * llama_new_context_with_model( // reserve memory for context buffers if (!hparams.vocab_only) { - if (!llama_kv_cache_init(ctx->model.hparams, ctx->kv_self, type_k, type_v, cparams.n_ctx, model->n_gpu_layers, cparams.offload_kqv)) { + // initialize backend +#ifdef GGML_USE_METAL + if (model->n_gpu_layers > 0) { + ctx->backend = ggml_backend_metal_init(); + if (ctx->backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize Metal backend\n", __func__); + } + } +#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) + // for testing only + if (model->n_gpu_layers > 0) { + ctx->backend = ggml_backend_cuda_init(0); + if (ctx->backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CUDA backend\n", __func__); + } + } +#endif + + if (ctx->backend == nullptr && ggml_backend_buffer_is_host(model->buf)) { + ctx->backend = ggml_backend_cpu_init(); + if (ctx->backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CPU backend\n", __func__); + } + } + + if (ctx->backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize a backend\n", __func__); + delete ctx; + return nullptr; + } + + if (!llama_kv_cache_init(ctx->model.hparams, ctx->kv_self, type_k, type_v, + cparams.n_ctx, model->n_gpu_layers, cparams.offload_kqv)) { LLAMA_LOG_ERROR("%s: llama_kv_cache_init() failed for self-attention cache\n", __func__); llama_free(ctx); return nullptr; @@ -9400,12 +9373,11 @@ struct llama_context * llama_new_context_with_model( } { - static const size_t tensor_alignment = 32; // the compute buffer is used to store the tensor and graph structs, while the allocator buffer is used for the tensor data - ctx->buf_compute.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); + ctx->buf_compute_meta.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); // create measure allocator - ctx->alloc = ggml_allocr_new_measure(tensor_alignment); + ctx->alloc = ggml_allocr_new_measure_from_backend(ctx->backend); // build worst-case graph int n_tokens = (int)std::min(cparams.n_ctx, cparams.n_batch); @@ -9413,98 +9385,50 @@ struct llama_context * llama_new_context_with_model( llama_token token = llama_token_bos(&ctx->model); // not actually used by llama_build_graph, but required to choose between token and embedding inputs graph ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0)); -#ifdef GGML_USE_METAL - if (model->n_gpu_layers > 0) { - ctx->ctx_metal = ggml_metal_init(1); - if (!ctx->ctx_metal) { - LLAMA_LOG_ERROR("%s: ggml_metal_init() failed\n", __func__); - llama_free(ctx); - return NULL; - } - //ggml_metal_graph_find_concurrency(ctx->ctx_metal, gf, false); - //ggml_allocr_set_parse_seq(ctx->alloc, ggml_metal_get_concur_list(ctx->ctx_metal), ggml_metal_if_optimized(ctx->ctx_metal)); - } -#endif // measure memory requirements for the graph - size_t alloc_size = ggml_allocr_alloc_graph(ctx->alloc, gf) + tensor_alignment; + size_t alloc_size = ggml_allocr_alloc_graph(ctx->alloc, gf); - LLAMA_LOG_INFO("%s: compute buffer total size = %.2f MiB\n", __func__, (ctx->buf_compute.size + alloc_size) / 1024.0 / 1024.0); + LLAMA_LOG_INFO("%s: compute buffer total size = %.2f MiB\n", __func__, (ctx->buf_compute_meta.size() + alloc_size) / 1024.0 / 1024.0); - // recreate allocator with exact memory requirements + // create allocator again with exact memory requirements ggml_allocr_free(ctx->alloc); - 
ctx->buf_alloc.resize(alloc_size); - ctx->alloc = ggml_allocr_new(ctx->buf_alloc.data, ctx->buf_alloc.size, tensor_alignment); -#ifdef GGML_USE_METAL - if (ctx->ctx_metal) { - //ggml_allocr_set_parse_seq(ctx->alloc, ggml_metal_get_concur_list(ctx->ctx_metal), ggml_metal_if_optimized(ctx->ctx_metal)); - } -#endif -#ifdef GGML_USE_CUBLAS - ggml_cuda_set_scratch_size(alloc_size); - LLAMA_LOG_INFO("%s: VRAM scratch buffer: %.2f MiB\n", __func__, alloc_size / 1024.0 / 1024.0); + ctx->buf_alloc = ggml_backend_alloc_buffer(ctx->backend, alloc_size); + ctx->alloc = ggml_allocr_new_from_buffer(ctx->buf_alloc); +#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) + if (model->n_gpu_layers > 0) { + ggml_cuda_set_scratch_size(alloc_size); + LLAMA_LOG_INFO("%s: VRAM scratch buffer: %.2f MiB\n", __func__, alloc_size / 1024.0 / 1024.0); - // calculate total VRAM usage - auto add_tensor = [](const ggml_tensor * t, size_t & size) { - if (t->backend == GGML_BACKEND_GPU || t->backend == GGML_BACKEND_GPU_SPLIT) { - size += ggml_nbytes(t); + // calculate total VRAM usage + auto add_tensor = [](const ggml_tensor * t, size_t & size) { + if (t->backend == GGML_BACKEND_GPU || t->backend == GGML_BACKEND_GPU_SPLIT) { + size += ggml_nbytes(t); + } + }; + size_t model_vram_size = 0; + for (const auto & kv : model->tensors_by_name) { + add_tensor(kv.second, model_vram_size); } - }; - size_t model_vram_size = 0; - for (const auto & kv : model->tensors_by_name) { - add_tensor(kv.second, model_vram_size); - } - size_t kv_vram_size = 0; - for (auto & k : ctx->kv_self.k_l) { - add_tensor(k, kv_vram_size); - } - for (auto & v : ctx->kv_self.v_l) { - add_tensor(v, kv_vram_size); - } + size_t kv_vram_size = 0; + for (auto & k : ctx->kv_self.k_l) { + add_tensor(k, kv_vram_size); + } + for (auto & v : ctx->kv_self.v_l) { + add_tensor(v, kv_vram_size); + } - size_t ctx_vram_size = alloc_size + kv_vram_size; - size_t total_vram_size = model_vram_size + ctx_vram_size; + size_t ctx_vram_size = alloc_size + kv_vram_size; + size_t total_vram_size = model_vram_size + ctx_vram_size; - LLAMA_LOG_INFO("%s: total VRAM used: %.2f MiB (model: %.2f MiB, context: %.2f MiB)\n", __func__, - total_vram_size / 1024.0 / 1024.0, - model_vram_size / 1024.0 / 1024.0, - ctx_vram_size / 1024.0 / 1024.0); + LLAMA_LOG_INFO("%s: total VRAM used: %.2f MiB (model: %.2f MiB, context: %.2f MiB)\n", __func__, + total_vram_size / 1024.0 / 1024.0, + model_vram_size / 1024.0 / 1024.0, + ctx_vram_size / 1024.0 / 1024.0); + } #endif } - -#ifdef GGML_USE_METAL - if (model->n_gpu_layers > 0) { - // this allocates all Metal resources and memory buffers - - void * data_ptr = NULL; - size_t data_size = 0; - - if (ctx->model.mapping) { - data_ptr = ctx->model.mapping->addr; - data_size = ctx->model.mapping->size; - } else { - data_ptr = ggml_get_mem_buffer(ctx->model.ctx); - data_size = ggml_get_mem_size (ctx->model.ctx); - } - - const size_t max_size = ggml_get_max_tensor_size(ctx->model.ctx); - - LLAMA_LOG_INFO("%s: max tensor size = %8.2f MiB\n", __func__, max_size/1024.0/1024.0); - -#define LLAMA_METAL_CHECK_BUF(result) \ - if (!(result)) { \ - LLAMA_LOG_ERROR("%s: failed to add buffer\n", __func__); \ - llama_free(ctx); \ - return NULL; \ - } - - LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "data", data_ptr, data_size, max_size)); - LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "kv", ctx->kv_self.buf.data, ctx->kv_self.buf.size, 0)); - LLAMA_METAL_CHECK_BUF(ggml_metal_add_buffer(ctx->ctx_metal, "alloc", 
ctx->buf_alloc.data, ctx->buf_alloc.size, 0)); -#undef LLAMA_METAL_CHECK_BUF - } -#endif } #ifdef GGML_USE_MPI @@ -9796,7 +9720,7 @@ size_t llama_get_state_size(const struct llama_context * ctx) { const size_t s_embedding = ctx->embedding.size() * sizeof(float); const size_t s_kv_size = sizeof(size_t); const size_t s_kv_ntok = sizeof(int); - const size_t s_kv = ctx->kv_self.buf.size; + const size_t s_kv = ggml_backend_buffer_get_size(ctx->kv_self.buf); const size_t s_total = ( + s_rng_size @@ -9924,7 +9848,7 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat const auto n_embd = hparams.n_embd_gqa(); const auto n_ctx = cparams.n_ctx; - const size_t kv_buf_size = kv_self.buf.size; + const size_t kv_buf_size = ggml_backend_buffer_get_size(kv_self.buf); const uint32_t kv_head = kv_self.head; const uint32_t kv_size = kv_self.size; const uint32_t kv_used = kv_self.used; @@ -9940,17 +9864,12 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); ggml_cgraph * gf = ggml_new_graph(cpy_ctx); - std::vector<std::vector<uint8_t>> kout2d_data(n_layer); - std::vector<std::vector<uint8_t>> vout2d_data(n_layer); + std::vector<struct ggml_tensor *> kout2d(n_layer); + std::vector<struct ggml_tensor *> vout2d(n_layer); for (int il = 0; il < (int) n_layer; ++il) { - ggml_tensor * kout2d = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); - kout2d_data[il].resize(ggml_nbytes(kout2d)); - kout2d->data = kout2d_data[il].data(); - - ggml_tensor * vout2d = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); - vout2d_data[il].resize(ggml_nbytes(vout2d)); - vout2d->data = vout2d_data[il].data(); + kout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); + vout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], n_embd, kv_head, @@ -9960,20 +9879,28 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat kv_head, n_embd, elt_size*n_ctx, 0); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d)); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v2d, vout2d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d[il])); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v2d, vout2d[il])); } - ggml_graph_compute_helper(ctx->work_buffer, gf, /*n_threads*/ 1); + ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cpy_ctx, ctx->backend); + + ggml_backend_graph_compute(ctx->backend, gf); + + std::vector<uint8_t> tmp_buf; + for (int il = 0; il < (int) n_layer; ++il) { + tmp_buf.resize(ggml_nbytes(kout2d[il])); + ggml_backend_tensor_get(kout2d[il], tmp_buf.data(), 0, tmp_buf.size()); + data_ctx->write(tmp_buf.data(), tmp_buf.size()); + + tmp_buf.resize(ggml_nbytes(vout2d[il])); + ggml_backend_tensor_get(vout2d[il], tmp_buf.data(), 0, tmp_buf.size()); + data_ctx->write(tmp_buf.data(), tmp_buf.size()); + } ggml_free(cpy_ctx); - // our data is now in the kout2d_data and vout2d_data buffers - // write them to file - for (uint32_t il = 0; il < n_layer; ++il) { - data_ctx->write(kout2d_data[il].data(), kout2d_data[il].size()); - data_ctx->write(vout2d_data[il].data(), vout2d_data[il].size()); - } + ggml_backend_buffer_free(buf); }
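[Editor's aside, not part of the patch: the save path above and the load path below now share one pattern, build a no_alloc ggml context with staging tensors, back them with a backend buffer, run a small copy graph on the backend, and move the raw bytes with ggml_backend_tensor_get/set. A minimal sketch of the save direction, assuming a ggml_backend_t backend and a source tensor src from another context:

    struct ggml_init_params ip = { ggml_tensor_overhead()*8 + ggml_graph_overhead(), NULL, /*no_alloc*/ true };
    struct ggml_context * cctx = ggml_init(ip);
    struct ggml_cgraph  * gf   = ggml_new_graph(cctx);
    struct ggml_tensor  * dst  = ggml_new_tensor_2d(cctx, src->type, src->ne[0], src->ne[1]);
    ggml_build_forward_expand(gf, ggml_cpy(cctx, src, dst));
    ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cctx, backend); // allocates dst
    ggml_backend_graph_compute(backend, gf);
    std::vector<uint8_t> out(ggml_nbytes(dst));
    ggml_backend_tensor_get(dst, out.data(), 0, out.size()); // copy result to host
    ggml_backend_buffer_free(buf);
    ggml_free(cctx);
]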
for (uint32_t i = 0; i < kv_size; ++i) { @@ -10071,21 +9998,19 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { memcpy(&kv_used, inp, sizeof(kv_used)); inp += sizeof(kv_used); if (kv_buf_size) { - GGML_ASSERT(kv_self.buf.size == kv_buf_size); + GGML_ASSERT(ggml_backend_buffer_get_size(kv_self.buf) == kv_buf_size); const size_t elt_size = ggml_element_size(kv_self.k_l[0]); ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); ggml_cgraph * gf = ggml_new_graph(cpy_ctx); - for (int il = 0; il < n_layer; ++il) { - ggml_tensor * kin2d = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); - kin2d->data = (void *) inp; - inp += ggml_nbytes(kin2d); + std::vector<struct ggml_tensor *> kin2d(n_layer); + std::vector<struct ggml_tensor *> vin2d(n_layer); - ggml_tensor * vin2d = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); - vin2d->data = (void *) inp; - inp += ggml_nbytes(vin2d); + for (int il = 0; il < n_layer; ++il) { + kin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); + vin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], n_embd, kv_head, @@ -10095,13 +10020,26 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { kv_head, n_embd, elt_size*n_ctx, 0); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d, k2d)); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin2d, v2d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d[il], k2d)); + ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin2d[il], v2d)); } - ggml_graph_compute_helper(ctx->work_buffer, gf, /*n_threads*/ 1); + ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cpy_ctx, ctx->backend); + + // load data into the tensors + for (int il = 0; il < n_layer; ++il) { + ggml_backend_tensor_set(kin2d[il], inp, 0, ggml_nbytes(kin2d[il])); + inp += ggml_nbytes(kin2d[il]); + + ggml_backend_tensor_set(vin2d[il], inp, 0, ggml_nbytes(vin2d[il])); + inp += ggml_nbytes(vin2d[il]); + } + + ggml_backend_graph_compute(ctx->backend, gf); ggml_free(cpy_ctx); + + ggml_backend_buffer_free(buf); } ctx->kv_self.head = kv_head; From 4a5f9d629ecfd0a53afdddbaf54a4fa02d9a9ce9 Mon Sep 17 00:00:00 2001 From: Samuel Maynard Date: Thu, 21 Dec 2023 22:36:26 +0200 Subject: [PATCH 277/859] ci : add `jlumbroso/free-disk-space` to docker workflow (#4150) * [github][workflows][docker]: removes hardcoded `ggerganov` from `ghcr` repo * [github][workflows][docker]: adds `jlumbroso/free-disk-space` --- .github/workflows/docker.yml | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 9c90c77ac..a7165a38f 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -52,6 +52,23 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} + # https://github.com/jlumbroso/free-disk-space/tree/54081f138730dfa15788a46383842cd2f914a1be#example + - name: Free Disk Space (Ubuntu) + uses: jlumbroso/free-disk-space@main + with: + # this might remove tools that are actually needed, + # if set to "true" but frees about 6 GB + tool-cache: false + + # all of these default to true, but feel free to set to + # "false" if necessary for your workflow + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true + - name: Build and push Docker image (versioned) if: github.event_name == 'push' uses: docker/build-push-action@v4 with: context: .
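          # [Editor's aside, not part of the patch: with the templated tags below,
          #  a fork owned by, say, user "alice" publishes to ghcr.io/alice/llama.cpp
          #  instead of the previously hardcoded ghcr.io/ggerganov/llama.cpp.]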
push: true platforms: ${{ matrix.config.platforms }} - tags: "ghcr.io/ggerganov/llama.cpp:${{ matrix.config.tag }}-${{ env.COMMIT_SHA }}" + tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}-${{ env.COMMIT_SHA }}" file: ${{ matrix.config.dockerfile }} - name: Build and push Docker image (tagged) @@ -68,5 +85,5 @@ jobs: context: . push: ${{ github.event_name == 'push' }} platforms: ${{ matrix.config.platforms }} - tags: "ghcr.io/ggerganov/llama.cpp:${{ matrix.config.tag }}" + tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}" file: ${{ matrix.config.dockerfile }} From 32259b2dade6f6856739bf7ba0a4ff7b474dc760 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 21 Dec 2023 23:07:58 +0200 Subject: [PATCH 278/859] gguf : simplify example dependencies --- Makefile | 2 +- examples/gguf/CMakeLists.txt | 2 +- examples/gguf/gguf.cpp | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 512407a1d..68df7702a 100644 --- a/Makefile +++ b/Makefile @@ -606,7 +606,7 @@ save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(C server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) -Wno-cast-qual -gguf: examples/gguf/gguf.cpp ggml.o llama.o $(OBJS) +gguf: examples/gguf/gguf.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) diff --git a/examples/gguf/CMakeLists.txt b/examples/gguf/CMakeLists.txt index 7d1806af3..6481f087b 100644 --- a/examples/gguf/CMakeLists.txt +++ b/examples/gguf/CMakeLists.txt @@ -1,5 +1,5 @@ set(TARGET gguf) add_executable(${TARGET} gguf.cpp) install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TARGET} PRIVATE llama ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE ggml ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/gguf/gguf.cpp b/examples/gguf/gguf.cpp index 9e24bf24c..e67be4fb2 100644 --- a/examples/gguf/gguf.cpp +++ b/examples/gguf/gguf.cpp @@ -1,5 +1,4 @@ #include "ggml.h" -#include "llama.h" #include #include From 769a7bc85eaa44e3d7eadf39abfeff7bb0b9cc2f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 21 Dec 2023 23:20:36 +0200 Subject: [PATCH 279/859] gguf-py : fix broken link --- gguf-py/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gguf-py/README.md b/gguf-py/README.md index a27d2fc0e..22d7ffa52 100644 --- a/gguf-py/README.md +++ b/gguf-py/README.md @@ -3,7 +3,7 @@ This is a Python package for writing binary files in the [GGUF](https://github.com/ggerganov/ggml/pull/302) (GGML Universal File) format. -See [convert-llama-hf-to-gguf.py](https://github.com/ggerganov/llama.cpp/blob/master/convert-llama-hf-to-gguf.py) +See [convert-llama-hf-to-gguf.py](https://github.com/ggerganov/llama.cpp/blob/master/convert-hf-to-gguf.py) as an example for its usage. 
## Installation From afefa319f1f59b002dfa0d1ef407a2c74bd9770b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 21 Dec 2023 23:20:49 +0200 Subject: [PATCH 280/859] ggml : change ggml_scale to take a float instead of tensor (#4573) * ggml : change ggml_scale to take a float instead of tensor * ggml : fix CPU implementation * tests : fix test-grad0 ggml-ci --- examples/baby-llama/baby-llama.cpp | 15 +-- examples/export-lora/export-lora.cpp | 2 +- examples/finetune/finetune.cpp | 42 +++---- examples/llava/clip.cpp | 8 +- .../train-text-from-scratch.cpp | 14 +-- ggml-cuda.cu | 14 +-- ggml-metal.m | 6 +- ggml.c | 42 +++---- ggml.h | 4 +- llama.cpp | 119 +++--------------- tests/test-backend-ops.cpp | 9 +- tests/test-grad0.cpp | 12 +- 12 files changed, 82 insertions(+), 205 deletions(-) diff --git a/examples/baby-llama/baby-llama.cpp b/examples/baby-llama/baby-llama.cpp index 2dc2988d3..e7d2ad592 100644 --- a/examples/baby-llama/baby-llama.cpp +++ b/examples/baby-llama/baby-llama.cpp @@ -575,10 +575,7 @@ static struct ggml_tensor * forward( // KQ_scaled = KQ / sqrt(n_embd/n_head) // KQ_scaled shape [n_past + N, N, n_head, 1] - struct ggml_tensor * KQ_scaled = - ggml_scale(ctx0, - KQ, - ggml_new_f32(ctx0, 1.0f/sqrtf(float(n_embd)/n_head))); + struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, 1.0f/sqrtf(float(n_embd)/n_head)); // KQ_masked = mask_past(KQ_scaled) // KQ_masked shape [n_past + N, N, n_head, 1] @@ -844,10 +841,7 @@ static struct ggml_tensor * forward_batch( // KQ_scaled = KQ / sqrt(n_embd/n_head) // KQ_scaled shape [n_past + N, N, n_head, n_batch] - struct ggml_tensor * KQ_scaled = - ggml_scale(ctx0, - KQ, - ggml_new_f32(ctx0, 1.0f/sqrtf(float(n_embd)/n_head))); + struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, 1.0f/sqrtf(float(n_embd)/n_head)); assert_shape_4d(KQ_scaled, n_past + N, N, n_head, n_batch); // KQ_masked = mask_past(KQ_scaled) @@ -1131,10 +1125,7 @@ static struct ggml_tensor * forward_lora( // KQ_scaled = KQ / sqrt(n_embd/n_head) // KQ_scaled shape [n_past + N, N, n_head, 1] - struct ggml_tensor * KQ_scaled = - ggml_scale(ctx0, - KQ, - ggml_new_f32(ctx0, 1.0f/sqrtf(float(n_embd)/n_head))); + struct ggml_tensor * KQ_scaled = ggml_scale(ctx0, KQ, 1.0f/sqrtf(float(n_embd)/n_head)); // KQ_masked = mask_past(KQ_scaled) // KQ_masked shape [n_past + N, N, n_head, 1] diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index c8754ce70..58fbe204d 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -309,7 +309,7 @@ static struct ggml_cgraph * build_graph_lora( ) { struct ggml_tensor * ab = ggml_mul_mat(ctx, lora_a, lora_b); if (scaling != 1.0f) { - ab = ggml_scale(ctx, ab, ggml_new_f32(ctx, scaling)); + ab = ggml_scale(ctx, ab, scaling); } struct ggml_tensor * res = ggml_add_inplace(ctx, tensor, ab); diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index 6a668d764..7b1333a9d 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -269,7 +269,7 @@ static void load_model_hparams_gguf(struct gguf_context * ctx, struct my_llama_h float rope_freq_scale = 1.0f; GGUF_GET_KEY(ctx, hparams->f_norm_rms_eps, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS)); GGUF_GET_KEY(ctx, hparams->rope_freq_base, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_FREQ_BASE)); - GGUF_GET_KEY(ctx, rope_freq_scale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALE_LINEAR)); + GGUF_GET_KEY(ctx, 
rope_freq_scale, gguf_get_val_f32, GGUF_TYPE_FLOAT32, false, kv(LLM_KV_ROPE_SCALE_LINEAR)); if (rope_freq_scale != 1.0f) { hparams->rope_freq_scale = 1.0f / rope_freq_scale; } @@ -612,6 +612,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( const int n_rot = hparams.n_embd_head(); const int n_embd_head = hparams.n_embd_head(); const int n_embd_gqa = hparams.n_embd_gqa(); + const float rms_norm_eps = hparams.f_norm_rms_eps; const float rope_freq_base = hparams.rope_freq_base; const float rope_freq_scale = hparams.rope_freq_scale; @@ -680,10 +681,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( checkpoints.push_back(t01); } - struct ggml_tensor * kv_scale = NULL; - if (!enable_flash_attn) { - kv_scale = ggml_new_f32(ctx, 1.0f/sqrtf(float(n_embd)/n_head)); - } + const float kv_scale = 1.0f/sqrtf(float(n_embd)/n_head); for (int il = 0; il < n_layer; ++il) { struct my_llama_layer & layer = model->layers[il]; @@ -781,32 +779,32 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( // make sure some tensors are not reallocated by inserting new temporary nodes depending on them int n_leafs_before = gb->n_leafs; int n_nodes_before = gb->n_nodes; - struct ggml_tensor * one = ggml_new_f32(ctx, 1.0f); + // output tensors - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t35, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t35, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36, 1.0f)); // input gradient - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, 1.0f)); GGML_ASSERT(t36->grad->data == NULL && t36->grad->view_src == NULL); ggml_allocr_alloc(alloc, t36->grad); // KQ_pos - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, 1.0f)); // make sure base model tensors data cannot be used in viewable operations - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->tok_embeddings, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->norm, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->output, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->tok_embeddings, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->norm, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, model->output, 1.0f)); for (int il = 0; il < n_layer; ++il) { struct my_llama_layer & layer = model->layers[il]; - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.attention_norm, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_norm, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wq, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wk, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wv, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wo, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w1, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w2, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w3, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.attention_norm, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_norm, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wq, 1.0f)); + 
ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wk, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wv, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wo, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w1, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w2, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w3, 1.0f)); } // allocating checkpoints in one block to reduce memory fragmentation diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 112465968..f06ec400d 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -330,12 +330,6 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima ggml_repeat(ctx0, model.pre_ln_b, embeddings)); } - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - ggml_allocr_alloc(ctx->alloc, KQ_scale); - if (!ggml_allocr_is_measure(ctx->alloc)) { - ggml_set_f32(KQ_scale, 1.0f / sqrt((float)d_head)); - } - // loop over layers for (int il = 0; il < n_layer - 1; il++) { struct ggml_tensor * cur = embeddings; // embeddings = residual, cur = hidden_states @@ -356,7 +350,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima struct ggml_tensor * Q = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].q_b, cur), ggml_mul_mat(ctx0, model.layers[il].q_w, cur)); - Q = ggml_scale_inplace(ctx0, Q, KQ_scale); + Q = ggml_scale_inplace(ctx0, Q, 1.0f / sqrt((float)d_head)); Q = ggml_reshape_4d(ctx0, Q, d_head, n_head, num_positions, batch_size); Q = ggml_cont(ctx0, ggml_permute(ctx0, Q, 0, 2, 1, 3)); Q = ggml_reshape_3d(ctx0, Q, d_head, num_positions, n_head * batch_size); diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index f7ed63365..4a9a2340b 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -369,10 +369,7 @@ static struct ggml_tensor * llama_build_train_graphs( checkpoints.push_back(t00); checkpoints.push_back(t01); - struct ggml_tensor * kv_scale = NULL; - if (!enable_flash_attn) { - kv_scale = ggml_new_f32(ctx, 1.0f/sqrtf(float(n_embd)/n_head)); - } + const float kv_scale = 1.0f/sqrtf(float(n_embd)/n_head); for (int il = 0; il < n_layer; ++il) { struct my_llama_layer & layer = model->layers[il]; @@ -444,14 +441,13 @@ static struct ggml_tensor * llama_build_train_graphs( // make sure some tensors are not reallocated by inserting new temporary nodes depending on them int n_leafs_before = gb->n_leafs; int n_nodes_before = gb->n_nodes; - struct ggml_tensor * one = ggml_new_f32(ctx, 1.0f); // output tensors - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t35, one)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t35, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36, 1.0f)); // input gradient - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, 1.0f)); // KQ_pos - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, one)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, 1.0f)); GGML_ASSERT(t36->grad->data == NULL && t36->grad->view_src == NULL); ggml_allocr_alloc(alloc, t36->grad); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f5e060d32..ac91ee12e 100644 
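[Editor's aside, not part of the patch: every call-site change in this commit has the same before/after shape, the scale factor moves from a one-element tensor into a plain float stored in the op's parameters. A minimal sketch, assuming a graph-building context ctx0 and a tensor cur:

    // before this commit: the factor needed its own scalar tensor
    //   struct ggml_tensor * s = ggml_new_f32(ctx0, 1.0f/sqrtf(float(n_embd_head)));
    //   cur = ggml_scale(ctx0, cur, s);

    // after: pass the float directly; ggml stores it via ggml_set_op_params
    cur = ggml_scale(ctx0, cur, 1.0f/sqrtf(float(n_embd_head)));
]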
--- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7700,17 +7700,9 @@ inline void ggml_cuda_op_scale( const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); - GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - float scale; - // HACK: support for ggml backend interface - if (src1->backend == GGML_BACKEND_CPU) { - scale = ((float *) src1->data)[0]; - } else { - // TODO: pass pointer to kernel instead of copying to host - CUDA_CHECK(cudaMemcpy(&scale, src1->data, sizeof(float), cudaMemcpyDeviceToHost)); - } + const float scale = ((float *) dst->op_params)[0]; scale_f32_cuda(src0_dd, dst_dd, scale, ggml_nelements(src0), main_stream); CUDA_CHECK(cudaGetLastError()); @@ -7757,8 +7749,6 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_GPU; const bool dst_on_device = dst->backend == GGML_BACKEND_GPU; - const bool src1_stays_on_host = use_src1 && dst->op == GGML_OP_SCALE; - // dd = data device float * src0_ddf = nullptr; float * src1_ddf = nullptr; @@ -7779,7 +7769,7 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src0_ddf, src0, 0, 0, 0, nrows0, main_stream)); } - if (use_src1 && !src1_stays_on_host) { + if (use_src1) { if (src1_on_device) { src1_ddf = (float *) src1_extra->data_device[g_main_device]; } else { diff --git a/ggml-metal.m b/ggml-metal.m index e60b93b36..51a72ae33 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1293,7 +1293,7 @@ void ggml_metal_graph_compute( { GGML_ASSERT(ggml_is_contiguous(src0)); - const float scale = *(const float *) src1->data; + const float scale = *(const float *) dst->op_params; int64_t n = ggml_nelements(dst); @@ -1304,8 +1304,8 @@ void ggml_metal_graph_compute( [encoder setComputePipelineState:ctx->pipeline_scale]; } - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; diff --git a/ggml.c b/ggml.c index 236148514..f27920a2d 100644 --- a/ggml.c +++ b/ggml.c @@ -4171,23 +4171,23 @@ struct ggml_tensor * ggml_out_prod( static struct ggml_tensor * ggml_scale_impl( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b, + float s, bool inplace) { - GGML_ASSERT(ggml_is_scalar(b)); GGML_ASSERT(ggml_is_padded_1d(a)); bool is_node = false; - if (a->grad || b->grad) { + if (a->grad) { is_node = true; } struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + ggml_set_op_params(result, &s, sizeof(s)); + result->op = GGML_OP_SCALE; result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - result->src[1] = b; return result; } @@ -4195,15 +4195,15 @@ static struct ggml_tensor * ggml_scale_impl( struct ggml_tensor * ggml_scale( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b) { - return ggml_scale_impl(ctx, a, b, false); + float s) { + return ggml_scale_impl(ctx, a, s, false); } struct ggml_tensor * ggml_scale_inplace( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b) { - return ggml_scale_impl(ctx, a, b, true); + float s) { + return ggml_scale_impl(ctx, a, s, true); } // ggml_set @@ -10325,19 +10325,17 @@ static void ggml_compute_forward_out_prod( static void ggml_compute_forward_scale_f32( const struct ggml_compute_params * params, const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - GGML_ASSERT(ggml_is_scalar(src1)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } // scale factor - const float v = *(float *) src1->data; + const float v = *(float *) dst->op_params; const int ith = params->ith; const int nth = params->nth; @@ -10368,12 +10366,11 @@ static void ggml_compute_forward_scale_f32( static void ggml_compute_forward_scale( const struct ggml_compute_params * params, const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_scale_f32(params, src0, src1, dst); + ggml_compute_forward_scale_f32(params, src0, dst); } break; default: { @@ -14383,7 +14380,7 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm } break; case GGML_OP_SCALE: { - ggml_compute_forward_scale(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_scale(params, tensor->src[0], tensor); } break; case GGML_OP_SET: { @@ -14839,7 +14836,7 @@ static struct ggml_tensor * ggml_add_or_set(struct ggml_context * ctx, struct gg static struct ggml_tensor * ggml_acc_or_set(struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b, size_t nb1, size_t nb2, size_t nb3, size_t offset, struct ggml_hash_set zero_table) { if (ggml_hash_contains(zero_table, a)) { - struct ggml_tensor * a_zero = ggml_scale(ctx, a, ggml_new_f32(ctx, 0)); + struct ggml_tensor * a_zero = ggml_scale(ctx, a, 0.0f); return ggml_acc_impl(ctx, a_zero, b, nb1, nb2, nb3, offset, false); } else { return ggml_acc_impl(ctx, a, b, nb1, nb2, nb3, offset, false); @@ -14975,7 +14972,7 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor src0->grad, ggml_scale(ctx, ggml_mul(ctx, src0, tensor->grad), - ggml_new_f32(ctx, 2.0f)), + 2.0f), zero_table); } } break; @@ -14989,7 +14986,7 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor ggml_div(ctx, tensor->grad, tensor), - ggml_new_f32(ctx, 0.5f)), + 0.5f), zero_table); } } break; @@ -15155,17 +15152,12 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { // necessary for llama if (src0->grad) { + const float s = ((float *) tensor->op_params)[0]; + src0->grad = ggml_add_or_set(ctx, src0->grad, - ggml_scale_impl(ctx, tensor->grad, src1, false), - zero_table); - } - if (src1->grad) { - src1->grad = - ggml_add_or_set(ctx, - src1->grad, - ggml_sum(ctx, ggml_mul_impl(ctx, tensor->grad, src0, false)), + ggml_scale_impl(ctx, tensor->grad, s, 
false), zero_table); } } break; diff --git a/ggml.h b/ggml.h index b17314897..75918502b 100644 --- a/ggml.h +++ b/ggml.h @@ -1094,13 +1094,13 @@ extern "C" { GGML_API struct ggml_tensor * ggml_scale( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b); + float s); // in-place, returns view(a) GGML_API struct ggml_tensor * ggml_scale_inplace( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b); + float s); // b -> view(a,offset,nb1,nb2,3), return modified a GGML_API struct ggml_tensor * ggml_set( diff --git a/llama.cpp b/llama.cpp index ba970ce8d..d6c192441 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4032,13 +4032,12 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * wo, struct ggml_tensor * wo_b, struct ggml_tensor * q_cur, - struct ggml_tensor * kq_scale, struct ggml_tensor * kq_mask, int64_t n_ctx, int32_t n_tokens, int32_t n_kv, float max_alibi_bias, - float scale, + float kq_scale, const llm_build_cb & cb, int il) { const int64_t n_embd = hparams.n_embd; @@ -4086,7 +4085,7 @@ static struct ggml_tensor * llm_build_kqv( kq = ggml_soft_max(ctx, kq); cb(kq, "kq_soft_max", il); } else { - kq = ggml_soft_max_ext(ctx, kq, kq_mask, scale); + kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_scale); cb(kq, "kq_soft_max_ext", il); } @@ -4231,10 +4230,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4295,7 +4290,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4416,10 +4411,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4478,7 +4469,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4536,10 +4527,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4602,7 +4589,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, 
NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4659,10 +4646,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4702,7 +4685,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4759,10 +4742,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -4911,7 +4890,7 @@ struct llm_build_context { // TODO: not tested, could be broken cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Q, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Q, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -4965,10 +4944,6 @@ struct llm_build_context { inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); cb(inpL, "inp_embd", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5002,7 +4977,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5056,10 +5031,6 @@ struct llm_build_context { inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); cb(inpL, "inp_embd", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5099,7 +5070,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5150,10 +5121,6 @@ struct llm_build_context { inpL 
= llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); cb(inpL, "inp_embd", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5193,7 +5160,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, hparams.f_max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5253,10 +5220,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5306,7 +5269,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5366,10 +5329,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5423,7 +5382,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, NULL, - Qcur, KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5482,14 +5441,6 @@ struct llm_build_context { struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); cb(inp_pos, "inp_pos", -1); - // Q_scale - struct ggml_tensor * Q_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(Q_scale, "Q_scale", -1); - - // KQ_scale - struct ggml_tensor * KQ_scale = ggml_new_tensor_1d(ctx0, GGML_TYPE_F32, 1); - cb(KQ_scale, "KQ_scale", -1); - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); cb(KQ_mask, "KQ_mask", -1); @@ -5531,7 +5482,9 @@ struct llm_build_context { ); cb(Qcur, "Qcur", il); - Qcur = ggml_scale(ctx0, Qcur, Q_scale); + // with phi2, we scale the Q to avoid precision issues + // ref: https://github.com/ml-explore/mlx-examples/blob/08e862336ade809bc37d1035f94b359e7d1a5152/phi2/phi2.py#L64-L66 + Qcur = ggml_scale(ctx0, Qcur, 1.0f/sqrtf(float(n_embd_head))); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( @@ -5544,7 +5497,7 @@ struct llm_build_context { cur = llm_build_kqv(ctx0, model, hparams, kv_self, model.layers[il].wo, model.layers[il].bo, - Qcur, 
KQ_scale, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f, cb, il); + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f, cb, il); cb(cur, "kqv_out", il); } @@ -5681,8 +5634,6 @@ static const std::unordered_map k_offload_map { "pos_embd", OFFLOAD_FUNC_NR }, { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. rope) - { "Q_scale", OFFLOAD_FUNC_NOP }, - { "KQ_scale", OFFLOAD_FUNC_NOP }, { "KQ_mask", OFFLOAD_FUNC_FRC }, { "K_shift", OFFLOAD_FUNC_FRC }, @@ -5784,8 +5735,6 @@ static struct ggml_cgraph * llama_build_graph( bool alloc_inp_tokens = false; bool alloc_inp_embd = false; bool alloc_inp_pos = false; - bool alloc_inp_Q_scale = false; - bool alloc_inp_KQ_scale = false; bool alloc_inp_KQ_mask = false; bool alloc_inp_K_shift = false; @@ -5849,37 +5798,6 @@ static struct ggml_cgraph * llama_build_graph( alloc_inp_pos = true; } - if (!alloc_inp_Q_scale && strcmp(name, "Q_scale") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); - - if (!ggml_allocr_is_measure(lctx.alloc)) { - const int64_t n_embd_head = model.hparams.n_embd_head(); - float f = 1.0f/sqrtf(float(n_embd_head)); - ggml_backend_tensor_set(cur, &f, 0, sizeof(f)); - } - - alloc_inp_Q_scale = true; - } - - if (!alloc_inp_KQ_scale && strcmp(name, "KQ_scale") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); - - if (!ggml_allocr_is_measure(lctx.alloc)) { - const int64_t n_embd_head = model.hparams.n_embd_head(); - float f; - if (model.arch == LLM_ARCH_PHI2) { - // with phi2, we scale the Q to avoid precision issues - // ref: https://github.com/ml-explore/mlx-examples/blob/08e862336ade809bc37d1035f94b359e7d1a5152/phi2/phi2.py#L64-L66 - f = 1.0f; - } else { - f = 1.0f/sqrtf(float(n_embd_head)); - } - ggml_backend_tensor_set(cur, &f, 0, sizeof(f)); - } - - alloc_inp_KQ_scale = true; - } - if (!alloc_inp_KQ_mask && strcmp(name, "KQ_mask") == 0) { ggml_allocr_alloc(lctx.alloc, cur); @@ -9054,10 +8972,7 @@ static int llama_apply_lora_from_file_internal( ggml_set_name(BA, "BA"); if (scaling != 1.0f) { - ggml_tensor * scale_tensor = ggml_new_f32(lora_ctx.get(), scaling); - ggml_set_name(scale_tensor, "scale_tensor"); - - BA = ggml_scale_inplace(lora_ctx.get(), BA, scale_tensor); + BA = ggml_scale_inplace(lora_ctx.get(), BA, scaling); offload_func(BA); ggml_set_name(BA, "BA_scaled"); } diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index f04b9438a..f3df8a8c6 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -766,18 +766,19 @@ struct test_bin_bcast : public test_case { struct test_scale : public test_case { const ggml_type type; const std::array ne; + float scale; std::string vars() override { - return VARS_TO_STR2(type, ne); + return VARS_TO_STR3(type, ne, scale); } test_scale(ggml_type type = GGML_TYPE_F32, - std::array ne = {10, 10, 10, 10}) - : type(type), ne(ne) {} + std::array ne = {10, 10, 10, 10}, + float scale = 2.0f) + : type(type), ne(ne), scale(scale) {} ggml_tensor * build_graph(ggml_context * ctx) override { ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); - ggml_tensor * scale = ggml_new_tensor_1d(ctx, type, 1); ggml_tensor * out = ggml_scale(ctx, a, scale); return out; } diff --git a/tests/test-grad0.cpp b/tests/test-grad0.cpp index 81c20a89c..14914def5 100644 --- a/tests/test-grad0.cpp +++ b/tests/test-grad0.cpp @@ -881,19 +881,19 @@ int main(int argc, const char ** argv) { // scale { srand(seed); - const int nargs = 2; + const int nargs = 1; int64_t ne2[4]; ne2[0] = 1; for (int ndims = 1; ndims <= 2; ++ndims) { - x[1] = get_random_tensor_f32(ctx0, 1, ne2, -1.0f, 
1.0f); x[0] = get_random_tensor_f32(ctx0, ndims, ne, -1.0f, 1.0f); - ggml_set_param(ctx0, x[0]); - ggml_set_param(ctx0, x[1]); + const float s = -1.0f + 2.0f*frand(); - struct ggml_tensor * f = ggml_sum(ctx0, ggml_scale(ctx0, x[0], x[1])); + ggml_set_param(ctx0, x[0]); + + struct ggml_tensor * f = ggml_sum(ctx0, ggml_scale(ctx0, x[0], s)); check_gradient("scale", ctx0, x, f, ndims, nargs, 1e-3f, 1e-3f, INFINITY); } @@ -1395,7 +1395,7 @@ int main(int argc, const char ** argv) { ggml_add1(ctx0, ggml_scale(ctx0, ggml_soft_max(ctx0, x[0]), - ggml_new_f32(ctx0, 1.0f - eps)), + 1.0f - eps), ggml_new_f32(ctx0, eps)))); check_gradient("softmax", ctx0, x, f, ndims, nargs, 1e-3f, 2e-1f, INFINITY); From c7e9701f86564088350209d2f9d71c96ea00527f Mon Sep 17 00:00:00 2001 From: crasm Date: Fri, 22 Dec 2023 01:19:36 -0500 Subject: [PATCH 281/859] llama : add ability to cancel model loading (#4462) * llama : Add ability to cancel model load Updated llama_progress_callback so that if it returns false, the model loading is aborted. * llama : Add test for model load cancellation * Fix bool return in llama_model_load, remove std::ignore use * Update llama.cpp Co-authored-by: Jared Van Bortel * Fail test if model file is missing * Revert "Fail test if model file is missing" This reverts commit 32ebd525bf7e5a87ee8a3dbaab3d92ce79fbf23d. * Add test-model-load-cancel to Makefile * Revert "Revert "Fail test if model file is missing"" This reverts commit 2796953257ee5383fa7c8fe8fa8fc888c048fb0b. * Simplify .gitignore for tests, clang-tidy fixes * Label all ctest tests * ci : ctest uses -L main * Attempt at writing ctest_with_model * ci : get ci/run.sh working with test-model-load-cancel * ci : restrict .github/workflows/build.yml ctest to -L main * update requirements.txt * Disable test-model-load-cancel in make * Remove venv before creation * Restructure requirements.txt Top-level now imports the specific additional requirements for each python file. Using `pip install -r requirements.txt` will fail if versions become mismatched in the per-file requirements. * Make per-python-script requirements work alone This doesn't break the main requirements.txt. 
* Add comment * Add convert-persimmon-to-gguf.py to new requirements.txt scheme * Add check-requirements.sh script and GitHub workflow * Remove shellcheck installation step from workflow * Add nocleanup special arg * Fix merge see: https://github.com/ggerganov/llama.cpp/pull/4462#discussion_r1434593573 * reset to upstream/master * Redo changes for cancelling model load --------- Co-authored-by: Georgi Gerganov Co-authored-by: Jared Van Bortel --- llama.cpp | 46 +++++++++++++++++++++++++++++++++------------- llama.h | 6 ++++-- 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/llama.cpp b/llama.cpp index d6c192441..cb0546c95 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2372,7 +2372,8 @@ struct llama_model_loader { } } - void load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) const { + // Returns false if cancelled by progress_callback + bool load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) const { size_t size_data = 0; for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { @@ -2404,7 +2405,9 @@ struct llama_model_loader { GGML_ASSERT(cur); // unused tensors should have been caught by load_data already if (progress_callback) { - progress_callback((float) size_done / size_data, progress_callback_user_data); + if (!progress_callback((float) size_done / size_data, progress_callback_user_data)) { + return false; + } } const size_t offs = file_offset(ggml_get_name(cur)); @@ -2466,8 +2469,11 @@ struct llama_model_loader { } if (progress_callback) { - progress_callback(1.0f, progress_callback_user_data); + // Even though the model is done loading, we still honor + // cancellation since we need to free allocations. + return progress_callback(1.0f, progress_callback_user_data); } + return true; } }; @@ -3044,7 +3050,8 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { if (vocab.linefeed_id != -1) { LLAMA_LOG_INFO( "%s: LF token = %d '%s'\n", __func__, vocab.linefeed_id, vocab.id_to_token[vocab.linefeed_id].text.c_str() ); } } -static void llm_load_tensors( +// Returns false if cancelled by progress_callback +static bool llm_load_tensors( llama_model_loader & ml, llama_model & model, int n_gpu_layers, @@ -3722,16 +3729,20 @@ static void llm_load_tensors( model.tensors_by_name.emplace_back(ggml_get_name(cur), cur); } - ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, use_mlock ? &model.mlock_mmap : NULL); + if (!ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, use_mlock ? 
&model.mlock_mmap : NULL)) { + return false; + } model.mapping = std::move(ml.mapping); // loading time will be recalculate after the first eval, so // we take page faults deferred by mmap() into consideration model.t_load_us = ggml_time_us() - model.t_start_us; + return true; } -static bool llama_model_load(const std::string & fname, llama_model & model, const llama_model_params & params) { +// Returns 0 on success, -1 on error, and -2 on cancellation via llama_progress_callback +static int llama_model_load(const std::string & fname, llama_model & model, const llama_model_params & params) { try { llama_model_loader ml(fname, params.use_mmap, params.kv_overrides); @@ -3749,19 +3760,21 @@ static bool llama_model_load(const std::string & fname, llama_model & model, con if (params.vocab_only) { LLAMA_LOG_INFO("%s: vocab only - skipping tensors\n", __func__); - return true; + return 0; } - llm_load_tensors( + if (!llm_load_tensors( ml, model, params.n_gpu_layers, params.main_gpu, params.tensor_split, params.use_mlock, params.progress_callback, params.progress_callback_user_data - ); + )) { + return -2; + } } catch (const std::exception & err) { LLAMA_LOG_ERROR("error loading model: %s\n", err.what()); - return false; + return -1; } - return true; + return 0; } // @@ -9141,11 +9154,18 @@ struct llama_model * llama_load_model_from_file( LLAMA_LOG_INFO("\n"); } } + return true; }; } - if (!llama_model_load(path_model, *model, params)) { - LLAMA_LOG_ERROR("%s: failed to load model\n", __func__); + int status = llama_model_load(path_model, *model, params); + GGML_ASSERT(status <= 0); + if (status < 0) { + if (status == -1) { + LLAMA_LOG_ERROR("%s: failed to load model\n", __func__); + } else if (status == -2) { + LLAMA_LOG_INFO("%s: cancelled model load\n", __func__); + } delete model; return nullptr; } diff --git a/llama.h b/llama.h index 0be4b1337..af76bae2d 100644 --- a/llama.h +++ b/llama.h @@ -127,7 +127,7 @@ extern "C" { bool sorted; } llama_token_data_array; - typedef void (*llama_progress_callback)(float progress, void *ctx); + typedef bool (*llama_progress_callback)(float progress, void *ctx); // Input data for llama_decode // A llama_batch object can contain input about one or many sequences @@ -180,7 +180,9 @@ extern "C" { int32_t main_gpu; // the GPU that is used for scratch and small tensors const float * tensor_split; // how to split layers across multiple GPUs (size: LLAMA_MAX_DEVICES) - // called with a progress value between 0 and 1, pass NULL to disable + // Called with a progress value between 0.0 and 1.0. Pass NULL to disable. + // If the provided progress_callback returns true, model loading continues. + // If it returns false, model loading is immediately aborted. 
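(To make the new contract concrete, here is a minimal caller-side sketch. The `on_progress` callback and the `g_cancel` flag are invented for illustration; only the bool-returning signature and the nullptr-on-cancel behavior come from this patch.)

```cpp
// sketch: cancelling a model load from application code
#include "llama.h"
#include <atomic>
#include <cstdio>

static std::atomic<bool> g_cancel{false}; // e.g. set from a UI thread

static bool on_progress(float progress, void * /*user_data*/) {
    std::fprintf(stderr, "\rloading: %3.0f%%", progress * 100.0f);
    return !g_cancel.load(); // returning false aborts the load
}

int main() {
    llama_model_params mparams = llama_model_default_params();
    mparams.progress_callback           = on_progress;
    mparams.progress_callback_user_data = nullptr;

    // nullptr is returned both on error and on cancellation
    llama_model * model = llama_load_model_from_file("model.gguf", mparams);
    if (model == nullptr) {
        std::fprintf(stderr, "\nload failed or was cancelled\n");
        return 1;
    }
    llama_free_model(model);
    return 0;
}
```

(The `-2` status that `llama_model_load` now returns is what lets the library log "cancelled model load" instead of a generic failure.)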
llama_progress_callback progress_callback; // context pointer passed to the progress callback From 0137ef88ea9f8fd837a065700814329d24adeec3 Mon Sep 17 00:00:00 2001 From: bobqianic <129547291+bobqianic@users.noreply.github.com> Date: Fri, 22 Dec 2023 06:47:01 +0000 Subject: [PATCH 282/859] ggml : extend `enum ggml_log_level` with `GGML_LOG_LEVEL_DEBUG` (#4579) --- ggml.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ggml.h b/ggml.h index 75918502b..338f355a4 100644 --- a/ggml.h +++ b/ggml.h @@ -484,7 +484,8 @@ extern "C" { enum ggml_log_level { GGML_LOG_LEVEL_ERROR = 2, GGML_LOG_LEVEL_WARN = 3, - GGML_LOG_LEVEL_INFO = 4 + GGML_LOG_LEVEL_INFO = 4, + GGML_LOG_LEVEL_DEBUG = 5 }; // ggml object From 2bb98279c5a087d62949972b35cf63ff974ffe6a Mon Sep 17 00:00:00 2001 From: Deins Date: Fri, 22 Dec 2023 08:49:54 +0200 Subject: [PATCH 283/859] readme : add zig bindings (#4581) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 73fe59bb4..8e17d5ba4 100644 --- a/README.md +++ b/README.md @@ -123,6 +123,7 @@ as the main playground for developing new features for the [ggml](https://github - Clojure: [phronmophobic/llama.clj](https://github.com/phronmophobic/llama.clj) - React Native: [mybigday/llama.rn](https://github.com/mybigday/llama.rn) - Java: [kherud/java-llama.cpp](https://github.com/kherud/java-llama.cpp) +- Zig: [deins/llama.cpp.zig](https://github.com/Deins/llama.cpp.zig) **UI:** From f31b98489824a86c937fa62ccf5dfd4bb0327b86 Mon Sep 17 00:00:00 2001 From: rhuddleston Date: Thu, 21 Dec 2023 23:56:34 -0700 Subject: [PATCH 284/859] ci : tag docker image with build number (#4584) --- .github/workflows/docker.yml | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index a7165a38f..7f4de50ea 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -69,6 +69,19 @@ jobs: docker-images: true swap-storage: true + - name: Determine tag name + id: tag + shell: bash + run: | + BUILD_NUMBER="$(git rev-list --count HEAD)" + SHORT_HASH="$(git rev-parse --short=7 HEAD)" + if [[ "${{ env.BRANCH_NAME }}" == "master" ]]; then + echo "name=b${BUILD_NUMBER}" >> $GITHUB_OUTPUT + else + SAFE_NAME=$(echo "${{ env.BRANCH_NAME }}" | tr '/' '-') + echo "name=${SAFE_NAME}-b${BUILD_NUMBER}-${SHORT_HASH}" >> $GITHUB_OUTPUT + fi + - name: Build and push Docker image (versioned) if: github.event_name == 'push' uses: docker/build-push-action@v4 @@ -85,5 +98,5 @@ jobs: context: . 
push: ${{ github.event_name == 'push' }} platforms: ${{ matrix.config.platforms }} - tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}" + tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}" , "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}-${{ steps.tag.outputs.name }}" file: ${{ matrix.config.dockerfile }} From 28cb35a0ecb9852adc3494aa51dde60141939d64 Mon Sep 17 00:00:00 2001 From: Michael Kesper Date: Fri, 22 Dec 2023 09:03:25 +0100 Subject: [PATCH 285/859] make : add LLAMA_HIP_UMA option (#4587) NB: LLAMA_HIP_UMA=1 (or any value) adds MK_CPPFLAG -DGGML_HIP_UMA --- Makefile | 3 +++ README.md | 8 +++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 68df7702a..42686ce71 100644 --- a/Makefile +++ b/Makefile @@ -452,6 +452,9 @@ ifdef LLAMA_HIPBLAS LLAMA_CUDA_MMV_Y ?= 1 LLAMA_CUDA_KQUANTS_ITER ?= 2 MK_CPPFLAGS += -DGGML_USE_HIPBLAS -DGGML_USE_CUBLAS +ifdef LLAMA_HIP_UMA + MK_CPPFLAGS += -DGGML_HIP_UMA +endif # LLAMA_HIP_UMA MK_LDFLAGS += -L$(ROCM_PATH)/lib -Wl,-rpath=$(ROCM_PATH)/lib MK_LDFLAGS += -lhipblas -lamdhip64 -lrocblas HIPFLAGS += $(addprefix --offload-arch=,$(GPU_TARGETS)) diff --git a/README.md b/README.md index 8e17d5ba4..377d3928b 100644 --- a/README.md +++ b/README.md @@ -440,7 +440,13 @@ Building the program with BLAS support may lead to some performance improvements && cmake --build build -- -j 16 ``` On Linux it is also possible to use unified memory architecture (UMA) to share main memory between the CPU and integrated GPU by setting `-DLLAMA_HIP_UMA=ON"`. - However, this hurts performance for non-integrated GPUs. + However, this hurts performance for non-integrated GPUs (but enables working with integrated GPUs). 
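(For context: the new `LLAMA_HIP_UMA` make option above only defines `GGML_HIP_UMA`. A plausible, simplified sketch of what that define toggles in the HIP compatibility layer of ggml-cuda.cu is shown below; the exact macro set there may differ.)

```cpp
// assumed sketch: with GGML_HIP_UMA, device allocations are redirected to
// managed (unified) memory, so an integrated GPU can work out of system RAM
#if defined(GGML_USE_HIPBLAS)
#if defined(GGML_HIP_UMA)
#define cudaMalloc hipMallocManaged // UMA: CPU and iGPU share the same pages
#else
#define cudaMalloc hipMalloc        // discrete GPU: allocate dedicated VRAM
#endif
#endif
```

(This is also why the README warns about non-integrated GPUs: on a discrete card, managed memory adds page-migration overhead.)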
+ + - Using `make` (example for target gfx1030, build with 16 CPU threads): + ```bash + make -j16 LLAMA_HIPBLAS=1 LLAMA_HIP_UMA=1 AMDGPU_TARGETS=gfx1030 + ``` + - Using `CMake` for Windows (using x64 Native Tools Command Prompt for VS, and assuming a gfx1100-compatible AMD GPU): ```bash set PATH=%HIP_PATH%\bin;%PATH% From 48b24b170e3b4f9dc28200306840cb07d1c123df Mon Sep 17 00:00:00 2001 From: Herman Semenov Date: Fri, 22 Dec 2023 09:26:49 +0000 Subject: [PATCH 286/859] ggml : add comment about backward GGML_OP_DIAG_MASK_INF (#4203) --- ggml.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml.c b/ggml.c index f27920a2d..15e1984d1 100644 --- a/ggml.c +++ b/ggml.c @@ -15335,6 +15335,8 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor const int n_past = ((int32_t *) tensor->op_params)[0]; src0->grad = ggml_add_or_set(ctx, src0->grad, + /* ggml_diag_mask_inf_impl() shouldn't be here */ + /* ref: https://github.com/ggerganov/llama.cpp/pull/4203#discussion_r1412377992 */ ggml_diag_mask_zero_impl(ctx, tensor->grad, n_past, false), zero_table); } From 48b7ff193e64c97ab174280ba0eb8d14b47c49ba Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 22 Dec 2023 12:12:53 +0100 Subject: [PATCH 287/859] llama : fix platforms without mmap (#4578) * llama : fix platforms without mmap * win32 : limit prefetch size to the file size * fix win32 error clobber, unnecessary std::string in std::runtime_error --- ggml-cuda.cu | 3 ++- ggml.c | 6 ++++-- llama.cpp | 36 ++++++++++++++++++------------------ 3 files changed, 24 insertions(+), 21 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index ac91ee12e..37d7f2792 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7702,7 +7702,8 @@ inline void ggml_cuda_op_scale( GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - const float scale = ((float *) dst->op_params)[0]; + float scale; + memcpy(&scale, dst->op_params, sizeof(float)); scale_f32_cuda(src0_dd, dst_dd, scale, ggml_nelements(src0), main_stream); CUDA_CHECK(cudaGetLastError()); diff --git a/ggml.c b/ggml.c index 15e1984d1..3656422d7 100644 --- a/ggml.c +++ b/ggml.c @@ -10335,7 +10335,8 @@ static void ggml_compute_forward_scale_f32( } // scale factor - const float v = *(float *) dst->op_params; + float v; + memcpy(&v, dst->op_params, sizeof(float)); const int ith = params->ith; const int nth = params->nth; @@ -15152,7 +15153,8 @@ static void ggml_compute_backward(struct ggml_context * ctx, struct ggml_tensor { // necessary for llama if (src0->grad) { - const float s = ((float *) tensor->op_params)[0]; + float s; + memcpy(&s, tensor->op_params, sizeof(float)); src0->grad = ggml_add_or_set(ctx, diff --git a/llama.cpp b/llama.cpp index 4e4495739..5699a0fcf 100644 --- a/llama.cpp +++ b/llama.cpp @@ -778,7 +778,7 @@ struct llama_file { throw std::runtime_error(format("read error: %s", strerror(errno))); } if (ret != 1) { - throw std::runtime_error(std::string("unexpectedly reached end of file")); + throw std::runtime_error("unexpectedly reached end of file"); } } @@ -931,29 +931,29 @@ struct llama_mmap { #elif defined(_WIN32) static constexpr bool SUPPORTED = true; - llama_mmap(struct llama_file * file, bool prefetch = true, bool numa = false) { - (void) numa; + llama_mmap(struct llama_file * file, size_t prefetch = (size_t) -1, bool numa = false) { + GGML_UNUSED(numa); size = file->size; HANDLE hFile = (HANDLE) _get_osfhandle(_fileno(file->fp)); HANDLE hMapping = CreateFileMappingA(hFile, NULL, PAGE_READONLY, 0, 0, NULL); - DWORD error =
GetLastError(); if (hMapping == NULL) { + DWORD error = GetLastError(); throw std::runtime_error(format("CreateFileMappingA failed: %s", llama_format_win_err(error).c_str())); } addr = MapViewOfFile(hMapping, FILE_MAP_READ, 0, 0, 0); - error = GetLastError(); + DWORD error = GetLastError(); CloseHandle(hMapping); if (addr == NULL) { throw std::runtime_error(format("MapViewOfFile failed: %s", llama_format_win_err(error).c_str())); } - if (prefetch) { + if (prefetch > 0) { // PrefetchVirtualMemory is only present on Windows 8 and above, so we dynamically load it BOOL (WINAPI *pPrefetchVirtualMemory) (HANDLE, ULONG_PTR, PWIN32_MEMORY_RANGE_ENTRY, ULONG); HMODULE hKernel32 = GetModuleHandleW(L"kernel32.dll"); @@ -965,9 +965,9 @@ struct llama_mmap { // advise the kernel to preload the mapped memory WIN32_MEMORY_RANGE_ENTRY range; range.VirtualAddress = addr; - range.NumberOfBytes = (SIZE_T)size; + range.NumberOfBytes = (SIZE_T) std::min(size, prefetch); if (!pPrefetchVirtualMemory(GetCurrentProcess(), 1, &range, 0)) { - fprintf(stderr, "warning: PrefetchVirtualMemory failed: %s\n", + LLAMA_LOG_WARN("warning: PrefetchVirtualMemory failed: %s\n", llama_format_win_err(GetLastError()).c_str()); } } @@ -982,26 +982,26 @@ struct llama_mmap { ~llama_mmap() { if (!UnmapViewOfFile(addr)) { - fprintf(stderr, "warning: UnmapViewOfFile failed: %s\n", + LLAMA_LOG_WARN("warning: UnmapViewOfFile failed: %s\n", llama_format_win_err(GetLastError()).c_str()); } } #else static constexpr bool SUPPORTED = false; - llama_mmap(struct llama_file * file, bool prefetch = true, bool numa = false) { - (void) file; - (void) prefetch; - (void) numa; + llama_mmap(struct llama_file * file, size_t prefetch = -1, bool numa = false) { + GGML_UNUSED(file); + GGML_UNUSED(prefetch); + GGML_UNUSED(numa); - throw std::runtime_error(std::string("mmap not supported")); + throw std::runtime_error("mmap not supported"); } - void unmap(size_t offset, size_t len) { - (void) offset; - (void) len; + void unmap_fragment(size_t first, size_t last) { + GGML_UNUSED(first); + GGML_UNUSED(last); - throw std::runtime_error(std::string("mmap not supported")); + throw std::runtime_error("mmap not supported"); } #endif }; From 6724ef16573ec7ecce620be56cbbff145856b2fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Henrik=20Forst=C3=A9n?= Date: Fri, 22 Dec 2023 15:34:05 +0200 Subject: [PATCH 288/859] Fix CudaMemcpy direction (#4599) --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 37d7f2792..da8fd1e09 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8843,7 +8843,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_CPU ? cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; const cudaMemcpyKind dst_kind = dst->backend == GGML_BACKEND_CPU ? 
- cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; + cudaMemcpyDeviceToHost : cudaMemcpyDeviceToDevice; for (int32_t row_id = 0; row_id < n_as; ++row_id) { const struct ggml_tensor * src0_row = dst->src[row_id + 2]; From a55876955b1a83464171de8d578d3ab062a7b62d Mon Sep 17 00:00:00 2001 From: FantasyGmm <16450052+FantasyGmm@users.noreply.github.com> Date: Fri, 22 Dec 2023 23:11:12 +0800 Subject: [PATCH 289/859] cuda : fix jetson compile error (#4560) * fix old jetson compile error * Update Makefile * update jetson detect and cuda version detect * update cuda marco define * update makefile and cuda,fix some issue * Update README.md Co-authored-by: Georgi Gerganov * Update Makefile * Update README.md --------- Co-authored-by: Georgi Gerganov --- Makefile | 22 +++++++++++++++++++--- README.md | 3 +++ ggml-cuda.cu | 7 +++++++ ggml-quants.c | 4 ++-- 4 files changed, 31 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 42686ce71..6a998091b 100644 --- a/Makefile +++ b/Makefile @@ -282,8 +282,17 @@ endif ifneq ($(filter aarch64%,$(UNAME_M)),) # Apple M1, M2, etc. # Raspberry Pi 3, 4, Zero 2 (64-bit) + # Nvidia Jetson MK_CFLAGS += -mcpu=native MK_CXXFLAGS += -mcpu=native + JETSON_RELEASE_INFO = $(shell jetson_release) + ifdef JETSON_RELEASE_INFO + ifneq ($(filter TX2%,$(JETSON_RELEASE_INFO)),) + JETSON_EOL_MODULE_DETECT = 1 + CC = aarch64-unknown-linux-gnu-gcc + cxx = aarch64-unknown-linux-gnu-g++ + endif + endif endif ifneq ($(filter armv6%,$(UNAME_M)),) @@ -357,10 +366,13 @@ ifdef LLAMA_BLIS endif # LLAMA_BLIS ifdef LLAMA_CUBLAS - MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include - MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib + MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include -I/usr/local/cuda/targets/aarch64-linux/include + MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib OBJS += ggml-cuda.o - MK_NVCCFLAGS = --forward-unknown-to-host-compiler -use_fast_math + MK_NVCCFLAGS = -use_fast_math +ifndef JETSON_EOL_MODULE_DETECT + MK_NVCCFLAGS += --forward-unknown-to-host-compiler +endif # JETSON_EOL_MODULE_DETECT ifdef LLAMA_DEBUG MK_NVCCFLAGS += -lineinfo @@ -417,7 +429,11 @@ ifdef LLAMA_CUDA_CCBIN MK_NVCCFLAGS += -ccbin $(LLAMA_CUDA_CCBIN) endif ggml-cuda.o: ggml-cuda.cu ggml-cuda.h +ifdef JETSON_EOL_MODULE_DETECT + $(NVCC) -I. -Icommon -D_XOPEN_SOURCE=600 -D_GNU_SOURCE -DNDEBUG -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I/usr/local/cuda/targets/aarch64-linux/include -std=c++11 -O3 $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ +else $(NVCC) $(BASE_CXXFLAGS) $(NVCCFLAGS) -Wno-pedantic -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ +endif # JETSON_EOL_MODULE_DETECT endif # LLAMA_CUBLAS ifdef LLAMA_CLBLAST diff --git a/README.md b/README.md index 377d3928b..649c3b333 100644 --- a/README.md +++ b/README.md @@ -396,6 +396,9 @@ Building the program with BLAS support may lead to some performance improvements - #### cuBLAS This provides BLAS acceleration using the CUDA cores of your Nvidia GPU. Make sure to have the CUDA toolkit installed. You can download it from your Linux distro's package manager (e.g. 
`apt install nvidia-cuda-toolkit`) or from here: [CUDA Toolkit](https://developer.nvidia.com/cuda-downloads). + + For Jetson users, if you have a Jetson Orin, you can try this: [Official Support](https://www.jetson-ai-lab.com/tutorial_text-generation.html). If you are using an older device (Nano/TX2), some additional operations are needed before compiling. + - Using `make`: ```bash make LLAMA_CUBLAS=1 diff --git a/ggml-cuda.cu b/ggml-cuda.cu index da8fd1e09..b124774a9 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -90,6 +90,13 @@ #include <cuda_runtime.h> #include <cublas_v2.h> #include <cuda_fp16.h> +// CUDA 10.2 does not have these macro definitions. +#ifndef CUBLAS_TF32_TENSOR_OP_MATH +#define CUBLAS_TF32_TENSOR_OP_MATH CUBLAS_TENSOR_OP_MATH +#define CUBLAS_COMPUTE_16F CUDA_R_16F +#define CUBLAS_COMPUTE_32F CUDA_R_32F +#define cublasComputeType_t cudaDataType_t +#endif #endif // defined(GGML_USE_HIPBLAS) #include "ggml-cuda.h" diff --git a/ggml-quants.c b/ggml-quants.c index 0e8163a16..a15a24048 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3677,7 +3677,7 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const uint8x16_t mins = vshrq_n_u8(mins_and_scales, 4); const ggml_int16x8x2_t q8sums = ggml_vld1q_s16_x2(y[i].bsums); - const ggml_int16x8x2_t mins16 = {vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(mins))), vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(mins)))}; + const ggml_int16x8x2_t mins16 = {{vreinterpretq_s16_u16(vmovl_u8(vget_low_u8(mins))), vreinterpretq_s16_u16(vmovl_u8(vget_high_u8(mins)))}}; const int32x4_t s0 = vaddq_s32(vmull_s16(vget_low_s16 (mins16.val[0]), vget_low_s16 (q8sums.val[0])), vmull_s16(vget_high_s16(mins16.val[0]), vget_high_s16(q8sums.val[0]))); const int32x4_t s1 = vaddq_s32(vmull_s16(vget_low_s16 (mins16.val[1]), vget_low_s16 (q8sums.val[1])), @@ -6626,7 +6626,7 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const ggml_int16x8x2_t q8sums = ggml_vld1q_s16_x2(y[i].bsums); const int8x16_t scales = vld1q_s8(scale); - const ggml_int16x8x2_t q6scales = {vmovl_s8(vget_low_s8(scales)), vmovl_s8(vget_high_s8(scales))}; + const ggml_int16x8x2_t q6scales = {{vmovl_s8(vget_low_s8(scales)), vmovl_s8(vget_high_s8(scales))}}; const int32x4_t prod = vaddq_s32(vaddq_s32(vmull_s16(vget_low_s16 (q8sums.val[0]), vget_low_s16 (q6scales.val[0])), vmull_s16(vget_high_s16(q8sums.val[0]), vget_high_s16(q6scales.val[0]))), From ba661751322a7c201fd3bef71af077c5aebfaa2a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 22 Dec 2023 17:53:43 +0200 Subject: [PATCH 290/859] sync : ggml (fix im2col) (#4591) * cuda : fix im2col_f32_f16 (ggml/#658) ggml-ci * ggml-alloc : fix ggml_tallocr_is_own --------- Co-authored-by: leejet --- ggml-alloc.c | 2 +- ggml-cuda.cu | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ggml-alloc.c b/ggml-alloc.c index a97436b17..a27dd54b0 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -72,7 +72,7 @@ static void remove_allocated_tensor(ggml_tallocr_t alloc, struct ggml_tensor * t // check if a tensor is allocated by this buffer static bool ggml_tallocr_is_own(ggml_tallocr_t alloc, const struct ggml_tensor * tensor) { - return tensor->buffer == alloc->buffer; + return tensor->buffer == alloc->buffer && (!tensor->view_src || tensor->view_src->buffer == alloc->buffer); } static bool ggml_is_view(struct ggml_tensor * t) { diff --git a/ggml-cuda.cu b/ggml-cuda.cu index b124774a9..7c2a834e3 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5273,17 +5273,17 @@ static __global__ void im2col_f32_f16( const int ky = (i - kd) /
OW; const int ix = i % OW; - const int iiw = ix * s0 + kx * d0 - p0; - const int iih = blockIdx.y * s1 + ky * d1 - p1; + const int64_t iiw = ix * s0 + kx * d0 - p0; + const int64_t iih = blockIdx.y * s1 + ky * d1 - p1; - const int offset_dst = + const int64_t offset_dst = (blockIdx.y * OW + ix) * CHW + (blockIdx.z * (KW * KH) + ky * KW + kx); if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { dst[offset_dst] = __float2half(0.0f); } else { - const int offset_src = blockIdx.z * offset_delta; + const int64_t offset_src = blockIdx.z * offset_delta; dst[offset_dst] = __float2half(x[offset_src + iih * IW + iiw]); } } From 7082d24cec35e9ce9147535a2224dfc67ee0a78c Mon Sep 17 00:00:00 2001 From: LeonEricsson <70749762+LeonEricsson@users.noreply.github.com> Date: Fri, 22 Dec 2023 17:05:56 +0100 Subject: [PATCH 291/859] lookup : add prompt lookup decoding example (#4484) * initial commit, going through initializations * main loop finished, starting to debug * BUG: generates gibberish/repeating tokens after a while * kv_cache management * Added colors to distinguish drafted tokens (--color). Updated README * lookup : fix token positions in the draft batch * lookup : use n_draft from CLI params * lookup : final touches --------- Co-authored-by: Leon Ericsson Co-authored-by: Georgi Gerganov --- .gitignore | 1 + Makefile | 5 +- common/common.h | 3 +- examples/CMakeLists.txt | 1 + examples/lookup/CMakeLists.txt | 5 + examples/lookup/README.md | 13 ++ examples/lookup/lookup.cpp | 230 +++++++++++++++++++++++++++++++++ 7 files changed, 256 insertions(+), 2 deletions(-) create mode 100644 examples/lookup/CMakeLists.txt create mode 100644 examples/lookup/README.md create mode 100644 examples/lookup/lookup.cpp diff --git a/.gitignore b/.gitignore index 76b3d2861..def74a1e9 100644 --- a/.gitignore +++ b/.gitignore @@ -48,6 +48,7 @@ models-mnt /llama-bench /llava-cli /lookahead +/lookup /main /metal /perplexity diff --git a/Makefile b/Makefile index 6a998091b..cb5a4e948 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ BUILD_TARGETS = \ main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ simple batched batched-bench save-load-state server gguf llama-bench libllava.a llava-cli baby-llama beam-search \ - speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead tests/test-c.o + speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup tests/test-c.o # Binaries only useful for tests TEST_TARGETS = \ @@ -664,6 +664,9 @@ parallel: examples/parallel/parallel.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) +lookup: examples/lookup/lookup.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) + $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + ifdef LLAMA_METAL metal: examples/metal/metal.cpp ggml.o $(OBJS) $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) diff --git a/common/common.h b/common/common.h index e87ce1133..9659aa045 100644 --- a/common/common.h +++ b/common/common.h @@ -51,7 +51,7 @@ struct gpt_params { int32_t n_ctx = 512; // context size int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) int32_t n_keep = 0; // number of tokens to keep from initial prompt - int32_t n_draft = 16; // number of tokens to draft during speculative decoding + int32_t n_draft = 8; // number of tokens to draft during speculative decoding int32_t n_chunks = 
-1; // max number of chunks to process (-1 = unlimited) int32_t n_parallel = 1; // number of parallel sequences to decode int32_t n_sequences = 1; // number of sequences to decode @@ -240,3 +240,4 @@ void dump_kv_cache_view(const llama_kv_cache_view & view, int row_size = 80); // Dump the KV cache view showing individual sequences in each cell (long output). void dump_kv_cache_view_seqs(const llama_kv_cache_view & view, int row_size = 40); + diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 6744944fd..4cc13d6e9 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -33,6 +33,7 @@ else() add_subdirectory(simple) add_subdirectory(speculative) add_subdirectory(lookahead) + add_subdirectory(lookup) add_subdirectory(train-text-from-scratch) if (LLAMA_METAL) add_subdirectory(metal) diff --git a/examples/lookup/CMakeLists.txt b/examples/lookup/CMakeLists.txt new file mode 100644 index 000000000..c060b8f56 --- /dev/null +++ b/examples/lookup/CMakeLists.txt @@ -0,0 +1,5 @@ +set(TARGET lookup) +add_executable(${TARGET} lookup.cpp) +install(TARGETS ${TARGET} RUNTIME) +target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_compile_features(${TARGET} PRIVATE cxx_std_11) diff --git a/examples/lookup/README.md b/examples/lookup/README.md new file mode 100644 index 000000000..5bfb0de93 --- /dev/null +++ b/examples/lookup/README.md @@ -0,0 +1,13 @@ +# llama.cpp/examples/lookup + +Demonstration of Prompt Lookup Decoding + +https://github.com/apoorvumang/prompt-lookup-decoding + +The key parameters for lookup decoding are `ngram_min`, `ngram_max` and `n_draft`. The first two determine the size of the ngrams to search for in the prompt for a match. The latter specifies how many subsequent tokens to draft if a match is found. 
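To make the mechanism concrete, below is a simplified, self-contained sketch of the matching loop; token ids are reduced to plain `int`, and the full implementation in lookup.cpp further down also walks `ngram_size` from `ngram_max` down to `ngram_min` and feeds the draft to the target model in a batch.

```cpp
// simplified sketch of the prompt-lookup matching loop
#include <vector>

static std::vector<int> prompt_lookup_draft(const std::vector<int> & inp,
                                            int ngram_size, int n_draft) {
    const int n = (int) inp.size();
    // compare the last ngram_size tokens against every earlier position
    for (int i = 0; i + 2*ngram_size <= n; ++i) {
        bool match = true;
        for (int j = 0; j < ngram_size; ++j) {
            if (inp[i + j] != inp[n - ngram_size + j]) { match = false; break; }
        }
        // on a match, propose the tokens that followed it as the draft
        if (match && i + ngram_size + n_draft < n) {
            return std::vector<int>(inp.begin() + i + ngram_size,
                                    inp.begin() + i + ngram_size + n_draft);
        }
    }
    return {}; // no usable match in the prompt
}
```

Because the draft comes from the prompt itself, no separate draft model is needed; each drafted token is then verified against the target model's own samples, as in the speculative example.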
+ +More info: + +https://github.com/ggerganov/llama.cpp/pull/4484 +https://github.com/ggerganov/llama.cpp/issues/4226 + diff --git a/examples/lookup/lookup.cpp b/examples/lookup/lookup.cpp new file mode 100644 index 000000000..d8de7dd38 --- /dev/null +++ b/examples/lookup/lookup.cpp @@ -0,0 +1,230 @@ +#include "common.h" +#include "llama.h" + +#include +#include +#include +#include + +int main(int argc, char ** argv){ + gpt_params params; + + if (!gpt_params_parse(argc, argv, params)) { + return 1; + } + + // max/min n-grams size to search for in prompt + const int ngram_max = 4; + const int ngram_min = 1; + + // length of the candidate / draft sequence, if match is found + const int n_draft = params.n_draft; + + const bool dump_kv_cache = params.dump_kv_cache; + +#ifndef LOG_DISABLE_LOGS + log_set_target(log_filename_generator("lookup", "log")); + LOG_TEE("Log start\n"); + log_dump_cmdline(argc, argv); +#endif // LOG_DISABLE_LOGS + + // init llama.cpp + llama_backend_init(params.numa); + + llama_model * model = NULL; + llama_context * ctx = NULL; + + // load the model + std::tie(model, ctx) = llama_init_from_gpt_params(params); + + // tokenize the prompt + const bool add_bos = llama_should_add_bos_token(model); + LOG("add_bos tgt: %d\n", add_bos); + + std::vector inp; + inp = ::llama_tokenize(ctx, params.prompt, add_bos, true); + + const int max_context_size = llama_n_ctx(ctx); + const int max_tokens_list_size = max_context_size - 4; + + if ((int) inp.size() > max_tokens_list_size) { + fprintf(stderr, "%s: error: prompt too long (%d tokens, max %d)\n", __func__, (int) inp.size(), max_tokens_list_size); + return 1; + } + + fprintf(stderr, "\n\n"); + + for (auto id : inp) { + fprintf(stderr, "%s", llama_token_to_piece(ctx, id).c_str()); + } + + fflush(stderr); + + const int n_input = inp.size(); + + const auto t_enc_start = ggml_time_us(); + + llama_decode(ctx, llama_batch_get_one( inp.data(), n_input - 1, 0, 0)); + llama_decode(ctx, llama_batch_get_one(&inp.back(), 1, n_input - 1, 0)); + + const auto t_enc_end = ggml_time_us(); + + int n_predict = 0; + int n_drafted = 0; + int n_accept = 0; + + int n_past = inp.size(); + + bool has_eos = false; + + struct llama_sampling_context * ctx_sampling = llama_sampling_init(params.sparams); + + std::vector draft; + + llama_batch batch_tgt = llama_batch_init(params.n_ctx, 0, 1); + + // debug + struct llama_kv_cache_view kvc_view = llama_kv_cache_view_init(ctx, 1); + + const auto t_dec_start = ggml_time_us(); + + while (true) { + // debug + if (dump_kv_cache) { + llama_kv_cache_view_update(ctx, &kvc_view); + dump_kv_cache_view_seqs(kvc_view, 40); + } + + // print current draft sequence + LOG("drafted %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, draft).c_str()); + + int i_dft = 0; + while (true) { + // sample from the target model + llama_token id = llama_sampling_sample(ctx_sampling, ctx, NULL, i_dft); + + llama_sampling_accept(ctx_sampling, ctx, id, true); + + const std::string token_str = llama_token_to_piece(ctx, id); + + if (!params.use_color) { + printf("%s", token_str.c_str()); + } + + if (id == llama_token_eos(model)) { + has_eos = true; + } + + ++n_predict; + + // check if the target token matches the draft + if (i_dft < (int) draft.size() && id == draft[i_dft]) { + LOG("the sampled target token matches the %dth drafted token (%d, '%s') - accepted\n", i_dft, id, token_str.c_str()); + ++n_accept; + ++n_past; + ++i_dft; + inp.push_back(id); + + if (params.use_color) { + // color accepted draft token + printf("\033[34m%s\033[0m", token_str.c_str()); + 
fflush(stdout); + } + continue; + } + + if (params.use_color) { + printf("%s", token_str.c_str()); + } + fflush(stdout); + + + LOG("the sampled target token (%d, '%s') did not match, or we ran out of drafted tokens\n", id, token_str.c_str()); + + draft.clear(); + draft.push_back(id); + inp.push_back(id); + break; + } + + if ((params.n_predict > 0 && n_predict > params.n_predict) || has_eos) { + break; + } + + // KV cache management + // clean the cache of draft tokens that weren't accepted + llama_kv_cache_seq_rm(ctx, 0, n_past, -1); + + llama_batch_clear(batch_tgt); + llama_batch_add(batch_tgt, draft[0], n_past, { 0 }, true); + + // generate n_pred tokens through prompt lookup + auto prompt_lookup = [&]() -> void { + int inp_size = inp.size(); + for (int ngram_size = ngram_max ; ngram_size > ngram_min; --ngram_size){ + const llama_token * ngram = &inp[inp_size - ngram_size]; + + for (int i = 0; i <= (int) inp_size - (ngram_size * 2); ++i) { + bool match = true; + for (int j = 0; j < ngram_size; ++j) { + if (inp[i + j] != ngram[j]) { + match = false; + break; + } + } + + if (match) { + const int startIdx = i + ngram_size; + const int endIdx = startIdx + n_draft; + if (endIdx < inp_size) { + for (int j = startIdx; j < endIdx; ++j) { + LOG(" - draft candidate %d: %d\n", j, inp[j]); + draft.push_back(inp[j]); + llama_batch_add(batch_tgt, inp[j], n_past + (j - startIdx) + 1, { 0 }, true); + ++n_drafted; + } + return; + } + } + } + } + return; + }; + + prompt_lookup(); + + llama_decode(ctx, batch_tgt); + ++n_past; + + draft.erase(draft.begin()); + } + + auto t_dec_end = ggml_time_us(); + + LOG_TEE("\n\n"); + + LOG_TEE("encoded %4d tokens in %8.3f seconds, speed: %8.3f t/s\n", n_input, (t_enc_end - t_enc_start) / 1e6f, inp.size() / ((t_enc_end - t_enc_start) / 1e6f)); + LOG_TEE("decoded %4d tokens in %8.3f seconds, speed: %8.3f t/s\n", n_predict, (t_dec_end - t_dec_start) / 1e6f, n_predict / ((t_dec_end - t_dec_start) / 1e6f)); + + LOG_TEE("\n"); + LOG_TEE("n_draft = %d\n", n_draft); + LOG_TEE("n_predict = %d\n", n_predict); + LOG_TEE("n_drafted = %d\n", n_drafted); + LOG_TEE("n_accept = %d\n", n_accept); + LOG_TEE("accept = %.3f%%\n", 100.0f * n_accept / n_drafted); + + LOG_TEE("\ntarget:\n"); + llama_print_timings(ctx); + + llama_sampling_free(ctx_sampling); + llama_batch_free(batch_tgt); + + llama_free(ctx); + llama_free_model(model); + + llama_backend_free(); + + fprintf(stderr, "\n\n"); + + return 0; +} From e0a4002273907b2c414b6b5442d99e08bfe2df35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 23 Dec 2023 09:16:33 +0100 Subject: [PATCH 292/859] CUDA: fixed row rounding for 0 tensor splits (#4594) --- ggml-cuda.cu | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 7c2a834e3..490081cac 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7937,12 +7937,16 @@ static void ggml_cuda_op_mul_mat( if (id != 0) { row_low[id] = ne01*g_tensor_split[id]; - row_low[id] -= row_low[id] % rounding; + if (row_low[id] < ne01) { + row_low[id] -= row_low[id] % rounding; + } } if (id != g_device_count - 1) { row_high[id] = ne01*g_tensor_split[id + 1]; - row_high[id] -= row_high[id] % rounding; + if (row_high[id] < ne01) { + row_high[id] -= row_high[id] % rounding; + } } } } From b9ec82d262cb20d7f0a8a1157bfa9aace40e2625 Mon Sep 17 00:00:00 2001 From: kalomaze <66376113+kalomaze@users.noreply.github.com> Date: Sat, 23 Dec 2023 03:27:07 -0600 Subject: [PATCH 293/859] grammar : check the full vocab only if necessary (opt) 
(#4306) * Check the full vocab for grammar only if necessary * Fix missing logit restoration step (?) Does this matter, actually? * Fix whitespace / formatting * Adjust comment * Didn't mean to push test gbnf * Split sampling into the helper function (?) And also revert the changes made to the header * common : fix final newline --------- Co-authored-by: Georgi Gerganov --- common/sampling.cpp | 48 ++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 3 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index f4e76df31..5b15204be 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -149,11 +149,12 @@ static void sampler_queue( } } -llama_token llama_sampling_sample( +static llama_token llama_sampling_sample_impl( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, struct llama_context * ctx_cfg, - const int idx) { + const int idx, + bool is_resampling) { // Add a parameter to indicate if we are resampling const llama_sampling_params & params = ctx_sampling->params; const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); @@ -173,8 +174,17 @@ llama_token llama_sampling_sample( llama_token id = 0; + // Get a pointer to the logits float * logits = llama_get_logits_ith(ctx_main, idx); + // Declare original_logits at the beginning of the function scope + std::vector original_logits; + + if (!is_resampling) { + // Only make a copy of the original logits if we are not in the resampling phase, not sure if I actually have to do this. + original_logits = std::vector(logits, logits + llama_n_vocab(llama_get_model(ctx_main))); + } + // apply params.logit_bias map for (auto it = params.logit_bias.begin(); it != params.logit_bias.end(); it++) { logits[it->first] += it->second; @@ -210,7 +220,8 @@ llama_token llama_sampling_sample( } } - if (ctx_sampling->grammar != NULL) { + // If we are in the resampling phase, apply grammar checks before sampling logic + if (is_resampling && ctx_sampling->grammar != NULL) { llama_sample_grammar(ctx_main, &cur_p, ctx_sampling->grammar); } @@ -252,9 +263,40 @@ llama_token llama_sampling_sample( } } + if (ctx_sampling->grammar != NULL && !is_resampling) { + // Create an array with a single token data element for the sampled id + llama_token_data single_token_data = {id, logits[id], 0.0f}; + llama_token_data_array single_token_data_array = { &single_token_data, 1, false }; + + // Apply grammar constraints to the single token + llama_sample_grammar(ctx_main, &single_token_data_array, ctx_sampling->grammar); + + // Check if the token is valid according to the grammar by seeing if its logit has been set to -INFINITY + bool is_valid = single_token_data_array.data[0].logit != -INFINITY; + + // If the token is not valid according to the grammar, perform resampling + if (!is_valid) { + LOG("Resampling because token %d: '%s' does not meet grammar rules\n", id, llama_token_to_piece(ctx_main, id).c_str()); + + // Restore logits from the copy + std::copy(original_logits.begin(), original_logits.end(), logits); + + return llama_sampling_sample_impl(ctx_sampling, ctx_main, ctx_cfg, idx, true); // Pass true for is_resampling + } + } + return id; } +llama_token llama_sampling_sample( + struct llama_sampling_context * ctx_sampling, + struct llama_context * ctx_main, + struct llama_context * ctx_cfg, + const int idx) { + // Call the implementation function with is_resampling set to false by default + return llama_sampling_sample_impl(ctx_sampling, ctx_main, ctx_cfg, idx, false); +} + void 
llama_sampling_accept( struct llama_sampling_context * ctx_sampling, struct llama_context * ctx_main, From 6123979952385847d8348e295d77d6e01da8aa84 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Sat, 23 Dec 2023 09:31:49 +0000 Subject: [PATCH 294/859] server : allow to specify custom prompt for penalty calculation (#3727) --- common/sampling.cpp | 8 ++++--- common/sampling.h | 3 +++ examples/server/README.md | 2 ++ examples/server/server.cpp | 44 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 54 insertions(+), 3 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index 5b15204be..8e45909f1 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -203,12 +203,14 @@ static llama_token llama_sampling_sample_impl( } // apply penalties - if (!prev.empty()) { + const auto& penalty_tokens = params.use_penalty_prompt_tokens ? params.penalty_prompt_tokens : prev; + const int penalty_tokens_used_size = std::min((int)penalty_tokens.size(), penalty_last_n); + if (penalty_tokens_used_size) { const float nl_logit = logits[llama_token_nl(llama_get_model(ctx_main))]; llama_sample_repetition_penalties(ctx_main, &cur_p, - prev.data() + prev.size() - penalty_last_n, - penalty_last_n, penalty_repeat, penalty_freq, penalty_present); + penalty_tokens.data() + penalty_tokens.size() - penalty_tokens_used_size, + penalty_tokens_used_size, penalty_repeat, penalty_freq, penalty_present); if (!penalize_nl) { for (size_t idx = 0; idx < cur_p.size; idx++) { diff --git a/common/sampling.h b/common/sampling.h index fdfa9eed1..f16ef97e3 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -36,6 +36,9 @@ typedef struct llama_sampling_params { float cfg_scale = 1.f; // how strong is guidance std::unordered_map logit_bias; // logit bias for specific tokens + + std::vector penalty_prompt_tokens; + bool use_penalty_prompt_tokens = false; } llama_sampling_params; // general sampler context diff --git a/examples/server/README.md b/examples/server/README.md index 0751b9612..f1e586a1c 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -148,6 +148,8 @@ node index.js `frequency_penalty`: Repeat alpha frequency penalty (default: 0.0, 0.0 = disabled); + `penalty_prompt`: This will replace the `prompt` for the purpose of the penalty evaluation. Can be either `null`, a string or an array of numbers representing tokens (default: `null` = use the original `prompt`). + `mirostat`: Enable Mirostat sampling, controlling perplexity during text generation (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0). `mirostat_tau`: Set the Mirostat target entropy, parameter tau (default: 5.0). 
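Before the server-side implementation below, a hedged example of a request body that exercises the new field, built with the nlohmann::json header the server example already ships; the values are illustrative:

```cpp
// illustrative body for POST /completion using penalty_prompt
#include "json.hpp" // nlohmann::json, vendored by the server example
#include <cstdio>

using json = nlohmann::json;

int main() {
    json req = {
        {"prompt",         "Once upon a time"},
        {"n_predict",      64},
        {"repeat_penalty", 1.2},
        // repetition penalties are computed against this text instead of
        // the tokens generated so far:
        {"penalty_prompt", "Once upon a time there was a"}
    };
    std::printf("%s\n", req.dump(2).c_str());
    return 0;
}
```

As documented above, `penalty_prompt` may also be an array of token ids, and the default `null` keeps the old behavior of penalizing against the original `prompt`.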
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 04038530f..72dfe452c 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -761,6 +761,42 @@ struct llama_server_context slot->prompt = ""; } + slot->sparams.penalty_prompt_tokens.clear(); + slot->sparams.use_penalty_prompt_tokens = false; + const auto &penalty_prompt = data.find("penalty_prompt"); + if (penalty_prompt != data.end()) + { + if (penalty_prompt->is_string()) + { + const auto penalty_prompt_string = penalty_prompt->get(); + auto penalty_tokens = llama_tokenize(model, penalty_prompt_string, false); + slot->sparams.penalty_prompt_tokens.swap(penalty_tokens); + if (slot->params.n_predict > 0) + { + slot->sparams.penalty_prompt_tokens.reserve(slot->sparams.penalty_prompt_tokens.size() + slot->params.n_predict); + } + slot->sparams.use_penalty_prompt_tokens = true; + } + else if (penalty_prompt->is_array()) + { + const auto n_tokens = penalty_prompt->size(); + slot->sparams.penalty_prompt_tokens.reserve(n_tokens + std::max(0, slot->params.n_predict)); + const int n_vocab = llama_n_vocab(model); + for (const auto &penalty_token : *penalty_prompt) + { + if (penalty_token.is_number_integer()) + { + const auto tok = penalty_token.get(); + if (tok >= 0 && tok < n_vocab) + { + slot->sparams.penalty_prompt_tokens.push_back(tok); + } + } + } + slot->sparams.use_penalty_prompt_tokens = true; + } + } + slot->sparams.logit_bias.clear(); if (json_value(data, "ignore_eos", false)) @@ -992,6 +1028,12 @@ struct llama_server_context slot.generated_text += token_str; slot.has_next_token = true; + if (slot.ctx_sampling->params.use_penalty_prompt_tokens && result.tok != -1) + { + // we can change penalty_prompt_tokens because it is always created from scratch each request + slot.ctx_sampling->params.penalty_prompt_tokens.push_back(result.tok); + } + // check if there is incomplete UTF-8 character at the end bool incomplete = false; for (unsigned i = 1; i < 5 && i <= slot.generated_text.size(); ++i) @@ -1183,6 +1225,8 @@ struct llama_server_context {"repeat_penalty", slot.sparams.penalty_repeat}, {"presence_penalty", slot.sparams.penalty_present}, {"frequency_penalty", slot.sparams.penalty_freq}, + {"penalty_prompt_tokens", slot.sparams.penalty_prompt_tokens}, + {"use_penalty_prompt_tokens", slot.sparams.use_penalty_prompt_tokens}, {"mirostat", slot.sparams.mirostat}, {"mirostat_tau", slot.sparams.mirostat_tau}, {"mirostat_eta", slot.sparams.mirostat_eta}, From 925e5584a058afb612f9c20bc472c130f5d0f891 Mon Sep 17 00:00:00 2001 From: Samuel Maynard Date: Sat, 23 Dec 2023 11:35:55 +0200 Subject: [PATCH 295/859] ci(docker): fix tags in "Build and push docker image (tagged)" (#4603) --- .github/workflows/docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 7f4de50ea..87904b75e 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -98,5 +98,5 @@ jobs: context: . 
push: ${{ github.event_name == 'push' }} platforms: ${{ matrix.config.platforms }} - tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}" , "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}-${{ steps.tag.outputs.name }}" + tags: "ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }},ghcr.io/${{ github.repository_owner }}/llama.cpp:${{ matrix.config.tag }}-${{ steps.tag.outputs.name }}" file: ${{ matrix.config.dockerfile }} From 708e179e8562c2604240df95a2241dea17fd808b Mon Sep 17 00:00:00 2001 From: slaren Date: Sat, 23 Dec 2023 16:10:51 +0100 Subject: [PATCH 296/859] fallback to CPU buffer if host buffer alloc fails (#4610) --- ggml-cuda.cu | 11 ++++++----- llama.cpp | 16 +++++++++++----- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 490081cac..f9830328b 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6729,8 +6729,7 @@ void * ggml_cuda_host_malloc(size_t size) { void * ptr = nullptr; cudaError_t err = cudaMallocHost((void **) &ptr, size); if (err != cudaSuccess) { - // The allocation error can be bypassed. A null ptr will assigned out of this function. - // This can fixed the OOM error in WSL. + // clear the error cudaGetLastError(); fprintf(stderr, "WARNING: failed to allocate %.2f MB of pinned memory: %s\n", size/1024.0/1024.0, cudaGetErrorString(err)); @@ -9674,12 +9673,14 @@ ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { // host buffer type static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { - CUDA_CHECK(cudaFreeHost(buffer->context)); + ggml_cuda_host_free(buffer->context); } static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { - void * ptr; - CUDA_CHECK(cudaMallocHost(&ptr, size)); + void * ptr = ggml_cuda_host_malloc(size); + if (ptr == nullptr) { + return nullptr; + } // FIXME: this is a hack to avoid having to implement a new buffer type ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); diff --git a/llama.cpp b/llama.cpp index 4e4495739..5699a0fcf 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1177,21 +1177,27 @@ static std::string llama_token_to_piece(const struct llama_context * ctx, llama_ } static ggml_backend_buffer_type_t llama_default_buffer_type(int n_gpu_layers) { + ggml_backend_buffer_type_t buft = nullptr; + #ifdef GGML_USE_METAL if (n_gpu_layers > 0) { - return ggml_backend_metal_buffer_type(); + buft = ggml_backend_metal_buffer_type(); } #elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (n_gpu_layers > 0) { - return ggml_backend_cuda_buffer_type(0); + buft = ggml_backend_cuda_buffer_type(0); } #elif defined(GGML_USE_CUBLAS) - return ggml_backend_cuda_host_buffer_type(); + buft = ggml_backend_cuda_host_buffer_type(); #elif defined(GGML_USE_CPU_HBM) - return ggml_backend_cpu_hbm_buffer_type(); + buft = ggml_backend_cpu_hbm_buffer_type(); #endif - return ggml_backend_cpu_buffer_type(); + if (buft == nullptr) { + buft = ggml_backend_cpu_buffer_type(); + } + + return buft; GGML_UNUSED(n_gpu_layers); } From 5bf3953d7e9831ea22b0bc017ce97409b801ccf1 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 24 Dec 2023 14:34:22 +0100 Subject: [PATCH 297/859] cuda : improve cuda pool efficiency using virtual memory (#4606) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * cuda : improve cuda pool efficiency using virtual memory * fix mixtral * fix 
cmake build * check for vmm support, disable for hip ggml-ci * fix hip build * clarify granularity * move all caps to g_device_caps * refactor error checking * add cuda_pool_alloc, refactor most pool allocations ggml-ci * fix hip build * CUBLAS_TF32_TENSOR_OP_MATH is not a macro * more hip crap * llama : fix msvc warnings * ggml : fix msvc warnings * minor * minor * cuda : fallback to CPU on host buffer alloc fail * Update ggml-cuda.cu Co-authored-by: Johannes Gäßler * Update ggml-cuda.cu Co-authored-by: Johannes Gäßler * ensure allocations are always aligned * act_size -> actual_size --------- Co-authored-by: Johannes Gäßler --- CMakeLists.txt | 2 + Makefile | 6 +- ggml-backend.c | 16 +- ggml-cuda.cu | 499 +++++++++++++++++++++++++++---------------- ggml.c | 2 +- ggml.h | 2 + llama.cpp | 6 +- tests/test-grad0.cpp | 3 - 8 files changed, 328 insertions(+), 208 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 6fc6508c5..545aab267 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -302,6 +302,8 @@ if (LLAMA_CUBLAS) set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cudart CUDA::cublas CUDA::cublasLt) endif() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} CUDA::cuda_driver) + if (NOT DEFINED CMAKE_CUDA_ARCHITECTURES) # 52 == lowest CUDA 12 standard # 60 == f16 CUDA intrinsics diff --git a/Makefile b/Makefile index cb5a4e948..28c6d79bc 100644 --- a/Makefile +++ b/Makefile @@ -367,17 +367,15 @@ endif # LLAMA_BLIS ifdef LLAMA_CUBLAS MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include -I/usr/local/cuda/targets/aarch64-linux/include - MK_LDFLAGS += -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib + MK_LDFLAGS += -lcuda -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib -L/usr/lib/wsl/lib OBJS += ggml-cuda.o MK_NVCCFLAGS = -use_fast_math ifndef JETSON_EOL_MODULE_DETECT MK_NVCCFLAGS += --forward-unknown-to-host-compiler endif # JETSON_EOL_MODULE_DETECT - ifdef LLAMA_DEBUG MK_NVCCFLAGS += -lineinfo -endif - +endif # LLAMA_DEBUG ifdef LLAMA_CUDA_NVCC NVCC = $(LLAMA_CUDA_NVCC) else diff --git a/ggml-backend.c b/ggml-backend.c index 0c8c9ec43..526ce732b 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -297,7 +297,7 @@ static void ggml_backend_registry_init(void) { void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data) { GGML_ASSERT(ggml_backend_registry_count < GGML_MAX_BACKENDS_REG); - int id = ggml_backend_registry_count; + size_t id = ggml_backend_registry_count; ggml_backend_registry[id] = (struct ggml_backend_reg) { /* .name = */ {0}, @@ -330,6 +330,8 @@ size_t ggml_backend_reg_find_by_name(const char * name) { return i; } } + + // not found return SIZE_MAX; } @@ -340,15 +342,15 @@ ggml_backend_t ggml_backend_reg_init_backend_from_str(const char * backend_str) const char * params = strchr(backend_str, ':'); char backend_name[128]; if (params == NULL) { - strcpy(backend_name, backend_str); + snprintf(backend_name, sizeof(backend_name), "%s", backend_str); params = ""; } else { - strncpy(backend_name, backend_str, params - backend_str); - backend_name[params - backend_str] = '\0'; + snprintf(backend_name, sizeof(backend_name), "%.*s", (int)(params - backend_str), 
backend_str); params++; } size_t backend_i = ggml_backend_reg_find_by_name(backend_name); + if (backend_i == SIZE_MAX) { fprintf(stderr, "%s: backend %s not found\n", __func__, backend_name); return NULL; @@ -396,18 +398,12 @@ static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { } static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - memcpy((char *)tensor->data + offset, data, size); GGML_UNUSED(buffer); } static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - memcpy(data, (const char *)tensor->data + offset, size); GGML_UNUSED(buffer); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f9830328b..ac3b3c14d 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -86,17 +86,28 @@ #define cudaStream_t hipStream_t #define cudaSuccess hipSuccess #define __trap abort +#define CUBLAS_STATUS_SUCCESS HIPBLAS_STATUS_SUCCESS +#define CUBLAS_STATUS_NOT_INITIALIZED HIPBLAS_STATUS_NOT_INITIALIZED +#define CUBLAS_STATUS_ALLOC_FAILED HIPBLAS_STATUS_ALLOC_FAILED +#define CUBLAS_STATUS_INVALID_VALUE HIPBLAS_STATUS_INVALID_VALUE +#define CUBLAS_STATUS_ARCH_MISMATCH HIPBLAS_STATUS_ARCH_MISMATCH +#define CUBLAS_STATUS_MAPPING_ERROR HIPBLAS_STATUS_MAPPING_ERROR +#define CUBLAS_STATUS_EXECUTION_FAILED HIPBLAS_STATUS_EXECUTION_FAILED +#define CUBLAS_STATUS_INTERNAL_ERROR HIPBLAS_STATUS_INTERNAL_ERROR +#define CUBLAS_STATUS_NOT_SUPPORTED HIPBLAS_STATUS_NOT_SUPPORTED #else #include +#include #include #include -// CUDA 10.2 does not have these macro definitions. 
-#ifndef CUBLAS_TF32_TENSOR_OP_MATH + +#if CUDART_VERSION < 11020 #define CUBLAS_TF32_TENSOR_OP_MATH CUBLAS_TENSOR_OP_MATH #define CUBLAS_COMPUTE_16F CUDA_R_16F #define CUBLAS_COMPUTE_32F CUDA_R_32F #define cublasComputeType_t cudaDataType_t -#endif +#endif // CUDART_VERSION < 11020 + #endif // defined(GGML_USE_HIPBLAS) #include "ggml-cuda.h" @@ -200,45 +211,45 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); -#define CUDA_CHECK(err) \ - do { \ - cudaError_t err_ = (err); \ - if (err_ != cudaSuccess) { \ - int id; \ - cudaGetDevice(&id); \ - fprintf(stderr, "\nCUDA error %d at %s:%d: %s\n", err_, __FILE__, __LINE__, \ - cudaGetErrorString(err_)); \ - fprintf(stderr, "current device: %d\n", id); \ - GGML_ASSERT(!"CUDA error"); \ - } \ - } while (0) - #if CUDART_VERSION >= 12000 -#define CUBLAS_CHECK(err) \ - do { \ - cublasStatus_t err_ = (err); \ - if (err_ != CUBLAS_STATUS_SUCCESS) { \ - int id; \ - cudaGetDevice(&id); \ - fprintf(stderr, "\ncuBLAS error %d at %s:%d: %s\n", \ - err_, __FILE__, __LINE__, cublasGetStatusString(err_)); \ - fprintf(stderr, "current device: %d\n", id); \ - GGML_ASSERT(!"cuBLAS error"); \ - } \ - } while (0) + static const char * cublas_get_error_str(const cublasStatus_t err) { + return cublasGetStatusString(err); + } #else -#define CUBLAS_CHECK(err) \ - do { \ - cublasStatus_t err_ = (err); \ - if (err_ != CUBLAS_STATUS_SUCCESS) { \ - int id; \ - cudaGetDevice(&id); \ - fprintf(stderr, "\ncuBLAS error %d at %s:%d\n", err_, __FILE__, __LINE__); \ - fprintf(stderr, "current device: %d\n", id); \ - GGML_ASSERT(!"cuBLAS error"); \ - } \ - } while (0) -#endif // CUDART_VERSION >= 11 + static const char * cublas_get_error_str(const cublasStatus_t err) { + switch (err) { + case CUBLAS_STATUS_SUCCESS: return "CUBLAS_STATUS_SUCCESS"; + case CUBLAS_STATUS_NOT_INITIALIZED: return "CUBLAS_STATUS_NOT_INITIALIZED"; + case CUBLAS_STATUS_ALLOC_FAILED: return "CUBLAS_STATUS_ALLOC_FAILED"; + case CUBLAS_STATUS_INVALID_VALUE: return "CUBLAS_STATUS_INVALID_VALUE"; + case CUBLAS_STATUS_ARCH_MISMATCH: return "CUBLAS_STATUS_ARCH_MISMATCH"; + case CUBLAS_STATUS_MAPPING_ERROR: return "CUBLAS_STATUS_MAPPING_ERROR"; + case CUBLAS_STATUS_EXECUTION_FAILED: return "CUBLAS_STATUS_EXECUTION_FAILED"; + case CUBLAS_STATUS_INTERNAL_ERROR: return "CUBLAS_STATUS_INTERNAL_ERROR"; + case CUBLAS_STATUS_NOT_SUPPORTED: return "CUBLAS_STATUS_NOT_SUPPORTED"; + default: return "unknown error"; + } + } +#endif // CUDART_VERSION >= 12000 + +[[noreturn]] +static void ggml_cuda_error(const char * stmt, const char * func, const char * file, const int line, const char * msg) { + fprintf(stderr, "CUDA error: %s: %s\n", stmt, msg); + fprintf(stderr, " in function %s at %s:%d\n", func, file, line); + GGML_ASSERT(!"CUDA error"); +} + +#define CUDA_CHECK(err) do { auto err_ = (err); if (err_ != cudaSuccess) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cudaGetErrorString(err_)); } while (0) +#define CUBLAS_CHECK(err) do { auto err_ = (err); if (err_ != CUBLAS_STATUS_SUCCESS) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cublas_get_error_str(err_)); } while (0) + +#if !defined(GGML_USE_HIPBLAS) +static const char * cu_get_error_str(CUresult err) { + const char * err_str; + cuGetErrorString(err, &err_str); + return err_str; +} +#define CU_CHECK(err) do { auto err_ = (err); if (err_ != CUDA_SUCCESS) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cu_get_error_str(err_)); } while (0) +#endif #if 
CUDART_VERSION >= 11100 #define GGML_CUDA_ASSUME(x) __builtin_assume(x) @@ -516,9 +527,17 @@ inline cudaError_t ggml_cuda_set_device(const int device) { static int g_device_count = -1; static int g_main_device = 0; -static int g_compute_capabilities[GGML_CUDA_MAX_DEVICES]; static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0}; +struct cuda_device_capabilities { + int cc; // compute capability + bool vmm; // virtual memory support + size_t vmm_granularity; // granularity of virtual memory +}; + +static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, false, 0} }; + + static void * g_scratch_buffer = nullptr; static size_t g_scratch_size = 0; // disabled by default static size_t g_scratch_offset = 0; @@ -5875,7 +5894,7 @@ static void ggml_mul_mat_q4_0_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -5920,7 +5939,7 @@ static void ggml_mul_mat_q4_1_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -5965,7 +5984,7 @@ static void ggml_mul_mat_q5_0_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6010,7 +6029,7 @@ static void ggml_mul_mat_q5_1_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6055,7 +6074,7 @@ static void ggml_mul_mat_q8_0_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6100,7 +6119,7 @@ static void ggml_mul_mat_q2_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6147,7 +6166,7 @@ static void ggml_mul_mat_q3_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6193,7 +6212,7 @@ static void ggml_mul_mat_q4_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6238,7 +6257,7 @@ static void ggml_mul_mat_q5_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= CC_RDNA2) { @@ -6283,7 +6302,7 @@ static void ggml_mul_mat_q6_K_q8_1_cuda( int id; CUDA_CHECK(cudaGetDevice(&id)); - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; int mmq_x, mmq_y, nwarps; if (compute_capability >= 
CC_RDNA2) { @@ -6543,21 +6562,24 @@ struct scoped_spin_lock { scoped_spin_lock& operator=(const scoped_spin_lock&) = delete; }; +static std::atomic_flag g_cuda_pool_lock = ATOMIC_FLAG_INIT; + +// #define DEBUG_CUDA_MALLOC struct cuda_buffer { void * ptr = nullptr; size_t size = 0; }; static cuda_buffer g_cuda_buffer_pool[GGML_CUDA_MAX_DEVICES][MAX_CUDA_BUFFERS]; -static std::atomic_flag g_cuda_pool_lock = ATOMIC_FLAG_INIT; +static size_t g_cuda_pool_size[GGML_CUDA_MAX_DEVICES] = {0}; -static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { +static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { scoped_spin_lock lock(g_cuda_pool_lock); int id; CUDA_CHECK(cudaGetDevice(&id)); #ifdef DEBUG_CUDA_MALLOC int nnz = 0; - size_t max_size = 0, tot_size = 0; + size_t max_size = 0; #endif size_t best_diff = 1ull << 36; int ibest = -1; @@ -6566,7 +6588,6 @@ static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { if (b.ptr != nullptr) { #ifdef DEBUG_CUDA_MALLOC ++nnz; - tot_size += b.size; if (b.size > max_size) max_size = b.size; #endif if (b.size >= size) { @@ -6593,19 +6614,20 @@ static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { b.size = 0; return ptr; } -#ifdef DEBUG_CUDA_MALLOC - fprintf(stderr, "%s: %d buffers, max_size = %u MB, tot_size = %u MB, requested %u MB\n", __func__, nnz, - (uint32_t)(max_size/1024/1024), (uint32_t)(tot_size/1024/1024), (uint32_t)(size/1024/1024)); -#endif void * ptr; size_t look_ahead_size = (size_t) (1.05 * size); look_ahead_size = 256 * ((look_ahead_size + 255)/256); CUDA_CHECK(cudaMalloc((void **) &ptr, look_ahead_size)); *actual_size = look_ahead_size; + g_cuda_pool_size[id] += look_ahead_size; +#ifdef DEBUG_CUDA_MALLOC + fprintf(stderr, "%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, id, nnz, + (uint32_t)(max_size/1024/1024), (uint32_t)(g_cuda_pool_size[id]/1024/1024), (uint32_t)(size/1024/1024)); +#endif return ptr; } -static void ggml_cuda_pool_free(void * ptr, size_t size) { +static void ggml_cuda_pool_free_leg(void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); int id; CUDA_CHECK(cudaGetDevice(&id)); @@ -6620,8 +6642,152 @@ static void ggml_cuda_pool_free(void * ptr, size_t size) { } fprintf(stderr, "WARNING: cuda buffer pool full, increase MAX_CUDA_BUFFERS\n"); CUDA_CHECK(cudaFree(ptr)); + g_cuda_pool_size[id] -= size; } +#if !defined(GGML_USE_HIPBLAS) +// pool with virtual memory +static std::vector g_cuda_pool_handles[GGML_CUDA_MAX_DEVICES]; +static CUdeviceptr g_cuda_pool_addr[GGML_CUDA_MAX_DEVICES] = {0}; +static size_t g_cuda_pool_used[GGML_CUDA_MAX_DEVICES] = {0}; +static const size_t CUDA_POOL_VMM_MAX_SIZE = 1ull << 36; // 64 GB + +static void * ggml_cuda_pool_malloc_vmm(size_t size, size_t * actual_size) { + scoped_spin_lock lock(g_cuda_pool_lock); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + + // round up the allocation size to the alignment to ensure that all allocations are aligned for all data types + const size_t alignment = 128; + size = alignment * ((size + alignment - 1) / alignment); + + size_t avail = g_cuda_pool_size[id] - g_cuda_pool_used[id]; + + if (size > avail) { + // round up to the next multiple of the granularity + size_t reserve_size = size - avail; + const size_t granularity = g_device_caps[id].vmm_granularity; + reserve_size = granularity * ((reserve_size + granularity - 1) / granularity); + + GGML_ASSERT(g_cuda_pool_size[id] + reserve_size <= CUDA_POOL_VMM_MAX_SIZE); + + // allocate more physical 
memory + CUmemAllocationProp prop = {}; + prop.type = CU_MEM_ALLOCATION_TYPE_PINNED; + prop.location.type = CU_MEM_LOCATION_TYPE_DEVICE; + prop.location.id = id; + CUmemGenericAllocationHandle handle; + CU_CHECK(cuMemCreate(&handle, reserve_size, &prop, 0)); + + // reserve virtual address space (if not already reserved) + if (g_cuda_pool_addr[id] == 0) { + CU_CHECK(cuMemAddressReserve(&g_cuda_pool_addr[id], CUDA_POOL_VMM_MAX_SIZE, 0, 0, 0)); + } + + // map at the end of the pool + CU_CHECK(cuMemMap(g_cuda_pool_addr[id] + g_cuda_pool_size[id], reserve_size, 0, handle, 0)); + + // set access + CUmemAccessDesc access = {}; + access.location.type = CU_MEM_LOCATION_TYPE_DEVICE; + access.location.id = id; + access.flags = CU_MEM_ACCESS_FLAGS_PROT_READWRITE; + CU_CHECK(cuMemSetAccess(g_cuda_pool_addr[id] + g_cuda_pool_size[id], reserve_size, &access, 1)); + + // add to the pool + g_cuda_pool_handles[id].push_back(handle); + g_cuda_pool_size[id] += reserve_size; + + //printf("cuda pool[%d]: size increased to %llu MB (reserved %llu MB)\n", + // id, (unsigned long long) (g_cuda_pool_size[id]/1024/1024), + // (unsigned long long) (reserve_size/1024/1024)); + } + + GGML_ASSERT(g_cuda_pool_addr[id] != 0); + + void * ptr = (void *) (g_cuda_pool_addr[id] + g_cuda_pool_used[id]); + *actual_size = size; + g_cuda_pool_used[id] += size; + +#ifdef DEBUG_CUDA_MALLOC + printf("cuda pool[%d]: allocated %llu bytes at %llx [%s]\n", id, (unsigned long long) size, ptr); +#endif + + return ptr; +} + +static void ggml_cuda_pool_free_vmm(void * ptr, size_t size) { + scoped_spin_lock lock(g_cuda_pool_lock); + int id; + CUDA_CHECK(cudaGetDevice(&id)); + +#ifdef DEBUG_CUDA_MALLOC + printf("cuda pool[%d]: freed %llu bytes at %llx\n", id, (unsigned long long) size, ptr); +#endif + + g_cuda_pool_used[id] -= size; + + // all deallocations must be in reverse order of the allocations + GGML_ASSERT(ptr == (void *) (g_cuda_pool_addr[id] + g_cuda_pool_used[id])); +} + +static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { + int id; + CUDA_CHECK(cudaGetDevice(&id)); + if (g_device_caps[id].vmm) { + return ggml_cuda_pool_malloc_vmm(size, actual_size); + } else { + return ggml_cuda_pool_malloc_leg(size, actual_size); + } +} + +static void ggml_cuda_pool_free(void * ptr, size_t size) { + int id; + CUDA_CHECK(cudaGetDevice(&id)); + if (g_device_caps[id].vmm) { + ggml_cuda_pool_free_vmm(ptr, size); + } else { + ggml_cuda_pool_free_leg(ptr, size); + } +} +#else +#define ggml_cuda_pool_malloc ggml_cuda_pool_malloc_leg +#define ggml_cuda_pool_free ggml_cuda_pool_free_leg +#endif // !defined(GGML_USE_HIPBLAS) + +template +struct cuda_pool_alloc { + T * ptr = nullptr; + size_t actual_size = 0; + + // size is in number of elements + T * alloc(size_t size) { + GGML_ASSERT(ptr == nullptr); + ptr = (T *) ggml_cuda_pool_malloc(size * sizeof(T), &this->actual_size); + return ptr; + } + + cuda_pool_alloc(size_t size) { + alloc(size); + } + + ~cuda_pool_alloc() { + if (ptr != nullptr) { + ggml_cuda_pool_free(ptr, actual_size); + } + } + + T * get() { + return ptr; + } + + cuda_pool_alloc() = default; + cuda_pool_alloc(const cuda_pool_alloc &) = delete; + cuda_pool_alloc(cuda_pool_alloc &&) = delete; + cuda_pool_alloc& operator=(const cuda_pool_alloc &) = delete; + cuda_pool_alloc& operator=(cuda_pool_alloc &&) = delete; +}; + static bool g_cublas_loaded = false; bool ggml_cublas_loaded(void) { @@ -6660,16 +6826,33 @@ void ggml_init_cublas() { #endif fprintf(stderr, "%s: found %d " GGML_CUDA_NAME " devices:\n", __func__, 
g_device_count); for (int id = 0; id < g_device_count; ++id) { + int device_vmm = 0; + +#if !defined(GGML_USE_HIPBLAS) + CUdevice device; + CU_CHECK(cuDeviceGet(&device, id)); + CU_CHECK(cuDeviceGetAttribute(&device_vmm, CU_DEVICE_ATTRIBUTE_VIRTUAL_MEMORY_MANAGEMENT_SUPPORTED, device)); + + if (device_vmm) { + CUmemAllocationProp alloc_prop = {}; + alloc_prop.type = CU_MEM_ALLOCATION_TYPE_PINNED; + alloc_prop.location.type = CU_MEM_LOCATION_TYPE_DEVICE; + alloc_prop.location.id = id; + CU_CHECK(cuMemGetAllocationGranularity(&g_device_caps[id].vmm_granularity, &alloc_prop, CU_MEM_ALLOC_GRANULARITY_MINIMUM)); + } +#endif // !defined(GGML_USE_HIPBLAS) + g_device_caps[id].vmm = !!device_vmm; + cudaDeviceProp prop; CUDA_CHECK(cudaGetDeviceProperties(&prop, id)); - fprintf(stderr, " Device %d: %s, compute capability %d.%d\n", id, prop.name, prop.major, prop.minor); + fprintf(stderr, " Device %d: %s, compute capability %d.%d, VMM: %s\n", id, prop.name, prop.major, prop.minor, device_vmm ? "yes" : "no"); g_tensor_split[id] = total_vram; total_vram += prop.totalGlobalMem; #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) - g_compute_capabilities[id] = 100*prop.major + 10*prop.minor + CC_OFFSET_AMD; + g_device_caps[id].cc = 100*prop.major + 10*prop.minor + CC_OFFSET_AMD; #else - g_compute_capabilities[id] = 100*prop.major + 10*prop.minor; + g_device_caps[id].cc = 100*prop.major + 10*prop.minor; #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) } for (int id = 0; id < g_device_count; ++id) { @@ -7178,11 +7361,11 @@ static int64_t get_row_rounding(ggml_type type) { int64_t max_compute_capability = INT_MIN; for (int64_t id = 0; id < g_device_count; ++id) { if (g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { - if (min_compute_capability > g_compute_capabilities[id]) { - min_compute_capability = g_compute_capabilities[id]; + if (min_compute_capability > g_device_caps[id].cc) { + min_compute_capability = g_device_caps[id].cc; } - if (max_compute_capability < g_compute_capabilities[id]) { - max_compute_capability = g_compute_capabilities[id]; + if (max_compute_capability < g_device_caps[id].cc) { + max_compute_capability = g_device_caps[id].cc; } } } @@ -7297,8 +7480,8 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( // on some GPUs it is faster to convert src1 to half and to use half precision intrinsics #ifdef GGML_CUDA_F16 - size_t ash; - dfloat * src1_dfloat = nullptr; // dfloat == half + cuda_pool_alloc src1_dfloat_a; + half * src1_dfloat = nullptr; // dfloat == half bool src1_convert_f16 = src0->type == GGML_TYPE_Q4_0 || src0->type == GGML_TYPE_Q4_1 || @@ -7306,7 +7489,7 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( src0->type == GGML_TYPE_Q8_0 || src0->type == GGML_TYPE_F16; if (src1_convert_f16) { - src1_dfloat = (half *) ggml_cuda_pool_malloc(ne00*sizeof(half), &ash); + src1_dfloat = src1_dfloat_a.alloc(ne00); ggml_cpy_f32_f16_cuda((const char *) src1_ddf_i, (char *) src1_dfloat, ne00, ne00, 1, sizeof(float), 0, 0, ne00, 1, sizeof(half), 0, 0, stream); @@ -7354,12 +7537,6 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( break; } -#ifdef GGML_CUDA_F16 - if (src1_convert_f16) { - ggml_cuda_pool_free(src1_dfloat, ash); - } -#endif // GGML_CUDA_F16 - (void) src1; (void) dst; (void) src1_ddq_i; @@ -7390,33 +7567,30 @@ inline void ggml_cuda_op_mul_mat_cublas( // ldc == nrows of the matrix that cuBLAS writes into int ldc = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? 
ne0 : row_diff; - const int compute_capability = g_compute_capabilities[id]; + const int compute_capability = g_device_caps[id].cc; if (compute_capability >= CC_VOLTA && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { // convert src0 and src1 to fp16, multiply as fp16, convert dst to fp32 - half * src0_as_f16 = nullptr; - size_t src0_as = 0; + cuda_pool_alloc src0_as_f16; if (src0->type != GGML_TYPE_F16) { const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src0->type); GGML_ASSERT(to_fp16_cuda != nullptr); size_t ne = row_diff*ne00; - src0_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src0_as); - to_fp16_cuda(src0_dd_i, src0_as_f16, ne, stream); + src0_as_f16.alloc(ne); + to_fp16_cuda(src0_dd_i, src0_as_f16.get(), ne, stream); } - const half * src0_ptr = src0->type == GGML_TYPE_F16 ? (const half *) src0_dd_i : src0_as_f16; + const half * src0_ptr = src0->type == GGML_TYPE_F16 ? (const half *) src0_dd_i : src0_as_f16.get(); - half * src1_as_f16 = nullptr; - size_t src1_as = 0; + cuda_pool_alloc src1_as_f16; if (src1->type != GGML_TYPE_F16) { const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); GGML_ASSERT(to_fp16_cuda != nullptr); size_t ne = src1_ncols*ne10; - src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &src1_as); - to_fp16_cuda(src1_ddf_i, src1_as_f16, ne, stream); + src1_as_f16.alloc(ne); + to_fp16_cuda(src1_ddf_i, src1_as_f16.get(), ne, stream); } - const half * src1_ptr = src1->type == GGML_TYPE_F16 ? (const half *) src1_ddf_i : src1_as_f16; - size_t dst_as = 0; - half * dst_f16 = (half *) ggml_cuda_pool_malloc(row_diff*src1_ncols * sizeof(half), &dst_as); + const half * src1_ptr = src1->type == GGML_TYPE_F16 ? (const half *) src1_ddf_i : src1_as_f16.get(); + cuda_pool_alloc dst_f16(row_diff*src1_ncols); const half alpha_f16 = 1.0f; const half beta_f16 = 0.0f; @@ -7425,36 +7599,25 @@ inline void ggml_cuda_op_mul_mat_cublas( CUBLAS_CHECK( cublasGemmEx(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N, row_diff, src1_ncols, ne10, - &alpha_f16, src0_ptr, CUDA_R_16F, ne00, - src1_ptr, CUDA_R_16F, ne10, - &beta_f16, dst_f16, CUDA_R_16F, ldc, + &alpha_f16, src0_ptr, CUDA_R_16F, ne00, + src1_ptr, CUDA_R_16F, ne10, + &beta_f16, dst_f16.get(), CUDA_R_16F, ldc, CUBLAS_COMPUTE_16F, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); - to_fp32_cuda(dst_f16, dst_dd_i, row_diff*src1_ncols, stream); - - ggml_cuda_pool_free(dst_f16, dst_as); - - if (src0_as != 0) { - ggml_cuda_pool_free(src0_as_f16, src0_as); - } - - if (src1_as != 0) { - ggml_cuda_pool_free(src1_as_f16, src1_as); - } + to_fp32_cuda(dst_f16.get(), dst_dd_i, row_diff*src1_ncols, stream); } else { - float * src0_ddq_as_f32 = nullptr; - size_t src0_as = 0; + cuda_pool_alloc src0_ddq_as_f32; if (src0->type != GGML_TYPE_F32) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(src0->type); GGML_ASSERT(to_fp32_cuda != nullptr); - src0_ddq_as_f32 = (float *) ggml_cuda_pool_malloc(row_diff*ne00 * sizeof(float), &src0_as); // NOLINT - to_fp32_cuda(src0_dd_i, src0_ddq_as_f32, row_diff*ne00, stream); + src0_ddq_as_f32.alloc(row_diff*ne00); + to_fp32_cuda(src0_dd_i, src0_ddq_as_f32.get(), row_diff*ne00, stream); } - const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? (const float *) src0_dd_i : src0_ddq_as_f32; + const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? 
(const float *) src0_dd_i : src0_ddq_as_f32.get(); const float alpha = 1.0f; const float beta = 0.0f; @@ -7466,10 +7629,6 @@ inline void ggml_cuda_op_mul_mat_cublas( &alpha, src0_ddf_i, ne00, src1_ddf_i, ne10, &beta, dst_dd_i, ldc)); - - if (src0_as != 0) { - ggml_cuda_pool_free(src0_ddq_as_f32, src0_as); - } } (void) dst; @@ -7761,18 +7920,17 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s float * src1_ddf = nullptr; float * dst_ddf = nullptr; - // as = actual size - size_t src0_asf = 0; - size_t src1_asf = 0; - size_t dst_asf = 0; + cuda_pool_alloc src0_f; + cuda_pool_alloc src1_f; + cuda_pool_alloc dst_f; ggml_cuda_set_device(g_main_device); - const cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; + cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; if (src0_on_device) { src0_ddf = (float *) src0_extra->data_device[g_main_device]; } else { - src0_ddf = (float *) ggml_cuda_pool_malloc(ggml_nbytes(src0), &src0_asf); + src0_ddf = src0_f.alloc(ggml_nelements(src0)); CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src0_ddf, src0, 0, 0, 0, nrows0, main_stream)); } @@ -7780,14 +7938,14 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s if (src1_on_device) { src1_ddf = (float *) src1_extra->data_device[g_main_device]; } else { - src1_ddf = (float *) ggml_cuda_pool_malloc(ggml_nbytes(src1), &src1_asf); + src1_ddf = src1_f.alloc(ggml_nelements(src1)); CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src1_ddf, src1, 0, 0, 0, nrows1, main_stream)); } } if (dst_on_device) { dst_ddf = (float *) dst_extra->data_device[g_main_device]; } else { - dst_ddf = (float *) ggml_cuda_pool_malloc(ggml_nbytes(dst), &dst_asf); + dst_ddf = dst_f.alloc(ggml_nelements(dst)); } // do the computation @@ -7799,16 +7957,6 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s CUDA_CHECK(cudaMemcpyAsync(dst->data, dst_ddf, ggml_nbytes(dst), cudaMemcpyDeviceToHost, main_stream)); } - if (src0_asf > 0) { - ggml_cuda_pool_free(src0_ddf, src0_asf); - } - if (src1_asf > 0) { - ggml_cuda_pool_free(src1_ddf, src1_asf); - } - if (dst_asf > 0) { - ggml_cuda_pool_free(dst_ddf, dst_asf); - } - if (dst->backend == GGML_BACKEND_CPU) { CUDA_CHECK(cudaDeviceSynchronize()); } @@ -8122,17 +8270,17 @@ static void ggml_cuda_op_mul_mat( CUDA_CHECK(ggml_cuda_set_device(id)); // free buffers again when done - if (src0_as[id] > 0) { - ggml_cuda_pool_free(src0_dd[id], src0_as[id]); - } - if (src1_asf[id] > 0) { - ggml_cuda_pool_free(src1_ddf[id], src1_asf[id]); + if (dst_as[id] > 0) { + ggml_cuda_pool_free(dst_dd[id], dst_as[id]); } if (src1_asq[id] > 0) { ggml_cuda_pool_free(src1_ddq[id], src1_asq[id]); } - if (dst_as[id] > 0) { - ggml_cuda_pool_free(dst_dd[id], dst_as[id]); + if (src1_asf[id] > 0) { + ggml_cuda_pool_free(src1_ddf[id], src1_asf[id]); + } + if (src0_as[id] > 0) { + ggml_cuda_pool_free(src0_dd[id], src0_as[id]); } } @@ -8385,14 +8533,11 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); GGML_ASSERT(to_fp16_cuda != nullptr); - size_t src1_as = 0; - half * src1_as_f16 = (half *) ggml_cuda_pool_malloc(ne1 * sizeof(half), &src1_as); - to_fp16_cuda(src1_ddf, src1_as_f16, ne1, main_stream); + cuda_pool_alloc src1_as_f16(ne1); + to_fp16_cuda(src1_ddf, src1_as_f16.get(), ne1, main_stream); - size_t dst_as = 0; - - half * dst_f16 = nullptr; - char * dst_t = nullptr; + cuda_pool_alloc dst_f16; + char * dst_t; cublasComputeType_t cu_compute_type = 
CUBLAS_COMPUTE_16F; cudaDataType_t cu_data_type = CUDA_R_16F; @@ -8411,8 +8556,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const void * beta = &beta_f16; if (dst->op_params[0] == GGML_PREC_DEFAULT) { - dst_f16 = (half *) ggml_cuda_pool_malloc(ne * sizeof(half), &dst_as); - dst_t = (char *) dst_f16; + dst_t = (char *) dst_f16.alloc(ne); nbd2 /= sizeof(float) / sizeof(half); nbd3 /= sizeof(float) / sizeof(half); @@ -8459,9 +8603,9 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmStridedBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - alpha, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA - (const char *) src1_as_f16, CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB - beta, ( char *) dst_t, cu_data_type, ne01, dst->nb[2]/sizeof(float), // strideC + alpha, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half), src0->nb[2]/sizeof(half), // strideA + (const char *) src1_as_f16.get(), CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB + beta, ( char *) dst_t, cu_data_type, ne01, dst->nb[2]/sizeof(float), // strideC ne12*ne13, cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); @@ -8469,19 +8613,13 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const // use cublasGemmBatchedEx const int ne23 = ne12*ne13; - const void ** ptrs_src = nullptr; - void ** ptrs_dst = nullptr; - - size_t ptrs_src_s = 0; - size_t ptrs_dst_s = 0; - - ptrs_src = (const void **) ggml_cuda_pool_malloc(2*ne23*sizeof(void *), &ptrs_src_s); - ptrs_dst = ( void **) ggml_cuda_pool_malloc(1*ne23*sizeof(void *), &ptrs_dst_s); + cuda_pool_alloc ptrs_src(2*ne23); + cuda_pool_alloc< void *> ptrs_dst(1*ne23); dim3 block_dims(ne13, ne12); k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>( - src0_as_f16, src1_as_f16, dst_t, - ptrs_src, ptrs_dst, + src0_as_f16, src1_as_f16.get(), dst_t, + ptrs_src.get(), ptrs_dst.get(), ne12, ne13, ne23, nb02, nb03, @@ -8493,30 +8631,19 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - alpha, (const void **) (ptrs_src + 0*ne23), CUDA_R_16F, nb01/sizeof(half), - (const void **) (ptrs_src + 1*ne23), CUDA_R_16F, nb11/sizeof(float), - beta, ( void **) (ptrs_dst + 0*ne23), cu_data_type, ne01, + alpha, (const void **) (ptrs_src.get() + 0*ne23), CUDA_R_16F, nb01/sizeof(half), + (const void **) (ptrs_src.get() + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + beta, ( void **) (ptrs_dst.get() + 0*ne23), cu_data_type, ne01, ne23, cu_compute_type, CUBLAS_GEMM_DEFAULT_TENSOR_OP)); - - if (ptrs_src_s != 0) { - ggml_cuda_pool_free(ptrs_src, ptrs_src_s); - } - if (ptrs_dst_s != 0) { - ggml_cuda_pool_free(ptrs_dst, ptrs_dst_s); - } } #endif if (dst->op_params[0] == GGML_PREC_DEFAULT) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); - to_fp32_cuda(dst_f16, dst_ddf, ne, main_stream); - - ggml_cuda_pool_free(dst_f16, dst_as); + to_fp32_cuda(dst_f16.get(), dst_ddf, ne, main_stream); } - - ggml_cuda_pool_free(src1_as_f16, src1_as); } static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { @@ -8529,8 +8656,8 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 int64_t min_compute_capability = INT_MAX; for (int64_t id = 0; 
id < g_device_count; ++id) { - if (min_compute_capability > g_compute_capabilities[id] && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { - min_compute_capability = g_compute_capabilities[id]; + if (min_compute_capability > g_device_caps[id].cc && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { + min_compute_capability = g_device_caps[id].cc; } } @@ -8843,12 +8970,11 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_cuda_mul_mat(src0_row, &src1_row, &dst_row); } } else { - size_t as_src1, as_dst; - char * src1_contiguous = (char *) ggml_cuda_pool_malloc(sizeof(float)*ggml_nelements(src1), &as_src1); - char * dst_contiguous = (char *) ggml_cuda_pool_malloc(sizeof(float)*ggml_nelements(dst), &as_dst); + cuda_pool_alloc src1_contiguous(sizeof(float)*ggml_nelements(src1)); + cuda_pool_alloc dst_contiguous(sizeof(float)*ggml_nelements(dst)); - src1_row_extra.data_device[g_main_device] = src1_contiguous; - dst_row_extra.data_device[g_main_device] = dst_contiguous; + src1_row_extra.data_device[g_main_device] = src1_contiguous.get(); + dst_row_extra.data_device[g_main_device] = dst_contiguous.get(); const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_CPU ? cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; @@ -8868,7 +8994,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(row_id >= 0 && row_id < n_as); - CUDA_CHECK(cudaMemcpyAsync(src1_contiguous + num_src1_rows*nb11, src1_original + i01*nb11, + CUDA_CHECK(cudaMemcpyAsync(src1_contiguous.get() + num_src1_rows*nb11, src1_original + i01*nb11, nb11, src1_kind, stream)); num_src1_rows++; } @@ -8900,14 +9026,11 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s GGML_ASSERT(row_id >= 0 && row_id < n_as); - CUDA_CHECK(cudaMemcpyAsync(dst_original + i01*nb1, dst_contiguous + num_src1_rows*nb1, + CUDA_CHECK(cudaMemcpyAsync(dst_original + i01*nb1, dst_contiguous.get() + num_src1_rows*nb1, nb1, dst_kind, stream)); num_src1_rows++; } } - - ggml_cuda_pool_free(src1_contiguous, as_src1); - ggml_cuda_pool_free(dst_contiguous, as_dst); } if (dst->backend == GGML_BACKEND_CPU) { @@ -9678,8 +9801,10 @@ static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buff static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { void * ptr = ggml_cuda_host_malloc(size); + if (ptr == nullptr) { - return nullptr; + // fallback to cpu buffer + return ggml_backend_buft_alloc_buffer(ggml_backend_cpu_buffer_type(), size); } // FIXME: this is a hack to avoid having to implement a new buffer type diff --git a/ggml.c b/ggml.c index 3656422d7..73600ab05 100644 --- a/ggml.c +++ b/ggml.c @@ -19351,7 +19351,7 @@ void gguf_set_kv(struct gguf_context * ctx, struct gguf_context * src) { data[j] = ((struct gguf_str *)src->kv[i].value.arr.data)[j].data; } gguf_set_arr_str(ctx, src->kv[i].key.data, data, src->kv[i].value.arr.n); - free(data); + free((void *)data); } else if (src->kv[i].value.arr.type == GGUF_TYPE_ARRAY) { GGML_ASSERT(false && "nested arrays not supported"); } else { diff --git a/ggml.h b/ggml.h index 338f355a4..67d6bc4f1 100644 --- a/ggml.h +++ b/ggml.h @@ -255,6 +255,8 @@ #define GGML_UNREACHABLE() GGML_ASSERT(!"statement should not be reached") #elif defined(__GNUC__) #define GGML_UNREACHABLE() __builtin_unreachable() +#elif defined(_MSC_VER) +#define GGML_UNREACHABLE() __assume(0) #else #define 
GGML_UNREACHABLE() ((void) 0) #endif diff --git a/llama.cpp b/llama.cpp index 5699a0fcf..a24621539 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1281,7 +1281,7 @@ struct llama_hparams { if (this->rope_finetuned != other.rope_finetuned) return true; if (this->n_yarn_orig_ctx != other.n_yarn_orig_ctx) return true; - const float EPSILON = 1e-9; + const float EPSILON = 1e-9f; if (!is_float_close(this->f_norm_eps, other.f_norm_eps, EPSILON)) return true; if (!is_float_close(this->f_norm_rms_eps, other.f_norm_rms_eps, EPSILON)) return true; @@ -10300,7 +10300,7 @@ int llama_token_to_piece(const struct llama_model * model, llama_token token, ch std::string result = model->vocab.id_to_token[token].text; llama_unescape_whitespace(result); if (length < (int) result.length()) { - return -result.length(); + return -(int) result.length(); } memcpy(buf, result.c_str(), result.length()); return result.length(); @@ -10330,7 +10330,7 @@ int llama_token_to_piece(const struct llama_model * model, llama_token token, ch std::string result = model->vocab.id_to_token[token].text; result = llama_decode_text(result); if (length < (int) result.length()) { - return -result.length(); + return -(int) result.length(); } memcpy(buf, result.c_str(), result.length()); return result.length(); diff --git a/tests/test-grad0.cpp b/tests/test-grad0.cpp index 14914def5..8ff76c891 100644 --- a/tests/test-grad0.cpp +++ b/tests/test-grad0.cpp @@ -883,9 +883,6 @@ int main(int argc, const char ** argv) { srand(seed); const int nargs = 1; - int64_t ne2[4]; - ne2[0] = 1; - for (int ndims = 1; ndims <= 2; ++ndims) { x[0] = get_random_tensor_f32(ctx0, ndims, ne, -1.0f, 1.0f); From 753be377b69bda2d65a7e089f2b7f0c53ef3495e Mon Sep 17 00:00:00 2001 From: Shintarou Okada Date: Sun, 24 Dec 2023 22:35:49 +0900 Subject: [PATCH 298/859] llama : add PLaMo model (#3557) * add plamo mock * add tensor loading * plamo convert * update norm * able to compile * fix norm_rms_eps hparam * runnable * use inp_pos * seems ok * update kqv code * remove develop code * update README * shuffle attn_q.weight and attn_output.weight for broadcasting * remove plamo_llm_build_kqv and use llm_build_kqv * fix style * update * llama : remove obsolete KQ_scale * plamo : fix tensor names for correct GPU offload --------- Co-authored-by: Georgi Gerganov --- README.md | 1 + convert-hf-to-gguf.py | 86 +++++++++++++++- gguf-py/gguf/constants.py | 17 ++++ gguf-py/gguf/tensor_mapping.py | 37 ++++--- llama.cpp | 181 +++++++++++++++++++++++++++++++++ 5 files changed, 307 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index 649c3b333..09338d226 100644 --- a/README.md +++ b/README.md @@ -102,6 +102,7 @@ as the main playground for developing new features for the [ggml](https://github - [x] [Deepseek models](https://huggingface.co/models?search=deepseek-ai/deepseek) - [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen) - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) +- [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557) **Multimodal models:** diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index e71a96c48..303d08170 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -184,6 +184,8 @@ class Model: return MixtralModel if model_architecture == "PhiForCausalLM": return Phi2Model + if model_architecture == "PlamoForCausalLM": + return PlamoModel return Model def _is_model_safetensors(self) -> bool: @@ -225,6 +227,8 @@ class Model: return gguf.MODEL_ARCH.LLAMA if arch == "PhiForCausalLM": 
return gguf.MODEL_ARCH.PHI2 + if arch == "PlamoForCausalLM": + return gguf.MODEL_ARCH.PLAMO raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1002,11 +1006,91 @@ class Phi2Model(Model): self.gguf_writer.add_add_bos_token(False) +class PlamoModel(Model): + def set_vocab(self): + self._set_vocab_sentencepiece() + + def set_gguf_parameters(self): + hparams = self.hparams + block_count = hparams["num_hidden_layers"] + + self.gguf_writer.add_name("PLaMo") + self.gguf_writer.add_context_length(4096) # not in config.json + self.gguf_writer.add_embedding_length(hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_head_count(hparams["num_attention_heads"]) + self.gguf_writer.add_head_count_kv(5) # hparams["num_key_value_heads"]) is wrong + self.gguf_writer.add_layer_norm_rms_eps(hparams["rms_norm_eps"]) + + def shuffle_attn_q_weight(self, data_torch): + assert data_torch.size() == (5120, 5120) + data_torch = data_torch.reshape(8, 5, 128, 5120) + data_torch = torch.permute(data_torch, (1, 0, 2, 3)) + data_torch = torch.reshape(data_torch, (5120, 5120)) + return data_torch + + def shuffle_attn_output_weight(self, data_torch): + assert data_torch.size() == (5120, 5120) + data_torch = data_torch.reshape(5120, 8, 5, 128) + data_torch = torch.permute(data_torch, (0, 2, 1, 3)) + data_torch = torch.reshape(data_torch, (5120, 5120)) + return data_torch + + def write_tensors(self): + block_count = self.hparams.get("num_layers", self.hparams.get("num_hidden_layers")) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + + for name, data_torch in self.get_tensors(): + if "self_attn.rotary_emb.inv_freq" in name: + continue + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + # shuffle for broadcasting of gqa in ggml_mul_mat + if new_name.endswith("attn_q.weight"): + data_torch = self.shuffle_attn_q_weight(data_torch) + elif new_name.endswith("attn_output.weight"): + data_torch = self.shuffle_attn_output_weight(data_torch) + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + ###### CONVERSION LOGIC ###### def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Convert a huggingface model to a GGML compatible file") + parser = argparse.ArgumentParser( + description="Convert a huggingface model to a GGML compatible file") parser.add_argument( "--vocab-only", action="store_true", help="extract only the vocab", diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 390dca049..4cd87cdda 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -96,6 +96,7 @@ class MODEL_ARCH(IntEnum): STABLELM = auto() QWEN = auto() PHI2 = auto() + PLAMO = auto() class MODEL_TENSOR(IntEnum): @@ -142,6 +143,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.STABLELM: "stablelm", MODEL_ARCH.QWEN: "qwen", MODEL_ARCH.PHI2: "phi2", + MODEL_ARCH.PLAMO: "plamo", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -349,6 +351,21 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.PLAMO: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], MODEL_ARCH.GPT2: [ # TODO ], diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 6fcbdbc1c..446c6b688 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -79,6 +79,7 @@ class TensorNameMap: "language_model.encoder.layers.{bid}.input_layernorm", # persimmon "model.layers.{bid}.ln1", # yi "transformer.h.{bid}.ln", # phi2 + "model.layers.layers.{bid}.norm", # plamo ), # Attention norm 2 @@ -99,26 +100,29 @@ class TensorNameMap: # Attention query MODEL_TENSOR.ATTN_Q: ( - "model.layers.{bid}.self_attn.q_proj", # llama-hf - "layers.{bid}.attention.wq", # llama-pth - "encoder.layer.{bid}.attention.self.query", # bert - "transformer.h.{bid}.attn.q_proj", # gpt-j + "model.layers.{bid}.self_attn.q_proj", # llama-hf + "layers.{bid}.attention.wq", # llama-pth + "encoder.layer.{bid}.attention.self.query", # bert + "transformer.h.{bid}.attn.q_proj", # gpt-j + "model.layers.layers.{bid}.self_attn.q_proj", # plamo ), # Attention key MODEL_TENSOR.ATTN_K: ( - "model.layers.{bid}.self_attn.k_proj", # llama-hf - "layers.{bid}.attention.wk", # llama-pth - "encoder.layer.{bid}.attention.self.key", # bert - "transformer.h.{bid}.attn.k_proj", # gpt-j + "model.layers.{bid}.self_attn.k_proj", # llama-hf + "layers.{bid}.attention.wk", # llama-pth + "encoder.layer.{bid}.attention.self.key", # bert + "transformer.h.{bid}.attn.k_proj", # gpt-j + "model.layers.layers.{bid}.self_attn.k_proj", # plamo ), # Attention value MODEL_TENSOR.ATTN_V: ( - "model.layers.{bid}.self_attn.v_proj", # llama-hf - "layers.{bid}.attention.wv", # llama-pth - "encoder.layer.{bid}.attention.self.value", # bert - "transformer.h.{bid}.attn.v_proj", # gpt-j + 
"model.layers.{bid}.self_attn.v_proj", # llama-hf + "layers.{bid}.attention.wv", # llama-pth + "encoder.layer.{bid}.attention.self.value", # bert + "transformer.h.{bid}.attn.v_proj", # gpt-j + "model.layers.layers.{bid}.self_attn.v_proj", # plamo ), # Attention output @@ -134,12 +138,14 @@ class TensorNameMap: "transformer.h.{bid}.attn.out_proj", # gpt-j "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon "transformer.h.{bid}.mixer.out_proj", # phi2 + "model.layers.layers.{bid}.self_attn.o_proj", # plamo ), # Rotary embeddings MODEL_TENSOR.ATTN_ROT_EMBD: ( - "model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf - "layers.{bid}.attention.inner_attention.rope.freqs", # llama-pth + "model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf + "layers.{bid}.attention.inner_attention.rope.freqs", # llama-pth + "model.layers.layers.{bid}.self_attn.rotary_emb.inv_freq", # plamo ), # Feed-forward norm @@ -174,6 +180,7 @@ class TensorNameMap: "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon "transformer.h.{bid}.mlp.w1", # qwen "transformer.h.{bid}.mlp.fc1", # phi2 + "model.layers.layers.{bid}.mlp.up_proj", # plamo ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -186,6 +193,7 @@ class TensorNameMap: "model.layers.{bid}.mlp.gate_proj", # llama-hf refact "layers.{bid}.feed_forward.w1", # llama-pth "transformer.h.{bid}.mlp.w2", # qwen + "model.layers.layers.{bid}.mlp.gate_proj", # plamo ), MODEL_TENSOR.FFN_GATE_EXP: ( @@ -206,6 +214,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc_out", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon "transformer.h.{bid}.mlp.fc2", # phi2 + "model.layers.layers.{bid}.mlp.down_proj", # plamo ), MODEL_TENSOR.FFN_DOWN_EXP: ( diff --git a/llama.cpp b/llama.cpp index a24621539..0b99f1e03 100644 --- a/llama.cpp +++ b/llama.cpp @@ -198,6 +198,7 @@ enum llm_arch { LLM_ARCH_STABLELM, LLM_ARCH_QWEN, LLM_ARCH_PHI2, + LLM_ARCH_PLAMO, LLM_ARCH_UNKNOWN, }; @@ -216,6 +217,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_STABLELM, "stablelm" }, { LLM_ARCH_QWEN, "qwen" }, { LLM_ARCH_PHI2, "phi2" }, + { LLM_ARCH_PLAMO, "plamo" }, }; enum llm_kv { @@ -567,6 +569,24 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_PLAMO, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ROPE_FREQS, "rope_freqs" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, @@ -2749,6 +2769,15 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_PLAMO: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + + switch (hparams.n_layer) { + case 40: model.type = e_model::MODEL_13B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -3630,6 +3659,51 @@ static bool llm_load_tensors( layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); } } break; + case LLM_ARCH_PLAMO: + { + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, 
GGML_BACKEND_CPU); + + // output + { + ggml_backend_type backend_norm; + ggml_backend_type backend_output; + + if (n_gpu_layers > int(n_layer)) { + backend_norm = llama_backend_offload; + backend_output = llama_backend_offload_split; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); + model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + } + + const uint32_t n_ff = hparams.n_ff; + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + + for (uint32_t i = 0; i < n_layer; ++i) { + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + + layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); + layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); + layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + + layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -5555,6 +5629,109 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_plamo() { + struct ggml_cgraph * gf = ggml_new_graph(ctx0); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + struct ggml_tensor * attention_norm = cur; + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, 
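                    // (the mode argument of 2 presumably selects the NeoX-style rotation;
                    //  Kcur below is roped with the same parameters over n_head_kv heads)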
attn_factor, beta_fast, beta_slow); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow); + cb(Kcur, "Kcur", il); + + llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il); + + cur = llm_build_kqv(ctx0, model, hparams, kv_self, + model.layers[il].wo, NULL, + Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + struct ggml_tensor * sa_out = cur; + + cur = attention_norm; + + // feed-forward network + { + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + cur = ggml_add(ctx0, cur, sa_out); + cb(cur, "l_out", il); + + cur = ggml_add(ctx0, cur, inpL); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; // @@ -6065,6 +6242,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_phi2(); } break; + case LLM_ARCH_PLAMO: + { + result = llm.build_plamo(); + } break; default: GGML_ASSERT(false); } From b9f47952ffae4e0d3420905526003c23333f6c98 Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 24 Dec 2023 21:01:12 +0100 Subject: [PATCH 299/859] simplify bug issue template (#4623) --- .github/ISSUE_TEMPLATE/bug.md | 177 +--------------------------------- 1 file changed, 1 insertion(+), 176 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index c003fe7c1..ce69e6395 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -6,179 +6,4 @@ assignees: '' --- -# Prerequisites - -Please answer the following questions for yourself before submitting an issue. - -- [ ] I am running the latest code. Development is very rapid so there are no tagged versions as of now. -- [ ] I carefully followed the [README.md](https://github.com/ggerganov/llama.cpp/blob/master/README.md). -- [ ] I [searched using keywords relevant to my issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/filtering-and-searching-issues-and-pull-requests) to make sure that I am creating a new issue that is not already open (or closed). -- [ ] I reviewed the [Discussions](https://github.com/ggerganov/llama.cpp/discussions), and have a new bug or useful enhancement to share. - -# Expected Behavior - -Please provide a detailed written description of what you were trying to do, and what you expected `llama.cpp` to do. - -# Current Behavior - -Please provide a detailed written description of what `llama.cpp` did, instead. - -# Environment and Context - -Please provide detailed information about your computer setup. This is important in case the issue is not reproducible except for under certain specific conditions. - -* Physical (or virtual) hardware you are using, e.g. for Linux: - -`$ lscpu` - -* Operating System, e.g. for Linux: - -`$ uname -a` - -* SDK version, e.g. 
for Linux: - -``` -$ python3 --version -$ make --version -$ g++ --version -``` - -# Failure Information (for bugs) - -Please help provide information about the failure / bug. - -# Steps to Reproduce - -Please provide detailed steps for reproducing the issue. We are not sitting in front of your screen, so the more detail the better. - -1. step 1 -2. step 2 -3. step 3 -4. etc. - -# Failure Logs - -Please include any relevant log snippets or files. If it works under one configuration but not under another, please provide logs for both configurations and their corresponding outputs so it is easy to see where behavior changes. - -Also, please try to **avoid using screenshots** if at all possible. Instead, copy/paste the console output and use [Github's markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) to cleanly format your logs for easy readability. - -Example environment info: -``` -llama.cpp$ git log | head -1 -commit 2af23d30434a677c6416812eea52ccc0af65119c - -llama.cpp$ lscpu | egrep "AMD|Flags" -Vendor ID: AuthenticAMD -Model name: AMD Ryzen Threadripper 1950X 16-Core Processor -Flags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl nonstop_tsc cpuid extd_apicid amd_dcm aperfmperf rapl pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb hw_pstate ssbd ibpb vmmcall fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt sha_ni xsaveopt xsavec xgetbv1 xsaves clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif overflow_recov succor smca sme sev -Virtualization: AMD-V - -llama.cpp$ python3 --version -Python 3.10.9 - -llama.cpp$ pip list | egrep "torch|numpy|sentencepiece" -numpy 1.24.2 -numpydoc 1.5.0 -sentencepiece 0.1.97 -torch 1.13.1 -torchvision 0.14.1 - -llama.cpp$ make --version | head -1 -GNU Make 4.3 - -$ md5sum ./models/65B/ggml-model-q4_0.bin -dbdd682cce80e2d6e93cefc7449df487 ./models/65B/ggml-model-q4_0.bin -``` - -Example run with the Linux command [perf](https://www.brendangregg.com/perf.html) -``` -llama.cpp$ perf stat ./main -m ./models/65B/ggml-model-q4_0.bin -t 16 -n 1024 -p "Please close your issue when it has been answered." -main: seed = 1679149377 -llama_model_load: loading model from './models/65B/ggml-model-q4_0.bin' - please wait ... -llama_model_load: n_vocab = 32000 -llama_model_load: n_ctx = 512 -llama_model_load: n_embd = 8192 -llama_model_load: n_mult = 256 -llama_model_load: n_head = 64 -llama_model_load: n_layer = 80 -llama_model_load: n_rot = 128 -llama_model_load: f16 = 2 -llama_model_load: n_ff = 22016 -llama_model_load: n_parts = 8 -llama_model_load: ggml ctx size = 41477.73 MB -llama_model_load: memory_size = 2560.00 MB, n_mem = 40960 -llama_model_load: loading model part 1/8 from './models/65B/ggml-model-q4_0.bin' -llama_model_load: .......................................................................................... 
done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 2/8 from './models/65B/ggml-model-q4_0.bin.1' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 3/8 from './models/65B/ggml-model-q4_0.bin.2' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 4/8 from './models/65B/ggml-model-q4_0.bin.3' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 5/8 from './models/65B/ggml-model-q4_0.bin.4' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 6/8 from './models/65B/ggml-model-q4_0.bin.5' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 7/8 from './models/65B/ggml-model-q4_0.bin.6' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 -llama_model_load: loading model part 8/8 from './models/65B/ggml-model-q4_0.bin.7' -llama_model_load: .......................................................................................... done -llama_model_load: model size = 4869.09 MB / num tensors = 723 - -system_info: n_threads = 16 / 32 | AVX = 1 | AVX2 = 1 | AVX512 = 0 | FMA = 1 | NEON = 0 | ARM_FMA = 0 | F16C = 1 | FP16_VA = 0 | WASM_SIMD = 0 | BLAS = 0 | SSE3 = 1 | VSX = 0 | - -main: prompt: 'Please close your issue when it has been answered.' -main: number of tokens in prompt = 11 - 1 -> '' - 12148 -> 'Please' - 3802 -> ' close' - 596 -> ' your' - 2228 -> ' issue' - 746 -> ' when' - 372 -> ' it' - 756 -> ' has' - 1063 -> ' been' - 7699 -> ' answered' - 29889 -> '.' - -sampling parameters: temp = 0.800000, top_k = 40, top_p = 0.950000, repeat_last_n = 64, repeat_penalty = 1.300000 - - -Please close your issue when it has been answered. -@duncan-donut: I'm trying to figure out what kind of "support" you need for this script and why, exactly? Is there a question about how the code works that hasn't already been addressed in one or more comments below this ticket, or are we talking something else entirely like some sorta bugfixing job because your server setup is different from mine?? -I can understand if your site needs to be running smoothly and you need help with a fix of sorts but there should really be nothing wrong here that the code itself could not handle. And given that I'm getting reports about how it works perfectly well on some other servers, what exactly are we talking? A detailed report will do wonders in helping us get this resolved for ya quickly so please take your time and describe the issue(s) you see as clearly & concisely as possible!! -@duncan-donut: I'm not sure if you have access to cPanel but you could try these instructions. It is worth a shot! 
Let me know how it goes (or what error message, exactly!) when/if ya give that code a go? [end of text] - - -main: mem per token = 71159620 bytes -main: load time = 19309.95 ms -main: sample time = 168.62 ms -main: predict time = 223895.61 ms / 888.47 ms per token -main: total time = 246406.42 ms - - Performance counter stats for './main -m ./models/65B/ggml-model-q4_0.bin -t 16 -n 1024 -p Please close your issue when it has been answered.': - - 3636882.89 msec task-clock # 14.677 CPUs utilized - 13509 context-switches # 3.714 /sec - 2436 cpu-migrations # 0.670 /sec - 10476679 page-faults # 2.881 K/sec - 13133115082869 cycles # 3.611 GHz (16.77%) - 29314462753 stalled-cycles-frontend # 0.22% frontend cycles idle (16.76%) - 10294402631459 stalled-cycles-backend # 78.39% backend cycles idle (16.74%) - 23479217109614 instructions # 1.79 insn per cycle - # 0.44 stalled cycles per insn (16.76%) - 2353072268027 branches # 647.002 M/sec (16.77%) - 1998682780 branch-misses # 0.08% of all branches (16.76%) - - 247.802177522 seconds time elapsed - - 3618.573072000 seconds user - 18.491698000 seconds sys -``` +Please include information about your system, the steps to reproduce the bug, and the version of llama.cpp that you are using. If possible, please provide a minimal code example that reproduces the bug. From a206137f927daef1752753cf5e281220b449a468 Mon Sep 17 00:00:00 2001 From: Paul Tsochantaris Date: Mon, 25 Dec 2023 16:09:53 +0000 Subject: [PATCH 300/859] Adding Emeltal reference to UI list (#4629) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 09338d226..3b202a336 100644 --- a/README.md +++ b/README.md @@ -133,6 +133,7 @@ as the main playground for developing new features for the [ggml](https://github - [withcatai/catai](https://github.com/withcatai/catai) - [semperai/amica](https://github.com/semperai/amica) - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) +- [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) --- From 77465dad48d7c945c367ab46b6f2ea98ae9b7b15 Mon Sep 17 00:00:00 2001 From: FantasyGmm <16450052+FantasyGmm@users.noreply.github.com> Date: Tue, 26 Dec 2023 18:38:36 +0800 Subject: [PATCH 301/859] Fix new CUDA10 compilation errors (#4635) --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index ac3b3c14d..f32e83ab6 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -102,6 +102,7 @@ #include #if CUDART_VERSION < 11020 +#define CU_DEVICE_ATTRIBUTE_VIRTUAL_MEMORY_MANAGEMENT_SUPPORTED CU_DEVICE_ATTRIBUTE_VIRTUAL_ADDRESS_MANAGEMENT_SUPPORTED #define CUBLAS_TF32_TENSOR_OP_MATH CUBLAS_TENSOR_OP_MATH #define CUBLAS_COMPUTE_16F CUDA_R_16F #define CUBLAS_COMPUTE_32F CUDA_R_32F From de8e496437c59e7d1cc84109e3e49a3478aee25a Mon Sep 17 00:00:00 2001 From: WillCorticesAI <150854901+WillCorticesAI@users.noreply.github.com> Date: Tue, 26 Dec 2023 05:42:08 -0500 Subject: [PATCH 302/859] Update comment for AdamW implementation reference. 
(#4604) Co-authored-by: Will Findley --- ggml.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ggml.c b/ggml.c index 73600ab05..d24560480 100644 --- a/ggml.c +++ b/ggml.c @@ -17456,9 +17456,9 @@ static void ggml_opt_acc_grad(int np, struct ggml_tensor * const ps[], float * g } // -// ADAM +// Using AdamW - ref: https://arxiv.org/pdf/1711.05101v3.pdf // -// ref: https://arxiv.org/pdf/1412.6980.pdf +// (Original Adam - ref: https://arxiv.org/pdf/1412.6980.pdf) // static enum ggml_opt_result ggml_opt_adam( From dc68f0054cd279cddddb0cae0c9ef4f9cbaa512a Mon Sep 17 00:00:00 2001 From: slaren Date: Tue, 26 Dec 2023 21:23:59 +0100 Subject: [PATCH 303/859] cuda : fix vmm pool with multi GPU (#4620) * cuda : fix vmm pool with multi GPU * hip * use recommended granularity instead of minimum * better error checking * fix mixtral * use cudaMemcpy3DPeerAsync * use cuda_pool_alloc in ggml_cuda_op_mul_mat * consolidate error checking in ggml_cuda_set_device * remove unnecessary inlines ggml-ci * style fixes * only use vmm for the main device * fix scratch buffer size, re-enable vmm pool for all devices * remove unnecessary check id != g_main_device --- ggml-cuda.cu | 483 +++++++++++++++++++++++++-------------------------- ggml.c | 3 - llama.cpp | 3 +- 3 files changed, 243 insertions(+), 246 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index f32e83ab6..abad9cc39 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -68,8 +68,9 @@ #define cudaMallocHost(ptr, size) hipHostMalloc(ptr, size, hipHostMallocDefault) #endif #define cudaMemcpy hipMemcpy -#define cudaMemcpy2DAsync hipMemcpy2DAsync #define cudaMemcpyAsync hipMemcpyAsync +#define cudaMemcpyPeerAsync hipMemcpyPeerAsync +#define cudaMemcpy2DAsync hipMemcpy2DAsync #define cudaMemcpyDeviceToDevice hipMemcpyDeviceToDevice #define cudaMemcpyDeviceToHost hipMemcpyDeviceToHost #define cudaMemcpyHostToDevice hipMemcpyHostToDevice @@ -163,7 +164,7 @@ static __device__ __forceinline__ int __vsubss4(const int a, const int b) { const int8x4_t vb = reinterpret_cast(b); #if __has_builtin(__builtin_elementwise_sub_sat) const int8x4_t c = __builtin_elementwise_sub_sat(va, vb); - return reinterpret_cast(c); + return reinterpret_cast(c); #else int8x4_t c; int16_t tmp; @@ -174,7 +175,7 @@ static __device__ __forceinline__ int __vsubss4(const int a, const int b) { if(tmp < std::numeric_limits::min()) tmp = std::numeric_limits::min(); c[i] = tmp; } - return reinterpret_cast(c); + return reinterpret_cast(c); #endif // __has_builtin(__builtin_elementwise_sub_sat) } @@ -212,6 +213,28 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); +[[noreturn]] +static void ggml_cuda_error(const char * stmt, const char * func, const char * file, const int line, const char * msg) { + int id = -1; // in case cudaGetDevice fails + cudaGetDevice(&id); + + fprintf(stderr, "CUDA error: %s\n", msg); + fprintf(stderr, " current device: %d, in function %s at %s:%d\n", id, func, file, line); + fprintf(stderr, " %s\n", stmt); + // abort with GGML_ASSERT to get a stack trace + GGML_ASSERT(!"CUDA error"); +} + +#define CUDA_CHECK_GEN(err, success, error_fn) \ + do { \ + auto err_ = (err); \ + if (err_ != (success)) { \ + ggml_cuda_error(#err, __func__, __FILE__, __LINE__, error_fn(err_)); \ + } \ + } while (0) + +#define CUDA_CHECK(err) CUDA_CHECK_GEN(err, cudaSuccess, cudaGetErrorString) + #if CUDART_VERSION >= 12000 static const char * cublas_get_error_str(const 
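// (this branch is only compiled for CUDA 12 and later, where cublasGetStatusString()
//  is available; older toolkits fall back to a hand-written status-to-string mapping)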
cublasStatus_t err) { return cublasGetStatusString(err); @@ -233,15 +256,7 @@ static_assert(sizeof(half) == sizeof(ggml_fp16_t), "wrong fp16 size"); } #endif // CUDART_VERSION >= 12000 -[[noreturn]] -static void ggml_cuda_error(const char * stmt, const char * func, const char * file, const int line, const char * msg) { - fprintf(stderr, "CUDA error: %s: %s\n", stmt, msg); - fprintf(stderr, " in function %s at %s:%d\n", func, file, line); - GGML_ASSERT(!"CUDA error"); -} - -#define CUDA_CHECK(err) do { auto err_ = (err); if (err_ != cudaSuccess) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cudaGetErrorString(err_)); } while (0) -#define CUBLAS_CHECK(err) do { auto err_ = (err); if (err_ != CUBLAS_STATUS_SUCCESS) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cublas_get_error_str(err_)); } while (0) +#define CUBLAS_CHECK(err) CUDA_CHECK_GEN(err, CUBLAS_STATUS_SUCCESS, cublas_get_error_str) #if !defined(GGML_USE_HIPBLAS) static const char * cu_get_error_str(CUresult err) { @@ -249,7 +264,7 @@ static const char * cu_get_error_str(CUresult err) { cuGetErrorString(err, &err_str); return err_str; } -#define CU_CHECK(err) do { auto err_ = (err); if (err_ != CUDA_SUCCESS) ggml_cuda_error(#err, __func__, __FILE__, __LINE__, cu_get_error_str(err_)); } while (0) +#define CU_CHECK(err) CUDA_CHECK_GEN(err, CUDA_SUCCESS, cu_get_error_str) #endif #if CUDART_VERSION >= 11100 @@ -306,10 +321,10 @@ typedef void (*ggml_cuda_func_t)(const ggml_tensor * src0, const ggml_tensor * s typedef void (*ggml_cuda_op_mul_mat_t)( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream); + const int64_t src1_padded_row_size, cudaStream_t stream); typedef void (*ggml_cuda_op_flatten_t)( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream); + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream); // QK = number of values after dequantization // QR = QK / number of values before dequantization @@ -515,15 +530,15 @@ struct ggml_tensor_extra_gpu { // this is faster on Windows // probably because the Windows CUDA libraries forget to make this check before invoking the drivers -inline cudaError_t ggml_cuda_set_device(const int device) { +static void ggml_cuda_set_device(const int device) { int current_device; CUDA_CHECK(cudaGetDevice(¤t_device)); if (device == current_device) { - return cudaSuccess; + return; } - return cudaSetDevice(device); + CUDA_CHECK(cudaSetDevice(device)); } static int g_device_count = -1; @@ -538,7 +553,6 @@ struct cuda_device_capabilities { static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, false, 0} }; - static void * g_scratch_buffer = nullptr; static size_t g_scratch_size = 0; // disabled by default static size_t g_scratch_offset = 0; @@ -580,6 +594,7 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { static __device__ __forceinline__ float op_repeat(const float a, const float b) { return b; + GGML_UNUSED(a); } static __device__ __forceinline__ float op_add(const float a, const float b) { @@ -701,7 +716,7 @@ static __global__ void silu_f32(const float * x, float * dst, const int k) { dst[i] = x[i] / (1.0f + expf(-x[i])); } -static __global__ void 
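// quick GELU approximation: x * sigmoid(1.702 * x), hence GELU_QUICK_COEF = -1.702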
gelu_quick_f32(const float *x, float *dst, int k) { +static __global__ void gelu_quick_f32(const float * x, float * dst, int k) { const float GELU_QUICK_COEF = -1.702f; const int i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= k) { @@ -710,7 +725,7 @@ static __global__ void gelu_quick_f32(const float *x, float *dst, int k) { dst[i] = x[i] * (1.0f / (1.0f + expf(GELU_QUICK_COEF * x[i]))); } -static __global__ void tanh_f32(const float *x, float *dst, int k) { +static __global__ void tanh_f32(const float * x, float * dst, int k) { const int i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= k) { return; @@ -727,7 +742,7 @@ static __global__ void relu_f32(const float * x, float * dst, const int k) { dst[i] = fmaxf(x[i], 0); } -static __global__ void leaky_relu_f32(const float *x, float *dst, const int k, const float negative_slope) { +static __global__ void leaky_relu_f32(const float * x, float * dst, const int k, const float negative_slope) { const int i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= k) { return; @@ -780,7 +795,7 @@ static __global__ void norm_f32(const float * x, float * dst, const int ncols, c } } -static __global__ void concat_f32(const float *x,const float *y, float *dst, const int ne0, const int ne02) { +static __global__ void concat_f32(const float * x,const float * y, float * dst, const int ne0, const int ne02) { int nidx = threadIdx.x + blockIdx.x * blockDim.x; if (nidx >= ne0) { return; @@ -805,7 +820,7 @@ static __global__ void concat_f32(const float *x,const float *y, float *dst, c } } -static __global__ void upscale_f32(const float *x, float *dst, const int ne00, const int nb02, const int scale_factor) { +static __global__ void upscale_f32(const float * x, float * dst, const int ne00, const int nb02, const int scale_factor) { int ne0 = ne00 * scale_factor; int nidx = threadIdx.x + blockIdx.x * blockDim.x; if (nidx >= ne0) { @@ -825,7 +840,7 @@ static __global__ void upscale_f32(const float *x, float *dst, const int ne00, dst[offset_dst] = x[offset_src]; } -static __global__ void pad_f32(const float *x, float *dst, const int ne0, const int ne00, const int ne01, const int ne02) { +static __global__ void pad_f32(const float * x, float * dst, const int ne0, const int ne00, const int ne01, const int ne02) { int nidx = threadIdx.x + blockIdx.x * blockDim.x; if (nidx >= ne0) { return; @@ -4727,7 +4742,6 @@ static __global__ void mul_mat_p021_f16_f32( const int row_y = col_x; - // y is not transposed but permuted const int iy = channel*nrows_y + row_y; @@ -5402,7 +5416,7 @@ struct bin_bcast_cuda { cne[3] = 1; }; - auto collapse_nb = [](size_t cnb[], int64_t cne[]) { + auto collapse_nb = [](size_t cnb[], const int64_t cne[]) { cnb[1] *= cne[1]; cnb[2] *= cne[2]; cnb[3] *= cne[3]; @@ -6566,18 +6580,16 @@ struct scoped_spin_lock { static std::atomic_flag g_cuda_pool_lock = ATOMIC_FLAG_INIT; // #define DEBUG_CUDA_MALLOC -struct cuda_buffer { +struct ggml_cuda_buffer { void * ptr = nullptr; size_t size = 0; }; -static cuda_buffer g_cuda_buffer_pool[GGML_CUDA_MAX_DEVICES][MAX_CUDA_BUFFERS]; +static ggml_cuda_buffer g_cuda_buffer_pool[GGML_CUDA_MAX_DEVICES][MAX_CUDA_BUFFERS]; static size_t g_cuda_pool_size[GGML_CUDA_MAX_DEVICES] = {0}; -static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { +static void * ggml_cuda_pool_malloc_leg(int device, size_t size, size_t * actual_size) { scoped_spin_lock lock(g_cuda_pool_lock); - int id; - CUDA_CHECK(cudaGetDevice(&id)); #ifdef DEBUG_CUDA_MALLOC int nnz = 0; size_t max_size = 0; @@ -6585,7 +6597,7 @@ static void 
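// "_leg" appears to be the legacy pool: a per-device array of cached cudaMalloc
// blocks, used when the driver lacks virtual memory management (VMM) support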
* ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { size_t best_diff = 1ull << 36; int ibest = -1; for (int i = 0; i < MAX_CUDA_BUFFERS; ++i) { - cuda_buffer& b = g_cuda_buffer_pool[id][i]; + ggml_cuda_buffer& b = g_cuda_buffer_pool[device][i]; if (b.ptr != nullptr) { #ifdef DEBUG_CUDA_MALLOC ++nnz; @@ -6608,7 +6620,7 @@ static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { } } if (ibest >= 0) { - cuda_buffer& b = g_cuda_buffer_pool[id][ibest]; + ggml_cuda_buffer& b = g_cuda_buffer_pool[device][ibest]; void * ptr = b.ptr; *actual_size = b.size; b.ptr = nullptr; @@ -6618,9 +6630,10 @@ static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { void * ptr; size_t look_ahead_size = (size_t) (1.05 * size); look_ahead_size = 256 * ((look_ahead_size + 255)/256); + ggml_cuda_set_device(device); CUDA_CHECK(cudaMalloc((void **) &ptr, look_ahead_size)); *actual_size = look_ahead_size; - g_cuda_pool_size[id] += look_ahead_size; + g_cuda_pool_size[device] += look_ahead_size; #ifdef DEBUG_CUDA_MALLOC fprintf(stderr, "%s[%d]: %d buffers, max_size = %u MB, pool_size = %u MB, requested %u MB\n", __func__, id, nnz, (uint32_t)(max_size/1024/1024), (uint32_t)(g_cuda_pool_size[id]/1024/1024), (uint32_t)(size/1024/1024)); @@ -6628,13 +6641,11 @@ static void * ggml_cuda_pool_malloc_leg(size_t size, size_t * actual_size) { return ptr; } -static void ggml_cuda_pool_free_leg(void * ptr, size_t size) { +static void ggml_cuda_pool_free_leg(int device, void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); - int id; - CUDA_CHECK(cudaGetDevice(&id)); for (int i = 0; i < MAX_CUDA_BUFFERS; ++i) { - cuda_buffer& b = g_cuda_buffer_pool[id][i]; + ggml_cuda_buffer& b = g_cuda_buffer_pool[device][i]; if (b.ptr == nullptr) { b.ptr = ptr; b.size = size; @@ -6642,73 +6653,73 @@ static void ggml_cuda_pool_free_leg(void * ptr, size_t size) { } } fprintf(stderr, "WARNING: cuda buffer pool full, increase MAX_CUDA_BUFFERS\n"); + ggml_cuda_set_device(device); CUDA_CHECK(cudaFree(ptr)); - g_cuda_pool_size[id] -= size; + g_cuda_pool_size[device] -= size; } #if !defined(GGML_USE_HIPBLAS) // pool with virtual memory -static std::vector g_cuda_pool_handles[GGML_CUDA_MAX_DEVICES]; static CUdeviceptr g_cuda_pool_addr[GGML_CUDA_MAX_DEVICES] = {0}; static size_t g_cuda_pool_used[GGML_CUDA_MAX_DEVICES] = {0}; static const size_t CUDA_POOL_VMM_MAX_SIZE = 1ull << 36; // 64 GB -static void * ggml_cuda_pool_malloc_vmm(size_t size, size_t * actual_size) { +static void * ggml_cuda_pool_malloc_vmm(int device, size_t size, size_t * actual_size) { scoped_spin_lock lock(g_cuda_pool_lock); - int id; - CUDA_CHECK(cudaGetDevice(&id)); // round up the allocation size to the alignment to ensure that all allocations are aligned for all data types const size_t alignment = 128; size = alignment * ((size + alignment - 1) / alignment); - size_t avail = g_cuda_pool_size[id] - g_cuda_pool_used[id]; + size_t avail = g_cuda_pool_size[device] - g_cuda_pool_used[device]; if (size > avail) { // round up to the next multiple of the granularity size_t reserve_size = size - avail; - const size_t granularity = g_device_caps[id].vmm_granularity; + const size_t granularity = g_device_caps[device].vmm_granularity; reserve_size = granularity * ((reserve_size + granularity - 1) / granularity); - GGML_ASSERT(g_cuda_pool_size[id] + reserve_size <= CUDA_POOL_VMM_MAX_SIZE); + GGML_ASSERT(g_cuda_pool_size[device] + reserve_size <= CUDA_POOL_VMM_MAX_SIZE); // allocate more physical memory CUmemAllocationProp prop = {}; 
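        // VMM growth path: cuMemCreate a physical block, map it at the end of the
        // reserved virtual range, then grant this device read/write access --
        // pointers already handed out by the pool never move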
prop.type = CU_MEM_ALLOCATION_TYPE_PINNED; prop.location.type = CU_MEM_LOCATION_TYPE_DEVICE; - prop.location.id = id; + prop.location.id = device; CUmemGenericAllocationHandle handle; CU_CHECK(cuMemCreate(&handle, reserve_size, &prop, 0)); // reserve virtual address space (if not already reserved) - if (g_cuda_pool_addr[id] == 0) { - CU_CHECK(cuMemAddressReserve(&g_cuda_pool_addr[id], CUDA_POOL_VMM_MAX_SIZE, 0, 0, 0)); + if (g_cuda_pool_addr[device] == 0) { + CU_CHECK(cuMemAddressReserve(&g_cuda_pool_addr[device], CUDA_POOL_VMM_MAX_SIZE, 0, 0, 0)); } // map at the end of the pool - CU_CHECK(cuMemMap(g_cuda_pool_addr[id] + g_cuda_pool_size[id], reserve_size, 0, handle, 0)); + CU_CHECK(cuMemMap(g_cuda_pool_addr[device] + g_cuda_pool_size[device], reserve_size, 0, handle, 0)); + + // the memory allocation handle is no longer needed after mapping + CU_CHECK(cuMemRelease(handle)); // set access CUmemAccessDesc access = {}; access.location.type = CU_MEM_LOCATION_TYPE_DEVICE; - access.location.id = id; + access.location.id = device; access.flags = CU_MEM_ACCESS_FLAGS_PROT_READWRITE; - CU_CHECK(cuMemSetAccess(g_cuda_pool_addr[id] + g_cuda_pool_size[id], reserve_size, &access, 1)); + CU_CHECK(cuMemSetAccess(g_cuda_pool_addr[device] + g_cuda_pool_size[device], reserve_size, &access, 1)); // add to the pool - g_cuda_pool_handles[id].push_back(handle); - g_cuda_pool_size[id] += reserve_size; + g_cuda_pool_size[device] += reserve_size; //printf("cuda pool[%d]: size increased to %llu MB (reserved %llu MB)\n", // id, (unsigned long long) (g_cuda_pool_size[id]/1024/1024), // (unsigned long long) (reserve_size/1024/1024)); } - GGML_ASSERT(g_cuda_pool_addr[id] != 0); + GGML_ASSERT(g_cuda_pool_addr[device] != 0); - void * ptr = (void *) (g_cuda_pool_addr[id] + g_cuda_pool_used[id]); + void * ptr = (void *) (g_cuda_pool_addr[device] + g_cuda_pool_used[device]); *actual_size = size; - g_cuda_pool_used[id] += size; + g_cuda_pool_used[device] += size; #ifdef DEBUG_CUDA_MALLOC printf("cuda pool[%d]: allocated %llu bytes at %llx [%s]\n", id, (unsigned long long) size, ptr); @@ -6717,38 +6728,32 @@ static void * ggml_cuda_pool_malloc_vmm(size_t size, size_t * actual_size) { return ptr; } -static void ggml_cuda_pool_free_vmm(void * ptr, size_t size) { +static void ggml_cuda_pool_free_vmm(int device, void * ptr, size_t size) { scoped_spin_lock lock(g_cuda_pool_lock); - int id; - CUDA_CHECK(cudaGetDevice(&id)); #ifdef DEBUG_CUDA_MALLOC printf("cuda pool[%d]: freed %llu bytes at %llx\n", id, (unsigned long long) size, ptr); #endif - g_cuda_pool_used[id] -= size; + g_cuda_pool_used[device] -= size; // all deallocations must be in reverse order of the allocations - GGML_ASSERT(ptr == (void *) (g_cuda_pool_addr[id] + g_cuda_pool_used[id])); + GGML_ASSERT(ptr == (void *) (g_cuda_pool_addr[device] + g_cuda_pool_used[device])); } -static void * ggml_cuda_pool_malloc(size_t size, size_t * actual_size) { - int id; - CUDA_CHECK(cudaGetDevice(&id)); - if (g_device_caps[id].vmm) { - return ggml_cuda_pool_malloc_vmm(size, actual_size); +static void * ggml_cuda_pool_malloc(int device, size_t size, size_t * actual_size) { + if (g_device_caps[device].vmm) { + return ggml_cuda_pool_malloc_vmm(device, size, actual_size); } else { - return ggml_cuda_pool_malloc_leg(size, actual_size); + return ggml_cuda_pool_malloc_leg(device, size, actual_size); } } -static void ggml_cuda_pool_free(void * ptr, size_t size) { - int id; - CUDA_CHECK(cudaGetDevice(&id)); - if (g_device_caps[id].vmm) { - ggml_cuda_pool_free_vmm(ptr, size); +static void 
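// free-side dispatcher: return the pointer to whichever pool (VMM or legacy)
// services this device, mirroring ggml_cuda_pool_malloc() above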
ggml_cuda_pool_free(int device, void * ptr, size_t size) { + if (g_device_caps[device].vmm) { + ggml_cuda_pool_free_vmm(device, ptr, size); } else { - ggml_cuda_pool_free_leg(ptr, size); + ggml_cuda_pool_free_leg(device, ptr, size); } } #else @@ -6758,13 +6763,15 @@ static void ggml_cuda_pool_free(void * ptr, size_t size) { template struct cuda_pool_alloc { + int device = -1; T * ptr = nullptr; size_t actual_size = 0; // size is in number of elements T * alloc(size_t size) { GGML_ASSERT(ptr == nullptr); - ptr = (T *) ggml_cuda_pool_malloc(size * sizeof(T), &this->actual_size); + CUDA_CHECK(cudaGetDevice(&device)); + ptr = (T *) ggml_cuda_pool_malloc(device, size * sizeof(T), &this->actual_size); return ptr; } @@ -6774,7 +6781,7 @@ struct cuda_pool_alloc { ~cuda_pool_alloc() { if (ptr != nullptr) { - ggml_cuda_pool_free(ptr, actual_size); + ggml_cuda_pool_free(device, ptr, actual_size); } } @@ -6839,7 +6846,7 @@ void ggml_init_cublas() { alloc_prop.type = CU_MEM_ALLOCATION_TYPE_PINNED; alloc_prop.location.type = CU_MEM_LOCATION_TYPE_DEVICE; alloc_prop.location.id = id; - CU_CHECK(cuMemGetAllocationGranularity(&g_device_caps[id].vmm_granularity, &alloc_prop, CU_MEM_ALLOC_GRANULARITY_MINIMUM)); + CU_CHECK(cuMemGetAllocationGranularity(&g_device_caps[id].vmm_granularity, &alloc_prop, CU_MEM_ALLOC_GRANULARITY_RECOMMENDED)); } #endif // !defined(GGML_USE_HIPBLAS) g_device_caps[id].vmm = !!device_vmm; @@ -6861,7 +6868,7 @@ void ggml_init_cublas() { } for (int id = 0; id < g_device_count; ++id) { - CUDA_CHECK(ggml_cuda_set_device(id)); + ggml_cuda_set_device(id); // create cuda streams for (int is = 0; is < MAX_STREAMS; ++is) { @@ -6976,7 +6983,7 @@ static cudaError_t ggml_cuda_cpy_tensor_2d( static void ggml_cuda_op_get_rows( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_d, const float * src1_d, float * dst_d, const cudaStream_t & stream) { + const float * src0_d, const float * src1_d, float * dst_d, cudaStream_t stream) { GGML_ASSERT(src1->type == GGML_TYPE_I32); GGML_ASSERT(dst->type == GGML_TYPE_F32); @@ -7018,9 +7025,9 @@ static void ggml_cuda_op_get_rows( } template -inline void ggml_cuda_op_bin_bcast( +static void ggml_cuda_op_bin_bcast( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7039,7 +7046,7 @@ inline void ggml_cuda_op_bin_bcast( static void ggml_cuda_op_repeat( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_d, const float * src1_d, float * dst_d, const cudaStream_t & main_stream) { + const float * src0_d, const float * src1_d, float * dst_d, cudaStream_t main_stream) { ggml_cuda_op_bin_bcast>(dst, src0, dst, nullptr, src0_d, dst_d, main_stream); @@ -7047,16 +7054,16 @@ static void ggml_cuda_op_repeat( (void) src1_d; } -inline void ggml_cuda_op_add( +static void ggml_cuda_op_add( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } -inline void ggml_cuda_op_acc( +static void ggml_cuda_op_acc( const ggml_tensor * src0, const ggml_tensor * src1, 
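    // (acc adds src1 into a strided view of src0; the view's strides and byte
    //  offset appear to be taken from dst->op_params)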
ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7073,23 +7080,23 @@ inline void ggml_cuda_op_acc( (void) dst; } -inline void ggml_cuda_op_mul( +static void ggml_cuda_op_mul( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } -inline void ggml_cuda_op_div( +static void ggml_cuda_op_div( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { ggml_cuda_op_bin_bcast>(src0, src1, dst, src0_dd, src1_dd, dst_dd, main_stream); } -inline void ggml_cuda_op_gelu( +static void ggml_cuda_op_gelu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7101,9 +7108,9 @@ inline void ggml_cuda_op_gelu( (void) src1_dd; } -inline void ggml_cuda_op_silu( +static void ggml_cuda_op_silu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7115,9 +7122,9 @@ inline void ggml_cuda_op_silu( (void) src1_dd; } -inline void ggml_cuda_op_gelu_quick( +static void ggml_cuda_op_gelu_quick( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7129,9 +7136,9 @@ inline void ggml_cuda_op_gelu_quick( (void) src1_dd; } -inline void ggml_cuda_op_tanh( +static void ggml_cuda_op_tanh( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7143,9 +7150,9 @@ inline void ggml_cuda_op_tanh( (void) src1_dd; } -inline void ggml_cuda_op_relu( +static void ggml_cuda_op_relu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7157,9 +7164,9 
@@ inline void ggml_cuda_op_relu( (void) src1_dd; } -inline void ggml_cuda_op_leaky_relu( +static void ggml_cuda_op_leaky_relu( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7174,9 +7181,9 @@ inline void ggml_cuda_op_leaky_relu( (void) src1_dd; } -inline void ggml_cuda_op_sqr( +static void ggml_cuda_op_sqr( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7188,9 +7195,9 @@ inline void ggml_cuda_op_sqr( (void) src1_dd; } -inline void ggml_cuda_op_norm( +static void ggml_cuda_op_norm( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7208,10 +7215,9 @@ inline void ggml_cuda_op_norm( (void) src1_dd; } - -inline void ggml_cuda_op_group_norm( +static void ggml_cuda_op_group_norm( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7225,9 +7231,9 @@ inline void ggml_cuda_op_group_norm( (void) src1_dd; } -inline void ggml_cuda_op_concat( +static void ggml_cuda_op_concat( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7241,9 +7247,9 @@ inline void ggml_cuda_op_concat( (void) dst; } -inline void ggml_cuda_op_upscale( +static void ggml_cuda_op_upscale( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(dst->type == GGML_TYPE_F32); @@ -7258,9 +7264,9 @@ inline void ggml_cuda_op_upscale( (void) src1_dd; } -inline void ggml_cuda_op_pad( +static void ggml_cuda_op_pad( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(dst->type == GGML_TYPE_F32); @@ -7275,9 +7281,9 @@ inline void ggml_cuda_op_pad( (void) src1_dd; } -inline void ggml_cuda_op_rms_norm( +static void ggml_cuda_op_rms_norm( const 
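    // (standard RMSNorm: per-row scaling by 1/sqrt(mean(x^2) + eps);
    //  F32 in, F32 out, as the asserts just below require)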
ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7295,10 +7301,10 @@ inline void ggml_cuda_op_rms_norm( (void) src1_dd; } -inline void ggml_cuda_op_mul_mat_q( +static void ggml_cuda_op_mul_mat_q( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream) { + const int64_t src1_padded_row_size, cudaStream_t stream) { const int64_t ne00 = src0->ne[0]; @@ -7360,7 +7366,7 @@ inline void ggml_cuda_op_mul_mat_q( static int64_t get_row_rounding(ggml_type type) { int64_t min_compute_capability = INT_MAX; int64_t max_compute_capability = INT_MIN; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { if (g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) { if (min_compute_capability > g_device_caps[id].cc) { min_compute_capability = g_device_caps[id].cc; @@ -7418,10 +7424,10 @@ static int64_t get_row_rounding(ggml_type type) { #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) } -inline void ggml_cuda_op_mul_mat_vec_q( +static void ggml_cuda_op_mul_mat_vec_q( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream) { + const int64_t src1_padded_row_size, cudaStream_t stream) { GGML_ASSERT(ggml_nrows(src1) == 1); @@ -7471,10 +7477,10 @@ inline void ggml_cuda_op_mul_mat_vec_q( (void) src1_padded_row_size; } -inline void ggml_cuda_op_dequantize_mul_mat_vec( +static void ggml_cuda_op_dequantize_mul_mat_vec( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream) { + const int64_t src1_padded_row_size, cudaStream_t stream) { const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; @@ -7545,10 +7551,10 @@ inline void ggml_cuda_op_dequantize_mul_mat_vec( (void) src1_padded_row_size; } -inline void ggml_cuda_op_mul_mat_cublas( +static void ggml_cuda_op_mul_mat_cublas( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i, const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, - const int64_t src1_padded_row_size, const cudaStream_t & stream) { + const int64_t src1_padded_row_size, cudaStream_t stream) { GGML_ASSERT(src0_dd_i != nullptr); GGML_ASSERT(src1_ddf_i != nullptr); @@ -7637,9 +7643,9 @@ inline void ggml_cuda_op_mul_mat_cublas( (void) src1_padded_row_size; } -inline void ggml_cuda_op_rope( +static void ggml_cuda_op_rope( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * 
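    // (rope accepts both F32 and F16 activations here, per the asserts that follow)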
dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32 || src0->type == GGML_TYPE_F16); GGML_ASSERT( dst->type == GGML_TYPE_F32 || dst->type == GGML_TYPE_F16); @@ -7717,9 +7723,9 @@ inline void ggml_cuda_op_rope( (void) src1_dd; } -inline void ggml_cuda_op_alibi( +static void ggml_cuda_op_alibi( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7748,9 +7754,9 @@ inline void ggml_cuda_op_alibi( (void) src1_dd; } -inline void ggml_cuda_op_im2col( +static void ggml_cuda_op_im2col( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -7783,10 +7789,9 @@ inline void ggml_cuda_op_im2col( (void) src0_dd; } - -inline void ggml_cuda_op_sum_rows( +static void ggml_cuda_op_sum_rows( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7801,9 +7806,9 @@ inline void ggml_cuda_op_sum_rows( (void) src1_dd; } -inline void ggml_cuda_op_argsort( +static void ggml_cuda_op_argsort( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_I32); @@ -7820,9 +7825,9 @@ inline void ggml_cuda_op_argsort( (void) src1_dd; } -inline void ggml_cuda_op_diag_mask_inf( +static void ggml_cuda_op_diag_mask_inf( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7840,9 +7845,9 @@ inline void ggml_cuda_op_diag_mask_inf( (void) src1_dd; } -inline void ggml_cuda_op_soft_max( +static void ggml_cuda_op_soft_max( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7861,9 +7866,9 @@ inline void ggml_cuda_op_soft_max( (void) dst; } -inline void ggml_cuda_op_scale( +static void ggml_cuda_op_scale( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + 
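    // (scale and clamp below are simple elementwise kernels; both assert
    //  F32 in / F32 out before launching)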
const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7879,9 +7884,9 @@ inline void ggml_cuda_op_scale( (void) src1_dd; } -inline void ggml_cuda_op_clamp( +static void ggml_cuda_op_clamp( const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, - const float * src0_dd, const float * src1_dd, float * dst_dd, const cudaStream_t & main_stream) { + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -7974,12 +7979,12 @@ static void ggml_cuda_set_peer_access(const int n_tokens) { #ifdef NDEBUG for (int id = 0; id < g_device_count; ++id) { - CUDA_CHECK(ggml_cuda_set_device(id)); + ggml_cuda_set_device(id); CUDA_CHECK(cudaDeviceSynchronize()); } for (int id = 0; id < g_device_count; ++id) { - CUDA_CHECK(ggml_cuda_set_device(id)); + ggml_cuda_set_device(id); for (int id_other = 0; id_other < g_device_count; ++id_other) { if (id == id_other) { @@ -8013,7 +8018,6 @@ static void ggml_cuda_op_mul_mat( const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; const int64_t ne03 = src0->ne[3]; - const int64_t nrows0 = ggml_nrows(src0); const int64_t ne10 = src1->ne[0]; const int64_t ne11 = src1->ne[1]; @@ -8056,27 +8060,29 @@ static void ggml_cuda_op_mul_mat( GGML_ASSERT(!(split && ne03 > 1)); GGML_ASSERT(!(split && ne02 < ne12)); - // dd = data device - char * src0_dd[GGML_CUDA_MAX_DEVICES] = {nullptr}; - float * src1_ddf[GGML_CUDA_MAX_DEVICES] = {nullptr}; // float - char * src1_ddq[GGML_CUDA_MAX_DEVICES] = {nullptr}; // q8_1 - float * dst_dd[GGML_CUDA_MAX_DEVICES] = {nullptr}; + struct dev_data { + cuda_pool_alloc src0_dd_alloc; + cuda_pool_alloc src1_ddf_alloc; + cuda_pool_alloc src1_ddq_alloc; + cuda_pool_alloc dst_dd_alloc; - // as = actual size - size_t src0_as[GGML_CUDA_MAX_DEVICES] = {0}; - size_t src1_asf[GGML_CUDA_MAX_DEVICES] = {0}; - size_t src1_asq[GGML_CUDA_MAX_DEVICES] = {0}; - size_t dst_as[GGML_CUDA_MAX_DEVICES] = {0}; + char * src0_dd = nullptr; + float * src1_ddf = nullptr; // float + char * src1_ddq = nullptr; // q8_1 + float * dst_dd = nullptr; - int64_t row_low[GGML_CUDA_MAX_DEVICES]; - int64_t row_high[GGML_CUDA_MAX_DEVICES]; + int64_t row_low; + int64_t row_high; + }; + + dev_data dev[GGML_CUDA_MAX_DEVICES]; int used_devices = 0; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { // by default, use all rows - row_low[id] = 0; - row_high[id] = ne01; + dev[id].row_low = 0; + dev[id].row_high = ne01; // for multi GPU, get the row boundaries from tensor split // and round to mul_mat_q tile sizes @@ -8084,23 +8090,23 @@ static void ggml_cuda_op_mul_mat( const int64_t rounding = get_row_rounding(src0->type); if (id != 0) { - row_low[id] = ne01*g_tensor_split[id]; - if (row_low[id] < ne01) { - row_low[id] -= row_low[id] % rounding; + dev[id].row_low = ne01*g_tensor_split[id]; + if (dev[id].row_low < ne01) { + dev[id].row_low -= dev[id].row_low % rounding; } } if (id != g_device_count - 1) { - row_high[id] = ne01*g_tensor_split[id + 1]; - if (row_high[id] < ne01) { - row_high[id] -= row_high[id] % rounding; + dev[id].row_high = ne01*g_tensor_split[id + 1]; + if (dev[id].row_high < ne01) { + dev[id].row_high -= dev[id].row_high % rounding; } } } } - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device) || row_low[id] == row_high[id]) { + 
for (int id = 0; id < g_device_count; ++id) { + if ((!split && id != g_main_device) || dev[id].row_low == dev[id].row_high) { continue; } @@ -8110,42 +8116,41 @@ static void ggml_cuda_op_mul_mat( const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; ggml_cuda_set_device(id); - const cudaStream_t stream = g_cudaStreams[id][0]; + cudaStream_t stream = g_cudaStreams[id][0]; if (src0_on_device && src0_is_contiguous) { - src0_dd[id] = (char *) src0_extra->data_device[id]; + dev[id].src0_dd = (char *) src0_extra->data_device[id]; } else { - // const size_t size_src0_ddq = split ? (row_high[id]-row_low[id])*ne00 * src0_ts/src0_bs : ggml_nbytes(src0); - src0_dd[id] = (char *) ggml_cuda_pool_malloc(ggml_nbytes(src0), &src0_as[id]); + dev[id].src0_dd = dev[id].src0_dd_alloc.alloc(ggml_nbytes(src0)); } if (src1_on_device && src1_is_contiguous) { - src1_ddf[id] = (float *) src1_extra->data_device[id]; + dev[id].src1_ddf = (float *) src1_extra->data_device[id]; } else { - src1_ddf[id] = (float *) ggml_cuda_pool_malloc(ggml_nbytes(src1), &src1_asf[id]); + dev[id].src1_ddf = dev[id].src1_ddf_alloc.alloc(ggml_nelements(src1)); } if (convert_src1_to_q8_1) { - src1_ddq[id] = (char *) ggml_cuda_pool_malloc(nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs, &src1_asq[id]); + dev[id].src1_ddq = dev[id].src1_ddq_alloc.alloc(nrows1*src1_padded_col_size*q8_1_ts/q8_1_bs); if (src1_on_device && src1_is_contiguous) { - quantize_row_q8_1_cuda(src1_ddf[id], src1_ddq[id], ne10, nrows1, src1_padded_col_size, stream); + quantize_row_q8_1_cuda(dev[id].src1_ddf, dev[id].src1_ddq, ne10, nrows1, src1_padded_col_size, stream); CUDA_CHECK(cudaGetLastError()); } } if (dst_on_device) { - dst_dd[id] = (float *) dst_extra->data_device[id]; + dev[id].dst_dd = (float *) dst_extra->data_device[id]; } else { - const size_t size_dst_ddf = split ? (row_high[id]-row_low[id])*ne1*sizeof(float) : ggml_nbytes(dst); - dst_dd[id] = (float *) ggml_cuda_pool_malloc(size_dst_ddf, &dst_as[id]); + const size_t size_dst_ddf = split ? (dev[id].row_high - dev[id].row_low)*ne1 : ggml_nelements(dst); + dev[id].dst_dd = dev[id].dst_dd_alloc.alloc(size_dst_ddf); } } // if multiple devices are used they need to wait for the main device // here an event is recorded that signals that the main device has finished calculating the input data if (split && used_devices > 1) { - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); CUDA_CHECK(cudaEventRecord(src0_extra->events[g_main_device][0], g_cudaStreams[g_main_device][0])); } @@ -8154,17 +8159,17 @@ static void ggml_cuda_op_mul_mat( const int64_t is = split ? (src1_col_0/src1_col_stride) % MAX_STREAMS : 0; const int64_t src1_ncols = src1_col_0 + src1_col_stride > ne11 ? 
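            // columns of src1 handled in this chunk; the final chunk may be narrower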
ne11 - src1_col_0 : src1_col_stride; - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device) || row_low[id] == row_high[id]) { + for (int id = 0; id < g_device_count; ++id) { + if ((!split && id != g_main_device) || dev[id].row_low == dev[id].row_high) { continue; } const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device; const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; - const int64_t row_diff = row_high[id] - row_low[id]; + const int64_t row_diff = dev[id].row_high - dev[id].row_low; ggml_cuda_set_device(id); - const cudaStream_t stream = g_cudaStreams[id][is]; + cudaStream_t stream = g_cudaStreams[id][is]; // wait for main GPU data if necessary if (split && (id != g_main_device || is != 0)) { @@ -8178,34 +8183,34 @@ static void ggml_cuda_op_mul_mat( const size_t src1_ddq_i_offset = (i0*ne11 + src1_col_0) * src1_padded_col_size*q8_1_ts/q8_1_bs; // for split tensors the data begins at i0 == i0_offset_low - char * src0_dd_i = src0_dd[id] + (i0/i02_divisor) * (ne01*ne00*src0_ts)/src0_bs; - float * src1_ddf_i = src1_ddf[id] + (i0*ne11 + src1_col_0) * ne10; - char * src1_ddq_i = src1_ddq[id] + src1_ddq_i_offset; - float * dst_dd_i = dst_dd[id] + (i0*ne1 + src1_col_0) * (dst_on_device ? ne0 : row_diff); + char * src0_dd_i = dev[id].src0_dd + (i0/i02_divisor) * (ne01*ne00*src0_ts)/src0_bs; + float * src1_ddf_i = dev[id].src1_ddf + (i0*ne11 + src1_col_0) * ne10; + char * src1_ddq_i = dev[id].src1_ddq + src1_ddq_i_offset; + float * dst_dd_i = dev[id].dst_dd + (i0*ne1 + src1_col_0) * (dst_on_device ? ne0 : row_diff); // the main device memory buffer can be on VRAM scratch, with space for all partial results // in that case an offset on dst_ddf_i is needed if (dst->backend == GGML_BACKEND_GPU && id == g_main_device) { - dst_dd_i += row_low[id]; // offset is 0 if no tensor split + dst_dd_i += dev[id].row_low; // offset is 0 if no tensor split } // copy src0, src1 to device if necessary if (src1->backend == GGML_BACKEND_GPU && src1_is_contiguous) { if (id != g_main_device) { if (convert_src1_to_q8_1) { - char * src1_ddq_i_source = src1_ddq[g_main_device] + src1_ddq_i_offset; - CUDA_CHECK(cudaMemcpyAsync(src1_ddq_i, src1_ddq_i_source, src1_ncols*src1_padded_col_size*q8_1_ts/q8_1_bs, - cudaMemcpyDeviceToDevice, stream)); + char * src1_ddq_i_source = dev[g_main_device].src1_ddq + src1_ddq_i_offset; + CUDA_CHECK(cudaMemcpyPeerAsync(src1_ddq_i, id, src1_ddq_i_source, g_main_device, + src1_ncols*src1_padded_col_size*q8_1_ts/q8_1_bs, stream)); } else { float * src1_ddf_i_source = (float *) src1_extra->data_device[g_main_device]; src1_ddf_i_source += (i0*ne11 + src1_col_0) * ne10; - CUDA_CHECK(cudaMemcpyAsync(src1_ddf_i, src1_ddf_i_source, src1_ncols*ne10*sizeof(float), - cudaMemcpyDeviceToDevice, stream)); + CUDA_CHECK(cudaMemcpyPeerAsync(src1_ddf_i, id, src1_ddf_i_source, g_main_device, + src1_ncols*ne10*sizeof(float), stream)); } } } else if (src1->backend == GGML_BACKEND_CPU || (src1_on_device && !src1_is_contiguous)) { CUDA_CHECK(ggml_cuda_cpy_tensor_2d( - src1_ddf_i, src1, i03, i02, src1_col_0, src1_col_0+src1_ncols, stream)); + src1_ddf_i, src1, i03, i02, src1_col_0, src1_col_0+src1_ncols, stream)); } else { GGML_ASSERT(false); } @@ -8216,12 +8221,12 @@ static void ggml_cuda_op_mul_mat( } if (src1_col_0 == 0 && (!src0_on_device || !src0_is_contiguous) && i02 % i02_divisor == 0) { - CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src0_dd_i, src0, i03, i02/i02_divisor, row_low[id], row_high[id], stream)); + 
CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src0_dd_i, src0, i03, i02/i02_divisor, dev[id].row_low, dev[id].row_high, stream)); } // do the computation op(src0, src1, dst, src0_dd_i, src1_ddf_i, src1_ddq_i, dst_dd_i, - row_low[id], row_high[id], src1_ncols, src1_padded_col_size, stream); + dev[id].row_low, dev[id].row_high, src1_ncols, src1_padded_col_size, stream); CUDA_CHECK(cudaGetLastError()); // copy dst to host or other device if necessary @@ -8245,9 +8250,25 @@ static void ggml_cuda_op_mul_mat( // If dst is a vector with ne0 == 1 then you don't have to do this but it still produces correct results. float * dhf_dst_i = (float *) ((char *) dst_off_device + i02*nb2 + i03*nb3); GGML_ASSERT(dst->nb[1] == ne0*sizeof(float)); - dhf_dst_i += src1_col_0*ne0 + row_low[id]; - CUDA_CHECK(cudaMemcpy2DAsync(dhf_dst_i, ne0*sizeof(float), dst_dd_i, row_diff*sizeof(float), - row_diff*sizeof(float), src1_ncols, kind, stream)); + dhf_dst_i += src1_col_0*ne0 + dev[id].row_low; +#if !defined(GGML_USE_HIPBLAS) + if (kind == cudaMemcpyDeviceToDevice) { + // cudaMemcpy2DAsync may fail with copies between vmm pools of different devices + cudaMemcpy3DPeerParms p = {}; + p.dstDevice = g_main_device; + p.dstPtr = make_cudaPitchedPtr(dhf_dst_i, ne0*sizeof(float), row_diff, src1_ncols); + p.srcDevice = id; + p.srcPtr = make_cudaPitchedPtr(dst_dd_i, row_diff*sizeof(float), row_diff, src1_ncols); + p.extent = make_cudaExtent(row_diff*sizeof(float), src1_ncols, 1); + CUDA_CHECK(cudaMemcpy3DPeerAsync(&p, stream)); + } else +#endif + { + CUDA_CHECK(cudaMemcpy2DAsync(dhf_dst_i, ne0*sizeof(float), + dst_dd_i, row_diff*sizeof(float), + row_diff*sizeof(float), src1_ncols, + kind, stream)); + } } else { float * dhf_dst_i = (float *) ((char *) dst_off_device + i02*nb2 + i03*nb3); GGML_ASSERT(dst->nb[1] == ne0*sizeof(float)); @@ -8264,35 +8285,14 @@ static void ggml_cuda_op_mul_mat( } } - for (int64_t id = 0; id < g_device_count; ++id) { - if ((!split && id != g_main_device) || row_low[id] == row_high[id]) { - continue; - } - CUDA_CHECK(ggml_cuda_set_device(id)); - - // free buffers again when done - if (dst_as[id] > 0) { - ggml_cuda_pool_free(dst_dd[id], dst_as[id]); - } - if (src1_asq[id] > 0) { - ggml_cuda_pool_free(src1_ddq[id], src1_asq[id]); - } - if (src1_asf[id] > 0) { - ggml_cuda_pool_free(src1_ddf[id], src1_asf[id]); - } - if (src0_as[id] > 0) { - ggml_cuda_pool_free(src0_dd[id], src0_as[id]); - } - } - // main device waits for all other devices to be finished if (split && g_device_count > 1) { int64_t is_max = (ne11 + MUL_MAT_SRC1_COL_STRIDE - 1) / MUL_MAT_SRC1_COL_STRIDE; is_max = is_max <= MAX_STREAMS ? 
is_max : MAX_STREAMS; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); - for (int64_t id = 0; id < g_device_count; ++id) { - if (row_low[id] == row_high[id]) { + ggml_cuda_set_device(g_main_device); + for (int id = 0; id < g_device_count; ++id) { + if (dev[id].row_low == dev[id].row_high) { continue; } for (int64_t is = 0; is < is_max; ++is) { @@ -8302,7 +8302,7 @@ static void ggml_cuda_op_mul_mat( } if (dst->backend == GGML_BACKEND_CPU) { - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); CUDA_CHECK(cudaDeviceSynchronize()); } } @@ -8412,7 +8412,7 @@ static void ggml_cuda_mul_mat_vec_p021(const ggml_tensor * src0, const ggml_tens const int64_t ne12 = src1->ne[2]; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; @@ -8444,7 +8444,7 @@ static void ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor const int64_t ne12 = src1->ne[2]; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; @@ -8515,7 +8515,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const const int64_t ne1 = ggml_nelements(src1); const int64_t ne = ggml_nelements(dst); - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; CUBLAS_CHECK(cublasSetStream(g_cublas_handles[g_main_device], main_stream)); @@ -8656,7 +8656,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; int64_t min_compute_capability = INT_MAX; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { if (min_compute_capability > g_device_caps[id].cc && g_tensor_split[id] < (id + 1 < g_device_count ? 
g_tensor_split[id + 1] : 1.0f)) { min_compute_capability = g_device_caps[id].cc; } @@ -8799,7 +8799,7 @@ static void ggml_cuda_mul_mat_id_cublas(ggml_tensor * dst) { const int64_t ne1 = ggml_nelements(src1); const int64_t ne = ggml_nelements(dst); - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; CUBLAS_CHECK(cublasSetStream(g_cublas_handles[g_main_device], main_stream)); @@ -8917,7 +8917,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s std::vector ids_host(ggml_nbytes(ids)); - const cudaStream_t stream = g_cudaStreams[g_main_device][0]; + cudaStream_t stream = g_cudaStreams[g_main_device][0]; if (ids->backend == GGML_BACKEND_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; @@ -9073,7 +9073,7 @@ static void ggml_cuda_cpy(const ggml_tensor * src0, const ggml_tensor * src1, gg const int64_t nb11 = src1->nb[1]; const int64_t nb12 = src1->nb[2]; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); cudaStream_t main_stream = g_cudaStreams[g_main_device][0]; const ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; @@ -9163,7 +9163,7 @@ void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { ggml_tensor_extra_gpu * extra = new struct ggml_tensor_extra_gpu; memset(extra, 0, sizeof(*extra)); - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { if (backend == GGML_BACKEND_GPU && id != g_main_device) { continue; } @@ -9234,15 +9234,14 @@ void ggml_cuda_free_data(struct ggml_tensor * tensor) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - for (int64_t id = 0; id < g_device_count; ++id) { + for (int id = 0; id < g_device_count; ++id) { + ggml_cuda_set_device(id); if (extra->data_device[id] != nullptr) { - CUDA_CHECK(ggml_cuda_set_device(id)); CUDA_CHECK(cudaFree(extra->data_device[id])); } for (int64_t is = 0; is < MAX_STREAMS; ++is) { if (extra->events[id][is] != nullptr) { - CUDA_CHECK(ggml_cuda_set_device(id)); CUDA_CHECK(cudaEventDestroy(extra->events[id][is])); } } @@ -9296,7 +9295,7 @@ static void ggml_cuda_assign_buffers_impl(struct ggml_tensor * tensor, bool scra force_inplace; const size_t size = ggml_nbytes(tensor); - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); if (inplace && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) { ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; @@ -9373,7 +9372,7 @@ void ggml_cuda_copy_to_device(struct ggml_tensor * tensor) { GGML_ASSERT(ggml_is_contiguous(tensor)); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - CUDA_CHECK(ggml_cuda_set_device(g_main_device)); + ggml_cuda_set_device(g_main_device); CUDA_CHECK(cudaMemcpy(extra->data_device[g_main_device], tensor->data, ggml_nbytes(tensor), cudaMemcpyHostToDevice)); } diff --git a/ggml.c b/ggml.c index d24560480..ed56e60a8 100644 --- a/ggml.c +++ b/ggml.c @@ -4041,7 +4041,6 @@ static struct ggml_tensor * ggml_group_norm_impl( result->op = GGML_OP_GROUP_NORM; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - result->src[1] = NULL; // TODO: maybe store epsilon here? 
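// note: the explicit `result->src[1] = NULL;` stores removed in this hunk and in
// the two hunks below were presumably already redundant, since newly created ggml
// tensors appear to have every src[] slot initialized to NULL by the allocator.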
return result; } @@ -5541,7 +5540,6 @@ static struct ggml_tensor * ggml_upscale_impl( result->op_params[0] = scale_factor; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - result->src[1] = NULL; return result; } @@ -5846,7 +5844,6 @@ struct ggml_tensor * ggml_get_rel_pos( result->op = GGML_OP_GET_REL_POS; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - result->src[1] = NULL; return result; } diff --git a/llama.cpp b/llama.cpp index 0b99f1e03..4aa59c4c0 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9519,7 +9519,8 @@ struct llama_context * llama_new_context_with_model( ctx->alloc = ggml_allocr_new_from_buffer(ctx->buf_alloc); #if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) if (model->n_gpu_layers > 0) { - ggml_cuda_set_scratch_size(alloc_size); + // the CPU buffer adds this padding in case the malloc buffer is not aligned, so we need to do the same for the GPU buffer, since we use the same offsets + ggml_cuda_set_scratch_size(alloc_size + 64); LLAMA_LOG_INFO("%s: VRAM scratch buffer: %.2f MiB\n", __func__, alloc_size / 1024.0 / 1024.0); // calculate total VRAM usage From f56d6077d0c37e6606ac0a4fa3169de70593acfe Mon Sep 17 00:00:00 2001 From: wonjun Jang Date: Wed, 27 Dec 2023 17:37:25 +0900 Subject: [PATCH 304/859] Add byte token type when tokenizer.model is not exists (#4641) * Add byte token type to hf format * remove unused variable --- convert.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/convert.py b/convert.py index 7a3cd615e..1f0c4f2f4 100755 --- a/convert.py +++ b/convert.py @@ -357,6 +357,7 @@ class VocabLoader: for tok in self.tokenizer.all_special_tokens } self.special_ids: set[int] = set(self.tokenizer.all_special_ids) + self.reverse_vocab = {id: encoded_tok for encoded_tok, id in self.tokenizer.get_vocab().items()} self.vocab_size_base: int = self.tokenizer.vocab_size self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_dict) self.fname_tokenizer: Path = fname_tokenizer @@ -370,15 +371,13 @@ class VocabLoader: self.spm = None def hf_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - tokenizer = self.tokenizer - reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.get_vocab().items()} added_tokens_ids = set(self.added_tokens_dict.values()) for i in range(self.vocab_size_base): if i in added_tokens_ids: continue - text = reverse_vocab[i].encode("utf-8") + text = self.reverse_vocab[i].encode("utf-8") yield text, self.get_token_score(i), self.get_token_type(i) def get_token_type(self, token_id: int) -> gguf.TokenType: @@ -394,10 +393,13 @@ class VocabLoader: if self.spm.is_byte(token_id): toktype = gguf.TokenType.BYTE else: + token = self.reverse_vocab[token_id] if token_id == self.unk_token_id: toktype = gguf.TokenType.UNKNOWN - if token_id in self.special_ids: + elif token_id in self.special_ids: toktype = gguf.TokenType.CONTROL + elif len(token) == 6 and token.startswith("<0x") and token.endswith(">"): + toktype = gguf.TokenType.BYTE return toktype From 951010fa53a0ffe81b7d2e87c4349e0d3cb3d19d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 27 Dec 2023 11:02:13 +0200 Subject: [PATCH 305/859] ggml : fix dot product for ARM (#4630) ggml-ci --- ggml-quants.c | 363 +++----------------------------------------------- 1 file changed, 22 insertions(+), 341 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index a15a24048..05ef8f9b7 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -407,6 +407,18 @@ inline 
static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { #define ggml_vld1q_s8_x4 vld1q_s8_x4 #endif + +#if !defined(__ARM_FEATURE_DOTPROD) + +inline static int32x4_t vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { + const int16x8_t p0 = vmull_s8(vget_low_s8 (a), vget_low_s8 (b)); + const int16x8_t p1 = vmull_s8(vget_high_s8(a), vget_high_s8(b)); + + return vaddq_s32(acc, vaddq_s32(vpaddlq_s16(p0), vpaddlq_s16(p1))); +} + +#endif + #endif #if defined(__ARM_NEON) || defined(__wasm_simd128__) @@ -2468,32 +2480,12 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const int8x16_t v1_1l = vld1q_s8(y1->qs); const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) // dot product into int32x4_t const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0ls), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0ls), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hs), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hs), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1ls), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1ls), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hs), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hs), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -2776,32 +2768,12 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri const int8x16_t v1_1l = vld1q_s8(y1->qs); const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) // dot product into int32x4_t const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*y1->d); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0l), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0l), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0h), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0h), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1l), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1l), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1h), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = 
vmull_s8(vget_high_s8(v0_1h), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*y1->d); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs; @@ -2963,32 +2935,12 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri const int8x16_t v1_1l = vld1q_s8(y1->qs); const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hf), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hf), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1lf), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1lf), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hf), vget_low_s8 (v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hf), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -3275,32 +3227,12 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri const int8x16_t v1_1l = vld1q_s8(y1->qs); const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*y1->d); -#else - const int16x8_t pl0l = vmull_s8(vget_low_s8 (v0_0lf), vget_low_s8 (v1_0l)); - const int16x8_t pl0h = vmull_s8(vget_high_s8(v0_0lf), vget_high_s8(v1_0l)); - const int16x8_t ph0l = vmull_s8(vget_low_s8 (v0_0hf), vget_low_s8 (v1_0h)); - const int16x8_t ph0h = vmull_s8(vget_high_s8(v0_0hf), vget_high_s8(v1_0h)); - - const int16x8_t pl1l = vmull_s8(vget_low_s8 (v0_1lf), vget_low_s8 (v1_1l)); - const int16x8_t pl1h = vmull_s8(vget_high_s8(v0_1lf), vget_high_s8(v1_1l)); - const int16x8_t ph1l = vmull_s8(vget_low_s8 (v0_1hf), vget_low_s8 
(v1_1h)); - const int16x8_t ph1h = vmull_s8(vget_high_s8(v0_1hf), vget_high_s8(v1_1h)); - - const int32x4_t pl0 = vaddq_s32(vpaddlq_s16(pl0l), vpaddlq_s16(pl0h)); - const int32x4_t ph0 = vaddq_s32(vpaddlq_s16(ph0l), vpaddlq_s16(ph0h)); - const int32x4_t pl1 = vaddq_s32(vpaddlq_s16(pl1l), vpaddlq_s16(pl1h)); - const int32x4_t ph1 = vaddq_s32(vpaddlq_s16(ph1l), vpaddlq_s16(ph1h)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(pl0, ph0)), GGML_FP16_TO_FP32(x0->d)*y0->d); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(pl1, ph1)), GGML_FP16_TO_FP32(x1->d)*y1->d); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs0 + summs1; @@ -3550,7 +3482,6 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri const int8x16_t y1_0 = vld1q_s8(y1->qs); const int8x16_t y1_1 = vld1q_s8(y1->qs + 16); -#if defined(__ARM_FEATURE_DOTPROD) sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); @@ -3558,26 +3489,6 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); - -#else - const int16x8_t p0_0 = vmull_s8(vget_low_s8 (x0_0), vget_low_s8 (y0_0)); - const int16x8_t p0_1 = vmull_s8(vget_high_s8(x0_0), vget_high_s8(y0_0)); - const int16x8_t p0_2 = vmull_s8(vget_low_s8 (x0_1), vget_low_s8 (y0_1)); - const int16x8_t p0_3 = vmull_s8(vget_high_s8(x0_1), vget_high_s8(y0_1)); - - const int16x8_t p1_0 = vmull_s8(vget_low_s8 (x1_0), vget_low_s8 (y1_0)); - const int16x8_t p1_1 = vmull_s8(vget_high_s8(x1_0), vget_high_s8(y1_0)); - const int16x8_t p1_2 = vmull_s8(vget_low_s8 (x1_1), vget_low_s8 (y1_1)); - const int16x8_t p1_3 = vmull_s8(vget_high_s8(x1_1), vget_high_s8(y1_1)); - - const int32x4_t p0 = vaddq_s32(vpaddlq_s16(p0_0), vpaddlq_s16(p0_1)); - const int32x4_t p1 = vaddq_s32(vpaddlq_s16(p0_2), vpaddlq_s16(p0_3)); - const int32x4_t p2 = vaddq_s32(vpaddlq_s16(p1_0), vpaddlq_s16(p1_1)); - const int32x4_t p3 = vaddq_s32(vpaddlq_s16(p1_2), vpaddlq_s16(p1_3)); - - sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32(p0, p1)), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); - sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32(p2, p3)), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); -#endif } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -3650,12 +3561,10 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - const uint8x16_t m3 = vdupq_n_u8(0x3); const uint8x16_t m4 = vdupq_n_u8(0xF); -#if defined(__ARM_FEATURE_DOTPROD) - const int32x4_t vzero = vdupq_n_s32(0); -#endif + + const int32x4_t vzero = vdupq_n_s32(0); ggml_int8x16x2_t q2bytes; uint8_t aux[16]; @@ -3663,7 +3572,6 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri float sum = 0; for (int i = 0; i < nb; ++i) { - const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); @@ -3689,20 +3597,9 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri // We use this macro instead of a function call because for some reason // the code runs 2-3% slower, even if the function is declared inline -#if defined(__ARM_FEATURE_DOTPROD) #define MULTIPLY_ACCUM_WITH_SCALE(index)\ isum += 
vaddvq_s32(vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * aux[is+(index)];\ isum += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * aux[is+1+(index)]; -#else -#define MULTIPLY_ACCUM_WITH_SCALE(index)\ - {\ - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[0]), vget_low_s8 (q8bytes.val[0])),\ - vmull_s8(vget_high_s8(q2bytes.val[0]), vget_high_s8(q8bytes.val[0])));\ - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[1]), vget_low_s8 (q8bytes.val[1])),\ - vmull_s8(vget_high_s8(q2bytes.val[1]), vget_high_s8(q8bytes.val[1])));\ - isum += vaddvq_s16(p1) * aux[is+(index)] + vaddvq_s16(p2) * aux[is+1+(index)];\ - } -#endif #define SHIFT_MULTIPLY_ACCUM_WITH_SCALE(shift, index)\ q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32;\ @@ -3710,26 +3607,23 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits.val[1], (shift)), m3));\ MULTIPLY_ACCUM_WITH_SCALE((index)); - for (int j = 0; j < QK_K/128; ++j) { - const ggml_uint8x16x2_t q2bits = ggml_vld1q_u8_x2(q2); q2 += 32; ggml_int8x16x2_t q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q2bytes.val[0] = vreinterpretq_s8_u8(vandq_u8(q2bits.val[0], m3)); q2bytes.val[1] = vreinterpretq_s8_u8(vandq_u8(q2bits.val[1], m3)); + MULTIPLY_ACCUM_WITH_SCALE(0); SHIFT_MULTIPLY_ACCUM_WITH_SCALE(2, 2); - SHIFT_MULTIPLY_ACCUM_WITH_SCALE(4, 4); - SHIFT_MULTIPLY_ACCUM_WITH_SCALE(6, 6); is += 8; } - sum += d * isum; + sum += d * isum; } *s = sum; @@ -4043,11 +3937,9 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - const uint8x16_t m3 = vdupq_n_u8(0x3); -#if defined(__ARM_FEATURE_DOTPROD) - const int32x4_t vzero = vdupq_n_s32(0); -#endif + + const int32x4_t vzero = vdupq_n_s32(0); ggml_int8x16x4_t q2bytes; @@ -4081,28 +3973,12 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri q2bytes.val[2] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 4), m3)); q2bytes.val[3] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 6), m3)); -#if defined(__ARM_FEATURE_DOTPROD) isum1 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * scales[0]; isum2 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * scales[1]; isum1 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[2], q8bytes.val[2])) * scales[2]; isum2 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[3], q8bytes.val[3])) * scales[3]; -#else - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q2bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q2bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - isum1 += vaddvq_s16(p1) * scales[0]; - isum2 += vaddvq_s16(p2) * scales[1]; - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q2bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p4 = vaddq_s16(vmull_s8(vget_low_s8 (q2bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q2bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum1 += vaddvq_s16(p3) * scales[2]; - isum2 += vaddvq_s16(p4) * scales[3]; -#endif sum += d * (isum1 + isum2); - } *s = sum; @@ -4328,9 +4204,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri uint32_t utmp[4]; const uint8x16_t m3b = 
vdupq_n_u8(0x3); -#ifdef __ARM_FEATURE_DOTPROD const int32x4_t vzero = vdupq_n_s32(0); -#endif const uint8x16_t m0 = vdupq_n_u8(1); const uint8x16_t m1 = vshlq_n_u8(m0, 1); @@ -4382,22 +4256,11 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 2), m3b)), vreinterpretq_s8_u8(q3h.val[2])); q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 2), m3b)), vreinterpretq_s8_u8(q3h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes_1.val[0])) * scale[0]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes_1.val[1])) * scale[1]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes_1.val[2])) * scale[2]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes_1.val[3])) * scale[3]; -#else - int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[0]), vget_low_s8 (q8bytes_1.val[0])), - vmull_s8(vget_high_s8(q3bytes.val[0]), vget_high_s8(q8bytes_1.val[0]))); - int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[1]), vget_low_s8 (q8bytes_1.val[1])), - vmull_s8(vget_high_s8(q3bytes.val[1]), vget_high_s8(q8bytes_1.val[1]))); - int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[2]), vget_low_s8 (q8bytes_1.val[2])), - vmull_s8(vget_high_s8(q3bytes.val[2]), vget_high_s8(q8bytes_1.val[2]))); - int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[3]), vget_low_s8 (q8bytes_1.val[3])), - vmull_s8(vget_high_s8(q3bytes.val[3]), vget_high_s8(q8bytes_1.val[3]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1] + vaddvq_s16(p2) * scale[2] + vaddvq_s16(p3) * scale[3]; -#endif + scale += 4; q3h.val[0] = vbicq_u8(m2, qhbits.val[0]); @@ -4410,22 +4273,11 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 6), m3b)), vreinterpretq_s8_u8(q3h.val[2])); q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 6), m3b)), vreinterpretq_s8_u8(q3h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes_2.val[0])) * scale[0]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes_2.val[1])) * scale[1]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes_2.val[2])) * scale[2]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes_2.val[3])) * scale[3]; -#else - p0 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[0]), vget_low_s8 (q8bytes_2.val[0])), - vmull_s8(vget_high_s8(q3bytes.val[0]), vget_high_s8(q8bytes_2.val[0]))); - p1 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[1]), vget_low_s8 (q8bytes_2.val[1])), - vmull_s8(vget_high_s8(q3bytes.val[1]), vget_high_s8(q8bytes_2.val[1]))); - p2 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[2]), vget_low_s8 (q8bytes_2.val[2])), - vmull_s8(vget_high_s8(q3bytes.val[2]), vget_high_s8(q8bytes_2.val[2]))); - p3 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[3]), vget_low_s8 (q8bytes_2.val[3])), - vmull_s8(vget_high_s8(q3bytes.val[3]), vget_high_s8(q8bytes_2.val[3]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1] + vaddvq_s16(p2) * scale[2] + vaddvq_s16(p3) * scale[3]; -#endif + scale += 4; if (j == 0) { @@ -4864,10 +4716,7 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - -#ifdef __ARM_FEATURE_DOTPROD - const int32x4_t vzero = vdupq_n_s32(0); 
-#endif + const int32x4_t vzero = vdupq_n_s32(0); const uint8x16_t m3b = vdupq_n_u8(0x3); const uint8x16_t mh = vdupq_n_u8(4); @@ -4908,22 +4757,10 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(vshrq_n_u8(q3bits, 4), m3b), q3h.val[2])); q3bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q3bits, 6), q3h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes.val[0])) * scales[0]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes.val[1])) * scales[2]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes.val[2])) * scales[1]; isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes.val[3])) * scales[3]; -#else - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q3bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q3bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q3bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q3bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q3bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum += vaddvq_s16(p0) * scales[0] + vaddvq_s16(p1) * scales[2] + vaddvq_s16(p2) * scales[1] + vaddvq_s16(p3) * scales[3]; -#endif sum += d * isum; @@ -5228,11 +5065,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri uint32_t utmp[4]; #ifdef __ARM_NEON - const uint8x16_t m4b = vdupq_n_u8(0xf); -#ifdef __ARM_FEATURE_DOTPROD const int32x4_t mzero = vdupq_n_s32(0); -#endif ggml_int8x16x2_t q4bytes; ggml_int8x16x2_t q8bytes; @@ -5269,10 +5103,8 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri int32_t sumi2 = 0; for (int j = 0; j < QK_K/64; ++j) { - const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); q4 += 32; -#ifdef __ARM_FEATURE_DOTPROD q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); @@ -5287,26 +5119,6 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t p2 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); sumi2 += vaddvq_s32(p2) * scales[2*j+1]; -#else - q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; - q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); - q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q4bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - sumi1 += vaddvq_s16(vaddq_s16(p0, p1)) * scales[2*j+0]; - - q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; - q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); - q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - 
vmull_s8(vget_high_s8(q4bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - sumi2 += vaddvq_s16(vaddq_s16(p2, p3)) * scales[2*j+1]; - -#endif } sumf += d * (sumi1 + sumi2); @@ -5603,12 +5415,9 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - const uint8x16_t m4b = vdupq_n_u8(0xf); -#ifdef __ARM_FEATURE_DOTPROD const int32x4_t mzero = vdupq_n_s32(0); -#endif float sumf = 0; @@ -5636,7 +5445,6 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); -#ifdef __ARM_FEATURE_DOTPROD q8bytes = ggml_vld1q_s8_x4(q8); q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); @@ -5650,27 +5458,7 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri const int32x4_t p2 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[2]), q4bytes.val[1], q8bytes.val[3]); const int32_t sumi2 = vaddvq_s32(p2) * scales[1]; -#else - q8bytes = ggml_vld1q_s8_x4(q8); - q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); - q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q4bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - int32_t sumi1 = vaddvq_s16(vaddq_s16(p0, p1)) * scales[0]; - - q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); - q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[0]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q4bytes.val[0]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q4bytes.val[1]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q4bytes.val[1]), vget_high_s8(q8bytes.val[3]))); - int32_t sumi2 = vaddvq_s16(vaddq_s16(p2, p3)) * scales[1]; - -#endif sumf += d * (sumi1 + sumi2); - } *s = sumf - sum_mins; @@ -5875,15 +5663,11 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri uint32_t utmp[4]; - #ifdef __ARM_NEON - const uint8x16_t m4b = vdupq_n_u8(0xf); const uint8x16_t mone = vdupq_n_u8(1); const uint8x16_t mtwo = vdupq_n_u8(2); -#if defined(__ARM_FEATURE_DOTPROD) const int32x4_t mzero = vdupq_n_s32(0); -#endif ggml_int8x16x4_t q5bytes; @@ -5938,28 +5722,11 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri q5bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[0], 4), q5h.val[2])); q5bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[1], 4), q5h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) - sumi += vaddvq_s32(vdotq_s32(vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0]), q5bytes.val[1], q8bytes.val[1])) * *scales++; sumi += vaddvq_s32(vdotq_s32(vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2]), q5bytes.val[3], q8bytes.val[3])) * *scales++; -#else - - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - 
vmull_s8(vget_high_s8(q5bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q5bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - sumi += vaddvq_s16(vaddq_s16(p0, p1)) * *scales++; - - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q5bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q5bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - sumi += vaddvq_s16(vaddq_s16(p2, p3)) * *scales++; -#endif } sumf += d * sumi - dmin * sumi_mins; - } *s = sumf; @@ -6311,12 +6078,9 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - const uint8x16_t m4b = vdupq_n_u8(0xf); const uint8x16_t mh = vdupq_n_u8(16); -#if defined(__ARM_FEATURE_DOTPROD) const int32x4_t mzero = vdupq_n_s32(0); -#endif ggml_int8x16x4_t q5bytes; ggml_uint8x16x4_t q5h; @@ -6348,32 +6112,12 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri q5bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(q5bits.val[0], 4)), vreinterpretq_s8_u8(q5h.val[2])); q5bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(q5bits.val[1], 4)), vreinterpretq_s8_u8(q5h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) - int32_t sumi1 = sc[0] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0])); int32_t sumi2 = sc[1] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[1], q8bytes.val[1])); int32_t sumi3 = sc[2] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2])); int32_t sumi4 = sc[3] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[3], q8bytes.val[3])); sumf += d * (sumi1 + sumi2 + sumi3 + sumi4); - -#else - - const int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q5bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - const int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q5bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - int32_t sumi = sc[0] * vaddvq_s16(p0) + sc[1] * vaddvq_s16(p1); - - const int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q5bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - const int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q5bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q5bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - sumi += sc[2] * vaddvq_s16(p2) + sc[3] * vaddvq_s16(p3); - - sumf += d*sumi; -#endif - } *s = sumf; @@ -6600,13 +6344,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - float sum = 0; const uint8x16_t m4b = vdupq_n_u8(0xF); -#if defined(__ARM_FEATURE_DOTPROD) const int32x4_t vzero = vdupq_n_s32(0); -#endif //const int8x16_t m32s = vdupq_n_s8(32); const uint8x16_t mone = vdupq_n_u8(3); @@ -6658,31 +6399,13 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[2], m4b), q6h.val[2])); q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[3], m4b), q6h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + 
vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + scale += 4; -#else - - int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q6bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q6bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1]; - scale += 2; - - int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q6bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q6bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum += vaddvq_s16(p2) * scale[0] + vaddvq_s16(p3) * scale[1]; - scale += 2; -#endif - q8bytes = ggml_vld1q_s8_x4(q8); q8 += 64; shifted = vshrq_n_u8(qhbits.val[0], 4); @@ -6703,34 +6426,11 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[2], 4), q6h.val[2])); q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[3], 4), q6h.val[3])); -#if defined(__ARM_FEATURE_DOTPROD) - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; scale += 4; - - //for (int l = 0; l < 4; ++l) { - // const int32x4_t p = vdotq_s32(vzero, q6bytes.val[l], q8bytes.val[l]); - // isum += vaddvq_s32(p) * *scale++; - //} -#else - p0 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q6bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - p1 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q6bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1]; - scale += 2; - - p2 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q6bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - p3 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q6bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum += vaddvq_s16(p2) * scale[0] + vaddvq_s16(p3) * scale[1]; - scale += 2; -#endif - } //sum += isum * d_all * y[i].d; sum += d_all * y[i].d * (isum - 32 * isum_mins); @@ -7076,14 +6776,11 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri const int nb = n / QK_K; #ifdef __ARM_NEON - float sum = 0; const uint8x16_t m4b = vdupq_n_u8(0xF); const int8x16_t m32s = vdupq_n_s8(32); -#if defined(__ARM_FEATURE_DOTPROD) const int32x4_t vzero = vdupq_n_s32(0); -#endif const uint8x16_t mone = vdupq_n_u8(3); @@ -7119,26 +6816,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[0], 4), q6h.val[2])), m32s); q6bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[1], 4), q6h.val[3])), m32s); 
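// note: the NEON fallback removed below, like the ones removed from the other
// dot-product kernels in this patch, is now provided in a single place: the
// vdotq_s32() compatibility shim added near the top of this file emulates the
// dot-product instruction with vmull_s8/vpaddlq_s16 when __ARM_FEATURE_DOTPROD
// is not available, so every kernel can call vdotq_s32() unconditionally.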
-#if defined(__ARM_FEATURE_DOTPROD) - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; -#else - - int16x8_t p0 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[0]), vget_low_s8 (q8bytes.val[0])), - vmull_s8(vget_high_s8(q6bytes.val[0]), vget_high_s8(q8bytes.val[0]))); - int16x8_t p1 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[1]), vget_low_s8 (q8bytes.val[1])), - vmull_s8(vget_high_s8(q6bytes.val[1]), vget_high_s8(q8bytes.val[1]))); - isum += vaddvq_s16(p0) * scale[0] + vaddvq_s16(p1) * scale[1]; - - int16x8_t p2 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[2]), vget_low_s8 (q8bytes.val[2])), - vmull_s8(vget_high_s8(q6bytes.val[2]), vget_high_s8(q8bytes.val[2]))); - int16x8_t p3 = vaddq_s16(vmull_s8(vget_low_s8 (q6bytes.val[3]), vget_low_s8 (q8bytes.val[3])), - vmull_s8(vget_high_s8(q6bytes.val[3]), vget_high_s8(q8bytes.val[3]))); - isum += vaddvq_s16(p2) * scale[2] + vaddvq_s16(p3) * scale[3]; -#endif sum += isum * d_all * y[i].d; From b47879b0dda43f2d26415e88b6840295817e552a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 27 Dec 2023 11:15:31 +0200 Subject: [PATCH 306/859] scripts : add sync-ggml-am.sh --- scripts/sync-ggml-am.sh | 131 ++++++++++++++++++++++++++++++++++++++++ scripts/sync-ggml.last | 1 + 2 files changed, 132 insertions(+) create mode 100755 scripts/sync-ggml-am.sh create mode 100644 scripts/sync-ggml.last diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh new file mode 100755 index 000000000..83abe3681 --- /dev/null +++ b/scripts/sync-ggml-am.sh @@ -0,0 +1,131 @@ +#!/bin/bash +# +# Synchronize ggml changes to llama.cpp +# +# Usage: +# +# $ cd /path/to/llama.cpp +# $ ./scripts/sync-ggml-am.sh +# + +set -e + +sd=$(dirname $0) +cd $sd/../ + +SRC_LLAMA=$(pwd) +SRC_GGML=$(cd ../ggml; pwd) + +if [ ! -d $SRC_GGML ]; then + echo "ggml not found at $SRC_GGML" + exit 1 +fi + +lc=$(cat $SRC_LLAMA/scripts/sync-ggml.last) +echo "Syncing ggml changes since commit $lc" + +cd $SRC_GGML + +git log --oneline $lc..HEAD + +git format-patch $lc --stdout -- \ + include/ggml/ggml*.h \ + src/ggml*.h \ + src/ggml*.c \ + src/ggml*.cpp \ + src/ggml*.m \ + src/ggml*.metal \ + src/ggml*.cu \ + tests/test-opt.cpp \ + tests/test-grad0.cpp \ + tests/test-quantize-fns.cpp \ + tests/test-quantize-perf.cpp \ + tests/test-backend-ops.cpp \ + > $SRC_LLAMA/ggml-src.patch + +# delete files if empty +if [ ! 
-s $SRC_LLAMA/ggml-src.patch ]; then + rm -v $SRC_LLAMA/ggml-src.patch +fi + +cd $SRC_LLAMA + +if [ -f $SRC_LLAMA/ggml-src.patch ]; then + # replace PR numbers + # + # Subject: some text (#1234) + # Subject: some text (ggml/1234) + cat ggml-src.patch | sed -e 's/^Subject: \(.*\) (#\([0-9]*\))/Subject: \1 (ggml\/\2)/' > ggml-src.patch.tmp + mv ggml-src.patch.tmp ggml-src.patch + + cat ggml-src.patch | sed -e 's/^\(.*\) (#\([0-9]*\))$/\1 (ggml\/\2)/' > ggml-src.patch.tmp + mv ggml-src.patch.tmp ggml-src.patch + + # replace filenames: + # + # src/ggml.c -> ggml.c + # src/ggml-alloc.c -> ggml-alloc.c + # src/ggml-backend-impl.h -> ggml-backend-impl.h + # src/ggml-backend.c -> ggml-backend.c + # src/ggml-cuda.cu -> ggml-cuda.cu + # src/ggml-cuda.h -> ggml-cuda.h + # src/ggml-impl.h -> ggml-impl.h + # src/ggml-metal.h -> ggml-metal.h + # src/ggml-metal.m -> ggml-metal.m + # src/ggml-metal.metal -> ggml-metal.metal + # src/ggml-mpi.h -> ggml-mpi.h + # src/ggml-mpi.c -> ggml-mpi.c + # src/ggml-opencl.cpp -> ggml-opencl.cpp + # src/ggml-opencl.h -> ggml-opencl.h + # src/ggml-quants.c -> ggml-quants.c + # src/ggml-quants.h -> ggml-quants.h + # include/ggml/ggml.h -> ggml.h + # include/ggml/ggml-alloc.h -> ggml-alloc.h + # include/ggml/ggml-backend.h -> ggml-backend.h + # + # tests/test-opt.cpp -> tests/test-opt.cpp + # tests/test-grad0.cpp -> tests/test-grad0.cpp + # tests/test-quantize-fns.cpp -> tests/test-quantize-fns.cpp + # tests/test-quantize-perf.cpp -> tests/test-quantize-perf.cpp + # tests/test-backend-ops.cpp -> tests/test-backend-ops.cpp + + cat ggml-src.patch | sed \ + -e 's/src\/ggml\.c/ggml.c/g' \ + -e 's/src\/ggml-alloc\.c/ggml-alloc.c/g' \ + -e 's/src\/ggml-backend-impl\.h/ggml-backend-impl.h/g' \ + -e 's/src\/ggml-backend\.c/ggml-backend.c/g' \ + -e 's/src\/ggml-cuda\.cu/ggml-cuda.cu/g' \ + -e 's/src\/ggml-cuda\.h/ggml-cuda.h/g' \ + -e 's/src\/ggml-impl\.h/ggml-impl.h/g' \ + -e 's/src\/ggml-metal\.h/ggml-metal.h/g' \ + -e 's/src\/ggml-metal\.m/ggml-metal.m/g' \ + -e 's/src\/ggml-metal\.metal/ggml-metal.metal/g' \ + -e 's/src\/ggml-mpi\.h/ggml-mpi.h/g' \ + -e 's/src\/ggml-mpi\.c/ggml-mpi.c/g' \ + -e 's/src\/ggml-opencl\.cpp/ggml-opencl.cpp/g' \ + -e 's/src\/ggml-opencl\.h/ggml-opencl.h/g' \ + -e 's/src\/ggml-quants\.c/ggml-quants.c/g' \ + -e 's/src\/ggml-quants\.h/ggml-quants.h/g' \ + -e 's/include\/ggml\/ggml\.h/ggml.h/g' \ + -e 's/include\/ggml\/ggml-alloc\.h/ggml-alloc.h/g' \ + -e 's/include\/ggml\/ggml-backend\.h/ggml-backend.h/g' \ + -e 's/tests\/test-opt\.cpp/tests\/test-opt.cpp/g' \ + -e 's/tests\/test-grad0\.cpp/tests\/test-grad0.cpp/g' \ + -e 's/tests\/test-quantize-fns\.cpp/tests\/test-quantize-fns.cpp/g' \ + -e 's/tests\/test-quantize-perf\.cpp/tests\/test-quantize-perf.cpp/g' \ + -e 's/tests\/test-backend-ops\.cpp/tests\/test-backend-ops.cpp/g' \ + > ggml-src.patch.tmp + mv ggml-src.patch.tmp ggml-src.patch + + git am ggml-src.patch + + rm -v $SRC_LLAMA/ggml-src.patch +fi + +# update last commit +cd $SRC_GGML +git log -1 --format=%H > $SRC_LLAMA/scripts/sync-ggml.last + +echo "Done" + +exit 0 diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last new file mode 100644 index 000000000..1ec144116 --- /dev/null +++ b/scripts/sync-ggml.last @@ -0,0 +1 @@ +76e7f47b69e8334384dc718480c496dafbd47999 From 879b690a9e1eb1ab0a29b58236fc76978fb4d902 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 27 Dec 2023 15:16:55 +0100 Subject: [PATCH 307/859] finetune : fix output formatting in print_params (#4653) This commit fixes the output formatting in the print_params 
function which currently looks like this:
```console
print_params: n_vocab: 32000
print_params: n_ctx: 128
print_params: n_embd: 4096
print_params: n_ff: 11008
print_params: n_head: 32
print_params: n_head_kv: 32
print_params: n_layer: 32
print_params: norm_rms_eps : 0.000010
print_params: rope_freq_base : 10000.000000
print_params: rope_freq_scale : 1.000000
```
With this commit the output will look like this:
```console
print_params: n_vocab : 32000
print_params: n_ctx : 128
print_params: n_embd : 4096
print_params: n_ff : 11008
print_params: n_head : 32
print_params: n_head_kv : 32
print_params: n_layer : 32
print_params: norm_rms_eps : 0.000010
print_params: rope_freq_base : 10000.000000
print_params: rope_freq_scale : 1.000000
```
Signed-off-by: Daniel Bevenius
---
 examples/finetune/finetune.cpp | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp
index 7b1333a9d..e0520f64c 100644
--- a/examples/finetune/finetune.cpp
+++ b/examples/finetune/finetune.cpp
@@ -196,13 +196,13 @@ static const char * LLM_TENSOR_FFN_DOWN = "blk.%d.ffn_down";
 static const char * LLM_TENSOR_FFN_UP = "blk.%d.ffn_up";
 static void print_params(struct my_llama_hparams * params) {
- printf("%s: n_vocab: %u\n", __func__, params->n_vocab);
- printf("%s: n_ctx: %u\n", __func__, params->n_ctx);
- printf("%s: n_embd: %u\n", __func__, params->n_embd);
- printf("%s: n_ff: %u\n", __func__, params->n_ff);
- printf("%s: n_head: %u\n", __func__, params->n_head);
- printf("%s: n_head_kv: %u\n", __func__, params->n_head_kv);
- printf("%s: n_layer: %u\n", __func__, params->n_layer);
+ printf("%s: n_vocab : %u\n", __func__, params->n_vocab);
+ printf("%s: n_ctx : %u\n", __func__, params->n_ctx);
+ printf("%s: n_embd : %u\n", __func__, params->n_embd);
+ printf("%s: n_ff : %u\n", __func__, params->n_ff);
+ printf("%s: n_head : %u\n", __func__, params->n_head);
+ printf("%s: n_head_kv : %u\n", __func__, params->n_head_kv);
+ printf("%s: n_layer : %u\n", __func__, params->n_layer);
 printf("%s: norm_rms_eps : %f\n", __func__, params->f_norm_rms_eps);
 printf("%s: rope_freq_base : %f\n", __func__, params->rope_freq_base);
 printf("%s: rope_freq_scale : %f\n", __func__, params->rope_freq_scale);

From f6793491b5af6da75edad34d6f503ef86d31b09f Mon Sep 17 00:00:00 2001
From: "Nam D. 
Tran" <42194884+namtranase@users.noreply.github.com> Date: Wed, 27 Dec 2023 22:39:45 +0700 Subject: [PATCH 308/859] llama : add AWQ for llama, llama2, mpt, and mistral models (#4593) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * update: awq support llama-7b model * update: change order * update: benchmark results for llama2-7b * update: mistral 7b v1 benchmark * update: support 4 models * fix: Readme * update: ready for PR * update: readme * fix: readme * update: change order import * black * format code * update: work for bot mpt and awqmpt * update: readme * Rename to llm_build_ffn_mpt_awq * Formatted other files * Fixed params count * fix: remove code * update: more detail for mpt * fix: readme * fix: readme * update: change folder architecture * fix: common.cpp * fix: readme * fix: remove ggml_repeat * update: cicd * update: cicd * uppdate: remove use_awq arg * update: readme * llama : adapt plamo to new ffn ggml-ci --------- Co-authored-by: Trần Đức Nam Co-authored-by: Le Hoang Anh Co-authored-by: Georgi Gerganov --- awq-py/README.md | 116 +++++++++++++++ awq-py/awq/apply_awq.py | 254 +++++++++++++++++++++++++++++++++ awq-py/requirements.txt | 2 + convert-hf-to-gguf.py | 27 +++- convert.py | 14 ++ gguf-py/gguf/constants.py | 3 + gguf-py/gguf/tensor_mapping.py | 5 + llama.cpp | 27 +++- 8 files changed, 443 insertions(+), 5 deletions(-) create mode 100644 awq-py/README.md create mode 100644 awq-py/awq/apply_awq.py create mode 100644 awq-py/requirements.txt diff --git a/awq-py/README.md b/awq-py/README.md new file mode 100644 index 000000000..59354f4e3 --- /dev/null +++ b/awq-py/README.md @@ -0,0 +1,116 @@ +# AWQ: Activation-aware Weight Quantization for LLM - version apply to llamacpp +[[Paper](https://arxiv.org/abs/2306.00978)][[Original Repo](https://github.com/mit-han-lab/llm-awq)][[Easy-to-use Repo](https://github.com/casper-hansen/AutoAWQ)] + +**Supported models:** + +- [X] LLaMA +- [x] LLaMA 2 +- [X] MPT +- [X] Mistral AI v0.1 +- [ ] Bloom +- [ ] Mixtral MoE + +**TODO:** +- [x] Update version work with both MPT and MPT-AWQ model +- [ ] Add OPT model +- [ ] Add Bloom model +- [ ] Add Mixtral MoE +- [ ] Support w3, w2 + + +## Contents + +- [Install](##Install) +- [Convert](##Convert) +- [Quantize](##Quantize) +- [Test](##Test) +- [Benchmark](##Benchmark) +- [Results](##Results) + +## Install +Install requirements +```bash +pip install -r requirements.txt +``` +Get the pre-computed AWQ search results for multiple model families, including LLaMA, LLaMA2, MPT, OPT +```bash +git clone https://huggingface.co/datasets/mit-han-lab/awq-model-zoo awq_cache +``` + +## Convert +Example for llama model +```bash +# For llama7b and llama2 models +python convert.py models/llama-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/llama_7b_fp16.gguf +# For mistral and mpt models +python convert-hf-to-gguf.py models/mpt-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/mpt_7b_fp16.gguf +``` + +## Quantize +```bash +# We only benchmark and confirm the results on q4_0, q4_1, and q2_k types. +./quantize models/llama_7b_fp16.gguf models/llama_7b_q4_0.gguf q4_0 +``` + +## Test +```bash +# For all models. +./build/bin/main -m models/llama_7b_q4_0.gguf -n 128 --prompt "Once upon a time" +``` + +## Benchmark +The perplexity measurements in table above are done against the `wikitext2` test dataset (https://paperswithcode.com/dataset/wikitext-2), with context length of 512. +```bash +# For llama and llama2, and mistral models. 
+
+## Results
+Results were obtained with OpenBLAS (CPU) and cuBLAS (GPU) builds for a fair comparison.
+We use three llama.cpp quantization types with our version: q4_0, q4_1, and q2_k.
+
+### Llama 7B (Build with OpenBLAS)
+
+| Model      | Measure      | F16    | Q4_0   | Q4_1   | Q2_K   |
+|-----------:|--------------|-------:|-------:|-------:|-------:|
+|Llama 7B | perplexity | 5.9066 | 6.1214 | 6.0643 | 6.5808 |
+|Llama 7B | file size | 12.9G | 3.5G | 3.9G | 2.7G |
+|Llama 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 |
+|AWQ-LLama 7B| perplexity | 5.9175 | 6.0252 | 5.9987 | 6.3692 |
+|AWQ-LLama 7B| file size | 12.9G | 3.5G | 3.9G | 2.7G |
+|AWQ-LLama 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 |
+
+
+### Llama2 7B (Build with cuBLAS)
+
+| Model       | Measure      | F16    | Q4_0   | Q4_1   | Q2_K   |
+|------------:|--------------|-------:|-------:|-------:|-------:|
+|Llama2 7B | perplexity | 5.8664 | 6.0260 | 6.0656 | 6.4496 |
+|Llama2 7B | file size | 12.9G | 3.5G | 3.9G | 2.7G |
+|Llama2 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 |
+|AWQ-LLama2 7B| perplexity | 5.8801 | 6.0054 | 5.9849 | 6.3650 |
+|AWQ-LLama2 7B| file size | 12.9G | 3.5G | 3.9G | 2.7G |
+|AWQ-LLama2 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 |
+
+
+### Mistral 7B v0.1 (Build with cuBLAS)
+
+| Model        | Measure      | F16    | Q4_0   | Q4_1   | Q2_K   |
+|-------------:|--------------|-------:|-------:|-------:|-------:|
+|Mistral 7B | perplexity | 5.6931 | 5.8202 | 5.8268 | 6.1645 |
+|Mistral 7B | file size | 14.5G | 4.1G | 4.5G | 3.1G |
+|Mistral 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 |
+|AWQ-Mistral 7B| perplexity | 5.6934 | 5.8020 | 5.7691 | 6.0426 |
+|AWQ-Mistral 7B| file size | 14.5G | 4.1G | 4.5G | 3.1G |
+|AWQ-Mistral 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 |
+
+### MPT 7B (Build with OpenBLAS)
+
+| Model    | Measure      | F16    | Q4_0   | Q4_1   | Q2_K    |
+|---------:|--------------|-------:|-------:|-------:|--------:|
+|MPT 7B | perplexity | 8.4369 | 8.7956 | 8.6265 | 11.4913 |
+|MPT 7B | file size | 13.7G | 3.9G | 4.3G | 2.8G |
+|MPT 7B | bits/weight | 16.0 | 4.5 | 5.0 | 2.6 |
+|AWQ-MPT 7B| perplexity | 8.4944 | 8.7053 | 8.6750 | 10.2873 |
+|AWQ-MPT 7B| file size | 13.7G | 3.9G | 4.3G | 2.8G |
+|AWQ-MPT 7B| bits/weight | 16.0 | 4.5 | 5.0 | 2.6 |
diff --git a/awq-py/awq/apply_awq.py b/awq-py/awq/apply_awq.py
new file mode 100644
index 000000000..11132c5d2
--- /dev/null
+++ b/awq-py/awq/apply_awq.py
@@ -0,0 +1,254 @@
+"""
+Implements AWQ for llama.cpp use cases.
+Original paper: https://arxiv.org/abs/2306.00978
+
+This code is based on versions of the AWQ implementation found in the following repositories:
+* https://github.com/mit-han-lab/llm-awq
+* https://github.com/casper-hansen/AutoAWQ
+"""
+
+import os
+import torch
+import torch.nn as nn
+
+from transformers import AutoModelForCausalLM, AutoConfig
+from transformers.models.bloom.modeling_bloom import BloomGelu
+from transformers.models.llama.modeling_llama import LlamaRMSNorm
+from transformers.activations import GELUActivation
+
+
+class ScaledActivation(nn.Module):
+    """
+    ScaledActivation module wraps an existing activation function and applies a
+    scale factor to its output.
+
+    Args:
+        module (nn.Module): The activation function to be scaled.
+        scales (torch.Tensor): A tensor of size (num_features,) containing the initial
+            scale factors for each feature.
+
+    Returns:
+        torch.Tensor: The scaled output of the activation function.
+ """ + + def __init__(self, module, scales): + super().__init__() + self.act = module + self.scales = nn.Parameter(scales.data) + + def forward(self, x): + return self.act(x) / self.scales.view(1, 1, -1).to(x.device) + + +def set_op_by_name(layer, name, new_module): + """ + Set the new module for given module's name. + + Args: + layer (nn.Module): The layer in which to replace the submodule. + name (str): The path to the submodule to be replaced, using dot notation + to access nested modules. + new_module (nn.Module): The new module to replace the existing one. + """ + levels = name.split(".") + if len(levels) > 1: + mod_ = layer + for l_idx in range(len(levels) - 1): + if levels[l_idx].isdigit(): + mod_ = mod_[int(levels[l_idx])] + else: + mod_ = getattr(mod_, levels[l_idx]) + setattr(mod_, levels[-1], new_module) + else: + setattr(layer, name, new_module) + + +def get_op_by_name(module, op_name): + """ + Retrieves a submodule within a given layer based on its name. + + Args: + module (nn.Module): The layer containing the submodule to find. + op_name (str): The name of the submodule. + + Returns: + nn.Module: The requested submodule found within the given layer. + + Raises: + ValueError: If the specified submodule cannot be found within the layer. + """ + for name, m in module.named_modules(): + if name == op_name: + return m + raise ValueError(f"Cannot find op {op_name} in module {module}") + + +@torch.no_grad() +def scale_ln_fcs(ln, fcs, scales): + """ + Scales the weights of a LayerNorm and a list of fully-connected layers proportionally. + + Args: + ln (nn.LayerNorm): The LayerNorm module to be scaled. + fcs (List[nn.Linear]): A list of fully-connected layers to be scaled. + scales (torch.Tensor): A 1D tensor of size (num_features,). + """ + + if not isinstance(fcs, list): + fcs = [fcs] + + scales = scales.to(ln.weight.device) + + ln.weight.div_(scales) + if hasattr(ln, "bias") and ln.bias is not None: + ln.bias.div_(scales) + + for fc in fcs: + fc.weight.mul_(scales.view(1, -1)) + + for p in ln.parameters(): + assert torch.isnan(p).sum() == 0 + for fc in fcs: + for p in fc.parameters(): + assert torch.isnan(p).sum() == 0 + + +@torch.no_grad() +def scale_fc_fc(fc1, fc2, scales): + """ + Scales the weights of two fully-connected layers in a specific pattern. + + Args: + fc1 (nn.Linear): The first fully-connected layer to be scaled. + fc2 (nn.Linear): The second fully-connected layer to be scaled. + scales (torch.Tensor): A 1D tensor of size (num_features,). + """ + assert isinstance(fc1, nn.Linear) + assert isinstance(fc2, nn.Linear) + + scales = scales.to(fc1.weight.device) + + fc1.weight[-scales.size(0):].div_(scales.view(-1, 1)) + if fc1.bias is not None: + fc1.bias.div_(scales.view(-1)) + + fc2.weight.mul_(scales.view(1, -1)) + + for p in fc1.parameters(): + assert torch.isnan(p).sum() == 0 + for p in fc2.parameters(): + assert torch.isnan(p).sum() == 0 + + +@torch.no_grad() +def scale_gelu_fc(gelu, fc, scales): + """ + Scales the weight of a GELU activation and a fully-connected layer proportionally. + + Args: + gelu (Union[nn.GELU, BloomGelu, GELUActivation]): The GELU activation module to be scaled. + fc (nn.Linear): The fully-connected layer to be scaled. + scales (torch.Tensor): A 1D tensor of size (num_features,). + + Raises: + TypeError: If the `gelu` module is not of type `nn.GELU`, `BloomGelu`, or `GELUActivation`. + TypeError: If the `fc` module is not of type `nn.Linear`. 
+ """ + assert isinstance(gelu, (nn.GELU, BloomGelu, GELUActivation)) + assert isinstance(fc, nn.Linear) + + fc.weight.mul_(scales.view(1, -1).to(fc.weight.device)) + + for p in fc.parameters(): + assert torch.isnan(p).sum() == 0 + + +def apply_scale(module, scales_list, input_feat_dict=None): + """ + Applies different scaling strategies to layers based on their type and hierarchy within a given module. + + Args: + module (nn.Module): The module containing the layers to be scaled. + scales_list (List[Tuple[str, List[str], torch.Tensor]]): A list of tuples containing: + * prev_op_name (str): The name of the preceding operation or module, + relative to which the layers to be scaled are located. + * layer_names (List[str]): A list of names of the layers to be scaled, relative to the preceding operation. + * scales (torch.Tensor): A 1D tensor of size (num_features,) containing the scaling factors for each feature. + input_feat_dict (Optional[Dict[str, torch.Tensor]]): A dictionary mapping layer names to their corresponding + input features (optional). + """ + for prev_op_name, layer_names, scales in scales_list: + prev_op = get_op_by_name(module, prev_op_name) + layers = [get_op_by_name(module, name) for name in layer_names] + + prev_op.cuda() + for layer in layers: + layer.cuda() + scales.cuda() + + if isinstance(prev_op, nn.Linear): + assert len(layers) == 1 + scale_fc_fc(prev_op, layers[0], scales) + elif isinstance(prev_op, (nn.LayerNorm, LlamaRMSNorm)) or "rmsnorm" in str(prev_op.__class__).lower(): + scale_ln_fcs(prev_op, layers, scales) + elif isinstance(prev_op, (nn.GELU, BloomGelu, GELUActivation)): + new_module = ScaledActivation(prev_op, scales) + set_op_by_name(module, prev_op_name, new_module) + scale_gelu_fc(prev_op, layers[0], scales) + else: + raise NotImplementedError(f"prev_op {type(prev_op)} not supported yet!") + + # apply the scaling to input feat if given; prepare it for clipping + if input_feat_dict is not None: + for layer_name in layer_names: + inp = input_feat_dict[layer_name] + inp.div_(scales.view(1, -1).to(inp.device)) + + prev_op.cpu() + for layer in layers: + layer.cpu() + scales.cpu() + + +@torch.no_grad() +def apply_clip(module, clip_list): + """ + Applies element-wise clipping to the weight of a specific layer within a given module. + + Args: + module (nn.Module): The module containing the layer to be clipped. + clip_list (List[Tuple[str, torch.Tensor]]): A list of tuples containing: + * name (str): The name of the layer to be clipped, relative to the root of the module. + * max_val (torch.Tensor): A 1D or 2D tensor defining the upper bound for each element of the layer's weight. + """ + for name, max_val in clip_list: + layer = get_op_by_name(module, name) + layer.cuda() + max_val = max_val.to(layer.weight.device) + org_shape = layer.weight.shape + layer.weight.data = layer.weight.data.reshape(*max_val.shape[:2], -1) + layer.weight.data = torch.clamp(layer.weight.data, -max_val, max_val) + layer.weight.data = layer.weight.data.reshape(org_shape) + layer.cpu() + + +def add_scale_weights(model_path, scale_path, tmp_path): + """ + Adds pre-computed Activation Weight Quantization (AWQ) results to a model, + including scaling factors and clipping bounds. + + Args: + model_path (str): Path to the pre-trained model to be equipped with AWQ. + scale_path (str): Path to the AWQ scale factors (.pt file). + tmp_path (str): Path to the temporary directory where the equipped model will be saved. 
+ """ + config = AutoConfig.from_pretrained(model_path, trust_remote_code=True) + model = AutoModelForCausalLM.from_pretrained( + model_path, config=config, trust_remote_code=True + ) + model.eval() + awq_results = torch.load(str(scale_path), map_location="cpu") + apply_scale(model, awq_results["scale"]) + apply_clip(model, awq_results["clip"]) + model.save_pretrained(str(tmp_path)) + os.system(f"cp {str(model_path)}/tokenizer* {str(tmp_path)}") diff --git a/awq-py/requirements.txt b/awq-py/requirements.txt new file mode 100644 index 000000000..5fe604329 --- /dev/null +++ b/awq-py/requirements.txt @@ -0,0 +1,2 @@ +torch>=2.0.0 +transformers>=4.32.0 diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 303d08170..7dbc28147 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -46,7 +46,7 @@ class Model: self.part_names = self._get_part_names() self.hparams = Model.load_hparams(self.dir_model) self.model_arch = self._get_model_architecture() - self.gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[self.model_arch], endianess=self.endianess) + self.gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[self.model_arch], endianess=self.endianess, use_temp_file=False) def set_vocab(self): self._set_vocab_gpt2() @@ -59,7 +59,7 @@ class Model: from safetensors import safe_open ctx = cast(ContextManager[Any], safe_open(self.dir_model / part_name, framework="pt", device="cpu")) else: - ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", mmap=True, weights_only=True)) + ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", weights_only=True)) with ctx as model_part: for name in model_part.keys(): @@ -464,7 +464,11 @@ class MPTModel(Model): data = data_torch.squeeze().numpy() # map tensor names - new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if "scales" in name: + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias", ".scales")) + new_name = new_name.replace("scales", "act.scales") + else: + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) if new_name is None: print(f"Can not map tensor {name!r}") sys.exit() @@ -1095,6 +1099,9 @@ def parse_args() -> argparse.Namespace: "--vocab-only", action="store_true", help="extract only the vocab", ) + parser.add_argument( + "--awq-path", type=Path, default=None, + help="Path to scale awq cache file") parser.add_argument( "--outfile", type=Path, help="path to write to; default: based on input", @@ -1115,6 +1122,20 @@ def parse_args() -> argparse.Namespace: args = parse_args() dir_model = args.model + +if args.awq_path: + sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) + from awq.apply_awq import add_scale_weights + tmp_model_path = args.model / "weighted_model" + dir_model = tmp_model_path + if tmp_model_path.is_dir(): + print(f"{tmp_model_path} exists as a weighted model.") + else: + tmp_model_path.mkdir(parents=True, exist_ok=True) + print("Saving new weighted model ...") + add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) + print(f"Saved weighted model at {tmp_model_path}.") + if not dir_model.is_dir(): print(f'Error: {args.model} is not a directory', file=sys.stderr) sys.exit(1) diff --git a/convert.py b/convert.py index 1f0c4f2f4..c3f3fc0a1 100755 --- a/convert.py +++ b/convert.py @@ -1187,6 +1187,7 @@ def main(args_in: list[str] | None = None) -> None: # We currently only support Q8_0 output on little endian systems. 
output_choices.append("q8_0") parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") + parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") @@ -1200,6 +1201,19 @@ def main(args_in: list[str] | None = None) -> None: parser.add_argument("--padvocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") args = parser.parse_args(args_in) + if args.awq_path: + sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) + from awq.apply_awq import add_scale_weights + tmp_model_path = args.model / "weighted_model" + if tmp_model_path.is_dir(): + print(f"{tmp_model_path} exists as a weighted model.") + else: + tmp_model_path.mkdir(parents=True, exist_ok=True) + print("Saving new weighted model ...") + add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) + print(f"Saved weighted model at {tmp_model_path}.") + args.model = tmp_model_path + if args.dump_single: model_plus = lazy_load_file(args.model) do_dump_model(model_plus) diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 4cd87cdda..c9be21119 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -120,6 +120,7 @@ class MODEL_TENSOR(IntEnum): FFN_GATE = auto() FFN_DOWN = auto() FFN_UP = auto() + FFN_ACT = auto() FFN_GATE_EXP = auto() FFN_DOWN_EXP = auto() FFN_UP_EXP = auto() @@ -169,6 +170,7 @@ TENSOR_NAMES: dict[MODEL_TENSOR, str] = { MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate", MODEL_TENSOR.FFN_DOWN: "blk.{bid}.ffn_down", MODEL_TENSOR.FFN_UP: "blk.{bid}.ffn_up", + MODEL_TENSOR.FFN_ACT: "blk.{bid}.ffn", MODEL_TENSOR.FFN_GATE_EXP: "blk.{bid}.ffn_gate.{xid}", MODEL_TENSOR.FFN_DOWN_EXP: "blk.{bid}.ffn_down.{xid}", MODEL_TENSOR.FFN_UP_EXP: "blk.{bid}.ffn_up.{xid}", @@ -269,6 +271,7 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_NORM, MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.FFN_ACT, ], MODEL_ARCH.GPTJ: [ MODEL_TENSOR.TOKEN_EMBD, diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 446c6b688..0b8f70417 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -188,6 +188,11 @@ class TensorNameMap: "model.layers.{bid}.block_sparse_moe.experts.{xid}.w3", # mixtral ), + # AWQ-activation gate + MODEL_TENSOR.FFN_ACT: ( + "transformer.blocks.{bid}.ffn.act", # mpt + ), + # Feed-forward gate MODEL_TENSOR.FFN_GATE: ( "model.layers.{bid}.mlp.gate_proj", # llama-hf refact diff --git a/llama.cpp b/llama.cpp index 4aa59c4c0..bf1b01a90 100644 --- a/llama.cpp +++ b/llama.cpp @@ -354,6 +354,7 @@ enum llm_tensor { LLM_TENSOR_FFN_GATE, LLM_TENSOR_FFN_DOWN, LLM_TENSOR_FFN_UP, + LLM_TENSOR_FFN_ACT, LLM_TENSOR_FFN_DOWN_EXP, LLM_TENSOR_FFN_GATE_EXP, LLM_TENSOR_FFN_UP_EXP, @@ -473,6 +474,7 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" }, }, }, { @@ -1285,6 +1287,7 @@ struct llama_hparams { float f_clamp_kqv; float f_max_alibi_bias; + bool operator!=(const llama_hparams & other) const { if (this->vocab_only 
!= other.vocab_only) return true; if (this->n_vocab != other.n_vocab) return true; @@ -1388,6 +1391,7 @@ struct llama_layer { // ff bias struct ggml_tensor * ffn_down_b; // b2 struct ggml_tensor * ffn_up_b; // b3 + struct ggml_tensor * ffn_act; }; struct llama_kv_cell { @@ -3471,7 +3475,6 @@ static bool llm_load_tensors( case LLM_ARCH_MPT: { model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - // output { ggml_backend_type backend_norm; @@ -3509,6 +3512,9 @@ static bool llm_load_tensors( layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + + // AWQ ScaleActivation layer + layer.ffn_act = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_ACT, "scales", i), {n_ff}, backend, false); } } break; case LLM_ARCH_STABLELM: @@ -4039,6 +4045,7 @@ static struct ggml_tensor * llm_build_ffn( struct ggml_tensor * gate_b, struct ggml_tensor * down, struct ggml_tensor * down_b, + struct ggml_tensor * act_scales, llm_ffn_op_type type_op, llm_ffn_gate_type type_gate, const llm_build_cb & cb, @@ -4083,6 +4090,10 @@ static struct ggml_tensor * llm_build_ffn( { cur = ggml_gelu(ctx, cur); cb(cur, "ffn_gelu", il); + if (act_scales != NULL) { + cur = ggml_div(ctx, cur, act_scales); + cb(cur, "ffn_act", il); + } } break; case LLM_FFN_RELU: { @@ -4401,6 +4412,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } else { @@ -4580,6 +4592,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } @@ -4694,6 +4707,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, NULL, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -4798,6 +4812,7 @@ struct llm_build_context { model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -5002,6 +5017,7 @@ struct llm_build_context { model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, LLM_FFN_RELU_SQR, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -5088,6 +5104,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } @@ -5183,6 +5200,7 @@ struct llm_build_context { model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -5268,11 +5286,11 @@ struct llm_build_context { NULL, LLM_NORM, cb, il); cb(cur, "ffn_norm", il); - cur = llm_build_ffn(ctx0, cur, model.layers[il].ffn_up, NULL, NULL, NULL, model.layers[il].ffn_down, NULL, + model.layers[il].ffn_act, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); } @@ -5381,6 +5399,7 @@ struct llm_build_context { model.layers[il].ffn_up, NULL, model.layers[il].ffn_gate, NULL, model.layers[il].ffn_down, NULL, + NULL, LLM_FFN_SILU, LLM_FFN_PAR, cb, il); cb(cur, "ffn_out", il); } @@ -5493,6 
+5512,7 @@ struct llm_build_context {
                 model.layers[il].ffn_up,   NULL,
                 model.layers[il].ffn_gate, NULL,
                 model.layers[il].ffn_down, NULL,
+                NULL,
                 LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }
@@ -5600,6 +5620,7 @@ struct llm_build_context {
                 model.layers[il].ffn_up,   model.layers[il].ffn_up_b,
                 NULL,                      NULL,
                 model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                NULL,
                 LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
             cb(ffn_output, "ffn_out", il);
         }
@@ -5703,6 +5724,7 @@ struct llm_build_context {
                 model.layers[il].ffn_up,   NULL,
                 model.layers[il].ffn_gate, NULL,
                 model.layers[il].ffn_down, NULL,
+                NULL,
                 LLM_FFN_SILU, LLM_FFN_PAR, cb, il);
             cb(cur, "ffn_out", il);
         }
@@ -5887,6 +5909,7 @@ static const std::unordered_map k_offload_map
     { "ffn_gate",     OFFLOAD_FUNC },
     { "ffn_gate_b",   OFFLOAD_FUNC },
     { "ffn_gate_par", OFFLOAD_FUNC },
+    { "ffn_act",      OFFLOAD_FUNC },
     { "ffn_down",     OFFLOAD_FUNC },
     { "ffn_down_b",   OFFLOAD_FUNC },
     { "ffn_out",      OFFLOAD_FUNC },
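The net effect of the `ffn_act` tensor wired in above is simple to state on its own: when `act_scales` is non-NULL, `llm_build_ffn` divides the GELU output element-wise by a per-feature scale, mirroring `ScaledActivation.forward` on the Python side. A minimal PyTorch sketch (dimensions made up for illustration):

```python
import torch
import torch.nn.functional as F

n_ff = 8
x = torch.randn(2, 3, n_ff)          # (batch, tokens, features)
act_scales = torch.rand(n_ff) + 0.5  # per-feature scales, stored as blk.%d.ffn.act

# what llm_build_ffn does for LLM_FFN_GELU when act_scales != NULL:
y = F.gelu(x) / act_scales.view(1, 1, -1)
```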
From ea5497df5d138c83b2b0ca70aefdc4b1175c1001 Mon Sep 17 00:00:00 2001
From: manikbhandari
Date: Thu, 28 Dec 2023 09:03:57 -0500
Subject: [PATCH 309/859] gpt2 : Add gpt2 architecture integration (#4555)

---
 README.md                      |   1 +
 convert-hf-to-gguf.py          |  66 +++++++++++
 gguf-py/gguf/constants.py      |  11 +-
 gguf-py/gguf/tensor_mapping.py |  10 +-
 llama.cpp                      | 206 +++++++++++++++++++++++++++++++--
 models/ggml-vocab-gpt2.gguf    | Bin 0 -> 1766799 bytes
 tests/CMakeLists.txt           |   1 +
 7 files changed, 281 insertions(+), 14 deletions(-)
 create mode 100644 models/ggml-vocab-gpt2.gguf

diff --git a/README.md b/README.md
index 3b202a336..48dcd6464 100644
--- a/README.md
+++ b/README.md
@@ -103,6 +103,7 @@ as the main playground for developing new features for the [ggml](https://github
 - [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen)
 - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral)
 - [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557)
+- [x] [GPT-2](https://huggingface.co/gpt2)
 
 **Multimodal models:**
 
diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py
index 7dbc28147..3557a825e 100755
--- a/convert-hf-to-gguf.py
+++ b/convert-hf-to-gguf.py
@@ -182,6 +182,8 @@ class Model:
             return QwenModel
         if model_architecture == "MixtralForCausalLM":
             return MixtralModel
+        if model_architecture == "GPT2LMHeadModel":
+            return GPT2Model
         if model_architecture == "PhiForCausalLM":
             return Phi2Model
         if model_architecture == "PlamoForCausalLM":
@@ -225,6 +227,8 @@ class Model:
             return gguf.MODEL_ARCH.QWEN
         if arch == "MixtralForCausalLM":
             return gguf.MODEL_ARCH.LLAMA
+        if arch == "GPT2LMHeadModel":
+            return gguf.MODEL_ARCH.GPT2
         if arch == "PhiForCausalLM":
             return gguf.MODEL_ARCH.PHI2
         if arch == "PlamoForCausalLM":
@@ -993,6 +997,68 @@ class QwenModel(Model):
         self.gguf_writer.add_tensor(new_name, data)
 
 
+class GPT2Model(Model):
+    def set_gguf_parameters(self):
+        self.gguf_writer.add_name(self.dir_model.name)
+        self.gguf_writer.add_block_count(self.hparams["n_layer"])
+        self.gguf_writer.add_context_length(self.hparams["n_ctx"])
+        self.gguf_writer.add_embedding_length(self.hparams["n_embd"])
+        self.gguf_writer.add_feed_forward_length(4 * self.hparams["n_embd"])
+        self.gguf_writer.add_head_count(self.hparams["n_head"])
+        self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"])
+        self.gguf_writer.add_file_type(self.ftype)
+
+    def write_tensors(self):
+        block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer")))
+        tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count)
+
+        for name, data_torch in self.get_tensors():
+            # we don't need these
+            if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq", ".attn.bias")):
+                continue
+
+            if name.endswith((".c_attn.weight", ".c_proj.weight", ".c_fc.weight")):  # HF Conv1D stores weights transposed
+                data_torch = data_torch.transpose(1, 0)
+
+            old_dtype = data_torch.dtype
+
+            # convert any unsupported data types to float32
+            if data_torch.dtype not in (torch.float16, torch.float32):
+                data_torch = data_torch.to(torch.float32)
+
+            data = data_torch.squeeze().numpy()
+
+            # map tensor names
+            new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias"))
+            if new_name is None:
+                print(f"Can not map tensor {name!r}")
+                sys.exit()
+
+            n_dims = len(data.shape)
+            data_dtype = data.dtype
+
+            # if f32 desired, convert any float16 to float32
+            if self.ftype == 0 and data_dtype == np.float16:
+                data = data.astype(np.float32)
+
+            # TODO: why can't we use these float16 values as-is? There should be no reason to store float16 as float32
+            if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1:
+                data = data.astype(np.float32)
+
+            # if f16 desired, convert any float32 2-dim weight tensors to float16
+            if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2:
+                data = data.astype(np.float16)
+
+            print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}")
+
+            self.gguf_writer.add_tensor(new_name, data)
+
+            # note: GPT2 output is tied to (same as) wte in original model
+            if new_name == "token_embd.weight":
+                print(f"output.weight, n_dims = {n_dims}, {old_dtype} --> {data.dtype}")
+                self.gguf_writer.add_tensor("output.weight", data)
+
+
 class Phi2Model(Model):
     def set_gguf_parameters(self):
         block_count = self.hparams["n_layer"]
diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py
index c9be21119..ae62cc575 100644
--- a/gguf-py/gguf/constants.py
+++ b/gguf-py/gguf/constants.py
@@ -370,7 +370,16 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = {
         MODEL_TENSOR.FFN_UP,
     ],
     MODEL_ARCH.GPT2: [
-        # TODO
+        MODEL_TENSOR.TOKEN_EMBD,
+        MODEL_TENSOR.POS_EMBD,
+        MODEL_TENSOR.OUTPUT_NORM,
+        MODEL_TENSOR.OUTPUT,
+        MODEL_TENSOR.ATTN_NORM,
+        MODEL_TENSOR.ATTN_QKV,
+        MODEL_TENSOR.ATTN_OUT,
+        MODEL_TENSOR.FFN_NORM,
+        MODEL_TENSOR.FFN_DOWN,
+        MODEL_TENSOR.FFN_UP,
     ],
     MODEL_ARCH.PHI2: [
         MODEL_TENSOR.TOKEN_EMBD,
diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py
index 0b8f70417..80c1d5449 100644
--- a/gguf-py/gguf/tensor_mapping.py
+++ b/gguf-py/gguf/tensor_mapping.py
@@ -17,6 +17,7 @@ class TensorNameMap:
             "tok_embeddings",                            # llama-pth
             "embeddings.word_embeddings",                # bert
             "language_model.embedding.word_embeddings",  # persimmon
+            "wte",                                       # gpt2
             "transformer.embd.wte",                      # phi2
         ),
 
@@ -34,6 +35,7 @@ class TensorNameMap:
         MODEL_TENSOR.POS_EMBD: (
             "transformer.wpe",                 # gpt2
             "embeddings.position_embeddings",  # bert
+            "wpe",                             # gpt2
         ),
 
         # Output
@@ -53,7 +55,7 @@ class TensorNameMap:
             "norm",                                    # llama-pth
             "embeddings.LayerNorm",                    # bert
             "transformer.norm_f",                      # mpt
-            "ln_f",                                    # refact bloom qwen
+            "ln_f",                                    # refact bloom qwen gpt2
             "language_model.encoder.final_layernorm",  # persimmon
             "lm_head.ln",                              # phi2
         ),
@@ -78,6 +80,7 @@ class TensorNameMap:
             "encoder.layer.{bid}.attention.output.LayerNorm",       # bert
             "language_model.encoder.layers.{bid}.input_layernorm",  # persimmon
             "model.layers.{bid}.ln1",                               # yi
+            "h.{bid}.ln_1",                                         # gpt2
             "transformer.h.{bid}.ln",                               # phi2
             "model.layers.layers.{bid}.norm",                       # plamo
         ),
@@ -95,6 +98,7 @@ class TensorNameMap:
"transformer.h.{bid}.self_attention.query_key_value", # falcon "h.{bid}.self_attention.query_key_value", # bloom "language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon + "h.{bid}.attn.c_attn", # gpt2 "transformer.h.{bid}.mixer.Wqkv", # phi2 ), @@ -137,6 +141,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.dense", # bert "transformer.h.{bid}.attn.out_proj", # gpt-j "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon + "h.{bid}.attn.c_proj", # gpt2 "transformer.h.{bid}.mixer.out_proj", # phi2 "model.layers.layers.{bid}.self_attn.o_proj", # plamo ), @@ -159,6 +164,7 @@ class TensorNameMap: "encoder.layer.{bid}.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon "model.layers.{bid}.ln2", # yi + "h.{bid}.ln_2", # gpt2 ), MODEL_TENSOR.FFN_GATE_INP: ( @@ -179,6 +185,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc_in", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon "transformer.h.{bid}.mlp.w1", # qwen + "h.{bid}.mlp.c_fc", # gpt2 "transformer.h.{bid}.mlp.fc1", # phi2 "model.layers.layers.{bid}.mlp.up_proj", # plamo ), @@ -218,6 +225,7 @@ class TensorNameMap: "encoder.layer.{bid}.output.dense", # bert "transformer.h.{bid}.mlp.fc_out", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon + "h.{bid}.mlp.c_proj", # gpt2 "transformer.h.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo ), diff --git a/llama.cpp b/llama.cpp index bf1b01a90..68c7cced6 100644 --- a/llama.cpp +++ b/llama.cpp @@ -423,6 +423,15 @@ static std::map> LLM_TENSOR_NAMES = LLM_ARCH_GPT2, { { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_POS_EMBD, "position_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, }, }, { @@ -1256,6 +1265,10 @@ enum e_model { MODEL_40B, MODEL_65B, MODEL_70B, + MODEL_SMALL, + MODEL_MEDIUM, + MODEL_LARGE, + MODEL_XL, }; static const size_t kiB = 1024; @@ -2552,18 +2565,22 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { static const char * llama_model_type_name(e_model type) { switch (type) { - case MODEL_1B: return "1B"; - case MODEL_3B: return "3B"; - case MODEL_7B: return "7B"; - case MODEL_8B: return "8B"; - case MODEL_13B: return "13B"; - case MODEL_15B: return "15B"; - case MODEL_30B: return "30B"; - case MODEL_34B: return "34B"; - case MODEL_40B: return "40B"; - case MODEL_65B: return "65B"; - case MODEL_70B: return "70B"; - default: return "?B"; + case MODEL_1B: return "1B"; + case MODEL_3B: return "3B"; + case MODEL_7B: return "7B"; + case MODEL_8B: return "8B"; + case MODEL_13B: return "13B"; + case MODEL_15B: return "15B"; + case MODEL_30B: return "30B"; + case MODEL_34B: return "34B"; + case MODEL_40B: return "40B"; + case MODEL_65B: return "65B"; + case MODEL_70B: return "70B"; + case MODEL_SMALL: return "0.1B"; + case MODEL_MEDIUM: return "0.4B"; + case MODEL_LARGE: return "0.8B"; + case MODEL_XL: return "1.5B"; + default: return "?B"; } } @@ -2782,6 +2799,17 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_GPT2: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + switch (hparams.n_layer) { + 
case 12: model.type = e_model::MODEL_SMALL; break; + case 24: model.type = e_model::MODEL_MEDIUM; break; + case 36: model.type = e_model::MODEL_LARGE; break; + case 48: model.type = e_model::MODEL_XL; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -3710,6 +3738,60 @@ static bool llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); } } break; + case LLM_ARCH_GPT2: + { + model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.pos_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); + + // output + { + ggml_backend_type backend_norm; + ggml_backend_type backend_output; + + if (n_gpu_layers > int(n_layer)) { + backend_norm = llama_backend_offload; + backend_output = llama_backend_offload_split; + } else { + backend_norm = GGML_BACKEND_CPU; + backend_output = GGML_BACKEND_CPU; + } + + model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); + model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); + model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + } + + const uint32_t n_ff = hparams.n_ff; + + const int i_gpu_start = n_layer - n_gpu_layers; + + model.layers.resize(n_layer); + + for (uint32_t i = 0; i < n_layer; ++i) { + const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT + const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + + layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); + layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + + layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + + layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + + layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); + layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + + layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -5754,6 +5836,102 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_gpt2() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + struct ggml_tensor * cur; + struct ggml_tensor * pos; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct 
ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens);
+        cb(inp_pos, "inp_pos", -1);
+
+        // KQ_mask (mask for 1 head, it will be broadcasted to all heads)
+        struct ggml_tensor * KQ_mask = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, n_kv, n_tokens, 1);
+        cb(KQ_mask, "KQ_mask", -1);
+
+        pos = ggml_get_rows(ctx0, model.pos_embd, inp_pos);
+        cb(pos, "pos_embd", -1);
+
+        inpL = ggml_add(ctx0, inpL, pos);
+        cb(inpL, "inpL", -1);
+
+        for (int il = 0; il < n_layer; ++il) {
+            cur = llm_build_norm(ctx0, inpL, hparams,
+                    model.layers[il].attn_norm,
+                    model.layers[il].attn_norm_b,
+                    LLM_NORM, cb, il);
+            cb(cur, "attn_norm", il);
+
+            // self-attention
+            {
+                cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur);
+                cb(cur, "wqkv", il);
+
+                cur = ggml_add(ctx0, cur, model.layers[il].bqkv);
+                cb(cur, "bqkv", il);
+
+                struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd,     n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd)));
+                struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd)));
+                struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa)));
+
+                cb(Qcur, "Qcur", il);
+                cb(Kcur, "Kcur", il);
+                cb(Vcur, "Vcur", il);
+
+                Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens);
+
+                llm_build_kv_store(ctx0, hparams, kv_self, gf, Kcur, Vcur, n_ctx, n_tokens, kv_head, cb, il);
+
+                cur = llm_build_kqv(ctx0, model, hparams, kv_self,
+                        model.layers[il].wo, model.layers[il].bo,
+                        Qcur, KQ_mask, n_ctx, n_tokens, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il);
+                cb(cur, "kqv_out", il);
+            }
+
+            // add the input
+            struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpL);
+            cb(ffn_inp, "ffn_inp", il);
+
+            // FF
+            {
+                cur = llm_build_norm(ctx0, ffn_inp, hparams,
+                        model.layers[il].ffn_norm,
+                        model.layers[il].ffn_norm_b,
+                        LLM_NORM, cb, il);
+                cb(cur, "ffn_norm", il);
+
+                cur = llm_build_ffn(ctx0, cur,
+                        model.layers[il].ffn_up,   model.layers[il].ffn_up_b,
+                        NULL,                      NULL,
+                        model.layers[il].ffn_down, model.layers[il].ffn_down_b,
+                        NULL,
+                        LLM_FFN_GELU, LLM_FFN_SEQ, cb, il);
+                cb(cur, "ffn_out", il);
+            }
+
+            inpL = ggml_add(ctx0, cur, ffn_inp);
+            cb(inpL, "l_out", il);
+        }
+
+        cur = llm_build_norm(ctx0, inpL, hparams,
+                model.output_norm,
+                model.output_norm_b,
+                LLM_NORM, cb, -1);
+        cb(cur, "result_norm", -1);
+
+        cur = ggml_mul_mat(ctx0, model.output, cur);
+        cb(cur, "result_output", -1);
+
+        ggml_build_forward_expand(gf, cur);
+
+        return gf;
+    }
 };
 
 //
@@ -6269,6 +6447,10 @@ static struct ggml_cgraph * llama_build_graph(
             {
                 result = llm.build_plamo();
             } break;
+        case LLM_ARCH_GPT2:
+            {
+                result = llm.build_gpt2();
+            } break;
         default:
             GGML_ASSERT(false);
     }
diff --git a/models/ggml-vocab-gpt2.gguf b/models/ggml-vocab-gpt2.gguf
new file mode 100644
index 0000000000000000000000000000000000000000..1fbc72c1e4d9e210e5c5689b31e1debfa33d4b6a
GIT binary patch
literal 1766799
[1766799 bytes of encoded binary data omitted: GPT-2 vocabulary GGUF model]
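For readers following `build_gpt2` above: the three `ggml_view_2d` calls slice the fused `c_attn` projection at element offsets `0`, `n_embd`, and `n_embd + n_embd_gqa` (and for GPT-2, `n_embd_gqa == n_embd`, since it has no grouped-query attention). A rough PyTorch equivalent of that split, with made-up dimensions:

```python
import torch

n_embd, n_head, n_tokens = 64, 4, 3
n_embd_gqa = n_embd  # GPT-2: K/V width equals Q width (no grouped-query attention)
qkv = torch.randn(n_tokens, n_embd + 2 * n_embd_gqa)  # fused c_attn output

# same offsets as the ggml_view_2d calls above
q = qkv[:, 0:n_embd]
k = qkv[:, n_embd:n_embd + n_embd_gqa]
v = qkv[:, n_embd + n_embd_gqa:]

# Qcur is then reshaped per-head before attention
q = q.view(n_tokens, n_head, n_embd // n_head)
```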
z04Cv86Rs6I!ojsfe1sD-GL_)ZRVug+!Sw53M#|9F-?;^f)(UwVd#5dOHKOl*&$FfZ zC%08=bwJL|JEMi07@@e6IpnPe<|W+-Crhc!{yEh<&xwp?cAS(-TR7ZG%M(8@oAO3( z+f>%M&(*##;z9BiOF(%CB))8X3jXyPAyT9y1Q!SVY z8}pI9S$ul`Z|?uj-zkcguJ;ix_1E}?U(Mot;7XLMWGxMFpj>P8C1=h82y0w5&pn>p zO<7G)$T2@IHyudgA_(ck37&a zt39xH$3@S_=&Wuiv9nu$=NViJ{=_^&PbZAX)v@NhpsE-ei+^yk){=V*VGPvDH^T`^ zq_RRi+udns59lUehssPc>>WYGdU*Pdc2Kn|R5$4(M|k?vPaatev!!zItw|Ha)Eai7 zRIAu3a^=GnuS5ZhNIVoj@qs7bI8{OzF2fyPW~$6nOoL1%8`bk_MF>)dssS}DB844F zarcD7c%_br{-!7GD!xfo)HE`ts~_Uw7%D)M1R5(5B&wC!dZMIKyR{g;D1(cB*nvT93sOMVmky-UK2c{UJYq+<~B;@ zsmQ!{@p`la&)Nhka|9%y7@i*Q?a_jp!Yu%#QVOFNwT1h)ex`B}Mf!`rez)!H{1%|+ z{h#|9U)_lH^G%>?^>2jqv6=hC$LOob=6qsj$cRQ-42rSeQClvQ`U##M6Y_=BQ4{hZ zNVDcKg}9)SYeHK{tok=WqV`*m=@qANdiB<1M%fts;=B%eUzu_+WGf#+k%TEda4n`Q zN^7WL^zz}psX_V9wP=Cz#i4`0e2=U8ZHSdk^kZ`f@j_I+a1LQVA;vS)=-KQ}Nhkgd zo_duN^&dvy-dY9<*s@A(yj|pcQV9C9c zP8F3W=15(K5((}Rc^3ZiH*enf4dYT@3khu@>l_n+8!+JqKrpyye&MdYm{ws(tx^ZP z9$^D(&r~-j96+!=Zc6kX4~(k*rZ4U3MF%85-|-kYSZa7FqxPphh=e+c&v(5?lv`4r zW&Z)s!}^u=l$|fp!zEa6xS8SN4<#6@33`qT)ib(I{0|jjkW2M@%IHmBw?l%o%PcD- zb(LpJE{qyzkh_kXLY00Ap8i_dM#uT-Q_9^{f6sE0q#l1-1Kq#>tsj}`2+B$VH^ZWEulLTvyVuB6j*=s#xGN1|`MsZfe zLVpi@Vy$uranNOm6;yP=V!}CyP6{!LGDll=MR0Td`8R*P`rrEY$=6;S$OCf#XFW79 zl2QHKv&{`Jiz}DnYK?bL>BW~VGEsnWx753KVo3lBf~@E7T0Q#YQQ(DF1x#3srOF=e zpD>~~XE;0C_whJXpm(}Xm2gjOnyD_@b zaJPj6UDdw~S5qBNuM`!b&%Ufe36z@i22li5$<`qd(-9ITKQ1mK(kyDP|LG5lA~@hf zR;z*2S~3tGBmoJEN97a4bRTYcFKzo-E3cC^{~4IKb~NsRA~WseZRtqhsDz&GXn9|Q z8`b%t=6e8or~a*&cdcj{*Dy|kyEPEO_Jf*anr@0*b48FO%1~zJ znlxM)eMJtUFsWyuc5(Z~PSb=B@!?xviWq=qk+d#F>f+XDM9=0C|HFw(c24l%u`&Yc zVI1KHNA+Pi`^>$B5jjzzy20JA`mRCU{M>uIN|l|RU9>p-#UGd)1NjxSTdsPr`PLJB z2sOrnS+`M(UEBau|9S~EUg4c7eiqbTN>q`8-hlpU4{bRu>O*d&;sqUoS~^hMY&Z-#QxN_KelaB*izBZi2=WvFn1|D0 zuX&@jgyByxHDfqR?}0BoJV{0#2Fy(ZyGhs!7c{=gh^lSD{hetyy>_i%6jG0NjjZq< z%6l?kh~^;Tnm22}e0*GRZk*FYQl9+$s`gNr9K-%h74(|>Ip#^>zD3XiXH5cu9!h0~ z^FZ<7kz(R^e$K(TC7J&I z_I640)*=MTYgBNc+Hn-rTa|xFl-tua{O|)h4?@&LNR5pvXk=ak^sIN0bQ^%W5t9-V z*BK*+sGj_6aUSN(s^USNo=KRokJ$-JCz(k?*Q77ukN?;^0wNiu8}Vbou`xUfrp@8) zJ2M6%=O$|X%J)v2$gksQ(DkyBFob<-`V`ITr0=sv?`X&TW&l&sclP*A`0@a6Ad#=a z6Rbcq_GX-TVnOy19utqSD0rH26R;`-tUa-fs4H-MfXHOXxL=>xnvf*7jC5DpJkBRI#}RXKcpts?O)+M?gR36{CFFXBfoowaHqWFEkQ zN#2E8jMbW-a*cSMn@$gmO*Jv(%mPZy}F(nsj68h1P zJvqu`8{sq347@@ehV-T3oKif_$CVnY(u!MO!V-+eUL?yWLUf10d<`iQip;GUqOmMv zk#YqL1hKi}%O~|Ltv75PuF9&mlv>g}s-8mzXC+Dk6k(`i1%@sLlIXQ*@>u+9xxn2p zI?=h5>2Lm~X}}FD%{{LDhubOJu3WD7SGbxjZ`vF5I`mYuA+)pNI2`zt<}4B{mZ#B% zSC*{uuiiOC6FGEnTLl?Pxg zFXtHJGo0ak15;_=1J<|)WR({!7>+SS3oHko~8?|t(={VaX`t9RT)t}?WgSCL- z@S`x0aG)?D6PCz)U#_4VA`nOt6m9$J-Bj}kTVXOM4KGyUH{3M!?xmvIXx&a<`YM&H9a9BP0^=|5HE-0inq|E?;nUQ@n`**izb{Bmrl`mK8cXqk@4k# zyh;p`a^Phk&sXJv@%__o*;i{{BG+wX2(;-(+u6^Spom=u?kOecH?eM%Qw6 zAX+>nK1!tshVZVDFrqrNNz?iE*GV4WJ&JuDMHWJq;++VtQ3g$?cd&;(1fUJ!Ac3kn#l1G>87yacTUQhC+nSbfSS&e4X<{ah;yqg zbL4o7h5z*7zgnR>H^d&%w}ml#Z{b3(0n}h067UFN@C=DJIwi!Ckc+FbfF40Lf&F3( zq?j=^d*UM!{;FRFvxfWxj|wU$$%2x>_3N>2?_eeGn)b$NyJ|)g*;^|6J6s)-sh->B z=9WKf9PW8=;xaxQTs&D0gLK37au}+q(;Scc5XHq5LP^H+EoY-gpCEDLP{>8tY9oq& z`?QgS3A9&XY^}0*;pnM}PW&u-XZDTr7)IB0PcGpZ>a=<9{=0~@QA zj!cyjv=F{H{1$gdJ);WNp$Uwf&UuGb0X;j5V3b5kemz~sCI0kNPlJ$Q7|P3Q%wEpE z&$$YDu|_jA&yN~`z6_LRG5q9a9xfVXR?knHz3_oYp=nC_%}ttj7%GcM6Fo!^EMEEW zH%x#h*));vb!jzC-_~;|tuDFz2Orp?S4kuIQocMdr?Qv5!$J^-Cer<^J>SR7J{569 z`CJtC7W;15a62PM*;_jSq70mRYO;^O=F zzxah0rMk7*KMHON%bprRu(_jCVbvEP%KIVTh!P;7OoJ_)MP8wj?t#nt<PxJ)9lw*NiFPk>b)U(rD9k=Q}1D{@jo z@x#7!tw$6WBw!j+sB|tS z_qTl;TPx|Y)-_pbjh8$Di{#(Sa>`^FZq8|33|w{cQd{Ofv~d9ET=U^>2_Akn?ug(B znti@%GxuN_a_uE2+j|4A5DQE~pk7QvRohJ6=;{2#KUFABNnK{>k3N1>AJB)49Osf( 
z{I|s^6%0}2OpQ@uZOv{}(KPB``jHdF@hp4+NQ9M+Sv$Bm9dLdV996Kz`>Mlx_=gB% zfRqWXa|8P!L;y&Wn|Rw9dkLODn>L|I02k!ExTeCBwy~&W)Qg3Yc&eCi!_wlKyV46h zqmWqy@uI{Th?N5~?t>67I&*QHb&<~dzy9XW&6AkzaM|h5aZQ$|xB~o%jxrIWrNwej zo5PMnuc(WH9gT0=UQdG~a zj3%GXmFdUX_WY3H^H85BvLZ}17!HXM!4h4@d3jL+0QZ5JdsQGS_47UK`?w4-9@?^^ zM6+(7;AohS%9nL}Kbx2yDw3%P(d&RitV+ucS9PtJOuDt@ovCs}eYtWwnFWp(4HcQ= zm#Z)2*zc^d+u7UNt0@YY5GHaGRILHPzLMqvTJVh4*ZaVIZT6yIOV9cc}1`*E#bCjZYL`8m`kZh{g3L_keMxg61K zf{}Twl)Q8kR#8ZXOz7;-DRk@=pFkDe_J!&co=cZ$J zG%@^fEelBR^143AI~+VJc$pMl&p^mLsN_FfE68BuTE6xk4Wak!Sy5o>vh^rF%|a^= zW=)y^6TT&Xl747Lz`clxR0D#&G{cKW;Lpq+ADhkW`&BDfst@kb;@vcghZ2|}0e$EJ zULJZ0NXcDEt;QOG%DQE zX7I25pk7G<^O9$`ue>|eO(=pmtJ*dqQ_YD>;DS4{$cZ-6kkaeVqcmV#*AK4d+LmAc^NAChkBdJA?$tv1EVXL_FL*VJ$rP404Far`vD!%vWmLQ&WzSA(W5y>Cd?C9XW7E5{pB(?}lxzb1;J5}3t z(_6Y?a0NvufqHntz!`R_j;5QBRb@_%%5e=0Q*N>9D*J3QAGCpm*__z1lHHQqIhN!$uQgpzE8AuVZ>1apnn6-XKAnO*qnjRYTaqhnXiKi z0tIgNG^Z!HzUu9q=i(&))U*psPv%R5;vg(DuE!lV|T{{r~D2H zmY&=uh|4-^dru>-LB*?Pw?_~E)wJar^ko6-BJYi!r+8eiLY1{7j=T?>PMTo#Kld<# z$?2LfW#t~iXN5K&6IQkS8cp38W6Lj>w*VZ1V{jO8C~X%a`MC0zkjAgw#i>sHj$ovN ztl@$)H9brS%P^r3<*PJF?}S21?X6J(=#G)dfNeS)qZ~!p;*#uBX4U6b!shMTLSzPc z*VLTQ!c`)jHKem{vUt%F^wFazIDbS`I4Hd8&8~<^(Ui3LEHSDjrv8CJvlf{Ou9Md! zPUI-bpV9}#6y?RF0^yv-K0@T@b#=T5>jAf&0-gTdE-0e_Gb@o{&GEUt@F^y|zG$0# zPRXEJCNs&7Wdg)ow%|5aTXt^%Y6G73R#WEOe0?Q8-;U>GujfV`rBaDnUkkizZ!p{# z;*o-XS)}I}=YY?yBu!@PapQ(Sd~hWDah61R?_wIM(O0n~JS6g(6+Mt?v*rQN;Iq(K zR^=l@wUjUaRWs|z!}Ru`nIxR|*+bOR4N zBjvv%9x@$!Q`MiPznLnhwZ=u(Dux&Sh|!L5T&b3g$aUT2;Wd7!`R#Z=E~6ICt6I9X z3^gb-y>WTfyAg!+sJVC%?|CQ_SwEeg%%WR?q_(6Gl~OZfLZ<=Ojz?9Fch-bZmS#pd zrxz>ym&v|A{mOUof-9`{Oh!YdWK({aP~+F$R2AWRwWgsB?RVBH08!EH8jXLyowYrw z_@tSIn;S)=2}N)(!(v<$XRcidAzr9B+web_dZQ%7TH3c@; zIUlfZKE7((+>>MD5&*YLLzj%7T$4Als*GJTp$3?3YLcf8dVIqCopjPum#FX8RdT5rYio4G zEs9=6vq5~0>5)b(!lJc$1;1MrkncW%Pe(H#KAAOl3->njTw|E{^+ZvwW(^LUCKIw> zQDI}aHO&{Cj=Kf^>6$oqVIZ^j1z{)-m0b12hx6Ry0H(SYrqT+>2K#oVz4IiWEYvci z7>jLMcZqFti5FdzPv4zWm<2N4Lm%s&k~vhn#I@|H087wm366X|*{nq%so4#?YJ~il zJ%E%w(J2WQcIBgjepGK_1s;kZa=WR%3M23W>{BfS?{01e$({eI`0#4(Lq|%HwAk7P zO4Y>XqNn4F_VjKCytt+W{;6K3;BDN=s!>C!t0Fk&k?v&AU#3h=X7;t|`_OJ|t)z;y zSjg(J4%KqoRL9g5aJE@+Z)?W`B40>kr^<=ffS!wHQtN$;cccDIfU*6J4;8n1mM6`P zDs1J>pBPY7VDsp(-FkYP8r^V~>K(w@0t>i3wNM~z;ax@ldnlpkjlTs_8$6|mTSzc$ML)VJV6k~*G>L;e zUW;JiFuu>@@Zst3t=%!pYakj1lBn@+GS@>RP%}0qv?2zGTtPM#oky3>S}k^C`0|!l zvn(?^l&E@AI(rQvxA2JiXkE^?h5b1~%UbO4!tIktUumwgL)J zQGM8T?RmkCts<`_qo|Et&r!k~d%3;g3{c02U6f9>4YKhWa}y+n69YTV^4k zxP?=yQ^quHsg=`S!7p$7i0w7x`B`YNAK?P3`{endH+*w>*3WM|E*^d(;D`5BrAB9$ zZQy+S@BaS3DJG%vvX`m!elqGET_1>%dKsLLnD?j%7i{?W!voB_NGS8Tro4!})r)we z^hqwxv{aT?5u7}ow^2+<^J)9>Fm6;YqgFA!y)@H~ejV5zo~arP@rRliPS374Lbu7a zO9f10{u09o;Sfxs#P}8!IL2;5JGz;DHDGr&D|EX0C)lR?=gs_WQDtk}oDLc2ia89x z+C@q5mI2D8%FO70`7i4+bdN;{hbObEo9JpT2hu1d7Hp=D$9XDOc)_*$ARtY2y8Vuc zQN?PlT3-yrpDE3Ls_j=%S_wV%B-YE~(!3_r&ImsgUFcX@oQD5{cgK#B6{?7qPug(D z`y*VOQkh(@MwL$NJ$MnK9A+N5Ha$}if;Ao}7dbY)H-k&(+$fp3H(Mbvn;C?F=%cvX zA@Gf7$Mp`B-A{`eyV~3OaR2_}pLWG_TGvt$e`xZm9GhCkj$z{R#p7PQ6Gohv!MpDLe069I$?FU^{8Kg9Os!+q zu5C8$lA_%n{pTJ0QGHqhlcvbZ2bosV;rY9`6tQ|Q&eYP*b5c2G^mAveFDN^F@op!&}@>!=qEKT)C~}g#aw*wp(b{T6#P!XTXJ(D#Wa1tgd|Y=|H)dL*p$F~&+t{{>FXS_ilgzvi6XX=yWkwKQiEkHu=${T0lu z==W*ZV`pRPvd&d6P{O1`?uBm`hJE9Io27-S{ zVA|{7B=!3Al@&c+J<{JX7GuE^f^ETf#4Wm>!-zy^!5)*2X_VwI$`M@mL@qhy=CG|O zr?G+>`^l}FWh*eCbizPfB>)WPKuMkZ_qZ$lTS}9K0<6a5;{cdERHw|+YLK7A2!Bm` z#f*SG#+{Leb?8B<7(Dv97mo_Q7QxtfL+PDhQ2+#4c<2c`?}<}OJcxA^n8_Q0Y<>=m zZ;OXN`Qg9l($2e)yhK;qfHq-*(>y1Ks76c@U+^m=!GER_zi*p+8}fNO<{3xp=+Ay>cV9uO0w{@wzd^-~Q>} z|7rf8XL3Y6g}{cWEMsI=QavFg$1z>3y(ZF|fuMM%2YufO2;4T>w}1Bcf94-i6M>vO 
zv!w~mq^$ZJZ)rL3=PZ+UR4q@c~FRc?4v9%$(2yR zo-|4ds*11D!}IyYKLmqJrmsu+`r^<6mokkChe}oqmG5Qf@+jFah0Zn&Fv*auh6KKLb$fiyKlOv!VHuF7#u=!KX&3Nw72ty} z%qhcE8TmE-TQ?pLTF~w@3B3X7m*Et<0p1ga>SHeNcGc}Y+o^NLtuYNfU7$mv;Bht4 z!XPrzly>gEknOZ=R25Bilj zq!(vlS}P=s*sjfkJ8N=?&6{O7?76&=dG#QXZEphV|60oKW~}kszqtR)Z~hMxmYM>V zf#G6{_fqI9bS&XsmFPL zV3}Fb`UTP4Fs$UO!Kz&km%&dkbZ`VUC%8HMYP8R@Q1w<-)P0VZ0gW5kWf>kU!!=mw zOWxO^RqOl7|nYbX0Ho z<;gr%y-1xfZfb`jto&0M@b>ZTRT5lj$<;^!hk~RJZZwu-Xffr8*Ul&DE)&fNX%9QS?@l5+#@ZU`P zCK|4@pgV>AR6MIFLH|C#dW1e&yc|1OcU06cB{yz5iyd|5fA<<6{mlP4HIW#|b{ zx(D7meC~xmm|uy5LHZ3qn)e22y_DBrnORXgeKrMy1W zYKt7V9|4b_)c)SSyNE_cPw944-*9{PzcEz9VtHs=p2P>Mgv%=wbMiSk4u9h#|0RVx zL8BM#_p?rk;N*&0hnrWxwe&2X)7`gr2p|XU1}`7jde+TWZ{8uQQ?DiM_$n}n>iU#2 z^RsCvMgTSQG(A=pQ12T%LB;4I5@6KO=MbDYDe5~$X|XAF?Ka4Rs}b}|pI#(<@f1n? z13uv8DVH})cQnmF1b_00`ID)w99+eKD&-ulfhYp777Rb7FmX1y;e4R`dLk>!)q@cl zuPq?S1;2TH!f5`hZ~ljR@Hyu)n3L-x`4i$54VbYfpu1pg5 z)R6>?e`KL&t`e!qJ*tIOlO1(8KU!70i(i-0*Hfo-~&s@LO%IAu=AI}zFVVrRom?YfoB>KF+^DebY9co_#|I( z7y=zYEYD5{RAbQjMKuNA<{_RGjr4bvE$>~R{GXS(q*`sy4I9(jpl<3u_SiCZXAN8{ z3cQ(!_T$sH0fYj{c(WWxO3#55oPHDhY13J;2-O8kRRUPX6laAedV;yz#YJYfp%2|I zvQE#&6_$t3h)^-ye#$$knT!+oH=g^zwWNtdyBt-{@b+$C+GEPs77<2RG(T}9T~-di zs+DbB7Yx*}w2SmXbIT0eQ{A1r8|De_Yutaq^+WgZgeOX7j_YuG1Guy(h1w(&-=lr` z45QG z<*64+9z}~u4XH8K4rE;rHHYXKR- zkU;a}q^SD1xS&p!D(;^xYb|(eJf7@gsOwjv;f$hf{>uvfGm9KBxP)*i7%S<_O+pX~ z=AEsZh>TJnh^n|jFFBr=U>&mK9nbVBQ}r;1uCJjC$&h5Tf$Nc)TfZf->d){0*P0Hg z>ivxZkhf8;6f<5f!)9eJEPU5RpPi$f8bm1*HN2$ZS|3T^n67V0kS_Fi1e^X~akqor zM7ToqveRh~F6K^!ZpMy~W*LaceRl}K_Kg?^x*;_^no#pw`a+eY!KseN^UCkRBMFPF zj0x}jstrXPPmPg3Z`d7Bjm3#!|jo` zS3@arIHQR8<|z``7CUFPp2V4c5sSUV2K4b0m_tsG#_hZ#AW@6j3?BXDV{;&`tPvTh znMuARhiW_dLkFr8@2AX_LU3SzyIZI+NFHq$4u}WSN0lC zbZ2ttgSIuPe^XvPw4{|c2|Sw#vluNI*%QB)Wp$8%L#rI%7B?V1OrKF*5#7FcDpXW5 zHFPKWSrTc!gRI&**!S1U>{Rfr=8cIPh*Gq7be+5(x7IG?-#=M&D%c>+nx|s2U!zjB zby4gxq^N~4Toz^$0nu5{C6vC7 zW=#HbrIfV#CD-yzwhi1=u=+i{S#rixqH&?*DbxJXCePgL}q7Kqn3c@e6`4()QGjKK(=EaH{dZ7kqa6X zDg)8mb8gHMe=lP0_)Nb8UO}gAlCoi`-42P$GJPeG3u4EZDti9xQB9C0c|R!%4v2Go z@%0*^aJ5mDAzWzGAVA+Si>eKY>AC`?1%n<8d1i_&LEBGzW2ulAtOOcmbU*j}=_+{H zvTknii@oF$^tf$p*ntUH2K{KI$)%`Up=LZ3>Hn!#J~CsA@H=toj#^m!tf+$d9m4;K zeaNHUWetud#&5jZ4U`;L2*igo?6TrIl*;-ic`!A`{rlfri_@|^Jj)&v^Mob&={K{p zrsp*|j72@=PjKzmN&%8w1{UN=`)R=HuSKh9V2Jr$qX@MVzPS}_(ASmR-ip}7`6Itx z3aQ^ab)X7EcF#ZFC{UySL;&(mhxOJ9KBVDtk%pgAdt56oiV6@N2aM-Tl0o7~#xB>h zq&m=E$t+7xHC3^x`C{q2{1vOuwvrp0-UeFwz0M?xb7UtK;tr%ezhdoxyXV^ z$xT&X%8>_A>Z7a*!p|OA7_T@~1l?egBB7eNhqeHZ$o0Z(O%NQ}xZ3J)!Qg$n&(b8L zL>hLnd4VR%SLNZVw@ubN&RkC4Wm~vc!=o5WWqGkk9|Z zs~^D$_lA7F<*#V-trBZ>uIHB_jgOUvU-X{8mD{QajDoN9PmhBED}R~)fIw2ZX8iy~ zaEk5H@0m8N0u8;@w)^+L^EQUHe#v&)gH3_NM#`O`C~K1Ihc-6Cb;^5 zr}*m)S><)0kWmyOsa?B&|LG_7Z)8v4kdLW#vSv}y2@1Jg!>nw`S|X(*()Xl;AC62th|BIE1Udi1uaANWeNt^Q5^Aa zS51R}L+1ey=KQ{rbxqplA^akblPzw0F`We76V3#GpLG!uPM zcUB7mESi6F|99~K0l^4s&QQ;X*V-$32zuQ3Y(F4bHBk>Vy?Tpr&@MXFqV~3N6ND>! 
z11625y1nW&)9R#T^IH9eFHjflUB?dwpR*@uxY+}gI%VYH!B0gAAY>s;6|i*Z7!1bp zB+xu3eCsHg0vvhylV6c+j;O0W!FzKTu0bxOkL&f|JQ}gOV_kZx4 zzMDWlowR6Xhvsn%l_lRM(Uy`zy^`yO4Pdyq-xFrw?YK?MnI|)r*7ExCv4sWVXqBeU zj{AAy6WH}N*^Q%Z@5x6WH5ggBdp&OweH>&b6h`_W1`o7Mfm6Q@t*~uus7qd}7s}Pdy%Yyh$zN;g z`5SDaUL;AT(`(BWU92~wFcDI#CimfQ{D!GTQw<+u+)@P8TBshWK7*rcs2Ki$K4Y1w zW(xB#i_mry0b|EJHhYfCC#>}=99aZx^;9^zaZ9QsR4Rv?^q$CMF?EWz?m(3#98Z@_ zv^GNj_V@CfBdd5zX#+p}VJ&|YMXR47ptsF-5;PE=UtWY&DSEPXW72-bHJ+XIAn^VB zcOfP4ns`IWP@)P!y>xza5q-}$mu3nafk8c`Y_27-y`oh?9BQgbjX0WrPxqqUNMVG5 z&eTg0F+cOh9D~P0@cWm%IE05xT+e=>PMiI2LGl$XK_=JzWELagJNxVq zO7M#bC$)}5puF~Lx7`Kgvl(iCaCBTx9BkIV_PPQkp>FO}&m0ptYU-1GQ^x`I&!_ts zr+7pcIXt>oY-(x028Phfel~mU4+Nb30i1i0TwO>*lyedW7Nx#MOm$2f_^`!4IxY4(zFyiy53J{HqvmQ!CZd23(F zVOsNA!W7hM?6E9^_VwZE3ZM%Tj@Vv85mm-FYc&?ULi3L0m?$yXFP!TleqMSuXNu^y*F~Dm}_I97N0>-iP z4fAq|&csnkTAitexcQ$#MMSnKP$D|oebTp*|MA! zei?W}OvEUOa{vCfeyg}))Hz5;7rGTygz9UIkU#(CuRri$Bm7Te#)xf`_5=JHb)kMl zc#VP=p%-mjzzuCGoyhnra`6J%e69Ysey`{jAhqy~G)U+(H`S=McUXt_MO2+1R?AqmH@MG5x39dj>u%eT5k+T7Fj z;g3(y$b<{gFT_ibfT!WCf;W?b9R5~%nC~*~iW}9s^mO$u2-ds|R%p>2_zVG490o8w zI1Kt^p6!I{J_*55Hh68$IfeJykMt_0`9$FF9#fU`)`Z|b!n|J2Q3%iN3l#Jk9n&E@ zkPU6e#07+}uD3>)D0Wr{gZrP@UA+Rpa@bwwuzx|K=XX4U<)N9u`W0Av>z_?Ejb9DcPY)^4?f7)zlTDaQmwy2!n;ALnqD^UOK2be2|%&Tj&o zQB6ZctpU&wd@PALEh6MOQ8@ySMJH$#!}a5*@1phs?#(!_PnNO2Tbseo)s>+H$@4vL z6|Dv9&%4b#m})=wBvz5!Fm5>km}ihA2nl7$R*<^p$)3z_G8-NL1CG87e&S9!qs+dy zmm1<+DlaHfSd;qQ9|R$hh};yX_B1#HtetEiV6y2T%NO{QvyV zZ<51Bw_lcMc3yYA9d|6;;sdP%{*WTj2b!V@_4qis;augu6#IK~Mn#K@kn{VBFI%eP z!@#j&X1JYSeivXM?<;OFzw(x#PzeA;#n1g~Tn^~UM7_Pq*I2UAJ&+@-9GChz(D=p; zZG2IGCAu3+kn0rzTVDdR3VZC!=>?AbIy%?qQ6y+M5aKd^>WpnP2Vd=opPnxV8#^Zm zN|ko(l5Zas{?S3*IK>Aqm` z3=-t*&AEU7z3#fJz*U(V63PhOUILcdnGeVi*$HTj!0RGtt=>gcu-r+NuA#pLXscd;zpcBlzBIU3B+OHKU(PTxC zdJ7PR=5NX~HRiI@7wSP|&&QTMA zT?y;oQrl|YbOG{;N77j|lmQlm1c6x*n8gS<*0YGz^Sfz}OJUm}7kve0WQx>F$!`AJ z5<458d!5ofRc^VD5}I4q%Nf`#TLq_KamYm<$Bf%F(jH_~-HaqR&zz)&S~p*M-IL`d zF8P`%LDP_Y)D9y_J)0^D<|&uDo&5-*(i;EhANen~ha#3O1XTQGO$bOYgNddQ(Phf; z%;2pc^RUXTKs?#5Hz?a>4X!cVtbHtG@tj6i!ELJl5vDBqeDv`n(+AHPLHpXhr;sa>yT=M}m%=Z?E;l%?>X`@`2{`EKO~ZZm(tf%tJrz4D*#7R%Ffq3cT? 
zjbKtZWVetvnO(H1Nr4?xOE^5Nf4%9xgu7TT9*{U=G?668+l%8iN8^qiz>HJL$s)x4 zzrO#w`u8+axv`Jt0X>&uFN+z@FPhJ<`+Dbv&V54UbW8ElUU{=H*Ad$4R0@;&7Za?m9u98^ zzxT`D(8V%ShzJ0BK<>SvYH7R};gAq>EpYj@h4mhuChAvszdi>-VGfy7%XeH~emhzZ z9s!TAN}7mW#Wu>E0dkBy`WdAYEk^K)`vX^o`FtF)VKMuG=SNYNd6QF;(v42)DO?|` zC!goM>Vi^p-ltT?8$wj=VZDr4EfI;xcO5rJO#uO(xZ_=;vM_5R|NaQ{o7I#JVEuwT zng|y(dM{QbXj$7b**4v`E)=6cKk*tA%5`W*OhxgAwE54l@m)^)+rRwg-^Y(B*Xz)Q zx>lOkHq<%P40N_Q9koOUM|y-^WRWu-`4GN*Vg)|61PeSiIcvAUg*)61MNKXzt9{_n zmM3*Wu(Ucx53+iHyEcxPoHF>c9syPw&MPy{$fX)W)AT;VlJloml+O3EkT|5j)tg_S zQr6BaUbY+{aNbdR+?Dgz--hq9eNUQMs6E6%&^toM;#%h%D@I%ISf=@;fKp9!RB#Cw zvtXlmp1p-4SWyy*lF7wYjXBM?P3o78)H!d>7X36rYg5EKLWlSTDbj)Wr(Ob%`d!lr z^(zwezz`c+K2)7z>F@ob7Nqy5ze-`1NPaz2nHld2?hGd9S5wlV1{&2rL;L`)k;?Ah zQUQN0Z(cQ>ih+nRs1Q?Z^7VD?1qwk8GQ8g$Ssx0ds&to?(%`9xUpz}X8u=@tzj=5$ z&gpC4?*QuNbPcY=O#!?Q**N!lWoxkpeTSV8$8u}Y z2#Ed6$9}^G1V(P}X8Nn>ABrO-^@j<0wre@n4~lC04sdeB<_MRAyeSE@_xq**FkS`d z%?AP)E^#mTkyZV}*2T8@(gfo?tP1A(ln|Grz^z9-c(p=U6+=r;i`!=f%~HExA1tMM z^c6t>!AY(ZH1X{q*xQ3mQiIdmB&*k);X2J!S9NMvn!abdbH?X&=PHaV6~06SwRCHA z*e>_-i+P76xn?O@wh+d@H+z7Xy(m)sQZ`6nGFg8=!ABHvpEsnUs+xc~5tnR>^pR=Z z$~&sGw`3$~A;p556yEQF=rfI?S4+bG{OFP2v4aFGKAD7U10pxFQi|!NPuIT*)3hoB zdOrMF$N00_y?lINo?5kdj&_v~w*F9{ity8{QC%^=n(FKEsJRRW-H7Vd_ZX|(@I_rY zA`uzh_Dz*N3H|cP?4oH4#BL2A`dAU*8cD%LcDdHRN#{V5@~LwN!0j$Gu6(2bb**fX z#XLb;ytRBo_K1fOu72b8j9u}^2~M7Hl+a^gs_Ojmb9sR$9PC+pem7o^^1w!jsmNTd z0Yfg#O+YwN$sL}?gPQQeCwpIbA34iXsAFz4L4M^Ex@EUPz3nA8Q*qd{T~Bz6qm`-? ztwxJn;K}?3@p3zMaGysx8*ZS{F756+btF#}U)NcxIZ(DXt_Xz0Wn1Dnb-QVJ>GDz^q$$!7QB;6R-{ib*NGP?H?pI&1Rpefu`B@y?tplhY7ZIYCPcl4DI$E; zhZ@HKNr*+)=bK*XWnA0OFu>Oo;)A#)F6ns*_th^ye)Q;5uMK7Mx?i?XS&!D+_%{LT zS_GXd`Og$W?eHN5Z!y}Bnx$r2bYf9jtvq6SmezIZkdiyNLzWSIPVAcTgD1P2Mep2M3 z9Te3Q%rsowFo--eMU(km6i&s2eRmnv6e+IY06F%}kqN`n);hwIBG?D&{{4Sd695*3 zCQ&a#WaM>OHQb9{ir;uuXe=tZ?VE?ReO1X$6Losq9`F12uln5LVCrjfivKfJb>$3) zez0+StC}S+oVfpi^FEKT&jxZ#EX_bQI2>Ry8+w|~Xa16Lq@^Gk9D2ws0WG>??3sYNx(&AQ64k${+rC|s z$!_ zUr$}C#l>pK#D(mS@VM)yuSyM?XHID2xDv!B99JI~XCDiEu9TW-ok$4TWXN5Ybija^ zXBi%l3!F{Q0~^_H=MkvkRE$5S-Za9dpp5aG$HxZ1ItjBs&mI>SI`?t1K0rpkf!5 zE70d+Eg(8U*Ij(?^`wM`9ND!jK9d{nZz(tdN0jNZzgcwSZue%~PRU)cpb(i@uQAjZ zk64e6Vv|02d5w@)I)PtxG@hy{qvbcP<*Q9wZun?TN{Yv4s7zL1iyp;fTP8BE5O^mD z32)g8qC~3OQG}qfCxOCR`0kPCNj5(^u`}3JDhw@|?HwUX&kO#&hD%MS*Pt{a)Ur)& zZBuOzxC*U`5++WaCArYTMnf>Z6M4-x@=W5_2|0rJ-1Ix4b%G=B7AM6~BO>At2d7@IJxD`~0ZByCRh5?w(4x<8utTCzy6@_9(pXs0rXF+lfP?LJ#?F3DI(R z$FO3{HqY>FFB*pW1s7oCAAL6JOL>W4u-AkEjw*Q1%&>Azb3|%?=IXX;)>2VUs@Iq) zAqM47>SDtJeL*HB+Fgfv!=4OW-k)HI;!g@&k76}Jp@Yyo3JOq*;N@-Io* zb5v2cW>9T-u|h$ANFBP(!z>LH(aB*ZHc>2pMS_T_r=-_gnKrM3r{w8SOG3Zy4nORg zWlZu|DC@F&-=?~U(^*x?JOF_)0liFc0TqLeUy$`r{iE7-WhjM2gNIZ~GyJNr7w9YP zmtJJp_n4yHwRu26O;V5=D>sOlKphot8rSiskpG4Y4; zluq*4frKSAziv}s_j{zg!4t}>HV<3`u;T?nK_UOz+o@o5CZ^;f%3+)36lKl|AVf$k zk9jSIvRQ@<{C0Os{T_~!dJoFeyw%hb*S^BokZMwo$#Zz_A&-_PahXpdM2_E!V^lv= zytzj|m>ipPur|?k!gB?T6LDgpDrbPT2e}hH=;sNKd`w9T1ENriW01Kt=r+ZqWq1Zk zAHaX_H?C0eUL+|T!E7&;gX;}J%|Y`>fgtD@HR1h;qV(@bX0^V6mk6Pt@JlhvM%)zY z&Ly42Ga^(&K)0TKeB^Vo&FgzVsNlt=?uj1(?x`RB-gB?`XraWxmaA{hU0hCy@0L4C zd0{4*9Zn;>H+{w7Uz>I*V2X_T6$3$&A@~8!$m3(5|FVkP$mXa z&O|{%gb!N4@bFIHT}#;b=92QtLH`$s`7_bq@7rDTxTy3%l%7(l4cOV*Gi+Oc_E6h_ z9KZApRrPcEp=YhGOJ{xbw+_PGLC-{B#pSW8f0TdW>K`=@Jl-fv80ziD74ERQM5xAA z3&c^2fUc_)%OT2bfYJ!_n(k?d668?7HZPM!b9Wwz*}ziS#5$22v-F~l><}`@OtyhH z5=xqrYGy%szX23flOwX-Wt5J!Nq+snldrkYQL9XGxDLf*m;kZacR1OGJFjSgJpzEt zqoTLXq@DRM5Bf*%)A5rXpE+U}M#NonHP!NU4`=oX+8pB2>m+HFrH2chbk3E%;t`G) zaUVUIx1VT#=h?C6r`byE)QU#k#&B9aMmwO1yN7*}9c4C!l`m?m6-VmbC|q~Sij!$r zd~~!7HGx_g((X#IoXu+_dF(w!M5=sD=%#E9Ad!iR}mvB<;aBu3${W1J1 
zW_v9@?MOVIZ8a(YosBi+$hwMKo6V)`Q5b87G;;L&bR*HaK!`ds(F#Q@X0&u(gGQ5JvlVzSs*KCjIG0fUs$U&NBVFU6@Ruh`SwO%4Z>uXk1Qcw8IK}I}>x6 zT*6@2YVRn1Q5z(WTX761WOBso8>*_cUjsOEczkdn^zl|L{kG$Yem<&qtX+^V=$;-b zmJWAQ=tu`5ctnlXLrs`}#fO>`%2$YNAqiD6#I8D07XH$kftyX~cy4L>Y8K44x58T0 z(D(0u^gAXPT9PUy`VUI|Ln>I(|%nPrrku)<)NZ4XtSBd$s1 zXtQL$i5pkZ)S9Slw)(g77wg}cxxe&IUZKEPJbf{}SoQ#W2dOVTSw=pqlKljus4)@U zVP387{b9m8?H620}8)a0a3)%DF#f)olCSb6qL(LK#_dztyobgPULFQ5L9jXe_oBzQNKJ=Gz z(YzzB>XFeig}gT#yzF{8-Z1GZ{`3U~p@+Oxax`+LcxrJS#vF3&SDkDiwC=0kKKjje ze23blF7l0^@+2<1;aQ=@skdVDhV>N}0Fw@n1|a^6cTuovm@e{%t=-BfQ!~|s@sfZf z_a~BJ*O0c`lu7kgZajTj!tNIz)FjBF4j0`%$Q&P`6I^N8{bZFrg0S`;;BHgtT(3~3 zTnO8bKK4X*5xuhqtB_vFRjKu{Woli~&I(A4Y&+QF4 zrE%_7c{cJJ=bjOgQ~cgs^7z3_EQ%4%4+1EJ2G*%24s6B=mAW4K@}6A8DW(=N;y`6Y zYj!ZcL~6Z+Q1%;3Ttt=754KZAdN{UX5tFN<{)zJG$?Tu*das^PMF)-8D&iJ81Ev8~ zG7=Ynn{r zd&(RF6rc`e5L}u@#>toShW#nWu4YZnqu)p(3Yb^#CLso_aZg+z7DqW~1VuEZ<|lr! zL`FK{F<61yf?-rFN3z>weMpub#=Iv&XdO&o4UJ;AtgV0O*-_KqnC$q(Y|zig+xcV~ zDW4oXIbGD6y^i(MZ9luc3?=)yvIpVNTA|=`(ENgLeK&8@)cV>V3a!QYf;NGp9-U@EvqoN(16FP{9p0rbc z-=}GP5s`bX-XqKTtd-w)w1_Jr31c1?(?JYBTQ=f*;^SA+SMllSF+yHOOSGHCfi;txh{$z7oeF; ze2PF{xAbKPJ-uo z&w$+2s!-J=i0h-UjMNS&lHy%!Il~p>sXrOrFuYFybJH;|o!N_6^}B?s{ylDG#Bfn? z_n_#FhGri7Ohzm>u75U};wTNR;5+snR3prjbJ`wwg&_x57$sV0yiBGv9XUDt4oV1dTicAkX>-_Z9!+ z%ANOHh{LQ^?m+*cmPET2UXuM=vp<`KFMT+6V2C2V{Mt8tSVve>tsqi$+F^r$Z1*gm z&aWaoC5efRYB|KykX3~i%@8A~N@#&s)sa++V8lWx%ol|!Fg_wccC0KPVMpq>^1b8P z^2ZHjT-16Gwm$dK0%?!y%!hA8;c@5qw`huId0pB3Cvw+5WOv$lXnyP=WrAW5_mDb} ztlk{MEebRaj`5!aiC2Dl2vsQ%6c0#f<{>tMe=~Y9qNT*2aQy@2W8`9R;fPPzoLZZm)vPXzYrOeTXj@g;xL zE_!NTf_1TET%1k4u5&nkxGF=uQHdl>RBEe4YNkxE*lA$=pdr|`+L`A^p)5YdMlEu! zxVJV}B|q!m;Cx!G`a0?o0>{b>$1jh3nW>!u%8QplDFWq%EN&tsg?SNIz~ou&EZ+My zb(g&a0lR!LTQL_v(;)iwrl5tJS}4W$iI4ta4#J>M=Kt1ApiPvF3lp@@xinD z0$2h(XQ&mg9clYqQdEs{U7EEFq$O!CL5WiN8Q9$?2upc+n zp?e{}E=((2 zhuILL{s!i+-4iMQ0OV|4QP>&CM(NtZT-|4ytq3DtQQv_wQF?21R-H+BFEmCAr)mC3t^dSc#E_>Kcn3)wi{3VQ(M?mU0{^y!udA_d3q0Jz_0Oeu2!Zi)s9 zjR`Ee_HZ!K7G-YC6PPEn(e1>KAF_6H$O^}qp}1*cA{J+h?$JWrWFFN^-^|I@MHy+Y z&GhBc;{yFbp+g>rIY%28$`V<;9C5-`m$;~fr#DK0a5Lcm>>GeI6@YR+e|v%``+evx zbBvDgwbRO=Xr?569QD!Z>UM~YHTCbKPk-8%CXnM_Ev>c5*1cKvJNAY>f4o%_M{^97 z>?x{3ujvg|yjOfnD+r9hs7NYIK(G^8cAoSo_}yQ!ZHuV*C8)mzH)1s_aZ#^h8ivl{ zT7!XAv16&WCZB@Kya4=qEl_U!7y{b&R_OXl{MQ88=b;{IrL4t*FgjbHsz`jjV>1<9?AQA)m;Fai4-R>68t;}+LKUa!MCQ(qv<{eue<0JFt87qFaaenR)j zn)T zUSQ_LF+IiHDryjQ{lxbIs|O~DB=FiWxzUVTnjfb9_2hW}sdsSIp-lH&P32;RfUS=R z2*Jy|ryGEV>Lh;;@UEv)>z}f=+BZ2n%oW6PWQj(^a$k@H_#~DJ_;(r5bo3(C*iWllrlT~e5bz4V zn;BH_8n2W0fk+)4XE5hImwjS5SU+J)3s}Xq?3kcXH=dY|(`AHiDUuaN7(IpLf=9>} zryL%eBEbD?4+Jp=LgmR)&H*j2nUUDeoYfTfNw>4bwrbW`#Kq8U+ea<9dyH>Ko&68J zemu;~mic-8PjEO~VD9)Y$YgamYgV7^)tqsk!j54jZ{R_#bAPn6Q~x~7_HtC4n7uGy zNEj~@6YYu`7begj|MB;0Rrl}z=#OgkAd85h`myur*EOdhnlouhHq6Tw02^Das4o#j zHMjsUHQU9jr71+jWR*-@uO2B+?F@RiY8+NGnaJ9%nZVI6*Nkrn+tOpb%?YWBEz%Bd z=5fn@-fjR?@MuK>`(G1_A;BZ&N}kR;;bZ+L*hEb@Q=T$om4~jCZyfJzo9PH6B<4i* zt^FnH*SckeIHVr19~k8aj2rmqXOL zQ5cC#mTlZ_u|$3OjrD?TSIn6?4vuvbB*mD1P`!){sO z1O6k=V@Ms}Nvn;UL2VO<%HY$;=kly`euXh-^1_y|z%WKZHssP%#52wcJm ztvKr&qBU9OoP~3p;c4|$XseS>;asDB z)%!*d;aWPFBuHf1atireqvFWwcE`}XPYL&3NPIwHq)fJK2DN5+Ioz3K&lcrnks)iCf!CyA zbCd&hiworHs;^X(>vm~;`#0bG-yW90Ma>&j10Fqp+6Fx32g$FuM+F6x>k9XAb~(Jm zVa=`KpB%gizrwBJbHkTaJA-N*MY7RQTen;xaecY3B1W;P)Q@j126wVY%+|Xqb9)(k ze1h-}Mo}h)C}zRLVN+WJ^t58$AAM-HqV9NfzodI~RK;QFvrtB!GQ`L~GBGiNLXF#8 zMt}y_#CN)%jN;mTF5t_sLm7GgA_5&-)W0jvWKzmetJ&{I=w^8Z`q?Z?+oB8f3p4qi zaFARg-aVj1{;)j_|L`rE*2+B?#1jgKYiP}TLz-Upp%^u>5li!M5QvjT(x7)+1`i!| zEcGS_&%6z2sW7u$LNbM`yXAXf0V$puq;A-`VC 
z7ndZV%$m#n`{z_aFLnT)vEDF=$-Ra(d+HLv)9%xF|9{^9D-ZO9%yaV#Cs3Q~Y14)n zJw0{ZE$*AN;iWFHiEBG0=rK7>H;K9*9)cmfYTk$f`7&z7+~vzohd}1U>d!|Z;`$x9 zSOo|CyW?pPdZYp|A%Fcv*1`ZoMK25yHv{sN1xrLS7o`gPMy?O1NEpT&Q!7C zu_gb*FFi_CyvY-hSms;WGVu?t}**r=)a;*|5$OZut z2&)p;CKob8frfhRNksR%=xNOmV-z2R$#{%N3cH(t&($mdvbzaNVj`okr0J}iC%cHN zeyq{a>a$L<{}I2jO3=)aY<|W^*S7fdma`%B>b5| z`h=b|i82L@l+Cdwr#J~CpFlMS8qfQOf9UObqEpoe^6*J5@#^K>xfg+*H-u2DGKkJN zi`R=cYd%aWM+E|xur3BcM*SebcO4bV;l)@sgCQywy_FyQ$q)a<2eI>`>Pwf+f)?J> zCj@tE5g5?zx?~bgsX|tn1Yl^n8;Z*)49wz+As^4%5%KJxfq)>mJ6r!23owhga*hTP z+(+*-Vl|TiIjwo&ifFy%Vy>>Co((YXHC>i&KMw_fO`GEx3YlLpBrUp^asEQId-sU^ zP+S!T_FE1{K60)3{{0o{J2n2WT-F+|5k3x4fRcuLE)Eu#qzZO(UTT4ukvrzzm1s#N z73GFI&DG&mQ@UvmRIkQSbwN*hgut37NV)T^xN08iS3WrT+p~#}m8f=`omec(KouQW zb$b8%heZeFrGQa3k*GJ@=XNrB3zjTJrMiv8F^)7%~eJDK5P zs`qgo7{E(HDOXl{A3@gNj6S=&3BS`ZGFHE>g@BN!5niwN+xOr6rN81@weOu!gM%~F z{;GW-DVCV)s!9hsc(Y+*nhqy5=!>Zi54cWbdQ^X>`g2~wz8vH~B z?u$AkSa3{={I2_s6_u9WwMCz7~cqm1l!XA8rhV9$rCq;0>3FbP4}zkBc79Lj>tZ)#V4B!&F!B+P|Vj^vK3G zF-lz8{NMMFsgD=>A+ZF0Y@E~KbQwcqXeg2w`sSffF2&ojrfVLU1Aq0+f6)IHIXPem zhRkbJePreO)?`*`6uy-yTR^a~<CT_612%gGQa1A4y*BE>m&Jx4} z`iOOqJ3KGJkkKaUHQ{P(Ii+*EX5}hCHN4(6 z8?4+R>x-6vE43-8XGLjzx}jJ;TUqla-296ChzwhCSBR1|-8p#LHXwN$%4EG;`ZNg5 z5McPK)vLEQiGULh3)5A)PNdONoCXGI7aUw(FD1X`RXE0|271{w&#qJu#q&ojLfyVB z?<-Ki=78Nz>poHMDVWamt1fu=C+NcRtaqXgQ)6R(epS>~bFq_{;e_* z|9Z=wUtP>!HG`oQ%=;+3o)aV=c-z_s{J$5m4_BP3W(k-OajogE{dmodpDq zvwC7Zauqs4Xr&y$e72Scp#IZOKCzy0Mw;TRfw$gr|Ni4gA3rjICC1m>6;vEZo#})#(p#mx$Ic96-_RA? zUn^U&EgKa#i?g)BRoLr9IFkVH<)E%sF@H~M_V#=lI6r0nw^W*9-ueu!q~vwTTznZg zmOR!Lu4WoenQ0`o0kn8ZsPU*zCK2bdp=^}IvN+s1_Ct46aw%r#LS4M^8y{n(pjj9B zx3kdWIblXQEaGoOtNMeNr-JOR;=?5$~@aCR$fG5juToOpjn!s_sO!%G>liM z3O<6rCw#5nc}#Av=Rc90O#IeACx#Vgklk$58n#7IVT6Wb0t?#Dt{=?Ny)vaNKt)H# z5E*a%7B14vq$dS@+VcLxANoR3JmKZO*9q z5FQyt_&osFt`-{HJH$|ZOe7VBMf6_XEn8FFIBEb9<@CG-#AI#)0{JDlnDbzo6O1R* z0wqZL(6XQdWN+060D)!vNZY^rA2#k+70sQ}+yzZJ+EQnU7cQ#YOY zt1ER4(AwXcR6wXzv9f-KW#M|)zw=u^@&@o+<9!tZ>;Mpsca{^KnBXU>uc+>|#j&<~ z-0~i91B1^G;Ei83vaZwdyT4X*;$F_79_+DYXwifxM?d=1gQ!yS|C07@&yi-?b)WwQ z`^k@Di;S73=F;fP44c%9nYdIHx^NpLs*26w1eE|1Kvn@r41g?Dm1&1fgOo`p=7BjE6zECy1SIfPKTd_`?Iv?IJPSj6&=&V*dS>3iG91=ixRQW9JU40qp zUuBlTq;35ny@030wEc>$85U_u?;RQm_|M!^#B$U!fMX-XfP(b{X(!YR#2A)PG`Y%D zR5SLM?%^R1wshT9OIq5^gD!dbKMc&IEQXJgA{fb;5l`U@*7UwB=6R5QioR8WW9z>=r$dtW~jg4^9ihV{e%sx$O4vG7E5b>Hx zcYajpoK}(`*BxiD=CS$~;ThoND;DAd2ag!2O+06X@`z;l)QI~C>Fi%@4Nb6W zW$MOR|2{y^``I$+{L`3{KJS2@mI-c@ZND7r8e-_)Bj}LUoS?|C zq2XEe!<^yJ9>{DzJN5pG%TQ&2~F}?JSxGVKTg`jX_@t|Dydz zAAir!AN5J!YEfRlf5U&ZMtOfda0}WfXt{^ujjPj?fM(^Fzf1^MQ$!U4n!acp%f`NE=E7R5`>~C#6=Hk zBVkl&+Rb2$72mEAZTijKRq@vSjk{^@8btah5RK}0lckF-n6F4@5sqF?1l;0Bh8tu@ z+3m^8A}V&vd2h3XeKzm+VBO6uHg^upg#`_^hlTK6@8jf8JCsR_4D0idaAB&u)k&W( zgy+|kr`Hd~Pr}l`g6{(S_kZ6XQ`__~UbY6DsMp$RmT{s}dXhEncN8?hk_zTW%S?JX zC`V&GuM?s-9M{K#pf9Iy{0+v|EPEcS^H2ash>*@;YFt|y<3-QuA|+Q#0~!|F*wKg)*nN6riF` zA!qHD7Zjp|q%>@qA;Wc*X7oJf{d+a(jClAV$$1R?h`eB#Uxfh+3|P|-2X^u5b`Kwr zqP|>fFkfwD4SYy3{CI&MQnH?Lk7g!i_rlQODui_^X9mQR3!1Q0vIzpC<9pDLQ=E3d z5mEX!6B{GS>y;;l*O%y~p`YTC4T-M2=xQ3gS%-m_mr(mdvvfv4A2|#ROv@Pr&22lT zVE;U1)#jS@C%zY6wxxTEL$JnT(~}f!667Tx_=m{+Z_>(^a2O&4Ye>XqeS z2H|LkkpnP>R+e21fzn#^Ko+pMdgG`CJj^}ZkDDnF^t^pay?afqk7}5+tOQ3qF4W0{ zM&}s9;XDHBJ0@Hf7iepv2zEfnh#>z9FkxY)aqZwky|L6l>|Rz7qs^tU?LqrFo391Ztxk z!7SP;a=!_dbe0r~vkS%zwZHcFh@yBslS8R!m$U8eubao1K$-OlbY3Rk)gQ}Zmv9af za$$b3Kt(`%B1em<@?kGXoJL$ot68;nJoWh<&2JjiBiIEca@T!%8j%$pH8Jn7{{pc!d5H`!8~+NF{?lSOm)rofY^4 zU(v7nf@EUXTWTLd%Auqh2efJ~Cv>244I}JE2ho`YGZRbD>sP1xB0gcMS|5;D1L&|~ zprGG?wv+pmSGwK$jBnD{Zu07c+hsM;>%=;rNgJ6(xUC7#e@(C4S`PQht_N6Q$LZ$F 
z0M*+$v%GU*#0T-YWAPNsKm&6Beb+fgT`4~w=ZJ52G4x~_&d?D3@$t6Eq@(^gaBHR) zP9p)HFs{`2p$-p`6Rsis|FDkhcof94JFR#Zr3EQ`hkA~p62Qs~BdBV9DeiSZB+yIV zCQcAxlS98~e<}z=1SB>w4M_ynk_Akzi=$rNimqA8D9SUVAEJKiwXX3ta^3wzxi*>l zdYRt&Mr-d>yTW`r_=7qu+8NcGvC^;fMAMnuucE{3+fCKxb);bTcfKd=)?&Lb8$mEVFqpyR&ih{zCV@oW)r_%_O`->>`kILlqo64=RAznUEfEw85xgy_;n_wB;V z8mcoJw5@mm>PJL(as<9+C~`Ad$Od&m+0ajaU4IjrCG$quCWO7cMFoyLN`S;neUV(@ zlg+Vt7+VEKja1>z#*12^a4_b|Hg*mRLG&(WLcjQ(PbSwmmi1+6eZOYAL1)5lrotra zcSbYe@xqYUZ9#WG0Q*nDOtbV}wrf{^l8mXg?!2tiEbxTdng~4M6h?Oj8Sxlt;l{jO z0`)!;o++|)+zI&0Bt0aj@1=5(Qyyl9mn3hE)%*>-qR#Lipn}rqm6zE=;+qHEdfz=k zjF}cMr+0NCclOUgqI{m{4^m&0GuQbr{Ap(IlA>zJzaTUnQW zFtu%DF`ke=szjofC4*&}gDT;|?8DGnOJQc15w~?`fuYnBvH!@n)K3ja`gt+6x_c-d z=5G^;e6!q7Mfx!Ov=cQETh9Dw@Vr+H!)fM_UpU1bFnLG~$=H{@SEr2myJFyJQLB?GZOJqJMVzY>Uws!tq0;{BPn!TLsaWk8$fk()KK z$1DMKS#6LGFugL0Bz_MY+`s2y)Q7mwKlc?L;A23@O^!#INzx4Q_}fONC%FF9I!U{i zAl}*NrGigbsJBXLDdHk8y3I*ude$KxU+vj3p!*+e4YOf*Sk4i-%Pyc>@UH);Ld~uc zjx#=s|3>a3ma{fbL?7BoCGoWhdJLJu5^*RmQ|Ba`r(zjR-_&gaxQ5uY!Aelf-ho;+ zSuP7N+-qOI{pa`pp$(Ba7K+!#lr_1+R3izw%h3^g70p4mn_WU;d`% zmFZ2b2bhnrMt!9Ts2|r+(C9JtE7*#}l?e{a@8(s9SD)~oKMqC!0LoGiMn$+UgPfZ_ z5#UyaQ;=MoPG5!8d+0@U6d2Q&RMrQ1 zBnYPoCoaVH)VqpMAD$#c6@7|w46dmY(dZN(UjzSGJ%DS+qp{~88=D^H*yN^ilK~;O zNXmA69UJj{3%dT-`&XYOr&5KeYLl>v#fk#0WSHe)3zE0BcFwn>fVf}b;}K&5t$Z39 zdvE92(5`G7pWNeV3mKF&XeiF&29@}`B|l9Z z{PM{O*Go{EQ1hCvpr&T^b>%{x*H?EL(@QwV9&H8rQRE?FXUdVuLQ~e`z-U58|CPQg zOG+A}5#u-8KhNoaHB4^xi!UZ`W?hd@;Ir~{k2*x2@Vh@hIwt|`m+lZa%S(0ztU^_T z^#yc`;u7_vq$OTyQCBWX=Mr#V-JT-Wh0k9fqaf1EO5X3QA^{X79pJ;G<3hrjl;HMK zRaT#o*7erIqTYc!H4j;&6efB0q_3=?X$+zpdi)SBCFWinw~9LTUZ)u2al_SyG<_Fg z%XmY5mo5Y!nW3ciX`Y5;_4DxX+@TX!t&?t!XIC^K|BX>lr}%u|k5t%)fCSwP>a)Z< zu%b=B2QZd@!5h^HH=|8NJqjo5&kcFgF7yF;WhKf8>)UMGMx55BRZ;BK{2`jP{(`;= zljz5vd{SF_9T6#-vqlH~6XtS+Z+Jxe9Qfa>lpnQJT1P4Z{`LDg^Qf;OW~`iJaR_PI z98lW8p60GGN8?NzE3uU|Agh`fjvb8-LwvYZmR0MsS2}J?Gj9NI3d=r1VXeH^hBJLc zi+t#U5HDjvc1osP&AXZ*EI7?PGPVdFch*^;*AhWZ$KWVJKVQ6|n{GXKya?y`pVfXN~bIHSjY z1rr$qcu)L5=8ymUoGv=W1325+hJL?+)#|CQAZ0LwD)i!}JqOTP4^=M9oUS;kzsC_^ z3fMU!_B*|#46NSr?f-KBw`P(PFaZ?OWzRNET7_+W?~idGhQTxDq`eOtU8*=C12L?D z5R@NM(~7e;X>`~-$I^b^$9afJ8hWq|(`7})CzNh^A-rb+y}e(mAQ8nm-E_(f7Aa|4 zYAxB-glC9sT3ylZdP!;!XDqUznxhe-_`J1`3!i

    M**3jJJ=@tCkzSPUReRww^$#6f}$D6xnk6P z8487ZU(aq_0Dm2TbCaWb>P6pAxJzIX;gcgMa)}8r;634l5r0O$L3MG-=)8>$4*tMN z3FCJaaztGy<#~C5H{q4m33Sa>7QIMIVPX>ihx013XUr><$>7lIXl!3H>cRBHmrpe; zS;jgAf*(*|{X|=e1w0O=Zr2jx2Th*?T33gM!i6Apys!r>tTj1lTKXXc-JpKf^kD7C zh1k4auDr%cY>|~^Co8UGMl!ehc>|qwc>!teVqXn7{Ln8Y5ICo>YLSSkB{`4V=9(I@ zuz;n);s;0P;fZ3C=NJi!zt>C&>W^uA)K!|C7J_yW z!r7f3)i*Uwu&Zm)Uy!IpJ@g3dqJ2Q_3+5~y!#7?C%}wYCb%bGg=GSf2w~*Pfyq{f* zfll=c)r2@4^!G=v$tSkq+^*p1Hs;0h1B3{##17=UWt>5siSI-ym`MGL}j z`Jrz1!APssPb$M(l z)c3#=vR$xT{sqsK$q}*!{%Vjjy4m$M8fYdHq_$2;76j(a;kwjgsBBrL9nYV0?3r(n zsC9&FE=h~=@&t`B!N{})_o_LE92mc!b}dcqa^fFYMs776*S{mtd^NcJ(!Pp@8ECGe zOw1s_S>+%1ey}0Q{hL?#JXi<+Jd;6vT;)Qm1fuMz&;%E_mW~*pFMIt&5Ce7YgWQ^K z`GPc4_hptNJx3}lztwXJQK*qEAC2oXkuhQ&$r^XijOz|gl2t}f1V(w@y0FX;m=6yQ z6DDC1WkY(dVZFE`M3_DmmJRKB5{QDjVAN(E6mGP1W1ZuOi*ii$^Kg|ej>4G|06n=; ziB#ywCkfalOc%%qV?;tR^d5;3xS+)sA!-{LnCW3wTq+MVL$p~^*yW^9)w=6bH`ml!l&lMcOQz)k5^5J>agoO7_G#CFB+DQ z)7HkeT#Xac_dlKfY3&F?rgGTNAfI*NdYc=tX6*nW8BzZuLSsbXqZ$%WY>o~e(ZigS!uo{~AX*3k`6;vVG=FIRm{3YM%q7RQtUC?mJGjep15_ z+cXG=MJiT4OP(vemm%u0mRL3GoY~bNk@Kz6} z(-!lY>v5y6iEX_{s&rDM*}_RW8SXJvfRY;urEXNhK=x0)U^G|=V8jl4r&Rd0-$HW_ z5OWLG5K-`p_{`|PxHu%fa+ncVL^`^A0K4klM4LA5P&nIVUO8$2^2ljByEAwB?2dR~ zo6986j=F*LqbvndqWhyd0KxZVAN$M*bP~1<#KYEzf%`S6WOtx@_m;_@bs_xa=-f-D z4U{(Gjm&{}Ga7LPest@C zSFR%<1wX32ewz?7pCU1{I!5=cv!nGh_$-d+f#};`m&n`l5-GjE2y*)t*H=OI+99p- zV#b*&V^+No{GsgUvO62GcFWGW(Hod*M_|+qI8z}&cP}Geu3$H-M`QLk*zKaNv1Y({I8@qR#Z1id}DG)Ve<5O``n+v`Byg^d4LB z2SA=|OroIo3D2@cQ`9=U1nnZhLfK)@Zbzm^3Am9MR7Am<`v|(pJ+m{`%d2jE^YY8yW1QhVyxUm7lt>bQmb@n}#KR*y@Zfmq*Tr zVlS?vdzLg#WE8eU(ZT6VC?12<+Wti!)95V$BVWt3bhd+VqD1=DaQCnkkcbCZF(z9+$(XOkA!Ab#sSnEK+UpEXH z|C*wH;)!i!Z7(9N&pd({7Ven2w=mJ zIY&VOY=u7>AV#m${`Z~jX|H77Kb(I2ht`Im?vP482W*|-VadeH_W8Ck)V`J+|NdX8Wx|w;9O}%Z`~CYsR)5 z!G``X|JHYMe`{qi#_pIA3qa;M-(}zT+IwfkH`)YUSCpX1t)JEpKmYmg^Pkt+kXK;= z;=v-GZZcZXjIFp^8L>Us0M~#=4Y7HiC74^K2O&wtibw-4DDLzkC8sJaQ$#wTS}Ar z^c$cG1CADOV^6hyP?CFrzGOq=TZFb%)HH~-;2JrX+i@DCO z&GHG!r5ZuHniOGS=FQLQ2NCGfn^qUmI8mGrs-d#j^b%X_EOhA=BBo_W{S2->=P!(FfpSpW$$1(st@fAoAu8ZZUhXaD8%A@sMebRSMd#Q zX@@`~lmgX96oSa6?LL^U=`p*Mvs+^E4fX2#e@T^+f5QC;_Yx`I9sGTTHvRC%KI#b5 zArK;u-pl?+Yf}|hGZI6ZA2ZN7aY@&wl-LddW8EP_fR*WEpckr6CRVTn5lWuif`A+= z%wggOxh&u^8N0qGp#|m_C7Ii=>e*J772j2jNYo2NG87g(H(LhTd^%$xG zhJl|#Y*M(qhM#SH+C%@S*=a0At=|@F1-(zCpzG2ojVnq**n}DP6uJ!_RW$0CT@?Ub zOwWOmrM2y2eWSFpe-!cTEXz@^NFqYDh?614KU0)*J5eyAuIt6K;d!?b#j<8Q-VA1BF*rmfw|2YP%6rGT285{;(8wz55bBejh7n5>X&2p z0KKbqE0wcmnc_SXB(tKIYqA8$1Er>-Xqk<*?yRlbe-zoZnhr8F6I)^uyimZ$CKtk^ zMY6EFYRyQC_YrKr!OsyoGqpSz57LlfmmSgKwb=9(Gr8(CBvuW+ZYc7O?E>I5bVm!? 
z?tTcY#eJIe5cCvQcM8EA!XB+ZKLHIhC>ZTi-r`)d_ZTUNY%=|<@ubD;#vQqXtK)e{ zG4QciLIMzXzw1Tz+uxWY#9*N0PcFW-jjh{d>k6rRS;E#N*lujt2_4r~q{$^dx;3Wk z;4+jFCKEmFyz=%+%$2wZ*{F4q7)x(EC8}y8D6vgzj#%U&2Jvaayl8xC2*Cw1deS3s zmqRT1IjRQeaBUN-rYT^b!YkKbfrQWNiuR$|R}ol&tv z@Lv1o!Rluagq)3B7%vN@lah+00ZUOKoR1gw!vhAX?KQ#g6tVciRvoO(X07<{mlW}d zc8VjgM!TgTTytFz3Eo);Pqk@Wgr_2{9aVFC>mz!=bdO7`M-ijBxhTZyPe1x^_~_@K znmsj8+&kVP6GBkhoc(bnpS;z6h*a`n{D;`#z*=ZC0-nojI7OCk7{1(0cI!tSXfxe@ zlJ?5kG*G#c^zqPsSvJabx$*|XWNY}-WR0t$6<+WNM; z{uOLO^tBUCXAxCb-Zz8z`*5JLNnQ;v ze2qlbI^^=12~6SsA9*>eM1HvTfGX6hfK!U&<-mzT&4{JM!jET2O<=fk&5Z%lraV3|!Fe~AjxXx*%|NZACBAC&Q&iL0F3dfy}Wgo7>q%ff}*YH%5++oEAE| z@rrM3oXs*1EwbAn29`joezO1UF@0t#gfgo137!4zS%JKDGPoPsiqKk^fZ!nEXV zPPidYLzv486RWF$tjLwZ5jSz$7`~|At9ey0L<@2wwCm4;_=OgFJ((-9QXI=mZ@kkM zPZ`1t*g-fK3n*^1=%xj-z_n%w16DDn2e;>4DUepQQH~{JY?x{b8PsfE*%f!Sz`Ek< z{Y-IH`T34OvK%(EBy5Pnb&_%U+i#Um15Hw*J7Ag4g=K&+kM;g(3Qq# zl`7Z4IXBuwhsK)Mq8izHd=Rp%}sMOK8!un|(6is0l=$9}NlvYd~~#5d7ryu<{eDw3579ouw zCkqmyr_&6lQlZgqLxFT(6MKWMMiLAHKAz)+hMDzzcv9~{+Ckch^dXon0q7GvmcI4r zKf$n8D!M2K`SLvIqGJQhNpFQU;Vek<6qPdt!AOtAkt7)-KRL}%ei7rel(#& z1?a3p!rtP8rXWa_loMAR(A@jQ5@n?#6mN_MT^ab^Qj8(%`d0mK@XTsE`@??ylh8_S z=9&C4KURt4nDGomVjdnoC`yW|e4GlF0Yd2-uw%U1 zX|Mvr*BuKaz~OdDq!Qt4UnNlsOlgf#TUYpp96-_{>GAB9`*3=CxmLfYEA>-Rx1%+v zhXzIObB;YEz(4@s#*6C{nS){HehxXuxU1PU+|e{`|es2{I%5xv1`flx0xs(wO6tWhi@dKN`5m=@W%<3JEJ`%@b*A4i`_ ze174<>B>wOiIS%^w{PKrlQ39gW%>|F-uYf-g`6;@#oIVhryS2%_=+BJxY+nM@_NzX+^Gt8!WSY*mqL1h_TK5K&#RJ>cFZwbuR zYi!xT`f+SC74f(`gfQwLZn&Ldy&t6y%FTyzw(7yps!VsAbay0JODNbf4q3lUZ%xN5 z1ceE)H3?eIx{#)pV0hOJRauBpSS#GT3ocNTXtv^kZw$Xv+iMHSP+!Y|MXCD<@+k+4 z9^!Da&f0E6`^NoZWu}Hwzm4_wt1lsR0#^0@1~5G71{zlh=tCA0}x#UWz~ z0-iT3A+o39q_G&!Z)jBI1p|->jY}jZdkk?<68mO}UFXYD5Nr52j2|p3>4#brqkEM1 zy^EAM`ER6CSF)sK`gU+Tf+nW3H%dxV)L2X9@OXuv&DctXpG-2F2)0GE&loq)Aq35k zmtjMp`sGKiMyB`Fr#iMmYL=+&Lu@z7+mT~aQ}Ff*;rb@twbz?cXkI*#J^FDT*-I;rV#=c=@s*d5i}Mn{#?~9 zrzia!z}%j!MP^j{iJY*Nj44DhjWX99v$?zlU8%e0jdeZE?V%k*Z4&6k=@8x(R|g); zRJwh*KfLq?Xx`UYCpI9qz@k{w{%RFr*(Ot{`ZyH{bCp9c>ryHT_VQ8lSK3b9IZUkp z?4sjku$v=yg0`?CvCx3_@v!tCJ3nr_6ACQ=q@K)c;U{N7v|MFM=iyym(VK}(} z*jIPgOv%u6jFeJhc=kT48USGsE3B`ybL?!7Qi`TebR}sKQdySuc@m3sqcpRhAX}K! 
z;zb=U&h7`b`0!`8nf1o;nG8qXrjxmYiHF=^z2p3p3k=Z>*0!gdK~mAm5y&ern*Na^ zuID;hjZ&eD_2Z z7&M0y4!zdX2(T@0(;fdYiJkhRGAhW_(t3wOh)QmkBn0a!HX*om+9SivT(Eh?Jx@DV z)Amg9z#i8>8uok~i(4}AeA?ooA|K5)DQSic7F{TMCbyOitw^W%<_RHPW!0H5Hi9!=5DTsg)ys56)nQvdZh(yv(Y9-)q}w&-nCWplXPM)keR zgcXpD!2hm|VztLhh0b{!v^tqHg-+ZVz79?KxiPQgknc<)mc(?QiK25NvH<=Z^B{`f z>LxZg!r5F$RJW@pX8i}btl?xMweDgJ2uy%UK>D+|Ml$oo7xirN+be${R>^rzFD-_* zB9syQ`qt`=`HHzRn zF|YKHUSfj9O-aBVuT*`A-@Tn1_aXdvQj`o-*o+taWdi_wERhOhp*wyaEL zulhq!c=zZ#Fiy3+u|2x~s%WR(PGw3CH(b`?m~gvyFK$on8a38M|MZA|UMIR#b#jjR zRvj3GlvOpe`)qMdkWo$^0`iAeD*HHAA_Cr|RD8Q-k zBx}-o#IJkJU}cn-EKERK)lq`C0+4#uctvZ#Za+LGi1wgBxxaxu((Gm3;BOVP- z^YJ3Vkg~-GcmutdPbHxlDcGj9qCBdiw#N^@7raH`pdqvK1Fbc55%u?0l@fgc)ZH3$YVC!$D- zPxv`d(7D*Wj$`|!(-1JZEb+d5c9?U4$ZNEeeVuGmJtu~Al6e)XC}8ch5tD~$X2%Ok zdc;D;t!2$#pL#yXE=h(`H)`$7azSzy7IG6M2_TYPbvg|t8px>EyaYkrS1uqu=tkc& z^V`ft8jQERj{yy>Qy`U+>wrBKD-(7Lj0<-b+^4*N53^>i&JfG;viMGm_`f1kqm8cv|NB~6&T+YvkJ34DdvIN zyA9dqKzm7=U+_03;%$!heZ$cEcP+BQQ~?{vNM@vOJsp5dERuqd7S1aYQov=_SrrAF zZ8Sm6wPD;%qniTcILrbesaVhKsusNk00OLm_0TbW?N@T%rwOuMD!cO%;cX;S9v*g1 z67Mr)cfA6Si1MEBuSf!~Pg&Z-EzV*yF4n^h)##*nR?cIDgpQ!)ddsT&LhXAl6^%Vv z-#nC%Go8r*Ui0}FY)sywyAhZe)Qj3D(PZ`3&j{x7u@J%GJ|?}b5)&Q|YdvlB`mEpU z6LMGnA{v19ss^JA;(fJ@K@dsL1tBFgNh`pq zzKM~6g z^B!s0JGQhSU8>tEhOS#pHIq&c)zBiOYe90b59#TjHJdkVS%lcF zJ>zEa;Wc5VC?aCL+w%^w=tja3s3KI?uiy(Q-l+WhZ0xA#9iKIXh{0?K*bW5ppWh+_Z*Bc8VS*+Q*^%h89zR+_uKnkN=$I&UOm0(3V{Vt@ft@tdqRp>?(*3ai23=pI}R}kU8NC4qcJ9+V9 zcwtV#8dX2HM3)}nFyO&^w+Fa_q^cUW^=xHYK1wpfM>YVKOS4Wm(VbG0MUK zf_82~0bP;UQFN)ECcAFdO=~RR{$zm!WGmGt3CM!XH_;qWd>BdTLj}z0uk2AHz3Zz$ z4!FQNfW6`H$kZqwoAzD7-qelDIyJwy=!gxO7N_;2i2#Pqe2Rg`LW)#m(#Ml#G-1Ml ztG^NrH!MuX7JDYC;x`|%682HsB3wY=Cg_qR85Wo?OAUOmgi@~5Oc8&GN+xU<3f679 z5L%OLQ8uVZY9sS+EdnJo%1cvzja+_x{ONFD_5v`48&Inz8K^F&^`l~ZBuwjzzdyY4 zWgIL3pdw7_fF3ZIdZZc4`#KVzl$nr?m3i$KYufBXSH#N0O?sbq#A9&X2+T@k#t7<3 z_kjd7pbAsu;|HRO;Cj3hgwrCxi(jxYE|K%~IZ4t)3IMz~E`(RCIOS!h`PP|)TIsl% zGn5HtEjQ!WK7->z9@8QkiQ`jLLi!xCv1tI2qfeOYAp76X5zPMFDzP_1=eyp3hMdFj z#XU3*2}e$LF%mFPF>31u73QyGfwX=pe(2cnwTFu^OtIaaM_3SVs%73`=S1LEeo!>| z5xgh5jq>6(JnJDG^~R)6r5Y86(W^mNYi_u>nQ#I)Kz|;!jSY0Wyz)^B*ha{KU&H1; zbCu`bf;%h9RpXxyjW}xe=f{$3Qh8)1GQc>$@l8OTvV+=v1ufhYt&~lcGXel^W!x3h zlEa1q{kEp;P=EajkDd?Pra;n6Xxv&iQoxohA_?c%n{319aS{9*YSMKSk<3u%fRWDX zP&EZ@5$f+bfvI{Ob(b%Xywv7jN`Tenlf@9$!Y_O(tnK7(?QuC84g3)DyB?1_dVimO z1NPv3D_j*L6iFW-ll_JZ%}9m=N8HD3 zzGoM1zfdKK?_3^i8yJ9}X-46<@KWeGMVCV%Fu-{T_0z(D*J1r|`Aqv0Kh`Gp(Z~@1 zM>McVooZIOV+Sk#BC%s5gk}{ACb7P{T7W}uGF+gpJ3Sco#qc6v#FLXZWAZoT4|42% z4-xb19-)4c$>^6U+YYq^KtOeO)ZQQZ?$q@Xb=;#)U>y2!!;fh`2+;KRbtX7<#@!={ zmOXL`3(#4o3kcyH96qKp<9pw$m8@&#Gc@&S*nmww^m99C>1EUX-TV|Tgy`yEXJ6{4 zTAu8~&-RsnKyUU3Cv=V;udi~R!!EN}E z*dM4HDg7J=y{&xIpunKAac1zAYJ&OJK^#Q#F5_m7fxU@PEuWQnnjL#Pr);MyK;<2f zLP1G_6Ryv;-h~NF8UpuCPe0b#W0W}bF^d4G>C-2Qg1?a%+G`@t?Pw1xjBQKVNpS3RJA^(9CrCYE8bp`DQ)&N8&d*G!|>d^)pkvQ2iag^LJisQE?V@-CnN9# z<*-&C3zi;{GSrushmC{PgT=E9XB%>uMMtn> zB9drH6UeG6i>7?O9R)7c1>IDMw3=RqDF5^)^riC3AWA2q@y3ESI)=NrPCaBS>x2aF z!%T3W{7kOTP6Z+cBQa)n5t$QdH}3K>A2|8T;MlRGtwr$jLX~F~w!?SvwEW^aML|8{ zLF~@Ksg^7>fRB>tEof1G4hNS#qv$YkSP#D$ywrwkSgbXI zlMP^#M1^O>AiV%Q;*O2_y8N?Q z$EVVq__PZG7&@#Z6?uW@_HB~@Qe48jXrL%3qX_9@Ms-jD;jj5+;AW_f9xXwBA7pjD z#N3l|u=h~?6Ix`$N3lY$)={XV{<4SzL{S?mRkuOfv?p;FU>lMVs7;y;Z|?& za2Jahce3Ft5-=QvSFPYC)ZGj+?m9uhXSz#<@jqfGX7L+^sd4oJ^iTK!mDk#_*M>%9 z|k5~d~EXGlYYjJ&KNC;p~IXhXktVcJGh`?NhvORsP#sg+jxo{RPq{ILv zqnYZA=2!=jb*BG6Rj{=`_3-gZN9&>ukFF0eo$8G*75|Kl*WVT~<^{4iL0|XO)F;F5 z6wUt0(7P+Y#u7$2hCy=&!3qKIBZotto416`!o4YRO2Yk)9%yZe^*f}btoOB=%<16B z5z^H=I6|&WdX7{6c!J07iS-JoW)6MH-}~t)I4w!<$06%a-C#|T%1$~eW%52q&S*%J 
zW%;oTMGmvBNhpOYZ|NK>5{1YI{0zY43Ln(DQVdr&0bVEM;b+Z>r%{H#dN@%~I;^SV zN!E8tIz%9nM$E(|c(>Oh*{#z`8WylE44vG>1H){=?5XA(M@%60}wqYR%^<1qf|@kmniCbneu0A z4TLZq)eydiU9j^OJ*vRUBf9}XQOpY2_+jk(wsX6Z*vyFIVQJiLb9*Xa=nLL~O`-)u zlg7lkEZ6t~hYhkL9lF+-85R>jv=|x}gf_Rf+124RTH~OzP9tA^)N1SQ03p@)$%o)r zrGVII^-FcDdt1ZjrVwm{bHb3;BMP8_1I>|-%x}Th@X>EYOoV&B?;-q{1hoYhLVUr{ z^1ivncU|j=on_>ktS|?_ZA#$yccdrq6zFCV0{bb3JXX=#*jl(ZyY;H$KQtO*8-omX zXl`m3c&&=qhe1J)Jc&A~B~cSEq=}Z+8OSHCpx5l!pTTNhfUJPZ{m?CCgE9y5PfTVU ze$Y!Nz_@_6udg+bOU{7e964WwCrmo$UE41nRuHQO2{!i{33Ct zYK;_OpqCree)+(guNjm;*^5v?P)xmoP77h&?#Mx}O{{3W#4Ok1rd|kRTjwN0Z4_$g zWR@zw9uBNQH4Pkj5}`FZo3whX#chOK3HfDntHa@CUA?j@Yfc0Cq0m==Z{D8vRHsB* zx>(YuwK=6h*O-awNx?!W2AG*wS5!ROb_jr9{gmy=!x$+^F*KVkof*sLB~n_{Jv z@GM-uOq_rT1Ex`?s8Ych*|cZF{wveV9hP64C*bo$BV0mvy_N)(O2PJhxXl~at%+_x zZWg%B?7Y^h?#$6scP(>Hv=KHb3giSpND2r>YQuYjqL$PJD+xQevx#O@##@x@Ba~Q= zQtDO1y+5i`$G^Cxp177q^zoRUX_gmQp0hhVOS={%W9Gf=ac}tCq|Xa4vKEV^C@RO# z!hRCwj4SnVio~yoy7@N*Iohq&lGIsXk@LNEhnSPB!XIJ0=xG>Pc>6HR+^_C)ZTiuI zH{*2aY_GDfq9_`oMQdhdR-h?Uiy{(?7bHXc^E2z8(n3z9Vvg)V%Ko_r4WTdzcUsbt}9Z zY|snxZ$9XqVQ0%fedjv~2BhY4=*%hjq2OF~DC9Z;Eiuj2OH*A;taOV!ZqN;G7YYLj z(M5E#!n4oR3Xq&B)(JT-2YpZ}i#}!yNGFu(%|Fa2)CU9YFc;?2(oFlyrLuX*{v@qU zDB|rb^xAf4Ov!P_-051e??E%X~FF-KtH<$_=A&2NCZb0>(@{ z;ob6BxZ+uqf(ka0RKweXm&Z{Mov36dEuytgtB$c^PzoeF5^iBHO~Q%WN8UvKXVubP zl|a7AbJ`;7HB@)hejB(W9>Y_zdC_6oMX-(Jsh`&-h6pc`nLN4LJqjcUT$pe_7(CTl z>6_45cQOkv9h%u34r-0c3}h+x27!o4NXb?@mS>~9-Qw2qz%hiT=7bzLg;)n7c)X6Y|Yi7MX58Y&UKGn{tgnKGMCKB+&DitcBD8;sXl z&(JMk72I}b4EDMYad%8!2y}>7MSDS{4C&5bL4`y()LGB_(A_uY_~_wDRdX1+Q6XWy!mXGh%&m4z3U(cR<> zc^f_UniDGMX{2HI=I=;e%(ChPx10cF*bzFNtV2;8g1BidVRvoLmJAi@p|9hPpGozv zr*PE|U0u=pZ9L&Nyy+VS%z0&~N)b|PIOzLCFdye$s8G1Ypze}b@Dp6hI(rqC|Ja1a zX|U02#EEq+#3_@$r$ulp1IGINXxQEH^)lL1?L<(&J*qWd!MnZ2!v=S>O56}Ut?ZC* zWW#E-A4yxyyoWM2aXa~m|E$dI;jvrLbqu(nLoLxj1anle-CRf7a(tLG`_bk*T z_9;C+>Zg4cbrNcmyM;dihB#h)JGnuAPP@dXtZ8`HF%W1&$UF#`q|232HIoyJAVM&# zSUDtf;hHI4t3mFt)4Ee(DI+&X4o;z>1dr_szanytovTP^88sW&Yi(*vMjiyGk#m`) z9u1UXYB}J3K$nF`B?C*aiOx5X3EWi~7H(Yx6B?!u&%ha9g>1sT=L5w3B56;?C=nc2 z6vAOz4@mE#rTyW^M}&3V1;6?q;%5hk(=SG*Xiy78%-)Jhbrk^n>c`dOZe!z1ZoXq;bD)SF$*4?@Nws< zLuC>AJ-`8wVTK@d;@zb~wXuxQkY(MeMDarF?tPsg?8wM3_LkmdwE+15^pM_<|ZPKmb zB{aePVZBUA)c`PT5~N4t)bY zIL1ZqSq!LZJD=U+DD_Fcb|1#!k&N9Nz!!Ccs2Xjw*j0(&bak*#86Rq5TiLk%^U_1v zXh^h5@f3|`uY>%#R&JHNZ9>B^=KyQ^NKqgP*c+#d=%hr+2Rx;ua)_`AMyR?nD7Ia@ z72G{QESSRQJ)?sV&xv5DtQ(xq8RoZNp^$`%;O|ED4ff z9vMB&f2NbJMZyWlbW9kTELP$K&N1CqgZup1El5cnviY#DqBRcZm}@zpd+Qd#eukZ; z*aNefLSr_-VYcTYpGbXgZ1qce__B8kCN)uS5&f_-2`*fti`+_0-Od;DSYvCzif`p} zdZ{d*MUAjK{-Zz$1o(U(ceA&|#fphGu|Vr@D1Gcb2h8>eX4l^|m_!{CipEh%?`R6_ z^SnBRf6%tkN!M&cfjYXzZrh@4}SOo64qZs8)pL&)K*4JH-@bv%Xx_)CGOYx`ksV=)RW(^ zH#|hNaPB(91k&!oJM{{Q01Ksz z7}`Mu$0!HgVUbg-Ytk|LY~iR@)`725SM%NBlllS$Y}{xJsBWk-6Fvf*Ft^vfqofW~ z0T$tC3JMtJ=H{@Bq#7{Aj|X;*0~b0s2}Bd_H;;$JA|JJdXbe`Q=>j^iwz8WEH^d=+ zo(Y&yeUuAic;ZDv@uIJMw;D|8pPtYe+VpXUDJYEB`iUyXj?z;Jfe%W>wK*4Q9Fl;P zn?=M#^oIE5!hE9g<`5VBuB7ZC$t>RwrZu)n1kn@;;2HO{R&cEAw}$V>oYXP>OBBi^ zDZqM{P%6?Qa_^VyN+~Qt?jK>{zYDzS*o{ge+n5MI6v{HyvgKP9KY&ABoZF=1WlBCH zIPjShngBqRR5-utU!-?iWL<$jshnKc&$=TO!KLOT+&j=fK6c6?J3v8o1M8y#S2K#d zjj=OeADI03ABM+&<)=MIw1+Md@9OStHE|E99Z1TjxrI)}TQ(s!yM-IJoxvqGxcx#y z(M)hkx3^FeM^dyUVWKck9e_;!jb>VfF?oTFcag+s2x$~(^Gg{R#;fLFi6w}>Y8`E_Aq-KmdxCh$ek!LF-b!hw4hCf%ec zS-H_smxs{hn_AEqoBs0>QzXG+>4M0FfZIfGb_NiTGcsCe=xCjbK9}am2{aNZta_c^ zWYj4KRIjHXI|PmliPo697K;O}*o;?r1P3`Nt}NXNhGw9@@t@)nS-%k{(;6U*S^*I_a$`MSphPce@osm%00vYPDt9`AjFb5}?1yg=DVxRHcWQFx<%|-w6{4i!HI|$4h*e^xpSj_iR8&euX|VC6G6srCUk_OU{$&K#j7={)OTiDa55wc%dG)}<)QKP%c{RYROKuGC>F{|-2oTOlFNiw4 
z{09mcOo+IHi$#=V5;+;}C<-a*M-M>X2Xezd2xhnE^GP`vWW>}j)rKo1ODJae% zkb2|_ZX>+(-QhQiqxQ2e!=iy)u}e&F$JqHOE$@T~>FG(}xeuoVUJ>1aXi|`~p8qGq zpZH}kVINHt{|`>oWC0ybP|fGOB@^W#i}q0l(Wlhu8!p6 z_)RO_Cm`nSN#ZQ}O@Dp(vv2RaMK-P1m~tfA>6E(TD>lQe!rsKbyRn42KOu zT}xEmQa?hlY?}1#b*ZRz%>aMO#h;D?D12T=Y;7KfP7~wg8iN~!B8gxn`8xVwi{SmP zJ%T@UC8#rCETYQg>Mf{WZ11(vr6|@k`|cZIMF|D!`K}eTqv&e8@|(BKNt_Wv3KH?J zUa@7U&a*{??49+ykkMEVYxz@e0GJ7`RHqxY8nHfJ4gB_dgg#N`CCqMl7f$BN4O<3E z8o#Yx{P^JyhW$Djo`Zof3-zOR;hpghDPO}nD>A&^jwaNzxI@8UPa6a5+QF(aK|7t2(6FXO50QJ!d1A@OfwGkAj40d zp6?epB6cpz^4kxA>6l^F*A?EXi&0Q5r8g?;HahL&r^%O8=*SLVgF^L+J7%q0N@(D z@@H;!4w4a9Zd&?lKbubxi6sIe0qw(WT~Ph}hOCu2WMy`Jidsjbg_4V#I%v1>?IpIC~f^+j%zxuxh{f|kIPsj=JqFXEc8y;G#YN)L2 zj-(zaR0DKrhq;x-;a|%2M?lm?slm;CG^9}Ega#fdE^ri40I{eWAXXW8e@53<0JOC& zLNBHhaG<)jMQ@<5?oM(&)#o8Roy zwBMB}IGfMj)?S6$CX!d?DR;$GH7kK(ow5|h!Y=u)FV+irAmXrLSHVQ=%LLCX>C2S5ScdiV41DOBw}T32JH{EoM_hJBR^Th?X5_AKOXMp0V$iA&TZQ(&U1x@W zR0mB9&?nw2id!>jYq7kH1$aGe1UA_<^V0TkY(I3&`6+&sqC}k|ZHrQG!b*N@o99Hh zw|2YYiHU=!~f$&b{F z&8eo0K`~9`dx58KFX-g!)EKYhgLN$vpr?V3lI&GxA;dY>+&v&sKM;&IM%3w=n>2!> zytYPp8V#g0C?pW)N_bZMxyLO_&>#R zLZLpBaUOC+y-dOb#IKS2Ng+T_p#du>(#LP|yN{Q+64=3JfL-pHT)uyCsWyvdUrZRx z4sY#=j#Go&1{|K z6dwGoCC`-ZBH4W7Y|iexQu&1065i+82oCa>pCMJUwdW=z?zKWd#}00jopFz7AI`4a z2*2X)p4P_>G_(z0h7*3Q5PHsr;56qw2sfkIzaOo(N7FjS1+@?LZ!+ttgir_%b@iVN z->gqf=xySKk?DL#ZL6Q&upj{;5_M>eiu{mW6b6N>_7rY3XQL$x z{iMxX>GS!!Zi22Fncnx86M_dV*`t%2a$hfl4tv8wp(ux6{Tpvp1q8RKx}qz&N(K!4 zZ1}zJnpE`B9%pHixo7T3glKayjS#A%H54i!VKI=y%G&T#F=P^_PKa)TOu`^3miqbd zwOSosFofgNYF;O^g_hw3X!N<@&TAGLc|Z&n8w9tf0$J%ZsF!Z12+`@IAznwZhyxg} zQFwXeVx?dtlF<)wL#sL9ew><5axiNexQsy zENXvx3o?sj>Z-ms+4ES~ZFW+ELuoM_?~X~r3WoapiXF1Q^y&7&0`yj8U>qU}?IiM= zv^QF1aq1~Jb#MesQZEJK@}QhR{j3aH=6qi^?O%Tt7+$6A-sXn1cd^dK@Y*!iWbq2l z;Uj$pyZI-gFKNNGl7mjMOE>o5i!l^s!|8LhU1!&-V^+UAoS`8BW5{!GLbDA}>Vf*~lIcJRmaK;()%> zH-IT*9Qrpq3^Ej%)Ecqo?z-=bt(HXhR0fV;cnHPzn6Y9WJ6puLEHG$Wi<3H+Z7fckR$B@QM{O2hQ<6E+`sb^u0 z7A@LGSBU zfPVHPHM~QKH5(kdK6QnM?dJWo2wJ}Nmlk|N!InA^0eKdoXt3ppXw;3v8?)}`zji`j zG1eU`SS-vHcFw+%sy*YLd6I(^NJ&Rm0Jxw+>z=`9ADN==7B?IaMMb?FQwf58Kyq>k zq19@sFZzw&@R#XRpvzs0@V#r@{Qa5k@JSkl0uMeSL%GcnETb0A6$6(LH3Rb%9Vxp)Z@N zAvMZITiGR3t`w(Rbb)>~5zr%Q(tW{A_m`x5&V)u#UIq6~hRM6+CwaxQuGGeuY4*Qh z-vE?s@pG0+A*9gg-T;Kq`WJg;;*d)pv#>Yd2#Q2e>O8rAI%Xl6w``pTvXeurYU&=m zME5MB-cN`~RA<0?q{!flCnYhI$nsfGb;q4qBw}*3EQ0KrezprFCrk7WcAE*-B^~e- zpC(-kS`l%aCY%k<&_Y$cgq8X50>QBLP#`?hOX)Ypp0cJfnCk9Stzpu6Li1$4Z|ry- zhB!-7Bq9Lc<*PQDF{+~w_d8-`GarY@;Df(8C3AWoF8 zh;i>A$NTO=XIbk*yM6f1F()m24H$#u%MgPAY>>iTBL3t13p$6Z{CBkfNC zjz?40@zO_l4S0PIBmsvkmZ-=pv4_}54cj)6lXZ(``(XE`F_vKiVRhAo+H+)#CEhAz zX4W2KzfQ-sl;y zc^(8z(-;AJ)eh3p1%X9;+M2t7GhGj8Jq`S}-4T@4d5Ufli2kgX+n}+kYG(b!y)g~q zhY0DXh;hk4wWP>FA4IT4;byS0-8xG)eLYtdTaPVbo=O?}8|w8ZJ2L!us-15mUGo$w zft9>Wu@Q7wx1_nxU_QL+UoJKGG`n0$RZyC;Mqi4wcMc)sxsECLoJlYIRxQVwM_<<%6L{P>p)MZRK_2 zprzXFWRR$X+q$4@y<)n2fBc}lVZasy=twhN2~@FT2$|KOI4d0W&iYZE#Qvam4z(ZB zaoGZCqnR;jxMGo^J3|z-=(w-2HAq5(_|lOTczEHpo(U=hKh-A*AzRWj9X z&Ugk;97{NP3&&yp??bA@AL`!6aBGoiD=hYRTl>@qvl58n!WxhQT(XY{Y&k~2SqGmz zOzAU5f+5^i{(elMGhefGWqUYi7}tK1Yz}>B?cT?FB%GL9Ch@!+JF`WJJ$U}ii8d3g z?(+q42A?>&>Dx##K|pBU7Rs*ABi{nB#h~a+g%iYg^ZV&su>Xu^HyA{k5lHeJIn(AE zVdLkXIaD#9&s{%frJ!Ogt-jaytj>;WaLcU=l68ogo9OJa$pM$ZcscySFZc}hT6CY3 z{!rb}LG}^usDqY~vRB8VK#wo4k2mzmlI?K7_VL5lhwm1Zy=r`J_8`!4Pm{e61&U3g zQ0faxoy@v^fTlsn`fmh9-g}7$7AULcoc5g}%eDAw z8#OrTg)XR_#mS;WCwFmFh#_^GQ4Ea<=>^D$BJCIkjVLy7tGrPVe#{=<^{yb646Et{bmOJ#$bRS(x>~C4jU5)d{RB#^?-EAk-YgUm>JZ+DX!8>>bL@9g&4J%c zm5KijRCDyvMfhiHlHnZg*+1a^JILd;*VdOjf2q*sW z-`C9v*%xgpA0#jsjqZCp81|p}X0e^Glv~ec4q(E`>Ud>mSS9Z>9*6-YG-Xa(+c+o6 
zh8sQ3_|M0DXRC?~A44M~~HUS)80hHK^n14kls2mPkJXu|x&C*zD? z#9a|P4R9anV@&bO5TzY96QJoYY3O>#88=s>ZWj3O5Kfw`xCEL#SBSK(Sy}iD2DN~y zg5c&1a{FAq53|uL*0RKLo!zfpGwK%;$)Q+{VypF0(>V#s+iFK^HAH=_1}a)8$Vq@^ zmN*tZ%2AGl96S)m*S54P4_nemnYz}^;Z;)gtvDAKDlB?g{S5=Pxb~*mPg|yBdF?~j z-~CVVWf)RK=8)Tq^ROBAhbK>K{epmVbw)1#$?z?o_-8EYsyjWZUU^wxR0oxrW?t2P zjtCCbt{YOebGt?qk1I-@0pV<=M27R?_oivFW-z@>$OSP`5-5nz5B7CNCwIqnWk0)c-oLUqf zBatwRN^BZ`Z0Dyr&kf zh9+M(^~?Mq#sf$){5CoBZkH^lw4W!vCLK)CC?m|1GCIB(4*XyYGVT%1a%_!wkh}mN zYJma$n=#Qjo2GtLR0uxfUX$5YdGfAVcHymmT7>aDH7hU#kwB5tsrD2(xO$f0NxXkvYJc`FMB&)u6j78Hm+ zIR4F8*>AfaLhY(TDkoAj1c0!RN;+Y7bfreD6)0%LRxpkfdB}n#DbtK2)28<>npMn~ zjJGmM=@5#us36LX4dhFt0ZMwVICp|c`SVAu8l*@=!N@&-S+;rH=4;l)fY|ywCd14h_=2QNX9Wcva+nEr!bvm( zA;SGGmVHR5_JW)2bPdT5UZ`ccTagul_q)YnhbJH2Z+c z*X(Vn!RcZQ%Z?KqB8*}s1WaFyo&uws{SsSgPX?`IArZdwgJ##)ixp@tD4vrp+S5=M z-}bT0=r!WQ4%E`>E_h5~^KOr0hImfR~uW@~3C)l!TS<6u>hf1K$A2tvM zaWJPMxOI{!pJCyxi8020o8iJy%bt_gH8$bFM6xYi;bklt4BxV+1Vp?ApwbFmx-S6I zR|za%#NLlbMQm{8U;;;!fy$i$XlhiAiPJquPD z2eEr+G~J(vBckU{8%U=vjwoCJ7_Lug;efhcbA`VDy-$aq6qTsU-lAvyZqfdcT$@q4 z9^Qgd0qH$_sU|C-P1Vi$vSWVshnGcg*frB^c`esGRrlNAlLKiC(r*}J(z zIAmHK)6Jv388 zNC(AY-a%D9uUl0KtlRQ30q-Ijz_1$>j$EL+NjYg-wH|yy1Z5ns&}@Vcn1xXXtYX2T z>jeeLHhrL2c#FFRKlG{y3u60XtaxYbnbFT0f~5VCztVbjDpXX9Y<*qo&|j$AFo4%u zGbs*>BDMU@yBQAB>rm6@#`L?$km-58wDp5Tsk z(B!v>L{L3@Upz|PPZ`Y0KMIZdL((p(!Ccv80?Smi^(1#sk_oR z820Mvs{C2>AR8+Cv}a4&8P$PNg@zH9K));5<~d@JoWo5lNc|QFvb2L|+Op8Q|VMy>OW=igiE8_Bw2( z>TXfm{+Ub?b-#Wg5`@P~-xR$|7Y5N$&4g(i*TDg->DtyQ-4DbNRtv_pG%UZf&4vw| z2?YIZapuDV;MIyHWT$uUnFvo;w~9d$$*wzWagVDorEwExcs{KSRS~C>8t&lvbUk~w zL+;Xf{_}ByM?Jm)iQhsS?cZ=)Y}R$)_Z$=!KR2h?KLH>J>!Gp5|uS!7KuDD#V8oo1gv4r&h0w`GHi zNF=fGM$$8f=R;z?9$p+hx7^vkz<^sdCaPy}6mVuX>><=k^Zo`}tToFwMlIEDWdlE{ z9~N9gAH}CrcQAvohyfu6Qj6i?BUWh+3djW6D10qLL8GB`XNIr-RV^FSi95B6EyP#v z6Go2(AMX@{MaZODuWYDjqQev@1WDIS09M=lPh?jx@)OY9i~CMa)MKMdy>OZ|sEriX zp?^mNR^pSY6aQ{!u~hDyq9&6DQJrA=bE1kKlZJcnz2S!?m)?V$txce5=b?pGIiyA$ zzHVXF6@p50O%JSxXq@YQT`^=yU@8_%0I?a487gSlr>ihCp=z3ZitW7C%ShPF5LptK@*z?TiyOA%ABkD=>cpzKZZC#q24{EKq7P}9Ej?cuQgz)-Xw7DqMg z554#5gP41XM;qsxgu20WCJ!7Y2t2GwF-Umd;GF8I{ z^ejw^uXbhk#8;lD4R*+_$jK>kB!o#>U0pY@k>1DUkB0A>Q>cR^S|zW;)$Lzk64fbD zEmLHnnOfn~H@rhDV)Q1E2v>nLQR|Kgsgj6d@oegODChB)cIkW*Vv(B;yL(j)-3k5} zc9{0j3#{B2lt2FC;gA0~$c9)EW}j>nUqfr0-R(U{AjtYlY_{1V$QoKCiV&_`rUd#q zV|?27#4HB>Yr5K?ImuN-?gc9Qie@VpjXd`>N4?^9V6ANHp zwPk$P5Cpu`VVP!KOry~keuSz%%8BrV% zVG2165n$q~5D)cY)e%y6Q9nNU7$8F{PuMwO$EaO`Sdw49xWg$Uh-_;oP}DG+42$|V zwsRf=dpf-;X~XHdRtQAfyc^Yc;%h-JI)1c#h%fR%z@C8{N`$pK8 zg2!23mU_d~;VX;4&g;jSf)@@Nk<-5l(M2-8e^LWB$dc9J3NR!t^x6F}YA%311r9_ zE<&)4NQ5Dqs~}Sn(0PD4<%c(y*h|ITxYr4IEgR5@KaN>*8}v#5G;K{)uR;O!anh-x z5IswTdxM@~0V5oI2o7hClULQEDEyLP)^L101<-Dhn3Atd|1=!X?q< zhq_!!kvD3a4@BwiL)p^*>y`zq^<78QtCM=?2l5Q>Zt_xz-2#4Gc9o|{WW682oj5+k4;@?69bpJX@EYRpjd84U`k{k-om++Of(`H)qA)_I zsn!dA^PgpDCE9o&UmX9do9{GJ0SryONxe%h#bj!RWagYqLpEQBPjWCewl(9lm$P`F z+BIOg6eSUbCOR5H3284JH3XzTf~Pi*JOZ(7EEbHN4DI7M2p#BC5V0NAJjM;gVcAZ1Pw4CD&vEMrB5#6jl|%G^1Jj|1AN<-U_lE_BdYQn?^=m5gx;QhOt4T$Tl*1&8jVmXqIteE1IWqX9(9GC12i~I$ND} zjt7*1(1A?r6wkQ0Sr1_1KS$1~S?ISNg+O<9uVF{KGhKZt<58}6eL{D)Zhk6s6c1OH zl^gd$H`|N_AL-a`C&7;D-pe!V6|$Pn@1u^9BH!S)2Wk}n%J~Hcm}wp#h9$#6%;w=5 z06$>*8_Ab$d=RsphTVC1f64?kXYA!*2}WwN9Y-7v59DHMncNp_83inXgN@alx4t=+ z4c<`;3)VCi&$5$K)yFsgRr`B=04u2b?c8&?-GuhHP)@qgfT!tZcHsavI zkRz0LRzu!kEdX$+*2Mj}81Ddo>n$KW@e1mSi1c6= zbx*c6KZvO1j_fl#sKkGr=wCqu>x{0LD7faldTt*y*bn~k177{{BFc*}9~kfv!B^GE z&)#!@W;!*Mx<(&|i`A6UP2b1}wd!85qOG+@Y&$9sqfg>to6x%6DnV$%uF933fN}l) zJyPPxShwpHKHYSC#|BX0!DR|Nsjpz2&U38ft~z7~aJd;5L(B6!5Lm?*yTeg^_wmCQ 
[base85-encoded GIT binary patch payload omitted — binary literal data, not human-readable]
z_Xk#X+}p?U27p4s+B)w_x)Z^DA$VmmgnVZ&&lVts3SpzLInH??AZ&caH&c$eqM_x9 z6rdVRLdLjyyOnT1&^%||90zVNuqWJ_%ZT_Dt4vV;Wm#nkmNsTsLGPBZYHk3i;~P}c zv=T)_rgJVA2qWDpcboX(Y&J{_j&BxI0?Fjm#qm@f)N!pDI_^sQay(b80Dl%YGF>}5 z#k3NCzVYNfT&1z_A&DR%5nzamStC8r1$QWCL*Zz6IkF8^`b6CBxTDF?z}yZU24FqL z!?^e-St<@V;|T4Kh&1HNqbEo)j@3k$6kQ;`B@VYYwR`u|_(2r@M^+n*mHgOZ7k(z@ z)J3`IRr;TYc|X*}Q@Yd-Q3(g)P4NnlaK!+ezdpQt@wy+&=eSyuCk{j*agUesr4)#jUK3O%Y>`5+07+8 zN>s!`Pw8!RPh2tU1zG}aiosGT$|L{ zj1^Q?eyzm#Y|_CRTfj%~Lv%yNH<~k-bZgSu$4Q}S9#SjL#KjZ4f<{NH)$~=VM>TXP zBkgl@PD|b5l`+g5DN#?EB5hy+H$^8CZY`v36)b{R6T@uAN$EQ*CDcmCg?~R(mHUVLR|PF=2#a%2f~%iNkTT}>J9Or@ zm~BC&>lE>gBY|OnUdT^#k*C5VIs#l1Q^s0`JYwmG4S^8}aLPLRVfQ}l?mvvL@Bs!J zQioYy1OpahUZ8-^P$BXdOK^`*%KqUyOplY%MZy^fm#0K`2!d*$3YG~7igXBwQMC@u zMhBtyFt1{xqsUu8!3ye$s^hsV6T$7j>u&!tp4XBR-u|+D95@@h)Y5cyAyaHIkVJSN z|Jir>;IP4kf&{$VbMp7Fj6}b_$B9-kpxD61S45!w*rT2@Yc{^D{uI7=Y67z=Bu}Ph@Ln*+D$9PkN7I@KcDqHGn|?8wvEA z6c3@iN<3YOzBk=I)r2hNjc<%zE_(-|<$*r{GiAU+SJ*@ZTQ>^DI1_diFX8e7dXpaU zGy07(hL-oNb~eak^d~gyW2(p{1BgFwt@xQ}mur$eQlE=2wEf&CbDbJ8Mv)X)xzmh3 z7=)Q9_{aIHMR8>?9m967EDsSuYuUst?w;8o^X8NTRRkaYri;YgzWF{&6FWNw1N0Q< z>R?o@=>85F_ZFs0qIf8LLRuj@pbrSOpRoizT77Ghwfv3ln_i?jQ*m$>mj%TWLKc<< zb`rBM(4_+9oIC z1g2-Md7nLyVdhBzDINoO%<{4z<+aq7g-HbNa8`y0WovlOd}`>HT74*>7y`}8gy)o_ z(B~<1gN!kKh7EUBPUue8-}9GAc)|9HwF9K&oMADyN1 zI>_|mdCeyz%)(D_zP<0PtostRuP8U^hj1ZR8W6?kFj2}EHt5K*01tf53>M-Z$eATi zrY9Gbv%p1`X>e(An@BhTp*sK~jK!sOwp*)lYySeIEYAEVbZYVoM3imyF}(3Fbiz+8 zJ?j@_-6D;gP5+YnS`2Cx(C{x;6e2>(u=PTWZ{Mhdg@1wt_#{^U>)nsyKYyqDW(+_K z2VPZh7xVyye#N8}(q z=I7n&mJcFBCywZRp*|io$fC!!iEY6;?U!Kv5s1bWh0MxI_E5MS%h)XW+7JB_fZ!{E z`aXM`H!Y270bK>p0KhsliUIsO9#5?zXc5e+;%Z#LQ7i`;r%^C4huhoY)}ck`0$-F= zDH-+W(7ilYVWq_I>hxtMv{;+R&TV)-w>Lt>fnzzb)8e^c7lFuouWF$1im>At?xinX zIbov$*StVYbcF?q+s71Rdx?~C-+6WKnx_=qd^S`;;#%)uX9{D2A#wBid@NyCBormO zQ`vUF7tELLAQ1`)YE54aD!ri!jCpLykI0)5{^LO6cYYp4r?(v)qFJF z94USBw#(yNBq4-rd82#QkLeGFSYd$jwV=xy2eb@k8EC|&KJ#Q{1Q(iJDc%N+3T=cl zdGkxq#H2El{*<{61CD0wEmlGCJC4NK;`y-@>yNwj)fJ27ybh|uip5gcio@CjdvJUfjNewzUX@&9P~ZJ-_ubec(Yfj4O9aLXlz zs`y^mpCS>c{vp~2%@or=MLEaQ35*nh@JKZYew+@jT8+_s(mQZKwGj-o5^J63y0=mLaP(-8J5@qj3CoGSUR(yed{%&u z6qJDNZ`XN^PiGG2Vs75e{Y1F|deGrYoc+tNQ1cmQFj_jGHV7AwllgJm6(jJCnm zM*R@rF1q{#9u;qKkppRRkb8d(LBe)xHAcd0`6E{=6#!V43kBL-5GEcU{&x3`7;?MK z%T}#|nG|XWLUIQ&#T2x+u8tw)2q2rMXRUiyJW-rqpp%Q^YF^B#iA|4$=l}%1@{;kB z*!7kG;we~MRRVfcot4iDl;H0HLIODzt&jAN3Ime9Jo2SNCb=RiLr&=f*F`Yqo!24h z7mJ!gQy0Z@c_E-Rmh@TYax_5Lg{X)`8wYEwQ7<28IeUFt)yok)I_#DLJ5Uoy(6ZY| z&`cFME{S&p;IoWe#sl|Av5V2Y>`KKRxbXn+JTPQ{)G8)cnI1&Qe|!x~ zvXVb_1GlTGIB8E<(Bc^+t-Ty=?k$$^sxXr=M)(>AMo62Q!jIBh{_Kw2=aKAz3D@Q0 z4&wqM_F-EATlzG2o!jyvH)<1RR6*@F@oy@?w}~>%8SD-xR}jHAgUo{-L}5TR&*4+p+qbVRh6h7V9XJ zOPB#Qd8#S(vetx=QMs5O@lM&$WMFx#4&a)e7K@yMe$&$LWN-+}aBf-nJF{zyi_|)I z6{Cv6<}R8QX`!AB2iY>ee%@*%bE;yd1ZG@yL2&P&Xi_p9xJp>ykO~FLuhKetA!Y}= z;R^(Fhvc_vfXS96Z`ltZKV#JrN#TBc`SG|MvgNg|x9iV8?uMz#1jCNgrC#E}8L>sZ z%511bL$Hd_d=-9$n{x@a1uE+$(Y`XpGL3|WJWOSc0T^A5yc3u>=MI>z@Cg4+%tfPv zoaW}Ah9C@HD|Ap|oLk#_DtF}9JqCp$zE$Y}6R@epAPWiILTJimAO>=!MXaHXrMt4S8l*pel`D%$AjmHH;&I5M`Hb=T}CBF3QcEJNd)+`Mec?dIt=1` zdXbh|+Ky$XkAZ?k7OT{%l3gS;-L%sKfBJlm#d(aNO1igdF=c9I2yJff4NV69>z>De z{Q(`vPeu2tGE00IqoH75oQ;CAkj3?(rD#Ju4}|D4IT>)h7m93AB4~Ev;S$%DSvxJK z4HY=gL5T6JQwv)4p;Oa7_^6_Nk=#_qFV~jy=hM?8z0Ia0V$5*_94kn!R(MuDR+WX4WJttJ44_!fc?F|d*l-)jinOueb;u15R{}I@*%!v_iV$pb3160j zPMU1ydjKGn!__vrs66UkA064S7bMq;)x9hiFZdbp9#St#k^z)$rqgJ?T*$HJf|M`C zJM>n4pXJzBDcq&LIo%o zFYhk_w?3x?87f%ohOjh0lOu%Yt?W;iQ;jhX5bPw_H+AVPyciyz;Z_T;ifsWPZj|vd zP23CAs^3Z~CJ|;^x6fa-ge*z$}$=EXZ%@zn*c*Q4L6QmLA0$ARqS){-s%pSjso@8@(Io5>>86S4R 
z=#*9KX7JqCM++@1_8|pyuXAB>@ByHx5f%ej7Lz5Gpry%+>0X9J45OONPg|LxnCk7D zG2I{h^=R|sqcf3DNfkIl8$w0SUm|g>-oiN z2s-vaVsSd3t_8r|b5VFNhpo^0LfH}3scRQUtP9r0WdSMC9p@N8fy~i+Tz-D6w)gNL)$2i0Qc))AYp%PZhu^WDWX1 z0DnedcV;&oxiFikH;}>@?f0=p^891QqAO<^Fd)A4VLT19;6HV@e`Rr0=$b36(t7#_ zC!#l-{y@yG-WiiQRXiTqB)oJbq};LpL@Uvq)S` zDjKc8TKZQj=mC!Kw1Dg~YcI;BBEo4Yyo@U}!cqVpiA6)uD^217sg5D2kH9bevQ1TC zqCs>CF{?i;@Jt8@OHLGUl5{Zv9GgDikHs-}`zjcjpZf9ysi({-P!RKpuk@*%E6ltG z@&Qp>%2#{_y@6;z?eQ-}3&c8(!$;J4xm)rWS4G3fSZtQ?^lquGcfqyQ&W;4Qkn-C@t)tdCcKsaPwq2$@24gD#hruw!Scpt6RbFQ^*j&(#ZEv86scm5UELq2 zxJ^D5M1}z2Y-$(Qi*kPblGRw1zPh&(CeHnYSc40W`Qr46+c$sI{h_^ONF&7d@bzc4 zPBv35g1IYp8MZ*V$CX}HT(9-+$(9A6b<)d)#HS%tTA@9zD(e&uogaLNH1fB2lq7Kx z1KGjbDd8bb=XoKhVtokEYxjQ<9W*fX{4}htG2NN4m-kS$MVn)183kLu7{u0*7?#?F z5hhM0OhzGU?O*8B)2=knpk-8|Chvao_UBG@WK|Sh59vbf1FglpVOw$S7%uCxsVqe4 zqopAO)v~$NsVhfQ!v^AxvpPOFKn?+}EVslca5B;BF}Z{8ycC#ZLfnT3!&b&G9s@oF zk(ffCULW>hQXn#jRE$vZR)2?z)D;a>{5ypLZtKEvBsuCUJe8%cxBN5~WEWFF&6vub z4fi4EP^7Y83iGVpoY;ef*yEP$#1gS5Hl3zC#*6H*@wKlF4ySSx;nk-rOVI8_hqazU zY~tY|2?H*FLMx1gxVaof<4(&u3n40`0K_HHhi8<#rbrk9P)P|pIQ4Rhy#dR}bh(CB zv>$ct8}HE{f59-)z*TM_^r|8in4MVr3oA_BJtT$Gm^zXwN8C^VRUx&FE}2;=Kkj~< zc6v{e>5mb&&HM$RqvdXM$3ufJ}7`O$&*xus}P(V`;aO7E&{8SHtYsZ_L$g{6vb zNDj(VG3Ro$)Ktfts^ZJ^gIz$}G2q5E^@cuoDJs1~C3!F8NDJIWR4QkvM?i^>kqaIN zZ%9GB@q7kB+FTIhRO&@cpO^z`3GWJ=%(scuZAKp9Pa6@TAa&{j zWS$3ccC3_D>uGQ?PBVcc*xZq<4y?MyP$0cNJ*-8_1}w>YF?(ILsK45gMzUcbi2T>Az@?ge*awqbM*Q7{T z2_pBq-lo@qwi+y3EjHUUFe0r~4v!JiI<{%-1ZXW*>4kR4kD|dIH43N2A|f}EiBL}< zM2-YG=fSipkBTS8Q%OY;>cfRnZhA^6cBwcp^|+I3hXaQ7GVe*!=!)ZYaD)E_DZ(S{ z&mEW)a8xce>EhIGb`y&wS2;CzhsRB9Vj0vckNwZ-ObEJ^;NLn0k;=$*r@!y4iSLYwVT2# zrn(DmtQ z0$1e;#kS)Qv?xq`6zlX;&kfgEA$9^gJRVGx;DCFy#I?z5 zfdP>w#0F4oEFAWI4S3UQuU{-+s^1jR<5biiUucQEXRCA=SiIf^NG&qtWeG31^~6Nc z0O{L+Xt+zBIFbh?yL3Xe7QE2HYLKT$GqhO2bBaxtT_mYnH(=TiD4#io7Bb201<52t z7HpO)17Y`dq4qn}{aQG!?FvMfc&qjZ<(=C+D<3k*H!c31uQ=SU@C@H{O@ys4Im$~fcM zx)c5?*JlQ+rtI+KOz3Q-9&iOM++4_iL(6J$dybeQKnwGwLQ8|dy!LzAS}&lQ8Z1yz zsQ~byKaF3ZVoz3siX^CX;X*w}8Y~sy0RKG07pt4y((0~nVPW(j5L{B;{NXDN8^wTW z5KH{^_q+FF6Xgg{=^++tlkjTSk3Vu$FtUq{DQp(M*k_cGDm^ME4U~re93) z+b?xFP@=Y6-+dL~D5g&L?%%xqvv>dYwj_78YF5=5QL{pP%6NweHy-QFMe$1KQ6seR z8RWSF1Y>VaUxxnuM)$s#qggqN44g|S-R%%qz*_G8s$)PrWR2Ww?`Cr{7O03_)Ll-`i+gE%G7*LH?9aFmhk*jR6?<` zskCukTjpX1@5LKbswLA#7C4k>ln0J4#I^Hx70r9KZZ#~lB zM=XnV8G&ImfRcRUMe@6Sl$CT*YkuzG#qwcqGokE~ZU#b9>?~DADJi&X^HRmtg22?6 zl<3g%pqWG_&5;tfP==vNAvMfm(bTK}`4Ax?#;=IM{j8O#@*m}_AF5mdA1)6d#KJAT zaNz)q63(y#qxw^LZn=@?cexL-3S|1qZH*u0PjfwajA~g2kWitY@5zQP`*6YcWq0`~ z1F}yutBGvbT0QgWkcLqx(6V%D0S8pLVAh|>2VVG9@2Ej_?hW(sV^^UNVWdEqDn)&8 z8mFn@?yB3QmGIPX(sW(Br%6V!EvkD;jB6lfb%d4%#9<11cxTRp$}3WArR`^4(|vs7 z;vnoXl8}4qyX_|p5uE2rTeZ&|JD(o( z$-{U+gV@llXvn=-C3VRCQ|}ykl?0+oIDxZ7*lfR|!k}U5s)r7VH)~hygELM= zfuW)ls(|9(?6_#oz;j;RV{QZ;R7;d97xF2vr1CMoKI`Z-;#C$Z1VTmh!=r%4_+d

    r15%>T_NS)U)Ji-wk#{lMSfdUt+-nA zV78yd1e#2i0gby!qQR((o4@_B1v1qN$S8M@gmp+QQ z5^zHy)%%&CK#K&v9u{WW&))u$=<-+ow^e-mAN|26V_9t;X2nUEA62=|zCQ_b(t$cg zM^?_NN2eT82N1+4IVjq2hn7_0ZH)`NK1#Tvned!hL?hgfOLy@?xWb3DiJ`aK)A&SO%`EzG?~u6L1(R_nKUvT1WGV&iPeQ zOXTW;$Y_4vU+#~_U6RpYF(T&9nfBYc0+i1UQlVRy-I|L^EYR7mNvQpt?3O26|6TQ}c57c6qE=_)#7cmzQY})a9_)%BtUr|Q{96K?SW5Irfe&=Yh3URqbcWF!(K`5h=1(bU-5&c>eGYUKz&W zj&D_(W!+X=Zy8|F#}rRcy+%VCfoz<4tNs_tHL!$f?I|X4d6UP4$_XgYf;l{br2HF^@517v zf=cXUIpxvVNUo*n2P$42$B+v*%SFl` zxhs|8UD!n3vCux?l0RmS0J%2f3ZxL@ofopw_vh5A;wSgIT32I_qIlJ6-p85*l(bA8 z6H=fGWcpI<}xMWdS{cbbWyF%N>H#q7yb*En7{G$y+s^N;5s)*CeEcZ*W1idmm%!9|@ zPtbkkXYG!4&L;SqrHJ!0Ixg3smc$k6A(=_rmy8@%@WDgxB35^_GPga2d9+_(gduGz z*#n&XI22eQlzdGJ^*-UQVNpInT_9Zuk9jyk6f3nL>D#$PRE5sE9lE)vUCH1~E@&?4 zV?_FKe3{#$g0C~I6%_mM@PEo;hNYf;Q;IPYOy2)Y1*~|`+2GnWST@;~B!-L*RsJe< zU&9{VGHh2T#R~{+a=jnVL==vST{7=xm&yoR?kJ=aQ}ko+EaHbHYwgPMOEuS6CG{M& z3emuV@u-hKc5Q#`=PTd$M5=9xb0?J>^gG>m_!mpVWubmEKC5iWSamky1GjGkI2GB$ z>*C1#lU#JM+M#C6oVsb@~V^2(#Ipn>==3~e)Wkt}g&U_+8U__l9e1&W`h2%no=p=~?lwXfE#5Nd} z*!7B{o){|e3JtkC^S-ps!lQ{$OZA=ba;v$p&Eh`BoK>9CPIy{1`&Z;w}`9Q<@gRLTf;%2 zj87~U_u@t2lOVFnjy5>la#Rpf%kcL(c=9%4g|#^*bi$Ns%oUp{cU94Ke;4yfNTitX zWzQ9sIym*B?UE_Ri>O_7khiibiabkbkkV^nuf<4&-VGM&K5-blICHuiYzeCk?xdH6 zvA97bt=4qf=X%7gDq_ibu%)B`04l{xL)W(kHuyL5IQ_VUQmL!Z+%J)_ydHf7zJXDt zhVU}H8n!cb!#gif%V#uQzDU^k;Q~|q5KSPC+y!V>S)WVsTNaj# zF!22T@~0?SL--tIJ6=rv_hJ_*VW2E<0}{P{*l|Em{&`!TQsJbnt2_d@oVWo!U18JxutTyF%OyI*>)@B6+GXcbVQQgl2(Z<_RdX)p;zV*XW|OYd#ZeZc?h1k<7tyXf{)9 zGvR$Wa`^dPANgRC&XAEFLE!R96)=8x#?x; z?U31BPGb+3^JH)yo5CeDE|t@;ibO9Fs^@W#TBfL?<+ZQ&xjxT8(XH^}>%yBLQZ`*k zVt5J&RpvHB!fEmsFWI@Pg;gw|U3SYi5cnR<7PK3RCcXq(-g>Jg?{BGl>_HNJuhPgF zfjnNPbw-6r7AjVP18b8`34B;^IX+?mNIb6(RVc=6Z!-YijqYV=#Z}rB)eCgRPrzUQtg5zUP(-)!vb!Qj z;*TTpV3oFaCR_zlH(nPk1LVvSj=(EgwBYCLz<)~}Jd%BYcvzoIRg_YM5DyKB&MLeL zyv}KA-mg{lS5uNr<$JLqP;(n4rbNTZnQeup3r|D05fPkzc(i{3>d$-$IiMQOo z`Fi&Q|M3}K?Q!N5t#t_@+`jpz-PhwKbgtx)e#r!B01QZtwb~qj__TmB7yF_6H#Sea zo)X5j$J{0kEizp&SoNe#GwU00xZ<0w{)A-=1qg;hIK|rqDVk9h*2y|h=Ghfo1AlEt zMUaM=m$4z3^12)2E#xe`EV=0lI$V@KAT(iazO-N3K(Sh(QT?)@n${gyfvImxmD55T z#*0a_I227SDTH+>A<~(P7XjTjQ7Gpd&;Xi!3E{)sy|es_zyQO-v%#zNeZsM7oxpU{ zd1a6(0vxa`yfT{%wG=Pf1$)d$JaE===}x*kXLazz5N${rqxy8UuUN>Y862ub8FrYZ zvB&%B&r7W3K|$PUZiHTOgb9rJ#$D^kjA?PIg@sclfs|eui#)v6Q88vZhULQsdDQ?2 zh#8+8i z2^&ytpety*haz%WOES|IZbxRA(HZ67*-?i&_OL5A=jK=qKV9yj<>wF1&pnhq1-Hv$ zgQa-&xUm;IiekQSf0a~vsRinSxIGn}{m8@^mnR;NnOg|%5h~w@g{=PWa70@7!qO2T zy)IYMj7YCa7!+kh-L6HyaHV?@m(16?_q+?Iiu8s0k>_6wb6$*?aU{H}&}^l_4r8_gT_kaO~$Df-GWWnTfF&PbH`B49qEBh!IkZX40gP zWy1AU{E4fP9qsx8eJt0@@=|Ep6EPg^F(PluuAtL7)yCo;xqU;g++=~sHsY#^m68+n z)cR#}tmx7CAlHRFm_nnl#`bfD$VRul?8iPA?B^U;@S;av$`vlb17OrdE8qRByZu${C%FkS_ZKcO z;T9>!n4QXCh-FZfR6=VZPwJnL-W3U`*cPm(=QO&2t5&X}C2kcGb<6R>NyEuPr&b9% zy&-r145u`92CHRuTHSpmytj~_-^DLTyohfwYtklSrHQG3%b$F#x)HD9Z|kHx_-|ZT zB-~r7md+&2G{&6Y?VO(Om2C5DMD5ue83@5*5qDX){6 zRGf;2g(LANFwBjTtY7EqSh|v`ccng9d{l3eQK1AuNd_g>44e$^? 
zFG}w<1ISF`F99b*erSXTE+#s9EIidEE=Fnah`loL*g zM`x6#t-y2+ulvRW>S40t+g1$XsbJ>Q?5yyj+w!tev4X84S2_;DgHO!qCwvUbZzW9u z8s4w#JoXul40$lb_}zc#ZhsbI?Q;Z_ne4Aq0+MA@@YnObP-oDy8y_`-1&7xNC%1{W zM!DK^NCN-OUq)q#6=pHqr}Y8$XsHoS9P#o*#ZG{{V$l2u4jM~LgFF#=V%UlVbw>`4 zz58`{`*+2rk*_S<{&jVY0M+Dc*Nx;S;f&sNxOdbD!MYWuyjX2kwf`LA;X=Q2&^%L- zvkK+2wH*Vo%S+^n=8s__{h<3%Z01LwbdO?Uc%4_oq)NDe-Vy8&66OWm)n$mZ#!_vz zzIWaTR+n-mjjmR$W5SMBOahiEViWI`5MkpKP1D0>CF!L~2D>!({9@!}Ox=$i!eKz> z^MaO@+Uv)d(eR|3sqUJEdc5B#c10cGNtqns!&t+3L?l7~c z-u<%s?N2=2wZZjZB&RDLw*(h(T8@QQS9!c1W$!hr9DN@r7fR+RDH9qpI~D`&k-q8k z*(wsx9Xy|ki{>}#SX>s497`z+u&C@U@B`0Mo#jj>5NE7ZpPcKkf;ilgJi>UQL|+fgTVSwTY%9 zPn(cF+NJG8Icz)fzUf8cWj`SfK0fgGWHQgA$`$Egb1+3mh@}a4gVQU@mc?SJD*Gse zNI#aAG?$mT=L^)0r ztS2<#sx15tZ+bbOhF?!mb0JrnY9-JU@`)7huGc-Xq%I*|F$j3b0;~^UpjZub-Xy(A zIV!AbIWQSnco@DuWve2>0d{WuxoqIM@(GjR^#PZaA= zARrYYyi(8uldXOG=JN`yUE||te5BoDT`6L>+&%GAHo*%jIhVKx@ab1=iG3tjXo%-0 z?W+tAPJuNUa>c4$4)bDM?*4zu-t9TE>^k%N8yw+qgx`)@bdVgA?CR?38)Ha0kw5|H z!i~tRDmF!hlt?5lT>ugdAPZeJAw@_tejWR5=CMrbYK5(lWZgAVL>2gd`S8 zEd$KIV)MN<5(i4ekaM`IBL#5@xZkk_4V7FOIU7+*^cMXTR%Lunrur-=a{qewEg!?; z0-nHLSi&T^$~5lw%D|i{d&V*MWmZ*0xkY2xm%Jjc9G_EWw$Dzfam;izDyrl;L&K4w5xqq^k1nYYrNJr)3@w=2KO48-WcpdCN$r^+S0yve@s`?!; zD9_+Z>*zi&{;v8=*nK>~Aw5D zcr{O#Y&U_fn-pLn;hury;_LROZDpHNB>%*u!Rq;ue` z$0C$#YV=9jl-qlz5|={(F*%p<1$maUM1%JLKfIuQ*&5~Qjp6TUX$Q84OeFO_9;Fh? zMae}Z*hXVzc|ax#W0FuCIWPoUg*O)G1anknyu}V4wI=jr2NKdU3lZXM9R+;6q%q-( z19sUUTqB{Rmrmw2kz{}9uN@HDmusCyqTEF2<0ZEShu>(zSudq>KT;ec+N4gs(1Dr} zUEdUZR6Si(N_=tbeBHqzwxM!$G>gJ{Tnd{WmJ*XA8FnXjpcOJ$8O0^VG-B<^P_1BS z2|~fwqHltC20JMCHoiX>s^GaLMDlZFmm*FROWkH4R$gzCu8#41MhK{InXsKYEsy90 z6W_2NCTBh_(j;XSUNV+7O#|`s?o)Fx`UmVtwKxR;LtVFF?-eBl?p_p25&xg-J1wpu zWj_j1`&H;6vcW?x{;uQn659#sAX75DGH1{X3kg}Z*F7Ek@<42AR)Y~_4>UcQN>DCD z0RygP+yth|m>h!jCPbZdram)%RF0bO9AMFvRktQO%sE%)ZBGjksTA5b&qFT`njuAb z@;Eg3jeh8pB@gIQmGA@)Vly&B)#9_3qa96f>rsFBC-GFNZtOZgbr%5{^u6p#Xz_BP zw4kD1cDa=F_sb*`*Z}%v2`&!`u}X`J)0~3#w0mkZ2bR@*Y332cXW#vY?(JWim-GBnZ@In3`I`nsD;npb6MArG;z>tQ8+SlM?V!W0$|_!iP3o(A4J$j15ep@3_=JxXSGK`M06K>W zU#voRv;I2uqcDM=ID;z4+oC?m5a1=w_`r%+-MR>C1ix|A~?1@DR4xW3Z76blj|4YuF^vB z(0Bi9_x7LsP>i5jP)_Q;0y)!^^jtK;|Hkj({J!gS?E7ck&w@RAV8SzB*1&jblXjZU z)JX}dBs9q$!&u9eEf|JH}-dn9le@{(HWVQjJ=6!9-UY#JBp zXycnWh~0~lopS{1;^-i4yfF@EsWuu}h_BF+jLK`sC-^^n zp-(jnlW$B6L>snCwKrG*QEo|So5ISjDkhXP_(c(Gga@yySJ4Jq6$v^*MC2X98l%~` z3=Qb_^|O8~ zMhBb(8d=Ibo=c)(YlI!Ar~IJFZE)kH`zl`0s`zToD$?$1aw`)mKGK3AM&C3bKpL@_ zc|zYV5sPP?6@MEZl~HCz<)W%g1nDHZLD)gNPR8R@5J@SzumD?2+b z@)&eax%y|_cy4==3CR5bQ+HV~4YQHL-U9+E}^*kD5GRxRI`6bOsEmN1=Cj6eLjg*3%-JRo+8RlSdRN z2WussE!P9lQjqutu!9CCE{?71W&{Mow6@Yxq%Wt3lS{y+&%uuM0u#0pw}RX4c2D=? 
z=%tF-ovC>!s|iN7aYp_b72r)&JD*nCR1NYhlZLQA%x3uHYN95SNl4io*oMWWfCGdr zlVTBz%~6-8kVKDViBgs^6qY2KD$1y~Nn012EZaBWwGr&CRU353ZNU{58gC^;)ruJN z7nmkOz&v9}se#%}5pgVQIklfDA&||46c&frDWoM(0J>L$m$ZO`@hTppmP2Qn?rV_1JZ|OmLY*OtQEvS`b z&M?}GrEP7^nAXi!h2aIs86hgISX`S&^p}Z;B91!_N|GI(N7;^_TNf4*gd$wQ9!(8`PyFO7l>evSd&@rhPX3HE1>RocHQ0{s$9q_R#7 zIi5-L8I!M#GMSY$d`x)-A{&?)B%d#*w$YyO3Zn2tdz0b+4}?oaM^*SB6J z2^33RLsoIf^DT-4?>^r#vt7-IHn2-SO$A-CO5|W4lZ#bi2oTe_^xxn8mtXyUclVF+ zchvWs+~!#KK-~`F2ZsN4Ww23W>v=hzfHvw4#>!{sui*@H*J|VydlfA+xh5cNDVOou zgUVE4^uGtAz}AmqQQt^HQ~$Af3Z=1f6D&Df@KE|`X*slxK{@cNpaJgT;bB$b3eI<= zuoVE9BC<}F|AJi7x~0ay%$@tFuY$(@pJyl|si9Pr{M16ng-Lb}B3 z6QyoSD=JczP88V|24|+-L-J#;CPw{v%QQlg#t~@X76;{T(53{#dXWMN7Wlw#-EBae zKfSFZnL9h;kTTU;g(s?>C)T`Px(pA_6iY1{{$A$fVM#Y;;bN^j1gSwR&D-hIpHc9zF>gnd%$eR@cdUO>0uVkFl5Ur29^`PLWBl*Mf zn8f}%sDnmq$0uF!{D}gzoRdTSr~~(G(rB5YJ!Jr@~`qeTF!cIK?yHtKJ|YJI`5!ngG?tB%NHkV zj3iqql`zkBvgppzds5v$*D&4=f<(x~CqU$rkrTdJiWN+iLxPNRL~96uy3~!4I@MW=8v=M4+*MM` zTAf#!V=x?d&tiY73zYX#ke>UZj6}kyXpzMoY>uD%huzzMjvs|`frQ5)=zea-DXOvZ zhaXxEVYX;Md!_h*TiBuj1q_9JD8+>=U(+qDXsJ5CY}kEa8ALl5R#U6tK*o+sI7g#A z7iUL)D6+DlZ+L!%*-#$jqIeT9)@{5}7D*BLBTQSz@lI#7?nMN|S3x9;$i+KQCDy!P z-3+ipb&P;r9K6z3~-&S1atCGv7Brt_*m9-bgt;TljFdni!?S}E+TL7v)i~}}*j1uNC^r zKIE9z0XG>Y%1*NHckhMjx#>zxP+!7wdNQ9Em=e?k`C&kEf>bU1uz_Y1h|DHNXRG7Ry{S6oT%nfU-~5{apkNChXTm1S6`@kzV;Fa;VKu z@vpYIUkoXlq+&W#V6w)Dp3i1*LhWNqP*o|%73G#RNf#18jxe;}fr$Wh@FXVp@aH?4 zDBu0<|M&lIZM6RXd?EP$z3$tl4+Wd;*t{^wePv^3p$7qFmopD!u=>N?Ng?;OCYWDI z7O+1vI$j7!Sji1tSwJcYF@I(s&;cJY4w67O!(YuiKkYV?8^P*ypT3NnA6+8k4MTw0cA7W>V{%mmix5d7xmZsw;~Y8o70Se zs{(ltVu#HO)`F{Caw0!(#y8VB{8qIr5ESL#>|+(m;r3=ntx6Tv_0c>>E{H&Qjn?e< zVQDf6d|U{kl5)v2)NI->V~ADL7eN)$--Ob|4h4k>U5ziegRL)_W^IyR7*9}a1m)}% ze9Fx5_z9>QR5+fl%LFo#HI#IU&$%XKQPg-XeK&f@Gq30^;=VamVR;j2OM=k4kF0G!QOkYFA0T%9cKQFikkp{fNNU^)-VTsHAK$coAZ#9h3HqUwDCbzyArG*rhZ*A2{jhV$v15j>{-^>NZm*%$8in`M&QWM5Q3Aiu7sH6PsYvDxSR=?otIdO3U?` zBV60WO)rBy0Mu_PW^#q=7&0<^==D`S`x8uJ=LFzxgB3)u{}$H&x++bXv}nl?%w=N7*3ot>D;o!s(5SB z(jvQI8c0_2ag-dwiTJ8to>35oH1=;8m)7c@>N($I@3l1z{Az6Z?#+h}x(`F5GxzW) z$cMbu)8ZVqy2Xp)%-y#Xi6fPd+%cIX9z?sXk}*LspU{VzaG~f9k~E$!Zq8wdyL;Wp zhd`dKigwTl-q+@Rze}`ptN=rleN9V0DLN11gj^KR4uytbYVqWhw;z7gedeRXGR49x zNX^|FVsRH4#vYZT2n->?p%%}A3NklRK~k8>k0ud@{7?=o*g#_^C@*!gD?9;k8}tVu zRhEBH^cjDPKOs)l*2+?ul%O_br{=s0PDZxdb|{V9x1~5r*2WQ&+BgmjK`*W)4Az`` z4ed0Z&XxT%bAhn52|U;8S0b&BjNa#&m=*&{WvHz=DlRr(2PX6DSHQsV(aZ<4K83y% zxx?-)6Qm5}(ttCxx{82(FK*_C>Ze`+HdTcT2m5{-ZIEU(;xUJ9C_K^}w!Hg9PRZP< zS?g+^jf>(pT?*$1Kr~ab#*0}Zu*62_)*ehh#)VAq2Tp<;e8MV#E!lbW>^w|fM~Anp zeZQCsCWxX~cKuXnYljV#l=s^gpp;)6DEX(3HfuTGm#|>9kXnn4>Rz-Yyv2F(WoSIC zQ>ktyH@Ht-sFn(odC2gY7m!nPGg^?7(`jZl8>yDzR##k-D7vE$+{U zA9tU{gNMBn6z2f$HYrJPXr^9r6fbMN8xKrvP@De;{riS_W>?Mf{N|^ccePAVjUuzm z#o$yGcd*Yn3?e-eKkq8K!8P$)nlnxGxEeZrE>&#qaksmls2L&)-E^HfAktI`SZzuq z*9a_0N>?fQexv)f_hKC_>UrfpVJZ}*f4?PJbHBzx`foG1CP=S>u=37()HN1MJ$siB zP#Y+zs7r{j)taldig8rzvZ?4@mGCt~)44)bKkln19`eJNVg(%=cYgDJ@>}C2Qy3kq ztPc3g=YOAQlkN!TjH0Pn#l3nISVF>y@7)MX@bpF2U1JBGwMbwrLfI0^%3PUUdBB!} z1xI;iq%|7HqDRb0iiPHxmo_E}Bd_h6^6NTYuVug;A#W!Bt~3C6GtO9v+sn8Gsq`>N ztuT1j@;Krfa=Dl6g%&F(5<2#@6QZ+hT;aLA>Ow^A0yJWZ4|AoZ_ zAxNNX?YT<7l!G(c6Ds~+WSAhq*xM!68gQsrwn~ecd4WTOwcs5gZT*%J8=|#9TvmR# z%B&U4hl?VYF6hNHIV0X#+*R>Do>1*OGHw0uYh|zONX&A-l+IcpGq2)QzwQoV4@&JO z*6ckTBU#RJ4razazS_*8kKQ&yh;)Eh| zg_=t%36)l9{1Z`(AQD;iR|P#ln#>q<+2*;$)5$XLl%H}&@F#|A^1JvImE^4Ln@bBK zU?eTerd85~;Oqoxu!l0}x4P5>S9gN;*XpihUR%>t-lb!sP*|+x@!kaDy6B~%7~0ot}WzfTUg@<8%QeJZB#)76-pPy zkMVVbxG>4&Xhu^3RGH-jS<7jY5skpYH^Z$0iP6YB*h*f-a`%k&`_k27hp3UIRB_7z 
z@vB=#AQx%*7A`Vi_I>{8z8`PmS_5ZR`STpz{UlDhiPzdw^fA_xw?r5GZEvCb{5vwWgS9zcvFH{s?Z~lN%o&6^1iEh(9SkXk_6hgnNt{BQ~c3)#^;*uB+OmY#wQrhT7em9pHuQ6tAG6q`&@ym!cX)DrUSCe04Q7 zt-A64;`%d>VSFTiF&}nXQ^6M@&V-avZtkdz4}&%W@dJ=wt=S^sPyKKX3$>AJ9PDw( z*sboFc~W^=r$vkGcaKX?$!%P71cuL@ZaUm2EHB@J+cqKL;`r>9dz~SBgo`06ISxo$ zJg+%m|mC3bKvkoNFId`(b?0G~Fz>iAcSuwiE+E1jCg()KKx-pMIAQxJ(v( z%5{$?Qr^|4o|(WSj;_W^q5lUVR6hKyOGHmZ{_L~(-j`H*$@=gSu(u34fCQmd>#@No zq9;aTmwW0!jw=}5hq?q)jc8xXfc1i3A-IQ{?nIjTG(&d<98K9Hj`P zG%VXwG>K9*v*FEz@r`-4=nD(GEb(qHcuB|ngRiZ1DM z3w%c8@hGj&em-9EoS2Owvrdf^qA^Qn=9|Gn1+iPKCYM3l?e)xR`1q=oBh#CrO;|EL z#$4r0a_FssU;3~6V8prAcGniIEvhsWg4r&l-m+wPSa@eV%UMvdS zSlU-sP;dk0jzi4yXS-rlA9Lpg;g~#7|LiE%{+Q4;=UJTmYtrImOcp72y_mL4fbuJ$ zJ}pzz>~pO;6yT_MGX{8p0g+Ys`SsdT_yr?qv8 zEEUP^m`S-w!QnL1PKL%@#6dwWU8MW+tr|V81-zH1P zk69Hj&b|I^LL9P5(;w`|4EUO$awcg`D7WpeVkeQpt;;$_pi-9O%gvGL+d#OL zE03^_MF0kI@v;$6LyBJ24m?~B>6MgLPrIM`vIzlqAG219^}1Lmw^#D}-6pLSrq%96 z%1{Cmxp{S2z6ISiWwhv*NwcJa!ios2jlF?M?Z>A?tdf#<)Exyk(>AVmtCzAHHURQ~ zMdH~CocP#VmR+fp7*qC0;8S2gTtj3Zd1o3<5Z6dysAhcmBPw{qzAo_!vPdc%&1?H^ z3{JHHBv2h)ENsBSYG-?uKYdSOK;=$BxX4tL$@%Mx`ijA$@xRyk_Pzws>K4G?bl6fJ2%^?0SxS#8;EuL(mZK(5 zy5b5dKhiCkm@m2!5{3e38ly@)CJBK$_Jcg|Ddb69>!gqMIaOstCTkcE{7_0$p?#lR zZHzTjiWqm3Jo@8xSGuErHeZ0BfjmB|zTeh#=Hu>Ul)|#^{CP7r2 z8ZwLUL{Q;g0&v9Q{31L2pe;#vESA(ObRNYaSkFc46MYw^9{beP424d8)GGF5>*6O9 z9@J-A7I;R$t_{PD?SRAc7(ZkWu_;))z`F>c*QaH5nAk!!U$=AcoAwaUQ=2j))i<`` zJj~{att*#~O;t=zKo_#dUlfMgmrIfwQeF!kF~!?U>*#~+*2^F3+kyb=P`C;BnZxAA zimzNqEVDNs7$%Rny_R3CD$dUmgU|#>5cAbJh68yBO=N?JIxaU39wY79K;nR(GWk5r zkpukY!g|=^#1o4QQ~TafPp)UT^8cEgs#`6hngo-$j zzn3xy#~pT}Fee_xyuwGZ5lo5N#`-xjAcu0^z4^K;O>pvg!Q1Z*AkGEy1Q%IQB(KZGlH$R@q$-`g_rH=hzqcVpjeu51UH}PRXGB9(+dzio_ z<7i1f59trP<0X(ld5gR`&I}UJ26SUu@hezLrQ(Ms&KK9Mj3I4QstH?L*H{(lc))Ld z!{_%^(rNtPcS`psu4Y&zT&oiMZOM?SEc;O$Y%ShFG8|j=$*o`?q}KhI^qV;HO?B=G zPH7ZcTH9dC0nN^9SN^f;)yk>vH^d^31nz}adoG-ILIKdJW ziIh0=VX)Pu!ci*JZWf!2J6wZu@@GUaypw_4pNwC$%QGQ>v+{N*W9Ou{vmGC1QbC>e||?8|_N*Ke{E^Cu;~ zXtuP`C>{^PZ*Rn6>pt}c1LXbn<+4p<`RJvgv)z|_` z@%%Dx9~4m_D~LOlnC4=bM6E|mBe?0xg;Fdyh(3nH{NJw%W(#j;mISl5yh_x{NlHF- zwW%#L5AY?B=_Lk7Pw68lT+HGljdn^`TskQ|QR%%)FP2HEEH`nQf7ksyNcHXwb0*>k z0plk&6$FD`wGs_B8mw^l<|jxzUoM~udd2dAc*q_O*3iVqt^|6oxhv!XTDPPA2{!uU z3R|fggV_PmQ0<5N?(&6+a2*x8T8_-`-&gR&p^7@HWRGpg3OLRy^T~tmvp7A}<0tVc zENj*ibR?nTs-+dgR=E|&sYVE-!pwObxUO#2^c)+0XLDyg-tp+OF0&E9$x!t*74)V0 z6IOJ30!(I#KNAAGAQ0@m!F?CC5y&+U_>Dypn?%ab~>96x2{z0hHqu9BLRd*o&9L8iSnq{@w1^$_?wo-LO$O&=S9$(usPzm zTLo~4ugxzm(2@j_kbIVgiYzG~YU##Op_A8hxX}&9@z}dJG?7G=V#%4A@KNxNZ0{om z$jJkn;YI%9=mMjY8&Ui&YUF4YRh~p}^!&Uer%ELp7d5XyF)0!8YO0B0gVGSA)F17y zb?dT(1S`KZv!gF~-aXC#4HlC{pEL5+8-|7^Ajb zUcatUIG_N|+V`Tzn{Zar{2BC|Go*`ZyqInogZOWA-QBxbr#RAgP0yyr?t+H zGKW6$>qpEFNJr$pRN#vQYNgOyo6)RpHd&|(74MHzYvgNkBo=)?q#-%{W`5 za|;-x5k|$MXhBm3Q?)8eAlAI}>a=)_nNx(%7}(E(mB{;k5K;c!W-Ne7N`0+>KTGGr zeP2&9Ov%>8@h^!;2=+t}uH9r09hZFYhx5e& z)hUZJGe}C3iik3{h_lt<#(wrNhpr{Mq)4;Hw4^Evh6(k5OJWkU!A?~U`M)nOCk!6&jY$&&Jq zik4;+x*_>))veI6Zg!mEMD1SETs;IvO)~;UQK#@A7iP8aqG=}j4AG&ZQM{J20V??l znp?;?)E83|w|s--dm`QlUe3PcTkV0CaPqN6?E59b!jDTybTan~MfbaGJ%$=RFB>40 zpFI@=pX|Z;k*I3oU@i!%s$L1;u4Z!Yf|D25Reic2*3D$B?z7Uc_G! 
zB5@f%zWc+sge-sI%hZyC@XE>_m_v!!t1ioi#wirdr0@f-T*`(6L9 zP@GR|`tH7lUlw*5A)(GEmX~y~z`B#2Tsj(4P$JhbnWPm7KZ>2HDvd%I@2poEc&>2r~Aebf8p~5LWIehH1;FyQfD_&km+#@E&Gt|#q*?Zuk+YcSE6#ApfAo#&O-v(0LV-`2v5dWi)2b}w}p)A zkKrD3wczHPSv6*Ysy;8BE~_D#ENwov&M#^Nm>6b*s&WIEM1X7Y6lK1>{pYUz>#u|4 zW0jc20Rc0q^2EZqul9`3iXNrk!{JOt?0rIXf>Vz^afPQw^MPK*fdK9i=%LG=0UF>V z4w6C=?C*FBHpirdjFGGw>zYDawczAunx5Noqt3Dm2_~w}iSe9QHIz#$GGWWtlR!pj5+JPjDb+2Qs4OI6E z1a#tjr>U@g*(v4UScfpw&*RH7ebN+Y)p79`49hy>ZWl2en~*O)e^H{rBV}@wgMRp- z%RBinQ_V_LsZ~YF4JqI?9Xo);bIqDX(cJ={nKhwO-=qZVzFb}K*5#q8tK?2tT-*{p zCGvJ$q`?8r^VK229|E43bz`7pbK7fH!}SVQ1{`Ya8B``+0&ZH~!Vt54TGLkPgIR!F zRzahAzjV~31@G`ni5$q9kC$NeQ7*VF=!YtO#x)A2c*MxcI53c5V_5Q|$tEbK7v>QX z%T5k#puCSdzhhMwmL9xxG^MnrTc|7I4ckc zz!upiHdb`4Yp@RiT64W_6vN;FW{5T1VE?p(rZ#^!(XYTy@W@CGnU7dzlM~40ujxX? z)3{n-Kk*YcfrcINAc{3=*cdLB`UtYA;;=w*t=R{T0sf@dM*jw7!Pnw8+Qtbq!#eVj zj#?^0ErQ+GZ(v@ZEEuE4k4jL-3Wm(P00j3bG{|e3%>qaYk~3#RqUW){@{1L)!EQ`D zl)MD!LF!-m4n_0b-QNX;cE{bu+I~=%1Sa|M#YJXvB%{`2g?DdW!}6u<;EK9eN~XQ! z$D|hVLho~H!E7F`4F}~6s?FzCIC&oz9ooi z$MXdEvloHD& zxtrqVa>$r`>QKjMR%dX9rA= z%S!2t50^-#L|HspsJJ47J~)hKj;$f0acPw9QwMkrBy`f=T3hSZ%$mb1O66I4`jQlz zd=_sbh|qAq;0SPCj8(j%BFxu0tJFBcD7t(TwrD!#$bw&XcYo=P?Gs0wcmxq^Ce&D^ zIKcGeVlm4*LSzexSZuWs;|wywxyLZB>4_Jr;tUu*{XIqa>Wqq(G@k1|09CH@n3+vT z-0_%|M| zE2WkbA%TcK^f6%#-~{+qj#+ulH6!x@Cl5D5^4#xx%u%9KtQ%ETUi{4?h?rg2@n|VB z_B*$yPnKAIi}1EPBnU5bT?rFHYVsg$DO5>;)etPp7+&scSbx;!mVzf$?Pi=3=%+Q1 z^d^Y?^}Td_CqTg^Z8h*B);pDV+D$9;$XT%ef`D+~*x;t(bD8e3LV$ZDju~uEJ1jnd!wGR^a@@S(%lZAB ziz-a|6m2Je9b9w=CJ3n?oIlB7H7)MR>=lY{DT2EK3C@!H+F0gTEJ9Utkn3~}#TPI( z(nLY4(P<-b1g@uC-#ApH!6$y$UduKu#QxyZ?z0EpCF3MK zCZ$-a)5&JMDlYN}pLk%nO7rn7*j+!Yw(w62o49vyBX;14l4L z+JDv?5nsVP4Tw+0imX#6$Ttq2yN@rxP4%EK)rL-5{lo=HpS9j*I;WMt8o$OH);p!8 z#!omM;6g#EcYg}d8^=JF;|udBd!o}h%?voGSUtTtxcB_Rr-4FlX&7aPXf*W3Wjq$t z{?z=29s_T~6r5yr34rJPns-h8DbN5C>6mR^ds-eP+8Y<~-5XC;+c3C@&^0XtPA8t)VtpE;WQ-Mq;E;T#tLtyK^ckzIZIMWi?&z-IculCWq} z>SJ6^(axX3s#l8%A0zpCd~i7C?8wcEM8yP}2u*r&9O|{I+QehyA4FWDvS#HpB z^2;6)lZJdL0PtV4(>AfzBm17Df%okp+2+da#q2>hyhrZU; zH^OtT?I@-~f5r_Z2>+2aII03+XSAO_x?NV0)0I3^fyk(${v-A$bO8|=KlXh;J~AIu zJi(#ZqL;rVxW<<87GjSqp5r{E4?pTYHV5?&D3*A@4zX2Kq^oT)7Xe6|i;+WJ;s6M< zBX6Xwv)T?9_*RAyzDdd}U8Ta?h>0WR#iMGxHS&OY)J+B?aaKXnLK^LKkGH&wZR)Oz zQN1eX2wT78oukQwzPG}I>df!$l15gNG*?!-pZEla-QLKw2l|=Z#*d1E;b#UYeBN!v z*HoHwnoVrE2VB|86(4t*M=o?RvH!a_zuSG+gj8e%5q(}Hviw+nW8qFEOq=;7+M05B z?akmX464|@2rNNmna3?h#7n{ZwcM6hbepjjRu|rc$@g)$3y#Bqus`(@bLHJC#S1_U zJ{(GSC3}KWKkU98yft`CQ%ipkjZ24>!U993D-n~4&mY0tWrXmhz>s_UEBz7}jxo3F`O+a5 z+9Py&)ZKf&m}rQ2F>F>j6z(P3PuFY3K+XCTEeXLY1ZnvPFPnz;l14X_BPW%C zKxw#?z6oY$#_31nZZ&pA%=4jXPfxsdb&5S8(>|uYUKQOj0uejOT31cNE`Ow{6zHq< zJfe|4Op2fssZVIY3{1vn%~40bqBBm%`c_ba9j)3s~j&*7b`q+IFW zMdVB=lqi1N7VY5UA}KS4O1iw961nCx#T?1=STd18>5GZO^`*6a;jt&k(~AlR;E*oz z-nIaIg)}j-8A5Ll6ZT*PL#&Q#MCaZT<0KeJIt(3#(lk>-Wvm|a_bL-LKJPvcX?4=Q z4(?)r09NxiOg*GuvOSvhkqNL)uM#cdEKh!4MuIu*D zmig>O@ioZKmhB@m+MgkM-eqk=$I4!U#I}i{hp=@26&CGC~47xS3DBfuuWMni7wRQ4v44B$uXDcb6p* z_^jJpjWw!D7-lmm`sTz?$ClfQ2J$gbU5!!;(eCpdtAjhDpT56G0s$e3Ca{2i%m}_X6(x!@GfCPPY*uP~ zy+$RarkeWZKasPP`%w0yq4ER%y9jmCdeqi2<+!?9%nnNo4_6oX!fPM_MpZF>+2`mo z=M;hfU(f>!S7pgO01F_>m6B@{=OIpDqbsv}_}t>CC`u;#4E=5{BtU%uk2D!6MTF&`2`zVULl7(~A1PVs3c}54I1M+~&N1T0;8oSoxSmqlRo&SA z*GB|BzBQn=NCS3f6(?@0pp z!={s~gZW9z--c8seJgOR1KQy3Zf<68C9aXb5?kwoGL|-~GUE_ZW?>SY)4H>WUrY^+ z?yc2?PxrShR#l`p82Lz`J(^nWV2ZT?;w$;=>JI?ay{GQcF{OEjceWzTq&n|pAgbF| z39+DUp?C=MQ{q=?`jGi8Eo`3IegzlD(|6ddI*Jr41P%C2R}-2bAcF%Q<0Zx8df+rO zIWP%IFoSx+a|IL&pRLheCy1{|__MWGG)eNl%55|mawV3Dr!yx(#!62`6Q{+ZZl}v{ z>a4>#vv-vk=!d{asF+OH5OP1y(7}c}2J(rsf^rZ%(y; 
zODBEVUCq-r#{iR^9YL^o7+9-<{IT!5H~+T#pW=sa1k%;XLeUOCj4`dRY0O*72kvVw z=UzX^2=Iv#Lj1ipEfu@bi*1qt{isy(pd6ZlhRFy;e@vZk!D5^bhGj4>vf%CC#dCnEjNNQtDdBy2mxt`K0J^iU zV`OC&KqC@&Fxqd#>9J+nahG#Dq$FMlp6{B#C}41tkjgcpLso^FJ*oUVS>weO;;PT* ziJ~AJSzs|Pz!8Y}wBTWEkS$DWZW4~t3uLfqHG(&}0^T$dd0uFsx}Eal8a`)2zW8xY zIS`~e8U#-eSwl2DUspt4N4(w2Nm=&D({BY7kBI-Q%T-1dmPR}pkXW(Q!j5@AM2^U#^em+tad}^#)HuXBkp&Do#J}z ztnb!*5jcx&J1gS7AO2?d@80u$nh<+I8m4hGr!>H=e^EwcD|xw;OTLyVZmUa=Z)!94 zjp6il1p(G&KnIz(z?aD+h^3Px@%EP{;8-?Yad=l$@^|L9llk25ru=Tyhmu%xJd{=$ zS6N7iQhZgrXLHRCYf9pV5`f7=jn(6hVU&AG1Y=KcUI$-KRcm}Tqt{>M15j_$2Z9yx z$;xe4`<8n!-#e-om#u!`s3~O>9*p4=tU-gS&mi zgPUCA_)>zlsXol++cqON^^6qZ7viXJk1Lp%7f@ z7PT(t!&nzi4M$a^uABs$;UuEZ(;1`}2@-k9t)!Pj&LVjDY4?V1X9LK zav(ZwR@^MCf)s?*PD&>Rd(x<-3@iQ_6^&qIcx@$o^;ll-b>X?u6#Z6Pc8z4%%nQ`E zSqJ6Z1VmW`II_Q4L(e5}D>DOOb;Pvz zCF+o*`XHVUxWaz?6YooLggZd%-n=6F-R7Qe0k$EgT_Q2fFsPYceY4|qiIx6ZHcz7q zVhGt(_WkZ#O92D*Z^J}-KTzp53e_JJI3GS%1zONdelAT7{2fxMFadqB-V~!5fRXgO+Qh-3Ip}f2XRsF-VnbG$;Y`A?@!aUN)8Lw zcVA?S%V>>tfa^mPBaMVs|7QEV?h_ zC*dk}u!X1W6>84$nhPGYdnxQ!`qRLUuv+qg3RE&|hK>s+$Xrhxk29qeZ>*uO$c)3& zig51ZkJFAOvws!}2^l!num1>*VNBn|)mkw0t zt#zur_*e+ww03Bza88POYadnY6LsigUb7iva&U-Gv7BH!o+w&VS1{r}-^!ajUdXXx;~DS4w?kJ_|Cpr;EQNZtxDX_*r0`ak6uxZcZfH)q3O6b- z;$d;Mnnh%xN0@~CNm$V5N$44$_f?V*qf=Q7@%A+k4}W;pwc^`!8TQc0{+>v8 zLb!|cP%w;MltHcPY;x_c2jYGn5Da;&f{7yUIKE_01(X!X^LEtDflfFV<+f8(g|sn& zhyei$maiatxPe#YQi4pPIiw?N9^$6txy-VJV`&M$jvMS5U0& zDJJ*q*kSflb&-qjQ!MB8^wp&=y#*em^ky9V`1~H6Jq}gMiS>;d{0&lkxv0`4v8&T7 zl0d1ok1v6kAtt$j@c|8DAql%=$?t)%TolWM4us-?Si`&jxqJJ+`ISohv~&@9>%KvVp_bywFHN2) zb)Mj#Al3ezBrK69?9yOG${dvp8aI=cJ_WB(YT3tef8oE7BK=MBBVgY}*_eh%6CChs z?@LYrd*TTVkNGT!g{j}Vx5cZ#pyPur!yqr9=v+;$5%=aQIjqesRdak3QFH86C>U>I z=CE+&WRl4-8VIA#v|PFGTt|4;UmB=yPMTRtPjTPE3?a%AC@;A*Jj+71lLwT9*O(OMTw1m0=J=VU1WN7HA!!W(xxpWdwY~eZ?(UD` z2iXaagXm;AH{ZKqQ1MVOMnbWg*Is(&QbS3?5|XKM(ic?X#@YZa+EWUDt8HL$sz{ew z7JXQ2?mCYlNN9b2SnjP{V2ZBdgd6NPFLIVlb1H0#zp}&N#o)j>R{Ltyq@YQU`~VG{1UND(!L4s8-{{ktQGzLVbz_V6{FM0L ziAoGKJI_i{0op4ikrh$Ca^GIjFKvB*j!L>5Q4|l&(djM{3zK6@BZfG7wJ9sU_ny+p zYY1nBk@rw6HcJz%PlB2lvjQW)BH}dE3xx1O2#$?zCH~UZP$An*!t0pIm9txI3H!99 zQjD_!vPEdBQ>Fau%c8jy0j%9Xff0#K>9?^>)g$B;9?5O-$}Z)`#D!~8i@>%w+58;R ztNfco6|Uf<$K@c)h4|AIqbxEEI#4X60>2Er5@`yjGBWH;?XpUviSsseyl6mO03tz! 
zADI+KR~WBlS7C;wZ=6W2lyB@IzpOF^>&?q68SzR-MkcnJ9;AJq7okm_hfs^cU0-x- zwZRJ8$SnEI?l_aU>G~kWXg1wOOab=zO+(zeq9bv|Tbt7AqdcN`+)wEHU=;DW4SA>( zx931A9GxH(1v49|9RU70u0gQzqC3kAp@{dANIp`c$}KQ~7pmNWM1I;#gY;Z(2@xS~ z;{p8*p0Z{& z%sp*=6;VC$(;JAfWGY`KC^BX#MLw-^=LKT;u*g&8#zGNKg2VJi1o_xEqzFk+FJgZP zdWB_Dd{Dwz+1x^K*F1ZmfCMDZ!jlRV$9b*NYBSEYX}Y}R7@ip7g{kQA_<{xx2<>9q z!?S>5O|HeQ5Z80};tiO{+4~0%OHM;F0(LgDfUVu7w-L^*BW^rS0~VJ;xoh{i7QrgO zB%b1fyMu3=d1O?zj|-x-jclX@E%oyV<7O%EX^6Lx1wq^L38yKVL^+*8Y1x9y>O^qd zEpWWd(o#SumZ)04gj~tRkn9B*^(7|1hW5?!K!fG3A3S)_J;)FgtvARP_A2D8l!*4u?DiTFpkc|u zX^%dt)k0_$xXE`2fJJT9YBPdc;1P5Y-Lh(9Pt>Bm%0xsjO*+5UfFjlO^kThe7|1CD z35&3%NK_=RVx?);%xa_Q&8I%RtJYl4*=aKsOP(%Dz8rBInyktIzc}_ea)>ynh!s-i z<_T?m*M-OXoJ7Suxqv1s7nqYO2wt0j#tVFwN~q{Hoe4rtaY>{@CeWMN0cCv2EGivP zRvstBeoIdjvJ@MLSx`>GHO;wPl8&G(bOV{Ev4T5au%$y(qPm$EZP7{u%FoSbDIs(Q zYbtxQdFy6R65kIF^2UPf7N=+vSUEn~WY~+eY*ndAcBsj9Hao@wtewaB2+Toa!~{~6 zc0m42K4jeZsql{a4Yjil((SH#z_d(9U{e(i3t(hL3LtbEwAI%#wpv**BK< zRk>-{v(1lQ^5Tw_O>tGsgb^dh@l!&Sy!#K`uYNyvCM2r5j$EnpbFUdoiWpRZ_{W?D zT_S@2DJaAg4PT~>=98Mr&*xW6oVW#Y_p|S_-Tfh=1B&kzInLPwcFj^*kh+4f zja$*$Orw- z?{vTOJMpK+p=SZAJn2rnrAy3srT;Cm#D%(NSd}|h{O)N}`z6_LgcSg!S$IVi<|Wrq zlL3Ls0Pm(g3eH_Ah;u>}X(q#kSa;2;st7H$?uf0R)AYp!AT2JiccjQ+(E08S=}o{R z1Q+~Z1SBU#hG8w*REHY0_w}U=8DE!G#D=O$v!Sf4+~h~P2zB3!H*@}UC-+xL>S#?@ zlky0li>&Eas{cR4$y1}S=P$O3ku#QPrbOoyYr1tzeEUlSf$GqErOzOj3guPeQ$oC) zW(z`AgJcX|*EqL|J7ww5gDI#8iF?n5RdCBX<%rpq+I5@;0-H6ws<|ssUGIVE>)rEs zAcz>fY6`hm+7LTP9G{R6!mToKMz0h}0%S9BR#mKD`peUBOZII%f?Nr(gd$X6VvU^k z0}#rkCmPpwaug@CKj_xufRSM!7T#;0XIo6J&&j}IoKg;OqH^Wqc+2z6oz-sldGVmO z^fU~I5N?{=keNUroOZHhvWdOIRnKE3>eKWM08k}@3SOk5!9(eopIZ%R0)s5T z7yf)fcj9)OzCCTQAa|2eJd+badX^WkWw=X`29?GdJyaqF1I;<&BIFF1A9e4?mgNf- zEM%V&HNTtt-|{sJ*nn1=lrS0A3!Dlh-hv9r_@DQwazS}v$pmCd^l@E+CY#CYCjP4# zY^+*WOrkUZH|6UjCa2MtsIs^N`&NvJcTNRmMPmm6#Kn~YGxw*-QY8Fg>H;iP zd>wc&F9Sio+7f|AvlQ#Q@Q8G*(?m`N3YZqO|ByX#x-C@T&Gvn`NIQrVc@F6GB-Z)E z?l)tfK>ICa77A1%&B-XKIc-FEhYs~T6J6WCrHROKhlQ) zGOFssk9;nbnye<%+|v&MlCaZg@ZjwMG+xdHdc<{RzGXF_Pzn++How4AGb(yNq{Rzb zC5xar63J;|22__1Z`iHvWbmBwOxo+9#Ld&!uTXLqug*D!xGx&bVJdM=4~n$Y_>>mr z@+($Zamn+A8I$ z&+wzxNaQ9wSt^L{^D|tHCj=h=Hi$W(91_9cN1V|MQjU&)gPBTX-5sY-F)o&eRD4WV zWFPQFD>j_z3&sScHORrx%53l^KnZRZA{TUJ1VP#71hwE>XL2 zDxYKMnrO7DE$;R+JVkFuMS1#Wyj+Rx7-C+3pL5V!CB_rg8rIDrG`<6qNLW|V0c;l` zp9>lzdi>IL_3;9pisgPtd&Br|WMAqix!*{*>q~Pa&O8|pJ;7tma-LyH|A;tsGhSf` zr$r|ps9qgcA;>>%aCc4o*aP+W(#5|@HbTJHfa*G4Wy&!U&T5l#s(pwx$_R=#C{cy9 z>7o$M5FGPjVA03{Q5j-ygUD2RN+jqYrIFM{h^b446tPbUa$kxukzYZ`&8_YWe+Ry% zOoQ*lA!5`Z=KKL!;Ru2natFR5oZL3=vuojBobY(fdbjt~lz%2T%nwxnozB4)r(-P1 z*e0UYnZqM)eJP#mJKjKbQla8aSjz>~ZEYyj?;**oaUaN=B1c(GrhCw)rx}4EiMVxb zP*-{vcW(%4zza=0`BQsDV{uQsMInbJk=f&dJn@nU$aWk)2nF_o!GY~=ch!Fz%S2C5 z99)HqRm6|l(N1Wk9LwQ+giu!W4BM*=F(9Lu0AZrGG}kB*bku@?)KV8WYT5j3xulV9 zE2AAZ)H}vawNW-g-W`>XgCjR%%ZJ`>7xVIt;-J?$TKNRuDfx0<08r98pt%%9e1bQS z2^?GAR0|T7I++EBEuvFfC?A3maV|kuE|4&*B0}MAa0E=FcnRq;tKDNibFM&t5vUv+ z+sZXD>;}49^J6_@)Z+c7ghPfs43YtCaJvef_knQfW>n${vX%O#NM6H=%p3ZunUtU^ zWRaZBeV}Az#V%z{K$%j=^NhSA;v})hJ}tmcBWwluv~**B0zcFq7c2qu#Wow>Bm1s7 zM69oGI1tzJ$3O0V?02j+Yho!=i-6$T{#wvR5iB|i_#3zqk}dH;W<3JS5b>SD@_oOB1dEpFtAbZBUQ;e5bA(y_G zfcxrIf(w=rS`-8pqe=SrcZ{d|qqo0&`{$-|3`9=b`tHrs?z#W2p32mdSlWK}S@%(F zgEQl{Y?}R5(aM>zuGkt1ZE`pdo{-n*T>+S(a)U@pJJh?1N*>evQ5@id&`3k< zJRk4PXFh<3|6|`r)enP5;0+Ou{FizGdE}jNWvtcJIsq>JHQSiAt9P@Y;mgN zF`A-MP~$PYjEspjDEV4mt&dxwULFQU=VZ*$X7{tTc%U%Be;eP23EC&)=X=~YH9Tg? 
z8ojfI&=B z-a+J*9#upK#HO1>5r!+Ite!|gSd{Syo-?mT(o?!`dpnPnw01Q4+Fv=mlBr3iYmiJJ z)+|#i1S@FPN&FQX7^XH0lqh{UNFpx;QxmHY=)Rny_Lw`4q2=AK0m*p|DPxS&Z~Mhg?I6)VDxRGwVK{9SBh|3W@T;I7>Ug zmxeX$`W$gzOZjXoen?mj_?udbVr8tNZ0{Ro#tGAi$DEq~ z8pPts1#e+pl%!!q#-Fhwo|!=8H>n*b=u(_{O9e~56M%=&ng;Dlc;_aQfS5=$H(#w| zN20c_)4)-^Y3hd7%*QX5hpwK{H6xfXH`>H`79@~7iv3az@kuIc4Z*T|Gyp-OKXn|~ zeY!Bu300=xQhx=@!v{rSk_}NjIC-f{y_GSAwE7-G@O~L?+$V>}8t>lxv@65blmU^U zeYpxVRrNF+B8ZKN1?%~y67IS@@f+~V`Z-m;O-hMH?#C|{$`F#_W>Lsrl}L?iQ5}-hKO~+2jza0ZvAa%V7>;Q_-$n51K5Gy|M zSc6ljXb5W3H34c8h=|LAh5H~rEEu6XI4u#t7sv@GG)Ww8u5YAmL^htVk=S8BQth97 z11Me;s01qIjgh)ccLW0eHFqB1Q4V3nULvqFp=fuktO@;;ACuriO_3ayIiZ*OUv=^F%{2FBca~DC?!5V%M3yl z?WS%&#tF9|6m6O5sHpVIe-CKZ4TK?H>w3Xo3MfRcZP(xt)&KUs~RP&g3Yct z5lXr4`yo61K`42_H>F5;$;-SvL>dk(%l0rDmj+kS9HZ6X6(nkSf>Vm5c8M6fhX>73 zM0Zx*zh=MN-42F^a7;V}Bo1yk1i9qQ<42tct%SXxVT*ooLwz3E-qii76_02GE@lqx zPo@I#`rbilz)ytwWlBOjCcjIGFYN5JOgS6tl#}|{w?dSwK|kUvi8_6>R2Q)#(%K=p zZqOKs4yFYN-z|YZ^Xs`JFi42kbS~j1Ibzi@onOA{qy3ftw$^B28W->bISlc`yFX?= zUhtm+Z=30Za(_zrUkPlbL61I48=l~__3qRUAv%{bbD(zpo;EXdvk6j@UpWz9jsHj= zZyZYC)R*pvWLGX-N$A-kuv=(w%A^uth=PPP$x7eixMJ9<@}lL|1elYRf#i-{N0OehV?dzHO2>!n{Mo=2C*- z2-W<`ZnJJH2}`BYN6q)uJjBZ#sEZ5Q#(X5398eQIg)CFKHjjAaq~OM+Gt^{D;`as=KfrO$(7mvB z-fZq#=~hSaSV}wC?QvpAn0T4RPM#m2zfxBG=@D|tLq?txFkA}`{vW&V1c@*<6^$}M z8V9;XF(xl~WN?=hn9rKfGWDcV5{WFTStKx7e10FTW%edY)_*148>HZ@`zrQJ5j5I6 z7CAN}-@W;`d-T9`YqA^F9vA!W&2M>_7U;H<4N(0hx2)ZHT{WI z1`fKYVVf^dXk&`Tq#KtF^>*7|_#=`tFc~5}cB=Uda?cUTvBXDUw{Z&1i_ZKiYJJqM zGWNPqxhTGS^1p&$Eh288l|>SSh_|V2G(IbGE`>5=tr^=KkJ~az*49K)j#|<4ph&A2 zK|;oQ`hYJl@P-$cXPR4qri>3gR^J*pFDcFWE8@BY4m|v%E9fw8&Y{vey$cp1aq7D; zM25#3r1v4zL2;RPyS3qJa0Kmz5ECh+K^GGTC$2;_x8JjR-|;)8xWd=&eg5&M-N$ii z-|oKqJ+A<-pD>p}eR?>v2f)b$W<^_4s)LDt^n2ZpelJd4^&i)TiYkwTK1*{)S8#bS z%41V7in2E>54Lzc>PszF;;~x#b>35q*jlPKp!nGjV+E$r+FJL-TccsbvQ;%2bsOc+ z^nybMn}(pp$U-sM8NDz(cyzffE8WV_>jq7{Dw)sM z2wf~`3o8iOMrxT(>LS3HLUkYTW7@T{jld-pOe`=&xT|HC5;gnw%2w@&QMm82m<4N8 zOP(-kCU%K#I8VTuK>ya77kJvySs3pi&I~h`!6ESzA!g$7{t4Yy(k;|-22;k%Q)mrj zuPH>EpqMSUx=#c;9JWIu(7GUQwu&%xh5Yl{ez(87?>CRTE!%4-Lep-OyIe#9=d0XT ziv(JVa<0tn$>2VyISCatNG9{*n7$;1jfl#Snn%~LP)R%8h9BBTt=Uf!j%W}`1M%U_ z?k6S-V@HIUQI=I(88at~8v#BWoBm?H=*EkK!hYDNMa4jitb$^C?nL%1JmC zjxzGIG)bPf0C@Es4rWZ)=oH&6q8#WDui$0n-Yxi#OP-DzEsC3L^|3r!Qq80u9br3P z#^1rMsA7$uXeq=X=D3y5_PU***1CJ=$}=gm*bpZKGza<%giH2w%RR1+y8Z7r7bL0i?5MacPgQCy6M9jBq zd8``8)!L*dh%YY=kR+!RV9aqNUwYOX!M|U0zxo4z8XLX1sQnCYXycm+Kwk!kbq^nz zP;?*{^Ji-K-J37f$;46|6uTD~aXX~5WwOQ3^io78dq5ICGm$QV-<5+G7wT+s`ZBog zHYnRm$YiR>p*tc1Fh^hc-ndbEb|5XdR6Y^VX6oPD?QP2o)0^HAMHtoCG z714-vZw;DmC>V-w37#3O7Fwyb8%J$^Cp(+#U)RccQ?!*ARVcFhJ_J2A+;cuv21)UL zR-~RQ%`p=>GW1<24oZI>Jn(Df%aMJ7lj7fvnCS#yP2?&_%%K$g0ga`RHL?tx_V}syGv0O#+FkQ64AOqfydc2?xgTLPWr@MdZ-ufdOJnfQ!Nk`13@ez`NeHMK)38n4%cVsd^+2u~pemwCcCb7KcG4kcb25mV3pI?a8n$XBD{o=XYNNx%=x-(RwlSzAQvn^?$`DfQM;3 zS8o9|@F+E|emvw>0;xpe*$uXxjZ2OSj{&2h-Zi90g%vP;(jyfU#?CbGJM|}KAclOJ z5UR?$q4>=kXRrj9=*rfMOej?scrgkPuy$qF`NDAV7bQU#$Lff%c9RY!;rs7*@7o$) zVDxHwCk~0`hWJ_MfX&r_nXBtJIFea+mkS8z5xuhz)-(tndQy?4LOBxaPnxqM&m@72ws<+h*}$N?w-j+g_7fo8qDD|th+alx{n{2(UgqW!H-;2 zRD>Tvyut9>V+AvUMwo<>y!qKN=|C5?Z6XPeA+ul7&UQCJvHylgwS)q8#)&3*{a^n*-+) z#7~NKACzo=07O+TXE=p(L%o0inlvaF;XTt51R(iSF3vJpRV9Vwhlooz;A4+2(U?`d z;``nWHGn{E)7rLA=rSt65>NjKmiONe(SfaA#$fiRU2!ux6=Z^UB~ODr%!>$M7?#jk zR_l{daxb+Vg^f3b|3W+yd>}EOrvp`hQ2m5`0VM^HQN#6d{2Aua1U$5NJPteu)Xi4t zh*b(_h;LBj2UvvjSwtc>`|QZ-e^s$J4UzP~G2#uuTbB$e2KbM&CtW*pMYg0|a6^5m z&-%1LzkHX#vSyye*BP1cN&NR5AKE-3XS>bWj-6qW0O`0z{K^a~*yU6(E~aKpE26WG zBe;T7(zHH8#a(Aa27;GUoN5S1>-1vgc&uto{BvWU@BLQyz4t;8E66zV$~T09DpNN8 
zE_|i1IyPt@v^7?xLq#43jn#>a1XTaHwb?x>Q0o}$PN8s%rIwO>0#wIQdnLQ7dVI;9 z@`+rF5N;x%Myr`+F+@B$SBB~~ymTCHpWaYN7!71i8D-M7zJrCGXp(i5YhOXQlmI_E z0v(cpI{_b;#jvB83h2&s7 zW{+Xd8~$Ob%J4v$)m0ibpDJwZud5}POdcXZ#vY2OBo_3o3^ZFWUWctXDakHGJ*X_@ z9ggZy5K%I;HWdU4b)(t|8%?ckYT+`D6D&+{l-X)sP(0$=#*}=<8-@2{?mxf%lkVMrj<;}yC{v7|S{VM;-=|uKzlvQGCLlZQ<-2Jdb!f*gdQIfV{spP6natw`@ZG|^@NL8X|mLG z9_IGY$@mtD^}51#PB~%B6t%3r$wQ*$MB(8*XSq8N_(_5D42E zhSWgPh$KP1IMUCGTdGMMMF6+KiM55+K8Bj_3O3PcOI5Xu-b4K=z*{`XYjXJzB&l5m z%V3Pz4roE$W=Y$Xk~i>*yq+L6id{@vwD@yHCmY~n_89YQ{BVB>YYQLW1P95R6}N#$ zV(A98h6)?{vZ0TqewOTWxT`dd++RS>+L~A8+X#G2sq)T&+G==>aIiM)Q&HeX8*xu; zO7d%g(FT7lGj@a*^d|22Hhuzoa3|i)+Z?Ty( zBqb?Ua9WzyJ`C>N-Rrj2eExJD!oehesCKt!Z5VjvZt{YrPfUOVAz4dr-0#-o9iUQ| z1qDC7&>&glR1gD$6|9evp!h|r;UV;Y8Nc8VXNU;*t><)#Eq!y}o5SlF&y?0^KI%0- z@*^mA(3N~EtU+Esui1f5FC^>_3xGA_`e52Xnb_Ceo1b^z_7Zom(R62Ncgm?y=(1!) zjbPa~+jwg=^>IRnGX;QfbCkYk}sy;5umxXw(_grYUAD$G^Scm_ErX;%GHa zbRpsUI$4%#M!9>}lVS}sV1x7m@x{yUQY(F<<9K~yr;$rRs|0VNH`Da0rR*``4 z&$N)UE|X7yUCv9eZeKX`d4rDqL`@VyE0(sDGZZSwOUezdLbb$TT@`tmyEnjHPbr@D zjbMpI<@Md(iV)dR7$Y~8bI&-Ppv3FfuD;%+VOGI#G_exZWga0FwWSJ~AbSh-a4P9x zn>XCqErDlVwVtDU;fsQ-k)Or5MwC%(W*`?j^`M*{tQG^TsJIjpDie1F8MG(G@*wyh zf8f%_Ap!{JID|VrJS@A0=cK7`G7-TuVJQzC?-6Bhrggh6eSi@z7ve2q zEpg`v2>j9AKPoQpM;>SiZZ~;7c|ki<`ZC4q?k7lM*n35cIy)vV7^fxM%NG?pLS{@! zhf$6U`pe%RV*Zj}5vQamKnD4gBH_WiOrH5DK1Q0*X<~CM6dv&-S5dpzAHm9d+;~ek zdWUzwm9={xCGQ{jI4-YW5gVdPkahbQ4^NK;C=+TmZC6m&izE)p5;uU^#Ra}r_EMeg zI_9bL+VoNf*d?TV#8EKM4^DZ#Xa;8NRSEy=-T>g)Brr5KGnPZi4QT ztiCv7dSe!BY{)^k$Z2hw@B+u?V6JKiziHS_idWNWac)ZZ@r)z5%A=;-=I1^iV!zy= zSX+})u^u(@CRP-Tjls>?&Pep|i)pEv;)337SM+8Kaq(q$X`yv9SLUlKCo$LKfo$*o znwgBWGg1~>1*%4-3oMzwo2yrN%mm=sHG2*|RND~3 zf8OO&uFy64z3~v_a8qg}C-oNLb}>CM&oIy7iirTqHp{e+%@y10VGUM%LuI+R?UcHEmW#h$a1Rw{0%c-~u=t6bVK@%)v zY}q1bfqSWD3BC|yGu{s_AeDmZx!TBe?P~>%Nn!E^R+-s?YgQOtCZ|~7Go_e@%#+I^PX`ni#ETl3^MOgZ4=Pb700SyZ zI@LaJM6l%m6vD|ky#}52i<^)>qtFU2ioBbj-~ZFae*}$M#l5+Y)8*PYhWhjFz3=({ zY!W^YDpDj3F2vUaS`61<9a9}qj9tn=$N=y|p0F$^SZzx6;3Mj3nMUM-1D+o*W<_Hj zP_M0&Y9jVad0R~V)-`s&;UDl%r~mtbi9%do5`v1s(&>yIKt-tnvwRk0*~alctWt_0 z-y{KKPYdNDT&c}TnsIOxLRa?Msa@r}H<+Uzeh?f;{zj(AXm;?rIUHC4^;EcS1g2RO z?SL_XwvtuCsFYjZpXn>rvAK2;bF%Nd_N#w?_qXve5Gy_;+%jJLYy8vc|ICF40&;U9 zOc#*^kIxflQYziWu@$i5tn^}q*ZFm@Hfj(q_2Fs$1(KHkSBh;=8C&p_dJ>-!Z7JIF zlmzcrlHU$>%1NnE(aeGY-!cO15h*mPea3D79FEQ{D2CdJ{>3&A(J^ySqLf#kmWSgK6z3mHD`xFJ`-eaIet>;HijFrbxcuFtu*?Ah%Wo3 zy_8wzXo0_)J_M-Rb^32G##P}Ste0Eb4|)3}IHxwoV7#JcC%6N|gw&;CJoHnTmja7a zxUZLrC2VSparvjQEl@pl4Z0&%yPz8=;}BBX-@ReTbrDZYsZ&?3N_0u#GfC69$KVCU z07Lz;$r`VdJ$w2x-bfP@)P1eE6XHuAzwVLOIl@;dXQtf1iY7Hor&#bYOKYjbd|Qyo zJZJ1unY6tY%fJt7ee8U5VVZ{IaT_l4G^MaMS?(IJZ6JTwFDfszoZ2`puTUDCnyx_-El{VuB$9X*NwoAJD zDk-_7@Oca_u{o7A*@7ZjkQDmhGj0dsG_lY3e#lsm@4e^0l3n#OVLU9z5;7)F%by!a z2*$mMj~ZR^?Vz;-?Tb#Qf8Up|C;AL0r%IVI3``Hj9n}M5#M)?A3wgn?(QD}%3qYL)F>s2eHwni8A%iBm%ESRIRF=sC)09tis2bzD+>WkAIj9<*hh}LW`*Nq z<$2*ymEz|{ZpG_7 z6eCVY*(3J=YiXz+6LI!}_N2s7w5NE3s)D6gdiH9e`BR@KYAk^4crC|`*o?bRC4(I=v*kkx&lEgqOs%eC`2bQUGMR1X-~d_~2R zU95U=0Fs7#6X(Hr@5f$2tH>D-&XPLBEZ+Zx*NjC;tX3V&_}!61bS=7QAN7%&>XP<)Y6?)~^9g^4ZMQY6PFjDDGs3*~nwX`Kq_ zSi&fTK2dJ0$%-2D6NSeESGKeSiL@?bj>G921Z#Z@=0aCTq-185KH?*{rQd0XzSu+y28@!6|q1Jaw5C&xI?w0 zWFC8IH9>shqy#Zqzm5A56F&R?@wtW^FiY0ZijpEg%G(H24Q&H*w4i z!o@UE58+$~9`LnEA130P)921zrvXHBZWR&Z@yokLX8|? 
zB*ju2&8&8NE30ve8Qj2%?~4{JY8Keu)(6oK z?8D?)SP#OCreg8O0sK7hY;+lo$_*y$7IoQ;!&>=49qA`meso=!zfV6%h>j%|0z&wl zlV$hfn9!M7LBM3Lq&a0x8oPurxA)uwfI}r91sO~?`kG~M5cg(6XJnHCwK=|G$`rik zZy;dH^47+3SV~_gAe6-8>nLivCt)u6G-xx@CYMV~`)(-lj&xyx7GYE+BOXsry`n3c zY$5ZnPh675>I=biWa&1MEw~j{VF-Af7bSNoV5j-B;~ync}qJr>lQ)`e0(_ zwK^udO9;6>5~D_0^%M&&m@q3DR$wRiqex$*r&mIDg%F8bDU<5p%3-k;e**kpIc(Lg zlTNjypembVi`1} z;;p4O0fEmM+)7&(qNbs31~0m7J$r9OP>1gOf`Llkw%GVHnep*Qp=ng46@R>Y_j}#% zdDW}jd&IeK22s^4XGLR5ul?uwQziusj`u!7P&o)o4~ zHJb~#2z+>tJJju45@RbcC$mx=<8~nqiiMAtO~k7Ff?EL&BO`p@n=?VATpH1xoDSk3 zc!2aS_J;K|$`~ailb<0u6{Fud%GZ| znx*KwqZZ0b?kfXJSpy=-Oe0qgBP|uJ5?Pone#rg;tQTia0ZC~qc))cM+|SQj5hUwk zQp_ZXW!ydug-ZwO8D$f&xWr=c4dl3)#oLct@jLoD4)lA{tB63mGCZLvjMdIb=kM>SHJ11{j#m-?xy}cT$q0eU41&I8*NcAX@oRzHNa->|$V#OO+0AlT%+V zx^D;u8Ol;94>@ zX0$vM19}@9RuxO5bG1%iq}F5SRH>hn=`f#Q$p}*$>tr1cwoFv|Pm7fbPN6>@F;55H zH*~QDL60{;eKn6TEw1QR3i9x_B-9{)_TPY+@n+iA)FVXX4u=_fdQUR!XMRH;R_S>U zEBJ2RJmHle{;%%t$H6m_s8&A<@MdGGI1Jif5lKGIS5XKsP4Kcp2FO33eUrb;VHc*> z3+RFW8(tT2*Tb-b-y+YA4qgIQl~csXgPV%0gL9{`iHlx-rXrYJP$m?|n+h~XuB9Fu zyfU+qJ19q|p5YDYzopU38(dr4$!ciu@MG@?A_z|6Uz}YI@Lo!IwzYvr$;BO%!8=I5 ziy$-!-STyUiAY#WL*g~$N?zw3{O}*UyFa@7b5puy0})IDwOYIg2&_lfAYs#s%ffRV zkc|TH!CXhOKI4bGcatu|I)u}(!bx#?AB-;Tn>*wd&q^c-U;({n-HnObGVq(aO#*VVTjSC^_2SWN%u6_V`4HXy}JcT=hrdRk84@f z#gV=lyaQ#VFTEH-L#Q8V2j3M;qXs4bfNI*1G)ZF(hgb7W5*+iGu(!X?aAe^4N`1!E_=(6erK;oVQ#$#n&v8eQ z$|6x0O0Jk&L`g90{4~TE9GekeLY;}~QtuF<)N}FVb6JYNOPRFJDOLh(x?tA&=W4f8 zGjQA8+ZuWnSoop$P2@wv9^6Rj!|%WU8|H1j{~Mo+8o!%%z;R%%3eCjd`NYdZbqp?R zFrmjJt;7d3fNg1*-@|02!ChDG)K3+3E?ZOF(;xob+B#P@+(7?Yx_jw+4Uw&C= zcGBXdOU^PQ>H9v>Lj)y8tyh5Y58A>%BcSlP;%9}$AyNH2-bNLwYLG6-S-COioX;sy zrXt1J7hZhLX}8ipo7#)4-n>XxNOKjESUrHV8EB8uXh8r%gh#7eA+FjSBzsP>jc$@1 zA3o_m@m;x;)gaks;++r|!wL|*yM(1x@CQa^a8aE0Gf=X(DYIdKulJYRD?gz+dxOz;cn@@ww{2{SD`M_+MH!KWyT?QoLvO&4+hFz($Yewr`LHLwh9%-Rw4 z2p_Pj>=xXc7dsr}9(QF)pW^&uT7tgJZ663yhP6@9Fkf^V*%W#xs1TXN#J(a}O81|q zd)?Z4_6`HV?Y|HzTh~8rZZKJxJsi@XJ0ZX_)FB?33lz}$)9dq6kJLohg%+ZsufFjmqiO4 zpy2>~#Jk}_&pO3_IWVUj1K>cDmfY2WEDE>Onl~Pvm*DMvnLN~r1qjhIJ|CpULOb4= zb^`9CwRQK7q<3{dcKPu*IWdE8USVr8#>x3*yI)-C=-LLx+X-jYeeb3kFw1i zBBkBf_kchNUP|IX4!bokM7=w(Q_8Med-RMz(Z7Yi*JAAff}H+yzEN-0r`0cYhqa+mq(@r(4gGrFgLJUb$NuBa1Yxsez;C z-M$HepV_cy6N|w4wSf5ekq#`}H}vIJdNUf71iZt)SRv_yNU-PtDT6K5%eLu%J>Tkn$xl9~{S|!Zu2AHb z@i9{K63F8s#A}7nfQM5T%;fu|=s)CE$9`0|QS6}=oan109I`I{8!?n#iK>e5`b?u; zlrr+)&m`1Y{@R?Z6jF2m=HvKvvDkAIA`VC1ay8$>!B{Fw9o_H+tcPOfGtEdU$>)w6 z!Hk!REfX1z`HWA$-#y)npKiFRHL2L2+>kae+`pE0kAf>f(7TO-N=X==_uhY5n;2FL%H23pSq6 z852;7&E2(Zd#)(-5WD^&Xh7zZXhLu&%0*tP_cx^Ma)d;Sm-G(0wV(!%!nbjwUHmW_ zdw4{Hq!X)+uWokhN%^NhN%ymyj3A86Av26}Rs15hiEuwHF}H>cD`(P&^=ge71%L^Q zm(P=z;_e;7@x5a7dW?F*M#MeY_O9ocxf zQ@Di>N?iu(eeR@w%QvQ(bD@!$K1I3`T))xhX=5KJrttInS3Xms=;$x;MVumS$rQ)7 z>>$cN!X~m(!lC~(&gD0Nr+PaJTyRNU9#g}vW~x#&<_nN9Tw(XGAVIw4xozL+zVl1| zEIEM|0gDX~2qMJeS7NVUQ;QmZ)+vp%6F;#j@1vDO$iT1)Xgh5&DHtX`2^IkhuYnPQ zhxRz50>pbU0yRLS7nto0*BLVzx{(c zL=mtPDavLC1^q#!Wkz$Ar2UtJv`}hf7mt6(x>J zi2ea6n^+qO-BR`b4Agb}1w*MA0&p|VXp#b+E&YWQMQ9FIZPP!z*Yq<-cEQCRw@FXR zh}oi5$#n`?B=BX1*uVQx_tCG|iWx|jo(pR_^Dno#cuELk+JqQce+>;GT%1j-al5{V zojv-rdlKhCg0i+Z_ySuOJ(xio*$&KXjR$SW&sh#ur~;^N7qOQ3+2R5o*XpQtyaiH|u`dY3s*Xl6bd@N-p8*PURC4 z3$PKY=pZKrv@q_V@*mU8!7FlzX>ReHBiAbwe$j0H`*~9|SkYPU) zSvKR)zSdQOJdKVP%KEepnt;OWifwB%t%G>HO||B)i^bG%J(tBx2bPn0%0msRNiCyZ zHBR-flT3|=lFcN>6bBBl9SPwOn($=O`oYrT*{5FfD?&g>mAVbJB*3cjxHT>a7radq zGan9-TcF0v_J>?eYNzc`680ryWn)jaON}TjT8fpRj>4mP)sJ~W%H9R+B=!KNV^TPp z;#>)d#pQ|5+`ap5_b=lh(CL6F$Tj`5f3`65Km5VnkNMw^{zv`)?tj#a_f-kU z9Y~|KsA_T9o^7vn>-%QD@5(*<)4RWpOAS&@Nudycdj$L9cC^IfJpLm4(tp|M_IF~_ 
zAE8V>is!X3^ba!!hj1g4<<~QYSjEg|-tk$|=3x)<$mjocU(N(=cuB_vQ{orh{! zh!lwhsvbUGbVLsMYIu#bFE7er;}yct7k8W48?10zbo{93Hc_1gRD1XCSGtePmTMF2Ef9g3SE83X*FPNjuPQoF3B?&*1N?0w zPViu9M$4-&i&`F&9s61btRdi^lX8wEp2{#r;o)VkTS%)@UzK6Hg)oM^_8Ph&CG#ry zm1m&);-|ZJue+4RDtU4WqJX412#vL33(|p+fQw zg>OKBVrgo9DZOvjke>()DGj<0e{}cr?)`uAGn<8kwa#>gSUXZps1HAcy9%wwrLP!b zY@`Y=>F}a$S~^{-Uvpf1P0)~w`6pzv{N~*eWuR2ZP-Po$X4s0rxhBGW3nSNd1N_i% z2wn@ZeBN|XD&=cE#w#i>kvmdFcz5>~an|a`lFn2c3IY-R(nG9bRpXIiX0w^5my1-4jBaZ|i`Iazwx~J0uV?TW zVEsbJ9n!6-260h5;}SF$`|#E<%s`p|_l>W#l18<|@h>*zjvD5#=1kq)-}xD!eUt&I zv#d6{qg8+4RPw3T4DS-eH#gwz6EjnzU!S1#nv>JV@FST1&azVd;VE$=;{CfMISUPEGS}tZTYI&B zVtmNg`3vqg^T~okTc|wQ;urX*`-OOXyIS56-vbm8KMTSpl(?z8)haWvDq@RdV4Gyj z$2XTU?-bgD+gpZ7zXDI8OmC=FC~)jsNwSayHBlLSzPQ!GPI?veoTqe=A@lJRqBKwZ z6v+p9AUJqFcyeZo=nuyk+U8A{Zu66myN5w9SrwBqMoJ|r8hoOa$7c=*Ue?}p>n9z` zxS#}Yu16Rd8Vnlup%qU#EYa((hUSDz^tN{1uxLJ8*#nnoG^+}5e(rPh9OI$vktQAJ zzXhVuJ3m)L)Y0o!ZxU3!d#CP>YV{Z-lqF#bqhwI6OZcYs=F4XHt2GBzG!IbHIPFw& zJ>8bigMw6Ssq?|Bnu$K;npTtSlTYcT99#(--sH$Xyygc%c0sg8#Wv!-tqH~TOne$& zr~Y`B9L8lzzqI+Swb5uR(^5sdw|~vjrw7|2+pRRobeDv<%0%42watG?t8C zz&Q#<9cO|J0X#S@-Tx7~*AmO)>+E7mMs>NSUr05f&_A{PMN}wdR zo*|F~hz=4ehdL-p&Q+B?7GNL2B)(r_cL*o>mhsq{wxfgW!6Xf zfkva|K)hR`GPS2`x)>FYMY;Z3(31d~BtZ>Cyu$ocZARQObz8Wtlgs!56?=qgE>rzW ztDsl-e?NNvQ}b!`?#FcB z;Ga(aG9j^8*$^VMn4pyMl5H`eo^mzysn{YF>a^JTW=Ls*^MFjw@-6%fo}IS5u?Q;g zkTtOg{dBNKM93`PS6}#ZcIijOg&F#?_s$LlaxFt#1_rDQGr zHA*Cq5HMVdt1+M_vj+h#N-|~d1Z`S0&zwN%<>QZoGIR*yao$6#+^pyE3_qo5^!N*Tnx25&nVU{) z2s61{V?tLMfqlb0j#c90E!?^wDRR6GuMDDST2iqpx0wG#v&&Fz;JHhI9dX3of zZ{^m%%;cQ16YR?;fP{c=yiEiS4Sq29$~GidN=e*}=IgXkf#)^cNVN^&pS%+&|NFlB zUdj=UOWJBrJNG60sQb0=2AdEZLRd3Y+VWYhpOE>i)ElQK0;x!nE2f^T2;DQqjs5Mt z(JO%Js1vv9x9a%u-JhC!@j7HVmuvKek~eWP1jF{`+MDy*P_#_!`t^4Dlsop(+Qb(ykmg6%)ElH-sS<=j+q<6CuMA#y716zsA^eKXjq{| z%Bs^LH7RjPw{Zc0z?lUYU(g~#D7QFJ4H>a5rNCBE)6)rN=BwHgOdl6!I+^TC?pQI^ z@s0+#9u{$}d^(#_s$8TtH`EPRH=S{@J$mpkkEp#Xkw1gn69g@ zgp+Z(sok<*77kNwhC(P(G4Z+Iq@MPZ%eYd~9}H8f&`2O@na*puth7RLJC@u=urIV5ab~ussQ=4%00k$Ve zBn?qLBn~r3BA2^P-lKsk1c38>tIoU z9q!ytSDGfasnstPa`Q$&MF?sX0_(&7+ui+F{}m5k32FZVYw_};7_0cPaau6aAH&D7 zRfS^*H$G~mgXFTBIk79DY5c5icM7P-&sbs@O0&|{@N)hdExHV$-z4U@;6pRHpl&=! 
zy3GrhNMqw(-2>`MgGxu;W>8qMMiOFR)LjQhiPm|ymqshirHC5@#b}m$T`j;i+qSlN zLj`c%y^<454I*Fug-A@$!oT=Z{k*Q{-q6 zxsTM;tSjqU>-DlHdZt{JR1SF~g-VIMxN>^``BJZq(d+Yp?g;j3ybHRKWLz%-5T<7} zL$YrE%nlaaC^3+-!OMlxC6mH*=$eTkx`%8bgvSp&s#W2j+P&eEhh#_0j)K0iv2y6m z8VIIPdkAeai3K|oyK}ZzO~HTw_<^)cz`Yw3COm|#mO`dY8u$Q}OCM7i;LZhP1IdMlL+^CD^2v>!p7@m3tYV+(QBf9f_--p4uD{#8n2_!UFa8 z6}x<3a)YsDC!Xf4Rbe|=F*W@^67A`QupR#U@~WvH~JU5t$|%xF*Ki?z2Qk_O`H~)d#Hf&x#!XN z+oa*-FH;&WkQnHZps(g_m4XP=3AIfYNNPILs%pHwA%TOFV&y;JTuay()}m!gd>pv) zRG8)X8S@FlqK=6>qv8#tPQm1$@zW+Vya;9XZ@}WG(A?_Am3k~1;Idtxe#;i>RL(Lw zzC){^SOnSvsp^8|0V_1^4`y$=z1l~a>Zp?v5Ry-HJOgZ} z4=EsNG&2bS?32&Ha;{SILnNEmwYjwnX8Hpgo|^*5=}TC%FOU<7;--A{q;OjVnIS>BH+@}@Xo zz(N_anYxtHjm~rU+BVa}Pz>%tYAD(!aJg+Twi;mqA5GzzPLVUR*d=L7MMaotyIpD9 z0IU0Qt3lPapO0?i?%vXVV99N|Rw2B^dK@Fp!jgDY_P@}#k4!IR(v%gI1kJV*RH+|i z;BWRU&hBfDiyUY@9kPj>V*F`-e%WPoLC+ayaVy4-*NW`U86%1i>;zfXy6^ka0v?)` zDS|696k8`XvrYf2u79+?8@#x;?qTw0nrm2uF4;M3Wn`tVd4qw-Bnu*9fmQQWL4ec3 zG!wzMj*Bfqzf;1kzInafZq+6PDnLp}bdME}1=EnFYEEG;3auznAep^q48nuhk=v?2+ZuU*(hQ3#=!a*LSt$b==NsX1ev zs>A)Y*oz`Px*cvL`pc=CC2qScUIi5Y|kdMniHb+ZR^s(7!H|aB`qBt-q(7aVifwkNRtieq`TBoJ2s@`NQ0=^^zcV>`vm(yE zZyRc}P8VNhw)y-Pwve#a@#Og#0(Dj(3MYu>OpA~sc{RRGs61(`H|mWX0t$RdJV$;E zLOITla6jIY5T#xsLYangg~b)5pBJHYgDAl4QFr5b4w0SnG0@W*-$i-qV1xe`?YP^; z_wE^ULE&$MsQFQcg0XW=W6d9a{Ev)@{L46^c0UThfB*m(sa7Hv?;`4tGe;4S3D{r2 z6hg-I3DuQ=9^>@5j41zj2^+#_h!vm}1hnxIK~Jh8sEFA$EI$hW;v6X^t-{Di5&oluy+D zK*BWgV$L@njMhR$2!3f2NS@!86cjO9we=7ZDVU6DtAjMz@i4jgur-N?crN`Z;fWcT zKy#XKBE_R%{KMc?fp@H;Kq)8hiWpjPS* zd2N+NiAIKqWPH@Ffz)&odK1TGtN_gC`~PI3C?8Q9Mu5Zu+ZF+3mUM)KeV;AoMcm67W-Vu58vn>yF~?0dP3- z*CYx`=A6AL44}&E4N4*nhZQEW%Ah1YLz6Lj2>Z1_Ud!@=fA$x*#X%RAvonZXl*5G5!@p^)kdcQOA;bgOvW2y9Y;@g(Ab zz`1L*MEQg&eN~~tO&+HBTL%bC#-znAI8oOK0$waQf*CS*VbOA2bmYSdj-IptUlCX7L6T=4tq zsG!Nml-_@=e@g6Ubf#x0l2%*t_O4+&Mc0BxN1UF?azxZ(a)SF73sHPVV^O^&*stoM zR%<$%B8$gG#`P{20worxk+v;c8LwIdAP5rs1lniNOukKOYEPPcx$FK$i!x?L>S!qp zppQE|E0jJ>nl1&Z4t593A(6G@Y6%X=FJf)TImIqyJIA&URg_(aU9cUcV9TIm?Pw|y z#uw`*M%Su2ZF9#VBtaQC9pUzh5zXY>auaW>Ch$S>}owC}o;Pk%$L7VuIFT zHq3NFuD2#fX${}K`)2p;I9>|IkQTS4SReVzR%>bz)DiEN&^HaGRspdWIe386Y3>6i z-7GYSUwzR{Y!-q(!PT;?z86b5V9#%sWW+Ig4?37Ey&-b+l2Ixlsi@A!Zj8rKeLO5# zov+0^6tLlFO2q>A3!Ks-bK$Llxo_gdhjoD+0uUtx;>)fuI^>f4%{U(3Hdc|J4sj{kI0QE`L=~vWe>q|?8DvQ}*9x7VS_p+h zSK~NI1xt+OF^ zCo;Ord9Afrf6Xdl;?3`?Khx|&fll9GA{6{C&EOetoso9De6DD1Pk>cHj)Vu=2NjfhLgoRs?=AkM>s^ zG9V8pF9y98^{$mDAe`%0*)c8`BV??&PgHMD6E0Jb$my%HdPPbu(#kkL@6L;#IAs=M ztNIIB^R$-Ms$OP%{likRu!6CpDJyY66c%w_Sc9>Djdp<7OVs*+kD#|qU?nQl!ZAoe z(}XlbT8Aw*9VZ4tVuI9EIbaEMucP`94cOvV-K}>J0tHJWuI(CuN!7%|e(^2@VJ>xb zVe{lgQRpvmUj)k@#+OKO6kGZ9K5%QNfDHkCYN&*?ps|k%z8f*rTFSAKSR7|5T%{}{ zi=;1Uv>l{XgEk6iYH^EZYp|olfdc!ZaO}7!tXsMO;<`Igp+8F;m_4LNGoDOjhr=m) zR1OZWXyMwzQ=dFSy2Tlz`V|!|Y^l64s*YA1KIT605rqM~JA!=~6((edt{IMw>$t34 zn34Bm(Zq6z9Wn%|)C_{Q55b{a+;Mpy*)5HAGX1s%S9FfL1JEpJ)zByQ_OVZb3 z*I0&dSp@IKg{=4bzeHTn;iQzMU*l__;hGRjMj>#hm_V|+z|!fD2`$_@L?&g>gWXx? 
zJkbx6R8+`9H*k}Mm^lz2%Os=-%o;%uxdQ#Yn81-yahtfXLF28 z(4Jt>HBJe)z!RmwiP5{xTUe75OrFoT<`8FLQVt2xCe39dwnwgx?_A<-dFkuiP=d=| z3mwhz&l~f^_N4Q;#`n^jifvs*<#A-j2We5(}@jB1zOT{N2i3f`AA zM!ZsqIx-!xyQY8ASv-WVnfM!jl&(C@Kaw5esM7;pN0x%>PsbDSk3^~vSoAz3@Inv} z+>BsJ#>GD{N28JCwRXNktnxVTkUZlO{XYPrD{hU2_6G?1^dt@ilBd=ouhs$gj3%Wf z0SfUraiTZ)EcpX@XqVMsw5@feUlgo#{O0(Qcr8Ho*q0bv8OPWc{8B9Z9-yaDt9A3+ zX%#xS3SRrxx4LhA%YSQeK%e%#83}L3>r@Ct-9fnqLo8AxSkR)yu9C{KR{gkh*ooT- zLs`^;|Kt`GEI^M5CvD2!Boi7E{mwba2OZ2a{0UCX-M1Fz#yJj__q5mbcGqKhoK$Hj zMKiv={7Z{-m?a{ZP*_(SRvKsRN-Uic3JBtGU7ba?St4t`BykXAM7m=jS@6hQ5E)} zH=$xjjGNlO<4cLFWzK+si)JKscMf-yKGUFTIT9*?y~*7XE(twC%eBGvxs~NrQDkQv z-ZW!#zt?3ulLHkO5vw)F=@u2L9AP{vNbK15uB`r&5W+tt^qpE-EeL}`G4W}NC!0&XwYp&$_V3v8u+=0s*V(DeaEsR5?#ByCRrEBOPl7bDm5!F@q zI+#n@0+1{f=^{H9X7(qJz_kG6@|Ny9Cocg^M#KbVPkW1GbH&HmXC5=+p*e`qZIzp& z%l|nIJXu+g7{Lg@7h5B~1UFn(p!@+B5O;a$W~@tb;v-qV_GuJ|gw?f+2k9Q{@A$+S zHc^Y=Rg`*6=ioSvr=FNAl;ifsdQcVXz00UASGzWOS-(dJrC~rctdi#t(aEWoO9SDt zqh1cOLWa7W^F(li*3_AnXcLU7W)r2a1cNkH_7Ahw#`oMB>AR&jsm~Ws3Ud{6Sy>PN zGUE|AnK<=8!IVHVDM7h7#2UX=dxhUo5D5Lt2@AY%+Yy?*tbXZtf;PrrKq&Yq6nz_- z(*RqrBs3V9OMhqukFMl<2(Fjbf`di=*?LPxV@apwb z5q+3nDfeaK>x;1>r}fQt^2_T+@xtDG49?ea+al!LI86|V_OJL&9q(8VG2VK z1-MN%7R!xepKyrBg{WU8vNR1V!JeL$v3Er9K!vAB0(0Ty4IuVZFA8G%rxte{7nYbR z4{O3{d~6xEdO1)AF6gpGaQNbOQp!BTc{h*Tn@><9Xe+rfDV?o) z_IiAR&uBJ7t!bF($q?l0z5^B5OLjlmx zw!sltRTT(;FC=tOUfz3jNQl<0uJFgJHjk(n05|iuZ+Unj7alrc*RYncW|OlOET_jz z{E05kzZh&y22IAu62HveTrD~m=jmx$C5Qtnj%G$6S-84!!XwZXB?a;AZ+GARcD(U9 z-!`Kls?)~BG8<)#*wD;)sCxW#cR%xLkI(|uC|BZSZ_~WILWmL77vD!P13pXmG(g@| zxs-a9D7hvJmI;h4ZCfCS8W&h1Hh6{-dKnIZbsAD8eT5;=qS2{WaRRN%RXKdq&EuGV zz57jnZuUb+8k8b%0bt#K#D%&o96;s?CFs5soJm*WlLXA#Ahb~dN-knFLev%6&PvOq zH%xMM{^4f(#jEn+LpAXl(p}0QWe|j$pM5|mIbj_rghY7C#Ux7VGwfX=C#R!=4rQb! z0Yl5NDAz`kWr|EHH5Z{u1In8PeDc}%##-A%( zgTH=L1R%;-MTN=hSX*;E4^!x=5?S$V-t@6c=;00Q@`7r&6i+SjQM^x#C{n%xboyh;dO9pZj6y5-QLK$xuhW!q*J#Ak%W)~U z-a&Qn)C{b1qo_J5fpvP)5Xd((e1)8kdw)n7^7y;e$mF?3X1Txr+z2c&1Nk1$HKNiut=|tt3Vygk z5JX_1NQ0ovH;Pa4B)1-r2H#$bKgl`H>xMz1;9rrI^rL623_`YQ6Igv*RBm6bU*T?ePNU#KB$u`fthb$f#CyA?9aT$F@$JZ~_N`j$g)55Gx5fH_S%~Ckhr?_}K z-l2-%KMuXL{u236iFZOHQ+$dHw<=SiL`$KjGG;3HhzF|Ns^pRtNggbBwI!A?EmS(C zsi5+slkyh)C=IBKbzyTbIc{cR(UkPIHTkl?x6;P2SEVr!zyNndO|co9pMb&6anmv^u{V~^e64@IicHe}h^XgDqS;~_cRCg$-|2!0LO1 zT5I^PN}g8|%dr&lakOoKt*4o?QzLiK7x^=4Rd-@)vfp>90Tr!WaIgWk*tnQVtEiIc zB>451{7TS!SD^@`_CL=+aLhKj=YjarH@a_pGpeEQxuYQ&x5<$*ieejbHWg=!pRw~*1ywRpw6tuHqoAU?F-+W*L?b;ggM$}9E`RcI z;pR5Ql8W_I9!kMm^7aqt5Y<=Rh)E6q&q0<{)B2n%SH{L8z+t&c`&D@~)x_;}Pmi|z zOolLHGh+2x`l4qQc^1sUlv=}qW$JM3m7%)i3DTr!6Xg-cam4BlTfJG_7o2(j^%>X4 zYe2ISP=MA};($+BfNxxjgknv^J=p2q6x(z$9j5dfNLe9fi4$bzctgX)&s_8|fZr0i zS}mdjLy#tMFlu~!haf7-Abm2Ke$$MaWBq*9Jv9+g%y5SgGAaIAw~ktOgwR7Cm?!8l zsVoEmVACD`6don$E?gQCHP*x;L0Oz-<5b`*pLY*US%jSiH#!p0N1ZdHfdS~rci3Iu z?7nHz4?z@2HFyou#cbRQMRp>LQNJZP&u|OgqlA8NJd>b%hq3Sjxt&#g1N%_^JA+K9 zpMruq9FvziDbLsxCEs-QPLWnIvy=1yfVB^E&kqAG#B8W3hc=(Rc5Dv-^(O zW}v(isDhh1@dnfs5!;F1hj8evrt?LNRO!aT$BaID5|<4dVF~0O?cgeUo{45H(W|zf ze#=xQZ*d8z{XJpvOQ2t+@NWt9n>unE)r~sd&jq`v@FbiKxp+n1ToGeOiIBI;tJ2K> zVkvYR8)iqs@~Xjz7ZB%X&Z^J-XmI@&LF&w#N=~+e4;NXWhu$R_PcB0ohl1QC)~O#= zjNq65aE_^ZQ7-7V$`0zSCi+5_As&dy;|e2&gM?QMlbB{#s_P$4UWas}Na}&NM;5GH z_?Fa4VB(kEc5P!rr_>f!Od+)o4ynemNfO~8rx!09>(N8+PWIALu>)w~qJT80QjxBb zc2&4%dT&a{h7cc$=b)Iq359U{(^}WxHPd76F8Y^OIk%y*49=sXU#D znu7Aq{CcS3_#^^UeQ+9?)_Y87N?6e@WFt3 z*50XV&r!m&1n%BA8GKG>w{(sOjbH^$%aA27yCt_Ls9Cv#D%A%8Y2OTu)SA>PH&D^X z7U2fnD08Md)m?e>(8pUYX7Cr<`@`fuz5D)|>d6OpD z3O56x<8$WzF(o>JNN}QIWFw7 zVFgG-D4o3Z7?N;#G4#2E<){|vu!7#+5+6B#wn()9XYXRzN-0WZT&u|8nl75OnF;Q_ 
z3-mz1Q&o=3i=tpeN8aC(?^QlZNMlLijw`$331ofm7x^Id{&D#*cr}54+P+V@L@Aa5 zSCQirVzdK++JhN@e2lS&@m*4>Tm8zXpn11&oA~2|wp3m}Xq4zF8gJHP&2giI$H(Mq zipQOk-()^O=~6nK)Jg6q*@jfcqTgc9(mV4j<`24J71z{0YGRZY9rr4ZX@mOMb(m0|iXi_~A>OyT&V_ft5|47NT|HX0c zH??=q6#;T}3-RCMR)AdDv!f$MqOLZH8($quvV7Dx&=qN<>vWb$ijIb(J27tXtew3k zbs#~EnfqxB?N6J(@}UudkgEW!BxMP?HcCds-DM#X|MQz)VPfC>Ds~4_L+#r_`8=fQ zf#V4d1{voGlfu!h?$j0v5BW?74Aa!#jVDqsIkIe(Yblw(kAV_v_g5j;N5XGahwLP8 z2pAz(@*ca4f?zjRXGHFTIL9wi8k@^jQr6*!11PLJexlASI7wckRu(Y}a`>$$nc5j4 zYp#tOEzb#(k*d(xZ-3f+{dXa}L>gRX5U(j!%(Gg&LXgNfNcm|xVNB&Yjt*r!%}*$A}L-={2?VMb8Ar$cvlm73AdX47xAyNsc=O< zAK$cqm>9-FU#j23PM)T>QJE#kd{NbFX;go+@X}G6h|_^i%pK&qC%R9D%nYDlETHJM z)WZ6N^$>Z|cAgpr8nzWqv{h3%FT&@%u@@oq)j4m~}IH&UNiOg=#6_!sPf<7#44#@^9@&SI-}nq&EA{s<-6x|=bH!ZAvxrd2rf zfykMK@uZDWt~2Qkw*(w@kz*1LC7fSTn&R;V1$hYt8*%BUQVC#ap1F-jUc0uy?6+ztAIN}&EIUO|Zw*wQl2i+~%QpxAFY_mJ)$B3cIR-8yt*jX}8SgQiUpoZdQLoVOg-08)7 zVBKH=VZ!5a{CzvVOIRliBPI(R-lsG1AWr4?@fvGkE8j(tlM+knc}Im*v5(SrrlP@z zBohWdtVTf#k-)ye5`x{sJFZVE{*4ROv9qCKU2KzD&Q$gGF+9*1AUm7s^<-=DgjZrz z(44j{87`2IHMhD@Ejd>9%}cS1cCPpYP?omcbHgXD?Sk+Tr375x{@|VF+ZL)D7%ioB zX!xhBg;HyzU+#-@eC*jxsA9^*`hb9O6r<}c9BE!|aZ9~>;ys_Ml{R2el>id=Ag6WR z@m2toh4cBfDyban;WTZ?g(UHn-k2}sY)o4t2AjB%cG#g|OSWRynY1VW9 zi_BwPnBXaylGoF%BJ<=-4r$PGpfcqU6a3T+8$%GKvzaFvUP0~|$#pSRU`O~VH}R}e zCUu1pB5`tL@h6-0I&M>W=N)j)x*0@{T;105?mlgv5B#tFW?J*I*y)c$1JYyUiN&4}dQm&`q?8Z^y>K9Di$_Lw&uQ5jtM?7GMD)N>m4i z_wVsLkq^>l^ij^>k)8-U#7R>sGnS`DcTjkfuOIRECoYb9m_x$EJY+dd)EX% zuVgrqdSWu?DUX9kDvr#weuH_^Y2h-KOIP4HS%Vo`rdX^)X!g0s2>C#{1R31AR+#r$ zJ&FQ8{jfc3d)l{a)lSuIK=ab+!amf8&EwQF617_{?6mF08RB3BT1UTNFDnLXcI8o$ zn8LXyjfjDbO~1chsH#NH+}rA#N-&B7ZDYY6 z(NqxLZuTP~58}OLA{Ip#5Es+&>kPRza;sYjl7Jtnre`E1qAY)m@Fl&s6VscD1ukW> zw{;G^Z<>~d0GK@ z;*bIhoF_+Aum>mQ3tN|iDOS~XW3Mb(t-S4TZ#4UxUePrIFD#l@z5~Dm<&z#WAktnq zkT`iy=@a@O+1AEx~$eVM3T|G;7cOs3RYd8-e@6r@7@a zod_pErHeN0;Z+Vy)$4B%vIU(`F9kekb)YwHu09YByM&I+M5({j0_bkVw*YNzV zY32~?0|9A2{y%N#YZ$i9>4?w&u=$fevzXJmsNU*@OU~&%n%=}-@ByF>-M_40z)sPm zWC{^|zjoSuY9oDt3n85@&)#Ddr@hS8?*;-G#NsG=J)Ks%1!8nH^CPJMpg)wjdfBJv zrdKRQ66wyMdD;2?$ZuF{y4_GygQlik!5L@|8*5I78}}?G2{pY21x(iGxH+gaTEaIp zOZQm}yA|HL&YZd<9|vsC(GkWnS4atha?J`s6O~BuFiZ>KM)h`;x6T*dVnxUXPwAMH z!W1G@JZ!AOERlbA6x6Z$Hv8m}SYRZS1}%AR`>E#|vN*q~lDOS}*^E4w?dgpYMg$dJ z-A*&=#3BkCt$8oNNm2Sz^$IYF`U5bitUZ284-rcU5d2|Ttq?z$k`e=BMgXiLO2yyR zMFuCx4ZEo@q#vQ5crz#J6Flg7NRoiDR|2D#U_A^gsqWn|S+xplUlP`!#QYFqMM6x4 zppF+=nPT=ywZ?uT9PiO+KNbwIfDWlX%{_I(Ss1vaRXN8Gyht^18EY*qmiR0lB=XUD zBH+Ug#(F0$IB6MWNs+9fwyV}=qn8X&?Q~Z$> zmz<^e`S@7=3|$9Z%R$>S7=)n&>6ZQ|&9=?+ge>=P)c0astS#yxcCD)n3MxAOitYfE z^^>HeIAAd>(CA@fm!A~YBTI~2ae@-*2R{J*j-s0w3g0=XCb~pusluN_%^kC1<;MH) zCWx1+Uf$Vg_H2nqzyrGfI=)FiMH_n6f#kKxv^%aFNe2u}QeD$0{2oFS(KJ*5A#w4=&KUVQFxLML5G zr2XN3dtspZ1AAakp99sB#`8XN!_BDgwdB~PvL8qQl+LnMK0csu(#7x(Qkb(qGZj=V znZ{i!>VLa?iIzpU4({t&{N6lW6 z6qStdqFC6+#4Zo+z3K0K0(&fjBiucVnyn~nxr69<8jCftOh8T6* z2ZR$gsj|ub?674)pd4bTnA2JO|D9%Q;P0yJQBaWSr{csWv>rtY632jJL_IWmZWx35 zCW2O@{8Ze~1-8%)tMO92m71!=vre^FLy$lStG)=PJwE5-v*Zb0&3$D}>}(%{opS~l z@Dx)>djc5R@g@{a; zC;|Y6OpB(#NAL7hK{#bH@VFDjVjcV%kS;31ckgYJOWRPa&;|dd}Yqo%ft02N)h3daA-page$IGHcZASQ!XARPNaXUC{_k(e|+It_EcUISfUt_d{amMp-zNGi#eW;$6yB>B! zZD_5TDDD(>GvBAqQH_kO?F&t|+2D8wx@pkNN3kiu13TOMT#GV@ZJmGdx~Ub+^J8{! z;?_&+-2Gg+M+6-k-iID4HAb#nk@9F&$8v>(%b;oY_g%lA^Ns&xA5k#jn_m|E@Xaq{ z7O3^oxhh@<$itbBArIM?QOc~7n}P`JaF4Yu!LKZem$WNB8e3GOC0XK3 zwY@@Hm-ACMNsTaMa<1%W@cYtYsG#97Vp+(W>P=IZ|JaMwUZj=!({%+E6Q$U}+QTk! 
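To make the failure mode concrete: tail-free sampling (tfs_z) and locally
typical sampling (typical_p) both keep candidates until a cumulative score
reaches the configured threshold, so a threshold of 0.0 is satisfied
immediately and only the single most probable token survives. The following
standalone sketch is simplified for illustration (keep_until_cumulative is a
hypothetical stand-in, not the llama.cpp sampler itself):

    // A minimal sketch, NOT the llama.cpp implementation: a generic
    // "keep candidates until the cumulative probability reaches z" filter.
    // With z == 0.0f the threshold is met on the very first candidate, so
    // the list collapses to one token; with z == 1.0f everything is kept.
    #include <cstdio>
    #include <vector>

    // hypothetical helper, for illustration only
    static size_t keep_until_cumulative(const std::vector<float> & probs, float z) {
        float  cum_sum = 0.0f;
        size_t kept    = 0;
        for (const float p : probs) {
            cum_sum += p;
            kept    += 1;
            if (cum_sum >= z) {
                break; // z == 0.0f stops here on the first iteration
            }
        }
        return kept;
    }

    int main() {
        // candidate token probabilities, sorted in descending order
        const std::vector<float> probs = { 0.4f, 0.3f, 0.2f, 0.1f };
        printf("z = 0.0 keeps %zu candidate(s)\n", keep_until_cumulative(probs, 0.0f)); // 1
        printf("z = 1.0 keeps %zu candidate(s)\n", keep_until_cumulative(probs, 1.0f)); // 4
        return 0;
    }

A value of 1.0 keeps the whole candidate list, i.e. the filter is
effectively disabled, which is why the patch below falls back to the
llama.cpp sampling defaults in default_sparams rather than to 0.0.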
z_9!#($_)xt*k=K50!Gt+pIsCuJz1{j>pN~k*Z;KHjWHDOSs16yiTt^8=XmA-mSuzW zTR|84QXkcQP%u~NG!wx89>lL5^dm%;TRRG^C{kIMh zeW>jzwz`i(otgy#i>V$O%Bg;#xs>oa; zq$Vq7SriMqOj$q{n5CB0b&QT=l(epwq$M_##&NNwCo>F3Oo3c4k!Ji0f0gh!hNzrk za*WYT6SOCMElj86sSAgsTz4z=YF%sMkWTlRbM%L#1&&;_aOZmAblx7L+{3qsU4o z=6R`J!Wo#R1(*=Nu~VdRs=uh(R`9Ci-RTC?-14SVTH=IIe73SdaM(y^FvbFQ&~FBPFZ)m_Y??G&RrV!41jOT(7huDS+Q%<$ucJy(KIJck=u?!6 z-yuzwI_P&WSDDd>*K{_gkxGKpC_Yy=mXr{~9?~~wr3byTV|)@)hgli)!py2e)AWcM z5-)Shf#-f3Ot-(b2LWJwmf-32Ldcg#VeKFwg58fdb;YC!x!@4tYqQ-IK!&kd-E6Ee zzpb>J((Lu)SQ+vacv}?+Lx7fxowrU6-EugVbv$D(++r5&I&ay$ZX zLrF;@mTvfjPG%ciq-Mq zR2lLj2>$+vpV!5MNk}KX^hV^n1Wz>zfW31#kH^DYNoV~>OPq$*o%7?l1tRA?ov#Awt+1^zSo;U?)b(uG&m>P^3BULih~VOdqQ8b{v`WYrE67osD-iQ4}x z#(Ojx@IsZEgw`ZjP@WT((GmVU`E~oq+IcNy6H8^h^bfiMMX*sx*^B(emV5i?9D~m06x6YOL%00j3#0;fPTd;A-C`Q1+YEGE%@M(osfR8n_#G8U{1?5K(;JtlG)8MwR`bvlA%>q z5Qz1f-kwLVytFo^Nn2k0*iu2_cdI8A3nPG7B|0IM%md?_%ZSs4){y2B4$C``@QF&$B(B(iyc+_3STMn@SWzJhe;xkhJw_G z%OMMrbRqp#tNUNN9i@B_!nCY|BdwgS23g~8x*j*qCc&Eu#(naTPE4g*Umk~7{s(3ZW^>ceiSL2(sx(PD-Og>VsoRbZJU1v;jee6qOl* zXe*Ejm}jx7^u^E-u8A{6SsvJ8Oh27`84ReBkjLErQ=tHUHd-x_Jb>81Cu5oJZl9>w zAl?a3U$lmg6c1fUxyMA*<~(1>cRlAa7$V~fSl+3~`66b2t($nA{Rl$%NTjlV>P z2}&1=U5qcWWKazcPD2>k@6`CPFn1^88isIBbs}%hMb!3z#KYDVZ1N7+U7z{gy*^=l zk7~@Ao^0`l)rgHWfAsJ|JS`fo_)Ufj0+QJEyc-w!8mo-u#ar0(=qo z{$=WA2ixRf&>bxiPvGx&27`E^93iplV`a$xKJui_!CH}Ih`I57#i=mfD#c&P#pfRF z+h2eEV<<`fwsI!3)T*olW~d&L$|+TU8praYK2h=h%)5y!%hnPV#FOn5kc7(4$n_jhIZumvYzd11adN|&ry%UhS<((0vpH>zE-p-Q z!PClTnZkMbx~Fvxy8DgAsR1ETK<<;pEq)NW;2EAJ>fu5?6{SGDFLc&nxTSJ9v@b_2 zYpJbfcj$!^P?5Vam&zI4Hmzf1ZAbQx1UD^wq4x4Q>}IZg;tBeHYw@OmB6xqh>Z0UeI?M5#a%!S;w|E! zzqF#J3M(fl1nZqq>$2i_sK!B9sL;xKQt3tV=Bs$9?$3N}12{}B!UZcsjwPq(CW5=- zrz29d6&uh!4$$zKhF1v>UN9YESCrEY%-^t~YK+#u*DIRk)wlaga|g zmktV-LM2w6K%~ujPY?g#p7(Q2YrqTC{?)_d Date: Thu, 28 Dec 2023 11:20:00 -0800 Subject: [PATCH 310/859] Fix OpenAI server sampling w.r.t. temp and seed (#4668) The default values for tfs_z and typical_p were being set to zero, which caused the token candidates array to get shrunk down to one element thus preventing any sampling. Note this only applies to OpenAI API compatible HTTP server requests. The solution is to use the default values that OpenAI documents, as well as ensuring we use the llama.cpp defaults for the rest. I've tested this change still ensures deterministic output by default. If a "temperature" greater than 0 is explicitly passed, then output is unique each time. If "seed" is specified in addition to "temperature" then the output becomes deterministic once more. See mozilla-Ocho/llamafile#117 See mozilla-Ocho/llamafile@9e4bf29 --- examples/server/server.cpp | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 72dfe452c..c5035e202 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -441,7 +441,6 @@ struct llama_client_slot } images.clear(); - // llama_set_rng_seed(ctx, params.seed); in batched the seed matter??????? 
} bool has_budget(gpt_params &global_params) { @@ -921,6 +920,7 @@ struct llama_server_context llama_sampling_free(slot->ctx_sampling); } slot->ctx_sampling = llama_sampling_init(slot->sparams); + llama_set_rng_seed(ctx, slot->params.seed); slot->command = LOAD_PROMPT; all_slots_are_idle = false; @@ -1215,7 +1215,7 @@ struct llama_server_context {"n_ctx", slot.n_ctx}, {"model", params.model_alias}, {"seed", slot.params.seed}, - {"temp", slot.sparams.temp}, + {"temperature", slot.sparams.temp}, {"top_k", slot.sparams.top_k}, {"top_p", slot.sparams.top_p}, {"min_p", slot.sparams.min_p}, @@ -2437,26 +2437,33 @@ json oaicompat_completion_params_parse( llama_params["__oaicompat"] = true; // Map OpenAI parameters to llama.cpp parameters + // + // For parameters that are defined by the OpenAI documentation (e.g. + // temperature), we explicitly specify OpenAI's intended default; we + // need to do that because sometimes OpenAI disagrees with llama.cpp + // + // https://platform.openai.com/docs/api-reference/chat/create + llama_sampling_params default_sparams; llama_params["model"] = json_value(body, "model", std::string("uknown")); llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt' llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); - llama_params["temperature"] = json_value(body, "temperature", 0.8); - llama_params["top_k"] = json_value(body, "top_k", 40); - llama_params["top_p"] = json_value(body, "top_p", 0.95); + llama_params["temperature"] = json_value(body, "temperature", 0.0); + llama_params["top_k"] = json_value(body, "top_k", default_sparams.top_k); + llama_params["top_p"] = json_value(body, "top_p", 1.0); llama_params["n_predict"] = json_value(body, "max_tokens", -1); llama_params["logit_bias"] = json_value(body, "logit_bias",json::object()); llama_params["frequency_penalty"] = json_value(body, "frequency_penalty", 0.0); llama_params["presence_penalty"] = json_value(body, "presence_penalty", 0.0); - llama_params["seed"] = json_value(body, "seed", 0); + llama_params["seed"] = json_value(body, "seed", LLAMA_DEFAULT_SEED); llama_params["stream"] = json_value(body, "stream", false); - llama_params["mirostat"] = json_value(body, "mirostat", false); - llama_params["mirostat_tau"] = json_value(body, "mirostat_tau", 0.0); - llama_params["mirostat_eta"] = json_value(body, "mirostat_eta", 0.0); - llama_params["penalize_nl"] = json_value(body, "penalize_nl", false); - llama_params["typical_p"] = json_value(body, "typical_p", 0.0); + llama_params["mirostat"] = json_value(body, "mirostat", default_sparams.mirostat); + llama_params["mirostat_tau"] = json_value(body, "mirostat_tau", default_sparams.mirostat_tau); + llama_params["mirostat_eta"] = json_value(body, "mirostat_eta", default_sparams.mirostat_eta); + llama_params["penalize_nl"] = json_value(body, "penalize_nl", default_sparams.penalize_nl); + llama_params["typical_p"] = json_value(body, "typical_p", default_sparams.typical_p); llama_params["repeat_last_n"] = json_value(body, "repeat_last_n", 0); llama_params["ignore_eos"] = json_value(body, "ignore_eos", false); - llama_params["tfs_z"] = json_value(body, "tfs_z", 0.0); + llama_params["tfs_z"] = json_value(body, "tfs_z", default_sparams.tfs_z); if (body.count("grammar") != 0) { llama_params["grammar"] = json_value(body, "grammar", json::object()); From ca38b8d334baa724bd6c9402470931d26427466f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 29 Dec 2023 14:41:36 +0200 Subject: [PATCH 311/859] scripts : do not sync 
commits from this repo --- scripts/sync-ggml-am.sh | 44 +++++++++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 15 deletions(-) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 83abe3681..93aad88a7 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -26,22 +26,36 @@ echo "Syncing ggml changes since commit $lc" cd $SRC_GGML -git log --oneline $lc..HEAD +git log --oneline $lc..HEAD | grep -v "(llama/[0-9]*)" | cut -d' ' -f1 > $SRC_LLAMA/ggml-commits -git format-patch $lc --stdout -- \ - include/ggml/ggml*.h \ - src/ggml*.h \ - src/ggml*.c \ - src/ggml*.cpp \ - src/ggml*.m \ - src/ggml*.metal \ - src/ggml*.cu \ - tests/test-opt.cpp \ - tests/test-grad0.cpp \ - tests/test-quantize-fns.cpp \ - tests/test-quantize-perf.cpp \ - tests/test-backend-ops.cpp \ - > $SRC_LLAMA/ggml-src.patch +if [ ! -s $SRC_LLAMA/ggml-commits ]; then + rm -v $SRC_LLAMA/ggml-commits + echo "No new commits" + exit 0 +fi + +if [ -f $SRC_LLAMA/ggml-src.patch ]; then + rm -v $SRC_LLAMA/ggml-src.patch +fi + +while read c; do + git format-patch -k $c~1..$c --stdout -- \ + include/ggml/ggml*.h \ + src/ggml*.h \ + src/ggml*.c \ + src/ggml*.cpp \ + src/ggml*.m \ + src/ggml*.metal \ + src/ggml*.cu \ + tests/test-opt.cpp \ + tests/test-grad0.cpp \ + tests/test-quantize-fns.cpp \ + tests/test-quantize-perf.cpp \ + tests/test-backend-ops.cpp \ + >> $SRC_LLAMA/ggml-src.patch +done < $SRC_LLAMA/ggml-commits + +rm -v $SRC_LLAMA/ggml-commits # delete files if empty if [ ! -s $SRC_LLAMA/ggml-src.patch ]; then From afc8c192919f04613a92d40391bff4c8cd99856b Mon Sep 17 00:00:00 2001 From: bssrdf Date: Fri, 29 Dec 2023 03:32:31 -0500 Subject: [PATCH 312/859] ggml : fix some mul mat cases + add tests for src1 F16 (ggml/669) * fixed mul-mat error for old GPUs * style fixes * add mul mat src1 f16 test cases, fix more cases ggml-ci --------- Co-authored-by: bssrdf Co-authored-by: slaren --- ggml-backend.c | 8 +++- ggml-cuda.cu | 89 +++++++++++++++++++------------------- ggml.c | 2 +- tests/test-backend-ops.cpp | 14 +++--- 4 files changed, 60 insertions(+), 53 deletions(-) diff --git a/ggml-backend.c b/ggml-backend.c index 526ce732b..2c3752067 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -614,10 +614,14 @@ static void ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_c } static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { - return true; + switch (op->op) { + case GGML_OP_MUL_MAT: + return op->src[1]->type == GGML_TYPE_F32 || op->src[1]->type == ggml_internal_get_type_traits(op->src[0]->type).vec_dot_type; + default: + return true; + } GGML_UNUSED(backend); - GGML_UNUSED(op); } static struct ggml_backend_i cpu_backend_i = { diff --git a/ggml-cuda.cu b/ggml-cuda.cu index abad9cc39..9a9effcf5 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7485,6 +7485,8 @@ static void ggml_cuda_op_dequantize_mul_mat_vec( const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; + GGML_ASSERT(src1->type == GGML_TYPE_F32); + // on some GPUs it is faster to convert src1 to half and to use half precision intrinsics #ifdef GGML_CUDA_F16 cuda_pool_alloc src1_dfloat_a; @@ -7577,6 +7579,7 @@ static void ggml_cuda_op_mul_mat_cublas( const int compute_capability = g_device_caps[id].cc; if (compute_capability >= CC_VOLTA && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && ggml_is_contiguous(src0) && row_diff == src0->ne[1] && dst->op_params[0] == GGML_PREC_DEFAULT) { + //printf("this branch\n"); // convert src0 
and src1 to fp16, multiply as fp16, convert dst to fp32
     cuda_pool_alloc<half> src0_as_f16;
     if (src0->type != GGML_TYPE_F16) {
@@ -7614,9 +7617,9 @@ static void ggml_cuda_op_mul_mat_cublas(
         const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16);
         to_fp32_cuda(dst_f16.get(), dst_dd_i, row_diff*src1_ncols, stream);
-    }
-    else {
+    } else {
         cuda_pool_alloc<float> src0_ddq_as_f32;
+        cuda_pool_alloc<float> src1_ddq_as_f32;
 
         if (src0->type != GGML_TYPE_F32) {
             const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(src0->type);
@@ -7624,7 +7627,15 @@ static void ggml_cuda_op_mul_mat_cublas(
             src0_ddq_as_f32.alloc(row_diff*ne00);
             to_fp32_cuda(src0_dd_i, src0_ddq_as_f32.get(), row_diff*ne00, stream);
         }
+        if (src1->type != GGML_TYPE_F32) {
+            const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(src1->type);
+            GGML_ASSERT(to_fp32_cuda != nullptr);
+            src1_ddq_as_f32.alloc(src1_ncols*ne10);
+            to_fp32_cuda(src1_ddf_i, src1_ddq_as_f32.get(), src1_ncols*ne10, stream);
+        }
+
         const float * src0_ddf_i = src0->type == GGML_TYPE_F32 ? (const float *) src0_dd_i : src0_ddq_as_f32.get();
+        const float * src1_ddf1_i = src1->type == GGML_TYPE_F32 ? (const float *) src1_ddf_i : src1_ddq_as_f32.get();
 
         const float alpha = 1.0f;
         const float beta = 0.0f;
@@ -7633,9 +7644,9 @@ static void ggml_cuda_op_mul_mat_cublas(
         CUBLAS_CHECK(
             cublasSgemm(g_cublas_handles[id], CUBLAS_OP_T, CUBLAS_OP_N,
                     row_diff, src1_ncols, ne10,
-                    &alpha, src0_ddf_i, ne00,
-                            src1_ddf_i, ne10,
-                    &beta,  dst_dd_i, ldc));
+                    &alpha, src0_ddf_i,  ne00,
+                            src1_ddf1_i, ne10,
+                    &beta,  dst_dd_i,   ldc));
     }
 
     (void) dst;
@@ -8035,6 +8046,7 @@ static void ggml_cuda_op_mul_mat(
 
     GGML_ASSERT(dst->backend != GGML_BACKEND_GPU_SPLIT);
     GGML_ASSERT(src1->backend != GGML_BACKEND_GPU_SPLIT);
+    GGML_ASSERT(src1->type == GGML_TYPE_F32 || (src1->ne[2] == 1 && src1->ne[3] == 1));
 
     GGML_ASSERT(ne12 >= ne02 && ne12 % ne02 == 0);
 
@@ -8481,9 +8493,9 @@ static __global__ void k_compute_batched_ptrs(
     int64_t i03 = i13 / r3;
     int64_t i02 = i12 / r2;
 
-    ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03;
-    ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_as_f16 + i12*nb12/2 + i13*nb13/2;
-    ptrs_dst[0*ne23 + i12 + i13*ne12] = (      char *)         dst + i12*nbd2 + i13*nbd3;
+    ptrs_src[0*ne23 + i12 + i13*ne12] = (const char *) src0_as_f16 + i02*nb02 + i03*nb03;
+    ptrs_src[1*ne23 + i12 + i13*ne12] = (const char *) src1_as_f16 + i12*nb12 + i13*nb13;
+    ptrs_dst[0*ne23 + i12 + i13*ne12] = (      char *)         dst + i12*nbd2 + i13*nbd3;
 }
 
 static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
@@ -8492,28 +8504,10 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const
     GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT);
     GGML_ASSERT(src0->type == GGML_TYPE_F16);
-    GGML_ASSERT(src1->type == GGML_TYPE_F32);
 
-    const int64_t ne00 = src0->ne[0]; GGML_UNUSED(ne00);
-    const int64_t ne01 = src0->ne[1];
-    const int64_t ne02 = src0->ne[2];
-    const int64_t ne03 = src0->ne[3];
+    GGML_TENSOR_BINARY_OP_LOCALS
 
-    const int64_t nb01 = src0->nb[1];
-    const int64_t nb02 = src0->nb[2]; GGML_UNUSED(nb02);
-    const int64_t nb03 = src0->nb[3]; GGML_UNUSED(nb03);
-
-    const int64_t ne10 = src1->ne[0];
-    const int64_t ne11 = src1->ne[1];
-    const int64_t ne12 = src1->ne[2];
-    const int64_t ne13 = src1->ne[3];
-
-    const int64_t nb11 = src1->nb[1];
-    const int64_t nb12 = src1->nb[2]; GGML_UNUSED(nb12);
-    const int64_t nb13 = src1->nb[3]; GGML_UNUSED(nb13);
-
-    const int64_t ne1 = ggml_nelements(src1);
-    const int64_t ne  = ggml_nelements(dst);
+    const int64_t ne_dst = ggml_nelements(dst);
 
     ggml_cuda_set_device(g_main_device);
     cudaStream_t main_stream = g_cudaStreams[g_main_device][0];
@@ -8522,7 +8516,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const
     ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra;
     void * src0_ddq = src0_extra->data_device[g_main_device];
-    half * src0_as_f16 = (half *) src0_ddq;
+    half * src0_f16 = (half *) src0_ddq;
 
     ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra;
     float * src1_ddf = (float *) src1_extra->data_device[g_main_device];
@@ -8531,11 +8525,15 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const
     float * dst_ddf = (float *) dst_extra->data_device[g_main_device];
 
     // convert src1 to fp16
-    const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type);
-    GGML_ASSERT(to_fp16_cuda != nullptr);
-
-    cuda_pool_alloc<half> src1_as_f16(ne1);
-    to_fp16_cuda(src1_ddf, src1_as_f16.get(), ne1, main_stream);
+    cuda_pool_alloc<half> src1_f16_alloc;
+    if (src1->type != GGML_TYPE_F16) {
+        const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type);
+        const int64_t ne_src1 = ggml_nelements(src1);
+        src1_f16_alloc.alloc(ne_src1);
+        GGML_ASSERT(to_fp16_cuda != nullptr);
+        to_fp16_cuda(src1_ddf, src1_f16_alloc.get(), ne_src1, main_stream);
+    }
+    half * src1_f16 = src1->type == GGML_TYPE_F16 ? (half *) src1_ddf : src1_f16_alloc.get();
 
     cuda_pool_alloc<half> dst_f16;
     char * dst_t;
@@ -8557,7 +8555,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const
     const void * beta = &beta_f16;
 
     if (dst->op_params[0] == GGML_PREC_DEFAULT) {
-        dst_t = (char *) dst_f16.alloc(ne);
+        dst_t = (char *) dst_f16.alloc(ne_dst);
 
         nbd2 /= sizeof(float) / sizeof(half);
         nbd3 /= sizeof(float) / sizeof(half);
@@ -8604,9 +8602,9 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const
         CUBLAS_CHECK(
         cublasGemmStridedBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N,
                 ne01, ne11, ne10,
-                alpha, (const char *) src0_as_f16, CUDA_R_16F, nb01/sizeof(half),  src0->nb[2]/sizeof(half),  // strideA
-                       (const char *) src1_as_f16.get(), CUDA_R_16F, nb11/sizeof(float), src1->nb[2]/sizeof(float), // strideB
-                beta,  (      char *) dst_t, cu_data_type, ne01, dst->nb[2]/sizeof(float), // strideC
+                alpha, (const char *) src0_f16, CUDA_R_16F,   nb01/nb00, nb02/nb00, // strideA
+                       (const char *) src1_f16, CUDA_R_16F,   nb11/nb10, nb12/nb10, // strideB
+                beta,  (      char *)    dst_t, cu_data_type, ne01,      nb2/nb0,   // strideC
                 ne12*ne13,
                 cu_compute_type,
                 CUBLAS_GEMM_DEFAULT_TENSOR_OP));
@@ -8619,12 +8617,13 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const
 
         dim3 block_dims(ne13, ne12);
         k_compute_batched_ptrs<<<1, block_dims, 0, main_stream>>>(
-                src0_as_f16, src1_as_f16.get(), dst_t,
+                src0_f16, src1_f16, dst_t,
                 ptrs_src.get(), ptrs_dst.get(),
                 ne12, ne13,
                 ne23,
                 nb02, nb03,
-                nb12, nb13,
+                src1->type == GGML_TYPE_F16 ? nb12 : nb12/2,
+                src1->type == GGML_TYPE_F16 ?
nb13 : nb13/2, nbd2, nbd3, r2, r3); CUDA_CHECK(cudaGetLastError()); @@ -8632,8 +8631,8 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const CUBLAS_CHECK( cublasGemmBatchedEx(g_cublas_handles[g_main_device], CUBLAS_OP_T, CUBLAS_OP_N, ne01, ne11, ne10, - alpha, (const void **) (ptrs_src.get() + 0*ne23), CUDA_R_16F, nb01/sizeof(half), - (const void **) (ptrs_src.get() + 1*ne23), CUDA_R_16F, nb11/sizeof(float), + alpha, (const void **) (ptrs_src.get() + 0*ne23), CUDA_R_16F, nb01/nb00, + (const void **) (ptrs_src.get() + 1*ne23), CUDA_R_16F, nb11/nb10, beta, ( void **) (ptrs_dst.get() + 0*ne23), cu_data_type, ne01, ne23, cu_compute_type, @@ -8643,7 +8642,7 @@ static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const if (dst->op_params[0] == GGML_PREC_DEFAULT) { const to_fp32_cuda_t to_fp32_cuda = ggml_get_to_fp32_cuda(GGML_TYPE_F16); - to_fp32_cuda(dst_f16.get(), dst_ddf, ne, main_stream); + to_fp32_cuda(dst_f16.get(), dst_ddf, ne_dst, main_stream); } } @@ -8682,13 +8681,13 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } else if (!split && all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (!split && all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F32 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (!split && all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); } else if (ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) { - if (src1->ne[1] == 1 && src0->ne[0] % GGML_CUDA_DMMV_X == 0) { + if (src1->ne[1] == 1 && src0->ne[0] % GGML_CUDA_DMMV_X == 0 && src1->type == GGML_TYPE_F32) { #ifdef GGML_CUDA_FORCE_DMMV const bool use_mul_mat_vec_q = false; #else diff --git a/ggml.c b/ggml.c index ed56e60a8..a9e1ea9b4 100644 --- a/ggml.c +++ b/ggml.c @@ -9687,7 +9687,7 @@ static void ggml_compute_forward_mul_mat( const size_t row_size = ggml_row_size(vec_dot_type, ne10); assert(params->wsize >= ne11*ne12*ne13*row_size); - assert(src1->type == GGML_TYPE_F32); + GGML_ASSERT(src1->type == GGML_TYPE_F32); for (int64_t i13 = 0; i13 < ne13; ++i13) { for (int64_t i12 = 0; i12 < ne12; ++i12) { diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index f3df8a8c6..b115299c0 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -350,13 +350,18 @@ struct test_case { fflush(stdout); // check if backends support op + bool supported = true; for (ggml_backend_t backend : {backend1, backend2}) { if (!ggml_backend_supports_op(backend, out)) { - printf("not supported\n"); - ggml_free(ctx); - return true; + printf("not supported [%s] ", ggml_backend_name(backend)); + supported = false; } } + if (!supported) { + printf("\n"); + ggml_free(ctx); + return true; + } // post-graph sentinel add_sentinel(ctx); @@ -1505,8 +1510,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op } for (ggml_type type_a : all_types) { - for (ggml_type type_b : {GGML_TYPE_F32 /*, GGML_TYPE_F16 */}) { - // FIXME: CPU crashes on f16xf16 + for (ggml_type type_b : {GGML_TYPE_F32, 
GGML_TYPE_F16}) { test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, { 1, 1}, {1, 1})); test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 1}, {1, 1})); test_cases.emplace_back(new test_mul_mat(type_a, type_b, 16, 1, 256, {10, 1}, {2, 1})); From 38b3de4658292582a8941a2be5c77b40ce6ac0f2 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 29 Dec 2023 14:56:41 +0200 Subject: [PATCH 313/859] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 1ec144116..6ff2d5233 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -76e7f47b69e8334384dc718480c496dafbd47999 +168c43edd1f85ebdecd4c79262cacb32b74eda68 From 441f51dca004debf8b275f1bdc08e0f1af7fd8f8 Mon Sep 17 00:00:00 2001 From: Tamotsu Takahashi Date: Fri, 29 Dec 2023 19:23:27 +0900 Subject: [PATCH 314/859] ci : build with CLBlast + ggml-opencl use GGML_API (whisper/1576) * Build with CLBlast * Declare GGML_API After rebasing, examples/talk-llama failed: "D:\a\whisper.cpp\whisper.cpp\build\ALL_BUILD.vcxproj" (build target) (1) -> "D:\a\whisper.cpp\whisper.cpp\build\examples\talk-llama\talk-llama.vcxproj" (default target) (14) -> (Link target) -> llama.obj : error LNK2019: unresolved external symbol ggml_cl_free_data referenced in function "public: __cdecl llama_model::~llama_model(void)" (??1llama_model@@QEAA@XZ) [D:\a\whisper.cpp\whisper.cpp\build\examples\talk-llama\talk-llama.vcxproj] llama.obj : error LNK2019: unresolved external symbol ggml_cl_transform_tensor referenced in function "public: void __cdecl llama_model_loader::load_all_data(struct ggml_context *,void (__cdecl*)(float,void *),void *,struct llama_mlock *)" (?load_all_data@llama_model_loader@@QEAAXPEAUggml_context@@P6AXMPEAX@Z1PEAUllama_mlock@@@Z) [D:\a\whisper.cpp\whisper.cpp\build\examples\talk-llama\talk-llama.vcxproj] D:\a\whisper.cpp\whisper.cpp\build\bin\Release\talk-llama.exe : fatal error LNK1120: 2 unresolved externals [D:\a\whisper.cpp\whisper.cpp\build\examples\talk-llama\talk-llama.vcxproj] --- ggml-opencl.h | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/ggml-opencl.h b/ggml-opencl.h index a92b445c9..44d05bd64 100644 --- a/ggml-opencl.h +++ b/ggml-opencl.h @@ -6,19 +6,19 @@ extern "C" { #endif -void ggml_cl_init(void); +GGML_API void ggml_cl_init(void); -void ggml_cl_mul(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -size_t ggml_cl_mul_mat_get_wsize(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -void ggml_cl_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst, void * wdata, size_t wsize); +GGML_API void ggml_cl_mul(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API size_t ggml_cl_mul_mat_get_wsize(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API void ggml_cl_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst, void * wdata, size_t wsize); -void * ggml_cl_host_malloc(size_t size); -void ggml_cl_host_free(void * ptr); +GGML_API 
void * ggml_cl_host_malloc(size_t size); +GGML_API void ggml_cl_host_free(void * ptr); -void ggml_cl_free_data(const struct ggml_tensor* tensor); +GGML_API void ggml_cl_free_data(const struct ggml_tensor* tensor); -void ggml_cl_transform_tensor(void * data, struct ggml_tensor * tensor); +GGML_API void ggml_cl_transform_tensor(void * data, struct ggml_tensor * tensor); #ifdef __cplusplus } From c8255f8a6b2a3b3ebc6cb340cc2487f39fc95ffc Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 29 Dec 2023 15:12:35 +0200 Subject: [PATCH 315/859] scripts : print list of sync commits --- scripts/sync-ggml-am.sh | 1 + scripts/sync-ggml.last | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 93aad88a7..91478f177 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -26,6 +26,7 @@ echo "Syncing ggml changes since commit $lc" cd $SRC_GGML +git log --oneline $lc..HEAD git log --oneline $lc..HEAD | grep -v "(llama/[0-9]*)" | cut -d' ' -f1 > $SRC_LLAMA/ggml-commits if [ ! -s $SRC_LLAMA/ggml-commits ]; then diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 6ff2d5233..5b6a440f7 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -168c43edd1f85ebdecd4c79262cacb32b74eda68 +df098ea908764cba4a4889a1cbe7b026b2d31a14 From afd997ab6011dfefe9e917425b04ef4d83614841 Mon Sep 17 00:00:00 2001 From: Peter Sugihara Date: Fri, 29 Dec 2023 05:58:56 -0800 Subject: [PATCH 316/859] llama.swiftui : fix infinite loop, ouput timings, buff UI (#4674) * fix infinite loop * slight UI simplification, clearer UX * clearer UI text, add timings to completion log --- .../llama.cpp.swift/LibLlama.swift | 2 ++ .../llama.swiftui/Models/LlamaState.swift | 27 ++++++++++---- .../llama.swiftui/UI/ContentView.swift | 35 +++---------------- .../llama.swiftui/UI/DownloadButton.swift | 2 +- 4 files changed, 29 insertions(+), 37 deletions(-) diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 464fb3277..66244382f 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -1,5 +1,7 @@ import Foundation +// To use this in your own project, add llama.cpp as a swift package dependency +// and uncomment this import line. // import llama enum LlamaError: Error { diff --git a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift index 3393eb242..17cb5b9dd 100644 --- a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift +++ b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift @@ -4,6 +4,7 @@ import Foundation class LlamaState: ObservableObject { @Published var messageLog = "" @Published var cacheCleared = false + let NS_PER_S = 1_000_000_000.0 private var llamaContext: LlamaContext? private var defaultModelUrl: URL? { @@ -20,12 +21,12 @@ class LlamaState: ObservableObject { } func loadModel(modelUrl: URL?) 
throws { - messageLog += "Loading model...\n" if let modelUrl { + messageLog += "Loading model...\n" llamaContext = try LlamaContext.create_context(path: modelUrl.path()) messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" } else { - messageLog += "Could not locate model\n" + messageLog += "Load a model from the list below\n" } } @@ -34,15 +35,29 @@ class LlamaState: ObservableObject { return } + let t_start = DispatchTime.now().uptimeNanoseconds await llamaContext.completion_init(text: text) + let t_heat_end = DispatchTime.now().uptimeNanoseconds + let t_heat = Double(t_heat_end - t_start) / NS_PER_S + messageLog += "\(text)" - while await llamaContext.n_cur <= llamaContext.n_len { + while await llamaContext.n_cur < llamaContext.n_len { let result = await llamaContext.completion_loop() messageLog += "\(result)" } + + let t_end = DispatchTime.now().uptimeNanoseconds + let t_generation = Double(t_end - t_heat_end) / NS_PER_S + let tokens_per_second = Double(await llamaContext.n_len) / t_generation + await llamaContext.clear() - messageLog += "\n\ndone\n" + messageLog += """ + \n + Done + Heat up took \(t_heat)s + Generated \(tokens_per_second) t/s\n + """ } func bench() async { @@ -56,10 +71,10 @@ class LlamaState: ObservableObject { messageLog += await llamaContext.model_info() + "\n" let t_start = DispatchTime.now().uptimeNanoseconds - await llamaContext.bench(pp: 8, tg: 4, pl: 1) // heat up + let _ = await llamaContext.bench(pp: 8, tg: 4, pl: 1) // heat up let t_end = DispatchTime.now().uptimeNanoseconds - let t_heat = Double(t_end - t_start) / 1_000_000_000.0 + let t_heat = Double(t_end - t_start) / NS_PER_S messageLog += "Heat up time: \(t_heat) seconds, please wait...\n" // if more than 5 seconds, then we're probably running on a slow device diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index c78f107b3..147e0c63b 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -42,46 +42,27 @@ struct ContentView: View { Button("Send") { sendText() } - .padding(8) - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) Button("Bench") { bench() } - .padding(8) - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) Button("Clear") { clear() } - .padding(8) - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) Button("Copy") { UIPasteboard.general.string = llamaState.messageLog } - .padding(8) - .background(Color.blue) - .foregroundColor(.white) - .cornerRadius(8) - } + }.buttonStyle(.bordered) - VStack { + VStack(alignment: .leading) { DownloadButton( llamaState: llamaState, modelName: "TinyLlama-1.1B (Q4_0, 0.6 GiB)", modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true", filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf" ) - .font(.system(size: 12)) - .padding(.top, 4) - .frame(maxWidth: .infinity, alignment: .leading) DownloadButton( llamaState: llamaState, @@ -89,7 +70,6 @@ struct ContentView: View { modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q8_0.gguf?download=true", filename: "tinyllama-1.1b-1t-openorca.Q8_0.gguf" ) - .font(.system(size: 12)) DownloadButton( llamaState: llamaState, @@ -97,8 +77,6 @@ struct ContentView: View { modelUrl: 
"https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true", filename: "tinyllama-1.1b-f16.gguf" ) - .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) DownloadButton( llamaState: llamaState, @@ -106,7 +84,6 @@ struct ContentView: View { modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true", filename: "phi-2-q4_0.gguf" ) - .font(.system(size: 12)) DownloadButton( llamaState: llamaState, @@ -114,8 +91,6 @@ struct ContentView: View { modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true", filename: "phi-2-q8_0.gguf" ) - .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) DownloadButton( llamaState: llamaState, @@ -123,15 +98,15 @@ struct ContentView: View { modelUrl: "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true", filename: "mistral-7b-v0.1.Q4_0.gguf" ) - .font(.system(size: 12)) Button("Clear downloaded models") { ContentView.cleanupModelCaches() llamaState.cacheCleared = true } - .padding(8) - .font(.system(size: 12)) } + .padding(.top, 4) + .font(.system(size: 12)) + .frame(maxWidth: .infinity, alignment: .leading) } .padding() } diff --git a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift index 4bd75cb69..c9f322ca1 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift @@ -93,7 +93,7 @@ struct DownloadButton: View { print("Error: \(err.localizedDescription)") } }) { - Text("\(modelName) (Downloaded)") + Text("Load \(modelName)") } } else { Text("Unknown status") From 82d6eab224862a7044069fb9211dc4b29124264b Mon Sep 17 00:00:00 2001 From: andrijdavid Date: Fri, 29 Dec 2023 15:18:20 +0100 Subject: [PATCH 317/859] main-cmake-pkg : fix build issue (#4665) * Fix main-cmake-pkg compilation * Use glob to load common files * cmake : fix trailing whitespace --------- Co-authored-by: Georgi Gerganov --- examples/main-cmake-pkg/CMakeLists.txt | 27 ++++++-------------------- 1 file changed, 6 insertions(+), 21 deletions(-) diff --git a/examples/main-cmake-pkg/CMakeLists.txt b/examples/main-cmake-pkg/CMakeLists.txt index cb00edbbb..deb77d588 100644 --- a/examples/main-cmake-pkg/CMakeLists.txt +++ b/examples/main-cmake-pkg/CMakeLists.txt @@ -7,28 +7,13 @@ find_package(Llama 0.0.1 REQUIRED) # Bake common functionality in with target. Because applications # using the relocatable Llama package should be outside of the # source tree, main-cmake-pkg pretends the dependencies are built-in. - set(_common_path "${CMAKE_CURRENT_LIST_DIR}/../../common") -add_library(common OBJECT - ${_common_path}/common.h - ${_common_path}/common.cpp - ${_common_path}/console.h - ${_common_path}/console.cpp - ${_common_path}/grammar-parser.h - ${_common_path}/grammar-parser.cpp - ${_common_path}/sampling.h - ${_common_path}/sampling.cpp - ) - -# WARNING: because build-info.h is auto-generated, it will only -# be available after the user has built the llama.cpp sources. 
-# -configure_file(${_common_path}/../build-info.h - ${CMAKE_CURRENT_BINARY_DIR}/build-info.h - COPYONLY) - -target_include_directories(common PUBLIC ${LLAMA_INCLUDE_DIR} - ${CMAKE_CURRENT_BINARY_DIR}) +add_library(common OBJECT) +file(GLOB _common_files + "${_common_path}/*.h" + "${_common_path}/*.cpp" +) +target_sources(common PRIVATE ${_common_files}) # If the common project was part of "main-cmake-pkg" the transient # defines would automatically be attached. Because the common func- From b93edd22f55d3e5268263c3edcdae1818505c078 Mon Sep 17 00:00:00 2001 From: Karthik Sethuraman Date: Fri, 29 Dec 2023 06:22:10 -0800 Subject: [PATCH 318/859] server : allow to generate multimodal embeddings (#4681) --- examples/server/README.md | 4 +++- examples/server/server.cpp | 12 +++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index f1e586a1c..718a7e064 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -166,7 +166,7 @@ node index.js `n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token (default: 0) - `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:` In this case, `[img-12]` will be replaced by the embeddings of the image id 12 in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. + `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:`. In this case, `[img-12]` will be replaced by the embeddings of the image with id `12` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. *Result JSON:* @@ -224,6 +224,8 @@ node index.js `content`: Set the text to process. + `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `content`. You can determine the place of the image in the content as in the following: `Image: [img-21].\nCaption: This is a picture of a house`. In this case, `[img-21]` will be replaced by the embeddings of the image with id `21` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 21}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. + - **POST** `/infill`: For code infilling. Takes a prefix and a suffix and returns the predicted completion as stream. 
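The `image_data` field documented for `/completion` and `/embedding` above can be built programmatically. A minimal sketch using nlohmann::json (an assumption consistent with the server itself, which bundles `json.hpp`; the include path, the id `21`, and the caption are illustrative):

```cpp
// Build a multimodal /embedding request body as documented above.
// Assumptions: nlohmann/json is available (in-tree the server uses
// #include "json.hpp"), and img_b64 holds base64-encoded image bytes.
#include <cstdio>
#include <string>
#include <nlohmann/json.hpp>

using json = nlohmann::json;

int main() {
    const std::string img_b64 = "..."; // placeholder for real base64 data

    json body;
    body["content"]    = "Image: [img-21].\nCaption: This is a picture of a house";
    body["image_data"] = json::array();
    body["image_data"].push_back({ {"data", img_b64}, {"id", 21} });

    // POST body.dump() to the server's /embedding endpoint with any HTTP
    // client; the reply carries the "embedding" vector.
    std::printf("%s\n", body.dump(2).c_str());
    return 0;
}
```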
*Options:* diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c5035e202..31b8cf33d 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -3077,7 +3077,17 @@ int main(int argc, char **argv) { prompt = ""; } - const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0} }, false, true, -1); + + json image_data; + if (body.count("image_data") != 0) { + image_data = body["image_data"]; + } + else + { + image_data = ""; + } + + const int task_id = llama.request_completion({ {"prompt", prompt}, { "n_predict", 0}, {"image_data", image_data} }, false, true, -1); task_result result = llama.next_result(task_id); return res.set_content(result.result_json.dump(), "application/json; charset=utf-8"); }); From 60f55e888c29cbd87c4238dd19e85d0eef87245d Mon Sep 17 00:00:00 2001 From: SakuraUmi Date: Fri, 29 Dec 2023 22:22:44 +0800 Subject: [PATCH 319/859] server : fix OpenAI server sampling w.r.t. penalty. (#4675) --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 31b8cf33d..035eb24ac 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2461,7 +2461,7 @@ json oaicompat_completion_params_parse( llama_params["mirostat_eta"] = json_value(body, "mirostat_eta", default_sparams.mirostat_eta); llama_params["penalize_nl"] = json_value(body, "penalize_nl", default_sparams.penalize_nl); llama_params["typical_p"] = json_value(body, "typical_p", default_sparams.typical_p); - llama_params["repeat_last_n"] = json_value(body, "repeat_last_n", 0); + llama_params["repeat_last_n"] = json_value(body, "repeat_last_n", default_sparams.penalty_last_n); llama_params["ignore_eos"] = json_value(body, "ignore_eos", false); llama_params["tfs_z"] = json_value(body, "tfs_z", default_sparams.tfs_z); From db49ff8ed7f0bb201176703441cc02911b08ef2a Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Fri, 29 Dec 2023 06:24:12 -0800 Subject: [PATCH 320/859] server : replace sleep with condition variables (#4673) The server currently schedules tasks using a sleep(5ms) busy loop. This adds unnecessary latency since most sleep implementations do a round up to the system scheduling quantum (usually 10ms). Other libc sleep impls spin for smaller time intervals which results in the server's busy loop consuming all available cpu. Having the explicit notify() / wait() code also helps aid in the readability of the server code. 
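Reduced to a self-contained sketch, the notify()/wait() pattern this change adopts looks like the following (the queue and element types here are illustrative stand-ins, not the server's actual task structures):

```cpp
#include <condition_variable>
#include <mutex>
#include <queue>

struct task_queue {
    std::mutex mtx;
    std::condition_variable cv;
    std::queue<int> tasks;

    void push(int t) {
        {
            std::lock_guard<std::mutex> lock(mtx);
            tasks.push(t);
        }
        cv.notify_one(); // wake a waiting worker immediately
    }

    int pop() {
        std::unique_lock<std::mutex> lock(mtx);
        cv.wait(lock, [&] { return !tasks.empty(); }); // blocks; no sleep polling
        int t = tasks.front();
        tasks.pop();
        return t;
    }
};
```

The worker blocks inside `wait()` until work actually arrives, so latency is bounded by the scheduler's wakeup rather than a sleep quantum, and the CPU stays idle in between.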
See mozilla-Ocho/llamafile@711344b
---
 examples/server/server.cpp | 41 ++++++++++++++++++++++++--------------
 1 file changed, 26 insertions(+), 15 deletions(-)

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 035eb24ac..0aada8e28 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -25,6 +25,7 @@
 #include <cstddef>
 #include <thread>
 #include <chrono>
+#include <condition_variable>
 
 #ifndef SERVER_VERBOSE
 #define SERVER_VERBOSE 1
@@ -541,7 +542,9 @@ struct llama_server_context
     std::vector<task_result> queue_results;
     std::vector<task_multi>  queue_multitasks;
     std::mutex mutex_tasks; // also guards id_gen, and queue_multitasks
+    std::condition_variable condition_tasks;
     std::mutex mutex_results;
+    std::condition_variable condition_results;
 
     ~llama_server_context()
     {
@@ -1169,7 +1172,7 @@ struct llama_server_context
 
     void send_error(task_server& task, std::string error)
     {
-        std::lock_guard<std::mutex> lock(mutex_results);
+        std::unique_lock<std::mutex> lock(mutex_results);
         task_result res;
         res.id = task.id;
         res.multitask_id = task.multitask_id;
@@ -1177,6 +1180,7 @@ struct llama_server_context
         res.error = true;
         res.result_json = { { "content", error } };
         queue_results.push_back(res);
+        condition_results.notify_all();
     }
 
     void add_multi_task(int id, std::vector<int>& sub_ids)
     {
@@ -1186,6 +1190,7 @@ struct llama_server_context
         multi.id = id;
         std::copy(sub_ids.begin(), sub_ids.end(), std::inserter(multi.subtasks_remaining, multi.subtasks_remaining.end()));
         queue_multitasks.push_back(multi);
+        condition_tasks.notify_one();
     }
 
     void update_multi_task(int multitask_id, int subtask_id, task_result& result)
     {
@@ -1197,6 +1202,7 @@ struct llama_server_context
         {
             multitask.subtasks_remaining.erase(subtask_id);
             multitask.results.push_back(result);
+            condition_tasks.notify_one();
         }
     }
 }
@@ -1244,7 +1250,7 @@ struct llama_server_context
 
     void send_partial_response(llama_client_slot &slot, completion_token_output tkn)
    {
-        std::lock_guard<std::mutex> lock(mutex_results);
+        std::unique_lock<std::mutex> lock(mutex_results);
         task_result res;
         res.id = slot.task_id;
         res.multitask_id = slot.multitask_id;
@@ -1280,11 +1286,12 @@ struct llama_server_context
         }
 
         queue_results.push_back(res);
+        condition_results.notify_all();
     }
 
     void send_final_response(llama_client_slot &slot)
     {
-        std::lock_guard<std::mutex> lock(mutex_results);
+        std::unique_lock<std::mutex> lock(mutex_results);
         task_result res;
         res.id = slot.task_id;
         res.multitask_id = slot.multitask_id;
@@ -1340,11 +1347,12 @@ struct llama_server_context
         }
 
         queue_results.push_back(res);
+        condition_results.notify_all();
     }
 
     void send_embedding(llama_client_slot &slot)
     {
-        std::lock_guard<std::mutex> lock(mutex_results);
+        std::unique_lock<std::mutex> lock(mutex_results);
         task_result res;
         res.id = slot.task_id;
         res.multitask_id = slot.multitask_id;
@@ -1372,6 +1380,7 @@ struct llama_server_context
         };
         }
         queue_results.push_back(res);
+        condition_results.notify_all();
     }
 
     int request_completion(json data, bool infill, bool embedding, int multitask_id)
     {
@@ -1395,6 +1404,7 @@ struct llama_server_context
 
         // otherwise, it's a single-prompt task, we actually queue it
         queue_tasks.push_back(task);
+        condition_tasks.notify_one();
         return task.id;
     }
 
@@ -1402,13 +1412,10 @@
     {
         while (true)
         {
-            std::this_thread::sleep_for(std::chrono::microseconds(5));
-            std::lock_guard<std::mutex> lock(mutex_results);
-
-            if (queue_results.empty())
-            {
-                continue;
-            }
+            std::unique_lock<std::mutex> lock(mutex_results);
+            condition_results.wait(lock, [&]{
+                return !queue_results.empty();
+            });
 
             for (int i = 0; i < (int) queue_results.size(); i++)
             {
@@ -1504,12 +1511,13 @@ struct llama_server_context
 
     void request_cancel(int task_id)
    {
-        std::lock_guard<std::mutex> lock(mutex_tasks);
+        std::unique_lock<std::mutex> lock(mutex_tasks);
         task_server task;
         task.id = id_gen++;
         task.type = CANCEL_TASK;
         task.target_id = task_id;
         queue_tasks.push_back(task);
+        condition_tasks.notify_one();
     }
 
     int split_multiprompt_task(task_server& multiprompt_task)
@@ -1535,7 +1543,7 @@ struct llama_server_context
 
     void process_tasks()
     {
-        std::lock_guard<std::mutex> lock(mutex_tasks);
+        std::unique_lock<std::mutex> lock(mutex_tasks);
         while (!queue_tasks.empty())
         {
             task_server task = queue_tasks.front();
@@ -1607,6 +1615,7 @@ struct llama_server_context
 
                 std::lock_guard<std::mutex> lock(mutex_results);
                 queue_results.push_back(aggregate_result);
+                condition_results.notify_all();
 
                 queue_iterator = queue_multitasks.erase(queue_iterator);
             }
@@ -1637,8 +1646,10 @@ struct llama_server_context
                 LOG_TEE("all slots are idle and system prompt is empty, clear the KV cache\n");
                 kv_cache_clear();
             }
-            // avoid 100% usage of cpu all time
-            std::this_thread::sleep_for(std::chrono::milliseconds(5));
+            std::unique_lock<std::mutex> lock(mutex_tasks);
+            condition_tasks.wait(lock, [&]{
+                return !queue_tasks.empty();
+            });
         }
 
         for (llama_client_slot &slot : slots)

From 4af4801566bc262a38fb77f51edf278ac323c2bd Mon Sep 17 00:00:00 2001
From: Justine Tunney
Date: Fri, 29 Dec 2023 06:38:38 -0800
Subject: [PATCH 321/859] llava-cli : refactor to use sampling library (#4669)

This change makes it possible to use flags like `--grammar` when using
the `llava-cli` program. The rest is just code cleanup deleting a
long-standing TODO comment.

This change also ensures that logging information is emitted to stderr
which helps the `llava-cli` command be more friendly to shell scripts.

See Mozilla-Ocho/llamafile@1cd334f
---
 examples/llava/llava-cli.cpp | 85 ++++++------------------------------
 1 file changed, 13 insertions(+), 72 deletions(-)

diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp
index 31f8cd8e0..502b788b1 100644
--- a/examples/llava/llava-cli.cpp
+++ b/examples/llava/llava-cli.cpp
@@ -39,73 +39,11 @@ static bool eval_string(struct llama_context * ctx_llama, const char* str, int n_batch, int * n_past, bool add_bos){
     return true;
 }
 
-// TODO: use common/sampling.h
-static llama_token sample_id(llama_context * ctx_llama, gpt_params & params) {
-    auto & sparams = params.sparams;
-
-    // out of user input, sample next token
-    const float   temp      = sparams.temp;
-    const int32_t top_k     = sparams.top_k <= 0 ? llama_n_vocab(llama_get_model(ctx_llama)) : sparams.top_k;
-    const float   top_p     = sparams.top_p;
-    const float   tfs_z     = sparams.tfs_z;
-    const float   typical_p = sparams.typical_p;
-    // const int32_t repeat_last_n = sparams.repeat_last_n < 0 ?
n_ctx : sparams.repeat_last_n; - // const float repeat_penalty = sparams.repeat_penalty; - // const float alpha_presence = sparams.presence_penalty; - // const float alpha_frequency = sparams.frequency_penalty; - const int mirostat = sparams.mirostat; - const float mirostat_tau = sparams.mirostat_tau; - const float mirostat_eta = sparams.mirostat_eta; - // const bool penalize_nl = sparams.penalize_nl; - - llama_token id = 0; - { - auto logits = llama_get_logits(ctx_llama); - auto n_vocab = llama_n_vocab(llama_get_model(ctx_llama)); - - // Apply params.logit_bias map - for (auto it = sparams.logit_bias.begin(); it != sparams.logit_bias.end(); it++) { - logits[it->first] += it->second; - } - - std::vector candidates; - candidates.reserve(n_vocab); - for (llama_token token_id = 0; token_id < n_vocab; token_id++) { - candidates.emplace_back(llama_token_data{token_id, logits[token_id], 0.0f}); - } - - llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false }; - - if (temp <= 0) { - // Greedy sampling - id = llama_sample_token_greedy(ctx_llama, &candidates_p); - } else { - if (mirostat == 1) { - static float mirostat_mu = 2.0f * mirostat_tau; - const int mirostat_m = 100; - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token_mirostat(ctx_llama, &candidates_p, mirostat_tau, mirostat_eta, mirostat_m, &mirostat_mu); - } else if (mirostat == 2) { - static float mirostat_mu = 2.0f * mirostat_tau; - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token_mirostat_v2(ctx_llama, &candidates_p, mirostat_tau, mirostat_eta, &mirostat_mu); - } else { - // Temperature sampling - llama_sample_top_k(ctx_llama, &candidates_p, top_k, 1); - llama_sample_tail_free(ctx_llama, &candidates_p, tfs_z, 1); - llama_sample_typical(ctx_llama, &candidates_p, typical_p, 1); - llama_sample_top_p(ctx_llama, &candidates_p, top_p, 1); - llama_sample_temp(ctx_llama, &candidates_p, temp); - id = llama_sample_token(ctx_llama, &candidates_p); - } - } - } - - return id; -} - -static const char * sample(struct llama_context * ctx_llama, gpt_params & params, int * n_past) { - int id = sample_id(ctx_llama, params); +static const char * sample(struct llama_sampling_context * ctx_sampling, + struct llama_context * ctx_llama, + int * n_past) { + const llama_token id = llama_sampling_sample(ctx_sampling, ctx_llama, NULL); + llama_sampling_accept(ctx_sampling, ctx_llama, id, true); static std::string ret; if (id == llama_token_eos(llama_get_model(ctx_llama))) { ret = ""; @@ -174,8 +112,8 @@ struct llava_context { }; static void show_additional_info(int /*argc*/, char ** argv) { - printf("\n example usage: %s -m --mmproj --image [--temp 0.1] [-p \"describe the image in detail.\"]\n", argv[0]); - printf(" note: a lower temperature value like 0.1 is recommended for better quality.\n"); + fprintf(stderr, "\n example usage: %s -m --mmproj --image [--temp 0.1] [-p \"describe the image in detail.\"]\n", argv[0]); + fprintf(stderr, " note: a lower temperature value like 0.1 is recommended for better quality.\n"); } static struct llava_image_embed * load_image(llava_context * ctx_llava, gpt_params * params) { @@ -185,7 +123,7 @@ static struct llava_image_embed * load_image(llava_context * ctx_llava, gpt_para auto prompt = params->prompt; if (prompt_contains_image(prompt)) { if (!params->image.empty()) { - printf("using base64 encoded image instead of command line image path\n"); + fprintf(stderr, "using base64 encoded image instead of command line image path\n"); } embed = 
llava_image_embed_make_with_prompt_base64(ctx_llava->ctx_clip, params->n_threads, prompt); if (!embed) { @@ -217,16 +155,19 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ // generate the response - printf("\n"); + fprintf(stderr, "\n"); + + struct llama_sampling_context * ctx_sampling = llama_sampling_init(params->sparams); for (int i = 0; i < max_tgt_len; i++) { - const char * tmp = sample(ctx_llava->ctx_llama, *params, &n_past); + const char * tmp = sample(ctx_sampling, ctx_llava->ctx_llama, &n_past); if (strcmp(tmp, "") == 0) break; printf("%s", tmp); fflush(stdout); } + llama_sampling_free(ctx_sampling); printf("\n"); } From 97bbca6e8522d18041fcde6c3d0907a52ce36446 Mon Sep 17 00:00:00 2001 From: Cuong Trinh Manh Date: Fri, 29 Dec 2023 21:39:15 +0700 Subject: [PATCH 322/859] cmake : fix ld warning duplicate libraries libllama.a (#4671) * fix "ld: warning: ignoring duplicate libraries: '../libllama.a'" * fix warning in example. --- common/CMakeLists.txt | 2 +- examples/llava/CMakeLists.txt | 2 +- examples/server/CMakeLists.txt | 2 +- tests/CMakeLists.txt | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt index b5d5453d2..f79acfef1 100644 --- a/common/CMakeLists.txt +++ b/common/CMakeLists.txt @@ -65,4 +65,4 @@ endif() target_include_directories(${TARGET} PUBLIC .) target_compile_features(${TARGET} PUBLIC cxx_std_11) -target_link_libraries(${TARGET} PRIVATE llama build_info) +target_link_libraries(${TARGET} PRIVATE build_info PUBLIC llama) diff --git a/examples/llava/CMakeLists.txt b/examples/llava/CMakeLists.txt index 8ea3e5c83..48dae1506 100644 --- a/examples/llava/CMakeLists.txt +++ b/examples/llava/CMakeLists.txt @@ -32,5 +32,5 @@ endif() set(TARGET llava-cli) add_executable(llava-cli llava-cli.cpp) install(TARGETS llava-cli RUNTIME) -target_link_libraries(llava-cli PRIVATE common llama llava ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(llava-cli PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(llava PRIVATE cxx_std_11) diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt index 859cd12c6..81709e448 100644 --- a/examples/server/CMakeLists.txt +++ b/examples/server/CMakeLists.txt @@ -6,7 +6,7 @@ install(TARGETS ${TARGET} RUNTIME) target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$ ) -target_link_libraries(${TARGET} PRIVATE common llama llava ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT}) if (WIN32) TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32) endif() diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 9b5e69d13..7c932240d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -2,7 +2,7 @@ function(llama_build_executable source) get_filename_component(TEST_TARGET ${source} NAME_WE) add_executable(${TEST_TARGET} ${source}) install(TARGETS ${TEST_TARGET} RUNTIME) - target_link_libraries(${TEST_TARGET} PRIVATE llama common) + target_link_libraries(${TEST_TARGET} PRIVATE common) endfunction() function(llama_test_executable name source) @@ -14,7 +14,7 @@ function(llama_build_and_test_executable source) get_filename_component(TEST_TARGET ${source} NAME_WE) add_executable(${TEST_TARGET} ${source}) install(TARGETS ${TEST_TARGET} RUNTIME) - target_link_libraries(${TEST_TARGET} PRIVATE llama common) + target_link_libraries(${TEST_TARGET} PRIVATE common) add_test(NAME ${TEST_TARGET} COMMAND $ ${ARGN}) endfunction() From 
68eccbdc5b56f2a2450f9a8463f9934388cafabf Mon Sep 17 00:00:00 2001 From: Philip Taron Date: Fri, 29 Dec 2023 06:42:26 -0800 Subject: [PATCH 323/859] flake.nix : rewrite (#4605) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * flake.lock: update to hotfix CUDA::cuda_driver Required to support https://github.com/ggerganov/llama.cpp/pull/4606 * flake.nix: rewrite 1. Split into separate files per output. 2. Added overlays, so that this flake can be integrated into others. The names in the overlay are `llama-cpp`, `llama-cpp-opencl`, `llama-cpp-cuda`, and `llama-cpp-rocm` so that they fit into the broader set of Nix packages from [nixpkgs](https://github.com/nixos/nixpkgs). 3. Use [callPackage](https://summer.nixos.org/blog/callpackage-a-tool-for-the-lazy/) rather than `with pkgs;` so that there's dependency injection rather than dependency lookup. 4. Add a description and meta information for each package. The description includes a bit about what's trying to accelerate each one. 5. Use specific CUDA packages instead of cudatoolkit on the advice of SomeoneSerge. 6. Format with `serokell/nixfmt` for a consistent style. 7. Update `flake.lock` with the latest goods. * flake.nix: use finalPackage instead of passing it manually * nix: unclutter darwin support * nix: pass most darwin frameworks unconditionally ...for simplicity * *.nix: nixfmt nix shell github:piegamesde/nixfmt/rfc101-style --command \ nixfmt flake.nix .devops/nix/*.nix * flake.nix: add maintainers * nix: move meta down to follow Nixpkgs style more closely * nix: add missing meta attributes nix: clarify the interpretation of meta.maintainers nix: clarify the meaning of "broken" and "badPlatforms" nix: passthru: expose the use* flags for inspection E.g.: ``` ❯ nix eval .#cuda.useCuda true ``` * flake.nix: avoid re-evaluating nixpkgs too many times * flake.nix: use flake-parts * nix: migrate to pname+version * flake.nix: overlay: expose both the namespace and the default attribute * ci: add the (Nix) flakestry workflow * nix: cmakeFlags: explicit OFF bools * nix: cuda: reduce runtime closure * nix: fewer rebuilds * nix: respect config.cudaCapabilities * nix: add the impure driver's location to the DT_RUNPATHs * nix: clean sources more thoroughly ...this way outPaths change less frequently, and so there are fewer rebuilds * nix: explicit mpi support * nix: explicit jetson support * flake.nix: darwin: only expose the default --------- Co-authored-by: Someone Serge --- .devops/nix/apps.nix | 22 +++ .devops/nix/devshells.nix | 13 ++ .devops/nix/jetson-support.nix | 32 ++++ .devops/nix/nixpkgs-instances.nix | 35 ++++ .devops/nix/package.nix | 265 ++++++++++++++++++++++++++++ .devops/nix/scope.nix | 12 ++ .github/workflows/nix-flakestry.yml | 23 +++ flake.lock | 55 +++--- flake.nix | 226 ++++++++++-------------- 9 files changed, 524 insertions(+), 159 deletions(-) create mode 100644 .devops/nix/apps.nix create mode 100644 .devops/nix/devshells.nix create mode 100644 .devops/nix/jetson-support.nix create mode 100644 .devops/nix/nixpkgs-instances.nix create mode 100644 .devops/nix/package.nix create mode 100644 .devops/nix/scope.nix create mode 100644 .github/workflows/nix-flakestry.yml diff --git a/.devops/nix/apps.nix b/.devops/nix/apps.nix new file mode 100644 index 000000000..b8a12cc0a --- /dev/null +++ b/.devops/nix/apps.nix @@ -0,0 +1,22 @@ +{ + perSystem = + { config, lib, ... 
}: + { + apps = + let + inherit (config.packages) default; + binaries = [ + "llama" + "llama-embedding" + "llama-server" + "quantize" + "train-text-from-scratch" + ]; + mkApp = name: { + type = "app"; + program = "${default}/bin/${name}"; + }; + in + lib.genAttrs binaries mkApp; + }; +} diff --git a/.devops/nix/devshells.nix b/.devops/nix/devshells.nix new file mode 100644 index 000000000..1862f0f08 --- /dev/null +++ b/.devops/nix/devshells.nix @@ -0,0 +1,13 @@ +{ + perSystem = + { config, lib, ... }: + { + devShells = + lib.concatMapAttrs + (name: package: { + ${name} = package.passthru.shell; + ${name + "-extra"} = package.passthru.shell-extra; + }) + config.packages; + }; +} diff --git a/.devops/nix/jetson-support.nix b/.devops/nix/jetson-support.nix new file mode 100644 index 000000000..08426d2ab --- /dev/null +++ b/.devops/nix/jetson-support.nix @@ -0,0 +1,32 @@ +{ inputs, ... }: +{ + perSystem = + { + config, + system, + lib, + pkgsCuda, + ... + }: + lib.optionalAttrs (system == "aarch64-linux") { + packages = + let + caps.jetson-xavier = "7.2"; + caps.jetson-orin = "8.7"; + caps.jetson-nano = "5.3"; + + pkgsFor = + cap: + import inputs.nixpkgs { + inherit system; + config = { + cudaSupport = true; + cudaCapabilities = [ cap ]; + cudaEnableForwardCompat = false; + inherit (pkgsCuda.config) allowUnfreePredicate; + }; + }; + in + builtins.mapAttrs (name: cap: ((pkgsFor cap).callPackage ./scope.nix { }).llama-cpp) caps; + }; +} diff --git a/.devops/nix/nixpkgs-instances.nix b/.devops/nix/nixpkgs-instances.nix new file mode 100644 index 000000000..6e9872b28 --- /dev/null +++ b/.devops/nix/nixpkgs-instances.nix @@ -0,0 +1,35 @@ +{ inputs, ... }: +{ + # The _module.args definitions are passed on to modules as arguments. E.g. + # the module `{ pkgs ... }: { /* config */ }` implicitly uses + # `_module.args.pkgs` (defined in this case by flake-parts). + perSystem = + { system, ... }: + { + _module.args = { + pkgsCuda = import inputs.nixpkgs { + inherit system; + # Ensure dependencies use CUDA consistently (e.g. that openmpi, ucc, + # and ucx are built with CUDA support) + config.cudaSupport = true; + config.allowUnfreePredicate = + p: + builtins.all + ( + license: + license.free + || builtins.elem license.shortName [ + "CUDA EULA" + "cuDNN EULA" + ] + ) + (p.meta.licenses or [ p.meta.license ]); + }; + # Ensure dependencies use ROCm consistently + pkgsRocm = import inputs.nixpkgs { + inherit system; + config.rocmSupport = true; + }; + }; + }; +} diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix new file mode 100644 index 000000000..5f2a7c9f4 --- /dev/null +++ b/.devops/nix/package.nix @@ -0,0 +1,265 @@ +{ + lib, + config, + stdenv, + mkShell, + cmake, + ninja, + pkg-config, + git, + python3, + mpi, + openblas, # TODO: Use the generic `blas` so users could switch betwen alternative implementations + cudaPackages, + darwin, + rocmPackages, + clblast, + useBlas ? builtins.all (x: !x) [ + useCuda + useMetalKit + useOpenCL + useRocm + ], + useCuda ? config.cudaSupport, + useMetalKit ? stdenv.isAarch64 && stdenv.isDarwin && !useOpenCL, + useMpi ? false, # Increases the runtime closure size by ~700M + useOpenCL ? false, + useRocm ? config.rocmSupport, + llamaVersion ? "0.0.0", # Arbitrary version, substituted by the flake +}@inputs: + +let + inherit (lib) + cmakeBool + cmakeFeature + optionals + strings + versionOlder + ; + + # It's necessary to consistently use backendStdenv when building with CUDA support, + # otherwise we get libstdc++ errors downstream. 
+ stdenv = throw "Use effectiveStdenv instead"; + effectiveStdenv = if useCuda then cudaPackages.backendStdenv else inputs.stdenv; + + suffices = + lib.optionals useBlas [ "BLAS" ] + ++ lib.optionals useCuda [ "CUDA" ] + ++ lib.optionals useMetalKit [ "MetalKit" ] + ++ lib.optionals useMpi [ "MPI" ] + ++ lib.optionals useOpenCL [ "OpenCL" ] + ++ lib.optionals useRocm [ "ROCm" ]; + + pnameSuffix = + strings.optionalString (suffices != [ ]) + "-${strings.concatMapStringsSep "-" strings.toLower suffices}"; + descriptionSuffix = + strings.optionalString (suffices != [ ]) + ", accelerated with ${strings.concatStringsSep ", " suffices}"; + + # TODO: package the Python in this repository in a Nix-like way. + # It'd be nice to migrate to buildPythonPackage, as well as ensure this repo + # is PEP 517-compatible, and ensure the correct .dist-info is generated. + # https://peps.python.org/pep-0517/ + llama-python = python3.withPackages ( + ps: [ + ps.numpy + ps.sentencepiece + ] + ); + + # TODO(Green-Sky): find a better way to opt-into the heavy ml python runtime + llama-python-extra = python3.withPackages ( + ps: [ + ps.numpy + ps.sentencepiece + ps.torchWithoutCuda + ps.transformers + ] + ); + + # apple_sdk is supposed to choose sane defaults, no need to handle isAarch64 + # separately + darwinBuildInputs = + with darwin.apple_sdk.frameworks; + [ + Accelerate + CoreVideo + CoreGraphics + ] + ++ optionals useMetalKit [ MetalKit ]; + + cudaBuildInputs = with cudaPackages; [ + cuda_cccl.dev # + + # A temporary hack for reducing the closure size, remove once cudaPackages + # have stopped using lndir: https://github.com/NixOS/nixpkgs/issues/271792 + cuda_cudart.dev + cuda_cudart.lib + cuda_cudart.static + libcublas.dev + libcublas.lib + libcublas.static + ]; + + rocmBuildInputs = with rocmPackages; [ + clr + hipblas + rocblas + ]; +in + +effectiveStdenv.mkDerivation ( + finalAttrs: { + pname = "llama-cpp${pnameSuffix}"; + version = llamaVersion; + + src = lib.cleanSourceWith { + filter = + name: type: + !(builtins.any (_: _) [ + (lib.hasSuffix ".nix" name) # Ignore *.nix files when computing outPaths + (name == "README.md") # Ignore *.md changes whe computing outPaths + (lib.hasPrefix "." name) # Skip hidden files and directories + ]); + src = lib.cleanSource ../../.; + }; + + postPatch = '' + substituteInPlace ./ggml-metal.m \ + --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";" + + # TODO: Package up each Python script or service appropriately. 
+ # If we were to migrate to buildPythonPackage and prepare the `pyproject.toml`, + # we could make those *.py into setuptools' entrypoints + substituteInPlace ./*.py --replace "/usr/bin/env python" "${llama-python}/bin/python" + ''; + + nativeBuildInputs = + [ + cmake + ninja + pkg-config + git + ] + ++ optionals useCuda [ + cudaPackages.cuda_nvcc + + # TODO: Replace with autoAddDriverRunpath + # once https://github.com/NixOS/nixpkgs/pull/275241 has been merged + cudaPackages.autoAddOpenGLRunpathHook + ]; + + buildInputs = + optionals effectiveStdenv.isDarwin darwinBuildInputs + ++ optionals useCuda cudaBuildInputs + ++ optionals useMpi [ mpi ] + ++ optionals useOpenCL [ clblast ] + ++ optionals useRocm rocmBuildInputs; + + cmakeFlags = + [ + (cmakeBool "LLAMA_NATIVE" true) + (cmakeBool "LLAMA_BUILD_SERVER" true) + (cmakeBool "BUILD_SHARED_LIBS" true) + (cmakeBool "CMAKE_SKIP_BUILD_RPATH" true) + (cmakeBool "LLAMA_BLAS" useBlas) + (cmakeBool "LLAMA_CLBLAST" useOpenCL) + (cmakeBool "LLAMA_CUBLAS" useCuda) + (cmakeBool "LLAMA_HIPBLAS" useRocm) + (cmakeBool "LLAMA_METAL" useMetalKit) + (cmakeBool "LLAMA_MPI" useMpi) + ] + ++ optionals useCuda [ + ( + with cudaPackages.flags; + cmakeFeature "CMAKE_CUDA_ARCHITECTURES" ( + builtins.concatStringsSep ";" (map dropDot cudaCapabilities) + ) + ) + ] + ++ optionals useRocm [ + (cmakeFeature "CMAKE_C_COMPILER" "hipcc") + (cmakeFeature "CMAKE_CXX_COMPILER" "hipcc") + + # Build all targets supported by rocBLAS. When updating search for TARGET_LIST_ROCM + # in https://github.com/ROCmSoftwarePlatform/rocBLAS/blob/develop/CMakeLists.txt + # and select the line that matches the current nixpkgs version of rocBLAS. + # Should likely use `rocmPackages.clr.gpuTargets`. + "-DAMDGPU_TARGETS=gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx940;gfx941;gfx942;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102" + ] + ++ optionals useMetalKit [ (lib.cmakeFeature "CMAKE_C_FLAGS" "-D__ARM_FEATURE_DOTPROD=1") ] + ++ optionals useBlas [ (lib.cmakeFeature "LLAMA_BLAS_VENDOR" "OpenBLAS") ]; + + # TODO(SomeoneSerge): It's better to add proper install targets at the CMake level, + # if they haven't been added yet. + postInstall = '' + mv $out/bin/main $out/bin/llama + mv $out/bin/server $out/bin/llama-server + mkdir -p $out/include + cp $src/llama.h $out/include/ + ''; + + # Define the shells here, but don't add in the inputsFrom to avoid recursion. + passthru = { + inherit + useBlas + useCuda + useMetalKit + useMpi + useOpenCL + useRocm + ; + + shell = mkShell { + name = "shell-${finalAttrs.finalPackage.name}"; + description = "contains numpy and sentencepiece"; + buildInputs = [ llama-python ]; + inputsFrom = [ finalAttrs.finalPackage ]; + }; + + shell-extra = mkShell { + name = "shell-extra-${finalAttrs.finalPackage.name}"; + description = "contains numpy, sentencepiece, torchWithoutCuda, and transformers"; + buildInputs = [ llama-python-extra ]; + inputsFrom = [ finalAttrs.finalPackage ]; + }; + }; + + meta = { + # Configurations we don't want even the CI to evaluate. Results in the + # "unsupported platform" messages. This is mostly a no-op, because + # cudaPackages would've refused to evaluate anyway. + badPlatforms = optionals (useCuda || useOpenCL) lib.platforms.darwin; + + # Configurations that are known to result in build failures. Can be + # overridden by importing Nixpkgs with `allowBroken = true`. 
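+    # (Hypothetical consumer sketch, not part of this file: importing Nixpkgs
+    #  with `import nixpkgs { config.allowBroken = true; }` would let such a
+    #  configuration evaluate anyway.)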
+ broken = (useMetalKit && !effectiveStdenv.isDarwin); + + description = "Inference of LLaMA model in pure C/C++${descriptionSuffix}"; + homepage = "https://github.com/ggerganov/llama.cpp/"; + license = lib.licenses.mit; + + # Accommodates `nix run` and `lib.getExe` + mainProgram = "llama"; + + # These people might respond, on the best effort basis, if you ping them + # in case of Nix-specific regressions or for reviewing Nix-specific PRs. + # Consider adding yourself to this list if you want to ensure this flake + # stays maintained and you're willing to invest your time. Do not add + # other people without their consent. Consider removing people after + # they've been unreachable for long periods of time. + + # Note that lib.maintainers is defined in Nixpkgs, but you may just add + # an attrset following the same format as in + # https://github.com/NixOS/nixpkgs/blob/f36a80e54da29775c78d7eff0e628c2b4e34d1d7/maintainers/maintainer-list.nix + maintainers = with lib.maintainers; [ + philiptaron + SomeoneSerge + ]; + + # Extend `badPlatforms` instead + platforms = lib.platforms.all; + }; + } +) diff --git a/.devops/nix/scope.nix b/.devops/nix/scope.nix new file mode 100644 index 000000000..7932ac1e8 --- /dev/null +++ b/.devops/nix/scope.nix @@ -0,0 +1,12 @@ +{ + lib, + newScope, + llamaVersion ? "0.0.0", +}: + +lib.makeScope newScope ( + self: { + inherit llamaVersion; + llama-cpp = self.callPackage ./package.nix { }; + } +) diff --git a/.github/workflows/nix-flakestry.yml b/.github/workflows/nix-flakestry.yml new file mode 100644 index 000000000..3abfb3509 --- /dev/null +++ b/.github/workflows/nix-flakestry.yml @@ -0,0 +1,23 @@ +# Make the flake discoverable on https://flakestry.dev +name: "Publish a flake to flakestry" +on: + push: + tags: + - "v?[0-9]+.[0-9]+.[0-9]+" + - "v?[0-9]+.[0-9]+" + workflow_dispatch: + inputs: + tag: + description: "The existing tag to publish" + type: "string" + required: true +jobs: + publish-flake: + runs-on: ubuntu-latest + permissions: + id-token: "write" + contents: "read" + steps: + - uses: flakestry/flakestry-publish@main + with: + version: "${{ inputs.tag || github.ref_name }}" diff --git a/flake.lock b/flake.lock index 0455f6561..3fcd1f45d 100644 --- a/flake.lock +++ b/flake.lock @@ -1,30 +1,30 @@ { "nodes": { - "flake-utils": { + "flake-parts": { "inputs": { - "systems": "systems" + "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1694529238, - "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", + "lastModified": 1701473968, + "narHash": "sha256-YcVE5emp1qQ8ieHUnxt1wCZCC3ZfAS+SRRWZ2TMda7E=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "34fed993f1674c8d06d58b37ce1e0fe5eebcb9f5", "type": "github" }, "original": { - "owner": "numtide", - "repo": "flake-utils", + "owner": "hercules-ci", + "repo": "flake-parts", "type": "github" } }, "nixpkgs": { "locked": { - "lastModified": 1698318101, - "narHash": "sha256-gUihHt3yPD7bVqg+k/UVHgngyaJ3DMEBchbymBMvK1E=", + "lastModified": 1703559957, + "narHash": "sha256-x9PUuMEPGUOMB51zNxrDr2QoHbYWlCS2xhFedm9MC5Q=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "63678e9f3d3afecfeafa0acead6239cdb447574c", + "rev": "75dd68c36f458c6593c5bbb48abfd3e59bfed380", "type": "github" }, "original": { @@ -34,26 +34,29 @@ "type": "github" } }, - "root": { - "inputs": { - "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs" - } - }, - "systems": { + "nixpkgs-lib": { "locked": { - "lastModified": 
1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "dir": "lib", + "lastModified": 1701253981, + "narHash": "sha256-ztaDIyZ7HrTAfEEUt9AtTDNoCYxUdSd6NrRHaYOIxtk=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "e92039b55bcd58469325ded85d4f58dd5a4eaf58", "type": "github" }, "original": { - "owner": "nix-systems", - "repo": "default", + "dir": "lib", + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", "type": "github" } + }, + "root": { + "inputs": { + "flake-parts": "flake-parts", + "nixpkgs": "nixpkgs" + } } }, "root": "root", diff --git a/flake.nix b/flake.nix index 4cf28d5c1..2209070aa 100644 --- a/flake.nix +++ b/flake.nix @@ -1,139 +1,99 @@ { + description = "Port of Facebook's LLaMA model in C/C++"; + inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; - flake-utils.url = "github:numtide/flake-utils"; + flake-parts.url = "github:hercules-ci/flake-parts"; }; - outputs = { self, nixpkgs, flake-utils }: - flake-utils.lib.eachDefaultSystem (system: - let - name = "llama.cpp"; - src = ./.; - meta.mainProgram = "llama"; - inherit (pkgs.stdenv) isAarch32 isAarch64 isDarwin; - buildInputs = with pkgs; [ openmpi ]; - osSpecific = with pkgs; buildInputs ++ ( - if isAarch64 && isDarwin then - with pkgs.darwin.apple_sdk_11_0.frameworks; [ - Accelerate - MetalKit - ] - else if isAarch32 && isDarwin then - with pkgs.darwin.apple_sdk.frameworks; [ - Accelerate - CoreGraphics - CoreVideo - ] - else if isDarwin then - with pkgs.darwin.apple_sdk.frameworks; [ - Accelerate - CoreGraphics - CoreVideo - ] - else - with pkgs; [ openblas ] - ); - pkgs = import nixpkgs { inherit system; }; - nativeBuildInputs = with pkgs; [ cmake ninja pkg-config ]; - cudatoolkit_joined = with pkgs; symlinkJoin { - # HACK(Green-Sky): nix currently has issues with cmake findcudatoolkit - # see https://github.com/NixOS/nixpkgs/issues/224291 - # copied from jaxlib - name = "${cudaPackages.cudatoolkit.name}-merged"; - paths = [ - cudaPackages.cudatoolkit.lib - cudaPackages.cudatoolkit.out - ] ++ lib.optionals (lib.versionOlder cudaPackages.cudatoolkit.version "11") [ - # for some reason some of the required libs are in the targets/x86_64-linux - # directory; not sure why but this works around it - "${cudaPackages.cudatoolkit}/targets/${system}" - ]; - }; - llama-python = - pkgs.python3.withPackages (ps: with ps; [ numpy sentencepiece ]); - # TODO(Green-Sky): find a better way to opt-into the heavy ml python runtime - llama-python-extra = - pkgs.python3.withPackages (ps: with ps; [ numpy sentencepiece torchWithoutCuda transformers ]); - postPatch = '' - substituteInPlace ./ggml-metal.m \ - --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";" - substituteInPlace ./*.py --replace '/usr/bin/env python' '${llama-python}/bin/python' - ''; - postInstall = '' - mv $out/bin/main $out/bin/llama - mv $out/bin/server $out/bin/llama-server - mkdir -p $out/include - cp ${src}/llama.h $out/include/ - ''; - cmakeFlags = [ "-DLLAMA_NATIVE=OFF" "-DLLAMA_BUILD_SERVER=ON" "-DBUILD_SHARED_LIBS=ON" "-DCMAKE_SKIP_BUILD_RPATH=ON" ]; - in + + # For inspection, use `nix flake show github:ggerganov/llama.cpp` or the nix repl: + # + # ```bash + # ❯ nix repl + # nix-repl> :lf github:ggerganov/llama.cpp + # Added 13 variables. 
+ # nix-repl> outputs.apps.x86_64-linux.quantize + # { program = "/nix/store/00000000000000000000000000000000-llama.cpp/bin/quantize"; type = "app"; } + # ``` + outputs = + { self, flake-parts, ... }@inputs: + let + # We could include the git revisions in the package names but those would + # needlessly trigger rebuilds: + # llamaVersion = self.dirtyShortRev or self.shortRev; + + # Nix already uses cryptographic hashes for versioning, so we'll just fix + # the fake semver for now: + llamaVersion = "0.0.0"; + in + flake-parts.lib.mkFlake { inherit inputs; } + { - packages.default = pkgs.stdenv.mkDerivation { - inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = osSpecific; - cmakeFlags = cmakeFlags - ++ (if isAarch64 && isDarwin then [ - "-DCMAKE_C_FLAGS=-D__ARM_FEATURE_DOTPROD=1" - "-DLLAMA_METAL=ON" - ] else [ - "-DLLAMA_BLAS=ON" - "-DLLAMA_BLAS_VENDOR=OpenBLAS" - ]); - }; - packages.opencl = pkgs.stdenv.mkDerivation { - inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = with pkgs; buildInputs ++ [ clblast ]; - cmakeFlags = cmakeFlags ++ [ - "-DLLAMA_CLBLAST=ON" - ]; - }; - packages.cuda = pkgs.stdenv.mkDerivation { - inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = with pkgs; buildInputs ++ [ cudatoolkit_joined ]; - cmakeFlags = cmakeFlags ++ [ - "-DLLAMA_CUBLAS=ON" - ]; - }; - packages.rocm = pkgs.stdenv.mkDerivation { - inherit name src meta postPatch nativeBuildInputs postInstall; - buildInputs = with pkgs.rocmPackages; buildInputs ++ [ clr hipblas rocblas ]; - cmakeFlags = cmakeFlags ++ [ - "-DLLAMA_HIPBLAS=1" - "-DCMAKE_C_COMPILER=hipcc" - "-DCMAKE_CXX_COMPILER=hipcc" - # Build all targets supported by rocBLAS. When updating search for TARGET_LIST_ROCM - # in github.com/ROCmSoftwarePlatform/rocBLAS/blob/develop/CMakeLists.txt - # and select the line that matches the current nixpkgs version of rocBLAS. - "-DAMDGPU_TARGETS=gfx803;gfx900;gfx906:xnack-;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-;gfx940;gfx941;gfx942;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102" - ]; - }; - apps.llama-server = { - type = "app"; - program = "${self.packages.${system}.default}/bin/llama-server"; - }; - apps.llama-embedding = { - type = "app"; - program = "${self.packages.${system}.default}/bin/embedding"; - }; - apps.llama = { - type = "app"; - program = "${self.packages.${system}.default}/bin/llama"; - }; - apps.quantize = { - type = "app"; - program = "${self.packages.${system}.default}/bin/quantize"; - }; - apps.train-text-from-scratch = { - type = "app"; - program = "${self.packages.${system}.default}/bin/train-text-from-scratch"; - }; - apps.default = self.apps.${system}.llama; - devShells.default = pkgs.mkShell { - buildInputs = [ llama-python ]; - packages = nativeBuildInputs ++ osSpecific; - }; - devShells.extra = pkgs.mkShell { - buildInputs = [ llama-python-extra ]; - packages = nativeBuildInputs ++ osSpecific; - }; - }); + + imports = [ + .devops/nix/nixpkgs-instances.nix + .devops/nix/apps.nix + .devops/nix/devshells.nix + .devops/nix/jetson-support.nix + ]; + + # An overlay can be used to have a more granular control over llama-cpp's + # dependencies and configuration, than that offered by the `.override` + # mechanism. Cf. https://nixos.org/manual/nixpkgs/stable/#chap-overlays. + # + # E.g. in a flake: + # ``` + # { nixpkgs, llama-cpp, ... 
}:
+      #   let pkgs = import nixpkgs {
+      #       overlays = [ (llama-cpp.overlays.default) ];
+      #       system = "aarch64-linux";
+      #       config.allowUnfree = true;
+      #       config.cudaSupport = true;
+      #       config.cudaCapabilities = [ "7.2" ];
+      #       config.cudaEnableForwardCompat = false;
+      #     }; in {
+      #       packages.aarch64-linux.llamaJetsonXavier = pkgs.llamaPackages.llama-cpp;
+      #     }
+      # ```
+      #
+      # Cf. https://nixos.org/manual/nix/unstable/command-ref/new-cli/nix3-flake.html?highlight=flake#flake-format
+      flake.overlays.default =
+        (final: prev: {
+          llamaPackages = final.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
+          inherit (final.llamaPackages) llama-cpp;
+        });
+
+      systems = [
+        "aarch64-darwin"
+        "aarch64-linux"
+        "x86_64-darwin" # x86_64-darwin isn't tested (and likely isn't relevant)
+        "x86_64-linux"
+      ];
+
+      perSystem =
+        {
+          config,
+          lib,
+          pkgs,
+          pkgsCuda,
+          pkgsRocm,
+          ...
+        }:
+        {
+          # We don't use the overlay here so as to avoid making too many instances of nixpkgs,
+          # cf. https://zimbatm.com/notes/1000-instances-of-nixpkgs
+          packages =
+            {
+              default = (pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+            }
+            // lib.optionalAttrs pkgs.stdenv.isLinux {
+              opencl = config.packages.default.override { useOpenCL = true; };
+              cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+              rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp;
+
+              mpi-cpu = config.packages.default.override { useMpi = true; };
+              mpi-cuda = config.packages.default.override { useMpi = true; };
+            };
+        };
+    };
 }

From 04ac0607e913ab91234dfb240e12a76509e30982 Mon Sep 17 00:00:01 2001
From: crasm
Date: Fri, 29 Dec 2023 09:50:29 -0500
Subject: [PATCH 324/859] python : add check-requirements.sh and GitHub workflow (#4585)

* python: add check-requirements.sh and GitHub workflow

This script and workflow force package versions to remain compatible
across all convert*.py scripts, while allowing secondary convert scripts
to import dependencies not wanted in convert.py.
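For illustration, the resulting layout (shown in full in the diff below) pins
shared dependencies once and lets each secondary script layer its extras on
top, using PEP 440 "compatible release" (~=) specifiers rather than exact
== pins:

```
# requirements/requirements-convert.txt (shared by every convert*.py script)
numpy~=1.24.4            # ~= accepts any 1.24.x patch release, unlike ==
sentencepiece~=0.1.98

# requirements/requirements-convert-hf-to-gguf.txt (extras for one script)
-r ./requirements-convert.txt
torch~=2.1.1
```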
* Move requirements into ./requirements * Fail on "==" being used for package requirements (but can be suppressed) * Enforce "compatible release" syntax instead of == * Update workflow * Add upper version bound for transformers and protobuf * improve check-requirements.sh * small syntax change * don't remove venvs if nocleanup is passed * See if this fixes docker workflow * Move check-requirements.sh into ./scripts/ --------- Co-authored-by: Jared Van Bortel --- .devops/full-cuda.Dockerfile | 3 +- .devops/full-rocm.Dockerfile | 3 +- .devops/full.Dockerfile | 3 +- .devops/main-rocm.Dockerfile | 3 +- .../workflows/python-check-requirements.yml | 29 +++ convert-hf-to-gguf.py | 95 +++++----- convert-lora-to-ggml.py | 147 +++++++-------- convert-persimmon-to-gguf.py | 1 + requirements-hf-to-gguf.txt | 3 - requirements.txt | 17 +- .../requirements-convert-hf-to-gguf.txt | 2 + ...equirements-convert-llama-ggml-to-gguf.txt | 1 + .../requirements-convert-lora-to-ggml.txt | 2 + ...requirements-convert-persimmon-to-gguf.txt | 2 + requirements/requirements-convert.txt | 5 + scripts/check-requirements.sh | 174 ++++++++++++++++++ 16 files changed, 360 insertions(+), 130 deletions(-) create mode 100644 .github/workflows/python-check-requirements.yml mode change 100644 => 100755 convert-persimmon-to-gguf.py delete mode 100644 requirements-hf-to-gguf.txt create mode 100644 requirements/requirements-convert-hf-to-gguf.txt create mode 100644 requirements/requirements-convert-llama-ggml-to-gguf.txt create mode 100644 requirements/requirements-convert-lora-to-ggml.txt create mode 100644 requirements/requirements-convert-persimmon-to-gguf.txt create mode 100644 requirements/requirements-convert.txt create mode 100755 scripts/check-requirements.sh diff --git a/.devops/full-cuda.Dockerfile b/.devops/full-cuda.Dockerfile index 360602d65..77a9ddc14 100644 --- a/.devops/full-cuda.Dockerfile +++ b/.devops/full-cuda.Dockerfile @@ -14,7 +14,8 @@ ARG CUDA_DOCKER_ARCH=all RUN apt-get update && \ apt-get install -y build-essential python3 python3-pip git -COPY requirements.txt requirements.txt +COPY requirements.txt requirements.txt +COPY requirements requirements RUN pip install --upgrade pip setuptools wheel \ && pip install -r requirements.txt diff --git a/.devops/full-rocm.Dockerfile b/.devops/full-rocm.Dockerfile index 6c521e9b4..8b9633dc4 100644 --- a/.devops/full-rocm.Dockerfile +++ b/.devops/full-rocm.Dockerfile @@ -23,7 +23,8 @@ ARG ROCM_DOCKER_ARCH=\ gfx1101 \ gfx1102 -COPY requirements.txt requirements.txt +COPY requirements.txt requirements.txt +COPY requirements requirements RUN pip install --upgrade pip setuptools wheel \ && pip install -r requirements.txt diff --git a/.devops/full.Dockerfile b/.devops/full.Dockerfile index 687628b35..cef1297d3 100644 --- a/.devops/full.Dockerfile +++ b/.devops/full.Dockerfile @@ -5,7 +5,8 @@ FROM ubuntu:$UBUNTU_VERSION as build RUN apt-get update && \ apt-get install -y build-essential python3 python3-pip git -COPY requirements.txt requirements.txt +COPY requirements.txt requirements.txt +COPY requirements requirements RUN pip install --upgrade pip setuptools wheel \ && pip install -r requirements.txt diff --git a/.devops/main-rocm.Dockerfile b/.devops/main-rocm.Dockerfile index 789deff6d..0a706dc73 100644 --- a/.devops/main-rocm.Dockerfile +++ b/.devops/main-rocm.Dockerfile @@ -23,7 +23,8 @@ ARG ROCM_DOCKER_ARCH=\ gfx1101 \ gfx1102 -COPY requirements.txt requirements.txt +COPY requirements.txt requirements.txt +COPY requirements requirements RUN pip install --upgrade pip 
setuptools wheel \ && pip install -r requirements.txt diff --git a/.github/workflows/python-check-requirements.yml b/.github/workflows/python-check-requirements.yml new file mode 100644 index 000000000..92e1108b3 --- /dev/null +++ b/.github/workflows/python-check-requirements.yml @@ -0,0 +1,29 @@ +name: Python check requirements.txt + +on: + push: + paths: + - 'scripts/check-requirements.sh' + - 'convert*.py' + - 'requirements.txt' + - 'requirements/*.txt' + pull_request: + paths: + - 'scripts/check-requirements.sh' + - 'convert*.py' + - 'requirements.txt' + - 'requirements/*.txt' + +jobs: + python-check-requirements: + runs-on: ubuntu-latest + name: check-requirements + steps: + - name: Check out source repository + uses: actions/checkout@v3 + - name: Set up Python environment + uses: actions/setup-python@v4 + with: + python-version: "3.11" + - name: Run check-requirements.sh script + run: bash scripts/check-requirements.sh nocleanup diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 3557a825e..51724c0df 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -242,7 +242,7 @@ class Model: tokens: list[bytearray] = [] toktypes: list[int] = [] - from transformers import AutoTokenizer # type: ignore[attr-defined] + from transformers import AutoTokenizer tokenizer = AutoTokenizer.from_pretrained(dir_model) vocab_size = hparams.get("vocab_size", len(tokenizer.vocab)) assert max(tokenizer.vocab.values()) < vocab_size @@ -856,7 +856,7 @@ class StableLMModel(Model): hparams = self.hparams block_count = hparams["num_hidden_layers"] - self.gguf_writer.add_name(dir_model.name) + self.gguf_writer.add_name(self.dir_model.name) self.gguf_writer.add_context_length(hparams["max_position_embeddings"]) self.gguf_writer.add_embedding_length(hparams["hidden_size"]) self.gguf_writer.add_block_count(block_count) @@ -902,7 +902,7 @@ class QwenModel(Model): tokens: list[bytearray] = [] toktypes: list[int] = [] - from transformers import AutoTokenizer # type: ignore[attr-defined] + from transformers import AutoTokenizer tokenizer = AutoTokenizer.from_pretrained(dir_model, trust_remote_code=True) vocab_size = hparams["vocab_size"] assert max(tokenizer.get_vocab().values()) < vocab_size @@ -1185,57 +1185,62 @@ def parse_args() -> argparse.Namespace: return parser.parse_args() -args = parse_args() +def main() -> None: + args = parse_args() -dir_model = args.model + dir_model = args.model -if args.awq_path: - sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) - from awq.apply_awq import add_scale_weights - tmp_model_path = args.model / "weighted_model" - dir_model = tmp_model_path - if tmp_model_path.is_dir(): - print(f"{tmp_model_path} exists as a weighted model.") + if args.awq_path: + sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) + from awq.apply_awq import add_scale_weights + tmp_model_path = args.model / "weighted_model" + dir_model = tmp_model_path + if tmp_model_path.is_dir(): + print(f"{tmp_model_path} exists as a weighted model.") + else: + tmp_model_path.mkdir(parents=True, exist_ok=True) + print("Saving new weighted model ...") + add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) + print(f"Saved weighted model at {tmp_model_path}.") + + if not dir_model.is_dir(): + print(f'Error: {args.model} is not a directory', file=sys.stderr) + sys.exit(1) + + ftype_map = { + "f32": gguf.GGMLQuantizationType.F32, + "f16": gguf.GGMLQuantizationType.F16, + } + + if args.outfile is not None: + fname_out = args.outfile else: - 
tmp_model_path.mkdir(parents=True, exist_ok=True) - print("Saving new weighted model ...") - add_scale_weights(str(args.model), str(args.awq_path), str(tmp_model_path)) - print(f"Saved weighted model at {tmp_model_path}.") + # output in the same directory as the model by default + fname_out = dir_model / f'ggml-model-{args.outtype}.gguf' -if not dir_model.is_dir(): - print(f'Error: {args.model} is not a directory', file=sys.stderr) - sys.exit(1) + print(f"Loading model: {dir_model.name}") -ftype_map = { - "f32": gguf.GGMLQuantizationType.F32, - "f16": gguf.GGMLQuantizationType.F16, -} + hparams = Model.load_hparams(dir_model) -if args.outfile is not None: - fname_out = args.outfile -else: - # output in the same directory as the model by default - fname_out = dir_model / f'ggml-model-{args.outtype}.gguf' + with torch.inference_mode(): + model_class = Model.from_model_architecture(hparams["architectures"][0]) + model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian) -print(f"Loading model: {dir_model.name}") + print("Set model parameters") + model_instance.set_gguf_parameters() -hparams = Model.load_hparams(dir_model) + print("Set model tokenizer") + model_instance.set_vocab() -with torch.inference_mode(): - model_class = Model.from_model_architecture(hparams["architectures"][0]) - model_instance = model_class(dir_model, ftype_map[args.outtype], fname_out, args.bigendian) + if args.vocab_only: + print(f"Exporting model vocab to '{fname_out}'") + model_instance.write_vocab() + else: + print(f"Exporting model to '{fname_out}'") + model_instance.write() - print("Set model parameters") - model_instance.set_gguf_parameters() + print(f"Model successfully exported to '{fname_out}'") - print("Set model tokenizer") - model_instance.set_vocab() - if args.vocab_only: - print(f"Exporting model vocab to '{fname_out}'") - model_instance.write_vocab() - else: - print(f"Exporting model to '{fname_out}'") - model_instance.write() - - print(f"Model successfully exported to '{fname_out}'") +if __name__ == '__main__': + main() diff --git a/convert-lora-to-ggml.py b/convert-lora-to-ggml.py index 53bb8a3d9..35ce152f4 100755 --- a/convert-lora-to-ggml.py +++ b/convert-lora-to-ggml.py @@ -47,95 +47,96 @@ def write_tensor_header(fout: BinaryIO, name: str, shape: Sequence[int], data_ty fout.seek((fout.tell() + 31) & -32) -if len(sys.argv) < 2: - print(f"Usage: python {sys.argv[0]} [arch]") - print( - "Path must contain HuggingFace PEFT LoRA files 'adapter_config.json' and 'adapter_model.bin'" - ) - print(f"Arch must be one of {list(gguf.MODEL_ARCH_NAMES.values())} (default: llama)") - sys.exit(1) +if __name__ == '__main__': + if len(sys.argv) < 2: + print(f"Usage: python {sys.argv[0]} [arch]") + print( + "Path must contain HuggingFace PEFT LoRA files 'adapter_config.json' and 'adapter_model.bin'" + ) + print(f"Arch must be one of {list(gguf.MODEL_ARCH_NAMES.values())} (default: llama)") + sys.exit(1) -input_json = os.path.join(sys.argv[1], "adapter_config.json") -input_model = os.path.join(sys.argv[1], "adapter_model.bin") -output_path = os.path.join(sys.argv[1], "ggml-adapter-model.bin") + input_json = os.path.join(sys.argv[1], "adapter_config.json") + input_model = os.path.join(sys.argv[1], "adapter_model.bin") + output_path = os.path.join(sys.argv[1], "ggml-adapter-model.bin") -model = torch.load(input_model, map_location="cpu") -arch_name = sys.argv[2] if len(sys.argv) == 3 else "llama" + model = torch.load(input_model, map_location="cpu") + arch_name = sys.argv[2] if 
len(sys.argv) == 3 else "llama" -if arch_name not in gguf.MODEL_ARCH_NAMES.values(): - print(f"Error: unsupported architecture {arch_name}") - sys.exit(1) + if arch_name not in gguf.MODEL_ARCH_NAMES.values(): + print(f"Error: unsupported architecture {arch_name}") + sys.exit(1) -arch = list(gguf.MODEL_ARCH_NAMES.keys())[list(gguf.MODEL_ARCH_NAMES.values()).index(arch_name)] -name_map = gguf.TensorNameMap(arch, 200) # 200 layers ought to be enough for anyone + arch = list(gguf.MODEL_ARCH_NAMES.keys())[list(gguf.MODEL_ARCH_NAMES.values()).index(arch_name)] + name_map = gguf.TensorNameMap(arch, 200) # 200 layers ought to be enough for anyone -with open(input_json, "r") as f: - params = json.load(f) + with open(input_json, "r") as f: + params = json.load(f) -if params["peft_type"] != "LORA": - print(f"Error: unsupported adapter type {params['peft_type']}, expected LORA") - sys.exit(1) + if params["peft_type"] != "LORA": + print(f"Error: unsupported adapter type {params['peft_type']}, expected LORA") + sys.exit(1) -if params["fan_in_fan_out"] is True: - print("Error: param fan_in_fan_out is not supported") - sys.exit(1) + if params["fan_in_fan_out"] is True: + print("Error: param fan_in_fan_out is not supported") + sys.exit(1) -if params["bias"] is not None and params["bias"] != "none": - print("Error: param bias is not supported") - sys.exit(1) + if params["bias"] is not None and params["bias"] != "none": + print("Error: param bias is not supported") + sys.exit(1) -# TODO: these seem to be layers that have been trained but without lora. -# doesn't seem widely used but eventually should be supported -if params["modules_to_save"] is not None and len(params["modules_to_save"]) > 0: - print("Error: param modules_to_save is not supported") - sys.exit(1) + # TODO: these seem to be layers that have been trained but without lora. + # doesn't seem widely used but eventually should be supported + if params["modules_to_save"] is not None and len(params["modules_to_save"]) > 0: + print("Error: param modules_to_save is not supported") + sys.exit(1) -with open(output_path, "wb") as fout: - fout.truncate() + with open(output_path, "wb") as fout: + fout.truncate() - write_file_header(fout, params) - for k, v in model.items(): - orig_k = k - if k.endswith(".default.weight"): - k = k.replace(".default.weight", ".weight") - if k in ["llama_proj.weight", "llama_proj.bias"]: - continue - if k.endswith("lora_A.weight"): - if v.dtype != torch.float16 and v.dtype != torch.float32: + write_file_header(fout, params) + for k, v in model.items(): + orig_k = k + if k.endswith(".default.weight"): + k = k.replace(".default.weight", ".weight") + if k in ["llama_proj.weight", "llama_proj.bias"]: + continue + if k.endswith("lora_A.weight"): + if v.dtype != torch.float16 and v.dtype != torch.float32: + v = v.float() + v = v.T + else: v = v.float() - v = v.T - else: - v = v.float() - t = v.detach().numpy() + t = v.detach().numpy() - prefix = "base_model.model." - if k.startswith(prefix): - k = k[len(prefix) :] + prefix = "base_model.model." 
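+            # PEFT stores tensors under a "base_model.model." wrapper; strip it
+            # so the remaining name matches the base model's tensor names when
+            # mapped through gguf.TensorNameMap below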
+ if k.startswith(prefix): + k = k[len(prefix) :] - lora_suffixes = (".lora_A.weight", ".lora_B.weight") - if k.endswith(lora_suffixes): - suffix = k[-len(lora_suffixes[0]):] - k = k[: -len(lora_suffixes[0])] - else: - print(f"Error: unrecognized tensor name {orig_k}") - sys.exit(1) + lora_suffixes = (".lora_A.weight", ".lora_B.weight") + if k.endswith(lora_suffixes): + suffix = k[-len(lora_suffixes[0]):] + k = k[: -len(lora_suffixes[0])] + else: + print(f"Error: unrecognized tensor name {orig_k}") + sys.exit(1) - tname = name_map.get_name(k) - if tname is None: - print(f"Error: could not map tensor name {orig_k}") - print(" Note: the arch parameter must be specified if the model is not llama") - sys.exit(1) + tname = name_map.get_name(k) + if tname is None: + print(f"Error: could not map tensor name {orig_k}") + print(" Note: the arch parameter must be specified if the model is not llama") + sys.exit(1) - if suffix == ".lora_A.weight": - tname += ".weight.loraA" - elif suffix == ".lora_B.weight": - tname += ".weight.loraB" - else: - assert False + if suffix == ".lora_A.weight": + tname += ".weight.loraA" + elif suffix == ".lora_B.weight": + tname += ".weight.loraB" + else: + assert False - print(f"{k} => {tname} {t.shape} {t.dtype} {t.nbytes/1024/1024:.2f}MB") - write_tensor_header(fout, tname, t.shape, t.dtype) - t.tofile(fout) + print(f"{k} => {tname} {t.shape} {t.dtype} {t.nbytes/1024/1024:.2f}MB") + write_tensor_header(fout, tname, t.shape, t.dtype) + t.tofile(fout) -print(f"Converted {input_json} and {input_model} to {output_path}") + print(f"Converted {input_json} and {input_model} to {output_path}") diff --git a/convert-persimmon-to-gguf.py b/convert-persimmon-to-gguf.py old mode 100644 new mode 100755 index 206b7d5ff..1ba5864dc --- a/convert-persimmon-to-gguf.py +++ b/convert-persimmon-to-gguf.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 import torch import os from pprint import pprint diff --git a/requirements-hf-to-gguf.txt b/requirements-hf-to-gguf.txt deleted file mode 100644 index f4600539e..000000000 --- a/requirements-hf-to-gguf.txt +++ /dev/null @@ -1,3 +0,0 @@ --r requirements.txt -torch==2.1.1 -transformers==4.35.2 diff --git a/requirements.txt b/requirements.txt index 1a1162566..d36f74520 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,12 @@ -numpy==1.24.4 -sentencepiece==0.1.98 -transformers>=4.34.0 -gguf>=0.1.0 -protobuf>=4.21.0 +# These requirements include all dependencies for all top-level python scripts +# for llama.cpp. Avoid adding packages here directly. +# +# Package versions must stay compatible across all top-level python scripts. 
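+# (Compatibility is checked in CI by scripts/check-requirements.sh.)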
+# + +-r ./requirements/requirements-convert.txt + +-r ./requirements/requirements-convert-hf-to-gguf.txt +-r ./requirements/requirements-convert-llama-ggml-to-gguf.txt +-r ./requirements/requirements-convert-lora-to-ggml.txt +-r ./requirements/requirements-convert-persimmon-to-gguf.txt diff --git a/requirements/requirements-convert-hf-to-gguf.txt b/requirements/requirements-convert-hf-to-gguf.txt new file mode 100644 index 000000000..6ac402610 --- /dev/null +++ b/requirements/requirements-convert-hf-to-gguf.txt @@ -0,0 +1,2 @@ +-r ./requirements-convert.txt +torch~=2.1.1 diff --git a/requirements/requirements-convert-llama-ggml-to-gguf.txt b/requirements/requirements-convert-llama-ggml-to-gguf.txt new file mode 100644 index 000000000..a0f37cd1c --- /dev/null +++ b/requirements/requirements-convert-llama-ggml-to-gguf.txt @@ -0,0 +1 @@ +-r ./requirements-convert.txt diff --git a/requirements/requirements-convert-lora-to-ggml.txt b/requirements/requirements-convert-lora-to-ggml.txt new file mode 100644 index 000000000..6ac402610 --- /dev/null +++ b/requirements/requirements-convert-lora-to-ggml.txt @@ -0,0 +1,2 @@ +-r ./requirements-convert.txt +torch~=2.1.1 diff --git a/requirements/requirements-convert-persimmon-to-gguf.txt b/requirements/requirements-convert-persimmon-to-gguf.txt new file mode 100644 index 000000000..6ac402610 --- /dev/null +++ b/requirements/requirements-convert-persimmon-to-gguf.txt @@ -0,0 +1,2 @@ +-r ./requirements-convert.txt +torch~=2.1.1 diff --git a/requirements/requirements-convert.txt b/requirements/requirements-convert.txt new file mode 100644 index 000000000..a3d6ecec0 --- /dev/null +++ b/requirements/requirements-convert.txt @@ -0,0 +1,5 @@ +numpy~=1.24.4 +sentencepiece~=0.1.98 +transformers>=4.35.2,<5.0.0 +gguf>=0.1.0 +protobuf>=4.21.0,<5.0.0 diff --git a/scripts/check-requirements.sh b/scripts/check-requirements.sh new file mode 100755 index 000000000..af7bab753 --- /dev/null +++ b/scripts/check-requirements.sh @@ -0,0 +1,174 @@ +#!/bin/bash +set -euo pipefail + +# +# check-requirements.sh checks all requirements files for each top-level +# convert*.py script. +# +# WARNING: This is quite IO intensive, because a fresh venv is set up for every +# python script. As of 2023-12-22, this writes ~2.7GB of data. An adequately +# sized tmpfs /tmp or ramdisk is recommended if running this frequently. +# +# usage: check-requirements.sh [] +# check-requirements.sh nocleanup [] +# +# where: +# - is a directory that can be used as the base for +# setting up the venvs. Defaults to `/tmp`. +# - 'nocleanup' as the first argument will disable automatic cleanup +# of the files created by this script. +# +# requires: +# - bash >= 3.2.57 +# - shellcheck +# +# For each script, it creates a fresh venv, `pip install`s the requirements, and +# finally imports the python script to check for `ImportError`. +# + +log() { + local level=$1 msg=$2 + printf >&2 '%s: %s\n' "$level" "$msg" +} + +debug() { + log DEBUG "$@" +} + +info() { + log INFO "$@" +} + +fatal() { + log FATAL "$@" + exit 1 +} + +cleanup() { + if [[ -n ${workdir+x} && -d $workdir && -w $workdir ]]; then + info "Removing $workdir" + local count=0 + rm -rfv -- "$workdir" | while read -r; do + if (( count++ > 750 )); then + printf . + count=0 + fi + done + printf '\n' + info "Removed $workdir" + fi +} + +do_cleanup=1 +if [[ ${1-} == nocleanup ]]; then + do_cleanup=0; shift +fi + +if (( do_cleanup )); then + trap exit INT TERM + trap cleanup EXIT +fi + +this=$(realpath -- "$0"); readonly this +cd "$(dirname "$this")/.." 
# PWD should stay in llama.cpp project directory + +shellcheck "$this" + +readonly reqs_dir=requirements + +if [[ ${1+x} ]]; then + tmp_dir=$(realpath -- "$1") + if [[ ! ( -d $tmp_dir && -w $tmp_dir ) ]]; then + fatal "$tmp_dir is not a writable directory" + fi +else + tmp_dir=/tmp +fi + +workdir=$(mktemp -d "$tmp_dir/check-requirements.XXXX"); readonly workdir +info "Working directory: $workdir" + +check_requirements() { + local reqs=$1 + + info "$reqs: beginning check" + pip --disable-pip-version-check install -qr "$reqs" + info "$reqs: OK" +} + +check_convert_script() { + local py=$1 # e.g. ./convert-hf-to-gguf.py + local pyname=${py##*/} # e.g. convert-hf-to-gguf.py + pyname=${pyname%.py} # e.g. convert-hf-to-gguf + + info "$py: beginning check" + + local reqs="$reqs_dir/requirements-$pyname.txt" + if [[ ! -r $reqs ]]; then + fatal "$py missing requirements. Expected: $reqs" + fi + + local venv="$workdir/$pyname-venv" + python3 -m venv "$venv" + + ( + # shellcheck source=/dev/null + source "$venv/bin/activate" + + check_requirements "$reqs" + + python - "$py" "$pyname" <<'EOF' +import sys +from importlib.machinery import SourceFileLoader +py, pyname = sys.argv[1:] +SourceFileLoader(pyname, py).load_module() +EOF + ) + + if (( do_cleanup )); then + rm -rf -- "$venv" + fi + + info "$py: imports OK" +} + +readonly ignore_eq_eq='check_requirements: ignore "=="' + +for req in "$reqs_dir"/*; do + # Check that all sub-requirements are added to top-level requirements.txt + if ! grep -qF "$req" requirements.txt; then + fatal "$req needs to be added to requirements.txt" + fi + + # Make sure exact release versions aren't being pinned in the requirements + # Filters out the ignore string + if grep -vF "$ignore_eq_eq" "$req" | grep -q '=='; then + tab=$'\t' + cat >&2 < Date: Sat, 30 Dec 2023 00:31:19 +0800 Subject: [PATCH 325/859] cuda: fix vmm oom issue on NVIDIA AGX Orin (#4687) Signed-off-by: hydai --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 9a9effcf5..09585b07d 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6662,7 +6662,7 @@ static void ggml_cuda_pool_free_leg(int device, void * ptr, size_t size) { // pool with virtual memory static CUdeviceptr g_cuda_pool_addr[GGML_CUDA_MAX_DEVICES] = {0}; static size_t g_cuda_pool_used[GGML_CUDA_MAX_DEVICES] = {0}; -static const size_t CUDA_POOL_VMM_MAX_SIZE = 1ull << 36; // 64 GB +static const size_t CUDA_POOL_VMM_MAX_SIZE = 1ull << 35; // 32 GB static void * ggml_cuda_pool_malloc_vmm(int device, size_t size, size_t * actual_size) { scoped_spin_lock lock(g_cuda_pool_lock); From ce18d727a47f2473ca863a6f78bf3ad480008f72 Mon Sep 17 00:00:00 2001 From: Steward Garcia <57494570+FSSRepo@users.noreply.github.com> Date: Fri, 29 Dec 2023 11:52:15 -0500 Subject: [PATCH 326/859] clip : enable gpu backend (#4205) * clip: enable CUDA backend * add missing kernels * add enough padding for alignment * remove ggml_repeat of clip.cpp * add metal backend * llava : fixes - avoid ggml_repeat - use GGML_USE_ instead of CLIP_USE_ macros - remove unused vars --------- Co-authored-by: Georgi Gerganov --- examples/llava/CMakeLists.txt | 3 +- examples/llava/clip.cpp | 231 +++++++++++++++++++--------------- 2 files changed, 131 insertions(+), 103 deletions(-) diff --git a/examples/llava/CMakeLists.txt b/examples/llava/CMakeLists.txt index 48dae1506..2985caff8 100644 --- a/examples/llava/CMakeLists.txt +++ b/examples/llava/CMakeLists.txt @@ -24,7 +24,8 @@ endif() if (NOT MSVC) target_compile_options(llava 
PRIVATE -Wno-cast-qual) # stb_image.h - endif() +endif() + if(TARGET BUILD_INFO) add_dependencies(llava BUILD_INFO) endif() diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index f06ec400d..f9326a5cc 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -16,12 +16,19 @@ #include "clip.h" #include "ggml.h" #include "ggml-alloc.h" +#include "ggml-backend.h" + +#ifdef GGML_USE_CUBLAS +#include "ggml-cuda.h" +#endif + +#ifdef GGML_USE_METAL +#include "ggml-metal.h" +#endif #define STB_IMAGE_IMPLEMENTATION #include "stb_image.h" -#define CLIP_DEBUG - static std::string format(const char * fmt, ...) { va_list ap; va_list ap2; @@ -196,20 +203,6 @@ struct clip_vision_model { struct ggml_tensor * mm_2_b; }; -// Replacement for std::vector that doesn't require zero-initialization. -struct clip_buffer { - uint8_t * data = NULL; - size_t size = 0; - - void resize(size_t size) { - delete[] data; - data = new uint8_t[size]; - this->size = size; - } - - ~clip_buffer() { delete[] data; } -}; - struct clip_ctx { bool has_text_encoder = false; bool has_vision_encoder = false; @@ -223,9 +216,10 @@ struct clip_ctx { struct gguf_context * ctx_gguf; // memory buffers to evaluate the model - clip_buffer buf_compute; - clip_buffer buf_alloc; - ggml_allocr * alloc = NULL; + ggml_backend_buffer_t params_buffer = NULL; + ggml_backend_buffer_t compute_buffer = NULL; + ggml_backend_t backend = NULL; + ggml_allocr * compute_alloc = NULL; }; static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_image_f32_batch * imgs) { @@ -252,25 +246,20 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima if(ctx->has_llava_projector) { GGML_ASSERT(batch_size == 1); } - - const auto & buf_compute = ctx->buf_compute; - struct ggml_init_params params = { - /*.mem_size =*/ buf_compute.size, - /*.mem_buffer =*/ buf_compute.data, - /*.no_alloc =*/ false, + /*.mem_size =*/ GGML_DEFAULT_GRAPH_SIZE * ggml_tensor_overhead() + ggml_graph_overhead(), + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ true, }; - params.no_alloc = true; - struct ggml_context * ctx0 = ggml_init(params); struct ggml_cgraph * gf = ggml_new_graph(ctx0); struct ggml_tensor * inp_raw = ggml_new_tensor_4d(ctx0, GGML_TYPE_F32, image_size, image_size, 3, batch_size); - ggml_allocr_alloc(ctx->alloc, inp_raw); + ggml_allocr_alloc(ctx->compute_alloc, inp_raw); - if (!ggml_allocr_is_measure(ctx->alloc)) { - float * data = (float *)ggml_get_data(inp_raw); + if (!ggml_allocr_is_measure(ctx->compute_alloc)) { + float * data = (float *)malloc(ggml_nbytes(inp_raw)); for (size_t i = 0; i < imgs->size; i++) { const int nx = imgs->data[i].nx; @@ -289,6 +278,8 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima } } } + ggml_backend_tensor_set(inp_raw, data, 0, ggml_nbytes(inp_raw)); + free(data); } struct ggml_tensor * inp = ggml_conv_2d(ctx0, model.patch_embeddings, inp_raw, patch_size, patch_size, 0, 0, 1, 1); @@ -298,36 +289,39 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima // concat class_embeddings and patch_embeddings struct ggml_tensor * embeddings = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, num_positions, batch_size); - ggml_allocr_alloc(ctx->alloc, embeddings); - if (!ggml_allocr_is_measure(ctx->alloc)) { - ggml_set_zero(embeddings); + ggml_allocr_alloc(ctx->compute_alloc, embeddings); + if (!ggml_allocr_is_measure(ctx->compute_alloc)) { + void* zero_mem = malloc(ggml_nbytes(embeddings)); + memset(zero_mem, 0, 
ggml_nbytes(embeddings)); + ggml_backend_tensor_set(embeddings, zero_mem, 0, ggml_nbytes(embeddings)); + free(zero_mem); } - struct ggml_tensor * temp = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, 1, batch_size); - ggml_allocr_alloc(ctx->alloc, temp); + embeddings = ggml_acc(ctx0, embeddings, model.class_embedding, + embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], 0); - embeddings = ggml_acc(ctx0, embeddings, ggml_repeat(ctx0, model.class_embedding, temp), embeddings->nb[1], - embeddings->nb[2], embeddings->nb[3], 0); - embeddings = - ggml_acc(ctx0, embeddings, inp, embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], model.class_embedding->nb[1]); + embeddings = ggml_acc(ctx0, embeddings, inp, + embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], model.class_embedding->nb[1]); struct ggml_tensor * positions = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_positions); - ggml_allocr_alloc(ctx->alloc, positions); - if (!ggml_allocr_is_measure(ctx->alloc)) { + ggml_allocr_alloc(ctx->compute_alloc, positions); + if (!ggml_allocr_is_measure(ctx->compute_alloc)) { + int* positions_data = (int*)malloc(ggml_nbytes(positions)); for (int i = 0; i < num_positions; i++) { - ggml_set_i32_1d(positions, i, i); + positions_data[i] = i; } + ggml_backend_tensor_set(positions, positions_data, 0, ggml_nbytes(positions)); + free(positions_data); } embeddings = - ggml_add(ctx0, embeddings, ggml_repeat(ctx0, ggml_get_rows(ctx0, model.position_embeddings, positions), embeddings)); + ggml_add(ctx0, embeddings, ggml_get_rows(ctx0, model.position_embeddings, positions)); // pre-layernorm { embeddings = ggml_norm(ctx0, embeddings, eps); - embeddings = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, model.pre_ln_w, embeddings), embeddings), - ggml_repeat(ctx0, model.pre_ln_b, embeddings)); + embeddings = ggml_add(ctx0, ggml_mul(ctx0, embeddings, model.pre_ln_w), model.pre_ln_b); } // loop over layers @@ -340,15 +334,15 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima { cur = ggml_norm(ctx0, cur, eps); - cur = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, model.layers[il].ln_1_w, cur), cur), - ggml_repeat(ctx0, model.layers[il].ln_1_b, cur)); + cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].ln_1_w), + model.layers[il].ln_1_b); } // self-attention { struct ggml_tensor * Q = - ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].q_b, cur), ggml_mul_mat(ctx0, model.layers[il].q_w, cur)); + ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].q_w, cur), model.layers[il].q_b); Q = ggml_scale_inplace(ctx0, Q, 1.0f / sqrt((float)d_head)); Q = ggml_reshape_4d(ctx0, Q, d_head, n_head, num_positions, batch_size); @@ -356,14 +350,14 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima Q = ggml_reshape_3d(ctx0, Q, d_head, num_positions, n_head * batch_size); struct ggml_tensor * K = - ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].k_b, cur), ggml_mul_mat(ctx0, model.layers[il].k_w, cur)); + ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].k_w, cur), model.layers[il].k_b); K = ggml_reshape_4d(ctx0, K, d_head, n_head, num_positions, batch_size); K = ggml_cont(ctx0, ggml_permute(ctx0, K, 0, 2, 1, 3)); K = ggml_reshape_3d(ctx0, K, d_head, num_positions, n_head * batch_size); struct ggml_tensor * V = - ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].v_b, cur), ggml_mul_mat(ctx0, model.layers[il].v_w, cur)); + ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].v_w, cur), model.layers[il].v_b); V = ggml_reshape_4d(ctx0, V, d_head, 
n_head, num_positions, batch_size); V = ggml_cont(ctx0, ggml_permute(ctx0, V, 1, 2, 0, 3)); @@ -379,7 +373,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima } // attention output - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].o_b, cur), ggml_mul_mat(ctx0, model.layers[il].o_w, cur)); + cur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].o_w, cur), model.layers[il].o_b); // re-add the layer input, e.g., residual cur = ggml_add(ctx0, cur, embeddings); @@ -390,12 +384,11 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima { cur = ggml_norm(ctx0, cur, eps); - cur = ggml_add(ctx0, ggml_mul(ctx0, ggml_repeat(ctx0, model.layers[il].ln_2_w, cur), cur), - ggml_repeat(ctx0, model.layers[il].ln_2_b, cur)); + cur = ggml_add(ctx0, ggml_mul(ctx0, cur, model.layers[il].ln_2_w), model.layers[il].ln_2_b); } cur = ggml_mul_mat(ctx0, model.layers[il].ff_i_w, cur); - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].ff_i_b, cur), cur); + cur = ggml_add(ctx0, cur, model.layers[il].ff_i_b); if (ctx->use_gelu) { cur = ggml_gelu_inplace(ctx0, cur); @@ -404,7 +397,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima } cur = ggml_mul_mat(ctx0, model.layers[il].ff_o_w, cur); - cur = ggml_add(ctx0, ggml_repeat(ctx0, model.layers[il].ff_o_b, cur), cur); + cur = ggml_add(ctx0, cur, model.layers[il].ff_o_b); // residual 2 cur = ggml_add(ctx0, embeddings, cur); @@ -417,23 +410,26 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima embeddings = ggml_reshape_2d(ctx0, embeddings, embeddings->ne[0], embeddings->ne[1]); struct ggml_tensor * patches = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_patches); - ggml_allocr_alloc(ctx->alloc, patches); - if (!ggml_allocr_is_measure(ctx->alloc)) { - for (int i = 0; i < num_patches; ++i) { - ggml_set_i32_1d(patches, i, i+1); + ggml_allocr_alloc(ctx->compute_alloc, patches); + if (!ggml_allocr_is_measure(ctx->compute_alloc)) { + int* patches_data = (int*)malloc(ggml_nbytes(patches)); + for (int i = 0; i < num_positions; i++) { + patches_data[i] = i + 1; } + ggml_backend_tensor_set(patches, patches_data, 0, ggml_nbytes(patches)); + free(patches_data); } embeddings = ggml_get_rows(ctx0, embeddings, patches); // mm projection 0 embeddings = ggml_mul_mat(ctx0, model.mm_0_w, embeddings); - embeddings = ggml_add(ctx0, ggml_repeat(ctx0, model.mm_0_b, embeddings), embeddings); + embeddings = ggml_add(ctx0, embeddings, model.mm_0_b); embeddings = ggml_gelu(ctx0, embeddings); embeddings = ggml_mul_mat(ctx0, model.mm_2_w, embeddings); - embeddings = ggml_add(ctx0, ggml_repeat(ctx0, model.mm_2_b, embeddings), embeddings); + embeddings = ggml_add(ctx0, embeddings, model.mm_2_b); } // build the graph @@ -446,7 +442,6 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima // read and create ggml_context containing the tensors and their data struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { - struct ggml_context * meta = NULL; struct gguf_init_params params = { @@ -479,7 +474,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("%s: ftype: %s\n", __func__, ftype_str.c_str()); printf("\n"); } - + const int n_tensors = gguf_get_n_tensors(ctx); // kv if (verbosity >= 3) { const int n_kv = gguf_get_n_kv(ctx); @@ -493,27 +488,38 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } // data - size_t ctx_size = 0; + size_t 
buffer_size = 0;
     {
-        const int n_tensors = gguf_get_n_tensors(ctx);
-
         for (int i = 0; i < n_tensors; ++i) {
             const char * name = gguf_get_tensor_name(ctx, i);
             const size_t offset = gguf_get_tensor_offset(ctx, i);
-
             struct ggml_tensor * cur = ggml_get_tensor(meta, name);
-            ctx_size += sizeof(struct ggml_tensor) + GGML_OBJECT_SIZE;
             size_t tensor_size = ggml_nbytes(cur);
-            size_t padded_size = ggml_nbytes_pad(cur);
-            ctx_size += padded_size;
+            buffer_size += tensor_size;
             if (verbosity >= 3) {
-                printf("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, padded_size=%zu, offset=%zu\n", __func__, i,
-                       ggml_n_dims(cur), cur->name, tensor_size, padded_size, offset);
+                printf("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, offset=%zu\n", __func__, i,
+                       ggml_n_dims(cur), cur->name, tensor_size, offset);
             }
         }
     }

+    buffer_size += n_tensors * 128 /* CLIP PADDING */;
+
     clip_ctx * new_clip = new clip_ctx;
+#ifdef GGML_USE_CUBLAS
+    new_clip->backend = ggml_backend_cuda_init(0);
+    printf("%s: CLIP using CUDA backend\n", __func__);
+#endif
+
+#ifdef GGML_USE_METAL
+    new_clip->backend = ggml_backend_metal_init();
+    printf("%s: CLIP using Metal backend\n", __func__);
+#endif
+
+    if (!new_clip->backend) {
+        new_clip->backend = ggml_backend_cpu_init();
+        printf("%s: CLIP using CPU backend\n", __func__);
+    }

     // model size and capabilities
     {
@@ -539,17 +545,20 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) {
         printf("%s: text_encoder:    %d\n", __func__, new_clip->has_text_encoder);
         printf("%s: vision_encoder:  %d\n", __func__, new_clip->has_vision_encoder);
         printf("%s: llava_projector: %d\n", __func__, new_clip->has_llava_projector);
-        printf("%s: model size:      %.2f MB\n", __func__, (ctx_size / 1024.0 / 1024.0));
+        printf("%s: model size:      %.2f MB\n", __func__, buffer_size / 1024.0 / 1024.0);
         printf("%s: metadata size:   %.2f MB\n", __func__, ggml_get_mem_size(meta) / 1024.0 / 1024.0);
         }
     }

+    printf("%s: params backend buffer size = % 6.2f MB (%i tensors)\n", __func__, buffer_size / (1024.0 * 1024.0), n_tensors);
+
     // load tensors
     {
+        std::vector<uint8_t> read_buf;
         struct ggml_init_params params = {
-            /*.mem_size =*/ ctx_size,
+            /*.mem_size =*/ (n_tensors + 1) * ggml_tensor_overhead(),
             /*.mem_buffer =*/ NULL,
-            /*.no_alloc =*/ false,
+            /*.no_alloc =*/ true,
         };

         new_clip->ctx = ggml_init(params);
@@ -566,13 +575,21 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) {
             return nullptr;
         }

-        const int n_tensors = gguf_get_n_tensors(ctx);
+        // add tensors to context
         for (int i = 0; i < n_tensors; ++i) {
             const char * name = gguf_get_tensor_name(ctx, i);
             struct ggml_tensor * t = ggml_get_tensor(meta, name);
             struct ggml_tensor * cur = ggml_dup_tensor(new_clip->ctx, t);
             ggml_set_name(cur, name);
+        }

+        // alloc memory and offload data
+        new_clip->params_buffer = ggml_backend_alloc_buffer(new_clip->backend, buffer_size);
+        ggml_allocr* alloc = ggml_allocr_new_from_buffer(new_clip->params_buffer);
+        for (int i = 0; i < n_tensors; ++i) {
+            const char * name = gguf_get_tensor_name(ctx, i);
+            struct ggml_tensor * cur = ggml_get_tensor(new_clip->ctx, name);
+            ggml_allocr_alloc(alloc, cur);
             const size_t offset = gguf_get_data_offset(ctx) + gguf_get_tensor_offset(ctx, i);
             fin.seekg(offset, std::ios::beg);
             if (!fin) {
@@ -580,10 +597,22 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) {
                 clip_free(new_clip);
                 return nullptr;
             }
-
-            fin.read(reinterpret_cast<char *>(cur->data), ggml_nbytes(t));
+            int num_bytes = ggml_nbytes(cur);
+            if (ggml_backend_is_cpu(new_clip->backend)
+#ifdef GGML_USE_METAL
+                || ggml_backend_is_metal(new_clip->backend)
+#endif
+            ) {
+                // for the CPU and Metal backend, we can read directly into the tensor
+                fin.read(reinterpret_cast<char *>(cur->data), num_bytes);
+            } else {
+                // read into a temporary buffer first, then copy to device memory
+                read_buf.resize(num_bytes);
+                fin.read(reinterpret_cast<char *>(read_buf.data()), num_bytes);
+                ggml_backend_tensor_set(cur, read_buf.data(), 0, num_bytes);
+            }
         }
-
+        ggml_allocr_free(alloc);
         fin.close();
     }
@@ -657,18 +686,16 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) {

     // measure mem requirement and allocate
     {
-        static const size_t tensor_alignment = 32;
-        new_clip->buf_compute.resize(ggml_tensor_overhead()*GGML_DEFAULT_GRAPH_SIZE + ggml_graph_overhead());
-        new_clip->alloc = ggml_allocr_new_measure(tensor_alignment);
+        new_clip->compute_alloc = ggml_allocr_new_measure_from_backend(new_clip->backend);
         clip_image_f32_batch batch;
         batch.size = 1;
         ggml_cgraph * gf = clip_image_build_graph(new_clip, &batch);
-        size_t alloc_size = ggml_allocr_alloc_graph(new_clip->alloc, gf) + tensor_alignment;
-        ggml_allocr_free(new_clip->alloc);
-        new_clip->buf_alloc.resize(alloc_size);
-        new_clip->alloc = ggml_allocr_new(new_clip->buf_alloc.data, new_clip->buf_alloc.size, tensor_alignment);
+        size_t compute_memory_buffer_size = ggml_allocr_alloc_graph(new_clip->compute_alloc, gf);
+        ggml_allocr_free(new_clip->compute_alloc);
+        new_clip->compute_buffer = ggml_backend_alloc_buffer(new_clip->backend, compute_memory_buffer_size);
+        new_clip->compute_alloc = ggml_allocr_new_from_buffer(new_clip->compute_buffer);

-        printf("%s: total allocated memory: %.2f MB\n", __func__, (new_clip->buf_compute.size + alloc_size)/1024.0/1024.0);
+        printf("%s: compute allocated memory: %.2f MB\n", __func__, compute_memory_buffer_size /1024.0/1024.0);
     }

     return new_clip;
@@ -852,29 +879,29 @@ bool clip_image_batch_encode(const clip_ctx * ctx, const int n_threads, const cl
     }

     // reset alloc buffer to clean the memory from previous invocations
-    ggml_allocr_reset(ctx->alloc);
+    ggml_allocr_reset(ctx->compute_alloc);

     // build the inference graph
     ggml_cgraph * gf = clip_image_build_graph(ctx, imgs);
-    ggml_allocr_alloc_graph(ctx->alloc, gf);
+    ggml_allocr_alloc_graph(ctx->compute_alloc, gf);

-    struct ggml_cplan plan = ggml_graph_plan(gf, n_threads);
-    if (plan.work_size > 0) {
-        plan.work_data = (uint8_t *)malloc(plan.work_size);
+    if (ggml_backend_is_cpu(ctx->backend)) {
+        ggml_backend_cpu_set_n_threads(ctx->backend, n_threads);
     }

-    ggml_graph_compute(gf, &plan);
+#ifdef GGML_USE_METAL
+    if (ggml_backend_is_metal(ctx->backend)) {
+        ggml_backend_metal_set_n_cb(ctx->backend, n_threads);
+    }
+#endif
+
+    ggml_backend_graph_compute(ctx->backend, gf);

     // the last node is the embedding tensor
-struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 1];
+    struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 1];

     // copy the embeddings to the location passed by the user
-    memcpy(vec, ggml_get_data_f32(embeddings), ggml_nbytes(embeddings));
-
-    if (plan.work_size > 0) {
-        free(plan.work_data);
-    }
-
+    ggml_backend_tensor_get(embeddings, vec, 0, ggml_nbytes(embeddings));
     return true;
 }
@@ -1045,8 +1072,8 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i
     gguf_free(ctx_out);

     {
printf("%s: original size = %8.2f MB\n", __func__, total_size_org / 1024.0 / 1024.0); + printf("%s: quantized size = %8.2f MB\n", __func__, total_size_new / 1024.0 / 1024.0); int64_t sum_all = 0; for (size_t i = 0; i < hist_all.size(); ++i) { From 0235b9b571f3cc7d2b8836409a5404b41ce1379c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 29 Dec 2023 18:53:34 +0200 Subject: [PATCH 327/859] clip : use ggml_backend_buffer_is_host (#4205) --- examples/llava/clip.cpp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index f9326a5cc..6a731eeec 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -598,11 +598,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { return nullptr; } int num_bytes = ggml_nbytes(cur); - if (ggml_backend_is_cpu(new_clip->backend) -#ifdef GGML_USE_METAL - || ggml_backend_is_metal(new_clip->backend) -#endif - ) { + if (ggml_backend_buffer_is_host(new_clip->params_buffer)) { // for the CPU and Metal backend, we can read directly into the tensor fin.read(reinterpret_cast(cur->data), num_bytes); } else { From a20f3c7465d6d1b33767757c2760643b799a81bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Fri, 29 Dec 2023 23:12:53 +0100 Subject: [PATCH 328/859] CUDA: fix tensor core logic for Pascal and HIP (#4682) --- ggml-cuda.cu | 72 ++++++++++++++++++++++++++++------------------------ 1 file changed, 39 insertions(+), 33 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 09585b07d..71a64ca09 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -123,24 +123,6 @@ #define GGML_CUDA_MAX_NODES 8192 -// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication -// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant -// for large computational tasks. the drawback is that this requires some extra amount of VRAM: -// - 7B quantum model: +100-200 MB -// - 13B quantum model: +200-400 MB -// -//#define GGML_CUDA_FORCE_MMQ - -// TODO: improve this to be correct for more hardware -// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores -// probably other such cases, and not sure what happens on AMD hardware -#if !defined(GGML_CUDA_FORCE_MMQ) -#define CUDA_USE_TENSOR_CORES -#endif - -// max batch size to use MMQ kernels when tensor cores are available -#define MMQ_MAX_BATCH_SIZE 32 - #if defined(GGML_USE_HIPBLAS) #define __CUDA_ARCH__ 1300 @@ -207,6 +189,23 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { } #endif // defined(GGML_USE_HIPBLAS) +// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication +// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant +// for large computational tasks. 
the drawback is that this requires some extra amount of VRAM: +// - 7B quantum model: +100-200 MB +// - 13B quantum model: +200-400 MB +// +//#define GGML_CUDA_FORCE_MMQ + +// TODO: improve this to be correct for more hardware +// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores +#if !defined(GGML_CUDA_FORCE_MMQ) && (!defined(GGML_USE_HIPBLAS) || defined(RDNA3)) +#define CUDA_USE_TENSOR_CORES +#endif + +// max batch size to use MMQ kernels when tensor cores are available +#define MMQ_MAX_BATCH_SIZE 32 + #if defined(_MSC_VER) #pragma warning(disable: 4244 4267) // possible loss of data #endif @@ -8661,11 +8660,26 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } } -#ifdef CUDA_USE_TENSOR_CORES - const bool use_tensor_cores = true; +#if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + const bool fp16_performance_good = true; + +#ifdef RDNA3 + const bool use_mul_mat_q = false; #else - const bool use_tensor_cores = false; -#endif + const bool use_mul_mat_q = true; +#endif // RDNA3 + +#else + + const bool fp16_performance_good = min_compute_capability >= CC_VOLTA; + bool use_mul_mat_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); +#ifdef CUDA_USE_TENSOR_CORES + // when tensor cores are available, use them for large batch size + // ref: https://github.com/ggerganov/llama.cpp/pull/3776 + use_mul_mat_q = use_mul_mat_q && !(fp16_performance_good && src1->ne[1] > MMQ_MAX_BATCH_SIZE); +#endif // CUDA_USE_TENSOR_CORES + +#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) // debug helpers //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); @@ -8675,13 +8689,13 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 //printf("src0 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src0), ggml_is_transposed(src0), ggml_type_name(src0->type), src0->name); //printf("src1 is contiguous %d, transposed %d, type = %s, name = %s\n", ggml_is_contiguous(src1), ggml_is_transposed(src1), ggml_type_name(src1->type), src1->name); - if (!split && all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { + if (!split && all_on_device && !fp16_performance_good && src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { // KQ single-batch ggml_cuda_mul_mat_vec_p021(src0, src1, dst); - } else if (!split && all_on_device && !use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { + } else if (!split && all_on_device && !fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (!split && all_on_device && use_tensor_cores && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (!split && all_on_device && fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { @@ -8701,14 +8715,6 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 ggml_cuda_op_mul_mat(src0, src1, dst, 
ggml_cuda_op_dequantize_mul_mat_vec, false);
         }
     } else {
-        bool use_mul_mat_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type);
-
-        // when tensor cores are available, use them for large batch size
-        // ref: https://github.com/ggerganov/llama.cpp/pull/3776
-        if (use_tensor_cores && min_compute_capability >= CC_VOLTA && src1->ne[1] > MMQ_MAX_BATCH_SIZE) {
-            use_mul_mat_q = false;
-        }
-
         if (use_mul_mat_q) {
             ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true);
         } else {

From 24a447e20af425fa44cf10feaa632b6bb596c80f Mon Sep 17 00:00:00 2001
From: automaticcat
Date: Sat, 30 Dec 2023 15:07:48 +0700
Subject: [PATCH 329/859] ggml : add ggml_cpu_has_avx_vnni() (#4589)

* feat: add avx_vnni based on intel documents

* ggml: add avx vnni based on intel document

* llama: add avx vnni information display

* docs: add more details about using oneMKL and oneAPI for intel processors

* docs: add more details about using oneMKL and oneAPI for intel processors

* docs: add more details about using oneMKL and oneAPI for intel processors

* docs: add more details about using oneMKL and oneAPI for intel processors

* docs: add more details about using oneMKL and oneAPI for intel processors

* Update ggml.c

Fix indentation update

Co-authored-by: Georgi Gerganov

---------

Co-authored-by: Georgi Gerganov
---
 README.md         | 30 ++++++++++++++++++++++--------
 common/common.cpp |  1 +
 ggml.c            |  8 ++++++++
 ggml.h            |  1 +
 llama.cpp         |  1 +
 5 files changed, 33 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 48dcd6464..ca6d14e17 100644
--- a/README.md
+++ b/README.md
@@ -385,16 +385,30 @@ Building the program with BLAS support may lead to some performance improvements

 Check [BLIS.md](docs/BLIS.md) for more information.

-- #### Intel MKL
+- #### Intel oneMKL
+  - Using manual oneAPI installation:
+    By default, `LLAMA_BLAS_VENDOR` is set to `Generic`, so if you have already sourced the Intel environment script and assign `-DLLAMA_BLAS=ON` in cmake, the MKL version of BLAS will automatically be selected. Otherwise, please install oneAPI and follow the steps below:
+    ```bash
+    mkdir build
+    cd build
+    source /opt/intel/oneapi/setvars.sh # You can skip this step if in oneapi-runtime docker image, only required for manual installation
+    cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON
+    cmake --build . --config Release
+    ```

-  By default, `LLAMA_BLAS_VENDOR` is set to `Generic`, so if you already sourced intel environment script and assign `-DLLAMA_BLAS=ON` in cmake, the mkl version of Blas will automatically been selected. You may also specify it by:
+  - Using oneAPI docker image:
+    If you do not want to source the environment variables and install oneAPI manually, you can also build the code using the Intel Docker container: [oneAPI-runtime](https://hub.docker.com/r/intel/oneapi-runtime)

-  ```bash
-  mkdir build
-  cd build
-  cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx
-  cmake --build . --config Release
-  ```
+    ```bash
+    mkdir build
+    cd build
+    cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON
+    cmake --build . --config Release
+    ```
+
+    Building with the oneAPI compilers makes the avx_vnni instruction set available on Intel processors that do not support avx512 and avx512_vnni.
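[Editor's note: the paragraph above says an oneAPI build enables avx_vnni. As a minimal sketch, not part of this patch, the result can be confirmed at runtime through the feature queries ggml exposes; `ggml_cpu_has_avx_vnni()` is the one this commit introduces, the others already exist. Assumes `ggml.h` is on the include path and the program links against ggml.]

```cpp
// Sketch: print which SIMD extensions this ggml build was compiled with.
#include "ggml.h"
#include <cstdio>

int main() {
    std::printf("AVX      = %d\n", ggml_cpu_has_avx());
    std::printf("AVX_VNNI = %d\n", ggml_cpu_has_avx_vnni()); // added by this commit
    std::printf("AVX2     = %d\n", ggml_cpu_has_avx2());
    std::printf("AVX512   = %d\n", ggml_cpu_has_avx512());
    return 0;
}
```

The same flags are also printed by `llama_print_system_info()`, which this commit extends with an `AVX_VNNI` entry (see the `llama.cpp` hunk below).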
+ + Check [Optimizing and Running LLaMA2 on Intel® CPU](https://www.intel.com/content/www/us/en/content-details/791610/optimizing-and-running-llama2-on-intel-cpu.html) for more information. - #### cuBLAS diff --git a/common/common.cpp b/common/common.cpp index b3425ab09..eacaee18e 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1394,6 +1394,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "build_number: %d\n", LLAMA_BUILD_NUMBER); fprintf(stream, "cpu_has_arm_fma: %s\n", ggml_cpu_has_arm_fma() ? "true" : "false"); fprintf(stream, "cpu_has_avx: %s\n", ggml_cpu_has_avx() ? "true" : "false"); + fprintf(stream, "cpu_has_avx_vnni: %s\n", ggml_cpu_has_avx_vnni() ? "true" : "false"); fprintf(stream, "cpu_has_avx2: %s\n", ggml_cpu_has_avx2() ? "true" : "false"); fprintf(stream, "cpu_has_avx512: %s\n", ggml_cpu_has_avx512() ? "true" : "false"); fprintf(stream, "cpu_has_avx512_vbmi: %s\n", ggml_cpu_has_avx512_vbmi() ? "true" : "false"); diff --git a/ggml.c b/ggml.c index a9e1ea9b4..bcec200f6 100644 --- a/ggml.c +++ b/ggml.c @@ -19638,6 +19638,14 @@ int ggml_cpu_has_avx(void) { #endif } +int ggml_cpu_has_avx_vnni(void) { +#if defined(__AVXVNNI__) + return 1; +#else + return 0; +#endif +} + int ggml_cpu_has_avx2(void) { #if defined(__AVX2__) return 1; diff --git a/ggml.h b/ggml.h index 67d6bc4f1..64f4e45e8 100644 --- a/ggml.h +++ b/ggml.h @@ -2198,6 +2198,7 @@ extern "C" { // GGML_API int ggml_cpu_has_avx (void); + GGML_API int ggml_cpu_has_avx_vnni (void); GGML_API int ggml_cpu_has_avx2 (void); GGML_API int ggml_cpu_has_avx512 (void); GGML_API int ggml_cpu_has_avx512_vbmi(void); diff --git a/llama.cpp b/llama.cpp index 68c7cced6..a833d4c15 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10780,6 +10780,7 @@ const char * llama_print_system_info(void) { s = ""; s += "AVX = " + std::to_string(ggml_cpu_has_avx()) + " | "; + s += "AVX_VNNI = " + std::to_string(ggml_cpu_has_avx_vnni()) + " | "; s += "AVX2 = " + std::to_string(ggml_cpu_has_avx2()) + " | "; s += "AVX512 = " + std::to_string(ggml_cpu_has_avx512()) + " | "; s += "AVX512_VBMI = " + std::to_string(ggml_cpu_has_avx512_vbmi()) + " | "; From 39d8bc71edcb8b6f99d46fa4216af7a15232e218 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 30 Dec 2023 13:52:01 +0100 Subject: [PATCH 330/859] CUDA: fixed tensor cores not being used on RDNA3 (#4697) --- ggml-cuda.cu | 47 ++++++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 71a64ca09..8c2712308 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -119,10 +119,29 @@ #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products #define CC_VOLTA 700 #define CC_OFFSET_AMD 1000000 +#define CC_RDNA1 (CC_OFFSET_AMD + 1010) #define CC_RDNA2 (CC_OFFSET_AMD + 1030) +#define CC_RDNA3 (CC_OFFSET_AMD + 1100) #define GGML_CUDA_MAX_NODES 8192 +// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication +// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant +// for large computational tasks. 
the drawback is that this requires some extra amount of VRAM: +// - 7B quantum model: +100-200 MB +// - 13B quantum model: +200-400 MB +// +//#define GGML_CUDA_FORCE_MMQ + +// TODO: improve this to be correct for more hardware +// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores +#if !defined(GGML_CUDA_FORCE_MMQ) +#define CUDA_USE_TENSOR_CORES +#endif + +// max batch size to use MMQ kernels when tensor cores are available +#define MMQ_MAX_BATCH_SIZE 32 + #if defined(GGML_USE_HIPBLAS) #define __CUDA_ARCH__ 1300 @@ -189,23 +208,6 @@ static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { } #endif // defined(GGML_USE_HIPBLAS) -// define this if you want to always fallback to MMQ kernels and not use cuBLAS for matrix multiplication -// on modern hardware, using cuBLAS is recommended as it utilizes F16 tensor cores which are very performant -// for large computational tasks. the drawback is that this requires some extra amount of VRAM: -// - 7B quantum model: +100-200 MB -// - 13B quantum model: +200-400 MB -// -//#define GGML_CUDA_FORCE_MMQ - -// TODO: improve this to be correct for more hardware -// for example, currently fails for GeForce GTX 1660 which is TURING arch (> VOLTA) but does not have tensor cores -#if !defined(GGML_CUDA_FORCE_MMQ) && (!defined(GGML_USE_HIPBLAS) || defined(RDNA3)) -#define CUDA_USE_TENSOR_CORES -#endif - -// max batch size to use MMQ kernels when tensor cores are available -#define MMQ_MAX_BATCH_SIZE 32 - #if defined(_MSC_VER) #pragma warning(disable: 4244 4267) // possible loss of data #endif @@ -8661,13 +8663,12 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) - const bool fp16_performance_good = true; -#ifdef RDNA3 - const bool use_mul_mat_q = false; -#else - const bool use_mul_mat_q = true; -#endif // RDNA3 + const bool fp16_performance_good = min_compute_capability >= CC_RDNA1; + bool use_mul_mat_q = ggml_is_quantized(src0->type); +#ifdef CUDA_USE_TENSOR_CORES + use_mul_mat_q = use_mul_mat_q && min_compute_capability < CC_RDNA3; +#endif // CUDA_USE_TENSOR_CORES #else From 9fbda719de18a9400a064c28759c39d55d687d3e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 30 Dec 2023 23:24:42 +0200 Subject: [PATCH 331/859] clip : refactor + bug fixes (#4696) * clip : refactor + bug fixes ggml-ci * server : add log message --- examples/llava/clip.cpp | 241 +++++++++++++++++++++---------------- examples/llava/clip.h | 48 +++----- examples/llava/llava.cpp | 4 +- examples/server/server.cpp | 38 +++--- 4 files changed, 169 insertions(+), 162 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 6a731eeec..cfb79e789 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -146,6 +146,27 @@ static std::string get_ftype(int ftype) { } } +// +// image data +// + +// RGB uint8 image +struct clip_image_u8 { + int nx; + int ny; + + std::vector buf; +}; + +// RGB float32 image (NHWC) +// Memory layout: RGBRGBRGB... 
+struct clip_image_f32 { + int nx; + int ny; + + std::vector buf; +}; + // // clip layers // @@ -204,16 +225,21 @@ struct clip_vision_model { }; struct clip_ctx { - bool has_text_encoder = false; - bool has_vision_encoder = false; + bool has_text_encoder = false; + bool has_vision_encoder = false; bool has_llava_projector = false; + struct clip_vision_model vision_model; + float image_mean[3]; float image_std[3]; bool use_gelu = false; int32_t ftype = 1; - struct ggml_context * ctx; + struct gguf_context * ctx_gguf; + struct ggml_context * ctx_data; + + std::vector buf_compute_meta; // memory buffers to evaluate the model ggml_backend_buffer_t params_buffer = NULL; @@ -222,7 +248,7 @@ struct clip_ctx { ggml_allocr * compute_alloc = NULL; }; -static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_image_f32_batch * imgs) { +static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32_batch * imgs) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return nullptr; @@ -243,13 +269,14 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima //const int projection_dim = hparams.projection_dim; const float eps = hparams.eps; int batch_size = imgs->size; - if(ctx->has_llava_projector) { + if (ctx->has_llava_projector) { GGML_ASSERT(batch_size == 1); } + struct ggml_init_params params = { - /*.mem_size =*/ GGML_DEFAULT_GRAPH_SIZE * ggml_tensor_overhead() + ggml_graph_overhead(), - /*.mem_buffer =*/ NULL, - /*.no_alloc =*/ true, + /*.mem_size =*/ ctx->buf_compute_meta.size(), + /*.mem_buffer =*/ ctx->buf_compute_meta.data(), + /*.no_alloc =*/ true, }; struct ggml_context * ctx0 = ggml_init(params); @@ -272,7 +299,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima for (int k = 0; k < 3; k++) { for (int y = 0; y < ny; y++) { for (int x = 0; x < nx; x++) { - data[(b * 3 * n) + k * n + y * nx + x] = imgs->data[b].data[3 * (y * nx + x) + k]; + data[(b * 3 * n) + k * n + y * nx + x] = imgs->data[b].buf[3 * (y * nx + x) + k]; } } } @@ -413,7 +440,7 @@ static ggml_cgraph * clip_image_build_graph(const clip_ctx * ctx, const clip_ima ggml_allocr_alloc(ctx->compute_alloc, patches); if (!ggml_allocr_is_measure(ctx->compute_alloc)) { int* patches_data = (int*)malloc(ggml_nbytes(patches)); - for (int i = 0; i < num_positions; i++) { + for (int i = 0; i < num_patches; i++) { patches_data[i] = i + 1; } ggml_backend_tensor_set(patches, patches_data, 0, ggml_nbytes(patches)); @@ -561,8 +588,8 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { /*.no_alloc =*/ true, }; - new_clip->ctx = ggml_init(params); - if (!new_clip->ctx) { + new_clip->ctx_data = ggml_init(params); + if (!new_clip->ctx_data) { fprintf(stderr, "%s: ggml_init() failed\n", __func__); clip_free(new_clip); return nullptr; @@ -579,7 +606,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, i); struct ggml_tensor * t = ggml_get_tensor(meta, name); - struct ggml_tensor * cur = ggml_dup_tensor(new_clip->ctx, t); + struct ggml_tensor * cur = ggml_dup_tensor(new_clip->ctx_data, t); ggml_set_name(cur, name); } @@ -588,7 +615,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { ggml_allocr* alloc = ggml_allocr_new_from_buffer(new_clip->params_buffer); for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, 
i); - struct ggml_tensor * cur = ggml_get_tensor(new_clip->ctx, name); + struct ggml_tensor * cur = ggml_get_tensor(new_clip->ctx_data, name); ggml_allocr_alloc(alloc, cur); const size_t offset = gguf_get_data_offset(ctx) + gguf_get_tensor_offset(ctx, i); fin.seekg(offset, std::ios::beg); @@ -617,20 +644,20 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { // load vision model auto & vision_model = new_clip->vision_model; auto & hparams = vision_model.hparams; - hparams.hidden_size = get_u32(ctx, format(KEY_N_EMBD, "vision")); - hparams.n_head = get_u32(ctx, format(KEY_N_HEAD, "vision")); + hparams.hidden_size = get_u32(ctx, format(KEY_N_EMBD, "vision")); + hparams.n_head = get_u32(ctx, format(KEY_N_HEAD, "vision")); hparams.n_intermediate = get_u32(ctx, format(KEY_N_FF, "vision")); - hparams.n_layer = get_u32(ctx, format(KEY_N_BLOCK, "vision")); - hparams.image_size = get_u32(ctx, KEY_IMAGE_SIZE); - hparams.patch_size = get_u32(ctx, KEY_PATCH_SIZE); + hparams.n_layer = get_u32(ctx, format(KEY_N_BLOCK, "vision")); + hparams.image_size = get_u32(ctx, KEY_IMAGE_SIZE); + hparams.patch_size = get_u32(ctx, KEY_PATCH_SIZE); hparams.projection_dim = get_u32(ctx, format(KEY_PROJ_DIM, "vision")); - hparams.eps = get_f32(ctx, format(KEY_LAYER_NORM_EPS, "vision")); + hparams.eps = get_f32(ctx, format(KEY_LAYER_NORM_EPS, "vision")); int idx_mean = get_key_idx(ctx, KEY_IMAGE_MEAN); - int idx_std = get_key_idx(ctx, KEY_IMAGE_STD); + int idx_std = get_key_idx(ctx, KEY_IMAGE_STD); for (int i = 0; i < 3; ++i) { new_clip->image_mean[i] = *((const float *)gguf_get_arr_data(ctx, idx_mean)); - new_clip->image_std[i] = *((const float *)gguf_get_arr_data(ctx, idx_std)); + new_clip->image_std[i] = *((const float *)gguf_get_arr_data(ctx, idx_std)); } if (verbosity >= 2) { @@ -644,35 +671,35 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("v_n_layer %d\n", hparams.n_layer); } - vision_model.patch_embeddings = get_tensor(new_clip->ctx, TN_PATCH_EMBD); - vision_model.class_embedding = get_tensor(new_clip->ctx, TN_CLASS_EMBD); - vision_model.position_embeddings = get_tensor(new_clip->ctx, format(TN_POS_EMBD, "v")); - vision_model.pre_ln_w = get_tensor(new_clip->ctx, format(TN_LN_PRE, "v", "weight")); - vision_model.pre_ln_b = get_tensor(new_clip->ctx, format(TN_LN_PRE, "v", "bias")); - vision_model.mm_0_w = get_tensor(new_clip->ctx, format(TN_LLAVA_PROJ, 0, "weight")); - vision_model.mm_0_b = get_tensor(new_clip->ctx, format(TN_LLAVA_PROJ, 0, "bias")); - vision_model.mm_2_w = get_tensor(new_clip->ctx, format(TN_LLAVA_PROJ, 2, "weight")); - vision_model.mm_2_b = get_tensor(new_clip->ctx, format(TN_LLAVA_PROJ, 2, "bias")); + vision_model.patch_embeddings = get_tensor(new_clip->ctx_data, TN_PATCH_EMBD); + vision_model.class_embedding = get_tensor(new_clip->ctx_data, TN_CLASS_EMBD); + vision_model.position_embeddings = get_tensor(new_clip->ctx_data, format(TN_POS_EMBD, "v")); + vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); + vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); + vision_model.mm_0_w = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 0, "weight")); + vision_model.mm_0_b = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 0, "bias")); + vision_model.mm_2_w = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 2, "weight")); + vision_model.mm_2_b = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 2, "bias")); 
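[Editor's note: a minimal sketch, not part of this patch, of the public gguf calls that the `get_u32()`/`get_f32()` helpers used above wrap. The file name and the key string are illustrative assumptions, not taken from this diff; in this revision the `gguf_*` API is declared in `ggml.h`.]

```cpp
// Sketch: read one u32 hyperparameter from a gguf file by key name.
#include "ggml.h"
#include <cstdio>

int main() {
    struct gguf_init_params params = {
        /*.no_alloc =*/ true,    // metadata only, do not allocate tensor data
        /*.ctx      =*/ nullptr,
    };
    struct gguf_context * ctx = gguf_init_from_file("mmproj.gguf", params); // illustrative path
    if (!ctx) {
        std::fprintf(stderr, "failed to open gguf file\n");
        return 1;
    }
    const int idx = gguf_find_key(ctx, "clip.vision.embedding_length"); // assumed key name
    if (idx >= 0) {
        std::printf("hidden_size = %u\n", gguf_get_val_u32(ctx, idx));
    }
    gguf_free(ctx);
    return 0;
}
```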
vision_model.layers.resize(hparams.n_layer); for (int il = 0; il < hparams.n_layer; ++il) { auto & layer = vision_model.layers[il]; - layer.k_w = get_tensor(new_clip->ctx, format(TN_ATTN_K, "v", il, "weight")); - layer.q_w = get_tensor(new_clip->ctx, format(TN_ATTN_Q, "v", il, "weight")); - layer.v_w = get_tensor(new_clip->ctx, format(TN_ATTN_V, "v", il, "weight")); - layer.o_w = get_tensor(new_clip->ctx, format(TN_ATTN_OUTPUT, "v", il, "weight")); - layer.ln_1_w = get_tensor(new_clip->ctx, format(TN_LN_1, "v", il, "weight")); - layer.ln_2_w = get_tensor(new_clip->ctx, format(TN_LN_2, "v", il, "weight")); - layer.ff_i_w = get_tensor(new_clip->ctx, format(TN_FFN_DOWN, "v", il, "weight")); - layer.ff_o_w = get_tensor(new_clip->ctx, format(TN_FFN_UP, "v", il, "weight")); - layer.k_b = get_tensor(new_clip->ctx, format(TN_ATTN_K, "v", il, "bias")); - layer.q_b = get_tensor(new_clip->ctx, format(TN_ATTN_Q, "v", il, "bias")); - layer.v_b = get_tensor(new_clip->ctx, format(TN_ATTN_V, "v", il, "bias")); - layer.o_b = get_tensor(new_clip->ctx, format(TN_ATTN_OUTPUT, "v", il, "bias")); - layer.ln_1_b = get_tensor(new_clip->ctx, format(TN_LN_1, "v", il, "bias")); - layer.ln_2_b = get_tensor(new_clip->ctx, format(TN_LN_2, "v", il, "bias")); - layer.ff_i_b = get_tensor(new_clip->ctx, format(TN_FFN_DOWN, "v", il, "bias")); - layer.ff_o_b = get_tensor(new_clip->ctx, format(TN_FFN_UP, "v", il, "bias")); + layer.k_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_K, "v", il, "weight")); + layer.q_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_Q, "v", il, "weight")); + layer.v_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_V, "v", il, "weight")); + layer.o_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_OUTPUT, "v", il, "weight")); + layer.ln_1_w = get_tensor(new_clip->ctx_data, format(TN_LN_1, "v", il, "weight")); + layer.ln_2_w = get_tensor(new_clip->ctx_data, format(TN_LN_2, "v", il, "weight")); + layer.ff_i_w = get_tensor(new_clip->ctx_data, format(TN_FFN_DOWN, "v", il, "weight")); + layer.ff_o_w = get_tensor(new_clip->ctx_data, format(TN_FFN_UP, "v", il, "weight")); + layer.k_b = get_tensor(new_clip->ctx_data, format(TN_ATTN_K, "v", il, "bias")); + layer.q_b = get_tensor(new_clip->ctx_data, format(TN_ATTN_Q, "v", il, "bias")); + layer.v_b = get_tensor(new_clip->ctx_data, format(TN_ATTN_V, "v", il, "bias")); + layer.o_b = get_tensor(new_clip->ctx_data, format(TN_ATTN_OUTPUT, "v", il, "bias")); + layer.ln_1_b = get_tensor(new_clip->ctx_data, format(TN_LN_1, "v", il, "bias")); + layer.ln_2_b = get_tensor(new_clip->ctx_data, format(TN_LN_2, "v", il, "bias")); + layer.ff_i_b = get_tensor(new_clip->ctx_data, format(TN_FFN_DOWN, "v", il, "bias")); + layer.ff_o_b = get_tensor(new_clip->ctx_data, format(TN_FFN_UP, "v", il, "bias")); } } @@ -680,8 +707,9 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { new_clip->ctx_gguf = ctx; -// measure mem requirement and allocate + // measure mem requirement and allocate { + new_clip->buf_compute_meta.resize(GGML_DEFAULT_GRAPH_SIZE * ggml_tensor_overhead() + ggml_graph_overhead()); new_clip->compute_alloc = ggml_allocr_new_measure_from_backend(new_clip->backend); clip_image_f32_batch batch; batch.size = 1; @@ -697,26 +725,27 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { return new_clip; } -clip_image_u8 * make_clip_image_u8() { - auto img = new clip_image_u8(); - return img; +struct clip_image_u8 * clip_image_u8_init() { + return new clip_image_u8(); } -clip_image_f32 * make_clip_image_f32() { 
return new clip_image_f32(); } -void clip_image_u8_free(clip_image_u8 * img) { if (img->data) { delete[] img->data; } delete img; } -void clip_image_f32_free(clip_image_f32 * img) { if (img->data) { delete[] img->data; } delete img; } +struct clip_image_f32 * clip_image_f32_init() { + return new clip_image_f32(); +} + +void clip_image_u8_free (struct clip_image_u8 * img) { delete img; } +void clip_image_f32_free(struct clip_image_f32 * img) { delete img; } static void build_clip_img_from_data(const stbi_uc * data, int nx, int ny, clip_image_u8 * img) { img->nx = nx; img->ny = ny; - img->size = nx * ny * 3; - img->data = new uint8_t[img->size](); - memcpy(img->data, data, img->size); + img->buf.resize(3 * nx * ny); + memcpy(img->buf.data(), data, img->buf.size()); } bool clip_image_load_from_file(const char * fname, clip_image_u8 * img) { int nx, ny, nc; - auto data = stbi_load(fname, &nx, &ny, &nc, 3); + auto * data = stbi_load(fname, &nx, &ny, &nc, 3); if (!data) { fprintf(stderr, "%s: failed to load image '%s'\n", __func__, fname); return false; @@ -728,7 +757,7 @@ bool clip_image_load_from_file(const char * fname, clip_image_u8 * img) { bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img) { int nx, ny, nc; - auto data = stbi_load_from_memory(bytes, bytes_length, &nx, &ny, &nc, 3); + auto * data = stbi_load_from_memory(bytes, bytes_length, &nx, &ny, &nc, 3); if (!data) { fprintf(stderr, "%s: failed to decode image bytes\n", __func__); return false; @@ -740,7 +769,7 @@ bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length // normalize: x = (x - mean) / std // TODO: implement bicubic interpolation instead of linear. -bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32 * res, const bool pad2square) { +bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32 * res, const bool pad2square) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return false; @@ -749,18 +778,17 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip // the logic below is to pad the shorter side to the longer side with a background color: rgb(122, 116, 104) // see https://github.com/haotian-liu/LLaVA/blob/e854a2bf85118c504f6f16bf5c3c7c92f8fa8c6b/llava/conversation.py#L113-L156 - clip_image_u8 * temp = make_clip_image_u8(); // we will keep the input image data here temporarily + clip_image_u8 * temp = clip_image_u8_init(); // we will keep the input image data here temporarily if (pad2square && img->nx != img->ny) { int longer_side = std::max(img->nx, img->ny); temp->nx = longer_side; temp->ny = longer_side; - temp->size = 3 * longer_side * longer_side; - temp->data = new uint8_t[temp->size](); - uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA + temp->buf.resize(3 * longer_side * longer_side); + const uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA // fill with background color - for (size_t i = 0; i < temp->size; i++) { - temp->data[i] = bc[i % 3]; + for (size_t i = 0; i < temp->buf.size(); i++) { + temp->buf[i] = bc[i % 3]; } // copy from the input image @@ -768,17 +796,16 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip for (int x = 0; x < img->nx; x++) { const int i = 3 * (y * img->nx + x); const int j = 3 * (y * temp->nx + x); - temp->data[j] = img->data[i]; - temp->data[j+1] = img->data[i+1]; - 
temp->data[j+2] = img->data[i+2]; + temp->buf[j] = img->buf[i]; + temp->buf[j+1] = img->buf[i+1]; + temp->buf[j+2] = img->buf[i+2]; } } } else { - temp->nx = img->nx; - temp->ny = img->ny; - temp->size = img->size; - temp->data = new uint8_t[temp->size](); - memcpy(&temp->data[0], &img->data[0], temp->size); // copy + temp->nx = img->nx; + temp->ny = img->ny; + temp->buf.resize(img->buf.size()); + memcpy(temp->buf.data(), img->buf.data(), temp->buf.size()); } const int nx = temp->nx; @@ -789,8 +816,7 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip res->nx = nx2; res->ny = ny2; - res->size = 3 * nx2 * ny2; - res->data = new float[res->size](); + res->buf.resize(3 * nx2 * ny2); const float scale = std::max(nx, ny) / (float)ctx->vision_model.hparams.image_size; @@ -821,10 +847,10 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip const int j10 = 3 * (y1 * nx + x0) + c; const int j11 = 3 * (y1 * nx + x1) + c; - const float v00 = temp->data[j00]; - const float v01 = temp->data[j01]; - const float v10 = temp->data[j10]; - const float v11 = temp->data[j11]; + const float v00 = temp->buf[j00]; + const float v01 = temp->buf[j01]; + const float v10 = temp->buf[j10]; + const float v11 = temp->buf[j11]; const float v0 = v00 * (1.0f - dx) + v01 * dx; const float v1 = v10 * (1.0f - dx) + v11 * dx; @@ -835,7 +861,7 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip const int i = 3 * (y * nx3 + x) + c; - res->data[i] = ((float(v2) / 255.0f) - m3[c]) / s3[c]; + res->buf[i] = ((float(v2) / 255.0f) - m3[c]) / s3[c]; } } } @@ -845,12 +871,13 @@ bool clip_image_preprocess(const clip_ctx * ctx, const clip_image_u8 * img, clip } void clip_free(clip_ctx * ctx) { - ggml_free(ctx->ctx); + ggml_free(ctx->ctx_data); gguf_free(ctx->ctx_gguf); + delete ctx; } -bool clip_image_encode(const clip_ctx * ctx, const int n_threads, clip_image_f32 * img, float * vec) { +bool clip_image_encode(struct clip_ctx * ctx, const int n_threads, clip_image_f32 * img, float * vec) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return false; @@ -862,8 +889,7 @@ bool clip_image_encode(const clip_ctx * ctx, const int n_threads, clip_image_f32 return clip_image_batch_encode(ctx, n_threads, &imgs, vec); } -bool clip_image_batch_encode(const clip_ctx * ctx, const int n_threads, const clip_image_f32_batch * imgs, float * vec) { - +bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_image_f32_batch * imgs, float * vec) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return false; @@ -906,31 +932,32 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i ggml_type type = GGML_TYPE_Q4_1; switch (itype) { - case 2: - type = GGML_TYPE_Q4_0; - break; - case 3: - type = GGML_TYPE_Q4_1; - break; - case 6: - type = GGML_TYPE_Q5_0; - break; - case 7: - type = GGML_TYPE_Q5_1; - break; - case 8: - type = GGML_TYPE_Q8_0; - break; - default: - fprintf(stderr, "%s: invalid quantization type %d\n", __func__, itype); - return false; + case 2: + type = GGML_TYPE_Q4_0; + break; + case 3: + type = GGML_TYPE_Q4_1; + break; + case 6: + type = GGML_TYPE_Q5_0; + break; + case 7: + type = GGML_TYPE_Q5_1; + break; + case 8: + type = GGML_TYPE_Q8_0; + break; + default: + fprintf(stderr, "%s: invalid quantization type %d\n", __func__, itype); + return false; }; - auto ctx_clip = clip_model_load(fname_inp, 2); - const auto & 
ctx_src = ctx_clip->ctx_gguf; - const auto & ctx_data = ctx_clip->ctx; + auto * ctx_clip = clip_model_load(fname_inp, 2); - auto ctx_out = gguf_init_empty(); + const auto & ctx_src = ctx_clip->ctx_gguf; + const auto & ctx_data = ctx_clip->ctx_data; + + auto * ctx_out = gguf_init_empty(); gguf_set_kv(ctx_out, ctx_src); gguf_set_val_u32(ctx_out, "general.quantization_version", GGML_QNT_VERSION); gguf_set_val_u32(ctx_out, "general.file_type", itype); diff --git a/examples/llava/clip.h b/examples/llava/clip.h index f11df85de..458a256a1 100644 --- a/examples/llava/clip.h +++ b/examples/llava/clip.h @@ -35,31 +35,14 @@ struct clip_vision_hparams { float eps; }; -/** load mmproj model */ -CLIP_API struct clip_ctx * clip_model_load(const char * fname, const int verbosity); -/** free mmproj model */ +CLIP_API struct clip_ctx * clip_model_load(const char * fname, int verbosity); + CLIP_API void clip_free(struct clip_ctx * ctx); -size_t clip_embd_nbytes(const struct clip_ctx * ctx); -int clip_n_patches(const struct clip_ctx * ctx); -int clip_n_mmproj_embd(const struct clip_ctx * ctx); +CLIP_API size_t clip_embd_nbytes(const struct clip_ctx * ctx); -// RGB uint8 image -struct clip_image_u8 { - int nx; - int ny; - uint8_t * data = NULL; - size_t size; -}; - -// RGB float32 image (NHWC) -// Memory layout: RGBRGBRGB... -struct clip_image_f32 { - int nx; - int ny; - float * data = NULL; - size_t size; -}; +CLIP_API int clip_n_patches (const struct clip_ctx * ctx); +CLIP_API int clip_n_mmproj_embd(const struct clip_ctx * ctx); struct clip_image_u8_batch { struct clip_image_u8 * data; @@ -71,21 +54,22 @@ struct clip_image_f32_batch { size_t size; }; -struct clip_image_u8 * make_clip_image_u8(); -struct clip_image_f32 * make_clip_image_f32(); -CLIP_API void clip_image_u8_free(clip_image_u8 * img); -CLIP_API void clip_image_f32_free(clip_image_f32 * img); +CLIP_API struct clip_image_u8 * clip_image_u8_init (); +CLIP_API struct clip_image_f32 * clip_image_f32_init(); + +CLIP_API void clip_image_u8_free (struct clip_image_u8 * img); +CLIP_API void clip_image_f32_free(struct clip_image_f32 * img); + CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img); + /** interpret bytes as an image file with length bytes_length, and use the result to populate img */ CLIP_API bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img); -bool clip_image_preprocess(const struct clip_ctx * ctx, const struct clip_image_u8 * img, struct clip_image_f32 * res, const bool pad2square); -bool clip_image_encode(const struct clip_ctx * ctx, const int n_threads, struct clip_image_f32 * img, float * vec); +CLIP_API bool clip_image_preprocess (struct clip_ctx * ctx, const struct clip_image_u8 * img, struct clip_image_f32 * res, bool pad2square); +CLIP_API bool clip_image_encode (struct clip_ctx * ctx, int n_threads, struct clip_image_f32 * img, float * vec); +CLIP_API bool clip_image_batch_encode(struct clip_ctx * ctx, int n_threads, const struct clip_image_f32_batch * imgs, float * vec); -bool clip_image_batch_encode(const struct clip_ctx * ctx, const int n_threads, const struct clip_image_f32_batch * imgs, - float * vec); - -bool clip_model_quantize(const char * fname_inp, const char * fname_out, const int itype); +CLIP_API bool clip_model_quantize(const char * fname_inp, const char * fname_out, int itype); #ifdef __cplusplus } diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 0cae8c4b1..d42e7582e 100644 --- a/examples/llava/llava.cpp 
+++ b/examples/llava/llava.cpp @@ -10,7 +10,7 @@ #include "base64.hpp" static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float * image_embd, int * n_img_pos) { - clip_image_f32 * img_res = make_clip_image_f32(); + clip_image_f32 * img_res = clip_image_f32_init(); if (!clip_image_preprocess(ctx_clip, img, img_res, /*pad2square =*/ true)) { fprintf(stderr, "%s: unable to preprocess image\n", __func__); clip_image_f32_free(img_res); @@ -86,7 +86,7 @@ bool llava_eval_image_embed(llama_context * ctx_llama, const struct llava_image_ } LLAVA_API struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length) { - clip_image_u8 * img = make_clip_image_u8(); + clip_image_u8 * img = clip_image_u8_init(); if (!clip_image_load_from_bytes(image_bytes, image_bytes_length, img)) { clip_image_u8_free(img); fprintf(stderr, "%s: can't load image from bytes, is it a valid image?", __func__); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 0aada8e28..52d9b9768 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -82,7 +82,7 @@ static inline bool is_base64(uint8_t c) return (isalnum(c) || (c == '+') || (c == '/')); } -static std::vector base64_decode(std::string const &encoded_string) +static std::vector base64_decode(const std::string & encoded_string) { int i = 0; int j = 0; @@ -209,10 +209,10 @@ struct slot_image int32_t id; bool request_encode_image = false; - float* image_embedding = nullptr; + float * image_embedding = nullptr; int32_t image_tokens = 0; - clip_image_u8 img_data; + clip_image_u8 * img_data; std::string prefix_prompt; // before of this image }; @@ -434,10 +434,12 @@ struct llama_client_slot generated_token_probs.clear(); - for (slot_image &img : images) + for (slot_image & img : images) { free(img.image_embedding); - delete[] img.img_data.data; + if (img.img_data) { + clip_image_u8_free(img.img_data); + } img.prefix_prompt = ""; } @@ -851,24 +853,17 @@ struct llama_server_context { for (const auto &img : *images_data) { - std::string data_b64 = img["data"].get(); + const std::vector image_buffer = base64_decode(img["data"].get()); + slot_image img_sl; img_sl.id = img.count("id") != 0 ? 
img["id"].get() : slot->images.size(); - int width, height, channels; - std::vector image_buffer = base64_decode(data_b64); - data_b64.clear(); - auto data = stbi_load_from_memory(image_buffer.data(), image_buffer.size(), &width, &height, &channels, 3); - if (!data) { + img_sl.img_data = clip_image_u8_init(); + if (!clip_image_load_from_bytes(image_buffer.data(), image_buffer.size(), img_sl.img_data)) + { LOG_TEE("slot %i - failed to load image [id: %i]\n", slot->id, img_sl.id); return false; } - LOG_TEE("slot %i - image loaded [id: %i] resolution (%i x %i)\n", slot->id, img_sl.id, width, height); - img_sl.img_data.nx = width; - img_sl.img_data.ny = height; - img_sl.img_data.size = width * height * 3; - img_sl.img_data.data = new uint8_t[width * height * 3](); - memcpy(img_sl.img_data.data, data, width * height * 3); - stbi_image_free(data); + LOG_TEE("slot %i - loaded image\n", slot->id); img_sl.request_encode_image = true; slot->images.push_back(img_sl); } @@ -1143,8 +1138,8 @@ struct llama_server_context { continue; } - clip_image_f32 img_res; - if (!clip_image_preprocess(clp_ctx, &img.img_data, &img_res, /*pad2square =*/ true)) + clip_image_f32 * img_res = clip_image_f32_init(); + if (!clip_image_preprocess(clp_ctx, img.img_data, img_res, /*pad2square =*/ true)) { LOG_TEE("Error processing the given image"); clip_free(clp_ctx); @@ -1159,11 +1154,12 @@ struct llama_server_context return false; } LOG_TEE("slot %i - encoding image [id: %i]\n", slot.id, img.id); - if (!clip_image_encode(clp_ctx, params.n_threads, &img_res, img.image_embedding)) + if (!clip_image_encode(clp_ctx, params.n_threads, img_res, img.image_embedding)) { LOG_TEE("Unable to encode image\n"); return false; } + clip_image_f32_free(img_res); img.request_encode_image = false; } From e39106c0554cbd0e9310e08fb3b2a577ea4b6273 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 31 Dec 2023 11:43:31 +0200 Subject: [PATCH 332/859] ggml : add ggml_vdotq_s32 alias (#4715) ggml-ci --- ggml-quants.c | 118 ++++++++++++++++++++++++++------------------------ 1 file changed, 61 insertions(+), 57 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 05ef8f9b7..55a9496d1 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -410,13 +410,17 @@ inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { #if !defined(__ARM_FEATURE_DOTPROD) -inline static int32x4_t vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { +inline static int32x4_t ggml_vdotq_s32(int32x4_t acc, int8x16_t a, int8x16_t b) { const int16x8_t p0 = vmull_s8(vget_low_s8 (a), vget_low_s8 (b)); const int16x8_t p1 = vmull_s8(vget_high_s8(a), vget_high_s8(b)); return vaddq_s32(acc, vaddq_s32(vpaddlq_s16(p0), vpaddlq_s16(p1))); } +#else + +#define ggml_vdotq_s32(a, b, c) vdotq_s32(a, b, c) + #endif #endif @@ -2481,8 +2485,8 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); // dot product into int32x4_t - const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); - const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); + const int32x4_t p_0 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_0ls, v1_0l), v0_0hs, v1_0h); + const int32x4_t p_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_1ls, v1_1l), v0_1hs, v1_1h); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); 
@@ -2769,8 +2773,8 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); // dot product into int32x4_t - const int32x4_t p_0 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); - const int32x4_t p_1 = vdotq_s32(vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); + const int32x4_t p_0 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_0l, v1_0l), v0_0h, v1_0h); + const int32x4_t p_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), v0_1l, v1_1l), v0_1h, v1_1h); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(p_0), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(p_1), GGML_FP16_TO_FP32(x1->d)*y1->d); @@ -2936,11 +2940,11 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), - vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + ggml_vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), - vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + ggml_vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -3228,11 +3232,11 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri const int8x16_t v1_1h = vld1q_s8(y1->qs + 16); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), - vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*y0->d); + ggml_vdotq_s32(vdupq_n_s32(0), v0_0lf, v1_0l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_0hf, v1_0h))), GGML_FP16_TO_FP32(x0->d)*y0->d); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), - vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*y1->d); + ggml_vdotq_s32(vdupq_n_s32(0), v0_1lf, v1_1l), + ggml_vdotq_s32(vdupq_n_s32(0), v0_1hf, v1_1h))), GGML_FP16_TO_FP32(x1->d)*y1->d); } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1) + summs0 + summs1; @@ -3483,12 +3487,12 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri const int8x16_t y1_1 = vld1q_s8(y1->qs + 16); sumv0 = vmlaq_n_f32(sumv0, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), - vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); + ggml_vdotq_s32(vdupq_n_s32(0), x0_0, y0_0), + ggml_vdotq_s32(vdupq_n_s32(0), x0_1, y0_1))), GGML_FP16_TO_FP32(x0->d)*GGML_FP16_TO_FP32(y0->d)); sumv1 = vmlaq_n_f32(sumv1, vcvtq_f32_s32(vaddq_s32( - vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), - vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); + ggml_vdotq_s32(vdupq_n_s32(0), x1_0, y1_0), + ggml_vdotq_s32(vdupq_n_s32(0), x1_1, y1_1))), GGML_FP16_TO_FP32(x1->d)*GGML_FP16_TO_FP32(y1->d)); } *s = vaddvq_f32(sumv0) + vaddvq_f32(sumv1); @@ -3598,8 +3602,8 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri // We use this macro instead of a function call because for some reason // the code runs 2-3% slower, even if the function is declared inline 
#define MULTIPLY_ACCUM_WITH_SCALE(index)\ - isum += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * aux[is+(index)];\ - isum += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * aux[is+1+(index)]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * aux[is+(index)];\ + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * aux[is+1+(index)]; #define SHIFT_MULTIPLY_ACCUM_WITH_SCALE(shift, index)\ q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32;\ @@ -3973,10 +3977,10 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri q2bytes.val[2] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 4), m3)); q2bytes.val[3] = vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q2bits, 6), m3)); - isum1 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * scales[0]; - isum2 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * scales[1]; - isum1 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[2], q8bytes.val[2])) * scales[2]; - isum2 += vaddvq_s32(vdotq_s32(vzero, q2bytes.val[3], q8bytes.val[3])) * scales[3]; + isum1 += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[0], q8bytes.val[0])) * scales[0]; + isum2 += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[1], q8bytes.val[1])) * scales[1]; + isum1 += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[2], q8bytes.val[2])) * scales[2]; + isum2 += vaddvq_s32(ggml_vdotq_s32(vzero, q2bytes.val[3], q8bytes.val[3])) * scales[3]; sum += d * (isum1 + isum2); } @@ -4256,10 +4260,10 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 2), m3b)), vreinterpretq_s8_u8(q3h.val[2])); q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 2), m3b)), vreinterpretq_s8_u8(q3h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes_1.val[0])) * scale[0]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes_1.val[1])) * scale[1]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes_1.val[2])) * scale[2]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes_1.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[0], q8bytes_1.val[0])) * scale[0]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[1], q8bytes_1.val[1])) * scale[1]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[2], q8bytes_1.val[2])) * scale[2]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[3], q8bytes_1.val[3])) * scale[3]; scale += 4; @@ -4273,10 +4277,10 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[0], 6), m3b)), vreinterpretq_s8_u8(q3h.val[2])); q3bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vandq_u8(vshrq_n_u8(q3bits.val[1], 6), m3b)), vreinterpretq_s8_u8(q3h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes_2.val[0])) * scale[0]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes_2.val[1])) * scale[1]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes_2.val[2])) * scale[2]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes_2.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[0], q8bytes_2.val[0])) * scale[0]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[1], q8bytes_2.val[1])) * scale[1]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[2], q8bytes_2.val[2])) * scale[2]; + isum += 
vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[3], q8bytes_2.val[3])) * scale[3]; scale += 4; @@ -4757,10 +4761,10 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri q3bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(vshrq_n_u8(q3bits, 4), m3b), q3h.val[2])); q3bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q3bits, 6), q3h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[0], q8bytes.val[0])) * scales[0]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[1], q8bytes.val[1])) * scales[2]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[2], q8bytes.val[2])) * scales[1]; - isum += vaddvq_s32(vdotq_s32(vzero, q3bytes.val[3], q8bytes.val[3])) * scales[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[0], q8bytes.val[0])) * scales[0]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[1], q8bytes.val[1])) * scales[2]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[2], q8bytes.val[2])) * scales[1]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q3bytes.val[3], q8bytes.val[3])) * scales[3]; sum += d * isum; @@ -5109,14 +5113,14 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); - const int32x4_t p1 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); sumi1 += vaddvq_s32(p1) * scales[2*j+0]; q8bytes = ggml_vld1q_s8_x2(q8); q8 += 32; q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); - const int32x4_t p2 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); sumi2 += vaddvq_s32(p2) * scales[2*j+1]; } @@ -5449,13 +5453,13 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri q4bytes.val[0] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[0], m4b)); q4bytes.val[1] = vreinterpretq_s8_u8(vandq_u8 (q4bits.val[1], m4b)); - const int32x4_t p1 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[0]), q4bytes.val[1], q8bytes.val[1]); const int32_t sumi1 = vaddvq_s32(p1) * scales[0]; q4bytes.val[0] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[0], 4)); q4bytes.val[1] = vreinterpretq_s8_u8(vshrq_n_u8(q4bits.val[1], 4)); - const int32x4_t p2 = vdotq_s32(vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[2]), q4bytes.val[1], q8bytes.val[3]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(mzero, q4bytes.val[0], q8bytes.val[2]), q4bytes.val[1], q8bytes.val[3]); const int32_t sumi2 = vaddvq_s32(p2) * scales[1]; sumf += d * (sumi1 + sumi2); @@ -5722,8 +5726,8 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri q5bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[0], 4), q5h.val[2])); q5bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q5bits.val[1], 4), q5h.val[3])); - sumi += vaddvq_s32(vdotq_s32(vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0]), q5bytes.val[1], q8bytes.val[1])) * *scales++; - sumi += vaddvq_s32(vdotq_s32(vdotq_s32(mzero, q5bytes.val[2], 
q8bytes.val[2]), q5bytes.val[3], q8bytes.val[3])) * *scales++; + sumi += vaddvq_s32(ggml_vdotq_s32(ggml_vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0]), q5bytes.val[1], q8bytes.val[1])) * *scales++; + sumi += vaddvq_s32(ggml_vdotq_s32(ggml_vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2]), q5bytes.val[3], q8bytes.val[3])) * *scales++; } sumf += d * sumi - dmin * sumi_mins; @@ -6112,10 +6116,10 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri q5bytes.val[2] = vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(q5bits.val[0], 4)), vreinterpretq_s8_u8(q5h.val[2])); q5bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vshrq_n_u8(q5bits.val[1], 4)), vreinterpretq_s8_u8(q5h.val[3])); - int32_t sumi1 = sc[0] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0])); - int32_t sumi2 = sc[1] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[1], q8bytes.val[1])); - int32_t sumi3 = sc[2] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2])); - int32_t sumi4 = sc[3] * vaddvq_s32(vdotq_s32(mzero, q5bytes.val[3], q8bytes.val[3])); + int32_t sumi1 = sc[0] * vaddvq_s32(ggml_vdotq_s32(mzero, q5bytes.val[0], q8bytes.val[0])); + int32_t sumi2 = sc[1] * vaddvq_s32(ggml_vdotq_s32(mzero, q5bytes.val[1], q8bytes.val[1])); + int32_t sumi3 = sc[2] * vaddvq_s32(ggml_vdotq_s32(mzero, q5bytes.val[2], q8bytes.val[2])); + int32_t sumi4 = sc[3] * vaddvq_s32(ggml_vdotq_s32(mzero, q5bytes.val[3], q8bytes.val[3])); sumf += d * (sumi1 + sumi2 + sumi3 + sumi4); } @@ -6399,10 +6403,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[2], m4b), q6h.val[2])); q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vandq_u8(q6bits.val[3], m4b), q6h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; scale += 4; @@ -6426,10 +6430,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[2], 4), q6h.val[2])); q6bytes.val[3] = vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[3], 4), q6h.val[3])); - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; scale += 4; } //sum += isum * d_all * y[i].d; @@ -6816,10 +6820,10 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri q6bytes.val[2] = 
vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[0], 4), q6h.val[2])), m32s); q6bytes.val[3] = vsubq_s8(vreinterpretq_s8_u8(vorrq_u8(vshrq_n_u8(q6bits.val[1], 4), q6h.val[3])), m32s); - isum += vaddvq_s32(vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + - vaddvq_s32(vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; + isum += vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[0], q8bytes.val[0])) * scale[0] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[1], q8bytes.val[1])) * scale[1] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[2], q8bytes.val[2])) * scale[2] + + vaddvq_s32(ggml_vdotq_s32(vzero, q6bytes.val[3], q8bytes.val[3])) * scale[3]; sum += isum * d_all * y[i].d; From 1e3900ebacb3a0b385271389686403c97ad76d88 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Fri, 29 Dec 2023 16:15:37 +0000 Subject: [PATCH 333/859] flake.nix: expose full scope in legacyPackages --- .devops/nix/jetson-support.nix | 19 +++++++++++++------ flake.nix | 20 +++++++++++++++++--- 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/.devops/nix/jetson-support.nix b/.devops/nix/jetson-support.nix index 08426d2ab..78e2e40e0 100644 --- a/.devops/nix/jetson-support.nix +++ b/.devops/nix/jetson-support.nix @@ -8,12 +8,13 @@ pkgsCuda, ... }: - lib.optionalAttrs (system == "aarch64-linux") { - packages = + { + legacyPackages = let - caps.jetson-xavier = "7.2"; - caps.jetson-orin = "8.7"; - caps.jetson-nano = "5.3"; + caps.llamaPackagesXavier = "7.2"; + caps.llamaPackagesOrin = "8.7"; + caps.llamaPackagesTX2 = "6.2"; + caps.llamaPackagesNano = "5.3"; pkgsFor = cap: @@ -27,6 +28,12 @@ }; }; in - builtins.mapAttrs (name: cap: ((pkgsFor cap).callPackage ./scope.nix { }).llama-cpp) caps; + builtins.mapAttrs (name: cap: (pkgsFor cap).callPackage ./scope.nix { }) caps; + + packages = lib.optionalAttrs (system == "aarch64-linux") { + jetson-xavier = config.legacyPackages.llamaPackagesXavier.llama-cpp; + jetson-orin = config.legacyPackages.llamaPackagesOrin.llama-cpp; + jetson-nano = config.legacyPackages.llamaPackagesNano.llama-cpp; + }; }; } diff --git a/flake.nix b/flake.nix index 2209070aa..6785b52f4 100644 --- a/flake.nix +++ b/flake.nix @@ -80,16 +80,30 @@ ... }: { + # Unlike `.#packages`, legacyPackages may contain values of + # arbitrary types (including nested attrsets) and may even throw + # exceptions. This attribute isn't recursed into by `nix flake + # show` either. + # + # You can add arbitrary scripts to `.devops/nix/scope.nix` and + # access them as `nix build .#llamaPackages.${scriptName}` using + # the same path you would with an overlay. + legacyPackages = { + llamaPackages = pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }; + llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }; + llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }; + }; + # We don't use the overlay here so as to avoid making too many instances of nixpkgs, # cf. 
https://zimbatm.com/notes/1000-instances-of-nixpkgs packages = { - default = (pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; + default = config.legacyPackages.llamaPackages.llama-cpp; } // lib.optionalAttrs pkgs.stdenv.isLinux { opencl = config.packages.default.override { useOpenCL = true; }; - cuda = (pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; - rocm = (pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; }).llama-cpp; + cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp; + rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; mpi-cpu = config.packages.default.override { useMpi = true; }; mpi-cuda = config.packages.default.override { useMpi = true; }; From a5c088d8c698299b973d2709153e5d95295606d9 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Tue, 26 Dec 2023 23:34:40 +0000 Subject: [PATCH 334/859] flake.nix: rocm not yet supported on aarch64, so hide the output --- flake.nix | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 6785b52f4..920a79906 100644 --- a/flake.nix +++ b/flake.nix @@ -74,6 +74,7 @@ { config, lib, + system, pkgs, pkgsCuda, pkgsRocm, @@ -103,10 +104,12 @@ // lib.optionalAttrs pkgs.stdenv.isLinux { opencl = config.packages.default.override { useOpenCL = true; }; cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp; - rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; mpi-cpu = config.packages.default.override { useMpi = true; }; mpi-cuda = config.packages.default.override { useMpi = true; }; + } + // lib.optionalAttrs (system == "x86_64-linux") { + rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; }; }; }; From 356ea17e0f92bfbbf28a4f69261bed48eff68d9c Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Fri, 29 Dec 2023 16:21:50 +0000 Subject: [PATCH 335/859] flake.nix: expose checks --- flake.nix | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/flake.nix b/flake.nix index 920a79906..8d0f095d7 100644 --- a/flake.nix +++ b/flake.nix @@ -111,6 +111,11 @@ // lib.optionalAttrs (system == "x86_64-linux") { rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; }; + + # Packages exposed in `.#checks` will be built by the CI and by + # `nix flake check`. 
Currently we expose all packages, but we could + # make more granular choices + checks = config.packages; }; }; } From 7adedecbe39bd552bc14142f496246d55a43ac4e Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Tue, 26 Dec 2023 19:17:26 +0000 Subject: [PATCH 336/859] workflows: nix-ci: init; build flake outputs --- .github/workflows/build.yml | 1 - .github/workflows/nix-ci.yml | 44 ++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/nix-ci.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index a5090e398..0a28a1111 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -515,7 +515,6 @@ jobs: - name: Build Xcode project run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' build - # freeBSD-latest: # runs-on: macos-12 # steps: diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml new file mode 100644 index 000000000..f82b2cb3d --- /dev/null +++ b/.github/workflows/nix-ci.yml @@ -0,0 +1,44 @@ +name: Nix CI + +on: + workflow_dispatch: # allows manual triggering + push: + branches: + - master + paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] + pull_request: + types: [opened, synchronize, reopened] + paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] + +jobs: + nix-build: + if: ${{ vars.CACHIX_NAME != '' }} + strategy: + fail-fast: false + matrix: + os: [ ubuntu-latest, macos-latest ] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@v9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + extra-conf: | + extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + - uses: DeterminateSystems/magic-nix-cache-action@v2 + with: + upstream-cache: https://${{ matrix.cachixName }}.cachix.org + - name: Set-up cachix to push the results to + uses: cachix/cachix-action@v13 + with: + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + name: ${{ vars.CACHIX_NAME }} + - name: Build + run: > + nix run github:Mic92/nix-fast-build + -- --skip-cached --no-nom + --flake + ".#checks.$(nix eval --raw --impure --expr builtins.currentSystem)" From 1e9ae54cf24d27afe3900d1250634a2a33423db1 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 17:19:11 +0000 Subject: [PATCH 337/859] workflows: nix-ci: add a job for eval --- .github/workflows/nix-ci.yml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml index f82b2cb3d..845b93bfb 100644 --- a/.github/workflows/nix-ci.yml +++ b/.github/workflows/nix-ci.yml @@ -11,6 +11,33 @@ on: paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] jobs: + nix-eval: + strategy: + fail-fast: false + matrix: + os: [ ubuntu-latest, macos-latest ] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout 
repository + uses: actions/checkout@v4 + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@v9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + extra-conf: | + extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + - uses: DeterminateSystems/magic-nix-cache-action@v2 + with: + upstream-cache: https://${{ matrix.cachixName }}.cachix.org + - name: List all flake outputs + run: nix flake show --all-systems + - name: Show all output paths + run: > + nix run github:nix-community/nix-eval-jobs + -- --gc-roots-dir gcroot + --flake + ".#packages.$(nix eval --raw --impure --expr builtins.currentSystem)" nix-build: if: ${{ vars.CACHIX_NAME != '' }} strategy: From c5239944bab0ff71915df8f2dc7e42fc2c138ff6 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 16:38:36 +0000 Subject: [PATCH 338/859] workflows: weekly `nix flake update` --- .github/workflows/nix-flake-update.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .github/workflows/nix-flake-update.yml diff --git a/.github/workflows/nix-flake-update.yml b/.github/workflows/nix-flake-update.yml new file mode 100644 index 000000000..fa9360841 --- /dev/null +++ b/.github/workflows/nix-flake-update.yml @@ -0,0 +1,22 @@ +name: update-flake-lock +on: + workflow_dispatch: + schedule: + - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00 + +jobs: + lockfile: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@main + - name: Update flake.lock + uses: DeterminateSystems/update-flake-lock@main + with: + pr-title: "nix: update flake.lock" + pr-labels: | + nix + pr-reviewers: philiptaron,SomeoneSerge + token: ${{ secrets.GITHUB_TOKEN }} From 06f2a5d1909a1385b1a16dab4ade68377e121bdd Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 17:36:08 +0000 Subject: [PATCH 339/859] workflows: nix-flakestry: drop tag filters ...and add a job for flakehub.com --- .github/workflows/nix-flakestry.yml | 23 ---------------- .github/workflows/nix-publish-flake.yml | 36 +++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 23 deletions(-) delete mode 100644 .github/workflows/nix-flakestry.yml create mode 100644 .github/workflows/nix-publish-flake.yml diff --git a/.github/workflows/nix-flakestry.yml b/.github/workflows/nix-flakestry.yml deleted file mode 100644 index 3abfb3509..000000000 --- a/.github/workflows/nix-flakestry.yml +++ /dev/null @@ -1,23 +0,0 @@ -# Make the flake discoverable on https://flakestry.dev -name: "Publish a flake to flakestry" -on: - push: - tags: - - "v?[0-9]+.[0-9]+.[0-9]+" - - "v?[0-9]+.[0-9]+" - workflow_dispatch: - inputs: - tag: - description: "The existing tag to publish" - type: "string" - required: true -jobs: - publish-flake: - runs-on: ubuntu-latest - permissions: - id-token: "write" - contents: "read" - steps: - - uses: flakestry/flakestry-publish@main - with: - version: "${{ inputs.tag || github.ref_name }}" diff --git a/.github/workflows/nix-publish-flake.yml b/.github/workflows/nix-publish-flake.yml new file mode 100644 index 000000000..2c3c1ebda --- /dev/null +++ b/.github/workflows/nix-publish-flake.yml @@ -0,0 +1,36 @@ +# Make the flake discoverable on https://flakestry.dev and https://flakehub.com/flakes +name: "Publish a flake to flakestry & flakehub" 
+on: + push: + tags: + - "*" + workflow_dispatch: + inputs: + tag: + description: "The existing tag to publish" + type: "string" + required: true +jobs: + flakestry-publish: + runs-on: ubuntu-latest + permissions: + id-token: "write" + contents: "read" + steps: + - uses: flakestry/flakestry-publish@main + with: + version: "${{ inputs.tag || github.ref_name }}" + flakehub-publish: + runs-on: "ubuntu-latest" + permissions: + id-token: "write" + contents: "read" + steps: + - uses: "actions/checkout@v4" + with: + ref: "${{ (inputs.tag != null) && format('refs/tags/{0}', inputs.tag) || '' }}" + - uses: "DeterminateSystems/nix-installer-action@main" + - uses: "DeterminateSystems/flakehub-push@main" + with: + visibility: "public" + tag: "${{ inputs.tag }}" From d8361747317c5cb2e00e7fb3b59ff4dce5a176a5 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 18:01:07 +0000 Subject: [PATCH 340/859] workflows: nix-ci: add a qemu job for jetsons --- .github/workflows/nix-ci.yml | 41 ++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml index 845b93bfb..a38c6ead4 100644 --- a/.github/workflows/nix-ci.yml +++ b/.github/workflows/nix-ci.yml @@ -69,3 +69,44 @@ jobs: -- --skip-cached --no-nom --flake ".#checks.$(nix eval --raw --impure --expr builtins.currentSystem)" + nix-build-aarch64: + if: ${{ vars.CACHIX_NAME != '' }} + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install QEMU + # Copy-paste from https://github.com/orgs/community/discussions/8305#discussioncomment-5888654 + run: | + sudo apt-get install -y qemu-user-static qemu-system-aarch64 + sudo usermod -a -G kvm $USER + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@v9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + extra-conf: | + extra-platforms = aarch64-linux + extra-system-features = nixos-test kvm + extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + - uses: DeterminateSystems/magic-nix-cache-action@v2 + with: + upstream-cache: https://${{ matrix.cachixName }}.cachix.org + - name: Set-up cachix to push the results to + uses: cachix/cachix-action@v13 + with: + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + name: ${{ vars.CACHIX_NAME }} + - name: Show all output paths + run: > + nix run github:nix-community/nix-eval-jobs + -- --gc-roots-dir gcroot + --flake + ".#packages.aarch64-linux" + - name: Build + run: > + nix run github:Mic92/nix-fast-build + -- --skip-cached --no-nom + --systems aarch64-linux + --flake + ".#checks.aarch64-linux" From 198ed7ebfc89b8f2b35a8b1655d57bfb57530c1a Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sat, 30 Dec 2023 18:25:25 +0000 Subject: [PATCH 341/859] flake.nix: suggest the binary caches --- flake.nix | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/flake.nix b/flake.nix index 8d0f095d7..488ed6c59 100644 --- a/flake.nix +++ b/flake.nix @@ -6,6 +6,29 @@ flake-parts.url = "github:hercules-ci/flake-parts"; }; + # Optional binary cache + nixConfig = { + extra-substituters = [ + # Populated by the CI in ggerganov/llama.cpp + "https://llama-cpp.cachix.org" + + # A development cache for nixpkgs imported with `config.cudaSupport = true`. + # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci. 
+ # This lets one skip building e.g. the CUDA-enabled openmpi. + # TODO: Replace once nix-community obtains an official one. + "https://cuda-maintainers.cachix.org" + ]; + + # Verify these are the same keys as published on + # - https://app.cachix.org/cache/llama-cpp + # - https://app.cachix.org/cache/cuda-maintainers + extra-trusted-public-keys = [ + "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc=" + "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E=" + ]; + }; + + # For inspection, use `nix flake show github:ggerganov/llama.cpp` or the nix repl: # # ```bash From edd1ab7bc34c10a780ee7f9a4499f7689cdad36d Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Sun, 31 Dec 2023 17:42:22 +0000 Subject: [PATCH 342/859] flake.lock: update to a commit recently cached by nixpkgs-cuda-ci --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 3fcd1f45d..15a0a1a8e 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1703559957, - "narHash": "sha256-x9PUuMEPGUOMB51zNxrDr2QoHbYWlCS2xhFedm9MC5Q=", + "lastModified": 1703637592, + "narHash": "sha256-8MXjxU0RfFfzl57Zy3OfXCITS0qWDNLzlBAdwxGZwfY=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "75dd68c36f458c6593c5bbb48abfd3e59bfed380", + "rev": "cfc3698c31b1fb9cdcf10f36c9643460264d0ca8", "type": "github" }, "original": { From 58ba655af054715c0516ee270ad028ad9e74f357 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 2 Jan 2024 10:57:44 +0200 Subject: [PATCH 343/859] metal : enable shader debugging (cmake option) (#4705) * ggml : disable fast-math for Metal (cmake build only) ggml-ci * metal : fix Metal API debug warnings * cmake : add -fno-inline for Metal build (#4545) * metal : fix API debug warnings * metal : fix compile warnings * metal : use uint64_t for strides * cmake : rename option to LLAMA_METAL_SHADER_DEBUG * metal : fix mat-vec Q8_0 kernel for BS > 1 * metal : normalize mat-vec kernel signatures * cmake : respect LLAMA_QKK_64 option * metal : fix mat-vec Q4_K kernel for QK_K == 64 ggml-ci --- CMakeLists.txt | 34 ++- ci/run.sh | 14 +- ggml-metal.m | 28 ++- ggml-metal.metal | 475 +++++++++++++++++++++---------------- tests/test-backend-ops.cpp | 8 +- 5 files changed, 329 insertions(+), 230 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 545aab267..57ae4c2df 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -95,6 +95,7 @@ option(LLAMA_HIP_UMA "llama: use HIP unified memory arch option(LLAMA_CLBLAST "llama: use CLBlast" OFF) option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG "llama: disable Metal debugging" OFF) +option(LLAMA_METAL_SHADER_DEBUG "llama: compile Metal with -fno-fast-math" OFF) option(LLAMA_MPI "llama: use MPI" OFF) option(LLAMA_QKK_64 "llama: use super-block size of 64 for k-quants" OFF) @@ -154,9 +155,9 @@ if (APPLE AND LLAMA_ACCELERATE) endif() if (LLAMA_METAL) - find_library(FOUNDATION_LIBRARY Foundation REQUIRED) - find_library(METAL_FRAMEWORK Metal REQUIRED) - find_library(METALKIT_FRAMEWORK MetalKit REQUIRED) + find_library(FOUNDATION_LIBRARY Foundation REQUIRED) + find_library(METAL_FRAMEWORK Metal REQUIRED) + find_library(METALKIT_FRAMEWORK MetalKit REQUIRED) message(STATUS "Metal framework found") set(GGML_HEADERS_METAL ggml-metal.h) @@ -173,6 +174,33 @@ if (LLAMA_METAL) # copy ggml-metal.metal to bin directory configure_file(ggml-metal.metal ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal COPYONLY) + if 
(LLAMA_METAL_SHADER_DEBUG) + # custom command to do the following: + # xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air + # xcrun -sdk macosx metallib ggml-metal.air -o ggml.metallib + # + # note: this is the only way I found to disable fast-math in Metal. it's ugly, but at least it works + # disabling fast math is needed in order to pass tests/test-backend-ops + # note: adding -fno-inline fixes the tests when using MTL_SHADER_VALIDATION=1 + set(XC_FLAGS -fno-fast-math -fno-inline -g) + if (LLAMA_QKK_64) + set(XC_FLAGS ${XC_FLAGS} -DQK_K=64) + endif() + + add_custom_command( + OUTPUT ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + COMMAND xcrun -sdk macosx metal ${XC_FLAGS} -c ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air + COMMAND xcrun -sdk macosx metallib ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + DEPENDS ggml-metal.metal + COMMENT "Compiling Metal kernels" + ) + + add_custom_target( + ggml-metal ALL + DEPENDS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + ) + endif() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ${FOUNDATION_LIBRARY} ${METAL_FRAMEWORK} diff --git a/ci/run.sh b/ci/run.sh index 2e3343831..47a254f4c 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -30,6 +30,12 @@ sd=`dirname $0` cd $sd/../ SRC=`pwd` +CMAKE_EXTRA="" + +if [ ! -z ${GG_BUILD_METAL} ]; then + CMAKE_EXTRA="${CMAKE_EXTRA} -DLLAMA_METAL_SHADER_DEBUG=ON" +fi + ## helpers # download a file if it does not exist or if it is outdated @@ -81,8 +87,8 @@ function gg_run_ctest_debug { set -e - (time cmake -DCMAKE_BUILD_TYPE=Debug .. ) 2>&1 | tee -a $OUT/${ci}-cmake.log - (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log + (time cmake -DCMAKE_BUILD_TYPE=Debug ${CMAKE_EXTRA} .. ) 2>&1 | tee -a $OUT/${ci}-cmake.log + (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log (time ctest --output-on-failure -E test-opt ) 2>&1 | tee -a $OUT/${ci}-ctest.log @@ -109,8 +115,8 @@ function gg_run_ctest_release { set -e - (time cmake -DCMAKE_BUILD_TYPE=Release .. ) 2>&1 | tee -a $OUT/${ci}-cmake.log - (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log + (time cmake -DCMAKE_BUILD_TYPE=Release ${CMAKE_EXTRA} .. 
) 2>&1 | tee -a $OUT/${ci}-cmake.log + (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log if [ -z ${GG_BUILD_LOW_PERF} ]; then (time ctest --output-on-failure ) 2>&1 | tee -a $OUT/${ci}-ctest.log diff --git a/ggml-metal.m b/ggml-metal.m index 51a72ae33..cd9d00456 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -257,13 +257,14 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { bundle = [NSBundle bundleForClass:[GGMLMetalClass class]]; #endif NSError * error = nil; - NSString * libPath = [bundle pathForResource:@"default" ofType:@"metallib"]; + NSString * libPath = [bundle pathForResource:@"ggml" ofType:@"metallib"]; if (libPath != nil) { + // pre-compiled library found NSURL * libURL = [NSURL fileURLWithPath:libPath]; GGML_METAL_LOG_INFO("%s: loading '%s'\n", __func__, [libPath UTF8String]); ctx->library = [ctx->device newLibraryWithURL:libURL error:&error]; } else { - GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); + GGML_METAL_LOG_INFO("%s: ggml.metallib not found, loading from source\n", __func__); NSString * sourcePath; NSString * ggmlMetalPathResources = [[NSProcessInfo processInfo].environment objectForKey:@"GGML_METAL_PATH_RESOURCES"]; @@ -291,6 +292,13 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { options = [MTLCompileOptions new]; options.preprocessorMacros = @{ @"QK_K" : @(64) }; #endif + // try to disable fast-math + // NOTE: this seems to have no effect whatsoever + // instead, in order to disable fast-math, we have to build ggml.metallib from the command line + // using xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air + // and go through the "pre-compiled library found" path above + //[options setFastMathEnabled:false]; + ctx->library = [ctx->device newLibraryWithSource:src options:options error:&error]; } @@ -1230,7 +1238,7 @@ void ggml_metal_graph_compute( // not sure how to avoid this // TODO: make a simpler cpy_bytes kernel - const int nth = MIN(1024, ne00); + const int nth = MIN((int) ctx->pipeline_cpy_f32_f32.maxTotalThreadsPerThreadgroup, ne00); [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1285,7 +1293,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; - const int nth = MIN(1024, ne0); + const int nth = MIN((int) ctx->pipeline_add.maxTotalThreadsPerThreadgroup, ne00); [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -1785,8 +1793,9 @@ void ggml_metal_graph_compute( [encoder setBytes:&r3 length:sizeof(r3) atIndex:17]; [encoder setBytes:&idx length:sizeof(idx) atIndex:18]; // TODO: how to make this an array? read Metal docs - for (int j = 0; j < n_as; ++j) { - struct ggml_tensor * src_cur = dst->src[2 + j]; + for (int j = 0; j < 8; ++j) { + // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 + struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; size_t offs_src_cur = 0; id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); @@ -1909,8 +1918,9 @@ void ggml_metal_graph_compute( [encoder setBytes:&r3 length:sizeof(r3) atIndex:21]; [encoder setBytes:&idx length:sizeof(idx) atIndex:22]; // TODO: how to make this an array? 
read Metal docs - for (int j = 0; j < n_as; ++j) { - struct ggml_tensor * src_cur = dst->src[2 + j]; + for (int j = 0; j < 8; ++j) { + // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 + struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; size_t offs_src_cur = 0; id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); @@ -2229,7 +2239,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; - const int nth = MIN(1024, ne0); + const int nth = MIN((int) ctx->pipeline_upscale_f32.maxTotalThreadsPerThreadgroup, ne0); [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; diff --git a/ggml-metal.metal b/ggml-metal.metal index d5b54e112..1d5b8f6f4 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -59,26 +59,26 @@ kernel void kernel_add( constant int64_t & ne01, constant int64_t & ne02, constant int64_t & ne03, - constant int64_t & nb00, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & nb03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, constant int64_t & ne10, constant int64_t & ne11, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & nb13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, constant int64_t & ne0, constant int64_t & ne1, constant int64_t & ne2, constant int64_t & ne3, - constant int64_t & nb0, - constant int64_t & nb1, - constant int64_t & nb2, - constant int64_t & nb3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, constant int64_t & offs, uint3 tgpig[[threadgroup_position_in_grid]], uint3 tpitg[[thread_position_in_threadgroup]], @@ -109,26 +109,26 @@ kernel void kernel_mul( constant int64_t & ne01, constant int64_t & ne02, constant int64_t & ne03, - constant int64_t & nb00, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & nb03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, constant int64_t & ne10, constant int64_t & ne11, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & nb13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, constant int64_t & ne0, constant int64_t & ne1, constant int64_t & ne2, constant int64_t & ne3, - constant int64_t & nb0, - constant int64_t & nb1, - constant int64_t & nb2, - constant int64_t & nb3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, uint3 tgpig[[threadgroup_position_in_grid]], uint3 tpitg[[thread_position_in_threadgroup]], uint3 ntg[[threads_per_threadgroup]]) { @@ -158,26 +158,26 @@ kernel void kernel_div( constant int64_t & ne01, constant int64_t & ne02, constant int64_t & ne03, - constant int64_t & nb00, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & nb03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, constant int64_t & ne10, constant int64_t & ne11, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, 
- constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & nb13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, constant int64_t & ne0, constant int64_t & ne1, constant int64_t & ne2, constant int64_t & ne3, - constant int64_t & nb0, - constant int64_t & nb1, - constant int64_t & nb2, - constant int64_t & nb3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, uint3 tgpig[[threadgroup_position_in_grid]], uint3 tpitg[[thread_position_in_threadgroup]], uint3 ntg[[threads_per_threadgroup]]) { @@ -205,7 +205,7 @@ kernel void kernel_add_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(28)]], + constant uint64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] + src1[tpig % nb]; } @@ -214,7 +214,7 @@ kernel void kernel_mul_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(28)]], + constant uint64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] * src1[tpig % nb]; } @@ -223,7 +223,7 @@ kernel void kernel_div_row( device const float4 * src0, device const float4 * src1, device float4 * dst, - constant int64_t & nb [[buffer(28)]], + constant uint64_t & nb [[buffer(28)]], uint tpig[[thread_position_in_grid]]) { dst[tpig] = src0[tpig] / src1[tpig % nb]; } @@ -307,26 +307,26 @@ kernel void kernel_sum_rows( constant int64_t & ne01, constant int64_t & ne02, constant int64_t & ne03, - constant int64_t & nb00, - constant int64_t & nb01, - constant int64_t & nb02, - constant int64_t & nb03, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant uint64_t & nb03, constant int64_t & ne10, constant int64_t & ne11, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, - constant int64_t & nb13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant uint64_t & nb13, constant int64_t & ne0, constant int64_t & ne1, constant int64_t & ne2, constant int64_t & ne3, - constant int64_t & nb0, - constant int64_t & nb1, - constant int64_t & nb2, - constant int64_t & nb3, + constant uint64_t & nb0, + constant uint64_t & nb1, + constant uint64_t & nb2, + constant uint64_t & nb3, uint3 tpig[[thread_position_in_grid]]) { int64_t i3 = tpig.z; int64_t i2 = tpig.y; @@ -920,14 +920,21 @@ kernel void kernel_mul_mv_q4_0_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -939,14 
+946,21 @@ kernel void kernel_mul_mv_q4_1_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -958,14 +972,21 @@ kernel void kernel_mul_mv_q5_0_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -977,14 +998,21 @@ kernel void kernel_mul_mv_q5_1_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -1071,12 +1099,19 @@ kernel void kernel_mul_mv_q8_0_f32( constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne10, + constant int64_t & ne11, constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint 
tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -1182,8 +1217,8 @@ kernel void kernel_mul_mv_f32_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { kernel_mul_mv_f32_f32_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); @@ -1209,8 +1244,8 @@ kernel void kernel_mul_mv_f16_f16( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -1346,8 +1381,8 @@ kernel void kernel_mul_mv_f16_f32_1row( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { kernel_mul_mv_f16_f32_1row_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); @@ -1452,8 +1487,8 @@ kernel void kernel_mul_mv_f16_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { kernel_mul_mv_f16_f32_impl(src0, src1, dst, ne00, ne01, ne02, nb00, nb01, nb02, ne10, ne11, ne12, nb10, nb11, nb12, ne0, ne1, r2, r3, tgpig, tiisg); @@ -1478,8 +1513,8 @@ kernel void kernel_mul_mv_f16_f32_l4( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]]) { @@ -1543,7 +1578,8 @@ kernel void kernel_alibi_f32( const int64_t i3 = n / (ne2*ne1*ne0); const int64_t i2 = (n - i3*ne2*ne1*ne0) / (ne1*ne0); const int64_t i1 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0) / ne0; - const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0); + //const int64_t i0 = (n - i3*ne2*ne1*ne0 - i2*ne1*ne0 - i1*ne0); + const int64_t k = i3*ne3 + i2; float m_k; @@ -2410,22 +2446,6 @@ typedef struct { } block_q6_K; // 210 bytes / block -static inline uchar4 get_scale_min_k4(int j, device const uint8_t * q) { - uchar4 r; - if (j < 4) { - r[0] = q[j+0] & 63; - r[2] = q[j+1] & 63; - r[1] = q[j+4] & 63; - r[3] = q[j+5] & 63; - } else { - r[0] = (q[j+4] & 0xF) | ((q[j-4] >> 6) << 4); - r[2] = (q[j+5] & 0xF) | ((q[j-3] >> 6) << 4); - r[1] = (q[j+4] >> 4) | ((q[j-0] >> 6) << 4); - r[3] = (q[j+5] >> 4) | ((q[j+1] >> 6) << 4); - } - return r; -} - //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -2584,14 +2604,21 @@ kernel void kernel_mul_mv_q2_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant 
int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2841,14 +2868,21 @@ kernel void kernel_mul_mv_q3_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -2984,8 +3018,8 @@ void kernel_mul_mv_q4_K_f32_impl( constant uint & r2, constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], - uint tiisg[[thread_index_in_simdgroup]], - uint sgitg[[simdgroup_index_in_threadgroup]]) { + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { const int ix = tiisg/4; // 0...7 const int it = tiisg%4; // 0...3 @@ -2994,7 +3028,7 @@ void kernel_mul_mv_q4_K_f32_impl( const int r0 = tgpig.x; const int r1 = tgpig.y; const int im = tgpig.z; - const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int first_row = r0 * N_DST; const int ib_row = first_row * nb; const uint i12 = im%ne12; @@ -3060,7 +3094,7 @@ void kernel_mul_mv_q4_K_f32_impl( for (int row = 0; row < N_DST; ++row) { all_sum = simd_sum(sumf[row]); if (tiisg == 0) { - dst[r1*ne0+ im*ne0*ne1 + first_row + row] = all_sum; + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; } } } @@ -3072,14 +3106,21 @@ kernel void kernel_mul_mv_q4_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -3271,14 
+3312,21 @@ kernel void kernel_mul_mv_q5_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -3398,14 +3446,21 @@ kernel void kernel_mul_mv_q6_K_f32( device const float * src1, device float * dst, constant int64_t & ne00, - constant int64_t & ne01[[buffer(4)]], - constant int64_t & ne02[[buffer(5)]], - constant int64_t & ne10[[buffer(9)]], - constant int64_t & ne12[[buffer(11)]], - constant int64_t & ne0 [[buffer(15)]], - constant int64_t & ne1 [[buffer(16)]], - constant uint & r2 [[buffer(17)]], - constant uint & r3 [[buffer(18)]], + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, uint3 tgpig[[threadgroup_position_in_grid]], uint tiisg[[thread_index_in_simdgroup]], uint sgitg[[simdgroup_index_in_threadgroup]]) { @@ -3523,7 +3578,7 @@ void dequantize_q8_0(device const block_q8_0 *xb, short il, thread type4x4 & reg device const int8_t * qs = ((device const int8_t *)xb->qs); const half d = xb->d; - for (int i=0;i<16;i++) { + for (int i = 0; i < 16; i++) { reg[i/4][i%4] = (qs[i + 16*il] * d); } } @@ -3792,12 +3847,12 @@ void kernel_mul_mm_impl(device const uchar * src0, device float * dst, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, constant uint & r2, @@ -3924,12 +3979,12 @@ kernel void kernel_mul_mm(device const uchar * src0, device float * dst, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, constant uint & r2, @@ -3965,19 +4020,19 @@ kernel void kernel_mul_mm_id( device const uchar * ids, device const uchar * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + 
constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4070,12 +4125,12 @@ typedef void (mat_mm_t)( device float * dst, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, constant uint & r2, @@ -4104,19 +4159,19 @@ typedef void (mat_mm_id_t)( device const uchar * ids, device const uchar * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne02, - constant int64_t & nb01, - constant int64_t & nb02, + constant uint64_t & nb01, + constant uint64_t & nb02, constant int64_t & ne12, constant int64_t & ne13, - constant int64_t & nb10, - constant int64_t & nb11, - constant int64_t & nb12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4153,7 +4208,7 @@ kernel void kernel_mul_mv_id_f32_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4169,7 +4224,7 @@ kernel void kernel_mul_mv_id_f32_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4222,7 +4277,7 @@ kernel void kernel_mul_mv_id_f16_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4238,7 +4293,7 @@ kernel void kernel_mul_mv_id_f16_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4291,7 +4346,7 @@ kernel void kernel_mul_mv_id_q8_0_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4307,7 +4362,7 @@ kernel void kernel_mul_mv_id_q8_0_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4354,7 +4409,7 @@ kernel void kernel_mul_mv_id_q4_0_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4370,7 +4425,7 @@ kernel void kernel_mul_mv_id_q4_0_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & 
ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4417,7 +4472,7 @@ kernel void kernel_mul_mv_id_q4_1_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4433,7 +4488,7 @@ kernel void kernel_mul_mv_id_q4_1_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4480,7 +4535,7 @@ kernel void kernel_mul_mv_id_q5_0_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4496,7 +4551,7 @@ kernel void kernel_mul_mv_id_q5_0_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4543,7 +4598,7 @@ kernel void kernel_mul_mv_id_q5_1_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4559,7 +4614,7 @@ kernel void kernel_mul_mv_id_q5_1_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4606,7 +4661,7 @@ kernel void kernel_mul_mv_id_q2_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4622,7 +4677,7 @@ kernel void kernel_mul_mv_id_q2_K_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4669,7 +4724,7 @@ kernel void kernel_mul_mv_id_q3_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4685,7 +4740,7 @@ kernel void kernel_mul_mv_id_q3_K_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4732,7 +4787,7 @@ kernel void kernel_mul_mv_id_q4_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4748,7 +4803,7 @@ kernel void kernel_mul_mv_id_q4_K_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4795,7 +4850,7 @@ kernel void kernel_mul_mv_id_q5_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4811,7 +4866,7 @@ kernel void kernel_mul_mv_id_q5_K_f32( constant uint64_t & nb12, constant int64_t & ne0, 
constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, @@ -4858,7 +4913,7 @@ kernel void kernel_mul_mv_id_q6_K_f32( device const char * ids, device const char * src1, device uchar * dst, - constant int64_t & nbi1, + constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, @@ -4874,7 +4929,7 @@ kernel void kernel_mul_mv_id_q6_K_f32( constant uint64_t & nb12, constant int64_t & ne0, constant int64_t & ne1, - constant int64_t & nb1, + constant uint64_t & nb1, constant uint & r2, constant uint & r3, constant int & idx, diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index b115299c0..eff063b2d 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -15,19 +15,18 @@ #include #include - static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float max = 1.0f) { size_t size = ggml_nelements(tensor); std::vector data(size); #if 0 - std::default_random_engine generator(rd()); + static std::default_random_engine generator(1234); std::uniform_real_distribution distribution(min, max); for (size_t i = 0; i < size; i++) { data[i] = distribution(generator); } -#endif +#else auto init_thread = [&](size_t start, size_t end) { std::random_device rd; std::default_random_engine generator(rd()); @@ -49,6 +48,7 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m for (auto & t : threads) { t.join(); } +#endif if (tensor->type == GGML_TYPE_F32 || tensor->type == GGML_TYPE_I32) { ggml_backend_tensor_set(tensor, data.data(), 0, size * sizeof(float)); @@ -437,7 +437,7 @@ struct test_case { double err = nmse(f1.data(), f2.data(), f1.size()); if (err > ud->max_err) { printf("[%s] NMSE = %f ", ggml_op_desc(t1), err); - //for (int i = 0; i < f1.size(); i++) { + //for (int i = 0; i < (int) f1.size(); i++) { // printf("%5d %9.6f %9.6f, diff = %9.6f\n", i, f1[i], f2[i], f1[i] - f2[i]); //} //printf("\n"); From 775ac8712a7b42cfead2585f42cec0dfd56644ab Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Tue, 2 Jan 2024 10:16:55 +0100 Subject: [PATCH 344/859] finetune: fix typo in README.md (#4733) Signed-off-by: Daniel Bevenius --- examples/finetune/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/finetune/README.md b/examples/finetune/README.md index a2a2c1281..a884706c5 100644 --- a/examples/finetune/README.md +++ b/examples/finetune/README.md @@ -61,7 +61,7 @@ For example to apply 40% of the 'shakespeare' LORA adapter, 80% of the 'bible' L --lora lora-open-llama-3b-v2-q8_0-yet-another-one-LATEST.bin ``` -The scale numbers don't need to add up to one, and you can also use numbers greater than 1 to further increase the influence of an adapter. But making the values to big will sometimes result in worse output. Play around to find good values. +The scale numbers don't need to add up to one, and you can also use numbers greater than 1 to further increase the influence of an adapter. But making the values too big will sometimes result in worse output. Play around to find good values. Gradient checkpointing reduces the memory requirements by ~50% but increases the runtime. If you have enough RAM, you can make finetuning a bit faster by disabling checkpointing with `--no-checkpointing`. From 26f3071d714f0b27ad7f021a46a66a1085480258 Mon Sep 17 00:00:00 2001 From: "Nam D. 
Tran" <42194884+namtranase@users.noreply.github.com> Date: Tue, 2 Jan 2024 16:23:38 +0700 Subject: [PATCH 345/859] py : re-enable mmap in convert hf (#4732) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * update: awq support llama-7b model * update: change order * update: benchmark results for llama2-7b * update: mistral 7b v1 benchmark * update: support 4 models * fix: Readme * update: ready for PR * update: readme * fix: readme * update: change order import * black * format code * update: work for bot mpt and awqmpt * update: readme * Rename to llm_build_ffn_mpt_awq * Formatted other files * Fixed params count * fix: remove code * update: more detail for mpt * fix: readme * fix: readme * update: change folder architecture * fix: common.cpp * fix: readme * fix: remove ggml_repeat * update: cicd * update: cicd * uppdate: remove use_awq arg * update: readme * llama : adapt plamo to new ffn ggml-ci * fix: update torch version --------- Co-authored-by: Trần Đức Nam Co-authored-by: Le Hoang Anh Co-authored-by: Georgi Gerganov --- awq-py/requirements.txt | 2 +- convert-hf-to-gguf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/awq-py/requirements.txt b/awq-py/requirements.txt index 5fe604329..991896116 100644 --- a/awq-py/requirements.txt +++ b/awq-py/requirements.txt @@ -1,2 +1,2 @@ -torch>=2.0.0 +torch>=2.1.1 transformers>=4.32.0 diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 51724c0df..203eaf64b 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -59,7 +59,7 @@ class Model: from safetensors import safe_open ctx = cast(ContextManager[Any], safe_open(self.dir_model / part_name, framework="pt", device="cpu")) else: - ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", weights_only=True)) + ctx = contextlib.nullcontext(torch.load(str(self.dir_model / part_name), map_location="cpu", mmap=True, weights_only=True)) with ctx as model_part: for name in model_part.keys(): From 5d7002d4372ebf107cfaf46fcd90df27b204f330 Mon Sep 17 00:00:00 2001 From: minarchist Date: Tue, 2 Jan 2024 04:38:15 -0600 Subject: [PATCH 346/859] server : add --override-kv parameter (#4710) * Changes to server to allow metadata override * documentation * flake.nix: expose full scope in legacyPackages * flake.nix: rocm not yet supported on aarch64, so hide the output * flake.nix: expose checks * workflows: nix-ci: init; build flake outputs * workflows: nix-ci: add a job for eval * workflows: weekly `nix flake update` * workflows: nix-flakestry: drop tag filters ...and add a job for flakehub.com * workflows: nix-ci: add a qemu job for jetsons * flake.nix: suggest the binary caches * flake.lock: update to a commit recently cached by nixpkgs-cuda-ci --------- Co-authored-by: John Co-authored-by: Someone Serge --- examples/server/server.cpp | 51 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 52d9b9768..b77d3f079 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2016,6 +2016,10 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); printf("\n"); + printf(" --override-kv KEY=TYPE:VALUE\n"); + printf(" advanced option to override model metadata by key. 
may be specified multiple times.\n"); + printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); + printf("\n"); } static void server_params_parse(int argc, char **argv, server_params &sparams, @@ -2379,6 +2383,49 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); } + else if (arg == "--override-kv") + { + if (++i >= argc) { + invalid_param = true; + break; + } + char * sep = strchr(argv[i], '='); + if (sep == nullptr || sep - argv[i] >= 128) { + fprintf(stderr, "error: Malformed KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + struct llama_model_kv_override kvo; + std::strncpy(kvo.key, argv[i], sep - argv[i]); + kvo.key[sep - argv[i]] = 0; + sep++; + if (strncmp(sep, "int:", 4) == 0) { + sep += 4; + kvo.tag = LLAMA_KV_OVERRIDE_INT; + kvo.int_value = std::atol(sep); + } else if (strncmp(sep, "float:", 6) == 0) { + sep += 6; + kvo.tag = LLAMA_KV_OVERRIDE_FLOAT; + kvo.float_value = std::atof(sep); + } else if (strncmp(sep, "bool:", 5) == 0) { + sep += 5; + kvo.tag = LLAMA_KV_OVERRIDE_BOOL; + if (std::strcmp(sep, "true") == 0) { + kvo.bool_value = true; + } else if (std::strcmp(sep, "false") == 0) { + kvo.bool_value = false; + } else { + fprintf(stderr, "error: Invalid boolean value for KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + } else { + fprintf(stderr, "error: Invalid type for KV override: %s\n", argv[i]); + invalid_param = true; + break; + } + params.kv_overrides.push_back(kvo); + } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); @@ -2386,6 +2433,10 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, exit(1); } } + if (!params.kv_overrides.empty()) { + params.kv_overrides.emplace_back(llama_model_kv_override()); + params.kv_overrides.back().key[0] = 0; + } if (invalid_param) { From 32866c5edde402f42ff4233bb89dcfcede34fd22 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 2 Jan 2024 13:28:15 +0200 Subject: [PATCH 347/859] editorconfig : fix whitespace and indentation #4710 --- examples/server/server.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index b77d3f079..e45ea809a 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2383,8 +2383,8 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); } - else if (arg == "--override-kv") - { + else if (arg == "--override-kv") + { if (++i >= argc) { invalid_param = true; break; From 83e633c27efdf0eb0ba54249e784b0ea760b1007 Mon Sep 17 00:00:00 2001 From: postmasters Date: Tue, 2 Jan 2024 03:51:28 -0800 Subject: [PATCH 348/859] llama : differentiate the KV dims in the attention (#4657) * Add n_key_dim and n_value_dim Some models use values that are not derived from `n_embd`. Also remove `n_embd_head` and `n_embd_gqa` because it is not clear which "head" is referred to (key or value). Fix issue #4648. 
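  As an illustrative worked example (figures taken from the published LLaMA-2 70B config, not from this patch): with n_embd = 8192, n_head = 64 and n_head_kv = 8, the per-head dims are n_embd_head_k = n_embd_head_v = 8192 / 64 = 128, while the cache-facing dims are n_embd_k_gqa = n_embd_v_gqa = 128 * 8 = 1024. Once d_k or d_v stop matching n_embd / n_head, none of these can be derived from n_embd alone, which is why the key/value head sizes become explicit hparams.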
* Fix `llm_build_kqv` to use `n_value_gqa` * Rebase * Rename variables * Fix llm_build_kqv to be more generic wrt n_embd_head_k * Update default values for n_embd_head_k and n_embd_head_v Co-authored-by: Georgi Gerganov * Fix llm_load_tensors: the asserts were not backcompat --------- Co-authored-by: Georgi Gerganov --- gguf-py/gguf/constants.py | 2 + gguf-py/gguf/gguf_writer.py | 6 + llama.cpp | 271 +++++++++++++++++++++++++----------- 3 files changed, 201 insertions(+), 78 deletions(-) diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index ae62cc575..f0a1c51f8 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -46,6 +46,8 @@ class Keys: HEAD_COUNT_KV = "{arch}.attention.head_count_kv" MAX_ALIBI_BIAS = "{arch}.attention.max_alibi_bias" CLAMP_KQV = "{arch}.attention.clamp_kqv" + KEY_LENGTH = "{arch}.attention.key_length" + VALUE_LENGTH = "{arch}.attention.value_length" LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon" LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon" diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 73e021607..d93aaa877 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -333,6 +333,12 @@ class GGUFWriter: def add_head_count_kv(self, count: int) -> None: self.add_uint32(Keys.Attention.HEAD_COUNT_KV.format(arch=self.arch), count) + def add_key_length(self, length: int) -> None: + self.add_uint32(Keys.Attention.KEY_LENGTH.format(arch=self.arch), length) + + def add_value_length(self, length: int) -> None: + self.add_uint32(Keys.Attention.VALUE_LENGTH.format(arch=self.arch), length) + def add_max_alibi_bias(self, bias: float) -> None: self.add_float32(Keys.Attention.MAX_ALIBI_BIAS.format(arch=self.arch), bias) diff --git a/llama.cpp b/llama.cpp index a833d4c15..704464039 100644 --- a/llama.cpp +++ b/llama.cpp @@ -245,6 +245,8 @@ enum llm_kv { LLM_KV_ATTENTION_HEAD_COUNT_KV, LLM_KV_ATTENTION_MAX_ALIBI_BIAS, LLM_KV_ATTENTION_CLAMP_KQV, + LLM_KV_ATTENTION_KEY_LENGTH, + LLM_KV_ATTENTION_VALUE_LENGTH, LLM_KV_ATTENTION_LAYERNORM_EPS, LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, @@ -297,6 +299,8 @@ static std::map LLM_KV_NAMES = { { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" }, { LLM_KV_ATTENTION_MAX_ALIBI_BIAS, "%s.attention.max_alibi_bias" }, { LLM_KV_ATTENTION_CLAMP_KQV, "%s.attention.clamp_kqv" }, + { LLM_KV_ATTENTION_KEY_LENGTH, "%s.attention.key_length" }, + { LLM_KV_ATTENTION_VALUE_LENGTH, "%s.attention.value_length" }, { LLM_KV_ATTENTION_LAYERNORM_EPS, "%s.attention.layer_norm_epsilon" }, { LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, "%s.attention.layer_norm_rms_epsilon" }, @@ -1284,6 +1288,8 @@ struct llama_hparams { uint32_t n_head_kv; uint32_t n_layer; uint32_t n_rot; + uint32_t n_embd_head_k; // dimension of keys (d_k). 
d_q is assumed to be the same, but there are n_head q heads, and only n_head_kv k-v heads + uint32_t n_embd_head_v; // dimension of values (d_v) aka n_embd_head uint32_t n_ff; uint32_t n_expert = 0; uint32_t n_expert_used = 0; @@ -1310,6 +1316,8 @@ struct llama_hparams { if (this->n_head_kv != other.n_head_kv) return true; if (this->n_layer != other.n_layer) return true; if (this->n_rot != other.n_rot) return true; + if (this->n_embd_head_k != other.n_embd_head_k) return true; + if (this->n_embd_head_v != other.n_embd_head_v) return true; if (this->n_ff != other.n_ff) return true; if (this->n_expert != other.n_expert) return true; if (this->n_expert_used != other.n_expert_used) return true; @@ -1331,12 +1339,12 @@ struct llama_hparams { return n_head/n_head_kv; } - uint32_t n_embd_head() const { - return n_embd/n_head; + uint32_t n_embd_k_gqa() const { // dimension of key embeddings across all k-v heads + return n_embd_head_k * n_head_kv; } - uint32_t n_embd_gqa() const { - return n_embd/n_gqa(); + uint32_t n_embd_v_gqa() const { // dimension of value embeddings across all k-v heads + return n_embd_head_v * n_head_kv; } }; @@ -1645,8 +1653,9 @@ static bool llama_kv_cache_init( uint32_t n_ctx, int n_gpu_layers, bool offload) { - const uint32_t n_embd = hparams.n_embd_gqa(); - const uint32_t n_layer = hparams.n_layer; + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const uint32_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const uint32_t n_layer = hparams.n_layer; cache.has_shift = false; @@ -1677,8 +1686,8 @@ static bool llama_kv_cache_init( const int i_gpu_start = (int) n_layer - n_gpu_layers; for (int i = 0; i < (int) n_layer; i++) { - ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd*n_ctx); - ggml_tensor * v = ggml_new_tensor_1d(cache.ctx, vtype, n_embd*n_ctx); + ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd_k_gqa*n_ctx); + ggml_tensor * v = ggml_new_tensor_1d(cache.ctx, vtype, n_embd_v_gqa*n_ctx); ggml_format_name(k, "cache_k_l%d", i); ggml_format_name(v, "cache_v_l%d", i); cache.k_l.push_back(k); @@ -2672,6 +2681,12 @@ static void llm_load_hparams( // gpt-j n_rot = rotary_dim } + hparams.n_embd_head_k = hparams.n_embd / hparams.n_head; + ml.get_key(LLM_KV_ATTENTION_KEY_LENGTH, hparams.n_embd_head_k, false); + + hparams.n_embd_head_v = hparams.n_embd / hparams.n_head; + ml.get_key(LLM_KV_ATTENTION_VALUE_LENGTH, hparams.n_embd_head_v, false); + // arch-specific KVs switch (model.arch) { case LLM_ARCH_LLAMA: @@ -3082,8 +3097,12 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: n_head = %u\n", __func__, hparams.n_head); LLAMA_LOG_INFO("%s: n_head_kv = %u\n", __func__, hparams.n_head_kv); LLAMA_LOG_INFO("%s: n_layer = %u\n", __func__, hparams.n_layer); - LLAMA_LOG_INFO("%s: n_rot = %u\n", __func__, hparams.n_rot); // a.k.a. 
n_embd_head, n_head_dim + LLAMA_LOG_INFO("%s: n_rot = %u\n", __func__, hparams.n_rot); + LLAMA_LOG_INFO("%s: n_embd_head_k = %u\n", __func__, hparams.n_embd_head_k); + LLAMA_LOG_INFO("%s: n_embd_head_v = %u\n", __func__, hparams.n_embd_head_v); LLAMA_LOG_INFO("%s: n_gqa = %u\n", __func__, hparams.n_gqa()); + LLAMA_LOG_INFO("%s: n_embd_k_gqa = %u\n", __func__, hparams.n_embd_k_gqa()); + LLAMA_LOG_INFO("%s: n_embd_v_gqa = %u\n", __func__, hparams.n_embd_v_gqa()); LLAMA_LOG_INFO("%s: f_norm_eps = %.1e\n", __func__, hparams.f_norm_eps); LLAMA_LOG_INFO("%s: f_norm_rms_eps = %.1e\n", __func__, hparams.f_norm_rms_eps); LLAMA_LOG_INFO("%s: f_clamp_kqv = %.1e\n", __func__, hparams.f_clamp_kqv); @@ -3173,10 +3192,11 @@ static bool llm_load_tensors( // create tensors for the weights { - const int64_t n_embd = hparams.n_embd; - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - const int64_t n_layer = hparams.n_layer; - const int64_t n_vocab = hparams.n_vocab; + const int64_t n_embd = hparams.n_embd; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + const int64_t n_layer = hparams.n_layer; + const int64_t n_vocab = hparams.n_vocab; const auto tn = LLM_TN(model.arch); switch (model.arch) { @@ -3202,7 +3222,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3270,7 +3293,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3318,7 +3344,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3368,7 +3397,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3420,7 +3452,11 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); + const int i_gpu_start = n_layer - n_gpu_layers; model.layers.resize(n_layer); for (uint32_t i = 0; i < n_layer; ++i) { @@ -3469,7 +3505,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, 
tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3520,7 +3559,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3567,7 +3609,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3665,7 +3710,10 @@ static bool llm_load_tensors( model.output_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3714,7 +3762,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -3761,7 +3812,10 @@ static bool llm_load_tensors( model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); } - const uint32_t n_ff = hparams.n_ff; + const uint32_t n_ff = hparams.n_ff; + const int64_t n_embd_gqa = n_embd_v_gqa; + GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); const int i_gpu_start = n_layer - n_gpu_layers; @@ -4000,8 +4054,8 @@ static struct ggml_tensor * llm_build_inp_embd( return inpL; } -// Persimmon: n_rot = n_embd_head/2 -// Other: n_rot = n_embd_head +// Persimmon: n_rot = n_embd_head_k/2 +// Other: n_rot = n_embd_head_k static void llm_build_k_shift( struct ggml_context * ctx, const llama_hparams & hparams, @@ -4014,17 +4068,17 @@ static void llm_build_k_shift( float freq_base, float freq_scale, const llm_build_cb & cb) { - const int64_t n_layer = hparams.n_layer; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_gqa = hparams.n_embd_gqa(); - const int64_t n_embd_head = hparams.n_embd_head(); - const int32_t n_orig_ctx = cparams.n_yarn_orig_ctx; - const float ext_factor = cparams.yarn_ext_factor; - const float attn_factor = cparams.yarn_attn_factor; - const float beta_fast = cparams.yarn_beta_fast; - const float beta_slow = cparams.yarn_beta_slow; + const int64_t n_layer = hparams.n_layer; + const int64_t n_head_kv = hparams.n_head_kv; + const int64_t n_embd_head_k = hparams.n_embd_head_k; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const 
int32_t n_orig_ctx = cparams.n_yarn_orig_ctx; + const float ext_factor = cparams.yarn_ext_factor; + const float attn_factor = cparams.yarn_attn_factor; + const float beta_fast = cparams.yarn_beta_fast; + const float beta_slow = cparams.yarn_beta_slow; - GGML_ASSERT(n_embd_head % n_rot == 0); + GGML_ASSERT(n_embd_head_k % n_rot == 0); struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_ctx); cb(K_shift, "K_shift", -1); @@ -4042,9 +4096,9 @@ static void llm_build_k_shift( // we rotate only the first n_rot dimensions ggml_rope_custom_inplace(ctx, ggml_view_3d(ctx, kv.k_l[il], - n_embd_head, n_head_kv, n_ctx, - ggml_row_size(kv.k_l[il]->type, n_embd_head), - ggml_row_size(kv.k_l[il]->type, n_embd_gqa), + n_embd_head_k, n_head_kv, n_ctx, + ggml_row_size(kv.k_l[il]->type, n_embd_head_k), + ggml_row_size(kv.k_l[il]->type, n_embd_k_gqa), 0), K_shift, n_rot, rope_type, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); @@ -4065,18 +4119,19 @@ static void llm_build_kv_store( int32_t kv_head, const llm_build_cb & cb, int64_t il) { - const int64_t n_embd_gqa = hparams.n_embd_gqa(); + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); // compute the transposed [n_tokens, n_embd] V matrix - struct ggml_tensor * v_cur_t = ggml_transpose(ctx, ggml_reshape_2d(ctx, v_cur, n_embd_gqa, n_tokens)); + struct ggml_tensor * v_cur_t = ggml_transpose(ctx, ggml_reshape_2d(ctx, v_cur, n_embd_v_gqa, n_tokens)); //struct ggml_tensor * v_cur_t = ggml_transpose(ctx, v_cur); // TODO: reshape above is likely not needed cb(v_cur_t, "v_cur_t", il); - struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k_l[il], n_tokens*n_embd_gqa, - (ggml_row_size(kv.k_l[il]->type, n_embd_gqa))*kv_head); + struct ggml_tensor * k_cache_view = ggml_view_1d(ctx, kv.k_l[il], n_tokens*n_embd_k_gqa, + (ggml_row_size(kv.k_l[il]->type, n_embd_k_gqa))*kv_head); cb(k_cache_view, "k_cache_view", il); - struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v_l[il], n_tokens, n_embd_gqa, + struct ggml_tensor * v_cache_view = ggml_view_2d(ctx, kv.v_l[il], n_tokens, n_embd_v_gqa, ( n_ctx)*ggml_element_size(kv.v_l[il]), (kv_head)*ggml_element_size(kv.v_l[il])); cb(v_cache_view, "v_cache_view", il); @@ -4226,20 +4281,20 @@ static struct ggml_tensor * llm_build_kqv( float kq_scale, const llm_build_cb & cb, int il) { - const int64_t n_embd = hparams.n_embd; - const int64_t n_head = hparams.n_head; - const int64_t n_head_kv = hparams.n_head_kv; - const int64_t n_embd_head = hparams.n_embd_head(); - const int64_t n_embd_gqa = hparams.n_embd_gqa(); + const int64_t n_head = hparams.n_head; + const int64_t n_head_kv = hparams.n_head_kv; + const int64_t n_embd_head_k = hparams.n_embd_head_k; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int64_t n_embd_head_v = hparams.n_embd_head_v; struct ggml_tensor * q = ggml_permute(ctx, q_cur, 0, 2, 1, 3); cb(q, "q", il); struct ggml_tensor * k = ggml_view_3d(ctx, kv.k_l[il], - n_embd_head, n_kv, n_head_kv, - ggml_row_size(kv.k_l[il]->type, n_embd_gqa), - ggml_row_size(kv.k_l[il]->type, n_embd_head), + n_embd_head_k, n_kv, n_head_kv, + ggml_row_size(kv.k_l[il]->type, n_embd_k_gqa), + ggml_row_size(kv.k_l[il]->type, n_embd_head_k), 0); cb(k, "k", il); @@ -4278,9 +4333,9 @@ static struct ggml_tensor * llm_build_kqv( // split cached v into n_head heads struct ggml_tensor * v = ggml_view_3d(ctx, kv.v_l[il], - n_kv, n_embd_head, n_head_kv, + n_kv, n_embd_head_v, n_head_kv, 
ggml_element_size(kv.v_l[il])*n_ctx, - ggml_element_size(kv.v_l[il])*n_ctx*n_embd_head, + ggml_element_size(kv.v_l[il])*n_ctx*n_embd_head_v, 0); cb(v, "v", il); @@ -4290,7 +4345,7 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * kqv_merged = ggml_permute(ctx, kqv, 0, 2, 1, 3); cb(kqv_merged, "kqv_merged", il); - struct ggml_tensor * cur = ggml_cont_2d(ctx, kqv_merged, n_embd, n_tokens); + struct ggml_tensor * cur = ggml_cont_2d(ctx, kqv_merged, n_embd_head_k*n_head, n_tokens); cb(cur, "kqv_merged_cont", il); cur = ggml_mul_mat(ctx, wo, cur); @@ -4317,8 +4372,10 @@ struct llm_build_context { const int64_t n_ctx; // user-specified context size (can be different from n_ctx_train) const int64_t n_head; const int64_t n_head_kv; - const int64_t n_embd_head; - const int64_t n_embd_gqa; + const int64_t n_embd_head_k; + const int64_t n_embd_k_gqa; + const int64_t n_embd_head_v; + const int64_t n_embd_v_gqa; const int64_t n_expert; const int64_t n_expert_used; @@ -4360,8 +4417,10 @@ struct llm_build_context { n_ctx (cparams.n_ctx), n_head (hparams.n_head), n_head_kv (hparams.n_head_kv), - n_embd_head (hparams.n_embd_head()), - n_embd_gqa (hparams.n_embd_gqa()), + n_embd_head_k (hparams.n_embd_head_k), + n_embd_k_gqa (hparams.n_embd_k_gqa()), + n_embd_head_v (hparams.n_embd_head_v), + n_embd_v_gqa (hparams.n_embd_v_gqa()), n_expert (hparams.n_expert), n_expert_used (hparams.n_expert_used), freq_base (cparams.rope_freq_base), @@ -4404,6 +4463,8 @@ struct llm_build_context { struct ggml_cgraph * build_llama() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; @@ -4588,6 +4649,9 @@ struct llm_build_context { struct ggml_cgraph * build_baichuan() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4705,6 +4769,11 @@ struct llm_build_context { struct ggml_cgraph * build_falcon() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4824,6 +4893,11 @@ struct llm_build_context { struct ggml_cgraph * build_starcoder() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * pos; struct ggml_tensor * inpL; @@ -4920,7 +4994,12 @@ struct llm_build_context { struct ggml_cgraph * build_persimmon() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); - const int64_t n_rot = n_embd_head / 2; + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + + const int64_t n_rot = n_embd_head_k / 2; struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5129,6 +5208,11 @@ struct llm_build_context { struct 
ggml_cgraph * build_refact() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5217,6 +5301,11 @@ struct llm_build_context { struct ggml_cgraph * build_bloom() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5308,6 +5397,11 @@ struct llm_build_context { struct ggml_cgraph * build_mpt() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5403,6 +5497,9 @@ struct llm_build_context { struct ggml_cgraph * build_stablelm() { struct ggml_cgraph * gf = ggml_new_graph(ctx0); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5513,6 +5610,9 @@ struct llm_build_context { struct ggml_cgraph * build_qwen() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5624,6 +5724,11 @@ struct llm_build_context { struct ggml_cgraph * build_phi2() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * attn_norm_output; struct ggml_tensor * ffn_output; @@ -5736,6 +5841,9 @@ struct llm_build_context { struct ggml_cgraph * build_plamo() { struct ggml_cgraph * gf = ggml_new_graph(ctx0); + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5840,6 +5948,11 @@ struct llm_build_context { struct ggml_cgraph * build_gpt2() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_gqa == n_embd); + struct ggml_tensor * cur; struct ggml_tensor * pos; struct ggml_tensor * inpL; @@ -9627,8 +9740,8 @@ struct llama_context * llama_new_context_with_model( const ggml_type type_k = params.type_k; const ggml_type type_v = params.type_v; - GGML_ASSERT(hparams.n_embd_head() % ggml_blck_size(type_k) == 0); - GGML_ASSERT(hparams.n_embd_head() % ggml_blck_size(type_v) == 0); + GGML_ASSERT(hparams.n_embd_head_k % ggml_blck_size(type_k) == 0); + GGML_ASSERT(hparams.n_embd_head_v % ggml_blck_size(type_v) == 0); // reserve memory for context buffers if (!hparams.vocab_only) { @@ -10172,9 +10285,10 @@ static void 
llama_copy_state_data_internal(struct llama_context * ctx, llama_dat const auto & hparams = ctx->model.hparams; const auto & cparams = ctx->cparams; - const auto n_layer = hparams.n_layer; - const auto n_embd = hparams.n_embd_gqa(); - const auto n_ctx = cparams.n_ctx; + const auto n_layer = hparams.n_layer; + const auto n_embd_k_gqa = hparams.n_embd_k_gqa(); + const auto n_embd_v_gqa = hparams.n_embd_v_gqa(); + const auto n_ctx = cparams.n_ctx; const size_t kv_buf_size = ggml_backend_buffer_get_size(kv_self.buf); const uint32_t kv_head = kv_self.head; @@ -10196,15 +10310,15 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat std::vector vout2d(n_layer); for (int il = 0; il < (int) n_layer; ++il) { - kout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); - vout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); + kout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd_k_gqa, kv_head); + vout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd_v_gqa); ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], - n_embd, kv_head, - elt_size*n_embd, 0); + n_embd_k_gqa, kv_head, + elt_size*n_embd_k_gqa, 0); ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], - kv_head, n_embd, + kv_head, n_embd_v_gqa, elt_size*n_ctx, 0); ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d[il])); @@ -10311,9 +10425,10 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { const auto & hparams = ctx->model.hparams; const auto & cparams = ctx->cparams; - const int n_layer = hparams.n_layer; - const int n_embd = hparams.n_embd_gqa(); - const int n_ctx = cparams.n_ctx; + const int n_layer = hparams.n_layer; + const int n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int n_embd_v_gqa = hparams.n_embd_v_gqa(); + const int n_ctx = cparams.n_ctx; size_t kv_buf_size; uint32_t kv_head; @@ -10337,15 +10452,15 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { std::vector vin2d(n_layer); for (int il = 0; il < n_layer; ++il) { - kin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd, kv_head); - vin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd); + kin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd_k_gqa, kv_head); + vin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd_v_gqa); ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], - n_embd, kv_head, - elt_size*n_embd, 0); + n_embd_k_gqa, kv_head, + elt_size*n_embd_k_gqa, 0); ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], - kv_head, n_embd, + kv_head, n_embd_v_gqa, elt_size*n_ctx, 0); ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d[il], k2d)); From 0040d42eeb237197054cc7790df5776eacfa608e Mon Sep 17 00:00:00 2001 From: Marcus Dunn <51931484+MarcusDunn@users.noreply.github.com> Date: Tue, 2 Jan 2024 06:15:16 -0800 Subject: [PATCH 349/859] llama : replace all API facing `int`'s with `int32_t` (#4577) * replaced all API facing `int`'s with `int32_t` * formatting and missed `int` in `llama_token_to_piece` --- llama.cpp | 50 +++++++++++++++++++++---------------------- llama.h | 63 +++++++++++++++++++++++++++---------------------------- 2 files changed, 56 insertions(+), 57 deletions(-) diff --git a/llama.cpp b/llama.cpp index 704464039..2e34cb395 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8030,7 +8030,7 @@ void llama_sample_softmax(struct llama_context * ctx, llama_token_data_array * c } } 
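// top-k sampling keeps only the k most probable candidates (k is clamped to at
// least min_keep below); since k is bounded by the vocab size it always fits in
// 32 bits, and the fixed-width int32_t makes the public API's integer width explicit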
-void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * candidates, int k, size_t min_keep) { +void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * candidates, int32_t k, size_t min_keep) { const int64_t t_start_sample_us = ggml_time_us(); k = std::max(k, (int) min_keep); @@ -8390,7 +8390,7 @@ void llama_sample_classifier_free_guidance( } } -llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int m, float * mu) { +llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int32_t m, float * mu) { GGML_ASSERT(ctx); auto N = float(llama_n_vocab(llama_get_model(ctx))); @@ -9598,7 +9598,7 @@ struct llama_model_quantize_params llama_model_quantize_default_params() { return result; } -int llama_max_devices(void) { +int32_t llama_max_devices(void) { return LLAMA_MAX_DEVICES; } @@ -9909,15 +9909,15 @@ enum llama_vocab_type llama_vocab_type(const struct llama_model * model) { return model->vocab.type; } -int llama_n_vocab(const struct llama_model * model) { +int32_t llama_n_vocab(const struct llama_model * model) { return model->vocab.id_to_token.size(); } -int llama_n_ctx_train(const struct llama_model * model) { +int32_t llama_n_ctx_train(const struct llama_model * model) { return model->hparams.n_ctx_train; } -int llama_n_embd(const struct llama_model * model) { +int32_t llama_n_embd(const struct llama_model * model) { return model->hparams.n_embd; } @@ -9925,7 +9925,7 @@ float llama_rope_freq_scale_train(const struct llama_model * model) { return model->hparams.rope_freq_scale_train; } -int llama_model_meta_val_str(const struct llama_model * model, const char * key, char * buf, size_t buf_size) { +int32_t llama_model_meta_val_str(const struct llama_model * model, const char * key, char * buf, size_t buf_size) { const auto & it = model->gguf_kv.find(key); if (it == model->gguf_kv.end()) { if (buf_size > 0) { @@ -9936,11 +9936,11 @@ int llama_model_meta_val_str(const struct llama_model * model, const char * key, return snprintf(buf, buf_size, "%s", it->second.c_str()); } -int llama_model_meta_count(const struct llama_model * model) { +int32_t llama_model_meta_count(const struct llama_model * model) { return (int)model->gguf_kv.size(); } -int llama_model_meta_key_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size) { +int32_t llama_model_meta_key_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size) { if (i < 0 || i >= (int)model->gguf_kv.size()) { if (buf_size > 0) { buf[0] = '\0'; @@ -9952,7 +9952,7 @@ int llama_model_meta_key_by_index(const struct llama_model * model, int i, char return snprintf(buf, buf_size, "%s", it->first.c_str()); } -int llama_model_meta_val_str_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size) { +int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int32_t i, char * buf, size_t buf_size) { if (i < 0 || i >= (int)model->gguf_kv.size()) { if (buf_size > 0) { buf[0] = '\0'; @@ -9964,7 +9964,7 @@ int llama_model_meta_val_str_by_index(const struct llama_model * model, int i, c return snprintf(buf, buf_size, "%s", it->second.c_str()); } -int llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { +int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { return snprintf(buf, buf_size, "%s %s %s", 
llama_model_arch_name(model->arch).c_str(), llama_model_type_name(model->type), @@ -9991,7 +9991,7 @@ struct ggml_tensor * llama_get_model_tensor(struct llama_model * model, const ch return ggml_get_tensor(model->ctx, name); } -int llama_model_quantize( +uint32_t llama_model_quantize( const char * fname_inp, const char * fname_out, const llama_model_quantize_params * params) { @@ -10004,7 +10004,7 @@ int llama_model_quantize( } } -int llama_apply_lora_from_file(struct llama_context * ctx, const char * path_lora, float scale, const char * path_base_model, int n_threads) { +int32_t llama_apply_lora_from_file(struct llama_context * ctx, const char * path_lora, float scale, const char * path_base_model, int32_t n_threads) { try { return llama_apply_lora_from_file_internal(ctx->model, path_lora, scale, path_base_model, n_threads); } catch (const std::exception & err) { @@ -10013,7 +10013,7 @@ int llama_apply_lora_from_file(struct llama_context * ctx, const char * path_lor } } -int llama_model_apply_lora_from_file(const struct llama_model * model, const char * path_lora, float scale, const char * path_base_model, int n_threads) { +int32_t llama_model_apply_lora_from_file(const struct llama_model * model, const char * path_lora, float scale, const char * path_base_model, int32_t n_threads) { try { return llama_apply_lora_from_file_internal(*model, path_lora, scale, path_base_model, n_threads); } catch (const std::exception & err) { @@ -10111,7 +10111,7 @@ void llama_kv_cache_view_update(const struct llama_context * ctx, struct llama_k } } -int llama_get_kv_cache_token_count(const struct llama_context * ctx) { +int32_t llama_get_kv_cache_token_count(const struct llama_context * ctx) { int result = 0; for (uint32_t i = 0; i < ctx->kv_self.size; i++) { @@ -10121,7 +10121,7 @@ int llama_get_kv_cache_token_count(const struct llama_context * ctx) { return result; } -int llama_get_kv_cache_used_cells(const struct llama_context * ctx) { +int32_t llama_get_kv_cache_used_cells(const struct llama_context * ctx) { return ctx->kv_self.used; } @@ -10603,7 +10603,7 @@ int llama_eval( struct llama_context * ctx, llama_token * tokens, int32_t n_tokens, - int n_past) { + int32_t n_past) { llama_kv_cache_seq_rm(ctx->kv_self, -1, n_past, -1); const int ret = llama_decode_internal(*ctx, llama_batch_get_one(tokens, n_tokens, n_past, 0)); @@ -10618,7 +10618,7 @@ int llama_eval_embd( struct llama_context * ctx, float * embd, int32_t n_tokens, - int n_past) { + int32_t n_past) { llama_kv_cache_seq_rm(ctx->kv_self, -1, n_past, -1); llama_batch batch = { n_tokens, nullptr, embd, nullptr, nullptr, nullptr, nullptr, n_past, 1, 0, }; @@ -10689,7 +10689,7 @@ void llama_batch_free(struct llama_batch batch) { if (batch.logits) free(batch.logits); } -int llama_decode( +int32_t llama_decode( struct llama_context * ctx, struct llama_batch batch) { const int ret = llama_decode_internal(*ctx, batch); @@ -10737,11 +10737,11 @@ llama_token llama_token_nl(const struct llama_model * model) { return model->vocab.linefeed_id; } -int llama_add_bos_token(const struct llama_model * model) { +int32_t llama_add_bos_token(const struct llama_model * model) { return model->vocab.special_add_bos; } -int llama_add_eos_token(const struct llama_model * model) { +int32_t llama_add_eos_token(const struct llama_model * model) { return model->vocab.special_add_eos; } @@ -10761,12 +10761,12 @@ llama_token llama_token_eot(const struct llama_model * model) { return model->vocab.special_eot_id; } -int llama_tokenize( +int32_t llama_tokenize( const struct 
llama_model * model, const char * text, - int text_len, + int32_t text_len, llama_token * tokens, - int n_max_tokens, + int32_t n_max_tokens, bool add_bos, bool special) { auto res = llama_tokenize_internal(model->vocab, std::string(text, text_len), add_bos, special); @@ -10794,7 +10794,7 @@ static std::string llama_decode_text(const std::string & text) { } // does not write null-terminator to buf -int llama_token_to_piece(const struct llama_model * model, llama_token token, char * buf, int length) { +int32_t llama_token_to_piece(const struct llama_model * model, llama_token token, char * buf, int32_t length) { if (0 <= token && token < llama_n_vocab(model)) { switch (llama_vocab_get_type(model->vocab)) { case LLAMA_VOCAB_TYPE_SPM: { diff --git a/llama.h b/llama.h index af76bae2d..461d4604a 100644 --- a/llama.h +++ b/llama.h @@ -226,7 +226,7 @@ extern "C" { // model quantization parameters typedef struct llama_model_quantize_params { - int nthread; // number of threads to use for quantizing, if <=0 will use std::thread::hardware_concurrency() + int32_t nthread; // number of threads to use for quantizing, if <=0 will use std::thread::hardware_concurrency() enum llama_ftype ftype; // quantize to this llama_ftype bool allow_requantize; // allow quantizing non-f32/f16 tensors bool quantize_output_tensor; // quantize output.weight @@ -310,21 +310,20 @@ extern "C" { LLAMA_API int64_t llama_time_us(void); - LLAMA_API int llama_max_devices (void); + LLAMA_API int32_t llama_max_devices(void); LLAMA_API bool llama_mmap_supported (void); LLAMA_API bool llama_mlock_supported(void); LLAMA_API const struct llama_model * llama_get_model(const struct llama_context * ctx); - // TODO: become more consistent with returned int types across the API LLAMA_API uint32_t llama_n_ctx (const struct llama_context * ctx); LLAMA_API uint32_t llama_n_batch (const struct llama_context * ctx); LLAMA_API enum llama_vocab_type llama_vocab_type(const struct llama_model * model); - LLAMA_API int llama_n_vocab (const struct llama_model * model); - LLAMA_API int llama_n_ctx_train(const struct llama_model * model); - LLAMA_API int llama_n_embd (const struct llama_model * model); + LLAMA_API int32_t llama_n_vocab (const struct llama_model * model); + LLAMA_API int32_t llama_n_ctx_train(const struct llama_model * model); + LLAMA_API int32_t llama_n_embd (const struct llama_model * model); // Get the model's RoPE frequency scaling factor LLAMA_API float llama_rope_freq_scale_train(const struct llama_model * model); @@ -335,19 +334,19 @@ extern "C" { // - GGUF array values are not supported by these functions // Get metadata value as a string by key name - LLAMA_API int llama_model_meta_val_str(const struct llama_model * model, const char * key, char * buf, size_t buf_size); + LLAMA_API int32_t llama_model_meta_val_str(const struct llama_model * model, const char * key, char * buf, size_t buf_size); // Get the number of metadata key/value pairs - LLAMA_API int llama_model_meta_count(const struct llama_model * model); + LLAMA_API int32_t llama_model_meta_count(const struct llama_model * model); // Get metadata key name by index - LLAMA_API int llama_model_meta_key_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size); + LLAMA_API int32_t llama_model_meta_key_by_index(const struct llama_model * model, int32_t i, char * buf, size_t buf_size); // Get metadata value as a string by index - LLAMA_API int llama_model_meta_val_str_by_index(const struct llama_model * model, int i, char * buf, size_t buf_size); + 
LLAMA_API int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int32_t i, char * buf, size_t buf_size); // Get a string describing the model type - LLAMA_API int llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size); + LLAMA_API int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size); // Returns the total size of all the tensors in the model in bytes LLAMA_API uint64_t llama_model_size(const struct llama_model * model); @@ -359,7 +358,7 @@ extern "C" { LLAMA_API struct ggml_tensor * llama_get_model_tensor(struct llama_model * model, const char * name); // Returns 0 on success - LLAMA_API int llama_model_quantize( + LLAMA_API uint32_t llama_model_quantize( const char * fname_inp, const char * fname_out, const llama_model_quantize_params * params); @@ -370,20 +369,20 @@ extern "C" { // The model needs to be reloaded before applying a new adapter, otherwise the adapter // will be applied on top of the previous one // Returns 0 on success - LLAMA_API DEPRECATED(int llama_apply_lora_from_file( + LLAMA_API DEPRECATED(int32_t llama_apply_lora_from_file( struct llama_context * ctx, const char * path_lora, float scale, const char * path_base_model, - int n_threads), + int32_t n_threads), "use llama_model_apply_lora_from_file instead"); - LLAMA_API int llama_model_apply_lora_from_file( + LLAMA_API int32_t llama_model_apply_lora_from_file( const struct llama_model * model, const char * path_lora, float scale, const char * path_base_model, - int n_threads); + int32_t n_threads); // // KV cache @@ -439,10 +438,10 @@ extern "C" { // Returns the number of tokens in the KV cache (slow, use only for debug) // If a KV cell has multiple sequences assigned to it, it will be counted multiple times - LLAMA_API int llama_get_kv_cache_token_count(const struct llama_context * ctx); + LLAMA_API int32_t llama_get_kv_cache_token_count(const struct llama_context * ctx); // Returns the number of used KV cells (i.e. have at least one sequence assigned to them) - LLAMA_API int llama_get_kv_cache_used_cells(const struct llama_context * ctx); + LLAMA_API int32_t llama_get_kv_cache_used_cells(const struct llama_context * ctx); // Clear the KV cache LLAMA_API void llama_kv_cache_clear( @@ -533,7 +532,7 @@ extern "C" { struct llama_context * ctx, llama_token * tokens, int32_t n_tokens, - int n_past), + int32_t n_past), "use llama_decode() instead"); // Same as llama_eval, but use float matrix input directly. @@ -542,7 +541,7 @@ extern "C" { struct llama_context * ctx, float * embd, int32_t n_tokens, - int n_past), + int32_t n_past), "use llama_decode() instead"); // Return batch for single sequence of tokens starting at pos_0 @@ -574,7 +573,7 @@ extern "C" { // 0 - success // 1 - could not find a KV slot for the batch (try reducing the size of the batch or increase the context) // < 0 - error - LLAMA_API int llama_decode( + LLAMA_API int32_t llama_decode( struct llama_context * ctx, struct llama_batch batch); @@ -614,10 +613,10 @@ extern "C" { LLAMA_API llama_token llama_token_nl (const struct llama_model * model); // next-line // Returns -1 if unknown, 1 for true or 0 for false. - LLAMA_API int llama_add_bos_token(const struct llama_model * model); + LLAMA_API int32_t llama_add_bos_token(const struct llama_model * model); // Returns -1 if unknown, 1 for true or 0 for false. 
- LLAMA_API int llama_add_eos_token(const struct llama_model * model); + LLAMA_API int32_t llama_add_eos_token(const struct llama_model * model); // codellama infill tokens LLAMA_API llama_token llama_token_prefix(const struct llama_model * model); // Beginning of infill prefix @@ -635,12 +634,12 @@ extern "C" { /// @return Returns a negative number on failure - the number of tokens that would have been returned /// @param special Allow tokenizing special and/or control tokens which otherwise are not exposed and treated as plaintext. /// Does not insert a leading space. - LLAMA_API int llama_tokenize( + LLAMA_API int32_t llama_tokenize( const struct llama_model * model, const char * text, - int text_len, + int32_t text_len, llama_token * tokens, - int n_max_tokens, + int32_t n_max_tokens, bool add_bos, bool special); @@ -648,11 +647,11 @@ extern "C" { // Uses the vocabulary in the provided context. // Does not write null terminator to the buffer. // User code is responsible to remove the leading whitespace of the first non-BOS token when decoding multiple tokens. - LLAMA_API int llama_token_to_piece( + LLAMA_API int32_t llama_token_to_piece( const struct llama_model * model, llama_token token, char * buf, - int length); + int32_t length); // // Grammar @@ -704,7 +703,7 @@ extern "C" { LLAMA_API void llama_sample_top_k( struct llama_context * ctx, llama_token_data_array * candidates, - int k, + int32_t k, size_t min_keep); /// @details Nucleus sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751 @@ -763,7 +762,7 @@ extern "C" { llama_token_data_array * candidates, float tau, float eta, - int m, + int32_t m, float * mu); /// @details Mirostat 2.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words. @@ -836,8 +835,8 @@ extern "C" { llama_beam_search_callback_fn_t callback, void * callback_data, size_t n_beams, - int n_past, - int n_predict); + int32_t n_past, + int32_t n_predict); // Performance information LLAMA_API struct llama_timings llama_get_timings(struct llama_context * ctx); From 540938f8904707dd74cb3be18495f853b312e72f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 2 Jan 2024 16:26:45 +0200 Subject: [PATCH 350/859] llama : llama_model_desc print number of experts --- llama.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 2e34cb395..3bb056dba 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9965,8 +9965,9 @@ int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int3 } int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { - return snprintf(buf, buf_size, "%s %s %s", + return snprintf(buf, buf_size, "%s %s%s %s", llama_model_arch_name(model->arch).c_str(), + model->hparams.n_expert > 0 ? 
(std::to_string(model->hparams.n_expert) + "x").c_str() : "", llama_model_type_name(model->type), llama_model_ftype_name(model->ftype).c_str()); } From 0ef3ca2ac62016c0c545de1c89dc2e3e130f4a99 Mon Sep 17 00:00:00 2001 From: Phil H <5756783+phiharri@users.noreply.github.com> Date: Tue, 2 Jan 2024 15:48:49 +0000 Subject: [PATCH 351/859] server : add token counts to html footer (#4738) * server: add token counts to stats * server: generate hpp --------- Co-authored-by: phiharri --- examples/server/completion.js.hpp | 693 ++--- examples/server/index.html.hpp | 4591 +++++++++++++++-------------- examples/server/index.js.hpp | 3693 +++++++++++------------ examples/server/public/index.html | 4 +- 4 files changed, 4529 insertions(+), 4452 deletions(-) diff --git a/examples/server/completion.js.hpp b/examples/server/completion.js.hpp index f0a071a69..fe5f81228 100644 --- a/examples/server/completion.js.hpp +++ b/examples/server/completion.js.hpp @@ -74,355 +74,376 @@ unsigned char completion_js[] = { 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x27, 0x41, 0x63, 0x63, 0x65, 0x70, 0x74, 0x27, 0x3a, 0x20, 0x27, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x2d, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x27, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x0a, - 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, - 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x62, 0x6f, 0x64, - 0x79, 0x2e, 0x67, 0x65, 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, - 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, - 0x20, 0x54, 0x65, 0x78, 0x74, 0x44, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, - 0x28, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, - 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, - 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, - 0x20, 0x42, 0x75, 0x66, 0x66, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, - 0x70, 0x61, 0x72, 0x74, 0x69, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x72, 0x65, - 0x61, 0x64, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, - 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, - 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, - 0x65, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, - 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, - 0x64, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x6f, - 0x6e, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x41, 0x64, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x6c, - 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 
0x72, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, - 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x2b, 0x20, - 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x64, 0x65, 0x63, 0x6f, - 0x64, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x20, 0x69, 0x66, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x63, 0x68, - 0x61, 0x72, 0x61, 0x63, 0x74, 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x61, - 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x0a, + 0x74, 0x2d, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x20, + 0x3f, 0x20, 0x7b, 0x27, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x60, 0x42, 0x65, 0x61, + 0x72, 0x65, 0x72, 0x20, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x7d, 0x60, 0x7d, 0x20, + 0x3a, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x3a, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x0a, 0x20, 0x20, 0x7d, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x62, 0x6f, 0x64, 0x79, 0x2e, 0x67, 0x65, + 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x54, 0x65, 0x78, + 0x74, 0x44, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x42, 0x75, 0x66, + 0x66, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x61, 0x72, 0x74, + 0x69, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x72, 0x65, 0x61, 0x64, 0x20, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x6f, 0x6e, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x72, + 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x41, + 0x64, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, + 0x76, 0x65, 0x72, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, + 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x6c, 0x65, 0x66, + 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x2b, 0x20, 0x64, 0x65, 0x63, 0x6f, + 0x64, 0x65, 0x72, 0x2e, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x43, 0x68, 0x65, 0x63, 0x6b, 0x20, 0x69, 0x66, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x72, 0x61, 0x63, + 0x74, 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, + 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x73, + 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, 0x65, 0x61, + 0x6b, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x65, 0x6e, 0x64, + 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x5c, 0x6e, 0x27, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, + 0x70, 0x6c, 0x69, 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, + 0x74, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x5c, 0x6e, 0x27, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x49, + 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x64, + 0x6f, 0x65, 0x73, 0x6e, 0x27, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x20, 0x77, + 0x69, 0x74, 0x68, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, + 0x72, 0x65, 0x61, 0x6b, 0x2c, 0x20, 0x74, 0x68, 0x65, 0x6e, 0x20, 0x74, + 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x6c, 0x69, 0x6e, 0x65, + 0x20, 0x69, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x53, 0x74, 0x6f, 0x72, 0x65, 0x20, 0x69, 0x74, 0x20, 0x69, 0x6e, 0x20, + 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x20, + 0x62, 0x65, 0x20, 0x61, 0x64, 0x64, 0x65, 0x64, 0x20, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x6e, 0x65, 0x78, 0x74, 0x20, 0x63, 0x68, 0x75, + 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x65, 0x6e, + 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, + 0x65, 0x61, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 
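  /* annotation: the bytes around here decode completion.js's read loop.
     Chunks from response.body.getReader() are text-decoded, split on '\n',
     and an incomplete trailing line is stashed in `leftover` so it can be
     prepended to the next chunk. */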
0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, + 0x66, 0x20, 0x77, 0x65, 0x20, 0x68, 0x61, 0x76, 0x65, 0x20, 0x61, 0x20, + 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x61, + 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x65, 0x6e, 0x64, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x50, 0x61, 0x72, 0x73, 0x65, 0x20, 0x61, 0x6c, + 0x6c, 0x20, 0x73, 0x73, 0x65, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, + 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x64, 0x64, 0x20, 0x74, 0x68, 0x65, + 0x6d, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, - 0x42, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, - 0x2e, 0x65, 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x5c, - 0x6e, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x53, 0x70, 0x6c, 0x69, 0x74, 0x20, 0x74, 0x68, 0x65, - 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, - 0x65, 0x74, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x74, - 0x65, 0x78, 0x74, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x5c, - 0x6e, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x49, 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, - 0x78, 0x74, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x6e, 0x27, 0x74, 0x20, 0x65, - 0x6e, 0x64, 0x20, 0x77, 0x69, 0x74, 0x68, 0x20, 0x61, 0x20, 0x6c, 0x69, - 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x2c, 0x20, 0x74, 0x68, - 0x65, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, - 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x69, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x20, 0x69, 0x74, - 0x20, 0x69, 0x6e, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, - 0x20, 0x74, 0x6f, 0x20, 0x62, 0x65, 0x20, 0x61, 0x64, 0x64, 0x65, 0x64, - 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6e, 0x65, 0x78, 0x74, - 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x21, 0x65, 0x6e, 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, - 0x6e, 0x65, 0x42, 0x72, 0x65, 0x61, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, - 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x70, 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, - 0x65, 0x72, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, - 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, - 0x65, 0x72, 0x20, 0x69, 0x66, 0x20, 0x77, 0x65, 0x20, 0x68, 0x61, 0x76, - 0x65, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, - 0x61, 0x6b, 0x20, 0x61, 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x65, 0x6e, - 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x50, 0x61, 0x72, 0x73, - 0x65, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x73, 0x73, 0x65, 0x20, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x73, 0x20, 0x61, 
0x6e, 0x64, 0x20, 0x61, 0x64, 0x64, - 0x20, 0x74, 0x68, 0x65, 0x6d, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x67, 0x65, 0x78, 0x20, 0x3d, 0x20, - 0x2f, 0x5e, 0x28, 0x5c, 0x53, 0x2b, 0x29, 0x3a, 0x5c, 0x73, 0x28, 0x2e, - 0x2a, 0x29, 0x24, 0x2f, 0x67, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x61, 0x74, 0x63, - 0x68, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x67, 0x65, 0x78, 0x2e, 0x65, 0x78, - 0x65, 0x63, 0x28, 0x6c, 0x69, 0x6e, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6d, 0x61, - 0x74, 0x63, 0x68, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5b, - 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5b, 0x31, 0x5d, 0x5d, 0x20, 0x3d, 0x20, - 0x6d, 0x61, 0x74, 0x63, 0x68, 0x5b, 0x32, 0x5d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, - 0x6e, 0x63, 0x65, 0x20, 0x77, 0x65, 0x20, 0x6b, 0x6e, 0x6f, 0x77, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x2e, 0x63, 0x70, 0x70, 0x2c, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, - 0x20, 0x6a, 0x75, 0x73, 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x6a, 0x73, 0x6f, 0x6e, 0x20, 0x69, 0x6e, - 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, - 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, - 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, - 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, + 0x72, 0x65, 0x67, 0x65, 0x78, 0x20, 0x3d, 0x20, 0x2f, 0x5e, 0x28, 0x5c, + 0x53, 0x2b, 0x29, 0x3a, 0x5c, 0x73, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, + 0x67, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x69, 0x6e, + 0x65, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x67, 0x65, 0x78, 0x2e, 0x65, 0x78, 0x65, 0x63, 0x28, 0x6c, + 0x69, 0x6e, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5b, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x5b, 0x31, 0x5d, 0x5d, 0x20, 0x3d, 0x20, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x5b, 0x32, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x20, + 0x77, 0x65, 0x20, 0x6b, 0x6e, 0x6f, 0x77, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x20, 0x69, 0x73, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, + 0x70, 0x2c, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x6a, 0x75, 0x73, + 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 
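  /* annotation: this stretch decodes the SSE parser. Each line is matched
     against /^(\S+):\s(.*)$/gm, the `data` field is JSON.parse()d, and
     result.data.content is appended to the accumulated completion; a
     result.data.stop flag captures generation_settings and ends the loop. */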
0x64, 0x65, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6a, 0x73, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, + 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x79, 0x69, + 0x65, 0x6c, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x79, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x69, 0x66, + 0x20, 0x77, 0x65, 0x20, 0x67, 0x6f, 0x74, 0x20, 0x61, 0x20, 0x73, 0x74, + 0x6f, 0x70, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x66, 0x72, 0x6f, + 0x6d, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2c, 0x20, 0x77, 0x65, + 0x20, 0x77, 0x69, 0x6c, 0x6c, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, + 0x68, 0x65, 0x72, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, + 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, + 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x66, 0x61, + 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, - 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x79, 0x69, 0x65, 0x6c, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x79, 0x69, 0x65, 0x6c, - 0x64, 0x20, 0x72, 0x65, 0x73, 0x75, 
0x6c, 0x74, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x69, 0x66, 0x20, 0x77, 0x65, 0x20, 0x67, 0x6f, 0x74, 0x20, - 0x61, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x2c, 0x20, 0x77, 0x65, 0x20, 0x77, 0x69, 0x6c, 0x6c, 0x20, 0x62, 0x72, - 0x65, 0x61, 0x6b, 0x20, 0x68, 0x65, 0x72, 0x65, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x20, 0x3d, + 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, + 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x24, + 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x60, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x21, 0x3d, 0x3d, + 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, + 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x6e, 0x20, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x63, 0x61, + 0x6e, 0x20, 0x73, 0x75, 0x62, 0x73, 
0x63, 0x72, 0x69, 0x62, 0x65, 0x20, + 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, + 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, + 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x6e, + 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, + 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x22, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, + 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, + 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 
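  /* annotation: these bytes decode llamaEventTarget(prompt, params, config),
     which wraps llama() in an EventTarget and re-dispatches each chunk as
     "message", "generation_settings" and "timings" CustomEvents, then fires
     "done" with the accumulated content. */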
0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, - 0x73, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, - 0x72, 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, - 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, - 0x6d, 0x65, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, - 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, - 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, - 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, - 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x61, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, - 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, - 0x79, 0x6f, 0x75, 0x20, 0x63, 0x61, 0x6e, 0x20, 0x73, 0x75, 0x62, 0x63, - 0x72, 0x69, 0x62, 0x65, 0x20, 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, - 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, - 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, - 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, - 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x6a, 
0x73, 0x27, 0x0a, 0x2f, 0x2f, - 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x63, 0x6f, 0x6e, 0x6e, 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, - 0x28, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, - 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, - 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, - 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, + 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, + 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, + 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x74, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, + 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, + 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, 0x6e, 0x65, 0x22, 0x2c, 0x20, + 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x7b, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x7d, 0x29, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, + 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 
0x20, 0x74, 0x68, 0x61, 0x74, + 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x73, 0x20, 0x74, 0x6f, + 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x69, + 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x73, + 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, + 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, + 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, + 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x28, 0x28, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, + 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, - 0x67, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, - 0x7b, 0x7d, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, - 0x20, 0x7b, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, - 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, - 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, + 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, + 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x72, 0x65, + 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, + 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, + 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 
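  /* annotation: llamaPromise(prompt, params, config) decodes here; it
     accumulates chunk.data.content from llama() and resolves with the
     completed text (no streaming), rejecting on error. */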
0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, - 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, - 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x28, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, - 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, - 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, - 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, - 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, - 0x74, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, - 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, - 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, - 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, - 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, - 0x22, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, - 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, - 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, - 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, - 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, - 0x6e, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 
0x61, 0x69, - 0x6c, 0x3a, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, - 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, - 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x61, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, - 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, - 0x65, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, - 0x2e, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, - 0x6e, 0x6f, 0x74, 0x20, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, - 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, - 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, - 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, - 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, - 0x28, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, - 0x69, 0x74, 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, - 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, - 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, - 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, - 0x74, 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, - 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, - 0x6d, 0x69, 0x73, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, - 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, - 0x3d, 0x20, 0x7b, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, - 0x63, 0x20, 0x28, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, - 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, - 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, 
0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, - 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, - 0x76, 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, - 0x20, 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, - 0x2f, 0x2a, 0x2a, 0x0a, 0x20, 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, - 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, - 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x65, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, - 0x62, 0x61, 0x63, 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, - 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, - 0x61, 0x63, 0x6b, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, - 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, - 0x69, 0x6e, 0x66, 0x6f, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, - 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, - 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, - 0x20, 0x66, 0x6f, 0x72, 0x20, 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, - 0x73, 0x6f, 0x20, 0x6f, 0x6e, 0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, + 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x2f, 0x2a, 0x2a, 0x0a, 0x20, + 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, + 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 
0x20, 0x3d, - 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, - 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x61, - 0x77, 0x61, 0x69, 0x74, 0x20, 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, - 0x2f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x22, - 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, - 0x72, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a + 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x28, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, + 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, 0x74, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x69, 0x6e, 0x66, 0x6f, 0x20, + 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x77, 0x69, 0x6e, 0x64, + 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x6f, 0x20, 0x6f, 0x6e, + 0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, + 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, + 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, 0x2f, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0x29, 0x2e, 0x74, 0x68, 0x65, + 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, 0x72, 0x2e, 0x6a, 0x73, 0x6f, + 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x65, 0x6e, 0x65, 
0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a }; -unsigned int completion_js_len = 5099; +unsigned int completion_js_len = 5346; diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp index f22b77e7f..20551520e 100644 --- a/examples/server/index.html.hpp +++ b/examples/server/index.html.hpp @@ -383,2380 +383,2409 @@ unsigned char index_html[] = { 0x20, 0x30, 0x20, 0x74, 0x6f, 0x20, 0x75, 0x73, 0x65, 0x20, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x3a, 0x20, 0x30, 0x2e, - 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, + 0x39, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x3a, 0x20, 0x30, 0x2e, - 0x30, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x3a, 0x20, 0x31, 0x2e, 0x30, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x3a, - 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, + 0x20, 0x20, 0x20, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x3a, 0x20, 0x31, 0x2e, + 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, + 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, + 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, + 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x65, 0x73, 0x65, + 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, - 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, - 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, - 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, - 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, - 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, - 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, - 0x74, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2f, 0x31, - 0x2f, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, - 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x3a, 0x20, 0x35, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, - 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, - 0x74, 0x61, 
0x3a, 0x20, 0x30, 0x2e, 0x31, 0x2c, 0x20, 0x2f, 0x2f, 0x20, - 0x6c, 0x65, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x20, 0x72, 0x61, 0x74, - 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, - 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x3a, 0x20, - 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x63, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, - 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x74, 0x72, 0x75, 0x65, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, - 0x20, 0x53, 0x54, 0x41, 0x52, 0x54, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, - 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, - 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, - 0x69, 0x6e, 0x20, 0x62, 0x6f, 0x72, 0x77, 0x73, 0x65, 0x72, 0x20, 0x4c, - 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, - 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, - 0x65, 0x79, 0x20, 0x3d, 0x20, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x63, - 0x70, 0x70, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6c, 0x6f, - 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x22, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, - 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, - 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, - 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, - 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, - 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, - 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x4a, 0x53, 0x4f, - 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, - 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x61, 0x77, 0x54, - 0x65, 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, - 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 
0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, + 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, + 0x61, 0x74, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2f, + 0x31, 0x2f, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, + 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, 0x61, 0x75, 0x3a, 0x20, + 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x65, 0x74, 0x61, 0x3a, 0x20, 0x30, 0x2e, 0x31, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x6c, 0x65, 0x61, 0x72, 0x6e, 0x69, 0x6e, 0x67, 0x20, 0x72, 0x61, + 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x3a, + 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, + 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x74, 0x72, 0x75, 0x65, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, + 0x79, 0x3a, 0x20, 0x27, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x53, 0x54, 0x41, + 0x52, 0x54, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, + 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, 0x63, 0x61, + 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, - 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, - 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, - 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, - 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, - 0x29, 0x3b, 0x0a, 
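  /* annotation: this hunk re-emits examples/server/public/index.html. In the
     decoded session defaults, the variant carrying the new api_key: '' field
     (after cache_prompt: true) also spells the comment as "browsers
     LocalStorage", where the old bytes read "borwser"; the local_storage_*
     helpers decoded nearby persist data under the
     "llamacpp_server_local_storage/<tag>" key. */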
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, - 0x69, 0x74, 0x65, 0x6d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, - 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x52, 0x61, - 0x77, 0x54, 0x65, 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, - 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, - 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, - 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, - 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x75, 0x73, 0x65, - 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, - 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, - 0x7b, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x7b, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x7b, 0x7d, 0x2c, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x7b, 0x7d, 0x20, - 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x73, 0x61, - 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x20, 
0x69, 0x66, 0x20, 0x74, 0x68, 0x65, 0x72, 0x65, 0x20, - 0x61, 0x72, 0x65, 0x20, 0x61, 0x6e, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, - 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x73, - 0x74, 0x6f, 0x72, 0x65, 0x64, 0x20, 0x69, 0x6e, 0x20, 0x6f, 0x6e, 0x65, - 0x20, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x69, 0x6e, 0x20, 0x66, 0x6f, 0x72, 0x6d, 0x20, 0x6f, - 0x66, 0x20, 0x7b, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, - 0x20, 0x61, 0x6e, 0x64, 0x20, 0x7b, 0x20, 0x22, 0x73, 0x65, 0x74, 0x74, - 0x69, 0x6e, 0x67, 0x73, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3a, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, - 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x69, - 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, - 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, - 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, - 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, - 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, - 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, - 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x77, 0x65, 0x72, - 0x65, 0x20, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x66, 0x75, 0x6c, - 0x79, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, - 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x50, 0x72, 0x6f, 0x63, - 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, - 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, - 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, - 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, - 0x67, 0x28, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, - 0x72, 0x54, 0x65, 
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x6f, - 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x20, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3d, 0x20, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x63, 0x70, 0x70, 0x5f, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x22, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, - 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, - 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, - 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, - 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x64, 0x65, - 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, - 0x6f, 0x67, 0x28, 0x27, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, - 0x7a, 0x69, 0x6e, 0x67, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x61, - 0x76, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, - 0x22, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x22, 0x3a, 0x20, 0x7b, - 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, - 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, - 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, - 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, - 0x76, 0x65, 0x64, 0x55, 
0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, + 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, + 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, + 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, + 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, + 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x52, 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, + 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, + 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, + 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, + 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, + 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, + 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, + 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, + 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, + 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, + 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x69, 0x74, 0x65, + 0x6d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, - 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 
0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x52, 0x65, - 0x73, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x64, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, + 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, + 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x52, 0x61, 0x77, 0x54, 0x65, + 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, + 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, + 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, + 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, + 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, + 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x28, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x7b, 0x7d, 0x20, 0x7d, 0x20, 0x7d, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, + 0x74, 0x27, 0x73, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x20, 0x74, 0x65, 0x6d, 
0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, + 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, + 0x69, 0x66, 0x20, 0x74, 0x68, 0x65, 0x72, 0x65, 0x20, 0x61, 0x72, 0x65, + 0x20, 0x61, 0x6e, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x64, 0x20, 0x69, 0x6e, 0x20, 0x6f, 0x6e, 0x65, 0x20, 0x6f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x69, 0x6e, 0x20, 0x66, 0x6f, 0x72, 0x6d, 0x20, 0x6f, 0x66, 0x20, 0x7b, + 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, + 0x6d, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x7b, 0x20, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, + 0x73, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, 0x6d, + 0x65, 0x22, 0x3a, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, + 0x28, 0x27, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x20, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x77, 0x65, 0x72, 0x65, 0x20, 0x73, + 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x66, 0x75, 0x6c, 0x6c, 0x79, 0x20, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, + 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, + 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 
0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x6f, 0x76, 0x65, + 0x72, 0x72, 0x69, 0x64, 0x65, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, + 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, + 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x5b, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x5d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, - 0x70, 0x6c, 0x79, 0x28, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, + 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x6e, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x64, 0x65, 0x74, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, + 0x28, 0x27, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x69, + 0x6e, 0x67, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x61, 0x76, 0x69, + 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x22, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x22, 0x3a, 0x20, 0x7b, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x46, 0x72, 
0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x2c, 0x20, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, + 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, + 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x73, + 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5b, + 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x5d, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, - 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x20, - 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, - 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x73, 0x65, 0x73, 0x73, 0x69, 
0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x20, 0x7d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, + 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, + 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, - 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, - 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, - 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, - 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, - 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x67, 0x65, - 0x74, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, - 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, - 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 
0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, - 0x73, 0x74, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x75, - 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, - 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x29, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, - 0x28, 0x27, 0x4e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, - 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x75, 0x73, 0x69, 0x6e, 0x67, - 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, - 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, - 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x20, 0x77, 0x61, 0x73, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, - 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, - 0x6f, 0x6d, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, - 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, - 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, - 0x6e, 0x67, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, - 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, - 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, - 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, + 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x67, 0x65, 0x74, 0x20, + 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, + 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 
0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, + 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, + 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x72, 0x65, + 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x29, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, + 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x4e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, + 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, + 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x20, 0x77, 0x61, 0x73, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, + 0x73, 0x6f, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, + 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, 0x6e, 0x67, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x66, 0x72, + 0x6f, 0x6d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 
0x63, 0x74, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, - 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, - 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, - 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, - 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x20, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, - 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, - 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x77, 0x65, 0x20, 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, - 0x77, 0x61, 0x6e, 0x74, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, - 0x20, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, - 0x73, 0x6f, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, - 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, - 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, - 0x2b, 0x20, 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, - 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, + 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x54, 
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x20, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x2e, 0x2e, + 0x2e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, + 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, + 0x20, 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x77, 0x65, 0x20, 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, 0x77, 0x61, + 0x6e, 0x74, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x6f, + 0x76, 0x65, 0x72, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x73, 0x6f, + 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x20, 0x61, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, - 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, + 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, 0x2b, 0x20, + 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2e, 0x74, + 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, + 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, + 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, + 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, + 0x6f, 0x67, 0x28, 0x27, 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x61, + 0x73, 0x20, 0x27, 0x20, 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, + 0x61, 0x76, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, + 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, 0x6f, 0x74, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, + 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, + 0x74, 0x27, 0x2c, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6f, 0x61, + 0x64, 0x20, 0x69, 0x74, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x61, 0x70, 0x70, 0x6c, 0x79, 
0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, + 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, + 0x64, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, + 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, + 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, + 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x7b, 0x20, + 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, - 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, - 0x20, 0x61, 0x73, 0x20, 0x27, 0x20, 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, - 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, - 0x6f, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, - 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, - 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, - 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, - 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, - 0x6f, 0x61, 0x64, 0x20, 0x69, 0x74, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, - 0x61, 0x6e, 0x64, 0x20, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, - 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, - 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, - 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, - 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 
0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, - 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, - 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, - 0x69, 0x6e, 0x67, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, - 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, - 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, - 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, - 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2a, 0x20, 0x45, 0x4e, 0x44, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, - 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, - 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, - 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, - 0x69, 0x6e, 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, - 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, - 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, - 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, - 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, - 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, - 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, - 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, - 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, - 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x68, 0x61, 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, - 0x20, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, - 0x68, 0x61, 0x74, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, - 0x65, 0x64, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, - 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 
0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x20, 0x3d, 0x20, 0x28, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, - 0x20, 0x28, 0x73, 0x74, 0x72, 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, - 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, - 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, - 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x2c, 0x20, 0x2e, 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, - 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, - 0x69, 0x6e, 0x67, 0x28, 0x73, 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x41, 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, - 0x7b, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, - 0x2c, 0x20, 0x28, 0x5f, 0x2c, 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, - 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, - 0x63, 0x68, 0x61, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, - 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 
0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, - 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, - 0x6e, 0x69, 0x6e, 0x67, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, - 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, - 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, - 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, - 0x26, 0x26, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, - 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, - 0x28, 0x2f, 0x5c, 0x6e, 0x24, 0x2f, 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, - 0x75, 0x6c, 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, - 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, - 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, - 0x69, 0x73, 0x68, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, - 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, - 0x20, 0x22, 0x27, 0x2c, 0x20, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, - 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, - 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, - 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, - 0x5f, 0x69, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, - 0x20, 0x21, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, - 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, - 0x72, 0x74, 0x28, 0x22, 0x54, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x20, 0x77, 0x61, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, - 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, - 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, - 0x72, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, - 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, - 0x6e, 0x27, 0x74, 0x20, 0x62, 0x65, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, - 0x64, 0x2e, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, - 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, - 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, - 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 
0x75, 0x65, 0x20, - 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, - 0x6e, 0x64, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, - 0x6f, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, - 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, + 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, + 0x67, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, + 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, + 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x28, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, + 0x20, 0x45, 0x4e, 0x44, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, + 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, + 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, + 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, + 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, + 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, + 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6c, 0x65, 
0x6e, 0x67, 0x74, + 0x68, 0x20, 0x3e, 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, - 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, - 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x2c, 0x20, 0x5b, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, - 0x22, 0x2c, 0x20, 0x6d, 0x73, 0x67, 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, - 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, - 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, - 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, - 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, - 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, - 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 
0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, - 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, - 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, - 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x60, 0x41, - 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, - 0x6e, 0x20, 0x61, 0x20, 0x63, 0x75, 0x72, 0x69, 0x6f, 0x75, 0x73, 0x20, - 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x6e, - 0x20, 0x61, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x69, 0x61, 0x6c, 0x20, - 0x69, 0x6e, 0x74, 0x65, 0x6c, 0x6c, 0x69, 0x67, 0x65, 0x6e, 0x63, 0x65, - 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x2e, 0x20, - 0x54, 0x68, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, - 0x74, 0x20, 0x67, 0x69, 0x76, 0x65, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, - 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, - 0x64, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x6f, 0x6c, 0x69, 0x74, - 0x65, 0x20, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x73, 0x20, 0x74, 0x6f, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x27, 0x73, - 0x20, 0x71, 0x75, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x5c, - 0x6e, 0x55, 0x53, 0x45, 0x52, 0x3a, 0x5b, 0x69, 0x6d, 0x67, 0x2d, 0x31, - 0x30, 0x5d, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x7d, 0x5c, 0x6e, 0x41, 0x53, - 0x53, 0x49, 0x53, 0x54, 0x41, 0x4e, 0x54, 0x3a, 0x60, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, + 0x73, 0x74, 0x72, 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 
0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, + 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, + 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x2c, 0x20, 0x2e, 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x28, 0x73, 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, + 0x63, 0x65, 0x41, 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, + 0x28, 0x5f, 0x2c, 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, + 0x74, 0x69, 0x6e, 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, + 0x61, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, + 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, + 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, + 0x6e, 0x67, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, - 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, - 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x22, 
0x3c, 0x2f, 0x73, - 0x3e, 0x22, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x28, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, - 0x29, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, - 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, - 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, - 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x22, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, - 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, - 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, + 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, + 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, + 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, + 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, + 0x5c, 0x6e, 0x24, 0x2f, 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, + 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, + 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, + 0x68, 0x65, 0x64, 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, + 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, + 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, + 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, + 0x27, 0x2c, 0x20, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, + 0x22, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, + 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, 0x3d, + 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, + 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, 0x20, 0x21, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, + 0x64, 0x61, 0x6c, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, + 0x28, 0x22, 0x54, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x20, 0x77, 0x61, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x75, + 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, 0x72, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x70, 0x72, + 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, 0x6e, 0x27, + 0x74, 0x20, 0x62, 0x65, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x2e, + 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, + 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 
0x61, 0x6d, 0x61, + 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x73, 0x65, 0x6e, 0x64, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x20, 0x74, 0x6f, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x6d, + 0x73, 0x67, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, + 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, - 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, - 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x2c, 0x20, 0x22, 0x22, 0x29, 0x2e, 0x66, 0x69, 0x6e, 0x61, 0x6c, - 0x6c, 0x79, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, - 0x5b, 0x5f, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, - 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, - 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 
0x29, 0x20, - 0x3a, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, - 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, - 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x62, 0x6f, - 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, - 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, - 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x6c, - 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x28, - 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, - 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, - 0x75, 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x28, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, - 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, 0x64, 0x64, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, - 0x72, 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2c, 0x20, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, 0x3d, 0x20, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, 
0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, 0x6c, 0x65, - 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, - 0x65, 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x3d, 0x20, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, + 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, + 0x7d, 0x7d, 0x22, 0x2c, 0x20, 0x6d, 0x73, 0x67, 0x5d, 0x5d, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, + 0x6d, 0x73, 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x3a, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x66, + 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, - 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, - 0x74, 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, + 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, - 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, - 0x7b, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, - 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, 0x62, 0x61, - 0x73, 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x2c, - 0x20, 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, - 0x67, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, - 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, 0x44, 0x61, - 0x74, 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, - 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, - 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, - 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, - 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x22, - 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, - 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x68, - 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, 0x20, 0x26, - 0x26, 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x68, 0x69, - 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, - 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, - 0x6d, 0x20, 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x3d, 0x24, - 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 
0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x0a, + 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x41, 0x72, + 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, + 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, - 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, - 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x6c, - 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, 0x6e, 0x75, - 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, - 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, 0x24, 0x7b, - 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, - 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, 0x73, 0x6f, - 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x22, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, + 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, + 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, + 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x5e, 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, - 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, 0x22, 0x3e, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, - 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, 0x62, 0x75, - 
0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x7d, - 0x3e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, 0x61, 0x67, - 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, - 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, 0x64, - 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, 0x67, - 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, 0x62, - 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, - 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, - 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, - 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, - 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, - 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, - 0x69, 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x62, 0x75, - 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, - 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, - 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, - 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x21, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, 0x2f, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, - 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, - 0x4c, 0x6f, 0x67, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, - 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, - 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x63, 0x72, - 0x6f, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, 0x74, 0x6f, - 0x6d, 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, 0x65, 0x64, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x3d, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x2e, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x70, 0x20, - 0x2b, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, 0x66, 0x66, - 0x73, 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x2b, 0x20, - 0x33, 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, - 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, 0x2c, 0x20, - 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, - 0x6c, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x73, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3d, 0x20, - 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, - 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x27, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x20, 0x3d, 0x20, 0x28, - 
0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, - 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, - 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, 0x3e, 0x20, - 0x30, 0x20, 0x26, 0x26, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, - 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, - 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, - 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, - 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, - 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, - 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2b, 0x2f, - 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x69, 0x73, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, - 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, 0x61, 0x72, 0x6b, - 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, 0x74, 0x65, 0x78, - 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x75, 0x73, - 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, - 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, 0x74, 0x72, - 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, 0x3c, 0x2f, - 
0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, 0x6d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x73, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, - 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, - 0x70, 0x61, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, - 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x60, 0x20, - 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, - 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, - 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, - 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, - 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, - 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x69, - 0x6e, 0x6e, 0x65, 0x72, 0x54, 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x68, - 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x63, 0x6f, - 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6b, 0x65, 0x79, - 0x3d, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6d, 0x67, 0x20, - 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, - 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, 0x7b, 0x21, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, - 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x60, 0x20, 0x3a, 0x20, 0x60, - 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, 0x3d, 0x22, 0x24, 0x7b, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, - 
[hex hunk snipped: examples/server/index.html.hpp is the xxd-generated
byte dump of the server's embedded web UI (examples/server/public/
index.html), so the raw diff is unreadable here. Decoded, the removed
bytes at this offset are the tail of the old ChatLog render plus the old
ConfigForm block (the updateSession/updateParams/updateParamsFloat/
updateParamsInt helpers, the grammarJsonSchemaPropOrder signal,
convertJSONSchemaGrammar via SchemaConverter, the FloatField/IntField
range inputs, and the user-template reset button), all re-added further
down at a new offset. The added bytes close the rebuilt chat-template
join and begin the multimodal chat path: when an image is selected, the
prompt is rebuilt as a LLaVA-style conversation, "A chat between a
curious human and an artificial intelligence assistant. The assistant
gives helpful, detailed, and polite answers to the human's
questions.\nUSER:[img-10]${msg}\n" ...]
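For readers who do not want to decode bytes, a minimal JavaScript sketch
of that prompt construction follows (buildLlavaPrompt and its no-image
branch are illustrative assumptions; the embedded UI inlines the image
branch directly in chat()):

    // The [img-10] marker must match the id sent in params.image_data
    // so the server can splice the image embedding in at that position.
    const SYSTEM_PROMPT =
      "A chat between a curious human and an artificial intelligence " +
      "assistant. The assistant gives helpful, detailed, and polite " +
      "answers to the human's questions.";

    function buildLlavaPrompt(msg, hasImage) {
      // no-image branch is an assumption for illustration; the real UI
      // keeps its existing chat template when no image is selected
      return hasImage
        ? `${SYSTEM_PROMPT}\nUSER:[img-10]${msg}\nASSISTANT:`
        : `${SYSTEM_PROMPT}\nUSER:${msg}\nASSISTANT:`;
    }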
[decoded continuation: "ASSISTANT:" closes the LLaVA prompt, and chat()
now awaits runLlama(prompt, { ...params.value, slot_id: slot_id, stop:
["</s>", template("{{char}}:"), template("{{user}}:")] }, "{{char}}").
The new runCompletion() bails out with 'already running...' while
controller.value is set, appends ["", prompt] to the transcript, calls
runLlama() with an empty stop list, and in .finally() folds the
transcript back into session.value.prompt before clearing it. stop()
prevents the default event, aborts controller.value, and nulls it.]
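The guard/abort pattern, decoded, amounts to the sketch below (a plain
object stands in for the Preact signal, and startRequest is a
hypothetical callback taking an AbortSignal; in the real UI the
controller is stored in a signal shared with the streaming helper):

    const controller = { value: null };

    async function generate(startRequest) {
      if (controller.value) {
        console.log('already running...'); // same guard as runCompletion()
        return;
      }
      controller.value = new AbortController();
      try {
        // e.g. fetch(url, { signal }); callers should handle AbortError
        await startRequest(controller.value.signal);
      } finally {
        controller.value = null;          // always clear, as in .finally()
      }
    }

    function stop(e) {
      e.preventDefault();
      if (controller.value) {
        controller.value.abort();         // cancels the in-flight request
        controller.value = null;
      }
    }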
[decoded continuation: reset() calls stop() and clears the transcript.
The new uploadImage() handler clicks the hidden "fileInput" element and,
on change, reads the selected file with FileReader.readAsDataURL();
onload it stores the data URL in session.value.image_selected and sets
params.value.image_data = [{ data: <base64 with the
"data:image/...;base64," prefix stripped>, id: 10 }], then flags
selected_image = true. (The GrammarControl markup removed at this offset
reappears at the end of the hunk.) A MessageInput component follows:
submit() stops any running generation, passes message.value to chat(),
and clears the field; enterSubmits() submits on Enter (keycode 13)
unless Shift is held.]
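Decoded, the upload path is a standard FileReader round-trip; the sketch
below wraps it in a promise (readImageAsBase64 is a hypothetical name;
the embedded UI uses a bare onload callback on the hidden file input):

    // Read the chosen file as a data URL, strip the
    // "data:image/...;base64," prefix, and keep only the raw base64
    // payload for params.image_data.
    function readImageAsBase64(file) {
      return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => resolve({
          // same regex as the patch: drop the data-URL prefix
          data: reader.result.replace(/data:image\/[^;]+;base64,/, ''),
          id: 10, // must match the [img-10] marker in the prompt
        });
        reader.onerror = reject;
        reader.readAsDataURL(file);
      });
    }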
[decoded continuation: the MessageInput textarea takes a "loading" class
while generating, binds oninput/onkeypress, and is followed by Send,
Upload Image, Stop, and Reset buttons (Send disabled while generating,
Stop disabled otherwise); the old PromptControlFieldSet bytes are
removed at this offset as the surrounding code shifts. A small
CompletionControls component exposes Start/Stop/Reset for completion
mode. ChatLog scrolls its parent to the bottom on new messages only when
already near the bottom, derives isCompletionMode from
session.value.type === 'completion', renders transcript entries through
chatLine() (per-token <Probabilities/> when params.value.n_probs > 0,
raw text in completion mode, <Markdownish/> otherwise), previews the
selected image via an <img> bound to session.value.image_selected, and
makes the log contenteditable in completion mode, with
handleCompletionEdit() writing edits back to session.value.prompt and
clearing the transcript.]
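The auto-scroll heuristic decodes to a few lines worth spelling out,
since the 300px slack is what keeps streaming output from fighting the
user's scrollback (scrollChatToBottom is a hypothetical free-function
version of the useEffect body that runs on every transcript change):

    // Only snap to the bottom when the user is already within ~300px
    // of it; otherwise leave their scroll position alone.
    function scrollChatToBottom(parent) {
      const nearBottom =
        parent.scrollHeight <= parent.scrollTop + parent.offsetHeight + 300;
      if (nearBottom) {
        parent.scrollTo(0, parent.scrollHeight);
      }
    }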
[decoded continuation: the rest of the hunk re-adds the code displaced
by the insertions above, unchanged apart from its new offset: ConfigForm
with the updateSession/updateParams/updateParamsFloat/updateParamsInt
helpers; the grammarJsonSchemaPropOrder signal and
convertJSONSchemaGrammar(), which JSON.parses params.value.grammar,
feeds a prop-order map into SchemaConverter, visits the schema, and
stores converter.formatGrammar() (alerting "Convert failed" on error);
the FloatField/IntField range inputs; the user-template reset button and
autosave useEffect; and the GrammarControl textarea ("Use gbnf or JSON
Schema+convert") with its prop-order input. The old prompt/user-name/
bot-name fieldset bytes removed at this offset likewise shift; the hex
dump continues past this point.]
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, - 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x63, 0x68, - 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, - 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x22, 0x20, 0x6f, 0x6e, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, + 0x6f, 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x6f, + 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x31, 0x2c, + 0x70, 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x33, 0x22, + 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, - 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x50, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, - 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, - 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x6f, 0x6e, + 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x3e, 0x43, 0x6f, + 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x3e, 0x0a, 
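A decoded sketch of the two central pieces follows, reconstructed from the hex bytes above. The handler body and the FloatField component are decoded verbatim; the `const convertJSONSchemaGrammar = () => { try {` framing sits outside the dumped range and is inferred from the button's onclick wiring, so treat it as an assumption.

// Reconstructed frame (inferred from onclick=${convertJSONSchemaGrammar}):
const convertJSONSchemaGrammar = () => {
  try {
    // decoded verbatim from the dump:
    schema = JSON.parse(params.value.grammar)
    const converter = new SchemaConverter(
      grammarJsonSchemaPropOrder.value
        .split(',')
        .reduce((acc, cur, i) => ({ ...acc, [cur.trim()]: i }), {})
    )
    converter.visit(schema, '')
    params.value = {
      ...params.value,
      grammar: converter.formatGrammar(),
    }
  } catch (e) {
    alert(`Convert failed: ${e.message}`)
  }
}

// Generic slider component that the repeated per-parameter markup collapses into:
const FloatField = ({ label, max, min, name, step, value }) => {
  return html`
    <div>
      <label for="${name}">${label}</label>
      <input type="range" id="${name}" min="${min}" max="${max}" step="${step}" name="${name}" value="${value}" oninput=${updateParamsFloat} />
      <span>${value}</span>
    </div>
  `
};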
[Hex dump elided, continued. Decoded, this stretch factors the prompt textarea into a shared PromptControlFieldSet, rebuilds ChatConfigForm (user name, bot name, prompt template, chat history template, plus the grammar controls) and CompletionConfigForm (prompt plus grammar controls only), and selects between them with a chat/completion radio pair: ${session.value.type === 'chat' ? ChatConfigForm() : CompletionConfigForm()}.]
[Hex dump elided, continued. Decoded, this stretch re-emits the sampler controls through the new field components: Predictions (n_predict, -1..2048), Temperature (slider maximum raised from 1.5 to 2.0), Penalize repeat sequence (repeat_penalty), Consider N tokens for penalize (repeat_last_n), Top-K sampling, Top-P sampling, and Min-P sampling; under "More options": TFS-Z, Typical P, Presence penalty, Frequency penalty, the Mirostat radio group (no Mirostat / v1 / v2) with Mirostat tau and eta sliders, and Show Probabilities (n_probs). An API Key text input bound to params.value.api_key appears on the added side only.]
0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x70, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x67, 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, - 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x60, 0x72, 0x67, 0x62, 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, - 0x2c, 0x24, 0x7b, 0x67, 0x7d, 0x2c, 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, - 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, - 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, - 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, - 0x20, 0x6d, 0x73, 0x67, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x21, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, - 0x3e, 0x20, 0x31, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, - 0x66, 0x6f, 0x72, 0x20, 0x62, 0x79, 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, - 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, - 0x74, 0x68, 0x28, 0x27, 0x62, 0x79, 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, - 0x27, 0x29, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, - 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 
0x20, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, - 0x61, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, - 0x62, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, - 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, - 0x6f, 0x62, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x29, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, - 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x3d, 0x24, 0x7b, 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, - 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, - 0x72, 0x6f, 0x62, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, - 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, - 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, - 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, - 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, - 0x3d, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, - 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, - 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, 0x3a, 0x20, 0x27, 0x74, - 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, - 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, - 0x22, 0x70, 0x72, 0x6f, 0x62, 0x2d, 0x73, 0x65, 0x74, 0x22, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, - 0x28, 0x70, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, - 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x60, 0x70, 0x72, - 0x6f, 0x62, 0x3a, 0x20, 0x24, 0x7b, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, - 0x7d, 0x60, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, - 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x27, 0x30, 0x2e, 0x33, 0x65, - 0x6d, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, - 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, - 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3f, 0x20, 0x70, - 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, - 0x72, 0x6f, 0x62, 0x29, 0x20, 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x70, 0x2e, 0x74, 0x6f, - 0x6b, 0x5f, 0x73, 0x74, 0x72, 0x7d, 0x3a, 0x20, 0x3c, 0x2f, 0x73, 0x70, - 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, - 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, - 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x20, - 0x2a, 0x20, 0x31, 0x30, 0x30, 0x29, 0x7d, 0x25, 0x3c, 0x2f, 0x73, 0x70, - 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x70, - 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, - 0x24, 0x7b, 0x7b, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, - 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x43, 0x6f, - 0x6c, 0x6f, 0x72, 0x20, 0x7d, 0x7d, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, - 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x24, - 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x73, 0x67, - 0x2e, 0x63, 0x6f, 
0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, - 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x29, 0x20, - 0x3f, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x62, 0x72, 0x20, 0x2f, - 0x3e, 0x60, 0x20, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x70, 0x6f, 0x6f, 0x72, 0x20, 0x6d, - 0x61, 0x6e, 0x73, 0x20, 0x6d, 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, - 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, - 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, - 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x6d, 0x64, 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x26, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, - 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3c, 0x2f, - 0x67, 0x2c, 0x20, 0x27, 0x26, 0x6c, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3e, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, - 0x67, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, - 0x5e, 0x23, 0x7b, 0x31, 0x2c, 0x36, 0x7d, 0x20, 0x28, 0x2e, 0x2a, 0x29, - 0x24, 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x68, 0x33, 0x3e, - 0x24, 0x31, 0x3c, 0x2f, 0x68, 0x33, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, - 0x29, 0x5c, 0x2a, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, - 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, - 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x5f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x5f, 0x2f, - 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, - 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, - 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, - 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x28, 0x2e, - 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, - 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, - 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, - 0x3c, 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x60, 0x60, 0x60, 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, 0x28, 0x5b, - 0x5c, 0x73, 0x5c, 
0x53, 0x5d, 0x2a, 0x3f, 0x29, 0x60, 0x60, 0x60, 0x2f, - 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x70, 0x72, 0x65, 0x3e, 0x3c, 0x63, 0x6f, - 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, - 0x3c, 0x2f, 0x70, 0x72, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x28, 0x2f, 0x60, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x60, 0x2f, 0x67, - 0x2c, 0x20, 0x27, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, - 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, - 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x64, 0x61, - 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, - 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x24, 0x7b, - 0x7b, 0x20, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3a, 0x20, 0x6d, 0x64, - 0x20, 0x7d, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, - 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x21, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x2f, - 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, - 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, - 0x70, 0x65, 0x72, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x6d, 0x73, - 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, - 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, - 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x64, - 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x73, 0x65, - 0x63, 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, - 0x28, 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, - 0x70, 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, - 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x70, 0x6f, 0x70, 0x6f, - 0x76, 0x65, 0x72, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x6f, 0x70, 0x6f, 0x76, - 0x65, 0x72, 0x20, 
0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, - 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, - 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x73, - 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, - 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x74, 0x6f, 0x70, 0x3a, - 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x2c, 0x20, 0x6c, 0x65, 0x66, 0x74, - 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x20, 0x7d, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x20, 0x3d, 0x20, - 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, - 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, - 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x63, 0x74, 0x20, - 0x3d, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x42, - 0x6f, 0x75, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6c, 0x69, 0x65, 0x6e, - 0x74, 0x52, 0x65, 0x63, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, + 0x72, 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, + 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x28, 0x31, 0x20, + 0x2d, 0x20, 0x70, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x20, 0x3d, 0x20, 0x4d, + 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x31, 0x39, + 0x32, 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x60, 0x72, 0x67, + 0x62, 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, 0x2c, 0x24, 0x7b, 0x67, 0x7d, + 0x2c, 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, 0x60, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, + 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 
0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x21, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3d, + 0x3d, 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, + 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x31, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, - 0x63, 0x74, 0x2e, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x20, 0x2b, 0x20, - 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, - 0x6c, 0x59, 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, - 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, 0x6c, 0x65, - 0x66, 0x74, 0x20, 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, - 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x58, 0x7d, 0x70, 0x78, 0x60, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, - 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x21, - 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x2f, 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x62, + 0x79, 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, 0x72, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5b, + 0x30, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x62, + 0x79, 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, 0x27, 0x29, 0x29, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x6d, 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x3d, 0x3e, 0x20, + 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 
0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3a, 0x20, + 0x70, 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, + 0x65, 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, 0x6f, 0x62, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, + 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, 0x7b, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2c, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x3d, + 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, + 0x20, 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x66, 0x69, 0x6e, + 0x64, 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, + 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x6d, 0x73, 0x67, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, - 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x6f, - 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x21, 0x70, 0x6f, 0x70, - 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, - 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, - 0x74, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x29, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, 0x28, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, - 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, - 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, 0x65, 0x64, - 0x6f, 0x77, 0x6e, 
0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, - 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x72, 0x65, 0x6d, - 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, - 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, 0x65, 0x64, - 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, - 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, - 0x5d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, - 0x6e, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x7d, 0x20, 0x72, - 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, - 0x65, 0x66, 0x7d, 0x20, 0x6f, 0x6e, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x3d, - 0x24, 0x7b, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, - 0x76, 0x65, 0x72, 0x7d, 0x3e, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x3c, 0x2f, - 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x26, 0x26, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, 0x20, 0x69, - 0x6e, 0x74, 0x6f, 0x3d, 0x22, 0x23, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, - 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x66, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, - 0x65, 0x66, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, - 0x22, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, - 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, - 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x6c, 0x65, 0x66, 0x74, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, + 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x66, 0x6f, 0x75, + 0x6e, 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2e, 0x70, 0x72, 0x6f, + 0x62, 0x29, 0x20, 
0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, + 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, + 0x6e, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x62, + 0x2d, 0x73, 0x65, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, + 0x62, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, 0x70, 0x2c, 0x20, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x3d, 0x24, 0x7b, 0x60, 0x70, 0x72, 0x6f, 0x62, 0x3a, 0x20, 0x24, + 0x7b, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x7d, 0x60, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, + 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, + 0x3a, 0x20, 0x27, 0x30, 0x2e, 0x33, 0x65, 0x6d, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, + 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, + 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, + 0x6c, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, + 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x70, - 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, - 0x65, 0x6e, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, - 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x3a, 0x20, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, - 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x28, 0x68, 0x74, 0x74, 0x70, - 0x73, 0x3a, 0x2f, 0x2f, 
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x69, 0x74, - 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, - 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, 0x2f, 0x6d, 0x61, 0x73, 0x74, - 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, - 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x6a, 0x73, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x2a, 0x20, 0x52, 0x65, 0x64, - 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, - 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x65, 0x73, 0x63, 0x65, - 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, - 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, 0x65, 0x6e, 0x20, 0x43, 0x53, - 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x2a, - 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, - 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, - 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, - 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x73, - 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, - 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, + 0x3e, 0x24, 0x7b, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, + 0x7d, 0x3a, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, + 0x7b, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, + 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x2a, 0x20, 0x31, 0x30, 0x30, + 0x29, 0x7d, 0x25, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x60, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, + 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x62, + 0x61, 0x63, 0x6b, 0x67, 
0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x7d, + 0x7d, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, + 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, + 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x29, 0x20, 0x3f, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x60, 0x20, 0x3a, 0x20, + 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x70, 0x6f, 0x6f, 0x72, 0x20, 0x6d, 0x61, 0x6e, 0x73, 0x20, 0x6d, + 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x20, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x64, 0x6f, + 0x77, 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x64, + 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x74, 0x65, + 0x78, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x26, 0x2f, 0x67, + 0x2c, 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, 0x3b, 0x27, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3c, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, + 0x6c, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, + 0x3e, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x67, 0x74, 0x3b, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x23, 0x7b, 0x31, 0x2c, + 0x36, 0x7d, 0x20, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, 0x67, 0x69, 0x6d, + 0x2c, 0x20, 0x27, 0x3c, 0x68, 0x33, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x68, + 0x33, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, + 0x2a, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x5c, 0x2a, + 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, + 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x5f, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, + 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, + 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, + 0x2f, 0x65, 0x6d, 0x3e, 
0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x2f, 0x67, 0x2c, 0x20, + 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x60, 0x60, + 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, 0x28, 0x5b, 0x5c, 0x73, 0x5c, 0x53, 0x5d, + 0x2a, 0x3f, 0x29, 0x60, 0x60, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, + 0x70, 0x72, 0x65, 0x3e, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, + 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x3c, 0x2f, 0x70, 0x72, 0x65, + 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x63, + 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, + 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x6e, + 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x62, 0x72, 0x20, 0x2f, + 0x3e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, + 0x73, 0x70, 0x61, 0x6e, 0x20, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, + 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, + 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x5f, 0x5f, 0x68, + 0x74, 0x6d, 0x6c, 0x3a, 0x20, 0x6d, 0x64, 0x20, 0x7d, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, + 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, + 0x64, 0x7d, 0x20, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, + 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x7d, 0x20, + 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x70, + 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, + 0x5f, 0x74, 0x6f, 0x6b, 
0x65, 0x6e, 0x5f, 0x6d, 0x73, 0x2e, 0x74, 0x6f, + 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, 0x73, 0x20, 0x70, + 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, 0x20, 0x24, 0x7b, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, + 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, + 0x65, 0x72, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, + 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, + 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, + 0x6f, 0x6e, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, + 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, 0x6d, 0x70, + 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, + 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, + 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, + 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x2c, + 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, + 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, + 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, + 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, + 0x65, 0x72, 0x52, 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, + 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x62, + 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x65, 0x63, 0x74, 0x20, 0x3d, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x2e, 0x67, 0x65, 0x74, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x69, 0x6e, 0x67, + 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x74, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, + 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, 0x62, 0x6f, 0x74, 0x74, + 0x6f, 0x6d, 0x20, 0x2b, 
0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, + 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x59, 0x7d, 0x70, 0x78, 0x60, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, + 0x63, 0x74, 0x2e, 0x6c, 0x65, 0x66, 0x74, 0x20, 0x2b, 0x20, 0x77, 0x69, + 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x58, + 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, - 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x20, - 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, - 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, - 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x21, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, + 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, + 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, + 0x20, 0x21, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, + 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, + 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x3b, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, + 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, + 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x28, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, + 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, + 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, + 0x3d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x73, 0x74, 0x79, + 0x6c, 0x65, 0x7d, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x7d, 0x20, 0x6f, 0x6e, 0x43, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x74, 0x6f, 0x67, 0x67, 0x6c, + 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x3e, 0x24, 0x7b, + 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x69, 0x73, 0x4f, + 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x26, 0x26, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x7d, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x3d, 0x22, 0x23, 0x70, + 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, + 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, + 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, + 0x6f, 0x70, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6c, 0x65, 0x66, + 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, + 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x3a, 0x20, 0x70, 0x72, + 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, + 0x28, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, + 0x6c, 0x6f, 0x70, 0x69, 0x74, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, + 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, + 0x2f, 0x6d, 0x61, 0x73, 0x74, 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, + 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, + 0x6c, 0x2e, 0x6a, 0x73, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, + 0x2a, 0x20, 0x52, 0x65, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, + 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, + 0x64, 0x65, 0x73, 0x63, 0x65, 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, + 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, + 0x65, 0x6e, 0x20, 0x43, 0x53, 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x20, 0x2a, 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, + 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, + 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, + 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, + 0x75, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, + 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, + 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, + 0x61, 0x79, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, - 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x26, 0x26, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x2e, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x2e, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, 0x68, + 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, + 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, + 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, + 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, + 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x29, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, + 0x6c, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, + 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, + 0x6f, 0x64, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x27, 0x20, 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, + 0x3a, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, + 0x68, 0x6f, 0x77, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, + 0x75, 0x6e, 0x74, 0x65, 0x64, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2f, 0x2f, 0x20, 0x63, 0x6c, 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, + 0x6f, 0x6c, 0x64, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x20, 0x69, 0x66, 0x20, + 0x6d, 0x6f, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, + 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, + 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, + 0x6e, 0x74, 0x6f, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, + 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x78, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, + 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, + 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, + 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 
0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, - 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x20, 0x3d, - 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x20, - 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, - 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x3a, 0x20, 0x6e, 0x6f, 0x64, - 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, - 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x3d, - 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, - 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x6c, - 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, 0x6f, 0x6c, 0x64, 0x20, 0x6e, - 0x6f, 0x64, 0x65, 0x20, 0x69, 0x66, 0x20, 0x6d, 0x6f, 0x76, 0x69, 0x6e, - 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, 0x3a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, - 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, - 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x26, - 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, - 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x6f, - 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x2f, - 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, - 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, - 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, - 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, - 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, - 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, - 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x7d, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, + 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x73, 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, + 0x72, 0x73, 0x74, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, + 0x20, 0x69, 0x74, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, + 0x61, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x61, 0x6c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, + 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, + 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, + 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, + 0x72, 0x28, 0x7b, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, + 0x20, 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, - 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, - 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, 0x2d, 0x6f, 0x72, 0x64, 0x65, - 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, - 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73, - 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, 0x72, 0x73, 0x74, 0x20, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, 0x20, 0x69, 0x74, 0x20, 0x65, - 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, - 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x61, 0x73, 0x20, 0x61, 0x20, - 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x20, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, - 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, - 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, - 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x7b, 0x20, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7d, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, - 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x65, 0x2d, - 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x7d, 0x22, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, - 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x31, 0x3e, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x68, 0x31, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6d, 0x61, - 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x63, 0x68, 0x61, 0x74, - 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, - 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, - 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x6d, 0x61, 0x69, 0x6e, 0x3e, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x77, - 0x72, 0x69, 0x74, 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, - 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, - 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, - 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, 0x72, 0x65, 0x64, 0x20, 0x62, 0x79, + 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, + 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x7d, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x68, 0x31, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, + 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, + 0x7b, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, + 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x6d, 0x61, + 0x69, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x69, 0x64, 0x3d, 0x22, 0x77, 0x72, 0x69, 0x74, 0x65, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, + 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, + 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, + 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, + 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, 0x72, + 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, + 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x67, + 0x65, 0x72, 0x67, 0x61, 0x6e, 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2e, 0x63, 0x70, 0x70, 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, - 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x67, 0x65, 0x72, 0x67, 0x61, 0x6e, - 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, - 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, - 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, - 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, - 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x22, 0x3e, 0x67, 0x67, - 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, 0x2f, 0x61, 0x3e, 0x2e, 0x3c, 0x2f, - 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x28, 0x41, 0x70, 0x70, 0x29, 0x2c, - 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, - 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, - 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x27, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x3e, 0x0a, - 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, - 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x66, 0x69, - 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x20, 0x61, 0x63, 0x63, - 0x65, 0x70, 0x74, 0x3d, 0x22, 
0x69, 0x6d, 0x61, 0x67, 0x65, 0x2f, 0x2a, - 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x64, 0x69, 0x73, - 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x22, - 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x70, 0x6f, - 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, - 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a + 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, + 0x69, 0x22, 0x3e, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, 0x2f, + 0x61, 0x3e, 0x2e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, 0x74, + 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x28, + 0x41, 0x70, 0x70, 0x29, 0x2c, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x28, 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x27, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x3c, + 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x68, + 0x65, 0x61, 0x64, 0x3e, 0x0a, 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, 0x3e, + 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, + 0x22, 0x20, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x3d, 0x22, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x2f, 0x2a, 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, + 0x3d, 0x22, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, + 0x6f, 0x6e, 0x65, 0x3b, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x3c, + 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, + 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a }; -unsigned int index_html_len = 33103; +unsigned int index_html_len = 33456; diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp index c9dc078b7..e09b3c8c5 100644 --- a/examples/server/index.js.hpp +++ b/examples/server/index.js.hpp @@ -2,1875 +2,1902 @@ unsigned char index_js[] = { 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x79, 0x63, 0x6c, 0x65, 0x20, - 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x28, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x75, 0x3e, 0x31, 0x29, 0x7b, 0x75, 0x2d, 0x2d, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x74, - 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x5f, 0x29, 0x7b, - 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x5f, 0x3b, 0x5f, 0x3d, 
0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x77, 0x68, 0x69, - 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x69, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, - 0x2e, 0x6f, 0x3b, 0x69, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x69, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, - 0x28, 0x21, 0x28, 0x38, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x26, 0x26, 0x61, - 0x28, 0x69, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x2e, 0x63, 0x28, - 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, 0x3d, - 0x21, 0x30, 0x7d, 0x7d, 0x69, 0x3d, 0x5f, 0x7d, 0x7d, 0x66, 0x3d, 0x30, - 0x3b, 0x75, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, 0x68, - 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x75, - 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, - 0x29, 0x3b, 0x75, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, + 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x53, 0x79, 0x6d, 0x62, 0x6f, + 0x6c, 0x2e, 0x66, 0x6f, 0x72, 0x28, 0x22, 0x70, 0x72, 0x65, 0x61, 0x63, + 0x74, 0x2d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x73, 0x22, 0x29, 0x3b, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x29, 0x7b, 0x66, 0x2d, 0x2d, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, + 0x74, 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6f, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x73, 0x2b, 0x2b, 0x3b, 0x77, 0x68, + 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x3d, + 0x5f, 0x2e, 0x6f, 0x3b, 0x5f, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x3b, 0x5f, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, + 0x66, 0x28, 0x21, 0x28, 0x38, 0x26, 0x5f, 0x2e, 0x66, 0x29, 0x26, 0x26, + 0x70, 0x28, 0x5f, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x5f, 0x2e, 0x63, + 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, + 0x3d, 0x21, 0x30, 0x7d, 0x7d, 0x5f, 0x3d, 0x69, 0x7d, 0x7d, 0x73, 0x3d, + 0x30, 0x3b, 0x66, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, + 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x66, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x28, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x6c, + 0x65, 0x74, 0x20, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x30, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x28, 0x74, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x3b, 0x72, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 
0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, - 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x6e, 0x28, 0x29, 0x7d, 0x7d, 0x6c, 0x65, - 0x74, 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x3d, 0x30, 0x3b, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x28, 0x74, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x6f, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x3b, 0x6f, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, - 0x6c, 0x6c, 0x79, 0x7b, 0x6f, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, 0x7d, - 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x75, 0x3d, 0x30, 0x2c, 0x66, 0x3d, 0x30, - 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x73, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, - 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, - 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x69, - 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, 0x74, - 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, 0x69, - 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, - 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, - 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, 0x73, - 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, 0x2e, - 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, 0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, - 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, 0x3b, - 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, 0x2e, - 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, 0x2e, - 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, 0x73, - 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x63, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 
0x74, 0x26, - 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, - 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x74, 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x74, 0x3d, 0x74, 0x7d, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, - 0x3d, 0x74, 0x2e, 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, - 0x29, 0x7b, 0x6e, 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, - 0x65, 0x3d, 0x6e, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, - 0x65, 0x7d, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, - 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x28, - 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2c, 0x69, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, - 0x2d, 0x33, 0x33, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, - 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x69, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, - 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x72, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, + 0x7d, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x66, 0x3d, 0x30, 0x2c, 0x73, 0x3d, + 0x30, 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, + 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, + 0x69, 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, + 0x74, 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, + 0x69, 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, + 0x6e, 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 
0x28, 0x33, + 0x32, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, + 0x2e, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, + 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, + 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, + 0x2e, 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, + 0x2e, 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, + 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x68, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x62, 0x72, + 0x61, 0x6e, 0x64, 0x3d, 0x6e, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x63, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, - 0x4f, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x63, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, - 0x6b, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, - 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, - 0x79, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, - 0x65, 0x74, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, - 0x3d, 0x73, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, - 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, - 0x2c, 0x73, 0x65, 0x74, 0x28, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, - 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, - 0x76, 0x29, 0x21, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 
0x70, 0x75, - 0x74, 0x65, 0x64, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, - 0x61, 0x76, 0x65, 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, - 0x65, 0x63, 0x74, 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, - 0x28, 0x65, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, - 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x65, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, - 0x75, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, - 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, - 0x29, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x6e, 0x28, - 0x29, 0x7d, 0x7d, 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x63, 0x28, 0x74, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61, 0x28, - 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, - 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, 0x66, - 0x28, 0x6e, 0x2e, 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, - 0x7c, 0x7c, 0x21, 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, 0x7c, - 0x6e, 0x2e, 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x70, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, - 0x2e, 0x6e, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, - 0x6e, 0x2e, 0x53, 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, 0x3d, - 0x65, 0x3b, 0x6e, 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, 0x2e, - 0x69, 0x3d, 0x2d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, 0x2e, - 0x73, 0x3d, 0x6e, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x64, 0x28, 0x74, - 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, - 0x73, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x74, 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x2d, - 0x31, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, 0x53, - 0x2e, 0x55, 0x28, 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, 0x3d, - 0x65, 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x6e, 0x29, 0x65, 0x2e, 0x6e, 0x2e, - 0x70, 0x3d, 0x74, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, 0x65, - 0x3b, 0x65, 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, - 0x2e, 0x72, 0x29, 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 
0x64, 0x20, - 0x30, 0x3b, 0x65, 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x76, 0x28, 0x74, - 0x29, 0x7b, 0x63, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x3d, 0x34, 0x7d, 0x28, 0x76, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x63, 0x29, - 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, - 0x3b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, 0x66, - 0x28, 0x33, 0x32, 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x35, - 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x3d, - 0x3d, 0x6c, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x61, 0x28, 0x74, - 0x68, 0x69, 0x73, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x30, 0x7d, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, 0x3b, - 0x74, 0x72, 0x79, 0x7b, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, - 0x69, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, 0x3b, - 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x7c, 0x7c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, 0x74, - 0x7c, 0x7c, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, - 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, 0x74, - 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, - 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, - 0x36, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x69, - 0x3d, 0x74, 0x3b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x76, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x7c, 0x3d, 0x33, 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, - 0x2e, 0x6e, 0x29, 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, 0x7d, - 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 
0x65, 0x2e, - 0x53, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, - 0x74, 0x29, 0x7d, 0x3b, 0x76, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x26, 0x26, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x65, 0x29, 0x7b, + 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, + 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, + 0x74, 0x7d, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x29, 0x7b, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2e, 0x55, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, - 0x73, 0x2c, 0x74, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, - 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, 0x74, - 0x2e, 0x53, 0x2e, 0x55, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x7d, 0x3b, 0x76, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, - 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, - 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x7d, 0x3b, 0x76, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, - 0x65, 0x6b, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, - 0x28, 0x29, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x3b, - 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, - 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x76, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, - 0x3d, 0x73, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x68, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x6e, 0x2e, 0x69, 
0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x7d, - 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x79, - 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, - 0x65, 0x77, 0x20, 0x76, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6d, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x74, 0x2e, 0x75, 0x3b, 0x74, 0x2e, - 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x66, 0x28, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x7b, 0x75, 0x2b, - 0x2b, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x3b, - 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x72, 0x79, - 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, - 0x29, 0x7b, 0x74, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x74, 0x2e, - 0x66, 0x7c, 0x3d, 0x38, 0x3b, 0x67, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x68, - 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, - 0x79, 0x7b, 0x69, 0x3d, 0x5f, 0x3b, 0x6e, 0x28, 0x29, 0x7d, 0x7d, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x67, 0x28, 0x74, - 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, - 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x6e, 0x2e, 0x53, - 0x2e, 0x55, 0x28, 0x6e, 0x29, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3b, 0x6d, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x28, 0x74, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x69, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x29, 0x74, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x6e, + 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, 0x65, 0x3d, 0x6e, + 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, 0x65, 0x7d, 0x7d, + 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x77, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x5f, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, + 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 
0x66, + 0x7c, 0x3d, 0x5f, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, 0x68, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, + 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, 0x4f, 0x4e, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, + 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, + 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x63, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x69, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x2c, 0x73, 0x65, + 0x74, 0x28, 0x6e, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, + 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x79, 0x29, 0x21, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, - 0x6f, 0x72, 0x28, 0x22, 0x4f, 0x75, 0x74, 0x2d, 0x6f, 0x66, 0x2d, 0x6f, - 0x72, 0x64, 0x65, 0x72, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x22, - 0x29, 0x3b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, - 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, - 0x3b, 0x69, 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x6e, 0x28, 0x29, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6b, 0x28, - 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x3d, 0x33, 0x32, 0x7d, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x2e, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x53, 0x28, 0x29, 0x3b, 0x74, 0x72, - 0x79, 0x7b, 0x69, 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x69, 0x66, 
0x28, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x78, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x78, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x6e, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x6e, - 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x28, 0x29, - 0x7d, 0x7d, 0x3b, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x66, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x26, 0x3d, 0x2d, 0x39, 0x3b, 0x6d, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, - 0x3b, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x75, 0x2b, 0x2b, - 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x62, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2c, 0x6e, 0x29, 0x7d, 0x3b, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x6f, 0x3d, 0x5f, 0x3b, 0x5f, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, - 0x7d, 0x3b, 0x6b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2e, 0x64, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x38, - 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x29, 0x29, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x7d, - 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x53, 0x28, - 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x6e, - 0x65, 0x77, 0x20, 0x6b, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, - 0x6e, 0x2e, 0x63, 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, - 0x74, 0x29, 0x7b, 0x6e, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x72, - 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x2e, 0x64, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x29, 0x7d, - 0x76, 0x61, 0x72, 0x20, 0x78, 0x2c, 0x77, 0x2c, 0x43, 0x2c, 0x45, 0x2c, - 0x55, 0x2c, 0x48, 0x2c, 0x4e, 0x2c, 0x50, 0x2c, 0x24, 0x2c, 0x44, 0x3d, - 0x7b, 0x7d, 0x2c, 0x54, 0x3d, 0x5b, 0x5d, 0x2c, 0x56, 0x3d, 0x2f, 0x61, - 0x63, 0x69, 0x74, 0x7c, 0x65, 0x78, 0x28, 0x3f, 0x3a, 0x73, 0x7c, 0x67, - 0x7c, 0x6e, 0x7c, 0x70, 0x7c, 0x24, 0x29, 0x7c, 0x72, 0x70, 0x68, 0x7c, - 0x67, 0x72, 0x69, 0x64, 0x7c, 0x6f, 0x77, 0x73, 0x7c, 0x6d, 0x6e, 0x63, - 0x7c, 0x6e, 0x74, 0x77, 0x7c, 0x69, 0x6e, 0x65, 0x5b, 0x63, 0x68, 0x5d, - 0x7c, 0x7a, 0x6f, 0x6f, 0x7c, 0x5e, 0x6f, 0x72, 0x64, 0x7c, 0x69, 0x74, - 0x65, 0x72, 0x61, 0x2f, 0x69, 0x2c, 0x41, 0x3d, 0x41, 0x72, 0x72, 0x61, - 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3b, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x46, 0x28, 0x74, 0x2c, 0x6e, - 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x20, - 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x74, 0x5b, 0x65, 0x5d, 0x3d, 0x6e, 
0x5b, - 0x65, 0x5d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x70, 0x61, - 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x3b, 0x6e, 0x26, 0x26, - 0x6e, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x57, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, - 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x7b, - 0x7d, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, - 0x29, 0x22, 0x6b, 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, 0x3d, - 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, - 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x72, 0x5b, 0x6f, - 0x5d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x61, 0x72, - 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x3e, 0x32, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, - 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, - 0x3f, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, - 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x6e, 0x75, - 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6f, - 0x20, 0x69, 0x6e, 0x20, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3d, 0x3d, 0x3d, 0x72, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x28, 0x72, - 0x5b, 0x6f, 0x5d, 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x6f, 0x5d, 0x29, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x72, 0x2c, - 0x69, 0x2c, 0x5f, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6f, 0x3d, 0x7b, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x74, 0x2c, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x3a, 0x6e, 0x2c, 0x6b, 0x65, 0x79, 0x3a, 0x65, 0x2c, - 0x72, 0x65, 0x66, 0x3a, 0x69, 0x2c, 0x5f, 0x5f, 0x6b, 0x3a, 0x6e, 0x75, - 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, - 0x5f, 0x62, 0x3a, 0x30, 0x2c, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x75, 0x6c, - 0x6c, 0x2c, 0x5f, 0x5f, 0x64, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x2c, 0x5f, 0x5f, 0x63, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, - 0x68, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, + 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, 0x61, 0x76, 0x65, + 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x21, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x73, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x6e, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, 0x66, 0x2b, 
0x2b, + 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, + 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, + 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x61, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x70, 0x28, 0x74, 0x29, 0x7b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, + 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x2e, + 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x7c, 0x7c, 0x21, + 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, 0x7c, 0x6e, 0x2e, 0x53, + 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x64, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, + 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x53, + 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, 0x3d, 0x65, 0x3b, 0x6e, + 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, 0x2e, 0x69, 0x3d, 0x2d, + 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x73, 0x3d, 0x6e, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x76, 0x28, 0x74, 0x29, 0x7b, 0x6c, + 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x77, + 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, + 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, + 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, 0x53, 0x2e, 0x55, 0x28, + 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x6e, + 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x65, 0x2e, 0x6e, 0x29, 0x65, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x74, + 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, 0x65, 0x3b, 0x65, 0x2e, + 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x72, 0x29, + 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x65, + 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x79, 0x28, 0x74, 0x29, 0x7b, 0x68, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, + 0x3d, 0x6c, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 
0x3d, + 0x34, 0x7d, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x29, 0x2e, 0x68, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, + 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, + 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x35, 0x3b, 0x69, 0x66, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x3d, 0x3d, 0x6c, 0x29, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, + 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, 0x3b, 0x74, 0x72, 0x79, + 0x7b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, + 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x7c, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x30, + 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, + 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x36, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x69, 0x3d, 0x74, 0x3b, + 0x76, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x33, + 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, + 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, 0x7d, 0x68, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x2e, 0x63, + 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x29, 0x7d, + 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 
0x74, + 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, 0x3b, 0x66, 0x6f, + 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, 0x74, 0x2e, 0x53, 0x2e, + 0x55, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, + 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x36, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, + 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, + 0x66, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, 0x28, 0x29, 0x29, + 0x74, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, + 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x28, + 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x63, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, + 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x6e, 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x7d, 0x29, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6d, 0x28, 0x74, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, + 0x79, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x67, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x75, 0x3b, 0x74, 0x2e, 0x75, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x29, 0x7b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x28, + 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, 0x74, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x74, 0x2e, 0x66, 0x7c, 0x3d, + 0x38, 0x3b, 0x62, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 
0x77, + 0x20, 0x6e, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x69, + 0x3d, 0x5f, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x28, 0x74, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, + 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, + 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x6e, 0x2e, 0x53, 0x2e, 0x55, 0x28, + 0x6e, 0x29, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x3b, 0x74, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x67, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x6b, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, + 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x29, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, + 0x22, 0x4f, 0x75, 0x74, 0x2d, 0x6f, 0x66, 0x2d, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x22, 0x29, 0x3b, 0x76, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x69, 0x66, + 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x62, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x53, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, 0x33, 0x32, + 0x7d, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x53, 0x28, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x69, + 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, + 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, + 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x6e, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x28, 0x29, 0x7d, 0x7d, 0x3b, + 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, + 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, + 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, + 0x39, 0x3b, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x64, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, + 0x7d, 0x3b, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 
0x70, + 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, + 0x6f, 0x3b, 0x6f, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x3b, 0x53, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x38, 0x3b, 0x69, 0x66, + 0x28, 0x21, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, + 0x29, 0x62, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x7d, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x77, 0x28, 0x74, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, + 0x53, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x2e, 0x63, + 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, + 0x6e, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x74, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x64, + 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x29, 0x7d, 0x76, 0x61, 0x72, + 0x20, 0x78, 0x2c, 0x43, 0x2c, 0x45, 0x2c, 0x55, 0x2c, 0x48, 0x2c, 0x50, + 0x2c, 0x4e, 0x2c, 0x24, 0x2c, 0x44, 0x2c, 0x54, 0x3d, 0x7b, 0x7d, 0x2c, + 0x56, 0x3d, 0x5b, 0x5d, 0x2c, 0x41, 0x3d, 0x2f, 0x61, 0x63, 0x69, 0x74, + 0x7c, 0x65, 0x78, 0x28, 0x3f, 0x3a, 0x73, 0x7c, 0x67, 0x7c, 0x6e, 0x7c, + 0x70, 0x7c, 0x24, 0x29, 0x7c, 0x72, 0x70, 0x68, 0x7c, 0x67, 0x72, 0x69, + 0x64, 0x7c, 0x6f, 0x77, 0x73, 0x7c, 0x6d, 0x6e, 0x63, 0x7c, 0x6e, 0x74, + 0x77, 0x7c, 0x69, 0x6e, 0x65, 0x5b, 0x63, 0x68, 0x5d, 0x7c, 0x7a, 0x6f, + 0x6f, 0x7c, 0x5e, 0x6f, 0x72, 0x64, 0x7c, 0x69, 0x74, 0x65, 0x72, 0x61, + 0x2f, 0x69, 0x2c, 0x46, 0x3d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, + 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, + 0x6e, 0x29, 0x74, 0x5b, 0x65, 0x5d, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x28, 0x74, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x72, + 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, + 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x7b, 0x7d, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x22, 0x6b, + 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, + 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, + 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x72, 0x5b, 0x6f, 0x5d, 0x3d, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, + 0x32, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, 0x3f, 0x78, 0x2e, + 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 
0x6e, + 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, + 0x3d, 0x72, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x28, 0x72, 0x5b, 0x6f, 0x5d, + 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x6f, 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x72, 0x2c, 0x5f, 0x2c, 0x69, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x7b, + 0x74, 0x79, 0x70, 0x65, 0x3a, 0x74, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x3a, 0x6e, 0x2c, 0x6b, 0x65, 0x79, 0x3a, 0x65, 0x2c, 0x72, 0x65, 0x66, + 0x3a, 0x5f, 0x2c, 0x5f, 0x5f, 0x6b, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x5f, 0x5f, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, + 0x30, 0x2c, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, + 0x5f, 0x64, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x5f, + 0x63, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x5f, 0x76, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x5f, 0x3f, 0x2b, 0x2b, 0x43, 0x3a, 0x5f, 0x7d, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x5f, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x77, 0x2e, 0x76, 0x6e, 0x6f, - 0x64, 0x65, 0x26, 0x26, 0x77, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x28, - 0x6f, 0x29, 0x2c, 0x6f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x4c, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x6e, 0x75, 0x6c, - 0x6c, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x52, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x28, 0x74, 0x2c, - 0x6e, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, - 0x73, 0x3d, 0x74, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6e, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x3f, 0x6a, 0x28, - 0x74, 0x2e, 0x5f, 0x5f, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x2e, 0x5f, 0x5f, - 0x6b, 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x74, 0x29, - 0x2b, 0x31, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x66, 0x6f, 0x72, - 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, + 0x69, 0x3f, 0x2b, 0x2b, 0x45, 0x3a, 0x69, 0x2c, 0x5f, 0x5f, 0x69, 0x3a, + 0x2d, 0x31, 0x2c, 0x5f, 0x5f, 0x75, 0x3a, 0x30, 0x7d, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x69, + 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x43, 0x2e, 0x76, 0x6e, 
+ 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, + 0x28, 0x6f, 0x29, 0x2c, 0x6f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x52, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x6e, 0x75, + 0x6c, 0x6c, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x6a, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x3d, 0x74, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6e, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x3f, 0x71, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x69, 0x2b, + 0x31, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, 0x5f, + 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, 0x2b, + 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x65, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x71, 0x28, 0x74, + 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x6a, 0x28, - 0x74, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, - 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, - 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, - 0x75, 
0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, - 0x5f, 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, - 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x28, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, - 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, - 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x28, 0x74, 0x29, 0x7b, 0x28, - 0x21, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, - 0x5f, 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x55, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x47, 0x2e, 0x5f, 0x5f, - 0x72, 0x2b, 0x2b, 0x7c, 0x7c, 0x48, 0x21, 0x3d, 0x3d, 0x77, 0x2e, 0x64, - 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x48, 0x3d, 0x77, - 0x2e, 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, - 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x4e, 0x29, 0x28, - 0x47, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x47, 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, - 0x66, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x55, 0x2e, 0x73, 0x6f, 0x72, 0x74, - 0x28, 0x50, 0x29, 0x3b, 0x74, 0x3d, 0x55, 0x2e, 0x73, 0x68, 0x69, 0x66, - 0x74, 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, - 0x28, 0x6e, 0x3d, 0x55, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, - 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x2c, 0x75, 0x3d, 0x28, 0x72, 0x3d, 0x28, 0x65, 0x3d, 0x74, - 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x28, - 0x66, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, 0x26, 0x28, 0x69, - 0x3d, 0x5b, 0x5d, 0x2c, 0x5f, 0x3d, 0x5b, 0x5d, 0x2c, 0x28, 0x6f, 0x3d, - 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x72, 0x29, 0x29, 0x2e, 0x5f, 0x5f, 0x76, - 0x3d, 0x72, 0x2e, 0x5f, 0x5f, 0x76, 0x2b, 0x31, 0x2c, 0x69, 0x74, 0x28, - 0x66, 0x2c, 0x72, 0x2c, 0x6f, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, 0x2c, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x66, 0x2e, 0x6f, - 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, - 0x6e, 0x74, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x72, 0x2e, 0x5f, - 0x5f, 0x68, 0x3f, 0x5b, 0x75, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x69, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x75, 0x3f, 0x6a, 0x28, - 0x72, 0x29, 0x3a, 0x75, 0x2c, 0x72, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, 0x5f, - 0x29, 0x2c, 0x5f, 0x74, 0x28, 0x69, 0x2c, 0x72, 0x2c, 0x5f, 0x29, 0x2c, - 0x72, 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, 0x75, 0x26, 0x26, 0x42, 0x28, - 0x72, 0x29, 0x29, 0x2c, 0x55, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3e, 0x6e, 0x26, 0x26, 0x55, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, 0x50, - 0x29, 0x29, 0x3b, 0x47, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, 0x28, 0x74, 0x2c, - 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, - 0x75, 0x2c, 0x66, 0x2c, 0x6c, 0x2c, 0x73, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 
0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, - 0x2c, 0x79, 0x2c, 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x3d, 0x30, - 0x2c, 0x53, 0x3d, 0x69, 0x26, 0x26, 0x69, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, - 0x7c, 0x54, 0x2c, 0x78, 0x3d, 0x53, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x2c, 0x77, 0x3d, 0x78, 0x2c, 0x43, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, - 0x5f, 0x6b, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, - 0x43, 0x3b, 0x63, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x28, 0x70, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x63, 0x5d, 0x3d, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x70, 0x3d, 0x6e, 0x5b, 0x63, - 0x5d, 0x29, 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, - 0x7c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x3f, 0x6e, 0x75, - 0x6c, 0x6c, 0x3a, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, 0x7c, 0x22, - 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x70, 0x7c, 0x7c, 0x22, 0x62, 0x69, 0x67, 0x69, - 0x6e, 0x74, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, - 0x70, 0x3f, 0x4f, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x29, 0x3a, - 0x41, 0x28, 0x70, 0x29, 0x3f, 0x4f, 0x28, 0x52, 0x2c, 0x7b, 0x63, 0x68, - 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x70, 0x7d, 0x2c, 0x6e, 0x75, - 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x29, 0x3a, 0x70, 0x2e, 0x5f, 0x5f, 0x62, 0x3e, 0x30, 0x3f, 0x4f, 0x28, - 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x70, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2c, 0x70, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x70, 0x2e, 0x72, - 0x65, 0x66, 0x3f, 0x70, 0x2e, 0x72, 0x65, 0x66, 0x3a, 0x6e, 0x75, 0x6c, - 0x6c, 0x2c, 0x70, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x3a, 0x70, 0x29, 0x26, - 0x26, 0x28, 0x70, 0x2e, 0x5f, 0x5f, 0x3d, 0x65, 0x2c, 0x70, 0x2e, 0x5f, - 0x5f, 0x62, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x62, 0x2b, 0x31, 0x2c, 0x2d, - 0x31, 0x3d, 0x3d, 0x3d, 0x28, 0x6d, 0x3d, 0x58, 0x28, 0x70, 0x2c, 0x53, - 0x2c, 0x79, 0x3d, 0x63, 0x2b, 0x6b, 0x2c, 0x77, 0x29, 0x29, 0x3f, 0x61, - 0x3d, 0x44, 0x3a, 0x28, 0x61, 0x3d, 0x53, 0x5b, 0x6d, 0x5d, 0x7c, 0x7c, - 0x44, 0x2c, 0x53, 0x5b, 0x6d, 0x5d, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x77, 0x2d, 0x2d, 0x29, 0x2c, 0x69, 0x74, 0x28, 0x74, 0x2c, - 0x70, 0x2c, 0x61, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, - 0x66, 0x2c, 0x6c, 0x2c, 0x73, 0x29, 0x2c, 0x64, 0x3d, 0x70, 0x2e, 0x5f, - 0x5f, 0x65, 0x2c, 0x28, 0x68, 0x3d, 0x70, 0x2e, 0x72, 0x65, 0x66, 0x29, - 0x26, 0x26, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x21, 0x3d, 0x68, 0x26, 0x26, - 0x28, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x72, 0x74, 0x28, 0x61, - 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x70, 0x29, - 0x2c, 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x68, 0x2c, 0x70, 0x2e, - 0x5f, 0x5f, 0x63, 0x7c, 0x7c, 0x64, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x64, 0x26, 0x26, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x76, 0x26, 0x26, 0x28, 0x76, 0x3d, 0x64, 0x29, 0x2c, - 0x62, 0x3d, 0x21, 0x28, 0x67, 0x3d, 0x61, 0x3d, 0x3d, 0x3d, 0x44, 0x7c, - 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, - 0x76, 
0x29, 0x26, 0x26, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, 0x2c, 0x67, 0x3f, - 0x2d, 0x31, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x6b, 0x2d, 0x2d, 0x3a, 0x6d, - 0x21, 0x3d, 0x3d, 0x79, 0x26, 0x26, 0x28, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, - 0x2b, 0x31, 0x3f, 0x28, 0x6b, 0x2b, 0x2b, 0x2c, 0x62, 0x3d, 0x21, 0x30, - 0x29, 0x3a, 0x6d, 0x3e, 0x79, 0x3f, 0x77, 0x3e, 0x43, 0x2d, 0x79, 0x3f, - 0x28, 0x6b, 0x2b, 0x3d, 0x6d, 0x2d, 0x79, 0x2c, 0x62, 0x3d, 0x21, 0x30, - 0x29, 0x3a, 0x6b, 0x2d, 0x2d, 0x3a, 0x6b, 0x3d, 0x6d, 0x3c, 0x79, 0x26, - 0x26, 0x6d, 0x3d, 0x3d, 0x79, 0x2d, 0x31, 0x3f, 0x6d, 0x2d, 0x79, 0x3a, - 0x30, 0x29, 0x2c, 0x79, 0x3d, 0x63, 0x2b, 0x6b, 0x2c, 0x62, 0x3d, 0x62, - 0x7c, 0x7c, 0x6d, 0x3d, 0x3d, 0x63, 0x26, 0x26, 0x21, 0x67, 0x2c, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x7c, 0x7c, 0x6d, 0x3d, 0x3d, 0x3d, 0x79, 0x26, 0x26, 0x61, 0x2e, 0x5f, - 0x5f, 0x6b, 0x21, 0x3d, 0x3d, 0x70, 0x2e, 0x5f, 0x5f, 0x6b, 0x3f, 0x22, + 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x47, 0x28, 0x74, 0x29, 0x7b, 0x28, 0x21, + 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x48, 0x2e, 0x70, 0x75, 0x73, + 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, + 0x2b, 0x2b, 0x7c, 0x7c, 0x50, 0x21, 0x3d, 0x3d, 0x43, 0x2e, 0x64, 0x65, + 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, 0x65, 0x72, + 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x50, 0x3d, 0x43, 0x2e, + 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x4e, 0x29, 0x28, 0x7a, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, + 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, + 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, + 0x24, 0x29, 0x3b, 0x74, 0x3d, 0x48, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, + 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, + 0x6e, 0x3d, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x5f, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6f, 0x3d, 0x28, 0x69, + 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, + 0x5f, 0x5f, 0x65, 0x2c, 0x75, 0x3d, 0x5b, 0x5d, 0x2c, 0x66, 0x3d, 0x5b, + 0x5d, 0x2c, 0x28, 0x72, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, + 0x26, 0x28, 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, + 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x76, 0x2b, + 0x31, 0x2c, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, + 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x28, 0x5f, 0x29, 0x2c, 0x5f, 0x74, + 0x28, 0x72, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, + 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x2e, + 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x2c, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, 0x5f, 0x75, + 0x3f, 0x5b, 0x6f, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x3f, 0x71, 0x28, 0x69, 0x29, + 0x3a, 
0x6f, 0x2c, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x2c, 0x66, 0x29, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x2e, + 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x3d, 0x5f, + 0x2c, 0x69, 0x74, 0x28, 0x75, 0x2c, 0x5f, 0x2c, 0x66, 0x29, 0x2c, 0x5f, + 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, 0x6f, 0x26, 0x26, 0x42, 0x28, 0x5f, + 0x29, 0x29, 0x2c, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, + 0x6e, 0x26, 0x26, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, 0x24, 0x29, + 0x29, 0x3b, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, + 0x2c, 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x3d, + 0x5f, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x56, 0x2c, + 0x79, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x66, 0x2c, 0x4b, + 0x28, 0x65, 0x2c, 0x6e, 0x2c, 0x76, 0x29, 0x2c, 0x66, 0x3d, 0x65, 0x2e, + 0x5f, 0x5f, 0x64, 0x2c, 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, 0x79, 0x3b, + 0x63, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x61, + 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x63, 0x5d, 0x29, 0x26, 0x26, + 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x28, 0x68, 0x3d, 0x2d, 0x31, + 0x3d, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x3f, 0x54, 0x3a, 0x76, + 0x5b, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x7c, 0x7c, 0x54, 0x2c, 0x61, + 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x63, 0x2c, 0x5f, 0x74, 0x28, 0x74, 0x2c, + 0x61, 0x2c, 0x68, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, + 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x2c, 0x70, 0x3d, 0x61, 0x2e, 0x5f, + 0x5f, 0x65, 0x2c, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x68, 0x2e, + 0x72, 0x65, 0x66, 0x21, 0x3d, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, + 0x28, 0x68, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x72, 0x74, 0x28, 0x68, + 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x29, + 0x2c, 0x6c, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x61, 0x2e, 0x72, 0x65, + 0x66, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x63, 0x7c, 0x7c, 0x70, 0x2c, 0x61, + 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x64, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x70, 0x26, 0x26, 0x28, 0x64, 0x3d, + 0x70, 0x29, 0x2c, 0x36, 0x35, 0x35, 0x33, 0x36, 0x26, 0x61, 0x2e, 0x5f, + 0x5f, 0x75, 0x7c, 0x7c, 0x68, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x3d, 0x3d, + 0x61, 0x2e, 0x5f, 0x5f, 0x6b, 0x3f, 0x66, 0x3d, 0x51, 0x28, 0x61, 0x2c, + 0x66, 0x2c, 0x74, 0x29, 0x3a, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x61, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3f, 0x66, + 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3a, 0x70, 0x26, 0x26, 0x28, 0x66, + 0x3d, 0x70, 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, + 0x6e, 0x67, 0x29, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x3d, + 0x2d, 0x31, 0x39, 0x36, 0x36, 0x30, 0x39, 0x29, 0x3b, 0x65, 0x2e, 0x5f, + 0x5f, 
0x64, 0x3d, 0x66, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x64, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, + 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x3d, 0x6e, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x73, 0x3d, 0x65, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x6c, 0x3d, 0x73, 0x2c, 0x63, + 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, + 0x3d, 0x5b, 0x5d, 0x2c, 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x66, 0x3b, + 0x5f, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x69, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x5d, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x69, 0x3d, 0x6e, 0x5b, 0x5f, 0x5d, 0x29, + 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x3d, + 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x7c, 0x7c, 0x62, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x70, 0x2e, 0x5f, 0x5f, 0x64, 0x3f, 0x28, 0x66, 0x3d, 0x70, 0x2e, - 0x5f, 0x5f, 0x64, 0x2c, 0x70, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x29, 0x3a, 0x66, 0x3d, 0x64, 0x2e, 0x6e, 0x65, - 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x3a, 0x66, 0x3d, - 0x51, 0x28, 0x74, 0x2c, 0x64, 0x2c, 0x66, 0x29, 0x3a, 0x66, 0x3d, 0x4a, - 0x28, 0x70, 0x2c, 0x66, 0x2c, 0x74, 0x29, 0x2c, 0x22, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, - 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x66, 0x29, 0x29, 0x29, 0x3b, 0x66, - 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x76, 0x2c, 0x63, - 0x3d, 0x78, 0x3b, 0x63, 0x2d, 0x2d, 0x3b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x26, 0x26, 0x28, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, - 0x5f, 0x65, 0x26, 0x26, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, 0x5f, 0x65, - 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x65, 0x2e, - 0x5f, 0x5f, 0x64, 0x3d, 0x53, 0x5b, 0x63, 0x5d, 0x2e, 0x5f, 0x5f, 0x65, - 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, - 0x29, 0x2c, 0x75, 0x74, 0x28, 0x53, 0x5b, 0x63, 0x5d, 0x2c, 0x53, 0x5b, - 0x63, 0x5d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x69, 0x2c, 0x5f, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6f, 0x3d, 0x30, 0x3b, 0x5f, 0x26, 0x26, - 0x6f, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, - 0x2b, 0x2b, 0x29, 0x28, 0x69, 0x3d, 0x5f, 0x5b, 0x6f, 0x5d, 0x29, 0x26, - 0x26, 0x28, 0x69, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x6e, 0x3d, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x3f, 0x4a, 0x28, 0x69, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x3a, 0x51, 0x28, - 0x65, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x29, 0x29, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 
0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x28, 0x74, 0x2c, 0x6e, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x3d, 0x6e, 0x7c, - 0x7c, 0x5b, 0x5d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x7c, - 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, 0x7c, 0x7c, 0x28, 0x41, - 0x28, 0x74, 0x29, 0x3f, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x4b, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x29, 0x29, 0x3a, 0x6e, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x28, 0x74, 0x2c, - 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x65, 0x2e, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x21, 0x3d, 0x3d, - 0x74, 0x3f, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x42, 0x65, - 0x66, 0x6f, 0x72, 0x65, 0x28, 0x6e, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, - 0x3a, 0x6e, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, - 0x3d, 0x6e, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, - 0x65, 0x7c, 0x7c, 0x74, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x74, 0x42, - 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x2c, 0x6e, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, + 0x3a, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x6e, 0x75, + 0x6d, 0x62, 0x65, 0x72, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x62, 0x69, 0x67, 0x69, 0x6e, 0x74, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, + 0x7c, 0x69, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, + 0x6f, 0x72, 0x3d, 0x3d, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3f, 0x4f, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x29, 0x3a, 0x46, 0x28, 0x69, + 0x29, 0x3f, 0x4f, 0x28, 0x6a, 0x2c, 0x7b, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x72, 0x65, 0x6e, 0x3a, 0x69, 0x7d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3a, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x26, 0x26, 0x69, + 0x2e, 0x5f, 0x5f, 0x62, 0x3e, 0x30, 0x3f, 0x4f, 0x28, 0x69, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x2c, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, + 0x69, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x69, 0x2e, 0x72, 0x65, 0x66, 0x3f, + 0x69, 0x2e, 0x72, 0x65, 0x66, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, + 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x3a, 0x69, 0x29, 0x3f, 0x28, 0x69, 0x2e, + 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x62, 0x2b, 0x31, 0x2c, 0x75, 0x3d, 0x59, 0x28, 0x69, + 0x2c, 0x65, 0x2c, 0x72, 0x3d, 0x5f, 0x2b, 0x63, 0x2c, 0x6c, 0x29, 0x2c, + 0x69, 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x75, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x2d, 0x31, 0x21, 0x3d, 0x3d, 0x75, 0x26, 0x26, 0x28, + 0x6c, 0x2d, 0x2d, 0x2c, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x75, 0x5d, 0x29, + 0x26, 0x26, 0x28, 0x6f, 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x31, 0x33, + 0x31, 0x30, 0x37, 0x32, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, + 0x3d, 0x6f, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6f, + 0x2e, 
0x5f, 0x5f, 0x76, 0x3f, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x75, 0x26, + 0x26, 0x63, 0x2d, 0x2d, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, + 0x5f, 0x75, 0x7c, 0x3d, 0x36, 0x35, 0x35, 0x33, 0x36, 0x29, 0x29, 0x3a, + 0x75, 0x21, 0x3d, 0x3d, 0x72, 0x26, 0x26, 0x28, 0x75, 0x3d, 0x3d, 0x3d, + 0x72, 0x2b, 0x31, 0x3f, 0x63, 0x2b, 0x2b, 0x3a, 0x75, 0x3e, 0x72, 0x3f, + 0x6c, 0x3e, 0x66, 0x2d, 0x72, 0x3f, 0x63, 0x2b, 0x3d, 0x75, 0x2d, 0x72, + 0x3a, 0x63, 0x2d, 0x2d, 0x3a, 0x63, 0x3d, 0x75, 0x3c, 0x72, 0x26, 0x26, + 0x75, 0x3d, 0x3d, 0x72, 0x2d, 0x31, 0x3f, 0x75, 0x2d, 0x72, 0x3a, 0x30, + 0x2c, 0x75, 0x21, 0x3d, 0x3d, 0x5f, 0x2b, 0x63, 0x26, 0x26, 0x28, 0x69, + 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x36, 0x35, 0x35, 0x33, 0x36, 0x29, + 0x29, 0x29, 0x3a, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x29, 0x26, + 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x2e, 0x6b, 0x65, 0x79, + 0x26, 0x26, 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, 0x28, 0x6f, 0x2e, + 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x71, 0x28, 0x6f, 0x29, 0x29, + 0x2c, 0x75, 0x74, 0x28, 0x6f, 0x2c, 0x6f, 0x2c, 0x21, 0x31, 0x29, 0x2c, + 0x65, 0x5b, 0x5f, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6c, 0x2d, + 0x2d, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6c, 0x29, 0x66, 0x6f, 0x72, 0x28, + 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x73, 0x3b, 0x5f, 0x2b, 0x2b, 0x29, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x5f, + 0x5d, 0x29, 0x26, 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, + 0x37, 0x32, 0x26, 0x6f, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x26, 0x26, 0x28, + 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, + 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x71, 0x28, 0x6f, + 0x29, 0x29, 0x2c, 0x75, 0x74, 0x28, 0x6f, 0x2c, 0x6f, 0x29, 0x29, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x28, 0x74, + 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, + 0x69, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, + 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x69, 0x3d, 0x30, 0x3b, + 0x5f, 0x26, 0x26, 0x69, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x5f, 0x5b, 0x69, 0x5d, 0x26, 0x26, + 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x6e, + 0x3d, 0x51, 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x29, + 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x21, + 0x3d, 0x6e, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, + 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x65, 0x2c, 0x6e, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x2c, 0x6e, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x2c, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x5f, 0x3d, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x6f, 0x3d, 0x74, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x72, 0x3d, 0x65, 0x2d, 0x31, 0x2c, - 0x75, 0x3d, 
0x65, 0x2b, 0x31, 0x2c, 0x66, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, - 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x66, - 0x7c, 0x7c, 0x66, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, + 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6e, 0x3d, 0x6e, 0x7c, 0x7c, 0x5b, 0x5d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, + 0x7c, 0x7c, 0x28, 0x46, 0x28, 0x74, 0x29, 0x3f, 0x74, 0x2e, 0x73, 0x6f, + 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x74, 0x29, 0x7b, 0x58, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x29, + 0x29, 0x3a, 0x6e, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x29, + 0x2c, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x59, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x69, 0x3d, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x6f, + 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x72, 0x3d, 0x65, 0x2d, + 0x31, 0x2c, 0x75, 0x3d, 0x65, 0x2b, 0x31, 0x2c, 0x66, 0x3d, 0x6e, 0x5b, + 0x65, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x3d, 0x66, 0x7c, 0x7c, 0x66, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, 0x2e, + 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, + 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x3e, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x66, 0x26, 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, + 0x37, 0x32, 0x26, 0x66, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x3f, 0x31, 0x3a, + 0x30, 0x29, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x72, 0x3e, 0x3d, 0x30, + 0x7c, 0x7c, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x3d, 0x30, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x72, 0x5d, 0x29, 0x26, + 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x26, + 0x66, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, + 0x2e, 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x72, 0x3b, 0x72, 0x2d, 0x2d, 0x7d, 0x69, 0x66, 0x28, 0x75, 0x3c, 0x6e, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x75, 0x5d, 0x29, 0x26, 0x26, 0x30, 0x3d, + 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x26, 0x66, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x3b, 0x69, - 0x66, 0x28, 0x69, 0x3e, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x66, - 0x3f, 0x31, 0x3a, 0x30, 0x29, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x72, - 0x3e, 0x3d, 0x30, 0x7c, 0x7c, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x3b, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x3d, - 0x30, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x72, - 0x5d, 0x29, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, 0x79, - 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x72, 0x3b, 0x72, 0x2d, - 0x2d, 0x7d, 0x69, 0x66, 0x28, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, - 0x67, 0x74, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, - 0x5b, 0x75, 
0x5d, 0x29, 0x26, 0x26, 0x5f, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, - 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, - 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x3b, - 0x75, 0x2b, 0x2b, 0x7d, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x2d, - 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, - 0x69, 0x6e, 0x20, 0x65, 0x29, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, - 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, - 0x79, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x6f, 0x20, 0x69, 0x6e, - 0x20, 0x6e, 0x7c, 0x7c, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x2c, 0x65, 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x3b, - 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x5f, - 0x26, 0x26, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, - 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x5b, 0x6f, - 0x5d, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x6b, 0x65, 0x79, 0x22, - 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x22, 0x63, 0x68, 0x65, 0x63, - 0x6b, 0x65, 0x64, 0x22, 0x3d, 0x3d, 0x3d, 0x6f, 0x7c, 0x7c, 0x65, 0x5b, - 0x6f, 0x5d, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x7c, 0x7c, 0x74, - 0x74, 0x28, 0x74, 0x2c, 0x6f, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x2c, 0x65, - 0x5b, 0x6f, 0x5d, 0x2c, 0x69, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, - 0x7b, 0x22, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, - 0x74, 0x2e, 0x73, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, - 0x79, 0x28, 0x6e, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, - 0x22, 0x22, 0x3a, 0x65, 0x29, 0x3a, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, - 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x22, 0x6e, - 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x56, 0x2e, 0x74, 0x65, 0x73, 0x74, - 0x28, 0x6e, 0x29, 0x3f, 0x65, 0x3a, 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, - 0x73, 0x74, 0x79, 0x6c, 0x65, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, - 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, - 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, - 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, - 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, - 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x69, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, - 0x69, 0x3d, 0x22, 0x22, 0x29, 0x2c, 0x69, 0x29, 0x66, 0x6f, 0x72, 0x28, - 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x69, 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, - 0x69, 0x6e, 0x20, 0x65, 0x7c, 0x7c, 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, - 0x28, 0x65, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, - 0x65, 0x29, 
0x69, 0x26, 0x26, 0x65, 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, - 0x69, 0x5b, 0x6e, 0x5d, 0x7c, 0x7c, 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, - 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, - 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, - 0x3d, 0x3d, 0x6e, 0x5b, 0x31, 0x5d, 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, - 0x3d, 0x28, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, - 0x65, 0x28, 0x2f, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, - 0x2c, 0x22, 0x22, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, 0x74, 0x6f, - 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x69, - 0x6e, 0x20, 0x74, 0x3f, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, - 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x2e, 0x73, 0x6c, 0x69, 0x63, - 0x65, 0x28, 0x32, 0x29, 0x3a, 0x6e, 0x2e, 0x73, 0x6c, 0x69, 0x63, 0x65, - 0x28, 0x32, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x7c, 0x7c, 0x28, 0x74, 0x2e, - 0x6c, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x5b, 0x6e, 0x2b, - 0x6f, 0x5d, 0x3d, 0x65, 0x2c, 0x65, 0x3f, 0x69, 0x7c, 0x7c, 0x74, 0x2e, - 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, - 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, 0x3a, - 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x3a, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, - 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, - 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, 0x3a, 0x6e, - 0x74, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, - 0x28, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, - 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, - 0x4c, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x5f, - 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, 0x28, 0x48, 0x7c, 0x3a, 0x68, - 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x24, 0x2f, - 0x2c, 0x22, 0x73, 0x22, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, - 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, 0x22, 0x21, 0x3d, 0x3d, - 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x72, 0x65, 0x66, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, 0x69, 0x73, 0x74, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, 0x6f, 0x72, 0x6d, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, 0x61, 0x62, 0x49, 0x6e, 0x64, - 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x64, 0x6f, - 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, - 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, - 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, 0x6c, 0x53, 0x70, 0x61, 0x6e, - 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, - 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, - 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, 0x3b, - 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, 0x63, 0x61, 0x74, 0x63, - 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, - 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, - 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x65, 0x26, 0x26, 0x22, 0x2d, - 0x22, 0x21, 
0x3d, 0x3d, 0x6e, 0x5b, 0x34, 0x5d, 0x3f, 0x74, 0x2e, 0x72, - 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, - 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, - 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, - 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x28, 0x77, 0x2e, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x3f, 0x77, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, - 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, - 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, 0x5d, 0x28, 0x77, 0x2e, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x77, 0x2e, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, - 0x66, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x73, 0x2c, 0x63, - 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, - 0x2c, 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x2c, 0x53, 0x2c, 0x78, - 0x2c, 0x43, 0x2c, 0x45, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, - 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x6e, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, - 0x72, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, - 0x6c, 0x3b, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x68, 0x26, 0x26, 0x28, 0x66, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, - 0x75, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, - 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, - 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x5d, 0x29, 0x2c, 0x28, 0x73, 0x3d, 0x77, - 0x2e, 0x5f, 0x5f, 0x62, 0x29, 0x26, 0x26, 0x73, 0x28, 0x6e, 0x29, 0x3b, - 0x74, 0x72, 0x79, 0x7b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x45, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x79, 0x3d, - 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, 0x3d, 0x28, 0x73, - 0x3d, 0x45, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x54, 0x79, - 0x70, 0x65, 0x29, 0x26, 0x26, 0x69, 0x5b, 0x73, 0x2e, 0x5f, 0x5f, 0x63, - 0x5d, 0x2c, 0x67, 0x3d, 0x73, 0x3f, 0x6d, 0x3f, 0x6d, 0x2e, 0x70, 0x72, - 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x73, 0x2e, - 0x5f, 0x5f, 0x3a, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x63, 0x3f, 0x76, - 0x3d, 0x28, 0x63, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x65, 0x2e, - 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, - 0x45, 0x3a, 0x28, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x22, 0x69, 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x3f, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, - 0x65, 0x77, 0x20, 0x45, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x3a, 0x28, 0x6e, - 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x49, - 0x28, 0x79, 0x2c, 0x67, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x72, 
0x75, 0x63, 0x74, 0x6f, 0x72, 0x3d, 0x45, 0x2c, 0x63, 0x2e, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x66, 0x74, 0x29, 0x2c, 0x6d, - 0x26, 0x26, 0x6d, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x63, 0x29, 0x2c, 0x63, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x6e, - 0x3d, 0x69, 0x2c, 0x68, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, - 0x30, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, - 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, 0x28, 0x63, - 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x45, 0x2e, 0x67, 0x65, - 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, - 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, - 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x3d, 0x63, 0x2e, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, - 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, - 0x2c, 0x46, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x45, 0x2e, 0x67, - 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, - 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, 0x29, 0x2c, 0x61, - 0x3d, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, 0x3d, 0x63, - 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x76, - 0x3d, 0x6e, 0x2c, 0x68, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, + 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x3b, 0x75, + 0x2b, 0x2b, 0x7d, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x2d, 0x31, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x22, 0x2d, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x50, + 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x6e, 0x2c, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, 0x29, 0x3a, + 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, + 0x3f, 0x22, 0x22, 0x3a, 0x22, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, + 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, + 0x41, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x28, 0x6e, 0x29, 0x3f, 0x65, 0x3a, + 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, + 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, + 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, + 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, + 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x5f, 0x26, + 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, + 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x2c, + 0x5f, 0x29, 
0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x5f, + 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x7c, 0x7c, + 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, + 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x65, 0x29, 0x66, 0x6f, 0x72, + 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x5f, 0x26, 0x26, 0x65, + 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, 0x5f, 0x5b, 0x6e, 0x5d, 0x7c, 0x7c, + 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, + 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, + 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, + 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x31, 0x5d, + 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, 0x3d, 0x28, 0x6e, 0x3d, 0x6e, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x28, 0x50, 0x6f, + 0x69, 0x6e, 0x74, 0x65, 0x72, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, + 0x29, 0x24, 0x7c, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, + 0x2c, 0x22, 0x24, 0x31, 0x22, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, + 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, + 0x29, 0x69, 0x6e, 0x20, 0x74, 0x3f, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, + 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x2e, 0x73, 0x6c, + 0x69, 0x63, 0x65, 0x28, 0x32, 0x29, 0x3a, 0x6e, 0x2e, 0x73, 0x6c, 0x69, + 0x63, 0x65, 0x28, 0x32, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x7c, 0x7c, 0x28, + 0x74, 0x2e, 0x6c, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x5b, + 0x6e, 0x2b, 0x6f, 0x5d, 0x3d, 0x65, 0x2c, 0x65, 0x3f, 0x5f, 0x3f, 0x65, + 0x2e, 0x75, 0x3d, 0x5f, 0x2e, 0x75, 0x3a, 0x28, 0x65, 0x2e, 0x75, 0x3d, + 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2c, 0x74, + 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, + 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, + 0x3a, 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x29, 0x3a, 0x74, 0x2e, 0x72, 0x65, + 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, + 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, + 0x3a, 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, + 0x69, 0x66, 0x28, 0x69, 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, 0x28, + 0x48, 0x7c, 0x3a, 0x68, 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, 0x61, + 0x6d, 0x65, 0x24, 0x2f, 0x2c, 0x22, 0x73, 0x22, 0x29, 0x3b, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, + 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x72, + 0x65, 0x66, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, 0x69, + 0x73, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, 0x6f, + 0x72, 0x6d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, 0x61, + 0x62, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, + 0x26, 0x22, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x21, + 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, 0x61, + 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, 0x6c, + 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, + 0x72, 0x6f, 0x6c, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x6e, + 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x5b, + 0x6e, 0x5d, 
0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, + 0x22, 0x3a, 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, + 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x65, + 0x26, 0x26, 0x22, 0x2d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x5b, 0x34, 0x5d, + 0x3f, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, + 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, + 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, + 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, + 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x74, 0x2e, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, + 0x74, 0x3c, 0x3d, 0x6e, 0x2e, 0x75, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x74, 0x3d, 0x44, + 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x28, 0x43, 0x2e, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x3f, 0x43, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x74, + 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, 0x2e, + 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, 0x5d, 0x28, 0x43, 0x2e, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x43, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, + 0x2c, 0x73, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x2c, 0x63, 0x2c, + 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, 0x2c, + 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x2c, 0x53, 0x2c, 0x77, 0x2c, + 0x78, 0x2c, 0x45, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, + 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x3b, 0x31, 0x32, 0x38, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x26, + 0x28, 0x66, 0x3d, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x65, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x3d, 0x6e, 0x2e, 0x5f, + 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x5d, 0x29, 0x2c, 0x28, + 0x6c, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x62, 0x29, 0x26, 0x26, 0x6c, 0x28, + 0x6e, 0x29, 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x45, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x66, 0x28, + 0x79, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, 0x3d, + 0x28, 0x6c, 0x3d, 0x45, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x54, 0x79, 0x70, 0x65, 0x29, 0x26, 0x26, 0x5f, 0x5b, 0x6c, 0x2e, 0x5f, + 0x5f, 0x63, 0x5d, 0x2c, 0x67, 0x3d, 0x6c, 0x3f, 0x6d, 0x3f, 0x6d, 0x2e, + 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x6c, 0x2e, 
0x5f, 0x5f, 0x3a, 0x5f, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x63, + 0x3f, 0x76, 0x3d, 0x28, 0x63, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, + 0x65, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x3d, 0x63, 0x2e, + 0x5f, 0x5f, 0x45, 0x3a, 0x28, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x3f, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, + 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x3a, + 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, 0x65, 0x77, + 0x20, 0x49, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3d, 0x45, 0x2c, + 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x66, 0x74, 0x29, + 0x2c, 0x6d, 0x26, 0x26, 0x6d, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x63, 0x29, + 0x2c, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, + 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x63, 0x2e, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x63, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x5f, + 0x5f, 0x6e, 0x3d, 0x5f, 0x2c, 0x68, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x64, + 0x3d, 0x21, 0x30, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, + 0x2c, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, + 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x45, 0x2e, + 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, + 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, + 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x3d, 0x63, 0x2e, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, + 0x73, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, + 0x29, 0x29, 0x2c, 0x4d, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x45, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, - 0x73, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x63, + 0x73, 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, 0x29, + 0x2c, 0x61, 0x3d, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, + 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x5f, + 0x5f, 0x76, 0x3d, 0x6e, 0x2c, 0x68, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, + 0x3d, 0x45, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, + 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, + 0x6c, 0x6c, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, - 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x4d, 0x6f, - 0x75, 0x6e, 0x74, 0x28, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, - 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x5f, - 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x2e, 0x63, 0x6f, - 0x6d, 0x70, 0x6f, 
0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, - 0x75, 0x6e, 0x74, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, 0x2e, 0x67, 0x65, 0x74, - 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x79, - 0x21, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, - 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, 0x69, 0x76, 0x65, 0x50, 0x72, - 0x6f, 0x70, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, - 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, 0x79, 0x2c, 0x67, - 0x29, 0x2c, 0x21, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, 0x28, 0x6e, - 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, - 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x63, - 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, - 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, - 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x7c, 0x7c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, - 0x29, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, - 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, 0x28, 0x63, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, - 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x29, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, 0x2e, 0x5f, - 0x5f, 0x6b, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, 0x29, 0x7d, - 0x29, 0x29, 0x2c, 0x62, 0x3d, 0x30, 0x3b, 0x62, 0x3c, 0x63, 0x2e, 0x5f, - 0x73, 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x62, 0x2b, - 0x2b, 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, - 0x28, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x62, 0x5d, 0x29, 0x3b, 0x63, - 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, - 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, 0x72, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x3b, 0x62, 0x72, 0x65, 0x61, - 0x6b, 0x20, 0x74, 0x7d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, - 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, 0x2e, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, - 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, - 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x2e, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x29, - 0x7d, 0x29, 0x29, 
0x7d, 0x69, 0x66, 0x28, 0x63, 0x2e, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x50, 0x3d, 0x74, - 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x3d, - 0x77, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x3d, 0x30, 0x2c, 0x22, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, - 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, - 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, - 0x31, 0x2c, 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x3d, - 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, - 0x78, 0x3d, 0x30, 0x3b, 0x78, 0x3c, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x78, 0x2b, 0x2b, 0x29, 0x63, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x2e, - 0x5f, 0x73, 0x62, 0x5b, 0x78, 0x5d, 0x29, 0x3b, 0x63, 0x2e, 0x5f, 0x73, - 0x62, 0x3d, 0x5b, 0x5d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x64, 0x6f, - 0x7b, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x26, - 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x3d, 0x63, 0x2e, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, 0x63, 0x2e, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x7d, 0x77, 0x68, - 0x69, 0x6c, 0x65, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x2b, - 0x2b, 0x53, 0x3c, 0x32, 0x35, 0x29, 0x3b, 0x63, 0x2e, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x26, 0x26, 0x28, 0x69, - 0x3d, 0x46, 0x28, 0x46, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, 0x2c, 0x63, - 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, - 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x29, 0x29, 0x2c, 0x68, 0x7c, 0x7c, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, - 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, - 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x64, 0x3d, - 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, - 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x28, 0x61, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x7a, 0x28, 0x74, 0x2c, - 0x41, 0x28, 0x43, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x73, 0x26, - 0x26, 0x73, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, 0x52, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x73, 0x2e, 0x6b, 0x65, 0x79, - 0x3f, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x73, 0x29, 0x3f, 0x43, 0x3a, 0x5b, - 0x43, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, 0x6f, - 0x2c, 0x72, 0x2c, 
0x75, 0x2c, 0x66, 0x2c, 0x6c, 0x29, 0x2c, 0x63, 0x2e, - 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x2e, + 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, + 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, 0x2e, 0x67, + 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, + 0x26, 0x79, 0x21, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, 0x69, 0x76, 0x65, + 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, + 0x63, 0x65, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, 0x79, + 0x2c, 0x67, 0x29, 0x2c, 0x21, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, + 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x21, 0x31, 0x3d, 0x3d, + 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x7c, + 0x7c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, + 0x5f, 0x76, 0x29, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x2e, 0x5f, + 0x5f, 0x76, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, + 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, + 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, + 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x29, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, + 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, + 0x29, 0x7b, 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, + 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x62, 0x3d, 0x30, 0x3b, 0x62, 0x3c, 0x63, + 0x2e, 0x5f, 0x73, 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, + 0x62, 0x2b, 0x2b, 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x62, 0x5d, 0x29, + 0x3b, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, - 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x2c, 0x76, 0x26, - 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, - 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x3b, 0x62, 0x72, + 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, + 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, + 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, 0x2c, 0x63, + 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x63, 
0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, + 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x28, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, + 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, + 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x2c, + 0x64, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x69, 0x66, 0x28, 0x63, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x50, + 0x3d, 0x74, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x31, 0x2c, + 0x6b, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x3d, 0x30, 0x2c, + 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x69, + 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, + 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, + 0x6c, 0x3d, 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x29, 0x2c, 0x77, 0x3d, 0x30, 0x3b, 0x77, 0x3c, 0x63, 0x2e, 0x5f, 0x73, + 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x77, 0x2b, 0x2b, + 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x77, 0x5d, 0x29, 0x3b, 0x63, 0x2e, + 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, + 0x64, 0x6f, 0x7b, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x2c, + 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x6c, 0x3d, 0x63, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, 0x63, 0x2e, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x7d, + 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x26, + 0x26, 0x2b, 0x2b, 0x53, 0x3c, 0x32, 0x35, 0x29, 0x3b, 0x63, 0x2e, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6e, + 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, + 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x26, 0x26, + 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x5f, 0x29, + 0x2c, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x29, 0x29, 0x2c, 0x68, + 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x67, 0x65, + 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, + 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, + 0x64, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, + 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x4a, 0x28, + 0x74, 0x2c, 0x46, 0x28, 0x78, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, + 0x6c, 0x26, 0x26, 0x6c, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, + 0x6a, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6c, 0x2e, 0x6b, + 0x65, 0x79, 0x3f, 
0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x6c, 0x29, 0x3f, 0x78, + 0x3a, 0x5b, 0x78, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, + 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x2c, 0x73, 0x29, 0x2c, + 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, + 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x3d, 0x2d, 0x31, 0x36, 0x31, + 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x26, 0x26, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, + 0x2c, 0x76, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x63, + 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, + 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x66, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, + 0x6c, 0x21, 0x3d, 0x6f, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, + 0x75, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x66, 0x3f, 0x31, + 0x36, 0x30, 0x3a, 0x33, 0x32, 0x2c, 0x6f, 0x5b, 0x6f, 0x2e, 0x69, 0x6e, + 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x75, 0x29, 0x5d, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, + 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, + 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x3a, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x6f, 0x74, 0x28, 0x65, 0x2e, - 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x2c, 0x5f, 0x2c, - 0x6f, 0x2c, 0x72, 0x2c, 0x66, 0x2c, 0x6c, 0x29, 0x3b, 0x28, 0x73, 0x3d, - 0x77, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x29, 0x26, 0x26, 0x73, - 0x28, 0x6e, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, - 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x28, 0x66, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, 0x29, - 0x26, 0x26, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x75, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x21, 0x21, 0x66, 0x2c, 0x6f, 0x5b, 0x6f, - 0x2e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x75, 0x29, 0x5d, - 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x65, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x74, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, - 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x3c, 0x65, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x72, 0x74, 0x28, 0x65, 0x5b, - 0x69, 0x5d, 0x2c, 0x65, 0x5b, 0x2b, 0x2b, 0x69, 0x5d, 0x2c, 0x65, 0x5b, - 0x2b, 0x2b, 0x69, 0x5d, 0x29, 0x3b, 0x77, 0x2e, 0x5f, 0x5f, 0x63, 0x26, - 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x63, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x2c, - 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, - 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, - 0x68, 0x3d, 0x5b, 0x5d, 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, - 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, - 0x7b, 0x74, 0x2e, 0x63, 0x61, 
0x6c, 0x6c, 0x28, 0x6e, 0x29, 0x7d, 0x29, - 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x77, - 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x74, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, - 0x29, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x6f, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, - 0x69, 0x2c, 0x5f, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x29, - 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x2c, 0x73, 0x2c, 0x63, 0x2c, 0x68, - 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x61, 0x3d, 0x6e, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, 0x3d, 0x6e, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x2c, 0x64, 0x3d, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, - 0x73, 0x76, 0x67, 0x22, 0x3d, 0x3d, 0x3d, 0x70, 0x26, 0x26, 0x28, 0x5f, + 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x66, 0x2c, 0x73, 0x29, 0x3b, 0x28, 0x6c, 0x3d, + 0x43, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x29, 0x26, 0x26, 0x6c, + 0x28, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x6e, + 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x30, 0x3b, + 0x5f, 0x3c, 0x65, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x5f, + 0x2b, 0x2b, 0x29, 0x72, 0x74, 0x28, 0x65, 0x5b, 0x5f, 0x5d, 0x2c, 0x65, + 0x5b, 0x2b, 0x2b, 0x5f, 0x5d, 0x2c, 0x65, 0x5b, 0x2b, 0x2b, 0x5f, 0x5d, + 0x29, 0x3b, 0x43, 0x2e, 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x43, 0x2e, 0x5f, + 0x5f, 0x63, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x73, 0x6f, + 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x3d, 0x6e, 0x2e, + 0x5f, 0x5f, 0x68, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, + 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x63, + 0x61, 0x6c, 0x6c, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x65, + 0x28, 0x74, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6f, + 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, + 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x73, 0x2c, 0x6c, 0x2c, 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, + 0x2c, 0x64, 0x2c, 0x76, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2c, 0x79, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, + 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x22, + 0x73, 0x76, 0x67, 0x22, 0x3d, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x28, 0x69, 0x3d, 0x21, 0x30, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, - 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x64, 0x3c, 0x6f, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x64, 0x2b, 0x2b, 0x29, 0x69, 0x66, 0x28, - 0x28, 0x6c, 0x3d, 0x6f, 0x5b, 0x64, 0x5d, 0x29, 0x26, 0x26, 0x22, 0x73, - 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x22, - 0x69, 0x6e, 0x20, 0x6c, 0x3d, 0x3d, 0x21, 0x21, 0x70, 0x26, 0x26, 0x28, - 0x70, 0x3f, 0x6c, 0x2e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x4e, 0x61, 0x6d, - 0x65, 0x3d, 0x3d, 0x3d, 0x70, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x6c, 0x2e, - 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, 0x29, 0x7b, 0x74, - 0x3d, 0x6c, 0x2c, 0x6f, 0x5b, 
0x64, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, - 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x70, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, 0x4e, 0x6f, 0x64, - 0x65, 0x28, 0x61, 0x29, 0x3b, 0x74, 0x3d, 0x5f, 0x3f, 0x64, 0x6f, 0x63, + 0x29, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x6f, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, + 0x69, 0x66, 0x28, 0x28, 0x61, 0x3d, 0x6f, 0x5b, 0x73, 0x5d, 0x29, 0x26, + 0x26, 0x22, 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, + 0x74, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x61, 0x3d, 0x3d, 0x21, 0x21, 0x6d, + 0x26, 0x26, 0x28, 0x6d, 0x3f, 0x61, 0x2e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x3d, 0x3d, 0x6d, 0x3a, 0x33, 0x3d, 0x3d, + 0x3d, 0x61, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, + 0x29, 0x7b, 0x74, 0x3d, 0x61, 0x2c, 0x6f, 0x5b, 0x73, 0x5d, 0x3d, 0x6e, + 0x75, 0x6c, 0x6c, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x69, 0x66, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6d, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, + 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x79, 0x29, 0x3b, 0x74, 0x3d, 0x69, 0x3f, + 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4e, 0x53, + 0x28, 0x22, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, + 0x2e, 0x77, 0x33, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x32, 0x30, 0x30, 0x30, + 0x2f, 0x73, 0x76, 0x67, 0x22, 0x2c, 0x6d, 0x29, 0x3a, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4e, 0x53, 0x28, 0x22, 0x68, - 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x77, 0x33, - 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x32, 0x30, 0x30, 0x30, 0x2f, 0x73, 0x76, - 0x67, 0x22, 0x2c, 0x70, 0x29, 0x3a, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, - 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, - 0x6d, 0x65, 0x6e, 0x74, 0x28, 0x70, 0x2c, 0x61, 0x2e, 0x69, 0x73, 0x26, - 0x26, 0x61, 0x29, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, - 0x3d, 0x21, 0x31, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x3d, 0x70, 0x29, 0x68, 0x3d, 0x3d, 0x3d, 0x61, 0x7c, 0x7c, 0x75, - 0x26, 0x26, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x3d, 0x3d, 0x61, - 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x61, 0x29, - 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, 0x6f, 0x3d, 0x6f, - 0x26, 0x26, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x2e, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, 0x29, 0x2c, 0x73, - 0x3d, 0x28, 0x68, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x7c, - 0x7c, 0x44, 0x29, 0x2e, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, - 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, - 0x54, 0x4d, 0x4c, 0x2c, 0x63, 0x3d, 0x61, 0x2e, 0x64, 0x61, 0x6e, 0x67, - 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, - 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x2c, 0x21, 0x75, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x6e, 0x75, 
[auto-generated diff hunk: several hundred `-`/`+` rows of `0x..` byte values in a header that embeds the web UI's minified JavaScript bundle (a Preact-style render/hooks/context runtime) as a C byte array; the hunk only re-encodes the rebuilt bundle and contains no hand-written changes]
0x2c, 0x77, 0x2e, 0x5f, 0x5f, 0x63, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, - 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, - 0x72, 0x79, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x6f, 0x72, - 0x45, 0x61, 0x63, 0x68, 0x28, 0x52, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x5f, - 0x5f, 0x68, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x66, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, - 0x74, 0x2e, 0x5f, 0x5f, 0x7c, 0x7c, 0x49, 0x74, 0x28, 0x74, 0x29, 0x7d, - 0x29, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x73, 0x29, 0x7b, - 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, - 0x68, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x5b, 0x5d, 0x2c, 0x77, 0x2e, - 0x5f, 0x5f, 0x65, 0x28, 0x73, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x76, 0x29, - 0x7d, 0x7d, 0x29, 0x29, 0x2c, 0x78, 0x74, 0x26, 0x26, 0x78, 0x74, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x2c, 0x77, 0x2e, 0x75, 0x6e, 0x6d, 0x6f, - 0x75, 0x6e, 0x74, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x77, 0x74, 0x26, 0x26, 0x77, 0x74, 0x28, 0x74, - 0x29, 0x3b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, - 0x5f, 0x5f, 0x63, 0x3b, 0x65, 0x26, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x48, - 0x26, 0x26, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x2e, 0x5f, 0x5f, 0x2e, - 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x72, 0x79, - 0x7b, 0x52, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, - 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x7d, 0x7d, 0x29, 0x29, 0x2c, - 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x2c, 0x6e, 0x26, 0x26, 0x77, 0x2e, 0x5f, 0x5f, 0x65, 0x28, 0x6e, 0x2c, - 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x29, 0x7d, 0x3b, 0x76, 0x61, 0x72, - 0x20, 0x4f, 0x74, 0x3d, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x72, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x3b, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, 0x74, 0x28, 0x74, 0x29, 0x7b, - 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6c, 0x65, 0x61, 0x72, - 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x69, 0x29, 0x2c, 0x4f, - 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x6e, 0x69, - 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, - 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, - 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x69, 0x3d, 0x73, 0x65, 0x74, 0x54, - 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, 0x2c, 0x31, 0x30, 0x30, - 0x29, 0x3b, 0x4f, 0x74, 0x26, 0x26, 0x28, 0x6e, 0x3d, 0x72, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, 0x29, 0x29, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x52, 0x74, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x2c, 0x65, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 
0x3b, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x52, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x63, + 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x7d, + 0x7d, 0x29, 0x29, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x48, 0x3d, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6e, 0x26, 0x26, 0x43, 0x2e, 0x5f, 0x5f, + 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x29, 0x7d, + 0x3b, 0x76, 0x61, 0x72, 0x20, 0x4c, 0x74, 0x3d, 0x22, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, + 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, + 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, + 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x74, + 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, + 0x6c, 0x65, 0x61, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, + 0x5f, 0x29, 0x2c, 0x4c, 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, + 0x6c, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, + 0x61, 0x6d, 0x65, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, + 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x5f, 0x3d, + 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, + 0x2c, 0x31, 0x30, 0x30, 0x29, 0x3b, 0x4c, 0x74, 0x26, 0x26, 0x28, 0x6e, + 0x3d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, + 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x52, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, + 0x70, 0x74, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x22, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, + 0x28, 0x29, 0x29, 0x2c, 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 0x70, 0x74, + 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x49, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x21, 0x74, 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x7c, 0x7c, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, + 0x74, 0x5b, 0x65, 0x5d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, 0x28, 0x29, 0x29, 0x2c, - 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x49, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 
0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x74, - 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x21, 0x3d, - 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7c, 0x7c, 0x6e, - 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, 0x74, 0x5b, 0x65, 0x5d, - 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x42, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x3f, - 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x77, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, - 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x77, 0x5b, 0x74, 0x5d, 0x7c, 0x7c, 0x28, - 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, 0x7d, 0x6c, 0x65, 0x74, - 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, 0x3b, 0x7a, 0x74, 0x3d, - 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, 0x28, 0x7b, 0x64, 0x61, - 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, - 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, - 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x74, 0x3d, 0x74, 0x2e, - 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, - 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, - 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, - 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, 0x69, 0x66, 0x28, 0x21, - 0x45, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x29, 0x26, - 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, - 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3a, 0x74, 0x2e, 0x6e, - 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, 0x29, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, - 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x3b, 0x65, 0x6c, 0x73, - 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, - 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x53, - 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x7d, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x79, 0x28, 0x28, 0x29, 0x3d, 0x3e, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x30, 0x3a, - 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, 0x22, 0x3a, 0x74, 0x7c, - 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 
0x2c, 0x5b, 0x5d, 0x29, 0x3b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, - 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, 0x74, 0x22, 0x3b, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, - 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x28, 0x63, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x7b, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, + 0x66, 0x20, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x43, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, + 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x43, 0x5b, 0x74, + 0x5d, 0x7c, 0x7c, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, + 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, + 0x29, 0x7b, 0x69, 0x66, 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, + 0x3b, 0x7a, 0x74, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, + 0x28, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, + 0x3b, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, + 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, + 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, + 0x7d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, + 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, + 0x69, 0x66, 0x28, 0x21, 0x55, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, + 0x28, 0x29, 0x29, 0x26, 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x62, 0x61, 0x73, 0x65, 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3a, 0x74, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, + 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x3d, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, + 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, + 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x28, + 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, + 0x74, 0x3f, 0x30, 0x3a, 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, + 0x22, 0x3a, 0x74, 0x7c, 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 0x2c, 0x5b, + 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, + 0x74, 0x22, 0x3b, 0x4f, 0x62, 0x6a, 
0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, + 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, + 0x65, 0x73, 0x28, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x2c, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, + 0x74, 0x6f, 0x72, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, + 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, + 0x79, 0x70, 0x65, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, + 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x4b, 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, + 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, + 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, - 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, 0x79, 0x70, 0x65, 0x3a, - 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, - 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x4b, - 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3a, 0x7b, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, - 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, 0x68, 0x69, 0x73, - 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, - 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, 0x7d, 0x7d, 0x29, 0x3b, - 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, 0x2c, 0x28, 0x74, 0x2c, + 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, + 0x7d, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, + 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, + 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x2c, 0x65, 0x3d, 0x6e, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, + 0x3d, 0x5f, 0x29, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, + 0x6f, 0x66, 0x20, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, + 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, + 0x74, 0x5b, 0x5f, 0x5d, 0x3d, 0x69, 0x3b, 0x65, 0x5b, 0x5f, 0x5d, 0x3d, + 0x69, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, + 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, + 0x72, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, + 0x74, 0x28, 0x29, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x5f, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x29, 0x7b, + 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, + 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 
0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x5f, 0x2e, + 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, + 0x3b, 0x77, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, + 0x3b, 0x6e, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x5f, 0x2e, + 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x5f, 0x2e, 0x73, 0x65, + 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, + 0x47, 0x74, 0x3d, 0x5f, 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, + 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, + 0x65, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, + 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, + 0x66, 0x65, 0x64, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, + 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, + 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x26, 0x26, 0x28, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, + 0x70, 0x2c, 0x5f, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3b, + 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, + 0x65, 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x66, 0x6f, 0x72, + 0x28, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, + 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x21, 0x28, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, + 0x29, 0x7b, 0x5f, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x6e, 0x5b, 0x65, 0x5d, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x7d, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x6e, 0x3d, 0x7b, 0x7d, 0x3b, 0x65, 0x2e, 0x55, 0x3d, 0x6e, + 0x7d, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, + 0x6e, 0x20, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, + 0x5b, 0x69, 0x5d, 0x2c, 0x72, 0x3d, 0x74, 0x5b, 0x69, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, + 0x29, 0x7b, 0x6f, 0x3d, 0x51, 0x74, 0x28, 0x65, 0x2c, 0x69, 0x2c, 0x72, + 0x2c, 0x5f, 0x29, 0x3b, 0x6e, 0x5b, 0x69, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x5f, 0x29, + 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x69, 0x3d, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x2c, 0x6f, 0x3d, 0x61, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x7b, 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, + 0x3e, 0x7b, 0x6f, 0x2e, 0x76, 0x61, 
0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, + 0x5f, 0x3d, 0x6e, 0x7d, 0x2c, 0x64, 0x3a, 0x77, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x69, 0x66, 0x28, 0x5f, 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, + 0x7b, 0x5f, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, + 0x29, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, + 0x20, 0x69, 0x66, 0x28, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, + 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, + 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, + 0x28, 0x6e, 0x29, 0x7d, 0x7d, 0x29, 0x7d, 0x7d, 0x42, 0x74, 0x28, 0x22, + 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x2c, 0x65, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, - 0x6e, 0x20, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x63, - 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, 0x6c, 0x65, 0x74, 0x20, - 0x5f, 0x3d, 0x65, 0x5b, 0x69, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x20, - 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x63, - 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, 0x6e, 0x2e, 0x5f, 0x5f, - 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, 0x74, 0x5b, 0x69, 0x5d, - 0x3d, 0x5f, 0x3b, 0x65, 0x5b, 0x69, 0x5d, 0x3d, 0x5f, 0x2e, 0x70, 0x65, - 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x72, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, - 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x69, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, - 0x63, 0x3b, 0x69, 0x66, 0x28, 0x69, 0x29, 0x7b, 0x69, 0x2e, 0x5f, 0x5f, - 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, 0x3d, 0x69, 0x2e, 0x5f, - 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x75, - 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3b, 0x53, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x6e, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, 0x3b, 0x6e, 0x2e, 0x63, - 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x2e, 0x5f, 0x5f, 0x24, 0x66, - 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x2e, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, 0x47, 0x74, 0x3d, 0x69, - 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, 0x28, 0x6e, 0x29, 0x7d, - 0x29, 0x3b, 0x71, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x65, 0x22, 0x2c, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, - 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x29, 0x7d, 0x29, - 0x3b, 0x71, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x22, - 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 
0x3e, 0x7b, 0x4a, 0x74, 0x28, - 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, - 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, 0x65, - 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x29, 0x7b, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x2c, 0x69, 0x3d, - 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3b, 0x69, 0x66, 0x28, 0x74, - 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x65, 0x2e, 0x55, 0x3b, - 0x69, 0x66, 0x28, 0x6e, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, 0x6c, 0x65, 0x74, - 0x20, 0x69, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x21, - 0x28, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x29, 0x7b, 0x69, 0x2e, - 0x64, 0x28, 0x29, 0x3b, 0x6e, 0x5b, 0x65, 0x5d, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x7d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x6e, 0x3d, - 0x7b, 0x7d, 0x3b, 0x65, 0x2e, 0x55, 0x3d, 0x6e, 0x7d, 0x66, 0x6f, 0x72, - 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, 0x5b, 0x5f, 0x5d, 0x2c, - 0x72, 0x3d, 0x74, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, 0x29, 0x7b, 0x6f, 0x3d, - 0x51, 0x74, 0x28, 0x65, 0x2c, 0x5f, 0x2c, 0x72, 0x2c, 0x69, 0x29, 0x3b, - 0x6e, 0x5b, 0x5f, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, - 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x69, 0x29, 0x7d, 0x7d, 0x7d, 0x74, - 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, - 0x69, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x6e, - 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, - 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6f, 0x3d, - 0x68, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, - 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x6f, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x69, 0x3d, 0x6e, 0x7d, - 0x2c, 0x64, 0x3a, 0x53, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, - 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x69, 0x5b, 0x6e, - 0x5d, 0x3d, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x29, 0x74, 0x5b, 0x6e, - 0x5d, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, - 0x65, 0x29, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, - 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x3b, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, - 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x7d, - 0x7d, 0x29, 0x7d, 0x7d, 0x71, 0x74, 0x28, 0x22, 0x75, 0x6e, 0x6d, 0x6f, - 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, - 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, - 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, - 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, 0x66, 
0x28, 0x74, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x55, 0x3b, 0x69, - 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x55, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, - 0x74, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, - 0x65, 0x3d, 0x6e, 0x5b, 0x74, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x65, 0x29, - 0x65, 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x7d, 0x65, 0x6c, 0x73, - 0x65, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, - 0x63, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, - 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3d, - 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, 0x2e, 0x64, 0x28, 0x29, - 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x71, 0x74, - 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, - 0x65, 0x2c, 0x69, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x3c, - 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x6e, 0x2e, 0x5f, - 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, - 0x2c, 0x69, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, - 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, - 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, 0x7c, 0x7c, 0x34, 0x26, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x29, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x33, - 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, - 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x69, - 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, - 0x21, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x5b, 0x69, 0x5d, 0x21, 0x3d, - 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, - 0x69, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, - 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x69, 0x66, 0x28, 0x21, 0x28, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x3b, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, - 0x3e, 0x68, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, 0x74, 0x28, 0x74, 0x29, - 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x50, 0x74, 0x28, - 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, - 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, - 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 
0x6e, 0x20, 0x44, 0x74, 0x28, - 0x28, 0x29, 0x3d, 0x3e, 0x79, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6e, 0x2e, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, 0x29, 0x2c, 0x5b, - 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x6e, 0x3d, 0x50, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x48, 0x74, 0x28, 0x28, - 0x29, 0x3d, 0x3e, 0x53, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6e, 0x2e, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, 0x29, 0x2c, 0x5b, 0x5d, - 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, 0x3d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, - 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3b, 0x6e, 0x5b, 0x30, - 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, - 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, - 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x2c, 0x75, 0x3d, 0x6e, - 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x72, - 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, - 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x3b, 0x33, - 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x30, 0x5d, 0x3d, 0x75, 0x3a, - 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, 0x3d, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, - 0x28, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, 0x7d, 0x2c, 0x75, 0x29, - 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, 0x69, 0x5b, 0x31, 0x5d, - 0x3d, 0x69, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, 0x7d, 0x29, 0x5b, 0x6e, - 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, 0x3a, 0x36, 0x3d, 0x3d, - 0x3d, 0x72, 0x3f, 0x69, 0x5b, 0x31, 0x5d, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, - 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, 0x22, 0x3a, 0x72, 0x3f, - 0x28, 0x5f, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x75, - 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, 0x65, 0x2c, 0x5b, 0x22, - 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, 0x29, 0x2c, 0x69, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x5f, 0x29, 0x2c, 0x75, 0x5b, 0x30, 0x5d, - 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, 0x3a, 0x28, 0x6e, 0x5b, - 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, 0x5b, 0x6f, 0x5d, 0x3d, - 0x5f, 0x29, 0x29, 0x3a, 0x69, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x75, - 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, 0x7d, 0x2c, - 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x3b, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x6e, 0x6e, 0x2e, 0x67, - 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, 0x6e, 0x3d, 0x6e, 0x65, - 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, 0x2e, 0x73, 0x65, 0x74, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, 0x29, 0x2c, 0x28, 0x6e, - 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x2e, 0x67, - 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, 0x6e, 0x2e, 0x73, 0x65, - 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, - 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x69, 0x3d, 0x31, 0x2c, 0x5f, 0x3d, - 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 
0x2c, 0x72, 0x3d, 0x5b, 0x30, - 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, - 0x74, 0x7c, 0x7c, 0x28, 0x5f, 0x3d, 0x5f, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, 0x5c, - 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x24, - 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, 0x3f, 0x72, 0x2e, 0x70, - 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, 0x5f, 0x29, 0x3a, 0x33, - 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x5f, 0x29, - 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x33, 0x2c, 0x74, - 0x2c, 0x5f, 0x29, 0x2c, 0x69, 0x3d, 0x32, 0x29, 0x3a, 0x32, 0x3d, 0x3d, - 0x3d, 0x69, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, 0x22, 0x3d, 0x3d, 0x3d, - 0x5f, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, - 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x69, - 0x26, 0x26, 0x5f, 0x26, 0x26, 0x21, 0x74, 0x3f, 0x72, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, 0x30, 0x2c, 0x5f, 0x29, - 0x3a, 0x69, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, 0x28, 0x5f, 0x7c, 0x7c, - 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x26, 0x26, - 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x2c, 0x30, 0x2c, - 0x5f, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, 0x36, 0x29, 0x2c, 0x74, 0x26, - 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x2c, 0x74, - 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x69, 0x3d, 0x36, 0x29, 0x29, 0x2c, - 0x5f, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, 0x30, 0x3b, 0x66, 0x3c, - 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, 0x2b, 0x2b, - 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x26, - 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, 0x29, 0x29, 0x3b, 0x66, - 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x3d, 0x30, 0x3b, 0x6c, - 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3b, 0x6c, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, 0x5b, 0x66, 0x5d, 0x5b, - 0x6c, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x69, 0x3f, 0x22, 0x3c, 0x22, - 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x72, 0x3d, - 0x5b, 0x72, 0x5d, 0x2c, 0x69, 0x3d, 0x33, 0x29, 0x3a, 0x5f, 0x2b, 0x3d, - 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x69, 0x3f, 0x22, 0x2d, 0x2d, 0x22, - 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, - 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x31, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x29, - 0x3a, 0x5f, 0x3d, 0x6e, 0x2b, 0x5f, 0x5b, 0x30, 0x5d, 0x3a, 0x6f, 0x3f, - 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, 0x22, 0x22, 0x3a, 0x5f, - 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, - 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x6f, 0x3d, 0x6e, - 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, - 0x29, 0x2c, 0x69, 0x3d, 0x31, 0x29, 0x3a, 0x69, 0x26, 0x26, 0x28, 0x22, - 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x69, 0x3d, 0x35, 0x2c, - 0x65, 0x3d, 0x5f, 0x2c, 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x22, 0x2f, - 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, 0x69, 0x3c, 0x35, 0x7c, - 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, 0x5b, 0x66, 0x5d, 0x5b, - 0x6c, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x33, - 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x28, 0x72, 0x3d, 0x72, 0x5b, 0x30, - 0x5d, 0x29, 0x2c, 0x69, 0x3d, 0x72, 0x2c, 0x28, 0x72, 0x3d, 0x72, 0x5b, - 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 
0x68, 0x28, 0x32, 0x2c, 0x30, - 0x2c, 0x69, 0x29, 0x2c, 0x69, 0x3d, 0x30, 0x29, 0x3a, 0x22, 0x20, 0x22, - 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x74, 0x22, 0x3d, 0x3d, - 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, - 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, - 0x75, 0x28, 0x29, 0x2c, 0x69, 0x3d, 0x32, 0x29, 0x3a, 0x5f, 0x2b, 0x3d, - 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x21, - 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x69, 0x3d, - 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x7d, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, 0x2c, 0x72, 0x7d, 0x28, - 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, 0x72, 0x67, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, 0x29, 0x2e, 0x6c, 0x65, - 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, 0x3a, 0x6e, 0x5b, 0x30, - 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, 0x3d, 0x65, 0x6e, 0x2e, - 0x62, 0x69, 0x6e, 0x64, 0x28, 0x57, 0x29, 0x3b, 0x65, 0x78, 0x70, 0x6f, - 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x52, 0x20, 0x61, 0x73, 0x20, 0x46, - 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x63, 0x20, 0x61, 0x73, - 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x65, 0x20, 0x61, 0x73, - 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x2c, 0x79, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, - 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, - 0x57, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, - 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, - 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, 0x66, 0x2c, 0x53, 0x20, - 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x57, 0x20, - 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, 0x61, 0x73, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, 0x20, 0x68, 0x79, - 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x45, 0x20, 0x61, 0x73, 0x20, 0x69, - 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x2c, 0x6c, 0x74, 0x20, 0x61, 0x73, 0x20, 0x72, 0x65, 0x6e, - 0x64, 0x65, 0x72, 0x2c, 0x68, 0x20, 0x61, 0x73, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x2c, 0x4b, 0x20, 0x61, 0x73, 0x20, 0x74, 0x6f, 0x43, - 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2c, 0x72, 0x20, - 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x64, - 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x61, - 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, - 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, - 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, 0x20, 0x61, 0x73, 0x20, - 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, - 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, 0x20, 0x61, 0x73, 0x20, - 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x6f, 0x75, 0x6e, - 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 
0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, - 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, 0x75, 0x74, 0x45, 0x66, - 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, 0x74, 0x20, 0x61, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, 0x63, 0x65, 0x72, 0x2c, - 0x50, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, - 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, - 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, - 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x45, 0x66, 0x66, 0x65, - 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, - 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a + 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, 0x66, 0x28, + 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, + 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x55, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x74, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, + 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x5b, 0x74, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x65, 0x29, 0x65, 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, + 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, + 0x5f, 0x24, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, + 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, + 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x3d, 0x3e, 0x7b, 0x69, + 0x66, 0x28, 0x5f, 0x3c, 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x5f, + 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, + 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, + 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, + 0x7c, 0x7c, 0x34, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, + 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x69, 0x66, 0x28, 0x33, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x24, 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, + 0x20, 0x6e, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x29, 0x69, 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x74, 0x5b, + 0x5f, 0x5d, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x5f, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 
0x72, + 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, + 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x69, 0x66, 0x28, 0x21, 0x28, 0x5f, 0x20, 0x69, + 0x6e, 0x20, 0x74, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, + 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x3b, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, + 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, + 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x61, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, + 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, + 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, + 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6d, 0x28, 0x28, 0x29, + 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, + 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, + 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, + 0x48, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x77, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, + 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, + 0x3b, 0x6e, 0x5b, 0x30, 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, + 0x2c, 0x75, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, + 0x5d, 0x7c, 0x3d, 0x72, 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, + 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, + 0x6f, 0x5d, 0x3b, 0x33, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x30, + 0x5d, 0x3d, 0x75, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, + 0x31, 0x5d, 0x3d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, + 0x73, 0x69, 0x67, 0x6e, 0x28, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, + 0x7d, 0x2c, 0x75, 0x29, 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, + 0x5f, 0x5b, 0x31, 0x5d, 0x3d, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, + 0x7d, 0x29, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, + 0x3a, 0x36, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x31, 0x5d, 0x5b, + 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, + 0x22, 0x3a, 0x72, 0x3f, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, + 0x6c, 0x79, 0x28, 0x75, 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, + 0x65, 0x2c, 0x5b, 0x22, 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, + 0x29, 0x2c, 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x29, 0x2c, + 0x75, 0x5b, 0x30, 0x5d, 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, + 0x3a, 0x28, 0x6e, 0x5b, 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3d, 0x69, 0x29, 0x29, 0x3a, 0x5f, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x75, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 
0x6e, + 0x20, 0x5f, 0x7d, 0x2c, 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, + 0x61, 0x70, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x65, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, + 0x6e, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, + 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, + 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, + 0x29, 0x2c, 0x28, 0x6e, 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2c, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, + 0x6e, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, + 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x3d, + 0x31, 0x2c, 0x69, 0x3d, 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 0x2c, + 0x72, 0x3d, 0x5b, 0x30, 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x28, 0x69, 0x3d, 0x69, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, + 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, + 0x5c, 0x73, 0x2a, 0x24, 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, + 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, + 0x69, 0x29, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x74, + 0x7c, 0x7c, 0x69, 0x29, 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x33, 0x2c, 0x74, 0x2c, 0x69, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, + 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, + 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, + 0x75, 0x73, 0x68, 0x28, 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, + 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x69, 0x26, 0x26, 0x21, 0x74, 0x3f, + 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, + 0x30, 0x2c, 0x69, 0x29, 0x3a, 0x5f, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, + 0x28, 0x69, 0x7c, 0x7c, 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, + 0x5f, 0x29, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x5f, 0x2c, 0x30, 0x2c, 0x69, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, 0x36, + 0x29, 0x2c, 0x74, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x5f, 0x2c, 0x74, 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, + 0x36, 0x29, 0x29, 0x2c, 0x69, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, + 0x30, 0x3b, 0x66, 0x3c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x66, 0x2b, 0x2b, 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, + 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, + 0x29, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x73, + 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, + 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x5f, + 0x3f, 0x22, 0x3c, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, + 0x29, 0x2c, 0x72, 0x3d, 0x5b, 0x72, 0x5d, 0x2c, 0x5f, 0x3d, 0x33, 0x29, + 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x5f, 0x3f, + 0x22, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x3e, + 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x5f, 0x3d, 0x31, 0x2c, 0x69, + 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x69, 0x3d, 0x6e, 0x2b, 0x69, 0x5b, 
0x30, + 0x5d, 0x3a, 0x6f, 0x3f, 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, + 0x22, 0x22, 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, + 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x3f, 0x6f, 0x3d, 0x6e, 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x31, 0x29, 0x3a, 0x5f, + 0x26, 0x26, 0x28, 0x22, 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, + 0x5f, 0x3d, 0x35, 0x2c, 0x65, 0x3d, 0x69, 0x2c, 0x69, 0x3d, 0x22, 0x22, + 0x29, 0x3a, 0x22, 0x2f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, + 0x5f, 0x3c, 0x35, 0x7c, 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, + 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, + 0x28, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x72, + 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2c, 0x5f, 0x3d, 0x72, 0x2c, 0x28, + 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x32, 0x2c, 0x30, 0x2c, 0x5f, 0x29, 0x2c, 0x5f, 0x3d, 0x30, 0x29, + 0x3a, 0x22, 0x20, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, + 0x74, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, + 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, + 0x26, 0x26, 0x22, 0x21, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, + 0x26, 0x28, 0x5f, 0x3d, 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, + 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, + 0x2c, 0x72, 0x7d, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, + 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, + 0x29, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, + 0x3a, 0x6e, 0x5b, 0x30, 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, + 0x3d, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x4c, 0x29, 0x3b, + 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, + 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x6a, 0x20, + 0x61, 0x73, 0x20, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, + 0x68, 0x20, 0x61, 0x73, 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, + 0x5f, 0x20, 0x61, 0x73, 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6d, 0x20, 0x61, 0x73, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, + 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, + 0x65, 0x78, 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x52, + 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, + 0x66, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, + 0x61, 0x73, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x6c, 0x74, 0x20, 0x61, + 0x73, 0x20, 0x68, 0x79, 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x55, 0x20, + 0x61, 0x73, 0x20, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x43, 0x20, 0x61, 0x73, 0x20, 0x6f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x61, 0x20, 0x61, 0x73, + 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x58, 0x20, 0x61, 
0x73, + 0x20, 0x74, 0x6f, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, + 0x79, 0x2c, 0x75, 0x20, 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x6b, 0x65, 0x64, 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, + 0x75, 0x74, 0x65, 0x64, 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, + 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x76, 0x65, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x50, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, + 0x75, 0x74, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, + 0x63, 0x65, 0x72, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, + 0x65, 0x52, 0x65, 0x66, 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a }; -unsigned int index_js_len = 22472; +unsigned int index_js_len = 22800; diff --git a/examples/server/public/index.html b/examples/server/public/index.html index 07d779d20..b059c75f2 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -427,7 +427,7 @@ } if (data.timings) { - llamaStats.value = data.timings; + llamaStats.value = data; } } @@ -880,7 +880,7 @@ } return html` - ${llamaStats.value.predicted_per_token_ms.toFixed()}ms per token, ${llamaStats.value.predicted_per_second.toFixed(2)} tokens per second + ${llamaStats.value.tokens_predicted} predicted, ${llamaStats.value.tokens_cached} cached, ${llamaStats.value.timings.predicted_per_token_ms.toFixed()}ms per token, ${llamaStats.value.timings.predicted_per_second.toFixed(2)} tokens per second ` } From f3f62f0d835d559e80714bbeb05d03125574e3dd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 2 Jan 2024 21:07:47 +0200 Subject: [PATCH 352/859] metal : optimize ggml_mul_mat_id (faster Mixtral PP) (#4725) * ggml : disable fast-math for Metal (cmake build only) ggml-ci * metal : fix Metal API debug warnings * cmake : add -fno-inline for Metal build (#4545) * metal : fix API debug warnings * metal : fix compile warnings * metal : use uint64_t for strides * cmake : rename option to LLAMA_METAL_SHADER_DEBUG * metal : fix mat-vec Q8_0 kernel for BS > 1 * metal : normalize mat-vec kernel signatures * cmake : respect LLAMA_QKK_64 option * metal : fix mat-vec Q4_K kernel for QK_K == 64 * metal : optimizing ggml_mul_mat_id (wip) * metal : minor fix * metal : opt mul_mm_id --- ggml-metal.m | 31 ++++--- ggml-metal.metal | 205 +++++++++++++++++++++++++++++++++++++++-------- 2 files 
changed, 190 insertions(+), 46 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index cd9d00456..7a369b55e 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1657,6 +1657,10 @@ void ggml_metal_graph_compute( } }; + if (ggml_is_quantized(src0t)) { + GGML_ASSERT(ne00 >= nth0*nth1); + } + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1715,6 +1719,9 @@ void ggml_metal_graph_compute( // TODO: make this more general GGML_ASSERT(n_as <= 8); + // max size of the src1ids array in the kernel stack + GGML_ASSERT(ne11 <= 512); + struct ggml_tensor * src2 = gf->nodes[i]->src[2]; const int64_t ne20 = src2 ? src2->ne[0] : 0; @@ -1732,9 +1739,6 @@ void ggml_metal_graph_compute( GGML_ASSERT(!ggml_is_transposed(src2)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(ne20 % 32 == 0); - // !!!!!!!!! TODO: this assert is probably required but not sure! - //GGML_ASSERT(ne20 >= 64); GGML_ASSERT(src1t == GGML_TYPE_F32); const uint r2 = ne12/ne22; @@ -1742,22 +1746,22 @@ void ggml_metal_graph_compute( // find the break-even point where the matrix-matrix kernel becomes more efficient compared // to the matrix-vector kernel - int ne11_mm_min = 1; + int ne11_mm_min = n_as; const int idx = ((int32_t *) dst->op_params)[0]; // batch size GGML_ASSERT(ne01 == ne11); - const int64_t _ne1 = 1; // kernel_mul_mm_impl needs a reference in constant memory - // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel // !!! // TODO: for now, always use mat-vec kernels until we figure out how to improve the // indirect matrix multiplication // !!! - if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && _ne1 > ne11_mm_min) { + if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && + ne20 % 32 == 0 && ne20 >= 64 && + ne11 > ne11_mm_min) { switch (src2->type) { case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f32_f32]; break; case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f16_f32]; break; @@ -1787,7 +1791,7 @@ void ggml_metal_graph_compute( [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:11]; [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:12]; [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:13]; - [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:14]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:14]; [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; [encoder setBytes:&r2 length:sizeof(r2) atIndex:16]; [encoder setBytes:&r3 length:sizeof(r3) atIndex:17]; @@ -1805,8 +1809,7 @@ void ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:8192 atIndex:0]; - // TODO: processing one row at a time (ne11 -> 1) is not efficient - [encoder dispatchThreadgroups:MTLSizeMake( (_ne1 + 31)/32, (ne21 + 63)/64, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + [encoder dispatchThreadgroups:MTLSizeMake((ne11 + 31)/32, (ne21 + 63)/64, n_as*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; } else { int nth0 = 32; int nth1 = 1; @@ -1889,11 +1892,17 @@ void ggml_metal_graph_compute( } break; default: { - GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); GGML_ASSERT(false && "not implemented"); } }; + if (ggml_is_quantized(src2t)) { + GGML_ASSERT(ne20 >= nth0*nth1); + } + + const int64_t _ne1 = 1; // kernels needs a reference in constant memory + 
[encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; diff --git a/ggml-metal.metal b/ggml-metal.metal index 1d5b8f6f4..9aa7b502a 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -846,7 +846,7 @@ inline float block_q_n_dot_y(device const block_q5_1 * qb_curr, float sumy, thre #define N_SIMDGROUP 2 // number of SIMD groups in a thread group //Note: This is a template, but strictly speaking it only applies to // quantizations where the block size is 32. It also does not -// giard against the number of rows not being divisible by +// guard against the number of rows not being divisible by // N_DST, so this is another explicit assumption of the implementation. template void mul_vec_q_n_f32_impl( @@ -3973,6 +3973,131 @@ void kernel_mul_mm_impl(device const uchar * src0, } } +// same as kernel_mul_mm_impl, but src1 and dst are accessed via indices stored in src1ids +template +void kernel_mul_mm_id_impl( + device const uchar * src0, + device const uchar * src1, + thread short * src1ids, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne02, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + int64_t ne1, + constant uint & r2, + constant uint & r3, + threadgroup uchar * shared_memory, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + threadgroup half * sa = (threadgroup half *)(shared_memory); + threadgroup float * sb = (threadgroup float *)(shared_memory + 4096); + + const uint r0 = tgpig.y; + const uint r1 = tgpig.x; + const uint im = tgpig.z; + + if (r1 * BLOCK_SIZE_N >= ne1) return; + + // if this block is of 64x32 shape or smaller + short n_rows = (ne0 - r0 * BLOCK_SIZE_M < BLOCK_SIZE_M) ? (ne0 - r0 * BLOCK_SIZE_M) : BLOCK_SIZE_M; + short n_cols = (ne1 - r1 * BLOCK_SIZE_N < BLOCK_SIZE_N) ? (ne1 - r1 * BLOCK_SIZE_N) : BLOCK_SIZE_N; + + // a thread shouldn't load data outside of the matrix + short thread_row = ((short)tiitg/THREAD_PER_ROW) < n_rows ? ((short)tiitg/THREAD_PER_ROW) : n_rows - 1; + short thread_col = ((short)tiitg/THREAD_PER_COL) < n_cols ? 
((short)tiitg/THREAD_PER_COL) : n_cols - 1; + + simdgroup_half8x8 ma[4]; + simdgroup_float8x8 mb[2]; + simdgroup_float8x8 c_res[8]; + for (int i = 0; i < 8; i++){ + c_res[i] = make_filled_simdgroup_matrix(0.f); + } + + short il = (tiitg % THREAD_PER_ROW); + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + uint offset0 = (i12/r2)*nb02 + (i13/r3)*(nb02*ne02); + ushort offset1 = il/nl; + + device const block_q * x = (device const block_q *)(src0 + (r0 * BLOCK_SIZE_M + thread_row) * nb01 + offset0) + offset1; + device const float * y = (device const float *)(src1 + + nb12 * im + + nb11 * src1ids[r1 * BLOCK_SIZE_N + thread_col] + + nb10 * (BLOCK_SIZE_K / THREAD_PER_COL * (tiitg % THREAD_PER_COL))); + + for (int loop_k = 0; loop_k < ne00; loop_k += BLOCK_SIZE_K) { + // load data and store to threadgroup memory + half4x4 temp_a; + dequantize_func(x, il, temp_a); + threadgroup_barrier(mem_flags::mem_threadgroup); + + for (int i = 0; i < 16; i++) { + *(sa + SG_MAT_SIZE * ((tiitg / THREAD_PER_ROW / 8) \ + + (tiitg % THREAD_PER_ROW) * 16 + (i / 8) * 8) \ + + (tiitg / THREAD_PER_ROW) % 8 + (i & 7) * 8) = temp_a[i/4][i%4]; + } + + *(threadgroup float2x4 *)(sb + (tiitg % THREAD_PER_COL) * 8 * 32 + 8 * (tiitg / THREAD_PER_COL)) = *((device float2x4 *)y); + + il = (il + 2 < nl) ? il + 2 : il % 2; + x = (il < 2) ? x + (2+nl-1)/nl : x; + y += BLOCK_SIZE_K; + + threadgroup_barrier(mem_flags::mem_threadgroup); + + // load matrices from threadgroup memory and conduct outer products + threadgroup half * lsma = (sa + THREAD_MAT_M * SG_MAT_SIZE * (sgitg % 2)); + threadgroup float * lsmb = (sb + THREAD_MAT_N * SG_MAT_SIZE * (sgitg / 2)); + + for (int ik = 0; ik < BLOCK_SIZE_K / 8; ik++) { + for (int i = 0; i < 4; i++) { + simdgroup_load(ma[i],lsma + SG_MAT_SIZE * i); + } + simdgroup_barrier(mem_flags::mem_none); + for (int i = 0; i < 2; i++) { + simdgroup_load(mb[i],lsmb + SG_MAT_SIZE * i); + } + + lsma += BLOCK_SIZE_M / SG_MAT_ROW * SG_MAT_SIZE; + lsmb += BLOCK_SIZE_N / SG_MAT_ROW * SG_MAT_SIZE; + + for (int i = 0; i < 8; i++){ + simdgroup_multiply_accumulate(c_res[i], mb[i/4], ma[i%4], c_res[i]); + } + } + } + + { + threadgroup_barrier(mem_flags::mem_threadgroup); + threadgroup float * temp_str = ((threadgroup float *)shared_memory) \ + + 32 * (sgitg&1) + (16 * (sgitg>>1)) * BLOCK_SIZE_M; + for (int i = 0; i < 8; i++) { + simdgroup_store(c_res[i], temp_str + 8 * (i%4) + 8 * BLOCK_SIZE_M * (i/4), BLOCK_SIZE_M); + } + + threadgroup_barrier(mem_flags::mem_threadgroup); + + device float * C = dst + (BLOCK_SIZE_M * r0) + im*ne1*ne0; + if (sgitg == 0) { + for (int i = 0; i < n_rows; i++) { + for (int j = tiitg; j < n_cols; j += BLOCK_SIZE_N) { + *(C + i + src1ids[j + r1*BLOCK_SIZE_N] * ne0) = *(temp_str + i + j * BLOCK_SIZE_M); + } + } + } + } +} + template kernel void kernel_mul_mm(device const uchar * src0, device const uchar * src1, @@ -4019,7 +4144,7 @@ template( - src0[id], - src1 + bid*nb11, - (device float *) (dst + bid*nb1), + for (int64_t i1 = 0; i1 < ne1; i1++) { + if (((device int32_t *) (ids + i1*nbi1))[idx] == id) { + src1ids[_ne1++] = i1; + } + } + + kernel_mul_mm_id_impl( + src0s[id], + src1, + src1ids, + dst, ne00, ne02, nb01, @@ -4069,7 +4204,7 @@ kernel void kernel_mul_mm_id( nb11, nb12, ne0, - ne1, + _ne1, r2, r3, shared_memory, @@ -4158,7 +4293,7 @@ template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4471,7 +4606,7 @@ kernel void 
kernel_mul_mv_id_q4_0_f32( kernel void kernel_mul_mv_id_q4_1_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4515,7 +4650,7 @@ kernel void kernel_mul_mv_id_q4_1_f32( mul_vec_q_n_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4534,7 +4669,7 @@ kernel void kernel_mul_mv_id_q4_1_f32( kernel void kernel_mul_mv_id_q5_0_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4578,7 +4713,7 @@ kernel void kernel_mul_mv_id_q5_0_f32( mul_vec_q_n_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4597,7 +4732,7 @@ kernel void kernel_mul_mv_id_q5_0_f32( kernel void kernel_mul_mv_id_q5_1_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4641,7 +4776,7 @@ kernel void kernel_mul_mv_id_q5_1_f32( mul_vec_q_n_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4660,7 +4795,7 @@ kernel void kernel_mul_mv_id_q5_1_f32( kernel void kernel_mul_mv_id_q2_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4704,7 +4839,7 @@ kernel void kernel_mul_mv_id_q2_K_f32( kernel_mul_mv_q2_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4723,7 +4858,7 @@ kernel void kernel_mul_mv_id_q2_K_f32( kernel void kernel_mul_mv_id_q3_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4767,7 +4902,7 @@ kernel void kernel_mul_mv_id_q3_K_f32( kernel_mul_mv_q3_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4786,7 +4921,7 @@ kernel void kernel_mul_mv_id_q3_K_f32( kernel void kernel_mul_mv_id_q4_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4830,7 +4965,7 @@ kernel void kernel_mul_mv_id_q4_K_f32( kernel_mul_mv_q4_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4849,7 +4984,7 @@ kernel void kernel_mul_mv_id_q4_K_f32( kernel void kernel_mul_mv_id_q5_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4893,7 +5028,7 @@ kernel void kernel_mul_mv_id_q5_K_f32( kernel_mul_mv_q5_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, @@ -4912,7 +5047,7 @@ kernel void kernel_mul_mv_id_q5_K_f32( kernel void kernel_mul_mv_id_q6_K_f32( device const char * ids, device const char * src1, - device uchar * dst, + device float * dst, constant uint64_t & 
nbi1, constant int64_t & ne00, constant int64_t & ne01, @@ -4956,7 +5091,7 @@ kernel void kernel_mul_mv_id_q6_K_f32( kernel_mul_mv_q6_K_f32_impl( src0[id], (device const float *) (src1 + bid*nb11), - (device float *) ( dst + bid*nb1), + dst + bid*ne0, ne00, ne01, ne02, From f2eb19bd8bc9f5730d6e05d7a52a9e19bf5ac099 Mon Sep 17 00:00:00 2001 From: Justin Parker Date: Wed, 3 Jan 2024 03:43:19 -0500 Subject: [PATCH 353/859] server : throw an error when `slot unavailable` (#4741) --- examples/server/public/completion.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/examples/server/public/completion.js b/examples/server/public/completion.js index 6e2b99565..baaec1d60 100644 --- a/examples/server/public/completion.js +++ b/examples/server/public/completion.js @@ -95,6 +95,15 @@ export async function* llama(prompt, params = {}, config = {}) { break; } } + if (result.error) { + result.error = JSON.parse(result.error); + if (result.error.content.includes('slot unavailable')) { + // Throw an error to be caught by upstream callers + throw new Error('slot unavailable'); + } else { + console.error(`llama.cpp error: ${result.error.content}`); + } + } if (result.error) { result.error = JSON.parse(result.error); console.error(`llama.cpp error: ${result.error.content}`); From 5f66ebca9c41a17385341da4b553a8eb5f07edee Mon Sep 17 00:00:00 2001 From: Guillaume Wenzek Date: Fri, 29 Dec 2023 18:07:03 +0100 Subject: [PATCH 354/859] ggml : extend ggml_get_rows, ggml_repeat, ggml_concat (ggml/639) * add more int ops * ggml_compute_forward_dup_bytes * add tests * PR comments * tests : minor indentations --------- Co-authored-by: Georgi Gerganov --- ggml.c | 166 ++++++++++++++++++++++++++++++++++++- tests/test-backend-ops.cpp | 42 ++++++++-- 2 files changed, 198 insertions(+), 10 deletions(-) diff --git a/ggml.c b/ggml.c index bcec200f6..b124f14cc 100644 --- a/ggml.c +++ b/ggml.c @@ -4766,8 +4766,11 @@ struct ggml_tensor * ggml_get_rows( } // TODO: implement non F32 return - //struct ggml_tensor * result = ggml_new_tensor_2d(ctx, a->type, a->ne[0], b->ne[0]); - struct ggml_tensor * result = ggml_new_tensor_4d(ctx, GGML_TYPE_F32, a->ne[0], b->ne[0], b->ne[1], b->ne[2]); + enum ggml_type type = GGML_TYPE_F32; + if (a->type == GGML_TYPE_I32) { + type = a->type; + } + struct ggml_tensor * result = ggml_new_tensor_4d(ctx, type, a->ne[0], b->ne[0], b->ne[1], b->ne[2]); result->op = GGML_OP_GET_ROWS; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; @@ -6938,14 +6941,165 @@ static void ggml_compute_forward_dup_f32( } } +// A simplified version of ggml_compute_forward_dup that doesn't do float upcasting, and just plain old memcpy. 
+static void ggml_compute_forward_dup_bytes( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + struct ggml_tensor * dst) { + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); + GGML_ASSERT(src0->type == dst->type); + + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + return; + } + + if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst)) { + ggml_compute_forward_dup_same_cont(params, src0, dst); + return; + } + + GGML_TENSOR_UNARY_OP_LOCALS; + + const size_t type_size = ggml_type_size(src0->type); + const int ith = params->ith; // thread index + const int nth = params->nth; // number of threads + + + // parallelize by rows + const int nr = ne01; + // number of rows per thread + const int dr = (nr + nth - 1) / nth; + // row range for this thread + const int ir0 = dr * ith; + const int ir1 = MIN(ir0 + dr, nr); + + if (src0->type == dst->type && + ne00 == ne0 && + nb00 == type_size && nb0 == type_size) { + // copy by rows + const size_t rs = ne00 * type_size; + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + for (int64_t i01 = ir0; i01 < ir1; i01++) { + memcpy( + ((char *) dst->data + i01*nb1 + i02*nb2 + i03*nb3), + ((char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03), + rs); + } + } + } + return; + } + + if (ggml_is_contiguous(dst)) { + size_t id = 0; + char * dst_ptr = (char *) dst->data; + const size_t rs = ne00 * type_size; + + if (nb00 == type_size) { + // src0 is contigous on first dimension, copy by rows + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int64_t i01 = ir0; i01 < ir1; i01++) { + const char * src0_ptr = (char *) src0->data + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, src0_ptr, rs); + id += rs; + } + id += rs * (ne01 - ir1); + } + } + } else { + //printf("%s: this is not optimal - fix me\n", __func__); + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + id += rs * ir0; + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = (char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03; + memcpy(dst_ptr + id, src0_ptr, type_size); + + id += type_size; + } + } + id += rs * (ne01 - ir1); + } + } + } + + return; + } + + // dst counters + + int64_t i10 = 0; + int64_t i11 = 0; + int64_t i12 = 0; + int64_t i13 = 0; + + for (int64_t i03 = 0; i03 < ne03; i03++) { + for (int64_t i02 = 0; i02 < ne02; i02++) { + i10 += ne00 * ir0; + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + for (int64_t i01 = ir0; i01 < ir1; i01++) { + for (int64_t i00 = 0; i00 < ne00; i00++) { + const char * src0_ptr = ((char *) src0->data + i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03); + char * dst_ptr = ((char *) dst->data + i10*nb0 + i11*nb1 + i12*nb2 + i13*nb3); + + memcpy(dst_ptr, src0_ptr, type_size); + + if (++i10 == ne0) { + i10 = 0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } + i10 += ne00 * (ne01 - ir1); + while (i10 >= ne0) { + i10 -= ne0; + if (++i11 == ne1) { + i11 = 0; + if (++i12 == ne2) { + i12 = 0; + if (++i13 == ne3) { + i13 = 0; + } + } + } + } + } + } +} + static void ggml_compute_forward_dup( const struct ggml_compute_params * params, const struct ggml_tensor * src0, struct ggml_tensor * dst) { - if 
(ggml_is_contiguous(src0) && ggml_is_contiguous(dst) && src0->type == dst->type) { - ggml_compute_forward_dup_same_cont(params, src0, dst); + if (src0->type == dst->type) { + ggml_compute_forward_dup_bytes(params, src0, dst); return; } + switch (src0->type) { case GGML_TYPE_F16: { @@ -8404,10 +8558,12 @@ static void ggml_compute_forward_repeat( struct ggml_tensor * dst) { switch (src0->type) { case GGML_TYPE_F16: + case GGML_TYPE_I16: { ggml_compute_forward_repeat_f16(params, src0, dst); } break; case GGML_TYPE_F32: + case GGML_TYPE_I32: { ggml_compute_forward_repeat_f32(params, src0, dst); } break; @@ -8550,6 +8706,7 @@ static void ggml_compute_forward_concat( struct ggml_tensor* dst) { switch (src0->type) { case GGML_TYPE_F32: + case GGML_TYPE_I32: { ggml_compute_forward_concat_f32(params, src0, src1, dst); } break; @@ -10674,6 +10831,7 @@ static void ggml_compute_forward_get_rows( ggml_compute_forward_get_rows_f16(params, src0, src1, dst); } break; case GGML_TYPE_F32: + case GGML_TYPE_I32: { ggml_compute_forward_get_rows_f32(params, src0, src1, dst); } break; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index eff063b2d..44412cb94 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -58,6 +58,9 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m int64_t hist[16]; ggml_quantize_chunk(tensor->type, data.data(), dataq.data(), 0, size, hist); ggml_backend_tensor_set(tensor, dataq.data(), 0, dataq.size()); + } else if (tensor->type == GGML_TYPE_I8 || tensor->type == GGML_TYPE_I16 || tensor->type == GGML_TYPE_I32) { + // This is going to create some weird integers though. + ggml_backend_tensor_set(tensor, data.data(), 0, ggml_nbytes(tensor)); } else { GGML_ASSERT(false); } @@ -87,8 +90,13 @@ static std::vector tensor_to_float(const ggml_tensor * t) { tv.push_back(*(float *) &buf[i]); } else if (t->type == GGML_TYPE_I32) { tv.push_back((float)*(int32_t *) &buf[i]); + } else if (t->type == GGML_TYPE_I16) { + tv.push_back((float)*(int16_t *) &buf[i]); + } else if (t->type == GGML_TYPE_I8) { + tv.push_back((float)*(int8_t *) &buf[i]); } else if (quantized) { - tt.to_float(&buf[i], vq.data(), bs); + std::vector vq(ggml_blck_size(t->type)); + tt.to_float(&buf[i], vq.data(), ggml_blck_size(t->type)); tv.insert(tv.end(), vq.begin(), vq.end()); } else { GGML_ASSERT(false); @@ -661,17 +669,26 @@ struct test_repeat : public test_case { struct test_dup : public test_case { const ggml_type type; const std::array ne; + const std::array permute; + bool _use_permute; std::string vars() override { - return VARS_TO_STR2(type, ne); + std::string v = VARS_TO_STR2(type, ne); + if (_use_permute) v += "," + VAR_TO_STR(permute); + return v; } test_dup(ggml_type type = GGML_TYPE_F32, - std::array ne = {10, 10, 10, 1}) - : type(type), ne(ne) {} + std::array ne = {10, 10, 10, 1}, + std::array permute = {0, 0, 0, 0}) + : type(type), ne(ne), permute(permute), + _use_permute(permute[0] + permute[1] + permute[2] + permute[3] > 0) {} ggml_tensor * build_graph(ggml_context * ctx) override { ggml_tensor * src = ggml_new_tensor(ctx, type, 4, ne.data()); + if (_use_permute) { + src = ggml_permute(ctx, src, permute[0], permute[1], permute[2], permute[3]); + } ggml_tensor * out = ggml_dup(ctx, src); return out; } @@ -1450,14 +1467,26 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op } } } + for (int b : {1, 7}) { + for (bool v : {false, true}) { + test_cases.emplace_back(new test_get_rows(GGML_TYPE_I32, 256, 5, 4, b, 
v)); + } + } test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {2, 1, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 2, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 2, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 2})); + test_cases.emplace_back(new test_repeat(GGML_TYPE_I32, {10, 10, 10, 10}, {2, 1, 1, 1})); + test_cases.emplace_back(new test_repeat(GGML_TYPE_I16, {10, 10, 10, 10}, {1, 1, 1, 2})); - test_cases.emplace_back(new test_dup()); + test_cases.emplace_back(new test_dup(GGML_TYPE_F32)); + test_cases.emplace_back(new test_dup(GGML_TYPE_F16)); + test_cases.emplace_back(new test_dup(GGML_TYPE_I32)); + test_cases.emplace_back(new test_dup(GGML_TYPE_I16)); + test_cases.emplace_back(new test_dup(GGML_TYPE_I16, {10, 8, 3, 1}, {0, 2, 1, 3})); + test_cases.emplace_back(new test_dup(GGML_TYPE_I16, {10, 8, 3, 1}, {1, 2, 0, 3})); for (ggml_type type : all_types) { test_cases.emplace_back(new test_cpy(GGML_TYPE_F32, type, {256, 10, 10, 1})); @@ -1565,7 +1594,8 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_alibi()); test_cases.emplace_back(new test_im2col()); - test_cases.emplace_back(new test_concat()); + test_cases.emplace_back(new test_concat(GGML_TYPE_F32)); + test_cases.emplace_back(new test_concat(GGML_TYPE_I32)); for (ggml_sort_order order : {GGML_SORT_ASC, GGML_SORT_DESC}) { test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {8, 1, 1, 1}, order)); From ab62fc3e5520f5a143c36cb23c269f11aa4dafd6 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 11:25:54 +0200 Subject: [PATCH 355/859] scripts : fix sync order + metal sed --- scripts/sync-ggml-am.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 91478f177..248cf1023 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -27,7 +27,7 @@ echo "Syncing ggml changes since commit $lc" cd $SRC_GGML git log --oneline $lc..HEAD -git log --oneline $lc..HEAD | grep -v "(llama/[0-9]*)" | cut -d' ' -f1 > $SRC_LLAMA/ggml-commits +git log --oneline $lc..HEAD --reverse | grep -v "(llama/[0-9]*)" | cut -d' ' -f1 > $SRC_LLAMA/ggml-commits if [ ! 
-s $SRC_LLAMA/ggml-commits ]; then rm -v $SRC_LLAMA/ggml-commits @@ -87,7 +87,6 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then # src/ggml-impl.h -> ggml-impl.h # src/ggml-metal.h -> ggml-metal.h # src/ggml-metal.m -> ggml-metal.m - # src/ggml-metal.metal -> ggml-metal.metal # src/ggml-mpi.h -> ggml-mpi.h # src/ggml-mpi.c -> ggml-mpi.c # src/ggml-opencl.cpp -> ggml-opencl.cpp @@ -114,7 +113,6 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then -e 's/src\/ggml-impl\.h/ggml-impl.h/g' \ -e 's/src\/ggml-metal\.h/ggml-metal.h/g' \ -e 's/src\/ggml-metal\.m/ggml-metal.m/g' \ - -e 's/src\/ggml-metal\.metal/ggml-metal.metal/g' \ -e 's/src\/ggml-mpi\.h/ggml-mpi.h/g' \ -e 's/src\/ggml-mpi\.c/ggml-mpi.c/g' \ -e 's/src\/ggml-opencl\.cpp/ggml-opencl.cpp/g' \ From 289313716ff7ccf6aee284f686a0fe8cbc7714af Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 11:35:46 +0200 Subject: [PATCH 356/859] metal : add kernel_get_rows_i32 ggml-ci --- ggml-metal.m | 4 ++++ ggml-metal.metal | 29 +++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index 7a369b55e..7aa92c14c 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -87,6 +87,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(get_rows_q4_K); GGML_METAL_DECL_KERNEL(get_rows_q5_K); GGML_METAL_DECL_KERNEL(get_rows_q6_K); + GGML_METAL_DECL_KERNEL(get_rows_i32); GGML_METAL_DECL_KERNEL(rms_norm); GGML_METAL_DECL_KERNEL(group_norm); GGML_METAL_DECL_KERNEL(norm); @@ -377,6 +378,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(get_rows_q4_K); GGML_METAL_ADD_KERNEL(get_rows_q5_K); GGML_METAL_ADD_KERNEL(get_rows_q6_K); + GGML_METAL_ADD_KERNEL(get_rows_i32); GGML_METAL_ADD_KERNEL(rms_norm); GGML_METAL_ADD_KERNEL(group_norm); GGML_METAL_ADD_KERNEL(norm); @@ -499,6 +501,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(get_rows_q4_K); GGML_METAL_DEL_KERNEL(get_rows_q5_K); GGML_METAL_DEL_KERNEL(get_rows_q6_K); + GGML_METAL_DEL_KERNEL(get_rows_i32); GGML_METAL_DEL_KERNEL(rms_norm); GGML_METAL_DEL_KERNEL(group_norm); GGML_METAL_DEL_KERNEL(norm); @@ -1978,6 +1981,7 @@ void ggml_metal_graph_compute( case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q4_K]; break; case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_K]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q6_K]; break; + case GGML_TYPE_I32: [encoder setComputePipelineState:ctx->pipeline_get_rows_i32]; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index 9aa7b502a..a7d3f9efa 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -3829,6 +3829,35 @@ kernel void kernel_get_rows_f16( } } +kernel void kernel_get_rows_i32( + device const void * src0, + device const char * src1, + device int32_t * dst, + constant int64_t & ne00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb1, + constant uint64_t & nb2, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint3 tptg [[threads_per_threadgroup]]) { + const int64_t i10 = tgpig.x; + const int64_t i11 = tgpig.y; + + const int64_t r = ((device int32_t *) ((device char *) src1 + i11*nb11 + i10*nb10))[0]; + + const int64_t i02 = i11; + + for (int ind = tiitg; ind < ne00; ind += tptg.x) { + ((device int32_t *) ((device char *) dst + i11*nb2 + 
i10*nb1))[ind] = + ((device int32_t *) ((device char *) src0 + r*nb01 + i02*nb02))[ind]; + } +} + + #define BLOCK_SIZE_M 64 // 8 simdgroup matrices from matrix A #define BLOCK_SIZE_N 32 // 4 simdgroup matrices from matrix B #define BLOCK_SIZE_K 32 From 75e3fd85814c367b55aea11e7bb38cb7b82c6aa0 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 11:37:44 +0200 Subject: [PATCH 357/859] sync : ggml ggml-ci --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 5b6a440f7..2105a8df2 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -df098ea908764cba4a4889a1cbe7b026b2d31a14 +5b6f3aeba051be8926cb921b8ba529ff990608bf From d55356d3baa58a6c3a9171cb67a67094b9aa9dff Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 13:01:44 +0200 Subject: [PATCH 358/859] cuda : mark I16 and I32 ops as unsupported ggml-ci --- ggml-cuda.cu | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 8c2712308..2e759d43e 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -10039,14 +10039,22 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten } return false; } break; + case GGML_OP_DUP: + case GGML_OP_REPEAT: + case GGML_OP_CONCAT: + { + ggml_type src0_type = op->src[0]->type; + if (src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16) { + return true; + } + return false; + } break; case GGML_OP_NONE: case GGML_OP_RESHAPE: case GGML_OP_VIEW: case GGML_OP_PERMUTE: case GGML_OP_TRANSPOSE: case GGML_OP_NORM: - case GGML_OP_REPEAT: - case GGML_OP_DUP: case GGML_OP_ADD: case GGML_OP_MUL: case GGML_OP_DIV: @@ -10063,7 +10071,6 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_SUM_ROWS: case GGML_OP_ARGSORT: case GGML_OP_ACC: - case GGML_OP_CONCAT: case GGML_OP_GROUP_NORM: case GGML_OP_UPSCALE: case GGML_OP_PAD: From 7bed7eba359b0fa8e575345dc5467a46b4ba509f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 3 Jan 2024 14:18:46 +0200 Subject: [PATCH 359/859] cuda : simplify expression Co-authored-by: slaren --- ggml-cuda.cu | 5 +---- scripts/sync-ggml.last | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 2e759d43e..52d3cc6a6 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -10044,10 +10044,7 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten case GGML_OP_CONCAT: { ggml_type src0_type = op->src[0]->type; - if (src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16) { - return true; - } - return false; + return src0_type != GGML_TYPE_I32 && src0_type != GGML_TYPE_I16; } break; case GGML_OP_NONE: case GGML_OP_RESHAPE: diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 2105a8df2..354246a26 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -5b6f3aeba051be8926cb921b8ba529ff990608bf +3fd01e00e40583ccd4b393a7c6502d6a4455a1d5 From ece9a45e8ffb73ad461c792720c2fec28b0137bc Mon Sep 17 00:00:00 2001 From: Ashraful Islam Date: Wed, 3 Jan 2024 11:30:02 -0600 Subject: [PATCH 360/859] swift : update Package.swift to use ggml as dependency (#4691) * updates the package.swift to use ggml as dependency * changes the ggml package url src to ggerganov --- Package.swift | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/Package.swift b/Package.swift index 18d610d69..e33a4ff46 100644 --- a/Package.swift +++ 
b/Package.swift @@ -13,21 +13,17 @@ let package = Package( products: [ .library(name: "llama", targets: ["llama"]), ], + dependencies: [ + .package(url: "https://github.com/ggerganov/ggml.git", .branch("master")) + ], targets: [ .target( name: "llama", + dependencies: ["ggml"], path: ".", exclude: [], sources: [ - "ggml.c", "llama.cpp", - "ggml-alloc.c", - "ggml-backend.c", - "ggml-quants.c", - "ggml-metal.m", - ], - resources: [ - .process("ggml-metal.metal") ], publicHeadersPath: "spm-headers", cSettings: [ From cb1e2818e0e12ec99f7236ec5d4f3ffd8bcc2f4a Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Wed, 3 Jan 2024 18:53:40 +0100 Subject: [PATCH 361/859] train : fix typo in overlapping-samples help msg (#4758) This commit fixes a typo in the help message for the --overlapping-samples option. Signed-off-by: Daniel Bevenius --- common/train.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/train.cpp b/common/train.cpp index dcf9614e4..e6f2f7a2f 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -1107,7 +1107,7 @@ void print_common_train_usage(int /*argc*/, char ** /*argv*/, const struct train fprintf(stderr, " --sample-start STR Sets the starting point for samples after the specified pattern. If empty use every token position as sample start. (default '%s')\n", params->sample_start.c_str()); fprintf(stderr, " --include-sample-start Include the sample start in the samples. (default off)\n"); fprintf(stderr, " --escape process sample start escapes sequences (\\n, \\r, \\t, \\', \\\", \\\\)\n"); - fprintf(stderr, " --overlapping-samples Samples my overlap, will include sample-start of second and following samples. When off, samples will end at begin of next sample. (default off)\n"); + fprintf(stderr, " --overlapping-samples Samples may overlap, will include sample-start of second and following samples. When off, samples will end at begin of next sample. (default off)\n"); fprintf(stderr, " --fill-with-next-samples Samples shorter than context length will be followed by the next (shuffled) samples. (default off)\n"); fprintf(stderr, " --separate-with-eos When fill-with-next-samples, insert end-of-sequence token between samples.%s\n", params->separate_with_eos ? " (default)" : ""); fprintf(stderr, " --separate-with-bos When fill-with-next-samples, insert begin-of-sequence token between samples.%s\n", params->separate_with_bos ? 
" (default)" : ""); From 46cea79e1f32499bb24b9fab12123cd386e96728 Mon Sep 17 00:00:00 2001 From: singularity <12184989+singularity-s0@users.noreply.github.com> Date: Thu, 4 Jan 2024 15:58:16 +0800 Subject: [PATCH 362/859] llama.swiftui : fix build of ggml.metallib (#4754) * metal: fix metal backend init failure in swiftui * metal: build ggml.metallib instead of copy src * llama.swift : remove debug flags from metallib build --------- Co-authored-by: Georgi Gerganov --- .../llama.swiftui.xcodeproj/project.pbxproj | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index 2e6159928..7bf4489a2 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -9,7 +9,6 @@ /* Begin PBXBuildFile section */ 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; - 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; @@ -24,8 +23,25 @@ 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; + F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; /* End PBXBuildFile section */ +/* Begin PBXBuildRule section */ + F1FE20DB2B465C2100B45541 /* PBXBuildRule */ = { + isa = PBXBuildRule; + compilerSpec = com.apple.compilers.proxy.script; + fileType = sourcecode.metal; + inputFiles = ( + ); + isEditable = 1; + outputFiles = ( + "${DERIVED_FILES_DIR}/ggml-metal.air", + "${DERIVED_FILES_DIR}/ggml.metallib", + ); + script = "# metal\nxcrun metal -c \"${INPUT_FILE_PATH}\" -o \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE}.air\"\nxcrun metallib -o \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE%-metal}.metallib\" \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE}.air\"\n"; + }; +/* End PBXBuildRule section */ + /* Begin PBXFileReference section */ 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; 542376072B0D9BFB008E6A1C /* 
ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; @@ -190,6 +206,7 @@ 8A1C83712AC328BD0096AF73 /* Resources */, ); buildRules = ( + F1FE20DB2B465C2100B45541 /* PBXBuildRule */, ); dependencies = ( ); @@ -241,7 +258,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - 542378792ACE3F3500834A7B /* ggml-metal.metal in Resources */, + F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */, 8A3F84242AC4C891005E2EE8 /* models in Resources */, 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, From dc891b7f7a23158d54f9383790b92c79cc5906c1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 4 Jan 2024 10:12:26 +0200 Subject: [PATCH 363/859] ggml : include stdlib.h before intrin.h (#4736) --- ggml-impl.h | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-impl.h b/ggml-impl.h index 1f5610a86..2faced080 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -5,6 +5,7 @@ // GGML internal header #include +#include // load `stdlib.h` before other headers to work around MinGW bug: https://sourceforge.net/p/mingw-w64/bugs/192/ #include #include #include // memcpy From e5804313a1edaf00726ed0b96ecced07accbf50c Mon Sep 17 00:00:00 2001 From: Michael Coppola Date: Thu, 4 Jan 2024 03:17:09 -0500 Subject: [PATCH 364/859] server : fix options in README.md (#4765) * fix examples/server/README.md * minor : fix whitespace --------- Co-authored-by: Georgi Gerganov --- examples/server/README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 718a7e064..243e66991 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -168,6 +168,12 @@ node index.js `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:`. In this case, `[img-12]` will be replaced by the embeddings of the image with id `12` in the following `image_data` array: `{..., "image_data": [{"data": "", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. + `slot_id`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot (default: -1) + + `cache_prompt`: Save the prompt and generation for avoid reprocess entire prompt if a part of this isn't change (default: false) + + `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime) + *Result JSON:* Note: When using streaming mode (`stream`) only `content` and `stop` will be returned until end of completion. @@ -198,12 +204,6 @@ node index.js `truncated`: Boolean indicating if the context size was exceeded during generation, i.e. the number of tokens provided in the prompt (`tokens_evaluated`) plus tokens generated (`tokens predicted`) exceeded the context size (`n_ctx`) - `slot_id`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot (default: -1) - - `cache_prompt`: Save the prompt and generation for avoid reprocess entire prompt if a part of this isn't change (default: false) - - `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. 
[See more](#change-system-prompt-on-runtime) - - **POST** `/tokenize`: Tokenize a given text. *Options:* From 3c0b585561d74a56977cf3a3844535ecc9e37972 Mon Sep 17 00:00:00 2001 From: singularity <12184989+singularity-s0@users.noreply.github.com> Date: Thu, 4 Jan 2024 16:22:38 +0800 Subject: [PATCH 365/859] llama.swiftui : support loading custom model from file picker (#4767) * swiftui: support load model from file picker * swiftui: remove trailing whitespace --- .../llama.swiftui.xcodeproj/project.pbxproj | 4 ++ .../llama.swiftui/UI/ContentView.swift | 2 + .../llama.swiftui/UI/LoadCustomButton.swift | 44 +++++++++++++++++++ 3 files changed, 50 insertions(+) create mode 100644 examples/llama.swiftui/llama.swiftui/UI/LoadCustomButton.swift diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index 7bf4489a2..a70750a22 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -23,6 +23,7 @@ 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; + F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */; }; F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; /* End PBXBuildFile section */ @@ -68,6 +69,7 @@ 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; + F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LoadCustomButton.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -182,6 +184,7 @@ children = ( 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */, 8A1C83782AC328BD0096AF73 /* ContentView.swift */, + F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */, ); path = UI; sourceTree = ""; @@ -274,6 +277,7 @@ files = ( 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, + F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */, 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index 147e0c63b..7c81ea256 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -103,6 +103,8 @@ struct ContentView: View { ContentView.cleanupModelCaches() 
llamaState.cacheCleared = true } + + LoadCustomButton(llamaState: llamaState) } .padding(.top, 4) .font(.system(size: 12)) diff --git a/examples/llama.swiftui/llama.swiftui/UI/LoadCustomButton.swift b/examples/llama.swiftui/llama.swiftui/UI/LoadCustomButton.swift new file mode 100644 index 000000000..4315dbe4f --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/UI/LoadCustomButton.swift @@ -0,0 +1,44 @@ +import SwiftUI +import UniformTypeIdentifiers + +struct LoadCustomButton: View { + @ObservedObject private var llamaState: LlamaState + @State private var showFileImporter = false + + init(llamaState: LlamaState) { + self.llamaState = llamaState + } + + var body: some View { + VStack { + Button(action: { + showFileImporter = true + }) { + Text("Load Custom Model") + } + } + .fileImporter( + isPresented: $showFileImporter, + allowedContentTypes: [UTType(filenameExtension: "gguf", conformingTo: .data)!], + allowsMultipleSelection: false + ) { result in + switch result { + case .success(let files): + files.forEach { file in + let gotAccess = file.startAccessingSecurityScopedResource() + if !gotAccess { return } + + do { + try llamaState.loadModel(modelUrl: file.absoluteURL) + } catch let err { + print("Error: \(err.localizedDescription)") + } + + file.stopAccessingSecurityScopedResource() + } + case .failure(let error): + print(error) + } + } + } +} From a91928014fcf51fe297823fcff0788de4f14206e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 4 Jan 2024 09:43:23 +0100 Subject: [PATCH 366/859] Print backend name on test-backend-ops failure (#4751) --- tests/test-backend-ops.cpp | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 44412cb94..b79de7a7d 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -392,15 +392,21 @@ struct test_case { struct callback_userdata { bool ok; double max_err; + ggml_backend_t backend1; + ggml_backend_t backend2; }; callback_userdata ud { true, max_nmse_err(), + backend1, + backend2 }; auto callback = [](int index, ggml_tensor * t1, ggml_tensor * t2, void * user_data) -> bool { callback_userdata * ud = (callback_userdata *) user_data; + const char * bn1 = ggml_backend_name(ud->backend1); + const char * bn2 = ggml_backend_name(ud->backend2); if (t1->op == GGML_OP_NONE) { // sentinels must be unchanged @@ -422,7 +428,7 @@ struct test_case { for (size_t i = 0; i < f1.size(); i++) { // check for nans if (std::isnan(f1[i]) || std::isnan(f2[i])) { - printf("[%s] NaN at index %zu (%f %f) ", ggml_op_desc(t1), i, f1[i], f2[i]); + printf("[%s] NaN at index %zu (%s=%f %s=%f) ", ggml_op_desc(t1), i, bn1, f1[i], bn2, f2[i]); ud->ok = false; return true; } @@ -430,12 +436,12 @@ struct test_case { if (isinf_or_max(f1[i]) || isinf_or_max(f2[i])) { if (isinf_or_max(f1[i]) && isinf_or_max(f2[i])) { if (std::signbit(f1[i]) != std::signbit(f2[i])) { - printf("[%s] inf sign mismatch: %f %f ", ggml_op_desc(t1), f1[i], f2[i]); + printf("[%s] inf sign mismatch: %s=%f %s=%f ", ggml_op_desc(t1), bn1, f1[i], bn2, f2[i]); ud->ok = false; return true; } } else { - printf("[%s] inf mismatch: %f %f ", ggml_op_desc(t1), f1[i], f2[i]); + printf("[%s] inf mismatch: %s=%f %s=%f ", ggml_op_desc(t1), bn1, f1[i], bn2, f2[i]); ud->ok = false; return true; } From 012cf349aec8ffb47c9def5dc018240fa3721e8b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 4 Jan 2024 19:56:33 +0200 Subject: [PATCH 367/859] server : send token probs for "stream == 
false" (#4714) --- examples/server/server.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index e45ea809a..d1469fb08 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1265,7 +1265,7 @@ struct llama_server_context { std::vector probs_output = {}; const std::vector to_send_toks = llama_tokenize(ctx, tkn.text_to_send, false); - size_t probs_pos = std::min(slot.sent_token_probs_index, slot.generated_token_probs.size()); + size_t probs_pos = std::min(slot.sent_token_probs_index, slot.generated_token_probs.size()); size_t probs_stop_pos = std::min(slot.sent_token_probs_index + to_send_toks.size(), slot.generated_token_probs.size()); if (probs_pos < probs_stop_pos) { @@ -1325,7 +1325,7 @@ struct llama_server_context { probs = std::vector( slot.generated_token_probs.begin(), - slot.generated_token_probs.begin() + slot.sent_token_probs_index); + slot.generated_token_probs.end()); } res.result_json["completion_probabilities"] = probs_vector_to_json(ctx, probs); } From b3a7c20b5c035250257d2b62851c379b159c899a Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Thu, 4 Jan 2024 20:45:37 +0100 Subject: [PATCH 368/859] finetune : remove unused includes (#4756) This commit removes unused includes from finetune.cpp. Signed-off-by: Daniel Bevenius --- examples/finetune/finetune.cpp | 6 ------ 1 file changed, 6 deletions(-) diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index e0520f64c..eaca42fc1 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -3,15 +3,9 @@ #include "llama.h" #include "common.h" #include "train.h" -#include #include -#include -#include #include -#include #include -#include -#include #include #include From 3681f22443d917e7328456b69c276d6927dafeec Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 5 Jan 2024 15:11:10 +0200 Subject: [PATCH 369/859] examples : add few-shot translation example (#4783) --- examples/base-translate.sh | 56 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100755 examples/base-translate.sh diff --git a/examples/base-translate.sh b/examples/base-translate.sh new file mode 100755 index 000000000..50fba025c --- /dev/null +++ b/examples/base-translate.sh @@ -0,0 +1,56 @@ +#!/bin/bash +# +# Few-shot translation example. +# Requires a base model (i.e. no fine-tuned or instruct models). 
+# +# Usage: +# +# cd llama.cpp +# make -j +# +# ./examples/base-translate.sh "" +# + +if [ $# -ne 2 ]; then + echo "Usage: ./base-translate.sh \"\"" + exit 1 +fi + +ftmp="__llama.cpp_example_tmp__.txt" +trap "rm -f $ftmp" EXIT + +echo "Translate from English to French: + +=== + +sea otter, peppermint, plush girafe: + +sea otter => loutre de mer +peppermint => menthe poivrée +plush girafe => girafe peluche + +=== + +violin + +violin => violon + +=== + +phone, computer, mouse, keyboard: + +phone => téléphone +computer => ordinateur +mouse => souris +keyboard => clavier + +=== +" > $ftmp + +echo "$2 +" >> $ftmp + +model=$1 + +# generate the most likely continuation, run on the CPU until the string "===" is found +./main -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -ngl 0 -r "===" From c1d7cb28d3fed97fbc95fc3c43f0c5e2113e546c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 5 Jan 2024 15:18:21 +0200 Subject: [PATCH 370/859] ggml : do not sched_yield when calling BLAS (#4761) * ggml : do not sched_yield when calling BLAS ggml-ci * ggml : fix do_yield logic ggml-ci * ggml : simplify do_yield logic ggml-ci --- ggml.c | 41 ++++++++++++++--------------------------- 1 file changed, 14 insertions(+), 27 deletions(-) diff --git a/ggml.c b/ggml.c index b124f14cc..62f0f18ef 100644 --- a/ggml.c +++ b/ggml.c @@ -9704,10 +9704,10 @@ static void ggml_compute_forward_group_norm( #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) // helper function to determine if it is better to use BLAS or not // for large matrices, BLAS is faster -static bool ggml_compute_forward_mul_mat_use_blas( - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - struct ggml_tensor * dst) { +static bool ggml_compute_forward_mul_mat_use_blas(struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + //const int64_t ne00 = src0->ne[0]; //const int64_t ne01 = src0->ne[1]; @@ -9787,7 +9787,7 @@ static void ggml_compute_forward_mul_mat( #endif #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - if (ggml_compute_forward_mul_mat_use_blas(src0, src1, dst)) { + if (ggml_compute_forward_mul_mat_use_blas(dst)) { if (params->ith != 0) { return; } @@ -16301,24 +16301,6 @@ static int ggml_get_n_tasks(struct ggml_tensor * node, int n_threads) { //n_tasks = MIN(n_threads, MAX(1, nr0/128)); //printf("nr0 = %8d, nr1 = %8d, nr0*nr1 = %8d, n_tasks%d\n", nr0, nr1, nr0*nr1, n_tasks); - -#if defined(GGML_USE_CUBLAS) - if (ggml_cuda_can_mul_mat(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning - } -#elif defined(GGML_USE_CLBLAST) - if (ggml_cl_can_mul_mat(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning - } -#endif -#if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - if (ggml_compute_forward_mul_mat_use_blas(node->src[0], node->src[1], node)) { - n_tasks = 1; // TODO: this actually is doing nothing - // the threads are still spinning - } -#endif } break; case GGML_OP_MUL_MAT_ID: { @@ -16491,6 +16473,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { state->shared->node_n += 1; return (thread_ret_t) GGML_EXIT_ABORTED; } + if (atomic_fetch_sub(&state->shared->n_active, 1) == 1) { // all other threads are finished and spinning // do finalize and init here so we don't have synchronize again @@ -16556,14 +16539,18 @@ static thread_ret_t 
ggml_graph_compute_thread(void * data) { } else { // wait for other threads to finish const int last = node_n; + + const bool do_yield = last < 0 || cgraph->nodes[last]->op == GGML_OP_MUL_MAT; + while (true) { // TODO: this sched_yield can have significant impact on the performance - either positive or negative // depending on the workload and the operating system. // since it is not clear what is the best approach, it should potentially become user-configurable // ref: https://github.com/ggerganov/ggml/issues/291 -#if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - sched_yield(); -#endif + // UPD: adding the do_yield flag seems to resolve the issue universally + if (do_yield) { + sched_yield(); + } node_n = atomic_load(&state->shared->node_n); if (node_n != last) break; @@ -16642,7 +16629,7 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } else #endif #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) - if (ggml_compute_forward_mul_mat_use_blas(node->src[0], node->src[1], node)) { + if (ggml_compute_forward_mul_mat_use_blas(node)) { if (node->src[0]->type != GGML_TYPE_F32) { // here we need memory just for single 2D matrix from src0 cur = ggml_type_size(GGML_TYPE_F32)*(node->src[0]->ne[0]*node->src[0]->ne[1]); From 1bf681f90ef4cf37b36e6d604d3e30fc57eda650 Mon Sep 17 00:00:00 2001 From: Finn Voorhees Date: Wed, 3 Jan 2024 08:39:43 -0500 Subject: [PATCH 371/859] ggml : add error handling to graph_compute (whisper/1714) --- ggml-backend-impl.h | 2 +- ggml-backend.c | 10 +++++++--- ggml-backend.h | 2 +- ggml-cuda.cu | 4 +++- ggml-metal.h | 2 +- ggml-metal.m | 9 +++++---- 6 files changed, 18 insertions(+), 11 deletions(-) diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index 05859935a..ca21b4743 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -90,7 +90,7 @@ extern "C" { void (*graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); // compute graph without a plan - void (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); + bool (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); // check if the backend supports an operation bool (*supports_op)(ggml_backend_t backend, const struct ggml_tensor * op); diff --git a/ggml-backend.c b/ggml-backend.c index 2c3752067..53e741cb8 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -195,11 +195,14 @@ void ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_ ggml_backend_synchronize(backend); } -void ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { - backend->iface.graph_compute(backend, cgraph); +bool ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { + if (!backend->iface.graph_compute(backend, cgraph)) { + return false; + } // TODO: optional sync ggml_backend_synchronize(backend); + return true; } bool ggml_backend_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { @@ -597,7 +600,7 @@ static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_bac GGML_UNUSED(backend); } -static void ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_cplan cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads); @@ -611,6 +614,7 @@ static void 
ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_c cplan.work_data = cpu_ctx->work_data; ggml_graph_compute(cgraph, &cplan); + return true; } static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { diff --git a/ggml-backend.h b/ggml-backend.h index a9d2fddd7..85ff67b0e 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -58,7 +58,7 @@ extern "C" { GGML_API void ggml_backend_graph_plan_free (ggml_backend_t backend, ggml_backend_graph_plan_t plan); GGML_API void ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan); - GGML_API void ggml_backend_graph_compute (ggml_backend_t backend, struct ggml_cgraph * cgraph); + GGML_API bool ggml_backend_graph_compute (ggml_backend_t backend, struct ggml_cgraph * cgraph); GGML_API bool ggml_backend_supports_op (ggml_backend_t backend, const struct ggml_tensor * op); // tensor copy between different backends diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 52d3cc6a6..10c21615e 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -9910,7 +9910,7 @@ static void ggml_backend_cuda_graph_plan_compute(ggml_backend_t backend, ggml_ba UNUSED(plan); } -static void ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { +static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; ggml_cuda_set_main_device(cuda_ctx->device); @@ -9967,6 +9967,8 @@ static void ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph } UNUSED(backend); + + return true; } static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_tensor * op) { diff --git a/ggml-metal.h b/ggml-metal.h index b5e02b668..c4b7325da 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -87,7 +87,7 @@ int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx); // same as ggml_graph_compute but uses Metal // creates gf->n_threads command buffers in parallel -void ggml_metal_graph_compute(struct ggml_metal_context * ctx, struct ggml_cgraph * gf); +bool ggml_metal_graph_compute(struct ggml_metal_context * ctx, struct ggml_cgraph * gf); // // backend API diff --git a/ggml-metal.m b/ggml-metal.m index 7aa92c14c..55cc1a872 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -977,7 +977,7 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { return false; } } -void ggml_metal_graph_compute( +bool ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { @autoreleasepool { @@ -2405,10 +2405,11 @@ void ggml_metal_graph_compute( MTLCommandBufferStatus status = (MTLCommandBufferStatus) [ctx->command_buffers[i] status]; if (status != MTLCommandBufferStatusCompleted) { GGML_METAL_LOG_INFO("%s: command buffer %d failed with status %lu\n", __func__, i, status); - GGML_ASSERT(false); + return false; } } + return true; } } @@ -2688,10 +2689,10 @@ static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggm UNUSED(backend); } -static void ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; - ggml_metal_graph_compute(metal_ctx, cgraph); + return ggml_metal_graph_compute(metal_ctx, cgraph); } static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { From 
d061bf9405cc5fd50792fb2dbdff9c9ea53d6bf9 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 5 Jan 2024 15:36:04 +0200 Subject: [PATCH 372/859] ggml : fix q2_k bpw in comments (ggml/680) --- ggml-quants.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-quants.h b/ggml-quants.h index 70c12c274..62c1df6cb 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -70,7 +70,7 @@ static_assert(sizeof(block_q8_1) == 2*sizeof(float) + QK8_1, "wrong q8_1 block s // 2-bit quantization // weight is represented as x = a * q + b // 16 blocks of 16 elements each -// Effectively 2.5625 bits per weight +// Effectively 2.625 bits per weight typedef struct { uint8_t scales[QK_K/16]; // scales and mins, quantized with 4 bits uint8_t qs[QK_K/4]; // quants From 91d38876dfa10332359ac671b62353aeceb448d3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 5 Jan 2024 16:30:52 +0200 Subject: [PATCH 373/859] metal : switch back to default.metallib (ggml/681) ggml-ci --- CMakeLists.txt | 10 ++++++---- .../llama.swiftui.xcodeproj/project.pbxproj | 19 +------------------ ggml-metal.m | 6 +++--- scripts/sync-ggml.last | 2 +- 4 files changed, 11 insertions(+), 26 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 57ae4c2df..ce237cf45 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -177,27 +177,29 @@ if (LLAMA_METAL) if (LLAMA_METAL_SHADER_DEBUG) # custom command to do the following: # xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air - # xcrun -sdk macosx metallib ggml-metal.air -o ggml.metallib + # xcrun -sdk macosx metallib ggml-metal.air -o default.metallib # # note: this is the only way I found to disable fast-math in Metal. it's ugly, but at least it works # disabling fast math is needed in order to pass tests/test-backend-ops # note: adding -fno-inline fixes the tests when using MTL_SHADER_VALIDATION=1 + # note: unfortunately, we have to call it default.metallib instead of ggml.metallib + # ref: https://github.com/ggerganov/whisper.cpp/issues/1720 set(XC_FLAGS -fno-fast-math -fno-inline -g) if (LLAMA_QKK_64) set(XC_FLAGS ${XC_FLAGS} -DQK_K=64) endif() add_custom_command( - OUTPUT ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + OUTPUT ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/default.metallib COMMAND xcrun -sdk macosx metal ${XC_FLAGS} -c ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air - COMMAND xcrun -sdk macosx metallib ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + COMMAND xcrun -sdk macosx metallib ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.air -o ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/default.metallib DEPENDS ggml-metal.metal COMMENT "Compiling Metal kernels" ) add_custom_target( ggml-metal ALL - DEPENDS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml.metallib + DEPENDS ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/default.metallib ) endif() diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index a70750a22..14b93f26c 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -23,26 +23,10 @@ 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; 8A9F7C4D2AC332EE008AE1EA /* 
LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; - F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */; }; F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; + F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */; }; /* End PBXBuildFile section */ -/* Begin PBXBuildRule section */ - F1FE20DB2B465C2100B45541 /* PBXBuildRule */ = { - isa = PBXBuildRule; - compilerSpec = com.apple.compilers.proxy.script; - fileType = sourcecode.metal; - inputFiles = ( - ); - isEditable = 1; - outputFiles = ( - "${DERIVED_FILES_DIR}/ggml-metal.air", - "${DERIVED_FILES_DIR}/ggml.metallib", - ); - script = "# metal\nxcrun metal -c \"${INPUT_FILE_PATH}\" -o \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE}.air\"\nxcrun metallib -o \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE%-metal}.metallib\" \"${DERIVED_FILES_DIR}/${INPUT_FILE_BASE}.air\"\n"; - }; -/* End PBXBuildRule section */ - /* Begin PBXFileReference section */ 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; @@ -209,7 +193,6 @@ 8A1C83712AC328BD0096AF73 /* Resources */, ); buildRules = ( - F1FE20DB2B465C2100B45541 /* PBXBuildRule */, ); dependencies = ( ); diff --git a/ggml-metal.m b/ggml-metal.m index 55cc1a872..fbbdcd8c4 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -258,14 +258,14 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { bundle = [NSBundle bundleForClass:[GGMLMetalClass class]]; #endif NSError * error = nil; - NSString * libPath = [bundle pathForResource:@"ggml" ofType:@"metallib"]; + NSString * libPath = [bundle pathForResource:@"default" ofType:@"metallib"]; if (libPath != nil) { // pre-compiled library found NSURL * libURL = [NSURL fileURLWithPath:libPath]; GGML_METAL_LOG_INFO("%s: loading '%s'\n", __func__, [libPath UTF8String]); ctx->library = [ctx->device newLibraryWithURL:libURL error:&error]; } else { - GGML_METAL_LOG_INFO("%s: ggml.metallib not found, loading from source\n", __func__); + GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); NSString * sourcePath; NSString * ggmlMetalPathResources = [[NSProcessInfo processInfo].environment objectForKey:@"GGML_METAL_PATH_RESOURCES"]; @@ -295,7 +295,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { #endif // try to disable fast-math // NOTE: this seems to have no effect whatsoever - // instead, in order to disable fast-math, we have to build ggml.metallib from the command line + // instead, in order to disable fast-math, we have to build default.metallib from the command line // using xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air // and go through the "pre-compiled library found" path above //[options setFastMathEnabled:false]; diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 354246a26..fe7f3202f 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ 
-3fd01e00e40583ccd4b393a7c6502d6a4455a1d5 +f96711108d55bdbbd277e6be07204dce6a94fb93 From be36bb946a6336238e92706464de6a30495fe825 Mon Sep 17 00:00:00 2001 From: Ikko Eltociear Ashimine Date: Sat, 6 Jan 2024 01:02:44 +0900 Subject: [PATCH 374/859] flake.nix : fix typo (#4700) betwen -> between --- .devops/nix/package.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index 5f2a7c9f4..43bdbd755 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -9,7 +9,7 @@ git, python3, mpi, - openblas, # TODO: Use the generic `blas` so users could switch betwen alternative implementations + openblas, # TODO: Use the generic `blas` so users could switch between alternative implementations cudaPackages, darwin, rocmPackages, From eec22a1c6378d9a013943cbddb4330c0da621442 Mon Sep 17 00:00:00 2001 From: a-n-n-a-l-e-e <150648636+a-n-n-a-l-e-e@users.noreply.github.com> Date: Fri, 5 Jan 2024 08:04:40 -0800 Subject: [PATCH 375/859] cmake : check for openblas64 (#4134) openblas v0.3.22 64-bit pkg-config file is named openblas64.pc https://github.com/OpenMathLib/OpenBLAS/issues/3790 --- CMakeLists.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index ce237cf45..668669c6d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -230,7 +230,11 @@ if (LLAMA_BLAS) if (${LLAMA_BLAS_VENDOR} MATCHES "Generic") pkg_check_modules(DepBLAS REQUIRED blas) elseif (${LLAMA_BLAS_VENDOR} MATCHES "OpenBLAS") - pkg_check_modules(DepBLAS REQUIRED openblas) + # As of openblas v0.3.22, the 64-bit is named openblas64.pc + pkg_check_modules(DepBLAS openblas64) + if (NOT DepBLAS_FOUND) + pkg_check_modules(DepBLAS REQUIRED openblas) + endif() elseif (${LLAMA_BLAS_VENDOR} MATCHES "FLAME") pkg_check_modules(DepBLAS REQUIRED blis) elseif (${LLAMA_BLAS_VENDOR} MATCHES "ATLAS") From 96e80dabc6e73ff68b09b68947b1fc25883c5094 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 6 Jan 2024 11:40:24 +0200 Subject: [PATCH 376/859] examples : improve base-translate.sh script (#4783) --- examples/base-translate.sh | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/examples/base-translate.sh b/examples/base-translate.sh index 50fba025c..00dedd0df 100755 --- a/examples/base-translate.sh +++ b/examples/base-translate.sh @@ -8,14 +8,19 @@ # cd llama.cpp # make -j # -# ./examples/base-translate.sh "" +# ./examples/base-translate.sh "" [extra-main-args] # -if [ $# -ne 2 ]; then - echo "Usage: ./base-translate.sh \"\"" +if [ $# -lt 2 ]; then + echo "Usage: ./base-translate.sh \"\" [extra-main-args]" exit 1 fi +eargs="" +if [ $# -gt 2 ]; then + eargs="${@:3}" +fi + ftmp="__llama.cpp_example_tmp__.txt" trap "rm -f $ftmp" EXIT @@ -52,5 +57,5 @@ echo "$2 model=$1 -# generate the most likely continuation, run on the CPU until the string "===" is found -./main -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -ngl 0 -r "===" +# generate the most likely continuation until the string "===" is found +./main -m $model -f $ftmp -n 64 --temp 0 --repeat-penalty 1.0 --no-penalize-nl -r "===" $eargs From c75ca5d96f902564cbbbdd7f5cade80d53c288bb Mon Sep 17 00:00:00 2001 From: Daniel Illescas Romero Date: Sat, 6 Jan 2024 16:12:59 +0100 Subject: [PATCH 377/859] llama.swiftui : use correct pointer for llama_token_eos (#4797) --- examples/llama.swiftui/llama.cpp.swift/LibLlama.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 66244382f..8696b493c 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -161,7 +161,7 @@ actor LlamaContext { new_token_id = llama_sample_token_greedy(context, &candidates_p) } - if new_token_id == llama_token_eos(context) || n_cur == n_len { + if new_token_id == llama_token_eos(model) || n_cur == n_len { print("\n") let new_token_str = String(cString: temporary_invalid_cchars + [0]) temporary_invalid_cchars.removeAll() From 67984921a70a7e680a24494aeb7575a66e90685d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 7 Jan 2024 08:45:26 +0200 Subject: [PATCH 378/859] server : fix n_predict check (#4798) --- examples/server/server.cpp | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index d1469fb08..6c7fcd176 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -447,8 +447,14 @@ struct llama_client_slot } bool has_budget(gpt_params &global_params) { + if (params.n_predict == -1 && global_params.n_predict == -1) + { + return true; // limitless + } + n_remaining = -1; - if(params.n_predict != -1) + + if (params.n_predict != -1) { n_remaining = params.n_predict - n_decoded; } @@ -456,7 +462,8 @@ struct llama_client_slot { n_remaining = global_params.n_predict - n_decoded; } - return n_remaining > 0 || n_remaining == -1; // no budget || limitless + + return n_remaining > 0; // no budget } bool available() const { @@ -1102,7 +1109,7 @@ struct llama_server_context } // check the limits - if (slot.n_decoded > 2 && slot.has_next_token && !slot.has_budget(params)) + if (slot.n_decoded > 0 && slot.has_next_token && !slot.has_budget(params)) { slot.stopped_limit = true; slot.has_next_token = false; @@ -1703,7 +1710,6 @@ struct llama_server_context llama_batch_add(batch, slot.sampled, system_tokens.size() + slot.n_past, { slot.id }, true); - slot.n_decoded += 1; slot.n_past += 1; } @@ -1921,6 +1927,7 @@ struct llama_server_context llama_sampling_accept(slot.ctx_sampling, ctx, id, true); + slot.n_decoded += 1; if (slot.n_decoded == 1) { slot.t_start_genereration = ggml_time_us(); From 63ee677efd92060b14894b984597c62e3742b8da Mon Sep 17 00:00:00 2001 From: Konstantin Zhuravlyov Date: Sun, 7 Jan 2024 01:52:42 -0500 Subject: [PATCH 379/859] ggml : use __builtin_amdgcn_sudot4 in __dp4a for gfx11 (#4787) --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 10c21615e..54b266be4 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -183,7 +183,7 @@ static __device__ __forceinline__ int __vsubss4(const int a, const int b) { static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { #if defined(__gfx906__) || defined(__gfx908__) || defined(__gfx90a__) || defined(__gfx1030__) c = __builtin_amdgcn_sdot4(a, b, c, false); -#elif defined(__gfx1100__) +#elif defined(RDNA3) c = __builtin_amdgcn_sudot4( true, a, true, b, c, false); #elif defined(__gfx1010__) || defined(__gfx900__) int tmp1; From 3418c03ecc149fd657527ebb06776239b60a3f3b Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Sun, 7 Jan 2024 08:46:55 +0100 Subject: [PATCH 380/859] llama.swiftui : add visionOS target (#4805) --- .../llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git 
a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index 14b93f26c..9b1a9787b 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -420,11 +420,13 @@ MARKETING_VERSION = 1.0; PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; PRODUCT_NAME = "$(TARGET_NAME)"; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator xros xrsimulator"; + SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_EMIT_LOC_STRINGS = YES; SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; + TARGETED_DEVICE_FAMILY = "1,2,7"; }; name = Debug; }; @@ -453,10 +455,12 @@ MARKETING_VERSION = 1.0; PRODUCT_BUNDLE_IDENTIFIER = "com.bachittle.llama-swift"; PRODUCT_NAME = "$(TARGET_NAME)"; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator xros xrsimulator"; + SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_EMIT_LOC_STRINGS = YES; SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; + TARGETED_DEVICE_FAMILY = "1,2,7"; }; name = Release; }; From d117d4dc5dadb46831036bfa4d6e5e8c86babaf1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 7 Jan 2024 09:50:31 +0200 Subject: [PATCH 381/859] llama : print tensor meta for debugging --- llama.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 3bb056dba..06db40303 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2180,7 +2180,11 @@ struct llama_model_loader { type_max = type; } - // LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, name, ggml_type_name(meta->type), llama_format_tensor_shape(meta).c_str()); + // TODO: make runtime configurable +#if 0 + struct ggml_tensor * meta = ggml_get_tensor(ctx_meta, gguf_get_tensor_name(ctx_gguf, i)); + LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, ggml_get_name(meta), ggml_type_name(type), llama_format_tensor_shape(meta).c_str()); +#endif } switch (type_max) { From 72d8407b3696dd1293bd233b6db392be108bc377 Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Sun, 7 Jan 2024 09:20:50 +0100 Subject: [PATCH 382/859] llama.swiftui : use llama.cpp as SPM package (#4804) --- .../llama.cpp.swift/LibLlama.swift | 5 +- .../llama.cpp.swift/bridging-header.h | 5 -- .../llama.swiftui.xcodeproj/project.pbxproj | 80 +++---------------- .../AccentColor.colorset/Contents.json | 11 --- .../Preview Assets.xcassets/Contents.json | 6 -- 5 files changed, 13 insertions(+), 94 deletions(-) delete mode 100644 examples/llama.swiftui/llama.cpp.swift/bridging-header.h delete mode 100644 examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json delete mode 100644 examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index 8696b493c..fc79fd346 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -1,8 +1,5 @@ import Foundation - -// To use this in your own project, add llama.cpp as a swift package dependency -// and uncomment this import line. 
-// import llama +import llama enum LlamaError: Error { case couldNotInitializeContext diff --git a/examples/llama.swiftui/llama.cpp.swift/bridging-header.h b/examples/llama.swiftui/llama.cpp.swift/bridging-header.h deleted file mode 100644 index 6cd72c979..000000000 --- a/examples/llama.swiftui/llama.cpp.swift/bridging-header.h +++ /dev/null @@ -1,5 +0,0 @@ -// -// Use this file to import your target's public headers that you would like to expose to Swift. -// - -#import "llama.h" diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index 9b1a9787b..a8848a49f 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -7,52 +7,31 @@ objects = { /* Begin PBXBuildFile section */ - 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */ = {isa = PBXBuildFile; fileRef = 542376072B0D9BFB008E6A1C /* ggml-quants.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; - 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */ = {isa = PBXBuildFile; fileRef = 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; - 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09B2AC8723900A8AEE9 /* ggml.c */; settings = {COMPILER_FLAGS = "-DGGML_USE_ACCELERATE -DGGML_USE_METAL -DGGML_USE_K_QUANTS -O3"; }; }; - 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */ = {isa = PBXBuildFile; fileRef = 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */; settings = {COMPILER_FLAGS = "-O3"; }; }; - 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 542EA0A12AC8729100A8AEE9 /* llama.cpp */; settings = {COMPILER_FLAGS = "-DGGML_USE_K_QUANTS -DGGML_USE_METAL -O3"; }; }; 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; - 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */ = {isa = PBXBuildFile; fileRef = 549479C52AC9E0F200E0F78B /* ggml-metal.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc -DGGML_SWIFT -DGGML_USE_METAL -O3"; }; }; 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */; }; 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */; }; - 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */; }; 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8A39BE092AC7601000BFEB40 /* Accelerate.framework */; }; 8A3F84242AC4C891005E2EE8 /* models in Resources */ = {isa = PBXBuildFile; fileRef = 8A3F84232AC4C891005E2EE8 /* models */; }; 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A907F322AC7134E006146EA /* LibLlama.swift */; }; 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */; }; - F1FE20DC2B465C4500B45541 /* 
ggml-metal.metal in Resources */ = {isa = PBXBuildFile; fileRef = 549479C82AC9E10B00E0F78B /* ggml-metal.metal */; }; + DF810E132B4A5BA200301144 /* llama in Frameworks */ = {isa = PBXBuildFile; productRef = DF810E122B4A5BA200301144 /* llama */; }; F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ - 542376062B0D9BEA008E6A1C /* ggml-quants.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-quants.h"; path = "../../ggml-quants.h"; sourceTree = ""; }; - 542376072B0D9BFB008E6A1C /* ggml-quants.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-quants.c"; path = "../../ggml-quants.c"; sourceTree = ""; }; - 542376092B0D9C40008E6A1C /* ggml-backend.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "ggml-backend.h"; path = "../../ggml-backend.h"; sourceTree = ""; }; - 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-backend.c"; path = "../../ggml-backend.c"; sourceTree = ""; }; - 542EA09B2AC8723900A8AEE9 /* ggml.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = ggml.c; path = ../../ggml.c; sourceTree = ""; }; - 542EA09C2AC8723900A8AEE9 /* ggml.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = ggml.h; path = ../../ggml.h; sourceTree = ""; }; - 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-alloc.h"; path = "../../ggml-alloc.h"; sourceTree = ""; }; - 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = "ggml-alloc.c"; path = "../../ggml-alloc.c"; sourceTree = ""; }; - 542EA0A12AC8729100A8AEE9 /* llama.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = llama.cpp; path = ../../llama.cpp; sourceTree = ""; }; - 542EA0A22AC8729100A8AEE9 /* llama.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = llama.h; path = ../../llama.h; sourceTree = ""; }; - 549479C52AC9E0F200E0F78B /* ggml-metal.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "ggml-metal.m"; path = "../../ggml-metal.m"; sourceTree = ""; }; - 549479C62AC9E0F200E0F78B /* ggml-metal.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "ggml-metal.h"; path = "../../ggml-metal.h"; sourceTree = ""; }; - 549479C82AC9E10B00E0F78B /* ggml-metal.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; name = "ggml-metal.metal"; path = "../../ggml-metal.metal"; sourceTree = ""; }; 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = ""; }; - 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "bridging-header.h"; sourceTree = 
""; }; 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; 8A1C83782AC328BD0096AF73 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 8A39BE092AC7601000BFEB40 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; 8A3F84232AC4C891005E2EE8 /* models */ = {isa = PBXFileReference; lastKnownFileType = folder; name = models; path = llama.swiftui/Resources/models; sourceTree = ""; }; 8A907F322AC7134E006146EA /* LibLlama.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibLlama.swift; sourceTree = ""; }; 8A9F7C4C2AC332EE008AE1EA /* LlamaState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LlamaState.swift; sourceTree = ""; }; + DF2D2FE72B4A59BE00FCB72D /* llama.cpp */ = {isa = PBXFileReference; lastKnownFileType = wrapper; name = llama.cpp; path = ../..; sourceTree = ""; }; F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LoadCustomButton.swift; sourceTree = ""; }; /* End PBXFileReference section */ @@ -61,6 +40,7 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + DF810E132B4A5BA200301144 /* llama in Frameworks */, 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */, 8A39BE0A2AC7601100BFEB40 /* Accelerate.framework in Frameworks */, ); @@ -69,30 +49,10 @@ /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ - 8A08D1F62AC7383900FE6CD4 /* llama.cpp */ = { - isa = PBXGroup; - children = ( - 5423760A2B0D9C4B008E6A1C /* ggml-backend.c */, - 542376092B0D9C40008E6A1C /* ggml-backend.h */, - 542376062B0D9BEA008E6A1C /* ggml-quants.h */, - 542376072B0D9BFB008E6A1C /* ggml-quants.c */, - 549479C82AC9E10B00E0F78B /* ggml-metal.metal */, - 549479C62AC9E0F200E0F78B /* ggml-metal.h */, - 549479C52AC9E0F200E0F78B /* ggml-metal.m */, - 542EA09B2AC8723900A8AEE9 /* ggml.c */, - 542EA09C2AC8723900A8AEE9 /* ggml.h */, - 542EA09F2AC8725700A8AEE9 /* ggml-alloc.c */, - 542EA09E2AC8725700A8AEE9 /* ggml-alloc.h */, - 542EA0A12AC8729100A8AEE9 /* llama.cpp */, - 542EA0A22AC8729100A8AEE9 /* llama.h */, - ); - name = llama.cpp; - sourceTree = ""; - }; 8A1C836A2AC328BD0096AF73 = { isa = PBXGroup; children = ( - 8A08D1F62AC7383900FE6CD4 /* llama.cpp */, + DF2D2FE72B4A59BE00FCB72D /* llama.cpp */, 8A907F312AC7134E006146EA /* llama.cpp.swift */, 8A3F84232AC4C891005E2EE8 /* models */, 8A1C83752AC328BD0096AF73 /* llama.swiftui */, @@ -117,19 +77,10 @@ 8A9F7C4A2AC332BF008AE1EA /* UI */, 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */, 8A1C837A2AC328BE0096AF73 /* Assets.xcassets */, - 8A1C837C2AC328BE0096AF73 /* Preview Content */, ); path = llama.swiftui; sourceTree 
= ""; }; - 8A1C837C2AC328BE0096AF73 /* Preview Content */ = { - isa = PBXGroup; - children = ( - 8A1C837D2AC328BE0096AF73 /* Preview Assets.xcassets */, - ); - path = "Preview Content"; - sourceTree = ""; - }; 8A39BE082AC7601000BFEB40 /* Frameworks */ = { isa = PBXGroup; children = ( @@ -157,7 +108,6 @@ 8A907F312AC7134E006146EA /* llama.cpp.swift */ = { isa = PBXGroup; children = ( - 8A08D20A2AC73B1500FE6CD4 /* bridging-header.h */, 8A907F322AC7134E006146EA /* LibLlama.swift */, ); path = llama.cpp.swift; @@ -198,6 +148,7 @@ ); name = llama.swiftui; packageProductDependencies = ( + DF810E122B4A5BA200301144 /* llama */, ); productName = llama.swiftui; productReference = 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */; @@ -244,9 +195,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - F1FE20DC2B465C4500B45541 /* ggml-metal.metal in Resources */, 8A3F84242AC4C891005E2EE8 /* models in Resources */, - 8A1C837E2AC328BE0096AF73 /* Preview Assets.xcassets in Resources */, 8A1C837B2AC328BE0096AF73 /* Assets.xcassets in Resources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -258,18 +207,12 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - 542376082B0D9BFB008E6A1C /* ggml-quants.c in Sources */, - 549479CD2AC9E42A00E0F78B /* ggml-metal.m in Sources */, F1FE20E22B465ECA00B45541 /* LoadCustomButton.swift in Sources */, - 542EA09D2AC8723900A8AEE9 /* ggml.c in Sources */, 8A907F332AC7138A006146EA /* LibLlama.swift in Sources */, - 542EA0A32AC8729100A8AEE9 /* llama.cpp in Sources */, 8A9F7C4D2AC332EE008AE1EA /* LlamaState.swift in Sources */, 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */, - 542EA0A02AC8725700A8AEE9 /* ggml-alloc.c in Sources */, - 5423760B2B0D9C4B008E6A1C /* ggml-backend.c in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -399,11 +342,9 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; DEVELOPMENT_TEAM = STLSG3FG8Q; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; @@ -423,7 +364,6 @@ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator xros xrsimulator"; SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2,7"; @@ -434,11 +374,9 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"llama.swiftui/Preview Content\""; DEVELOPMENT_TEAM = STLSG3FG8Q; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; @@ -458,7 +396,6 @@ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator xros xrsimulator"; SUPPORTS_XR_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_OBJC_BRIDGING_HEADER = "llama.cpp.swift/bridging-header.h"; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2,7"; }; @@ -486,6 +423,13 @@ defaultConfigurationName = Release; }; /* End XCConfigurationList section */ + +/* Begin XCSwiftPackageProductDependency section */ + 
DF810E122B4A5BA200301144 /* llama */ = { + isa = XCSwiftPackageProductDependency; + productName = llama; + }; +/* End XCSwiftPackageProductDependency section */ }; rootObject = 8A1C836B2AC328BD0096AF73 /* Project object */; } diff --git a/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json b/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json deleted file mode 100644 index eb8789700..000000000 --- a/examples/llama.swiftui/llama.swiftui/Assets.xcassets/AccentColor.colorset/Contents.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "colors" : [ - { - "idiom" : "universal" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json b/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json deleted file mode 100644 index 73c00596a..000000000 --- a/examples/llama.swiftui/llama.swiftui/Preview Content/Preview Assets.xcassets/Contents.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "info" : { - "author" : "xcode", - "version" : 1 - } -} From 3c36213df850a2353e95572b3636797c79b7c815 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 7 Jan 2024 11:21:53 +0200 Subject: [PATCH 383/859] llama : remove redundant GQA check (#4796) --- llama.cpp | 8 -------- 1 file changed, 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index 06db40303..021e79a8f 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4776,7 +4776,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4900,7 +4899,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * pos; @@ -5001,7 +4999,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); const int64_t n_rot = n_embd_head_k / 2; @@ -5215,7 +5212,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5308,7 +5304,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5404,7 +5399,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5731,7 +5725,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * attn_norm_output; @@ -5955,7 
+5948,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_gqa == n_embd); struct ggml_tensor * cur; struct ggml_tensor * pos; From 9dede37d812604897496dd9d276ae9fbe13d1042 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 7 Jan 2024 14:29:36 +0200 Subject: [PATCH 384/859] llama : remove unused vars (#4796) --- llama.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 021e79a8f..91aa3f8e7 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4997,7 +4997,6 @@ struct llm_build_context { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); const int64_t n_embd_head = hparams.n_embd_head_v; - const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); const int64_t n_rot = n_embd_head_k / 2; @@ -5210,7 +5209,6 @@ struct llm_build_context { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); const int64_t n_embd_head = hparams.n_embd_head_v; - const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); struct ggml_tensor * cur; From d5a410e8556191672465f7ff58682ea2474038b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sun, 7 Jan 2024 17:24:08 +0100 Subject: [PATCH 385/859] CUDA: fixed redundant value dequantization (#4809) --- ggml-cuda.cu | 35 +++++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 12 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 54b266be4..2df64b111 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -1872,14 +1872,6 @@ static __device__ void convert_f16(const void * vx, const int ib, const int iqs, v.y = x[ib + iqs + 1]; } -static __device__ void convert_f32(const void * vx, const int ib, const int iqs, dfloat2 & v){ - const float * x = (const float *) vx; - - // automatic half -> float type cast if dfloat == float - v.x = x[ib + iqs + 0]; - v.y = x[ib + iqs + 1]; -} - static __global__ void quantize_q8_1(const float * __restrict__ x, void * __restrict__ vy, const int kx, const int kx_padded) { const int ix = blockDim.x*blockIdx.x + threadIdx.x; @@ -1983,7 +1975,7 @@ static __global__ void k_get_rows_float( template static __global__ void dequantize_block(const void * __restrict__ vx, dst_t * __restrict__ y, const int k) { - const int i = blockDim.x*blockIdx.x + 2*threadIdx.x; + const int i = 2*(blockDim.x*blockIdx.x + threadIdx.x); if (i >= k) { return; @@ -2002,6 +1994,19 @@ static __global__ void dequantize_block(const void * __restrict__ vx, dst_t * __ y[iybs + iqs + y_offset] = v.y; } +template +static __global__ void convert_unary(const void * __restrict__ vx, dst_t * __restrict__ y, const int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + if (i >= k) { + return; + } + + const src_t * x = (src_t *) vx; + + y[i] = x[i]; +} + // VDR = vec dot ratio, how many contiguous integers each thread processes when the vec dot kernel is called // MMVQ = mul_mat_vec_q, MMQ = mul_mat_q @@ -5609,7 +5614,7 @@ static void quantize_row_q8_1_cuda(const float * x, void * vy, const int kx, con template static void dequantize_block_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { - const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; + const int num_blocks = (k + 2*CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / 
(2*CUDA_DEQUANTIZE_BLOCK_SIZE); dequantize_block<<>>(vx, y, k); } @@ -5659,6 +5664,12 @@ static void dequantize_row_q6_K_cuda(const void * vx, dst_t * y, const int k, cu #endif } +template +static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; + convert_unary<<>>(vx, y, k); +} + static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { switch (type) { case GGML_TYPE_Q4_0: @@ -5682,7 +5693,7 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { case GGML_TYPE_Q6_K: return dequantize_row_q6_K_cuda; case GGML_TYPE_F32: - return dequantize_block_cuda<1, 1, convert_f32>; + return convert_unary_cuda; default: return nullptr; } @@ -5711,7 +5722,7 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { case GGML_TYPE_Q6_K: return dequantize_row_q6_K_cuda; case GGML_TYPE_F16: - return dequantize_block_cuda<1, 1, convert_f16>; + return convert_unary_cuda; default: return nullptr; } From 226460cc0d5b185bc6685fb76f418fd9418d7add Mon Sep 17 00:00:00 2001 From: slaren Date: Sun, 7 Jan 2024 17:59:01 +0100 Subject: [PATCH 386/859] llama-bench : add no-kv-offload parameter (#4812) --- examples/llama-bench/llama-bench.cpp | 34 +++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 6617c050d..7f7186cde 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -138,6 +138,7 @@ struct cmd_params { std::vector n_threads; std::vector n_gpu_layers; std::vector main_gpu; + std::vector no_kv_offload; std::vector mul_mat_q; std::vector> tensor_split; int reps; @@ -155,6 +156,7 @@ static const cmd_params cmd_params_defaults = { /* n_threads */ {get_num_physical_cores()}, /* n_gpu_layers */ {99}, /* main_gpu */ {0}, + /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, /* tensor_split */ {{}}, /* reps */ 5, @@ -176,6 +178,7 @@ static void print_usage(int /* argc */, char ** argv) { printf(" -t, --threads (default: %s)\n", join(cmd_params_defaults.n_threads, ",").c_str()); printf(" -ngl, --n-gpu-layers (default: %s)\n", join(cmd_params_defaults.n_gpu_layers, ",").c_str()); printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); + printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); printf(" -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); printf(" -ts, --tensor_split \n"); printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); @@ -309,6 +312,13 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { break; } params.main_gpu = split(argv[i], split_delim); + } else if (arg == "-nkvo" || arg == "--no-kv-offload") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.no_kv_offload.insert(params.no_kv_offload.end(), p.begin(), p.end()); } else if (arg == "-mmq" || arg == "--mul-mat-q") { if (++i >= argc) { invalid_param = true; @@ -383,6 +393,7 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.type_v.empty()) { params.type_v = cmd_params_defaults.type_v; } if (params.n_gpu_layers.empty()) { params.n_gpu_layers = cmd_params_defaults.n_gpu_layers; } if (params.main_gpu.empty()) { params.main_gpu = cmd_params_defaults.main_gpu; } + if 
(params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } if (params.tensor_split.empty()) { params.tensor_split = cmd_params_defaults.tensor_split; } if (params.n_threads.empty()) { params.n_threads = cmd_params_defaults.n_threads; } @@ -400,6 +411,7 @@ struct cmd_params_instance { int n_threads; int n_gpu_layers; int main_gpu; + bool no_kv_offload; bool mul_mat_q; std::array tensor_split; @@ -428,6 +440,7 @@ struct cmd_params_instance { cparams.type_k = type_k; cparams.type_v = type_v; cparams.mul_mat_q = mul_mat_q; + cparams.offload_kqv = !no_kv_offload; return cparams; } @@ -444,6 +457,7 @@ static std::vector get_cmd_params_instances_int(const cmd_p for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) for (const auto & mmq : params.mul_mat_q) + for (const auto & nkvo : params.no_kv_offload) for (const auto & nt : params.n_threads) { cmd_params_instance instance = { /* .model = */ m, @@ -455,6 +469,7 @@ static std::vector get_cmd_params_instances_int(const cmd_p /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, + /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, }; @@ -476,6 +491,7 @@ static std::vector get_cmd_params_instances(const cmd_param for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) for (const auto & mmq : params.mul_mat_q) + for (const auto & nkvo : params.no_kv_offload) for (const auto & nt : params.n_threads) { for (const auto & n_prompt : params.n_prompt) { if (n_prompt == 0) { @@ -491,6 +507,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, + /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, }; @@ -511,6 +528,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, /* .main_gpu = */ mg, + /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, }; @@ -559,6 +577,7 @@ struct test { ggml_type type_v; int n_gpu_layers; int main_gpu; + bool no_kv_offload; bool mul_mat_q; std::array tensor_split; int n_prompt; @@ -579,6 +598,7 @@ struct test { type_v = inst.type_v; n_gpu_layers = inst.n_gpu_layers; main_gpu = inst.main_gpu; + no_kv_offload = inst.no_kv_offload; mul_mat_q = inst.mul_mat_q; tensor_split = inst.tensor_split; n_prompt = inst.n_prompt; @@ -640,7 +660,8 @@ struct test { "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", - "n_gpu_layers", "main_gpu", "mul_mat_q", "tensor_split", + "n_gpu_layers", "main_gpu", "no_kv_offload", + "mul_mat_q", "tensor_split", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", "avg_ts", "stddev_ts" @@ -659,7 +680,7 @@ struct test { return INT; } if (field == "cuda" || field == "opencl" || field == "metal" || field == "gpu_blas" || field == "blas" || - field == "f16_kv" || field == "mul_mat_q") { + field == "f16_kv" || field == "no_kv_offload" || field == "mul_mat_q") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -690,7 +711,8 @@ struct test { cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), - std::to_string(n_gpu_layers), std::to_string(main_gpu), std::to_string(mul_mat_q), 
tensor_split_str, + std::to_string(n_gpu_layers), std::to_string(main_gpu), std::to_string(no_kv_offload), + std::to_string(mul_mat_q), tensor_split_str, std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), std::to_string(avg_ts()), std::to_string(stdev_ts()) @@ -851,6 +873,9 @@ struct markdown_printer : public printer { if (field == "mul_mat_q") { return "mmq"; } + if (field == "no_kv_offload") { + return "nkvo"; + } if (field == "tensor_split") { return "ts"; } @@ -885,6 +910,9 @@ struct markdown_printer : public printer { if (params.mul_mat_q.size() > 1 || params.mul_mat_q != cmd_params_defaults.mul_mat_q) { fields.push_back("mul_mat_q"); } + if (params.no_kv_offload.size() > 1 || params.no_kv_offload != cmd_params_defaults.no_kv_offload) { + fields.push_back("no_kv_offload"); + } if (params.tensor_split.size() > 1 || params.tensor_split != cmd_params_defaults.tensor_split) { fields.push_back("tensor_split"); } From b7e7982953f80a656e03feb5cfb17a17a173eb26 Mon Sep 17 00:00:00 2001 From: Lars Grammel Date: Sun, 7 Jan 2024 21:24:11 +0100 Subject: [PATCH 387/859] readme : add lgrammel/modelfusion JS/TS client for llama.cpp (#4814) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ca6d14e17..2f6e6ffee 100644 --- a/README.md +++ b/README.md @@ -118,6 +118,7 @@ as the main playground for developing new features for the [ggml](https://github - Python: [abetlen/llama-cpp-python](https://github.com/abetlen/llama-cpp-python) - Go: [go-skynet/go-llama.cpp](https://github.com/go-skynet/go-llama.cpp) - Node.js: [withcatai/node-llama-cpp](https://github.com/withcatai/node-llama-cpp) +- JS/TS (llama.cpp server client): [lgrammel/modelfusion](https://modelfusion.dev/integration/model-provider/llamacpp) - Ruby: [yoshoku/llama_cpp.rb](https://github.com/yoshoku/llama_cpp.rb) - Rust: [mdrokz/rust-llama.cpp](https://github.com/mdrokz/rust-llama.cpp) - C#/.NET: [SciSharp/LLamaSharp](https://github.com/SciSharp/LLamaSharp) From b0034d93ce2949ce7d9c098ca02e56f66cd484e2 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 8 Jan 2024 11:14:04 +0200 Subject: [PATCH 388/859] examples : add passkey test (#3856) * examples : add passkey test * passkey : better prints * passkey : select pass key pos from CLI * passkey : simplify n_past logic * make : add passkey target * passkey : add "self-extend"-like context extension (#4810) * llama : "self-extend"-like context extension * passkey : add comment * passkey : add readme --- .gitignore | 1 + Makefile | 5 +- examples/CMakeLists.txt | 1 + examples/batched/batched.cpp | 1 + examples/passkey/CMakeLists.txt | 5 + examples/passkey/README.md | 12 ++ examples/passkey/passkey.cpp | 296 ++++++++++++++++++++++++++++++++ llama.cpp | 34 ++++ llama.h | 7 + 9 files changed, 361 insertions(+), 1 deletion(-) create mode 100644 examples/passkey/CMakeLists.txt create mode 100644 examples/passkey/README.md create mode 100644 examples/passkey/passkey.cpp diff --git a/.gitignore b/.gitignore index def74a1e9..cf1b692e9 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,7 @@ models-mnt /lookup /main /metal +/passkey /perplexity /q8dot /quantize diff --git a/Makefile b/Makefile index 28c6d79bc..4c7e175bf 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ BUILD_TARGETS = \ main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ simple batched batched-bench save-load-state server gguf llama-bench libllava.a llava-cli baby-llama 
beam-search \
-	speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup tests/test-c.o
+	speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup passkey tests/test-c.o
 
 # Binaries only useful for tests
 TEST_TARGETS = \
@@ -665,6 +665,9 @@ lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS
 lookup: examples/lookup/lookup.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
 
+passkey: examples/passkey/passkey.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS)
+	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
+
 ifdef LLAMA_METAL
 metal: examples/metal/metal.cpp ggml.o $(OBJS)
 	$(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS)
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 4cc13d6e9..0c71cbdf7 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -31,6 +31,7 @@ else()
     add_subdirectory(quantize-stats)
     add_subdirectory(save-load-state)
     add_subdirectory(simple)
+    add_subdirectory(passkey)
     add_subdirectory(speculative)
     add_subdirectory(lookahead)
     add_subdirectory(lookup)
diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp
index 22a4265df..b1775e0b0 100644
--- a/examples/batched/batched.cpp
+++ b/examples/batched/batched.cpp
@@ -69,6 +69,7 @@ int main(int argc, char ** argv) {
     std::vector<llama_token> tokens_list;
     tokens_list = ::llama_tokenize(model, params.prompt, true);
+
     const int n_kv_req = tokens_list.size() + (n_len - tokens_list.size())*n_parallel;
 
     // initialize the context
diff --git a/examples/passkey/CMakeLists.txt b/examples/passkey/CMakeLists.txt
new file mode 100644
index 000000000..3161bf3ef
--- /dev/null
+++ b/examples/passkey/CMakeLists.txt
@@ -0,0 +1,5 @@
+set(TARGET passkey)
+add_executable(${TARGET} passkey.cpp)
+install(TARGETS ${TARGET} RUNTIME)
+target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT})
+target_compile_features(${TARGET} PRIVATE cxx_std_11)
diff --git a/examples/passkey/README.md b/examples/passkey/README.md
new file mode 100644
index 000000000..4a22bb559
--- /dev/null
+++ b/examples/passkey/README.md
@@ -0,0 +1,12 @@
+# llama.cpp/example/passkey
+
+See the following PRs for more info:
+
+- https://github.com/ggerganov/llama.cpp/pull/3856
+- https://github.com/ggerganov/llama.cpp/pull/4810
+
+### Usage
+
+```bash
+make -j && ./passkey ./models/llama-7b-v2/ggml-model-f16.gguf 250
+```
diff --git a/examples/passkey/passkey.cpp b/examples/passkey/passkey.cpp
new file mode 100644
index 000000000..5c0022832
--- /dev/null
+++ b/examples/passkey/passkey.cpp
@@ -0,0 +1,296 @@
+#include "common.h"
+#include "llama.h"
+
+#include <cmath>
+#include <cstdio>
+#include <string>
+#include <vector>
+
+int main(int argc, char ** argv) {
+    gpt_params params;
+
+    if (argc == 1 || argv[1][0] == '-') {
+        printf("usage: %s MODEL_PATH N_JUNK N_GRP I_POS SEED\n" , argv[0]);
+        return 1 ;
+    }
+
+    int seed = -1;
+
+    int n_junk = 250; // number of times to repeat the junk text
+    int n_keep = 32;  // number of tokens in the prompt prefix
+    int n_grp  = 1;   // if more than 1 - perform LongLM SelfExtend
+    int i_pos  = -1;  // position of the passkey in the junk text
+
+    if (argc >= 2) {
+        params.model = argv[1];
+    }
+
+    if (argc >= 3) {
+        n_junk = std::stoi(argv[2]);
+    }
+
+    if (argc >= 4) {
+        n_grp = std::stoi(argv[3]);
+    }
+
+    if (argc >= 5) {
+        i_pos = std::stoi(argv[4]);
+    }
+
+    if (argc >= 6) {
+        seed = std::stoi(argv[5]);
+    }
+
+    if (seed == -1) {
+        seed = time(NULL);
+    }
+
+    srand(seed);
+
+    if (i_pos == -1) {
+        i_pos =
rand() % n_junk;
+    }
+
+    const std::string prompt_prefix = "There is an important info hidden inside a lot of irrelevant text. Find it and memorize them. I will quiz you about the important information there.";
+    const std::string prompt_suffix = " What is the pass key? The pass key is";
+
+    // generate junk text
+    params.prompt = prompt_prefix;
+
+    const int passkey = rand() % 50000 + 1;
+
+    for (int i = 0; i < n_junk; i++) {
+        if (i % n_junk == i_pos) {
+            params.prompt += " The pass key is " + std::to_string(passkey) + ". Remember it. " + std::to_string(passkey) + " is the pass key.";
+        }
+
+        params.prompt += " The grass is green. The sky is blue. The sun is yellow. Here we go. There and back again.";
+    }
+
+    params.prompt += prompt_suffix;
+
+    // init LLM
+
+    llama_backend_init(params.numa);
+
+    // initialize the model
+
+    llama_model_params model_params = llama_model_default_params();
+
+    model_params.n_gpu_layers = 99; // offload all layers to the GPU
+
+    llama_model * model = llama_load_model_from_file(params.model.c_str(), model_params);
+
+    if (model == NULL) {
+        fprintf(stderr , "%s: error: unable to load model\n" , __func__);
+        return 1;
+    }
+
+    // initialize the context
+
+    llama_context_params ctx_params = llama_context_default_params();
+
+    ctx_params.seed    = seed;
+    ctx_params.n_ctx   = llama_n_ctx_train(model)*n_grp + n_keep;
+    ctx_params.n_batch = 512;
+    ctx_params.n_threads       = params.n_threads;
+    ctx_params.n_threads_batch = params.n_threads_batch == -1 ? params.n_threads : params.n_threads_batch;
+
+    GGML_ASSERT(ctx_params.n_batch % n_grp == 0 && "n_batch must be divisible by n_grp");
+
+    llama_context * ctx = llama_new_context_with_model(model, ctx_params);
+
+    if (ctx == NULL) {
+        fprintf(stderr , "%s: error: failed to create the llama_context\n" , __func__);
+        return 1;
+    }
+
+    // tokenize the prompt
+    std::vector<llama_token> tokens_list;
+    tokens_list = ::llama_tokenize(ctx, params.prompt, true);
+
+    // tokenize the prefix and use it as a sink
+    const int n_tokens_prefix = ::llama_tokenize(ctx, prompt_prefix, true).size();
+
+    const int n_tokens_all = tokens_list.size();
+
+    // we leave a margin of 16 tokens for the generated text - it should contain just the passkey
+    const int n_predict = 16;
+
+    // total length of the sequences including the prompt
+    const int n_len = n_tokens_all + n_predict;
+
+    const int n_ctx       = llama_n_ctx(ctx) - n_keep;
+    const int n_kv_req    = llama_n_ctx(ctx);
+    const int n_batch     = ctx_params.n_batch;
+    const int n_batch_grp = ctx_params.n_batch/n_grp;
+
+    LOG_TEE("\n%s: n_len = %d, n_ctx = %d, n_kv_req = %d, n_grp = %d, n_batch = %d\n", __func__, n_len, n_ctx, n_kv_req, n_grp, n_batch);
+
+    // print the prompt token-by-token
+
+    LOG_TEE("\n");
+    LOG_TEE("prefix tokens: %d\n", n_tokens_prefix);
+    LOG_TEE("prompt tokens: %d\n", n_tokens_all);
+    //LOG_TEE("prompt: %s\n", params.prompt.c_str());
+
+    llama_batch batch = llama_batch_init(512, 0, 1);
+
+    int n_past = 0;
+
+    // fill the KV cache
+    for (int i = 0; i < n_ctx; i += n_batch) {
+        if (i > 0 && n_grp > 1) {
+            // if SelfExtend is enabled, we compress the position from the last batch by a factor of n_grp
+            const int ib = i/n_batch - 1;
+            const int bd = n_batch_grp*(n_grp - 1);
+
+            llama_kv_cache_seq_shift(ctx, 0, n_past - n_batch,         n_past,         ib*bd);
+            llama_kv_cache_seq_div  (ctx, 0, n_past - n_batch + ib*bd, n_past + ib*bd, n_grp);
+
+            n_past -= bd;
+        }
+
+        llama_batch_clear(batch);
+
+        for (int j = 0; j < n_batch && i + j < n_tokens_all; j++) {
+            llama_batch_add(batch, tokens_list[i + j], n_past++, { 0 },
false);
+        }
+
+        if (i + n_batch >= n_tokens_all) {
+            batch.logits[batch.n_tokens - 1] = true;
+        }
+
+        if (llama_decode(ctx, batch) != 0) {
+            LOG_TEE("%s: llama_decode() failed\n", __func__);
+            return 1;
+        }
+
+        LOG_TEE("%s: processed: [%6d, %6d)\n", __func__, i, std::min(i + n_batch, n_tokens_all));
+
+        if (i + n_batch >= n_tokens_all) {
+            break;
+        }
+    }
+
+    for (int i = n_ctx; i < n_tokens_all; i += n_batch) {
+        const int n_discard = n_batch;
+
+        LOG_TEE("%s: shifting KV cache with %d\n", __func__, n_discard);
+
+        llama_kv_cache_seq_rm   (ctx, 0, n_keep            , n_keep + n_discard);
+        llama_kv_cache_seq_shift(ctx, 0, n_keep + n_discard, n_ctx,  -n_discard);
+
+        n_past -= n_discard;
+
+        llama_batch_clear(batch);
+
+        for (int j = 0; j < n_batch && i + j < n_tokens_all; j++) {
+            llama_batch_add(batch, tokens_list[i + j], n_past++, { 0 }, false);
+        }
+
+        if (i + n_batch >= n_tokens_all) {
+            batch.logits[batch.n_tokens - 1] = true;
+        }
+
+        if (llama_decode(ctx, batch) != 0) {
+            LOG_TEE("%s: llama_decode() failed\n", __func__);
+            return 1;
+        }
+
+        LOG_TEE("%s: processed: [%6d, %6d)\n", __func__, i, std::min(i + n_batch, n_tokens_all));
+    }
+
+    {
+        const int n_discard = n_past - n_ctx + n_predict;
+
+        if (n_discard > 0) {
+            LOG_TEE("%s: shifting KV cache with %d to free space for the answer\n", __func__, n_discard);
+
+            llama_kv_cache_seq_rm   (ctx, 0, n_keep            , n_keep + n_discard);
+            llama_kv_cache_seq_shift(ctx, 0, n_keep + n_discard, n_ctx,  -n_discard);
+
+            n_past -= n_discard;
+        }
+    }
+
+    LOG_TEE("\n");
+    LOG_TEE("%s: passkey = %d, inserted at position %d / %d (token pos: ~%d)\n", __func__, passkey, i_pos, n_junk, (i_pos * n_tokens_all) / n_junk);
+    LOG_TEE("\n");
+
+    // main loop
+
+    int n_cur    = n_tokens_all;
+    int n_decode = 0;
+
+    LOG_TEE("%s", prompt_suffix.c_str());
+    fflush(stdout);
+
+    const auto t_main_start = ggml_time_us();
+
+    while (n_cur <= n_len) {
+        // sample the next token
+        {
+            auto   n_vocab = llama_n_vocab(model);
+            auto * logits  = llama_get_logits_ith(ctx, batch.n_tokens - 1);
+
+            std::vector<llama_token_data> candidates;
+            candidates.reserve(n_vocab);
+
+            for (llama_token token_id = 0; token_id < n_vocab; token_id++) {
+                candidates.emplace_back(llama_token_data{ token_id, logits[token_id], 0.0f });
+            }
+
+            llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false };
+
+            // sample the most likely token
+            const llama_token new_token_id = llama_sample_token_greedy(ctx, &candidates_p);
+
+            // is it an end of stream?
+            if (new_token_id == llama_token_eos(model) || n_cur == n_len) {
+                LOG_TEE("\n");
+
+                break;
+            }
+
+            LOG_TEE("%s", llama_token_to_piece(ctx, new_token_id).c_str());
+            fflush(stdout);
+
+            n_decode += 1;
+
+            // prepare the next batch
+            llama_batch_clear(batch);
+
+            // push this new token for next evaluation
+            llama_batch_add(batch, new_token_id, n_past++, { 0 }, true);
+        }
+
+        n_cur += 1;
+
+        // evaluate the current batch with the transformer model
+        if (llama_decode(ctx, batch)) {
+            fprintf(stderr, "%s : failed to eval, return code %d\n", __func__, 1);
+            return 1;
+        }
+    }
+
+    LOG_TEE("\n");
+
+    const auto t_main_end = ggml_time_us();
+
+    LOG_TEE("%s: decoded %d tokens in %.2f s, speed: %.2f t/s\n",
+            __func__, n_decode, (t_main_end - t_main_start) / 1000000.0f, n_decode / ((t_main_end - t_main_start) / 1000000.0f));
+
+    llama_print_timings(ctx);
+
+    fprintf(stderr, "\n");
+
+    llama_batch_free(batch);
+
+    llama_free(ctx);
+    llama_free_model(model);
+
+    llama_backend_free();
+
+    return 0;
+}
diff --git a/llama.cpp b/llama.cpp
index 91aa3f8e7..63853d1c3 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -1903,6 +1903,28 @@ static void llama_kv_cache_seq_shift(
     cache.head = new_head != cache.size ? new_head : 0;
 }
 
+static void llama_kv_cache_seq_div(
+        struct llama_kv_cache & cache,
+                 llama_seq_id   seq_id,
+                    llama_pos   p0,
+                    llama_pos   p1,
+                          int   d) {
+    if (p0 < 0) p0 = 0;
+    if (p1 < 0) p1 = std::numeric_limits<llama_pos>::max();
+
+    for (uint32_t i = 0; i < cache.size; ++i) {
+        if (cache.cells[i].has_seq_id(seq_id) && cache.cells[i].pos >= p0 && cache.cells[i].pos < p1) {
+            cache.has_shift = true;
+
+            {
+                llama_pos p_old = cache.cells[i].pos;
+                cache.cells[i].pos   /= d;
+                cache.cells[i].delta += cache.cells[i].pos - p_old;
+            }
+        }
+    }
+}
+
 //
 // model loading and saving
 //
@@ -10140,9 +10162,21 @@ void llama_kv_cache_seq_keep(struct llama_context * ctx, llama_seq_id seq_id) {
 }
 
 void llama_kv_cache_seq_shift(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1, llama_pos delta) {
+    if (delta == 0) {
+        return;
+    }
+
     llama_kv_cache_seq_shift(ctx->kv_self, seq_id, p0, p1, delta);
 }
 
+void llama_kv_cache_seq_div(struct llama_context * ctx, llama_seq_id seq_id, llama_pos p0, llama_pos p1, int d) {
+    if (d == 1) {
+        return;
+    }
+
+    llama_kv_cache_seq_div(ctx->kv_self, seq_id, p0, p1, d);
+}
+
 // Returns the *maximum* size of the state
 size_t llama_get_state_size(const struct llama_context * ctx) {
     // we don't know size of rng until we actually serialize it. so reserve more than enough memory for its serialized state.
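The position arithmetic that `llama_kv_cache_seq_div` performs is easy to check in isolation. The following sketch is not part of the patch: it reduces the cache to a plain array of cells (single sequence, no head/slot bookkeeping, illustrative values) and shows how the integer division aliases several tokens onto the same position while `delta` records the displacement used by the deferred RoPE update:

```cpp
// Minimal model of llama_kv_cache_seq_div: every cell whose position falls in
// the half-open range [p0, p1) has its position divided by d; delta accumulates
// the displacement that the real cache later applies to the K data.
#include <cstdint>
#include <cstdio>
#include <limits>
#include <vector>

using llama_pos = int32_t;

struct kv_cell {
    llama_pos pos   = -1;
    llama_pos delta = 0;
};

static void kv_cache_seq_div(std::vector<kv_cell> & cells, llama_pos p0, llama_pos p1, int d) {
    if (p0 < 0) p0 = 0;
    if (p1 < 0) p1 = std::numeric_limits<llama_pos>::max();

    for (auto & cell : cells) {
        if (cell.pos >= p0 && cell.pos < p1) {
            const llama_pos p_old = cell.pos;
            cell.pos   /= d;                 // compress the position
            cell.delta += cell.pos - p_old;  // remember the shift for the RoPE fix-up
        }
    }
}

int main() {
    std::vector<kv_cell> cells(8);
    for (int i = 0; i < 8; ++i) cells[i].pos = i;

    kv_cache_seq_div(cells, 4, 8, 2); // divide positions in [4, 8) by 2

    for (const auto & c : cells) {
        printf("pos %d (delta %d)\n", c.pos, c.delta);
    }
    // positions become 0 1 2 3 2 2 3 3
    return 0;
}
```

The collisions in the output (positions 2 and 3 are each now held by two tokens) are the grouped-attention effect that the passkey example exploits when `n_grp > 1`.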
diff --git a/llama.h b/llama.h index 461d4604a..5305de90b 100644 --- a/llama.h +++ b/llama.h @@ -484,6 +484,13 @@ extern "C" { llama_pos p1, llama_pos delta); + LLAMA_API void llama_kv_cache_seq_div( + struct llama_context * ctx, + llama_seq_id seq_id, + llama_pos p0, + llama_pos p1, + int d); + // // State / sessions // From 52531fdff88764282c1b233174721aab8347252d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 8 Jan 2024 11:18:32 +0200 Subject: [PATCH 389/859] main : add self-extend support (#4815) * examples : add passkey test * passkey : better prints * passkey : select pass key pos from CLI * passkey : simplify n_past logic * llama : "self-extend"-like context extension * passkey : add comment * main : add Self-Extend support * llama : add comment about llama_kv_cache_seq_div --- common/common.cpp | 18 +++++++++ common/common.h | 2 + examples/main/main.cpp | 87 ++++++++++++++++++++++++++++++------------ llama.h | 4 ++ 4 files changed, 87 insertions(+), 24 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index eacaee18e..6b4913a65 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -220,6 +220,20 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.n_ctx = std::stoi(argv[i]); + } else if (arg == "--grp-attn-n" || arg == "-gan") { + if (++i >= argc) { + invalid_param = true; + break; + } + + params.grp_attn_n = std::stoi(argv[i]); + } else if (arg == "--grp-attn-w" || arg == "-gaw") { + if (++i >= argc) { + invalid_param = true; + break; + } + + params.grp_attn_w = std::stoi(argv[i]); } else if (arg == "--rope-freq-base") { if (++i >= argc) { invalid_param = true; @@ -904,6 +918,10 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" Not recommended since this is both slower and uses more VRAM.\n"); #endif // GGML_USE_CUBLAS #endif + printf(" -gan N, --grp-attn-n N\n"); + printf(" group-attention factor (default: %d)\n", params.grp_attn_n); + printf(" -gat N, --grp-attn-w N\n"); + printf(" group-attention width (default: %.1f)\n", (double)params.grp_attn_w); printf(" --verbose-prompt print prompt before generation\n"); printf(" -dkvc, --dump-kv-cache\n"); printf(" verbose print of the KV cache\n"); diff --git a/common/common.h b/common/common.h index 9659aa045..e2bbfc258 100644 --- a/common/common.h +++ b/common/common.h @@ -62,6 +62,8 @@ struct gpt_params { int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs int32_t n_beams = 0; // if non-zero then use beam search of given width. 
+ int32_t grp_attn_n = 1; // group-attention factor + int32_t grp_attn_w = 512; // group-attention width float rope_freq_base = 0.0f; // RoPE base frequency float rope_freq_scale = 0.0f; // RoPE frequency scaling factor float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor diff --git a/examples/main/main.cpp b/examples/main/main.cpp index c096f110b..5ea67051f 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -439,6 +439,21 @@ int main(int argc, char ** argv) { LOG_TEE("sampling: \n%s\n", llama_sampling_print(sparams).c_str()); LOG_TEE("sampling order: \n%s\n", llama_sampling_order_print(sparams).c_str()); LOG_TEE("generate: n_ctx = %d, n_batch = %d, n_predict = %d, n_keep = %d\n", n_ctx, params.n_batch, params.n_predict, params.n_keep); + + // group-attention state + // number of grouped KV tokens so far (used only if params.grp_attn_n > 1) + int ga_i = 0; + + const int ga_n = params.grp_attn_n; + const int ga_w = params.grp_attn_w; + + if (ga_n != 1) { + GGML_ASSERT(ga_n > 0 && "grp_attn_n must be positive"); // NOLINT + GGML_ASSERT(ga_w % ga_n == 0 && "grp_attn_w must be a multiple of grp_attn_n"); // NOLINT + //GGML_ASSERT(n_ctx_train % ga_w == 0 && "n_ctx_train must be a multiple of grp_attn_w"); // NOLINT + //GGML_ASSERT(n_ctx >= n_ctx_train * ga_n && "n_ctx must be at least n_ctx_train * grp_attn_n"); // NOLINT + LOG_TEE("self-extend: n_ctx_train = %d, grp_attn_n = %d, grp_attn_w = %d\n", n_ctx_train, ga_n, ga_w); + } LOG_TEE("\n\n"); if (params.interactive) { @@ -500,37 +515,61 @@ int main(int argc, char ** argv) { fflush(stdout); } - // infinite text generation via context swapping - // if we run out of context: - // - take the n_keep first tokens from the original prompt (via n_past) - // - take half of the last (n_ctx - n_keep) tokens and recompute the logits in batches - if (n_past + (int) embd.size() + std::max(0, guidance_offset) > n_ctx) { - if (params.n_predict == -2) { - LOG_TEE("\n\n%s: context full and n_predict == -%d => stopping\n", __func__, params.n_predict); - break; + if (ga_n == 1) { + // infinite text generation via context shifting + // if we run out of context: + // - take the n_keep first tokens from the original prompt (via n_past) + // - take half of the last (n_ctx - n_keep) tokens and recompute the logits in batches + if (n_past + (int) embd.size() + std::max(0, guidance_offset) > n_ctx) { + if (params.n_predict == -2) { + LOG_TEE("\n\n%s: context full and n_predict == -%d => stopping\n", __func__, params.n_predict); + break; + } + + const int n_left = n_past - params.n_keep - 1; + const int n_discard = n_left/2; + + LOG("context full, swapping: n_past = %d, n_left = %d, n_ctx = %d, n_keep = %d, n_discard = %d\n", + n_past, n_left, n_ctx, params.n_keep, n_discard); + + llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + n_discard + 1); + llama_kv_cache_seq_shift(ctx, 0, params.n_keep + 1 + n_discard, n_past, -n_discard); + + n_past -= n_discard; + + if (ctx_guidance) { + n_past_guidance -= n_discard; + } + + LOG("after swap: n_past = %d, n_past_guidance = %d\n", n_past, n_past_guidance); + + LOG("embd: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, embd).c_str()); + + LOG("clear session path\n"); + path_session.clear(); } + } else { + // context extension via Self-Extend + while (n_past >= ga_i + ga_w) { + const int ib = (ga_n*ga_i)/ga_w; + const int bd = (ga_w/ga_n)*(ga_n - 1); + const int dd = (ga_w/ga_n) - ib*bd - ga_w; - const int n_left = n_past - params.n_keep - 1; - const int n_discard = n_left/2; + LOG("\n"); + 
LOG("shift: [%6d, %6d] + %6d -> [%6d, %6d]\n", ga_i, n_past, ib*bd, ga_i + ib*bd, n_past + ib*bd); + LOG("div: [%6d, %6d] / %6d -> [%6d, %6d]\n", ga_i + ib*bd, ga_i + ib*bd + ga_w, ga_n, (ga_i + ib*bd)/ga_n, (ga_i + ib*bd + ga_w)/ga_n); + LOG("shift: [%6d, %6d] + %6d -> [%6d, %6d]\n", ga_i + ib*bd + ga_w, n_past + ib*bd, dd, ga_i + ib*bd + ga_w + dd, n_past + ib*bd + dd); - LOG("context full, swapping: n_past = %d, n_left = %d, n_ctx = %d, n_keep = %d, n_discard = %d\n", - n_past, n_left, n_ctx, params.n_keep, n_discard); + llama_kv_cache_seq_shift(ctx, 0, ga_i, n_past, ib*bd); + llama_kv_cache_seq_div (ctx, 0, ga_i + ib*bd, ga_i + ib*bd + ga_w, ga_n); + llama_kv_cache_seq_shift(ctx, 0, ga_i + ib*bd + ga_w, n_past + ib*bd, dd); - llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + n_discard + 1); - llama_kv_cache_seq_shift(ctx, 0, params.n_keep + 1 + n_discard, n_past, -n_discard); + n_past -= bd; - n_past -= n_discard; + ga_i += ga_w/ga_n; - if (ctx_guidance) { - n_past_guidance -= n_discard; + LOG("\nn_past_old = %d, n_past = %d, ga_i = %d\n\n", n_past + bd, n_past, ga_i); } - - LOG("after swap: n_past = %d, n_past_guidance = %d\n", n_past, n_past_guidance); - - LOG("embd: %s\n", LOG_TOKENS_TOSTR_PRETTY(ctx, embd).c_str()); - - LOG("clear session path\n"); - path_session.clear(); } // try to reuse a matching prefix from the loaded session instead of re-eval (via n_past) diff --git a/llama.h b/llama.h index 5305de90b..869ff0acf 100644 --- a/llama.h +++ b/llama.h @@ -484,6 +484,10 @@ extern "C" { llama_pos p1, llama_pos delta); + // Integer division of the positions by factor of `d > 1` + // If the KV cache is RoPEd, the KV data is updated accordingly + // p0 < 0 : [0, p1] + // p1 < 0 : [p0, inf) LLAMA_API void llama_kv_cache_seq_div( struct llama_context * ctx, llama_seq_id seq_id, From 42ea63c5a3da01d4a94e906d8565868012c79f4f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 8 Jan 2024 15:57:36 +0200 Subject: [PATCH 390/859] llama.swiftui : update readme --- examples/llama.swiftui/README.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/examples/llama.swiftui/README.md b/examples/llama.swiftui/README.md index fa68e6ed8..96cf743d4 100644 --- a/examples/llama.swiftui/README.md +++ b/examples/llama.swiftui/README.md @@ -1,7 +1,12 @@ -# llama.swiftui +# llama.cpp/examples/llama.swiftui -Local inference of llama.cpp on an iPhone. -So far I only tested with starcoder 1B model, but it can most likely handle 7B models as well. +Local inference of llama.cpp on an iPhone. This is a sample app that can be used as a starting +point for more advanced projects. 
From 42ea63c5a3da01d4a94e906d8565868012c79f4f Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 8 Jan 2024 15:57:36 +0200
Subject: [PATCH 390/859] llama.swiftui : update readme

---
 examples/llama.swiftui/README.md | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/examples/llama.swiftui/README.md b/examples/llama.swiftui/README.md
index fa68e6ed8..96cf743d4 100644
--- a/examples/llama.swiftui/README.md
+++ b/examples/llama.swiftui/README.md
@@ -1,7 +1,12 @@
-# llama.swiftui
+# llama.cpp/examples/llama.swiftui
 
-Local inference of llama.cpp on an iPhone.
-So far I only tested with starcoder 1B model, but it can most likely handle 7B models as well.
+Local inference of llama.cpp on an iPhone. This is a sample app that can be used as a starting
+point for more advanced projects.
+
+For usage instructions and performance stats, check the following discussion: https://github.com/ggerganov/llama.cpp/discussions/4508
+
+![image](https://github.com/ggerganov/llama.cpp/assets/1991296/2b40284f-8421-47a2-b634-74eece09a299)
+
+Video demonstration: https://github.com/bachittle/llama.cpp/assets/39804642/e290827a-4edb-4093-9642-2a5e399ec545
-

From 668b31fc7d86245435ad6574e0e1126e734049e2 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 8 Jan 2024 16:40:51 +0200
Subject: [PATCH 391/859] swift : exclude ggml-metal.metal from the package (#4822)

---
 Package.swift | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Package.swift b/Package.swift
index e33a4ff46..583e2e276 100644
--- a/Package.swift
+++ b/Package.swift
@@ -21,7 +21,7 @@ let package = Package(
         name: "llama",
         dependencies: ["ggml"],
         path: ".",
-        exclude: [],
+        exclude: ["ggml-metal.metal"],
         sources: [
             "llama.cpp",
         ],

From dd5ae06405c5565b99889bdb3f168f4351252cfb Mon Sep 17 00:00:00 2001
From: Kawrakow <48489457+ikawrakow@users.noreply.github.com>
Date: Mon, 8 Jan 2024 16:02:32 +0100
Subject: [PATCH 392/859] SOTA 2-bit quants (#4773)

* iq2_xxs: basics

* iq2_xxs: scalar and AVX2 dot products

Needed to change Q8_K to have quants in the -127...127 range,
else the IQ2_XXS AVX implementation becomes very awkward.
The alternative would have been to use Q8_0 instead. Perhaps
I'll change later, for now this is what we have.

* iq2_xxs: ARM_NEON dot product

Somehow strangely slow (112 ms/token).

* iq2_xxs: WIP Metal

Dequantize works, something is still wrong with the dot product.

* iq2_xxs: Metal dot product now works

We have
PP-512 = 475 t/s
TG-128 = 47.3 t/s

Not the greatest performance, but not complete garbage either.

* iq2_xxs: slightly faster dot product

TG-128 is now 48.4 t/s

* iq2_xxs: slightly faster dot product

TG-128 is now 50.9 t/s

* iq2_xxs: even faster Metal dot product

TG-128 is now 54.1 t/s.

Strangely enough, putting the signs lookup table
into shared memory has a bigger impact than the
grid values being in shared memory.

* iq2_xxs: dequantize CUDA kernel - fix conflict with master

* iq2_xxs: quantized CUDA dot product (MMVQ)

We get TG-128 = 153.1 t/s

* iq2_xxs: slightly faster CUDA dot product

TG-128 is now at 155.1 t/s.

* iq2_xxs: add to llama ftype enum

* iq2_xxs: fix MoE on Metal

* Fix missing MMQ ops when on hipBLAS

I had put the ggml_supports_mmq call at the wrong place.

* Fix bug in quantize_row_iq2_xxs

The 0.25f factor was missing.
Great detective work by @ggerganov!

* Fixing tests

* PR suggestion

---------

Co-authored-by: Iwan Kawrakow
---
 ggml-cuda.cu                |  205 +++++++++++++++++++++++
 ggml-metal.m                |   40 +++++
 ggml-metal.metal            |  314 ++++++++++++++++++++++++++++++++++++
 ggml-quants.c               |  294 ++++++++++++++++++++++++++++++++-
 ggml-quants.h               |   12 ++
 ggml.c                      |   26 +++
 ggml.h                      |    3 +
 llama.cpp                   |    3 +
 llama.h                     |    1 +
 tests/test-quantize-fns.cpp |    5 +
 10 files changed, 902 insertions(+), 1 deletion(-)

diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 2df64b111..e0ea890b1 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -477,6 +477,14 @@ typedef struct {
 } block_q6_K;
 static_assert(sizeof(block_q6_K) == sizeof(ggml_fp16_t) + 13*QK_K/16, "wrong q6_K block size/padding");
 
+#define QR2_XXS 8
+#define QI2_XXS (QK_K / (4*QR2_XXS))
+typedef struct {
+    half d;
+    uint16_t qs[QK_K/8];
+} block_iq2_xxs;
+static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding");
+
 #define WARP_SIZE 32
 #define MATRIX_ROW_PADDING 512 // last row of quant.
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -1292,6 +1300,128 @@ static __global__ void dequantize_block_q6_K(const void * __restrict__ vx, dst_t #endif } +static const __device__ uint64_t kgrid_iq2xxs[256] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, + 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, 0x08080808192b0819, + 0x08080808192b1908, 0x080808082b080808, 0x080808082b08082b, 0x080808082b082b2b, + 0x080808082b2b082b, 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, 0x080808192b192b08, + 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b082b082b, 0x0808082b2b08082b, + 0x0808190808080819, 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, 0x0808190819082b08, + 0x08081908192b0808, 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, 0x080819192b080808, + 0x080819192b190819, 0x0808192b08082b19, 0x0808192b08190808, 0x0808192b19080808, + 0x0808192b2b081908, 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b081919082b, 0x08082b082b082b08, + 0x08082b1908081908, 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, 0x08190808082b0819, + 0x0819080819080808, 0x08190808192b0808, 0x081908082b081908, 0x081908082b190808, + 0x081908082b191919, 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, 0x0819082b082b1908, + 0x0819082b19081919, 0x0819190808080808, 0x0819190808082b08, 0x08191908082b0808, + 0x08191908082b1919, 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b0808190808, 0x08192b0819080808, 0x08192b082b080819, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, 0x082b080819081908, + 0x082b0808192b0819, 0x082b08082b080808, 0x082b08082b08082b, 0x082b0819082b2b19, + 0x082b081919082b08, 0x082b082b08080808, 0x082b082b0808082b, 0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, 0x082b19081919192b, + 0x082b191908080808, 0x082b191919080819, 0x082b1919192b1908, 0x082b192b2b190808, + 0x082b2b0808082b08, 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, 0x1908080808192b08, + 0x19080808082b0819, 0x19080808082b1908, 0x1908080819080808, 0x1908080819082b08, + 0x190808081919192b, 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, 0x19080819192b0819, + 0x190808192b080808, 0x190808192b081919, 0x1908082b08080819, 0x1908082b08190808, + 0x1908082b19082b08, 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, 0x190819082b192b19, + 0x190819190819082b, 
0x19081919082b1908, 0x1908192b08080808, 0x19082b0808080819, + 0x19082b0808081908, 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, 0x19082b192b08082b, + 0x19082b2b19081919, 0x19082b2b2b190808, 0x1919080808080808, 0x1919080808082b08, + 0x1919080808190819, 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, 0x191908192b2b1908, + 0x1919082b2b190819, 0x191919082b190808, 0x191919082b19082b, 0x1919191908082b2b, + 0x1919192b08080819, 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, 0x19192b2b08082b08, + 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, 0x192b0808192b2b08, + 0x192b081908080808, 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, 0x192b19190819082b, + 0x192b19192b081908, 0x192b2b081908082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808082b2b, 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b08190808191919, 0x2b08190819080808, + 0x2b081908192b0808, 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, 0x2b082b080808082b, + 0x2b082b1908081908, 0x2b082b2b08190819, 0x2b19080808081908, 0x2b19080808190808, + 0x2b190808082b1908, 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, 0x2b191908082b082b, + 0x2b19190819081908, 0x2b19191919190819, 0x2b192b082b080819, 0x2b192b19082b0808, + 0x2b2b08080808082b, 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, +}; + +static const __device__ uint8_t ksigns_iq2xs[128] = { + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, + 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, + 160, 33, 34, 163, 36, 165, 166, 39, 40, 169, 170, 43, 172, 45, 46, 175, + 48, 177, 178, 51, 180, 53, 54, 183, 184, 57, 58, 187, 60, 189, 190, 63, + 192, 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, 78, 207, + 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, 219, 92, 221, 222, 95, + 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, + 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, +}; + +static const __device__ uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; + +inline bool ggml_cuda_supports_mmq(enum ggml_type type) { + switch (type) { + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: + case GGML_TYPE_Q5_0: + case GGML_TYPE_Q5_1: + case GGML_TYPE_Q8_0: + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: + case GGML_TYPE_Q4_K: + case GGML_TYPE_Q5_K: + case GGML_TYPE_Q6_K: + return true; + default: + return false; + } +} + +template +static __global__ void dequantize_block_iq2_xxs(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq2_xxs * x = (const block_iq2_xxs *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint16_t * q2 = x[i].qs + 4*ib; + const uint8_t * aux8 = (const uint8_t *)q2; + const uint8_t * grid = (const uint8_t 
*)(kgrid_iq2xxs + aux8[il]); + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = (float)x[i].d * (0.5f + (aux32 >> 28)) * 0.25f; + const uint8_t signs = ksigns_iq2xs[(aux32 >> 7*il) & 127]; + for (int j = 0; j < 8; ++j) y[j] = d * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); +#else + assert(false); +#endif + +} + static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); @@ -3825,6 +3955,55 @@ static __device__ __forceinline__ float vec_dot_q6_K_q8_1_mul_mat( return vec_dot_q6_K_q8_1_impl_mmq(&x_ql[index_x], &y_qs[index_y], sc, x_dmf[i * (WARP_SIZE/QI6_K) + i/QI6_K], &y_df[index_y/QI8_1]); } +static __device__ __forceinline__ float vec_dot_iq2_xxs_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if QK_K == 256 + const block_iq2_xxs * bq2 = (const block_iq2_xxs *) vbq; + +#if QR2_XXS == 8 + const int ib32 = iqs; + const uint16_t * q2 = bq2->qs + 4*ib32; + const uint8_t * aux8 = (const uint8_t *)q2; + const int8_t * q8 = bq8_1[ib32].qs; + uint32_t aux32 = q2[2] | (q2[3] << 16); + int sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(kgrid_iq2xxs + aux8[l]); + const uint8_t signs = ksigns_iq2xs[aux32 & 127]; + for (int j = 0; j < 8; ++j) { + sumi += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + aux32 >>= 7; + } + const float d = (float)bq2->d * (0.5f + aux32) * (float)bq8_1[ib32].ds.x * 0.25f; + return d * sumi; +#else + // iqs is 0...15 + const int ib32 = iqs/2; + const int il = iqs%2; + const uint16_t * q2 = bq2->qs + 4*ib32; + const uint8_t * aux8 = (const uint8_t *)q2; + const uint8_t * grid1 = (const uint8_t *)(kgrid_iq2xxs + aux8[2*il+0]); + const uint8_t * grid2 = (const uint8_t *)(kgrid_iq2xxs + aux8[2*il+1]); + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = (float)bq2->d * (0.5f + (aux32 >> 28)) * (float)bq8_1[ib32].ds.x * 0.25f; + const uint8_t signs1 = ksigns_iq2xs[(aux32 >> 14*il) & 127]; + const uint8_t signs2 = ksigns_iq2xs[(aux32 >> (14*il + 7)) & 127]; + const int8_t * q8 = bq8_1[ib32].qs + 16*il; + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < 8; ++j) { + sumi1 += q8[j+0] * grid1[j] * (signs1 & kmask_iq2xs[j] ? -1 : 1); + sumi2 += q8[j+8] * grid2[j] * (signs2 & kmask_iq2xs[j] ? 
-1 : 1); + } + return d * (sumi1 + sumi2); +#endif +#else + assert(false); + return 0.f; +#endif +} + template static __device__ __forceinline__ void mul_mat_q( @@ -5664,6 +5843,12 @@ static void dequantize_row_q6_K_cuda(const void * vx, dst_t * y, const int k, cu #endif } +template +static void dequantize_row_iq2_xxs_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq2_xxs<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -5692,6 +5877,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_q5_K_cuda; case GGML_TYPE_Q6_K: return dequantize_row_q6_K_cuda; + case GGML_TYPE_IQ2_XXS: + return dequantize_row_iq2_xxs_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -5721,6 +5908,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_q5_K_cuda; case GGML_TYPE_Q6_K: return dequantize_row_q6_K_cuda; + case GGML_TYPE_IQ2_XXS: + return dequantize_row_iq2_xxs_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -5915,6 +6104,15 @@ static void mul_mat_vec_q6_K_q8_1_cuda(const void * vx, const void * vy, float * <<>>(vx, vy, dst, ncols, nrows); } +static void mul_mat_vec_iq2_xxs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; + const dim3 block_nums(block_num_y, 1, 1); + const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); + mul_mat_vec_q + <<>>(vx, vy, dst, ncols, nrows); +} + static void ggml_mul_mat_q4_0_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { @@ -7407,6 +7605,7 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: return max_compute_capability >= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -7427,6 +7626,7 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q3_K: case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: + case GGML_TYPE_IQ2_XXS: return max_compute_capability >= CC_VOLTA ? 
128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -7477,6 +7677,9 @@ static void ggml_cuda_op_mul_mat_vec_q( case GGML_TYPE_Q6_K: mul_mat_vec_q6_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); break; + case GGML_TYPE_IQ2_XXS: + mul_mat_vec_iq2_xxs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; default: GGML_ASSERT(false); break; @@ -8693,6 +8896,8 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + use_mul_mat_q = use_mul_mat_q && ggml_cuda_supports_mmq(src0->type); + // debug helpers //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); //printf(" %8d %8d %8d %8d\n", src0->nb[0], src0->nb[1], src0->nb[2], src0->nb[3]); diff --git a/ggml-metal.m b/ggml-metal.m index fbbdcd8c4..6c2a8d04e 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -88,6 +88,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(get_rows_q5_K); GGML_METAL_DECL_KERNEL(get_rows_q6_K); GGML_METAL_DECL_KERNEL(get_rows_i32); + GGML_METAL_DECL_KERNEL(get_rows_iq2_xxs); GGML_METAL_DECL_KERNEL(rms_norm); GGML_METAL_DECL_KERNEL(group_norm); GGML_METAL_DECL_KERNEL(norm); @@ -106,6 +107,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_iq2_xxs_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f16); GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32); @@ -121,6 +123,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_id_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xxs_f32); GGML_METAL_DECL_KERNEL(mul_mm_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_q4_0_f32); @@ -133,6 +136,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_iq2_xxs_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q4_0_f32); @@ -145,6 +149,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_id_q4_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q6_K_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xxs_f32); GGML_METAL_DECL_KERNEL(rope_f32); GGML_METAL_DECL_KERNEL(rope_f16); GGML_METAL_DECL_KERNEL(alibi_f32); @@ -379,6 +384,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(get_rows_q5_K); GGML_METAL_ADD_KERNEL(get_rows_q6_K); GGML_METAL_ADD_KERNEL(get_rows_i32); + GGML_METAL_ADD_KERNEL(get_rows_iq2_xxs); GGML_METAL_ADD_KERNEL(rms_norm); GGML_METAL_ADD_KERNEL(group_norm); GGML_METAL_ADD_KERNEL(norm); @@ -397,6 +403,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_ADD_KERNEL(mul_mv_iq2_xxs_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f16); GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32); @@ -412,6 +419,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_id_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_q6_K_f32); + 
GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xxs_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_ADD_KERNEL(mul_mm_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_f16_f32); @@ -425,6 +433,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_iq2_xxs_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_f16_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q4_0_f32); @@ -437,6 +446,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_id_q4_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q6_K_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xxs_f32); } GGML_METAL_ADD_KERNEL(rope_f32); GGML_METAL_ADD_KERNEL(rope_f16); @@ -502,6 +512,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(get_rows_q5_K); GGML_METAL_DEL_KERNEL(get_rows_q6_K); GGML_METAL_DEL_KERNEL(get_rows_i32); + GGML_METAL_DEL_KERNEL(get_rows_iq2_xxs); GGML_METAL_DEL_KERNEL(rms_norm); GGML_METAL_DEL_KERNEL(group_norm); GGML_METAL_DEL_KERNEL(norm); @@ -520,6 +531,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_iq2_xxs_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f16); GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32); @@ -535,6 +547,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_id_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xxs_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_DEL_KERNEL(mul_mm_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_f16_f32); @@ -548,6 +561,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_iq2_xxs_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_f16_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q4_0_f32); @@ -560,6 +574,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_id_q4_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q6_K_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xxs_f32); } GGML_METAL_DEL_KERNEL(rope_f32); GGML_METAL_DEL_KERNEL(rope_f16); @@ -1541,6 +1556,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q4_K_f32]; break; case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_K_f32]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q6_K_f32]; break; + case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xxs_f32]; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1653,6 +1669,12 @@ bool ggml_metal_graph_compute( nth1 = 32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_q6_K_f32]; } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xxs_f32]; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1686,9 
+1708,14 @@ bool ggml_metal_graph_compute( if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || + //src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } + else if (src0t == GGML_TYPE_IQ2_XXS) { + [encoder setThreadgroupMemoryLength:(256*8+128) atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else if (src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1778,6 +1805,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_K_f32]; break; case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_K_f32]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q6_K_f32]; break; + case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xxs_f32]; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1893,6 +1921,12 @@ bool ggml_metal_graph_compute( nth1 = 32; [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q6_K_f32]; } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xxs_f32]; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1942,9 +1976,14 @@ bool ggml_metal_graph_compute( if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || + //src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } + else if (src2t == GGML_TYPE_IQ2_XXS) { + [encoder setThreadgroupMemoryLength:(256*8+128) atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else if (src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1982,6 +2021,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_K]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q6_K]; break; case GGML_TYPE_I32: [encoder setComputePipelineState:ctx->pipeline_get_rows_i32]; break; + case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xxs]; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index a7d3f9efa..0cc535ac7 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2446,6 +2446,12 @@ typedef struct { } block_q6_K; // 210 bytes / block +typedef struct { + half d; + uint16_t qs[QK_K/8]; +} block_iq2_xxs; +// 66 bytes / block for QK_K = 256, so 2.0625 bpw + //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -3468,6 +3474,221 @@ kernel void kernel_mul_mv_q6_K_f32( kernel_mul_mv_q6_K_f32_impl(src0, src1, 
dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); } +// ======================= "True" 2-bit + +constexpr constant static uint64_t kgrid_iq2xxs[256] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, + 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, 0x08080808192b0819, + 0x08080808192b1908, 0x080808082b080808, 0x080808082b08082b, 0x080808082b082b2b, + 0x080808082b2b082b, 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, 0x080808192b192b08, + 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b082b082b, 0x0808082b2b08082b, + 0x0808190808080819, 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, 0x0808190819082b08, + 0x08081908192b0808, 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, 0x080819192b080808, + 0x080819192b190819, 0x0808192b08082b19, 0x0808192b08190808, 0x0808192b19080808, + 0x0808192b2b081908, 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b081919082b, 0x08082b082b082b08, + 0x08082b1908081908, 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, 0x08190808082b0819, + 0x0819080819080808, 0x08190808192b0808, 0x081908082b081908, 0x081908082b190808, + 0x081908082b191919, 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, 0x0819082b082b1908, + 0x0819082b19081919, 0x0819190808080808, 0x0819190808082b08, 0x08191908082b0808, + 0x08191908082b1919, 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b0808190808, 0x08192b0819080808, 0x08192b082b080819, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, 0x082b080819081908, + 0x082b0808192b0819, 0x082b08082b080808, 0x082b08082b08082b, 0x082b0819082b2b19, + 0x082b081919082b08, 0x082b082b08080808, 0x082b082b0808082b, 0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, 0x082b19081919192b, + 0x082b191908080808, 0x082b191919080819, 0x082b1919192b1908, 0x082b192b2b190808, + 0x082b2b0808082b08, 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, 0x1908080808192b08, + 0x19080808082b0819, 0x19080808082b1908, 0x1908080819080808, 0x1908080819082b08, + 0x190808081919192b, 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, 0x19080819192b0819, + 0x190808192b080808, 0x190808192b081919, 0x1908082b08080819, 0x1908082b08190808, + 0x1908082b19082b08, 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, 0x190819082b192b19, + 0x190819190819082b, 0x19081919082b1908, 0x1908192b08080808, 0x19082b0808080819, + 
0x19082b0808081908, 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, 0x19082b192b08082b, + 0x19082b2b19081919, 0x19082b2b2b190808, 0x1919080808080808, 0x1919080808082b08, + 0x1919080808190819, 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, 0x191908192b2b1908, + 0x1919082b2b190819, 0x191919082b190808, 0x191919082b19082b, 0x1919191908082b2b, + 0x1919192b08080819, 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, 0x19192b2b08082b08, + 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, 0x192b0808192b2b08, + 0x192b081908080808, 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, 0x192b19190819082b, + 0x192b19192b081908, 0x192b2b081908082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808082b2b, 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b08190808191919, 0x2b08190819080808, + 0x2b081908192b0808, 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, 0x2b082b080808082b, + 0x2b082b1908081908, 0x2b082b2b08190819, 0x2b19080808081908, 0x2b19080808190808, + 0x2b190808082b1908, 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, 0x2b191908082b082b, + 0x2b19190819081908, 0x2b19191919190819, 0x2b192b082b080819, 0x2b192b19082b0808, + 0x2b2b08080808082b, 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, +}; + +constexpr constant static uint8_t ksigns_iq2xs[128] = { + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, + 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, + 160, 33, 34, 163, 36, 165, 166, 39, 40, 169, 170, 43, 172, 45, 46, 175, + 48, 177, 178, 51, 180, 53, 54, 183, 184, 57, 58, 187, 60, 189, 190, 63, + 192, 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, 78, 207, + 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, 219, 92, 221, 222, 95, + 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, + 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, +}; + +constexpr constant static uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; + +void kernel_mul_mv_iq2_xxs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_xxs * x 
= (device const block_iq2_xxs *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 256); + { + int nval = 4; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = kgrid_iq2xxs[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + +#if QK_K == 256 + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_xxs * xr = x + ibl; + device const uint16_t * q2 = xr->qs + 4 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + device const uint8_t * aux8 = (device const uint8_t *)q2; + const uint32_t aux32 = q2[2] | (q2[3] << 16); + const float d = db * (0.5f + (aux32 >> 28)); + + float sum = 0; + for (int l = 0; l < 4; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + aux8[l]); + const uint8_t signs = shared_signs[(aux32 >> 7*l) & 127]; + for (int j = 0; j < 8; ++j) { + sum += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + } + sumf[row] += d * sum; + + dh += nb*sizeof(block_iq2_xxs)/2; + q2 += nb*sizeof(block_iq2_xxs)/2; + } + + y4 += 32 * 32; + } +#else + // TODO +#endif + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_xxs_f32")]] +kernel void kernel_mul_mv_iq2_xxs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + //============================= templates and their specializations ============================= // NOTE: this is not dequantizing - we are simply fitting the template @@ -3739,6 +3960,31 @@ void dequantize_q6_K(device const block_q6_K *xb, short il, thread type4x4 & reg } } +template +void dequantize_iq2_xxs(device const block_iq2_xxs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. 
il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + // each block of 32 needs 2 uint32_t's for the quants & scale, so 4 uint16_t's. + device const uint16_t * q2 = xb->qs + 4*ib32; + const uint32_t aux32_g = q2[0] | (q2[1] << 16); + const uint32_t aux32_s = q2[2] | (q2[3] << 16); + thread const uint8_t * aux8 = (thread const uint8_t *)&aux32_g; + const float dl = d * (0.5f + (aux32_s >> 28)) * 0.25f; + constant uint8_t * grid = (constant uint8_t *)(kgrid_iq2xxs + aux8[2*il+0]); + uint8_t signs = ksigns_iq2xs[(aux32_s >> 14*il) & 127]; + for (int i = 0; i < 8; ++i) { + reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } + grid = (constant uint8_t *)(kgrid_iq2xxs + aux8[2*il+1]); + signs = ksigns_iq2xs[(aux32_s >> (14*il+7)) & 127]; + for (int i = 0; i < 8; ++i) { + reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -4278,6 +4524,7 @@ template [[host_name("kernel_get_rows_q3_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_q4_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q5_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -4314,6 +4561,7 @@ template [[host_name("kernel_mul_mm_q3_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q5_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -4362,6 +4610,7 @@ template [[host_name("kernel_mul_mm_id_q3_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_q4_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_q5_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -5134,3 +5383,68 @@ kernel void kernel_mul_mv_id_q6_K_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq2_xxs_f32")]] +kernel void kernel_mul_mv_id_iq2_xxs_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint 
sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq2_xxs_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index 55a9496d1..fd127f2d1 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -2340,6 +2340,138 @@ size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * return (n/QK_K*sizeof(block_q6_K)); } +// ====================== "True" 2-bit (de)-quantization + +void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k) { + (void)x; + (void)y; + (void)k; + assert(k % QK_K == 0); + //fprintf(stderr, "=========================== %s: not implemented\n", __func__); +} + +static const uint64_t iq2xxs_grid[256] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, + 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, + 0x0808080819081908, 0x0808080819190808, 0x0808080819192b08, 0x08080808192b0819, + 0x08080808192b1908, 0x080808082b080808, 0x080808082b08082b, 0x080808082b082b2b, + 0x080808082b2b082b, 0x0808081908080819, 0x0808081908081908, 0x0808081908190808, + 0x0808081908191919, 0x0808081919080808, 0x080808192b081908, 0x080808192b192b08, + 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b082b082b, 0x0808082b2b08082b, + 0x0808190808080819, 0x0808190808081908, 0x0808190808190808, 0x08081908082b0819, + 0x08081908082b1908, 0x0808190819080808, 0x080819081908082b, 0x0808190819082b08, + 0x08081908192b0808, 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, + 0x080819082b2b1908, 0x0808191908080808, 0x080819190808082b, 0x0808191908082b08, + 0x08081919082b0808, 0x080819191908192b, 0x08081919192b2b19, 0x080819192b080808, + 0x080819192b190819, 0x0808192b08082b19, 0x0808192b08190808, 0x0808192b19080808, + 0x0808192b2b081908, 0x0808192b2b2b1908, 0x08082b0808080808, 0x08082b0808081919, + 0x08082b0808082b08, 0x08082b0808191908, 0x08082b08082b2b08, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b081919082b, 0x08082b082b082b08, + 0x08082b1908081908, 0x08082b1919080808, 0x08082b2b0808082b, 0x08082b2b08191908, + 0x0819080808080819, 0x0819080808081908, 0x0819080808190808, 0x08190808082b0819, + 0x0819080819080808, 0x08190808192b0808, 0x081908082b081908, 0x081908082b190808, + 0x081908082b191919, 0x0819081908080808, 0x0819081908082b08, 0x08190819082b0808, + 0x0819081919190808, 0x0819081919192b2b, 0x081908192b080808, 0x0819082b082b1908, + 0x0819082b19081919, 0x0819190808080808, 0x0819190808082b08, 0x08191908082b0808, + 0x08191908082b1919, 0x0819190819082b19, 0x081919082b080808, 0x0819191908192b08, + 0x08191919192b082b, 0x0819192b08080808, 0x0819192b0819192b, 0x08192b0808080819, + 0x08192b0808081908, 0x08192b0808190808, 0x08192b0819080808, 0x08192b082b080819, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b192b2b0808, 0x08192b2b19190819, + 0x082b080808080808, 0x082b08080808082b, 0x082b080808082b2b, 0x082b080819081908, + 0x082b0808192b0819, 0x082b08082b080808, 0x082b08082b08082b, 0x082b0819082b2b19, + 0x082b081919082b08, 0x082b082b08080808, 0x082b082b0808082b, 
0x082b190808080819, + 0x082b190808081908, 0x082b190808190808, 0x082b190819080808, 0x082b19081919192b, + 0x082b191908080808, 0x082b191919080819, 0x082b1919192b1908, 0x082b192b2b190808, + 0x082b2b0808082b08, 0x082b2b08082b0808, 0x082b2b082b191908, 0x082b2b2b19081908, + 0x1908080808080819, 0x1908080808081908, 0x1908080808190808, 0x1908080808192b08, + 0x19080808082b0819, 0x19080808082b1908, 0x1908080819080808, 0x1908080819082b08, + 0x190808081919192b, 0x19080808192b0808, 0x190808082b080819, 0x190808082b081908, + 0x190808082b190808, 0x1908081908080808, 0x19080819082b0808, 0x19080819192b0819, + 0x190808192b080808, 0x190808192b081919, 0x1908082b08080819, 0x1908082b08190808, + 0x1908082b19082b08, 0x1908082b1919192b, 0x1908082b192b2b08, 0x1908190808080808, + 0x1908190808082b08, 0x19081908082b0808, 0x190819082b080808, 0x190819082b192b19, + 0x190819190819082b, 0x19081919082b1908, 0x1908192b08080808, 0x19082b0808080819, + 0x19082b0808081908, 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, + 0x19082b1908080808, 0x19082b1919192b08, 0x19082b19192b0819, 0x19082b192b08082b, + 0x19082b2b19081919, 0x19082b2b2b190808, 0x1919080808080808, 0x1919080808082b08, + 0x1919080808190819, 0x1919080808192b19, 0x19190808082b0808, 0x191908082b080808, + 0x191908082b082b08, 0x1919081908081908, 0x191908191908082b, 0x191908192b2b1908, + 0x1919082b2b190819, 0x191919082b190808, 0x191919082b19082b, 0x1919191908082b2b, + 0x1919192b08080819, 0x1919192b19191908, 0x19192b0808080808, 0x19192b0808190819, + 0x19192b0808192b19, 0x19192b08192b1908, 0x19192b1919080808, 0x19192b2b08082b08, + 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, 0x192b0808192b2b08, + 0x192b081908080808, 0x192b081919191919, 0x192b082b08192b08, 0x192b082b192b0808, + 0x192b190808080808, 0x192b190808081919, 0x192b191908190808, 0x192b19190819082b, + 0x192b19192b081908, 0x192b2b081908082b, 0x2b08080808080808, 0x2b0808080808082b, + 0x2b08080808082b2b, 0x2b08080819080819, 0x2b0808082b08082b, 0x2b08081908081908, + 0x2b08081908192b08, 0x2b08081919080808, 0x2b08082b08190819, 0x2b08190808080819, + 0x2b08190808081908, 0x2b08190808190808, 0x2b08190808191919, 0x2b08190819080808, + 0x2b081908192b0808, 0x2b08191908080808, 0x2b0819191908192b, 0x2b0819192b191908, + 0x2b08192b08082b19, 0x2b08192b19080808, 0x2b08192b192b0808, 0x2b082b080808082b, + 0x2b082b1908081908, 0x2b082b2b08190819, 0x2b19080808081908, 0x2b19080808190808, + 0x2b190808082b1908, 0x2b19080819080808, 0x2b1908082b2b0819, 0x2b1908190819192b, + 0x2b1908192b080808, 0x2b19082b19081919, 0x2b19190808080808, 0x2b191908082b082b, + 0x2b19190819081908, 0x2b19191919190819, 0x2b192b082b080819, 0x2b192b19082b0808, + 0x2b2b08080808082b, 0x2b2b080819190808, 0x2b2b08082b081919, 0x2b2b081908082b19, + 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, +}; + +static const uint8_t ksigns_iq2xs[128] = { + 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, + 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, + 160, 33, 34, 163, 36, 165, 166, 39, 40, 169, 170, 43, 172, 45, 46, 175, + 48, 177, 178, 51, 180, 53, 54, 183, 184, 57, 58, 187, 60, 189, 190, 63, + 192, 65, 66, 195, 68, 197, 198, 71, 72, 201, 202, 75, 204, 77, 78, 207, + 80, 209, 210, 83, 212, 85, 86, 215, 216, 89, 90, 219, 92, 221, 222, 95, + 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, + 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, +}; +static const uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; + +void 
dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + uint32_t aux32[2]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + memcpy(aux32, x[i].qs + 4*ib32, 2*sizeof(uint32_t)); + const float db = d * (0.5f + (aux32[1] >> 28)) * 0.25f; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]); + const uint8_t signs = ksigns_iq2xs[(aux32[1] >> 7*l) & 127]; + for (int j = 0; j < 8; ++j) { + y[j] = db * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + y += 8; + } + } + } +} + +void quantize_row_iq2_xxs(const float * restrict x, void * restrict vy, int k) { + assert(k % QK_K == 0); + block_iq2_xxs * restrict y = vy; + quantize_row_iq2_xxs_reference(x, y, k); +} + +size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist) { + assert(k % QK_K == 0); + (void)hist; // TODO: collect histograms + + for (int j = 0; j < n; j += k) { + block_iq2_xxs * restrict y = (block_iq2_xxs *)dst + j/QK_K; + quantize_row_iq2_xxs_reference(src + j, y, k); + } + return (n/QK_K*sizeof(block_iq2_xxs)); +} + //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -2362,7 +2494,9 @@ void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict x += QK_K; continue; } - const float iscale = -128.f/max; + //const float iscale = -128.f/max; + // We need this change for IQ2_XXS, else the AVX implementation becomes very awkward + const float iscale = -127.f/max; for (int j = 0; j < QK_K; ++j) { int v = nearest_int(iscale*x[j]); y[i].qs[j] = MIN(127, v); @@ -7065,3 +7199,161 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri } #endif + +static const int8_t keven_signs_q2xs[1024] = { + 1, 1, 1, 1, 1, 1, 1, 1, -1, 1, 1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, + 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, -1, + 1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, -1, + 1, 1, -1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, 1, + 1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, 1, 1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, 1, -1, 1, 1, -1, + 1, 1, -1, 1, -1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, 1, + 1, 1, 1, -1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, 1, + 1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, 1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, -1, + 1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, 1, -1, 1, 1, 1, -1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, 1, -1, 1, -1, + 1, 1, -1, 1, 1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, 1, + 1, 1, 1, -1, 1, -1, 1, 1, -1, 1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, 1, + 1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, -1, + 1, 1, 1, 1, -1, -1, 1, 1, -1, 1, 1, 1, -1, -1, 1, -1, 1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, 1, + 1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, 1, 1, 
-1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, -1, + 1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, -1, + 1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, -1, 1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, 1, + 1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, 1, -1, 1, 1, 1, 1, -1, 1, -1, -1, 1, 1, 1, 1, -1, -1, + 1, 1, -1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, -1, -1, 1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, 1, + 1, 1, 1, -1, 1, 1, -1, 1, -1, 1, 1, -1, 1, 1, -1, -1, 1, -1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, + 1, 1, -1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, -1, + 1, 1, 1, 1, -1, 1, -1, 1, -1, 1, 1, 1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, 1, + 1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, 1, 1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, -1, + 1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, -1, + 1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, 1, + 1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, 1, + 1, 1, -1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, 1, -1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, + 1, 1, 1, -1, 1, -1, -1, -1, -1, 1, 1, -1, 1, -1, -1, 1, 1, -1, 1, -1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, -1, + 1, 1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, -1, -1, -1, 1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, 1, + 1, 1, 1, 1, -1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, 1, 1, -1, 1, 1, -1, -1, -1, 1, -1, -1, 1, 1, -1, -1, -1, -1, + 1, 1, -1, 1, -1, -1, -1, 1, -1, 1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, 1, + 1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, -1, 1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, + 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, +}; + +void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + assert(n % QK_K == 0); + + const block_iq2_xxs * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + int8x16x4_t q2u; + int8x16x4_t q2s; + int8x16x4_t q8b; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + float sumf1 = 0, sumf2 = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + q8b = vld1q_s8_x4(q8); q8 += 64; + memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; + q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 0])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 1]))); + q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 2])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 3]))); + q2u.val[2] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 8])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 9]))); + q2u.val[3] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[10])), vld1_s8((const void *)(iq2xxs_grid + aux8[11]))); + q2s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + 
((aux32[1] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[1] >> 7) & 127)))); + q2s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[1] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[1] >> 21) & 127)))); + q2s.val[2] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[3] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[3] >> 7) & 127)))); + q2s.val[3] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[3] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[3] >> 21) & 127)))); + q2u.val[0] = vmulq_s8(q2u.val[0], q2s.val[0]); + q2u.val[1] = vmulq_s8(q2u.val[1], q2s.val[1]); + q2u.val[2] = vmulq_s8(q2u.val[2], q2s.val[2]); + q2u.val[3] = vmulq_s8(q2u.val[3], q2s.val[3]); + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[0], q8b.val[0]), q2u.val[1], q8b.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[2], q8b.val[2]), q2u.val[3], q8b.val[3]); + sumf1 += vaddvq_s32(p1) * (0.5f + (aux32[1] >> 28)); + sumf2 += vaddvq_s32(p2) * (0.5f + (aux32[3] >> 28)); + } + sumf += d*(sumf1 + sumf2); + } + *s = 0.25f * sumf; + +#elif defined(__AVX2__) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint32_t aux32[4]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; + const __m256i q2_1 = _mm256_set_epi64x(iq2xxs_grid[aux8[ 3]], iq2xxs_grid[aux8[ 2]], iq2xxs_grid[aux8[1]], iq2xxs_grid[aux8[0]]); + const __m256i q2_2 = _mm256_set_epi64x(iq2xxs_grid[aux8[11]], iq2xxs_grid[aux8[10]], iq2xxs_grid[aux8[9]], iq2xxs_grid[aux8[8]]); + const __m256i s2_1 = _mm256_set_epi64x(signs64[(aux32[1] >> 21) & 127], signs64[(aux32[1] >> 14) & 127], + signs64[(aux32[1] >> 7) & 127], signs64[(aux32[1] >> 0) & 127]); + const __m256i s2_2 = _mm256_set_epi64x(signs64[(aux32[3] >> 21) & 127], signs64[(aux32[3] >> 14) & 127], + signs64[(aux32[3] >> 7) & 127], signs64[(aux32[3] >> 0) & 127]); + const __m256i q8s_1 = _mm256_sign_epi8(q8_1, s2_1); + const __m256i q8s_2 = _mm256_sign_epi8(q8_2, s2_2); + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + const uint16_t ls1 = aux32[1] >> 28; + const uint16_t ls2 = aux32[3] >> 28; + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_set1_epi16(2*ls1+1)); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_set1_epi16(2*ls2+1)); + sumi1 = _mm256_add_epi32(sumi1, p1); + sumi2 = _mm256_add_epi32(sumi2, p2); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#else + + uint32_t aux32[2]; + const uint8_t * aux8 = (const uint8_t *)aux32; + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + memcpy(aux32, q2, 2*sizeof(uint32_t)); + q2 += 4; + const uint32_t 
ls = 2*(aux32[1] >> 28) + 1;
+            int32_t sumi = 0;
+            for (int l = 0; l < 4; ++l) {
+                const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]);
+                const uint8_t  signs = ksigns_iq2xs[(aux32[1] >> 7*l) & 127];
+                for (int j = 0; j < 8; ++j) {
+                    sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1);
+                }
+                q8 += 8;
+            }
+            bsum += sumi * ls;
+        }
+        sumf += d * bsum;
+    }
+    *s = 0.125f * sumf;
+#endif
+}
diff --git a/ggml-quants.h b/ggml-quants.h
index 62c1df6cb..8dd911d41 100644
--- a/ggml-quants.h
+++ b/ggml-quants.h
@@ -165,6 +165,14 @@ typedef struct {
 } block_q8_K;
 static_assert(sizeof(block_q8_K) == sizeof(float) + QK_K + QK_K/16*sizeof(int16_t), "wrong q8_K block size/padding");
+// (Almost) "true" 2-bit quantization.
+// Due to the need to use blocks as per ggml design, it ends up using 2.0625 bpw
+// because of the 16-bit scale per block of 256 (256 weights * 2 bits + 16 scale bits = 528 bits = 2.0625 bpw).
+typedef struct {
+    ggml_fp16_t d;
+    uint16_t qs[QK_K/8];
+} block_iq2_xxs;
+static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding");

 // Quantization
 void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k);
@@ -180,6 +188,7 @@ void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict
 void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y, int k);
 void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k);
 void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k);
+void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k);

 void quantize_row_q4_0(const float * restrict x, void * restrict y, int k);
 void quantize_row_q4_1(const float * restrict x, void * restrict y, int k);
@@ -194,6 +203,7 @@ void quantize_row_q4_K(const float * restrict x, void * restrict y, int k);
 void quantize_row_q5_K(const float * restrict x, void * restrict y, int k);
 void quantize_row_q6_K(const float * restrict x, void * restrict y, int k);
 void quantize_row_q8_K(const float * restrict x, void * restrict y, int k);
+void quantize_row_iq2_xxs(const float * restrict x, void * restrict y, int k);

 // Dequantization
 void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k);
@@ -209,6 +219,7 @@ void dequantize_row_q4_K(const block_q4_K * restrict x, float * restrict y, int
 void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int k);
 void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int k);
 void dequantize_row_q8_K(const block_q8_K * restrict x, float * restrict y, int k);
+void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k);

 // Dot product
 void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy);
@@ -222,3 +233,4 @@ void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, const void * restrict vx,
 void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy);
 void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy);
 void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy);
+void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy);
diff --git a/ggml.c b/ggml.c
index 62f0f18ef..adb387100 100644
--- a/ggml.c
+++ b/ggml.c
@@ -573,6 +573,17 @@ static const
ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot = ggml_vec_dot_q6_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, }, + [GGML_TYPE_IQ2_XXS] = { + .type_name = "iq2_xxs", + .blck_size = QK_K, + .type_size = sizeof(block_iq2_xxs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq2_xxs, + .from_float = quantize_row_iq2_xxs, + .from_float_reference = (ggml_from_float_t) quantize_row_iq2_xxs_reference, + .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2111,6 +2122,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_Q4_K: wtype = GGML_TYPE_Q4_K; break; case GGML_FTYPE_MOSTLY_Q5_K: wtype = GGML_TYPE_Q5_K; break; case GGML_FTYPE_MOSTLY_Q6_K: wtype = GGML_TYPE_Q6_K; break; + case GGML_FTYPE_MOSTLY_IQ2_XXS: wtype = GGML_TYPE_IQ2_XXS; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7436,6 +7448,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: { ggml_compute_forward_add_q_f32(params, src0, src1, dst); } break; @@ -7700,6 +7713,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: { ggml_compute_forward_add1_q_f32(params, src0, src1, dst); } break; @@ -7814,6 +7828,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: default: { GGML_ASSERT(false); @@ -10455,6 +10470,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: { ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); } break; @@ -10629,6 +10645,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: default: { GGML_ASSERT(false); @@ -10823,6 +10840,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: { ggml_compute_forward_get_rows_q(params, src0, src1, dst); } break; @@ -11459,6 +11477,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -11533,6 +11552,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: + case GGML_TYPE_IQ2_XXS: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -18648,6 +18668,12 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i block_q6_K * block = (block_q6_K*)dst + start / QK_K; result = ggml_quantize_q6_K(src + start, block, n, n, hist); } break; + case GGML_TYPE_IQ2_XXS: + { + GGML_ASSERT(start % QK_K == 0); + block_iq2_xxs * block = (block_iq2_xxs*)dst + start / QK_K; + result = ggml_quantize_iq2_xxs(src + start, block, n, n, hist); + } break; case GGML_TYPE_F16: { int elemsize = sizeof(ggml_fp16_t); diff --git a/ggml.h b/ggml.h index 64f4e45e8..c55e598b4 100644 --- a/ggml.h +++ b/ggml.h @@ -339,6 +339,7 @@ extern "C" { GGML_TYPE_Q5_K = 13, GGML_TYPE_Q6_K = 14, GGML_TYPE_Q8_K = 15, + GGML_TYPE_IQ2_XXS = 16, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -373,6 +374,7 @@ extern "C" { GGML_FTYPE_MOSTLY_Q4_K = 12, // except 1d tensors 
GGML_FTYPE_MOSTLY_Q5_K = 13, // except 1d tensors
        GGML_FTYPE_MOSTLY_Q6_K = 14, // except 1d tensors
+       GGML_FTYPE_MOSTLY_IQ2_XXS = 15, // except 1d tensors
     };

     // available tensor operations:
@@ -2067,6 +2069,7 @@ extern "C" {
     GGML_API size_t ggml_quantize_q4_K(const float * src, void * dst, int n, int k, int64_t * hist);
     GGML_API size_t ggml_quantize_q5_K(const float * src, void * dst, int n, int k, int64_t * hist);
     GGML_API size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * hist);
+    GGML_API size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist);

     GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist);

diff --git a/llama.cpp b/llama.cpp
index 63853d1c3..8e0717db9 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -2222,6 +2222,7 @@ struct llama_model_loader {
         case GGML_TYPE_Q4_K: ftype = LLAMA_FTYPE_MOSTLY_Q4_K_M; break;
         case GGML_TYPE_Q5_K: ftype = LLAMA_FTYPE_MOSTLY_Q5_K_M; break;
         case GGML_TYPE_Q6_K: ftype = LLAMA_FTYPE_MOSTLY_Q6_K; break;
+        case GGML_TYPE_IQ2_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XXS; break;
         default:
             {
                 LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max));
@@ -2593,6 +2594,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) {
         case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "Q5_K - Small";
         case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium";
         case LLAMA_FTYPE_MOSTLY_Q6_K:   return "Q6_K";
+        case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XXS - 2.0625 bpw";

         default: return "unknown, may not work";
     }
@@ -9038,6 +9040,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
         case LLAMA_FTYPE_MOSTLY_Q5_K_S:
         case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break;
         case LLAMA_FTYPE_MOSTLY_Q6_K:   quantized_type = GGML_TYPE_Q6_K; break;
+        case LLAMA_FTYPE_MOSTLY_IQ2_XXS:quantized_type = GGML_TYPE_IQ2_XXS; break;
         default: throw std::runtime_error(format("invalid output file type %d\n", ftype));
     }

diff --git a/llama.h b/llama.h
index 869ff0acf..c11075bbc 100644
--- a/llama.h
+++ b/llama.h
@@ -103,6 +103,7 @@ extern "C" {
         LLAMA_FTYPE_MOSTLY_Q5_K_S = 16, // except 1d tensors
         LLAMA_FTYPE_MOSTLY_Q5_K_M = 17, // except 1d tensors
         LLAMA_FTYPE_MOSTLY_Q6_K = 18, // except 1d tensors
+        LLAMA_FTYPE_MOSTLY_IQ2_XXS = 19, // except 1d tensors

         LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file
     };

diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp
index a2459a286..cee712618 100644
--- a/tests/test-quantize-fns.cpp
+++ b/tests/test-quantize-fns.cpp
@@ -134,6 +134,11 @@ int main(int argc, char * argv[]) {
             continue;
         }

+        if ((ggml_type)i == GGML_TYPE_IQ2_XXS) {
+            printf("Skip %s due to missing quantization functionality\n", ggml_type_name((ggml_type) i));
+            continue;
+        }
+
         printf("Testing %s\n", ggml_type_name((ggml_type) i));
         if (qfns.from_float && qfns.to_float) {

From a9a8c5de3d2028701c239d821b220214fcaefbf1 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 8 Jan 2024 20:25:17 +0200
Subject: [PATCH 393/859] readme : add link to SOTA models

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 2f6e6ffee..a0d86a6ef 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,7 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++

 ### Hot topics

+- New SOTA quantized models, including pure 2-bits: https://huggingface.co/ikawrakow
 - Collecting Apple Silicon performance stats:
   - M-series:
https://github.com/ggerganov/llama.cpp/discussions/4167 - A-series: https://github.com/ggerganov/llama.cpp/discussions/4508 From 1fc2f265ff9377a37fd2c61eae9cd813a3491bea Mon Sep 17 00:00:00 2001 From: howlger Date: Mon, 8 Jan 2024 20:05:53 +0100 Subject: [PATCH 394/859] common : fix the short form of `--grp-attn-w`, not `-gat` (#4825) See https://github.com/ggerganov/llama.cpp/blob/master/common/common.cpp#L230C53-L230C57 --- common/common.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 6b4913a65..4e89fe516 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -920,7 +920,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { #endif printf(" -gan N, --grp-attn-n N\n"); printf(" group-attention factor (default: %d)\n", params.grp_attn_n); - printf(" -gat N, --grp-attn-w N\n"); + printf(" -gaw N, --grp-attn-w N\n"); printf(" group-attention width (default: %.1f)\n", (double)params.grp_attn_w); printf(" --verbose-prompt print prompt before generation\n"); printf(" -dkvc, --dump-kv-cache\n"); From 8f900abfc09851e281bc9027e0ab2f16bf079b29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 9 Jan 2024 08:58:55 +0100 Subject: [PATCH 395/859] CUDA: faster softmax via shared memory + fp16 math (#4742) --- ggml-cuda.cu | 333 ++++++++++++++++++++++++++++++++++--- tests/test-backend-ops.cpp | 17 +- 2 files changed, 321 insertions(+), 29 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e0ea890b1..e26260a35 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -116,6 +116,7 @@ #include "ggml.h" #include "ggml-backend-impl.h" +#define CC_PASCAL 600 #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products #define CC_VOLTA 700 #define CC_OFFSET_AMD 1000000 @@ -556,11 +557,12 @@ static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0}; struct cuda_device_capabilities { int cc; // compute capability + size_t smpb; // max. 
shared memory per block bool vmm; // virtual memory support size_t vmm_granularity; // granularity of virtual memory }; -static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, false, 0} }; +static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, 0, false, 0} }; static void * g_scratch_buffer = nullptr; static size_t g_scratch_size = 0; // disabled by default @@ -593,6 +595,19 @@ static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { return a; } +static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { +#if __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) + (void) a; + bad_arch(); +#else +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); + } + return a; +#endif // __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +} + static __device__ __forceinline__ float warp_reduce_max(float x) { #pragma unroll for (int mask = 16; mask > 0; mask >>= 1) { @@ -601,6 +616,19 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { return x; } +static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { +#if __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) + (void) x; + bad_arch(); +#else +#pragma unroll + for (int mask = 16; mask > 0; mask >>= 1) { + x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); + } + return x; +#endif // __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +} + static __device__ __forceinline__ float op_repeat(const float a, const float b) { return b; GGML_UNUSED(a); @@ -5385,75 +5413,233 @@ static __global__ void diag_mask_inf_f32(const float * x, float * dst, const int dst[i] = x[i] - (col > n_past + row % rows_per_channel) * FLT_MAX; } -static __global__ void soft_max_f32(const float * x, const float * y, float * dst, const int ncols, const int nrows_y, const float scale) { +template +static __global__ void soft_max_f16(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL + const int ncols_data = ncols_template == 0 ? ncols_par : ncols_template; + const int ncols_smem = GGML_PAD(ncols_data, 2*WARP_SIZE)/2; + const int tid = threadIdx.x; const int rowx = blockIdx.x; const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension - const int block_size = blockDim.x; + const int block_size = block_size_template == 0 ? blockDim.x : block_size_template; const int warp_id = threadIdx.x / WARP_SIZE; const int lane_id = threadIdx.x % WARP_SIZE; - __shared__ float buf[CUDA_SOFT_MAX_BLOCK_SIZE/WARP_SIZE]; + extern __shared__ half data_soft_max_f16[]; + half * buf_iw = data_soft_max_f16 + 0; // shared memory buffer for inter-warp communication + // (shared memory) buffer to cache values between iterations: + half2 * vals = vals_smem ? (half2 *) (buf_iw + WARP_SIZE) : (half2 *) (dst + rowx*ncols_data); + // if the buffer is larger than max. shared memory per block, use dst as temp. 
buffer instead + // in that case col_smem == col_data must be enforced to avoid race conditions - float max_val = -INFINITY; + half2 max_val = make_half2(-INFINITY, -INFINITY); - for (int col = tid; col < ncols; col += block_size) { - const int ix = rowx*ncols + col; - const int iy = rowy*ncols + col; - max_val = max(max_val, x[ix]*scale + (y ? y[iy] : 0.0f)); +#pragma unroll + for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { + const int col_data = 2*col0 + 2*WARP_SIZE*warp_id + lane_id; + const int col_smem = vals_smem ? col0 + tid : col_data; + + const int ix = rowx*ncols_data + col_data; + const int iy = rowy*ncols_data + col_data; + + half2 val; + if (need_check && col_data + 0 >= ncols_data) { + val.x = -INFINITY; + } else { + val.x = x[ix + 0]*scale + (y ? y[iy + 0] : 0.0f); + } + if (need_check && col_data + WARP_SIZE >= ncols_data) { + val.y = -INFINITY; + } else { + val.y = x[ix + WARP_SIZE]*scale + (y ? y[iy + WARP_SIZE] : 0.0f); + } + if (!need_check || col_smem < (vals_smem ? ncols_smem : ncols_data)) { + vals[col_smem] = val; + } + max_val = __hmax2(max_val, val); } // find the max value in the block max_val = warp_reduce_max(max_val); if (block_size > WARP_SIZE) { if (warp_id == 0) { - buf[lane_id] = -INFINITY; + buf_iw[lane_id] = -INFINITY; } __syncthreads(); if (lane_id == 0) { - buf[warp_id] = max_val; + buf_iw[warp_id] = __hmax(max_val.x, max_val.y); } __syncthreads(); - max_val = buf[lane_id]; + max_val = __half2half2(buf_iw[lane_id]); max_val = warp_reduce_max(max_val); + } else { + max_val = __half2half2(__hmax(max_val.x, max_val.y)); } - float tmp = 0.f; + half2 tmp = make_half2(0.0f, 0.0f); // partial sums + +#pragma unroll + for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { + const int col_smem = vals_smem ? col0 + tid : 2*col0 + 2*warp_id*WARP_SIZE + lane_id; + + if (ncols_template == 0 && col_smem >= (vals_smem ? ncols_smem : ncols_data)) { + break; + } + + const half2 val = h2exp(vals[col_smem] - max_val); - for (int col = tid; col < ncols; col += block_size) { - const int ix = rowx*ncols + col; - const int iy = rowy*ncols + col; - const float val = expf((x[ix]*scale + (y ? y[iy] : 0.0f)) - max_val); tmp += val; - dst[ix] = val; + vals[col_smem] = val; } // find the sum of exps in the block tmp = warp_reduce_sum(tmp); if (block_size > WARP_SIZE) { if (warp_id == 0) { - buf[lane_id] = 0.f; + buf_iw[lane_id] = 0.0f; } __syncthreads(); if (lane_id == 0) { - buf[warp_id] = tmp; + buf_iw[warp_id] = tmp.x + tmp.y; } __syncthreads(); - tmp = buf[lane_id]; + tmp = __half2half2(buf_iw[lane_id]); + tmp = warp_reduce_sum(tmp); + } else { + tmp = __half2half2(tmp.x + tmp.y); + } + + const half2 inv_sum = make_half2(1.0f, 1.0f) / tmp; + +#pragma unroll + for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { + const int col_data = 2*col0 + 2*WARP_SIZE*warp_id + lane_id; + const int col_smem = vals_smem ? 
col0 + tid : col_data; + + const int idst = rowx*ncols_data + col_data; + const half2 result = vals[col_smem] * inv_sum; + + if (need_check && col_data + 0 >= ncols_data) { + return; + } + dst[idst] = result.x; + + if (need_check && col_data + WARP_SIZE >= ncols_data) { + return; + } + + dst[idst + WARP_SIZE] = result.y; + } +#else + (void) x; (void) y; (void) dst; (void) ncols_par; (void) nrows_y; (void) scale; + bad_arch(); +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +} + +template +static __global__ void soft_max_f32(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { + const int ncols = ncols_template == 0 ? ncols_par : ncols_template; + + const int tid = threadIdx.x; + const int rowx = blockIdx.x; + const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension + + const int block_size = block_size_template == 0 ? blockDim.x : block_size_template; + + const int warp_id = threadIdx.x / WARP_SIZE; + const int lane_id = threadIdx.x % WARP_SIZE; + + extern __shared__ float data_soft_max_f32[]; + float * buf_iw = data_soft_max_f32; // shared memory buffer for inter-warp communication + // shared memory buffer to cache values between iterations: + float * vals = vals_smem ? buf_iw + WARP_SIZE : dst + rowx*ncols; + + float max_val = -INFINITY; + +#pragma unroll + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + + if (ncols_template == 0 && col >= ncols) { + break; + } + + const int ix = rowx*ncols + col; + const int iy = rowy*ncols + col; + + const float val = x[ix]*scale + (y ? y[iy] : 0.0f); + vals[col] = val; + max_val = max(max_val, val); + } + + // find the max value in the block + max_val = warp_reduce_max(max_val); + if (block_size > WARP_SIZE) { + if (warp_id == 0) { + buf_iw[lane_id] = -INFINITY; + } + __syncthreads(); + + if (lane_id == 0) { + buf_iw[warp_id] = max_val; + } + __syncthreads(); + + max_val = buf_iw[lane_id]; + max_val = warp_reduce_max(max_val); + } + + float tmp = 0.0f; // partial sum + +#pragma unroll + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + + if (ncols_template == 0 && col >= ncols) { + break; + } + + const float val = expf(vals[col] - max_val); + tmp += val; + vals[col] = val; + } + + // find the sum of exps in the block + tmp = warp_reduce_sum(tmp); + if (block_size > WARP_SIZE) { + if (warp_id == 0) { + buf_iw[lane_id] = 0.0f; + } + __syncthreads(); + + if (lane_id == 0) { + buf_iw[warp_id] = tmp; + } + __syncthreads(); + + tmp = buf_iw[lane_id]; tmp = warp_reduce_sum(tmp); } - const float inv_tmp = 1.f / tmp; + const float inv_sum = 1.0f / tmp; - for (int col = tid; col < ncols; col += block_size) { - const int i = rowx*ncols + col; - dst[i] *= inv_tmp; +#pragma unroll + for (int col0 = 0; col0 < ncols; col0 += block_size) { + const int col = col0 + tid; + + if (ncols_template == 0 && col >= ncols) { + return; + } + + const int idst = rowx*ncols + col; + dst[idst] = vals[col] * inv_sum; } } @@ -6752,12 +6938,90 @@ static void diag_mask_inf_f32_cuda(const float * x, float * dst, const int ncols diag_mask_inf_f32<<>>(x, dst, ncols_x, rows_per_channel, n_past); } +static void soft_max_f16_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { + int nth = WARP_SIZE; + while (nth < ncols_x/2 && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; + const dim3 
block_dims(nth, 1, 1); + const dim3 block_nums(nrows_x, 1, 1); + const size_t shmem = (GGML_PAD(ncols_x, 2*WARP_SIZE) + WARP_SIZE)*sizeof(half); + static_assert(CUDA_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); + if (shmem <= g_device_caps[g_main_device].smpb) { + switch (ncols_x) { + case 32: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 64: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 128: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 256: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 512: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 1024: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 2048: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 4096: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + default: + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + } + } else { + const size_t shmem_low = WARP_SIZE*sizeof(half); + soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); + } +} + static void soft_max_f32_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { int nth = WARP_SIZE; while (nth < ncols_x && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; const dim3 block_dims(nth, 1, 1); const dim3 block_nums(nrows_x, 1, 1); - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + const size_t shmem = (GGML_PAD(ncols_x, WARP_SIZE) + WARP_SIZE)*sizeof(float); + static_assert(CUDA_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); + if (shmem < g_device_caps[g_main_device].smpb) { + switch (ncols_x) { + case 32: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 64: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 128: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 256: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 512: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 1024: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 2048: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + case 4096: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + default: + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + break; + } + } else { + const size_t shmem_low = WARP_SIZE*sizeof(float); + soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + } } static void im2col_f32_f16_cuda(const float* x, half* dst, @@ -7072,6 +7336,7 @@ void ggml_init_cublas() { #else g_device_caps[id].cc = 100*prop.major + 10*prop.minor; #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + g_device_caps[id].smpb = prop.sharedMemPerBlock; } for (int id = 0; id < g_device_count; ++id) { g_tensor_split[id] /= total_vram; @@ -8087,7 +8352,21 @@ static void ggml_cuda_op_soft_max( float scale = 1.0f; memcpy(&scale, dst->op_params, sizeof(float)); - soft_max_f32_cuda(src0_dd, src1 ? 
src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); +#if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + const bool use_f16_soft_max = false; +#else +#ifdef GGML_CUDA_F16 + const bool use_f16_soft_max = true; +#else + const bool use_f16_soft_max = false; +#endif // GGML_CUDA_F16 +#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) + + if (use_f16_soft_max) { + soft_max_f16_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); + } else { + soft_max_f32_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); + } (void) dst; } diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index b79de7a7d..7a60d7743 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -450,7 +450,7 @@ struct test_case { double err = nmse(f1.data(), f2.data(), f1.size()); if (err > ud->max_err) { - printf("[%s] NMSE = %f ", ggml_op_desc(t1), err); + printf("[%s] NMSE = %.9f > %.9f ", ggml_op_desc(t1), err, ud->max_err); //for (int i = 0; i < (int) f1.size(); i++) { // printf("%5d %9.6f %9.6f, diff = %9.6f\n", i, f1[i], f2[i], f1[i] - f2[i]); //} @@ -1449,6 +1449,7 @@ struct test_moe : public test_case { static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op_name) { std::vector> test_cases; + std::default_random_engine rng(0); const ggml_type all_types[] = { GGML_TYPE_F32, GGML_TYPE_F16, @@ -1583,7 +1584,19 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 1}, 5)); test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 10}, 5)); - test_cases.emplace_back(new test_soft_max()); + std::uniform_int_distribution<> dist_ne1(1, 50); + int exponent = 1; + while (exponent < (1 << 17)) { + std::uniform_int_distribution<> dist_ne0(exponent, 2*exponent); + + for (int n = 0; n < 10; ++n) { + int64_t ne0 = dist_ne0(rng); + int64_t ne1 = dist_ne1(rng); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1})); + } + + exponent <<= 1; + } for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) { test_cases.emplace_back(new test_rope(type, {128, 32, 10, 1}, 128, 0, 512)); // llama 7B From 18c2e1752c3b387689e9e73d7d8a1a3b1511ce23 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 9 Jan 2024 10:42:06 +0200 Subject: [PATCH 396/859] ggml : fix vld1q_s8_x4 32-bit compat (#4828) * ggml : fix vld1q_s8_x4 32-bit compat ggml-ci * ggml : fix 32-bit ARM compat (cont) ggml-ci --- ggml-quants.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index fd127f2d1..d497e6de9 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -7250,9 +7250,9 @@ void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * res uint32_t aux32[4]; const uint8_t * aux8 = (const uint8_t *)aux32; - int8x16x4_t q2u; - int8x16x4_t q2s; - int8x16x4_t q8b; + ggml_int8x16x4_t q2u; + ggml_int8x16x4_t q2s; + ggml_int8x16x4_t q8b; float sumf = 0; for (int i = 0; i < nb; ++i) { @@ -7261,7 +7261,7 @@ void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * res const int8_t * restrict q8 = y[i].qs; float sumf1 = 0, sumf2 = 0; for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { - q8b = vld1q_s8_x4(q8); q8 += 64; + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; memcpy(aux32, q2, 4*sizeof(uint32_t)); q2 += 8; q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 0])), 
vld1_s8((const void *)(iq2xxs_grid + aux8[ 1])));
+            q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xxs_grid + aux8[ 2])), vld1_s8((const void *)(iq2xxs_grid + aux8[ 3])));

From 8c5833031857c9e9ada61948bae894ab9c785f86 Mon Sep 17 00:00:00 2001
From: Zsapi
Date: Tue, 9 Jan 2024 10:12:43 +0100
Subject: [PATCH 397/859] server : add api-key flag to documentation (#4832)

Document the api-key flag added to server in
https://github.com/ggerganov/llama.cpp/pull/4441

---
 examples/server/README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/examples/server/README.md b/examples/server/README.md
index 243e66991..5d9829624 100644
--- a/examples/server/README.md
+++ b/examples/server/README.md
@@ -23,6 +23,7 @@ Command line options:
 - `--host`: Set the hostname or ip address to listen. Default `127.0.0.1`.
 - `--port`: Set the port to listen. Default: `8080`.
 - `--path`: path from which to serve static files (default examples/server/public)
+- `--api-key`: Set an API key for request authorization. By default the server responds to every request. With an API key set, requests must include an `Authorization: Bearer <API-KEY>` header.
 - `--embedding`: Enable embedding extraction, Default: disabled.
 - `-np N`, `--parallel N`: Set the number of slots for process requests (default: 1)
 - `-cb`, `--cont-batching`: enable continuous batching (a.k.a. dynamic batching) (default: disabled)

From 128de3585b0f58b1e562733448fc00109f23a95d Mon Sep 17 00:00:00 2001
From: Behnam M <58621210+ibehnam@users.noreply.github.com>
Date: Tue, 9 Jan 2024 05:02:05 -0500
Subject: [PATCH 398/859] server : update readme about token probs (#4777)

* updated server readme to reflect the gg/server-token-probs-4088 commit

added explanation for the API's completion result, which now includes `completion_probabilities`. Also added a JSON schema that shows the type/structure of `completion_probabilities`.

* simplified the `completion_probabilities` JSON schema

It's now easier to understand what the structure of `completion_probabilities` looks like.

* minor : fix trailing whitespace

---------

Co-authored-by: Georgi Gerganov

---
 examples/server/README.md | 57 ++++++++++++++++++++++-----------------
 1 file changed, 33 insertions(+), 24 deletions(-)

diff --git a/examples/server/README.md b/examples/server/README.md
index 5d9829624..d85a14f89 100644
--- a/examples/server/README.md
+++ b/examples/server/README.md
@@ -175,35 +175,44 @@ node index.js

     `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime)

-    *Result JSON:*
+### Result JSON:

-    Note: When using streaming mode (`stream`) only `content` and `stop` will be returned until end of completion.
+* Note: When using streaming mode (`stream`) only `content` and `stop` will be returned until end of completion.

- `content`: Completion result as a string (excluding `stopping_word` if any). In case of streaming mode, will contain the next token as a string.
- `stop`: Boolean for use with `stream` to check whether the generation has stopped (Note: This is not related to stopping words array `stop` from input options)
+- `completion_probabilities`: An array of token probabilities for each completion. The array's length is `n_predict`.
Each item in the array has the following structure:
+
+```
+{
+  "content": "<the token selected by the model>",
+  "probs": [
+    {
+      "prob": float,
+      "tok_str": "<the most likely candidate token>"
+    },
+    {
+      "prob": float,
+      "tok_str": "<the next candidate token>"
+    },
+    ...
+  ]
+},
+```
+Notice that each `probs` is an array of length `n_probs`.
+
- `content`: Completion result as a string (excluding `stopping_word` if any). In case of streaming mode, will contain the next token as a string.
- `stop`: Boolean for use with `stream` to check whether the generation has stopped (Note: This is not related to stopping words array `stop` from input options)
- `generation_settings`: The provided options above excluding `prompt` but including `n_ctx`, `model`
- `model`: The path to the model loaded with `-m`
- `prompt`: The provided `prompt`
- `stopped_eos`: Indicating whether the completion has stopped because it encountered the EOS token
- `stopped_limit`: Indicating whether the completion stopped because `n_predict` tokens were generated before stop words or EOS was encountered
- `stopped_word`: Indicating whether the completion stopped due to encountering a stopping word from `stop` JSON array provided
- `stopping_word`: The stopping word encountered which stopped the generation (or "" if not stopped due to a stopping word)
- `timings`: Hash of timing information about the completion such as the number of tokens `predicted_per_second`
- `tokens_cached`: Number of tokens from the prompt which could be re-used from previous completion (`n_past`)
- `tokens_evaluated`: Number of tokens evaluated in total from the prompt
- `truncated`: Boolean indicating if the context size was exceeded during generation, i.e. the number of tokens provided in the prompt (`tokens_evaluated`) plus tokens generated (`tokens predicted`) exceeded the context size (`n_ctx`)

- **POST** `/tokenize`: Tokenize a given text.
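To see the `completion_probabilities` schema in practice, here is a minimal usage sketch against a locally running server. This is an illustration only, not part of the patch above: the prompt, the port, and all response values are assumptions, while `n_probs` is the documented request option that enables per-token probabilities.

```sh
# Ask a locally running server (e.g. started with: ./server -m models/7B/ggml-model.gguf)
# for one predicted token plus the probabilities of the top 2 candidate tokens.
curl --request POST --url http://localhost:8080/completion \
    --header "Content-Type: application/json" \
    --data '{"prompt": "The capital of France is", "n_predict": 1, "n_probs": 2}'
```

Each element of `completion_probabilities` in the reply then pairs the selected token (`content`) with its `probs` candidates, e.g. `{"content": " Paris", "probs": [{"prob": 0.95, "tok_str": " Paris"}, {"prob": 0.02, "tok_str": " the"}]}` (values invented for illustration).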
From d9653894dffbfd3a58616f31b0967b34faf6f611 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 9 Jan 2024 16:23:05 +0200 Subject: [PATCH 399/859] scripts : script to get Paul Graham essays in txt format (#4838) --- scripts/get-pg.sh | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100755 scripts/get-pg.sh diff --git a/scripts/get-pg.sh b/scripts/get-pg.sh new file mode 100755 index 000000000..d516db46c --- /dev/null +++ b/scripts/get-pg.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +function usage { + echo "usage: $0" + exit 1 +} + +function has_cmd { + if ! [ -x "$(command -v $1)" ]; then + echo "error: $1 is not available" >&2 + exit 1 + fi +} + +# check for: curl, html2text, tail, sed, fmt +has_cmd curl +has_cmd html2text +has_cmd tail +has_cmd sed + +if [ $# -ne 1 ]; then + usage +fi + +n=$1 + +# get urls +urls="$(curl http://www.aaronsw.com/2002/feeds/pgessays.rss | grep html | sed -e "s/.*http/http/" | sed -e "s/html.*/html/" | head -n $n)" + +printf "urls:\n%s\n" "$urls" + +if [ -f pg.txt ]; then + rm pg.txt +fi + +for url in $urls; do + echo "processing $url" + + curl -L $url | html2text | tail -n +4 | sed -E "s/^[[:space:]]+//g" | fmt -w 80 >> pg.txt + + # don't flood the server + sleep 1 +done + +echo "done. data in pg.txt" + +exit 0 From 18adb4e9bb340b7b4565d8b6715b4449283e7641 Mon Sep 17 00:00:00 2001 From: iohub Date: Wed, 10 Jan 2024 00:45:54 +0800 Subject: [PATCH 400/859] readme : add 3rd party collama reference to UI list (#4840) Add a VSCode extension for llama.cpp reference to UI list --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index a0d86a6ef..866aa87b4 100644 --- a/README.md +++ b/README.md @@ -137,6 +137,7 @@ as the main playground for developing new features for the [ggml](https://github - [semperai/amica](https://github.com/semperai/amica) - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) - [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) +- [iohub/collama](https://github.com/iohub/coLLaMA) --- From 9a818f7c42761984ac99e08e613cc20634f8410e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 9 Jan 2024 19:20:45 +0200 Subject: [PATCH 401/859] scripts : improve get-pg.sh (#4838) --- scripts/get-pg.sh | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/scripts/get-pg.sh b/scripts/get-pg.sh index d516db46c..b027793e1 100755 --- a/scripts/get-pg.sh +++ b/scripts/get-pg.sh @@ -2,6 +2,22 @@ function usage { echo "usage: $0" + echo "note: n is the number of essays to download" + echo "for specific n, the resulting pg.txt file will have the following number of tokens:" + echo "n | tokens" + echo "--- | ---" + echo "1 | 6230" + echo "2 | 23619" + echo "5 | 25859" + echo "10 | 36888" + echo "15 | 50188" + echo "20 | 59094" + echo "25 | 88764" + echo "30 | 103121" + echo "32 | 108338" + echo "35 | 113403" + echo "40 | 127699" + echo "45 | 135896" exit 1 } @@ -33,10 +49,17 @@ if [ -f pg.txt ]; then rm pg.txt fi +c=1 for url in $urls; do echo "processing $url" - curl -L $url | html2text | tail -n +4 | sed -E "s/^[[:space:]]+//g" | fmt -w 80 >> pg.txt + cc=$(printf "%03d" $c) + + curl -L $url | html2text | tail -n +4 | sed -E "s/^[[:space:]]+//g" | fmt -w 80 >> pg-$cc-one.txt + cat pg-$cc-one.txt >> pg.txt + + cp -v pg.txt pg-$cc-all.txt + c=$((c+1)) # don't flood the server sleep 1 From 4dccb38d9abab7f9f2d1f9a6977df4185d490132 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 9 Jan 2024 19:37:08 +0200 Subject: 
[PATCH 402/859] metal : improve dequantize precision to match CPU (#4836) ggml-ci --- ggml-metal.metal | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/ggml-metal.metal b/ggml-metal.metal index 0cc535ac7..229efb8b6 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -3841,8 +3841,8 @@ void dequantize_q3_K(device const block_q3_K *xb, short il, thread type4x4 & reg uint16_t scale_2 = scales[il%8], scale_1 = scales[8 + il%4]; int16_t dl_int = (il/4)&1 ? (scale_2&kmask2) | ((scale_1&kmask1) << 2) : (scale_2&kmask2) | ((scale_1&kmask1) << 4); - half dl = il<8 ? d_all * (dl_int - 32.h) : d_all * (dl_int / 16.h - 32.h); - const half ml = 4.h * dl; + float dl = il<8 ? d_all * (dl_int - 32.f) : d_all * (dl_int / 16.f - 32.f); + const float ml = 4.f * dl; il = (il/2) & 3; const half coef = il>1 ? (il>2 ? 1/64.h : 1/16.h) : (il>0 ? 1/4.h : 1.h); @@ -3909,7 +3909,7 @@ void dequantize_q5_K(device const block_q5_K *xb, short il, thread type4x4 & reg uint8_t ul = 1 << (il/2); il = il & 3; const uchar2 sc = get_scale_min_k4_just2(is, il/2, xb->scales); - const float d = il < 2 ? xb->d : xb->d / 16.h; + const float d = il < 2 ? xb->d : xb->d / 16.f; const float min = xb->dmin; const float dl = d * sc[0]; const float ml = min * sc[1]; @@ -3942,17 +3942,17 @@ void dequantize_q6_K(device const block_q6_K *xb, short il, thread type4x4 & reg #if QK_K == 256 ql = ql + 64*(il/8) + 32*((il/2)&1) + 16*(il&1); qh = qh + 32*(il/8) + 16*(il&1); - half sc = scales[(il%2) + 2 * ((il/2))]; + float sc = scales[(il%2) + 2 * ((il/2))]; il = (il/2) & 3; #else ql = ql + 16 * (il&1); - half sc = scales[il]; + float sc = scales[il]; #endif const uint16_t kmask1 = il>1 ? (il>2 ? 192 : 48) : (il>0 ? 12 : 3); const uint16_t kmask2 = il>1 ? 0xF0 : 0x0F; - const half coef = il>1 ? 1.f/16.h : 1.h; - const half ml = d_all * sc * 32.h; - const half dl = d_all * sc * coef; + const float coef = il>1 ? 1.f/16.f : 1.f; + const float ml = d_all * sc * 32.f; + const float dl = d_all * sc * coef; for (int i = 0; i < 16; ++i) { const half q = il&1 ? ((ql[i] & kmask2) | ((qh[i] & kmask1) << 2)) : ((ql[i] & kmask2) | ((qh[i] & kmask1) << 4)); From 36e5a08b203542dca53cca4eaf172c5dc4bbc991 Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Tue, 9 Jan 2024 09:59:14 -0800 Subject: [PATCH 403/859] llava-cli : don't crash if --image flag is invalid (#4835) This change fixes an issue where supplying `--image missing-file` would result in a segfault due to a null pointer being dereferenced. This can result in distracting info being printed if robust crash analysis tools are being used. --- examples/llava/llava-cli.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index 502b788b1..d94795fe3 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -243,6 +243,9 @@ int main(int argc, char ** argv) { } auto image_embed = load_image(ctx_llava, ¶ms); + if (!image_embed) { + return 1; + } // process the prompt process_prompt(ctx_llava, image_embed, ¶ms, params.prompt); From 6efb8eb30e7025b168f3fda3ff83b9b386428ad6 Mon Sep 17 00:00:00 2001 From: Austin <77757836+teleprint-me@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:46:46 -0500 Subject: [PATCH 404/859] convert.py : fix vanilla LLaMA model conversion (#4818) * Update Imports and Add Notes for Future Reference - Updated import statements in `convert.py`. - Added import for `AutoTokenizer` from `transformers` module. 
- Added conditional import for `gguf` from the local directory. - Added comments and notes for future reference. Additional Notes: - Noted removal of a redundant `TypeAlias` import. - Noted the removal of a `gguf` debug statement. - Commented on the presence of `ARCH` and `NDArray` definitions. - Commented on cleaning up and refactoring data type definitions. * Refine Model Hyperparameters and Params Class - Updated type annotations to use `Optional` for clarity. - Improved method names and attribute consistency. - Removed unnecessary variables for better code readability. Additional Notes: - Highlighted the use of `Optional` for clearer intent. - Ensured backward and forward compatibility. * Restore BpeVocab and SentencePieceVocab classes - Restored the BpeVocab class for handling BPE tokenization. - Restored the SentencePieceVocab class for SentencePiece tokenization. These classes are essential for maintaining the original behavior of the codebase. * refactor: Standardize vocabulary handling with HfVocab - Replaced VocabLoader with HfVocab, aligning vocabulary handling across classes. - Updated initialization of HfVocab with local_files_only=True for AutoTokenizer. - Introduced optional parameter fname_added_tokens for flexible added token management. - Streamlined added token handling for clarity and conciseness. - Maintained special tokens and IDs, enhancing token management. - Simplified token processing methods for improved readability. - Added a placeholder for score computation with a default value of -1000.0. - Optimized newline token check for efficiency. - Updated __repr__ function for clarity in representation. - Adjusted type alias Vocab to include BpeVocab, SentencePieceVocab, and HfVocab. - Removed redundant code related to special token handling, reverse vocabulary mapping, and vocabulary file detection. This refactoring promotes a standardized and modular approach to vocabulary management, facilitating future integration with a VocabFactory and improving code maintainability and scalability. * refactor: Enhance readability, functionality, and code quality - Improved code formatting and readability for better maintainability. - Refactored LazyUnpickler's CLASSES dictionary for clarity. - Added print statements and warnings in check_vocab_size for user feedback. - Removed find_vocab_file_path, as it's superseded by VocabFactory. - Preparatory changes for upcoming classes: OutputFile and VocabFactory. - Overall focus on code quality, error handling, and consistency. These changes reflect a continuous effort to refine the codebase, ensuring it meets best practices and prepares for future enhancements, such as the VocabFactory. * refactor: Update OutputFile class for enhanced model vocabulary management - Restructured the constructor for improved readability. - Updated `add_meta_arch` method for flexible model name determination. - Introduced `handle_tokenizer_model` for mapping vocab types to supported tokenizer models. - Streamlined vocabulary extraction with `extract_vocabulary_from_model`. - Simplified vocabulary metadata addition using `add_meta_vocab`. - Refactored `add_tensor_info` for clarity and consistency. - Improved error handling for better user feedback. These changes signify the development of a versatile and comprehensive `OutputFile` class, enabling efficient management of model conversion output, metadata, vocabulary, and tensor information. 
* feat: Introduce VocabFactory for flexible vocabulary management in model conversion - The VocabFactory class is added to facilitate modular vocabulary handling. - The constructor initializes a directory path and detects vocabulary-related files. - The _select_file method provides file paths based on vocabulary type (e.g., BPE, SentencePiece). - _create_special_vocab generates special vocabularies, accommodating different types. - The load_vocab method loads vocabularies, handling BPE, SentencePiece, and Hugging Face Fast Tokenizer. - Error handling and logging enhance debugging and user feedback. - The modular and flexible design simplifies vocabulary management and supports future extensions. The VocabFactory class enhances code modularity and maintainability, allowing versatile vocabulary handling in the model conversion process. * refactor: Improve code organization, argument parsing, and user interface - Renamed 'default_outfile' to 'default_output_file' for clarity. - Refactored argument parser setup into 'get_argument_parser' function. - Introduced descriptive comments for each argument in the parser. - Added '--vocab-type' argument with choices ["spm", "bpe", "hfft"] for vocabulary processing. - Improved flag naming consistency: '--outfile' to '--out-file' and '--bigendian' to '--big-endian'. - Enhanced error handling to prevent overwriting input data in 'default_output_file'. - Made 'argv' in 'main' an optional parameter for flexibility. - Introduced dynamic import for 'awq.apply_awq' based on 'args.awq_path' for conditional dependency. These changes enhance code clarity, organization, and the user interface of the script, aligning it with Python best practices and improving maintainability. * refactor: Further refine functionality, improve user interaction, and streamline vocabulary handling - Renamed command-line arguments for clarity and consistency. - Improved path resolution and import adjustments for robustness. - Thoughtfully handled 'awq-path' and conditional logic for the weighted model. - Enhanced model and vocabulary loading with the 'VocabFactory' class for structured and adaptable loading. - Strengthened error handling and user feedback for a more user-friendly experience. - Structured output file handling with clear conditions and defaults. - Streamlined and organized the 'main' function for better logic flow. - Passed 'sys.argv[1:]' to 'main' for adaptability and testability. These changes solidify the script's functionality, making it more robust, user-friendly, and adaptable. The use of the 'VocabFactory' class is a notable enhancement in efficient vocabulary handling, reflecting a thoughtful and iterative approach to script development. * chore: Apply ruff formatting to convert.py Signed-off-by: teleprint-me <77757836+teleprint-me@users.noreply.github.com> * Revert to commit 0614c33 * chore: Apply flake8 formatting rules Signed-off-by: teleprint-me <77757836+teleprint-me@users.noreply.github.com> * refactor: Revise `check_vocab_size` for Enhanced Clarity and Correctness - Resolved an unreachable branch issue by reorganizing the conditional structure. - Moved the special case check for `params.n_vocab == -1` to the top for immediate assertion. - Flattened the conditional logic for improved clarity and predictability of the function's behavior. These changes enhance the readability and functional correctness of the `check_vocab_size` function without altering its intended functionality. 
* py : fix outfile and outtype * py : suggest hint for missing vocab size --------- Signed-off-by: teleprint-me <77757836+teleprint-me@users.noreply.github.com> Co-authored-by: Georgi Gerganov --- convert.py | 969 ++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 666 insertions(+), 303 deletions(-) diff --git a/convert.py b/convert.py index c3f3fc0a1..3b613eefc 100755 --- a/convert.py +++ b/convert.py @@ -17,29 +17,58 @@ import signal import struct import sys import time +import warnings import zipfile from abc import ABCMeta, abstractmethod -from collections import OrderedDict +from argparse import ArgumentParser from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor from dataclasses import dataclass from pathlib import Path -from typing import IO, TYPE_CHECKING, Any, Callable, Iterable, Literal, Optional, TypeVar, cast +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + Iterable, + Literal, + Optional, + Tuple, + TypeVar, +) import numpy as np from sentencepiece import SentencePieceProcessor -if 'NO_LOCAL_GGUF' not in os.environ: - sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) -import gguf +try: + from transformers import AutoTokenizer +except ModuleNotFoundError as e: + warnings.warn(f"Could not import AutoTokenizer from transformers: {e}") -if TYPE_CHECKING: - from typing import TypeAlias +# If NO_LOCAL_GGUF is not set, try to import gguf from the local gguf-py directory +if "NO_LOCAL_GGUF" not in os.environ: + # Use absolute path to the gguf-py directory + gguf_py_dir = str(Path(__file__).resolve().parent / "gguf-py") + print(gguf_py_dir) # NOTE: Remove this once path is verified after changes are completed + if gguf_py_dir not in sys.path: + sys.path.insert(1, gguf_py_dir) -if hasattr(faulthandler, 'register') and hasattr(signal, 'SIGUSR1'): +# Import gguf module +try: + import gguf +except ModuleNotFoundError as e: + print(f"Could not import gguf: {e}") + sys.exit(1) + +if TYPE_CHECKING: # NOTE: This isn't necessary. + from typing import TypeAlias # This can technically be omitted. + +if hasattr(faulthandler, "register") and hasattr(signal, "SIGUSR1"): faulthandler.register(signal.SIGUSR1) -NDArray: TypeAlias = 'np.ndarray[Any, Any]' +# NOTE: n-dimensional arrays should be directly referenced +NDArray: TypeAlias = "np.ndarray[Any, Any]" +# Why is this here? LLAMA and GPT are technically the only compatible ARCHs. 
ARCH = gguf.MODEL_ARCH.LLAMA DEFAULT_CONCURRENCY = 8 @@ -49,6 +78,7 @@ DEFAULT_CONCURRENCY = 8 # +# TODO: Clean up and refactor data types @dataclass(frozen=True) class DataType: name: str @@ -153,65 +183,85 @@ GGML_FILE_TYPE_TO_DATA_TYPE: dict[GGMLFileType, DataType] = { @dataclass class Params: - n_vocab: int - n_embd: int - n_layer: int - n_ctx: int - n_ff: int - n_head: int - n_head_kv: int - n_experts: int | None = None - n_experts_used: int | None = None - f_norm_eps: float | None = None + n_vocab: int + n_embd: int + n_layer: int + n_ctx: int + n_ff: int + n_head: int + n_head_kv: int + f_norm_eps: Optional[float] = None + n_experts: Optional[int] = None + n_experts_used: Optional[int] = None - rope_scaling_type: gguf.RopeScalingType | None = None - f_rope_freq_base: float | None = None - f_rope_scale: float | None = None - n_orig_ctx: int | None = None - rope_finetuned: bool | None = None + rope_scaling_type: Optional[gguf.RopeScalingType] = None + f_rope_freq_base: Optional[float] = None + f_rope_scale: Optional[float] = None + n_orig_ctx: Optional[int] = None + rope_finetuned: Optional[bool] = None - ftype: GGMLFileType | None = None + ftype: Optional[GGMLFileType] = None # path to the directory containing the model files - path_model: Path | None = None + path_model: Optional[Path] = None @staticmethod - def guessed(model: LazyModel) -> Params: + def guessed(model: LazyModel) -> "Params": # try transformer naming first - n_vocab, n_embd = model["model.embed_tokens.weight"].shape if "model.embed_tokens.weight" in model else model["tok_embeddings.weight"].shape + n_vocab, n_embd = ( + model["model.embed_tokens.weight"].shape + if "model.embed_tokens.weight" in model + else model["tok_embeddings.weight"].shape + ) # try transformer naming first if "model.layers.0.self_attn.q_proj.weight" in model: - n_layer = next(i for i in itertools.count() if f"model.layers.{i}.self_attn.q_proj.weight" not in model) - elif "model.layers.0.self_attn.W_pack.weight" in model: # next: try baichuan naming - n_layer = next(i for i in itertools.count() if f"model.layers.{i}.self_attn.W_pack.weight" not in model) + n_layer = next( + i + for i in itertools.count() + if f"model.layers.{i}.self_attn.q_proj.weight" not in model + ) + elif ( + "model.layers.0.self_attn.W_pack.weight" in model + ): # next: try baichuan naming + n_layer = next( + i + for i in itertools.count() + if f"model.layers.{i}.self_attn.W_pack.weight" not in model + ) else: - n_layer = next(i for i in itertools.count() if f"layers.{i}.attention.wq.weight" not in model) + n_layer = next( + i + for i in itertools.count() + if f"layers.{i}.attention.wq.weight" not in model + ) if n_layer < 1: - raise Exception("failed to guess 'n_layer'. This model is unknown or unsupported.\n" - "Suggestion: provide 'config.json' of the model in the same directory containing model files.") + raise Exception( + "failed to guess 'n_layer'. This model is unknown or unsupported.\n" + "Suggestion: provide 'config.json' of the model in the same directory containing model files." 
+ ) - n_head = n_embd // 128 # guessed - n_mult = 256 # guessed + n_head = n_embd // 128 # guessed + n_mult = 256 # guessed # TODO: verify this n_ff = int(2 * (4 * n_embd) / 3) n_ff = n_mult * ((n_ff + n_mult - 1) // n_mult) return Params( - n_vocab = n_vocab, - n_embd = n_embd, - n_layer = n_layer, - n_ctx = -1, - n_ff = n_ff, - n_head = n_head, - n_head_kv = n_head, - f_norm_eps = 1e-5, + n_vocab=n_vocab, + n_embd=n_embd, + n_layer=n_layer, + n_ctx=-1, + n_ff=n_ff, + n_head=n_head, + n_head_kv=n_head, + f_norm_eps=1e-5, ) @staticmethod - def loadHFTransformerJson(model: LazyModel, config_path: Path) -> Params: + def load_transformers_config(model: LazyModel, config_path: Path) -> "Params": config = json.load(open(config_path)) rope_scaling_type = f_rope_scale = n_orig_ctx = rope_finetuned = None @@ -224,20 +274,22 @@ class Params: rope_scaling_type = gguf.RopeScalingType.LINEAR elif typ == "yarn": rope_scaling_type = gguf.RopeScalingType.YARN - n_orig_ctx = rope_scaling['original_max_position_embeddings'] - rope_finetuned = rope_scaling['finetuned'] + n_orig_ctx = rope_scaling["original_max_position_embeddings"] + rope_finetuned = rope_scaling["finetuned"] else: - raise NotImplementedError(f'Unknown rope scaling type: {typ}') + raise NotImplementedError(f"Unknown rope scaling type: {typ}") if "max_sequence_length" in config: n_ctx = config["max_sequence_length"] elif "max_position_embeddings" in config: n_ctx = config["max_position_embeddings"] else: - raise Exception("failed to guess 'n_ctx'. This model is unknown or unsupported.\n" - "Suggestion: provide 'config.json' of the model in the same directory containing model files.") + raise Exception( + "failed to guess 'n_ctx'. This model is unknown or unsupported.\n" + "Suggestion: provide 'config.json' of the model in the same directory containing model files." 
+ ) - n_experts = None + n_experts = None n_experts_used = None if "num_local_experts" in config: @@ -245,30 +297,30 @@ class Params: n_experts_used = config["num_experts_per_tok"] return Params( - n_vocab = config["vocab_size"], - n_embd = config["hidden_size"], - n_layer = config["num_hidden_layers"], - n_ctx = n_ctx, - n_ff = config["intermediate_size"], - n_head = (n_head := config["num_attention_heads"]), - n_head_kv = config.get("num_key_value_heads", n_head), - n_experts = n_experts, - n_experts_used = n_experts_used, - f_norm_eps = config["rms_norm_eps"], - f_rope_freq_base = config.get("rope_theta"), - rope_scaling_type = rope_scaling_type, - f_rope_scale = f_rope_scale, - n_orig_ctx = n_orig_ctx, - rope_finetuned = rope_finetuned, + n_vocab=config["vocab_size"], + n_embd=config["hidden_size"], + n_layer=config["num_hidden_layers"], + n_ctx=n_ctx, + n_ff=config["intermediate_size"], + n_head=(n_head := config["num_attention_heads"]), + n_head_kv=config.get("num_key_value_heads", n_head), + n_experts=n_experts, + n_experts_used=n_experts_used, + f_norm_eps=config["rms_norm_eps"], + f_rope_freq_base=config.get("rope_theta"), + rope_scaling_type=rope_scaling_type, + f_rope_scale=f_rope_scale, + n_orig_ctx=n_orig_ctx, + rope_finetuned=rope_finetuned, ) # LLaMA v2 70B params.json # {"dim": 8192, "multiple_of": 4096, "ffn_dim_multiplier": 1.3, "n_heads": 64, "n_kv_heads": 8, "n_layers": 80, "norm_eps": 1e-05, "vocab_size": -1} @staticmethod - def loadOriginalParamsJson(model: LazyModel, config_path: Path) -> Params: + def load_torch_params(model: LazyModel, config_path: Path) -> "Params": config = json.load(open(config_path)) - n_experts = None + n_experts = None n_experts_used = None f_rope_freq_base = None @@ -291,129 +343,249 @@ class Params: if config.get("moe"): n_ff = model["layers.0.feed_forward.experts.0.w1.weight"].shape[0] - n_experts = config["moe"]["num_experts"] + n_experts = config["moe"]["num_experts"] n_experts_used = config["moe"]["num_experts_per_tok"] f_rope_freq_base = 1e6 return Params( - n_vocab = model["tok_embeddings.weight"].shape[0], - n_embd = config["dim"], - n_layer = config["n_layers"], - n_ctx = n_ctx, - n_ff = n_ff, - n_head = (n_head := config["n_heads"]), - n_head_kv = config.get("n_kv_heads", n_head), - n_experts = n_experts, - n_experts_used = n_experts_used, - f_norm_eps = config["norm_eps"], - f_rope_freq_base = config.get("rope_theta", f_rope_freq_base), + n_vocab=config.get("vocab_size", model["tok_embeddings.weight"].shape[0]), + n_embd=config["dim"], + n_layer=config["n_layers"], + n_ctx=n_ctx, + n_ff=n_ff, + n_head=(n_head := config["n_heads"]), + n_head_kv=config.get("n_kv_heads", n_head), + n_experts=n_experts, + n_experts_used=n_experts_used, + f_norm_eps=config["norm_eps"], + f_rope_freq_base=config.get("rope_theta", f_rope_freq_base), ) @staticmethod - def load(model_plus: ModelPlus) -> Params: - hf_config_path = model_plus.paths[0].parent / "config.json" + def load(model_plus: ModelPlus) -> "Params": + hf_config_path = model_plus.paths[0].parent / "config.json" orig_config_path = model_plus.paths[0].parent / "params.json" if hf_config_path.exists(): - params = Params.loadHFTransformerJson(model_plus.model, hf_config_path) + params = Params.load_transformers_config(model_plus.model, hf_config_path) elif orig_config_path.exists(): - params = Params.loadOriginalParamsJson(model_plus.model, orig_config_path) - elif model_plus.format != 'none': + params = Params.load_torch_params(model_plus.model, orig_config_path) + elif model_plus.format != 
"none": params = Params.guessed(model_plus.model) else: - raise ValueError('Cannot guess params when model format is none') + raise ValueError("Cannot guess params when model format is none") params.path_model = model_plus.paths[0].parent return params -class VocabLoader: - def __init__(self, params: Params, fname_tokenizer: Path) -> None: - try: - from transformers import AutoTokenizer - except ImportError as e: - raise ImportError( - "To use VocabLoader, please install the `transformers` package. " - "You can install it with `pip install transformers`." - ) from e +class BpeVocab: # GPT + def __init__( + self, fname_tokenizer: Path, fname_added_tokens: Optional[Path] + ) -> None: + self.bpe_tokenizer = json.loads( + open(str(fname_tokenizer), encoding="utf-8").read() + ) + added_tokens: dict[str, int] + if fname_added_tokens is not None: + # FIXME: Verify that added tokens here _cannot_ overlap with the main vocab. + added_tokens = json.load(open(fname_added_tokens, encoding="utf-8")) + else: + # Fall back to trying to find the added tokens in tokenizer.json + tokenizer_json_file = fname_tokenizer.parent / "tokenizer.json" + if not tokenizer_json_file.is_file(): + added_tokens = {} + else: + tokenizer_json = json.load(open(tokenizer_json_file, encoding="utf-8")) + added_tokens = dict( + (item["content"], item["id"]) + for item in tokenizer_json.get("added_tokens", []) + # Added tokens here can be duplicates of the main vocabulary. + if item["content"] not in self.bpe_tokenizer + ) - try: - self.tokenizer = AutoTokenizer.from_pretrained(str(fname_tokenizer), trust_remote_code=True) - except ValueError: - self.tokenizer = AutoTokenizer.from_pretrained(str(fname_tokenizer), use_fast=False, trust_remote_code=True) + vocab_size: int = len(self.bpe_tokenizer) + expected_ids = list(range(vocab_size, vocab_size + len(added_tokens))) + actual_ids = sorted(added_tokens.values()) + if expected_ids != actual_ids: + expected_end_id = vocab_size + len(actual_ids) - 1 + raise Exception( + f"Expected the {len(actual_ids)} added token ID(s) to be sequential in the range {vocab_size} - {expected_end_id}; got {actual_ids}" + ) - self.added_tokens_dict: OrderedDict[str, int] = OrderedDict() + items = sorted(added_tokens.items(), key=lambda text_idx: text_idx[1]) + self.added_tokens_list = [text for (text, idx) in items] + self.vocab_size_base: int = vocab_size + self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_list) + self.fname_tokenizer = fname_tokenizer + self.fname_added_tokens = fname_added_tokens - for tok, tokidx in sorted(self.tokenizer.get_added_vocab().items(), key=lambda x: x[1]): - if tokidx >= params.n_vocab or tokidx < self.tokenizer.vocab_size: - continue + def bpe_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + tokenizer = self.bpe_tokenizer + reverse_vocab = {id: encoded_tok for encoded_tok, id in tokenizer.items()} - self.added_tokens_dict[tok] = tokidx + for i, _ in enumerate(tokenizer): + yield reverse_vocab[i], 0.0, gguf.TokenType.NORMAL - self.unk_token_id: int = self.tokenizer.unk_token_id - self.specials: dict[str, int] = { + def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + for text in self.added_tokens_list: + score = -1000.0 + yield text.encode("utf-8"), score, gguf.TokenType.CONTROL + + def all_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: + yield from self.bpe_tokens() + yield from self.added_tokens() + + def __repr__(self) -> str: + return f"" + + +class SentencePieceVocab: # LlaMa + def __init__( + 
self, fname_tokenizer: Path, fname_added_tokens: Optional[Path]
+    ) -> None:
+        self.sentencepiece_tokenizer = SentencePieceProcessor(str(fname_tokenizer))
+        added_tokens: dict[str, int]
+        if fname_added_tokens is not None:
+            added_tokens = json.load(open(fname_added_tokens, encoding="utf-8"))
+        else:
+            added_tokens = {}
+
+        vocab_size: int = self.sentencepiece_tokenizer.vocab_size()
+
+        new_tokens = {
+            id: piece for piece, id in added_tokens.items() if id >= vocab_size
+        }
+        expected_new_ids = list(range(vocab_size, vocab_size + len(new_tokens)))
+        actual_new_ids = sorted(new_tokens.keys())
+
+        if expected_new_ids != actual_new_ids:
+            raise ValueError(
+                f"Expected new token IDs {expected_new_ids} to be sequential; got {actual_new_ids}"
+            )
+
+        # Token pieces that were added to the base vocabulary.
+        self.added_tokens_list = [new_tokens[id] for id in actual_new_ids]
+        self.vocab_size_base = vocab_size
+        self.vocab_size = self.vocab_size_base + len(self.added_tokens_list)
+        self.fname_tokenizer = fname_tokenizer
+        self.fname_added_tokens = fname_added_tokens
+
+    def sentencepiece_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]:
+        tokenizer = self.sentencepiece_tokenizer
+        for i in range(tokenizer.vocab_size()):
+            piece = tokenizer.id_to_piece(i)
+            text: bytes = piece.encode("utf-8")
+            score: float = tokenizer.get_score(i)
+
+            toktype = gguf.TokenType.NORMAL
+            if tokenizer.is_unknown(i):
+                toktype = gguf.TokenType.UNKNOWN
+            if tokenizer.is_control(i):
+                toktype = gguf.TokenType.CONTROL
+
+            # NOTE: I think added_tokens are user defined.
+            # ref: https://github.com/google/sentencepiece/blob/master/src/sentencepiece_model.proto
+            # if tokenizer.is_user_defined(i): toktype = gguf.TokenType.USER_DEFINED
+
+            if tokenizer.is_unused(i):
+                toktype = gguf.TokenType.UNUSED
+            if tokenizer.is_byte(i):
+                toktype = gguf.TokenType.BYTE
+
+            yield text, score, toktype
+
+    def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]:
+        for text in self.added_tokens_list:
+            score = -1000.0
+            yield text.encode("utf-8"), score, gguf.TokenType.USER_DEFINED
+
+    def all_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]:
+        yield from self.sentencepiece_tokens()
+        yield from self.added_tokens()
+
+    def __repr__(self) -> str:
+        return f"<SentencePieceVocab with {self.vocab_size_base} base tokens and {len(self.added_tokens_list)} added tokens>"
+
+
+class HfVocab:
+    def __init__(
+        self,
+        fname_tokenizer: Path,
+        fname_added_tokens: Optional[Path] = None,
+    ) -> None:
+        print("fname_tokenizer:", fname_tokenizer)
+        # Allow the tokenizer to default to slow or fast versions.
+        # Explicitly set tokenizer to use local paths.
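+        # NOTE: local_files_only=True keeps this load fully offline: if the
+        # tokenizer files are not already present under fname_tokenizer,
+        # from_pretrained raises instead of downloading from the Hugging Face Hub.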
+ self.tokenizer = AutoTokenizer.from_pretrained( + fname_tokenizer, + cache_dir=fname_tokenizer, + local_files_only=True, + ) + + # Initialize lists and dictionaries for added tokens + self.added_tokens_list = [] + self.added_tokens_dict = dict() + self.added_tokens_ids = set() + + # Process added tokens + for tok, tokidx in sorted( + self.tokenizer.get_added_vocab().items(), key=lambda x: x[1] + ): + # Only consider added tokens that are not in the base vocabulary + if tokidx >= self.tokenizer.vocab_size: + self.added_tokens_list.append(tok) + self.added_tokens_dict[tok] = tokidx + self.added_tokens_ids.add(tokidx) + + # Store special tokens and their IDs + self.specials = { tok: self.tokenizer.get_vocab()[tok] for tok in self.tokenizer.all_special_tokens } - self.special_ids: set[int] = set(self.tokenizer.all_special_ids) - self.reverse_vocab = {id: encoded_tok for encoded_tok, id in self.tokenizer.get_vocab().items()} - self.vocab_size_base: int = self.tokenizer.vocab_size - self.vocab_size: int = self.vocab_size_base + len(self.added_tokens_dict) - self.fname_tokenizer: Path = fname_tokenizer + self.special_ids = set(self.tokenizer.all_special_ids) - vocab_file = "tokenizer.model" - path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file) - if path_candidate is not None: - self.spm = SentencePieceProcessor(str(path_candidate)) - print(self.spm.vocab_size(), self.vocab_size_base) - else: - self.spm = None + # Set vocabulary sizes + self.vocab_size_base = self.tokenizer.vocab_size + self.vocab_size = self.vocab_size_base + len(self.added_tokens_list) - def hf_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: - added_tokens_ids = set(self.added_tokens_dict.values()) + self.fname_tokenizer = fname_tokenizer + self.fname_added_tokens = fname_added_tokens - for i in range(self.vocab_size_base): - if i in added_tokens_ids: + def hf_tokens(self) -> Iterable[Tuple[bytes, float, gguf.TokenType]]: + reverse_vocab = { + id: encoded_tok for encoded_tok, id in self.tokenizer.get_vocab().items() + } + + for token_id in range(self.vocab_size_base): + # Skip processing added tokens here + if token_id in self.added_tokens_ids: continue - text = self.reverse_vocab[i].encode("utf-8") - yield text, self.get_token_score(i), self.get_token_type(i) + # Convert token text to bytes + token_text = reverse_vocab[token_id].encode("utf-8") - def get_token_type(self, token_id: int) -> gguf.TokenType: - toktype = gguf.TokenType.NORMAL + # Yield token text, score, and type + yield token_text, self.get_token_score(token_id), self.get_token_type( + token_id, self.special_ids # Reuse already stored special IDs + ) - if self.spm is not None and token_id < self.spm.vocab_size(): - if self.spm.is_unknown(token_id): - toktype = gguf.TokenType.UNKNOWN - if self.spm.is_control(token_id): - toktype = gguf.TokenType.CONTROL - if self.spm.is_unused(token_id): - toktype = gguf.TokenType.UNUSED - if self.spm.is_byte(token_id): - toktype = gguf.TokenType.BYTE - else: - token = self.reverse_vocab[token_id] - if token_id == self.unk_token_id: - toktype = gguf.TokenType.UNKNOWN - elif token_id in self.special_ids: - toktype = gguf.TokenType.CONTROL - elif len(token) == 6 and token.startswith("<0x") and token.endswith(">"): - toktype = gguf.TokenType.BYTE - - return toktype + def get_token_type(self, token_id: int, special_ids: set) -> gguf.TokenType: + # Determine token type based on whether it's a special token + return ( + gguf.TokenType.CONTROL if token_id in special_ids else gguf.TokenType.NORMAL + ) 
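+    # NOTE: with the mapping above, every token whose ID is in all_special_ids
+    # (e.g. "<s>", "</s>" and "<unk>" for LLaMA-style tokenizers) is emitted as
+    # gguf.TokenType.CONTROL; all other base-vocabulary tokens are NORMAL.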
def get_token_score(self, token_id: int) -> float:
-        if self.spm is not None and token_id < self.spm.vocab_size():
-            return cast(float, self.spm.get_score(token_id))
-        return 0.0
+        # Placeholder for actual logic to determine the token's score
+        # This needs to be implemented based on specific requirements
+        return -1000.0  # Default score

     def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]:
-
-        for text in self.added_tokens_dict:
+        for text in self.added_tokens_list:
             if text in self.specials:
-
-                toktype = self.get_token_type(self.specials[text])
+                toktype = self.get_token_type(self.specials[text], self.special_ids)
                 score = self.get_token_score(self.specials[text])
             else:
@@ -422,45 +594,18 @@ class VocabLoader:

             yield text.encode("utf-8"), score, toktype

-    def has_newline_token(self) -> bool:
-        return '<0x0A>' in self.tokenizer.vocab or '\n' in self.tokenizer.vocab
+    def has_newline_token(self):
+        return "<0x0A>" in self.tokenizer.vocab or "\n" in self.tokenizer.vocab

     def all_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]:
         yield from self.hf_tokens()
         yield from self.added_tokens()

-    def get_vocab_type(self) -> str:
-        path_candidates = []
-        vocab_file = "tokenizer.model"
-        path_candidates.append(vocab_file)
-        path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file)
-        if path_candidate is not None:
-            return "llama"
-
-        vocab_file = "vocab.json"
-        path_candidates.append(vocab_file)
-        path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file)
-        if path_candidate is not None:
-            return "gpt2"
-
-        vocab_file = "tokenizer.json"
-        path_candidates.append(vocab_file)
-        path_candidate = find_vocab_file_path(self.fname_tokenizer, vocab_file)
-        if path_candidate:
-            if not self.has_newline_token():
-                return "gpt2"
-            return "llama"
-
-        raise FileNotFoundError(
-            f"Could not find {path_candidates} in {self.fname_tokenizer} or its parent; "
-            "if it's in another directory, pass the directory as --vocab-dir"
-        )
-
     def __repr__(self) -> str:
-        return f"<VocabLoader with {self.vocab_size_base} base tokens and {len(self.added_tokens_dict)} added tokens>"
+        return f"<HfVocab with {self.vocab_size_base} base tokens and {len(self.added_tokens_list)} added tokens>"

-Vocab: TypeAlias = 'VocabLoader'
+Vocab: TypeAlias = "BpeVocab | SentencePieceVocab | HfVocab"


 #
@@ -724,13 +869,17 @@ class LazyUnpickler(pickle.Unpickler):
     CLASSES: dict[tuple[str, str], Any] = {
         # getattr used here as a workaround for mypy not being smart enough to determine
         # the staticmethods have a __func__ attribute.
-        ('torch._tensor', '_rebuild_from_type_v2'): getattr(rebuild_from_type_v2, '__func__'),
-        ('torch._utils', '_rebuild_tensor_v2'): getattr(lazy_rebuild_tensor_v2, '__func__'),
-        ('torch', 'BFloat16Storage'): LazyStorageKind(DT_BF16),
-        ('torch', 'HalfStorage'): LazyStorageKind(DT_F16),
-        ('torch', 'FloatStorage'): LazyStorageKind(DT_F32),
-        ('torch', 'IntStorage'): LazyStorageKind(DT_I32),
-        ('torch', 'Tensor'): LazyTensor,
+        ("torch._tensor", "_rebuild_from_type_v2"): getattr(
+            rebuild_from_type_v2, "__func__"
+        ),
+        ("torch._utils", "_rebuild_tensor_v2"): getattr(
+            lazy_rebuild_tensor_v2, "__func__"
+        ),
+        ("torch", "BFloat16Storage"): LazyStorageKind(DT_BF16),
+        ("torch", "HalfStorage"): LazyStorageKind(DT_F16),
+        ("torch", "FloatStorage"): LazyStorageKind(DT_F32),
+        ("torch", "IntStorage"): LazyStorageKind(DT_I32),
+        ("torch", "Tensor"): LazyTensor,
     }

     def find_class(self, module: str, name: str) -> Any:
@@ -839,32 +988,43 @@ def bounded_parallel_map(func: Callable[[In], Out], iterable: Iterable[In], conc


 def check_vocab_size(params: Params, vocab: Vocab, pad_vocab: bool = False) -> None:
-    if params.n_vocab != vocab.vocab_size:
-        if params.n_vocab == vocab.vocab_size:
-            print("Ignoring added_tokens.json since model matches vocab size without it.")
-            vocab.added_tokens_dict = OrderedDict()
-            vocab.vocab_size = vocab.vocab_size
-            return
+    # Handle special case where the model's vocab size is not set
+    if params.n_vocab == -1:
+        raise ValueError(
+            f"The model's vocab size is set to -1 in params.json. Please update it manually. Maybe {vocab.vocab_size}?"
+        )

-        if pad_vocab and params.n_vocab > vocab.vocab_size:
-            pad_count = params.n_vocab - vocab.vocab_size
-            print(f'Padding vocab with {pad_count} token(s) - <dummy00001> through <dummy{pad_count:05}>')
-            for i in range(1, (params.n_vocab - vocab.vocab_size) + 1):
-                vocab.added_tokens_dict[f'<dummy{i:05}>'] = -1
-            vocab.vocab_size = params.n_vocab
-            return
-        msg = f"Vocab size mismatch (model has {params.n_vocab}, but {vocab.fname_tokenizer}"
-        msg += f" has {vocab.vocab_size})."
-        if vocab.vocab_size < params.n_vocab < vocab.vocab_size + 20:
-            msg += f" Most likely you are missing added_tokens.json (should be in {vocab.fname_tokenizer.parent})."
-        if vocab.vocab_size < params.n_vocab:
-            msg += " Possibly try using the --padvocab option."
-        raise Exception(msg)
+    # Check for a vocab size mismatch
+    if params.n_vocab == vocab.vocab_size:
+        print("Ignoring added_tokens.json since model matches vocab size without it.")
+        return
+
+    if pad_vocab and params.n_vocab > vocab.vocab_size:
+        pad_count = params.n_vocab - vocab.vocab_size
+        print(
+            f"Padding vocab with {pad_count} token(s) - <dummy00001> through <dummy{pad_count:05}>"
+        )
+        for i in range(1, pad_count + 1):
+            vocab.added_tokens_dict[f"<dummy{i:05}>"] = -1
+        vocab.vocab_size = params.n_vocab
+        return
+
+    msg = f"Vocab size mismatch (model has {params.n_vocab}, but {vocab.fname_tokenizer} has {vocab.vocab_size})."
+    if vocab.vocab_size < params.n_vocab < vocab.vocab_size + 20:
+        msg += f" Most likely you are missing added_tokens.json (should be in {vocab.fname_tokenizer.parent})."
+    if vocab.vocab_size < params.n_vocab:
+        msg += " Add the --pad-vocab option and try again."
+ + raise Exception(msg) class OutputFile: - def __init__(self, fname_out: Path, endianess:gguf.GGUFEndian = gguf.GGUFEndian.LITTLE) -> None: - self.gguf = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[ARCH], endianess=endianess) + def __init__( + self, fname_out: Path, endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE + ) -> None: + self.gguf = gguf.GGUFWriter( + fname_out, gguf.MODEL_ARCH_NAMES[ARCH], endianess=endianess + ) def add_meta_arch(self, params: Params) -> None: name = "LLaMA" @@ -873,16 +1033,21 @@ class OutputFile: if params.n_ctx == 4096: name = "LLaMA v2" elif params.path_model is not None: - name = str(params.path_model.parent).split('/')[-1] + name = str(params.path_model.parent).split("/")[-1] - self.gguf.add_name (name) - self.gguf.add_context_length (params.n_ctx) - self.gguf.add_embedding_length (params.n_embd) - self.gguf.add_block_count (params.n_layer) - self.gguf.add_feed_forward_length (params.n_ff) + self.gguf.add_name(name) + self.gguf.add_context_length(params.n_ctx) + self.gguf.add_embedding_length(params.n_embd) + self.gguf.add_block_count(params.n_layer) + self.gguf.add_feed_forward_length(params.n_ff) self.gguf.add_rope_dimension_count(params.n_embd // params.n_head) - self.gguf.add_head_count (params.n_head) - self.gguf.add_head_count_kv (params.n_head_kv) + self.gguf.add_head_count(params.n_head) + self.gguf.add_head_count_kv(params.n_head_kv) + + if params.f_norm_eps is None: + raise ValueError("f_norm_eps is None") + + self.gguf.add_layer_norm_rms_eps(params.f_norm_eps) if params.n_experts: self.gguf.add_expert_count(params.n_experts) @@ -890,11 +1055,6 @@ class OutputFile: if params.n_experts_used: self.gguf.add_expert_used_count(params.n_experts_used) - if params.f_norm_eps: - self.gguf.add_layer_norm_rms_eps(params.f_norm_eps) - else: - raise ValueError('f_norm_eps is None') - if params.f_rope_freq_base is not None: self.gguf.add_rope_freq_base(params.f_rope_freq_base) @@ -912,18 +1072,44 @@ class OutputFile: if params.ftype is not None: self.gguf.add_file_type(params.ftype) - def add_meta_vocab(self, vocab: Vocab) -> None: + def handle_tokenizer_model(self, vocab: Vocab) -> str: + # Map the vocab types to the supported tokenizer models + tokenizer_model = { + SentencePieceVocab: "llama", + HfVocab: "llama", + BpeVocab: "gpt2", + }.get(type(vocab)) + + # Block if vocab type is not predefined + if tokenizer_model is None: + raise ValueError("Unknown vocab type: Not supported") + + return tokenizer_model + + def extract_vocabulary_from_model(self, vocab: Vocab) -> Tuple[list, list, list]: tokens = [] scores = [] toktypes = [] + # NOTE: `all_tokens` returns the base vocabulary and added tokens for text, score, toktype in vocab.all_tokens(): tokens.append(text) scores.append(score) toktypes.append(toktype) - vocab_type = vocab.get_vocab_type() - self.gguf.add_tokenizer_model(vocab_type) + return tokens, scores, toktypes + + def add_meta_vocab(self, vocab: Vocab) -> None: + # Handle the tokenizer model + tokenizer_model = self.handle_tokenizer_model(vocab) + + # Ensure that tokenizer_model is added to the GGUF model + self.gguf.add_tokenizer_model(tokenizer_model) + + # Extract model vocabulary for model conversion + tokens, scores, toktypes = self.extract_vocabulary_from_model(vocab) + + # Add extracted token information for model conversion self.gguf.add_token_list(tokens) self.gguf.add_token_scores(scores) self.gguf.add_token_types(toktypes) @@ -933,10 +1119,14 @@ class OutputFile: def add_tensor_info(self, name: str, tensor: LazyTensor) -> 
None: n_elements = int(np.prod(tensor.shape)) - raw_dtype = getattr(tensor.data_type, 'ggml_type', None) - data_type = getattr(tensor.data_type, 'quantized_type', None) or tensor.data_type.dtype + raw_dtype = getattr(tensor.data_type, "ggml_type", None) + data_type = ( + getattr(tensor.data_type, "quantized_type", None) or tensor.data_type.dtype + ) data_nbytes = tensor.data_type.elements_to_bytes(n_elements) - self.gguf.add_tensor_info(name, tensor.shape, data_type, data_nbytes, raw_dtype = raw_dtype) + self.gguf.add_tensor_info( + name, tensor.shape, data_type, data_nbytes, raw_dtype=raw_dtype + ) def write_meta(self) -> None: self.gguf.write_header_to_file() @@ -950,11 +1140,14 @@ class OutputFile: @staticmethod def write_vocab_only( - fname_out: Path, params: Params, vocab: Vocab, svocab: gguf.SpecialVocab, + fname_out: Path, + params: Params, + vocab: Vocab, + svocab: gguf.SpecialVocab, endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE, pad_vocab: bool = False, ) -> None: - check_vocab_size(params, vocab, pad_vocab = pad_vocab) + check_vocab_size(params, vocab, pad_vocab=pad_vocab) of = OutputFile(fname_out, endianess=endianess) @@ -982,12 +1175,17 @@ class OutputFile: @staticmethod def write_all( - fname_out: Path, ftype: GGMLFileType, params: Params, model: LazyModel, vocab: Vocab, svocab: gguf.SpecialVocab, + fname_out: Path, + ftype: GGMLFileType, + params: Params, + model: LazyModel, + vocab: Vocab, + svocab: gguf.SpecialVocab, concurrency: int = DEFAULT_CONCURRENCY, endianess: gguf.GGUFEndian = gguf.GGUFEndian.LITTLE, pad_vocab: bool = False, ) -> None: - check_vocab_size(params, vocab, pad_vocab = pad_vocab) + check_vocab_size(params, vocab, pad_vocab=pad_vocab) of = OutputFile(fname_out, endianess=endianess) @@ -1004,18 +1202,30 @@ class OutputFile: of.write_tensor_info() # tensor data - ndarrays_inner = bounded_parallel_map(OutputFile.do_item, model.items(), concurrency = concurrency) + ndarrays_inner = bounded_parallel_map( + OutputFile.do_item, model.items(), concurrency=concurrency + ) if ftype == GGMLFileType.MostlyQ8_0: - ndarrays = bounded_parallel_map(OutputFile.maybe_do_quantize, ndarrays_inner, concurrency = concurrency, max_workers = concurrency, use_processpool_executor = True) + ndarrays = bounded_parallel_map( + OutputFile.maybe_do_quantize, + ndarrays_inner, + concurrency=concurrency, + max_workers=concurrency, + use_processpool_executor=True, + ) else: ndarrays = map(OutputFile.maybe_do_quantize, ndarrays_inner) start = time.time() - for i, ((name, lazy_tensor), ndarray) in enumerate(zip(model.items(), ndarrays)): + for i, ((name, lazy_tensor), ndarray) in enumerate( + zip(model.items(), ndarrays) + ): elapsed = time.time() - start - size = ' x '.join(f"{dim:6d}" for dim in lazy_tensor.shape) + size = " x ".join(f"{dim:6d}" for dim in lazy_tensor.shape) padi = len(str(len(model))) - print(f"[{i+1:{padi}d}/{len(model)}] Writing tensor {name:38s} | size {size:16} | type {lazy_tensor.data_type.name:4} | T+{int(elapsed):4}") + print( + f"[{i+1:{padi}d}/{len(model)}] Writing tensor {name:38s} | size {size:16} | type {lazy_tensor.data_type.name:4} | T+{int(elapsed):4}" + ) of.gguf.write_tensor_data(ndarray) of.close() @@ -1145,30 +1355,95 @@ def load_some_model(path: Path) -> ModelPlus: return model_plus -def find_vocab_file_path(path: Path, vocab_file: str) -> Optional[Path]: - path2 = path / vocab_file - # Use `.parent` instead of /.. to handle the symlink case better. 
- path3 = path.parent / vocab_file +class VocabFactory: + def __init__(self, path: Path): + self.path = path + self.files = { + "tokenizer.model": None, + "vocab.json": None, + "tokenizer.json": None, + } + self._detect_files() - if path2.exists(): - return path2 - if path3.exists(): - return path3 + def _detect_files(self): + for file in self.files.keys(): + file_path = self.path / file + parent_file_path = self.path.parent / file + if file_path.exists(): + self.files[file] = file_path + elif parent_file_path.exists(): + self.files[file] = parent_file_path - return None + def _select_file(self, vocabtype: Optional[str]) -> Path: + if vocabtype in ["spm", "bpe"]: + # For SentencePiece and BPE, return specific files as before + file_key = "tokenizer.model" if vocabtype == "spm" else "vocab.json" + if self.files[file_key]: + return self.files[file_key] + else: + raise FileNotFoundError(f"{vocabtype} {file_key} not found.") + elif vocabtype == "hfft": + # For Hugging Face Fast Tokenizer, return the directory path instead of a specific file + return self.path + else: + raise ValueError(f"Unsupported vocabulary type {vocabtype}") + + def _create_special_vocab( + self, + vocab: Vocab, + vocabtype: str, + model_parent_path: Path, + ) -> gguf.SpecialVocab: + load_merges = vocabtype == "bpe" + n_vocab = vocab.vocab_size if hasattr(vocab, "vocab_size") else None + return gguf.SpecialVocab( + model_parent_path, + load_merges=load_merges, + special_token_types=None, # Predetermined or passed as a parameter + n_vocab=n_vocab, + ) + + def load_vocab( + self, vocabtype: str, model_parent_path: Path + ) -> Tuple[Vocab, gguf.SpecialVocab]: + path = self._select_file(vocabtype) + print(f"Loading vocab file '{path}', type '{vocabtype}'") + + added_tokens_path = path.parent / "added_tokens.json" + if vocabtype == "bpe": + vocab = BpeVocab( + path, added_tokens_path if added_tokens_path.exists() else None + ) + elif vocabtype == "spm": + vocab = SentencePieceVocab( + path, added_tokens_path if added_tokens_path.exists() else None + ) + elif vocabtype == "hfft": + vocab = HfVocab( + path, added_tokens_path if added_tokens_path.exists() else None + ) + else: + raise ValueError(f"Unsupported vocabulary type {vocabtype}") + special_vocab = self._create_special_vocab( + vocab, + vocabtype, + model_parent_path, + ) + return vocab, special_vocab -def default_outfile(model_paths: list[Path], file_type: GGMLFileType) -> Path: +def default_output_file(model_paths: list[Path], file_type: GGMLFileType) -> Path: namestr = { - GGMLFileType.AllF32: "f32", + GGMLFileType.AllF32: "f32", GGMLFileType.MostlyF16: "f16", - GGMLFileType.MostlyQ8_0:"q8_0", + GGMLFileType.MostlyQ8_0: "q8_0", }[file_type] ret = model_paths[0].parent / f"ggml-model-{namestr}.gguf" if ret in model_paths: sys.stderr.write( f"Error: Default output path ({ret}) would overwrite the input. 
" - "Please explicitly specify a path using --outfile.\n") + "Please explicitly specify a path using --outfile.\n" + ) sys.exit(1) return ret @@ -1178,32 +1453,111 @@ def do_dump_model(model_plus: ModelPlus) -> None: print(f"model_plus.format = {model_plus.format!r}") print(f"model_plus.vocab = {model_plus.vocab!r}") for name, lazy_tensor in model_plus.model.items(): - print(f"{name}: shape={lazy_tensor.shape} type={lazy_tensor.data_type}; {lazy_tensor.description}") + print( + f"{name}: shape={lazy_tensor.shape} type={lazy_tensor.data_type}; {lazy_tensor.description}" + ) -def main(args_in: list[str] | None = None) -> None: +def get_argument_parser() -> ArgumentParser: output_choices = ["f32", "f16"] if np.uint32(1) == np.uint32(1).newbyteorder("<"): # We currently only support Q8_0 output on little endian systems. output_choices.append("q8_0") - parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") - parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) - parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") - parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") - parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") - parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") - parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") - parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") - parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") - parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") - parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default = DEFAULT_CONCURRENCY) - parser.add_argument("--bigendian", action="store_true", help="model is executed on big endian machine") - parser.add_argument("--padvocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") - args = parser.parse_args(args_in) + parser = argparse.ArgumentParser( + description="Convert a LLaMa model to a GGML compatible file" + ) + + parser.add_argument( + "model", + type=Path, + help="Directory containing the model file or the model file itself (*.pth, *.pt, *.bin)", + ) + + parser.add_argument( + "--awq-path", + type=Path, + help="Path to the Activation-aware Weight Quantization cache file", + default=None, + ) + + parser.add_argument( + "--dump", + action="store_true", + help="Display the model content without converting it", + ) + + parser.add_argument( + "--dump-single", + action="store_true", + help="Display the content of a single model file without conversion", + ) + + parser.add_argument( + "--vocab-only", + action="store_true", + help="Extract and output only the vocabulary", + ) + + parser.add_argument( + "--outtype", + choices=output_choices, + help="Output format - note: q8_0 may be very slow (default: f16 or f32 based on input)", + ) + + parser.add_argument( + "--vocab-dir", + type=Path, + help="Directory containing the tokenizer.model, if separate from the model file", + ) + + parser.add_argument( + "--vocab-type", + choices=["spm", "bpe", 
"hfft"], # hfft: Hugging Face Fast Tokenizer + default="spm", + help="The vocabulary format used to define the tokenizer model (default: spm)", + ) + + parser.add_argument( + "--pad-vocab", + action="store_true", + help="Add padding tokens when the model's vocabulary size exceeds the tokenizer metadata", + ) + + parser.add_argument( + "--outfile", + type=Path, + help="Specify the path for the output file (default is based on input)", + ) + + parser.add_argument( + "--ctx", type=int, help="Model training context (default is based on input)" + ) + + parser.add_argument( + "--concurrency", + type=int, + help=f"Concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", + default=DEFAULT_CONCURRENCY, + ) + + parser.add_argument( + "--big-endian", + action="store_true", + help="Indicate that the model is executed on a big-endian machine", + ) + + return parser + + +def main(argv: Optional[list[str]] = None) -> None: + parser = get_argument_parser() + args = parser.parse_args(argv) + if args.awq_path: - sys.path.insert(1, str(Path(__file__).parent / 'awq-py')) + sys.path.insert(1, str(Path(__file__).resolve().parent / "awq-py")) from awq.apply_awq import add_scale_weights + tmp_model_path = args.model / "weighted_model" if tmp_model_path.is_dir(): print(f"{tmp_model_path} exists as a weighted model.") @@ -1222,22 +1576,27 @@ def main(args_in: list[str] | None = None) -> None: if not args.vocab_only: model_plus = load_some_model(args.model) else: - model_plus = ModelPlus(model = {}, paths = [args.model / 'dummy'], format = 'none', vocab = None) + model_plus = ModelPlus( + model={}, paths=[args.model / "dummy"], format="none", vocab=None + ) if args.dump: do_dump_model(model_plus) return + endianess = gguf.GGUFEndian.LITTLE - if args.bigendian: + if args.big_endian: endianess = gguf.GGUFEndian.BIG params = Params.load(model_plus) if params.n_ctx == -1: if args.ctx is None: - raise Exception("The model doesn't have a context size, and you didn't specify one with --ctx\n" - "Please specify one with --ctx:\n" - " - LLaMA v1: --ctx 2048\n" - " - LLaMA v2: --ctx 4096\n") + raise Exception( + "The model doesn't have a context size, and you didn't specify one with --ctx\n" + "Please specify one with --ctx:\n" + " - LLaMA v1: --ctx 2048\n" + " - LLaMA v2: --ctx 4096\n" + ) params.n_ctx = args.ctx if args.outtype: @@ -1249,47 +1608,51 @@ def main(args_in: list[str] | None = None) -> None: print(f"params = {params}") - vocab: Vocab + model_parent_path = model_plus.paths[0].parent + vocab_path = Path(args.vocab_dir or args.model or model_parent_path) + vocab_factory = VocabFactory(vocab_path) + vocab, special_vocab = vocab_factory.load_vocab(args.vocab_type, model_parent_path) + if args.vocab_only: if not args.outfile: raise ValueError("need --outfile if using --vocab-only") - # FIXME: Try to respect vocab_dir somehow? 
- vocab = VocabLoader(params, args.vocab_dir or args.model) - special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, - load_merges = True, - n_vocab = vocab.vocab_size) outfile = args.outfile - OutputFile.write_vocab_only(outfile, params, vocab, special_vocab, - endianess = endianess, pad_vocab = args.padvocab) + OutputFile.write_vocab_only( + outfile, + params, + vocab, + special_vocab, + endianess=endianess, + pad_vocab=args.pad_vocab, + ) print(f"Wrote {outfile}") return if model_plus.vocab is not None and args.vocab_dir is None: vocab = model_plus.vocab - else: - vocab_dir = args.vocab_dir if args.vocab_dir else model_plus.paths[0].parent - vocab = VocabLoader(params, vocab_dir) - # FIXME: Try to respect vocab_dir somehow? - print(f"Vocab info: {vocab}") - special_vocab = gguf.SpecialVocab(model_plus.paths[0].parent, - load_merges = True, - n_vocab = vocab.vocab_size) - - print(f"Special vocab info: {special_vocab}") - model = model_plus.model - model = convert_model_names(model, params) - ftype = pick_output_type(model, args.outtype) - model = convert_to_output_type(model, ftype) - outfile = args.outfile or default_outfile(model_plus.paths, ftype) + model = model_plus.model + model = convert_model_names(model, params) + ftype = pick_output_type(model, args.outtype) + model = convert_to_output_type(model, ftype) + outfile = args.outfile or default_output_file(model_plus.paths, ftype) params.ftype = ftype print(f"Writing {outfile}, format {ftype}") - OutputFile.write_all(outfile, ftype, params, model, vocab, special_vocab, - concurrency = args.concurrency, endianess = endianess, pad_vocab = args.padvocab) + OutputFile.write_all( + outfile, + ftype, + params, + model, + vocab, + special_vocab, + concurrency=args.concurrency, + endianess=endianess, + pad_vocab=args.pad_vocab, + ) print(f"Wrote {outfile}") -if __name__ == '__main__': - main() +if __name__ == "__main__": + main(sys.argv[1:]) # Exclude the first element (script name) from sys.argv From 4f56458d34cb13dcbf69aca650e9bf77d5497e6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Wed, 10 Jan 2024 01:04:33 +0100 Subject: [PATCH 405/859] Python script to compare commits with llama-bench (#4844) --- scripts/compare-llama-bench.py | 356 +++++++++++++++++++++++++++++++++ 1 file changed, 356 insertions(+) create mode 100755 scripts/compare-llama-bench.py diff --git a/scripts/compare-llama-bench.py b/scripts/compare-llama-bench.py new file mode 100755 index 000000000..bc1714487 --- /dev/null +++ b/scripts/compare-llama-bench.py @@ -0,0 +1,356 @@ +#!/usr/bin/env python3 + +import argparse +import heapq +import sys +import os +from glob import glob +import sqlite3 + +try: + import git + from tabulate import tabulate +except ImportError: + print("ERROR: the following Python libraries are required: GitPython, tabulate.") + sys.exit(1) + +# Properties by which to differentiate results per commit: +KEY_PROPERTIES = [ + "cuda", "opencl", "metal", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", + "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", + "n_gpu_layers", "main_gpu", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen" +] + +# Properties that are boolean and are converted to Yes/No for the table: +BOOL_PROPERTIES = ["cuda", "opencl", "metal", "gpu_blas", "blas"] + +# Header names for the table: +PRETTY_NAMES = { + "cuda": "CUDA", "opencl": "OpenCL", "metal": "Metal", "gpu_blas": "GPU BLAS", "blas": "BLAS", + "cpu_info": "CPU", 
"gpu_info": "GPU", "model_filename": "File", "model_type": "Model", + "model_size": "Model Size [GiB]", "model_n_params": "Num. of Parameters", + "n_batch": "Batch size", "n_threads": "Threads", "type_k": "K type", "type_v": "V type", + "n_gpu_layers": "GPU layers", "main_gpu": "Main GPU", "no_kv_offload": "NKVO", + "mul_mat_q": "MMQ", "tensor_split": "Tensor split" +} + +DEFAULT_SHOW = ["model_type"] # Always show these properties by default. +DEFAULT_HIDE = ["model_filename"] # Always hide these properties by default. +GPU_NAME_STRIP = ["NVIDIA GeForce ", "Tesla ", "AMD Radeon "] # Strip prefixes for smaller tables. + +DESCRIPTION = """Creates tables from llama-bench data written to an SQLite database. Example usage (Linux): + +$ git checkout master +$ make clean && make llama-bench +$ ./llama-bench -o sql | sqlite3 llama-bench.sqlite +$ git checkout some_branch +$ make clean && make llama-bench +$ ./llama-bench -o sql | sqlite3 llama-bench.sqlite +$ ./scripts/compare-llama-bench.py + +Performance numbers from multiple runs per commit are averaged WITHOUT being weighted by the --repetitions parameter of llama-bench. +""" + +parser = argparse.ArgumentParser( + description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter) +help_b = ( + "The baseline commit to compare performance to. " + "Accepts either a branch name, tag name, or commit hash. " + "Defaults to latest master commit with data." +) +parser.add_argument("-b", "--baseline", help=help_b) +help_c = ( + "The commit whose performance is to be compared to the baseline. " + "Accepts either a branch name, tag name, or commit hash. " + "Defaults to the non-master commit for which llama-bench was run most recently." +) +parser.add_argument("-c", "--compare", help=help_c) +help_i = ( + "Input SQLite file for comparing commits. " + "Defaults to 'llama-bench.sqlite' in the current working directory. " + "If no such file is found and there is exactly one .sqlite file in the current directory, " + "that file is instead used as input." +) +parser.add_argument("-i", "--input", help=help_i) +help_o = ( + "Output format for the table. " + "Defaults to 'pipe' (GitHub compatible). " + "Also supports e.g. 'latex' or 'mediawiki'. " + "See tabulate documentation for full list." +) +parser.add_argument("-o", "--output", help=help_o, default="pipe") +help_s = ( + "Columns to add to the table. " + "Accepts a comma-separated list of values. " + f"Legal values: {', '.join(KEY_PROPERTIES[:-2])}. " + "Defaults to model name (model_type) and CPU and/or GPU name (cpu_info, gpu_info) " + "plus any column where not all data points are the same. " + "If the columns are manually specified, then the results for each unique combination of the " + "specified values are averaged WITHOUT weighing by the --repetitions parameter of llama-bench." 
+) +parser.add_argument("-s", "--show", help=help_s) + +known_args, unknown_args = parser.parse_known_args() + +if unknown_args: + print(f"ERROR: Received unknown args: {unknown_args}.") + print() + parser.print_help() + sys.exit(1) + +input_file = known_args.input +if input_file is None and os.path.exists("./llama-bench.sqlite"): + input_file = "llama-bench.sqlite" +if input_file is None: + sqlite_files = glob("*.sqlite") + if len(sqlite_files) == 1: + input_file = sqlite_files[0] + +if input_file is None: + print("ERROR: Cannot find a suitable input file, please provide one.") + print() + parser.print_help() + sys.exit(1) + +connection = sqlite3.connect(input_file) +cursor = connection.cursor() +builds = cursor.execute("SELECT DISTINCT build_commit FROM test;").fetchall() + +try: + repo = git.Repo(".", search_parent_directories=True) +except git.exc.InvalidGitRepositoryError: + repo = None + + +def find_parent_in_data(commit): + """Helper function to find the most recent parent measured in number of commits for which there is data.""" + heap = [(0, commit)] + seen_hexsha8 = set() + while heap: + depth, current_commit = heapq.heappop(heap) + current_hexsha8 = commit.hexsha[:8] + if (current_hexsha8,) in builds: + return current_hexsha8 + for parent in commit.parents: + parent_hexsha8 = parent.hexsha[:8] + if parent_hexsha8 not in seen_hexsha8: + seen_hexsha8.add(parent_hexsha8) + heapq.heappush(heap, (depth + 1, parent)) + return None + + +def get_all_parent_hexsha8s(commit): + """Helper function to recursively get hexsha8 values for all parents of a commit.""" + unvisited = [commit] + visited = [] + + while unvisited: + current_commit = unvisited.pop(0) + visited.append(current_commit.hexsha[:8]) + for parent in current_commit.parents: + if parent.hexsha[:8] not in visited: + unvisited.append(parent) + + return visited + + +def get_commit_name(hexsha8): + """Helper function to find a human-readable name for a commit if possible.""" + if repo is None: + return hexsha8 + for h in repo.heads: + if h.commit.hexsha[:8] == hexsha8: + return h.name + for t in repo.tags: + if t.commit.hexsha[:8] == hexsha8: + return t.name + return hexsha8 + + +def get_commit_hexsha8(name): + """Helper function to search for a commit given a human-readable name.""" + if repo is None: + return None + for h in repo.heads: + if h.name == name: + return h.commit.hexsha[:8] + for t in repo.tags: + if t.name == name: + return t.commit.hexsha[:8] + return None + + +hexsha8_baseline = name_baseline = None + +# If the user specified a baseline, try to find a commit for it: +if known_args.baseline is not None: + if (known_args.baseline,) in builds: + hexsha8_baseline = known_args.baseline + if hexsha8_baseline is None: + hexsha8_baseline = get_commit_hexsha8(known_args.baseline) + name_baseline = known_args.baseline + if hexsha8_baseline is None: + print(f"ERROR: cannot find data for baseline={known_args.baseline}.") + sys.exit(1) +# Otherwise, search for the most recent parent of master for which there is data: +elif repo is not None: + hexsha8_baseline = find_parent_in_data(repo.heads.master.commit) + + if hexsha8_baseline is None: + print("ERROR: No baseline was provided and did not find data for any master branch commits.") + print() + parser.print_help() + sys.exit(1) +else: + print( + "ERROR: No baseline was provided and the current working directory " + "is not part of a git repository from which a baseline could be inferred." 
+ ) + print() + parser.print_help() + sys.exit(1) + + +name_baseline = get_commit_name(hexsha8_baseline) + +hexsha8_compare = name_compare = None + +# If the user has specified a compare value, try to find a corresponding commit: +if known_args.compare is not None: + if (known_args.compare,) in builds: + hexsha8_compare = known_args.compare + if hexsha8_compare is None: + hexsha8_compare = get_commit_hexsha8(known_args.compare) + name_compare = known_args.compare + if hexsha8_compare is None: + print(f"ERROR: cannot find data for baseline={known_args.compare}.") + sys.exit(1) +# Otherwise, search for the commit for llama-bench was most recently run +# and that is not a parent of master: +elif repo is not None: + hexsha8s_master = get_all_parent_hexsha8s(repo.heads.master.commit) + builds_timestamp = cursor.execute( + "SELECT build_commit, test_time FROM test ORDER BY test_time;").fetchall() + for (hexsha8, _) in reversed(builds_timestamp): + if hexsha8 not in hexsha8s_master: + hexsha8_compare = hexsha8 + break + + if hexsha8_compare is None: + print("ERROR: No compare target was provided and did not find data for any non-master commits.") + print() + parser.print_help() + sys.exit(1) +else: + print( + "ERROR: No compare target was provided and the current working directory " + "is not part of a git repository from which a compare target could be inferred." + ) + print() + parser.print_help() + sys.exit(1) + +name_compare = get_commit_name(hexsha8_compare) + + +def get_rows(properties): + """ + Helper function that gets table rows for some list of properties. + Rows are created by combining those where all provided properties are equal. + The resulting rows are then grouped by the provided properties and the t/s values are averaged. + The returned rows are unique in terms of property combinations. + """ + select_string = ", ".join( + [f"tb.{p}" for p in properties] + ["tb.n_prompt", "tb.n_gen", "AVG(tb.avg_ts)", "AVG(tc.avg_ts)"]) + equal_string = " AND ".join( + [f"tb.{p} = tc.{p}" for p in KEY_PROPERTIES] + [ + f"tb.build_commit = '{hexsha8_baseline}'", f"tc.build_commit = '{hexsha8_compare}'"] + ) + group_order_string = ", ".join([f"tb.{p}" for p in properties] + ["tb.n_gen", "tb.n_prompt"]) + query = (f"SELECT {select_string} FROM test tb JOIN test tc ON {equal_string} " + f"GROUP BY {group_order_string} ORDER BY {group_order_string};") + return cursor.execute(query).fetchall() + + +# If the user provided columns to group the results by, use them: +if known_args.show is not None: + show = known_args.show.split(",") + unknown_cols = [] + for prop in show: + if prop not in KEY_PROPERTIES[:-2]: # Last two values are n_prompt, n_gen. 
+ unknown_cols.append(prop) + if unknown_cols: + print(f"ERROR: Unknown values for --show: {', '.join(unknown_cols)}") + print() + parser.print_usage() + sys.exit(1) + rows_show = get_rows(show) +# Otherwise, select those columns where the values are not all the same: +else: + rows_full = get_rows(KEY_PROPERTIES) + properties_different = [] + for i, kp_i in enumerate(KEY_PROPERTIES): + if kp_i in DEFAULT_SHOW or kp_i == "n_prompt" or kp_i == "n_gen": + continue + for row_full in rows_full: + if row_full[i] != rows_full[0][i]: + properties_different.append(kp_i) + break + + show = [] + # Show CPU and/or GPU by default even if the hardware for all results is the same: + if "gpu_blas" not in properties_different and "n_gpu_layers" not in properties_different: + gpu_blas = bool(rows_full[0][KEY_PROPERTIES.index("gpu_blas")]) + ngl = int(rows_full[0][KEY_PROPERTIES.index("n_gpu_layers")]) + + if not gpu_blas or ngl != 99 and "cpu_info" not in properties_different: + show.append("cpu_info") + if gpu_blas and "gpu_info" not in properties_different: + show.append("gpu_info") + + show += DEFAULT_SHOW + show += properties_different + for prop in DEFAULT_HIDE: + try: + show.remove(prop) + except ValueError: + pass + rows_show = get_rows(show) + +table = [] +for row in rows_show: + n_prompt = int(row[-4]) + n_gen = int(row[-3]) + assert n_prompt == 0 or n_gen == 0 + test_name = f"tg{n_gen}" if n_prompt == 0 else f"pp{n_prompt}" + # Regular columns test name avg t/s values Speedup + # VVVVVVVVVVVVV VVVVVVVVV VVVVVVVVVVVVVV VVVVVVV + table.append(list(row[:-4]) + [test_name] + list(row[-2:]) + [float(row[-1]) / float(row[-2])]) + +# Some a-posteriori fixes to make the table contents prettier: +for bool_property in BOOL_PROPERTIES: + if bool_property in show: + ip = show.index(bool_property) + for row_table in table: + row_table[ip] = "Yes" if int(row_table[ip]) == 1 else "No" + +if "model_size" in show: + ip = show.index("model_size") + for row_table in table: + row_table[ip] = float(row_table[ip]) / 1024 ** 3 + +if "gpu_info" in show: + ip = show.index("gpu_info") + for gns in GPU_NAME_STRIP: + for row_table in table: + row_table[ip] = row_table[ip].replace(gns, "") + +headers = [PRETTY_NAMES[p] for p in show] +headers += ["Test", f"t/s {name_baseline}", f"t/s {name_compare}", "Speedup"] + +print(tabulate( + table, + headers=headers, + floatfmt=".2f", + tablefmt=known_args.output +)) From d34633d8db6c2e400355de4862cd699154ecc73f Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Wed, 10 Jan 2024 14:37:09 +0100 Subject: [PATCH 406/859] clip : support more quantization types (#4846) Uses ggml functions instead of hardcoded names and adds support to quantize into the modern Q-K variants. This is just the bare minimum to get k-types working - a more refined choice of types would be needed to get best quality on low quantizations. I ran a few tests, it doesn't break anything I could notice and a Q6_K ViT works almost as well as Q8_0 but 3 times the inference speed. 
--- examples/llava/clip.cpp | 62 ++++++++++++++++------------------------- 1 file changed, 24 insertions(+), 38 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index cfb79e789..2ae8853d3 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -126,24 +126,7 @@ static struct ggml_tensor * get_tensor(struct ggml_context * ctx, const std::str } static std::string get_ftype(int ftype) { - switch (ftype) { - case 0: - return "f32"; - case 1: - return "f16"; - case 2: - return "q4_0"; - case 3: - return "q4_1"; - case 6: - return "q5_0"; - case 7: - return "q5_1"; - case 8: - return "q8_0"; - default: - throw std::runtime_error(format("%s: Unrecognized file type: %d\n", __func__, ftype)); - } + return ggml_type_name(static_cast(ftype)); } // @@ -533,6 +516,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { buffer_size += n_tensors * 128 /* CLIP PADDING */; clip_ctx * new_clip = new clip_ctx; + #ifdef GGML_USE_CUBLAS new_clip->backend = ggml_backend_cuda_init(0); printf("%s: CLIP using CUDA backend\n", __func__); @@ -543,6 +527,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("%s: CLIP using Metal backend\n", __func__); #endif + if (!new_clip->backend) { new_clip->backend = ggml_backend_cpu_init(); printf("%s: CLIP using CPU backend\n", __func__); @@ -931,26 +916,8 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i ggml_type type = GGML_TYPE_Q4_1; - switch (itype) { - case 2: - type = GGML_TYPE_Q4_0; - break; - case 3: - type = GGML_TYPE_Q4_1; - break; - case 6: - type = GGML_TYPE_Q5_0; - break; - case 7: - type = GGML_TYPE_Q5_1; - break; - case 8: - type = GGML_TYPE_Q8_0; - break; - default: - fprintf(stderr, "%s: invalid quantization type %d\n", __func__, itype); - return false; - }; + assert(itype < GGML_TYPE_COUNT); + type = static_cast(itype); auto * ctx_clip = clip_model_load(fname_inp, 2); @@ -1010,6 +977,10 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i if (quantize) { new_type = type; + if (new_type >= GGML_TYPE_Q2_K && name.find("embd") != std::string::npos) { + new_type = GGML_TYPE_Q8_0; // ggml_get_rows needs non K type + // fprintf(stderr, "%s: quantizing %s to %s\n", __func__, name.c_str(), ggml_type_name(new_type)); + } const size_t n_elms = ggml_nelements(cur); float * f32_data; @@ -1054,6 +1025,21 @@ bool clip_model_quantize(const char * fname_inp, const char * fname_out, const i case GGML_TYPE_Q8_0: { new_size = ggml_quantize_q8_0(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); } break; + case GGML_TYPE_Q2_K: { + new_size = ggml_quantize_q2_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; + case GGML_TYPE_Q3_K: { + new_size = ggml_quantize_q3_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; + case GGML_TYPE_Q4_K: { + new_size = ggml_quantize_q4_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; + case GGML_TYPE_Q5_K: { + new_size = ggml_quantize_q5_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; + case GGML_TYPE_Q6_K: { + new_size = ggml_quantize_q6_K(f32_data, new_data, n_elms, cur->ne[0], hist_cur.data()); + } break; default: { fprintf(stderr, "%s: unsupported quantization type %d\n", __func__, new_type); return false; From 329ff615699d32f596d4ebf8baba654c30064e0d Mon Sep 17 00:00:00 2001 From: Austin <77757836+teleprint-me@users.noreply.github.com> Date: Wed, 10 Jan 2024 08:39:09 -0500 
Subject: [PATCH 407/859] llama : recognize 1B phi models (#4847) This update categorizes models with 24 layers as MODEL_1B, ensuring compatibility with different Phi model variants without impacting existing Phi-2 model functionality. --- llama.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/llama.cpp b/llama.cpp index 8e0717db9..0f09d0c2b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2829,6 +2829,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); switch (hparams.n_layer) { + case 24: model.type = e_model::MODEL_1B; break; case 32: model.type = e_model::MODEL_3B; break; default: model.type = e_model::MODEL_UNKNOWN; } From 57d016ba2d46a6e22517a31a75cebb48f9e234b6 Mon Sep 17 00:00:00 2001 From: Brian Date: Thu, 11 Jan 2024 01:09:53 +1100 Subject: [PATCH 408/859] llama : add additional suffixes for model params (#4834) * llm_load_print_meta: Add additional suffixs for model params * Update llama.cpp model param log remove unneeded comments and convert from > to >= --- llama.cpp | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 0f09d0c2b..e1f1932ba 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3146,7 +3146,15 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: rope_finetuned = %s\n", __func__, hparams.rope_finetuned ? "yes" : "unknown"); LLAMA_LOG_INFO("%s: model type = %s\n", __func__, llama_model_type_name(model.type)); LLAMA_LOG_INFO("%s: model ftype = %s\n", __func__, llama_model_ftype_name(model.ftype).c_str()); - LLAMA_LOG_INFO("%s: model params = %.2f B\n", __func__, ml.n_elements*1e-9); + if (ml.n_elements >= 1e12) { + LLAMA_LOG_INFO("%s: model params = %.2f T\n", __func__, ml.n_elements*1e-12); + } else if (ml.n_elements >= 1e9) { + LLAMA_LOG_INFO("%s: model params = %.2f B\n", __func__, ml.n_elements*1e-9); + } else if (ml.n_elements >= 1e6) { + LLAMA_LOG_INFO("%s: model params = %.2f M\n", __func__, ml.n_elements*1e-6); + } else { + LLAMA_LOG_INFO("%s: model params = %.2f K\n", __func__, ml.n_elements*1e-3); + } if (ml.n_bytes < GiB) { LLAMA_LOG_INFO("%s: model size = %.2f MiB (%.2f BPW) \n", __func__, ml.n_bytes/1024.0/1024.0, ml.n_bytes*8.0/ml.n_elements); } else { From cd108e641dbdedd8c5641c4cec1762f751f38136 Mon Sep 17 00:00:00 2001 From: Behnam M <58621210+ibehnam@users.noreply.github.com> Date: Wed, 10 Jan 2024 14:56:05 -0500 Subject: [PATCH 409/859] server : add a `/health` endpoint (#4860) * added /health endpoint to the server * added comments on the additional /health endpoint * Better handling of server state When the model is being loaded, the server state is `LOADING_MODEL`. If model-loading fails, the server state becomes `ERROR`, otherwise it becomes `READY`. The `/health` endpoint provides more granular messages now according to the server_state value. 
* initialized server_state * fixed a typo * starting http server before initializing the model * Update server.cpp * Update server.cpp * fixes * fixes * fixes * made ServerState atomic and turned two-line spaces into one-line --- examples/server/server.cpp | 199 +++++++++++++++++++++---------------- 1 file changed, 113 insertions(+), 86 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 6c7fcd176..1cca634d5 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -26,6 +26,7 @@ #include #include #include +#include #ifndef SERVER_VERBOSE #define SERVER_VERBOSE 1 @@ -146,6 +147,12 @@ static std::vector base64_decode(const std::string & encoded_string) // parallel // +enum ServerState { + LOADING_MODEL, // Server is starting up, model not fully loaded yet + READY, // Server is ready and model is loaded + ERROR // An error occurred, load_model failed +}; + enum task_type { COMPLETION_TASK, CANCEL_TASK @@ -2453,7 +2460,6 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } } - static std::string random_string() { static const std::string str("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"); @@ -2790,15 +2796,117 @@ int main(int argc, char **argv) {"system_info", llama_print_system_info()}, }); - // load the model - if (!llama.load_model(params)) + httplib::Server svr; + + std::atomic server_state{LOADING_MODEL}; + + svr.set_default_headers({{"Server", "llama.cpp"}, + {"Access-Control-Allow-Origin", "*"}, + {"Access-Control-Allow-Headers", "content-type"}}); + + svr.Get("/health", [&](const httplib::Request&, httplib::Response& res) { + ServerState current_state = server_state.load(); + switch(current_state) { + case READY: + res.set_content(R"({"status": "ok"})", "application/json"); + res.status = 200; // HTTP OK + break; + case LOADING_MODEL: + res.set_content(R"({"status": "loading model"})", "application/json"); + res.status = 503; // HTTP Service Unavailable + break; + case ERROR: + res.set_content(R"({"status": "error", "error": "Model failed to load"})", "application/json"); + res.status = 500; // HTTP Internal Server Error + break; + } + }); + + svr.set_logger(log_server_request); + + svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) + { + const char fmt[] = "500 Internal Server Error\n%s"; + char buf[BUFSIZ]; + try + { + std::rethrow_exception(std::move(ep)); + } + catch (std::exception &e) + { + snprintf(buf, sizeof(buf), fmt, e.what()); + } + catch (...) 
+ { + snprintf(buf, sizeof(buf), fmt, "Unknown Exception"); + } + res.set_content(buf, "text/plain; charset=utf-8"); + res.status = 500; + }); + + svr.set_error_handler([](const httplib::Request &, httplib::Response &res) + { + if (res.status == 401) + { + res.set_content("Unauthorized", "text/plain; charset=utf-8"); + } + if (res.status == 400) + { + res.set_content("Invalid request", "text/plain; charset=utf-8"); + } + else if (res.status == 404) + { + res.set_content("File Not Found", "text/plain; charset=utf-8"); + res.status = 404; + } + }); + + // set timeouts and change hostname and port + svr.set_read_timeout (sparams.read_timeout); + svr.set_write_timeout(sparams.write_timeout); + + if (!svr.bind_to_port(sparams.hostname, sparams.port)) { + fprintf(stderr, "\ncouldn't bind to server socket: hostname=%s port=%d\n\n", sparams.hostname.c_str(), sparams.port); return 1; } - llama.initialize(); + // Set the base directory for serving static files + svr.set_base_dir(sparams.public_path); - httplib::Server svr; + // to make it ctrl+clickable: + LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port); + + std::unordered_map log_data; + log_data["hostname"] = sparams.hostname; + log_data["port"] = std::to_string(sparams.port); + + if (!sparams.api_key.empty()) { + log_data["api_key"] = "api_key: ****" + sparams.api_key.substr(sparams.api_key.length() - 4); + } + + LOG_INFO("HTTP server listening", log_data); + // run the HTTP server in a thread - see comment below + std::thread t([&]() + { + if (!svr.listen_after_bind()) + { + server_state.store(ERROR); + return 1; + } + + return 0; + }); + + // load the model + if (!llama.load_model(params)) + { + server_state.store(ERROR); + return 1; + } else { + llama.initialize(); + server_state.store(READY); + } // Middleware for API key validation auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool { @@ -2826,10 +2934,6 @@ int main(int argc, char **argv) return false; }; - svr.set_default_headers({{"Server", "llama.cpp"}, - {"Access-Control-Allow-Origin", "*"}, - {"Access-Control-Allow-Headers", "content-type"}}); - // this is only called if no index.html is found in the public --path svr.Get("/", [](const httplib::Request &, httplib::Response &res) { @@ -2937,8 +3041,6 @@ int main(int argc, char **argv) } }); - - svr.Get("/v1/models", [¶ms](const httplib::Request&, httplib::Response& res) { std::time_t t = std::time(0); @@ -3157,81 +3259,6 @@ int main(int argc, char **argv) return res.set_content(result.result_json.dump(), "application/json; charset=utf-8"); }); - svr.set_logger(log_server_request); - - svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) - { - const char fmt[] = "500 Internal Server Error\n%s"; - char buf[BUFSIZ]; - try - { - std::rethrow_exception(std::move(ep)); - } - catch (std::exception &e) - { - snprintf(buf, sizeof(buf), fmt, e.what()); - } - catch (...) 
- { - snprintf(buf, sizeof(buf), fmt, "Unknown Exception"); - } - res.set_content(buf, "text/plain; charset=utf-8"); - res.status = 500; - }); - - svr.set_error_handler([](const httplib::Request &, httplib::Response &res) - { - if (res.status == 401) - { - res.set_content("Unauthorized", "text/plain; charset=utf-8"); - } - if (res.status == 400) - { - res.set_content("Invalid request", "text/plain; charset=utf-8"); - } - else if (res.status == 404) - { - res.set_content("File Not Found", "text/plain; charset=utf-8"); - res.status = 404; - } - }); - - // set timeouts and change hostname and port - svr.set_read_timeout (sparams.read_timeout); - svr.set_write_timeout(sparams.write_timeout); - - if (!svr.bind_to_port(sparams.hostname, sparams.port)) - { - fprintf(stderr, "\ncouldn't bind to server socket: hostname=%s port=%d\n\n", sparams.hostname.c_str(), sparams.port); - return 1; - } - - // Set the base directory for serving static files - svr.set_base_dir(sparams.public_path); - - // to make it ctrl+clickable: - LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port); - - std::unordered_map log_data; - log_data["hostname"] = sparams.hostname; - log_data["port"] = std::to_string(sparams.port); - - if (!sparams.api_key.empty()) { - log_data["api_key"] = "api_key: ****" + sparams.api_key.substr(sparams.api_key.length() - 4); - } - - LOG_INFO("HTTP server listening", log_data); - // run the HTTP server in a thread - see comment below - std::thread t([&]() - { - if (!svr.listen_after_bind()) - { - return 1; - } - - return 0; - }); - // GG: if I put the main loop inside a thread, it crashes on the first request when build in Debug!? // "Bus error: 10" - this is on macOS, it does not crash on Linux //std::thread t2([&]() From 5c1980d8d4c4e0c0af77359f81cc44d90b3f250b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 09:10:34 +0200 Subject: [PATCH 410/859] server : fix build + rename enums (#4870) --- examples/server/server.cpp | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1cca634d5..4a0714997 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -147,15 +147,15 @@ static std::vector base64_decode(const std::string & encoded_string) // parallel // -enum ServerState { - LOADING_MODEL, // Server is starting up, model not fully loaded yet - READY, // Server is ready and model is loaded - ERROR // An error occurred, load_model failed +enum server_state { + SERVER_STATE_LOADING_MODEL, // Server is starting up, model not fully loaded yet + SERVER_STATE_READY, // Server is ready and model is loaded + SERVER_STATE_ERROR // An error occurred, load_model failed }; enum task_type { - COMPLETION_TASK, - CANCEL_TASK + TASK_TYPE_COMPLETION, + TASK_TYPE_CANCEL, }; struct task_server { @@ -1402,7 +1402,7 @@ struct llama_server_context task.data = std::move(data); task.infill_mode = infill; task.embedding_mode = embedding; - task.type = COMPLETION_TASK; + task.type = TASK_TYPE_COMPLETION; task.multitask_id = multitask_id; // when a completion task's prompt array is not a singleton, we split it into multiple requests @@ -1524,7 +1524,7 @@ struct llama_server_context std::unique_lock lock(mutex_tasks); task_server task; task.id = id_gen++; - task.type = CANCEL_TASK; + task.type = TASK_TYPE_CANCEL; task.target_id = task_id; queue_tasks.push_back(task); condition_tasks.notify_one(); @@ -1560,7 +1560,7 @@ struct 
llama_server_context queue_tasks.erase(queue_tasks.begin()); switch (task.type) { - case COMPLETION_TASK: { + case TASK_TYPE_COMPLETION: { llama_client_slot *slot = get_slot(json_value(task.data, "slot_id", -1)); if (slot == nullptr) { @@ -1589,7 +1589,7 @@ struct llama_server_context break; } } break; - case CANCEL_TASK: { // release slot linked with the task id + case TASK_TYPE_CANCEL: { // release slot linked with the task id for (auto & slot : slots) { if (slot.task_id == task.target_id) @@ -2798,24 +2798,24 @@ int main(int argc, char **argv) httplib::Server svr; - std::atomic server_state{LOADING_MODEL}; + std::atomic state{SERVER_STATE_LOADING_MODEL}; svr.set_default_headers({{"Server", "llama.cpp"}, {"Access-Control-Allow-Origin", "*"}, {"Access-Control-Allow-Headers", "content-type"}}); svr.Get("/health", [&](const httplib::Request&, httplib::Response& res) { - ServerState current_state = server_state.load(); + server_state current_state = state.load(); switch(current_state) { - case READY: + case SERVER_STATE_READY: res.set_content(R"({"status": "ok"})", "application/json"); res.status = 200; // HTTP OK break; - case LOADING_MODEL: + case SERVER_STATE_LOADING_MODEL: res.set_content(R"({"status": "loading model"})", "application/json"); res.status = 503; // HTTP Service Unavailable break; - case ERROR: + case SERVER_STATE_ERROR: res.set_content(R"({"status": "error", "error": "Model failed to load"})", "application/json"); res.status = 500; // HTTP Internal Server Error break; @@ -2891,7 +2891,7 @@ int main(int argc, char **argv) { if (!svr.listen_after_bind()) { - server_state.store(ERROR); + state.store(SERVER_STATE_ERROR); return 1; } @@ -2901,11 +2901,11 @@ int main(int argc, char **argv) // load the model if (!llama.load_model(params)) { - server_state.store(ERROR); + state.store(SERVER_STATE_ERROR); return 1; } else { llama.initialize(); - server_state.store(READY); + state.store(SERVER_STATE_READY); } // Middleware for API key validation From 7a9f75c38b5e62fe27b8a5a3ed823b4a3714024b Mon Sep 17 00:00:00 2001 From: Behnam M <58621210+ibehnam@users.noreply.github.com> Date: Thu, 11 Jan 2024 02:12:05 -0500 Subject: [PATCH 411/859] server : update readme to document the new `/health` endpoint (#4866) * added /health endpoint to the server * added comments on the additional /health endpoint * Better handling of server state When the model is being loaded, the server state is `LOADING_MODEL`. If model-loading fails, the server state becomes `ERROR`, otherwise it becomes `READY`. The `/health` endpoint provides more granular messages now according to the server_state value. * initialized server_state * fixed a typo * starting http server before initializing the model * Update server.cpp * Update server.cpp * fixes * fixes * fixes * made ServerState atomic and turned two-line spaces into one-line * updated `server` readme to document the `/health` endpoint too --- examples/server/README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/examples/server/README.md b/examples/server/README.md index d85a14f89..dc27e72b9 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -110,6 +110,10 @@ node index.js ``` ## API Endpoints +- **GET** `/health`: Returns the current state of the server: + - `{"status": "loading model"}` if the model is still being loaded. + - `{"status": "error"}` if the model failed to load. + - `{"status": "ok"}` if the model is successfully loaded and the server is ready for further requests mentioned below. 
- **POST** `/completion`: Given a `prompt`, it returns the predicted completion. From f34432ca1e0b288129390c1db8296a82aaf1e632 Mon Sep 17 00:00:00 2001 From: Erik Scholz Date: Fri, 5 Jan 2024 16:00:00 +0100 Subject: [PATCH 412/859] fix : cuda order of synchronization when setting a buffer (ggml/679) * fix : cuda order of synchronization when setting a buffer * also sync before memcpy --------- Co-authored-by: slaren --- ggml-cuda.cu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e26260a35..900f7ba4a 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -10184,8 +10184,8 @@ static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, gg ggml_cuda_set_device(ctx->device); CUDA_CHECK(cudaDeviceSynchronize()); - CUDA_CHECK(cudaMemcpy((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice)); + CUDA_CHECK(cudaDeviceSynchronize()); } static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { From c910e3c28a1caee8cb1398143d582dd9ab697e68 Mon Sep 17 00:00:00 2001 From: Halalaluyafail3 <55773281+Halalaluyafail3@users.noreply.github.com> Date: Tue, 9 Jan 2024 11:16:37 -0500 Subject: [PATCH 413/859] Fix execlp call (ggml/689) NULL can be an integer constant expression with the value zero, in this case the behavior would be undefined because of an incorrect type being passed to the variable arguments. --- ggml.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml.c b/ggml.c index adb387100..4a0ec4c44 100644 --- a/ggml.c +++ b/ggml.c @@ -132,7 +132,7 @@ void ggml_print_backtrace(void) { "-ex", "bt -frame-info source-and-location", "-ex", "detach", "-ex", "quit", - NULL); + (char *) NULL); } else { waitpid(pid, NULL, 0); } From e739de790921e6abbc8c70398303cacd74913f61 Mon Sep 17 00:00:00 2001 From: leejet Date: Wed, 10 Jan 2024 21:13:42 +0800 Subject: [PATCH 414/859] ggml : change GGML_MAX_NAME at compile time (ggml/682) * change GGML_MAX_NAME to 128 * allow controlling the value of GGML_MAX_NAME through external macro definitions --- ggml.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml.h b/ggml.h index c55e598b4..b6cc85952 100644 --- a/ggml.h +++ b/ggml.h @@ -218,7 +218,9 @@ #define GGML_MAX_PARAMS 2048 #define GGML_MAX_CONTEXTS 64 #define GGML_MAX_SRC 10 +#ifndef GGML_MAX_NAME #define GGML_MAX_NAME 64 +#endif #define GGML_MAX_OP_PARAMS 64 #define GGML_DEFAULT_N_THREADS 4 #define GGML_DEFAULT_GRAPH_SIZE 2048 From 5362e43962e84d61e20b91f34991d7ccaef4a7d5 Mon Sep 17 00:00:00 2001 From: Jack Mousseau Date: Wed, 10 Jan 2024 06:19:19 -0800 Subject: [PATCH 415/859] metal : wrap each operation in debug group (ggml/690) --- ggml-metal.m | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index 6c2a8d04e..161906824 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1067,6 +1067,8 @@ bool ggml_metal_graph_compute( GGML_ASSERT(!"unsupported op"); } + [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst)]]; + const int64_t ne00 = src0 ? src0->ne[0] : 0; const int64_t ne01 = src0 ? src0->ne[1] : 0; const int64_t ne02 = src0 ? 
src0->ne[2] : 0; @@ -2423,6 +2425,8 @@ bool ggml_metal_graph_compute( GGML_ASSERT(false); } } + + [encoder popDebugGroup]; } if (encoder != nil) { From f85a973aa139ae6f37e8b8e1966f1d278b5e0372 Mon Sep 17 00:00:00 2001 From: Timothy Cronin <40186632+4imothy@users.noreply.github.com> Date: Thu, 11 Jan 2024 02:27:48 -0500 Subject: [PATCH 416/859] ggml : remove ggml_cpy_inplace and ggml_cont_inplace (ggml/693) --- ggml.c | 30 ++++++++---------------------- ggml.h | 11 ----------- 2 files changed, 8 insertions(+), 33 deletions(-) diff --git a/ggml.c b/ggml.c index 4a0ec4c44..9c42a45e3 100644 --- a/ggml.c +++ b/ggml.c @@ -4311,13 +4311,13 @@ struct ggml_tensor * ggml_set_2d_inplace( static struct ggml_tensor * ggml_cpy_impl( struct ggml_context * ctx, struct ggml_tensor * a, - struct ggml_tensor * b, - bool inplace) { + struct ggml_tensor * b) { GGML_ASSERT(ggml_nelements(a) == ggml_nelements(b)); bool is_node = false; - if (!inplace && (a->grad || b->grad)) { + if (a->grad || b->grad) { + // inplace is false and either one have a grad is_node = true; } @@ -4341,29 +4341,21 @@ struct ggml_tensor * ggml_cpy( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * b) { - return ggml_cpy_impl(ctx, a, b, false); -} - -struct ggml_tensor * ggml_cpy_inplace( - struct ggml_context * ctx, - struct ggml_tensor * a, - struct ggml_tensor * b) { - return ggml_cpy_impl(ctx, a, b, true); + return ggml_cpy_impl(ctx, a, b); } // ggml_cont static struct ggml_tensor * ggml_cont_impl( struct ggml_context * ctx, - struct ggml_tensor * a, - bool inplace) { + struct ggml_tensor * a) { bool is_node = false; - if (!inplace && a->grad) { + if (a->grad) { is_node = true; } - struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); + struct ggml_tensor * result = ggml_dup_tensor(ctx, a); ggml_format_name(result, "%s (cont)", a->name); result->op = GGML_OP_CONT; @@ -4376,13 +4368,7 @@ static struct ggml_tensor * ggml_cont_impl( struct ggml_tensor * ggml_cont( struct ggml_context * ctx, struct ggml_tensor * a) { - return ggml_cont_impl(ctx, a, false); -} - -struct ggml_tensor * ggml_cont_inplace( - struct ggml_context * ctx, - struct ggml_tensor * a) { - return ggml_cont_impl(ctx, a, true); + return ggml_cont_impl(ctx, a); } // make contiguous, with new shape diff --git a/ggml.h b/ggml.h index b6cc85952..127dcef1d 100644 --- a/ggml.h +++ b/ggml.h @@ -1163,22 +1163,11 @@ extern "C" { struct ggml_tensor * a, struct ggml_tensor * b); - // a -> b, in-place, return view(b) - GGML_API struct ggml_tensor * ggml_cpy_inplace( - struct ggml_context * ctx, - struct ggml_tensor * a, - struct ggml_tensor * b); - // make contiguous GGML_API struct ggml_tensor * ggml_cont( struct ggml_context * ctx, struct ggml_tensor * a); - // make contiguous, in-place - GGML_API struct ggml_tensor * ggml_cont_inplace( - struct ggml_context * ctx, - struct ggml_tensor * a); - // make contiguous, with new shape GGML_API struct ggml_tensor * ggml_cont_1d( struct ggml_context * ctx, From 3267c2abc72e34608224408ace3c048831050f97 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 09:34:59 +0200 Subject: [PATCH 417/859] metal : fix deprecation warning (ggml/690) --- ggml-metal.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-metal.m b/ggml-metal.m index 161906824..82d68cd1b 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1067,7 +1067,7 @@ bool ggml_metal_graph_compute( GGML_ASSERT(!"unsupported op"); } - [encoder pushDebugGroup:[NSString 
stringWithCString:ggml_op_desc(dst)]]; + [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst) encoding:NSUTF8StringEncoding]]; const int64_t ne00 = src0 ? src0->ne[0] : 0; const int64_t ne01 = src0 ? src0->ne[1] : 0; From 64802ec00d6383784a9dacf616095eaced16c3c3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 09:39:08 +0200 Subject: [PATCH 418/859] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index fe7f3202f..3e2c579d5 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -f96711108d55bdbbd277e6be07204dce6a94fb93 +979cc23b345006504cfc1f67c0fdf627805e3319 From 2a7c94db5fb67b2f8882d2d16a11bf5d8d12d397 Mon Sep 17 00:00:00 2001 From: Paul Tsochantaris Date: Thu, 11 Jan 2024 14:31:52 +0000 Subject: [PATCH 419/859] metal : put encoder debug group behind a define (#4873) --- ggml-metal.m | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index 82d68cd1b..9698e5a79 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -1067,7 +1067,9 @@ bool ggml_metal_graph_compute( GGML_ASSERT(!"unsupported op"); } +#ifndef GGML_METAL_NDEBUG [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst) encoding:NSUTF8StringEncoding]]; +#endif const int64_t ne00 = src0 ? src0->ne[0] : 0; const int64_t ne01 = src0 ? src0->ne[1] : 0; @@ -2426,7 +2428,9 @@ bool ggml_metal_graph_compute( } } +#ifndef GGML_METAL_NDEBUG [encoder popDebugGroup]; +#endif } if (encoder != nil) { From 2f043328e3116724d15b915b5c6078e2df860a69 Mon Sep 17 00:00:00 2001 From: Isaac McFadyen Date: Thu, 11 Jan 2024 09:33:26 -0500 Subject: [PATCH 420/859] server : fix typo in model name (#4876) --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 4a0714997..860e4e9ae 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2515,7 +2515,7 @@ json oaicompat_completion_params_parse( // // https://platform.openai.com/docs/api-reference/chat/create llama_sampling_params default_sparams; - llama_params["model"] = json_value(body, "model", std::string("uknown")); + llama_params["model"] = json_value(body, "model", std::string("unknown")); llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt' llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); llama_params["temperature"] = json_value(body, "temperature", 0.0); From 43f76bf1c362c067fce46bb8dcda0b64af8a9533 Mon Sep 17 00:00:00 2001 From: pudepiedj Date: Thu, 11 Jan 2024 16:14:52 +0000 Subject: [PATCH 421/859] main : print total token count and tokens consumed so far (#4874) * Token count changes * Add show token count * Updating before PR * Two requested changes * Move param def posn --- common/common.cpp | 8 ++++++++ common/common.h | 2 +- examples/main/main.cpp | 6 +++++- llama.cpp | 2 +- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 4e89fe516..bfcd6d4df 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -630,6 +630,12 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.ppl_stride = std::stoi(argv[i]); + } else if (arg == "-stc" || arg == "--show_token_count") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.token_interval = std::stoi(argv[i]); } else if (arg == "--ppl-output-type") { if 
(++i >= argc) { invalid_param = true; @@ -944,6 +950,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); + printf(" -stc N --show_token_count N\n"); + printf(" show consumed tokens every N tokens\n"); printf("\n"); #ifndef LOG_DISABLE_LOGS log_print_usage(); diff --git a/common/common.h b/common/common.h index e2bbfc258..a295e88b0 100644 --- a/common/common.h +++ b/common/common.h @@ -64,6 +64,7 @@ struct gpt_params { int32_t n_beams = 0; // if non-zero then use beam search of given width. int32_t grp_attn_n = 1; // group-attention factor int32_t grp_attn_w = 512; // group-attention width + int32_t token_interval = 512; // show token count every 512 tokens float rope_freq_base = 0.0f; // RoPE base frequency float rope_freq_scale = 0.0f; // RoPE frequency scaling factor float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor @@ -242,4 +243,3 @@ void dump_kv_cache_view(const llama_kv_cache_view & view, int row_size = 80); // Dump the KV cache view showing individual sequences in each cell (long output). void dump_kv_cache_view_seqs(const llama_kv_cache_view & view, int row_size = 40); - diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 5ea67051f..1f35febbd 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -500,7 +500,7 @@ int main(int argc, char ** argv) { while ((n_remain != 0 && !is_antiprompt) || params.interactive) { // predict if (!embd.empty()) { - // Note: n_ctx - 4 here is to match the logic for commandline prompt handling via + // Note: (n_ctx - 4) here is to match the logic for commandline prompt handling via // --prompt or --file which uses the same value. 
int max_embd_size = n_ctx - 4; @@ -650,6 +650,10 @@ int main(int argc, char ** argv) { n_past += n_eval; LOG("n_past = %d\n", n_past); + // Display total tokens alongside total time + if (n_past % params.token_interval == 0) { + printf("\n\033[31mTokens consumed so far = %d / %d \033[0m\n", n_past, n_ctx); + } } if (!embd.empty() && !path_session.empty()) { diff --git a/llama.cpp b/llama.cpp index e1f1932ba..aaadfa444 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10921,7 +10921,7 @@ void llama_print_timings(struct llama_context * ctx) { __func__, timings.t_p_eval_ms, timings.n_p_eval, timings.t_p_eval_ms / timings.n_p_eval, 1e3 / timings.t_p_eval_ms * timings.n_p_eval); LLAMA_LOG_INFO("%s: eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n", __func__, timings.t_eval_ms, timings.n_eval, timings.t_eval_ms / timings.n_eval, 1e3 / timings.t_eval_ms * timings.n_eval); - LLAMA_LOG_INFO("%s: total time = %10.2f ms\n", __func__, (timings.t_end_ms - timings.t_start_ms)); + LLAMA_LOG_INFO("%s: total time = %10.2f ms / %5d tokens\n", __func__, (timings.t_end_ms - timings.t_start_ms), (timings.n_p_eval + timings.n_eval)); } void llama_reset_timings(struct llama_context * ctx) { From d8d90aa343c22fe01429d3540e47ded87e9dcb9d Mon Sep 17 00:00:00 2001 From: Someone Date: Thu, 11 Jan 2024 17:22:34 +0000 Subject: [PATCH 422/859] ci: nix-flake-update: new token with pr permissions (#4879) * ci: nix-flake-update: new token with pr permissions --------- Co-authored-by: Georgi Gerganov --- .github/workflows/nix-flake-update.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nix-flake-update.yml b/.github/workflows/nix-flake-update.yml index fa9360841..3a6a96e26 100644 --- a/.github/workflows/nix-flake-update.yml +++ b/.github/workflows/nix-flake-update.yml @@ -19,4 +19,4 @@ jobs: pr-labels: | nix pr-reviewers: philiptaron,SomeoneSerge - token: ${{ secrets.GITHUB_TOKEN }} + token: ${{ secrets.FLAKE_TOKEN }} From eab67950068e4b125007d027232c47d2a5831cd0 Mon Sep 17 00:00:00 2001 From: Behnam M <58621210+ibehnam@users.noreply.github.com> Date: Thu, 11 Jan 2024 12:41:39 -0500 Subject: [PATCH 423/859] server : add `LOG_INFO` when model is successfully loaded (#4881) * added /health endpoint to the server * added comments on the additional /health endpoint * Better handling of server state When the model is being loaded, the server state is `LOADING_MODEL`. If model-loading fails, the server state becomes `ERROR`, otherwise it becomes `READY`. The `/health` endpoint provides more granular messages now according to the server_state value. 
* initialized server_state * fixed a typo * starting http server before initializing the model * Update server.cpp * Update server.cpp * fixes * fixes * fixes * made ServerState atomic and turned two-line spaces into one-line * updated `server` readme to document the `/health` endpoint too * used LOG_INFO after successful model loading --- examples/server/server.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 860e4e9ae..51a4b689f 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2906,6 +2906,7 @@ int main(int argc, char **argv) } else { llama.initialize(); state.store(SERVER_STATE_READY); + LOG_INFO("model loaded", {}); } // Middleware for API key validation From 27379455c38cb13f24de92dbd6fcdd04eeb1b9d9 Mon Sep 17 00:00:00 2001 From: Michael Coppola Date: Thu, 11 Jan 2024 12:51:17 -0500 Subject: [PATCH 424/859] server : support for multiple api keys (#4864) * server: added support for multiple api keys, added loading api keys from file * minor: fix whitespace * added file error handling to --api-key-file, changed code to better reflect current style * server: update README.md for --api-key-file --------- Co-authored-by: Michael Coppola --- examples/server/README.md | 3 ++- examples/server/server.cpp | 36 ++++++++++++++++++++++++++++++------ 2 files changed, 32 insertions(+), 7 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index dc27e72b9..fd3034b99 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -23,7 +23,8 @@ Command line options: - `--host`: Set the hostname or ip address to listen. Default `127.0.0.1`. - `--port`: Set the port to listen. Default: `8080`. - `--path`: path from which to serve static files (default examples/server/public) -- `--api-key`: Set an api key for request authorization. By default the server responds to every request. With an api key set, the requests must have the Authorization header set with the api key as Bearer token. +- `--api-key`: Set an api key for request authorization. By default the server responds to every request. With an api key set, the requests must have the Authorization header set with the api key as Bearer token. May be used multiple times to enable multiple valid keys. +- `--api-key-file`: path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access. May be used in conjunction with `--api-key`'s. - `--embedding`: Enable embedding extraction, Default: disabled. - `-np N`, `--parallel N`: Set the number of slots for process requests (default: 1) - `-cb`, `--cont-batching`: enable continuous batching (a.k.a dynamic batching) (default: disabled) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 51a4b689f..345004fa1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -39,7 +39,7 @@ using json = nlohmann::json; struct server_params { std::string hostname = "127.0.0.1"; - std::string api_key; + std::vector api_keys; std::string public_path = "examples/server/public"; int32_t port = 8080; int32_t read_timeout = 600; @@ -2021,6 +2021,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --port PORT port to listen (default (default: %d)\n", sparams.port); printf(" --path PUBLIC_PATH path from which to serve static files (default %s)\n", sparams.public_path.c_str()); printf(" --api-key API_KEY optional api key to enhance server security. 
If set, requests must include this key for access.\n"); + printf(" --api-key-file FNAME path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access.\n"); printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); @@ -2081,7 +2082,28 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - sparams.api_key = argv[i]; + sparams.api_keys.push_back(argv[i]); + } + else if (arg == "--api-key-file") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + std::ifstream key_file(argv[i]); + if (!key_file) { + fprintf(stderr, "error: failed to open file '%s'\n", argv[i]); + invalid_param = true; + break; + } + std::string key; + while (std::getline(key_file, key)) { + if (key.size() > 0) { + sparams.api_keys.push_back(key); + } + } + key_file.close(); } else if (arg == "--timeout" || arg == "-to") { @@ -2881,8 +2903,10 @@ int main(int argc, char **argv) log_data["hostname"] = sparams.hostname; log_data["port"] = std::to_string(sparams.port); - if (!sparams.api_key.empty()) { - log_data["api_key"] = "api_key: ****" + sparams.api_key.substr(sparams.api_key.length() - 4); + if (sparams.api_keys.size() == 1) { + log_data["api_key"] = "api_key: ****" + sparams.api_keys[0].substr(sparams.api_keys[0].length() - 4); + } else if (sparams.api_keys.size() > 1) { + log_data["api_key"] = "api_key: " + std::to_string(sparams.api_keys.size()) + " keys loaded"; } LOG_INFO("HTTP server listening", log_data); @@ -2912,7 +2936,7 @@ int main(int argc, char **argv) // Middleware for API key validation auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool { // If API key is not set, skip validation - if (sparams.api_key.empty()) { + if (sparams.api_keys.empty()) { return true; } @@ -2921,7 +2945,7 @@ int main(int argc, char **argv) std::string prefix = "Bearer "; if (auth_header.substr(0, prefix.size()) == prefix) { std::string received_api_key = auth_header.substr(prefix.size()); - if (received_api_key == sparams.api_key) { + if (std::find(sparams.api_keys.begin(), sparams.api_keys.end(), received_api_key) != sparams.api_keys.end()) { return true; // API key is valid } } From 4330bd83feb39683de4bd7a34cfcf672ff8ac3e4 Mon Sep 17 00:00:00 2001 From: Laura Date: Thu, 11 Jan 2024 19:02:48 +0100 Subject: [PATCH 425/859] server : implement credentialed CORS (#4514) * Implement credentialed CORS according to MDN * Fix syntax error * Move validate_api_key up so it is defined before its first usage --- examples/server/server.cpp | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 345004fa1..031824e14 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2822,9 +2822,15 @@ int main(int argc, char **argv) std::atomic state{SERVER_STATE_LOADING_MODEL}; - svr.set_default_headers({{"Server", "llama.cpp"}, - {"Access-Control-Allow-Origin", "*"}, - {"Access-Control-Allow-Headers", "content-type"}}); + svr.set_default_headers({{"Server", "llama.cpp"}}); + + // CORS preflight + svr.Options(R"(.*)", [](const httplib::Request &req, httplib::Response &res) { + 
res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); + res.set_header("Access-Control-Allow-Credentials", "true"); + res.set_header("Access-Control-Allow-Methods", "POST"); + res.set_header("Access-Control-Allow-Headers", "*"); + }); svr.Get("/health", [&](const httplib::Request&, httplib::Response& res) { server_state current_state = state.load(); @@ -2987,9 +2993,9 @@ int main(int argc, char **argv) return false; }); - svr.Get("/props", [&llama](const httplib::Request & /*req*/, httplib::Response &res) + svr.Get("/props", [&llama](const httplib::Request & req, httplib::Response &res) { - res.set_header("Access-Control-Allow-Origin", "*"); + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); json data = { { "user_name", llama.name_user.c_str() }, { "assistant_name", llama.name_assistant.c_str() } @@ -2999,6 +3005,7 @@ int main(int argc, char **argv) svr.Post("/completion", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } @@ -3066,8 +3073,9 @@ int main(int argc, char **argv) } }); - svr.Get("/v1/models", [¶ms](const httplib::Request&, httplib::Response& res) + svr.Get("/v1/models", [¶ms](const httplib::Request& req, httplib::Response& res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); std::time_t t = std::time(0); json models = { @@ -3085,9 +3093,11 @@ int main(int argc, char **argv) res.set_content(models.dump(), "application/json; charset=utf-8"); }); + // TODO: add mount point without "/v1" prefix -- how? svr.Post("/v1/chat/completions", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } @@ -3161,6 +3171,7 @@ int main(int argc, char **argv) svr.Post("/infill", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } @@ -3233,6 +3244,7 @@ int main(int argc, char **argv) svr.Post("/tokenize", [&llama](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); const json body = json::parse(req.body); std::vector tokens; if (body.count("content") != 0) @@ -3245,6 +3257,7 @@ int main(int argc, char **argv) svr.Post("/detokenize", [&llama](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); const json body = json::parse(req.body); std::string content; if (body.count("tokens") != 0) @@ -3259,6 +3272,7 @@ int main(int argc, char **argv) svr.Post("/embedding", [&llama](const httplib::Request &req, httplib::Response &res) { + res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); const json body = json::parse(req.body); json prompt; if (body.count("content") != 0) From 3ba5b8ca8e6181a5c712c5b77595a29f1d3e2b97 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 21:31:31 +0200 Subject: [PATCH 426/859] swift : pin ggml commit + remove ggml.h from spm-headers (#4878) ggml-ci --- Package.swift | 2 +- spm-headers/ggml.h | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 120000 spm-headers/ggml.h diff --git a/Package.swift b/Package.swift 
index 583e2e276..59191da45 100644 --- a/Package.swift +++ b/Package.swift @@ -14,7 +14,7 @@ let package = Package( .library(name: "llama", targets: ["llama"]), ], dependencies: [ - .package(url: "https://github.com/ggerganov/ggml.git", .branch("master")) + .package(url: "https://github.com/ggerganov/ggml.git", .revision("979cc23b345006504cfc1f67c0fdf627805e3319")) ], targets: [ .target( diff --git a/spm-headers/ggml.h b/spm-headers/ggml.h deleted file mode 120000 index 39215298f..000000000 --- a/spm-headers/ggml.h +++ /dev/null @@ -1 +0,0 @@ -../ggml.h \ No newline at end of file From 49662cbed3e95f5976c070b85b9fd53fd577038d Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Thu, 11 Jan 2024 20:39:39 +0100 Subject: [PATCH 427/859] ggml : SOTA 2-bit quants (add IQ2_XS) (#4856) * iq2_xs: basics * iq2_xs: this should have been in the basics * iq2_xs: CUDA and scalar CPU works * iq2_xs: WIP Metal * iq2_xs: Metal now works * iq2_xs: working, but dog slow, ARM_NEON dot product * iq2_xs: better ARM_NEON dot product We are now at 19.5 t/s for TG-128 and 61 t/s for PP-512 when running on the CPU. * iq2_xs: AVX2 dot product - 19.5 t/s * iq2_xs: faster AVX2 dit product 21.4 t/s for TG-128, 59.2 t/s for PP-512. The latter is 2x compared to the previous version. * iq2_xs: had forgotten to delete iq2-data.h * Add llama enum for IQ2_XS --------- Co-authored-by: Iwan Kawrakow --- ggml-cuda.cu | 232 +++++++++++++++++++++- ggml-metal.m | 42 +++- ggml-metal.metal | 378 +++++++++++++++++++++++++++++++++++- ggml-quants.c | 360 +++++++++++++++++++++++++++++++++- ggml-quants.h | 12 ++ ggml.c | 30 ++- ggml.h | 3 + llama.cpp | 3 + llama.h | 1 + tests/test-quantize-fns.cpp | 5 +- 10 files changed, 1038 insertions(+), 28 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 900f7ba4a..dd19699f6 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -486,6 +486,15 @@ typedef struct { } block_iq2_xxs; static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); +#define QR2_XS 8 +#define QI2_XS (QK_K / (4*QR2_XS)) +typedef struct { + half d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -1328,7 +1337,7 @@ static __global__ void dequantize_block_q6_K(const void * __restrict__ vx, dst_t #endif } -static const __device__ uint64_t kgrid_iq2xxs[256] = { +static const __device__ uint64_t iq2xxs_grid[256] = { 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, @@ -1395,6 +1404,137 @@ static const __device__ uint64_t kgrid_iq2xxs[256] = { 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, }; +static const __device__ uint64_t iq2xs_grid[512] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, 0x080808082b190819, + 0x080808082b191908, 0x080808082b192b19, 0x080808082b2b0808, 0x0808081908080819, + 0x0808081908081908, 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, 0x0808081908192b2b, + 0x08080819082b0819, 0x08080819082b1908, 0x0808081919080808, 0x080808191908082b, + 0x0808081919081919, 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, 0x080808192b081908, + 0x080808192b190808, 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b08081919, + 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b082b2b, 0x0808190808080819, 0x0808190808081908, + 0x080819080808192b, 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, + 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, 0x0808190819082b08, + 0x0808190819190819, 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, 0x0808191908080808, + 0x080819190808082b, 0x0808191908081919, 0x0808191908082b08, 0x0808191908190819, + 0x0808191908191908, 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b08190808, 0x0808192b082b192b, 0x0808192b19080808, + 0x0808192b1908082b, 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, 0x08082b0808190819, + 0x08082b0808191908, 0x08082b08082b0808, 0x08082b08082b1919, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, 0x08082b1908081908, + 0x08082b1908190808, 0x08082b1919080808, 0x08082b192b080819, 0x08082b192b082b19, + 0x08082b2b08080808, 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, 0x081908080808192b, + 
0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, 0x0819080808191919, + 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, 0x0819080819190819, + 0x0819080819191908, 0x08190808192b0808, 0x08190808192b2b2b, 0x081908082b080819, + 0x081908082b081908, 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, 0x0819081908191908, + 0x08190819082b0808, 0x0819081919080819, 0x0819081919081908, 0x0819081919190808, + 0x081908192b080808, 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, 0x0819082b19080808, + 0x0819082b192b0808, 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, + 0x0819190808082b08, 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, 0x0819190819190808, + 0x08191908192b1908, 0x081919082b080808, 0x0819191908080819, 0x0819191908081908, + 0x0819191908190808, 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b080819082b, 0x08192b0819080808, 0x08192b0819191908, 0x08192b082b08192b, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, + 0x082b080808082b08, 0x082b080808082b2b, 0x082b080808190819, 0x082b080808191908, + 0x082b0808082b0808, 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, 0x082b081908081908, + 0x082b081908190808, 0x082b081919080808, 0x082b081919082b08, 0x082b0819192b1919, + 0x082b082b08080808, 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, 0x082b1908082b2b19, + 0x082b190819080808, 0x082b191908080808, 0x082b191919080819, 0x082b19191919082b, + 0x082b19192b192b19, 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, 0x082b2b08082b0808, + 0x082b2b0819191919, 0x082b2b082b082b08, 0x082b2b082b2b082b, 0x082b2b19192b2b08, + 0x082b2b192b190808, 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, 0x1908080808081908, + 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, 0x190808080819082b, + 0x1908080808191919, 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, + 0x1908080819082b2b, 0x1908080819190819, 0x1908080819191908, 0x19080808192b0808, + 0x19080808192b1919, 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, 0x1908081908082b08, + 0x1908081908190819, 0x1908081908191908, 0x19080819082b0808, 0x1908081919080819, + 0x1908081919081908, 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, 0x1908082b08190808, + 0x1908082b0819082b, 0x1908082b082b2b19, 0x1908082b19080808, 0x1908190808080808, + 0x190819080808082b, 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, 0x1908190819080819, + 0x1908190819081908, 0x1908190819190808, 0x190819082b080808, 0x190819082b191908, + 0x1908191908080819, 
0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 0x1908192b08080808, 0x1908192b08082b2b, + 0x1908192b19081908, 0x1908192b19190808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, 0x19082b1919081908, + 0x19082b1919190808, 0x19082b19192b2b19, 0x19082b2b08081908, 0x1919080808080808, + 0x191908080808082b, 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, 0x1919080819080819, + 0x1919080819081908, 0x1919080819190808, 0x191908082b080808, 0x1919081908080819, + 0x1919081908081908, 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, 0x1919082b2b2b2b2b, + 0x1919190808080819, 0x1919190808081908, 0x1919190808190808, 0x19191908082b0819, + 0x1919190819080808, 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 0x191919192b080808, 0x191919192b082b08, + 0x1919192b082b0819, 0x1919192b192b2b08, 0x1919192b2b2b0819, 0x19192b0808080808, + 0x19192b0808191908, 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, 0x19192b2b2b081919, + 0x192b080808080819, 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, + 0x192b080819191908, 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, 0x192b082b2b19082b, + 0x192b190808080808, 0x192b19080819192b, 0x192b191908190808, 0x192b191919080808, + 0x192b191919081919, 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, + 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, 0x192b2b2b192b082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, + 0x2b08080808190819, 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808082b080808, + 0x2b0808082b08082b, 0x2b0808082b2b2b08, 0x2b0808082b2b2b2b, 0x2b08081908080819, + 0x2b08081908081908, 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, 0x2b08082b082b0808, + 0x2b08082b2b080808, 0x2b08082b2b08082b, 0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, 0x2b0819082b082b19, + 0x2b08191908080808, 0x2b08191919081908, 0x2b0819192b2b1919, 0x2b08192b08192b08, + 0x2b08192b192b2b2b, 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, 0x2b082b082b2b2b08, + 0x2b082b190808192b, 0x2b082b2b082b082b, 0x2b082b2b2b080808, 0x2b082b2b2b082b08, + 0x2b082b2b2b19192b, 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, 0x2b1908082b081908, + 0x2b19081908080808, 0x2b190819082b082b, 0x2b190819192b1908, 0x2b19082b1919192b, + 0x2b19082b2b082b19, 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, 0x2b1919192b190808, + 0x2b1919192b19082b, 0x2b19192b19080819, 0x2b192b0819190819, 0x2b192b082b2b192b, + 0x2b192b1919082b19, 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 
0x2b2b080808082b2b, 0x2b2b0808082b0808, + 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, 0x2b2b081919190819, 0x2b2b081919192b19, + 0x2b2b08192b2b192b, 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, 0x2b2b190819080808, + 0x2b2b19082b191919, 0x2b2b192b192b1919, 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, + 0x2b2b2b08082b0808, 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, 0x2b2b2b192b08192b, + 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, +}; + static const __device__ uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -1439,7 +1579,7 @@ static __global__ void dequantize_block_iq2_xxs(const void * __restrict__ vx, ds dst_t * y = yy + i*QK_K + 32*ib + 8*il; const uint16_t * q2 = x[i].qs + 4*ib; const uint8_t * aux8 = (const uint8_t *)q2; - const uint8_t * grid = (const uint8_t *)(kgrid_iq2xxs + aux8[il]); + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[il]); const uint32_t aux32 = q2[2] | (q2[3] << 16); const float d = (float)x[i].d * (0.5f + (aux32 >> 28)) * 0.25f; const uint8_t signs = ksigns_iq2xs[(aux32 >> 7*il) & 127]; @@ -1450,6 +1590,28 @@ static __global__ void dequantize_block_iq2_xxs(const void * __restrict__ vx, ds } +template +static __global__ void dequantize_block_iq2_xs(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq2_xs * x = (const block_iq2_xs *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint16_t * q2 = x[i].qs + 4*ib; + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[il] & 511)); + const float d = (float)x[i].d * (0.5f + ((x[i].scales[ib] >> 4*(il/2)) & 0xf)) * 0.25f; + const uint8_t signs = ksigns_iq2xs[q2[il] >> 9]; + for (int j = 0; j < 8; ++j) y[j] = d * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); +#else + assert(false); +#endif + +} + static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); @@ -3996,7 +4158,7 @@ static __device__ __forceinline__ float vec_dot_iq2_xxs_q8_1( uint32_t aux32 = q2[2] | (q2[3] << 16); int sumi = 0; for (int l = 0; l < 4; ++l) { - const uint8_t * grid = (const uint8_t *)(kgrid_iq2xxs + aux8[l]); + const uint8_t * grid = (const uint8_t *)(iq2xxs_grid + aux8[l]); const uint8_t signs = ksigns_iq2xs[aux32 & 127]; for (int j = 0; j < 8; ++j) { sumi += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1 : 1); @@ -4012,8 +4174,8 @@ static __device__ __forceinline__ float vec_dot_iq2_xxs_q8_1( const int il = iqs%2; const uint16_t * q2 = bq2->qs + 4*ib32; const uint8_t * aux8 = (const uint8_t *)q2; - const uint8_t * grid1 = (const uint8_t *)(kgrid_iq2xxs + aux8[2*il+0]); - const uint8_t * grid2 = (const uint8_t *)(kgrid_iq2xxs + aux8[2*il+1]); + const uint8_t * grid1 = (const uint8_t *)(iq2xxs_grid + aux8[2*il+0]); + const uint8_t * grid2 = (const uint8_t *)(iq2xxs_grid + aux8[2*il+1]); const uint32_t aux32 = q2[2] | (q2[3] << 16); const float d = (float)bq2->d * (0.5f + (aux32 >> 28)) * (float)bq8_1[ib32].ds.x * 0.25f; const uint8_t signs1 = ksigns_iq2xs[(aux32 >> 14*il) & 127]; @@ -4032,6 +4194,42 @@ static __device__ __forceinline__ float vec_dot_iq2_xxs_q8_1( #endif } +static __device__ __forceinline__ float vec_dot_iq2_xs_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if QK_K == 256 + const block_iq2_xs * bq2 = (const block_iq2_xs *) vbq; + + const int ib32 = iqs; + const uint16_t * q2 = bq2->qs + 4*ib32; + const int8_t * q8 = bq8_1[ib32].qs; + const uint8_t ls1 = bq2->scales[ib32] & 0xf; + const uint8_t ls2 = bq2->scales[ib32] >> 4; + int sumi1 = 0; + for (int l = 0; l < 2; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511)); + const uint8_t signs = ksigns_iq2xs[q2[l] >> 9]; + for (int j = 0; j < 8; ++j) { + sumi1 += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + int sumi2 = 0; + for (int l = 2; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511)); + const uint8_t signs = ksigns_iq2xs[q2[l] >> 9]; + for (int j = 0; j < 8; ++j) { + sumi2 += q8[j] * grid[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + const float d = (float)bq2->d * (float)bq8_1[ib32].ds.x * 0.25f; + return d * ((0.5f + ls1) * sumi1 + (0.5f + ls2) * sumi2); +#else + assert(false); + return 0.f; +#endif +} + template static __device__ __forceinline__ void mul_mat_q( @@ -6035,6 +6233,12 @@ static void dequantize_row_iq2_xxs_cuda(const void * vx, dst_t * y, const int k, dequantize_block_iq2_xxs<<>>(vx, y); } +template +static void dequantize_row_iq2_xs_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq2_xs<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -6065,6 +6269,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_q6_K_cuda; case GGML_TYPE_IQ2_XXS: return dequantize_row_iq2_xxs_cuda; + case GGML_TYPE_IQ2_XS: + return dequantize_row_iq2_xs_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -6096,6 +6302,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_q6_K_cuda; case GGML_TYPE_IQ2_XXS: return dequantize_row_iq2_xxs_cuda; + case GGML_TYPE_IQ2_XS: + return dequantize_row_iq2_xs_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -6299,6 +6507,15 @@ static void mul_mat_vec_iq2_xxs_q8_1_cuda(const void * vx, const void * vy, floa <<>>(vx, vy, dst, ncols, nrows); } +static void mul_mat_vec_iq2_xs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { + GGML_ASSERT(ncols % QK_K == 0); + const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; + 
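For reference, each 16-bit entry of qs in the IQ2_XS format packs a 9-bit index into the 512-entry iq2xs_grid table (one uint64_t per entry, i.e. 8 one-byte quant magnitudes) and a 7-bit index into ksigns_iq2xs, while scales packs two 4-bit block scales per byte. A minimal scalar sketch of decoding one code word, mirroring the CUDA kernels above; the helper name and the hard-coded table entries are illustrative, not part of the patch:

#include <stdint.h>
#include <stdio.h>

/* Illustrative scalar decoder for one IQ2_XS code word (not part of the patch).
 * In ggml: grid_entry = iq2xs_grid[w & 511], signs = ksigns_iq2xs[w >> 9];
 * here the two table entries the example hits are hard-coded. */
static void decode_iq2_xs_word(uint64_t grid_entry, uint8_t signs,
                               float d, uint8_t ls, float out[8]) {
    const uint8_t * grid = (const uint8_t *)&grid_entry;   /* 8 magnitudes      */
    const float dl = d * (0.5f + ls) * 0.25f;              /* 4-bit block scale */
    for (int j = 0; j < 8; ++j) {
        out[j] = dl * grid[j] * ((signs >> j) & 1 ? -1.0f : 1.0f);
    }
}

int main(void) {
    /* Code word w = (1 << 9) | 0: grid index 0, sign index 1.
     * iq2xs_grid[0]   = 0x0808080808080808 -> all magnitudes 8
     * ksigns_iq2xs[1] = 129 = 0b10000001   -> negate quants 0 and 7 */
    float out[8];
    decode_iq2_xs_word(0x0808080808080808ull, 129, /*d=*/1.0f, /*ls=*/3, out);
    for (int j = 0; j < 8; ++j) printf("%g ", out[j]);  /* -7 7 7 7 7 7 7 -7 */
    printf("\n");
    return 0;
}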
const dim3 block_nums(block_num_y, 1, 1); + const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); + mul_mat_vec_q + <<>>(vx, vy, dst, ncols, nrows); +} + static void ggml_mul_mat_q4_0_q8_1_cuda( const void * vx, const void * vy, float * dst, const int ncols_x, const int nrows_x, const int ncols_y, const int nrows_y, const int nrows_dst, cudaStream_t stream) { @@ -7871,6 +8088,7 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: return max_compute_capability >= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -7892,6 +8110,7 @@ static int64_t get_row_rounding(ggml_type type) { case GGML_TYPE_Q4_K: case GGML_TYPE_Q5_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: return max_compute_capability >= CC_VOLTA ? 128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -7945,6 +8164,9 @@ static void ggml_cuda_op_mul_mat_vec_q( case GGML_TYPE_IQ2_XXS: mul_mat_vec_iq2_xxs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); break; + case GGML_TYPE_IQ2_XS: + mul_mat_vec_iq2_xs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; default: GGML_ASSERT(false); break; diff --git a/ggml-metal.m b/ggml-metal.m index 9698e5a79..6e5594432 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -89,6 +89,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(get_rows_q6_K); GGML_METAL_DECL_KERNEL(get_rows_i32); GGML_METAL_DECL_KERNEL(get_rows_iq2_xxs); + GGML_METAL_DECL_KERNEL(get_rows_iq2_xs); GGML_METAL_DECL_KERNEL(rms_norm); GGML_METAL_DECL_KERNEL(group_norm); GGML_METAL_DECL_KERNEL(norm); @@ -108,6 +109,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_iq2_xxs_f32); + GGML_METAL_DECL_KERNEL(mul_mv_iq2_xs_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f16); GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32); @@ -124,6 +126,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xxs_f32); + GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xs_f32); GGML_METAL_DECL_KERNEL(mul_mm_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_q4_0_f32); @@ -137,6 +140,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_iq2_xxs_f32); + GGML_METAL_DECL_KERNEL(mul_mm_iq2_xs_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_f32_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_f16_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q4_0_f32); @@ -150,6 +154,7 @@ struct ggml_metal_context { GGML_METAL_DECL_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_q6_K_f32); GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xxs_f32); + GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xs_f32); GGML_METAL_DECL_KERNEL(rope_f32); GGML_METAL_DECL_KERNEL(rope_f16); GGML_METAL_DECL_KERNEL(alibi_f32); @@ -385,6 +390,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(get_rows_q6_K); GGML_METAL_ADD_KERNEL(get_rows_i32); GGML_METAL_ADD_KERNEL(get_rows_iq2_xxs); + GGML_METAL_ADD_KERNEL(get_rows_iq2_xs); GGML_METAL_ADD_KERNEL(rms_norm); GGML_METAL_ADD_KERNEL(group_norm); GGML_METAL_ADD_KERNEL(norm); @@ -404,6 +410,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_q6_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_iq2_xxs_f32); + 
GGML_METAL_ADD_KERNEL(mul_mv_iq2_xs_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f16); GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32); @@ -420,6 +427,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_q6_K_f32); GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xxs_f32); + GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xs_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_ADD_KERNEL(mul_mm_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_f16_f32); @@ -434,6 +442,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_q6_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_iq2_xxs_f32); + GGML_METAL_ADD_KERNEL(mul_mm_iq2_xs_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_f32_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_f16_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q4_0_f32); @@ -447,6 +456,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_q6_K_f32); GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xxs_f32); + GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xs_f32); } GGML_METAL_ADD_KERNEL(rope_f32); GGML_METAL_ADD_KERNEL(rope_f16); @@ -513,6 +523,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(get_rows_q6_K); GGML_METAL_DEL_KERNEL(get_rows_i32); GGML_METAL_DEL_KERNEL(get_rows_iq2_xxs); + GGML_METAL_DEL_KERNEL(get_rows_iq2_xs); GGML_METAL_DEL_KERNEL(rms_norm); GGML_METAL_DEL_KERNEL(group_norm); GGML_METAL_DEL_KERNEL(norm); @@ -532,6 +543,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_q6_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_iq2_xxs_f32); + GGML_METAL_DEL_KERNEL(mul_mv_iq2_xs_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_f32_f32); //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f16); GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32); @@ -548,6 +560,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mv_id_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_q6_K_f32); GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xxs_f32); + GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xs_f32); if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { GGML_METAL_DEL_KERNEL(mul_mm_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_f16_f32); @@ -562,6 +575,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_q6_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_iq2_xxs_f32); + GGML_METAL_DEL_KERNEL(mul_mm_iq2_xs_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_f32_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_f16_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q4_0_f32); @@ -575,6 +589,7 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_DEL_KERNEL(mul_mm_id_q5_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_q6_K_f32); GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xxs_f32); + GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xs_f32); } GGML_METAL_DEL_KERNEL(rope_f32); GGML_METAL_DEL_KERNEL(rope_f16); @@ -1561,6 +1576,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_K_f32]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q6_K_f32]; break; case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xxs_f32]; break; + case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xs_f32]; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } [encoder 
setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1679,6 +1695,12 @@ bool ggml_metal_graph_compute( nth1 = 16; [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xxs_f32]; } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xs_f32]; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1712,12 +1734,12 @@ bool ggml_metal_graph_compute( if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || - //src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src0t == GGML_TYPE_IQ2_XXS) { - [encoder setThreadgroupMemoryLength:(256*8+128) atIndex:0]; + else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { + const int mem_size = src0t == GGML_TYPE_IQ2_XXS ? 256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src0t == GGML_TYPE_Q4_K) { @@ -1810,6 +1832,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_K_f32]; break; case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q6_K_f32]; break; case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xxs_f32]; break; + case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xs_f32]; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1931,6 +1954,12 @@ bool ggml_metal_graph_compute( nth1 = 16; [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xxs_f32]; } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xs_f32]; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1980,12 +2009,12 @@ bool ggml_metal_graph_compute( if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || - //src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src2t == GGML_TYPE_IQ2_XXS) { - [encoder setThreadgroupMemoryLength:(256*8+128) atIndex:0]; + else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { + const int mem_size = src2t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src2t == GGML_TYPE_Q4_K) { @@ -2026,6 +2055,7 @@ bool ggml_metal_graph_compute( case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q6_K]; break; case GGML_TYPE_I32: [encoder setComputePipelineState:ctx->pipeline_get_rows_i32]; break; case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xxs]; break; + case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xs]; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index 229efb8b6..029578dc5 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2452,6 +2452,13 @@ typedef struct { } block_iq2_xxs; // 66 bytes / block for QK_K = 256, so 2.0625 bpw +typedef struct { + half d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +// 74 bytes / block for QK_K = 256, so 2.3125 bpw + //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -3476,7 +3483,7 @@ kernel void kernel_mul_mv_q6_K_f32( // ======================= "True" 2-bit -constexpr constant static uint64_t kgrid_iq2xxs[256] = { +constexpr constant static uint64_t iq2xxs_grid[256] = { 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, @@ -3543,6 +3550,137 @@ constexpr constant static uint64_t kgrid_iq2xxs[256] = { 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, }; +constexpr constant static uint64_t iq2xs_grid[512] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, 0x080808082b190819, + 0x080808082b191908, 0x080808082b192b19, 0x080808082b2b0808, 0x0808081908080819, + 0x0808081908081908, 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, 0x0808081908192b2b, + 0x08080819082b0819, 0x08080819082b1908, 0x0808081919080808, 0x080808191908082b, + 0x0808081919081919, 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, 0x080808192b081908, + 0x080808192b190808, 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b08081919, + 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b082b2b, 0x0808190808080819, 0x0808190808081908, + 0x080819080808192b, 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, + 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, 0x0808190819082b08, + 
0x0808190819190819, 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, 0x0808191908080808, + 0x080819190808082b, 0x0808191908081919, 0x0808191908082b08, 0x0808191908190819, + 0x0808191908191908, 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b08190808, 0x0808192b082b192b, 0x0808192b19080808, + 0x0808192b1908082b, 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, 0x08082b0808190819, + 0x08082b0808191908, 0x08082b08082b0808, 0x08082b08082b1919, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, 0x08082b1908081908, + 0x08082b1908190808, 0x08082b1919080808, 0x08082b192b080819, 0x08082b192b082b19, + 0x08082b2b08080808, 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, 0x081908080808192b, + 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, 0x0819080808191919, + 0x0819080808192b08, 0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, 0x0819080819190819, + 0x0819080819191908, 0x08190808192b0808, 0x08190808192b2b2b, 0x081908082b080819, + 0x081908082b081908, 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, 0x0819081908191908, + 0x08190819082b0808, 0x0819081919080819, 0x0819081919081908, 0x0819081919190808, + 0x081908192b080808, 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, 0x0819082b19080808, + 0x0819082b192b0808, 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, + 0x0819190808082b08, 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, 0x0819190819190808, + 0x08191908192b1908, 0x081919082b080808, 0x0819191908080819, 0x0819191908081908, + 0x0819191908190808, 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b080819082b, 0x08192b0819080808, 0x08192b0819191908, 0x08192b082b08192b, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, + 0x082b080808082b08, 0x082b080808082b2b, 0x082b080808190819, 0x082b080808191908, + 0x082b0808082b0808, 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, 0x082b081908081908, + 0x082b081908190808, 0x082b081919080808, 0x082b081919082b08, 0x082b0819192b1919, + 0x082b082b08080808, 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, 0x082b1908082b2b19, + 0x082b190819080808, 0x082b191908080808, 0x082b191919080819, 0x082b19191919082b, + 0x082b19192b192b19, 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, 0x082b2b08082b0808, + 0x082b2b0819191919, 0x082b2b082b082b08, 0x082b2b082b2b082b, 0x082b2b19192b2b08, + 0x082b2b192b190808, 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 
0x082b2b2b2b082b2b, 0x1908080808080819, 0x1908080808081908, + 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, 0x190808080819082b, + 0x1908080808191919, 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, + 0x1908080819082b2b, 0x1908080819190819, 0x1908080819191908, 0x19080808192b0808, + 0x19080808192b1919, 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, 0x1908081908082b08, + 0x1908081908190819, 0x1908081908191908, 0x19080819082b0808, 0x1908081919080819, + 0x1908081919081908, 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, 0x1908082b08190808, + 0x1908082b0819082b, 0x1908082b082b2b19, 0x1908082b19080808, 0x1908190808080808, + 0x190819080808082b, 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, 0x1908190819080819, + 0x1908190819081908, 0x1908190819190808, 0x190819082b080808, 0x190819082b191908, + 0x1908191908080819, 0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 0x1908192b08080808, 0x1908192b08082b2b, + 0x1908192b19081908, 0x1908192b19190808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, 0x19082b1919081908, + 0x19082b1919190808, 0x19082b19192b2b19, 0x19082b2b08081908, 0x1919080808080808, + 0x191908080808082b, 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, 0x1919080819080819, + 0x1919080819081908, 0x1919080819190808, 0x191908082b080808, 0x1919081908080819, + 0x1919081908081908, 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, 0x1919082b2b2b2b2b, + 0x1919190808080819, 0x1919190808081908, 0x1919190808190808, 0x19191908082b0819, + 0x1919190819080808, 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 0x191919192b080808, 0x191919192b082b08, + 0x1919192b082b0819, 0x1919192b192b2b08, 0x1919192b2b2b0819, 0x19192b0808080808, + 0x19192b0808191908, 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, 0x19192b2b2b081919, + 0x192b080808080819, 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, + 0x192b080819191908, 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, 0x192b082b2b19082b, + 0x192b190808080808, 0x192b19080819192b, 0x192b191908190808, 0x192b191919080808, + 0x192b191919081919, 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, + 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, 0x192b2b2b192b082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, + 0x2b08080808190819, 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808082b080808, + 0x2b0808082b08082b, 0x2b0808082b2b2b08, 0x2b0808082b2b2b2b, 0x2b08081908080819, + 0x2b08081908081908, 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, 0x2b08082b082b0808, + 0x2b08082b2b080808, 0x2b08082b2b08082b, 
0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, 0x2b0819082b082b19, + 0x2b08191908080808, 0x2b08191919081908, 0x2b0819192b2b1919, 0x2b08192b08192b08, + 0x2b08192b192b2b2b, 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, 0x2b082b082b2b2b08, + 0x2b082b190808192b, 0x2b082b2b082b082b, 0x2b082b2b2b080808, 0x2b082b2b2b082b08, + 0x2b082b2b2b19192b, 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, 0x2b1908082b081908, + 0x2b19081908080808, 0x2b190819082b082b, 0x2b190819192b1908, 0x2b19082b1919192b, + 0x2b19082b2b082b19, 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, 0x2b1919192b190808, + 0x2b1919192b19082b, 0x2b19192b19080819, 0x2b192b0819190819, 0x2b192b082b2b192b, + 0x2b192b1919082b19, 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 0x2b2b080808082b2b, 0x2b2b0808082b0808, + 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, 0x2b2b081919190819, 0x2b2b081919192b19, + 0x2b2b08192b2b192b, 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, 0x2b2b190819080808, + 0x2b2b19082b191919, 0x2b2b192b192b1919, 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, + 0x2b2b2b08082b0808, 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, 0x2b2b2b192b08192b, + 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, +}; + constexpr constant static uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -3600,7 +3738,7 @@ void kernel_mul_mv_iq2_xxs_f32_impl( { int nval = 4; int pos = (32*sgitg + tiisg)*nval; - for (int i = 0; i < nval; ++i) values[pos + i] = kgrid_iq2xxs[pos + i]; + for (int i = 0; i < nval; ++i) values[pos + i] = iq2xxs_grid[pos + i]; nval = 2; pos = (32*sgitg + tiisg)*nval; for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; @@ -3689,6 +3827,149 @@ kernel void kernel_mul_mv_iq2_xxs_f32( kernel_mul_mv_iq2_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); } +void kernel_mul_mv_iq2_xs_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq2_xs * x = (device const block_iq2_xs *) src0 + ib_row + offset0; + device const float * y = (device const float *) 
src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint64_t * values = (threadgroup uint64_t *)shared_values; + threadgroup uint8_t * shared_signs = (threadgroup uint8_t *)(values + 512); + { + int nval = 8; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq2xs_grid[pos + i]; + nval = 2; + pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) shared_signs[pos+i] = ksigns_iq2xs[pos+i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + +#if QK_K == 256 + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq2_xs * xr = x + ibl; + device const uint16_t * q2 = xr->qs + 4 * ib; + device const uint8_t * sc = xr->scales + ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const uint8_t ls1 = sc[0] & 0xf; + const uint8_t ls2 = sc[0] >> 4; + const float d1 = db * (0.5f + ls1); + const float d2 = db * (0.5f + ls2); + + float sum1 = 0, sum2 = 0; + for (int l = 0; l < 2; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + (q2[l] & 511)); + const uint8_t signs = shared_signs[(q2[l] >> 9)]; + for (int j = 0; j < 8; ++j) { + sum1 += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? -1.f : 1.f); + } + } + for (int l = 2; l < 4; ++l) { + const threadgroup uint8_t * grid = (const threadgroup uint8_t *)(values + (q2[l] & 511)); + const uint8_t signs = shared_signs[(q2[l] >> 9)]; + for (int j = 0; j < 8; ++j) { + sum2 += yl[8*l + j] * grid[j] * (signs & kmask_iq2xs[j] ? 
-1.f : 1.f); + } + } + sumf[row] += d1 * sum1 + d2 * sum2; + + dh += nb*sizeof(block_iq2_xs)/2; + q2 += nb*sizeof(block_iq2_xs)/2; + sc += nb*sizeof(block_iq2_xs); + } + + y4 += 32 * 32; + } +#else + // TODO +#endif + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.25f; + } + } +} + +[[host_name("kernel_mul_mv_iq2_xs_f32")]] +kernel void kernel_mul_mv_iq2_xs_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq2_xs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + //============================= templates and their specializations ============================= // NOTE: this is not dequantizing - we are simply fitting the template @@ -3973,18 +4254,39 @@ void dequantize_iq2_xxs(device const block_iq2_xxs * xb, short il, thread type4x const uint32_t aux32_s = q2[2] | (q2[3] << 16); thread const uint8_t * aux8 = (thread const uint8_t *)&aux32_g; const float dl = d * (0.5f + (aux32_s >> 28)) * 0.25f; - constant uint8_t * grid = (constant uint8_t *)(kgrid_iq2xxs + aux8[2*il+0]); + constant uint8_t * grid = (constant uint8_t *)(iq2xxs_grid + aux8[2*il+0]); uint8_t signs = ksigns_iq2xs[(aux32_s >> 14*il) & 127]; for (int i = 0; i < 8; ++i) { reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); } - grid = (constant uint8_t *)(kgrid_iq2xxs + aux8[2*il+1]); + grid = (constant uint8_t *)(iq2xxs_grid + aux8[2*il+1]); signs = ksigns_iq2xs[(aux32_s >> (14*il+7)) & 127]; for (int i = 0; i < 8; ++i) { reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); } } +template +void dequantize_iq2_xs(device const block_iq2_xs * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint16_t * q2 = xb->qs + 4*ib32; + const float dl = d * (0.5f + ((xb->scales[ib32] >> 4*il) & 0xf)) * 0.25f; + constant uint8_t * grid = (constant uint8_t *)(iq2xs_grid + (q2[2*il+0] & 511)); + uint8_t signs = ksigns_iq2xs[q2[2*il+0] >> 9]; + for (int i = 0; i < 8; ++i) { + reg[i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? -1.f : 1.f); + } + grid = (constant uint8_t *)(iq2xs_grid + (q2[2*il+1] & 511)); + signs = ksigns_iq2xs[q2[2*il+1] >> 9]; + for (int i = 0; i < 8; ++i) { + reg[2+i/4][i%4] = dl * grid[i] * (signs & kmask_iq2xs[i] ? 
-1.f : 1.f); + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -4525,6 +4827,7 @@ template [[host_name("kernel_get_rows_q4_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_q5_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -4562,6 +4865,7 @@ template [[host_name("kernel_mul_mm_q4_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -4611,6 +4915,7 @@ template [[host_name("kernel_mul_mm_id_q4_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_q5_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -5448,3 +5753,68 @@ kernel void kernel_mul_mv_id_iq2_xxs_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq2_xs_f32")]] +kernel void kernel_mul_mv_id_iq2_xs_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq2_xs_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index d497e6de9..a24b4b244 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -2342,15 +2342,7 @@ size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * // ====================== "True" 2-bit (de)-quantization -void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k) { - (void)x; - 
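As a quick sanity check on the sizes quoted above (74 bytes per block of 256 weights, i.e. 2.3125 bpw), the layout can be reproduced standalone. This sketch restates the struct added in ggml-cuda.cu under a local, sketch-only name, with ggml_fp16_t stored as a plain uint16_t:

#include <stdint.h>
#include <stdio.h>

#define QK_K 256

/* Local restatement of block_iq2_xs for a size check only. */
typedef struct {
    uint16_t d;               /* fp16 super-block scale                       */
    uint16_t qs[QK_K/8];      /* 32 code words: 9-bit grid + 7-bit sign index */
    uint8_t  scales[QK_K/32]; /* 16 4-bit block scales, two per byte          */
} block_iq2_xs_sketch;

int main(void) {
    printf("bytes per block: %zu\n", sizeof(block_iq2_xs_sketch));             /* 74     */
    printf("bits per weight: %g\n", 8.0 * sizeof(block_iq2_xs_sketch) / QK_K); /* 2.3125 */
    return 0;
}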
(void)y; - (void)k; - assert(k % QK_K == 0); - //fprintf(stderr, "=========================== %s: not implemented\n", __func__); -} - -static const uint64_t iq2xxs_grid[256] = { +static const uint64_t iq2xxs_grid[256] = { 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b2b08, 0x08080808082b2b2b, 0x0808080819080819, @@ -2417,6 +2409,137 @@ static const uint64_t iq2xxs_grid[256] = { 0x2b2b082b08080808, 0x2b2b190808192b08, 0x2b2b2b0819190808, 0x2b2b2b1908081908, }; +static const uint64_t iq2xs_grid[512] = { + 0x0808080808080808, 0x080808080808082b, 0x0808080808081919, 0x0808080808082b08, + 0x0808080808082b2b, 0x0808080808190819, 0x0808080808191908, 0x080808080819192b, + 0x0808080808192b19, 0x08080808082b0808, 0x08080808082b082b, 0x08080808082b1919, + 0x08080808082b2b08, 0x0808080819080819, 0x0808080819081908, 0x080808081908192b, + 0x0808080819082b19, 0x0808080819190808, 0x080808081919082b, 0x0808080819191919, + 0x0808080819192b08, 0x08080808192b0819, 0x08080808192b1908, 0x080808082b080808, + 0x080808082b08082b, 0x080808082b081919, 0x080808082b082b08, 0x080808082b190819, + 0x080808082b191908, 0x080808082b192b19, 0x080808082b2b0808, 0x0808081908080819, + 0x0808081908081908, 0x080808190808192b, 0x0808081908082b19, 0x0808081908190808, + 0x080808190819082b, 0x0808081908191919, 0x0808081908192b08, 0x0808081908192b2b, + 0x08080819082b0819, 0x08080819082b1908, 0x0808081919080808, 0x080808191908082b, + 0x0808081919081919, 0x0808081919082b08, 0x0808081919190819, 0x0808081919191908, + 0x08080819192b0808, 0x08080819192b2b08, 0x080808192b080819, 0x080808192b081908, + 0x080808192b190808, 0x0808082b08080808, 0x0808082b0808082b, 0x0808082b08081919, + 0x0808082b08082b08, 0x0808082b08190819, 0x0808082b08191908, 0x0808082b082b0808, + 0x0808082b19080819, 0x0808082b19081908, 0x0808082b19190808, 0x0808082b19191919, + 0x0808082b2b080808, 0x0808082b2b082b2b, 0x0808190808080819, 0x0808190808081908, + 0x080819080808192b, 0x0808190808082b19, 0x0808190808190808, 0x080819080819082b, + 0x0808190808191919, 0x0808190808192b08, 0x08081908082b0819, 0x08081908082b1908, + 0x0808190819080808, 0x080819081908082b, 0x0808190819081919, 0x0808190819082b08, + 0x0808190819190819, 0x0808190819191908, 0x080819081919192b, 0x08081908192b0808, + 0x080819082b080819, 0x080819082b081908, 0x080819082b190808, 0x0808191908080808, + 0x080819190808082b, 0x0808191908081919, 0x0808191908082b08, 0x0808191908190819, + 0x0808191908191908, 0x08081919082b0808, 0x0808191919080819, 0x0808191919081908, + 0x0808191919190808, 0x08081919192b0819, 0x080819192b080808, 0x0808192b08080819, + 0x0808192b08081908, 0x0808192b08190808, 0x0808192b082b192b, 0x0808192b19080808, + 0x0808192b1908082b, 0x0808192b2b081908, 0x08082b0808080808, 0x08082b080808082b, + 0x08082b0808081919, 0x08082b0808082b08, 0x08082b0808082b2b, 0x08082b0808190819, + 0x08082b0808191908, 0x08082b08082b0808, 0x08082b08082b1919, 0x08082b0819080819, + 0x08082b0819081908, 0x08082b0819190808, 0x08082b0819192b08, 0x08082b082b080808, + 0x08082b082b2b0808, 0x08082b082b2b2b2b, 0x08082b1908080819, 0x08082b1908081908, + 0x08082b1908190808, 0x08082b1919080808, 0x08082b192b080819, 0x08082b192b082b19, + 0x08082b2b08080808, 0x08082b2b082b0808, 0x08082b2b082b2b08, 0x08082b2b2b19192b, + 0x08082b2b2b2b0808, 0x0819080808080819, 0x0819080808081908, 0x081908080808192b, + 0x0819080808082b19, 0x0819080808190808, 0x081908080819082b, 0x0819080808191919, + 0x0819080808192b08, 
0x08190808082b0819, 0x08190808082b1908, 0x0819080819080808, + 0x081908081908082b, 0x0819080819081919, 0x0819080819082b08, 0x0819080819190819, + 0x0819080819191908, 0x08190808192b0808, 0x08190808192b2b2b, 0x081908082b080819, + 0x081908082b081908, 0x081908082b190808, 0x0819081908080808, 0x081908190808082b, + 0x0819081908081919, 0x0819081908082b08, 0x0819081908190819, 0x0819081908191908, + 0x08190819082b0808, 0x0819081919080819, 0x0819081919081908, 0x0819081919190808, + 0x081908192b080808, 0x081908192b191908, 0x081908192b19192b, 0x0819082b08080819, + 0x0819082b08081908, 0x0819082b0808192b, 0x0819082b08190808, 0x0819082b19080808, + 0x0819082b192b0808, 0x0819190808080808, 0x081919080808082b, 0x0819190808081919, + 0x0819190808082b08, 0x0819190808190819, 0x0819190808191908, 0x08191908082b0808, + 0x0819190819080819, 0x0819190819081908, 0x0819190819082b19, 0x0819190819190808, + 0x08191908192b1908, 0x081919082b080808, 0x0819191908080819, 0x0819191908081908, + 0x0819191908190808, 0x0819191919080808, 0x0819192b08080808, 0x0819192b08191908, + 0x0819192b19082b19, 0x08192b0808080819, 0x08192b0808081908, 0x08192b0808190808, + 0x08192b080819082b, 0x08192b0819080808, 0x08192b0819191908, 0x08192b082b08192b, + 0x08192b1908080808, 0x08192b1908081919, 0x08192b19192b192b, 0x08192b2b19190819, + 0x08192b2b2b2b2b19, 0x082b080808080808, 0x082b08080808082b, 0x082b080808081919, + 0x082b080808082b08, 0x082b080808082b2b, 0x082b080808190819, 0x082b080808191908, + 0x082b0808082b0808, 0x082b080819080819, 0x082b080819081908, 0x082b080819190808, + 0x082b08082b080808, 0x082b08082b2b0808, 0x082b081908080819, 0x082b081908081908, + 0x082b081908190808, 0x082b081919080808, 0x082b081919082b08, 0x082b0819192b1919, + 0x082b082b08080808, 0x082b082b082b082b, 0x082b082b2b080808, 0x082b082b2b2b2b08, + 0x082b190808080819, 0x082b190808081908, 0x082b190808190808, 0x082b1908082b2b19, + 0x082b190819080808, 0x082b191908080808, 0x082b191919080819, 0x082b19191919082b, + 0x082b19192b192b19, 0x082b192b08080819, 0x082b192b08192b2b, 0x082b192b2b2b192b, + 0x082b2b0808080808, 0x082b2b0808082b08, 0x082b2b0808082b2b, 0x082b2b08082b0808, + 0x082b2b0819191919, 0x082b2b082b082b08, 0x082b2b082b2b082b, 0x082b2b19192b2b08, + 0x082b2b192b190808, 0x082b2b2b08082b08, 0x082b2b2b082b0808, 0x082b2b2b2b08082b, + 0x082b2b2b2b082b08, 0x082b2b2b2b082b2b, 0x1908080808080819, 0x1908080808081908, + 0x190808080808192b, 0x1908080808082b19, 0x1908080808190808, 0x190808080819082b, + 0x1908080808191919, 0x1908080808192b08, 0x19080808082b0819, 0x19080808082b1908, + 0x1908080819080808, 0x190808081908082b, 0x1908080819081919, 0x1908080819082b08, + 0x1908080819082b2b, 0x1908080819190819, 0x1908080819191908, 0x19080808192b0808, + 0x19080808192b1919, 0x190808082b080819, 0x190808082b081908, 0x190808082b190808, + 0x1908081908080808, 0x190808190808082b, 0x1908081908081919, 0x1908081908082b08, + 0x1908081908190819, 0x1908081908191908, 0x19080819082b0808, 0x1908081919080819, + 0x1908081919081908, 0x1908081919190808, 0x190808192b080808, 0x190808192b081919, + 0x190808192b2b082b, 0x1908082b08080819, 0x1908082b08081908, 0x1908082b08190808, + 0x1908082b0819082b, 0x1908082b082b2b19, 0x1908082b19080808, 0x1908190808080808, + 0x190819080808082b, 0x1908190808081919, 0x1908190808082b08, 0x1908190808190819, + 0x1908190808191908, 0x1908190808192b19, 0x19081908082b0808, 0x1908190819080819, + 0x1908190819081908, 0x1908190819190808, 0x190819082b080808, 0x190819082b191908, + 0x1908191908080819, 0x1908191908081908, 0x1908191908190808, 0x19081919082b1908, + 0x1908191919080808, 0x190819192b192b2b, 
0x1908192b08080808, 0x1908192b08082b2b, + 0x1908192b19081908, 0x1908192b19190808, 0x19082b0808080819, 0x19082b0808081908, + 0x19082b0808190808, 0x19082b0819080808, 0x19082b0819081919, 0x19082b0819191908, + 0x19082b08192b082b, 0x19082b1908080808, 0x19082b1908190819, 0x19082b1919081908, + 0x19082b1919190808, 0x19082b19192b2b19, 0x19082b2b08081908, 0x1919080808080808, + 0x191908080808082b, 0x1919080808081919, 0x1919080808082b08, 0x1919080808190819, + 0x1919080808191908, 0x19190808082b0808, 0x19190808082b2b08, 0x1919080819080819, + 0x1919080819081908, 0x1919080819190808, 0x191908082b080808, 0x1919081908080819, + 0x1919081908081908, 0x1919081908190808, 0x1919081908191919, 0x1919081919080808, + 0x191908191908082b, 0x1919082b08080808, 0x1919082b19081908, 0x1919082b2b2b2b2b, + 0x1919190808080819, 0x1919190808081908, 0x1919190808190808, 0x19191908082b0819, + 0x1919190819080808, 0x19191908192b0808, 0x191919082b080819, 0x191919082b2b0819, + 0x1919191908080808, 0x1919191908082b08, 0x191919192b080808, 0x191919192b082b08, + 0x1919192b082b0819, 0x1919192b192b2b08, 0x1919192b2b2b0819, 0x19192b0808080808, + 0x19192b0808191908, 0x19192b0819080819, 0x19192b0819190808, 0x19192b082b192b19, + 0x19192b1908192b2b, 0x19192b1919080808, 0x19192b191908082b, 0x19192b2b2b081919, + 0x192b080808080819, 0x192b080808081908, 0x192b080808190808, 0x192b080819080808, + 0x192b080819191908, 0x192b0808192b082b, 0x192b08082b08192b, 0x192b08082b2b2b19, + 0x192b081908080808, 0x192b082b082b1908, 0x192b082b19082b2b, 0x192b082b2b19082b, + 0x192b190808080808, 0x192b19080819192b, 0x192b191908190808, 0x192b191919080808, + 0x192b191919081919, 0x192b19192b2b1908, 0x192b2b0808080819, 0x192b2b08192b2b2b, + 0x192b2b19082b1919, 0x192b2b2b0808192b, 0x192b2b2b19191908, 0x192b2b2b192b082b, + 0x2b08080808080808, 0x2b0808080808082b, 0x2b08080808081919, 0x2b08080808082b08, + 0x2b08080808190819, 0x2b08080808191908, 0x2b080808082b0808, 0x2b080808082b2b2b, + 0x2b08080819080819, 0x2b08080819081908, 0x2b08080819190808, 0x2b0808082b080808, + 0x2b0808082b08082b, 0x2b0808082b2b2b08, 0x2b0808082b2b2b2b, 0x2b08081908080819, + 0x2b08081908081908, 0x2b0808190808192b, 0x2b08081908190808, 0x2b08081919080808, + 0x2b08081919190819, 0x2b08081919192b19, 0x2b08082b08080808, 0x2b08082b082b0808, + 0x2b08082b2b080808, 0x2b08082b2b08082b, 0x2b08082b2b2b0808, 0x2b08082b2b2b2b08, + 0x2b08190808080819, 0x2b08190808081908, 0x2b08190808190808, 0x2b0819080819082b, + 0x2b08190808191919, 0x2b08190819080808, 0x2b081908192b0808, 0x2b0819082b082b19, + 0x2b08191908080808, 0x2b08191919081908, 0x2b0819192b2b1919, 0x2b08192b08192b08, + 0x2b08192b192b2b2b, 0x2b082b0808080808, 0x2b082b0808082b08, 0x2b082b08082b1919, + 0x2b082b0819192b2b, 0x2b082b082b080808, 0x2b082b082b08082b, 0x2b082b082b2b2b08, + 0x2b082b190808192b, 0x2b082b2b082b082b, 0x2b082b2b2b080808, 0x2b082b2b2b082b08, + 0x2b082b2b2b19192b, 0x2b082b2b2b2b2b08, 0x2b19080808080819, 0x2b19080808081908, + 0x2b19080808190808, 0x2b19080819080808, 0x2b1908081919192b, 0x2b1908082b081908, + 0x2b19081908080808, 0x2b190819082b082b, 0x2b190819192b1908, 0x2b19082b1919192b, + 0x2b19082b2b082b19, 0x2b19190808080808, 0x2b19190808081919, 0x2b19190819081908, + 0x2b19190819190808, 0x2b19190819192b08, 0x2b191919082b2b19, 0x2b1919192b190808, + 0x2b1919192b19082b, 0x2b19192b19080819, 0x2b192b0819190819, 0x2b192b082b2b192b, + 0x2b192b1919082b19, 0x2b192b2b08191919, 0x2b192b2b192b0808, 0x2b2b080808080808, + 0x2b2b08080808082b, 0x2b2b080808082b08, 0x2b2b080808082b2b, 0x2b2b0808082b0808, + 0x2b2b0808082b2b2b, 0x2b2b08082b2b0808, 0x2b2b081919190819, 
0x2b2b081919192b19, + 0x2b2b08192b2b192b, 0x2b2b082b08080808, 0x2b2b082b0808082b, 0x2b2b082b08082b08, + 0x2b2b082b082b2b2b, 0x2b2b082b2b080808, 0x2b2b082b2b2b0808, 0x2b2b190819080808, + 0x2b2b19082b191919, 0x2b2b192b192b1919, 0x2b2b192b2b192b08, 0x2b2b2b0808082b2b, + 0x2b2b2b08082b0808, 0x2b2b2b08082b082b, 0x2b2b2b08082b2b08, 0x2b2b2b082b2b0808, + 0x2b2b2b082b2b2b08, 0x2b2b2b1908081908, 0x2b2b2b192b081908, 0x2b2b2b192b08192b, + 0x2b2b2b2b082b2b08, 0x2b2b2b2b082b2b2b, 0x2b2b2b2b2b190819, 0x2b2b2b2b2b2b2b2b, +}; + static const uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -2427,8 +2550,17 @@ static const uint8_t ksigns_iq2xs[128] = { 96, 225, 226, 99, 228, 101, 102, 231, 232, 105, 106, 235, 108, 237, 238, 111, 240, 113, 114, 243, 116, 245, 246, 119, 120, 249, 250, 123, 252, 125, 126, 255, }; + static const uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; +void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k) { + (void)x; + (void)y; + (void)k; + assert(k % QK_K == 0); + //fprintf(stderr, "=========================== %s: not implemented\n", __func__); +} + void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k) { assert(k % QK_K == 0); const int nb = k / QK_K; @@ -2472,6 +2604,58 @@ size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_ return (n/QK_K*sizeof(block_iq2_xxs)); } +// ====================== 2.3125 bpw (de)-quantization + +void quantize_row_iq2_xs_reference(const float * restrict x, block_iq2_xs * restrict y, int k) { + (void)x; + (void)y; + (void)k; + assert(k % QK_K == 0); + //fprintf(stderr, "=========================== %s: not implemented\n", __func__); +} + +void dequantize_row_iq2_xs(const block_iq2_xs * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + float db[2]; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + db[0] = d * (0.5f + (x[i].scales[ib32] & 0xf)) * 0.25f; + db[1] = d * (0.5f + (x[i].scales[ib32] >> 4)) * 0.25f; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (x[i].qs[4*ib32 + l] & 511)); + const uint8_t signs = ksigns_iq2xs[x[i].qs[4*ib32 + l] >> 9]; + for (int j = 0; j < 8; ++j) { + y[j] = db[l/2] * grid[j] * (signs & kmask_iq2xs[j] ? 
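+            // kmask_iq2xs[j] == 1 << j selects the sign of element j; each
+            // ksigns_iq2xs byte has even parity, i.e. 7 free sign bits encode
+            // the 128 sign patterns with an even number of negated values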
-1.f : 1.f); + } + y += 8; + } + } + } +} + +void quantize_row_iq2_xs(const float * restrict x, void * restrict vy, int k) { + assert(k % QK_K == 0); + block_iq2_xs * restrict y = vy; + quantize_row_iq2_xs_reference(x, y, k); +} + +size_t ggml_quantize_iq2_xs(const float * src, void * dst, int n, int k, int64_t * hist) { + assert(k % QK_K == 0); + (void)hist; // TODO: collect histograms + + for (int j = 0; j < n; j += k) { + block_iq2_xs * restrict y = (block_iq2_xs *)dst + j/QK_K; + quantize_row_iq2_xs_reference(src + j, y, k); + } + return (n/QK_K*sizeof(block_iq2_xs)); +} + //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -7357,3 +7541,161 @@ void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * res *s = 0.125f * sumf; #endif } + +void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { + assert(n % QK_K == 0); + + const block_iq2_xs * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + int8x16x4_t q2u; + int8x16x4_t q2s; + int8x16x4_t q8b; + + int32x4x4_t scales32; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + const uint8x8_t scales8 = vld1_u8(x[i].scales); + const uint8x8_t scales_l = vand_u8(scales8, vdup_n_u8(0xf)); + const uint8x8_t scales_h = vshr_n_u8(scales8, 4); + uint8x16_t scales = vcombine_u8(vzip1_u8(scales_l, scales_h), vzip2_u8(scales_l, scales_h)); + scales = vaddq_u8(vshlq_n_u8(scales, 1), vdupq_n_u8(1)); + const uint16x8_t scales1 = vmovl_u8(vget_low_u8(scales)); + const uint16x8_t scales2 = vmovl_u8(vget_high_u8(scales)); + scales32.val[0] = vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(scales1))); + scales32.val[1] = vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales1))); + scales32.val[2] = vreinterpretq_s32_u32(vmovl_u16(vget_low_u16(scales2))); + scales32.val[3] = vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales2))); + int32x4_t sumi = vdupq_n_s32(0); + for (int ib64 = 0; ib64 < QK_K/64; ++ib64) { + q8b = vld1q_s8_x4(q8); q8 += 64; + q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[0] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[1] & 511)))); + q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[2] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[3] & 511)))); + q2u.val[2] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[4] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[5] & 511)))); + q2u.val[3] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[6] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[7] & 511)))); + q2s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[0] >> 9))), vld1_s8((const void *)(signs64 + (q2[1] >> 9)))); + q2s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[2] >> 9))), vld1_s8((const void *)(signs64 + (q2[3] >> 9)))); + q2s.val[2] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[4] >> 9))), vld1_s8((const void *)(signs64 + (q2[5] >> 9)))); + q2s.val[3] = vcombine_s8(vld1_s8((const void *)(signs64 + (q2[6] >> 9))), vld1_s8((const void *)(signs64 + (q2[7] >> 9)))); + q2u.val[0] = vmulq_s8(q2u.val[0], q2s.val[0]); + q2u.val[1] = vmulq_s8(q2u.val[1], q2s.val[1]); + q2u.val[2] = 
vmulq_s8(q2u.val[2], q2s.val[2]); + q2u.val[3] = vmulq_s8(q2u.val[3], q2s.val[3]); + const int32x4_t p1 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[0], q8b.val[0]); + const int32x4_t p2 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[1], q8b.val[1]); + const int32x4_t p3 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[2], q8b.val[2]); + const int32x4_t p4 = ggml_vdotq_s32(vdupq_n_s32(0), q2u.val[3], q8b.val[3]); + const int32x4_t p = vpaddq_s32(vpaddq_s32(p1, p2), vpaddq_s32(p3, p4)); + sumi = vmlaq_s32(sumi, p, scales32.val[ib64]); + q2 += 8; + } + sumf += d*vaddvq_s32(sumi); + } + *s = 0.125f * sumf; + +#elif defined(__AVX2__) + + const __m128i m4 = _mm_set1_epi8(0xf); + const __m128i m1 = _mm_set1_epi8(1); + const __m128i m511 = _mm_set1_epi16(511); + const __m128i m127 = _mm_set1_epi16(127); + + const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; + + uint64_t aux64; + + // somewhat hacky, but gives a significant boost in performance + __m128i aux_gindex, aux_sindex; + const uint16_t * gindex = (const uint16_t *)&aux_gindex; + const uint16_t * sindex = (const uint16_t *)&aux_sindex; + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const int8_t * restrict q8 = y[i].qs; + + memcpy(&aux64, x[i].scales, 8); + __m128i stmp = _mm_set1_epi64x(aux64); + stmp = _mm_unpacklo_epi8(_mm_and_si128(stmp, m4), _mm_and_si128(_mm_srli_epi16(stmp, 4), m4)); + const __m128i scales = _mm_add_epi8(_mm_slli_epi16(stmp, 1), m1); + + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m128i q2_data = _mm_loadu_si128((const __m128i*)q2); q2 += 8; + aux_gindex = _mm_and_si128(q2_data, m511); + aux_sindex = _mm_and_si128(_mm_srli_epi16(q2_data, 9), m127); + const __m256i q2_1 = _mm256_set_epi64x(iq2xs_grid[gindex[3]], iq2xs_grid[gindex[2]], iq2xs_grid[gindex[1]], iq2xs_grid[gindex[0]]); + const __m256i q2_2 = _mm256_set_epi64x(iq2xs_grid[gindex[7]], iq2xs_grid[gindex[6]], iq2xs_grid[gindex[5]], iq2xs_grid[gindex[4]]); + const __m256i s2_1 = _mm256_set_epi64x(signs64[sindex[3]], signs64[sindex[2]], signs64[sindex[1]], signs64[sindex[0]]); + const __m256i s2_2 = _mm256_set_epi64x(signs64[sindex[7]], signs64[sindex[6]], signs64[sindex[5]], signs64[sindex[4]]); + const __m256i q8s_1 = _mm256_sign_epi8(q8_1, s2_1); + const __m256i q8s_2 = _mm256_sign_epi8(q8_2, s2_2); + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + + const __m256i sc1 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, get_scale_shuffle(ib32+0))); + const __m256i sc2 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, get_scale_shuffle(ib32+1))); + + sumi1 = _mm256_add_epi32(sumi1, _mm256_madd_epi16(dot1, sc1)); + sumi2 = _mm256_add_epi32(sumi2, _mm256_madd_epi16(dot2, sc2)); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.125f * hsum_float_8(accumf); + +#else + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint16_t * restrict q2 = x[i].qs; + const uint8_t * restrict sc = x[i].scales; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ++ib32) { + 
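+            // each 4-bit scale s becomes ls = 2*s + 1 (odd values 1..31); with
+            // the final *s = 0.125f below this reproduces the d*(0.5f + s)*0.25f
+            // scaling used in dequantize_row_iq2_xs above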
const uint16_t ls1 = 2*(sc[ib32] & 0xf) + 1; + const uint16_t ls2 = 2*(sc[ib32] >> 4) + 1; + int32_t sumi = 0; + for (int l = 0; l < 2; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511)); + const uint8_t signs = ksigns_iq2xs[q2[l] >> 9]; + for (int j = 0; j < 8; ++j) { + sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + bsum += sumi * ls1; + sumi = 0; + for (int l = 2; l < 4; ++l) { + const uint8_t * grid = (const uint8_t *)(iq2xs_grid + (q2[l] & 511)); + const uint8_t signs = ksigns_iq2xs[q2[l] >> 9]; + for (int j = 0; j < 8; ++j) { + sumi += grid[j] * q8[j] * (signs & kmask_iq2xs[j] ? -1 : 1); + } + q8 += 8; + } + bsum += sumi * ls2; + q2 += 4; + } + sumf += d * bsum; + } + *s = 0.125f * sumf; +#endif +} diff --git a/ggml-quants.h b/ggml-quants.h index 8dd911d41..df5e7ae80 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -174,6 +174,14 @@ typedef struct { } block_iq2_xxs; static_assert(sizeof(block_iq2_xxs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t), "wrong iq2_xxs block size/padding"); +// 2.3125 bpw quants +typedef struct { + ggml_fp16_t d; + uint16_t qs[QK_K/8]; + uint8_t scales[QK_K/32]; +} block_iq2_xs; +static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16_t) + QK_K/32, "wrong iq2_xs block size/padding"); + // Quantization void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k); void quantize_row_q4_1_reference(const float * restrict x, block_q4_1 * restrict y, int k); @@ -189,6 +197,7 @@ void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k); void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k); void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k); +void quantize_row_iq2_xs_reference (const float * restrict x, block_iq2_xs * restrict y, int k); void quantize_row_q4_0(const float * restrict x, void * restrict y, int k); void quantize_row_q4_1(const float * restrict x, void * restrict y, int k); @@ -204,6 +213,7 @@ void quantize_row_q5_K(const float * restrict x, void * restrict y, int k); void quantize_row_q6_K(const float * restrict x, void * restrict y, int k); void quantize_row_q8_K(const float * restrict x, void * restrict y, int k); void quantize_row_iq2_xxs(const float * restrict x, void * restrict y, int k); +void quantize_row_iq2_xs (const float * restrict x, void * restrict y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k); @@ -220,6 +230,7 @@ void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int k); void dequantize_row_q8_K(const block_q8_K * restrict x, float * restrict y, int k); void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k); +void dequantize_row_iq2_xs (const block_iq2_xs * restrict x, float * restrict y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); @@ -234,3 +245,4 @@ void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, const void * restrict vx, void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict 
vy); void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_iq2_xs_q8_K (int n, float * restrict s, const void * restrict vx, const void * restrict vy); diff --git a/ggml.c b/ggml.c index 9c42a45e3..d2a8c0478 100644 --- a/ggml.c +++ b/ggml.c @@ -584,6 +584,17 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, }, + [GGML_TYPE_IQ2_XS] = { + .type_name = "iq2_xs", + .blck_size = QK_K, + .type_size = sizeof(block_iq2_xs), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq2_xs, + .from_float = quantize_row_iq2_xs, + .from_float_reference = (ggml_from_float_t) quantize_row_iq2_xs_reference, + .vec_dot = ggml_vec_dot_iq2_xs_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2123,6 +2134,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_Q5_K: wtype = GGML_TYPE_Q5_K; break; case GGML_FTYPE_MOSTLY_Q6_K: wtype = GGML_TYPE_Q6_K; break; case GGML_FTYPE_MOSTLY_IQ2_XXS: wtype = GGML_TYPE_IQ2_XXS; break; + case GGML_FTYPE_MOSTLY_IQ2_XS: wtype = GGML_TYPE_IQ2_XS; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7435,6 +7447,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: { ggml_compute_forward_add_q_f32(params, src0, src1, dst); } break; @@ -7700,6 +7713,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: { ggml_compute_forward_add1_q_f32(params, src0, src1, dst); } break; @@ -7815,6 +7829,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: default: { GGML_ASSERT(false); @@ -10457,6 +10472,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: { ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); } break; @@ -10632,6 +10648,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: default: { GGML_ASSERT(false); @@ -10827,6 +10844,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: { ggml_compute_forward_get_rows_q(params, src0, src1, dst); } break; @@ -11464,6 +11482,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -11539,6 +11558,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_Q5_K: case GGML_TYPE_Q6_K: case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -18660,6 +18680,12 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i block_iq2_xxs * block = (block_iq2_xxs*)dst + start / QK_K; result = ggml_quantize_iq2_xxs(src + start, block, n, n, hist); } break; + case GGML_TYPE_IQ2_XS: + { + GGML_ASSERT(start % QK_K == 0); + block_iq2_xs * block = (block_iq2_xs*)dst + start / QK_K; + result = ggml_quantize_iq2_xs(src + start, block, n, n, hist); + } break; case GGML_TYPE_F16: { int elemsize = 
sizeof(ggml_fp16_t);
@@ -19015,8 +19041,8 @@ struct gguf_context * gguf_init_from_file(const char * fname, struct gguf_init_p
            (int64_t) info->ne[3];

        if (ne % ggml_blck_size(info->type) != 0) {
-            fprintf(stderr, "%s: tensor '%s' number of elements (%" PRId64 ") is not a multiple of block size (%d)\n",
-                    __func__, info->name.data, ne, ggml_blck_size(info->type));
+            fprintf(stderr, "%s: tensor '%s' of type %d (%s) number of elements (%" PRId64 ") is not a multiple of block size (%d)\n",
+                    __func__, info->name.data, (int)info->type, ggml_type_name(info->type), ne, ggml_blck_size(info->type));
            fclose(file);
            gguf_free(ctx);
            return NULL;
diff --git a/ggml.h b/ggml.h
index 127dcef1d..93b42a27d 100644
--- a/ggml.h
+++ b/ggml.h
@@ -342,6 +342,7 @@ extern "C" {
         GGML_TYPE_Q6_K    = 14,
         GGML_TYPE_Q8_K    = 15,
         GGML_TYPE_IQ2_XXS = 16,
+        GGML_TYPE_IQ2_XS  = 17,
         GGML_TYPE_I8,
         GGML_TYPE_I16,
         GGML_TYPE_I32,
@@ -377,6 +378,7 @@ extern "C" {
         GGML_FTYPE_MOSTLY_Q5_K    = 13, // except 1d tensors
         GGML_FTYPE_MOSTLY_Q6_K    = 14, // except 1d tensors
         GGML_FTYPE_MOSTLY_IQ2_XXS = 15, // except 1d tensors
+        GGML_FTYPE_MOSTLY_IQ2_XS  = 16, // except 1d tensors
     };

     // available tensor operations:
@@ -2061,6 +2063,7 @@ extern "C" {
     GGML_API size_t ggml_quantize_q5_K(const float * src, void * dst, int n, int k, int64_t * hist);
     GGML_API size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * hist);
     GGML_API size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist);
+    GGML_API size_t ggml_quantize_iq2_xs (const float * src, void * dst, int n, int k, int64_t * hist);

     GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist);

diff --git a/llama.cpp b/llama.cpp
index aaadfa444..bd219d49c 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -2223,6 +2223,7 @@ struct llama_model_loader {
                 case GGML_TYPE_Q5_K:    ftype = LLAMA_FTYPE_MOSTLY_Q5_K_M;  break;
                 case GGML_TYPE_Q6_K:    ftype = LLAMA_FTYPE_MOSTLY_Q6_K;    break;
                 case GGML_TYPE_IQ2_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XXS; break;
+                case GGML_TYPE_IQ2_XS:  ftype = LLAMA_FTYPE_MOSTLY_IQ2_XS;  break;
                 default:
                     {
                         LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max));
@@ -2595,6 +2596,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) {
         case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium";
         case LLAMA_FTYPE_MOSTLY_Q6_K:   return "Q6_K";
         case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XXS - 2.0625 bpw";
+        case LLAMA_FTYPE_MOSTLY_IQ2_XS: return "IQ2_XS - 2.3125 bpw";

         default: return "unknown, may not work";
     }
@@ -9050,6 +9052,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s
         case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break;
         case LLAMA_FTYPE_MOSTLY_Q6_K:   quantized_type = GGML_TYPE_Q6_K; break;
         case LLAMA_FTYPE_MOSTLY_IQ2_XXS:quantized_type = GGML_TYPE_IQ2_XXS; break;
+        case LLAMA_FTYPE_MOSTLY_IQ2_XS :quantized_type = GGML_TYPE_IQ2_XS;  break;
         default: throw std::runtime_error(format("invalid output file type %d\n", ftype));
     }

diff --git a/llama.h b/llama.h
index c11075bbc..6fde113ff 100644
--- a/llama.h
+++ b/llama.h
@@ -104,6 +104,7 @@ extern "C" {
         LLAMA_FTYPE_MOSTLY_Q5_K_M  = 17, // except 1d tensors
         LLAMA_FTYPE_MOSTLY_Q6_K    = 18, // except 1d tensors
         LLAMA_FTYPE_MOSTLY_IQ2_XXS = 19, // except 1d tensors
+        LLAMA_FTYPE_MOSTLY_IQ2_XS  = 20, // except 1d tensors

         LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file
     };
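The patch above fully determines IQ2_XS decoding: each 16-bit entry of qs packs a 9-bit index into the 512-entry iq2xs_grid table (eight byte values per uint64) and a 7-bit index into ksigns_iq2xs, and each 32-value sub-block carries a 4-bit scale applied as d*(0.5 + s)*0.25. A minimal, self-contained sketch of that decode path follows; the table entries and numeric values are placeholders rather than real data, and, like the patch itself, it reads the packed uint64 bytewise and therefore assumes little-endian layout:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // placeholder tables: the real iq2xs_grid has 512 packed entries and
        // ksigns_iq2xs the 128 even-parity sign bytes shown above
        uint64_t grid[512]   = {};
        uint8_t  ksigns[128] = {};
        grid[5]    = 0x0819080808080808ull; // example entry: 8 packed byte values
        ksigns[37] = 0xa5;                  // example sign byte (even bit count)

        const uint16_t q = (37 << 9) | 5;   // high 7 bits: signs, low 9 bits: grid index
        const uint8_t * g = (const uint8_t *)&grid[q & 511];
        const uint8_t signs = ksigns[q >> 9];

        const float d  = 0.25f;                  // fp16 super-block scale
        const int   s  = 3;                      // 4-bit sub-block scale
        const float db = d * (0.5f + s) * 0.25f; // == d * (2*s + 1) / 8

        for (int j = 0; j < 8; ++j) {
            printf("%g ", db * g[j] * ((signs >> j) & 1 ? -1.f : 1.f));
        }
        printf("\n");
        return 0;
    }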
diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp
index cee712618..31a78c632 100644
--- a/tests/test-quantize-fns.cpp
+++ b/tests/test-quantize-fns.cpp
@@ -134,8 +134,9 @@ int main(int argc, char * argv[]) {
             continue;
         }

-        if ((ggml_type)i == GGML_TYPE_IQ2_XXS) {
-            printf("Skip %s due to missing quantization functionality\n", ggml_type_name((ggml_type) i));
+        const ggml_type ei = (ggml_type)i;
+        if (ei == GGML_TYPE_IQ2_XXS || ei == GGML_TYPE_IQ2_XS) {
+            printf("Skip %s due to missing quantization functionality\n", ggml_type_name(ei));
             continue;
         }

From 469e75d0a35b08de549a4fd87f082ca7a8a539ba Mon Sep 17 00:00:00 2001
From: Kawrakow <48489457+ikawrakow@users.noreply.github.com>
Date: Thu, 11 Jan 2024 20:43:15 +0100
Subject: [PATCH 428/859] llama : restore intended k-quants mixes for MoE
 models (#4872)

* Restore intended k-quants quantization mixes for MoE models

* Update Q2_K_S values in the quantize tool

Still using LLaMA-v1 PPL values in the quant description
today does not make much sense. But let's leave this update
for another PR.

---------

Co-authored-by: Iwan Kawrakow
Co-authored-by: Georgi Gerganov
---
 examples/quantize/quantize.cpp |  1 +
 llama.cpp                      | 24 +++++++++++++++---------
 llama.h                        |  1 +
 3 files changed, 17 insertions(+), 9 deletions(-)

diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp
index d27ea5e91..f878f6911 100644
--- a/examples/quantize/quantize.cpp
+++ b/examples/quantize/quantize.cpp
@@ -18,6 +18,7 @@ static const std::vector<struct quant_option> QUANT_OPTIONS = {
     { "Q5_0",   LLAMA_FTYPE_MOSTLY_Q5_0,   " 4.33G, +0.0683 ppl @ LLaMA-v1-7B", },
     { "Q5_1",   LLAMA_FTYPE_MOSTLY_Q5_1,   " 4.70G, +0.0349 ppl @ LLaMA-v1-7B", },
     { "Q2_K",   LLAMA_FTYPE_MOSTLY_Q2_K,   " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", },
+    { "Q2_K_S", LLAMA_FTYPE_MOSTLY_Q2_K_S, " 2.16G, +9.0634 ppl @ LLaMA-v1-7B", },
     { "Q3_K",   LLAMA_FTYPE_MOSTLY_Q3_K_M, "alias for Q3_K_M" },
     { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", },
     { "Q3_K_M", LLAMA_FTYPE_MOSTLY_Q3_K_M, " 3.07G, +0.2496 ppl @ LLaMA-v1-7B", },
diff --git a/llama.cpp b/llama.cpp
index bd219d49c..d39ff94c7 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -2586,7 +2586,8 @@ static std::string llama_model_ftype_name(llama_ftype ftype) {
         case LLAMA_FTYPE_MOSTLY_Q8_0: return "Q8_0";

         // K-quants
-        case LLAMA_FTYPE_MOSTLY_Q2_K:   return "Q2_K";
+        case LLAMA_FTYPE_MOSTLY_Q2_K:   return "Q2_K - Medium";
+        case LLAMA_FTYPE_MOSTLY_Q2_K_S: return "Q2_K - Small";
         case LLAMA_FTYPE_MOSTLY_Q3_K_S: return "Q3_K - Small";
         case LLAMA_FTYPE_MOSTLY_Q3_K_M: return "Q3_K - Medium";
         case LLAMA_FTYPE_MOSTLY_Q3_K_L: return "Q3_K - Large";
@@ -8955,10 +8956,13 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty
             // TODO: explore better strategies
             new_type = GGML_TYPE_Q8_0;
         }
-    } else if (name.find("ffn_down.weight") != std::string::npos) {
+    } else if (name.find("ffn_down") != std::string::npos) {
         if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K;
+        else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) {
+            if (qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) new_type = GGML_TYPE_Q4_K;
+        }
         else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) {
-            new_type = qs.i_feed_forward_w2 < 2 ? GGML_TYPE_Q5_K
+            new_type = qs.i_feed_forward_w2 < qs.n_feed_forward_w2/16 ? GGML_TYPE_Q5_K
                      : arch != LLM_ARCH_FALCON || use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ?
GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; } @@ -8967,14 +8971,14 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { if (arch == LLM_ARCH_FALCON) { - new_type = qs.i_feed_forward_w2 < 2 ? GGML_TYPE_Q6_K : + new_type = qs.i_feed_forward_w2 < qs.n_feed_forward_w2/16 ? GGML_TYPE_Q6_K : use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else { if (use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; } } else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && qs.i_feed_forward_w2 < 4) { + else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) { new_type = GGML_TYPE_Q5_K; } ++qs.i_feed_forward_w2; @@ -8992,9 +8996,10 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) new_type = GGML_TYPE_Q5_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) new_type = GGML_TYPE_Q6_K; } - else if (name.find("ffn_gate.weight") != std::string::npos || name.find("ffn_up.weight") != std::string::npos) { - if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; - } + // IK: let's remove this, else Q2_K is almost the same as Q3_K_S + //else if (name.find("ffn_gate") != std::string::npos || name.find("ffn_up") != std::string::npos) { + // if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; + //} // This can be used to reduce the size of the Q5_K_S model. // The associated PPL increase is fully in line with the size reduction //else { @@ -9043,6 +9048,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // K-quants case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; + case LLAMA_FTYPE_MOSTLY_Q2_K_S: quantized_type = GGML_TYPE_Q2_K; break; case LLAMA_FTYPE_MOSTLY_Q3_K_S: case LLAMA_FTYPE_MOSTLY_Q3_K_M: case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; @@ -9101,7 +9107,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (name.find("attn_v.weight") != std::string::npos || name.find("attn_qkv.weight") != std::string::npos) { ++qs.n_attention_wv; } - else if (name.find("ffn_down.weight") != std::string::npos) { + else if (name.find("ffn_down") != std::string::npos) { ++qs.n_feed_forward_w2; } } diff --git a/llama.h b/llama.h index 6fde113ff..43d41b8f6 100644 --- a/llama.h +++ b/llama.h @@ -105,6 +105,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_Q6_K = 18, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ2_XXS = 19, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ2_XS = 20, // except 1d tensors + LLAMA_FTYPE_MOSTLY_Q2_K_S = 21, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; From b0377875488b33f7114138687d828da1de61775d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 21:58:28 +0200 Subject: [PATCH 429/859] swift : track ggml release branch (#4867) --- Package.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Package.swift b/Package.swift index 59191da45..37524edee 100644 --- a/Package.swift +++ b/Package.swift @@ -14,7 +14,7 @@ let package = Package( .library(name: "llama", targets: ["llama"]), ], dependencies: [ - .package(url: "https://github.com/ggerganov/ggml.git", 
.revision("979cc23b345006504cfc1f67c0fdf627805e3319")) + .package(url: "https://github.com/ggerganov/ggml.git", .branch("release")) ], targets: [ .target( From 3ca63b4538dfc78aaec88cd2c3e3f8417c1924e3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 11 Jan 2024 22:43:05 +0200 Subject: [PATCH 430/859] main : disable token count by default (#4874) --- common/common.cpp | 6 +++--- common/common.h | 2 +- examples/main/main.cpp | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index bfcd6d4df..287e8bd5a 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -630,7 +630,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.ppl_stride = std::stoi(argv[i]); - } else if (arg == "-stc" || arg == "--show_token_count") { + } else if (arg == "-stc" || arg == "--show-token-count") { if (++i >= argc) { invalid_param = true; break; @@ -950,8 +950,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); - printf(" -stc N --show_token_count N\n"); - printf(" show consumed tokens every N tokens\n"); + printf(" -stc N --show-token-count N\n"); + printf(" show consumed tokens every N tokens (default: %d)\n", params.token_interval); printf("\n"); #ifndef LOG_DISABLE_LOGS log_print_usage(); diff --git a/common/common.h b/common/common.h index a295e88b0..82d23cf54 100644 --- a/common/common.h +++ b/common/common.h @@ -64,7 +64,7 @@ struct gpt_params { int32_t n_beams = 0; // if non-zero then use beam search of given width. 
    int32_t grp_attn_n    = 1;    // group-attention factor
     int32_t grp_attn_w    = 512;  // group-attention width
-    int32_t token_interval = 512; // show token count every 512 tokens
+    int32_t token_interval = -1;  // show token count every N tokens (-1 = disabled)
     float   rope_freq_base  = 0.0f; // RoPE base frequency
     float   rope_freq_scale = 0.0f; // RoPE frequency scaling factor
     float   yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor
diff --git a/examples/main/main.cpp b/examples/main/main.cpp
index 1f35febbd..6953d107c 100644
--- a/examples/main/main.cpp
+++ b/examples/main/main.cpp
@@ -651,8 +651,8 @@ int main(int argc, char ** argv) {
             LOG("n_past = %d\n", n_past);

             // Display total tokens alongside total time
-            if (n_past % params.token_interval == 0) {
-                printf("\n\033[31mTokens consumed so far = %d / %d \033[0m\n", n_past, n_ctx);
+            if (params.token_interval > 0 && n_past % params.token_interval == 0) {
+                LOG_TEE("\n\033[31mTokens consumed so far = %d / %d \033[0m\n", n_past, n_ctx);
             }
         }

From 7edefbd79cc6dea96640edc54c6b94b2b2496d8b Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Thu, 11 Jan 2024 22:46:26 +0200
Subject: [PATCH 431/859] main : better name for variable n_print (#4874)

---
 common/common.cpp      | 8 ++++----
 common/common.h        | 2 +-
 examples/main/main.cpp | 2 +-
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/common/common.cpp b/common/common.cpp
index 287e8bd5a..b2cb0e257 100644
--- a/common/common.cpp
+++ b/common/common.cpp
@@ -630,12 +630,12 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) {
             break;
         }
         params.ppl_stride = std::stoi(argv[i]);
-    } else if (arg == "-stc" || arg == "--show-token-count") {
+    } else if (arg == "-ptc" || arg == "--print-token-count") {
         if (++i >= argc) {
             invalid_param = true;
             break;
         }
-        params.token_interval = std::stoi(argv[i]);
+        params.n_print = std::stoi(argv[i]);
     } else if (arg == "--ppl-output-type") {
         if (++i >= argc) {
             invalid_param = true;
@@ -950,8 +950,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) {
     printf("  --override-kv KEY=TYPE:VALUE\n");
     printf("                        advanced option to override model metadata by key. may be specified multiple times.\n");
     printf("                        types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n");
-    printf("  -stc N  --show-token-count N\n");
-    printf("                        show consumed tokens every N tokens (default: %d)\n", params.token_interval);
+    printf("  -ptc N  --print-token-count N\n");
+    printf("                        print token count every N tokens (default: %d)\n", params.n_print);
     printf("\n");
 #ifndef LOG_DISABLE_LOGS
     log_print_usage();
diff --git a/common/common.h b/common/common.h
index 82d23cf54..1359e76ab 100644
--- a/common/common.h
+++ b/common/common.h
@@ -64,7 +64,7 @@ struct gpt_params {
     int32_t n_beams       = 0;    // if non-zero then use beam search of given width.
    int32_t grp_attn_n    = 1;    // group-attention factor
     int32_t grp_attn_w    = 512;  // group-attention width
-    int32_t token_interval = -1;  // show token count every N tokens (-1 = disabled)
+    int32_t n_print       = -1;  // print token count every n tokens (-1 = disabled)
     float   rope_freq_base  = 0.0f; // RoPE base frequency
     float   rope_freq_scale = 0.0f; // RoPE frequency scaling factor
     float   yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor
diff --git a/examples/main/main.cpp b/examples/main/main.cpp
index 6953d107c..c53b29978 100644
--- a/examples/main/main.cpp
+++ b/examples/main/main.cpp
@@ -651,7 +651,7 @@ int main(int argc, char ** argv) {
             LOG("n_past = %d\n", n_past);

             // Display total tokens alongside total time
-            if (params.token_interval > 0 && n_past % params.token_interval == 0) {
+            if (params.n_print > 0 && n_past % params.n_print == 0) {
                 LOG_TEE("\n\033[31mTokens consumed so far = %d / %d \033[0m\n", n_past, n_ctx);
             }
         }

From 1d118386fea031f01550f8cd47a5c86296e5333f Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Thu, 11 Jan 2024 23:23:49 +0200
Subject: [PATCH 432/859] server : fix infill when prompt is empty (#4833)

---
 examples/server/server.cpp | 15 ++++++++-------
 1 file changed, 8 insertions(+), 7 deletions(-)

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 031824e14..1d30a15a6 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -1406,7 +1406,7 @@ struct llama_server_context
         task.multitask_id = multitask_id;

         // when a completion task's prompt array is not a singleton, we split it into multiple requests
-        if (task.data.at("prompt").size() > 1)
+        if (task.data.count("prompt") && task.data.at("prompt").size() > 1)
         {
             lock.unlock(); // entering new func scope
             return split_multiprompt_task(task);
@@ -1577,9 +1577,9 @@ struct llama_server_context

         slot->reset();

-        slot->infill = task.infill_mode;
-        slot->embedding = task.embedding_mode;
-        slot->task_id = task.id;
+        slot->infill       = task.infill_mode;
+        slot->embedding    = task.embedding_mode;
+        slot->task_id      = task.id;
         slot->multitask_id = task.multitask_id;

         if (!launch_slot_with_data(slot, task.data))
@@ -1731,7 +1731,8 @@ struct llama_server_context
             const bool has_prompt = slot.prompt.is_array() || (slot.prompt.is_string() && !slot.prompt.get<std::string>().empty()) || !slot.images.empty();

             // empty prompt passed -> release the slot and send empty response
+            // note: infill mode allows empty prompt
-            if (slot.state == IDLE && slot.command == LOAD_PROMPT && !has_prompt)
+            if (slot.state == IDLE && slot.command == LOAD_PROMPT && !has_prompt && !slot.infill)
             {
                 slot.release();
                 slot.print_timings();
@@ -2609,8 +2610,8 @@ static json format_final_response_oaicompat(const json &request, const task_resu
        {"object", streaming ?
"chat.completion.chunk" : "chat.completion"}, {"usage", json{{"completion_tokens", num_tokens_predicted}, - {"prompt_tokens", num_prompt_tokens}, - {"total_tokens", num_tokens_predicted + num_prompt_tokens}}}, + {"prompt_tokens", num_prompt_tokens}, + {"total_tokens", num_tokens_predicted + num_prompt_tokens}}}, {"id", gen_chatcmplid()}}; if (server_verbose) { From 326b418b59b6d48d854c4461a2303e8ac0a311e6 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Fri, 12 Jan 2024 06:59:57 +0100 Subject: [PATCH 433/859] Importance Matrix calculation (#4861) * imatrix: 1st version * imatrix: WIP * Cleanup * Update examples/imatrix/imatrix.cpp Co-authored-by: Georgi Gerganov --------- Co-authored-by: Iwan Kawrakow Co-authored-by: Georgi Gerganov --- Makefile | 5 +- examples/CMakeLists.txt | 1 + examples/imatrix/CMakeLists.txt | 5 + examples/imatrix/imatrix.cpp | 380 ++++++++++++++++++++++++++++++++ ggml.c | 14 ++ ggml.h | 6 + 6 files changed, 410 insertions(+), 1 deletion(-) create mode 100644 examples/imatrix/CMakeLists.txt create mode 100644 examples/imatrix/imatrix.cpp diff --git a/Makefile b/Makefile index 4c7e175bf..05fe9a0f6 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ # Define the default target now so that it is always the first target BUILD_TARGETS = \ - main quantize quantize-stats perplexity embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ + main quantize quantize-stats perplexity imatrix embedding vdot q8dot train-text-from-scratch convert-llama2c-to-ggml \ simple batched batched-bench save-load-state server gguf llama-bench libllava.a llava-cli baby-llama beam-search \ speculative infill tokenize benchmark-matmult parallel finetune export-lora lookahead lookup passkey tests/test-c.o @@ -614,6 +614,9 @@ quantize-stats: examples/quantize-stats/quantize-stats.cpp build-info.o ggml. 
perplexity: examples/perplexity/perplexity.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
 
+imatrix: examples/imatrix/imatrix.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS)
+	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
+
 embedding: examples/embedding/embedding.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS)
 
diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index 0c71cbdf7..fa127a3aa 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -36,6 +36,7 @@ else()
     add_subdirectory(lookahead)
     add_subdirectory(lookup)
     add_subdirectory(train-text-from-scratch)
+    add_subdirectory(imatrix)
     if (LLAMA_METAL)
         add_subdirectory(metal)
     endif()
diff --git a/examples/imatrix/CMakeLists.txt b/examples/imatrix/CMakeLists.txt
new file mode 100644
index 000000000..d688a1620
--- /dev/null
+++ b/examples/imatrix/CMakeLists.txt
@@ -0,0 +1,5 @@
+set(TARGET imatrix)
+add_executable(${TARGET} imatrix.cpp)
+install(TARGETS ${TARGET} RUNTIME)
+target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT})
+target_compile_features(${TARGET} PRIVATE cxx_std_11)
diff --git a/examples/imatrix/imatrix.cpp b/examples/imatrix/imatrix.cpp
new file mode 100644
index 000000000..1461bc963
--- /dev/null
+++ b/examples/imatrix/imatrix.cpp
@@ -0,0 +1,380 @@
+#include "common.h"
+#include "llama.h"
+
+#include <cmath>
+#include <cstdio>
+#include <cstring>
+#include <ctime>
+#include <sstream>
+#include <thread>
+#include <mutex>
+#include <vector>
+#include <fstream>
+#include <unordered_map>
+#include <algorithm>
+
+#if defined(_MSC_VER)
+#pragma warning(disable: 4244 4267) // possible loss of data
+#endif
+
+struct Stats {
+    std::vector<float> values;
+    int ncall = 0;
+};
+
+struct StatParams {
+    std::string ofile = "imatrix.dat";
+    int  n_output_frequency = 10;
+    int  verbosity = 1;
+    bool collect_output_weight = false;
+};
+
+class IMatrixCollector {
+public:
+    IMatrixCollector() = default;
+    void set_parameters(StatParams&& params) { m_params = std::move(params); }
+    void collect_imatrix(const struct ggml_tensor * src0, const struct ggml_tensor * src1);
+    void save_imatrix() const;
+private:
+    std::unordered_map<std::string, Stats> m_stats;
+    StatParams m_params;
+    std::mutex m_mutex;
+    int m_last_call = 0;
+};
+
+void IMatrixCollector::collect_imatrix(const struct ggml_tensor * src0, const struct ggml_tensor * src1) {
+    if (src1->ne[1] < 16 || src1->type != GGML_TYPE_F32) return;
+    if (!(strncmp(src0->name, "blk.", 4) == 0 || (m_params.collect_output_weight && strcmp(src0->name, "output.weight") == 0))) return;
+    std::lock_guard<std::mutex> lock(m_mutex);
+    auto& e = m_stats[src0->name];
+    if (e.values.empty()) {
+        e.values.resize(src1->ne[0], 0);
+    }
+    else if (e.values.size() != (size_t)src1->ne[0]) {
+        fprintf(stderr, "Oops: inconsistent size for %s (%d vs %d)\n", src0->name, (int)e.values.size(), (int)src1->ne[0]);
+        exit(1); //GGML_ASSERT(false);
+    }
+    ++e.ncall;
+    if (m_params.verbosity > 1) {
+        printf("%s[%d]: %s, %d x %d, %d\n",__func__,m_last_call,src0->name,(int)src1->ne[0],(int)src1->ne[1],(int)src1->type);
+    }
+    for (int row = 0; row < (int)src1->ne[1]; ++row) {
+        const float * x = (const float *)src1->data + row * src1->ne[0];
+        for (int j = 0; j < (int)src1->ne[0]; ++j) {
+            e.values[j] += x[j]*x[j];
+        }
+    }
+    if (e.ncall > m_last_call) {
+        m_last_call = e.ncall;
+        if (m_last_call % m_params.n_output_frequency == 0) {
+            save_imatrix();
+        }
+    }
+}
+
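+// the .dat file written by save_imatrix below is a plain binary dump: an int
+// entry count, then for each tensor a length-prefixed name, the call count
+// ncall, and the per-column sums of squared activations as raw floats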
"imatrix.dat" : m_params.ofile.c_str(); + std::ofstream out(fname, std::ios::binary); + int n_entries = m_stats.size(); + out.write((const char*)&n_entries, sizeof(n_entries)); + for (auto& p : m_stats) { + int len = p.first.size(); + out.write((const char*)&len, sizeof(len)); + out.write(p.first.c_str(), len); + out.write((const char*)&p.second.ncall, sizeof(p.second.ncall)); + int nval = p.second.values.size(); + out.write((const char*)&nval, sizeof(nval)); + if (nval > 0) out.write((const char*)p.second.values.data(), nval*sizeof(float)); + } + if (m_params.verbosity > 0) { + fprintf(stderr, "\n%s: stored collected data after %d chunks in %s\n",__func__,m_last_call,fname); + } +} + +static IMatrixCollector g_collector; + +static void ik_collect_imatrix(const struct ggml_tensor * src0, const struct ggml_tensor * src1) { + g_collector.collect_imatrix(src0, src1); +} + + +struct results_log_softmax { + double log_softmax; + float logit; + float prob; +}; + +static std::vector softmax(const std::vector& logits) { + std::vector probs(logits.size()); + float max_logit = logits[0]; + for (float v : logits) { + max_logit = std::max(max_logit, v); + } + double sum_exp = 0.0; + for (size_t i = 0; i < logits.size(); i++) { + // Subtract the maximum logit value from the current logit value for numerical stability + const float logit = logits[i] - max_logit; + const float exp_logit = expf(logit); + sum_exp += exp_logit; + probs[i] = exp_logit; + } + for (size_t i = 0; i < probs.size(); i++) { + probs[i] /= sum_exp; + } + return probs; +} + +static results_log_softmax log_softmax(int n_vocab, const float * logits, int tok) { + float max_logit = logits[0]; + for (int i = 1; i < n_vocab; ++i) { + max_logit = std::max(max_logit, logits[i]); + } + double sum_exp = 0.0; + for (int i = 0; i < n_vocab; ++i) { + sum_exp += expf(logits[i] - max_logit); + } + return {logits[tok] - max_logit - log(sum_exp), logits[tok], expf(logits[tok] - max_logit) / (float) sum_exp}; +} + +static void process_logits( + int n_vocab, const float * logits, const int * tokens, int n_token, std::vector & workers, + double & nll, double & nll2, float * logit_history, float * prob_history +) { + std::mutex mutex; + int counter = 0; + auto compute = [&mutex, &counter, &nll, &nll2, logit_history, prob_history, n_vocab, logits, tokens, n_token] () { + double local_nll = 0; + double local_nll2 = 0; + while (true) { + std::unique_lock lock(mutex); + int i = counter++; + if (i >= n_token) { + nll += local_nll; nll2 += local_nll2; + break; + } + lock.unlock(); + const results_log_softmax results = log_softmax(n_vocab, logits + i*n_vocab, tokens[i+1]); + const double v = -results.log_softmax; + local_nll += v; + local_nll2 += v*v; + + logit_history[i] = results.logit; + prob_history[i] = results.prob; + } + }; + for (auto & w : workers) { + w = std::thread(compute); + } + compute(); + for (auto & w : workers) { + w.join(); + } +} + +static bool compute_imatrix(llama_context * ctx, const gpt_params & params) { + + const bool add_bos = llama_should_add_bos_token(llama_get_model(ctx)); + const int n_ctx = llama_n_ctx(ctx); + + auto tim1 = std::chrono::high_resolution_clock::now(); + fprintf(stderr, "%s: tokenizing the input ..\n", __func__); + + std::vector tokens = ::llama_tokenize(ctx, params.prompt, add_bos); + + auto tim2 = std::chrono::high_resolution_clock::now(); + fprintf(stderr, "%s: tokenization took %g ms\n",__func__,1e-3*std::chrono::duration_cast(tim2-tim1).count()); + + if (int(tokens.size()) < 2*n_ctx) { + fprintf(stderr, "%s: 
+    if (int(tokens.size()) < 2*n_ctx) {
+        fprintf(stderr, "%s: you need at least %d tokens for a context of %d tokens\n",__func__,2*n_ctx,
+                n_ctx);
+        fprintf(stderr, "%s: the data file you provided tokenizes to only %zu tokens\n",__func__,tokens.size());
+        return false;
+    }
+
+    std::vector<float> logit_history;
+    logit_history.resize(tokens.size());
+
+    std::vector<float> prob_history;
+    prob_history.resize(tokens.size());
+
+    const int n_chunk_max = tokens.size() / n_ctx;
+
+    const int n_chunk = params.n_chunks < 0 ? n_chunk_max : std::min(params.n_chunks, n_chunk_max);
+    const int n_vocab = llama_n_vocab(llama_get_model(ctx));
+    const int n_batch = params.n_batch;
+
+    int count = 0;
+    double nll = 0.0;
+    double nll2 = 0.0;
+
+    fprintf(stderr, "%s: computing over %d chunks with batch_size %d\n", __func__, n_chunk, n_batch);
+
+    std::vector<std::thread> workers(std::thread::hardware_concurrency() - 1);
+
+    for (int i = 0; i < n_chunk; ++i) {
+        const int start =     i * n_ctx;
+        const int end   = start + n_ctx;
+
+        const int num_batches = (n_ctx + n_batch - 1) / n_batch;
+
+        std::vector<float> logits;
+
+        const auto t_start = std::chrono::high_resolution_clock::now();
+
+        // clear the KV cache
+        llama_kv_cache_clear(ctx);
+
+        for (int j = 0; j < num_batches; ++j) {
+            const int batch_start = start + j * n_batch;
+            const int batch_size  = std::min(end - batch_start, n_batch);
+
+            // save original token and restore it after eval
+            const auto token_org = tokens[batch_start];
+
+            // add BOS token for the first batch of each chunk
+            if (add_bos && j == 0) {
+                tokens[batch_start] = llama_token_bos(llama_get_model(ctx));
+            }
+
+            if (llama_decode(ctx, llama_batch_get_one(tokens.data() + batch_start, batch_size, j * n_batch, 0))) {
+                fprintf(stderr, "%s : failed to eval\n", __func__);
+                return false;
+            }
+
+            // restore the original token in case it was set to BOS
+            tokens[batch_start] = token_org;
+
+            const auto * batch_logits = llama_get_logits(ctx);
+            logits.insert(logits.end(), batch_logits, batch_logits + batch_size * n_vocab);
+        }
+
+        const auto t_end = std::chrono::high_resolution_clock::now();
+
+        if (i == 0) {
+            const float t_total = std::chrono::duration<float>(t_end - t_start).count();
+            fprintf(stderr, "%s: %.2f seconds per pass - ETA ", __func__, t_total);
+            int total_seconds = (int)(t_total * n_chunk);
+            if (total_seconds >= 60*60) {
+                fprintf(stderr, "%d hours ", total_seconds / (60*60));
+                total_seconds = total_seconds % (60*60);
+            }
+            fprintf(stderr, "%.2f minutes\n", total_seconds / 60.0);
+        }
+
+        const int first = n_ctx/2;
+        process_logits(n_vocab, logits.data() + first*n_vocab, tokens.data() + start + first, n_ctx - 1 - first,
+                workers, nll, nll2, logit_history.data() + start + first, prob_history.data() + start + first);
+        count += n_ctx - first - 1;
+
+        printf("[%d]%.4lf,", i + 1, std::exp(nll / count));
+        fflush(stdout);
+    }
+    printf("\n");
+
+    nll2 /= count;
+    nll /= count;
+    const double ppl = exp(nll);
+    nll2 -= nll * nll;
+    if (nll2 > 0) {
+        nll2 = sqrt(nll2/(count-1));
+        printf("Final estimate: PPL = %.4lf +/- %.5lf\n", ppl, nll2*ppl);
+    } else {
+        printf("Unexpected negative standard deviation of log(prob)\n");
+    }
+
+    return true;
+}
+
+int main(int argc, char ** argv) {
+
+    StatParams sparams;
+    std::vector<char*> args;
+    args.push_back(argv[0]);
+    int iarg = 1;
+    for (; iarg < argc-1; ++iarg) {
+        std::string arg{argv[iarg]};
+        if (arg == "-o" || arg == "--output-file") {
+            sparams.ofile = argv[++iarg];
+        }
+        else if (arg == "-ofreq" || arg == "--output-frequency") {
+            sparams.n_output_frequency = std::stoi(argv[++iarg]);
+        }
+        else if (arg == "-ow" || arg == "--output-weight") {
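+            // non-zero: also collect statistics for the output.weight tensor
+            // (by default only the repeating blk.* tensors are tracked, see
+            // IMatrixCollector::collect_imatrix)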
sparams.collect_output_weight = std::stoi(argv[++iarg]); + } + else if (arg == "--verbosity") { + sparams.verbosity = std::stoi(argv[++iarg]); + } else { + args.push_back(argv[iarg]); + } + } + if (iarg < argc) { + args.push_back(argv[iarg]); + } + + gpt_params params; + params.n_batch = 512; + if (!gpt_params_parse(args.size(), args.data(), params)) { + return 1; + } + + g_collector.set_parameters(std::move(sparams)); + + ggml_set_imatrix_collection(ik_collect_imatrix); + + params.logits_all = true; + params.n_batch = std::min(params.n_batch, params.n_ctx); + + print_build_info(); + + if (params.seed == LLAMA_DEFAULT_SEED) { + params.seed = time(NULL); + } + + fprintf(stderr, "%s: seed = %u\n", __func__, params.seed); + + std::mt19937 rng(params.seed); + if (params.random_prompt) { + params.prompt = gpt_random_prompt(rng); + } + + llama_backend_init(params.numa); + + llama_model * model; + llama_context * ctx; + + // load the model and apply lora adapter, if any + std::tie(model, ctx) = llama_init_from_gpt_params(params); + if (model == NULL) { + fprintf(stderr, "%s: error: unable to load model\n", __func__); + return 1; + } + + const int n_ctx_train = llama_n_ctx_train(model); + if (params.n_ctx > n_ctx_train) { + fprintf(stderr, "%s: warning: model was trained on only %d context tokens (%d specified)\n", + __func__, n_ctx_train, params.n_ctx); + } + + // print system information + { + fprintf(stderr, "\n"); + fprintf(stderr, "%s\n", get_system_info(params).c_str()); + } + + bool OK = compute_imatrix(ctx, params); + if (!OK) { + return 1; + } + + g_collector.save_imatrix(); + + llama_print_timings(ctx); + + llama_free(ctx); + llama_free_model(model); + + llama_backend_free(); + + return 0; +} diff --git a/ggml.c b/ggml.c index d2a8c0478..f5caeba08 100644 --- a/ggml.c +++ b/ggml.c @@ -394,6 +394,12 @@ static const size_t CACHE_LINE_SIZE_F32 = CACHE_LINE_SIZE/sizeof(float); static void ggml_vec_dot_f32(const int n, float * restrict s, const float * restrict x, const float * restrict y); static void ggml_vec_dot_f16(const int n, float * restrict s, ggml_fp16_t * restrict x, ggml_fp16_t * restrict y); +ggml_collect_imatrix_t g_imatrix_collect = NULL; + +void ggml_set_imatrix_collection(ggml_collect_imatrix_t imatrix_collect) { + g_imatrix_collect = imatrix_collect; +} + static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { [GGML_TYPE_I8] = { .type_name = "i8", @@ -9763,6 +9769,10 @@ static void ggml_compute_forward_mul_mat( const int ith = params->ith; const int nth = params->nth; + if (ith == 1 && g_imatrix_collect) { + g_imatrix_collect(src0, src1); + } + const enum ggml_type type = src0->type; const bool src1_cont = ggml_is_contiguous(src1); @@ -10066,6 +10076,10 @@ static void ggml_compute_forward_mul_mat_id( const struct ggml_tensor * src0_cur = dst->src[cur_a + 2]; + if (ith == 1 && g_imatrix_collect) { + g_imatrix_collect(src0_cur, src1); + } + const void * wdata = (src1->type == vec_dot_type) ? 
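 // note: the g_imatrix_collect hooks above run on thread 1 only (ith == 1), so
 // each matrix multiplication records its activations exactly once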
src1->data : params->wdata; const size_t row_size = ggml_row_size(vec_dot_type, ne10); diff --git a/ggml.h b/ggml.h index 93b42a27d..4c2ff6c66 100644 --- a/ggml.h +++ b/ggml.h @@ -2067,6 +2067,12 @@ extern "C" { GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist); + // + // Importance matrix + // + typedef void(*ggml_collect_imatrix_t)(const struct ggml_tensor * src0, const struct ggml_tensor * src1); + GGML_API void ggml_set_imatrix_collection(ggml_collect_imatrix_t imatrix_collect); + // // gguf // From f445c0e68cf8e1faca0b2aa8dfb9d48231cec301 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 13:01:56 +0200 Subject: [PATCH 434/859] llama : fix llm_build_k_shift to use correct n_rot (#4889) * llama : fix llm_build_k_shift to use correct n_rot ggml-ci * llama : always use hparams.n_rot for ggml_rope_custom ggml-ci * convert : fix persimmon conversion to write correct n_rot --- common/common.cpp | 3 ++ convert-hf-to-gguf.py | 9 ++++- gguf-py/gguf/tensor_mapping.py | 7 ++++ llama.cpp | 65 +++++++++++++++++----------------- 4 files changed, 51 insertions(+), 33 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index b2cb0e257..3aefed01d 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1055,6 +1055,9 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params & } static ggml_type kv_cache_type_from_str(const std::string & s) { + if (s == "f32") { + return GGML_TYPE_F32; + } if (s == "f16") { return GGML_TYPE_F16; } diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 203eaf64b..813aeeed6 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -817,10 +817,17 @@ class PersimmonModel(Model): hidden_size = self.hparams["hidden_size"] self.gguf_writer.add_name('persimmon-8b-chat') + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) self.gguf_writer.add_embedding_length(hidden_size) self.gguf_writer.add_block_count(block_count) self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) - self.gguf_writer.add_rope_dimension_count(hidden_size // head_count) + + # NOTE: not sure about this change - why does the model not have a rope dimension count when it is smaller + # than the head size? 
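+        # e.g. for persimmon-8b (hidden_size 4096, 64 heads): head size is 64,
+        # so this writes 4096 // 64 // 2 = 32 rotary dims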
+ # ref: https://github.com/ggerganov/llama.cpp/pull/4889 + #self.gguf_writer.add_rope_dimension_count(hidden_size // head_count) + self.gguf_writer.add_rope_dimension_count(hidden_size // head_count // 2) + self.gguf_writer.add_head_count(head_count) self.gguf_writer.add_head_count_kv(head_count_kv) self.gguf_writer.add_rope_freq_base(self.hparams["rope_theta"]) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 80c1d5449..24a089037 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -57,6 +57,7 @@ class TensorNameMap: "transformer.norm_f", # mpt "ln_f", # refact bloom qwen gpt2 "language_model.encoder.final_layernorm", # persimmon + "model.final_layernorm", # persimmon "lm_head.ln", # phi2 ), @@ -98,6 +99,7 @@ class TensorNameMap: "transformer.h.{bid}.self_attention.query_key_value", # falcon "h.{bid}.self_attention.query_key_value", # bloom "language_model.encoder.layers.{bid}.self_attention.query_key_value", # persimmon + "model.layers.{bid}.self_attn.query_key_value", # persimmon "h.{bid}.attn.c_attn", # gpt2 "transformer.h.{bid}.mixer.Wqkv", # phi2 ), @@ -141,6 +143,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.output.dense", # bert "transformer.h.{bid}.attn.out_proj", # gpt-j "language_model.encoder.layers.{bid}.self_attention.dense", # persimmon + "model.layers.{bid}.self_attn.dense", # persimmon "h.{bid}.attn.c_proj", # gpt2 "transformer.h.{bid}.mixer.out_proj", # phi2 "model.layers.layers.{bid}.self_attn.o_proj", # plamo @@ -184,6 +187,7 @@ class TensorNameMap: "encoder.layer.{bid}.intermediate.dense", # bert "transformer.h.{bid}.mlp.fc_in", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_h_to_4h", # persimmon + "model.layers.{bid}.mlp.dense_h_to_4h", # persimmon "transformer.h.{bid}.mlp.w1", # qwen "h.{bid}.mlp.c_fc", # gpt2 "transformer.h.{bid}.mlp.fc1", # phi2 @@ -225,6 +229,7 @@ class TensorNameMap: "encoder.layer.{bid}.output.dense", # bert "transformer.h.{bid}.mlp.fc_out", # gpt-j "language_model.encoder.layers.{bid}.mlp.dense_4h_to_h", # persimmon + "model.layers.{bid}.mlp.dense_4h_to_h", # persimmon "h.{bid}.mlp.c_proj", # gpt2 "transformer.h.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo @@ -237,10 +242,12 @@ class TensorNameMap: MODEL_TENSOR.ATTN_Q_NORM: ( "language_model.encoder.layers.{bid}.self_attention.q_layernorm", + "model.layers.{bid}.self_attn.q_layernorm", # persimmon ), MODEL_TENSOR.ATTN_K_NORM: ( "language_model.encoder.layers.{bid}.self_attention.k_layernorm", + "model.layers.{bid}.self_attn.k_layernorm", # persimmon ), MODEL_TENSOR.ROPE_FREQS: ( diff --git a/llama.cpp b/llama.cpp index d39ff94c7..0bab95563 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4104,7 +4104,6 @@ static void llm_build_k_shift( struct ggml_cgraph * graph, llm_rope_type type, int64_t n_ctx, - int n_rot, float freq_base, float freq_scale, const llm_build_cb & cb) { @@ -4112,14 +4111,13 @@ static void llm_build_k_shift( const int64_t n_head_kv = hparams.n_head_kv; const int64_t n_embd_head_k = hparams.n_embd_head_k; const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int32_t n_rot = hparams.n_rot; const int32_t n_orig_ctx = cparams.n_yarn_orig_ctx; const float ext_factor = cparams.yarn_ext_factor; const float attn_factor = cparams.yarn_attn_factor; const float beta_fast = cparams.yarn_beta_fast; const float beta_slow = cparams.yarn_beta_slow; - GGML_ASSERT(n_embd_head_k % n_rot == 0); - struct ggml_tensor * K_shift = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, n_ctx); cb(K_shift, 
"K_shift", -1); @@ -4523,7 +4521,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -4561,14 +4559,14 @@ struct llm_build_context { Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -4691,6 +4689,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4708,7 +4707,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -4734,12 +4733,12 @@ struct llm_build_context { case MODEL_7B: Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); Kcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - n_embd_head, 0, 0, n_orig_ctx, freq_base, freq_scale, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); break; @@ -4812,6 +4811,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -4829,7 +4829,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -4870,13 +4870,13 @@ struct llm_build_context { // using mode = 2 for neox mode Qcur = ggml_rope_custom( - ctx0, Qcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, Kcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -5033,9 +5033,8 @@ struct llm_build_context { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, 
LLAMA_MAX_NODES, false); const int64_t n_embd_head = hparams.n_embd_head_v; - GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - - const int64_t n_rot = n_embd_head_k / 2; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head/2 == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5052,7 +5051,7 @@ struct llm_build_context { cb(KQ_mask, "KQ_mask", -1); if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -5112,7 +5111,7 @@ struct llm_build_context { // RoPE the first n_rot of q/k, pass the other half, and concat. struct ggml_tensor * qrot = ggml_view_3d( - ctx0, tmpq, n_rot, n_head, n_tokens, + ctx0, tmpq, hparams.n_rot, n_head, n_tokens, ggml_element_size(tmpq) * n_embd_head, ggml_element_size(tmpq) * n_embd_head * n_head, 0 @@ -5120,7 +5119,7 @@ struct llm_build_context { cb(qrot, "qrot", il); struct ggml_tensor * krot = ggml_view_3d( - ctx0, tmpk, n_rot, n_head, n_tokens, + ctx0, tmpk, hparams.n_rot, n_head, n_tokens, ggml_element_size(tmpk) * n_embd_head, ggml_element_size(tmpk) * n_embd_head * n_head, 0 @@ -5129,29 +5128,29 @@ struct llm_build_context { // get the second half of tmpq, e.g tmpq[n_rot:, :, :] struct ggml_tensor * qpass = ggml_view_3d( - ctx0, tmpq, n_rot, n_head, n_tokens, + ctx0, tmpq, hparams.n_rot, n_head, n_tokens, ggml_element_size(tmpq) * n_embd_head, ggml_element_size(tmpq) * n_embd_head * n_head, - ggml_element_size(tmpq) * n_rot + ggml_element_size(tmpq) * hparams.n_rot ); cb(qpass, "qpass", il); struct ggml_tensor * kpass = ggml_view_3d( - ctx0, tmpk, n_rot, n_head, n_tokens, + ctx0, tmpk, hparams.n_rot, n_head, n_tokens, ggml_element_size(tmpk) * n_embd_head, ggml_element_size(tmpk) * n_embd_head * n_head, - ggml_element_size(tmpk) * n_rot + ggml_element_size(tmpk) * hparams.n_rot ); cb(kpass, "kpass", il); struct ggml_tensor * qrotated = ggml_rope_custom( - ctx0, qrot, inp_pos, n_rot, 2, 0, n_orig_ctx, + ctx0, qrot, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(qrotated, "qrotated", il); struct ggml_tensor * krotated = ggml_rope_custom( - ctx0, krot, inp_pos, n_rot, 2, 0, n_orig_ctx, + ctx0, krot, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(krotated, "krotated", il); @@ -5531,6 +5530,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5548,7 +5548,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, hparams.n_rot, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -5661,7 +5661,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < 
n_layer; ++il) { @@ -5693,13 +5693,13 @@ struct llm_build_context { // using mode = 2 for neox mode Qcur = ggml_rope_custom( - ctx0, Qcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + ctx0, Qcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, Kcur, inp_pos, n_embd_head, 2, 0, n_orig_ctx, + ctx0, Kcur, inp_pos, hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); cb(Kcur, "Kcur", il); @@ -5778,7 +5778,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE_NEOX, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -5874,6 +5874,7 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; @@ -5891,7 +5892,7 @@ struct llm_build_context { // shift the entire K-cache if needed if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, n_embd_head, freq_base, freq_scale, cb); + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); } for (int il = 0; il < n_layer; ++il) { @@ -5917,13 +5918,13 @@ struct llm_build_context { cb(Vcur, "Vcur", il); Qcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + ctx0, ggml_reshape_3d(ctx0, Qcur, hparams.n_rot, n_head, n_tokens), inp_pos, n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(Qcur, "Qcur", il); Kcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + ctx0, ggml_reshape_3d(ctx0, Kcur, hparams.n_rot, n_head_kv, n_tokens), inp_pos, n_embd_head, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); cb(Kcur, "Kcur", il); From 2d00741e12c5db4a33dfccd1125f5de4adec9a5b Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 13:03:38 +0200 Subject: [PATCH 435/859] py : fix lint (#4889) --- convert-hf-to-gguf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 813aeeed6..a1c79fd47 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -825,7 +825,7 @@ class PersimmonModel(Model): # NOTE: not sure about this change - why does the model not have a rope dimension count when it is smaller # than the head size? 
# ref: https://github.com/ggerganov/llama.cpp/pull/4889 - #self.gguf_writer.add_rope_dimension_count(hidden_size // head_count) + # self.gguf_writer.add_rope_dimension_count(hidden_size // head_count) self.gguf_writer.add_rope_dimension_count(hidden_size // head_count // 2) self.gguf_writer.add_head_count(head_count) From 4315a94366708828f949f9db89d2a8d99b634459 Mon Sep 17 00:00:00 2001 From: howlger Date: Fri, 12 Jan 2024 12:05:32 +0100 Subject: [PATCH 436/859] common : streamline the formatting of help (#4890) * common : streamline the formatting of help - Separate alternative parameters by a comma - Do not indent `--version` differently * Update common/common.cpp --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 3aefed01d..062a8b4de 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -818,7 +818,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf("\n"); printf("options:\n"); printf(" -h, --help show this help message and exit\n"); - printf(" --version show version and build info\n"); + printf(" --version show version and build info\n"); printf(" -i, --interactive run in interactive mode\n"); printf(" --interactive-first run in interactive mode and wait for input right away\n"); printf(" -ins, --instruct run in instruction mode (use with Alpaca models)\n"); @@ -915,7 +915,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" number of layers to store in VRAM\n"); printf(" -ngld N, --n-gpu-layers-draft N\n"); printf(" number of layers to store in VRAM for the draft model\n"); - printf(" -ts SPLIT --tensor-split SPLIT\n"); + printf(" -ts SPLIT, --tensor-split SPLIT\n"); printf(" how to split tensors across multiple GPUs, comma-separated list of proportions, e.g. 3,1\n"); printf(" -mg i, --main-gpu i the GPU to use for scratch and small tensors\n"); #ifdef GGML_USE_CUBLAS @@ -950,7 +950,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); - printf(" -stc N --print-token-count N\n"); + printf(" -ptc N, --print-token-count N\n"); printf(" print token count every N tokens (default: %d)\n", params.n_print); printf("\n"); #ifndef LOG_DISABLE_LOGS From 3cabe80630c7eeb57713cd02249053a8cf6894fa Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 13:10:19 +0200 Subject: [PATCH 437/859] llama : fix typo "imp_embd" -> "inp_embd" --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 0bab95563..29f8873f6 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5040,7 +5040,7 @@ struct llm_build_context { struct ggml_tensor * inpL; inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, cb); - cb(inpL, "imp_embd", -1); + cb(inpL, "inp_embd", -1); // inp_pos - contains the positions struct ggml_tensor * inp_pos = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_tokens); From 1b280c9fffd682b6924010a4437f0275f2921fa9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Fri, 12 Jan 2024 12:30:41 +0100 Subject: [PATCH 438/859] CUDA: fix softmax compile for old CUDA versions (#4862) --- ggml-cuda.cu | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index dd19699f6..a345b0c4a 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -116,6 +116,8 @@ #include "ggml.h" #include "ggml-backend-impl.h" +#define CUDART_HMAX 11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed) + #define CC_PASCAL 600 #define MIN_CC_DP4A 610 // minimum compute capability for __dp4a, an intrinsic for byte-wise dot products #define CC_VOLTA 700 @@ -605,16 +607,16 @@ static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { } static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { -#if __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) - (void) a; - bad_arch(); -#else +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL #pragma unroll for (int mask = 16; mask > 0; mask >>= 1) { a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); } return a; -#endif // __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +#else + (void) a; + bad_arch(); +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL } static __device__ __forceinline__ float warp_reduce_max(float x) { @@ -626,16 +628,16 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { } static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { -#if __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) - (void) x; - bad_arch(); -#else +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX #pragma unroll for (int mask = 16; mask > 0; mask >>= 1) { x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); } return x; -#endif // __CUDA_ARCH__ < CC_PASCAL || (defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) +#else + (void) x; + bad_arch(); +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX } static __device__ __forceinline__ float op_repeat(const float a, const float b) { @@ -5613,7 +5615,7 @@ static __global__ void diag_mask_inf_f32(const 
float * x, float * dst, const int template static __global__ void soft_max_f16(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX const int ncols_data = ncols_template == 0 ? ncols_par : ncols_template; const int ncols_smem = GGML_PAD(ncols_data, 2*WARP_SIZE)/2; @@ -5738,7 +5740,7 @@ static __global__ void soft_max_f16(const float * x, const float * y, float * ds #else (void) x; (void) y; (void) dst; (void) ncols_par; (void) nrows_y; (void) scale; bad_arch(); -#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX } template @@ -8574,15 +8576,15 @@ static void ggml_cuda_op_soft_max( float scale = 1.0f; memcpy(&scale, dst->op_params, sizeof(float)); -#if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) - const bool use_f16_soft_max = false; -#else +#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && CUDART_VERSION >= CUDART_HMAX #ifdef GGML_CUDA_F16 const bool use_f16_soft_max = true; #else const bool use_f16_soft_max = false; #endif // GGML_CUDA_F16 -#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) +#else + const bool use_f16_soft_max = false; +#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) && CUDART_VERSION >= CUDART_HMAX if (use_f16_soft_max) { soft_max_f16_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); From 5537d9d36bfdb4379555431f574d3d78ce6e7955 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 14:33:21 +0200 Subject: [PATCH 439/859] gitignore : imatrix --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index cf1b692e9..fba207045 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,7 @@ models-mnt /embedding /gguf /gguf-llama-simple +/imatrix /infill /libllama.so /llama-bench From e790eef21ce659f5c16d59f8a5c8dcf6cde0692a Mon Sep 17 00:00:00 2001 From: Zay <95888118+isaiahbjork@users.noreply.github.com> Date: Fri, 12 Jan 2024 05:48:00 -0700 Subject: [PATCH 440/859] llama.swiftui : update models layout (#4826) * Updated Models Layout - Added a models drawer - Added downloading directly from Hugging Face - Load custom models from local folder - Delete models by swiping left * trimmed trailing white space * Updated Models Layout --- .../llama.swiftui.xcodeproj/project.pbxproj | 8 +- .../llama.swiftui/Models/LlamaState.swift | 89 ++++++++ .../llama.swiftui/UI/ContentView.swift | 213 +++++++++--------- .../llama.swiftui/UI/DownloadButton.swift | 2 + .../llama.swiftui/UI/InputButton.swift | 131 +++++++++++ 5 files changed, 338 insertions(+), 105 deletions(-) create mode 100644 examples/llama.swiftui/llama.swiftui/UI/InputButton.swift diff --git a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj index a8848a49f..3950b9e9d 100644 --- a/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj +++ b/examples/llama.swiftui/llama.swiftui.xcodeproj/project.pbxproj @@ -8,6 +8,7 @@ /* Begin PBXBuildFile section */ 549479CB2AC9E16000E0F78B /* Metal.framework in Frameworks */ = {isa = 
PBXBuildFile; fileRef = 549479CA2AC9E16000E0F78B /* Metal.framework */; }; + 79E1D9CD2B4CD16E005F8E46 /* InputButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */; }; 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */; }; 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */; }; 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A1C83782AC328BD0096AF73 /* ContentView.swift */; }; @@ -22,6 +23,7 @@ /* Begin PBXFileReference section */ 549479CA2AC9E16000E0F78B /* Metal.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Metal.framework; path = System/Library/Frameworks/Metal.framework; sourceTree = SDKROOT; }; + 79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InputButton.swift; sourceTree = ""; }; 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DownloadButton.swift; sourceTree = ""; }; 8A1C83732AC328BD0096AF73 /* llama.swiftui.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = llama.swiftui.app; sourceTree = BUILT_PRODUCTS_DIR; }; 8A1C83762AC328BD0096AF73 /* llama_swiftuiApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = llama_swiftuiApp.swift; sourceTree = ""; }; @@ -119,6 +121,7 @@ 7FA3D2B22B2EA2F600543F92 /* DownloadButton.swift */, 8A1C83782AC328BD0096AF73 /* ContentView.swift */, F1FE20E12B465EC900B45541 /* LoadCustomButton.swift */, + 79E1D9CC2B4CD16E005F8E46 /* InputButton.swift */, ); path = UI; sourceTree = ""; @@ -213,6 +216,7 @@ 8A1C83792AC328BD0096AF73 /* ContentView.swift in Sources */, 8A1C83772AC328BD0096AF73 /* llama_swiftuiApp.swift in Sources */, 7FA3D2B32B2EA2F600543F92 /* DownloadButton.swift in Sources */, + 79E1D9CD2B4CD16E005F8E46 /* InputButton.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -345,7 +349,7 @@ CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = STLSG3FG8Q; + DEVELOPMENT_TEAM = K5UQJPP73A; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; @@ -377,7 +381,7 @@ CLANG_ENABLE_MODULES = YES; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = STLSG3FG8Q; + DEVELOPMENT_TEAM = K5UQJPP73A; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; diff --git a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift index 17cb5b9dd..5bde18917 100644 --- a/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift +++ b/examples/llama.swiftui/llama.swiftui/Models/LlamaState.swift @@ -1,9 +1,19 @@ import Foundation +struct Model: Identifiable { + var id = UUID() + var name: String + var url: String + var filename: String + var status: String? 
+} + @MainActor class LlamaState: ObservableObject { @Published var messageLog = "" @Published var cacheCleared = false + @Published var downloadedModels: [Model] = [] + @Published var undownloadedModels: [Model] = [] let NS_PER_S = 1_000_000_000.0 private var llamaContext: LlamaContext? @@ -13,23 +23,102 @@ class LlamaState: ObservableObject { } init() { + loadModelsFromDisk() + loadDefaultModels() + } + + private func loadModelsFromDisk() { + do { + let documentsURL = getDocumentsDirectory() + let modelURLs = try FileManager.default.contentsOfDirectory(at: documentsURL, includingPropertiesForKeys: nil, options: [.skipsHiddenFiles, .skipsSubdirectoryDescendants]) + for modelURL in modelURLs { + let modelName = modelURL.deletingPathExtension().lastPathComponent + downloadedModels.append(Model(name: modelName, url: "", filename: modelURL.lastPathComponent, status: "downloaded")) + } + } catch { + print("Error loading models from disk: \(error)") + } + } + + private func loadDefaultModels() { do { try loadModel(modelUrl: defaultModelUrl) } catch { messageLog += "Error!\n" } + + for model in defaultModels { + let fileURL = getDocumentsDirectory().appendingPathComponent(model.filename) + if FileManager.default.fileExists(atPath: fileURL.path) { + + } else { + var undownloadedModel = model + undownloadedModel.status = "download" + undownloadedModels.append(undownloadedModel) + } + } } + func getDocumentsDirectory() -> URL { + let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) + return paths[0] + } + private let defaultModels: [Model] = [ + Model(name: "TinyLlama-1.1B (Q4_0, 0.6 GiB)",url: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true",filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf", status: "download"), + Model( + name: "TinyLlama-1.1B Chat (Q8_0, 1.1 GiB)", + url: "https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q8_0.gguf?download=true", + filename: "tinyllama-1.1b-chat-v1.0.Q8_0.gguf", status: "download" + ), + + Model( + name: "TinyLlama-1.1B (F16, 2.2 GiB)", + url: "https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true", + filename: "tinyllama-1.1b-f16.gguf", status: "download" + ), + + Model( + name: "Phi-2.7B (Q4_0, 1.6 GiB)", + url: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true", + filename: "phi-2-q4_0.gguf", status: "download" + ), + + Model( + name: "Phi-2.7B (Q8_0, 2.8 GiB)", + url: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true", + filename: "phi-2-q8_0.gguf", status: "download" + ), + + Model( + name: "Mistral-7B-v0.1 (Q4_0, 3.8 GiB)", + url: "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true", + filename: "mistral-7b-v0.1.Q4_0.gguf", status: "download" + ), + Model( + name: "OpenHermes-2.5-Mistral-7B (Q3_K_M, 3.52 GiB)", + url: "https://huggingface.co/TheBloke/OpenHermes-2.5-Mistral-7B-GGUF/resolve/main/openhermes-2.5-mistral-7b.Q3_K_M.gguf?download=true", + filename: "openhermes-2.5-mistral-7b.Q3_K_M.gguf", status: "download" + ) + ] func loadModel(modelUrl: URL?) 
throws { if let modelUrl { messageLog += "Loading model...\n" llamaContext = try LlamaContext.create_context(path: modelUrl.path()) messageLog += "Loaded model \(modelUrl.lastPathComponent)\n" + + // Assuming that the model is successfully loaded, update the downloaded models + updateDownloadedModels(modelName: modelUrl.lastPathComponent, status: "downloaded") } else { messageLog += "Load a model from the list below\n" } } + + private func updateDownloadedModels(modelName: String, status: String) { + undownloadedModels.removeAll { $0.name == modelName } + } + + func complete(text: String) async { guard let llamaContext else { return diff --git a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift index 7c81ea256..30c2dc431 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/ContentView.swift @@ -2,115 +2,57 @@ import SwiftUI struct ContentView: View { @StateObject var llamaState = LlamaState() - @State private var multiLineText = "" - - private static func cleanupModelCaches() { - // Delete all models (*.gguf) - let fileManager = FileManager.default - let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] - do { - let fileURLs = try fileManager.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil) - for fileURL in fileURLs { - if fileURL.pathExtension == "gguf" { - try fileManager.removeItem(at: fileURL) - } - } - } catch { - print("Error while enumerating files \(documentsUrl.path): \(error.localizedDescription)") - } - } + @State private var showingHelp = false // To track if Help Sheet should be shown var body: some View { - VStack { - ScrollView(.vertical, showsIndicators: true) { - Text(llamaState.messageLog) - .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) + NavigationView { + VStack { + ScrollView(.vertical, showsIndicators: true) { + Text(llamaState.messageLog) + .font(.system(size: 12)) + .frame(maxWidth: .infinity, alignment: .leading) + .padding() + .onTapGesture { + UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) + } + } + + TextEditor(text: $multiLineText) + .frame(height: 80) + .padding() + .border(Color.gray, width: 0.5) + + HStack { + Button("Send") { + sendText() + } + + Button("Bench") { + bench() + } + + Button("Clear") { + clear() + } + + Button("Copy") { + UIPasteboard.general.string = llamaState.messageLog + } + } + .buttonStyle(.bordered) .padding() - .onTapGesture { - UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) - } - } - TextEditor(text: $multiLineText) - .frame(height: 80) + NavigationLink(destination: DrawerView(llamaState: llamaState)) { + Text("View Models") + } .padding() - .border(Color.gray, width: 0.5) - HStack { - Button("Send") { - sendText() - } - - Button("Bench") { - bench() - } - - Button("Clear") { - clear() - } - - Button("Copy") { - UIPasteboard.general.string = llamaState.messageLog - } - }.buttonStyle(.bordered) - - VStack(alignment: .leading) { - DownloadButton( - llamaState: llamaState, - modelName: "TinyLlama-1.1B (Q4_0, 0.6 GiB)", - modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true", - filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "TinyLlama-1.1B (Q8_0, 1.1 
GiB)", - modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q8_0.gguf?download=true", - filename: "tinyllama-1.1b-1t-openorca.Q8_0.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "TinyLlama-1.1B (F16, 2.2 GiB)", - modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true", - filename: "tinyllama-1.1b-f16.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "Phi-2.7B (Q4_0, 1.6 GiB)", - modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true", - filename: "phi-2-q4_0.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "Phi-2.7B (Q8_0, 2.8 GiB)", - modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true", - filename: "phi-2-q8_0.gguf" - ) - - DownloadButton( - llamaState: llamaState, - modelName: "Mistral-7B-v0.1 (Q4_0, 3.8 GiB)", - modelUrl: "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true", - filename: "mistral-7b-v0.1.Q4_0.gguf" - ) - - Button("Clear downloaded models") { - ContentView.cleanupModelCaches() - llamaState.cacheCleared = true - } - - LoadCustomButton(llamaState: llamaState) } - .padding(.top, 4) - .font(.system(size: 12)) - .frame(maxWidth: .infinity, alignment: .leading) + .padding() + .navigationBarTitle("Model Settings", displayMode: .inline) + } - .padding() } func sendText() { @@ -131,8 +73,73 @@ struct ContentView: View { await llamaState.clear() } } + struct DrawerView: View { + + @ObservedObject var llamaState: LlamaState + @State private var showingHelp = false + func delete(at offsets: IndexSet) { + offsets.forEach { offset in + let model = llamaState.downloadedModels[offset] + let fileURL = getDocumentsDirectory().appendingPathComponent(model.filename) + do { + try FileManager.default.removeItem(at: fileURL) + } catch { + print("Error deleting file: \(error)") + } + } + + // Remove models from downloadedModels array + llamaState.downloadedModels.remove(atOffsets: offsets) + } + + func getDocumentsDirectory() -> URL { + let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) + return paths[0] + } + var body: some View { + List { + Section(header: Text("Download Models From Hugging Face")) { + HStack { + InputButton(llamaState: llamaState) + } + } + Section(header: Text("Downloaded Models")) { + ForEach(llamaState.downloadedModels) { model in + DownloadButton(llamaState: llamaState, modelName: model.name, modelUrl: model.url, filename: model.filename) + } + .onDelete(perform: delete) + } + Section(header: Text("Default Models")) { + ForEach(llamaState.undownloadedModels) { model in + DownloadButton(llamaState: llamaState, modelName: model.name, modelUrl: model.url, filename: model.filename) + } + } + + } + .listStyle(GroupedListStyle()) + .navigationBarTitle("Model Settings", displayMode: .inline).toolbar { + ToolbarItem(placement: .navigationBarTrailing) { + Button("Help") { + showingHelp = true + } + } + }.sheet(isPresented: $showingHelp) { // Sheet for help modal + VStack(alignment: .leading) { + VStack(alignment: .leading) { + Text("1. Make sure the model is in GGUF Format") + .padding() + Text("2. 
Copy the download link of the quantized model") + .padding() + } + Spacer() + } + } + } + } } -//#Preview { -// ContentView() -//} +struct ContentView_Previews: PreviewProvider { + static var previews: some View { + ContentView() + } +} diff --git a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift index c9f322ca1..4584d6eaa 100644 --- a/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift +++ b/examples/llama.swiftui/llama.swiftui/UI/DownloadButton.swift @@ -53,6 +53,8 @@ struct DownloadButton: View { llamaState.cacheCleared = false + let model = Model(name: modelName, url: modelUrl, filename: filename, status: "downloaded") + llamaState.downloadedModels.append(model) status = "downloaded" } } catch let err { diff --git a/examples/llama.swiftui/llama.swiftui/UI/InputButton.swift b/examples/llama.swiftui/llama.swiftui/UI/InputButton.swift new file mode 100644 index 000000000..c5ffbad4e --- /dev/null +++ b/examples/llama.swiftui/llama.swiftui/UI/InputButton.swift @@ -0,0 +1,131 @@ +import SwiftUI + +struct InputButton: View { + @ObservedObject var llamaState: LlamaState + @State private var inputLink: String = "" + @State private var status: String = "download" + @State private var filename: String = "" + + @State private var downloadTask: URLSessionDownloadTask? + @State private var progress = 0.0 + @State private var observation: NSKeyValueObservation? + + private static func extractModelInfo(from link: String) -> (modelName: String, filename: String)? { + guard let url = URL(string: link), + let lastPathComponent = url.lastPathComponent.components(separatedBy: ".").first, + let modelName = lastPathComponent.components(separatedBy: "-").dropLast().joined(separator: "-").removingPercentEncoding, + let filename = lastPathComponent.removingPercentEncoding else { + return nil + } + + return (modelName, filename) + } + + private static func getFileURL(filename: String) -> URL { + FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent(filename) + } + + private func download() { + guard let extractedInfo = InputButton.extractModelInfo(from: inputLink) else { + // Handle invalid link or extraction failure + return + } + + let (modelName, filename) = extractedInfo + self.filename = filename // Set the state variable + + status = "downloading" + print("Downloading model \(modelName) from \(inputLink)") + guard let url = URL(string: inputLink) else { return } + let fileURL = InputButton.getFileURL(filename: filename) + + downloadTask = URLSession.shared.downloadTask(with: url) { temporaryURL, response, error in + if let error = error { + print("Error: \(error.localizedDescription)") + return + } + + guard let response = response as? 
HTTPURLResponse, (200...299).contains(response.statusCode) else { + print("Server error!") + return + } + + do { + if let temporaryURL = temporaryURL { + try FileManager.default.copyItem(at: temporaryURL, to: fileURL) + print("Writing to \(filename) completed") + + llamaState.cacheCleared = false + + let model = Model(name: modelName, url: self.inputLink, filename: filename, status: "downloaded") + llamaState.downloadedModels.append(model) + status = "downloaded" + } + } catch let err { + print("Error: \(err.localizedDescription)") + } + } + + observation = downloadTask?.progress.observe(\.fractionCompleted) { progress, _ in + self.progress = progress.fractionCompleted + } + + downloadTask?.resume() + } + + var body: some View { + VStack { + HStack { + TextField("Paste Quantized Download Link", text: $inputLink) + .textFieldStyle(RoundedBorderTextFieldStyle()) + + Button(action: { + downloadTask?.cancel() + status = "download" + }) { + Text("Cancel") + } + } + + if status == "download" { + Button(action: download) { + Text("Download Custom Model") + } + } else if status == "downloading" { + Button(action: { + downloadTask?.cancel() + status = "download" + }) { + Text("Downloading \(Int(progress * 100))%") + } + } else if status == "downloaded" { + Button(action: { + let fileURL = InputButton.getFileURL(filename: self.filename) + if !FileManager.default.fileExists(atPath: fileURL.path) { + download() + return + } + do { + try llamaState.loadModel(modelUrl: fileURL) + } catch let err { + print("Error: \(err.localizedDescription)") + } + }) { + Text("Load Custom Model") + } + } else { + Text("Unknown status") + } + } + .onDisappear() { + downloadTask?.cancel() + } + .onChange(of: llamaState.cacheCleared) { newValue in + if newValue { + downloadTask?.cancel() + let fileURL = InputButton.getFileURL(filename: self.filename) + status = FileManager.default.fileExists(atPath: fileURL.path) ? "downloaded" : "download" + } + } + } +} From 930f907d3ece1eb5b0a1ec5e209983a66dcbfa68 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Fri, 12 Jan 2024 18:54:53 +0100 Subject: [PATCH 441/859] export-lora : use LLAMA_FILE_MAGIC_GGLA (#4894) This commit replaces the magic number used in export-lora.cpp with the one defined in llama.h, which is indirectly included via common.h. 
Signed-off-by: Daniel Bevenius --- examples/export-lora/export-lora.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index 58fbe204d..4cd5d99bb 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -245,9 +245,8 @@ static struct lora_data * load_lora(struct lora_info * info) { params_ggml.no_alloc = true; result->ctx = ggml_init(params_ggml); - uint32_t LLAMA_FILE_MAGIC_LORA = 0x67676C61; // 'ggla' uint32_t magic = file.read_u32(); - if (magic != LLAMA_FILE_MAGIC_LORA) { + if (magic != LLAMA_FILE_MAGIC_GGLA) { die_fmt("unexpected lora header file magic in '%s'", info->filename.c_str()); } uint32_t version = file.read_u32(); From 584d674be622fbf1578694ada6e62eebedbfd377 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 20:54:12 +0200 Subject: [PATCH 442/859] llama : remove redundant assert for StableLM (#4901) --- llama.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 29f8873f6..ce413f605 100644 --- a/llama.cpp +++ b/llama.cpp @@ -5530,7 +5530,6 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; From e7e4df031b9e29d4b55a4e0b0295187f6b213db1 Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 12 Jan 2024 20:07:38 +0100 Subject: [PATCH 443/859] llama : ggml-backend integration (#4766) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * llama : ggml-backend integration * ggml-backend : add names to buffers * fix unmap after loading * batched-bench : add tensor_split param * llama : check for null tensor_split * ggml-backend : increase GGML_MAX_BACKENDS * improve graph splitting, partial fix for --no-kv-offload * cuda : add ggml-backend split buffer support * cuda : do not create buffer types for devices that don't exist (fixes usage without CUDA devices available) * ggml : fix null backend dereference (#4807) * ggml : fix null backend dereference * ggml : also check ggml_backend_is_cpu * test-backend-ops : check buffer allocation failures * llama : add cparam (split_mode) and command line argument (--split-mode, -sm) to configure the split mode (none, layer or row) * ggml : fix mul_mat_id work size * llama : rewrite session kv load/set without graphs * minor * llama : only initialize used backends, free backends on context free * llama : abort ctx if cuda backend init fails * llama : rewrite lora with ggml-backend and compute on CPU ggml-ci * llama : only map to a backend buffer the region of the file mapping containing the tensors used in the buffer * opencl : add ggml-backend buffer type * cuda : only use batched_cublas with batched mat muls (fixes fp16 tg perf) * llama : on Metal, by default offload the full model ggml-ci * metal : page align the data ptr (#4854) * Apply suggestions from code review Co-authored-by: Johannes Gäßler * cuda : fix split buffer free * address review comments * llama-bench : add split-mode parameter * fix whitespace * opencl : fix double initialization * server : add --split-mode parameter * use async copy and compute to improve multi-gpu performance ggml-ci * use async memcpys to copy the graph outputs to the CPU * fix opencl * use a host buffer for the cpu compute buffer for faster copies to the gpu --------- Co-authored-by: Georgi Gerganov Co-authored-by: 
Johannes Gäßler --- common/common.cpp | 65 +- common/common.h | 1 + examples/batched-bench/batched-bench.cpp | 3 + examples/llama-bench/llama-bench.cpp | 146 +- examples/server/server.cpp | 40 +- ggml-alloc.c | 34 +- ggml-alloc.h | 4 +- ggml-backend-impl.h | 38 +- ggml-backend.c | 693 ++++--- ggml-backend.h | 60 +- ggml-cuda.cu | 907 +++++---- ggml-cuda.h | 26 +- ggml-impl.h | 2 + ggml-metal.m | 55 +- ggml-opencl.cpp | 335 +++- ggml-opencl.h | 16 +- ggml.c | 30 +- ggml.h | 9 +- llama.cpp | 2320 +++++++++------------- llama.h | 18 +- tests/test-backend-ops.cpp | 26 +- 21 files changed, 2533 insertions(+), 2295 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 062a8b4de..322b9f91e 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -543,9 +543,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD params.n_gpu_layers = std::stoi(argv[i]); -#else +#ifndef LLAMA_SUPPORTS_GPU_OFFLOAD fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); #endif @@ -554,9 +553,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD params.n_gpu_layers_draft = std::stoi(argv[i]); -#else +#ifndef LLAMA_SUPPORTS_GPU_OFFLOAD fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers-draft option will be ignored\n"); fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); #endif @@ -565,25 +563,44 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } -#ifdef GGML_USE_CUBLAS params.main_gpu = std::stoi(argv[i]); -#else - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. It is not possible to set a main GPU.\n"); -#endif +#ifndef GGML_USE_CUBLAS + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting the main GPU has no effect.\n"); +#endif // GGML_USE_CUBLAS + } else if (arg == "--split-mode" || arg == "-sm") { + if (++i >= argc) { + invalid_param = true; + break; + } + std::string arg_next = argv[i]; + if (arg_next == "none") { + params.split_mode = LLAMA_SPLIT_NONE; + } else if (arg_next == "layer") { + params.split_mode = LLAMA_SPLIT_LAYER; + } else if (arg_next == "row") { + params.split_mode = LLAMA_SPLIT_ROW; + } else { + invalid_param = true; + break; + } +#ifndef GGML_USE_CUBLAS + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting the split mode has no effect.\n"); +#endif // GGML_USE_CUBLAS } else if (arg == "--tensor-split" || arg == "-ts") { if (++i >= argc) { invalid_param = true; break; } -#ifdef GGML_USE_CUBLAS std::string arg_next = argv[i]; // split string by , and / const std::regex regex{R"([,/]+)"}; std::sregex_token_iterator it{arg_next.begin(), arg_next.end(), regex, -1}; std::vector split_arg{it, {}}; - GGML_ASSERT(split_arg.size() <= LLAMA_MAX_DEVICES); - + if (split_arg.size() >= LLAMA_MAX_DEVICES) { + invalid_param = true; + break; + } for (size_t i = 0; i < LLAMA_MAX_DEVICES; ++i) { if (i < split_arg.size()) { params.tensor_split[i] = std::stof(split_arg[i]); @@ -591,14 +608,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { params.tensor_split[i] = 0.0f; } } -#else - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. 
It is not possible to set a tensor split.\n"); -#endif // GGML_USE_CUBLAS - } else if (arg == "--no-mul-mat-q" || arg == "-nommq") { -#ifdef GGML_USE_CUBLAS - params.mul_mat_q = false; -#else - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Disabling mul_mat_q kernels has no effect.\n"); +#ifndef GGML_USE_CUBLAS + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting a tensor split has no effect.\n"); #endif // GGML_USE_CUBLAS } else if (arg == "--no-mmap") { params.use_mmap = false; @@ -915,14 +926,15 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" number of layers to store in VRAM\n"); printf(" -ngld N, --n-gpu-layers-draft N\n"); printf(" number of layers to store in VRAM for the draft model\n"); + printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); printf(" -ts SPLIT, --tensor-split SPLIT\n"); - printf(" how to split tensors across multiple GPUs, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for scratch and small tensors\n"); -#ifdef GGML_USE_CUBLAS - printf(" -nommq, --no-mul-mat-q\n"); - printf(" use " GGML_CUBLAS_NAME " instead of custom mul_mat_q " GGML_CUDA_NAME " kernels.\n"); - printf(" Not recommended since this is both slower and uses more VRAM.\n"); -#endif // GGML_USE_CUBLAS + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row) (default: %d)\n", params.main_gpu); #endif printf(" -gan N, --grp-attn-n N\n"); printf(" group-attention factor (default: %d)\n", params.grp_attn_n); @@ -1041,6 +1053,7 @@ struct llama_model_params llama_model_params_from_gpt_params(const gpt_params & mparams.n_gpu_layers = params.n_gpu_layers; } mparams.main_gpu = params.main_gpu; + mparams.split_mode = params.split_mode; mparams.tensor_split = params.tensor_split; mparams.use_mmap = params.use_mmap; mparams.use_mlock = params.use_mlock; diff --git a/common/common.h b/common/common.h index 1359e76ab..f29be5b5a 100644 --- a/common/common.h +++ b/common/common.h @@ -59,6 +59,7 @@ struct gpt_params { float p_split = 0.1f; // speculative decoding split probability int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) + llama_split_mode split_mode = LLAMA_SPLIT_LAYER; // how to split the model across GPUs int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs int32_t n_beams = 0; // if non-zero then use beam search of given width. 
diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 57596ed98..7924db267 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -88,7 +88,10 @@ int main(int argc, char ** argv) { llama_model_params model_params = llama_model_default_params(); + const std::vector t_split (LLAMA_MAX_DEVICES, 0.0f); + model_params.n_gpu_layers = n_gpu_layers; + model_params.tensor_split = t_split.data(); llama_model * model = llama_load_model_from_file(params.model.c_str(), model_params); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 7f7186cde..97325b5bd 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -128,6 +128,25 @@ static std::string get_gpu_info() { // command line params enum output_formats {CSV, JSON, MARKDOWN, SQL}; +static const char * output_format_str(output_formats format) { + switch (format) { + case CSV: return "csv"; + case JSON: return "json"; + case MARKDOWN: return "md"; + case SQL: return "sql"; + default: GGML_ASSERT(!"invalid output format"); + } +} + +static const char * split_mode_str(llama_split_mode mode) { + switch (mode) { + case LLAMA_SPLIT_NONE: return "none"; + case LLAMA_SPLIT_LAYER: return "layer"; + case LLAMA_SPLIT_ROW: return "row"; + default: GGML_ASSERT(!"invalid split mode"); + } +} + struct cmd_params { std::vector model; std::vector n_prompt; @@ -137,6 +156,7 @@ struct cmd_params { std::vector type_v; std::vector n_threads; std::vector n_gpu_layers; + std::vector split_mode; std::vector main_gpu; std::vector no_kv_offload; std::vector mul_mat_q; @@ -155,6 +175,7 @@ static const cmd_params cmd_params_defaults = { /* type_v */ {GGML_TYPE_F16}, /* n_threads */ {get_num_physical_cores()}, /* n_gpu_layers */ {99}, + /* split_mode */ {LLAMA_SPLIT_LAYER}, /* main_gpu */ {0}, /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, @@ -169,21 +190,22 @@ static void print_usage(int /* argc */, char ** argv) { printf("\n"); printf("options:\n"); printf(" -h, --help\n"); - printf(" -m, --model (default: %s)\n", join(cmd_params_defaults.model, ",").c_str()); - printf(" -p, --n-prompt (default: %s)\n", join(cmd_params_defaults.n_prompt, ",").c_str()); - printf(" -n, --n-gen (default: %s)\n", join(cmd_params_defaults.n_gen, ",").c_str()); - printf(" -b, --batch-size (default: %s)\n", join(cmd_params_defaults.n_batch, ",").c_str()); - printf(" -ctk , --cache-type-k (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_k, ggml_type_name), ",").c_str()); - printf(" -ctv , --cache-type-v (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_v, ggml_type_name), ",").c_str()); - printf(" -t, --threads (default: %s)\n", join(cmd_params_defaults.n_threads, ",").c_str()); - printf(" -ngl, --n-gpu-layers (default: %s)\n", join(cmd_params_defaults.n_gpu_layers, ",").c_str()); - printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); - printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); - printf(" -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); - printf(" -ts, --tensor_split \n"); - printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); - printf(" -o, --output (default: %s)\n", cmd_params_defaults.output_format == CSV ? "csv" : cmd_params_defaults.output_format == JSON ? "json" : cmd_params_defaults.output_format == MARKDOWN ? 
"md" : "sql"); - printf(" -v, --verbose (default: %s)\n", cmd_params_defaults.verbose ? "1" : "0"); + printf(" -m, --model (default: %s)\n", join(cmd_params_defaults.model, ",").c_str()); + printf(" -p, --n-prompt (default: %s)\n", join(cmd_params_defaults.n_prompt, ",").c_str()); + printf(" -n, --n-gen (default: %s)\n", join(cmd_params_defaults.n_gen, ",").c_str()); + printf(" -b, --batch-size (default: %s)\n", join(cmd_params_defaults.n_batch, ",").c_str()); + printf(" -ctk , --cache-type-k (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_k, ggml_type_name), ",").c_str()); + printf(" -ctv , --cache-type-v (default: %s)\n", join(transform_to_str(cmd_params_defaults.type_v, ggml_type_name), ",").c_str()); + printf(" -t, --threads (default: %s)\n", join(cmd_params_defaults.n_threads, ",").c_str()); + printf(" -ngl, --n-gpu-layers (default: %s)\n", join(cmd_params_defaults.n_gpu_layers, ",").c_str()); + printf(" -sm, --split-mode (default: %s)\n", join(transform_to_str(cmd_params_defaults.split_mode, split_mode_str), ",").c_str()); + printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); + printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); + printf(" -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); + printf(" -ts, --tensor_split (default: 0)\n"); + printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); + printf(" -o, --output (default: %s)\n", output_format_str(cmd_params_defaults.output_format)); + printf(" -v, --verbose (default: %s)\n", cmd_params_defaults.verbose ? "1" : "0"); printf("\n"); printf("Multiple values can be given for each parameter by separating them with ',' or by specifying the parameter multiple times.\n"); } @@ -306,6 +328,28 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { } auto p = split(argv[i], split_delim); params.n_gpu_layers.insert(params.n_gpu_layers.end(), p.begin(), p.end()); + } else if (arg == "-sm" || arg == "--split-mode") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + std::vector modes; + for (const auto & m : p) { + llama_split_mode mode; + if (m == "none") { + mode = LLAMA_SPLIT_NONE; + } else if (m == "layer") { + mode = LLAMA_SPLIT_LAYER; + } else if (m == "row") { + mode = LLAMA_SPLIT_ROW; + } else { + invalid_param = true; + break; + } + modes.push_back(mode); + } + params.split_mode.insert(params.split_mode.end(), modes.begin(), modes.end()); } else if (arg == "-mg" || arg == "--main-gpu") { if (++i >= argc) { invalid_param = true; @@ -392,6 +436,7 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.type_k.empty()) { params.type_k = cmd_params_defaults.type_k; } if (params.type_v.empty()) { params.type_v = cmd_params_defaults.type_v; } if (params.n_gpu_layers.empty()) { params.n_gpu_layers = cmd_params_defaults.n_gpu_layers; } + if (params.split_mode.empty()) { params.split_mode = cmd_params_defaults.split_mode; } if (params.main_gpu.empty()) { params.main_gpu = cmd_params_defaults.main_gpu; } if (params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } @@ -410,6 +455,7 @@ struct cmd_params_instance { ggml_type type_v; int n_threads; int n_gpu_layers; + llama_split_mode split_mode; int main_gpu; bool no_kv_offload; bool mul_mat_q; @@ -419,6 +465,7 @@ struct 
cmd_params_instance { llama_model_params mparams = llama_model_default_params(); mparams.n_gpu_layers = n_gpu_layers; + mparams.split_mode = split_mode; mparams.main_gpu = main_gpu; mparams.tensor_split = tensor_split.data(); @@ -428,6 +475,7 @@ struct cmd_params_instance { bool equal_mparams(const cmd_params_instance & other) const { return model == other.model && n_gpu_layers == other.n_gpu_layers && + split_mode == other.split_mode && main_gpu == other.main_gpu && tensor_split == other.tensor_split; } @@ -446,45 +494,13 @@ struct cmd_params_instance { } }; -static std::vector get_cmd_params_instances_int(const cmd_params & params, int n_gen, int n_prompt) { - std::vector instances; - - for (const auto & m : params.model) - for (const auto & nl : params.n_gpu_layers) - for (const auto & mg : params.main_gpu) - for (const auto & ts : params.tensor_split) - for (const auto & nb : params.n_batch) - for (const auto & tk : params.type_k) - for (const auto & tv : params.type_v) - for (const auto & mmq : params.mul_mat_q) - for (const auto & nkvo : params.no_kv_offload) - for (const auto & nt : params.n_threads) { - cmd_params_instance instance = { - /* .model = */ m, - /* .n_prompt = */ n_prompt, - /* .n_gen = */ n_gen, - /* .n_batch = */ nb, - /* .type_k = */ tk, - /* .type_v = */ tv, - /* .n_threads = */ nt, - /* .n_gpu_layers = */ nl, - /* .main_gpu = */ mg, - /* .no_kv_offload= */ nkvo, - /* .mul_mat_q = */ mmq, - /* .tensor_split = */ ts, - }; - instances.push_back(instance); - } - return instances; -} - static std::vector get_cmd_params_instances(const cmd_params & params) { std::vector instances; -#if 1 // this ordering minimizes the number of times that each model needs to be reloaded for (const auto & m : params.model) for (const auto & nl : params.n_gpu_layers) + for (const auto & sm : params.split_mode) for (const auto & mg : params.main_gpu) for (const auto & ts : params.tensor_split) for (const auto & nb : params.n_batch) @@ -506,6 +522,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .type_v = */ tv, /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, + /* .split_mode = */ sm, /* .main_gpu = */ mg, /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, @@ -527,6 +544,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .type_v = */ tv, /* .n_threads = */ nt, /* .n_gpu_layers = */ nl, + /* .split_mode = */ sm, /* .main_gpu = */ mg, /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, @@ -535,24 +553,6 @@ static std::vector get_cmd_params_instances(const cmd_param instances.push_back(instance); } } -#else - // this ordering separates the prompt and generation tests - for (const auto & n_prompt : params.n_prompt) { - if (n_prompt == 0) { - continue; - } - auto instances_prompt = get_cmd_params_instances_int(params, 0, n_prompt); - instances.insert(instances.end(), instances_prompt.begin(), instances_prompt.end()); - } - - for (const auto & n_gen : params.n_gen) { - if (n_gen == 0) { - continue; - } - auto instances_gen = get_cmd_params_instances_int(params, n_gen, 0); - instances.insert(instances.end(), instances_gen.begin(), instances_gen.end()); - } -#endif return instances; } @@ -576,6 +576,7 @@ struct test { ggml_type type_k; ggml_type type_v; int n_gpu_layers; + llama_split_mode split_mode; int main_gpu; bool no_kv_offload; bool mul_mat_q; @@ -597,6 +598,7 @@ struct test { type_k = inst.type_k; type_v = inst.type_v; n_gpu_layers = inst.n_gpu_layers; + split_mode = inst.split_mode; main_gpu = inst.main_gpu; no_kv_offload = inst.no_kv_offload; mul_mat_q 
= inst.mul_mat_q; @@ -660,7 +662,8 @@ struct test { "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", - "n_gpu_layers", "main_gpu", "no_kv_offload", + "n_gpu_layers", "split_mode", + "main_gpu", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", @@ -711,7 +714,8 @@ struct test { cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), - std::to_string(n_gpu_layers), std::to_string(main_gpu), std::to_string(no_kv_offload), + std::to_string(n_gpu_layers), split_mode_str(split_mode), + std::to_string(main_gpu), std::to_string(no_kv_offload), std::to_string(mul_mat_q), tensor_split_str, std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), @@ -867,6 +871,9 @@ struct markdown_printer : public printer { if (field == "n_gpu_layers") { return "ngl"; } + if (field == "split_mode") { + return "sm"; + } if (field == "n_threads") { return "threads"; } @@ -907,6 +914,9 @@ struct markdown_printer : public printer { if (params.main_gpu.size() > 1 || params.main_gpu != cmd_params_defaults.main_gpu) { fields.push_back("main_gpu"); } + if (params.split_mode.size() > 1 || params.split_mode != cmd_params_defaults.split_mode) { + fields.push_back("split_mode"); + } if (params.mul_mat_q.size() > 1 || params.mul_mat_q != cmd_params_defaults.mul_mat_q) { fields.push_back("mul_mat_q"); } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1d30a15a6..c1ab8f9dc 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2005,12 +2005,15 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, #ifdef LLAMA_SUPPORTS_GPU_OFFLOAD printf(" -ngl N, --n-gpu-layers N\n"); printf(" number of layers to store in VRAM\n"); + printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); printf(" -ts SPLIT --tensor-split SPLIT\n"); - printf(" how to split tensors across multiple GPUs, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for scratch and small tensors\n"); - printf(" -nommq, --no-mul-mat-q\n"); - printf(" use cuBLAS instead of custom mul_mat_q CUDA kernels.\n"); - printf(" Not recommended since this is both slower and uses more VRAM.\n"); + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 
3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row)\n"); #endif printf(" -m FNAME, --model FNAME\n"); printf(" model path (default: %s)\n", params.model.c_str()); @@ -2253,6 +2256,33 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, "See main README.md for information on enabling GPU BLAS support", {{"n_gpu_layers", params.n_gpu_layers}}); #endif + } + else if (arg == "--split-mode" || arg == "-sm") + { + if (++i >= argc) { + invalid_param = true; + break; + } + std::string arg_next = argv[i]; + if (arg_next == "none") + { + params.split_mode = LLAMA_SPLIT_NONE; + } + else if (arg_next == "layer") + { + params.split_mode = LLAMA_SPLIT_LAYER; + } + else if (arg_next == "row") + { + params.split_mode = LLAMA_SPLIT_ROW; + } + else { + invalid_param = true; + break; + } +#ifndef GGML_USE_CUBLAS + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS. Setting the split mode has no effect.\n"); +#endif // GGML_USE_CUBLAS } else if (arg == "--tensor-split" || arg == "-ts") { diff --git a/ggml-alloc.c b/ggml-alloc.c index a27dd54b0..89b85d348 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -102,8 +102,6 @@ void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { } } - AT_PRINTF("block %d\n", best_fit_block); - if (best_fit_block == -1) { // the last block is our last resort struct free_block * block = &alloc->free_blocks[alloc->n_free_blocks - 1]; @@ -117,6 +115,7 @@ void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { return; } } + struct free_block * block = &alloc->free_blocks[best_fit_block]; void * addr = block->addr; block->addr = (char*)block->addr + size; @@ -129,6 +128,8 @@ void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { } } + AT_PRINTF("block %d, addr %p\n", best_fit_block, addr); + tensor->data = addr; tensor->buffer = alloc->buffer; if (!alloc->measure) { @@ -229,6 +230,7 @@ void ggml_tallocr_reset(ggml_tallocr_t alloc) { alloc->free_blocks[0].size = SIZE_MAX/2; // restrict maximum size of a measure allocator to half size_t max to avoid overflows } else { alloc->free_blocks[0].size = ggml_backend_buffer_get_size(alloc->buffer) - align_offset; + ggml_backend_buffer_reset(alloc->buffer); } } @@ -263,9 +265,9 @@ ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment) { return alloc; } -ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend) { +ggml_tallocr_t ggml_tallocr_new_measure_from_buft(struct ggml_backend_buffer_type * buft) { // create a backend buffer to get the correct tensor allocation sizes - ggml_backend_buffer_t buffer = ggml_backend_alloc_buffer(backend, 1); + ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, 1); // TODO: move alloc initialization to a common ggml_tallocr_new_impl function ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); @@ -275,13 +277,22 @@ ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backe return alloc; } -ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size) { - ggml_backend_buffer_t buffer = ggml_backend_alloc_buffer(backend, size); +ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend) { + return ggml_tallocr_new_measure_from_buft(ggml_backend_get_default_buffer_type(backend)); +} + +ggml_tallocr_t ggml_tallocr_new_from_buft(struct ggml_backend_buffer_type * buft, size_t 
size) { + // create a backend buffer to get the correct tensor allocation sizes + ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, size); ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); alloc->buffer_owned = true; return alloc; } +ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size) { + return ggml_tallocr_new_from_buft(ggml_backend_get_default_buffer_type(backend), size); +} + ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer) { ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); @@ -779,10 +790,21 @@ ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte if (nbytes == 0) { // all the tensors in the context are already allocated +#ifndef NDEBUG + fprintf(stderr, "%s: all tensors in the context are already allocated\n", __func__); +#endif return NULL; } ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, nbytes); + if (buffer == NULL) { + // failed to allocate buffer +#ifndef NDEBUG + fprintf(stderr, "%s: failed to allocate buffer\n", __func__); +#endif + return NULL; + } + ggml_tallocr_t tallocr = ggml_tallocr_new_from_buffer(buffer); for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { diff --git a/ggml-alloc.h b/ggml-alloc.h index 64a412468..4e5997521 100644 --- a/ggml-alloc.h +++ b/ggml-alloc.h @@ -52,8 +52,10 @@ typedef struct ggml_tallocr * ggml_tallocr_t; GGML_API ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment); GGML_API ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment); -GGML_API ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer); +GGML_API ggml_tallocr_t ggml_tallocr_new_from_buft(struct ggml_backend_buffer_type * buft, size_t size); GGML_API ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size); // allocates an owned buffer +GGML_API ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer); +GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_buft(struct ggml_backend_buffer_type * buft); GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend); GGML_API struct ggml_backend_buffer * ggml_tallocr_get_buffer(ggml_tallocr_t talloc); diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index ca21b4743..1db32901f 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -16,9 +16,10 @@ extern "C" { typedef void * ggml_backend_buffer_type_context_t; struct ggml_backend_buffer_type_i { + const char * (*get_name) (ggml_backend_buffer_type_t buft); ggml_backend_buffer_t (*alloc_buffer) (ggml_backend_buffer_type_t buft, size_t size); size_t (*get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment - size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding + size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, const struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding bool (*supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend // check if tensor data is in host memory // should be equivalent to supports_backend(buft, ggml_backend_cpu_init()) @@ -34,16 +35,15 @@ extern "C" { typedef void * ggml_backend_buffer_context_t; struct ggml_backend_buffer_i { - void (*free_buffer) 
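With the *_from_buft entry points, measuring and allocating no longer require a live backend, only a buffer type; the backend variants are now thin wrappers over ggml_backend_get_default_buffer_type. A sketch of the measure-then-allocate flow under the new API (the graph walk in the middle is elided):

    ggml_backend_buffer_type_t buft = ggml_backend_cpu_buffer_type();
    ggml_tallocr_t talloc = ggml_tallocr_new_measure_from_buft(buft);
    // ... call ggml_tallocr_alloc on every tensor of the graph to record sizes ...
    size_t needed = ggml_tallocr_max_size(talloc);
    ggml_tallocr_free(talloc);
    talloc = ggml_tallocr_new_from_buft(buft, needed);  // real, owned buffer this time
    // ... repeat the allocation pass, then use the tensors ...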
(ggml_backend_buffer_t buffer); - //void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras - void * (*get_base) (ggml_backend_buffer_t buffer); - void (*init_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - // (optional) copy tensor between different buffer-type, allow for single-copy tranfers - void (*cpy_tensor_from)(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*cpy_tensor_to) (ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*clear) (ggml_backend_buffer_t buffer, uint8_t value); + const char * (*get_name) (ggml_backend_buffer_t buffer); + void (*free_buffer)(ggml_backend_buffer_t buffer); + void * (*get_base) (ggml_backend_buffer_t buffer); + void (*init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*cpy_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst); // dst is in the buffer, src may be in any buffer + void (*clear) (ggml_backend_buffer_t buffer, uint8_t value); + void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras }; struct ggml_backend_buffer { @@ -51,6 +51,7 @@ extern "C" { ggml_backend_buffer_type_t buft; ggml_backend_buffer_context_t context; size_t size; + enum ggml_backend_buffer_usage usage; }; ggml_backend_buffer_t ggml_backend_buffer_init( @@ -59,6 +60,8 @@ extern "C" { ggml_backend_buffer_context_t context, size_t size); + // do not use directly, use ggml_backend_tensor_copy instead + bool ggml_backend_buffer_copy_tensor(const struct ggml_tensor * src, struct ggml_tensor * dst); // // Backend @@ -74,22 +77,20 @@ extern "C" { // buffer allocation ggml_backend_buffer_type_t (*get_default_buffer_type)(ggml_backend_t backend); - // (optional) asynchroneous tensor data access + // (optional) asynchronous tensor data access void (*set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); void (*get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*cpy_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * src, struct ggml_tensor * dst); - // (optional) asynchroneous tensor copy - void (*cpy_tensor_from_async)(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); - void (*cpy_tensor_to_async) (ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst); - + // (optional) complete all pending operations void (*synchronize)(ggml_backend_t backend); // compute graph with a plan - ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, struct ggml_cgraph * cgraph); + ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, const struct ggml_cgraph * cgraph); void (*graph_plan_free) (ggml_backend_t backend, ggml_backend_graph_plan_t plan); 
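After this rework a buffer implementation exposes its name, a single bool-returning cpy_tensor (dst lives in the buffer, src may be in any buffer), and an optional reset hook for tensor extras. An illustrative vtable for a hypothetical backend; every my_* symbol is a placeholder:

    static struct ggml_backend_buffer_i my_backend_buffer_i = {
        /* .get_name    = */ my_buffer_get_name,
        /* .free_buffer = */ my_buffer_free_buffer,
        /* .get_base    = */ my_buffer_get_base,
        /* .init_tensor = */ NULL,  // no per-tensor setup needed
        /* .set_tensor  = */ my_buffer_set_tensor,
        /* .get_tensor  = */ my_buffer_get_tensor,
        /* .cpy_tensor  = */ NULL,  // ggml_backend_tensor_copy falls back to a get/set staging copy
        /* .clear       = */ my_buffer_clear,
        /* .reset       = */ NULL,  // only needed when tensors carry extras
    };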
void (*graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); - // compute graph without a plan + // compute graph without a plan (async) bool (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); // check if the backend supports an operation @@ -102,7 +103,6 @@ extern "C" { ggml_backend_context_t context; }; - // // Backend registry // diff --git a/ggml-backend.c b/ggml-backend.c index 53e741cb8..4c2d8b0b2 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -15,6 +15,10 @@ // backend buffer type +const char * ggml_backend_buft_name(ggml_backend_buffer_type_t buft) { + return buft->iface.get_name(buft); +} + ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { return buft->iface.alloc_buffer(buft, size); } @@ -58,11 +62,16 @@ ggml_backend_buffer_t ggml_backend_buffer_init( /* .buft = */ buft, /* .context = */ context, /* .size = */ size, + /* .usage = */ GGML_BACKEND_BUFFER_USAGE_ANY }; return buffer; } +const char * ggml_backend_buffer_name(ggml_backend_buffer_t buffer) { + return buffer->iface.get_name(buffer); +} + void ggml_backend_buffer_free(ggml_backend_buffer_t buffer) { if (buffer == NULL) { return; @@ -94,11 +103,11 @@ void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_t } size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer) { - return ggml_backend_buft_get_alignment(ggml_backend_buffer_type(buffer)); + return ggml_backend_buft_get_alignment(ggml_backend_buffer_get_type(buffer)); } size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { - return ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type(buffer), tensor); + return ggml_backend_buft_get_alloc_size(ggml_backend_buffer_get_type(buffer), tensor); } void ggml_backend_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { @@ -106,13 +115,31 @@ void ggml_backend_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { } bool ggml_backend_buffer_is_host(ggml_backend_buffer_t buffer) { - return ggml_backend_buft_is_host(ggml_backend_buffer_type(buffer)); + return ggml_backend_buft_is_host(ggml_backend_buffer_get_type(buffer)); } -ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer) { +void ggml_backend_buffer_set_usage(ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage) { + buffer->usage = usage; +} + +ggml_backend_buffer_type_t ggml_backend_buffer_get_type(ggml_backend_buffer_t buffer) { return buffer->buft; } +void ggml_backend_buffer_reset(ggml_backend_buffer_t buffer) { + if (buffer->iface.reset) { + buffer->iface.reset(buffer); + } +} + +bool ggml_backend_buffer_copy_tensor(const struct ggml_tensor * src, struct ggml_tensor * dst) { + ggml_backend_buffer_t dst_buf = dst->view_src ? 
dst->view_src->buffer : dst->buffer; + if (dst_buf->iface.cpy_tensor) { + return src->buffer->iface.cpy_tensor(dst_buf, src, dst); + } + return false; +} + // backend const char * ggml_backend_name(ggml_backend_t backend) { @@ -146,30 +173,42 @@ void ggml_backend_tensor_set_async(ggml_backend_t backend, struct ggml_tensor * GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - backend->iface.set_tensor_async(backend, tensor, data, offset, size); + if (backend->iface.set_tensor_async == NULL) { + ggml_backend_tensor_set(tensor, data, offset, size); + } else { + backend->iface.set_tensor_async(backend, tensor, data, offset, size); + } } void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - backend->iface.get_tensor_async(backend, tensor, data, offset, size); + if (backend->iface.get_tensor_async == NULL) { + ggml_backend_tensor_get(tensor, data, offset, size); + } else { + backend->iface.get_tensor_async(backend, tensor, data, offset, size); + } } void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + ggml_backend_buffer_t buf = tensor->view_src ? tensor->view_src->buffer : tensor->buffer; + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); - GGML_ASSERT(tensor->buffer != NULL && "tensor buffer not set"); + GGML_ASSERT(buf != NULL && "tensor buffer not set"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); - tensor->buffer->iface.set_tensor(tensor->buffer, tensor, data, offset, size); + tensor->buffer->iface.set_tensor(buf, tensor, data, offset, size); } void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { + ggml_backend_buffer_t buf = tensor->view_src ? 
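The async accessors now degrade to the blocking path when a backend leaves set_tensor_async/get_tensor_async NULL, so callers can use one code path everywhere. A sketch, assuming an allocated tensor t and a staging pointer data:

    // safe on any backend: falls back to ggml_backend_tensor_set internally
    ggml_backend_tensor_set_async(backend, t, data, 0, ggml_nbytes(t));
    ggml_backend_synchronize(backend);  // flush the transfer before reusing `data`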
tensor->view_src->buffer : tensor->buffer; + GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); GGML_ASSERT(tensor->buffer != NULL && "tensor buffer not set"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); - tensor->buffer->iface.get_tensor(tensor->buffer, tensor, data, offset, size); + tensor->buffer->iface.get_tensor(buf, tensor, data, offset, size); } void ggml_backend_synchronize(ggml_backend_t backend) { @@ -190,19 +229,10 @@ void ggml_backend_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_pla void ggml_backend_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { backend->iface.graph_plan_compute(backend, plan); - - // TODO: optional sync - ggml_backend_synchronize(backend); } bool ggml_backend_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { - if (!backend->iface.graph_compute(backend, cgraph)) { - return false; - } - - // TODO: optional sync - ggml_backend_synchronize(backend); - return true; + return backend->iface.graph_compute(backend, cgraph); } bool ggml_backend_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { @@ -227,28 +257,20 @@ static bool ggml_are_same_layout(const struct ggml_tensor * a, const struct ggml } void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst) { - //printf("src: %s ne: [%d %d %d %d] nb: [%d %d %d %d]\n", src->name, (int)src->ne[0], (int)src->ne[1], (int)src->ne[2], (int)src->ne[3], (int)src->nb[0], (int)src->nb[1], (int)src->nb[2], (int)src->nb[3]); - //printf("dst: %s ne: [%d %d %d %d] nb: [%d %d %d %d]\n", dst->name, (int)dst->ne[0], (int)dst->ne[1], (int)dst->ne[2], (int)dst->ne[3], (int)dst->nb[0], (int)dst->nb[1], (int)dst->nb[2], (int)dst->nb[3]); GGML_ASSERT(ggml_are_same_layout(src, dst) && "cannot copy tensors with different layouts"); - // fprintf(stderr, "cpy tensor %s from %s to %s (%lu bytes)\n", src->name, ggml_backend_name(src->backend), ggml_backend_name(dst->backend), ggml_nbytes(src)); - if (src == dst) { return; } - // TODO: allow backends to support copy to/from same backend - - if (dst->buffer->iface.cpy_tensor_from != NULL) { - dst->buffer->iface.cpy_tensor_from(dst->buffer, src, dst); - } else if (src->buffer->iface.cpy_tensor_to != NULL) { - src->buffer->iface.cpy_tensor_to(src->buffer, src, dst); - } else { - // shouldn't be hit when copying from/to CPU - #ifndef NDEBUG - fprintf(stderr, "ggml_backend_tensor_copy: neither cpy_tensor_from nor cpy_tensor_to " - "are implemented for %s and %s, falling back to get/set\n", src->name, dst->name); - #endif + if (ggml_backend_buffer_is_host(src->buffer)) { + ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); + } else if (ggml_backend_buffer_is_host(dst->buffer)) { + ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); + } else if (!ggml_backend_buffer_copy_tensor(src, dst)) { +#ifndef NDEBUG + fprintf(stderr, "%s: warning: slow copy from %s to %s\n", __func__, ggml_backend_buffer_name(src->buffer), ggml_backend_buffer_name(dst->buffer)); +#endif size_t nbytes = ggml_nbytes(src); void * data = malloc(nbytes); ggml_backend_tensor_get(src, data, 0, nbytes); @@ -257,6 +279,31 @@ void ggml_backend_tensor_copy(struct ggml_tensor * src, struct ggml_tensor * dst } } +void ggml_backend_tensor_copy_async(ggml_backend_t backend, struct ggml_tensor * src, struct ggml_tensor * dst) { + GGML_ASSERT(ggml_are_same_layout(src, dst) && "cannot copy tensors with different layouts"); + + if (src == dst) { + return; + } + + if 
(ggml_backend_buft_supports_backend(src->buffer->buft, backend) && ggml_backend_buft_supports_backend(dst->buffer->buft, backend)) { + if (backend->iface.cpy_tensor_async != NULL) { + if (backend->iface.cpy_tensor_async(backend, src, dst)) { + return; + } + } + } + + size_t nbytes = ggml_nbytes(src); + if (ggml_backend_buffer_is_host(src->buffer)) { + ggml_backend_tensor_set_async(backend, dst, src->data, 0, nbytes); + } + else { + ggml_backend_tensor_copy(src, dst); + } +} + + // backend registry #define GGML_MAX_BACKENDS_REG 16 @@ -392,6 +439,12 @@ ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size) { // backend CPU +static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { + return "CPU"; + + GGML_UNUSED(buffer); +} + static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { return (void *)buffer->context; } @@ -412,14 +465,12 @@ static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, con GGML_UNUSED(buffer); } -static void ggml_backend_cpu_buffer_cpy_tensor_from(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); - - GGML_UNUSED(buffer); -} - -static void ggml_backend_cpu_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); +static bool ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { + if (ggml_backend_buffer_is_host(src->buffer)) { + memcpy(dst->data, src->data, ggml_nbytes(src)); + return true; + } + return false; GGML_UNUSED(buffer); } @@ -429,30 +480,38 @@ static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t } static struct ggml_backend_buffer_i cpu_backend_buffer_i = { + /* .get_name = */ ggml_backend_cpu_buffer_name, /* .free_buffer = */ ggml_backend_cpu_buffer_free_buffer, /* .get_base = */ ggml_backend_cpu_buffer_get_base, /* .init_tensor = */ NULL, // no initialization required /* .set_tensor = */ ggml_backend_cpu_buffer_set_tensor, /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, - /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, - /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, + /* .cpy_tensor = */ ggml_backend_cpu_buffer_cpy_tensor, /* .clear = */ ggml_backend_cpu_buffer_clear, + /* .reset = */ NULL, }; // for buffers from ptr, free is not called static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { + /* .get_name = */ ggml_backend_cpu_buffer_name, /* .free_buffer = */ NULL, // ptr is not owned by the buffer, so it does not need to be freed /* .get_base = */ ggml_backend_cpu_buffer_get_base, /* .init_tensor = */ NULL, // no initialization required /* .set_tensor = */ ggml_backend_cpu_buffer_set_tensor, /* .get_tensor = */ ggml_backend_cpu_buffer_get_tensor, - /* .cpy_tensor_from = */ ggml_backend_cpu_buffer_cpy_tensor_from, - /* .cpy_tensor_to = */ ggml_backend_cpu_buffer_cpy_tensor_to, + /* .cpy_tensor = */ ggml_backend_cpu_buffer_cpy_tensor, /* .clear = */ ggml_backend_cpu_buffer_clear, + /* .reset = */ NULL, }; static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 +static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "CPU"; + + GGML_UNUSED(buft); +} + static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t 
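The get_name hooks on both the buffer and the buffer type make allocation failures and slow-copy warnings much easier to attribute. A small introspection sketch against the CPU backend:

    ggml_backend_buffer_type_t buft = ggml_backend_cpu_buffer_type();
    ggml_backend_buffer_t buf = ggml_backend_buft_alloc_buffer(buft, 16*1024*1024);
    fprintf(stderr, "buft=%s buffer=%s align=%zu host=%d\n",
            ggml_backend_buft_name(buft), ggml_backend_buffer_name(buf),
            ggml_backend_buft_get_alignment(buft), ggml_backend_buffer_is_host(buf) ? 1 : 0);
    ggml_backend_buffer_free(buf);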
buft, size_t size) { size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? @@ -483,6 +542,7 @@ static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type = { /* .iface = */ { + /* .get_name = */ ggml_backend_cpu_buffer_type_get_name, /* .alloc_buffer = */ ggml_backend_cpu_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes @@ -501,6 +561,18 @@ ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { #include +static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "CPU_HBM"; + + GGML_UNUSED(buft); +} + +static const char * ggml_backend_cpu_hbm_buffer_get_name(ggml_backend_buffer_t buf) { + return "CPU_HBM"; + + GGML_UNUSED(buf); +} + static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { hbw_free(buffer->context); } @@ -514,17 +586,18 @@ static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_ return NULL; } - // FIXME: this is a hack to avoid having to implement a new buffer type ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); buffer->buft = buft; + buffer->iface.get_name = ggml_backend_cpu_hbm_buffer_get_name; buffer->iface.free_buffer = ggml_backend_cpu_hbm_buffer_free_buffer; return buffer; } -ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type() { +ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type_hbm = { /* .iface = */ { + /* .get_name = */ ggml_backend_cpu_hbm_buffer_type_get_name, /* .alloc_buffer = */ ggml_backend_cpu_hbm_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cpu_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // defaults to ggml_nbytes @@ -568,7 +641,7 @@ struct ggml_backend_plan_cpu { struct ggml_cgraph cgraph; }; -static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, const struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_backend_plan_cpu * cpu_plan = malloc(sizeof(struct ggml_backend_plan_cpu)); @@ -634,8 +707,7 @@ static struct ggml_backend_i cpu_backend_i = { /* .get_default_buffer_type = */ ggml_backend_cpu_get_default_buffer_type, /* .set_tensor_async = */ NULL, /* .get_tensor_async = */ NULL, - /* .cpy_tensor_from_async = */ NULL, - /* .cpy_tensor_to_async = */ NULL, + /* .cpy_tensor_async = */ NULL, /* .synchronize = */ NULL, /* .graph_plan_create = */ ggml_backend_cpu_graph_plan_create, /* .graph_plan_free = */ ggml_backend_cpu_graph_plan_free, @@ -661,7 +733,7 @@ ggml_backend_t ggml_backend_cpu_init(void) { } bool ggml_backend_is_cpu(ggml_backend_t backend) { - return backend->iface.get_name == ggml_backend_cpu_name; + return backend && backend->iface.get_name == ggml_backend_cpu_name; } void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { @@ -685,7 +757,7 @@ static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user // scheduler -#define GGML_MAX_BACKENDS 4 +#define 
GGML_MAX_BACKENDS 16 #define GGML_MAX_SPLITS 256 #define GGML_MAX_SPLIT_INPUTS 16 @@ -695,21 +767,29 @@ struct ggml_backend_sched_split { int i_end; struct ggml_tensor * inputs[GGML_MAX_SPLIT_INPUTS]; int n_inputs; + // graph view of this split struct ggml_cgraph graph; }; struct ggml_backend_sched { + bool is_reset; // true if the scheduler has been reset since the last graph split + int n_backends; ggml_backend_t backends[GGML_MAX_BACKENDS]; + ggml_backend_buffer_type_t bufts[GGML_MAX_BACKENDS]; ggml_tallocr_t tallocs[GGML_MAX_BACKENDS]; ggml_gallocr_t galloc; + // hash keys of the nodes in the graph struct ggml_hash_set hash_set; - ggml_tallocr_t * node_talloc; // [hash_set.size] - struct ggml_tensor * (* node_copies)[GGML_MAX_BACKENDS]; // [hash_set.size][GGML_MAX_BACKENDS] + // hash values (arrays of [hash_set.size]) + ggml_tallocr_t * node_talloc; // tallocr assigned to each node (indirectly this is the backend) + struct ggml_tensor * (* node_copies)[GGML_MAX_BACKENDS]; // copies of each node for each destination backend + // copy of the graph with modified inputs struct ggml_cgraph * graph; + struct ggml_backend_sched_split splits[GGML_MAX_SPLITS]; int n_splits; @@ -750,14 +830,22 @@ static int sched_allocr_prio(ggml_backend_sched_t sched, ggml_tallocr_t allocr) return INT_MAX; } -static ggml_backend_t get_buffer_backend(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { +static ggml_tallocr_t sched_allocr_from_buffer(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { if (buffer == NULL) { return NULL; } + + // check if this is already allocated in an allocr buffer (from user manual allocations) + for (int i = 0; i < sched->n_backends; i++) { + if (ggml_tallocr_get_buffer(sched->tallocs[i]) == buffer) { + return sched->tallocs[i]; + } + } + // find highest prio backend that supports the buffer type for (int i = 0; i < sched->n_backends; i++) { if (ggml_backend_buft_supports_backend(buffer->buft, sched->backends[i])) { - return sched->backends[i]; + return sched->tallocs[i]; } } GGML_ASSERT(false && "tensor buffer type not supported by any backend"); @@ -767,7 +855,6 @@ static ggml_backend_t get_allocr_backend(ggml_backend_sched_t sched, ggml_talloc if (allocr == NULL) { return NULL; } - // find highest prio backend that supports the buffer type for (int i = 0; i < sched->n_backends; i++) { if (sched->tallocs[i] == allocr) { return sched->backends[i]; @@ -777,7 +864,7 @@ static ggml_backend_t get_allocr_backend(ggml_backend_sched_t sched, ggml_talloc } #if 0 -static char causes[GGML_DEFAULT_GRAPH_SIZE*8 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug, remove +static char causes[GGML_DEFAULT_GRAPH_SIZE*16 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug only #define SET_CAUSE(node, ...) sprintf(causes[hash_id(node)], __VA_ARGS__) #define GET_CAUSE(node) causes[hash_id(node)] #else @@ -786,45 +873,37 @@ static char causes[GGML_DEFAULT_GRAPH_SIZE*8 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_IN #endif // returns the backend that should be used for the node based on the current locations -static ggml_backend_t sched_backend_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * node) { - // if the dst tensor is already allocated in a buffer, we must assume that it is critical to keep it there - // ie. kv cache updates - // note that this doesn't allow fallback to CPU. need to add output tensors to the splits to copy the data back to the original backend. 
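The buffer usage flag is what drives the rewritten assignment logic: sched_allocr_from_cur below pins any op that reads from a buffer tagged GGML_BACKEND_BUFFER_USAGE_WEIGHTS to the backend that owns that buffer. A sketch of how a model loader might tag its weight buffers (ctx_w and buft are assumed to exist):

    ggml_backend_buffer_t buf_w = ggml_backend_alloc_ctx_tensors_from_buft(ctx_w, buft);
    if (buf_w != NULL) {
        // keep matmuls and other weight consumers on the backend holding the weights
        ggml_backend_buffer_set_usage(buf_w, GGML_BACKEND_BUFFER_USE_WEIGHTS == 0 ? GGML_BACKEND_BUFFER_USAGE_WEIGHTS : GGML_BACKEND_BUFFER_USAGE_WEIGHTS);
    }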
+static ggml_tallocr_t sched_allocr_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * node) { + // assign pre-allocated nodes to their backend // dst - ggml_backend_t cur_backend = get_buffer_backend(sched, node->buffer); - if (cur_backend != NULL) { + ggml_tallocr_t cur_allocr = sched_allocr_from_buffer(sched, node->buffer); + if (cur_allocr != NULL) { SET_CAUSE(node, "1.dst"); - return cur_backend; + return cur_allocr; } - // view_src - if (node->view_src != NULL && get_buffer_backend(sched, node->view_src->buffer) != NULL) { - SET_CAUSE(node, "1.vsrc"); - return get_buffer_backend(sched, node->view_src->buffer); + if (node->view_src != NULL) { + cur_allocr = sched_allocr_from_buffer(sched, node->view_src->buffer); + if (cur_allocr != NULL) { + SET_CAUSE(node, "1.vsrc"); + return cur_allocr; + } } - - // src - int cur_prio = INT_MAX; - size_t cur_size = 0; - + // assign nodes that use weights to the backend of the weights for (int i = 0; i < GGML_MAX_SRC; i++) { const struct ggml_tensor * src = node->src[i]; if (src == NULL) { break; } - ggml_backend_t src_backend = get_buffer_backend(sched, src->buffer); - if (src_backend != NULL) { - int src_prio = sched_backend_prio(sched, src_backend); - size_t src_size = ggml_nbytes(src); - if (src_prio < cur_prio && src_size >= cur_size) { - cur_prio = src_prio; - cur_size = src_size; - cur_backend = src_backend; - SET_CAUSE(node, "1.src%d", i); - } + if (src->buffer != NULL && src->buffer->usage == GGML_BACKEND_BUFFER_USAGE_WEIGHTS) { + ggml_tallocr_t src_allocr = sched_allocr_from_buffer(sched, src->buffer); + // operations with weights are always run on the same backend as the weights + SET_CAUSE(node, "1.wgt%d", i); + return src_allocr; } } - return cur_backend; + + return NULL; } static char * fmt_size(size_t size) { @@ -857,7 +936,7 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra } ggml_tallocr_t node_allocr = node_allocr(node); ggml_backend_t node_backend = node_allocr ? get_allocr_backend(sched, node_allocr) : NULL; // FIXME: - fprintf(stderr, "node #%3d (%10.10s): %20.20s (%4.4s) [%4.4s %8.8s]:", i, ggml_op_name(node->op), node->name, + fprintf(stderr, "node #%3d (%10.10s): %20.20s (%5.5s) [%5.5s %8.8s]:", i, ggml_op_name(node->op), node->name, fmt_size(ggml_nbytes(node)), node_allocr ? ggml_backend_name(node_backend) : "NULL", GET_CAUSE(node)); for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; @@ -866,7 +945,7 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra } ggml_tallocr_t src_allocr = node_allocr(src); ggml_backend_t src_backend = src_allocr ? get_allocr_backend(sched, src_allocr) : NULL; - fprintf(stderr, " %20.20s (%4.4s) [%4.4s %8.8s]", src->name, + fprintf(stderr, " %20.20s (%5.5s) [%5.5s %8.8s]", src->name, fmt_size(ggml_nbytes(src)), src_backend ? 
ggml_backend_name(src_backend) : "NULL", GET_CAUSE(src)); } fprintf(stderr, "\n"); @@ -882,15 +961,17 @@ static struct ggml_tensor * ggml_dup_tensor_layout(struct ggml_context * ctx, co return dup; } + +//#define DEBUG_PASS1 +//#define DEBUG_PASS2 +//#define DEBUG_PASS3 +//#define DEBUG_PASS4 + // assigns backends to ops and splits the graph into subgraphs that can be computed on the same backend -// TODO: merge passes static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { - // reset state - size_t hash_size = sched->hash_set.size; - memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); - memset(sched->node_talloc, 0, sizeof(sched->node_talloc[0]) * hash_size); - memset(sched->node_copies, 0, sizeof(sched->node_copies[0]) * hash_size); + // reset splits sched->n_splits = 0; + sched->is_reset = false; struct ggml_init_params params = { /* .mem_size = */ sizeof(sched->context_buffer), @@ -898,26 +979,22 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g /* .no_alloc = */ true }; - if (sched->ctx != NULL) { - ggml_free(sched->ctx); - } + ggml_free(sched->ctx); sched->ctx = ggml_init(params); + if (sched->ctx == NULL) { + fprintf(stderr, "%s: failed to initialize context\n", __func__); + GGML_ASSERT(false); + } - // pass 1: assign backends to ops with allocated inputs + // pass 1: assign backends to ops with pre-allocated inputs for (int i = 0; i < graph->n_leafs; i++) { struct ggml_tensor * leaf = graph->leafs[i]; if (node_allocr(leaf) != NULL) { // do not overwrite user assignments continue; } - ggml_backend_t leaf_backend = get_buffer_backend(sched, leaf->buffer); - if (leaf_backend == NULL && leaf->view_src != NULL) { - leaf_backend = get_buffer_backend(sched, leaf->view_src->buffer); - } - if (leaf_backend != NULL) { - node_allocr(leaf) = ggml_backend_sched_get_tallocr(sched, leaf_backend); - } + node_allocr(leaf) = sched_allocr_from_cur(sched, leaf); } for (int i = 0; i < graph->n_nodes; i++) { @@ -926,50 +1003,102 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // do not overwrite user assignments continue; } - ggml_backend_t node_backend = sched_backend_from_cur(sched, node); - if (node_backend != NULL) { - node_allocr(node) = ggml_backend_sched_get_tallocr(sched, node_backend); + node_allocr(node) = sched_allocr_from_cur(sched, node); + // src + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + if (node_allocr(src) == NULL) { + node_allocr(src) = sched_allocr_from_cur(sched, src); + } } } - //printf("PASS 1 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#ifdef DEBUG_PASS1 + fprintf(stderr, "PASS 1 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#endif - // pass 2: assign backends to ops from current assignments - // TODO: - // - reuse sched_backend_from_cur - for (int i = 0; i < graph->n_nodes; i++) { - struct ggml_tensor * node = graph->nodes[i]; - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr == NULL) { - int cur_prio = INT_MAX; - size_t cur_size = 0; - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * src = node->src[j]; - if (src == NULL) { - break; - } - ggml_tallocr_t src_allocr = node_allocr(src); - if (src_allocr != NULL) { - int src_prio = sched_allocr_prio(sched, src_allocr); - size_t src_size = ggml_nbytes(src); - if (src_prio < cur_prio && src_size >= cur_size) { - cur_prio = src_prio; - cur_size = src_size; - node_allocr = 
src_allocr; - SET_CAUSE(node, "2.src%d", j); - } - } + // pass 2: expand current backend assignments + // assign the same backend to adjacent nodes + // expand gpu backends (i.e. non last prio) up and down, ignoring cpu (the lowest priority backend) + // thus, cpu will never be used unless weights are on cpu, or there are no gpu ops between cpu ops + + // pass 2.1 expand gpu up + { + ggml_tallocr_t cur_allocr = NULL; + for (int i = graph->n_nodes - 1; i >= 0; i--) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; } + ggml_tallocr_t node_allocr = node_allocr(node); if (node_allocr != NULL) { - node_allocr(node) = node_allocr; + if (sched_allocr_prio(sched, node_allocr) == sched->n_backends - 1) { + // skip cpu (lowest prio backend) + cur_allocr = NULL; + } else { + cur_allocr = node_allocr; + } + } else { + node_allocr(node) = cur_allocr; + SET_CAUSE(node, "2.1"); } } } - //printf("PASS 2 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); - // pass 3: assign backends to remaining src from dst (should only be leafs) + // pass 2.2 expand gpu down + { + ggml_tallocr_t cur_allocr = NULL; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + ggml_tallocr_t node_allocr = node_allocr(node); + if (node_allocr != NULL) { + if (sched_allocr_prio(sched, node_allocr) == sched->n_backends - 1) { + // skip cpu (lowest prio backend) + cur_allocr = NULL; + } else { + cur_allocr = node_allocr; + } + } else { + node_allocr(node) = cur_allocr; + SET_CAUSE(node, "2.2"); + } + } + } + + // pass 2.3 expand rest up + { + ggml_tallocr_t cur_allocr = NULL; + for (int i = graph->n_nodes - 1; i >= 0; i--) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + ggml_tallocr_t node_allocr = node_allocr(node); + if (node_allocr != NULL) { + cur_allocr = node_allocr; + } else { + node_allocr(node) = cur_allocr; + SET_CAUSE(node, "2.3"); + } + } + } +#ifdef DEBUG_PASS2 + fprintf(stderr, "PASS 2 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#endif + + // pass 3: assign backends to remaining src from dst and view_src for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - ggml_tallocr_t node_allocr = node_allocr(node); + ggml_tallocr_t cur_allocr = node_allocr(node); + if (node->view_src != NULL && cur_allocr == NULL) { + cur_allocr = node_allocr(node) = node_allocr(node->view_src); + SET_CAUSE(node, "3.vsrc"); + } for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { @@ -977,81 +1106,105 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g } ggml_tallocr_t src_allocr = node_allocr(src); if (src_allocr == NULL) { - node_allocr(src) = node_allocr; + if (src->view_src != NULL) { + // views are always on the same backend as the source + node_allocr(src) = node_allocr(src->view_src); + SET_CAUSE(src, "3.vsrc"); + } else { + node_allocr(src) = cur_allocr; + SET_CAUSE(src, "3.cur"); + } } } } - //printf("PASS 3 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#ifdef DEBUG_PASS3 + fprintf(stderr, "PASS 3 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#endif // pass 4: split graph, find tensors that need to be copied - // TODO: - // - when switching from a less preferred backend to a more preferred backend, check if it is possible to move the switch to an earlier point for the same cost - // find first 
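A compact trace of the three expansion sub-passes, for two backends with the GPU at priority 0 and the CPU last; '?' marks nodes still unassigned after pass 1 (illustrative, view ops omitted):

    // after pass 1 :  ?    GPU  ?    CPU  ?    GPU  ?
    // pass 2.1 up  :  GPU  GPU  ?    CPU  GPU  GPU  ?     GPU spreads right to left; CPU is skipped
    // pass 2.2 down:  GPU  GPU  GPU  CPU  GPU  GPU  GPU   GPU spreads left to right; CPU is skipped
    // pass 2.3 up  :  (nothing left unassigned in this example)

Only the node explicitly placed on the CPU stays there, which is exactly the "cpu will never be used unless weights are on cpu" behavior described in the comment above.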
backend - int cur_split = 0; - for (int i = 0; i < graph->n_nodes; i++) { - struct ggml_tensor * node = graph->nodes[i]; - if (node->view_src == NULL) { - sched->splits[0].tallocr = node_allocr(node); - break; - } - } - sched->splits[0].i_start = 0; - sched->splits[0].n_inputs = 0; - memset(sched->splits[0].inputs, 0, sizeof(sched->splits[0].inputs)); //HACK - ggml_tallocr_t cur_allocr = sched->splits[0].tallocr; - size_t cur_backend_id = sched_allocr_prio(sched, cur_allocr); - for (int i = 0; i < graph->n_nodes; i++) { - struct ggml_tensor * node = graph->nodes[i]; - - if (ggml_is_view_op(node->op)) { - continue; - } - - ggml_tallocr_t node_allocr = node_allocr(node); - - if (node_allocr != cur_allocr) { - sched->splits[cur_split].i_end = i; - cur_split++; - GGML_ASSERT(cur_split < GGML_MAX_SPLITS); - sched->splits[cur_split].tallocr = node_allocr; - sched->splits[cur_split].i_start = i; - sched->splits[cur_split].n_inputs = 0; - memset(sched->splits[cur_split].inputs, 0, sizeof(sched->splits[cur_split].inputs)); //HACK - cur_allocr = node_allocr; - cur_backend_id = sched_allocr_prio(sched, cur_allocr); - } - - // find inputs that are not on the same backend - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * src = node->src[j]; - if (src == NULL) { + { + int cur_split = 0; + // find the backend of the first split, skipping view ops + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (!ggml_is_view_op(node->op)) { + sched->splits[0].tallocr = node_allocr(node); break; } - ggml_tallocr_t src_allocr = node_allocr(src); - if (src_allocr != node_allocr) { - int n_inputs = sched->splits[cur_split].n_inputs++; - GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); - sched->splits[cur_split].inputs[n_inputs] = (struct ggml_tensor *)src; + } + sched->splits[0].i_start = 0; + sched->splits[0].n_inputs = 0; + memset(sched->splits[0].inputs, 0, sizeof(sched->splits[0].inputs)); //HACK + ggml_tallocr_t cur_allocr = sched->splits[0].tallocr; + size_t cur_backend_id = sched_allocr_prio(sched, cur_allocr); + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; - // create copies - size_t id = hash_id(src); - if (sched->node_copies[id][cur_backend_id] == NULL) { - struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); - sched->node_copies[id][cur_backend_id] = tensor_copy; - node_allocr(tensor_copy) = cur_allocr; - ggml_backend_t backend = get_allocr_backend(sched, cur_allocr); - ggml_format_name(tensor_copy, "%s#%s", ggml_backend_name(backend), src->name); + if (ggml_is_view_op(node->op)) { + continue; + } + + ggml_tallocr_t node_allocr = node_allocr(node); + + if (node_allocr != cur_allocr) { + sched->splits[cur_split].i_end = i; + cur_split++; + GGML_ASSERT(cur_split < GGML_MAX_SPLITS); + sched->splits[cur_split].tallocr = node_allocr; + sched->splits[cur_split].i_start = i; + sched->splits[cur_split].n_inputs = 0; + cur_allocr = node_allocr; + cur_backend_id = sched_allocr_prio(sched, cur_allocr); + } + + // find inputs that are not on the same backend + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_tallocr_t src_allocr = node_allocr(src); + GGML_ASSERT(src_allocr != NULL); // all inputs should be assigned by now + if (src_allocr != node_allocr) { + // check if the input is already in the split + bool found = false; + for (int k = 0; k < sched->splits[cur_split].n_inputs; k++) { + if 
(sched->splits[cur_split].inputs[k] == src) { + found = true; + break; + } + } + + if (!found) { + int n_inputs = sched->splits[cur_split].n_inputs++; + //printf("split %d input %d: %s (%s)\n", cur_split, n_inputs, src->name, ggml_backend_name(get_allocr_backend(sched, src_allocr))); + GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); + sched->splits[cur_split].inputs[n_inputs] = src; + } + + // create a copy of the input in the split's backend + size_t id = hash_id(src); + if (sched->node_copies[id][cur_backend_id] == NULL) { + ggml_backend_t backend = get_allocr_backend(sched, cur_allocr); + struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); + ggml_format_name(tensor_copy, "%s#%s", ggml_backend_name(backend), src->name); + + sched->node_copies[id][cur_backend_id] = tensor_copy; + node_allocr(tensor_copy) = cur_allocr; + SET_CAUSE(tensor_copy, "4.cpy"); + } + node->src[j] = sched->node_copies[id][cur_backend_id]; } - node->src[j] = sched->node_copies[id][cur_backend_id]; } } + sched->splits[cur_split].i_end = graph->n_nodes; + sched->n_splits = cur_split + 1; } - sched->splits[cur_split].i_end = graph->n_nodes; - sched->n_splits = cur_split + 1; +#ifdef DEBUG_PASS4 + fprintf(stderr, "PASS 4 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); +#endif - //fprintf(stderr, "PASS 4 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); fflush(stdout); - -#if 1 +#ifndef NDEBUG // sanity check: all sources should have the same backend as the node for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; @@ -1059,6 +1212,11 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g if (node_allocr == NULL) { fprintf(stderr, "!!!!!!! %s has no backend\n", node->name); } + if (node->view_src != NULL && node_allocr != node_allocr(node->view_src)) { + fprintf(stderr, "!!!!!!! %s has backend %s, view_src %s has backend %s\n", + node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", + node->view_src->name, node_allocr(node->view_src) ? ggml_backend_name(get_allocr_backend(sched, node_allocr(node->view_src))) : "NULL"); + } for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { @@ -1070,8 +1228,14 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", j, src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL"); } + if (src->view_src != NULL && src_allocr != node_allocr(src->view_src)) { + fprintf(stderr, "!!!!!!! [src] %s has backend %s, view_src %s has backend %s\n", + src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL", + src->view_src->name, node_allocr(src->view_src) ? 
ggml_backend_name(get_allocr_backend(sched, node_allocr(src->view_src))) : "NULL"); + } } } + fflush(stderr); #endif // create copies of the graph for each split @@ -1085,6 +1249,8 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g for (int j = 0; j < split->n_inputs; j++) { struct ggml_tensor * input = split->inputs[j]; struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][sched_allocr_prio(sched, split->tallocr)]; + // add a dependency to the input source so that it is not freed before the copy is done + GGML_ASSERT(input_cpy->src[0] == NULL || input_cpy->src[0] == input); input_cpy->src[0] = input; graph_copy->nodes[graph_copy->n_nodes++] = input_cpy; } @@ -1119,24 +1285,16 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { uint64_t copy_start_us = ggml_time_us(); for (int j = 0; j < split->n_inputs; j++) { struct ggml_tensor * input = split->inputs[j]; - struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][sched_backend_prio(sched, split_backend)]; - if (input->buffer == NULL) { - if (input->view_src == NULL) { - fprintf(stderr, "input %s has no buffer and no view_src\n", input->name); - exit(1); - } - // FIXME: may need to use the sched buffer instead - ggml_backend_view_init(input->view_src->buffer, input); - } - if (input_cpy->buffer == NULL) { - fprintf(stderr, "input_cpy %s has no buffer\n", input_cpy->name); - exit(1); - } - //GGML_ASSERT(input->buffer->backend != input_cpy->buffer->backend); - //GGML_ASSERT(input_cpy->buffer->backend == split_backend); - ggml_backend_tensor_copy(input, input_cpy); + struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][split_backend_id]; + + GGML_ASSERT(input->buffer != NULL); + GGML_ASSERT(input_cpy->buffer != NULL); + + // TODO: avoid this copy if it was already copied in a previous split, and the input didn't change + // this is important to avoid copying constants such as KQ_mask and inp_pos multiple times + ggml_backend_tensor_copy_async(split_backend, input, input_cpy); } - // ggml_backend_synchronize(split_backend); + //ggml_backend_synchronize(split_backend); // necessary to measure copy time int64_t copy_end_us = ggml_time_us(); copy_us[split_backend_id] += copy_end_us - copy_start_us; @@ -1148,7 +1306,7 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { uint64_t compute_start_us = ggml_time_us(); ggml_backend_graph_compute(split_backend, &split->graph); - // ggml_backend_synchronize(split_backend); + //ggml_backend_synchronize(split_backend); // necessary to measure compute time uint64_t compute_end_us = ggml_time_us(); compute_us[split_backend_id] += compute_end_us - compute_start_us; } @@ -1168,26 +1326,41 @@ static void sched_reset(ggml_backend_sched_t sched) { for (int i = 0; i < sched->n_backends; i++) { ggml_tallocr_reset(sched->tallocs[i]); } + // reset state for the next run + size_t hash_size = sched->hash_set.size; + memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); + memset(sched->node_talloc, 0, sizeof(sched->node_talloc[0]) * hash_size); + memset(sched->node_copies, 0, sizeof(sched->node_copies[0]) * hash_size); + + sched->is_reset = true; } -ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, int n_backends) { +ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size) { + GGML_ASSERT(n_backends > 0); GGML_ASSERT(n_backends <= GGML_MAX_BACKENDS); - struct ggml_backend_sched * sched = 
malloc(sizeof(struct ggml_backend_sched)); - memset(sched, 0, sizeof(struct ggml_backend_sched)); + struct ggml_backend_sched * sched = calloc(sizeof(struct ggml_backend_sched), 1); + + // initialize hash table + sched->hash_set = ggml_hash_set_new(graph_size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + sched->node_talloc = calloc(sizeof(sched->node_talloc[0]) * sched->hash_set.size, 1); + sched->node_copies = calloc(sizeof(sched->node_copies[0]) * sched->hash_set.size, 1); sched->n_backends = n_backends; for (int i = 0; i < n_backends; i++) { sched->backends[i] = backends[i]; + sched->bufts[i] = bufts ? bufts[i] : ggml_backend_get_default_buffer_type(backends[i]); } sched->galloc = ggml_gallocr_new(); // init measure allocs for each backend for (int i = 0; i < n_backends; i++) { - sched->tallocs[i] = ggml_tallocr_new_measure_from_backend(backends[i]); + sched->tallocs[i] = ggml_tallocr_new_measure_from_buft(sched->bufts[i]); } + sched_reset(sched); + return sched; } @@ -1199,6 +1372,7 @@ void ggml_backend_sched_free(ggml_backend_sched_t sched) { ggml_tallocr_free(sched->tallocs[i]); } ggml_gallocr_free(sched->galloc); + ggml_free(sched->ctx); free(sched->hash_set.keys); free(sched->node_talloc); free(sched->node_copies); @@ -1206,12 +1380,7 @@ void ggml_backend_sched_free(ggml_backend_sched_t sched) { } void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph) { - // initialize hash tables - size_t hash_size = measure_graph->visited_hash_table.size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS; - sched->hash_set.size = hash_size; - sched->hash_set.keys = malloc(sizeof(sched->hash_set.keys[0]) * hash_size); - sched->node_talloc = malloc(sizeof(sched->node_talloc[0]) * hash_size); - sched->node_copies = malloc(sizeof(sched->node_copies[0]) * hash_size); + GGML_ASSERT(ggml_tallocr_is_measure(sched->tallocs[0])); // can only be initialized once sched_split_graph(sched, measure_graph); sched_alloc_splits(sched); @@ -1220,28 +1389,41 @@ void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgr for (int i = 0; i < sched->n_backends; i++) { size_t size = ggml_tallocr_max_size(sched->tallocs[i]); ggml_tallocr_free(sched->tallocs[i]); - sched->tallocs[i] = ggml_tallocr_new_from_backend(sched->backends[i], size); + sched->tallocs[i] = ggml_tallocr_new_from_buft(sched->bufts[i], size); } sched_reset(sched); } void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { - GGML_ASSERT(sched->hash_set.size >= graph->visited_hash_table.size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + GGML_ASSERT((int)sched->hash_set.size >= graph->n_nodes + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + + if (!sched->is_reset) { + sched_reset(sched); + } sched_split_graph(sched, graph); sched_alloc_splits(sched); sched_compute_splits(sched); +} + +void ggml_backend_sched_reset(ggml_backend_sched_t sched) { sched_reset(sched); } +int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched) { + return sched->n_splits; +} + ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend) { int backend_index = sched_backend_prio(sched, backend); + GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); return sched->tallocs[backend_index]; } ggml_backend_buffer_t ggml_backend_sched_get_buffer(ggml_backend_sched_t sched, ggml_backend_t backend) { int backend_index = sched_backend_prio(sched, backend); + GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); return 
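Putting the new constructor signature together with the measure/compute entry points; gpu_backend, cpu_backend, measure_graph and graph are assumed to exist, and passing NULL for bufts selects each backend's default buffer type:

    ggml_backend_t backends[2] = { gpu_backend, cpu_backend };  // priority order, CPU last as fallback
    ggml_backend_sched_t sched = ggml_backend_sched_new(backends, NULL, 2, GGML_DEFAULT_GRAPH_SIZE);
    ggml_backend_sched_init_measure(sched, measure_graph);      // sizes the per-backend buffers once
    ggml_backend_sched_graph_compute(sched, graph);
    fprintf(stderr, "computed in %d split(s)\n", ggml_backend_sched_get_n_splits(sched));
    ggml_backend_sched_free(sched);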
ggml_tallocr_get_buffer(sched->tallocs[backend_index]); } @@ -1251,10 +1433,19 @@ void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml node_allocr(node) = sched->tallocs[backend_index]; } +ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node) { + ggml_tallocr_t allocr = node_allocr(node); + if (allocr == NULL) { + return NULL; + } + return get_allocr_backend(sched, allocr); +} + // utils + void ggml_backend_view_init(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { GGML_ASSERT(tensor->buffer == NULL); - //GGML_ASSERT(tensor->data == NULL); // views of pre-allocted tensors may have the data set, but still need to be initialized + //GGML_ASSERT(tensor->data == NULL); // views of pre-allocated tensors may have the data set in ggml_new_tensor, but still need to be initialized by the backend GGML_ASSERT(tensor->view_src != NULL); GGML_ASSERT(tensor->view_src->buffer != NULL); GGML_ASSERT(tensor->view_src->data != NULL); @@ -1320,6 +1511,7 @@ static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor struct ggml_tensor * dst = node_copies[id]; if (dst->view_src != NULL) { + graph_init_tensor(hash_set, node_copies, node_init, src->view_src); ggml_backend_view_init(dst->view_src->buffer, dst); } else { @@ -1353,6 +1545,21 @@ struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, s struct ggml_context * ctx_allocated = ggml_init(params); struct ggml_context * ctx_unallocated = ggml_init(params); + if (ctx_allocated == NULL || ctx_unallocated == NULL) { + fprintf(stderr, "failed to allocate context for graph copy\n"); + free(hash_set.keys); + free(node_copies); + free(node_init); + ggml_free(ctx_allocated); + ggml_free(ctx_unallocated); + return (struct ggml_backend_graph_copy) { + /* .buffer = */ NULL, + /* .ctx_allocated = */ NULL, + /* .ctx_unallocated = */ NULL, + /* .graph = */ NULL, + }; + } + // dup nodes for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; @@ -1361,6 +1568,20 @@ struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, s // allocate nodes ggml_backend_buffer_t buffer = ggml_backend_alloc_ctx_tensors(ctx_allocated, backend); + if (buffer == NULL) { + fprintf(stderr, "failed to allocate buffer for graph copy\n"); + free(hash_set.keys); + free(node_copies); + free(node_init); + ggml_free(ctx_allocated); + ggml_free(ctx_unallocated); + return (struct ggml_backend_graph_copy) { + /* .buffer = */ NULL, + /* .ctx_allocated = */ NULL, + /* .ctx_unallocated = */ NULL, + /* .graph = */ NULL, + }; + } //printf("copy buffer size: %zu MB\n", ggml_backend_buffer_get_size(buffer) / 1024 / 1024); @@ -1397,8 +1618,12 @@ void ggml_backend_graph_copy_free(struct ggml_backend_graph_copy copy) { ggml_free(copy.ctx_unallocated); } -void ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data) { +bool ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data) { struct ggml_backend_graph_copy copy = ggml_backend_graph_copy(backend2, graph); + if (copy.buffer == NULL) { + return false; + } + struct ggml_cgraph * g1 = graph; struct ggml_cgraph * g2 = copy.graph; @@ -1428,4 +1653,6 @@ void ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t } 
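ggml_backend_sched_get_node_backend is the read side of ggml_backend_sched_set_node_backend, so a user pin can now be verified after scheduling. A sketch with sched, node and cpu_backend assumed:

    ggml_backend_sched_set_node_backend(sched, node, cpu_backend);  // force this op onto the CPU
    ggml_backend_t b = ggml_backend_sched_get_node_backend(sched, node);
    fprintf(stderr, "%s -> %s\n", node->name, b ? ggml_backend_name(b) : "(unassigned)");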
ggml_backend_graph_copy_free(copy); + + return true; } diff --git a/ggml-backend.h b/ggml-backend.h index 85ff67b0e..4eb244af1 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -17,22 +17,31 @@ extern "C" { // // buffer type - GGML_API ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size); - GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); - GGML_API size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); - GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); - GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); + GGML_API const char * ggml_backend_buft_name (ggml_backend_buffer_type_t buft); + GGML_API ggml_backend_buffer_t ggml_backend_buft_alloc_buffer (ggml_backend_buffer_type_t buft, size_t size); + GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); + GGML_API size_t ggml_backend_buft_get_alloc_size (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); + GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); + GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); // buffer - GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); - GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); - GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); - GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_type(ggml_backend_buffer_t buffer); + enum ggml_backend_buffer_usage { + GGML_BACKEND_BUFFER_USAGE_ANY = 0, + GGML_BACKEND_BUFFER_USAGE_WEIGHTS = 1, + }; + + GGML_API const char * ggml_backend_buffer_name (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); + GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); + GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_set_usage (ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage); + GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_get_type (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_reset (ggml_backend_buffer_t buffer); // // Backend @@ -140,23 +149,24 @@ extern "C" { typedef struct ggml_backend_sched * ggml_backend_sched_t; // Initialize a backend scheduler - GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, int n_backends); - - GGML_API 
void ggml_backend_sched_free(ggml_backend_sched_t sched); - + GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size); + GGML_API void ggml_backend_sched_free(ggml_backend_sched_t sched); // Initialize backend buffers from a measure graph - GGML_API void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); + GGML_API void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); + // Get the number of splits of the last graph + GGML_API int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched); GGML_API ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend); GGML_API ggml_backend_buffer_t ggml_backend_sched_get_buffer (ggml_backend_sched_t sched, ggml_backend_t backend); - GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); + GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); + GGML_API ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node); - // Allocate a graph on the backend scheduler - GGML_API void ggml_backend_sched_graph_compute( - ggml_backend_sched_t sched, - struct ggml_cgraph * graph); + // Allocate and compute graph on the backend scheduler + GGML_API void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); + // Reset all assignments and allocators - must be called before using the sched allocators to allocate inputs + GGML_API void ggml_backend_sched_reset(ggml_backend_sched_t sched); // // Utils @@ -176,7 +186,7 @@ extern "C" { typedef bool (*ggml_backend_eval_callback)(int node_index, struct ggml_tensor * t1, struct ggml_tensor * t2, void * user_data); // Compare the output of two backends - GGML_API void ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data); + GGML_API bool ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data); // Tensor initialization GGML_API void ggml_backend_tensor_alloc(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, void * addr); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index a345b0c4a..2db50437c 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8,8 +8,13 @@ #include #include #include +#include #include - +#include +#include +#include "ggml-cuda.h" +#include "ggml.h" +#include "ggml-backend-impl.h" #if defined(GGML_USE_HIPBLAS) #include @@ -77,6 +82,7 @@ #define cudaMemcpyKind hipMemcpyKind #define cudaMemset hipMemset #define cudaMemsetAsync hipMemsetAsync +#define cudaMemGetInfo hipMemGetInfo #define cudaOccupancyMaxPotentialBlockSize hipOccupancyMaxPotentialBlockSize #define cudaSetDevice hipSetDevice #define cudaStreamCreateWithFlags hipStreamCreateWithFlags @@ -112,10 +118,6 @@ #endif // defined(GGML_USE_HIPBLAS) -#include "ggml-cuda.h" -#include "ggml.h" -#include "ggml-backend-impl.h" - #define CUDART_HMAX 11070 // CUDA 11.7, min. ver. 
for which __hmax and __hmax2 are known to work (may be higher than needed)
 #define CC_PASCAL 600
@@ -564,7 +566,7 @@ static void ggml_cuda_set_device(const int device) {
 static int g_device_count = -1;
 static int g_main_device = 0;
-static float g_tensor_split[GGML_CUDA_MAX_DEVICES] = {0};
+static std::array<float, GGML_CUDA_MAX_DEVICES> g_default_tensor_split = {};
 
 struct cuda_device_capabilities {
     int cc; // compute capability
@@ -575,10 +577,6 @@ struct cuda_device_capabilities {
 
 static cuda_device_capabilities g_device_caps[GGML_CUDA_MAX_DEVICES] = { {0, 0, false, 0} };
 
-static void * g_scratch_buffer = nullptr;
-static size_t g_scratch_size = 0; // disabled by default
-static size_t g_scratch_offset = 0;
-
 static cublasHandle_t g_cublas_handles[GGML_CUDA_MAX_DEVICES] = {nullptr};
 
 [[noreturn]]
@@ -7548,8 +7546,9 @@ void ggml_init_cublas() {
             CUDA_CHECK(cudaGetDeviceProperties(&prop, id));
             fprintf(stderr, "  Device %d: %s, compute capability %d.%d, VMM: %s\n", id, prop.name, prop.major, prop.minor, device_vmm ? "yes" : "no");
 
-            g_tensor_split[id] = total_vram;
+            g_default_tensor_split[id] = total_vram;
             total_vram += prop.totalGlobalMem;
+
 #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)
             g_device_caps[id].cc = 100*prop.major + 10*prop.minor + CC_OFFSET_AMD;
 #else
@@ -7558,7 +7557,7 @@ void ggml_init_cublas() {
             g_device_caps[id].smpb = prop.sharedMemPerBlock;
         }
         for (int id = 0; id < g_device_count; ++id) {
-            g_tensor_split[id] /= total_vram;
+            g_default_tensor_split[id] /= total_vram;
         }
 
         for (int id = 0; id < g_device_count; ++id) {
@@ -7582,30 +7581,6 @@ void ggml_init_cublas() {
     }
 }
 
-void ggml_cuda_set_tensor_split(const float * tensor_split) {
-    if (tensor_split == nullptr) {
-        return;
-    }
-    bool all_zero = true;
-    for (int i = 0; i < g_device_count; ++i) {
-        if (tensor_split[i] != 0.0f) {
-            all_zero = false;
-            break;
-        }
-    }
-    if (all_zero) {
-        return;
-    }
-    float split_sum = 0.0f;
-    for (int i = 0; i < g_device_count; ++i) {
-        g_tensor_split[i] = split_sum;
-        split_sum += tensor_split[i];
-    }
-    for (int i = 0; i < g_device_count; ++i) {
-        g_tensor_split[i] /= split_sum;
-    }
-}
-
 void * ggml_cuda_host_malloc(size_t size) {
     if (getenv("GGML_CUDA_NO_PINNED") != nullptr) {
         return nullptr;
@@ -8057,11 +8032,11 @@ static void ggml_cuda_op_mul_mat_q(
     (void) src1_ddf_i;
 }
 
-static int64_t get_row_rounding(ggml_type type) {
+static int64_t get_row_rounding(ggml_type type, const std::array<float, GGML_CUDA_MAX_DEVICES> & tensor_split) {
     int64_t min_compute_capability = INT_MAX;
     int64_t max_compute_capability = INT_MIN;
     for (int id = 0; id < g_device_count; ++id) {
-        if (g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) {
+        if (tensor_split[id] < (id + 1 < g_device_count ? tensor_split[id + 1] : 1.0f)) {
             if (min_compute_capability > g_device_caps[id].cc) {
                 min_compute_capability = g_device_caps[id].cc;
             }
@@ -8122,6 +8097,21 @@ static int64_t get_row_rounding(ggml_type type) {
 #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)
 }
 
+static void get_row_split(int64_t * row_low, int64_t * row_high, const ggml_tensor * tensor, const std::array<float, GGML_CUDA_MAX_DEVICES> & tensor_split, int id) {
+    const int64_t nrows = ggml_nrows(tensor);
+    const int64_t rounding = get_row_rounding(tensor->type, tensor_split);
+
+    *row_low = id == 0 ? 0 : nrows*tensor_split[id];
+    *row_low -= *row_low % rounding;
+
+    if (id == g_device_count - 1) {
+        *row_high = nrows;
+    } else {
+        *row_high = nrows*tensor_split[id + 1];
+        *row_high -= *row_high % rounding;
+    }
+}
+
 static void ggml_cuda_op_mul_mat_vec_q(
     const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const char * src0_dd_i, const float * src1_ddf_i,
     const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols,
@@ -8739,6 +8729,11 @@ static void ggml_cuda_set_peer_access(const int n_tokens) {
     peer_access_enabled = enable_peer_access;
 }
 
+// FIXME: move this somewhere else
+struct ggml_backend_cuda_split_buffer_type_context {
+    std::array<float, GGML_CUDA_MAX_DEVICES> tensor_split;
+};
+
 static void ggml_cuda_op_mul_mat(
     const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_cuda_op_mul_mat_t op,
     const bool convert_src1_to_q8_1) {
@@ -8790,6 +8785,14 @@ static void ggml_cuda_op_mul_mat(
     GGML_ASSERT(!(split && ne03 > 1));
     GGML_ASSERT(!(split && ne02 < ne12));
 
+    std::array<float, GGML_CUDA_MAX_DEVICES> tensor_split;
+    if (split) {
+        // TODO: check that src0->buffer->buft is a split buffer type, replace GGML_BACKEND_GPU_SPLIT check
+        // GGML_ASSERT(src0->buffer != nullptr && src0->buffer->buft == ...);
+        ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *) src0->buffer->buft->context;
+        tensor_split = buft_ctx->tensor_split;
+    }
+
     struct dev_data {
         cuda_pool_alloc<char>  src0_dd_alloc;
         cuda_pool_alloc<float> src1_ddf_alloc;
@@ -8817,17 +8820,17 @@ static void ggml_cuda_op_mul_mat(
         // for multi GPU, get the row boundaries from tensor split
         // and round to mul_mat_q tile sizes
         if (split) {
-            const int64_t rounding = get_row_rounding(src0->type);
+            const int64_t rounding = get_row_rounding(src0->type, tensor_split);
 
             if (id != 0) {
-                dev[id].row_low = ne01*g_tensor_split[id];
+                dev[id].row_low = ne01*tensor_split[id];
                 if (dev[id].row_low < ne01) {
                     dev[id].row_low -= dev[id].row_low % rounding;
                 }
             }
 
             if (id != g_device_count - 1) {
-                dev[id].row_high = ne01*g_tensor_split[id + 1];
+                dev[id].row_high = ne01*tensor_split[id + 1];
                 if (dev[id].row_high < ne01) {
                     dev[id].row_high -= dev[id].row_high % rounding;
                 }
@@ -9373,10 +9376,17 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1
     const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT;
 
     int64_t min_compute_capability = INT_MAX;
-    for (int id = 0; id < g_device_count; ++id) {
-        if (min_compute_capability > g_device_caps[id].cc && g_tensor_split[id] < (id + 1 < g_device_count ? g_tensor_split[id + 1] : 1.0f)) {
-            min_compute_capability = g_device_caps[id].cc;
+
+    if (split) {
+        ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *) src0->buffer->buft->context;
+        auto & tensor_split = buft_ctx->tensor_split;
+        for (int id = 0; id < g_device_count; ++id) {
+            if (min_compute_capability > g_device_caps[id].cc && tensor_split[id] < (id + 1 < g_device_count ?
tensor_split[id + 1] : 1.0f)) { + min_compute_capability = g_device_caps[id].cc; + } } + } else { + min_compute_capability = g_device_caps[g_main_device].cc; } #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) @@ -9415,7 +9425,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 } else if (!split && all_on_device && !fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { // KQV single-batch ggml_cuda_mul_mat_vec_nc(src0, src1, dst); - } else if (!split && all_on_device && fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1)) { + } else if (!split && all_on_device && fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) && src1->ne[2]*src1->ne[3] > 1) { // KQ + KQV multi-batch ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); } else if (src0->type == GGML_TYPE_F32) { @@ -9877,247 +9887,7 @@ static size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_spl return nrows_split*ggml_row_size(tensor->type, tensor->ne[0]); } -void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor) { - const int64_t nrows = ggml_nrows(tensor); - - const int64_t ne0 = tensor->ne[0]; - - const size_t nb1 = tensor->nb[1]; - - ggml_backend_type backend = tensor->backend; - ggml_tensor_extra_gpu * extra = new struct ggml_tensor_extra_gpu; - memset(extra, 0, sizeof(*extra)); - - for (int id = 0; id < g_device_count; ++id) { - if (backend == GGML_BACKEND_GPU && id != g_main_device) { - continue; - } - - ggml_cuda_set_device(id); - - int64_t row_low, row_high; - if (backend == GGML_BACKEND_GPU) { - row_low = 0; - row_high = nrows; - } else if (backend == GGML_BACKEND_GPU_SPLIT) { - const int64_t rounding = get_row_rounding(tensor->type); - - row_low = id == 0 ? 
0 : nrows*g_tensor_split[id]; - row_low -= row_low % rounding; - - if (id == g_device_count - 1) { - row_high = nrows; - } else { - row_high = nrows*g_tensor_split[id + 1]; - row_high -= row_high % rounding; - } - } else { - GGML_ASSERT(false); - } - if (row_low == row_high) { - continue; - } - - int64_t nrows_split = row_high - row_low; - - const size_t offset_split = row_low*nb1; - size_t size = ggml_nbytes_split(tensor, nrows_split); - const size_t original_size = size; - - // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses - if (ne0 % MATRIX_ROW_PADDING != 0) { - size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); - } - - char * buf; - CUDA_CHECK(cudaMalloc(&buf, size)); - char * buf_host = (char *)data + offset_split; - - // set padding to 0 to avoid possible NaN values - if (size > original_size) { - CUDA_CHECK(cudaMemset(buf + original_size, 0, size - original_size)); - } - - CUDA_CHECK(cudaMemcpy(buf, buf_host, original_size, cudaMemcpyHostToDevice)); - - extra->data_device[id] = buf; - - if (backend == GGML_BACKEND_GPU_SPLIT) { - for (int64_t is = 0; is < MAX_STREAMS; ++is) { - CUDA_CHECK(cudaEventCreateWithFlags(&extra->events[id][is], cudaEventDisableTiming)); - } - } - } - - tensor->extra = extra; -} - -void ggml_cuda_free_data(struct ggml_tensor * tensor) { - if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_GPU && tensor->backend != GGML_BACKEND_GPU_SPLIT) ) { - return; - } - - ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - - for (int id = 0; id < g_device_count; ++id) { - ggml_cuda_set_device(id); - if (extra->data_device[id] != nullptr) { - CUDA_CHECK(cudaFree(extra->data_device[id])); - } - - for (int64_t is = 0; is < MAX_STREAMS; ++is) { - if (extra->events[id][is] != nullptr) { - CUDA_CHECK(cudaEventDestroy(extra->events[id][is])); - } - } - } - - delete extra; -} - -static ggml_tensor_extra_gpu * g_temp_tensor_extras = nullptr; -static size_t g_temp_tensor_extra_index = 0; - -static ggml_tensor_extra_gpu * ggml_cuda_alloc_temp_tensor_extra() { - if (g_temp_tensor_extras == nullptr) { - g_temp_tensor_extras = new ggml_tensor_extra_gpu[GGML_CUDA_MAX_NODES]; - } - - size_t alloc_index = g_temp_tensor_extra_index; - g_temp_tensor_extra_index = (g_temp_tensor_extra_index + 1) % GGML_CUDA_MAX_NODES; - ggml_tensor_extra_gpu * extra = &g_temp_tensor_extras[alloc_index]; - memset(extra, 0, sizeof(*extra)); - - return extra; -} - -static void ggml_cuda_assign_buffers_impl(struct ggml_tensor * tensor, bool scratch, bool force_inplace, bool no_alloc) { - if (scratch && g_scratch_size == 0) { - return; - } - - tensor->backend = GGML_BACKEND_GPU; - - // recursively assign CUDA buffers until a compute tensor is found - if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_CPU) { - const ggml_op src0_op = tensor->src[0]->op; - if (src0_op == GGML_OP_RESHAPE || src0_op == GGML_OP_TRANSPOSE || src0_op == GGML_OP_VIEW || src0_op == GGML_OP_PERMUTE) { - ggml_cuda_assign_buffers_impl(tensor->src[0], scratch, force_inplace, no_alloc); - } - } - if (tensor->op == GGML_OP_CPY && tensor->src[1]->backend == GGML_BACKEND_CPU) { - ggml_cuda_assign_buffers_impl(tensor->src[1], scratch, force_inplace, no_alloc); - } - - if (scratch && no_alloc) { - return; - } - - ggml_tensor_extra_gpu * extra; - - const bool inplace = (tensor->src[0] != nullptr && tensor->src[0]->data == tensor->data) || - tensor->op == GGML_OP_VIEW || - force_inplace; - const size_t size = 
ggml_nbytes(tensor); - - ggml_cuda_set_device(g_main_device); - if (inplace && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) { - ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; - char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; - size_t offset = 0; - if (tensor->op == GGML_OP_VIEW) { - memcpy(&offset, tensor->op_params, sizeof(size_t)); - } - extra = ggml_cuda_alloc_temp_tensor_extra(); - extra->data_device[g_main_device] = src0_ddc + offset; - } else if (tensor->op == GGML_OP_CPY) { - ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu * ) tensor->src[1]->extra; - void * src1_ddv = src1_extra->data_device[g_main_device]; - extra = ggml_cuda_alloc_temp_tensor_extra(); - extra->data_device[g_main_device] = src1_ddv; - } else if (scratch) { - GGML_ASSERT(size <= g_scratch_size); - if (g_scratch_offset + size > g_scratch_size) { - g_scratch_offset = 0; - } - - char * data = (char *) g_scratch_buffer; - if (data == nullptr) { - CUDA_CHECK(cudaMalloc(&data, g_scratch_size)); - g_scratch_buffer = data; - } - extra = ggml_cuda_alloc_temp_tensor_extra(); - extra->data_device[g_main_device] = data + g_scratch_offset; - - g_scratch_offset += size; - - GGML_ASSERT(g_scratch_offset <= g_scratch_size); - } else { // allocate new buffers outside of scratch - void * data; - CUDA_CHECK(cudaMalloc(&data, size)); - CUDA_CHECK(cudaMemset(data, 0, size)); - extra = new ggml_tensor_extra_gpu; - memset(extra, 0, sizeof(*extra)); - extra->data_device[g_main_device] = data; - } - - tensor->extra = extra; -} - -void ggml_cuda_assign_scratch_offset(struct ggml_tensor * tensor, size_t offset) { - if (g_scratch_size == 0) { - return; - } - if (g_scratch_buffer == nullptr) { - ggml_cuda_set_device(g_main_device); - CUDA_CHECK(cudaMalloc(&g_scratch_buffer, g_scratch_size)); - } - - ggml_tensor_extra_gpu * extra = ggml_cuda_alloc_temp_tensor_extra(); - - const bool inplace = tensor->view_src != nullptr; - - if (inplace && (tensor->view_src->backend == GGML_BACKEND_GPU || tensor->view_src->backend == GGML_BACKEND_GPU_SPLIT)) { - ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->view_src->extra; - char * src0_ddc = (char *) src0_extra->data_device[g_main_device]; - size_t view_offset = 0; - if (tensor->op == GGML_OP_VIEW) { - memcpy(&view_offset, tensor->op_params, sizeof(size_t)); - } - extra->data_device[g_main_device] = src0_ddc + view_offset; - } else { - extra->data_device[g_main_device] = (char *) g_scratch_buffer + offset; - } - - tensor->extra = extra; -} - -void ggml_cuda_copy_to_device(struct ggml_tensor * tensor) { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); - GGML_ASSERT(ggml_is_contiguous(tensor)); - - ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_cuda_set_device(g_main_device); - CUDA_CHECK(cudaMemcpy(extra->data_device[g_main_device], tensor->data, ggml_nbytes(tensor), cudaMemcpyHostToDevice)); -} - -void ggml_cuda_assign_buffers(struct ggml_tensor * tensor) { - ggml_cuda_assign_buffers_impl(tensor, true, false, false); -} - -void ggml_cuda_assign_buffers_no_alloc(struct ggml_tensor * tensor) { - ggml_cuda_assign_buffers_impl(tensor, true, false, true); -} - -void ggml_cuda_assign_buffers_no_scratch(struct ggml_tensor * tensor) { - ggml_cuda_assign_buffers_impl(tensor, false, false, false); -} - -void ggml_cuda_assign_buffers_force_inplace(struct ggml_tensor * tensor) { - ggml_cuda_assign_buffers_impl(tensor, false, true, 
false); -} - -void ggml_cuda_set_main_device(const int main_device) { +static void ggml_cuda_set_main_device(const int main_device) { if (main_device >= g_device_count) { fprintf(stderr, "warning: cannot set main_device=%d because there are only %d devices. Using device %d instead.\n", main_device, g_device_count, g_main_device); @@ -10126,30 +9896,12 @@ void ggml_cuda_set_main_device(const int main_device) { if (g_main_device != main_device && g_device_count > 1) { g_main_device = main_device; - cudaDeviceProp prop; - CUDA_CHECK(cudaGetDeviceProperties(&prop, g_main_device)); - fprintf(stderr, "%s: using device %d (%s) as main device\n", __func__, g_main_device, prop.name); + //cudaDeviceProp prop; + //CUDA_CHECK(cudaGetDeviceProperties(&prop, g_main_device)); + //fprintf(stderr, "%s: using device %d (%s) as main device\n", __func__, g_main_device, prop.name); } } -void ggml_cuda_set_scratch_size(const size_t scratch_size) { - // this is a hack to not completely break llama.cpp when using multiple models or contexts simultaneously - // it still won't always work as expected, but it's better than nothing - if (scratch_size > g_scratch_size) { - ggml_cuda_free_scratch(); - } - g_scratch_size = std::max(g_scratch_size, scratch_size); -} - -void ggml_cuda_free_scratch() { - if (g_scratch_buffer == nullptr) { - return; - } - - CUDA_CHECK(cudaFree(g_scratch_buffer)); - g_scratch_buffer = nullptr; -} - bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { if (!g_cublas_loaded) return false; @@ -10328,21 +10080,31 @@ void ggml_cuda_get_device_description(int device, char * description, size_t des #define UNUSED GGML_UNUSED +struct ggml_backend_cuda_context { + int device; + std::string name; +}; + // cuda buffer -struct ggml_backend_buffer_context_cuda { +struct ggml_backend_cuda_buffer_context { int device; void * dev_ptr = nullptr; ggml_tensor_extra_gpu * temp_tensor_extras = nullptr; size_t temp_tensor_extra_index = 0; + std::string name; - ggml_backend_buffer_context_cuda(int device, void * dev_ptr) : device(device), dev_ptr(dev_ptr) {} + ggml_backend_cuda_buffer_context(int device, void * dev_ptr) : + device(device), dev_ptr(dev_ptr), + name(GGML_CUDA_NAME + std::to_string(device)) { + } - ~ggml_backend_buffer_context_cuda() { + ~ggml_backend_cuda_buffer_context() { delete[] temp_tensor_extras; } ggml_tensor_extra_gpu * ggml_cuda_alloc_temp_tensor_extra() { + // TODO: remove GGML_CUDA_MAX_NODES, allocate dynamically and reuse in backend_buffer_reset if (temp_tensor_extras == nullptr) { temp_tensor_extras = new ggml_tensor_extra_gpu[GGML_CUDA_MAX_NODES]; } @@ -10356,19 +10118,28 @@ struct ggml_backend_buffer_context_cuda { } }; +static const char * ggml_backend_cuda_buffer_get_name(ggml_backend_buffer_t buffer) { + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; + return ctx->name.c_str(); +} + +static bool ggml_backend_buffer_is_cuda(ggml_backend_buffer_t buffer) { + return buffer->iface.get_name == ggml_backend_cuda_buffer_get_name; +} + static void ggml_backend_cuda_buffer_free_buffer(ggml_backend_buffer_t buffer) { - ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context; + ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; CUDA_CHECK(cudaFree(ctx->dev_ptr)); delete ctx; } static void * ggml_backend_cuda_buffer_get_base(ggml_backend_buffer_t buffer) { - ggml_backend_buffer_context_cuda * ctx = 
(ggml_backend_buffer_context_cuda *)buffer->context;
+    ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context;
 
     return ctx->dev_ptr;
 }
 
 static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) {
-    ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context;
+    ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context;
 
     if (tensor->view_src != NULL && tensor->view_offs == 0) {
         assert(tensor->view_src->buffer->buft == buffer->buft);
@@ -10397,14 +10168,12 @@ static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, g
             CUDA_CHECK(cudaMemsetAsync((char *)tensor->data + original_size, 0, padded_size - original_size, g_cudaStreams[ctx->device][0]));
         }
     }
-
-    UNUSED(buffer);
 }
 
 static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) {
     GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU);
 
-    ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context;
+    ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context;
 
     ggml_cuda_set_device(ctx->device);
     CUDA_CHECK(cudaDeviceSynchronize());
@@ -10415,49 +10184,82 @@ static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, gg
 static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) {
     GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU);
 
-    ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context;
+    ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context;
 
     ggml_cuda_set_device(ctx->device);
     CUDA_CHECK(cudaDeviceSynchronize());
     CUDA_CHECK(cudaMemcpy(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost));
+    CUDA_CHECK(cudaDeviceSynchronize());
+}
+
+static bool ggml_backend_cuda_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) {
+    if (ggml_backend_buffer_is_cuda(src->buffer)) {
+        ggml_backend_cuda_buffer_context * src_ctx = (ggml_backend_cuda_buffer_context *)src->buffer->context;
+        ggml_backend_cuda_buffer_context * dst_ctx = (ggml_backend_cuda_buffer_context *)buffer->context;
+
+        ggml_cuda_set_device(src_ctx->device);
+        CUDA_CHECK(cudaDeviceSynchronize());
+        ggml_cuda_set_device(dst_ctx->device);
+        CUDA_CHECK(cudaDeviceSynchronize());
+        CUDA_CHECK(cudaMemcpy((char *)dst->data, (const char *)src->data, ggml_nbytes(src), cudaMemcpyDeviceToDevice));
+        CUDA_CHECK(cudaDeviceSynchronize());
+
+        return true;
+    }
+    return false;
 }
 
 static void ggml_backend_cuda_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) {
-    ggml_backend_buffer_context_cuda * ctx = (ggml_backend_buffer_context_cuda *)buffer->context;
+    ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context;
 
     ggml_cuda_set_device(ctx->device);
     CUDA_CHECK(cudaDeviceSynchronize());
     CUDA_CHECK(cudaMemset(ctx->dev_ptr, value, buffer->size));
+    CUDA_CHECK(cudaDeviceSynchronize());
+}
 
-static struct ggml_backend_buffer_i cuda_backend_buffer_interface = {
+static ggml_backend_buffer_i ggml_backend_cuda_buffer_interface = {
+    /* .get_name     = */ ggml_backend_cuda_buffer_get_name,
     /* .free_buffer  = */ ggml_backend_cuda_buffer_free_buffer,
     /* .get_base     = */ ggml_backend_cuda_buffer_get_base,
     /* .init_tensor  = */
ggml_backend_cuda_buffer_init_tensor, /* .set_tensor = */ ggml_backend_cuda_buffer_set_tensor, /* .get_tensor = */ ggml_backend_cuda_buffer_get_tensor, - /* .cpy_tensor_from = */ NULL, - /* .cpy_tensor_to = */ NULL, + /* .cpy_tensor = */ ggml_backend_cuda_buffer_cpy_tensor, /* .clear = */ ggml_backend_cuda_buffer_clear, + /* .reset = */ NULL, }; // cuda buffer type -static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { - int device = (int) (intptr_t) buft->context; +struct ggml_backend_cuda_buffer_type_context { + int device; + std::string name; +}; - ggml_cuda_set_device(device); +static const char * ggml_backend_cuda_buffer_type_name(ggml_backend_buffer_type_t buft) { + ggml_backend_cuda_buffer_type_context * ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; + + return ctx->name.c_str(); +} + +static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + ggml_backend_cuda_buffer_type_context * buft_ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; + + ggml_cuda_set_device(buft_ctx->device); size = std::max(size, (size_t)1); // cudaMalloc returns null for size 0 void * dev_ptr; - CUDA_CHECK(cudaMalloc(&dev_ptr, size)); + cudaError_t err = cudaMalloc(&dev_ptr, size); + if (err != cudaSuccess) { + fprintf(stderr, "%s: allocating %.2f MiB on device %d: cudaMalloc failed: %s\n", __func__, size/1024.0/1024.0, buft_ctx->device, cudaGetErrorString(err)); + return nullptr; + } - ggml_backend_buffer_context_cuda * ctx = new ggml_backend_buffer_context_cuda(device, dev_ptr); + ggml_backend_cuda_buffer_context * ctx = new ggml_backend_cuda_buffer_context(buft_ctx->device, dev_ptr); - return ggml_backend_buffer_init(buft, cuda_backend_buffer_interface, ctx, size); + return ggml_backend_buffer_init(buft, ggml_backend_cuda_buffer_interface, ctx, size); } static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { @@ -10466,7 +10268,7 @@ static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_ty UNUSED(buft); } -static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, ggml_tensor * tensor) { +static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { int64_t row_low = 0; int64_t row_high = ggml_nrows(tensor); int64_t nrows_split = row_high - row_low; @@ -10487,21 +10289,32 @@ static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_t } static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { - return ggml_backend_is_cuda(backend); + if (!ggml_backend_is_cuda(backend)) { + return false; + } - UNUSED(buft); + ggml_backend_cuda_buffer_type_context * buft_ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; + + return buft_ctx->device == cuda_ctx->device; } static ggml_backend_buffer_type_i ggml_backend_cuda_buffer_type_interface = { + /* .get_name = */ ggml_backend_cuda_buffer_type_name, /* .alloc_buffer = */ ggml_backend_cuda_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cuda_buffer_type_get_alignment, /* .get_alloc_size = */ ggml_backend_cuda_buffer_type_get_alloc_size, /* .supports_backend = */ ggml_backend_cuda_buffer_type_supports_backend, - /* .is_host = */ nullptr, + /* .is_host = */ NULL, }; 
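[note] For context on how the interfaces above are consumed: the per-device buffer type is only reached through the generic ggml_backend_buft_* / ggml_backend_buffer_* dispatchers declared in ggml-backend.h earlier in this patch. The following is a minimal, hypothetical caller sketch (not part of the patch; error handling trimmed) using only functions this series declares:

    // allocate a weights buffer on CUDA device 0 via the new buffer-type API
    #include "ggml-backend.h"
    #include "ggml-cuda.h"
    #include <stdio.h>

    int main(void) {
        ggml_backend_t backend = ggml_backend_cuda_init(0);
        ggml_backend_buffer_type_t buft = ggml_backend_cuda_buffer_type(0);

        // supports_backend now also checks that the buffer type and backend refer to the same device
        if (!ggml_backend_buft_supports_backend(buft, backend)) {
            fprintf(stderr, "buffer type is not usable with this backend\n");
            return 1;
        }

        // alloc_buffer can now return NULL instead of aborting when cudaMalloc fails
        ggml_backend_buffer_t buf = ggml_backend_buft_alloc_buffer(buft, 16*1024*1024);
        if (buf == NULL) {
            fprintf(stderr, "out of device memory\n");
            return 1;
        }

        // new in this patch: buffers are named and can carry a usage hint
        printf("allocated %zu bytes in %s\n", ggml_backend_buffer_get_size(buf), ggml_backend_buffer_name(buf));
        ggml_backend_buffer_set_usage(buf, GGML_BACKEND_BUFFER_USAGE_WEIGHTS);

        ggml_backend_buffer_free(buf);
        ggml_backend_free(backend);
        return 0;
    }

Returning NULL from alloc_buffer rather than crashing is what allows callers such as the scheduler to fail gracefully when VRAM is exhausted.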
ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) {
-    static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_types[GGML_CUDA_MAX_DEVICES];
+    // FIXME: this is not thread safe
+    if (device >= ggml_backend_cuda_get_device_count()) {
+        return nullptr;
+    }
+
+    static ggml_backend_buffer_type ggml_backend_cuda_buffer_types[GGML_CUDA_MAX_DEVICES];
 
     static bool ggml_backend_cuda_buffer_type_initialized = false;
 
@@ -10509,7 +10322,7 @@ ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) {
         for (int i = 0; i < GGML_CUDA_MAX_DEVICES; i++) {
             ggml_backend_cuda_buffer_types[i] = {
                 /* .iface    = */ ggml_backend_cuda_buffer_type_interface,
-                /* .context  = */ (ggml_backend_buffer_type_context_t) (intptr_t) i,
+                /* .context  = */ new ggml_backend_cuda_buffer_type_context{i, GGML_CUDA_NAME + std::to_string(i)},
             };
         }
         ggml_backend_cuda_buffer_type_initialized = true;
@@ -10518,8 +10331,306 @@ ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) {
     return &ggml_backend_cuda_buffer_types[device];
 }
 
+// cuda split buffer
+
+struct ggml_backend_cuda_split_buffer_context {
+    ~ggml_backend_cuda_split_buffer_context() {
+        for (ggml_tensor_extra_gpu * extra : tensor_extras) {
+            for (int id = 0; id < g_device_count; ++id) {
+                for (int64_t is = 0; is < MAX_STREAMS; ++is) {
+                    if (extra->events[id][is] != nullptr) {
+                        CUDA_CHECK(cudaEventDestroy(extra->events[id][is]));
+                    }
+                }
+                if (extra->data_device[id] != nullptr) {
+                    CUDA_CHECK(cudaFree(extra->data_device[id]));
+                }
+            }
+            delete extra;
+        }
+    }
+
+    std::vector<ggml_tensor_extra_gpu *> tensor_extras;
+};
+
+static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backend_buffer_t buffer) {
+    return GGML_CUDA_NAME "_Split";
+
+    UNUSED(buffer);
+}
+
+// unused at the moment
+//static bool ggml_backend_buffer_is_cuda_split(ggml_backend_buffer_t buffer) {
+//    return buffer->iface.get_name == ggml_backend_cuda_split_buffer_get_name;
+//}
+
+static void ggml_backend_cuda_split_buffer_free_buffer(ggml_backend_buffer_t buffer) {
+    ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context;
+    delete ctx;
+}
+
+static void * ggml_backend_cuda_split_buffer_get_base(ggml_backend_buffer_t buffer) {
+    // the pointers are stored in the tensor extras, this is just a dummy address and never dereferenced
+    return (void *)0x1000;
+
+    UNUSED(buffer);
+}
+
+static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) {
+    GGML_ASSERT(tensor->view_src == nullptr); // views of split tensors are not supported
+
+    ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context;
+    ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *)buffer->buft->context;
+
+    const int64_t ne0 = tensor->ne[0];
+
+    ggml_tensor_extra_gpu * extra = new ggml_tensor_extra_gpu{};
+
+    ctx->tensor_extras.push_back(extra);
+
+    for (int id = 0; id < g_device_count; ++id) {
+        int64_t row_low, row_high;
+        get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, id);
+
+        int64_t nrows_split = row_high - row_low;
+        if (nrows_split == 0) {
+            continue;
+        }
+
+        size_t size = ggml_nbytes_split(tensor, nrows_split);
+        const size_t original_size = size;
+
+        // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses
+        if (ne0 % MATRIX_ROW_PADDING != 0) {
+            size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING);
+        }
+
+        // FIXME: do not crash if
cudaMalloc fails + // currently, init_tensor cannot fail, it needs to be fixed in ggml-backend first + ggml_cuda_set_device(id); + char * buf; + CUDA_CHECK(cudaMalloc(&buf, size)); + + // set padding to 0 to avoid possible NaN values + if (size > original_size) { + CUDA_CHECK(cudaMemset(buf + original_size, 0, size - original_size)); + } + + extra->data_device[id] = buf; + + for (int64_t is = 0; is < MAX_STREAMS; ++is) { + CUDA_CHECK(cudaEventCreateWithFlags(&extra->events[id][is], cudaEventDisableTiming)); + } + } + tensor->backend = GGML_BACKEND_GPU_SPLIT; + tensor->extra = extra; +} + +static void ggml_backend_cuda_split_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + // split tensors must always be set in their entirety at once + GGML_ASSERT(offset == 0); + GGML_ASSERT(size == ggml_nbytes(tensor)); + + ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *)buffer->buft->context; + + const int64_t ne0 = tensor->ne[0]; + const size_t nb1 = tensor->nb[1]; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *)tensor->extra; + + for (int id = 0; id < g_device_count; ++id) { + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, id); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + const size_t offset_split = row_low*nb1; + size_t size = ggml_nbytes_split(tensor, nrows_split); + const size_t original_size = size; + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + + const char * buf_host = (const char *)data + offset_split; + CUDA_CHECK(cudaMemcpy(extra->data_device[id], buf_host, original_size, cudaMemcpyHostToDevice)); + } +} + +static void ggml_backend_cuda_split_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { + // split tensors must always be set in their entirety at once + GGML_ASSERT(offset == 0); + GGML_ASSERT(size == ggml_nbytes(tensor)); + + ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *)buffer->buft->context; + + const int64_t ne0 = tensor->ne[0]; + const size_t nb1 = tensor->nb[1]; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *)tensor->extra; + + for (int id = 0; id < g_device_count; ++id) { + int64_t row_low, row_high; + get_row_split(&row_low, &row_high, tensor, buft_ctx->tensor_split, id); + + int64_t nrows_split = row_high - row_low; + if (nrows_split == 0) { + continue; + } + + const size_t offset_split = row_low*nb1; + size_t size = ggml_nbytes_split(tensor, nrows_split); + const size_t original_size = size; + + // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses + if (ne0 % MATRIX_ROW_PADDING != 0) { + size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING); + } + + char * buf_host = (char *)data + offset_split; + CUDA_CHECK(cudaMemcpy(buf_host, extra->data_device[id], original_size, cudaMemcpyDeviceToHost)); + } +} + +static void ggml_backend_cuda_split_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + UNUSED(buffer); + UNUSED(value); +} + +static struct ggml_backend_buffer_i ggml_backend_cuda_split_buffer_interface = { + /* .get_name = */ ggml_backend_cuda_split_buffer_get_name, + /* 
.free_buffer  = */ ggml_backend_cuda_split_buffer_free_buffer,
+    /* .get_base     = */ ggml_backend_cuda_split_buffer_get_base,
+    /* .init_tensor  = */ ggml_backend_cuda_split_buffer_init_tensor,
+    /* .set_tensor   = */ ggml_backend_cuda_split_buffer_set_tensor,
+    /* .get_tensor   = */ ggml_backend_cuda_split_buffer_get_tensor,
+    /* .cpy_tensor   = */ NULL,
+    /* .clear        = */ ggml_backend_cuda_split_buffer_clear,
+    /* .reset        = */ NULL,
+};
+
+// cuda split buffer type
+
+static const char * ggml_backend_cuda_split_buffer_type_name(ggml_backend_buffer_type_t buft) {
+    return GGML_CUDA_NAME "_Split";
+
+    UNUSED(buft);
+}
+
+static ggml_backend_buffer_t ggml_backend_cuda_split_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) {
+    // since we don't know the exact split after rounding, we cannot allocate the device buffers at this point
+    // instead, we allocate them for each tensor separately in init_tensor
+    // however, the size still represents the maximum cumulative size of all the device buffers after the tensors are allocated,
+    // as returned by get_alloc_size. this limit is enforced during tensor allocation by ggml-alloc, so it must be correct.
+    ggml_backend_cuda_split_buffer_context * ctx = new ggml_backend_cuda_split_buffer_context();
+
+    return ggml_backend_buffer_init(buft, ggml_backend_cuda_split_buffer_interface, ctx, size);
+}
+
+static size_t ggml_backend_cuda_split_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) {
+    return 128;
+
+    UNUSED(buft);
+}
+
+static size_t ggml_backend_cuda_split_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) {
+    ggml_backend_cuda_split_buffer_type_context * ctx = (ggml_backend_cuda_split_buffer_type_context *)buft->context;
+
+    size_t total_size = 0;
+
+    const int64_t ne0 = tensor->ne[0];
+
+    for (int id = 0; id < g_device_count; ++id) {
+        int64_t row_low, row_high;
+        get_row_split(&row_low, &row_high, tensor, ctx->tensor_split, id);
+
+        int64_t nrows_split = row_high - row_low;
+        if (nrows_split == 0) {
+            continue;
+        }
+
+        total_size += ggml_nbytes_split(tensor, nrows_split);
+
+        // pad last row to a multiple of 512 elements to avoid out-of-bounds memory accesses
+        if (ne0 % MATRIX_ROW_PADDING != 0) {
+            total_size += ggml_row_size(tensor->type, MATRIX_ROW_PADDING - ne0 % MATRIX_ROW_PADDING);
+        }
+    }
+
+    return total_size;
+}
+
+static bool ggml_backend_cuda_split_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) {
+    return ggml_backend_is_cuda(backend);
+
+    UNUSED(buft);
+}
+
+static bool ggml_backend_cuda_split_buffer_type_is_host(ggml_backend_buffer_type_t buft) {
+    return false;
+
+    UNUSED(buft);
+}
+
+static ggml_backend_buffer_type_i ggml_backend_cuda_split_buffer_type_interface = {
+    /* .get_name         = */ ggml_backend_cuda_split_buffer_type_name,
+    /* .alloc_buffer     = */ ggml_backend_cuda_split_buffer_type_alloc_buffer,
+    /* .get_alignment    = */ ggml_backend_cuda_split_buffer_type_get_alignment,
+    /* .get_alloc_size   = */ ggml_backend_cuda_split_buffer_type_get_alloc_size,
+    /* .supports_backend = */ ggml_backend_cuda_split_buffer_type_supports_backend,
+    /* .is_host          = */ ggml_backend_cuda_split_buffer_type_is_host,
+};
+
+ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split) {
+    // FIXME: this is not thread safe
+    static std::map<std::array<float, GGML_CUDA_MAX_DEVICES>, struct ggml_backend_buffer_type> buft_map;
+
+    std::array<float, GGML_CUDA_MAX_DEVICES> tensor_split_arr = {};
+
+    bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + GGML_CUDA_MAX_DEVICES,
[](float x) { return x == 0.0f; }); + if (all_zero) { + tensor_split_arr = g_default_tensor_split; + } else { + float split_sum = 0.0f; + for (int i = 0; i < g_device_count; ++i) { + tensor_split_arr[i] = split_sum; + split_sum += tensor_split[i]; + } + for (int i = 0; i < g_device_count; ++i) { + tensor_split_arr[i] /= split_sum; + } + } + + auto it = buft_map.find(tensor_split_arr); + if (it != buft_map.end()) { + return &it->second; + } + + struct ggml_backend_buffer_type buft { + /* .iface = */ ggml_backend_cuda_split_buffer_type_interface, + /* .context = */ new ggml_backend_cuda_split_buffer_type_context{tensor_split_arr}, + }; + + auto result = buft_map.emplace(tensor_split_arr, buft); + return &result.first->second; +} + // host buffer type +static const char * ggml_backend_cuda_host_buffer_type_name(ggml_backend_buffer_type_t buft) { + return GGML_CUDA_NAME "_Host"; + + UNUSED(buft); +} + +static const char * ggml_backend_cuda_host_buffer_name(ggml_backend_buffer_t buffer) { + return GGML_CUDA_NAME "_Host"; + + UNUSED(buffer); +} + static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_cuda_host_free(buffer->context); } @@ -10532,9 +10643,9 @@ static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggm return ggml_backend_buft_alloc_buffer(ggml_backend_cpu_buffer_type(), size); } - // FIXME: this is a hack to avoid having to implement a new buffer type ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); buffer->buft = buft; + buffer->iface.get_name = ggml_backend_cuda_host_buffer_name; buffer->iface.free_buffer = ggml_backend_cuda_host_buffer_free_buffer; return buffer; @@ -10543,6 +10654,7 @@ static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggm ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_type_host = { /* .iface = */ { + /* .get_name = */ ggml_backend_cuda_host_buffer_type_name, /* .alloc_buffer = */ ggml_backend_cuda_host_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, @@ -10557,31 +10669,27 @@ ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { // backend -struct ggml_backend_context_cuda { - int device; -}; - static const char * ggml_backend_cuda_name(ggml_backend_t backend) { - return GGML_CUDA_NAME; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; - UNUSED(backend); + return cuda_ctx->name.c_str(); } static void ggml_backend_cuda_free(ggml_backend_t backend) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; delete cuda_ctx; delete backend; } static ggml_backend_buffer_type_t ggml_backend_cuda_get_default_buffer_type(ggml_backend_t backend) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; return ggml_backend_cuda_buffer_type(cuda_ctx->device); } static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context 
*)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -10590,7 +10698,7 @@ static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tens } static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -10598,39 +10706,27 @@ static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggm CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); } +static bool ggml_backend_cuda_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; + + if (dst->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && ggml_backend_buffer_is_cuda(src->buffer)) { + CUDA_CHECK(cudaMemcpyAsync(dst->data, src->data, ggml_nbytes(dst), cudaMemcpyDeviceToDevice, g_cudaStreams[cuda_ctx->device][0])); + return true; + } + + return false; +} + static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[cuda_ctx->device][0])); UNUSED(backend); } -static ggml_backend_graph_plan_t ggml_backend_cuda_graph_plan_create(ggml_backend_t backend, ggml_cgraph * cgraph) { - GGML_ASSERT(!"not implemented"); - - return nullptr; - - UNUSED(backend); - UNUSED(cgraph); -} - -static void ggml_backend_cuda_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { - GGML_ASSERT(!"not implemented"); - - UNUSED(backend); - UNUSED(plan); -} - -static void ggml_backend_cuda_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { - GGML_ASSERT(!"not implemented"); - - UNUSED(backend); - UNUSED(plan); -} - static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { - ggml_backend_context_cuda * cuda_ctx = (ggml_backend_context_cuda *)backend->context; + ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; ggml_cuda_set_main_device(cuda_ctx->device); @@ -10640,53 +10736,31 @@ static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = cgraph->nodes[i]; - if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE) + if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE || node->op == GGML_OP_NONE) { continue; + } - assert(node->backend == GGML_BACKEND_GPU); +#ifndef NDEBUG + assert(node->backend == GGML_BACKEND_GPU || node->backend == GGML_BACKEND_GPU_SPLIT); assert(node->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); assert(node->extra != nullptr); for (int j = 0; j < GGML_MAX_SRC; j++) { if 
(node->src[j] != nullptr) { - assert(node->src[j]->backend == GGML_BACKEND_GPU); + assert(node->src[j]->backend == GGML_BACKEND_GPU || node->src[j]->backend == GGML_BACKEND_GPU_SPLIT); assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); assert(node->src[j]->extra != nullptr); } } +#endif bool ok = ggml_cuda_compute_forward(¶ms, node); if (!ok) { fprintf(stderr, "%s: error: op not supported %s (%s)\n", __func__, node->name, ggml_op_name(node->op)); } GGML_ASSERT(ok); - -#if 0 - if (node->type == GGML_TYPE_F32) { - cudaDeviceSynchronize(); - std::vector tmp(ggml_nelements(node), 0.0f); - cudaMemcpy(tmp.data(), node->data, ggml_nelements(node)*sizeof(float), cudaMemcpyDeviceToHost); - printf("\n%s (%s) (%s %s) (%s %s): ", node->name, ggml_op_name(node->op), - ggml_type_name(node->src[0]->type), - node->src[1] ? ggml_type_name(node->src[1]->type) : "none", - node->src[0]->name, - node->src[1] ? node->src[1]->name : "none"); - double sum = 0.0; - double sq_sum = 0.0; - for (int i = 0; i < ggml_nelements(node); i++) { - printf("%f ", tmp[i]); - sum += tmp[i]; - sq_sum += tmp[i]*tmp[i]; - } - printf("\n"); - printf("sum: %f, ", sum); - printf("sq_sum: %f\n", sq_sum); - } -#endif } - UNUSED(backend); - return true; } @@ -10801,18 +10875,17 @@ static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_ten UNUSED(backend); } -static ggml_backend_i cuda_backend_i = { +static ggml_backend_i ggml_backend_cuda_interface = { /* .get_name = */ ggml_backend_cuda_name, /* .free = */ ggml_backend_cuda_free, /* .get_default_buffer_type = */ ggml_backend_cuda_get_default_buffer_type, /* .set_tensor_async = */ ggml_backend_cuda_set_tensor_async, /* .get_tensor_async = */ ggml_backend_cuda_get_tensor_async, - /* .cpy_tensor_from_async = */ NULL, - /* .cpy_tensor_to_async = */ NULL, + /* .cpy_tensor_async = */ ggml_backend_cuda_cpy_tensor_async, /* .synchronize = */ ggml_backend_cuda_synchronize, - /* .graph_plan_create = */ ggml_backend_cuda_graph_plan_create, - /* .graph_plan_free = */ ggml_backend_cuda_graph_plan_free, - /* .graph_plan_compute = */ ggml_backend_cuda_graph_plan_compute, + /* .graph_plan_create = */ NULL, + /* .graph_plan_free = */ NULL, + /* .graph_plan_compute = */ NULL, /* .graph_compute = */ ggml_backend_cuda_graph_compute, /* .supports_op = */ ggml_backend_cuda_supports_op, }; @@ -10828,12 +10901,13 @@ ggml_backend_t ggml_backend_cuda_init(int device) { // not strictly necessary, but it may reduce the overhead of the first graph_compute ggml_cuda_set_main_device(device); - ggml_backend_context_cuda * ctx = new ggml_backend_context_cuda { - /* .device = */ device + ggml_backend_cuda_context * ctx = new ggml_backend_cuda_context { + /* .device = */ device, + /* .name = */ GGML_CUDA_NAME + std::to_string(device), }; ggml_backend_t cuda_backend = new ggml_backend { - /* .interface = */ cuda_backend_i, + /* .interface = */ ggml_backend_cuda_interface, /* .context = */ ctx }; @@ -10841,9 +10915,24 @@ ggml_backend_t ggml_backend_cuda_init(int device) { } bool ggml_backend_is_cuda(ggml_backend_t backend) { - return backend->iface.get_name == ggml_backend_cuda_name; + return backend && backend->iface.get_name == ggml_backend_cuda_name; } +int ggml_backend_cuda_get_device_count() { + return ggml_cuda_get_device_count(); +} + +void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size) { + ggml_cuda_get_device_description(device, description, description_size); +} + +void 
ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total) { + ggml_cuda_set_device(device); + + CUDA_CHECK(cudaMemGetInfo(free, total)); +} + +// backend registry static ggml_backend_t ggml_backend_reg_cuda_init(const char * params, void * user_data) { ggml_backend_t cuda_backend = ggml_backend_cuda_init((int) (intptr_t) user_data); return cuda_backend; diff --git a/ggml-cuda.h b/ggml-cuda.h index cdb0c0c41..d19cbf3fd 100644 --- a/ggml-cuda.h +++ b/ggml-cuda.h @@ -27,22 +27,6 @@ GGML_API void * ggml_cuda_host_malloc(size_t size); GGML_API void ggml_cuda_host_free(void * ptr); GGML_API bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -GGML_API void ggml_cuda_set_tensor_split(const float * tensor_split); -GGML_API void ggml_cuda_transform_tensor(void * data, struct ggml_tensor * tensor); -GGML_API void ggml_cuda_free_data(struct ggml_tensor * tensor); - -GGML_API void ggml_cuda_assign_buffers(struct ggml_tensor * tensor); -GGML_API void ggml_cuda_assign_buffers_no_scratch(struct ggml_tensor * tensor); -GGML_API void ggml_cuda_assign_buffers_force_inplace(struct ggml_tensor * tensor); - -GGML_API void ggml_cuda_assign_buffers_no_alloc(struct ggml_tensor * tensor); -GGML_API void ggml_cuda_assign_scratch_offset(struct ggml_tensor * tensor, size_t offset); -GGML_API void ggml_cuda_copy_to_device(struct ggml_tensor * tensor); - -GGML_API void ggml_cuda_set_main_device(int main_device); -GGML_API void ggml_cuda_set_mul_mat_q(bool mul_mat_q); -GGML_API void ggml_cuda_set_scratch_size(size_t scratch_size); -GGML_API void ggml_cuda_free_scratch(void); GGML_API bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor); GGML_API int ggml_cuda_get_device_count(void); @@ -52,13 +36,17 @@ GGML_API void ggml_cuda_get_device_description(int device, char * description, GGML_API ggml_backend_t ggml_backend_cuda_init(int device); GGML_API bool ggml_backend_is_cuda(ggml_backend_t backend); -GGML_API int ggml_backend_cuda_get_device(ggml_backend_t backend); GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device); - -// pinned host buffer for use with CPU backend for faster copies between CPU and GPU +// split tensor buffer that splits matrices by rows across multiple devices +GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split); +// pinned host buffer for use with the CPU backend for faster copies between CPU and GPU GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type(void); +GGML_API int ggml_backend_cuda_get_device_count(void); +GGML_API void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size); +GGML_API void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total); + #ifdef __cplusplus } #endif diff --git a/ggml-impl.h b/ggml-impl.h index 2faced080..2c58075ac 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -228,6 +228,8 @@ inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { #define GGML_HASHTABLE_FULL ((size_t)-1) #define GGML_HASHTABLE_ALREADY_EXISTS ((size_t)-2) +struct ggml_hash_set ggml_hash_set_new(size_t size); + bool ggml_hash_contains (const struct ggml_hash_set hash_set, struct ggml_tensor * key); // returns GGML_HASHTABLE_FULL if table is full, otherwise the current index of the key or where it should be inserted diff --git a/ggml-metal.m b/ggml-metal.m index 6e5594432..c03624073 100644 --- a/ggml-metal.m +++ 
b/ggml-metal.m @@ -2520,10 +2520,10 @@ static void ggml_backend_metal_free_device(void) { } } -static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { - struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; +static const char * ggml_backend_metal_buffer_get_name(ggml_backend_buffer_t buffer) { + return "Metal"; - return ctx->all_data; + UNUSED(buffer); } static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { @@ -2541,6 +2541,12 @@ static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) free(ctx); } +static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { + struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; + + return ctx->all_data; +} + static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { memcpy((char *)tensor->data + offset, data, size); @@ -2553,14 +2559,12 @@ static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, c UNUSED(buffer); } -static void ggml_backend_metal_buffer_cpy_tensor_from(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_get(src, dst->data, 0, ggml_nbytes(src)); - - UNUSED(buffer); -} - -static void ggml_backend_metal_buffer_cpy_tensor_to(ggml_backend_buffer_t buffer, struct ggml_tensor * src, struct ggml_tensor * dst) { - ggml_backend_tensor_set(dst, src->data, 0, ggml_nbytes(src)); +static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { + if (ggml_backend_buffer_is_host(src->buffer)) { + memcpy(dst->data, src->data, ggml_nbytes(src)); + return true; + } + return false; UNUSED(buffer); } @@ -2572,18 +2576,25 @@ static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_ } static struct ggml_backend_buffer_i ggml_backend_metal_buffer_i = { + /* .get_name = */ ggml_backend_metal_buffer_get_name, /* .free_buffer = */ ggml_backend_metal_buffer_free_buffer, /* .get_base = */ ggml_backend_metal_buffer_get_base, /* .init_tensor = */ NULL, /* .set_tensor = */ ggml_backend_metal_buffer_set_tensor, /* .get_tensor = */ ggml_backend_metal_buffer_get_tensor, - /* .cpy_tensor_from = */ ggml_backend_metal_buffer_cpy_tensor_from, - /* .cpy_tensor_to = */ ggml_backend_metal_buffer_cpy_tensor_to, + /* .cpy_tensor = */ ggml_backend_metal_buffer_cpy_tensor, /* .clear = */ ggml_backend_metal_buffer_clear, + /* .reset = */ NULL, }; // default buffer type +static const char * ggml_backend_metal_buffer_type_get_name(ggml_backend_buffer_type_t buft) { + return "Metal"; + + UNUSED(buft); +} + static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); @@ -2656,6 +2667,7 @@ static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t bu ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_buffer_type_metal = { /* .iface = */ { + /* .get_name = */ ggml_backend_metal_buffer_type_get_name, /* .alloc_buffer = */ ggml_backend_metal_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_metal_buffer_type_get_alignment, /* .get_alloc_size = */ NULL, // 
defaults to ggml_nbytes @@ -2679,6 +2691,14 @@ ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t siz ctx->n_buffers = 0; const size_t size_page = sysconf(_SC_PAGESIZE); + + // page-align the data ptr + { + const uintptr_t offs = (uintptr_t) data % size_page; + data = (void *) ((char *) data - offs); + size += offs; + } + size_t size_aligned = size; if ((size_aligned % size_page) != 0) { size_aligned += (size_page - (size_aligned % size_page)); @@ -2779,14 +2799,13 @@ static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct UNUSED(backend); } -static struct ggml_backend_i metal_backend_i = { +static struct ggml_backend_i ggml_backend_metal_i = { /* .get_name = */ ggml_backend_metal_name, /* .free = */ ggml_backend_metal_free, /* .get_default_buffer_type = */ ggml_backend_metal_get_default_buffer_type, /* .set_tensor_async = */ NULL, /* .get_tensor_async = */ NULL, - /* .cpy_tensor_from_async = */ NULL, - /* .cpy_tensor_to_async = */ NULL, + /* .cpy_tensor_async = */ NULL, /* .synchronize = */ NULL, /* .graph_plan_create = */ NULL, /* .graph_plan_free = */ NULL, @@ -2805,7 +2824,7 @@ ggml_backend_t ggml_backend_metal_init(void) { ggml_backend_t metal_backend = malloc(sizeof(struct ggml_backend)); *metal_backend = (struct ggml_backend) { - /* .interface = */ metal_backend_i, + /* .interface = */ ggml_backend_metal_i, /* .context = */ ctx, }; @@ -2813,7 +2832,7 @@ ggml_backend_t ggml_backend_metal_init(void) { } bool ggml_backend_is_metal(ggml_backend_t backend) { - return backend->iface.get_name == ggml_backend_metal_name; + return backend && backend->iface.get_name == ggml_backend_metal_name; } void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb) { diff --git a/ggml-opencl.cpp b/ggml-opencl.cpp index 496f9cdca..2bb93638f 100644 --- a/ggml-opencl.cpp +++ b/ggml-opencl.cpp @@ -1,5 +1,6 @@ #include "ggml.h" #include "ggml-opencl.h" +#include "ggml-backend-impl.h" #include #include @@ -10,7 +11,7 @@ #include #include -#define CL_TARGET_OPENCL_VERSION 110 +#define CL_TARGET_OPENCL_VERSION 120 #include #if defined(_MSC_VER) @@ -929,6 +930,12 @@ static cl_program build_program_from_source(cl_context ctx, cl_device_id dev, co } void ggml_cl_init(void) { + static bool initialized = false; + if (initialized) { + return; + } + initialized = true; + cl_int err; struct cl_device; @@ -1483,8 +1490,8 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr } else { d_X = ggml_cl_pool_malloc(sizeof(float) * x_ne, &x_size); } - cl_mem d_Y = ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); - cl_mem d_D = ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); + cl_mem d_Y = src1->backend == GGML_BACKEND_GPU ? (cl_mem) src1->extra : ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); + cl_mem d_D = dst->backend == GGML_BACKEND_GPU ? 
(cl_mem) dst->extra : ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); size_t x_offset = 0; @@ -1501,7 +1508,9 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { // copy src1 to device - CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); + if (src1->backend == GGML_BACKEND_CPU) { + CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); + } CL_CHECK(clFinish(queue)); @@ -1522,8 +1531,10 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr } // copy dst to host - float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &ev_sgemm, NULL)); + if (dst->backend == GGML_BACKEND_CPU) { + float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); + CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &ev_sgemm, NULL)); + } } } } @@ -1532,8 +1543,12 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr if (src0->backend != GGML_BACKEND_GPU) { ggml_cl_pool_free(d_X, x_size); } - ggml_cl_pool_free(d_Y, y_size); - ggml_cl_pool_free(d_D, d_size); + if (src1->backend != GGML_BACKEND_GPU) { + ggml_cl_pool_free(d_Y, y_size); + } + if (dst->backend != GGML_BACKEND_GPU) { + ggml_cl_pool_free(d_D, d_size); + } } static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, void * wdata, size_t wsize) { @@ -1598,6 +1613,8 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_X, 0, src0, i03, i02, NULL)); } + // FIXME: convert on device + for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { // convert src1 to fp16 // TODO: use multiple threads @@ -1643,11 +1660,13 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr } // copy dst to host, then convert to float - CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(ggml_fp16_t) * d_ne, tmp, 1, &ev_sgemm, NULL)); - - float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - - ggml_fp16_to_fp32_row(tmp, d, d_ne); + if (dst->backend == GGML_BACKEND_CPU) { + CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(ggml_fp16_t) * d_ne, tmp, 1, &ev_sgemm, NULL)); + float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); + ggml_fp16_to_fp32_row(tmp, d, d_ne); + } else { + // FIXME: convert dst to fp32 on device + } } } } @@ -1801,7 +1820,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * } -bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { +bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, const struct ggml_tensor * dst) { const int64_t ne10 = src1->ne[0]; const int64_t ne0 = dst->ne[0]; @@ -1895,3 +1914,291 @@ void ggml_cl_transform_tensor(void * data, ggml_tensor * tensor) { tensor->extra = dst; GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); } + +// ggml-backend + +// buffer + +struct ggml_backend_opencl_buffer_context { + ~ggml_backend_opencl_buffer_context() { + if (buffer) { + clReleaseMemObject(buffer); + } + for (auto * sub_buffer : sub_buffers) { + clReleaseMemObject(sub_buffer); + } + } + + cl_mem buffer; + std::vector sub_buffers; +}; + +static void * const cl_ptr_base = (void *)(uintptr_t) 0x1000; + +static const char * 
ggml_backend_opencl_buffer_get_name(ggml_backend_buffer_t buffer) { + return "OpenCL"; + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_backend_opencl_buffer_context * ctx = (ggml_backend_opencl_buffer_context *) buffer->context; + delete ctx; +} + +static void * ggml_backend_opencl_buffer_get_base(ggml_backend_buffer_t buffer) { + return cl_ptr_base; + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { + if (tensor->view_src != NULL && tensor->view_offs == 0) { + tensor->extra = tensor->view_src->extra; + } else { + ggml_backend_opencl_buffer_context * ctx = (ggml_backend_opencl_buffer_context *) buffer->context; + cl_buffer_region region = {(size_t)((char *)tensor->data - (char *)cl_ptr_base), ggml_nbytes(tensor)}; + cl_int err; + cl_mem sub_buffer = clCreateSubBuffer(ctx->buffer, CL_MEM_READ_WRITE, CL_BUFFER_CREATE_TYPE_REGION, ®ion, &err); + CL_CHECK(err); + ctx->sub_buffers.push_back(sub_buffer); + tensor->extra = sub_buffer; + } + tensor->backend = GGML_BACKEND_GPU; +} + +static void ggml_backend_opencl_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { + cl_mem tensor_buffer = (cl_mem) tensor->extra; + CL_CHECK(clEnqueueWriteBuffer(queue, tensor_buffer, true, offset, size, data, 0, NULL, NULL)); + CL_CHECK(clFinish(queue)); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { + cl_mem tensor_buffer = (cl_mem) tensor->extra; + CL_CHECK(clEnqueueReadBuffer(queue, tensor_buffer, true, offset, size, data, 0, NULL, NULL)); + CL_CHECK(clFinish(queue)); + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { + ggml_backend_opencl_buffer_context * ctx = (ggml_backend_opencl_buffer_context *) buffer->context; + CL_CHECK(clEnqueueFillBuffer(queue, ctx->buffer, &value, sizeof(value), 0, buffer->size, 0, NULL, NULL)); + CL_CHECK(clFinish(queue)); +} + +static void ggml_backend_opencl_buffer_reset(ggml_backend_buffer_t buffer) { + ggml_backend_opencl_buffer_context * ctx = (ggml_backend_opencl_buffer_context *) buffer->context; + for (auto * sub_buffer : ctx->sub_buffers) { + clReleaseMemObject(sub_buffer); + } + ctx->sub_buffers.clear(); +} + +static ggml_backend_buffer_i ggml_backend_opencl_buffer_interface = { + /* .get_name = */ ggml_backend_opencl_buffer_get_name, + /* .free_buffer = */ ggml_backend_opencl_buffer_free_buffer, + /* .get_base = */ ggml_backend_opencl_buffer_get_base, + /* .init_tensor = */ ggml_backend_opencl_buffer_init_tensor, + /* .set_tensor = */ ggml_backend_opencl_buffer_set_tensor, + /* .get_tensor = */ ggml_backend_opencl_buffer_get_tensor, + /* .cpy_tensor = */ NULL, + /* .clear = */ ggml_backend_opencl_buffer_clear, + /* .reset = */ ggml_backend_opencl_buffer_reset, +}; + +// buffer type + +static const char * ggml_backend_opencl_buffer_type_name(ggml_backend_buffer_type_t buffer_type) { + return "OpenCL"; + + GGML_UNUSED(buffer_type); +} + +static ggml_backend_buffer_t ggml_backend_opencl_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buffer_type, size_t size) { + ggml_cl_init(); + + cl_int err; + cl_mem mem = clCreateBuffer(context, CL_MEM_READ_WRITE, size, NULL, &err); + if (err != CL_SUCCESS) { + fprintf(stderr, "%s: failed to allocate %.2f 
MiB\n", __func__, size / 1024.0 / 1024.0); + return nullptr; + } + + ggml_backend_opencl_buffer_context * ctx = new ggml_backend_opencl_buffer_context{mem, {}}; + + return ggml_backend_buffer_init(buffer_type, ggml_backend_opencl_buffer_interface, ctx, size); +} + +static size_t ggml_backend_opencl_buffer_type_get_alignment(ggml_backend_buffer_type_t buffer_type) { + // FIXME: not thread safe, device may not be initialized yet + static cl_uint alignment = -1; + if (alignment == (cl_uint)-1) { + ggml_cl_init(); + clGetDeviceInfo(device, CL_DEVICE_MEM_BASE_ADDR_ALIGN, sizeof(cl_uint), &alignment, NULL); + } + return alignment; + + GGML_UNUSED(buffer_type); +} + +static bool ggml_backend_opencl_buffer_type_supports_backend(ggml_backend_buffer_type_t buffer_type, ggml_backend_t backend) { + //return ggml_backend_is_opencl(backend); // opencl must be used through the cpu backend + return ggml_backend_is_cpu(backend); + + GGML_UNUSED(buffer_type); +} + +static ggml_backend_buffer_type_i ggml_backend_opencl_buffer_type_interface = { + /* .get_name = */ ggml_backend_opencl_buffer_type_name, + /* .alloc_buffer = */ ggml_backend_opencl_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_opencl_buffer_type_get_alignment, + /* .get_alloc_size = */ NULL, + /* .supports_backend = */ ggml_backend_opencl_buffer_type_supports_backend, + /* .is_host = */ NULL, +}; + + +ggml_backend_buffer_type_t ggml_backend_opencl_buffer_type() { + static ggml_backend_buffer_type buffer_type = { + /* .iface = */ ggml_backend_opencl_buffer_type_interface, + /* .context = */ nullptr, + }; + + return &buffer_type; +} + +#if 0 +// host buffer type + +static const char * ggml_backend_opencl_host_buffer_type_name(ggml_backend_buffer_type_t buft) { + return "CL_Host"; + + GGML_UNUSED(buft); +} + +static const char * ggml_backend_opencl_host_buffer_name(ggml_backend_buffer_t buffer) { + return "CL_Host"; + + GGML_UNUSED(buffer); +} + +static void ggml_backend_opencl_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { + ggml_cl_host_free(buffer->context); +} + +static ggml_backend_buffer_t ggml_backend_opencl_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { + void * ptr = ggml_cl_host_malloc(size); + + if (ptr == nullptr) { + // fallback to cpu buffer + return ggml_backend_buft_alloc_buffer(ggml_backend_cpu_buffer_type(), size); + } + + ggml_backend_buffer_t buffer = ggml_backend_cpu_buffer_from_ptr(ptr, size); + buffer->buft = buft; + buffer->iface.get_name = ggml_backend_opencl_host_buffer_name; + buffer->iface.free_buffer = ggml_backend_opencl_host_buffer_free_buffer; + + return buffer; +} + +ggml_backend_buffer_type_t ggml_backend_opencl_host_buffer_type() { + static struct ggml_backend_buffer_type ggml_backend_opencl_buffer_type_host = { + /* .iface = */ { + /* .get_name = */ ggml_backend_opencl_host_buffer_type_name, + /* .alloc_buffer = */ ggml_backend_opencl_host_buffer_type_alloc_buffer, + /* .get_alignment = */ ggml_backend_cpu_buffer_type()->iface.get_alignment, + /* .get_alloc_size = */ ggml_backend_cpu_buffer_type()->iface.get_alloc_size, + /* .supports_backend = */ ggml_backend_cpu_buffer_type()->iface.supports_backend, + /* .is_host = */ ggml_backend_cpu_buffer_type()->iface.is_host, + }, + /* .context = */ nullptr, + }; + + return &ggml_backend_opencl_buffer_type_host; +} + +// backend + +static const char * ggml_backend_opencl_name(ggml_backend_t backend) { + return "OpenCL"; + + GGML_UNUSED(backend); +} + +static void ggml_backend_opencl_free(ggml_backend_t backend) { + 
GGML_UNUSED(backend); +} + +static ggml_backend_buffer_type_t ggml_backend_opencl_get_default_buffer_type(ggml_backend_t backend) { + return ggml_backend_opencl_buffer_type(); + + GGML_UNUSED(backend); +} + +static bool ggml_backend_opencl_graph_compute(ggml_backend_t backend, ggml_cgraph * graph) { + for (int i = 0; i < graph->n_nodes; ++i) { + ggml_tensor * node = graph->nodes[i]; + switch (node->op) { + case GGML_OP_MUL_MAT: + ggml_cl_mul_mat(node->src[0], node->src[1], node, nullptr, 0); + break; + case GGML_OP_MUL: + ggml_cl_mul(node->src[0], node->src[1], node); + break; + default: + GGML_ASSERT(false); + } + } + + return true; + + GGML_UNUSED(backend); +} + +static bool ggml_backend_opencl_supports_op(ggml_backend_t backend, const ggml_tensor * op) { + switch (op->op) { + case GGML_OP_MUL_MAT: + return ggml_cl_can_mul_mat(op->src[0], op->src[1], op); + case GGML_OP_MUL: + // return ggml_can_repeat_rows(op->src[1], op->src[0]); + return true; + default: + return false; + } + + GGML_UNUSED(backend); +} + +static ggml_backend_i opencl_backend_i = { + /* .get_name = */ ggml_backend_opencl_name, + /* .free = */ ggml_backend_opencl_free, + /* .get_default_buffer_type = */ ggml_backend_opencl_get_default_buffer_type, + /* .set_tensor_async = */ NULL, + /* .get_tensor_async = */ NULL, + /* .cpy_tensor_from_async = */ NULL, + /* .cpy_tensor_to_async = */ NULL, + /* .synchronize = */ NULL, + /* .graph_plan_create = */ NULL, + /* .graph_plan_free = */ NULL, + /* .graph_plan_compute = */ NULL, + /* .graph_compute = */ ggml_backend_opencl_graph_compute, + /* .supports_op = */ ggml_backend_opencl_supports_op, +}; + +ggml_backend_t ggml_backend_opencl_init() { + ggml_backend_t backend = new ggml_backend { + /* .interface = */ opencl_backend_i, + /* .context = */ nullptr + }; + + return backend; +} + +bool ggml_backend_is_opencl(ggml_backend_t backend) { + return backend && backend->iface.get_name == ggml_backend_opencl_name; +} +#endif diff --git a/ggml-opencl.h b/ggml-opencl.h index 44d05bd64..919b00d63 100644 --- a/ggml-opencl.h +++ b/ggml-opencl.h @@ -1,6 +1,7 @@ #pragma once #include "ggml.h" +#include "ggml-backend.h" #ifdef __cplusplus extern "C" { @@ -9,17 +10,26 @@ extern "C" { GGML_API void ggml_cl_init(void); GGML_API void ggml_cl_mul(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -GGML_API bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, const struct ggml_tensor * dst); GGML_API size_t ggml_cl_mul_mat_get_wsize(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); GGML_API void ggml_cl_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst, void * wdata, size_t wsize); -GGML_API void * ggml_cl_host_malloc(size_t size); -GGML_API void ggml_cl_host_free(void * ptr); +// GGML_API void * ggml_cl_host_malloc(size_t size); +// GGML_API void ggml_cl_host_free(void * ptr); GGML_API void ggml_cl_free_data(const struct ggml_tensor* tensor); GGML_API void ggml_cl_transform_tensor(void * data, struct ggml_tensor * tensor); +// backend API + +// GGML_API ggml_backend_t ggml_backend_opencl_init(void); + +// GGML_API bool ggml_backend_is_opencl(ggml_backend_t backend); + +GGML_API ggml_backend_buffer_type_t ggml_backend_opencl_buffer_type(void); +// GGML_API ggml_backend_buffer_type_t 
ggml_backend_opencl_host_buffer_type(void); + #ifdef __cplusplus } #endif diff --git a/ggml.c b/ggml.c index f5caeba08..6dbd7626c 100644 --- a/ggml.c +++ b/ggml.c @@ -2354,6 +2354,10 @@ struct ggml_context * ggml_init(struct ggml_init_params params) { } void ggml_free(struct ggml_context * ctx) { + if (ctx == NULL) { + return; + } + // make this function thread safe ggml_critical_section_start(); @@ -4362,6 +4366,23 @@ struct ggml_tensor * ggml_cpy( return ggml_cpy_impl(ctx, a, b); } +struct ggml_tensor * ggml_cast( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_type type) { + bool is_node = false; + + struct ggml_tensor * result = ggml_new_tensor(ctx, type, GGML_MAX_DIMS, a->ne); + ggml_format_name(result, "%s (copy)", a->name); + + result->op = GGML_OP_CPY; + result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; + result->src[0] = a; + result->src[1] = result; + + return result; +} + // ggml_cont static struct ggml_tensor * ggml_cont_impl( @@ -14871,7 +14892,7 @@ size_t ggml_hash_find_or_insert(struct ggml_hash_set hash_set, struct ggml_tenso return i; } -static struct ggml_hash_set ggml_hash_set_new(size_t size) { +struct ggml_hash_set ggml_hash_set_new(size_t size) { size = ggml_hash_size(size); struct ggml_hash_set result; result.size = size; @@ -16620,7 +16641,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { return GGML_EXIT_SUCCESS; } -struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { +struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threads) { if (n_threads <= 0) { n_threads = GGML_DEFAULT_N_THREADS; } @@ -16682,14 +16703,15 @@ struct ggml_cplan ggml_graph_plan(struct ggml_cgraph * cgraph, int n_threads) { } break; case GGML_OP_MUL_MAT_ID: { + cur = 0; const struct ggml_tensor * src0 = node->src[2]; const struct ggml_tensor * src1 = node->src[1]; const enum ggml_type vec_dot_type = type_traits[src0->type].vec_dot_type; if (src1->type != vec_dot_type) { - cur = ggml_row_size(vec_dot_type, ggml_nelements(src1)); + cur += ggml_row_size(vec_dot_type, ggml_nelements(src1)); } const int n_as = ggml_get_op_params_i32(node, 1); - cur = GGML_PAD(cur, sizeof(int64_t)); // align + cur += GGML_PAD(cur, sizeof(int64_t)); // align cur += n_as * sizeof(int64_t); // matrix_row_counts cur += n_as * src1->ne[1] * sizeof(int64_t); // matrix_rows } break; diff --git a/ggml.h b/ggml.h index 4c2ff6c66..b18ba7812 100644 --- a/ggml.h +++ b/ggml.h @@ -1165,6 +1165,11 @@ extern "C" { struct ggml_tensor * a, struct ggml_tensor * b); + GGML_API struct ggml_tensor * ggml_cast( + struct ggml_context * ctx, + struct ggml_tensor * a, + enum ggml_type type); + // make contiguous GGML_API struct ggml_tensor * ggml_cont( struct ggml_context * ctx, @@ -1842,8 +1847,8 @@ extern "C" { // ggml_graph_plan() has to be called before ggml_graph_compute() // when plan.work_size > 0, caller must allocate memory for plan.work_data - GGML_API struct ggml_cplan ggml_graph_plan (struct ggml_cgraph * cgraph, int n_threads /*= GGML_DEFAULT_N_THREADS*/); - GGML_API int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan); + GGML_API struct ggml_cplan ggml_graph_plan (const struct ggml_cgraph * cgraph, int n_threads /*= GGML_DEFAULT_N_THREADS*/); + GGML_API int ggml_graph_compute( struct ggml_cgraph * cgraph, struct ggml_cplan * cplan); // same as ggml_graph_compute() but the work data is allocated as a part of the context // note: the drawback of this API is that you must have ensured that the context has enough 
memory for the work data diff --git a/llama.cpp b/llama.cpp index ce413f605..fe1d8947c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1,5 +1,4 @@ #define LLAMA_API_INTERNAL -//#define LLAMA_GGML_BACKEND_CUDA_TEST // for testing only - enables ggml-cuda through ggml-backend, disables partial offloading #include "llama.h" #include "unicode.h" @@ -152,10 +151,6 @@ static bool is_float_close(float a, float b, float abs_tol) { return std::fabs(b - a) <= abs_tol; } -#ifdef GGML_USE_CPU_HBM -#include -#endif - static void zeros(std::ofstream & file, size_t n) { char zero = 0; for (size_t i = 0; i < n; ++i) { @@ -1190,12 +1185,6 @@ struct llama_mlock { #endif }; -typedef void (*offload_func_t)(struct ggml_tensor * tensor); - -static void ggml_offload_nop(struct ggml_tensor * tensor) { - (void) tensor; -} - static std::string llama_token_to_piece(const struct llama_context * ctx, llama_token token) { std::vector result(8, 0); const int n_tokens = llama_token_to_piece(llama_get_model(ctx), token, result.data(), result.size()); @@ -1211,19 +1200,14 @@ static std::string llama_token_to_piece(const struct llama_context * ctx, llama_ return std::string(result.data(), result.size()); } -static ggml_backend_buffer_type_t llama_default_buffer_type(int n_gpu_layers) { +static ggml_backend_buffer_type_t llama_default_buffer_type_cpu(bool host_buffer) { ggml_backend_buffer_type_t buft = nullptr; -#ifdef GGML_USE_METAL - if (n_gpu_layers > 0) { - buft = ggml_backend_metal_buffer_type(); +#if defined(GGML_USE_CUBLAS) + // host buffers should only be used when data is expected to be copied to/from the GPU + if (host_buffer) { + buft = ggml_backend_cuda_host_buffer_type(); } -#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (n_gpu_layers > 0) { - buft = ggml_backend_cuda_buffer_type(0); - } -#elif defined(GGML_USE_CUBLAS) - buft = ggml_backend_cuda_host_buffer_type(); #elif defined(GGML_USE_CPU_HBM) buft = ggml_backend_cpu_hbm_buffer_type(); #endif @@ -1231,10 +1215,45 @@ static ggml_backend_buffer_type_t llama_default_buffer_type(int n_gpu_layers) { if (buft == nullptr) { buft = ggml_backend_cpu_buffer_type(); } - return buft; - GGML_UNUSED(n_gpu_layers); + GGML_UNUSED(host_buffer); +} + +static ggml_backend_buffer_type_t llama_default_buffer_type_offload(int gpu) { + ggml_backend_buffer_type_t buft = nullptr; + +#ifdef GGML_USE_METAL + buft = ggml_backend_metal_buffer_type(); +#elif defined(GGML_USE_CUBLAS) + buft = ggml_backend_cuda_buffer_type(gpu); +#elif defined(GGML_USE_CLBLAST) + buft = ggml_backend_opencl_buffer_type(); +#endif + + if (buft == nullptr) { + buft = llama_default_buffer_type_cpu(true); + } + return buft; + + GGML_UNUSED(gpu); +} + +static ggml_backend_buffer_type_t llama_default_buffer_type_split(int fallback_gpu, const float * tensor_split) { + ggml_backend_buffer_type_t buft = nullptr; + +#ifdef GGML_USE_CUBLAS + if (ggml_backend_cuda_get_device_count() > 1) { + buft = ggml_backend_cuda_split_buffer_type(tensor_split); + } +#endif + + if (buft == nullptr) { + buft = llama_default_buffer_type_offload(fallback_gpu); + } + return buft; + + GGML_UNUSED(tensor_split); } // @@ -1445,24 +1464,24 @@ struct llama_kv_cache { std::vector k_l; // per layer std::vector v_l; - struct ggml_context * ctx = NULL; + std::vector ctxs; + std::vector bufs; - ggml_backend_buffer_t buf = NULL; + size_t total_size() const { + size_t size = 0; + for (ggml_backend_buffer_t buf : bufs) { + size += ggml_backend_buffer_get_size(buf); + } + return size; + } ~llama_kv_cache() { -#if 
defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (ggml_cublas_loaded()) { - for (size_t i = 0; i < k_l.size(); ++i) { - ggml_cuda_free_data(k_l[i]); - ggml_cuda_free_data(v_l[i]); - } - } -#endif - if (ctx) { + for (struct ggml_context * ctx : ctxs) { ggml_free(ctx); } - - ggml_backend_buffer_free(buf); + for (ggml_backend_buffer_t buf : bufs) { + ggml_backend_buffer_free(buf); + } } }; @@ -1539,16 +1558,32 @@ struct llama_model { std::vector layers; + llama_split_mode split_mode; + int main_gpu; int n_gpu_layers; // gguf metadata std::unordered_map gguf_kv; - // context - struct ggml_context * ctx = NULL; + // layer -> buffer type mapping + struct layer_buft { + layer_buft() : buft_matrix(nullptr), buft(nullptr) {} + layer_buft(ggml_backend_buffer_type_t matrix) : buft_matrix(matrix), buft(matrix) {} + layer_buft(ggml_backend_buffer_type_t matrix, ggml_backend_buffer_type_t other) : buft_matrix(matrix), buft(other) {} - // the model memory buffer - ggml_backend_buffer_t buf = NULL; + ggml_backend_buffer_type_t buft_matrix; // matrices only - used by split buffers and backends that support only matrix multiplication + ggml_backend_buffer_type_t buft; // everything else + }; + + layer_buft buft_input; + layer_buft buft_output; + std::vector buft_layer; + + // contexts where the model tensors metadata is stored + std::vector ctxs; + + // the model memory buffers for the tensor data + std::vector bufs; // model memory mapped file std::unique_ptr mapping; @@ -1564,39 +1599,32 @@ struct llama_model { int64_t t_start_us = 0; ~llama_model() { -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (ggml_cublas_loaded()) { - for (size_t i = 0; i < tensors_by_name.size(); ++i) { - ggml_cuda_free_data(tensors_by_name[i].second); - } - ggml_cuda_free_scratch(); - } -#endif - -#if defined(GGML_USE_CLBLAST) - for (size_t i = 0; i < tensors_by_name.size(); ++i) { - ggml_cl_free_data(tensors_by_name[i].second); - } -#endif - if (ctx) { + for (struct ggml_context * ctx : ctxs) { ggml_free(ctx); } - - ggml_backend_buffer_free(buf); + for (ggml_backend_buffer_t buf : bufs) { + ggml_backend_buffer_free(buf); + } } }; struct llama_context { llama_context(const llama_model & model) : model(model), t_start_us(model.t_start_us), t_load_us(model.t_load_us) {} ~llama_context() { - ggml_allocr_free(alloc); - ggml_backend_buffer_free(buf_alloc); - ggml_backend_free(backend); + ggml_backend_sched_free(sched); + + for (ggml_backend_t backend : backends) { + ggml_backend_free(backend); + } } llama_cparams cparams; - ggml_backend_t backend = nullptr; + std::vector backends; +#ifdef GGML_USE_METAL + ggml_backend_t backend_metal = nullptr; +#endif + ggml_backend_t backend_cpu = nullptr; const llama_model & model; @@ -1630,8 +1658,9 @@ struct llama_context { // memory buffers used to evaluate the model std::vector buf_compute_meta; - ggml_backend_buffer_t buf_alloc = NULL; - ggml_allocr * alloc = NULL; + ggml_backend_sched_t sched = nullptr; + // allocator for the input tensors + ggml_tallocr * alloc = nullptr; // temporary buffer for copying data to/from the backend std::vector> buf_copy; @@ -1646,16 +1675,17 @@ struct llama_context { // static bool llama_kv_cache_init( - const struct llama_hparams & hparams, struct llama_kv_cache & cache, + const llama_model & model, ggml_type ktype, ggml_type vtype, uint32_t n_ctx, - int n_gpu_layers, bool offload) { + const struct llama_hparams & hparams = model.hparams; + const uint32_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const uint32_t 
n_embd_v_gqa = hparams.n_embd_v_gqa(); - const uint32_t n_layer = hparams.n_layer; + const int64_t n_layer = hparams.n_layer; cache.has_shift = false; @@ -1666,62 +1696,65 @@ static bool llama_kv_cache_init( cache.cells.clear(); cache.cells.resize(n_ctx); - struct ggml_init_params params; - params.mem_size = 2u*n_layer*ggml_tensor_overhead(); - params.mem_buffer = NULL; - params.no_alloc = true; +#ifdef GGML_USE_CLBLAST + offload = false; +#endif - cache.ctx = ggml_init(params); + // count used buffer types + std::map buft_layer_count; + if (offload) { + for (int64_t i = 0; i < n_layer; ++i) { + buft_layer_count[model.buft_layer[i].buft]++; + } + } else { + buft_layer_count[llama_default_buffer_type_cpu(true)] = n_layer; + } - size_t vram_kv_cache = 0; - - if (!cache.ctx) { - LLAMA_LOG_ERROR("%s: failed to allocate memory for kv cache\n", __func__); - return false; + // create a context for each buffer type + std::map ctx_map; + for (auto & it : buft_layer_count) { + int n_layers = it.second; + struct ggml_init_params params = { + /*.mem_size =*/ 2u*n_layers*ggml_tensor_overhead(), + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ true, + }; + ggml_context * ctx = ggml_init(params); + if (!ctx) { + LLAMA_LOG_ERROR("%s: failed to allocate context for kv cache\n", __func__); + return false; + } + ctx_map[it.first] = ctx; + cache.ctxs.push_back(ctx); } cache.k_l.reserve(n_layer); cache.v_l.reserve(n_layer); - const int i_gpu_start = (int) n_layer - n_gpu_layers; - for (int i = 0; i < (int) n_layer; i++) { - ggml_tensor * k = ggml_new_tensor_1d(cache.ctx, ktype, n_embd_k_gqa*n_ctx); - ggml_tensor * v = ggml_new_tensor_1d(cache.ctx, vtype, n_embd_v_gqa*n_ctx); + struct ggml_context * ctx = offload ? ctx_map.at(model.buft_layer[i].buft) : cache.ctxs.front(); + ggml_tensor * k = ggml_new_tensor_1d(ctx, ktype, n_embd_k_gqa*n_ctx); + ggml_tensor * v = ggml_new_tensor_1d(ctx, vtype, n_embd_v_gqa*n_ctx); ggml_format_name(k, "cache_k_l%d", i); ggml_format_name(v, "cache_v_l%d", i); cache.k_l.push_back(k); cache.v_l.push_back(v); -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (i >= i_gpu_start) { - if (offload) { - ggml_cuda_assign_buffers_no_scratch(k); - ggml_cuda_assign_buffers_no_scratch(v); - vram_kv_cache += ggml_nbytes(k); - vram_kv_cache += ggml_nbytes(v); - // HACK: mark tensor as allocated - k->data = v->data = (void *)(uintptr_t)1; - } + } + + // allocate tensors and initialize the buffers to avoid NaNs in the padding + for (auto it : ctx_map) { + ggml_backend_buffer_type_t buft = it.first; + ggml_context * ctx = it.second; + ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, buft); + if (!buf) { + LLAMA_LOG_ERROR("%s: failed to allocate buffer for kv cache\n", __func__); + return false; } -#endif // GGML_USE_CUBLAS + ggml_backend_buffer_clear(buf, 0); + LLAMA_LOG_INFO("%s: %10s KV buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(buf), ggml_backend_buffer_get_size(buf)/1024.0/1024.0); + cache.bufs.push_back(buf); } - // allocate tensors - cache.buf = ggml_backend_alloc_ctx_tensors_from_buft(cache.ctx, llama_default_buffer_type(n_gpu_layers)); - - // buf may be NULL with full offload - if (cache.buf) { - // initialize the buffer to avoid NaNs in the padding - ggml_backend_buffer_clear(cache.buf, 0); - } - - if (vram_kv_cache > 0) { - LLAMA_LOG_INFO("%s: VRAM kv self = %.2f MB\n", __func__, vram_kv_cache / 1024.0 / 1024.0); - } - - GGML_UNUSED(i_gpu_start); - GGML_UNUSED(offload); - return true; } @@ -2354,9 +2387,8 @@ struct 
llama_model_loader { return get_tensor_meta(get_tensor_name(i)); } - struct ggml_tensor * create_tensor_for(struct ggml_context * ctx, struct ggml_tensor * meta, ggml_backend_type backend) { + struct ggml_tensor * create_tensor_for(struct ggml_context * ctx, struct ggml_tensor * meta) { struct ggml_tensor * tensor = ggml_dup_tensor(ctx, meta); - tensor->backend = backend; // TODO: ggml_set_backend ggml_set_name(tensor, ggml_get_name(meta)); n_created++; @@ -2364,7 +2396,7 @@ struct llama_model_loader { return tensor; } - struct ggml_tensor * create_tensor(struct ggml_context * ctx, const std::string & name, const std::vector & ne, ggml_backend_type backend, bool required = true) { + struct ggml_tensor * create_tensor(struct ggml_context * ctx, const std::string & name, const std::vector & ne, bool required = true) { struct ggml_tensor * cur = ggml_get_tensor(ctx_meta, name.c_str()); if (cur == NULL) { @@ -2374,12 +2406,6 @@ struct llama_model_loader { throw std::runtime_error(format("%s: tensor '%s' not found", __func__, name.c_str())); } - if (backend == GGML_BACKEND_GPU_SPLIT) { - if (ne.size() == 1) { - throw std::runtime_error(format("%s: 1-dimensional tensor '%s' cannot be split on the GPU", __func__, name.c_str())); - } - } - { bool is_ok = true; for (size_t i = 0; i < ne.size(); ++i) { @@ -2397,7 +2423,7 @@ struct llama_model_loader { } } - return create_tensor_for(ctx, cur, backend); + return create_tensor_for(ctx, cur); } void done_getting_tensors() const { @@ -2416,26 +2442,36 @@ struct llama_model_loader { return gguf_get_data_offset(ctx_gguf) + gguf_get_tensor_offset(ctx_gguf, idx); } - void init_mapping(bool prefetch = true) { - /* - // prefetch only CPU tensors - if (use_mmap) { - size_t size_pref = 0; // prefetch - - for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { - struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); - if (cur->backend == GGML_BACKEND_CPU) { - size_t tensor_end = gguf_get_tensor_offset(ctx_gguf, i) + ggml_nbytes(cur); - size_pref = std::max(size_pref, tensor_end); - } - } - mapping.reset(new llama_mmap(&file, gguf_get_data_offset(ctx_gguf) + size_pref, ggml_is_numa())); - } - */ + void init_mapping(bool prefetch = true, llama_mlock * lmlock = nullptr) { // prefetch the whole file - all the data is needed anyway if (use_mmap) { mapping.reset(new llama_mmap(&file, prefetch ? 
-1 : 0, ggml_is_numa())); } + + // compute the total size of all tensors for progress reporting + for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { + struct ggml_tensor * cur = ggml_get_tensor(ctx_meta, gguf_get_tensor_name(ctx_gguf, i)); + size_data += ggml_nbytes(cur); + } + + if (use_mmap && mapping) { + if (lmlock) { + lmlock->init(mapping->addr); + } + mmap_used_first = mapping->size; + } + } + + void get_mapping_range(size_t * first, size_t * last, ggml_context * ctx) const { + GGML_ASSERT(mapping); + + *first = mapping->size; + *last = 0; + for (ggml_tensor * tensor = ggml_get_first_tensor(ctx); tensor; tensor = ggml_get_next_tensor(ctx, tensor)) { + const size_t offs = file_offset(ggml_get_name(tensor)); + *first = std::min(*first, offs); + *last = std::max(*last, offs + ggml_nbytes(tensor)); + } } // for backwards compatibility, does not support ggml-backend @@ -2443,8 +2479,11 @@ struct llama_model_loader { const size_t offs = file_offset(ggml_get_name(cur)); if (use_mmap && mapping) { - GGML_ASSERT(cur->data == nullptr); - cur->data = (uint8_t *)mapping->addr + offs; + if (cur->data == nullptr) { + cur->data = (uint8_t *)mapping->addr + offs; + } else { + memcpy(cur->data, (uint8_t *)mapping->addr + offs, ggml_nbytes(cur)); + } } else { GGML_ASSERT(cur->data != nullptr); file.seek(offs, SEEK_SET); @@ -2452,37 +2491,23 @@ struct llama_model_loader { } } + size_t size_done = 0; + size_t size_data = 0; + size_t mmap_used_first = -1; + size_t mmap_used_last = 0; + // Returns false if cancelled by progress_callback - bool load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) const { - size_t size_data = 0; - - for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { - struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); - size_data += ggml_nbytes(cur); - } - - if (use_mmap && buf_mmap) { - if (lmlock) { - lmlock->init(mapping->addr); - } - } - -#if (defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST)) || defined(GGML_USE_CLBLAST) - const bool legacy_offload = true; -#else - const bool legacy_offload = false; -#endif + bool load_all_data(struct ggml_context * ctx, llama_progress_callback progress_callback, void * progress_callback_user_data, ggml_backend_buffer_t buf_mmap, llama_mlock * lmlock) { + GGML_ASSERT(size_data != 0 && "call init_mapping() first"); std::vector> read_buf; - size_t size_done = 0; - - size_t mmap_first = -1; - size_t mmap_last = 0; - for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); - GGML_ASSERT(cur); // unused tensors should have been caught by load_data already + if (!cur) { + // some tensors may be allocated in a different context + continue; + } if (progress_callback) { if (!progress_callback((float) size_done / size_data, progress_callback_user_data)) { @@ -2492,67 +2517,48 @@ struct llama_model_loader { const size_t offs = file_offset(ggml_get_name(cur)); - if (!legacy_offload || cur->backend == GGML_BACKEND_CPU) { - if (use_mmap && mapping) { - if (buf_mmap) { - ggml_backend_tensor_alloc(buf_mmap, cur, (uint8_t *) mapping->addr + offs); - if (lmlock) { - lmlock->grow_to(offs + ggml_nbytes(cur)); - } - mmap_first = std::min(mmap_first, offs); - mmap_last = std::max(mmap_last, offs + ggml_nbytes(cur)); - } else { - ggml_backend_tensor_set(cur, (uint8_t *) mapping->addr + offs, 0, 
ggml_nbytes(cur)); + if (use_mmap && mapping) { + if (buf_mmap && cur->data == nullptr) { + ggml_backend_tensor_alloc(buf_mmap, cur, (uint8_t *) mapping->addr + offs); + if (lmlock) { + lmlock->grow_to(offs + ggml_nbytes(cur)); } + mmap_used_first = std::min(mmap_used_first, offs); + mmap_used_last = std::max(mmap_used_last, offs + ggml_nbytes(cur)); } else { - if (ggml_backend_buffer_is_host(cur->buffer)) { - file.seek(offs, SEEK_SET); - file.read_raw(cur->data, ggml_nbytes(cur)); - } else { - read_buf.resize(ggml_nbytes(cur)); - file.seek(offs, SEEK_SET); - file.read_raw(read_buf.data(), ggml_nbytes(cur)); - ggml_backend_tensor_set(cur, read_buf.data(), 0, ggml_nbytes(cur)); - } + ggml_backend_tensor_set(cur, (uint8_t *) mapping->addr + offs, 0, ggml_nbytes(cur)); } } else { - // HACK: mark tensor as allocated - cur->data = (void *)(uintptr_t)1; - void * data; - if (use_mmap && mapping) { - data = (uint8_t *) mapping->addr + offs; + if (ggml_backend_buffer_is_host(cur->buffer)) { + file.seek(offs, SEEK_SET); + file.read_raw(cur->data, ggml_nbytes(cur)); } else { read_buf.resize(ggml_nbytes(cur)); file.seek(offs, SEEK_SET); file.read_raw(read_buf.data(), ggml_nbytes(cur)); - data = read_buf.data(); + ggml_backend_tensor_set(cur, read_buf.data(), 0, ggml_nbytes(cur)); } - -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - ggml_cuda_transform_tensor(data, cur); -#elif defined(GGML_USE_CLBLAST) - GGML_ASSERT(cur->backend == GGML_BACKEND_GPU); - ggml_cl_transform_tensor(data, cur); -#else - GGML_ASSERT(!"GPU tensor without a GPU backend"); - GGML_UNUSED(data); -#endif } size_done += ggml_nbytes(cur); } - // unmap offloaded tensors and metadata - if (use_mmap && mapping) { - mapping->unmap_fragment(0, mmap_first); - mapping->unmap_fragment(mmap_last, mapping->size); + // check if this is the last call and do final cleanup + if (size_done >= size_data) { + // unmap offloaded tensors and metadata + if (use_mmap && mapping) { + mapping->unmap_fragment(0, mmap_used_first); + if (mmap_used_last != 0) { + mapping->unmap_fragment(mmap_used_last, mapping->size); + } + } + if (progress_callback) { + // Even though the model is done loading, we still honor + // cancellation since we need to free allocations. + return progress_callback(1.0f, progress_callback_user_data); + } } - if (progress_callback) { - // Even though the model is done loading, we still honor - // cancellation since we need to free allocations. 
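// caller-side sketch (hypothetical wiring; the real call sites are in
// llm_load_tensors): because size_done and size_data are now tracked across
// calls, load_all_data can be invoked once per model context, and the
// unmap + final progress callback run only on the call that completes the
// load (size_done >= size_data). buf_mmap and lmlock here are the mmap
// buffer and mlock prepared by init_mapping.
for (ggml_context * ctx : model.ctxs) {
    if (!ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, lmlock)) {
        return false; // cancelled from the progress callback
    }
}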
- return progress_callback(1.0f, progress_callback_user_data); - } return true; } }; @@ -3181,6 +3187,7 @@ static bool llm_load_tensors( llama_model_loader & ml, llama_model & model, int n_gpu_layers, + enum llama_split_mode split_mode, int main_gpu, const float * tensor_split, bool use_mlock, @@ -3188,702 +3195,563 @@ static bool llm_load_tensors( void * progress_callback_user_data) { model.t_start_us = ggml_time_us(); - auto & ctx = model.ctx; auto & hparams = model.hparams; + model.split_mode = split_mode; + model.main_gpu = main_gpu; model.n_gpu_layers = n_gpu_layers; - size_t ctx_size = ggml_tensor_overhead() * ml.n_tensors; + const int64_t n_layer = hparams.n_layer; + const int64_t i_gpu_start = std::max((int64_t) hparams.n_layer - n_gpu_layers, (int64_t) 0); - LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MiB\n", __func__, ctx_size/1024.0/1024.0); + // there is very little benefit to offloading the input layer, so always keep it on the CPU + model.buft_input = llama_default_buffer_type_cpu(true); - // create the ggml context + model.buft_layer.resize(n_layer); + + // assign cpu layers + for (int64_t i = 0; i < i_gpu_start; ++i) { + model.buft_layer[i] = llama_default_buffer_type_cpu(true); + } + +#ifdef GGML_USE_CUBLAS + if (split_mode == LLAMA_SPLIT_LAYER) { + // calculate the split points + int device_count = ggml_backend_cuda_get_device_count(); + bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + device_count, [](float x) { return x == 0.0f; }); + float splits[GGML_CUDA_MAX_DEVICES]; + if (all_zero) { + // default split, by free memory + for (int i = 0; i < device_count; ++i) { + size_t total; + size_t free; + ggml_backend_cuda_get_device_memory(i, &total, &free); + splits[i] = free; + } + } else { + std::copy(tensor_split, tensor_split + device_count, splits); + } + + // sum and normalize the splits to get the split points + float split_sum = 0.0f; + for (int i = 0; i < device_count; ++i) { + split_sum += splits[i]; + splits[i] = split_sum; + } + for (int i = 0; i < device_count; ++i) { + splits[i] /= split_sum; + } + + // assign the repeating layers to the devices according to the splits + int act_gpu_layers = std::min(n_gpu_layers, (int)n_layer + 1); + for (int64_t i = i_gpu_start; i < n_layer; ++i) { + int layer_gpu = std::upper_bound(splits, splits + device_count, float(i - i_gpu_start)/act_gpu_layers) - splits; + model.buft_layer[i] = llama_default_buffer_type_offload(layer_gpu); + } + // assign the output layer + if (n_gpu_layers > n_layer) { + int layer_gpu = std::upper_bound(splits, splits + device_count, float(act_gpu_layers - 1)/act_gpu_layers) - splits; + model.buft_output = llama_default_buffer_type_offload(layer_gpu); + } else { + model.buft_output = llama_default_buffer_type_cpu(true); + } + } else +#endif { + ggml_backend_buffer_type_t split_buft; + if (split_mode == LLAMA_SPLIT_ROW) { + split_buft = llama_default_buffer_type_split(main_gpu, tensor_split); + } else { + // LLAMA_SPLIT_NONE or LLAMA_SPLIT_LAYER in backends where it is not supported + split_buft = llama_default_buffer_type_offload(main_gpu); + } + // assign the repeating layers + for (int64_t i = i_gpu_start; i < n_layer; ++i) { + model.buft_layer[i] = { + split_buft, + llama_default_buffer_type_offload(main_gpu) + }; + } + // assign the output layer + if (n_gpu_layers > n_layer) { + model.buft_output = { + split_buft, + llama_default_buffer_type_offload(main_gpu) + }; + } else { + model.buft_output = llama_default_buffer_type_cpu(true); + } + } + + // count used buffer 
types + std::map buft_layer_count; + buft_layer_count[model.buft_input.buft]++; + buft_layer_count[model.buft_input.buft_matrix]++; + buft_layer_count[model.buft_output.buft]++; + buft_layer_count[model.buft_output.buft_matrix]++; + for (int64_t i = 0; i < n_layer; ++i) { + buft_layer_count[model.buft_layer[i].buft]++; + buft_layer_count[model.buft_layer[i].buft_matrix]++; + } + + // create one context per buffer type + size_t ctx_size = ggml_tensor_overhead()*ml.n_tensors; + std::map ctx_map; + for (auto & it : buft_layer_count) { struct ggml_init_params params = { /*.mem_size =*/ ctx_size, /*.mem_buffer =*/ NULL, /*.no_alloc =*/ true, }; - - model.ctx = ggml_init(params); - if (!model.ctx) { - throw std::runtime_error(format("ggml_init() failed")); + ggml_context * ctx = ggml_init(params); + if (!ctx) { + throw std::runtime_error(format("failed to create context")); } + ctx_map[it.first] = ctx; + model.ctxs.push_back(ctx); } - (void) main_gpu; - - enum ggml_backend_type llama_backend_offload = GGML_BACKEND_CPU; - enum ggml_backend_type llama_backend_offload_split = GGML_BACKEND_CPU; - -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (ggml_cublas_loaded()) { - LLAMA_LOG_INFO("%s: using " GGML_CUDA_NAME " for GPU acceleration\n", __func__); - ggml_cuda_set_main_device(main_gpu); - - llama_backend_offload = GGML_BACKEND_GPU; - llama_backend_offload_split = GGML_BACKEND_GPU_SPLIT; - } -#elif defined(GGML_USE_CLBLAST) - LLAMA_LOG_INFO("%s: using OpenCL for GPU acceleration\n", __func__); - llama_backend_offload = GGML_BACKEND_GPU; - llama_backend_offload_split = GGML_BACKEND_GPU; -#endif + LLAMA_LOG_INFO("%s: ggml ctx size = %7.2f MiB\n", __func__, model.ctxs.size()*ctx_size/1024.0/1024.0); // create tensors for the weights { const int64_t n_embd = hparams.n_embd; const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); - const int64_t n_layer = hparams.n_layer; + const int64_t n_embd_gqa = n_embd_v_gqa; const int64_t n_vocab = hparams.n_vocab; + const int64_t n_ff = hparams.n_ff; + + GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); + + ggml_context * ctx_input = ctx_map.at(model.buft_input.buft); + ggml_context * ctx_output = ctx_map.at(model.buft_output.buft); + ggml_context * ctx_output_split = ctx_map.at(model.buft_output.buft_matrix); + auto ctx_for_layer = [&](int i) { return ctx_map.at(model.buft_layer[i].buft); }; + auto ctx_for_layer_split = [&](int i) { return ctx_map.at(model.buft_layer[i].buft_matrix); }; + + model.layers.resize(n_layer); const auto tn = LLM_TN(model.arch); switch (model.arch) { case LLM_ARCH_LLAMA: case LLM_ARCH_REFACT: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, 
tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); - layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); // optional bias tensors - layer.bq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, backend, false); - layer.bk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, backend, false); - layer.bv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, backend, false); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend, false); + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}, false); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}, false); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}, false); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, false); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_gate_inp = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE_INP, "weight", i), {n_embd}, backend, false); + layer.ffn_gate_inp = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_GATE_INP, "weight", i), {n_embd}, false); if (layer.ffn_gate_inp == nullptr) { GGML_ASSERT(hparams.n_expert == 0); GGML_ASSERT(hparams.n_expert_used == 0); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, 
n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } else { GGML_ASSERT(hparams.n_expert > 0); GGML_ASSERT(hparams.n_expert_used > 0); // MoE branch for (uint32_t x = 0; x < hparams.n_expert; ++x) { - layer.ffn_gate_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE_EXP, "weight", i, x), {n_embd, n_ff}, backend_split); - layer.ffn_down_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN_EXP, "weight", i, x), { n_ff, n_embd}, backend_split); - layer.ffn_up_exp[x] = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP_EXP, "weight", i, x), {n_embd, n_ff}, backend_split); + layer.ffn_gate_exp[x] = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE_EXP, "weight", i, x), {n_embd, n_ff}); + layer.ffn_down_exp[x] = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN_EXP, "weight", i, x), { n_ff, n_embd}); + layer.ffn_up_exp[x] = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP_EXP, "weight", i, x), {n_embd, n_ff}); } } } } break; case LLM_ARCH_BAICHUAN: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? 
GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); - layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; case LLM_ARCH_FALCON: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < 
n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); if (gguf_find_tensor(ml.ctx_gguf, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i).c_str()) >= 0) { - layer.attn_norm_2 = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i), {n_embd}, backend); - layer.attn_norm_2_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM_2, "bias", i), {n_embd}, backend); + layer.attn_norm_2 = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM_2, "weight", i), {n_embd}); + layer.attn_norm_2_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM_2, "bias", i), {n_embd}); } - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; case LLM_ARCH_STARCODER: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - model.pos_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = 
ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; case LLM_ARCH_PERSIMMON: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; 
- - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); - const int i_gpu_start = n_layer - n_gpu_layers; - model.layers.resize(n_layer); - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); - layer.attn_q_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q_NORM, "weight", i), {64}, backend); - layer.attn_q_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q_NORM, "bias", i), {64}, backend); - layer.attn_k_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K_NORM, "weight", i), {64}, backend); - layer.attn_k_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K_NORM, "bias", i), {64}, backend); + + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); + + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, 
tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); + + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); + + layer.attn_q_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q_NORM, "weight", i), {64}); + layer.attn_q_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q_NORM, "bias", i), {64}); + + layer.attn_k_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K_NORM, "weight", i), {64}); + layer.attn_k_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K_NORM, "bias", i), {64}); } } break; case LLM_ARCH_BLOOM: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - model.tok_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}, GGML_BACKEND_CPU); - model.tok_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.tok_norm = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); + model.tok_norm_b = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? 
GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; case LLM_ARCH_MPT: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = 
ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); // AWQ ScaleActivation layer - layer.ffn_act = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_ACT, "scales", i), {n_ff}, backend, false); + layer.ffn_act = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_ACT, "scales", i), {n_ff}, false); } } break; case LLM_ARCH_STABLELM: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = 
n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - /* - llama_model_loader: - tensor 4: blk.0.attn_output.weight f16 [ 2560, 2560, 1, 1 ] - */ - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); - layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; case LLM_ARCH_QWEN: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + + // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + } - if (n_gpu_layers > 
int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - } - - const uint32_t n_ff = hparams.n_ff / 2; - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd * 3}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd * 3}, backend); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd*3}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd*3}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff/2}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff/2, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff/2}); } } break; case LLM_ARCH_PHI2: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); - model.output_b = ml.create_tensor(ctx, 
tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + model.output_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT, "bias"), {n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; case LLM_ARCH_PLAMO: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { - ggml_backend_type backend_norm; - ggml_backend_type backend_output; - - if 
(n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.wq = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}, backend_split); - layer.wk = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}, backend_split); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.ffn_gate = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}, backend_split); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; case LLM_ARCH_GPT2: { - model.tok_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}, GGML_BACKEND_CPU); - model.pos_embd = ml.create_tensor(ctx, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}, GGML_BACKEND_CPU); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); // output { - ggml_backend_type backend_norm; - 
ggml_backend_type backend_output; - - if (n_gpu_layers > int(n_layer)) { - backend_norm = llama_backend_offload; - backend_output = llama_backend_offload_split; - } else { - backend_norm = GGML_BACKEND_CPU; - backend_output = GGML_BACKEND_CPU; - } - - model.output_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}, backend_norm); - model.output_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, backend_norm); - model.output = ml.create_tensor(ctx, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}, backend_output); + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } - const uint32_t n_ff = hparams.n_ff; - const int64_t n_embd_gqa = n_embd_v_gqa; - GGML_ASSERT(n_embd_gqa == n_embd / hparams.n_gqa()); - GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); - - const int i_gpu_start = n_layer - n_gpu_layers; - - model.layers.resize(n_layer); - - for (uint32_t i = 0; i < n_layer; ++i) { - const ggml_backend_type backend = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload; // NOLINT - const ggml_backend_type backend_split = int(i) < i_gpu_start ? GGML_BACKEND_CPU : llama_backend_offload_split; // NOLINT + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}, backend); - layer.attn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, backend); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, backend_split); - layer.bqkv = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, backend); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}, backend_split); - layer.bo = ml.create_tensor(ctx, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, backend); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}, backend); - layer.ffn_norm_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, backend); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}, backend_split); - layer.ffn_down_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, backend); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = 
ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); - layer.ffn_up = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}, backend_split); - layer.ffn_up_b = ml.create_tensor(ctx, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, backend); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); } } break; default: @@ -3893,78 +3761,51 @@ static bool llm_load_tensors( ml.done_getting_tensors(); - ml.init_mapping(); + ml.init_mapping(true, use_mlock ? &model.mlock_mmap : nullptr); - // allocate tensors - size_t vram_weights = 0; - size_t buf_size = 0; + // create the backend buffers + std::vector> ctx_bufs; - ggml_backend_buffer_type_t buft = llama_default_buffer_type(n_gpu_layers); + for (auto & it : ctx_map) { + ggml_backend_buffer_type_t buft = it.first; + ggml_context * ctx = it.second; + ggml_backend_buffer_t buf = nullptr; - for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { - // GGML_BACKEND_GPU tensors are for CUDA and OpenCL only, which are handled separately without ggml-backend - if (t->backend == GGML_BACKEND_CPU) { - buf_size += GGML_PAD(ggml_backend_buft_get_alloc_size(buft, t), ggml_backend_buft_get_alignment(buft)); - } else { - vram_weights += ggml_nbytes(t); + // only the mmap region containing the tensors in the model is mapped to the backend buffer + // this is important for metal with apple silicon: if the entire model could be mapped to a metal buffer, then we could just use metal for all layers + // this allows using partial offloading when the model size exceeds the metal buffer size, but not the RAM size + if (ml.use_mmap && buft == llama_default_buffer_type_cpu(true)) { + size_t first, last; + ml.get_mapping_range(&first, &last, ctx); + buf = ggml_backend_cpu_buffer_from_ptr((char *) ml.mapping->addr + first, last - first); } - } - - // create backend buffer - ggml_backend_buffer_t buf_mmap = nullptr; - #ifdef GGML_USE_METAL - if (n_gpu_layers > 0) { - if (ml.use_mmap) { + else if (ml.use_mmap && buft == ggml_backend_metal_buffer_type()) { const size_t max_size = ggml_get_max_tensor_size(ctx); - model.buf = ggml_backend_metal_buffer_from_ptr(ml.mapping->addr, ml.mapping->size, max_size); - buf_mmap = model.buf; - } else { - model.buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_metal_buffer_type()); + size_t first, last; + ml.get_mapping_range(&first, &last, ctx); + buf = ggml_backend_metal_buffer_from_ptr((char *) ml.mapping->addr + first, last - first, max_size); } - } -#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) - // for testing only - if (n_gpu_layers > 0) { - model.buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cuda_buffer_type(0)); - } #endif - - if (model.buf == nullptr) { - // CPU backend, and indirectly CUDA and OpenCL - if (ml.use_mmap) { - model.buf = ggml_backend_cpu_buffer_from_ptr(ml.mapping->addr, ml.mapping->size); - buf_mmap = model.buf; - } else { - // allocate only CPU tensors - model.buf = ggml_backend_buft_alloc_buffer(buft, buf_size); - ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(model.buf); - for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != nullptr; t = ggml_get_next_tensor(ctx, t)) { - if (t->backend == GGML_BACKEND_CPU) { - ggml_tallocr_alloc(alloc, t); - } + else { + buf = ggml_backend_alloc_ctx_tensors_from_buft(ctx, buft); + if (buf != 
nullptr && use_mlock && ggml_backend_buffer_is_host(buf)) { + model.mlock_buf.init (ggml_backend_buffer_get_base(buf)); + model.mlock_buf.grow_to(ggml_backend_buffer_get_size(buf)); } - ggml_tallocr_free(alloc); } - } - - if (use_mlock && ggml_backend_buffer_is_host(model.buf)) { - model.mlock_buf.init (ggml_backend_buffer_get_base(model.buf)); - model.mlock_buf.grow_to(ggml_backend_buffer_get_size(model.buf)); + if (buf == nullptr) { + throw std::runtime_error("failed to allocate buffer"); + } + // indicate that this buffer contains weights + // this is used by ggml_backend_sched to improve op scheduling -> ops that use a weight are preferably scheduled to the backend that contains the weight + ggml_backend_buffer_set_usage(buf, GGML_BACKEND_BUFFER_USAGE_WEIGHTS); + model.bufs.push_back(buf); + ctx_bufs.emplace_back(ctx, buf); } // print memory requirements { - size_t sys_mem_required = ctx_size + buf_size; - - if (sys_mem_required > 0) { - LLAMA_LOG_INFO("%s: system memory used = %7.2f MiB\n", __func__, sys_mem_required / 1024.0 / 1024.0); - } - if (vram_weights > 0) { - LLAMA_LOG_INFO("%s: VRAM used = %7.2f MiB\n", __func__, vram_weights / 1024.0 / 1024.0); - } - -#if (defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST)) || defined(GGML_USE_CLBLAST) const int n_gpu = std::min(n_gpu_layers, int(hparams.n_layer)); LLAMA_LOG_INFO("%s: offloading %d repeating layers to GPU\n", __func__, n_gpu); @@ -3976,23 +3817,26 @@ static bool llm_load_tensors( const int max_offloadable_layers = hparams.n_layer + 1; LLAMA_LOG_INFO("%s: offloaded %d/%d layers to GPU\n", __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers); -#endif // defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) - } -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - ggml_cuda_set_tensor_split(tensor_split); -#else - GGML_UNUSED(tensor_split); -#endif // GGML_USE_CUBLAS + for (ggml_backend_buffer_t buf : model.bufs) { + LLAMA_LOG_INFO("%s: %10s buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(buf), ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0); + } + } // populate tensors_by_name - for (int i = 0; i < ml.n_tensors; ++i) { - struct ggml_tensor * cur = ggml_get_tensor(ctx, ml.get_tensor_name(i)); - model.tensors_by_name.emplace_back(ggml_get_name(cur), cur); + for (ggml_context * ctx : model.ctxs) { + for (auto * cur = ggml_get_first_tensor(ctx); cur != NULL; cur = ggml_get_next_tensor(ctx, cur)) { + model.tensors_by_name.emplace_back(ggml_get_name(cur), cur); + } } - if (!ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf_mmap, use_mlock ? &model.mlock_mmap : NULL)) { - return false; + // load tensor data + for (auto & it : ctx_bufs) { + ggml_context * ctx = it.first; + ggml_backend_buffer_t buf = it.second; + if (!ml.load_all_data(ctx, progress_callback, progress_callback_user_data, buf, use_mlock ? 
&model.mlock_mmap : NULL)) { + return false; + } } model.mapping = std::move(ml.mapping); @@ -4026,13 +3870,13 @@ static int llama_model_load(const std::string & fname, llama_model & model, cons } if (!llm_load_tensors( - ml, model, params.n_gpu_layers, params.main_gpu, params.tensor_split, params.use_mlock, + ml, model, params.n_gpu_layers, params.split_mode, params.main_gpu, params.tensor_split, params.use_mlock, params.progress_callback, params.progress_callback_user_data )) { return -2; } } catch (const std::exception & err) { - LLAMA_LOG_ERROR("error loading model: %s\n", err.what()); + LLAMA_LOG_ERROR("%s: error loading model: %s\n", __func__, err.what()); return -1; } @@ -4476,8 +4320,6 @@ struct llm_build_context { do_rope_shift (worst_case || kv_self.has_shift), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { - GGML_ASSERT(!!kv_self.ctx); - // all initializations should be done in init() } @@ -4557,6 +4399,12 @@ struct llm_build_context { cb(Vcur, "Vcur", il); } + // these nodes are added to the graph together so that they are not reordered + // by doing so, the number of splits in the graph is reduced + ggml_build_forward_expand(gf, Qcur); + ggml_build_forward_expand(gf, Kcur); + ggml_build_forward_expand(gf, Vcur); + Qcur = ggml_rope_custom( ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, @@ -6077,199 +5925,13 @@ struct llm_build_context { } }; -// -// tensor offloading helpers -// -// TODO: will be removed with backend v2 - -enum llm_offload_func_e { - OFFLOAD_FUNC_NOP, - OFFLOAD_FUNC, - OFFLOAD_FUNC_FRC, // force offload - OFFLOAD_FUNC_KQV, - OFFLOAD_FUNC_NR, - OFFLOAD_FUNC_EMB, // embeddings - OFFLOAD_FUNC_OUT, -}; - -// TODO: will be removed with backend v2 -struct llm_offload_trie { - struct node { - ~node() { - for (int i = 0; i < 256; ++i) { - if (children[i]) { - delete children[i]; - } - } - } - - node * children[256] = { nullptr }; - llm_offload_func_e func = OFFLOAD_FUNC_NOP; - }; - - llm_offload_trie() { - root = new node; - } - - llm_offload_trie(const std::unordered_map & map) { - root = new node; - - for (const auto & kv : map) { - add(kv.first, kv.second); - } - } - - ~llm_offload_trie() { - delete root; - } - - void add(const char * name, llm_offload_func_e func) { - node * cur = root; - - for (int i = 0; ; ++i) { - const uint8_t c = name[i]; - - if (!c) { - break; - } - - if (!cur->children[c]) { - cur->children[c] = new node; - } - - cur = cur->children[c]; - } - - cur->func = func; - } - - llm_offload_func_e find(const char * name) const { - const node * cur = root; - - for (int i = 0; ; ++i) { - const uint8_t c = name[i]; - - if (!c) { - break; - } - - if (!cur->children[c]) { - return OFFLOAD_FUNC_NOP; - } - - cur = cur->children[c]; - } - - return cur->func; - } - - node * root = nullptr; -}; - -// TODO: will be removed with backend v2 -static const std::unordered_map k_offload_map = { - //{ "inp_tokens", OFFLOAD_FUNC_NR }, // TODO: missing K-quants get_rows kernel - //{ "inp_embd", OFFLOAD_FUNC_NR }, // TODO: missing K-quants get_rows kernel - { "pos_embd", OFFLOAD_FUNC_NR }, - - { "inp_pos", OFFLOAD_FUNC_FRC }, // this is often used for KQ ops (e.g. 
rope) - { "KQ_mask", OFFLOAD_FUNC_FRC }, - { "K_shift", OFFLOAD_FUNC_FRC }, - - { "K_shifted", OFFLOAD_FUNC }, - - { "inp_norm", OFFLOAD_FUNC_NR }, - { "inp_norm_w", OFFLOAD_FUNC_NR }, - { "inp_norm_wb", OFFLOAD_FUNC_NR }, - - { "norm", OFFLOAD_FUNC }, - { "norm_w", OFFLOAD_FUNC }, - { "norm_wb", OFFLOAD_FUNC }, - - { "attn_norm", OFFLOAD_FUNC }, - { "attn_norm_2", OFFLOAD_FUNC }, - - { "wqkv", OFFLOAD_FUNC_KQV }, - { "bqkv", OFFLOAD_FUNC_KQV }, - { "wqkv_clamped", OFFLOAD_FUNC_KQV }, - - { "tmpk", OFFLOAD_FUNC_KQV }, - { "tmpq", OFFLOAD_FUNC_KQV }, - { "tmpv", OFFLOAD_FUNC_KQV }, - { "Kcur", OFFLOAD_FUNC_KQV }, - { "Qcur", OFFLOAD_FUNC_KQV }, - { "Vcur", OFFLOAD_FUNC_KQV }, - - { "krot", OFFLOAD_FUNC_KQV }, - { "qrot", OFFLOAD_FUNC_KQV }, - { "kpass", OFFLOAD_FUNC_KQV }, - { "qpass", OFFLOAD_FUNC_KQV }, - { "krotated", OFFLOAD_FUNC_KQV }, - { "qrotated", OFFLOAD_FUNC_KQV }, - - { "q", OFFLOAD_FUNC_KQV }, - { "k", OFFLOAD_FUNC_KQV }, - { "kq", OFFLOAD_FUNC_KQV }, - { "kq_scaled", OFFLOAD_FUNC_KQV }, - { "kq_scaled_alibi", OFFLOAD_FUNC_KQV }, - { "kq_masked", OFFLOAD_FUNC_KQV }, - { "kq_soft_max", OFFLOAD_FUNC_KQV }, - { "kq_soft_max_ext", OFFLOAD_FUNC_KQV }, - { "v", OFFLOAD_FUNC_KQV }, - { "kqv", OFFLOAD_FUNC_KQV }, - { "kqv_merged", OFFLOAD_FUNC_KQV }, - { "kqv_merged_cont", OFFLOAD_FUNC_KQV }, - { "kqv_wo", OFFLOAD_FUNC_KQV }, - { "kqv_out", OFFLOAD_FUNC_KQV }, - - { "ffn_inp", OFFLOAD_FUNC }, - { "ffn_norm", OFFLOAD_FUNC }, - - { "ffn_up", OFFLOAD_FUNC }, - { "ffn_up_b", OFFLOAD_FUNC }, - { "ffn_gate", OFFLOAD_FUNC }, - { "ffn_gate_b", OFFLOAD_FUNC }, - { "ffn_gate_par", OFFLOAD_FUNC }, - { "ffn_act", OFFLOAD_FUNC }, - { "ffn_down", OFFLOAD_FUNC }, - { "ffn_down_b", OFFLOAD_FUNC }, - { "ffn_out", OFFLOAD_FUNC }, - - { "ffn_silu", OFFLOAD_FUNC }, - { "ffn_gelu", OFFLOAD_FUNC }, - { "ffn_relu", OFFLOAD_FUNC }, - { "ffn_sqr(relu)", OFFLOAD_FUNC }, - - { "ffn_moe_logits", OFFLOAD_FUNC }, - { "ffn_moe_probs", OFFLOAD_FUNC }, - { "ffn_moe_argsort", OFFLOAD_FUNC }, - { "ffn_moe_weights", OFFLOAD_FUNC }, - { "ffn_moe_weights_sum", OFFLOAD_FUNC }, - { "ffn_moe_weights_norm", OFFLOAD_FUNC }, - { "ffn_moe_weighted", OFFLOAD_FUNC }, - { "ffn_moe_up", OFFLOAD_FUNC }, - { "ffn_moe_gate", OFFLOAD_FUNC }, - { "ffn_moe_silu", OFFLOAD_FUNC }, - { "ffn_moe_gate_par", OFFLOAD_FUNC }, - { "ffn_moe_down", OFFLOAD_FUNC }, - { "ffn_moe_out", OFFLOAD_FUNC }, - - { "l_out", OFFLOAD_FUNC }, - - { "result_norm", OFFLOAD_FUNC_EMB }, - { "result_output_no_bias", OFFLOAD_FUNC_EMB }, - { "result_output", OFFLOAD_FUNC_OUT }, -}; - -static llm_offload_trie k_offload_func_trie(k_offload_map); - static struct ggml_cgraph * llama_build_graph( llama_context & lctx, const llama_batch & batch) { const auto & model = lctx.model; // check if we should build the worst-case graph (for memory measurement) - const bool worst_case = ggml_allocr_is_measure(lctx.alloc); + const bool worst_case = ggml_tallocr_is_measure(lctx.alloc); // keep track of the input that has already been allocated bool alloc_inp_tokens = false; @@ -6278,16 +5940,8 @@ static struct ggml_cgraph * llama_build_graph( bool alloc_inp_KQ_mask = false; bool alloc_inp_K_shift = false; -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - const bool do_offload = true; -#else - const bool do_offload = true; // TODO: set to false after finishing refactoring -#endif - - int n_non_view = 0; // number of non-view tensors that have been processed by the callback - // this callback allows us to apply custom logic to each tensor (e.g. 
ggml-alloc, offloading, etc.) - // TODO: will be removed with backend v2 + // TODO: improve handling of input and output tensors, then replace this with ggml_set_name llm_build_cb cb = [&](struct ggml_tensor * cur, const char * name, int il) { if (il >= 0) { ggml_format_name(cur, "%s-%d", name, il); @@ -6298,12 +5952,11 @@ static struct ggml_cgraph * llama_build_graph( // // allocate input tensors and set input data // - // TODO: will be removed with backend v2 if (!alloc_inp_tokens && strcmp(name, "inp_tokens") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc) && batch.token) { + if (!ggml_tallocr_is_measure(lctx.alloc) && batch.token) { const int64_t n_tokens = cur->ne[0]; ggml_backend_tensor_set(cur, batch.token, 0, n_tokens*ggml_element_size(cur)); @@ -6312,10 +5965,10 @@ static struct ggml_cgraph * llama_build_graph( alloc_inp_tokens = true; } - if (!alloc_inp_embd && strcmp(name, "inp_embd") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + if (!alloc_inp_embd && strcmp(name, "inp_embd") == 0 && batch.embd) { + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc) && batch.embd) { + if (!ggml_tallocr_is_measure(lctx.alloc) && batch.embd) { const int64_t n_embd = cur->ne[0]; const int64_t n_tokens = cur->ne[1]; @@ -6326,9 +5979,9 @@ static struct ggml_cgraph * llama_build_graph( } if (!alloc_inp_pos && strcmp(name, "inp_pos") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc) && batch.pos) { + if (!ggml_tallocr_is_measure(lctx.alloc) && batch.pos) { const int64_t n_tokens = cur->ne[0]; static_assert(std::is_same::value, "llama_pos must be int32_t"); @@ -6339,9 +5992,9 @@ static struct ggml_cgraph * llama_build_graph( } if (!alloc_inp_KQ_mask && strcmp(name, "KQ_mask") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc)) { + if (!ggml_tallocr_is_measure(lctx.alloc)) { const int64_t n_kv = cur->ne[0]; const int64_t n_tokens = cur->ne[1]; @@ -6379,9 +6032,9 @@ static struct ggml_cgraph * llama_build_graph( } if (!alloc_inp_K_shift && strcmp(name, "K_shift") == 0) { - ggml_allocr_alloc(lctx.alloc, cur); + ggml_tallocr_alloc(lctx.alloc, cur); - if (!ggml_allocr_is_measure(lctx.alloc)) { + if (!ggml_tallocr_is_measure(lctx.alloc)) { const int64_t n_ctx = cur->ne[0]; int32_t * data; @@ -6403,136 +6056,6 @@ static struct ggml_cgraph * llama_build_graph( alloc_inp_K_shift = true; } - - // view tensors are not processed further - if (cur->view_src != nullptr) { - return; - } - - if (cur->op != GGML_OP_NONE) { - n_non_view++; - } - - // - // offload layers - // - // TODO: will be removed with backend v2 - -//#define LLAMA_OFFLOAD_DEBUG - - if (!do_offload) { - return; - } - - const int n_layer = model.hparams.n_layer; - - const int n_gpu_layers = model.n_gpu_layers; - const int i_gpu_start = n_layer - n_gpu_layers; - - // should we offload the final norm? 
yes if we are not computing embeddings - const bool offload_emb = lctx.embedding.empty(); - - static const std::unordered_map> k_offload_func_name = { - { OFFLOAD_FUNC_NOP, "CPU" }, - { OFFLOAD_FUNC_OUT, "CPU" }, -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - { OFFLOAD_FUNC, "GPU (CUDA)" }, - { OFFLOAD_FUNC_FRC, "GPU (CUDA) FRC" }, - { OFFLOAD_FUNC_KQV, "GPU (CUDA) KQV" }, - { OFFLOAD_FUNC_NR, "GPU (CUDA) NR" }, - { OFFLOAD_FUNC_EMB, "GPU (CUDA) EMB" }, -#else - { OFFLOAD_FUNC, "CPU" }, - { OFFLOAD_FUNC_FRC, "CPU" }, - { OFFLOAD_FUNC_KQV, "CPU" }, - { OFFLOAD_FUNC_NR, "CPU" }, - { OFFLOAD_FUNC_EMB, "CPU" }, -#endif // GGML_USE_CUBLAS - }; - - // check the global map for what offload function to use for this tensor - llm_offload_func_e func_e = k_offload_func_trie.find(name); - - if (func_e == OFFLOAD_FUNC_NOP) { -#ifdef LLAMA_OFFLOAD_DEBUG - // if a tensor hasn't been offloaded, we warn the user - if (worst_case) { - LLAMA_LOG_WARN("%s: %32s: not offloaded (ref: %s)\n", __func__, - cur->name, "https://github.com/ggerganov/llama.cpp/pull/3837"); - } -#endif - - return; - } - - // count the number of layers and respect the provided n_gpu_layers - switch (func_e) { - case OFFLOAD_FUNC_NOP: - case OFFLOAD_FUNC_OUT: - break; - case OFFLOAD_FUNC: - if (n_gpu_layers < n_layer) { - if (il < i_gpu_start) { - func_e = OFFLOAD_FUNC_NOP; - } - } - break; - case OFFLOAD_FUNC_FRC: - if (!lctx.cparams.offload_kqv) { - func_e = OFFLOAD_FUNC_NOP; - } break; - case OFFLOAD_FUNC_KQV: - if (!lctx.cparams.offload_kqv) { - func_e = OFFLOAD_FUNC_NOP; - } else { - if (n_gpu_layers < n_layer) { - if (il < i_gpu_start) { - func_e = OFFLOAD_FUNC_NOP; - } - } - } - break; - case OFFLOAD_FUNC_NR: - if (n_gpu_layers <= n_layer + 0) { - func_e = OFFLOAD_FUNC_NOP; - } - break; - case OFFLOAD_FUNC_EMB: - if (!offload_emb || n_gpu_layers < n_layer) { - func_e = OFFLOAD_FUNC_NOP; - } - break; - default: GGML_ASSERT(false); - } - - offload_func_t func = ggml_offload_nop; - - // this is needed for compatibility with Metal for example -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - static offload_func_t ggml_offload_gpu = ggml_cuda_assign_buffers_no_alloc; -#else - static offload_func_t ggml_offload_gpu = ggml_offload_nop; -#endif - - switch (func_e) { - case OFFLOAD_FUNC_NOP: - case OFFLOAD_FUNC_OUT: func = ggml_offload_nop; break; - case OFFLOAD_FUNC: - case OFFLOAD_FUNC_KQV: - case OFFLOAD_FUNC_FRC: - case OFFLOAD_FUNC_NR: - case OFFLOAD_FUNC_EMB: func = ggml_offload_gpu; break; - default: GGML_ASSERT(false); - } - - // apply offload function to the tensor - func(cur); - -#ifdef LLAMA_OFFLOAD_DEBUG - if (worst_case) { - LLAMA_LOG_INFO("%s: %32s: %s\n", __func__, cur->name, k_offload_func_name.at(func_e).c_str()); - } -#endif }; struct ggml_cgraph * result = NULL; @@ -6600,27 +6123,6 @@ static struct ggml_cgraph * llama_build_graph( llm.free(); - if (worst_case) { - int n_non_view_total = 0; - - for (int i = 0; i < result->n_nodes; ++i) { - if (result->nodes[i]->view_src == nullptr) { - n_non_view_total++; - } - } - - LLAMA_LOG_INFO("%s: non-view tensors processed: %d/%d\n", __func__, n_non_view, n_non_view_total); - - if (n_non_view != n_non_view_total) { - LLAMA_LOG_WARN("%s: ****************************************************************\n", __func__); - LLAMA_LOG_WARN("%s: not all non-view tensors have been processed with a callback\n", __func__); - LLAMA_LOG_WARN("%s: this can indicate an inefficiency in the graph implementation\n", __func__); - LLAMA_LOG_WARN("%s: 
build with LLAMA_OFFLOAD_DEBUG for more info\n", __func__); - LLAMA_LOG_WARN("%s: ref: https://github.com/ggerganov/llama.cpp/pull/3837\n", __func__); - LLAMA_LOG_WARN("%s: ****************************************************************\n", __func__); - } - } - return result; } @@ -6666,8 +6168,6 @@ static int llama_decode_internal( auto & kv_self = lctx.kv_self; - GGML_ASSERT(!!kv_self.ctx); - const int64_t n_embd = hparams.n_embd; const int64_t n_vocab = hparams.n_vocab; @@ -6721,12 +6221,10 @@ static int llama_decode_internal( //printf("kv_self.n = %5d, kv_self.used = %5d, kv_self.head = %5d\n", kv_self.n, kv_self.used, kv_self.head); - ggml_allocr_reset(lctx.alloc); + ggml_backend_sched_reset(lctx.sched); ggml_cgraph * gf = llama_build_graph(lctx, batch); - ggml_allocr_alloc_graph(lctx.alloc, gf); - // the output is always the last tensor in the graph struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; GGML_ASSERT(strcmp(res->name, "result_output") == 0); @@ -6738,30 +6236,6 @@ static int llama_decode_internal( GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); } -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - char * buf_alloc_base = (char *)ggml_backend_buffer_get_base(lctx.buf_alloc); - for (int i = 0; i < gf->n_leafs; i++) { - ggml_tensor * node = gf->leafs[i]; - if (node->backend == GGML_BACKEND_GPU && node->extra == NULL) { - ggml_cuda_assign_scratch_offset(node, (char *)node->data - buf_alloc_base); - ggml_cuda_copy_to_device(node); - } - } - - for (int i = 0; i < gf->n_nodes; i++) { - ggml_tensor * node = gf->nodes[i]; - if (node->backend == GGML_BACKEND_GPU && node->extra == NULL) { - ggml_cuda_assign_scratch_offset(node, (char *)node->data - buf_alloc_base); - } - } - - // HACK: ggml-alloc may change the tensor backend when reusing a parent, so force output to be on the CPU here if needed - if (!lctx.embedding.empty()) { - embeddings->backend = GGML_BACKEND_CPU; - } - res->backend = GGML_BACKEND_CPU; -#endif - // LLAMA_LOG_INFO("graph build time: %.3f ms (%d nodes, %d leafs)\n", (ggml_time_us() - t_start_us)/1000.0, gf->n_nodes, gf->n_leafs); // for big prompts, if BLAS is enabled, it is better to use only one thread @@ -6784,15 +6258,17 @@ static int llama_decode_internal( #endif #ifdef GGML_USE_METAL - if (ggml_backend_is_metal(lctx.backend)) { - ggml_backend_metal_set_n_cb(lctx.backend, n_threads); + if (ggml_backend_is_metal(lctx.backend_metal)) { + ggml_backend_metal_set_n_cb(lctx.backend_metal, n_threads); } #endif - if (ggml_backend_is_cpu(lctx.backend)) { - ggml_backend_cpu_set_n_threads(lctx.backend, n_threads); + if (lctx.backend_cpu != nullptr) { + ggml_backend_cpu_set_n_threads(lctx.backend_cpu, n_threads); } - ggml_backend_graph_compute(lctx.backend, gf); + ggml_backend_sched_graph_compute(lctx.sched, gf); + + // fprintf(stderr, "splits: %d\n", ggml_backend_sched_get_n_splits(lctx.sched)); #ifdef GGML_USE_MPI ggml_mpi_graph_compute_post(lctx.ctx_mpi, gf, n_layer); @@ -6840,30 +6316,33 @@ static int llama_decode_internal( logits_out.clear(); #endif + ggml_backend_t res_backend = ggml_backend_sched_get_node_backend(lctx.sched, res); + GGML_ASSERT(res_backend != nullptr); if (batch.logits) { logits_out.resize(n_vocab * n_tokens); for (uint32_t i = 0; i < n_tokens; i++) { if (batch.logits[i] == 0) { continue; } - ggml_backend_tensor_get(res, logits_out.data() + (n_vocab*i), (n_vocab*i)*sizeof(float), n_vocab*sizeof(float)); + ggml_backend_tensor_get_async(res_backend, res, logits_out.data() + (n_vocab*i), 
(n_vocab*i)*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[i] = true; #endif } } else if (lctx.logits_all) { logits_out.resize(n_vocab * n_tokens); - ggml_backend_tensor_get(res, logits_out.data(), 0, n_vocab*n_tokens*sizeof(float)); + ggml_backend_tensor_get_async(res_backend, res, logits_out.data(), 0, n_vocab*n_tokens*sizeof(float)); #ifndef NDEBUG std::fill(logits_valid.begin(), logits_valid.end(), true); #endif } else { logits_out.resize(n_vocab); - ggml_backend_tensor_get(res, logits_out.data(), (n_vocab*(n_tokens - 1))*sizeof(float), n_vocab*sizeof(float)); + ggml_backend_tensor_get_async(res_backend, res, logits_out.data(), (n_vocab*(n_tokens - 1))*sizeof(float), n_vocab*sizeof(float)); #ifndef NDEBUG logits_valid[0] = true; #endif } + ggml_backend_synchronize(res_backend); } // extract embeddings @@ -6871,7 +6350,9 @@ static int llama_decode_internal( auto & embedding_out = lctx.embedding; embedding_out.resize(n_embd); - ggml_backend_tensor_get(embeddings, embedding_out.data(), (n_embd*(n_tokens - 1))*sizeof(float), n_embd*sizeof(float)); + ggml_backend_t embeddings_backend = ggml_backend_sched_get_node_backend(lctx.sched, embeddings); + ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), (n_embd*(n_tokens - 1))*sizeof(float), n_embd*sizeof(float)); + ggml_backend_synchronize(embeddings_backend); } // measure the performance only for the single-token evals @@ -9347,48 +8828,23 @@ static int llama_apply_lora_from_file_internal( LLAMA_LOG_INFO("%s: r = %d, alpha = %d, scaling = %.2f\n", __func__, lora_r, lora_alpha, scaling); - // create a name -> tensor map of the model to accelerate lookups - // find the max tensor size to estimate the required temporary buffer size - size_t max_tensor_size = 0; - std::unordered_map model_tensors; - for (const auto & kv : model.tensors_by_name) { - model_tensors.insert(kv); - size_t f32_size = ggml_nelements(kv.second) * sizeof(float); - max_tensor_size = std::max(max_tensor_size, f32_size); - } - - // create a temporary ggml context to store the lora tensors - // TODO: use ggml-alloc - size_t lora_ctx_size = max_tensor_size * 3; - LLAMA_LOG_INFO("%s: allocating %.f MB for lora temporary buffer\n", __func__, lora_ctx_size / 1024.0 / 1024.0); - std::vector lora_buf(lora_ctx_size); - - struct ggml_init_params params; - params.mem_size = lora_buf.size(); - params.mem_buffer = lora_buf.data(); - params.no_alloc = false; - - using unique_context = std::unique_ptr; - - unique_context lora_ctx(nullptr, ggml_free); - lora_ctx.reset(ggml_init(params)); - std::unordered_map lora_tensors; - // load base model std::unique_ptr ml; - - if (path_base_model) { + if (path_base_model) { LLAMA_LOG_INFO("%s: loading base model from '%s'\n", __func__, path_base_model); ml.reset(new llama_model_loader(path_base_model, /*use_mmap*/ true, /*kv_overrides*/ nullptr)); - ml->init_mapping(false); // no prefetching + ml->init_mapping(/*prefetch*/ false); // no prefetching } - // read tensors and apply - bool warned = false; - int n_tensors = 0; - - std::vector work_buffer; + struct tensor_meta { + std::string name; + ggml_type type; + int32_t ne[2]; + size_t offset; + }; + std::map tensor_meta_map; + // load all tensor meta while (true) { if (fin.tell() == fin.size) { // eof @@ -9401,7 +8857,7 @@ static int llama_apply_lora_from_file_internal( fin.read_raw(&n_dims, sizeof(n_dims)); fin.read_raw(&name_len, sizeof(name_len)); - fin.read_raw(&ftype, sizeof(ftype)); + fin.read_raw(&ftype, sizeof(ftype)); if (n_dims != 1 && 
n_dims != 2) { LLAMA_LOG_ERROR("%s: unsupported tensor dimension %d\n", __func__, n_dims); @@ -9415,31 +8871,23 @@ static int llama_apply_lora_from_file_internal( std::string name; { - GGML_ASSERT(name_len <= 1024); - char buf[1024]; + GGML_ASSERT(name_len < GGML_MAX_NAME); + char buf[GGML_MAX_NAME]; fin.read_raw(buf, name_len); name = std::string(buf, name_len); } - // check for lora suffix and get the type of tensor - const std::string lora_suffix = ".lora"; - size_t pos = name.rfind(lora_suffix); - if (pos == std::string::npos) { + // check for lora suffix + std::string lora_suffix; + if (name.length() > 6) { + lora_suffix = name.substr(name.length() - 6); + } + if (lora_suffix != ".loraA" && lora_suffix != ".loraB") { LLAMA_LOG_ERROR("%s: error: '%s' is not a lora tensor\n", __func__, name.c_str()); return 1; } - std::string lora_type = name.substr(pos + lora_suffix.length()); - std::string base_name = name; - base_name.erase(pos); - // LLAMA_LOG_INFO("%s: %s => %s (lora type %s) \n", __func__, name.c_str(), base_name.c_str(), lora_type.c_str()); - - if (model_tensors.find(base_name) == model_tensors.end()) { - LLAMA_LOG_ERROR("%s: unknown tensor '%s' in lora adapter\n", __func__, name.data()); - return 1; - } - - // create ggml tensor + // tensor type ggml_type wtype; switch (ftype) { case 0: wtype = GGML_TYPE_F32; break; @@ -9451,122 +8899,177 @@ static int llama_apply_lora_from_file_internal( return false; } } - ggml_tensor * lora_tensor = ggml_new_tensor_2d(lora_ctx.get(), wtype, ne[0], ne[1]); - ggml_set_name(lora_tensor, name.c_str()); - // load tensor data + // data offset size_t offset = fin.tell(); - size_t tensor_data_size = ggml_nbytes(lora_tensor); offset = (offset + 31) & -32; - fin.seek(offset, SEEK_SET); - fin.read_raw(lora_tensor->data, tensor_data_size); - lora_tensors[name] = lora_tensor; + // skip tensor data + fin.seek(offset + ggml_row_size(wtype, ne[0]) * ne[1], SEEK_SET); - // check if we have both A and B tensors and apply - if (lora_tensors.find(base_name + ".loraA") != lora_tensors.end() && - lora_tensors.find(base_name + ".loraB") != lora_tensors.end()) { + tensor_meta_map.emplace(name, tensor_meta{ name, wtype, { ne[0], ne[1] }, offset }); + } - ggml_tensor * dest_t = model_tensors[base_name]; + bool warned = false; + int n_tensors = 0; - offload_func_t offload_func = ggml_offload_nop; - offload_func_t offload_func_force_inplace = ggml_offload_nop; + // apply + ggml_backend_t backend_cpu = ggml_backend_cpu_init(); + if (backend_cpu == nullptr) { + LLAMA_LOG_ERROR("%s: error: failed to initialize cpu backend\n", __func__); + return 1; + } + ggml_backend_cpu_set_n_threads(backend_cpu, n_threads); -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (dest_t->backend == GGML_BACKEND_GPU || dest_t->backend == GGML_BACKEND_GPU_SPLIT) { - if (dest_t->type != GGML_TYPE_F16) { - throw std::runtime_error(format( - "%s: error: the simultaneous use of LoRAs and GPU acceleration is only supported for f16 models. 
dest_t->type: %d", __func__, dest_t->type)); - } - offload_func = ggml_cuda_assign_buffers; - offload_func_force_inplace = ggml_cuda_assign_buffers_force_inplace; - } -#endif // GGML_USE_CUBLAS + std::vector> read_buf; + for (const auto & it : model.tensors_by_name) { + const std::string & base_name = it.first; + ggml_tensor * model_t = it.second; - ggml_tensor * base_t; - if (ml) { - struct gguf_context * ctx_gguf = ml->ctx_gguf; + if (tensor_meta_map.find(base_name + ".loraA") == tensor_meta_map.end() || + tensor_meta_map.find(base_name + ".loraB") == tensor_meta_map.end()) { + continue; + } - // load from base model - if (gguf_find_tensor(ctx_gguf, base_name.c_str()) < 0) { - LLAMA_LOG_ERROR("%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str()); - return 1; - } + tensor_meta & metaA = tensor_meta_map.at(base_name + ".loraA"); + tensor_meta & metaB = tensor_meta_map.at(base_name + ".loraB"); - base_t = ml->get_tensor_meta(base_name.c_str()); - ml->load_data_for(base_t); - } else { - base_t = dest_t; - } + ggml_init_params lora_init_params = { + /* .mem_size */ ggml_tensor_overhead()*128 + ggml_graph_overhead(), + /* .mem_buffer */ nullptr, + /* .no_alloc */ true, + }; + ggml_context * lora_ctx = ggml_init(lora_init_params); + if (lora_ctx == nullptr) { + LLAMA_LOG_ERROR("%s: error: failed to initialize lora context\n", __func__); + ggml_backend_free(backend_cpu); + return 1; + } - if (ggml_is_quantized(base_t->type)) { - if (!warned) { - LLAMA_LOG_WARN("%s: warning: using a lora adapter with a quantized model may result in poor quality, " - "use a f16 or f32 base model with --lora-base\n", __func__); - warned = true; - } - } + // create tensors + ggml_tensor * loraA = ggml_new_tensor_2d(lora_ctx, metaA.type, metaA.ne[0], metaA.ne[1]); + ggml_tensor * loraB = ggml_new_tensor_2d(lora_ctx, metaB.type, metaB.ne[0], metaB.ne[1]); + ggml_set_name(loraA, metaA.name.c_str()); + ggml_set_name(loraB, metaB.name.c_str()); - ggml_tensor * loraA = lora_tensors[base_name + ".loraA"]; - GGML_ASSERT(loraA->type == GGML_TYPE_F32); - ggml_set_name(loraA, "loraA"); - - ggml_tensor * loraB = lora_tensors[base_name + ".loraB"]; - GGML_ASSERT(loraB->type == GGML_TYPE_F32); - ggml_set_name(loraB, "loraB"); - - if (base_t->ne[0] != loraA->ne[1] || base_t->ne[1] != loraB->ne[1]) { - LLAMA_LOG_ERROR("%s: incompatible tensor dimensions (%" PRId64 " and %" PRId64 ");" - " are you sure that this adapter is for this model?\n", __func__, base_t->ne[0], loraA->ne[1]); + ggml_tensor * base_t; + if (ml) { + if (gguf_find_tensor(ml->ctx_gguf, base_name.c_str()) < 0) { + LLAMA_LOG_ERROR("%s: error: tensor '%s' not found in base model\n", __func__, base_name.c_str()); return 1; } + base_t = ggml_dup_tensor(lora_ctx, ml->get_tensor_meta(base_name.c_str())); + } else { + base_t = ggml_dup_tensor(lora_ctx, model_t); + } + ggml_set_name(base_t, base_name.c_str()); + // allocate in backend buffer + ggml_backend_buffer_t lora_buf = ggml_backend_alloc_ctx_tensors_from_buft(lora_ctx, ggml_backend_cpu_buffer_type()); + if (lora_buf == nullptr) { + LLAMA_LOG_ERROR("%s: error: failed to allocate lora tensors\n", __func__); + return 1; + } + + // load tensor data + auto load_tensor = [&read_buf, &fin](const tensor_meta & tensor_meta, ggml_tensor * tensor) { + read_buf.resize(ggml_nbytes(tensor)); + fin.seek(tensor_meta.offset, SEEK_SET); + fin.read_raw(read_buf.data(), ggml_nbytes(tensor)); + ggml_backend_tensor_set(tensor, read_buf.data(), 0, read_buf.size()); + }; + load_tensor(metaA, loraA); + 
load_tensor(metaB, loraB); + + // load base model tensor data + if (ml) { + ml->load_data_for(base_t); + } else { + ggml_backend_tensor_copy(model_t, base_t); + } + + if (ggml_is_quantized(base_t->type) && !warned) { + LLAMA_LOG_WARN("%s: warning: using a lora adapter with a quantized model may result in poor quality, " + "use a f16 or f32 base model with --lora-base\n", __func__); + warned = true; + } + + if (base_t->ne[0] != loraA->ne[1] || base_t->ne[1] != loraB->ne[1]) { + LLAMA_LOG_ERROR("%s: incompatible tensor dimensions (%" PRId64 " and %" PRId64 ");" + " are you sure that this adapter is for this model?\n", __func__, base_t->ne[0], loraA->ne[1]); + ggml_free(lora_ctx); + ggml_backend_buffer_free(lora_buf); + ggml_backend_free(backend_cpu); + return 1; + } + + auto build_lora_graph = [&]() { // w = w + BA*s - ggml_tensor * BA = ggml_mul_mat(lora_ctx.get(), loraA, loraB); - offload_func(BA); + ggml_tensor * BA = ggml_mul_mat(lora_ctx, loraA, loraB); ggml_set_name(BA, "BA"); if (scaling != 1.0f) { - BA = ggml_scale_inplace(lora_ctx.get(), BA, scaling); - offload_func(BA); + BA = ggml_scale(lora_ctx, BA, scaling); ggml_set_name(BA, "BA_scaled"); } ggml_tensor * r; - if (base_t == dest_t) { - r = ggml_add_inplace(lora_ctx.get(), dest_t, BA); - offload_func_force_inplace(r); - ggml_set_name(r, "r_add_inplace"); - } - else { - r = ggml_add(lora_ctx.get(), base_t, BA); - offload_func(r); - ggml_set_name(r, "r_add"); + r = ggml_add_inplace(lora_ctx, base_t, BA); + ggml_set_name(r, "r_add"); - r = ggml_cpy(lora_ctx.get(), r, dest_t); - offload_func(r); - ggml_set_name(r, "r_cpy"); + if (base_t->type != model_t->type) { + // convert the result to the model type + r = ggml_cast(lora_ctx, r, model_t->type); + ggml_set_name(r, "r_cast"); } - struct ggml_cgraph * gf = ggml_new_graph(lora_ctx.get()); - ggml_build_forward_expand(gf, r); + return r; + }; - ggml_graph_compute_helper(work_buffer, gf, n_threads); + ggml_cgraph * gf = ggml_new_graph(lora_ctx); + ggml_tensor * r = build_lora_graph(); + ggml_build_forward_expand(gf, r); - // the tensors in the adapter must be sorted such that loraA and loraB of the same tensor are next to each other - GGML_ASSERT(lora_tensors.size() == 2); + ggml_backend_buffer_t graph_buf = ggml_backend_alloc_ctx_tensors_from_buft(lora_ctx, ggml_backend_cpu_buffer_type()); + if (graph_buf == nullptr) { + LLAMA_LOG_ERROR("%s: error: failed to allocate graph tensors\n", __func__); + ggml_free(lora_ctx); + ggml_backend_buffer_free(lora_buf); + ggml_backend_free(backend_cpu); + return 1; + } - // we won't need these tensors again, reset the context to save memory - lora_ctx.reset(ggml_init(params)); - lora_tensors.clear(); + ggml_backend_graph_compute(backend_cpu, gf); - n_tensors++; - if (n_tensors % 4 == 0) { - LLAMA_LOG_INFO("."); - } + ggml_backend_tensor_set(model_t, r->data, 0, ggml_nbytes(r)); + +#if 0 + // TODO: use scheduler with fallback to CPU for less copies between CPU and GPU + //ggml_backend_sched_t sched = ggml_backend_sched_new(backends.data(), backends.size(), GGML_DEFAULT_GRAPH_SIZE); + + // sched compute + ggml_build_forward_expand(gf, build_graph()); + ggml_backend_sched_init_measure(sched, gf); + + // create the graph again, since the previous one was destroyed by the measure + ggml_graph_clear(gf); + ggml_build_forward_expand(gf, build_graph()); + ggml_backend_sched_graph_compute(sched, gf); + ggml_backend_sched_free(sched); +#endif + + ggml_backend_buffer_free(lora_buf); + ggml_backend_buffer_free(graph_buf); + ggml_free(lora_ctx); + + n_tensors++; + 
if (n_tensors % 4 == 0) { + LLAMA_LOG_INFO("."); } } + ggml_backend_free(backend_cpu); + const int64_t t_lora_us = ggml_time_us() - t_start_lora_us; LLAMA_LOG_INFO(" done (%.2f ms)\n", t_lora_us / 1000.0); @@ -9579,6 +9082,7 @@ static int llama_apply_lora_from_file_internal( struct llama_model_params llama_model_default_params() { struct llama_model_params result = { /*.n_gpu_layers =*/ 0, + /*.split_mode =*/ LLAMA_SPLIT_LAYER, /*.main_gpu =*/ 0, /*.tensor_split =*/ nullptr, /*.progress_callback =*/ nullptr, @@ -9590,7 +9094,8 @@ struct llama_model_params llama_model_default_params() { }; #ifdef GGML_USE_METAL - result.n_gpu_layers = 1; + // note: we usually have plenty of VRAM, so by default offload all layers to the GPU + result.n_gpu_layers = 999; #endif return result; @@ -9780,41 +9285,53 @@ struct llama_context * llama_new_context_with_model( GGML_ASSERT(hparams.n_embd_head_k % ggml_blck_size(type_k) == 0); GGML_ASSERT(hparams.n_embd_head_v % ggml_blck_size(type_v) == 0); - // reserve memory for context buffers if (!hparams.vocab_only) { - // initialize backend + // initialize backends #ifdef GGML_USE_METAL if (model->n_gpu_layers > 0) { - ctx->backend = ggml_backend_metal_init(); - if (ctx->backend == nullptr) { + ctx->backend_metal = ggml_backend_metal_init(); + if (ctx->backend_metal == nullptr) { LLAMA_LOG_ERROR("%s: failed to initialize Metal backend\n", __func__); + llama_free(ctx); + return nullptr; } + ctx->backends.push_back(ctx->backend_metal); } -#elif defined(GGML_USE_CUBLAS) && defined(LLAMA_GGML_BACKEND_CUDA_TEST) - // for testing only +#elif defined(GGML_USE_CUBLAS) if (model->n_gpu_layers > 0) { - ctx->backend = ggml_backend_cuda_init(0); - if (ctx->backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize CUDA backend\n", __func__); + // with split_mode LLAMA_SPLIT_NONE or LLAMA_SPLIT_ROW, only the main GPU backend is used + if (model->split_mode == LLAMA_SPLIT_NONE || model->split_mode == LLAMA_SPLIT_ROW) { + ggml_backend_t backend = ggml_backend_cuda_init(model->main_gpu); + if (backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CUDA%d backend\n", __func__, model->main_gpu); + llama_free(ctx); + return nullptr; + } + ctx->backends.push_back(backend); + } else { + // LLAMA_SPLIT_LAYER requires a backend for each GPU + for (int device = 0; device < ggml_backend_cuda_get_device_count(); ++device) { + ggml_backend_t backend = ggml_backend_cuda_init(device); + if (backend == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CUDA%d backend\n", __func__, device); + llama_free(ctx); + return nullptr; + } + ctx->backends.push_back(backend); + } } } #endif - - if (ctx->backend == nullptr && ggml_backend_buffer_is_host(model->buf)) { - ctx->backend = ggml_backend_cpu_init(); - if (ctx->backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize CPU backend\n", __func__); - } - } - - if (ctx->backend == nullptr) { - LLAMA_LOG_ERROR("%s: failed to initialize a backend\n", __func__); - delete ctx; + ctx->backend_cpu = ggml_backend_cpu_init(); + if (ctx->backend_cpu == nullptr) { + LLAMA_LOG_ERROR("%s: failed to initialize CPU backend\n", __func__); + llama_free(ctx); return nullptr; } + ctx->backends.push_back(ctx->backend_cpu); - if (!llama_kv_cache_init(ctx->model.hparams, ctx->kv_self, type_k, type_v, - cparams.n_ctx, model->n_gpu_layers, cparams.offload_kqv)) { + if (!llama_kv_cache_init(ctx->kv_self, ctx->model, type_k, type_v, + cparams.n_ctx, cparams.offload_kqv)) { LLAMA_LOG_ERROR("%s: llama_kv_cache_init() failed for self-attention 
cache\n", __func__); llama_free(ctx); return nullptr; @@ -9850,11 +9367,22 @@ struct llama_context * llama_new_context_with_model( } { - // the compute buffer is used to store the tensor and graph structs, while the allocator buffer is used for the tensor data + // buffer types used for the compute buffer of each backend + std::vector backend_buft; + for (auto * backend : ctx->backends) { + if (ggml_backend_is_cpu(backend)) { + // use host buffers for the CPU backend compute buffer + backend_buft.push_back(llama_default_buffer_type_cpu(true)); + } else { + backend_buft.push_back(ggml_backend_get_default_buffer_type(backend)); + } + } + + // buffer used to store the computation graph and the tensor meta data ctx->buf_compute_meta.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); - // create measure allocator - ctx->alloc = ggml_allocr_new_measure_from_backend(ctx->backend); + ctx->sched = ggml_backend_sched_new(ctx->backends.data(), backend_buft.data(), ctx->backends.size(), LLAMA_MAX_NODES); + ctx->alloc = ggml_backend_sched_get_tallocr(ctx->sched, ctx->backend_cpu); // build worst-case graph int n_tokens = (int)std::min(cparams.n_ctx, cparams.n_batch); @@ -9862,50 +9390,19 @@ struct llama_context * llama_new_context_with_model( llama_token token = llama_token_bos(&ctx->model); // not actually used by llama_build_graph, but required to choose between token and embedding inputs graph ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0)); - // measure memory requirements for the graph - size_t alloc_size = ggml_allocr_alloc_graph(ctx->alloc, gf); + // initialize scheduler with the worst-case graph + ggml_backend_sched_init_measure(ctx->sched, gf); + // note: the number of splits during measure is higher than during inference due to the kv shift + int n_splits = ggml_backend_sched_get_n_splits(ctx->sched); + LLAMA_LOG_INFO("%s: graph splits (measure): %d\n", __func__, n_splits); + ctx->alloc = ggml_backend_sched_get_tallocr(ctx->sched, ctx->backend_cpu); - LLAMA_LOG_INFO("%s: compute buffer total size = %.2f MiB\n", __func__, (ctx->buf_compute_meta.size() + alloc_size) / 1024.0 / 1024.0); - - // create allocator again with exact memory requirements - ggml_allocr_free(ctx->alloc); - - ctx->buf_alloc = ggml_backend_alloc_buffer(ctx->backend, alloc_size); - ctx->alloc = ggml_allocr_new_from_buffer(ctx->buf_alloc); -#if defined(GGML_USE_CUBLAS) && !defined(LLAMA_GGML_BACKEND_CUDA_TEST) - if (model->n_gpu_layers > 0) { - // the CPU buffer adds this padding in case the malloc buffer is not aligned, so we need to do the same for the GPU buffer, since we use the same offsets - ggml_cuda_set_scratch_size(alloc_size + 64); - LLAMA_LOG_INFO("%s: VRAM scratch buffer: %.2f MiB\n", __func__, alloc_size / 1024.0 / 1024.0); - - // calculate total VRAM usage - auto add_tensor = [](const ggml_tensor * t, size_t & size) { - if (t->backend == GGML_BACKEND_GPU || t->backend == GGML_BACKEND_GPU_SPLIT) { - size += ggml_nbytes(t); - } - }; - size_t model_vram_size = 0; - for (const auto & kv : model->tensors_by_name) { - add_tensor(kv.second, model_vram_size); - } - - size_t kv_vram_size = 0; - for (auto & k : ctx->kv_self.k_l) { - add_tensor(k, kv_vram_size); - } - for (auto & v : ctx->kv_self.v_l) { - add_tensor(v, kv_vram_size); - } - - size_t ctx_vram_size = alloc_size + kv_vram_size; - size_t total_vram_size = model_vram_size + ctx_vram_size; - - LLAMA_LOG_INFO("%s: total VRAM used: %.2f MiB (model: %.2f MiB, context: %.2f MiB)\n", __func__, - 
total_vram_size / 1024.0 / 1024.0, - model_vram_size / 1024.0 / 1024.0, - ctx_vram_size / 1024.0 / 1024.0); + for (ggml_backend_t backend : ctx->backends) { + ggml_backend_buffer_t buf = ggml_backend_sched_get_buffer(ctx->sched, backend); + LLAMA_LOG_INFO("%s: %10s compute buffer size = %8.2f MiB\n", __func__, + ggml_backend_buffer_name(buf), + ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0); } -#endif } } @@ -10002,9 +9499,8 @@ int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int3 } int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { - return snprintf(buf, buf_size, "%s %s%s %s", + return snprintf(buf, buf_size, "%s %s %s", llama_model_arch_name(model->arch).c_str(), - model->hparams.n_expert > 0 ? (std::to_string(model->hparams.n_expert) + "x").c_str() : "", llama_model_type_name(model->type), llama_model_ftype_name(model->ftype).c_str()); } @@ -10026,7 +9522,14 @@ uint64_t llama_model_n_params(const struct llama_model * model) { } struct ggml_tensor * llama_get_model_tensor(struct llama_model * model, const char * name) { - return ggml_get_tensor(model->ctx, name); + auto it = std::find_if(model->tensors_by_name.begin(), model->tensors_by_name.end(), + [name](const std::pair & it) { + return it.first == name; + }); + if (it == model->tensors_by_name.end()) { + return nullptr; + } + return it->second; } uint32_t llama_model_quantize( @@ -10211,7 +9714,7 @@ size_t llama_get_state_size(const struct llama_context * ctx) { const size_t s_embedding = ctx->embedding.size() * sizeof(float); const size_t s_kv_size = sizeof(size_t); const size_t s_kv_ntok = sizeof(int); - const size_t s_kv = ggml_backend_buffer_get_size(ctx->kv_self.buf); + const size_t s_kv = ctx->kv_self.total_size(); const size_t s_total = ( + s_rng_size @@ -10340,7 +9843,7 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat const auto n_embd_v_gqa = hparams.n_embd_v_gqa(); const auto n_ctx = cparams.n_ctx; - const size_t kv_buf_size = ggml_backend_buffer_get_size(kv_self.buf); + const size_t kv_buf_size = kv_self.total_size(); const uint32_t kv_head = kv_self.head; const uint32_t kv_size = kv_self.size; const uint32_t kv_used = kv_self.used; @@ -10353,46 +9856,19 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat if (kv_buf_size) { const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); - ggml_cgraph * gf = ggml_new_graph(cpy_ctx); - - std::vector kout2d(n_layer); - std::vector vout2d(n_layer); - - for (int il = 0; il < (int) n_layer; ++il) { - kout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd_k_gqa, kv_head); - vout2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd_v_gqa); - - ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], - n_embd_k_gqa, kv_head, - elt_size*n_embd_k_gqa, 0); - - ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], - kv_head, n_embd_v_gqa, - elt_size*n_ctx, 0); - - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, k2d, kout2d[il])); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, v2d, vout2d[il])); - } - - ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cpy_ctx, ctx->backend); - - ggml_backend_graph_compute(ctx->backend, gf); - std::vector tmp_buf; for (int il = 0; il < (int) n_layer; ++il) { - tmp_buf.resize(ggml_nbytes(kout2d[il])); - ggml_backend_tensor_get(kout2d[il], 
tmp_buf.data(), 0, tmp_buf.size()); + tmp_buf.resize(elt_size*n_embd_k_gqa*kv_head); + ggml_backend_tensor_get(kv_self.k_l[il], tmp_buf.data(), 0, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); - tmp_buf.resize(ggml_nbytes(vout2d[il])); - ggml_backend_tensor_get(vout2d[il], tmp_buf.data(), 0, tmp_buf.size()); - data_ctx->write(tmp_buf.data(), tmp_buf.size()); + // v is not contiguous, copy row by row + tmp_buf.resize(elt_size*kv_head); + for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { + ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), ir*elt_size*n_ctx, tmp_buf.size()); + data_ctx->write(tmp_buf.data(), tmp_buf.size()); + } } - - ggml_free(cpy_ctx); - - ggml_backend_buffer_free(buf); } for (uint32_t i = 0; i < kv_size; ++i) { @@ -10491,48 +9967,22 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { memcpy(&kv_used, inp, sizeof(kv_used)); inp += sizeof(kv_used); if (kv_buf_size) { - GGML_ASSERT(ggml_backend_buffer_get_size(kv_self.buf) == kv_buf_size); + GGML_ASSERT(kv_self.total_size() == kv_buf_size); const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - ggml_context * cpy_ctx = ggml_init({ 6*n_layer*ggml_tensor_overhead() + ggml_graph_overhead(), NULL, /* no_alloc */ true }); - ggml_cgraph * gf = ggml_new_graph(cpy_ctx); + for (int il = 0; il < (int) n_layer; ++il) { + size_t k_size = elt_size*n_embd_k_gqa*kv_head; + ggml_backend_tensor_set(kv_self.k_l[il], inp, 0, k_size); + inp += k_size; - std::vector kin2d(n_layer); - std::vector vin2d(n_layer); - - for (int il = 0; il < n_layer; ++il) { - kin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.k_l[il]->type, n_embd_k_gqa, kv_head); - vin2d[il] = ggml_new_tensor_2d(cpy_ctx, kv_self.v_l[il]->type, kv_head, n_embd_v_gqa); - - ggml_tensor * k2d = ggml_view_2d(cpy_ctx, kv_self.k_l[il], - n_embd_k_gqa, kv_head, - elt_size*n_embd_k_gqa, 0); - - ggml_tensor * v2d = ggml_view_2d(cpy_ctx, kv_self.v_l[il], - kv_head, n_embd_v_gqa, - elt_size*n_ctx, 0); - - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, kin2d[il], k2d)); - ggml_build_forward_expand(gf, ggml_cpy(cpy_ctx, vin2d[il], v2d)); + // v is not contiguous, copy row by row + size_t v_row_size = elt_size*kv_head; + for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { + ggml_backend_tensor_set(kv_self.v_l[il], inp, ir*elt_size*n_ctx, v_row_size); + inp += v_row_size; + } } - - ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(cpy_ctx, ctx->backend); - - // load data into the tensors - for (int il = 0; il < n_layer; ++il) { - ggml_backend_tensor_set(kin2d[il], inp, 0, ggml_nbytes(kin2d[il])); - inp += ggml_nbytes(kin2d[il]); - - ggml_backend_tensor_set(vin2d[il], inp, 0, ggml_nbytes(vin2d[il])); - inp += ggml_nbytes(vin2d[il]); - } - - ggml_backend_graph_compute(ctx->backend, gf); - - ggml_free(cpy_ctx); - - ggml_backend_buffer_free(buf); } ctx->kv_self.head = kv_head; diff --git a/llama.h b/llama.h index 43d41b8f6..689e12d7c 100644 --- a/llama.h +++ b/llama.h @@ -118,6 +118,12 @@ extern "C" { LLAMA_ROPE_SCALING_MAX_VALUE = LLAMA_ROPE_SCALING_YARN, }; + enum llama_split_mode { + LLAMA_SPLIT_NONE = 0, // single GPU + LLAMA_SPLIT_LAYER = 1, // split layers and KV across GPUs + LLAMA_SPLIT_ROW = 2, // split rows across GPUs + }; + typedef struct llama_token_data { llama_token id; // token id float logit; // log-odds of the token @@ -180,8 +186,16 @@ extern "C" { struct llama_model_params { int32_t n_gpu_layers; // number of layers to store in VRAM - int32_t main_gpu; // the GPU that is used for scratch and small tensors - const float * 
tensor_split; // how to split layers across multiple GPUs (size: LLAMA_MAX_DEVICES) + enum llama_split_mode split_mode; // how to split the model across multiple GPUs + + // main_gpu interpretation depends on split_mode: + // LLAMA_SPLIT_NONE: the GPU that is used for the entire model + // LLAMA_SPLIT_ROW: the GPU that is used for small tensors and intermediate results + // LLAMA_SPLIT_LAYER: ignored + int32_t main_gpu; + + // proportion of the model (layers or rows) to offload to each GPU, size: LLAMA_MAX_DEVICES + const float * tensor_split; // Called with a progress value between 0.0 and 1.0. Pass NULL to disable. // If the provided progress_callback returns true, model loading continues. diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 7a60d7743..d9b8b106a 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -376,6 +376,11 @@ struct test_case { // allocate ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(ctx, backend1); + if (buf == NULL) { + printf("failed to allocate tensors [%s] ", ggml_backend_name(backend1)); + ggml_free(ctx); + return false; + } // build graph ggml_build_forward_expand(gf, out); @@ -463,19 +468,23 @@ struct test_case { GGML_UNUSED(index); }; - ggml_backend_compare_graph_backend(backend1, backend2, gf, callback, &ud); + const bool cmp_ok = ggml_backend_compare_graph_backend(backend1, backend2, gf, callback, &ud); - if (ud.ok) { - printf("\033[1;32mOK\033[0m\n"); - } else { - printf("\033[1;31mFAIL\033[0m\n"); + if (!cmp_ok) { + printf("compare failed "); } ggml_backend_buffer_free(buf); ggml_free(ctx); - return ud.ok; + if (ud.ok && cmp_ok) { + printf("\033[1;32mOK\033[0m\n"); + return true; + } + + printf("\033[1;31mFAIL\033[0m\n"); + return false; } bool eval_perf(ggml_backend_t backend, const char * op_name) { @@ -519,6 +528,11 @@ struct test_case { // allocate ggml_backend_buffer_t buf = ggml_backend_alloc_ctx_tensors(ctx, backend); + if (buf == NULL) { + printf("failed to allocate tensors\n"); + ggml_free(ctx); + return false; + } // randomize tensors initialize_tensors(ctx); From 3fe81781e3bf98b8e44946240a19f3a6ad08a11a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Fri, 12 Jan 2024 20:38:54 +0100 Subject: [PATCH 444/859] CUDA: faster q8_0 -> f16 dequantization (#4895) --- ggml-cuda.cu | 57 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 2db50437c..bd3814c72 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -523,6 +523,8 @@ static_assert(sizeof(block_iq2_xs) == sizeof(ggml_fp16_t) + QK_K/8*sizeof(uint16 #define CUDA_ACC_BLOCK_SIZE 256 #define CUDA_IM2COL_BLOCK_SIZE 256 +#define CUDA_Q8_0_NE_ALIGN 2048 + // dmmv = dequantize_mul_mat_vec #ifndef GGML_CUDA_DMMV_X #define GGML_CUDA_DMMV_X 32 @@ -2327,6 +2329,45 @@ static __global__ void convert_unary(const void * __restrict__ vx, dst_t * __res y[i] = x[i]; } +template +static __global__ void dequantize_block_q8_0_f16(const void * __restrict__ vx, half * __restrict__ y, const int k) { +#if __CUDA_ARCH__ >= CC_PASCAL + constexpr int nint = CUDA_Q8_0_NE_ALIGN/sizeof(int) + WARP_SIZE; + + const int i0 = CUDA_Q8_0_NE_ALIGN*blockIdx.x; + const int * x0 = ((int *) vx) + blockIdx.x * nint; + half2 * y2 = (half2 *) (y + i0); + + __shared__ int vals[nint]; + +#pragma unroll + for (int ix0 = 0; ix0 < nint; ix0 += WARP_SIZE) { + if (need_check && i0*sizeof(block_q8_0)/QK8_0 + sizeof(int)*(ix0 + threadIdx.x) >= k*sizeof(block_q8_0)/QK8_0) { + break; + } + 
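+        // note: each thread of the warp copies one 32-bit word of the q8_0 data
+        // (deltas and quants) from global into shared memory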
+ const int ix = ix0 + threadIdx.x; + vals[ix] = x0[ix]; + } + +#pragma unroll + for (int iy = 0; iy < CUDA_Q8_0_NE_ALIGN; iy += 2*WARP_SIZE) { + if (need_check && i0 + iy + 2*threadIdx.x >= k) { + return; + } + + const half * b0 = ((const half *) vals) + (sizeof(block_q8_0)/sizeof(half)) * ((iy + 2*threadIdx.x)/QK8_0); + const half d = *b0; + const char2 qs = ((const char2 *) (b0 + 1))[threadIdx.x % (QK8_0/2)]; + + y2[iy/2 + threadIdx.x] = __hmul2(make_half2(qs.x, qs.y), __half2half2(d)); + } +#else + (void) vx; (void) y; (void) k; + bad_arch(); +#endif // __CUDA_ARCH__ >= CC_PASCAL +} + // VDR = vec dot ratio, how many contiguous integers each thread processes when the vec dot kernel is called // MMVQ = mul_mat_vec_q, MMQ = mul_mat_q @@ -6181,6 +6222,17 @@ static void dequantize_block_cuda(const void * __restrict__ vx, dst_t * __restri dequantize_block<<>>(vx, y, k); } +static void dequantize_block_q8_0_f16_cuda(const void * __restrict__ vx, half * __restrict__ y, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_Q8_0_NE_ALIGN - 1) / CUDA_Q8_0_NE_ALIGN; + if (k % CUDA_Q8_0_NE_ALIGN == 0) { + const bool need_check = false; + dequantize_block_q8_0_f16<<>>(vx, y, k); + } else { + const bool need_check = true; + dequantize_block_q8_0_f16<<>>(vx, y, k); + } +} + template static void dequantize_row_q2_K_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb = k / QK_K; @@ -6246,6 +6298,7 @@ static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict_ } static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { + int id; switch (type) { case GGML_TYPE_Q4_0: return dequantize_block_cuda; @@ -6256,6 +6309,10 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { case GGML_TYPE_Q5_1: return dequantize_block_cuda; case GGML_TYPE_Q8_0: + CUDA_CHECK(cudaGetDevice(&id)); + if (g_device_caps[id].cc >= CC_PASCAL) { + return dequantize_block_q8_0_f16_cuda; + } return dequantize_block_cuda; case GGML_TYPE_Q2_K: return dequantize_row_q2_K_cuda; From 52ee4540c0f2e11d52c839db6eb51d014ce060e1 Mon Sep 17 00:00:00 2001 From: Maximilian Winter Date: Fri, 12 Jan 2024 20:46:45 +0100 Subject: [PATCH 445/859] examples : add pydantic models to GBNF grammar generator (#4883) * Create pydantic-models-to-grammar.py * Added some comments for usage * Refactored Grammar Generator Added example and usage instruction. * Update pydantic_models_to_grammar.py * Update pydantic-models-to-grammar-examples.py * Renamed module and imported it. * Update pydantic-models-to-grammar.py * Renamed file and fixed grammar generator issue. --- .../pydantic-models-to-grammar-examples.py | 136 ++ examples/pydantic_models_to_grammar.py | 1151 +++++++++++++++++ 2 files changed, 1287 insertions(+) create mode 100644 examples/pydantic-models-to-grammar-examples.py create mode 100644 examples/pydantic_models_to_grammar.py diff --git a/examples/pydantic-models-to-grammar-examples.py b/examples/pydantic-models-to-grammar-examples.py new file mode 100644 index 000000000..a8a4919cf --- /dev/null +++ b/examples/pydantic-models-to-grammar-examples.py @@ -0,0 +1,136 @@ +# Function calling example using pydantic models. + +import json +from enum import Enum +from typing import Union, Optional + +import requests +from pydantic import BaseModel, Field + +import importlib +from pydantic_models_to_grammar import generate_gbnf_grammar_and_documentation + +# Function to get completion on the llama.cpp server with grammar. 
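+# Note: assumes a llama.cpp server (see examples/server) is listening on 127.0.0.1:8080;
+# adjust the endpoint below if your server runs elsewhere.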
+def create_completion(prompt, grammar):
+    headers = {"Content-Type": "application/json"}
+    data = {"prompt": prompt, "grammar": grammar}
+
+    response = requests.post("http://127.0.0.1:8080/completion", headers=headers, json=data)
+    data = response.json()
+
+    print(data["content"])
+    return data["content"]
+
+
+# A function for the agent to send a message to the user.
+class SendMessageToUser(BaseModel):
+    """
+    Send a message to the User.
+    """
+    chain_of_thought: str = Field(..., description="Your chain of thought while sending the message.")
+    message: str = Field(..., description="Message you want to send to the user.")
+
+    def run(self):
+        print(self.message)
+
+
+# Enum for the calculator function.
+class MathOperation(Enum):
+    ADD = "add"
+    SUBTRACT = "subtract"
+    MULTIPLY = "multiply"
+    DIVIDE = "divide"
+
+
+# Very simple calculator tool for the agent.
+class Calculator(BaseModel):
+    """
+    Perform a math operation on two numbers.
+    """
+    number_one: Union[int, float] = Field(..., description="First number.")
+    operation: MathOperation = Field(..., description="Math operation to perform.")
+    number_two: Union[int, float] = Field(..., description="Second number.")
+
+    def run(self):
+        if self.operation == MathOperation.ADD:
+            return self.number_one + self.number_two
+        elif self.operation == MathOperation.SUBTRACT:
+            return self.number_one - self.number_two
+        elif self.operation == MathOperation.MULTIPLY:
+            return self.number_one * self.number_two
+        elif self.operation == MathOperation.DIVIDE:
+            return self.number_one / self.number_two
+        else:
+            raise ValueError("Unknown operation.")
+
+
+# Here the grammar gets generated by passing the available function models to the generate_gbnf_grammar_and_documentation function. This also generates documentation usable by the LLM.
+# pydantic_model_list is the list of pydantic models.
+# outer_object_name is an optional name for an outer object around the actual model object, like a "function" object with "function_parameters" that contains the actual model object. If None, no outer object will be generated.
+# outer_object_content is the name of the outer object's content field.
+# model_prefix is the optional prefix for models in the documentation. (Default="Output Model")
+# fields_prefix is the prefix for the model fields in the documentation. (Default="Output Fields")
+gbnf_grammar, documentation = generate_gbnf_grammar_and_documentation(
+    pydantic_model_list=[SendMessageToUser, Calculator], outer_object_name="function",
+    outer_object_content="function_parameters", model_prefix="Function", fields_prefix="Parameters")
+
+print(gbnf_grammar)
+print(documentation)
+
+system_message = "You are an advanced AI, tasked to assist the user by calling functions in JSON format. The following are the available functions and their parameters and types:\n\n" + documentation
+
+user_message = "What is 42 * 42?"
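+
+# Build the prompt in ChatML format; the grammar passed along with it constrains
+# the server's output to JSON matching one of the function models above.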
+prompt = f"<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{user_message}<|im_end|>\n<|im_start|>assistant" + +text = create_completion(prompt=prompt, grammar=gbnf_grammar) +# This should output something like this: +# { +# "function": "calculator", +# "function_parameters": { +# "number_one": 42, +# "operation": "multiply", +# "number_two": 42 +# } +# } +function_dictionary = json.loads(text) +if function_dictionary["function"] == "calculator": + function_parameters = {**function_dictionary["function_parameters"]} + + print(Calculator(**function_parameters).run()) + # This should output: 1764 + + +# A example structured output based on pydantic models. The LLM will create an entry for a Book database out of an unstructured text. +class Category(Enum): + """ + The category of the book. + """ + Fiction = "Fiction" + NonFiction = "Non-Fiction" + + +class Book(BaseModel): + """ + Represents an entry about a book. + """ + title: str = Field(..., description="Title of the book.") + author: str = Field(..., description="Author of the book.") + published_year: Optional[int] = Field(..., description="Publishing year of the book.") + keywords: list[str] = Field(..., description="A list of keywords.") + category: Category = Field(..., description="Category of the book.") + summary: str = Field(..., description="Summary of the book.") + + +# We need no additional parameters other than our list of pydantic models. +gbnf_grammar, documentation = generate_gbnf_grammar_and_documentation([Book]) + +system_message = "You are an advanced AI, tasked to create a dataset entry in JSON for a Book. The following is the expected output model:\n\n" + documentation + +text = """The Feynman Lectures on Physics is a physics textbook based on some lectures by Richard Feynman, a Nobel laureate who has sometimes been called "The Great Explainer". The lectures were presented before undergraduate students at the California Institute of Technology (Caltech), during 1961–1963. The book's co-authors are Feynman, Robert B. Leighton, and Matthew Sands.""" +prompt = f"<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{text}<|im_end|>\n<|im_start|>assistant" + +text = create_completion(prompt=prompt, grammar=gbnf_grammar) + +json_data = json.loads(text) + +print(Book(**json_data)) diff --git a/examples/pydantic_models_to_grammar.py b/examples/pydantic_models_to_grammar.py new file mode 100644 index 000000000..41b98fdc1 --- /dev/null +++ b/examples/pydantic_models_to_grammar.py @@ -0,0 +1,1151 @@ +import inspect +import json +from copy import copy +from inspect import isclass, getdoc +from types import NoneType + +from pydantic import BaseModel, create_model, Field +from typing import Any, Type, List, get_args, get_origin, Tuple, Union, Optional, _GenericAlias +from enum import Enum +from typing import get_type_hints, Callable +import re + + +class PydanticDataType(Enum): + """ + Defines the data types supported by the grammar_generator. + + Attributes: + STRING (str): Represents a string data type. + BOOLEAN (str): Represents a boolean data type. + INTEGER (str): Represents an integer data type. + FLOAT (str): Represents a float data type. + OBJECT (str): Represents an object data type. + ARRAY (str): Represents an array data type. + ENUM (str): Represents an enum data type. + CUSTOM_CLASS (str): Represents a custom class data type. 
+ """ + STRING = "string" + TRIPLE_QUOTED_STRING = "triple_quoted_string" + MARKDOWN_STRING = "markdown_string" + BOOLEAN = "boolean" + INTEGER = "integer" + FLOAT = "float" + OBJECT = "object" + ARRAY = "array" + ENUM = "enum" + ANY = "any" + NULL = "null" + CUSTOM_CLASS = "custom-class" + CUSTOM_DICT = "custom-dict" + SET = "set" + + +def map_pydantic_type_to_gbnf(pydantic_type: Type[Any]) -> str: + if isclass(pydantic_type) and issubclass(pydantic_type, str): + return PydanticDataType.STRING.value + elif isclass(pydantic_type) and issubclass(pydantic_type, bool): + return PydanticDataType.BOOLEAN.value + elif isclass(pydantic_type) and issubclass(pydantic_type, int): + return PydanticDataType.INTEGER.value + elif isclass(pydantic_type) and issubclass(pydantic_type, float): + return PydanticDataType.FLOAT.value + elif isclass(pydantic_type) and issubclass(pydantic_type, Enum): + return PydanticDataType.ENUM.value + + elif isclass(pydantic_type) and issubclass(pydantic_type, BaseModel): + return format_model_and_field_name(pydantic_type.__name__) + elif get_origin(pydantic_type) == list: + element_type = get_args(pydantic_type)[0] + return f"{map_pydantic_type_to_gbnf(element_type)}-list" + elif get_origin(pydantic_type) == set: + element_type = get_args(pydantic_type)[0] + return f"{map_pydantic_type_to_gbnf(element_type)}-set" + elif get_origin(pydantic_type) == Union: + union_types = get_args(pydantic_type) + union_rules = [map_pydantic_type_to_gbnf(ut) for ut in union_types] + return f"union-{'-or-'.join(union_rules)}" + elif get_origin(pydantic_type) == Optional: + element_type = get_args(pydantic_type)[0] + return f"optional-{map_pydantic_type_to_gbnf(element_type)}" + elif isclass(pydantic_type): + return f"{PydanticDataType.CUSTOM_CLASS.value}-{format_model_and_field_name(pydantic_type.__name__)}" + elif get_origin(pydantic_type) == dict: + key_type, value_type = get_args(pydantic_type) + return f"custom-dict-key-type-{format_model_and_field_name(map_pydantic_type_to_gbnf(key_type))}-value-type-{format_model_and_field_name(map_pydantic_type_to_gbnf(value_type))}" + else: + return "unknown" + + +def format_model_and_field_name(model_name: str) -> str: + parts = re.findall('[A-Z][^A-Z]*', model_name) + if not parts: # Check if the list is empty + return model_name.lower().replace("_", "-") + return '-'.join(part.lower().replace("_", "-") for part in parts) + + +def generate_list_rule(element_type): + """ + Generate a GBNF rule for a list of a given element type. + + :param element_type: The type of the elements in the list (e.g., 'string'). + :return: A string representing the GBNF rule for a list of the given type. 
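+
+    Example (assuming the element type maps to the GBNF rule 'string'):
+        string-list ::= "[" string ("," string)* "]"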
+ """ + rule_name = f"{map_pydantic_type_to_gbnf(element_type)}-list" + element_rule = map_pydantic_type_to_gbnf(element_type) + list_rule = fr'{rule_name} ::= "[" {element_rule} ("," {element_rule})* "]"' + return list_rule + + +def get_members_structure(cls, rule_name): + if issubclass(cls, Enum): + # Handle Enum types + members = [f'\"\\\"{member.value}\\\"\"' for name, member in cls.__members__.items()] + return f"{cls.__name__.lower()} ::= " + " | ".join(members) + if cls.__annotations__ and cls.__annotations__ != {}: + result = f'{rule_name} ::= "{{"' + type_list_rules = [] + # Modify this comprehension + members = [f' \"\\\"{name}\\\"\" ":" {map_pydantic_type_to_gbnf(param_type)}' + for name, param_type in cls.__annotations__.items() + if name != 'self'] + + result += '"," '.join(members) + result += ' "}"' + return result, type_list_rules + elif rule_name == "custom-class-any": + result = f'{rule_name} ::= ' + result += 'value' + type_list_rules = [] + return result, type_list_rules + else: + init_signature = inspect.signature(cls.__init__) + parameters = init_signature.parameters + result = f'{rule_name} ::= "{{"' + type_list_rules = [] + # Modify this comprehension too + members = [f' \"\\\"{name}\\\"\" ":" {map_pydantic_type_to_gbnf(param.annotation)}' + for name, param in parameters.items() + if name != 'self' and param.annotation != inspect.Parameter.empty] + + result += '", "'.join(members) + result += ' "}"' + return result, type_list_rules + + +def regex_to_gbnf(regex_pattern: str) -> str: + """ + Translate a basic regex pattern to a GBNF rule. + Note: This function handles only a subset of simple regex patterns. + """ + gbnf_rule = regex_pattern + + # Translate common regex components to GBNF + gbnf_rule = gbnf_rule.replace('\\d', '[0-9]') + gbnf_rule = gbnf_rule.replace('\\s', '[ \t\n]') + + # Handle quantifiers and other regex syntax that is similar in GBNF + # (e.g., '*', '+', '?', character classes) + + return gbnf_rule + + +def generate_gbnf_integer_rules(max_digit=None, min_digit=None): + """ + + Generate GBNF Integer Rules + + Generates GBNF (Generalized Backus-Naur Form) rules for integers based on the given maximum and minimum digits. + + Parameters: + max_digit (int): The maximum number of digits for the integer. Default is None. + min_digit (int): The minimum number of digits for the integer. Default is None. + + Returns: + integer_rule (str): The identifier for the integer rule generated. + additional_rules (list): A list of additional rules generated based on the given maximum and minimum digits. + + """ + additional_rules = [] + + # Define the rule identifier based on max_digit and min_digit + integer_rule = "integer-part" + if max_digit is not None: + integer_rule += f"-max{max_digit}" + if min_digit is not None: + integer_rule += f"-min{min_digit}" + + # Handling Integer Rules + if max_digit is not None or min_digit is not None: + # Start with an empty rule part + integer_rule_part = '' + + # Add mandatory digits as per min_digit + if min_digit is not None: + integer_rule_part += '[0-9] ' * min_digit + + # Add optional digits up to max_digit + if max_digit is not None: + optional_digits = max_digit - (min_digit if min_digit is not None else 0) + integer_rule_part += ''.join(['[0-9]? 
' for _ in range(optional_digits)])
+
+        # Trim the rule part and append it to additional rules
+        integer_rule_part = integer_rule_part.strip()
+        if integer_rule_part:
+            additional_rules.append(f'{integer_rule} ::= {integer_rule_part}')
+
+    return integer_rule, additional_rules
+
+
+def generate_gbnf_float_rules(max_digit=None, min_digit=None, max_precision=None, min_precision=None):
+    """
+    Generate GBNF float rules based on the given constraints.
+
+    :param max_digit: Maximum number of digits in the integer part (default: None)
+    :param min_digit: Minimum number of digits in the integer part (default: None)
+    :param max_precision: Maximum number of digits in the fractional part (default: None)
+    :param min_precision: Minimum number of digits in the fractional part (default: None)
+    :return: A tuple containing the float rule and additional rules as a list
+
+    Example Usage:
+    max_digit = 3
+    min_digit = 1
+    max_precision = 2
+    min_precision = 1
+    generate_gbnf_float_rules(max_digit, min_digit, max_precision, min_precision)
+
+    Output:
+    ('float-3-1-2-1', ['integer-part-max3-min1 ::= [0-9] [0-9] [0-9]?', 'fractional-part-max2-min1 ::= [0-9] [0-9]?', 'float-3-1-2-1 ::= integer-part-max3-min1 "." fractional-part-max2-min1'])
+
+    Note:
+    GBNF stands for Generalized Backus-Naur Form, which is a notation technique to specify the syntax of programming languages or other formal grammars.
+    """
+    additional_rules = []
+
+    # Define the integer part rule
+    integer_part_rule = "integer-part" + (f"-max{max_digit}" if max_digit is not None else "") + (
+        f"-min{min_digit}" if min_digit is not None else "")
+
+    # Define the fractional part rule based on precision constraints
+    fractional_part_rule = "fractional-part"
+    fractional_rule_part = ''
+    if max_precision is not None or min_precision is not None:
+        fractional_part_rule += (f"-max{max_precision}" if max_precision is not None else "") + (
+            f"-min{min_precision}" if min_precision is not None else "")
+        # Minimum number of digits
+        fractional_rule_part = '[0-9]' * (min_precision if min_precision is not None else 1)
+        # Optional additional digits
+        fractional_rule_part += ''.join([' [0-9]?'] * (
+            (max_precision - (min_precision if min_precision is not None else 1)) if max_precision is not None else 0))
+        additional_rules.append(f'{fractional_part_rule} ::= {fractional_rule_part}')
+
+    # Define the float rule
+    float_rule = f"float-{max_digit if max_digit is not None else 'X'}-{min_digit if min_digit is not None else 'X'}-{max_precision if max_precision is not None else 'X'}-{min_precision if min_precision is not None else 'X'}"
+    additional_rules.append(f'{float_rule} ::= {integer_part_rule} "." {fractional_part_rule}')
+
+    # Generating the integer part rule definition, if necessary
+    if max_digit is not None or min_digit is not None:
+        integer_rule_part = '[0-9]'
+        if min_digit is not None and min_digit > 1:
+            integer_rule_part += ' [0-9]' * (min_digit - 1)
+        if max_digit is not None:
+            integer_rule_part += ''.join([' [0-9]?'] * (max_digit - (min_digit if min_digit is not None else 1)))
+        additional_rules.append(f'{integer_part_rule} ::= {integer_rule_part.strip()}')
+
+    return float_rule, additional_rules
+
+
+def generate_gbnf_rule_for_type(model_name, field_name,
+                                field_type, is_optional, processed_models, created_rules,
+                                field_info=None) -> \
+        Tuple[str, list]:
+    """
+    Generate GBNF rule for a given field type.
+
+    :param model_name: Name of the model.
+    :param field_name: Name of the field.
+    :param field_type: Type of the field.
+    :param is_optional: Whether the field is optional.
+    :param processed_models: Set of already processed models.
+    :param created_rules: Dict of already created rules.
+    :param field_info: Additional information about the field (optional).
+
+    :return: Tuple containing the GBNF type and a list of additional rules.
+    :rtype: Tuple[str, list]
+    """
+    rules = []
+
+    field_name = format_model_and_field_name(field_name)
+    gbnf_type = map_pydantic_type_to_gbnf(field_type)
+
+    if isclass(field_type) and issubclass(field_type, BaseModel):
+        nested_model_name = format_model_and_field_name(field_type.__name__)
+        nested_model_rules, _, _ = generate_gbnf_grammar(field_type, processed_models, created_rules)
+        rules.extend(nested_model_rules)
+        gbnf_type = nested_model_name
+    elif isclass(field_type) and issubclass(field_type, Enum):
+        enum_values = [f'\"\\\"{e.value}\\\"\"' for e in field_type]  # Adding escaped quotes
+        enum_rule = f"{model_name}-{field_name} ::= {' | '.join(enum_values)}"
+        rules.append(enum_rule)
+        gbnf_type = f"{model_name}-{field_name}"
+    elif get_origin(field_type) in (list, set) or field_type in (list, set):  # Array (list and set share the same rule shape)
+        element_type = get_args(field_type)[0]
+        element_rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, f"{field_name}-element",
+                                                                          element_type, is_optional,
+                                                                          processed_models, created_rules)
+        rules.extend(additional_rules)
+        array_rule = f"""{model_name}-{field_name} ::= "[" ws {element_rule_name} ("," ws {element_rule_name})* "]" """
+        rules.append(array_rule)
+        gbnf_type = f"{model_name}-{field_name}"
+    elif gbnf_type.startswith("custom-class-"):
+        nested_model_rules, _ = get_members_structure(field_type, gbnf_type)
+        rules.append(nested_model_rules)
+    elif gbnf_type.startswith("custom-dict-"):
+        key_type, value_type = get_args(field_type)
+
+        additional_key_type, additional_key_rules = generate_gbnf_rule_for_type(model_name, f"{field_name}-key-type",
+                                                                                key_type, is_optional,
+                                                                                processed_models, created_rules)
+        additional_value_type, additional_value_rules = generate_gbnf_rule_for_type(model_name, f"{field_name}-value-type",
+                                                                                    value_type, is_optional,
+                                                                                    processed_models, created_rules)
+        gbnf_type = fr'{gbnf_type} ::= "{{" ( {additional_key_type} ":" {additional_value_type} ("," {additional_key_type} ":" {additional_value_type})* )? "}}" '
+
+        rules.extend(additional_key_rules)
+        rules.extend(additional_value_rules)
+    elif gbnf_type.startswith("union-"):
+        union_types = get_args(field_type)
+        union_rules = []
+
+        for union_type in union_types:
+            if isinstance(union_type, _GenericAlias):
+                union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type(model_name, field_name, union_type,
+                                                                                False, processed_models, created_rules)
+                union_rules.append(union_gbnf_type)
+                rules.extend(union_rules_list)
+            elif not issubclass(union_type, NoneType):
+                union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type(model_name, field_name, union_type,
+                                                                                False, processed_models, created_rules)
+                union_rules.append(union_gbnf_type)
+                rules.extend(union_rules_list)
+
+        # A union with a single non-None member is an optional field; a larger union gets its own rule.
+        if len(union_rules) == 1:
+            gbnf_type = f"{model_name}-{field_name}-optional"
+            union_grammar_rule = f"{gbnf_type} ::= {' | '.join(union_rules)} | null"
+        else:
+            gbnf_type = f"{model_name}-{field_name}-union"
+            union_grammar_rule = f"{gbnf_type} ::= {' | '.join(union_rules)}"
+        rules.append(union_grammar_rule)
+    elif isclass(field_type) and issubclass(field_type, str):
+        if field_info and hasattr(field_info, 'json_schema_extra') and field_info.json_schema_extra is not None:
+            triple_quoted_string = field_info.json_schema_extra.get('triple_quoted_string', False)
+            markdown_string = field_info.json_schema_extra.get('markdown_string', False)
+
+            gbnf_type = PydanticDataType.TRIPLE_QUOTED_STRING.value if triple_quoted_string else PydanticDataType.STRING.value
+            gbnf_type = PydanticDataType.MARKDOWN_STRING.value if markdown_string else gbnf_type
+        elif field_info and hasattr(field_info, 'pattern'):
+            # Convert regex pattern to grammar rule (FieldInfo exposes the pattern directly)
+            regex_pattern = field_info.pattern
+            gbnf_type = f"pattern-{field_name} ::= {regex_to_gbnf(regex_pattern)}"
+        else:
+            gbnf_type = PydanticDataType.STRING.value
+
+    elif isclass(field_type) and issubclass(field_type, float) and field_info and hasattr(field_info, 'json_schema_extra') and field_info.json_schema_extra is not None:
+        # Retrieve precision attributes for floats (the elif guard above ensures json_schema_extra is present)
+        max_precision = field_info.json_schema_extra.get('max_precision')
+        min_precision = field_info.json_schema_extra.get('min_precision')
+        max_digits = field_info.json_schema_extra.get('max_digit')
+        min_digits = field_info.json_schema_extra.get('min_digit')
+
+        # Generate GBNF rule for float with given attributes
+        gbnf_type, rules = generate_gbnf_float_rules(max_digit=max_digits, min_digit=min_digits,
+                                                     max_precision=max_precision, min_precision=min_precision)
+
+    elif isclass(field_type) and issubclass(field_type, int) and field_info and hasattr(field_info, 'json_schema_extra') and field_info.json_schema_extra is not None:
+        # Retrieve digit attributes for integers (the elif guard above ensures json_schema_extra is present)
+        max_digits = field_info.json_schema_extra.get('max_digit')
+        min_digits = field_info.json_schema_extra.get('min_digit')
+
+        # Generate GBNF rule for integer with given attributes
+        gbnf_type, rules = generate_gbnf_integer_rules(max_digit=max_digits, min_digit=min_digits)
+    else:
+        gbnf_type, rules = gbnf_type, []
+
+    return gbnf_type, rules
+
+
+def generate_gbnf_grammar(model: Type[BaseModel], processed_models: set, created_rules: dict) -> tuple:
+    """
+    Generate a GBNF grammar for a given Pydantic model.
+
+    :param model: A Pydantic model class to generate the grammar for. Must be a subclass of BaseModel.
+    :param processed_models: A set of already processed models to prevent infinite recursion.
+    :param created_rules: A dict containing already created rules to prevent duplicates.
+    :return: A list of GBNF grammar rules in string format, plus two booleans indicating whether an extra
+        markdown or triple-quoted string rule is part of the grammar.
+    Example usage:
+    ```
+    model = MyModel
+    processed_models = set()
+    created_rules = dict()
+
+    gbnf_grammar, has_markdown, has_triple_quoted = generate_gbnf_grammar(model, processed_models, created_rules)
+    ```
+    """
+    if model in processed_models:
+        return [], False, False
+
+    processed_models.add(model)
+    model_name = format_model_and_field_name(model.__name__)
+
+    if not issubclass(model, BaseModel):
+        # For non-Pydantic classes, generate model_fields from __annotations__ or __init__
+        if hasattr(model, '__annotations__') and model.__annotations__:
+            model_fields = {name: (typ, ...) for name, typ in model.__annotations__.items()}
+        else:
+            init_signature = inspect.signature(model.__init__)
+            parameters = init_signature.parameters
+            model_fields = {name: (param.annotation, param.default) for name, param in parameters.items()
+                            if name != 'self'}
+    else:
+        # For Pydantic models, iterate the annotations; the per-field FieldInfo is looked up below
+        model_fields = model.__annotations__
+
+    model_rule_parts = []
+    nested_rules = []
+    has_markdown_code_block = False
+    has_triple_quoted_string = False
+    look_for_markdown_code_block = False
+    look_for_triple_quoted_string = False
+    for field_name, field_info in model_fields.items():
+        if not issubclass(model, BaseModel):
+            field_type, default_value = field_info
+            # Check if the field is optional (not required)
+            is_optional = (default_value is not inspect.Parameter.empty) and (default_value is not Ellipsis)
+        else:
+            field_type = field_info
+            field_info = model.model_fields[field_name]
+            # FieldInfo.is_required is a method in Pydantic v2; Optional[...] has origin Union with a NoneType member
+            is_optional = not field_info.is_required() and get_origin(field_type) is Union and NoneType in get_args(field_type)
+        rule_name, additional_rules = generate_gbnf_rule_for_type(model_name,
+                                                                  format_model_and_field_name(field_name),
+                                                                  field_type, is_optional,
+                                                                  processed_models, created_rules, field_info)
+        look_for_markdown_code_block = rule_name == "markdown_string"
+        look_for_triple_quoted_string = rule_name == "triple_quoted_string"
+        if not look_for_markdown_code_block and not look_for_triple_quoted_string:
+            if rule_name not in created_rules:
+                created_rules[rule_name] = additional_rules
+            model_rule_parts.append(f' ws \"\\\"{field_name}\\\"\" ": " {rule_name}')  # Adding escaped quotes
+            nested_rules.extend(additional_rules)
+        else:
+            has_markdown_code_block = look_for_markdown_code_block
+            has_triple_quoted_string = look_for_triple_quoted_string
+
+    fields_joined = r' "," "\n" '.join(model_rule_parts)
+    model_rule = fr'{model_name} ::= "{{" "\n" {fields_joined} "\n" ws "}}"'
+
+    if look_for_markdown_code_block or look_for_triple_quoted_string:
+        model_rule += ' ws "}"'
+
+    if has_triple_quoted_string:
+        model_rule += '"\\n" triple-quoted-string'
+    if has_markdown_code_block:
+        model_rule += '"\\n" markdown-code-block'
+    all_rules = [model_rule] + nested_rules
+
+    return all_rules, has_markdown_code_block, has_triple_quoted_string
+
+
+def generate_gbnf_grammar_from_pydantic_models(models: List[Type[BaseModel]], outer_object_name: str = None,
+                                               outer_object_content: str = None, list_of_outputs: bool = False) -> str:
+    """
+    Generate a GBNF grammar from a list of Pydantic models.
+
+    This method takes a list of Pydantic models and uses them to generate a GBNF grammar string.
+    The generated grammar string can then be used for parsing and validating data.
+
+    Parameters:
+        models (List[Type[BaseModel]]): A list of Pydantic models to generate the grammar from.
+        outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. E.g. "function" for function calling.
+        outer_object_content (str): Content for the outer rule in the GBNF grammar. E.g. "function_parameters" or "params" for function calling.
+        list_of_outputs (bool, optional): Allows a list of output objects.
+    Returns:
+        str: The generated GBNF grammar string.
+
+    Examples:
+        models = [UserModel, PostModel]
+        grammar = generate_gbnf_grammar_from_pydantic_models(models)
+        print(grammar)
+        # Output:
+        # root ::= ws grammar-models
+        # ...
+    """
+    processed_models = set()
+    all_rules = []
+    created_rules = {}
+    if outer_object_name is None:
+        for model in models:
+            model_rules, _, _ = generate_gbnf_grammar(model, processed_models, created_rules)
+            all_rules.extend(model_rules)
+
+        if list_of_outputs:
+            root_rule = r'root ::= ws "[" grammar-models ("," grammar-models)* "]"' + "\n"
+        else:
+            root_rule = r'root ::= ws grammar-models' + "\n"
+        root_rule += "grammar-models ::= " + " | ".join(
+            [format_model_and_field_name(model.__name__) for model in models])
+        all_rules.insert(0, root_rule)
+        return "\n".join(all_rules)
+    else:
+        if list_of_outputs:
+            root_rule = fr'root ::= ws "[" {format_model_and_field_name(outer_object_name)} ("," {format_model_and_field_name(outer_object_name)})* "]"' + "\n"
+        else:
+            root_rule = f"root ::= {format_model_and_field_name(outer_object_name)}\n"
+
+        model_rule = fr'{format_model_and_field_name(outer_object_name)} ::= ws "{{" ws "\"{outer_object_name}\"" ": " grammar-models'
+
+        fields_joined = " | ".join(
+            [fr'{format_model_and_field_name(model.__name__)}-grammar-model' for model in models])
+
+        grammar_model_rules = f'\ngrammar-models ::= {fields_joined}'
+        mod_rules = []
+        for model in models:
+            mod_rule = fr'{format_model_and_field_name(model.__name__)}-grammar-model ::= ws'
+            mod_rule += fr'"\"{format_model_and_field_name(model.__name__)}\"" "," ws "\"{outer_object_content}\"" ws ":" ws {format_model_and_field_name(model.__name__)}' + '\n'
+            mod_rules.append(mod_rule)
+        grammar_model_rules += "\n" + "\n".join(mod_rules)
+
+        look_for_markdown_code_block = False
+        look_for_triple_quoted_string = False
+        for model in models:
+            model_rules, markdown_block, triple_quoted_string = generate_gbnf_grammar(model, processed_models, created_rules)
+            all_rules.extend(model_rules)
+            if markdown_block:
+                look_for_markdown_code_block = True
+            if triple_quoted_string:
+                look_for_triple_quoted_string = True
+
+        if not look_for_markdown_code_block and not look_for_triple_quoted_string:
+            model_rule += ' ws "}"'
+        all_rules.insert(0, root_rule + model_rule + grammar_model_rules)
+        return "\n".join(all_rules)
+
+
+def get_primitive_grammar(grammar):
+    """
+    Returns the needed GBNF primitive grammar for a given GBNF grammar string.
+
+    Args:
+        grammar (str): The string containing the GBNF grammar.
+
+    Returns:
+        str: GBNF primitive grammar string.
+    """
+    type_list = []
+    if "string-list" in grammar:
+        type_list.append(str)
+    if "boolean-list" in grammar:
+        type_list.append(bool)
+    if "integer-list" in grammar:
+        type_list.append(int)
+    if "float-list" in grammar:
+        type_list.append(float)
+    additional_grammar = [generate_list_rule(t) for t in type_list]
+    primitive_grammar = r"""
+boolean ::= "true" | "false"
+null ::= "null"
+string ::= "\"" (
+        [^"\\] |
+        "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
+      )* "\"" ws
+ws ::= ([ \t\n] ws)?
+float ::= ("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? ws
+
+integer ::= [0-9]+"""
+
+    any_block = ""
+    if "custom-class-any" in grammar:
+        any_block = '''
+value ::= object | array | string | number | boolean | null
+
+object ::=
+  "{" ws (
+    string ":" ws value
+    ("," ws string ":" ws value)*
+  )? "}" ws
+
+array ::=
+  "[" ws (
+    value
+    ("," ws value)*
+  )? "]" ws
+
+number ::= integer | float'''
+
+    markdown_code_block_grammar = ""
+    if "markdown-code-block" in grammar:
+        markdown_code_block_grammar = r'''
+markdown-code-block ::= opening-triple-ticks markdown-code-block-content closing-triple-ticks
+markdown-code-block-content ::= ( [^`] | "`" [^`] | "`" "`" [^`] )*
+opening-triple-ticks ::= "```" "python" "\n" | "```" "c" "\n" | "```" "cpp" "\n" | "```" "txt" "\n" | "```" "text" "\n" | "```" "json" "\n" | "```" "javascript" "\n" | "```" "css" "\n" | "```" "html" "\n" | "```" "markdown" "\n"
+closing-triple-ticks ::= "```" "\n"'''
+
+    if "triple-quoted-string" in grammar:
+        # Append (do not overwrite), so grammars that use both markdown and triple-quoted strings keep both rule sets
+        markdown_code_block_grammar += r"""
+triple-quoted-string ::= triple-quotes triple-quoted-string-content triple-quotes
+triple-quoted-string-content ::= ( [^'] | "'" [^'] | "'" "'" [^'] )*
+triple-quotes ::= "'''" """
+    return "\n" + '\n'.join(additional_grammar) + any_block + primitive_grammar + markdown_code_block_grammar
+
+
+def generate_field_markdown(field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1) -> str:
+    indent = ' ' * depth
+    field_markdown = f"{indent}- **{field_name}** (`{field_type.__name__}`): "
+
+    # Extracting the field description from the Pydantic FieldInfo in model_fields
+    field_info = model.model_fields.get(field_name)
+    field_description = field_info.description if field_info and field_info.description else "No description available."
+
+    field_markdown += field_description + '\n'
+
+    # Handling nested BaseModel fields
+    if isclass(field_type) and issubclass(field_type, BaseModel):
+        field_markdown += f"{indent} - Details:\n"
+        for name, type_ in field_type.__annotations__.items():
+            field_markdown += generate_field_markdown(name, type_, field_type, depth + 2)
+
+    return field_markdown
+
+
+def generate_markdown_report(pydantic_models: List[Type[BaseModel]]) -> str:
+    markdown = ""
+    for model in pydantic_models:
+        markdown += f"### {format_model_and_field_name(model.__name__)}\n"
+
+        # Check if the model's docstring is different from BaseModel's docstring
+        class_doc = getdoc(model)
+        base_class_doc = getdoc(BaseModel)
+        class_description = class_doc if class_doc and class_doc != base_class_doc else "No specific description available."
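+        # (inspect.getdoc() falls back to the docstring inherited from BaseModel when a model
+        # defines none, which is why the comparison against base_class_doc is needed)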
+ + markdown += f"{class_description}\n\n" + markdown += "#### Fields\n" + + if isclass(model) and issubclass(model, BaseModel): + for name, field_type in model.__annotations__.items(): + markdown += generate_field_markdown(format_model_and_field_name(name), field_type, model) + markdown += "\n" + + return markdown + + +def format_json_example(example: dict, depth: int) -> str: + """ + Format a JSON example into a readable string with indentation. + + Args: + example (dict): JSON example to be formatted. + depth (int): Indentation depth. + + Returns: + str: Formatted JSON example string. + """ + indent = ' ' * depth + formatted_example = '{\n' + for key, value in example.items(): + value_text = f"'{value}'" if isinstance(value, str) else value + formatted_example += f"{indent}{key}: {value_text},\n" + formatted_example = formatted_example.rstrip(',\n') + '\n' + indent + '}' + return formatted_example + + +def generate_text_documentation(pydantic_models: List[Type[BaseModel]], model_prefix="Model", + fields_prefix="Fields", documentation_with_field_description=True) -> str: + """ + Generate text documentation for a list of Pydantic models. + + Args: + pydantic_models (List[Type[BaseModel]]): List of Pydantic model classes. + model_prefix (str): Prefix for the model section. + fields_prefix (str): Prefix for the fields section. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + str: Generated text documentation. + """ + documentation = "" + pyd_models = [(model, True) for model in pydantic_models] + for model, add_prefix in pyd_models: + if add_prefix: + documentation += f"{model_prefix}: {format_model_and_field_name(model.__name__)}\n" + else: + documentation += f"Model: {format_model_and_field_name(model.__name__)}\n" + + # Handling multi-line model description with proper indentation + + class_doc = getdoc(model) + base_class_doc = getdoc(BaseModel) + class_description = class_doc if class_doc and class_doc != base_class_doc else "" + if class_description != "": + documentation += " Description: " + documentation += "\n" + format_multiline_description(class_description, 2) + "\n" + + if add_prefix: + # Indenting the fields section + documentation += f" {fields_prefix}:\n" + else: + documentation += f" Fields:\n" + if isclass(model) and issubclass(model, BaseModel): + for name, field_type in model.__annotations__.items(): + # if name == "markdown_code_block": + # continue + if get_origin(field_type) == list: + element_type = get_args(field_type)[0] + if isclass(element_type) and issubclass(element_type, BaseModel): + pyd_models.append((element_type, False)) + if get_origin(field_type) == Union: + element_types = get_args(field_type) + for element_type in element_types: + if isclass(element_type) and issubclass(element_type, BaseModel): + pyd_models.append((element_type, False)) + documentation += generate_field_text(name, field_type, model, + documentation_with_field_description=documentation_with_field_description) + documentation += "\n" + + if hasattr(model, 'Config') and hasattr(model.Config, + 'json_schema_extra') and 'example' in model.Config.json_schema_extra: + documentation += f" Expected Example Output for {format_model_and_field_name(model.__name__)}:\n" + json_example = json.dumps(model.Config.json_schema_extra['example']) + documentation += format_multiline_description(json_example, 2) + "\n" + + return documentation + + +def generate_field_text(field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1, 
+ documentation_with_field_description=True) -> str: + """ + Generate text documentation for a Pydantic model field. + + Args: + field_name (str): Name of the field. + field_type (Type[Any]): Type of the field. + model (Type[BaseModel]): Pydantic model class. + depth (int): Indentation depth in the documentation. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + str: Generated text documentation for the field. + """ + indent = ' ' * depth + + field_info = model.model_fields.get(field_name) + field_description = field_info.description if field_info and field_info.description else "" + + if get_origin(field_type) == list: + element_type = get_args(field_type)[0] + field_text = f"{indent}{field_name} ({format_model_and_field_name(field_type.__name__)} of {format_model_and_field_name(element_type.__name__)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + elif get_origin(field_type) == Union: + element_types = get_args(field_type) + types = [] + for element_type in element_types: + types.append(format_model_and_field_name(element_type.__name__)) + field_text = f"{indent}{field_name} ({' or '.join(types)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + else: + field_text = f"{indent}{field_name} ({format_model_and_field_name(field_type.__name__)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + + if not documentation_with_field_description: + return field_text + + if field_description != "": + field_text += f"{indent} Description: " + field_description + "\n" + + # Check for and include field-specific examples if available + if hasattr(model, 'Config') and hasattr(model.Config, + 'json_schema_extra') and 'example' in model.Config.json_schema_extra: + field_example = model.Config.json_schema_extra['example'].get(field_name) + if field_example is not None: + example_text = f"'{field_example}'" if isinstance(field_example, str) else field_example + field_text += f"{indent} Example: {example_text}\n" + + if isclass(field_type) and issubclass(field_type, BaseModel): + field_text += f"{indent} Details:\n" + for name, type_ in field_type.__annotations__.items(): + field_text += generate_field_text(name, type_, field_type, depth + 2) + + return field_text + + +def format_multiline_description(description: str, indent_level: int) -> str: + """ + Format a multiline description with proper indentation. + + Args: + description (str): Multiline description. + indent_level (int): Indentation level. + + Returns: + str: Formatted multiline description. + """ + indent = ' ' * indent_level + return indent + description.replace('\n', '\n' + indent) + + +def save_gbnf_grammar_and_documentation(grammar, documentation, grammar_file_path="./grammar.gbnf", + documentation_file_path="./grammar_documentation.md"): + """ + Save GBNF grammar and documentation to specified files. + + Args: + grammar (str): GBNF grammar string. + documentation (str): Documentation string. + grammar_file_path (str): File path to save the GBNF grammar. + documentation_file_path (str): File path to save the documentation. 
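+
+    Example (illustrative; MyModel is a placeholder Pydantic model and the paths are arbitrary):
+        grammar, documentation = generate_gbnf_grammar_and_documentation([MyModel])
+        save_gbnf_grammar_and_documentation(grammar, documentation,
+                                            grammar_file_path="./my_grammar.gbnf",
+                                            documentation_file_path="./my_grammar.md")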
+ + Returns: + None + """ + try: + with open(grammar_file_path, 'w') as file: + file.write(grammar + get_primitive_grammar(grammar)) + print(f"Grammar successfully saved to {grammar_file_path}") + except IOError as e: + print(f"An error occurred while saving the grammar file: {e}") + + try: + with open(documentation_file_path, 'w') as file: + file.write(documentation) + print(f"Documentation successfully saved to {documentation_file_path}") + except IOError as e: + print(f"An error occurred while saving the documentation file: {e}") + + +def remove_empty_lines(string): + """ + Remove empty lines from a string. + + Args: + string (str): Input string. + + Returns: + str: String with empty lines removed. + """ + lines = string.splitlines() + non_empty_lines = [line for line in lines if line.strip() != ""] + string_no_empty_lines = "\n".join(non_empty_lines) + return string_no_empty_lines + + +def generate_and_save_gbnf_grammar_and_documentation(pydantic_model_list, + grammar_file_path="./generated_grammar.gbnf", + documentation_file_path="./generated_grammar_documentation.md", + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True): + """ + Generate GBNF grammar and documentation, and save them to specified files. + + Args: + pydantic_model_list: List of Pydantic model classes. + grammar_file_path (str): File path to save the generated GBNF grammar. + documentation_file_path (str): File path to save the generated documentation. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + None + """ + documentation = generate_text_documentation(pydantic_model_list, model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, + outer_object_content, list_of_outputs) + grammar = remove_empty_lines(grammar) + save_gbnf_grammar_and_documentation(grammar, documentation, grammar_file_path, documentation_file_path) + + +def generate_gbnf_grammar_and_documentation(pydantic_model_list, outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", list_of_outputs: bool = False, + documentation_with_field_description=True): + """ + Generate GBNF grammar and documentation for a list of Pydantic models. + + Args: + pydantic_model_list: List of Pydantic model classes. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. 
+ fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + tuple: GBNF grammar string, documentation string. + """ + documentation = generate_text_documentation(copy(pydantic_model_list), model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, + outer_object_content, list_of_outputs) + grammar = remove_empty_lines(grammar + get_primitive_grammar(grammar)) + return grammar, documentation + + +def generate_gbnf_grammar_and_documentation_from_dictionaries(dictionaries: List[dict], + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True): + """ + Generate GBNF grammar and documentation from a list of dictionaries. + + Args: + dictionaries (List[dict]): List of dictionaries representing Pydantic models. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + tuple: GBNF grammar string, documentation string. + """ + pydantic_model_list = create_dynamic_models_from_dictionaries(dictionaries) + documentation = generate_text_documentation(copy(pydantic_model_list), model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, + outer_object_content, list_of_outputs) + grammar = remove_empty_lines(grammar + get_primitive_grammar(grammar)) + return grammar, documentation + + +def create_dynamic_model_from_function(func: Callable): + """ + Creates a dynamic Pydantic model from a given function's type hints and adds the function as a 'run' method. + + Args: + func (Callable): A function with type hints from which to create the model. + + Returns: + A dynamic Pydantic model class with the provided function as a 'run' method. + """ + # Extracting type hints from the provided function + type_hints = get_type_hints(func) + type_hints.pop('return', None) + + # Handling default values and annotations + dynamic_fields = {} + defaults = getattr(func, '__defaults__', ()) or () + defaults_index = len(type_hints) - len(defaults) + + for index, (name, typ) in enumerate(type_hints.items()): + if index >= defaults_index: + default_value = defaults[index - defaults_index] + dynamic_fields[name] = (typ, default_value) + else: + dynamic_fields[name] = (typ, ...) 
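+
+    # Illustrative example (hypothetical function): for
+    #     def add(a: int, b: int = 5) -> int: ...
+    # the loop above produces dynamic_fields == {'a': (int, ...), 'b': (int, 5)}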
+
+    # Creating the dynamic model
+    dynamic_model = create_model(func.__name__, **dynamic_fields)
+
+    dynamic_model.__doc__ = getdoc(func)
+
+    # Wrapping the original function to handle instance 'self'
+    def run_method_wrapper(self):
+        func_args = {name: getattr(self, name) for name in type_hints}
+        return func(**func_args)
+
+    # Adding the wrapped function as a 'run' method
+    setattr(dynamic_model, 'run', run_method_wrapper)
+
+    return dynamic_model
+
+
+def add_run_method_to_dynamic_model(model: Type[BaseModel], func: Callable):
+    """
+    Add a 'run' method to a dynamic Pydantic model, using the provided function.
+
+    Args:
+    - model (Type[BaseModel]): Dynamic Pydantic model class.
+    - func (Callable): Function to be added as a 'run' method to the model.
+
+    Returns:
+    - Type[BaseModel]: Pydantic model class with the added 'run' method.
+    """
+
+    def run_method_wrapper(self):
+        func_args = {name: getattr(self, name) for name in model.model_fields}
+        return func(**func_args)
+
+    # Adding the wrapped function as a 'run' method
+    setattr(model, 'run', run_method_wrapper)
+
+    return model
+
+
+def create_dynamic_models_from_dictionaries(dictionaries: List[dict]):
+    """
+    Create a list of dynamic Pydantic model classes from a list of dictionaries.
+
+    Args:
+    - dictionaries (List[dict]): List of dictionaries representing model structures.
+
+    Returns:
+    - List[Type[BaseModel]]: List of generated dynamic Pydantic model classes.
+    """
+    dynamic_models = []
+    for model_dict in dictionaries:
+        model_name = format_model_and_field_name(model_dict.get("name", ""))
+        dyn_model = convert_dictionary_to_to_pydantic_model(model_dict, model_name)
+        dynamic_models.append(dyn_model)
+    return dynamic_models
+
+
+def map_grammar_names_to_pydantic_model_class(pydantic_model_list):
+    output = {}
+    for model in pydantic_model_list:
+        output[format_model_and_field_name(model.__name__)] = model
+
+    return output
+
+
+def json_schema_to_python_types(schema):
+    type_map = {
+        'any': Any,
+        'string': str,
+        'number': float,
+        'integer': int,
+        'boolean': bool,
+        'array': list,
+    }
+    return type_map[schema]
+
+
+def list_to_enum(enum_name, values):
+    return Enum(enum_name, {value: value for value in values})
+
+
+def convert_dictionary_to_to_pydantic_model(dictionary: dict, model_name: str = 'CustomModel') -> Type[BaseModel]:
+    """
+    Convert a dictionary to a Pydantic model class.
+
+    Args:
+    - dictionary (dict): Dictionary representing the model structure.
+    - model_name (str): Name of the generated Pydantic model.
+
+    Returns:
+    - Type[BaseModel]: Generated Pydantic model class.
+    """
+    fields = {}
+
+    if "properties" in dictionary:
+        for field_name, field_data in dictionary.get("properties", {}).items():
+            if field_data == 'object':
+                submodel = convert_dictionary_to_to_pydantic_model(dictionary, f'{model_name}_{field_name}')
+                fields[field_name] = (submodel, ...)
+            else:
+                field_type = field_data.get('type', 'str')
+
+                if field_data.get("enum", []):
+                    fields[field_name] = (list_to_enum(field_name, field_data.get("enum", [])), ...)
+                if field_type == "array":
+                    items = field_data.get("items", {})
+                    if items != {}:
+                        array = {"properties": items}
+                        array_type = convert_dictionary_to_to_pydantic_model(array, f'{model_name}_{field_name}_items')
+                        fields[field_name] = (List[array_type], ...)
+                    else:
+                        fields[field_name] = (list, ...)
+                elif field_type == 'object':
+                    submodel = convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}_{field_name}')
+                    fields[field_name] = (submodel, ...)
+ else: + field_type = json_schema_to_python_types(field_type) + fields[field_name] = (field_type, ...) + if "function" in dictionary: + + for field_name, field_data in dictionary.get("function", {}).items(): + if field_name == "name": + model_name = field_data + elif field_name == "description": + fields["__doc__"] = field_data + elif field_name == "parameters": + return convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}') + if "parameters" in dictionary: + field_data = {"function": dictionary} + return convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}') + + custom_model = create_model(model_name, **fields) + return custom_model + + + From fa5c1fb44a2724292da545d6b7cf2a1ac0e0b989 Mon Sep 17 00:00:00 2001 From: slaren Date: Fri, 12 Jan 2024 20:38:34 +0100 Subject: [PATCH 446/859] backend_sched : fix assignments ggml-ci --- ggml-backend.c | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/ggml-backend.c b/ggml-backend.c index 4c2d8b0b2..505dbba47 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -1087,6 +1087,24 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g } } } + + // pass 2.4 expand rest down + { + ggml_tallocr_t cur_allocr = NULL; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + if (ggml_is_view_op(node->op)) { + continue; + } + ggml_tallocr_t node_allocr = node_allocr(node); + if (node_allocr != NULL) { + cur_allocr = node_allocr; + } else { + node_allocr(node) = cur_allocr; + SET_CAUSE(node, "2.4"); + } + } + } #ifdef DEBUG_PASS2 fprintf(stderr, "PASS 2 ASSIGNMENTS\n"); sched_print_assignments(sched, graph); #endif @@ -1146,6 +1164,8 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g ggml_tallocr_t node_allocr = node_allocr(node); + GGML_ASSERT(node_allocr != NULL); // all nodes should be assigned by now + if (node_allocr != cur_allocr) { sched->splits[cur_split].i_end = i; cur_split++; From f238461236f4e0e18cac1a554af23c7deadc9b01 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 14:02:30 +0200 Subject: [PATCH 447/859] ggml : fix 32-bit ARM compat for IQ2_XS (whisper/1758) * ggml : fix 32-bit ARM compat * ggml : fix fix * ggml : fix fix fix --- ggml-quants.c | 39 +++++++++++++++++++++++++++++++++++---- 1 file changed, 35 insertions(+), 4 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index a24b4b244..601d155d7 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -272,10 +272,13 @@ static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 // vaddvq_s16 // vpaddq_s16 +// vpaddq_s32 // vaddvq_s32 // vaddvq_f32 // vmaxvq_f32 // vcvtnq_s32_f32 +// vzip1_u8 +// vzip2_u8 inline static int32_t vaddvq_s16(int16x8_t v) { return @@ -291,6 +294,12 @@ inline static int16x8_t vpaddq_s16(int16x8_t a, int16x8_t b) { return vcombine_s16(a0, b0); } +inline static int32x4_t vpaddq_s32(int32x4_t a, int32x4_t b) { + int32x2_t a0 = vpadd_s32(vget_low_s32(a), vget_high_s32(a)); + int32x2_t b0 = vpadd_s32(vget_low_s32(b), vget_high_s32(b)); + return vcombine_s32(a0, b0); +} + inline static int32_t vaddvq_s32(int32x4_t v) { return vgetq_lane_s32(v, 0) + vgetq_lane_s32(v, 1) + vgetq_lane_s32(v, 2) + vgetq_lane_s32(v, 3); } @@ -316,6 +325,28 @@ inline static int32x4_t vcvtnq_s32_f32(float32x4_t v) { return res; } +inline static uint8x8_t vzip1_u8(uint8x8_t a, uint8x8_t b) { + uint8x8_t res; + + res[0] = a[0]; res[1] = b[0]; + res[2] = a[1]; res[3] = b[1]; + res[4] = a[2]; res[5] = b[2]; + 
res[6] = a[3]; res[7] = b[3]; + + return res; +} + +inline static uint8x8_t vzip2_u8(uint8x8_t a, uint8x8_t b) { + uint8x8_t res; + + res[0] = a[4]; res[1] = b[4]; + res[2] = a[5]; res[3] = b[5]; + res[4] = a[6]; res[5] = b[6]; + res[6] = a[7]; res[7] = b[7]; + + return res; +} + // vld1q_s16_x2 // vld1q_u8_x2 // vld1q_u8_x4 @@ -7554,9 +7585,9 @@ void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * rest const uint64_t * signs64 = (const uint64_t *)keven_signs_q2xs; - int8x16x4_t q2u; - int8x16x4_t q2s; - int8x16x4_t q8b; + ggml_int8x16x4_t q2u; + ggml_int8x16x4_t q2s; + ggml_int8x16x4_t q8b; int32x4x4_t scales32; @@ -7578,7 +7609,7 @@ void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * rest scales32.val[3] = vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales2))); int32x4_t sumi = vdupq_n_s32(0); for (int ib64 = 0; ib64 < QK_K/64; ++ib64) { - q8b = vld1q_s8_x4(q8); q8 += 64; + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; q2u.val[0] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[0] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[1] & 511)))); q2u.val[1] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[2] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[3] & 511)))); q2u.val[2] = vcombine_s8(vld1_s8((const void *)(iq2xs_grid + (q2[4] & 511))), vld1_s8((const void *)(iq2xs_grid + (q2[5] & 511)))); From de473f5f8e19ba5e659cdf5af65fb9251dce16c5 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 12 Jan 2024 22:02:43 +0200 Subject: [PATCH 448/859] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 3e2c579d5..edcdb530a 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -979cc23b345006504cfc1f67c0fdf627805e3319 +400c07f00508e6f60fb25405444b5669c365b0a9 From 15ebe59210e7fd9817ff67f51fa1a5ee2d004294 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 13:44:37 +0200 Subject: [PATCH 449/859] convert : update phi-2 to latest HF repo (#4903) * convert : update phi-2 to latest HF repo ggml-ci * py : try to fix flake stuff --- convert-hf-to-gguf.py | 39 +++++++++++++++++++++---------- gguf-py/gguf/constants.py | 3 +++ gguf-py/gguf/tensor_mapping.py | 2 ++ llama.cpp | 42 ++++++++++++++++++++++++++-------- 4 files changed, 65 insertions(+), 21 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index a1c79fd47..b133f3b49 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -23,6 +23,15 @@ if 'NO_LOCAL_GGUF' not in os.environ: import gguf +# check for any of the given keys in the dictionary and return the value of the first key found +def get_key_opts(d, keys): + for k in keys: + if k in d: + return d[k] + print(f"Could not find any of {keys}") + sys.exit() + + ###### MODEL DEFINITIONS ###### class SentencePieceTokenTypes(IntEnum): @@ -257,10 +266,11 @@ class Model: toktypes.append(gguf.TokenType.USER_DEFINED) elif reverse_vocab[i] in added_vocab: tokens.append(reverse_vocab[i]) - if tokenizer.added_tokens_decoder[i].special: - toktypes.append(gguf.TokenType.CONTROL) - else: - toktypes.append(gguf.TokenType.USER_DEFINED) + if hasattr(tokenizer, "added_tokens_decoder"): + if tokenizer.added_tokens_decoder[i].special: + toktypes.append(gguf.TokenType.CONTROL) + else: + toktypes.append(gguf.TokenType.USER_DEFINED) else: tokens.append(reverse_vocab[i]) toktypes.append(gguf.TokenType.NORMAL) @@ -1068,17 +1078,22 @@ class GPT2Model(Model): class Phi2Model(Model): def 
set_gguf_parameters(self): - block_count = self.hparams["n_layer"] + block_count = get_key_opts(self.hparams, ["num_hidden_layers", "n_layer"]) + + rot_pct = get_key_opts(self.hparams, ["partial_rotary_factor"]) + n_embd = get_key_opts(self.hparams, ["hidden_size", "n_embd"]) + n_head = get_key_opts(self.hparams, ["num_attention_heads", "n_head"]) self.gguf_writer.add_name("Phi2") - self.gguf_writer.add_context_length(self.hparams["n_positions"]) - self.gguf_writer.add_embedding_length(self.hparams["n_embd"]) - self.gguf_writer.add_feed_forward_length(4 * self.hparams["n_embd"]) + self.gguf_writer.add_context_length(get_key_opts(self.hparams, ["n_positions", "max_position_embeddings"])) + + self.gguf_writer.add_embedding_length(n_embd) + self.gguf_writer.add_feed_forward_length(4 * n_embd) self.gguf_writer.add_block_count(block_count) - self.gguf_writer.add_head_count(self.hparams["n_head"]) - self.gguf_writer.add_head_count_kv(self.hparams["n_head"]) - self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_epsilon"]) - self.gguf_writer.add_rope_dimension_count(self.hparams["rotary_dim"]) + self.gguf_writer.add_head_count(n_head) + self.gguf_writer.add_head_count_kv(n_head) + self.gguf_writer.add_layer_norm_eps(get_key_opts(self.hparams, ["layer_norm_epsilon", "layer_norm_eps"])) + self.gguf_writer.add_rope_dimension_count(int(rot_pct * n_embd) // n_head) self.gguf_writer.add_file_type(self.ftype) self.gguf_writer.add_add_bos_token(False) diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index f0a1c51f8..972b4e9a7 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -389,6 +389,9 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.OUTPUT, MODEL_TENSOR.ATTN_NORM, MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, MODEL_TENSOR.ATTN_OUT, MODEL_TENSOR.FFN_NORM, MODEL_TENSOR.FFN_DOWN, diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 24a089037..e5b146106 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -191,6 +191,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.w1", # qwen "h.{bid}.mlp.c_fc", # gpt2 "transformer.h.{bid}.mlp.fc1", # phi2 + "model.layers.{bid}.mlp.fc1", # phi2 "model.layers.layers.{bid}.mlp.up_proj", # plamo ), @@ -232,6 +233,7 @@ class TensorNameMap: "model.layers.{bid}.mlp.dense_4h_to_h", # persimmon "h.{bid}.mlp.c_proj", # gpt2 "transformer.h.{bid}.mlp.fc2", # phi2 + "model.layers.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo ), diff --git a/llama.cpp b/llama.cpp index fe1d8947c..1d2eb569f 100644 --- a/llama.cpp +++ b/llama.cpp @@ -574,6 +574,9 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_OUTPUT, "output" }, { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, @@ -3676,8 +3679,19 @@ static bool llm_load_tensors( layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); - layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); - layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, 
"bias", i), {n_embd + 2*n_embd_gqa}); + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}, false); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, false); + + if (layer.wqkv == nullptr) { + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); + + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); + + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + } layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); @@ -5637,15 +5651,25 @@ struct llm_build_context { // self-attention { - cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, attn_norm_output); - cb(cur, "wqkv", il); + struct ggml_tensor * Qcur = nullptr; + struct ggml_tensor * Kcur = nullptr; + struct ggml_tensor * Vcur = nullptr; - cur = ggml_add(ctx0, cur, model.layers[il].bqkv); - cb(cur, "bqkv", il); + if (model.layers[il].wqkv) { + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, attn_norm_output); + cb(cur, "wqkv", il); - struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); - struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); - struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + + Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + } else { + Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, attn_norm_output), model.layers[il].bq); + Kcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wk, attn_norm_output), model.layers[il].bk); + Vcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wv, attn_norm_output), model.layers[il].bv); + } cb(Qcur, "Qcur", il); cb(Kcur, "Kcur", il); From ee8243adaa9a9f51ff449213383874e49efe368f Mon Sep 17 00:00:00 2001 From: makomk Date: Sat, 13 Jan 2024 14:16:11 +0000 Subject: [PATCH 450/859] server : fix crash with multimodal models without BOS token (#4904) --- examples/server/server.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c1ab8f9dc..7b33aea1f 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1835,7 +1835,7 @@ struct llama_server_context slot.cache_tokens = prompt_tokens; - if (slot.n_past == slot.num_prompt_tokens) + if (slot.n_past == slot.num_prompt_tokens && slot.n_past > 0) { // we have to evaluate at least 1 token to generate logits. 
LOG_TEE("slot %d : we have to evaluate at least 1 token to generate logits\n", slot.id); From 356327feb3f66980ab687040495d722696d98970 Mon Sep 17 00:00:00 2001 From: Ziad Ben Hadj-Alouane Date: Sat, 13 Jan 2024 09:20:46 -0500 Subject: [PATCH 451/859] server : fix deadlock that occurs in multi-prompt scenarios (#4905) * * fix deadlock * * dont ruint all whitespace --- examples/server/server.cpp | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 7b33aea1f..79eacf828 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1350,14 +1350,17 @@ struct llama_server_context res.result_json["model"] = slot.oaicompat_model; } + queue_results.push_back(res); + condition_results.notify_all(); + + // done with results, unlock + lock.unlock(); + // parent multitask, if any, needs to be updated if (slot.multitask_id != -1) { update_multi_task(slot.multitask_id, slot.task_id, res); } - - queue_results.push_back(res); - condition_results.notify_all(); } void send_embedding(llama_client_slot &slot) @@ -1603,6 +1606,7 @@ struct llama_server_context } // remove finished multitasks from the queue of multitasks, and add the corresponding result to the result queue + std::vector agg_results; auto queue_iterator = queue_multitasks.begin(); while (queue_iterator != queue_multitasks.end()) { @@ -1623,8 +1627,9 @@ struct llama_server_context } aggregate_result.result_json = json{ "results", result_jsons }; - std::lock_guard lock(mutex_results); - queue_results.push_back(aggregate_result); + + agg_results.push_back(aggregate_result); + condition_results.notify_all(); queue_iterator = queue_multitasks.erase(queue_iterator); @@ -1634,6 +1639,13 @@ struct llama_server_context ++queue_iterator; } } + + // done with tasks, unlock + lock.unlock(); + + // copy aggregate results of complete multi-tasks to the results queue + std::lock_guard lock_results(mutex_results); + queue_results.insert(queue_results.end(), agg_results.begin(), agg_results.end()); } bool update_slots() { From 7dc78764e2ff86512e6e31cb0fcb8087df4b4708 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 13 Jan 2024 15:52:53 +0100 Subject: [PATCH 452/859] compare-llama-bench: tweak output format (#4910) --- scripts/compare-llama-bench.py | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/scripts/compare-llama-bench.py b/scripts/compare-llama-bench.py index bc1714487..70737f976 100755 --- a/scripts/compare-llama-bench.py +++ b/scripts/compare-llama-bench.py @@ -10,15 +10,15 @@ import sqlite3 try: import git from tabulate import tabulate -except ImportError: +except ImportError as e: print("ERROR: the following Python libraries are required: GitPython, tabulate.") - sys.exit(1) + raise e # Properties by which to differentiate results per commit: KEY_PROPERTIES = [ - "cuda", "opencl", "metal", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", - "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", - "n_gpu_layers", "main_gpu", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen" + "cpu_info", "gpu_info", "n_gpu_layers", "main_gpu", "cuda", "opencl", "metal", "gpu_blas", + "blas", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", + "type_k", "type_v", "no_kv_offload", "mul_mat_q", "tensor_split", "n_prompt", "n_gen" ] # Properties that are boolean and are converted 
to Yes/No for the table: @@ -37,6 +37,7 @@ PRETTY_NAMES = { DEFAULT_SHOW = ["model_type"] # Always show these properties by default. DEFAULT_HIDE = ["model_filename"] # Always hide these properties by default. GPU_NAME_STRIP = ["NVIDIA GeForce ", "Tesla ", "AMD Radeon "] # Strip prefixes for smaller tables. +MODEL_SUFFIX_REPLACE = {" - Small": "_S", " - Medium": "_M", " - Large": "_L"} DESCRIPTION = """Creates tables from llama-bench data written to an SQLite database. Example usage (Linux): @@ -308,8 +309,13 @@ else: if gpu_blas and "gpu_info" not in properties_different: show.append("gpu_info") - show += DEFAULT_SHOW show += properties_different + + index_default = 0 + for prop in ["cpu_info", "gpu_info", "n_gpu_layers", "main_gpu"]: + if prop in show: + index_default += 1 + show = show[:index_default] + DEFAULT_SHOW + show[index_default:] for prop in DEFAULT_HIDE: try: show.remove(prop) @@ -334,6 +340,12 @@ for bool_property in BOOL_PROPERTIES: for row_table in table: row_table[ip] = "Yes" if int(row_table[ip]) == 1 else "No" +if "model_type" in show: + ip = show.index("model_type") + for (old, new) in MODEL_SUFFIX_REPLACE.items(): + for row_table in table: + row_table[ip] = row_table[ip].replace(old, new) + if "model_size" in show: ip = show.index("model_size") for row_table in table: @@ -341,10 +353,16 @@ if "model_size" in show: if "gpu_info" in show: ip = show.index("gpu_info") - for gns in GPU_NAME_STRIP: - for row_table in table: + for row_table in table: + for gns in GPU_NAME_STRIP: row_table[ip] = row_table[ip].replace(gns, "") + gpu_names = row_table[ip].split("/") + num_gpus = len(gpu_names) + all_names_the_same = len(set(gpu_names)) == 1 + if len(gpu_names) >= 2 and all_names_the_same: + row_table[ip] = f"{num_gpus}x {gpu_names[0]}" + headers = [PRETTY_NAMES[p] for p in show] headers += ["Test", f"t/s {name_baseline}", f"t/s {name_compare}", "Speedup"] From b38b5e93ae31019e87f692b69d27124eae6aac02 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 18:03:45 +0200 Subject: [PATCH 453/859] metal : refactor kernel loading code (#4794) * metal : detect more GPU families * metal : refactor kernel loading * metal : set kernel family requirements * metal : fix kernel init + fix compile options * metal : take into account simdgroup reduction support * metal : print only skipped kernels * metal : fix check for simdgroup reduction support * metal : check for Metal 3 * metal : free allocations * metal : normalize encoder:setComputePipelineStatus calls ggml-ci * metal : fix Metal3 family check ggml-ci * metal : check for simdgroup matrix mul. 
feature ggml-ci --- ggml-metal.m | 1048 +++++++++++++++++++++++++------------------------- 1 file changed, 530 insertions(+), 518 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index c03624073..6c28a7ee3 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -26,6 +26,8 @@ #define GGML_MAX_CONCUR (2*GGML_DEFAULT_GRAPH_SIZE) +#define GGML_METAL_MAX_KERNELS 256 + struct ggml_metal_buffer { const char * name; @@ -35,6 +37,134 @@ struct ggml_metal_buffer { id metal; }; +struct ggml_metal_kernel { + id function; + id pipeline; +}; + +enum ggml_metal_kernel_type { + GGML_METAL_KERNEL_TYPE_ADD, + GGML_METAL_KERNEL_TYPE_ADD_ROW, + GGML_METAL_KERNEL_TYPE_MUL, + GGML_METAL_KERNEL_TYPE_MUL_ROW, + GGML_METAL_KERNEL_TYPE_DIV, + GGML_METAL_KERNEL_TYPE_DIV_ROW, + GGML_METAL_KERNEL_TYPE_SCALE, + GGML_METAL_KERNEL_TYPE_SCALE_4, + GGML_METAL_KERNEL_TYPE_TANH, + GGML_METAL_KERNEL_TYPE_RELU, + GGML_METAL_KERNEL_TYPE_GELU, + GGML_METAL_KERNEL_TYPE_GELU_QUICK, + GGML_METAL_KERNEL_TYPE_SILU, + GGML_METAL_KERNEL_TYPE_SOFT_MAX, + GGML_METAL_KERNEL_TYPE_SOFT_MAX_4, + GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, + GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, + GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, + GGML_METAL_KERNEL_TYPE_GET_ROWS_F16, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, + GGML_METAL_KERNEL_TYPE_RMS_NORM, + GGML_METAL_KERNEL_TYPE_GROUP_NORM, + GGML_METAL_KERNEL_TYPE_NORM, + GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW, + GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_1ROW, + //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_L4, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32, + 
GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, + GGML_METAL_KERNEL_TYPE_ROPE_F32, + GGML_METAL_KERNEL_TYPE_ROPE_F16, + GGML_METAL_KERNEL_TYPE_ALIBI_F32, + GGML_METAL_KERNEL_TYPE_IM2COL_F16, + GGML_METAL_KERNEL_TYPE_UPSCALE_F32, + GGML_METAL_KERNEL_TYPE_PAD_F32, + GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, + GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, + GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, + GGML_METAL_KERNEL_TYPE_CPY_F32_F16, + GGML_METAL_KERNEL_TYPE_CPY_F32_F32, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0, + GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1, + //GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0, + //GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1, + GGML_METAL_KERNEL_TYPE_CPY_F16_F16, + GGML_METAL_KERNEL_TYPE_CPY_F16_F32, + GGML_METAL_KERNEL_TYPE_CONCAT, + GGML_METAL_KERNEL_TYPE_SQR, + GGML_METAL_KERNEL_TYPE_SUM_ROWS, + + GGML_METAL_KERNEL_TYPE_COUNT +}; + struct ggml_metal_context { int n_cb; @@ -50,134 +180,13 @@ struct ggml_metal_context { int n_buffers; struct ggml_metal_buffer buffers[GGML_METAL_MAX_BUFFERS]; + struct ggml_metal_kernel kernels[GGML_METAL_MAX_KERNELS]; + int concur_list[GGML_MAX_CONCUR]; int concur_list_len; - // custom kernels -#define GGML_METAL_DECL_KERNEL(name) \ - id function_##name; \ - id pipeline_##name - - GGML_METAL_DECL_KERNEL(add); - GGML_METAL_DECL_KERNEL(add_row); // TODO: avoid this extra kernel, instead extend the "add" kernel to support broadcast - GGML_METAL_DECL_KERNEL(mul); - GGML_METAL_DECL_KERNEL(mul_row); // TODO: avoid this extra kernel, instead extend the "mul" kernel to support broadcast - GGML_METAL_DECL_KERNEL(div); - GGML_METAL_DECL_KERNEL(div_row); - GGML_METAL_DECL_KERNEL(scale); - GGML_METAL_DECL_KERNEL(scale_4); - GGML_METAL_DECL_KERNEL(tanh); - GGML_METAL_DECL_KERNEL(relu); - GGML_METAL_DECL_KERNEL(gelu); - GGML_METAL_DECL_KERNEL(gelu_quick); - GGML_METAL_DECL_KERNEL(silu); - GGML_METAL_DECL_KERNEL(soft_max); - GGML_METAL_DECL_KERNEL(soft_max_4); - GGML_METAL_DECL_KERNEL(diag_mask_inf); - GGML_METAL_DECL_KERNEL(diag_mask_inf_8); - GGML_METAL_DECL_KERNEL(get_rows_f32); - GGML_METAL_DECL_KERNEL(get_rows_f16); - GGML_METAL_DECL_KERNEL(get_rows_q4_0); - GGML_METAL_DECL_KERNEL(get_rows_q4_1); - GGML_METAL_DECL_KERNEL(get_rows_q5_0); - GGML_METAL_DECL_KERNEL(get_rows_q5_1); - GGML_METAL_DECL_KERNEL(get_rows_q8_0); - GGML_METAL_DECL_KERNEL(get_rows_q2_K); - GGML_METAL_DECL_KERNEL(get_rows_q3_K); - GGML_METAL_DECL_KERNEL(get_rows_q4_K); - GGML_METAL_DECL_KERNEL(get_rows_q5_K); - GGML_METAL_DECL_KERNEL(get_rows_q6_K); - GGML_METAL_DECL_KERNEL(get_rows_i32); - 
GGML_METAL_DECL_KERNEL(get_rows_iq2_xxs); - GGML_METAL_DECL_KERNEL(get_rows_iq2_xs); - GGML_METAL_DECL_KERNEL(rms_norm); - GGML_METAL_DECL_KERNEL(group_norm); - GGML_METAL_DECL_KERNEL(norm); - GGML_METAL_DECL_KERNEL(mul_mv_f32_f32); - GGML_METAL_DECL_KERNEL(mul_mv_f16_f16); - GGML_METAL_DECL_KERNEL(mul_mv_f16_f32); - GGML_METAL_DECL_KERNEL(mul_mv_f16_f32_1row); - GGML_METAL_DECL_KERNEL(mul_mv_f16_f32_l4); - GGML_METAL_DECL_KERNEL(mul_mv_q4_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q4_1_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q5_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q5_1_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q8_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q2_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q3_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q4_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q5_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_q6_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_iq2_xxs_f32); - GGML_METAL_DECL_KERNEL(mul_mv_iq2_xs_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_f32_f32); - //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f16); - GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32); - //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32_1row); - //GGML_METAL_DECL_KERNEL(mul_mv_id_f16_f32_l4); - GGML_METAL_DECL_KERNEL(mul_mv_id_q4_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q4_1_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q5_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q5_1_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q8_0_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q2_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q3_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q4_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q5_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_q6_K_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xxs_f32); - GGML_METAL_DECL_KERNEL(mul_mv_id_iq2_xs_f32); - GGML_METAL_DECL_KERNEL(mul_mm_f32_f32); - GGML_METAL_DECL_KERNEL(mul_mm_f16_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q4_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q4_1_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q5_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q5_1_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q8_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q2_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q3_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q4_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q5_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_q6_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_iq2_xxs_f32); - GGML_METAL_DECL_KERNEL(mul_mm_iq2_xs_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_f32_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_f16_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q4_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q4_1_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q5_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q5_1_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q8_0_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q2_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q3_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q4_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q5_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_q6_K_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xxs_f32); - GGML_METAL_DECL_KERNEL(mul_mm_id_iq2_xs_f32); - GGML_METAL_DECL_KERNEL(rope_f32); - GGML_METAL_DECL_KERNEL(rope_f16); - GGML_METAL_DECL_KERNEL(alibi_f32); - GGML_METAL_DECL_KERNEL(im2col_f16); - GGML_METAL_DECL_KERNEL(upscale_f32); - GGML_METAL_DECL_KERNEL(pad_f32); - GGML_METAL_DECL_KERNEL(argsort_f32_i32_asc); - GGML_METAL_DECL_KERNEL(argsort_f32_i32_desc); - GGML_METAL_DECL_KERNEL(leaky_relu_f32); - GGML_METAL_DECL_KERNEL(cpy_f32_f16); - GGML_METAL_DECL_KERNEL(cpy_f32_f32); - GGML_METAL_DECL_KERNEL(cpy_f32_q8_0); - GGML_METAL_DECL_KERNEL(cpy_f32_q4_0); - GGML_METAL_DECL_KERNEL(cpy_f32_q4_1); - 
//GGML_METAL_DECL_KERNEL(cpy_f32_q5_0); - //GGML_METAL_DECL_KERNEL(cpy_f32_q5_1); - GGML_METAL_DECL_KERNEL(cpy_f16_f16); - GGML_METAL_DECL_KERNEL(cpy_f16_f32); - GGML_METAL_DECL_KERNEL(concat); - GGML_METAL_DECL_KERNEL(sqr); - GGML_METAL_DECL_KERNEL(sum_rows); - -#undef GGML_METAL_DECL_KERNEL + bool support_simdgroup_reduction; + bool support_simdgroup_mm; }; // MSL code @@ -298,19 +307,22 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { return NULL; } - MTLCompileOptions* options = nil; + // dictionary of preprocessor macros + NSMutableDictionary * prep = [NSMutableDictionary dictionary]; + #ifdef GGML_QKK_64 - options = [MTLCompileOptions new]; - options.preprocessorMacros = @{ @"QK_K" : @(64) }; + prep[@"QK_K"] = @(64); #endif - // try to disable fast-math - // NOTE: this seems to have no effect whatsoever - // instead, in order to disable fast-math, we have to build default.metallib from the command line - // using xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air - // and go through the "pre-compiled library found" path above + + MTLCompileOptions* options = [MTLCompileOptions new]; + options.preprocessorMacros = prep; + //[options setFastMathEnabled:false]; ctx->library = [ctx->device newLibraryWithSource:src options:options error:&error]; + + [options release]; + [prep release]; } if (error) { @@ -323,16 +335,41 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { // print MTL GPU family: GGML_METAL_LOG_INFO("%s: GPU name: %s\n", __func__, [[ctx->device name] UTF8String]); + const NSInteger MTLGPUFamilyMetal3 = 5001; + // determine max supported GPU family // https://developer.apple.com/metal/Metal-Shading-Language-Specification.pdf // https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf - for (int i = MTLGPUFamilyApple1 + 20; i >= MTLGPUFamilyApple1; --i) { - if ([ctx->device supportsFamily:i]) { - GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - (int) MTLGPUFamilyApple1 + 1, i); - break; + { + for (int i = MTLGPUFamilyApple1 + 20; i >= MTLGPUFamilyApple1; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyApple%d (%d)\n", __func__, i - (int) MTLGPUFamilyApple1 + 1, i); + break; + } + } + + for (int i = MTLGPUFamilyCommon1 + 5; i >= MTLGPUFamilyCommon1; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyCommon%d (%d)\n", __func__, i - (int) MTLGPUFamilyCommon1 + 1, i); + break; + } + } + + for (int i = MTLGPUFamilyMetal3 + 5; i >= MTLGPUFamilyMetal3; --i) { + if ([ctx->device supportsFamily:i]) { + GGML_METAL_LOG_INFO("%s: GPU family: MTLGPUFamilyMetal%d (%d)\n", __func__, i - (int) MTLGPUFamilyMetal3 + 3, i); + break; + } } } + ctx->support_simdgroup_reduction = [ctx->device supportsFamily:MTLGPUFamilyApple7]; + ctx->support_simdgroup_reduction |= [ctx->device supportsFamily:MTLGPUFamilyMetal3]; + + ctx->support_simdgroup_mm = [ctx->device supportsFamily:MTLGPUFamilyApple7]; + + GGML_METAL_LOG_INFO("%s: simdgroup reduction support = %s\n", __func__, ctx->support_simdgroup_reduction ? "true" : "false"); + GGML_METAL_LOG_INFO("%s: simdgroup matrix mul. support = %s\n", __func__, ctx->support_simdgroup_mm ? "true" : "false"); GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? 
"true" : "false"); GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); if (ctx->device.maxTransferRate != 0) { @@ -346,141 +383,152 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { { NSError * error = nil; + for (int i = 0; i < GGML_METAL_MAX_KERNELS; ++i) { + ctx->kernels[i].function = nil; + ctx->kernels[i].pipeline = nil; + } + /* - GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) ctx->pipeline_##name, \ - (int) ctx->pipeline_##name.maxTotalThreadsPerThreadgroup, \ - (int) ctx->pipeline_##name.threadExecutionWidth); \ + GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) kernel->pipeline, \ + (int) kernel->pipeline.maxTotalThreadsPerThreadgroup, \ + (int) kernel->pipeline.threadExecutionWidth); \ */ -#define GGML_METAL_ADD_KERNEL(name) \ - ctx->function_##name = [ctx->library newFunctionWithName:@"kernel_"#name]; \ - ctx->pipeline_##name = [ctx->device newComputePipelineStateWithFunction:ctx->function_##name error:&error]; \ - if (error) { \ - GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ - return NULL; \ +#define GGML_METAL_ADD_KERNEL(e, name, supported) \ + if (supported) { \ + struct ggml_metal_kernel * kernel = &ctx->kernels[e]; \ + kernel->function = [ctx->library newFunctionWithName:@"kernel_"#name]; \ + kernel->pipeline = [ctx->device newComputePipelineStateWithFunction:kernel->function error:&error]; \ + GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) kernel->pipeline, \ + (int) kernel->pipeline.maxTotalThreadsPerThreadgroup, \ + (int) kernel->pipeline.threadExecutionWidth); \ + if (error) { \ + GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ + return NULL; \ + } \ + } else { \ + GGML_METAL_LOG_WARN("%s: skipping %-32s (not supported)\n", __func__, "kernel_"#name); \ } - GGML_METAL_ADD_KERNEL(add); - GGML_METAL_ADD_KERNEL(add_row); - GGML_METAL_ADD_KERNEL(mul); - GGML_METAL_ADD_KERNEL(mul_row); - GGML_METAL_ADD_KERNEL(div); - GGML_METAL_ADD_KERNEL(div_row); - GGML_METAL_ADD_KERNEL(scale); - GGML_METAL_ADD_KERNEL(scale_4); - GGML_METAL_ADD_KERNEL(tanh); - GGML_METAL_ADD_KERNEL(relu); - GGML_METAL_ADD_KERNEL(gelu); - GGML_METAL_ADD_KERNEL(gelu_quick); - GGML_METAL_ADD_KERNEL(silu); - GGML_METAL_ADD_KERNEL(soft_max); - GGML_METAL_ADD_KERNEL(soft_max_4); - GGML_METAL_ADD_KERNEL(diag_mask_inf); - GGML_METAL_ADD_KERNEL(diag_mask_inf_8); - GGML_METAL_ADD_KERNEL(get_rows_f32); - GGML_METAL_ADD_KERNEL(get_rows_f16); - GGML_METAL_ADD_KERNEL(get_rows_q4_0); - GGML_METAL_ADD_KERNEL(get_rows_q4_1); - GGML_METAL_ADD_KERNEL(get_rows_q5_0); - GGML_METAL_ADD_KERNEL(get_rows_q5_1); - GGML_METAL_ADD_KERNEL(get_rows_q8_0); - GGML_METAL_ADD_KERNEL(get_rows_q2_K); - GGML_METAL_ADD_KERNEL(get_rows_q3_K); - GGML_METAL_ADD_KERNEL(get_rows_q4_K); - GGML_METAL_ADD_KERNEL(get_rows_q5_K); - GGML_METAL_ADD_KERNEL(get_rows_q6_K); - GGML_METAL_ADD_KERNEL(get_rows_i32); - GGML_METAL_ADD_KERNEL(get_rows_iq2_xxs); - GGML_METAL_ADD_KERNEL(get_rows_iq2_xs); - GGML_METAL_ADD_KERNEL(rms_norm); - GGML_METAL_ADD_KERNEL(group_norm); - GGML_METAL_ADD_KERNEL(norm); - GGML_METAL_ADD_KERNEL(mul_mv_f32_f32); - GGML_METAL_ADD_KERNEL(mul_mv_f16_f16); - GGML_METAL_ADD_KERNEL(mul_mv_f16_f32); - 
GGML_METAL_ADD_KERNEL(mul_mv_f16_f32_1row); - GGML_METAL_ADD_KERNEL(mul_mv_f16_f32_l4); - GGML_METAL_ADD_KERNEL(mul_mv_q4_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q4_1_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q5_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q5_1_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q8_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q2_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q3_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q4_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q5_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_q6_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_iq2_xxs_f32); - GGML_METAL_ADD_KERNEL(mul_mv_iq2_xs_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_f32_f32); - //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f16); - GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32); - //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32_1row); - //GGML_METAL_ADD_KERNEL(mul_mv_id_f16_f32_l4); - GGML_METAL_ADD_KERNEL(mul_mv_id_q4_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q4_1_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q5_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q5_1_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q8_0_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q2_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q3_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q4_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q5_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_q6_K_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xxs_f32); - GGML_METAL_ADD_KERNEL(mul_mv_id_iq2_xs_f32); - if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { - GGML_METAL_ADD_KERNEL(mul_mm_f32_f32); - GGML_METAL_ADD_KERNEL(mul_mm_f16_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q4_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q4_1_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q5_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q5_1_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q8_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q2_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q3_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q4_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q5_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_q6_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_iq2_xxs_f32); - GGML_METAL_ADD_KERNEL(mul_mm_iq2_xs_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_f32_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_f16_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q4_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q4_1_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q5_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q5_1_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q8_0_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q2_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q3_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q4_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q5_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_q6_K_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xxs_f32); - GGML_METAL_ADD_KERNEL(mul_mm_id_iq2_xs_f32); - } - GGML_METAL_ADD_KERNEL(rope_f32); - GGML_METAL_ADD_KERNEL(rope_f16); - GGML_METAL_ADD_KERNEL(alibi_f32); - GGML_METAL_ADD_KERNEL(im2col_f16); - GGML_METAL_ADD_KERNEL(upscale_f32); - GGML_METAL_ADD_KERNEL(pad_f32); - GGML_METAL_ADD_KERNEL(argsort_f32_i32_asc); - GGML_METAL_ADD_KERNEL(argsort_f32_i32_desc); - GGML_METAL_ADD_KERNEL(leaky_relu_f32); - GGML_METAL_ADD_KERNEL(cpy_f32_f16); - GGML_METAL_ADD_KERNEL(cpy_f32_f32); - GGML_METAL_ADD_KERNEL(cpy_f32_q8_0); - GGML_METAL_ADD_KERNEL(cpy_f32_q4_0); - GGML_METAL_ADD_KERNEL(cpy_f32_q4_1); - //GGML_METAL_ADD_KERNEL(cpy_f32_q5_0); - //GGML_METAL_ADD_KERNEL(cpy_f32_q5_1); - GGML_METAL_ADD_KERNEL(cpy_f16_f16); - GGML_METAL_ADD_KERNEL(cpy_f16_f32); - GGML_METAL_ADD_KERNEL(concat); - GGML_METAL_ADD_KERNEL(sqr); - GGML_METAL_ADD_KERNEL(sum_rows); + // simd_sum and simd_max requires MTLGPUFamilyApple7 -#undef GGML_METAL_ADD_KERNEL + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD, add, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ADD_ROW, add_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL, mul, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_ROW, mul_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV, div, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIV_ROW, div_row, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE, scale, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SCALE_4, scale_4, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_TANH, tanh, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RELU, relu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU, gelu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GELU_QUICK, gelu_quick, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SILU, silu, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX, soft_max, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SOFT_MAX_4, soft_max_4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF, diag_mask_inf, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8, diag_mask_inf_8, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F32, get_rows_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_F16, get_rows_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0, get_rows_q4_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1, get_rows_q4_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0, get_rows_q5_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1, get_rows_q5_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0, get_rows_q8_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K, get_rows_q2_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K, get_rows_q3_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K, get_rows_q4_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K, get_rows_q5_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K, get_rows_q6_K, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_NORM, norm, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32, mul_mv_f32_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16, mul_mv_f16_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32, mul_mv_f16_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW, mul_mv_f16_f32_1row, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4, mul_mv_f16_f32_l4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32, mul_mv_q4_0_f32, ctx->support_simdgroup_reduction); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32, mul_mv_q4_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32, mul_mv_q5_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32, mul_mv_q5_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32, mul_mv_q8_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32, mul_mv_q2_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32, mul_mv_q3_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32, mul_mv_q4_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32, mul_mv_q5_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32, mul_mv_q6_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_1ROW, mul_mv_id_f16_f32_1row, ctx->support_simdgroup_reduction); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32_L4, mul_mv_id_f16_f32_l4, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32, mul_mv_id_q4_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32, mul_mv_id_q4_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32, mul_mv_id_q5_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32, mul_mv_id_q5_1_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32, mul_mv_id_q8_0_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32, mul_mv_id_q2_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32, mul_mv_id_q3_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32, mul_mv_id_q4_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32, mul_mv_id_q5_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32, mul_mv_id_q6_K_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, 
mul_mm_f16_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32, mul_mm_q4_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32, mul_mm_q5_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32, mul_mm_q5_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32, mul_mm_q8_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32, mul_mm_q2_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32, mul_mm_q3_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32, mul_mm_q4_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32, mul_mm_q5_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32, mul_mm_q6_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32, mul_mm_id_q4_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32, mul_mm_id_q5_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32, mul_mm_id_q5_1_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32, mul_mm_id_q8_0_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32, mul_mm_id_q2_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32, mul_mm_id_q3_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32, mul_mm_id_q4_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32, mul_mm_id_q5_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32, mul_mm_id_q6_K_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F16, im2col_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); + 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC, argsort_f32_i32_desc, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32, leaky_relu_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F16, cpy_f32_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_F32, cpy_f32_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0, cpy_f32_q8_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0, cpy_f32_q4_0, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1, cpy_f32_q4_1, true); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0, cpy_f32_q5_0, true); + //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1, cpy_f32_q5_1, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F16, cpy_f16_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CPY_F16_F32, cpy_f16_f32, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_CONCAT, concat, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SQR, sqr, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_SUM_ROWS, sum_rows, true); } return ctx; @@ -488,137 +536,21 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_LOG_INFO("%s: deallocating\n", __func__); -#define GGML_METAL_DEL_KERNEL(name) \ - [ctx->function_##name release]; \ - [ctx->pipeline_##name release]; - - GGML_METAL_DEL_KERNEL(add); - GGML_METAL_DEL_KERNEL(add_row); - GGML_METAL_DEL_KERNEL(mul); - GGML_METAL_DEL_KERNEL(mul_row); - GGML_METAL_DEL_KERNEL(div); - GGML_METAL_DEL_KERNEL(div_row); - GGML_METAL_DEL_KERNEL(scale); - GGML_METAL_DEL_KERNEL(scale_4); - GGML_METAL_DEL_KERNEL(tanh); - GGML_METAL_DEL_KERNEL(relu); - GGML_METAL_DEL_KERNEL(gelu); - GGML_METAL_DEL_KERNEL(gelu_quick); - GGML_METAL_DEL_KERNEL(silu); - GGML_METAL_DEL_KERNEL(soft_max); - GGML_METAL_DEL_KERNEL(soft_max_4); - GGML_METAL_DEL_KERNEL(diag_mask_inf); - GGML_METAL_DEL_KERNEL(diag_mask_inf_8); - GGML_METAL_DEL_KERNEL(get_rows_f32); - GGML_METAL_DEL_KERNEL(get_rows_f16); - GGML_METAL_DEL_KERNEL(get_rows_q4_0); - GGML_METAL_DEL_KERNEL(get_rows_q4_1); - GGML_METAL_DEL_KERNEL(get_rows_q5_0); - GGML_METAL_DEL_KERNEL(get_rows_q5_1); - GGML_METAL_DEL_KERNEL(get_rows_q8_0); - GGML_METAL_DEL_KERNEL(get_rows_q2_K); - GGML_METAL_DEL_KERNEL(get_rows_q3_K); - GGML_METAL_DEL_KERNEL(get_rows_q4_K); - GGML_METAL_DEL_KERNEL(get_rows_q5_K); - GGML_METAL_DEL_KERNEL(get_rows_q6_K); - GGML_METAL_DEL_KERNEL(get_rows_i32); - GGML_METAL_DEL_KERNEL(get_rows_iq2_xxs); - GGML_METAL_DEL_KERNEL(get_rows_iq2_xs); - GGML_METAL_DEL_KERNEL(rms_norm); - GGML_METAL_DEL_KERNEL(group_norm); - GGML_METAL_DEL_KERNEL(norm); - GGML_METAL_DEL_KERNEL(mul_mv_f32_f32); - GGML_METAL_DEL_KERNEL(mul_mv_f16_f16); - GGML_METAL_DEL_KERNEL(mul_mv_f16_f32); - GGML_METAL_DEL_KERNEL(mul_mv_f16_f32_1row); - GGML_METAL_DEL_KERNEL(mul_mv_f16_f32_l4); - GGML_METAL_DEL_KERNEL(mul_mv_q4_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q4_1_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q5_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q5_1_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q8_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q2_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q3_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q4_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q5_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_q6_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_iq2_xxs_f32); - GGML_METAL_DEL_KERNEL(mul_mv_iq2_xs_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_f32_f32); - //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f16); - 
GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32); - //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32_1row); - //GGML_METAL_DEL_KERNEL(mul_mv_id_f16_f32_l4); - GGML_METAL_DEL_KERNEL(mul_mv_id_q4_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q4_1_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q5_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q5_1_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q8_0_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q2_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q3_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q4_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q5_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_q6_K_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xxs_f32); - GGML_METAL_DEL_KERNEL(mul_mv_id_iq2_xs_f32); - if ([ctx->device supportsFamily:MTLGPUFamilyApple7]) { - GGML_METAL_DEL_KERNEL(mul_mm_f32_f32); - GGML_METAL_DEL_KERNEL(mul_mm_f16_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q4_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q4_1_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q5_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q5_1_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q8_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q2_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q3_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q4_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q5_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_q6_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_iq2_xxs_f32); - GGML_METAL_DEL_KERNEL(mul_mm_iq2_xs_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_f32_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_f16_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q4_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q4_1_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q5_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q5_1_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q8_0_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q2_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q3_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q4_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q5_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_q6_K_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xxs_f32); - GGML_METAL_DEL_KERNEL(mul_mm_id_iq2_xs_f32); - } - GGML_METAL_DEL_KERNEL(rope_f32); - GGML_METAL_DEL_KERNEL(rope_f16); - GGML_METAL_DEL_KERNEL(alibi_f32); - GGML_METAL_DEL_KERNEL(im2col_f16); - GGML_METAL_DEL_KERNEL(upscale_f32); - GGML_METAL_DEL_KERNEL(pad_f32); - GGML_METAL_DEL_KERNEL(argsort_f32_i32_asc); - GGML_METAL_DEL_KERNEL(argsort_f32_i32_desc); - GGML_METAL_DEL_KERNEL(leaky_relu_f32); - GGML_METAL_DEL_KERNEL(cpy_f32_f16); - GGML_METAL_DEL_KERNEL(cpy_f32_f32); - GGML_METAL_DEL_KERNEL(cpy_f32_q8_0); - GGML_METAL_DEL_KERNEL(cpy_f32_q4_0); - GGML_METAL_DEL_KERNEL(cpy_f32_q4_1); - //GGML_METAL_DEL_KERNEL(cpy_f32_q5_0); - //GGML_METAL_DEL_KERNEL(cpy_f32_q5_1); - GGML_METAL_DEL_KERNEL(cpy_f16_f16); - GGML_METAL_DEL_KERNEL(cpy_f16_f32); - GGML_METAL_DEL_KERNEL(concat); - GGML_METAL_DEL_KERNEL(sqr); - GGML_METAL_DEL_KERNEL(sum_rows); - -#undef GGML_METAL_DEL_KERNEL for (int i = 0; i < ctx->n_buffers; ++i) { [ctx->buffers[i].metal release]; } + for (int i = 0; i < GGML_METAL_MAX_KERNELS; ++i) { + if (ctx->kernels[i].pipeline) { + [ctx->kernels[i].pipeline release]; + } + + if (ctx->kernels[i].function) { + [ctx->kernels[i].function release]; + } + } + [ctx->library release]; [ctx->queue release]; [ctx->device release]; @@ -930,7 +862,7 @@ void ggml_metal_graph_find_concurrency( } } -static bool ggml_metal_supports_op(const struct ggml_tensor * op) { +static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_UNARY: switch (ggml_get_unary_op(op)) { @@ -956,9 +888,11 @@ static bool ggml_metal_supports_op(const struct ggml_tensor 
* op) { case GGML_OP_SCALE: case GGML_OP_SQR: case GGML_OP_SUM_ROWS: + return true; case GGML_OP_SOFT_MAX: case GGML_OP_RMS_NORM: case GGML_OP_GROUP_NORM: + return ctx->support_simdgroup_reduction; case GGML_OP_NORM: case GGML_OP_ALIBI: case GGML_OP_ROPE: @@ -967,9 +901,10 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { case GGML_OP_PAD: case GGML_OP_ARGSORT: case GGML_OP_LEAKY_RELU: + return true; case GGML_OP_MUL_MAT: case GGML_OP_MUL_MAT_ID: - return true; + return ctx->support_simdgroup_reduction; case GGML_OP_CPY: case GGML_OP_DUP: case GGML_OP_CONT: @@ -1007,6 +942,7 @@ static bool ggml_metal_supports_op(const struct ggml_tensor * op) { return false; } } + bool ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { @@ -1077,7 +1013,7 @@ bool ggml_metal_graph_compute( } break; } - if (!ggml_metal_supports_op(dst)) { + if (!ggml_metal_supports_op(ctx, dst)) { GGML_METAL_LOG_ERROR("%s: error: unsupported op '%s'\n", __func__, ggml_op_desc(dst)); GGML_ASSERT(!"unsupported op"); } @@ -1143,7 +1079,9 @@ bool ggml_metal_graph_compute( { const int64_t nb = ne00; - [encoder setComputePipelineState:ctx->pipeline_concat]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONCAT].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1197,18 +1135,18 @@ bool ggml_metal_graph_compute( nb = ne00 / 4; switch (dst->op) { - case GGML_OP_ADD: pipeline = ctx->pipeline_add_row; break; - case GGML_OP_MUL: pipeline = ctx->pipeline_mul_row; break; - case GGML_OP_DIV: pipeline = ctx->pipeline_div_row; break; + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD_ROW].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_ROW].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV_ROW].pipeline; break; default: GGML_ASSERT(false); } bcast_row = true; } else { switch (dst->op) { - case GGML_OP_ADD: pipeline = ctx->pipeline_add; break; - case GGML_OP_MUL: pipeline = ctx->pipeline_mul; break; - case GGML_OP_DIV: pipeline = ctx->pipeline_div; break; + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV].pipeline; break; default: GGML_ASSERT(false); } } @@ -1275,9 +1213,9 @@ bool ggml_metal_graph_compute( // not sure how to avoid this // TODO: make a simpler cpy_bytes kernel - const int nth = MIN((int) ctx->pipeline_cpy_f32_f32.maxTotalThreadsPerThreadgroup, ne00); + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; - [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -1297,10 +1235,14 @@ bool ggml_metal_graph_compute( [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } - [encoder 
setComputePipelineState:ctx->pipeline_add]; + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1330,7 +1272,7 @@ bool ggml_metal_graph_compute( [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; - const int nth = MIN((int) ctx->pipeline_add.maxTotalThreadsPerThreadgroup, ne00); + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -1342,13 +1284,16 @@ bool ggml_metal_graph_compute( int64_t n = ggml_nelements(dst); + id pipeline = nil; + if (n % 4 == 0) { n /= 4; - [encoder setComputePipelineState:ctx->pipeline_scale_4]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE_4].pipeline; } else { - [encoder setComputePipelineState:ctx->pipeline_scale]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE].pipeline; } + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; @@ -1359,7 +1304,9 @@ bool ggml_metal_graph_compute( switch (ggml_get_unary_op(gf->nodes[i])) { case GGML_UNARY_OP_TANH: { - [encoder setComputePipelineState:ctx->pipeline_tanh]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TANH].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1369,7 +1316,9 @@ bool ggml_metal_graph_compute( } break; case GGML_UNARY_OP_RELU: { - [encoder setComputePipelineState:ctx->pipeline_relu]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RELU].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1379,7 +1328,9 @@ bool ggml_metal_graph_compute( } break; case GGML_UNARY_OP_GELU: { - [encoder setComputePipelineState:ctx->pipeline_gelu]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1390,7 +1341,9 @@ bool ggml_metal_graph_compute( } break; case GGML_UNARY_OP_GELU_QUICK: { - [encoder setComputePipelineState:ctx->pipeline_gelu_quick]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_QUICK].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1401,7 +1354,9 @@ bool ggml_metal_graph_compute( } break; case GGML_UNARY_OP_SILU: { - [encoder setComputePipelineState:ctx->pipeline_silu]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SILU].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; @@ -1420,18 +1375,23 @@ bool ggml_metal_graph_compute( { GGML_ASSERT(ggml_is_contiguous(src0)); - [encoder setComputePipelineState:ctx->pipeline_sqr]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SQR].pipeline; + + [encoder 
setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; const int64_t n = ggml_nelements(dst); + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; } break; case GGML_OP_SUM_ROWS: { GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); - [encoder setComputePipelineState:ctx->pipeline_sum_rows]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SUM_ROWS].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; @@ -1465,20 +1425,23 @@ bool ggml_metal_graph_compute( { int nth = 32; // SIMD width + id pipeline = nil; + if (ne00%4 == 0) { while (nth < ne00/4 && nth < 256) { nth *= 2; } - [encoder setComputePipelineState:ctx->pipeline_soft_max_4]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_4].pipeline; } else { while (nth < ne00 && nth < 1024) { nth *= 2; } - [encoder setComputePipelineState:ctx->pipeline_soft_max]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; } const float scale = ((float *) dst->op_params)[0]; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; if (id_src1) { [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; @@ -1498,11 +1461,15 @@ bool ggml_metal_graph_compute( { const int n_past = ((int32_t *)(dst->op_params))[0]; + id pipeline = nil; + if (ne00%8 == 0) { - [encoder setComputePipelineState:ctx->pipeline_diag_mask_inf_8]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8].pipeline; } else { - [encoder setComputePipelineState:ctx->pipeline_diag_mask_inf]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF].pipeline; } + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; @@ -1562,23 +1529,28 @@ bool ggml_metal_graph_compute( ne00 % 32 == 0 && ne00 >= 64 && (ne11 > ne11_mm_min || (ggml_is_quantized(src0t) && ne12 > 1))) { //printf("matrix: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + switch (src0->type) { - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_f32_f32]; break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_mul_mm_f16_f32]; break; - case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q4_0_f32]; break; - case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q4_1_f32]; break; - case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_0_f32]; break; - case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_1_f32]; break; - case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q8_0_f32]; break; - case GGML_TYPE_Q2_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q2_K_f32]; break; - case GGML_TYPE_Q3_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q3_K_f32]; break; - case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q4_K_f32]; break; - case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q5_K_f32]; break; - case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_q6_K_f32]; break; - case 
GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xxs_f32]; break; - case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_mul_mm_iq2_xs_f32]; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1602,12 +1574,14 @@ bool ggml_metal_graph_compute( int nrows = 1; //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + id pipeline = nil; + // use custom matrix x vector kernel switch (src0t) { case GGML_TYPE_F32: { GGML_ASSERT(src1t == GGML_TYPE_F32); - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f32_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32].pipeline; nrows = 4; } break; case GGML_TYPE_F16: @@ -1616,16 +1590,16 @@ bool ggml_metal_graph_compute( nth1 = 1; if (src1t == GGML_TYPE_F32) { if (ne11 * ne12 < 4) { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32_1row]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW].pipeline; } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32_l4]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4].pipeline; nrows = ne11; } else { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32].pipeline; nrows = 4; } } else { - [encoder setComputePipelineState:ctx->pipeline_mul_mv_f16_f16]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16].pipeline; nrows = 4; } } break; @@ -1633,73 +1607,73 @@ bool ggml_metal_graph_compute( { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q4_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32].pipeline; } break; case GGML_TYPE_Q4_1: { nth0 = 8; nth1 = 8; - [encoder 
setComputePipelineState:ctx->pipeline_mul_mv_q4_1_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32].pipeline; } break; case GGML_TYPE_Q5_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32].pipeline; } break; case GGML_TYPE_Q5_1: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_1_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32].pipeline; } break; case GGML_TYPE_Q8_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q8_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32].pipeline; } break; case GGML_TYPE_Q2_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q2_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32].pipeline; } break; case GGML_TYPE_Q3_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q3_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32].pipeline; } break; case GGML_TYPE_Q4_K: { nth0 = 4; //1; nth1 = 8; //32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q4_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32].pipeline; } break; case GGML_TYPE_Q5_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q5_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32].pipeline; } break; case GGML_TYPE_Q6_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_q6_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32].pipeline; } break; case GGML_TYPE_IQ2_XXS: { nth0 = 4; nth1 = 16; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xxs_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32].pipeline; } break; case GGML_TYPE_IQ2_XS: { nth0 = 4; nth1 = 16; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_iq2_xs_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32].pipeline; } break; default: { @@ -1712,6 +1686,7 @@ bool ggml_metal_graph_compute( GGML_ASSERT(ne00 >= nth0*nth1); } + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1818,23 +1793,28 @@ bool ggml_metal_graph_compute( if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && ne20 % 32 == 0 && ne20 >= 64 && ne11 > ne11_mm_min) { + + id pipeline = nil; + switch (src2->type) { - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f32_f32]; break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_f16_f32]; break; - case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_0_f32]; break; - case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q4_1_f32]; break; - case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_0_f32]; break; - case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_1_f32]; break; - case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q8_0_f32]; break; - case GGML_TYPE_Q2_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q2_K_f32]; break; - case GGML_TYPE_Q3_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q3_K_f32]; break; - case GGML_TYPE_Q4_K: [encoder 
setComputePipelineState:ctx->pipeline_mul_mm_id_q4_K_f32]; break; - case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q5_K_f32]; break; - case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_q6_K_f32]; break; - case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xxs_f32]; break; - case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_mul_mm_id_iq2_xs_f32]; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -1874,91 +1854,93 @@ bool ggml_metal_graph_compute( int nrows = 1; //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + id pipeline = nil; + // use custom matrix x vector kernel switch (src2t) { case GGML_TYPE_F32: { GGML_ASSERT(src1t == GGML_TYPE_F32); - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_f32_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32].pipeline; } break; case GGML_TYPE_F16: { GGML_ASSERT(src1t == GGML_TYPE_F32); nth0 = 32; nth1 = 1; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_f16_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32].pipeline; } break; case GGML_TYPE_Q4_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32].pipeline; } break; case GGML_TYPE_Q4_1: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_1_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32].pipeline; } break; case GGML_TYPE_Q5_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_0_f32]; + pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32].pipeline; } break; case GGML_TYPE_Q5_1: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_1_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32].pipeline; } break; case GGML_TYPE_Q8_0: { nth0 = 8; nth1 = 8; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q8_0_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32].pipeline; } break; case GGML_TYPE_Q2_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q2_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32].pipeline; } break; case GGML_TYPE_Q3_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q3_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32].pipeline; } break; case GGML_TYPE_Q4_K: { nth0 = 4; //1; nth1 = 8; //32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q4_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32].pipeline; } break; case GGML_TYPE_Q5_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q5_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32].pipeline; } break; case GGML_TYPE_Q6_K: { nth0 = 2; nth1 = 32; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_q6_K_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32].pipeline; } break; case GGML_TYPE_IQ2_XXS: { nth0 = 4; nth1 = 16; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xxs_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32].pipeline; } break; case GGML_TYPE_IQ2_XS: { nth0 = 4; nth1 = 16; - [encoder setComputePipelineState:ctx->pipeline_mul_mv_id_iq2_xs_f32]; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32].pipeline; } break; default: { @@ -1973,6 +1955,7 @@ bool ggml_metal_graph_compute( const int64_t _ne1 = 1; // kernels needs a reference in constant memory + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -2040,25 +2023,28 @@ bool ggml_metal_graph_compute( } break; case GGML_OP_GET_ROWS: { + id pipeline = nil; + switch (src0->type) { - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_get_rows_f32]; break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_get_rows_f16]; break; - case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_get_rows_q4_0]; break; - case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_get_rows_q4_1]; break; - case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_0]; break; - case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_1]; break; - case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_get_rows_q8_0]; break; - case GGML_TYPE_Q2_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q2_K]; break; - case GGML_TYPE_Q3_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q3_K]; break; - case GGML_TYPE_Q4_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q4_K]; break; - case GGML_TYPE_Q5_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q5_K]; break; - case GGML_TYPE_Q6_K: [encoder setComputePipelineState:ctx->pipeline_get_rows_q6_K]; break; - case GGML_TYPE_I32: [encoder 
setComputePipelineState:ctx->pipeline_get_rows_i32]; break; - case GGML_TYPE_IQ2_XXS: [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xxs]; break; - case GGML_TYPE_IQ2_XS : [encoder setComputePipelineState:ctx->pipeline_get_rows_iq2_xs]; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F16 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; + case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; default: GGML_ASSERT(false && "not implemented"); } + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -2086,7 +2072,9 @@ bool ggml_metal_graph_compute( nth *= 2; } - [encoder setComputePipelineState:ctx->pipeline_rms_norm]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RMS_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2115,7 +2103,9 @@ bool ggml_metal_graph_compute( // nth *= 2; //} - [encoder setComputePipelineState:ctx->pipeline_group_norm]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GROUP_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2137,7 +2127,9 @@ bool ggml_metal_graph_compute( const int nth = MIN(256, ne00); - [encoder setComputePipelineState:ctx->pipeline_norm]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2164,7 +2156,9 @@ bool ggml_metal_graph_compute( const float m0 = powf(2.0f, -(max_bias) / n_heads_log2_floor); const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); - [encoder 
setComputePipelineState:ctx->pipeline_alibi_f32]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ALIBI_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2209,12 +2203,15 @@ bool ggml_metal_graph_compute( memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); + id pipeline = nil; + switch (src0->type) { - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_rope_f32]; break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_rope_f16]; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F16].pipeline; break; default: GGML_ASSERT(false); }; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; @@ -2277,12 +2274,15 @@ bool ggml_metal_graph_compute( const int32_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; const int32_t ofs1 = src1->nb[is_2D ? 2 : 1] / 4; + id pipeline = nil; + switch (src0->type) { case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_im2col_f16]; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; default: GGML_ASSERT(false); }; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src1 offset:offs_src1 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ofs0 length:sizeof( int32_t) atIndex:2]; @@ -2305,7 +2305,9 @@ bool ggml_metal_graph_compute( const int sf = dst->op_params[0]; - [encoder setComputePipelineState:ctx->pipeline_upscale_f32]; + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_UPSCALE_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; @@ -2326,7 +2328,7 @@ bool ggml_metal_graph_compute( [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; - const int nth = MIN((int) ctx->pipeline_upscale_f32.maxTotalThreadsPerThreadgroup, ne0); + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; } break; @@ -2334,7 +2336,9 @@ bool ggml_metal_graph_compute( { GGML_ASSERT(src0->type == GGML_TYPE_F32); - [encoder setComputePipelineState:ctx->pipeline_pad_f32]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_PAD_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; @@ -2367,12 +2371,15 @@ bool ggml_metal_graph_compute( enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; + id pipeline = nil; + switch (order) { - case GGML_SORT_ASC: [encoder setComputePipelineState:ctx->pipeline_argsort_f32_i32_asc]; break; - case GGML_SORT_DESC: [encoder 
setComputePipelineState:ctx->pipeline_argsort_f32_i32_desc]; break; + case GGML_SORT_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; + case GGML_SORT_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; default: GGML_ASSERT(false); }; + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2386,7 +2393,9 @@ bool ggml_metal_graph_compute( float slope; memcpy(&slope, dst->op_params, sizeof(float)); - [encoder setComputePipelineState:ctx->pipeline_leaky_relu_f32]; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&slope length:sizeof(slope) atIndex:2]; @@ -2403,33 +2412,36 @@ bool ggml_metal_graph_compute( int nth = MIN(1024, ne00/ggml_blck_size(src0->type)); + id pipeline = nil; + switch (src0t) { case GGML_TYPE_F32: { GGML_ASSERT(ne0 % ggml_blck_size(dst->type) == 0); switch (dstt) { - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f16]; break; - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_f32]; break; - case GGML_TYPE_Q8_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q8_0]; break; - case GGML_TYPE_Q4_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q4_0]; break; - case GGML_TYPE_Q4_1: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q4_1]; break; - //case GGML_TYPE_Q5_0: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q5_0]; break; - //case GGML_TYPE_Q5_1: [encoder setComputePipelineState:ctx->pipeline_cpy_f32_q5_1]; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F16].pipeline; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1].pipeline; break; + //case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0].pipeline; break; + //case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1].pipeline; break; default: GGML_ASSERT(false && "not implemented"); }; } break; case GGML_TYPE_F16: { switch (dstt) { - case GGML_TYPE_F16: [encoder setComputePipelineState:ctx->pipeline_cpy_f16_f16]; break; - case GGML_TYPE_F32: [encoder setComputePipelineState:ctx->pipeline_cpy_f16_f32]; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F16].pipeline; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F32].pipeline; break; default: GGML_ASSERT(false && "not implemented"); }; } break; default: GGML_ASSERT(false && "not implemented"); } + [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; @@ -2794,9 +2806,9 @@ static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml } static bool 
ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { - return ggml_metal_supports_op(op); + struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; - UNUSED(backend); + return ggml_metal_supports_op(metal_ctx, op); } static struct ggml_backend_i ggml_backend_metal_i = { From c30b1ef39aeba497a943416d2897d69fee055b96 Mon Sep 17 00:00:00 2001 From: texmex76 <40733439+texmex76@users.noreply.github.com> Date: Sat, 13 Jan 2024 17:06:20 +0100 Subject: [PATCH 454/859] gguf : fix potential infinite for-loop (#4600) Co-authored-by: Bernhard Gstrein --- ggml.c | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ggml.c b/ggml.c index 6dbd7626c..de6ef34bd 100644 --- a/ggml.c +++ b/ggml.c @@ -19184,7 +19184,7 @@ void gguf_free(struct gguf_context * ctx) { if (ctx->kv) { // free string memory - not great.. - for (uint32_t i = 0; i < ctx->header.n_kv; ++i) { + for (uint64_t i = 0; i < ctx->header.n_kv; ++i) { struct gguf_kv * kv = &ctx->kv[i]; if (kv->key.data) { @@ -19200,7 +19200,7 @@ void gguf_free(struct gguf_context * ctx) { if (kv->type == GGUF_TYPE_ARRAY) { if (kv->value.arr.data) { if (kv->value.arr.type == GGUF_TYPE_STRING) { - for (uint32_t j = 0; j < kv->value.arr.n; ++j) { + for (uint64_t j = 0; j < kv->value.arr.n; ++j) { struct gguf_str * str = &((struct gguf_str *) kv->value.arr.data)[j]; if (str->data) { free(str->data); @@ -19216,7 +19216,7 @@ void gguf_free(struct gguf_context * ctx) { } if (ctx->infos) { - for (uint32_t i = 0; i < ctx->header.n_tensors; ++i) { + for (uint64_t i = 0; i < ctx->header.n_tensors; ++i) { struct gguf_tensor_info * info = &ctx->infos[i]; if (info->name.data) { From 722d33f34ec74c6f7046109f936d0928ffe171bc Mon Sep 17 00:00:00 2001 From: Yann Follet <131855179+YannFollet@users.noreply.github.com> Date: Sun, 14 Jan 2024 00:09:08 +0800 Subject: [PATCH 455/859] main : add parameter --no-display-prompt (#4541) * add the parameter : --no-display-prompt , combine with --log-disable it will display only the generated tokens * remove empty line --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 6 +++++- common/common.h | 1 + examples/main/main.cpp | 7 ++++++- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 322b9f91e..c11006bcb 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -617,6 +617,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { params.numa = true; } else if (arg == "--verbose-prompt") { params.verbose_prompt = true; + } else if (arg == "--no-display-prompt") { + params.display_prompt = false; } else if (arg == "-r" || arg == "--reverse-prompt") { if (++i >= argc) { invalid_param = true; @@ -936,11 +938,12 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); printf(" or for intermediate results and KV (with split-mode = row) (default: %d)\n", params.main_gpu); #endif + printf(" --verbose-prompt print a verbose prompt before generation (default: %s)\n", params.verbose_prompt ? "true" : "false"); + printf(" --no-display-prompt don't print prompt at generation (default: %s)\n", !params.display_prompt ? 
"true" : "false"); printf(" -gan N, --grp-attn-n N\n"); printf(" group-attention factor (default: %d)\n", params.grp_attn_n); printf(" -gaw N, --grp-attn-w N\n"); printf(" group-attention width (default: %.1f)\n", (double)params.grp_attn_w); - printf(" --verbose-prompt print prompt before generation\n"); printf(" -dkvc, --dump-kv-cache\n"); printf(" verbose print of the KV cache\n"); printf(" -nkvo, --no-kv-offload\n"); @@ -1582,6 +1585,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "min_p: %f # default: 0.0\n", sparams.min_p); fprintf(stream, "typical_p: %f # default: 1.0\n", sparams.typical_p); fprintf(stream, "verbose_prompt: %s # default: false\n", params.verbose_prompt ? "true" : "false"); + fprintf(stream, "display_prompt: %s # default: true\n", params.display_prompt ? "true" : "false"); } // diff --git a/common/common.h b/common/common.h index f29be5b5a..096468243 100644 --- a/common/common.h +++ b/common/common.h @@ -126,6 +126,7 @@ struct gpt_params { bool use_mlock = false; // use mlock to keep model in memory bool numa = false; // attempt optimizations that help on some NUMA systems bool verbose_prompt = false; // print prompt tokens before generation + bool display_prompt = true; // print prompt before generation bool infill = false; // use infill mode bool dump_kv_cache = false; // dump the KV cache contents for debugging purposes bool no_kv_offload = false; // disable KV offloading diff --git a/examples/main/main.cpp b/examples/main/main.cpp index c53b29978..58b7f807a 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -477,6 +477,7 @@ int main(int argc, char ** argv) { bool is_antiprompt = false; bool input_echo = true; + bool display = true; bool need_to_save_session = !path_session.empty() && n_matching_session_tokens < embd_inp.size(); int n_past = 0; @@ -491,6 +492,7 @@ int main(int argc, char ** argv) { // the first thing we will do is to output the prompt, so set color accordingly console::set_display(console::prompt); + display = params.display_prompt; std::vector embd; std::vector embd_guidance; @@ -707,7 +709,7 @@ int main(int argc, char ** argv) { } // display text - if (input_echo) { + if (input_echo && display) { for (auto id : embd) { const std::string token_str = llama_token_to_piece(ctx, id); printf("%s", token_str.c_str()); @@ -724,6 +726,7 @@ int main(int argc, char ** argv) { // reset color to default if there is no pending user input if (input_echo && (int) embd_inp.size() == n_consumed) { console::set_display(console::reset); + display = true; } // if not currently processing queued inputs; @@ -796,6 +799,7 @@ int main(int argc, char ** argv) { // color user input only console::set_display(console::user_input); + display = params.display_prompt; std::string line; bool another_line = true; @@ -806,6 +810,7 @@ int main(int argc, char ** argv) { // done taking input, reset color console::set_display(console::reset); + display = true; // Add tokens to embd only if the input buffer is non-empty // Entering a empty line lets the user pass control back From 6b48ed089377330cdb362970a51c1c89b6d857a8 Mon Sep 17 00:00:00 2001 From: Someone Date: Sat, 13 Jan 2024 16:29:16 +0000 Subject: [PATCH 456/859] workflows: unbreak nix-build-aarch64, and split it out (#4915) The fix should be just the `sudo apt-get update` --- .github/workflows/nix-ci-aarch64.yml | 55 ++++++++++++++++++++++++++++ .github/workflows/nix-ci.yml | 41 --------------------- 2 files changed, 55 insertions(+), 41 deletions(-) create 
mode 100644 .github/workflows/nix-ci-aarch64.yml diff --git a/.github/workflows/nix-ci-aarch64.yml b/.github/workflows/nix-ci-aarch64.yml new file mode 100644 index 000000000..be7c26d40 --- /dev/null +++ b/.github/workflows/nix-ci-aarch64.yml @@ -0,0 +1,55 @@ +name: Nix aarch64 builds + +on: + workflow_dispatch: # allows manual triggering + push: + branches: + - master + paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] + pull_request: + types: [opened, synchronize, reopened] + paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', '**/*.sh', '**/*.py', '**/*.nix'] + +jobs: + nix-build-aarch64: + if: ${{ vars.CACHIX_NAME != '' }} + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install QEMU + # Copy-paste from https://github.com/orgs/community/discussions/8305#discussioncomment-5888654 + run: | + sudo apt-get update + sudo apt-get install -y qemu-user-static qemu-system-aarch64 + sudo usermod -a -G kvm $USER + - name: Install Nix + uses: DeterminateSystems/nix-installer-action@v9 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + extra-conf: | + extra-platforms = aarch64-linux + extra-system-features = nixos-test kvm + extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + - uses: DeterminateSystems/magic-nix-cache-action@v2 + with: + upstream-cache: https://${{ matrix.cachixName }}.cachix.org + - name: Set-up cachix to push the results to + uses: cachix/cachix-action@v13 + with: + authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' + name: ${{ vars.CACHIX_NAME }} + - name: Show all output paths + run: > + nix run github:nix-community/nix-eval-jobs + -- --gc-roots-dir gcroot + --flake + ".#packages.aarch64-linux" + - name: Build + run: > + nix run github:Mic92/nix-fast-build + -- --skip-cached --no-nom + --systems aarch64-linux + --flake + ".#checks.aarch64-linux" diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml index a38c6ead4..845b93bfb 100644 --- a/.github/workflows/nix-ci.yml +++ b/.github/workflows/nix-ci.yml @@ -69,44 +69,3 @@ jobs: -- --skip-cached --no-nom --flake ".#checks.$(nix eval --raw --impure --expr builtins.currentSystem)" - nix-build-aarch64: - if: ${{ vars.CACHIX_NAME != '' }} - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - name: Install QEMU - # Copy-paste from https://github.com/orgs/community/discussions/8305#discussioncomment-5888654 - run: | - sudo apt-get install -y qemu-user-static qemu-system-aarch64 - sudo usermod -a -G kvm $USER - - name: Install Nix - uses: DeterminateSystems/nix-installer-action@v9 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - extra-conf: | - extra-platforms = aarch64-linux - extra-system-features = nixos-test kvm - extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org - extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= - - uses: DeterminateSystems/magic-nix-cache-action@v2 - with: - upstream-cache: https://${{ matrix.cachixName }}.cachix.org - - name: Set-up cachix to push the results to - uses: 
cachix/cachix-action@v13 - with: - authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - name: ${{ vars.CACHIX_NAME }} - - name: Show all output paths - run: > - nix run github:nix-community/nix-eval-jobs - -- --gc-roots-dir gcroot - --flake - ".#packages.aarch64-linux" - - name: Build - run: > - nix run github:Mic92/nix-fast-build - -- --skip-cached --no-nom - --systems aarch64-linux - --flake - ".#checks.aarch64-linux" From df845cc982e7e2ea7b9900e29d55b15338faa78d Mon Sep 17 00:00:00 2001 From: David Friehs Date: Sat, 13 Jan 2024 17:29:43 +0100 Subject: [PATCH 457/859] llama : minimize size used for state save/load (#4820) * examples : save-load-state: save only required state * llama : only reserve n_vocab * n_batch at most for logits llama_decode asserts that only n_batch tokens are passed each call, and n_ctx is expected to be bigger than n_batch. * llama : always reserve n_vocab * n_batch for logits llama_context de-serialization breaks if the contexts have differing capacity for logits and llama_decode will at maximum resize to n_vocab * n_batch. * llama : only save and restore used logits for batch sizes of 512 this reduces save state in the best case by around 62 MB, which can be a lot if planning to save on each message to allow regenerating messages. * llama : use ostringstream and istringstream for save and load * llama : serialize rng into minimum amount of space required * llama : break session version due to serialization changes --- examples/save-load-state/save-load-state.cpp | 21 ++++---- llama.cpp | 53 +++++++------------- llama.h | 2 +- 3 files changed, 29 insertions(+), 47 deletions(-) diff --git a/examples/save-load-state/save-load-state.cpp b/examples/save-load-state/save-load-state.cpp index 48d801110..ef952e2bd 100644 --- a/examples/save-load-state/save-load-state.cpp +++ b/examples/save-load-state/save-load-state.cpp @@ -45,13 +45,13 @@ int main(int argc, char ** argv) { // save state (rng, logits, embedding and kv_cache) to file { std::vector state_mem(llama_get_state_size(ctx)); + const size_t written = llama_copy_state_data(ctx, state_mem.data()); - { - FILE *fp_write = fopen("dump_state.bin", "wb"); - llama_copy_state_data(ctx, state_mem.data()); // could also copy directly to memory mapped file - fwrite(state_mem.data(), 1, state_mem.size(), fp_write); - fclose(fp_write); - } + FILE *fp_write = fopen("dump_state.bin", "wb"); + fwrite(state_mem.data(), 1, written, fp_write); + fclose(fp_write); + + fprintf(stderr, "%s : serialized state into %zd out of a maximum of %zd bytes\n", __func__, written, state_mem.size()); } // save state (last tokens) @@ -100,18 +100,17 @@ int main(int argc, char ** argv) { std::vector state_mem(llama_get_state_size(ctx2)); FILE * fp_read = fopen("dump_state.bin", "rb"); + const size_t read = fread(state_mem.data(), 1, state_mem.size(), fp_read); + fclose(fp_read); - const size_t ret = fread(state_mem.data(), 1, state_mem.size(), fp_read); - if (ret != state_mem.size()) { + if (read != llama_set_state_data(ctx2, state_mem.data())) { fprintf(stderr, "\n%s : failed to read state\n", __func__); llama_free(ctx2); llama_free_model(model); return 1; } - llama_set_state_data(ctx2, state_mem.data()); - - fclose(fp_read); + fprintf(stderr, "%s : deserialized state from %zd out of a maximum of %zd bytes\n", __func__, read, state_mem.size()); } // restore state (last tokens) diff --git a/llama.cpp b/llama.cpp index 1d2eb569f..275456088 100644 --- a/llama.cpp +++ b/llama.cpp @@ -9379,12 +9379,8 @@ struct llama_context * llama_new_context_with_model( 
ggml_type_name(type_v), (float)memory_size_v / (1024.0f * 1024.0f)); } - // resized during inference - if (params.logits_all) { - ctx->logits.reserve(cparams.n_ctx*hparams.n_vocab); - } else { - ctx->logits.reserve(hparams.n_vocab); - } + // resized during inference, reserve maximum + ctx->logits.reserve(hparams.n_vocab*cparams.n_batch); if (params.embedding){ ctx->embedding.resize(hparams.n_embd); @@ -9731,8 +9727,8 @@ size_t llama_get_state_size(const struct llama_context * ctx) { // for reference, std::mt19937(1337) serializes to 6701 bytes. const size_t s_rng_size = sizeof(size_t); const size_t s_rng = LLAMA_MAX_RNG_STATE; - const size_t s_logits_capacity = sizeof(size_t); const size_t s_logits_size = sizeof(size_t); + // assume worst case for logits although only currently set ones are serialized const size_t s_logits = ctx->logits.capacity() * sizeof(float); const size_t s_embedding_size = sizeof(size_t); const size_t s_embedding = ctx->embedding.size() * sizeof(float); @@ -9743,7 +9739,6 @@ size_t llama_get_state_size(const struct llama_context * ctx) { const size_t s_total = ( + s_rng_size + s_rng - + s_logits_capacity + s_logits_size + s_logits + s_embedding_size @@ -9812,37 +9807,27 @@ struct llama_data_file_context : llama_data_context { static void llama_copy_state_data_internal(struct llama_context * ctx, llama_data_context * data_ctx) { // copy rng { - std::stringstream rng_ss; + std::ostringstream rng_ss; rng_ss << ctx->rng; - const size_t rng_size = rng_ss.str().size(); - char rng_buf[LLAMA_MAX_RNG_STATE]; + const std::string & rng_str = rng_ss.str(); + const size_t rng_size = rng_str.size(); - memset(&rng_buf[0], 0, LLAMA_MAX_RNG_STATE); - memcpy(&rng_buf[0], rng_ss.str().data(), rng_ss.str().size()); + GGML_ASSERT(rng_size <= LLAMA_MAX_RNG_STATE); - data_ctx->write(&rng_size, sizeof(rng_size)); - data_ctx->write(&rng_buf[0], LLAMA_MAX_RNG_STATE); + data_ctx->write(&rng_size, sizeof(rng_size)); + data_ctx->write(rng_str.data(), rng_size); } // copy logits { - const size_t logits_cap = ctx->logits.capacity(); const size_t logits_size = ctx->logits.size(); - data_ctx->write(&logits_cap, sizeof(logits_cap)); data_ctx->write(&logits_size, sizeof(logits_size)); if (logits_size) { data_ctx->write(ctx->logits.data(), logits_size * sizeof(float)); } - - // If there is a gap between the size and the capacity, write padding - size_t padding_size = (logits_cap - logits_size) * sizeof(float); - if (padding_size > 0) { - std::vector padding(padding_size, 0); // Create a buffer filled with zeros - data_ctx->write(padding.data(), padding_size); - } } // copy embeddings @@ -9925,13 +9910,13 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { // set rng { size_t rng_size; - char rng_buf[LLAMA_MAX_RNG_STATE]; + memcpy(&rng_size, inp, sizeof(rng_size)); inp += sizeof(rng_size); - memcpy(&rng_size, inp, sizeof(rng_size)); inp += sizeof(rng_size); - memcpy(&rng_buf[0], inp, LLAMA_MAX_RNG_STATE); inp += LLAMA_MAX_RNG_STATE; + GGML_ASSERT(rng_size <= LLAMA_MAX_RNG_STATE); - std::stringstream rng_ss; - rng_ss.str(std::string(&rng_buf[0], rng_size)); + std::string rng_str((char *)inp, rng_size); inp += rng_size; + + std::istringstream rng_ss(rng_str); rng_ss >> ctx->rng; GGML_ASSERT(!rng_ss.fail()); @@ -9939,20 +9924,18 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { // set logits { - size_t logits_cap; size_t logits_size; - memcpy(&logits_cap, inp, sizeof(logits_cap)); inp += sizeof(logits_cap); memcpy(&logits_size, inp, sizeof(logits_size)); 
inp += sizeof(logits_size); - GGML_ASSERT(ctx->logits.capacity() == logits_cap); + GGML_ASSERT(ctx->logits.capacity() >= logits_size); if (logits_size) { ctx->logits.resize(logits_size); - memcpy(ctx->logits.data(), inp, logits_size * sizeof(float)); - } - inp += logits_cap * sizeof(float); + memcpy(ctx->logits.data(), inp, logits_size * sizeof(float)); + inp += logits_size * sizeof(float); + } } // set embeddings diff --git a/llama.h b/llama.h index 689e12d7c..01d6fafaa 100644 --- a/llama.h +++ b/llama.h @@ -43,7 +43,7 @@ #define LLAMA_FILE_MAGIC_GGSN 0x6767736eu // 'ggsn' #define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN -#define LLAMA_SESSION_VERSION 3 +#define LLAMA_SESSION_VERSION 4 #if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL) // Defined when llama.cpp is compiled with support for offloading model layers to GPU.
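Aside on the state-format change in the patch above: instead of always writing a fixed LLAMA_MAX_RNG_STATE-byte RNG block and the full logits capacity, the state now stores a size prefix followed by only the bytes actually in use, and the loader validates that prefix against a hard maximum before copying. A minimal C sketch of the pattern (illustrative only, helper names hypothetical, not the llama.cpp API):

#include <assert.h>
#include <stdint.h>
#include <string.h>

// writer: size prefix, then only the bytes in use
static uint8_t * blob_write(uint8_t * dst, const void * data, size_t size) {
    memcpy(dst, &size, sizeof(size)); dst += sizeof(size);
    memcpy(dst, data, size);          dst += size;
    return dst;
}

// reader: recover the size, check it against the maximum, copy exactly that much
static const uint8_t * blob_read(const uint8_t * src, void * data, size_t max_size) {
    size_t size;
    memcpy(&size, src, sizeof(size)); src += sizeof(size);
    assert(size <= max_size); // the patch guards this bound with GGML_ASSERT
    memcpy(data, src, size);  src += size;
    return src;
}

Dropping the fixed-size layout is also why the patch bumps LLAMA_SESSION_VERSION from 3 to 4, which keeps older readers from misparsing the new, shorter records.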
From 2d57de525541247132e354f561ff48775fba5d85 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 18:46:37 +0200 Subject: [PATCH 458/859] metal : disable log for loaded kernels (#4794) --- ggml-metal.m | 3 --- 1 file changed, 3 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 6c28a7ee3..57e444827 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -398,9 +398,6 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { struct ggml_metal_kernel * kernel = &ctx->kernels[e]; \ kernel->function = [ctx->library newFunctionWithName:@"kernel_"#name]; \ kernel->pipeline = [ctx->device newComputePipelineStateWithFunction:kernel->function error:&error]; \ - GGML_METAL_LOG_INFO("%s: loaded %-32s %16p | th_max = %4d | th_width = %4d\n", __func__, "kernel_"#name, (void *) kernel->pipeline, \ - (int) kernel->pipeline.maxTotalThreadsPerThreadgroup, \ - (int) kernel->pipeline.threadExecutionWidth); \ if (error) { \ GGML_METAL_LOG_ERROR("%s: error: load pipeline error: %s\n", __func__, [[error description] UTF8String]); \ return NULL; \ From f172de03f11465dc6c5a0fc3a22f8ec254c6832c Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 18:47:38 +0200 Subject: [PATCH 459/859] llama : fix detokenization of non-special added-tokens (#4916) Co-authored-by: goerch --- llama.cpp | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index 275456088..2190ea7aa 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10305,6 +10305,8 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token if (0 <= token && token < llama_n_vocab(model)) { switch (llama_vocab_get_type(model->vocab)) { case LLAMA_VOCAB_TYPE_SPM: { + // NOTE: we accept all unsupported token types, + // suppressing them like CONTROL tokens. if (llama_is_normal_token(model->vocab, token)) { std::string result = model->vocab.id_to_token[token].text; llama_unescape_whitespace(result); @@ -10313,6 +10315,13 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } memcpy(buf, result.c_str(), result.length()); return result.length(); + } else if (llama_is_user_defined_token(model->vocab, token)) { + std::string result = model->vocab.id_to_token[token].text; + if (length < (int) result.length()) { + return -result.length(); + } + memcpy(buf, result.c_str(), result.length()); + return result.length(); } else if (llama_is_unknown_token(model->vocab, token)) { // NOLINT if (length < 3) { return -3; @@ -10327,14 +10336,12 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } buf[0] = llama_token_to_byte(model->vocab, token); return 1; - } else { - // TODO: for now we accept all unsupported token types, - // suppressing them like CONTROL tokens. - // GGML_ASSERT(false); } break; } case LLAMA_VOCAB_TYPE_BPE: { + // NOTE: we accept all unsupported token types, + // suppressing them like CONTROL tokens. if (llama_is_normal_token(model->vocab, token)) { std::string result = model->vocab.id_to_token[token].text; result = llama_decode_text(result); @@ -10343,12 +10350,15 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token } memcpy(buf, result.c_str(), result.length()); return result.length(); + } else if (llama_is_user_defined_token(model->vocab, token)) { + std::string result = model->vocab.id_to_token[token].text; + if (length < (int) result.length()) { + return -result.length(); + } + memcpy(buf, result.c_str(), result.length()); + return result.length(); } else if (llama_is_control_token(model->vocab, token)) { ; - } else { - // TODO: for now we accept all unsupported token types, - // suppressing them like CONTROL tokens.
- // GGML_ASSERT(false); } break; } From 0ea069b87bd296c556824e57455433b6c0357340 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 19:31:26 +0200 Subject: [PATCH 460/859] server : fix prompt caching with system prompt (#4914) --- examples/server/server.cpp | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 79eacf828..93f999298 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1180,8 +1180,9 @@ struct llama_server_context return slot.images.size() > 0; } - void send_error(task_server& task, std::string error) + void send_error(task_server& task, const std::string &error) { + LOG_TEE("task %i - error: %s\n", task.id, error.c_str()); std::unique_lock lock(mutex_results); task_result res; res.id = task.id; @@ -1570,12 +1571,22 @@ struct llama_server_context LOG_TEE("slot unavailable\n"); // send error result send_error(task, "slot unavailable"); - return; + break; } if (task.data.contains("system_prompt")) { + if (!all_slots_are_idle) { + send_error(task, "system prompt can only be updated when all slots are idle"); + break; + } process_system_prompt_data(task.data["system_prompt"]); + + // reset cache_tokens for all slots + for (llama_client_slot &slot : slots) + { + slot.cache_tokens.clear(); + } } slot->reset(); @@ -1652,8 +1663,7 @@ struct llama_server_context // attend tasks process_tasks(); - // update the system prompt wait until all slots are idle state - if (system_need_update && all_slots_are_idle) + if (system_need_update) { LOG_TEE("updating system prompt\n"); update_system_prompt(); From 4be5ef556de830c5c4f6e45c05ef4427823fe607 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 13 Jan 2024 20:45:45 +0200 Subject: [PATCH 461/859] metal : remove old API (#4919) ggml-ci --- Makefile | 9 -- examples/CMakeLists.txt | 3 - examples/metal/CMakeLists.txt | 4 - examples/metal/metal.cpp | 103 ------------- ggml-metal.h | 55 +------ ggml-metal.m | 276 +++------------------------------- llama.cpp | 4 +- 7 files changed, 27 insertions(+), 427 deletions(-) delete mode 100644 examples/metal/CMakeLists.txt delete mode 100644 examples/metal/metal.cpp diff --git a/Makefile b/Makefile index 05fe9a0f6..995b89f7a 100644 --- a/Makefile +++ b/Makefile @@ -43,10 +43,6 @@ ifeq ($(UNAME_S),Darwin) endif endif -ifneq '' '$(or $(filter clean,$(MAKECMDGOALS)),$(LLAMA_METAL))' -BUILD_TARGETS += metal -endif - default: $(BUILD_TARGETS) test: $(TEST_TARGETS) @@ -671,11 +667,6 @@ lookup: examples/lookup/lookup.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) passkey: examples/passkey/passkey.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -ifdef LLAMA_METAL -metal: examples/metal/metal.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) -endif - ifeq ($(UNAME_S),Darwin) swift: examples/batched.swift (cd examples/batched.swift; make build) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index fa127a3aa..f67d74c55 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -37,9 +37,6 @@ else() add_subdirectory(lookup) add_subdirectory(train-text-from-scratch) add_subdirectory(imatrix) - if (LLAMA_METAL) - add_subdirectory(metal) - endif() if (LLAMA_BUILD_SERVER) add_subdirectory(server) endif() diff --git a/examples/metal/CMakeLists.txt b/examples/metal/CMakeLists.txt deleted file mode 100644 index f16d49165..000000000 --- a/examples/metal/CMakeLists.txt +++ /dev/null @@ -1,4 +0,0 @@ 
-set(TEST_TARGET metal) -add_executable(${TEST_TARGET} metal.cpp) -install(TARGETS ${TARGET} RUNTIME) -target_link_libraries(${TEST_TARGET} PRIVATE ggml) diff --git a/examples/metal/metal.cpp b/examples/metal/metal.cpp deleted file mode 100644 index 16c1146f9..000000000 --- a/examples/metal/metal.cpp +++ /dev/null @@ -1,103 +0,0 @@ -// Evaluate a statically exported ggml computation graph with Metal -// -// - First, export a LLaMA graph: -// -// $ ./bin/main -m ../models/7B/ggml-model-q4_0.gguf --export -// -// - Run this tool to evaluate the exported graph: -// -// $ ./bin/metal llama.ggml -// -// The purpose of this tool is mostly for debugging and demonstration purposes. -// The main limitation of exporting computation graphs is that their sizes are static which often -// can be a problem for real-world applications. -// - -#include "ggml.h" -#include "ggml-metal.h" - -#include -#include -#include - -int main(int argc, char ** argv) { - ggml_time_init(); - - if (argc != 2) { - fprintf(stderr, "Usage: %s llama.ggml\n", argv[0]); - return -1; - } - - const char * fname_cgraph = argv[1]; - - // load the compute graph - struct ggml_context * ctx_data = NULL; - struct ggml_context * ctx_eval = NULL; - - struct ggml_cgraph * gf = ggml_graph_import(fname_cgraph, &ctx_data, &ctx_eval); - - // this allocates all Metal resources and memory buffers - auto * ctx_metal = ggml_metal_init(1); - - const size_t max_size_data = ggml_get_max_tensor_size(ctx_data); - const size_t max_size_eval = ggml_get_max_tensor_size(ctx_eval); - ggml_metal_add_buffer(ctx_metal, "data", ggml_get_mem_buffer(ctx_data), ggml_get_mem_size(ctx_data), max_size_data); - ggml_metal_add_buffer(ctx_metal, "eval", ggml_get_mem_buffer(ctx_eval), ggml_get_mem_size(ctx_eval), max_size_eval); - - // main - { - struct ggml_tensor * input = ggml_graph_get_tensor(gf, "embd"); - *(int32_t *) input->data = 1; // BOS - - ggml_metal_set_tensor(ctx_metal, input); - - // warmup - ggml_metal_graph_compute(ctx_metal, gf); - - const int n_iter = 16; - - const int64_t t0 = ggml_time_us(); - - // the actual inference happens here - for (int i = 0; i < n_iter; ++i) { - ggml_metal_graph_compute(ctx_metal, gf); - } - - const int64_t t1 = ggml_time_us(); - - printf("time: %.2f ms, %.2f ms/tok\n", (t1 - t0) / 1000.0, (t1 - t0) / 1000.0 / n_iter); - } - - // debug output - { - struct ggml_tensor * logits = gf->nodes[gf->n_nodes - 1]; - ggml_metal_get_tensor(ctx_metal, logits); - - float * ptr = (float *) ggml_get_data(logits); - - printf("logits: "); - for (int i = 0; i < 10; i++) { - printf("%8.4f ", ptr[i]); - } - printf("\n"); - int imax = 0; - double sum = 0.0; - double vmax = -1e9; - for (int i = 0; i < 32000; i++) { - sum += (double) ptr[i]; - if (ptr[i] > vmax) { - vmax = ptr[i]; - imax = i; - } - } - printf("sum: %f, imax = %d, vmax = %f\n", sum, imax, vmax); - } - - ggml_metal_free(ctx_metal); - - ggml_free(ctx_data); - ggml_free(ctx_eval); - - return 0; -} - diff --git a/ggml-metal.h b/ggml-metal.h index c4b7325da..cd5e2995f 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -36,64 +36,13 @@ struct ggml_cgraph; extern "C" { #endif -// -// internal API -// temporary exposed to user-code -// - -struct ggml_metal_context; - -void ggml_metal_log_set_callback(ggml_log_callback log_callback, void * user_data); - -// number of command buffers to use -struct ggml_metal_context * ggml_metal_init(int n_cb); -void ggml_metal_free(struct ggml_metal_context * ctx); - -void * ggml_metal_host_malloc(size_t n); -void ggml_metal_host_free (void * data); - -// set 
the number of command buffers to use -void ggml_metal_set_n_cb(struct ggml_metal_context * ctx, int n_cb); - -// creates a mapping between a host memory buffer and a device memory buffer -// - make sure to map all buffers used in the graph before calling ggml_metal_graph_compute -// - the mapping is used during computation to determine the arguments of the compute kernels -// - you don't need to keep the host memory buffer allocated as it is never accessed by Metal -// - max_size specifies the maximum size of a tensor and is used to create shared views such -// that it is guaranteed that the tensor will fit in at least one of the views -// -bool ggml_metal_add_buffer( - struct ggml_metal_context * ctx, - const char * name, - void * data, - size_t size, - size_t max_size); - -// set data from host memory into the device -void ggml_metal_set_tensor(struct ggml_metal_context * ctx, struct ggml_tensor * t); - -// get data from the device into host memory -void ggml_metal_get_tensor(struct ggml_metal_context * ctx, struct ggml_tensor * t); - -// try to find operations that can be run concurrently in the graph -// you should run it again if the topology of your graph changes -void ggml_metal_graph_find_concurrency(struct ggml_metal_context * ctx, struct ggml_cgraph * gf, bool check_mem); - -// if the graph has been optimized for concurrently dispatch, return length of the concur_list if optimized -int ggml_metal_if_optimized(struct ggml_metal_context * ctx); - -// output the concur_list for ggml_alloc -int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx); - -// same as ggml_graph_compute but uses Metal -// creates gf->n_threads command buffers in parallel -bool ggml_metal_graph_compute(struct ggml_metal_context * ctx, struct ggml_cgraph * gf); - // // backend API // user-code should use only these functions // +GGML_API void ggml_backend_metal_log_set_callback(ggml_log_callback log_callback, void * user_data); + GGML_API ggml_backend_t ggml_backend_metal_init(void); GGML_API bool ggml_backend_is_metal(ggml_backend_t backend); diff --git a/ggml-metal.m b/ggml-metal.m index 57e444827..cae52c983 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -24,8 +24,6 @@ #define UNUSED(x) (void)(x) -#define GGML_MAX_CONCUR (2*GGML_DEFAULT_GRAPH_SIZE) - #define GGML_METAL_MAX_KERNELS 256 struct ggml_metal_buffer { @@ -182,9 +180,6 @@ struct ggml_metal_context { struct ggml_metal_kernel kernels[GGML_METAL_MAX_KERNELS]; - int concur_list[GGML_MAX_CONCUR]; - int concur_list_len; - bool support_simdgroup_reduction; bool support_simdgroup_mm; }; @@ -200,7 +195,6 @@ struct ggml_metal_context { @implementation GGMLMetalClass @end - static void ggml_metal_default_log_callback(enum ggml_log_level level, const char * msg, void * user_data) { fprintf(stderr, "%s", msg); @@ -211,11 +205,6 @@ static void ggml_metal_default_log_callback(enum ggml_log_level level, const cha ggml_log_callback ggml_metal_log_callback = ggml_metal_default_log_callback; void * ggml_metal_log_user_data = NULL; -void ggml_metal_log_set_callback(ggml_log_callback log_callback, void * user_data) { - ggml_metal_log_callback = log_callback; - ggml_metal_log_user_data = user_data; -} - GGML_ATTRIBUTE_FORMAT(2, 3) static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ if (ggml_metal_log_callback != NULL) { @@ -238,7 +227,18 @@ static void ggml_metal_log(enum ggml_log_level level, const char * format, ...){ } } -struct ggml_metal_context * ggml_metal_init(int n_cb) { +static void * ggml_metal_host_malloc(size_t n) { + 
void * data = NULL; + const int result = posix_memalign((void **) &data, sysconf(_SC_PAGESIZE), n); + if (result != 0) { + GGML_METAL_LOG_ERROR("%s: error: posix_memalign failed\n", __func__); + return NULL; + } + + return data; +} + +static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_INFO("%s: allocating\n", __func__); id device; @@ -264,7 +264,6 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { ctx->n_cb = MIN(n_cb, GGML_METAL_MAX_BUFFERS); ctx->queue = [ctx->device newCommandQueue]; ctx->n_buffers = 0; - ctx->concur_list_len = 0; ctx->d_queue = dispatch_queue_create("ggml-metal", DISPATCH_QUEUE_CONCURRENT); @@ -531,7 +530,7 @@ struct ggml_metal_context * ggml_metal_init(int n_cb) { return ctx; } -void ggml_metal_free(struct ggml_metal_context * ctx) { +static void ggml_metal_free(struct ggml_metal_context * ctx) { GGML_METAL_LOG_INFO("%s: deallocating\n", __func__); for (int i = 0; i < ctx->n_buffers; ++i) { @@ -557,33 +556,6 @@ void ggml_metal_free(struct ggml_metal_context * ctx) { free(ctx); } -void * ggml_metal_host_malloc(size_t n) { - void * data = NULL; - const int result = posix_memalign((void **) &data, sysconf(_SC_PAGESIZE), n); - if (result != 0) { - GGML_METAL_LOG_ERROR("%s: error: posix_memalign failed\n", __func__); - return NULL; - } - - return data; -} - -void ggml_metal_host_free(void * data) { - free(data); -} - -void ggml_metal_set_n_cb(struct ggml_metal_context * ctx, int n_cb) { - ctx->n_cb = MIN(n_cb, GGML_METAL_MAX_BUFFERS); -} - -int ggml_metal_if_optimized(struct ggml_metal_context * ctx) { - return ctx->concur_list_len; -} - -int * ggml_metal_get_concur_list(struct ggml_metal_context * ctx) { - return ctx->concur_list; -} - // temporarily defined here for compatibility between ggml-backend and the old API struct ggml_backend_metal_buffer { @@ -656,209 +628,6 @@ static id ggml_metal_get_buffer(struct ggml_metal_context * ctx, stru return nil; } -bool ggml_metal_add_buffer( - struct ggml_metal_context * ctx, - const char * name, - void * data, - size_t size, - size_t max_size) { - if (ctx->n_buffers >= GGML_METAL_MAX_BUFFERS) { - GGML_METAL_LOG_ERROR("%s: error: too many buffers\n", __func__); - return false; - } - - if (data) { - // verify that the buffer does not overlap with any of the existing buffers - for (int i = 0; i < ctx->n_buffers; ++i) { - const int64_t ioffs = (int64_t) data - (int64_t) ctx->buffers[i].data; - - if (ioffs >= 0 && ioffs < (int64_t) ctx->buffers[i].size) { - GGML_METAL_LOG_ERROR("%s: error: buffer '%s' overlaps with '%s'\n", __func__, name, ctx->buffers[i].name); - return false; - } - } - - const size_t size_page = sysconf(_SC_PAGESIZE); - - size_t size_aligned = size; - if ((size_aligned % size_page) != 0) { - size_aligned += (size_page - (size_aligned % size_page)); - } - - // the buffer fits into the max buffer size allowed by the device - if (size_aligned <= ctx->device.maxBufferLength) { - ctx->buffers[ctx->n_buffers].name = name; - ctx->buffers[ctx->n_buffers].data = data; - ctx->buffers[ctx->n_buffers].size = size; - - ctx->buffers[ctx->n_buffers].metal = [ctx->device newBufferWithBytesNoCopy:data length:size_aligned options:MTLResourceStorageModeShared deallocator:nil]; - - if (ctx->buffers[ctx->n_buffers].metal == nil) { - GGML_METAL_LOG_ERROR("%s: error: failed to allocate '%-16s' buffer, size = %8.2f MiB\n", __func__, name, size_aligned / 1024.0 / 1024.0); - return false; - } - - GGML_METAL_LOG_INFO("%s: allocated '%-16s' buffer, size = %8.2f MiB", __func__, name, size_aligned / 1024.0 / 
1024.0); - - ++ctx->n_buffers; - } else { - // this overlap between the views will guarantee that the tensor with the maximum size will fully fit into - // one of the views - const size_t size_ovlp = ((max_size + size_page - 1) / size_page + 1) * size_page; // round-up 2 pages just in case - const size_t size_step = ctx->device.maxBufferLength - size_ovlp; - const size_t size_view = ctx->device.maxBufferLength; - - for (size_t i = 0; i < size; i += size_step) { - const size_t size_step_aligned = (i + size_view <= size) ? size_view : (size_aligned - i); - - ctx->buffers[ctx->n_buffers].name = name; - ctx->buffers[ctx->n_buffers].data = (void *) ((uint8_t *) data + i); - ctx->buffers[ctx->n_buffers].size = size_step_aligned; - - ctx->buffers[ctx->n_buffers].metal = [ctx->device newBufferWithBytesNoCopy:(void *) ((uint8_t *) data + i) length:size_step_aligned options:MTLResourceStorageModeShared deallocator:nil]; - - if (ctx->buffers[ctx->n_buffers].metal == nil) { - GGML_METAL_LOG_ERROR("%s: error: failed to allocate '%-16s' buffer, size = %8.2f MiB\n", __func__, name, size_step_aligned / 1024.0 / 1024.0); - return false; - } - - GGML_METAL_LOG_INFO("%s: allocated '%-16s' buffer, size = %8.2f MiB, offs = %12ld", __func__, name, size_step_aligned / 1024.0 / 1024.0, i); - if (i + size_step < size) { - GGML_METAL_LOG_INFO("\n"); - } - - ++ctx->n_buffers; - } - } - -#if TARGET_OS_OSX - GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", - ctx->device.currentAllocatedSize / 1024.0 / 1024.0, - ctx->device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); - - if (ctx->device.currentAllocatedSize > ctx->device.recommendedMaxWorkingSetSize) { - GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); - } else { - GGML_METAL_LOG_INFO("\n"); - } -#else - GGML_METAL_LOG_INFO(", (%8.2f)\n", ctx->device.currentAllocatedSize / 1024.0 / 1024.0); -#endif - } - - return true; -} - -void ggml_metal_set_tensor( - struct ggml_metal_context * ctx, - struct ggml_tensor * t) { - size_t offs; - id id_dst = ggml_metal_get_buffer(ctx, t, &offs); - - memcpy((void *) ((uint8_t *) id_dst.contents + offs), t->data, ggml_nbytes(t)); -} - -void ggml_metal_get_tensor( - struct ggml_metal_context * ctx, - struct ggml_tensor * t) { - size_t offs; - id id_src = ggml_metal_get_buffer(ctx, t, &offs); - - memcpy(t->data, (void *) ((uint8_t *) id_src.contents + offs), ggml_nbytes(t)); -} - -void ggml_metal_graph_find_concurrency( - struct ggml_metal_context * ctx, - struct ggml_cgraph * gf, bool check_mem) { - int search_depth = gf->n_nodes; //we only find concurrency in this range to avoid wasting too much time - int nodes_unused[GGML_MAX_CONCUR]; - - for (int i = 0; i < GGML_MAX_CONCUR; i++) { ctx->concur_list[i] = 0; } - for (int i = 0; i < gf->n_nodes; i++) { nodes_unused[i] = 1; } - ctx->concur_list_len = 0; - - int n_left = gf->n_nodes; - int n_start = 0; // all nodes before n_start at nodes_unused array have been sorted and store back to ctx->concur_list - int level_pos = 0; // at ctx->concur_list, the last layer (level) ends at level_pos - - while (n_left > 0) { - // number of nodes at a layer (that can be issued concurrently) - int concurrency = 0; - for (int i = n_start; i < ((n_start + search_depth > gf->n_nodes) ? 
gf->n_nodes : n_start + search_depth); i++) { - if (nodes_unused[i]) { - // if the requirements for gf->nodes[i] are satisfied - int exe_flag = 1; - - // scan all srcs - for (int src_ind = 0; src_ind < GGML_MAX_SRC; src_ind++) { - struct ggml_tensor * src_cur = gf->nodes[i]->src[src_ind]; - if (src_cur) { - // if is leaf nodes it's satisfied. - // TODO: ggml_is_leaf() - if (src_cur->op == GGML_OP_NONE && src_cur->grad == NULL) { - continue; - } - - // otherwise this src should be the output from previous nodes. - int is_found = 0; - - // scan 2*search_depth back because we inserted barrier. - //for (int j = ((level_pos - 2*search_depth) < 0 ? 0 : (level_pos - 2*search_depth)); j < level_pos; j++) { - for (int j = MAX(0, level_pos - 2*search_depth); j < level_pos; j++) { - if (ctx->concur_list[j] >= 0 && gf->nodes[ctx->concur_list[j]] == src_cur) { - is_found = 1; - break; - } - } - if (is_found == 0) { - exe_flag = 0; - break; - } - } - } - if (exe_flag && check_mem) { - // check if nodes[i]'s data will be overwritten by a node before nodes[i]. - // if node[5] and node[3] write to the same memory region, then we can't issue node[5] before node[3] - int64_t data_start = (int64_t) gf->nodes[i]->data; - int64_t length = (int64_t) ggml_nbytes(gf->nodes[i]); - for (int j = n_start; j < i; j++) { - if (nodes_unused[j] && gf->nodes[j]->op != GGML_OP_RESHAPE \ - && gf->nodes[j]->op != GGML_OP_VIEW \ - && gf->nodes[j]->op != GGML_OP_TRANSPOSE \ - && gf->nodes[j]->op != GGML_OP_PERMUTE) { - if (((int64_t)gf->nodes[j]->data) >= data_start + length || \ - ((int64_t)gf->nodes[j]->data) + (int64_t) ggml_nbytes(gf->nodes[j]) <= data_start) { - continue; - } - - exe_flag = 0; - } - } - } - if (exe_flag) { - ctx->concur_list[level_pos + concurrency] = i; - nodes_unused[i] = 0; - concurrency++; - ctx->concur_list_len++; - } - } - } - n_left -= concurrency; - // adding a barrier different layer - ctx->concur_list[level_pos + concurrency] = -1; - ctx->concur_list_len++; - // jump all sorted nodes at nodes_bak - while (!nodes_unused[n_start]) { - n_start++; - } - level_pos += concurrency + 1; - } - - if (ctx->concur_list_len > GGML_MAX_CONCUR) { - GGML_METAL_LOG_WARN("%s: too many elements for metal ctx->concur_list!\n", __func__); - } -} - static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_UNARY: @@ -940,19 +709,15 @@ static bool ggml_metal_supports_op(const struct ggml_metal_context * ctx, const } } -bool ggml_metal_graph_compute( +static bool ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { @autoreleasepool { - // if there is ctx->concur_list, dispatch concurrently - // else fallback to serial dispatch MTLComputePassDescriptor * edesc = MTLComputePassDescriptor.computePassDescriptor; - const bool has_concur = ctx->concur_list_len && ctx->concur_list_len <= GGML_MAX_CONCUR; - - const int n_nodes = has_concur ? ctx->concur_list_len : gf->n_nodes; - edesc.dispatchType = has_concur ? MTLDispatchTypeConcurrent : MTLDispatchTypeSerial; + const int n_nodes = gf->n_nodes; + edesc.dispatchType = MTLDispatchTypeSerial; // create multiple command buffers and enqueue them // then, we encode the graph into the command buffers in parallel @@ -983,7 +748,7 @@ bool ggml_metal_graph_compute( const int node_end = MIN((cb_idx == n_cb - 1) ? n_nodes : (cb_idx + 1) * n_nodes_per_cb, n_nodes); for (int ind = node_start; ind < node_end; ++ind) { - const int i = has_concur ? 
ctx->concur_list[ind] : ind; + const int i = ind; if (i == -1) { [encoder memoryBarrierWithScope:MTLBarrierScopeBuffers]; @@ -2823,6 +2588,11 @@ static struct ggml_backend_i ggml_backend_metal_i = { /* .supports_op = */ ggml_backend_metal_supports_op, }; +void ggml_backend_metal_log_set_callback(ggml_log_callback log_callback, void * user_data) { + ggml_metal_log_callback = log_callback; + ggml_metal_log_user_data = user_data; +} + ggml_backend_t ggml_backend_metal_init(void) { struct ggml_metal_context * ctx = ggml_metal_init(GGML_DEFAULT_N_THREADS); @@ -2849,7 +2619,7 @@ void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb) { struct ggml_metal_context * ctx = (struct ggml_metal_context *)backend->context; - ggml_metal_set_n_cb(ctx, n_cb); + ctx->n_cb = MIN(n_cb, GGML_METAL_MAX_BUFFERS); } bool ggml_backend_metal_supports_family(ggml_backend_t backend, int family) { diff --git a/llama.cpp b/llama.cpp index 2190ea7aa..66494974a 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1266,7 +1266,7 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_split(int fallback_g struct llama_state { llama_state() { #ifdef GGML_USE_METAL - ggml_metal_log_set_callback(log_callback, log_callback_user_data); + ggml_backend_metal_log_set_callback(log_callback, log_callback_user_data); #endif } @@ -10470,7 +10470,7 @@ void llama_log_set(ggml_log_callback log_callback, void * user_data) { g_state.log_callback = log_callback ? log_callback : llama_log_callback_default; g_state.log_callback_user_data = user_data; #ifdef GGML_USE_METAL - ggml_metal_log_set_callback(g_state.log_callback, g_state.log_callback_user_data); + ggml_backend_metal_log_set_callback(g_state.log_callback, g_state.log_callback_user_data); #endif }
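With the old exposed Metal API removed by the patch above, the pattern of the deleted examples/metal tool maps onto the generic backend interface. A minimal C sketch, assuming a graph gf whose tensors were already allocated through the backend's buffer type; only calls visible in the ggml-metal.h diff above plus the generic ggml-backend entry points are used:

#include "ggml.h"
#include "ggml-backend.h"
#include "ggml-metal.h"

static bool run_graph_on_metal(struct ggml_cgraph * gf) {
    ggml_backend_t backend = ggml_backend_metal_init(); // was: ggml_metal_init(n_cb)
    if (backend == NULL) {
        return false;
    }
    ggml_backend_metal_set_n_cb(backend, 1);                 // was: ggml_metal_set_n_cb()
    const bool ok = ggml_backend_graph_compute(backend, gf); // was: ggml_metal_graph_compute()
    ggml_backend_free(backend);                              // was: ggml_metal_free()
    return ok;
}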
From c71d608ce7a1584bf5072f197919dd24f3a6163f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 13 Jan 2024 21:41:37 +0100 Subject: [PATCH 462/859] ggml: cache sin/cos for RoPE (#4908) --- ggml.c | 46 ++++++++++++++++++++++++++++-------------- 1 file changed, 32 insertions(+), 14 deletions(-) diff --git a/ggml.c b/ggml.c index de6ef34bd..bcfb6652c 100644 --- a/ggml.c +++ b/ggml.c @@ -11638,6 +11638,21 @@ static float ggml_rope_yarn_corr_dim(int n_dims, int n_orig_ctx, float n_rot, fl return n_dims * logf(n_orig_ctx / (n_rot * 2 * (float)M_PI)) / (2 * logf(base)); } +static void ggml_rope_cache_init( + float theta_base, float freq_scale, float corr_dims[2], int64_t ne0, float ext_factor, float mscale, + float * cache, float sin_sign, float theta_scale +) { + float theta = theta_base; + for (int64_t i0 = 0; i0 < ne0; i0 += 2) { + rope_yarn( + theta, freq_scale, corr_dims, i0, ext_factor, mscale, &cache[i0 + 0], &cache[i0 + 1] + ); + cache[i0 + 1] *= sin_sign; + + theta *= theta_scale; + } +} + void ggml_rope_yarn_corr_dims( int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2] ) { @@ -11720,6 +11735,12 @@ static void ggml_compute_forward_rope_f32( for (int64_t i3 = 0; i3 < ne3; i3++) { for (int64_t i2 = 0; i2 < ne2; i2++) { const int64_t p = pos[i2]; + + float * cache = (float *) params->wdata + (ne0 + CACHE_LINE_SIZE_F32)*ith; + if (!is_glm && !is_neox) { // TODO: cache sin/cos for glm, neox + ggml_rope_cache_init(p, freq_scale, corr_dims, ne0, ext_factor, attn_factor, cache, sin_sign, theta_scale); + } + for (int64_t i1 = 0; i1 < ne1; i1++) { if (ir++ < ir0) continue; if (ir > ir1) break; @@ -11753,18 +11774,13 @@ static void ggml_compute_forward_rope_f32( } } else if (!is_neox) { for (int64_t i0 = 0; i0 < ne0; i0 += 2) { - float cos_theta, sin_theta; - rope_yarn( - theta_base, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta - ); - sin_theta *= sin_sign; + const float cos_theta = cache[i0 + 0]; + const float sin_theta = cache[i0 + 1]; // zeta scaling for xPos only: float zeta = xpos_base != 0.0f ? powf((i0 + 0.4f * ne0) / (1.4f * ne0), p / xpos_base) : 1.0f; if (xpos_down) zeta = 1.0f / zeta; - theta_base *= theta_scale; - const float * const src = (float *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); float * dst_data = (float *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); @@ -11888,6 +11904,12 @@ static void ggml_compute_forward_rope_f16( for (int64_t i3 = 0; i3 < ne3; i3++) { for (int64_t i2 = 0; i2 < ne2; i2++) { const int64_t p = pos[i2]; + + float * cache = (float *) params->wdata + (ne0 + CACHE_LINE_SIZE_F32)*ith; + if (!is_glm && !is_neox) { // TODO: cache sin/cos for glm, neox + ggml_rope_cache_init(p, freq_scale, corr_dims, ne0, ext_factor, attn_factor, cache, sin_sign, theta_scale); + } + for (int64_t i1 = 0; i1 < ne1; i1++) { if (ir++ < ir0) continue; if (ir > ir1) break; @@ -11921,13 +11943,8 @@ static void ggml_compute_forward_rope_f16( } } else if (!is_neox) { for (int64_t i0 = 0; i0 < ne0; i0 += 2) { - float cos_theta, sin_theta; - rope_yarn( - theta_base, freq_scale, corr_dims, i0, ext_factor, attn_factor, &cos_theta, &sin_theta - ); - sin_theta *= sin_sign; - - theta_base *= theta_scale; + const float cos_theta = cache[i0 + 0]; + const float sin_theta = cache[i0 + 1]; const ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i3*nb03 + i2*nb02 + i1*nb01 + i0*nb00); ggml_fp16_t * dst_data = (ggml_fp16_t *)((char *) dst->data + i3*nb3 + i2*nb2 + i1*nb1 + i0*nb0); @@ -16722,6 +16739,7 @@ struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threa } } break; case GGML_OP_SOFT_MAX: + case GGML_OP_ROPE: { cur = ggml_type_size(GGML_TYPE_F32) * node->ne[0] * n_tasks; } break; From 76484fbfd355df388f71d6edaa98e1692a74de7e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 14 Jan 2024 00:14:46 +0200 Subject: [PATCH 463/859] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index edcdb530a..753d227a7 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -400c07f00508e6f60fb25405444b5669c365b0a9 +1890780da4ea10db88736fcde85f285abf6c64b0 From 807179ec583dcb882f97d9704577c06beb2c5ec9 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 14 Jan 2024 09:44:30 +0200 Subject: [PATCH 464/859] Make Q3_K_S be the same as old Q3_K_L for Mixtral-8x7B (#4906) Co-authored-by: Iwan Kawrakow --- llama.cpp | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/llama.cpp b/llama.cpp index 66494974a..8e20e72a2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8489,9 +8489,16 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty ++qs.i_feed_forward_w2; } else if (name.find("attn_output.weight") != std::string::npos) { if (arch != LLM_ARCH_FALCON) { - if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) new_type = GGML_TYPE_Q4_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + if (qs.model.hparams.n_expert == 8) { + if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype ==
LLAMA_FTYPE_MOSTLY_Q3_K_M || + ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { + new_type = GGML_TYPE_Q5_K; + } + } else { + if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + } } else { if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q4_K; }
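The imatrix file parsed by the load_imatrix function added in the next patch has a simple binary layout: an int32 entry count, then for each entry an int32 name length, the name bytes, an int32 ncall, an int32 nval, and nval floats (the values are divided by ncall on load when ncall > 0). A matching writer sketch in C (hypothetical helper, not part of the patch):

#include <stdio.h>
#include <string.h>

// append one entry in the layout load_imatrix expects
static void write_imatrix_entry(FILE * f, const char * name, int ncall,
                                const float * values, int nval) {
    const int len = (int)strlen(name);
    fwrite(&len,   sizeof(len),   1, f);
    fwrite(name,   1,           len, f);
    fwrite(&ncall, sizeof(ncall), 1, f);
    fwrite(&nval,  sizeof(nval),  1, f);
    fwrite(values, sizeof(float), nval, f);
}

// the file itself begins with the int32 number of entries:
//     int n_entries = ...; fwrite(&n_entries, sizeof(n_entries), 1, f);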
try_parse_ftype(const std::string & ftype_str_in, llama_ftype & ftyp // [[noreturn]] static void usage(const char * executable) { - printf("usage: %s [--help] [--allow-requantize] [--leave-output-tensor] [--pure] model-f32.gguf [model-quant.gguf] type [nthreads]\n\n", executable); + printf("usage: %s [--help] [--allow-requantize] [--leave-output-tensor] [--pure] [--imatrix] [--include-weights] [--exclude-weights] model-f32.gguf [model-quant.gguf] type [nthreads]\n\n", executable); printf(" --allow-requantize: Allows requantizing tensors that have already been quantized. Warning: This can severely reduce quality compared to quantizing from 16bit or 32bit\n"); printf(" --leave-output-tensor: Will leave output.weight un(re)quantized. Increases model size but may also increase quality, especially when requantizing\n"); printf(" --pure: Disable k-quant mixtures and quantize all tensors to the same type\n"); + printf(" --imatrixfile_name: use data in file_name as importance matrix for quant optimizations\n"); + printf(" --include-weights tensor_name: use importance matrix for this/these tensor(s)\n"); + printf(" --exclude-weights tensor_name: use importance matrix for this/these tensor(s)\n"); + printf("Note: --include-weights and --exclude-weights cannot be used together\n"); printf("\nAllowed quantization types:\n"); for (auto & it : QUANT_OPTIONS) { if (it.name != "COPY") { @@ -83,11 +93,93 @@ static void usage(const char * executable) { } else { printf(" "); } - printf("%-6s : %s\n", it.name.c_str(), it.desc.c_str()); + printf("%-7s : %s\n", it.name.c_str(), it.desc.c_str()); } exit(1); } +static void load_imatrix(const std::string& imatrix_file, std::unordered_map>& imatrix_data) { + std::ifstream in(imatrix_file.c_str(), std::ios::binary); + if (!in) { + printf("%s: failed to open %s\n",__func__,imatrix_file.c_str()); + return; + } + int n_entries; + in.read((char*)&n_entries, sizeof(n_entries)); + if (in.fail() || n_entries < 1) { + printf("%s: no data in file %s\n", __func__, imatrix_file.c_str()); + return; + } + for (int i = 0; i < n_entries; ++i) { + int len; in.read((char *)&len, sizeof(len)); + std::vector name_as_vec(len+1); + in.read((char *)name_as_vec.data(), len); + if (in.fail()) { + printf("%s: failed reading name for entry %d from %s\n",__func__,i+1,imatrix_file.c_str()); + return; + } + name_as_vec[len] = 0; + std::string name{name_as_vec.data()}; + auto& e = imatrix_data[std::move(name)]; + int ncall; + in.read((char*)&ncall, sizeof(ncall)); + int nval; + in.read((char *)&nval, sizeof(nval)); + if (in.fail() || nval < 1) { + printf("%s: failed reading number of values for entry %d\n",__func__,i); + imatrix_data = {}; + return; + } + e.resize(nval); + in.read((char*)e.data(), nval*sizeof(float)); + if (in.fail()) { + printf("%s: failed reading data for entry %d\n",__func__,i); + imatrix_data = {}; + return; + } + if (ncall > 0) { + for (auto& v : e) v /= ncall; + } + } + printf("%s: loaded %d importance matrix entries from %s\n",__func__,int(imatrix_data.size()),imatrix_file.c_str()); +} + +static void prepare_imatrix(const std::string& imatrix_file, + const std::vector& included_weights, + const std::vector& excluded_weights, + std::unordered_map>& imatrix_data) { + if (!imatrix_file.empty()) { + load_imatrix(imatrix_file, imatrix_data); + } + if (imatrix_data.empty()) { + return; + } + if (!excluded_weights.empty()) { + for (auto& name : excluded_weights) { + for (auto it = imatrix_data.begin(); it != imatrix_data.end(); ) { + auto pos = it->first.find(name); + if (pos != 
std::string::npos) it = imatrix_data.erase(it); + else ++it; + } + } + } + if (!included_weights.empty()) { + std::unordered_map> tmp; + for (auto& name : included_weights) { + for (auto& e : imatrix_data) { + auto pos = e.first.find(name); + if (pos != std::string::npos) { + tmp.emplace(std::move(e)); + } + } + } + imatrix_data = std::move(tmp); + } + if (!imatrix_data.empty()) { + printf("%s: have %d importance matrix entries\n", __func__, int(imatrix_data.size())); + } +} + int main(int argc, char ** argv) { if (argc < 3) { usage(argv[0]); @@ -96,6 +188,8 @@ int main(int argc, char ** argv) { llama_model_quantize_params params = llama_model_quantize_default_params(); int arg_idx = 1; + std::string imatrix_file; + std::vector included_weights, excluded_weights; for (; arg_idx < argc && strncmp(argv[arg_idx], "--", 2) == 0; arg_idx++) { if (strcmp(argv[arg_idx], "--leave-output-tensor") == 0) { @@ -104,14 +198,42 @@ int main(int argc, char ** argv) { params.allow_requantize = true; } else if (strcmp(argv[arg_idx], "--pure") == 0) { params.pure = true; + } else if (strcmp(argv[arg_idx], "--imatrix") == 0) { + if (arg_idx < argc-1) { + imatrix_file = argv[++arg_idx]; + } else { + usage(argv[0]); + } + } else if (strcmp(argv[arg_idx], "--include-weights") == 0) { + if (arg_idx < argc-1) { + included_weights.push_back(argv[++arg_idx]); + } else { + usage(argv[0]); + } + } else if (strcmp(argv[arg_idx], "--exclude-weights") == 0) { + if (arg_idx < argc-1) { + excluded_weights.push_back(argv[++arg_idx]); + } else { + usage(argv[0]); + } } else { usage(argv[0]); } } if (argc - arg_idx < 2) { + printf("%s: bad arguments\n", argv[0]); usage(argv[0]); } + if (!included_weights.empty() && !excluded_weights.empty()) { + usage(argv[0]); + } + + std::unordered_map> imatrix_data; + prepare_imatrix(imatrix_file, included_weights, excluded_weights, imatrix_data); + if (!imatrix_data.empty()) { + params.imatrix = &imatrix_data; + } llama_backend_init(false); @@ -163,6 +285,13 @@ int main(int argc, char ** argv) { } } + if ((params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || params.ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) && imatrix_data.empty()) { + fprintf(stderr, "\n===============================================================================================\n"); + fprintf(stderr, "Please do not use IQ2_XXS, IQ2_XS or Q2_K_S quantization without an importance matrix\n"); + fprintf(stderr, "===============================================================================================\n\n\n"); + return 1; + } + print_build_info(); fprintf(stderr, "%s: quantizing '%s' to '%s' as %s", __func__, fname_inp.c_str(), fname_out.c_str(), ftype_str.c_str()); diff --git a/ggml-quants.c b/ggml-quants.c index 601d155d7..9290d54cf 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -5,6 +5,8 @@ #include #include #include +#include // for qsort +#include // for GGML_ASSERT #ifdef __ARM_NEON @@ -1639,6 +1641,241 @@ size_t ggml_quantize_q2_K(const float * restrict src, void * restrict dst, int n return (n/QK_K*sizeof(block_q2_K)); } +static float make_qkx3_quants(int n, int nmax, const float * restrict x, const float * restrict weights, + uint8_t * restrict L, float * restrict the_min, uint8_t * restrict Laux, + float rmin, float rdelta, int nstep, bool use_mad) { + float min = x[0]; + float max = x[0]; + float sum_w = weights ? 
weights[0] : x[0]*x[0]; + float sum_x = sum_w * x[0]; + for (int i = 1; i < n; ++i) { + if (x[i] < min) min = x[i]; + if (x[i] > max) max = x[i]; + float w = weights ? weights[i] : x[i]*x[i]; + sum_w += w; + sum_x += w * x[i]; + } + if (min > 0) { + min = 0; + } + if (max <= min) { + for (int i = 0; i < n; ++i) L[i] = 0; + *the_min = -min; + return 0.f; + } + float iscale = nmax/(max - min); + float scale = 1/iscale; + float best_mad = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + L[i] = MAX(0, MIN(nmax, l)); + float diff = scale * L[i] + min - x[i]; + diff = use_mad ? fabsf(diff) : diff*diff; + float w = weights ? weights[i] : x[i]*x[i]; + best_mad += w * diff; + } + if (nstep < 1) { + *the_min = -min; + return scale; + } + for (int is = 0; is <= nstep; ++is) { + iscale = (rmin + rdelta*is + nmax)/(max - min); + float sum_l = 0, sum_l2 = 0, sum_xl = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale*(x[i] - min)); + l = MAX(0, MIN(nmax, l)); + Laux[i] = l; + float w = weights ? weights[i] : x[i]*x[i]; + sum_l += w*l; + sum_l2 += w*l*l; + sum_xl += w*l*x[i]; + } + float D = sum_w * sum_l2 - sum_l * sum_l; + if (D > 0) { + float this_scale = (sum_w * sum_xl - sum_x * sum_l)/D; + float this_min = (sum_l2 * sum_x - sum_l * sum_xl)/D; + if (this_min > 0) { + this_min = 0; + this_scale = sum_xl / sum_l2; + } + float mad = 0; + for (int i = 0; i < n; ++i) { + float diff = this_scale * Laux[i] + this_min - x[i]; + diff = use_mad ? fabsf(diff) : diff*diff; + float w = weights ? weights[i] : x[i]*x[i]; + mad += w * diff; + } + if (mad < best_mad) { + for (int i = 0; i < n; ++i) { + L[i] = Laux[i]; + } + best_mad = mad; + scale = this_scale; + min = this_min; + } + } + } + *the_min = -min; + return scale; +} + +static float make_qp_quants(int n, int nmax, const float * restrict x, uint8_t * restrict L, const float * quant_weights) { + float max = 0; + for (int i = 0; i < n; ++i) { + max = MAX(max, x[i]); + } + if (!max) { // all zero + for (int i = 0; i < n; ++i) { L[i] = 0; } + return 0.f; + } + float iscale = nmax / max; + for (int i = 0; i < n; ++i) { + L[i] = nearest_int(iscale * x[i]); + } + float scale = 1/iscale; + float best_mse = 0; + for (int i = 0; i < n; ++i) { + float diff = x[i] - scale*L[i]; + float w = quant_weights[i]; + best_mse += w*diff*diff; + } + for (int is = -4; is <= 4; ++is) { + if (is == 0) continue; + float iscale_is = (0.1f*is + nmax)/max; + float scale_is = 1/iscale_is; + float mse = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale_is*x[i]); + l = MIN(nmax, l); + float diff = x[i] - scale_is*l; + float w = quant_weights[i]; + mse += w*diff*diff; + } + if (mse < best_mse) { + best_mse = mse; + iscale = iscale_is; + } + } + float sumlx = 0; + float suml2 = 0; + for (int i = 0; i < n; ++i) { + int l = nearest_int(iscale * x[i]); + l = MIN(nmax, l); + L[i] = l; + float w = quant_weights[i]; + sumlx += w*x[i]*l; + suml2 += w*l*l; + } + for (int itry = 0; itry < 5; ++itry) { + int n_changed = 0; + for (int i = 0; i < n; ++i) { + float w = quant_weights[i]; + float slx = sumlx - w*x[i]*L[i]; + float sl2 = suml2 - w*L[i]*L[i]; + if (slx > 0 && sl2 > 0) { + int new_l = nearest_int(x[i] * sl2 / slx); + new_l = MIN(nmax, new_l); + if (new_l != L[i]) { + slx += w*x[i]*new_l; + sl2 += w*new_l*new_l; + if (slx*slx*suml2 > sumlx*sumlx*sl2) { + L[i] = new_l; sumlx = slx; suml2 = sl2; + ++n_changed; + } + } + } + } + if (!n_changed) { + break; + } + } + return sumlx / suml2; +} + +static void quantize_row_q2_K_impl(const 
float * restrict x, block_q2_K * restrict y, int k, const float * restrict quant_weights) { + GGML_ASSERT(quant_weights); + assert(k % QK_K == 0); + const int nb = k / QK_K; + const bool requantize = true; + + uint8_t L[QK_K]; + uint8_t Laux[16]; + float mins[QK_K/16]; + float scales[QK_K/16]; + float sw[QK_K/16]; + float weight[QK_K/16]; + uint8_t Ls[QK_K/16], Lm[QK_K/16]; + + for (int i = 0; i < nb; i++) { + memset(sw, 0, QK_K/16*sizeof(float)); + float sumx2 = 0; + for (int j = 0; j < QK_K; ++j) sumx2 += x[j]*x[j]; + float sigma2 = sumx2/QK_K; + for (int j = 0; j < QK_K/16; ++j) { + const float * restrict qw = quant_weights + QK_K * i + 16*j; + for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]); + for (int l = 0; l < 16; ++l) sw[j] += weight[l]; + scales[j] = make_qkx3_quants(16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + } + + float dm = make_qp_quants(QK_K/16, 15, scales, Ls, sw); + float mm = make_qp_quants(QK_K/16, 15, mins, Lm, sw); + y[i].d = GGML_FP32_TO_FP16(dm); + y[i].dmin = GGML_FP32_TO_FP16(mm); + dm = GGML_FP16_TO_FP32(y[i].d); + mm = GGML_FP16_TO_FP32(y[i].dmin); + + for (int j = 0; j < QK_K/16; ++j) { + y[i].scales[j] = Ls[j] | (Lm[j] << 4); + } + + if (requantize) { + for (int j = 0; j < QK_K/16; ++j) { + const float d = dm * (y[i].scales[j] & 0xF); + if (!d) continue; + const float m = mm * (y[i].scales[j] >> 4); + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int((x[16*j + ii] + m)/d); + l = MAX(0, MIN(3, l)); + L[16*j + ii] = l; + } + } + } + +#if QK_K == 256 + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + y[i].qs[j/4 + l] = L[j + l] | (L[j + l + 32] << 2) | (L[j + l + 64] << 4) | (L[j + l + 96] << 6); + } + } +#else + for (int l = 0; l < 16; ++l) { + y[i].qs[l] = L[l] | (L[l + 16] << 2) | (L[l + 32] << 4) | (L[l + 48] << 6); + } +#endif + + x += QK_K; + + } +} + +size_t quantize_q2_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q2_K, n_per_row); + if (!quant_weights) { + quantize_row_q2_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q2_K_impl(src, (block_q2_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + //========================= 3-bit (de)-quantization void quantize_row_q3_K_reference(const float * restrict x, block_q3_K * restrict y, int k) { @@ -2584,14 +2821,6 @@ static const uint8_t ksigns_iq2xs[128] = { static const uint8_t kmask_iq2xs[8] = {1, 2, 4, 8, 16, 32, 64, 128}; -void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k) { - (void)x; - (void)y; - (void)k; - assert(k % QK_K == 0); - //fprintf(stderr, "=========================== %s: not implemented\n", __func__); -} - void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k) { assert(k % QK_K == 0); const int nb = k / QK_K; @@ -2618,33 +2847,8 @@ void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y } } -void quantize_row_iq2_xxs(const float * restrict x, void * restrict vy, int k) { - assert(k % QK_K == 0); - block_iq2_xxs * restrict y = vy; - quantize_row_iq2_xxs_reference(x, y, k); -} - -size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist) { - assert(k % QK_K == 0); - (void)hist; // TODO: collect 
histograms - - for (int j = 0; j < n; j += k) { - block_iq2_xxs * restrict y = (block_iq2_xxs *)dst + j/QK_K; - quantize_row_iq2_xxs_reference(src + j, y, k); - } - return (n/QK_K*sizeof(block_iq2_xxs)); -} - // ====================== 2.3125 bpw (de)-quantization -void quantize_row_iq2_xs_reference(const float * restrict x, block_iq2_xs * restrict y, int k) { - (void)x; - (void)y; - (void)k; - assert(k % QK_K == 0); - //fprintf(stderr, "=========================== %s: not implemented\n", __func__); -} - void dequantize_row_iq2_xs(const block_iq2_xs * restrict x, float * restrict y, int k) { assert(k % QK_K == 0); const int nb = k / QK_K; @@ -2670,23 +2874,6 @@ void dequantize_row_iq2_xs(const block_iq2_xs * restrict x, float * restrict y, } } -void quantize_row_iq2_xs(const float * restrict x, void * restrict vy, int k) { - assert(k % QK_K == 0); - block_iq2_xs * restrict y = vy; - quantize_row_iq2_xs_reference(x, y, k); -} - -size_t ggml_quantize_iq2_xs(const float * src, void * dst, int n, int k, int64_t * hist) { - assert(k % QK_K == 0); - (void)hist; // TODO: collect histograms - - for (int j = 0; j < n; j += k) { - block_iq2_xs * restrict y = (block_iq2_xs *)dst + j/QK_K; - quantize_row_iq2_xs_reference(src + j, y, k); - } - return (n/QK_K*sizeof(block_iq2_xs)); -} - //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -7730,3 +7917,666 @@ void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * rest *s = 0.125f * sumf; #endif } + +// ================================ IQ2 quantization ============================================= + +typedef struct { + uint64_t * grid; + int * map; + uint16_t * neighbours; +} iq2_entry_t; + +static iq2_entry_t iq2_data[2] = { + {NULL, NULL, NULL}, + {NULL, NULL, NULL}, +}; + +static inline int iq2_data_index(int grid_size) { + GGML_ASSERT(grid_size == 256 || grid_size == 512); + return grid_size == 256 ? 0 : 1; +} + +static int iq2_compare_func(const void * left, const void * right) { + const int * l = (const int *)left; + const int * r = (const int *)right; + return l[0] < r[0] ? -1 : l[0] > r[0] ? 1 : l[1] < r[1] ? -1 : l[1] > r[1] ? 
1 : 0; +} + +static void q2xs_init_impl(int grid_size) { + const int gindex = iq2_data_index(grid_size); + if (iq2_data[gindex].grid) { + return; + } + static const uint16_t kgrid_256[256] = { + 0, 2, 5, 8, 10, 17, 20, 32, 34, 40, 42, 65, 68, 80, 88, 97, + 100, 128, 130, 138, 162, 257, 260, 272, 277, 320, 388, 408, 512, 514, 546, 642, + 1025, 1028, 1040, 1057, 1060, 1088, 1090, 1096, 1120, 1153, 1156, 1168, 1188, 1280, 1282, 1288, + 1312, 1350, 1385, 1408, 1425, 1545, 1552, 1600, 1668, 1700, 2048, 2053, 2056, 2068, 2088, 2113, + 2116, 2128, 2130, 2184, 2308, 2368, 2562, 2580, 4097, 4100, 4112, 4129, 4160, 4192, 4228, 4240, + 4245, 4352, 4360, 4384, 4432, 4442, 4480, 4644, 4677, 5120, 5128, 5152, 5157, 5193, 5248, 5400, + 5474, 5632, 5654, 6145, 6148, 6160, 6208, 6273, 6400, 6405, 6560, 6737, 8192, 8194, 8202, 8260, + 8289, 8320, 8322, 8489, 8520, 8704, 8706, 9217, 9220, 9232, 9280, 9302, 9472, 9537, 9572, 9872, + 10248, 10272, 10388, 10820, 16385, 16388, 16400, 16408, 16417, 16420, 16448, 16456, 16470, 16480, 16513, 16516, + 16528, 16640, 16672, 16737, 16768, 16773, 16897, 16912, 16968, 16982, 17000, 17408, 17416, 17440, 17536, 17561, + 17682, 17700, 17920, 18433, 18436, 18448, 18496, 18501, 18688, 18776, 18785, 18818, 19013, 19088, 20480, 20488, + 20497, 20505, 20512, 20608, 20616, 20740, 20802, 20900, 21137, 21648, 21650, 21770, 22017, 22100, 22528, 22545, + 22553, 22628, 22848, 23048, 24580, 24592, 24640, 24680, 24832, 24917, 25112, 25184, 25600, 25605, 25872, 25874, + 25988, 26690, 32768, 32770, 32778, 32833, 32898, 33028, 33048, 33088, 33297, 33793, 33796, 33808, 33813, 33856, + 33888, 34048, 34118, 34196, 34313, 34368, 34400, 34818, 35076, 35345, 36868, 36880, 36900, 36928, 37025, 37142, + 37248, 37445, 37888, 37922, 37956, 38225, 39041, 39200, 40962, 41040, 41093, 41225, 41472, 42008, 43088, 43268, + }; + static const uint16_t kgrid_512[512] = { + 0, 2, 5, 8, 10, 17, 20, 22, 25, 32, 34, 37, 40, 65, 68, 70, + 73, 80, 82, 85, 88, 97, 100, 128, 130, 133, 136, 145, 148, 153, 160, 257, + 260, 262, 265, 272, 274, 277, 280, 282, 289, 292, 320, 322, 325, 328, 337, 340, + 352, 360, 385, 388, 400, 512, 514, 517, 520, 529, 532, 544, 577, 580, 592, 597, + 640, 650, 1025, 1028, 1030, 1033, 1040, 1042, 1045, 1048, 1057, 1060, 1088, 1090, 1093, 1096, + 1105, 1108, 1110, 1120, 1153, 1156, 1168, 1280, 1282, 1285, 1288, 1297, 1300, 1312, 1345, 1348, + 1360, 1377, 1408, 1537, 1540, 1552, 1574, 1600, 1602, 1668, 2048, 2050, 2053, 2056, 2058, 2065, + 2068, 2080, 2085, 2113, 2116, 2128, 2136, 2176, 2208, 2218, 2305, 2308, 2320, 2368, 2433, 2441, + 2560, 2592, 2600, 2710, 2720, 4097, 4100, 4102, 4105, 4112, 4114, 4117, 4120, 4129, 4132, 4160, + 4162, 4165, 4168, 4177, 4180, 4192, 4202, 4225, 4228, 4240, 4352, 4354, 4357, 4360, 4369, 4372, + 4384, 4417, 4420, 4432, 4480, 4500, 4502, 4609, 4612, 4614, 4624, 4672, 4704, 5120, 5122, 5125, + 5128, 5137, 5140, 5152, 5185, 5188, 5193, 5200, 5220, 5248, 5377, 5380, 5392, 5440, 5632, 5652, + 5705, 6145, 6148, 6160, 6162, 6208, 6228, 6278, 6400, 6405, 6502, 6737, 6825, 8192, 8194, 8197, + 8200, 8202, 8209, 8212, 8224, 8257, 8260, 8272, 8320, 8352, 8449, 8452, 8464, 8512, 8520, 8549, + 8704, 8738, 8832, 8872, 9217, 9220, 9232, 9257, 9280, 9472, 9537, 9554, 9625, 9729, 9754, 9894, + 10240, 10248, 10250, 10272, 10325, 10376, 10402, 10600, 10640, 10760, 10784, 10882, 10888, 10890, 16385, 16388, + 16390, 16393, 16400, 16402, 16405, 16408, 16417, 16420, 16448, 16450, 16453, 16456, 16458, 16465, 16468, 16480, + 16485, 16513, 16516, 16528, 16640, 16642, 16645, 16648, 
16657, 16660, 16672, 16705, 16708, 16720, 16768, 16773, + 16802, 16897, 16900, 16912, 16914, 16937, 16960, 17408, 17410, 17413, 17416, 17425, 17428, 17433, 17440, 17473, + 17476, 17488, 17536, 17556, 17665, 17668, 17680, 17700, 17728, 17818, 17920, 17930, 17988, 18000, 18433, 18436, + 18448, 18496, 18501, 18516, 18530, 18688, 18705, 18756, 18768, 18793, 18948, 20480, 20482, 20485, 20488, 20497, + 20500, 20512, 20520, 20545, 20548, 20560, 20608, 20737, 20740, 20752, 20757, 20800, 20802, 20992, 21060, 21162, + 21505, 21508, 21520, 21537, 21568, 21600, 21633, 21665, 21760, 21768, 21888, 21896, 22049, 22120, 22177, 22528, + 22548, 22593, 22608, 22681, 22810, 22848, 22850, 23173, 24577, 24580, 24592, 24640, 24660, 24674, 24710, 24745, + 24832, 25124, 25162, 25234, 25600, 25622, 25872, 25920, 25925, 26020, 26625, 26730, 26917, 27142, 27220, 27234, + 32768, 32770, 32773, 32776, 32785, 32788, 32800, 32810, 32833, 32836, 32848, 32896, 32898, 32936, 32938, 33025, + 33028, 33030, 33040, 33088, 33105, 33113, 33280, 33312, 33408, 33410, 33440, 33448, 33793, 33796, 33808, 33810, + 33813, 33856, 33888, 33929, 34048, 34116, 34213, 34328, 34410, 34816, 34824, 34853, 34906, 34944, 34946, 34984, + 35078, 35362, 35456, 35464, 35478, 35496, 36865, 36868, 36880, 36928, 36950, 36996, 37120, 37154, 37220, 37462, + 37513, 37888, 37893, 37956, 37968, 37976, 38185, 38288, 38290, 38465, 38993, 39078, 39241, 39445, 39520, 40960, + 40962, 40968, 40970, 40992, 41002, 41120, 41297, 41305, 41382, 41472, 41474, 41480, 41514, 41600, 41632, 42048, + 42133, 42597, 42648, 43018, 43040, 43042, 43048, 43168, 43176, 43268, 43396, 43398, 43560, 43562, 43665, 43690, + }; + const int kmap_size = 43692; + const int nwant = 2; + const uint16_t * kgrid = grid_size == 256 ? kgrid_256 : kgrid_512; + uint64_t * kgrid_q2xs; + int * kmap_q2xs; + uint16_t * kneighbors_q2xs; + + printf("================================================================= %s(grid_size = %d)\n", __func__, grid_size); + uint64_t * the_grid = (uint64_t *)malloc(grid_size*sizeof(uint64_t)); + for (int k = 0; k < grid_size; ++k) { + int8_t * pos = (int8_t *)(the_grid + k); + for (int i = 0; i < 8; ++i) { + int l = (kgrid[k] >> 2*i) & 0x3; + pos[i] = 2*l + 1; + } + } + kgrid_q2xs = the_grid; + iq2_data[gindex].grid = the_grid; + kmap_q2xs = (int *)malloc(kmap_size*sizeof(int)); + iq2_data[gindex].map = kmap_q2xs; + for (int i = 0; i < kmap_size; ++i) kmap_q2xs[i] = -1; + uint64_t aux64; + uint8_t * aux8 = (uint8_t *)&aux64; + for (int i = 0; i < grid_size; ++i) { + aux64 = kgrid_q2xs[i]; + uint16_t index = 0; + for (int k=0; k<8; ++k) { + uint16_t q = (aux8[k] - 1)/2; + index |= (q << 2*k); + } + kmap_q2xs[index] = i; + } + int8_t pos[8]; + int * dist2 = (int *)malloc(2*grid_size*sizeof(int)); + int num_neighbors = 0, num_not_in_map = 0; + for (int i = 0; i < kmap_size; ++i) { + if (kmap_q2xs[i] >= 0) continue; + ++num_not_in_map; + for (int k = 0; k < 8; ++k) { + int l = (i >> 2*k) & 0x3; + pos[k] = 2*l + 1; + } + for (int j = 0; j < grid_size; ++j) { + const int8_t * pg = (const int8_t *)(kgrid_q2xs + j); + int d2 = 0; + for (int k = 0; k < 8; ++k) d2 += (pg[k] - pos[k])*(pg[k] - pos[k]); + dist2[2*j+0] = d2; + dist2[2*j+1] = j; + } + qsort(dist2, grid_size, 2*sizeof(int), iq2_compare_func); + int n = 0; int d2 = dist2[0]; + int nhave = 1; + for (int j = 0; j < grid_size; ++j) { + if (dist2[2*j] > d2) { + if (nhave == nwant) break; + d2 = dist2[2*j]; + ++nhave; + } + ++n; + } + num_neighbors += n; + } + printf("%s: %d neighbours in total\n", __func__, num_neighbors); 
+ kneighbors_q2xs = (uint16_t *)malloc((num_neighbors + num_not_in_map)*sizeof(uint16_t)); + iq2_data[gindex].neighbours = kneighbors_q2xs; + int counter = 0; + for (int i = 0; i < kmap_size; ++i) { + if (kmap_q2xs[i] >= 0) continue; + for (int k = 0; k < 8; ++k) { + int l = (i >> 2*k) & 0x3; + pos[k] = 2*l + 1; + } + for (int j = 0; j < grid_size; ++j) { + const int8_t * pg = (const int8_t *)(kgrid_q2xs + j); + int d2 = 0; + for (int k = 0; k < 8; ++k) d2 += (pg[k] - pos[k])*(pg[k] - pos[k]); + dist2[2*j+0] = d2; + dist2[2*j+1] = j; + } + qsort(dist2, grid_size, 2*sizeof(int), iq2_compare_func); + kmap_q2xs[i] = -(counter + 1); + int d2 = dist2[0]; + uint16_t * start = &kneighbors_q2xs[counter++]; + int n = 0, nhave = 1; + for (int j = 0; j < grid_size; ++j) { + if (dist2[2*j] > d2) { + if (nhave == nwant) break; + d2 = dist2[2*j]; + ++nhave; + } + kneighbors_q2xs[counter++] = dist2[2*j+1]; + ++n; + } + *start = n; + } + free(dist2); +} + +void ggml_init_iq2_quantization(enum ggml_type type) { + if (type == GGML_TYPE_IQ2_XXS) { + q2xs_init_impl(256); + } + else if (type == GGML_TYPE_IQ2_XS) { + q2xs_init_impl(512); + } + else { + fprintf(stderr, "======================== Why are you calling %s with type %d?\n", __func__, (int)type); + } +} + +static void q2xs_deinit_impl(int grid_size) { + GGML_ASSERT(grid_size == 256 || grid_size == 512 || grid_size == 1024); + const int gindex = iq2_data_index(grid_size); + if (iq2_data[gindex].grid) { + free(iq2_data[gindex].grid); iq2_data[gindex].grid = NULL; + free(iq2_data[gindex].map); iq2_data[gindex].map = NULL; + free(iq2_data[gindex].neighbours); iq2_data[gindex].neighbours = NULL; + } +} + +void ggml_deinit_iq2_quantization(enum ggml_type type) { + if (type == GGML_TYPE_IQ2_XXS) { + q2xs_deinit_impl(256); + } + else if (type == GGML_TYPE_IQ2_XS) { + q2xs_deinit_impl(512); + } + else { + fprintf(stderr, "======================== Why are you calling %s with type %d?\n", __func__, (int)type); + } +} + +static int iq2_find_best_neighbour(const uint16_t * restrict neighbours, const uint64_t * restrict grid, + const float * restrict xval, const float * restrict weight, float scale, int8_t * restrict L) { + int num_neighbors = neighbours[0]; + GGML_ASSERT(num_neighbors > 0); + float best_d2 = FLT_MAX; + int grid_index = -1; + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float d2 = 0; + for (int i = 0; i < 8; ++i) { + float q = pg[i]; + float diff = scale*q - xval[i]; + d2 += weight[i]*diff*diff; + } + if (d2 < best_d2) { + best_d2 = d2; grid_index = neighbours[j]; + } + } + GGML_ASSERT(grid_index >= 0); + const int8_t * pg = (const int8_t *)(grid + grid_index); + for (int i = 0; i < 8; ++i) L[i] = (pg[i] - 1)/2; + return grid_index; +} + +static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(256); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(quant_weights); + GGML_ASSERT(kgrid_q2xs); + GGML_ASSERT(kmap_q2xs); + GGML_ASSERT(kneighbors_q2xs); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 3; + + const int nbl = n/256; + + block_iq2_xxs * y = vy; + + float scales[QK_K/32]; + float weight[32]; + float xval[32]; + int8_t L[32]; + int8_t Laux[32]; + float waux[32]; + bool is_on_grid[4]; + bool is_on_grid_aux[4]; + uint8_t block_signs[4]; + 
uint32_t q2[2*(QK_K/32)]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + y[ibl].d = GGML_FP32_TO_FP16(0.f); + memset(q2, 0, QK_K/4); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = sumx2/QK_K; + + for (int ib = 0; ib < QK_K/32; ++ib) { + const float * xb = xbl + 32*ib; + const float * qw = quant_weights + QK_K*ibl + 32*ib; + for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + for (int i = 0; i < 32; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 4; ++k) { + int nflip = 0; + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; ++nflip; s |= (1 << i); + } + } + if (nflip%2) { + int imin = 0; float min = weight[8*k+imin]*xb[8*k+imin]*xb[8*k+imin]; + for (int i = 1; i < 8; ++i) { + float ax = weight[8*k+i]*xb[8*k+i]*xb[8*k+i]; + if (ax < min) { + min = ax; imin = i; + } + } + xval[8*k+imin] = -xval[8*k+imin]; + s ^= (1 << imin); + } + block_signs[k] = s & 127; + } + float max = xval[0]; + for (int i = 1; i < 32; ++i) max = MAX(max, xval[i]); + if (!max) { + scales[ib] = 0; + memset(L, 0, 32); + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + for (int is = -9; is <= 9; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/max; + float this_scale = 1/id; + for (int k = 0; k < 4; ++k) { + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + Laux[8*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 32; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < 32; ++i) L[i] = Laux[i]; + for (int k = 0; k < 4; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < 4; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < 4; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 2*i); + } + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, scale, L + 8*k); + } + const int8_t * pg = (const int8_t *)(kgrid_q2xs + grid_index); + for (int i = 0; i < 8; ++i) L[8*k+i] = (pg[i] - 1)/2; + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 32; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + // This should never happen, but just in case, flip scale so that it is positive (we use uint's to encode the scale) + // and correspondingly flip quant signs. 
+ scale = -scale; + for (int k = 0; k < 4; ++k) block_signs[k] = (~block_signs[k]) & 127; + } + for (int k = 0; k < 4; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (L[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 8; ++i) printf(" %d", L[8*k+i]); + printf("\n"); + GGML_ASSERT(false); + } + q2[2*ib+0] |= (grid_index << 8*k); + q2[2*ib+1] |= (block_signs[k] << 7*k); + } + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(y[ibl].qs, 0, QK_K/4); + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d); + float id = 1/d; + float sumqx = 0, sumq2 = 0; + for (int ib = 0; ib < QK_K/32; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + q2[2*ib+1] |= ((uint32_t)l << 28); + const float * xb = xbl + 32*ib; + const float * qw = quant_weights + QK_K*ibl + 32*ib; + for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + const uint8_t * aux8 = (const uint8_t *)(q2 + 2*ib); + const float db = d * (1 + 2*l); + uint32_t u = 0; + for (int k = 0; k < 4; ++k) { + const int8_t * signs = keven_signs_q2xs + 8*((q2[2*ib+1] >> 7*k) & 127); + const float * xk = xb + 8*k; + const float * wk = weight + 8*k; + const uint8_t * grid = (const uint8_t *)(kgrid_q2xs + aux8[k]); + float best_mse = 0; int best_index = aux8[k]; + for (int j = 0; j < 8; ++j) { + float diff = db * grid[j] * signs[j] - xk[j]; + best_mse += wk[j] * diff * diff; + } + for (int idx = 0; idx < 256; ++idx) { + grid = (const uint8_t *)(kgrid_q2xs + idx); + float mse = 0; + for (int j = 0; j < 8; ++j) { + float diff = db * grid[j] * signs[j] - xk[j]; + mse += wk[j] * diff * diff; + } + if (mse < best_mse) { + best_mse = mse; best_index = idx; + } + } + u |= (best_index << 8*k); + grid = (const uint8_t *)(kgrid_q2xs + best_index); + //grid = (const uint8_t *)(kgrid_q2xs + aux8[k]); + for (int j = 0; j < 8; ++j) { + float q = db * grid[j] * signs[j]; + sumqx += wk[j] * q * xk[j]; + sumq2 += wk[j] * q * q; + } + } + q2[2*ib] = u; + if (sumq2 > 0) y[ibl].d = GGML_FP32_TO_FP16(d*sumqx/sumq2); + } + memcpy(y[ibl].qs, q2, QK_K/4); + } +} + +static void quantize_row_iq2_xs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { + + const int gindex = iq2_data_index(512); + + const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; + const int * kmap_q2xs = iq2_data[gindex].map; + const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours; + + GGML_ASSERT(quant_weights); + GGML_ASSERT(kmap_q2xs); + GGML_ASSERT(kgrid_q2xs); + GGML_ASSERT(kneighbors_q2xs); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 3; + + const int nbl = n/256; + + block_iq2_xs * y = vy; + + float scales[QK_K/16]; + float weight[16]; + float xval[16]; + int8_t L[16]; + int8_t Laux[16]; + float waux[16]; + bool is_on_grid[2]; + bool is_on_grid_aux[2]; + uint8_t block_signs[2]; + uint16_t q2[2*(QK_K/16)]; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + y[ibl].d = GGML_FP32_TO_FP16(0.f); + memset(q2, 0, QK_K/4); + memset(y[ibl].scales, 0, QK_K/32); + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = sumx2/QK_K; + + for (int ib = 0; ib < QK_K/16; ++ib) { + const float * xb = xbl + 16*ib; + const float * qw = quant_weights + QK_K*ibl + 16*ib; + for (int i = 0; i < 16; ++i) weight[i] = qw[i] * 
sqrtf(sigma2 + xb[i]*xb[i]); + for (int i = 0; i < 16; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < 2; ++k) { + int nflip = 0; + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; ++nflip; s |= (1 << i); + } + } + if (nflip%2) { + int imin = 0; float min = weight[8*k+imin]*xb[8*k+imin]*xb[8*k+imin]; + for (int i = 1; i < 8; ++i) { + float ax = weight[8*k+i]*xb[8*k+i]*xb[8*k+i]; + if (ax < min) { + min = ax; imin = i; + } + } + xval[8*k+imin] = -xval[8*k+imin]; + s ^= (1 << imin); + } + block_signs[k] = s & 127; + } + float max = xval[0]; + for (int i = 1; i < 16; ++i) max = MAX(max, xval[i]); + if (!max) { + scales[ib] = 0; + memset(L, 0, 16); + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + is_on_grid[0] = is_on_grid[1] = true; + for (int is = -9; is <= 9; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/max; + float this_scale = 1/id; + for (int k = 0; k < 2; ++k) { + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + Laux[8*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < 16; ++i) L[i] = Laux[i]; + for (int k = 0; k < 2; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < 2; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < 2; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 8; ++i) { + int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 2*i); + L[8*k + i] = l; + } + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, scale, L + 8*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 16; ++i) { + float w = weight[i]; + float q = 2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + scale = -scale; + for (int k = 0; k < 2; ++k) block_signs[k] = (~block_signs[k]) & 127; + } + for (int k = 0; k < 2; ++k) { + uint16_t u = 0; + for (int i = 0; i < 8; ++i) u |= (L[8*k+i] << 2*i); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 8; ++i) printf(" %d", L[8*k+i]); + printf("\n"); + GGML_ASSERT(false); + } + q2[2*ib+k] = grid_index | (block_signs[k] << 9); + } + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(y[ibl].qs, 0, QK_K/4); + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d); + float id = 1/d; + for (int ib = 0; ib < QK_K/16; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(15, l)); + if (ib%2 == 0) y[ibl].scales[ib/2] = 
l; + else y[ibl].scales[ib/2] |= (l << 4); + } + memcpy(y[ibl].qs, q2, QK_K/4); + + } +} + +size_t quantize_iq2_xxs(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq2_xxs_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq2_xxs); + } + return nrow * nblock * sizeof(block_iq2_xxs); +} + +size_t quantize_iq2_xs(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq2_xs_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq2_xs); + } + return nrow * nblock * sizeof(block_iq2_xs); +} + diff --git a/ggml-quants.h b/ggml-quants.h index df5e7ae80..e5d110230 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -196,8 +196,6 @@ void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y, int k); void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k); void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k); -void quantize_row_iq2_xxs_reference(const float * restrict x, block_iq2_xxs * restrict y, int k); -void quantize_row_iq2_xs_reference (const float * restrict x, block_iq2_xs * restrict y, int k); void quantize_row_q4_0(const float * restrict x, void * restrict y, int k); void quantize_row_q4_1(const float * restrict x, void * restrict y, int k); @@ -212,8 +210,6 @@ void quantize_row_q4_K(const float * restrict x, void * restrict y, int k); void quantize_row_q5_K(const float * restrict x, void * restrict y, int k); void quantize_row_q6_K(const float * restrict x, void * restrict y, int k); void quantize_row_q8_K(const float * restrict x, void * restrict y, int k); -void quantize_row_iq2_xxs(const float * restrict x, void * restrict y, int k); -void quantize_row_iq2_xs (const float * restrict x, void * restrict y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k); @@ -246,3 +242,11 @@ void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); void ggml_vec_dot_iq2_xs_q8_K (int n, float * restrict s, const void * restrict vx, const void * restrict vy); + +// +// Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") +// +size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); + diff --git a/ggml.c b/ggml.c index bcfb6652c..52467475a 100644 --- a/ggml.c +++ b/ggml.c @@ -585,8 +585,8 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .type_size = sizeof(block_iq2_xxs), .is_quantized = true, .to_float = (ggml_to_float_t) dequantize_row_iq2_xxs, - .from_float = quantize_row_iq2_xxs, - .from_float_reference = (ggml_from_float_t) quantize_row_iq2_xxs_reference, + .from_float = NULL, + .from_float_reference = NULL, .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, }, @@ -596,8 +596,8 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .type_size = sizeof(block_iq2_xs), .is_quantized = true, .to_float = (ggml_to_float_t) dequantize_row_iq2_xs, - .from_float = quantize_row_iq2_xs, - .from_float_reference = (ggml_from_float_t) quantize_row_iq2_xs_reference, + .from_float = NULL, + .from_float_reference = NULL, .vec_dot = ggml_vec_dot_iq2_xs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, }, @@ -18665,8 +18665,11 @@ size_t ggml_quantize_q8_0(const float * src, void * dst, int n, int k, int64_t * return (n/QK8_0*sizeof(block_q8_0)); } -size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist) { +size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, + int nrows, int n_per_row, int64_t * hist, const float * imatrix) { + (void)imatrix; size_t result = 0; + int n = nrows * n_per_row; switch (type) { case GGML_TYPE_Q4_0: { @@ -18701,8 +18704,11 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i case GGML_TYPE_Q2_K: { GGML_ASSERT(start % QK_K == 0); - block_q2_K * block = (block_q2_K*)dst + start / QK_K; - result = ggml_quantize_q2_K(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q2_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_Q3_K: { @@ -18731,14 +18737,22 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i case GGML_TYPE_IQ2_XXS: { GGML_ASSERT(start % QK_K == 0); - block_iq2_xxs * block = (block_iq2_xxs*)dst + start / QK_K; - result = ggml_quantize_iq2_xxs(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + GGML_ASSERT(imatrix); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq2_xxs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_IQ2_XS: { GGML_ASSERT(start % QK_K == 0); - block_iq2_xs * block = (block_iq2_xs*)dst + start / QK_K; - result = ggml_quantize_iq2_xs(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + GGML_ASSERT(imatrix); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq2_xs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + 
GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_F16: { diff --git a/ggml.h b/ggml.h index b18ba7812..1187074f7 100644 --- a/ggml.h +++ b/ggml.h @@ -2067,10 +2067,13 @@ extern "C" { GGML_API size_t ggml_quantize_q4_K(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q5_K(const float * src, void * dst, int n, int k, int64_t * hist); GGML_API size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * hist); - GGML_API size_t ggml_quantize_iq2_xxs(const float * src, void * dst, int n, int k, int64_t * hist); - GGML_API size_t ggml_quantize_iq2_xs (const float * src, void * dst, int n, int k, int64_t * hist); - GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, int n, int64_t * hist); + GGML_API size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, + int start, int nrows, int n_per_row, int64_t * hist, const float * imatrix); + + // These are needed for IQ2_XS and IQ2_XXS quantizations + GGML_API void ggml_init_iq2_quantization(enum ggml_type type); + GGML_API void ggml_deinit_iq2_quantization(enum ggml_type type); // // Importance matrix diff --git a/llama.cpp b/llama.cpp index 8e20e72a2..107b05114 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8429,9 +8429,23 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) { new_type = GGML_TYPE_Q8_0; } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) { + new_type = GGML_TYPE_Q5_K; + } else if (new_type != GGML_TYPE_Q8_0) { new_type = GGML_TYPE_Q6_K; } + } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) { + if (name.find("attn_v.weight") != std::string::npos) { + if (qs.model.hparams.n_gqa() >= 4 || qs.model.hparams.n_expert >= 4) new_type = GGML_TYPE_Q4_K; + else new_type = GGML_TYPE_Q2_K; + ++qs.i_attention_wv; + } + else if (name.find("ffn_down") != std::string::npos) { + if (qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) new_type = GGML_TYPE_Q2_K; + ++qs.i_feed_forward_w2; + } + else if (name == "token_embd.weight") new_type = GGML_TYPE_Q2_K; } else if (name.find("attn_v.weight") != std::string::npos) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { @@ -8601,6 +8615,13 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (params->only_copy) { ftype = model.ftype; } + const std::unordered_map> * imatrix_data = nullptr; + if (params->imatrix) { + imatrix_data = static_cast>*>(params->imatrix); + if (imatrix_data) { + printf("================================ Have weights data with %d entries\n",int(imatrix_data->size())); + } + } const size_t align = GGUF_DEFAULT_ALIGNMENT; struct gguf_context * ctx_out = gguf_init_empty(); @@ -8658,6 +8679,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // placeholder for the meta data ::zeros(fout, meta_size); + std::set used_iq2; + for (int i = 0; i < ml.n_tensors; ++i) { struct ggml_tensor * tensor = ml.get_tensor_meta(i); @@ -8710,6 +8733,35 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s } else { const size_t nelements = ggml_nelements(tensor); + if ((new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_XS) && used_iq2.find(new_type) == used_iq2.end()) { + ggml_init_iq2_quantization(new_type); + used_iq2.insert(new_type); + } + + const float * 
imatrix = nullptr; + if (imatrix_data) { + auto it = imatrix_data->find(tensor->name); + if (it == imatrix_data->end()) { + printf("\n====== %s: did not find weights for %s\n", __func__, tensor->name); + } else { + if (it->second.size() == (size_t)tensor->ne[0]) { + imatrix = it->second.data(); + } else { + printf("\n====== %s: imatrix size %d is different from tensor size %d for %s\n", __func__, + int(it->second.size()), int(tensor->ne[0]), tensor->name); + } + } + } + if ((new_type == GGML_TYPE_IQ2_XXS || + new_type == GGML_TYPE_IQ2_XS || + (new_type == GGML_TYPE_Q2_K && params->ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && strcmp(tensor->name, "token_embd.weight") != 0)) && !imatrix) { + fprintf(stderr, "\n\n============================================================\n"); + fprintf(stderr, "Missing importance matrix for tensor %s in a very low-bit quantization\n", tensor->name); + fprintf(stderr, "The result will be garbage, so bailing out\n"); + fprintf(stderr, "============================================================\n\n"); + throw std::runtime_error(format("Missing importance matrix for tensor %s in a very low-bit quantization", tensor->name)); + } + float * f32_data; if (tensor->type == GGML_TYPE_F32) { @@ -8730,21 +8782,28 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s new_data = work.data(); std::array hist_cur = {}; - static const int chunk_size = 32 * 512; + const int n_per_row = tensor->ne[0]; + const int nrows = nelements / n_per_row; + + static const int min_chunk_size = 32 * 512; + const int chunk_size = n_per_row >= min_chunk_size ? n_per_row : n_per_row * ((min_chunk_size + n_per_row - 1)/n_per_row); + const int nchunk = (nelements + chunk_size - 1)/chunk_size; const int nthread_use = nthread > 1 ? 
std::max(1, std::min(nthread, nchunk)) : 1; if (nthread_use < 2) { - new_size = ggml_quantize_chunk(new_type, f32_data, new_data, 0, nelements, hist_cur.data()); + new_size = ggml_quantize_chunk(new_type, f32_data, new_data, 0, nrows, n_per_row, hist_cur.data(), imatrix); } else { - size_t counter = 0; + int counter = 0; new_size = 0; - auto compute = [&mutex, &counter, &hist_cur, &new_size, new_type, f32_data, new_data, nelements]() { + auto compute = [&mutex, &counter, &hist_cur, &new_size, new_type, f32_data, new_data, chunk_size, + nrows, n_per_row, imatrix]() { std::array local_hist = {}; + const int nrows_per_chunk = chunk_size / n_per_row; size_t local_size = 0; while (true) { std::unique_lock lock(mutex); - size_t first = counter; counter += chunk_size; - if (first >= nelements) { + int first_row = counter; counter += nrows_per_chunk; + if (first_row >= nrows) { if (local_size > 0) { for (int j=0; j %8.2f MiB | hist: ", ggml_nbytes(tensor)/1024.0/1024.0, new_size/1024.0/1024.0); + LLAMA_LOG_INFO("size = %8.2f MiB -> %8.2f MiB", ggml_nbytes(tensor)/1024.0/1024.0, new_size/1024.0/1024.0); int64_t tot_count = 0; for (size_t i = 0; i < hist_cur.size(); i++) { hist_all[i] += hist_cur[i]; @@ -8774,6 +8834,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s } if (tot_count > 0) { + LLAMA_LOG_INFO(" | hist: "); for (size_t i = 0; i < hist_cur.size(); i++) { LLAMA_LOG_INFO("%5.3f ", hist_cur[i] / float(nelements)); } @@ -8802,6 +8863,10 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s fout.close(); + for (auto type : used_iq2) { + ggml_deinit_iq2_quantization(type); + } + gguf_free(ctx_out); LLAMA_LOG_INFO("%s: model size = %8.2f MB\n", __func__, total_size_org/1024.0/1024.0); @@ -9166,6 +9231,7 @@ struct llama_model_quantize_params llama_model_quantize_default_params() { /*.quantize_output_tensor =*/ true, /*.only_copy =*/ false, /*.pure =*/ false, + /*.imatrix =*/ nullptr, }; return result; diff --git a/llama.h b/llama.h index 01d6fafaa..79c8335b6 100644 --- a/llama.h +++ b/llama.h @@ -249,6 +249,7 @@ extern "C" { bool quantize_output_tensor; // quantize output.weight bool only_copy; // only copy tensors - ftype, allow_requantize and quantize_output_tensor are ignored bool pure; // disable k-quant mixtures and quantize all tensors to the same type + void * imatrix; // pointer to importance matrix data } llama_model_quantize_params; // grammar types diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index d9b8b106a..22a7856d4 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -56,7 +56,7 @@ static void init_tensor_uniform(ggml_tensor * tensor, float min = -1.0f, float m GGML_ASSERT(size % ggml_blck_size(tensor->type) == 0); std::vector dataq(ggml_row_size(tensor->type, size)); int64_t hist[16]; - ggml_quantize_chunk(tensor->type, data.data(), dataq.data(), 0, size, hist); + ggml_quantize_chunk(tensor->type, data.data(), dataq.data(), 0, size/tensor->ne[0], tensor->ne[0], hist, nullptr); ggml_backend_tensor_set(tensor, dataq.data(), 0, dataq.size()); } else if (tensor->type == GGML_TYPE_I8 || tensor->type == GGML_TYPE_I16 || tensor->type == GGML_TYPE_I32) { // This is going to create some weird integers though. 
From ac32902a87147f78d63c931aa8a23dee762660e7 Mon Sep 17 00:00:00 2001 From: Karthik Kumar Viswanathan <195178+guilt@users.noreply.github.com> Date: Sun, 14 Jan 2024 00:41:44 -0800 Subject: [PATCH 466/859] llama : support WinXP build with MinGW 8.1.0 (#3419) --- CMakeLists.txt | 8 ++++++-- llama.cpp | 4 ++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 668669c6d..2741568ed 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 3.13) # for add_link_options +cmake_minimum_required(VERSION 3.14) # for add_link_options and implicit target directories. project("llama.cpp" C CXX) set(CMAKE_EXPORT_COMPILE_COMMANDS ON) @@ -76,6 +76,10 @@ if (NOT MSVC) option(LLAMA_F16C "llama: enable F16C" ${INS_ENB}) endif() +if (WIN32) + option(LLAMA_WIN_VER "llama: Windows Version" 0x602) +endif() + # 3rd party libs option(LLAMA_ACCELERATE "llama: enable Accelerate framework" ON) option(LLAMA_BLAS "llama: use BLAS" OFF) @@ -686,7 +690,7 @@ endif() if (MINGW) # Target Windows 8 for PrefetchVirtualMemory - add_compile_definitions(_WIN32_WINNT=0x602) + add_compile_definitions(_WIN32_WINNT=${LLAMA_WIN_VER}) endif() # diff --git a/llama.cpp b/llama.cpp index 107b05114..51e9bdaed 100644 --- a/llama.cpp +++ b/llama.cpp @@ -987,6 +987,7 @@ struct llama_mmap { } if (prefetch > 0) { +#if _WIN32_WINNT >= 0x602 // PrefetchVirtualMemory is only present on Windows 8 and above, so we dynamically load it BOOL (WINAPI *pPrefetchVirtualMemory) (HANDLE, ULONG_PTR, PWIN32_MEMORY_RANGE_ENTRY, ULONG); HMODULE hKernel32 = GetModuleHandleW(L"kernel32.dll"); @@ -1004,6 +1005,9 @@ struct llama_mmap { llama_format_win_err(GetLastError()).c_str()); } } +#else + throw std::runtime_error("PrefetchVirtualMemory unavailable"); +#endif } } From 5f5fe1bd608fa2ed42af97b5f2ea31be6625fc48 Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Sun, 14 Jan 2024 09:44:39 +0100 Subject: [PATCH 467/859] metal : correctly set SIMD support flags on iOS (#4923) * Correctly set support_simdgroup_reduction and support_simdgroup_mm on iPhone/iPad * log a little bit more info on iOS --- ggml-metal.m | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ggml-metal.m b/ggml-metal.m index cae52c983..2ca726055 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -330,7 +330,6 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { } } -#if TARGET_OS_OSX // print MTL GPU family: GGML_METAL_LOG_INFO("%s: GPU name: %s\n", __func__, [[ctx->device name] UTF8String]); @@ -370,6 +369,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_INFO("%s: simdgroup reduction support = %s\n", __func__, ctx->support_simdgroup_reduction ? "true" : "false"); GGML_METAL_LOG_INFO("%s: simdgroup matrix mul. support = %s\n", __func__, ctx->support_simdgroup_mm ? "true" : "false"); GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? 
"true" : "false"); +#if TARGET_OS_OSX GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); if (ctx->device.maxTransferRate != 0) { GGML_METAL_LOG_INFO("%s: maxTransferRate = %8.2f MB/s\n", __func__, ctx->device.maxTransferRate / 1e6); From a128c38de862431f1aae9ccc40b792fbc1b8b682 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 14 Jan 2024 10:53:39 +0200 Subject: [PATCH 468/859] Fix ffn_down quantization mix for MoE models (#4927) * Fix ffn_down quantization mix for MoE models In #4872 I did not consider the part where every third tensor is quantized with more bits. Fir MoE this leads to tensors of the same layer being quantized with different number of bits, which is not considered as a possibility in the inference implementation (it is assumed all experts use the same quantization). * Fix the fix * Review suggestion --------- Co-authored-by: Iwan Kawrakow --- llama.cpp | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index 51e9bdaed..b1d6015e2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8480,13 +8480,31 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty new_type = GGML_TYPE_Q8_0; } } else if (name.find("ffn_down") != std::string::npos) { + const int n_expert = std::max(1, (int)qs.model.hparams.n_expert); + int i_layer, n_layer; + if (n_expert == 1) { + i_layer = qs.i_feed_forward_w2; + n_layer = qs.n_feed_forward_w2; + } else { + // Believe it or not, "experts" in the FFN of Mixtral-8x7B are not consecutive, but iccasionally randomly + // sprinkled in the model. Hence, simply dividing i_feed_forward_w2 by n_expert does not work + // for getting the current layer as I initially thought, and we need to resort to parsing the + // tensor name. + n_layer = qs.n_feed_forward_w2 / n_expert; + if (sscanf(name.c_str(), "blk.%d.ffn_down", &i_layer) != 1) { + throw std::runtime_error(format("Failed to determine layer for tensor %s", name.c_str())); + } + if (i_layer < 0 || i_layer >= n_layer) { + throw std::runtime_error(format("Bad layer %d for tensor %s. Must be in [0, %d)", i_layer, name.c_str(), n_layer)); + } + } if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) { - if (qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) new_type = GGML_TYPE_Q4_K; + if (i_layer < n_layer/8) new_type = GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { - new_type = qs.i_feed_forward_w2 < qs.n_feed_forward_w2/16 ? GGML_TYPE_Q5_K - : arch != LLM_ARCH_FALCON || use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? GGML_TYPE_Q4_K + new_type = i_layer < n_layer/16 ? GGML_TYPE_Q5_K + : arch != LLM_ARCH_FALCON || use_more_bits(i_layer, n_layer) ? GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) { @@ -8494,14 +8512,14 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { if (arch == LLM_ARCH_FALCON) { - new_type = qs.i_feed_forward_w2 < qs.n_feed_forward_w2/16 ? GGML_TYPE_Q6_K : - use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2) ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; + new_type = i_layer < n_layer/16 ? GGML_TYPE_Q6_K : + use_more_bits(i_layer, n_layer) ? 
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else { - if (use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; + if (use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; } } - else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(qs.i_feed_forward_w2, qs.n_feed_forward_w2)) new_type = GGML_TYPE_Q6_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && qs.i_feed_forward_w2 < qs.n_feed_forward_w2/8) { + else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && i_layer < n_layer/8) { new_type = GGML_TYPE_Q5_K; } ++qs.i_feed_forward_w2; From 03c526749041c863b0cd842b26b8907e1ea0e0b1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 14 Jan 2024 11:03:19 +0200 Subject: [PATCH 469/859] llama : use LLAMA_LOG_ macros for logging --- llama.cpp | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/llama.cpp b/llama.cpp index b1d6015e2..51821965e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1114,7 +1114,7 @@ struct llama_mlock { suggest = false; } - fprintf(stderr, "warning: failed to mlock %zu-byte buffer (after previously locking %zu bytes): %s\n%s", + LLAMA_LOG_WARN("warning: failed to mlock %zu-byte buffer (after previously locking %zu bytes): %s\n%s", size, this->size, errmsg, suggest ? MLOCK_SUGGESTION : ""); return false; } @@ -1123,7 +1123,7 @@ struct llama_mlock { static void raw_unlock(void * addr, size_t size) { if (munlock(addr, size)) { - fprintf(stderr, "warning: failed to munlock buffer: %s\n", std::strerror(errno)); + LLAMA_LOG_WARN("warning: failed to munlock buffer: %s\n", std::strerror(errno)); } } #elif defined(_WIN32) @@ -1141,7 +1141,7 @@ struct llama_mlock { return true; } if (tries == 2) { - fprintf(stderr, "warning: failed to VirtualLock %zu-byte buffer (after previously locking %zu bytes): %s\n", + LLAMA_LOG_WARN("warning: failed to VirtualLock %zu-byte buffer (after previously locking %zu bytes): %s\n", len, size, llama_format_win_err(GetLastError()).c_str()); return false; } @@ -1150,7 +1150,7 @@ struct llama_mlock { // set size and try again. 
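// [Editor's note] The LLAMA_LOG_* family this patch switches to is defined elsewhere in
// llama.cpp; a minimal sketch of the usual shape (hedged: llama_log_internal and the level
// enum are assumed from context, not shown in this diff):
//
//     #define LLAMA_LOG_INFO(...)  llama_log_internal(GGML_LOG_LEVEL_INFO , __VA_ARGS__)
//     #define LLAMA_LOG_WARN(...)  llama_log_internal(GGML_LOG_LEVEL_WARN , __VA_ARGS__)
//     #define LLAMA_LOG_ERROR(...) llama_log_internal(GGML_LOG_LEVEL_ERROR, __VA_ARGS__)
//
// Funneling everything through one variadic entry point lets an embedding application
// install a single log callback instead of scraping stdout/stderr, which is the point of
// replacing the raw fprintf/printf calls in this patch.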
SIZE_T min_ws_size, max_ws_size; if (!GetProcessWorkingSetSize(GetCurrentProcess(), &min_ws_size, &max_ws_size)) { - fprintf(stderr, "warning: GetProcessWorkingSetSize failed: %s\n", + LLAMA_LOG_WARN("warning: GetProcessWorkingSetSize failed: %s\n", llama_format_win_err(GetLastError()).c_str()); return false; } @@ -1163,7 +1163,7 @@ struct llama_mlock { min_ws_size += increment; max_ws_size += increment; if (!SetProcessWorkingSetSize(GetCurrentProcess(), min_ws_size, max_ws_size)) { - fprintf(stderr, "warning: SetProcessWorkingSetSize failed: %s\n", + LLAMA_LOG_WARN("warning: SetProcessWorkingSetSize failed: %s\n", llama_format_win_err(GetLastError()).c_str()); return false; } @@ -1172,7 +1172,7 @@ struct llama_mlock { static void raw_unlock(void * ptr, size_t len) { if (!VirtualUnlock(ptr, len)) { - fprintf(stderr, "warning: failed to VirtualUnlock buffer: %s\n", + LLAMA_LOG_WARN("warning: failed to VirtualUnlock buffer: %s\n", llama_format_win_err(GetLastError()).c_str()); } } @@ -1184,7 +1184,7 @@ struct llama_mlock { } bool raw_lock(const void * addr, size_t len) const { - fprintf(stderr, "warning: mlock not supported on this system\n"); + LLAMA_LOG_WARN("warning: mlock not supported on this system\n"); return false; } @@ -2085,13 +2085,13 @@ namespace GGUFMeta { __func__, override_type_to_str(override->tag), override->key); switch (override->tag) { case LLAMA_KV_OVERRIDE_BOOL: { - printf("%s\n", override->bool_value ? "true" : "false"); + LLAMA_LOG_INFO("%s\n", override->bool_value ? "true" : "false"); } break; case LLAMA_KV_OVERRIDE_INT: { - printf("%" PRId64 "\n", override->int_value); + LLAMA_LOG_INFO("%" PRId64 "\n", override->int_value); } break; case LLAMA_KV_OVERRIDE_FLOAT: { - printf("%.6f\n", override->float_value); + LLAMA_LOG_INFO("%.6f\n", override->float_value); } break; default: // Shouldn't be possible to end up here, but just in case... 
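// [Editor's note] The kv-override values logged above normally come from the command line;
// an assumed invocation using the KEY=TYPE:VALUE syntax of the --override-kv flag from this
// era of the codebase (the key name is illustrative):
//
//     ./main -m model.gguf --override-kv llama.expert_used_count=int:2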
@@ -6993,7 +6993,7 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< if (match + special_token.length() > raw_text_base_offset + raw_text_base_length) break; #ifdef PRETOKENIZERDEBUG - fprintf(stderr, "FF: (%ld %ld %ld) '%s'\n", raw_text->length(), raw_text_base_offset, raw_text_base_length, raw_text->substr(raw_text_base_offset, raw_text_base_length).c_str()); + LLAMA_LOG_WARN("FF: (%ld %ld %ld) '%s'\n", raw_text->length(), raw_text_base_offset, raw_text_base_length, raw_text->substr(raw_text_base_offset, raw_text_base_length).c_str()); #endif auto source = std::distance(buffer.begin(), it); @@ -7006,7 +7006,7 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< buffer.emplace_after(it, (*raw_text), left_reminder_offset, left_reminder_length); #ifdef PRETOKENIZERDEBUG - fprintf(stderr, "FL: (%ld %ld) '%s'\n", left_reminder_offset, left_reminder_length, raw_text->substr(left_reminder_offset, left_reminder_length).c_str()); + LLAMA_LOG_WARN("FL: (%ld %ld) '%s'\n", left_reminder_offset, left_reminder_length, raw_text->substr(left_reminder_offset, left_reminder_length).c_str()); #endif it++; } @@ -7022,7 +7022,7 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< buffer.emplace_after(it, (*raw_text), right_reminder_offset, right_reminder_length); #ifdef PRETOKENIZERDEBUG - fprintf(stderr, "FR: (%ld %ld) '%s'\n", right_reminder_offset, right_reminder_length, raw_text->substr(right_reminder_offset, right_reminder_length).c_str()); + LLAMA_LOG_WARN("FR: (%ld %ld) '%s'\n", right_reminder_offset, right_reminder_length, raw_text->substr(right_reminder_offset, right_reminder_length).c_str()); #endif it++; @@ -7038,7 +7038,7 @@ static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list< raw_text_base_length = right_reminder_length; #ifdef PRETOKENIZERDEBUG - fprintf(stderr, "RR: (%ld %ld) '%s'\n", raw_text_base_offset, raw_text_base_length, raw_text->substr(raw_text_base_offset, raw_text_base_length).c_str()); + LLAMA_LOG_WARN("RR: (%ld %ld) '%s'\n", raw_text_base_offset, raw_text_base_length, raw_text->substr(raw_text_base_offset, raw_text_base_length).c_str()); #endif } else { if (source == 0) { @@ -7095,7 +7095,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & } #ifdef PRETOKENIZERDEBUG - fprintf(stderr,"TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); + LLAMA_LOG_WARN(TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); #endif llm_tokenizer_spm tokenizer(vocab); llama_escape_whitespace(raw_text); @@ -7116,7 +7116,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); #ifdef PRETOKENIZERDEBUG - fprintf(stderr,"TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); + LLAMA_LOG_WARN(TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); #endif llm_tokenizer_bpe tokenizer(vocab); tokenizer.tokenize(raw_text, output); @@ -8641,7 +8641,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (params->imatrix) { imatrix_data = static_cast>*>(params->imatrix); if (imatrix_data) { - printf("================================ Have weights data with %d entries\n",int(imatrix_data->size())); + LLAMA_LOG_INFO("================================ Have weights data with %d 
entries\n",int(imatrix_data->size())); } } @@ -8764,12 +8764,12 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if (imatrix_data) { auto it = imatrix_data->find(tensor->name); if (it == imatrix_data->end()) { - printf("\n====== %s: did not find weights for %s\n", __func__, tensor->name); + LLAMA_LOG_INFO("\n====== %s: did not find weights for %s\n", __func__, tensor->name); } else { if (it->second.size() == (size_t)tensor->ne[0]) { imatrix = it->second.data(); } else { - printf("\n====== %s: imatrix size %d is different from tensor size %d for %s\n", __func__, + LLAMA_LOG_INFO("\n====== %s: imatrix size %d is different from tensor size %d for %s\n", __func__, int(it->second.size()), int(tensor->ne[0]), tensor->name); } } @@ -8777,10 +8777,10 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s if ((new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_XS || (new_type == GGML_TYPE_Q2_K && params->ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && strcmp(tensor->name, "token_embd.weight") != 0)) && !imatrix) { - fprintf(stderr, "\n\n============================================================\n"); - fprintf(stderr, "Missing importance matrix for tensor %s in a very low-bit quantization\n", tensor->name); - fprintf(stderr, "The result will be garbage, so bailing out\n"); - fprintf(stderr, "============================================================\n\n"); + LLAMA_LOG_ERROR("\n\n============================================================\n"); + LLAMA_LOG_ERROR("Missing importance matrix for tensor %s in a very low-bit quantization\n", tensor->name); + LLAMA_LOG_ERROR("The result will be garbage, so bailing out\n"); + LLAMA_LOG_ERROR("============================================================\n\n"); throw std::runtime_error(format("Missing importance matrix for tensor %s in a very low-bit quantization", tensor->name)); } From 9408cfdad6b1c090a7e1419d4434edc260b7e47e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 14 Jan 2024 11:08:09 +0200 Subject: [PATCH 470/859] scripts : sync-ggml-am.sh option to skip commits --- scripts/sync-ggml-am.sh | 14 +++++++++++++- scripts/sync-ggml.last | 2 +- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 248cf1023..6b2514a11 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -5,7 +5,7 @@ # Usage: # # $ cd /path/to/llama.cpp -# $ ./scripts/sync-ggml-am.sh +# $ ./scripts/sync-ggml-am.sh -skip hash0,hash1,hash2... 
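# [Editor's note] Example of the new option, with placeholder hashes:
#
#   ./scripts/sync-ggml-am.sh -skip 1890780da,b306d6e99
#
# The skip test added below is a plain substring match ([[ $to_skip == *"$c"* ]]), so
# abbreviated hashes work as long as they appear in the comma-separated list.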
# set -e @@ -24,6 +24,11 @@ fi lc=$(cat $SRC_LLAMA/scripts/sync-ggml.last) echo "Syncing ggml changes since commit $lc" +to_skip="" +if [ "$1" == "-skip" ]; then + to_skip=$2 +fi + cd $SRC_GGML git log --oneline $lc..HEAD @@ -40,6 +45,13 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then fi while read c; do + if [ -n "$to_skip" ]; then + if [[ $to_skip == *"$c"* ]]; then + echo "Skipping $c" + continue + fi + fi + git format-patch -k $c~1..$c --stdout -- \ include/ggml/ggml*.h \ src/ggml*.h \ diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 753d227a7..be9e408fb 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -1890780da4ea10db88736fcde85f285abf6c64b0 +b306d6e996ec0ace77118fa5098822cdc7f9c88f From bb0c1392479398f9aba86d9ec98db0b95ede6e6d Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 14 Jan 2024 13:26:53 +0200 Subject: [PATCH 471/859] llama : check LLAMA_TRACE env for extra logging (#4929) * llama : minor fix indent * llama : check LLAMA_TRACE env for extra logging ggml-ci --- llama.cpp | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/llama.cpp b/llama.cpp index 51821965e..63f37ecdb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2190,6 +2190,11 @@ struct llama_model_loader { LLM_KV llm_kv = LLM_KV(LLM_ARCH_UNKNOWN); llama_model_loader(const std::string & fname, bool use_mmap, const struct llama_model_kv_override * param_overrides_p) : file(fname.c_str(), "rb") { + int trace = 0; + if (getenv("LLAMA_TRACE")) { + trace = atoi(getenv("LLAMA_TRACE")); + } + struct gguf_init_params params = { /*.no_alloc = */ true, /*.ctx = */ &ctx_meta, @@ -2242,11 +2247,10 @@ struct llama_model_loader { type_max = type; } - // TODO: make runtime configurable -#if 0 - struct ggml_tensor * meta = ggml_get_tensor(ctx_meta, gguf_get_tensor_name(ctx_gguf, i)); - LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, ggml_get_name(meta), ggml_type_name(type), llama_format_tensor_shape(meta).c_str()); -#endif + if (trace > 0) { + struct ggml_tensor * meta = ggml_get_tensor(ctx_meta, gguf_get_tensor_name(ctx_gguf, i)); + LLAMA_LOG_INFO("%s: - tensor %4d: %32s %-8s [ %s ]\n", __func__, i, ggml_get_name(meta), ggml_type_name(type), llama_format_tensor_shape(meta).c_str()); + } } switch (type_max) { @@ -6451,15 +6455,15 @@ static uint8_t llama_token_to_byte(const llama_vocab& vocab, llama_token id) { static llama_token llama_byte_to_token(const llama_vocab & vocab, uint8_t ch) { static const char * hex = "0123456789ABCDEF"; switch (llama_vocab_get_type(vocab)) { - case LLAMA_VOCAB_TYPE_SPM: { - const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; - return vocab.token_to_id.at(buf); - } - case LLAMA_VOCAB_TYPE_BPE: { - return vocab.token_to_id.at(bytes_to_unicode_bpe(ch)); - } - default: - GGML_ASSERT(false); + case LLAMA_VOCAB_TYPE_SPM: { + const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; + return vocab.token_to_id.at(buf); + } + case LLAMA_VOCAB_TYPE_BPE: { + return vocab.token_to_id.at(bytes_to_unicode_bpe(ch)); + } + default: + GGML_ASSERT(false); } } From 467a882fd2e5b6172897b49aa45aa29bd3f27685 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 14 Jan 2024 16:21:12 +0200 Subject: [PATCH 472/859] Add ability to use importance matrix for all k-quants (#4930) Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 2 +- ggml-quants.c | 443 ++++++++++++++++++++++++++++++++- ggml-quants.h | 5 +- ggml.c | 28 
++- 4 files changed, 462 insertions(+), 16 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index f4e2175f1..2ae046933 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -82,7 +82,7 @@ static void usage(const char * executable) { printf(" --allow-requantize: Allows requantizing tensors that have already been quantized. Warning: This can severely reduce quality compared to quantizing from 16bit or 32bit\n"); printf(" --leave-output-tensor: Will leave output.weight un(re)quantized. Increases model size but may also increase quality, especially when requantizing\n"); printf(" --pure: Disable k-quant mixtures and quantize all tensors to the same type\n"); - printf(" --imatrixfile_name: use data in file_name as importance matrix for quant optimizations\n"); + printf(" --imatrix file_name: use data in file_name as importance matrix for quant optimizations\n"); printf(" --include-weights tensor_name: use importance matrix for this/these tensor(s)\n"); printf(" --exclude-weights tensor_name: use importance matrix for this/these tensor(s)\n"); printf("Note: --include-weights and --exclude-weights cannot be used together\n"); diff --git a/ggml-quants.c b/ggml-quants.c index 9290d54cf..0750fe1bb 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -1244,7 +1244,8 @@ static inline int nearest_int(float fval) { return (i & 0x007fffff) - 0x00400000; } -static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * restrict L, int rmse_type) { +static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * restrict L, int rmse_type, + const float * restrict qw) { float max = 0; float amax = 0; for (int i = 0; i < n; ++i) { @@ -1270,14 +1271,13 @@ static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * rmse_type = -rmse_type; return_early = true; } - int weight_type = rmse_type%2; float sumlx = 0; float suml2 = 0; for (int i = 0; i < n; ++i) { int l = nearest_int(iscale * x[i]); l = MAX(-nmax, MIN(nmax-1, l)); L[i] = l + nmax; - float w = weight_type == 1 ? x[i] * x[i] : 1; + float w = qw ? qw[i] : rmse_type == 1 ? x[i] * x[i] : rmse_type == 2 ? 1 : rmse_type == 3 ? fabsf(x[i]) : sqrtf(fabsf(x[i])); sumlx += w*x[i]*l; suml2 += w*l*l; } @@ -1293,7 +1293,7 @@ static float make_qx_quants(int n, int nmax, const float * restrict x, int8_t * for (int i = 0; i < n; ++i) { int l = nearest_int(iscale * x[i]); l = MAX(-nmax, MIN(nmax-1, l)); - float w = weight_type == 1 ? x[i] * x[i] : 1; + float w = qw ? qw[i] : rmse_type == 1 ? x[i] * x[i] : rmse_type == 2 ? 1 : rmse_type == 3 ? fabsf(x[i]) : sqrtf(fabsf(x[i])); sumlx += w*x[i]*l; suml2 += w*l*l; } @@ -2089,6 +2089,112 @@ size_t ggml_quantize_q3_K(const float * restrict src, void * restrict dst, int n return (n/QK_K*sizeof(block_q3_K)); } +static void quantize_row_q3_K_impl(const float * restrict x, block_q3_K * restrict y, int n_per_row, const float * restrict quant_weights) { +#if QK_K != 256 + (void)quant_weights; + quantize_row_q3_K_reference(x, y, n_per_row); +#else + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + int8_t L[QK_K]; + float scales[QK_K / 16]; + float weight[16]; + float sw[QK_K / 16]; + int8_t Ls[QK_K / 16]; + + for (int i = 0; i < nb; i++) { + + float sumx2 = 0; + for (int j = 0; j < QK_K; ++j) sumx2 += x[j]*x[j]; + float sigma2 = 2*sumx2/QK_K; + + for (int j = 0; j < QK_K/16; ++j) { + if (quant_weights) { + const float * qw = quant_weights ? 
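// [Editor's note] The qw parameter threaded through make_qx_quants above turns the scale
// fit into weighted least squares: for fixed integer quants l_i, the scale d minimizing
// sum_i w_i*(x_i - d*l_i)^2 has the closed form the code computes,
//
//     d = (sum_i w_i*x_i*l_i) / (sum_i w_i*l_i*l_i)    // i.e. sumlx / suml2
//
// With qw == NULL the weights fall back to the rmse_type heuristics (x_i^2, 1, |x_i|,
// sqrt(|x_i|)); with an importance matrix they come from calibration data instead.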
quant_weights + QK_K * i + 16*j : NULL; + for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j+l]*x[16*j+l]); + } else { + for (int l = 0; l < 16; ++l) weight[l] = x[16*j+l]*x[16*j+l]; + } + float sumw = 0; + for (int l = 0; l < 16; ++l) sumw += weight[l]; + sw[j] = sumw; + + scales[j] = make_qx_quants(16, 4, x + 16*j, L + 16*j, 1, weight); + + } + + memset(y[i].scales, 0, 12); + + float d_block = make_qx_quants(QK_K/16, 32, scales, Ls, 1, sw); + for (int j = 0; j < QK_K/16; ++j) { + int l = Ls[j]; + if (j < 8) { + y[i].scales[j] = l & 0xF; + } else { + y[i].scales[j-8] |= ((l & 0xF) << 4); + } + l >>= 4; + y[i].scales[j%4 + 8] |= (l << (2*(j/4))); + } + y[i].d = GGML_FP32_TO_FP16(d_block); + + int8_t sc; + for (int j = 0; j < QK_K/16; ++j) { + sc = j < 8 ? y[i].scales[j] & 0xF : y[i].scales[j-8] >> 4; + sc = (sc | (((y[i].scales[8 + j%4] >> (2*(j/4))) & 3) << 4)) - 32; + float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) { + continue; + } + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int(x[16*j + ii]/d); + l = MAX(-4, MIN(3, l)); + L[16*j + ii] = l + 4; + } + } + + memset(y[i].hmask, 0, QK_K/8); + // We put the high-bit for the 1st 8 quants into bit 0, the next 8 into bit 1, etc. + int m = 0; + uint8_t hm = 1; + for (int j = 0; j < QK_K; ++j) { + if (L[j] > 3) { + y[i].hmask[m] |= hm; + L[j] -= 4; + } + if (++m == QK_K/8) { + m = 0; hm <<= 1; + } + } + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + y[i].qs[j/4 + l] = L[j + l] | (L[j + l + 32] << 2) | (L[j + l + 64] << 4) | (L[j + l + 96] << 6); + } + } + + x += QK_K; + } +#endif +} + +size_t quantize_q3_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q3_K, n_per_row); + if (!quant_weights) { + quantize_row_q3_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q3_K_impl(src, (block_q3_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + // ====================== 4-bit (de)-quantization void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict y, int k) { @@ -2254,6 +2360,108 @@ size_t ggml_quantize_q4_K(const float * restrict src, void * restrict dst, int n return (n/QK_K*sizeof(block_q4_K)); } +static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restrict y, int n_per_row, const float * quant_weights) { +#if QK_K != 256 + (void)quant_weights; + quantize_row_q4_K_reference(x, y, n_per_row); +#else + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + uint8_t L[QK_K]; + uint8_t Laux[32]; + float weights[32]; + float mins[QK_K/32]; + float scales[QK_K/32]; + + for (int i = 0; i < nb; i++) { + + float sum_x2 = 0; + for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; + float sigma2 = sum_x2/QK_K; + float av_x = sqrtf(sigma2); + + float max_scale = 0; // as we are deducting the min, scales are always positive + float max_min = 0; + for (int j = 0; j < QK_K/32; ++j) { + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 32*j; + for (int l = 0; l < 32; ++l) weights[l] = qw[l] * sqrtf(sigma2 + x[32*j + l]*x[32*j + l]); + } else { + for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); + } + scales[j] = make_qkx3_quants(32, 15, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + //scales[j] = make_qkx2_quants(32, 15, x + 32*j, 
weights, L + 32*j, &mins[j], Laux, -1.f, 0.1f, 20, false); + float scale = scales[j]; + if (scale > max_scale) { + max_scale = scale; + } + float min = mins[j]; + if (min > max_min) { + max_min = min; + } + } + + float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; + float inv_min = max_min > 0 ? 63.f/max_min : 0.f; + for (int j = 0; j < QK_K/32; ++j) { + uint8_t ls = nearest_int(inv_scale*scales[j]); + uint8_t lm = nearest_int(inv_min*mins[j]); + ls = MIN(63, ls); + lm = MIN(63, lm); + if (j < 4) { + y[i].scales[j] = ls; + y[i].scales[j+4] = lm; + } else { + y[i].scales[j+4] = (ls & 0xF) | ((lm & 0xF) << 4); + y[i].scales[j-4] |= ((ls >> 4) << 6); + y[i].scales[j-0] |= ((lm >> 4) << 6); + } + } + y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); + y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + + uint8_t sc, m; + for (int j = 0; j < QK_K/32; ++j) { + get_scale_min_k4(j, y[i].scales, &sc, &m); + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; + for (int ii = 0; ii < 32; ++ii) { + int l = nearest_int((x[32*j + ii] + dm)/d); + l = MAX(0, MIN(15, l)); + L[32*j + ii] = l; + } + } + uint8_t * q = y[i].qs; + for (int j = 0; j < QK_K; j += 64) { + for (int l = 0; l < 32; ++l) q[l] = L[j + l] | (L[j + l + 32] << 4); + q += 32; + } + + x += QK_K; + + } +#endif +} + +size_t quantize_q4_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q4_K, n_per_row); + if (!quant_weights) { + quantize_row_q4_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q4_K_impl(src, (block_q4_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + // ====================== 5-bit (de)-quantization void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y, int k) { @@ -2349,7 +2557,7 @@ void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict #else float max_scale = 0, amax = 0; for (int j = 0; j < QK_K/16; ++j) { - scales[j] = make_qx_quants(16, 16, x + 16*j, L + 16*j, 1); + scales[j] = make_qx_quants(16, 16, x + 16*j, L + 16*j, 1, NULL); float abs_scale = fabsf(scales[j]); if (abs_scale > amax) { amax = abs_scale; @@ -2460,6 +2668,123 @@ size_t ggml_quantize_q5_K(const float * restrict src, void * restrict dst, int n return (n/QK_K*sizeof(block_q5_K)); } +static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restrict y, int n_per_row, const float * quant_weights) { +#if QK_K != 256 + (void)quant_weights; + quantize_row_q5_K_reference(x, y, n_per_row); +#else + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + uint8_t L[QK_K]; + float mins[QK_K/32]; + float scales[QK_K/32]; + float weights[32]; + uint8_t Laux[32]; + + for (int i = 0; i < nb; i++) { + + float sum_x2 = 0; + for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; + float sigma2 = sum_x2/QK_K; + float av_x = sqrtf(sigma2); + + float max_scale = 0; // as we are deducting the min, scales are always positive + float max_min = 0; + for (int j = 0; j < QK_K/32; ++j) { + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 32*j; + for (int l = 0; l < 32; ++l) weights[l] = qw[l] * sqrtf(sigma2 + x[32*j + l]*x[32*j + l]); + } else { + for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); + } + scales[j] = make_qkx3_quants(32, 31, x + 
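// [Editor's note] The scale/min packing above stores eight 6-bit scales and eight 6-bit
// mins in the 12-byte scales[] array. The matching unpack, get_scale_min_k4, is used just
// above at requantization time; a sketch obtained by inverting the packing shown here
// (hedged: reconstructed, consistent with ggml-quants.c):
//
//     static inline void get_scale_min_k4(int j, const uint8_t * q, uint8_t * d, uint8_t * m) {
//         if (j < 4) {
//             *d = q[j] & 63;  *m = q[j + 4] & 63;
//         } else {
//             *d = (q[j + 4] & 0xF) | ((q[j - 4] >> 6) << 4);
//             *m = (q[j + 4] >>  4) | ((q[j - 0] >> 6) << 4);
//         }
//     }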
32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); + float scale = scales[j]; + if (scale > max_scale) { + max_scale = scale; + } + float min = mins[j]; + if (min > max_min) { + max_min = min; + } + } + + float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; + float inv_min = max_min > 0 ? 63.f/max_min : 0.f; + for (int j = 0; j < QK_K/32; ++j) { + uint8_t ls = nearest_int(inv_scale*scales[j]); + uint8_t lm = nearest_int(inv_min*mins[j]); + ls = MIN(63, ls); + lm = MIN(63, lm); + if (j < 4) { + y[i].scales[j] = ls; + y[i].scales[j+4] = lm; + } else { + y[i].scales[j+4] = (ls & 0xF) | ((lm & 0xF) << 4); + y[i].scales[j-4] |= ((ls >> 4) << 6); + y[i].scales[j-0] |= ((lm >> 4) << 6); + } + } + y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); + y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + + uint8_t sc, m; + for (int j = 0; j < QK_K/32; ++j) { + get_scale_min_k4(j, y[i].scales, &sc, &m); + const float d = GGML_FP16_TO_FP32(y[i].d) * sc; + if (!d) continue; + const float dm = GGML_FP16_TO_FP32(y[i].dmin) * m; + for (int ii = 0; ii < 32; ++ii) { + int l = nearest_int((x[32*j + ii] + dm)/d); + l = MAX(0, MIN(31, l)); + L[32*j + ii] = l; + } + } + + uint8_t * restrict qh = y[i].qh; + uint8_t * restrict ql = y[i].qs; + memset(qh, 0, QK_K/8); + + uint8_t m1 = 1, m2 = 2; + for (int n = 0; n < QK_K; n += 64) { + for (int j = 0; j < 32; ++j) { + int l1 = L[n + j]; + if (l1 > 15) { + l1 -= 16; qh[j] |= m1; + } + int l2 = L[n + j + 32]; + if (l2 > 15) { + l2 -= 16; qh[j] |= m2; + } + ql[j] = l1 | (l2 << 4); + } + m1 <<= 2; m2 <<= 2; + ql += 32; + } + + x += QK_K; + + } +#endif +} + +size_t quantize_q5_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q5_K, n_per_row); + if (!quant_weights) { + quantize_row_q5_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q5_K_impl(src, (block_q5_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + // ====================== 6-bit (de)-quantization void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k) { @@ -2476,7 +2801,7 @@ void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict for (int ib = 0; ib < QK_K/16; ++ib) { - const float scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1); + const float scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, NULL); scales[ib] = scale; const float abs_scale = fabsf(scale); @@ -2608,6 +2933,112 @@ size_t ggml_quantize_q6_K(const float * src, void * dst, int n, int k, int64_t * return (n/QK_K*sizeof(block_q6_K)); } +static void quantize_row_q6_K_impl(const float * restrict x, block_q6_K * restrict y, int n_per_row, const float * quant_weights) { +#if QK_K != 256 + (void)quant_weights; + quantize_row_q6_K_reference(x, y, n_per_row); +#else + assert(n_per_row % QK_K == 0); + const int nb = n_per_row / QK_K; + + int8_t L[QK_K]; + float scales[QK_K/16]; + //float weights[16]; + + for (int i = 0; i < nb; i++) { + + //float sum_x2 = 0; + //for (int j = 0; j < QK_K; ++j) sum_x2 += x[j]*x[j]; + //float sigma2 = sum_x2/QK_K; + + float max_scale = 0; + float max_abs_scale = 0; + + for (int ib = 0; ib < QK_K/16; ++ib) { + + float scale; + if (quant_weights) { + const float * qw = quant_weights + QK_K*i + 16*ib; + //for (int j = 0; j < 16; ++j) weights[j] = qw[j] * sqrtf(sigma2 + x[16*ib + j]*x[16*ib + j]); 
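// [Editor's note] A design choice visible in the commented-out lines around this point:
// unlike the Q3_K/Q4_K/Q5_K paths, the Q6_K path below passes the raw importance weights
// qw straight into make_qx_quants instead of blending them with sqrtf(sigma2 + x^2); the
// blended variant was tried and left disabled.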
+ //scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, weights); + scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, qw); + } else { + scale = make_qx_quants(16, 32, x + 16*ib, L + 16*ib, 1, NULL); + } + scales[ib] = scale; + + const float abs_scale = fabsf(scale); + if (abs_scale > max_abs_scale) { + max_abs_scale = abs_scale; + max_scale = scale; + } + + } + + if (!max_abs_scale) { + memset(&y[i], 0, sizeof(block_q6_K)); + y[i].d = GGML_FP32_TO_FP16(0.f); + x += QK_K; + continue; + } + + float iscale = -128.f/max_scale; + y[i].d = GGML_FP32_TO_FP16(1/iscale); + for (int ib = 0; ib < QK_K/16; ++ib) { + y[i].scales[ib] = MIN(127, nearest_int(iscale*scales[ib])); + } + + for (int j = 0; j < QK_K/16; ++j) { + float d = GGML_FP16_TO_FP32(y[i].d) * y[i].scales[j]; + if (!d) { + continue; + } + for (int ii = 0; ii < 16; ++ii) { + int l = nearest_int(x[16*j + ii]/d); + l = MAX(-32, MIN(31, l)); + L[16*j + ii] = l + 32; + } + } + + uint8_t * restrict ql = y[i].ql; + uint8_t * restrict qh = y[i].qh; + for (int j = 0; j < QK_K; j += 128) { + for (int l = 0; l < 32; ++l) { + const uint8_t q1 = L[j + l + 0] & 0xF; + const uint8_t q2 = L[j + l + 32] & 0xF; + const uint8_t q3 = L[j + l + 64] & 0xF; + const uint8_t q4 = L[j + l + 96] & 0xF; + ql[l+ 0] = q1 | (q3 << 4); + ql[l+32] = q2 | (q4 << 4); + qh[l] = (L[j + l] >> 4) | ((L[j + l + 32] >> 4) << 2) | ((L[j + l + 64] >> 4) << 4) | ((L[j + l + 96] >> 4) << 6); + } + ql += 64; + qh += 32; + } + + x += QK_K; + + } +#endif +} + +size_t quantize_q6_K(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + int row_size = ggml_row_size(GGML_TYPE_Q6_K, n_per_row); + if (!quant_weights) { + quantize_row_q6_K_reference(src, dst, nrow*n_per_row); + } + else { + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_q6_K_impl(src, (block_q6_K*)qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += row_size; + } + } + return nrow * row_size; +} + // ====================== "True" 2-bit (de)-quantization static const uint64_t iq2xxs_grid[256] = { diff --git a/ggml-quants.h b/ggml-quants.h index e5d110230..99467936a 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -249,4 +249,7 @@ void ggml_vec_dot_iq2_xs_q8_K (int n, float * restrict s, const void * restrict size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); - +size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_q4_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_q5_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_q6_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); diff --git a/ggml.c b/ggml.c index 52467475a..ef5888ab2 100644 --- a/ggml.c +++ b/ggml.c @@ -18713,26 +18713,38 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i case GGML_TYPE_Q3_K: { GGML_ASSERT(start % QK_K == 0); - block_q3_K * block = (block_q3_K*)dst + start / QK_K; - result = ggml_quantize_q3_K(src + start, block, n, n, hist); + 
GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q3_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_Q4_K: { GGML_ASSERT(start % QK_K == 0); - block_q4_K * block = (block_q4_K*)dst + start / QK_K; - result = ggml_quantize_q4_K(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q4_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_Q5_K: { GGML_ASSERT(start % QK_K == 0); - block_q5_K * block = (block_q5_K*)dst + start / QK_K; - result = ggml_quantize_q5_K(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q5_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_Q6_K: { GGML_ASSERT(start % QK_K == 0); - block_q6_K * block = (block_q6_K*)dst + start / QK_K; - result = ggml_quantize_q6_K(src + start, block, n, n, hist); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_q6_K(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); } break; case GGML_TYPE_IQ2_XXS: { From a836c8f534ab789b02da149fbdaf7735500bff74 Mon Sep 17 00:00:00 2001 From: David Pflug Date: Sun, 14 Jan 2024 10:46:00 -0500 Subject: [PATCH 473/859] llama : fix missing quotes (#4937) --- llama.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 63f37ecdb..7af38718c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7099,7 +7099,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & } #ifdef PRETOKENIZERDEBUG - LLAMA_LOG_WARN(TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); + LLAMA_LOG_WARN("TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); #endif llm_tokenizer_spm tokenizer(vocab); llama_escape_whitespace(raw_text); @@ -7120,7 +7120,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); #ifdef PRETOKENIZERDEBUG - LLAMA_LOG_WARN(TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); + LLAMA_LOG_WARN("TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); #endif llm_tokenizer_bpe tokenizer(vocab); tokenizer.tokenize(raw_text, output); From 4a3156de2fac9a8ee4279de7804d4e352dcfe121 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 15 Jan 2024 07:48:06 +0200 Subject: [PATCH 474/859] CUDA: faster dequantize kernels for Q4_0 and Q4_1 (#4938) Co-authored-by: Iwan Kawrakow --- ggml-cuda.cu | 77 +++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 73 insertions(+), 4 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index bd3814c72..a870718a7 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -1105,6 +1105,61 @@ static __device__ 
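// [Editor's note] On the new dequantize kernels below: each thread block runs 32 threads
// and covers 256 weights (eight 32-element q4_0/q4_1 blocks); ir = tid%8 selects the block
// and il = tid/8 the strip within it, so each thread unpacks 4 quant bytes into 8 output
// values (low and high nibbles). The host-side launch, with the grid configuration written
// out (reconstructed; dst_t is deduced from y):
//
//     dequantize_block_q4_0<<<nb, 32, 0, stream>>>(vx, y, nb32);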
__forceinline__ void dequantize_q8_0(const void * vx, const in #endif // GGML_CUDA_F16 } +template +static __global__ void dequantize_block_q4_0(const void * __restrict__ vx, dst_t * __restrict__ yy, int nb32) { + + const int i = blockIdx.x; + + // assume 32 threads + const int tid = threadIdx.x; + const int il = tid/8; + const int ir = tid%8; + const int ib = 8*i + ir; + if (ib >= nb32) { + return; + } + + dst_t * y = yy + 256*i + 32*ir + 4*il; + + const block_q4_0 * x = (const block_q4_0 *)vx + ib; + const float d = __half2float(x->d); + const float dm = -8*d; + + const uint8_t * q = x->qs + 4*il; + + for (int l = 0; l < 4; ++l) { + y[l+ 0] = d * (q[l] & 0xF) + dm; + y[l+16] = d * (q[l] >> 4) + dm; + } +} + +template +static __global__ void dequantize_block_q4_1(const void * __restrict__ vx, dst_t * __restrict__ yy, int nb32) { + + const int i = blockIdx.x; + + // assume 32 threads + const int tid = threadIdx.x; + const int il = tid/8; + const int ir = tid%8; + const int ib = 8*i + ir; + if (ib >= nb32) { + return; + } + + dst_t * y = yy + 256*i + 32*ir + 4*il; + + const block_q4_1 * x = (const block_q4_1 *)vx + ib; + const float2 d = __half22float2(x->dm); + + const uint8_t * q = x->qs + 4*il; + + for (int l = 0; l < 4; ++l) { + y[l+ 0] = d.x * (q[l] & 0xF) + d.y; + y[l+16] = d.x * (q[l] >> 4) + d.y; + } +} + //================================== k-quants template @@ -6253,6 +6308,20 @@ static void dequantize_row_q3_K_cuda(const void * vx, dst_t * y, const int k, cu #endif } +template +static void dequantize_q4_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb32 = k / 32; + const int nb = (k + 255) / 256; + dequantize_block_q4_0<<>>(vx, y, nb32); +} + +template +static void dequantize_q4_1_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb32 = k / 32; + const int nb = (k + 255) / 256; + dequantize_block_q4_1<<>>(vx, y, nb32); +} + template static void dequantize_row_q4_K_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb = k / QK_K; @@ -6301,9 +6370,9 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { int id; switch (type) { case GGML_TYPE_Q4_0: - return dequantize_block_cuda; + return dequantize_q4_0_cuda; case GGML_TYPE_Q4_1: - return dequantize_block_cuda; + return dequantize_q4_1_cuda; case GGML_TYPE_Q5_0: return dequantize_block_cuda; case GGML_TYPE_Q5_1: @@ -6338,9 +6407,9 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { switch (type) { case GGML_TYPE_Q4_0: - return dequantize_block_cuda; + return dequantize_q4_0_cuda; case GGML_TYPE_Q4_1: - return dequantize_block_cuda; + return dequantize_q4_1_cuda; case GGML_TYPE_Q5_0: return dequantize_block_cuda; case GGML_TYPE_Q5_1: From 2faaef39799c97a53bec3898141478700da25757 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 15 Jan 2024 10:09:38 +0200 Subject: [PATCH 475/859] llama : check for 256 divisibility for IQ2_XS, IQ2_XXS (#4950) Co-authored-by: Iwan Kawrakow --- llama.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 7af38718c..f9718060d 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8559,7 +8559,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty //} bool convert_incompatible_tensor = false; if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || - new_type == GGML_TYPE_Q5_K || 
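// [Editor's note] The divisibility check below exists because every k-quant, and the IQ2
// types added to the check here, store data in super-blocks of QK_K = 256 weights; a row
// whose first dimension is not a multiple of 256 cannot be laid out in these formats. The
// fallback types chosen afterwards (Q4_0/Q4_1/Q5_0/Q5_1/Q8_0) only need 32-element blocks,
// which is why they are safe substitutes for such tensors.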
new_type == GGML_TYPE_Q6_K) { + new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || + new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS) { int nx = tensor->ne[0]; int ny = tensor->ne[1]; if (nx % QK_K != 0) { @@ -8571,6 +8572,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } if (convert_incompatible_tensor) { switch (new_type) { + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: case GGML_TYPE_Q2_K: new_type = GGML_TYPE_Q4_0; break; case GGML_TYPE_Q3_K: new_type = GGML_TYPE_Q4_1; break; case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; From ddb008d845cd50bb090bf051f570130524042936 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 15 Jan 2024 13:27:00 +0200 Subject: [PATCH 476/859] cuda : fix dequantize kernel names (#4938) --- ggml-cuda.cu | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index a870718a7..c3e14bc96 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6309,14 +6309,14 @@ static void dequantize_row_q3_K_cuda(const void * vx, dst_t * y, const int k, cu } template -static void dequantize_q4_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { +static void dequantize_row_q4_0_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb32 = k / 32; const int nb = (k + 255) / 256; dequantize_block_q4_0<<>>(vx, y, nb32); } template -static void dequantize_q4_1_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { +static void dequantize_row_q4_1_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb32 = k / 32; const int nb = (k + 255) / 256; dequantize_block_q4_1<<>>(vx, y, nb32); @@ -6370,9 +6370,9 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { int id; switch (type) { case GGML_TYPE_Q4_0: - return dequantize_q4_0_cuda; + return dequantize_row_q4_0_cuda; case GGML_TYPE_Q4_1: - return dequantize_q4_1_cuda; + return dequantize_row_q4_1_cuda; case GGML_TYPE_Q5_0: return dequantize_block_cuda; case GGML_TYPE_Q5_1: @@ -6407,9 +6407,9 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { switch (type) { case GGML_TYPE_Q4_0: - return dequantize_q4_0_cuda; + return dequantize_row_q4_0_cuda; case GGML_TYPE_Q4_1: - return dequantize_q4_1_cuda; + return dequantize_row_q4_1_cuda; case GGML_TYPE_Q5_0: return dequantize_block_cuda; case GGML_TYPE_Q5_1: From d9aa4ffa6e0296d42f1f676dd85de97c8491eb73 Mon Sep 17 00:00:00 2001 From: "Victor Z. 
Peng" Date: Mon, 15 Jan 2024 04:41:46 -0800 Subject: [PATCH 477/859] awq-py : fix typo in awq-py/README.md (#4947) --- awq-py/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awq-py/README.md b/awq-py/README.md index 59354f4e3..16e68d027 100644 --- a/awq-py/README.md +++ b/awq-py/README.md @@ -43,7 +43,7 @@ Example for llama model # For llama7b and llama2 models python convert.py models/llama-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/llama_7b_fp16.gguf # For mistral and mpt models -python convert-hf-to-gguf.py models/mpt-7b/ --awq-path awq_cache/llama-7b-w4-g128.pt --outfile models/mpt_7b_fp16.gguf +python convert-hf-to-gguf.py models/mpt-7b/ --awq-path awq_cache/mpt-7b-w4-g128.pt --outfile models/mpt_7b_fp16.gguf ``` ## Quantize From 4483396751c79dea540808b9cb9238245d06da2b Mon Sep 17 00:00:00 2001 From: David Friehs Date: Mon, 15 Jan 2024 14:06:52 +0100 Subject: [PATCH 478/859] llama : apply classifier-free guidance to logits directly (#4951) --- common/sampling.cpp | 9 ++++--- llama.cpp | 60 ++++++++++++++++++++++++++++++--------------- llama.h | 17 +++++++++---- 3 files changed, 57 insertions(+), 29 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index 8e45909f1..dd1ffeb1b 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -190,6 +190,11 @@ static llama_token llama_sampling_sample_impl( logits[it->first] += it->second; } + if (ctx_cfg) { + float * logits_guidance = llama_get_logits_ith(ctx_cfg, idx); + llama_sample_apply_guidance(ctx_main, logits, logits_guidance, params.cfg_scale); + } + cur.clear(); for (llama_token token_id = 0; token_id < n_vocab; token_id++) { @@ -198,10 +203,6 @@ static llama_token llama_sampling_sample_impl( llama_token_data_array cur_p = { cur.data(), cur.size(), false }; - if (ctx_cfg) { - llama_sample_classifier_free_guidance(ctx_main, &cur_p, ctx_cfg, params.cfg_scale); - } - // apply penalties const auto& penalty_tokens = params.use_penalty_prompt_tokens ? 
params.penalty_prompt_tokens : prev; const int penalty_tokens_used_size = std::min((int)penalty_tokens.size(), penalty_last_n); diff --git a/llama.cpp b/llama.cpp index f9718060d..46c4d11c8 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7898,39 +7898,59 @@ static void llama_log_softmax(float * array, size_t size) { } } +void llama_sample_apply_guidance( + struct llama_context * ctx, + float * logits, + float * logits_guidance, + float scale) { + GGML_ASSERT(ctx); + + const auto t_start_sample_us = ggml_time_us(); + const auto n_vocab = llama_n_vocab(llama_get_model(ctx)); + + llama_log_softmax(logits, n_vocab); + llama_log_softmax(logits_guidance, n_vocab); + + for (int i = 0; i < n_vocab; ++i) { + auto & l = logits[i]; + const auto & g = logits_guidance[i]; + + l = scale * (l - g) + g; + } + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; +} + void llama_sample_classifier_free_guidance( struct llama_context * ctx, llama_token_data_array * candidates, struct llama_context * guidance_ctx, float scale) { - int64_t t_start_sample_us = ggml_time_us(); - GGML_ASSERT(ctx); + int64_t t_start_sample_us; - auto n_vocab = llama_n_vocab(llama_get_model(ctx)); + t_start_sample_us = ggml_time_us(); + const size_t n_vocab = llama_n_vocab(llama_get_model(ctx)); - GGML_ASSERT(n_vocab == (int)candidates->size); + GGML_ASSERT(n_vocab == candidates->size); GGML_ASSERT(!candidates->sorted); - std::vector logits_base; - logits_base.reserve(candidates->size); - for (size_t i = 0; i < candidates->size; ++i) { - logits_base.push_back(candidates->data[i].logit); - } - llama_log_softmax(logits_base.data(), candidates->size); - - float* logits_guidance = llama_get_logits(guidance_ctx); - llama_log_softmax(logits_guidance, n_vocab); - - for (int i = 0; i < n_vocab; ++i) { - float logit_guidance = logits_guidance[i]; - float logit_base = logits_base[i]; - candidates->data[i].logit = scale * (logit_base - logit_guidance) + logit_guidance; + std::vector logits_base(n_vocab); + for (size_t i = 0; i < n_vocab; ++i) { + logits_base[i] = candidates->data[i].logit; } - if (ctx) { - ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + float * logits_guidance = llama_get_logits(guidance_ctx); + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; + llama_sample_apply_guidance(ctx, logits_base.data(), logits_guidance, scale); + t_start_sample_us = ggml_time_us(); + + for (size_t i = 0; i < n_vocab; ++i) { + candidates->data[i].logit = logits_base[i]; } + + ctx->t_sample_us += ggml_time_us() - t_start_sample_us; } llama_token llama_sample_token_mirostat(struct llama_context * ctx, llama_token_data_array * candidates, float tau, float eta, int32_t m, float * mu) { diff --git a/llama.h b/llama.h index 79c8335b6..a570b0d69 100644 --- a/llama.h +++ b/llama.h @@ -714,14 +714,21 @@ extern "C" { float penalty_present); /// @details Apply classifier-free guidance to the logits as described in academic paper "Stay on topic with Classifier-Free Guidance" https://arxiv.org/abs/2306.17806 - /// @param candidates A vector of `llama_token_data` containing the candidate tokens, the logits must be directly extracted from the original generation context without being sorted. - /// @params guidance_ctx A separate context from the same model. Other than a negative prompt at the beginning, it should have all generated and user input tokens copied from the main context. - /// @params scale Guidance strength. 1.0f means no guidance. Higher values mean stronger guidance. 
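// [Editor's note] The DEPRECATED(...) wrapper applied to the old entry point below
// typically expands to the declaration plus a compiler-specific attribute; a sketch of the
// common llama.h pattern (assumed, not shown in this diff):
//
//     #if defined(_MSC_VER) && !defined(__clang__)
//     #    define DEPRECATED(func, hint) __declspec(deprecated(hint)) func
//     #else
//     #    define DEPRECATED(func, hint) func __attribute__((deprecated(hint)))
//     #endif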
- LLAMA_API void llama_sample_classifier_free_guidance( + /// @param logits Logits extracted from the original generation context. + /// @param logits_guidance Logits extracted from a separate context from the same model. Other than a negative prompt at the beginning, it should have all generated and user input tokens copied from the main context. + /// @param scale Guidance strength. 1.0f means no guidance. Higher values mean stronger guidance. + LLAMA_API void llama_sample_apply_guidance( + struct llama_context * ctx, + float * logits, + float * logits_guidance, + float scale); + + LLAMA_API DEPRECATED(void llama_sample_classifier_free_guidance( struct llama_context * ctx, llama_token_data_array * candidates, struct llama_context * guidance_ctx, - float scale); + float scale), + "use llama_sample_apply_guidance() instead"); /// @details Sorts candidate tokens by their logits in descending order and calculate probabilities based on logits. LLAMA_API void llama_sample_softmax( From 3e5ca7931c68152e4ec18d126e9c832dd84914c8 Mon Sep 17 00:00:00 2001 From: ngc92 <7938269+ngc92@users.noreply.github.com> Date: Mon, 15 Jan 2024 20:40:48 +0200 Subject: [PATCH 479/859] pass cpu-architecture arguments only to host code (C;C++) (#4943) --- CMakeLists.txt | 34 +++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2741568ed..7bd640966 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -594,6 +594,13 @@ if (NOT MSVC) endif() endif() +function(add_compile_option_cpp ARG) + # Adds a compile option to C/C++ only, but not for Cuda. + # Use, e.g., for CPU-architecture flags. + add_compile_options($<$:${ARG}>) + add_compile_options($<$:${ARG}>) +endfunction() + if ((${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm") OR (${CMAKE_SYSTEM_PROCESSOR} MATCHES "aarch64") OR ("${CMAKE_GENERATOR_PLATFORM_LWR}" MATCHES "arm64")) message(STATUS "ARM detected") if (MSVC) @@ -628,8 +635,7 @@ elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "^(x86_64|i686|AMD64)$" OR "${CMAKE_GE include(cmake/FindSIMD.cmake) endif () if (LLAMA_AVX512) - add_compile_options($<$:/arch:AVX512>) - add_compile_options($<$:/arch:AVX512>) + add_compile_option_cpp(/arch:AVX512) # MSVC has no compile-time flags enabling specific # AVX512 extensions, neither it defines the # macros corresponding to the extensions. 
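# [Editor's note] The generator expressions inside add_compile_option_cpp appear truncated
# above; the intended form, reconstructed to match the function's stated purpose (apply a
# flag to C and C++ compilation but not to CUDA):
#
#   function(add_compile_option_cpp ARG)
#       add_compile_options("$<$<COMPILE_LANGUAGE:CXX>:${ARG}>")
#       add_compile_options("$<$<COMPILE_LANGUAGE:C>:${ARG}>")
#   endfunction()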
@@ -643,37 +649,35 @@ elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "^(x86_64|i686|AMD64)$" OR "${CMAKE_GE add_compile_definitions($<$:__AVX512VNNI__>) endif() elseif (LLAMA_AVX2) - add_compile_options($<$:/arch:AVX2>) - add_compile_options($<$:/arch:AVX2>) + add_compile_option_cpp(/arch:AVX2) elseif (LLAMA_AVX) - add_compile_options($<$:/arch:AVX>) - add_compile_options($<$:/arch:AVX>) + add_compile_option_cpp(/arch:AVX) endif() else() if (LLAMA_NATIVE) - add_compile_options(-march=native) + add_compile_option_cpp(-march=native) endif() if (LLAMA_F16C) - add_compile_options(-mf16c) + add_compile_option_cpp(-mf16c) endif() if (LLAMA_FMA) - add_compile_options(-mfma) + add_compile_option_cpp(-mfma) endif() if (LLAMA_AVX) - add_compile_options(-mavx) + add_compile_option_cpp(-mavx) endif() if (LLAMA_AVX2) - add_compile_options(-mavx2) + add_compile_option_cpp(-mavx2) endif() if (LLAMA_AVX512) - add_compile_options(-mavx512f) - add_compile_options(-mavx512bw) + add_compile_option_cpp(-mavx512f) + add_compile_option_cpp(-mavx512bw) endif() if (LLAMA_AVX512_VBMI) - add_compile_options(-mavx512vbmi) + add_compile_option_cpp(-mavx512vbmi) endif() if (LLAMA_AVX512_VNNI) - add_compile_options(-mavx512vnni) + add_compile_option_cpp(-mavx512vnni) endif() endif() elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "ppc64") From e0324285a569d0583cf2f4a07a2402221ee25f58 Mon Sep 17 00:00:00 2001 From: stduhpf Date: Tue, 16 Jan 2024 12:04:32 +0100 Subject: [PATCH 480/859] speculative : threading options (#4959) * speculative: expose draft threading * fix usage format * accept -td and -tbd args * speculative: revert default behavior when -td is unspecified * fix trailing whitespace --- common/common.cpp | 22 ++++++++++++++++++++++ common/common.h | 2 ++ examples/speculative/speculative.cpp | 4 ++++ 3 files changed, 28 insertions(+) diff --git a/common/common.cpp b/common/common.cpp index c11006bcb..2b0865fff 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -167,6 +167,24 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { if (params.n_threads_batch <= 0) { params.n_threads_batch = std::thread::hardware_concurrency(); } + } else if (arg == "-td" || arg == "--threads-draft") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.n_threads_draft = std::stoi(argv[i]); + if (params.n_threads_draft <= 0) { + params.n_threads_draft = std::thread::hardware_concurrency(); + } + } else if (arg == "-tbd" || arg == "--threads-batch-draft") { + if (++i >= argc) { + invalid_param = true; + break; + } + params.n_threads_batch_draft = std::stoi(argv[i]); + if (params.n_threads_batch_draft <= 0) { + params.n_threads_batch_draft = std::thread::hardware_concurrency(); + } } else if (arg == "-p" || arg == "--prompt") { if (++i >= argc) { invalid_param = true; @@ -845,6 +863,10 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -t N, --threads N number of threads to use during generation (default: %d)\n", params.n_threads); printf(" -tb N, --threads-batch N\n"); printf(" number of threads to use during batch and prompt processing (default: same as --threads)\n"); + printf(" -td N, --threads-draft N"); + printf(" number of threads to use during generation (default: same as --threads)"); + printf(" -tbd N, --threads-batch-draft N\n"); + printf(" number of threads to use during batch and prompt processing (default: same as --threads-draft)\n"); printf(" -p PROMPT, --prompt PROMPT\n"); printf(" prompt to start generation with (default: empty)\n"); printf(" -e, 
--escape process prompt escapes sequences (\\n, \\r, \\t, \\', \\\", \\\\)\n"); diff --git a/common/common.h b/common/common.h index 096468243..1f43e6282 100644 --- a/common/common.h +++ b/common/common.h @@ -46,7 +46,9 @@ struct gpt_params { uint32_t seed = -1; // RNG seed int32_t n_threads = get_num_physical_cores(); + int32_t n_threads_draft = -1; int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads) + int32_t n_threads_batch_draft = -1; int32_t n_predict = -1; // new tokens to predict int32_t n_ctx = 512; // context size int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 20f1fb5bf..7b3af01f3 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -65,6 +65,10 @@ int main(int argc, char ** argv) { // load the draft model params.model = params.model_draft; params.n_gpu_layers = params.n_gpu_layers_draft; + if (params.n_threads_draft > 0) { + params.n_threads = params.n_threads_draft; + } + params.n_threads_batch = params.n_threads_batch_draft; std::tie(model_dft, ctx_dft) = llama_init_from_gpt_params(params); { From d75c232e1da56f19ac4d2530dadbe0ab3a11fde5 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Tue, 16 Jan 2024 12:14:19 +0100 Subject: [PATCH 481/859] finetune : use LLAMA_FILE_MAGIC_GGLA (#4961) This commit replaces the magic number LLAMA_FILE_MAGIC_LORA used in finetune.cpp with LLAMA_FILE_MAGIC_GGLA defined in llama.h. Signed-off-by: Daniel Bevenius --- examples/finetune/finetune.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index eaca42fc1..a6620fd73 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1138,9 +1138,8 @@ static void save_as_llama_lora(const char * filename, struct my_llama_lora * lor return tn_buf.data(); }; - uint32_t LLAMA_FILE_MAGIC_LORA = 0x67676C61; // 'ggla' // write_magic - file.write_u32(LLAMA_FILE_MAGIC_LORA); // magic + file.write_u32(LLAMA_FILE_MAGIC_GGLA); // magic file.write_u32(1); // version // write_hparams file.write_u32(lora->hparams.lora_r); From a0b3ac8c48b66206b9c5921ce57bd5c0ea6557c3 Mon Sep 17 00:00:00 2001 From: Justine Tunney Date: Tue, 16 Jan 2024 03:16:33 -0800 Subject: [PATCH 482/859] ggml : introduce GGML_CALL function annotation (#4850) This change makes it possible to build ggml-cuda.cu and ggml-metal.m as independent dynamic shared objects, that may be conditionally linked at runtime in a multiplatform binary. It introduces a GGML_CALL annotation that documents which functions have a cyclic call relationship, between the application code and GPU modules. 
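[Editor's note: the GGML_CALL macro definition itself is not part of this excerpt; a hedged
sketch of the shape implied by the description, keying off the GGML_MULTIPLATFORM build flag
mentioned in the next paragraph:

    #ifdef GGML_MULTIPLATFORM
    #    if defined(_WIN32)
    #        define GGML_CALL
    #    else
    #        define GGML_CALL __attribute__((__ms_abi__))
    #    endif
    #else
    #    define GGML_CALL
    #endif

On Windows the MS ABI is already the native calling convention, so the annotation only needs
to take effect on non-Windows targets.]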
This change does nothing, unless the build defines -DGGML_MULTIPLATFORM which causes back-references and function pointers to conform to MS ABI which is supported by NVCC, ROCm, XCode, GCC and Clang across platforms --- ggml-backend-impl.h | 60 +++++++++++----------- ggml-backend.c | 80 ++++++++++++++--------------- ggml-backend.h | 50 +++++++++--------- ggml-cuda.cu | 121 ++++++++++++++++++++++---------------------- ggml-cuda.h | 32 ++++++------ ggml-metal.h | 4 +- ggml-metal.m | 42 +++++++-------- ggml.c | 32 ++++++------ ggml.h | 58 ++++++++++++--------- 9 files changed, 244 insertions(+), 235 deletions(-) diff --git a/ggml-backend-impl.h b/ggml-backend-impl.h index 1db32901f..1397828d9 100644 --- a/ggml-backend-impl.h +++ b/ggml-backend-impl.h @@ -16,14 +16,14 @@ extern "C" { typedef void * ggml_backend_buffer_type_context_t; struct ggml_backend_buffer_type_i { - const char * (*get_name) (ggml_backend_buffer_type_t buft); - ggml_backend_buffer_t (*alloc_buffer) (ggml_backend_buffer_type_t buft, size_t size); - size_t (*get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment - size_t (*get_alloc_size) (ggml_backend_buffer_type_t buft, const struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding - bool (*supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend + const char * (*GGML_CALL get_name) (ggml_backend_buffer_type_t buft); + ggml_backend_buffer_t (*GGML_CALL alloc_buffer) (ggml_backend_buffer_type_t buft, size_t size); + size_t (*GGML_CALL get_alignment) (ggml_backend_buffer_type_t buft); // tensor alignment + size_t (*GGML_CALL get_alloc_size) (ggml_backend_buffer_type_t buft, const struct ggml_tensor * tensor); // data size needed to allocate the tensor, including padding + bool (*GGML_CALL supports_backend)(ggml_backend_buffer_type_t buft, ggml_backend_t backend); // check if the buffer type is usable by the backend // check if tensor data is in host memory // should be equivalent to supports_backend(buft, ggml_backend_cpu_init()) - bool (*is_host) (ggml_backend_buffer_type_t buft); + bool (*GGML_CALL is_host) (ggml_backend_buffer_type_t buft); }; struct ggml_backend_buffer_type { @@ -35,15 +35,15 @@ extern "C" { typedef void * ggml_backend_buffer_context_t; struct ggml_backend_buffer_i { - const char * (*get_name) (ggml_backend_buffer_t buffer); - void (*free_buffer)(ggml_backend_buffer_t buffer); - void * (*get_base) (ggml_backend_buffer_t buffer); - void (*init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - void (*set_tensor) (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - void (*get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - bool (*cpy_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst); // dst is in the buffer, src may be in any buffer - void (*clear) (ggml_backend_buffer_t buffer, uint8_t value); - void (*reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras + const char * (*GGML_CALL get_name) (ggml_backend_buffer_t buffer); + void (*GGML_CALL free_buffer)(ggml_backend_buffer_t buffer); + void * (*GGML_CALL get_base) (ggml_backend_buffer_t buffer); + void (*GGML_CALL init_tensor)(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + void (*GGML_CALL set_tensor) 
(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*GGML_CALL get_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*GGML_CALL cpy_tensor) (ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst); // dst is in the buffer, src may be in any buffer + void (*GGML_CALL clear) (ggml_backend_buffer_t buffer, uint8_t value); + void (*GGML_CALL reset) (ggml_backend_buffer_t buffer); // reset any internal state due to tensor initialization, such as tensor extras }; struct ggml_backend_buffer { @@ -54,7 +54,7 @@ extern "C" { enum ggml_backend_buffer_usage usage; }; - ggml_backend_buffer_t ggml_backend_buffer_init( + GGML_CALL ggml_backend_buffer_t ggml_backend_buffer_init( ggml_backend_buffer_type_t buft, struct ggml_backend_buffer_i iface, ggml_backend_buffer_context_t context, @@ -70,31 +70,31 @@ extern "C" { typedef void * ggml_backend_context_t; struct ggml_backend_i { - const char * (*get_name)(ggml_backend_t backend); + const char * (*GGML_CALL get_name)(ggml_backend_t backend); - void (*free)(ggml_backend_t backend); + void (*GGML_CALL free)(ggml_backend_t backend); // buffer allocation - ggml_backend_buffer_type_t (*get_default_buffer_type)(ggml_backend_t backend); + ggml_backend_buffer_type_t (*GGML_CALL get_default_buffer_type)(ggml_backend_t backend); // (optional) asynchronous tensor data access - void (*set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - void (*get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - bool (*cpy_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * src, struct ggml_tensor * dst); + void (*GGML_CALL set_tensor_async)(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + void (*GGML_CALL get_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + bool (*GGML_CALL cpy_tensor_async)(ggml_backend_t backend, const struct ggml_tensor * src, struct ggml_tensor * dst); // (optional) complete all pending operations - void (*synchronize)(ggml_backend_t backend); + void (*GGML_CALL synchronize)(ggml_backend_t backend); // compute graph with a plan - ggml_backend_graph_plan_t (*graph_plan_create) (ggml_backend_t backend, const struct ggml_cgraph * cgraph); - void (*graph_plan_free) (ggml_backend_t backend, ggml_backend_graph_plan_t plan); - void (*graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); + ggml_backend_graph_plan_t (*GGML_CALL graph_plan_create) (ggml_backend_t backend, const struct ggml_cgraph * cgraph); + void (*GGML_CALL graph_plan_free) (ggml_backend_t backend, ggml_backend_graph_plan_t plan); + void (*GGML_CALL graph_plan_compute)(ggml_backend_t backend, ggml_backend_graph_plan_t plan); // compute graph without a plan (async) - bool (*graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); + bool (*GGML_CALL graph_compute)(ggml_backend_t backend, struct ggml_cgraph * cgraph); // check if the backend supports an operation - bool (*supports_op)(ggml_backend_t backend, const struct ggml_tensor * op); + bool (*GGML_CALL supports_op)(ggml_backend_t backend, const struct ggml_tensor * op); }; struct ggml_backend { @@ -107,9 +107,9 @@ extern "C" { // Backend registry // - typedef ggml_backend_t 
(*ggml_backend_init_fn)(const char * params, void * user_data); + typedef ggml_backend_t (*GGML_CALL ggml_backend_init_fn)(const char * params, void * user_data); - void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data); + GGML_CALL void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data); #ifdef __cplusplus } diff --git a/ggml-backend.c b/ggml-backend.c index 505dbba47..f5424fb90 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -19,7 +19,7 @@ const char * ggml_backend_buft_name(ggml_backend_buffer_type_t buft) { return buft->iface.get_name(buft); } -ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL ggml_backend_buffer_t ggml_backend_buft_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { return buft->iface.alloc_buffer(buft, size); } @@ -27,7 +27,7 @@ size_t ggml_backend_buft_get_alignment(ggml_backend_buffer_type_t buft) { return buft->iface.get_alignment(buft); } -size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor) { +GGML_CALL size_t ggml_backend_buft_get_alloc_size(ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor) { // get_alloc_size is optional, defaults to ggml_nbytes if (buft->iface.get_alloc_size) { return buft->iface.get_alloc_size(buft, tensor); @@ -48,7 +48,7 @@ bool ggml_backend_buft_is_host(ggml_backend_buffer_type_t buft) { // backend buffer -ggml_backend_buffer_t ggml_backend_buffer_init( +GGML_CALL ggml_backend_buffer_t ggml_backend_buffer_init( ggml_backend_buffer_type_t buft, struct ggml_backend_buffer_i iface, ggml_backend_buffer_context_t context, @@ -95,7 +95,7 @@ void * ggml_backend_buffer_get_base(ggml_backend_buffer_t buffer) { return base; } -void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { +GGML_CALL void ggml_backend_buffer_init_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { // init_tensor is optional if (buffer->iface.init_tensor) { buffer->iface.init_tensor(buffer, tensor); @@ -191,7 +191,7 @@ void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_ten } } -void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { ggml_backend_buffer_t buf = tensor->view_src ? tensor->view_src->buffer : tensor->buffer; GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); @@ -201,7 +201,7 @@ void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * data, siz tensor->buffer->iface.set_tensor(buf, tensor, data, offset, size); } -void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { ggml_backend_buffer_t buf = tensor->view_src ? 
tensor->view_src->buffer : tensor->buffer; GGML_ASSERT(tensor->data != NULL && "tensor not allocated"); @@ -318,9 +318,9 @@ struct ggml_backend_reg { static struct ggml_backend_reg ggml_backend_registry[GGML_MAX_BACKENDS_REG]; static size_t ggml_backend_registry_count = 0; -static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data); +GGML_CALL static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data); -static void ggml_backend_registry_init(void) { +GGML_CALL static void ggml_backend_registry_init(void) { static bool initialized = false; if (initialized) { @@ -333,18 +333,18 @@ static void ggml_backend_registry_init(void) { // add forward decls here to avoid including the backend headers #ifdef GGML_USE_CUBLAS - extern void ggml_backend_cuda_reg_devices(void); + extern GGML_CALL void ggml_backend_cuda_reg_devices(void); ggml_backend_cuda_reg_devices(); #endif #ifdef GGML_USE_METAL - extern ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); - extern ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); + extern GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); + extern GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); ggml_backend_register("Metal", ggml_backend_reg_metal_init, ggml_backend_metal_buffer_type(), NULL); #endif } -void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data) { +GGML_CALL void ggml_backend_register(const char * name, ggml_backend_init_fn init_fn, ggml_backend_buffer_type_t default_buffer_type, void * user_data) { GGML_ASSERT(ggml_backend_registry_count < GGML_MAX_BACKENDS_REG); size_t id = ggml_backend_registry_count; @@ -439,33 +439,33 @@ ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size) { // backend CPU -static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { return "CPU"; GGML_UNUSED(buffer); } -static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { +GGML_CALL static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { return (void *)buffer->context; } -static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { free(buffer->context); } -static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cpu_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { memcpy((char *)tensor->data + offset, data, size); GGML_UNUSED(buffer); } -static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cpu_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { memcpy(data, (const char *)tensor->data + offset, size); GGML_UNUSED(buffer); } -static bool ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { +GGML_CALL static bool 
ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { if (ggml_backend_buffer_is_host(src->buffer)) { memcpy(dst->data, src->data, ggml_nbytes(src)); return true; @@ -475,7 +475,7 @@ static bool ggml_backend_cpu_buffer_cpy_tensor(ggml_backend_buffer_t buffer, con GGML_UNUSED(buffer); } -static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { +GGML_CALL static void ggml_backend_cpu_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { memset(buffer->context, value, buffer->size); } @@ -506,13 +506,13 @@ static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 -static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { return "CPU"; GGML_UNUSED(buft); } -static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? @@ -521,25 +521,25 @@ static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_back return ggml_backend_buffer_init(buft, cpu_backend_buffer_i, data, size); } -static size_t ggml_backend_cpu_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_cpu_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return TENSOR_ALIGNMENT; GGML_UNUSED(buft); } -static bool ggml_backend_cpu_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { +GGML_CALL static bool ggml_backend_cpu_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { return ggml_backend_is_cpu(backend); GGML_UNUSED(buft); } -static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft) { +GGML_CALL static bool ggml_backend_cpu_buffer_type_is_host(ggml_backend_buffer_type_t buft) { return true; GGML_UNUSED(buft); } -ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_cpu_buffer_type = { /* .iface = */ { /* .get_name = */ ggml_backend_cpu_buffer_type_get_name, @@ -561,23 +561,23 @@ ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void) { #include -static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cpu_hbm_buffer_type_get_name(ggml_backend_buffer_type_t buft) { return "CPU_HBM"; GGML_UNUSED(buft); } -static const char * ggml_backend_cpu_hbm_buffer_get_name(ggml_backend_buffer_t buf) { +GGML_CALL static const char * ggml_backend_cpu_hbm_buffer_get_name(ggml_backend_buffer_t buf) { return "CPU_HBM"; GGML_UNUSED(buf); } -static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_cpu_hbm_buffer_free_buffer(ggml_backend_buffer_t buffer) { hbw_free(buffer->context); } -static ggml_backend_buffer_t ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t 
ggml_backend_cpu_hbm_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { //void * ptr = hbw_malloc(size); void * ptr; int result = hbw_posix_memalign(&ptr, ggml_backend_cpu_buffer_type_get_alignment(buft), size); @@ -617,20 +617,20 @@ struct ggml_backend_cpu_context { size_t work_size; }; -static const char * ggml_backend_cpu_name(ggml_backend_t backend) { +GGML_CALL static const char * ggml_backend_cpu_name(ggml_backend_t backend) { return "CPU"; GGML_UNUSED(backend); } -static void ggml_backend_cpu_free(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_cpu_free(ggml_backend_t backend) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; free(cpu_ctx->work_data); free(cpu_ctx); free(backend); } -static ggml_backend_buffer_type_t ggml_backend_cpu_get_default_buffer_type(ggml_backend_t backend) { +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_cpu_get_default_buffer_type(ggml_backend_t backend) { return ggml_backend_cpu_buffer_type(); GGML_UNUSED(backend); @@ -641,7 +641,7 @@ struct ggml_backend_plan_cpu { struct ggml_cgraph cgraph; }; -static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, const struct ggml_cgraph * cgraph) { +GGML_CALL static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend_t backend, const struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_backend_plan_cpu * cpu_plan = malloc(sizeof(struct ggml_backend_plan_cpu)); @@ -656,7 +656,7 @@ static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(ggml_backend return cpu_plan; } -static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { +GGML_CALL static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { struct ggml_backend_plan_cpu * cpu_plan = (struct ggml_backend_plan_cpu *)plan; free(cpu_plan->cplan.work_data); @@ -665,7 +665,7 @@ static void ggml_backend_cpu_graph_plan_free(ggml_backend_t backend, ggml_backen GGML_UNUSED(backend); } -static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { +GGML_CALL static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_backend_graph_plan_t plan) { struct ggml_backend_plan_cpu * cpu_plan = (struct ggml_backend_plan_cpu *)plan; ggml_graph_compute(&cpu_plan->cgraph, &cpu_plan->cplan); @@ -673,7 +673,7 @@ static void ggml_backend_cpu_graph_plan_compute(ggml_backend_t backend, ggml_bac GGML_UNUSED(backend); } -static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +GGML_CALL static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_backend_cpu_context * cpu_ctx = (struct ggml_backend_cpu_context *)backend->context; struct ggml_cplan cplan = ggml_graph_plan(cgraph, cpu_ctx->n_threads); @@ -690,7 +690,7 @@ static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, struct ggml_c return true; } -static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { +GGML_CALL static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_MUL_MAT: return op->src[1]->type == GGML_TYPE_F32 || op->src[1]->type == ggml_internal_get_type_traits(op->src[0]->type).vec_dot_type; @@ -732,7 +732,7 @@ 
ggml_backend_t ggml_backend_cpu_init(void) { return cpu_backend; } -bool ggml_backend_is_cpu(ggml_backend_t backend) { +GGML_CALL bool ggml_backend_is_cpu(ggml_backend_t backend) { return backend && backend->iface.get_name == ggml_backend_cpu_name; } @@ -743,11 +743,11 @@ void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) { ctx->n_threads = n_threads; } -ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { +GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { return ggml_backend_buffer_init(ggml_backend_cpu_buffer_type(), cpu_backend_buffer_i_from_ptr, ptr, size); } -static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data) { +GGML_CALL static ggml_backend_t ggml_backend_reg_cpu_init(const char * params, void * user_data) { return ggml_backend_cpu_init(); GGML_UNUSED(params); diff --git a/ggml-backend.h b/ggml-backend.h index 4eb244af1..12b4b4ab7 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -17,12 +17,12 @@ extern "C" { // // buffer type - GGML_API const char * ggml_backend_buft_name (ggml_backend_buffer_type_t buft); - GGML_API ggml_backend_buffer_t ggml_backend_buft_alloc_buffer (ggml_backend_buffer_type_t buft, size_t size); - GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); - GGML_API size_t ggml_backend_buft_get_alloc_size (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); - GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); - GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); + GGML_API const char * ggml_backend_buft_name (ggml_backend_buffer_type_t buft); + GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_buft_alloc_buffer (ggml_backend_buffer_type_t buft, size_t size); + GGML_API size_t ggml_backend_buft_get_alignment (ggml_backend_buffer_type_t buft); + GGML_API GGML_CALL size_t ggml_backend_buft_get_alloc_size (ggml_backend_buffer_type_t buft, struct ggml_tensor * tensor); + GGML_API bool ggml_backend_buft_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend); + GGML_API bool ggml_backend_buft_is_host (ggml_backend_buffer_type_t buft); // buffer enum ggml_backend_buffer_usage { @@ -30,18 +30,18 @@ extern "C" { GGML_BACKEND_BUFFER_USAGE_WEIGHTS = 1, }; - GGML_API const char * ggml_backend_buffer_name (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_free (ggml_backend_buffer_t buffer); - GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); - GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); - GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); - GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_set_usage (ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage); - GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_get_type (ggml_backend_buffer_t buffer); - GGML_API void ggml_backend_buffer_reset (ggml_backend_buffer_t buffer); + GGML_API const char * ggml_backend_buffer_name (ggml_backend_buffer_t buffer); + GGML_API void 
ggml_backend_buffer_free (ggml_backend_buffer_t buffer); + GGML_API void * ggml_backend_buffer_get_base (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_size (ggml_backend_buffer_t buffer); + GGML_API GGML_CALL void ggml_backend_buffer_init_tensor (ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API size_t ggml_backend_buffer_get_alignment (ggml_backend_buffer_t buffer); + GGML_API size_t ggml_backend_buffer_get_alloc_size(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor); + GGML_API void ggml_backend_buffer_clear (ggml_backend_buffer_t buffer, uint8_t value); + GGML_API bool ggml_backend_buffer_is_host (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_set_usage (ggml_backend_buffer_t buffer, enum ggml_backend_buffer_usage usage); + GGML_API ggml_backend_buffer_type_t ggml_backend_buffer_get_type (ggml_backend_buffer_t buffer); + GGML_API void ggml_backend_buffer_reset (ggml_backend_buffer_t buffer); // // Backend @@ -58,8 +58,8 @@ extern "C" { GGML_API void ggml_backend_tensor_set_async(ggml_backend_t backend, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); GGML_API void ggml_backend_tensor_get_async(ggml_backend_t backend, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); - GGML_API void ggml_backend_tensor_set( struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); - GGML_API void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); + GGML_API GGML_CALL void ggml_backend_tensor_set( struct ggml_tensor * tensor, const void * data, size_t offset, size_t size); + GGML_API GGML_CALL void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * data, size_t offset, size_t size); GGML_API void ggml_backend_synchronize(ggml_backend_t backend); @@ -80,13 +80,13 @@ extern "C" { GGML_API ggml_backend_t ggml_backend_cpu_init(void); - GGML_API bool ggml_backend_is_cpu(ggml_backend_t backend); - GGML_API void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads); + GGML_API GGML_CALL bool ggml_backend_is_cpu (ggml_backend_t backend); + GGML_API void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads); // Create a backend buffer from an existing pointer - GGML_API ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size); + GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size); - GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void); + GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cpu_buffer_type(void); #ifdef GGML_USE_CPU_HBM GGML_API ggml_backend_buffer_type_t ggml_backend_cpu_hbm_buffer_type(void); @@ -183,7 +183,7 @@ extern "C" { GGML_API struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph); GGML_API void ggml_backend_graph_copy_free(struct ggml_backend_graph_copy copy); - typedef bool (*ggml_backend_eval_callback)(int node_index, struct ggml_tensor * t1, struct ggml_tensor * t2, void * user_data); + typedef bool (*GGML_CALL ggml_backend_eval_callback)(int node_index, struct ggml_tensor * t1, struct ggml_tensor * t2, void * user_data); // Compare the output of two backends GGML_API bool ggml_backend_compare_graph_backend(ggml_backend_t backend1, ggml_backend_t backend2, struct ggml_cgraph * graph, ggml_backend_eval_callback callback, void * user_data); diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 
c3e14bc96..568c411af 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7615,11 +7615,11 @@ struct cuda_pool_alloc { static bool g_cublas_loaded = false; -bool ggml_cublas_loaded(void) { +GGML_CALL bool ggml_cublas_loaded(void) { return g_cublas_loaded; } -void ggml_init_cublas() { +GGML_CALL void ggml_init_cublas() { static bool initialized = false; if (!initialized) { @@ -7707,7 +7707,7 @@ void ggml_init_cublas() { } } -void * ggml_cuda_host_malloc(size_t size) { +GGML_CALL void * ggml_cuda_host_malloc(size_t size) { if (getenv("GGML_CUDA_NO_PINNED") != nullptr) { return nullptr; } @@ -7725,7 +7725,7 @@ void * ggml_cuda_host_malloc(size_t size) { return ptr; } -void ggml_cuda_host_free(void * ptr) { +GGML_CALL void ggml_cuda_host_free(void * ptr) { CUDA_CHECK(cudaFreeHost(ptr)); } @@ -9242,7 +9242,7 @@ static void ggml_cuda_rms_norm(const ggml_tensor * src0, const ggml_tensor * src ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_rms_norm); } -bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { +GGML_CALL bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { if (!g_cublas_loaded) return false; const int64_t ne10 = src1->ne[0]; @@ -10013,7 +10013,7 @@ static size_t ggml_nbytes_split(const struct ggml_tensor * tensor, int nrows_spl return nrows_split*ggml_row_size(tensor->type, tensor->ne[0]); } -static void ggml_cuda_set_main_device(const int main_device) { +GGML_CALL static void ggml_cuda_set_main_device(const int main_device) { if (main_device >= g_device_count) { fprintf(stderr, "warning: cannot set main_device=%d because there are only %d devices. Using device %d instead.\n", main_device, g_device_count, g_main_device); @@ -10028,7 +10028,7 @@ static void ggml_cuda_set_main_device(const int main_device) { } } -bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { +GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor) { if (!g_cublas_loaded) return false; ggml_cuda_func_t func; @@ -10186,7 +10186,7 @@ bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_ return true; } -int ggml_cuda_get_device_count() { +GGML_CALL int ggml_cuda_get_device_count() { int device_count; if (cudaGetDeviceCount(&device_count) != cudaSuccess) { return 0; @@ -10194,7 +10194,7 @@ int ggml_cuda_get_device_count() { return device_count; } -void ggml_cuda_get_device_description(int device, char * description, size_t description_size) { +GGML_CALL void ggml_cuda_get_device_description(int device, char * description, size_t description_size) { cudaDeviceProp prop; CUDA_CHECK(cudaGetDeviceProperties(&prop, device)); snprintf(description, description_size, "%s", prop.name); @@ -10244,27 +10244,27 @@ struct ggml_backend_cuda_buffer_context { } }; -static const char * ggml_backend_cuda_buffer_get_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_cuda_buffer_get_name(ggml_backend_buffer_t buffer) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; return ctx->name.c_str(); } -static bool ggml_backend_buffer_is_cuda(ggml_backend_buffer_t buffer) { +GGML_CALL static bool ggml_backend_buffer_is_cuda(ggml_backend_buffer_t buffer) { return buffer->iface.get_name == ggml_backend_cuda_buffer_get_name; } -static void ggml_backend_cuda_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL 
static void ggml_backend_cuda_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; CUDA_CHECK(cudaFree(ctx->dev_ptr)); delete ctx; } -static void * ggml_backend_cuda_buffer_get_base(ggml_backend_buffer_t buffer) { +GGML_CALL static void * ggml_backend_cuda_buffer_get_base(ggml_backend_buffer_t buffer) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; return ctx->dev_ptr; } -static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { +GGML_CALL static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; if (tensor->view_src != NULL && tensor->view_offs == 0) { @@ -10296,7 +10296,7 @@ static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t buffer, g } } -static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -10307,7 +10307,7 @@ static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, gg CUDA_CHECK(cudaDeviceSynchronize()); } -static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -10318,7 +10318,7 @@ static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, co CUDA_CHECK(cudaDeviceSynchronize()); } -static bool ggml_backend_cuda_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) { +GGML_CALL static bool ggml_backend_cuda_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) { if (ggml_backend_buffer_is_cuda(src->buffer)) { ggml_backend_cuda_buffer_context * src_ctx = (ggml_backend_cuda_buffer_context *)src->buffer->context; ggml_backend_cuda_buffer_context * dst_ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -10335,7 +10335,7 @@ static bool ggml_backend_cuda_buffer_cpy_tensor(ggml_backend_buffer_t buffer, co return false; } -static void ggml_backend_cuda_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { +GGML_CALL static void ggml_backend_cuda_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; ggml_cuda_set_device(ctx->device); @@ -10357,19 +10357,18 @@ static ggml_backend_buffer_i ggml_backend_cuda_buffer_interface = { }; // cuda buffer type - struct ggml_backend_cuda_buffer_type_context { int device; std::string name; }; -static const char * ggml_backend_cuda_buffer_type_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cuda_buffer_type_name(ggml_backend_buffer_type_t buft) { 
ggml_backend_cuda_buffer_type_context * ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; return ctx->name.c_str(); } -static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { ggml_backend_cuda_buffer_type_context * buft_ctx = (ggml_backend_cuda_buffer_type_context *)buft->context; ggml_cuda_set_device(buft_ctx->device); @@ -10388,13 +10387,13 @@ static ggml_backend_buffer_t ggml_backend_cuda_buffer_type_alloc_buffer(ggml_bac return ggml_backend_buffer_init(buft, ggml_backend_cuda_buffer_interface, ctx, size); } -static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_cuda_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return 128; UNUSED(buft); } -static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { +GGML_CALL static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { int64_t row_low = 0; int64_t row_high = ggml_nrows(tensor); int64_t nrows_split = row_high - row_low; @@ -10414,7 +10413,7 @@ static size_t ggml_backend_cuda_buffer_type_get_alloc_size(ggml_backend_buffer_t UNUSED(buft); } -static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { +GGML_CALL static bool ggml_backend_cuda_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { if (!ggml_backend_is_cuda(backend)) { return false; } @@ -10434,7 +10433,7 @@ static ggml_backend_buffer_type_i ggml_backend_cuda_buffer_type_interface = { /* .is_host = */ NULL, }; -ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device) { // FIXME: this is not thread safe if (device >= ggml_backend_cuda_get_device_count()) { return nullptr; @@ -10479,7 +10478,7 @@ struct ggml_backend_cuda_split_buffer_context { std::vector tensor_extras; }; -static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backend_buffer_t buffer) { return GGML_CUDA_NAME "_Split"; UNUSED(buffer); @@ -10490,19 +10489,19 @@ static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backend_buffer_ // return buffer->iface.get_name == ggml_backend_cuda_split_buffer_get_name; //} -static void ggml_backend_cuda_split_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_cuda_split_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context; delete ctx; } -static void * ggml_backend_cuda_split_buffer_get_base(ggml_backend_buffer_t buffer) { +GGML_CALL static void * ggml_backend_cuda_split_buffer_get_base(ggml_backend_buffer_t buffer) { // the pointers are stored in the tensor extras, this is just a dummy address and never dereferenced return (void *)0x1000; UNUSED(buffer); } -static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { +GGML_CALL static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { GGML_ASSERT(tensor->view_src == nullptr); 
// views of split tensors are not supported ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context; @@ -10552,7 +10551,7 @@ static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_buffer_t buf tensor->extra = extra; } -static void ggml_backend_cuda_split_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_split_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { // split tensors must always be set in their entirety at once GGML_ASSERT(offset == 0); GGML_ASSERT(size == ggml_nbytes(tensor)); @@ -10586,7 +10585,7 @@ static void ggml_backend_cuda_split_buffer_set_tensor(ggml_backend_buffer_t buff } } -static void ggml_backend_cuda_split_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_split_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { // split tensors must always be set in their entirety at once GGML_ASSERT(offset == 0); GGML_ASSERT(size == ggml_nbytes(tensor)); @@ -10620,7 +10619,7 @@ static void ggml_backend_cuda_split_buffer_get_tensor(ggml_backend_buffer_t buff } } -static void ggml_backend_cuda_split_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { +GGML_CALL static void ggml_backend_cuda_split_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { UNUSED(buffer); UNUSED(value); } @@ -10639,13 +10638,13 @@ static struct ggml_backend_buffer_i ggml_backend_cuda_split_buffer_interface = { // cuda split buffer type -static const char * ggml_backend_cuda_split_buffer_type_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cuda_split_buffer_type_name(ggml_backend_buffer_type_t buft) { return GGML_CUDA_NAME "_Split"; UNUSED(buft); } -static ggml_backend_buffer_t ggml_backend_cuda_split_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_cuda_split_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { // since we don't know the exact split after rounding, we cannot allocate the device buffers at this point // instead, we allocate them for each tensor separately in init_tensor // however, the size still represents the maximum cumulative size of all the device buffers after the tensors are allocated, @@ -10655,13 +10654,13 @@ static ggml_backend_buffer_t ggml_backend_cuda_split_buffer_type_alloc_buffer(gg return ggml_backend_buffer_init(buft, ggml_backend_cuda_split_buffer_interface, ctx, size); } -static size_t ggml_backend_cuda_split_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_cuda_split_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return 128; UNUSED(buft); } -static size_t ggml_backend_cuda_split_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { +GGML_CALL static size_t ggml_backend_cuda_split_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { ggml_backend_cuda_split_buffer_type_context * ctx = (ggml_backend_cuda_split_buffer_type_context *)buft->context; size_t total_size = 0; @@ -10688,13 +10687,13 @@ static size_t ggml_backend_cuda_split_buffer_type_get_alloc_size(ggml_backend_bu return 
total_size; } -static bool ggml_backend_cuda_split_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { +GGML_CALL static bool ggml_backend_cuda_split_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { return ggml_backend_is_cuda(backend); UNUSED(buft); } -static bool ggml_backend_cuda_split_buffer_type_is_host(ggml_backend_buffer_type_t buft) { +GGML_CALL static bool ggml_backend_cuda_split_buffer_type_is_host(ggml_backend_buffer_type_t buft) { return false; UNUSED(buft); @@ -10709,7 +10708,7 @@ static ggml_backend_buffer_type_i ggml_backend_cuda_split_buffer_type_interface /* .is_host = */ ggml_backend_cuda_split_buffer_type_is_host, }; -ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split) { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split) { // FIXME: this is not thread safe static std::map, struct ggml_backend_buffer_type> buft_map; @@ -10745,23 +10744,23 @@ ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * ten // host buffer type -static const char * ggml_backend_cuda_host_buffer_type_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_cuda_host_buffer_type_name(ggml_backend_buffer_type_t buft) { return GGML_CUDA_NAME "_Host"; UNUSED(buft); } -static const char * ggml_backend_cuda_host_buffer_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_cuda_host_buffer_name(ggml_backend_buffer_t buffer) { return GGML_CUDA_NAME "_Host"; UNUSED(buffer); } -static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_cuda_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { ggml_cuda_host_free(buffer->context); } -static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { void * ptr = ggml_cuda_host_malloc(size); if (ptr == nullptr) { @@ -10777,7 +10776,7 @@ static ggml_backend_buffer_t ggml_backend_cuda_host_buffer_type_alloc_buffer(ggm return buffer; } -ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { static struct ggml_backend_buffer_type ggml_backend_cuda_buffer_type_host = { /* .iface = */ { /* .get_name = */ ggml_backend_cuda_host_buffer_type_name, @@ -10795,26 +10794,26 @@ ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type() { // backend -static const char * ggml_backend_cuda_name(ggml_backend_t backend) { +GGML_CALL static const char * ggml_backend_cuda_name(ggml_backend_t backend) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; return cuda_ctx->name.c_str(); } -static void ggml_backend_cuda_free(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_cuda_free(ggml_backend_t backend) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; delete cuda_ctx; delete backend; } -static ggml_backend_buffer_type_t ggml_backend_cuda_get_default_buffer_type(ggml_backend_t backend) { +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_cuda_get_default_buffer_type(ggml_backend_t backend) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; return 
ggml_backend_cuda_buffer_type(cuda_ctx->device); } -static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); @@ -10823,7 +10822,7 @@ static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_tens CUDA_CHECK(cudaMemcpyAsync((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice, g_cudaStreams[cuda_ctx->device][0])); } -static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); @@ -10832,7 +10831,7 @@ static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, const ggm CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); } -static bool ggml_backend_cuda_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { +GGML_CALL static bool ggml_backend_cuda_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; if (dst->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && ggml_backend_buffer_is_cuda(src->buffer)) { @@ -10843,7 +10842,7 @@ static bool ggml_backend_cuda_cpy_tensor_async(ggml_backend_t backend, const ggm return false; } -static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; CUDA_CHECK(cudaStreamSynchronize(g_cudaStreams[cuda_ctx->device][0])); @@ -10851,7 +10850,7 @@ static void ggml_backend_cuda_synchronize(ggml_backend_t backend) { UNUSED(backend); } -static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { +GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; ggml_cuda_set_main_device(cuda_ctx->device); @@ -10890,7 +10889,7 @@ static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, ggml_cgraph return true; } -static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_tensor * op) { +GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, const ggml_tensor * op) { switch (op->op) { case GGML_OP_UNARY: switch (ggml_get_unary_op(op)) { @@ -11016,7 +11015,7 @@ static ggml_backend_i ggml_backend_cuda_interface = { /* .supports_op = */ ggml_backend_cuda_supports_op, }; -ggml_backend_t ggml_backend_cuda_init(int device) { +GGML_CALL ggml_backend_t ggml_backend_cuda_init(int device) { ggml_init_cublas(); // TODO: remove from ggml.c if (device < 0 || device 
>= ggml_cuda_get_device_count()) { @@ -11040,35 +11039,35 @@ ggml_backend_t ggml_backend_cuda_init(int device) { return cuda_backend; } -bool ggml_backend_is_cuda(ggml_backend_t backend) { +GGML_CALL bool ggml_backend_is_cuda(ggml_backend_t backend) { return backend && backend->iface.get_name == ggml_backend_cuda_name; } -int ggml_backend_cuda_get_device_count() { +GGML_CALL int ggml_backend_cuda_get_device_count() { return ggml_cuda_get_device_count(); } -void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size) { +GGML_CALL void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size) { ggml_cuda_get_device_description(device, description, description_size); } -void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total) { +GGML_CALL void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total) { ggml_cuda_set_device(device); CUDA_CHECK(cudaMemGetInfo(free, total)); } // backend registry -static ggml_backend_t ggml_backend_reg_cuda_init(const char * params, void * user_data) { +GGML_CALL static ggml_backend_t ggml_backend_reg_cuda_init(const char * params, void * user_data) { ggml_backend_t cuda_backend = ggml_backend_cuda_init((int) (intptr_t) user_data); return cuda_backend; UNUSED(params); } -extern "C" int ggml_backend_cuda_reg_devices(); +extern "C" GGML_CALL int ggml_backend_cuda_reg_devices(); -int ggml_backend_cuda_reg_devices() { +GGML_CALL int ggml_backend_cuda_reg_devices() { int device_count = ggml_cuda_get_device_count(); //int device_count = 1; // DEBUG: some tools require delaying CUDA initialization for (int i = 0; i < device_count; i++) { diff --git a/ggml-cuda.h b/ggml-cuda.h index d19cbf3fd..b1ebd61d7 100644 --- a/ggml-cuda.h +++ b/ggml-cuda.h @@ -18,34 +18,34 @@ extern "C" { #define GGML_CUDA_MAX_DEVICES 16 // Always success. To check if CUDA is actually loaded, use `ggml_cublas_loaded`. -GGML_API void ggml_init_cublas(void); +GGML_API GGML_CALL void ggml_init_cublas(void); // Returns `true` if there are available CUDA devices and cublas loads successfully; otherwise, it returns `false`. 
-GGML_API bool ggml_cublas_loaded(void); +GGML_API GGML_CALL bool ggml_cublas_loaded(void); -GGML_API void * ggml_cuda_host_malloc(size_t size); -GGML_API void ggml_cuda_host_free(void * ptr); +GGML_API GGML_CALL void * ggml_cuda_host_malloc(size_t size); +GGML_API GGML_CALL void ggml_cuda_host_free(void * ptr); -GGML_API bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); -GGML_API bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor); +GGML_API GGML_CALL bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst); +GGML_API GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor); -GGML_API int ggml_cuda_get_device_count(void); -GGML_API void ggml_cuda_get_device_description(int device, char * description, size_t description_size); +GGML_API GGML_CALL int ggml_cuda_get_device_count(void); +GGML_API GGML_CALL void ggml_cuda_get_device_description(int device, char * description, size_t description_size); // backend API -GGML_API ggml_backend_t ggml_backend_cuda_init(int device); +GGML_API GGML_CALL ggml_backend_t ggml_backend_cuda_init(int device); -GGML_API bool ggml_backend_is_cuda(ggml_backend_t backend); +GGML_API GGML_CALL bool ggml_backend_is_cuda(ggml_backend_t backend); -GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_buffer_type(int device); // split tensor buffer that splits matrices by rows across multiple devices -GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_split_buffer_type(const float * tensor_split); // pinned host buffer for use with the CPU backend for faster copies between CPU and GPU -GGML_API ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type(void); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_cuda_host_buffer_type(void); -GGML_API int ggml_backend_cuda_get_device_count(void); -GGML_API void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size); -GGML_API void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total); +GGML_API GGML_CALL int ggml_backend_cuda_get_device_count(void); +GGML_API GGML_CALL void ggml_backend_cuda_get_device_description(int device, char * description, size_t description_size); +GGML_API GGML_CALL void ggml_backend_cuda_get_device_memory(int device, size_t * free, size_t * total); #ifdef __cplusplus } diff --git a/ggml-metal.h b/ggml-metal.h index cd5e2995f..8b0bfc5f1 100644 --- a/ggml-metal.h +++ b/ggml-metal.h @@ -47,11 +47,11 @@ GGML_API ggml_backend_t ggml_backend_metal_init(void); GGML_API bool ggml_backend_is_metal(ggml_backend_t backend); -GGML_API ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size); +GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size); GGML_API void ggml_backend_metal_set_n_cb(ggml_backend_t backend, int n_cb); -GGML_API ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void); // helper to check if the device supports a specific family // ideally, the user code should be doing these checks diff 
--git a/ggml-metal.m b/ggml-metal.m index 2ca726055..867f2fd48 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -2294,13 +2294,13 @@ static void ggml_backend_metal_free_device(void) { } } -static const char * ggml_backend_metal_buffer_get_name(ggml_backend_buffer_t buffer) { +GGML_CALL static const char * ggml_backend_metal_buffer_get_name(ggml_backend_buffer_t buffer) { return "Metal"; UNUSED(buffer); } -static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { +GGML_CALL static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; for (int i = 0; i < ctx->n_buffers; i++) { @@ -2315,25 +2315,25 @@ static void ggml_backend_metal_buffer_free_buffer(ggml_backend_buffer_t buffer) free(ctx); } -static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { +GGML_CALL static void * ggml_backend_metal_buffer_get_base(ggml_backend_buffer_t buffer) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; return ctx->all_data; } -static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_metal_buffer_set_tensor(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor, const void * data, size_t offset, size_t size) { memcpy((char *)tensor->data + offset, data, size); UNUSED(buffer); } -static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { +GGML_CALL static void ggml_backend_metal_buffer_get_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * tensor, void * data, size_t offset, size_t size) { memcpy(data, (const char *)tensor->data + offset, size); UNUSED(buffer); } -static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { +GGML_CALL static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const struct ggml_tensor * src, struct ggml_tensor * dst) { if (ggml_backend_buffer_is_host(src->buffer)) { memcpy(dst->data, src->data, ggml_nbytes(src)); return true; @@ -2343,7 +2343,7 @@ static bool ggml_backend_metal_buffer_cpy_tensor(ggml_backend_buffer_t buffer, c UNUSED(buffer); } -static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { +GGML_CALL static void ggml_backend_metal_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { struct ggml_backend_metal_buffer_context * ctx = (struct ggml_backend_metal_buffer_context *)buffer->context; memset(ctx->all_data, value, ctx->all_size); @@ -2363,13 +2363,13 @@ static struct ggml_backend_buffer_i ggml_backend_metal_buffer_i = { // default buffer type -static const char * ggml_backend_metal_buffer_type_get_name(ggml_backend_buffer_type_t buft) { +GGML_CALL static const char * ggml_backend_metal_buffer_type_get_name(ggml_backend_buffer_type_t buft) { return "Metal"; UNUSED(buft); } -static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); const size_t 
size_page = sysconf(_SC_PAGESIZE); @@ -2421,24 +2421,24 @@ static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_ba return ggml_backend_buffer_init(buft, ggml_backend_metal_buffer_i, ctx, size); } -static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { +GGML_CALL static size_t ggml_backend_metal_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { return 32; UNUSED(buft); } -static bool ggml_backend_metal_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { +GGML_CALL static bool ggml_backend_metal_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { return ggml_backend_is_metal(backend) || ggml_backend_is_cpu(backend); UNUSED(buft); } -static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t buft) { +GGML_CALL static bool ggml_backend_metal_buffer_type_is_host(ggml_backend_buffer_type_t buft) { return true; UNUSED(buft); } -ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { +GGML_CALL ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { static struct ggml_backend_buffer_type ggml_backend_buffer_type_metal = { /* .iface = */ { /* .get_name = */ ggml_backend_metal_buffer_type_get_name, @@ -2456,7 +2456,7 @@ ggml_backend_buffer_type_t ggml_backend_metal_buffer_type(void) { // buffer from ptr -ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size) { +GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t size, size_t max_size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); ctx->all_data = data; @@ -2543,31 +2543,31 @@ ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, size_t siz // backend -static const char * ggml_backend_metal_name(ggml_backend_t backend) { +GGML_CALL static const char * ggml_backend_metal_name(ggml_backend_t backend) { return "Metal"; UNUSED(backend); } -static void ggml_backend_metal_free(ggml_backend_t backend) { +GGML_CALL static void ggml_backend_metal_free(ggml_backend_t backend) { struct ggml_metal_context * ctx = (struct ggml_metal_context *)backend->context; ggml_metal_free(ctx); free(backend); } -static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggml_backend_t backend) { +GGML_CALL static ggml_backend_buffer_type_t ggml_backend_metal_get_default_buffer_type(ggml_backend_t backend) { return ggml_backend_metal_buffer_type(); UNUSED(backend); } -static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { +GGML_CALL static bool ggml_backend_metal_graph_compute(ggml_backend_t backend, struct ggml_cgraph * cgraph) { struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; return ggml_metal_graph_compute(metal_ctx, cgraph); } -static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { +GGML_CALL static bool ggml_backend_metal_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { struct ggml_metal_context * metal_ctx = (struct ggml_metal_context *)backend->context; return ggml_metal_supports_op(metal_ctx, op); @@ -2630,9 +2630,9 @@ bool ggml_backend_metal_supports_family(ggml_backend_t backend, int family) { return [ctx->device supportsFamily:(MTLGPUFamilyApple1 + family - 1)]; } -ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); // silence warning 
+GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data); // silence warning -ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data) { +GGML_CALL ggml_backend_t ggml_backend_reg_metal_init(const char * params, void * user_data) { return ggml_backend_metal_init(); GGML_UNUSED(params); diff --git a/ggml.c b/ggml.c index ef5888ab2..5779f32d2 100644 --- a/ggml.c +++ b/ggml.c @@ -1990,19 +1990,19 @@ void ggml_print_objects(const struct ggml_context * ctx) { GGML_PRINT("%s: --- end ---\n", __func__); } -int64_t ggml_nelements(const struct ggml_tensor * tensor) { +GGML_CALL int64_t ggml_nelements(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->ne[0]*tensor->ne[1]*tensor->ne[2]*tensor->ne[3]; } -int64_t ggml_nrows(const struct ggml_tensor * tensor) { +GGML_CALL int64_t ggml_nrows(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->ne[1]*tensor->ne[2]*tensor->ne[3]; } -size_t ggml_nbytes(const struct ggml_tensor * tensor) { +GGML_CALL size_t ggml_nbytes(const struct ggml_tensor * tensor) { size_t nbytes; size_t blck_size = ggml_blck_size(tensor->type); if (blck_size == 1) { @@ -2025,15 +2025,15 @@ size_t ggml_nbytes_pad(const struct ggml_tensor * tensor) { return GGML_PAD(ggml_nbytes(tensor), GGML_MEM_ALIGN); } -int ggml_blck_size(enum ggml_type type) { +GGML_CALL int ggml_blck_size(enum ggml_type type) { return type_traits[type].blck_size; } -size_t ggml_type_size(enum ggml_type type) { +GGML_CALL size_t ggml_type_size(enum ggml_type type) { return type_traits[type].type_size; } -size_t ggml_row_size(enum ggml_type type, int64_t ne) { +GGML_CALL size_t ggml_row_size(enum ggml_type type, int64_t ne) { assert(ne % ggml_blck_size(type) == 0); return ggml_type_size(type)*ne/ggml_blck_size(type); } @@ -2042,15 +2042,15 @@ double ggml_type_sizef(enum ggml_type type) { return ((double)(type_traits[type].type_size))/type_traits[type].blck_size; } -const char * ggml_type_name(enum ggml_type type) { +GGML_CALL const char * ggml_type_name(enum ggml_type type) { return type_traits[type].type_name; } -bool ggml_is_quantized(enum ggml_type type) { +GGML_CALL bool ggml_is_quantized(enum ggml_type type) { return type_traits[type].is_quantized; } -const char * ggml_op_name(enum ggml_op op) { +GGML_CALL const char * ggml_op_name(enum ggml_op op) { return GGML_OP_NAME[op]; } @@ -2062,7 +2062,7 @@ const char * ggml_unary_op_name(enum ggml_unary_op op) { return GGML_UNARY_OP_NAME[op]; } -const char * ggml_op_desc(const struct ggml_tensor * t) { +GGML_CALL const char * ggml_op_desc(const struct ggml_tensor * t) { if (t->op == GGML_OP_UNARY) { enum ggml_unary_op uop = ggml_get_unary_op(t); return ggml_unary_op_name(uop); @@ -2072,7 +2072,7 @@ const char * ggml_op_desc(const struct ggml_tensor * t) { } } -size_t ggml_element_size(const struct ggml_tensor * tensor) { +GGML_CALL size_t ggml_element_size(const struct ggml_tensor * tensor) { return ggml_type_size(tensor->type); } @@ -2154,11 +2154,11 @@ size_t ggml_tensor_overhead(void) { return GGML_OBJECT_SIZE + GGML_TENSOR_SIZE; } -bool ggml_is_transposed(const struct ggml_tensor * tensor) { +GGML_CALL bool ggml_is_transposed(const struct ggml_tensor * tensor) { return tensor->nb[0] > tensor->nb[1]; } -bool ggml_is_contiguous(const struct ggml_tensor * tensor) { +GGML_CALL bool ggml_is_contiguous(const struct ggml_tensor * tensor) { 
static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return @@ -2177,7 +2177,7 @@ static inline bool ggml_is_contiguous_except_dim_1(const struct ggml_tensor * te tensor->nb[3] == tensor->nb[2]*tensor->ne[2]; } -bool ggml_is_permuted(const struct ggml_tensor * tensor) { +GGML_CALL bool ggml_is_permuted(const struct ggml_tensor * tensor) { static_assert(GGML_MAX_DIMS == 4, "GGML_MAX_DIMS is not 4 - update this function"); return tensor->nb[0] > tensor->nb[1] || tensor->nb[1] > tensor->nb[2] || tensor->nb[2] > tensor->nb[3]; @@ -3079,7 +3079,7 @@ float * ggml_get_data_f32(const struct ggml_tensor * tensor) { return (float *)(tensor->data); } -enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor) { +GGML_CALL enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor) { GGML_ASSERT(tensor->op == GGML_OP_UNARY); return (enum ggml_unary_op) ggml_get_op_params_i32(tensor, 0); } @@ -11653,7 +11653,7 @@ static void ggml_rope_cache_init( } } -void ggml_rope_yarn_corr_dims( +GGML_CALL void ggml_rope_yarn_corr_dims( int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2] ) { // start and end correction dims diff --git a/ggml.h b/ggml.h index 1187074f7..837c52e68 100644 --- a/ggml.h +++ b/ggml.h @@ -187,6 +187,16 @@ # define GGML_API #endif +#ifdef GGML_MULTIPLATFORM +# if defined(_WIN32) +# define GGML_CALL +# else +# define GGML_CALL __attribute__((__ms_abi__)) +# endif +#else +# define GGML_CALL +#endif + // TODO: support for clang #ifdef __GNUC__ # define GGML_DEPRECATED(func, hint) func __attribute__((deprecated(hint))) @@ -649,41 +659,41 @@ extern "C" { GGML_API void ggml_print_object (const struct ggml_object * obj); GGML_API void ggml_print_objects(const struct ggml_context * ctx); - GGML_API int64_t ggml_nelements (const struct ggml_tensor * tensor); - GGML_API int64_t ggml_nrows (const struct ggml_tensor * tensor); - GGML_API size_t ggml_nbytes (const struct ggml_tensor * tensor); - GGML_API size_t ggml_nbytes_pad (const struct ggml_tensor * tensor); // same as ggml_nbytes() but padded to GGML_MEM_ALIGN + GGML_API GGML_CALL int64_t ggml_nelements (const struct ggml_tensor * tensor); + GGML_API GGML_CALL int64_t ggml_nrows (const struct ggml_tensor * tensor); + GGML_API GGML_CALL size_t ggml_nbytes (const struct ggml_tensor * tensor); + GGML_API size_t ggml_nbytes_pad (const struct ggml_tensor * tensor); // same as ggml_nbytes() but padded to GGML_MEM_ALIGN - GGML_API int ggml_blck_size(enum ggml_type type); - GGML_API size_t ggml_type_size(enum ggml_type type); // size in bytes for all elements in a block - GGML_API size_t ggml_row_size (enum ggml_type type, int64_t ne); // size in bytes for all elements in a row + GGML_API GGML_CALL int ggml_blck_size(enum ggml_type type); + GGML_API GGML_CALL size_t ggml_type_size(enum ggml_type type); // size in bytes for all elements in a block + GGML_API GGML_CALL size_t ggml_row_size (enum ggml_type type, int64_t ne); // size in bytes for all elements in a row GGML_DEPRECATED( GGML_API double ggml_type_sizef(enum ggml_type type), // ggml_type_size()/ggml_blck_size() as float "use ggml_row_size() instead"); - GGML_API const char * ggml_type_name(enum ggml_type type); - GGML_API const char * ggml_op_name (enum ggml_op op); - GGML_API const char * ggml_op_symbol(enum ggml_op op); + GGML_API GGML_CALL const char * ggml_type_name(enum ggml_type type); + GGML_API GGML_CALL const char * ggml_op_name (enum ggml_op op); + GGML_API const char * ggml_op_symbol(enum 
ggml_op op);
-    GGML_API const char * ggml_unary_op_name(enum ggml_unary_op op);
-    GGML_API const char * ggml_op_desc(const struct ggml_tensor * t); // unary or op name
+    GGML_API           const char * ggml_unary_op_name(enum ggml_unary_op op);
+    GGML_API GGML_CALL const char * ggml_op_desc(const struct ggml_tensor * t); // unary or op name

-    GGML_API size_t ggml_element_size(const struct ggml_tensor * tensor);
+    GGML_API GGML_CALL size_t ggml_element_size(const struct ggml_tensor * tensor);

-    GGML_API bool ggml_is_quantized(enum ggml_type type);
+    GGML_API GGML_CALL bool ggml_is_quantized(enum ggml_type type);

     // TODO: temporary until model loading of ggml examples is refactored
     GGML_API enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype);

-    GGML_API bool ggml_is_transposed(const struct ggml_tensor * tensor);
-    GGML_API bool ggml_is_contiguous(const struct ggml_tensor * tensor);
-    GGML_API bool ggml_is_permuted  (const struct ggml_tensor * tensor);
-    GGML_API bool ggml_is_scalar    (const struct ggml_tensor * tensor);
-    GGML_API bool ggml_is_vector    (const struct ggml_tensor * tensor);
-    GGML_API bool ggml_is_matrix    (const struct ggml_tensor * tensor);
-    GGML_API bool ggml_is_3d        (const struct ggml_tensor * tensor);
-    GGML_API int  ggml_n_dims       (const struct ggml_tensor * tensor); // returns 1 for scalars
+    GGML_API GGML_CALL bool ggml_is_transposed(const struct ggml_tensor * tensor);
+    GGML_API GGML_CALL bool ggml_is_contiguous(const struct ggml_tensor * tensor);
+    GGML_API GGML_CALL bool ggml_is_permuted  (const struct ggml_tensor * tensor);
+    GGML_API bool ggml_is_scalar    (const struct ggml_tensor * tensor);
+    GGML_API bool ggml_is_vector    (const struct ggml_tensor * tensor);
+    GGML_API bool ggml_is_matrix    (const struct ggml_tensor * tensor);
+    GGML_API bool ggml_is_3d        (const struct ggml_tensor * tensor);
+    GGML_API int  ggml_n_dims       (const struct ggml_tensor * tensor); // returns 1 for scalars

     GGML_API bool ggml_are_same_shape(const struct ggml_tensor * t0, const struct ggml_tensor * t1);

@@ -770,7 +780,7 @@ extern "C" {
     GGML_API void *  ggml_get_data    (const struct ggml_tensor * tensor);
     GGML_API float * ggml_get_data_f32(const struct ggml_tensor * tensor);

-    GGML_API enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor);
+    GGML_API GGML_CALL enum ggml_unary_op ggml_get_unary_op(const struct ggml_tensor * tensor);

     GGML_API const char *         ggml_get_name   (const struct ggml_tensor * tensor);
     GGML_API struct ggml_tensor * ggml_set_name   (      struct ggml_tensor * tensor, const char * name);
@@ -1413,7 +1423,7 @@ extern "C" {
             float                 beta_slow);

     // compute correction dims for YaRN RoPE scaling
-    void ggml_rope_yarn_corr_dims(
+    GGML_CALL void ggml_rope_yarn_corr_dims(
         int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2]);

     // xPos RoPE, in-place, returns view(a)
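Note: the patch above retrofits ggml's cross-module entry points with a GGML_CALL annotation. As defined in the ggml.h hunk, the macro expands to nothing by default; when GGML_MULTIPLATFORM is set on a non-Windows target it expands to __attribute__((__ms_abi__)), which pins those functions to the Microsoft x86-64 calling convention. This is apparently intended so that objects built by different toolchains, each defaulting to a different ABI, can still call each other through ggml's function-pointer interface tables. Below is a minimal self-contained sketch of the same pattern; the MULTIPLATFORM/MY_CALL/my_* names are illustrative stand-ins, not part of ggml:

    /* demo.c -- sketch of a pinned-ABI annotation, modeled on GGML_CALL */
    #ifdef MULTIPLATFORM
    #  if defined(_WIN32)
    #    define MY_CALL            /* the MS ABI is already the default here */
    #  else
    #    define MY_CALL __attribute__((__ms_abi__))
    #  endif
    #else
    #  define MY_CALL
    #endif

    struct my_tensor { long long ne[4]; };

    /* The annotation must appear on both the declaration and the
       definition, so every translation unit that holds a pointer to the
       function invokes it with the same calling convention. */
    MY_CALL long long my_nelements(const struct my_tensor * t);

    MY_CALL long long my_nelements(const struct my_tensor * t) {
        return t->ne[0] * t->ne[1] * t->ne[2] * t->ne[3];
    }

Only functions whose addresses cross a module boundary need the annotation; internal helpers keep the platform's default convention.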
From 122ed4840cc6d209df6043e027f9f8a03aee01da Mon Sep 17 00:00:00 2001
From: Maximilian Winter
Date: Tue, 16 Jan 2024 13:10:48 +0100
Subject: [PATCH 483/859] examples : fix and improve docs for the grammar generator (#4909)

* Create pydantic-models-to-grammar.py
* Added some comments for usage
* Refactored grammar generator; added an example and usage instructions.
* Update pydantic_models_to_grammar.py
* Update pydantic-models-to-grammar-examples.py
* Renamed module and imported it.
* Update pydantic-models-to-grammar.py
* Renamed file and fixed a grammar generator issue.
* Fixed some issues and bugs of the grammar generator; improved documentation.
* Update pydantic_models_to_grammar.py
---
 examples/pydantic_models_to_grammar.py | 877 +++++++++++++++----------
 1 file changed, 519 insertions(+), 358 deletions(-)

diff --git a/examples/pydantic_models_to_grammar.py b/examples/pydantic_models_to_grammar.py
index 41b98fdc1..848c1c367 100644
--- a/examples/pydantic_models_to_grammar.py
+++ b/examples/pydantic_models_to_grammar.py
@@ -4,6 +4,7 @@
 from copy import copy
 from inspect import isclass, getdoc
 from types import NoneType
+from docstring_parser import parse
 from pydantic import BaseModel, create_model, Field
 from typing import Any, Type, List, get_args, get_origin, Tuple, Union, Optional, _GenericAlias
 from enum import Enum
@@ -25,9 +26,10 @@ class PydanticDataType(Enum):
         ENUM (str): Represents an enum data type.
         CUSTOM_CLASS (str): Represents a custom class data type.
     """
+
     STRING = "string"
     TRIPLE_QUOTED_STRING = "triple_quoted_string"
-    MARKDOWN_STRING = "markdown_string"
+    MARKDOWN_CODE_BLOCK = "markdown_code_block"
     BOOLEAN = "boolean"
     INTEGER = "integer"
     FLOAT = "float"
@@ -78,10 +80,10 @@ def map_pydantic_type_to_gbnf(pydantic_type: Type[Any]) -> str:


 def format_model_and_field_name(model_name: str) -> str:
-    parts = re.findall('[A-Z][^A-Z]*', model_name)
+    parts = re.findall("[A-Z][^A-Z]*", model_name)
     if not parts:  # Check if the list is empty
         return model_name.lower().replace("_", "-")
-    return '-'.join(part.lower().replace("_", "-") for part in parts)
+    return "-".join(part.lower().replace("_", "-") for part in parts)


 def generate_list_rule(element_type):
@@ -93,29 +95,31 @@ def generate_list_rule(element_type):
     """
     rule_name = f"{map_pydantic_type_to_gbnf(element_type)}-list"
    element_rule = map_pydantic_type_to_gbnf(element_type)
-    list_rule = fr'{rule_name} ::= "[" {element_rule} ("," {element_rule})* "]"'
+    list_rule = rf'{rule_name} ::= "[" {element_rule} ("," {element_rule})* "]"'
     return list_rule


 def get_members_structure(cls, rule_name):
     if issubclass(cls, Enum):
         # Handle Enum types
-        members = [f'\"\\\"{member.value}\\\"\"' for name, member in cls.__members__.items()]
+        members = [f'"\\"{member.value}\\""' for name, member in cls.__members__.items()]
         return f"{cls.__name__.lower()} ::= " + " | ".join(members)
     if cls.__annotations__ and cls.__annotations__ != {}:
         result = f'{rule_name} ::= "{{"'
         type_list_rules = []
         # Modify this comprehension
-        members = [f' \"\\\"{name}\\\"\" ":" {map_pydantic_type_to_gbnf(param_type)}'
-                   for name, param_type in cls.__annotations__.items()
-                   if name != 'self']
+        members = [
+            f' "\\"{name}\\"" ":" {map_pydantic_type_to_gbnf(param_type)}'
+            for name, param_type in cls.__annotations__.items()
+            if name != "self"
+        ]

         result += '"," '.join(members)
         result += ' "}"'
         return result, type_list_rules
     elif rule_name == "custom-class-any":
-        result = f'{rule_name} ::= '
-        result += 'value'
+        result = f"{rule_name} ::= "
+        result += "value"
         type_list_rules = []
         return result, type_list_rules
     else:
@@ -124,9 +128,11 @@ def get_members_structure(cls, rule_name):
         init_signature = inspect.signature(cls.__init__)
         parameters = init_signature.parameters
         result = f'{rule_name} ::= "{{"'
         type_list_rules = []
         # Modify this comprehension too
-        members = [f' \"\\\"{name}\\\"\" ":" {map_pydantic_type_to_gbnf(param.annotation)}'
-                   for name, param in parameters.items()
-                   if name != 'self' and param.annotation != inspect.Parameter.empty]
+        members = [
+            f' "\\"{name}\\"" ":" {map_pydantic_type_to_gbnf(param.annotation)}'
+            for name, param in parameters.items()
+            if name != "self" and param.annotation != inspect.Parameter.empty
+        ]

         result += '", "'.join(members)
         result +=
' "}"' @@ -141,8 +147,8 @@ def regex_to_gbnf(regex_pattern: str) -> str: gbnf_rule = regex_pattern # Translate common regex components to GBNF - gbnf_rule = gbnf_rule.replace('\\d', '[0-9]') - gbnf_rule = gbnf_rule.replace('\\s', '[ \t\n]') + gbnf_rule = gbnf_rule.replace("\\d", "[0-9]") + gbnf_rule = gbnf_rule.replace("\\s", "[ \t\n]") # Handle quantifiers and other regex syntax that is similar in GBNF # (e.g., '*', '+', '?', character classes) @@ -158,12 +164,12 @@ def generate_gbnf_integer_rules(max_digit=None, min_digit=None): Generates GBNF (Generalized Backus-Naur Form) rules for integers based on the given maximum and minimum digits. Parameters: - max_digit (int): The maximum number of digits for the integer. Default is None. - min_digit (int): The minimum number of digits for the integer. Default is None. + max_digit (int): The maximum number of digits for the integer. Default is None. + min_digit (int): The minimum number of digits for the integer. Default is None. Returns: - integer_rule (str): The identifier for the integer rule generated. - additional_rules (list): A list of additional rules generated based on the given maximum and minimum digits. + integer_rule (str): The identifier for the integer rule generated. + additional_rules (list): A list of additional rules generated based on the given maximum and minimum digits. """ additional_rules = [] @@ -178,21 +184,21 @@ def generate_gbnf_integer_rules(max_digit=None, min_digit=None): # Handling Integer Rules if max_digit is not None or min_digit is not None: # Start with an empty rule part - integer_rule_part = '' + integer_rule_part = "" # Add mandatory digits as per min_digit if min_digit is not None: - integer_rule_part += '[0-9] ' * min_digit + integer_rule_part += "[0-9] " * min_digit # Add optional digits up to max_digit if max_digit is not None: optional_digits = max_digit - (min_digit if min_digit is not None else 0) - integer_rule_part += ''.join(['[0-9]? ' for _ in range(optional_digits)]) + integer_rule_part += "".join(["[0-9]? 
" for _ in range(optional_digits)]) # Trim the rule part and append it to additional rules integer_rule_part = integer_rule_part.strip() if integer_rule_part: - additional_rules.append(f'{integer_rule} ::= {integer_rule_part}') + additional_rules.append(f"{integer_rule} ::= {integer_rule_part}") return integer_rule, additional_rules @@ -224,21 +230,26 @@ def generate_gbnf_float_rules(max_digit=None, min_digit=None, max_precision=None additional_rules = [] # Define the integer part rule - integer_part_rule = "integer-part" + (f"-max{max_digit}" if max_digit is not None else "") + ( + integer_part_rule = ( + "integer-part" + (f"-max{max_digit}" if max_digit is not None else "") + ( f"-min{min_digit}" if min_digit is not None else "") + ) # Define the fractional part rule based on precision constraints fractional_part_rule = "fractional-part" - fractional_rule_part = '' + fractional_rule_part = "" if max_precision is not None or min_precision is not None: fractional_part_rule += (f"-max{max_precision}" if max_precision is not None else "") + ( - f"-min{min_precision}" if min_precision is not None else "") + f"-min{min_precision}" if min_precision is not None else "" + ) # Minimum number of digits - fractional_rule_part = '[0-9]' * (min_precision if min_precision is not None else 1) + fractional_rule_part = "[0-9]" * (min_precision if min_precision is not None else 1) # Optional additional digits - fractional_rule_part += ''.join([' [0-9]?'] * ( - (max_precision - (min_precision if min_precision is not None else 1)) if max_precision is not None else 0)) - additional_rules.append(f'{fractional_part_rule} ::= {fractional_rule_part}') + fractional_rule_part += "".join( + [" [0-9]?"] * ((max_precision - ( + min_precision if min_precision is not None else 1)) if max_precision is not None else 0) + ) + additional_rules.append(f"{fractional_part_rule} ::= {fractional_rule_part}") # Define the float rule float_rule = f"float-{max_digit if max_digit is not None else 'X'}-{min_digit if min_digit is not None else 'X'}-{max_precision if max_precision is not None else 'X'}-{min_precision if min_precision is not None else 'X'}" @@ -246,20 +257,19 @@ def generate_gbnf_float_rules(max_digit=None, min_digit=None, max_precision=None # Generating the integer part rule definition, if necessary if max_digit is not None or min_digit is not None: - integer_rule_part = '[0-9]' + integer_rule_part = "[0-9]" if min_digit is not None and min_digit > 1: - integer_rule_part += ' [0-9]' * (min_digit - 1) + integer_rule_part += " [0-9]" * (min_digit - 1) if max_digit is not None: - integer_rule_part += ''.join([' [0-9]?'] * (max_digit - (min_digit if min_digit is not None else 1))) - additional_rules.append(f'{integer_part_rule} ::= {integer_rule_part.strip()}') + integer_rule_part += "".join([" [0-9]?"] * (max_digit - (min_digit if min_digit is not None else 1))) + additional_rules.append(f"{integer_part_rule} ::= {integer_rule_part.strip()}") return float_rule, additional_rules -def generate_gbnf_rule_for_type(model_name, field_name, - field_type, is_optional, processed_models, created_rules, - field_info=None) -> \ - Tuple[str, list]: +def generate_gbnf_rule_for_type( + model_name, field_name, field_type, is_optional, processed_models, created_rules, field_info=None +) -> Tuple[str, list]: """ Generate GBNF rule for a given field type. 
@@ -282,20 +292,19 @@ def generate_gbnf_rule_for_type(model_name, field_name, if isclass(field_type) and issubclass(field_type, BaseModel): nested_model_name = format_model_and_field_name(field_type.__name__) - nested_model_rules = generate_gbnf_grammar(field_type, processed_models, created_rules) + nested_model_rules, _ = generate_gbnf_grammar(field_type, processed_models, created_rules) rules.extend(nested_model_rules) gbnf_type, rules = nested_model_name, rules elif isclass(field_type) and issubclass(field_type, Enum): - enum_values = [f'\"\\\"{e.value}\\\"\"' for e in field_type] # Adding escaped quotes + enum_values = [f'"\\"{e.value}\\""' for e in field_type] # Adding escaped quotes enum_rule = f"{model_name}-{field_name} ::= {' | '.join(enum_values)}" rules.append(enum_rule) gbnf_type, rules = model_name + "-" + field_name, rules - elif get_origin(field_type) == list or field_type == list: # Array + elif get_origin(field_type) == list: # Array element_type = get_args(field_type)[0] - element_rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, - f"{field_name}-element", - element_type, is_optional, processed_models, - created_rules) + element_rule_name, additional_rules = generate_gbnf_rule_for_type( + model_name, f"{field_name}-element", element_type, is_optional, processed_models, created_rules + ) rules.extend(additional_rules) array_rule = f"""{model_name}-{field_name} ::= "[" ws {element_rule_name} ("," ws {element_rule_name})* "]" """ rules.append(array_rule) @@ -303,10 +312,9 @@ def generate_gbnf_rule_for_type(model_name, field_name, elif get_origin(field_type) == set or field_type == set: # Array element_type = get_args(field_type)[0] - element_rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, - f"{field_name}-element", - element_type, is_optional, processed_models, - created_rules) + element_rule_name, additional_rules = generate_gbnf_rule_for_type( + model_name, f"{field_name}-element", element_type, is_optional, processed_models, created_rules + ) rules.extend(additional_rules) array_rule = f"""{model_name}-{field_name} ::= "[" ws {element_rule_name} ("," ws {element_rule_name})* "]" """ rules.append(array_rule) @@ -318,15 +326,13 @@ def generate_gbnf_rule_for_type(model_name, field_name, elif gbnf_type.startswith("custom-dict-"): key_type, value_type = get_args(field_type) - additional_key_type, additional_key_rules = generate_gbnf_rule_for_type(model_name, - f"{field_name}-key-type", - key_type, is_optional, processed_models, - created_rules) - additional_value_type, additional_value_rules = generate_gbnf_rule_for_type(model_name, - f"{field_name}-value-type", - value_type, is_optional, - processed_models, created_rules) - gbnf_type = fr'{gbnf_type} ::= "{{" ( {additional_key_type} ":" {additional_value_type} ("," {additional_key_type} ":" {additional_value_type})* )? "}}" ' + additional_key_type, additional_key_rules = generate_gbnf_rule_for_type( + model_name, f"{field_name}-key-type", key_type, is_optional, processed_models, created_rules + ) + additional_value_type, additional_value_rules = generate_gbnf_rule_for_type( + model_name, f"{field_name}-value-type", value_type, is_optional, processed_models, created_rules + ) + gbnf_type = rf'{gbnf_type} ::= "{{" ( {additional_key_type} ": " {additional_value_type} ("," "\n" ws {additional_key_type} ":" {additional_value_type})* )? 
"}}" ' rules.extend(additional_key_rules) rules.extend(additional_value_rules) @@ -336,19 +342,16 @@ def generate_gbnf_rule_for_type(model_name, field_name, for union_type in union_types: if isinstance(union_type, _GenericAlias): - union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type(model_name, - field_name, union_type, - False, - processed_models, created_rules) + union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type( + model_name, field_name, union_type, False, processed_models, created_rules + ) union_rules.append(union_gbnf_type) rules.extend(union_rules_list) - elif not issubclass(union_type, NoneType): - union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type(model_name, - field_name, union_type, - False, - processed_models, created_rules) + union_gbnf_type, union_rules_list = generate_gbnf_rule_for_type( + model_name, field_name, union_type, False, processed_models, created_rules + ) union_rules.append(union_gbnf_type) rules.extend(union_rules_list) @@ -363,45 +366,58 @@ def generate_gbnf_rule_for_type(model_name, field_name, else: gbnf_type = f"{model_name}-{field_name}-union" elif isclass(field_type) and issubclass(field_type, str): - if field_info and hasattr(field_info, 'json_schema_extra') and field_info.json_schema_extra is not None: - - triple_quoted_string = field_info.json_schema_extra.get('triple_quoted_string', False) - markdown_string = field_info.json_schema_extra.get('markdown_string', False) + if field_info and hasattr(field_info, "json_schema_extra") and field_info.json_schema_extra is not None: + triple_quoted_string = field_info.json_schema_extra.get("triple_quoted_string", False) + markdown_string = field_info.json_schema_extra.get("markdown_code_block", False) gbnf_type = PydanticDataType.TRIPLE_QUOTED_STRING.value if triple_quoted_string else PydanticDataType.STRING.value - gbnf_type = PydanticDataType.MARKDOWN_STRING.value if markdown_string else gbnf_type + gbnf_type = PydanticDataType.MARKDOWN_CODE_BLOCK.value if markdown_string else gbnf_type - elif field_info and hasattr(field_info, 'pattern'): + elif field_info and hasattr(field_info, "pattern"): # Convert regex pattern to grammar rule regex_pattern = field_info.regex.pattern gbnf_type = f"pattern-{field_name} ::= {regex_to_gbnf(regex_pattern)}" else: gbnf_type = PydanticDataType.STRING.value - elif isclass(field_type) and issubclass(field_type, float) and field_info and hasattr(field_info, - 'json_schema_extra') and field_info.json_schema_extra is not None: + elif ( + isclass(field_type) + and issubclass(field_type, float) + and field_info + and hasattr(field_info, "json_schema_extra") + and field_info.json_schema_extra is not None + ): # Retrieve precision attributes for floats - max_precision = field_info.json_schema_extra.get('max_precision') if field_info and hasattr(field_info, - 'json_schema_extra') else None - min_precision = field_info.json_schema_extra.get('min_precision') if field_info and hasattr(field_info, - 'json_schema_extra') else None - max_digits = field_info.json_schema_extra.get('max_digit') if field_info and hasattr(field_info, - 'json_schema_extra') else None - min_digits = field_info.json_schema_extra.get('min_digit') if field_info and hasattr(field_info, - 'json_schema_extra') else None + max_precision = ( + field_info.json_schema_extra.get("max_precision") if field_info and hasattr(field_info, + "json_schema_extra") else None + ) + min_precision = ( + field_info.json_schema_extra.get("min_precision") if field_info and hasattr(field_info, + 
"json_schema_extra") else None + ) + max_digits = field_info.json_schema_extra.get("max_digit") if field_info and hasattr(field_info, + "json_schema_extra") else None + min_digits = field_info.json_schema_extra.get("min_digit") if field_info and hasattr(field_info, + "json_schema_extra") else None # Generate GBNF rule for float with given attributes - gbnf_type, rules = generate_gbnf_float_rules(max_digit=max_digits, min_digit=min_digits, - max_precision=max_precision, - min_precision=min_precision) + gbnf_type, rules = generate_gbnf_float_rules( + max_digit=max_digits, min_digit=min_digits, max_precision=max_precision, min_precision=min_precision + ) - elif isclass(field_type) and issubclass(field_type, int) and field_info and hasattr(field_info, - 'json_schema_extra') and field_info.json_schema_extra is not None: + elif ( + isclass(field_type) + and issubclass(field_type, int) + and field_info + and hasattr(field_info, "json_schema_extra") + and field_info.json_schema_extra is not None + ): # Retrieve digit attributes for integers - max_digits = field_info.json_schema_extra.get('max_digit') if field_info and hasattr(field_info, - 'json_schema_extra') else None - min_digits = field_info.json_schema_extra.get('min_digit') if field_info and hasattr(field_info, - 'json_schema_extra') else None + max_digits = field_info.json_schema_extra.get("max_digit") if field_info and hasattr(field_info, + "json_schema_extra") else None + min_digits = field_info.json_schema_extra.get("min_digit") if field_info and hasattr(field_info, + "json_schema_extra") else None # Generate GBNF rule for integer with given attributes gbnf_type, rules = generate_gbnf_integer_rules(max_digit=max_digits, min_digit=min_digits) @@ -443,13 +459,13 @@ def generate_gbnf_grammar(model: Type[BaseModel], processed_models: set, created if not issubclass(model, BaseModel): # For non-Pydantic classes, generate model_fields from __annotations__ or __init__ - if hasattr(model, '__annotations__') and model.__annotations__: + if hasattr(model, "__annotations__") and model.__annotations__: model_fields = {name: (typ, ...) 
for name, typ in model.__annotations__.items()} else: init_signature = inspect.signature(model.__init__) parameters = init_signature.parameters - model_fields = {name: (param.annotation, param.default) for name, param in parameters.items() - if name != 'self'} + model_fields = {name: (param.annotation, param.default) for name, param in parameters.items() if + name != "self"} else: # For Pydantic models, use model_fields and check for ellipsis (required fields) model_fields = model.__annotations__ @@ -469,51 +485,55 @@ def generate_gbnf_grammar(model: Type[BaseModel], processed_models: set, created field_type = field_info field_info = model.model_fields[field_name] is_optional = field_info.is_required is False and get_origin(field_type) is Optional - rule_name, additional_rules = generate_gbnf_rule_for_type(model_name, - format_model_and_field_name(field_name), - field_type, is_optional, - processed_models, created_rules, field_info) - look_for_markdown_code_block = True if rule_name == "markdown_string" else False + rule_name, additional_rules = generate_gbnf_rule_for_type( + model_name, format_model_and_field_name(field_name), field_type, is_optional, processed_models, + created_rules, field_info + ) + look_for_markdown_code_block = True if rule_name == "markdown_code_block" else False look_for_triple_quoted_string = True if rule_name == "triple_quoted_string" else False if not look_for_markdown_code_block and not look_for_triple_quoted_string: if rule_name not in created_rules: created_rules[rule_name] = additional_rules - model_rule_parts.append(f' ws \"\\\"{field_name}\\\"\" ": " {rule_name}') # Adding escaped quotes + model_rule_parts.append(f' ws "\\"{field_name}\\"" ":" ws {rule_name}') # Adding escaped quotes nested_rules.extend(additional_rules) else: - has_triple_quoted_string = look_for_markdown_code_block - has_markdown_code_block = look_for_triple_quoted_string + has_triple_quoted_string = look_for_triple_quoted_string + has_markdown_code_block = look_for_markdown_code_block fields_joined = r' "," "\n" '.join(model_rule_parts) - model_rule = fr'{model_name} ::= "{{" "\n" {fields_joined} "\n" ws "}}"' - - if look_for_markdown_code_block or look_for_triple_quoted_string: - model_rule += ' ws "}"' + model_rule = rf'{model_name} ::= "{{" "\n" {fields_joined} "\n" ws "}}"' + has_special_string = False if has_triple_quoted_string: + model_rule += '"\\n" ws "}"' model_rule += '"\\n" triple-quoted-string' + has_special_string = True if has_markdown_code_block: + model_rule += '"\\n" ws "}"' model_rule += '"\\n" markdown-code-block' + has_special_string = True all_rules = [model_rule] + nested_rules - return all_rules, has_markdown_code_block, has_triple_quoted_string + return all_rules, has_special_string -def generate_gbnf_grammar_from_pydantic_models(models: List[Type[BaseModel]], outer_object_name: str = None, - outer_object_content: str = None, list_of_outputs: bool = False) -> str: +def generate_gbnf_grammar_from_pydantic_models( + models: List[Type[BaseModel]], outer_object_name: str = None, outer_object_content: str = None, + list_of_outputs: bool = False +) -> str: """ Generate GBNF Grammar from Pydantic Models. This method takes a list of Pydantic models and uses them to generate a GBNF grammar string. The generated grammar string can be used for parsing and validating data using the generated * grammar. - Parameters: - models (List[Type[BaseModel]]): A list of Pydantic models to generate the grammar from. - outer_object_name (str): Outer object name for the GBNF grammar. 
If None, no outer object will be generated. Eg. "function" for function calling. - outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. - list_of_outputs (str, optional): Allows a list of output objects + Args: + models (List[Type[BaseModel]]): A list of Pydantic models to generate the grammar from. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + list_of_outputs (str, optional): Allows a list of output objects Returns: - str: The generated GBNF grammar string. + str: The generated GBNF grammar string. Examples: models = [UserModel, PostModel] @@ -527,52 +547,53 @@ def generate_gbnf_grammar_from_pydantic_models(models: List[Type[BaseModel]], ou all_rules = [] created_rules = {} if outer_object_name is None: - for model in models: - model_rules, _, _ = generate_gbnf_grammar(model, - processed_models, created_rules) + model_rules, _ = generate_gbnf_grammar(model, processed_models, created_rules) all_rules.extend(model_rules) if list_of_outputs: - root_rule = r'root ::= ws "[" grammar-models ("," grammar-models)* "]"' + "\n" + root_rule = r'root ::= (" "| "\n") "[" ws grammar-models ("," ws grammar-models)* ws "]"' + "\n" else: - root_rule = r'root ::= ws grammar-models' + "\n" + root_rule = r'root ::= (" "| "\n") grammar-models' + "\n" root_rule += "grammar-models ::= " + " | ".join( [format_model_and_field_name(model.__name__) for model in models]) all_rules.insert(0, root_rule) return "\n".join(all_rules) elif outer_object_name is not None: if list_of_outputs: - root_rule = fr'root ::= ws "[" {format_model_and_field_name(outer_object_name)} ("," {format_model_and_field_name(outer_object_name)})* "]"' + "\n" + root_rule = ( + rf'root ::= (" "| "\n") "[" ws {format_model_and_field_name(outer_object_name)} ("," ws {format_model_and_field_name(outer_object_name)})* ws "]"' + + "\n" + ) else: root_rule = f"root ::= {format_model_and_field_name(outer_object_name)}\n" - model_rule = fr'{format_model_and_field_name(outer_object_name)} ::= ws "{{" ws "\"{outer_object_name}\"" ": " grammar-models' + model_rule = ( + rf'{format_model_and_field_name(outer_object_name)} ::= (" "| "\n") "{{" ws "\"{outer_object_name}\"" ":" ws grammar-models' + ) fields_joined = " | ".join( - [fr'{format_model_and_field_name(model.__name__)}-grammar-model' for model in models]) + [rf"{format_model_and_field_name(model.__name__)}-grammar-model" for model in models]) - grammar_model_rules = f'\ngrammar-models ::= {fields_joined}' + grammar_model_rules = f"\ngrammar-models ::= {fields_joined}" mod_rules = [] for model in models: - mod_rule = fr'{format_model_and_field_name(model.__name__)}-grammar-model ::= ws' - mod_rule += fr'"\"{format_model_and_field_name(model.__name__)}\"" "," ws "\"{outer_object_content}\"" ws ":" ws {format_model_and_field_name(model.__name__)}' + '\n' + mod_rule = rf"{format_model_and_field_name(model.__name__)}-grammar-model ::= " + mod_rule += ( + rf'"\"{model.__name__}\"" "," ws "\"{outer_object_content}\"" ":" ws {format_model_and_field_name(model.__name__)}' + "\n" + ) mod_rules.append(mod_rule) grammar_model_rules += "\n" + "\n".join(mod_rules) - look_for_markdown_code_block = False - look_for_triple_quoted_string = False + for model in models: - model_rules, markdown_block, 
triple_quoted_string = generate_gbnf_grammar(model, - processed_models, created_rules) + model_rules, has_special_string = generate_gbnf_grammar(model, processed_models, + created_rules) + + if not has_special_string: + model_rules[0] += r'"\n" ws "}"' + all_rules.extend(model_rules) - if markdown_block: - look_for_markdown_code_block = True - if triple_quoted_string: - look_for_triple_quoted_string = True - - if not look_for_markdown_code_block and not look_for_triple_quoted_string: - model_rule += ' ws "}"' all_rules.insert(0, root_rule + model_rule + grammar_model_rules) return "\n".join(all_rules) @@ -582,10 +603,10 @@ def get_primitive_grammar(grammar): Returns the needed GBNF primitive grammar for a given GBNF grammar string. Args: - grammar (str): The string containing the GBNF grammar. + grammar (str): The string containing the GBNF grammar. Returns: - str: GBNF primitive grammar string. + str: GBNF primitive grammar string. """ type_list = [] if "string-list" in grammar: @@ -611,7 +632,7 @@ integer ::= [0-9]+""" any_block = "" if "custom-class-any" in grammar: - any_block = ''' + any_block = """ value ::= object | array | string | number | boolean | null object ::= @@ -626,7 +647,7 @@ array ::= ("," ws value)* )? "]" ws -number ::= integer | float''' +number ::= integer | float""" markdown_code_block_grammar = "" if "markdown-code-block" in grammar: @@ -641,90 +662,32 @@ closing-triple-ticks ::= "```" "\n"''' triple-quoted-string ::= triple-quotes triple-quoted-string-content triple-quotes triple-quoted-string-content ::= ( [^'] | "'" [^'] | "'" "'" [^'] )* triple-quotes ::= "'''" """ - return "\n" + '\n'.join(additional_grammar) + any_block + primitive_grammar + markdown_code_block_grammar + return "\n" + "\n".join(additional_grammar) + any_block + primitive_grammar + markdown_code_block_grammar -def generate_field_markdown(field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1) -> str: - indent = ' ' * depth - field_markdown = f"{indent}- **{field_name}** (`{field_type.__name__}`): " - - # Extracting field description from Pydantic Field using __model_fields__ - field_info = model.model_fields.get(field_name) - field_description = field_info.description if field_info and field_info.description else "No description available." - - field_markdown += field_description + '\n' - - # Handling nested BaseModel fields - if isclass(field_type) and issubclass(field_type, BaseModel): - field_markdown += f"{indent} - Details:\n" - for name, type_ in field_type.__annotations__.items(): - field_markdown += generate_field_markdown(name, type_, field_type, depth + 2) - - return field_markdown - - -def generate_markdown_report(pydantic_models: List[Type[BaseModel]]) -> str: - markdown = "" - for model in pydantic_models: - markdown += f"### {format_model_and_field_name(model.__name__)}\n" - - # Check if the model's docstring is different from BaseModel's docstring - class_doc = getdoc(model) - base_class_doc = getdoc(BaseModel) - class_description = class_doc if class_doc and class_doc != base_class_doc else "No specific description available." 
- - markdown += f"{class_description}\n\n" - markdown += "#### Fields\n" - - if isclass(model) and issubclass(model, BaseModel): - for name, field_type in model.__annotations__.items(): - markdown += generate_field_markdown(format_model_and_field_name(name), field_type, model) - markdown += "\n" - - return markdown - - -def format_json_example(example: dict, depth: int) -> str: +def generate_markdown_documentation( + pydantic_models: List[Type[BaseModel]], model_prefix="Model", fields_prefix="Fields", + documentation_with_field_description=True +) -> str: """ - Format a JSON example into a readable string with indentation. + Generate markdown documentation for a list of Pydantic models. Args: - example (dict): JSON example to be formatted. - depth (int): Indentation depth. + pydantic_models (List[Type[BaseModel]]): List of Pydantic model classes. + model_prefix (str): Prefix for the model section. + fields_prefix (str): Prefix for the fields section. + documentation_with_field_description (bool): Include field descriptions in the documentation. Returns: - str: Formatted JSON example string. - """ - indent = ' ' * depth - formatted_example = '{\n' - for key, value in example.items(): - value_text = f"'{value}'" if isinstance(value, str) else value - formatted_example += f"{indent}{key}: {value_text},\n" - formatted_example = formatted_example.rstrip(',\n') + '\n' + indent + '}' - return formatted_example - - -def generate_text_documentation(pydantic_models: List[Type[BaseModel]], model_prefix="Model", - fields_prefix="Fields", documentation_with_field_description=True) -> str: - """ - Generate text documentation for a list of Pydantic models. - - Args: - pydantic_models (List[Type[BaseModel]]): List of Pydantic model classes. - model_prefix (str): Prefix for the model section. - fields_prefix (str): Prefix for the fields section. - documentation_with_field_description (bool): Include field descriptions in the documentation. - - Returns: - str: Generated text documentation. + str: Generated text documentation. 
""" documentation = "" pyd_models = [(model, True) for model in pydantic_models] for model, add_prefix in pyd_models: if add_prefix: - documentation += f"{model_prefix}: {format_model_and_field_name(model.__name__)}\n" + documentation += f"{model_prefix}: {model.__name__}\n" else: - documentation += f"Model: {format_model_and_field_name(model.__name__)}\n" + documentation += f"Model: {model.__name__}\n" # Handling multi-line model description with proper indentation @@ -733,7 +696,7 @@ def generate_text_documentation(pydantic_models: List[Type[BaseModel]], model_pr class_description = class_doc if class_doc and class_doc != base_class_doc else "" if class_description != "": documentation += " Description: " - documentation += "\n" + format_multiline_description(class_description, 2) + "\n" + documentation += format_multiline_description(class_description, 0) + "\n" if add_prefix: # Indenting the fields section @@ -753,35 +716,192 @@ def generate_text_documentation(pydantic_models: List[Type[BaseModel]], model_pr for element_type in element_types: if isclass(element_type) and issubclass(element_type, BaseModel): pyd_models.append((element_type, False)) - documentation += generate_field_text(name, field_type, model, - documentation_with_field_description=documentation_with_field_description) + documentation += generate_field_markdown( + name, field_type, model, documentation_with_field_description=documentation_with_field_description + ) documentation += "\n" - if hasattr(model, 'Config') and hasattr(model.Config, - 'json_schema_extra') and 'example' in model.Config.json_schema_extra: + if hasattr(model, "Config") and hasattr(model.Config, + "json_schema_extra") and "example" in model.Config.json_schema_extra: documentation += f" Expected Example Output for {format_model_and_field_name(model.__name__)}:\n" - json_example = json.dumps(model.Config.json_schema_extra['example']) + json_example = json.dumps(model.Config.json_schema_extra["example"]) documentation += format_multiline_description(json_example, 2) + "\n" return documentation -def generate_field_text(field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1, - documentation_with_field_description=True) -> str: +def generate_field_markdown( + field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1, + documentation_with_field_description=True +) -> str: + """ + Generate markdown documentation for a Pydantic model field. + + Args: + field_name (str): Name of the field. + field_type (Type[Any]): Type of the field. + model (Type[BaseModel]): Pydantic model class. + depth (int): Indentation depth in the documentation. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + str: Generated text documentation for the field. 
+ """ + indent = " " * depth + + field_info = model.model_fields.get(field_name) + field_description = field_info.description if field_info and field_info.description else "" + + if get_origin(field_type) == list: + element_type = get_args(field_type)[0] + field_text = f"{indent}{field_name} ({format_model_and_field_name(field_type.__name__)} of {format_model_and_field_name(element_type.__name__)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + elif get_origin(field_type) == Union: + element_types = get_args(field_type) + types = [] + for element_type in element_types: + types.append(format_model_and_field_name(element_type.__name__)) + field_text = f"{indent}{field_name} ({' or '.join(types)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + else: + field_text = f"{indent}{field_name} ({format_model_and_field_name(field_type.__name__)})" + if field_description != "": + field_text += ":\n" + else: + field_text += "\n" + + if not documentation_with_field_description: + return field_text + + if field_description != "": + field_text += f" Description: " + field_description + "\n" + + # Check for and include field-specific examples if available + if hasattr(model, "Config") and hasattr(model.Config, + "json_schema_extra") and "example" in model.Config.json_schema_extra: + field_example = model.Config.json_schema_extra["example"].get(field_name) + if field_example is not None: + example_text = f"'{field_example}'" if isinstance(field_example, str) else field_example + field_text += f"{indent} Example: {example_text}\n" + + if isclass(field_type) and issubclass(field_type, BaseModel): + field_text += f"{indent} Details:\n" + for name, type_ in field_type.__annotations__.items(): + field_text += generate_field_markdown(name, type_, field_type, depth + 2) + + return field_text + + +def format_json_example(example: dict, depth: int) -> str: + """ + Format a JSON example into a readable string with indentation. + + Args: + example (dict): JSON example to be formatted. + depth (int): Indentation depth. + + Returns: + str: Formatted JSON example string. + """ + indent = " " * depth + formatted_example = "{\n" + for key, value in example.items(): + value_text = f"'{value}'" if isinstance(value, str) else value + formatted_example += f"{indent}{key}: {value_text},\n" + formatted_example = formatted_example.rstrip(",\n") + "\n" + indent + "}" + return formatted_example + + +def generate_text_documentation( + pydantic_models: List[Type[BaseModel]], model_prefix="Model", fields_prefix="Fields", + documentation_with_field_description=True +) -> str: + """ + Generate text documentation for a list of Pydantic models. + + Args: + pydantic_models (List[Type[BaseModel]]): List of Pydantic model classes. + model_prefix (str): Prefix for the model section. + fields_prefix (str): Prefix for the fields section. + documentation_with_field_description (bool): Include field descriptions in the documentation. + + Returns: + str: Generated text documentation. 
+ """ + documentation = "" + pyd_models = [(model, True) for model in pydantic_models] + for model, add_prefix in pyd_models: + if add_prefix: + documentation += f"{model_prefix}: {model.__name__}\n" + else: + documentation += f"Model: {model.__name__}\n" + + # Handling multi-line model description with proper indentation + + class_doc = getdoc(model) + base_class_doc = getdoc(BaseModel) + class_description = class_doc if class_doc and class_doc != base_class_doc else "" + if class_description != "": + documentation += " Description: " + documentation += "\n" + format_multiline_description(class_description, 2) + "\n" + + if isclass(model) and issubclass(model, BaseModel): + documentation_fields = "" + for name, field_type in model.__annotations__.items(): + # if name == "markdown_code_block": + # continue + if get_origin(field_type) == list: + element_type = get_args(field_type)[0] + if isclass(element_type) and issubclass(element_type, BaseModel): + pyd_models.append((element_type, False)) + if get_origin(field_type) == Union: + element_types = get_args(field_type) + for element_type in element_types: + if isclass(element_type) and issubclass(element_type, BaseModel): + pyd_models.append((element_type, False)) + documentation_fields += generate_field_text( + name, field_type, model, documentation_with_field_description=documentation_with_field_description + ) + if documentation_fields != "": + if add_prefix: + documentation += f" {fields_prefix}:\n{documentation_fields}" + else: + documentation += f" Fields:\n{documentation_fields}" + documentation += "\n" + + if hasattr(model, "Config") and hasattr(model.Config, + "json_schema_extra") and "example" in model.Config.json_schema_extra: + documentation += f" Expected Example Output for {format_model_and_field_name(model.__name__)}:\n" + json_example = json.dumps(model.Config.json_schema_extra["example"]) + documentation += format_multiline_description(json_example, 2) + "\n" + + return documentation + + +def generate_field_text( + field_name: str, field_type: Type[Any], model: Type[BaseModel], depth=1, + documentation_with_field_description=True +) -> str: """ Generate text documentation for a Pydantic model field. Args: - field_name (str): Name of the field. - field_type (Type[Any]): Type of the field. - model (Type[BaseModel]): Pydantic model class. - depth (int): Indentation depth in the documentation. - documentation_with_field_description (bool): Include field descriptions in the documentation. + field_name (str): Name of the field. + field_type (Type[Any]): Type of the field. + model (Type[BaseModel]): Pydantic model class. + depth (int): Indentation depth in the documentation. + documentation_with_field_description (bool): Include field descriptions in the documentation. Returns: - str: Generated text documentation for the field. + str: Generated text documentation for the field. 
""" - indent = ' ' * depth + indent = " " * depth field_info = model.model_fields.get(field_name) field_description = field_info.description if field_info and field_info.description else "" @@ -817,9 +937,9 @@ def generate_field_text(field_name: str, field_type: Type[Any], model: Type[Base field_text += f"{indent} Description: " + field_description + "\n" # Check for and include field-specific examples if available - if hasattr(model, 'Config') and hasattr(model.Config, - 'json_schema_extra') and 'example' in model.Config.json_schema_extra: - field_example = model.Config.json_schema_extra['example'].get(field_name) + if hasattr(model, "Config") and hasattr(model.Config, + "json_schema_extra") and "example" in model.Config.json_schema_extra: + field_example = model.Config.json_schema_extra["example"].get(field_name) if field_example is not None: example_text = f"'{field_example}'" if isinstance(field_example, str) else field_example field_text += f"{indent} Example: {example_text}\n" @@ -837,39 +957,40 @@ def format_multiline_description(description: str, indent_level: int) -> str: Format a multiline description with proper indentation. Args: - description (str): Multiline description. - indent_level (int): Indentation level. + description (str): Multiline description. + indent_level (int): Indentation level. Returns: - str: Formatted multiline description. + str: Formatted multiline description. """ - indent = ' ' * indent_level - return indent + description.replace('\n', '\n' + indent) + indent = " " * indent_level + return indent + description.replace("\n", "\n" + indent) -def save_gbnf_grammar_and_documentation(grammar, documentation, grammar_file_path="./grammar.gbnf", - documentation_file_path="./grammar_documentation.md"): +def save_gbnf_grammar_and_documentation( + grammar, documentation, grammar_file_path="./grammar.gbnf", documentation_file_path="./grammar_documentation.md" +): """ Save GBNF grammar and documentation to specified files. Args: - grammar (str): GBNF grammar string. - documentation (str): Documentation string. - grammar_file_path (str): File path to save the GBNF grammar. - documentation_file_path (str): File path to save the documentation. + grammar (str): GBNF grammar string. + documentation (str): Documentation string. + grammar_file_path (str): File path to save the GBNF grammar. + documentation_file_path (str): File path to save the documentation. Returns: - None + None """ try: - with open(grammar_file_path, 'w') as file: + with open(grammar_file_path, "w") as file: file.write(grammar + get_primitive_grammar(grammar)) print(f"Grammar successfully saved to {grammar_file_path}") except IOError as e: print(f"An error occurred while saving the grammar file: {e}") try: - with open(documentation_file_path, 'w') as file: + with open(documentation_file_path, "w") as file: file.write(documentation) print(f"Documentation successfully saved to {documentation_file_path}") except IOError as e: @@ -881,10 +1002,10 @@ def remove_empty_lines(string): Remove empty lines from a string. Args: - string (str): Input string. + string (str): Input string. Returns: - str: String with empty lines removed. + str: String with empty lines removed. 
""" lines = string.splitlines() non_empty_lines = [line for line in lines if line.strip() != ""] @@ -892,95 +1013,109 @@ def remove_empty_lines(string): return string_no_empty_lines -def generate_and_save_gbnf_grammar_and_documentation(pydantic_model_list, - grammar_file_path="./generated_grammar.gbnf", - documentation_file_path="./generated_grammar_documentation.md", - outer_object_name: str = None, - outer_object_content: str = None, - model_prefix: str = "Output Model", - fields_prefix: str = "Output Fields", - list_of_outputs: bool = False, - documentation_with_field_description=True): +def generate_and_save_gbnf_grammar_and_documentation( + pydantic_model_list, + grammar_file_path="./generated_grammar.gbnf", + documentation_file_path="./generated_grammar_documentation.md", + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True, +): """ Generate GBNF grammar and documentation, and save them to specified files. Args: - pydantic_model_list: List of Pydantic model classes. - grammar_file_path (str): File path to save the generated GBNF grammar. - documentation_file_path (str): File path to save the generated documentation. - outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. - outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. - model_prefix (str): Prefix for the model section in the documentation. - fields_prefix (str): Prefix for the fields section in the documentation. - list_of_outputs (bool): Whether the output is a list of items. - documentation_with_field_description (bool): Include field descriptions in the documentation. + pydantic_model_list: List of Pydantic model classes. + grammar_file_path (str): File path to save the generated GBNF grammar. + documentation_file_path (str): File path to save the generated documentation. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. 
Returns: - None + None """ - documentation = generate_text_documentation(pydantic_model_list, model_prefix, fields_prefix, - documentation_with_field_description=documentation_with_field_description) - grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, - outer_object_content, list_of_outputs) + documentation = generate_markdown_documentation( + pydantic_model_list, model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description + ) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, outer_object_content, + list_of_outputs) grammar = remove_empty_lines(grammar) save_gbnf_grammar_and_documentation(grammar, documentation, grammar_file_path, documentation_file_path) -def generate_gbnf_grammar_and_documentation(pydantic_model_list, outer_object_name: str = None, - outer_object_content: str = None, - model_prefix: str = "Output Model", - fields_prefix: str = "Output Fields", list_of_outputs: bool = False, - documentation_with_field_description=True): +def generate_gbnf_grammar_and_documentation( + pydantic_model_list, + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True, +): """ Generate GBNF grammar and documentation for a list of Pydantic models. Args: - pydantic_model_list: List of Pydantic model classes. - outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. - outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. - model_prefix (str): Prefix for the model section in the documentation. - fields_prefix (str): Prefix for the fields section in the documentation. - list_of_outputs (bool): Whether the output is a list of items. - documentation_with_field_description (bool): Include field descriptions in the documentation. + pydantic_model_list: List of Pydantic model classes. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. Returns: - tuple: GBNF grammar string, documentation string. + tuple: GBNF grammar string, documentation string. 
""" - documentation = generate_text_documentation(copy(pydantic_model_list), model_prefix, fields_prefix, - documentation_with_field_description=documentation_with_field_description) - grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, - outer_object_content, list_of_outputs) + documentation = generate_markdown_documentation( + copy(pydantic_model_list), model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description + ) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, outer_object_content, + list_of_outputs) grammar = remove_empty_lines(grammar + get_primitive_grammar(grammar)) return grammar, documentation -def generate_gbnf_grammar_and_documentation_from_dictionaries(dictionaries: List[dict], - outer_object_name: str = None, - outer_object_content: str = None, - model_prefix: str = "Output Model", - fields_prefix: str = "Output Fields", - list_of_outputs: bool = False, - documentation_with_field_description=True): +def generate_gbnf_grammar_and_documentation_from_dictionaries( + dictionaries: List[dict], + outer_object_name: str = None, + outer_object_content: str = None, + model_prefix: str = "Output Model", + fields_prefix: str = "Output Fields", + list_of_outputs: bool = False, + documentation_with_field_description=True, +): """ Generate GBNF grammar and documentation from a list of dictionaries. Args: - dictionaries (List[dict]): List of dictionaries representing Pydantic models. - outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. - outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. - model_prefix (str): Prefix for the model section in the documentation. - fields_prefix (str): Prefix for the fields section in the documentation. - list_of_outputs (bool): Whether the output is a list of items. - documentation_with_field_description (bool): Include field descriptions in the documentation. + dictionaries (List[dict]): List of dictionaries representing Pydantic models. + outer_object_name (str): Outer object name for the GBNF grammar. If None, no outer object will be generated. Eg. "function" for function calling. + outer_object_content (str): Content for the outer rule in the GBNF grammar. Eg. "function_parameters" or "params" for function calling. + model_prefix (str): Prefix for the model section in the documentation. + fields_prefix (str): Prefix for the fields section in the documentation. + list_of_outputs (bool): Whether the output is a list of items. + documentation_with_field_description (bool): Include field descriptions in the documentation. Returns: - tuple: GBNF grammar string, documentation string. + tuple: GBNF grammar string, documentation string. 
""" pydantic_model_list = create_dynamic_models_from_dictionaries(dictionaries) - documentation = generate_text_documentation(copy(pydantic_model_list), model_prefix, fields_prefix, - documentation_with_field_description=documentation_with_field_description) - grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, - outer_object_content, list_of_outputs) + documentation = generate_markdown_documentation( + copy(pydantic_model_list), model_prefix, fields_prefix, + documentation_with_field_description=documentation_with_field_description + ) + grammar = generate_gbnf_grammar_from_pydantic_models(pydantic_model_list, outer_object_name, outer_object_content, + list_of_outputs) grammar = remove_empty_lines(grammar + get_primitive_grammar(grammar)) return grammar, documentation @@ -990,41 +1125,61 @@ def create_dynamic_model_from_function(func: Callable): Creates a dynamic Pydantic model from a given function's type hints and adds the function as a 'run' method. Args: - func (Callable): A function with type hints from which to create the model. + func (Callable): A function with type hints from which to create the model. Returns: - A dynamic Pydantic model class with the provided function as a 'run' method. + A dynamic Pydantic model class with the provided function as a 'run' method. """ - # Extracting type hints from the provided function - type_hints = get_type_hints(func) - type_hints.pop('return', None) - # Handling default values and annotations + # Get the signature of the function + sig = inspect.signature(func) + + # Parse the docstring + docstring = parse(func.__doc__) + dynamic_fields = {} - defaults = getattr(func, '__defaults__', ()) or () - defaults_index = len(type_hints) - len(defaults) + param_docs = [] + for param in sig.parameters.values(): + # Exclude 'self' parameter + if param.name == "self": + continue - for index, (name, typ) in enumerate(type_hints.items()): - if index >= defaults_index: - default_value = defaults[index - defaults_index] - dynamic_fields[name] = (typ, default_value) + # Assert that the parameter has a type annotation + if param.annotation == inspect.Parameter.empty: + raise TypeError(f"Parameter '{param.name}' in function '{func.__name__}' lacks a type annotation") + + # Find the parameter's description in the docstring + param_doc = next((d for d in docstring.params if d.arg_name == param.name), None) + + # Assert that the parameter has a description + if not param_doc or not param_doc.description: + raise ValueError( + f"Parameter '{param.name}' in function '{func.__name__}' lacks a description in the docstring") + + # Add parameter details to the schema + param_doc = next((d for d in docstring.params if d.arg_name == param.name), None) + param_docs.append((param.name, param_doc)) + if param.default == inspect.Parameter.empty: + default_value = ... else: - dynamic_fields[name] = (typ, ...) 
- + default_value = param.default + dynamic_fields[param.name] = ( + param.annotation if param.annotation != inspect.Parameter.empty else str, default_value) # Creating the dynamic model - dynamicModel = create_model(f'{func.__name__}', **dynamic_fields) + dynamic_model = create_model(f"{func.__name__}", **dynamic_fields) - dynamicModel.__doc__ = getdoc(func) + for param_doc in param_docs: + dynamic_model.model_fields[param_doc[0]].description = param_doc[1].description + + dynamic_model.__doc__ = docstring.short_description - # Wrapping the original function to handle instance 'self' def run_method_wrapper(self): - func_args = {name: getattr(self, name) for name in type_hints} + func_args = {name: getattr(self, name) for name, _ in dynamic_fields.items()} return func(**func_args) # Adding the wrapped function as a 'run' method - setattr(dynamicModel, 'run', run_method_wrapper) - - return dynamicModel + setattr(dynamic_model, "run", run_method_wrapper) + return dynamic_model def add_run_method_to_dynamic_model(model: Type[BaseModel], func: Callable): @@ -1032,11 +1187,11 @@ def add_run_method_to_dynamic_model(model: Type[BaseModel], func: Callable): Add a 'run' method to a dynamic Pydantic model, using the provided function. Args: - - model (Type[BaseModel]): Dynamic Pydantic model class. - - func (Callable): Function to be added as a 'run' method to the model. + model (Type[BaseModel]): Dynamic Pydantic model class. + func (Callable): Function to be added as a 'run' method to the model. Returns: - - Type[BaseModel]: Pydantic model class with the added 'run' method. + Type[BaseModel]: Pydantic model class with the added 'run' method. """ def run_method_wrapper(self): @@ -1044,7 +1199,7 @@ def add_run_method_to_dynamic_model(model: Type[BaseModel], func: Callable): return func(**func_args) # Adding the wrapped function as a 'run' method - setattr(model, 'run', run_method_wrapper) + setattr(model, "run", run_method_wrapper) return model @@ -1054,15 +1209,15 @@ def create_dynamic_models_from_dictionaries(dictionaries: List[dict]): Create a list of dynamic Pydantic model classes from a list of dictionaries. Args: - - dictionaries (List[dict]): List of dictionaries representing model structures. + dictionaries (List[dict]): List of dictionaries representing model structures. Returns: - - List[Type[BaseModel]]: List of generated dynamic Pydantic model classes. + List[Type[BaseModel]]: List of generated dynamic Pydantic model classes. """ dynamic_models = [] for func in dictionaries: model_name = format_model_and_field_name(func.get("name", "")) - dyn_model = convert_dictionary_to_to_pydantic_model(func, model_name) + dyn_model = convert_dictionary_to_pydantic_model(func, model_name) dynamic_models.append(dyn_model) return dynamic_models @@ -1080,12 +1235,12 @@ from enum import Enum def json_schema_to_python_types(schema): type_map = { - 'any': Any, - 'string': str, - 'number': float, - 'integer': int, - 'boolean': bool, - 'array': list, + "any": Any, + "string": str, + "number": float, + "integer": int, + "boolean": bool, + "array": list, } return type_map[schema] @@ -1094,58 +1249,64 @@ def list_to_enum(enum_name, values): return Enum(enum_name, {value: value for value in values}) -def convert_dictionary_to_to_pydantic_model(dictionary: dict, model_name: str = 'CustomModel') -> Type[BaseModel]: +def convert_dictionary_to_pydantic_model(dictionary: dict, model_name: str = "CustomModel") -> Type[BaseModel]: """ Convert a dictionary to a Pydantic model class. 
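The rewrite of `create_dynamic_model_from_function` above deliberately fails fast: every parameter must carry both a type annotation and a docstring entry, which is what allows the generated model to attach a per-field description. A sketch of a function that satisfies that contract, not part of the patch; `multiply` and the import path are illustrative assumptions.

from pydantic_models_to_grammar import (  # assumed module name
    create_dynamic_model_from_function,
)

def multiply(a: float, b: float) -> float:
    """
    Multiply two numbers.

    Args:
        a (float): First factor.
        b (float): Second factor.
    """
    return a * b

MultiplyModel = create_dynamic_model_from_function(multiply)
print(MultiplyModel.model_fields["a"].description)  # "First factor."
print(MultiplyModel(a=3.0, b=4.0).run())            # 12.0

`add_run_method_to_dynamic_model`, defined just below, performs the same `.run()` attachment for a model class that already exists.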
Args: - - dictionary (dict): Dictionary representing the model structure. - - model_name (str): Name of the generated Pydantic model. + dictionary (dict): Dictionary representing the model structure. + model_name (str): Name of the generated Pydantic model. Returns: - - Type[BaseModel]: Generated Pydantic model class. + Type[BaseModel]: Generated Pydantic model class. """ fields = {} if "properties" in dictionary: for field_name, field_data in dictionary.get("properties", {}).items(): - if field_data == 'object': - submodel = convert_dictionary_to_to_pydantic_model(dictionary, f'{model_name}_{field_name}') + if field_data == "object": + submodel = convert_dictionary_to_pydantic_model(dictionary, f"{model_name}_{field_name}") fields[field_name] = (submodel, ...) else: - field_type = field_data.get('type', 'str') + field_type = field_data.get("type", "str") if field_data.get("enum", []): fields[field_name] = (list_to_enum(field_name, field_data.get("enum", [])), ...) - if field_type == "array": + elif field_type == "array": items = field_data.get("items", {}) if items != {}: array = {"properties": items} - array_type = convert_dictionary_to_to_pydantic_model(array, f'{model_name}_{field_name}_items') + array_type = convert_dictionary_to_pydantic_model(array, f"{model_name}_{field_name}_items") fields[field_name] = (List[array_type], ...) else: fields[field_name] = (list, ...) - elif field_type == 'object': - submodel = convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}_{field_name}') + elif field_type == "object": + submodel = convert_dictionary_to_pydantic_model(field_data, f"{model_name}_{field_name}") fields[field_name] = (submodel, ...) + elif field_type == "required": + required = field_data.get("enum", []) + for key, field in fields.items(): + if key not in required: + fields[key] = (Optional[fields[key][0]], ...) else: field_type = json_schema_to_python_types(field_type) fields[field_name] = (field_type, ...) if "function" in dictionary: - for field_name, field_data in dictionary.get("function", {}).items(): if field_name == "name": model_name = field_data elif field_name == "description": fields["__doc__"] = field_data elif field_name == "parameters": - return convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}') + return convert_dictionary_to_pydantic_model(field_data, f"{model_name}") + if "parameters" in dictionary: field_data = {"function": dictionary} - return convert_dictionary_to_to_pydantic_model(field_data, f'{model_name}') - + return convert_dictionary_to_pydantic_model(field_data, f"{model_name}") + if "required" in dictionary: + required = dictionary.get("required", []) + for key, field in fields.items(): + if key not in required: + fields[key] = (Optional[fields[key][0]], ...) 
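The `required` handling added at the tail of `convert_dictionary_to_pydantic_model` relaxes the type of non-required keys rather than giving them a default value. A sketch of the effect, not part of the patch; the `Movie` schema is an illustrative assumption.

from pydantic_models_to_grammar import (  # assumed module name
    convert_dictionary_to_pydantic_model,
)

schema = {
    "properties": {
        "title": {"type": "string"},
        "year": {"type": "integer"},
    },
    "required": ["title"],
}

Movie = convert_dictionary_to_pydantic_model(schema, "Movie")
# "title" stays `str`; "year" is relaxed to `Optional[int]`. The default
# remains `...`, so a value (possibly None) must still be supplied:
m = Movie(title="Alien", year=None)   # valid
# Movie(title="Alien") would still raise a ValidationError.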
custom_model = create_model(model_name, **fields) return custom_model - - - From 7c8d3abd1a17c28fc56b1a4814bc4b29f91d7454 Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Tue, 16 Jan 2024 14:33:02 +0100 Subject: [PATCH 484/859] metal : log `recommendedMaxWorkingSetSize` on iOS 16+ (#4936) * metal: Log `recommendedMaxWorkingSetSize` on iOS 16+ * Only log on iOS and macOS, ignoring tvOS and other platforms * Check for Xcode version before using recommendedMaxWorkingSetSize --------- Co-authored-by: Georgi Gerganov --- ggml-metal.m | 58 ++++++++++++++++++++++++---------------------------- 1 file changed, 27 insertions(+), 31 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 867f2fd48..44134d1d9 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -369,8 +369,12 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_INFO("%s: simdgroup reduction support = %s\n", __func__, ctx->support_simdgroup_reduction ? "true" : "false"); GGML_METAL_LOG_INFO("%s: simdgroup matrix mul. support = %s\n", __func__, ctx->support_simdgroup_mm ? "true" : "false"); GGML_METAL_LOG_INFO("%s: hasUnifiedMemory = %s\n", __func__, ctx->device.hasUnifiedMemory ? "true" : "false"); -#if TARGET_OS_OSX - GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); + +#if TARGET_OS_OSX || (TARGET_OS_IOS && __clang_major__ >= 15) + if (@available(macOS 10.12, iOS 16.0, *)) { + GGML_METAL_LOG_INFO("%s: recommendedMaxWorkingSetSize = %8.2f MB\n", __func__, ctx->device.recommendedMaxWorkingSetSize / 1e6); + } +#elif TARGET_OS_OSX if (ctx->device.maxTransferRate != 0) { GGML_METAL_LOG_INFO("%s: maxTransferRate = %8.2f MB/s\n", __func__, ctx->device.maxTransferRate / 1e6); } else { @@ -2369,6 +2373,25 @@ GGML_CALL static const char * ggml_backend_metal_buffer_type_get_name(ggml_backe UNUSED(buft); } +static void ggml_backend_metal_log_allocated_size(id device) { +#if TARGET_OS_OSX || (TARGET_OS_IOS && __clang_major__ >= 15) + if (@available(macOS 10.12, iOS 16.0, *)) { + GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", + device.currentAllocatedSize / 1024.0 / 1024.0, + device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); + + if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { + GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); + } else { + GGML_METAL_LOG_INFO("\n"); + } + } else { + GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); + } +#endif + UNUSED(device); +} + GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { struct ggml_backend_metal_buffer_context * ctx = malloc(sizeof(struct ggml_backend_metal_buffer_context)); @@ -2401,22 +2424,7 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_metal_buffer_type_alloc_buff } GGML_METAL_LOG_INFO("%s: allocated buffer, size = %8.2f MiB", __func__, size_aligned / 1024.0 / 1024.0); - - -#if TARGET_OS_OSX - GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", - device.currentAllocatedSize / 1024.0 / 1024.0, - device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); - - if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { - GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); - } else { - GGML_METAL_LOG_INFO("\n"); - } -#else - GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); 
-#endif - + ggml_backend_metal_log_allocated_size(device); return ggml_backend_buffer_init(buft, ggml_backend_metal_buffer_i, ctx, size); } @@ -2524,19 +2532,7 @@ GGML_CALL ggml_backend_buffer_t ggml_backend_metal_buffer_from_ptr(void * data, } } -#if TARGET_OS_OSX - GGML_METAL_LOG_INFO(", (%8.2f / %8.2f)", - device.currentAllocatedSize / 1024.0 / 1024.0, - device.recommendedMaxWorkingSetSize / 1024.0 / 1024.0); - - if (device.currentAllocatedSize > device.recommendedMaxWorkingSetSize) { - GGML_METAL_LOG_WARN("%s: warning: current allocated size is greater than the recommended max working set size\n", __func__); - } else { - GGML_METAL_LOG_INFO("\n"); - } -#else - GGML_METAL_LOG_INFO(", (%8.2f)\n", device.currentAllocatedSize / 1024.0 / 1024.0); -#endif + ggml_backend_metal_log_allocated_size(device); return ggml_backend_buffer_init(ggml_backend_metal_buffer_type(), ggml_backend_metal_buffer_i, ctx, size); } From 3a48d558a69c88ac17efcaa5900cd9eb19596ac4 Mon Sep 17 00:00:00 2001 From: Alex Azarov Date: Tue, 16 Jan 2024 14:41:27 +0100 Subject: [PATCH 485/859] metal : replace loop of dispatch_async with dispatch_apply (#4934) * Replace loop of dispatch_async with dispatch_apply * Update ggml-metal.m --------- Co-authored-by: Georgi Gerganov --- ggml-metal.m | 2882 +++++++++++++++++++++++++------------------------- 1 file changed, 1439 insertions(+), 1443 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index 44134d1d9..c21dc465a 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -737,1475 +737,249 @@ static bool ggml_metal_graph_compute( ctx->command_encoders[i] = [ctx->command_buffers[i] computeCommandEncoderWithDescriptor: edesc]; } - for (int cb_idx = 0; cb_idx < n_cb; ++cb_idx) { - const int n_nodes_per_cb = (n_nodes + n_cb - 1) / n_cb; + const int n_nodes_per_cb = (n_nodes + n_cb - 1) / n_cb; + dispatch_apply(n_cb, ctx->d_queue, ^(size_t iter) { + const int cb_idx = iter; - dispatch_async(ctx->d_queue, ^{ - size_t offs_src0 = 0; - size_t offs_src1 = 0; - size_t offs_dst = 0; + size_t offs_src0 = 0; + size_t offs_src1 = 0; + size_t offs_dst = 0; - id command_buffer = ctx->command_buffers[cb_idx]; - id encoder = ctx->command_encoders[cb_idx]; + id command_buffer = ctx->command_buffers[cb_idx]; + id encoder = ctx->command_encoders[cb_idx]; - const int node_start = (cb_idx + 0) * n_nodes_per_cb; - const int node_end = MIN((cb_idx == n_cb - 1) ? n_nodes : (cb_idx + 1) * n_nodes_per_cb, n_nodes); + const int node_start = (cb_idx + 0) * n_nodes_per_cb; + const int node_end = MIN((cb_idx == n_cb - 1) ? 
n_nodes : (cb_idx + 1) * n_nodes_per_cb, n_nodes); - for (int ind = node_start; ind < node_end; ++ind) { - const int i = ind; + for (int ind = node_start; ind < node_end; ++ind) { + const int i = ind; - if (i == -1) { - [encoder memoryBarrierWithScope:MTLBarrierScopeBuffers]; - continue; - } + if (i == -1) { + [encoder memoryBarrierWithScope:MTLBarrierScopeBuffers]; + continue; + } - //GGML_METAL_LOG_INFO("%s: encoding node %3d, op = %8s\n", __func__, i, ggml_op_name(gf->nodes[i]->op)); + //GGML_METAL_LOG_INFO("%s: encoding node %3d, op = %8s\n", __func__, i, ggml_op_name(gf->nodes[i]->op)); - struct ggml_tensor * src0 = gf->nodes[i]->src[0]; - struct ggml_tensor * src1 = gf->nodes[i]->src[1]; - struct ggml_tensor * dst = gf->nodes[i]; + struct ggml_tensor * src0 = gf->nodes[i]->src[0]; + struct ggml_tensor * src1 = gf->nodes[i]->src[1]; + struct ggml_tensor * dst = gf->nodes[i]; - switch (dst->op) { - case GGML_OP_NONE: - case GGML_OP_RESHAPE: - case GGML_OP_VIEW: - case GGML_OP_TRANSPOSE: - case GGML_OP_PERMUTE: - { - // noop -> next node - } continue; - default: - { - } break; - } + switch (dst->op) { + case GGML_OP_NONE: + case GGML_OP_RESHAPE: + case GGML_OP_VIEW: + case GGML_OP_TRANSPOSE: + case GGML_OP_PERMUTE: + { + // noop -> next node + } continue; + default: + { + } break; + } - if (!ggml_metal_supports_op(ctx, dst)) { - GGML_METAL_LOG_ERROR("%s: error: unsupported op '%s'\n", __func__, ggml_op_desc(dst)); - GGML_ASSERT(!"unsupported op"); - } + if (!ggml_metal_supports_op(ctx, dst)) { + GGML_METAL_LOG_ERROR("%s: error: unsupported op '%s'\n", __func__, ggml_op_desc(dst)); + GGML_ASSERT(!"unsupported op"); + } #ifndef GGML_METAL_NDEBUG - [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst) encoding:NSUTF8StringEncoding]]; + [encoder pushDebugGroup:[NSString stringWithCString:ggml_op_desc(dst) encoding:NSUTF8StringEncoding]]; #endif - const int64_t ne00 = src0 ? src0->ne[0] : 0; - const int64_t ne01 = src0 ? src0->ne[1] : 0; - const int64_t ne02 = src0 ? src0->ne[2] : 0; - const int64_t ne03 = src0 ? src0->ne[3] : 0; + const int64_t ne00 = src0 ? src0->ne[0] : 0; + const int64_t ne01 = src0 ? src0->ne[1] : 0; + const int64_t ne02 = src0 ? src0->ne[2] : 0; + const int64_t ne03 = src0 ? src0->ne[3] : 0; - const uint64_t nb00 = src0 ? src0->nb[0] : 0; - const uint64_t nb01 = src0 ? src0->nb[1] : 0; - const uint64_t nb02 = src0 ? src0->nb[2] : 0; - const uint64_t nb03 = src0 ? src0->nb[3] : 0; + const uint64_t nb00 = src0 ? src0->nb[0] : 0; + const uint64_t nb01 = src0 ? src0->nb[1] : 0; + const uint64_t nb02 = src0 ? src0->nb[2] : 0; + const uint64_t nb03 = src0 ? src0->nb[3] : 0; - const int64_t ne10 = src1 ? src1->ne[0] : 0; - const int64_t ne11 = src1 ? src1->ne[1] : 0; - const int64_t ne12 = src1 ? src1->ne[2] : 0; - const int64_t ne13 = src1 ? src1->ne[3] : 0; UNUSED(ne13); + const int64_t ne10 = src1 ? src1->ne[0] : 0; + const int64_t ne11 = src1 ? src1->ne[1] : 0; + const int64_t ne12 = src1 ? src1->ne[2] : 0; + const int64_t ne13 = src1 ? src1->ne[3] : 0; UNUSED(ne13); - const uint64_t nb10 = src1 ? src1->nb[0] : 0; - const uint64_t nb11 = src1 ? src1->nb[1] : 0; - const uint64_t nb12 = src1 ? src1->nb[2] : 0; - const uint64_t nb13 = src1 ? src1->nb[3] : 0; UNUSED(nb13); + const uint64_t nb10 = src1 ? src1->nb[0] : 0; + const uint64_t nb11 = src1 ? src1->nb[1] : 0; + const uint64_t nb12 = src1 ? src1->nb[2] : 0; + const uint64_t nb13 = src1 ? src1->nb[3] : 0; UNUSED(nb13); - const int64_t ne0 = dst ? dst->ne[0] : 0; - const int64_t ne1 = dst ? 
dst->ne[1] : 0; - const int64_t ne2 = dst ? dst->ne[2] : 0; - const int64_t ne3 = dst ? dst->ne[3] : 0; + const int64_t ne0 = dst ? dst->ne[0] : 0; + const int64_t ne1 = dst ? dst->ne[1] : 0; + const int64_t ne2 = dst ? dst->ne[2] : 0; + const int64_t ne3 = dst ? dst->ne[3] : 0; - const uint64_t nb0 = dst ? dst->nb[0] : 0; - const uint64_t nb1 = dst ? dst->nb[1] : 0; - const uint64_t nb2 = dst ? dst->nb[2] : 0; - const uint64_t nb3 = dst ? dst->nb[3] : 0; + const uint64_t nb0 = dst ? dst->nb[0] : 0; + const uint64_t nb1 = dst ? dst->nb[1] : 0; + const uint64_t nb2 = dst ? dst->nb[2] : 0; + const uint64_t nb3 = dst ? dst->nb[3] : 0; - const enum ggml_type src0t = src0 ? src0->type : GGML_TYPE_COUNT; - const enum ggml_type src1t = src1 ? src1->type : GGML_TYPE_COUNT; - const enum ggml_type dstt = dst ? dst->type : GGML_TYPE_COUNT; + const enum ggml_type src0t = src0 ? src0->type : GGML_TYPE_COUNT; + const enum ggml_type src1t = src1 ? src1->type : GGML_TYPE_COUNT; + const enum ggml_type dstt = dst ? dst->type : GGML_TYPE_COUNT; - id id_src0 = src0 ? ggml_metal_get_buffer(ctx, src0, &offs_src0) : nil; - id id_src1 = src1 ? ggml_metal_get_buffer(ctx, src1, &offs_src1) : nil; - id id_dst = dst ? ggml_metal_get_buffer(ctx, dst, &offs_dst) : nil; + id id_src0 = src0 ? ggml_metal_get_buffer(ctx, src0, &offs_src0) : nil; + id id_src1 = src1 ? ggml_metal_get_buffer(ctx, src1, &offs_src1) : nil; + id id_dst = dst ? ggml_metal_get_buffer(ctx, dst, &offs_dst) : nil; - //GGML_METAL_LOG_INFO("%s: op - %s\n", __func__, ggml_op_name(dst->op)); - //if (src0) { - // GGML_METAL_LOG_INFO("%s: src0 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src0t), ne00, ne01, ne02, - // ggml_is_contiguous(src0), src0->name); - //} - //if (src1) { - // GGML_METAL_LOG_INFO("%s: src1 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src1t), ne10, ne11, ne12, - // ggml_is_contiguous(src1), src1->name); - //} - //if (dst) { - // GGML_METAL_LOG_INFO("%s: dst - %4s [%5lld, %5lld, %5lld], 1, %s\n", __func__, ggml_type_name(dstt), ne0, ne1, ne2, - // dst->name); - //} + //GGML_METAL_LOG_INFO("%s: op - %s\n", __func__, ggml_op_name(dst->op)); + //if (src0) { + // GGML_METAL_LOG_INFO("%s: src0 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src0t), ne00, ne01, ne02, + // ggml_is_contiguous(src0), src0->name); + //} + //if (src1) { + // GGML_METAL_LOG_INFO("%s: src1 - %4s [%5lld, %5lld, %5lld], %d, %s\n", __func__, ggml_type_name(src1t), ne10, ne11, ne12, + // ggml_is_contiguous(src1), src1->name); + //} + //if (dst) { + // GGML_METAL_LOG_INFO("%s: dst - %4s [%5lld, %5lld, %5lld], 1, %s\n", __func__, ggml_type_name(dstt), ne0, ne1, ne2, + // dst->name); + //} - switch (dst->op) { - case GGML_OP_CONCAT: - { - const int64_t nb = ne00; + switch (dst->op) { + case GGML_OP_CONCAT: + { + const int64_t nb = ne00; - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONCAT].pipeline; + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CONCAT].pipeline; - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; - [encoder setBytes:&nb01 
length:sizeof(nb01) atIndex:8]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; - [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; - [encoder setBytes:&nb length:sizeof(nb) atIndex:27]; + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; + [encoder setBytes:&nb length:sizeof(nb) atIndex:27]; - const int nth = MIN(1024, ne0); + const int nth = MIN(1024, ne0); - [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_ADD: - case GGML_OP_MUL: - case GGML_OP_DIV: - { - const size_t offs = 0; + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ADD: + case GGML_OP_MUL: + case GGML_OP_DIV: + { + const size_t offs = 0; - bool bcast_row = false; + bool bcast_row = false; - int64_t nb = ne00; + int64_t nb = ne00; - id pipeline = nil; + id pipeline = nil; - if (ggml_nelements(src1) == ne10 && ggml_is_contiguous(src1) && ne00 % 4 == 0 && ne10 % 4 == 0) { - GGML_ASSERT(ggml_is_contiguous(src0)); 
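                    // [Editor's note, not part of the patch] Most hunks from here
                    // to the end of this file are the mechanical re-indent caused
                    // by moving the per-command-buffer loop body into the
                    // dispatch_apply block above: dispatch_apply(n_cb, ctx->d_queue,
                    // ^(size_t iter)) runs the block for iter = 0 .. n_cb-1,
                    // possibly in parallel, and -- unlike the replaced loop of
                    // detached dispatch_async calls -- returns only after every
                    // iteration has completed.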
- - // src1 is a row - GGML_ASSERT(ne11 == 1); - - nb = ne00 / 4; - switch (dst->op) { - case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD_ROW].pipeline; break; - case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_ROW].pipeline; break; - case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV_ROW].pipeline; break; - default: GGML_ASSERT(false); - } - - bcast_row = true; - } else { - switch (dst->op) { - case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; break; - case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL].pipeline; break; - case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV].pipeline; break; - default: GGML_ASSERT(false); - } - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; - [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; - [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; - [encoder setBytes:&nb length:sizeof(nb) atIndex:28]; - - if (bcast_row) { - const int64_t n = ggml_nelements(dst)/4; - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } else { - const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } - } break; - case GGML_OP_ACC: - { - GGML_ASSERT(src0t == GGML_TYPE_F32); - GGML_ASSERT(src1t == GGML_TYPE_F32); - GGML_ASSERT(dstt == GGML_TYPE_F32); - - GGML_ASSERT(ggml_is_contiguous(src0)); - GGML_ASSERT(ggml_is_contiguous(src1)); - - const size_t pnb1 = ((int32_t *) dst->op_params)[0]; - const size_t pnb2 = ((int32_t *) dst->op_params)[1]; - const size_t pnb3 = ((int32_t *) dst->op_params)[2]; - const size_t offs = ((int32_t *) dst->op_params)[3]; - - const bool inplace = (bool) ((int32_t *) dst->op_params)[4]; - - if (!inplace) { - // run a separete kernel to cpy src->dst - // not sure how to avoid this - // TODO: make a simpler cpy_bytes kernel - - const id pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; - [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; - [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; - [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; - [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; - [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; - - const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } - - const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; - [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:8]; - [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:9]; - [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:10]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; - [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; - [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:24]; - [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:25]; - [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; - [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; - - const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); - - [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_SCALE: - { + if (ggml_nelements(src1) == ne10 && ggml_is_contiguous(src1) && ne00 % 4 == 0 && ne10 % 4 == 0) { GGML_ASSERT(ggml_is_contiguous(src0)); - const float scale = *(const 
float *) dst->op_params; + // src1 is a row + GGML_ASSERT(ne11 == 1); - int64_t n = ggml_nelements(dst); - - id pipeline = nil; - - if (n % 4 == 0) { - n /= 4; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE_4].pipeline; - } else { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE].pipeline; + nb = ne00 / 4; + switch (dst->op) { + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD_ROW].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_ROW].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV_ROW].pipeline; break; + default: GGML_ASSERT(false); } - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; + bcast_row = true; + } else { + switch (dst->op) { + case GGML_OP_ADD: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; break; + case GGML_OP_MUL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL].pipeline; break; + case GGML_OP_DIV: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIV].pipeline; break; + default: GGML_ASSERT(false); + } + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:24]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:25]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:26]; + [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; + [encoder setBytes:&nb length:sizeof(nb) atIndex:28]; + + if (bcast_row) { + const int64_t n = ggml_nelements(dst)/4; [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_OP_UNARY: - switch (ggml_get_unary_op(gf->nodes[i])) { - case GGML_UNARY_OP_TANH: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TANH].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 
1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_UNARY_OP_RELU: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RELU].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_UNARY_OP_GELU: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - GGML_ASSERT(n % 4 == 0); - - [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_UNARY_OP_GELU_QUICK: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_QUICK].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - GGML_ASSERT(n % 4 == 0); - - [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_UNARY_OP_SILU: - { - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SILU].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - GGML_ASSERT(n % 4 == 0); - - [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - default: - { - GGML_METAL_LOG_WARN("%s: node %3d, op = %8s not implemented\n", __func__, i, ggml_op_name(dst->op)); - GGML_ASSERT(false); - } - } break; - case GGML_OP_SQR: - { - GGML_ASSERT(ggml_is_contiguous(src0)); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SQR].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - - const int64_t n = ggml_nelements(dst); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_OP_SUM_ROWS: - { - GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SUM_ROWS].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:11]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; - [encoder setBytes:&nb11 length:sizeof(nb11) 
atIndex:15]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; - [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:17]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:18]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:19]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:20]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:21]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:22]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:23]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:24]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:25]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_OP_SOFT_MAX: - { - int nth = 32; // SIMD width - - id pipeline = nil; - - if (ne00%4 == 0) { - while (nth < ne00/4 && nth < 256) { - nth *= 2; - } - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_4].pipeline; - } else { - while (nth < ne00 && nth < 1024) { - nth *= 2; - } - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; - } - - const float scale = ((float *) dst->op_params)[0]; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - if (id_src1) { - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - } else { - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; - } - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&scale length:sizeof(scale) atIndex:6]; - [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_DIAG_MASK_INF: - { - const int n_past = ((int32_t *)(dst->op_params))[0]; - - id pipeline = nil; - - if (ne00%8 == 0) { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8].pipeline; - } else { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF].pipeline; - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&n_past length:sizeof(int) atIndex:4]; - - if (ne00%8 == 0) { - [encoder dispatchThreadgroups:MTLSizeMake(ne00*ne01*ne02/8, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } - else { - [encoder dispatchThreadgroups:MTLSizeMake(ne00, ne01, ne02) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } - } break; - case GGML_OP_MUL_MAT: - { - GGML_ASSERT(ne00 == ne10); - - // TODO: assert that dim2 and dim3 are contiguous - GGML_ASSERT(ne12 % ne02 == 0); - GGML_ASSERT(ne13 % ne03 == 0); - - const uint r2 = ne12/ne02; - const uint r3 = ne13/ne03; - - // find the break-even point where the matrix-matrix kernel becomes more efficient compared - // to the matrix-vector kernel - int ne11_mm_min = 1; - -#if 0 - // the numbers below are measured on M2 Ultra for 7B and 13B models - // these numbers do not translate to other devices or model sizes - // TODO: need to find a better approach - if ([ctx->device.name isEqualToString:@"Apple M2 Ultra"]) { - switch (src0t) { - case GGML_TYPE_F16: ne11_mm_min = 2; break; - case GGML_TYPE_Q8_0: ne11_mm_min = 7; break; - case GGML_TYPE_Q2_K: 
ne11_mm_min = 15; break; - case GGML_TYPE_Q3_K: ne11_mm_min = 7; break; - case GGML_TYPE_Q4_0: - case GGML_TYPE_Q4_1: ne11_mm_min = 15; break; - case GGML_TYPE_Q4_K: ne11_mm_min = 11; break; - case GGML_TYPE_Q5_0: // not tested yet - case GGML_TYPE_Q5_1: ne11_mm_min = 13; break; // not tested yet - case GGML_TYPE_Q5_K: ne11_mm_min = 7; break; - case GGML_TYPE_Q6_K: ne11_mm_min = 7; break; - default: ne11_mm_min = 1; break; - } - } -#endif - - // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs - // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel - if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && - !ggml_is_transposed(src0) && - !ggml_is_transposed(src1) && - src1t == GGML_TYPE_F32 && - ne00 % 32 == 0 && ne00 >= 64 && - (ne11 > ne11_mm_min || (ggml_is_quantized(src0t) && ne12 > 1))) { - //printf("matrix: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); - - id pipeline = nil; - - switch (src0->type) { - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32 ].pipeline; break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32 ].pipeline; break; - case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32 ].pipeline; break; - case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32 ].pipeline; break; - case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32 ].pipeline; break; - case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32 ].pipeline; break; - case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32 ].pipeline; break; - case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32 ].pipeline; break; - case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32 ].pipeline; break; - case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32 ].pipeline; break; - case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32 ].pipeline; break; - case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32 ].pipeline; break; - case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; - case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; - default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:5]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:6]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:7]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:8]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:9]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:10]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:11]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:12]; - [encoder setBytes:&r2 length:sizeof(r2) atIndex:13]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:14]; - [encoder setThreadgroupMemoryLength:8192 atIndex:0]; - [encoder 
dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne01 + 63)/64, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; - } else { - int nth0 = 32; - int nth1 = 1; - int nrows = 1; - //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); - - id pipeline = nil; - - // use custom matrix x vector kernel - switch (src0t) { - case GGML_TYPE_F32: - { - GGML_ASSERT(src1t == GGML_TYPE_F32); - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32].pipeline; - nrows = 4; - } break; - case GGML_TYPE_F16: - { - nth0 = 32; - nth1 = 1; - if (src1t == GGML_TYPE_F32) { - if (ne11 * ne12 < 4) { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW].pipeline; - } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4].pipeline; - nrows = ne11; - } else { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32].pipeline; - nrows = 4; - } - } else { - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16].pipeline; - nrows = 4; - } - } break; - case GGML_TYPE_Q4_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32].pipeline; - } break; - case GGML_TYPE_Q4_1: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32].pipeline; - } break; - case GGML_TYPE_Q5_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32].pipeline; - } break; - case GGML_TYPE_Q5_1: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32].pipeline; - } break; - case GGML_TYPE_Q8_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32].pipeline; - } break; - case GGML_TYPE_Q2_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32].pipeline; - } break; - case GGML_TYPE_Q3_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32].pipeline; - } break; - case GGML_TYPE_Q4_K: - { - nth0 = 4; //1; - nth1 = 8; //32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32].pipeline; - } break; - case GGML_TYPE_Q5_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32].pipeline; - } break; - case GGML_TYPE_Q6_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32].pipeline; - } break; - case GGML_TYPE_IQ2_XXS: - { - nth0 = 4; - nth1 = 16; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32].pipeline; - } break; - case GGML_TYPE_IQ2_XS: - { - nth0 = 4; - nth1 = 16; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32].pipeline; - } break; - default: - { - GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); - GGML_ASSERT(false && "not implemented"); - } - }; - - if (ggml_is_quantized(src0t)) { - GGML_ASSERT(ne00 >= nth0*nth1); - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; 
- [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:9]; - [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:10]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:11]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:12]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:13]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:14]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:15]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:16]; - [encoder setBytes:&r2 length:sizeof(r2) atIndex:17]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:18]; - - if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || - src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || - src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { - const int mem_size = src0t == GGML_TYPE_IQ2_XXS ? 256*8+128 : 512*8+128; - [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src0t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src0t == GGML_TYPE_Q3_K) { -#ifdef GGML_QKK_64 - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; -#else - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; -#endif - } - else if (src0t == GGML_TYPE_Q5_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src0t == GGML_TYPE_Q6_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } else { - const int64_t ny = (ne11 + nrows - 1)/nrows; - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - } - } break; - case GGML_OP_MUL_MAT_ID: - { - //GGML_ASSERT(ne00 == ne10); - //GGML_ASSERT(ne03 == ne13); - - GGML_ASSERT(src0t == GGML_TYPE_I32); - - const int n_as = ((int32_t *) dst->op_params)[1]; - - // TODO: make this more general - GGML_ASSERT(n_as <= 8); - - // max size of the src1ids array in the kernel stack - GGML_ASSERT(ne11 <= 512); - - struct ggml_tensor * src2 = gf->nodes[i]->src[2]; - - const int64_t ne20 = src2 ? src2->ne[0] : 0; - const int64_t ne21 = src2 ? src2->ne[1] : 0; - const int64_t ne22 = src2 ? src2->ne[2] : 0; - const int64_t ne23 = src2 ? src2->ne[3] : 0; GGML_UNUSED(ne23); - - const uint64_t nb20 = src2 ? src2->nb[0] : 0; GGML_UNUSED(nb20); - const uint64_t nb21 = src2 ? src2->nb[1] : 0; - const uint64_t nb22 = src2 ? src2->nb[2] : 0; - const uint64_t nb23 = src2 ? src2->nb[3] : 0; GGML_UNUSED(nb23); - - const enum ggml_type src2t = src2 ? 
src2->type : GGML_TYPE_COUNT; GGML_UNUSED(src2t); - - GGML_ASSERT(!ggml_is_transposed(src2)); - GGML_ASSERT(!ggml_is_transposed(src1)); - - GGML_ASSERT(src1t == GGML_TYPE_F32); - - const uint r2 = ne12/ne22; - const uint r3 = ne13/ne23; - - // find the break-even point where the matrix-matrix kernel becomes more efficient compared - // to the matrix-vector kernel - int ne11_mm_min = n_as; - - const int idx = ((int32_t *) dst->op_params)[0]; - - // batch size - GGML_ASSERT(ne01 == ne11); - - // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs - // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel - // !!! - // TODO: for now, always use mat-vec kernels until we figure out how to improve the - // indirect matrix multiplication - // !!! - if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && - ne20 % 32 == 0 && ne20 >= 64 && - ne11 > ne11_mm_min) { - - id pipeline = nil; - - switch (src2->type) { - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32 ].pipeline; break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32 ].pipeline; break; - case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32 ].pipeline; break; - case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32 ].pipeline; break; - case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32 ].pipeline; break; - case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32 ].pipeline; break; - case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32 ].pipeline; break; - case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32 ].pipeline; break; - case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32 ].pipeline; break; - case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32 ].pipeline; break; - case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32 ].pipeline; break; - case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32 ].pipeline; break; - case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; - case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; - default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; - [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; - [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:5]; - [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:6]; - [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:7]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:8]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:9]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:10]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:11]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:12]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:13]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; - [encoder setBytes:&r2 
length:sizeof(r2) atIndex:16]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:17]; - [encoder setBytes:&idx length:sizeof(idx) atIndex:18]; - // TODO: how to make this an array? read Metal docs - for (int j = 0; j < 8; ++j) { - // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 - struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; - - size_t offs_src_cur = 0; - id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); - - [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:19 + j]; - } - - [encoder setThreadgroupMemoryLength:8192 atIndex:0]; - - [encoder dispatchThreadgroups:MTLSizeMake((ne11 + 31)/32, (ne21 + 63)/64, n_as*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; - } else { - int nth0 = 32; - int nth1 = 1; - int nrows = 1; - //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); - - id pipeline = nil; - - // use custom matrix x vector kernel - switch (src2t) { - case GGML_TYPE_F32: - { - GGML_ASSERT(src1t == GGML_TYPE_F32); - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32].pipeline; - } break; - case GGML_TYPE_F16: - { - GGML_ASSERT(src1t == GGML_TYPE_F32); - nth0 = 32; - nth1 = 1; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32].pipeline; - } break; - case GGML_TYPE_Q4_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32].pipeline; - } break; - case GGML_TYPE_Q4_1: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32].pipeline; - } break; - case GGML_TYPE_Q5_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32].pipeline; - } break; - case GGML_TYPE_Q5_1: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32].pipeline; - } break; - case GGML_TYPE_Q8_0: - { - nth0 = 8; - nth1 = 8; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32].pipeline; - } break; - case GGML_TYPE_Q2_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32].pipeline; - } break; - case GGML_TYPE_Q3_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32].pipeline; - } break; - case GGML_TYPE_Q4_K: - { - nth0 = 4; //1; - nth1 = 8; //32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32].pipeline; - } break; - case GGML_TYPE_Q5_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32].pipeline; - } break; - case GGML_TYPE_Q6_K: - { - nth0 = 2; - nth1 = 32; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32].pipeline; - } break; - case GGML_TYPE_IQ2_XXS: - { - nth0 = 4; - nth1 = 16; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32].pipeline; - } break; - case GGML_TYPE_IQ2_XS: - { - nth0 = 4; - nth1 = 16; - pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32].pipeline; - } break; - default: - { - GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); - GGML_ASSERT(false && "not implemented"); - } - }; - - if (ggml_is_quantized(src2t)) { - GGML_ASSERT(ne20 >= nth0*nth1); - } - - const int64_t _ne1 = 1; // kernels needs a reference in constant memory - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst 
atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; - [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; - [encoder setBytes:&ne21 length:sizeof(ne21) atIndex:5]; - [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:6]; - [encoder setBytes:&nb20 length:sizeof(nb20) atIndex:7]; - [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:8]; - [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:9]; - [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; - [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:11]; - [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; - [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; - [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; - [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; - [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:17]; - [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:18]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:19]; - [encoder setBytes:&r2 length:sizeof(r2) atIndex:20]; - [encoder setBytes:&r3 length:sizeof(r3) atIndex:21]; - [encoder setBytes:&idx length:sizeof(idx) atIndex:22]; - // TODO: how to make this an array? read Metal docs - for (int j = 0; j < 8; ++j) { - // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 - struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; - - size_t offs_src_cur = 0; - id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); - - [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:23 + j]; - } - - if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || - src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || - src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { - const int mem_size = src2t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128; - [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src2t == GGML_TYPE_Q4_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src2t == GGML_TYPE_Q3_K) { -#ifdef GGML_QKK_64 - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; -#else - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; -#endif - } - else if (src2t == GGML_TYPE_Q5_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - else if (src2t == GGML_TYPE_Q6_K) { - [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } else { - const int64_t ny = (_ne1 + nrows - 1)/nrows; - [encoder dispatchThreadgroups:MTLSizeMake(ne21, ny, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; - } - } - } break; - case GGML_OP_GET_ROWS: - { - id pipeline = nil; - - switch (src0->type) { - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F32 ].pipeline; break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F16 ].pipeline; break; - case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0 ].pipeline; break; - case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1 ].pipeline; break; - case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0 ].pipeline; break; - case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1 ].pipeline; break; - case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0 ].pipeline; break; - case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K ].pipeline; break; - case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K ].pipeline; break; - case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K ].pipeline; break; - case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K ].pipeline; break; - case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K ].pipeline; break; - case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; - case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; - case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; - default: GGML_ASSERT(false && "not implemented"); - } - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:4]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:5]; - [encoder setBytes:&ne10 length:sizeof( int64_t) atIndex:6]; - [encoder setBytes:&nb10 length:sizeof( int64_t) atIndex:7]; - [encoder setBytes:&nb11 length:sizeof( int64_t) atIndex:8]; - [encoder 
setBytes:&nb1 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:10]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne10, ne11, 1) threadsPerThreadgroup:MTLSizeMake(32, 1, 1)]; - } break; - case GGML_OP_RMS_NORM: - { - GGML_ASSERT(ne00 % 4 == 0); - - float eps; - memcpy(&eps, dst->op_params, sizeof(float)); - - int nth = 32; // SIMD width - - while (nth < ne00/4 && nth < 1024) { - nth *= 2; - } - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RMS_NORM].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; - [encoder setBytes:&eps length:sizeof( float) atIndex:4]; - [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; - - const int64_t nrows = ggml_nrows(src0); - - [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_GROUP_NORM: - { - GGML_ASSERT(ne00 % 4 == 0); - - //float eps; - //memcpy(&eps, dst->op_params, sizeof(float)); - - const float eps = 1e-6f; // TODO: temporarily hardcoded - - const int32_t n_groups = ((int32_t *) dst->op_params)[0]; - - int nth = 32; // SIMD width - - //while (nth < ne00/4 && nth < 1024) { - // nth *= 2; - //} - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GROUP_NORM].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:5]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:6]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&n_groups length:sizeof( int32_t) atIndex:8]; - [encoder setBytes:&eps length:sizeof( float) atIndex:9]; - [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; - - [encoder dispatchThreadgroups:MTLSizeMake(n_groups, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_NORM: - { - float eps; - memcpy(&eps, dst->op_params, sizeof(float)); - - const int nth = MIN(256, ne00); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_NORM].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; - [encoder setBytes:&eps length:sizeof( float) atIndex:4]; - [encoder setThreadgroupMemoryLength:GGML_PAD(nth*sizeof(float), 16) atIndex:0]; - - const int64_t nrows = ggml_nrows(src0); - - [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_ALIBI: - { - GGML_ASSERT((src0t == GGML_TYPE_F32)); - - const int nth = MIN(1024, ne00); - - //const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_head = ((int32_t *) dst->op_params)[1]; - float max_bias; - memcpy(&max_bias, (int32_t *) dst->op_params + 2, sizeof(float)); - - const int n_heads_log2_floor = 1 << (int) floor(log2(n_head)); - const float m0 = powf(2.0f, -(max_bias) / 
n_heads_log2_floor); - const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ALIBI_F32].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; - [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; - [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; - [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; - [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; - [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; - [encoder setBytes:&m0 length:sizeof( float) atIndex:18]; - [encoder setBytes:&m1 length:sizeof( float) atIndex:19]; - [encoder setBytes:&n_heads_log2_floor length:sizeof(int) atIndex:20]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_ROPE: - { - GGML_ASSERT(ne10 == ne02); - - const int nth = MIN(1024, ne00); - - const int n_past = ((int32_t *) dst->op_params)[0]; - const int n_dims = ((int32_t *) dst->op_params)[1]; - const int mode = ((int32_t *) dst->op_params)[2]; - // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal - const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; - - float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; - memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); - memcpy(&freq_scale, (int32_t *) dst->op_params + 6, sizeof(float)); - memcpy(&ext_factor, (int32_t *) dst->op_params + 7, sizeof(float)); - memcpy(&attn_factor, (int32_t *) dst->op_params + 8, sizeof(float)); - memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); - memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); - - id pipeline = nil; - - switch (src0->type) { - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F32].pipeline; break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F16].pipeline; break; - default: GGML_ASSERT(false); - }; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:5]; - [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:6]; - [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:7]; - [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:8]; - [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:9]; - [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:10]; - [encoder setBytes:&ne0 length:sizeof( int64_t) 
atIndex:11]; - [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:12]; - [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:13]; - [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:14]; - [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:15]; - [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:16]; - [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:17]; - [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:18]; - [encoder setBytes:&n_past length:sizeof( int) atIndex:19]; - [encoder setBytes:&n_dims length:sizeof( int) atIndex:20]; - [encoder setBytes:&mode length:sizeof( int) atIndex:21]; - [encoder setBytes:&n_orig_ctx length:sizeof( int) atIndex:22]; - [encoder setBytes:&freq_base length:sizeof( float) atIndex:23]; - [encoder setBytes:&freq_scale length:sizeof( float) atIndex:24]; - [encoder setBytes:&ext_factor length:sizeof( float) atIndex:25]; - [encoder setBytes:&attn_factor length:sizeof( float) atIndex:26]; - [encoder setBytes:&beta_fast length:sizeof( float) atIndex:27]; - [encoder setBytes:&beta_slow length:sizeof( float) atIndex:28]; - - [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_IM2COL: - { - GGML_ASSERT(src0->type == GGML_TYPE_F16); - GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F16); - - const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; - const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; - const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; - const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; - const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; - const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; - const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; - - const int32_t N = src1->ne[is_2D ? 3 : 2]; - const int32_t IC = src1->ne[is_2D ? 2 : 1]; - const int32_t IH = is_2D ? src1->ne[1] : 1; - const int32_t IW = src1->ne[0]; - - const int32_t KH = is_2D ? src0->ne[1] : 1; - const int32_t KW = src0->ne[0]; - - const int32_t OH = is_2D ? dst->ne[2] : 1; - const int32_t OW = dst->ne[1]; - - const int32_t CHW = IC * KH * KW; - - const int32_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; - const int32_t ofs1 = src1->nb[is_2D ? 
2 : 1] / 4; - - id pipeline = nil; - - switch (src0->type) { - case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; - default: GGML_ASSERT(false); - }; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src1 offset:offs_src1 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ofs0 length:sizeof( int32_t) atIndex:2]; - [encoder setBytes:&ofs1 length:sizeof( int32_t) atIndex:3]; - [encoder setBytes:&IW length:sizeof( int32_t) atIndex:4]; - [encoder setBytes:&IH length:sizeof( int32_t) atIndex:5]; - [encoder setBytes:&CHW length:sizeof( int32_t) atIndex:6]; - [encoder setBytes:&s0 length:sizeof( int32_t) atIndex:7]; - [encoder setBytes:&s1 length:sizeof( int32_t) atIndex:8]; - [encoder setBytes:&p0 length:sizeof( int32_t) atIndex:9]; - [encoder setBytes:&p1 length:sizeof( int32_t) atIndex:10]; - [encoder setBytes:&d0 length:sizeof( int32_t) atIndex:11]; - [encoder setBytes:&d1 length:sizeof( int32_t) atIndex:12]; - - [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; - } break; - case GGML_OP_UPSCALE: - { - GGML_ASSERT(src0->type == GGML_TYPE_F32); - - const int sf = dst->op_params[0]; - - const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_UPSCALE_F32].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; - [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; - + } else { const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); - [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_PAD: - { - GGML_ASSERT(src0->type == GGML_TYPE_F32); + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } + } break; + case GGML_OP_ACC: + { + GGML_ASSERT(src0t == GGML_TYPE_F32); + GGML_ASSERT(src1t == GGML_TYPE_F32); + GGML_ASSERT(dstt == GGML_TYPE_F32); - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_PAD_F32].pipeline; + GGML_ASSERT(ggml_is_contiguous(src0)); + GGML_ASSERT(ggml_is_contiguous(src1)); - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; - [encoder setBytes:&ne02 length:sizeof(ne02) 
atIndex:4]; - [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; - [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; - [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; - [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; - [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; - [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; - [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; - [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; - [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; - [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; - [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; - [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; - [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + const size_t pnb1 = ((int32_t *) dst->op_params)[0]; + const size_t pnb2 = ((int32_t *) dst->op_params)[1]; + const size_t pnb3 = ((int32_t *) dst->op_params)[2]; + const size_t offs = ((int32_t *) dst->op_params)[3]; - const int nth = MIN(1024, ne0); + const bool inplace = (bool) ((int32_t *) dst->op_params)[4]; - [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - case GGML_OP_ARGSORT: - { - GGML_ASSERT(src0->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_I32); + if (!inplace) { + // run a separete kernel to cpy src->dst + // not sure how to avoid this + // TODO: make a simpler cpy_bytes kernel - const int nrows = ggml_nrows(src0); - - enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; - - id pipeline = nil; - - switch (order) { - case GGML_SORT_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; - case GGML_SORT_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; - default: GGML_ASSERT(false); - }; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; - - [encoder dispatchThreadgroups:MTLSizeMake(1, nrows, 1) threadsPerThreadgroup:MTLSizeMake(ne00, 1, 1)]; - } break; - case GGML_OP_LEAKY_RELU: - { - GGML_ASSERT(src0->type == GGML_TYPE_F32); - - float slope; - memcpy(&slope, dst->op_params, sizeof(float)); - - id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32].pipeline; - - [encoder setComputePipelineState:pipeline]; - [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; - [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; - [encoder setBytes:&slope length:sizeof(slope) atIndex:2]; - - const int64_t n = ggml_nelements(dst); - - [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; - } break; - case GGML_OP_DUP: - case GGML_OP_CPY: - case GGML_OP_CONT: - { - GGML_ASSERT(ne00 % ggml_blck_size(src0->type) == 0); - - int nth = MIN(1024, ne00/ggml_blck_size(src0->type)); - - id pipeline = nil; - - switch (src0t) { - case GGML_TYPE_F32: - { - GGML_ASSERT(ne0 % ggml_blck_size(dst->type) == 0); - - switch (dstt) { - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F16].pipeline; break; - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; break; - case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0].pipeline; break; - case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0].pipeline; break; - case GGML_TYPE_Q4_1: pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1].pipeline; break; - //case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0].pipeline; break; - //case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1].pipeline; break; - default: GGML_ASSERT(false && "not implemented"); - }; - } break; - case GGML_TYPE_F16: - { - switch (dstt) { - case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F16].pipeline; break; - case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F32].pipeline; break; - default: GGML_ASSERT(false && "not implemented"); - }; - } break; - default: GGML_ASSERT(false && "not implemented"); - } + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -2227,31 +1001,1253 @@ static bool ggml_metal_graph_compute( [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; - } break; - default: - { - GGML_METAL_LOG_ERROR("%s: error: node %3d, op = %8s not implemented\n", __func__, i, ggml_op_name(dst->op)); - GGML_ASSERT(false); } - } + + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ADD].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:7]; + [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:8]; + [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:9]; + [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:10]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:11]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:12]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:13]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:14]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:15]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:16]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:17]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:18]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:19]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:20]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:21]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:22]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:23]; + [encoder setBytes:&pnb1 length:sizeof(pnb1) atIndex:24]; + [encoder setBytes:&pnb2 length:sizeof(pnb2) atIndex:25]; + [encoder setBytes:&pnb3 length:sizeof(pnb3) atIndex:26]; + [encoder setBytes:&offs length:sizeof(offs) atIndex:27]; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne00); + + [encoder dispatchThreadgroups:MTLSizeMake(ne11, ne12, ne13) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_SCALE: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + const float scale = *(const float *) dst->op_params; + + int64_t n = ggml_nelements(dst); + + id pipeline = 
nil; + + if (n % 4 == 0) { + n /= 4; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE_4].pipeline; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SCALE].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:2]; + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_UNARY: + switch (ggml_get_unary_op(gf->nodes[i])) { + case GGML_UNARY_OP_TANH: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_TANH].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_RELU: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RELU].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_GELU: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + GGML_ASSERT(n % 4 == 0); + + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_GELU_QUICK: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GELU_QUICK].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + GGML_ASSERT(n % 4 == 0); + + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_UNARY_OP_SILU: + { + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SILU].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + GGML_ASSERT(n % 4 == 0); + + [encoder dispatchThreadgroups:MTLSizeMake(n/4, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + default: + { + GGML_METAL_LOG_WARN("%s: node %3d, op = %8s not implemented\n", __func__, i, ggml_op_name(dst->op)); + GGML_ASSERT(false); + } + } break; + case GGML_OP_SQR: + { + GGML_ASSERT(ggml_is_contiguous(src0)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SQR].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SUM_ROWS: + { + GGML_ASSERT(src0->nb[0] == ggml_type_size(src0->type)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SUM_ROWS].pipeline; + + [encoder 
setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&nb13 length:sizeof(nb13) atIndex:17]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:18]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:19]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:20]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:21]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:22]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:23]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:24]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:25]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_SOFT_MAX: + { + int nth = 32; // SIMD width + + id pipeline = nil; + + if (ne00%4 == 0) { + while (nth < ne00/4 && nth < 256) { + nth *= 2; + } + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX_4].pipeline; + } else { + while (nth < ne00 && nth < 1024) { + nth *= 2; + } + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; + } + + const float scale = ((float *) dst->op_params)[0]; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + if (id_src1) { + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + } else { + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; + } + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:6]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_DIAG_MASK_INF: + { + const int n_past = ((int32_t *)(dst->op_params))[0]; + + id pipeline = nil; + + if (ne00%8 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF_8].pipeline; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_DIAG_MASK_INF].pipeline; + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&n_past length:sizeof(int) atIndex:4]; + + if (ne00%8 == 0) { + [encoder dispatchThreadgroups:MTLSizeMake(ne00*ne01*ne02/8, 1, 1) 
threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } + else { + [encoder dispatchThreadgroups:MTLSizeMake(ne00, ne01, ne02) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } + } break; + case GGML_OP_MUL_MAT: + { + GGML_ASSERT(ne00 == ne10); + + // TODO: assert that dim2 and dim3 are contiguous + GGML_ASSERT(ne12 % ne02 == 0); + GGML_ASSERT(ne13 % ne03 == 0); + + const uint r2 = ne12/ne02; + const uint r3 = ne13/ne03; + + // find the break-even point where the matrix-matrix kernel becomes more efficient compared + // to the matrix-vector kernel + int ne11_mm_min = 1; + +#if 0 + // the numbers below are measured on M2 Ultra for 7B and 13B models + // these numbers do not translate to other devices or model sizes + // TODO: need to find a better approach + if ([ctx->device.name isEqualToString:@"Apple M2 Ultra"]) { + switch (src0t) { + case GGML_TYPE_F16: ne11_mm_min = 2; break; + case GGML_TYPE_Q8_0: ne11_mm_min = 7; break; + case GGML_TYPE_Q2_K: ne11_mm_min = 15; break; + case GGML_TYPE_Q3_K: ne11_mm_min = 7; break; + case GGML_TYPE_Q4_0: + case GGML_TYPE_Q4_1: ne11_mm_min = 15; break; + case GGML_TYPE_Q4_K: ne11_mm_min = 11; break; + case GGML_TYPE_Q5_0: // not tested yet + case GGML_TYPE_Q5_1: ne11_mm_min = 13; break; // not tested yet + case GGML_TYPE_Q5_K: ne11_mm_min = 7; break; + case GGML_TYPE_Q6_K: ne11_mm_min = 7; break; + default: ne11_mm_min = 1; break; + } + } +#endif + + // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs + // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel + if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && + !ggml_is_transposed(src0) && + !ggml_is_transposed(src1) && + src1t == GGML_TYPE_F32 && + ne00 % 32 == 0 && ne00 >= 64 && + (ne11 > ne11_mm_min || (ggml_is_quantized(src0t) && ne12 > 1))) { + //printf("matrix: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q8_0_F32 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; + default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); + } + + [encoder 
setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:5]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:6]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:7]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:8]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:9]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:10]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:11]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:12]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:13]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:14]; + [encoder setThreadgroupMemoryLength:8192 atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake( (ne11 + 31)/32, (ne01 + 63)/64, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + } else { + int nth0 = 32; + int nth1 = 1; + int nrows = 1; + //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + + // use custom matrix x vector kernel + switch (src0t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F32_F32].pipeline; + nrows = 4; + } break; + case GGML_TYPE_F16: + { + nth0 = 32; + nth1 = 1; + if (src1t == GGML_TYPE_F32) { + if (ne11 * ne12 < 4) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_1ROW].pipeline; + } else if (ne00 >= 128 && ne01 >= 8 && ne00%4 == 0) { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32_L4].pipeline; + nrows = ne11; + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F32].pipeline; + nrows = 4; + } + } else { + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_F16_F16].pipeline; + nrows = 4; + } + } break; + case GGML_TYPE_Q4_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_0_F32].pipeline; + } break; + case GGML_TYPE_Q4_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_1_F32].pipeline; + } break; + case GGML_TYPE_Q5_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_0_F32].pipeline; + } break; + case GGML_TYPE_Q5_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_1_F32].pipeline; + } break; + case GGML_TYPE_Q8_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q8_0_F32].pipeline; + } break; + case GGML_TYPE_Q2_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q2_K_F32].pipeline; + } break; + case GGML_TYPE_Q3_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q3_K_F32].pipeline; + } break; + case GGML_TYPE_Q4_K: + { + nth0 = 4; //1; + nth1 = 8; //32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q4_K_F32].pipeline; + } break; + case GGML_TYPE_Q5_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q5_K_F32].pipeline; + } break; + case GGML_TYPE_Q6_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_Q6_K_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + pipeline = 
ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32].pipeline; + } break; + default: + { + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); + GGML_ASSERT(false && "not implemented"); + } + }; + + if (ggml_is_quantized(src0t)) { + GGML_ASSERT(ne00 >= nth0*nth1); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:9]; + [encoder setBytes:&ne11 length:sizeof(ne11) atIndex:10]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:11]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:12]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:13]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:14]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:15]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:16]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:17]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:18]; + + if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || + src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || + src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { + const int mem_size = src0t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q3_K) { +#ifdef GGML_QKK_64 + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; +#else + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; +#endif + } + else if (src0t == GGML_TYPE_Q5_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src0t == GGML_TYPE_Q6_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 1)/2, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else { + const int64_t ny = (ne11 + nrows - 1)/nrows; + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ny, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + } + } break; + case GGML_OP_MUL_MAT_ID: + { + //GGML_ASSERT(ne00 == ne10); + //GGML_ASSERT(ne03 == ne13); + + GGML_ASSERT(src0t == GGML_TYPE_I32); + + const int n_as = ((int32_t *) dst->op_params)[1]; + + // TODO: make this more general + GGML_ASSERT(n_as <= 8); + + // max size of the src1ids array in the kernel stack + GGML_ASSERT(ne11 <= 512); + + struct ggml_tensor * src2 = gf->nodes[i]->src[2]; + + const int64_t ne20 = src2 ? src2->ne[0] : 0; + const int64_t ne21 = src2 ? src2->ne[1] : 0; + const int64_t ne22 = src2 ? src2->ne[2] : 0; + const int64_t ne23 = src2 ? src2->ne[3] : 0; GGML_UNUSED(ne23); + + const uint64_t nb20 = src2 ? src2->nb[0] : 0; GGML_UNUSED(nb20); + const uint64_t nb21 = src2 ? src2->nb[1] : 0; + const uint64_t nb22 = src2 ? src2->nb[2] : 0; + const uint64_t nb23 = src2 ? src2->nb[3] : 0; GGML_UNUSED(nb23); + + const enum ggml_type src2t = src2 ? src2->type : GGML_TYPE_COUNT; GGML_UNUSED(src2t); + + GGML_ASSERT(!ggml_is_transposed(src2)); + GGML_ASSERT(!ggml_is_transposed(src1)); + + GGML_ASSERT(src1t == GGML_TYPE_F32); + + const uint r2 = ne12/ne22; + const uint r3 = ne13/ne23; + + // find the break-even point where the matrix-matrix kernel becomes more efficient compared + // to the matrix-vector kernel + int ne11_mm_min = n_as; + + const int idx = ((int32_t *) dst->op_params)[0]; + + // batch size + GGML_ASSERT(ne01 == ne11); + + // for now the matrix-matrix multiplication kernel only works on A14+/M1+ SoCs + // AMD GPU and older A-chips will reuse matrix-vector multiplication kernel + // !!! + // TODO: for now, always use mat-vec kernels until we figure out how to improve the + // indirect matrix multiplication + // !!! 
+ if ([ctx->device supportsFamily:MTLGPUFamilyApple7] && + ne20 % 32 == 0 && ne20 >= 64 && + ne11 > ne11_mm_min) { + + id pipeline = nil; + + switch (src2->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_1_F32 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_0_F32 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_1_F32 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q8_0_F32 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q2_K_F32 ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q3_K_F32 ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_K_F32 ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q5_K_F32 ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q6_K_F32 ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; + default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; + [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; + [encoder setBytes:&ne22 length:sizeof(ne22) atIndex:5]; + [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:6]; + [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:7]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:8]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:9]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:10]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:11]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:12]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:13]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:16]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:17]; + [encoder setBytes:&idx length:sizeof(idx) atIndex:18]; + // TODO: how to make this an array? 
read Metal docs + for (int j = 0; j < 8; ++j) { + // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 + struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; + + size_t offs_src_cur = 0; + id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); + + [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:19 + j]; + } + + [encoder setThreadgroupMemoryLength:8192 atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake((ne11 + 31)/32, (ne21 + 63)/64, n_as*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(128, 1, 1)]; + } else { + int nth0 = 32; + int nth1 = 1; + int nrows = 1; + //printf("vector: ne00 = %6d, ne01 = %6d, ne02 = %6d, ne11 = %6d, ne12 = %6d\n", ne00, ne01, ne02, ne11, ne12); + + id pipeline = nil; + + // use custom matrix x vector kernel + switch (src2t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32].pipeline; + } break; + case GGML_TYPE_F16: + { + GGML_ASSERT(src1t == GGML_TYPE_F32); + nth0 = 32; + nth1 = 1; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32].pipeline; + } break; + case GGML_TYPE_Q4_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_0_F32].pipeline; + } break; + case GGML_TYPE_Q4_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_1_F32].pipeline; + } break; + case GGML_TYPE_Q5_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_0_F32].pipeline; + } break; + case GGML_TYPE_Q5_1: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_1_F32].pipeline; + } break; + case GGML_TYPE_Q8_0: + { + nth0 = 8; + nth1 = 8; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q8_0_F32].pipeline; + } break; + case GGML_TYPE_Q2_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q2_K_F32].pipeline; + } break; + case GGML_TYPE_Q3_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q3_K_F32].pipeline; + } break; + case GGML_TYPE_Q4_K: + { + nth0 = 4; //1; + nth1 = 8; //32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q4_K_F32].pipeline; + } break; + case GGML_TYPE_Q5_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q5_K_F32].pipeline; + } break; + case GGML_TYPE_Q6_K: + { + nth0 = 2; + nth1 = 32; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_Q6_K_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XXS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32].pipeline; + } break; + case GGML_TYPE_IQ2_XS: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32].pipeline; + } break; + default: + { + GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); + GGML_ASSERT(false && "not implemented"); + } + }; + + if (ggml_is_quantized(src2t)) { + GGML_ASSERT(ne20 >= nth0*nth1); + } + + const int64_t _ne1 = 1; // kernels needs a reference in constant memory + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:3]; + [encoder setBytes:&ne20 length:sizeof(ne20) atIndex:4]; + [encoder setBytes:&ne21 length:sizeof(ne21) atIndex:5]; + 
[encoder setBytes:&ne22 length:sizeof(ne22) atIndex:6]; + [encoder setBytes:&nb20 length:sizeof(nb20) atIndex:7]; + [encoder setBytes:&nb21 length:sizeof(nb21) atIndex:8]; + [encoder setBytes:&nb22 length:sizeof(nb22) atIndex:9]; + [encoder setBytes:&ne10 length:sizeof(ne10) atIndex:10]; + [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:11]; + [encoder setBytes:&ne12 length:sizeof(ne12) atIndex:12]; + [encoder setBytes:&ne13 length:sizeof(ne13) atIndex:13]; + [encoder setBytes:&nb10 length:sizeof(nb10) atIndex:14]; + [encoder setBytes:&nb11 length:sizeof(nb11) atIndex:15]; + [encoder setBytes:&nb12 length:sizeof(nb12) atIndex:16]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:17]; + [encoder setBytes:&_ne1 length:sizeof(_ne1) atIndex:18]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:19]; + [encoder setBytes:&r2 length:sizeof(r2) atIndex:20]; + [encoder setBytes:&r3 length:sizeof(r3) atIndex:21]; + [encoder setBytes:&idx length:sizeof(idx) atIndex:22]; + // TODO: how to make this an array? read Metal docs + for (int j = 0; j < 8; ++j) { + // NOTE: this is done like this to avoid uninitialized kernel arguments when n_as < 8 + struct ggml_tensor * src_cur = dst->src[2 + (j % n_as)]; + + size_t offs_src_cur = 0; + id id_src_cur = ggml_metal_get_buffer(ctx, src_cur, &offs_src_cur); + + [encoder setBuffer:id_src_cur offset:offs_src_cur atIndex:23 + j]; + } + + if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || + src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || + src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { + const int mem_size = src2t == GGML_TYPE_IQ2_XXS ? 
256*8+128 : 512*8+128; + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q4_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q3_K) { +#ifdef GGML_QKK_64 + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; +#else + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; +#endif + } + else if (src2t == GGML_TYPE_Q5_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + else if (src2t == GGML_TYPE_Q6_K) { + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 1)/2, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else { + const int64_t ny = (_ne1 + nrows - 1)/nrows; + [encoder dispatchThreadgroups:MTLSizeMake(ne21, ny, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } + } + } break; + case GGML_OP_GET_ROWS: + { + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F32 ].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_F16 ].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_0 ].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_1 ].pipeline; break; + case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_0 ].pipeline; break; + case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_1 ].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q8_0 ].pipeline; break; + case GGML_TYPE_Q2_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q2_K ].pipeline; break; + case GGML_TYPE_Q3_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q3_K ].pipeline; break; + case GGML_TYPE_Q4_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q4_K ].pipeline; break; + case GGML_TYPE_Q5_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q5_K ].pipeline; break; + case GGML_TYPE_Q6_K: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_Q6_K ].pipeline; break; + case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; + case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; + case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; + default: GGML_ASSERT(false && "not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:4]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&ne10 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&nb10 length:sizeof( int64_t) atIndex:7]; + [encoder setBytes:&nb11 length:sizeof( int64_t) atIndex:8]; + [encoder 
setBytes:&nb1 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:10]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne10, ne11, 1) threadsPerThreadgroup:MTLSizeMake(32, 1, 1)]; + } break; + case GGML_OP_RMS_NORM: + { + GGML_ASSERT(ne00 % 4 == 0); + + float eps; + memcpy(&eps, dst->op_params, sizeof(float)); + + int nth = 32; // SIMD width + + while (nth < ne00/4 && nth < 1024) { + nth *= 2; + } + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_RMS_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; + [encoder setBytes:&eps length:sizeof( float) atIndex:4]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + const int64_t nrows = ggml_nrows(src0); + + [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_GROUP_NORM: + { + GGML_ASSERT(ne00 % 4 == 0); + + //float eps; + //memcpy(&eps, dst->op_params, sizeof(float)); + + const float eps = 1e-6f; // TODO: temporarily hardcoded + + const int32_t n_groups = ((int32_t *) dst->op_params)[0]; + + int nth = 32; // SIMD width + + //while (nth < ne00/4 && nth < 1024) { + // nth *= 2; + //} + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GROUP_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:5]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&n_groups length:sizeof( int32_t) atIndex:8]; + [encoder setBytes:&eps length:sizeof( float) atIndex:9]; + [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; + + [encoder dispatchThreadgroups:MTLSizeMake(n_groups, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_NORM: + { + float eps; + memcpy(&eps, dst->op_params, sizeof(float)); + + const int nth = MIN(256, ne00); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_NORM].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:3]; + [encoder setBytes:&eps length:sizeof( float) atIndex:4]; + [encoder setThreadgroupMemoryLength:GGML_PAD(nth*sizeof(float), 16) atIndex:0]; + + const int64_t nrows = ggml_nrows(src0); + + [encoder dispatchThreadgroups:MTLSizeMake(nrows, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ALIBI: + { + GGML_ASSERT((src0t == GGML_TYPE_F32)); + + const int nth = MIN(1024, ne00); + + //const int n_past = ((int32_t *) dst->op_params)[0]; + const int n_head = ((int32_t *) dst->op_params)[1]; + float max_bias; + memcpy(&max_bias, (int32_t *) dst->op_params + 2, sizeof(float)); + + const int n_heads_log2_floor = 1 << (int) floor(log2(n_head)); + const float m0 = powf(2.0f, -(max_bias) / 
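+            // ALiBi slopes: the first n_heads_log2_floor heads use m0^(h+1), the remaining
+            // heads use m1^(2*(h - n_heads_log2_floor) + 1), matching the CPU path in ggml.c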
n_heads_log2_floor); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ALIBI_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + [encoder setBytes:&m0 length:sizeof( float) atIndex:18]; + [encoder setBytes:&m1 length:sizeof( float) atIndex:19]; + [encoder setBytes:&n_heads_log2_floor length:sizeof(int) atIndex:20]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ROPE: + { + GGML_ASSERT(ne10 == ne02); + + const int nth = MIN(1024, ne00); + + const int n_past = ((int32_t *) dst->op_params)[0]; + const int n_dims = ((int32_t *) dst->op_params)[1]; + const int mode = ((int32_t *) dst->op_params)[2]; + // skip 3, n_ctx, used in GLM RoPE, unimplemented in metal + const int n_orig_ctx = ((int32_t *) dst->op_params)[4]; + + float freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow; + memcpy(&freq_base, (int32_t *) dst->op_params + 5, sizeof(float)); + memcpy(&freq_scale, (int32_t *) dst->op_params + 6, sizeof(float)); + memcpy(&ext_factor, (int32_t *) dst->op_params + 7, sizeof(float)); + memcpy(&attn_factor, (int32_t *) dst->op_params + 8, sizeof(float)); + memcpy(&beta_fast, (int32_t *) dst->op_params + 9, sizeof(float)); + memcpy(&beta_slow, (int32_t *) dst->op_params + 10, sizeof(float)); + + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F32].pipeline; break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ROPE_F16].pipeline; break; + default: GGML_ASSERT(false); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:1]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:6]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:10]; + [encoder setBytes:&ne0 length:sizeof( int64_t) 
atIndex:11]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:14]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:17]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:18]; + [encoder setBytes:&n_past length:sizeof( int) atIndex:19]; + [encoder setBytes:&n_dims length:sizeof( int) atIndex:20]; + [encoder setBytes:&mode length:sizeof( int) atIndex:21]; + [encoder setBytes:&n_orig_ctx length:sizeof( int) atIndex:22]; + [encoder setBytes:&freq_base length:sizeof( float) atIndex:23]; + [encoder setBytes:&freq_scale length:sizeof( float) atIndex:24]; + [encoder setBytes:&ext_factor length:sizeof( float) atIndex:25]; + [encoder setBytes:&attn_factor length:sizeof( float) atIndex:26]; + [encoder setBytes:&beta_fast length:sizeof( float) atIndex:27]; + [encoder setBytes:&beta_slow length:sizeof( float) atIndex:28]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_IM2COL: + { + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F16); + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int32_t N = src1->ne[is_2D ? 3 : 2]; + const int32_t IC = src1->ne[is_2D ? 2 : 1]; + const int32_t IH = is_2D ? src1->ne[1] : 1; + const int32_t IW = src1->ne[0]; + + const int32_t KH = is_2D ? src0->ne[1] : 1; + const int32_t KW = src0->ne[0]; + + const int32_t OH = is_2D ? dst->ne[2] : 1; + const int32_t OW = dst->ne[1]; + + const int32_t CHW = IC * KH * KW; + + const int32_t ofs0 = src1->nb[is_2D ? 3 : 2] / 4; + const int32_t ofs1 = src1->nb[is_2D ? 
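+            // nb[] strides are in bytes; dividing by 4 converts them to float-element
+            // offsets, since src1 is asserted to be F32 above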
2 : 1] / 4; + + id pipeline = nil; + + switch (src0->type) { + case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; + default: GGML_ASSERT(false); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src1 offset:offs_src1 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ofs0 length:sizeof( int32_t) atIndex:2]; + [encoder setBytes:&ofs1 length:sizeof( int32_t) atIndex:3]; + [encoder setBytes:&IW length:sizeof( int32_t) atIndex:4]; + [encoder setBytes:&IH length:sizeof( int32_t) atIndex:5]; + [encoder setBytes:&CHW length:sizeof( int32_t) atIndex:6]; + [encoder setBytes:&s0 length:sizeof( int32_t) atIndex:7]; + [encoder setBytes:&s1 length:sizeof( int32_t) atIndex:8]; + [encoder setBytes:&p0 length:sizeof( int32_t) atIndex:9]; + [encoder setBytes:&p1 length:sizeof( int32_t) atIndex:10]; + [encoder setBytes:&d0 length:sizeof( int32_t) atIndex:11]; + [encoder setBytes:&d1 length:sizeof( int32_t) atIndex:12]; + + [encoder dispatchThreadgroups:MTLSizeMake(IC, OH, OW) threadsPerThreadgroup:MTLSizeMake(N, KH, KW)]; + } break; + case GGML_OP_UPSCALE: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + const int sf = dst->op_params[0]; + + const id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_UPSCALE_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + [encoder setBytes:&sf length:sizeof(sf) atIndex:18]; + + const int nth = MIN((int) pipeline.maxTotalThreadsPerThreadgroup, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_PAD: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_PAD_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof(ne03) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(nb00) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(nb01) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(nb02) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(nb03) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof(ne0) 
atIndex:10]; + [encoder setBytes:&ne1 length:sizeof(ne1) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof(ne2) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof(ne3) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(nb0) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(nb1) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(nb2) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(nb3) atIndex:17]; + + const int nth = MIN(1024, ne0); + + [encoder dispatchThreadgroups:MTLSizeMake(ne1, ne2, ne3) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + case GGML_OP_ARGSORT: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_I32); + + const int nrows = ggml_nrows(src0); + + enum ggml_sort_order order = (enum ggml_sort_order) dst->op_params[0]; + + id pipeline = nil; + + switch (order) { + case GGML_SORT_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; + case GGML_SORT_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; + default: GGML_ASSERT(false); + }; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + + [encoder dispatchThreadgroups:MTLSizeMake(1, nrows, 1) threadsPerThreadgroup:MTLSizeMake(ne00, 1, 1)]; + } break; + case GGML_OP_LEAKY_RELU: + { + GGML_ASSERT(src0->type == GGML_TYPE_F32); + + float slope; + memcpy(&slope, dst->op_params, sizeof(float)); + + id pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_LEAKY_RELU_F32].pipeline; + + [encoder setComputePipelineState:pipeline]; + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&slope length:sizeof(slope) atIndex:2]; + + const int64_t n = ggml_nelements(dst); + + [encoder dispatchThreadgroups:MTLSizeMake(n, 1, 1) threadsPerThreadgroup:MTLSizeMake(1, 1, 1)]; + } break; + case GGML_OP_DUP: + case GGML_OP_CPY: + case GGML_OP_CONT: + { + GGML_ASSERT(ne00 % ggml_blck_size(src0->type) == 0); + + int nth = MIN(1024, ne00/ggml_blck_size(src0->type)); + + id pipeline = nil; + + switch (src0t) { + case GGML_TYPE_F32: + { + GGML_ASSERT(ne0 % ggml_blck_size(dst->type) == 0); + + switch (dstt) { + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F16].pipeline; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_F32].pipeline; break; + case GGML_TYPE_Q8_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q8_0].pipeline; break; + case GGML_TYPE_Q4_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_0].pipeline; break; + case GGML_TYPE_Q4_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q4_1].pipeline; break; + //case GGML_TYPE_Q5_0: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_0].pipeline; break; + //case GGML_TYPE_Q5_1: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F32_Q5_1].pipeline; break; + default: GGML_ASSERT(false && "not implemented"); + }; + } break; + case GGML_TYPE_F16: + { + switch (dstt) { + case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F16].pipeline; break; + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_CPY_F16_F32].pipeline; break; + default: GGML_ASSERT(false && "not implemented"); + }; + } break; + default: GGML_ASSERT(false && "not implemented"); + } + + [encoder setComputePipelineState:pipeline]; + [encoder 
setBuffer:id_src0 offset:offs_src0 atIndex:0]; + [encoder setBuffer:id_dst offset:offs_dst atIndex:1]; + [encoder setBytes:&ne00 length:sizeof( int64_t) atIndex:2]; + [encoder setBytes:&ne01 length:sizeof( int64_t) atIndex:3]; + [encoder setBytes:&ne02 length:sizeof( int64_t) atIndex:4]; + [encoder setBytes:&ne03 length:sizeof( int64_t) atIndex:5]; + [encoder setBytes:&nb00 length:sizeof(uint64_t) atIndex:6]; + [encoder setBytes:&nb01 length:sizeof(uint64_t) atIndex:7]; + [encoder setBytes:&nb02 length:sizeof(uint64_t) atIndex:8]; + [encoder setBytes:&nb03 length:sizeof(uint64_t) atIndex:9]; + [encoder setBytes:&ne0 length:sizeof( int64_t) atIndex:10]; + [encoder setBytes:&ne1 length:sizeof( int64_t) atIndex:11]; + [encoder setBytes:&ne2 length:sizeof( int64_t) atIndex:12]; + [encoder setBytes:&ne3 length:sizeof( int64_t) atIndex:13]; + [encoder setBytes:&nb0 length:sizeof(uint64_t) atIndex:14]; + [encoder setBytes:&nb1 length:sizeof(uint64_t) atIndex:15]; + [encoder setBytes:&nb2 length:sizeof(uint64_t) atIndex:16]; + [encoder setBytes:&nb3 length:sizeof(uint64_t) atIndex:17]; + + [encoder dispatchThreadgroups:MTLSizeMake(ne01, ne02, ne03) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; + } break; + default: + { + GGML_METAL_LOG_ERROR("%s: error: node %3d, op = %8s not implemented\n", __func__, i, ggml_op_name(dst->op)); + GGML_ASSERT(false); + } + } #ifndef GGML_METAL_NDEBUG - [encoder popDebugGroup]; + [encoder popDebugGroup]; #endif - } + } - if (encoder != nil) { - [encoder endEncoding]; - encoder = nil; - } + if (encoder != nil) { + [encoder endEncoding]; + encoder = nil; + } - [command_buffer commit]; - }); - } - - // wait for all threads to finish - dispatch_barrier_sync(ctx->d_queue, ^{}); + [command_buffer commit]; + }); // check status of command buffers // needed to detect if the device ran out-of-memory for example (#1881) From 862f5e41ab1fdf12d6f59455aad3f5dd8258f805 Mon Sep 17 00:00:00 2001 From: Neuman Vong Date: Wed, 17 Jan 2024 00:47:34 +1100 Subject: [PATCH 486/859] android : introduce starter project example (#4926) * Introduce starter project for Android Based on examples/llama.swiftui. 
* Add github workflow * Set NDK version * Only build arm64-v8a in CI * Sync bench code * Rename CI prop to skip-armeabi-v7a * Remove unused tests --- .github/workflows/build.yml | 25 ++ examples/llama.android/.gitignore | 33 ++ examples/llama.android/README.md | 0 examples/llama.android/app/.gitignore | 1 + examples/llama.android/app/build.gradle.kts | 91 ++++ examples/llama.android/app/proguard-rules.pro | 21 + .../app/src/main/AndroidManifest.xml | 30 ++ .../app/src/main/cpp/CMakeLists.txt | 50 +++ .../app/src/main/cpp/llama-android.cpp | 394 ++++++++++++++++++ .../java/com/example/llama/Downloadable.kt | 119 ++++++ .../src/main/java/com/example/llama/Llm.kt | 172 ++++++++ .../java/com/example/llama/MainActivity.kt | 154 +++++++ .../java/com/example/llama/MainViewModel.kt | 104 +++++ .../java/com/example/llama/ui/theme/Color.kt | 11 + .../java/com/example/llama/ui/theme/Theme.kt | 70 ++++ .../java/com/example/llama/ui/theme/Type.kt | 34 ++ .../res/drawable/ic_launcher_background.xml | 170 ++++++++ .../res/drawable/ic_launcher_foreground.xml | 30 ++ .../main/res/mipmap-anydpi/ic_launcher.xml | 6 + .../res/mipmap-anydpi/ic_launcher_round.xml | 6 + .../src/main/res/mipmap-hdpi/ic_launcher.webp | Bin 0 -> 1404 bytes .../res/mipmap-hdpi/ic_launcher_round.webp | Bin 0 -> 2898 bytes .../src/main/res/mipmap-mdpi/ic_launcher.webp | Bin 0 -> 982 bytes .../res/mipmap-mdpi/ic_launcher_round.webp | Bin 0 -> 1772 bytes .../main/res/mipmap-xhdpi/ic_launcher.webp | Bin 0 -> 1900 bytes .../res/mipmap-xhdpi/ic_launcher_round.webp | Bin 0 -> 3918 bytes .../main/res/mipmap-xxhdpi/ic_launcher.webp | Bin 0 -> 2884 bytes .../res/mipmap-xxhdpi/ic_launcher_round.webp | Bin 0 -> 5914 bytes .../main/res/mipmap-xxxhdpi/ic_launcher.webp | Bin 0 -> 3844 bytes .../res/mipmap-xxxhdpi/ic_launcher_round.webp | Bin 0 -> 7778 bytes .../app/src/main/res/values/colors.xml | 10 + .../app/src/main/res/values/strings.xml | 3 + .../app/src/main/res/values/themes.xml | 5 + .../app/src/main/res/xml/backup_rules.xml | 13 + .../main/res/xml/data_extraction_rules.xml | 19 + examples/llama.android/build.gradle.kts | 5 + examples/llama.android/gradle.properties | 23 + .../gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 59203 bytes .../gradle/wrapper/gradle-wrapper.properties | 6 + examples/llama.android/gradlew | 185 ++++++++ examples/llama.android/settings.gradle.kts | 17 + 41 files changed, 1807 insertions(+) create mode 100644 examples/llama.android/.gitignore create mode 100644 examples/llama.android/README.md create mode 100644 examples/llama.android/app/.gitignore create mode 100644 examples/llama.android/app/build.gradle.kts create mode 100644 examples/llama.android/app/proguard-rules.pro create mode 100644 examples/llama.android/app/src/main/AndroidManifest.xml create mode 100644 examples/llama.android/app/src/main/cpp/CMakeLists.txt create mode 100644 examples/llama.android/app/src/main/cpp/llama-android.cpp create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/Downloadable.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/Llm.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/MainActivity.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt create mode 100644 examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt create mode 100644 
examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt create mode 100644 examples/llama.android/app/src/main/res/drawable/ic_launcher_background.xml create mode 100644 examples/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml create mode 100644 examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml create mode 100644 examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml create mode 100644 examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp create mode 100644 examples/llama.android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp create mode 100644 examples/llama.android/app/src/main/res/values/colors.xml create mode 100644 examples/llama.android/app/src/main/res/values/strings.xml create mode 100644 examples/llama.android/app/src/main/res/values/themes.xml create mode 100644 examples/llama.android/app/src/main/res/xml/backup_rules.xml create mode 100644 examples/llama.android/app/src/main/res/xml/data_extraction_rules.xml create mode 100644 examples/llama.android/build.gradle.kts create mode 100644 examples/llama.android/gradle.properties create mode 100644 examples/llama.android/gradle/wrapper/gradle-wrapper.jar create mode 100644 examples/llama.android/gradle/wrapper/gradle-wrapper.properties create mode 100755 examples/llama.android/gradlew create mode 100644 examples/llama.android/settings.gradle.kts diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0a28a1111..367df07a7 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -515,6 +515,31 @@ jobs: - name: Build Xcode project run: xcodebuild -project examples/llama.swiftui/llama.swiftui.xcodeproj -scheme llama.swiftui -sdk iphoneos CODE_SIGNING_REQUIRED=NO CODE_SIGN_IDENTITY= -destination 'generic/platform=iOS' build + android-build: + runs-on: ubuntu-latest + + steps: + - name: Clone + uses: actions/checkout@v3 + + - name: Set up JDK + uses: actions/setup-java@v3 + with: + java-version: 17 + distribution: zulu + + - name: Setup Android SDK + uses: android-actions/setup-android@v3 + with: + log-accepted-android-sdk-licenses: false + + - name: Build + run: | + cd examples/llama.android + + # Skip armeabi-v7a for now (https://github.com/llvm/llvm-project/issues/65820). 
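+          # -P defines a Gradle project property; the app module's build.gradle.kts
+          # checks project.hasProperty("skip-armeabi-v7a") and narrows ndk.abiFilters.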
+ ./gradlew build --no-daemon -Pskip-armeabi-v7a + # freeBSD-latest: # runs-on: macos-12 # steps: diff --git a/examples/llama.android/.gitignore b/examples/llama.android/.gitignore new file mode 100644 index 000000000..347e252ef --- /dev/null +++ b/examples/llama.android/.gitignore @@ -0,0 +1,33 @@ +# Gradle files +.gradle/ +build/ + +# Local configuration file (sdk path, etc) +local.properties + +# Log/OS Files +*.log + +# Android Studio generated files and folders +captures/ +.externalNativeBuild/ +.cxx/ +*.apk +output.json + +# IntelliJ +*.iml +.idea/ +misc.xml +deploymentTargetDropDown.xml +render.experimental.xml + +# Keystore files +*.jks +*.keystore + +# Google Services (e.g. APIs or Firebase) +google-services.json + +# Android Profiling +*.hprof diff --git a/examples/llama.android/README.md b/examples/llama.android/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/examples/llama.android/app/.gitignore b/examples/llama.android/app/.gitignore new file mode 100644 index 000000000..796b96d1c --- /dev/null +++ b/examples/llama.android/app/.gitignore @@ -0,0 +1 @@ +/build diff --git a/examples/llama.android/app/build.gradle.kts b/examples/llama.android/app/build.gradle.kts new file mode 100644 index 000000000..7815a8025 --- /dev/null +++ b/examples/llama.android/app/build.gradle.kts @@ -0,0 +1,91 @@ +plugins { + id("com.android.application") + id("org.jetbrains.kotlin.android") +} + +android { + namespace = "com.example.llama" + compileSdk = 34 + + ndkVersion = "26.1.10909125" + + defaultConfig { + applicationId = "com.example.llama" + minSdk = 33 + targetSdk = 34 + versionCode = 1 + versionName = "1.0" + + testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" + vectorDrawables { + useSupportLibrary = true + } + ndk { + // Workaround for https://github.com/llvm/llvm-project/issues/65820 + // affecting armeabi-v7a. Skip armeabi-v7a when invoked with + // -Pskip-armeabi-v7a (e.g., ./gradlew build -Pskip-armeabi-v7a). 
+ if (project.hasProperty("skip-armeabi-v7a")) { + abiFilters += listOf("arm64-v8a", "x86_64", "x86") + } + } + externalNativeBuild { + cmake { + cppFlags += listOf() + arguments += listOf() + } + } + } + + buildTypes { + release { + isMinifyEnabled = false + proguardFiles( + getDefaultProguardFile("proguard-android-optimize.txt"), + "proguard-rules.pro" + ) + } + } + compileOptions { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 + } + kotlinOptions { + jvmTarget = "1.8" + } + buildFeatures { + compose = true + } + composeOptions { + kotlinCompilerExtensionVersion = "1.5.1" + } + packaging { + resources { + excludes += "/META-INF/{AL2.0,LGPL2.1}" + } + } + externalNativeBuild { + cmake { + path = file("src/main/cpp/CMakeLists.txt") + version = "3.22.1" + } + } +} + +dependencies { + + implementation("androidx.core:core-ktx:1.12.0") + implementation("androidx.lifecycle:lifecycle-runtime-ktx:2.6.2") + implementation("androidx.activity:activity-compose:1.8.2") + implementation(platform("androidx.compose:compose-bom:2023.08.00")) + implementation("androidx.compose.ui:ui") + implementation("androidx.compose.ui:ui-graphics") + implementation("androidx.compose.ui:ui-tooling-preview") + implementation("androidx.compose.material3:material3") + testImplementation("junit:junit:4.13.2") + androidTestImplementation("androidx.test.ext:junit:1.1.5") + androidTestImplementation("androidx.test.espresso:espresso-core:3.5.1") + androidTestImplementation(platform("androidx.compose:compose-bom:2023.08.00")) + androidTestImplementation("androidx.compose.ui:ui-test-junit4") + debugImplementation("androidx.compose.ui:ui-tooling") + debugImplementation("androidx.compose.ui:ui-test-manifest") +} diff --git a/examples/llama.android/app/proguard-rules.pro b/examples/llama.android/app/proguard-rules.pro new file mode 100644 index 000000000..f1b424510 --- /dev/null +++ b/examples/llama.android/app/proguard-rules.pro @@ -0,0 +1,21 @@ +# Add project specific ProGuard rules here. +# You can control the set of applied configuration files using the +# proguardFiles setting in build.gradle. +# +# For more details, see +# http://developer.android.com/guide/developing/tools/proguard.html + +# If your project uses WebView with JS, uncomment the following +# and specify the fully qualified class name to the JavaScript interface +# class: +#-keepclassmembers class fqcn.of.javascript.interface.for.webview { +# public *; +#} + +# Uncomment this to preserve the line number information for +# debugging stack traces. +#-keepattributes SourceFile,LineNumberTable + +# If you keep the line number information, uncomment this to +# hide the original source file name. +#-renamesourcefileattribute SourceFile diff --git a/examples/llama.android/app/src/main/AndroidManifest.xml b/examples/llama.android/app/src/main/AndroidManifest.xml new file mode 100644 index 000000000..41a358a29 --- /dev/null +++ b/examples/llama.android/app/src/main/AndroidManifest.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + diff --git a/examples/llama.android/app/src/main/cpp/CMakeLists.txt b/examples/llama.android/app/src/main/cpp/CMakeLists.txt new file mode 100644 index 000000000..85139329a --- /dev/null +++ b/examples/llama.android/app/src/main/cpp/CMakeLists.txt @@ -0,0 +1,50 @@ + +# For more information about using CMake with Android Studio, read the +# documentation: https://d.android.com/studio/projects/add-native-code.html. 
+# For more examples on how to use CMake, see https://github.com/android/ndk-samples.
+
+# Sets the minimum CMake version required for this project.
+cmake_minimum_required(VERSION 3.22.1)
+
+# Declares the project name. The project name can be accessed via ${ PROJECT_NAME},
+# Since this is the top level CMakeLists.txt, the project name is also accessible
+# with ${CMAKE_PROJECT_NAME} (both CMake variables are in-sync within the top level
+# build script scope).
+project("llama-android")
+
+include(FetchContent)
+FetchContent_Declare(
+        llama
+        GIT_REPOSITORY https://github.com/ggerganov/llama.cpp
+        GIT_TAG        master
+)
+
+# Also provides "common"
+FetchContent_MakeAvailable(llama)
+
+# Creates and names a library, sets it as either STATIC
+# or SHARED, and provides the relative paths to its source code.
+# You can define multiple libraries, and CMake builds them for you.
+# Gradle automatically packages shared libraries with your APK.
+#
+# In this top level CMakeLists.txt, ${CMAKE_PROJECT_NAME} is used to define
+# the target library name; in the sub-module's CMakeLists.txt, ${PROJECT_NAME}
+# is preferred for the same purpose.
+#
+# In order to load a library into your app from Java/Kotlin, you must call
+# System.loadLibrary() and pass the name of the library defined here;
+# for GameActivity/NativeActivity derived applications, the same library name must be
+# used in the AndroidManifest.xml file.
+add_library(${CMAKE_PROJECT_NAME} SHARED
+        # List C/C++ source files with relative paths to this CMakeLists.txt.
+        llama-android.cpp)
+
+# Specifies libraries CMake should link to your target library. You
+# can link libraries from various origins, such as libraries defined in this
+# build script, prebuilt third-party libraries, or Android system libraries.
+target_link_libraries(${CMAKE_PROJECT_NAME}
+        # List libraries link to the target library
+        llama
+        common
+        android
+        log)

diff --git a/examples/llama.android/app/src/main/cpp/llama-android.cpp b/examples/llama.android/app/src/main/cpp/llama-android.cpp
new file mode 100644
index 000000000..d5e705dce
--- /dev/null
+++ b/examples/llama.android/app/src/main/cpp/llama-android.cpp
@@ -0,0 +1,394 @@
+#include <android/log.h>
+#include <jni.h>
+#include <iomanip>
+#include <math.h>
+#include <string>
+#include <unistd.h>
+#include "llama.h"
+#include "common/common.h"
+
+// Write C++ code here.
+//
+// Do not forget to dynamically load the C++ library into your application.
+//
+// For instance,
+//
+// In MainActivity.java:
+//    static {
+//       System.loadLibrary("llama-android");
+//    }
+//
+// Or, in MainActivity.kt:
+//    companion object {
+//      init {
+//         System.loadLibrary("llama-android")
+//      }
+//    }
+
+#define TAG "llama-android.cpp"
+#define LOGi(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
+#define LOGe(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+
+jclass la_int_var;
+jmethodID la_int_var_value;
+jmethodID la_int_var_inc;
+
+static void log_callback(ggml_log_level level, const char * fmt, void * data) {
+    if (level == GGML_LOG_LEVEL_ERROR)     __android_log_print(ANDROID_LOG_ERROR, TAG, fmt, data);
+    else if (level == GGML_LOG_LEVEL_INFO) __android_log_print(ANDROID_LOG_INFO, TAG, fmt, data);
+    else if (level == GGML_LOG_LEVEL_WARN) __android_log_print(ANDROID_LOG_WARN, TAG, fmt, data);
+    else __android_log_print(ANDROID_LOG_DEFAULT, TAG, fmt, data);
+}
+
+extern "C"
+JNIEXPORT jlong JNICALL
+Java_com_example_llama_Llm_load_1model(JNIEnv *env, jobject, jstring filename) {
+    llama_model_params model_params = llama_model_default_params();
+
+    auto path_to_model = env->GetStringUTFChars(filename, 0);
+    LOGi("Loading model from %s", path_to_model);
+
+    auto model = llama_load_model_from_file(path_to_model, model_params);
+    env->ReleaseStringUTFChars(filename, path_to_model);
+
+    if (!model) {
+        LOGe("load_model() failed");
+        env->ThrowNew(env->FindClass("java/lang/IllegalStateException"), "load_model() failed");
+        return 0;
+    }
+
+    return reinterpret_cast<jlong>(model);
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_com_example_llama_Llm_free_1model(JNIEnv *, jobject, jlong model) {
+    llama_free_model(reinterpret_cast<llama_model *>(model));
+}
+
+extern "C"
+JNIEXPORT jlong JNICALL
+Java_com_example_llama_Llm_new_1context(JNIEnv *env, jobject, jlong jmodel) {
+    auto model = reinterpret_cast<llama_model *>(jmodel);
+
+    if (!model) {
+        LOGe("new_context(): model cannot be null");
+        env->ThrowNew(env->FindClass("java/lang/IllegalArgumentException"), "Model cannot be null");
+        return 0;
+    }
+
+    int n_threads = std::max(1, std::min(8, (int) sysconf(_SC_NPROCESSORS_ONLN) - 2));
+    LOGi("Using %d threads", n_threads);
+
+    llama_context_params ctx_params = llama_context_default_params();
+    ctx_params.seed = 1234;
+    ctx_params.n_ctx = 2048;
+    ctx_params.n_threads = n_threads;
+    ctx_params.n_threads_batch = n_threads;
+
+    llama_context * context = llama_new_context_with_model(model, ctx_params);
+
+    if (!context) {
+        LOGe("llama_new_context_with_model() returned null)");
+        env->ThrowNew(env->FindClass("java/lang/IllegalStateException"),
+            "llama_new_context_with_model() returned null)");
+        return 0;
+    }
+
+    return reinterpret_cast<jlong>(context);
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_com_example_llama_Llm_free_1context(JNIEnv *, jobject, jlong context) {
+    llama_free(reinterpret_cast<llama_context *>(context));
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_com_example_llama_Llm_backend_1free(JNIEnv *, jobject) {
+    llama_backend_free();
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_com_example_llama_Llm_log_1to_1android(JNIEnv *, jobject) {
+    llama_log_set(log_callback, NULL);
+}
+
+extern "C"
+JNIEXPORT jstring JNICALL
+Java_com_example_llama_Llm_bench_1model(
+        JNIEnv *env,
+        jobject,
+        jlong context_pointer,
+        jlong model_pointer,
+        jlong batch_pointer,
+        jint pp,
+        jint tg,
+        jint pl,
+        jint nr
+    ) {
+    auto pp_avg = 0.0;
+    auto tg_avg = 0.0;
+    auto pp_std = 0.0;
+    auto tg_std = 0.0;
+
+    const auto context = reinterpret_cast<llama_context *>(context_pointer);
+    const auto model = reinterpret_cast<llama_model *>(model_pointer);
+    const auto batch = reinterpret_cast<llama_batch *>(batch_pointer);
+
+    const int n_ctx = llama_n_ctx(context);
+
+    LOGi("n_ctx = %d", n_ctx);
+
+    int i, j;
+    int nri;
+    for (nri = 0; nri < nr; nri++) {
+        LOGi("Benchmark prompt processing (pp)");
+
+        llama_batch_clear(*batch);
+
+        const int n_tokens = pp;
+        for (i = 0; i < n_tokens; i++) {
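+            // fill the batch with pp copies of token 0 at positions 0..pp-1 on seq_id 0;
+            // logits are requested only for the last position (set just below the loop)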
+            llama_batch_add(*batch, 0, i, { 0 }, false);
+        }
+
+        batch->logits[batch->n_tokens - 1] = true;
+        llama_kv_cache_clear(context);
+
+        const auto t_pp_start = ggml_time_us();
+        if (llama_decode(context, *batch) != 0) {
+            LOGi("llama_decode() failed during prompt processing");
+        }
+        const auto t_pp_end = ggml_time_us();
+
+        // bench text generation
+
+        LOGi("Benchmark text generation (tg)");
+
+        llama_kv_cache_clear(context);
+        const auto t_tg_start = ggml_time_us();
+        for (i = 0; i < tg; i++) {
+
+            llama_batch_clear(*batch);
+            for (j = 0; j < pl; j++) {
+                llama_batch_add(*batch, 0, i, { j }, true);
+            }
+
+            LOGi("llama_decode() text generation: %d", i);
+            if (llama_decode(context, *batch) != 0) {
+                LOGi("llama_decode() failed during text generation");
+            }
+        }
+
+        const auto t_tg_end = ggml_time_us();
+
+        llama_kv_cache_clear(context);
+
+        const auto t_pp = double(t_pp_end - t_pp_start) / 1000000.0;
+        const auto t_tg = double(t_tg_end - t_tg_start) / 1000000.0;
+
+        const auto speed_pp = double(pp) / t_pp;
+        const auto speed_tg = double(pl * tg) / t_tg;
+
+        pp_avg += speed_pp;
+        tg_avg += speed_tg;
+
+        pp_std += speed_pp * speed_pp;
+        tg_std += speed_tg * speed_tg;
+
+        LOGi("pp %f t/s, tg %f t/s", speed_pp, speed_tg);
+    }
+
+    pp_avg /= double(nr);
+    tg_avg /= double(nr);
+
+    if (nr > 1) {
+        pp_std = sqrt(pp_std / double(nr - 1) - pp_avg * pp_avg * double(nr) / double(nr - 1));
+        tg_std = sqrt(tg_std / double(nr - 1) - tg_avg * tg_avg * double(nr) / double(nr - 1));
+    } else {
+        pp_std = 0;
+        tg_std = 0;
+    }
+
+    char model_desc[128];
+    llama_model_desc(model, model_desc, sizeof(model_desc));
+
+    const auto model_size = double(llama_model_size(model)) / 1024.0 / 1024.0 / 1024.0;
+    const auto model_n_params = double(llama_model_n_params(model)) / 1e9;
+
+    const auto backend = "(Android)"; // TODO: What should this be?
+
+    std::stringstream result;
+    result << std::setprecision(2);
+    result << "| model | size | params | backend | test | t/s |\n";
+    result << "| --- | --- | --- | --- | --- | --- |\n";
+    result << "| " << model_desc << " | " << model_size << "GiB | " << model_n_params << "B | " << backend << " | pp " << pp << " | " << pp_avg << " ± " << pp_std << " |\n";
+    result << "| " << model_desc << " | " << model_size << "GiB | " << model_n_params << "B | " << backend << " | tg " << tg << " | " << tg_avg << " ± " << tg_std << " |\n";
+
+    return env->NewStringUTF(result.str().c_str());
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_com_example_llama_Llm_free_1batch(JNIEnv *, jobject, jlong batch_pointer) {
+    llama_batch_free(*reinterpret_cast<llama_batch *>(batch_pointer));
+}
+
+extern "C"
+JNIEXPORT jlong JNICALL
+Java_com_example_llama_Llm_new_1batch(JNIEnv *, jobject, jint n_tokens, jint embd, jint n_seq_max) {
+
+    // Source: Copy of llama.cpp:llama_batch_init but heap-allocated.
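+    // Heap allocation keeps the batch alive after this function returns, so its
+    // address can be handed to Kotlin as a jlong and released later via free_batch().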
+
+    llama_batch *batch = new llama_batch {
+        0,
+        nullptr,
+        nullptr,
+        nullptr,
+        nullptr,
+        nullptr,
+        nullptr,
+        0,
+        0,
+        0,
+    };
+
+    if (embd) {
+        batch->embd = (float *) malloc(sizeof(float) * n_tokens * embd);
+    } else {
+        batch->token = (llama_token *) malloc(sizeof(llama_token) * n_tokens);
+    }
+
+    batch->pos = (llama_pos *) malloc(sizeof(llama_pos) * n_tokens);
+    batch->n_seq_id = (int32_t *) malloc(sizeof(int32_t) * n_tokens);
+    batch->seq_id = (llama_seq_id **) malloc(sizeof(llama_seq_id *) * n_tokens);
+    for (int i = 0; i < n_tokens; ++i) {
+        batch->seq_id[i] = (llama_seq_id *) malloc(sizeof(llama_seq_id) * n_seq_max);
+    }
+    batch->logits = (int8_t *) malloc(sizeof(int8_t) * n_tokens);
+
+    return reinterpret_cast<jlong>(batch);
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_com_example_llama_Llm_backend_1init(JNIEnv *, jobject, jboolean numa) {
+    llama_backend_init(numa);
+}
+
+extern "C"
+JNIEXPORT jstring JNICALL
+Java_com_example_llama_Llm_system_1info(JNIEnv *env, jobject) {
+    return env->NewStringUTF(llama_print_system_info());
+}
+
+extern "C"
+JNIEXPORT jint JNICALL
+Java_com_example_llama_Llm_completion_1init(
+        JNIEnv *env,
+        jobject,
+        jlong context_pointer,
+        jlong batch_pointer,
+        jstring jtext,
+        jint n_len
+    ) {
+
+    const auto text = env->GetStringUTFChars(jtext, 0);
+    const auto context = reinterpret_cast<llama_context *>(context_pointer);
+    const auto batch = reinterpret_cast<llama_batch *>(batch_pointer);
+
+    const auto tokens_list = llama_tokenize(context, text, 1);
+
+    auto n_ctx = llama_n_ctx(context);
+    auto n_kv_req = tokens_list.size() + (n_len - tokens_list.size());
+
+    LOGi("n_len = %d, n_ctx = %d, n_kv_req = %d", n_len, n_ctx, n_kv_req);
+
+    if (n_kv_req > n_ctx) {
+        LOGe("error: n_kv_req > n_ctx, the required KV cache size is not big enough");
+    }
+
+    for (auto id : tokens_list) {
+        LOGi("%s", llama_token_to_piece(context, id).c_str());
+    }
+
+    llama_batch_clear(*batch);
+
+    // evaluate the initial prompt
+    for (auto i = 0; i < tokens_list.size(); i++) {
+        llama_batch_add(*batch, tokens_list[i], i, { 0 }, false);
+    }
+
+    // llama_decode will output logits only for the last token of the prompt
+    batch->logits[batch->n_tokens - 1] = true;
+
+    if (llama_decode(context, *batch) != 0) {
+        LOGe("llama_decode() failed");
+    }
+
+    env->ReleaseStringUTFChars(jtext, text);
+
+    return batch->n_tokens;
+}
+
+extern "C"
+JNIEXPORT jstring JNICALL
+Java_com_example_llama_Llm_completion_1loop(
+        JNIEnv * env,
+        jobject,
+        jlong context_pointer,
+        jlong batch_pointer,
+        jint n_len,
+        jobject intvar_ncur
+) {
+    const auto context = reinterpret_cast<llama_context *>(context_pointer);
+    const auto batch = reinterpret_cast<llama_batch *>(batch_pointer);
+    const auto model = llama_get_model(context);
+
+    if (!la_int_var) la_int_var = env->GetObjectClass(intvar_ncur);
+    if (!la_int_var_value) la_int_var_value = env->GetMethodID(la_int_var, "getValue", "()I");
+    if (!la_int_var_inc) la_int_var_inc = env->GetMethodID(la_int_var, "inc", "()V");
+
+    auto n_vocab = llama_n_vocab(model);
+    auto logits = llama_get_logits_ith(context, batch->n_tokens - 1);
+
+    std::vector<llama_token_data> candidates;
+    candidates.reserve(n_vocab);
+
+    for (llama_token token_id = 0; token_id < n_vocab; token_id++) {
+        candidates.emplace_back(llama_token_data{ token_id, logits[token_id], 0.0f });
+    }
+
+    llama_token_data_array candidates_p = { candidates.data(), candidates.size(), false };
+
+    // sample the most likely token
+    const auto new_token_id = llama_sample_token_greedy(context, &candidates_p);
+
+    const auto n_cur = env->CallIntMethod(intvar_ncur, la_int_var_value);
+    if (new_token_id == llama_token_eos(model) || n_cur == n_len) {
+        return env->NewStringUTF("");
+    }
+
+    auto new_token_chars = llama_token_to_piece(context, new_token_id);
+    LOGi("new_token_chars: `%s`", new_token_chars.c_str());
+    auto new_token = env->NewStringUTF(new_token_chars.c_str());
+
+    llama_batch_clear(*batch);
+    llama_batch_add(*batch, new_token_id, n_cur, { 0 }, true);
+
+    env->CallVoidMethod(intvar_ncur, la_int_var_inc);
+
+    if (llama_decode(context, *batch) != 0) {
+        LOGe("llama_decode() returned null");
+    }
+
+    return new_token;
+}
+
+extern "C"
+JNIEXPORT void JNICALL
+Java_com_example_llama_Llm_kv_1cache_1clear(JNIEnv *, jobject, jlong context) {
+    llama_kv_cache_clear(reinterpret_cast<llama_context *>(context));
+}

diff --git a/examples/llama.android/app/src/main/java/com/example/llama/Downloadable.kt b/examples/llama.android/app/src/main/java/com/example/llama/Downloadable.kt
new file mode 100644
index 000000000..78c231ae5
--- /dev/null
+++ b/examples/llama.android/app/src/main/java/com/example/llama/Downloadable.kt
@@ -0,0 +1,119 @@
+package com.example.llama
+
+import android.app.DownloadManager
+import android.net.Uri
+import android.util.Log
+import androidx.compose.material3.Button
+import androidx.compose.material3.Text
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableDoubleStateOf
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.remember
+import androidx.compose.runtime.rememberCoroutineScope
+import androidx.compose.runtime.setValue
+import androidx.core.database.getLongOrNull
+import androidx.core.net.toUri
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.launch
+import java.io.File
+
+data class Downloadable(val name: String, val source: Uri, val destination: File) {
+    companion object {
+        @JvmStatic
+        private val tag: String?
= this::class.qualifiedName + + sealed interface State + data object Ready: State + data class Downloading(val id: Long): State + data class Downloaded(val downloadable: Downloadable): State + data class Error(val message: String): State + + @JvmStatic + @Composable + fun Button(viewModel: MainViewModel, dm: DownloadManager, item: Downloadable) { + var status: State by remember { + mutableStateOf( + if (item.destination.exists()) Downloaded(item) + else Ready + ) + } + var progress by remember { mutableDoubleStateOf(0.0) } + + val coroutineScope = rememberCoroutineScope() + + suspend fun waitForDownload(result: Downloading, item: Downloadable): State { + while (true) { + val cursor = dm.query(DownloadManager.Query().setFilterById(result.id)) + + if (cursor == null) { + Log.e(tag, "dm.query() returned null") + return Error("dm.query() returned null") + } + + if (!cursor.moveToFirst() || cursor.count < 1) { + cursor.close() + Log.i(tag, "cursor.moveToFirst() returned false or cursor.count < 1, download canceled?") + return Ready + } + + val pix = cursor.getColumnIndex(DownloadManager.COLUMN_BYTES_DOWNLOADED_SO_FAR) + val tix = cursor.getColumnIndex(DownloadManager.COLUMN_TOTAL_SIZE_BYTES) + val sofar = cursor.getLongOrNull(pix) ?: 0 + val total = cursor.getLongOrNull(tix) ?: 1 + cursor.close() + + if (sofar == total) { + return Downloaded(item) + } + + progress = (sofar * 1.0) / total + + delay(1000L) + } + } + + fun onClick() { + when (val s = status) { + is Downloaded -> { + viewModel.load(item.destination.path) + } + + is Downloading -> { + coroutineScope.launch { + status = waitForDownload(s, item) + } + } + + else -> { + item.destination.delete() + + val request = DownloadManager.Request(item.source).apply { + setTitle("Downloading model") + setDescription("Downloading model: ${item.name}") + setAllowedNetworkTypes(DownloadManager.Request.NETWORK_WIFI) + setDestinationUri(item.destination.toUri()) + } + + viewModel.log("Saving ${item.name} to ${item.destination.path}") + Log.i(tag, "Saving ${item.name} to ${item.destination.path}") + + val id = dm.enqueue(request) + status = Downloading(id) + onClick() + } + } + } + + Button(onClick = { onClick() }, enabled = status !is Downloading) { + when (status) { + is Downloading -> Text(text = "Downloading ${(progress * 100).toInt()}%") + is Downloaded -> Text("Load ${item.name}") + is Ready -> Text("Download ${item.name}") + is Error -> Text("Download ${item.name}") + } + } + } + + } +} diff --git a/examples/llama.android/app/src/main/java/com/example/llama/Llm.kt b/examples/llama.android/app/src/main/java/com/example/llama/Llm.kt new file mode 100644 index 000000000..5f3270372 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/Llm.kt @@ -0,0 +1,172 @@ +package com.example.llama + +import android.util.Log +import kotlinx.coroutines.CoroutineDispatcher +import kotlinx.coroutines.asCoroutineDispatcher +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.flow +import kotlinx.coroutines.flow.flowOn +import kotlinx.coroutines.withContext +import java.util.concurrent.Executors +import kotlin.concurrent.thread + +class Llm { + private val tag: String? = this::class.simpleName + + private val threadLocalState: ThreadLocal = ThreadLocal.withInitial { State.Idle } + + private val runLoop: CoroutineDispatcher = Executors.newSingleThreadExecutor { + thread(start = false, name = "Llm-RunLoop") { + Log.d(tag, "Dedicated thread for native code: ${Thread.currentThread().name}") + + // No-op if called more than once. 
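+        // Every native call is funneled through this single-threaded dispatcher:
+        // a llama_context is not thread-safe, so one dedicated thread both loads the
+        // library / initializes the backend and later executes all JNI calls.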
+ System.loadLibrary("llama-android") + + // Set llama log handler to Android + log_to_android() + backend_init(false) + + Log.d(tag, system_info()) + + it.run() + }.apply { + uncaughtExceptionHandler = Thread.UncaughtExceptionHandler { _, exception: Throwable -> + Log.e(tag, "Unhandled exception", exception) + } + } + }.asCoroutineDispatcher() + + private val nlen: Int = 64 + + private external fun log_to_android() + private external fun load_model(filename: String): Long + private external fun free_model(model: Long) + private external fun new_context(model: Long): Long + private external fun free_context(context: Long) + private external fun backend_init(numa: Boolean) + private external fun backend_free() + private external fun free_batch(batch: Long) + private external fun new_batch(nTokens: Int, embd: Int, nSeqMax: Int): Long + private external fun bench_model( + context: Long, + model: Long, + batch: Long, + pp: Int, + tg: Int, + pl: Int, + nr: Int + ): String + + private external fun system_info(): String + + private external fun completion_init( + context: Long, + batch: Long, + text: String, + nLen: Int + ): Int + + private external fun completion_loop( + context: Long, + batch: Long, + nLen: Int, + ncur: IntVar + ): String + + private external fun kv_cache_clear(context: Long) + + suspend fun bench(pp: Int, tg: Int, pl: Int, nr: Int = 1): String { + return withContext(runLoop) { + when (val state = threadLocalState.get()) { + is State.Loaded -> { + Log.d(tag, "bench(): $state") + bench_model(state.context, state.model, state.batch, pp, tg, pl, nr) + } + + else -> throw IllegalStateException("No model loaded") + } + } + } + + suspend fun load(pathToModel: String) { + withContext(runLoop) { + when (threadLocalState.get()) { + is State.Idle -> { + val model = load_model(pathToModel) + if (model == 0L) throw IllegalStateException("load_model() failed") + + val context = new_context(model) + if (context == 0L) throw IllegalStateException("new_context() failed") + + val batch = new_batch(512, 0, 1) + if (batch == 0L) throw IllegalStateException("new_batch() failed") + + Log.i(tag, "Loaded model $pathToModel") + threadLocalState.set(State.Loaded(model, context, batch)) + } + else -> throw IllegalStateException("Model already loaded") + } + } + } + + fun send(message: String): Flow = flow { + when (val state = threadLocalState.get()) { + is State.Loaded -> { + val ncur = IntVar(completion_init(state.context, state.batch, message, nlen)) + while (ncur.value <= nlen) { + val str = completion_loop(state.context, state.batch, nlen, ncur) + if (str.isEmpty()) { + break + } + emit(str) + } + kv_cache_clear(state.context) + } + else -> {} + } + }.flowOn(runLoop) + + /** + * Unloads the model and frees resources. + * + * This is a no-op if there's no model loaded. + */ + suspend fun unload() { + withContext(runLoop) { + when (val state = threadLocalState.get()) { + is State.Loaded -> { + free_context(state.context) + free_model(state.model) + free_batch(state.batch) + + threadLocalState.set(State.Idle) + } + else -> {} + } + } + } + + companion object { + private class IntVar(value: Int) { + @Volatile + var value: Int = value + private set + + fun inc() { + synchronized(this) { + value += 1 + } + } + } + + private sealed interface State { + data object Idle: State + data class Loaded(val model: Long, val context: Long, val batch: Long): State + } + + // Enforce only one instance of Llm. 
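+        // A single shared instance ensures System.loadLibrary() and backend_init()
+        // run at most once per process, and the one run loop serializes native state.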
+ private val _instance: Llm = Llm() + + fun instance(): Llm = _instance + } +} diff --git a/examples/llama.android/app/src/main/java/com/example/llama/MainActivity.kt b/examples/llama.android/app/src/main/java/com/example/llama/MainActivity.kt new file mode 100644 index 000000000..9da04f7d3 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/MainActivity.kt @@ -0,0 +1,154 @@ +package com.example.llama + +import android.app.ActivityManager +import android.app.DownloadManager +import android.content.ClipData +import android.content.ClipboardManager +import android.net.Uri +import android.os.Bundle +import android.os.StrictMode +import android.os.StrictMode.VmPolicy +import android.text.format.Formatter +import androidx.activity.ComponentActivity +import androidx.activity.compose.setContent +import androidx.activity.viewModels +import androidx.compose.foundation.layout.Box +import androidx.compose.foundation.layout.Column +import androidx.compose.foundation.layout.Row +import androidx.compose.foundation.layout.fillMaxSize +import androidx.compose.foundation.layout.padding +import androidx.compose.foundation.lazy.LazyColumn +import androidx.compose.foundation.lazy.items +import androidx.compose.foundation.lazy.rememberLazyListState +import androidx.compose.material3.Button +import androidx.compose.material3.LocalContentColor +import androidx.compose.material3.MaterialTheme +import androidx.compose.material3.OutlinedTextField +import androidx.compose.material3.Surface +import androidx.compose.material3.Text +import androidx.compose.runtime.Composable +import androidx.compose.ui.Modifier +import androidx.compose.ui.unit.dp +import androidx.core.content.getSystemService +import com.example.llama.ui.theme.LlamaAndroidTheme +import java.io.File + +class MainActivity( + activityManager: ActivityManager? = null, + downloadManager: DownloadManager? = null, + clipboardManager: ClipboardManager? = null, +): ComponentActivity() { + private val tag: String? = this::class.simpleName + + private val activityManager by lazy { activityManager ?: getSystemService()!! } + private val downloadManager by lazy { downloadManager ?: getSystemService()!! } + private val clipboardManager by lazy { clipboardManager ?: getSystemService()!! } + + private val viewModel: MainViewModel by viewModels() + + // Get a MemoryInfo object for the device's current memory status. + private fun availableMemory(): ActivityManager.MemoryInfo { + return ActivityManager.MemoryInfo().also { memoryInfo -> + activityManager.getMemoryInfo(memoryInfo) + } + } + + override fun onCreate(savedInstanceState: Bundle?) 
{ + super.onCreate(savedInstanceState) + + StrictMode.setVmPolicy( + VmPolicy.Builder(StrictMode.getVmPolicy()) + .detectLeakedClosableObjects() + .build() + ) + + val free = Formatter.formatFileSize(this, availableMemory().availMem) + val total = Formatter.formatFileSize(this, availableMemory().totalMem) + + viewModel.log("Current memory: $free / $total") + viewModel.log("Downloads directory: ${getExternalFilesDir(null)}") + + val extFilesDir = getExternalFilesDir(null) + + val models = listOf( + Downloadable( + "Phi-2 7B (Q4_0, 1.6 GiB)", + Uri.parse("https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true"), + File(extFilesDir, "phi-2-q4_0.gguf"), + ), + Downloadable( + "TinyLlama 1.1B (f16, 2.2 GiB)", + Uri.parse("https://huggingface.co/ggml-org/models/resolve/main/tinyllama-1.1b/ggml-model-f16.gguf?download=true"), + File(extFilesDir, "tinyllama-1.1-f16.gguf"), + ), + Downloadable( + "Phi 2 DPO (Q3_K_M, 1.48 GiB)", + Uri.parse("https://huggingface.co/TheBloke/phi-2-dpo-GGUF/resolve/main/phi-2-dpo.Q3_K_M.gguf?download=true"), + File(extFilesDir, "phi-2-dpo.Q3_K_M.gguf") + ), + ) + + setContent { + LlamaAndroidTheme { + // A surface container using the 'background' color from the theme + Surface( + modifier = Modifier.fillMaxSize(), + color = MaterialTheme.colorScheme.background + ) { + MainCompose( + viewModel, + clipboardManager, + downloadManager, + models, + ) + } + + } + } + } +} + +@Composable +fun MainCompose( + viewModel: MainViewModel, + clipboard: ClipboardManager, + dm: DownloadManager, + models: List +) { + Column { + val scrollState = rememberLazyListState() + + Box(modifier = Modifier.weight(1f)) { + LazyColumn(state = scrollState) { + items(viewModel.messages) { + Text( + it, + style = MaterialTheme.typography.bodyLarge.copy(color = LocalContentColor.current), + modifier = Modifier.padding(16.dp) + ) + } + } + } + OutlinedTextField( + value = viewModel.message, + onValueChange = { viewModel.updateMessage(it) }, + label = { Text("Message") }, + ) + Row { + Button({ viewModel.send() }) { Text("Send") } + Button({ viewModel.bench(8, 4, 1) }) { Text("Bench") } + Button({ viewModel.clear() }) { Text("Clear") } + Button({ + viewModel.messages.joinToString("\n").let { + clipboard.setPrimaryClip(ClipData.newPlainText("", it)) + } + }) { Text("Copy") } + } + + Column { + for (model in models) { + Downloadable.Button(viewModel, dm, model) + } + } + } +} diff --git a/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt b/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt new file mode 100644 index 000000000..be95e2221 --- /dev/null +++ b/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt @@ -0,0 +1,104 @@ +package com.example.llama + +import android.util.Log +import androidx.compose.runtime.getValue +import androidx.compose.runtime.mutableStateOf +import androidx.compose.runtime.setValue +import androidx.lifecycle.ViewModel +import androidx.lifecycle.viewModelScope +import kotlinx.coroutines.flow.catch +import kotlinx.coroutines.launch + +class MainViewModel(private val llm: Llm = Llm.instance()): ViewModel() { + companion object { + @JvmStatic + private val NanosPerSecond = 1_000_000_000.0 + } + + private val tag: String? 
diff --git a/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt b/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt
new file mode 100644
index 000000000..be95e2221
--- /dev/null
+++ b/examples/llama.android/app/src/main/java/com/example/llama/MainViewModel.kt
@@ -0,0 +1,104 @@
+package com.example.llama
+
+import android.util.Log
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.setValue
+import androidx.lifecycle.ViewModel
+import androidx.lifecycle.viewModelScope
+import kotlinx.coroutines.flow.catch
+import kotlinx.coroutines.launch
+
+class MainViewModel(private val llm: Llm = Llm.instance()): ViewModel() {
+    companion object {
+        @JvmStatic
+        private val NanosPerSecond = 1_000_000_000.0
+    }
+
+    private val tag: String? = this::class.simpleName
+
+    var messages by mutableStateOf(listOf("Initializing..."))
+        private set
+
+    var message by mutableStateOf("")
+        private set
+
+    override fun onCleared() {
+        super.onCleared()
+
+        viewModelScope.launch {
+            try {
+                llm.unload()
+            } catch (exc: IllegalStateException) {
+                messages += exc.message!!
+            }
+        }
+    }
+
+    fun send() {
+        val text = message
+        message = ""
+
+        // Add to messages console.
+        messages += text
+        messages += ""
+
+        viewModelScope.launch {
+            llm.send(text)
+                .catch {
+                    Log.e(tag, "send() failed", it)
+                    messages += it.message!!
+                }
+                .collect { messages = messages.dropLast(1) + (messages.last() + it) }
+        }
+    }
+
+    fun bench(pp: Int, tg: Int, pl: Int, nr: Int = 1) {
+        viewModelScope.launch {
+            try {
+                val start = System.nanoTime()
+                val warmupResult = llm.bench(pp, tg, pl, nr)
+                val end = System.nanoTime()
+
+                messages += warmupResult
+
+                val warmup = (end - start).toDouble() / NanosPerSecond
+                messages += "Warm up time: $warmup seconds, please wait..."
+
+                if (warmup > 5.0) {
+                    messages += "Warm up took too long, aborting benchmark"
+                    return@launch
+                }
+
+                messages += llm.bench(512, 128, 1, 3)
+            } catch (exc: IllegalStateException) {
+                Log.e(tag, "bench() failed", exc)
+                messages += exc.message!!
+            }
+        }
+    }
+
+    fun load(pathToModel: String) {
+        viewModelScope.launch {
+            try {
+                llm.load(pathToModel)
+                messages += "Loaded $pathToModel"
+            } catch (exc: IllegalStateException) {
+                Log.e(tag, "load() failed", exc)
+                messages += exc.message!!
+            }
+        }
+    }
+
+    fun updateMessage(newMessage: String) {
+        message = newMessage
+    }
+
+    fun clear() {
+        messages = listOf()
+    }
+
+    fun log(message: String) {
+        messages += message
+    }
+}
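MainViewModel is a thin state holder over the JNI-backed Llm wrapper: every call is dispatched on viewModelScope, and failures surface as console messages instead of crashes. For reference, here is a minimal sketch of the Llm surface the ViewModel relies on, inferred from the call sites above; the interface name and comments are illustrative, the actual class appears earlier in this patch:

package com.example.llama

import kotlinx.coroutines.flow.Flow

// Sketch of the assumed contract, not the actual Llm class from this patch.
interface LlmContract {
    suspend fun load(pathToModel: String)   // throws IllegalStateException on failure
    suspend fun unload()                    // frees the model and its context
    suspend fun bench(pp: Int, tg: Int, pl: Int, nr: Int = 1): String  // formatted result
    fun send(message: String): Flow<String> // streams generated text piece by piece
}

send() returning a Flow is what lets the collect { ... } call in MainViewModel.send() append each streamed piece to the last console message as it arrives.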
diff --git a/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt
new file mode 100644
index 000000000..40c30e8d9
--- /dev/null
+++ b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Color.kt
@@ -0,0 +1,11 @@
+package com.example.llama.ui.theme
+
+import androidx.compose.ui.graphics.Color
+
+val Purple80 = Color(0xFFD0BCFF)
+val PurpleGrey80 = Color(0xFFCCC2DC)
+val Pink80 = Color(0xFFEFB8C8)
+
+val Purple40 = Color(0xFF6650a4)
+val PurpleGrey40 = Color(0xFF625b71)
+val Pink40 = Color(0xFF7D5260)
diff --git a/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt
new file mode 100644
index 000000000..e742220a8
--- /dev/null
+++ b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Theme.kt
@@ -0,0 +1,70 @@
+package com.example.llama.ui.theme
+
+import android.app.Activity
+import android.os.Build
+import androidx.compose.foundation.isSystemInDarkTheme
+import androidx.compose.material3.MaterialTheme
+import androidx.compose.material3.darkColorScheme
+import androidx.compose.material3.dynamicDarkColorScheme
+import androidx.compose.material3.dynamicLightColorScheme
+import androidx.compose.material3.lightColorScheme
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.SideEffect
+import androidx.compose.ui.graphics.toArgb
+import androidx.compose.ui.platform.LocalContext
+import androidx.compose.ui.platform.LocalView
+import androidx.core.view.WindowCompat
+
+private val DarkColorScheme = darkColorScheme(
+    primary = Purple80,
+    secondary = PurpleGrey80,
+    tertiary = Pink80
+)
+
+private val LightColorScheme = lightColorScheme(
+    primary = Purple40,
+    secondary = PurpleGrey40,
+    tertiary = Pink40
+
+    /* Other default colors to override
+    background = Color(0xFFFFFBFE),
+    surface = Color(0xFFFFFBFE),
+    onPrimary = Color.White,
+    onSecondary = Color.White,
+    onTertiary = Color.White,
+    onBackground = Color(0xFF1C1B1F),
+    onSurface = Color(0xFF1C1B1F),
+    */
+)
+
+@Composable
+fun LlamaAndroidTheme(
+    darkTheme: Boolean = isSystemInDarkTheme(),
+    // Dynamic color is available on Android 12+
+    dynamicColor: Boolean = true,
+    content: @Composable () -> Unit
+) {
+    val colorScheme = when {
+        dynamicColor && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S -> {
+            val context = LocalContext.current
+            if (darkTheme) dynamicDarkColorScheme(context) else dynamicLightColorScheme(context)
+        }
+
+        darkTheme -> DarkColorScheme
+        else -> LightColorScheme
+    }
+    val view = LocalView.current
+    if (!view.isInEditMode) {
+        SideEffect {
+            val window = (view.context as Activity).window
+            window.statusBarColor = colorScheme.primary.toArgb()
+            WindowCompat.getInsetsController(window, view).isAppearanceLightStatusBars = darkTheme
+        }
+    }
+
+    MaterialTheme(
+        colorScheme = colorScheme,
+        typography = Typography,
+        content = content
+    )
+}
diff --git a/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt
new file mode 100644
index 000000000..0b87946ca
--- /dev/null
+++ b/examples/llama.android/app/src/main/java/com/example/llama/ui/theme/Type.kt
@@ -0,0 +1,34 @@
+package com.example.llama.ui.theme
+
+import androidx.compose.material3.Typography
+import androidx.compose.ui.text.TextStyle
+import androidx.compose.ui.text.font.FontFamily
+import androidx.compose.ui.text.font.FontWeight
+import androidx.compose.ui.unit.sp
+
+// Set of Material typography styles to start with
+val Typography = Typography(
+    bodyLarge = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Normal,
+        fontSize = 16.sp,
+        lineHeight = 24.sp,
+        letterSpacing = 0.5.sp
+    )
+    /* Other default text styles to override
+    titleLarge = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Normal,
+        fontSize = 22.sp,
+        lineHeight = 28.sp,
+        letterSpacing = 0.sp
+    ),
+    labelSmall = TextStyle(
+        fontFamily = FontFamily.Default,
+        fontWeight = FontWeight.Medium,
+        fontSize = 11.sp,
+        lineHeight = 16.sp,
+        letterSpacing = 0.5.sp
+    )
+    */
+)
diff --git a/examples/llama.android/app/src/main/res/drawable/ic_launcher_background.xml b/examples/llama.android/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 000000000..07d5da9cb
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
diff --git a/examples/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml b/examples/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml
new file mode 100644
index 000000000..7706ab9e6
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/drawable/ic_launcher_foreground.xml
@@ -0,0 +1,30 @@
+ + + + + + + + + + +
diff --git a/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml b/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml
new file mode 100644
index 000000000..b3e26b4c6
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background"/>
+    <foreground android:drawable="@drawable/ic_launcher_foreground"/>
+    <monochrome android:drawable="@drawable/ic_launcher_foreground"/>
+</adaptive-icon>
diff --git a/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml
b/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml new file mode 100644 index 000000000..b3e26b4c6 --- /dev/null +++ b/examples/llama.android/app/src/main/res/mipmap-anydpi/ic_launcher_round.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp b/examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..c209e78ecd372343283f4157dcfd918ec5165bb3 GIT binary patch literal 1404 zcmV-?1%vuhNk&F=1pok7MM6+kP&il$0000G0000-002h-06|PpNX!5L00Dqw+t%{r zzW2vH!KF=w&cMnnN@{whkTw+#mAh0SV?YL=)3MimFYCWp#fpdtz~8$hD5VPuQgtcN zXl<@<#Cme5f5yr2h%@8TWh?)bSK`O z^Z@d={gn7J{iyxL_y_%J|L>ep{dUxUP8a{byupH&!UNR*OutO~0{*T4q5R6@ApLF! z5{w?Z150gC7#>(VHFJZ-^6O@PYp{t!jH(_Z*nzTK4 zkc{fLE4Q3|mA2`CWQ3{8;gxGizgM!zccbdQoOLZc8hThi-IhN90RFT|zlxh3Ty&VG z?Fe{#9RrRnxzsu|Lg2ddugg7k%>0JeD+{XZ7>Z~{=|M+sh1MF7~ zz>To~`~LVQe1nNoR-gEzkpe{Ak^7{{ZBk2i_<+`Bq<^GB!RYG+z)h;Y3+<{zlMUYd zrd*W4w&jZ0%kBuDZ1EW&KLpyR7r2=}fF2%0VwHM4pUs}ZI2egi#DRMYZPek*^H9YK zay4Iy3WXFG(F14xYsoDA|KXgGc5%2DhmQ1gFCkrgHBm!lXG8I5h*uf{rn48Z!_@ z4Bk6TJAB2CKYqPjiX&mWoW>OPFGd$wqroa($ne7EUK;#3VYkXaew%Kh^3OrMhtjYN?XEoY`tRPQsAkH-DSL^QqyN0>^ zmC>{#F14jz4GeW{pJoRpLFa_*GI{?T93^rX7SPQgT@LbLqpNA}<@2wH;q493)G=1Y z#-sCiRNX~qf3KgiFzB3I>4Z%AfS(3$`-aMIBU+6?gbgDb!)L~A)je+;fR0jWLL-Fu z4)P{c7{B4Hp91&%??2$v9iRSFnuckHUm}or9seH6 z>%NbT+5*@L5(I9j@06@(!{ZI?U0=pKn8uwIg&L{JV14+8s2hnvbRrU|hZCd}IJu7*;;ECgO%8_*W Kmw_-CKmY()leWbG literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp b/examples/llama.android/app/src/main/res/mipmap-hdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..b2dfe3d1ba5cf3ee31b3ecc1ced89044a1f3b7a9 GIT binary patch literal 2898 zcmV-Y3$650Nk&FW3jhFDMM6+kP&il$0000G0000-002h-06|PpNWB9900E$G+qN-D z+81ABX7q?;bwx%xBg?kcwr$(C-Tex-ZCkHUw(Y9#+`E5-zuONG5fgw~E2WDng@Bc@ z24xy+R1n%~6xI#u9vJ8zREI)sb<&Il(016}Z~V1n^PU3-_H17A*Bf^o)&{_uBv}Py zulRfeE8g(g6HFhk_?o_;0@tz?1I+l+Y#Q*;RVC?(ud`_cU-~n|AX-b`JHrOIqn(-t&rOg-o`#C zh0LPxmbOAEb;zHTu!R3LDh1QO zZTf-|lJNUxi-PpcbRjw3n~n-pG;$+dIF6eqM5+L();B2O2tQ~|p{PlpNcvDbd1l%c zLtXn%lu(3!aNK!V#+HNn_D3lp z2%l+hK-nsj|Bi9;V*WIcQRTt5j90A<=am+cc`J zTYIN|PsYAhJ|=&h*4wI4ebv-C=Be#u>}%m;a{IGmJDU`0snWS&$9zdrT(z8#{OZ_Y zxwJx!ZClUi%YJjD6Xz@OP8{ieyJB=tn?>zaI-4JN;rr`JQbb%y5h2O-?_V@7pG_+y z(lqAsqYr!NyVb0C^|uclHaeecG)Sz;WV?rtoqOdAAN{j%?Uo%owya(F&qps@Id|Of zo@~Y-(YmfB+chv^%*3g4k3R0WqvuYUIA+8^SGJ{2Bl$X&X&v02>+0$4?di(34{pt* zG=f#yMs@Y|b&=HyH3k4yP&goF2LJ#tBLJNNDo6lG06r}ghC-pC4Q*=x3;|+W04zte zAl>l4kzUBQFYF(E`KJy?ZXd1tnfbH+Z~SMmA21KokJNs#eqcXWKUIC>{TuoKe^vhF z);H)o`t9j~`$h1D`#bxe@E`oE`cM9w(@)5Bp8BNukIwM>wZHfd0S;5bcXA*5KT3bj zc&_~`&{z7u{Et!Z_k78H75gXf4g8<_ul!H$eVspPeU3j&&Au=2R*Zp#M9$9s;fqwgzfiX=E_?BwVcfx3tG9Q-+<5fw z%Hs64z)@Q*%s3_Xd5>S4dg$s>@rN^ixeVj*tqu3ZV)biDcFf&l?lGwsa zWj3rvK}?43c{IruV2L`hUU0t^MemAn3U~x3$4mFDxj=Byowu^Q+#wKRPrWywLjIAp z9*n}eQ9-gZmnd9Y0WHtwi2sn6n~?i#n9VN1B*074_VbZZ=WrpkMYr{RsI ztM_8X1)J*DZejxkjOTRJ&a*lrvMKBQURNP#K)a5wIitfu(CFYV4FT?LUB$jVwJSZz zNBFTWg->Yk0j&h3e*a5>B=-xM7dE`IuOQna!u$OoxLlE;WdrNlN)1 z7**de7-hZ!(%_ZllHBLg`Ir#|t>2$*xVOZ-ADZKTN?{(NUeLU9GbuG-+Axf*AZ-P1 z0ZZ*fx+ck4{XtFsbcc%GRStht@q!m*ImssGwuK+P@%gEK!f5dHymg<9nSCXsB6 zQ*{<`%^bxB($Z@5286^-A(tR;r+p7B%^%$N5h%lb*Vlz-?DL9x;!j<5>~kmXP$E}m zQV|7uv4SwFs0jUervsxVUm>&9Y3DBIzc1XW|CUZrUdb<&{@D5yuLe%Xniw^x&{A2s z0q1+owDSfc3Gs?ht;3jw49c#mmrViUfX-yvc_B*wY|Lo7; zGh!t2R#BHx{1wFXReX*~`NS-LpSX 
z#TV*miO^~B9PF%O0huw!1Zv>^d0G3$^8dsC6VI!$oKDKiXdJt{mGkyA`+Gwd4D-^1qtNTUK)`N*=NTG-6}=5k6suNfdLt*dt8D| z%H#$k)z#ZRcf|zDWB|pn<3+7Nz>?WW9WdkO5(a^m+D4WRJ9{wc>Y}IN)2Kbgn;_O? zGqdr&9~|$Y0tP=N(k7^Eu;iO*w+f%W`20BNo)=Xa@M_)+o$4LXJyiw{F?a633SC{B zl~9FH%?^Rm*LVz`lkULs)%idDX^O)SxQol(3jDRyBVR!7d`;ar+D7do)jQ}m`g$TevUD5@?*P8)voa?kEe@_hl{_h8j&5eB-5FrYW&*FHVt$ z$kRF9Nstj%KRzpjdd_9wO=4zO8ritN*NPk_9avYrsF(!4))tm{Ga#OY z(r{0buexOzu7+rw8E08Gxd`LTOID{*AC1m*6Nw@osfB%0oBF5sf<~wH1kL;sd zo)k6^VyRFU`)dt*iX^9&QtWbo6yE8XXH?`ztvpiOLgI3R+=MOBQ9=rMVgi<*CU%+d1PQQ0a1U=&b0vkF207%xU0ssI2 literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp b/examples/llama.android/app/src/main/res/mipmap-mdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..4f0f1d64e58ba64d180ce43ee13bf9a17835fbca GIT binary patch literal 982 zcmV;{11bDcNk&G_0{{S5MM6+kP&il$0000G0000l001ul06|PpNU8t;00Dqo+t#w^ z^1csucXz7-Qrhzl9HuHB%l>&>1tG2^vb*E&k^T3$FG1eQZ51g$uv4V+kI`0<^1Z@N zk?Jjh$olyC%l>)Xq;7!>{iBj&BjJ`P&$fsCfpve_epJOBkTF?nu-B7D!hO=2ZR}

    C%4 zc_9eOXvPbC4kzU8YowIA8cW~Uv|eB&yYwAObSwL2vY~UYI7NXPvf3b+c^?wcs~_t{ ze_m66-0)^{JdOMKPwjpQ@Sna!*?$wTZ~su*tNv7o!gXT!GRgivP}ec?5>l1!7<(rT zds|8x(qGc673zrvYIz;J23FG{9nHMnAuP}NpAED^laz3mAN1sy+NXK)!6v1FxQ;lh zOBLA>$~P3r4b*NcqR;y6pwyhZ3_PiDb|%n1gGjl3ZU}ujInlP{eks-#oA6>rh&g+!f`hv#_%JrgYPu z(U^&XLW^QX7F9Z*SRPpQl{B%x)_AMp^}_v~?j7 zapvHMKxSf*Mtyx8I}-<*UGn3)oHd(nn=)BZ`d$lDBwq_GL($_TPaS{UeevT(AJ`p0 z9%+hQb6z)U9qjbuXjg|dExCLjpS8$VKQ55VsIC%@{N5t{NsW)=hNGI`J=x97_kbz@ E0Of=7!TQj4N+cqN`nQhxvX7dAV-`K|Ub$-q+H-5I?Tx0g9jWxd@A|?POE8`3b8fO$T))xP* z(X?&brZw({`)WU&rdAs1iTa0x6F@PIxJ&&L|dpySV!ID|iUhjCcKz(@mE z!x@~W#3H<)4Ae(4eQJRk`Iz3<1)6^m)0b_4_TRZ+cz#eD3f8V;2r-1fE!F}W zEi0MEkTTx}8i1{`l_6vo0(Vuh0HD$I4SjZ=?^?k82R51bC)2D_{y8mi_?X^=U?2|F{Vr7s!k(AZC$O#ZMyavHhlQ7 zUR~QXuH~#o#>(b$u4?s~HLF*3IcF7023AlwAYudn0FV~|odGH^05AYPEfR)8p`i{n zwg3zPVp{+wOsxKc>)(pMupKF!Y2HoUqQ3|Yu|8lwR=?5zZuhG6J?H`bSNk_wPoM{u zSL{c@pY7+c2kck>`^q1^^gR0QB7Y?KUD{vz-uVX~;V-rW)PDcI)$_UjgVV?S?=oLR zf4}zz{#*R_{LkiJ#0RdQLNC^2Vp%JPEUvG9ra2BVZ92(p9h7Ka@!yf9(lj#}>+|u* z;^_?KWdzkM`6gqPo9;;r6&JEa)}R3X{(CWv?NvgLeOTq$cZXqf7|sPImi-7cS8DCN zGf;DVt3Am`>hH3{4-WzH43Ftx)SofNe^-#|0HdCo<+8Qs!}TZP{HH8~z5n`ExcHuT zDL1m&|DVpIy=xsLO>8k92HcmfSKhflQ0H~9=^-{#!I1g(;+44xw~=* zxvNz35vfsQE)@)Zsp*6_GjYD};Squ83<_?^SbALb{a`j<0Gn%6JY!zhp=Fg}Ga2|8 z52e1WU%^L1}15Ex0fF$e@eCT(()_P zvV?CA%#Sy08_U6VPt4EtmVQraWJX` zh=N|WQ>LgrvF~R&qOfB$!%D3cGv?;Xh_z$z7k&s4N)$WYf*k=|*jCEkO19{h_(%W4 zPuOqbCw`SeAX*R}UUsbVsgtuG?xs(#Ikx9`JZoQFz0n*7ZG@Fv@kZk`gzO$HoA9kN z8U5{-yY zvV{`&WKU2$mZeoBmiJrEdzUZAv1sRxpePdg1)F*X^Y)zp^Y*R;;z~vOv-z&)&G)JQ{m!C9cmziu1^nHA z`#`0c>@PnQ9CJKgC5NjJD8HM3|KC(g5nnCq$n0Gsu_DXk36@ql%npEye|?%RmG)

    FJ$wK}0tWNB{uH;AM~i literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp b/examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher.webp new file mode 100644 index 0000000000000000000000000000000000000000..948a3070fe34c611c42c0d3ad3013a0dce358be0 GIT binary patch literal 1900 zcmV-y2b1_xNk&Fw2LJ$9MM6+kP&il$0000G0001A003VA06|PpNH75a00DqwTbm-~ zullQTcXxO9ki!OCRx^i?oR|n!<8G0=kI^!JSjFi-LL*`V;ET0H2IXfU0*i>o6o6Gy zRq6Ap5(_{XLdXcL-MzlN`ugSdZY_`jXhcENAu)N_0?GhF))9R;E`!bo9p?g?SRgw_ zEXHhFG$0{qYOqhdX<(wE4N@es3VIo$%il%6xP9gjiBri+2pI6aY4 zJbgh-Ud|V%3O!IcHKQx1FQH(_*TK;1>FQWbt^$K1zNn^cczkBs=QHCYZ8b&l!UV{K z{L0$KCf_&KR^}&2Fe|L&?1I7~pBENnCtCuH3sjcx6$c zwqkNkru);ie``q+_QI;IYLD9OV0ZxkuyBz|5<$1BH|vtey$> z5oto4=l-R-Aaq`Dk0}o9N0VrkqW_#;!u{!bJLDq%0092{Ghe=F;(kn} z+sQ@1=UlX30+2nWjkL$B^b!H2^QYO@iFc0{(-~yXj2TWz?VG{v`Jg zg}WyYnwGgn>{HFaG7E~pt=)sOO}*yd(UU-D(E&x{xKEl6OcU?pl)K%#U$dn1mDF19 zSw@l8G!GNFB3c3VVK0?uyqN&utT-D5%NM4g-3@Sii9tSXKtwce~uF zS&Jn746EW^wV~8zdQ1XC28~kXu8+Yo9p!<8h&(Q({J*4DBglPdpe4M_mD8AguZFn~ ztiuO~{6Bx?SfO~_ZV(GIboeR9~hAym{{fV|VM=77MxDrbW6`ujX z<3HF(>Zr;#*uCvC*bpoSr~C$h?_%nXps@A)=l_;({Fo#6Y1+Zv`!T5HB+)#^-Ud_; zBwftPN=d8Vx)*O1Mj+0oO=mZ+NVH*ptNDC-&zZ7Hwho6UQ#l-yNvc0Cm+2$$6YUk2D2t#vdZX-u3>-Be1u9gtTBiMB^xwWQ_rgvGpZ6(C@e23c!^K=>ai-Rqu zhqT`ZQof;9Bu!AD(i^PCbYV%yha9zuoKMp`U^z;3!+&d@Hud&_iy!O-$b9ZLcSRh? z)R|826w}TU!J#X6P%@Zh=La$I6zXa#h!B;{qfug}O%z@K{EZECu6zl)7CiNi%xti0 zB{OKfAj83~iJvmpTU|&q1^?^cIMn2RQ?jeSB95l}{DrEPTW{_gmU_pqTc)h@4T>~& zluq3)GM=xa(#^VU5}@FNqpc$?#SbVsX!~RH*5p0p@w z;~v{QMX0^bFT1!cXGM8K9FP+=9~-d~#TK#ZE{4umGT=;dfvWi?rYj;^l_Zxywze`W z^Cr{55U@*BalS}K%Czii_80e0#0#Zkhlij4-~I@}`-JFJ7$5{>LnoJSs??J8kWVl6|8A}RCGAu9^rAsfCE=2}tHwl93t0C?#+jMpvr7O3`2=tr{Hg$=HlnjVG^ewm|Js0J*kfPa6*GhtB>`fN!m#9J(sU!?(OSfzY*zS(FJ<-Vb zfAIg+`U)YaXv#sY(c--|X zEB+TVyZ%Ie4L$gi#Fc++`h6%vzsS$pjz9aLt+ZL(g;n$Dzy5=m=_TV(3H8^C{r0xd zp#a%}ht55dOq?yhwYPrtp-m1xXp;4X;)NhxxUpgP%XTLmO zcjaFva^}dP3$&sfFTIR_jC=2pHh9kpI@2(6V*GQo7Ws)`j)hd+tr@P~gR*2gO@+1? zG<`_tB+LJuF|SZ9tIec;h%}}6WClT`L>HSW?E{Hp1h^+mlbf_$9zA>!ug>NALJsO{ mU%z=YwVD?}XMya)Bp;vlyE5&E_6!fzx9pwrdz474!~g(M6R?N? 
literal 0 HcmV?d00001 diff --git a/examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp b/examples/llama.android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp new file mode 100644 index 0000000000000000000000000000000000000000..1b9a6956b3acdc11f40ce2bb3f6efbd845cc243f GIT binary patch literal 3918 zcmV-U53%r4Nk&FS4*&pHMM6+kP&il$0000G0001A003VA06|PpNSy@$00HoY|G(*G z+qV7x14$dSO^Re!iqt-AAIE9iwr$(CZQJL$blA4B`>;C3fBY6Q8_YSjb2%a=fc}4E zrSzssacq<^nmW|Rs93PJni30R<8w<(bK_$LO4L?!_OxLl$}K$MUEllnMK|rg=f3;y z*?;3j|Nh>)p0JQ3A~rf(MibH2r+)3cyV1qF&;8m{w-S*y+0mM){KTK^M5}ksc`qX3 zy>rf^b>~l>SSHds8(I@hz3&PD@LmEs4&prkT=BjsBCXTMhN$_)+kvnl0bLKW5rEsj z*d#KXGDB4P&>etx0X+`R19yC=LS)j!mgs5M0L~+o-T~Jl!p!AJxnGAhV%~rhYUL4hlWhgES3Kb5oA&X z{}?3OBSS-{!v$nCIGj->(-TAG)8LR{htr41^gxsT8yqt2@DEG6Yl`Uma3Nd4;YUoW zTbkYl3CMU5ypMF3EIkYmWL|*BknM`0+Kq6CpvO(y$#j94e+q{vI{Zp8cV_6RK!`&C zob$*5Q|$IZ09dW=L!V zw@#2wviu|<#3lgGE8GEhcx+zBt`} zOwP8j9X%^f7i_bth4PiJ$LYtFJSCN$3xwDN;8mr*B;CJwBP2G0TMq0uNt7S^DO_wE zepk!Wrn#Z#03j{`c*Rf~y3o7?J}w?tEELRUR2cgxB*Y{LzA#pxHgf}q?u5idu>077 zd^=p)`nA}6e`|@`p?u}YU66PP_MA}Zqqe!c{nK&z%Jwq1N4e_q<#4g^xaz=ao;u|6 zwpRcW2Lax=ZGbx=Q*HhlJ`Ns#Y*r0*%!T?P*TTiX;rb)$CGLz=rSUum$)3Qyv{BL2 zO*=OI2|%(Yz~`pNEOnLp>+?T@glq-DujlIp?hdJeZ7ctP4_OKx|5@EOps3rr(pWzg zK4d3&oN-X2qN(d_MkfwB4I)_)!I_6nj2iA9u^pQ{;GckGLxBGrJUM2Wdda!k)Y>lq zmjws>dVQ*vW9lvEMkiN3wE-__6OWD0txS&Qn0n22cyj4Q*8(nG4!G{6OOwNvsrPIL zCl-$W9UwkEUVuLwyD%|inbOF*xMODZ4VMEVAq_zUxZ+K#Gdqf!DW$5f)?7UNOFMz! zrB~tuu=6X2FE(p^iqgxr+?ZK;=yz`e;C$#_@D9Lj-+TDVOrva>(#*PVbaHO>A)mhl z07OJWCqYC60518$!&c`eNBcBW%GnfaQ*$eazV^2_AW?j)h;J1nUjN(I9=0+!RVx~% z3@Tf!P0TE+98jA?WceK-}A1% zW!K)lyKcGqy#M~})315-A#2NXQ`?6NR#Apo=S!oF=JfpX>iR*49ec{7AN$xxpK{D$ z2d%Fz&rdfSqourN$~Y^NFIMV1CZ?J*bMx~H3k&meGtH@q9ra2vZxmA$S(#jaaj-g4 ztJmxG+DLV<*q<|sDXPp$X>E)#S}Vm&sRaO5P&goh2><}FEdZSXDqsL$06sAkh(e+v zAsBhKSRexgwg6tIy~GFJzaTxXD(}|+0eOwFDA%rn`X;MVwDHT9=4=g%OaJ9s%3b9>9EUTnnp0t;2Zpa{*>mk~hZqItE_!dQ zOtC>8`$l|mV43Jbudf0N6&&X;{=z}Zi}d1`2qmJ}i|0*GsulD3>GgQXHN)pkR6sf1 z?5ZU%&xtL}oH;YiAA)d*^Ndw2T$+Mjuzyzz@-SM`9df7LqTxLuIwC~S0092~+=qYv z@*ja;?Wt!T!{U?c*Z0YtGe)XbI&y-?B&G2$`JDM)(dIV9G`Sc#6?sI60de6kv+)Qb zUW~2|WjvJq3TA8`0+sWA3zRhY9a~ow)O~&StBkG2{*{TGiY~S8ep{V&Vo2l<6LWsu z^#p0-v*t2?3&aA1)ozu|%efSR=XnpX$lvTeRdKlvM!@|pM5p2w3u-6 zU>}t2xiYLS+{|%C65AzX+23Mtlq?BS&YdYcYsVjoiE&rT>;Necn6l^K)T^lmE`5u{ zm1i+-a-gc;Z&v-{;8r)z6NYfBUv+=_L}ef}qa9FX01)+Aaf+;xj(mL6|JUzGJR1|fnanb%?BPPIp>SCjP|8qE5qJ{=n5ZGw?81z3(k;pzH%1CtlX50{E7h)$h{qGKfzC`e2o`*IqA#tjA z`Fz&^%$b9F*N`)U-#6>a)Z`55`$Dd0cfcs0$d13^ONrdCu9xcv_=n#WQo8stcz3jP9|2EvdI-RhJM3%Q%oM&!OlShM|0 z?gz?wHZSnm45njLtsz8PVT1S&jAlbKg5kVam$p16=EK@Sj4EP0OtH zmJDmdc^v)x>56Qg_wmYHz6h)>kl_h$>0@J!ypv%APmjZTAQVLy6Fu50RGY&JAVNhx zrF_qG6`x9MkT;1SFWo$)l{M$;3qUDn9JwE}z zRl#E_bDRJFii61kPgBybIgp8dNW!Cc1b*^YYk-#oWLJvtM_v^hQx~9?8LD4VFFxBF z3MlrsSC%f9Oupn*ctPL0U1fwfX?`tRhPD{PSLFPQOmIt$mDy0SgpNVvHS+f#Do>h1Gn?LZU9(KaN>Q_=Y*_T zvtD7%_u^^+{g`0VGzg(VZrpVQ6Ub5M=tI_p7T93R8@3Zulu3|#{iNcu!oiHxZ4Rf*( zfmiN$$ru(*_Zqn=`Gq#OuHRTSwp7uH_SokR&|)RuW5yo=Z|_4?qU-JU+tpt>!B&Is z@N(=SG;bpVc;AO@zbmMM zScqq1)b-ZQIrs={oD}|?6y{$HNB1U0^LsBh8JI&3!GBZxOXI<}&5-$lgkAaYqhOTb z?2vEnZ$-kk;*M_17(upJF3%+iH*s0-r{vttXVB2OUwI1s^+G(Ft(U8gYFXC}#P&E^ z>T@C^tS`Z7{6HT4_nF~n>JlZtk5&qDBl6r|^kzQYe`wq!C)n@$c>WOPA61NDFj<<6 zGW71NMMhwAl!U-yqrq2xrSFqRCI8acw7?}3j;ynxo*-b7Co;g5r%^j=H@9({PXXBf z@r>U>>N;E)81wx`B4f%{PB~MHka_);%kBCb(d|Jy5!MqJ%2p`t&@L)4$T2j&-WHvG zv3(uyA_gwqNu(k?jQTtv3dgPKRZoH8prxe7>pQBW5L&dpumS&5Ld2?(sCpJjvc4L5 zEnh&?91WVm)ZdTj=fjJ$pPDdgAttLXuke+?KdKxu*;kTC(r!tQk6;gxj4h%FdHAt(^M3YvYj(!tOeN)+Hvj6+< 
zzyJRG?^lZfWuR#t!tUKP&(?%3v&Zd$R2YN>lB(Lq`OInY48%4%yTv2 zYe1{G`3)(PDEio5Y@-I5tUf`c%%OCJMtSW56g3iEg%3`$7XSJJHyA z<|7&N)5Xrlgv~%BO24eFd;Hd;uiK%D`EdK|quUeRZDqbh9l)%j%J#0lfrZumvA<_w zu&=AVvdChf6}eqh(bUz`(`Ue*p01{fBAcTgKyDYLs_I+YyJEk+rM@avU~>fB$n)HS zM7pfJydu`i%gfS<{PF94kZDv$t>06sAkheDzu40NJ$5CMW%n^Lls?8^p^QGWURbKu3ZduZQZ((s2? zzE`}<{;Zt7<$C|9R8A~DJ~@%x>TfP zF>TX8)@v|t)q4GjRt<}5s6hLHwRel7>V@&r-O|Av(yh;Q1A{E>Ir>p+%dHD|=l+lT zpr(Dg&>#Nu=!)6bCLr-ZS%|;h)Ij$+e@r8_{qO19QvDe=&1tmpY*0lcA^Cc-#{9fQ z<~$*<&P$Q<_jy#<$40PMofM7aQ}C=jphI`4kLg}Z7CIN#26D{-4v-_CA-LiE@(%{y!BzsU%gG`Q?sjLUf%qFSl0y)2#ae*+EI>s|i`d^V$Dn)qmzqRq6VJRY|{4ujsIU%#bnqU6MR&-1I_43=|5(6Jr;Jvert) zE?S|Tmn}Tv<-??sxV5@9t}3D=>YZ0JrQe$CO~|EY=Lj9RM&4svQHPQL6%pV5fPFiH zfXDx;l@~et{*{U*#c#Dvzu)|znDO7$#CRx)Z&yp-}SrD{&|(MQtfUz~n35@RLfUy=aqrhCX0M}J_r5QsK~NmRCR|Nm&L z41UdsLjWxSUlL41r^0K&nCCK>fdR-!MYjFg(z9_mF^C|#ZQw?`)f6uVzF^`bRnVY& zo}@M06J&_+>w9@jpaO4snmU;0t-(zYW1qVBHtuD!d?%?AtN7Plp><-1Y8Rqb20ZaP zTCgn*-Sri4Q8Xn>=gNaWQ57%!D35UkA@ksOlPB*Dvw}t02ENAqw|kFhn%ZyyW%+t{ zNdM!uqEM^;2}f+tECHbwLmH*!nZVrb$-az%t50Y2pg(HqhvY-^-lb}>^6l{$jOI6} zo_kBzj%8aX|6H5M0Y<)7pzz_wLkIpRm!;PzY)9+24wk2&TT{w--phDGDCOz{cN_ca zpnm7`$oDy=HX%0i-`769*0M6(e5j-?(?24%)<)&46y0e&6@HCDZAm9W6Ib#Y#BF6- z=30crHGg+RRTe%VBC>T00OV6F+gQDAK38Ne3N9bm|62tPccBJi)5{B z4zc^Db72XiBd}v$CF|yU{Z=M|DZ%-(XarYNclODlb1Kz1_EKLy(NSLCN`eUl(rBCL zT*jx@wNvze0|TSqgE(QArOZU)_?qH(sj#TwzElLs9q)(0u!_P|R%Cy_0JFQxgGV>1 zz4?_uq<8_gM0`c*Hh|;UMz~vrg1gQXp{ufg`hM_qU;U>+zmvc5blCLSq@PrEBSGR# z&8=2Z4uXN`F3p73ueD1l{s{k$WipAvSh5W7ABe?4)t;r@V?y`bNB5FvBuE|0VRTb< zM1Hn^?DSsJY+sX@T5xW=#>T9VEV|?<(=6|ge$X6Sb05!LFdjDcoq*gM(Zq=t;_)Le&jyt(&9jzR73noru`a# zN*<`KwGa^gZU3-)MSLF0aFag#f0<>E(bYTeHmtdbns#|I)-$)mJ`q9ctQ8g0=ET?| zdO}eZ*b_p>ygRTtR^5Ggdam=Zb5wmd{}np+Jn1d_=M`~P=M67jj})fH4ztb5yQqQW z^C|C&^LHAK-u+ooIK)yM)QM?t;|<{P;;{`p=BclzAN#JzL4jCwXkQB1Dy{=^KR`=~ zTrr)y7eiYBzSNs_DvO=4A6#EgGS-zY%Vi)N*Yb`U;6o}KR}dq{r9pT5wqZ@3NOE8- z9-(}D|Nc5732CSYQbL)!gPQ#RbD8BhK3dl{sUuPvei0tkvnJBxDEAYTesU8H$)g(Plra{VH(v3u^CO1~(+ zU0O7#)jaS4{NcwA+LuSm&VBcX2#Im3xg)W}ySNw%->orn1taZ&+d)}8gJTqA!u|5P z{yv?zol_3|(1(%M(EVU=cp?L`{Pi|ixk{U)*guFML3P!OSlz;zGA#T+E@8@cgQ_mv1o7RSU=Zo_82F?&&2r;WE z@wk}JHYEZ9nYUc(Vv~iTCa3u8e4q(yq<29VoNbKk|`mq%I6u)My=gPIDuUb&lzf4`MEA9^g8u z)vp8|$$HE9m_BTV?lOosIGa4jud=jIbw)O2eCMfyw2*S8?hjWw^nqws$O*M$3I1)x zR0PWFb3$ySOcGTe1dz%N0l;RPc`x%05FtT^f^j{YCP}*Q=lvp4$ZXrTZQHhO+w%wJn3c8j%+5C3UAFD&%8dBl_qi9D5g8fry}6Ev z2_Q~)5^N$!IU`BPh1O|=BxQ#*C5*}`lluC515$lxc-vNC)IgW=K|=z7o%cWFpndn= zX}f{`!VK02_kU+Q5a3m37J;c} zTzbxteE{GNf?yLt5X=Bzc-mio^Up0nunMCgp*ZJ;%MJvPM3QK)BryP(_v@ei4UvHr z6+sbCifQaOkL6-;5fL8$W($zZ_;CZp305C;~$hhRquZr-r)jjd1z z31%ZK{-(`P#|Um_Sivn@p$-vz46uqT>QG0B1w9znfS9A8PB2LaHdzA|_)yjXVR*l{ zkcu3@vEf7bxH0nkh`q?8FmoO_Ucui*>_a~P?qQrlZ9@+D7%MTpSnztpylXrt5!-k8_QPB?YL8Kx_On8WD zgT+111d(Op$^$&KLAN5+@?>f7F4~wFi(8TL8+szgVmcMDTp5l&k6~=rA{Dt}!gb^r zSWY<)M7D|Z2P0cEodj6E42PV>&>DFmQpgt)E-|#sSUU@uKed+F680H@<;-x{p|nuH4!_mn85rx>wz;0mPi2ZkL#k6;sznu?cXh!T0S>{w6 zL^gvR05NY64l*<+_L>On$rjx9!US;l;LX6@z}yi#2XHh)F@Oo+l)h%fq$v}DNmF2> zfs^_t0)3N-W<9-N?uedVv{)-J0W5mh#29QM5R5h&KuiRM=0Zvnf#lF=K#WlCgc#9c zS;qvh(P$!_a8JwyhI^ZJV2k+B6Z^64?w|1?5gyo6y{}923CRZfYVe1#?F% z7h2SUiNO3;T#JUOyovSs@@C1GtwipycA=*x5{BpIZ_#GCMuV8XK=x;qCNy{d7?wA~ zC+=vjls;ci&zW=6$H~4^K%v{p}Ab?U%C6Z4p%eC<3ExqU$XR<}LLF67A$Sr20DR_pJ3yeBa~ z^sw{V0FI5;UpwXsScYuhbqGQ`YQ25;6p6W^+tgL&;Ml;>S3CGpSZ>VrTn0m1$y$HU z&65)I!c?oREz};c=nLCliriqQX->4uivHTgd${GqeAlf*!P^B|jkU|*IdNP(&6C>4 zqOW$)Nw9nvjy^&`?E|gotDV{JmJ9Q~vuhy<`^C4XIUDt|j4o6rK^e8_(=YqC zuaR6TRVf@tUFHB079o4MBIh{M~4>WwnGgesQH*3?w(RA%hCZ*7)b!aNV=yOQ%o_Y=Lt0Sl*(9^jfRnC210Om$=y>*o|3z} 
zAR&vAdrB#mWoaB0fJSw9xw|Am$fzK>rx-~R#7IFSAwdu_EI|SRfB*yl0w8oX09H^q zAjl2?0I)v*odGJ40FVGaF&2qJq9Gv`>V>2r0|c`GX8h>CX8eHcOy>S0@<;M3<_6UM z7yCEpug5NZL!H_0>Hg_HasQGxR`rY&Z{geOy?N92Z z{lER^um|$*?*G63*njwc(R?NT)Bei*3jVzR>FWUDb^gKhtL4A=kE_1p-%Fo2`!8M} z(0AjuCiS;G{?*^1tB-uY%=)SRx&D)pK4u@>f6@KPe3}2j_har$>HqzH;UCR^ssFD0 z7h+VLO4o@_Yt>>AeaZKUxqyvxWCAjKB>qjQ30UA)#w z&=RmdwlT`7a8J8Yae=7*c8XL|{@%wA8uvCqfsNX^?UZsS>wX}QD{K}ad4y~iO*p%4 z_cS{u7Ek%?WV6em2(U9#d8(&JDirb^u~7wK4+xP$iiI6IlD|a&S)6o=kG;59N|>K1 zn(0mUqbG3YIY7dQd+*4~)`!S9m7H6HP6YcKHhBc#b%1L}VIisp%;TckEkcu0>lo@u995$<*Em;XNodjTiCdC%R+TX|_ZR#|1`RR|`^@Teh zl#w@8fI1FTx2Dy+{blUT{`^kY*V-AZUd?ZZqCS4gW(kY5?retkLbF=>p=59Nl|=sf zo1Pc|{{N4>5nt#627ylGF`3n>X%`w%bw-Y~zWM_{Si$dc82|=YhISal{N7OY?O`C4 zD|qb}6nLWJ`hUyL+E>-;ricg9J@ZNYP(x(Sct&OI$Y!QWr*=^VN;G3#i>^1n4e#Je zOVhbFbLpXVu*16enDM+ic;97@R~u&kh__kgP#!R`*rQEnA+_dLkNP~L`0alC|J;c; zeiK=s8;BsLE)KbG3BD&Br@(Ha@SBT&$?xX`=$;eeel=|R_dIr6-Ro?=HEjnsJ_b`1 zK6Yg^-6;^2aW!xeTK)A~3Rm|L^FCHB_I>jIju7ZGo&N_1*QHkxH2!!%@o4iZ?vntS;&zJdPe1dH#04YD93A44o-MpfD zP{rn_aq>U%RDvC2+bp;xPlsOzauIi3*Lf42`jVKKZCRuKdYhi>FDuL2l=v{$BCN#Q6796s%r-AG$Q^t(3c@ zD?w0UhYr11@feiyl9kY_@H8~|xlmO<8PfQmj1!$@WieW@VxR@Psxfe-v9WCi1+f>F4VL?0O~K7T?m4-u|pSkBpUJZZe*16_wAp zSYZ@;k`3;W3UHKUWc8QeI}0jH5Ly=cGWQPw(Kr2fm=-5L(d`lcXofy8tJY3@Tuadz zYWXR{mW7XT!RF#RVCe%}=tM*O6!AD3^(!8un~opNI%Uko7$5t@<8+?; zTxDys(MyyGsUjtSu9$+|_-t!U3fVb1dkK?l`17<+jfl=hrBHnDSV>^R1=TnQeyqbW z>ov#l%!1|S!1>8UUxIdhQq`_klcHVx0{?#>K3#$4GlXncwldt!g17TcvKq-jo_996 z>oA=tH9CqRl6Yw?Uc`am!V?lHJbizOJaVaScf1UP5e7Dbgabq=b!B~T&_F6?ooU>w%x0A zH~&MHJ=q`fCH{U<7MDXE4SD32cDZA)WJeWkllJ`UspWaS#eDe^kg^oU_A14UE9zG-a^g{xaXf$})Wik>gT zl#dkzGr(;h0JZDuFn(+k8wNq?PZ5grQ<+sM?wBGt@JnH6v0#or-5wBQWKU~(S_> zkE!tc*ZJ1Y&*p(xX84POb3cClRMd!^qJ#CAZfIepEj-<`VURS_yCz0(?*Ixcj4 z-!zV1_QZhpm=0<;*(nm+F>T=)o?ep@CK5I%g^VAA+RB25ab?7)A~z~egru=I1S|@v zH7tXV!0wmGS^qj#e+MY;C5eUjEAp$Y?LDkS^QPZ}8WN85?r$u<-Epi;yZ1|J2J`se z$D6DpH~2F=eI0B&=UFAUnJvZAmClJlK)sutJ?M>xpZiWV&0=G4MZP+x+p>EX=HbCz zxls%Mw?*u^;LbHWIWCyq+yi)`GmFn9J112CZda_u@YIP%i;srFg_paU02Ifij*7}l z&CF-(3|>*a|+vbNR`^RP=9G?ymEJ0Z~)d&c*UE$UMepZ zcITr{0WqhxkjUnM15js_gW=e3Uh|y6ZReaXHIz-=p`x5VvB&rH9y>Amv@^WmXFEw) zQXYrk3feir=a{jMQ+wDIkkFnZ$k{sJakHn*?u za%4b!00ev8NVLM1TY=cl?KB&55BY_MU-sg?c>=Dbz_W{(Z~c?HJi*XpYL)C6Bd8WH zt+v-#0&o~@t4qESi*)+eW%@VD0|o^yF)n0hME$UtXF$*Lvh}7sso{`|pn*JDIy5^Fm3s$5*zEE=?u5<=l8FJc3r%+H} zdfoNl2J0^~!-*mOL5o-x32|e0Im*E!yY7F7E5N)W3>+v_LBydlEx?4$RL5f2oYRD# zaR0wv(-p~wO0eLDl3K=%`{5+0Gd$ktO=W)gWlGZJ0`K z$_RNA=ckrfa;H0KA~dR^p�(p-{x$&=IACIfoAR!za)F-^da-t3#0Dycnp zwO~NVXwXCl;jE<}>%@xz|=8fIJAB?>+E{7)|4l${4ngA3G|=r z2Dyv;VVWSgZx9Wj>qUjleGl3Ei9K4>h!(lPS%8VOG>Xu0%6VDz^O=bjJmuP7>DeUv zrbI}MlHB^^d?{zv6d=@_ZD2lg1&G7UjnVN{1}9WkaM3H~btX0GtSzB+tZ^qRgWo4m z!GmimlG$=wgXCnr6j@m<1gAL46#T~5Bnm=2{^@>|t&`9mkEPddj zAvG~@Tv~TAm2i%VW}R-g(Z0)z-Y|szHr@rk>4MAyG*Ma*7Yh#H7(!-5>DZ@8r;_dx z{prSe<>~099F8vsYd2xff7uAS%7{S)f(|@me3t2$iy&NEc7OUEchp@9A|X;;IA>8!oX+y(BKJ$EzV* znR$z;!L$s7uy@{OT~nG#B!NRraT8(X##Ho!0r_o@gg0CA-9H^;-uE&?$2$nHv_00o z%cbuUc-tCx$Uh&EZ4Nf4Zgqv)Y6>usG3>GeQnxx_Z6+PcbX-+ysbt1hQ`K1LDpOE? 
zrAhIZhSN9yVIAOa22gn577tbc&i3|3V8NWy&!tw##`}9*x}gtI^h1DzZRA>UuaJG) zaZ7j)dq!O}{?#8Y7~7i6fHh4{`pL?>-18|p!S75Y#^DM>-S3)vuZG+Q7l@ek zQP~#cBpWgg#mApc_sPYjpw8odQuRokmTkzcNl`^CcKB7e&;zViV;{Y{o^Y$%7i0m# z62%#1Lq!RC?}lK>%mp}T!3Xv;L*0v*>USLm``N%>w>@fwC+#T&Tx2bN4w(20JB}oU zuSa6v^kXi0xPs?pbaOHnyiqq6By1EZY9OZ^^QA>{q-Hsd&m`pbQ%8121aWG-F5xf zlZ%;B{;C>X19|`^_?dVyCq>n+41w7|!tUS!{9rHlbhX=SZO5CQ^;!Du_E7*`GiR^Q w)2!4MKjfSAeNo!9>IaV6aUZ*?W>} zs4%E?srLW`CJh0GCIK@hTkrW7A15Iu%N&?Q^$0+!{Tv&|t^Y@u%!L zglTg&?Q5q#ijZ;&HBQ?FNPp;k3J5!&{^+SGq?AX~SiOM9jJMRpyP?RCr@z38AQyy&WRMaC;n4una$~nJKSp?q|s8F00c9?Q! zY_ovvjTFm+DeQM^LXJ#v0}6HRt3R1%5PT*}W!k8BEM;Jrj8dIceFo2fhzTqaB3KKk zGlCLI)gU25(#u6ch6GeB1k@eHq7l{EHXv0n6xE#ws#ri}08kkCf8hUt{|Ejb`2YW* zvg}0nSSX1m=76s?sZhRY$K=3dpJ+y*eDULGnL2}4>4nvW^7_<~wIM_5fjvwt4h1|g z)g0Z6ZFq9j<~9~b8((~TN{Z?ZQfw|is&Xp~AC61sj;xItKyCHdI|tCMC_LbXF>~vR z=w6V3^H=W4CbAgR4#xw}ETTwu2guW~=Crl@SMXv85jQ=%y!s^?m4PI0My7MWICO;- z175jm%&PcPWh8QdOU(#8bp4!N7ET-+)N}N2zk2)8ch|4Q&lPFNQgT-thu053`r*h3 z_8dI@G;`zn;lH$zX3RzIk`E8~`J=BBdR}qD%n@vVG1834)!pS1Y?zVkJGtsa(sB~y zNfMYKsOJb%5J(0ivK8d+l2D2y&5X!cg3BG!AJ}910|_${nF}sC1QF^nLIhzXk-Y#x z0)&1iK!O;Og0Ky!;`b~v%b$`S4E&fB)1NB4v@8wr( z&+NX4e^&o)ecb=)dd~C!{(1e6t?&9j{l8%U*k4)?`(L3;Qjw z#w7FS+U(94MaJKS!J9O8^$)36_J8;thW#2$y9i{bB{?M{QS_inZIJ!jwqAbfXYVd$ zQ5fC$6Nc9hFi8m^;oI-%C#BS|c8vy+@{jx6hFcf^_;2VRgkoN(0h!_VSGmgNPRsxI z8$rTo0LaYq-H5i&gtj81=&xU?H-Y2==G@uQV7E`@+2E9XQW@{&j`?EOktk|Ho{HU>ZqDzvgjwBmdex z&uZNd2C1h{{}2k6Ys9$*nFP3;K%u!MhW`uZy7Sn`1M1zs@Es&;z*Z>Gsh@-3Fe6pE zQD2@cqF((NrRevgvLsvM_8;;iNyJ5nyPyy?e!kvKjGj`6diRFBEe49Oa7wwkJFV7Z z$YT&DWloYu-H?3<0BKn9L&JYDT-SK~*6c5pi18P26$JESKRYj{T7Zk6KiRJcbvOO*{P56Q6s8msbeI3>|j>K9}Q9UBeq*inXKemCm`-<5|-$ZyN4u$(3 z&HcvqehFD%5Yrmykg-^d`=BSa8(i=>ZoC77^mWY{evp(km@aHqhUECBz76YiR+VYK zY_avFC~V3$=`6C4JhfHAQ@DZtUOwH`L;oYX6zK0-uI^?hS$ALfq}A7evR;ohJHij} zHSZdW?EKv9U1s4oD*<(0oQ*;MaQ6@cvGL zuHCPgm_NhVsgp^sfr*ia^Db}swo1?O(_Q2)y+S$CBm+g=9wCOUPbz(x)_GbaKa@A7 zuI&!ynLiZRT#V%_y_-D`0Z5lT*auoe{(U5NylTzFSJW()W-#F6*&A`LNO1bV#Y;QJ zSbLBnp|B^dtK|KIWC|No>JjWBWE@n7O)x{&^E(WMeMvp57#qA8m* zeTow*U@_86B#Fm*rxyYu5PRWaWHx8y> z*qmHEp(AMDl0v)ij(AY8fnH=~ZwwjVAbu*m5;xPfidh@ov6d8g zfJsi&!QyK53Es%sC39ts;54V68koALD4b|%tNHW0bIkZAJKa=W&FomJSEDT>W1xIX z1x%Z>AvNIsSPLcn3RTcHXb@KB?cuM)=x6fcIx>&(GxqZ8w3p#jJ(GVgc*`c0HG}dv zIop&Qim!K1NFwic%07KcjWgHBPUkq7f~lj;TPqVGTiT#cUeim>;nY`>h@a*S{qQex zQ`z62WK|Mj)Y{tfF{;T4P;c8$Q|KU?Joh zIkA^z%X7z|r>4aTh@|StTi!-r1D!g=zb#3d#{{&K3CqE$Iz-UH<%37c zRfkO`&uM%#AD3PHv`g5t0e^O%nVL0d{Xlx^EjEC3#skF@`zl-7PF^0oxW)1!C!JxR zWvuAHH?)61FKA1QeT*_sY7;_Id#!GmV4n`MO{~sv}VLSK` zXRw=Y=Clz*00B(5y^K;gCZMAzjT5+c3IC=)l(9VIDdatpxj3y89WwI|bH&$!ZEvp` zPR!T@#!(|KfI-w?!&+7$N3F6>tD{YO4Qg$d_`nNEdfVCha9vaPn0jI0`)`@*72hq! 
zpU5ND^P*RoEkbD5o#az(-g=Y)L>HH>Oc%}$ zT3Rs_ih0;4+Lv4Y;@Iv(;fUbQ=i-G(#>vghec~*j(I#r|5mqFiJBpzi&hzEcD{u$< zRsm0BVYn=pT;0>R(itW|*D&;O%bOc7et9ACaH#J>z3A1A~6fdP>pmbM%xzm4>|;c_?B+%sl;Qs2{t!60$^u zH1t@9^6>;?!FuusnISi$f5CL&;z?EqJN$FBuWDA#D5`cy_UvCFIVvf{c?4N0teh;d zET$7aVbj08KTQS!x?Nd1Is8q8qFzs}a=!@nJ;7FSfCY^T@D-gpw`w<6e#X3+;O}1h z$%I!M)0bg|EKUA04Qjn@+x{Rj8vt6Wn!R|3A92z}^$KfF5(#CWr4y#~re1CN4i4w0 z#GsypBR{xA3Er7sgAi(|}1-W?s~n$7?K|9WL8kpVfw-;#b9 z+mn;=ep!162U5R>_t}fOt~tE?s#m( zO-S$7>Ay6*hHdZ)7_oU915WYYCIX;hFI-U2EWYX!pllONr@Q--2o~`!isi6vTPLJ4@(|o=%NHYjo0_S&q*UQIROw@*N-By@PaQ&;YxFZ0aR zX&}LeOEz);#m~Hwm^VAY8DK}b$F4bo{jMN?d!lxKPhNklzr^Cd`0f4oJr^z=I|l`* zm8AHm*fPV`0=lF3Pnnp}&J0N1X@}-D94YvmUabFrLGSnTz7Mu^21F#O5tN#CuY9Vh zUZBH=ez%h*wkf0hBtXJh1SN3d+IF{gzT7lp)j}n?03lt;XSQRAh7qd&v;RwTYDuQ# zbI2*r<>?x-G0@hM{;%{VBD7nLKt~D`T~-HAt5;h%i0_=Ifs=yHma5dhJ+QMG?Ux(a z|E?1CMy1!~oA`FP!k~iG=t&5#>bVdz=peT8HMB6Y)#7PpETtNryT^+Rv3vpJaF^zP z{H}0-LyV9Fu21ID%wO9f1IKlFr1p4c{o-?03vyB-tr5duk^&L$;m_|f$vs`^Sl{j2 z95}oY{LlY+=ZS%J+tZoXCd0*sSU7w^gjovXn+g7uyra5{cU49@yHf#Z^Jl-$9cIfo z+AJuxH$VLb=#+uBbVmUjnx zxb1pZ@-O9=AIk4@S)m6fJ2?{HrNYwwnL3a45muuNjr;6$O`bGEM0T4A2_S$t=86*- zcO+0mywg*j#A4mU}enR_!cGmIYQ;qwfchWtFEXL)AK%*;=j znYne+hS4EMy3S)C*mZ1KI>!+)0V@9!N6H$Y}~MJ{rYuf zz^KljIWvFi-?#?V@LPR&c6Nn{!=XM z>}-h$S76;$H{E{Y%@^zlmOl^efBwa%UU+jJD9UVukQ3ti_kH-?H*RC0?M1W%FCvMB zM_+v6fk$6X2sx)-p~B3&Kl{nscK}pNLM*qjtpaf9>AU{-iPKQZR8yCg!TY}Qg*(;) z)gdvCcB%kppZc$VdvsK@)3l1{&DG!d_6OHOS`y=ITLEVu`unSKA2E%JD*DVX{LJ}K z9l>hMRDqxQh0lnpGHpVYneX}eA3Pt|2v%=q;rt)``R|#bDyB)OXY&vI_@|*}h}G?^ z@aZ4_!7cQPX`!fW_?{oT1NTwHs#l5L-0`E|y@48<3Q^HFf8=Idi zpJYD%1MkII!~|7I^WGo)IF=?{>ACnjJ_WUi39C}!Q{QnheVJqeKKqq5^o5CBde(g9 zvw$X6^jz_^E2$wSw4!q5*RG(C2_^XO$HBn_55vbl44OnTTRwRaePP0vo{K)U1#99& z<>rq7V&V(<&@I%MFoN5zrY}sz=(*-L&}1QQ*a%`u25h{cFj===17eB_uGuzG&byQ< zrm8BJZl4r_E$3k|Wo6FW0-6M7>qac5uFQsQcmkLWGfeH74S3Z_rJ!jgN++!@i=HW8 zkyjI(oPH-+-N#Qc^-mpNO`bc6r=2-<%&Wy5K1vfFJB(L_IkpS6fY^NmuL8qsgj>MD zn~BHH9WM~32_3vd=W&B)k7F9q%stJx+b_L_X-4zr^LVUMCmyCTA3sWtkvsmME?Xiy z?xOSfB=_$oY06~J-HcCq&)qcW{j;uP;?Dm}=hkq?zh&n!;m((-G-u_t|6x399Q;>A zgNpxoJNj{u|MFDH7Rhq@FCAl0dE|ddnl!oh9{Lq?@JDoR6L;C941IK`ISfdE$4S zE0AUQ8+2|Ncl_q5QkSp#AODp~(^mfP&%Au@@|TBQwoP`UU+V{6u8|)6ZA{~uKmQ*M zmrMTDU8S~8Eqi{^v0Ug&5Upcm#y7Z1(RbgZAG8jB$eRwCspQ)>5;U)oGZ&E5aeR*K z8Yt`Y0$G))Yd(Y3KH}tA4`-_QmNke5hU_|nq=xtyjwW(_o?itz>B>WM&^63bNdQ)k@-IgDHW*RW$Xo9#RzrTrCn7L2H{9Amq|qNg@#eZY=|P zCoI?2s+L)zsM%WX(NbVEY^`C>lFjIBYmJ6@DKJ0ZT4&F&WHW!dwa%QzOG!?jY_2(S zDcEzZbz*2Q!43|z))9yOP9X1Xt%DXzwY(3tl-TR=Qb_MbZYRrooh;dYYmS!U_as1(=YVB?Q_A|tNu5Ut&_q3jbfDM zoFxT^uEuH`nX3*sB%K?GuHUkweYReBwnHqh3P)~`+s3+Tj!rDA1e)8vuBv5J*IsxC zkd^~b(aGzArj08{>cnzOuy04C+C`}gb|Yz-1avxeWzev3NzcHbz_&4W@QCr$z3~w=8Ua- z`;vfG1~BP8CyLb=F7t1am~ph_#|O%$khSJ9%Vtcn)YmpgQxF?xM^_Vb+5fnpB^W0I`f%X8gb9#X{Q-yJG0{Z56aWeI&zPxnf5pdJA38bM`cYnS#x)% z`n1tFf$i)W-hGm(f9mde^=X@NcV_lFb=P`4&CI&H=IArijGwdCk&X@uQ$5xmj!~^? 
z#$ROCI)V-~t%L%GS#wo@U27ddR`4`3)WoB{R-4snfNrfee|kI8^bu#yDgYqOwas9# zmcb`3!kRJ`Cr=_tq)8aMt{aGtUZsqwVlj6DgCGre>AEt&x8H_in!x@uwgExIh|-mA zjdaC(29~CTVSaaF7HPbql&*9Uo8P@f)>LqCXclr}peS7_1BQ28u9PO8Eq1@`l3q9o zkfKCaO2?T?ZyA6loW<#9_c^O=m<&h}CA!ineAD@=(gbq`vyT|tiJ6#^B1$P;;qax` z55k&Q?wEh#87niLo*+n4L@65J(Nz~=Ya%7^(miLb(E>A3B@|Jjl;FU&D>o|9#7PJH z?|ago!o;WC^h=|T7PVBg(DAB}72cyUS zb(f>Bwbr!F1eTCO5fpj<{PqhY5>143p?~5ZA5H40);=@M#MYvrB6gqHbU_!GSY??i z%s=>-ciA4*zOOZHds0a(kWewZ4h(k8h(ua7HX)Au&mY~H8KY6(_cb$_&fA@QjIW-*heP3%$d!m5^AdnT}`12qA^c@!g3DOwZ5WwE2?)-yU z!)Vx#Mtxt?FzFTwK!77sy7)sMzUd->w4^bxtpM2j!b1pjgyk zGKwWGeb4)^zjy{9Es&PU1}gwg?|J#L$KJB7ett9@4M%-nGtIQr0>Fl@8-yh`-+1ed zS6r}(MeSvgSoFmH*_WPu@i?}!AB~2?;i&IxrkNg~cQ9Som98tcq)k^|eeER|Zl77t za-TVUc;DNvzVXJ%w52+#weN?+;i#{f#!Oc&z?81*N>^e~ltRS%ZI@lR{rs()HmqG! zx*}ZrI-EZ}ckJMiy>A^oofwDfC~IH)z8{VHKGT@#E5I(Ll&+MnMCl>~AV7+>Gi%mF zkU1QlKASdR0B80!YhP<$Ywi0?W2Ux45oPfxv9QolWzJPD^weBfvo4SONxP35106sAmh(e+vAs0GboFD@PvNs)jNPvarhW}0YliZEg{Gazv z+JDIpoojRVPr<*C|BTq<`6ga{5q^8^!|0cxe=rZ!zxH3%f5ZO0cQ*Z<^$Yt2{|Ek0 zyT|*F+CO@K;(owBKtGg!S^xj-Z~rga2m6nxKl9J=fBSuNKW_dLKWhJKeg^-Xe`^1? z`TyJj)8E!#>_3Y?uKrwqq3LJ#SGU>AzUO|6`nR^u&3FNN_jGOc zw)Nw`wr3yIKhgcee6IaN=ws>M{6677%)hPwx&HzC(f&u~&)6@b2kNRzBDQAP0*H73 zq%McOmRk{B3i47qRe=DA*$&odrbEJZ*pV9XXa&p@wlW~@Yfs>V{yiTtplMhgM*-Bz zsSnlq&pG;z0OUN%$~$3=g1UF+G*>+17eRbBf3=y79J}KR8owon@$1Z7MIrvvWWH)34nK2SD)GsrJ{l z1Cl#oVo3A8qY3e=aF)qzms~FG#2$LzT=gs&aVMOj>(%{y<&O0cG!nCiESl~x=^dF{ zKvj8F1K8Ng171wwM5Fh4KoQw`_c6#y$(5cAm7e}~nJ#A*fx+c9;y#&W!#VukR)ugk zKp3=+;Ut+IYn%m+r4d*<`L2h%aDnX5}^!5R|H;(34AoVWjRx(msBZvk;rCI*|~ zdOijqI@9Z{Vu!~jvHW{lBa$rnl4+!s_5sfK3bCGk-B%iDe&@-}+%fOKU|(9?V1 zHE8&@4z)Kx!RAvAs z!Wic9=o#(bg?kc-G68-m(jZ`^=XGUXb)}t(%&~sjFnV^sEX%hSy6UKC4iOhgV=BHV z2w`4g7Y=s#Vu2B_?#VQ|hP39@eArgfX>-0S+dd&^mx0*wp}>)x;c4RUgxz%;oNe?& z-7-lJ@Y^2^C;=qJsxx5|xF)*pTGhch2B&kxtn;f!7=gznk}I3}Dh}(CoMXgA5-p&kS202!l?!fT3t|HG*rIP~mS* z$Wjo}jq3}z$Qq!9yrtd3fM0N629ZM?LU$nv@Tv9b7I;D|;0H2dsA~g7Z7zp1| zB)XmrkMgF6OQr|R)HHD^TE{Y#j!~SR?b`Xt3Qs`B+x<hxexYeAjMUWdZ-*n9%(1)Wb(n2U<><7&9dwGJmrob)4%H? 
zlQ%z+L-^$dFhhH|@u$%97Qz?*Ynh2VG@q|?8vY&L74&fs&_b&3$x&Oyjl~LQDRRap zJU4U*R+(2Dd!G+lh8!V{pT_UJn+^1Qg6$` zqkNm(a#hWyc6SP+p5=C4HL8-m`pO`5o~`-LI?_h5CsH?F_%?nDodmz&pWR20WTpJE z?N|wSzLjMUK8E)a2tI}Lf;+;*M|h3Y(U#>)g1>zk9|Hd}oZAa2 zLYBWBoSW!Ts!RwXr^8h+U*@{9{zqS^iH)Op<;r`Uw~nc}<^$V~_i%$GFjaG?X1@E|M`h)nekvFKt`Dh-f>@|0-`Xoq)o` zx;JmzDfOV9qCx|EVpogEe0LK~tGS?5$$L_i6P$P6wIsCQaP_;d{{N=iV@+8LI}o#( zvo*Ejy=IIn{rdIQh1&q-{EuohpVOjJ^Q3lD*YTp37$^RRgn8ihpdu5{Ct%5-KO!VL zcNB6dUajXI9jkm-P|i3~GB-A(X`P1Oqqb$tcku)UJw0w3GeUijb__#QT4j%64z%EeB7S?jlWwx_7&+EEvB|6N=kV}DwnyAlX=?j`) zmU#!$*^@NIu#n_d7;WoJV@*Fbv9|yJO4;n|BNF2xy(54RyB>t~8lUOUW$&2%Nwi1y zx6JxW88>U2$#qhl^6KUbtmg9}D0o5vYDT7kWJthLGkpGnN4T>{St^_EU>4;DmLF9o zr|LqsA8_MoNLQ=}w?8u!ziSZ@PC#Y<#9uJFo-ozVo6D;<8j^1$c|qAE3ZTE5i~zmE z$BU5lw6l=EWsg^y^;8>r9qH{xfL|~PZYK#md$zZ0?o11gV<*WSW~cgy2GYGQir%wf zt4iW8D+;s*;RGrmd(-T<@2&j(Cb9xhV*l-x`TpK`xq|7p?5R%5*s!69?2c!cC*VY* z2DE^9pvOPLU!1e}wA8S8opcTJ3`NB>hY=JQnL~QFXR4K8A$BqJnoEB$wn-%u@E6Mh zCfMF4kusv3N!(aHC}4)Xs^xoOwXd%e^6pi5|DZo=Q25j+6HlJ^7FodH6y1bMROR^q zGu6)fopS`h%Sw<;ZH%TEPf+#81-#_v+@8nlR0jLcIDKQtLleOC)6yLZgC!D9X3GgS zohwU{v$jl=quD#Go^hB{`@Qw*a%`(^jyT~=q^bWgGzRj;|12J55HWdCWV}EB|K=%N z3Nq-qxJJ`>^|1MNN+q}zTB&ooE3j==AgK@^UW<^oSbeALa2peF)Th6{@sj0KyMNHZ zksk1+MXN2tv+22A%cQOGpS9)77(uP9mh+!5T5ERLvF@b}$+WvXM45Z?-kCa)fb~f1 znVbTD$Gx-0Zxc`0D@YgHakge6SL0H`-vN_x?AP0>iGH0_EE&=v83hMJgaKAI0jJXm zVxVz;X<$v6WW7}fxROO7vr#YLP;;lij5VrX{;>7kK6TtOH&6|Ar^xo>00%+u$C4@# z>!jOt6*3><171+WxoZnKDTzJtDRw+T030;yI}~uV@9fCnei^I*j>Bp&mzP2d=FPb_ zCM*l_+$LDR3B*a!A$g#>xsrZvw0lckxmMg>0aQd7tPyN=t{dgXb;Ie+T8{fZH=gdu zM7Rg9c(kg(Jg0?ARRRl=AONFKrvFj)lTY$KfT%6^6s`mk*ABGhsce*LsoD>K{z_M2 ziPpnu+lw22PfF!CoId^6n*G4H(Ix+#+N{C(da7t1BYMGEaE#PdpOLxsVD5riQXHp@OX;`S`8VnpM~)I920w~<3|mo0 zf8~Az`*?2?H&gZ&*K&bRkV@qzvMlRHXys8*Ze2+1c?5o!^+$&MHxB@4Ee5cke52R! zmn7AZtY6ST%ixgU5)%$%QcwHj7Es-Qu^kLAPwy%7pGBw_4Q9#da^W2$}axNHr03)_nw z5?yuNmXrI5HgS46)c5&}B)Tts49oU92>3xBLLy}FMUW=84DQbVq^;7_e7|(Sdz|&J z73N+M`rc2rt*oSWu#7S{*s~nH6HRHJS1SmzeXk|;CA)FI4bat3<%}nkB%;;?=F>B7ms9QSxv#@+69;@>QaR?REYX4&)=itG>rM{<{A79Rmk)`5ON#GL`*KX%}Ihk3w(RtM-WLt z?f&FLF}4N^yE!(pZ&Yj&Bc`~K0@4_}*0Om?wN|}4WJ>WL;G^H2*QpgEkGA~OET-Km zkwz|5{6dnz1U<2Pe9DNL>3g5FEIvp1jzP&2K#z~j%g6!7B;^zF+o95?fV{3mnB8*RMhCDNp>Am-3e@jNfMj?jHV$MWjk!DDKP zkAz$Y?Sr)!GUOX}qTQ5aMh|wq1uq}~joWyKl=b_LboM#wi{CMuz5x6BKlA-qy++cM01D3b7`uD z#l6M4pI;JCypO8JZ6?U&wNxR!{4oB_ zlV!x9+-&Qy6{%MQ{~yoZGkKiTSC`YS_j22~G;xUV855g2&C(zm^V!(wpcm@zn{%!g z4}JGo(sGZ1O~to-}le

UmY2RIYtNPVDpE$%vda+HD#3m
z&VuXJ{BK&Qe+rBa7eq}Q(bq|tn(RrJAk|ztj2(i{d>nmQnM?;HF2k&9sA6up5tmjl
z7lySlzMbifH17-m-Lwa_F&e7nOH?ESi3#ckR3tsM+jsck3`oG!uMS}|eAwVXv>}qxwq?QY%QJ0}r@^;fhuUA9W
z*BVl>TGo&N004@xSiwDUXUvp51sVmqO3m)=B55aPwf@0=e}cN+$-BdKxY`YrT_4)0
z_d10#i44Q*rFr8MC>*)v$EJvz``(pb{e&*6k+b
sMz%($|1+8hn8c2?P(l@;Rb&CsZeYoCI3?2!LqjbwPXW3z4G$Qfj=cT5Yb%vY0(AX
oeb?AaKtwrnc|$|zzw9vfvn^aJJ!zd)XFXqqy0000001=f@-~a#s
literal 0
HcmV?d00001
diff --git a/examples/llama.android/app/src/main/res/values/colors.xml b/examples/llama.android/app/src/main/res/values/colors.xml
new file mode 100644
index 000000000..ca1931bca
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/values/colors.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="purple_200">#FFBB86FC</color>
+    <color name="purple_500">#FF6200EE</color>
+    <color name="purple_700">#FF3700B3</color>
+    <color name="teal_200">#FF03DAC5</color>
+    <color name="teal_700">#FF018786</color>
+    <color name="black">#FF000000</color>
+    <color name="white">#FFFFFFFF</color>
+</resources>
diff --git a/examples/llama.android/app/src/main/res/values/strings.xml b/examples/llama.android/app/src/main/res/values/strings.xml
new file mode 100644
index 000000000..7a9d314e2
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/values/strings.xml
@@ -0,0 +1,3 @@
+<resources>
+    <string name="app_name">LlamaAndroid</string>
+</resources>
diff --git a/examples/llama.android/app/src/main/res/values/themes.xml b/examples/llama.android/app/src/main/res/values/themes.xml
new file mode 100644
index 000000000..8a24fda56
--- /dev/null
+++ b/examples/llama.android/app/src/main/res/values/themes.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+
+    <style name="Theme.LlamaAndroid" parent="android:Theme.Material.Light.NoActionBar" />
+</resources>

    +
    + + + + +)LITERAL"; +unsigned int index_html_len = sizeof(index_html); diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp index e09b3c8c5..647abe116 100644 --- a/examples/server/index.js.hpp +++ b/examples/server/index.js.hpp @@ -1,1903 +1,4 @@ -unsigned char index_js[] = { - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x28, 0x29, - 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, - 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x79, 0x63, 0x6c, 0x65, 0x20, - 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x53, 0x79, 0x6d, 0x62, 0x6f, - 0x6c, 0x2e, 0x66, 0x6f, 0x72, 0x28, 0x22, 0x70, 0x72, 0x65, 0x61, 0x63, - 0x74, 0x2d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x73, 0x22, 0x29, 0x3b, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x29, 0x7b, 0x66, 0x2d, 0x2d, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, - 0x74, 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6f, 0x29, - 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x73, 0x2b, 0x2b, 0x3b, 0x77, 0x68, - 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x3d, - 0x5f, 0x2e, 0x6f, 0x3b, 0x5f, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3b, 0x5f, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, - 0x66, 0x28, 0x21, 0x28, 0x38, 0x26, 0x5f, 0x2e, 0x66, 0x29, 0x26, 0x26, - 0x70, 0x28, 0x5f, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x5f, 0x2e, 0x63, - 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, - 0x3d, 0x21, 0x30, 0x7d, 0x7d, 0x5f, 0x3d, 0x69, 0x7d, 0x7d, 0x73, 0x3d, - 0x30, 0x3b, 0x66, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, - 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x66, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x28, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, - 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x6c, - 0x65, 0x74, 0x20, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x30, 0x3b, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x28, 0x74, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x72, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, - 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x72, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, - 0x7d, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x66, 0x3d, 0x30, 0x2c, 0x73, 0x3d, - 0x30, 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, - 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 
0x30, - 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, - 0x69, 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, - 0x74, 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, - 0x69, 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, - 0x6e, 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, - 0x32, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, - 0x2e, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, - 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, - 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, - 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, - 0x2e, 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, - 0x2e, 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, - 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x68, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x62, 0x72, - 0x61, 0x6e, 0x64, 0x3d, 0x6e, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x26, 0x26, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x65, 0x29, 0x7b, - 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, - 0x74, 0x7d, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, - 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, - 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x6e, - 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 
0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, 0x65, 0x3d, 0x6e, - 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, - 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, 0x65, 0x7d, 0x7d, - 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, - 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x77, 0x28, 0x28, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2c, 0x5f, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, - 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, 0x7d, 0x66, 0x69, - 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x7c, 0x3d, 0x5f, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, 0x68, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, - 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, 0x4f, 0x4e, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, - 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, - 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x68, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, - 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x63, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x69, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x2c, 0x73, 0x65, - 0x74, 0x28, 0x6e, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, - 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x79, 0x29, 0x21, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, - 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, - 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, - 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, 0x61, 0x76, 0x65, - 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, 0x65, 0x63, 
0x74, - 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x21, - 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x73, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x6e, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, 0x66, 0x2b, 0x2b, - 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, - 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x66, - 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, - 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x61, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x70, 0x28, 0x74, 0x29, 0x7b, - 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, - 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, - 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x2e, - 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x7c, 0x7c, 0x21, - 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, 0x7c, 0x6e, 0x2e, 0x53, - 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x64, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, - 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, - 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x53, - 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, 0x3d, 0x65, 0x3b, 0x6e, - 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, 0x2e, 0x69, 0x3d, 0x2d, - 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, - 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x73, 0x3d, 0x6e, - 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x76, 0x28, 0x74, 0x29, 0x7b, 0x6c, - 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x77, - 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, - 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, - 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, - 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, 0x53, 0x2e, 0x55, 0x28, - 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x6e, - 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x65, 0x2e, 0x6e, 0x29, 0x65, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x74, - 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, 0x65, 0x3b, 0x65, 0x2e, - 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, 0x69, 0x66, 0x28, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x72, 0x29, - 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x65, - 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x79, 0x28, 0x74, 0x29, 0x7b, 
0x68, - 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, - 0x3d, 0x6c, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, - 0x34, 0x7d, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, - 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x29, 0x2e, 0x68, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, - 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, - 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x35, 0x3b, 0x69, 0x66, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x3d, 0x3d, 0x6c, 0x29, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, - 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, 0x3b, 0x74, 0x72, 0x79, - 0x7b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, - 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, - 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x7c, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x30, - 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x29, 0x7b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, - 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x36, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x69, 0x3d, 0x74, 0x3b, - 0x76, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x33, - 0x36, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, - 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, 0x7d, 0x68, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x29, 0x7d, - 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 
0x65, - 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x68, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, - 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, - 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, - 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, 0x3b, 0x66, 0x6f, - 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, - 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, 0x74, 0x2e, 0x53, 0x2e, - 0x55, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, - 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x36, 0x3b, 0x66, - 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, - 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, - 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, - 0x66, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, 0x28, 0x29, 0x29, - 0x74, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, - 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, - 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x28, - 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x63, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, - 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x6e, 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x7d, 0x29, 0x3b, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6d, 0x28, 0x74, 0x29, - 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, - 0x79, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x67, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x75, 0x3b, 0x74, 0x2e, 0x75, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x29, 0x7b, 0x66, 0x2b, 0x2b, 0x3b, 
0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x28, - 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, 0x74, - 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x74, 0x2e, 0x66, 0x7c, 0x3d, - 0x38, 0x3b, 0x62, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 0x77, - 0x20, 0x6e, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x69, - 0x3d, 0x5f, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x28, 0x74, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, - 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, - 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x6e, 0x2e, 0x53, 0x2e, 0x55, 0x28, - 0x6e, 0x29, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x3b, 0x74, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x3b, 0x67, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x6b, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, - 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x29, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, - 0x22, 0x4f, 0x75, 0x74, 0x2d, 0x6f, 0x66, 0x2d, 0x6f, 0x72, 0x64, 0x65, - 0x72, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x22, 0x29, 0x3b, 0x76, - 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x69, 0x66, - 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x62, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x53, 0x28, 0x74, 0x29, 0x7b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, - 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, 0x33, 0x32, - 0x7d, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, - 0x2e, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, - 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x53, 0x28, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x69, - 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, - 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, - 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, - 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x6e, 0x7d, 0x66, 0x69, - 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x28, 0x29, 0x7d, 0x7d, 0x3b, - 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, - 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, - 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, - 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, - 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, - 0x39, 0x3b, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x64, 
0x28, - 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x74, 0x68, - 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x2e, - 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, - 0x7d, 0x3b, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, - 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, - 0x6f, 0x3b, 0x6f, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x3b, 0x53, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, - 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x38, 0x3b, 0x69, 0x66, - 0x28, 0x21, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, - 0x29, 0x62, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x7d, 0x3b, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x77, 0x28, 0x74, 0x29, 0x7b, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, - 0x53, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x2e, 0x63, - 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, - 0x6e, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, - 0x74, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x64, - 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x29, 0x7d, 0x76, 0x61, 0x72, - 0x20, 0x78, 0x2c, 0x43, 0x2c, 0x45, 0x2c, 0x55, 0x2c, 0x48, 0x2c, 0x50, - 0x2c, 0x4e, 0x2c, 0x24, 0x2c, 0x44, 0x2c, 0x54, 0x3d, 0x7b, 0x7d, 0x2c, - 0x56, 0x3d, 0x5b, 0x5d, 0x2c, 0x41, 0x3d, 0x2f, 0x61, 0x63, 0x69, 0x74, - 0x7c, 0x65, 0x78, 0x28, 0x3f, 0x3a, 0x73, 0x7c, 0x67, 0x7c, 0x6e, 0x7c, - 0x70, 0x7c, 0x24, 0x29, 0x7c, 0x72, 0x70, 0x68, 0x7c, 0x67, 0x72, 0x69, - 0x64, 0x7c, 0x6f, 0x77, 0x73, 0x7c, 0x6d, 0x6e, 0x63, 0x7c, 0x6e, 0x74, - 0x77, 0x7c, 0x69, 0x6e, 0x65, 0x5b, 0x63, 0x68, 0x5d, 0x7c, 0x7a, 0x6f, - 0x6f, 0x7c, 0x5e, 0x6f, 0x72, 0x64, 0x7c, 0x69, 0x74, 0x65, 0x72, 0x61, - 0x2f, 0x69, 0x2c, 0x46, 0x3d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, - 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x66, - 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, - 0x6e, 0x29, 0x74, 0x5b, 0x65, 0x5d, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x28, 0x74, 0x29, 0x7b, 0x76, - 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x72, - 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, - 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, - 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x7b, 0x7d, 0x3b, 0x66, - 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x22, 0x6b, - 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, - 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, - 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x72, 0x5b, 0x6f, 0x5d, 0x3d, 0x6e, - 0x5b, 0x6f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x61, 0x72, 0x67, 0x75, 
0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, - 0x32, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, - 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, 0x3f, 0x78, 0x2e, - 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, - 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, - 0x6f, 0x70, 0x73, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, - 0x20, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, - 0x6f, 0x70, 0x73, 0x29, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, - 0x3d, 0x72, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x28, 0x72, 0x5b, 0x6f, 0x5d, - 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, - 0x6f, 0x70, 0x73, 0x5b, 0x6f, 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x72, 0x2c, 0x5f, 0x2c, 0x69, - 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, - 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x7b, - 0x74, 0x79, 0x70, 0x65, 0x3a, 0x74, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x3a, 0x6e, 0x2c, 0x6b, 0x65, 0x79, 0x3a, 0x65, 0x2c, 0x72, 0x65, 0x66, - 0x3a, 0x5f, 0x2c, 0x5f, 0x5f, 0x6b, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x5f, 0x5f, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, - 0x30, 0x2c, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, - 0x5f, 0x64, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x5f, - 0x63, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, - 0x30, 0x2c, 0x5f, 0x5f, 0x76, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x69, 0x3f, 0x2b, 0x2b, 0x45, 0x3a, 0x69, 0x2c, 0x5f, 0x5f, 0x69, 0x3a, - 0x2d, 0x31, 0x2c, 0x5f, 0x5f, 0x75, 0x3a, 0x30, 0x7d, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x69, - 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x43, 0x2e, 0x76, 0x6e, - 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, - 0x28, 0x6f, 0x29, 0x2c, 0x6f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x52, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x6e, 0x75, - 0x6c, 0x6c, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x6a, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x28, 0x74, - 0x2c, 0x6e, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x3d, 0x74, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6e, 0x29, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x3f, 0x71, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x69, 0x2b, - 0x31, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x66, 0x6f, 0x72, 
0x28, - 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, 0x5f, - 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, 0x2b, - 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x65, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x5f, 0x5f, 0x65, - 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x71, 0x28, 0x74, - 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x26, 0x26, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, - 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x75, - 0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, - 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, - 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, - 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, - 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, - 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, - 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x47, 0x28, 0x74, 0x29, 0x7b, 0x28, 0x21, - 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, - 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x48, 0x2e, 0x70, 0x75, 0x73, - 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, - 0x2b, 0x2b, 0x7c, 0x7c, 0x50, 0x21, 0x3d, 0x3d, 0x43, 0x2e, 0x64, 0x65, - 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, 0x65, 0x72, - 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x50, 0x3d, 0x43, 0x2e, - 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x4e, 0x29, 0x28, 0x7a, - 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, - 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, - 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, - 0x24, 0x29, 0x3b, 0x74, 0x3d, 0x48, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, - 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, - 0x6e, 0x3d, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x5f, - 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x6f, 0x3d, 0x28, 0x69, - 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, - 0x5f, 0x5f, 0x65, 0x2c, 0x75, 0x3d, 0x5b, 0x5d, 0x2c, 0x66, 0x3d, 0x5b, - 0x5d, 0x2c, 0x28, 0x72, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, - 0x26, 0x28, 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, - 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x76, 0x2b, - 0x31, 0x2c, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x26, 0x26, 
0x43, - 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x28, 0x5f, 0x29, 0x2c, 0x5f, 0x74, - 0x28, 0x72, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, - 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x2e, - 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, - 0x65, 0x6e, 0x74, 0x2c, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, 0x5f, 0x75, - 0x3f, 0x5b, 0x6f, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, 0x2c, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x3f, 0x71, 0x28, 0x69, 0x29, - 0x3a, 0x6f, 0x2c, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, - 0x5f, 0x75, 0x29, 0x2c, 0x66, 0x29, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x2e, - 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x3d, 0x5f, - 0x2c, 0x69, 0x74, 0x28, 0x75, 0x2c, 0x5f, 0x2c, 0x66, 0x29, 0x2c, 0x5f, - 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, 0x6f, 0x26, 0x26, 0x42, 0x28, 0x5f, - 0x29, 0x29, 0x2c, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, - 0x6e, 0x26, 0x26, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, 0x24, 0x29, - 0x29, 0x3b, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x7d, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, - 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, - 0x2c, 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, - 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x3d, - 0x5f, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x56, 0x2c, - 0x79, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, - 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x66, 0x2c, 0x4b, - 0x28, 0x65, 0x2c, 0x6e, 0x2c, 0x76, 0x29, 0x2c, 0x66, 0x3d, 0x65, 0x2e, - 0x5f, 0x5f, 0x64, 0x2c, 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, 0x79, 0x3b, - 0x63, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x61, - 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x63, 0x5d, 0x29, 0x26, 0x26, - 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x22, 0x66, 0x75, - 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, - 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x28, 0x68, 0x3d, 0x2d, 0x31, - 0x3d, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x3f, 0x54, 0x3a, 0x76, - 0x5b, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x7c, 0x7c, 0x54, 0x2c, 0x61, - 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x63, 0x2c, 0x5f, 0x74, 0x28, 0x74, 0x2c, - 0x61, 0x2c, 0x68, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, - 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x2c, 0x70, 0x3d, 0x61, 0x2e, 0x5f, - 0x5f, 0x65, 0x2c, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x68, 0x2e, - 0x72, 0x65, 0x66, 0x21, 0x3d, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, - 0x28, 0x68, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x72, 0x74, 0x28, 0x68, - 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x29, - 0x2c, 0x6c, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x61, 0x2e, 0x72, 0x65, - 0x66, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x63, 0x7c, 0x7c, 0x70, 0x2c, 0x61, - 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x64, 0x26, 0x26, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x70, 0x26, 0x26, 0x28, 0x64, 0x3d, - 0x70, 0x29, 0x2c, 0x36, 0x35, 0x35, 0x33, 0x36, 0x26, 0x61, 0x2e, 0x5f, - 0x5f, 0x75, 0x7c, 0x7c, 0x68, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x3d, 0x3d, - 0x61, 0x2e, 0x5f, 0x5f, 0x6b, 0x3f, 0x66, 0x3d, 0x51, 0x28, 0x61, 0x2c, - 0x66, 0x2c, 0x74, 0x29, 0x3a, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 
0x20, - 0x61, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x76, 0x6f, 0x69, 0x64, - 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3f, 0x66, - 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3a, 0x70, 0x26, 0x26, 0x28, 0x66, - 0x3d, 0x70, 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, - 0x6e, 0x67, 0x29, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, - 0x69, 0x64, 0x20, 0x30, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x3d, - 0x2d, 0x31, 0x39, 0x36, 0x36, 0x30, 0x39, 0x29, 0x3b, 0x65, 0x2e, 0x5f, - 0x5f, 0x64, 0x3d, 0x66, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x64, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, - 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x3d, 0x6e, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x73, 0x3d, 0x65, 0x2e, - 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x6c, 0x3d, 0x73, 0x2c, 0x63, - 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, - 0x3d, 0x5b, 0x5d, 0x2c, 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x66, 0x3b, - 0x5f, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x69, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x5d, 0x3d, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x69, 0x3d, 0x6e, 0x5b, 0x5f, 0x5d, 0x29, - 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x3d, - 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x3f, 0x6e, 0x75, 0x6c, 0x6c, - 0x3a, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, - 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x6e, 0x75, - 0x6d, 0x62, 0x65, 0x72, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, - 0x66, 0x20, 0x69, 0x7c, 0x7c, 0x22, 0x62, 0x69, 0x67, 0x69, 0x6e, 0x74, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x69, 0x7c, - 0x7c, 0x69, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, - 0x6f, 0x72, 0x3d, 0x3d, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3f, 0x4f, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, 0x29, 0x3a, 0x46, 0x28, 0x69, - 0x29, 0x3f, 0x4f, 0x28, 0x6a, 0x2c, 0x7b, 0x63, 0x68, 0x69, 0x6c, 0x64, - 0x72, 0x65, 0x6e, 0x3a, 0x69, 0x7d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, - 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3a, 0x76, - 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x2e, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x26, 0x26, 0x69, - 0x2e, 0x5f, 0x5f, 0x62, 0x3e, 0x30, 0x3f, 0x4f, 0x28, 0x69, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x2c, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, - 0x69, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x69, 0x2e, 0x72, 0x65, 0x66, 0x3f, - 0x69, 0x2e, 0x72, 0x65, 0x66, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x69, - 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x3a, 0x69, 0x29, 0x3f, 0x28, 0x69, 0x2e, - 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x69, 0x2e, 0x5f, 0x5f, 0x62, 0x3d, 0x74, - 0x2e, 0x5f, 0x5f, 0x62, 0x2b, 0x31, 0x2c, 0x75, 0x3d, 0x59, 0x28, 0x69, - 0x2c, 0x65, 0x2c, 0x72, 0x3d, 0x5f, 0x2b, 0x63, 0x2c, 0x6c, 0x29, 0x2c, - 0x69, 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x75, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, - 0x6c, 0x6c, 0x2c, 0x2d, 0x31, 0x21, 0x3d, 0x3d, 0x75, 0x26, 0x26, 0x28, - 0x6c, 0x2d, 0x2d, 0x2c, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x75, 0x5d, 0x29, - 0x26, 0x26, 0x28, 0x6f, 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x31, 
0x33, - 0x31, 0x30, 0x37, 0x32, 0x29, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x6f, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6f, - 0x2e, 0x5f, 0x5f, 0x76, 0x3f, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x75, 0x26, - 0x26, 0x63, 0x2d, 0x2d, 0x2c, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, - 0x69, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x26, 0x26, 0x28, 0x69, 0x2e, 0x5f, - 0x5f, 0x75, 0x7c, 0x3d, 0x36, 0x35, 0x35, 0x33, 0x36, 0x29, 0x29, 0x3a, - 0x75, 0x21, 0x3d, 0x3d, 0x72, 0x26, 0x26, 0x28, 0x75, 0x3d, 0x3d, 0x3d, - 0x72, 0x2b, 0x31, 0x3f, 0x63, 0x2b, 0x2b, 0x3a, 0x75, 0x3e, 0x72, 0x3f, - 0x6c, 0x3e, 0x66, 0x2d, 0x72, 0x3f, 0x63, 0x2b, 0x3d, 0x75, 0x2d, 0x72, - 0x3a, 0x63, 0x2d, 0x2d, 0x3a, 0x63, 0x3d, 0x75, 0x3c, 0x72, 0x26, 0x26, - 0x75, 0x3d, 0x3d, 0x72, 0x2d, 0x31, 0x3f, 0x75, 0x2d, 0x72, 0x3a, 0x30, - 0x2c, 0x75, 0x21, 0x3d, 0x3d, 0x5f, 0x2b, 0x63, 0x26, 0x26, 0x28, 0x69, - 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x36, 0x35, 0x35, 0x33, 0x36, 0x29, - 0x29, 0x29, 0x3a, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x29, 0x26, - 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x2e, 0x6b, 0x65, 0x79, - 0x26, 0x26, 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, 0x28, 0x6f, 0x2e, - 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, - 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x71, 0x28, 0x6f, 0x29, 0x29, - 0x2c, 0x75, 0x74, 0x28, 0x6f, 0x2c, 0x6f, 0x2c, 0x21, 0x31, 0x29, 0x2c, - 0x65, 0x5b, 0x5f, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x6c, 0x2d, - 0x2d, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6c, 0x29, 0x66, 0x6f, 0x72, 0x28, - 0x5f, 0x3d, 0x30, 0x3b, 0x5f, 0x3c, 0x73, 0x3b, 0x5f, 0x2b, 0x2b, 0x29, - 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x6f, 0x3d, 0x65, 0x5b, 0x5f, - 0x5d, 0x29, 0x26, 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, - 0x37, 0x32, 0x26, 0x6f, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x26, 0x26, 0x28, - 0x6f, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x64, - 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x71, 0x28, 0x6f, - 0x29, 0x29, 0x2c, 0x75, 0x74, 0x28, 0x6f, 0x2c, 0x6f, 0x29, 0x29, 0x7d, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x28, 0x74, - 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x2c, - 0x69, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, - 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, - 0x5f, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x69, 0x3d, 0x30, 0x3b, - 0x5f, 0x26, 0x26, 0x69, 0x3c, 0x5f, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x3b, 0x69, 0x2b, 0x2b, 0x29, 0x5f, 0x5b, 0x69, 0x5d, 0x26, 0x26, - 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2e, 0x5f, 0x5f, 0x3d, 0x74, 0x2c, 0x6e, - 0x3d, 0x51, 0x28, 0x5f, 0x5b, 0x69, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x29, - 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x21, - 0x3d, 0x6e, 0x26, 0x26, 0x28, 0x65, 0x2e, 0x69, 0x6e, 0x73, 0x65, 0x72, - 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x28, 0x74, 0x2e, 0x5f, 0x5f, - 0x65, 0x2c, 0x6e, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x2c, 0x6e, - 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x2c, 0x6e, 0x26, 0x26, 0x6e, - 0x2e, 0x6e, 0x65, 0x78, 0x74, 0x53, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x28, - 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 
0x20, - 0x6e, 0x3d, 0x6e, 0x7c, 0x7c, 0x5b, 0x5d, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, - 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x74, - 0x7c, 0x7c, 0x28, 0x46, 0x28, 0x74, 0x29, 0x3f, 0x74, 0x2e, 0x73, 0x6f, - 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x74, 0x29, 0x7b, 0x58, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7d, 0x29, - 0x29, 0x3a, 0x6e, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x74, 0x29, 0x29, - 0x2c, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x59, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, - 0x61, 0x72, 0x20, 0x69, 0x3d, 0x74, 0x2e, 0x6b, 0x65, 0x79, 0x2c, 0x6f, - 0x3d, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x72, 0x3d, 0x65, 0x2d, - 0x31, 0x2c, 0x75, 0x3d, 0x65, 0x2b, 0x31, 0x2c, 0x66, 0x3d, 0x6e, 0x5b, - 0x65, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, - 0x3d, 0x66, 0x7c, 0x7c, 0x66, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, 0x2e, - 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, - 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, - 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x3e, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, - 0x3d, 0x66, 0x26, 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, - 0x37, 0x32, 0x26, 0x66, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x3f, 0x31, 0x3a, - 0x30, 0x29, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x3b, 0x72, 0x3e, 0x3d, 0x30, - 0x7c, 0x7c, 0x75, 0x3c, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x3b, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x3d, 0x30, 0x29, 0x7b, - 0x69, 0x66, 0x28, 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x72, 0x5d, 0x29, 0x26, - 0x26, 0x30, 0x3d, 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x26, - 0x66, 0x2e, 0x5f, 0x5f, 0x75, 0x29, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, - 0x2e, 0x6b, 0x65, 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x72, 0x3b, 0x72, 0x2d, 0x2d, 0x7d, 0x69, 0x66, 0x28, 0x75, 0x3c, 0x6e, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, - 0x28, 0x66, 0x3d, 0x6e, 0x5b, 0x75, 0x5d, 0x29, 0x26, 0x26, 0x30, 0x3d, - 0x3d, 0x28, 0x31, 0x33, 0x31, 0x30, 0x37, 0x32, 0x26, 0x66, 0x2e, 0x5f, - 0x5f, 0x75, 0x29, 0x26, 0x26, 0x69, 0x3d, 0x3d, 0x66, 0x2e, 0x6b, 0x65, - 0x79, 0x26, 0x26, 0x6f, 0x3d, 0x3d, 0x3d, 0x66, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x3b, 0x75, - 0x2b, 0x2b, 0x7d, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x2d, 0x31, - 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x22, 0x2d, 0x22, 0x3d, 0x3d, - 0x3d, 0x6e, 0x5b, 0x30, 0x5d, 0x3f, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x50, - 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x6e, 0x2c, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, 0x22, 0x3a, 0x65, 0x29, 0x3a, - 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, - 0x3f, 0x22, 0x22, 0x3a, 0x22, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x22, - 0x21, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, - 0x41, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x28, 0x6e, 0x29, 0x3f, 0x65, 0x3a, - 0x65, 0x2b, 0x22, 0x70, 0x78, 0x22, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x2c, 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3b, - 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x79, 0x6c, 0x65, 
0x22, - 0x3d, 0x3d, 0x3d, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, - 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, - 0x20, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, - 0x73, 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, - 0x65, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x5f, 0x26, - 0x26, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2e, 0x63, 0x73, - 0x73, 0x54, 0x65, 0x78, 0x74, 0x3d, 0x5f, 0x3d, 0x22, 0x22, 0x29, 0x2c, - 0x5f, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x5f, - 0x29, 0x65, 0x26, 0x26, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x7c, 0x7c, - 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, - 0x22, 0x22, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x65, 0x29, 0x66, 0x6f, 0x72, - 0x28, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x5f, 0x26, 0x26, 0x65, - 0x5b, 0x6e, 0x5d, 0x3d, 0x3d, 0x3d, 0x5f, 0x5b, 0x6e, 0x5d, 0x7c, 0x7c, - 0x5a, 0x28, 0x74, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x2c, 0x6e, 0x2c, - 0x65, 0x5b, 0x6e, 0x5d, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, - 0x66, 0x28, 0x22, 0x6f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x30, 0x5d, - 0x26, 0x26, 0x22, 0x6e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x5b, 0x31, 0x5d, - 0x29, 0x6f, 0x3d, 0x6e, 0x21, 0x3d, 0x3d, 0x28, 0x6e, 0x3d, 0x6e, 0x2e, - 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x28, 0x50, 0x6f, - 0x69, 0x6e, 0x74, 0x65, 0x72, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, - 0x29, 0x24, 0x7c, 0x43, 0x61, 0x70, 0x74, 0x75, 0x72, 0x65, 0x24, 0x2f, - 0x2c, 0x22, 0x24, 0x31, 0x22, 0x29, 0x29, 0x2c, 0x6e, 0x3d, 0x6e, 0x2e, - 0x74, 0x6f, 0x4c, 0x6f, 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, - 0x29, 0x69, 0x6e, 0x20, 0x74, 0x3f, 0x6e, 0x2e, 0x74, 0x6f, 0x4c, 0x6f, - 0x77, 0x65, 0x72, 0x43, 0x61, 0x73, 0x65, 0x28, 0x29, 0x2e, 0x73, 0x6c, - 0x69, 0x63, 0x65, 0x28, 0x32, 0x29, 0x3a, 0x6e, 0x2e, 0x73, 0x6c, 0x69, - 0x63, 0x65, 0x28, 0x32, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x7c, 0x7c, 0x28, - 0x74, 0x2e, 0x6c, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x74, 0x2e, 0x6c, 0x5b, - 0x6e, 0x2b, 0x6f, 0x5d, 0x3d, 0x65, 0x2c, 0x65, 0x3f, 0x5f, 0x3f, 0x65, - 0x2e, 0x75, 0x3d, 0x5f, 0x2e, 0x75, 0x3a, 0x28, 0x65, 0x2e, 0x75, 0x3d, - 0x44, 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2c, 0x74, - 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, - 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, - 0x3a, 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x29, 0x3a, 0x74, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, - 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x6e, 0x2c, 0x6f, 0x3f, 0x65, 0x74, - 0x3a, 0x6e, 0x74, 0x2c, 0x6f, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, - 0x69, 0x66, 0x28, 0x69, 0x29, 0x6e, 0x3d, 0x6e, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x78, 0x6c, 0x69, 0x6e, 0x6b, 0x28, - 0x48, 0x7c, 0x3a, 0x68, 0x29, 0x2f, 0x2c, 0x22, 0x68, 0x22, 0x29, 0x2e, - 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x73, 0x4e, 0x61, - 0x6d, 0x65, 0x24, 0x2f, 0x2c, 0x22, 0x73, 0x22, 0x29, 0x3b, 0x65, 0x6c, - 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, - 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x68, 0x72, - 0x65, 0x66, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x6c, 0x69, - 0x73, 0x74, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x66, 
0x6f, - 0x72, 0x6d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x74, 0x61, - 0x62, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, - 0x26, 0x22, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0x21, - 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x72, 0x6f, 0x77, 0x53, 0x70, 0x61, - 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, 0x63, 0x6f, 0x6c, - 0x53, 0x70, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x22, - 0x72, 0x6f, 0x6c, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x6e, - 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x5b, - 0x6e, 0x5d, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x65, 0x3f, 0x22, - 0x22, 0x3a, 0x65, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, - 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x7d, 0x22, 0x66, - 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, - 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x7c, 0x7c, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x3d, 0x3d, 0x65, 0x7c, 0x7c, 0x21, 0x31, 0x3d, 0x3d, 0x3d, 0x65, - 0x26, 0x26, 0x22, 0x2d, 0x22, 0x21, 0x3d, 0x3d, 0x6e, 0x5b, 0x34, 0x5d, - 0x3f, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, - 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x29, 0x3a, 0x74, 0x2e, - 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, - 0x28, 0x6e, 0x2c, 0x65, 0x29, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6e, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, - 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, - 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x31, 0x5d, 0x3b, 0x69, - 0x66, 0x28, 0x74, 0x2e, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x2e, - 0x74, 0x3c, 0x3d, 0x6e, 0x2e, 0x75, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x74, 0x3d, 0x44, - 0x61, 0x74, 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x3b, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x28, 0x43, 0x2e, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x3f, 0x43, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x74, - 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6c, 0x5b, 0x74, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x2b, 0x21, 0x30, 0x5d, 0x28, 0x43, 0x2e, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x3f, 0x43, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, - 0x28, 0x74, 0x29, 0x3a, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, - 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, - 0x2c, 0x73, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6c, 0x2c, 0x63, 0x2c, - 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x2c, 0x79, 0x2c, - 0x6d, 0x2c, 0x67, 0x2c, 0x62, 0x2c, 0x6b, 0x2c, 0x53, 0x2c, 0x77, 0x2c, - 0x78, 0x2c, 0x45, 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, 0x69, - 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, - 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, - 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, - 0x3b, 0x31, 0x32, 0x38, 0x26, 0x65, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x26, - 0x28, 0x66, 0x3d, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x65, 0x2e, 0x5f, - 0x5f, 0x75, 0x29, 0x2c, 0x6f, 0x3d, 0x5b, 0x75, 0x3d, 0x6e, 0x2e, 0x5f, - 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x5d, 0x29, 0x2c, 0x28, - 0x6c, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x62, 0x29, 0x26, 0x26, 0x6c, 
0x28, - 0x6e, 0x29, 0x3b, 0x74, 0x3a, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, - 0x6f, 0x66, 0x20, 0x45, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x69, 0x66, 0x28, - 0x79, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, 0x3d, - 0x28, 0x6c, 0x3d, 0x45, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x54, 0x79, 0x70, 0x65, 0x29, 0x26, 0x26, 0x5f, 0x5b, 0x6c, 0x2e, 0x5f, - 0x5f, 0x63, 0x5d, 0x2c, 0x67, 0x3d, 0x6c, 0x3f, 0x6d, 0x3f, 0x6d, 0x2e, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x6c, 0x2e, 0x5f, 0x5f, 0x3a, 0x5f, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x63, - 0x3f, 0x76, 0x3d, 0x28, 0x63, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, - 0x65, 0x2e, 0x5f, 0x5f, 0x63, 0x29, 0x2e, 0x5f, 0x5f, 0x3d, 0x63, 0x2e, - 0x5f, 0x5f, 0x45, 0x3a, 0x28, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, - 0x79, 0x70, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x3f, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, - 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x3a, - 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3d, 0x63, 0x3d, 0x6e, 0x65, 0x77, - 0x20, 0x49, 0x28, 0x79, 0x2c, 0x67, 0x29, 0x2c, 0x63, 0x2e, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3d, 0x45, 0x2c, - 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x3d, 0x66, 0x74, 0x29, - 0x2c, 0x6d, 0x26, 0x26, 0x6d, 0x2e, 0x73, 0x75, 0x62, 0x28, 0x63, 0x29, - 0x2c, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, - 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, 0x63, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x3d, 0x7b, 0x7d, 0x29, 0x2c, 0x63, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x5f, - 0x5f, 0x6e, 0x3d, 0x5f, 0x2c, 0x68, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x64, - 0x3d, 0x21, 0x30, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x2c, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x29, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x26, 0x26, - 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x45, 0x2e, - 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, - 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x3d, 0x3d, 0x63, 0x2e, - 0x73, 0x74, 0x61, 0x74, 0x65, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, - 0x73, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, - 0x29, 0x29, 0x2c, 0x4d, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x45, - 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, - 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, - 0x73, 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x29, 0x29, 0x29, - 0x2c, 0x61, 0x3d, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x70, - 0x3d, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x5f, - 0x5f, 0x76, 0x3d, 0x6e, 0x2c, 0x68, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, - 0x3d, 0x45, 0x2e, 0x67, 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, - 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, - 0x6f, 0x70, 0x73, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, - 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, - 0x6c, 0x6c, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, 0x2e, 
0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, - 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x26, 0x26, 0x63, - 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x2e, - 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, - 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, - 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x45, 0x2e, 0x67, - 0x65, 0x74, 0x44, 0x65, 0x72, 0x69, 0x76, 0x65, 0x64, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x46, 0x72, 0x6f, 0x6d, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, - 0x26, 0x79, 0x21, 0x3d, 0x3d, 0x61, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, 0x63, 0x65, 0x69, 0x76, 0x65, - 0x50, 0x72, 0x6f, 0x70, 0x73, 0x26, 0x26, 0x63, 0x2e, 0x63, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x52, 0x65, - 0x63, 0x65, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x73, 0x28, 0x79, - 0x2c, 0x67, 0x29, 0x2c, 0x21, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x26, 0x26, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, - 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x21, 0x31, 0x3d, 0x3d, - 0x3d, 0x63, 0x2e, 0x73, 0x68, 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, - 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x28, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x7c, - 0x7c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, - 0x5f, 0x76, 0x29, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6e, 0x2e, 0x5f, - 0x5f, 0x76, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x26, 0x26, - 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, - 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, - 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x29, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, 0x6e, - 0x2e, 0x5f, 0x5f, 0x6b, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, - 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, - 0x29, 0x7b, 0x74, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, - 0x29, 0x7d, 0x29, 0x29, 0x2c, 0x62, 0x3d, 0x30, 0x3b, 0x62, 0x3c, 0x63, - 0x2e, 0x5f, 0x73, 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, - 0x62, 0x2b, 0x2b, 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, - 0x73, 0x68, 0x28, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x62, 0x5d, 0x29, - 0x3b, 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x2c, 0x63, 0x2e, - 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x26, 0x26, - 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, 0x3b, 0x62, 0x72, - 0x65, 0x61, 0x6b, 0x20, 0x74, 0x7d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, - 0x69, 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, 0x63, - 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, - 0x6c, 0x6c, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x79, 0x2c, 0x63, - 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x67, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, - 0x21, 0x3d, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 
0x6e, - 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x26, 0x26, - 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x28, - 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, - 0x2e, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, - 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x2c, - 0x64, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x69, 0x66, 0x28, 0x63, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x67, 0x2c, 0x63, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x3d, 0x79, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x50, - 0x3d, 0x74, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x21, 0x31, 0x2c, - 0x6b, 0x3d, 0x43, 0x2e, 0x5f, 0x5f, 0x72, 0x2c, 0x53, 0x3d, 0x30, 0x2c, - 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x22, 0x69, - 0x6e, 0x20, 0x45, 0x26, 0x26, 0x45, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x74, 0x79, 0x70, 0x65, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x29, - 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x64, - 0x3d, 0x21, 0x31, 0x2c, 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, - 0x6c, 0x3d, 0x63, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, - 0x74, 0x65, 0x2c, 0x63, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x29, 0x2c, 0x77, 0x3d, 0x30, 0x3b, 0x77, 0x3c, 0x63, 0x2e, 0x5f, 0x73, - 0x62, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x77, 0x2b, 0x2b, - 0x29, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, - 0x63, 0x2e, 0x5f, 0x73, 0x62, 0x5b, 0x77, 0x5d, 0x29, 0x3b, 0x63, 0x2e, - 0x5f, 0x73, 0x62, 0x3d, 0x5b, 0x5d, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, - 0x64, 0x6f, 0x7b, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x21, 0x31, 0x2c, - 0x6b, 0x26, 0x26, 0x6b, 0x28, 0x6e, 0x29, 0x2c, 0x6c, 0x3d, 0x63, 0x2e, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x63, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2c, 0x63, 0x2e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x2c, 0x63, - 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x29, 0x2c, 0x63, 0x2e, - 0x73, 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x7d, - 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x64, 0x26, - 0x26, 0x2b, 0x2b, 0x53, 0x3c, 0x32, 0x35, 0x29, 0x3b, 0x63, 0x2e, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x3d, 0x63, 0x2e, 0x5f, 0x5f, 0x73, 0x2c, 0x6e, - 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, - 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x26, 0x26, - 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x5f, 0x29, - 0x2c, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, - 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x29, 0x29, 0x2c, 0x68, - 0x7c, 0x7c, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x63, 0x2e, 0x67, 0x65, - 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, - 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x7c, 0x7c, 0x28, - 0x64, 0x3d, 0x63, 0x2e, 0x67, 0x65, 0x74, 0x53, 0x6e, 0x61, 0x70, 0x73, - 0x68, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x28, 0x61, 0x2c, 0x70, 0x29, 0x29, 0x2c, 0x4a, 0x28, - 0x74, 0x2c, 0x46, 0x28, 0x78, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, - 0x6c, 0x26, 0x26, 0x6c, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x3d, 0x3d, - 0x6a, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6c, 0x2e, 0x6b, - 0x65, 0x79, 0x3f, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 
0x63, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3a, 0x6c, 0x29, 0x3f, 0x78, - 0x3a, 0x5b, 0x78, 0x5d, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, - 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x2c, 0x73, 0x29, 0x2c, - 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, - 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x75, 0x26, 0x3d, 0x2d, 0x31, 0x36, 0x31, - 0x2c, 0x63, 0x2e, 0x5f, 0x5f, 0x68, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x26, 0x26, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x63, 0x29, - 0x2c, 0x76, 0x26, 0x26, 0x28, 0x63, 0x2e, 0x5f, 0x5f, 0x45, 0x3d, 0x63, - 0x2e, 0x5f, 0x5f, 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x63, 0x61, - 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, - 0x3d, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x66, 0x7c, 0x7c, 0x6e, 0x75, 0x6c, - 0x6c, 0x21, 0x3d, 0x6f, 0x3f, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, - 0x75, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x75, 0x7c, 0x3d, 0x66, 0x3f, 0x31, - 0x36, 0x30, 0x3a, 0x33, 0x32, 0x2c, 0x6f, 0x5b, 0x6f, 0x2e, 0x69, 0x6e, - 0x64, 0x65, 0x78, 0x4f, 0x66, 0x28, 0x75, 0x29, 0x5d, 0x3d, 0x6e, 0x75, - 0x6c, 0x6c, 0x29, 0x3a, 0x28, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, - 0x2e, 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, - 0x2e, 0x5f, 0x5f, 0x6b, 0x29, 0x2c, 0x43, 0x2e, 0x5f, 0x5f, 0x65, 0x28, - 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, - 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x26, 0x26, 0x6e, 0x2e, 0x5f, - 0x5f, 0x76, 0x3d, 0x3d, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x76, 0x3f, 0x28, - 0x6e, 0x2e, 0x5f, 0x5f, 0x6b, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x2c, - 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, - 0x3a, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x6f, 0x74, 0x28, 0x65, 0x2e, - 0x5f, 0x5f, 0x65, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, - 0x6f, 0x2c, 0x72, 0x2c, 0x66, 0x2c, 0x73, 0x29, 0x3b, 0x28, 0x6c, 0x3d, - 0x43, 0x2e, 0x64, 0x69, 0x66, 0x66, 0x65, 0x64, 0x29, 0x26, 0x26, 0x6c, - 0x28, 0x6e, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x69, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x6e, - 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, - 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x3d, 0x30, 0x3b, - 0x5f, 0x3c, 0x65, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x5f, - 0x2b, 0x2b, 0x29, 0x72, 0x74, 0x28, 0x65, 0x5b, 0x5f, 0x5d, 0x2c, 0x65, - 0x5b, 0x2b, 0x2b, 0x5f, 0x5d, 0x2c, 0x65, 0x5b, 0x2b, 0x2b, 0x5f, 0x5d, - 0x29, 0x3b, 0x43, 0x2e, 0x5f, 0x5f, 0x63, 0x26, 0x26, 0x43, 0x2e, 0x5f, - 0x5f, 0x63, 0x28, 0x6e, 0x2c, 0x74, 0x29, 0x2c, 0x74, 0x2e, 0x73, 0x6f, - 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x3d, 0x6e, 0x2e, - 0x5f, 0x5f, 0x68, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x68, 0x3d, 0x5b, 0x5d, - 0x2c, 0x74, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x74, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x29, 0x7d, 0x63, 0x61, - 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, 0x43, 0x2e, 0x5f, 0x5f, 0x65, - 0x28, 0x74, 0x2c, 0x6e, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x7d, 0x7d, 0x29, - 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6f, - 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, - 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, 0x29, 0x7b, 0x76, 0x61, 0x72, - 0x20, 0x73, 0x2c, 0x6c, 0x2c, 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 
0x70, - 0x2c, 0x64, 0x2c, 0x76, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2c, 0x79, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2c, 0x6d, - 0x3d, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x22, - 0x73, 0x76, 0x67, 0x22, 0x3d, 0x3d, 0x3d, 0x6d, 0x26, 0x26, 0x28, 0x69, - 0x3d, 0x21, 0x30, 0x29, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x6f, - 0x29, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x6f, - 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, - 0x69, 0x66, 0x28, 0x28, 0x61, 0x3d, 0x6f, 0x5b, 0x73, 0x5d, 0x29, 0x26, - 0x26, 0x22, 0x73, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, - 0x74, 0x65, 0x22, 0x69, 0x6e, 0x20, 0x61, 0x3d, 0x3d, 0x21, 0x21, 0x6d, - 0x26, 0x26, 0x28, 0x6d, 0x3f, 0x61, 0x2e, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x3d, 0x3d, 0x6d, 0x3a, 0x33, 0x3d, 0x3d, - 0x3d, 0x61, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, - 0x29, 0x7b, 0x74, 0x3d, 0x61, 0x2c, 0x6f, 0x5b, 0x73, 0x5d, 0x3d, 0x6e, - 0x75, 0x6c, 0x6c, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x69, 0x66, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x74, 0x29, 0x7b, 0x69, 0x66, - 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6d, 0x29, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x78, 0x74, - 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x79, 0x29, 0x3b, 0x74, 0x3d, 0x69, 0x3f, - 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4e, 0x53, - 0x28, 0x22, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77, 0x77, - 0x2e, 0x77, 0x33, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x32, 0x30, 0x30, 0x30, - 0x2f, 0x73, 0x76, 0x67, 0x22, 0x2c, 0x6d, 0x29, 0x3a, 0x64, 0x6f, 0x63, - 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x28, 0x6d, 0x2c, 0x79, 0x2e, - 0x69, 0x73, 0x26, 0x26, 0x79, 0x29, 0x2c, 0x6f, 0x3d, 0x6e, 0x75, 0x6c, - 0x6c, 0x2c, 0x75, 0x3d, 0x21, 0x31, 0x7d, 0x69, 0x66, 0x28, 0x6e, 0x75, - 0x6c, 0x6c, 0x3d, 0x3d, 0x3d, 0x6d, 0x29, 0x76, 0x3d, 0x3d, 0x3d, 0x79, - 0x7c, 0x7c, 0x75, 0x26, 0x26, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3d, - 0x3d, 0x3d, 0x79, 0x7c, 0x7c, 0x28, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x3d, 0x79, 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x69, 0x66, 0x28, - 0x6f, 0x3d, 0x6f, 0x26, 0x26, 0x78, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, - 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x73, - 0x29, 0x2c, 0x76, 0x3d, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x7c, - 0x7c, 0x54, 0x2c, 0x21, 0x75, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, - 0x3d, 0x6f, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x3d, 0x7b, 0x7d, 0x2c, - 0x73, 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x74, 0x2e, 0x61, 0x74, 0x74, 0x72, - 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, - 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x76, 0x5b, 0x28, 0x61, 0x3d, 0x74, - 0x2e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x5b, - 0x73, 0x5d, 0x29, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3d, 0x61, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x73, 0x20, - 0x69, 0x6e, 0x20, 0x76, 0x29, 0x61, 0x3d, 0x76, 0x5b, 0x73, 0x5d, 0x2c, - 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, - 0x73, 0x7c, 0x7c, 0x28, 0x22, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, - 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 
0x72,
-    [... remainder of the hex-encoded index_js byte array (generated dump of the minified index.js bundle) elided ...]
-};
-unsigned int index_js_len = 22800;
+const char index_js[] = R"LITERAL(
+function t(){throw new Error("Cycle detected")}const n=Symbol.for("preact-signals");function e(){if(f>1){f--;return}let t,n=!1;while(void 0!==o){let _=o;o=void 0;s++;while(void 0!==_){const i=_.o;_.o=void 0;_.f&=-3;if(!(8&_.f)&&p(_))try{_.c()}catch(e){if(!n){t=e;n=!0}}_=i}}s=0;f--;if(n)throw t}function _(t){if(f>0)return t();f++;try{return t()}finally{e()}}let i,o,r=0;function u(t){if(r>0)return t();const n=i;i=void 0;r++;try{return t()}finally{r--;i=n}}let f=0,s=0,l=0;function c(t){if(void 0===i)return;let n=t.n;if(void 0===n||n.t!==i){n={i:0,S:t,p:i.s,n:void 0,t:i,e:void 0,x:void 0,r:n};if(void 0!==i.s)i.s.n=n;i.s=n;t.n=n;if(32&i.f)t.S(n);return n}else if(-1===n.i){n.i=0;if(void 0!==n.n){n.n.p=n.p;if(void 0!==n.p)n.p.n=n.n;n.p=i.s;n.n=void 0;i.s.n=n;i.s=n}return n}}function h(t){this.v=t;this.i=0;this.n=void 0;this.t=void 0}h.prototype.brand=n;h.prototype.h=function(){return!0};h.prototype.S=function(t){if(this.t!==t&&void 0===t.e){t.x=this.t;if(void 0!==this.t)this.t.e=t;this.t=t}};h.prototype.U=function(t){if(void 0!==this.t){const n=t.e,e=t.x;if(void 0!==n){n.x=e;t.e=void 0}if(void 
0!==e){e.e=n;t.x=void 0}if(t===this.t)this.t=e}};h.prototype.subscribe=function(t){const n=this;return w((function(){const e=n.value,_=32&this.f;this.f&=-33;try{t(e)}finally{this.f|=_}}))};h.prototype.valueOf=function(){return this.value};h.prototype.toString=function(){return this.value+""};h.prototype.toJSON=function(){return this.value};h.prototype.peek=function(){return this.v};Object.defineProperty(h.prototype,"value",{get(){const t=c(this);if(void 0!==t)t.i=this.i;return this.v},set(n){if(i instanceof y)!function(){throw new Error("Computed cannot have side-effects")}();if(n!==this.v){if(s>100)t();this.v=n;this.i++;l++;f++;try{for(let t=this.t;void 0!==t;t=t.x)t.t.N()}finally{e()}}}});function a(t){return new h(t)}function p(t){for(let n=t.s;void 0!==n;n=n.n)if(n.S.i!==n.i||!n.S.h()||n.S.i!==n.i)return!0;return!1}function d(t){for(let n=t.s;void 0!==n;n=n.n){const e=n.S.n;if(void 0!==e)n.r=e;n.S.n=n;n.i=-1;if(void 0===n.n){t.s=n;break}}}function v(t){let n,e=t.s;while(void 0!==e){const t=e.p;if(-1===e.i){e.S.U(e);if(void 0!==t)t.n=e.n;if(void 0!==e.n)e.n.p=t}else n=e;e.S.n=e.r;if(void 0!==e.r)e.r=void 0;e=t}t.s=n}function y(t){h.call(this,void 0);this.x=t;this.s=void 0;this.g=l-1;this.f=4}(y.prototype=new h).h=function(){this.f&=-3;if(1&this.f)return!1;if(32==(36&this.f))return!0;this.f&=-5;if(this.g===l)return!0;this.g=l;this.f|=1;if(this.i>0&&!p(this)){this.f&=-2;return!0}const t=i;try{d(this);i=this;const t=this.x();if(16&this.f||this.v!==t||0===this.i){this.v=t;this.f&=-17;this.i++}}catch(t){this.v=t;this.f|=16;this.i++}i=t;v(this);this.f&=-2;return!0};y.prototype.S=function(t){if(void 0===this.t){this.f|=36;for(let t=this.s;void 0!==t;t=t.n)t.S.S(t)}h.prototype.S.call(this,t)};y.prototype.U=function(t){if(void 0!==this.t){h.prototype.U.call(this,t);if(void 0===this.t){this.f&=-33;for(let t=this.s;void 0!==t;t=t.n)t.S.U(t)}}};y.prototype.N=function(){if(!(2&this.f)){this.f|=6;for(let t=this.t;void 0!==t;t=t.x)t.t.N()}};y.prototype.peek=function(){if(!this.h())t();if(16&this.f)throw this.v;return this.v};Object.defineProperty(y.prototype,"value",{get(){if(1&this.f)t();const n=c(this);this.h();if(void 0!==n)n.i=this.i;if(16&this.f)throw this.v;return this.v}});function m(t){return new y(t)}function g(t){const n=t.u;t.u=void 0;if("function"==typeof n){f++;const _=i;i=void 0;try{n()}catch(n){t.f&=-2;t.f|=8;b(t);throw n}finally{i=_;e()}}}function b(t){for(let n=t.s;void 0!==n;n=n.n)n.S.U(n);t.x=void 0;t.s=void 0;g(t)}function k(t){if(i!==this)throw new Error("Out-of-order effect");v(this);i=t;this.f&=-2;if(8&this.f)b(this);e()}function S(t){this.x=t;this.u=void 0;this.s=void 0;this.o=void 0;this.f=32}S.prototype.c=function(){const t=this.S();try{if(8&this.f)return;if(void 0===this.x)return;const n=this.x();if("function"==typeof n)this.u=n}finally{t()}};S.prototype.S=function(){if(1&this.f)t();this.f|=1;this.f&=-9;g(this);d(this);f++;const n=i;i=this;return k.bind(this,n)};S.prototype.N=function(){if(!(2&this.f)){this.f|=2;this.o=o;o=this}};S.prototype.d=function(){this.f|=8;if(!(1&this.f))b(this)};function w(t){const n=new S(t);try{n.c()}catch(t){n.d();throw t}return n.d.bind(n)}var x,C,E,U,H,P,N,$,D,T={},V=[],A=/acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i,F=Array.isArray;function M(t,n){for(var e in n)t[e]=n[e];return t}function W(t){var n=t.parentNode;n&&n.removeChild(t)}function L(t,n,e){var _,i,o,r={};for(o in n)"key"==o?_=n[o]:"ref"==o?i=n[o]:r[o]=n[o];if(arguments.length>2&&(r.children=arguments.length>3?x.call(arguments,2):e),"function"==typeof 
t&&null!=t.defaultProps)for(o in t.defaultProps)void 0===r[o]&&(r[o]=t.defaultProps[o]);return O(t,r,_,i,null)}function O(t,n,e,_,i){var o={type:t,props:n,key:e,ref:_,__k:null,__:null,__b:0,__e:null,__d:void 0,__c:null,constructor:void 0,__v:null==i?++E:i,__i:-1,__u:0};return null==i&&null!=C.vnode&&C.vnode(o),o}function R(){return{current:null}}function j(t){return t.children}function I(t,n){this.props=t,this.context=n}function q(t,n){if(null==n)return t.__?q(t.__,t.__i+1):null;for(var e;nn&&H.sort($));z.__r=0}function J(t,n,e,_,i,o,r,u,f,s,l){var c,h,a,p,d,v=_&&_.__k||V,y=n.length;for(e.__d=f,K(e,n,v),f=e.__d,c=0;c0?O(i.type,i.props,i.key,i.ref?i.ref:null,i.__v):i)?(i.__=t,i.__b=t.__b+1,u=Y(i,e,r=_+c,l),i.__i=u,o=null,-1!==u&&(l--,(o=e[u])&&(o.__u|=131072)),null==o||null===o.__v?(-1==u&&c--,"function"!=typeof i.type&&(i.__u|=65536)):u!==r&&(u===r+1?c++:u>r?l>f-r?c+=u-r:c--:c=u(null!=f&&0==(131072&f.__u)?1:0))for(;r>=0||u=0){if((f=n[r])&&0==(131072&f.__u)&&i==f.key&&o===f.type)return r;r--}if(u2&&(u.children=arguments.length>3?x.call(arguments,2):e),O(t.type,u,_||t.key,i||t.ref,null)}function ht(t,n){var e={__c:n="__cC"+D++,__:t,Consumer:function(t,n){return t.children(n)},Provider:function(t){var e,_;return this.getChildContext||(e=[],(_={})[n]=this,this.getChildContext=function(){return _},this.shouldComponentUpdate=function(t){this.props.value!==t.value&&e.some((function(t){t.__e=!0,G(t)}))},this.sub=function(t){e.push(t);var n=t.componentWillUnmount;t.componentWillUnmount=function(){e.splice(e.indexOf(t),1),n&&n.call(t)}}),t.children}};return e.Provider.__=e.Consumer.contextType=e}x=V.slice,C={__e:function(t,n,e,_){for(var i,o,r;n=n.__;)if((i=n.__c)&&!i.__)try{if((o=i.constructor)&&null!=o.getDerivedStateFromError&&(i.setState(o.getDerivedStateFromError(t)),r=i.__d),null!=i.componentDidCatch&&(i.componentDidCatch(t,_||{}),r=i.__d),r)return i.__E=i}catch(n){t=n}throw t}},E=0,U=function(t){return null!=t&&null==t.constructor},I.prototype.setState=function(t,n){var e;e=null!=this.__s&&this.__s!==this.state?this.__s:this.__s=M({},this.state),"function"==typeof t&&(t=t(M({},e),this.props)),t&&M(e,t),null!=t&&this.__v&&(n&&this._sb.push(n),G(this))},I.prototype.forceUpdate=function(t){this.__v&&(this.__e=!0,t&&this.__h.push(t),G(this))},I.prototype.render=j,H=[],N="function"==typeof Promise?Promise.prototype.then.bind(Promise.resolve()):setTimeout,$=function(t,n){return t.__v.__b-n.__v.__b},z.__r=0,D=0;var at,pt,dt,vt,yt=0,mt=[],gt=[],bt=C.__b,kt=C.__r,St=C.diffed,wt=C.__c,xt=C.unmount;function Ct(t,n){C.__h&&C.__h(pt,t,yt||n),yt=0;var e=pt.__H||(pt.__H={__:[],__h:[]});return t>=e.__.length&&e.__.push({__V:gt}),e.__[t]}function Et(t){return yt=1,Ut(qt,t)}function Ut(t,n,e){var _=Ct(at++,2);if(_.t=t,!_.__c&&(_.__=[e?e(n):qt(void 0,n),function(t){var n=_.__N?_.__N[0]:_.__[0],e=_.t(n,t);n!==e&&(_.__N=[e,_.__[1]],_.__c.setState({}))}],_.__c=pt,!pt.u)){var i=function(t,n,e){if(!_.__c.__H)return!0;var i=_.__c.__H.__.filter((function(t){return t.__c}));if(i.every((function(t){return!t.__N})))return!o||o.call(this,t,n,e);var r=!1;return i.forEach((function(t){if(t.__N){var n=t.__[0];t.__=t.__N,t.__N=void 0,n!==t.__[0]&&(r=!0)}})),!(!r&&_.__c.props===t)&&(!o||o.call(this,t,n,e))};pt.u=!0;var o=pt.shouldComponentUpdate,r=pt.componentWillUpdate;pt.componentWillUpdate=function(t,n,e){if(this.__e){var _=o;o=void 0,i(t,n,e),o=_}r&&r.call(this,t,n,e)},pt.shouldComponentUpdate=i}return _.__N||_.__}function Ht(t,n){var e=Ct(at++,3);!C.__s&&It(e.__H,n)&&(e.__=t,e.i=n,pt.__H.__h.push(e))}function Pt(t,n){var 
e=Ct(at++,4);!C.__s&&It(e.__H,n)&&(e.__=t,e.i=n,pt.__h.push(e))}function Nt(t){return yt=5,Dt((function(){return{current:t}}),[])}function $t(t,n,e){yt=6,Pt((function(){return"function"==typeof t?(t(n()),function(){return t(null)}):t?(t.current=n(),function(){return t.current=null}):void 0}),null==e?e:e.concat(t))}function Dt(t,n){var e=Ct(at++,7);return It(e.__H,n)?(e.__V=t(),e.i=n,e.__h=t,e.__V):e.__}function Tt(t,n){return yt=8,Dt((function(){return t}),n)}function Vt(t){var n=pt.context[t.__c],e=Ct(at++,9);return e.c=t,n?(null==e.__&&(e.__=!0,n.sub(pt)),n.props.value):t.__}function At(t,n){C.useDebugValue&&C.useDebugValue(n?n(t):t)}function Ft(t){var n=Ct(at++,10),e=Et();return n.__=t,pt.componentDidCatch||(pt.componentDidCatch=function(t,_){n.__&&n.__(t,_),e[1](t)}),[e[0],function(){e[1](void 0)}]}function Mt(){var t=Ct(at++,11);if(!t.__){for(var n=pt.__v;null!==n&&!n.__m&&null!==n.__;)n=n.__;var e=n.__m||(n.__m=[0,0]);t.__="P"+e[0]+"-"+e[1]++}return t.__}function Wt(){for(var t;t=mt.shift();)if(t.__P&&t.__H)try{t.__H.__h.forEach(Rt),t.__H.__h.forEach(jt),t.__H.__h=[]}catch(u){t.__H.__h=[],C.__e(u,t.__v)}}C.__b=function(t){pt=null,bt&&bt(t)},C.__r=function(t){kt&&kt(t),at=0;var n=(pt=t.__c).__H;n&&(dt===pt?(n.__h=[],pt.__h=[],n.__.forEach((function(t){t.__N&&(t.__=t.__N),t.__V=gt,t.__N=t.i=void 0}))):(n.__h.forEach(Rt),n.__h.forEach(jt),n.__h=[],at=0)),dt=pt},C.diffed=function(t){St&&St(t);var n=t.__c;n&&n.__H&&(n.__H.__h.length&&(1!==mt.push(n)&&vt===C.requestAnimationFrame||((vt=C.requestAnimationFrame)||Ot)(Wt)),n.__H.__.forEach((function(t){t.i&&(t.__H=t.i),t.__V!==gt&&(t.__=t.__V),t.i=void 0,t.__V=gt}))),dt=pt=null},C.__c=function(t,n){n.some((function(t){try{t.__h.forEach(Rt),t.__h=t.__h.filter((function(t){return!t.__||jt(t)}))}catch(l){n.some((function(t){t.__h&&(t.__h=[])})),n=[],C.__e(l,t.__v)}})),wt&&wt(t,n)},C.unmount=function(t){xt&&xt(t);var n,e=t.__c;e&&e.__H&&(e.__H.__.forEach((function(t){try{Rt(t)}catch(t){n=t}})),e.__H=void 0,n&&C.__e(n,e.__v))};var Lt="function"==typeof requestAnimationFrame;function Ot(t){var n,e=function(){clearTimeout(_),Lt&&cancelAnimationFrame(n),setTimeout(t)},_=setTimeout(e,100);Lt&&(n=requestAnimationFrame(e))}function Rt(t){var n=pt,e=t.__c;"function"==typeof e&&(t.__c=void 0,e()),pt=n}function jt(t){var n=pt;t.__c=t.__(),pt=n}function It(t,n){return!t||t.length!==n.length||n.some((function(n,e){return n!==t[e]}))}function qt(t,n){return"function"==typeof n?n(t):n}function Bt(t,n){C[t]=n.bind(null,C[t]||(()=>{}))}let Gt,zt;function Jt(t){if(zt)zt();zt=t&&t.S()}function Kt({data:t}){const n=Xt(t);n.value=t;const e=Dt(()=>{let t=this.__v;while(t=t.__)if(t.__c){t.__c.__$f|=4;break}this.__$u.c=()=>{var t;if(!U(e.peek())&&3===(null==(t=this.base)?void 0:t.nodeType))this.base.data=e.peek();else{this.__$f|=1;this.setState({})}};return m(()=>{let t=n.value.value;return 0===t?0:!0===t?"":t||""})},[]);return e.value}Kt.displayName="_st";Object.defineProperties(h.prototype,{constructor:{configurable:!0,value:void 0},type:{configurable:!0,value:Kt},props:{configurable:!0,get(){return{data:this}}},__b:{configurable:!0,value:1}});Bt("__b",(t,n)=>{if("string"==typeof n.type){let t,e=n.props;for(let _ in e){if("children"===_)continue;let i=e[_];if(i instanceof h){if(!t)n.__np=t={};t[_]=i;e[_]=i.peek()}}}t(n)});Bt("__r",(t,n)=>{Jt();let e,_=n.__c;if(_){_.__$f&=-2;e=_.__$u;if(void 0===e)_.__$u=e=function(t){let n;w((function(){n=this}));n.c=()=>{_.__$f|=1;_.setState({})};return n}()}Gt=_;Jt(e);t(n)});Bt("__e",(t,n,e,_)=>{Jt();Gt=void 
0;t(n,e,_)});Bt("diffed",(t,n)=>{Jt();Gt=void 0;let e;if("string"==typeof n.type&&(e=n.__e)){let t=n.__np,_=n.props;if(t){let n=e.U;if(n)for(let e in n){let _=n[e];if(void 0!==_&&!(e in t)){_.d();n[e]=void 0}}else{n={};e.U=n}for(let i in t){let o=n[i],r=t[i];if(void 0===o){o=Qt(e,i,r,_);n[i]=o}else o.o(r,_)}}}t(n)});function Qt(t,n,e,_){const i=n in t&&void 0===t.ownerSVGElement,o=a(e);return{o:(t,n)=>{o.value=t;_=n},d:w(()=>{const e=o.value.value;if(_[n]!==e){_[n]=e;if(i)t[n]=e;else if(e)t.setAttribute(n,e);else t.removeAttribute(n)}})}}Bt("unmount",(t,n)=>{if("string"==typeof n.type){let t=n.__e;if(t){const n=t.U;if(n){t.U=void 0;for(let t in n){let e=n[t];if(e)e.d()}}}}else{let t=n.__c;if(t){const n=t.__$u;if(n){t.__$u=void 0;n.d()}}}t(n)});Bt("__h",(t,n,e,_)=>{if(_<3||9===_)n.__$f|=2;t(n,e,_)});I.prototype.shouldComponentUpdate=function(t,n){const e=this.__$u;if(!(e&&void 0!==e.s||4&this.__$f))return!0;if(3&this.__$f)return!0;for(let _ in n)return!0;for(let _ in t)if("__source"!==_&&t[_]!==this.props[_])return!0;for(let _ in this.props)if(!(_ in t))return!0;return!1};function Xt(t){return Dt(()=>a(t),[])}function Yt(t){const n=Nt(t);n.current=t;Gt.__$f|=4;return Dt(()=>m(()=>n.current()),[])}function Zt(t){const n=Nt(t);n.current=t;Ht(()=>w(()=>n.current()),[])}var tn=function(t,n,e,_){var i;n[0]=0;for(var o=1;o=5&&((i||!t&&5===_)&&(r.push(_,0,i,e),_=6),t&&(r.push(_,t,0,e),_=6)),i=""},f=0;f"===n?(_=1,i=""):i=n+i[0]:o?n===o?o="":i+=n:'"'===n||"'"===n?o=n:">"===n?(u(),_=1):_&&("="===n?(_=5,e=i,i=""):"/"===n&&(_<5||">"===t[f][s+1])?(u(),3===_&&(r=r[0]),_=r,(r=r[0]).push(2,0,_),_=0):" "===n||"\t"===n||"\n"===n||"\r"===n?(u(),_=2):i+=n),3===_&&"!--"===i&&(_=4,r=r[0])}return u(),r}(t)),n),arguments,[])).length>1?n:n[0]}var _n=en.bind(L);export{I as Component,j as Fragment,h as Signal,_ as batch,ct as cloneElement,m as computed,ht as createContext,L as createElement,R as createRef,w as effect,L as h,_n as html,lt as hydrate,U as isValidElement,C as options,st as render,a as signal,X as toChildArray,u as untracked,Tt as useCallback,Yt as useComputed,Vt as useContext,At as useDebugValue,Ht as useEffect,Ft as useErrorBoundary,Mt as useId,$t as useImperativeHandle,Pt as useLayoutEffect,Dt as useMemo,Ut as useReducer,Nt as useRef,Xt as useSignal,Zt as useSignalEffect,Et as useState}; +)LITERAL"; +unsigned int index_js_len = sizeof(index_js); diff --git a/examples/server/json-schema-to-grammar.mjs.hpp b/examples/server/json-schema-to-grammar.mjs.hpp index 0a05c369d..83b22d670 100644 --- a/examples/server/json-schema-to-grammar.mjs.hpp +++ b/examples/server/json-schema-to-grammar.mjs.hpp @@ -1,311 +1,115 @@ -unsigned char json_schema_to_grammar_mjs[] = { - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, - 0x52, 0x55, 0x4c, 0x45, 0x20, 0x3d, 0x20, 0x27, 0x22, 0x20, 0x22, 0x3f, - 0x27, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x52, - 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, - 0x53, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x62, 0x6f, 0x6f, 0x6c, - 0x65, 0x61, 0x6e, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x74, 0x72, 0x75, 0x65, - 0x22, 0x20, 0x7c, 0x20, 0x22, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x22, 0x29, - 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x6e, - 0x75, 0x6d, 0x62, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, 0x22, - 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, 0x5b, - 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, 0x29, - 0x29, 0x20, 0x28, 0x22, 0x2e, 0x22, 0x20, 
0x5b, 0x30, 0x2d, 0x39, 0x5d, - 0x2b, 0x29, 0x3f, 0x20, 0x28, 0x5b, 0x65, 0x45, 0x5d, 0x20, 0x5b, 0x2d, - 0x2b, 0x5d, 0x3f, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2b, 0x29, 0x3f, - 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x69, - 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, - 0x22, 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, - 0x5b, 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, - 0x29, 0x29, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, - 0x20, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x60, 0x20, 0x22, - 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x5b, 0x5e, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, 0x5d, 0x20, - 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x22, 0x5c, - 0x5c, 0x5c, 0x5c, 0x22, 0x20, 0x28, 0x5b, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, - 0x2f, 0x62, 0x66, 0x6e, 0x72, 0x74, 0x5d, 0x20, 0x7c, 0x20, 0x22, 0x75, - 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, - 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, - 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, - 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, - 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2a, 0x20, - 0x22, 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, - 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x20, 0x27, 0x22, - 0x6e, 0x75, 0x6c, 0x6c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, - 0x2c, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x52, 0x55, 0x4c, 0x45, - 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, 0x52, 0x45, 0x20, 0x3d, 0x20, - 0x2f, 0x5b, 0x5e, 0x5c, 0x64, 0x41, 0x2d, 0x5a, 0x61, 0x2d, 0x7a, 0x2d, - 0x5d, 0x2b, 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, - 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, 0x52, - 0x45, 0x20, 0x3d, 0x20, 0x2f, 0x5b, 0x5c, 0x6e, 0x5c, 0x72, 0x22, 0x5d, - 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x52, - 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, - 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x53, 0x20, 0x3d, 0x20, - 0x7b, 0x27, 0x5c, 0x72, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x72, 0x27, - 0x2c, 0x20, 0x27, 0x5c, 0x6e, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x6e, - 0x27, 0x2c, 0x20, 0x27, 0x22, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x22, - 0x27, 0x7d, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, - 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, - 0x72, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, - 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x7c, - 0x7c, 0x20, 0x7b, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x20, 0x3d, 0x20, - 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, - 0x65, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 
0x27, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x27, 0x2c, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, 0x52, 0x55, - 0x4c, 0x45, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, - 0x61, 0x6c, 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x4a, 0x53, - 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, - 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, - 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, - 0x52, 0x45, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x20, - 0x3d, 0x3e, 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, - 0x49, 0x54, 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, - 0x45, 0x53, 0x5b, 0x6d, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x60, 0x22, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x7d, - 0x22, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x5f, - 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, 0x61, 0x6d, 0x65, - 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, - 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, - 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, - 0x52, 0x45, 0x2c, 0x20, 0x27, 0x2d, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, - 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, 0x65, 0x73, - 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x28, - 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, - 0x69, 0x20, 0x3d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, - 0x60, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, - 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, 0x60, 0x24, 0x7b, - 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x69, 0x7d, - 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 
0x5f, 0x72, 0x75, 0x6c, 0x65, - 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x6b, 0x65, 0x79, 0x2c, 0x20, 0x72, - 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, - 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x74, 0x79, 0x70, - 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x7c, 0x7c, 0x20, 0x27, 0x72, 0x6f, 0x6f, - 0x74, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, - 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x6e, 0x65, 0x4f, - 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, - 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, - 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, - 0x6f, 0x6e, 0x65, 0x4f, 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x2e, 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x2e, 0x6d, - 0x61, 0x70, 0x28, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, - 0x73, 0x69, 0x74, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, - 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, - 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, - 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x72, - 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, - 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, - 0x61, 0x6c, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x65, - 0x6e, 0x75, 0x6d, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, - 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x65, 0x6e, 0x75, 0x6d, - 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x76, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x5f, 0x66, 0x6f, 0x72, 
0x6d, 0x61, 0x74, 0x4c, 0x69, - 0x74, 0x65, 0x72, 0x61, 0x6c, 0x28, 0x76, 0x29, 0x29, 0x2e, 0x6a, 0x6f, - 0x69, 0x6e, 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, - 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x27, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x27, 0x20, 0x26, 0x26, - 0x20, 0x27, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, - 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x54, 0x4f, 0x44, 0x4f, 0x3a, 0x20, 0x60, 0x72, 0x65, 0x71, 0x75, 0x69, - 0x72, 0x65, 0x64, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, - 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, - 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, - 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, - 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, - 0x72, 0x6f, 0x70, 0x50, 0x61, 0x69, 0x72, 0x73, 0x20, 0x3d, 0x20, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, - 0x73, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, - 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x29, 0x2e, 0x73, 0x6f, 0x72, - 0x74, 0x28, 0x28, 0x61, 0x2c, 0x20, 0x62, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, - 0x20, 0x73, 0x6f, 0x72, 0x74, 0x20, 0x62, 0x79, 0x20, 0x70, 0x6f, 0x73, - 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, - 0x70, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x28, 0x69, 0x66, 0x20, - 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x65, 0x64, 0x29, 0x20, 0x74, - 0x68, 0x65, 0x6e, 0x20, 0x62, 0x79, 0x20, 0x6b, 0x65, 0x79, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x3d, 0x20, 0x74, 0x79, - 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, - 0x65, 0x72, 0x5b, 0x61, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x27, 0x20, 0x3f, 0x20, - 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x61, 0x5b, - 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, - 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, - 0x20, 0x3d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, - 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, - 0x5d, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, - 0x72, 0x27, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, - 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, - 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 
0x74, 0x75, 0x72, 0x6e, 0x20, - 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x2d, 0x20, 0x6f, 0x72, 0x64, - 0x65, 0x72, 0x42, 0x20, 0x7c, 0x7c, 0x20, 0x61, 0x5b, 0x30, 0x5d, 0x2e, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, - 0x65, 0x28, 0x62, 0x5b, 0x30, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, - 0x20, 0x27, 0x22, 0x7b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x70, - 0x50, 0x61, 0x69, 0x72, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, - 0x68, 0x28, 0x28, 0x5b, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, - 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x5d, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x70, 0x72, 0x6f, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, - 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, 0x73, - 0x69, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, - 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x4e, - 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x20, 0x3e, 0x20, - 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, 0x27, - 0x20, 0x22, 0x2c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, - 0x2b, 0x3d, 0x20, 0x60, 0x20, 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, - 0x61, 0x6c, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x29, - 0x7d, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x22, 0x3a, 0x22, 0x20, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, - 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, - 0x27, 0x20, 0x22, 0x7d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, - 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, - 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, - 0x6d, 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x61, 0x72, 0x72, 0x61, 0x79, 0x27, - 0x20, 0x26, 0x26, 0x20, 0x27, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x27, 0x20, - 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x4f, - 0x44, 0x4f, 0x20, 0x60, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x49, 0x74, - 0x65, 0x6d, 0x73, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, - 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 
0x70, 0x79, 0x74, 0x68, 0x6f, - 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, - 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x2c, 0x20, 0x60, 0x24, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, - 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, 0x3a, 0x20, 0x22, 0x22, 0x7d, - 0x69, 0x74, 0x65, 0x6d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, - 0x20, 0x3d, 0x20, 0x60, 0x22, 0x5b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x20, 0x28, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, 0x6c, - 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x28, 0x22, 0x2c, 0x22, 0x20, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, - 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x29, 0x2a, 0x29, - 0x3f, 0x20, 0x22, 0x5d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, - 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, - 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x50, 0x52, - 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, - 0x53, 0x5b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, - 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, - 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x55, 0x6e, 0x72, 0x65, 0x63, 0x6f, - 0x67, 0x6e, 0x69, 0x7a, 0x65, 0x64, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x3a, 0x20, 0x24, 0x7b, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, - 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x29, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, - 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, - 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, - 0x3f, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, 0x3a, 0x20, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x50, 0x52, 0x49, 0x4d, 0x49, - 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x53, 0x5b, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x5d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, - 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x67, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x27, 0x27, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, - 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x66, 0x6f, 
0x72, 0x45, 0x61, 0x63, 0x68, - 0x28, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x2b, 0x3d, 0x20, - 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x3a, 0x3a, 0x3d, - 0x20, 0x24, 0x7b, 0x72, 0x75, 0x6c, 0x65, 0x7d, 0x5c, 0x6e, 0x60, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x72, 0x61, 0x6d, - 0x6d, 0x61, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a +const char json_schema_to_grammar_mjs[] = R"LITERAL( +const SPACE_RULE = '" "?'; + +const PRIMITIVE_RULES = { + boolean: '("true" | "false") space', + number: '("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space', + integer: '("-"? ([0-9] | [1-9] [0-9]*)) space', + string: ` "\\"" ( + [^"\\\\] | + "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) + )* "\\"" space`, + null: '"null" space', }; -unsigned int json_schema_to_grammar_mjs_len = 3695; + +const INVALID_RULE_CHARS_RE = /[^\dA-Za-z-]+/g; +const GRAMMAR_LITERAL_ESCAPE_RE = /[\n\r"]/g; +const GRAMMAR_LITERAL_ESCAPES = {'\r': '\\r', '\n': '\\n', '"': '\\"'}; + +export class SchemaConverter { + constructor(propOrder) { + this._propOrder = propOrder || {}; + this._rules = new Map(); + this._rules.set('space', SPACE_RULE); + } + + _formatLiteral(literal) { + const escaped = JSON.stringify(literal).replace( + GRAMMAR_LITERAL_ESCAPE_RE, + m => GRAMMAR_LITERAL_ESCAPES[m] + ); + return `"${escaped}"`; + } + + _addRule(name, rule) { + let escName = name.replace(INVALID_RULE_CHARS_RE, '-'); + let key = escName; + + if (this._rules.has(escName)) { + if (this._rules.get(escName) === rule) { + return key; + } + + let i = 0; + while (this._rules.has(`${escName}${i}`)) { + i += 1; + } + key = `${escName}${i}`; + } + + this._rules.set(key, rule); + return key; + } + + visit(schema, name) { + const schemaType = schema.type; + const ruleName = name || 'root'; + + if (schema.oneOf || schema.anyOf) { + const rule = (schema.oneOf || schema.anyOf).map((altSchema, i) => + this.visit(altSchema, `${name}${name ? "-" : ""}${i}`) + ).join(' | '); + + return this._addRule(ruleName, rule); + } else if ('const' in schema) { + return this._addRule(ruleName, this._formatLiteral(schema.const)); + } else if ('enum' in schema) { + const rule = schema.enum.map(v => this._formatLiteral(v)).join(' | '); + return this._addRule(ruleName, rule); + } else if (schemaType === 'object' && 'properties' in schema) { + // TODO: `required` keyword (from python implementation) + const propOrder = this._propOrder; + const propPairs = Object.entries(schema.properties).sort((a, b) => { + // sort by position in prop_order (if specified) then by key + const orderA = typeof propOrder[a[0]] === 'number' ? propOrder[a[0]] : Infinity; + const orderB = typeof propOrder[b[0]] === 'number' ? propOrder[b[0]] : Infinity; + return orderA - orderB || a[0].localeCompare(b[0]); + }); + + let rule = '"{" space'; + propPairs.forEach(([propName, propSchema], i) => { + const propRuleName = this.visit(propSchema, `${name}${name ? 
"-" : ""}${propName}`); + if (i > 0) { + rule += ' "," space'; + } + rule += ` ${this._formatLiteral(propName)} space ":" space ${propRuleName}`; + }); + rule += ' "}" space'; + + return this._addRule(ruleName, rule); + } else if (schemaType === 'array' && 'items' in schema) { + // TODO `prefixItems` keyword (from python implementation) + const itemRuleName = this.visit(schema.items, `${name}${name ? "-" : ""}item`); + const rule = `"[" space (${itemRuleName} ("," space ${itemRuleName})*)? "]" space`; + return this._addRule(ruleName, rule); + } else { + if (!PRIMITIVE_RULES[schemaType]) { + throw new Error(`Unrecognized schema: ${JSON.stringify(schema)}`); + } + return this._addRule( + ruleName === 'root' ? 'root' : schemaType, + PRIMITIVE_RULES[schemaType] + ); + } + } + + formatGrammar() { + let grammar = ''; + this._rules.forEach((rule, name) => { + grammar += `${name} ::= ${rule}\n`; + }); + return grammar; + } +} +)LITERAL"; +unsigned int json_schema_to_grammar_mjs_len = sizeof(json_schema_to_grammar_mjs); From e6f291d15844398f8326940fe5ad7f2e02b5aa56 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 30 Jan 2024 20:17:30 +0200 Subject: [PATCH 625/859] server : fix context shift (#5195) * server : fix context shift + simplify self-extend * server : take system_tokens into account * server : more n_past fixes * server : rever n_past_se changes --- examples/server/chat.sh | 1 + examples/server/server.cpp | 109 ++++++++++++++++++++----------------- 2 files changed, 60 insertions(+), 50 deletions(-) diff --git a/examples/server/chat.sh b/examples/server/chat.sh index 014360121..da0a6ca68 100755 --- a/examples/server/chat.sh +++ b/examples/server/chat.sh @@ -48,6 +48,7 @@ chat_completion() { top_p: 0.9, n_keep: $n_keep, n_predict: 256, + cache_prompt: true, stop: ["\n### Human:"], stream: true }')" diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 11dd82c33..21bdce8ed 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -185,7 +185,7 @@ struct llama_client_slot llama_sampling_context *ctx_sampling = nullptr; int32_t ga_i = 0; // group-attention state - int32_t ga_n = 1;// group-attention factor + int32_t ga_n = 1; // group-attention factor int32_t ga_w = 512; // group-attention width int32_t n_past_se = 0; // self-extend @@ -219,7 +219,8 @@ struct llama_client_slot sent_token_probs_index = 0; infill = false; ga_i = 0; - n_past_se = 0; + n_past_se = 0; + generated_token_probs.clear(); for (slot_image & img : images) @@ -1227,7 +1228,7 @@ struct llama_server_context std::vector append_tokens = tokenize(json_prompt, false); // has next image for (int i = 0; i < (int) append_tokens.size(); ++i) { - llama_batch_add(batch, append_tokens[i], slot.n_past, { slot.id }, true); + llama_batch_add(batch, append_tokens[i], system_tokens.size() + slot.n_past, { slot.id }, true); slot.n_past += 1; } } @@ -1295,6 +1296,8 @@ struct llama_server_context for (llama_client_slot &slot : slots) { slot.cache_tokens.clear(); + slot.n_past = 0; + slot.n_past_se = 0; } } @@ -1364,26 +1367,26 @@ struct llama_server_context kv_cache_clear(); } return true; - } else { - task_server task; - task.type = TASK_TYPE_NEXT_RESPONSE; - task.target_id = -1; - queue_tasks.post(task); } + task_server task; + task.type = TASK_TYPE_NEXT_RESPONSE; + task.target_id = -1; + queue_tasks.post(task); + for (llama_client_slot &slot : slots) { if (slot.ga_n == 1) { - if (slot.is_processing() && slot.cache_tokens.size() >= (size_t) slot.n_ctx) + if (slot.is_processing() && 
system_tokens.size() + slot.cache_tokens.size() >= (size_t) slot.n_ctx) { // Shift context - const int n_left = slot.n_past - slot.params.n_keep - 1; + const int n_left = system_tokens.size() + slot.n_past - slot.params.n_keep - 1; const int n_discard = n_left / 2; LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, slot.params.n_keep, n_left, n_discard); llama_kv_cache_seq_rm (ctx, slot.id, slot.params.n_keep + 1 , slot.params.n_keep + n_discard + 1); - llama_kv_cache_seq_shift(ctx, slot.id, slot.params.n_keep + 1 + n_discard, slot.n_past, -n_discard); + llama_kv_cache_seq_shift(ctx, slot.id, slot.params.n_keep + 1 + n_discard, system_tokens.size() + slot.n_past, -n_discard); for (size_t i = slot.params.n_keep + 1 + n_discard; i < slot.cache_tokens.size(); i++) { @@ -1429,8 +1432,10 @@ struct llama_server_context slot.i_batch = batch.n_tokens; const int32_t slot_npast = slot.n_past_se > 0 ? slot.n_past_se : slot.n_past; - llama_batch_add(batch, slot.sampled, system_tokens.size() + slot_npast, { slot.id }, true); + // TODO: we always have to take into account the "system_tokens" + // this is not great and needs to be improved somehow + llama_batch_add(batch, slot.sampled, system_tokens.size() + slot_npast, { slot.id }, true); slot.n_past += 1; } @@ -1481,8 +1486,8 @@ struct llama_server_context prefix_tokens.insert(prefix_tokens.begin(), llama_token_prefix(model)); prefix_tokens.insert(prefix_tokens.begin(), llama_token_bos(model)); // always add BOS - prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(model)); - prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); + prefix_tokens.insert(prefix_tokens.end(), llama_token_suffix(model)); + prefix_tokens.insert(prefix_tokens.end(), suffix_tokens.begin(), suffix_tokens.end()); prefix_tokens.push_back(llama_token_middle(model)); prompt_tokens = prefix_tokens; } @@ -1582,8 +1587,8 @@ struct llama_server_context } LOG_VERBOSE("prompt ingested", { - {"n_past", slot.n_past}, - {"cached", tokens_to_str(ctx, slot.cache_tokens.cbegin(), slot.cache_tokens.cbegin() + slot.n_past)}, + {"n_past", slot.n_past}, + {"cached", tokens_to_str(ctx, slot.cache_tokens.cbegin(), slot.cache_tokens.cbegin() + slot.n_past)}, {"to_eval", tokens_to_str(ctx, slot.cache_tokens.cbegin() + slot.n_past, slot.cache_tokens.cend())}, }); @@ -1591,10 +1596,13 @@ struct llama_server_context // process the prefix of first image std::vector prefix_tokens = has_images ? tokenize(slot.images[0].prefix_prompt, add_bos_token) : prompt_tokens; + int32_t slot_npast = slot.n_past_se > 0 ? 
slot.n_past_se : slot.n_past; - int ga_i = slot.ga_i; + + int32_t ga_i = slot.ga_i; int32_t ga_n = slot.ga_n; int32_t ga_w = slot.ga_w; + for (; slot.n_past < (int) prefix_tokens.size(); ++slot.n_past) { if (slot.ga_n != 1) @@ -1606,7 +1614,7 @@ struct llama_server_context } } llama_batch_add(batch, prefix_tokens[slot.n_past], system_tokens.size() + slot_npast, {slot.id }, false); - slot_npast += 1; + slot_npast++; } if (has_images && !ingest_images(slot, n_batch)) @@ -1666,6 +1674,7 @@ struct llama_server_context slot.n_past_se += n_tokens; } } + llama_batch batch_view = { n_tokens, @@ -1782,51 +1791,51 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); if (llama_mlock_supported()) { - printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); + printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); } if (llama_mmap_supported()) { - printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); + printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } - printf(" --numa attempt optimizations that help on some NUMA systems\n"); + printf(" --numa attempt optimizations that help on some NUMA systems\n"); #ifdef LLAMA_SUPPORTS_GPU_OFFLOAD printf(" -ngl N, --n-gpu-layers N\n"); - printf(" number of layers to store in VRAM\n"); + printf(" number of layers to store in VRAM\n"); printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); - printf(" how to split the model across multiple GPUs, one of:\n"); - printf(" - none: use one GPU only\n"); - printf(" - layer (default): split layers and KV across GPUs\n"); - printf(" - row: split rows across GPUs\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); printf(" -ts SPLIT --tensor-split SPLIT\n"); - printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); - printf(" or for intermediate results and KV (with split-mode = row)\n"); + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row)\n"); #endif printf(" -m FNAME, --model FNAME\n"); - printf(" model path (default: %s)\n", params.model.c_str()); + printf(" model path (default: %s)\n", params.model.c_str()); printf(" -a ALIAS, --alias ALIAS\n"); - printf(" set an alias for the model, will be added as `model` field in completion response\n"); - printf(" --lora FNAME apply LoRA adapter (implies --no-mmap)\n"); - printf(" --lora-base FNAME optional model to use as a base for the layers modified by the LoRA adapter\n"); - printf(" --host ip address to listen (default (default: %s)\n", sparams.hostname.c_str()); - printf(" --port PORT port to listen (default (default: %d)\n", sparams.port); - printf(" --path PUBLIC_PATH path from which to serve static files (default %s)\n", sparams.public_path.c_str()); - printf(" --api-key API_KEY optional api key to enhance server security. 
If set, requests must include this key for access.\n"); - printf(" --api-key-file FNAME path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access.\n"); - printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); - printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); - printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); - printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); - printf(" -spf FNAME, --system-prompt-file FNAME\n"); - printf(" Set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); - printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); - printf(" --log-disable disables logging to a file.\n"); + printf(" set an alias for the model, will be added as `model` field in completion response\n"); + printf(" --lora FNAME apply LoRA adapter (implies --no-mmap)\n"); + printf(" --lora-base FNAME optional model to use as a base for the layers modified by the LoRA adapter\n"); + printf(" --host ip address to listen (default (default: %s)\n", sparams.hostname.c_str()); + printf(" --port PORT port to listen (default (default: %d)\n", sparams.port); + printf(" --path PUBLIC_PATH path from which to serve static files (default %s)\n", sparams.public_path.c_str()); + printf(" --api-key API_KEY optional api key to enhance server security. If set, requests must include this key for access.\n"); + printf(" --api-key-file FNAME path to file containing api keys delimited by new lines. If set, requests must include one of the keys for access.\n"); + printf(" -to N, --timeout N server read/write timeout in seconds (default: %d)\n", sparams.read_timeout); + printf(" --embedding enable embedding vector output (default: %s)\n", params.embedding ? "enabled" : "disabled"); + printf(" -np N, --parallel N number of slots for process requests (default: %d)\n", params.n_parallel); + printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); + printf(" -spf FNAME, --system-prompt-file FNAME\n"); + printf(" set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); + printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); + printf(" --log-disable disables logging to a file.\n"); printf("\n"); printf(" --override-kv KEY=TYPE:VALUE\n"); - printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); - printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); - printf(" -gan N, --grp-attn-n N Set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); - printf(" -gaw N, --grp-attn-w N Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); + printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); + printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); + printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); + printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); printf("\n"); } From e0085fdf7c758f0bc2746fc106fb29dd9df959de Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 30 Jan 2024 21:19:26 +0200 Subject: [PATCH 626/859] Revert "server : change deps.sh xxd files to string literals (#5221)" This reverts commit 4003be0e5feef320f3707786f22722b73cff9356. --- examples/server/completion.js.hpp | 651 ++- examples/server/deps.sh | 11 +- examples/server/index.html.hpp | 3829 ++++++++++++----- examples/server/index.js.hpp | 1907 +++++++- .../server/json-schema-to-grammar.mjs.hpp | 424 +- 5 files changed, 5454 insertions(+), 1368 deletions(-) diff --git a/examples/server/completion.js.hpp b/examples/server/completion.js.hpp index 5609ee3bf..fe5f81228 100644 --- a/examples/server/completion.js.hpp +++ b/examples/server/completion.js.hpp @@ -1,204 +1,449 @@ -const char completion_js[] = R"LITERAL( -const paramDefaults = { - stream: true, - n_predict: 500, - temperature: 0.2, - stop: ["
</s>
    "] +unsigned char completion_js[] = { + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x3a, 0x20, 0x74, 0x72, + 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, + 0x69, 0x63, 0x74, 0x3a, 0x20, 0x35, 0x30, 0x30, 0x2c, 0x0a, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x3a, + 0x20, 0x30, 0x2e, 0x32, 0x2c, 0x0a, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, + 0x3a, 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, 0x5d, 0x0a, 0x7d, + 0x3b, 0x0a, 0x0a, 0x6c, 0x65, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x0a, + 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, + 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x61, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, + 0x74, 0x6f, 0x72, 0x2e, 0x20, 0x52, 0x65, 0x63, 0x6f, 0x6d, 0x6d, 0x65, + 0x6e, 0x64, 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x6f, 0x73, + 0x74, 0x20, 0x75, 0x73, 0x65, 0x20, 0x63, 0x61, 0x73, 0x65, 0x73, 0x2e, + 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, + 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, + 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x22, + 0x54, 0x65, 0x6c, 0x6c, 0x20, 0x6d, 0x65, 0x20, 0x61, 0x20, 0x6a, 0x6f, + 0x6b, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, + 0x69, 0x63, 0x74, 0x3a, 0x20, 0x38, 0x30, 0x30, 0x7d, 0x29, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, + 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, + 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x2f, 0x2f, 0x0a, + 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, + 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2a, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, + 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, + 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x3d, 0x20, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x21, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 
0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, + 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x73, 0x2c, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, + 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, 0x2f, 0x63, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x2c, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x3a, 0x20, 0x27, + 0x50, 0x4f, 0x53, 0x54, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x62, + 0x6f, 0x64, 0x79, 0x3a, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x63, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x73, 0x3a, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x27, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x27, + 0x3a, 0x20, 0x27, 0x6b, 0x65, 0x65, 0x70, 0x2d, 0x61, 0x6c, 0x69, 0x76, + 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x27, 0x43, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2d, 0x54, 0x79, 0x70, 0x65, 0x27, + 0x3a, 0x20, 0x27, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x27, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x27, 0x41, 0x63, 0x63, 0x65, 0x70, 0x74, 0x27, + 0x3a, 0x20, 0x27, 0x74, 0x65, 0x78, 0x74, 0x2f, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x2d, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x20, + 0x3f, 0x20, 0x7b, 0x27, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x60, 0x42, 0x65, 0x61, + 0x72, 0x65, 0x72, 0x20, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x7d, 0x60, 0x7d, 0x20, + 0x3a, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x3a, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x0a, 0x20, 0x20, 0x7d, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x62, 0x6f, 0x64, 0x79, 0x2e, 0x67, 0x65, + 0x74, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x54, 0x65, 0x78, + 0x74, 0x44, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x6c, 0x65, 
0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x42, 0x75, 0x66, + 0x66, 0x65, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70, 0x61, 0x72, 0x74, + 0x69, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x72, 0x65, 0x61, 0x64, 0x20, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x6f, 0x6e, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x72, + 0x65, 0x61, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x41, + 0x64, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, + 0x76, 0x65, 0x72, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, + 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x3d, 0x20, 0x6c, 0x65, 0x66, + 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x2b, 0x20, 0x64, 0x65, 0x63, 0x6f, + 0x64, 0x65, 0x72, 0x2e, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x43, 0x68, 0x65, 0x63, 0x6b, 0x20, 0x69, 0x66, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x72, 0x61, 0x63, + 0x74, 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, + 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x73, + 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, 0x65, 0x61, + 0x6b, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x65, 0x6e, 0x64, + 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x5c, 0x6e, 0x27, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, + 0x70, 0x6c, 0x69, 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, + 0x74, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x28, 0x27, 0x5c, 0x6e, 0x27, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x49, + 0x66, 0x20, 0x74, 0x68, 0x65, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, 0x64, + 0x6f, 0x65, 0x73, 0x6e, 0x27, 0x74, 0x20, 0x65, 0x6e, 0x64, 0x20, 0x77, + 0x69, 0x74, 0x68, 0x20, 0x61, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, + 0x72, 0x65, 0x61, 0x6b, 0x2c, 0x20, 0x74, 0x68, 0x65, 0x6e, 0x20, 0x74, + 0x68, 0x65, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x6c, 0x69, 0x6e, 0x65, + 0x20, 0x69, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x0a, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x53, 0x74, 0x6f, 0x72, 0x65, 0x20, 0x69, 0x74, 0x20, 0x69, 0x6e, 0x20, + 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x20, + 0x62, 0x65, 0x20, 0x61, 0x64, 0x64, 0x65, 0x64, 0x20, 0x74, 0x6f, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x6e, 0x65, 0x78, 0x74, 0x20, 0x63, 0x68, 0x75, + 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x61, 0x74, 0x61, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x65, 0x6e, + 0x64, 0x73, 0x57, 0x69, 0x74, 0x68, 0x4c, 0x69, 0x6e, 0x65, 0x42, 0x72, + 0x65, 0x61, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x20, 0x2f, 0x2f, 0x20, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, + 0x66, 0x20, 0x77, 0x65, 0x20, 0x68, 0x61, 0x76, 0x65, 0x20, 0x61, 0x20, + 0x6c, 0x69, 0x6e, 0x65, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, 0x61, + 0x74, 0x20, 0x74, 0x68, 0x65, 0x20, 0x65, 0x6e, 0x64, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x50, 0x61, 0x72, 0x73, 0x65, 0x20, 0x61, 0x6c, + 0x6c, 0x20, 0x73, 0x73, 0x65, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, + 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x64, 0x64, 0x20, 0x74, 0x68, 0x65, + 0x6d, 0x20, 0x74, 0x6f, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x65, 0x67, 0x65, 0x78, 0x20, 0x3d, 0x20, 0x2f, 0x5e, 0x28, 0x5c, + 0x53, 0x2b, 0x29, 0x3a, 0x5c, 0x73, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, + 0x67, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x69, 0x6e, + 0x65, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x67, 0x65, 0x78, 0x2e, 0x65, 0x78, 0x65, 0x63, 0x28, 0x6c, + 0x69, 0x6e, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x5b, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x5b, 0x31, 0x5d, 0x5d, 0x20, 0x3d, 0x20, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x5b, 0x32, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x20, + 0x77, 0x65, 0x20, 0x6b, 0x6e, 0x6f, 0x77, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x20, 0x69, 0x73, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, + 0x70, 0x2c, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, 0x6a, 0x75, 0x73, + 0x74, 0x20, 0x64, 0x65, 0x63, 0x6f, 0x64, 0x65, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6a, 0x73, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, + 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x79, 0x69, + 0x65, 0x6c, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x79, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x69, 0x66, + 0x20, 0x77, 0x65, 0x20, 0x67, 0x6f, 0x74, 0x20, 0x61, 0x20, 0x73, 0x74, + 0x6f, 0x70, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x20, 0x66, 0x72, 0x6f, + 0x6d, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2c, 0x20, 0x77, 0x65, + 0x20, 0x77, 0x69, 0x6c, 0x6c, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x20, + 0x68, 0x65, 0x72, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, 0x6f, + 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, + 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, + 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x66, 0x61, + 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x20, 0x3d, + 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, + 0x63, 0x70, 0x70, 0x20, 0x65, 
0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x24, + 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x60, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x21, 0x3d, 0x3d, + 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, + 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, + 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x6e, 0x20, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x63, 0x61, + 0x6e, 0x20, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x20, + 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, + 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, + 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x6e, + 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, + 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x22, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, + 0x20, 0x28, 0x70, 0x72, 0x6f, 
0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, + 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, + 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, + 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x28, 0x6e, 0x65, 
0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, + 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x74, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, + 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, + 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, + 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, 0x6e, 0x65, 0x22, 0x2c, 0x20, + 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x7b, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x7d, 0x29, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, + 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, 0x74, 0x68, 0x61, 0x74, + 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x73, 0x20, 0x74, 0x6f, + 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x69, + 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x73, + 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, + 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, + 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, + 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x28, 0x28, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, + 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, + 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, + 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, + 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x6e, 0x65, 
0x77, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, + 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x72, 0x65, + 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, + 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, + 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, + 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x2f, 0x2a, 0x2a, 0x0a, 0x20, + 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, + 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, + 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x28, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, + 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, 0x74, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x69, 0x6e, 0x66, 0x6f, 0x20, + 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x77, 0x69, 0x6e, 0x64, + 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x6f, 0x20, 0x6f, 0x6e, + 0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x6c, 0x6c, 
0x61, 0x6d, 0x61, 0x4d, 0x6f, 0x64, 0x65, + 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, + 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, + 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, 0x2f, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0x29, 0x2e, 0x74, 0x68, 0x65, + 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, 0x72, 0x2e, 0x6a, 0x73, 0x6f, + 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a }; - -let generation_settings = null; - - -// Completes the prompt as a generator. Recommended for most use cases. -// -// Example: -// -// import { llama } from '/completion.js' -// -// const request = llama("Tell me a joke", {n_predict: 800}) -// for await (const chunk of request) { -// document.write(chunk.data.content) -// } -// -export async function* llama(prompt, params = {}, config = {}) { - let controller = config.controller; - - if (!controller) { - controller = new AbortController(); - } - - const completionParams = { ...paramDefaults, ...params, prompt }; - - const response = await fetch("/completion", { - method: 'POST', - body: JSON.stringify(completionParams), - headers: { - 'Connection': 'keep-alive', - 'Content-Type': 'application/json', - 'Accept': 'text/event-stream', - ...(params.api_key ? 
{'Authorization': `Bearer ${params.api_key}`} : {}) - }, - signal: controller.signal, - }); - - const reader = response.body.getReader(); - const decoder = new TextDecoder(); - - let content = ""; - let leftover = ""; // Buffer for partially read lines - - try { - let cont = true; - - while (cont) { - const result = await reader.read(); - if (result.done) { - break; - } - - // Add any leftover data to the current chunk of data - const text = leftover + decoder.decode(result.value); - - // Check if the last character is a line break - const endsWithLineBreak = text.endsWith('\n'); - - // Split the text into lines - let lines = text.split('\n'); - - // If the text doesn't end with a line break, then the last line is incomplete - // Store it in leftover to be added to the next chunk of data - if (!endsWithLineBreak) { - leftover = lines.pop(); - } else { - leftover = ""; // Reset leftover if we have a line break at the end - } - - // Parse all sse events and add them to result - const regex = /^(\S+):\s(.*)$/gm; - for (const line of lines) { - const match = regex.exec(line); - if (match) { - result[match[1]] = match[2] - // since we know this is llama.cpp, let's just decode the json in data - if (result.data) { - result.data = JSON.parse(result.data); - content += result.data.content; - - // yield - yield result; - - // if we got a stop token from server, we will break here - if (result.data.stop) { - if (result.data.generation_settings) { - generation_settings = result.data.generation_settings; - } - cont = false; - break; - } - } - if (result.error) { - result.error = JSON.parse(result.error); - if (result.error.content.includes('slot unavailable')) { - // Throw an error to be caught by upstream callers - throw new Error('slot unavailable'); - } else { - console.error(`llama.cpp error: ${result.error.content}`); - } - } - if (result.error) { - result.error = JSON.parse(result.error); - console.error(`llama.cpp error: ${result.error.content}`); - } - } - } - } - } catch (e) { - if (e.name !== 'AbortError') { - console.error("llama error: ", e); - } - throw e; - } - finally { - controller.abort(); - } - - return content; -} - -// Call llama, return an event target that you can subscribe to -// -// Example: -// -// import { llamaEventTarget } from '/completion.js' -// -// const conn = llamaEventTarget(prompt) -// conn.addEventListener("message", (chunk) => { -// document.write(chunk.detail.content) -// }) -// -export const llamaEventTarget = (prompt, params = {}, config = {}) => { - const eventTarget = new EventTarget(); - (async () => { - let content = ""; - for await (const chunk of llama(prompt, params, config)) { - if (chunk.data) { - content += chunk.data.content; - eventTarget.dispatchEvent(new CustomEvent("message", { detail: chunk.data })); - } - if (chunk.data.generation_settings) { - eventTarget.dispatchEvent(new CustomEvent("generation_settings", { detail: chunk.data.generation_settings })); - } - if (chunk.data.timings) { - eventTarget.dispatchEvent(new CustomEvent("timings", { detail: chunk.data.timings })); - } - } - eventTarget.dispatchEvent(new CustomEvent("done", { detail: { content } })); - })(); - return eventTarget; -} - -// Call llama, return a promise that resolves to the completed text. 
This does not support streaming -// -// Example: -// -// llamaPromise(prompt).then((content) => { -// document.write(content) -// }) -// -// or -// -// const content = await llamaPromise(prompt) -// document.write(content) -// -export const llamaPromise = (prompt, params = {}, config = {}) => { - return new Promise(async (resolve, reject) => { - let content = ""; - try { - for await (const chunk of llama(prompt, params, config)) { - content += chunk.data.content; - } - resolve(content); - } catch (error) { - reject(error); - } - }); -}; - -/** - * (deprecated) - */ -export const llamaComplete = async (params, controller, callback) => { - for await (const chunk of llama(params.prompt, params, { controller })) { - callback(chunk); - } -} - -// Get the model info from the server. This is useful for getting the context window and so on. -export const llamaModelInfo = async () => { - if (!generation_settings) { - generation_settings = await fetch("/model.json").then(r => r.json()); - } - return generation_settings; -} -)LITERAL"; -unsigned int completion_js_len = sizeof(completion_js); +unsigned int completion_js_len = 5346; diff --git a/examples/server/deps.sh b/examples/server/deps.sh index c0a9de9f9..ea23e6450 100755 --- a/examples/server/deps.sh +++ b/examples/server/deps.sh @@ -15,13 +15,6 @@ cd $PUBLIC for FILE in $FILES; do echo "generate $FILE.hpp" - # Use C++11 string literals instead of ugly xxd. - f=$(echo $FILE | sed 's/\./_/g' -e 's/-/_/g') - echo "const char $f[] = R\"LITERAL(" > $DIR/$FILE.hpp - cat $FILE >> $DIR/$FILE.hpp - echo ")LITERAL\";" >> $DIR/$FILE.hpp - echo "unsigned int ${f}_len = sizeof($f);" >> $DIR/$FILE.hpp - - #Deprecated old xxd - #xxd -i $FILE > $DIR/$FILE.hpp + # use simple flag for old version of xxd + xxd -i $FILE > $DIR/$FILE.hpp done diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp index 603d12068..20551520e 100644 --- a/examples/server/index.html.hpp +++ b/examples/server/index.html.hpp @@ -1,1038 +1,2791 @@ -const char index_html[] = R"LITERAL( - - - - - - - llama.cpp - chat - - - - - - - -
[... the remaining removed lines of the old index_html raw literal (blank/markup lines stripped in this excerpt) ...]
    - - - - -)LITERAL"; -unsigned int index_html_len = sizeof(index_html); +unsigned char index_html[] = { + 0x3c, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a, 0x3c, 0x68, 0x65, 0x61, + 0x64, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x6d, 0x65, 0x74, 0x61, 0x20, 0x63, + 0x68, 0x61, 0x72, 0x73, 0x65, 0x74, 0x3d, 0x22, 0x55, 0x54, 0x46, 0x2d, + 0x38, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x6d, 0x65, 0x74, 0x61, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x76, 0x69, 0x65, 0x77, 0x70, 0x6f, + 0x72, 0x74, 0x22, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3d, + 0x22, 0x77, 0x69, 0x64, 0x74, 0x68, 0x3d, 0x64, 0x65, 0x76, 0x69, 0x63, + 0x65, 0x2d, 0x77, 0x69, 0x64, 0x74, 0x68, 0x2c, 0x20, 0x69, 0x6e, 0x69, + 0x74, 0x69, 0x61, 0x6c, 0x2d, 0x73, 0x63, 0x61, 0x6c, 0x65, 0x3d, 0x31, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x69, 0x6d, 0x75, 0x6d, 0x2d, 0x73, 0x63, + 0x61, 0x6c, 0x65, 0x3d, 0x31, 0x22, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x3c, 0x6d, 0x65, 0x74, 0x61, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, + 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, + 0x22, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3d, 0x22, 0x6c, + 0x69, 0x67, 0x68, 0x74, 0x20, 0x64, 0x61, 0x72, 0x6b, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x3c, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x3e, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x20, 0x2d, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x3c, 0x2f, 0x74, 0x69, 0x74, 0x6c, 0x65, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x3c, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x62, 0x6f, 0x64, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x6f, 0x6e, 0x74, 0x2d, 0x66, 0x61, 0x6d, 0x69, 0x6c, + 0x79, 0x3a, 0x20, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x2d, 0x75, 0x69, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6e, 0x74, + 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x3a, 0x20, 0x39, 0x30, 0x25, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x23, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x65, 0x6d, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, + 0x79, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x64, 0x69, 0x72, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x63, 0x6f, 0x6c, 0x75, 0x6d, + 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6a, 0x75, 0x73, + 0x74, 0x69, 0x66, 0x79, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x3a, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x2d, 0x62, 0x65, 0x74, 0x77, + 0x65, 0x65, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x33, 0x70, 0x78, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, + 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x64, 0x69, + 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x63, 0x6f, 0x6c, + 0x75, 0x6d, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6a, + 0x75, 0x73, 0x74, 0x69, 0x66, 0x79, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x3a, 0x20, 0x73, 0x70, 0x61, 
0x63, 0x65, 0x2d, 0x62, 0x65, + 0x74, 0x77, 0x65, 0x65, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, 0x31, 0x65, 0x6d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x67, + 0x72, 0x6f, 0x77, 0x3a, 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2d, 0x79, + 0x3a, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x31, + 0x70, 0x78, 0x20, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x20, 0x23, 0x63, 0x63, + 0x63, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, + 0x64, 0x65, 0x72, 0x2d, 0x72, 0x61, 0x64, 0x69, 0x75, 0x73, 0x3a, 0x20, + 0x35, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, + 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x62, 0x6f, 0x64, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x61, 0x78, 0x2d, 0x77, 0x69, 0x64, 0x74, 0x68, + 0x3a, 0x20, 0x36, 0x30, 0x30, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x69, 0x6e, 0x2d, 0x77, 0x69, 0x64, 0x74, 0x68, + 0x3a, 0x20, 0x33, 0x30, 0x30, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x2d, 0x68, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x3a, 0x20, 0x31, 0x2e, 0x32, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x20, 0x61, 0x75, 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, 0x20, + 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x70, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x66, 0x6c, 0x6f, 0x77, + 0x2d, 0x77, 0x72, 0x61, 0x70, 0x3a, 0x20, 0x62, 0x72, 0x65, 0x61, 0x6b, + 0x2d, 0x77, 0x6f, 0x72, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x77, 0x6f, 0x72, 0x64, 0x2d, 0x77, 0x72, 0x61, 0x70, 0x3a, 0x20, + 0x62, 0x72, 0x65, 0x61, 0x6b, 0x2d, 0x77, 0x6f, 0x72, 0x64, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x79, 0x70, 0x68, 0x65, 0x6e, + 0x73, 0x3a, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x2d, 0x74, 0x6f, + 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x2d, 0x62, + 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x23, 0x77, 0x72, 0x69, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x72, 0x6d, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, + 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x31, 0x65, 0x6d, 0x20, 0x30, 0x20, 0x30, + 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, + 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, + 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x63, + 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x69, 0x67, 0x6e, + 0x2d, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x3a, 
0x20, 0x73, 0x74, 0x72, 0x65, + 0x74, 0x63, 0x68, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x69, 0x67, 0x68, 0x74, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x64, 0x69, 0x72, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x72, 0x6f, 0x77, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, + 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6a, 0x75, 0x73, 0x74, 0x69, 0x66, 0x79, 0x2d, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x65, + 0x6e, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, + 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, + 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x65, 0x74, 0x2e, 0x74, 0x77, 0x6f, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x3a, 0x20, 0x67, 0x72, 0x69, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x67, 0x72, 0x69, 0x64, 0x2d, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x61, 0x20, 0x61, 0x22, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, 0x31, + 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x2e, + 0x74, 0x68, 0x72, 0x65, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x67, + 0x72, 0x69, 0x64, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, + 0x72, 0x69, 0x64, 0x2d, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x3a, 0x20, 0x22, 0x61, 0x20, 0x61, 0x20, 0x61, 0x22, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x61, 0x70, 0x3a, 0x20, 0x31, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, 0x65, 0x72, + 0x3a, 0x20, 0x31, 0x70, 0x78, 0x20, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x20, + 0x23, 0x61, 0x61, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x62, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x2d, 0x72, 0x61, 0x64, 0x69, 0x75, + 0x73, 0x3a, 0x20, 0x34, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, + 0x2e, 0x35, 0x65, 0x6d, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x20, 0x30, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, + 0x69, 0x6e, 0x2d, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6e, 0x74, 0x2d, 0x77, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x62, 0x6f, 0x6c, 0x64, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x6d, 0x61, 0x72, 0x67, 0x69, + 0x6e, 0x3a, 0x20, 0x2d, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x20, 0x2d, 0x30, + 0x2e, 0x35, 0x65, 0x6d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, + 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x75, 0x72, 0x73, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x6f, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x5b, + 0x6f, 0x70, 0x65, 0x6e, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, + 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x2d, 0x73, + 0x65, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, + 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, 0x2e, 0x33, 0x65, + 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, + 0x64, 0x65, 0x72, 0x2d, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x3a, 0x20, + 0x31, 0x70, 0x78, 0x20, 0x73, 0x6f, 0x6c, 0x69, 0x64, 0x20, 0x23, 0x63, + 0x63, 0x63, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x3a, 0x20, 0x61, 0x62, 0x73, 0x6f, 0x6c, 0x75, 0x74, 0x65, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, + 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, + 0x77, 0x68, 0x69, 0x74, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x30, 0x2e, + 0x32, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, + 0x6f, 0x78, 0x2d, 0x73, 0x68, 0x61, 0x64, 0x6f, 0x77, 0x3a, 0x20, 0x30, + 0x20, 0x30, 0x20, 0x31, 0x30, 0x70, 0x78, 0x20, 0x72, 0x67, 0x62, 0x61, + 0x28, 0x30, 0x2c, 0x20, 0x30, 0x2c, 0x20, 0x30, 0x2c, 0x20, 0x30, 0x2e, + 0x31, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, + 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x35, 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x6c, 0x65, 0x78, 0x2d, 0x67, 0x72, 0x6f, + 0x77, 0x3a, 0x20, 0x31, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x77, 0x69, 0x64, 0x74, 0x68, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x70, 0x72, 0x65, 0x20, 0x63, 0x6f, 0x64, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, + 0x3a, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, + 0x64, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x32, 0x32, + 0x32, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x64, 0x64, 0x64, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x64, + 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x6e, 0x74, 0x2d, 0x66, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x3a, 0x20, 0x6d, + 0x6f, 0x6e, 0x6f, 0x73, 0x70, 0x61, 0x63, 
0x65, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, + 0x20, 0x30, 0x2e, 0x31, 0x65, 0x6d, 0x20, 0x30, 0x2e, 0x33, 0x65, 0x6d, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x2d, 0x72, 0x61, 0x64, 0x69, 0x75, 0x73, 0x3a, 0x20, 0x33, + 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, + 0x35, 0x65, 0x6d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2e, 0x73, 0x6c, 0x69, 0x6d, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x68, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, + 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2d, 0x61, 0x6c, 0x69, 0x67, 0x6e, + 0x3a, 0x20, 0x63, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6f, + 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x66, 0x6f, 0x6e, 0x74, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x3a, 0x20, 0x38, + 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x38, 0x38, 0x38, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6d, + 0x6f, 0x64, 0x65, 0x2d, 0x63, 0x68, 0x61, 0x74, 0x20, 0x74, 0x65, 0x78, + 0x74, 0x61, 0x72, 0x65, 0x61, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x34, + 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x65, + 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, + 0x31, 0x30, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x5b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x65, 0x64, 0x69, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x5d, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x2d, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x77, 0x68, 0x69, 0x74, 0x65, 0x2d, 0x73, 0x70, 0x61, 0x63, 0x65, 0x3a, + 0x20, 0x70, 0x72, 0x65, 0x2d, 0x77, 0x72, 0x61, 0x70, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x75, 0x74, 0x6c, 0x69, 0x6e, 0x65, + 0x3a, 0x20, 0x30, 0x70, 0x78, 0x20, 0x73, 
0x6f, 0x6c, 0x69, 0x64, 0x20, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x40, 0x6b, 0x65, 0x79, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x73, 0x20, 0x6c, + 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, + 0x70, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x30, + 0x25, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x31, 0x30, 0x30, 0x25, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, + 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, + 0x72, 0x2d, 0x31, 0x3a, 0x20, 0x23, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, + 0x30, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, + 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, + 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, + 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x73, 0x69, 0x7a, + 0x65, 0x3a, 0x20, 0x35, 0x30, 0x25, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, + 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x3a, + 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x2d, 0x67, 0x72, 0x61, 0x64, + 0x69, 0x65, 0x6e, 0x74, 0x28, 0x39, 0x30, 0x64, 0x65, 0x67, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x29, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x32, 0x29, 0x2c, 0x20, + 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x29, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6e, 0x69, 0x6d, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, 0x70, 0x65, 0x20, 0x32, 0x73, + 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x20, 0x69, 0x6e, 0x66, 0x69, + 0x6e, 0x69, 0x74, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x40, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x20, + 0x28, 0x70, 0x72, 0x65, 0x66, 0x65, 0x72, 0x73, 0x2d, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x2d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x3a, 0x20, 0x64, + 0x61, 0x72, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, + 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, + 0x31, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, 0x30, 0x30, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, + 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, + 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x70, 0x6f, 0x70, 0x6f, + 0x76, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, + 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x62, 0x6c, 0x61, 0x63, 0x6b, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3e, 0x0a, + 0x0a, 0x20, 0x20, 0x3c, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x22, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x2c, 0x20, 0x68, 0x2c, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x2c, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x20, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, + 0x6c, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x2c, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x2c, 0x20, 0x43, 0x6f, + 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x69, 0x6e, 0x64, + 0x65, 0x78, 0x2e, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, + 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7d, 0x20, + 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x2d, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2d, 0x74, 0x6f, 0x2d, 0x67, 0x72, + 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x2e, 0x6d, 0x6a, 0x73, 0x27, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, + 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x76, 0x61, 0x72, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, + 0x3d, 0x20, 0x2d, 0x31, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x3a, 0x20, 0x22, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, + 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x73, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, 0x55, 0x73, + 0x65, 0x72, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x2c, 0x20, 0x61, 0x20, 0x66, 0x72, 0x69, 0x65, 0x6e, 0x64, 0x6c, 0x79, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x62, 0x6f, 0x74, 0x2e, 0x20, 0x4c, 0x6c, + 0x61, 0x6d, 0x61, 0x20, 0x69, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, + 0x75, 0x6c, 0x2c, 0x20, 0x6b, 0x69, 0x6e, 
[Byte dump, continued: a session signal holding the default chat state: the system prompt ("This is a conversation between User and Llama, a friendly chatbot. Llama is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision."), template "{{prompt}}\n\n{{history}}\n{{char}}:", historyTemplate "{{name}}: {{message}}", an empty transcript, type "chat" (vs. "completion"), char "Llama", user "User", and an empty image_selected; then a params signal carrying the default sampling parameters.]
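The params signal decodes to the following defaults, with the inline comments as they appear in the source:

    const params = signal({
      n_predict: 400,
      temperature: 0.7,
      repeat_last_n: 256, // 0 = disable penalty, -1 = context size
      repeat_penalty: 1.18, // 1.0 = disabled
      top_k: 40, // <= 0 to use vocab size
      top_p: 0.95, // 1.0 = disabled
      min_p: 0.05, // 0 = disabled
      tfs_z: 1.0, // 1.0 = disabled
      typical_p: 1.0, // 1.0 = disabled
      presence_penalty: 0.0, // 0.0 = disabled
      frequency_penalty: 0.0, // 0.0 = disabled
      mirostat: 0, // 0/1/2
      mirostat_tau: 5, // target entropy
      mirostat_eta: 0.1, // learning rate
      grammar: '',
      n_probs: 0, // no completion_probabilities,
      image_data: [],
      cache_prompt: true,
      api_key: ''
    })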
[Byte dump, continued: the start of a section marked "START: Support for storing prompt templates and parameters in browsers LocalStorage". It defines the storage key llamacpp_server_local_storage, four helpers that write and read entries as JSON objects or as raw text, and two signals acting as containers for user templates and settings: savedUserTemplates (initially {}) and selectedUserTemplate (initially { name: '', template: { session: {}, params: {} } }).]
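The four LocalStorage helpers decode as follows; each entry is namespaced under the fixed storage key plus a per-item tag:

    const local_storage_storageKey = "llamacpp_server_local_storage";

    function local_storage_setDataFromObject(tag, content) {
      localStorage.setItem(local_storage_storageKey + '/' + tag, JSON.stringify(content));
    }

    function local_storage_setDataFromRawText(tag, content) {
      localStorage.setItem(local_storage_storageKey + '/' + tag, content);
    }

    function local_storage_getDataAsObject(tag) {
      const item = localStorage.getItem(local_storage_storageKey + '/' + tag);
      if (!item) {
        return null;
      } else {
        return JSON.parse(item);
      }
    }

    function local_storage_getDataAsRawText(tag) {
      const item = localStorage.getItem(local_storage_storageKey + '/' + tag);
      if (!item) {
        return null;
      } else {
        return item;
      }
    }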
[Byte dump, continued: startup logic that imports locally saved templates and settings, if there are any. User templates and settings are stored in one object, in the form { "templatename": "templatedata" } and { "settingstemplatename": "settingsdata" }. If an object exists under the user_templates tag it is loaded into savedUserTemplates and its default entry is overwritten with the current session and params; otherwise LocalStorage is initialized with a fresh default template.]
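Decoded, the import-or-initialize logic reads:

    console.log('Importing saved templates')

    let importedTemplates = local_storage_getDataAsObject('user_templates')

    if (importedTemplates) {
      // saved templates were successfully imported.
      console.log('Processing saved templates and updating default template')
      params.value = { ...params.value, image_data: [] };

      savedUserTemplates.value = importedTemplates;

      //override default template
      savedUserTemplates.value.default = { session: session.value, params: params.value }
      local_storage_setDataFromObject('user_templates', savedUserTemplates.value)
    } else {
      // no saved templates detected.
      console.log('Initializing LocalStorage and saving default template')

      savedUserTemplates.value = { "default": { session: session.value, params: params.value } }
      local_storage_setDataFromObject('user_templates', savedUserTemplates.value)
    }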
[Byte dump, continued: the template-management functions. userTemplateResetToDefault() points selectedUserTemplate back at the saved 'default' entry; userTemplateApply(t) copies a template's session and params into the live signals, clearing image_selected and image_data as it does so; userTemplateResetToDefaultAndApply() chains the two; userTemplateLoadAndApplyAutosaved() restores the last-used template from the user_templates_last slot, falls back to the default when none is found, and applies the result; userTemplateAutosave() writes the current state back to that slot. The section ends by logging 'Checking for autosaved last used template' and calling userTemplateLoadAndApplyAutosaved() once at startup.]
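The autosave function decodes as below; note that it never saves over the default template, creating a timestamped copy instead:

    function userTemplateAutosave() {
      console.log('Template Autosave...')
      if (selectedUserTemplate.value.name == 'default') {
        // we don't want to save over default template, so let's create a new one
        let newTemplateName = 'UserTemplate-' + Date.now().toString()
        let newTemplate = { 'name': newTemplateName, 'data': { 'session': session.value, 'params': params.value } }

        console.log('Saving as ' + newTemplateName)

        // save in the autosave slot
        local_storage_setDataFromObject('user_templates_last', newTemplate)

        // and load it back and apply
        userTemplateLoadAndApplyAutosaved()
      } else {
        local_storage_setDataFromObject('user_templates_last', { 'name': selectedUserTemplate.value.name, 'data': { 'session': session.value, 'params': params.value } })
      }
    }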
0x74, 0x6f, 0x72, 0x69, 0x6e, + 0x67, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, + 0x20, 0x62, 0x72, 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, + 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, + 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, + 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, + 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, + 0x73, 0x20, 0x74, 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, + 0x74, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x20, 0x3e, 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, + 0x73, 0x74, 0x72, 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x6c, 0x65, 0x74, 0x20, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, + 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, + 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, + 0x2c, 0x20, 0x2e, 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x28, 0x73, 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, + 0x63, 0x65, 0x41, 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, + 0x28, 0x5f, 0x2c, 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, + 0x74, 0x69, 0x6e, 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, + 0x61, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x5b, 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, + 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, + 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, + 0x6e, 0x67, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, + 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, + 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, + 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 
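The template helper decodes to a short function that expands every {{key}} placeholder from session.value, optionally merged with extra settings, recursing so that substituted values are themselves expanded:

    // simple template replace
    const template = (str, extraSettings) => {
      let settings = session.value;
      if (extraSettings) {
        settings = { ...settings, ...extraSettings };
      }
      return String(str).replaceAll(/\{\{(.*?)\}\}/g, (_, key) => template(settings[key]));
    }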
[Byte dump, continued: the body of the runLlama streaming loop and the start of the chat handler. For each chunk, a stop signal makes the loop trim trailing newline-only messages, commit [char, currentMessages] to the transcript, and log a completion summary; otherwise the chunk is pushed onto currentMessages, data.slot_id is recorded, and, if an image was selected but data.multimodal is false, the user is alerted that "The server was not compiled for multimodal or the model projector can't be loaded." Chunks carrying timings update llamaStats, and controller is cleared once the stream ends.]
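Assembled from this and the preceding hunk, runLlama decodes to:

    async function runLlama(prompt, llamaParams, char) {
      const currentMessages = [];
      const history = session.value.transcript;
      if (controller.value) {
        throw new Error("already running");
      }
      controller.value = new AbortController();
      for await (const chunk of llama(prompt, llamaParams, { controller: controller.value })) {
        const data = chunk.data;

        if (data.stop) {
          while (
            currentMessages.length > 0 &&
            currentMessages[currentMessages.length - 1].content.match(/\n$/) != null
          ) {
            currentMessages.pop();
          }
          transcriptUpdate([...history, [char, currentMessages]])
          console.log("Completion finished: '", currentMessages.map(msg => msg.content).join(''), "', summary: ", data);
        } else {
          currentMessages.push(data);
          slot_id = data.slot_id;
          if (selected_image && !data.multimodal) {
            alert("The server was not compiled for multimodal or the model projector can't be loaded.");
            return;
          }
          transcriptUpdate([...history, [char, currentMessages]])
        }

        if (data.timings) {
          llamaStats.value = data;
        }
      }

      controller.value = null;
    }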
[Byte dump, continued: the rest of chat(msg), which renders the prompt from session.value.template with the new message and the flattened history (each entry rendered through historyTemplate, array entries joined and stripped of a leading space), overrides the prompt with a hard-coded LLaVA-style string containing the [img-10] marker when an image is selected, and awaits runLlama with stop strings "</s>", "{{char}}:" and "{{user}}:". It is followed by runCompletion(), which streams the raw session prompt with no stop strings and folds the transcript back into session.value.prompt when it finishes, and by the stop(e) and reset(e) handlers, which abort the controller and clear the transcript.]
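The chat handler decodes to:

    // send message to server
    const chat = async (msg) => {
      if (controller.value) {
        console.log('already running...');
        return;
      }

      transcriptUpdate([...session.value.transcript, ["{{user}}", msg]])

      let prompt = template(session.value.template, {
        message: msg,
        history: session.value.transcript.flatMap(
          ([name, data]) =>
            template(
              session.value.historyTemplate,
              {
                name,
                message: Array.isArray(data) ?
                  data.map(msg => msg.content).join('').replace(/^\s/, '') :
                  data,
              }
            )
        ).join("\n"),
      });
      if (selected_image) {
        prompt = `A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\nUSER:[img-10]${msg}\nASSISTANT:`;
      }
      await runLlama(prompt, {
        ...params.value,
        slot_id: slot_id,
        stop: ["</s>", template("{{char}}:"), template("{{user}}:")],
      }, "{{char}}");
    }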
0x70, 0x64, 0x61, 0x74, 0x65, + 0x28, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, + 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, + 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, + 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, + 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, + 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, + 0x6e, 0x65, 0x72, 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, + 0x2c, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, + 0x3d, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, + 0x6c, 0x65, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x61, 0x64, 0x65, 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, + 0x3d, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 
0x20, 0x5b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7b, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, + 0x62, 0x61, 0x73, 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, + 0x29, 0x2c, 0x20, 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, + 0x44, 0x61, 0x74, 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, + 0x6e, 0x61, 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, + 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, + 0x77, 0x68, 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, + 0x20, 0x26, 0x26, 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, + 0x68, 0x69, 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, + 0x6d, 0x69, 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x6f, 0x72, 0x6d, 0x20, 0x6f, 0x6e, 0x73, 
0x75, 0x62, 0x6d, 0x69, 0x74, + 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, + 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, + 0x6d, 0x65, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, + 0x22, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, + 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6f, 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, + 0x24, 0x7b, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x73, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, + 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, + 0x73, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, + 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, + 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, + 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, + 0x24, 0x7b, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, + 0x65, 0x7d, 0x3e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, + 0x61, 0x67, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 
0x6e, 0x20, 0x6f, 0x6e, 0x63, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, + 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, + 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, 0x3c, + 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, + 0x3d, 0x24, 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, + 0x73, 0x65, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x28, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6e, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, + 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x75, + 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, + 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, + 0x3e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, + 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, + 0x7d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, + 0x7b, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x74, 0x6f, 0x70, + 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, + 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, + 0x24, 0x7b, 0x72, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, + 0x65, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, + 0x76, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x68, + 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3d, 
0x20, 0x28, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x75, 0x73, + 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, + 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x6f, 0x74, + 0x74, 0x6f, 0x6d, 0x20, 0x28, 0x69, 0x66, 0x20, 0x6e, 0x65, 0x65, 0x64, + 0x65, 0x64, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, + 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x70, 0x61, + 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x48, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, 0x3c, 0x3d, 0x20, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, + 0x70, 0x20, 0x2b, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x6f, + 0x66, 0x66, 0x73, 0x65, 0x74, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x20, + 0x2b, 0x20, 0x33, 0x30, 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x54, 0x6f, 0x28, 0x30, + 0x2c, 0x20, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x63, 0x72, + 0x6f, 0x6c, 0x6c, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x73, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x20, 0x3d, + 0x20, 0x28, 0x5b, 0x75, 0x73, 0x65, 0x72, 0x2c, 0x20, 0x64, 0x61, 0x74, + 0x61, 0x5d, 0x2c, 0x20, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, + 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 
0x79, 0x28, 0x64, 0x61, 0x74, + 0x61, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x20, + 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, + 0x61, 0x79, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, + 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, + 0x3d, 0x24, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, 0x3e, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, + 0x78, 0x74, 0x20, 0x3d, 0x20, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, + 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, + 0x2b, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, + 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x20, + 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x4d, 0x61, + 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x7d, 0x20, 0x74, + 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x75, 0x73, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, + 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, + 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x69, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x4d, 0x6f, 0x64, 0x65, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, + 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, + 0x60, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, + 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, + 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, + 0x2f, 0x70, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, 0x74, 0x20, 0x3d, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x69, 0x6e, 0x6e, 0x65, 0x72, 0x54, 0x65, 0x78, 0x74, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x5b, + 0x5d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6b, + 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x7d, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6d, + 0x67, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x77, 0x69, 0x64, + 0x74, 0x68, 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, 0x7b, 0x21, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x60, 0x20, 0x3a, + 0x20, 0x60, 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, 0x3d, 0x22, 0x24, + 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, + 0x6e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x65, 0x64, 0x69, + 0x74, 0x61, 0x62, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x69, 0x73, 0x43, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x6f, 0x64, 0x65, + 0x7d, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x74, + 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x64, 0x69, + 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x73, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, + 0x68, 0x61, 0x74, 0x4c, 0x69, 0x6e, 0x65, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, + 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, + 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, + 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x65, 0x6c, 0x2e, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x29, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, + 0x6c, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, + 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2c, 0x20, 0x5b, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x5d, 0x3a, 0x20, 0x4d, 0x61, + 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x70, 0x61, 0x72, + 0x73, 0x65, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x28, 0x65, 0x6c, 0x2e, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 
0x61, 0x6c, 0x75, 0x65, 0x29, + 0x29, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, + 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, 0x73, + 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x70, + 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x6c, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, + 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, + 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x3d, 0x20, 0x65, 0x6c, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x29, + 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, + 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, + 0x77, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x65, 0x72, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x50, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x73, 0x70, 0x6c, 0x69, 0x74, + 0x28, 0x27, 0x2c, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x64, + 0x75, 0x63, 0x65, 0x28, 0x28, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x63, 0x75, + 0x72, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x7b, 0x20, + 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x5b, 0x63, 0x75, 0x72, + 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, 0x3a, 0x20, 0x69, 0x20, + 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, + 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 
0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, + 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x60, 0x43, 0x6f, 0x6e, + 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x3a, + 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x46, + 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, + 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, + 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, + 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, + 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, + 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, + 0x73, 0x74, 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x74, 0x65, 0x70, + 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, + 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, + 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, + 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, + 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, + 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 
0x20, 0x6d, 0x69, 0x6e, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, + 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, + 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, + 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, + 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x52, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, + 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, + 0x70, 0x6c, 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, + 0x27, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x64, 0x69, 0x73, 0x61, + 0x62, 0x6c, 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, + 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, + 0x7b, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, + 0x74, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x72, 0x79, 0x20, + 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x72, 0x61, 0x6d, 0x6d, + 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x20, 0x3d, 0x20, + 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, + 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, 0x2f, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, + 0x61, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x67, 0x72, 0x61, + 0x6d, 0x6d, 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, + 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, 0x73, 0x65, 0x20, 0x67, + 0x62, 0x6e, 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 
0x61, 0x72, 0x7d, 0x22, 0x20, + 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, + 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, + 0x6f, 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x6f, + 0x72, 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x31, 0x2c, + 0x70, 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x33, 0x22, + 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, + 0x4a, 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, + 0x3d, 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x6f, 0x6e, + 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x3e, 0x43, 0x6f, + 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x53, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x46, 0x6f, 0x72, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3c, 0x2f, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, + 0x72, 0x65, 0x61, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, + 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, + 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, + 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 
0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x46, 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, + 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, + 0x6f, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, + 0x75, 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, 0x65, 0x72, 0x20, 0x6e, + 0x61, 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, + 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x75, 0x73, 0x65, 0x72, + 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, + 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x62, + 0x6f, 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, + 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, + 0x63, 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, + 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x22, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x3c, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, + 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, + 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, + 0x20, 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, 0x74, 0x20, 0x68, 0x69, + 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x31, 0x20, + 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x20, 
0x3d, 0x20, 0x28, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x53, 0x65, 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, + 0x73, 0x65, 0x74, 0x3e, 0x24, 0x7b, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, + 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x28, 0x29, 0x7d, 0x3c, + 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, + 0x6f, 0x72, 0x6d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, 0x6c, 0x69, 0x6d, 0x22, 0x3e, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3d, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x63, 0x68, 0x65, + 0x63, 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, + 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, + 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x20, 0x43, 0x68, 0x61, 0x74, 0x3c, 0x2f, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x73, + 0x6c, 0x69, 0x6d, 0x22, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, + 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, 0x6f, 0x22, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x79, 0x70, 0x65, 0x22, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x63, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, + 0x6b, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x22, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 
0x20, 0x2f, 0x3e, 0x20, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3c, 0x2f, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, + 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, 0x74, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x28, 0x29, + 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x28, + 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, + 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, + 0x65, 0x64, 0x69, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3a, 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, + 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, + 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x20, 0x7d, 0x29, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, + 0x54, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, + 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, + 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x65, 0x6e, 0x61, 0x6c, 0x69, + 0x7a, 0x65, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x20, 0x73, 0x65, + 0x71, 0x75, 0x65, 0x6e, 0x63, 0x65, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, + 0x3a, 0x20, 0x32, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, + 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, + 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, + 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 
0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, 0x61, + 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x43, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, + 0x72, 0x20, 0x4e, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x32, 0x30, 0x34, 0x38, 0x2c, + 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, + 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x22, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x6c, + 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x49, 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x4b, + 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x2d, 0x31, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x22, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x20, + 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3a, 0x20, 0x22, 0x54, 0x6f, 0x70, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, + 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, + 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, + 0x6f, 0x70, 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, + 0x4d, 0x69, 0x6e, 0x2d, 0x50, 0x20, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x69, + 0x6e, 0x67, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, + 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x6e, 0x5f, + 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, + 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x6d, 0x69, 0x6e, 0x5f, 0x70, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 
0x64, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, + 0x3e, 0x4d, 0x6f, 0x72, 0x65, 0x20, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x3c, 0x2f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, + 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, 0x22, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, + 0x54, 0x46, 0x53, 0x2d, 0x5a, 0x22, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, + 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, + 0x66, 0x73, 0x5f, 0x7a, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x66, 0x73, 0x5f, 0x7a, 0x20, 0x7d, 0x29, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, + 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, + 0x20, 0x22, 0x54, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x20, 0x50, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, + 0x6d, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, + 0x6d, 0x65, 0x3a, 0x20, 0x22, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, + 0x5f, 0x70, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, 0x30, + 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x74, 0x79, 0x70, 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x20, 0x7d, + 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, + 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x3a, 0x20, 0x22, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, + 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, + 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, + 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, + 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, 0x7d, 0x29, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x3a, 0x20, 0x22, 0x46, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, + 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x6d, + 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, 0x6e, + 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 
0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, + 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x22, 0x2c, 0x20, 0x73, 0x74, + 0x65, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, + 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x20, + 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, + 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x72, 0x20, 0x2f, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x63, 0x6c, 0x61, + 0x73, 0x73, 0x3d, 0x22, 0x74, 0x68, 0x72, 0x65, 0x65, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, 0x75, + 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, 0x69, + 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3d, 0x22, 0x30, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, 0x64, + 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x20, 0x3d, 0x3d, 0x20, 0x30, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, + 0x20, 0x6e, 0x6f, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x64, + 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, 0x69, + 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3d, 0x22, 0x31, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x65, + 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, + 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x31, 0x7d, 0x20, 0x6f, 0x6e, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, + 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x76, + 0x31, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x3c, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, + 0x64, 0x69, 0x6f, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x6d, + 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3d, 0x22, 0x32, 0x22, 0x20, 0x63, 0x68, 0x65, 0x63, 0x6b, + 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x70, 0x61, 
0x72, 0x61, 0x6d, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, + 0x61, 0x74, 0x20, 0x3d, 0x3d, 0x20, 0x32, 0x7d, 0x20, 0x6f, 0x6e, 0x69, + 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, + 0x2f, 0x3e, 0x20, 0x4d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, + 0x76, 0x32, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x5f, 0x74, 0x61, 0x75, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, + 0x20, 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x74, 0x61, 0x75, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x4d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x20, 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, + 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x6d, 0x69, + 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3a, 0x20, 0x22, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, + 0x65, 0x74, 0x61, 0x22, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x3a, 0x20, + 0x30, 0x2e, 0x30, 0x31, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, + 0x74, 0x61, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x49, 0x6e, + 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, + 0x65, 0x6c, 0x3a, 0x20, 0x22, 0x53, 0x68, 0x6f, 0x77, 0x20, 0x50, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x22, + 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x3a, 0x20, 0x31, 0x30, 0x2c, 0x20, 0x6d, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, + 0x20, 0x22, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x22, 0x2c, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x73, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, + 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 
0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x61, 0x70, 0x69, + 0x5f, 0x6b, 0x65, 0x79, 0x22, 0x3e, 0x41, 0x50, 0x49, 0x20, 0x4b, 0x65, + 0x79, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, + 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x22, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x22, 0x20, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x61, 0x70, 0x69, 0x5f, + 0x6b, 0x65, 0x79, 0x7d, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, + 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x45, 0x6e, 0x74, 0x65, 0x72, + 0x20, 0x41, 0x50, 0x49, 0x20, 0x6b, 0x65, 0x79, 0x22, 0x20, 0x6f, 0x6e, + 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x72, 0x6d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x70, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x20, 0x3d, 0x20, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, + 0x6f, 0x72, 0x28, 0x31, 0x39, 0x32, 0x20, 0x2a, 0x20, 0x28, 0x31, 0x20, + 0x2d, 0x20, 0x70, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x20, 0x3d, 0x20, 0x4d, + 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, 0x31, 0x39, + 0x32, 0x20, 0x2a, 0x20, 0x70, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x60, 0x72, 0x67, + 0x62, 0x61, 0x28, 0x24, 0x7b, 0x72, 0x7d, 0x2c, 0x24, 0x7b, 0x67, 0x7d, + 0x2c, 0x30, 0x2c, 0x30, 0x2e, 0x33, 0x29, 0x60, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x69, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, + 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x6d, 0x73, 0x67, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x21, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x20, 0x7c, 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3d, + 0x3d, 0x3d, 0x20, 0x30, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, + 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x31, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2f, 0x2f, 0x20, 0x4e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x62, + 0x79, 0x74, 0x65, 0x20, 0x70, 0x61, 0x69, 0x72, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5b, + 0x30, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x73, 0x57, 0x69, 0x74, 0x68, 0x28, 0x27, 0x62, + 0x79, 0x74, 0x65, 0x3a, 0x20, 0x5c, 0x5c, 0x27, 0x29, 0x29, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, + 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x2e, + 0x6d, 0x61, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x3d, 0x3e, 0x20, + 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3a, 0x20, + 0x70, 0x72, 0x6f, 0x62, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, + 0x65, 0x73, 0x3a, 0x20, 0x5b, 0x70, 0x72, 0x6f, 0x62, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, + 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, + 0x69, 0x65, 0x73, 0x7d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3d, 0x24, 0x7b, + 0x73, 0x70, 0x6c, 0x69, 0x74, 0x44, 0x61, 0x74, 0x61, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2c, + 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x3d, + 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 
0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, + 0x73, 0x5b, 0x30, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, + 0x20, 0x3d, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x73, 0x2e, 0x66, 0x69, 0x6e, + 0x64, 0x28, 0x70, 0x20, 0x3d, 0x3e, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, + 0x5f, 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x6d, 0x73, 0x67, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x66, 0x6f, 0x75, + 0x6e, 0x64, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x28, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2e, 0x70, 0x72, 0x6f, + 0x62, 0x29, 0x20, 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x27, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, + 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, + 0x6e, 0x20, 0x3d, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x62, + 0x2d, 0x73, 0x65, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, + 0x62, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x28, 0x70, 0x2c, 0x20, 0x69, + 0x6e, 0x64, 0x65, 0x78, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6b, 0x65, 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x69, 0x74, 0x6c, + 0x65, 0x3d, 0x24, 0x7b, 0x60, 0x70, 0x72, 0x6f, 0x62, 0x3a, 0x20, 0x24, + 0x7b, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x7d, 0x60, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, + 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, + 0x3a, 0x20, 0x27, 0x30, 0x2e, 0x33, 0x65, 0x6d, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, + 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, + 0x73, 0x74, 0x72, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x62, 0x43, 0x6f, + 0x6c, 0x6f, 0x72, 0x28, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x29, 0x20, + 0x3a, 0x20, 0x27, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, + 0x3e, 0x24, 0x7b, 0x70, 0x2e, 0x74, 0x6f, 0x6b, 0x5f, 0x73, 0x74, 0x72, + 0x7d, 0x3a, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, + 0x7b, 0x4d, 0x61, 0x74, 0x68, 0x2e, 0x66, 0x6c, 0x6f, 0x6f, 0x72, 0x28, + 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x62, 0x20, 0x2a, 0x20, 0x31, 0x30, 0x30, + 0x29, 0x7d, 0x25, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x60, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, + 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x62, + 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x7d, + 0x7d, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, + 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, + 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x29, 0x20, 0x3f, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x60, 0x20, 0x3a, 0x20, + 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x70, 0x6f, 0x6f, 0x72, 0x20, 0x6d, 0x61, 0x6e, 0x73, 0x20, 0x6d, + 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x20, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x64, 0x6f, + 0x77, 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x64, + 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x74, 0x65, + 0x78, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x26, 0x2f, 0x67, + 0x2c, 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, 0x3b, 0x27, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3c, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, + 0x6c, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, + 0x3e, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x67, 0x74, 0x3b, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x23, 0x7b, 0x31, 0x2c, + 0x36, 0x7d, 0x20, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, 0x67, 0x69, 0x6d, + 0x2c, 0x20, 0x27, 0x3c, 0x68, 0x33, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x68, + 0x33, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, + 0x2a, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x5c, 0x2a, + 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, + 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x5f, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, + 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, + 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, + 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, + 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x2f, 0x67, 0x2c, 0x20, + 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x60, 0x60, + 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, 0x28, 0x5b, 0x5c, 0x73, 0x5c, 0x53, 0x5d, + 0x2a, 0x3f, 0x29, 0x60, 0x60, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, + 0x70, 0x72, 0x65, 0x3e, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, + 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x3c, 0x2f, 0x70, 0x72, 0x65, + 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x28, + 0x2e, 0x2a, 0x3f, 0x29, 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x63, + 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, + 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x6e, + 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x62, 0x72, 0x20, 0x2f, + 0x3e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, + 0x73, 0x70, 0x61, 0x6e, 0x20, 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, + 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, + 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x5f, 0x5f, 0x68, + 0x74, 0x6d, 0x6c, 0x3a, 0x20, 0x6d, 0x64, 0x20, 0x7d, 0x7d, 0x20, 0x2f, + 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x6f, + 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 
0x28, 0x21, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, + 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x5f, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, + 0x64, 0x7d, 0x20, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, + 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x5f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x7d, 0x20, + 0x63, 0x61, 0x63, 0x68, 0x65, 0x64, 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x2e, 0x70, + 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, + 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x6d, 0x73, 0x2e, 0x74, 0x6f, + 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, 0x73, 0x20, 0x70, + 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, 0x20, 0x24, 0x7b, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, + 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, + 0x65, 0x72, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, + 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, + 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, + 0x6f, 0x6e, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, + 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, 0x6d, 0x70, + 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, + 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, + 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, + 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x2c, + 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, + 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, + 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, + 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, + 0x65, 0x72, 0x52, 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, + 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x62, + 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x72, 0x65, 0x63, 0x74, 0x20, 0x3d, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x2e, 0x67, 0x65, 0x74, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x69, 0x6e, 0x67, + 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x74, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, + 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, 0x62, 0x6f, 0x74, 0x74, + 0x6f, 0x6d, 0x20, 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, + 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x59, 0x7d, 0x70, 0x78, 0x60, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, + 0x63, 0x74, 0x2e, 0x6c, 0x65, 0x66, 0x74, 0x20, 0x2b, 0x20, 0x77, 0x69, + 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x58, + 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x21, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, + 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x20, + 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, + 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, + 0x20, 0x21, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, + 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, + 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, + 0x67, 0x65, 0x74, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 
0x3d, 0x20, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, + 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, + 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x28, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, + 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, + 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, + 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, + 0x3d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x73, 0x74, 0x79, + 0x6c, 0x65, 0x7d, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x7d, 0x20, 0x6f, 0x6e, 0x43, + 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x74, 0x6f, 0x67, 0x67, 0x6c, + 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x3e, 0x24, 0x7b, + 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x69, 0x73, 0x4f, + 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x26, 0x26, + 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x7d, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x3d, 0x22, 0x23, 0x70, + 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, + 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, + 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, + 0x6f, 0x70, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 
0x74, 0x6f, 0x70, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6c, 0x65, 0x66, + 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, + 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x3a, 0x20, 0x70, 0x72, + 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, + 0x28, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, + 0x6c, 0x6f, 0x70, 0x69, 0x74, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, + 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, + 0x2f, 0x6d, 0x61, 0x73, 0x74, 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, + 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, + 0x6c, 0x2e, 0x6a, 0x73, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, + 0x2a, 0x20, 0x52, 0x65, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, + 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, + 0x64, 0x65, 0x73, 0x63, 0x65, 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, + 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, + 0x65, 0x6e, 0x20, 0x43, 0x53, 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x20, 0x2a, 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, + 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, + 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, + 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, + 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x66, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, + 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, + 0x75, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 
0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, + 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, + 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, + 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, + 0x61, 0x79, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, + 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, + 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, + 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, + 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, + 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x29, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, + 0x64, 0x65, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, + 0x6c, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, + 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, + 0x6f, 0x64, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x27, 0x20, 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, + 0x3a, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, + 0x68, 0x6f, 0x77, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, + 0x75, 0x6e, 0x74, 0x65, 0x64, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2f, 0x2f, 0x20, 0x63, 0x6c, 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, + 0x6f, 0x6c, 0x64, 0x20, 0x6e, 0x6f, 0x64, 
0x65, 0x20, 0x69, 0x66, 0x20, + 0x6d, 0x6f, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, + 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, + 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, + 0x6e, 0x74, 0x6f, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, + 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, + 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, + 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x78, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, + 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, + 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, + 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 
0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, + 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x73, 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, + 0x72, 0x73, 0x74, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, + 0x20, 0x69, 0x74, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, + 0x61, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x61, 0x6c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, + 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, + 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, + 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, + 0x72, 0x28, 0x7b, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, + 0x20, 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, + 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, + 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x7d, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x68, 0x31, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, + 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, + 0x7b, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, + 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x3c, 0x2f, 0x6d, 0x61, + 0x69, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x69, 0x64, 0x3d, 0x22, 0x77, 0x72, 0x69, 0x74, 0x65, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, + 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, + 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, + 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, + 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, 0x72, + 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, + 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x67, + 0x65, 0x72, 0x67, 0x61, 0x6e, 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, 0x6d, + 0x61, 0x2e, 0x63, 0x70, 0x70, 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, + 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, + 0x69, 0x22, 0x3e, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, 0x2f, + 0x61, 0x3e, 0x2e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, 0x74, + 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x28, + 0x41, 0x70, 0x70, 0x29, 0x2c, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x28, 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, 0x61, + 0x69, 0x6e, 0x65, 0x72, 0x27, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x3c, + 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x68, + 0x65, 0x61, 0x64, 0x3e, 0x0a, 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, 0x3e, + 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, + 0x79, 0x70, 0x65, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x22, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, + 0x22, 0x20, 0x61, 0x63, 0x63, 0x65, 0x70, 0x74, 0x3d, 0x22, 0x69, 0x6d, + 0x61, 0x67, 0x65, 0x2f, 0x2a, 0x22, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, + 0x3d, 0x22, 0x64, 0x69, 0x73, 0x70, 0x6c, 
0x61, 0x79, 0x3a, 0x20, 0x6e, + 0x6f, 0x6e, 0x65, 0x3b, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, + 0x64, 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x3c, + 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, + 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a, 0x0a +}; +unsigned int index_html_len = 33456; diff --git a/examples/server/index.js.hpp b/examples/server/index.js.hpp index 647abe116..e09b3c8c5 100644 --- a/examples/server/index.js.hpp +++ b/examples/server/index.js.hpp @@ -1,4 +1,1903 @@ -const char index_js[] = R"LITERAL( -function t(){throw new Error("Cycle detected")}const n=Symbol.for("preact-signals");function e(){if(f>1){f--;return}let t,n=!1;while(void 0!==o){let _=o;o=void 0;s++;while(void 0!==_){const i=_.o;_.o=void 0;_.f&=-3;if(!(8&_.f)&&p(_))try{_.c()}catch(e){if(!n){t=e;n=!0}}_=i}}s=0;f--;if(n)throw t}function _(t){if(f>0)return t();f++;try{return t()}finally{e()}}let i,o,r=0;function u(t){if(r>0)return t();const n=i;i=void 0;r++;try{return t()}finally{r--;i=n}}let f=0,s=0,l=0;function c(t){if(void 0===i)return;let n=t.n;if(void 0===n||n.t!==i){n={i:0,S:t,p:i.s,n:void 0,t:i,e:void 0,x:void 0,r:n};if(void 0!==i.s)i.s.n=n;i.s=n;t.n=n;if(32&i.f)t.S(n);return n}else if(-1===n.i){n.i=0;if(void 0!==n.n){n.n.p=n.p;if(void 0!==n.p)n.p.n=n.n;n.p=i.s;n.n=void 0;i.s.n=n;i.s=n}return n}}function h(t){this.v=t;this.i=0;this.n=void 0;this.t=void 0}h.prototype.brand=n;h.prototype.h=function(){return!0};h.prototype.S=function(t){if(this.t!==t&&void 0===t.e){t.x=this.t;if(void 0!==this.t)this.t.e=t;this.t=t}};h.prototype.U=function(t){if(void 0!==this.t){const n=t.e,e=t.x;if(void 0!==n){n.x=e;t.e=void 0}if(void 0!==e){e.e=n;t.x=void 0}if(t===this.t)this.t=e}};h.prototype.subscribe=function(t){const n=this;return w((function(){const e=n.value,_=32&this.f;this.f&=-33;try{t(e)}finally{this.f|=_}}))};h.prototype.valueOf=function(){return this.value};h.prototype.toString=function(){return this.value+""};h.prototype.toJSON=function(){return this.value};h.prototype.peek=function(){return this.v};Object.defineProperty(h.prototype,"value",{get(){const t=c(this);if(void 0!==t)t.i=this.i;return this.v},set(n){if(i instanceof y)!function(){throw new Error("Computed cannot have side-effects")}();if(n!==this.v){if(s>100)t();this.v=n;this.i++;l++;f++;try{for(let t=this.t;void 0!==t;t=t.x)t.t.N()}finally{e()}}}});function a(t){return new h(t)}function p(t){for(let n=t.s;void 0!==n;n=n.n)if(n.S.i!==n.i||!n.S.h()||n.S.i!==n.i)return!0;return!1}function d(t){for(let n=t.s;void 0!==n;n=n.n){const e=n.S.n;if(void 0!==e)n.r=e;n.S.n=n;n.i=-1;if(void 0===n.n){t.s=n;break}}}function v(t){let n,e=t.s;while(void 0!==e){const t=e.p;if(-1===e.i){e.S.U(e);if(void 0!==t)t.n=e.n;if(void 0!==e.n)e.n.p=t}else n=e;e.S.n=e.r;if(void 0!==e.r)e.r=void 0;e=t}t.s=n}function y(t){h.call(this,void 0);this.x=t;this.s=void 0;this.g=l-1;this.f=4}(y.prototype=new h).h=function(){this.f&=-3;if(1&this.f)return!1;if(32==(36&this.f))return!0;this.f&=-5;if(this.g===l)return!0;this.g=l;this.f|=1;if(this.i>0&&!p(this)){this.f&=-2;return!0}const t=i;try{d(this);i=this;const t=this.x();if(16&this.f||this.v!==t||0===this.i){this.v=t;this.f&=-17;this.i++}}catch(t){this.v=t;this.f|=16;this.i++}i=t;v(this);this.f&=-2;return!0};y.prototype.S=function(t){if(void 0===this.t){this.f|=36;for(let t=this.s;void 0!==t;t=t.n)t.S.S(t)}h.prototype.S.call(this,t)};y.prototype.U=function(t){if(void 
0!==this.t){h.prototype.U.call(this,t);if(void 0===this.t){this.f&=-33;for(let t=this.s;void 0!==t;t=t.n)t.S.U(t)}}};y.prototype.N=function(){if(!(2&this.f)){this.f|=6;for(let t=this.t;void 0!==t;t=t.x)t.t.N()}};y.prototype.peek=function(){if(!this.h())t();if(16&this.f)throw this.v;return this.v};Object.defineProperty(y.prototype,"value",{get(){if(1&this.f)t();const n=c(this);this.h();if(void 0!==n)n.i=this.i;if(16&this.f)throw this.v;return this.v}});function m(t){return new y(t)}function g(t){const n=t.u;t.u=void 0;if("function"==typeof n){f++;const _=i;i=void 0;try{n()}catch(n){t.f&=-2;t.f|=8;b(t);throw n}finally{i=_;e()}}}function b(t){for(let n=t.s;void 0!==n;n=n.n)n.S.U(n);t.x=void 0;t.s=void 0;g(t)}function k(t){if(i!==this)throw new Error("Out-of-order effect");v(this);i=t;this.f&=-2;if(8&this.f)b(this);e()}function S(t){this.x=t;this.u=void 0;this.s=void 0;this.o=void 0;this.f=32}S.prototype.c=function(){const t=this.S();try{if(8&this.f)return;if(void 0===this.x)return;const n=this.x();if("function"==typeof n)this.u=n}finally{t()}};S.prototype.S=function(){if(1&this.f)t();this.f|=1;this.f&=-9;g(this);d(this);f++;const n=i;i=this;return k.bind(this,n)};S.prototype.N=function(){if(!(2&this.f)){this.f|=2;this.o=o;o=this}};S.prototype.d=function(){this.f|=8;if(!(1&this.f))b(this)};function w(t){const n=new S(t);try{n.c()}catch(t){n.d();throw t}return n.d.bind(n)}var x,C,E,U,H,P,N,$,D,T={},V=[],A=/acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i,F=Array.isArray;function M(t,n){for(var e in n)t[e]=n[e];return t}function W(t){var n=t.parentNode;n&&n.removeChild(t)}function L(t,n,e){var _,i,o,r={};for(o in n)"key"==o?_=n[o]:"ref"==o?i=n[o]:r[o]=n[o];if(arguments.length>2&&(r.children=arguments.length>3?x.call(arguments,2):e),"function"==typeof t&&null!=t.defaultProps)for(o in t.defaultProps)void 0===r[o]&&(r[o]=t.defaultProps[o]);return O(t,r,_,i,null)}function O(t,n,e,_,i){var o={type:t,props:n,key:e,ref:_,__k:null,__:null,__b:0,__e:null,__d:void 0,__c:null,constructor:void 0,__v:null==i?++E:i,__i:-1,__u:0};return null==i&&null!=C.vnode&&C.vnode(o),o}function R(){return{current:null}}function j(t){return t.children}function I(t,n){this.props=t,this.context=n}function q(t,n){if(null==n)return t.__?q(t.__,t.__i+1):null;for(var e;nn&&H.sort($));z.__r=0}function J(t,n,e,_,i,o,r,u,f,s,l){var c,h,a,p,d,v=_&&_.__k||V,y=n.length;for(e.__d=f,K(e,n,v),f=e.__d,c=0;c0?O(i.type,i.props,i.key,i.ref?i.ref:null,i.__v):i)?(i.__=t,i.__b=t.__b+1,u=Y(i,e,r=_+c,l),i.__i=u,o=null,-1!==u&&(l--,(o=e[u])&&(o.__u|=131072)),null==o||null===o.__v?(-1==u&&c--,"function"!=typeof i.type&&(i.__u|=65536)):u!==r&&(u===r+1?c++:u>r?l>f-r?c+=u-r:c--:c=u(null!=f&&0==(131072&f.__u)?1:0))for(;r>=0||u=0){if((f=n[r])&&0==(131072&f.__u)&&i==f.key&&o===f.type)return r;r--}if(u2&&(u.children=arguments.length>3?x.call(arguments,2):e),O(t.type,u,_||t.key,i||t.ref,null)}function ht(t,n){var e={__c:n="__cC"+D++,__:t,Consumer:function(t,n){return t.children(n)},Provider:function(t){var e,_;return this.getChildContext||(e=[],(_={})[n]=this,this.getChildContext=function(){return _},this.shouldComponentUpdate=function(t){this.props.value!==t.value&&e.some((function(t){t.__e=!0,G(t)}))},this.sub=function(t){e.push(t);var n=t.componentWillUnmount;t.componentWillUnmount=function(){e.splice(e.indexOf(t),1),n&&n.call(t)}}),t.children}};return e.Provider.__=e.Consumer.contextType=e}x=V.slice,C={__e:function(t,n,e,_){for(var 
i,o,r;n=n.__;)if((i=n.__c)&&!i.__)try{if((o=i.constructor)&&null!=o.getDerivedStateFromError&&(i.setState(o.getDerivedStateFromError(t)),r=i.__d),null!=i.componentDidCatch&&(i.componentDidCatch(t,_||{}),r=i.__d),r)return i.__E=i}catch(n){t=n}throw t}},E=0,U=function(t){return null!=t&&null==t.constructor},I.prototype.setState=function(t,n){var e;e=null!=this.__s&&this.__s!==this.state?this.__s:this.__s=M({},this.state),"function"==typeof t&&(t=t(M({},e),this.props)),t&&M(e,t),null!=t&&this.__v&&(n&&this._sb.push(n),G(this))},I.prototype.forceUpdate=function(t){this.__v&&(this.__e=!0,t&&this.__h.push(t),G(this))},I.prototype.render=j,H=[],N="function"==typeof Promise?Promise.prototype.then.bind(Promise.resolve()):setTimeout,$=function(t,n){return t.__v.__b-n.__v.__b},z.__r=0,D=0;var at,pt,dt,vt,yt=0,mt=[],gt=[],bt=C.__b,kt=C.__r,St=C.diffed,wt=C.__c,xt=C.unmount;function Ct(t,n){C.__h&&C.__h(pt,t,yt||n),yt=0;var e=pt.__H||(pt.__H={__:[],__h:[]});return t>=e.__.length&&e.__.push({__V:gt}),e.__[t]}function Et(t){return yt=1,Ut(qt,t)}function Ut(t,n,e){var _=Ct(at++,2);if(_.t=t,!_.__c&&(_.__=[e?e(n):qt(void 0,n),function(t){var n=_.__N?_.__N[0]:_.__[0],e=_.t(n,t);n!==e&&(_.__N=[e,_.__[1]],_.__c.setState({}))}],_.__c=pt,!pt.u)){var i=function(t,n,e){if(!_.__c.__H)return!0;var i=_.__c.__H.__.filter((function(t){return t.__c}));if(i.every((function(t){return!t.__N})))return!o||o.call(this,t,n,e);var r=!1;return i.forEach((function(t){if(t.__N){var n=t.__[0];t.__=t.__N,t.__N=void 0,n!==t.__[0]&&(r=!0)}})),!(!r&&_.__c.props===t)&&(!o||o.call(this,t,n,e))};pt.u=!0;var o=pt.shouldComponentUpdate,r=pt.componentWillUpdate;pt.componentWillUpdate=function(t,n,e){if(this.__e){var _=o;o=void 0,i(t,n,e),o=_}r&&r.call(this,t,n,e)},pt.shouldComponentUpdate=i}return _.__N||_.__}function Ht(t,n){var e=Ct(at++,3);!C.__s&&It(e.__H,n)&&(e.__=t,e.i=n,pt.__H.__h.push(e))}function Pt(t,n){var e=Ct(at++,4);!C.__s&&It(e.__H,n)&&(e.__=t,e.i=n,pt.__h.push(e))}function Nt(t){return yt=5,Dt((function(){return{current:t}}),[])}function $t(t,n,e){yt=6,Pt((function(){return"function"==typeof t?(t(n()),function(){return t(null)}):t?(t.current=n(),function(){return t.current=null}):void 0}),null==e?e:e.concat(t))}function Dt(t,n){var e=Ct(at++,7);return It(e.__H,n)?(e.__V=t(),e.i=n,e.__h=t,e.__V):e.__}function Tt(t,n){return yt=8,Dt((function(){return t}),n)}function Vt(t){var n=pt.context[t.__c],e=Ct(at++,9);return e.c=t,n?(null==e.__&&(e.__=!0,n.sub(pt)),n.props.value):t.__}function At(t,n){C.useDebugValue&&C.useDebugValue(n?n(t):t)}function Ft(t){var n=Ct(at++,10),e=Et();return n.__=t,pt.componentDidCatch||(pt.componentDidCatch=function(t,_){n.__&&n.__(t,_),e[1](t)}),[e[0],function(){e[1](void 0)}]}function Mt(){var t=Ct(at++,11);if(!t.__){for(var n=pt.__v;null!==n&&!n.__m&&null!==n.__;)n=n.__;var e=n.__m||(n.__m=[0,0]);t.__="P"+e[0]+"-"+e[1]++}return t.__}function Wt(){for(var t;t=mt.shift();)if(t.__P&&t.__H)try{t.__H.__h.forEach(Rt),t.__H.__h.forEach(jt),t.__H.__h=[]}catch(u){t.__H.__h=[],C.__e(u,t.__v)}}C.__b=function(t){pt=null,bt&&bt(t)},C.__r=function(t){kt&&kt(t),at=0;var n=(pt=t.__c).__H;n&&(dt===pt?(n.__h=[],pt.__h=[],n.__.forEach((function(t){t.__N&&(t.__=t.__N),t.__V=gt,t.__N=t.i=void 0}))):(n.__h.forEach(Rt),n.__h.forEach(jt),n.__h=[],at=0)),dt=pt},C.diffed=function(t){St&&St(t);var n=t.__c;n&&n.__H&&(n.__H.__h.length&&(1!==mt.push(n)&&vt===C.requestAnimationFrame||((vt=C.requestAnimationFrame)||Ot)(Wt)),n.__H.__.forEach((function(t){t.i&&(t.__H=t.i),t.__V!==gt&&(t.__=t.__V),t.i=void 
0,t.__V=gt}))),dt=pt=null},C.__c=function(t,n){n.some((function(t){try{t.__h.forEach(Rt),t.__h=t.__h.filter((function(t){return!t.__||jt(t)}))}catch(l){n.some((function(t){t.__h&&(t.__h=[])})),n=[],C.__e(l,t.__v)}})),wt&&wt(t,n)},C.unmount=function(t){xt&&xt(t);var n,e=t.__c;e&&e.__H&&(e.__H.__.forEach((function(t){try{Rt(t)}catch(t){n=t}})),e.__H=void 0,n&&C.__e(n,e.__v))};var Lt="function"==typeof requestAnimationFrame;function Ot(t){var n,e=function(){clearTimeout(_),Lt&&cancelAnimationFrame(n),setTimeout(t)},_=setTimeout(e,100);Lt&&(n=requestAnimationFrame(e))}function Rt(t){var n=pt,e=t.__c;"function"==typeof e&&(t.__c=void 0,e()),pt=n}function jt(t){var n=pt;t.__c=t.__(),pt=n}function It(t,n){return!t||t.length!==n.length||n.some((function(n,e){return n!==t[e]}))}function qt(t,n){return"function"==typeof n?n(t):n}function Bt(t,n){C[t]=n.bind(null,C[t]||(()=>{}))}let Gt,zt;function Jt(t){if(zt)zt();zt=t&&t.S()}function Kt({data:t}){const n=Xt(t);n.value=t;const e=Dt(()=>{let t=this.__v;while(t=t.__)if(t.__c){t.__c.__$f|=4;break}this.__$u.c=()=>{var t;if(!U(e.peek())&&3===(null==(t=this.base)?void 0:t.nodeType))this.base.data=e.peek();else{this.__$f|=1;this.setState({})}};return m(()=>{let t=n.value.value;return 0===t?0:!0===t?"":t||""})},[]);return e.value}Kt.displayName="_st";Object.defineProperties(h.prototype,{constructor:{configurable:!0,value:void 0},type:{configurable:!0,value:Kt},props:{configurable:!0,get(){return{data:this}}},__b:{configurable:!0,value:1}});Bt("__b",(t,n)=>{if("string"==typeof n.type){let t,e=n.props;for(let _ in e){if("children"===_)continue;let i=e[_];if(i instanceof h){if(!t)n.__np=t={};t[_]=i;e[_]=i.peek()}}}t(n)});Bt("__r",(t,n)=>{Jt();let e,_=n.__c;if(_){_.__$f&=-2;e=_.__$u;if(void 0===e)_.__$u=e=function(t){let n;w((function(){n=this}));n.c=()=>{_.__$f|=1;_.setState({})};return n}()}Gt=_;Jt(e);t(n)});Bt("__e",(t,n,e,_)=>{Jt();Gt=void 0;t(n,e,_)});Bt("diffed",(t,n)=>{Jt();Gt=void 0;let e;if("string"==typeof n.type&&(e=n.__e)){let t=n.__np,_=n.props;if(t){let n=e.U;if(n)for(let e in n){let _=n[e];if(void 0!==_&&!(e in t)){_.d();n[e]=void 0}}else{n={};e.U=n}for(let i in t){let o=n[i],r=t[i];if(void 0===o){o=Qt(e,i,r,_);n[i]=o}else o.o(r,_)}}}t(n)});function Qt(t,n,e,_){const i=n in t&&void 0===t.ownerSVGElement,o=a(e);return{o:(t,n)=>{o.value=t;_=n},d:w(()=>{const e=o.value.value;if(_[n]!==e){_[n]=e;if(i)t[n]=e;else if(e)t.setAttribute(n,e);else t.removeAttribute(n)}})}}Bt("unmount",(t,n)=>{if("string"==typeof n.type){let t=n.__e;if(t){const n=t.U;if(n){t.U=void 0;for(let t in n){let e=n[t];if(e)e.d()}}}}else{let t=n.__c;if(t){const n=t.__$u;if(n){t.__$u=void 0;n.d()}}}t(n)});Bt("__h",(t,n,e,_)=>{if(_<3||9===_)n.__$f|=2;t(n,e,_)});I.prototype.shouldComponentUpdate=function(t,n){const e=this.__$u;if(!(e&&void 0!==e.s||4&this.__$f))return!0;if(3&this.__$f)return!0;for(let _ in n)return!0;for(let _ in t)if("__source"!==_&&t[_]!==this.props[_])return!0;for(let _ in this.props)if(!(_ in t))return!0;return!1};function Xt(t){return Dt(()=>a(t),[])}function Yt(t){const n=Nt(t);n.current=t;Gt.__$f|=4;return Dt(()=>m(()=>n.current()),[])}function Zt(t){const n=Nt(t);n.current=t;Ht(()=>w(()=>n.current()),[])}var tn=function(t,n,e,_){var i;n[0]=0;for(var o=1;o=5&&((i||!t&&5===_)&&(r.push(_,0,i,e),_=6),t&&(r.push(_,t,0,e),_=6)),i=""},f=0;f"===n?(_=1,i=""):i=n+i[0]:o?n===o?o="":i+=n:'"'===n||"'"===n?o=n:">"===n?(u(),_=1):_&&("="===n?(_=5,e=i,i=""):"/"===n&&(_<5||">"===t[f][s+1])?(u(),3===_&&(r=r[0]),_=r,(r=r[0]).push(2,0,_),_=0):" 
"===n||"\t"===n||"\n"===n||"\r"===n?(u(),_=2):i+=n),3===_&&"!--"===i&&(_=4,r=r[0])}return u(),r}(t)),n),arguments,[])).length>1?n:n[0]}var _n=en.bind(L);export{I as Component,j as Fragment,h as Signal,_ as batch,ct as cloneElement,m as computed,ht as createContext,L as createElement,R as createRef,w as effect,L as h,_n as html,lt as hydrate,U as isValidElement,C as options,st as render,a as signal,X as toChildArray,u as untracked,Tt as useCallback,Yt as useComputed,Vt as useContext,At as useDebugValue,Ht as useEffect,Ft as useErrorBoundary,Mt as useId,$t as useImperativeHandle,Pt as useLayoutEffect,Dt as useMemo,Ut as useReducer,Nt as useRef,Xt as useSignal,Zt as useSignalEffect,Et as useState}; -)LITERAL"; -unsigned int index_js_len = sizeof(index_js); +unsigned char index_js[] = { + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x28, 0x29, + 0x7b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x43, 0x79, 0x63, 0x6c, 0x65, 0x20, + 0x64, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x29, 0x7d, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x53, 0x79, 0x6d, 0x62, 0x6f, + 0x6c, 0x2e, 0x66, 0x6f, 0x72, 0x28, 0x22, 0x70, 0x72, 0x65, 0x61, 0x63, + 0x74, 0x2d, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x73, 0x22, 0x29, 0x3b, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x28, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x66, 0x3e, 0x31, 0x29, 0x7b, 0x66, 0x2d, 0x2d, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7d, 0x6c, 0x65, 0x74, 0x20, + 0x74, 0x2c, 0x6e, 0x3d, 0x21, 0x31, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6f, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6f, 0x3b, 0x6f, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x73, 0x2b, 0x2b, 0x3b, 0x77, 0x68, + 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x3d, + 0x5f, 0x2e, 0x6f, 0x3b, 0x5f, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x3b, 0x5f, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, + 0x66, 0x28, 0x21, 0x28, 0x38, 0x26, 0x5f, 0x2e, 0x66, 0x29, 0x26, 0x26, + 0x70, 0x28, 0x5f, 0x29, 0x29, 0x74, 0x72, 0x79, 0x7b, 0x5f, 0x2e, 0x63, + 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x65, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x21, 0x6e, 0x29, 0x7b, 0x74, 0x3d, 0x65, 0x3b, 0x6e, + 0x3d, 0x21, 0x30, 0x7d, 0x7d, 0x5f, 0x3d, 0x69, 0x7d, 0x7d, 0x73, 0x3d, + 0x30, 0x3b, 0x66, 0x2d, 0x2d, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x74, + 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5f, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x66, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x28, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x6c, + 0x65, 0x74, 0x20, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x30, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x28, 0x74, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x72, 0x3e, 0x30, 0x29, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x3b, 0x72, 0x2b, 0x2b, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x28, 0x29, 0x7d, 0x66, 0x69, 0x6e, + 0x61, 0x6c, 0x6c, 
0x79, 0x7b, 0x72, 0x2d, 0x2d, 0x3b, 0x69, 0x3d, 0x6e, + 0x7d, 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x66, 0x3d, 0x30, 0x2c, 0x73, 0x3d, + 0x30, 0x2c, 0x6c, 0x3d, 0x30, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x63, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x69, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, + 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x6e, 0x2e, 0x74, 0x21, 0x3d, 0x3d, + 0x69, 0x29, 0x7b, 0x6e, 0x3d, 0x7b, 0x69, 0x3a, 0x30, 0x2c, 0x53, 0x3a, + 0x74, 0x2c, 0x70, 0x3a, 0x69, 0x2e, 0x73, 0x2c, 0x6e, 0x3a, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x2c, 0x74, 0x3a, 0x69, 0x2c, 0x65, 0x3a, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x78, 0x3a, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x2c, 0x72, 0x3a, 0x6e, 0x7d, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x69, 0x2e, 0x73, 0x29, + 0x69, 0x2e, 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, + 0x6e, 0x3b, 0x74, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x33, + 0x32, 0x26, 0x69, 0x2e, 0x66, 0x29, 0x74, 0x2e, 0x53, 0x28, 0x6e, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, 0x3d, 0x6e, + 0x2e, 0x69, 0x29, 0x7b, 0x6e, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x69, 0x66, + 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, + 0x6e, 0x29, 0x7b, 0x6e, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x6e, 0x2e, 0x70, + 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x6e, 0x2e, 0x70, 0x29, 0x6e, 0x2e, 0x70, 0x2e, 0x6e, 0x3d, 0x6e, + 0x2e, 0x6e, 0x3b, 0x6e, 0x2e, 0x70, 0x3d, 0x69, 0x2e, 0x73, 0x3b, 0x6e, + 0x2e, 0x6e, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x2e, + 0x73, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x69, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x3d, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6e, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x74, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x68, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x62, 0x72, + 0x61, 0x6e, 0x64, 0x3d, 0x6e, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x68, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x74, 0x21, 0x3d, 0x3d, 0x74, 0x26, 0x26, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x65, 0x29, 0x7b, + 0x74, 0x2e, 0x78, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, + 0x2e, 0x65, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, + 0x74, 0x7d, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, + 0x79, 0x70, 0x65, 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x28, 
0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x65, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x78, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x7b, 0x6e, + 0x2e, 0x78, 0x3d, 0x65, 0x3b, 0x74, 0x2e, 0x65, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x7d, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x65, 0x2e, 0x65, 0x3d, 0x6e, + 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, + 0x69, 0x66, 0x28, 0x74, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3d, 0x65, 0x7d, 0x7d, + 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x77, 0x28, 0x28, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x5f, 0x3d, 0x33, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, + 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x74, 0x28, 0x65, 0x29, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x7c, 0x3d, 0x5f, 0x7d, 0x7d, 0x29, 0x29, 0x7d, 0x3b, 0x68, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x4f, 0x66, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, + 0x6f, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2b, 0x22, 0x22, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x74, 0x6f, 0x4a, 0x53, 0x4f, 0x4e, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3b, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, + 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, + 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x63, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x69, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x2c, 0x73, 0x65, + 0x74, 0x28, 0x6e, 
0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, 0x20, 0x69, 0x6e, + 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x6f, 0x66, 0x20, 0x79, 0x29, 0x21, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, + 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, + 0x6f, 0x72, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, + 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x20, 0x68, 0x61, 0x76, 0x65, + 0x20, 0x73, 0x69, 0x64, 0x65, 0x2d, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x73, 0x22, 0x29, 0x7d, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x21, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x73, 0x3e, 0x31, 0x30, 0x30, 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x6e, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x2b, 0x2b, 0x3b, 0x6c, 0x2b, 0x2b, 0x3b, 0x66, 0x2b, 0x2b, + 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, + 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x66, + 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x65, 0x28, 0x29, 0x7d, 0x7d, + 0x7d, 0x7d, 0x29, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x61, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x70, 0x28, 0x74, 0x29, 0x7b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, + 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, + 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x2e, + 0x53, 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x7c, 0x7c, 0x21, + 0x6e, 0x2e, 0x53, 0x2e, 0x68, 0x28, 0x29, 0x7c, 0x7c, 0x6e, 0x2e, 0x53, + 0x2e, 0x69, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x69, 0x29, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x21, 0x31, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x64, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, + 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x2e, 0x53, + 0x2e, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x65, 0x29, 0x6e, 0x2e, 0x72, 0x3d, 0x65, 0x3b, 0x6e, + 0x2e, 0x53, 0x2e, 0x6e, 0x3d, 0x6e, 0x3b, 0x6e, 0x2e, 0x69, 0x3d, 0x2d, + 0x31, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x73, 0x3d, 0x6e, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x76, 0x28, 0x74, 0x29, 0x7b, 0x6c, + 0x65, 0x74, 0x20, 0x6e, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x73, 0x3b, 0x77, + 0x68, 0x69, 0x6c, 0x65, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, + 0x3d, 0x3d, 0x65, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, + 0x3d, 0x65, 0x2e, 0x70, 0x3b, 0x69, 0x66, 0x28, 0x2d, 0x31, 0x3d, 0x3d, + 0x3d, 0x65, 0x2e, 0x69, 0x29, 0x7b, 0x65, 0x2e, 0x53, 0x2e, 0x55, 0x28, + 0x65, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x74, 0x29, 0x74, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x6e, + 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x65, 0x2e, 
0x6e, 0x29, 0x65, 0x2e, 0x6e, 0x2e, 0x70, 0x3d, 0x74, + 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x6e, 0x3d, 0x65, 0x3b, 0x65, 0x2e, + 0x53, 0x2e, 0x6e, 0x3d, 0x65, 0x2e, 0x72, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x72, 0x29, + 0x65, 0x2e, 0x72, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x65, + 0x3d, 0x74, 0x7d, 0x74, 0x2e, 0x73, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x79, 0x28, 0x74, 0x29, 0x7b, 0x68, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, + 0x3d, 0x6c, 0x2d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, + 0x34, 0x7d, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x68, 0x29, 0x2e, 0x68, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x3b, 0x69, 0x66, + 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x33, 0x32, + 0x3d, 0x3d, 0x28, 0x33, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x35, 0x3b, 0x69, 0x66, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x67, 0x3d, 0x3d, 0x3d, 0x6c, 0x29, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x67, 0x3d, 0x6c, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x7c, 0x3d, 0x31, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x69, 0x3e, 0x30, 0x26, 0x26, 0x21, 0x70, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, + 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x7d, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x69, 0x3b, 0x74, 0x72, 0x79, + 0x7b, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, + 0x68, 0x69, 0x73, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, + 0x31, 0x36, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x7c, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x21, 0x3d, 0x3d, 0x74, 0x7c, 0x7c, 0x30, + 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x29, 0x7b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x31, 0x37, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, + 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3d, 0x74, 0x3b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x31, 0x36, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x2b, 0x2b, 0x7d, 0x69, 0x3d, 0x74, 0x3b, + 0x76, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x21, 0x30, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, + 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x74, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x33, + 0x36, 0x3b, 0x66, 
0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x21, 0x3d, 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, + 0x74, 0x2e, 0x53, 0x2e, 0x53, 0x28, 0x74, 0x29, 0x7d, 0x68, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x53, 0x2e, 0x63, + 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, 0x29, 0x7d, + 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x55, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x68, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x55, + 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x74, + 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, + 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x74, 0x29, 0x7b, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x33, 0x33, 0x3b, 0x66, 0x6f, + 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x73, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x6e, 0x29, 0x74, 0x2e, 0x53, 0x2e, + 0x55, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, + 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x36, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x74, 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, + 0x3d, 0x74, 0x3b, 0x74, 0x3d, 0x74, 0x2e, 0x78, 0x29, 0x74, 0x2e, 0x74, + 0x2e, 0x4e, 0x28, 0x29, 0x7d, 0x7d, 0x3b, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x3d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, 0x69, + 0x66, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, 0x28, 0x29, 0x29, + 0x74, 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x3b, 0x4f, 0x62, 0x6a, + 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, + 0x6f, 0x70, 0x65, 0x72, 0x74, 0x79, 0x28, 0x79, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2c, 0x22, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x22, 0x2c, 0x7b, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, 0x69, 0x66, + 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x28, + 0x29, 0x3b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x63, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x68, + 0x28, 0x29, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x21, 0x3d, 0x3d, 0x6e, 0x29, 0x6e, 0x2e, 0x69, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x3b, 0x69, 0x66, 0x28, 0x31, 0x36, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x76, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x7d, 0x7d, 0x29, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6d, 0x28, 0x74, 0x29, + 0x7b, 0x72, 0x65, 
0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, + 0x79, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x67, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x75, 0x3b, 0x74, 0x2e, 0x75, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x29, 0x7b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x5f, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x28, + 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x6e, 0x29, 0x7b, 0x74, + 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x74, 0x2e, 0x66, 0x7c, 0x3d, + 0x38, 0x3b, 0x62, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 0x77, + 0x20, 0x6e, 0x7d, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x7b, 0x69, + 0x3d, 0x5f, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x28, 0x74, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x73, + 0x3b, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x6e, 0x3b, + 0x6e, 0x3d, 0x6e, 0x2e, 0x6e, 0x29, 0x6e, 0x2e, 0x53, 0x2e, 0x55, 0x28, + 0x6e, 0x29, 0x3b, 0x74, 0x2e, 0x78, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x3b, 0x74, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x67, 0x28, 0x74, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x6b, 0x28, 0x74, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x69, + 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x29, 0x74, 0x68, 0x72, 0x6f, + 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, + 0x22, 0x4f, 0x75, 0x74, 0x2d, 0x6f, 0x66, 0x2d, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, 0x74, 0x22, 0x29, 0x3b, 0x76, + 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x69, 0x3d, 0x74, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x69, 0x66, + 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x62, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x65, 0x28, 0x29, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x53, 0x28, 0x74, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x3d, 0x74, 0x3b, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x73, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, 0x76, 0x6f, 0x69, 0x64, + 0x20, 0x30, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x3d, 0x33, 0x32, + 0x7d, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, + 0x2e, 0x63, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, + 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x53, 0x28, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x69, + 0x66, 0x28, 0x38, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, + 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x78, 0x28, 0x29, + 0x3b, 0x69, 0x66, 0x28, 0x22, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, + 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x75, 0x3d, 0x6e, 0x7d, 0x66, 0x69, + 0x6e, 0x61, 0x6c, 
0x6c, 0x79, 0x7b, 0x74, 0x28, 0x29, 0x7d, 0x7d, 0x3b, + 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, + 0x53, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, + 0x7b, 0x69, 0x66, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, + 0x29, 0x74, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, + 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x26, 0x3d, 0x2d, + 0x39, 0x3b, 0x67, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x64, 0x28, + 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x66, 0x2b, 0x2b, 0x3b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x69, 0x3b, 0x69, 0x3d, 0x74, 0x68, + 0x69, 0x73, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x2e, + 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, + 0x7d, 0x3b, 0x53, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, + 0x65, 0x2e, 0x4e, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x32, 0x26, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x66, 0x29, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x6f, 0x3d, + 0x6f, 0x3b, 0x6f, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x3b, 0x53, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x64, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x29, 0x7b, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x7c, 0x3d, 0x38, 0x3b, 0x69, 0x66, + 0x28, 0x21, 0x28, 0x31, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x29, + 0x29, 0x62, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x7d, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x77, 0x28, 0x74, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, + 0x53, 0x28, 0x74, 0x29, 0x3b, 0x74, 0x72, 0x79, 0x7b, 0x6e, 0x2e, 0x63, + 0x28, 0x29, 0x7d, 0x63, 0x61, 0x74, 0x63, 0x68, 0x28, 0x74, 0x29, 0x7b, + 0x6e, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, + 0x74, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x2e, 0x64, + 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x29, 0x7d, 0x76, 0x61, 0x72, + 0x20, 0x78, 0x2c, 0x43, 0x2c, 0x45, 0x2c, 0x55, 0x2c, 0x48, 0x2c, 0x50, + 0x2c, 0x4e, 0x2c, 0x24, 0x2c, 0x44, 0x2c, 0x54, 0x3d, 0x7b, 0x7d, 0x2c, + 0x56, 0x3d, 0x5b, 0x5d, 0x2c, 0x41, 0x3d, 0x2f, 0x61, 0x63, 0x69, 0x74, + 0x7c, 0x65, 0x78, 0x28, 0x3f, 0x3a, 0x73, 0x7c, 0x67, 0x7c, 0x6e, 0x7c, + 0x70, 0x7c, 0x24, 0x29, 0x7c, 0x72, 0x70, 0x68, 0x7c, 0x67, 0x72, 0x69, + 0x64, 0x7c, 0x6f, 0x77, 0x73, 0x7c, 0x6d, 0x6e, 0x63, 0x7c, 0x6e, 0x74, + 0x77, 0x7c, 0x69, 0x6e, 0x65, 0x5b, 0x63, 0x68, 0x5d, 0x7c, 0x7a, 0x6f, + 0x6f, 0x7c, 0x5e, 0x6f, 0x72, 0x64, 0x7c, 0x69, 0x74, 0x65, 0x72, 0x61, + 0x2f, 0x69, 0x2c, 0x46, 0x3d, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, + 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x66, + 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, + 0x6e, 0x29, 0x74, 0x5b, 0x65, 0x5d, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x57, 0x28, 0x74, 0x29, 0x7b, 0x76, + 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x3b, 0x6e, 0x26, 0x26, 0x6e, 0x2e, 0x72, + 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4c, + 0x28, 0x74, 0x2c, 
0x6e, 0x2c, 0x65, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x3d, 0x7b, 0x7d, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x22, 0x6b, + 0x65, 0x79, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x5f, 0x3d, 0x6e, 0x5b, 0x6f, + 0x5d, 0x3a, 0x22, 0x72, 0x65, 0x66, 0x22, 0x3d, 0x3d, 0x6f, 0x3f, 0x69, + 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3a, 0x72, 0x5b, 0x6f, 0x5d, 0x3d, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3b, 0x69, 0x66, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, + 0x32, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x3d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x33, 0x3f, 0x78, 0x2e, + 0x63, 0x61, 0x6c, 0x6c, 0x28, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x2c, 0x32, 0x29, 0x3a, 0x65, 0x29, 0x2c, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x74, 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, + 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x66, 0x6f, 0x72, 0x28, 0x6f, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, + 0x3d, 0x72, 0x5b, 0x6f, 0x5d, 0x26, 0x26, 0x28, 0x72, 0x5b, 0x6f, 0x5d, + 0x3d, 0x74, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x50, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x6f, 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x72, 0x2c, 0x5f, 0x2c, 0x69, + 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x4f, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x2c, 0x69, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x7b, + 0x74, 0x79, 0x70, 0x65, 0x3a, 0x74, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x3a, 0x6e, 0x2c, 0x6b, 0x65, 0x79, 0x3a, 0x65, 0x2c, 0x72, 0x65, 0x66, + 0x3a, 0x5f, 0x2c, 0x5f, 0x5f, 0x6b, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, + 0x5f, 0x5f, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, + 0x30, 0x2c, 0x5f, 0x5f, 0x65, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x5f, + 0x5f, 0x64, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x5f, 0x5f, + 0x63, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x72, 0x75, 0x63, 0x74, 0x6f, 0x72, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, + 0x30, 0x2c, 0x5f, 0x5f, 0x76, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, + 0x69, 0x3f, 0x2b, 0x2b, 0x45, 0x3a, 0x69, 0x2c, 0x5f, 0x5f, 0x69, 0x3a, + 0x2d, 0x31, 0x2c, 0x5f, 0x5f, 0x75, 0x3a, 0x30, 0x7d, 0x3b, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x69, + 0x26, 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x43, 0x2e, 0x76, 0x6e, + 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, + 0x28, 0x6f, 0x29, 0x2c, 0x6f, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x52, 0x28, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x7b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3a, 0x6e, 0x75, + 0x6c, 0x6c, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x6a, 0x28, 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x74, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x49, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x3d, 
0x74, 0x2c, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, + 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6e, 0x29, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x2e, 0x5f, 0x5f, 0x3f, 0x71, + 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x2c, 0x74, 0x2e, 0x5f, 0x5f, 0x69, 0x2b, + 0x31, 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x65, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, 0x5f, + 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, 0x2b, + 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x65, + 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, 0x29, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x3f, 0x71, 0x28, 0x74, + 0x29, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, + 0x20, 0x6e, 0x2c, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, + 0x21, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x26, 0x26, + 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x29, + 0x7b, 0x66, 0x6f, 0x72, 0x28, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, + 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x6e, 0x75, + 0x6c, 0x6c, 0x2c, 0x6e, 0x3d, 0x30, 0x3b, 0x6e, 0x3c, 0x74, 0x2e, 0x5f, + 0x5f, 0x6b, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6e, 0x2b, + 0x2b, 0x29, 0x69, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, + 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x6e, 0x5d, 0x29, 0x26, + 0x26, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x63, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x65, + 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x42, 0x28, 0x74, 0x29, 0x7d, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x47, 0x28, 0x74, 0x29, 0x7b, 0x28, 0x21, + 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, 0x74, 0x2e, 0x5f, 0x5f, + 0x64, 0x3d, 0x21, 0x30, 0x29, 0x26, 0x26, 0x48, 0x2e, 0x70, 0x75, 0x73, + 0x68, 0x28, 0x74, 0x29, 0x26, 0x26, 0x21, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, + 0x2b, 0x2b, 0x7c, 0x7c, 0x50, 0x21, 0x3d, 0x3d, 0x43, 0x2e, 0x64, 0x65, + 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, 0x65, 0x72, + 0x69, 0x6e, 0x67, 0x29, 0x26, 0x26, 0x28, 0x28, 0x50, 0x3d, 0x43, 0x2e, + 0x64, 0x65, 0x62, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x69, 0x6e, 0x67, 0x29, 0x7c, 0x7c, 0x4e, 0x29, 0x28, 0x7a, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x7a, + 0x28, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x2c, 0x6e, 0x2c, 0x65, + 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, 0x66, + 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, + 0x24, 0x29, 0x3b, 0x74, 0x3d, 0x48, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, + 0x28, 0x29, 0x3b, 0x29, 0x74, 0x2e, 0x5f, 0x5f, 0x64, 0x26, 0x26, 0x28, + 0x6e, 0x3d, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x2c, 0x5f, + 0x3d, 0x76, 0x6f, 
0x69, 0x64, 0x20, 0x30, 0x2c, 0x6f, 0x3d, 0x28, 0x69, + 0x3d, 0x28, 0x65, 0x3d, 0x74, 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x29, 0x2e, + 0x5f, 0x5f, 0x65, 0x2c, 0x75, 0x3d, 0x5b, 0x5d, 0x2c, 0x66, 0x3d, 0x5b, + 0x5d, 0x2c, 0x28, 0x72, 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x50, 0x29, 0x26, + 0x26, 0x28, 0x28, 0x5f, 0x3d, 0x4d, 0x28, 0x7b, 0x7d, 0x2c, 0x69, 0x29, + 0x29, 0x2e, 0x5f, 0x5f, 0x76, 0x3d, 0x69, 0x2e, 0x5f, 0x5f, 0x76, 0x2b, + 0x31, 0x2c, 0x43, 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x26, 0x26, 0x43, + 0x2e, 0x76, 0x6e, 0x6f, 0x64, 0x65, 0x28, 0x5f, 0x29, 0x2c, 0x5f, 0x74, + 0x28, 0x72, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x65, 0x2e, 0x5f, 0x5f, 0x6e, + 0x2c, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x72, 0x2e, + 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x2c, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, 0x5f, 0x75, + 0x3f, 0x5b, 0x6f, 0x5d, 0x3a, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x75, 0x2c, + 0x6e, 0x75, 0x6c, 0x6c, 0x3d, 0x3d, 0x6f, 0x3f, 0x71, 0x28, 0x69, 0x29, + 0x3a, 0x6f, 0x2c, 0x21, 0x21, 0x28, 0x33, 0x32, 0x26, 0x69, 0x2e, 0x5f, + 0x5f, 0x75, 0x29, 0x2c, 0x66, 0x29, 0x2c, 0x5f, 0x2e, 0x5f, 0x5f, 0x2e, + 0x5f, 0x5f, 0x6b, 0x5b, 0x5f, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x3d, 0x5f, + 0x2c, 0x69, 0x74, 0x28, 0x75, 0x2c, 0x5f, 0x2c, 0x66, 0x29, 0x2c, 0x5f, + 0x2e, 0x5f, 0x5f, 0x65, 0x21, 0x3d, 0x6f, 0x26, 0x26, 0x42, 0x28, 0x5f, + 0x29, 0x29, 0x2c, 0x48, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, + 0x6e, 0x26, 0x26, 0x48, 0x2e, 0x73, 0x6f, 0x72, 0x74, 0x28, 0x24, 0x29, + 0x29, 0x3b, 0x7a, 0x2e, 0x5f, 0x5f, 0x72, 0x3d, 0x30, 0x7d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x28, 0x74, 0x2c, 0x6e, + 0x2c, 0x65, 0x2c, 0x5f, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, + 0x2c, 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, + 0x63, 0x2c, 0x68, 0x2c, 0x61, 0x2c, 0x70, 0x2c, 0x64, 0x2c, 0x76, 0x3d, + 0x5f, 0x26, 0x26, 0x5f, 0x2e, 0x5f, 0x5f, 0x6b, 0x7c, 0x7c, 0x56, 0x2c, + 0x79, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x66, + 0x6f, 0x72, 0x28, 0x65, 0x2e, 0x5f, 0x5f, 0x64, 0x3d, 0x66, 0x2c, 0x4b, + 0x28, 0x65, 0x2c, 0x6e, 0x2c, 0x76, 0x29, 0x2c, 0x66, 0x3d, 0x65, 0x2e, + 0x5f, 0x5f, 0x64, 0x2c, 0x63, 0x3d, 0x30, 0x3b, 0x63, 0x3c, 0x79, 0x3b, + 0x63, 0x2b, 0x2b, 0x29, 0x6e, 0x75, 0x6c, 0x6c, 0x21, 0x3d, 0x28, 0x61, + 0x3d, 0x65, 0x2e, 0x5f, 0x5f, 0x6b, 0x5b, 0x63, 0x5d, 0x29, 0x26, 0x26, + 0x22, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x22, 0x21, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x22, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x21, 0x3d, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x61, 0x26, 0x26, 0x28, 0x68, 0x3d, 0x2d, 0x31, + 0x3d, 0x3d, 0x3d, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x3f, 0x54, 0x3a, 0x76, + 0x5b, 0x61, 0x2e, 0x5f, 0x5f, 0x69, 0x5d, 0x7c, 0x7c, 0x54, 0x2c, 0x61, + 0x2e, 0x5f, 0x5f, 0x69, 0x3d, 0x63, 0x2c, 0x5f, 0x74, 0x28, 0x74, 0x2c, + 0x61, 0x2c, 0x68, 0x2c, 0x69, 0x2c, 0x6f, 0x2c, 0x72, 0x2c, 0x75, 0x2c, + 0x66, 0x2c, 0x73, 0x2c, 0x6c, 0x29, 0x2c, 0x70, 0x3d, 0x61, 0x2e, 0x5f, + 0x5f, 0x65, 0x2c, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x68, 0x2e, + 0x72, 0x65, 0x66, 0x21, 0x3d, 0x61, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, + 0x28, 0x68, 0x2e, 0x72, 0x65, 0x66, 0x26, 0x26, 0x72, 0x74, 0x28, 0x68, + 0x2e, 0x72, 0x65, 0x66, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x61, 0x29, + 0x2c, 0x6c, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x61, 0x2e, 0x72, 0x65, + 0x66, 0x2c, 0x61, 0x2e, 0x5f, 0x5f, 0x63, 0x7c, 0x7c, 0x70, 0x2c, 0x61, + 0x29, 0x29, 0x2c, 
+    [... auto-generated hex data continues: the remainder of the embedded web-UI
+    bundle for the server example, stored as a C byte array. Decoded, the bytes
+    spell out minified JavaScript (what appears to be Preact's vdom diff/commit
+    and hooks internals: setState/forceUpdate, component lifecycle dispatch, DOM
+    attribute/event handling, and the hook slots used by useState/useEffect/
+    useMemo and friends). The dump is machine-generated and not meant to be read
+    or edited by hand, so the several thousand remaining byte lines are elided
+    here ...]
0x72, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, + 0x5f, 0x29, 0x2c, 0x4c, 0x74, 0x26, 0x26, 0x63, 0x61, 0x6e, 0x63, 0x65, + 0x6c, 0x41, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, + 0x61, 0x6d, 0x65, 0x28, 0x6e, 0x29, 0x2c, 0x73, 0x65, 0x74, 0x54, 0x69, + 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x29, 0x7d, 0x2c, 0x5f, 0x3d, + 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x65, + 0x2c, 0x31, 0x30, 0x30, 0x29, 0x3b, 0x4c, 0x74, 0x26, 0x26, 0x28, 0x6e, + 0x3d, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x41, 0x6e, 0x69, 0x6d, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x72, 0x61, 0x6d, 0x65, 0x28, 0x65, + 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x52, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, + 0x70, 0x74, 0x2c, 0x65, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x22, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x65, 0x26, 0x26, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x2c, 0x65, + 0x28, 0x29, 0x29, 0x2c, 0x70, 0x74, 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6a, 0x74, 0x28, 0x74, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, 0x70, 0x74, 0x3b, 0x74, 0x2e, 0x5f, + 0x5f, 0x63, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x28, 0x29, 0x2c, 0x70, 0x74, + 0x3d, 0x6e, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x49, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x21, 0x74, 0x7c, 0x7c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x21, 0x3d, 0x3d, 0x6e, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, + 0x68, 0x7c, 0x7c, 0x6e, 0x2e, 0x73, 0x6f, 0x6d, 0x65, 0x28, 0x28, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x6e, 0x2c, 0x65, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x21, 0x3d, 0x3d, + 0x74, 0x5b, 0x65, 0x5d, 0x7d, 0x29, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x71, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x29, + 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x22, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, + 0x66, 0x20, 0x6e, 0x3f, 0x6e, 0x28, 0x74, 0x29, 0x3a, 0x6e, 0x7d, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x42, 0x74, 0x28, 0x74, + 0x2c, 0x6e, 0x29, 0x7b, 0x43, 0x5b, 0x74, 0x5d, 0x3d, 0x6e, 0x2e, 0x62, + 0x69, 0x6e, 0x64, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x2c, 0x43, 0x5b, 0x74, + 0x5d, 0x7c, 0x7c, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x7d, 0x29, 0x29, + 0x7d, 0x6c, 0x65, 0x74, 0x20, 0x47, 0x74, 0x2c, 0x7a, 0x74, 0x3b, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4a, 0x74, 0x28, 0x74, + 0x29, 0x7b, 0x69, 0x66, 0x28, 0x7a, 0x74, 0x29, 0x7a, 0x74, 0x28, 0x29, + 0x3b, 0x7a, 0x74, 0x3d, 0x74, 0x26, 0x26, 0x74, 0x2e, 0x53, 0x28, 0x29, + 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4b, 0x74, + 0x28, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x74, 0x7d, 0x29, 0x7b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x58, 0x74, 0x28, 0x74, 0x29, + 0x3b, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x44, 0x74, 0x28, 0x28, 0x29, + 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x5f, 0x5f, 0x76, 0x3b, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x28, + 0x74, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, 0x29, 0x69, 0x66, 0x28, 0x74, 0x2e, + 0x5f, 0x5f, 0x63, 0x29, 0x7b, 0x74, 0x2e, 0x5f, 0x5f, 0x63, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 
0x7c, 0x3d, 0x34, 0x3b, 0x62, 0x72, 0x65, 0x61, 0x6b, + 0x7d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x2e, 0x63, + 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x74, 0x3b, + 0x69, 0x66, 0x28, 0x21, 0x55, 0x28, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, + 0x28, 0x29, 0x29, 0x26, 0x26, 0x33, 0x3d, 0x3d, 0x3d, 0x28, 0x6e, 0x75, + 0x6c, 0x6c, 0x3d, 0x3d, 0x28, 0x74, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x62, 0x61, 0x73, 0x65, 0x29, 0x3f, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, + 0x3a, 0x74, 0x2e, 0x6e, 0x6f, 0x64, 0x65, 0x54, 0x79, 0x70, 0x65, 0x29, + 0x29, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x62, 0x61, 0x73, 0x65, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x3d, 0x65, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, + 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, + 0x7d, 0x7d, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6d, 0x28, + 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x30, 0x3d, 0x3d, 0x3d, + 0x74, 0x3f, 0x30, 0x3a, 0x21, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x3f, 0x22, + 0x22, 0x3a, 0x74, 0x7c, 0x7c, 0x22, 0x22, 0x7d, 0x29, 0x7d, 0x2c, 0x5b, + 0x5d, 0x29, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x4b, 0x74, 0x2e, 0x64, 0x69, 0x73, + 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x5f, 0x73, + 0x74, 0x22, 0x3b, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x64, 0x65, + 0x66, 0x69, 0x6e, 0x65, 0x50, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, + 0x65, 0x73, 0x28, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, + 0x70, 0x65, 0x2c, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, + 0x74, 0x6f, 0x72, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, + 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x2c, 0x74, + 0x79, 0x70, 0x65, 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, + 0x72, 0x61, 0x62, 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x4b, 0x74, 0x7d, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x3a, 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, + 0x6c, 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x67, 0x65, 0x74, 0x28, 0x29, 0x7b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x7b, 0x64, 0x61, 0x74, 0x61, 0x3a, + 0x74, 0x68, 0x69, 0x73, 0x7d, 0x7d, 0x7d, 0x2c, 0x5f, 0x5f, 0x62, 0x3a, + 0x7b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x62, 0x6c, + 0x65, 0x3a, 0x21, 0x30, 0x2c, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x31, + 0x7d, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x62, 0x22, + 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, + 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x2c, 0x65, 0x3d, 0x6e, 0x2e, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, + 0x20, 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x65, 0x29, 0x7b, 0x69, 0x66, 0x28, + 0x22, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0x3d, 0x3d, + 0x3d, 0x5f, 0x29, 0x63, 0x6f, 0x6e, 0x74, 0x69, 0x6e, 0x75, 0x65, 0x3b, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x3d, 0x65, 0x5b, 0x5f, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x69, 
0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, + 0x6f, 0x66, 0x20, 0x68, 0x29, 0x7b, 0x69, 0x66, 0x28, 0x21, 0x74, 0x29, + 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, 0x70, 0x3d, 0x74, 0x3d, 0x7b, 0x7d, 0x3b, + 0x74, 0x5b, 0x5f, 0x5d, 0x3d, 0x69, 0x3b, 0x65, 0x5b, 0x5f, 0x5d, 0x3d, + 0x69, 0x2e, 0x70, 0x65, 0x65, 0x6b, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, + 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, + 0x72, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x4a, + 0x74, 0x28, 0x29, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x2c, 0x5f, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x5f, 0x29, 0x7b, + 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x26, 0x3d, 0x2d, 0x32, 0x3b, 0x65, + 0x3d, 0x5f, 0x2e, 0x5f, 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x65, 0x29, 0x5f, 0x2e, + 0x5f, 0x5f, 0x24, 0x75, 0x3d, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, + 0x3b, 0x77, 0x28, 0x28, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x28, 0x29, 0x7b, 0x6e, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x7d, 0x29, 0x29, + 0x3b, 0x6e, 0x2e, 0x63, 0x3d, 0x28, 0x29, 0x3d, 0x3e, 0x7b, 0x5f, 0x2e, + 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x31, 0x3b, 0x5f, 0x2e, 0x73, 0x65, + 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x28, 0x7b, 0x7d, 0x29, 0x7d, 0x3b, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7d, 0x28, 0x29, 0x7d, + 0x47, 0x74, 0x3d, 0x5f, 0x3b, 0x4a, 0x74, 0x28, 0x65, 0x29, 0x3b, 0x74, + 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, + 0x65, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, + 0x3d, 0x3e, 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x74, 0x28, 0x6e, 0x2c, 0x65, 0x2c, + 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x64, 0x69, 0x66, + 0x66, 0x65, 0x64, 0x22, 0x2c, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, 0x3e, + 0x7b, 0x4a, 0x74, 0x28, 0x29, 0x3b, 0x47, 0x74, 0x3d, 0x76, 0x6f, 0x69, + 0x64, 0x20, 0x30, 0x3b, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3b, 0x69, 0x66, + 0x28, 0x22, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, + 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, + 0x26, 0x26, 0x28, 0x65, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x29, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x6e, + 0x70, 0x2c, 0x5f, 0x3d, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x3b, + 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x3d, + 0x65, 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x66, 0x6f, 0x72, + 0x28, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, + 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x3d, 0x6e, 0x5b, 0x65, 0x5d, 0x3b, + 0x69, 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x21, 0x28, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x29, + 0x29, 0x7b, 0x5f, 0x2e, 0x64, 0x28, 0x29, 0x3b, 0x6e, 0x5b, 0x65, 0x5d, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x7d, 0x7d, 0x65, 0x6c, 0x73, + 0x65, 0x7b, 0x6e, 0x3d, 0x7b, 0x7d, 0x3b, 0x65, 0x2e, 0x55, 0x3d, 0x6e, + 0x7d, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, + 0x6e, 0x20, 0x74, 0x29, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x6f, 0x3d, 0x6e, + 0x5b, 0x69, 0x5d, 0x2c, 0x72, 0x3d, 0x74, 0x5b, 0x69, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x6f, + 0x29, 0x7b, 0x6f, 0x3d, 0x51, 0x74, 0x28, 0x65, 0x2c, 0x69, 0x2c, 0x72, + 0x2c, 0x5f, 0x29, 
0x3b, 0x6e, 0x5b, 0x69, 0x5d, 0x3d, 0x6f, 0x7d, 0x65, + 0x6c, 0x73, 0x65, 0x20, 0x6f, 0x2e, 0x6f, 0x28, 0x72, 0x2c, 0x5f, 0x29, + 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, 0x29, 0x3b, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x51, 0x74, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x69, 0x3d, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x26, 0x26, 0x76, + 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3d, 0x3d, 0x3d, 0x74, 0x2e, 0x6f, 0x77, + 0x6e, 0x65, 0x72, 0x53, 0x56, 0x47, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x2c, 0x6f, 0x3d, 0x61, 0x28, 0x65, 0x29, 0x3b, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x7b, 0x6f, 0x3a, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x3d, + 0x3e, 0x7b, 0x6f, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x74, 0x3b, + 0x5f, 0x3d, 0x6e, 0x7d, 0x2c, 0x64, 0x3a, 0x77, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x3d, 0x6f, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, + 0x69, 0x66, 0x28, 0x5f, 0x5b, 0x6e, 0x5d, 0x21, 0x3d, 0x3d, 0x65, 0x29, + 0x7b, 0x5f, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x69, 0x66, 0x28, 0x69, + 0x29, 0x74, 0x5b, 0x6e, 0x5d, 0x3d, 0x65, 0x3b, 0x65, 0x6c, 0x73, 0x65, + 0x20, 0x69, 0x66, 0x28, 0x65, 0x29, 0x74, 0x2e, 0x73, 0x65, 0x74, 0x41, + 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x28, 0x6e, 0x2c, 0x65, + 0x29, 0x3b, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x74, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x76, 0x65, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, + 0x28, 0x6e, 0x29, 0x7d, 0x7d, 0x29, 0x7d, 0x7d, 0x42, 0x74, 0x28, 0x22, + 0x75, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x2c, 0x28, 0x74, 0x2c, + 0x6e, 0x29, 0x3d, 0x3e, 0x7b, 0x69, 0x66, 0x28, 0x22, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x22, 0x3d, 0x3d, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, + 0x20, 0x6e, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x29, 0x7b, 0x6c, 0x65, 0x74, + 0x20, 0x74, 0x3d, 0x6e, 0x2e, 0x5f, 0x5f, 0x65, 0x3b, 0x69, 0x66, 0x28, + 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, + 0x2e, 0x55, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x55, + 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x74, 0x20, 0x69, 0x6e, 0x20, 0x6e, 0x29, 0x7b, + 0x6c, 0x65, 0x74, 0x20, 0x65, 0x3d, 0x6e, 0x5b, 0x74, 0x5d, 0x3b, 0x69, + 0x66, 0x28, 0x65, 0x29, 0x65, 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, + 0x7d, 0x65, 0x6c, 0x73, 0x65, 0x7b, 0x6c, 0x65, 0x74, 0x20, 0x74, 0x3d, + 0x6e, 0x2e, 0x5f, 0x5f, 0x63, 0x3b, 0x69, 0x66, 0x28, 0x74, 0x29, 0x7b, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x74, 0x2e, 0x5f, 0x5f, + 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x6e, 0x29, 0x7b, 0x74, 0x2e, 0x5f, + 0x5f, 0x24, 0x75, 0x3d, 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x3b, 0x6e, + 0x2e, 0x64, 0x28, 0x29, 0x7d, 0x7d, 0x7d, 0x74, 0x28, 0x6e, 0x29, 0x7d, + 0x29, 0x3b, 0x42, 0x74, 0x28, 0x22, 0x5f, 0x5f, 0x68, 0x22, 0x2c, 0x28, + 0x74, 0x2c, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x3d, 0x3e, 0x7b, 0x69, + 0x66, 0x28, 0x5f, 0x3c, 0x33, 0x7c, 0x7c, 0x39, 0x3d, 0x3d, 0x3d, 0x5f, + 0x29, 0x6e, 0x2e, 0x5f, 0x5f, 0x24, 0x66, 0x7c, 0x3d, 0x32, 0x3b, 0x74, + 0x28, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7d, 0x29, 0x3b, 0x49, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x73, 0x68, + 0x6f, 0x75, 0x6c, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, 0x6e, 0x29, 0x7b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 
0x20, 0x65, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x5f, 0x24, 0x75, 0x3b, 0x69, 0x66, 0x28, 0x21, 0x28, 0x65, 0x26, 0x26, + 0x76, 0x6f, 0x69, 0x64, 0x20, 0x30, 0x21, 0x3d, 0x3d, 0x65, 0x2e, 0x73, + 0x7c, 0x7c, 0x34, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, 0x24, + 0x66, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x69, 0x66, 0x28, 0x33, 0x26, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x5f, + 0x24, 0x66, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, + 0x20, 0x6e, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x30, 0x3b, + 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x5f, 0x20, 0x69, 0x6e, + 0x20, 0x74, 0x29, 0x69, 0x66, 0x28, 0x22, 0x5f, 0x5f, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x22, 0x21, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x74, 0x5b, + 0x5f, 0x5d, 0x21, 0x3d, 0x3d, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x5b, 0x5f, 0x5d, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x21, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x6c, 0x65, 0x74, 0x20, + 0x5f, 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x70, 0x73, 0x29, 0x69, 0x66, 0x28, 0x21, 0x28, 0x5f, 0x20, 0x69, + 0x6e, 0x20, 0x74, 0x29, 0x29, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, + 0x30, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x21, 0x31, 0x7d, 0x3b, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x58, 0x74, 0x28, + 0x74, 0x29, 0x7b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x44, 0x74, + 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x61, 0x28, 0x74, 0x29, 0x2c, 0x5b, 0x5d, + 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x59, + 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6e, + 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, 0x6e, 0x2e, 0x63, 0x75, 0x72, + 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, 0x47, 0x74, 0x2e, 0x5f, 0x5f, + 0x24, 0x66, 0x7c, 0x3d, 0x34, 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x44, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x6d, 0x28, 0x28, 0x29, + 0x3d, 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, + 0x29, 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x5a, 0x74, 0x28, 0x74, 0x29, 0x7b, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x6e, 0x3d, 0x4e, 0x74, 0x28, 0x74, 0x29, 0x3b, + 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x3d, 0x74, 0x3b, + 0x48, 0x74, 0x28, 0x28, 0x29, 0x3d, 0x3e, 0x77, 0x28, 0x28, 0x29, 0x3d, + 0x3e, 0x6e, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x28, 0x29, + 0x29, 0x2c, 0x5b, 0x5d, 0x29, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x74, 0x6e, + 0x3d, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x2c, + 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x69, + 0x3b, 0x6e, 0x5b, 0x30, 0x5d, 0x3d, 0x30, 0x3b, 0x66, 0x6f, 0x72, 0x28, + 0x76, 0x61, 0x72, 0x20, 0x6f, 0x3d, 0x31, 0x3b, 0x6f, 0x3c, 0x6e, 0x2e, + 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x6f, 0x2b, 0x2b, 0x29, 0x7b, + 0x76, 0x61, 0x72, 0x20, 0x72, 0x3d, 0x6e, 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, + 0x2c, 0x75, 0x3d, 0x6e, 0x5b, 0x6f, 0x5d, 0x3f, 0x28, 0x6e, 0x5b, 0x30, + 0x5d, 0x7c, 0x3d, 0x72, 0x3f, 0x31, 0x3a, 0x32, 0x2c, 0x65, 0x5b, 0x6e, + 0x5b, 0x6f, 0x2b, 0x2b, 0x5d, 0x5d, 0x29, 0x3a, 0x6e, 0x5b, 0x2b, 0x2b, + 0x6f, 0x5d, 0x3b, 0x33, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x30, + 0x5d, 0x3d, 0x75, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, + 0x31, 0x5d, 0x3d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x61, 0x73, + 0x73, 0x69, 0x67, 
0x6e, 0x28, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, + 0x7d, 0x2c, 0x75, 0x29, 0x3a, 0x35, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x28, + 0x5f, 0x5b, 0x31, 0x5d, 0x3d, 0x5f, 0x5b, 0x31, 0x5d, 0x7c, 0x7c, 0x7b, + 0x7d, 0x29, 0x5b, 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x3d, 0x75, + 0x3a, 0x36, 0x3d, 0x3d, 0x3d, 0x72, 0x3f, 0x5f, 0x5b, 0x31, 0x5d, 0x5b, + 0x6e, 0x5b, 0x2b, 0x2b, 0x6f, 0x5d, 0x5d, 0x2b, 0x3d, 0x75, 0x2b, 0x22, + 0x22, 0x3a, 0x72, 0x3f, 0x28, 0x69, 0x3d, 0x74, 0x2e, 0x61, 0x70, 0x70, + 0x6c, 0x79, 0x28, 0x75, 0x2c, 0x74, 0x6e, 0x28, 0x74, 0x2c, 0x75, 0x2c, + 0x65, 0x2c, 0x5b, 0x22, 0x22, 0x2c, 0x6e, 0x75, 0x6c, 0x6c, 0x5d, 0x29, + 0x29, 0x2c, 0x5f, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x69, 0x29, 0x2c, + 0x75, 0x5b, 0x30, 0x5d, 0x3f, 0x6e, 0x5b, 0x30, 0x5d, 0x7c, 0x3d, 0x32, + 0x3a, 0x28, 0x6e, 0x5b, 0x6f, 0x2d, 0x32, 0x5d, 0x3d, 0x30, 0x2c, 0x6e, + 0x5b, 0x6f, 0x5d, 0x3d, 0x69, 0x29, 0x29, 0x3a, 0x5f, 0x2e, 0x70, 0x75, + 0x73, 0x68, 0x28, 0x75, 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x5f, 0x7d, 0x2c, 0x6e, 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, + 0x61, 0x70, 0x3b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x65, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x3d, + 0x6e, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, + 0x3b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x7c, 0x7c, 0x28, + 0x6e, 0x3d, 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x2c, 0x6e, 0x6e, + 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2c, 0x6e, 0x29, + 0x29, 0x2c, 0x28, 0x6e, 0x3d, 0x74, 0x6e, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2c, 0x6e, 0x2e, 0x67, 0x65, 0x74, 0x28, 0x74, 0x29, 0x7c, 0x7c, 0x28, + 0x6e, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x74, 0x2c, 0x6e, 0x3d, 0x66, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x66, 0x6f, + 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x6e, 0x2c, 0x65, 0x2c, 0x5f, 0x3d, + 0x31, 0x2c, 0x69, 0x3d, 0x22, 0x22, 0x2c, 0x6f, 0x3d, 0x22, 0x22, 0x2c, + 0x72, 0x3d, 0x5b, 0x30, 0x5d, 0x2c, 0x75, 0x3d, 0x66, 0x75, 0x6e, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x28, 0x74, 0x29, 0x7b, 0x31, 0x3d, 0x3d, 0x3d, + 0x5f, 0x26, 0x26, 0x28, 0x74, 0x7c, 0x7c, 0x28, 0x69, 0x3d, 0x69, 0x2e, + 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, + 0x2a, 0x5c, 0x6e, 0x5c, 0x73, 0x2a, 0x7c, 0x5c, 0x73, 0x2a, 0x5c, 0x6e, + 0x5c, 0x73, 0x2a, 0x24, 0x2f, 0x67, 0x2c, 0x22, 0x22, 0x29, 0x29, 0x29, + 0x3f, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x30, 0x2c, 0x74, 0x2c, + 0x69, 0x29, 0x3a, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x74, + 0x7c, 0x7c, 0x69, 0x29, 0x3f, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x33, 0x2c, 0x74, 0x2c, 0x69, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, + 0x3a, 0x32, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x22, 0x2e, 0x2e, 0x2e, + 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x74, 0x3f, 0x72, 0x2e, 0x70, + 0x75, 0x73, 0x68, 0x28, 0x34, 0x2c, 0x74, 0x2c, 0x30, 0x29, 0x3a, 0x32, + 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x69, 0x26, 0x26, 0x21, 0x74, 0x3f, + 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x35, 0x2c, 0x30, 0x2c, 0x21, + 0x30, 0x2c, 0x69, 0x29, 0x3a, 0x5f, 0x3e, 0x3d, 0x35, 0x26, 0x26, 0x28, + 0x28, 0x69, 0x7c, 0x7c, 0x21, 0x74, 0x26, 0x26, 0x35, 0x3d, 0x3d, 0x3d, + 0x5f, 0x29, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, + 0x5f, 0x2c, 0x30, 0x2c, 0x69, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, 0x36, + 0x29, 0x2c, 0x74, 0x26, 0x26, 0x28, 0x72, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x5f, 0x2c, 0x74, 0x2c, 0x30, 0x2c, 0x65, 0x29, 0x2c, 0x5f, 0x3d, + 0x36, 0x29, 0x29, 
0x2c, 0x69, 0x3d, 0x22, 0x22, 0x7d, 0x2c, 0x66, 0x3d, + 0x30, 0x3b, 0x66, 0x3c, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x3b, 0x66, 0x2b, 0x2b, 0x29, 0x7b, 0x66, 0x26, 0x26, 0x28, 0x31, 0x3d, + 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x75, 0x28, 0x29, 0x2c, 0x75, 0x28, 0x66, + 0x29, 0x29, 0x3b, 0x66, 0x6f, 0x72, 0x28, 0x76, 0x61, 0x72, 0x20, 0x73, + 0x3d, 0x30, 0x3b, 0x73, 0x3c, 0x74, 0x5b, 0x66, 0x5d, 0x2e, 0x6c, 0x65, + 0x6e, 0x67, 0x74, 0x68, 0x3b, 0x73, 0x2b, 0x2b, 0x29, 0x6e, 0x3d, 0x74, + 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x5d, 0x2c, 0x31, 0x3d, 0x3d, 0x3d, 0x5f, + 0x3f, 0x22, 0x3c, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, + 0x29, 0x2c, 0x72, 0x3d, 0x5b, 0x72, 0x5d, 0x2c, 0x5f, 0x3d, 0x33, 0x29, + 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x34, 0x3d, 0x3d, 0x3d, 0x5f, 0x3f, + 0x22, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, 0x26, 0x22, 0x3e, + 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, 0x5f, 0x3d, 0x31, 0x2c, 0x69, + 0x3d, 0x22, 0x22, 0x29, 0x3a, 0x69, 0x3d, 0x6e, 0x2b, 0x69, 0x5b, 0x30, + 0x5d, 0x3a, 0x6f, 0x3f, 0x6e, 0x3d, 0x3d, 0x3d, 0x6f, 0x3f, 0x6f, 0x3d, + 0x22, 0x22, 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x3a, 0x27, 0x22, 0x27, 0x3d, + 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x27, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x3f, 0x6f, 0x3d, 0x6e, 0x3a, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, + 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x31, 0x29, 0x3a, 0x5f, + 0x26, 0x26, 0x28, 0x22, 0x3d, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x3f, 0x28, + 0x5f, 0x3d, 0x35, 0x2c, 0x65, 0x3d, 0x69, 0x2c, 0x69, 0x3d, 0x22, 0x22, + 0x29, 0x3a, 0x22, 0x2f, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x26, 0x26, 0x28, + 0x5f, 0x3c, 0x35, 0x7c, 0x7c, 0x22, 0x3e, 0x22, 0x3d, 0x3d, 0x3d, 0x74, + 0x5b, 0x66, 0x5d, 0x5b, 0x73, 0x2b, 0x31, 0x5d, 0x29, 0x3f, 0x28, 0x75, + 0x28, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, 0x26, 0x26, 0x28, 0x72, + 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2c, 0x5f, 0x3d, 0x72, 0x2c, 0x28, + 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, 0x29, 0x2e, 0x70, 0x75, 0x73, 0x68, + 0x28, 0x32, 0x2c, 0x30, 0x2c, 0x5f, 0x29, 0x2c, 0x5f, 0x3d, 0x30, 0x29, + 0x3a, 0x22, 0x20, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, + 0x74, 0x22, 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x6e, 0x22, + 0x3d, 0x3d, 0x3d, 0x6e, 0x7c, 0x7c, 0x22, 0x5c, 0x72, 0x22, 0x3d, 0x3d, + 0x3d, 0x6e, 0x3f, 0x28, 0x75, 0x28, 0x29, 0x2c, 0x5f, 0x3d, 0x32, 0x29, + 0x3a, 0x69, 0x2b, 0x3d, 0x6e, 0x29, 0x2c, 0x33, 0x3d, 0x3d, 0x3d, 0x5f, + 0x26, 0x26, 0x22, 0x21, 0x2d, 0x2d, 0x22, 0x3d, 0x3d, 0x3d, 0x69, 0x26, + 0x26, 0x28, 0x5f, 0x3d, 0x34, 0x2c, 0x72, 0x3d, 0x72, 0x5b, 0x30, 0x5d, + 0x29, 0x7d, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x75, 0x28, 0x29, + 0x2c, 0x72, 0x7d, 0x28, 0x74, 0x29, 0x29, 0x2c, 0x6e, 0x29, 0x2c, 0x61, + 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2c, 0x5b, 0x5d, 0x29, + 0x29, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x3e, 0x31, 0x3f, 0x6e, + 0x3a, 0x6e, 0x5b, 0x30, 0x5d, 0x7d, 0x76, 0x61, 0x72, 0x20, 0x5f, 0x6e, + 0x3d, 0x65, 0x6e, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x4c, 0x29, 0x3b, + 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x7b, 0x49, 0x20, 0x61, 0x73, 0x20, + 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x6a, 0x20, + 0x61, 0x73, 0x20, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2c, + 0x68, 0x20, 0x61, 0x73, 0x20, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, + 0x5f, 0x20, 0x61, 0x73, 0x20, 0x62, 0x61, 0x74, 0x63, 0x68, 0x2c, 0x63, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x6d, 0x20, 0x61, 0x73, 0x20, 0x63, + 0x6f, 0x6d, 0x70, 
0x75, 0x74, 0x65, 0x64, 0x2c, 0x68, 0x74, 0x20, 0x61, + 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x74, + 0x65, 0x78, 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x52, + 0x20, 0x61, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x52, 0x65, + 0x66, 0x2c, 0x77, 0x20, 0x61, 0x73, 0x20, 0x65, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x2c, 0x4c, 0x20, 0x61, 0x73, 0x20, 0x68, 0x2c, 0x5f, 0x6e, 0x20, + 0x61, 0x73, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x6c, 0x74, 0x20, 0x61, + 0x73, 0x20, 0x68, 0x79, 0x64, 0x72, 0x61, 0x74, 0x65, 0x2c, 0x55, 0x20, + 0x61, 0x73, 0x20, 0x69, 0x73, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x45, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x2c, 0x43, 0x20, 0x61, 0x73, 0x20, 0x6f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2c, 0x73, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x61, 0x20, 0x61, 0x73, + 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x58, 0x20, 0x61, 0x73, + 0x20, 0x74, 0x6f, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x41, 0x72, 0x72, 0x61, + 0x79, 0x2c, 0x75, 0x20, 0x61, 0x73, 0x20, 0x75, 0x6e, 0x74, 0x72, 0x61, + 0x63, 0x6b, 0x65, 0x64, 0x2c, 0x54, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x2c, 0x59, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, + 0x75, 0x74, 0x65, 0x64, 0x2c, 0x56, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x41, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x44, 0x65, 0x62, 0x75, 0x67, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x48, 0x74, 0x20, 0x61, 0x73, 0x20, + 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x46, 0x74, + 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x61, 0x72, 0x79, 0x2c, 0x4d, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x64, 0x2c, 0x24, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x49, 0x6d, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x76, 0x65, 0x48, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x2c, 0x50, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4c, 0x61, 0x79, 0x6f, + 0x75, 0x74, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x44, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x4d, 0x65, 0x6d, 0x6f, 0x2c, 0x55, + 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x64, 0x75, + 0x63, 0x65, 0x72, 0x2c, 0x4e, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, 0x73, + 0x65, 0x52, 0x65, 0x66, 0x2c, 0x58, 0x74, 0x20, 0x61, 0x73, 0x20, 0x75, + 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x5a, 0x74, 0x20, + 0x61, 0x73, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, + 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x45, 0x74, 0x20, 0x61, 0x73, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x7d, 0x3b, 0x0a +}; +unsigned int index_js_len = 22800; diff --git a/examples/server/json-schema-to-grammar.mjs.hpp b/examples/server/json-schema-to-grammar.mjs.hpp index 83b22d670..0a05c369d 100644 --- a/examples/server/json-schema-to-grammar.mjs.hpp +++ b/examples/server/json-schema-to-grammar.mjs.hpp @@ -1,115 +1,311 @@ -const char json_schema_to_grammar_mjs[] = R"LITERAL( -const SPACE_RULE = '" "?'; - -const PRIMITIVE_RULES = { - boolean: '("true" | "false") space', - number: '("-"? ([0-9] | [1-9] [0-9]*)) ("." [0-9]+)? ([eE] [-+]? [0-9]+)? space', - integer: '("-"? 
([0-9] | [1-9] [0-9]*)) space', - string: ` "\\"" ( - [^"\\\\] | - "\\\\" (["\\\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F]) - )* "\\"" space`, - null: '"null" space', +unsigned char json_schema_to_grammar_mjs[] = { + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, + 0x52, 0x55, 0x4c, 0x45, 0x20, 0x3d, 0x20, 0x27, 0x22, 0x20, 0x22, 0x3f, + 0x27, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x52, + 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x53, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x62, 0x6f, 0x6f, 0x6c, + 0x65, 0x61, 0x6e, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x74, 0x72, 0x75, 0x65, + 0x22, 0x20, 0x7c, 0x20, 0x22, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x22, 0x29, + 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x6e, + 0x75, 0x6d, 0x62, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, 0x22, + 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, 0x5b, + 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, 0x29, + 0x29, 0x20, 0x28, 0x22, 0x2e, 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, + 0x2b, 0x29, 0x3f, 0x20, 0x28, 0x5b, 0x65, 0x45, 0x5d, 0x20, 0x5b, 0x2d, + 0x2b, 0x5d, 0x3f, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2b, 0x29, 0x3f, + 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x69, + 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x3a, 0x20, 0x27, 0x28, 0x22, 0x2d, + 0x22, 0x3f, 0x20, 0x28, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x20, 0x7c, 0x20, + 0x5b, 0x31, 0x2d, 0x39, 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x5d, 0x2a, + 0x29, 0x29, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x2c, 0x0a, 0x20, + 0x20, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x3a, 0x20, 0x60, 0x20, 0x22, + 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x5b, 0x5e, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, 0x5d, 0x20, + 0x7c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x22, 0x5c, + 0x5c, 0x5c, 0x5c, 0x22, 0x20, 0x28, 0x5b, 0x22, 0x5c, 0x5c, 0x5c, 0x5c, + 0x2f, 0x62, 0x66, 0x6e, 0x72, 0x74, 0x5d, 0x20, 0x7c, 0x20, 0x22, 0x75, + 0x22, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x20, 0x5b, 0x30, 0x2d, 0x39, 0x61, 0x2d, 0x66, 0x41, 0x2d, 0x46, + 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2a, 0x20, + 0x22, 0x5c, 0x5c, 0x22, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, + 0x2c, 0x0a, 0x20, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3a, 0x20, 0x27, 0x22, + 0x6e, 0x75, 0x6c, 0x6c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x2c, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, 0x52, 0x45, 0x20, 0x3d, 0x20, + 0x2f, 0x5b, 0x5e, 0x5c, 0x64, 0x41, 0x2d, 0x5a, 0x61, 0x2d, 0x7a, 0x2d, + 0x5d, 0x2b, 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, + 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, 0x52, + 0x45, 0x20, 0x3d, 0x20, 0x2f, 0x5b, 0x5c, 0x6e, 0x5c, 0x72, 0x22, 0x5d, + 0x2f, 0x67, 0x3b, 0x0a, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x52, + 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, 0x45, 0x52, 0x41, + 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x53, 0x20, 0x3d, 0x20, + 0x7b, 0x27, 0x5c, 0x72, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 
0x72, 0x27, + 0x2c, 0x20, 0x27, 0x5c, 0x6e, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x6e, + 0x27, 0x2c, 0x20, 0x27, 0x22, 0x27, 0x3a, 0x20, 0x27, 0x5c, 0x5c, 0x22, + 0x27, 0x7d, 0x3b, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x6f, + 0x72, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x3d, + 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x7c, + 0x7c, 0x20, 0x7b, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x20, 0x3d, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x4d, 0x61, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, + 0x65, 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x27, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x27, 0x2c, 0x20, 0x53, 0x50, 0x41, 0x43, 0x45, 0x5f, 0x52, 0x55, + 0x4c, 0x45, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x4a, 0x53, + 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, + 0x28, 0x6c, 0x69, 0x74, 0x65, 0x72, 0x61, 0x6c, 0x29, 0x2e, 0x72, 0x65, + 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, 0x49, 0x54, + 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, 0x45, 0x5f, + 0x52, 0x45, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x20, + 0x3d, 0x3e, 0x20, 0x47, 0x52, 0x41, 0x4d, 0x4d, 0x41, 0x52, 0x5f, 0x4c, + 0x49, 0x54, 0x45, 0x52, 0x41, 0x4c, 0x5f, 0x45, 0x53, 0x43, 0x41, 0x50, + 0x45, 0x53, 0x5b, 0x6d, 0x5d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x60, 0x22, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x61, 0x70, 0x65, 0x64, 0x7d, + 0x22, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x5f, + 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, + 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x5f, 0x43, 0x48, 0x41, 0x52, 0x53, 0x5f, + 0x52, 0x45, 0x2c, 0x20, 0x27, 0x2d, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, 0x65, 0x73, + 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x67, 0x65, 0x74, 0x28, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x20, 0x3d, 
0x3d, 0x3d, + 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, + 0x69, 0x20, 0x3d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x68, 0x61, 0x73, 0x28, + 0x60, 0x24, 0x7b, 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x20, 0x2b, 0x3d, 0x20, 0x31, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x6b, 0x65, 0x79, 0x20, 0x3d, 0x20, 0x60, 0x24, 0x7b, + 0x65, 0x73, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x69, 0x7d, + 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, 0x75, 0x6c, 0x65, + 0x73, 0x2e, 0x73, 0x65, 0x74, 0x28, 0x6b, 0x65, 0x79, 0x2c, 0x20, 0x72, + 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6b, 0x65, 0x79, 0x3b, 0x0a, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, + 0x3d, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x74, 0x79, 0x70, + 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, + 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x7c, 0x7c, 0x20, 0x27, 0x72, 0x6f, 0x6f, + 0x74, 0x27, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x6f, 0x6e, 0x65, 0x4f, + 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, + 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, + 0x6f, 0x6e, 0x65, 0x4f, 0x66, 0x20, 0x7c, 0x7c, 0x20, 0x73, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x2e, 0x61, 0x6e, 0x79, 0x4f, 0x66, 0x29, 0x2e, 0x6d, + 0x61, 0x70, 0x28, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, + 0x73, 0x69, 0x74, 0x28, 0x61, 0x6c, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, + 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x69, 0x7d, 0x60, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, + 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, + 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x72, + 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, + 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 
0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x27, 0x65, + 0x6e, 0x75, 0x6d, 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, + 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x65, 0x6e, 0x75, 0x6d, + 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x76, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x68, + 0x69, 0x73, 0x2e, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, + 0x74, 0x65, 0x72, 0x61, 0x6c, 0x28, 0x76, 0x29, 0x29, 0x2e, 0x6a, 0x6f, + 0x69, 0x6e, 0x28, 0x27, 0x20, 0x7c, 0x20, 0x27, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, 0x52, 0x75, 0x6c, + 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x27, 0x20, 0x26, 0x26, + 0x20, 0x27, 0x70, 0x72, 0x6f, 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, + 0x27, 0x20, 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x54, 0x4f, 0x44, 0x4f, 0x3a, 0x20, 0x60, 0x72, 0x65, 0x71, 0x75, 0x69, + 0x72, 0x65, 0x64, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, + 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, + 0x64, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, + 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, + 0x72, 0x6f, 0x70, 0x50, 0x61, 0x69, 0x72, 0x73, 0x20, 0x3d, 0x20, 0x4f, + 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, + 0x73, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x65, 0x72, 0x74, 0x69, 0x65, 0x73, 0x29, 0x2e, 0x73, 0x6f, 0x72, + 0x74, 0x28, 0x28, 0x61, 0x2c, 0x20, 0x62, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x73, 0x6f, 0x72, 0x74, 0x20, 0x62, 0x79, 0x20, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, + 0x70, 0x5f, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, 0x28, 0x69, 0x66, 0x20, + 0x73, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x65, 0x64, 0x29, 0x20, 0x74, + 0x68, 0x65, 0x6e, 0x20, 0x62, 0x79, 0x20, 0x6b, 0x65, 0x79, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 
0x73, 0x74, + 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x3d, 0x20, 0x74, 0x79, + 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x5b, 0x61, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x27, 0x20, 0x3f, 0x20, + 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x61, 0x5b, + 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, 0x6e, 0x66, 0x69, 0x6e, 0x69, + 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x42, + 0x20, 0x3d, 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x70, 0x72, + 0x6f, 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, + 0x5d, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x6e, 0x75, 0x6d, 0x62, 0x65, + 0x72, 0x27, 0x20, 0x3f, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x4f, 0x72, 0x64, + 0x65, 0x72, 0x5b, 0x62, 0x5b, 0x30, 0x5d, 0x5d, 0x20, 0x3a, 0x20, 0x49, + 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x79, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x6f, 0x72, 0x64, 0x65, 0x72, 0x41, 0x20, 0x2d, 0x20, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x42, 0x20, 0x7c, 0x7c, 0x20, 0x61, 0x5b, 0x30, 0x5d, 0x2e, + 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x72, + 0x65, 0x28, 0x62, 0x5b, 0x30, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x3d, + 0x20, 0x27, 0x22, 0x7b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x70, + 0x50, 0x61, 0x69, 0x72, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, + 0x68, 0x28, 0x28, 0x5b, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x5d, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x70, 0x72, 0x6f, 0x70, 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x76, 0x69, 0x73, + 0x69, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x2c, 0x20, 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, + 0x3a, 0x20, 0x22, 0x22, 0x7d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x4e, + 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x20, 0x3e, 0x20, + 0x30, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, 0x27, + 0x20, 0x22, 0x2c, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, + 0x2b, 0x3d, 0x20, 0x60, 0x20, 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x4c, 0x69, 0x74, 0x65, 0x72, + 0x61, 0x6c, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x29, + 0x7d, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x22, 0x3a, 0x22, 0x20, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, + 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x60, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 
0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x20, 0x2b, 0x3d, 0x20, + 0x27, 0x20, 0x22, 0x7d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x27, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, + 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, + 0x6d, 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x61, 0x72, 0x72, 0x61, 0x79, 0x27, + 0x20, 0x26, 0x26, 0x20, 0x27, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x27, 0x20, + 0x69, 0x6e, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x4f, + 0x44, 0x4f, 0x20, 0x60, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x49, 0x74, + 0x65, 0x6d, 0x73, 0x60, 0x20, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x20, 0x28, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x70, 0x79, 0x74, 0x68, 0x6f, + 0x6e, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, + 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2e, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x2c, 0x20, 0x60, 0x24, + 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, + 0x20, 0x3f, 0x20, 0x22, 0x2d, 0x22, 0x20, 0x3a, 0x20, 0x22, 0x22, 0x7d, + 0x69, 0x74, 0x65, 0x6d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x75, 0x6c, 0x65, + 0x20, 0x3d, 0x20, 0x60, 0x22, 0x5b, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x20, 0x28, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, 0x52, 0x75, 0x6c, + 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x28, 0x22, 0x2c, 0x22, 0x20, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x20, 0x24, 0x7b, 0x69, 0x74, 0x65, 0x6d, + 0x52, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x29, 0x2a, 0x29, + 0x3f, 0x20, 0x22, 0x5d, 0x22, 0x20, 0x73, 0x70, 0x61, 0x63, 0x65, 0x60, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x61, 0x64, 0x64, + 0x52, 0x75, 0x6c, 0x65, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, + 0x65, 0x2c, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x50, 0x52, + 0x49, 0x4d, 0x49, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, + 0x53, 0x5b, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, + 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x55, 0x6e, 0x72, 0x65, 0x63, 0x6f, + 0x67, 0x6e, 0x69, 0x7a, 0x65, 0x64, 0x20, 0x73, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x3a, 0x20, 0x24, 0x7b, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x29, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 
0x5f, 0x61, + 0x64, 0x64, 0x52, 0x75, 0x6c, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x75, 0x6c, 0x65, 0x4e, 0x61, 0x6d, 0x65, + 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, + 0x3f, 0x20, 0x27, 0x72, 0x6f, 0x6f, 0x74, 0x27, 0x20, 0x3a, 0x20, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x50, 0x52, 0x49, 0x4d, 0x49, + 0x54, 0x49, 0x56, 0x45, 0x5f, 0x52, 0x55, 0x4c, 0x45, 0x53, 0x5b, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x54, 0x79, 0x70, 0x65, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x67, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x3d, 0x20, 0x27, 0x27, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x5f, 0x72, + 0x75, 0x6c, 0x65, 0x73, 0x2e, 0x66, 0x6f, 0x72, 0x45, 0x61, 0x63, 0x68, + 0x28, 0x28, 0x72, 0x75, 0x6c, 0x65, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x20, 0x2b, 0x3d, 0x20, + 0x60, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x20, 0x3a, 0x3a, 0x3d, + 0x20, 0x24, 0x7b, 0x72, 0x75, 0x6c, 0x65, 0x7d, 0x5c, 0x6e, 0x60, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x72, 0x61, 0x6d, + 0x6d, 0x61, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, 0x0a }; - -const INVALID_RULE_CHARS_RE = /[^\dA-Za-z-]+/g; -const GRAMMAR_LITERAL_ESCAPE_RE = /[\n\r"]/g; -const GRAMMAR_LITERAL_ESCAPES = {'\r': '\\r', '\n': '\\n', '"': '\\"'}; - -export class SchemaConverter { - constructor(propOrder) { - this._propOrder = propOrder || {}; - this._rules = new Map(); - this._rules.set('space', SPACE_RULE); - } - - _formatLiteral(literal) { - const escaped = JSON.stringify(literal).replace( - GRAMMAR_LITERAL_ESCAPE_RE, - m => GRAMMAR_LITERAL_ESCAPES[m] - ); - return `"${escaped}"`; - } - - _addRule(name, rule) { - let escName = name.replace(INVALID_RULE_CHARS_RE, '-'); - let key = escName; - - if (this._rules.has(escName)) { - if (this._rules.get(escName) === rule) { - return key; - } - - let i = 0; - while (this._rules.has(`${escName}${i}`)) { - i += 1; - } - key = `${escName}${i}`; - } - - this._rules.set(key, rule); - return key; - } - - visit(schema, name) { - const schemaType = schema.type; - const ruleName = name || 'root'; - - if (schema.oneOf || schema.anyOf) { - const rule = (schema.oneOf || schema.anyOf).map((altSchema, i) => - this.visit(altSchema, `${name}${name ? "-" : ""}${i}`) - ).join(' | '); - - return this._addRule(ruleName, rule); - } else if ('const' in schema) { - return this._addRule(ruleName, this._formatLiteral(schema.const)); - } else if ('enum' in schema) { - const rule = schema.enum.map(v => this._formatLiteral(v)).join(' | '); - return this._addRule(ruleName, rule); - } else if (schemaType === 'object' && 'properties' in schema) { - // TODO: `required` keyword (from python implementation) - const propOrder = this._propOrder; - const propPairs = Object.entries(schema.properties).sort((a, b) => { - // sort by position in prop_order (if specified) then by key - const orderA = typeof propOrder[a[0]] === 'number' ? 
propOrder[a[0]] : Infinity; - const orderB = typeof propOrder[b[0]] === 'number' ? propOrder[b[0]] : Infinity; - return orderA - orderB || a[0].localeCompare(b[0]); - }); - - let rule = '"{" space'; - propPairs.forEach(([propName, propSchema], i) => { - const propRuleName = this.visit(propSchema, `${name}${name ? "-" : ""}${propName}`); - if (i > 0) { - rule += ' "," space'; - } - rule += ` ${this._formatLiteral(propName)} space ":" space ${propRuleName}`; - }); - rule += ' "}" space'; - - return this._addRule(ruleName, rule); - } else if (schemaType === 'array' && 'items' in schema) { - // TODO `prefixItems` keyword (from python implementation) - const itemRuleName = this.visit(schema.items, `${name}${name ? "-" : ""}item`); - const rule = `"[" space (${itemRuleName} ("," space ${itemRuleName})*)? "]" space`; - return this._addRule(ruleName, rule); - } else { - if (!PRIMITIVE_RULES[schemaType]) { - throw new Error(`Unrecognized schema: ${JSON.stringify(schema)}`); - } - return this._addRule( - ruleName === 'root' ? 'root' : schemaType, - PRIMITIVE_RULES[schemaType] - ); - } - } - - formatGrammar() { - let grammar = ''; - this._rules.forEach((rule, name) => { - grammar += `${name} ::= ${rule}\n`; - }); - return grammar; - } -} -)LITERAL"; -unsigned int json_schema_to_grammar_mjs_len = sizeof(json_schema_to_grammar_mjs); +unsigned int json_schema_to_grammar_mjs_len = 3695; From e8dc55d0065d076d4c20f3c4bfca562701b4edfe Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Tue, 30 Jan 2024 19:04:37 -0500 Subject: [PATCH 627/859] kompute : llama-bench support and ggml_cpu_has_kompute() (#5226) --- common/common.cpp | 1 + examples/llama-bench/llama-bench.cpp | 15 +++++++++++---- ggml.c | 11 ++++++++++- ggml.h | 1 + llama.cpp | 5 ----- 5 files changed, 23 insertions(+), 10 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 288013676..0dd1c50cf 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1521,6 +1521,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "cpu_has_avx512_vnni: %s\n", ggml_cpu_has_avx512_vnni() ? "true" : "false"); fprintf(stream, "cpu_has_cublas: %s\n", ggml_cpu_has_cublas() ? "true" : "false"); fprintf(stream, "cpu_has_clblast: %s\n", ggml_cpu_has_clblast() ? "true" : "false"); + fprintf(stream, "cpu_has_kompute: %s\n", ggml_cpu_has_kompute() ? "true" : "false"); fprintf(stream, "cpu_has_fma: %s\n", ggml_cpu_has_fma() ? "true" : "false"); fprintf(stream, "cpu_has_gpublas: %s\n", ggml_cpu_has_gpublas() ? "true" : "false"); fprintf(stream, "cpu_has_neon: %s\n", ggml_cpu_has_neon() ? 
"true" : "false"); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index f239415d3..542cc7bb8 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -563,6 +563,7 @@ struct test { static const bool cuda; static const bool opencl; static const bool vulkan; + static const bool kompute; static const bool metal; static const bool gpu_blas; static const bool blas; @@ -647,6 +648,9 @@ struct test { if (vulkan) { return "Vulkan"; } + if (kompute) { + return "Kompute"; + } if (metal) { return "Metal"; } @@ -662,7 +666,7 @@ struct test { static const std::vector & get_fields() { static const std::vector fields = { "build_commit", "build_number", - "cuda", "opencl", "vulkan", "metal", "gpu_blas", "blas", + "cuda", "opencl", "vulkan", "kompute", "metal", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", @@ -686,8 +690,9 @@ struct test { field == "avg_ns" || field == "stddev_ns") { return INT; } - if (field == "cuda" || field == "opencl" || field == "vulkan"|| field == "metal" || field == "gpu_blas" || field == "blas" || - field == "f16_kv" || field == "no_kv_offload" || field == "mul_mat_q") { + if (field == "cuda" || field == "opencl" || field == "vulkan" || field == "kompute" || field == "metal" || + field == "gpu_blas" || field == "blas" || field == "f16_kv" || field == "no_kv_offload" || + field == "mul_mat_q") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -714,7 +719,8 @@ struct test { } std::vector values = { build_commit, std::to_string(build_number), - std::to_string(cuda), std::to_string(opencl), std::to_string(vulkan), std::to_string(metal), std::to_string(gpu_blas), std::to_string(blas), + std::to_string(cuda), std::to_string(opencl), std::to_string(vulkan), std::to_string(vulkan), + std::to_string(metal), std::to_string(gpu_blas), std::to_string(blas), cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), @@ -743,6 +749,7 @@ const int test::build_number = LLAMA_BUILD_NUMBER; const bool test::cuda = !!ggml_cpu_has_cublas(); const bool test::opencl = !!ggml_cpu_has_clblast(); const bool test::vulkan = !!ggml_cpu_has_vulkan(); +const bool test::kompute = !!ggml_cpu_has_kompute(); const bool test::metal = !!ggml_cpu_has_metal(); const bool test::gpu_blas = !!ggml_cpu_has_gpublas(); const bool test::blas = !!ggml_cpu_has_blas(); diff --git a/ggml.c b/ggml.c index a7a9ea319..b2c8baaa8 100644 --- a/ggml.c +++ b/ggml.c @@ -20473,6 +20473,14 @@ int ggml_cpu_has_vulkan(void) { #endif } +int ggml_cpu_has_kompute(void) { +#if defined(GGML_USE_KOMPUTE) + return 1; +#else + return 0; +#endif +} + int ggml_cpu_has_sycl(void) { #if defined(GGML_USE_SYCL) return 1; @@ -20482,7 +20490,8 @@ int ggml_cpu_has_sycl(void) { } int ggml_cpu_has_gpublas(void) { - return ggml_cpu_has_cublas() || ggml_cpu_has_clblast() || ggml_cpu_has_vulkan() || ggml_cpu_has_sycl(); + return ggml_cpu_has_cublas() || ggml_cpu_has_clblast() || ggml_cpu_has_vulkan() || ggml_cpu_has_kompute() || + ggml_cpu_has_sycl(); } int ggml_cpu_has_sse3(void) { diff --git a/ggml.h b/ggml.h index bf782e6ad..afc87b843 100644 --- a/ggml.h +++ b/ggml.h @@ -2266,6 +2266,7 @@ extern "C" { GGML_API int ggml_cpu_has_cublas (void); GGML_API int ggml_cpu_has_clblast (void); GGML_API int 
ggml_cpu_has_vulkan (void); + GGML_API int ggml_cpu_has_kompute (void); GGML_API int ggml_cpu_has_gpublas (void); GGML_API int ggml_cpu_has_sse3 (void); GGML_API int ggml_cpu_has_ssse3 (void); diff --git a/llama.cpp b/llama.cpp index 7b9a5c079..a490eeab2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -6878,11 +6878,6 @@ static int llama_decode_internal( n_threads = std::min(4, n_threads); } - const bool fully_offloaded = model.n_gpu_layers >= (int) hparams.n_layer + 1; - if ((ggml_cpu_has_cublas() || ggml_cpu_has_vulkan()) && fully_offloaded) { - n_threads = 1; - } - #ifdef GGML_USE_MPI const int64_t n_layer = hparams.n_layer; ggml_mpi_graph_compute_pre(lctx.ctx_mpi, gf, n_layer); From 01684139c352561840ae55ec627ab58abc3e06ab Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Wed, 31 Jan 2024 10:38:07 +0800 Subject: [PATCH 628/859] support SYCL backend windows build (#5208) * support SYCL backend windows build * add windows build in CI * add for win build CI * correct install oneMKL * fix install issue * fix ci * fix install cmd * fix install cmd * fix install cmd * fix install cmd * fix install cmd * fix win build * fix win build * fix win build * restore other CI part * restore as base * rm no new line * fix no new line issue, add -j * fix grammer issue * allow to trigger manually, fix format issue * fix format * add newline * fix format * fix format * fix format issuse --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- .github/workflows/build.yml | 25 ++++ .github/workflows/editorconfig.yml | 6 + .gitignore | 1 + CMakeLists.txt | 6 +- README_sycl.md => README-sycl.md | 198 +++++++++++++++++++++++++++-- README.md | 4 +- examples/sycl/win-build-sycl.bat | 23 ++++ examples/sycl/win-run-llama2.bat | 13 ++ scripts/install-oneapi.bat | 19 +++ 9 files changed, 281 insertions(+), 14 deletions(-) rename README_sycl.md => README-sycl.md (58%) create mode 100644 examples/sycl/win-build-sycl.bat create mode 100644 examples/sycl/win-run-llama2.bat create mode 100644 scripts/install-oneapi.bat diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index fb719a550..c6db1666e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -565,6 +565,31 @@ jobs: path: | cudart-llama-bin-win-cu${{ matrix.cuda }}-x64.zip + windows-latest-cmake-sycl: + runs-on: windows-latest + defaults: + run: + shell: bash + + env: + WINDOWS_BASEKIT_URL: https://registrationcenter-download.intel.com/akdlm/IRC_NAS/62641e01-1e8d-4ace-91d6-ae03f7f8a71f/w_BaseKit_p_2024.0.0.49563_offline.exe + WINDOWS_DPCPP_MKL: intel.oneapi.win.cpp-dpcpp-common:intel.oneapi.win.mkl.devel + + + steps: + - name: Clone + id: checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Install + run: scripts/install-oneapi.bat $WINDOWS_BASEKIT_URL $WINDOWS_DPCPP_MKL + + - name: Build + id: cmake_build + run: examples/sycl/win-build-sycl.bat + ios-xcode-build: runs-on: macos-latest diff --git a/.github/workflows/editorconfig.yml b/.github/workflows/editorconfig.yml index b4e535acf..0e0993cd4 100644 --- a/.github/workflows/editorconfig.yml +++ b/.github/workflows/editorconfig.yml @@ -1,6 +1,12 @@ name: EditorConfig Checker on: + workflow_dispatch: # allows manual triggering + inputs: + create_release: + description: 'Create new release' + required: true + type: boolean push: branches: - master diff --git a/.gitignore b/.gitignore index cb0069bfb..b84459b92 100644 --- a/.gitignore +++ b/.gitignore @@ -89,3 +89,4 @@ examples/jeopardy/results.txt poetry.lock 
 poetry.toml
+nppBackup
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 65a6f3971..15a1101aa 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -507,7 +507,11 @@ if (LLAMA_SYCL)
     set(GGML_HEADERS_SYCL ggml.h ggml-sycl.h)
     set(GGML_SOURCES_SYCL ggml-sycl.cpp)
 
-    set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} sycl OpenCL mkl_core pthread m dl mkl_sycl_blas mkl_intel_ilp64 mkl_tbb_thread)
+    if (WIN32)
+        set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} -fsycl sycl7 OpenCL mkl_sycl_blas_dll.lib mkl_intel_ilp64_dll.lib mkl_sequential_dll.lib mkl_core_dll.lib)
+    else()
+        set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} -fsycl OpenCL mkl_core pthread m dl mkl_sycl_blas mkl_intel_ilp64 mkl_tbb_thread)
+    endif()
 endif()
 
 if (LLAMA_KOMPUTE)
diff --git a/README_sycl.md b/README-sycl.md
similarity index 58%
rename from README_sycl.md
rename to README-sycl.md
index d5a1818f5..2b2cfe03a 100644
--- a/README_sycl.md
+++ b/README-sycl.md
@@ -8,10 +8,14 @@
 
 [Linux](#linux)
 
+[Windows](#windows)
+
 [Environment Variable](#environment-variable)
 
 [Known Issue](#known-issue)
 
+[Q&A](#q&a)
+
 [Todo](#todo)
 
 ## Background
@@ -33,7 +37,7 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building).
 |OS|Status|Verified|
 |-|-|-|
 |Linux|Support|Ubuntu 22.04|
-|Windows|Ongoing| |
+|Windows|Support|Windows 11|
 
 
 ## Intel GPU
@@ -42,7 +46,7 @@
 |-|-|-|
 |Intel Data Center Max Series| Support| Max 1550|
 |Intel Data Center Flex Series| Support| Flex 170|
-|Intel Arc Series| Support| Arc 770|
+|Intel Arc Series| Support| Arc 770, 730M|
 |Intel built-in Arc GPU| Support| built-in Arc GPU in Meteor Lake|
 |Intel iGPU| Support| iGPU in i5-1250P, i7-1165G7|
 
@@ -131,6 +135,7 @@ cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx
 
 #build all binary
 cmake --build . --config Release -v
+cd ..
 ```
 
 or
@@ -195,7 +200,7 @@ GGML_SYCL_DEVICE=0 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "Building
 or run by script:
 
 ```
-./examples/sycl/run_llama2.sh
+./examples/sycl/run-llama2.sh
 ```
 
 Note:
@@ -205,11 +210,175 @@ Note:
 
 5. Check the device ID in output
 
-Like: 
+Like:
 ```
 Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device
 ```
 
+## Windows
+
+### Setup Environment
+
+1. Install the Intel GPU driver.
+
+Please install the Intel GPU driver by following the official guide: [Install GPU Drivers](https://www.intel.com/content/www/us/en/products/docs/discrete-gpus/arc/software/drivers.html).
+
+2. Install the Intel® oneAPI Base Toolkit.
+
+a. Please follow the procedure in [Get the Intel® oneAPI Base Toolkit](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html).
+
+We recommend installing to the default folder: **C:\Program Files (x86)\Intel\oneAPI**.
+
+The following guide uses the default folder as an example. If you installed to a different folder, adjust the paths below accordingly.
+
+b. Enable the oneAPI running environment:
+
+- In Search, type 'oneAPI'.
+
+Search for and open "Intel oneAPI command prompt for Intel 64 for Visual Studio 2022".
+
+- In Run:
+
+In CMD:
+```
+"C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64
+```
+
+c. Check the GPU.
+
+In the oneAPI command line:
+
+```
+sycl-ls
+```
+
+There should be one or more Level Zero devices, such as **[ext_oneapi_level_zero:gpu:0]**.
+
+Output (example):
+```
+[opencl:acc:0] Intel(R) FPGA Emulation Platform for OpenCL(TM), Intel(R) FPGA Emulation Device OpenCL 1.2 [2023.16.10.0.17_160000]
+[opencl:cpu:1] Intel(R) OpenCL, 11th Gen Intel(R) Core(TM) i7-1185G7 @ 3.00GHz OpenCL 3.0 (Build 0) [2023.16.10.0.17_160000]
+[opencl:gpu:2] Intel(R) OpenCL Graphics, Intel(R) Iris(R) Xe Graphics OpenCL 3.0 NEO [31.0.101.5186]
+[ext_oneapi_level_zero:gpu:0] Intel(R) Level-Zero, Intel(R) Iris(R) Xe Graphics 1.3 [1.3.28044]
+
+```
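(A quick way to sanity-check the same device list from code rather than via `sycl-ls`: the standalone sketch below is not part of this patch, assumes only a working oneAPI DPC++ compiler, and uses the standard SYCL 2020 device-query API, much like the `ls-sycl-device` tool referenced further down.)

```
// list_gpus.cpp - illustrative sketch (not from the patch): enumerate SYCL
// GPU devices, similar to the ext_oneapi_level_zero entries sycl-ls prints.
// Build (Linux):   icpx -fsycl list_gpus.cpp -o list_gpus
// Build (Windows): icx -fsycl list_gpus.cpp
#include <sycl/sycl.hpp>
#include <iostream>

int main() {
    int id = 0;
    // restrict the query to GPUs; pass device_type::all to mirror sycl-ls output
    for (const auto & dev : sycl::device::get_devices(sycl::info::device_type::gpu)) {
        std::cout << "Device " << id++ << ": "
                  << dev.get_info<sycl::info::device::name>()
                  << ", max compute_units "
                  << dev.get_info<sycl::info::device::max_compute_units>() << "\n";
    }
    return 0;
}
```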
+3. Install cmake & make
+
+a. Download and install cmake for Windows: https://cmake.org/download/
+
+b. Download and install make for Windows, provided by mingw-w64: https://www.mingw-w64.org/downloads/
+
+
+### Build locally:
+
+In the oneAPI command line window:
+
+```
+mkdir -p build
+cd build
+@call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force
+
+:: for FP16
+:: faster for long-prompt inference
+:: cmake -G "MinGW Makefiles" .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release -DLLAMA_SYCL_F16=ON
+
+:: for FP32
+cmake -G "MinGW Makefiles" .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release
+
+
+:: build example/main only
+:: make main
+
+:: build all binary
+make -j
+cd ..
+```
+
+or
+
+```
+.\examples\sycl\win-build-sycl.bat
+```
+
+Note:
+
+- By default, all binaries are built, which takes more time. To reduce build time, we recommend building only **example/main**.
+
+### Run
+
+1. Put the model file into the **models** folder.
+
+2. Enable the oneAPI running environment.
+
+- In Search, type 'oneAPI'.
+
+Search for and open "Intel oneAPI command prompt for Intel 64 for Visual Studio 2022".
+
+- In Run:
+
+In CMD:
+```
+"C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64
+```
+
+3. List the device IDs.
+
+Run without parameters:
+
+```
+build\bin\ls-sycl-device.exe
+
+or
+
+build\bin\main.exe
+```
+
+Check the IDs in the startup log, for example:
+
+```
+found 4 SYCL devices:
+  Device 0: Intel(R) Arc(TM) A770 Graphics, compute capability 1.3,
+    max compute_units 512, max work group size 1024, max sub group size 32, global mem size 16225243136
+  Device 1: Intel(R) FPGA Emulation Device, compute capability 1.2,
+    max compute_units 24, max work group size 67108864, max sub group size 64, global mem size 67065057280
+  Device 2: 13th Gen Intel(R) Core(TM) i7-13700K, compute capability 3.0,
+    max compute_units 24, max work group size 8192, max sub group size 64, global mem size 67065057280
+  Device 3: Intel(R) Arc(TM) A770 Graphics, compute capability 3.0,
+    max compute_units 512, max work group size 1024, max sub group size 32, global mem size 16225243136
+
+```
+
+|Attribute|Note|
+|-|-|
+|compute capability 1.3|Level Zero runtime, recommended|
+|compute capability 3.0|OpenCL runtime, slower than Level Zero in most cases|
+
+4. Set the device ID and run llama.cpp.
+
+Set the device ID to 0 with **set GGML_SYCL_DEVICE=0**:
+
+```
+set GGML_SYCL_DEVICE=0
+build\bin\main.exe -m models\llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e -ngl 33 -s 0
+```
+or run it via the script:
+
+```
+.\examples\sycl\win-run-llama2.bat
+```
+
+Note:
+
+- By default, mmap is used to read the model file. In some cases this leads to a hang; use the **--no-mmap** parameter to disable mmap() and avoid the issue.
+
+
+5. Check the device ID in the output.
+
+For example:
+```
+Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device
+```
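(For context on the `GGML_SYCL_DEVICE` variable used above: it is a plain process environment variable that the SYCL backend consults at startup to pick the main device. The sketch below is illustrative only — `pick_sycl_device` is a hypothetical helper, not the backend's actual code — and shows the kind of lookup involved:)

```
// pick_sycl_device.cpp - hedged sketch of env-driven device selection.
#include <cstdio>
#include <cstdlib>

// hypothetical helper (not from ggml-sycl.cpp): parse GGML_SYCL_DEVICE,
// falling back to device 0 when the variable is unset or negative.
static int pick_sycl_device() {
    const char * env = std::getenv("GGML_SYCL_DEVICE");
    if (env == nullptr) {
        return 0;
    }
    const int id = std::atoi(env);
    return id >= 0 ? id : 0;
}

int main() {
    std::printf("Using device %d as main device\n", pick_sycl_device());
    return 0;
}
```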
 
 ## Environment Variable
 
 #### Build
 
 |Name|Value|Function|
 |-|-|-|
 |LLAMA_SYCL|ON (mandatory)|Enable build with SYCL code path. <br>For FP32/FP16, LLAMA_SYCL=ON is mandatory.|
 |LLAMA_SYCL_F16|ON (optional)|Enable FP16 build with SYCL code path. Faster for long-prompt inference. <br>For FP32, not set it.|
 |CMAKE_C_COMPILER|icx|Use icx compiler for SYCL code path|
-|CMAKE_CXX_COMPILER|icpx|use icpx for SYCL code path|
+|CMAKE_CXX_COMPILER|icpx (Linux), icx (Windows)|use icpx/icx for SYCL code path|
 
 #### Running
 
 ## Known Issue
 
-- Error:  `error while loading shared libraries: libsycl.so.7: cannot open shared object file: No such file or directory`.
-
-  Miss to enable oneAPI running environment.
-
-  Install oneAPI base toolkit and enable it by: `source /opt/intel/oneapi/setvars.sh`.
-
-
 - Hang during startup
 
   llama.cpp use mmap as default way to read model file and copy to GPU. In some system, memcpy will be abnormal and block.
 
   Solution: add **--no-mmap**.
 
+## Q&A
+
+- Error: `error while loading shared libraries: libsycl.so.7: cannot open shared object file: No such file or directory`.
+
+  The oneAPI running environment is not enabled.
+
+  Install the oneAPI base toolkit and enable it with: `source /opt/intel/oneapi/setvars.sh`.
+
+- On Windows, there is no output, but also no error.
+
+  The oneAPI running environment is not enabled.
+
 ## Todo
 
 - Support to build in Windows.
 
diff --git a/README.md b/README.md
index b37348a74..7746cb510 100644
--- a/README.md
+++ b/README.md
@@ -10,6 +10,8 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++
 
 ### Hot topics
 
+- ⚠️ Incoming backends: https://github.com/ggerganov/llama.cpp/discussions/5138
+  - [SYCL backend](README-sycl.md) is ready (1/28/2024), supports Linux/Windows on Intel GPUs (iGPU, Arc/Flex/Max series)
 - New SOTA quantized models, including pure 2-bits: https://huggingface.co/ikawrakow
 - Collecting Apple Silicon performance stats:
   - M-series: https://github.com/ggerganov/llama.cpp/discussions/4167
@@ -604,7 +606,7 @@ Building the program with BLAS support may lead to some performance improvements
 
   llama.cpp based on SYCL is used to support Intel GPU (Data Center Max series, Flex series, Arc series, Built-in GPU and iGPU).
 
-  For detailed info, please refer to [llama.cpp for SYCL](README_sycl.md).
+  For detailed info, please refer to [llama.cpp for SYCL](README-sycl.md).
 
 ### Prepare Data & Run
 
diff --git a/examples/sycl/win-build-sycl.bat b/examples/sycl/win-build-sycl.bat
new file mode 100644
index 000000000..f9d43f8ed
--- /dev/null
+++ b/examples/sycl/win-build-sycl.bat
@@ -0,0 +1,23 @@
+
+::  MIT license
+::  Copyright (C) 2024 Intel Corporation
+::  SPDX-License-Identifier: MIT
+
+mkdir -p build
+cd build
+@call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force
+
+::  for FP16
+::  faster for long-prompt inference
+::  cmake -G "MinGW Makefiles" ..  -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx  -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release -DLLAMA_SYCL_F16=ON
+
+::  for FP32
+cmake -G "MinGW Makefiles" ..  -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx  -DCMAKE_CXX_COMPILER=icx -DCMAKE_BUILD_TYPE=Release
+
+
+::  build example/main only
+::  make main
+
+::  build all binary
+make -j
+cd ..
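(A build produced by `win-build-sycl.bat` — or any other backend build in this series — can report its compiled-in GPU paths through the `ggml_cpu_has_*` probes declared in `ggml.h`, including the existing `ggml_cpu_has_sycl()` and the `ggml_cpu_has_kompute()` added in PATCH 627. A minimal sketch, assuming only that it is compiled and linked against such a ggml build:)

```
// backend_report.cpp - illustrative sketch, not part of any patch here:
// print which GPU code paths this ggml build was compiled with.
#include "ggml.h"
#include <cstdio>

int main() {
    std::printf("cublas : %d\n", ggml_cpu_has_cublas());
    std::printf("clblast: %d\n", ggml_cpu_has_clblast());
    std::printf("vulkan : %d\n", ggml_cpu_has_vulkan());
    std::printf("kompute: %d\n", ggml_cpu_has_kompute());
    std::printf("sycl   : %d\n", ggml_cpu_has_sycl());
    std::printf("gpublas: %d\n", ggml_cpu_has_gpublas());
    return 0;
}
```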
diff --git a/examples/sycl/win-run-llama2.bat b/examples/sycl/win-run-llama2.bat
new file mode 100644
index 000000000..28d935541
--- /dev/null
+++ b/examples/sycl/win-run-llama2.bat
@@ -0,0 +1,13 @@
+::  MIT license
+::  Copyright (C) 2024 Intel Corporation
+::  SPDX-License-Identifier: MIT
+
+set INPUT2="Building a website can be done in 10 simple steps:\nStep 1:"
+@call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force
+
+
+set GGML_SYCL_DEVICE=0
+rem set GGML_SYCL_DEBUG=1
+.\build\bin\main.exe -m models\llama-2-7b.Q4_0.gguf -p %INPUT2% -n 400 -e -ngl 33 -s 0
+
+
diff --git a/scripts/install-oneapi.bat b/scripts/install-oneapi.bat
new file mode 100644
index 000000000..e99bef14a
--- /dev/null
+++ b/scripts/install-oneapi.bat
@@ -0,0 +1,19 @@
+:: MIT license
+:: Copyright (C) 2024 Intel Corporation
+:: SPDX-License-Identifier: MIT
+
+
+set URL=%1
+set COMPONENTS=%2
+
+curl.exe --output %TEMP%\webimage.exe --url %URL% --retry 5 --retry-delay 5
+start /b /wait %TEMP%\webimage.exe -s -x -f webimage_extracted --log extract.log
+del %TEMP%\webimage.exe
+if "%COMPONENTS%"=="" (
+  webimage_extracted\bootstrapper.exe -s --action install --eula=accept -p=NEED_VS2017_INTEGRATION=0 -p=NEED_VS2019_INTEGRATION=0 -p=NEED_VS2022_INTEGRATION=0 --log-dir=.
+) else (
+  webimage_extracted\bootstrapper.exe -s --action install --components=%COMPONENTS% --eula=accept -p=NEED_VS2017_INTEGRATION=0 -p=NEED_VS2019_INTEGRATION=0 -p=NEED_VS2022_INTEGRATION=0 --log-dir=.
+)
+set installer_exit_code=%ERRORLEVEL%
+rd /s/q "webimage_extracted"
+exit /b %installer_exit_code%

From d62520eb2cc1d7168a30edec6110e1daefbd959f Mon Sep 17 00:00:00 2001
From: Yiming Cui
Date: Wed, 31 Jan 2024 11:04:21 +0800
Subject: [PATCH 629/859] Fix typos of IQ2_XXS and IQ3_XXS in llama.cpp (#5231)

---
 llama.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index a490eeab2..bb23689fa 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -2713,10 +2713,10 @@ static std::string llama_model_ftype_name(llama_ftype ftype) {
         case LLAMA_FTYPE_MOSTLY_Q5_K_S: return "Q5_K - Small";
         case LLAMA_FTYPE_MOSTLY_Q5_K_M: return "Q5_K - Medium";
         case LLAMA_FTYPE_MOSTLY_Q6_K:   return "Q6_K";
-        case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XSS - 2.0625 bpw";
+        case LLAMA_FTYPE_MOSTLY_IQ2_XXS:return "IQ2_XXS - 2.0625 bpw";
         case LLAMA_FTYPE_MOSTLY_IQ2_XS: return "IQ2_XS - 2.3125 bpw";
         case LLAMA_FTYPE_MOSTLY_Q3_K_XS:return "Q3_K - Extra small";
-        case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XSS - 3.0625 bpw";
+        case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw";
 
         default: return "unknown, may not work";
     }

From f8e9140cb46eebaa867e1184a9946e4840eec772 Mon Sep 17 00:00:00 2001
From: 0cc4m
Date: Wed, 31 Jan 2024 11:44:19 +0100
Subject: [PATCH 630/859] Vulkan Fixes (#5223)

* Fix Vulkan F16 models

* Fix Vulkan context shift crash

* Add Vulkan to common.cpp dump_non_result_info_yaml function

* Fix bug in Vulkan CPY op

* Fix small matrix multiplication errors in AMD GPUs on Windows or with amdvlk

Co-authored-by: Engininja2 <139037756+Engininja2@users.noreply.github.com>

---------

Co-authored-by: Engininja2 <139037756+Engininja2@users.noreply.github.com>
---
 common/common.cpp           |    1 +
 ggml-vulkan-shaders.hpp     | 1952 +++++++++++++----------------------
 ggml-vulkan.cpp             |   14 +-
 ggml_vk_generate_shaders.py |    4 +-
 4 files changed, 704 insertions(+), 1267 deletions(-)

diff --git a/common/common.cpp b/common/common.cpp
index 0dd1c50cf..9d976c7c8 100644
--- a/common/common.cpp
+++ b/common/common.cpp
@@ -1520,6 +1520,7 @@ void
dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "cpu_has_avx512_vbmi: %s\n", ggml_cpu_has_avx512_vbmi() ? "true" : "false"); fprintf(stream, "cpu_has_avx512_vnni: %s\n", ggml_cpu_has_avx512_vnni() ? "true" : "false"); fprintf(stream, "cpu_has_cublas: %s\n", ggml_cpu_has_cublas() ? "true" : "false"); + fprintf(stream, "cpu_has_vulkan: %s\n", ggml_cpu_has_vulkan() ? "true" : "false"); fprintf(stream, "cpu_has_clblast: %s\n", ggml_cpu_has_clblast() ? "true" : "false"); fprintf(stream, "cpu_has_kompute: %s\n", ggml_cpu_has_kompute() ? "true" : "false"); fprintf(stream, "cpu_has_fma: %s\n", ggml_cpu_has_fma() ? "true" : "false"); diff --git a/ggml-vulkan-shaders.hpp b/ggml-vulkan-shaders.hpp index 321e36383..e2e9be22c 100644 --- a/ggml-vulkan-shaders.hpp +++ b/ggml-vulkan-shaders.hpp @@ -890,7 +890,7 @@ const uint64_t cpy_f32_f32_len = 2472; unsigned char dequant_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x87,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x81,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -898,7 +898,7 @@ unsigned char dequant_f16_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x10,0x00,0x06,0x00, +0x4f,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x10,0x00,0x06,0x00, 0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, @@ -910,23 +910,23 @@ unsigned char dequant_f16_data[] = { 0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5b,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x5d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5f,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x5b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, @@ -945,330 +945,109 @@ unsigned char dequant_f16_data[] = { 0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4f,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x5c,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x5d,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x02,0x00,0x00,0x13,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x70,0x02,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x71,0x02,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x02,0x00,0x00, -0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x77,0x02,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x02,0x00,0x00, -0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x7a,0x02,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x02,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x80,0x02,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x81,0x02,0x00,0x00, -0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x1d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x83,0x02,0x00,0x00,0x0e,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x86,0x02,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x81,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x82,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x81,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x54,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x6c,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc8,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, 
-0xdf,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xdf,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x6e,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xe8,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x6f,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xff,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x08,0x01,0x00,0x00,0xf5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x1f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x72,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x28,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x3f,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x48,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x75,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x5f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x5f,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x66,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x76,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x68,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, 
-0x75,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x7f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7f,0x01,0x00,0x00, -0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x78,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x88,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x88,0x01,0x00,0x00,0x75,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x95,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x79,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x9c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x9f,0x01,0x00,0x00, -0x92,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa6,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa8,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb2,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x01,0x00,0x00, -0xb2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x7c,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xc8,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc6,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc8,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xdc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xdf,0x01,0x00,0x00, -0xd2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe6,0x01,0x00,0x00,0x67,0x00,0x00,0x00,0x7e,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xe8,0x01,0x00,0x00,0xd5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0x67,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xfc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xff,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x06,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x80,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x06,0x02,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0x08,0x02,0x00,0x00,0xf5,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x12,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x81,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x1f,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x1f,0x02,0x00,0x00, -0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x28,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x32,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x35,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x3f,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x3f,0x02,0x00,0x00, -0x32,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x48,0x02,0x00,0x00,0x35,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x55,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x67,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x5f,0x02,0x00,0x00,0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x5f,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x66,0x02,0x00,0x00,0x67,0x00,0x00,0x00,0x86,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x68,0x02,0x00,0x00, -0x5f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x68,0x02,0x00,0x00,0x55,0x02,0x00,0x00, -0xf9,0x00,0x02,0x00,0x81,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x81,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x23,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x4c,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x5a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 
+0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, +0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0d,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x80,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, +0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00,0x23,0x00,0x00,0x00, +0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, +0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, +0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, +0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x80,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x33,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x32,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x33,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x48,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, +0x57,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x48,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 
+0x5f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x52,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7a,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7f,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_f16_len = 4392; +const uint64_t dequant_f16_len = 1748; unsigned char dequant_f16_fp32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc8,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -1280,23 +1059,23 @@ unsigned char dequant_f16_fp32_data[] = { 0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x50,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x50,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x60,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x60,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, 
+0x5e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x5e,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x85,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, @@ -1315,405 +1094,105 @@ unsigned char dequant_f16_fp32_data[] = { 0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4a,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x50,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x5f,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xad,0x02,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xae,0x02,0x00,0x00,0x12,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, -0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb3,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb5,0x02,0x00,0x00, -0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0xb9,0x02,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbc,0x02,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x1a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc5,0x02,0x00,0x00,0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0x1f,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0d,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x87,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0xa8,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x86,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00, 
-0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x48,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x4c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x4e,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x5d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x5f,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, +0x0a,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x85,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x1b,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0xaf,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, +0x23,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2a,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xaf,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x2b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x30,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x33,0x00,0x00,0x00, 
+0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x32,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0xad,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xae,0x00,0x00,0x00, 
-0x3e,0x00,0x03,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xcb,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xae,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xef,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0xaf,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xf9,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x04,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x13,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x28,0x01,0x00,0x00,0x27,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, 
-0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x2c,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x28,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x37,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x40,0x01,0x00,0x00, -0x2c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x41,0x01,0x00,0x00, -0x40,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x4b,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0xb4,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x4c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0xb5,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x65,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x65,0x01,0x00,0x00,0x64,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x6f,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0x6f,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x73,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x74,0x01,0x00,0x00,0x73,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x70,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7f,0x01,0x00,0x00,0x7e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x74,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x89,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x98,0x01,0x00,0x00, -0x97,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xb8,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xa2,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, 
-0xa3,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x9f,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xa3,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xac,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xad,0x01,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xad,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xb7,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb8,0x01,0x00,0x00,0xb7,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xbb,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xbc,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0xba,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xb8,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xc3,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xc7,0x01,0x00,0x00,0xc6,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0xbb,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xd0,0x01,0x00,0x00,0xbc,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xd1,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0xeb,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe7,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xe0,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0xf5,0x01,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf5,0x01,0x00,0x00,0xf4,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0xff,0x01,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0xff,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x03,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x04,0x02,0x00,0x00, -0x03,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x0e,0x02,0x00,0x00, -0x00,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x0f,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x0f,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x18,0x02,0x00,0x00, 
-0x04,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x19,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x24,0x02,0x00,0x00,0x23,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc0,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x24,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x33,0x02,0x00,0x00,0x32,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x3c,0x02,0x00,0x00,0x28,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x3d,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x3d,0x02,0x00,0x00,0x3c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x47,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x48,0x02,0x00,0x00, -0x47,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x4b,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x56,0x02,0x00,0x00,0x48,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x57,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x57,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x73,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00,0x61,0x02,0x00,0x00, -0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x61,0x02,0x00,0x00,0x60,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x6b,0x02,0x00,0x00, -0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x6b,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x6f,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x70,0x02,0x00,0x00, -0x6f,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x7a,0x02,0x00,0x00, -0x6c,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x7b,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x77,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x7b,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x70,0x02,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x62,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x85,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x8f,0x02,0x00,0x00,0x56,0x00,0x00,0x00,0x73,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0x90,0x02,0x00,0x00,0x8f,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x93,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x90,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9f,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0x73,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x94,0x02,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0x62,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa6,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa9,0x02,0x00,0x00,0xa8,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x86,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x86,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x17,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x39,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x50,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x48,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x50,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x48,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x18,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x70,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x17,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x7f,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t dequant_f16_fp32_len = 5420; +const uint64_t dequant_f16_fp32_len = 1816; unsigned char dequant_q2_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -15313,7 +14792,7 @@ const uint64_t gelu_f32_len = 1408; unsigned char get_rows_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 
-0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -15321,7 +14800,7 @@ unsigned char get_rows_f16_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x63,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -15341,22 +14820,184 @@ unsigned char get_rows_f16_data[] = { 0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x63,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x63,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, +0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
+0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0c,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x76,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, 
+0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x75,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x6b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x58,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x72,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x75,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +}; +const uint64_t get_rows_f16_len = 1892; + +unsigned char get_rows_f16_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, +0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 
+0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, @@ -15388,198 +15029,28 @@ unsigned char get_rows_f16_data[] = { 0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x62,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x75,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_f16_len = 1940; - -unsigned char get_rows_f16_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x63,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00, 
-0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, 0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -15600,7 +15071,7 @@ unsigned char get_rows_f16_f32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7b,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -15613,51 +15084,51 @@ unsigned char get_rows_f16_f32_data[] = { 0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 
-0x6e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x6e,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7b,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_f16_f32_len = 1988; +const uint64_t get_rows_f16_f32_len = 1940; unsigned char get_rows_f16_f32_fp32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 
0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -15676,23 +15147,23 @@ unsigned char get_rows_f16_f32_fp32_data[] = { 0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x64,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x65,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x67,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, @@ -15723,32 +15194,28 @@ unsigned char get_rows_f16_f32_fp32_data[] = { 0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x64,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x7b,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7c,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00, 0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, @@ -15770,7 +15237,7 @@ unsigned char get_rows_f16_f32_fp32_data[] = { 0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, 0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, @@ -15783,51 +15250,51 @@ unsigned char get_rows_f16_f32_fp32_data[] = { 0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, 0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x53,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6f,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x6f,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7b,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x7b,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, +0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_f16_f32_fp32_len = 1980; +const uint64_t get_rows_f16_f32_fp32_len = 1932; unsigned char get_rows_f16_fp32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 
0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -15846,23 +15313,23 @@ unsigned char get_rows_f16_fp32_data[] = { 0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x64,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x65,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x65,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x67,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x67,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, @@ -15893,31 +15360,27 @@ unsigned char get_rows_f16_fp32_data[] = { 0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00, 
-0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x64,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x66,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x62,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x77,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, 0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x7c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x7a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -15938,7 +15401,7 @@ unsigned char get_rows_f16_fp32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -15951,42 +15414,42 @@ unsigned char get_rows_f16_fp32_data[] = { 0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, 
-0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x7c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x7c,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x58,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x48,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 
+0x73,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x76,0x00,0x00,0x00, +0x75,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x79,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_f16_fp32_len = 1996; +const uint64_t get_rows_f16_fp32_len = 1948; unsigned char get_rows_q4_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -52701,7 +52164,7 @@ const uint64_t mul_f32_len = 1456; unsigned char mul_mat_vec_f16_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xba,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xb6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, @@ -52709,9 +52172,9 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x0f,0x00,0x0c,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x13,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0xad,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x1a,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x13,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -52729,23 +52192,23 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x51,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x64,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x64,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xaa,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xab,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xab,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa8,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xab,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xad,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xad,0x00,0x00,0x00, 
+0x47,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xaa,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xaa,0x00,0x00,0x00, 0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb5,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0xb2,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, 0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, 0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, @@ -52760,7 +52223,7 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x13,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x17,0x00,0x00,0x00, 0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x18,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, 0x19,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x1a,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1a,0x00,0x00,0x00, @@ -52775,7 +52238,7 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x2e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x02,0x00, 0x30,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x35,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, 0x4d,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, @@ -52784,26 +52247,22 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x50,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, 0x3b,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x63,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x65,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xaa,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xab,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xac,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xac,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x62,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, 
+0x0c,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x89,0x00,0x00,0x00, +0x08,0x01,0x00,0x00,0x1d,0x00,0x03,0x00,0xa7,0x00,0x00,0x00, +0x17,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0xa8,0x00,0x00,0x00, +0xa7,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa9,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x18,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, @@ -52819,122 +52278,91 @@ unsigned char mul_mat_vec_f16_f32_data[] = { 0x1b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, 0x1f,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, 0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, +0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x87,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, 0x2c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, 0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 0x2f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0xb1,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, +0xb5,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, 0x24,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 0x24,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, 
-0x17,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6e,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6e,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1f,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x22,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0xe0,0x00,0x04,0x00, -0x8b,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x8e,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0xad,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x90,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x94,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x30,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x99,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x97,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x98,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x17,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x17,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x99,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x99,0x00,0x00,0x00,0xe0,0x00,0x04,0x00,0x8b,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x91,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x91,0x00,0x00,0x00, -0xc3,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, 
-0xb9,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x00,0x00,0x00, -0xaa,0x00,0x05,0x00,0x30,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xa9,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0xa7,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x1e,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6e,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb4,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xa9,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xa9,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, +0x35,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, +0x37,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x3d,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x82,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x54,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x54,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x4d,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, +0x73,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, 
+0x57,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6d,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x17,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x17,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x1f,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x22,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x24,0x00,0x00,0x00, +0xe0,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0xaa,0x00,0x05,0x00,0x30,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0xa6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0xa6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa5,0x00,0x00,0x00, +0x41,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, +0xac,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x1e,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x17,0x00,0x00,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x6d,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xa6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa6,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t mul_mat_vec_f16_f32_len = 2788; +const uint64_t mul_mat_vec_f16_f32_len = 2372; unsigned char mul_mat_vec_nc_f16_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 1d93ec6bb..bccc40bf5 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -817,7 +817,7 @@ static void ggml_vk_load_shaders() { // mulmat std::initializer_list warptile_l = { 128, 128, 128, 16, vk_device.subgroup_size * 2, 64, 2, 4, 4, vk_device.subgroup_size }; std::initializer_list warptile_m = { 128, 64, 64, 16, vk_device.subgroup_size, 32, 2, 4, 2, vk_device.subgroup_size }; - std::initializer_list warptile_s = { vk_device.subgroup_size, 32, 32, 8, 32, 32, 2, 2, 2, vk_device.subgroup_size }; + std::initializer_list warptile_s = { vk_device.subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, vk_device.subgroup_size }; std::array l_wg_denoms = {128, 128, 1 }; std::array m_wg_denoms = { 64, 64, 1 }; @@ -2873,7 +2873,8 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm if (op == GGML_OP_CPY) { GGML_ASSERT(!transfer_src0); GGML_ASSERT(!transfer_src1); - d_sz = dst->ne[1] * dst->nb[1]; + x_sz = ggml_nbytes(src0); + d_sz = ggml_nbytes(dst); if (extra->offset + d_sz >= d_D->size) { d_sz = VK_WHOLE_SIZE; @@ -4556,8 +4557,15 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml } ggml_vk_preallocate_buffers(); + int last_node = 
cgraph->n_nodes - 1; + + // If the last op in the cgraph isn't backend GPU, the command buffer doesn't get closed properly + while (last_node > 0 && cgraph->nodes[last_node]->backend != GGML_BACKEND_GPU) { + last_node -= 1; + } + for (int i = 0; i < cgraph->n_nodes; i++) { - ggml_vk_build_graph(cgraph->nodes[i], i == cgraph->n_nodes - 1); + ggml_vk_build_graph(cgraph->nodes[i], i == last_node); } ggml_compute_params params = {}; diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index d0861fde4..6b1b82bf3 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -19,8 +19,8 @@ shader_int8_ext = """ # Type-specific defines shader_f16_defines = """ -#define QUANT_K 32 -#define QUANT_R 2 +#define QUANT_K 1 +#define QUANT_R 1 #define A_TYPE float16_t """ From dabcc5b471348e4ae03ddacc41e19ad75fb2f041 Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 31 Jan 2024 13:43:03 +0100 Subject: [PATCH 631/859] ggml : limit n_threads to the max n_tasks (#5238) --- ggml.c | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ggml.c b/ggml.c index b2c8baaa8..afd9c6c61 100644 --- a/ggml.c +++ b/ggml.c @@ -16985,12 +16985,16 @@ struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threa struct ggml_cplan cplan; memset(&cplan, 0, sizeof(struct ggml_cplan)); + int max_tasks = 1; + // thread scheduling for the different operations + work buffer size estimation for (int i = 0; i < cgraph->n_nodes; i++) { struct ggml_tensor * node = cgraph->nodes[i]; const int n_tasks = ggml_get_n_tasks(node, n_threads); + max_tasks = MAX(max_tasks, n_tasks); + size_t cur = 0; switch (node->op) { @@ -17157,7 +17161,7 @@ struct ggml_cplan ggml_graph_plan(const struct ggml_cgraph * cgraph, int n_threa work_size += CACHE_LINE_SIZE*(n_threads - 1); } - cplan.n_threads = n_threads; + cplan.n_threads = MIN(max_tasks, n_threads); cplan.work_size = work_size; cplan.work_data = NULL; From b2b9f025e7821e78bd501d75d01838c26de07a57 Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Wed, 31 Jan 2024 21:04:46 +0800 Subject: [PATCH 632/859] format license text, restore apache license by legal suggestion (#5233) --- examples/sycl/ls-sycl-device.cpp | 10 ++++++---- ggml-sycl.cpp | 15 +++++++++++---- ggml-sycl.h | 9 +++++---- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/examples/sycl/ls-sycl-device.cpp b/examples/sycl/ls-sycl-device.cpp index 42847154a..52442e4ca 100644 --- a/examples/sycl/ls-sycl-device.cpp +++ b/examples/sycl/ls-sycl-device.cpp @@ -1,7 +1,9 @@ -/*MIT license - Copyright (C) 2024 Intel Corporation - SPDX-License-Identifier: MIT -*/ +// +// MIT license +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: MIT +// + #include "ggml-sycl.h" diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 3fc346975..1cc55ef52 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -1,7 +1,14 @@ -/*MIT license - Copyright (C) 2024 Intel Corporation - SPDX-License-Identifier: MIT -*/ +// +// MIT license +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: MIT +// + +// +// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. +// See https://llvm.org/LICENSE.txt for license information. 
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +// #include #include diff --git a/ggml-sycl.h b/ggml-sycl.h index 0eabb53cc..ba0c61473 100644 --- a/ggml-sycl.h +++ b/ggml-sycl.h @@ -1,7 +1,8 @@ -/*MIT license - Copyright (C) 2024 Intel Corporation - SPDX-License-Identifier: MIT -*/ +// +// MIT license +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: MIT +// #pragma once From 15606309a05ccf7fadbaad5538cb7c32acb1e06b Mon Sep 17 00:00:00 2001 From: JidongZhang-THU <1119708529@qq.com> Date: Wed, 31 Jan 2024 21:10:15 +0800 Subject: [PATCH 633/859] llava : add MobileVLM support (#5132) * New Feature: 1. Sum_Rows: fix cuda kernel overflow fix block shape error when nrows too big 2. Im2Col: Support Batch in cuda Support f32 to f32 both in cpu && cuda 3. DepthWiseConv: Support by Im2Col && MulMat 4. Pool_2d: Supoort avg pooling in cuda 5. HardSigmoid: Imp in cuda 6. HardSwish: Imp in cuda * fix tabs instead of spaces * code clean * CUDA POOL2D * ADD POOL2D test case in test-backend-ops.cpp * code clean * fix pool2d_kernel nits * fix bug in pool2d kernel * fix avg pooling, count_include_pad nits * test-backend-ops : add more pool_2d tests * cuda : fix warnings and formatting * ggml : check types in release builds too in pool_2d * test-backend-ops : remove f16 pool_2d tests * cuda : more style fixes * Add assert in ggml_cuda_op_pool2d * pool2d float padding fallback * test-backend-ops : add dst_type to im2col --------- Co-authored-by: slaren --- examples/llava/MobileVLM-README.md | 58 +++++++- ggml-cuda.cu | 209 ++++++++++++++++++++++++++--- ggml.c | 118 +++++++++++++--- ggml.h | 3 +- tests/test-backend-ops.cpp | 74 +++++++++- 5 files changed, 421 insertions(+), 41 deletions(-) diff --git a/examples/llava/MobileVLM-README.md b/examples/llava/MobileVLM-README.md index c6258eba6..9eba791da 100644 --- a/examples/llava/MobileVLM-README.md +++ b/examples/llava/MobileVLM-README.md @@ -111,17 +111,71 @@ llama_print_timings: eval time = 1279.03 ms / 18 runs ( 71.06 m llama_print_timings: total time = 34570.79 ms ``` +## Orin compile and run +### compile +```sh +make LLAMA_CUBLAS=1 CUDA_DOCKER_ARCH=sm_87 LLAMA_CUDA_F16=1 -j 32 +``` + +### run on Orin +### case 1 +**input** +```sh +./llava-cli \ + -m /data/local/tmp/ggml-model-q4_k.gguf \ + --mmproj /data/local/tmp/mmproj-model-f16.gguf \ + --image /data/local/tmp/demo.jpeg \ + -p "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: \nWho is the author of this book? \nAnswer the question using a single word or phrase. ASSISTANT:" \ + --n-gpu-layers 999 +``` +**output** +```sh + +encode_image_with_clip: image encoded in 296.62 ms by CLIP ( 2.06 ms per image patch) + + Susan Wise Bauer + +llama_print_timings: load time = 1067.64 ms +llama_print_timings: sample time = 1.53 ms / 6 runs ( 0.25 ms per token, 3934.43 tokens per second) +llama_print_timings: prompt eval time = 306.84 ms / 246 tokens ( 1.25 ms per token, 801.72 tokens per second) +llama_print_timings: eval time = 91.50 ms / 6 runs ( 15.25 ms per token, 65.58 tokens per second) +llama_print_timings: total time = 1352.63 ms / 252 tokens +``` + +### case 2 +**input** +```sh +./llava-cli \ + -m /data/local/tmp/ggml-model-q4_k.gguf \ + --mmproj /data/local/tmp/mmproj-model-f16.gguf \ + -p "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. 
USER: \nWhat is in the image? ASSISTANT:" \ + --n-gpu-layers 999 + +``` +**output** +```sh +encode_image_with_clip: image encoded in 302.15 ms by CLIP ( 2.10 ms per image patch) + + The image features a cat lying in the grass. + +llama_print_timings: load time = 1057.07 ms +llama_print_timings: sample time = 3.27 ms / 11 runs ( 0.30 ms per token, 3360.83 tokens per second) +llama_print_timings: prompt eval time = 213.60 ms / 232 tokens ( 0.92 ms per token, 1086.14 tokens per second) +llama_print_timings: eval time = 166.65 ms / 11 runs ( 15.15 ms per token, 66.01 tokens per second) +llama_print_timings: total time = 1365.47 ms / 243 tokens +``` + ## Minor shortcomings The `n_patch` of output in `ldp` is 1/4 of the input. In order to implement quickly, we uniformly modified `clip_n_patches` function to a quarter. when counting the time consumption, the calculated time will be 4 times bigger than the real cost. ## TODO -- [ ] Support non-CPU backend for the new operators, such as `depthwise`, `hardswish`, `hardsigmoid` +- [x] Support non-CPU backend for the new operators, such as `depthwise`, `hardswish`, `hardsigmoid` - [ ] Optimize LDP projector performance - Optimize the structure definition to avoid unnecessary memory rearrangements, to reduce the use of `ggml_permute_cpy`; - Optimize operator implementation (ARM CPU/NVIDIA GPU): such as depthwise conv, hardswish, hardsigmoid, etc. -- [ ] run MobileVLM on `Jetson Orin` +- [x] run MobileVLM on `Jetson Orin` - [ ] Support more model variants, such as `MobileVLM-3B`. diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 949bc8a1c..e56595742 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -524,6 +524,8 @@ static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong #define CUDA_SILU_BLOCK_SIZE 256 #define CUDA_TANH_BLOCK_SIZE 256 #define CUDA_RELU_BLOCK_SIZE 256 +#define CUDA_HARDSIGMOID_BLOCK_SIZE 256 +#define CUDA_HARDSWISH_BLOCK_SIZE 256 #define CUDA_SQR_BLOCK_SIZE 256 #define CUDA_CPY_BLOCK_SIZE 32 #define CUDA_SCALE_BLOCK_SIZE 256 @@ -540,6 +542,7 @@ static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong #define CUDA_PAD_BLOCK_SIZE 256 #define CUDA_ACC_BLOCK_SIZE 256 #define CUDA_IM2COL_BLOCK_SIZE 256 +#define CUDA_POOL2D_BLOCK_SIZE 256 #define CUDA_Q8_0_NE_ALIGN 2048 @@ -823,6 +826,24 @@ static __global__ void relu_f32(const float * x, float * dst, const int k) { dst[i] = fmaxf(x[i], 0); } +static __global__ void hardsigmoid_f32(const float * x, float * dst, const int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + if (i >= k) { + return; + } + dst[i] = fminf(1.0f, fmaxf(0.0f, (x[i] + 3.0f) / 6.0f)); +} + +static __global__ void hardswish_f32(const float * x, float * dst, const int k) { + const int i = blockDim.x*blockIdx.x + threadIdx.x; + + if (i >= k) { + return; + } + dst[i] = x[i] * fminf(1.0f, fmaxf(0.0f, (x[i] + 3.0f) / 6.0f)); +} + static __global__ void leaky_relu_f32(const float * x, float * dst, const int k, const float negative_slope) { const int i = blockDim.x*blockIdx.x + threadIdx.x; if (i >= k) { @@ -5823,7 +5844,7 @@ static __global__ void alibi_f32(const float * x, float * dst, const int ncols, } static __global__ void k_sum_rows_f32(const float * x, float * dst, const int ncols) { - const int row = blockIdx.y; + const int row = blockIdx.x; const int col = threadIdx.x; float sum = 0.0f; @@ -6145,9 +6166,10 @@ static __global__ void clamp_f32(const float * x, float * dst, const float min, dst[i] = x[i] < min ? min : (x[i] > max ? 
max : x[i]); } -static __global__ void im2col_f32_f16( - const float * x, half * dst, - int offset_delta, int IW, int IH, int OW, int KW, int KH, int pelements, int CHW, +template +static __global__ void im2col_kernel( + const float * x, T * dst, int batch_offset, + int offset_delta, int IC, int IW, int IH, int OH, int OW, int KW, int KH, int pelements, int CHW, int s0, int s1, int p0, int p1, int d0, int d1) { const int i = threadIdx.x + blockIdx.x * blockDim.x; if (i >= pelements) { @@ -6160,21 +6182,73 @@ static __global__ void im2col_f32_f16( const int ky = (i - kd) / OW; const int ix = i % OW; + const int oh = blockIdx.y; + const int batch = blockIdx.z / IC; + const int ic = blockIdx.z % IC; + const int64_t iiw = ix * s0 + kx * d0 - p0; - const int64_t iih = blockIdx.y * s1 + ky * d1 - p1; + const int64_t iih = oh * s1 + ky * d1 - p1; const int64_t offset_dst = - (blockIdx.y * OW + ix) * CHW + - (blockIdx.z * (KW * KH) + ky * KW + kx); + ((batch * OH + oh) * OW + ix) * CHW + + (ic * (KW * KH) + ky * KW + kx); if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { - dst[offset_dst] = __float2half(0.0f); + dst[offset_dst] = 0.0f; } else { - const int64_t offset_src = blockIdx.z * offset_delta; - dst[offset_dst] = __float2half(x[offset_src + iih * IW + iiw]); + const int64_t offset_src = ic * offset_delta + batch * batch_offset; + dst[offset_dst] = x[offset_src + iih * IW + iiw]; } } +template +static __global__ void pool2d_nchw_kernel( + const int ih, const int iw, const int oh, const int ow, + const int kh, const int kw, const int sh, const int sw, + const int ph, const int pw, const int parallel_elements, + const Ti* src, To* dst, const enum ggml_op_pool op) { + int idx = threadIdx.x + blockIdx.x * blockDim.x; + if (idx >= parallel_elements) { + return; + } + + const int I_HW = ih * iw; + const int O_HW = oh * ow; + const int nc = idx / O_HW; + const int cur_oh = idx % O_HW / ow; + const int cur_ow = idx % O_HW % ow; + const Ti* i_ptr = src + nc * I_HW; + To* o_ptr = dst + nc * O_HW; + const int start_h = cur_oh * sh - ph; + const int bh = max(0, start_h); + const int eh = min(ih, start_h + kh); + const int start_w = cur_ow * sw - pw; + const int bw = max(0, start_w); + const int ew = min(iw, start_w + kw); + const To scale = 1. 
/ (kh * kw); + To res = 0; + + switch (op) { + case GGML_OP_POOL_AVG: res = 0; break; + case GGML_OP_POOL_MAX: res = -FLT_MAX; break; + } + + for (int i = bh; i < eh; i += 1) { + for (int j = bw; j < ew; j += 1) { + #if __CUDA_ARCH__ >= 350 + Ti cur = __ldg(i_ptr + i * iw + j); + #else + Ti cur = i_ptr[i * iw + j]; + #endif + switch (op) { + case GGML_OP_POOL_AVG: res += cur * scale; break; + case GGML_OP_POOL_MAX: res = max(res, (To)cur); break; + } + } + } + o_ptr[cur_oh * ow + cur_ow] = res; +} + template static void get_rows_cuda(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, const void * src0_dd, const int32_t * src1_dd, float * dst_dd, cudaStream_t stream) { @@ -6388,6 +6462,16 @@ static void relu_f32_cuda(const float * x, float * dst, const int k, cudaStream_ relu_f32<<>>(x, dst, k); } +static void hardsigmoid_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_HARDSIGMOID_BLOCK_SIZE - 1) / CUDA_HARDSIGMOID_BLOCK_SIZE; + hardsigmoid_f32<<>>(x, dst, k); +} + +static void hardswish_f32_cuda(const float * x, float * dst, const int k, cudaStream_t stream) { + const int num_blocks = (k + CUDA_HARDSWISH_BLOCK_SIZE - 1) / CUDA_HARDSWISH_BLOCK_SIZE; + hardswish_f32<<>>(x, dst, k); +} + static void leaky_relu_f32_cuda(const float * x, float * dst, const int k, const float negative_slope, cudaStream_t stream) { const int num_blocks = (k + CUDA_RELU_BLOCK_SIZE - 1) / CUDA_RELU_BLOCK_SIZE; leaky_relu_f32<<>>(x, dst, k, negative_slope); @@ -7475,7 +7559,7 @@ static void alibi_f32_cuda(const float * x, float * dst, const int ncols, const static void sum_rows_f32_cuda(const float * x, float * dst, const int ncols, const int nrows, cudaStream_t stream) { const dim3 block_dims(WARP_SIZE, 1, 1); - const dim3 block_nums(1, nrows, 1); + const dim3 block_nums(nrows, 1, 1); k_sum_rows_f32<<>>(x, dst, ncols); } @@ -7587,14 +7671,15 @@ static void soft_max_f32_cuda(const float * x, const float * y, float * dst, con } } -static void im2col_f32_f16_cuda(const float* x, half* dst, +template +static void im2col_cuda(const float* x, T* dst, int IW, int IH, int OW, int OH, int KW, int KH, int IC, - int offset_delta, + int batch, int batch_offset, int offset_delta, int s0,int s1,int p0,int p1,int d0,int d1, cudaStream_t stream) { const int parallel_elements = OW * KW * KH; const int num_blocks = (parallel_elements + CUDA_IM2COL_BLOCK_SIZE - 1) / CUDA_IM2COL_BLOCK_SIZE; - dim3 block_nums(num_blocks, OH, IC); - im2col_f32_f16<<>>(x, dst, offset_delta, IW, IH, OW, KW, KH, parallel_elements, (IC * KH * KW), s0, s1, p0, p1, d0, d1); + dim3 block_nums(num_blocks, OH, batch * IC); + im2col_kernel<<>>(x, dst, batch_offset, offset_delta, IC, IW, IH, OH, OW, KW, KH, parallel_elements, (IC * KH * KW), s0, s1, p0, p1, d0, d1); } // buffer pool for cuda @@ -8179,6 +8264,34 @@ static void ggml_cuda_op_relu( (void) src1_dd; } +static void ggml_cuda_op_hardsigmoid( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { + + GGML_ASSERT(src0->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + hardsigmoid_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), main_stream); + + (void) src1; + (void) dst; + (void) src1_dd; +} + +static void ggml_cuda_op_hardswish( + const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, + const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) { + + 
+    GGML_ASSERT(src0->type == GGML_TYPE_F32);
+    GGML_ASSERT( dst->type == GGML_TYPE_F32);
+
+    hardswish_f32_cuda(src0_dd, dst_dd, ggml_nelements(src0), main_stream);
+
+    (void) src1;
+    (void) dst;
+    (void) src1_dd;
+}
+
 static void ggml_cuda_op_leaky_relu(
     const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst,
     const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) {
@@ -8810,13 +8923,46 @@ static void ggml_cuda_op_alibi(
     (void) src1_dd;
 }

+static void ggml_cuda_op_pool2d(
+    const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst,
+    const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) {
+
+    GGML_ASSERT(src0->type == GGML_TYPE_F32);
+    GGML_ASSERT( dst->type == GGML_TYPE_F32);
+
+    const int32_t * opts = (const int32_t *)dst->op_params;
+    enum ggml_op_pool op = static_cast<enum ggml_op_pool>(opts[0]);
+    const int k0 = opts[1];
+    const int k1 = opts[2];
+    const int s0 = opts[3];
+    const int s1 = opts[4];
+    const int p0 = opts[5];
+    const int p1 = opts[6];
+
+    const int64_t IH = src0->ne[1];
+    const int64_t IW = src0->ne[0];
+
+    const int64_t N = dst->ne[3];
+    const int64_t OC = dst->ne[2];
+    const int64_t OH = dst->ne[1];
+    const int64_t OW = dst->ne[0];
+
+    const int parallel_elements = N * OC * OH * OW;
+    const int num_blocks = (parallel_elements + CUDA_POOL2D_BLOCK_SIZE - 1) / CUDA_POOL2D_BLOCK_SIZE;
+    dim3 block_nums(num_blocks);
+    pool2d_nchw_kernel<<<block_nums, CUDA_POOL2D_BLOCK_SIZE, 0, main_stream>>>(IH, IW, OH, OW, k1, k0, s1, s0, p1, p0, parallel_elements, src0_dd, dst_dd, op);
+
+    (void) src1;
+    (void) src1_dd;
+}
+
 static void ggml_cuda_op_im2col(
     const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst,
     const float * src0_dd, const float * src1_dd, float * dst_dd, cudaStream_t main_stream) {

     GGML_ASSERT(src0->type == GGML_TYPE_F16);
     GGML_ASSERT(src1->type == GGML_TYPE_F32);
-    GGML_ASSERT( dst->type == GGML_TYPE_F16);
+    GGML_ASSERT( dst->type == GGML_TYPE_F16 || dst->type == GGML_TYPE_F32);

     const int32_t s0 = ((const int32_t*)(dst->op_params))[0];
     const int32_t s1 = ((const int32_t*)(dst->op_params))[1];
@@ -8838,8 +8984,14 @@ static void ggml_cuda_op_im2col(
     const int64_t OW = dst->ne[1];

     const size_t delta_offset = src1->nb[is_2D ?
2 : 1] / 4; // nb is byte offset, src is type float32 + const int64_t batch = src1->ne[3]; + const size_t batch_offset = src1->nb[3] / 4; // nb is byte offset, src is type float32 - im2col_f32_f16_cuda(src1_dd, (half*) dst_dd, IW, IH, OW, OH, KW, KH, IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + if(dst->type == GGML_TYPE_F16) { + im2col_cuda(src1_dd, (half*) dst_dd, IW, IH, OW, OH, KW, KH, IC, batch, batch_offset, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + } else { + im2col_cuda(src1_dd, (float*) dst_dd, IW, IH, OW, OH, KW, KH, IC, batch, batch_offset, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + } (void) src0; (void) src0_dd; @@ -9435,6 +9587,13 @@ static void ggml_cuda_relu(const ggml_tensor * src0, const ggml_tensor * src1, g ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_relu); } +static void ggml_cuda_hardsigmoid(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_hardsigmoid); +} + +static void ggml_cuda_hardswish(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_hardswish); +} static void ggml_cuda_leaky_relu(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_leaky_relu); } @@ -10220,6 +10379,10 @@ static void ggml_cuda_alibi(const ggml_tensor * src0, const ggml_tensor * src1, ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_alibi); } +static void ggml_cuda_pool2d(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_pool2d); +} + static void ggml_cuda_im2col(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { ggml_cuda_op_flatten(src0, src1, dst, ggml_cuda_op_im2col); } @@ -10321,6 +10484,12 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st case GGML_UNARY_OP_RELU: func = ggml_cuda_relu; break; + case GGML_UNARY_OP_HARDSIGMOID: + func = ggml_cuda_hardsigmoid; + break; + case GGML_UNARY_OP_HARDSWISH: + func = ggml_cuda_hardswish; + break; default: return false; } @@ -10395,6 +10564,9 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st case GGML_OP_IM2COL: func = ggml_cuda_im2col; break; + case GGML_OP_POOL_2D: + func = ggml_cuda_pool2d; + break; case GGML_OP_SUM_ROWS: func = ggml_cuda_sum_rows; break; @@ -11123,6 +11295,8 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_SILU: case GGML_UNARY_OP_RELU: + case GGML_UNARY_OP_HARDSIGMOID: + case GGML_UNARY_OP_HARDSWISH: case GGML_UNARY_OP_GELU_QUICK: case GGML_UNARY_OP_TANH: return true; @@ -11221,6 +11395,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons case GGML_OP_ROPE: case GGML_OP_ALIBI: case GGML_OP_IM2COL: + case GGML_OP_POOL_2D: case GGML_OP_SUM_ROWS: case GGML_OP_ARGSORT: case GGML_OP_ACC: diff --git a/ggml.c b/ggml.c index afd9c6c61..ee994c875 100644 --- a/ggml.c +++ b/ggml.c @@ -5349,7 +5349,7 @@ GGML_API struct ggml_tensor * ggml_conv_1d( int s0, int p0, int d0) { - struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, 0, p0, 0, d0, 0, false); // [N, OL, IC * K] + struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, 0, p0, 0, d0, 0, false, GGML_TYPE_F16); // [N, OL, IC * K] struct ggml_tensor * result = ggml_mul_mat(ctx, @@ -5427,16 +5427,15 @@ struct ggml_tensor * ggml_conv_depthwise_2d( int 
p1, int d0, int d1) { + struct ggml_tensor * new_a = ggml_reshape_4d(ctx, a, a->ne[0], a->ne[1], 1, a->ne[2] * a->ne[3]); struct ggml_tensor * im2col = ggml_im2col(ctx, new_a, ggml_reshape_4d(ctx, b, b->ne[0], b->ne[1], 1, b->ne[2] * b->ne[3]), - s0, s1, p0, p1, d0, d1, true); // [N * IC, OH, OW, KH * KW] - - struct ggml_tensor * result = - ggml_mul_mat(ctx, - ggml_reshape_4d(ctx, new_a, (new_a->ne[0] * new_a->ne[1]), new_a->ne[2], new_a->ne[3], 1), // [OC,1, KH, KW] => [1, OC, 1, KH * KW] - ggml_reshape_4d(ctx, im2col, im2col->ne[0], im2col->ne[2] * im2col->ne[1], b->ne[2], b->ne[3])); // [N * IC, OH, OW, KH * KW] => [N, IC, OH * OW, KH * KW] + s0, s1, p0, p1, d0, d1, true, GGML_TYPE_F16); // [N * IC, OH, OW, KH * KW] + struct ggml_tensor * new_b = ggml_reshape_4d(ctx, im2col, im2col->ne[0], im2col->ne[2] * im2col->ne[1], b->ne[2], b->ne[3]); // [N * IC, OH, OW, KH * KW] => [N, IC, OH * OW, KH * KW] + new_a = ggml_reshape_4d(ctx, new_a, (new_a->ne[0] * new_a->ne[1]), new_a->ne[2], new_a->ne[3], 1); // [OC,1, KH, KW] => [1, OC, 1, KH * KW] + struct ggml_tensor * result = ggml_mul_mat(ctx, new_a, new_b); result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], b->ne[2], b->ne[3]); // [N, OC, OH, OW] return result; @@ -5457,7 +5456,8 @@ struct ggml_tensor * ggml_im2col( int p1, int d0, int d1, - bool is_2D) { + bool is_2D, + enum ggml_type dst_type) { if(is_2D) { GGML_ASSERT(a->ne[2] == b->ne[2]); @@ -5481,7 +5481,7 @@ struct ggml_tensor * ggml_im2col( is_2D ? b->ne[3] : 1, }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F16, 4, ne); + struct ggml_tensor * result = ggml_new_tensor(ctx, dst_type, 4, ne); int32_t params[] = { s0, s1, p0, p1, d0, d1, (is_2D ? 1 : 0) }; ggml_set_op_params(result, params, sizeof(params)); @@ -5506,7 +5506,7 @@ struct ggml_tensor * ggml_conv_2d( int p1, int d0, int d1) { - struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, s1, p0, p1, d0, d1, true); // [N, OH, OW, IC * KH * KW] + struct ggml_tensor * im2col = ggml_im2col(ctx, a, b, s0, s1, p0, p1, d0, d1, true, GGML_TYPE_F16); // [N, OH, OW, IC * KH * KW] struct ggml_tensor * result = ggml_mul_mat(ctx, @@ -5632,12 +5632,13 @@ struct ggml_tensor * ggml_pool_2d( is_node = true; } + struct ggml_tensor * result; const int64_t ne[3] = { ggml_calc_pool_output_size(a->ne[0], k0, s0, p0), ggml_calc_pool_output_size(a->ne[1], k1, s1, p1), a->ne[2], }; - struct ggml_tensor * result = ggml_new_tensor(ctx, GGML_TYPE_F32, 3, ne); + result = ggml_new_tensor(ctx, GGML_TYPE_F32, 3, ne); int32_t params[] = { op, k0, k1, s0, s1, p0, p1 }; ggml_set_op_params(result, params, sizeof(params)); @@ -5645,7 +5646,6 @@ struct ggml_tensor * ggml_pool_2d( result->op = GGML_OP_POOL_2D; result->grad = is_node ? 
ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; - return result; } @@ -12493,6 +12493,92 @@ static void ggml_compute_forward_conv_transpose_1d( } } +// src0: kernel [OC, IC, KH, KW] +// src1: image [N, IC, IH, IW] +// dst: result [N, OH, OW, IC*KH*KW] +static void ggml_compute_forward_im2col_f32( + const struct ggml_compute_params * params, + const struct ggml_tensor * src0, + const struct ggml_tensor * src1, + struct ggml_tensor * dst) { + GGML_ASSERT(src0->type == GGML_TYPE_F16); + GGML_ASSERT(src1->type == GGML_TYPE_F32); + GGML_ASSERT( dst->type == GGML_TYPE_F32); + + int64_t t0 = ggml_perf_time_us(); + UNUSED(t0); + + GGML_TENSOR_BINARY_OP_LOCALS; + + const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; + const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; + const int32_t p0 = ((const int32_t *)(dst->op_params))[2]; + const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; + const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; + const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; + + const int ith = params->ith; + const int nth = params->nth; + + const int64_t N = is_2D ? ne13 : ne12; + const int64_t IC = is_2D ? ne12 : ne11; + const int64_t IH = is_2D ? ne11 : 1; + const int64_t IW = ne10; + + const int64_t KH = is_2D ? ne01 : 1; + const int64_t KW = ne00; + + const int64_t OH = is_2D ? ne2 : 1; + const int64_t OW = ne1; + + int ofs0 = is_2D ? nb13 : nb12; + int ofs1 = is_2D ? nb12 : nb11; + + GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); + GGML_ASSERT(nb10 == sizeof(float)); + + if (params->type == GGML_TASK_INIT) { + return; + } + + if (params->type == GGML_TASK_FINALIZE) { + return; + } + + // im2col: [N, IC, IH, IW] => [N, OH, OW, IC*KH*KW] + { + float * const wdata = (float *) dst->data; + + for (int64_t in = 0; in < N; in++) { + for (int64_t ioh = 0; ioh < OH; ioh++) { // 1 + for (int64_t iow = 0; iow < OW; iow++) { + for (int64_t iic = ith; iic < IC; iic += nth) { + + // micro kernel + float * dst_data = wdata + (in*OH*OW + ioh*OW + iow)*(IC*KH*KW); // [IC, KH, KW] + const float * const src_data = (float *)((char *) src1->data + in*ofs0 + iic*ofs1); // [IH, IW] + + for (int64_t ikh = 0; ikh < KH; ikh++) { // 1 + for (int64_t ikw = 0; ikw < KW; ikw++) { + const int64_t iiw = iow*s0 + ikw*d0 - p0; + const int64_t iih = ioh*s1 + ikh*d1 - p1; + + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = 0; + } else { + dst_data[iic*(KH*KW) + ikh*KW + ikw] = (src_data[iih*IW + iiw]); + } + } + } + } + } + } + } + } +} + + // src0: kernel [OC, IC, KH, KW] // src1: image [N, IC, IH, IW] // dst: result [N, OH, OW, IC*KH*KW] @@ -12583,14 +12669,14 @@ static void ggml_compute_forward_im2col( const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { - switch (src0->type) { + switch (dst->type) { case GGML_TYPE_F16: { ggml_compute_forward_im2col_f16(params, src0, src1, dst); } break; case GGML_TYPE_F32: { - GGML_ASSERT(false); + ggml_compute_forward_im2col_f32(params, src0, src1, dst); } break; default: { @@ -12781,8 +12867,8 @@ static void ggml_compute_forward_pool_2d( const struct ggml_compute_params * params, const struct ggml_tensor * src, struct ggml_tensor * dst) { - assert(src->type == GGML_TYPE_F32); - assert(params->ith == 0); + GGML_ASSERT(src->type == GGML_TYPE_F32); + GGML_ASSERT(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; diff --git a/ggml.h b/ggml.h 
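For reference, a minimal caller-side sketch of the two public entry points touched above (hypothetical tensors; context setup is assumed, and the shape comment mirrors the ones in the diff):

    // im2col with an explicit F32 destination, e.g. for backends without F16 support
    struct ggml_tensor * cols = ggml_im2col(ctx, kernel, image,
            1, 1, 0, 0, 1, 1, /*is_2D*/ true, GGML_TYPE_F32); // [N, OH, OW, IC * KH * KW]

    // 3x3 average pooling with stride 1 and padding 1
    struct ggml_tensor * pooled = ggml_pool_2d(ctx, image, GGML_OP_POOL_AVG, 3, 3, 1, 1, 1, 1);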
index afc87b843..e0a4799f3 100644 --- a/ggml.h +++ b/ggml.h @@ -1495,7 +1495,8 @@ extern "C" { int p1, int d0, int d1, - bool is_2D); + bool is_2D, + enum ggml_type dst_type); GGML_API struct ggml_tensor * ggml_conv_depthwise_2d( struct ggml_context * ctx, diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 1d29070b6..eb06123d2 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -227,6 +227,14 @@ static std::string var_to_str(ggml_type type) { return ggml_type_name(type); } +static std::string var_to_str(ggml_op_pool pool) { + switch (pool) { + case GGML_OP_POOL_AVG: return "avg"; + case GGML_OP_POOL_MAX: return "max"; + default: return std::to_string(pool); + } +} + #define VARS_TO_STR1(a) VAR_TO_STR(a) #define VARS_TO_STR2(a, b) VAR_TO_STR(a) + "," + VAR_TO_STR(b) #define VARS_TO_STR3(a, b, c) VAR_TO_STR(a) + "," + VARS_TO_STR2(b, c) @@ -238,6 +246,7 @@ static std::string var_to_str(ggml_type type) { #define VARS_TO_STR9(a, b, c, d, e, f, g, h, i) VAR_TO_STR(a) + "," + VARS_TO_STR8(b, c, d, e, f, g, h, i) #define VARS_TO_STR10(a, b, c, d, e, f, g, h, i, j) VAR_TO_STR(a) + "," + VARS_TO_STR9(b, c, d, e, f, g, h, i, j) #define VARS_TO_STR11(a, b, c, d, e, f, g, h, i, j, k) VAR_TO_STR(a) + "," + VARS_TO_STR10(b, c, d, e, f, g, h, i, j, k) +#define VARS_TO_STR12(a, b, c, d, e, f, g, h, i, j, k, l) VAR_TO_STR(a) + "," + VARS_TO_STR11(b, c, d, e, f, g, h, i, j, k, l) #ifdef GGML_USE_SYCL static bool inline _isinf(float f) { @@ -1162,10 +1171,45 @@ struct test_alibi : public test_case { } }; +// GGML_OP_POOL2D +struct test_pool2d : public test_case { + enum ggml_op_pool pool_type; + const ggml_type type_input; + const std::array ne_input; + // kernel size + const int k0; + const int k1; + // stride + const int s0; + const int s1; + // padding + const int p0; + const int p1; + + std::string vars() override { + return VARS_TO_STR9(pool_type, type_input, ne_input, k0, k1, s0, s1, p0, p1); + } + + test_pool2d(ggml_op_pool pool_type = GGML_OP_POOL_AVG, + ggml_type type_input = GGML_TYPE_F32, + std::array ne_input = {10, 10, 3, 1}, // [input_width, input_height, input_channels, 1] + int k0 = 3, int k1 = 3, + int s0 = 1, int s1 = 1, + int p0 = 1, int p1 = 1) + : pool_type(pool_type), type_input(type_input), ne_input(ne_input), k0(k0), k1(k1), s0(s0), s1(s1), p0(p0), p1(p1) {} + + ggml_tensor * build_graph(ggml_context * ctx) override { + ggml_tensor * input = ggml_new_tensor(ctx, type_input, 4, ne_input.data()); + ggml_tensor * out = ggml_pool_2d(ctx, input, pool_type, k0, k1, s0, s1, p0, p1); + return out; + } +}; + // GGML_OP_IM2COL struct test_im2col : public test_case { const ggml_type type_input; const ggml_type type_kernel; + const ggml_type dst_type; const std::array ne_input; const std::array ne_kernel; // stride @@ -1181,22 +1225,22 @@ struct test_im2col : public test_case { const bool is_2D; std::string vars() override { - return VARS_TO_STR11(type_input, type_kernel, ne_input, ne_kernel, s0, s1, p0, p1, d0, d1, is_2D); + return VARS_TO_STR12(type_input, type_kernel, dst_type, ne_input, ne_kernel, s0, s1, p0, p1, d0, d1, is_2D); } - test_im2col(ggml_type type_input = GGML_TYPE_F32, ggml_type type_kernel = GGML_TYPE_F16, + test_im2col(ggml_type type_input = GGML_TYPE_F32, ggml_type type_kernel = GGML_TYPE_F16, ggml_type dst_type = GGML_TYPE_F32, std::array ne_input = {10, 10, 3, 1}, // [input_width, input_height, input_channels, 1] std::array ne_kernel = {3, 3, 3, 1}, // [kernel_width, kernel_height, input_channels, 1] int s0 = 1, int s1 = 1, int 
p0 = 1, int p1 = 1, int d0 = 1, int d1 = 1, bool is_2D = true) - : type_input(type_input), type_kernel(type_kernel), ne_input(ne_input), ne_kernel(ne_kernel), s0(s0), s1(s1), p0(p0), p1(p1), d0(d0), d1(d1), is_2D(is_2D) {} + : type_input(type_input), type_kernel(type_kernel), dst_type(dst_type), ne_input(ne_input), ne_kernel(ne_kernel), s0(s0), s1(s1), p0(p0), p1(p1), d0(d0), d1(d1), is_2D(is_2D) {} ggml_tensor * build_graph(ggml_context * ctx) override { ggml_tensor * input = ggml_new_tensor(ctx, type_input, 4, ne_input.data()); ggml_tensor * kernel = ggml_new_tensor(ctx, type_kernel, 4, ne_kernel.data()); - ggml_tensor * out = ggml_im2col(ctx, kernel, input, s0, s1, p0, p1, d0, d1, is_2D); + ggml_tensor * out = ggml_im2col(ctx, kernel, input, s0, s1, p0, p1, d0, d1, is_2D, dst_type); return out; } }; @@ -1912,6 +1956,27 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op } } + for (ggml_type type_input : {GGML_TYPE_F32}) { + for (ggml_op_pool pool_type : {GGML_OP_POOL_AVG, GGML_OP_POOL_MAX}) { + for (int k0 : {1, 3}) { + for (int k1 : {1, 3}) { + for (int s0 : {1, 2}) { + for (int s1 : {1, 2}) { + for (int p0 : {0, 1}) { + for (int p1 : {0, 1}) { + test_cases.emplace_back(new test_pool2d(pool_type, type_input, {10, 10, 3, 1}, k0, k1, s0, s1, p0, p1)); + } + } + } + } + } + } + } + } + + test_cases.emplace_back(new test_im2col(GGML_TYPE_F32, GGML_TYPE_F16, GGML_TYPE_F32)); + test_cases.emplace_back(new test_im2col(GGML_TYPE_F32, GGML_TYPE_F16, GGML_TYPE_F16)); + test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 1, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {2, 1, 1, 1})); test_cases.emplace_back(new test_repeat(GGML_TYPE_F32, {10, 10, 10, 10}, {1, 2, 1, 1})); @@ -2049,7 +2114,6 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op } test_cases.emplace_back(new test_alibi()); - test_cases.emplace_back(new test_im2col()); test_cases.emplace_back(new test_concat(GGML_TYPE_F32)); test_cases.emplace_back(new test_concat(GGML_TYPE_I32)); From efb7bdbbd061d087c788598b97992c653f992ddd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 31 Jan 2024 15:35:41 +0200 Subject: [PATCH 634/859] metal : add im2col F32 dst support (#5132) --- ggml-metal.m | 13 ++++++++++--- ggml-metal.metal | 33 +++++++++++++++++++++++++++++---- 2 files changed, 39 insertions(+), 7 deletions(-) diff --git a/ggml-metal.m b/ggml-metal.m index f87859552..5260ed827 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -135,6 +135,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_ROPE_F16, GGML_METAL_KERNEL_TYPE_ALIBI_F32, GGML_METAL_KERNEL_TYPE_IM2COL_F16, + GGML_METAL_KERNEL_TYPE_IM2COL_F32, GGML_METAL_KERNEL_TYPE_UPSCALE_F32, GGML_METAL_KERNEL_TYPE_PAD_F32, GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, @@ -506,6 +507,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F16, im2col_f16, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_IM2COL_F32, im2col_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_UPSCALE_F32, upscale_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_PAD_F32, pad_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC, argsort_f32_i32_asc, true); @@ -630,6 +632,10 @@ static bool ggml_metal_supports_op(const struct 
ggml_metal_context * ctx, const case GGML_OP_ALIBI: case GGML_OP_ROPE: case GGML_OP_IM2COL: + return true; + case GGML_OP_POOL_1D: + case GGML_OP_POOL_2D: + return false; case GGML_OP_UPSCALE: case GGML_OP_PAD: case GGML_OP_ARGSORT: @@ -2015,7 +2021,7 @@ static bool ggml_metal_graph_compute( { GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F16); + GGML_ASSERT( dst->type == GGML_TYPE_F16 || dst->type == GGML_TYPE_F32); const int32_t s0 = ((const int32_t *)(dst->op_params))[0]; const int32_t s1 = ((const int32_t *)(dst->op_params))[1]; @@ -2023,6 +2029,7 @@ static bool ggml_metal_graph_compute( const int32_t p1 = ((const int32_t *)(dst->op_params))[3]; const int32_t d0 = ((const int32_t *)(dst->op_params))[4]; const int32_t d1 = ((const int32_t *)(dst->op_params))[5]; + const bool is_2D = ((const int32_t *)(dst->op_params))[6] == 1; const int32_t N = src1->ne[is_2D ? 3 : 2]; @@ -2043,8 +2050,8 @@ static bool ggml_metal_graph_compute( id pipeline = nil; - switch (src0->type) { - case GGML_TYPE_F32: GGML_ASSERT(false && "not implemented"); break; + switch (dst->type) { + case GGML_TYPE_F32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F32].pipeline; break; case GGML_TYPE_F16: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_IM2COL_F16].pipeline; break; default: GGML_ASSERT(false); }; diff --git a/ggml-metal.metal b/ggml-metal.metal index 2614d82e8..efed6ad46 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -1775,9 +1775,29 @@ kernel void kernel_rope( template [[host_name("kernel_rope_f32")]] kernel rope_t kernel_rope; template [[host_name("kernel_rope_f16")]] kernel rope_t kernel_rope; -kernel void kernel_im2col_f16( +typedef void (im2col_t)( device const float * x, - device half * dst, + device char * dst, + constant int32_t & ofs0, + constant int32_t & ofs1, + constant int32_t & IW, + constant int32_t & IH, + constant int32_t & CHW, + constant int32_t & s0, + constant int32_t & s1, + constant int32_t & p0, + constant int32_t & p1, + constant int32_t & d0, + constant int32_t & d1, + uint3 tgpig[[threadgroup_position_in_grid]], + uint3 tgpg[[threadgroups_per_grid]], + uint3 tpitg[[thread_position_in_threadgroup]], + uint3 ntg[[threads_per_threadgroup]]); + +template +kernel void kernel_im2col( + device const float * x, + device char * dst, constant int32_t & ofs0, constant int32_t & ofs1, constant int32_t & IW, @@ -1800,14 +1820,19 @@ kernel void kernel_im2col_f16( (tpitg[0] * tgpg[1] * tgpg[2] + tgpig[1] * tgpg[2] + tgpig[2]) * CHW + (tgpig[0] * (ntg[1] * ntg[2]) + tpitg[1] * ntg[2] + tpitg[2]); + device T * pdst = (device T *) (dst); + if (iih < 0 || iih >= IH || iiw < 0 || iiw >= IW) { - dst[offset_dst] = 0.0f; + pdst[offset_dst] = 0.0f; } else { const int32_t offset_src = tpitg[0] * ofs0 + tgpig[0] * ofs1; - dst[offset_dst] = x[offset_src + iih * IW + iiw]; + pdst[offset_dst] = x[offset_src + iih * IW + iiw]; } } +template [[host_name("kernel_im2col_f32")]] kernel im2col_t kernel_im2col; +template [[host_name("kernel_im2col_f16")]] kernel im2col_t kernel_im2col; + kernel void kernel_upscale_f32( device const char * src0, device char * dst, From 5cb04dbc16d1da38c8fdcc0111b40e67d00dd1c3 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 31 Jan 2024 17:30:17 +0200 Subject: [PATCH 635/859] llama : remove LLAMA_MAX_DEVICES and LLAMA_SUPPORTS_GPU_OFFLOAD (#5240) * llama : remove LLAMA_MAX_DEVICES from llama.h ggml-ci * Update llama.cpp Co-authored-by: slaren * server : remove LLAMA_MAX_DEVICES ggml-ci 
* llama : remove LLAMA_SUPPORTS_GPU_OFFLOAD ggml-ci * train : remove LLAMA_SUPPORTS_GPU_OFFLOAD * readme : add deprecation notice * readme : change deprecation notice to "remove" and fix url * llama : remove gpu includes from llama.h ggml-ci --------- Co-authored-by: slaren --- README.md | 3 +- common/common.cpp | 56 ++++++++++---------- common/common.h | 66 ++++++++++++------------ common/train.cpp | 12 ++--- examples/batched-bench/batched-bench.cpp | 2 +- examples/llama-bench/llama-bench.cpp | 16 +++--- examples/server/server.cpp | 44 ++++++++-------- llama.cpp | 39 +++++++++++--- llama.h | 29 ++++------- 9 files changed, 143 insertions(+), 124 deletions(-) diff --git a/README.md b/README.md index 7746cb510..e6ed1d429 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,8 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ### Hot topics -- ⚠️ Incoming backends: https://github.com/ggerganov/llama.cpp/discussions/5138 +- Remove LLAMA_MAX_DEVICES and LLAMA_SUPPORTS_GPU_OFFLOAD: https://github.com/ggerganov/llama.cpp/pull/5240 +- Incoming backends: https://github.com/ggerganov/llama.cpp/discussions/5138 - [SYCL backend](README-sycl.md) is ready (1/28/2024), support Linux/Windows in Intel GPUs (iGPU, Arc/Flex/Max series) - New SOTA quantized models, including pure 2-bits: https://huggingface.co/ikawrakow - Collecting Apple Silicon performance stats: diff --git a/common/common.cpp b/common/common.cpp index 9d976c7c8..ce739b15c 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -583,20 +583,20 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } params.n_gpu_layers = std::stoi(argv[i]); -#ifndef LLAMA_SUPPORTS_GPU_OFFLOAD - fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); - fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); -#endif + if (!llama_supports_gpu_offload()) { + fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); + fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); + } } else if (arg == "--gpu-layers-draft" || arg == "-ngld" || arg == "--n-gpu-layers-draft") { if (++i >= argc) { invalid_param = true; break; } params.n_gpu_layers_draft = std::stoi(argv[i]); -#ifndef LLAMA_SUPPORTS_GPU_OFFLOAD - fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers-draft option will be ignored\n"); - fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); -#endif + if (!llama_supports_gpu_offload()) { + fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers-draft option will be ignored\n"); + fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); + } } else if (arg == "--main-gpu" || arg == "-mg") { if (++i >= argc) { invalid_param = true; @@ -637,11 +637,11 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { const std::regex regex{R"([,/]+)"}; std::sregex_token_iterator it{arg_next.begin(), arg_next.end(), regex, -1}; std::vector split_arg{it, {}}; - if (split_arg.size() >= LLAMA_MAX_DEVICES) { + if (split_arg.size() >= llama_max_devices()) { invalid_param = true; break; } - for (size_t i = 0; i < LLAMA_MAX_DEVICES; ++i) { + for (size_t i = 0; i < llama_max_devices(); ++i) { if (i < split_arg.size()) { params.tensor_split[i] = std::stof(split_arg[i]); } else { @@ -989,30 +989,30 @@ void 
gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA. see examples/llava/README.md\n"); printf(" --image IMAGE_FILE path to an image file. use with multimodal models\n"); - if (llama_mlock_supported()) { + if (llama_supports_mlock()) { printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); } - if (llama_mmap_supported()) { + if (llama_supports_mmap()) { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } printf(" --numa attempt optimizations that help on some NUMA systems\n"); printf(" if run without this previously, it is recommended to drop the system page cache before using this\n"); printf(" see https://github.com/ggerganov/llama.cpp/issues/1437\n"); -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD - printf(" -ngl N, --n-gpu-layers N\n"); - printf(" number of layers to store in VRAM\n"); - printf(" -ngld N, --n-gpu-layers-draft N\n"); - printf(" number of layers to store in VRAM for the draft model\n"); - printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); - printf(" how to split the model across multiple GPUs, one of:\n"); - printf(" - none: use one GPU only\n"); - printf(" - layer (default): split layers and KV across GPUs\n"); - printf(" - row: split rows across GPUs\n"); - printf(" -ts SPLIT, --tensor-split SPLIT\n"); - printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); - printf(" or for intermediate results and KV (with split-mode = row) (default: %d)\n", params.main_gpu); -#endif // LLAMA_SUPPORTS_GPU_OFFLOAD + if (llama_supports_gpu_offload()) { + printf(" -ngl N, --n-gpu-layers N\n"); + printf(" number of layers to store in VRAM\n"); + printf(" -ngld N, --n-gpu-layers-draft N\n"); + printf(" number of layers to store in VRAM for the draft model\n"); + printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); + printf(" -ts SPLIT, --tensor-split SPLIT\n"); + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row) (default: %d)\n", params.main_gpu); + } printf(" --verbose-prompt print a verbose prompt before generation (default: %s)\n", params.verbose_prompt ? "true" : "false"); printf(" --no-display-prompt don't print prompt at generation (default: %s)\n", !params.display_prompt ? "true" : "false"); printf(" -gan N, --grp-attn-n N\n"); @@ -1651,7 +1651,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "cont_batching: %s # default: false\n", params.cont_batching ? 
"true" : "false"); fprintf(stream, "temp: %f # default: 0.8\n", sparams.temp); - const std::vector tensor_split_vector(params.tensor_split, params.tensor_split + LLAMA_MAX_DEVICES); + const std::vector tensor_split_vector(params.tensor_split, params.tensor_split + llama_max_devices()); dump_vector_float_yaml(stream, "tensor_split", tensor_split_vector); fprintf(stream, "tfs: %f # default: 1.0\n", sparams.tfs_z); diff --git a/common/common.h b/common/common.h index 214a379b5..24a99d728 100644 --- a/common/common.h +++ b/common/common.h @@ -43,40 +43,40 @@ extern char const *LLAMA_BUILD_TARGET; int32_t get_num_physical_cores(); struct gpt_params { - uint32_t seed = -1; // RNG seed + uint32_t seed = -1; // RNG seed - int32_t n_threads = get_num_physical_cores(); - int32_t n_threads_draft = -1; - int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads) - int32_t n_threads_batch_draft = -1; - int32_t n_predict = -1; // new tokens to predict - int32_t n_ctx = 512; // context size - int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) - int32_t n_keep = 0; // number of tokens to keep from initial prompt - int32_t n_draft = 8; // number of tokens to draft during speculative decoding - int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) - int32_t n_parallel = 1; // number of parallel sequences to decode - int32_t n_sequences = 1; // number of sequences to decode - float p_accept = 0.5f; // speculative decoding accept probability - float p_split = 0.1f; // speculative decoding split probability - int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) - int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) - llama_split_mode split_mode = LLAMA_SPLIT_LAYER; // how to split the model across GPUs - int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors - float tensor_split[LLAMA_MAX_DEVICES] = {0}; // how split tensors should be distributed across GPUs - int32_t n_beams = 0; // if non-zero then use beam search of given width. 
- int32_t grp_attn_n = 1; // group-attention factor - int32_t grp_attn_w = 512; // group-attention width - int32_t n_print = -1; // print token count every n tokens (-1 = disabled) - float rope_freq_base = 0.0f; // RoPE base frequency - float rope_freq_scale = 0.0f; // RoPE frequency scaling factor - float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor - float yarn_attn_factor = 1.0f; // YaRN magnitude scaling factor - float yarn_beta_fast = 32.0f; // YaRN low correction dim - float yarn_beta_slow = 1.0f; // YaRN high correction dim - int32_t yarn_orig_ctx = 0; // YaRN original context length - int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // TODO: better to be int32_t for alignment - // pinging @cebtenzzre + int32_t n_threads = get_num_physical_cores(); + int32_t n_threads_draft = -1; + int32_t n_threads_batch = -1; // number of threads to use for batch processing (-1 = use n_threads) + int32_t n_threads_batch_draft = -1; + int32_t n_predict = -1; // new tokens to predict + int32_t n_ctx = 512; // context size + int32_t n_batch = 512; // batch size for prompt processing (must be >=32 to use BLAS) + int32_t n_keep = 0; // number of tokens to keep from initial prompt + int32_t n_draft = 8; // number of tokens to draft during speculative decoding + int32_t n_chunks = -1; // max number of chunks to process (-1 = unlimited) + int32_t n_parallel = 1; // number of parallel sequences to decode + int32_t n_sequences = 1; // number of sequences to decode + float p_accept = 0.5f; // speculative decoding accept probability + float p_split = 0.1f; // speculative decoding split probability + int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) + int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) + llama_split_mode split_mode = LLAMA_SPLIT_LAYER; // how to split the model across GPUs + int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors + float tensor_split[128] = {0}; // how split tensors should be distributed across GPUs + int32_t n_beams = 0; // if non-zero then use beam search of given width. 
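+    // note on tensor_split above: 128 is a fixed upper bound replacing the old
+    // LLAMA_MAX_DEVICES macro; only the first llama_max_devices() entries are
+    // consulted at runtime (see the common.cpp changes in this patch)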
+ int32_t grp_attn_n = 1; // group-attention factor + int32_t grp_attn_w = 512; // group-attention width + int32_t n_print = -1; // print token count every n tokens (-1 = disabled) + float rope_freq_base = 0.0f; // RoPE base frequency + float rope_freq_scale = 0.0f; // RoPE frequency scaling factor + float yarn_ext_factor = -1.0f; // YaRN extrapolation mix factor + float yarn_attn_factor = 1.0f; // YaRN magnitude scaling factor + float yarn_beta_fast = 32.0f; // YaRN low correction dim + float yarn_beta_slow = 1.0f; // YaRN high correction dim + int32_t yarn_orig_ctx = 0; // YaRN original context length + int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // TODO: better to be int32_t for alignment + // pinging @cebtenzzre // // sampling parameters struct llama_sampling_params sparams; diff --git a/common/train.cpp b/common/train.cpp index e6f2f7a2f..e4c3d5df6 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -1363,12 +1363,12 @@ bool consume_common_train_arg( *invalid_param = true; return true; } -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD - params->n_gpu_layers = std::stoi(argv[i]); -#else - fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); - fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); -#endif + if (llama_supports_gpu_offload()) { + params->n_gpu_layers = std::stoi(argv[i]); + } else { + fprintf(stderr, "warning: not compiled with GPU offload support, --n-gpu-layers option will be ignored\n"); + fprintf(stderr, "warning: see main README.md for information on enabling GPU BLAS support\n"); + } } else if (arg == "-h" || arg == "--help") { params->print_usage = true; return true; diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 7924db267..b52d68457 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -88,7 +88,7 @@ int main(int argc, char ** argv) { llama_model_params model_params = llama_model_default_params(); - const std::vector t_split (LLAMA_MAX_DEVICES, 0.0f); + const std::vector t_split(llama_max_devices(), 0.0f); model_params.n_gpu_layers = n_gpu_layers; model_params.tensor_split = t_split.data(); diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 542cc7bb8..c5a6f744e 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -160,7 +160,7 @@ struct cmd_params { std::vector main_gpu; std::vector no_kv_offload; std::vector mul_mat_q; - std::vector> tensor_split; + std::vector> tensor_split; int reps; bool verbose; output_formats output_format; @@ -179,7 +179,7 @@ static const cmd_params cmd_params_defaults = { /* main_gpu */ {0}, /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, - /* tensor_split */ {{}}, + /* tensor_split */ {std::vector(llama_max_devices(), 0.0f)}, /* reps */ 5, /* verbose */ false, /* output_format */ MARKDOWN @@ -380,10 +380,10 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { const std::regex regex{R"([;/]+)"}; std::sregex_token_iterator it{ts.begin(), ts.end(), regex, -1}; std::vector split_arg{it, {}}; - GGML_ASSERT(split_arg.size() <= LLAMA_MAX_DEVICES); + GGML_ASSERT(split_arg.size() <= llama_max_devices()); - std::array tensor_split; - for (size_t i = 0; i < LLAMA_MAX_DEVICES; ++i) { + std::vector tensor_split(llama_max_devices()); + for (size_t i = 0; i < llama_max_devices(); ++i) { if (i < split_arg.size()) { tensor_split[i] = 
std::stof(split_arg[i]); } else { @@ -459,7 +459,7 @@ struct cmd_params_instance { int main_gpu; bool no_kv_offload; bool mul_mat_q; - std::array tensor_split; + std::vector tensor_split; llama_model_params to_llama_mparams() const { llama_model_params mparams = llama_model_default_params(); @@ -582,7 +582,7 @@ struct test { int main_gpu; bool no_kv_offload; bool mul_mat_q; - std::array tensor_split; + std::vector tensor_split; int n_prompt; int n_gen; std::string test_time; @@ -704,7 +704,7 @@ struct test { std::vector get_values() const { std::string tensor_split_str; int max_nonzero = 0; - for (int i = 0; i < LLAMA_MAX_DEVICES; i++) { + for (size_t i = 0; i < llama_max_devices(); i++) { if (tensor_split[i] > 0) { max_nonzero = i; } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 21bdce8ed..ea77125ea 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1789,28 +1789,28 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); printf(" --memory-f32 use f32 instead of f16 for memory key+value (default: disabled)\n"); printf(" not recommended: doubles context memory required and no measurable increase in quality\n"); - if (llama_mlock_supported()) + if (llama_supports_mlock()) { printf(" --mlock force system to keep model in RAM rather than swapping or compressing\n"); } - if (llama_mmap_supported()) + if (llama_supports_mmap()) { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } printf(" --numa attempt optimizations that help on some NUMA systems\n"); -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD - printf(" -ngl N, --n-gpu-layers N\n"); - printf(" number of layers to store in VRAM\n"); - printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); - printf(" how to split the model across multiple GPUs, one of:\n"); - printf(" - none: use one GPU only\n"); - printf(" - layer (default): split layers and KV across GPUs\n"); - printf(" - row: split rows across GPUs\n"); - printf(" -ts SPLIT --tensor-split SPLIT\n"); - printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 3,1\n"); - printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); - printf(" or for intermediate results and KV (with split-mode = row)\n"); -#endif + if (llama_supports_gpu_offload()) { + printf(" -ngl N, --n-gpu-layers N\n"); + printf(" number of layers to store in VRAM\n"); + printf(" -sm SPLIT_MODE, --split-mode SPLIT_MODE\n"); + printf(" how to split the model across multiple GPUs, one of:\n"); + printf(" - none: use one GPU only\n"); + printf(" - layer (default): split layers and KV across GPUs\n"); + printf(" - row: split rows across GPUs\n"); + printf(" -ts SPLIT --tensor-split SPLIT\n"); + printf(" fraction of the model to offload to each GPU, comma-separated list of proportions, e.g. 
3,1\n"); + printf(" -mg i, --main-gpu i the GPU to use for the model (with split-mode = none),\n"); + printf(" or for intermediate results and KV (with split-mode = row)\n"); + } printf(" -m FNAME, --model FNAME\n"); printf(" model path (default: %s)\n", params.model.c_str()); printf(" -a ALIAS, --alias ALIAS\n"); @@ -2066,13 +2066,13 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } -#ifdef LLAMA_SUPPORTS_GPU_OFFLOAD - params.n_gpu_layers = std::stoi(argv[i]); -#else - LOG_WARNING("Not compiled with GPU offload support, --n-gpu-layers option will be ignored. " + if (llama_supports_gpu_offload()) { + params.n_gpu_layers = std::stoi(argv[i]); + } else { + LOG_WARNING("Not compiled with GPU offload support, --n-gpu-layers option will be ignored. " "See main README.md for information on enabling GPU BLAS support", {{"n_gpu_layers", params.n_gpu_layers}}); -#endif + } } else if (arg == "--split-mode" || arg == "-sm") { @@ -2115,9 +2115,9 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, const std::regex regex{R"([,/]+)"}; std::sregex_token_iterator it{arg_next.begin(), arg_next.end(), regex, -1}; std::vector split_arg{it, {}}; - GGML_ASSERT(split_arg.size() <= LLAMA_MAX_DEVICES); + GGML_ASSERT(split_arg.size() <= llama_max_devices()); - for (size_t i_device = 0; i_device < LLAMA_MAX_DEVICES; ++i_device) + for (size_t i_device = 0; i_device < llama_max_devices(); ++i_device) { if (i_device < split_arg.size()) { diff --git a/llama.cpp b/llama.cpp index bb23689fa..9b249ba9c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10090,18 +10090,45 @@ struct llama_model_quantize_params llama_model_quantize_default_params() { return result; } -int32_t llama_max_devices(void) { - return LLAMA_MAX_DEVICES; +size_t llama_max_devices(void) { +#if defined(GGML_USE_METAL) + return 1; +#elif defined(GGML_USE_CUBLAS) + return GGML_CUDA_MAX_DEVICES; +#elif defined(GGML_USE_SYCL) + return GGML_SYCL_MAX_DEVICES; +#else + return 1; +#endif } -bool llama_mmap_supported(void) { +bool llama_supports_mmap(void) { return llama_mmap::SUPPORTED; } -bool llama_mlock_supported(void) { +bool llama_supports_mlock(void) { return llama_mlock::SUPPORTED; } +bool llama_supports_gpu_offload(void) { +#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL) || defined(GGML_USE_VULKAN) || \ + defined(GGML_USE_SYCL) || defined(GGML_USE_KOMPUTE) + // Defined when llama.cpp is compiled with support for offloading model layers to GPU. 
+ return true; +#else + return false; +#endif +} + +// deprecated: +bool llama_mmap_supported(void) { + return llama_supports_mmap(); +} + +bool llama_mlock_supported(void) { + return llama_supports_mlock(); +} + void llama_backend_init(bool numa) { ggml_time_init(); @@ -10133,8 +10160,8 @@ int64_t llama_time_us(void) { } struct llama_model * llama_load_model_from_file( - const char * path_model, - struct llama_model_params params) { + const char * path_model, + struct llama_model_params params) { ggml_time_init(); llama_model * model = new llama_model; diff --git a/llama.h b/llama.h index 17d43d039..9a60e9bfb 100644 --- a/llama.h +++ b/llama.h @@ -3,15 +3,7 @@ #include "ggml.h" #include "ggml-backend.h" -#ifdef GGML_USE_CUBLAS -#include "ggml-cuda.h" -#define LLAMA_MAX_DEVICES GGML_CUDA_MAX_DEVICES -#elif defined(GGML_USE_SYCL) -#include "ggml-sycl.h" -#define LLAMA_MAX_DEVICES GGML_SYCL_MAX_DEVICES -#else -#define LLAMA_MAX_DEVICES 1 -#endif // GGML_USE_CUBLAS + #include #include #include @@ -49,12 +41,6 @@ #define LLAMA_SESSION_MAGIC LLAMA_FILE_MAGIC_GGSN #define LLAMA_SESSION_VERSION 4 -#if defined(GGML_USE_CUBLAS) || defined(GGML_USE_CLBLAST) || defined(GGML_USE_METAL) || defined(GGML_USE_VULKAN) || \ - defined(GGML_USE_SYCL) || defined(GGML_USE_KOMPUTE) -// Defined when llama.cpp is compiled with support for offloading model layers to GPU. -#define LLAMA_SUPPORTS_GPU_OFFLOAD -#endif - #ifdef __cplusplus extern "C" { #endif @@ -201,7 +187,7 @@ extern "C" { // LLAMA_SPLIT_LAYER: ignored int32_t main_gpu; - // proportion of the model (layers or rows) to offload to each GPU, size: LLAMA_MAX_DEVICES + // proportion of the model (layers or rows) to offload to each GPU, size: llama_max_devices() const float * tensor_split; // Called with a progress value between 0.0 and 1.0. Pass NULL to disable. 
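A short caller-side sketch of the replacement API declared in the hunk below (hypothetical snippet; error handling omitted):

    if (!llama_supports_gpu_offload()) {
        fprintf(stderr, "warning: GPU offload not available, -ngl will be ignored\n");
    }
    // tensor_split buffers are sized at runtime now, not via LLAMA_MAX_DEVICES
    std::vector<float> tensor_split(llama_max_devices(), 0.0f);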
@@ -338,9 +324,14 @@ extern "C" { LLAMA_API int64_t llama_time_us(void); - LLAMA_API int32_t llama_max_devices(void); - LLAMA_API bool llama_mmap_supported (void); - LLAMA_API bool llama_mlock_supported(void); + LLAMA_API size_t llama_max_devices(void); + + LLAMA_API bool llama_supports_mmap (void); + LLAMA_API bool llama_supports_mlock (void); + LLAMA_API bool llama_supports_gpu_offload(void); + + LLAMA_API DEPRECATED(bool llama_mmap_supported (void), "use llama_supports_mmap() instead"); + LLAMA_API DEPRECATED(bool llama_mlock_supported(void), "use llama_supports_mlock() instead"); LLAMA_API const struct llama_model * llama_get_model(const struct llama_context * ctx); From d3bac7d58408c602ec1f1e423695f1df8410bb03 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 31 Jan 2024 18:47:10 +0200 Subject: [PATCH 636/859] llama : reorder build_orion() at correct place (#5118) --- llama.cpp | 239 +++++++++++++++++++++++++++--------------------------- 1 file changed, 119 insertions(+), 120 deletions(-) diff --git a/llama.cpp b/llama.cpp index 9b249ba9c..02b0a485a 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4666,126 +4666,6 @@ struct llm_build_context { ctx0 = nullptr; } } - struct ggml_cgraph * build_orion() { - struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); - - const int64_t n_embd_head = hparams.n_embd_head_v; - GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_head == hparams.n_rot); - - struct ggml_tensor * cur; - struct ggml_tensor * inpL; - - inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); - cb(inpL, "inp_embd", -1); - - // inp_pos - contains the positions - struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); - cb(inp_pos, "inp_pos", -1); - - // KQ_mask (mask for 1 head, it will be broadcasted to all heads) - struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); - cb(KQ_mask, "KQ_mask", -1); - - // shift the entire K-cache if needed - if (do_rope_shift) { - llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); - } - - for (int il = 0; il < n_layer; ++il) { - struct ggml_tensor * inpSA = inpL; - - // norm - cur = llm_build_norm(ctx0, inpL, hparams, - model.layers[il].attn_norm, model.layers[il].attn_norm_b, - LLM_NORM, cb, il); - cb(cur, "attn_norm", il); - - // self-attention - { - // compute Q and K and RoPE them - struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); - cb(Qcur, "Qcur", il); - // if (model.layers[il].bq) { - // Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); - // cb(Qcur, "Qcur", il); - // } - - struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); - cb(Kcur, "Kcur", il); - // if (model.layers[il].bk) { - // Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); - // cb(Kcur, "Kcur", il); - // } - - struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); - cb(Vcur, "Vcur", il); - // if (model.layers[il].bv) { - // Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); - // cb(Vcur, "Vcur", il); - // } - - Qcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, - hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, - ext_factor, attn_factor, beta_fast, beta_slow - ); - cb(Qcur, "Qcur", il); - - Kcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, - 
hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, - ext_factor, attn_factor, beta_fast, beta_slow - ); - cb(Kcur, "Kcur", il); - - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); - cb(cur, "kqv_out", il); - } - - struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); - cb(ffn_inp, "ffn_inp", il); - - // feed-forward network - cur = llm_build_norm(ctx0, ffn_inp, hparams, - model.layers[il].ffn_norm, model.layers[il].ffn_norm_b, - LLM_NORM, cb, il); - cb(cur, "ffn_norm", il); - - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, NULL, - model.layers[il].ffn_gate, NULL, - model.layers[il].ffn_down, NULL, - NULL, - LLM_FFN_SILU, LLM_FFN_PAR, cb, il); - cb(cur, "ffn_out", il); - - cur = ggml_add(ctx0, cur, ffn_inp); - cb(cur, "l_out", il); - - // input for next layer - inpL = cur; - } - - cur = inpL; - - cur = llm_build_norm(ctx0, cur, hparams, - model.output_norm, model.output_norm_b, - LLM_NORM, cb, -1); - cb(cur, "result_norm", -1); - - // lm_head - cur = ggml_mul_mat(ctx0, model.output, cur); - cb(cur, "result_output", -1); - - ggml_build_forward_expand(gf, cur); - - return gf; - } - - struct ggml_cgraph * build_llama() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); @@ -6589,6 +6469,125 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_orion() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, model.layers[il].attn_norm_b, + LLM_NORM, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + // if (model.layers[il].bq) { + // Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); + // cb(Qcur, "Qcur", il); + // } + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + // if (model.layers[il].bk) { + // Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); + // cb(Kcur, "Kcur", il); + // } + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + // if (model.layers[il].bv) { + // Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); + // cb(Vcur, "Vcur", il); + // } + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), 
inp_pos, + hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, NULL, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, model.layers[il].ffn_norm_b, + LLM_NORM, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, model.output_norm_b, + LLM_NORM, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; static struct ggml_cgraph * llama_build_graph( From 1cfb5372cf5707c8ec6dde7c874f4a44a6c4c915 Mon Sep 17 00:00:00 2001 From: Eve <139727413+netrunnereve@users.noreply.github.com> Date: Wed, 31 Jan 2024 19:21:55 +0000 Subject: [PATCH 637/859] Fix broken Vulkan Cmake (properly) (#5230) * build vulkan as object * vulkan ci --- .github/workflows/build.yml | 6 ++++-- CMakeLists.txt | 8 ++------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c6db1666e..f4c374ce5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -356,6 +356,8 @@ jobs: defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DBUILD_SHARED_LIBS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS="$env:RUNNER_TEMP/openblas/include" -DBLAS_LIBRARIES="$env:RUNNER_TEMP/openblas/lib/openblas.lib"' - build: 'kompute' defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_KOMPUTE=ON -DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON -DBUILD_SHARED_LIBS=ON' + - build: 'vulkan' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_VULKAN=ON -DBUILD_SHARED_LIBS=ON' steps: - name: Clone @@ -406,7 +408,7 @@ jobs: - name: Install Vulkan SDK id: get_vulkan - if: ${{ matrix.build == 'kompute' }} + if: ${{ matrix.build == 'kompute' || matrix.build == 'vulkan' }} run: | curl.exe -o $env:RUNNER_TEMP/VulkanSDK-Installer.exe -L "https://sdk.lunarg.com/sdk/download/${env:VULKAN_VERSION}/windows/VulkanSDK-${env:VULKAN_VERSION}-Installer.exe" & "$env:RUNNER_TEMP\VulkanSDK-Installer.exe" --accept-licenses --default-answer --confirm-command install @@ -451,7 +453,7 @@ jobs: - name: Test id: cmake_test # not all machines have native AVX-512 - if: ${{ matrix.build != 'clblast' && matrix.build != 'kompute' && (matrix.build != 'avx512' || env.HAS_AVX512F == '1') }} + if: ${{ matrix.build != 'clblast' && matrix.build != 'kompute' && matrix.build != 'vulkan' && (matrix.build != 'avx512' || env.HAS_AVX512F == '1') }} run: | cd build ctest -L main 
-C Release --verbose --timeout 900 diff --git a/CMakeLists.txt b/CMakeLists.txt index 15a1101aa..1ee455b3a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -423,10 +423,7 @@ if (LLAMA_VULKAN) if (Vulkan_FOUND) message(STATUS "Vulkan found") - set(GGML_HEADERS_VULKAN ggml-vulkan.h) - set(GGML_SOURCES_VULKAN ggml-vulkan.cpp) - - add_library(ggml-vulkan STATIC ggml-vulkan.cpp ggml-vulkan.h) + add_library(ggml-vulkan OBJECT ggml-vulkan.cpp ggml-vulkan.h) if (BUILD_SHARED_LIBS) set_target_properties(ggml-vulkan PROPERTIES POSITION_INDEPENDENT_CODE ON) endif() @@ -1012,7 +1009,6 @@ add_library(ggml OBJECT ggml-quants.h ${GGML_SOURCES_CUDA} ${GGML_HEADERS_CUDA} ${GGML_SOURCES_OPENCL} ${GGML_HEADERS_OPENCL} - ${GGML_SOURCES_VULKAN} ${GGML_HEADERS_VULKAN} ${GGML_SOURCES_METAL} ${GGML_HEADERS_METAL} ${GGML_SOURCES_MPI} ${GGML_HEADERS_MPI} ${GGML_SOURCES_EXTRA} ${GGML_HEADERS_EXTRA} @@ -1094,7 +1090,7 @@ install(FILES ${CMAKE_CURRENT_BINARY_DIR}/LlamaConfig.cmake DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/Llama) set(GGML_PUBLIC_HEADERS "ggml.h" "ggml-alloc.h" "ggml-backend.h" - "${GGML_HEADERS_CUDA}" "${GGML_HEADERS_OPENCL}" "${GGML_HEADERS_VULKAN}" + "${GGML_HEADERS_CUDA}" "${GGML_HEADERS_OPENCL}" "${GGML_HEADERS_METAL}" "${GGML_HEADERS_MPI}" "${GGML_HEADERS_EXTRA}") set_target_properties(ggml PROPERTIES PUBLIC_HEADER "${GGML_PUBLIC_HEADERS}") From ce32060198b7e2d6a13a9b8e1e1369e3c295ae2a Mon Sep 17 00:00:00 2001 From: Guoteng <32697156+SolenoidWGT@users.noreply.github.com> Date: Thu, 1 Feb 2024 17:19:51 +0800 Subject: [PATCH 638/859] llama : support InternLM2 (#5184) * support InternLM2 inference * add add_space_prefix KV pair --- convert-hf-to-gguf.py | 152 ++++++++++++++++++++++++ gguf-py/gguf/constants.py | 18 +++ gguf-py/gguf/gguf_writer.py | 3 + gguf-py/gguf/tensor_mapping.py | 14 ++- llama.cpp | 205 ++++++++++++++++++++++++++++++++- 5 files changed, 387 insertions(+), 5 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 6ab7f486e..4ebab07b3 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -203,6 +203,8 @@ class Model: return CodeShellModel if model_architecture == "OrionForCausalLM": return OrionModel + if model_architecture == "InternLM2ForCausalLM": + return InternLM2Model return Model def _is_model_safetensors(self) -> bool: @@ -254,6 +256,8 @@ class Model: return gguf.MODEL_ARCH.CODESHELL if arch == "OrionForCausalLM": return gguf.MODEL_ARCH.ORION + if arch == "InternLM2ForCausalLM": + return gguf.MODEL_ARCH.INTERNLM2 raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1344,6 +1348,154 @@ class CodeShellModel(Model): self.gguf_writer.add_tensor("output.weight", data) print(name, f"=> output.weight, shape = {data.shape}, {old_dtype} --> {data.dtype}") + +class InternLM2Model(Model): + def set_vocab(self): + # (TODO): Is there a better way? + # Copy from _set_vocab_sentencepiece, The only difference is that we will treat the character + # \x00 specially and convert it into an emoji character to prevent it from being mistakenly + # recognized as an empty string in C++. 
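+        # note: importing sentencepiece_model_pb2 requires the protobuf package;
+        # the add_dummy_prefix flag read below is what feeds add_add_space_prefix()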
+ from sentencepiece import SentencePieceProcessor + from sentencepiece import sentencepiece_model_pb2 as model + + tokenizer_path = self.dir_model / 'tokenizer.model' + + tokens: list[bytes] = [] + scores: list[float] = [] + toktypes: list[int] = [] + + if not tokenizer_path.is_file(): + print(f'Error: Missing {tokenizer_path}', file=sys.stderr) + sys.exit(1) + + sentencepiece_model = model.ModelProto() + sentencepiece_model.ParseFromString(open(tokenizer_path, "rb").read()) + add_prefix = sentencepiece_model.normalizer_spec.add_dummy_prefix + + tokenizer = SentencePieceProcessor(str(tokenizer_path)) + vocab_size = self.hparams.get('vocab_size', tokenizer.vocab_size()) + + for token_id in range(vocab_size): + piece = tokenizer.id_to_piece(token_id) + text = piece.encode("utf-8") + score = tokenizer.get_score(token_id) + if text == b"\x00": + # (TODO): fixme + # Hack here and replace the \x00 characters. + print(f"InternLM2 convert token '{text}' to '🐉'!") + text = "🐉" + + toktype = SentencePieceTokenTypes.NORMAL + if tokenizer.is_unknown(token_id): + toktype = SentencePieceTokenTypes.UNKNOWN + elif tokenizer.is_control(token_id): + toktype = SentencePieceTokenTypes.CONTROL + elif tokenizer.is_unused(token_id): + toktype = SentencePieceTokenTypes.UNUSED + elif tokenizer.is_byte(token_id): + toktype = SentencePieceTokenTypes.BYTE + + tokens.append(text) + scores.append(score) + toktypes.append(toktype) + + added_tokens_file = self.dir_model / 'added_tokens.json' + if added_tokens_file.is_file(): + with open(added_tokens_file, "r", encoding="utf-8") as f: + added_tokens_json = json.load(f) + + for key in added_tokens_json: + tokens.append(key.encode("utf-8")) + scores.append(-1000.0) + toktypes.append(SentencePieceTokenTypes.USER_DEFINED) + + self.gguf_writer.add_tokenizer_model("llama") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + self.gguf_writer.add_add_space_prefix(add_prefix) + + special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + special_vocab.add_to_gguf(self.gguf_writer) + + def set_gguf_parameters(self): + self.gguf_writer.add_name("InternLM2") + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) + self.gguf_writer.add_block_count(self.hparams["num_hidden_layers"]) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_rope_freq_base(self.hparams["rope_theta"]) + self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) + self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) + self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"]) + + def post_write_tensors(self, tensor_map, name, data_torch): + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 as-is? 
There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + self.gguf_writer.add_tensor(new_name, data) + + def write_tensors(self): + from einops import rearrange + + num_heads = self.hparams.get("num_attention_heads") + num_kv_heads = self.hparams.get("num_key_value_heads") + hidden_size = self.hparams.get("hidden_size") + q_per_kv = num_heads // num_kv_heads + head_dim = hidden_size // num_heads + num_groups = num_heads // q_per_kv + + block_count = self.hparams["num_hidden_layers"] + model_kv = dict(self.get_tensors()) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + qkv_pattern = r"model\.layers\.(\d+)\.attention\.wqkv" + for name, data_torch in model_kv.items(): + # we don't need these + if name.endswith(".rotary_emb.inv_freq"): + continue + + if re.match(qkv_pattern, name): + bid = re.findall(qkv_pattern, name)[0] + qkv = data_torch + qkv = rearrange(qkv.T, " o (g n i) ->o g n i", g=num_groups, n=q_per_kv + 2, i=head_dim) + q, k, v = qkv[..., : q_per_kv, :], qkv[..., q_per_kv: q_per_kv + 1, :], qkv[..., q_per_kv + 1: q_per_kv + 2, :] + q = rearrange(q, " o g n i -> o (g n i)").T + k = rearrange(k, " o g n i -> o (g n i)").T + v = rearrange(v, " o g n i -> o (g n i)").T + self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wq.weight", q) + self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wk.weight", k) + self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wv.weight", v) + else: + self.post_write_tensors(tensor_map, name, data_torch) + + ###### CONVERSION LOGIC ###### diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index f5c933a41..ed8e26f83 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -72,6 +72,7 @@ class Keys: PAD_ID = "tokenizer.ggml.padding_token_id" ADD_BOS = "tokenizer.ggml.add_bos_token" ADD_EOS = "tokenizer.ggml.add_eos_token" + ADD_PREFIX = "tokenizer.ggml.add_space_prefix" HF_JSON = "tokenizer.huggingface.json" RWKV = "tokenizer.rwkv.world" CHAT_TEMPLATE = "tokenizer.chat_template" @@ -102,6 +103,7 @@ class MODEL_ARCH(IntEnum): PLAMO = auto() CODESHELL = auto() ORION = auto() + INTERNLM2 = auto() class MODEL_TENSOR(IntEnum): @@ -153,6 +155,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.PLAMO: "plamo", MODEL_ARCH.CODESHELL: "codeshell", MODEL_ARCH.ORION: "orion", + MODEL_ARCH.INTERNLM2: "internlm2", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -446,6 +449,21 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.INTERNLM2: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.OUTPUT, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + ], # TODO } diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index d93aaa877..16808196e 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -411,6 +411,9 @@ class GGUFWriter: def add_add_eos_token(self, value: bool) -> None: 
self.add_bool(Keys.Tokenizer.ADD_EOS, value) + def add_add_space_prefix(self, value: bool) -> None: + self.add_bool(Keys.Tokenizer.ADD_PREFIX, value) + def add_chat_template(self, value: str) -> None: self.add_string(Keys.Tokenizer.CHAT_TEMPLATE, value) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index de177af13..4f16d8504 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -19,6 +19,7 @@ class TensorNameMap: "language_model.embedding.word_embeddings", # persimmon "wte", # gpt2 "transformer.embd.wte", # phi2 + "model.tok_embeddings", # internlm2 ), # Token type embeddings @@ -42,7 +43,7 @@ class TensorNameMap: MODEL_TENSOR.OUTPUT: ( "embed_out", # gptneox "lm_head", # gpt2 mpt falcon llama-hf baichuan qwen - "output", # llama-pth bloom + "output", # llama-pth bloom internlm2 "word_embeddings_for_head", # persimmon "lm_head.linear", # phi2 ), @@ -51,7 +52,7 @@ class TensorNameMap: MODEL_TENSOR.OUTPUT_NORM: ( "gpt_neox.final_layer_norm", # gptneox "transformer.ln_f", # gpt2 gpt-j falcon - "model.norm", # llama-hf baichuan + "model.norm", # llama-hf baichuan internlm2 "norm", # llama-pth "embeddings.LayerNorm", # bert "transformer.norm_f", # mpt @@ -84,6 +85,7 @@ class TensorNameMap: "h.{bid}.ln_1", # gpt2 "transformer.h.{bid}.ln", # phi2 "model.layers.layers.{bid}.norm", # plamo + "model.layers.{bid}.attention_norm", # internlm2 ), # Attention norm 2 @@ -111,6 +113,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.self.query", # bert "transformer.h.{bid}.attn.q_proj", # gpt-j "model.layers.layers.{bid}.self_attn.q_proj", # plamo + "model.layers.{bid}.attention.wq" # internlm2 ), # Attention key @@ -120,6 +123,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.self.key", # bert "transformer.h.{bid}.attn.k_proj", # gpt-j "model.layers.layers.{bid}.self_attn.k_proj", # plamo + "model.layers.{bid}.attention.wk" # internlm2 ), # Attention value @@ -129,6 +133,7 @@ class TensorNameMap: "encoder.layer.{bid}.attention.self.value", # bert "transformer.h.{bid}.attn.v_proj", # gpt-j "model.layers.layers.{bid}.self_attn.v_proj", # plamo + "model.layers.{bid}.attention.wv" # internlm2 ), # Attention output @@ -147,6 +152,7 @@ class TensorNameMap: "h.{bid}.attn.c_proj", # gpt2 "transformer.h.{bid}.mixer.out_proj", # phi2 "model.layers.layers.{bid}.self_attn.o_proj", # plamo + "model.layers.{bid}.attention.wo", # internlm2 ), # Rotary embeddings @@ -169,6 +175,7 @@ class TensorNameMap: "language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon "model.layers.{bid}.ln2", # yi "h.{bid}.ln_2", # gpt2 + "model.layers.{bid}.ffn_norm", # internlm2 ), MODEL_TENSOR.FFN_GATE_INP: ( @@ -194,6 +201,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc1", # phi2 "model.layers.{bid}.mlp.fc1", # phi2 "model.layers.layers.{bid}.mlp.up_proj", # plamo + "model.layers.{bid}.feed_forward.w3", # internlm2 ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -212,6 +220,7 @@ class TensorNameMap: "layers.{bid}.feed_forward.w1", # llama-pth "transformer.h.{bid}.mlp.w2", # qwen "model.layers.layers.{bid}.mlp.gate_proj", # plamo + "model.layers.{bid}.feed_forward.w1", # internlm2 ), MODEL_TENSOR.FFN_GATE_EXP: ( @@ -236,6 +245,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.fc2", # phi2 "model.layers.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo + "model.layers.{bid}.feed_forward.w2", # internlm2 ), MODEL_TENSOR.FFN_DOWN_EXP: ( diff --git a/llama.cpp b/llama.cpp index 02b0a485a..e8f44c2cb 100644 --- a/llama.cpp +++ b/llama.cpp 
@@ -204,6 +204,7 @@ enum llm_arch { LLM_ARCH_PLAMO, LLM_ARCH_CODESHELL, LLM_ARCH_ORION, + LLM_ARCH_INTERNLM2, LLM_ARCH_UNKNOWN, }; @@ -226,6 +227,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_PLAMO, "plamo" }, { LLM_ARCH_CODESHELL, "codeshell" }, { LLM_ARCH_ORION, "orion" }, + { LLM_ARCH_INTERNLM2, "internlm2" }, }; enum llm_kv { @@ -278,6 +280,7 @@ enum llm_kv { LLM_KV_TOKENIZER_PAD_ID, LLM_KV_TOKENIZER_ADD_BOS, LLM_KV_TOKENIZER_ADD_EOS, + LLM_KV_TOKENIZER_ADD_PREFIX, LLM_KV_TOKENIZER_HF_JSON, LLM_KV_TOKENIZER_RWKV, }; @@ -332,6 +335,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_TOKENIZER_PAD_ID, "tokenizer.ggml.padding_token_id" }, { LLM_KV_TOKENIZER_ADD_BOS, "tokenizer.ggml.add_bos_token" }, { LLM_KV_TOKENIZER_ADD_EOS, "tokenizer.ggml.add_eos_token" }, + { LLM_KV_TOKENIZER_ADD_PREFIX, "tokenizer.ggml.add_space_prefix" }, { LLM_KV_TOKENIZER_HF_JSON, "tokenizer.huggingface.json" }, { LLM_KV_TOKENIZER_RWKV, "tokenizer.rwkv.world" }, }; @@ -669,7 +673,23 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, - + { + LLM_ARCH_INTERNLM2, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, { @@ -1377,6 +1397,7 @@ enum e_model { MODEL_13B, MODEL_14B, MODEL_15B, + MODEL_20B, MODEL_30B, MODEL_34B, MODEL_40B, @@ -1618,6 +1639,8 @@ struct llama_vocab { id special_suffix_id = 32008; id special_eot_id = 32010; + bool add_space_prefix = true; + int find_bpe_rank(const std::string & token_left, const std::string & token_right) const { GGML_ASSERT(token_left.find(' ') == std::string::npos); GGML_ASSERT(token_left.find('\n') == std::string::npos); @@ -2731,6 +2754,7 @@ static const char * llama_model_type_name(e_model type) { case MODEL_13B: return "13B"; case MODEL_14B: return "14B"; case MODEL_15B: return "15B"; + case MODEL_20B: return "20B"; case MODEL_30B: return "30B"; case MODEL_34B: return "34B"; case MODEL_40B: return "40B"; @@ -2743,6 +2767,14 @@ static const char * llama_model_type_name(e_model type) { default: return "?B"; } } +static const char * llama_model_vocab_type_name(enum llama_vocab_type type){ + switch (type) { + case LLAMA_VOCAB_TYPE_SPM: return "SPM"; + case LLAMA_VOCAB_TYPE_BPE: return "BPE"; + default: return "unknown"; + } +} + static void llm_load_arch(llama_model_loader & ml, llama_model & model) { model.arch = ml.get_arch(); @@ -3006,6 +3038,15 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_INTERNLM2: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + switch (hparams.n_layer) { + case 32: model.type = e_model::MODEL_7B; break; + case 48: model.type = e_model::MODEL_20B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -3057,6 +3098,11 @@ static void llm_load_vocab( vocab.special_unk_id = 0; vocab.special_sep_id = -1; vocab.special_pad_id = -1; + + const int add_space_prefix_keyidx = gguf_find_key(ctx, kv(LLM_KV_TOKENIZER_ADD_PREFIX).c_str()); + if (add_space_prefix_keyidx != -1) { + vocab.add_space_prefix = 
gguf_get_val_bool(ctx, add_space_prefix_keyidx); + } // The default value of add_space_prefix is true. } else if (tokenizer_name == "gpt2") { vocab.type = LLAMA_VOCAB_TYPE_BPE; @@ -3269,7 +3315,7 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { // hparams LLAMA_LOG_INFO("%s: format = %s\n", __func__, llama_file_version_name(ml.fver)); LLAMA_LOG_INFO("%s: arch = %s\n", __func__, LLM_ARCH_NAMES.at(model.arch).c_str()); - LLAMA_LOG_INFO("%s: vocab type = %s\n", __func__, vocab.type == LLAMA_VOCAB_TYPE_SPM ? "SPM" : "BPE"); // TODO: fix + LLAMA_LOG_INFO("%s: vocab type = %s\n", __func__, llama_model_vocab_type_name(vocab.type)); LLAMA_LOG_INFO("%s: n_vocab = %u\n", __func__, hparams.n_vocab); LLAMA_LOG_INFO("%s: n_merges = %u\n", __func__, (int) vocab.bpe_ranks.size()); LLAMA_LOG_INFO("%s: n_ctx_train = %u\n", __func__, hparams.n_ctx_train); @@ -4018,8 +4064,35 @@ static bool llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; + case LLM_ARCH_INTERNLM2: + { + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + // output + { + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + } + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + // layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -6588,6 +6661,126 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_internlm2() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 
0); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + if (model.layers[il].bq) { + Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); + cb(Qcur, "Qcur", il); + } + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + if (model.layers[il].bk) { + Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); + cb(Kcur, "Kcur", il); + } + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + if (model.layers[il].bv) { + Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); + cb(Vcur, "Vcur", il); + } + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.output, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + }; static struct ggml_cgraph * llama_build_graph( @@ -6746,6 +6939,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_orion(); } break; + case LLM_ARCH_INTERNLM2: + { + result = llm.build_internlm2(); + } break; default: GGML_ASSERT(false); } @@ -7688,7 +7885,9 @@ static std::vector llama_tokenize_internal(const llama_vocab & // auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); if (&fragment == &fragment_buffer.front()) { - raw_text = " " + raw_text; // prefix with space if the first token is not special + if (vocab.add_space_prefix) { + raw_text = " " + raw_text; // prefix with space if the first token is not special + } } #ifdef PRETOKENIZERDEBUG From d71ac90985854b0905e1abba778e407e17f9f887 Mon Sep 17 00:00:00 2001 From: Ali Nehzat Date: Fri, 2 Feb 2024 
02:18:53 +1100 Subject: [PATCH 639/859] make : generate .a library for static linking (#5205) --- Makefile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 781f0bf8c..bf9e085de 100644 --- a/Makefile +++ b/Makefile @@ -586,8 +586,11 @@ train.o: common/train.cpp common/train.h libllama.so: llama.o ggml.o $(OBJS) $(CXX) $(CXXFLAGS) -shared -fPIC -o $@ $^ $(LDFLAGS) +libllama.a: llama.o ggml.o $(OBJS) $(COMMON_DEPS) + ar rcs libllama.a llama.o ggml.o $(OBJS) $(COMMON_DEPS) + clean: - rm -vrf *.o tests/*.o *.so *.dll benchmark-matmult common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) + rm -vrf *.o tests/*.o *.so *.a *.dll benchmark-matmult common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) # # Examples From 8ca511cadee2c67f0bd8c7034a2513778ee9a1b7 Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 1 Feb 2024 18:30:17 +0100 Subject: [PATCH 640/859] cuda : fix LLAMA_CUDA_F16 (#5262) --- ggml-cuda.cu | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e56595742..3242a0b4a 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -8657,9 +8657,9 @@ static void ggml_cuda_op_dequantize_mul_mat_vec( if (src1_convert_f16) { src1_dfloat = src1_dfloat_a.alloc(ne00); - ggml_cpy_f32_f16_cuda((const char *) src1_ddf_i, (char *) src1_dfloat, ne00, - ne00, 1, sizeof(float), 0, 0, - ne00, 1, sizeof(half), 0, 0, stream); + const to_fp16_cuda_t to_fp16_cuda = ggml_get_to_fp16_cuda(src1->type); + GGML_ASSERT(to_fp16_cuda != nullptr); + to_fp16_cuda(src1_ddf_i, src1_dfloat, ne00, stream); } #else const dfloat * src1_dfloat = (const dfloat *) src1_ddf_i; // dfloat == float, no conversion From 4d0924a8902010d31bd737b6f1f594943d120d0f Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Thu, 1 Feb 2024 19:25:24 +0100 Subject: [PATCH 641/859] Vulkan Phi Fix for AMD Proprietary Drivers (#5260) * Replace tanh to avoid NaN in gelu shader on AMD proprietary driver * Fix another Vulkan CPY buffer size bug --- ggml-vulkan-shaders.hpp | 132 +++++++++++++++++++----------------- ggml-vulkan.cpp | 17 +++-- ggml_vk_generate_shaders.py | 3 +- 3 files changed, 83 insertions(+), 69 deletions(-) diff --git a/ggml-vulkan-shaders.hpp b/ggml-vulkan-shaders.hpp index e2e9be22c..195410c02 100644 --- a/ggml-vulkan-shaders.hpp +++ b/ggml-vulkan-shaders.hpp @@ -14670,14 +14670,14 @@ const uint64_t f32_to_f16_fp32_len = 1596; unsigned char gelu_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x45,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00, 0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30, 0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00, 0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x24,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x38,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -14696,15 +14696,15 @@ unsigned char gelu_f32_data[] = { 0x22,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x24,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
0x47,0x00,0x04,0x00,0x24,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x29,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x35,0x00,0x00,0x00, 0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x42,0x00,0x00,0x00, +0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x38,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x38,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x48,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, 0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, 0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, @@ -14731,64 +14731,70 @@ unsigned char gelu_f32_data[] = { 0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, 0x3b,0x00,0x04,0x00,0x23,0x00,0x00,0x00,0x24,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x29,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2a,0x42,0x4c,0x3f, +0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x00,0x00,0x80,0x3f,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x13,0x27,0x37,0x3d,0x1d,0x00,0x03,0x00, +0x35,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x37,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x36,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x37,0x00,0x00,0x00,0x38,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x2b,0x00,0x04,0x00, -0x11,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x00,0x00,0x80,0x3f, -0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x2a,0x42,0x4c,0x3f,0x2b,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x13,0x27,0x37,0x3d,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x43,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x44,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x17,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x1a,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x1c,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x43,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x24,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x11,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x3a,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x2b,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0x40, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x46,0x00,0x00,0x00, +0x47,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x49,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x4a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x14,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0xae,0x00,0x05,0x00, +0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x1b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x1c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x49,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x24,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x11,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x85,0x00,0x05,0x00, 0x11,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x28,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x11,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x30,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x2c,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x85,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, 0x28,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x11,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x0c,0x00,0x06,0x00,0x11,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x26,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x3f,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x43,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x43,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, +0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x34,0x00,0x00,0x00, +0x0c,0x00,0x06,0x00,0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, +0x81,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x40,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x88,0x00,0x05,0x00, +0x11,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x11,0x00,0x00,0x00, +0x43,0x00,0x00,0x00,0x3d,0x00,0x00,0x00,0x42,0x00,0x00,0x00, +0x85,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x3c,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x26,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x45,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0x49,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x49,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t gelu_f32_len = 1408; +const uint64_t gelu_f32_len = 1484; unsigned char get_rows_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index bccc40bf5..b1e0006bb 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -2876,6 +2876,9 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm x_sz = ggml_nbytes(src0); d_sz = ggml_nbytes(dst); + if (extra_src0->offset + x_sz >= d_X->size) { + x_sz = VK_WHOLE_SIZE; + } if (extra->offset + d_sz >= d_D->size) { d_sz = VK_WHOLE_SIZE; } @@ -2911,12 +2914,16 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm break; } - x_sz *= ne02 * ne03; - if (y_sz != VK_WHOLE_SIZE) { - y_sz *= ne12 * ne13; - } if (op != GGML_OP_CPY) { - d_sz *= ne02 * ne03; + if (x_sz != VK_WHOLE_SIZE) { + x_sz *= ne02 * ne03; + } + if (y_sz != VK_WHOLE_SIZE) { + y_sz *= ne12 * ne13; + } + if (d_sz != VK_WHOLE_SIZE) { + d_sz *= ne02 * ne03; + } } if (!use_src1 && op == GGML_OP_SOFT_MAX) { diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index 6b1b82bf3..67981a751 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -1689,7 +1689,8 @@ void main() { } const float xi = float(data_a[i]); - data_d[i] = D_TYPE(0.5f*xi*(1.0f + tanh(SQRT_2_OVER_PI*xi*(1.0f + GELU_COEF_A*xi*xi)))); + const float val = SQRT_2_OVER_PI*xi*(1.0f + GELU_COEF_A*xi*xi); + data_d[i] = D_TYPE(0.5f*xi*(2.0f - 2.0f / (exp(2 * val) + 1))); } """ From 128dcbd3c9c4b12f42b560a4430427d7b2828628 Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Fri, 2 Feb 2024 03:48:53 +0800 Subject: [PATCH 642/859] add --no-mmap in llama-bench (#5257) * add --no-mmap, show sycl backend * fix conflict * fix code format, change print for --no-mmap * ren no_mmap to mmap, show mmap when not default value in printer * update guide for mmap * mv position to reduce model reload --- README-sycl.md | 2 +- examples/llama-bench/llama-bench.cpp | 60 +++++++++++++++++++++++++--- ggml-sycl.cpp | 
34 +++++++++++++++- ggml-sycl.h | 3 +- 4 files changed, 89 insertions(+), 10 deletions(-) diff --git a/README-sycl.md b/README-sycl.md index 2b2cfe03a..b8ee212b8 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -405,7 +405,7 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device llama.cpp use mmap as default way to read model file and copy to GPU. In some system, memcpy will be abnormal and block. - Solution: add **--no-mmap**. + Solution: add **--no-mmap** or **--mmap 0**. ## Q&A diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index c5a6f744e..e36c061a2 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -20,6 +20,7 @@ #include "llama.h" #include "common.h" #include "ggml-cuda.h" +#include "ggml-sycl.h" // utils static uint64_t get_time_ns() { @@ -120,6 +121,22 @@ static std::string get_gpu_info() { id += "/"; } } +#endif +#ifdef GGML_USE_SYCL + int device_list[GGML_SYCL_MAX_DEVICES]; + ggml_sycl_get_gpu_list(device_list, GGML_SYCL_MAX_DEVICES); + + for (int i = 0; i < GGML_SYCL_MAX_DEVICES; i++) { + if (device_list[i] >0 ){ + char buf[128]; + ggml_sycl_get_device_description(i, buf, sizeof(buf)); + id += buf; + id += "/"; + } + } + if (id.length() >2 ) { + id.pop_back(); + } #endif // TODO: other backends return id; @@ -161,6 +178,7 @@ struct cmd_params { std::vector no_kv_offload; std::vector mul_mat_q; std::vector> tensor_split; + std::vector use_mmap; int reps; bool verbose; output_formats output_format; @@ -180,6 +198,7 @@ static const cmd_params cmd_params_defaults = { /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, /* tensor_split */ {std::vector(llama_max_devices(), 0.0f)}, + /* use_mmap */ {true}, /* reps */ 5, /* verbose */ false, /* output_format */ MARKDOWN @@ -201,6 +220,7 @@ static void print_usage(int /* argc */, char ** argv) { printf(" -sm, --split-mode (default: %s)\n", join(transform_to_str(cmd_params_defaults.split_mode, split_mode_str), ",").c_str()); printf(" -mg, --main-gpu (default: %s)\n", join(cmd_params_defaults.main_gpu, ",").c_str()); printf(" -nkvo, --no-kv-offload <0|1> (default: %s)\n", join(cmd_params_defaults.no_kv_offload, ",").c_str()); + printf(" -mmp, --mmap <0|1> (default: %s)\n", join(cmd_params_defaults.use_mmap, ",").c_str()); printf(" -mmq, --mul-mat-q <0|1> (default: %s)\n", join(cmd_params_defaults.mul_mat_q, ",").c_str()); printf(" -ts, --tensor_split (default: 0)\n"); printf(" -r, --repetitions (default: %d)\n", cmd_params_defaults.reps); @@ -370,6 +390,13 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { } auto p = split(argv[i], split_delim); params.mul_mat_q.insert(params.mul_mat_q.end(), p.begin(), p.end()); + } else if (arg == "-mmp" || arg == "--mmap") { + if (++i >= argc) { + invalid_param = true; + break; + } + auto p = split(argv[i], split_delim); + params.use_mmap.insert(params.use_mmap.end(), p.begin(), p.end()); } else if (arg == "-ts" || arg == "--tensor-split") { if (++i >= argc) { invalid_param = true; @@ -441,6 +468,7 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { if (params.no_kv_offload.empty()){ params.no_kv_offload = cmd_params_defaults.no_kv_offload; } if (params.mul_mat_q.empty()) { params.mul_mat_q = cmd_params_defaults.mul_mat_q; } if (params.tensor_split.empty()) { params.tensor_split = cmd_params_defaults.tensor_split; } + if (params.use_mmap.empty()) { params.use_mmap = cmd_params_defaults.use_mmap; } if (params.n_threads.empty()) { params.n_threads = 
cmd_params_defaults.n_threads; } return params; @@ -460,6 +488,7 @@ struct cmd_params_instance { bool no_kv_offload; bool mul_mat_q; std::vector tensor_split; + bool use_mmap; llama_model_params to_llama_mparams() const { llama_model_params mparams = llama_model_default_params(); @@ -468,6 +497,7 @@ struct cmd_params_instance { mparams.split_mode = split_mode; mparams.main_gpu = main_gpu; mparams.tensor_split = tensor_split.data(); + mparams.use_mmap = use_mmap; return mparams; } @@ -477,6 +507,7 @@ struct cmd_params_instance { n_gpu_layers == other.n_gpu_layers && split_mode == other.split_mode && main_gpu == other.main_gpu && + use_mmap == other.use_mmap && tensor_split == other.tensor_split; } @@ -503,6 +534,7 @@ static std::vector get_cmd_params_instances(const cmd_param for (const auto & sm : params.split_mode) for (const auto & mg : params.main_gpu) for (const auto & ts : params.tensor_split) + for (const auto & mmp : params.use_mmap) for (const auto & nb : params.n_batch) for (const auto & tk : params.type_k) for (const auto & tv : params.type_v) @@ -527,6 +559,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, + /* .use_mmap = */ mmp, }; instances.push_back(instance); } @@ -549,6 +582,7 @@ static std::vector get_cmd_params_instances(const cmd_param /* .no_kv_offload= */ nkvo, /* .mul_mat_q = */ mmq, /* .tensor_split = */ ts, + /* .use_mmap = */ mmp, }; instances.push_back(instance); } @@ -565,6 +599,7 @@ struct test { static const bool vulkan; static const bool kompute; static const bool metal; + static const bool sycl; static const bool gpu_blas; static const bool blas; static const std::string cpu_info; @@ -583,6 +618,7 @@ struct test { bool no_kv_offload; bool mul_mat_q; std::vector tensor_split; + bool use_mmap; int n_prompt; int n_gen; std::string test_time; @@ -605,6 +641,7 @@ struct test { no_kv_offload = inst.no_kv_offload; mul_mat_q = inst.mul_mat_q; tensor_split = inst.tensor_split; + use_mmap = inst.use_mmap; n_prompt = inst.n_prompt; n_gen = inst.n_gen; // RFC 3339 date-time format @@ -654,25 +691,29 @@ struct test { if (metal) { return "Metal"; } + if (sycl) { + return GGML_SYCL_NAME; + } if (gpu_blas) { return "GPU BLAS"; } if (blas) { return "BLAS"; } + return "CPU"; } static const std::vector & get_fields() { static const std::vector fields = { "build_commit", "build_number", - "cuda", "opencl", "vulkan", "kompute", "metal", "gpu_blas", "blas", + "cuda", "opencl", "vulkan", "kompute", "metal", "sycl", "gpu_blas", "blas", "cpu_info", "gpu_info", "model_filename", "model_type", "model_size", "model_n_params", "n_batch", "n_threads", "type_k", "type_v", "n_gpu_layers", "split_mode", "main_gpu", "no_kv_offload", - "mul_mat_q", "tensor_split", + "mul_mat_q", "tensor_split", "use_mmap", "n_prompt", "n_gen", "test_time", "avg_ns", "stddev_ns", "avg_ts", "stddev_ts" @@ -691,8 +732,8 @@ struct test { return INT; } if (field == "cuda" || field == "opencl" || field == "vulkan" || field == "kompute" || field == "metal" || - field == "gpu_blas" || field == "blas" || field == "f16_kv" || field == "no_kv_offload" || - field == "mul_mat_q") { + field == "gpu_blas" || field == "blas" || field == "sycl" ||field == "f16_kv" || field == "no_kv_offload" || + field == "mul_mat_q" || field == "use_mmap") { return BOOL; } if (field == "avg_ts" || field == "stddev_ts") { @@ -720,13 +761,13 @@ struct test { std::vector values = { build_commit, std::to_string(build_number), std::to_string(cuda), 
std::to_string(opencl), std::to_string(vulkan), std::to_string(vulkan), - std::to_string(metal), std::to_string(gpu_blas), std::to_string(blas), + std::to_string(metal), std::to_string(sycl), std::to_string(gpu_blas), std::to_string(blas), cpu_info, gpu_info, model_filename, model_type, std::to_string(model_size), std::to_string(model_n_params), std::to_string(n_batch), std::to_string(n_threads), ggml_type_name(type_k), ggml_type_name(type_v), std::to_string(n_gpu_layers), split_mode_str(split_mode), std::to_string(main_gpu), std::to_string(no_kv_offload), - std::to_string(mul_mat_q), tensor_split_str, + std::to_string(mul_mat_q), tensor_split_str, std::to_string(use_mmap), std::to_string(n_prompt), std::to_string(n_gen), test_time, std::to_string(avg_ns()), std::to_string(stdev_ns()), std::to_string(avg_ts()), std::to_string(stdev_ts()) @@ -753,6 +794,7 @@ const bool test::kompute = !!ggml_cpu_has_kompute(); const bool test::metal = !!ggml_cpu_has_metal(); const bool test::gpu_blas = !!ggml_cpu_has_gpublas(); const bool test::blas = !!ggml_cpu_has_blas(); +const bool test::sycl = !!ggml_cpu_has_sycl(); const std::string test::cpu_info = get_cpu_info(); const std::string test::gpu_info = get_gpu_info(); @@ -895,6 +937,9 @@ struct markdown_printer : public printer { if (field == "no_kv_offload") { return "nkvo"; } + if (field == "use_mmap") { + return "mmap"; + } if (field == "tensor_split") { return "ts"; } @@ -938,6 +983,9 @@ struct markdown_printer : public printer { if (params.tensor_split.size() > 1 || params.tensor_split != cmd_params_defaults.tensor_split) { fields.push_back("tensor_split"); } + if (params.use_mmap.size() > 1 || params.use_mmap != cmd_params_defaults.use_mmap) { + fields.push_back("use_mmap"); + } fields.push_back("test"); fields.push_back("t/s"); diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 1cc55ef52..e8ba48353 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -2928,7 +2928,6 @@ void ggml_sycl_set_main_device(int main_device); void ggml_sycl_set_mul_mat_q(bool mul_mat_q); void ggml_sycl_set_scratch_size(size_t scratch_size); void ggml_sycl_free_scratch(void); -int ggml_sycl_get_device_count(void); void ggml_sycl_get_device_description(int device, char * description, size_t description_size); bool ggml_backend_is_sycl(ggml_backend_t backend); int ggml_backend_sycl_get_device(ggml_backend_t backend); @@ -14493,6 +14492,37 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ return true; } +GGML_API GGML_CALL void ggml_sycl_get_gpu_list(int *id_list, int max_len) try { + int max_compute_units = -1; + for(int i=0;i Date: Thu, 1 Feb 2024 23:20:13 -0800 Subject: [PATCH 643/859] llama : fix memory leak in llama_batch_free (#5252) The llama_batch_init allocates memory for a fixed number of tokens. However, the llama_batch_free only frees memory for the number of tokens that were added to the batch. This change-set uses a null terminated array for the batch seq_id, and frees all the elements until the nullptr is reached. This change-set also changes the name of the first parameter from `n_tokens` to `n_tokens_alloc` to more clearly indicate that this value is the number of tokens allocated to the batch, not the number of tokens in the batch. 
--- llama.cpp | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/llama.cpp b/llama.cpp index e8f44c2cb..6bf7f9efb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -11377,22 +11377,24 @@ struct llama_batch llama_batch_get_one( }; } -struct llama_batch llama_batch_init(int32_t n_tokens, int32_t embd, int32_t n_seq_max) { +struct llama_batch llama_batch_init(int32_t n_tokens_alloc, int32_t embd, int32_t n_seq_max) { llama_batch batch = { 0, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, 0, 0, 0, }; if (embd) { - batch.embd = (float *) malloc(sizeof(float) * n_tokens * embd); + batch.embd = (float *) malloc(sizeof(float) * n_tokens_alloc * embd); } else { - batch.token = (llama_token *) malloc(sizeof(llama_token) * n_tokens); + batch.token = (llama_token *) malloc(sizeof(llama_token) * n_tokens_alloc); } - batch.pos = (llama_pos *) malloc(sizeof(llama_pos) * n_tokens); - batch.n_seq_id = (int32_t *) malloc(sizeof(int32_t) * n_tokens); - batch.seq_id = (llama_seq_id **) malloc(sizeof(llama_seq_id *) * n_tokens); - for (int i = 0; i < n_tokens; ++i) { + batch.pos = (llama_pos *) malloc(sizeof(llama_pos) * n_tokens_alloc); + batch.n_seq_id = (int32_t *) malloc(sizeof(int32_t) * n_tokens_alloc); + batch.seq_id = (llama_seq_id **) malloc(sizeof(llama_seq_id *) * (n_tokens_alloc + 1)); + for (int i = 0; i < n_tokens_alloc; ++i) { batch.seq_id[i] = (llama_seq_id *) malloc(sizeof(llama_seq_id) * n_seq_max); } - batch.logits = (int8_t *) malloc(sizeof(int8_t) * n_tokens); + batch.seq_id[n_tokens_alloc] = nullptr; + + batch.logits = (int8_t *) malloc(sizeof(int8_t) * n_tokens_alloc); return batch; } @@ -11403,7 +11405,7 @@ void llama_batch_free(struct llama_batch batch) { if (batch.pos) free(batch.pos); if (batch.n_seq_id) free(batch.n_seq_id); if (batch.seq_id) { - for (int i = 0; i < batch.n_tokens; ++i) { + for (int i = 0; batch.seq_id[i] != nullptr; ++i) { free(batch.seq_id[i]); } free(batch.seq_id); From af3ba5d94627d337e32a95129e31a3064c459f6b Mon Sep 17 00:00:00 2001 From: Neo Zhang Jianyu Date: Fri, 2 Feb 2024 15:53:27 +0800 Subject: [PATCH 644/859] [SYCL] update guide of SYCL backend (#5254) * update guide for make installation, memory, gguf model link, rm todo for windows build * add vs install requirement * update for gpu device check * update help of llama-bench * fix grammer issues --- README-sycl.md | 64 +++++++++++++++++++++++++++----- examples/llama-bench/README.md | 34 ++++++++++------- examples/sycl/win-run-llama2.bat | 2 +- 3 files changed, 77 insertions(+), 23 deletions(-) diff --git a/README-sycl.md b/README-sycl.md index b8ee212b8..f7edc1c3e 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -42,6 +42,8 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). ## Intel GPU +### Verified + |Intel GPU| Status | Verified Model| |-|-|-| |Intel Data Center Max Series| Support| Max 1550| @@ -50,6 +52,17 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). |Intel built-in Arc GPU| Support| built-in Arc GPU in Meteor Lake| |Intel iGPU| Support| iGPU in i5-1250P, i7-1165G7| +Note: If the EUs (Execution Unit) in iGPU is less than 80, the inference speed will be too slow to use. + +### Memory + +The memory is a limitation to run LLM on GPUs. + +When run llama.cpp, there is print log to show the applied memory on GPU. You could know how much memory to be used in your case. Like `llm_load_tensors: buffer size = 3577.56 MiB`. + +For iGPU, please make sure the shared memory from host memory is enough. 
For llama-2-7b.Q4_0, recommend the host memory is 8GB+. + +For dGPU, please make sure the device memory is enough. For llama-2-7b.Q4_0, recommend the device memory is 4GB+. ## Linux @@ -105,7 +118,7 @@ source /opt/intel/oneapi/setvars.sh sycl-ls ``` -There should be one or more level-zero devices. Like **[ext_oneapi_level_zero:gpu:0]**. +There should be one or more level-zero devices. Please confirm that at least one GPU is present, like **[ext_oneapi_level_zero:gpu:0]**. Output (example): ``` @@ -152,6 +165,8 @@ Note: 1. Put model file to folder **models** +You could download [llama-2-7b.Q4_0.gguf](https://huggingface.co/TheBloke/Llama-2-7B-GGUF/blob/main/llama-2-7b.Q4_0.gguf) as example. + 2. Enable oneAPI running environment ``` @@ -223,7 +238,13 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device Please install Intel GPU driver by official guide: [Install GPU Drivers](https://www.intel.com/content/www/us/en/products/docs/discrete-gpus/arc/software/drivers.html). -2. Install Intel® oneAPI Base toolkit. +Note: **The driver is mandatory for compute function**. + +2. Install Visual Studio. + +Please install [Visual Studio](https://visualstudio.microsoft.com/) which impact oneAPI environment enabling in Windows. + +3. Install Intel® oneAPI Base toolkit. a. Please follow the procedure in [Get the Intel® oneAPI Base Toolkit ](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html). @@ -252,7 +273,7 @@ In oneAPI command line: sycl-ls ``` -There should be one or more level-zero devices. Like **[ext_oneapi_level_zero:gpu:0]**. +There should be one or more level-zero devices. Please confirm that at least one GPU is present, like **[ext_oneapi_level_zero:gpu:0]**. Output (example): ``` @@ -260,15 +281,21 @@ Output (example): [opencl:cpu:1] Intel(R) OpenCL, 11th Gen Intel(R) Core(TM) i7-1185G7 @ 3.00GHz OpenCL 3.0 (Build 0) [2023.16.10.0.17_160000] [opencl:gpu:2] Intel(R) OpenCL Graphics, Intel(R) Iris(R) Xe Graphics OpenCL 3.0 NEO [31.0.101.5186] [ext_oneapi_level_zero:gpu:0] Intel(R) Level-Zero, Intel(R) Iris(R) Xe Graphics 1.3 [1.3.28044] - ``` -3. Install cmake & make +4. Install cmake & make -a. Download & install cmake for windows: https://cmake.org/download/ +a. Download & install cmake for Windows: https://cmake.org/download/ -b. Download & install make for windows provided by mingw-w64: https://www.mingw-w64.org/downloads/ +b. Download & install make for Windows provided by mingw-w64 +- Download binary package for Windows in https://github.com/niXman/mingw-builds-binaries/releases. + + Like [x86_64-13.2.0-release-win32-seh-msvcrt-rt_v11-rev1.7z](https://github.com/niXman/mingw-builds-binaries/releases/download/13.2.0-rt_v11-rev1/x86_64-13.2.0-release-win32-seh-msvcrt-rt_v11-rev1.7z). + +- Unzip the binary package. In the **bin** sub-folder and rename **xxx-make.exe** to **make.exe**. + +- Add the **bin** folder path in the Windows system PATH environment. ### Build locally: @@ -309,6 +336,8 @@ Note: 1. Put model file to folder **models** +You could download [llama-2-7b.Q4_0.gguf](https://huggingface.co/TheBloke/Llama-2-7B-GGUF/blob/main/llama-2-7b.Q4_0.gguf) as example. + 2. Enable oneAPI running environment - In Search, input 'oneAPI'. @@ -419,8 +448,25 @@ Using device **0** (Intel(R) Arc(TM) A770 Graphics) as main device Miss to enable oneAPI running environment. +- Meet compile error. + + Remove folder **build** and try again. + +- I can **not** see **[ext_oneapi_level_zero:gpu:0]** afer install GPU driver in Linux. 
+ + Please run **sudo sycl-ls**. + + If you see it in result, please add video/render group to your ID: + + ``` + sudo usermod -aG render username + sudo usermod -aG video username + ``` + + Then **relogin**. + + If you do not see it, please check the installation GPU steps again. + ## Todo -- Support to build in Windows. - - Support multiple cards. diff --git a/examples/llama-bench/README.md b/examples/llama-bench/README.md index d02824bfa..374e40a7d 100644 --- a/examples/llama-bench/README.md +++ b/examples/llama-bench/README.md @@ -23,19 +23,23 @@ usage: ./llama-bench [options] options: -h, --help - -m, --model (default: models/7B/ggml-model-q4_0.gguf) - -p, --n-prompt (default: 512) - -n, --n-gen (default: 128) - -b, --batch-size (default: 512) - --memory-f32 <0|1> (default: 0) - -t, --threads (default: 16) - -ngl N, --n-gpu-layers (default: 99) - -mg i, --main-gpu (default: 0) - -mmq, --mul-mat-q <0|1> (default: 1) - -ts, --tensor_split - -r, --repetitions (default: 5) - -o, --output (default: md) - -v, --verbose (default: 0) + -m, --model (default: models/7B/ggml-model-q4_0.gguf) + -p, --n-prompt (default: 512) + -n, --n-gen (default: 128) + -b, --batch-size (default: 512) + -ctk , --cache-type-k (default: f16) + -ctv , --cache-type-v (default: f16) + -t, --threads (default: 112) + -ngl, --n-gpu-layers (default: 99) + -sm, --split-mode (default: layer) + -mg, --main-gpu (default: 0) + -nkvo, --no-kv-offload <0|1> (default: 0) + -mmp, --mmap <0|1> (default: 1) + -mmq, --mul-mat-q <0|1> (default: 1) + -ts, --tensor_split (default: 0) + -r, --repetitions (default: 5) + -o, --output (default: md) + -v, --verbose (default: 0) Multiple values can be given for each parameter by separating them with ',' or by specifying the parameter multiple times. ``` @@ -51,6 +55,10 @@ Each test is repeated the number of times given by `-r`, and the results are ave For a description of the other options, see the [main example](../main/README.md). +Note: + +- When using SYCL backend, there would be hang issue in some cases. Please set `--mmp 0`. + ## Examples ### Text generation with different models diff --git a/examples/sycl/win-run-llama2.bat b/examples/sycl/win-run-llama2.bat index 28d935541..cf621c675 100644 --- a/examples/sycl/win-run-llama2.bat +++ b/examples/sycl/win-run-llama2.bat @@ -2,7 +2,7 @@ :: Copyright (C) 2024 Intel Corporation :: SPDX-License-Identifier: MIT -INPUT2="Building a website can be done in 10 simple steps:\nStep 1:" +set INPUT2="Building a website can be done in 10 simple steps:\nStep 1:" @call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" intel64 --force From e805f0fa9951081ce0a86378a7aa52b6f636b82d Mon Sep 17 00:00:00 2001 From: "Meng, Hengyu" Date: Fri, 2 Feb 2024 15:54:14 +0800 Subject: [PATCH 645/859] [SYCL] get MAX_MEM_ALLOC from device property (#5270) * get max alloc size from device prop * fix macro typo --- ggml-sycl.cpp | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index e8ba48353..4ee2eed38 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -337,6 +337,7 @@ namespace dpct } size_t get_global_mem_size() const { return _global_mem_size; } size_t get_local_mem_size() const { return _local_mem_size; } + size_t get_max_mem_alloc_size() const { return _max_mem_alloc_size; } /// Returns the maximum clock rate of device's global memory in kHz. If /// compiler does not support this API then returns default value 3200000 kHz. 
unsigned int get_memory_clock_rate() const { return _memory_clock_rate; } @@ -398,6 +399,10 @@ namespace dpct { _local_mem_size = local_mem_size; } + void set_max_mem_alloc_size(size_t max_mem_alloc_size) + { + _max_mem_alloc_size = max_mem_alloc_size; + } void set_max_work_group_size(int max_work_group_size) { _max_work_group_size = max_work_group_size; @@ -465,6 +470,7 @@ namespace dpct int _max_register_size_per_work_group; size_t _global_mem_size; size_t _local_mem_size; + size_t _max_mem_alloc_size; size_t _max_nd_range_size[3]; int _max_nd_range_size_i[3]; uint32_t _device_id; @@ -516,6 +522,7 @@ namespace dpct dev.get_info()); prop.set_global_mem_size(dev.get_info()); prop.set_local_mem_size(dev.get_info()); + prop.set_max_mem_alloc_size(dev.get_info()); #if (defined(SYCL_EXT_INTEL_DEVICE_INFO) && SYCL_EXT_INTEL_DEVICE_INFO >= 6) if (dev.has(sycl::aspect::ext_intel_memory_clock_rate)) @@ -644,6 +651,11 @@ namespace dpct return get_device_info().get_global_mem_size(); } + size_t get_max_mem_alloc_size() const + { + return get_device_info().get_max_mem_alloc_size(); + } + /// Get the number of bytes of free and total memory on the SYCL device. /// \param [out] free_memory The number of bytes of free memory on the SYCL device. /// \param [out] total_memory The number of bytes of total memory on the SYCL device. @@ -11311,10 +11323,10 @@ void ggml_init_sycl() try { GGML_ASSERT(g_all_sycl_device_count <= GGML_SYCL_MAX_DEVICES); int64_t total_vram = 0; -#if defined(GGML_SYCL_FP16) - fprintf(stderr, "%s: GGML_SYCL_FP16: yes\n", __func__); +#if defined(GGML_SYCL_F16) + fprintf(stderr, "%s: GGML_SYCL_F16: yes\n", __func__); #else - fprintf(stderr, "%s: GGML_SYCL_FP16: no\n", __func__); + fprintf(stderr, "%s: GGML_SYCL_F16: no\n", __func__); #endif @@ -14788,6 +14800,12 @@ static size_t ggml_backend_sycl_buffer_type_get_alignment(ggml_backend_buffer_ty UNUSED(buft); } +static size_t ggml_backend_sycl_buffer_type_get_max_size(ggml_backend_buffer_type_t buft) { + return dpct::get_current_device().get_max_mem_alloc_size(); + + UNUSED(buft); +} + static size_t ggml_backend_sycl_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { int64_t row_low = 0; int64_t row_high = ggml_nrows(tensor); @@ -14818,7 +14836,7 @@ static ggml_backend_buffer_type_i ggml_backend_sycl_buffer_type_interface = { /* .get_name = */ ggml_backend_sycl_buffer_type_name, /* .alloc_buffer = */ ggml_backend_sycl_buffer_type_alloc_buffer, /* .get_alignment = */ ggml_backend_sycl_buffer_type_get_alignment, - /* .get_max_size = */ NULL, // TODO: return device.maxBufferLength + /* .get_max_size = */ ggml_backend_sycl_buffer_type_get_max_size, /* .get_alloc_size = */ ggml_backend_sycl_buffer_type_get_alloc_size, /* .supports_backend = */ ggml_backend_sycl_buffer_type_supports_backend, /* .is_host = */ nullptr, From 6b91b1e0a92ac2e4e269eec6361ca53a61ced6c6 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Fri, 2 Feb 2024 08:56:31 +0100 Subject: [PATCH 646/859] docker : add build for SYCL, Vulkan + update readme (#5228) * add vulkan dockerfile * intel dockerfile: compile sycl by default * fix vulkan dockerfile * add docs for vulkan * docs: sycl build in docker * docs: remove trailing spaces * docs: sycl: add docker section * docs: clarify install vulkan SDK outside docker * sycl: use intel/oneapi-basekit docker image * docs: correct TOC * docs: correct docker image for Intel oneMKL --- .devops/main-intel.Dockerfile | 16 ++--- .devops/main-vulkan.Dockerfile | 29 +++++++++ 
.devops/server-intel.Dockerfile | 15 +++-- .devops/server-vulkan.Dockerfile | 29 +++++++++ README-sycl.md | 102 +++++++++++++++++++------------ README.md | 64 ++++++++++++++----- 6 files changed, 188 insertions(+), 67 deletions(-) create mode 100644 .devops/main-vulkan.Dockerfile create mode 100644 .devops/server-vulkan.Dockerfile diff --git a/.devops/main-intel.Dockerfile b/.devops/main-intel.Dockerfile index e1e6acc24..572e5d8ea 100644 --- a/.devops/main-intel.Dockerfile +++ b/.devops/main-intel.Dockerfile @@ -1,8 +1,8 @@ ARG ONEAPI_VERSION=2024.0.1-devel-ubuntu22.04 -ARG UBUNTU_VERSION=22.04 -FROM intel/hpckit:$ONEAPI_VERSION as build +FROM intel/oneapi-basekit:$ONEAPI_VERSION as build +ARG LLAMA_SYCL_F16=OFF RUN apt-get update && \ apt-get install -y git @@ -10,16 +10,18 @@ WORKDIR /app COPY . . -# for some reasons, "-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DLLAMA_NATIVE=ON" give worse performance RUN mkdir build && \ cd build && \ - cmake .. -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx && \ - cmake --build . --config Release --target main server + if [ "${LLAMA_SYCL_F16}" = "ON" ]; then \ + echo "LLAMA_SYCL_F16 is set" && \ + export OPT_SYCL_F16="-DLLAMA_SYCL_F16=ON"; \ + fi && \ + cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx ${OPT_SYCL_F16} && \ + cmake --build . --config Release --target main -FROM ubuntu:$UBUNTU_VERSION as runtime +FROM intel/oneapi-basekit:$ONEAPI_VERSION as runtime COPY --from=build /app/build/bin/main /main -COPY --from=build /app/build/bin/server /server ENV LC_ALL=C.utf8 diff --git a/.devops/main-vulkan.Dockerfile b/.devops/main-vulkan.Dockerfile new file mode 100644 index 000000000..bca460365 --- /dev/null +++ b/.devops/main-vulkan.Dockerfile @@ -0,0 +1,29 @@ +ARG UBUNTU_VERSION=jammy + +FROM ubuntu:$UBUNTU_VERSION as build + +# Install build tools +RUN apt update && apt install -y git build-essential cmake wget + +# Install Vulkan SDK +RUN wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | apt-key add - && \ + wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list && \ + apt update -y && \ + apt-get install -y vulkan-sdk + +# Build it +WORKDIR /app +COPY . . +RUN mkdir build && \ + cd build && \ + cmake .. -DLLAMA_VULKAN=1 && \ + cmake --build . --config Release --target main + +# Clean up +WORKDIR / +RUN cp /app/build/bin/main /main && \ + rm -rf /app + +ENV LC_ALL=C.utf8 + +ENTRYPOINT [ "/main" ] diff --git a/.devops/server-intel.Dockerfile b/.devops/server-intel.Dockerfile index e343d278c..312f2df80 100644 --- a/.devops/server-intel.Dockerfile +++ b/.devops/server-intel.Dockerfile @@ -1,8 +1,8 @@ ARG ONEAPI_VERSION=2024.0.1-devel-ubuntu22.04 -ARG UBUNTU_VERSION=22.04 -FROM intel/hpckit:$ONEAPI_VERSION as build +FROM intel/oneapi-basekit:$ONEAPI_VERSION as build +ARG LLAMA_SYCL_F16=OFF RUN apt-get update && \ apt-get install -y git @@ -10,13 +10,16 @@ WORKDIR /app COPY . . -# for some reasons, "-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DLLAMA_NATIVE=ON" give worse performance RUN mkdir build && \ cd build && \ - cmake .. -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx && \ - cmake --build . --config Release --target main server + if [ "${LLAMA_SYCL_F16}" = "ON" ]; then \ + echo "LLAMA_SYCL_F16 is set" && \ + export OPT_SYCL_F16="-DLLAMA_SYCL_F16=ON"; \ + fi && \ + cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx ${OPT_SYCL_F16} && \ + cmake --build . 
--config Release --target server -FROM ubuntu:$UBUNTU_VERSION as runtime +FROM intel/oneapi-basekit:$ONEAPI_VERSION as runtime COPY --from=build /app/build/bin/server /server diff --git a/.devops/server-vulkan.Dockerfile b/.devops/server-vulkan.Dockerfile new file mode 100644 index 000000000..e0add6fc3 --- /dev/null +++ b/.devops/server-vulkan.Dockerfile @@ -0,0 +1,29 @@ +ARG UBUNTU_VERSION=jammy + +FROM ubuntu:$UBUNTU_VERSION as build + +# Install build tools +RUN apt update && apt install -y git build-essential cmake wget + +# Install Vulkan SDK +RUN wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | apt-key add - && \ + wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list && \ + apt update -y && \ + apt-get install -y vulkan-sdk + +# Build it +WORKDIR /app +COPY . . +RUN mkdir build && \ + cd build && \ + cmake .. -DLLAMA_VULKAN=1 && \ + cmake --build . --config Release --target server + +# Clean up +WORKDIR / +RUN cp /app/build/bin/server /server && \ + rm -rf /app + +ENV LC_ALL=C.utf8 + +ENTRYPOINT [ "/server" ] diff --git a/README-sycl.md b/README-sycl.md index f7edc1c3e..7aa4274a9 100644 --- a/README-sycl.md +++ b/README-sycl.md @@ -1,22 +1,15 @@ # llama.cpp for SYCL -[Background](#background) - -[OS](#os) - -[Intel GPU](#intel-gpu) - -[Linux](#linux) - -[Windows](#windows) - -[Environment Variable](#environment-variable) - -[Known Issue](#known-issue) - -[Q&A](#q&a) - -[Todo](#todo) +- [Background](#background) +- [OS](#os) +- [Intel GPU](#intel-gpu) +- [Docker](#docker) +- [Linux](#linux) +- [Windows](#windows) +- [Environment Variable](#environment-variable) +- [Known Issue](#known-issue) +- [Q&A](#q&a) +- [Todo](#todo) ## Background @@ -36,7 +29,7 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). |OS|Status|Verified| |-|-|-| -|Linux|Support|Ubuntu 22.04| +|Linux|Support|Ubuntu 22.04, Fedora Silverblue 39| |Windows|Support|Windows 11| @@ -50,7 +43,7 @@ For Intel CPU, recommend to use llama.cpp for X86 (Intel MKL building). |Intel Data Center Flex Series| Support| Flex 170| |Intel Arc Series| Support| Arc 770, 730M| |Intel built-in Arc GPU| Support| built-in Arc GPU in Meteor Lake| -|Intel iGPU| Support| iGPU in i5-1250P, i7-1165G7| +|Intel iGPU| Support| iGPU in i5-1250P, i7-1260P, i7-1165G7| Note: If the EUs (Execution Unit) in iGPU is less than 80, the inference speed will be too slow to use. @@ -64,6 +57,38 @@ For iGPU, please make sure the shared memory from host memory is enough. For lla For dGPU, please make sure the device memory is enough. For llama-2-7b.Q4_0, recommend the device memory is 4GB+. +## Docker + +Note: +- Only docker on Linux is tested. Docker on WSL may not work. +- You may need to install Intel GPU driver on the host machine (See the [Linux](#linux) section to know how to do that) + +### Build the image + +You can choose between **F16** and **F32** build. F16 is faster for long-prompt inference. + + +```sh +# For F16: +#docker build -t llama-cpp-sycl --build-arg="LLAMA_SYCL_F16=ON" -f .devops/main-intel.Dockerfile . + +# Or, for F32: +docker build -t llama-cpp-sycl -f .devops/main-intel.Dockerfile . + +# Note: you can also use the ".devops/main-server.Dockerfile", which compiles the "server" example +``` + +### Run + +```sh +# Firstly, find all the DRI cards: +ls -la /dev/dri +# Then, pick the card that you want to use. 
+ +# For example with "/dev/dri/card1" +docker run -it --rm -v "$(pwd):/app:Z" --device /dev/dri/renderD128:/dev/dri/renderD128 --device /dev/dri/card1:/dev/dri/card1 llama-cpp-sycl -m "/app/models/YOUR_MODEL_FILE" -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 +``` + ## Linux ### Setup Environment @@ -76,7 +101,7 @@ Note: for iGPU, please install the client GPU driver. b. Add user to group: video, render. -``` +```sh sudo usermod -aG render username sudo usermod -aG video username ``` @@ -85,7 +110,7 @@ Note: re-login to enable it. c. Check -``` +```sh sudo apt install clinfo sudo clinfo -l ``` @@ -103,7 +128,6 @@ Platform #0: Intel(R) OpenCL HD Graphics 2. Install Intel® oneAPI Base toolkit. - a. Please follow the procedure in [Get the Intel® oneAPI Base Toolkit ](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html). Recommend to install to default folder: **/opt/intel/oneapi**. @@ -112,7 +136,7 @@ Following guide use the default folder as example. If you use other folder, plea b. Check -``` +```sh source /opt/intel/oneapi/setvars.sh sycl-ls @@ -131,21 +155,25 @@ Output (example): 2. Build locally: -``` +Note: +- You can choose between **F16** and **F32** build. F16 is faster for long-prompt inference. +- By default, it will build for all binary files. It will take more time. To reduce the time, we recommend to build for **example/main** only. + +```sh mkdir -p build cd build source /opt/intel/oneapi/setvars.sh -#for FP16 -#cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON # faster for long-prompt inference +# For FP16: +#cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON -#for FP32 +# Or, for FP32: cmake .. -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -#build example/main only +# Build example/main only #cmake --build . --config Release --target main -#build all binary +# Or, build all binary cmake --build . --config Release -v cd .. @@ -153,14 +181,10 @@ cd .. or -``` +```sh ./examples/sycl/build.sh ``` -Note: - -- By default, it will build for all binary files. It will take more time. To reduce the time, we recommend to build for **example/main** only. - ### Run 1. Put model file to folder **models** @@ -177,10 +201,10 @@ source /opt/intel/oneapi/setvars.sh Run without parameter: -``` +```sh ./build/bin/ls-sycl-device -or +# or running the "main" executable and look at the output log: ./build/bin/main ``` @@ -209,13 +233,13 @@ found 4 SYCL devices: Set device ID = 0 by **GGML_SYCL_DEVICE=0** -``` +```sh GGML_SYCL_DEVICE=0 ./build/bin/main -m models/llama-2-7b.Q4_0.gguf -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 ``` or run by script: -``` -./examples/sycl/run-llama2.sh +```sh +./examples/sycl/run_llama2.sh ``` Note: diff --git a/README.md b/README.md index e6ed1d429..af1f09fa0 100644 --- a/README.md +++ b/README.md @@ -393,28 +393,28 @@ Building the program with BLAS support may lead to some performance improvements Check [BLIS.md](docs/BLIS.md) for more information. +- #### SYCL + SYCL is a higher-level programming model to improve programming productivity on various hardware accelerators. + + llama.cpp based on SYCL is used to **support Intel GPU** (Data Center Max series, Flex series, Arc series, Built-in GPU and iGPU). + + For detailed info, please refer to [llama.cpp for SYCL](README-sycl.md). 
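As a quick sanity check that SYCL sees the GPU at all, a device listing similar to what `./build/bin/ls-sycl-device` prints can be produced with a few lines of SYCL 2020 code. A minimal sketch, illustrative and not taken from the repository:

```cpp
#include <sycl/sycl.hpp>
#include <iostream>

int main() {
    int id = 0;
    for (const auto & dev : sycl::device::get_devices()) {
        // the printed index roughly corresponds to what GGML_SYCL_DEVICE selects
        std::cout << "device " << id++ << ": "
                  << dev.get_info<sycl::info::device::name>() << "\n";
    }
    return 0;
}
```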
+- #### Intel oneMKL
+  Building through oneAPI compilers will make the avx_vnni instruction set available for Intel processors that do not support avx512 and avx512_vnni. Please note that this build config **does not support Intel GPU**. For Intel GPU support, please refer to [llama.cpp for SYCL](./README-sycl.md).
+
+- Using manual oneAPI installation:
+  By default, `LLAMA_BLAS_VENDOR` is set to `Generic`, so if you have already sourced the Intel environment script and pass `-DLLAMA_BLAS=ON` to cmake, the MKL version of BLAS will automatically be selected. Otherwise, please install oneAPI and follow the steps below:
  ```bash
  mkdir build
  cd build
- source /opt/intel/oneapi/setvars.sh # You can skip this step if in oneapi-runtime docker image, only required for manual installation
+ source /opt/intel/oneapi/setvars.sh # You can skip this step if in oneapi-basekit docker image, only required for manual installation
  cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON
  cmake --build . --config Release
  ```
- Using oneAPI docker image:
-  If you do not want to source the environment vars and install oneAPI manually, you can also build the code using intel docker container: [oneAPI-runtime](https://hub.docker.com/r/intel/oneapi-runtime)
-
-  ```bash
-  mkdir build
-  cd build
-  cmake .. -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON
-  cmake --build . --config Release
-  ```
-
-  Building through oneAPI compilers will make avx_vnni instruction set available for intel processors that do not support avx512 and avx512_vnni.
+  If you do not want to source the environment vars and install oneAPI manually, you can also build the code using the Intel docker container [oneAPI-basekit](https://hub.docker.com/r/intel/oneapi-basekit). Then you can use the commands given above.

Check [Optimizing and Running LLaMA2 on Intel® CPU](https://www.intel.com/content/www/us/en/content-details/791610/optimizing-and-running-llama2-on-intel-cpu.html) for more information.
@@ -601,14 +601,48 @@ Building the program with BLAS support may lead to some performance improvements
 You can get a list of platforms and devices from the `clinfo -l` command, etc.

-- #### SYCL
-
-  SYCL is a higher-level programming model to improve programming productivity on various hardware accelerators.
-
-  llama.cpp based on SYCL is used to support Intel GPU (Data Center Max series, Flex series, Arc series, Built-in GPU and iGPU).
-
-  For detailed info, please refer to [llama.cpp for SYCL](README-sycl.md).
+- #### Vulkan
+
+  **With docker**:
+
+  You don't need to install the Vulkan SDK. It will be installed inside the container.
+
+  ```sh
+  # Build the image
+  docker build -t llama-cpp-vulkan -f .devops/main-vulkan.Dockerfile .
+ # Then, use it: + docker run -it --rm -v "$(pwd):/app:Z" --device /dev/dri/renderD128:/dev/dri/renderD128 --device /dev/dri/card1:/dev/dri/card1 llama-cpp-vulkan -m "/app/models/YOUR_MODEL_FILE" -p "Building a website can be done in 10 simple steps:" -n 400 -e -ngl 33 + ``` + + **Without docker**: + + Firstly, you need to make sure you installed [Vulkan SDK](https://vulkan.lunarg.com/doc/view/latest/linux/getting_started_ubuntu.html) + + For example, on Ubuntu 22.04 (jammy), use the command below: + + ```bash + wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | apt-key add - + wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list https://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list + apt update -y + apt-get install -y vulkan-sdk + # To verify the installation, use the command below: + vulkaninfo + ``` + + Then, build llama.cpp using the cmake command below: + + ```bash + mkdir -p build + cd build + cmake .. -DLLAMA_VULKAN=1 + cmake --build . --config Release + # Test the output binary (with "-ngl 33" to offload all layers to GPU) + ./bin/main -m "PATH_TO_MODEL" -p "Hi you how are you" -n 50 -e -ngl 33 -t 4 + + # You should see in the output, ggml_vulkan detected your GPU. For example: + # ggml_vulkan: Using Intel(R) Graphics (ADL GT2) | uma: 1 | fp16: 1 | warp size: 32 + ``` ### Prepare Data & Run From b05102fe8cfa9893851c6bf6efd15cdc20b6afa2 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Fri, 2 Feb 2024 08:39:48 +0000 Subject: [PATCH 647/859] Tidy ggml-sycl (#5261) * Tidy some code in ggml-sycl * Remove blank space * Remove std::printf comments --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- ggml-sycl.cpp | 47 ++++++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 4ee2eed38..ac75f8e16 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -1366,6 +1366,7 @@ namespace dpct } #else return q.memcpy(to_ptr, from_ptr, size, dep_events); + GGML_UNUSED(direction); #endif // DPCT_USM_LEVEL_NONE } @@ -1667,7 +1668,7 @@ namespace dpct using Ty = typename DataType::T2; Ty s_h; if (get_pointer_attribute(q, s) == pointer_access_attribute::device_only) - detail::dpct_memcpy(q, (void *)&s_h, (void *)s, sizeof(T), device_to_host) + detail::dpct_memcpy(q, (void *)&s_h, (const void *)s, sizeof(T), device_to_host) .wait(); else s_h = *reinterpret_cast(s); @@ -1691,6 +1692,20 @@ namespace dpct int ldb, const void *beta, void *c, int ldc) { #ifndef __INTEL_MKL__ + GGML_UNUSED(q); + GGML_UNUSED(a_trans); + GGML_UNUSED(b_trans); + GGML_UNUSED(m); + GGML_UNUSED(n); + GGML_UNUSED(k); + GGML_UNUSED(alpha); + GGML_UNUSED(a); + GGML_UNUSED(lda); + GGML_UNUSED(b); + GGML_UNUSED(ldb); + GGML_UNUSED(beta); + GGML_UNUSED(c); + GGML_UNUSED(ldc); throw std::runtime_error("The oneAPI Math Kernel Library (oneMKL) Interfaces " "Project does not support this API."); #else @@ -1830,7 +1845,7 @@ namespace dpct template T permute_sub_group_by_xor(sycl::sub_group g, T x, unsigned int mask, - int logical_sub_group_size = 32) + unsigned int logical_sub_group_size = 32) { unsigned int id = g.get_local_linear_id(); unsigned int start_index = @@ -2160,6 +2175,7 @@ namespace dpct } #else return q.memcpy(to_ptr, from_ptr, size, dep_events); + GGML_UNUSED(direction); #endif // DPCT_USM_LEVEL_NONE } @@ -3302,7 +3318,7 @@ void log_ggml_var_device(const char*name, float *src, size_t total_elements, boo std::ofstream logfile; 
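The recurring `GGML_UNUSED` additions in this commit follow a standard pattern: parameters that are only touched in some build configurations are explicitly voided so that all configurations build warning-clean. A toy sketch; the macro matches its definition in `ggml.h`, while the function is invented for illustration:

```cpp
#include <cstdio>

#define GGML_UNUSED(x) (void)(x)

// 'direction' is only meaningful when a hypothetical DPCT_USM_LEVEL_NONE
// path is compiled in; voiding it silences -Wunused-parameter otherwise
static int copy_stub(int size, int direction) {
    GGML_UNUSED(direction);
    return size;
}

int main(void) {
    printf("%d\n", copy_stub(64, 1));
    return 0;
}
```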
logfile.open(filename); // printf("local buf element %d\n", total_elements); - for(int i=0; ibackend == GGML_BACKEND_GPU && device_id == g_main_device ? ne0 : row_diff; - const int compute_capability = g_device_caps[id].cc; #ifdef GGML_SYCL_F16 bool use_fp16 = true; // TODO(Yu) SYCL capability check #else @@ -12691,7 +12700,7 @@ static void ggml_sycl_set_peer_access(const int n_tokens) { continue; } - int can_access_peer; + // int can_access_peer; // SYCL_CHECK(syclDeviceCanAccessPeer(&can_access_peer, id, id_other)); // if (can_access_peer) { // if (enable_peer_access) { @@ -12716,7 +12725,6 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; const int64_t ne03 = src0->ne[3]; - const int64_t nrows0 = ggml_nrows(src0); const int64_t ne10 = src1->ne[0]; const int64_t ne11 = src1->ne[1]; @@ -13812,13 +13820,6 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, src1_row_extra.data_device[g_main_device_index] = src1_contiguous.get(); dst_row_extra.data_device[g_main_device_index] = dst_contiguous.get(); - const dpct::memcpy_direction src1_kind = - src1->backend == GGML_BACKEND_CPU ? dpct::host_to_device - : dpct::device_to_device; - const dpct::memcpy_direction dst_kind = dst->backend == GGML_BACKEND_CPU - ? dpct::device_to_host - : dpct::device_to_device; - for (int32_t row_id = 0; row_id < n_as; ++row_id) { const struct ggml_tensor * src0_row = dst->src[row_id + 2]; From 2d40085c26794e29c434480b9e06738e89e5686f Mon Sep 17 00:00:00 2001 From: Mirror Azure <54669636+MirrorAzure@users.noreply.github.com> Date: Fri, 2 Feb 2024 14:39:09 +0300 Subject: [PATCH 648/859] py : add check for '.attn.masked_bias' layers to GPT2model (#5281) --- convert-hf-to-gguf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 4ebab07b3..a6ffd128b 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1138,7 +1138,7 @@ class GPT2Model(Model): for name, data_torch in self.get_tensors(): # we don't need these - if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq", ".attn.bias")): + if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq", ".attn.bias", ".attn.masked_bias")): continue if name.endswith((".c_attn.weight", ".c_proj.weight", ".c_fc.weight", ".c_proj.weight")): From e437b37fd0b2b97e6c6ff1045ec7f901faa6498a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 2 Feb 2024 14:23:40 +0200 Subject: [PATCH 649/859] scripts : parse wtype in server-llm.sh (#5167) * scripts : parse wtype in server-llm.sh * scripts : fix check for wfile --- scripts/server-llm.sh | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh index 7bf0929bb..0b83cdbbc 100644 --- a/scripts/server-llm.sh +++ b/scripts/server-llm.sh @@ -141,6 +141,28 @@ for wt in "${wtypes[@]}"; do wfiles+=("") done +# map wtype input to index +if [[ ! 
-z "$wtype" ]]; then + iw=-1 + is=0 + for wt in "${wtypes[@]}"; do + # uppercase + uwt=$(echo "$wt" | tr '[:lower:]' '[:upper:]') + if [[ "$uwt" == "$wtype" ]]; then + iw=$is + break + fi + is=$((is+1)) + done + + if [[ $iw -eq -1 ]]; then + printf "[-] Invalid weight type: %s\n" "$wtype" + exit 1 + fi + + wtype="$iw" +fi + # sample repos repos=( "https://huggingface.co/TheBloke/Llama-2-7B-GGUF" @@ -252,8 +274,10 @@ for file in $model_files; do printf " %2d) %s %s\n" $iw "$have" "$file" done +wfile="${wfiles[$wtype]}" + # ask for weights type until provided and available -while [[ -z "$wtype" ]]; do +while [[ -z "$wfile" ]]; do printf "\n" read -p "[+] Select weight type: " wtype wfile="${wfiles[$wtype]}" From 191221178f51b6e81122c5bda0fd79620e547d07 Mon Sep 17 00:00:00 2001 From: kalomaze <66376113+kalomaze@users.noreply.github.com> Date: Fri, 2 Feb 2024 08:15:30 -0600 Subject: [PATCH 650/859] perplexity : fix KL divergence calculations on Windows (#5273) --- examples/perplexity/perplexity.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 8d2204969..4b08145cd 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -457,14 +457,14 @@ static results_perplexity perplexity(llama_context * ctx, const gpt_params & par std::ofstream logits_stream; if (!params.logits_file.empty()) { - logits_stream.open(params.logits_file.c_str()); + logits_stream.open(params.logits_file.c_str(), std::ios::binary); if (!logits_stream.is_open()) { fprintf(stderr, "%s: failed to open %s for writing\n", __func__, params.logits_file.c_str()); return {}; } fprintf(stderr, "%s: saving all logits to %s\n", __func__, params.logits_file.c_str()); logits_stream.write("_logits_", 8); - logits_stream.write((const char *)&n_ctx, sizeof(n_ctx)); + logits_stream.write(reinterpret_cast(&n_ctx), sizeof(n_ctx)); } auto tim1 = std::chrono::high_resolution_clock::now(); From a305dba8ff642e57f538f42010868fe0bc5262a1 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Sat, 3 Feb 2024 08:11:37 +0000 Subject: [PATCH 651/859] Fix im2col with 32fp (#5286) --- ggml-sycl.cpp | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index ac75f8e16..51445b5e7 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -8247,7 +8247,8 @@ static void clamp_f32(const float * x, float * dst, const float min, const float dst[i] = x[i] < min ? min : (x[i] > max ? 
max : x[i]); } -static void im2col_f32_f16(const float *x, sycl::half *dst, int offset_delta, +template +static void im2col_kernel(const float *x, T *dst, int offset_delta, int IW, int IH, int OW, int KW, int KH, int pelements, int CHW, int s0, int s1, int p0, int p1, int d0, int d1, @@ -11019,7 +11020,8 @@ static void soft_max_f32_sycl(const float *x, const float *y, float *dst, }); } -static void im2col_f32_f16_sycl(const float *x, sycl::half *dst, int IW, int IH, +template +static void im2col_sycl(const float *x, T *dst, int IW, int IH, int OW, int OH, int KW, int KH, int IC, int offset_delta, int s0, int s1, int p0, int p1, int d0, int d1, @@ -11036,7 +11038,7 @@ static void im2col_f32_f16_sycl(const float *x, sycl::half *dst, int IW, int IH, sycl::range<3>(1, 1, SYCL_IM2COL_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_IM2COL_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - im2col_f32_f16(x, dst, offset_delta, IW, IH, OW, KW, KH, + im2col_kernel(x, dst, offset_delta, IW, IH, OW, KW, KH, parallel_elements, (IC * KH * KW), s0, s1, p0, p1, d0, d1, item_ct1); }); @@ -12424,7 +12426,7 @@ inline void ggml_sycl_op_im2col(const ggml_tensor *src0, GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); - GGML_ASSERT( dst->type == GGML_TYPE_F16); + GGML_ASSERT( dst->type == GGML_TYPE_F16 || dst->type == GGML_TYPE_F32); const int32_t s0 = ((const int32_t*)(dst->op_params))[0]; const int32_t s1 = ((const int32_t*)(dst->op_params))[1]; @@ -12447,8 +12449,11 @@ inline void ggml_sycl_op_im2col(const ggml_tensor *src0, const size_t delta_offset = src1->nb[is_2D ? 2 : 1] / 4; // nb is byte offset, src is type float32 - im2col_f32_f16_sycl(src1_dd, (sycl::half *)dst_dd, IW, IH, OW, OH, KW, KH, - IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + if (dst->type == GGML_TYPE_F16) { + im2col_sycl(src1_dd, (sycl::half *)dst_dd, IW, IH, OW, OH, KW, KH, IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + } else { + im2col_sycl(src1_dd, (float *)dst_dd, IW, IH, OW, OH, KW, KH, IC, delta_offset, s0, s1, p0, p1, d0, d1, main_stream); + } (void) src0; (void) src0_dd; From 6a66c5071a74a96c4f52cf1015a092acd18c3714 Mon Sep 17 00:00:00 2001 From: BADR Date: Sat, 3 Feb 2024 12:20:26 +0100 Subject: [PATCH 652/859] readme : add tenere in the ui tools list (#5284) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index af1f09fa0..4a9bdf314 100644 --- a/README.md +++ b/README.md @@ -143,6 +143,7 @@ as the main playground for developing new features for the [ggml](https://github - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) - [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) - [iohub/collama](https://github.com/iohub/coLLaMA) +- [pythops/tenere](https://github.com/pythops/tenere) --- From 1ec3332ade60aeb1494ace2211cf1a966db6d770 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Sat, 3 Feb 2024 06:22:06 -0500 Subject: [PATCH 653/859] YaRN : store rope scaling type as int32_t in memory (#5285) * YaRN : store rope scaling type as int32_t in memory * llama : store mapped names as const char * --- common/common.h | 3 +-- llama.cpp | 24 ++++++++++++------------ llama.h | 2 +- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/common/common.h b/common/common.h index 24a99d728..62de25d6a 100644 --- a/common/common.h +++ b/common/common.h @@ -75,8 +75,7 @@ struct gpt_params { float yarn_beta_fast = 32.0f; // YaRN low correction dim float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; 
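The change immediately below widens `rope_scaling_type` from a packed `int8_t` to `int32_t`. Because the neighboring fields are 4-byte aligned, the `int8_t` was followed by padding anyway, so the widening costs no space. A quick standalone check with toy structs (not the real `gpt_params`):

```cpp
#include <cstdint>
#include <cstdio>

struct params_old { uint32_t n_ctx; int8_t  rope_scaling_type; float rope_freq_base; };
struct params_new { uint32_t n_ctx; int32_t rope_scaling_type; float rope_freq_base; };

int main(void) {
    // both print 12 on typical ABIs: the old layout carried 3 padding bytes
    printf("old: %zu bytes, new: %zu bytes\n", sizeof(params_old), sizeof(params_new));
    return 0;
}
```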
// YaRN original context length - int8_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // TODO: better to be int32_t for alignment - // pinging @cebtenzzre + int32_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; // // sampling parameters struct llama_sampling_params sparams; diff --git a/llama.cpp b/llama.cpp index 6bf7f9efb..4787a92fe 100644 --- a/llama.cpp +++ b/llama.cpp @@ -208,7 +208,7 @@ enum llm_arch { LLM_ARCH_UNKNOWN, }; -static std::map LLM_ARCH_NAMES = { +static std::map LLM_ARCH_NAMES = { { LLM_ARCH_LLAMA, "llama" }, { LLM_ARCH_FALCON, "falcon" }, { LLM_ARCH_GPT2, "gpt2" }, @@ -285,7 +285,7 @@ enum llm_kv { LLM_KV_TOKENIZER_RWKV, }; -static std::map LLM_KV_NAMES = { +static std::map LLM_KV_NAMES = { { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" }, { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" }, { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" }, @@ -346,7 +346,7 @@ struct LLM_KV { llm_arch arch; std::string operator()(llm_kv kv) const { - return ::format(LLM_KV_NAMES[kv].c_str(), LLM_ARCH_NAMES[arch].c_str()); + return ::format(LLM_KV_NAMES[kv], LLM_ARCH_NAMES[arch]); } }; @@ -747,13 +747,13 @@ struct LLM_TN { // gguf helpers // -static std::map LLAMA_ROPE_SCALING_TYPES = { +static std::map LLAMA_ROPE_SCALING_TYPES = { { LLAMA_ROPE_SCALING_NONE, "none" }, { LLAMA_ROPE_SCALING_LINEAR, "linear" }, { LLAMA_ROPE_SCALING_YARN, "yarn" }, }; -static int8_t llama_rope_scaling_type_from_string(const std::string & name) { +static int32_t llama_rope_scaling_type_from_string(const std::string & name) { for (const auto & kv : LLAMA_ROPE_SCALING_TYPES) { if (kv.second == name) { return kv.first; @@ -1415,6 +1415,7 @@ static const size_t GiB = 1024*MiB; struct llama_hparams { bool vocab_only; + bool rope_finetuned; uint32_t n_vocab; uint32_t n_ctx_train; // context size the model was trained on uint32_t n_embd; @@ -1434,8 +1435,7 @@ struct llama_hparams { float rope_freq_base_train; float rope_freq_scale_train; uint32_t n_yarn_orig_ctx; - int8_t rope_scaling_type_train : 3; - bool rope_finetuned : 1; + int32_t rope_scaling_type_train; float f_clamp_kqv; float f_max_alibi_bias; @@ -2701,7 +2701,7 @@ struct llama_model_loader { // load LLaMA models // -static std::string llama_model_arch_name(llm_arch arch) { +static const char * llama_model_arch_name(llm_arch arch) { auto it = LLM_ARCH_NAMES.find(arch); if (it == LLM_ARCH_NAMES.end()) { return "unknown"; @@ -3310,11 +3310,11 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { const auto & hparams = model.hparams; const auto & vocab = model.vocab; - const auto rope_scaling_type = LLAMA_ROPE_SCALING_TYPES.at(hparams.rope_scaling_type_train); + const char * rope_scaling_type = LLAMA_ROPE_SCALING_TYPES.at(hparams.rope_scaling_type_train); // hparams LLAMA_LOG_INFO("%s: format = %s\n", __func__, llama_file_version_name(ml.fver)); - LLAMA_LOG_INFO("%s: arch = %s\n", __func__, LLM_ARCH_NAMES.at(model.arch).c_str()); + LLAMA_LOG_INFO("%s: arch = %s\n", __func__, LLM_ARCH_NAMES.at(model.arch)); LLAMA_LOG_INFO("%s: vocab type = %s\n", __func__, llama_model_vocab_type_name(vocab.type)); LLAMA_LOG_INFO("%s: n_vocab = %u\n", __func__, hparams.n_vocab); LLAMA_LOG_INFO("%s: n_merges = %u\n", __func__, (int) vocab.bpe_ranks.size()); @@ -3336,7 +3336,7 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: n_ff = %u\n", __func__, hparams.n_ff); LLAMA_LOG_INFO("%s: n_expert = %u\n", __func__, hparams.n_expert); LLAMA_LOG_INFO("%s: 
n_expert_used = %u\n", __func__, hparams.n_expert_used); - LLAMA_LOG_INFO("%s: rope scaling = %s\n", __func__, rope_scaling_type.c_str()); + LLAMA_LOG_INFO("%s: rope scaling = %s\n", __func__, rope_scaling_type); LLAMA_LOG_INFO("%s: freq_base_train = %.1f\n", __func__, hparams.rope_freq_base_train); LLAMA_LOG_INFO("%s: freq_scale_train = %g\n", __func__, hparams.rope_freq_scale_train); LLAMA_LOG_INFO("%s: n_yarn_orig_ctx = %u\n", __func__, hparams.n_yarn_orig_ctx); @@ -10735,7 +10735,7 @@ int32_t llama_model_meta_val_str_by_index(const struct llama_model * model, int3 int32_t llama_model_desc(const struct llama_model * model, char * buf, size_t buf_size) { return snprintf(buf, buf_size, "%s %s %s", - llama_model_arch_name(model->arch).c_str(), + llama_model_arch_name(model->arch), llama_model_type_name(model->type), llama_model_ftype_name(model->ftype).c_str()); } diff --git a/llama.h b/llama.h index 9a60e9bfb..cec4158bc 100644 --- a/llama.h +++ b/llama.h @@ -213,7 +213,7 @@ extern "C" { uint32_t n_batch; // prompt processing maximum batch size uint32_t n_threads; // number of threads to use for generation uint32_t n_threads_batch; // number of threads to use for batch processing - int8_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type` + int32_t rope_scaling_type; // RoPE scaling type, from `enum llama_rope_scaling_type` // ref: https://github.com/ggerganov/llama.cpp/pull/2054 float rope_freq_base; // RoPE base frequency, 0 = from model From 52bb63c7082c859c3f1dfc527227e6a95b299c7c Mon Sep 17 00:00:00 2001 From: Michael Klimenko Date: Sat, 3 Feb 2024 12:23:37 +0100 Subject: [PATCH 654/859] refactor : switch to emplace_back to avoid extra object (#5291) --- common/common.cpp | 8 ++--- examples/llama-bench/llama-bench.cpp | 34 +++++++++++----------- examples/main/main.cpp | 4 +-- examples/perplexity/perplexity.cpp | 8 ++--- examples/quantize-stats/quantize-stats.cpp | 4 +-- examples/quantize/quantize.cpp | 4 +-- examples/server/server.cpp | 8 ++--- tests/test-llama-grammar.cpp | 2 +- 8 files changed, 36 insertions(+), 36 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index ce739b15c..3302caa20 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -515,7 +515,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - params.lora_adapter.push_back(std::make_tuple(argv[i], 1.0f)); + params.lora_adapter.emplace_back(argv[i], 1.0f); params.use_mmap = false; } else if (arg == "--lora-scaled") { if (++i >= argc) { @@ -527,7 +527,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - params.lora_adapter.push_back(std::make_tuple(lora_adapter, std::stof(argv[i]))); + params.lora_adapter.emplace_back(lora_adapter, std::stof(argv[i])); params.use_mmap = false; } else if (arg == "--lora-base") { if (++i >= argc) { @@ -664,7 +664,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - params.antiprompt.push_back(argv[i]); + params.antiprompt.emplace_back(argv[i]); } else if (arg == "-ld" || arg == "--logdir") { if (++i >= argc) { invalid_param = true; @@ -880,7 +880,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } if (!params.kv_overrides.empty()) { - params.kv_overrides.emplace_back(llama_model_kv_override()); + params.kv_overrides.emplace_back(); params.kv_overrides.back().key[0] = 0; } diff --git a/examples/llama-bench/llama-bench.cpp 
b/examples/llama-bench/llama-bench.cpp index e36c061a2..ddb0ba064 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -948,46 +948,46 @@ struct markdown_printer : public printer { void print_header(const cmd_params & params) override { // select fields to print - fields.push_back("model"); - fields.push_back("size"); - fields.push_back("params"); - fields.push_back("backend"); + fields.emplace_back("model"); + fields.emplace_back("size"); + fields.emplace_back("params"); + fields.emplace_back("backend"); bool is_cpu_backend = test::get_backend() == "CPU" || test::get_backend() == "BLAS"; if (!is_cpu_backend) { - fields.push_back("n_gpu_layers"); + fields.emplace_back("n_gpu_layers"); } if (params.n_threads.size() > 1 || params.n_threads != cmd_params_defaults.n_threads || is_cpu_backend) { - fields.push_back("n_threads"); + fields.emplace_back("n_threads"); } if (params.n_batch.size() > 1 || params.n_batch != cmd_params_defaults.n_batch) { - fields.push_back("n_batch"); + fields.emplace_back("n_batch"); } if (params.type_k.size() > 1 || params.type_k != cmd_params_defaults.type_k) { - fields.push_back("type_k"); + fields.emplace_back("type_k"); } if (params.type_v.size() > 1 || params.type_v != cmd_params_defaults.type_v) { - fields.push_back("type_v"); + fields.emplace_back("type_v"); } if (params.main_gpu.size() > 1 || params.main_gpu != cmd_params_defaults.main_gpu) { - fields.push_back("main_gpu"); + fields.emplace_back("main_gpu"); } if (params.split_mode.size() > 1 || params.split_mode != cmd_params_defaults.split_mode) { - fields.push_back("split_mode"); + fields.emplace_back("split_mode"); } if (params.mul_mat_q.size() > 1 || params.mul_mat_q != cmd_params_defaults.mul_mat_q) { - fields.push_back("mul_mat_q"); + fields.emplace_back("mul_mat_q"); } if (params.no_kv_offload.size() > 1 || params.no_kv_offload != cmd_params_defaults.no_kv_offload) { - fields.push_back("no_kv_offload"); + fields.emplace_back("no_kv_offload"); } if (params.tensor_split.size() > 1 || params.tensor_split != cmd_params_defaults.tensor_split) { - fields.push_back("tensor_split"); + fields.emplace_back("tensor_split"); } if (params.use_mmap.size() > 1 || params.use_mmap != cmd_params_defaults.use_mmap) { - fields.push_back("use_mmap"); + fields.emplace_back("use_mmap"); } - fields.push_back("test"); - fields.push_back("t/s"); + fields.emplace_back("test"); + fields.emplace_back("t/s"); fprintf(fout, "|"); for (const auto & field : fields) { diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 1c6138d23..0ed4d79f9 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -352,12 +352,12 @@ int main(int argc, char ** argv) { // in instruct mode, we inject a prefix and a suffix to each input by the user if (params.instruct) { params.interactive_first = true; - params.antiprompt.push_back("### Instruction:\n\n"); + params.antiprompt.emplace_back("### Instruction:\n\n"); } // similar for chatml mode else if (params.chatml) { params.interactive_first = true; - params.antiprompt.push_back("<|im_start|>user\n"); + params.antiprompt.emplace_back("<|im_start|>user\n"); } // enable interactive mode if interactive start is specified diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 4b08145cd..b2c131d4c 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -881,7 +881,7 @@ static void hellaswag_score(llama_context * ctx, const gpt_params & params) { size_t li = 
hs_cur.common_prefix; for (int s = 0; s < 4; ++s) { for (size_t j = hs_cur.common_prefix; j < hs_cur.seq_tokens[s].size() - 1; j++) { - eval_pairs.push_back(std::make_pair(hs_cur.i_batch + li++, hs_cur.seq_tokens[s][j + 1])); + eval_pairs.emplace_back(hs_cur.i_batch + li++, hs_cur.seq_tokens[s][j + 1]); } ++li; } @@ -1159,13 +1159,13 @@ static void winogrande_score(llama_context * ctx, const gpt_params & params) { const int last_1st = task.seq_tokens[0].size() - n_base1 > 1 ? 1 : 0; size_t li = n_base1 - 1; for (size_t j = n_base1-1; j < task.seq_tokens[0].size()-1-last_1st; ++j) { - eval_pairs.push_back(std::make_pair(task.i_batch + li++, task.seq_tokens[0][j+1])); + eval_pairs.emplace_back(task.i_batch + li++, task.seq_tokens[0][j+1]); } const auto& n_base2 = skip_choice ? task.n_base2 : task.common_prefix; const int last_2nd = task.seq_tokens[1].size() - n_base2 > 1 ? 1 : 0; li = task.seq_tokens[0].size() - task.common_prefix + n_base2 - 1; for (size_t j = n_base2-1; j < task.seq_tokens[1].size()-1-last_2nd; ++j) { - eval_pairs.push_back(std::make_pair(task.i_batch + li++, task.seq_tokens[1][j+1])); + eval_pairs.emplace_back(task.i_batch + li++, task.seq_tokens[1][j+1]); } } compute_logprobs(batch_logits.data(), n_vocab, workers, eval_pairs, eval_results); @@ -1524,7 +1524,7 @@ static void multiple_choice_score(llama_context * ctx, const gpt_params & params size_t li = cur_task.common_prefix; for (int s = 0; s < int(cur_task.seq_tokens.size()); ++s) { for (size_t j = cur_task.common_prefix; j < cur_task.seq_tokens[s].size() - 1; j++) { - eval_pairs.push_back(std::make_pair(cur_task.i_batch + li++, cur_task.seq_tokens[s][j + 1])); + eval_pairs.emplace_back(cur_task.i_batch + li++, cur_task.seq_tokens[s][j + 1]); } ++li; } diff --git a/examples/quantize-stats/quantize-stats.cpp b/examples/quantize-stats/quantize-stats.cpp index 6d5f213dc..1d05f1391 100644 --- a/examples/quantize-stats/quantize-stats.cpp +++ b/examples/quantize-stats/quantize-stats.cpp @@ -257,13 +257,13 @@ int main(int argc, char ** argv) { invalid_param = true; break; } - params.include_layers.push_back(argv[i]); + params.include_layers.emplace_back(argv[i]); } else if (arg == "-L" || arg == "--exclude-layer") { if (++i >= argc) { invalid_param = true; break; } - params.exclude_layers.push_back(argv[i]); + params.exclude_layers.emplace_back(argv[i]); } else if (arg == "-t" || arg == "--type") { if (++i >= argc) { invalid_param = true; diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index a9673f0d4..85f403ffc 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -208,13 +208,13 @@ int main(int argc, char ** argv) { } } else if (strcmp(argv[arg_idx], "--include-weights") == 0) { if (arg_idx < argc-1) { - included_weights.push_back(argv[++arg_idx]); + included_weights.emplace_back(argv[++arg_idx]); } else { usage(argv[0]); } } else if (strcmp(argv[arg_idx], "--exclude-weights") == 0) { if (arg_idx < argc-1) { - excluded_weights.push_back(argv[++arg_idx]); + excluded_weights.emplace_back(argv[++arg_idx]); } else { usage(argv[0]); } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index ea77125ea..a9f8cb369 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1884,7 +1884,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - sparams.api_keys.push_back(argv[i]); + sparams.api_keys.emplace_back(argv[i]); } else if (arg == "--api-key-file") { @@ -2160,7 +2160,7 @@ 
static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - params.lora_adapter.push_back(std::make_tuple(argv[i], 1.0f)); + params.lora_adapter.emplace_back(argv[i], 1.0f); params.use_mmap = false; } else if (arg == "--lora-scaled") @@ -2176,7 +2176,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - params.lora_adapter.push_back(std::make_tuple(lora_adapter, std::stof(argv[i]))); + params.lora_adapter.emplace_back(lora_adapter, std::stof(argv[i])); params.use_mmap = false; } else if (arg == "--lora-base") @@ -2318,7 +2318,7 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } } if (!params.kv_overrides.empty()) { - params.kv_overrides.emplace_back(llama_model_kv_override()); + params.kv_overrides.emplace_back(); params.kv_overrides.back().key[0] = 0; } diff --git a/tests/test-llama-grammar.cpp b/tests/test-llama-grammar.cpp index 78fc41117..16ebe753f 100644 --- a/tests/test-llama-grammar.cpp +++ b/tests/test-llama-grammar.cpp @@ -105,7 +105,7 @@ int main() for (auto rule : expected_rules) { - parsed_grammar.rules.push_back({}); + parsed_grammar.rules.emplace_back(); for (auto element : rule) { parsed_grammar.rules.back().push_back(element); From e920ed393d989ed35625ddaf182ebb52cda07fcd Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Sat, 3 Feb 2024 18:15:00 +0100 Subject: [PATCH 655/859] Vulkan Intel Fixes, Optimizations and Debugging Flags (#5301) * Fix Vulkan on Intel ARC Optimize matmul for Intel ARC Add Vulkan dequant test * Add Vulkan debug and validate flags to Make and CMakeLists.txt * Enable asynchronous transfers in Vulkan backend * Fix flake8 * Disable Vulkan async backend functions for now * Also add Vulkan run tests command to Makefile and CMakeLists.txt --- CMakeLists.txt | 20 + Makefile | 12 + ggml-vulkan-shaders.hpp | 10922 +++------------------------------- ggml-vulkan.cpp | 420 +- ggml_vk_generate_shaders.py | 213 +- 5 files changed, 1257 insertions(+), 10330 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 1ee455b3a..c156c4824 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -100,6 +100,10 @@ option(LLAMA_HIPBLAS "llama: use hipBLAS" option(LLAMA_HIP_UMA "llama: use HIP unified memory architecture" OFF) option(LLAMA_CLBLAST "llama: use CLBlast" OFF) option(LLAMA_VULKAN "llama: use Vulkan" OFF) +option(LLAMA_VULKAN_CHECK_RESULTS "llama: run Vulkan op checks" OFF) +option(LLAMA_VULKAN_DEBUG "llama: enable Vulkan debug output" OFF) +option(LLAMA_VULKAN_VALIDATE "llama: enable Vulkan validation" OFF) +option(LLAMA_VULKAN_RUN_TESTS "llama: run Vulkan tests" OFF) option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG "llama: disable Metal debugging" OFF) option(LLAMA_METAL_SHADER_DEBUG "llama: compile Metal with -fno-fast-math" OFF) @@ -431,6 +435,22 @@ if (LLAMA_VULKAN) add_compile_definitions(GGML_USE_VULKAN) + if (LLAMA_VULKAN_CHECK_RESULTS) + target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_CHECK_RESULTS) + endif() + + if (LLAMA_VULKAN_DEBUG) + target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_DEBUG) + endif() + + if (LLAMA_VULKAN_VALIDATE) + target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_VALIDATE) + endif() + + if (LLAMA_VULKAN_RUN_TESTS) + target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_RUN_TESTS) + endif() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ggml-vulkan) else() message(WARNING "Vulkan not found") diff --git a/Makefile b/Makefile 
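The new CMake options above all funnel into plain compile definitions; inside the backend such flags typically gate logging at compile time. A hedged sketch of the mechanism, with the `VK_DEBUG` macro name invented for illustration rather than taken from the backend:

```cpp
#include <iostream>

// set via cmake -DLLAMA_VULKAN_DEBUG=ON, which injects GGML_VULKAN_DEBUG
#ifdef GGML_VULKAN_DEBUG
#define VK_DEBUG(msg) (std::cerr << "[vulkan] " << msg << std::endl)
#else
#define VK_DEBUG(msg) ((void)0)
#endif

int main() {
    VK_DEBUG("buffer allocated"); // compiled out entirely unless the flag is set
    return 0;
}
```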
index bf9e085de..a55d15888 100644 --- a/Makefile +++ b/Makefile @@ -457,6 +457,18 @@ ifdef LLAMA_VULKAN_CHECK_RESULTS MK_CPPFLAGS += -DGGML_VULKAN_CHECK_RESULTS endif +ifdef LLAMA_VULKAN_DEBUG + MK_CPPFLAGS += -DGGML_VULKAN_DEBUG +endif + +ifdef LLAMA_VULKAN_VALIDATE + MK_CPPFLAGS += -DGGML_VULKAN_VALIDATE +endif + +ifdef LLAMA_VULKAN_RUN_TESTS + MK_CPPFLAGS += -DGGML_VULKAN_RUN_TESTS +endif + ggml-vulkan.o: ggml-vulkan.cpp ggml-vulkan.h $(CXX) $(CXXFLAGS) -c $< -o $@ endif # LLAMA_VULKAN diff --git a/ggml-vulkan-shaders.hpp b/ggml-vulkan-shaders.hpp index 195410c02..e5e7a8414 100644 --- a/ggml-vulkan-shaders.hpp +++ b/ggml-vulkan-shaders.hpp @@ -890,156 +890,6 @@ const uint64_t cpy_f32_f32_len = 2472; unsigned char dequant_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x81,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x09,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x10,0x00,0x06,0x00, -0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x4d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x5b,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, 
-0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x23,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x4c,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x5a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0d,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x80,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00,0x23,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x29,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2a,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 
-0x2d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x23,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x33,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x31,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x32,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x48,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x48,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x52,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x52,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7a,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x7f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x7f,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_f16_len = 1748; - -unsigned char dequant_f16_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, @@ -1192,344 +1042,10 @@ unsigned char dequant_f16_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t dequant_f16_fp32_len = 1816; +const uint64_t dequant_f16_len = 1816; unsigned char dequant_q2_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x0c,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, 
-0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x5f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x5f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x61,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xfc,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x5d,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x5f,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x70,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x06,0x00, -0x15,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x2a,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x29,0x00,0x03,0x00, -0x11,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xfd,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfe,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x10,0x00,0x00,0x00, -0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, 
-0x4e,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x69,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x70,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x70,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x69,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x9a,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x70,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x69,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, 
-0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x5d,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x7f,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0xb8,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x70,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x69,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xc6,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x5d,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x7f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0xd7,0x00,0x00,0x00,0x0c,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x0a,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x70,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd9,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x41,0x00,0x08,0x00, 
-0x69,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xe6,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x7f,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0xf7,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x70,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x29,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0xff,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x03,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x06,0x01,0x00,0x00,0xfd,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x03,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0xfd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xfd,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q2_K_len = 3956; - -unsigned char dequant_q2_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x13,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -1869,414 +1385,10 @@ unsigned char dequant_q2_K_fp32_data[] = { 0x04,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q2_K_fp32_len = 4056; +const uint64_t dequant_q2_K_len = 4056; unsigned char dequant_q3_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x3f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 
-0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x71,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x75,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x77,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x04,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x05,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x05,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x07,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2c,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, 
-0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x71,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x73,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x75,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x77,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xe1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x04,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x05,0x01,0x00,0x00, -0x04,0x01,0x00,0x00,0x20,0x00,0x04,0x00,0x06,0x01,0x00,0x00, -0x0c,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x3b,0x00,0x04,0x00, -0x06,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x72,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x2f,0x01,0x00,0x00,0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00, 
-0x32,0x01,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x2d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x18,0x00,0x00,0x00,0x2e,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x2e,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x2e,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x35,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3c,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x3c,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x50,0x00,0x00,0x00, 
-0x37,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3c,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x92,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x94,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, 
-0x9d,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x97,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xab,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0xae,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb0,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x52,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3b,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0xbe,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc5,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc8,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xb1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb1,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0xc4,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x97,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x97,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0xaa,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0xb1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x6f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x6f,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x38,0x01,0x00,0x00,0x91,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x97,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x38,0x01,0x00,0x00, -0x41,0x00,0x07,0x00,0xe1,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x76,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf1,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0xe8,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xfb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xfb,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x49,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x28,0x01,0x00,0x00, -0xfc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x49,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x03,0x01,0x00,0x00, -0x39,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0xf6,0x00,0x04,0x00, -0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x03,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, 
-0xfd,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xfc,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0xf5,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0f,0x01,0x00,0x00,0xf8,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x0f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0x13,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x62,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x8e,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x17,0x01,0x00,0x00,0x16,0x01,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x52,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x1c,0x01,0x00,0x00,0x5c,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x21,0x01,0x00,0x00,0x20,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x22,0x01,0x00,0x00, -0x21,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x18,0x01,0x00,0x00,0x22,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x76,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0xeb,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0xe1,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x0a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x28,0x01,0x00,0x00,0x39,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xfb,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xfd,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x35,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x2f,0x01,0x00,0x00,0x0a,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x33,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x3e,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x2d,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2d,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q3_K_len = 4792; - -unsigned char dequant_q3_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x42,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -2680,709 +1792,10 @@ unsigned 
char dequant_q3_K_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x30,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q3_K_fp32_len = 4828; +const uint64_t dequant_q3_K_len = 4828; unsigned char dequant_q4_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xf7,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x52,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x74,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x75,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x97,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, 
-0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x00,0x48,0x00,0x00,0x1d,0x00,0x03,0x00,0x74,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x75,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x76,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe1,0x02,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe3,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe5,0x02,0x00,0x00, 
-0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe6,0x02,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe9,0x02,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xeb,0x02,0x00,0x00, -0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xec,0x02,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0x1a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xef,0x02,0x00,0x00,0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf1,0x02,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf3,0x02,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf6,0x02,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x98,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x99,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xbd,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0xde,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xd3,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0xd1,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xe3,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xf8,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xf9,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0xfd,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0xff,0x00,0x00,0x00,0xf5,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x09,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x09,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x10,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xe0,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x11,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x12,0x01,0x00,0x00, -0x11,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1f,0x01,0x00,0x00, -0x1e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0x20,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x22,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x23,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x24,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x1b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x2c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0xe1,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x37,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x38,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x36,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x41,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xe2,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x44,0x01,0x00,0x00,0x43,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x01,0x00,0x00, -0x45,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x47,0x01,0x00,0x00,0x46,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x48,0x01,0x00,0x00, -0x43,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x48,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x4a,0x01,0x00,0x00, -0x47,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x4b,0x01,0x00,0x00,0x4a,0x01,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x4c,0x01,0x00,0x00,0x4b,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x52,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xe2,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x4c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0xe3,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x4c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x69,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x6e,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, -0x6f,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x71,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x71,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x78,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0xe4,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x7a,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x7b,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7b,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xe5,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x83,0x01,0x00,0x00,0x72,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x84,0x01,0x00,0x00, -0x83,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0xe6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x8f,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x90,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x93,0x01,0x00,0x00, -0x92,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x94,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x8d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9e,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xa0,0x01,0x00,0x00, -0x98,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0xe7,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xa9,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa8,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xe8,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, -0xb4,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb7,0x01,0x00,0x00,0xb6,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb8,0x01,0x00,0x00, -0xb7,0x01,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb9,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xba,0x01,0x00,0x00, -0xb5,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0xbb,0x01,0x00,0x00,0xba,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xbc,0x01,0x00,0x00, -0xb9,0x01,0x00,0x00,0xbb,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xbd,0x01,0x00,0x00,0xbc,0x01,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xbe,0x01,0x00,0x00,0xbd,0x01,0x00,0x00,0xb3,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc4,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xe8,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xc6,0x01,0x00,0x00,0xbe,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xc7,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc7,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xce,0x01,0x00,0x00,0x7f,0x00,0x00,0x00,0xe9,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xcf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xce,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd0,0x01,0x00,0x00,0xcf,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xda,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xea,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xdb,0x01,0x00,0x00,0xda,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xdc,0x01,0x00,0x00, -0xdb,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xdd,0x01,0x00,0x00,0xdc,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00,0xdd,0x01,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0xde,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0xdf,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0xe3,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xea,0x01,0x00,0x00,0x7f,0x00,0x00,0x00, -0xea,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xe4,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xed,0x01,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xea,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xed,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, -0x7f,0x00,0x00,0x00,0xeb,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xf5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xf6,0x01,0x00,0x00, -0xf5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0xec,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x01,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x02,0x02,0x00,0x00,0x01,0x02,0x00,0x00, 
-0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x03,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x04,0x02,0x00,0x00,0x03,0x02,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x05,0x02,0x00,0x00, -0x04,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x06,0x02,0x00,0x00,0x01,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0x06,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x05,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x09,0x02,0x00,0x00, -0x08,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0x09,0x02,0x00,0x00, -0xff,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x10,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0xec,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x12,0x02,0x00,0x00, -0x0a,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x13,0x02,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x10,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x13,0x02,0x00,0x00,0x12,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0xed,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x0a,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x1c,0x02,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x1c,0x02,0x00,0x00,0x1b,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x25,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x26,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xee,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x27,0x02,0x00,0x00, -0x26,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x28,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2a,0x02,0x00,0x00, -0x29,0x02,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x2a,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x2b,0x02,0x00,0x00,0x2d,0x02,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x2f,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x25,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x02,0x00,0x00, -0x7f,0x00,0x00,0x00,0xee,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x38,0x02,0x00,0x00,0x30,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x39,0x02,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x36,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x39,0x02,0x00,0x00, -0x38,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0xef,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x41,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x42,0x02,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x42,0x02,0x00,0x00,0x41,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x4a,0x00,0x00,0x00,0x4b,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x4c,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xf0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4f,0x02,0x00,0x00,0x4e,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x51,0x02,0x00,0x00,0x50,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x4d,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x53,0x02,0x00,0x00,0x52,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0xf0,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x5f,0x02,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5f,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x7f,0x00,0x00,0x00,0xf1,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x67,0x02,0x00,0x00,0x56,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x68,0x02,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x66,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x68,0x02,0x00,0x00, -0x67,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0xf2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x7b,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x84,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x82,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x85,0x02,0x00,0x00,0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x7f,0x00,0x00,0x00, -0xf3,0x02,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x7c,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8c,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x97,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x99,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x9e,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x9e,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xa0,0x02,0x00,0x00, -0x9d,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x83,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0xa2,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa8,0x02,0x00,0x00, -0x7f,0x00,0x00,0x00,0xf4,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xaa,0x02,0x00,0x00,0xa2,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xab,0x02,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xab,0x02,0x00,0x00, -0xaa,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb2,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,0xf5,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb3,0x02,0x00,0x00, -0xa2,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xb4,0x02,0x00,0x00,0x77,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb4,0x02,0x00,0x00,0xb3,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xbd,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0xbf,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0xc0,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0x66,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc3,0x02,0x00,0x00,0xc2,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc5,0x02,0x00,0x00,0xc4,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x60,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0xc3,0x02,0x00,0x00, -0xc5,0x02,0x00,0x00,0x83,0x00,0x05,0x00,0x60,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xa2,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x60,0x00,0x00,0x00,0xc8,0x02,0x00,0x00, -0xc7,0x02,0x00,0x00,0xbd,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x7f,0x00,0x00,0x00,
-0x66,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,
-0xd0,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x00,0x00,0x00,0x00,
-0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,
-0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xce,0x02,0x00,0x00,
-0x3e,0x00,0x03,0x00,0xd1,0x02,0x00,0x00,0xd0,0x02,0x00,0x00,
-0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,
-0x7f,0x00,0x00,0x00,0xf6,0x02,0x00,0x00,0x51,0x00,0x05,0x00,
-0x4a,0x00,0x00,0x00,0xd9,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,
-0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,
-0xda,0x02,0x00,0x00,0x77,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,
-0xd8,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xda,0x02,0x00,0x00,
-0xd9,0x02,0x00,0x00,0xf9,0x00,0x02,0x00,0x98,0x00,0x00,0x00,
-0xf8,0x00,0x02,0x00,0x98,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,
-0x38,0x00,0x01,0x00,
-};
-const uint64_t dequant_q4_0_len = 8332;
-
-unsigned char dequant_q4_0_fp32_data[] = {
-0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00,
 0x19,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00,
 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,
 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,
@@ -4122,758 +2535,10 @@ unsigned char dequant_q4_0_fp32_data[] = {
 0x9b,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00,
 };
-const uint64_t dequant_q4_0_fp32_len = 8856;
+const uint64_t dequant_q4_0_len = 8856;

 unsigned char dequant_q4_1_data[] = {
 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00,
-0x52,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x78,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x79,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x1e,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x51,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, -0x0a,0x00,0x00,0x00,0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x94,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x11,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00, -0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x10,0x03,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x16,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x17,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x03,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, -0x19,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1c,0x03,0x00,0x00,0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0x1a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x03,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x22,0x03,0x00,0x00,0x0d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x03,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x24,0x03,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x26,0x03,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x9c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x9d,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, 
-0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x9c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x56,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x56,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x74,0x00,0x00,0x00, 
-0x76,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xb2,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x0e,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xcc,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, 
-0x37,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0xd9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xeb,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xec,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x0f,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xf4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x03,0x01,0x00,0x00,0x02,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x04,0x01,0x00,0x00, -0x03,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x04,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x02,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x07,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x09,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x0a,0x01,0x00,0x00, -0x09,0x01,0x00,0x00,0xfe,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x00,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0x0a,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x12,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x14,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 
-0x12,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x15,0x01,0x00,0x00, -0x14,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0x10,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x1d,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1e,0x01,0x00,0x00,0x1d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x29,0x01,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x2a,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2e,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x2e,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x2b,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x2f,0x01,0x00,0x00,0x31,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00, -0x27,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0x29,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x33,0x01,0x00,0x00,0x34,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x3e,0x01,0x00,0x00,0x3d,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0x11,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x46,0x01,0x00,0x00,0x35,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x47,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x47,0x01,0x00,0x00, -0x46,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x50,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0x53,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x12,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x55,0x01,0x00,0x00, -0x54,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x56,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, 
-0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x50,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x5d,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x52,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x5e,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x6f,0x01,0x00,0x00,0x5e,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x70,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x6e,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x79,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x7b,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x14,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x7c,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7f,0x01,0x00,0x00, -0x7e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x83,0x01,0x00,0x00, -0x82,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x83,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x84,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x86,0x01,0x00,0x00,0x7b,0x01,0x00,0x00, -0x7b,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x87,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x86,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0x14,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x90,0x01,0x00,0x00, -0x8f,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x97,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0x15,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x98,0x01,0x00,0x00, -0x87,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x97,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa2,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, 
-0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x16,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa8,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00, -0xa8,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xab,0x01,0x00,0x00, -0xa6,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xab,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0xaa,0x01,0x00,0x00,0xac,0x01,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xae,0x01,0x00,0x00,0xad,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xaf,0x01,0x00,0x00,0xa4,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xb0,0x01,0x00,0x00, -0xae,0x01,0x00,0x00,0xaf,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0x16,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xb8,0x01,0x00,0x00,0xb0,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xb9,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb9,0x01,0x00,0x00,0xb8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x83,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xb0,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xc2,0x01,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xc0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xc2,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xcb,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xcd,0x01,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x18,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xd0,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd2,0x01,0x00,0x00,0xd1,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xd4,0x01,0x00,0x00,0xcf,0x01,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xd5,0x01,0x00,0x00,0xd4,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xd3,0x01,0x00,0x00, -0xd5,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0xcb,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, -0xcd,0x01,0x00,0x00,0xcd,0x01,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xd9,0x01,0x00,0x00,0xd7,0x01,0x00,0x00, -0xd8,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdf,0x01,0x00,0x00,0x83,0x00,0x00,0x00,0x18,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xe1,0x01,0x00,0x00, 
-0xd9,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xdf,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe2,0x01,0x00,0x00,0xe1,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0x83,0x00,0x00,0x00, -0x19,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xeb,0x01,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xeb,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf4,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf6,0x01,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0xf7,0x01,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x1a,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xf8,0x01,0x00,0x00,0xf7,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xf9,0x01,0x00,0x00,0xf8,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfa,0x01,0x00,0x00, -0xf9,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfb,0x01,0x00,0x00,0xfa,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xfc,0x01,0x00,0x00, -0xfb,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xfd,0x01,0x00,0x00,0xf8,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xfe,0x01,0x00,0x00, -0xfd,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xff,0x01,0x00,0x00,0xfc,0x01,0x00,0x00,0xfe,0x01,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0xff,0x01,0x00,0x00,0xf4,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x01,0x02,0x00,0x00,0xf6,0x01,0x00,0x00, -0xf6,0x01,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x02,0x02,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x1a,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x0a,0x02,0x00,0x00,0x02,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x0b,0x02,0x00,0x00, -0x0a,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x12,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x02,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x12,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x1d,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x1c,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x02,0x00,0x00,0x22,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x25,0x02,0x00,0x00,0x24,0x02,0x00,0x00, 
-0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x21,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x26,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x28,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x29,0x02,0x00,0x00,0x28,0x02,0x00,0x00, -0x1d,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x2a,0x02,0x00,0x00,0x1f,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x2b,0x02,0x00,0x00, -0x29,0x02,0x00,0x00,0x2a,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0x1c,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x33,0x02,0x00,0x00,0x2b,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x34,0x02,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x31,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x34,0x02,0x00,0x00,0x33,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3b,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x1d,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3b,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x3d,0x02,0x00,0x00, -0x3c,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x46,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x1e,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x4a,0x02,0x00,0x00,0x49,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, -0x4a,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x4e,0x02,0x00,0x00,0x4d,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x4a,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4f,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00,0x46,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x53,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0x48,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x54,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x53,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0x1e,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x5c,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x5d,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x5a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x5d,0x02,0x00,0x00,0x5c,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0x1f,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x65,0x02,0x00,0x00,0x54,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x66,0x02,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x66,0x02,0x00,0x00,0x65,0x02,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6f,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x71,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0x72,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x20,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x73,0x02,0x00,0x00,0x72,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x74,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x02,0x00,0x00, -0x74,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0x75,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x77,0x02,0x00,0x00, -0x76,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x73,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x77,0x02,0x00,0x00,0x79,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x7b,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x6f,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0x7c,0x02,0x00,0x00,0x71,0x02,0x00,0x00, -0x71,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0x7d,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x83,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x20,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x85,0x02,0x00,0x00,0x7d,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x86,0x02,0x00,0x00, -0x85,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x8e,0x02,0x00,0x00, -0x7d,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x8f,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8f,0x02,0x00,0x00,0x8e,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x98,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00, -0x9b,0x02,0x00,0x00,0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x22,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x9c,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9d,0x02,0x00,0x00,0x9c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0xa1,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xa2,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xa3,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0xa2,0x02,0x00,0x00,0x8e,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xa4,0x02,0x00,0x00,0xa3,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xa5,0x02,0x00,0x00,0x9a,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0xa4,0x02,0x00,0x00,0xa5,0x02,0x00,0x00,0x80,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0xac,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0x22,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xae,0x02,0x00,0x00,0xa6,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xaf,0x02,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xac,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xaf,0x02,0x00,0x00,0xae,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x23,0x03,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xb7,0x02,0x00,0x00,0xa6,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xb6,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xb8,0x02,0x00,0x00, -0xb7,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x61,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0x54,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x24,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0xc5,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0xc6,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00, -0x6a,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc9,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x4d,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0xc5,0x02,0x00,0x00, -0x6e,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xcb,0x02,0x00,0x00,0xca,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0xc9,0x02,0x00,0x00, -0xcb,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xcd,0x02,0x00,0x00,0xcc,0x02,0x00,0x00,0xc1,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xce,0x02,0x00,0x00, -0xc3,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0x81,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xcf,0x02,0x00,0x00,0xcd,0x02,0x00,0x00, -0xce,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd5,0x02,0x00,0x00,0x83,0x00,0x00,0x00,0x24,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xd7,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0xd8,0x02,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xd8,0x02,0x00,0x00,0xd7,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdf,0x02,0x00,0x00,0x83,0x00,0x00,0x00, -0x25,0x03,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xe0,0x02,0x00,0x00,0xcf,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0xe1,0x02,0x00,0x00, -0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xdf,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xe1,0x02,0x00,0x00,0xe0,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xea,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xec,0x02,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x61,0x00,0x00,0x00,0xed,0x02,0x00,0x00,0x54,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xee,0x02,0x00,0x00,0xed,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xef,0x02,0x00,0x00,0xee,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf0,0x02,0x00,0x00, -0xef,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0xf1,0x02,0x00,0x00,0xf0,0x02,0x00,0x00,0x6a,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf2,0x02,0x00,0x00, -0xf1,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x4d,0x00,0x00,0x00, -0xf3,0x02,0x00,0x00,0xee,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf4,0x02,0x00,0x00, -0xf3,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xf5,0x02,0x00,0x00,0xf2,0x02,0x00,0x00,0xf4,0x02,0x00,0x00, -0x8e,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0xf6,0x02,0x00,0x00, -0xf5,0x02,0x00,0x00,0xea,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x64,0x00,0x00,0x00,0xf7,0x02,0x00,0x00,0xec,0x02,0x00,0x00, -0xec,0x02,0x00,0x00,0x81,0x00,0x05,0x00,0x64,0x00,0x00,0x00, -0xf8,0x02,0x00,0x00,0xf6,0x02,0x00,0x00,0xf7,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xfe,0x02,0x00,0x00, -0x83,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xf8,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x01,0x03,0x00,0x00,0x7b,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xfe,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x01,0x03,0x00,0x00, -0x00,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x08,0x03,0x00,0x00,0x83,0x00,0x00,0x00,0x26,0x03,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x09,0x03,0x00,0x00, -0xf8,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x56,0x00,0x00,0x00,0x0a,0x03,0x00,0x00,0x7b,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x08,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x0a,0x03,0x00,0x00,0x09,0x03,0x00,0x00,0xf9,0x00,0x02,0x00, -0x9c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x9c,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q4_1_len = 8924; - -unsigned char dequant_q4_1_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x59,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, @@ -5683,496 +3348,10 @@ unsigned char dequant_q4_1_fp32_data[] = { 0xa0,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xa0,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q4_1_fp32_len = 9704; +const uint64_t dequant_q4_1_len = 9704; unsigned char dequant_q4_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xa6,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
-0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x48,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x4b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x4d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x05,0x01,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x06,0x01,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x08,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x08,0x01,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x31,0x01,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x42,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x17,0x00,0x04,0x00, 
-0x45,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x48,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x46,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x4b,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x4c,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x4d,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x42,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x05,0x01,0x00,0x00,0x42,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x05,0x01,0x00,0x00, -0x20,0x00,0x04,0x00,0x07,0x01,0x00,0x00,0x0c,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x3b,0x00,0x04,0x00,0x07,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x20,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0x30,0x01,0x00,0x00,0x56,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x34,0x01,0x00,0x00, -0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x37,0x01,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0x21,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x22,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x23,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x32,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x33,0x01,0x00,0x00, -0x2f,0x01,0x00,0x00,0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3a,0x01,0x00,0x00, -0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x3a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x51,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x51,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x6e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 
-0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x6d,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x73,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x6e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x98,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x73,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, 
-0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xaa,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x6e,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x6e,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x87,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00, -0x3b,0x01,0x00,0x00,0x7c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0x3c,0x01,0x00,0x00, -0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0xb4,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0xb9,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x6c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xbc,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xbe,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0xbf,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0xc0,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc7,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0xc8,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7a,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xcf,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xe0,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7a,0x00,0x00,0x00,0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xd3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xed,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xee,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xef,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb9,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb9,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0xce,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x46,0x00,0x00,0x00,0x3d,0x01,0x00,0x00,0xc3,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0xfb,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x13,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0x90,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x16,0x01,0x00,0x00,0x15,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x9c,0x01,0x00,0x00,0xb5,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x19,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, 
-0x16,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x51,0x00,0x00,0x00,0x1a,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0xfc,0x00,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x2a,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x27,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x51,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x1e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x01,0x00,0x00,0x6a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x52,0x01,0x00,0x00, -0x90,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x53,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x42,0x00,0x00,0x00,0x56,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x9c,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x51,0x00,0x00,0x00, -0x57,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x4d,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x57,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x59,0x01,0x00,0x00,0x63,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x4f,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x60,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x5e,0x01,0x00,0x00, -0x5d,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x60,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x5e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x51,0x00,0x00,0x00,0x61,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x69,0x01,0x00,0x00, -0x63,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00,0x6a,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x73,0x00,0x00,0x00, -0x6b,0x01,0x00,0x00,0x4f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x6c,0x01,0x00,0x00, -0x6b,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, 
-0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x6e,0x01,0x00,0x00,0x90,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x70,0x01,0x00,0x00,0x9c,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x51,0x00,0x00,0x00,0x73,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x73,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x7a,0x01,0x00,0x00,0x79,0x01,0x00,0x00,0x0c,0x00,0x08,0x00, -0x42,0x00,0x00,0x00,0x7c,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x41,0x00,0x06,0x00,0x51,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x75,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x7d,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x63,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x6a,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x73,0x00,0x00,0x00,0x87,0x01,0x00,0x00,0x4f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x87,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x89,0x01,0x00,0x00,0x88,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8a,0x01,0x00,0x00, -0x89,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x8a,0x01,0x00,0x00,0x90,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x8c,0x01,0x00,0x00, -0x8b,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x8e,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x51,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x85,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8f,0x01,0x00,0x00,0x8e,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x63,0x00,0x00,0x00,0xa5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x94,0x01,0x00,0x00,0x87,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x95,0x01,0x00,0x00, -0x94,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x98,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x9d,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x51,0x00,0x00,0x00,0x99,0x01,0x00,0x00,0x08,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x99,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x0a,0x00,0x00,0x00,0x37,0x01,0x00,0x00, 
-0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x38,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x43,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x38,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x38,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x32,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x32,0x01,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q4_K_len = 5776; - -unsigned char dequant_q4_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0xb1,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -6669,1134 +3848,10 @@ unsigned char dequant_q4_K_fp32_data[] = { 0x37,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q4_K_fp32_len = 5940; +const uint64_t dequant_q4_K_len = 5940; unsigned char dequant_q5_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7a,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x55,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x55,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x9e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x9f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x47,0x00,0x03,0x00,0x9f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xc0,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x82,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x00,0x4c,0x00,0x00,0x1d,0x00,0x03,0x00, 
-0x9e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xbf,0x00,0x00,0x00,0x00,0x01,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, -0xbf,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x99,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x62,0x04,0x00,0x00,0x0d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x63,0x04,0x00,0x00, -0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x64,0x04,0x00,0x00,0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x65,0x04,0x00,0x00,0x12,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x04,0x00,0x00, -0x13,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x67,0x04,0x00,0x00,0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x68,0x04,0x00,0x00,0x05,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x04,0x00,0x00, -0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6a,0x04,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x04,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x04,0x00,0x00, -0x07,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x04,0x00,0x00,0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x04,0x00,0x00,0x08,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6f,0x04,0x00,0x00, -0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x70,0x04,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x71,0x04,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x04,0x00,0x00, -0x0a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x73,0x04,0x00,0x00,0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x74,0x04,0x00,0x00,0x0b,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x75,0x04,0x00,0x00, -0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x76,0x04,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x77,0x04,0x00,0x00,0x1d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x78,0x04,0x00,0x00, -0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x79,0x04,0x00,0x00,0x1f,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xc1,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00,0xc2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xc2,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x8b,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0xaf,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x33,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x59,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x75,0x00,0x00,0x00, 
-0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xbc,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xbc,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0xd6,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xd7,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xdb,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, 
-0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x62,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xea,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xfe,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xff,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x63,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x07,0x01,0x00,0x00,0xf6,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x06,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x08,0x01,0x00,0x00, -0x07,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x11,0x01,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x14,0x01,0x00,0x00, -0x13,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x15,0x01,0x00,0x00,0x14,0x01,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x17,0x01,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x15,0x01,0x00,0x00, -0x18,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0x19,0x01,0x00,0x00,0x37,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x1b,0x01,0x00,0x00, 
-0x1a,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x1b,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1d,0x01,0x00,0x00,0x1c,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x19,0x01,0x00,0x00, -0x64,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x20,0x01,0x00,0x00,0x1f,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0x20,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x23,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x24,0x01,0x00,0x00, -0x23,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x25,0x01,0x00,0x00,0x24,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x27,0x01,0x00,0x00, -0x26,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x29,0x01,0x00,0x00,0x27,0x01,0x00,0x00, -0x1d,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x29,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x2b,0x01,0x00,0x00,0x24,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x2c,0x01,0x00,0x00,0x2b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00,0x2c,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2f,0x01,0x00,0x00, -0x2d,0x01,0x00,0x00,0x21,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x30,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0x2a,0x01,0x00,0x00,0x30,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x32,0x01,0x00,0x00,0x31,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x33,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0x11,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x3b,0x01,0x00,0x00,0x33,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x3c,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x39,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x3c,0x01,0x00,0x00, -0x3b,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x65,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x44,0x01,0x00,0x00, -0x33,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x45,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x45,0x01,0x00,0x00,0x44,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x50,0x01,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x51,0x01,0x00,0x00,0x50,0x01,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x52,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x54,0x01,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x52,0x01,0x00,0x00,0x55,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0xa3,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, 
-0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x5c,0x01,0x00,0x00, -0x56,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5e,0x01,0x00,0x00,0x5d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x62,0x01,0x00,0x00,0x61,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x63,0x01,0x00,0x00, -0x62,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x01,0x00,0x00,0x63,0x01,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x66,0x01,0x00,0x00, -0x64,0x01,0x00,0x00,0x5a,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x67,0x01,0x00,0x00,0x66,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x68,0x01,0x00,0x00, -0x61,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x69,0x01,0x00,0x00,0x68,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6a,0x01,0x00,0x00, -0x69,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x6a,0x01,0x00,0x00,0x5e,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, -0x6c,0x01,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x6e,0x01,0x00,0x00,0x67,0x01,0x00,0x00,0x6d,0x01,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x6f,0x01,0x00,0x00, -0x6e,0x01,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x70,0x01,0x00,0x00,0x6f,0x01,0x00,0x00, -0x4e,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x70,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x76,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x01,0x00,0x00,0xa9,0x00,0x00,0x00, -0x66,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x81,0x01,0x00,0x00,0x70,0x01,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x82,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x82,0x01,0x00,0x00,0x81,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x8d,0x01,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x8f,0x01,0x00,0x00, -0x8e,0x01,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x91,0x01,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x92,0x01,0x00,0x00, -0x91,0x01,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x93,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x92,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x94,0x01,0x00,0x00, -0x93,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, 
-0x09,0x00,0x00,0x00,0x95,0x01,0x00,0x00,0x94,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x96,0x01,0x00,0x00,0x95,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x97,0x01,0x00,0x00, -0x96,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x99,0x01,0x00,0x00,0x93,0x01,0x00,0x00,0x48,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x9a,0x01,0x00,0x00, -0x99,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x9a,0x01,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x9d,0x01,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x9e,0x01,0x00,0x00,0x9d,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x9f,0x01,0x00,0x00, -0x9e,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x9f,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x01,0x00,0x00,0xa0,0x01,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa3,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x97,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xa4,0x01,0x00,0x00, -0xa3,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xa5,0x01,0x00,0x00,0x9e,0x01,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xa6,0x01,0x00,0x00, -0xa5,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa7,0x01,0x00,0x00,0xa6,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x01,0x00,0x00,0xa7,0x01,0x00,0x00, -0x9b,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0xa9,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xab,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0xaa,0x01,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xac,0x01,0x00,0x00,0xab,0x01,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xad,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x01,0x00,0x00,0xa9,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xb5,0x01,0x00,0x00,0xad,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xb6,0x01,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb3,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x67,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xbe,0x01,0x00,0x00,0xad,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0xbf,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xbd,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xbf,0x01,0x00,0x00, -0xbe,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xca,0x01,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xcb,0x01,0x00,0x00, -0xca,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xcc,0x01,0x00,0x00,0xcb,0x01,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xcf,0x01,0x00,0x00,0xce,0x01,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xcc,0x01,0x00,0x00, -0xcf,0x01,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd1,0x01,0x00,0x00,0xd0,0x01,0x00,0x00,0x68,0x04,0x00,0x00, 
-0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xd2,0x01,0x00,0x00, -0xd1,0x01,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd3,0x01,0x00,0x00,0xd2,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd4,0x01,0x00,0x00,0xd3,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xd6,0x01,0x00,0x00,0xd0,0x01,0x00,0x00, -0x63,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xd7,0x01,0x00,0x00,0xd6,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd8,0x01,0x00,0x00, -0xd7,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xda,0x01,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x68,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0xdb,0x01,0x00,0x00, -0xda,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xdc,0x01,0x00,0x00,0xdb,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdd,0x01,0x00,0x00,0xdc,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xde,0x01,0x00,0x00, -0xdd,0x01,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xde,0x01,0x00,0x00, -0xd4,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe0,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0xe2,0x01,0x00,0x00,0xdb,0x01,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xe3,0x01,0x00,0x00,0xe2,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xe4,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xe4,0x01,0x00,0x00,0xd8,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xe7,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xe8,0x01,0x00,0x00, -0xe1,0x01,0x00,0x00,0xe7,0x01,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xe9,0x01,0x00,0x00,0xe8,0x01,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf0,0x01,0x00,0x00, -0xa9,0x00,0x00,0x00,0x68,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xf2,0x01,0x00,0x00,0xea,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0xf3,0x01,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf0,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xf3,0x01,0x00,0x00, -0xf2,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfa,0x01,0x00,0x00,0xa9,0x00,0x00,0x00,0x69,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xfb,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xfa,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xfc,0x01,0x00,0x00,0xfb,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x05,0x02,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x07,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x08,0x02,0x00,0x00,0x07,0x02,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x0b,0x02,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0c,0x02,0x00,0x00,0x0b,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x0c,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00, 
-0x6a,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x0f,0x02,0x00,0x00,0x0e,0x02,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x10,0x02,0x00,0x00, -0x0f,0x02,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x11,0x02,0x00,0x00,0x10,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x13,0x02,0x00,0x00, -0x0d,0x02,0x00,0x00,0x65,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x14,0x02,0x00,0x00,0x13,0x02,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x02,0x00,0x00,0x14,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x17,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x6a,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x18,0x02,0x00,0x00,0x17,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x19,0x02,0x00,0x00,0x18,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x02,0x00,0x00, -0x19,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x02,0x00,0x00,0x1a,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x02,0x00,0x00, -0x1b,0x02,0x00,0x00,0x11,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x1e,0x02,0x00,0x00,0x1d,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x1f,0x02,0x00,0x00, -0x18,0x02,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x1f,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x20,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x23,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x15,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x24,0x02,0x00,0x00, -0x23,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x25,0x02,0x00,0x00,0x1e,0x02,0x00,0x00,0x24,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x26,0x02,0x00,0x00, -0x25,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x27,0x02,0x00,0x00,0x26,0x02,0x00,0x00, -0x05,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x6a,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x2f,0x02,0x00,0x00, -0x27,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x30,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x30,0x02,0x00,0x00,0x2f,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x37,0x02,0x00,0x00,0xa9,0x00,0x00,0x00, -0x6b,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x38,0x02,0x00,0x00,0x27,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x39,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x37,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x39,0x02,0x00,0x00,0x38,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x42,0x02,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x45,0x02,0x00,0x00,0x44,0x02,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x46,0x02,0x00,0x00, -0x45,0x02,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x48,0x02,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x49,0x02,0x00,0x00, -0x48,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x4a,0x02,0x00,0x00,0x46,0x02,0x00,0x00,0x49,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x4b,0x02,0x00,0x00, 
-0x4a,0x02,0x00,0x00,0x6c,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x4c,0x02,0x00,0x00,0x4b,0x02,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x4d,0x02,0x00,0x00,0x4c,0x02,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4e,0x02,0x00,0x00, -0x4d,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x50,0x02,0x00,0x00,0x4a,0x02,0x00,0x00,0x66,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x51,0x02,0x00,0x00, -0x50,0x02,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x02,0x00,0x00,0x51,0x02,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x54,0x02,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x6c,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x55,0x02,0x00,0x00,0x54,0x02,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x56,0x02,0x00,0x00, -0x55,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x57,0x02,0x00,0x00,0x56,0x02,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x58,0x02,0x00,0x00,0x57,0x02,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5a,0x02,0x00,0x00,0x58,0x02,0x00,0x00,0x4e,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x5b,0x02,0x00,0x00, -0x5a,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x5c,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x5d,0x02,0x00,0x00, -0x5c,0x02,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x5d,0x02,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x5e,0x02,0x00,0x00, -0x52,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x61,0x02,0x00,0x00,0x60,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x62,0x02,0x00,0x00,0x5b,0x02,0x00,0x00, -0x61,0x02,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x63,0x02,0x00,0x00,0x62,0x02,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x64,0x02,0x00,0x00, -0x63,0x02,0x00,0x00,0x42,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0xa9,0x00,0x00,0x00, -0x6c,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x6c,0x02,0x00,0x00,0x64,0x02,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x6d,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x6a,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6d,0x02,0x00,0x00,0x6c,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0xa9,0x00,0x00,0x00,0x6d,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x64,0x02,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x76,0x02,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x74,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x76,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x7f,0x02,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x81,0x02,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x81,0x02,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x83,0x02,0x00,0x00,0x82,0x02,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x85,0x02,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x86,0x02,0x00,0x00,0x85,0x02,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x87,0x02,0x00,0x00,0x83,0x02,0x00,0x00, -0x86,0x02,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, 
-0x88,0x02,0x00,0x00,0x87,0x02,0x00,0x00,0x6e,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x89,0x02,0x00,0x00, -0x88,0x02,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8a,0x02,0x00,0x00,0x89,0x02,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8b,0x02,0x00,0x00,0x8a,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x8d,0x02,0x00,0x00,0x87,0x02,0x00,0x00, -0x67,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x8e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x02,0x00,0x00, -0x8e,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x91,0x02,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x6e,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x92,0x02,0x00,0x00, -0x91,0x02,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x93,0x02,0x00,0x00,0x92,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x94,0x02,0x00,0x00,0x93,0x02,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x95,0x02,0x00,0x00, -0x94,0x02,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x95,0x02,0x00,0x00, -0x8b,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x98,0x02,0x00,0x00,0x97,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x92,0x02,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x9a,0x02,0x00,0x00,0x99,0x02,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x9b,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x9b,0x02,0x00,0x00,0x8f,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x9f,0x02,0x00,0x00, -0x98,0x02,0x00,0x00,0x9e,0x02,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xa1,0x02,0x00,0x00,0xa0,0x02,0x00,0x00,0x7f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa7,0x02,0x00,0x00, -0xa9,0x00,0x00,0x00,0x6e,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xa9,0x02,0x00,0x00,0xa1,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0xaa,0x02,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa7,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xaa,0x02,0x00,0x00, -0xa9,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb1,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x6f,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xb2,0x02,0x00,0x00, -0xa1,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0xb3,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb1,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb3,0x02,0x00,0x00,0xb2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xbe,0x02,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xbf,0x02,0x00,0x00,0xbe,0x02,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0xbf,0x02,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xc2,0x02,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xc3,0x02,0x00,0x00,0xc2,0x02,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xc4,0x02,0x00,0x00, -0xc0,0x02,0x00,0x00,0xc3,0x02,0x00,0x00,0xc2,0x00,0x05,0x00, 
-0x09,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0xc4,0x02,0x00,0x00, -0x70,0x04,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xc6,0x02,0x00,0x00,0xc5,0x02,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0xc6,0x02,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0xc7,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xca,0x02,0x00,0x00, -0xc4,0x02,0x00,0x00,0x69,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0xca,0x02,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcc,0x02,0x00,0x00,0xcb,0x02,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x70,0x04,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0xcf,0x02,0x00,0x00,0xce,0x02,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0xcf,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, -0xd0,0x02,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x02,0x00,0x00,0xd1,0x02,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd4,0x02,0x00,0x00, -0xd2,0x02,0x00,0x00,0xc8,0x02,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0xd5,0x02,0x00,0x00,0xd4,0x02,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xd6,0x02,0x00,0x00, -0xcf,0x02,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xd7,0x02,0x00,0x00,0xd6,0x02,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd8,0x02,0x00,0x00, -0xd7,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xda,0x02,0x00,0x00,0xd8,0x02,0x00,0x00,0xcc,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xdb,0x02,0x00,0x00, -0xda,0x02,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xdc,0x02,0x00,0x00,0xd5,0x02,0x00,0x00,0xdb,0x02,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xdd,0x02,0x00,0x00, -0xdc,0x02,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xde,0x02,0x00,0x00,0xdd,0x02,0x00,0x00, -0xbc,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe4,0x02,0x00,0x00,0xa9,0x00,0x00,0x00,0x70,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0xe6,0x02,0x00,0x00, -0xde,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0xe7,0x02,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xe4,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xe7,0x02,0x00,0x00,0xe6,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xee,0x02,0x00,0x00,0xa9,0x00,0x00,0x00, -0x71,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xef,0x02,0x00,0x00,0xde,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xf0,0x02,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xee,0x02,0x00,0x00, -0x3e,0x00,0x03,0x00,0xf0,0x02,0x00,0x00,0xef,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xf9,0x02,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xfb,0x02,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xfc,0x02,0x00,0x00,0xfb,0x02,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xfd,0x02,0x00,0x00, -0xfc,0x02,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xff,0x02,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x00,0x03,0x00,0x00, -0xff,0x02,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x01,0x03,0x00,0x00,0xfd,0x02,0x00,0x00,0x00,0x03,0x00,0x00, 
-0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x02,0x03,0x00,0x00, -0x01,0x03,0x00,0x00,0x72,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x03,0x03,0x00,0x00,0x02,0x03,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x04,0x03,0x00,0x00,0x03,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x05,0x03,0x00,0x00, -0x04,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x07,0x03,0x00,0x00,0x01,0x03,0x00,0x00,0x6b,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x08,0x03,0x00,0x00, -0x07,0x03,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x03,0x00,0x00,0x08,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0x0b,0x03,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x72,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x0c,0x03,0x00,0x00,0x0b,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x0d,0x03,0x00,0x00, -0x0c,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0e,0x03,0x00,0x00,0x0d,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0f,0x03,0x00,0x00,0x0e,0x03,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x11,0x03,0x00,0x00,0x0f,0x03,0x00,0x00,0x05,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x12,0x03,0x00,0x00, -0x11,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x13,0x03,0x00,0x00,0x0c,0x03,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x14,0x03,0x00,0x00, -0x13,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x15,0x03,0x00,0x00,0x14,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x17,0x03,0x00,0x00,0x15,0x03,0x00,0x00, -0x09,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x18,0x03,0x00,0x00,0x17,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x19,0x03,0x00,0x00,0x12,0x03,0x00,0x00, -0x18,0x03,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x1a,0x03,0x00,0x00,0x19,0x03,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x1b,0x03,0x00,0x00, -0x1a,0x03,0x00,0x00,0xf9,0x02,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x21,0x03,0x00,0x00,0xa9,0x00,0x00,0x00, -0x72,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x23,0x03,0x00,0x00,0x1b,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x24,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x21,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0x24,0x03,0x00,0x00,0x23,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2b,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x73,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x2c,0x03,0x00,0x00,0x1b,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x2d,0x03,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x2b,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x2d,0x03,0x00,0x00, -0x2c,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x36,0x03,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0x38,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x39,0x03,0x00,0x00, -0x38,0x03,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x3a,0x03,0x00,0x00,0x39,0x03,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x3c,0x03,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x3d,0x03,0x00,0x00,0x3c,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x3e,0x03,0x00,0x00,0x3a,0x03,0x00,0x00, 
-0x3d,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x3f,0x03,0x00,0x00,0x3e,0x03,0x00,0x00,0x74,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x40,0x03,0x00,0x00, -0x3f,0x03,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x41,0x03,0x00,0x00,0x40,0x03,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x42,0x03,0x00,0x00,0x41,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x44,0x03,0x00,0x00,0x3e,0x03,0x00,0x00, -0x6d,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x45,0x03,0x00,0x00,0x44,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x03,0x00,0x00, -0x45,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x48,0x03,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x74,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x49,0x03,0x00,0x00, -0x48,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4a,0x03,0x00,0x00,0x49,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x4b,0x03,0x00,0x00,0x4a,0x03,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x03,0x00,0x00, -0x4b,0x03,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x03,0x00,0x00,0x4c,0x03,0x00,0x00, -0x42,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x4f,0x03,0x00,0x00,0x4e,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x50,0x03,0x00,0x00,0x49,0x03,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x51,0x03,0x00,0x00,0x50,0x03,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x52,0x03,0x00,0x00,0x51,0x03,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x03,0x00,0x00, -0x52,0x03,0x00,0x00,0x46,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x55,0x03,0x00,0x00,0x54,0x03,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x56,0x03,0x00,0x00, -0x4f,0x03,0x00,0x00,0x55,0x03,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x57,0x03,0x00,0x00,0x56,0x03,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x58,0x03,0x00,0x00,0x57,0x03,0x00,0x00,0x36,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x74,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x60,0x03,0x00,0x00,0x58,0x03,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x61,0x03,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5e,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0x61,0x03,0x00,0x00, -0x60,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x03,0x00,0x00,0xa9,0x00,0x00,0x00,0x75,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x69,0x03,0x00,0x00, -0x58,0x03,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x6a,0x03,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x68,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6a,0x03,0x00,0x00,0x69,0x03,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x73,0x03,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x75,0x03,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x76,0x03,0x00,0x00,0x75,0x03,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x77,0x03,0x00,0x00,0x76,0x03,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x79,0x03,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x7a,0x03,0x00,0x00,0x79,0x03,0x00,0x00, -0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x7b,0x03,0x00,0x00, 
-0x77,0x03,0x00,0x00,0x7a,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x7c,0x03,0x00,0x00,0x7b,0x03,0x00,0x00, -0x75,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x7d,0x03,0x00,0x00,0x7c,0x03,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x7e,0x03,0x00,0x00, -0x7d,0x03,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7f,0x03,0x00,0x00,0x7e,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x81,0x03,0x00,0x00, -0x7b,0x03,0x00,0x00,0x6f,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x82,0x03,0x00,0x00,0x81,0x03,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x83,0x03,0x00,0x00,0x82,0x03,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x85,0x03,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x86,0x03,0x00,0x00,0x85,0x03,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x87,0x03,0x00,0x00,0x86,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x03,0x00,0x00, -0x87,0x03,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x03,0x00,0x00,0x88,0x03,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8b,0x03,0x00,0x00, -0x89,0x03,0x00,0x00,0x7f,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x8c,0x03,0x00,0x00,0x8b,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x8d,0x03,0x00,0x00, -0x86,0x03,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x8e,0x03,0x00,0x00,0x8d,0x03,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x03,0x00,0x00, -0x8e,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x03,0x00,0x00,0x8f,0x03,0x00,0x00,0x83,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x92,0x03,0x00,0x00, -0x91,0x03,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x93,0x03,0x00,0x00,0x8c,0x03,0x00,0x00,0x92,0x03,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x94,0x03,0x00,0x00, -0x93,0x03,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x95,0x03,0x00,0x00,0x94,0x03,0x00,0x00, -0x73,0x03,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x9b,0x03,0x00,0x00,0xa9,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x9d,0x03,0x00,0x00, -0x95,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x9e,0x03,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x9b,0x03,0x00,0x00,0x3e,0x00,0x03,0x00, -0x9e,0x03,0x00,0x00,0x9d,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa5,0x03,0x00,0x00,0xa9,0x00,0x00,0x00, -0x76,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xa6,0x03,0x00,0x00,0x95,0x03,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xa7,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa5,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa7,0x03,0x00,0x00,0xa6,0x03,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xb0,0x03,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0xb2,0x03,0x00,0x00,0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0xb3,0x03,0x00,0x00,0xb2,0x03,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xb4,0x03,0x00,0x00, -0xb3,0x03,0x00,0x00,0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xb6,0x03,0x00,0x00,0x65,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xb7,0x03,0x00,0x00, -0xb6,0x03,0x00,0x00,0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00, 
-0xb8,0x03,0x00,0x00,0xb4,0x03,0x00,0x00,0xb7,0x03,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xb9,0x03,0x00,0x00, -0xb8,0x03,0x00,0x00,0x62,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xba,0x03,0x00,0x00,0xb9,0x03,0x00,0x00, -0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xbb,0x03,0x00,0x00,0xba,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbc,0x03,0x00,0x00, -0xbb,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xbe,0x03,0x00,0x00,0xb8,0x03,0x00,0x00,0x71,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xbf,0x03,0x00,0x00, -0xbe,0x03,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc0,0x03,0x00,0x00,0xbf,0x03,0x00,0x00, -0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00,0xc2,0x03,0x00,0x00, -0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x62,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0xc3,0x03,0x00,0x00,0xc2,0x03,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xc4,0x03,0x00,0x00, -0xc3,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc5,0x03,0x00,0x00,0xc4,0x03,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc6,0x03,0x00,0x00,0xc5,0x03,0x00,0x00, -0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc8,0x03,0x00,0x00,0xc6,0x03,0x00,0x00,0xbc,0x03,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0xc9,0x03,0x00,0x00, -0xc8,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xca,0x03,0x00,0x00,0xc3,0x03,0x00,0x00,0x6f,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xcb,0x03,0x00,0x00, -0xca,0x03,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcc,0x03,0x00,0x00,0xcb,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xce,0x03,0x00,0x00,0xcc,0x03,0x00,0x00, -0xc0,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xcf,0x03,0x00,0x00,0xce,0x03,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0xd0,0x03,0x00,0x00,0xc9,0x03,0x00,0x00, -0xcf,0x03,0x00,0x00,0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0xd1,0x03,0x00,0x00,0xd0,0x03,0x00,0x00,0xcd,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0xd2,0x03,0x00,0x00, -0xd1,0x03,0x00,0x00,0xb0,0x03,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd8,0x03,0x00,0x00,0xa9,0x00,0x00,0x00, -0x62,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0xda,0x03,0x00,0x00,0xd2,0x03,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0xdb,0x03,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xd8,0x03,0x00,0x00, -0x3e,0x00,0x03,0x00,0xdb,0x03,0x00,0x00,0xda,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xe2,0x03,0x00,0x00, -0xa9,0x00,0x00,0x00,0x77,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0xe3,0x03,0x00,0x00,0xd2,0x03,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0xe4,0x03,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xe2,0x03,0x00,0x00,0x3e,0x00,0x03,0x00,0xe4,0x03,0x00,0x00, -0xe3,0x03,0x00,0x00,0x3d,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0xed,0x03,0x00,0x00,0x5a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4d,0x00,0x00,0x00,0xef,0x03,0x00,0x00,0x60,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0xf0,0x03,0x00,0x00, -0xef,0x03,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xf1,0x03,0x00,0x00,0xf0,0x03,0x00,0x00,0x48,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0xf3,0x03,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0xf4,0x03,0x00,0x00,0xf3,0x03,0x00,0x00,0xc5,0x00,0x05,0x00, 
-0x09,0x00,0x00,0x00,0xf5,0x03,0x00,0x00,0xf1,0x03,0x00,0x00, -0xf4,0x03,0x00,0x00,0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xf6,0x03,0x00,0x00,0xf5,0x03,0x00,0x00,0x64,0x04,0x00,0x00, -0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0xf7,0x03,0x00,0x00, -0xf6,0x03,0x00,0x00,0x6f,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xf8,0x03,0x00,0x00,0xf7,0x03,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xf9,0x03,0x00,0x00,0xf8,0x03,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0xfb,0x03,0x00,0x00,0xf5,0x03,0x00,0x00, -0x73,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0xfc,0x03,0x00,0x00,0xfb,0x03,0x00,0x00,0x51,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xfd,0x03,0x00,0x00, -0xfc,0x03,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0xff,0x03,0x00,0x00,0x57,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x64,0x04,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x00,0x04,0x00,0x00, -0xff,0x03,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x01,0x04,0x00,0x00,0x00,0x04,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x02,0x04,0x00,0x00,0x01,0x04,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x03,0x04,0x00,0x00, -0x02,0x04,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x05,0x04,0x00,0x00,0x03,0x04,0x00,0x00, -0xf9,0x03,0x00,0x00,0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00, -0x06,0x04,0x00,0x00,0x05,0x04,0x00,0x00,0xc2,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x07,0x04,0x00,0x00,0x00,0x04,0x00,0x00, -0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x08,0x04,0x00,0x00,0x07,0x04,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x04,0x00,0x00,0x08,0x04,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x0b,0x04,0x00,0x00, -0x09,0x04,0x00,0x00,0xfd,0x03,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x0c,0x04,0x00,0x00,0x0b,0x04,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x0d,0x04,0x00,0x00, -0x06,0x04,0x00,0x00,0x0c,0x04,0x00,0x00,0x83,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x0e,0x04,0x00,0x00,0x0d,0x04,0x00,0x00, -0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x0f,0x04,0x00,0x00,0x0e,0x04,0x00,0x00,0xed,0x03,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x15,0x04,0x00,0x00, -0xa9,0x00,0x00,0x00,0x64,0x04,0x00,0x00,0x51,0x00,0x05,0x00, -0x4a,0x00,0x00,0x00,0x17,0x04,0x00,0x00,0x0f,0x04,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00, -0x18,0x04,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x15,0x04,0x00,0x00,0x3e,0x00,0x03,0x00,0x18,0x04,0x00,0x00, -0x17,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1f,0x04,0x00,0x00,0xa9,0x00,0x00,0x00,0x78,0x04,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x20,0x04,0x00,0x00, -0x0f,0x04,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x21,0x04,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1f,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x21,0x04,0x00,0x00,0x20,0x04,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x2a,0x04,0x00,0x00,0x5a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00,0x2c,0x04,0x00,0x00, -0x60,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x2d,0x04,0x00,0x00,0x2c,0x04,0x00,0x00,0xc4,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x2e,0x04,0x00,0x00,0x2d,0x04,0x00,0x00, -0x48,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4d,0x00,0x00,0x00, -0x30,0x04,0x00,0x00,0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x31,0x04,0x00,0x00,0x30,0x04,0x00,0x00, 
-0xc5,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x32,0x04,0x00,0x00, -0x2e,0x04,0x00,0x00,0x31,0x04,0x00,0x00,0xc2,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x33,0x04,0x00,0x00,0x32,0x04,0x00,0x00, -0x88,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x09,0x00,0x00,0x00, -0x34,0x04,0x00,0x00,0x33,0x04,0x00,0x00,0x6f,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x35,0x04,0x00,0x00, -0x34,0x04,0x00,0x00,0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x36,0x04,0x00,0x00,0x35,0x04,0x00,0x00, -0xc2,0x00,0x05,0x00,0x09,0x00,0x00,0x00,0x38,0x04,0x00,0x00, -0x32,0x04,0x00,0x00,0x75,0x04,0x00,0x00,0xc7,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x39,0x04,0x00,0x00,0x38,0x04,0x00,0x00, -0x51,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3a,0x04,0x00,0x00,0x39,0x04,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x3c,0x04,0x00,0x00,0x57,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x37,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x3d,0x04,0x00,0x00,0x3c,0x04,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x3e,0x04,0x00,0x00,0x3d,0x04,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x3f,0x04,0x00,0x00, -0x3e,0x04,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x40,0x04,0x00,0x00,0x3f,0x04,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x42,0x04,0x00,0x00, -0x40,0x04,0x00,0x00,0x36,0x04,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x43,0x04,0x00,0x00,0x42,0x04,0x00,0x00, -0xc2,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x44,0x04,0x00,0x00, -0x3d,0x04,0x00,0x00,0x6f,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x45,0x04,0x00,0x00,0x44,0x04,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x46,0x04,0x00,0x00, -0x45,0x04,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x04,0x00,0x00,0x46,0x04,0x00,0x00,0x3a,0x04,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4a,0x00,0x00,0x00,0x49,0x04,0x00,0x00, -0x48,0x04,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x4a,0x04,0x00,0x00,0x43,0x04,0x00,0x00,0x49,0x04,0x00,0x00, -0x83,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x4b,0x04,0x00,0x00, -0x4a,0x04,0x00,0x00,0xcd,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x4c,0x04,0x00,0x00,0x4b,0x04,0x00,0x00, -0x2a,0x04,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x52,0x04,0x00,0x00,0xa9,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00,0x54,0x04,0x00,0x00, -0x4c,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x59,0x00,0x00,0x00,0x55,0x04,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x52,0x04,0x00,0x00,0x3e,0x00,0x03,0x00, -0x55,0x04,0x00,0x00,0x54,0x04,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x04,0x00,0x00,0xa9,0x00,0x00,0x00, -0x79,0x04,0x00,0x00,0x51,0x00,0x05,0x00,0x4a,0x00,0x00,0x00, -0x5d,0x04,0x00,0x00,0x4c,0x04,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x59,0x00,0x00,0x00,0x5e,0x04,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5c,0x04,0x00,0x00, -0x3e,0x00,0x03,0x00,0x5e,0x04,0x00,0x00,0x5d,0x04,0x00,0x00, -0xf9,0x00,0x02,0x00,0xc1,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xc1,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q5_0_len = 13428; - -unsigned char dequant_q5_0_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x9b,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, @@ -8960,1079 +5015,10 @@ unsigned char 
dequant_q5_0_fp32_data[] = {
 0xc3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xc3,0x00,0x00,0x00,
 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00,
 };
-const uint64_t dequant_q5_0_fp32_len = 13952;
+const uint64_t dequant_q5_0_len = 13952;
 unsigned char dequant_q5_1_data[] = {
 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00,
-0x63,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00,
-[... further removed rows of machine-generated SPIR-V bytecode for the old fp16 dequant_q5_1 shader ...]
-0xf9,0x00,0x02,0x00,0xba,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,
-0xba,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00,
-
-};
-const uint64_t dequant_q5_1_len = 12768;
-
-unsigned char dequant_q5_1_fp32_data[] = {
-0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00,
 0x95,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00,
 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,
 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00,
@@ -11163,505 +6149,10 @@ unsigned char dequant_q5_1_fp32_data[] = {
 0xbe,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00,
 };
-const uint64_t dequant_q5_1_fp32_len = 13548;
+const uint64_t dequant_q5_1_len = 13548;
 unsigned char dequant_q5_K_data[] = {
 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00,
-0x99,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00,
-0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00,
-0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00,
-0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,
-[... removed fp16 dequant_q5_K SPIR-V bytecode continues below ...]
-0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x81,0x01,0x00,0x00, -0x40,0x00,0x00,0x00,0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00, -0x82,0x01,0x00,0x00,0x81,0x01,0x00,0x00,0x58,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x85,0x01,0x00,0x00,0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x83,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x18,0x00,0x00,0x00,0x84,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x84,0x01,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x06,0x00,0x00,0x00,0x8b,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0x84,0x01,0x00,0x00,0x80,0x01,0x00,0x00,0x0d,0x00,0x00,0x00, -0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x8b,0x01,0x00,0x00,0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x8b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x30,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00, -0x34,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x41,0x00,0x08,0x00, 
-0x53,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x72,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x78,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x73,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x8d,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x78,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x99,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x41,0x00,0x08,0x00, 
-0x78,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xae,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x73,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x73,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x8c,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x46,0x00,0x00,0x00,0x8c,0x01,0x00,0x00,0x81,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0xa3,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x8c,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x8d,0x01,0x00,0x00,0x85,0x00,0x05,0x00,0x42,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xbe,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x71,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xbd,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x78,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xc1,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xc3,0x00,0x00,0x00,0xc2,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc5,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0xc7,0x00,0x00,0x00, -0xc6,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0xc7,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, 
-0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xce,0x00,0x00,0x00, -0xcd,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xcf,0x00,0x00,0x00,0xce,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xbe,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0xd8,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xdb,0x00,0x00,0x00,0xda,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0xdb,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xe0,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0xdf,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xe0,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xe2,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0xe3,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xe5,0x00,0x00,0x00,0xe4,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0xe5,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0xe6,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0xe7,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0xee,0x00,0x00,0x00,0xd8,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf2,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0xf4,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xbe,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xbe,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x46,0x00,0x00,0x00,0x8f,0x01,0x00,0x00,0xd3,0x00,0x00,0x00, -0xbd,0x00,0x00,0x00,0xf7,0x00,0x00,0x00,0xd4,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x46,0x00,0x00,0x00,0x8e,0x01,0x00,0x00, -0xc8,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0xe9,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, 
-0xfb,0x00,0x00,0x00,0x8e,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x42,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x8f,0x01,0x00,0x00,0x85,0x00,0x05,0x00, -0x42,0x00,0x00,0x00,0x01,0x01,0x00,0x00,0x5a,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x05,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x06,0x01,0x00,0x00, -0x05,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x07,0x01,0x00,0x00,0x06,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x41,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x0c,0x01,0x00,0x00,0x29,0x00,0x00,0x00,0x0b,0x01,0x00,0x00, -0x72,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x0d,0x01,0x00,0x00, -0x0c,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x0e,0x01,0x00,0x00,0x0d,0x01,0x00,0x00,0x41,0x00,0x08,0x00, -0x78,0x00,0x00,0x00,0x17,0x01,0x00,0x00,0x51,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xdf,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x18,0x01,0x00,0x00,0x17,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x19,0x01,0x00,0x00,0x18,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x19,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1b,0x01,0x00,0x00,0x1a,0x01,0x00,0x00,0x95,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00,0x1e,0x01,0x00,0x00, -0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x1f,0x01,0x00,0x00,0x1e,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x21,0x01,0x00,0x00, -0x1f,0x01,0x00,0x00,0x07,0x01,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x22,0x01,0x00,0x00,0x21,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x01,0x00,0x00, -0x22,0x01,0x00,0x00,0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x23,0x01,0x00,0x00,0x09,0x00,0x00,0x00, -0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x24,0x01,0x00,0x00,0x39,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x26,0x01,0x00,0x00, -0x1b,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x6f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x26,0x01,0x00,0x00, -0x7f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x96,0x01,0x00,0x00, -0xba,0x00,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x2a,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x27,0x01,0x00,0x00,0x96,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x2b,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x2b,0x01,0x00,0x00,0x2a,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x6b,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0x32,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0xdf,0x00,0x00,0x00,0x31,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x33,0x01,0x00,0x00, -0x32,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x33,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x35,0x01,0x00,0x00,0x34,0x01,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x36,0x01,0x00,0x00, 
-0x35,0x01,0x00,0x00,0x95,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x39,0x01,0x00,0x00,0x63,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x78,0x00,0x00,0x00, -0x3a,0x01,0x00,0x00,0x51,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x39,0x01,0x00,0x00, -0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x3b,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x3d,0x01,0x00,0x00,0x3b,0x01,0x00,0x00,0x07,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x3d,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3f,0x01,0x00,0x00,0x3e,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x3f,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x40,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x42,0x01,0x00,0x00,0x36,0x01,0x00,0x00,0x41,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x43,0x01,0x00,0x00, -0x42,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x46,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x43,0x01,0x00,0x00,0x96,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x47,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x2d,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x47,0x01,0x00,0x00,0x46,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x01,0x00,0x00, -0x64,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x46,0x00,0x00,0x00,0x4e,0x01,0x00,0x00,0x17,0x01,0x00,0x00, -0xc2,0x00,0x05,0x00,0x46,0x00,0x00,0x00,0x4f,0x01,0x00,0x00, -0x4e,0x01,0x00,0x00,0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x50,0x01,0x00,0x00,0x4f,0x01,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x51,0x01,0x00,0x00, -0x50,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x55,0x01,0x00,0x00,0x1e,0x01,0x00,0x00,0xc7,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x57,0x01,0x00,0x00,0x55,0x01,0x00,0x00, -0x0e,0x01,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x57,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x59,0x01,0x00,0x00,0x58,0x01,0x00,0x00, -0xab,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x5a,0x01,0x00,0x00, -0x59,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00, -0x06,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x5a,0x01,0x00,0x00, -0x39,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x01,0x00,0x00,0x51,0x01,0x00,0x00, -0x5b,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x5c,0x01,0x00,0x00,0x7f,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x98,0x01,0x00,0x00,0x01,0x01,0x00,0x00, -0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00,0x60,0x01,0x00,0x00, -0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0xfc,0x00,0x00,0x00, -0x5d,0x01,0x00,0x00,0x98,0x01,0x00,0x00,0x41,0x00,0x06,0x00, -0x53,0x00,0x00,0x00,0x61,0x01,0x00,0x00,0x12,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0x49,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x61,0x01,0x00,0x00,0x60,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x64,0x00,0x00,0x00, -0x63,0x01,0x00,0x00,0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00, -0x6a,0x01,0x00,0x00,0x32,0x01,0x00,0x00,0xc2,0x00,0x05,0x00, -0x46,0x00,0x00,0x00,0x6b,0x01,0x00,0x00,0x6a,0x01,0x00,0x00, -0x70,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x6b,0x01,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6d,0x01,0x00,0x00,0x6c,0x01,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x46,0x00,0x00,0x00,0x72,0x01,0x00,0x00, -0x3a,0x01,0x00,0x00,0xc7,0x00,0x05,0x00,0x46,0x00,0x00,0x00, -0x74,0x01,0x00,0x00,0x72,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x75,0x01,0x00,0x00, -0x74,0x01,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x76,0x01,0x00,0x00,0x75,0x01,0x00,0x00,0xab,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x76,0x01,0x00,0x00, -0x09,0x00,0x00,0x00,0xa9,0x00,0x06,0x00,0x06,0x00,0x00,0x00, -0x78,0x01,0x00,0x00,0x77,0x01,0x00,0x00,0x39,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x78,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x42,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x0c,0x00,0x08,0x00,0x42,0x00,0x00,0x00, -0x7d,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0xfc,0x00,0x00,0x00,0x7a,0x01,0x00,0x00,0x98,0x01,0x00,0x00, -0x41,0x00,0x06,0x00,0x53,0x00,0x00,0x00,0x7e,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x64,0x01,0x00,0x00, -0x3e,0x00,0x03,0x00,0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x8b,0x01,0x00,0x00,0x29,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x11,0x00,0x00,0x00, -0x92,0x01,0x00,0x00,0x85,0x01,0x00,0x00,0x0a,0x00,0x00,0x00, -0x88,0x01,0x00,0x00,0x2f,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x89,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x92,0x01,0x00,0x00,0x83,0x01,0x00,0x00,0x89,0x01,0x00,0x00, -0xf8,0x00,0x02,0x00,0x89,0x01,0x00,0x00,0xf9,0x00,0x02,0x00, -0x83,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x83,0x01,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q5_K_len = 5888; - -unsigned char dequant_q5_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0xa0,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -12162,366 +6653,10 @@ unsigned char dequant_q5_K_fp32_data[] = { 0x8a,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q5_K_fp32_len = 5988; +const uint64_t dequant_q5_K_len = 5988; unsigned char dequant_q6_K_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x0a,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 
-0x48,0x00,0x05,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5b,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x62,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x62,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x62,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x62,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x64,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x64,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x66,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x77,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x79,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xff,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x14,0x00,0x02,0x00,0x11,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x15,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x19,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x1e,0x00,0x06,0x00,0x23,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x24,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x26,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, 
-0x16,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x4b,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x5d,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x62,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x63,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x64,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x65,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x76,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x78,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x78,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xdc,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xe1,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x15,0x00,0x00,0x00,0xff,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0xfe,0x00,0x00,0x00,0xfe,0x00,0x00,0x00, -0x2a,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x02,0x01,0x00,0x00, -0x29,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x05,0x01,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x00,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x18,0x00,0x00,0x00, -0x01,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x01,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x0a,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x06,0x00,0x00,0x00, -0x08,0x01,0x00,0x00,0x09,0x00,0x00,0x00,0x01,0x01,0x00,0x00, -0xfd,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, 
-0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x10,0x00,0x00,0x00,0xf6,0x00,0x04,0x00,0x0c,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x12,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0b,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x19,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x08,0x01,0x00,0x00,0x80,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x26,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x28,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x30,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2e,0x00,0x00,0x00,0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x2f,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x0c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x19,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0x35,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x35,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x43,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x43,0x00,0x00,0x00, -0x46,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, 
-0x6d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x73,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x61,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x73,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x9c,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0xa3,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x6c,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0xaa,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xab,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xad,0x00,0x00,0x00, 
-0xac,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0xb2,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xb4,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x61,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x85,0x00,0x05,0x00,0x61,0x00,0x00,0x00, -0xba,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x73,0x00,0x00,0x00,0xbb,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xbb,0x00,0x00,0x00,0xba,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xbd,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0xc2,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0xc9,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xca,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0xcb,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00,0xcf,0x00,0x00,0x00, -0xce,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd0,0x00,0x00,0x00,0xcf,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0xd0,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0xd1,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xd2,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xd3,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd6,0x00,0x00,0x00,0xd5,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0xd6,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x61,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0xd7,0x00,0x00,0x00, -0x85,0x00,0x05,0x00,0x61,0x00,0x00,0x00,0xd9,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0xd8,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x73,0x00,0x00,0x00,0xda,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0xbd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, 
-0xda,0x00,0x00,0x00,0xd9,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xe2,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0xe1,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x81,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0xe2,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0xe4,0x00,0x00,0x00,0xe3,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xe4,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0xea,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x57,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x14,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0xeb,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0xec,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x57,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xe1,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x14,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xf1,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0xf1,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf3,0x00,0x00,0x00, -0xf2,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xf3,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0xf5,0x00,0x00,0x00,0xf4,0x00,0x00,0x00,0x72,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00,0xf5,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf7,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xe5,0x00,0x00,0x00, -0xf7,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x61,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0x85,0x00,0x05,0x00, -0x61,0x00,0x00,0x00,0xfa,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x73,0x00,0x00,0x00, -0xfb,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0xdd,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xfb,0x00,0x00,0x00, -0xfa,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0d,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xfd,0x00,0x00,0x00,0x08,0x01,0x00,0x00, -0x29,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x0a,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x0c,0x00,0x00,0x00,0xf5,0x00,0x07,0x00, -0x11,0x00,0x00,0x00,0x09,0x01,0x00,0x00,0x02,0x01,0x00,0x00, -0x0a,0x00,0x00,0x00,0x05,0x01,0x00,0x00,0x2f,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x06,0x01,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x09,0x01,0x00,0x00,0x00,0x01,0x00,0x00, -0x06,0x01,0x00,0x00,0xf8,0x00,0x02,0x00,0x06,0x01,0x00,0x00, -0xf9,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0xf8,0x00,0x02,0x00, -0x00,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t dequant_q6_K_len = 4212; - -unsigned char dequant_q6_K_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x10,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00, @@ -12881,647 +7016,10 @@ unsigned char dequant_q6_K_fp32_data[] = { 0x06,0x01,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q6_K_fp32_len = 4296; +const uint64_t dequant_q6_K_len = 4296; unsigned char 
dequant_q8_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xd2,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x14,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x14,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x51,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x51,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x6b,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x6e,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x6e,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x14,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x18,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x49,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x4e,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x50,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x51,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x6b,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x2c,0x00,0x06,0x00, -0x0a,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x05,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb8,0x02,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb9,0x02,0x00,0x00,0x07,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xba,0x02,0x00,0x00, -0x08,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbb,0x02,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x0a,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0x0b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xbe,0x02,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xbf,0x02,0x00,0x00,0x0d,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc0,0x02,0x00,0x00, -0x0e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc1,0x02,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x11,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc4,0x02,0x00,0x00,0x12,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0xc5,0x02,0x00,0x00,0x13,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc6,0x02,0x00,0x00, -0x14,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xc7,0x02,0x00,0x00,0x15,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x16,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x17,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xca,0x02,0x00,0x00,0x18,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xcb,0x02,0x00,0x00,0x19,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcc,0x02,0x00,0x00, -0x1a,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xcd,0x02,0x00,0x00,0x1b,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x1d,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xd0,0x02,0x00,0x00,0x1e,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xd1,0x02,0x00,0x00,0x1f,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x8f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0d,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x90,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x87,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0x8b,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x1b,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0xa8,0x00,0x04,0x00, -0x24,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x29,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x2c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2b,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x2f,0x00,0x00,0x00, -0xaf,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x2c,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x2c,0x00,0x00,0x00, -0xf5,0x00,0x07,0x00,0x24,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x29,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x32,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x34,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x33,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8f,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x34,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x18,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x39,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x87,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00, -0x1b,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3e,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x5d,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x18,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x7c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8a,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0xa5,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00, 
-0x76,0x00,0x00,0x00,0x37,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb8,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xb9,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xba,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xba,0x00,0x00,0x00,0xb9,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xc3,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xc5,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xc5,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xc9,0x00,0x00,0x00, -0xc8,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0xc9,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xc6,0x00,0x00,0x00, -0xca,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0xcb,0x00,0x00,0x00,0xc3,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xd2,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xb6,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xd4,0x00,0x00,0x00,0xcc,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xd5,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xd2,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xd5,0x00,0x00,0x00, -0xd4,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xdc,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xb7,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xdd,0x00,0x00,0x00, -0xcc,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xde,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xdc,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xde,0x00,0x00,0x00,0xdd,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xe7,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xe9,0x00,0x00,0x00,0xe8,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xe9,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xed,0x00,0x00,0x00, -0xec,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0xed,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xea,0x00,0x00,0x00, -0xee,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xf0,0x00,0x00,0x00,0xef,0x00,0x00,0x00,0xe7,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf6,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x51,0x00,0x05,0x00, 
-0x49,0x00,0x00,0x00,0xf8,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xf9,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf6,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xf9,0x00,0x00,0x00, -0xf8,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x00,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xb9,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x01,0x01,0x00,0x00, -0xf0,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x02,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x02,0x01,0x00,0x00,0x01,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x0b,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x0c,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x0d,0x01,0x00,0x00,0x0c,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x0e,0x01,0x00,0x00, -0x0d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x10,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x11,0x01,0x00,0x00, -0x10,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x12,0x01,0x00,0x00,0x11,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x13,0x01,0x00,0x00,0x0e,0x01,0x00,0x00, -0x12,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x14,0x01,0x00,0x00,0x13,0x01,0x00,0x00,0x0b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x1a,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xba,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x1c,0x01,0x00,0x00,0x14,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x1d,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x1d,0x01,0x00,0x00, -0x1c,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x24,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xbb,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x25,0x01,0x00,0x00, -0x14,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x26,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x24,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x26,0x01,0x00,0x00,0x25,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x2f,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x30,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x31,0x01,0x00,0x00,0x30,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x32,0x01,0x00,0x00, -0x31,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x34,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x35,0x01,0x00,0x00, -0x34,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x36,0x01,0x00,0x00,0x35,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x37,0x01,0x00,0x00,0x32,0x01,0x00,0x00, -0x36,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x38,0x01,0x00,0x00,0x37,0x01,0x00,0x00,0x2f,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3e,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xbc,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x40,0x01,0x00,0x00,0x38,0x01,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x41,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3e,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x41,0x01,0x00,0x00, -0x40,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xbd,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x49,0x01,0x00,0x00, -0x38,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x4a,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x48,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x4a,0x01,0x00,0x00,0x49,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x53,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x54,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x55,0x01,0x00,0x00,0x54,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x56,0x01,0x00,0x00, -0x55,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x58,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x59,0x01,0x00,0x00, -0x58,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x5a,0x01,0x00,0x00,0x59,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x5b,0x01,0x00,0x00,0x56,0x01,0x00,0x00, -0x5a,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x5c,0x01,0x00,0x00,0x5b,0x01,0x00,0x00,0x53,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x62,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xbe,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x64,0x01,0x00,0x00,0x5c,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x65,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x62,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x65,0x01,0x00,0x00, -0x64,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6c,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xbf,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x6d,0x01,0x00,0x00, -0x5c,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x6e,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x6c,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6e,0x01,0x00,0x00,0x6d,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x77,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x78,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x79,0x01,0x00,0x00,0x78,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x7a,0x01,0x00,0x00, -0x79,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x7c,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x7d,0x01,0x00,0x00, -0x7c,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x7e,0x01,0x00,0x00,0x7d,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x7f,0x01,0x00,0x00,0x7a,0x01,0x00,0x00, -0x7e,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x80,0x01,0x00,0x00,0x7f,0x01,0x00,0x00,0x77,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x86,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xc0,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x88,0x01,0x00,0x00,0x80,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, 
-0x89,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x86,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0x89,0x01,0x00,0x00, -0x88,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x90,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xc1,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x91,0x01,0x00,0x00, -0x80,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x92,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x90,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0x92,0x01,0x00,0x00,0x91,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x9b,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x9c,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x9d,0x01,0x00,0x00,0x9c,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x9e,0x01,0x00,0x00, -0x9d,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xa0,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xa1,0x01,0x00,0x00, -0xa0,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xa2,0x01,0x00,0x00,0xa1,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xa3,0x01,0x00,0x00,0x9e,0x01,0x00,0x00, -0xa2,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xa4,0x01,0x00,0x00,0xa3,0x01,0x00,0x00,0x9b,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xaa,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xc2,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xac,0x01,0x00,0x00,0xa4,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xad,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xaa,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xad,0x01,0x00,0x00, -0xac,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb4,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xc3,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xb5,0x01,0x00,0x00, -0xa4,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xb6,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb4,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb6,0x01,0x00,0x00,0xb5,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xbf,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xc0,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xc1,0x01,0x00,0x00,0xc0,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xc2,0x01,0x00,0x00, -0xc1,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xc4,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xc5,0x01,0x00,0x00, -0xc4,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xc6,0x01,0x00,0x00,0xc5,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xc7,0x01,0x00,0x00,0xc2,0x01,0x00,0x00, -0xc6,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xc8,0x01,0x00,0x00,0xc7,0x01,0x00,0x00,0xbf,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xce,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xc4,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xd0,0x01,0x00,0x00,0xc8,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xd1,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 
-0xce,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xd1,0x01,0x00,0x00, -0xd0,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xd8,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xc5,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xd9,0x01,0x00,0x00, -0xc8,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xda,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xd8,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xda,0x01,0x00,0x00,0xd9,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0xe3,0x01,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0xe4,0x01,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0xe5,0x01,0x00,0x00,0xe4,0x01,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0xe6,0x01,0x00,0x00, -0xe5,0x01,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0xe8,0x01,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0xe9,0x01,0x00,0x00, -0xe8,0x01,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0xea,0x01,0x00,0x00,0xe9,0x01,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0xeb,0x01,0x00,0x00,0xe6,0x01,0x00,0x00, -0xea,0x01,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xec,0x01,0x00,0x00,0xeb,0x01,0x00,0x00,0xe3,0x01,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xf2,0x01,0x00,0x00, -0x76,0x00,0x00,0x00,0xc6,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xf4,0x01,0x00,0x00,0xec,0x01,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xf5,0x01,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xf2,0x01,0x00,0x00,0x3e,0x00,0x03,0x00,0xf5,0x01,0x00,0x00, -0xf4,0x01,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xfc,0x01,0x00,0x00,0x76,0x00,0x00,0x00,0xc7,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xfd,0x01,0x00,0x00, -0xec,0x01,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xfe,0x01,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xfc,0x01,0x00,0x00,0x3e,0x00,0x03,0x00, -0xfe,0x01,0x00,0x00,0xfd,0x01,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x07,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x08,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x09,0x02,0x00,0x00,0x08,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x0a,0x02,0x00,0x00, -0x09,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x0c,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x0d,0x02,0x00,0x00, -0x0c,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x0e,0x02,0x00,0x00,0x0d,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x0f,0x02,0x00,0x00,0x0a,0x02,0x00,0x00, -0x0e,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x10,0x02,0x00,0x00,0x0f,0x02,0x00,0x00,0x07,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x16,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xc8,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x18,0x02,0x00,0x00,0x10,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x19,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x16,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x19,0x02,0x00,0x00, 
-0x18,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x20,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xc9,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x21,0x02,0x00,0x00, -0x10,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x22,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x20,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x22,0x02,0x00,0x00,0x21,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x2b,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x2c,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x2d,0x02,0x00,0x00,0x2c,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x2e,0x02,0x00,0x00, -0x2d,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x30,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xcb,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x31,0x02,0x00,0x00, -0x30,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x32,0x02,0x00,0x00,0x31,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x33,0x02,0x00,0x00,0x2e,0x02,0x00,0x00, -0x32,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x34,0x02,0x00,0x00,0x33,0x02,0x00,0x00,0x2b,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xca,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x3c,0x02,0x00,0x00,0x34,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x3d,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3a,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x3d,0x02,0x00,0x00, -0x3c,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xcb,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x45,0x02,0x00,0x00, -0x34,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x46,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x44,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x46,0x02,0x00,0x00,0x45,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x4f,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x50,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x51,0x02,0x00,0x00,0x50,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x52,0x02,0x00,0x00, -0x51,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x54,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x55,0x02,0x00,0x00, -0x54,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x56,0x02,0x00,0x00,0x55,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x57,0x02,0x00,0x00,0x52,0x02,0x00,0x00, -0x56,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x58,0x02,0x00,0x00,0x57,0x02,0x00,0x00,0x4f,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x5e,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xcc,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x60,0x02,0x00,0x00,0x58,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x61,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5e,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x61,0x02,0x00,0x00, -0x60,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x68,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xcd,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x69,0x02,0x00,0x00, -0x58,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x6a,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x68,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6a,0x02,0x00,0x00,0x69,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x73,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x74,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x75,0x02,0x00,0x00,0x74,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x76,0x02,0x00,0x00, -0x75,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x78,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x79,0x02,0x00,0x00, -0x78,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x7a,0x02,0x00,0x00,0x79,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x7b,0x02,0x00,0x00,0x76,0x02,0x00,0x00, -0x7a,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x7c,0x02,0x00,0x00,0x7b,0x02,0x00,0x00,0x73,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x82,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xce,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0x84,0x02,0x00,0x00,0x7c,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0x85,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x82,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0x85,0x02,0x00,0x00, -0x84,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xcf,0x02,0x00,0x00, -0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0x8d,0x02,0x00,0x00, -0x7c,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0x8e,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8c,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8e,0x02,0x00,0x00,0x8d,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x49,0x00,0x00,0x00,0x97,0x02,0x00,0x00,0x56,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00,0x98,0x02,0x00,0x00, -0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4c,0x00,0x00,0x00,0x99,0x02,0x00,0x00,0x98,0x02,0x00,0x00, -0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00,0x9a,0x02,0x00,0x00, -0x99,0x02,0x00,0x00,0x41,0x00,0x08,0x00,0x5d,0x00,0x00,0x00, -0x9c,0x02,0x00,0x00,0x53,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4c,0x00,0x00,0x00,0x9d,0x02,0x00,0x00, -0x9c,0x02,0x00,0x00,0x6f,0x00,0x04,0x00,0x49,0x00,0x00,0x00, -0x9e,0x02,0x00,0x00,0x9d,0x02,0x00,0x00,0x50,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x9f,0x02,0x00,0x00,0x9a,0x02,0x00,0x00, -0x9e,0x02,0x00,0x00,0x8e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0xa0,0x02,0x00,0x00,0x9f,0x02,0x00,0x00,0x97,0x02,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa6,0x02,0x00,0x00, -0x76,0x00,0x00,0x00,0xd0,0x02,0x00,0x00,0x51,0x00,0x05,0x00, -0x49,0x00,0x00,0x00,0xa8,0x02,0x00,0x00,0xa0,0x02,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x55,0x00,0x00,0x00, -0xa9,0x02,0x00,0x00,0x6e,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa6,0x02,0x00,0x00,0x3e,0x00,0x03,0x00,0xa9,0x02,0x00,0x00, -0xa8,0x02,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb0,0x02,0x00,0x00,0x76,0x00,0x00,0x00,0xd1,0x02,0x00,0x00, 
-0x51,0x00,0x05,0x00,0x49,0x00,0x00,0x00,0xb1,0x02,0x00,0x00, -0xa0,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x55,0x00,0x00,0x00,0xb2,0x02,0x00,0x00,0x6e,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xb0,0x02,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb2,0x02,0x00,0x00,0xb1,0x02,0x00,0x00,0xf9,0x00,0x02,0x00, -0x8f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t dequant_q8_0_len = 7592; - -unsigned char dequant_q8_0_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x23,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, @@ -14262,7 +7760,7 @@ unsigned char dequant_q8_0_fp32_data[] = { 0x95,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t dequant_q8_0_fp32_len = 8868; +const uint64_t dequant_q8_0_len = 8868; unsigned char diag_mask_inf_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, @@ -14530,144 +8028,6 @@ unsigned char f32_to_f16_data[] = { }; const uint64_t f32_to_f16_len = 1596; -unsigned char f32_to_f16_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x09,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x36,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x40,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0c,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x11,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x11,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x33,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x34,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x34,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x34,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x36,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x36,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x3f,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x40,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x40,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x40,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x42,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x42,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 
-0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0e,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x11,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x12,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x12,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x15,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x23,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x32,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x34,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x35,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x34,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x35,0x00,0x00,0x00,0x36,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x3e,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x3f,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x40,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x41,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x40,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x41,0x00,0x00,0x00,0x42,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x4a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3e,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x40,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2c,0x00,0x06,0x00,0x0a,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0e,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x0d,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x09,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 
-0x22,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0xb1,0x00,0x05,0x00, -0x23,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x29,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x27,0x00,0x00,0x00, -0x28,0x00,0x00,0x00,0x29,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x28,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2c,0x00,0x00,0x00,0xb1,0x00,0x05,0x00,0x23,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x29,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x29,0x00,0x00,0x00,0xf5,0x00,0x07,0x00,0x23,0x00,0x00,0x00, -0x2f,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0x05,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x31,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x2f,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x30,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x15,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x39,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3d,0x00,0x00,0x00,0x3b,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x15,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x46,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x4a,0x00,0x00,0x00, -0x4b,0x00,0x00,0x00,0x42,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x3e,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x4b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x32,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x4e,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x36,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3d,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x4f,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x31,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x31,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t f32_to_f16_fp32_len = 1596; - unsigned char gelu_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x4b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, @@ -14798,500 +8158,6 @@ const uint64_t gelu_f32_len = 1484; unsigned char get_rows_f16_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x77,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 
-0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, 
-0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x75,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x76,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x75,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x6b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x58,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x72,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x75,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x75,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_f16_len = 1892; - -unsigned char get_rows_f16_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x60,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x61,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x61,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x4e,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x53,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x60,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x6c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4c,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, 
-0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4e,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x6c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x6c,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x75,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x78,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_f16_f32_len = 1940; - -unsigned char get_rows_f16_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, 
-0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00, 
-0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x51,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 
-0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x78,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_f16_f32_fp32_len = 1932; - -unsigned char get_rows_f16_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, @@ -15455,21 +8321,19 @@ unsigned char get_rows_f16_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_f16_fp32_len = 1948; +const uint64_t get_rows_f16_len = 1948; -unsigned char get_rows_q4_0_data[] = { +unsigned char get_rows_f16_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, +0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, +0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, 0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, @@ -15488,297 +8352,75 @@ unsigned char get_rows_q4_0_data[] = { 0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x8d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x00,0x48,0x00,0x00, -0x1d,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x8e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, 
-0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x66,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8e,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q4_0_len = 2356; - -unsigned char get_rows_q4_0_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x9a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, 
-0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x7b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x7b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x7b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x90,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x66,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x00,0x48,0x00,0x00, -0x1d,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x7b,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x85,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2c,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x62,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x63,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
+0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x65,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x65,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x77,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x51,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x52,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x54,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x51,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x62,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x63,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x6d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, 0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x91,0x00,0x00,0x00, +0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x78,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, 
-0x92,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x79,0x00,0x00,0x00, 0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, 0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, @@ -15800,7 +8442,7 @@ unsigned char get_rows_q4_0_f32_data[] = { 0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, 0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x91,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, 0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, 0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, 0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, @@ -15813,258 +8455,41 @@ unsigned char get_rows_q4_0_f32_data[] = { 0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, 0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x66,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x83,0x00,0x05,0x00, -0x66,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x66,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x85,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x86,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, 
-0x8c,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x85,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8e,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x91,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x91,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q4_0_f32_len = 2404; +0x44,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x3a,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x47,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4c,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x41,0x00,0x00,0x00,0x4c,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x58,0x00,0x00,0x00,0x59,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x51,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x58,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x6e,0x00,0x00,0x00, +0x5b,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x6d,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x75,0x00,0x00,0x00,0x60,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x78,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x78,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -unsigned char get_rows_q4_0_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x79,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x8d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, 
-0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, -0x56,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x1d,0x00,0x03,0x00, -0x79,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x67,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x8e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 
-0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x67,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x67,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x67,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, 
-0x7f,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x1c,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00, -0x8b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x00,0x00,0x00, -0x8a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x8e,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q4_0_f32_fp32_len = 2356; +const uint64_t get_rows_f16_f32_len = 1932; -unsigned char get_rows_q4_0_fp32_data[] = { +unsigned char get_rows_q4_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x98,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -16264,425 +8689,11 @@ unsigned char get_rows_q4_0_fp32_data[] = { 0x8f,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00, 0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q4_0_fp32_len = 2372; +const uint64_t get_rows_q4_0_len = 2372; -unsigned char get_rows_q4_1_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x94,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x91,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x6a,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x7e,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x90,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x92,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x93,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x92,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 
-0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x6a,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x88,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x8f,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x92,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x92,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q4_1_len = 2408; - -unsigned char get_rows_q4_1_f32_data[] = { +unsigned char get_rows_q4_0_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x97,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, 
-0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x7e,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x7f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x94,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 
-0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x6a,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x70,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x7e,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x89,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x93,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x95,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x96,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, 
-0x53,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x6a,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x6a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x6a,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x89,0x00,0x00,0x00,0x8a,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x8a,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x8e,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x89,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0x95,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x95,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q4_1_f32_len = 2456; - -unsigned char get_rows_q4_1_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x95,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -16690,7 +8701,7 @@ unsigned char get_rows_q4_1_f32_fp32_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -16714,91 +8725,92 @@ unsigned char get_rows_q4_1_f32_fp32_data[] = { 0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, 
-0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x7f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x81,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x92,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x52,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, 
+0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x79,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x7a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x7a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x7a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x7c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7c,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x8d,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x04,0x00, +0x56,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x55,0x00,0x00,0x00, 0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, 0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, 0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, 
+0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x67,0x00,0x00,0x00, 0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x7e,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7f,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x1d,0x00,0x03,0x00, +0x79,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x83,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x8c,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x67,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x93,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x94,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0x8e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8f,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -16819,7 +8831,7 @@ unsigned char get_rows_q4_1_f32_fp32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x93,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -16845,49 +8857,42 @@ unsigned char get_rows_q4_1_f32_fp32_data[] = { 0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, 0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x68,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x6c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00, -0x6c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x88,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x89,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0x8f,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x88,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x93,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x93,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x5e,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x6f,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x67,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x67,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x8e,0x00,0x05,0x00,0x67,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7f,0x00,0x00,0x00, 
+0x3e,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x1c,0x00,0x00,0x00,0x8a,0x00,0x00,0x00,0x78,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x83,0x00,0x00,0x00, +0x8b,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x88,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x8b,0x00,0x00,0x00, +0x8a,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x8e,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x8e,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, +0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q4_1_f32_fp32_len = 2424; +const uint64_t get_rows_q4_0_f32_len = 2356; -unsigned char get_rows_q4_1_fp32_data[] = { +unsigned char get_rows_q4_1_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x96,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -17093,503 +9098,11 @@ unsigned char get_rows_q4_1_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x94,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q4_1_fp32_len = 2440; +const uint64_t get_rows_q4_1_len = 2440; -unsigned char get_rows_q5_0_data[] = { +unsigned char get_rows_q4_1_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xa4,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xa4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xa4,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa6,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb6,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x59,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x5b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x84,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x87,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x00,0x4c,0x00,0x00, -0x1d,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x87,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, 
-0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5e,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x67,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, 
-0x8b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8c,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x55,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x96,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x87,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x9b,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x87,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x87,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0xa0,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa6,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xad,0x00,0x00,0x00,0xac,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xa9,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5e,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_q5_0_len = 2868; - -unsigned char get_rows_q5_0_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x5a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0xa4,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0xa4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0xa4,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa6,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa6,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb9,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, 
-0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x1e,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x59,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x5a,0x00,0x00,0x00, -0x59,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x5b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x63,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x67,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x84,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x87,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x00,0x4c,0x00,0x00, -0x1d,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0xa4,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xae,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0xb9,0x00,0x00,0x00,0xb8,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x2c,0x00,0x05,0x00,0x87,0x00,0x00,0x00, -0xc4,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x9e,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xba,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0xbb,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xbb,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, 
-0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xba,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5e,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x60,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x66,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00, 
-0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x6d,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8b,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x8c,0x00,0x00,0x00,0x8b,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x55,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x87,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x83,0x00,0x05,0x00,0x87,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0xc4,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x87,0x00,0x00,0x00,0xa2,0x00,0x00,0x00, -0xa0,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0xad,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xae,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xa6,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xaf,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xa9,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0xa2,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0xb6,0x00,0x00,0x00,0xb5,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xae,0x00,0x00,0x00,0xb7,0x00,0x00,0x00, -0xa6,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb7,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xba,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xba,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_q5_0_f32_len = 2916; - -unsigned char get_rows_q5_0_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x95,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -17597,7 +9110,7 @@ unsigned char 
get_rows_q5_0_f32_fp32_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x81,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -17617,106 +9130,95 @@ unsigned char get_rows_q5_0_f32_fp32_data[] = { 0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa2,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa3,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xb6,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, 
-0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x7e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, +0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, +0x7f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x81,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x81,0x00,0x00,0x00,0x21,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x92,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, +0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, 
+0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, +0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x52,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00, 0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x84,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x88,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x00,0x00,0x80,0x41,0x1d,0x00,0x03,0x00, -0xa2,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xac,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, -0xb5,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x2c,0x00,0x05,0x00,0x88,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x56,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, 
+0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x68,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x6c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x70,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x74,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x7e,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x7f,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x88,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0x93,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x94,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -17737,7 +9239,7 @@ unsigned char get_rows_q5_0_f32_fp32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x93,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -17758,80 +9260,54 @@ unsigned char get_rows_q5_0_f32_fp32_data[] = { 0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5e,0x00,0x00,0x00, -0x5f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x67,0x00,0x00,0x00, 
-0x68,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x7b,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8d,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x7f,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x99,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x88,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x83,0x00,0x05,0x00,0x88,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x9b,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x88,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xab,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xac,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xad,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0xac,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xa5,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0xb7,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, 
+0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x68,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x69,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x6c,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x72,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x8e,0x00,0x05,0x00,0x6c,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x6c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00, +0x6c,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, +0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x88,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x84,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x89,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x84,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0x8f,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x88,0x00,0x00,0x00,0x90,0x00,0x00,0x00, +0x81,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x90,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0x93,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x93,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q5_0_f32_fp32_len = 2868; +const uint64_t get_rows_q4_1_f32_len = 2424; -unsigned char get_rows_q5_0_fp32_data[] = { +unsigned char get_rows_q5_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0xc3,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -18074,485 +9550,11 @@ unsigned char get_rows_q5_0_fp32_data[] = { 0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q5_0_fp32_len = 2884; +const uint64_t get_rows_q5_0_len = 2884; -unsigned char get_rows_q5_1_data[] = { +unsigned char get_rows_q5_0_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xb4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, 
-0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa1,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb1,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, 
-0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x9e,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xb0,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, -0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb2,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00, -0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, -0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, 
-0x6b,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x75,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x77,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x7f,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x7d,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0x81,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x88,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x89,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x53,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x91,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x92,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x95,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x96,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x97,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x9c,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x81,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xac,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x5c,0x00,0x00,0x00, -0xaf,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xac,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xaf,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb2,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb2,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q5_1_len = 2764; - -unsigned char get_rows_q5_1_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xb7,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 
-0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0xa1,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa1,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xb4,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00, -0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00, 
-0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00, -0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x50,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x1e,0x00,0x06,0x00,0x56,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x50,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00, -0x56,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x57,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x59,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x50,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x67,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x7d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7f,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x82,0x00,0x00,0x00,0x50,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x88,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x9e,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x9f,0x00,0x00,0x00,0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa9,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0xb3,0x00,0x00,0x00,0x00,0x02,0x00,0x00, -0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xb3,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0xb5,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00, -0xb6,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb6,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00,0x25,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00, -0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xb5,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x33,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00,0x14,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x3f,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00, -0x41,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00, -0x5c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x5e,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x61,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x50,0x00,0x00,0x00,0x62,0x00,0x00,0x00,0x61,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x67,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x69,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0xc2,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x72,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x75,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x74,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x7f,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x5a,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x7d,0x00,0x00,0x00, -0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x87,0x00,0x00,0x00, -0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x50,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x53,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x81,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x71,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x90,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x92,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x10,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x92,0x00,0x00,0x00,0x78,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x50,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x8e,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x82,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x82,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x82,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x50,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0xa8,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa9,0x00,0x00,0x00,0xaa,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xaa,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0xae,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x50,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0xb0,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa9,0x00,0x00,0x00, 
-0xb2,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xae,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xb2,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb5,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb5,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, -0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q5_1_f32_len = 2812; - -unsigned char get_rows_q5_1_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0xb5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0xc2,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, 0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, 0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, @@ -18560,7 +9562,7 @@ unsigned char get_rows_q5_1_f32_fp32_data[] = { 0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, 0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, 0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, 0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, 0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, 0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, @@ -18580,101 +9582,106 @@ unsigned char get_rows_q5_1_f32_fp32_data[] = { 0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, 0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x47,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, 
-0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x58,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x59,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x5a,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5c,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xa2,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0xa3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0xa3,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0xa3,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa5,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0xb6,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, 0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 
-0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x84,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x0f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x9e,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, -0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa0,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0xa8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0xb1,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, +0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x16,0x00,0x03,0x00, 
+0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x53,0x00,0x00,0x00, +0x49,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x55,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x1c,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x1e,0x00,0x05,0x00,0x58,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x59,0x00,0x00,0x00,0x58,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x5a,0x00,0x00,0x00,0x59,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x5b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x5b,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5e,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x64,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x7b,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x84,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x88,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x00,0x00,0x80,0x41,0x1d,0x00,0x03,0x00, +0xa2,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0xa3,0x00,0x00,0x00,0xa2,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0xa4,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0xa3,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0xa4,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xac,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0xb5,0x00,0x00,0x00,0x00,0x02,0x00,0x00, +0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00,0xb6,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x2c,0x00,0x05,0x00,0x88,0x00,0x00,0x00,0xc1,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9d,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0xb3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0xb7,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb8,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb8,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -18695,7 +9702,7 @@ unsigned char get_rows_q5_1_f32_fp32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -18716,77 +9723,80 
@@ unsigned char get_rows_q5_1_f32_fp32_data[] = { 0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5e,0x00,0x00,0x00, +0x5f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, -0x5e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, -0x62,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x63,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x69,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x71,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x77,0x00,0x00,0x00, -0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x78,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x7e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, -0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x89,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x71,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, -0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x91,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x94,0x00,0x00,0x00, -0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x96,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, -0x97,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x50,0x00,0x05,0x00, -0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x84,0x00,0x00,0x00, 
-0x9d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, -0x1c,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00, -0xa9,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa9,0x00,0x00,0x00, -0xa7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0xad,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, -0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0xa8,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, -0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, -0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -}; -const uint64_t get_rows_q5_1_f32_fp32_len = 2780; +0x52,0x00,0x00,0x00,0x60,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x61,0x00,0x00,0x00, +0x60,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x67,0x00,0x00,0x00, +0x68,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x64,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0xc5,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x69,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0xc4,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x75,0x00,0x00,0x00,0xc7,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x7b,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x7d,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x7f,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x84,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x87,0x00,0x00,0x00, +0x86,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8d,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x8c,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x92,0x00,0x00,0x00,0x8d,0x00,0x00,0x00,0x91,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x93,0x00,0x00,0x00, 
+0x92,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x87,0x00,0x00,0x00,0x75,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x7f,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x95,0x00,0x00,0x00,0x98,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x99,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x88,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0x93,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x83,0x00,0x05,0x00,0x88,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x9b,0x00,0x00,0x00,0xc1,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, +0x88,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x9f,0x00,0x00,0x00, +0x61,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xa8,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xab,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xac,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0xa5,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xad,0x00,0x00,0x00,0xab,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0xb1,0x00,0x00,0x00,0xa8,0x00,0x00,0x00, +0x56,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0xb3,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0xac,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xa5,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0xb4,0x00,0x00,0x00,0xb3,0x00,0x00,0x00, +0xf9,0x00,0x02,0x00,0xb7,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0xb7,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, -unsigned char get_rows_q5_1_fp32_data[] = { +}; +const uint64_t get_rows_q5_0_f32_len = 2868; + +unsigned char get_rows_q5_1_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0xb6,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -19022,61 +10032,63 @@ unsigned char get_rows_q5_1_fp32_data[] = { 0xb4,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q5_1_fp32_len = 2796; +const uint64_t get_rows_q5_1_len = 2796; -unsigned char get_rows_q8_0_data[] = { +unsigned char get_rows_q5_1_f32_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x86,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x54,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x56,0x00,0x00,0x00, +0xb5,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x56,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x71,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, 
+0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x58,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x58,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x5a,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x9e,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x9f,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x71,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x71,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x73,0x00,0x00,0x00, +0x9f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0xa1,0x00,0x00,0x00, 0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x83,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0xa1,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0xb2,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, 0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, 0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, @@ -19108,226 +10120,45 @@ unsigned char get_rows_q8_0_data[] = { 0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x1e,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x71,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x84,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x85,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, 
-0x85,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x4f,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x68,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x41,0x00,0x08,0x00, -0x62,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x69,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, 
-0x4f,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x5d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5d,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x5a,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x5a,0x00,0x00,0x00,0x81,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x81,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x84,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x84,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_q8_0_len = 2232; - -unsigned char get_rows_q8_0_f32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x89,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x09,0x00,0x00,0x00, -0x11,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x51,0x11,0x00,0x00,0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00, -0x0b,0x00,0x06,0x00,0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c, -0x2e,0x73,0x74,0x64,0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00, -0x0e,0x00,0x03,0x00,0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x0f,0x00,0x0a,0x00,0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x6d,0x61,0x69,0x6e,0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00, -0x0b,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x54,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00, 
-0x22,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x56,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x56,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x58,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x70,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x71,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x71,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x71,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x73,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x86,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00, -0x21,0x00,0x03,0x00,0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x0d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x15,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x16,0x00,0x03,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x1e,0x00,0x06,0x00,0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x1d,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00, -0x1f,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00, -0x06,0x00,0x00,0x00,0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x2b,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x4f,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x1c,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x1e,0x00,0x04,0x00,0x54,0x00,0x00,0x00, -0x4f,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x55,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x56,0x00,0x00,0x00,0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00, 
-0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x17,0x00,0x04,0x00, -0x5d,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x62,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x52,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x70,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x71,0x00,0x00,0x00, -0x70,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x72,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x71,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x72,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x7b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x16,0x00,0x03,0x00,0x52,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x15,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x08,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x1e,0x00,0x06,0x00,0x56,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x52,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x55,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x57,0x00,0x00,0x00,0x56,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x58,0x00,0x00,0x00,0x57,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x59,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x59,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x5c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x69,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x2b,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x7e,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x80,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x53,0x00,0x00,0x00,0x17,0x00,0x04,0x00, +0x84,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x0f,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x9e,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x9f,0x00,0x00,0x00, +0x9e,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0xa0,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x9f,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0xa0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0xa8,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, 0x1c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x85,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, -0x09,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0xb1,0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00, +0x09,0x00,0x00,0x00,0xb2,0x00,0x00,0x00,0xb1,0x00,0x00,0x00, 0x16,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00, 0x02,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 0x03,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x87,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0x88,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0xf7,0x00,0x03,0x00,0xb3,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfb,0x00,0x03,0x00,0x0c,0x00,0x00,0x00,0xb4,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0xb4,0x00,0x00,0x00,0x41,0x00,0x05,0x00, 0x0d,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, 0x0c,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, 0x0f,0x00,0x00,0x00,0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, @@ -19348,7 +10179,7 @@ 
unsigned char get_rows_q8_0_f32_data[] = { 0x23,0x00,0x00,0x00,0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00, 0x00,0x00,0x00,0x00,0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00, 0x26,0x00,0x00,0x00,0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0x87,0x00,0x00,0x00, +0x26,0x00,0x00,0x00,0xf9,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, 0xf8,0x00,0x02,0x00,0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00, 0x30,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, 0x2e,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, @@ -19365,247 +10196,81 @@ unsigned char get_rows_q8_0_f32_data[] = { 0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00, 0x06,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, 0x44,0x00,0x00,0x00,0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x4a,0x00,0x00,0x00,0x48,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, 0x41,0x00,0x00,0x00,0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x4d,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00, -0x5b,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x06,0x00,0x00,0x00,0x4f,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, 0x45,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x4f,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x62,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x64,0x00,0x00,0x00,0x63,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x65,0x00,0x00,0x00, -0x64,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x68,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x62,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x68,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, -0x52,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x69,0x00,0x00,0x00, -0x6f,0x00,0x04,0x00,0x4f,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x6a,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x5d,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x6b,0x00,0x00,0x00, -0x8e,0x00,0x05,0x00,0x5d,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x6c,0x00,0x00,0x00,0x5c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x4e,0x00,0x00,0x00, -0x49,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x4f,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x7a,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x7b,0x00,0x00,0x00, -0x7c,0x00,0x00,0x00,0x73,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0x7c,0x00,0x00,0x00, -0x7a,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x80,0x00,0x00,0x00,0x76,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x4f,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x6f,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x7b,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0x73,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x80,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x84,0x00,0x00,0x00,0x83,0x00,0x00,0x00, 
-0xf9,0x00,0x02,0x00,0x87,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x87,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - +0x52,0x00,0x00,0x00,0x5e,0x00,0x00,0x00,0x5d,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x5f,0x00,0x00,0x00, +0x5e,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x5c,0x00,0x00,0x00, +0x62,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x63,0x00,0x00,0x00,0x62,0x00,0x00,0x00, +0x73,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x63,0x00,0x00,0x00,0x41,0x00,0x07,0x00,0x69,0x00,0x00,0x00, +0x6a,0x00,0x00,0x00,0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x12,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x6b,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0xc4,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x6e,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x6f,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x71,0x00,0x00,0x00, +0x70,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x77,0x00,0x00,0x00,0x4a,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x74,0x00,0x00,0x00,0x77,0x00,0x00,0x00, +0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x78,0x00,0x00,0x00,0x54,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x7a,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x80,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x5a,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x7e,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x81,0x00,0x00,0x00, +0x71,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x83,0x00,0x00,0x00, +0x82,0x00,0x00,0x00,0xc7,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x89,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x88,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x71,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0x8d,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x8f,0x00,0x00,0x00, +0x8e,0x00,0x00,0x00,0xc2,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x91,0x00,0x00,0x00,0x83,0x00,0x00,0x00,0x6e,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x7a,0x00,0x00,0x00,0xc5,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x91,0x00,0x00,0x00,0x94,0x00,0x00,0x00, +0x70,0x00,0x04,0x00,0x1c,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x95,0x00,0x00,0x00,0x50,0x00,0x05,0x00,0x84,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x8f,0x00,0x00,0x00,0x96,0x00,0x00,0x00, +0x8e,0x00,0x05,0x00,0x84,0x00,0x00,0x00,0x9a,0x00,0x00,0x00, +0x97,0x00,0x00,0x00,0x5f,0x00,0x00,0x00,0x50,0x00,0x05,0x00, +0x84,0x00,0x00,0x00,0x9c,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x64,0x00,0x00,0x00,0x81,0x00,0x05,0x00,0x84,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x9a,0x00,0x00,0x00,0x9c,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0xa4,0x00,0x00,0x00, +0x4f,0x00,0x00,0x00,0x4a,0x00,0x00,0x00,0x51,0x00,0x05,0x00, +0x1c,0x00,0x00,0x00,0xa7,0x00,0x00,0x00,0x9d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0xa8,0x00,0x00,0x00, +0xa9,0x00,0x00,0x00,0xa1,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0xa4,0x00,0x00,0x00,0x3e,0x00,0x03,0x00,0xa9,0x00,0x00,0x00, 
+0xa7,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0xad,0x00,0x00,0x00,0xa4,0x00,0x00,0x00,0x54,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0xaf,0x00,0x00,0x00, +0x9d,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0xa8,0x00,0x00,0x00,0xb0,0x00,0x00,0x00,0xa1,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0xad,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0xb0,0x00,0x00,0x00,0xaf,0x00,0x00,0x00,0xf9,0x00,0x02,0x00, +0xb3,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0xb3,0x00,0x00,0x00, +0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q8_0_f32_len = 2280; +const uint64_t get_rows_q5_1_f32_len = 2780; -unsigned char get_rows_q8_0_f32_fp32_data[] = { -0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, -0x8a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, -0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, -0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, -0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, -0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, -0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, -0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, -0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, -0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x47,0x00,0x03,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x73,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, -0x48,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x74,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, 
-0x47,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x02,0x00,0x00,0x00, -0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x22,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, -0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, -0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, -0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, -0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, -0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, -0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, -0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, -0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, -0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, -0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, -0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, -0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x51,0x00,0x00,0x00, -0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x52,0x00,0x00,0x00, -0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, -0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x1e,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x00,0x00, -0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x55,0x00,0x00,0x00, -0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x56,0x00,0x00,0x00, -0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x57,0x00,0x00,0x00, -0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, -0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x51,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, -0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, -0x1d,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x1e,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00, -0x20,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x74,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x75,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, -0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, -0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00, 
-0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, -0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x16,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, -0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, -0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, -0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, -0x0c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x89,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, -0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, -0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, -0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, -0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, -0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, -0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, -0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, -0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, -0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, -0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, -0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, -0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, -0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, -0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, -0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, -0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x48,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, -0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, -0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00, -0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, -0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, -0x20,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, 
-0x52,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, -0x72,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x66,0x00,0x00,0x00, -0x65,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, -0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, -0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, -0x6b,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x10,0x00,0x00,0x00, -0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, -0x1c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, -0x50,0x00,0x05,0x00,0x5e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x67,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, -0x5e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, -0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, -0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x49,0x00,0x00,0x00, -0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, -0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, -0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x76,0x00,0x00,0x00, -0x2e,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, -0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, -0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x79,0x00,0x00,0x00, -0x16,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, -0x84,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x01,0x00,0x00,0x00, -0x41,0x00,0x06,0x00,0x7d,0x00,0x00,0x00,0x85,0x00,0x00,0x00, -0x76,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x82,0x00,0x00,0x00, -0x3e,0x00,0x03,0x00,0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00, -0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, -0x88,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, - -}; -const uint64_t get_rows_q8_0_f32_fp32_len = 2280; - -unsigned char get_rows_q8_0_fp32_data[] = { +unsigned char get_rows_q8_0_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, 0x8b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, 0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, @@ -19799,7 +10464,202 @@ unsigned char get_rows_q8_0_fp32_data[] = { 0xf8,0x00,0x02,0x00,0x89,0x00,0x00,0x00,0xfd,0x00,0x01,0x00, 0x38,0x00,0x01,0x00, }; -const uint64_t get_rows_q8_0_fp32_len = 2296; +const uint64_t get_rows_q8_0_len = 2296; + +unsigned char get_rows_q8_0_f32_data[] = { +0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, +0x8a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x11,0x00,0x02,0x00, +0x01,0x00,0x00,0x00,0x11,0x00,0x02,0x00,0x51,0x11,0x00,0x00, +0x11,0x00,0x02,0x00,0x60,0x11,0x00,0x00,0x0b,0x00,0x06,0x00, +0x01,0x00,0x00,0x00,0x47,0x4c,0x53,0x4c,0x2e,0x73,0x74,0x64, +0x2e,0x34,0x35,0x30,0x00,0x00,0x00,0x00,0x0e,0x00,0x03,0x00, +0x00,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x0f,0x00,0x0a,0x00, +0x05,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x6d,0x61,0x69,0x6e, +0x00,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x2d,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x10,0x00,0x06,0x00,0x04,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x0b,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x04,0x00,0x00,0x00,0x48,0x00,0x05,0x00, 
+0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x1d,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x1d,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x2a,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x48,0x00,0x04,0x00,0x2b,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00, +0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x03,0x00,0x2b,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x2d,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x2d,0x00,0x00,0x00,0x21,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x53,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x48,0x00,0x05,0x00,0x54,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x23,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x55,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x56,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x56,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x56,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x58,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x73,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x04,0x00,0x00,0x00, +0x48,0x00,0x04,0x00,0x74,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x48,0x00,0x05,0x00,0x74,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x47,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x02,0x00,0x00,0x00, +0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00,0x22,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x47,0x00,0x04,0x00,0x76,0x00,0x00,0x00, +0x21,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x47,0x00,0x04,0x00, +0x87,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x19,0x00,0x00,0x00, +0x13,0x00,0x02,0x00,0x02,0x00,0x00,0x00,0x21,0x00,0x03,0x00, +0x03,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0x17,0x00,0x04,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x03,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0a,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x0a,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x0d,0x00,0x00,0x00, +0x01,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x15,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x12,0x00,0x00,0x00, +0x02,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x16,0x00,0x03,0x00, +0x1c,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x1e,0x00,0x06,0x00, +0x1d,0x00,0x00,0x00,0x06,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x1e,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x1d,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x1e,0x00,0x00,0x00,0x1f,0x00,0x00,0x00, +0x09,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x21,0x00,0x00,0x00,0x09,0x00,0x00,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x02,0x00,0x24,0x00,0x00,0x00,0x1d,0x00,0x03,0x00, 
+0x2a,0x00,0x00,0x00,0x10,0x00,0x00,0x00,0x1e,0x00,0x03,0x00, +0x2b,0x00,0x00,0x00,0x2a,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x2c,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x2b,0x00,0x00,0x00, +0x3b,0x00,0x04,0x00,0x2c,0x00,0x00,0x00,0x2d,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x2b,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x30,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x10,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x16,0x00,0x03,0x00,0x51,0x00,0x00,0x00, +0x10,0x00,0x00,0x00,0x15,0x00,0x04,0x00,0x52,0x00,0x00,0x00, +0x08,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x1c,0x00,0x04,0x00, +0x53,0x00,0x00,0x00,0x52,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x1e,0x00,0x04,0x00,0x54,0x00,0x00,0x00,0x51,0x00,0x00,0x00, +0x53,0x00,0x00,0x00,0x1d,0x00,0x03,0x00,0x55,0x00,0x00,0x00, +0x54,0x00,0x00,0x00,0x1e,0x00,0x03,0x00,0x56,0x00,0x00,0x00, +0x55,0x00,0x00,0x00,0x20,0x00,0x04,0x00,0x57,0x00,0x00,0x00, +0x0c,0x00,0x00,0x00,0x56,0x00,0x00,0x00,0x3b,0x00,0x04,0x00, +0x57,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x5a,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x51,0x00,0x00,0x00,0x17,0x00,0x04,0x00,0x5e,0x00,0x00,0x00, +0x1c,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x63,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x52,0x00,0x00,0x00, +0x1d,0x00,0x03,0x00,0x73,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x1e,0x00,0x03,0x00,0x74,0x00,0x00,0x00,0x73,0x00,0x00,0x00, +0x20,0x00,0x04,0x00,0x75,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x74,0x00,0x00,0x00,0x3b,0x00,0x04,0x00,0x75,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x20,0x00,0x04,0x00, +0x7d,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x1c,0x00,0x00,0x00, +0x2b,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x86,0x00,0x00,0x00, +0x00,0x02,0x00,0x00,0x2c,0x00,0x06,0x00,0x09,0x00,0x00,0x00, +0x87,0x00,0x00,0x00,0x86,0x00,0x00,0x00,0x16,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x36,0x00,0x05,0x00,0x02,0x00,0x00,0x00, +0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0x00, +0xf8,0x00,0x02,0x00,0x05,0x00,0x00,0x00,0xf7,0x00,0x03,0x00, +0x88,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfb,0x00,0x03,0x00, +0x0c,0x00,0x00,0x00,0x89,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x89,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x0d,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x0b,0x00,0x00,0x00,0x0c,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x0f,0x00,0x00,0x00, +0x0e,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x11,0x00,0x00,0x00,0x0f,0x00,0x00,0x00,0x84,0x00,0x05,0x00, +0x10,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x11,0x00,0x00,0x00, +0x12,0x00,0x00,0x00,0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x13,0x00,0x00,0x00,0x41,0x00,0x05,0x00, +0x0d,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x0b,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00, +0x18,0x00,0x00,0x00,0x17,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x10,0x00,0x00,0x00,0x19,0x00,0x00,0x00,0x18,0x00,0x00,0x00, +0x7c,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x1a,0x00,0x00,0x00, +0x19,0x00,0x00,0x00,0x41,0x00,0x05,0x00,0x21,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0x1f,0x00,0x00,0x00,0x20,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x06,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x22,0x00,0x00,0x00,0xae,0x00,0x05,0x00,0x24,0x00,0x00,0x00, +0x25,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0xf7,0x00,0x03,0x00,0x27,0x00,0x00,0x00,0x00,0x00,0x00,0x00, +0xfa,0x00,0x04,0x00,0x25,0x00,0x00,0x00,0x26,0x00,0x00,0x00, +0x27,0x00,0x00,0x00,0xf8,0x00,0x02,0x00,0x26,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x27,0x00,0x00,0x00,0x41,0x00,0x06,0x00,0x30,0x00,0x00,0x00, +0x31,0x00,0x00,0x00,0x2d,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x1a,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x32,0x00,0x00,0x00,0x31,0x00,0x00,0x00,0x7c,0x00,0x04,0x00, +0x06,0x00,0x00,0x00,0x33,0x00,0x00,0x00,0x32,0x00,0x00,0x00, +0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x33,0x00,0x00,0x00,0x23,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x38,0x00,0x00,0x00, +0x14,0x00,0x00,0x00,0x84,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x1a,0x00,0x00,0x00,0x23,0x00,0x00,0x00, +0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x3f,0x00,0x00,0x00,0x14,0x00,0x00,0x00,0x86,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x45,0x00,0x00,0x00,0x3a,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x89,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x3a,0x00,0x00,0x00,0x44,0x00,0x00,0x00, +0x86,0x00,0x05,0x00,0x06,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x48,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x89,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x4d,0x00,0x00,0x00,0x41,0x00,0x00,0x00, +0x44,0x00,0x00,0x00,0x82,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x4e,0x00,0x00,0x00,0x41,0x00,0x00,0x00,0x4d,0x00,0x00,0x00, +0x41,0x00,0x07,0x00,0x5a,0x00,0x00,0x00,0x5b,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x3d,0x00,0x04,0x00,0x51,0x00,0x00,0x00, +0x5c,0x00,0x00,0x00,0x5b,0x00,0x00,0x00,0x73,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x5d,0x00,0x00,0x00,0x5c,0x00,0x00,0x00, +0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x45,0x00,0x00,0x00, +0x20,0x00,0x00,0x00,0x49,0x00,0x00,0x00,0x3d,0x00,0x04,0x00, +0x52,0x00,0x00,0x00,0x65,0x00,0x00,0x00,0x64,0x00,0x00,0x00, +0x72,0x00,0x04,0x00,0x10,0x00,0x00,0x00,0x66,0x00,0x00,0x00, +0x65,0x00,0x00,0x00,0x6f,0x00,0x04,0x00,0x1c,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x66,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x6a,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x41,0x00,0x08,0x00,0x63,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x58,0x00,0x00,0x00,0x2e,0x00,0x00,0x00, +0x45,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x6a,0x00,0x00,0x00, +0x3d,0x00,0x04,0x00,0x52,0x00,0x00,0x00,0x6c,0x00,0x00,0x00, +0x6b,0x00,0x00,0x00,0x72,0x00,0x04,0x00,0x10,0x00,0x00,0x00, +0x6d,0x00,0x00,0x00,0x6c,0x00,0x00,0x00,0x6f,0x00,0x04,0x00, +0x1c,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x6d,0x00,0x00,0x00, +0x50,0x00,0x05,0x00,0x5e,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x67,0x00,0x00,0x00,0x6e,0x00,0x00,0x00,0x8e,0x00,0x05,0x00, +0x5e,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x6f,0x00,0x00,0x00, +0x5d,0x00,0x00,0x00,0x80,0x00,0x05,0x00,0x06,0x00,0x00,0x00, +0x79,0x00,0x00,0x00,0x4e,0x00,0x00,0x00,0x49,0x00,0x00,0x00, +0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00,0x7c,0x00,0x00,0x00, +0x72,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x41,0x00,0x06,0x00, +0x7d,0x00,0x00,0x00,0x7e,0x00,0x00,0x00,0x76,0x00,0x00,0x00, +0x2e,0x00,0x00,0x00,0x79,0x00,0x00,0x00,0x3e,0x00,0x03,0x00, +0x7e,0x00,0x00,0x00,0x7c,0x00,0x00,0x00,0x80,0x00,0x05,0x00, +0x06,0x00,0x00,0x00,0x82,0x00,0x00,0x00,0x79,0x00,0x00,0x00, +0x16,0x00,0x00,0x00,0x51,0x00,0x05,0x00,0x1c,0x00,0x00,0x00, +0x84,0x00,0x00,0x00,0x72,0x00,0x00,0x00,0x01,0x00,0x00,0x00, +0x41,0x00,0x06,0x00,0x7d,0x00,0x00,0x00,0x85,0x00,0x00,0x00, +0x76,0x00,0x00,0x00,0x2e,0x00,0x00,0x00,0x82,0x00,0x00,0x00, +0x3e,0x00,0x03,0x00,0x85,0x00,0x00,0x00,0x84,0x00,0x00,0x00, 
+0xf9,0x00,0x02,0x00,0x88,0x00,0x00,0x00,0xf8,0x00,0x02,0x00, +0x88,0x00,0x00,0x00,0xfd,0x00,0x01,0x00,0x38,0x00,0x01,0x00, + +}; +const uint64_t get_rows_q8_0_f32_len = 2280; unsigned char matmul_f16_aligned_l_data[] = { 0x03,0x02,0x23,0x07,0x00,0x05,0x01,0x00,0x0b,0x00,0x0d,0x00, diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index b1e0006bb..14fb89e09 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1,6 +1,6 @@ #include "ggml-vulkan.h" -#ifdef VK_RUN_TESTS +#ifdef GGML_VULKAN_RUN_TESTS #include #endif @@ -255,6 +255,7 @@ static size_t vk_staging_offset; static vk_buffer vk_sync_staging; static vk_context * vk_ctx; +static vk_context * vk_transfer_ctx; static bool vk_disable; @@ -264,7 +265,7 @@ size_t vk_output_tensor; #endif static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_size, const void* spv_data, const std::string& entrypoint, uint32_t parameter_count, uint32_t push_constant_size, std::array wg_denoms, std::vector&& specialization_constants, uint32_t align) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_pipeline(" << name << ", " << entrypoint << ", " << parameter_count << ", " << push_constant_size << ", (" << wg_denoms[0] << "," << wg_denoms[1] << "," << wg_denoms[2] << "), specialization_constants, " << align << ")" << std::endl; #endif GGML_ASSERT(parameter_count > 0); @@ -368,7 +369,7 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s } static void ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline& pipeline, uint32_t n) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pipeline_allocate_descriptor_sets(" << pipeline.name << ", " << n << ")" << std::endl; #endif // Check if gc already contains pipeline before adding it @@ -413,14 +414,14 @@ static void ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline& pipeline, uin } static void ggml_vk_pipeline_cleanup(vk_pipeline& pipeline) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pipeline_cleanup(" << pipeline.name << ")" << std::endl; #endif pipeline.descriptor_set_idx = 0; } static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_cmd_buffer()" << std::endl; #endif if (q.cmd_buffers.size() > q.cmd_buffer_idx) { @@ -442,7 +443,7 @@ static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { } static vk_submission ggml_vk_create_submission(vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_submission()" << std::endl; #endif vk_submission s; @@ -453,14 +454,14 @@ static vk_submission ggml_vk_create_submission(vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_sequence_1()" << std::endl; #endif return { ggml_vk_create_submission(q, std::move(wait_semaphores), std::move(signal_semaphores)) }; } static void ggml_vk_submit(vk_context * ctx, vk::Fence fence) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_submit(" << ctx->seqs.size() << ", " << fence << ")" << std::endl; #endif if (ctx->seqs.empty()) { @@ -536,7 +537,7 @@ static void ggml_vk_submit(vk_context * ctx, vk::Fence fence) { } static uint32_t ggml_vk_find_queue_family_index(std::vector& queue_family_props, const vk::QueueFlags& required, const vk::QueueFlags& avoid, int32_t compute_index, uint32_t min_num_queues) { -#ifdef VK_DEBUG +#ifdef 
GGML_VULKAN_DEBUG std::cerr << "ggml_vk_find_queue_family_index()" << std::endl; #endif const uint32_t qfsize = queue_family_props.size(); @@ -578,7 +579,7 @@ static uint32_t ggml_vk_find_queue_family_index(std::vector= vk_gc.tl_semaphores.size()) { @@ -642,7 +643,7 @@ static vk::Event ggml_vk_create_event() { } static void ggml_vk_queue_cleanup(vk_queue& q) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_queue_cleanup()" << std::endl; #endif // Requires command buffers to be done @@ -652,7 +653,7 @@ static void ggml_vk_queue_cleanup(vk_queue& q) { } static vk_buffer ggml_vk_create_buffer(size_t size, vk::MemoryPropertyFlags req_flags) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_buffer(" << size << ", " << to_string(req_flags) << ")" << std::endl; #endif GGML_ASSERT(size > 0); @@ -743,7 +744,7 @@ static void ggml_vk_destroy_buffer(vk_buffer& buf) { if (buf.size == 0) { return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_destroy_buffer(" << buf.size << ")" << std::endl; #endif @@ -757,7 +758,7 @@ static vk_subbuffer ggml_vk_subbuffer(vk_buffer& buf) { } static void ggml_vk_sync_buffers(vk_context * ctx) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_sync_buffers()" << std::endl; #endif const std::vector mem_barriers{ { { vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite }, { vk::AccessFlagBits::eMemoryRead | vk::AccessFlagBits::eMemoryWrite } } }; @@ -773,7 +774,7 @@ static void ggml_vk_sync_buffers(vk_context * ctx) { } static void ggml_vk_wait_events(vk::CommandBuffer& cmd_buffer, std::vector&& events, vk::PipelineStageFlags src_stages, vk::PipelineStageFlags dst_stages) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_wait_events()" << std::endl; #endif if (events.empty()) { @@ -810,7 +811,7 @@ static bool ggml_vk_build_shader(ggml_type type) { } static void ggml_vk_load_shaders() { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_load_shaders()" << std::endl; #endif @@ -849,36 +850,6 @@ static void ggml_vk_load_shaders() { vk_pipeline_matmul_f16_f32_aligned_l = ggml_vk_create_pipeline("matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_len, matmul_f16_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); vk_pipeline_matmul_f16_f32_aligned_m = ggml_vk_create_pipeline("matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_len, matmul_f16_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); vk_pipeline_matmul_f16_f32_aligned_s = ggml_vk_create_pipeline("matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_len, matmul_f16_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - - // Build dequant shaders - vk_pipeline_dequant[GGML_TYPE_F32] = ggml_vk_create_pipeline("f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), {64, 1, 1}, {}, 1); - - vk_pipeline_dequant[GGML_TYPE_F16] = ggml_vk_create_pipeline("dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("dequant_q5_0", 
dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("dequant_q8_0", dequant_q8_0_len, dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - - // get_rows - vk_pipeline_get_rows[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - - vk_pipeline_get_rows_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, 
sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); } else { vk_pipeline_matmul_f32_l = ggml_vk_create_pipeline("matmul_f32_l", matmul_f32_l_fp32_len, matmul_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); vk_pipeline_matmul_f32_m = ggml_vk_create_pipeline("matmul_f32_m", matmul_f32_m_fp32_len, matmul_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); @@ -901,36 +872,6 @@ static void ggml_vk_load_shaders() { vk_pipeline_matmul_f16_f32_aligned_l = ggml_vk_create_pipeline("matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_fp32_len, matmul_f16_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); vk_pipeline_matmul_f16_f32_aligned_m = ggml_vk_create_pipeline("matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_fp32_len, matmul_f16_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); vk_pipeline_matmul_f16_f32_aligned_s = ggml_vk_create_pipeline("matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_fp32_len, matmul_f16_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - - // Build dequant shaders - vk_pipeline_dequant[GGML_TYPE_F32] = ggml_vk_create_pipeline("f32_to_f16", f32_to_f16_fp32_len, f32_to_f16_fp32_data, "main", 2, 4 * sizeof(int), {64, 1, 1}, {}, 1); - - vk_pipeline_dequant[GGML_TYPE_F16] = ggml_vk_create_pipeline("dequant_f16", dequant_f16_fp32_len, dequant_f16_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("dequant_q4_0", dequant_q4_0_fp32_len, dequant_q4_0_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("dequant_q4_1", dequant_q4_1_fp32_len, dequant_q4_1_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("dequant_q5_0", dequant_q5_0_fp32_len, dequant_q5_0_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("dequant_q5_1", dequant_q5_1_fp32_len, dequant_q5_1_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("dequant_q8_0", dequant_q8_0_fp32_len, dequant_q8_0_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("dequant_q2_K", dequant_q2_K_fp32_len, dequant_q2_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("dequant_q3_K", dequant_q3_K_fp32_len, dequant_q3_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("dequant_q4_K", dequant_q4_K_fp32_len, dequant_q4_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("dequant_q5_K", dequant_q5_K_fp32_len, dequant_q5_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("dequant_q6_K", dequant_q6_K_fp32_len, dequant_q6_K_fp32_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - - // get_rows - vk_pipeline_get_rows[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16", get_rows_f16_fp32_len, get_rows_f16_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 
1); - vk_pipeline_get_rows[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0", get_rows_q4_0_fp32_len, get_rows_q4_0_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1", get_rows_q4_1_fp32_len, get_rows_q4_1_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0", get_rows_q5_0_fp32_len, get_rows_q5_0_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1", get_rows_q5_1_fp32_len, get_rows_q5_1_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0", get_rows_q8_0_fp32_len, get_rows_q8_0_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - - vk_pipeline_get_rows_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16_f32", get_rows_f16_f32_fp32_len, get_rows_f16_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0_f32", get_rows_q4_0_f32_fp32_len, get_rows_q4_0_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1_f32", get_rows_q4_1_f32_fp32_len, get_rows_q4_1_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0_f32", get_rows_q5_0_f32_fp32_len, get_rows_q5_0_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1_f32", get_rows_q5_1_f32_fp32_len, get_rows_q5_1_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0_f32", get_rows_q8_0_f32_fp32_len, get_rows_q8_0_f32_fp32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); } vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("mul_mat_vec_f16_f32", mul_mat_vec_f16_f32_len, mul_mat_vec_f16_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); @@ -945,6 +886,36 @@ static void ggml_vk_load_shaders() { vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("mul_mat_vec_q5_K_f32", mul_mat_vec_q5_K_f32_len, mul_mat_vec_q5_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("mul_mat_vec_q6_K_f32", mul_mat_vec_q6_K_f32_len, mul_mat_vec_q6_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + // dequant shaders + vk_pipeline_dequant[GGML_TYPE_F32] = ggml_vk_create_pipeline("f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), {64, 1, 1}, {}, 1); + + vk_pipeline_dequant[GGML_TYPE_F16] = ggml_vk_create_pipeline("dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 
32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("dequant_q5_0", dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("dequant_q8_0", dequant_q8_0_len, dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + vk_pipeline_dequant[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + + // get_rows + vk_pipeline_get_rows[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + + vk_pipeline_get_rows_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_get_rows_f32[GGML_TYPE_Q8_0] = 
ggml_vk_create_pipeline("get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + vk_pipeline_matmul_split_k_reduce = ggml_vk_create_pipeline("split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); vk_pipeline_mul_mat_vec_p021_f16_f32 = ggml_vk_create_pipeline("mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); @@ -983,7 +954,7 @@ static void ggml_vk_load_shaders() { } void ggml_vk_init() { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_init()" << std::endl; #endif static bool initialized = false; @@ -999,17 +970,17 @@ void ggml_vk_init() { vk::ApplicationInfo app_info{ "ggml-vulkan", 1, nullptr, 0, VK_API_VERSION }; const std::vector layers = { -#ifdef VK_VALIDATE +#ifdef GGML_VULKAN_VALIDATE "VK_LAYER_KHRONOS_validation", #endif }; const std::vector extensions = { -#ifdef VK_VALIDATE +#ifdef GGML_VULKAN_VALIDATE "VK_EXT_validation_features", #endif }; vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(), &app_info, layers, extensions); -#ifdef VK_VALIDATE +#ifdef GGML_VULKAN_VALIDATE const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; vk::ValidationFeaturesEXT validation_features = { features_enable, @@ -1120,7 +1091,7 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; device_extensions.push_back("VK_KHR_16bit_storage"); -#ifdef VK_VALIDATE +#ifdef GGML_VULKAN_VALIDATE device_extensions.push_back("VK_KHR_shader_non_semantic_info"); #endif @@ -1154,6 +1125,7 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; vk_fence = vk_device.device.createFence({}); vk_ctx = nullptr; + vk_transfer_ctx = nullptr; vk_disable = false; @@ -1166,7 +1138,7 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; } static vk_pipeline* ggml_vk_get_to_fp16(ggml_type type) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_to_fp16()" << std::endl; #endif switch (type) { @@ -1190,7 +1162,7 @@ static vk_pipeline* ggml_vk_get_to_fp16(ggml_type type) { } static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_type type) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_dequantize_mul_mat_vec()" << std::endl; #endif switch (type) { @@ -1219,7 +1191,7 @@ static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_type type) { static vk_buffer g_vk_buffer_pool[MAX_VK_BUFFERS]; static vk_buffer ggml_vk_pool_malloc(size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pool_malloc(" << size << ")" << std::endl; #endif int best_i = -1; @@ -1253,7 +1225,7 @@ static vk_buffer ggml_vk_pool_malloc(size_t size) { } static void ggml_vk_pool_free(vk_buffer& buffer) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pool_free(" << buffer.size << ")" << std::endl; #endif for (int i = 0; i < MAX_VK_BUFFERS; ++i) { @@ -1286,7 +1258,7 @@ static vk_buffer ggml_vk_create_buffer_temp(size_t size) { } static void * ggml_vk_host_malloc(size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_malloc(" << size << ")" << std::endl; #endif vk_buffer buf = ggml_vk_create_buffer(size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); @@ -1309,7 +1281,7 @@ static void ggml_vk_host_free(void* ptr) { if (ptr == nullptr) 
{ return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_free(" << ptr << ")" << std::endl; #endif vk_buffer* buf = nullptr; @@ -1363,7 +1335,7 @@ static void ggml_vk_dispatch_pipeline(vk_context * ctx, vk_pipeline& pipeline, s const uint32_t wg0 = CEIL_DIV(elements[0], pipeline.wg_denoms[0]); const uint32_t wg1 = CEIL_DIV(elements[1], pipeline.wg_denoms[1]); const uint32_t wg2 = CEIL_DIV(elements[2], pipeline.wg_denoms[2]); -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_dispatch_pipeline(" << pipeline.name << ", (" << wg0 << "," << wg1 << "," << wg2 << "))" << std::endl; #endif std::vector descriptor_buffer_infos; @@ -1398,7 +1370,7 @@ static void ggml_vk_end_submission(vk_submission& s, std::vector w } static void ggml_vk_ctx_end(vk_context * ctx) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_ctx_end(" << ctx << ", " << ctx->seqs.size() << ")" << std::endl; #endif if (ctx->s == nullptr) { @@ -1410,7 +1382,7 @@ static void ggml_vk_ctx_end(vk_context * ctx) { } static void ggml_vk_ctx_begin(vk_context * ctx) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_ctx_begin(" << ctx << ")" << std::endl; #endif if (ctx->s != nullptr) { @@ -1441,7 +1413,7 @@ static void ensure_sync_staging_buffer(size_t size) { } static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size_t offset, const ggml_tensor * tensor, bool sync_staging = false) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_nc_async(" << tensor << ")" << std::endl; #endif GGML_ASSERT(!ggml_is_contiguous(tensor)); @@ -1548,7 +1520,7 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size } static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height, bool sync_staging = false) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_2d_async(" << width << ", " << height << ")" << std::endl; #endif // Buffer is already mapped @@ -1582,7 +1554,7 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size ctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices); return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "STAGING" << std::endl; #endif @@ -1619,14 +1591,14 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size } static void ggml_vk_buffer_write_async(vk_context * ctx, vk_buffer* dst, size_t offset, const void * src, size_t size, bool sync_staging = false) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_async(" << size << ")" << std::endl; #endif return ggml_vk_buffer_write_2d_async(ctx, dst, offset, src, size, size, 1, sync_staging); } static void ggml_vk_buffer_write_2d(vk_buffer* dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_2d(" << width << ", " << height << ")" << std::endl; #endif // Buffer is already mapped @@ -1653,14 +1625,14 @@ static void ggml_vk_buffer_write_2d(vk_buffer* dst, size_t offset, const void * } static void ggml_vk_buffer_write(vk_buffer* dst, size_t offset, const void * src, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write(" << size << ")" << std::endl; #endif ggml_vk_buffer_write_2d(dst, offset, src, 0, size, 1); } static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* 
src, size_t offset, void * dst, size_t spitch, size_t dpitch, size_t width, size_t height, bool sync_staging = false) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_read_2d_async(offset=" << offset << ", width=" << width << ", height=" << height << ")" << std::endl; #endif GGML_ASSERT(width > 0); @@ -1693,7 +1665,7 @@ static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* src, size_ return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "STAGING" << std::endl; #endif @@ -1722,7 +1694,7 @@ static void ggml_vk_buffer_read_async(vk_context * ctx, vk_buffer* src, size_t o } static void ggml_vk_buffer_read(vk_buffer* src, size_t offset, void * dst, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_read(" << offset << ", " << size << ")" << std::endl; #endif if(src->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible) { @@ -1746,7 +1718,7 @@ static void ggml_vk_buffer_read(vk_buffer* src, size_t offset, void * dst, size_ } static void ggml_vk_buffer_copy_async(vk_context * ctx, vk_buffer * dst, size_t dst_offset, vk_buffer * src, size_t src_offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_copy_async(" << size << ")" << std::endl; #endif VkBufferCopy bc{ src_offset, dst_offset, size }; @@ -1755,7 +1727,7 @@ static void ggml_vk_buffer_copy_async(vk_context * ctx, vk_buffer * dst, size_t } static void ggml_vk_buffer_copy(vk_buffer * dst, size_t dst_offset, vk_buffer * src, size_t src_offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_copy(" << size << ")" << std::endl; #endif VkBufferCopy bc{ src_offset, dst_offset, size }; @@ -1771,7 +1743,7 @@ static void ggml_vk_buffer_copy(vk_buffer * dst, size_t dst_offset, vk_buffer * } static void ggml_vk_buffer_memset(vk_buffer* dst, size_t offset, uint32_t c, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_memset(" << offset << ", " << c << ", " << size << ")" << std::endl; #endif vk_context * ctx = ggml_vk_create_context(vk_device.transfer_queue); @@ -1785,7 +1757,7 @@ static void ggml_vk_buffer_memset(vk_buffer* dst, size_t offset, uint32_t c, siz } static void ggml_vk_h2d_tensor_2d(vk_context * ctx, vk_buffer * dst, size_t offset, const ggml_tensor * src, uint64_t i3, uint64_t i2, uint64_t i1) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_h2d_tensor_2d(dst=" << dst << ", offset=" << offset << ", src=" << src << ", i3=" << i3 << ", i2=" << i2 << ", i1=" << i1 << ")" << std::endl; #endif const uint64_t ne0 = src->ne[0]; @@ -1815,7 +1787,7 @@ static void ggml_vk_h2d_tensor_2d(vk_context * ctx, vk_buffer * dst, size_t offs } static void ggml_vk_d2h_tensor_2d(vk_context * ctx, vk_buffer * src, size_t offset, const ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_d2h_tensor_2d()" << std::endl; #endif const uint64_t ne0 = dst->ne[0]; @@ -1841,24 +1813,24 @@ static void ggml_vk_d2h_tensor_2d(vk_context * ctx, vk_buffer * src, size_t offs } static uint32_t ggml_vk_guess_split_k(int m, int n, int k) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_split_k(" << m << ", " << n << ", " << k << ")"; #endif if (k > 128 && (m < 128 || n < 128) && m > 2 && n > 2) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " = 4" << std::endl; #endif return 4; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " = 1" << std::endl; #endif return 1; } static 
uint32_t ggml_vk_guess_matmul_pipeline_align(int m, int n) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline_align(" << m << ", " << n << ")" << std::endl; #endif if (m <= 32 || n <= 32) { @@ -1871,41 +1843,41 @@ static uint32_t ggml_vk_guess_matmul_pipeline_align(int m, int n) { } static vk_pipeline* ggml_vk_guess_matmul_pipeline(bool bit16_x, bool bit16_y, int m, int n, bool aligned) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; #endif if (bit16_x && bit16_y) { - if (m <= 32 || n <= 32) { -#ifdef VK_DEBUG + if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_aligned_s : &vk_pipeline_matmul_f16_s; } if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_aligned_m : &vk_pipeline_matmul_f16_m; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_aligned_l : &vk_pipeline_matmul_f16_l; } if (bit16_x && !bit16_y) { - if (m <= 32 || n <= 32) { -#ifdef VK_DEBUG + if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_f32_aligned_s : &vk_pipeline_matmul_f16_f32_s; } if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_f32_aligned_m : &vk_pipeline_matmul_f16_f32_m; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f16_f32_aligned_l : &vk_pipeline_matmul_f16_f32_l; @@ -1914,30 +1886,30 @@ static vk_pipeline* ggml_vk_guess_matmul_pipeline(bool bit16_x, bool bit16_y, in GGML_ASSERT(false); } - if (m <= 32 || n <= 32) { -#ifdef VK_DEBUG + if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f32_aligned_s : &vk_pipeline_matmul_f32_s; } if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif return aligned ? &vk_pipeline_matmul_f32_aligned_m : &vk_pipeline_matmul_f32_m; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif return aligned ? 
&vk_pipeline_matmul_f32_aligned_l : &vk_pipeline_matmul_f32_l; } static void ggml_vk_matmul(vk_context * ctx, vk_pipeline& pipeline, vk_subbuffer&& a, vk_subbuffer&& b, vk_subbuffer&& d, vk_subbuffer&& split_k_buffer, uint32_t m, uint32_t n, uint32_t k, uint32_t stride_a, uint32_t stride_b, uint32_t stride_d, uint32_t split_k, uint32_t batch, uint32_t ne02, uint32_t ne12, uint32_t broadcast2, uint32_t broadcast3, uint32_t batch_stride_a, uint32_t batch_stride_b, uint32_t batch_stride_d) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_matmul(a: (" << a.buffer.buffer << ", " << a.offset << ", " << a.size << "), b: (" << b.buffer.buffer << ", " << b.offset << ", " << b.size << "), c: (" << d.buffer.buffer << ", " << d.offset << ", " << d.size << "), split_k: (" << split_k_buffer.buffer.buffer << ", " << split_k_buffer.offset << ", " << split_k_buffer.size << "), m: " << m << ", n: " << n << ", k: " << k << ", stride_a: " << stride_a << ", stride_b: " << stride_b << ", stride_d: " << stride_d << ", split_k: " << split_k << ", batch: " << batch << ", ne02: " << ne02 << ", ne12: " << ne12 << ", broadcast2: " << broadcast2 << ", broadcast3: " << broadcast3 << ", batch_stride_a: " << batch_stride_a << ", batch_stride_b: " << batch_stride_b << ", batch_stride_d: " << batch_stride_d << ")" << std::endl; #endif + ggml_vk_sync_buffers(ctx); if (split_k == 1) { - ggml_vk_sync_buffers(ctx); const std::array pc = { m, n, k, stride_a, stride_b, stride_d, k, ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d }; ggml_vk_dispatch_pipeline(ctx, pipeline, { a, b, d }, pc.size() * sizeof(uint32_t), pc.data(), { m, n, batch }); return; @@ -1945,10 +1917,6 @@ static void ggml_vk_matmul(vk_context * ctx, vk_pipeline& pipeline, vk_subbuffer GGML_ASSERT(batch_stride_d == m * n); - // Synchronize the two submissions - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.fillBuffer(split_k_buffer.buffer.buffer, 0, split_k_buffer.size, 0); - ggml_vk_sync_buffers(ctx); const std::array pc1 = { m, n, k, stride_a, stride_b, stride_d, CEIL_DIV(k, split_k), ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d }; // Make sure enough workgroups get assigned for split k to work ggml_vk_dispatch_pipeline(ctx, pipeline, { a, b, split_k_buffer }, pc1.size() * sizeof(uint32_t), pc1.data(), { (CEIL_DIV(m, pipeline.wg_denoms[0]) * pipeline.wg_denoms[0]) * split_k, n, batch }); @@ -1980,7 +1948,7 @@ static vk_pipeline * ggml_vk_get_cpy_pipeline(ggml_type from, ggml_type to) { } static void ggml_vk_cpy_to_contiguous(vk_context * ctx, vk_pipeline * pipeline, const ggml_tensor * tensor, vk_subbuffer&& in, vk_subbuffer&& out, ggml_type buffer_type, bool aligned=true) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_cpy_to_contiguous((" << tensor << ", type=" << tensor->type << ", backend=" << tensor->backend << ", ne0=" << tensor->ne[0] << ", ne1=" << tensor->ne[1] << ", ne2=" << tensor->ne[2] << ", ne3=" << tensor->ne[3] << ", nb0=" << tensor->nb[0] << ", nb1=" << tensor->nb[1] << ", nb2=" << tensor->nb[2] << ", nb3=" << tensor->nb[3] << "), "; std::cerr << "buffer in size=" << in.buffer.size << ", buffer out size=" << out.buffer.size << ")" << std::endl; #endif @@ -2002,7 +1970,7 @@ static void ggml_vk_cpy_to_contiguous(vk_context * ctx, vk_pipeline * pipeline, } static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << 
"ggml_vk_mul_mat_q_f16((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; @@ -2186,7 +2154,7 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co } static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_vec_q_f16((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; @@ -2366,7 +2334,7 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 } static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_p021_f16_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << 
dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; @@ -2455,7 +2423,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor } static void ggml_vk_mul_mat_vec_nc_f16_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_nc_f16_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; @@ -2561,7 +2529,7 @@ static bool ggml_vk_can_mul_mat(const ggml_tensor * src0, const ggml_tensor * sr } static void ggml_vk_mul_mat(vk_context * ctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat(" << src0 << ", " << src1 << ", " << dst << ")" << std::endl; #endif if (src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { @@ -2774,7 +2742,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * template static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op, const PC&& pc) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_op_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; if (src1 != nullptr) { std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -3095,7 +3063,7 @@ static void ggml_vk_nop(vk_context * ctx, const ggml_tensor * src0, ggml_tensor } } -#ifdef VK_RUN_TESTS +#ifdef GGML_VULKAN_RUN_TESTS static void ggml_vk_print_matrix_area(const void * data, ggml_type type, int ne0, int ne1, int i0, int i1, int i2) { if (type != GGML_TYPE_F32 && type != GGML_TYPE_F16) { return; @@ -3129,7 +3097,7 @@ static void ggml_vk_print_matrix_area(const void * data, ggml_type type, int ne0 template static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size_t num_it, int split_k, int shader_size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_matmul(" << m << ", " 
<< n << ", " << k << ", " << batch << ", " << num_it << ", " << split_k << ", " << shader_size << ")" << std::endl; #endif const size_t x_ne = m * k * batch; @@ -3520,7 +3488,7 @@ static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) } static void ggml_vk_test_transfer(size_t ne, bool pinned) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_transfer(" << ne << ")" << std::endl; #endif // Check transfers are correct @@ -3600,10 +3568,103 @@ static void ggml_vk_test_transfer(size_t ne, bool pinned) { free(y); } } + +static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_test_dequant(" << ne << ")" << std::endl; +#endif + const size_t x_sz = sizeof(float) * ne; + const size_t x_sz_f16 = sizeof(ggml_fp16_t) * ne; + const size_t qx_sz = ne * ggml_type_size(quant)/ggml_blck_size(quant); + float * x = (float *) malloc(x_sz); + void * qx = malloc(qx_sz); + vk_buffer qx_buf = ggml_vk_create_buffer_check(qx_sz, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer x_buf = ggml_vk_create_buffer_check(x_sz_f16, vk::MemoryPropertyFlagBits::eDeviceLocal); + ggml_fp16_t * x_chk = (ggml_fp16_t *) malloc(x_sz_f16); + + for (size_t i = 0; i < ne; i++) { + x[i] = rand() / (float)RAND_MAX; + } + + std::vector hist_cur(1 << 4, 0); + + vk_pipeline& p = vk_pipeline_dequant[quant]; + + switch(quant) { + case GGML_TYPE_Q4_0: + ggml_quantize_q4_0(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q4_1: + ggml_quantize_q4_1(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_0: + ggml_quantize_q5_0(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_1: + ggml_quantize_q4_1(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q8_0: + ggml_quantize_q8_0(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q2_K: + ggml_quantize_q2_K(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q3_K: + ggml_quantize_q3_K(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q4_K: + ggml_quantize_q4_K(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q5_K: + ggml_quantize_q5_K(x, qx, ne, ne, hist_cur.data()); + break; + case GGML_TYPE_Q6_K: + ggml_quantize_q6_K(x, qx, ne, ne, hist_cur.data()); + break; + default: + GGML_ASSERT(false); + } + + ggml_vk_pipeline_allocate_descriptor_sets(p, 1); + + ggml_vk_buffer_write(&qx_buf, 0, qx, qx_sz); + + vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); + ggml_vk_ctx_begin(ctx); + const std::vector pc = { 1, (int)ne, (int)ne, (int)ne }; + ggml_vk_sync_buffers(ctx); + ggml_vk_dispatch_pipeline(ctx, p, { { qx_buf, 0, qx_sz }, { x_buf, 0, x_sz_f16 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)ne, 1, 1}); + ggml_vk_ctx_end(ctx); + + auto begin = std::chrono::high_resolution_clock::now(); + + ggml_vk_submit(ctx, vk_fence); + VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); + vk_device.device.resetFences({ vk_fence }); + + auto end = std::chrono::high_resolution_clock::now(); + + double ms_dequant = std::chrono::duration_cast(end-begin).count() / 1000.0; + ggml_vk_buffer_read(&x_buf, 0, x_chk, x_sz_f16); + + double avg_err = 0.0; + for (size_t i = 0; i < ne; i++) { + avg_err += std::fabs(x[i] - ggml_fp16_to_fp32(x_chk[i])); + } + + std::cerr << "TEST DEQUANT " << ggml_type_name(quant) << " time=" << ms_dequant << "ms avg_err=" << avg_err / ne << std::endl; + + ggml_vk_destroy_buffer(x_buf); + ggml_vk_destroy_buffer(qx_buf); + + free(x); + 
free(qx); + free(x_chk); +} #endif static ggml_tensor_extra_gpu * ggml_vk_tensor_create_extra(ggml_tensor * tensor) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_extra(" << tensor << " (" << tensor->name << ", " << ggml_op_name(tensor->op) << "))" << std::endl; #endif ggml_tensor_extra_gpu * extra = new ggml_tensor_extra_gpu; @@ -3627,7 +3688,7 @@ static ggml_tensor * ggml_vk_find_last_use(const ggml_tensor * node, ggml_cgraph } void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_preallocate_buffers_graph(" << node << ")" << std::endl; #endif const bool any_on_device = node->backend == GGML_BACKEND_GPU @@ -3746,15 +3807,26 @@ void ggml_vk_preallocate_buffers() { if (vk_disable) { return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_preallocate_buffers()" << std::endl; std::cerr << "qx_size: " << vk_prealloc_size_qx << " qy_size: " << vk_prealloc_size_qy << " x_size: " << vk_prealloc_size_x << " y_size: " << vk_prealloc_size_y << " split_k_size: " << vk_prealloc_size_split_k << std::endl; #endif -#if defined(VK_RUN_TESTS) +#if defined(GGML_VULKAN_RUN_TESTS) vk_staging = ggml_vk_create_buffer_check(100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); ggml_vk_test_transfer(8192 * 1000, false); ggml_vk_test_transfer(8192 * 1000, true); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_0); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_1); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_0); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_1); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q8_0); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q2_K); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q3_K); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_K); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_K); + ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q6_K); + const std::vector vals { 8, 8, 8, 100, 46, 576, @@ -3845,7 +3917,7 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_build_graph(" << node << ", " << ggml_op_name(node->op) << ")" << std::endl; #endif vk_semaphore_idx = 0; @@ -4068,7 +4140,7 @@ bool ggml_vk_compute_forward(ggml_compute_params * params, ggml_tensor * tensor) return true; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_compute_forward(" << tensor << ", name=" << tensor->name << ", op=" << ggml_op_name(tensor->op) << ", type=" << tensor->type << ", backend=" << tensor->backend << ", ne0=" << tensor->ne[0] << ", ne1=" << tensor->ne[1] << ", ne2=" << tensor->ne[2] << ", ne3=" << tensor->ne[3] << ", nb0=" << tensor->nb[0] << ", nb1=" << tensor->nb[1] << ", nb2=" << tensor->nb[2] << ", nb3=" << tensor->nb[3] << ", view_src=" << tensor->view_src << ", view_offs=" << tensor->view_offs << ")" << std::endl; #endif @@ -4111,7 +4183,7 @@ void ggml_vk_graph_cleanup() { if (vk_disable) { return; } -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_graph_cleanup()" << std::endl; #endif for (auto& buffer : vk_gc.temp_buffers) { @@ -4150,7 +4222,7 @@ void ggml_vk_graph_cleanup() { } static void ggml_vk_cleanup() { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_cleanup()" << std::endl; #endif ggml_vk_destroy_buffer(vk_prealloc_x); @@ -4234,7 +4306,7 @@ GGML_CALL static void * ggml_backend_vk_buffer_get_base(ggml_backend_buffer_t bu } 
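For scale, each ggml_vk_test_dequant call added above runs over ne = 2560 * 7680 = 19,660,800 elements. For GGML_TYPE_Q4_0 (32 elements per 18-byte block: one f16 scale plus 16 bytes of packed nibbles) the buffers work out to:

    qx_sz    = ne * ggml_type_size(quant) / ggml_blck_size(quant)
             = 19660800 * 18 / 32 = 11,059,200 bytes, about 10.5 MiB of quantized input
    x_sz_f16 = 2 * ne = 39,321,600 bytes, about 37.5 MiB of dequantized f16 output

against 75 MiB for the f32 reference kept on the host, so the whole test suite coexists comfortably with the 100 MiB staging buffer allocated just before it.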
GGML_CALL static void ggml_backend_vk_buffer_init_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_init_tensor(" << buffer << " (" << buffer->context << "), " << tensor << ")" << std::endl; #endif ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; @@ -4254,7 +4326,7 @@ GGML_CALL static void ggml_backend_vk_buffer_init_tensor(ggml_backend_buffer_t b } GGML_CALL static void ggml_backend_vk_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_set_tensor(" << buffer << ", " << tensor << ", " << data << ", " << offset << ", " << size << ")" << std::endl; #endif GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -4267,7 +4339,7 @@ GGML_CALL static void ggml_backend_vk_buffer_set_tensor(ggml_backend_buffer_t bu } GGML_CALL static void ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_get_tensor(" << buffer << ", " << tensor << ", " << data << ", " << offset << ", " << size << ")" << std::endl; #endif GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); @@ -4323,7 +4395,7 @@ GGML_CALL static const char * ggml_backend_vk_buffer_type_name(ggml_backend_buff } GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_type_alloc_buffer(" << size << ")" << std::endl; #endif vk_buffer dev_buffer = ggml_vk_create_buffer_device(size); @@ -4467,7 +4539,7 @@ GGML_CALL static ggml_backend_buffer_type_t ggml_backend_vk_get_default_buffer_t } GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_set_tensor_async(" << size << ")" << std::endl; #endif GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type() || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); @@ -4475,19 +4547,19 @@ GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, g ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (vk_ctx == nullptr) { + if (vk_transfer_ctx == nullptr) { // Initialize new transfer context - vk_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_ctx); + vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); + ggml_vk_ctx_begin(vk_transfer_ctx); } - ggml_vk_buffer_write_async(vk_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); + ggml_vk_buffer_write_async(vk_transfer_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); UNUSED(backend); } GGML_CALL static void ggml_backend_vk_get_tensor_async(ggml_backend_t backend, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_get_tensor_async(" << size << ")" << std::endl; #endif GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type() || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); @@ -4495,32 +4567,32 @@ GGML_CALL static void 
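The vk_ctx to vk_transfer_ctx rename threaded through these hooks makes explicit that this shared context lives on the transfer queue, separate from the contexts used for compute. All three async paths (set_tensor, get_tensor, cpy_tensor) open it lazily with the same pattern, taken directly from the code:

if (vk_transfer_ctx == nullptr) {
    // Initialize new transfer context on the dedicated transfer queue
    vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue);
    ggml_vk_ctx_begin(vk_transfer_ctx);
}

The context is deliberately left open so that consecutive async transfers batch into a single submission; ggml_backend_vk_synchronize ends it, flushes the staged host-side memcpys around the fence wait, and resets the pointer to nullptr.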
ggml_backend_vk_get_tensor_async(ggml_backend_t backend, c ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (vk_ctx == nullptr) { + if (vk_transfer_ctx == nullptr) { // Initialize new transfer context - vk_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_ctx); + vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); + ggml_vk_ctx_begin(vk_transfer_ctx); } - ggml_vk_buffer_read_async(vk_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); + ggml_vk_buffer_read_async(vk_transfer_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); UNUSED(backend); } GGML_CALL static bool ggml_backend_vk_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_cpy_tensor_async()" << std::endl; #endif if ((dst->buffer->buft == ggml_backend_vk_buffer_type() || dst->buffer->buft == ggml_backend_vk_host_buffer_type()) && ggml_backend_buffer_is_vk(src->buffer)) { ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - if (vk_ctx == nullptr) { + if (vk_transfer_ctx == nullptr) { // Initialize new transfer context - vk_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_ctx); + vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); + ggml_vk_ctx_begin(vk_transfer_ctx); } - ggml_vk_buffer_copy_async(vk_ctx, &src_extra->buffer_gpu, src_extra->offset, &dst_extra->buffer_gpu, dst_extra->offset, ggml_nbytes(src)); + ggml_vk_buffer_copy_async(vk_transfer_ctx, &src_extra->buffer_gpu, src_extra->offset, &dst_extra->buffer_gpu, dst_extra->offset, ggml_nbytes(src)); return true; } @@ -4530,28 +4602,28 @@ GGML_CALL static bool ggml_backend_vk_cpy_tensor_async(ggml_backend_t backend, c } GGML_CALL static void ggml_backend_vk_synchronize(ggml_backend_t backend) { -#ifdef VK_DEBUG +#ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_synchronize()" << std::endl; #endif - if(vk_ctx == nullptr) { + if(vk_transfer_ctx == nullptr) { return; } - ggml_vk_ctx_end(vk_ctx); + ggml_vk_ctx_end(vk_transfer_ctx); - for (auto& cpy : vk_ctx->in_memcpys) { + for (auto& cpy : vk_transfer_ctx->in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ggml_vk_submit(vk_ctx, vk_fence); + ggml_vk_submit(vk_transfer_ctx, vk_fence); VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_backend_vk_synchronize waitForFences"); vk_device.device.resetFences({ vk_fence }); - for (auto& cpy : vk_ctx->out_memcpys) { + for (auto& cpy : vk_transfer_ctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - vk_ctx = nullptr; + vk_transfer_ctx = nullptr; UNUSED(backend); } diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py index 67981a751..4abb0383f 100644 --- a/ggml_vk_generate_shaders.py +++ b/ggml_vk_generate_shaders.py @@ -157,19 +157,10 @@ struct block_q6_K # Dequant functions shader_f16_dequant_func = """ -#define DEQUANT_FUNC f16vec2 v = f16vec2(data_a[ib + 0], data_a[ib + 1]); -""" -shader_f16_dequant_func_compat = """ #define DEQUANT_FUNC vec2 v = vec2(data_a[ib + 0], data_a[ib + 1]); """ shader_q4_0_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -const uint8_t vui = data_a[ib].qs[iqs]; \ -f16vec2 v = f16vec2(vui & 0xF, vui >> 4); \ -v = (v - 8.0hf)*d; -""" -shader_q4_0_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const uint 
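With the fp16-specific variants deleted from ggml_vk_generate_shaders.py below, the former *_compat functions, which compute in 32-bit float, become the only dequant bodies. As a host-side reference for the Q4_0 DEQUANT_FUNC: each qs byte packs two 4-bit values, both recentered by -8 and scaled by the block's f16 delta d:

#include <cstdint>

static inline void dequant_q4_0_pair(float d, uint8_t vui, float & v0, float & v1) {
    v0 = ((vui & 0xF) - 8.0f) * d;  // low nibble
    v1 = ((vui >> 4)  - 8.0f) * d;  // high nibble
}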
vui = uint(data_a[ib].qs[iqs]); \ vec2 v = vec2(vui & 0xF, vui >> 4); \ @@ -177,13 +168,6 @@ v = (v - 8.0f)*d; """ shader_q4_1_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -const float16_t m = data_a[ib].m; \ -const uint8_t vui = data_a[ib].qs[iqs]; \ -f16vec2 v = f16vec2(vui & 0xF, vui >> 4); \ -v = v*d + m; -""" -shader_q4_1_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const float m = float(data_a[ib].m); \ const uint vui = uint(data_a[ib].qs[iqs]); \ @@ -192,14 +176,6 @@ v = v*d + m; """ shader_q5_0_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -const uint uint_qh = uint(data_a[ib].qh[1]) << 16 | data_a[ib].qh[0]; \ -const ivec2 qh = ivec2(((uint_qh >> iqs) << 4) & 0x10, (uint_qh >> (iqs + 12)) & 0x10); \ -const uint8_t vui = data_a[ib].qs[iqs]; \ -f16vec2 v = f16vec2((vui & 0xF) | qh.x, (vui >> 4) | qh.y); \ -v = (v - 16.0hf) * d; -""" -shader_q5_0_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const uint uint_qh = uint(data_a[ib].qh[1]) << 16 | data_a[ib].qh[0]; \ const ivec2 qh = ivec2(((uint_qh >> iqs) << 4) & 0x10, (uint_qh >> (iqs + 12)) & 0x10); \ @@ -209,14 +185,6 @@ v = (v - 16.0f) * d; """ shader_q5_1_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -const float16_t m = data_a[ib].m; \ -const ivec2 qh = ivec2(((data_a[ib].qh >> iqs) << 4) & 0x10, (data_a[ib].qh >> (iqs + 12)) & 0x10); \ -const uint8_t vui = data_a[ib].qs[iqs]; \ -f16vec2 v = f16vec2((vui & 0xF) | qh.x, (vui >> 4) | qh.y); \ -v = v*d + m; -""" -shader_q5_1_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ const float m = float(data_a[ib].m); \ const ivec2 qh = ivec2(((data_a[ib].qh >> iqs) << 4) & 0x10, (data_a[ib].qh >> (iqs + 12)) & 0x10); \ @@ -226,11 +194,6 @@ v = v*d + m; """ shader_q8_0_dequant_func = """ -#define DEQUANT_FUNC const float16_t d = data_a[ib].d; \ -f16vec2 v = f16vec2(data_a[ib].qs[iqs], data_a[ib].qs[iqs + 1]); \ -v = v * d; -""" -shader_q8_0_dequant_func_compat = """ #define DEQUANT_FUNC const float d = float(data_a[ib].d); \ vec2 v = vec2(int(data_a[ib].qs[iqs]), int(data_a[ib].qs[iqs + 1])); \ v = v * d; @@ -2110,7 +2073,7 @@ lock = asyncio.Lock() shader_fnames = [] -async def string_to_spv(name, code, defines, fp16): +async def string_to_spv(name, code, defines, fp16=True): f = NamedTemporaryFile(mode="w", delete=False) f.write(code) f.flush() @@ -2200,64 +2163,6 @@ async def main(): tasks.append(string_to_spv("matmul_f16_f32_aligned_m", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) tasks.append(string_to_spv("matmul_f16_f32_aligned_s", "".join(stream), {"LOAD_VEC": load_vec, "A_TYPE": vec_type_f16, "B_TYPE": vec_type, "D_TYPE": "float"}, fp16)) - # Build dequant shaders - tasks.append(string_to_spv("f32_to_f16", f32_to_f16_src, {}, fp16)) - - for i in range(0, VK_NUM_TYPES): - stream.clear() - - stream.extend((dequant_head, shader_int8_ext, shader_float_type)) - - if i == GGML_TYPE_F16: - stream.extend((shader_f16_defines, shader_f16_dequant_func_compat if not fp16 else shader_f16_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q4_0: - stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func_compat if not fp16 else shader_q4_0_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q4_1: - stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func_compat if not fp16 else shader_q4_1_dequant_func, dequant_body)) - 
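Q5_0 and Q5_1 extend this with a fifth bit per value stored in the block's qh word; the shader moves it into bit position 4 (mask 0x10) before applying the scale, with Q5_0 recentering by -16 and Q5_1 adding the block minimum m instead. An equivalent host-side form of the Q5_0 function above:

#include <cstdint>

// qh is the reassembled 32-bit field: (qh[1] << 16) | qh[0] as in the shader.
static inline void dequant_q5_0_pair(float d, uint32_t qh, int iqs, uint8_t vui, float & v0, float & v1) {
    const int h0 = ((qh >> iqs) << 4) & 0x10;   // 5th bit of the low value
    const int h1 = (qh >> (iqs + 12)) & 0x10;   // 5th bit of the high value
    v0 = (((vui & 0xF) | h0) - 16.0f) * d;
    v1 = (((vui >> 4)  | h1) - 16.0f) * d;
}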
elif i == GGML_TYPE_Q5_0: - stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func_compat if not fp16 else shader_q5_0_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q5_1: - stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func_compat if not fp16 else shader_q5_1_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q8_0: - stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func_compat if not fp16 else shader_q8_0_dequant_func, dequant_body)) - elif i == GGML_TYPE_Q2_K: - stream.extend((shader_q2_K_defines, dequant_q2_K_body)) - elif i == GGML_TYPE_Q3_K: - stream.extend((shader_q3_K_defines, dequant_q3_K_body)) - elif i == GGML_TYPE_Q4_K: - stream.extend((shader_q4_K_defines, dequant_q4_K_body)) - elif i == GGML_TYPE_Q5_K: - stream.extend((shader_q5_K_defines, dequant_q5_K_body)) - elif i == GGML_TYPE_Q6_K: - stream.extend((shader_q6_K_defines, dequant_q6_K_body)) - else: - continue - - tasks.append(string_to_spv(f"dequant_{type_names[i]}", "".join(stream), {"D_TYPE": "float16_t"}, fp16)) - - # get_rows - for i in range(0, VK_NUM_TYPES): - stream.clear() - stream.extend((generic_head, shader_int8_ext, shader_float_type)) - - if i == GGML_TYPE_F16: - stream.extend((shader_f16_defines, shader_f16_dequant_func_compat if not fp16 else shader_f16_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q4_0: - stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func_compat if not fp16 else shader_q4_0_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q4_1: - stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func_compat if not fp16 else shader_q4_1_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q5_0: - stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func_compat if not fp16 else shader_q5_0_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q5_1: - stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func_compat if not fp16 else shader_q5_1_dequant_func, get_rows_body)) - elif i == GGML_TYPE_Q8_0: - stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func_compat if not fp16 else shader_q8_0_dequant_func, get_rows_body)) - else: - continue - - tasks.append(string_to_spv(f"get_rows_{type_names[i]}", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float16_t"}, fp16)) - tasks.append(string_to_spv(f"get_rows_{type_names[i]}_f32", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float"}, fp16)) - # Shaders where precision is needed, so no fp16 version # mul mat vec @@ -2266,17 +2171,17 @@ async def main(): stream.extend((mul_mat_vec_head, shader_int8_ext, shader_f32)) if i == GGML_TYPE_F16: - stream.extend((shader_f16_defines, shader_f16_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_f16_defines, shader_f16_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q4_0: - stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q4_1: - stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q5_0: - stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q5_1: - stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func, mul_mat_vec_body)) elif i == 
GGML_TYPE_Q8_0: - stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func_compat, mul_mat_vec_body)) + stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func, mul_mat_vec_body)) elif i == GGML_TYPE_Q2_K: stream.extend((shader_q2_K_defines, mul_mat_vec_q2_K_body)) elif i == GGML_TYPE_Q3_K: @@ -2290,43 +2195,101 @@ async def main(): else: continue - tasks.append(string_to_spv(f"mul_mat_vec_{type_names[i]}_f32", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float", "K_QUANTS_PER_ITERATION": K_QUANTS_PER_ITERATION}, fp16)) + tasks.append(string_to_spv(f"mul_mat_vec_{type_names[i]}_f32", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float", "K_QUANTS_PER_ITERATION": K_QUANTS_PER_ITERATION})) - tasks.append(string_to_spv("mul_mat_vec_p021_f16_f32", mul_mat_p021_src, {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("mul_mat_vec_nc_f16_f32", mul_mat_nc_src, {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"}, True)) + # Dequant shaders + for i in range(0, VK_NUM_TYPES): + stream.clear() + + stream.extend((dequant_head, shader_int8_ext, shader_f32)) + + if i == GGML_TYPE_F16: + stream.extend((shader_f16_defines, shader_f16_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q4_0: + stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q4_1: + stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q5_0: + stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q5_1: + stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q8_0: + stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func, dequant_body)) + elif i == GGML_TYPE_Q2_K: + stream.extend((shader_q2_K_defines, dequant_q2_K_body)) + elif i == GGML_TYPE_Q3_K: + stream.extend((shader_q3_K_defines, dequant_q3_K_body)) + elif i == GGML_TYPE_Q4_K: + stream.extend((shader_q4_K_defines, dequant_q4_K_body)) + elif i == GGML_TYPE_Q5_K: + stream.extend((shader_q5_K_defines, dequant_q5_K_body)) + elif i == GGML_TYPE_Q6_K: + stream.extend((shader_q6_K_defines, dequant_q6_K_body)) + else: + continue + + tasks.append(string_to_spv(f"dequant_{type_names[i]}", "".join(stream), {"D_TYPE": "float16_t"})) + + tasks.append(string_to_spv("f32_to_f16", f32_to_f16_src, {})) + + # get_rows + for i in range(0, VK_NUM_TYPES): + stream.clear() + stream.extend((generic_head, shader_int8_ext, shader_f32)) + + if i == GGML_TYPE_F16: + stream.extend((shader_f16_defines, shader_f16_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q4_0: + stream.extend((shader_q4_0_defines, shader_q4_0_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q4_1: + stream.extend((shader_q4_1_defines, shader_q4_1_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q5_0: + stream.extend((shader_q5_0_defines, shader_q5_0_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q5_1: + stream.extend((shader_q5_1_defines, shader_q5_1_dequant_func, get_rows_body)) + elif i == GGML_TYPE_Q8_0: + stream.extend((shader_q8_0_defines, shader_q8_0_dequant_func, get_rows_body)) + else: + continue + + tasks.append(string_to_spv(f"get_rows_{type_names[i]}", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float16_t"})) + tasks.append(string_to_spv(f"get_rows_{type_names[i]}_f32", "".join(stream), {"B_TYPE": "float", "D_TYPE": "float"})) + + tasks.append(string_to_spv("mul_mat_vec_p021_f16_f32", mul_mat_p021_src, {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": 
"float"})) + tasks.append(string_to_spv("mul_mat_vec_nc_f16_f32", mul_mat_nc_src, {"A_TYPE": "float16_t", "B_TYPE": "float", "D_TYPE": "float"})) # Norms - tasks.append(string_to_spv("norm_f32", f"{generic_head}\n{shader_f32}\n{norm_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("rms_norm_f32", f"{generic_head}\n{shader_f32}\n{rms_norm_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("norm_f32", f"{generic_head}\n{shader_f32}\n{norm_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("rms_norm_f32", f"{generic_head}\n{shader_f32}\n{rms_norm_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("cpy_f32_f32", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("cpy_f32_f16", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float16_t"}, True)) - tasks.append(string_to_spv("cpy_f16_f16", f"{cpy_src}\n{cpy_f16_f16_end}", {"A_TYPE": "float16_t", "D_TYPE": "float16_t"}, True)) + tasks.append(string_to_spv("cpy_f32_f32", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("cpy_f32_f16", f"{cpy_src}\n{cpy_end}", {"A_TYPE": "float", "D_TYPE": "float16_t"})) + tasks.append(string_to_spv("cpy_f16_f16", f"{cpy_src}\n{cpy_f16_f16_end}", {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) - tasks.append(string_to_spv("add_f32", f"{generic_head}\n{shader_f32}\n{add_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("add_f32", f"{generic_head}\n{shader_f32}\n{add_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("split_k_reduce", mulmat_split_k_reduce_src, {}, True)) - tasks.append(string_to_spv("mul_f32", f"{generic_head}\n{shader_f32}\n{mul_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("split_k_reduce", mulmat_split_k_reduce_src, {})) + tasks.append(string_to_spv("mul_f32", f"{generic_head}\n{shader_f32}\n{mul_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("scale_f32", f"{generic_head}\n{shader_f32}\n{scale_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("scale_f32", f"{generic_head}\n{shader_f32}\n{scale_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("sqr_f32", f"{generic_head}\n{shader_f32}\n{sqr_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("sqr_f32", f"{generic_head}\n{shader_f32}\n{sqr_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("clamp_f32", f"{generic_head}\n{shader_f32}\n{clamp_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("clamp_f32", f"{generic_head}\n{shader_f32}\n{clamp_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("gelu_f32", f"{generic_head}\n{shader_f32}\n{gelu_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("silu_f32", f"{generic_head}\n{shader_f32}\n{silu_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("relu_f32", f"{generic_head}\n{shader_f32}\n{relu_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("gelu_f32", f"{generic_head}\n{shader_f32}\n{gelu_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("silu_f32", 
f"{generic_head}\n{shader_f32}\n{silu_body}", {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("relu_f32", f"{generic_head}\n{shader_f32}\n{relu_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("diag_mask_inf_f32", f"{diag_mask_inf_head}\n{shader_f32}\n{diag_mask_inf_body}", {"A_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("diag_mask_inf_f32", f"{diag_mask_inf_head}\n{shader_f32}\n{diag_mask_inf_body}", {"A_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("soft_max_f32", f"{generic_head}\n{shader_f32}\n{soft_max_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"}, True)) + tasks.append(string_to_spv("soft_max_f32", f"{generic_head}\n{shader_f32}\n{soft_max_body}", {"A_TYPE": "float", "B_TYPE": "float", "D_TYPE": "float"})) - tasks.append(string_to_spv("rope_f32", rope_src, {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("rope_f16", rope_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"}, True)) + tasks.append(string_to_spv("rope_f32", rope_src, {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("rope_f16", rope_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) - tasks.append(string_to_spv("rope_neox_f32", rope_neox_src, {"A_TYPE": "float", "D_TYPE": "float"}, True)) - tasks.append(string_to_spv("rope_neox_f16", rope_neox_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"}, True)) + tasks.append(string_to_spv("rope_neox_f32", rope_neox_src, {"A_TYPE": "float", "D_TYPE": "float"})) + tasks.append(string_to_spv("rope_neox_f16", rope_neox_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"})) await asyncio.gather(*tasks) From 60ecf099eddfe70fec797ef6790572e452054add Mon Sep 17 00:00:00 2001 From: Martin Schwaighofer Date: Sun, 28 Jan 2024 12:59:43 +0100 Subject: [PATCH 656/859] add Vulkan support to Nix flake --- .devops/nix/package.nix | 21 +++++++++++++++++---- flake.nix | 1 + 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index a868a9a61..ad23f7dd7 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -13,18 +13,22 @@ cudaPackages, darwin, rocmPackages, + vulkan-headers, + vulkan-loader, clblast, useBlas ? builtins.all (x: !x) [ useCuda useMetalKit useOpenCL useRocm + useVulkan ], useCuda ? config.cudaSupport, useMetalKit ? stdenv.isAarch64 && stdenv.isDarwin && !useOpenCL, useMpi ? false, # Increases the runtime closure size by ~700M useOpenCL ? false, useRocm ? config.rocmSupport, + useVulkan ? false, llamaVersion ? 
"0.0.0", # Arbitrary version, substituted by the flake }@inputs: @@ -48,7 +52,8 @@ let ++ lib.optionals useMetalKit [ "MetalKit" ] ++ lib.optionals useMpi [ "MPI" ] ++ lib.optionals useOpenCL [ "OpenCL" ] - ++ lib.optionals useRocm [ "ROCm" ]; + ++ lib.optionals useRocm [ "ROCm" ] + ++ lib.optionals useVulkan [ "Vulkan" ]; pnameSuffix = strings.optionalString (suffices != [ ]) @@ -108,6 +113,11 @@ let hipblas rocblas ]; + + vulkanBuildInputs = [ + vulkan-headers + vulkan-loader + ]; in effectiveStdenv.mkDerivation ( @@ -164,7 +174,8 @@ effectiveStdenv.mkDerivation ( ++ optionals useCuda cudaBuildInputs ++ optionals useMpi [ mpi ] ++ optionals useOpenCL [ clblast ] - ++ optionals useRocm rocmBuildInputs; + ++ optionals useRocm rocmBuildInputs + ++ optionals useVulkan vulkanBuildInputs; cmakeFlags = [ @@ -178,6 +189,7 @@ effectiveStdenv.mkDerivation ( (cmakeBool "LLAMA_HIPBLAS" useRocm) (cmakeBool "LLAMA_METAL" useMetalKit) (cmakeBool "LLAMA_MPI" useMpi) + (cmakeBool "LLAMA_VULKAN" useVulkan) ] ++ optionals useCuda [ ( @@ -218,6 +230,7 @@ effectiveStdenv.mkDerivation ( useMpi useOpenCL useRocm + useVulkan ; shell = mkShell { @@ -242,11 +255,11 @@ effectiveStdenv.mkDerivation ( # Configurations we don't want even the CI to evaluate. Results in the # "unsupported platform" messages. This is mostly a no-op, because # cudaPackages would've refused to evaluate anyway. - badPlatforms = optionals (useCuda || useOpenCL) lib.platforms.darwin; + badPlatforms = optionals (useCuda || useOpenCL || useVulkan) lib.platforms.darwin; # Configurations that are known to result in build failures. Can be # overridden by importing Nixpkgs with `allowBroken = true`. - broken = (useMetalKit && !effectiveStdenv.isDarwin); + broken = (useMetalKit && !effectiveStdenv.isDarwin) || (useVulkan && effectiveStdenv.isDarwin); description = "Inference of LLaMA model in pure C/C++${descriptionSuffix}"; homepage = "https://github.com/ggerganov/llama.cpp/"; diff --git a/flake.nix b/flake.nix index a776ba024..ad2f9b295 100644 --- a/flake.nix +++ b/flake.nix @@ -157,6 +157,7 @@ mpi-cpu = config.packages.default.override { useMpi = true; }; mpi-cuda = config.packages.default.override { useMpi = true; }; + vulkan = config.packages.default.override { useVulkan = true; }; } // lib.optionalAttrs (system == "x86_64-linux") { rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; From 3cc5ed353c07201d8d5b98b0a4713ab633da6d04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 3 Feb 2024 20:14:59 +0100 Subject: [PATCH 657/859] make: fix nvcc optimization flags for host code (#5309) --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index a55d15888..40b16e0ea 100644 --- a/Makefile +++ b/Makefile @@ -109,6 +109,7 @@ MK_NVCCFLAGS += -O3 else MK_CFLAGS += -O3 MK_CXXFLAGS += -O3 +MK_NVCCFLAGS += -O3 endif # clock_gettime came in POSIX.1b (1993) @@ -365,7 +366,7 @@ ifdef LLAMA_CUBLAS MK_CPPFLAGS += -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I$(CUDA_PATH)/targets/x86_64-linux/include -I/usr/local/cuda/targets/aarch64-linux/include MK_LDFLAGS += -lcuda -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib -L/usr/lib/wsl/lib OBJS += ggml-cuda.o - MK_NVCCFLAGS = -use_fast_math + MK_NVCCFLAGS += -use_fast_math ifndef JETSON_EOL_MODULE_DETECT MK_NVCCFLAGS += --forward-unknown-to-host-compiler endif # 
JETSON_EOL_MODULE_DETECT From 3c0d25c4756742ebf15ad44700fabc0700c638bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sat, 3 Feb 2024 20:15:13 +0100 Subject: [PATCH 658/859] make: add nvcc info print (#5310) --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 40b16e0ea..21d5e15ba 100644 --- a/Makefile +++ b/Makefile @@ -553,8 +553,11 @@ $(info I CFLAGS: $(CFLAGS)) $(info I CXXFLAGS: $(CXXFLAGS)) $(info I NVCCFLAGS: $(NVCCFLAGS)) $(info I LDFLAGS: $(LDFLAGS)) -$(info I CC: $(shell $(CC) --version | head -n 1)) -$(info I CXX: $(shell $(CXX) --version | head -n 1)) +$(info I CC: $(shell $(CC) --version | head -n 1)) +$(info I CXX: $(shell $(CXX) --version | head -n 1)) +ifdef LLAMA_CUBLAS +$(info I NVCC: $(shell $(NVCC) --version | tail -n 1)) +endif # LLAMA_CUBLAS $(info ) # From 277fad30c60ef3559dc2d01b19d05e659d40a824 Mon Sep 17 00:00:00 2001 From: Welby Seely Date: Sat, 3 Feb 2024 23:18:51 -0500 Subject: [PATCH 659/859] cmake : use set() for LLAMA_WIN_VER (#5298) option() is specifically for booleans. Fixes #5158 --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index c156c4824..8c04e4c19 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -79,7 +79,7 @@ if (NOT MSVC) endif() if (WIN32) - option(LLAMA_WIN_VER "llama: Windows Version" 0x602) + set(LLAMA_WIN_VER "0x602" CACHE STRING "llama: Windows Version") endif() # 3rd party libs From 5ed26e1fc9fab4ce96ecf2d84183fe45bdcab0d4 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 4 Feb 2024 10:39:58 +0200 Subject: [PATCH 660/859] Adding some imatrix tools (#5302) * imatrix: adding --combine and --continue-from * imatrix: be able to start from a specific chunk --------- Co-authored-by: Iwan Kawrakow --- examples/imatrix/imatrix.cpp | 116 +++++++++++++++++++++++++++++++++-- 1 file changed, 112 insertions(+), 4 deletions(-) diff --git a/examples/imatrix/imatrix.cpp b/examples/imatrix/imatrix.cpp index ea06fcdbf..bc9f6fa68 100644 --- a/examples/imatrix/imatrix.cpp +++ b/examples/imatrix/imatrix.cpp @@ -36,6 +36,8 @@ public: void set_parameters(StatParams&& params) { m_params = std::move(params); } bool collect_imatrix(struct ggml_tensor * t, bool ask, void * user_data); void save_imatrix() const; + bool load_imatrix(const char * file_name, bool add); + static bool load_imatrix(const char * file_name, std::unordered_map& imatrix); private: std::unordered_map m_stats; StatParams m_params; @@ -189,6 +191,57 @@ void IMatrixCollector::save_imatrix(const char * fname) const { } } +bool IMatrixCollector::load_imatrix(const char * imatrix_file, std::unordered_map& imatrix_data) { + std::ifstream in(imatrix_file, std::ios::binary); + if (!in) { + printf("%s: failed to open %s\n",__func__,imatrix_file); + return false; + } + int n_entries; + in.read((char*)&n_entries, sizeof(n_entries)); + if (in.fail() || n_entries < 1) { + printf("%s: no data in file %s\n", __func__, imatrix_file); + return false; + } + for (int i = 0; i < n_entries; ++i) { + int len; in.read((char *)&len, sizeof(len)); + std::vector name_as_vec(len+1); + in.read((char *)name_as_vec.data(), len); + if (in.fail()) { + printf("%s: failed reading name for entry %d from %s\n",__func__,i+1,imatrix_file); + return false; + } + name_as_vec[len] = 0; + std::string name{name_as_vec.data()}; + auto& e = imatrix_data[std::move(name)]; + int ncall; + in.read((char*)&ncall, sizeof(ncall)); + int nval; 
+ in.read((char *)&nval, sizeof(nval)); + if (in.fail() || nval < 1) { + printf("%s: failed reading number of values for entry %d\n",__func__,i); + imatrix_data = {}; + return false; + } + e.values.resize(nval); + in.read((char*)e.values.data(), nval*sizeof(float)); + if (in.fail()) { + printf("%s: failed reading data for entry %d\n",__func__,i); + imatrix_data = {}; + return false; + } + e.ncall = ncall; + } + return true; +} + +bool IMatrixCollector::load_imatrix(const char * file_name, bool add) { + if (!add) { + m_stats.clear(); + } + return load_imatrix(file_name, m_stats); +} + static IMatrixCollector g_collector; static bool ik_collect_imatrix(struct ggml_tensor * t, bool ask, void * user_data) { @@ -269,7 +322,7 @@ static void process_logits( } } -static bool compute_imatrix(llama_context * ctx, const gpt_params & params, bool compute_ppl) { +static bool compute_imatrix(llama_context * ctx, const gpt_params & params, bool compute_ppl, int from_chunk) { const bool add_bos = llama_should_add_bos_token(llama_get_model(ctx)); const int n_ctx = llama_n_ctx(ctx); @@ -282,6 +335,15 @@ static bool compute_imatrix(llama_context * ctx, const gpt_params & params, bool auto tim2 = std::chrono::high_resolution_clock::now(); fprintf(stderr, "%s: tokenization took %g ms\n",__func__,1e-3*std::chrono::duration_cast<std::chrono::microseconds>(tim2-tim1).count()); + if (from_chunk > 0) { + if (size_t((from_chunk + 2)*n_ctx) >= tokens.size()) { + fprintf(stderr, "%s: there will be not enough tokens left after removing %d chunks\n", __func__, from_chunk); + return false; + } + fprintf(stderr, "%s: removing initial %d chunks (%d tokens)\n", __func__, from_chunk, from_chunk*n_ctx); + tokens.erase(tokens.begin(), tokens.begin() + from_chunk*n_ctx); + } + if (int(tokens.size()) < 2*n_ctx) { fprintf(stderr, "%s: you need at least %d tokens for a context of %d tokens\n",__func__,2*n_ctx, n_ctx); @@ -402,7 +464,10 @@ static bool compute_imatrix(llama_context * ctx, const gpt_params & params, bool int main(int argc, char ** argv) { StatParams sparams; + std::string prev_result_file; + std::string combine_files; bool compute_ppl = true; + int from_chunk = 0; std::vector<char*> args; args.push_back(argv[0]); int iarg = 1; @@ -423,6 +488,13 @@ int main(int argc, char ** argv) { compute_ppl = false; } else if (arg == "--keep-imatrix") { sparams.keep_every = std::stoi(argv[++iarg]); + } else if (arg == "--continue-from") { + prev_result_file = argv[++iarg]; + } else if (arg == "--combine") { + combine_files = argv[++iarg]; + } + else if (arg == "--from-chunk") { + from_chunk = std::stoi(argv[++iarg]); } else { args.push_back(argv[iarg]); } @@ -436,14 +508,50 @@ int main(int argc, char ** argv) { } } + g_collector.set_parameters(std::move(sparams)); + + if (!combine_files.empty()) { + std::vector<std::string> files; + size_t pos = 0; + while (true) { + auto new_pos = combine_files.find(',', pos); + if (new_pos != std::string::npos) { + files.emplace_back(combine_files.substr(pos, new_pos - pos)); + pos = new_pos + 1; + } else { + files.emplace_back(combine_files.substr(pos)); + break; + } + } + if (files.size() < 2) { + fprintf(stderr, "You must provide at least two comma separated files to use --combine\n"); + return 1; + } + printf("Combining the following %d files\n", int(files.size())); + for (auto& file : files) { + printf(" %s\n", file.c_str()); + if (!g_collector.load_imatrix(file.c_str(), true)) { + fprintf(stderr, "Failed to load %s\n", file.c_str()); + return 1; + } + } + g_collector.save_imatrix(); + return 0; + } + + if (!prev_result_file.empty()) {
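The reader above pins down the imatrix file layout: an int entry count, then per entry an int name length, the raw name bytes, an int ncall, an int nval, and nval floats. For reference, a hedged sketch of a writer producing that layout; save_imatrix itself is not shown in this hunk, and the Stats field names here are inferred from how the reader uses them:

#include <fstream>
#include <string>
#include <unordered_map>
#include <vector>

struct Stats { std::vector<float> values; int ncall = 0; };  // inferred from the reader

static void write_imatrix(const char * fname, const std::unordered_map<std::string, Stats> & stats) {
    std::ofstream out(fname, std::ios::binary);
    const int n_entries = (int) stats.size();
    out.write((const char *) &n_entries, sizeof(n_entries));
    for (const auto & kv : stats) {
        const int len = (int) kv.first.size();
        out.write((const char *) &len, sizeof(len));
        out.write(kv.first.c_str(), len);                   // name bytes, no terminator
        out.write((const char *) &kv.second.ncall, sizeof(kv.second.ncall));
        const int nval = (int) kv.second.values.size();
        out.write((const char *) &nval, sizeof(nval));
        out.write((const char *) kv.second.values.data(), nval * sizeof(float));
    }
}

This also makes the semantics of --combine concrete: each listed file is loaded with add = true, so entries from all files end up in m_stats before a single save_imatrix writes the result.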
+ if (!g_collector.load_imatrix(prev_result_file.c_str(), false)) { + fprintf(stderr, "=============== Failed to load %s\n", prev_result_file.c_str()); + return 1; + } + } + gpt_params params; params.n_batch = 512; if (!gpt_params_parse(args.size(), args.data(), params)) { return 1; } - g_collector.set_parameters(std::move(sparams)); - params.logits_all = true; params.n_batch = std::min(params.n_batch, params.n_ctx); @@ -495,7 +603,7 @@ int main(int argc, char ** argv) { fprintf(stderr, "%s\n", get_system_info(params).c_str()); } - bool OK = compute_imatrix(ctx, params, compute_ppl); + bool OK = compute_imatrix(ctx, params, compute_ppl, from_chunk); if (!OK) { return 1; } From 9392ebd49ea5ae236a55b47cbf6a13247e8a3b8c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 4 Feb 2024 00:17:24 +0000 Subject: [PATCH 661/859] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'flake-parts': 'github:hercules-ci/flake-parts/07f6395285469419cf9d078f59b5b49993198c00' (2024-01-11) → 'github:hercules-ci/flake-parts/b253292d9c0a5ead9bc98c4e9a26c6312e27d69f' (2024-02-01) • Updated input 'flake-parts/nixpkgs-lib': 'github:NixOS/nixpkgs/b0d36bd0a420ecee3bc916c91886caca87c894e9?dir=lib' (2023-12-30) → 'github:NixOS/nixpkgs/97b17f32362e475016f942bbdfda4a4a72a8a652?dir=lib' (2024-01-29) • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/ae5c332cbb5827f6b1f02572496b141021de335f' (2024-01-25) → 'github:NixOS/nixpkgs/b8b232ae7b8b144397fdb12d20f592e5e7c1a64d' (2024-01-31) --- flake.lock | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/flake.lock b/flake.lock index 95e41f333..8cfc78273 100644 --- a/flake.lock +++ b/flake.lock @@ -5,11 +5,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1704982712, - "narHash": "sha256-2Ptt+9h8dczgle2Oo6z5ni5rt/uLMG47UFTR1ry/wgg=", + "lastModified": 1706830856, + "narHash": "sha256-a0NYyp+h9hlb7ddVz4LUn1vT/PLwqfrWYcHMvFB1xYg=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "07f6395285469419cf9d078f59b5b49993198c00", + "rev": "b253292d9c0a5ead9bc98c4e9a26c6312e27d69f", "type": "github" }, "original": { @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1706191920, - "narHash": "sha256-eLihrZAPZX0R6RyM5fYAWeKVNuQPYjAkCUBr+JNvtdE=", + "lastModified": 1706732774, + "narHash": "sha256-hqJlyJk4MRpcItGYMF+3uHe8HvxNETWvlGtLuVpqLU0=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "ae5c332cbb5827f6b1f02572496b141021de335f", + "rev": "b8b232ae7b8b144397fdb12d20f592e5e7c1a64d", "type": "github" }, "original": { @@ -37,11 +37,11 @@ "nixpkgs-lib": { "locked": { "dir": "lib", - "lastModified": 1703961334, - "narHash": "sha256-M1mV/Cq+pgjk0rt6VxoyyD+O8cOUiai8t9Q6Yyq4noY=", + "lastModified": 1706550542, + "narHash": "sha256-UcsnCG6wx++23yeER4Hg18CXWbgNpqNXcHIo5/1Y+hc=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "b0d36bd0a420ecee3bc916c91886caca87c894e9", + "rev": "97b17f32362e475016f942bbdfda4a4a72a8a652", "type": "github" }, "original": { From 4833ac209da6a427de64f97e8f403dcdc5de6bc3 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Mon, 5 Feb 2024 07:08:24 +0000 Subject: [PATCH 662/859] [SYCL] Fix cpy with dims of 3 (#5289) * Fix cpy with dims of 3 * rm asserts --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- ggml-sycl.cpp | 194 +++++++++++++++++++++++++++++--------------------- 1 file changed, 114 insertions(+), 
80 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index 51445b5e7..a03df4c65 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -7693,6 +7693,13 @@ static void cpy_1_f16_f16(const char * cxi, char * cdsti) { *dsti = *xi; } +static void cpy_1_f16_f32(const char * cxi, char * cdsti) { + const sycl::half *xi = (const sycl::half *)cxi; + float *dsti = (float *)cdsti; + + *dsti = *xi; +} + static void cpy_1_i16_i16(const char * cxi, char * cdsti) { const int16_t *xi = (const int16_t *)cxi; int16_t *dsti = (int16_t *)cdsti; @@ -7709,9 +7716,9 @@ static void cpy_1_i32_i32(const char * cxi, char * cdsti) { template static void cpy_f32_f16(const char * cx, char * cdst, const int ne, - const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, - const int ne10, const int ne11, const int nb10, const int nb11, const int nb12, - const sycl::nd_item<3> &item_ct1) { + const int ne00, const int ne01, const int ne02, const int nb00, const int nb01, const int nb02, + const int nb03, const int ne10, const int ne11, const int ne12, const int nb10, const int nb11, + const int nb12, const int nb13, const sycl::nd_item<3> &item_ct1) { const int i = item_ct1.get_local_range(2) * item_ct1.get_group(2) + item_ct1.get_local_id(2); @@ -7721,15 +7728,17 @@ static void cpy_f32_f16(const char * cx, char * cdst, const int ne, // determine indices i02/i12, i01/i11, i00/i10 as a function of index i of flattened tensor // then combine those indices with the corresponding byte offsets to get the total offsets - const int i02 = i / (ne00*ne01); - const int i01 = (i - i02*ne01*ne00) / ne00; - const int i00 = i - i02*ne01*ne00 - i01*ne00; - const int x_offset = i00*nb00 + i01*nb01 + i02*nb02; + const int i03 = i/(ne00 * ne01 * ne02); + const int i02 = (i - i03*ne00*ne01*ne02 )/ (ne00*ne01); + const int i01 = (i - i03*ne00*ne01*ne02 - i02*ne01*ne00) / ne00; + const int i00 = i - i03*ne00*ne01*ne02 - i02*ne01*ne00 - i01*ne00; + const int x_offset = i00*nb00 + i01*nb01 + i02*nb02 + i03 * nb03; - const int i12 = i / (ne10*ne11); - const int i11 = (i - i12*ne10*ne11) / ne10; - const int i10 = i - i12*ne10*ne11 - i11*ne10; - const int dst_offset = i10*nb10 + i11*nb11 + i12*nb12; + const int i13 = i/(ne10 * ne11 * ne12); + const int i12 = (i - i13*ne10*ne11*ne12) / (ne10*ne11); + const int i11 = (i - i13*ne10*ne11*ne12 - i12*ne10*ne11) / ne10; + const int i10 = i - i13*ne10*ne11*ne12 - i12*ne10*ne11 - i11*ne10; + const int dst_offset = i10*nb10 + i11*nb11 + i12*nb12 + i13 * nb13; cpy_1(cx + x_offset, cdst + dst_offset); } @@ -7823,9 +7832,9 @@ static void cpy_blck_f32_q4_1(const char * cxi, char * cdsti) { template static void cpy_f32_q(const char * cx, char * cdst, const int ne, - const int ne00, const int ne01, const int nb00, const int nb01, const int nb02, - const int ne10, const int ne11, const int nb10, const int nb11, const int nb12, - const sycl::nd_item<3> &item_ct1) { + const int ne00, const int ne01, const int ne02, const int nb00, const int nb01, const int nb02, + const int nb03, const int ne10, const int ne11, const int ne12, const int nb10, const int nb11, + const int nb12, const int nb13, const sycl::nd_item<3> &item_ct1) { const int i = (item_ct1.get_local_range(2) * item_ct1.get_group(2) + item_ct1.get_local_id(2)) * qk; @@ -7834,15 +7843,17 @@ static void cpy_f32_q(const char * cx, char * cdst, const int ne, return; } - const int i02 = i / (ne00*ne01); - const int i01 = (i - i02*ne01*ne00) / ne00; - const int i00 = (i - i02*ne01*ne00 - i01*ne00); - const int x_offset = i00*nb00 + i01*nb01 
+ i02*nb02; + const int i03 = i/(ne00 * ne01 * ne02); + const int i02 = (i - i03*ne00*ne01*ne02 )/ (ne00*ne01); + const int i01 = (i - i03*ne00*ne01*ne02 - i02*ne01*ne00) / ne00; + const int i00 = i - i03*ne00*ne01*ne02 - i02*ne01*ne00 - i01*ne00; + const int x_offset = i00*nb00 + i01*nb01 + i02*nb02 + i03 * nb03; - const int i12 = i / (ne10*ne11); - const int i11 = (i - i12*ne10*ne11) / ne10; - const int i10 = (i - i12*ne10*ne11 - i11*ne10)/qk; - const int dst_offset = i10*nb10 + i11*nb11 + i12*nb12; + const int i13 = i/(ne10 * ne11 * ne12); + const int i12 = (i - i13*ne10*ne11*ne12) / (ne10*ne11); + const int i11 = (i - i13*ne10*ne11*ne12 - i12*ne10*ne11) / ne10; + const int i10 = i - i13*ne10*ne11*ne12 - i12*ne10*ne11 - i11*ne10; + const int dst_offset = (i10/qk)*nb10 + i11*nb11 + i12*nb12 + i13*nb13; cpy_blck(cx + x_offset, cdst + dst_offset); } @@ -10599,10 +10610,12 @@ static void ggml_mul_mat_vec_nc_f16_f32_sycl( static void ggml_cpy_f32_f32_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10615,8 +10628,8 @@ static void ggml_cpy_f32_f32_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -10624,10 +10637,12 @@ static void ggml_cpy_f32_f32_sycl(const char *cx, char *cdst, const int ne, static void ggml_cpy_f32_f16_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10640,8 +10655,8 @@ static void ggml_cpy_f32_f16_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -10649,10 +10664,12 @@ static void ggml_cpy_f32_f16_sycl(const char *cx, char *cdst, const int ne, static void ggml_cpy_f32_q8_0_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, 
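The substance of this fix is identical in cpy_f32_f16 and cpy_f32_q above: the flat work-item index i is now decomposed over all four tensor dimensions instead of three, and the byte offsets pick up the new nb03/nb13 strides, so copies on tensors with ne[3] > 1 address the right bytes (the matching GGML_ASSERT(src0->ne[3] == 1) guards disappear from ggml_sycl_cpy further down). The decomposition, extracted from the kernels:

const int i03 = i / (ne00*ne01*ne02);
const int i02 = (i - i03*ne00*ne01*ne02) / (ne00*ne01);
const int i01 = (i - i03*ne00*ne01*ne02 - i02*ne01*ne00) / ne00;
const int i00 =  i - i03*ne00*ne01*ne02 - i02*ne01*ne00 - i01*ne00;
const int x_offset = i00*nb00 + i01*nb01 + i02*nb02 + i03*nb03;

One subtlety in the quantized path: i10 used to be pre-divided by qk with the destination offset computed as i10*nb10; it now stays an element index and the block index is taken at the point of use, (i10/qk)*nb10. Worked example for Q8_0 (qk = 32; one block is an f16 scale plus 32 int8 quants, 34 bytes, which is nb10 for a contiguous Q8_0 tensor): element i10 = 100 falls in block 100 / 32 = 3, at byte offset 3 * 34 = 102 within its row.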
const int nb13, dpct::queue_ptr stream) { GGML_ASSERT(ne % QK8_0 == 0); @@ -10661,17 +10678,20 @@ static void ggml_cpy_f32_q8_0_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, 1)), [=](sycl::nd_item<3> item_ct1) { cpy_f32_q( - cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, item_ct1); + cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, + item_ct1); }); } static void ggml_cpy_f32_q4_0_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { GGML_ASSERT(ne % QK4_0 == 0); @@ -10680,17 +10700,20 @@ static void ggml_cpy_f32_q4_0_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, 1)), [=](sycl::nd_item<3> item_ct1) { cpy_f32_q( - cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, item_ct1); + cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, + item_ct1); }); } static void ggml_cpy_f32_q4_1_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { GGML_ASSERT(ne % QK4_1 == 0); @@ -10699,17 +10722,20 @@ static void ggml_cpy_f32_q4_1_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, 1)), [=](sycl::nd_item<3> item_ct1) { cpy_f32_q( - cx, cdst, ne, ne00, ne01, nb00, nb01, nb02, - ne10, ne11, nb10, nb11, nb12, item_ct1); + cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, + item_ct1); }); } static void ggml_cpy_f16_f16_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10722,8 +10748,8 @@ static void ggml_cpy_f16_f16_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -10731,10 +10757,12 @@ static void ggml_cpy_f16_f16_sycl(const char *cx, char *cdst, const int ne, static void ggml_cpy_i16_i16_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, 
const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10747,8 +10775,8 @@ static void ggml_cpy_i16_i16_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -10756,10 +10784,12 @@ static void ggml_cpy_i16_i16_sycl(const char *cx, char *cdst, const int ne, static void ggml_cpy_i32_i32_sycl(const char *cx, char *cdst, const int ne, const int ne00, const int ne01, - const int nb00, const int nb01, - const int nb02, const int ne10, - const int ne11, const int nb10, - const int nb11, const int nb12, + const int ne02, const int nb00, + const int nb01, const int nb02, + const int nb03, const int ne10, + const int ne11, const int ne12, + const int nb10, const int nb11, + const int nb12, const int nb13, dpct::queue_ptr stream) { const int num_blocks = (ne + SYCL_CPY_BLOCK_SIZE - 1) / SYCL_CPY_BLOCK_SIZE; @@ -10772,8 +10802,8 @@ static void ggml_cpy_i32_i32_sycl(const char *cx, char *cdst, const int ne, sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE), sycl::range<3>(1, 1, SYCL_CPY_BLOCK_SIZE)), [=](sycl::nd_item<3> item_ct1) { - cpy_f32_f16(cx, cdst, ne, ne00, ne01, nb00, nb01, - nb02, ne10, ne11, nb10, nb11, nb12, + cpy_f32_f16(cx, cdst, ne, ne00, ne01, ne02, nb00, nb01, nb02, + nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, item_ct1); }); } @@ -13910,19 +13940,23 @@ static void ggml_sycl_cpy(const ggml_tensor *src0, const ggml_tensor *src1, const int64_t ne00 = src0->ne[0]; const int64_t ne01 = src0->ne[1]; - GGML_ASSERT(src0->ne[3] == 1); + const int64_t ne02 = src0->ne[2]; + const int64_t nb00 = src0->nb[0]; const int64_t nb01 = src0->nb[1]; const int64_t nb02 = src0->nb[2]; + const int64_t nb03 = src0->nb[3]; const int64_t ne10 = src1->ne[0]; const int64_t ne11 = src1->ne[1]; - GGML_ASSERT(src1->ne[3] == 1); + const int64_t ne12 = src1->ne[2]; + const int64_t nb10 = src1->nb[0]; const int64_t nb11 = src1->nb[1]; const int64_t nb12 = src1->nb[2]; + const int64_t nb13 = src1->nb[3]; SYCL_CHECK(ggml_sycl_set_device(g_main_device)); dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0]; @@ -13934,21 +13968,21 @@ static void ggml_sycl_cpy(const ggml_tensor *src0, const ggml_tensor *src1, char * src1_ddc = (char *) src1_extra->data_device[g_main_device_index]; if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32) { - ggml_cpy_f32_f32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_f32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F16) { - ggml_cpy_f32_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream); + ggml_cpy_f32_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream); } else if (src0->type == GGML_TYPE_F32 && src1->type == 
GGML_TYPE_Q8_0) {
- ggml_cpy_f32_q8_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream);
+ ggml_cpy_f32_q8_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream);
 } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_Q4_0) {
- ggml_cpy_f32_q4_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream);
+ ggml_cpy_f32_q4_0_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream);
 } else if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_Q4_1) {
- ggml_cpy_f32_q4_1_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream);
+ ggml_cpy_f32_q4_1_sycl(src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream);
 } else if (src0->type == GGML_TYPE_F16 && src1->type == GGML_TYPE_F16) {
- ggml_cpy_f16_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream);
+ ggml_cpy_f16_f16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream);
 } else if (src0->type == GGML_TYPE_I16 && src1->type == GGML_TYPE_I16) {
- ggml_cpy_i16_i16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream);
+ ggml_cpy_i16_i16_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream);
 } else if (src0->type == GGML_TYPE_I32 && src1->type == GGML_TYPE_I32) {
- ggml_cpy_i32_i32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, nb00, nb01, nb02, ne10, ne11, nb10, nb11, nb12, main_stream);
+ ggml_cpy_i32_i32_sycl (src0_ddc, src1_ddc, ne, ne00, ne01, ne02, nb00, nb01, nb02, nb03, ne10, ne11, ne12, nb10, nb11, nb12, nb13, main_stream);
 } else { fprintf(stderr, "%s: unsupported type combination (%s to %s)\n", __func__, ggml_type_name(src0->type), ggml_type_name(src1->type));

From 5d55b0cd827bb0fcfedfa329a82bd5d6ef2c93ca Mon Sep 17 00:00:00 2001
From: chiranko <96988916+chiranko@users.noreply.github.com>
Date: Mon, 5 Feb 2024 15:41:38 +0800
Subject: [PATCH 663/859] readme : add CodeShell models to the supported models list (#5330)

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 4a9bdf314..a6fe34629 100644
--- a/README.md
+++ b/README.md
@@ -107,6 +107,7 @@ as the main playground for developing new features for the [ggml](https://github
 - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral)
 - [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557)
 - [x] [GPT-2](https://huggingface.co/gpt2)
+- [x] [CodeShell](https://github.com/WisdomShell/codeshell)

 **Multimodal models:**

From 4be04c8965578edc09194fab769b4b922b8444f5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=D0=9D=D0=B8=D1=8F=D0=B7=20=D0=93=D0=B0=D1=80=D0=B8=D1=84?= =?UTF-8?q?=D0=B7=D1=8F=D0=BD=D0=BE=D0=B2?= <112617865+garrnizon@users.noreply.github.com>
Date: Mon, 5 Feb 2024 10:43:57 +0300
Subject: [PATCH 664/859] scripts : add non-interactive server-llm.sh (#5303)

* Update server-llm.sh

Add a --non-interactive flag that allows running the script without asking for permission

* Update scripts/server-llm.sh

---------

Co-authored-by: Georgi Gerganov
---
 scripts/server-llm.sh | 73 ++++++++++++++++++++++++++++++++++-------------------
 1 file changed, 40
insertions(+), 33 deletions(-) diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh index 0b83cdbbc..062b70496 100644 --- a/scripts/server-llm.sh +++ b/scripts/server-llm.sh @@ -47,6 +47,7 @@ if ! command -v make &> /dev/null; then fi # parse arguments +is_interactive=1 port=8888 repo="" wtype="" @@ -66,15 +67,16 @@ verbose=0 function print_usage { printf "Usage:\n" - printf " ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose]\n\n" - printf " --port: port number, default is 8888\n" - printf " --repo: path to a repo containing GGUF model files\n" - printf " --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input\n" - printf " --backend: cpu, cuda, metal, opencl, depends on the OS\n" - printf " --gpu-id: gpu id, default is 0\n" - printf " --n-parallel: number of parallel requests, default is 8\n" - printf " --n-kv: KV cache size, default is 4096\n" - printf " --verbose: verbose output\n\n" + printf " ./server-llm.sh [-interactive] [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose]\n\n" + printf " --non-interactive: run without asking a permision to run\n" + printf " --port: port number, default is 8888\n" + printf " --repo: path to a repo containing GGUF model files\n" + printf " --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input\n" + printf " --backend: cpu, cuda, metal, opencl, depends on the OS\n" + printf " --gpu-id: gpu id, default is 0\n" + printf " --n-parallel: number of parallel requests, default is 8\n" + printf " --n-kv: KV cache size, default is 4096\n" + printf " --verbose: verbose output\n\n" printf "Example:\n\n" printf ' bash -c "$(curl -s https://ggml.ai/server-llm.sh)"\n\n' } @@ -82,6 +84,10 @@ function print_usage { while [[ $# -gt 0 ]]; do key="$1" case $key in + --non-interactive) + is_interactive=0 + shift + ;; --port) port="$2" shift @@ -176,31 +182,32 @@ repos=( "https://huggingface.co/TheBloke/OpenHermes-2-Mistral-7B-GGUF" "https://huggingface.co/TheBloke/CausalLM-7B-GGUF" ) +if [ $is_interactive -eq 1 ]; then + printf "\n" + printf "[I] This is a helper script for deploying llama.cpp's server on this machine.\n\n" + printf " Based on the options that follow, the script might download a model file\n" + printf " from the internet, which can be a few GBs in size. 
The script will also\n" + printf " build the latest llama.cpp source code from GitHub, which can be unstable.\n" + printf "\n" + printf " Upon success, an HTTP server will be started and it will serve the selected\n" + printf " model using llama.cpp for demonstration purposes.\n" + printf "\n" + printf " Please note:\n" + printf "\n" + printf " - All new data will be stored in the current folder\n" + printf " - The server will be listening on all network interfaces\n" + printf " - The server will run with default settings which are not always optimal\n" + printf " - Do not judge the quality of a model based on the results from this script\n" + printf " - Do not use this script to benchmark llama.cpp\n" + printf " - Do not use this script in production\n" + printf " - This script is only for demonstration purposes\n" + printf "\n" + printf " If you don't know what you are doing, please press Ctrl-C to abort now\n" + printf "\n" + printf " Press Enter to continue ...\n\n" -printf "\n" -printf "[I] This is a helper script for deploying llama.cpp's server on this machine.\n\n" -printf " Based on the options that follow, the script might download a model file\n" -printf " from the internet, which can be a few GBs in size. The script will also\n" -printf " build the latest llama.cpp source code from GitHub, which can be unstable.\n" -printf "\n" -printf " Upon success, an HTTP server will be started and it will serve the selected\n" -printf " model using llama.cpp for demonstration purposes.\n" -printf "\n" -printf " Please note:\n" -printf "\n" -printf " - All new data will be stored in the current folder\n" -printf " - The server will be listening on all network interfaces\n" -printf " - The server will run with default settings which are not always optimal\n" -printf " - Do not judge the quality of a model based on the results from this script\n" -printf " - Do not use this script to benchmark llama.cpp\n" -printf " - Do not use this script in production\n" -printf " - This script is only for demonstration purposes\n" -printf "\n" -printf " If you don't know what you are doing, please press Ctrl-C to abort now\n" -printf "\n" -printf " Press Enter to continue ...\n\n" - -read + read +fi if [[ -z "$repo" ]]; then printf "[+] No repo provided from the command line\n" From 30679d438d5225b3aecf5cec6482cbc9f8f87ba5 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 5 Feb 2024 09:48:03 +0200 Subject: [PATCH 665/859] scripts : fix typos, cleanup (#5303) --- scripts/server-llm.sh | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/scripts/server-llm.sh b/scripts/server-llm.sh index 062b70496..30bbac321 100644 --- a/scripts/server-llm.sh +++ b/scripts/server-llm.sh @@ -14,16 +14,17 @@ # - Might be unstable! 
# # Usage: -# ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] +# ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] [-non-interactive] # -# --port: port number, default is 8888 -# --repo: path to a repo containing GGUF model files -# --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input -# --backend: cpu, cuda, metal, opencl, depends on the OS -# --gpu-id: gpu id, default is 0 -# --n-parallel: number of parallel requests, default is 8 -# --n-kv: KV cache size, default is 4096 -# --verbose: verbose output +# --port: port number, default is 8888 +# --repo: path to a repo containing GGUF model files +# --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input +# --backend: cpu, cuda, metal, opencl, depends on the OS +# --gpu-id: gpu id, default is 0 +# --n-parallel: number of parallel requests, default is 8 +# --n-kv: KV cache size, default is 4096 +# --verbose: verbose output +# --non-interactive: run without asking a permission to run # # Example: # @@ -67,8 +68,7 @@ verbose=0 function print_usage { printf "Usage:\n" - printf " ./server-llm.sh [-interactive] [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose]\n\n" - printf " --non-interactive: run without asking a permision to run\n" + printf " ./server-llm.sh [--port] [--repo] [--wtype] [--backend] [--gpu-id] [--n-parallel] [--n-kv] [--verbose] [-non-interactive]\n\n" printf " --port: port number, default is 8888\n" printf " --repo: path to a repo containing GGUF model files\n" printf " --wtype: weights type (f16, q8_0, q4_0, q4_1), default is user-input\n" @@ -77,6 +77,7 @@ function print_usage { printf " --n-parallel: number of parallel requests, default is 8\n" printf " --n-kv: KV cache size, default is 4096\n" printf " --verbose: verbose output\n\n" + printf " --non-interactive: run without asking a permission to run\n" printf "Example:\n\n" printf ' bash -c "$(curl -s https://ggml.ai/server-llm.sh)"\n\n' } From e6f81775323f6f4e4a30abf022a6028fa86b79ac Mon Sep 17 00:00:00 2001 From: l3utterfly Date: Mon, 5 Feb 2024 17:00:47 +0900 Subject: [PATCH 666/859] common : add dynamic temperature parameters to main example cli (#5295) * added dynamic temp params in main * added help text --- common/common.cpp | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/common/common.cpp b/common/common.cpp index 3302caa20..8c1a60583 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -399,6 +399,18 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } sparams.penalty_present = std::stof(argv[i]); + } else if (arg == "--dynatemp-range") { + if (++i >= argc) { + invalid_param = true; + break; + } + sparams.dynatemp_range = std::stof(argv[i]); + } else if (arg == "--dynatemp-exp") { + if (++i >= argc) { + invalid_param = true; + break; + } + sparams.dynatemp_exponent = std::stof(argv[i]); } else if (arg == "--mirostat") { if (++i >= argc) { invalid_param = true; @@ -942,6 +954,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --repeat-penalty N penalize repeat sequence of tokens (default: %.1f, 1.0 = disabled)\n", (double)sparams.penalty_repeat); printf(" --presence-penalty N repeat alpha presence penalty (default: %.1f, 0.0 = disabled)\n", (double)sparams.penalty_present); printf(" --frequency-penalty N repeat alpha frequency penalty (default: %.1f, 0.0 = disabled)\n", (double)sparams.penalty_freq); + 
printf(" --dynatemp-range N dynamic temperature range (default: %.1f, 0.0 = disabled)\n", (double)sparams.dynatemp_range); + printf(" --dynatemp-exp N dynamic temperature exponent (default: %.1f)\n", (double)sparams.dynatemp_exponent); printf(" --mirostat N use Mirostat sampling.\n"); printf(" Top K, Nucleus, Tail Free and Locally Typical samplers are ignored if used.\n"); printf(" (default: %d, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0)\n", sparams.mirostat); From a2d60c9158435ae9a6f14632f07f1acf7a3becef Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Mon, 5 Feb 2024 08:10:22 +0000 Subject: [PATCH 667/859] server : allow to get default generation settings for completion (#5307) --- examples/server/README.md | 16 +++++++++++++++- examples/server/server.cpp | 7 ++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index fe934dab1..d8e7c313e 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -264,7 +264,21 @@ Notice that each `probs` is an array of length `n_probs`. It also accepts all the options of `/completion` except `stream` and `prompt`. -- **GET** `/props`: Return the required assistant name and anti-prompt to generate the prompt in case you have specified a system prompt for all slots. +- **GET** `/props`: Return current server settings. + +### Result JSON + +```json +{ + "assistant_name": "", + "user_name": "", + "default_generation_settings": { ... } +} +``` + +- `assistant_name` - the required assistant name to generate the prompt in case you have specified a system prompt for all slots. +- `user_name` - the required anti-prompt to generate the prompt in case you have specified a system prompt for all slots. +- `default_generation_settings` - the default generation settings for the `/completion` endpoint, has the same fields as the `generation_settings` response object from the `/completion` endpoint. - **POST** `/v1/chat/completions`: OpenAI-compatible Chat Completions API. Given a ChatML-formatted json description in `messages`, it returns the predicted completion. Both synchronous and streaming mode are supported, so scripted and interactive applications work fine. While no strong claims of compatibility with OpenAI API spec is being made, in our experience it suffices to support many apps. Only ChatML-tuned models, such as Dolphin, OpenOrca, OpenHermes, OpenChat-3.5, etc can be used with this endpoint. Compared to `api_like_OAI.py` this API implementation does not require a wrapper to be served. 
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index a9f8cb369..8000fee5c 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -334,6 +334,7 @@ struct llama_server_context // slots / clients std::vector slots; + json default_generation_settings_for_props; llama_server_queue queue_tasks; llama_server_response queue_results; @@ -430,6 +431,9 @@ struct llama_server_context slots.push_back(slot); } + default_generation_settings_for_props = get_formated_generation(slots.front()); + default_generation_settings_for_props["seed"] = -1; + batch = llama_batch_init(n_ctx, 0, params.n_parallel); // empty system prompt @@ -2614,7 +2618,8 @@ int main(int argc, char **argv) res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); json data = { { "user_name", llama.name_user.c_str() }, - { "assistant_name", llama.name_assistant.c_str() } + { "assistant_name", llama.name_assistant.c_str() }, + { "default_generation_settings", llama.default_generation_settings_for_props } }; res.set_content(data.dump(), "application/json; charset=utf-8"); }); From 6fdfa2ecc684000a25a4ad91823bc82a6652b645 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:46:06 +0200 Subject: [PATCH 668/859] iq2_xxs: tune quantization (#5320) We get slightly better PPL, and we cut quantization time in nearly half. The trick is to 1st quantize without forcing points onto the E8-lattice. We can then use a narrower search range around the block scale that we got that way. Co-authored-by: Iwan Kawrakow --- ggml-quants.c | 58 ++++++--------------------------------------------- 1 file changed, 6 insertions(+), 52 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 8236385bc..014c0525a 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -9048,8 +9048,6 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict int8_t L[32]; int8_t Laux[32]; float waux[32]; - bool is_on_grid[4]; - bool is_on_grid_aux[4]; uint8_t block_signs[4]; uint32_t q2[2*(QK_K/32)]; @@ -9099,10 +9097,11 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict memset(L, 0, 32); continue; } + float scale = make_qp_quants(32, kMaxQ+1, xval, (uint8_t*)L, weight); + float eff_max = scale*kMaxQ; float best = 0; - float scale = max/(2*kMaxQ-1); - for (int is = -9; is <= 9; ++is) { - float id = (2*kMaxQ-1+is*0.1f)/max; + for (int is = -6; is <= 6; ++is) { + float id = (2*kMaxQ-1+is*0.1f)/eff_max; float this_scale = 1/id; for (int k = 0; k < 4; ++k) { for (int i = 0; i < 8; ++i) { @@ -9112,9 +9111,7 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict uint16_t u = 0; for (int i = 0; i < 8; ++i) u |= (Laux[8*k+i] << 2*i); int grid_index = kmap_q2xs[u]; - is_on_grid_aux[k] = true; if (grid_index < 0) { - is_on_grid_aux[k] = false; const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; grid_index = iq2_find_best_neighbour(neighbours, kgrid_q2xs, xval + 8*k, waux + 8*k, this_scale, Laux + 8*k); } @@ -9128,16 +9125,12 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict } if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { scale = sumqx/sumq2; best = scale*sumqx; - for (int i = 0; i < 32; ++i) L[i] = Laux[i]; - for (int k = 0; k < 4; ++k) is_on_grid[k] = is_on_grid_aux[k]; + memcpy(L, Laux, 32); } } - int n_not_ongrid = 0; - for (int k = 0; k < 4; ++k) if (!is_on_grid[k]) ++n_not_ongrid; - if (n_not_ongrid > 0 && scale > 0) { + if (scale > 0) { 
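+ // (Editorial note, not part of the original commit: the is_on_grid bookkeeping
+ // was removed above, so when a usable block scale is found, all four 8-value
+ // groups are re-projected onto the grid below, not just the off-grid ones.)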
float id = 1/scale; for (int k = 0; k < 4; ++k) { - if (is_on_grid[k]) continue; uint16_t u = 0; for (int i = 0; i < 8; ++i) { int l = nearest_int(0.5f*(id*xval[8*k+i]-1)); @@ -9193,49 +9186,10 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict float d = max_scale/31; y[ibl].d = GGML_FP32_TO_FP16(d); float id = 1/d; - float sumqx = 0, sumq2 = 0; for (int ib = 0; ib < QK_K/32; ++ib) { int l = nearest_int(0.5f*(id*scales[ib]-1)); l = MAX(0, MIN(15, l)); q2[2*ib+1] |= ((uint32_t)l << 28); - const float * xb = xbl + 32*ib; - const float * qw = quant_weights + QK_K*ibl + 32*ib; - for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); - const uint8_t * aux8 = (const uint8_t *)(q2 + 2*ib); - const float db = d * (1 + 2*l); - uint32_t u = 0; - for (int k = 0; k < 4; ++k) { - const int8_t * signs = keven_signs_q2xs + 8*((q2[2*ib+1] >> 7*k) & 127); - const float * xk = xb + 8*k; - const float * wk = weight + 8*k; - const uint8_t * grid = (const uint8_t *)(kgrid_q2xs + aux8[k]); - float best_mse = 0; int best_index = aux8[k]; - for (int j = 0; j < 8; ++j) { - float diff = db * grid[j] * signs[j] - xk[j]; - best_mse += wk[j] * diff * diff; - } - for (int idx = 0; idx < 256; ++idx) { - grid = (const uint8_t *)(kgrid_q2xs + idx); - float mse = 0; - for (int j = 0; j < 8; ++j) { - float diff = db * grid[j] * signs[j] - xk[j]; - mse += wk[j] * diff * diff; - } - if (mse < best_mse) { - best_mse = mse; best_index = idx; - } - } - u |= (best_index << 8*k); - grid = (const uint8_t *)(kgrid_q2xs + best_index); - //grid = (const uint8_t *)(kgrid_q2xs + aux8[k]); - for (int j = 0; j < 8; ++j) { - float q = db * grid[j] * signs[j]; - sumqx += wk[j] * q * xk[j]; - sumq2 += wk[j] * q * q; - } - } - q2[2*ib] = u; - if (sumq2 > 0) y[ibl].d = GGML_FP32_TO_FP16(d*sumqx/sumq2); } memcpy(y[ibl].qs, q2, QK_K/4); } From 7e1ae372f36d98fa66b1d778c5862904b4d80c88 Mon Sep 17 00:00:00 2001 From: Guoteng <32697156+SolenoidWGT@users.noreply.github.com> Date: Mon, 5 Feb 2024 17:04:06 +0800 Subject: [PATCH 669/859] py : fix internlm2-hf convert to gguf (#5305) * py : fix internlm2-hf convert to gguf * ggml-ci --- convert-hf-to-gguf.py | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index a6ffd128b..5e343742d 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1416,8 +1416,32 @@ class InternLM2Model(Model): self.gguf_writer.add_add_space_prefix(add_prefix) special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + old_eos = special_vocab.special_token_ids["eos"] + if "chat" in os.path.basename(self.dir_model.absolute()): + # For the chat model, we replace the eos with '<|im_end|>'. 
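+ # (Editorial note, not in the original commit: chat checkpoints mark end-of-turn
+ # either with the legacy '[UNUSED_TOKEN_145]' token or with '<|im_end|>';
+ # _try_get_sft_eos below returns whichever of the two the tokenizer encodes
+ # as a single token.)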
+ special_vocab.special_token_ids["eos"] = self._try_get_sft_eos(tokenizer)
+ print(f"Replace eos:{old_eos} with a special token:{special_vocab.special_token_ids['eos']} \
+in chat mode so that the conversation can end normally.")
+
 special_vocab.add_to_gguf(self.gguf_writer)

+ def _try_get_sft_eos(self, tokenizer):
+ unused_145_list = tokenizer.encode('[UNUSED_TOKEN_145]')
+ im_end_list = tokenizer.encode('<|im_end|>')
+ assert (len(unused_145_list) == 1) ^ (len(im_end_list) == 1)
+ if len(unused_145_list) == 1:
+ eos_token = unused_145_list[0]
+ if len(im_end_list) == 1:
+ eos_token = im_end_list[0]
+ return eos_token
+
+ def _hf_permute_qk(self, weights, n_head: int, n_head_kv: int):
+ if n_head_kv is not None and n_head != n_head_kv:
+ n_head = n_head_kv
+ return (weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:])
+ .swapaxes(1, 2)
+ .reshape(weights.shape))
+
 def set_gguf_parameters(self):
 self.gguf_writer.add_name("InternLM2")
 self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"])
@@ -1486,8 +1510,9 @@ class InternLM2Model(Model):
 qkv = data_torch
 qkv = rearrange(qkv.T, " o (g n i) ->o g n i", g=num_groups, n=q_per_kv + 2, i=head_dim)
 q, k, v = qkv[..., : q_per_kv, :], qkv[..., q_per_kv: q_per_kv + 1, :], qkv[..., q_per_kv + 1: q_per_kv + 2, :]
- q = rearrange(q, " o g n i -> o (g n i)").T
- k = rearrange(k, " o g n i -> o (g n i)").T
+ # The model weights of q and k require additional reshape.
+ q = self._hf_permute_qk(rearrange(q, " o g n i -> o (g n i)").T, num_heads, num_heads)
+ k = self._hf_permute_qk(rearrange(k, " o g n i -> o (g n i)").T, num_heads, num_kv_heads)
 v = rearrange(v, " o g n i -> o (g n i)").T
 self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wq.weight", q)
 self.post_write_tensors(tensor_map, f"model.layers.{bid}.attention.wk.weight", k)

From 89503dcb5f764a5cc7093db1f395f5121876a2cc Mon Sep 17 00:00:00 2001
From: Kawrakow <48489457+ikawrakow@users.noreply.github.com>
Date: Mon, 5 Feb 2024 12:32:27 +0200
Subject: [PATCH 670/859] iq3_xxs: guards for the no-imatrix situation (#5334)

Co-authored-by: Iwan Kawrakow
---
 llama.cpp | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index 4787a92fe..65e399adc 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -9456,8 +9456,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty
 else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && qs.model.hparams.n_gqa() >= 4) {
 new_type = GGML_TYPE_Q4_K;
 }
- else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS && qs.model.hparams.n_gqa() >= 4) {
- new_type = GGML_TYPE_Q4_K;
+ else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) {
+ new_type = qs.model.hparams.n_gqa() >= 4 ? GGML_TYPE_Q4_K : !qs.has_imatrix ? GGML_TYPE_Q3_K : GGML_TYPE_IQ3_XXS;
 }
 else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) {
 new_type = qs.i_attention_wv < 2 ? GGML_TYPE_Q5_K : GGML_TYPE_Q4_K;
@@ -9496,9 +9496,9 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty
 else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) {
 if (i_layer < n_layer/8) new_type = GGML_TYPE_Q4_K;
 }
- //else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) {
- // if (i_layer < n_layer/8) new_type = GGML_TYPE_Q5_K;
- //}
+ else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS && !qs.has_imatrix) {
+ new_type = i_layer < n_layer/8 ? GGML_TYPE_Q4_K : GGML_TYPE_Q3_K;
+ }
 else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) {
 new_type = i_layer < n_layer/16 ?
GGML_TYPE_Q5_K : arch != LLM_ARCH_FALCON || use_more_bits(i_layer, n_layer) ? GGML_TYPE_Q4_K

From abb61944a5f64dec62c893ed0db10790169b672a Mon Sep 17 00:00:00 2001
From: "Dr. Tom Murphy VII Ph.D" <499244+tom7@users.noreply.github.com>
Date: Mon, 5 Feb 2024 06:13:57 -0500
Subject: [PATCH 671/859] ggml : avoid duplicating function calls using MIN/MAX macros (#5325)

* Avoid duplicating function calls when using MIN/MAX macros.

Since these copy "a" and "b" they ask the compiler to evaluate one of them twice. The compiler doesn't have a problem with removing the duplication in something like MAX(0, x + 2), but in some cases we're calling functions, and those calls just happen twice.
By explicitly evaluating the expression once we get smaller and faster code without duplicate calls.

See ggml_rope_yarn_corr_dims in Compiler Explorer: https://godbolt.org/z/Ee4KMrvKh

Code behaves exactly the same.

* Update ggml.c

---------

Co-authored-by: Georgi Gerganov
---
 ggml.c | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/ggml.c b/ggml.c
index ee994c875..b9ec0c981 100644
--- a/ggml.c
+++ b/ggml.c
@@ -2470,7 +2470,8 @@ size_t ggml_get_max_tensor_size(const struct ggml_context * ctx) {
 size_t max_size = 0;
 for (struct ggml_tensor * tensor = ggml_get_first_tensor(ctx); tensor != NULL; tensor = ggml_get_next_tensor(ctx, tensor)) {
- max_size = MAX(max_size, ggml_nbytes(tensor));
+ size_t bytes = ggml_nbytes(tensor);
+ max_size = MAX(max_size, bytes);
 }
 return max_size;
@@ -11887,8 +11888,10 @@ GGML_CALL void ggml_rope_yarn_corr_dims(
 int n_dims, int n_orig_ctx, float freq_base, float beta_fast, float beta_slow, float dims[2]
) {
 // start and end correction dims
- dims[0] = MAX(0, floorf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_fast, freq_base)));
- dims[1] = MIN(n_dims - 1, ceilf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_slow, freq_base)));
+ float start = floorf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_fast, freq_base));
+ float end = ceilf(ggml_rope_yarn_corr_dim(n_dims, n_orig_ctx, beta_slow, freq_base));
+ dims[0] = MAX(0, start);
+ dims[1] = MIN(n_dims - 1, end);
 }

 static void ggml_compute_forward_rope_f32(

From c6b395535a6874d749ef47c33eacd466cb252cd5 Mon Sep 17 00:00:00 2001
From: Kawrakow <48489457+ikawrakow@users.noreply.github.com>
Date: Mon, 5 Feb 2024 14:09:47 +0200
Subject: [PATCH 672/859] ggml : make use of ggml-quants.h possible in C++ code (#5338)

* Make use of ggml-quants.h possible in C++ code

* One cannot possibly be defining static_assert in a C++ compilation

---------

Co-authored-by: Iwan Kawrakow
---
 ggml-impl.h   |   2 +
 ggml-quants.h | 117 +++++++++++++++++++++++++++-----------------------
 2 files changed, 65 insertions(+), 54 deletions(-)

diff --git a/ggml-impl.h b/ggml-impl.h
index 2c58075ac..19df66bce 100644
--- a/ggml-impl.h
+++ b/ggml-impl.h
@@ -19,6 +19,7 @@ extern "C" {
 // fall back to the _Static_assert C11 keyword.
// if C99 - static_assert is noop // ref: https://stackoverflow.com/a/53923785/4039976 +#ifndef __cplusplus #ifndef static_assert #if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201100L) #define static_assert(cond, msg) _Static_assert(cond, msg) @@ -26,6 +27,7 @@ extern "C" { #define static_assert(cond, msg) struct global_scope_noop_trick #endif #endif +#endif // __FMA__ and __F16C__ are not defined in MSVC, however they are implied with AVX2/AVX512 #if defined(_MSC_VER) && (defined(__AVX2__) || defined(__AVX512F__)) diff --git a/ggml-quants.h b/ggml-quants.h index 5c9f63bd9..bfdf3c997 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -191,70 +191,74 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +#ifdef __cplusplus +extern "C" { +#endif + // Quantization -void quantize_row_q4_0_reference(const float * restrict x, block_q4_0 * restrict y, int k); -void quantize_row_q4_1_reference(const float * restrict x, block_q4_1 * restrict y, int k); -void quantize_row_q5_0_reference(const float * restrict x, block_q5_0 * restrict y, int k); -void quantize_row_q5_1_reference(const float * restrict x, block_q5_1 * restrict y, int k); -void quantize_row_q8_0_reference(const float * restrict x, block_q8_0 * restrict y, int k); -void quantize_row_q8_1_reference(const float * restrict x, block_q8_1 * restrict y, int k); +void quantize_row_q4_0_reference(const float * GGML_RESTRICT x, block_q4_0 * GGML_RESTRICT y, int k); +void quantize_row_q4_1_reference(const float * GGML_RESTRICT x, block_q4_1 * GGML_RESTRICT y, int k); +void quantize_row_q5_0_reference(const float * GGML_RESTRICT x, block_q5_0 * GGML_RESTRICT y, int k); +void quantize_row_q5_1_reference(const float * GGML_RESTRICT x, block_q5_1 * GGML_RESTRICT y, int k); +void quantize_row_q8_0_reference(const float * GGML_RESTRICT x, block_q8_0 * GGML_RESTRICT y, int k); +void quantize_row_q8_1_reference(const float * GGML_RESTRICT x, block_q8_1 * GGML_RESTRICT y, int k); -void quantize_row_q2_K_reference(const float * restrict x, block_q2_K * restrict y, int k); -void quantize_row_q3_K_reference(const float * restrict x, block_q3_K * restrict y, int k); -void quantize_row_q4_K_reference(const float * restrict x, block_q4_K * restrict y, int k); -void quantize_row_q5_K_reference(const float * restrict x, block_q5_K * restrict y, int k); -void quantize_row_q6_K_reference(const float * restrict x, block_q6_K * restrict y, int k); -void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k); -void quantize_row_iq3_xxs_reference(const float * restrict x, block_iq3_xxs * restrict y, int k); +void quantize_row_q2_K_reference(const float * GGML_RESTRICT x, block_q2_K * GGML_RESTRICT y, int k); +void quantize_row_q3_K_reference(const float * GGML_RESTRICT x, block_q3_K * GGML_RESTRICT y, int k); +void quantize_row_q4_K_reference(const float * GGML_RESTRICT x, block_q4_K * GGML_RESTRICT y, int k); +void quantize_row_q5_K_reference(const float * GGML_RESTRICT x, block_q5_K * GGML_RESTRICT y, int k); +void quantize_row_q6_K_reference(const float * GGML_RESTRICT x, block_q6_K * GGML_RESTRICT y, int k); +void quantize_row_q8_K_reference(const float * GGML_RESTRICT x, block_q8_K * GGML_RESTRICT y, int k); +void quantize_row_iq3_xxs_reference(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int k); -void quantize_row_q4_0(const float * restrict x, void * restrict y, int k); -void quantize_row_q4_1(const float * 
restrict x, void * restrict y, int k); -void quantize_row_q5_0(const float * restrict x, void * restrict y, int k); -void quantize_row_q5_1(const float * restrict x, void * restrict y, int k); -void quantize_row_q8_0(const float * restrict x, void * restrict y, int k); -void quantize_row_q8_1(const float * restrict x, void * restrict y, int k); +void quantize_row_q4_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q4_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q5_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q5_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q8_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q8_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); -void quantize_row_q2_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q3_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q4_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q5_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q6_K(const float * restrict x, void * restrict y, int k); -void quantize_row_q8_K(const float * restrict x, void * restrict y, int k); -void quantize_row_iq3_xxs(const float * restrict x, void * restrict y, int k); +void quantize_row_q2_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q3_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q4_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q5_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q6_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); // Dequantization -void dequantize_row_q4_0(const block_q4_0 * restrict x, float * restrict y, int k); -void dequantize_row_q4_1(const block_q4_1 * restrict x, float * restrict y, int k); -void dequantize_row_q5_0(const block_q5_0 * restrict x, float * restrict y, int k); -void dequantize_row_q5_1(const block_q5_1 * restrict x, float * restrict y, int k); -void dequantize_row_q8_0(const block_q8_0 * restrict x, float * restrict y, int k); -//void dequantize_row_q8_1(const block_q8_1 * restrict x, float * restrict y, int k); +void dequantize_row_q4_0(const block_q4_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q4_1(const block_q4_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q5_0(const block_q5_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q5_1(const block_q5_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q8_0(const block_q8_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +//void dequantize_row_q8_1(const block_q8_1 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); -void dequantize_row_q2_K(const block_q2_K * restrict x, float * restrict y, int k); -void dequantize_row_q3_K(const block_q3_K * restrict x, float * restrict y, int k); -void dequantize_row_q4_K(const block_q4_K * restrict x, float * restrict y, int k); -void dequantize_row_q5_K(const block_q5_K * restrict x, float * restrict y, int k); -void dequantize_row_q6_K(const block_q6_K * restrict x, float * restrict y, int k); 
-void dequantize_row_q8_K(const block_q8_K * restrict x, float * restrict y, int k); -void dequantize_row_iq2_xxs(const block_iq2_xxs * restrict x, float * restrict y, int k); -void dequantize_row_iq2_xs (const block_iq2_xs * restrict x, float * restrict y, int k); -void dequantize_row_iq3_xxs(const block_iq3_xxs * restrict x, float * restrict y, int k); +void dequantize_row_q2_K(const block_q2_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q3_K(const block_q3_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q4_K(const block_q4_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q5_K(const block_q5_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q6_K(const block_q6_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_q8_K(const block_q8_K * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq2_xxs(const block_iq2_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product -void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q4_1_q8_1(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q5_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q5_1_q8_1(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q8_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_iq2_xs_q8_K (int n, float * restrict s, const void * restrict vx, const void * restrict vy); -void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, const void * restrict vx, const void * restrict vy); +void ggml_vec_dot_q2_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q3_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void 
ggml_vec_dot_q4_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q5_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); // // Quantization utilizing an importance matrix (a.k.a. "Activation aWare Quantization") @@ -276,3 +280,8 @@ void iq2xs_init_impl(int grid_size); void iq2xs_free_impl(int grid_size); void iq3xs_init_impl(int grid_size); void iq3xs_free_impl(int grid_size); + +#ifdef __cplusplus +} +#endif + From 78b00dda6c0d62c34f5371d47718defff6ed2b22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Mon, 5 Feb 2024 15:55:10 +0100 Subject: [PATCH 673/859] README: updated introduction (#5343) * README: updated introduction * readme : update --------- Co-authored-by: Georgi Gerganov --- README.md | 49 ++++++++++++++++++++++++++++++------------------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index a6fe34629..bb6c49338 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ [Roadmap](https://github.com/users/ggerganov/projects/7) / [Project status](https://github.com/ggerganov/llama.cpp/discussions/3471) / [Manifesto](https://github.com/ggerganov/llama.cpp/discussions/205) / [ggml](https://github.com/ggerganov/ggml) -Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ +Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) in pure C/C++ ### Hot topics @@ -58,18 +58,20 @@ Inference of [LLaMA](https://arxiv.org/abs/2302.13971) model in pure C/C++ ## Description -The main goal of `llama.cpp` is to run the LLaMA model using 4-bit integer quantization on a MacBook +The main goal of `llama.cpp` is to enable LLM inference with minimal setup and state-of-the-art performance on a wide +variety of hardware - locally and in the cloud. -- Plain C/C++ implementation without dependencies -- Apple silicon first-class citizen - optimized via ARM NEON, Accelerate and Metal frameworks +- Plain C/C++ implementation without any dependencies +- Apple silicon is a first-class citizen - optimized via ARM NEON, Accelerate and Metal frameworks - AVX, AVX2 and AVX512 support for x86 architectures -- Mixed F16 / F32 precision -- 2-bit, 3-bit, 4-bit, 5-bit, 6-bit and 8-bit integer quantization support -- CUDA, Metal, OpenCL, SYCL GPU backend support +- 2-bit, 3-bit, 4-bit, 5-bit, 6-bit, and 8-bit integer quantization for faster inference and reduced memory use +- Custom CUDA kernels for running LLMs on NVIDIA GPUs (support for AMD GPUs via HIP) +- Vulkan, SYCL, and (partial) OpenCL backend support +- CPU+GPU hybrid inference to partially accelerate models larger than the total VRAM capacity -The original implementation of `llama.cpp` was [hacked in an evening](https://github.com/ggerganov/llama.cpp/issues/33#issuecomment-1465108022). -Since then, the project has improved significantly thanks to many contributions. 
This project is mainly for educational purposes and serves -as the main playground for developing new features for the [ggml](https://github.com/ggerganov/ggml) library. +Since its [inception](https://github.com/ggerganov/llama.cpp/issues/33#issuecomment-1465108022), the project has +improved significantly thanks to many contributions. It is the main playground for developing new features for the +[ggml](https://github.com/ggerganov/ggml) library. **Supported platforms:** @@ -77,11 +79,14 @@ as the main playground for developing new features for the [ggml](https://github - [X] Linux - [X] Windows (via CMake) - [X] Docker +- [X] FreeBSD **Supported models:** - [X] LLaMA 🦙 - [x] LLaMA 2 🦙🦙 +- [X] [Mistral AI v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) +- [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) - [X] Falcon - [X] [Alpaca](https://github.com/ggerganov/llama.cpp#instruction-mode-with-alpaca) - [X] [GPT4All](https://github.com/ggerganov/llama.cpp#using-gpt4all) @@ -95,7 +100,6 @@ as the main playground for developing new features for the [ggml](https://github - [X] [Baichuan 1 & 2](https://huggingface.co/models?search=baichuan-inc/Baichuan) + [derivations](https://huggingface.co/hiyouga/baichuan-7b-sft) - [X] [Aquila 1 & 2](https://huggingface.co/models?search=BAAI/Aquila) - [X] [Starcoder models](https://github.com/ggerganov/llama.cpp/pull/3187) -- [X] [Mistral AI v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) - [X] [Refact](https://huggingface.co/smallcloudai/Refact-1_6B-fim) - [X] [Persimmon 8B](https://github.com/ggerganov/llama.cpp/pull/3410) - [X] [MPT](https://github.com/ggerganov/llama.cpp/pull/3417) @@ -104,15 +108,14 @@ as the main playground for developing new features for the [ggml](https://github - [X] [StableLM-3b-4e1t](https://github.com/ggerganov/llama.cpp/pull/3586) - [x] [Deepseek models](https://huggingface.co/models?search=deepseek-ai/deepseek) - [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen) -- [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral) - [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557) - [x] [GPT-2](https://huggingface.co/gpt2) - [x] [CodeShell](https://github.com/WisdomShell/codeshell) **Multimodal models:** -- [x] [Llava 1.5 models](https://huggingface.co/collections/liuhaotian/llava-15-653aac15d994e992e2677a7e) -- [x] [Bakllava](https://huggingface.co/models?search=SkunkworksAI/Bakllava) +- [x] [LLaVA 1.5 models](https://huggingface.co/collections/liuhaotian/llava-15-653aac15d994e992e2677a7e) +- [x] [BakLLaVA](https://huggingface.co/models?search=SkunkworksAI/Bakllava) - [x] [Obsidian](https://huggingface.co/NousResearch/Obsidian-3B-V0.5) - [x] [ShareGPT4V](https://huggingface.co/models?search=Lin-Chen/ShareGPT4V) - [x] [MobileVLM 1.7B/3B models](https://huggingface.co/models?search=mobileVLM) @@ -137,14 +140,22 @@ as the main playground for developing new features for the [ggml](https://github **UI:** +Unless otherwise noted these projects are open-source with permissive licensing: + +- [iohub/collama](https://github.com/iohub/coLLaMA) +- [janhq/jan](https://github.com/janhq/jan) (AGPL) - [nat/openplayground](https://github.com/nat/openplayground) -- [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) -- [withcatai/catai](https://github.com/withcatai/catai) -- [semperai/amica](https://github.com/semperai/amica) +- [LMStudio](https://lmstudio.ai/) (proprietary) +- 
[LostRuins/koboldcpp](https://github.com/LostRuins/koboldcpp) (AGPL) +- [Mozilla-Ocho/llamafile](https://github.com/Mozilla-Ocho/llamafile) +- [nomic-ai/gpt4all](https://github.com/nomic-ai/gpt4all) +- [ollama/ollama](https://github.com/ollama/ollama) +- [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) (AGPL) - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) - [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) -- [iohub/collama](https://github.com/iohub/coLLaMA) -- [pythops/tenere](https://github.com/pythops/tenere) +- [pythops/tenere](https://github.com/pythops/tenere) (AGPL) +- [semperai/amica](https://github.com/semperai/amica) +- [withcatai/catai](https://github.com/withcatai/catai) --- From 098f6d737b65134cf220d12b9b706e8cfc5e4610 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Mon, 5 Feb 2024 19:33:00 +0100 Subject: [PATCH 674/859] make: Use ccache for faster compilation (#5318) * make: Use ccache for faster compilation --- CMakeLists.txt | 4 +- Makefile | 169 ++++++++++++++++++++++++++++++++++--------------- 2 files changed, 121 insertions(+), 52 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 8c04e4c19..427015be5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -809,9 +809,9 @@ if (LLAMA_CCACHE) if (LLAMA_CCACHE_FOUND) set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache) set(ENV{CCACHE_SLOPPINESS} time_macros) - message(STATUS "Using ccache") + message(STATUS "ccache found, compilation results will be cached. Disable with LLAMA_CCACHE=OFF.") else() - message(STATUS "Warning: ccache not found - consider installing it or use LLAMA_CCACHE=OFF") + message(STATUS "Warning: ccache not found - consider installing it for faster compilation or disable this warning with LLAMA_CCACHE=OFF") endif () endif() diff --git a/Makefile b/Makefile index 21d5e15ba..ba73f0637 100644 --- a/Makefile +++ b/Makefile @@ -112,6 +112,18 @@ MK_CXXFLAGS += -O3 MK_NVCCFLAGS += -O3 endif +ifndef LLAMA_NO_CCACHE +CCACHE := $(shell which ccache) +ifdef CCACHE +export CCACHE_SLOPPINESS = time_macros +$(info I ccache found, compilation results will be cached. Disable with LLAMA_NO_CCACHE.) +CC := $(CCACHE) $(CC) +CXX := $(CCACHE) $(CXX) +else +$(info I ccache not found. Consider installing it for faster compilation.) +endif # CCACHE +endif # LLAMA_NO_CCACHE + # clock_gettime came in POSIX.1b (1993) # CLOCK_MONOTONIC came in POSIX.1-2001 / SUSv3 as optional # posix_memalign came in POSIX.1-2001 / SUSv3 @@ -374,9 +386,9 @@ ifdef LLAMA_DEBUG MK_NVCCFLAGS += -lineinfo endif # LLAMA_DEBUG ifdef LLAMA_CUDA_NVCC - NVCC = $(LLAMA_CUDA_NVCC) + NVCC = $(CCACHE) $(LLAMA_CUDA_NVCC) else - NVCC = nvcc + NVCC = $(CCACHE) nvcc endif #LLAMA_CUDA_NVCC ifdef CUDA_DOCKER_ARCH MK_NVCCFLAGS += -Wno-deprecated-gpu-targets -arch=$(CUDA_DOCKER_ARCH) @@ -483,7 +495,7 @@ ifdef LLAMA_HIPBLAS ROCM_PATH ?= /opt/rocm GPU_TARGETS ?= $(shell $(ROCM_PATH)/llvm/bin/amdgpu-arch) endif - HIPCC ?= $(ROCM_PATH)/bin/hipcc + HIPCC ?= $(CCACHE) $(ROCM_PATH)/bin/hipcc LLAMA_CUDA_DMMV_X ?= 32 LLAMA_CUDA_MMV_Y ?= 1 LLAMA_CUDA_KQUANTS_ITER ?= 2 @@ -607,97 +619,135 @@ libllama.a: llama.o ggml.o $(OBJS) $(COMMON_DEPS) clean: rm -vrf *.o tests/*.o *.so *.a *.dll benchmark-matmult common/build-info.cpp *.dot $(COV_TARGETS) $(BUILD_TARGETS) $(TEST_TARGETS) + find examples pocs -type f -name "*.o" -delete # # Examples # +# $< is the first prerequisite, i.e. the source file. 
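+# (Editorial example, not in the original patch: in the 'main' rule below,
+# $< is examples/main/main.cpp.)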
+# Explicitly compile this to an object file so that it can be cached with ccache. +# The source file is then filtered out from $^ (the list of all prerequisites) and the object file is added instead. + +# Helper function that replaces .c, .cpp, and .cu file endings with .o: +GET_OBJ_FILE = $(patsubst %.c,%.o,$(patsubst %.cpp,%.o,$(patsubst %.cu,%.o,$(1)))) + main: examples/main/main.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) @echo @echo '==== Run ./main -h for help. ====' @echo infill: examples/infill/infill.cpp ggml.o llama.o $(COMMON_DEPS) console.o grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) simple: examples/simple/simple.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tokenize: examples/tokenize/tokenize.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) batched: examples/batched/batched.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) batched-bench: examples/batched-bench/batched-bench.cpp build-info.o ggml.o llama.o common.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) quantize: examples/quantize/quantize.cpp build-info.o ggml.o llama.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) quantize-stats: examples/quantize-stats/quantize-stats.cpp build-info.o ggml.o llama.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) perplexity: examples/perplexity/perplexity.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) imatrix: examples/imatrix/imatrix.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) embedding: examples/embedding/embedding.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ 
$(LDFLAGS) save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h %.hpp $< examples/llava/clip.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) -o $@ $(LDFLAGS) $(LWINSOCK2) gguf: examples/gguf/gguf.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) train-text-from-scratch: examples/train-text-from-scratch/train-text-from-scratch.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) convert-llama2c-to-ggml: examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp ggml.o llama.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) llama-bench: examples/llama-bench/llama-bench.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) libllava.a: examples/llava/llava.cpp examples/llava/llava.h examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h common/base64.hpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) -static -fPIC -c $< -o $@ -Wno-cast-qual llava-cli: examples/llava/llava-cli.cpp examples/llava/clip.h examples/llava/clip.cpp examples/llava/llava.h examples/llava/llava.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual + $(CXX) $(CXXFLAGS) -c examples/llava/llava.cpp -o $(call GET_OBJ_FILE, examples/llava/llava.cpp) + $(CXX) $(CXXFLAGS) $(filter-out %.h $< examples/llava/clip.cpp examples/llava/llava.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) $(call GET_OBJ_FILE, examples/llava/llava.cpp) -o $@ $(LDFLAGS) baby-llama: examples/baby-llama/baby-llama.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h 
$<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) beam-search: examples/beam-search/beam-search.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) finetune: examples/finetune/finetune.cpp ggml.o llama.o $(COMMON_DEPS) train.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) export-lora: examples/export-lora/export-lora.cpp ggml.o common/common.h $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) speculative: examples/speculative/speculative.cpp ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) parallel: examples/parallel/parallel.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) lookahead: examples/lookahead/lookahead.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) lookup: examples/lookup/lookup.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) passkey: examples/passkey/passkey.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) ifeq ($(UNAME_S),Darwin) swift: examples/batched.swift @@ -705,7 +755,7 @@ swift: examples/batched.swift endif common/build-info.cpp: $(wildcard .git/index) scripts/build-info.sh - @sh scripts/build-info.sh $(CC) > $@.tmp + @sh scripts/build-info.sh "$(CC)" > $@.tmp @if ! 
cmp -s $@.tmp $@; then \ mv $@.tmp $@; \ else \ @@ -722,7 +772,8 @@ build-info.o: common/build-info.cpp tests: $(TEST_TARGETS) benchmark-matmult: examples/benchmark/benchmark-matmult.cpp build-info.o ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) run-benchmark-matmult: benchmark-matmult ./$@ @@ -730,58 +781,76 @@ run-benchmark-matmult: benchmark-matmult .PHONY: run-benchmark-matmult swift vdot: pocs/vdot/vdot.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) q8dot: pocs/vdot/q8dot.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $^ -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-llama-grammar: tests/test-llama-grammar.cpp ggml.o grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-grammar-parser: tests/test-grammar-parser.cpp ggml.o llama.o grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-double-float: tests/test-double-float.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-grad0: tests/test-grad0.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-opt: tests/test-opt.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-quantize-fns: tests/test-quantize-fns.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-quantize-perf: tests/test-quantize-perf.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-sampling: tests/test-sampling.cpp ggml.o llama.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-tokenizer-0-falcon: tests/test-tokenizer-0-falcon.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-tokenizer-0-llama: tests/test-tokenizer-0-llama.cpp ggml.o 
llama.o $(COMMON_DEPS) console.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-tokenizer-1-bpe: tests/test-tokenizer-1-bpe.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-tokenizer-1-llama: tests/test-tokenizer-1-llama.cpp ggml.o llama.o $(COMMON_DEPS) console.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-rope: tests/test-rope.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-c.o: tests/test-c.c llama.h $(CC) $(CFLAGS) -c $(filter-out %.h,$^) -o $@ tests/test-backend-ops: tests/test-backend-ops.cpp ggml.o $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-model-load-cancel: tests/test-model-load-cancel.cpp ggml.o llama.o tests/get-model.cpp $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) tests/test-autorelease: tests/test-autorelease.cpp ggml.o llama.o tests/get-model.cpp $(COMMON_DEPS) $(OBJS) - $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) + $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) + $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) From 906cff55c2848fda091d888a1585915ec0c9ea9e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 6 Feb 2024 07:47:22 +0200 Subject: [PATCH 675/859] py : handle byte tokens in `get_token_type` (#5341) * py : handle byte tokens in `get_token_type` * py : fix empty bytes arg --- convert.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/convert.py b/convert.py index 75c100118..4a2847a27 100755 --- a/convert.py +++ b/convert.py @@ -515,10 +515,14 @@ class HfVocab: # Yield token text, score, and type yield token_text, self.get_token_score(token_id), self.get_token_type( - token_id, self.special_ids # Reuse already stored special IDs + token_id, token_text, self.special_ids # Reuse already stored special IDs ) - def get_token_type(self, token_id: int, special_ids: set[int]) -> gguf.TokenType: + def get_token_type(self, token_id: int, token_text: bytes, special_ids: set[int]) -> gguf.TokenType: + # Special case for byte tokens + if re.fullmatch(br"<0x[0-9A-Fa-f]{2}>", token_text): + return gguf.TokenType.BYTE + # Determine token type based on whether it's a special token return gguf.TokenType.CONTROL if token_id in special_ids else gguf.TokenType.NORMAL @@ -530,7 +534,7 @@ class HfVocab: def added_tokens(self) -> Iterable[tuple[bytes, float, gguf.TokenType]]: for text in self.added_tokens_list: if text in self.specials: - toktype = self.get_token_type(self.specials[text], self.special_ids) + toktype = 
self.get_token_type(self.specials[text], b'', self.special_ids) score = self.get_token_score(self.specials[text]) else: toktype = gguf.TokenType.USER_DEFINED From 4ffc7a17d4e80c5f3f905139cb570ed9b6934fcb Mon Sep 17 00:00:00 2001 From: Niall Coates <1349685+Niall-@users.noreply.github.com> Date: Tue, 6 Feb 2024 08:16:23 +0000 Subject: [PATCH 676/859] server : various fixes for the prompt field in /completion (#5300) server : fix deadlock when prompt array contains strings and numbers server : removed an unnecessary generation when generating multi-prompts server : removed an unnecessary assert --- examples/server/server.cpp | 34 +++++++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 8000fee5c..fc7e723a1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1163,13 +1163,30 @@ struct llama_server_context task.multitask_id = multitask_id; // when a completion task's prompt array is not a singleton, we split it into multiple requests - if (task.data.count("prompt") && task.data.at("prompt").size() > 1) - { - split_multiprompt_task(task_id, task); - } - // otherwise, it's a single-prompt task, we actually queue it - queue_tasks.post(task); + // if there's numbers in the prompt array it will be treated as an array of tokens + if (task.data.count("prompt") != 0 && task.data.at("prompt").size() > 1) { + bool numbers = false; + for (const auto& e : task.data.at("prompt")) { + if (e.is_number()) { + numbers = true; + break; + } + } + + // NOTE: split_multiprompt_task() does not handle a mix of strings and numbers, + // it will completely stall the server. I don't know where the bug for this is. + // + // if there are numbers, it needs to be treated like a single prompt, + // queue_tasks handles a mix of strings and numbers just fine. + if (numbers) { + queue_tasks.post(task); + } else { + split_multiprompt_task(task_id, task); + } + } else { + queue_tasks.post(task); + } } // for multiple images processing @@ -1251,7 +1268,10 @@ struct llama_server_context void split_multiprompt_task(int multitask_id, task_server& multiprompt_task) { int prompt_count = multiprompt_task.data.at("prompt").size(); - assert(prompt_count > 1); + if (prompt_count <= 1) { + send_error(multiprompt_task, "error while handling multiple prompts"); + return; + } // generate all the ID for subtask std::vector subtask_ids(prompt_count); From 31e790322133a4b1d0684527ea446e765e8a96cf Mon Sep 17 00:00:00 2001 From: Michael Coppola Date: Tue, 6 Feb 2024 04:20:00 -0500 Subject: [PATCH 677/859] server : add `dynatemp_range` and `dynatemp_exponent` (#5352) * server: added `dynatemp_range` and `dynatemp_exponent` * Update README.md --------- Co-authored-by: Michael Coppola --- examples/server/README.md | 4 ++++ examples/server/server.cpp | 46 +++++++++++++++++++++----------------- 2 files changed, 29 insertions(+), 21 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index d8e7c313e..46d8f85ae 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -137,6 +137,10 @@ node index.js `temperature`: Adjust the randomness of the generated text (default: 0.8). + `dynatemp_range`: Dynamic temperature range (default: 0.0, 0.0 = disabled). + + `dynatemp_exponent`: Dynamic temperature exponent (default: 1.0). + `top_k`: Limit the next token selection to the K most probable tokens (default: 40). 
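As a quick sketch of how these fields combine in a request body (illustrative only: the prompt and values below are made up, and reading the range as `temperature ± dynatemp_range` is an assumption based on the descriptions above):

```json
{
  "prompt": "Building a website can be done in 10 simple steps:",
  "temperature": 0.8,
  "dynatemp_range": 0.5,
  "dynatemp_exponent": 1.0
}
```

With these settings the effective sampling temperature would be allowed to vary within roughly `[0.3, 1.3]`, while `dynatemp_exponent` shapes how the current token entropy is mapped into that interval; leaving `dynatemp_range` at 0.0 keeps plain static temperature.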
`top_p`: Limit the next token selection to a subset of tokens with a cumulative probability above a threshold P (default: 0.95). diff --git a/examples/server/server.cpp b/examples/server/server.cpp index fc7e723a1..e48a1da75 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -524,27 +524,29 @@ struct llama_server_context slot->oaicompat_model = ""; } - slot->params.stream = json_value(data, "stream", false); - slot->params.cache_prompt = json_value(data, "cache_prompt", false); - slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict); - slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k); - slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p); - slot->sparams.min_p = json_value(data, "min_p", default_sparams.min_p); - slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z); - slot->sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p); - slot->sparams.temp = json_value(data, "temperature", default_sparams.temp); - slot->sparams.penalty_last_n = json_value(data, "repeat_last_n", default_sparams.penalty_last_n); - slot->sparams.penalty_repeat = json_value(data, "repeat_penalty", default_sparams.penalty_repeat); - slot->sparams.penalty_freq = json_value(data, "frequency_penalty", default_sparams.penalty_freq); - slot->sparams.penalty_present = json_value(data, "presence_penalty", default_sparams.penalty_present); - slot->sparams.mirostat = json_value(data, "mirostat", default_sparams.mirostat); - slot->sparams.mirostat_tau = json_value(data, "mirostat_tau", default_sparams.mirostat_tau); - slot->sparams.mirostat_eta = json_value(data, "mirostat_eta", default_sparams.mirostat_eta); - slot->sparams.penalize_nl = json_value(data, "penalize_nl", default_sparams.penalize_nl); - slot->params.n_keep = json_value(data, "n_keep", slot->params.n_keep); - slot->params.seed = json_value(data, "seed", default_params.seed); - slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); - slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); + slot->params.stream = json_value(data, "stream", false); + slot->params.cache_prompt = json_value(data, "cache_prompt", false); + slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict); + slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k); + slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p); + slot->sparams.min_p = json_value(data, "min_p", default_sparams.min_p); + slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z); + slot->sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p); + slot->sparams.temp = json_value(data, "temperature", default_sparams.temp); + slot->sparams.dynatemp_range = json_value(data, "dynatemp_range", default_sparams.dynatemp_range); + slot->sparams.dynatemp_exponent = json_value(data, "dynatemp_exponent", default_sparams.dynatemp_exponent); + slot->sparams.penalty_last_n = json_value(data, "repeat_last_n", default_sparams.penalty_last_n); + slot->sparams.penalty_repeat = json_value(data, "repeat_penalty", default_sparams.penalty_repeat); + slot->sparams.penalty_freq = json_value(data, "frequency_penalty", default_sparams.penalty_freq); + slot->sparams.penalty_present = json_value(data, "presence_penalty", default_sparams.penalty_present); + slot->sparams.mirostat = json_value(data, "mirostat", default_sparams.mirostat); + slot->sparams.mirostat_tau = json_value(data, 
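        // NOTE: json_value(data, key, default) is assumed here to behave like a
        // type-checked data.value(key, default): each sampling field is read from the
        // request JSON when present and falls back to the server-side default otherwise,
        // so the two new dynatemp fields stay fully optional for existing clients.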
"mirostat_tau", default_sparams.mirostat_tau); + slot->sparams.mirostat_eta = json_value(data, "mirostat_eta", default_sparams.mirostat_eta); + slot->sparams.penalize_nl = json_value(data, "penalize_nl", default_sparams.penalize_nl); + slot->params.n_keep = json_value(data, "n_keep", slot->params.n_keep); + slot->params.seed = json_value(data, "seed", default_params.seed); + slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); + slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); // infill if (data.count("input_prefix") != 0) @@ -1002,6 +1004,8 @@ struct llama_server_context {"model", params.model_alias}, {"seed", slot.params.seed}, {"temperature", slot.sparams.temp}, + {"dynatemp_range", slot.sparams.dynatemp_range}, + {"dynatemp_exponent", slot.sparams.dynatemp_exponent}, {"top_k", slot.sparams.top_k}, {"top_p", slot.sparams.top_p}, {"min_p", slot.sparams.min_p}, From 8a79c591de9b7ff3242a94f68b7fb5a17ed8c2be Mon Sep 17 00:00:00 2001 From: Justin Parker Date: Tue, 6 Feb 2024 04:20:59 -0500 Subject: [PATCH 678/859] server : include total "num_slots" in props endpoint (#5349) --- examples/server/server.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index e48a1da75..d86d7e04a 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -432,6 +432,7 @@ struct llama_server_context } default_generation_settings_for_props = get_formated_generation(slots.front()); + default_generation_settings_for_props["num_slots"] = params.n_parallel; default_generation_settings_for_props["seed"] = -1; batch = llama_batch_init(n_ctx, 0, params.n_parallel); From 2c516611f1d0f1e5e9754f8ea1cf97cb1b17bf2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 6 Feb 2024 14:44:06 +0100 Subject: [PATCH 679/859] CUDA: mul_mat_vec_q for batch sizes > 1 (#5351) --- ggml-cuda.cu | 240 +++++++++++++++++++++------------------------------ 1 file changed, 98 insertions(+), 142 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 3242a0b4a..95161b3f4 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5310,41 +5310,50 @@ template static __global__ void #endif // __CUDA_ARCH__ >= CC_VOLTA } -template -static __global__ void mul_mat_vec_q(const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, const int ncols, const int nrows) { +template +static __global__ void mul_mat_vec_q( + const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst, + const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par) { + + const int ncols_y = ncols_y_template != 0 ? ncols_y_template : ncols_y_par; + const int row = blockIdx.x*blockDim.y + threadIdx.y; - if (row >= nrows) { + if (row >= nrows_x) { return; } - const int blocks_per_row = ncols / qk; + const int blocks_per_row_x = ncols_x / qk; + const int blocks_per_col_y = nrows_y / QK8_1; const int blocks_per_warp = vdr * WARP_SIZE / qi; // partial sum for each thread - float tmp = 0.0f; + float tmp[ncols_y_template != 0 ? 
ncols_y_template : 8] = {0.0f}; const block_q_t * x = (const block_q_t *) vx; const block_q8_1 * y = (const block_q8_1 *) vy; - for (int i = threadIdx.x / (qi/vdr); i < blocks_per_row; i += blocks_per_warp) { - const int ibx = row*blocks_per_row + i; // x block index + for (int i = threadIdx.x / (qi/vdr); i < blocks_per_row_x; i += blocks_per_warp) { + const int ibx = row*blocks_per_row_x + i; // x block index const int iby = i * (qk/QK8_1); // y block index that aligns with ibx const int iqs = vdr * (threadIdx.x % (qi/vdr)); // x block quant index when casting the quants to int - tmp += vec_dot_q_cuda(&x[ibx], &y[iby], iqs); +#pragma unroll + for (int j = 0; j < ncols_y; ++j) { + tmp[j] += vec_dot_q_cuda(&x[ibx], &y[j*blocks_per_col_y + iby], iqs); + } } // sum up partial sums and write back result #pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - tmp += __shfl_xor_sync(0xffffffff, tmp, mask, 32); - } + for (int j = 0; j < ncols_y; ++j) { + tmp[j] = warp_reduce_sum(tmp[j]); - if (threadIdx.x == 0) { - dst[row] = tmp; + if (threadIdx.x == 0) { + dst[j*nrows_x + row] = tmp[j]; + } } } @@ -6816,121 +6825,56 @@ static void convert_mul_mat_vec_f16_cuda(const void * vx, const dfloat * y, floa <<>>(vx, y, dst, ncols, nrows); } -static void mul_mat_vec_q4_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK4_0 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} +template +static void mul_mat_vec_q_cuda( + const void * vx, const void * vy, float * dst, + const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, cudaStream_t stream) { -static void mul_mat_vec_q4_1_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK4_1 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} + GGML_ASSERT(ncols_x % qk == 0); + GGML_ASSERT(ncols_y <= 8); -static void mul_mat_vec_q5_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK5_0 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; + const int block_num_y = (nrows_x + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; const dim3 block_nums(block_num_y, 1, 1); const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q5_1_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK5_1 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q8_0_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK8_0 == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, 
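    // NOTE: the launch geometry matches the per-type wrappers this templated launcher
    // replaces: WARP_SIZE threads along x cooperate on the dot products of one row,
    // while threadIdx.y selects one of GGML_CUDA_MMV_Y rows per block, matching
    // row = blockIdx.x*blockDim.y + threadIdx.y in the kernel above.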
GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q2_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q3_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q4_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q5_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_q6_K_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_iq2_xxs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_iq2_xs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); -} - -static void mul_mat_vec_iq3_xxs_q8_1_cuda(const void * vx, const void * vy, float * dst, const int ncols, const int nrows, cudaStream_t stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; - const dim3 block_nums(block_num_y, 1, 1); - const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1); - mul_mat_vec_q - <<>>(vx, vy, dst, ncols, nrows); + switch (ncols_y) { + case 1: + mul_mat_vec_q<1, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 2: + mul_mat_vec_q<2, qk, qi, block_q_t, vdr, 
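        // NOTE: dispatching on ncols_y hands the kernel a compile-time column count
        // for every supported batch size (1..8 in this patch); the commented-out
        // ncols_y_template == 0 instantiation in the default case would handle
        // arbitrary counts at runtime, presumably at the cost of the unrolling
        // noted above.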
vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 3: + mul_mat_vec_q<3, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 4: + mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 5: + mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 6: + mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 7: + mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + case 8: + mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> + <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + default: + GGML_ASSERT(false); + // mul_mat_vec_q<0, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + break; + } } static void ggml_mul_mat_q4_0_q8_1_cuda( @@ -8578,50 +8522,61 @@ static void ggml_cuda_op_mul_mat_vec_q( const char * src1_ddq_i, float * dst_dd_i, const int64_t row_low, const int64_t row_high, const int64_t src1_ncols, const int64_t src1_padded_row_size, cudaStream_t stream) { - GGML_ASSERT(ggml_nrows(src1) == 1); - const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; switch (src0->type) { case GGML_TYPE_Q4_0: - mul_mat_vec_q4_0_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q4_1: - mul_mat_vec_q4_1_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q5_0: - mul_mat_vec_q5_0_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q5_1: - mul_mat_vec_q5_1_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q8_0: - mul_mat_vec_q8_0_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q2_K: - mul_mat_vec_q2_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q3_K: - mul_mat_vec_q3_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q4_K: - mul_mat_vec_q4_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q5_K: - mul_mat_vec_q5_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_Q6_K: - 
mul_mat_vec_q6_K_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_IQ2_XXS: - mul_mat_vec_iq2_xxs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_IQ2_XS: - mul_mat_vec_iq2_xs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; case GGML_TYPE_IQ3_XXS: - mul_mat_vec_iq3_xxs_q8_1_cuda(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream); break; default: GGML_ASSERT(false); @@ -9945,17 +9900,18 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 #ifdef GGML_CUDA_FORCE_DMMV const bool use_mul_mat_vec_q = false; #else - const bool use_mul_mat_vec_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type) && ggml_nrows(src1) == 1; + const bool use_mul_mat_vec_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); #endif // GGML_CUDA_FORCE_DMMV if (use_mul_mat_vec_q) { - // NOTE: this kernel does not support ggml_nrows(src1) > 1 ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); } else { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); } } else { - if (use_mul_mat_q) { + if (src1->ne[1] <= 8 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type)) { + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); + } else if (use_mul_mat_q) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); } else { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); From 2e9c0bd6b301155ce749e162527fc55e9fb5b832 Mon Sep 17 00:00:00 2001 From: BarfingLemurs <128182951+BarfingLemurs@users.noreply.github.com> Date: Tue, 6 Feb 2024 09:06:48 -0500 Subject: [PATCH 680/859] readme : add phi, orion 14b, internlm2, and yi-VL to readme (#5362) --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index bb6c49338..cc87ac797 100644 --- a/README.md +++ b/README.md @@ -105,11 +105,14 @@ improved significantly thanks to many contributions. It is the main playground f - [X] [MPT](https://github.com/ggerganov/llama.cpp/pull/3417) - [X] [Bloom](https://github.com/ggerganov/llama.cpp/pull/3553) - [x] [Yi models](https://huggingface.co/models?search=01-ai/Yi) -- [X] [StableLM-3b-4e1t](https://github.com/ggerganov/llama.cpp/pull/3586) +- [X] [StableLM models](https://huggingface.co/stabilityai) - [x] [Deepseek models](https://huggingface.co/models?search=deepseek-ai/deepseek) - [x] [Qwen models](https://huggingface.co/models?search=Qwen/Qwen) - [x] [PLaMo-13B](https://github.com/ggerganov/llama.cpp/pull/3557) +- [x] [Phi models](https://huggingface.co/models?search=microsoft/phi) - [x] [GPT-2](https://huggingface.co/gpt2) +- [x] [Orion 14B](https://github.com/ggerganov/llama.cpp/pull/5118) +- [x] [InternLM2](https://huggingface.co/models?search=internlm2) - [x] [CodeShell](https://github.com/WisdomShell/codeshell) **Multimodal models:** @@ -119,6 +122,7 @@ improved significantly thanks to many contributions. 
It is the main playground f - [x] [Obsidian](https://huggingface.co/NousResearch/Obsidian-3B-V0.5) - [x] [ShareGPT4V](https://huggingface.co/models?search=Lin-Chen/ShareGPT4V) - [x] [MobileVLM 1.7B/3B models](https://huggingface.co/models?search=mobileVLM) +- [x] [Yi-VL](https://huggingface.co/models?search=Yi-VL) **Bindings:** From f57fadc009cbff741a1961cb7896c47d73978d2c Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Tue, 6 Feb 2024 17:28:02 +0200 Subject: [PATCH 681/859] Slight quantization improvement for Q4_K and Q5_K (#5361) * Q4_K: slightly better quantization * Q5_K: slightly better quantization --------- Co-authored-by: Iwan Kawrakow --- ggml-quants.c | 75 +++++++++++++++++++++++---------------------------- 1 file changed, 33 insertions(+), 42 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 014c0525a..101d3e783 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -2381,19 +2381,20 @@ static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restri uint8_t L[QK_K]; uint8_t Laux[32]; + uint8_t Ls[QK_K/32]; + uint8_t Lm[QK_K/32]; float weights[32]; - float mins[QK_K/32]; - float scales[QK_K/32]; + float sw[QK_K/32]; + float mins[QK_K/32]; + float scales[QK_K/32]; for (int i = 0; i < nb; i++) { float sum_x2 = 0; for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; - float sigma2 = sum_x2/QK_K; + float sigma2 = 2*sum_x2/QK_K; float av_x = sqrtf(sigma2); - float max_scale = 0; // as we are deducting the min, scales are always positive - float max_min = 0; for (int j = 0; j < QK_K/32; ++j) { if (quant_weights) { const float * qw = quant_weights + QK_K*i + 32*j; @@ -2401,25 +2402,17 @@ static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restri } else { for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); } + float sumw = 0; + for (int l = 0; l < 32; ++l) sumw += weights[l]; + sw[j] = sumw; scales[j] = make_qkx3_quants(32, 15, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); - //scales[j] = make_qkx2_quants(32, 15, x + 32*j, weights, L + 32*j, &mins[j], Laux, -1.f, 0.1f, 20, false); - float scale = scales[j]; - if (scale > max_scale) { - max_scale = scale; - } - float min = mins[j]; - if (min > max_min) { - max_min = min; - } } - float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; - float inv_min = max_min > 0 ? 
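        // NOTE: the removed code above derived the 6-bit super-block scale factors from
        // the single largest sub-block scale and min; the make_qp_quants() replacement
        // below instead fits all QK_K/32 sub-block scales and mins jointly, weighting
        // each sub-block by its summed importance sw[j]. The same change is mirrored in
        // quantize_row_q5_K_impl further down, and is presumably the source of the
        // "slightly better quantization" in the commit title.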
63.f/max_min : 0.f; + float d_block = make_qp_quants(QK_K/32, 63, scales, Ls, sw); + float m_block = make_qp_quants(QK_K/32, 63, mins, Lm, sw); for (int j = 0; j < QK_K/32; ++j) { - uint8_t ls = nearest_int(inv_scale*scales[j]); - uint8_t lm = nearest_int(inv_min*mins[j]); - ls = MIN(63, ls); - lm = MIN(63, lm); + uint8_t ls = Ls[j]; + uint8_t lm = Lm[j]; if (j < 4) { y[i].scales[j] = ls; y[i].scales[j+4] = lm; @@ -2429,8 +2422,8 @@ static void quantize_row_q4_K_impl(const float * restrict x, block_q4_K * restri y[i].scales[j-0] |= ((lm >> 4) << 6); } } - y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); - y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + y[i].d = GGML_FP32_TO_FP16(d_block); + y[i].dmin = GGML_FP32_TO_FP16(m_block); uint8_t sc, m; for (int j = 0; j < QK_K/32; ++j) { @@ -2688,20 +2681,21 @@ static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restri const int nb = n_per_row / QK_K; uint8_t L[QK_K]; - float mins[QK_K/32]; - float scales[QK_K/32]; - float weights[32]; uint8_t Laux[32]; + uint8_t Ls[QK_K/32]; + uint8_t Lm[QK_K/32]; + float mins[QK_K/32]; + float scales[QK_K/32]; + float sw[QK_K/32]; + float weights[32]; for (int i = 0; i < nb; i++) { float sum_x2 = 0; for (int l = 0; l < QK_K; ++l) sum_x2 += x[l] * x[l]; - float sigma2 = sum_x2/QK_K; + float sigma2 = 2*sum_x2/QK_K; float av_x = sqrtf(sigma2); - float max_scale = 0; // as we are deducting the min, scales are always positive - float max_min = 0; for (int j = 0; j < QK_K/32; ++j) { if (quant_weights) { const float * qw = quant_weights + QK_K*i + 32*j; @@ -2709,22 +2703,19 @@ static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restri } else { for (int l = 0; l < 32; ++l) weights[l] = av_x + fabsf(x[32*j + l]); } + float sumw = 0; + for (int l = 0; l < 32; ++l) sumw += weights[l]; + sw[j] = sumw; + scales[j] = make_qkx3_quants(32, 31, x + 32*j, weights, L + 32*j, &mins[j], Laux, -0.9f, 0.05f, 36, false); - float scale = scales[j]; - if (scale > max_scale) { - max_scale = scale; - } - float min = mins[j]; - if (min > max_min) { - max_min = min; - } } - float inv_scale = max_scale > 0 ? 63.f/max_scale : 0.f; - float inv_min = max_min > 0 ? 63.f/max_min : 0.f; + float d_block = make_qp_quants(QK_K/32, 63, scales, Ls, sw); + float m_block = make_qp_quants(QK_K/32, 63, mins, Lm, sw); + for (int j = 0; j < QK_K/32; ++j) { - uint8_t ls = nearest_int(inv_scale*scales[j]); - uint8_t lm = nearest_int(inv_min*mins[j]); + uint8_t ls = Ls[j]; + uint8_t lm = Lm[j]; ls = MIN(63, ls); lm = MIN(63, lm); if (j < 4) { @@ -2736,8 +2727,8 @@ static void quantize_row_q5_K_impl(const float * restrict x, block_q5_K * restri y[i].scales[j-0] |= ((lm >> 4) << 6); } } - y[i].d = GGML_FP32_TO_FP16(max_scale/63.f); - y[i].dmin = GGML_FP32_TO_FP16(max_min/63.f); + y[i].d = GGML_FP32_TO_FP16(d_block); + y[i].dmin = GGML_FP32_TO_FP16(m_block); uint8_t sc, m; for (int j = 0; j < QK_K/32; ++j) { From b08f22c882a1443e6b97081f3ce718a4d1a741f8 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Tue, 6 Feb 2024 19:00:16 +0200 Subject: [PATCH 682/859] Update README.md (#5366) Add some links to quantization related PRs --- README.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index cc87ac797..34f2021f9 100644 --- a/README.md +++ b/README.md @@ -736,9 +736,21 @@ Several quantization methods are supported. 
They differ in the resulting model d | 13B | bits/weight | 16.0 | 4.5 | 5.0 | 5.5 | 6.0 | 8.5 | - [k-quants](https://github.com/ggerganov/llama.cpp/pull/1684) -- recent k-quants improvements +- recent k-quants improvements and new i-quants - [#2707](https://github.com/ggerganov/llama.cpp/pull/2707) - [#2807](https://github.com/ggerganov/llama.cpp/pull/2807) + - [#4773 - 2-bit i-quants (inference)](https://github.com/ggerganov/llama.cpp/pull/4773) + - [#4856 - 2-bit i-quants (inference)](https://github.com/ggerganov/llama.cpp/pull/4856) + - [#4861 - importance matrix](https://github.com/ggerganov/llama.cpp/pull/4861) + - [#4872 - MoE models](https://github.com/ggerganov/llama.cpp/pull/4872) + - [#4897 - 2-bit quantization](https://github.com/ggerganov/llama.cpp/pull/4897) + - [#4930 - imatrix for all k-quants](https://github.com/ggerganov/llama.cpp/pull/4930) + - [#4951 - imatrix on the GPU](https://github.com/ggerganov/llama.cpp/pull/4957) + - [#4969 - imatrix for legacy quants](https://github.com/ggerganov/llama.cpp/pull/4969) + - [#4996 - k-qunats tuning](https://github.com/ggerganov/llama.cpp/pull/4996) + - [#5060 - Q3_K_XS](https://github.com/ggerganov/llama.cpp/pull/5060) + - [#5196 - 3-bit i-quants](https://github.com/ggerganov/llama.cpp/pull/5196) + - [quantization tuning](https://github.com/ggerganov/llama.cpp/pull/5320), [another one](https://github.com/ggerganov/llama.cpp/pull/5334), and [another one](https://github.com/ggerganov/llama.cpp/pull/5361) ### Perplexity (measuring model quality) From 17c97fb0620448b37516a3f53fea6c482b0a30a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 6 Feb 2024 18:43:06 +0100 Subject: [PATCH 683/859] CUDA: mul_mat_vec_q max. batch size 8 -> 4 (#5370) --- ggml-cuda.cu | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 95161b3f4..3b828375e 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6831,7 +6831,7 @@ static void mul_mat_vec_q_cuda( const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, cudaStream_t stream) { GGML_ASSERT(ncols_x % qk == 0); - GGML_ASSERT(ncols_y <= 8); + GGML_ASSERT(ncols_y <= 4); const int block_num_y = (nrows_x + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y; const dim3 block_nums(block_num_y, 1, 1); @@ -6853,22 +6853,22 @@ static void mul_mat_vec_q_cuda( mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot> <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); break; - case 5: - mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); - break; - case 6: - mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); - break; - case 7: - mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); - break; - case 8: - mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> - <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); - break; + // case 5: + // mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // break; + // case 6: + // mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // break; + // case 7: + // mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y); + // break; + // case 8: + // mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot> + // <<>>(vx, vy, dst, ncols_x, nrows_x, 
nrows_y, ncols_y); + // break; default: GGML_ASSERT(false); // mul_mat_vec_q<0, qk, qi, block_q_t, vdr, vec_dot> @@ -9909,7 +9909,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); } } else { - if (src1->ne[1] <= 8 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type)) { + if (src1->ne[1] <= 4 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type)) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); } else if (use_mul_mat_q) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); From 213d1439fadefe182f69c5f7e8dd3b4b6572ebcb Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Tue, 6 Feb 2024 18:08:38 +0000 Subject: [PATCH 684/859] server : remove model.json endpoint (#5371) --- examples/server/completion.js.hpp | 448 +++++++++++++++------------ examples/server/public/completion.js | 3 +- examples/server/server.cpp | 11 - 3 files changed, 244 insertions(+), 218 deletions(-) diff --git a/examples/server/completion.js.hpp b/examples/server/completion.js.hpp index fe5f81228..f5e696e17 100644 --- a/examples/server/completion.js.hpp +++ b/examples/server/completion.js.hpp @@ -236,214 +236,250 @@ unsigned char completion_js[] = { 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, - 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x24, - 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x60, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x2e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x73, + 0x28, 0x27, 0x73, 0x6c, 0x6f, 0x74, 0x20, 0x75, 0x6e, 0x61, 0x76, 0x61, + 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x27, 0x29, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x54, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x61, + 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x20, 0x74, 0x6f, 0x20, 0x62, + 0x65, 0x20, 0x63, 0x61, 0x75, 0x67, 0x68, 0x74, 0x20, 0x62, 0x79, 0x20, + 0x75, 0x70, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x20, 0x63, 0x61, 0x6c, + 0x6c, 0x65, 0x72, 0x73, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, + 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x27, + 0x73, 0x6c, 0x6f, 0x74, 0x20, 0x75, 0x6e, 0x61, 0x76, 0x61, 0x69, 0x6c, + 0x61, 0x62, 0x6c, 0x65, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 0x72, 0x6f, + 
0x72, 0x3a, 0x20, 0x24, 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, - 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x21, 0x3d, 0x3d, - 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, - 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, - 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x66, - 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, - 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, - 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x6e, 0x20, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x79, 0x6f, 0x75, 0x20, 0x63, 0x61, - 0x6e, 0x20, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x20, - 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, - 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, - 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, - 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, - 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x2e, 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x6e, - 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x70, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, - 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x22, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, - 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x63, - 
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, 0x76, 0x65, 0x6e, 0x74, - 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, - 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, - 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, - 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, - 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, - 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, - 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x6d, 0x65, - 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, - 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x20, 0x3d, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x28, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, + 0x6c, 0x65, 0x2e, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x60, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x20, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x3a, 0x20, 0x24, 0x7b, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x7d, 0x60, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, - 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, - 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, - 
0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, - 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, - 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, - 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, - 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x74, 0x69, 0x6d, 0x69, - 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, - 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x7d, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, - 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, - 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, - 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, 0x6e, 0x65, 0x22, 0x2c, 0x20, - 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x7b, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7d, 0x20, 0x7d, 0x29, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x3b, 0x0a, 0x7d, 0x0a, 0x0a, - 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x61, 0x20, - 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, 0x74, 0x68, 0x61, 0x74, - 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x73, 0x20, 0x74, 0x6f, - 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, 0x20, 0x54, 0x68, 0x69, - 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x73, - 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x73, 0x74, 0x72, 0x65, 0x61, - 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x45, - 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, - 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, - 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x29, 0x2e, 0x74, 0x68, 0x65, 0x6e, 0x28, 0x28, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x2f, - 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, + 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, + 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x27, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x45, + 
0x72, 0x72, 0x6f, 0x72, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, + 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x65, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x2e, 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, + 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x61, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x79, 0x6f, + 0x75, 0x20, 0x63, 0x61, 0x6e, 0x20, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, + 0x69, 0x62, 0x65, 0x20, 0x74, 0x6f, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, + 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, 0x2f, 0x2f, + 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x7d, 0x20, 0x66, + 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x6a, 0x73, 0x27, 0x0a, 0x2f, 0x2f, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x63, 0x6f, 0x6e, 0x6e, 0x20, 0x3d, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x6e, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, + 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, 0x28, + 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, - 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, - 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x0a, - 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, 0x65, 0x28, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, - 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, - 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x2c, 0x20, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, 0x7b, 0x7d, 0x29, - 
0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x69, - 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x72, 0x65, - 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, - 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, - 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, - 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, - 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x28, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, 0x28, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x7d, - 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x2f, 0x2a, 0x2a, 0x0a, 0x20, - 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, - 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, - 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x20, 0x3d, 0x20, - 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, - 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, - 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x28, 0x63, - 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x7d, - 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, 0x74, 0x20, 0x74, 0x68, 0x65, - 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x69, 0x6e, 0x66, 0x6f, 0x20, - 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, - 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, - 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x20, - 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x68, 0x65, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, 0x77, 0x69, 0x6e, 0x64, - 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x6f, 0x20, 0x6f, 0x6e, - 
0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x4d, 0x6f, 0x64, 0x65, - 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, - 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, - 0x66, 0x65, 0x74, 0x63, 0x68, 0x28, 0x22, 0x2f, 0x6d, 0x6f, 0x64, 0x65, - 0x6c, 0x2e, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0x29, 0x2e, 0x74, 0x68, 0x65, - 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, 0x72, 0x2e, 0x6a, 0x73, 0x6f, - 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x2e, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, 0x0a, 0x65, 0x78, 0x70, + 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, + 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x7b, + 0x7d, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, 0x20, + 0x7b, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, + 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, + 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, + 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, + 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, + 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, + 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, + 0x74, 0x63, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, + 0x20, 0x43, 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x28, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x2c, 0x20, + 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x20, 0x7d, 0x29, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 
0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a + 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, + 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, + 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, + 0x69, 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, + 0x61, 0x74, 0x61, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, + 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, + 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, + 0x72, 0x67, 0x65, 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, + 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, + 0x75, 0x73, 0x74, 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, + 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x2c, 0x20, 0x7b, 0x20, + 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x3a, 0x20, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, + 0x67, 0x73, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, + 0x74, 0x2e, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x28, 0x6e, 0x65, 0x77, 0x20, 0x43, 0x75, 0x73, 0x74, + 0x6f, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x28, 0x22, 0x64, 0x6f, 0x6e, + 0x65, 0x22, 0x2c, 0x20, 0x7b, 0x20, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, + 0x3a, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, + 0x7d, 0x20, 0x7d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x28, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x3b, + 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x43, 0x61, 0x6c, 0x6c, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x61, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x20, + 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, + 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2e, + 0x20, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64, 0x6f, 0x65, 0x73, 0x20, 0x6e, + 0x6f, 0x74, 0x20, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x73, + 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x0a, 0x2f, 0x2f, 0x0a, + 0x2f, 0x2f, 0x20, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x3a, 0x0a, + 0x2f, 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x29, 0x2e, 
0x74, 0x68, 0x65, 0x6e, 0x28, + 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, + 0x74, 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, + 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x2f, 0x2f, + 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x72, 0x0a, 0x2f, + 0x2f, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x3d, + 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x29, 0x0a, 0x2f, 0x2f, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x72, 0x69, 0x74, + 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x0a, 0x2f, + 0x2f, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x72, 0x6f, 0x6d, + 0x69, 0x73, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, + 0x7b, 0x7d, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x3d, + 0x20, 0x7b, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x50, + 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x28, 0x61, 0x73, 0x79, 0x6e, 0x63, + 0x20, 0x28, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x2c, 0x20, 0x72, + 0x65, 0x6a, 0x65, 0x63, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x72, 0x79, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, + 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, + 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x29, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x20, 0x2b, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, + 0x65, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, 0x20, + 0x28, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x7d, 0x3b, 0x0a, 0x0a, 0x2f, + 0x2a, 0x2a, 0x0a, 0x20, 0x2a, 0x20, 0x28, 0x64, 0x65, 0x70, 0x72, 0x65, + 0x63, 0x61, 0x74, 0x65, 0x64, 0x29, 0x0a, 0x20, 0x2a, 0x2f, 0x0a, 0x65, + 0x78, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 
0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2c, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, + 0x61, 0x63, 0x6b, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x28, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x20, 0x6f, + 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x7d, 0x29, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, + 0x63, 0x6b, 0x28, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x7d, 0x0a, 0x7d, 0x0a, 0x0a, 0x2f, 0x2f, 0x20, 0x47, 0x65, 0x74, + 0x20, 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x69, + 0x6e, 0x66, 0x6f, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x2e, 0x20, 0x54, 0x68, 0x69, + 0x73, 0x20, 0x69, 0x73, 0x20, 0x75, 0x73, 0x65, 0x66, 0x75, 0x6c, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x67, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x20, + 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, + 0x6f, 0x20, 0x6f, 0x6e, 0x2e, 0x0a, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, + 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x67, 0x65, 0x6e, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, 0x74, 0x74, + 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x20, + 0x3d, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x66, 0x65, 0x74, 0x63, + 0x68, 0x28, 0x22, 0x2f, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x22, 0x29, 0x2e, + 0x74, 0x68, 0x65, 0x6e, 0x28, 0x72, 0x20, 0x3d, 0x3e, 0x20, 0x72, 0x2e, + 0x6a, 0x73, 0x6f, 0x6e, 0x28, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x70, + 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, + 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x67, + 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x3b, 0x0a, 0x7d, 0x0a }; -unsigned int completion_js_len = 5346; +unsigned int completion_js_len = 5782; diff --git a/examples/server/public/completion.js b/examples/server/public/completion.js index baaec1d60..ab38a7b40 100644 --- a/examples/server/public/completion.js +++ b/examples/server/public/completion.js @@ -195,7 +195,8 @@ export const llamaComplete = async (params, controller, callback) => { // Get the model info from the server. This is useful for getting the context window and so on. 
export const llamaModelInfo = async () => { if (!generation_settings) { - generation_settings = await fetch("/model.json").then(r => r.json()); + const props = await fetch("/props").then(r => r.json()); + generation_settings = props.default_generation_settings; } return generation_settings; } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index d86d7e04a..9481ce6b1 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -990,11 +990,6 @@ struct llama_server_context queue_results.send(res); } - json get_model_props() - { - return get_formated_generation(slots[0]); - } - json get_formated_generation(llama_client_slot &slot) { const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(model)); @@ -2895,12 +2890,6 @@ int main(int argc, char **argv) } }); - svr.Get("/model.json", [&llama](const httplib::Request &, httplib::Response &res) - { - const json data = llama.get_model_props(); - return res.set_content(data.dump(), "application/json; charset=utf-8"); - }); - svr.Options(R"(/.*)", [](const httplib::Request &, httplib::Response &res) { return res.set_content("", "application/json; charset=utf-8"); }); From f68664ac241a6b5c233d8f1051eef20929b06008 Mon Sep 17 00:00:00 2001 From: Sang-Kil Park Date: Wed, 7 Feb 2024 13:28:00 +0900 Subject: [PATCH 685/859] convert : fix TypeError on GPT-2 vocab.json (#5288) --- convert.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/convert.py b/convert.py index 4a2847a27..323e8058d 100755 --- a/convert.py +++ b/convert.py @@ -334,9 +334,9 @@ class Params: class BpeVocab: def __init__(self, fname_tokenizer: Path, fname_added_tokens: Path | None) -> None: self.bpe_tokenizer = json.loads(open(str(fname_tokenizer), encoding="utf-8").read()) - try: + if isinstance(self.bpe_tokenizer.get('model'), dict): self.vocab = self.bpe_tokenizer["model"]["vocab"] - except KeyError: + else: self.vocab = self.bpe_tokenizer added_tokens: dict[str, int] if fname_added_tokens is not None: From f3e2b4fa3f81a410ecb7dec929c259ef8d8dbb7d Mon Sep 17 00:00:00 2001 From: Justin Parker Date: Wed, 7 Feb 2024 01:15:19 -0500 Subject: [PATCH 686/859] server : update `/props` with "total_slots" value (#5373) * include total "num_slots" in default_generation_settings_for_props * cleanup total_slots return value in /props endpoint * update /props endpoint docs with total_slots * remove num_slots from default_generation_settings_for_props * update /props endpoint section --- examples/server/README.md | 4 +++- examples/server/server.cpp | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 46d8f85ae..1db7cdf21 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -276,13 +276,15 @@ Notice that each `probs` is an array of length `n_probs`. { "assistant_name": "", "user_name": "", - "default_generation_settings": { ... } + "default_generation_settings": { ... }, + "total_slots": 1 } ``` - `assistant_name` - the required assistant name to generate the prompt in case you have specified a system prompt for all slots. - `user_name` - the required anti-prompt to generate the prompt in case you have specified a system prompt for all slots. - `default_generation_settings` - the default generation settings for the `/completion` endpoint, has the same fields as the `generation_settings` response object from the `/completion` endpoint. 
+- `total_slots` - the total number of slots for process requests (defined by `--parallel` option) - **POST** `/v1/chat/completions`: OpenAI-compatible Chat Completions API. Given a ChatML-formatted json description in `messages`, it returns the predicted completion. Both synchronous and streaming mode are supported, so scripted and interactive applications work fine. While no strong claims of compatibility with OpenAI API spec is being made, in our experience it suffices to support many apps. Only ChatML-tuned models, such as Dolphin, OpenOrca, OpenHermes, OpenChat-3.5, etc can be used with this endpoint. Compared to `api_like_OAI.py` this API implementation does not require a wrapper to be served. diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 9481ce6b1..eceda30d0 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -432,7 +432,6 @@ struct llama_server_context } default_generation_settings_for_props = get_formated_generation(slots.front()); - default_generation_settings_for_props["num_slots"] = params.n_parallel; default_generation_settings_for_props["seed"] = -1; batch = llama_batch_init(n_ctx, 0, params.n_parallel); @@ -2639,7 +2638,8 @@ int main(int argc, char **argv) json data = { { "user_name", llama.name_user.c_str() }, { "assistant_name", llama.name_assistant.c_str() }, - { "default_generation_settings", llama.default_generation_settings_for_props } + { "default_generation_settings", llama.default_generation_settings_for_props }, + { "total_slots", llama.params.n_parallel } }; res.set_content(data.dump(), "application/json; charset=utf-8"); }); From 316c7faf7740fa98ea68f1445f4505810f706b9e Mon Sep 17 00:00:00 2001 From: runfuture Date: Wed, 7 Feb 2024 14:15:56 +0800 Subject: [PATCH 687/859] llama : add MiniCPM support (#5346) * support minicpm arch. * fix tab/space typo. 
* convert minicpm model via convert-hf-gguf.py * try to make tokenizer work * fix bug for quantize minicpm * fix for flake8 lint * remove convert-minicpm.py * fix for editorconfig * correct minicpm model type (size) * constants expanded for minicpm * Minor change of the constant names for minicpm --- convert-hf-to-gguf.py | 49 ++++++++++ gguf-py/gguf/constants.py | 21 +++++ llama.cpp | 190 +++++++++++++++++++++++++++++++++++++- 3 files changed, 259 insertions(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 5e343742d..829d68368 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -22,6 +22,8 @@ if 'NO_LOCAL_GGUF' not in os.environ: sys.path.insert(1, str(Path(__file__).parent / 'gguf-py')) import gguf +from convert import HfVocab + # check for any of the given keys in the dictionary and return the value of the first key found def get_key_opts(d, keys): @@ -205,6 +207,8 @@ class Model: return OrionModel if model_architecture == "InternLM2ForCausalLM": return InternLM2Model + if model_architecture == "MiniCPMForCausalLM": + return MiniCPMModel return Model def _is_model_safetensors(self) -> bool: @@ -258,6 +262,8 @@ class Model: return gguf.MODEL_ARCH.ORION if arch == "InternLM2ForCausalLM": return gguf.MODEL_ARCH.INTERNLM2 + if arch == "MiniCPMForCausalLM": + return gguf.MODEL_ARCH.MINICPM raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -402,6 +408,31 @@ class Model: special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) special_vocab.add_to_gguf(self.gguf_writer) + def _set_vocab_hf(self): + path = self.dir_model + added_tokens_path = self.dir_model + vocab = HfVocab( + path, added_tokens_path if added_tokens_path.exists() else None + ) + tokens = [] + scores = [] + toktypes = [] + + for text, score, toktype in vocab.all_tokens(): + tokens.append(text) + scores.append(score) + toktypes.append(toktype) + + assert len(tokens) == vocab.vocab_size + + self.gguf_writer.add_tokenizer_model("llama") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + + special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + special_vocab.add_to_gguf(self.gguf_writer) + class GPTNeoXModel(Model): def set_gguf_parameters(self): @@ -1041,6 +1072,24 @@ class MixtralModel(Model): self._set_vocab_sentencepiece() +class MiniCPMModel(Model): + def set_gguf_parameters(self): + block_count = self.hparams["num_hidden_layers"] + self.gguf_writer.add_name("MiniCPM") + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) + self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"]) + self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) + self.gguf_writer.add_file_type(self.ftype) + self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) + + def set_vocab(self): + self._set_vocab_hf() + + class QwenModel(Model): @staticmethod def token_bytes_to_string(b): diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index ed8e26f83..1cfd41c0b 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -104,6 +104,7 @@ class MODEL_ARCH(IntEnum): CODESHELL = auto() 
ORION = auto() INTERNLM2 = auto() + MINICPM = auto() class MODEL_TENSOR(IntEnum): @@ -156,6 +157,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.CODESHELL: "codeshell", MODEL_ARCH.ORION: "orion", MODEL_ARCH.INTERNLM2: "internlm2", + MODEL_ARCH.MINICPM: "minicpm", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -464,6 +466,25 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, ], + MODEL_ARCH.MINICPM: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.ROPE_FREQS, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.ATTN_ROT_EMBD, + MODEL_TENSOR.FFN_GATE_INP, + MODEL_TENSOR.FFN_NORM, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.FFN_GATE_EXP, + MODEL_TENSOR.FFN_DOWN_EXP, + MODEL_TENSOR.FFN_UP_EXP, + ], # TODO } diff --git a/llama.cpp b/llama.cpp index 65e399adc..f3c5146d1 100644 --- a/llama.cpp +++ b/llama.cpp @@ -205,6 +205,7 @@ enum llm_arch { LLM_ARCH_CODESHELL, LLM_ARCH_ORION, LLM_ARCH_INTERNLM2, + LLM_ARCH_MINICPM, LLM_ARCH_UNKNOWN, }; @@ -228,6 +229,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_CODESHELL, "codeshell" }, { LLM_ARCH_ORION, "orion" }, { LLM_ARCH_INTERNLM2, "internlm2" }, + { LLM_ARCH_MINICPM, "minicpm" }, }; enum llm_kv { @@ -690,6 +692,29 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_MINICPM, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_OUTPUT, "output" }, + { LLM_TENSOR_ROPE_FREQS, "rope_freqs" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" }, + { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" }, + { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" }, + { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" }, + }, + }, { LLM_ARCH_UNKNOWN, { @@ -1390,6 +1415,7 @@ enum e_model { MODEL_UNKNOWN, MODEL_0_5B, MODEL_1B, + MODEL_2B, MODEL_3B, MODEL_4B, MODEL_7B, @@ -2748,6 +2774,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { static const char * llama_model_type_name(e_model type) { switch (type) { case MODEL_1B: return "1B"; + case MODEL_2B: return "2B"; case MODEL_3B: return "3B"; case MODEL_7B: return "7B"; case MODEL_8B: return "8B"; @@ -2887,6 +2914,13 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_MINICPM: + { + switch (hparams.n_layer) { + case 40: model.type = e_model::MODEL_2B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; case LLM_ARCH_FALCON: { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); @@ -3524,13 +3558,16 @@ static bool llm_load_tensors( switch (model.arch) { case LLM_ARCH_LLAMA: case LLM_ARCH_REFACT: + case LLM_ARCH_MINICPM: { model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // output { model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); - model.output = 
ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + if (model.arch != LLM_ARCH_MINICPM){ + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + } } for (int i = 0; i < n_layer; ++i) { @@ -6781,6 +6818,153 @@ struct llm_build_context { return gf; } + // ref: https://arxiv.org/abs/2203.03466 + // https://github.com/ggerganov/llama.cpp/issues/5276#issuecomment-1925774738 + // based on the original build_llama() function + struct ggml_cgraph * build_minicpm() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + const int64_t n_embd = hparams.n_embd; + //TODO: if the model varies, these parameters need to be read from the model + const int64_t n_embd_base = 256; + const float scale_embd = 12.0f; + const float scale_depth = 1.4f; + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + + // scale the input embeddings + inpL = ggml_scale(ctx0, inpL, scale_embd); + cb(inpL, "inp_scaled", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * inpSA = inpL; + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + if (model.layers[il].bq) { + Qcur = ggml_add(ctx0, Qcur, model.layers[il].bq); + cb(Qcur, "Qcur", il); + } + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + if (model.layers[il].bk) { + Kcur = ggml_add(ctx0, Kcur, model.layers[il].bk); + cb(Kcur, "Kcur", il); + } + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + if (model.layers[il].bv) { + Vcur = ggml_add(ctx0, Vcur, model.layers[il].bv); + cb(Vcur, "Vcur", il); + } + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + // scale_res - scale 
the hidden states for residual connection + const float scale_res = scale_depth/sqrtf(float(n_layer)); + cur = ggml_scale(ctx0, cur, scale_res); + cb(cur, "hidden_scaled", -1); + + struct ggml_tensor * ffn_inp = ggml_add(ctx0, cur, inpSA); + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + { + cur = llm_build_norm(ctx0, ffn_inp, hparams, + model.layers[il].ffn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "ffn_norm", il); + + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + cb(cur, "ffn_out", il); + } + + // scale the hidden states for residual connection + cur = ggml_scale(ctx0, cur, scale_res); + cb(cur, "hidden_scaled_ffn", -1); + + cur = ggml_add(ctx0, cur, ffn_inp); + cb(cur, "l_out", il); + + // input for next layer + inpL = cur; + } + + cur = inpL; + + cur = llm_build_norm(ctx0, cur, hparams, + model.output_norm, NULL, + LLM_NORM_RMS, cb, -1); + cb(cur, "result_norm", -1); + + // lm_head scaling + const float scale_lmhead = float(n_embd_base)/float(n_embd); + cur = ggml_scale(ctx0, cur, scale_lmhead); + cb(cur, "lmhead_scaling", -1); + + // lm_head + cur = ggml_mul_mat(ctx0, model.tok_embd, cur); + cb(cur, "result_output", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } }; static struct ggml_cgraph * llama_build_graph( @@ -6943,6 +7127,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_internlm2(); } break; + case LLM_ARCH_MINICPM: + { + result = llm.build_minicpm(); + } break; default: GGML_ASSERT(false); } From 9a697d842bc0cfce8268ebd2ba703ffc1c904f98 Mon Sep 17 00:00:00 2001 From: Ben Williams Date: Tue, 6 Feb 2024 22:16:48 -0800 Subject: [PATCH 688/859] readme : update ui list (#5354) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 34f2021f9..672512d18 100644 --- a/README.md +++ b/README.md @@ -149,6 +149,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [iohub/collama](https://github.com/iohub/coLLaMA) - [janhq/jan](https://github.com/janhq/jan) (AGPL) - [nat/openplayground](https://github.com/nat/openplayground) +- [Faraday](https://faraday.dev/) (proprietary) - [LMStudio](https://lmstudio.ai/) (proprietary) - [LostRuins/koboldcpp](https://github.com/LostRuins/koboldcpp) (AGPL) - [Mozilla-Ocho/llamafile](https://github.com/Mozilla-Ocho/llamafile) From ed0bf32290ee5b30ffad5becd99cbecef74aedd7 Mon Sep 17 00:00:00 2001 From: Eve <139727413+netrunnereve@users.noreply.github.com> Date: Wed, 7 Feb 2024 06:21:30 +0000 Subject: [PATCH 689/859] readme : modernize (#5379) * first cleanup, update everything to Llama 2 and remove outdated content * Delete SHA256SUMS * make build instructions generic * recommend Q4_K_M quantization method * Update README.md --- README.md | 127 +++++++++++++++-------------------------------------- SHA256SUMS | 40 ----------------- 2 files changed, 36 insertions(+), 131 deletions(-) delete mode 100644 SHA256SUMS diff --git a/README.md b/README.md index 672512d18..0509b0ba1 100644 --- a/README.md +++ b/README.md @@ -33,17 +33,14 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others)
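A note on [PATCH 685/859] above: the `BpeVocab` change makes convert.py accept both tokenizer layouts. Below is an illustrative Python sketch (the token ids are made up; only the structure matters) of why the old `try/except KeyError` failed on a bare GPT-2 vocab.json: when "model" happens to be an ordinary token, `bpe_tokenizer["model"]` is an int, and indexing it raises a TypeError rather than the KeyError being caught.

def load_bpe_vocab(bpe_tokenizer: dict) -> dict:
    # mirrors the patched branch: a dict under "model" means an HF
    # tokenizer.json; anything else falls through to the flat GPT-2 layout
    if isinstance(bpe_tokenizer.get("model"), dict):
        return bpe_tokenizer["model"]["vocab"]
    return bpe_tokenizer

hf_tokenizer_json = {"model": {"type": "BPE", "vocab": {"hello": 0, "world": 1}}}
gpt2_vocab_json   = {"hello": 0, "world": 1, "model": 2}  # "model" is just a token

assert load_bpe_vocab(hf_tokenizer_json) == {"hello": 0, "world": 1}
assert load_bpe_vocab(gpt2_vocab_json)   == gpt2_vocab_json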

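Likewise for the `/props` changes above ([PATCH 686/859] and the preceding server commit that removed `/model.json`): clients should now read generation defaults from `/props`. Here is a minimal client sketch, assuming a llama.cpp server listening on localhost:8080 (the default port); the field names follow the README section in the patch:

import json
import urllib.request

def fetch_props(base_url: str = "http://localhost:8080") -> dict:
    # /props replaces the removed /model.json endpoint
    with urllib.request.urlopen(f"{base_url}/props") as resp:
        return json.load(resp)

if __name__ == "__main__":
    props = fetch_props()
    print("total slots:", props["total_slots"])
    print("default generation settings:",
          json.dumps(props["default_generation_settings"], indent=2))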
zN#nY4&{z&tto6D<_)2PX1rkxXU66?FG2cZaTwEr#iNKHt8*mU#@8wVNig4hZ_R~hm zda;8+0tM#tRBoZU#o0nt6JPqkJ*2I*TLij&YfkoHUjyaMfE!5$8UI# zo}vd`EgX@o)i9(GsrFqR8Iv$!e%)13w{f>aHzwcKo}61p{s^EEg?PHQrN}w{UJv?O z^=YKW*C5B=-i9$D#cR+!1^#Xb$dT^+P@@O;Q8XU!R&e^SX_tMi( zy&dt}{#Ew7B?$g@*3URFNwGwq@Lv(`bfG{cMYU>FAO9rCP2n4~udAU)H`ivFPbUVY zJY_1>U{!aJDR}wmBtxUp;D^Un;x!T?!+|W<=?JCk@#VqhygO>Vp4^FU(gBMhw3TZ5n2zI9x=NP*VC?w=NG;Z5Jp#z6!_kU^aa>j_L=S}j{RF3ohpIR+@ak@Cp%j$ily^vs~jx|i6+f4;>S1bX* zd0NViuYrI6;k$q9-E=YR?T$xR@9XbKuFPB_cKKMvrV`Xb~VA`~1p&5r-RZfK0xos<0-IeAVBDiGWkA74iSR03< zug$D`4OLglgG5c{#Lay+n8{1$HQ@T`sX1k*ZI;JSvE|V91a1j~QFaZc3A?NoTA7j# z+^cN|{=T|6FG}gx3I1!_6!qVH`*Pf!N#Fm$cmLTJ0GoSW@~|tTsYvtVe_|?cOz16^ zi6q!)9k`>=W{pa{bKGQ^E%O^i--Gpm2Rj&jOXf)p1kdH9p99phMkPnX@s??QXh-PC z{>m{zyoQHXMoBs}mb%QL3O=KF78w}UalJ+>OFY zVwuiqL6wF*eE({{CPQb!anxIy19m+Z8gXik({phYbuBnfX0-q>4^1#tiuW@F69KCE zi>H$=rB0MF>nUk_AjAA3fPOffeZwXQnT6iapf@Jrzakny6~rGxm~SWG+Cl^)=-zgs zBG>=A#zsv3DncZyRP!%}_~fIHYo?$4iU~wwGG3x^o1kruyB-M~1Fxrr6)2d=4qS74qIsZetu^-W%{nbF~BeEASa4%sxwl`iDw5f8xqmKNumJWRud%I z(sbrS2#x;i@%dG&cT)HM!;e4lYu?OIqr$f|&xh)Mwu50v4GMNl=X!8)KZ&F;tO<&P zXzqFUtXnw(+(5NTljGy8cSB*#E_bQOT6f__*~~uQXwepODxAV5eBU(S8VUl8u%y4c zM!-~Pwpt6?V!ZMznpG;dxO7fGL^P`f98JsS%5VUkRq_WxaYvEQ!R#cP3sIpFFiMJU z;^T3$$}&#wX!d7xa}aW@FQH5p<+6F*j?s~yDyDF1o1`IN=WqTk9G!2V6r8c*q<( z%GdQru;n_kXmr#3>LvjPJ4r0|P2-J=tk#3|RnKB&c4Ucxl9%mn&{7i6)Y{H+DEr}= zn-;%ez*d4ERVD#(efRFDj&S|$PS1{n8qzljJ%Gzx(gtt zMxh9!b7pZOFlwKr7y~MWz^FYaWme&c@*iHQG>jtzXy+#}Npju7CfWy?bkRq=veiQx z&@T^sl=^JDtGbHTqPTa#kN6EbRB;AUREe5i%FkXrsS|7cRox{S2h1dy7?ZHOY3V`W zOB|CZr}aWeo-Jj03X|Si5wYm4t-6;+AvawnuCsdp(l4;t)?m-yQ3|x9_N>dots7e} zy`T+H5}W28|DxA};T#h_w|J1%1B5vx52p>A+1qZPZi>YoXhz?7SPhUF{C_a(Cg0Kv zXIwsuvQs2xMgbfAAwDih1#V{P4`j$Dp@R%658<(v3{nfmd5PCFbbv!pRzkFRphQpE zE8f`46w<-FCjdyjMLu_G+M6;0KM>%_)HlN0(4}ZyC#2Q5d5wcwVrm-BdajKWV5_O9 zU|nrOp{plawn6qT%B z?NdDz#s=BF3@uuncxMj6MSjJl&_H$Oaw#691NtR0ds~i#zbb5@ze?1AJ}G|SB`8_B z(7TRCx73;)ee?R+PVW5E=YEXi42y%UTKA%z#5(PvPFR4lDyokkn(@PjZ)OolXj$#U z&?+|Kv|MhU#u&)f%FciF7`YH!Si-LDB*|Zo@b&oZn8W66pV-#=nmO+HK4k@tf`x3s zedz2ru~}&d3!j;$D!ys1+nKbMSDot~3wRdKs3;>(*U!Nm*jPW&eF#Z(S-qiF8b#EH z55MzK-3K&|eHG?{VgVg1u=K?fPYzYWqi=bf$ySP+BZWPD#1U(m#T@5sr5LF@39G@-mi@>&$15CdrCLdV<;%&V~ZyV<+g1i=uM7)%^%M|Fa_3nECG0h9yJHg zHR^{sTNJQlalAZb(6^IxLPKm5XWn`Q1H5i0hm}Ue<31Ivw#`s)$htGyW_f%28^4NW zo!1gJ$qriW#&F%qT4_*>E{xUW2GaXAsFvC&naglYX%P6oaWJ$!`JWj=psn0C5boPjR9~}MO+6YH!^Vs3$Cy`tM zrCM`Zq$PrQo0>s>0sB`Wo`v0YI!O%Y3R*x-lUEfX@C8J7F7DxoGwBgIIBX-yT@DgO zB+(_!wImhVjje`!!9*&S=Q%x=J&x_ScS*p;Hvl3huIBC=4d$?=?y{R94{aYn;1&yJ`tX`ks$!n zjC+kdQe&=!U-Hl}TGVAPEY^mZwm;?D%YeJV@6l?H^H_=Y?EotR;;vXfFj0OwW8$?= zOV6H{CVv$A%D>w|6@c3N$g{1Y^g6@jMF9E3Wcy4Cb6(iVx%GwKB-yoWYYpjh7)AS z16jc!?#sp*!5K9Qk=7oPfeM}ai`#9rDe(fgo}EQ}5;b!8>e7bPG2u&yqS<*AoMGe8 zKyg1J8!f30aQGfg?&=f6)3Jpk&J_U}7S?fN?>Fk*D(-~f^k_*xp*2mt3d^e`5Si(0 zG!yS6RqP{PQ?KOw!!)^fDISs;*e(+BS^HKUmC&|I7Q?W6CJfl1dHd-a$kMZ?zKWac zrg?U2Yy+Gh7hTlSQyI@S2X?H;%Y*eAAF@rBW^IA(a9&etukJzxm=51BaOtfojk|d7 z+j%MN8biyK3pE+J?qQ3^MgR3*rkTF~Mx_eUjamm;-M%4#$x_QD+vT0MOluFAT8jwK*8W@f)}8VM?F&GJ`K>L+Gofj zsQ;o&LUf!ip;Q9b0`4$N*nR~h&JR-Q7e}5IV5*wb;PG#SwAuvJ9o;dJau?(GaEaOG z=Q9;giZC_;qxm8jSB<5)&gus>*_WR9`St0^1c_6hM{$e^GFvJzoOB)nw!7tJY(;aZ z6%}S0hO+7`$6>v7GCU82rmUk>M2L5}&@!bVb$r5tcVFcfyR}Q`ft)9c6V%SVwdHTj)Y$X$1b`4Al2F6z&WtR29G54! 
z^1bL_c2x7N$j1pJL&KAl$NTz+$^wc^Q56L;?#4+BJZu3Uc!c&w&ZECLP@+@@uPLtt zuR(<(&g8m>e3qVl?x{9h1p`df7vUx#ndy;N&9CFolh#q%ZQ>rRZ!aINuXzLFrIr@m z1EuR<*TN`q{O|r{{Zd$T7%HqeCwjodVY022Mh!~ap-&qf?ZoX_T{2mS^_+$#$F0ZFpp+b7_A#b3Ne$6!CM zW)a!jpMaTc9PFDwwaW6X$>3q;)D3a3kb^QiyFg?6toUw9`rh;uR7`?Na3zEhkgb^P zYINWCKKalKK3No3HKT~Zqem@n+Lx>TDwRfVI$e3TL#Wn-W;c;lJ8lwMtD+r}2Q{Rv z)#Yu!Rl8^rYhQw3LDV13riigxIT<#6S|F1aFoN1O^U!&*>SIxHi~z4JzO;z)T#`AU zbII3wJ(yQM6SO0d;NefC*zHUU-1=!tE{?xbnx8ME#!=Hcu$6lpY8ZVUg)#U)MVA%#* zK|Kd=W3C`2SOE7$UZ4AC{k4_RIZT);@Zsd1nv4FrltVMqKHjRbwwJg39$aE0>NQU- z$FP7-<<-oUgfDu`?;6?Nd0&yNTF{qh)Eq{+S#(qvCv$s`#Vx*W#RWavekS(F4a%-nFokM4piHk7 z^qTibBah+ud;!9X$IiuI5q}D(*bUQiR%p;Ym+Bryg)0pql$r&H#&xjE zA+k9^kaA$8*1d|kuUe|5i(kMz!!q>qo??z{e8XLDD_n572PS|M#?NwKAis@P-!RouFe27mV z)D4i2u}YNwQg@Mv4Kz5;`Q*Bm6@6u8wU&iYg5QmBw*E<0m=w9t;i(txdsj6F|2!ET zrG|5e`bCknTu{&>EP2{=(z?0|t8Q04@`p?xM=0!$(5&JyRzsDcQS)hmjuJb!(cFX3 z9F3+kW(k_Z%^5z!8a|MVRy3Y_W#iC$I6iALg8w;SIp9I`79S@Yv2h>ivS; zEm|qv5i8rs6v=mmTJ$W)W-(ZN!&8ViZ4dt%9_`>y7B&p$WA1r zNwi$9R#Nc7Jb&SlY;w0rhZE)^;0AZYZ@sXCmd;`a^w$HwWrqM>jqP0*Op>2cOg%I5 zOj(JDUZ^zymUUA^iy-q~##A+sd|edY5I<_@?ah5Nnq7K#wv$t((qchiH>k3h2^1Of1Ufrb0G6qCtrXwL(>oQQzLERR}|DT|K8&#@t`@b$CP$N%2|rg zk>?gYB`r@_Q7%z)H|0+`z`6@->GRKhjXDInxjsoX3Eamez>p6Iy$=Uy%^{AI>#pp* z<_txo)&6?Z9if7OUC=PV%@QSaFN@L_jt-;XtyWC(oBPpnrK-JYnXk4~vhgq7jp{8T+d z`Ah%gCf^5=;&sLe7iK@hR}Fpkqd)#~d_;V~f}UiJTcK!*;eH`nC_&XfpO7|A5g|`(El|HGD3m-2x_rgq z%yDgf>83?Q77CdxRFXj`3z*U~`d5Q{h`!;^_fl{#KK@qBxo>F976gVDUTxFk;>($3 z77G#?wLnHNz=DM+(udQx-h{VJ4tDQpAYCflY|%ljHuD3nMWhFek^xE1P%A=@hsMRis7ak}P4At&& z6%(>&xxvf3gSzfthX68TWP5=dKiKeXYk;);i*(%XGNtTK5D}aV*W&#YpXgcHatrz; zM--OwFChC1sMeE6r`;hn^RbLW$*BFc-qCuU#<4;E&6Z>ESVNPGe3=Rrd7~Y*TGgp!sP`!KGkZLNB6@Z~a7rAXcYFqhc!`9o z9zy2O6_Bsie2g=xZrXWS|2pcxj8{ARbt?tc!qB4z0L+k0b}&MV?sE&aCX90C^b#y| zxVeoiW)>LDIV*sq%Mg-_U!YY<4j@@ zq>Ud(p-B|F+2w!xc=qFb{_BT7{rmrGD&kDXL0~4LP7CIaVXGWLA-u0=Amr`46gLM+n(}plB3H1!3^7 z4#N^rxoxOk9J=3G`yc;yF%wv>^g{0_+Guzz>RURwPH8GLySa#J7jwbhq;9=&f)70O zMfAxNvTw{RH^`6P&JE}KdC7-^<=37Vwg0aI5;A97dzvlRyYEv~GObXYaBbAJ^%8PeLkk>=}{@;D~|2T;m$NPQP z3{eb9Bp`3T_B`LIPKh>Z=1VYc5cGCv3a)$C7i!^7?Y12CzUiQd8eNE5y;Cxq|_4f7&X8qO*IS#(m|p=T`YE?aFG zr)QlC9PD?Gh?Gy_$Nt^i-JMxcgsun?1|#!BaEFD)+MY|MyKj~s}#Cg4adnfeNhxwma`Dg_iRp(Z{M$e=?C z(d5H=A{4IZh!KPc=FyQuGAg&!Gl9Dx!32_FUmsv&WB+;ZfYL_Yx5y%8726Xf^A$Hw zGX)>z2mLB`@=s^O2#qLck1;m9FgtyX8E5biT|9$XsKuxFf{8+QDs8rcIZsv2w5$Id z3RpB2U(2m|G!@KxIOGu`Ny_#yzHEeXUo@tsN*gbSWSo{cbHAFd+BQpt$@= zEamdy(5C4QdPXI4#_2Msl*nEr0e4^b4Y0KU|=$I z<1KbEi-2dLrl!t_9v+8j=Q~&lb-t$k@voRc=%A2NhrE;G2)af3bUE;0?WCQv|?xd{O8O4>G^lLFj>{?x5(J)y4coP{NT$jb>l$hSV8sp!7{3XGiTdcG#1}OnZ--@$ z>CNN4zA_?(K8_mvv!^EEJ(ffLC>Di~= z22mJ5uIA^6XAZfpJPFCU&b1(XQW&DTm zPYdUpJP{nuaq6!&oU=Ct`3B>zva)5@m)Cre(=*XIhiFPZdDIneuE<;0mq)c5f9RE2 zez3JH;k>SKpJe11o9~@E0h_G!?V(G}v;zWAeq^%5R49BNid-xC7pl@Am1$JI=-2+J zh51{;7O7>eH=~lKZo0Xvj1f}>3x6Ez|EMu`kPGI%&ZFX;eLQMDBp0lo+IB=!;bad7 z2NjNwge~K5#0O9e1tG-47ae4(WoC?Xh-kk}QEYK;z!7u`K{Fn%>EKURcDk9dd4!t? z2@d6t;A&f+ygFi5dTt2)B+Rzxk)DWZZEGEUQh||Y5wXN;F0fqdQrEc#jc+Q!lO!KZWP9$%<(uYG;z+aH6YW&3C%<9xrBgjKa2egYP zM_`NNH&T_SJbM|fO3LsP>r6clG|Wkwj&n<`Mq>}7S6JvQwn={%zYB;oGNDgOeJ^Wg ztsgGe3vmSF^XN7MP8WjNwS==?*$bpxxX~n!)M~6)cuX9^A6~vW2O3fNs?8^lDY$84yq6NO5Fl?z3YO-v447 zoX9BCCiKBduglfcLB9}^NGGyzLH$Y+Te}|bEE^>bL;TPY(OE^?aDokfgCgD1^G|EftEou+n`Dk2 z;HG)pQL)TpPC|@djd~s2lk}<~;U!jO)cVqCwr8VK2A(xkoCt_(s&|;dkzIri6>-<5 z=td_K?CM`U{6*0fLAU<0zD>|q9?AMiut!RB@sa8brH=)RIwTV^MKvBRnR=#uQ2`?a z->7ZvrB7efD1>Y2u8q}4E78yG5AiibfJ{}kx*Vb*pSLrf8p0NR2JEuoMM`YS%(UKK zwHto?%e8je=k;%-O^={||A!BMZZ?)gz&9Ey>oQH_a2Wd*bPPeA!c3+>`8cHU_At&q z#eEm)hDQqF!QL+xAQRnhCmyC1?1InsKIv? 
z>~x_3?l}-jmfy>8O=&$N4-VIAfWP!hzf=oJm?2viCvB3kr zjBD$CtoiI((y>Q|o__wsoU{Jki?w_;FGd(I{#F&$idqt4;Vn@T{(>}lm7G0O9#j`q zD9_mJwKc)@u>JfC?^1W7Jbd_#->46wrB29jjGyB`>4i6e4#kvdZ*+#4Vj7>) zI3*4UqOj1M2wHuL)6LTR>Yl6s)99<%Lf37!dWCdwz6_Nq#6IAF@7bvp7Z{lv5B5v2KuW-3zaB7Pznc2b6xbpzt3sEkH(uWAOU13|YPK z;lox0uV3!%trZJ41j=9gRj;X=%ucHZ@fXA~@HP2YT}MAS@Gfgd+<)-)6)r7Q$(td0 z4ZeGLf*&H{k!|w}S!ku{_^1{bKJ#!$?I+@V-~TH^=YbW5gOTtY2B)1cltlWx=n0pBB&shrrK66%^B&rv zIXj)Z>JefRksv3};X+m!u8I(hRE)_oBB}xuVu(ULQ6Y5)epUJT@XX<5!bFz`&Rgt+ zygJ$ohMh~@8((fE#}NKCTx2ZI*9>JYbrFQpJdRTV-+-F3)r0JT;oX(vt;AFCZdbP= z8v#^2Qb<7w6ShA&a)!kFJglvo>5$k>oceThm~OdINQ+PVd9*$F<0j5XG9CC>cvLsl)+4;J_;`uB*9R$K@M+h9)#VtA(OgZTjQd!` z6PV3fopeiyyddND@GflD))bCI#rq4_E`1`3?5Yls3fl5HnrmI;q{f=6NAbosP>98W z)Gs_^CdC-2rm88|%rF!Fw}4IFP#h)|7!s~B0J46<(k?)nmr3zM*EDxW;|?p3p9-?mIKr+A9GvPOS$AR8-!|ExsIS(XWJj-+sGE=x!iVQ5+kP16ti zc66`Gvo)A|KwYNkJ-H1*LsI<~FnpA{!L!q4ENOIcju815ko|nzZRW~MjhlG7MRZ;1 zX3+d{!Em>UH#ov-3{L{yQ&aAS%)OYXI*DeH8um_RRen{~@+@$1>!Z8DWZm_$k56L~ zW7(aARAQupwK*gC9v<`CT9NY~__MIYK{lD9Ut?e|;2ZXVfMgk}W1S0F|HY-tilceT zr}CqDxn9x|t?r~0`hE7yz~6Y%gI7OW_d9L`(eO`>qG>;Hu!3g2m*bB=^1#XZPa`8n4|YyO}U|>m0wGg^>qAgZ3qD0@ku4fr^gJtv>`M6^bT|}-UXe# zv4H1>t<|-7j_HI{uZ}}4AEElx7?4eWWI-x;M+;}xqRDn6uu*rDOAuC(&L4J}2LK8w ze$g=}vH%WHHbUJYVasI?4>#8j>Mb({XU{077*6eyT*dBJdHUpqw;oD8+lo+1#QJXTaTHK3DdtYk>V7sJ zfo>VyO1zlK-;K$N$WkYCwm?*x8=rj|shVqq0lY}~JDY~zd2DlebJ?#3BSWOM2XB{N zgx>XL_&)qw==785El(2L51i?baf3XwH>i7=0LmSefFn`7)(Ajl|McdtcZ0s^uD+{O zRfRXMB_t(qYh&5Jz;RxeT6osq`RvJavp`cwz5R)WA|3>;jiaBe2>CvSc4ev+X#=#t zF(#Wzhb`C#;3`_M(QC3c4TWUPf!swCF?y(yEguwn2NmeCu9Fqlsidk?p6LOJCLRb7 zX*zZG*nk+lD<4XSp3=nBb5;^9jr;^5ts{HQ$qVR2ksZ5yjnD7mp65NM*Tb%~{xAYU zKo(8J?>(4xGaPLp^3%Uo`?>!4r%yhwS;J8G#1rZ#xeYhNS&xL$Itrj!r-=#!o5@v9 zyeYTESg+}!=gYnRm$xthZfIeC;>&EydYqs~FvV*i)@bzIJd|4>;hKf@R@K?tOhb^d z<^5-vm*(MimtwE%dwdF6HRzE4q)^%g0M*EWCT>c8cXLsOYPy1ElpIX->P)(hg^)lX zgbPuDinJs)k~n%@KfeV02s8WR>yIU?GZG}6xlv)rC!aLxvXLIS;~`Qx07WXHk%g^j zpBg1?72D`70fE*eX5v0*nHcc5$HnO_2eo ztrns-#`5;Mr^qo4`pnx-09)9n?p*+L=oEw*)6>M+L(!;CGH~E{ma-h)mLd_HrP#n8 z`Gkh{GU02=xV|RYURh-s0HM~zPy?^J;Jk@SuWR?%?sb*)5DeU&)Rl31YxH=g@Hr4C3#TtOJRtM1I z5Hi+J6{SL9tp)h_laD-;G0&cR|vD4C!&@%~$$(o!up!s%!hzjoQ2 zCZTL0JioP2gaO%GeiiyYqOjOO93P72rfTlhy(a`cq`%Z`1hSZ2y)B;G$Vwgiz)?Lp zBp^a)fm@b7&zh!(c+~1Mn8Ip{7y2I6g;~GQbyn!>;0dDr2_Nb|P=icoMa@Ybv#8;4 zNE2J%`+9uKc@g@k+~Qh?_|r8UCtEy@nKSjT47b$F=HxOEy z#EqOSA-|ybBEsY5l5CXobh<9nY$oC4U*l!sgz^^Jk<*RCpgzbChYAL_wArYQvD2Fj za8!EiYs6^kVMXk_O^qTviF%)S?cnMkY?*mItYTK044DVk85R0x@wI%--~G{V*H5gd z32J&hx#d1wyXyzFTTwJSlwpGDcHF+{bx@; zuO-+g1Rd{2$?EDT4(reAHq|K%?U6#c$2vGn{;$D`f*Gv)5i8@^Sya9>kuReu@f_aN z8q23l`f7A_AIcUL?-6kR*u26}>@vRW6<{aCS*tqQp(5Tj7u;{S#;<>-L#x7iF8?Hj8zI>pf-@(tSU$L;jL$c&X zmr>XaNpL%_Us*f;DpHlU(FM&6%NHFEOO@XGduwiK(L$h1ReNF(s@lsSdA|uH&B0^= zBX3pMX9`Ku!y#i6q>dj)cofm;!c}C9O@rg--jo`#qW*l;Ig$g&#s5O}p|tQLXsR_wUXQV>f_ff}I<9`sT(_7OE} za-vILJb&h^XBR5zR1;@Dxbyvstcg^BWbk0jL{O|j17fiFg6voX8P`WSYe`$-z}_Ci zUQNa56NUZbYx2IT_DZZZShMWgxJo>C$x2^O>M2Ijap`V;=uwg{f6h|Yv9@8ax4TZrI14lZG zqjqO#QTzN0%QPIOkL6SP0S~~U(-cR_q+3Enfi;C)qTb`{?5i7wx?bV=e8!{WwSGxx)jI*NXv(IW-)v}=O zBK>06!P#dd>n!Yjq7PnN3fk~~|5n6p7CVUTf+vjfC!P&Dqv|>qKQDd$qQ(I7EOPY7 zdM9uDcpMwv%X$_=81P;QQ8aV#fxb+`^0#5Ip^WmWbj;7wn`3*Aar^msx?tMz$RWOx z^$V7%S!vuku#&!P*rXosSCcSF*eM`a`UoYKaRk)OI3E%)x6gXk8W4P_yM|YUCW<<^ zZN7LOJ$E%Nw4z;u|`;1gDrn|_% zYt&%RlWp5>9!7-t<@k1U7P)Bi49rk_QiD{9dXZXo}Ew%qBa7rk9y7s*zTo zC#iP64|cO2mM;aUP}@0f&?6{G4kXAkIC!usKA#q!TPKK3?WYF zTN6vW4*WU1HKVJ!xo@t;NNLK4#|@-()I*`j76ttx@~@^Q-76;(C*{q&({uz$*!i2J zSRd}~dPNa%_*UT< z{Wl$(NLF+|zg4he*2e_iAKm%AxJdDP+AsQBkN?$}u|FGTrC8(+pQFbQj3V-48PNo* 
z4XM%B3}Zw_pGTV6O1=1yh>-o!1W=9*ztd(qMK+CFtJ9AlDY^w++8-zJikK&z~%ria66bsZDpN zz}bLq)Ljb-Qk~^*54Z3{Y&a^|O;8{|a zY#6y;$9+;CetpgbHi36Q(-Zu)-C_4v;W_R48GZmWK+oNcW8n4Pfo0e-JilODpstYf zV|uV&m*fh>^S54-R|nfQTo?-_*WwQARWI(xQ8PSg9&b8sl7;W5mjz4+0RR+)6ZdQ=oNFdfEqvvm2O?Dvh`T5W-&6lHoBO7+Zlm4#0xJHdGR=W-q&CR z1a=xJ%f+cwyXjYd?2+u_Bh&%q=ME9e#cSk0#R-WTsX`saUGSaH>n# z#&nzsrT9Q9Ob@&^U`O#72O~8Q5l<$m(DTSW7OrLtH+2T3&%;~aJ2|Qr+I8=YJe(S3 z^FqWgDGc5}GcsrF^it2ZhroLt%BNdgsqB!xpvVH8ghe2Fk(!|D+(Wt8cy9lo=Xs$A zzxF}B%npIsZ}0@xSGHwe6%kc16)&QH;<{TXf`LyH0om)Ppa@gess&Q^JZhzt@l81`R63v@eeHxVx?k2`QA27%v}yZSfA@RlH~61U zM|D;Cs%L>jEqo*7^fVM`KzlFxvL|k%69RIu-bQmFupmVX)aW;sDKvn1%si{HREOdE z5OHf%h6OY2hSWp;MwW$N`s(X?0n{N}mj=N;v>~qGCF2lN6eTQ4;!1H&9DrSw>R*KW z=-2;Q4F{J7=QMsfe~x1hkXYTj^lYg{bxJWX-?n@~-I2~w7z&|bB8>XSo<4nMnr;oY zyv#s1vB+42`hcRA{y=LF71Dhx)sb>7lG;Qa7g5kr7y>x+U8 z)xWoSlgY9Y$9ULq9Ua{JGV2hTXe(~EYAyGZm&BNaDi!R?q&fCjt^U;(RHYUd$ zREkXZI}u!%WcBPymcLz$99Nx>2kUD-*N{^3T2n$PheYk~t4Nu| zG_!K0$csrHiVjZCet|X^CWcBmN4D}R;XYwzYRZq;`uZu49~Ihb#qY+aamc7-g9k4N zzErIRp@N4Vi$7L$wIQ)h{!@HRCO>U8uHFavf!7$h50G*q<ddHKJ0i`}5UIG$yvu3_LjL_6iKw*|R(Qyn1v%iL~YBHIF zD*4M%(g5b(c;7+=>SGIJl`v<&g%V5vaMJk($1dkd00U6v=0$it?l;0Q zh4zQN*uaTJ7FErOA#Tr(JLvm((re7Do<}1LNWrm|l=~T3(_zbRaRYPnx{7aTE@~{j z3OPLUAa{pTyjEV;o#7imBe#7dOP_vW-g({K*NQbf{9k?zxLWLazvdC0){C}$|G$3s zpNl0q6{)4wt?oDM)mjB!`}+_7u{KJJ+sUt5ru^=HT_Q5ZC}G#SblStJ1nT4@Dt=i? zx52$|W9ityV3MN`C8%rkxKU)f)sP-OsQ#^Is%{Z^h|yB)=RI6DnMK+GGP`9t#HJYl zmVi#-I$B#Fm@o0Pwesr(YyCm0aFF5WMc7`cf6plJ=KC7*yehw>b$kE?yW8K?#ziY12S7tW&y&fw^&vEPdO@0^+g(#zoArnn2mdr& zimm!4@gT2_T*89DrPa4X#PxjS)-qV)r1W{vfaP^W#7pLBtSq|cy~**|l|B?sJGGm; zz2zABbR#6Qx3g25?6-fdrgcKPj>+*NV{}T%XV~?{M|_1xxuaG?vq^im_F|?ZDIR;RzEoVMu zkNTkiJP(D}uObnTl{LniRI`3wvu44$hjh(~42Xvhlgn z#0%_ac@d=m<2X6MAz}i`KN(edU!z<026W_z&SD*oB+$KeNBz|K_~VcLLTZgho%O>FUg#9HKUopY!HIL^2@7|=~hRqU+4^hNYfRhcvN;1W;L^CIErWYSH4itGa8sQn5AKNM}s#~9MovXJ}6 z{^XNSdgCvZpo*ZW(z1lNar?XMWx9{a+{L#%TccBx=u5S2*ON7F>7TLtKw87Fah70( z)UCH5n?rd*594^7bSI^}OOXruQi-g^861r>?6i6lois8BeOpCziu=}+|Aok!jv@g` z0GZL`mX<#;M#{U?hbVO_O{bO}XBahsd;(tqT51O8Np+iBXet{&YB*1yy{LC%aR4$5 zh8~PS7@1uTsI{Ozq;uRk+~dWJ~HF|&aVYxsz)$_@&g2ykGh0Gy7^l~ynYaT zJXAH-Am-NQi* zbb|VO)pbIGrQLm?HeI*j?#J#Y!3G$OLj!EaKI21bWDa9z!UB&l81o!T&O9_39J@A+ zZ=eSNn^Mh*a1ibyu(5?Qd24eLj=y!Hqt{xGBE`~YpP9bA5H-G~8<=NQwicbcGt|>3 z&x#-$1j8H&Cw!~I-{IKd?2zi0?uFO<_JK1;HTIJLJ(cwDFw+pEbi=66DLo{6Sqo)w zITzj5f60TM_jH28EWmJaK@z? 
z@}r0>;8ycm-pqj7wycqo=M$V`_3sl3@Ylb$an6}MH}mu}$Ih}yVM+r9mih(-;OWx= zSK90%BsrW3003W(N5}fW_A)AqDSq_vCuRzwLh7)Ef)TE76||tsuvcc44N$aIJ&o{Y zycta}uHuy2n|49=*pbEWk>obH$&GCe)7S$mJ6o25%aU|{7-Zd3a{>qt`4!vD>Jj+^-lD#FFX)Yz3$AB6s=)HKe0Yun=!gHkGZkmR0{Vp9F0DdQ zeCzPgpT&DkxTd=@fXSxma{GcoqA>U@653EuPcXZ0I$ba2 zFe)Npj8GuAk8Q&iHl_kjpD@)A5M)_x5YUKT3ojK(FskG9N4Ti^ML3|3=nmCdK-MzS z+ZTh_vmG8IvlN~6@OFd%=pUe_U_Hy$jRuYnAAaeVymkY*lowvJd<$}-l)lvsF95ro z55}}c%Se`5+VB7HyZ_VEMT)+S%?@Za zH1qx8J?REsZDC&MWfYZ>&suIky>3Wto9l4(2Z|RrBIn2QWjHzi~ z-qThv%WIDh(ZOiSP58%enMbI{& zfshC_)u&&4kwoA$@K0*J9gjRN!aPzD#Q$B5Q9quV-zgx$BAfJN)de(Jc8>djiPXT0 zUAGD>Cij9Tha0MY05DG&D3qQAYE>@<)t#x?+kk4KZQG1wZ}}QsxdO-KZ5OggRhBg@ zP-4mHR-%4CiIMR!`Gw5kz#r5iA~aeytb1XrT13%>ROfC#p!gzTJk>p{vbygQ=bMQ_ z!%W3=8e&$jm}odR}0RiJ>?vT=!PhHyO*8X0^=KWzKwz3ABr@`TV% z4WPASA(rDA6OX<_&FV8&T^|BB^)eX~@n#xS*9dWQGL^bdAg{jBcG5Aw-`q0+`e?bn zkwn18fKiS>8g{|*@Zr-JPb^n;hgjZP$xD<#&p}%Y#9?TD$EZ1YdTR}+ru`efozO5M zCu2?PXw@Ne-w=Ji$X2`boNPx##%**!5C~=S_!1YD!@$nit4!Q)tdEb^Zfy^zi$9YXbuIr5?ZM8v;kytQUK%3dysK|-c& zhAIcZ@mC~7m7jP;lm*dnbg|Q3Dw(Ek|Nh@R{O_iC%i~&5GE~owKh#iAcGi0dEl^pu zFo-vOi-*bY+8)0p{U|dOwzV%h11msvlsfK!ZZ^ha(=^-fLN<_ddf7*f-}-sYfCT9X z;xM(=E<9BPFzRYvynqFVldmJn-b$zmi{@pRXS@Ax(lmOTIzSkT`aHKi1mq>*=5MUu12D9iNA;6lZ z;|BQF)xnX8Poz)EuWD~M`7AiA`Vw-PXd=~ zD3y8(dQ`~lDFyGs8MUh864{jal2f4G3L*8mY>dAx9yuF!)9mS!&-_VTFYK*i9|FF{ zPa&^v`9CiTrCH**CGLh4NKk_OSFn{IIAwrEOf*5oSQm_#7Uk|z?)yJ__)`yzT3;xD z{WoQnAO7vbU-sYhZbXRmEk)zgf9Pw1mZZD#E0n#OA{Aw(p?M9?If9ClzFP5UiB7R6 ztRfOH&A`cSW@{+*pcgF$F{c`vSv6^s9lwyTp*SR~BOWx?s=ody9EOc;wNHhBd+EpLv3$8eO5QhtO)RQI5M>zrDUZSoip79Tc5` z$jBiD*(o4D`uLZN!uW1e6zX(^E4(@bQd!)zlg!y-cH=k>qcx;Y64i4g7cE*pNLRcAX1<#!e3w&PT# zw%x|`R;Wi4h3}WwHY3%UeRG0qyuVA8m>0we4a56IgynQL4*TI9n)ss__V9Oxy(s>? z3t;eXiY5}ecF6r|KWH4-lRcUqHg0X{1Wn9DVE4P()lv4Uy;=wWUdxPK$Akk z+AxFy$akTYI>k9nnZt2}hFhKuVu^r6*Q6;cE9gwkqmu1CShYq_LYQX;ie?-o9m((E z>;-tN_>RfQ2B*?*_x@lL^wVoXBCjX0y0?4iNvJx&yf8uCaVQ5+oNW|PQ!LY8I-*87 zWb>q#o_}V>(kiv3snvc>g(*KgfMz&hYono<*FT>;d0s=nQ%*RkXe1Q*309nc@3h)n z^#P8yO@E(~nktBS9P)Hcx#Ebh`qIvFbkjSU^hCSWm4OGk(&9#4R^Y%tixC}+6wdGn zGD8Kt52uk=6lDZot~Ekoz3NG#bbVN7v+kPNAGR2?Nql?XSM%Y+@BB`Xf;`G}Vgg=_ z(5N&uuy-EI*$8g}rRM7Upv!;ZIj^cm-Eja@$PX0C8L?oF;q2hg>PPG0k!EXt1$KDT zlMKMC*S9dF!wpJi_)(w@*dkOJY&Fe2TAUTwuL(AIy7a=TR6{NUMo|^WSZ@3&_?lhN zr6r2Mu%&AL4?p&d8@4%}T6i%LM-lD3m?>G4!^XbO+0~<8ldYeOH;>(hz<;^>%4At% z6Gg^|iaC=Z8h3B!+c>V3)VL6YXV*oH6m*)L(p@K z)A-S}IYn)cJxE9DbCNagLRa~E(#=MNKKqX?Rn)Z{RlhH{%fvbg>jEQMt+PT21}J8I z2{LlGj&-=s&0<2cfP^j34pEkj8Yfx>IW!~x80-j^m%kn zQY4|o?QIXSHFsoi@(w-SEF=`cxOnv6*1p<1T=$4^k|Hqb<0ynT!5Pb|hg*BQ-J)3f zG`2o69hSuf)Pn_RqX%Z9eI$PLgCHwOaFCYiUy~SoQwz1t26gCuUiq``dOP!IsS6UE z3{xO|E^BcRt-V{^S_|xYG(Q5p@cUj?$r?uEr6dcEgCT{xBv%`)UxGPnnComFPAbZ= z4Kl6#U>J_HeS}&GDXjAu+YiB+iSa;p6zXzCDqL?cmIcc?p_luk$w{{)mIdMqm%3V% z6JAYV0yi{IWqN0DR1^0LiA5lRF15JNJV40W?9=o`340Vm?Q#s^ufiM`lv1U7*v|j* zAI$&z{`VgKkpKIG|G)l!ts7Vi(xyA%z)3;4y4aGD^6XQYI@U6LB8TJXDT^j2KSXTZ zeHW`Bo(WUg&}T^>>sj^tWxWLcfIXq;?xGuY6Srxxm5MHOPHKl(7=Py-LbWN#QE1Zm zabsiBlrfHERLu*bY2H*MxPt43AFET^>bN`JGuqVzaju}i-3zX57nw*!^L$Lj4%L%IU3+a#r+pj64 zKb3`C?>}wR5A&Y`frXM~^pX%Ocz4j!P$;oJ^3JqN>?Ndoee0gEJ_^(#7?geyW} z^$7Je7puUST?a+D#!&!L7%`8kWd2#cqJYoPf=9-2Gdj}qR12cjeMuNJRW_5?=Z-cMK4BK{lgmol zf!P;PF%3b{$4>K^=*)4@Dt^|d(4=RV(U(wRfq4YDMyg{8oyUwvA^Y4a4U!{=W62LC zs+%0(aS(|S;-Kt$Y`uN38>wKOD;qD%qpy>sz?e`Sf{B;{PS>&6*?6lIza-3S%}8WAnz+jL2qh2_RQdWHS(0GqDj7 znOW7XHd+J_Kt?5iKqC-|MAA5BB4=L5^fr1-HhbYZY>J|Wq$tv?68*jAcW%J_cog48 zC8<@Yi1`2S;@7V)$B&aRDU-knz?~|d8DMNwx4(Rux6kN8%F263G@q!H|K4mpySzS> zO$dnNFDQ;x&B=K{2{cCrHxc|=5;oN-@{VAB3XLZP=!a4cPs}LByFo4|{+*YV(dyQ( 
z3AXLM{`#R$1y(EhZ@Vl)o%qd45ZwDp79L_0Z;SPWSx$yC-`?Tz7RBCzdk9)tP?6*o z-PrY0kI;8Val~GwIYhS|%jvtxjekG{wi=GOkCCqM_{Bq}I0sATrwrbgyMww3@zzv< z{k~e$*4P7z_%_Wo;nmSJp^4$3_0CTK_bzZ8Znx&Q9`nK3;KRU+zd^p0$XMZAZ}fFv z7K*AKF-9AF>J|CUm-ZfxQ_~f(ZmR`iLi*$H?*7bM@w`L<2`bIrdsuJ;RJ-lnqy!M* zQN)p|DRu%@${PF0BFc!r_$C#UA&RPakAJ7BRk@&ggA>r2@!1LDA5UlmEE#D#o)snC z8KSdeu3O18wZ60Cxe!CCzRGLIG!N4zeuyu6B>6)Ag_RKl5mlE>{zgXV z37Ky#zam70tN&1nJx`1mkWS-{v`7O8WkZYp-5?|8j}HNl(4RB*@$MIQe;NDpyT2FH z-5`2)QKLiz0J(0{gzoH=F^n$0oWQ#0(!TS{B5IEwJjh;MI2h}j@n(RQe-dXBhEgwq zLWy)^k_w3L3Jw_mu0|lS_^O_OQ6N5Aez5e+Gld1`w&=;-{lmQZS;RBa_Mu@;(9Eg7 z$Rxynza~^u!0Jklk5!No%QR;s^={0YV(?B6VoIik;sJ{Uc8_?(!14M^t(d9S@voA! zOPmN8;O=@12?WWcG<8$t+KX{PtUc$k^z>PbQ(oWQKlpuIPpWl|)u%9eVSGTgwb_1U z*O}MBP5P2t^;jvf_7L5i`YDtxHNV4QIrj6YUWQ-mC&1dCVsG(UUzoe(kR29f5ik|+;L_s^Gj`5EQ0D=C11yPm$2fPPqlkwP?VFUeKWwNqm@h)!R$Ju}z2Tbo zh{T>+8PY_Rcv%#&0?tgRoXsAbupCaSDkamgdCS6V>Pjj>F(vQMW!iZZ%{nVuYxgK| zHl$$bRhY?%&Z#%~BB>a9qrx`x8dEcNS^<~T43ybaZxec;OC^@$y99hELyJDVXN1Q2 zeaM`mQm$4N$X$t1ONh`5^K48aoL0S#B<8T%HR_VZjsUmH_19VdyUsPrUmW+zcOCIw2v!ej;cvYmYu|Y z!K5jWO17y$FIAnvczjsT=`LLeZphx?H^oeB8g&>rD*iG&Q`2NsgEqjTPYAJ9hTJ+~cPu>6q}99Ek`*#!gX8gO@t-lo#VX*@ z_NRpeZ)IgAR&t2q@GB`YWL+>bW0{1gIwy}_6v@#9`s_@x+jmee%(gYexUmdu5u^3uU=xw|FN(Nfi-vguwkDa~7EZBr@g$|s z?9Zh6{O~;zIG!++f?gb&!Q*QB&4)A_Jvu5wS#91+%C|6VI^e~lP#w&)I)t1zE)DNK zX2u!3D0Tti?M(Mhtc!?-KOEsK`t+V)yNbiqOMU>W`kWxz6WpQ1)k{rN(vy-;+#uA4 zC*TF3+%>+;ouQ}4lb9EoxsYMplHw<|*lQVwZZ%307`aEg3D$<}W@!UI$6u$YlIxUy z@-Oyj1_&xCO*E{AAK(44*RFvG`AS}M;?)T~w7l{xCZg;n5xn@MvwPq@J^)H{UZOd4 z8bD3OF9D{g5nFtn1f$R@W#WX)CbmaVP>E6e^dG&7pQqCsZYAvYDF!Ku+Pg^i2c9>EVrBj^rBbJrK2{G`^01E1Tjb~%H(|!V{S58 z0RH;;qLt4(PqMuH%!m4NdJ9HDFu96N>5<8e(LscI=I_f~m=`TLj@2x}KZ)tHtjaff zwit*Ug`%iB!{b?&V0+sU!eOwXDX%Akgz)1Dk=Dbpi=ziPjvC8YoEw#z z)`Y{n@WRPjgokLWoiNqlT&t~`d3VRB*!1Q3Q~Jrp^WU(IC6fer!(jw|R?G+F1XtYl z0woy>OvOT5VDfU^c_DhlC>Cs@wF#HORmOK`Us@Th@2$latxYN;Yb%;TLNeX8Dfx&A zV9NtawKgKsV-vxlqTcOkrD#jxUDap zQ-rBF>1j|R=;-{CG<-E4UC(P6y_W2TUkErt?c6dHwx2ywsl^-=dx7TUDopgu`gAts zzKWRSs(%^G-W`OjQWxpJAN=S>M$~ngzSQgB?(M9$!^J=zD4Bs%ZdP@Cn ziM$}0^;SC^)jYZqZDkT196=J}7!aV284!v2BIec@Bnq5$ZO1#I)#a(y8- zO4B9Y&sXL(fGesG?^+rW^#vUd$|}#~@j`wJ<4>#*-WD$E%aVR?*CrJVlEhdR z8p4xMjK7o}oWAjCW{Qo7*>^R16-Zzs!tjrF@LWRmBKiO*%K68DJ-g)^%iCP#snbRA zY<@|FV0H(PwH7~cIKqHSzn>HTuFR{;1s)t&UjL2ll4(-TEXF%OA@M~ZGqpJ}<>KTg zy*Fxb)OZj=a<(hXYy{dBORFC2U8+1X07xdF+yq+bMPq}Atu#x>2@+d~OR-+g2DK+8 zY8#)fEPWMfhH>pp${c;bI_d5?cLaVn1nJcpo+wTH|s!@X3e$yurwb-cCN(s<~@-k zkosup;gYvtm#iMTD!#gYNwMcbM42TS6%rkm5O5RmY;0{mWk^T?dG`TX9kCH(V3Dzu zJMK6I%He`Slvt|Pn=0Oq&j86~F}(ZD`_6S80u?ICyGtn3J4CW@FkQgsmuwsE8=7#Y zs)QKR==wwa>*nO0XM@xXPHO-a$7N6)e!8cac!uy<>=uVU_ZU$>>Sn;0DVG}L`(8N& z-F#zqW%stlS7Dz-!qCflLZ~m4QFabxXg{onU^~t5^$6W|dfE`znScBsRcS$*z z+?EJJ>xGhWl9yx&VjODqRM<1pl*Rx%3~q1!eNl%9t*XN-xL2% z^lmj?rI=!|I7EgpF}X~`?{QJ(qU`gJUrYB;=J!9|!;|hlzMGv@%8#?04DTz@L`7g|dYg=)2L42UREv@T>GxA3-2oi=sz4rnVay-vPUtUgrA+gdB zL5xR}3Ib5-LERI4GO73m&LUy>$dzuF3*u8Di~$YXZs+*b7=kua=O{#jp7OHcIDpNG zd7oiBcnE^`L0VP6TgQ6XE}baJoB0-4{$W~((Zt{`7F!iM+Ya?a{IZZYoGo! 
znN^@h8QQscfl|~Z_D+E%mTobPPNc+A>UfX`Lqm~gR(AJ1-hSz9IR4ShpJQ*3-pUIC z_#Gmjjd$fn3^Gsqf{59|7)Xi~Q6tih7v_OnP#_h5iG<1_?IRLWCBD7%=&=VhyAT0V zs!3X$zRAh$HVCl4i_#YbMeGo%k2Jz?T>mZ>Q@ls%iR%w^6-3RBsFG?V6I>Pn*StFa zy}>yWvk0X5hr6H0d#Q{3{g^2C`vRJGgY{&MEf9d18+6dKn0MhS#`7vBTkG0n3mhAQ zM$32*E{JJl(cafrqtj7dV#?JlCPM`Zk_BGOA;rgKTKX9fY>Z2HZ86_6?-E%C+0Q-e zbv>RWUWet(0->G~)kRW(qvCPI@$JqBS0=nELPQp-97)m~nGAcf{2-S4RY3?5k4fFh zu9CL&CII0oRtASqK#fq|BwJ$6fLjE$j~PiF7K!ePur;NR$apaU39Ik#c@ya=qss7j zQ1|NgvJ{YPU$7zoEK?b+^WGROU~P&m{3xz}Dqr&2SpnZ-O{b9Jq^0Wvsr#G%o&Utr z(he=(iCvl%1=#8P1sy18hq~{@=*6`)E&FgyU0|#(+Jpy{OHV;xoHq6%gn=ybcnTmt z77jznG1JvX0$Q%n>G-U~1&bd7&WCo&8dq~w`4ij8aGeqMBH8 zvbYsg(LflbBo%Y(l?QnMQ>BG+cXV4x(173oyuFsfETRUis(4ts6;nHjn&F~^3&^p5 zy_}f{Ybh|qcE<>4p{pVY{dssU&L4eAzw2$7|D@u&bS7KbNKsH*^o~7Cv;L5XlM`C^ zxSsY_>yNj%|Ej~U@uH*i;sEd(+;T03AEgd=jr13f&q6w-Atc>I{U6k>`*e~uDT;EG zOQ6RpRj0XgF@@dRyV5xu&dLeKZ=C~KZBh_z_-Vb>H%x1+K;NXG9QHQ4K43S3rc4PC z$5NxxUi)J>j%5+lt5K~571B50LY27uxA_LdbM{~Eti```NQIA=#Ul#xtk0q#r&DO#|q$s^V!oh}Ki;MdJpROAnqti0KofSww!R8PE_jz>}Zfhi&rl zI$XHP%sVp$93_h9RDAIFj58)NSh!x_$p%Ch)h;3F(4#Kn6%ET`u|b)&`ZfO-_C%+* z7>i=#hUBF*jKC0hLoH|3nJPK(+RKL__+%|iR58I?tnj16Qwp+Dsep%^r3&Q7M2MiY zAKykjI`$F8Rbjog;tQ!L{g^tTYUhHRbW>eIgaZ4Q6{ljPSq>?Vgoi@(n1i@0%7q3P z42vuzY;s!kz9Jlv`S=DFu_O}H<6=2% zg4ZsEys{C@P7euW0{f1xi#OjLpdYjP2fVqPy_0zec}6ndE>uBq@tv2tUmy-6PJgBu zHo#MuUUqH#E*5F&$zwkgV%BMfTyVs-f$`}EY7xgE)&&c-#EStdq_IFtdO|m`Nf?!* zchrZ*W5k~XGferI_bej7A{=A8o9uBuun9Wjvh+>QmCY~4CEKSMm|=n&LW^W>6An8U zr-BS5c)h3vS~D7JvBS=;U)CkPN-o9Ifgr0IG<^Z4Vv%;y74h%5-I+g;!;+C_e2&zf z+t|tlgiha?xlmahY>Qd~NS^--SQWHPMQx%l#v(-bcYszkn0E|@9A+L9_%Px>i#;K9 zgkpfT^&%Pcvr(x^S$_||nco2D1jZ;QN0uJ1gorIKKlJxNDw5f+HzgEYL=Gy)G+qFi zM6WqVMUapP$K4d(R!oW7*Go$iuN_V=EoD97TuU5_l4f`~D(?MY|Ft(2n{iw&^M)?s z+p$iB#7cg+^M(>c|8_?S2>%J4(x^}zjW4*Dl`e5(EqUCl}00U(q>br~Na2Db!LMmqkqS8_a0(E#JIEQ*@hzJ+Qv!z1T zdi%8;6JIy}8V6KHjSP;M3%pZ*XI(rb^r!$pF>-Wm(e|$ScfMSdW+jA-;a@fohotW2 z&MXy%(6B%va)%fdNYoNZ5eUOwjMw+NUiuw8;35auOyb5Zs%9SR1hVgWXwa(g&f8YD z@>hQmo7PQb5>!OR)9GVK)+e!LB;U*JATY;ixsY}`d;UH$D;%Q(fl`E`wDniS64Jb@ zIFWdHAVDhRl}rYQEj}l@->jHQ)bSFngs;h&^|zuuT?o?*D#DBg5~KaxQl#x*=lt9C z_idU*J<>2P9@M%_#~&nU$K~WrZ%O6HXCM$Amj*eGzO9K>pzb6NId-8RUhb5X?XDdE zkjrQM6Hg^|P#FVsiijbXYh8!n(MtT30rujjLTK5MsA;VZ{N}~-luw7_D=RNF*8BT^ zymRcO?t;p326CfqX1oylWRwW(*E;N#IHXI5oRmodBPE&)aLq{3PanVfX`FDeb=69q zAyLYTI;DfC4z&lc^yr~C`sF27QLS?aw5}XUv5^i3w-5oQ+q}zN7)u}wkFuYZ6WtsD zj!WKZ-e@GX?Kbrs#J#;}hji`4e68x0*uGDH_{%@==@$yLh&YpP?v{Nh zq=5nz9TyK;E&2RFj#ddXY|W9G@n!jp3UkIAkCk_@7E@9&4LJ3^t7a2qB5ZH`a;;IX z1SAHP1;xY9G z$M6?eHamxXe@KEFs?hjwNZ>2+v-0{7n)poF!W0nTzXb|Bn@Vx|q3wPwqSd=SEA_z0 zozmOWk}OmFcs{8(aqKhO9*exb*IV~_f*GEG^g;}`5lWFZj33;6{I}l@1-JBc+3VZo zl(mc0$4Pm-TcCUKYK)p9eSl;~8Jj0cqZD3zQ*}A^EfkgBJ&3n%tRH&iwkuUC{D6wp z0&*;TTEXFS< z$E61uOhX_pz2cM;g6=N!J^)APF(KrW^rT+)Gj|Q|6$nue2+m)49#P1#GQv5wv|lbAHbyzwnufnsuepYB+V zS73`@pfTP*NTJOR>kXX1K{m+g%5y~`^_Bo!i4MP9iHv7JzeBDHml z<%qy$tQ(+}T49d~0zQ!vc<&hA4C`EC0$hn${$2-2(Jo5OE+KLNd&N~^tJDCe=u*Yy zbH!DB)vU6Z-F9!?7Xnd+`%WQ#Au+3WsUrwMG;7uqT6(x*1){EsHrKeOR70vF8$XV2 zE^o`=dT$F1_IK3R7!4Upvt@J|mb4IjJ}1^zRq;Bk{ROqbWx)l&x|S)6@|x4=RlaPQ zgK|{^$ztHKCdvVTs`iWhvdhmbMaYdP{s3kpo zD8$lhyN4_9PYcZc1n0S$>q;K(jGX53}3g5+BjMcU%<9fQJKPx-2K9f59y6#!3P_&eKs zYu=8fXBFklV$RQHhzm3f6JW?^qYwVcE5di)we9^F2#i_rpJ@l4u^&a8ylaOvX5v_V zEwM%YZSf_~bTwXcykpFybFGIyn`3w4RfHr`_6ai;J8=yS=$*Qe32&Q{fAw{yZV#8` znA5OI{FoMlzU=Md|Ev8dU$EGPK?YJM28{=FTYfSx!Y(iAv~!$BQge>e-`GamxkUYV zG@O;us6M|ERp1T`1;z0ebvYcdKz*Q|UIs`}r1w3@!%^A}098HZwZUxxr(yeT`60V= zrm1*{Dzr)j4B#=LYGV|DoFP_zasx)C`F!{BJKyz_xzlAZTl^GhPzg;OUlAe^C5p*` 
zB`vpRbdE=D_;st~w%z{`qSus6&{HchhY@Yki&EGlepX;ZH04dfBCLRXddO^*JSV?! z-tH$hql0YK#O%8AdJHg4?oG7;vm`MgDvsEGgYcl5;U12WHke^Ny}9=bWZZKiL7C&R zWJ!>fRb+cEj zO$zYw5+{!7TJC7t#LJ81oCAO48?snD9&iuGER72Q2eK>uIUMI|U_UH%UE88zYYojT zK{SW`rP%5PzGK9cPeW;3ThRxW5U6Arn6iup`{odAtj zM1X}-he#oioUMfr8O9^G6T#*yw0Ie4-u?*gsyv%jt;pbGRRC zeScA1r!$y!Kup$*2Bcx7Xe?i3G+XCRKbNMJ%sGaKl80itJexB9yjy7|n-0|m8qJSA zVxdHM?=5@upm5(|y4UK?A@K%KX_ZWrLDW!K%LETgC#1s>zDW$ag_Kb$@wf0?gF*tx zB>Eu^ZrtqfG%8?^iD_olLZKmkJgAw zYEg=s%2U#?pokCld%0@ZSO4JM75^t`oxHE#Vi2duj*31a@k#_RFX~1RR3Mz@DftZomVb?AV6M;KDTsxQoo66`A|jJ|@svUR8t$bx0W_zqiQa++g2j zq~dIn^}E0IS$u(DMHU78`gc4ZwS80Zb*zn4EIKh1%+5#^gg%4H@KL>Q^^PsIn6So( zhBCSAmRdyt09t7MAxzd0si#;E)RW&aq;?1$7bAyow^Rs%=$)oMZv7Ujsv@W~_l3QP zH^e|{+Ka;og#tU`uMqlx$ixGno0WoEMm3a-dOwCijh66qs7X!Un7R3oB+RqIA>ZO|I+1d6C zP)4#*GOQ^Cv~boa2047*#&uv~bNalP97iZ`@gtlsdyEP*k!fn;9E(VzKZYONSFsoz zN0R7%6`*z15iTf|7wkGqWP6f0%luw39*I<44H{4geO(fuXms|}q!bSyKZ&vJj^6vh z)WHgZt=GqZNy8G`s%+G)!x;`!RYKBVEM+`&`#ifPkcxO2tA-z4_MOv4LOo9#9_3HI z`kOe42pgAM5iW>i@j|?8LN3Q1%jGI+O1B88mvkwW3-k}spe0+rrR)T^=m)=eb2K?g z{fPcXCHlkuXz(@0sajN~+XqhkveL^dxze50Qk|@%U@hRmDZ55$D~%2pt1M=pi&b0$ z;#Os6#L8LRh|TrASQ6q>mDLHHkL^0$*|wB=RCdI^!JHU-wZ699k43`-XLG#>t*45I z(W5h`FMZkh@=h(&&!J}`Fi1erzu6hltiCYY(1hw5$e1&5Y+A7eX&m%HnmPtbdM*Zv zai11tQxzq?v^py8_F6*$G~xDCUhsEuNOc!o&HGE_jj?wa>i{7O`qcV=D(;TO>wU=m zNN~Ks6tKHX4I718q$=Wpt(!+)Kj?z1+4(dYUbp!CpSM8EpSHts-*YIoxPjhdCiGUO z!9z)YZ4ELWg%#~m2?F6b%GOtk*|P1Vv+U7OLt4)7W(lQ0<%#)``-tH|uyR%myy^J; zbpge^;9VSt*elW=@PMDsjtBmYP%(3`en=Vz6FRQK%mfb^wwcBh`*HVizw1Xt^ARUVQ10|c zR-@-U2?P_+F&@QmcAGOM(;y$(%0m&wv(lEA?V;F%>tV60rdT^=(fPox(IBqHsyx!8 ztZ<$kmsOw!vG{Ba6<+8B3G=n;w~}FmX(q8B$R<;! z69--7!g+-FC>a@4DYrGsdzJkc^Ir3QOuZ7$xb{+JwVtVhkEWlzI9VJ?C5y@eW1wO zKZAPVtkhe8$GDw$K#mb(v=nqmj~l|jR)%R*LLn`^uOzRJvN(d9KywT;U8)@BZ882S zuld-2@NH7FoZROC6)|h0!TT3cHv8>-FoPMMD&pp030O!gjf01DZ!PB)1Jn zg%0Kg<;6_t;5gM{RjTbQ1SfL|@tZfV+{y)9(GD5udao@#T8?p1#O$NJPqw&8e+*TP zPCeAY5feg1hftAixj=`+>7d4{hAB26N+??i2nJs`qoABk-=BpIB-MwGi1}|15Kpe2 z7#FI(^rJtAacWS)1L#{}uhP&toRscYN<<0P&%S!e(L`7+B%F^+a1~z=43{tgIVdc` z+l!)MDP1jCzM}~~8Rt6I=aw)I2AUVJOB_drjW_Dc*=ZhZ6+FdSDx#X>U%yRVARgca zJDrxBER#)>cEOfp%DVhQ?A935yeuyNY;A$fbs%Qd7=n`F`8~`kQPIUg1Yp}P(0Ho6 z1Du7Jm-@}rGYQV(XaPdAy|=L+zrAlJAAh3W;Lpf-Q-4%qXvB=Xn6maoDiSX}hs(8Q zK>D{(0mX;nU^&l#8)*)fERMkCsIUtXgC#OubhkjA*a%dSmm~Gqi03M{lyZf$wEW;f><^g>RAg@J%Hwzj42=N)Yeov@`^F*= zUnVIZYmG%hJgHoIhiY_N$iDx*C2Bl9?de9+&gzu&_5=^~W88Q@Rnfuy+VC3{Wr z`uY2u7q9QmE8nVJjCtbHh+VCYo!%;)P_+E!cA*OA4!S6ZrukzkXY-6^6hIo325)c8 zd*XUeN{nqzLDT{+mh%kz-+laky*&v7H+HxSTgsoqN-J*2Ptt6)#v2q6It^ zcqJY3W;v^p%Y>vESOj2 zQtc-j2{X#d<5oS9xVpINJnpmc`Osfs7~@O94k~JJ2=)dg;2E`Ct^a&;OUA)VN0y%vQe2ynm5`Q4^&Yq7Ooc*x5SR?1C?^?c)a(wwp7U#_+wT3)HfUr`Sl zf-^GW6W}$ljkMdA9xr>K2QzZJei9%{VeCEa>1~lO(aX)=`*>_hN2aZVq)C*E_z-;@ z(~G)kUccjd@Lu72iz#711Rd;%^*!0>M%e3jX@{yEqmJE}vKPe{r)rdCk<5AgYojFn z#|&D)VvaAqY&huH2`V>m);$LNOQ6~gAY##G-sSGU-u)zAvPDR!Tud-ImIJN$#V`Lb zzDm++S{S^>0A3KDX+aC5MU86Cfx)y}Jrm%W@rilG-w4t3*mlhJntDoPw&>A&fD z#+P4YW_0Q4;~3@cbEd*2h4~qm{D2T497CIX5~7dlw*>rdvbx(k=LMja4IGV|F%-*(jZ}U98-PK ziGdyCG{tIA>G-E5kW+=waznUp91#&_N(+@&&I{gy-Z&53(=d|sk%N;{x!s+u?$CtcM8_xEB8{xL%f z#Pw!5iS1^nWTv{izvFdRJC1r6^xn|t#2lRN+@Xkl- zbR64(nNy~QABbQ9>h`wFlks2sg>C-OH6nyCMZ@e1yam(F&g6Xg$EVPfSb`v5k~qc^ zuXkT~)H|E2{ojdCcGffhjsZe1OKXpNVwwez63p$0rr%1`I&I(9H`=Cp3lD(Xug78z zl(57=e;dz|o|+Y)AP5Vb<8p1c)%D}K(WoV!8-*764n%*_cb+XYB}WAtflYgyjW0Ww z@%EM=d>o6i{ODP1p>#z&es6TGLZ$+3qc4H9mACBgGt8_XhOd5*Uq9cCOPk*j`)T>&Jh8@SuQ&-rQbJj!I1Fa^Aq8xE@UbF?qjb zV|&jn+g9h@`Yf4kmDSnA%e?$3t$ETJ^kFV~sSiRhkH+e#7+NrK_z}C?Ycc-ir%$~z 
z;Mm_HWbxV3v$PNee?9b|xwUG=Hz^2Wk+F_M8R%@|zo3aPrw2ABc_0=nl2W+f@y^px zNwt1S!egm6#>3Xeh!9ij$XI=o_{oCMTM42p#>VS7Xy8p-M_HjR)OV@|DxLB^GOM3pe$cXj0WH zJ(Myh#e1VX6~h@VY{fBnxauL;SoabdtDR-Maa|zP~qWn0j&`t^jlW389icx^Q3UlS2eJ`ziPEfA&u@; zP&R%9v8#+P-!cb3PGsASeY2W=5L1{t)9h+tQbW&;) zr6jpQG2NZtBZ|Cm!aC0_vS6pt0>z0Q0O4$W3Pr2z>c1E(%*A+IO5o`LA@mK?^b)b6 z_xD9i=`}V07y+LRh5ye|1nCf{;D1O3rAEc%Qj^RI=w@?}MJoVY0{DL&o1~a%H@2oB z9{wogglO)@j`yY!tF+B7TggRSM0M?;7lRsN)%%Q)H6WvpSjP-;AZ6Ufw=vTGPY)4b zF2<()E0Q_Px3sn(qTVR$LqBY#{YkQQcO#~wvd9|eXI)58#}J-R-cZ<^K?s;VV+4eV zC@qxO#t!shC!NCY9#3A~-r4Sd5sGump7Y=wY-0S1ZBq0LYui7M4E$+ zUBP(}AN1g6W6+j#Egf8w-;SrOgU{(}Y{u48^7I-!@6kVtLET(ZV$uv0k zge1=F*duA|$=SK(yZ{hZ?&|OROT8#JNG##+DuDuqoO9dAz8{ckocN4q7b* zIUR_rLwk5=&@c;!gzFYtrep))4(#!m&GbiEO(5Xs)v%cSpy~h)3d1OSHGaYHVjRHo zM)2&VeBwD4+)qyoZeH5Pvlw$0yO}k{u~O6ZW}rg^Dehvts&c$WXJp~0722DesB>Y_ z@x6ZMFqUwA!<+E=IKlL~Yly7dw^Ya(gjh^-d1b{16R?hwxAg{?7lN|nUR&jHbx*`2 zAm{V+cPq+cNW8(*2k8g`rT|EetWHF5bdNZVQ-##=gIGyLu{;EVLQM`lz&dFv*$;6vFxWtR(20a$$BJ3Di$qfr=j&&>+puX5%vjkG|&QVAOG@Yt@&S zbgLDqUPR@@iV~S0#DbxG6?F|sBZh+4Yof)wk6&anY|4qfmfV@-`pe?(wj%hVeYlIiWa1_ z4>;7JP(d42#>!wuf%iBflWAPIE8 z3rR!<218g`J?O2q-Ey(g9mpS@z=096xecGNf|6;|!Pd2D5OE{DZ8l)jwlP0qvBvO$ zo)*kK$T8WsO6W+g=Bd}0-65@0GmS|8zg)J_)Iq}1QZgjZiH zCqsIJl2#$t2X-TrA6}dVL8+i)+CO;_JMz*}t^olr;3YT?6UCJ)ViE|Sib(uWs=Zsk z`q)!k-Ze{_@;}FE+vfk zyTHTDDds<@)})f?Zx^_|)%=>-cofQ|#!RSSDiJ^s*3G51H@f`zar_Y5h+v?H3l z%0n$62rE743bqg9h~S*%i!caUxpse7?o~coS+B;Fw@5JqH)z3Nb8VHlWPgDKKmpDRHg7Bon&Vx8neQx|Z?3rj(8S{rGgVr8 zF9nk*#{8$B-~Hn5FTJx|tfP8?UlR;r>*H~h`$<{(aYJQ$?Em(#wPL6c%1n!;xB)9z zf}fO&|LU*&+RKJ1j$eY1o>uFY<&(~egQP;Y7!XD>;f;)e1Gg`qGF8Izqo>{iuB)cNQ{}6|`C@YGdl_GGJ>;MGMa)7m&Wvtj9kiS) zZ;2rYggUm^f#Un|-QUjtH$<`aD2d1Ruvznl-{c<;Rbj&saPVTvU(PPauJqs(Lj~mB zQ_B?4_%R~xU<}<{6s|Imtv))cm##Q7LK89Vmz|wh@BVf?4zxcogc4to>%qNmqjit3}l(;?(c1EZ+ftMBYGP+Sg|&lF<5^ctpJ@stP+dA)rokG(7`C@}5W>{y z8`w&Hif2oo1}dMeo=-;!sl?=U+MP-S`yVReKvt`+l=1Q=b)^CcF-9VUH3`4+U@4aK z;ll@!qnQ_j`+vRLoNSUMyeotlS8p)O`1p*b@oA9ZhZS@BgpSSpI;8AA9?{CSV?Dv) zp?NAOTA-q`{FFVTNnRMyaN6pUox?R)y?+y3!RWxFD)S7q>sYcpnu zBaV3-W0VyI0bM%N8{0eUe&*)YEJX6xzZDC+ys{J8%*qk>-u0m5 zQ_gEe-Zd$cq7#*P7jbglnSELk#M`*d^JZ^V)C&?trD!H1z^}%Nc1uZhLn;IL@Q`0~ zk}apaT?)Kjx%d5}GTMJc6yT~#$|IfShCDfz^*2BJSH-5Ccg0#@TIB4j5Tr8E z?2eCB?qg*_L=MbeiMX$Se&8XaOK7=`stOI#1=>UROYZ{#?HJGU!-w{#+d5NUM`+t$ zel;)<@!2B_8F48@-G6g+fgbkc7=~D9jv)?6ClHW$u|uBY%KG=fwhPIsz;qayF${cz z>&zH@_WR!|YXM%IOC#pWS-!;j^q*>}T6TArd)z-(&zIv`7$)|#l6&7pS;k7;*5aG~ zRyQPOjuE!%?Qt2OF_-updv`ib!U|H4wXsMC(P49nh1BVELVGe7k?%HOkplZ(EAAq0 z6Hl_Ik1YZ7@EC5aM+J6;x*gXTkm4oESeOLnkGxNm#hb7`Tcph^w!xmEIQ(kN^BNp& z4eM1n;pvMPGX6SgU9#dYMXm4tGX4_hRc(jfUF@nTV-Q?_0balcc#%w@;4vNMq_c*f zO08wFUm?)2t4^Hx<+kO2AN;$DsTXPm3AML*mMA-q-jcBOa;;G` z1KoDW(9=f`yi1cyO{qXhaEQr{6sRhm!<_ualR2QNxOa6I$tUtK@e4cT4Ctp}-LFy{ z4k8LAHWky2iZwUDBYaTd%B1?8&sJq}4RKA<895wXV!|N1B|EptnJrrV_4Z!K$#>@e z)rRsHWWvAt!4FFhAjsU&m}M+oE1?N+@_Yp7 z;P`idTYnZC)j8~XP1hl~0FfA*AoKs1e-M9hpl4TQ1F+Uuv=T`Jld7me%$ANWgzbKQ#4IMg^}c`GGD%)^BKE2LF?q;aKYys?JO3SMhZf07J=Bz7(r7KKgX@?WLJ)Tu>^`T z-G<_;Dryb=u)d~O1Os)+|}VCbd)^c(m8SOe1ALgh#`_Rr;5B8i}E5N}0JyuJhY z@-C^_vZShGSq&lqnj;icHd~#A%5{q1wvomt@obI~Pl$HhOQ5!DX8j%*;wU=~2?KSV zON^{8X6yT~sveb41pW5T!KF=$d~WT=`xW=ikQdoxiUPAB;MV@;9xP@O# zO2BtjsIF^3im7R_B1($5*P4KNFDOBohh1ZScOQT7?U*}eO*`D)1#z~@sgDD6FinL- zT6L64Su_dbvU@5rpjszE@tx0obMQEEoe|=WG2ri}U(+Z-Jb;&UpjO~D&drrVk#2@` zioxVOc1L`>GW$`L2`r#gV0M}S0^O}d>B>P~r5KUQsyuDjWOjN{5s)6c}b31M-ymx^wW`<8uZGP%ET_Jj5t3AWkS2dkLMrnGD{0 zGXwor6!w9@G1DZ#eoewP-t)WdpV-Nz$0hfKw^gY2FnsQ*9jk5HS1cn{58iNZC*+1| zgvl8=kobG`KF=HbOAEWAbarMrS%Ue4bk}(#O4e%&vi7n>;wU$S*d&&TB=kW7Uy;6Y 
zeia*c?}3$P*#eTt&gkMI1(+mLy_;l5a245ep4(GqP!v%A`c0BXf9KzR*A7ZRsMvOI zG-bam*+tP%=qEb56&Gk7;8@I9jd1`=dV5#V=UjHSz@`668VT)2Ca&iA=v#VS(2*pe z#1o%k!fV_(KVEeQ%IoLh-rf4(Q`*R?+lu%XqRegOLosArOwt@>$Rx}u0m2Rpp_7)V zg@r?4f*}^i*rTzZ=Y!kf=zsbj=Kp|Q`jfBz`0lUy-=E$6PR0L;7jn{8J+NQrA3ya4&LpUg_^Kc=5tiCm_1EZYsqW=|1LSfS zE*ny$wid&I@c?B}lmS8xA5R>@74vYbSSAa(Hr6afq*WSAL9CRj_lrnjvz&6|Js|1> zaee*w5jnoo5`&8LR^_HFHWco%nZ9IM7SB!A zmXkgB=jqcClOW_DoBbP~*#MHKKu-nc>RNAN7K3shC<%~`AE&WY@&j5Kqlu(cydKb= zU5sQ>`#+Hb#FS*!yehkc+X5n(Uf_M@DW#yz;t0tnfv6IcJ#Ss1792g`Zmd)+GH%;> zf%2@0&?JsCL0hJrV3Or0&%6P0lPX46ceuE+Q$i4ZC{5!BYxhvFko2ulS%ed4SDW_n zA|P+{aVIF_|J0wSr``7PIFo?1T{_L>BK?FE#EWHRiD)tAi}SbqF#c$fP=9l~6(Gcs zCtg`J#o=N6h>}4!vQxh?E6K*@hFn&9n*qUE4nn%+B=*2pWTXk)g&$1aivQ>j5~zhy z_DyfH%G<_+0(Lp%m1GGBFvZw)#4SE2G-vUiB84?w_Ga*b=PkgdhmBv#@K;rycprLV zJ;4S|5o#A~Z0zBB-(T&%VTG~5^Q)*k^WN%)ZCOuGjijq^-5xA+GD_u8*p7J7s#zo( zsJ1jCp$?ZrR+J)EnjARqG;w!E<&8Vwj+%(FK9vu_o|@s@zLHxU%S#06k7IcOO7n^5 z3}}LPGi^`e1vRH-V^QCpF$#cSY4NBbHg1gm3!$5>5AkZ@Qv9-4!HH?H*aS-SX`v*bfLPLwZ`J{ zq(!PKz2lt{VRd#!95S{I zTN{u`thM^QrBs43!0YvTfvmw{s{}Im$sp$Q4kBN{5PeYY3yX}Sxo>cH|NnU|6hM$@ zh(S>K+1c?^WZgIPWegbu7B9$ut)2}&`0H4@GQR6B?h1~W$z*J*Ih$LG#i#%4tN-Sg zPljK5Tlc`&W)cU4uFt{X!aVOR@iZGrPcBQg75^UGvk+c>Cb*Hy5+T;)iW%C2;hx*0 zju-!=8OB~PlpA@xIEHu#e`HB&8J4=5oqj*QiR=E#3x^w%CNsAwF||seC2HNXG`Jo< z$SS&f-l5uHl(4yO;)S0hE!EtCxP>tzqVaw#Ys4%55=PtEnV$MT+0I2H^Fz|yj)bB2 zghx-wC2=t*cp51<#Z0_p7?>b6Vj&=G&hXS{WPdE>ikI=W3>drxTktj`q6+Av`8_l| zIS|k7j9G*_wRD>LU1xU$AoP@}Jue{via|oNT$M@%R8%wWyu-L!y6NLN8D742xB|&+Qj<2}h+_r16cGz+Qm((S{9Vj2i-MyzoHL!07eV zJ-SE~=8sLde&bXZt60*Y^vA8WMubE#5@Q-7kU1^Up~KnfFwYak=Gy!CE{SAZe{Yvc zVYrQz;E4mN4K0J@ew$oeO^RSWS$a$o5w`yvlVYsw`^>N3~jR9GfTzlWF!)68wI*p zI6+*!HVOvv&(Vx9_hhhX`El|KV2P)na6BttdHTpNY9QhoPKWxH$b|)vBvsM8EhZo+ zi>I-Mm0!IRa#7y(x1(D`%}mlA4i_F-tjF4R&$kja6^yvdM1Ki(vO5pNKoT}si8BBij$;YX|ZkU z<6`S~utEGNF^Xf;S{8PY41c^kxKi5)&yjd48DA8u7Z8&LE646K#|Au%PbTKtCCrxI zj_U~qO>xQye;^1SDsWsh!2$`!>Zw90BT8R<;qmM#j;RPxd=r4AZ8mZFwN0^KES z=R)+H$ubtV4Lwrtpj8~u4Qfn0eimSKUNlz^d_oCZUxK+{=>9%U9MPBFt&aSa*pA_C zrPOY*=Twm@MRo>P4W$U>#Sei16g_YbDMB8lF?6WjAJn%HH>tnH>PwkfzD3twwf9IW z%z2(Y1M)Ukbwe@4t`^iW*biN14lrx51mXM$O7F>a$C?w(A{P)-`SjDfKZ{QUS*)x_ z0u7S;yC@bc`j#2h=(Mt7IwF3^#mojRg)D@^y&wcuTLU$Jo7(7Q5A}TTCHAxrLF3Ij zD$yKKuqiB37Wrn5)Gu!#y@UblBG%9T%@02F7~nO9*Sj`(gGvhT6P}IGJP_`N%p79p zU|V~@FGg1Eqz{f#vk1)p!9(AeH}dcgF_QnY^wX-Kpu(ikrN|`m)!b zpEqP5NHS$H;frxeefQw0^vKi>vn17S$``k%XC=HrCb-2E9KnDQxo}v^F=kvj2W$+D z+Eebn8D--V3<;719xwn^)gYF7yjQ@F2`E`%2-~D9%*tp2l}S$a@YkM@Hg! 
zsXUklHldD`$%{vdDdCwXL${O`pNp5E=7@T@QjOXWo}AbR5@yUi4)k&b&(IlxTGROG z;e&@B%pnAW>A2t~({;|`*r9rqDF1@-KEFU~0ZmUXn&&`mQ@m;LWv1+4KA;}JQvpRq zF}V-UdcizFGgK}RRg}0BGF_~EA)~??_N>J5;TUPajgs!`gO>$ym8rEu?i^<^xYVg) zkAX$6)!3fbSZiJ^MVtz-;ofH<8gq!Vh|YJ81XePcE}iFi|#A`yi$Sjm+qrBFnE=IzvBV$%D>1A>$z)i|ckKgaKnCuKtC z6!++6@D$-BZ@h4JT&c&v*=8*mSLji$Q2QBJyEeGt9L^j0fBOxuix4!diZOz6-}w)* z?9o)EA8D(v+uJ^*q&=IBYUSxE z&*`_!m_>e0#8Iz6YB%fCDM<$(6Ho74WgvO^*%L1$T9I5n7H1we5T~7;-nz>%?!Dpr zSVD}4YXr2G$8(G_LPLgVHkR;sdGE65I?))UqL?kZP2BJ5n^jBz7>^bbAQ%oe-eBN> zr$t66W69<0bUPlLa!n|nGHt@jbF`s1Ss5@Eyeui)MBuXh7=mhq0n$cVSvOMCxa|*Z2gBk5lp-$ zSs~q+@~RrmV!zgej+w?@^+?o}B_~LZBN%%(9;M(se2{!y2+Vm&M3K%FD|P$^OT|l# z&U%O8&igzbg?x-Ngz)me#IzV|3HOOus+x^URqJ*q7dL~VGleSRQY^tfdI>2Bg5*~I z0!+m#-07ITAfe^7_Rc2nEaL*r#H`?=jNRqzikB{Smxz|i;*ZW|L(g#y?01cUfde?M z4hz5olw>^o_Pk^+JC|G@^IUOCBdE*P=er67(35)Nr_k@%y}0}MVKRxCun53H;$`sa zT3UGS5yfOBGXPYmxtHvl78O zn+%7y2yDG4|5NPF0UErV@aJ?T4pAY%lm#N!mE=MW8=AFfG*DSL#h6n9mos7q9&}29 z_0n3LF07tGPtBhy0)jjtmT>Wi=&P2oBjP;i=rD2L$!N@eOpEY)jWQpB;#|x*5dn$)_-Pq#6<0RfJ6uus>hVmQ|!n>K(bDWbg%KD=>hzl4i{Sd z=1@s0g%#sJv|{fX5drz~=U%ifI&z$3$VUlMVa3a7JD~oHOVdBYQyo_*FSg3*d19>y z{T4lq)RJ8&+HPxblX%IV(JY^R_qTlFac>)xwZ2lJ82{a&yujN3@$#dv5%+Q&B;ULK z0o4ZNXyaSEUxc7Rm-IBG0z!}RrAHwQn;=eB!GZx43;uKf^2gujb|BtVZ&*s4RTNpj z1Q#>buj|ZKmP$%)Is%9A^**5+XXX?@t~)49m^Y z;H2m$IL;JMxhgw%IvdMW@)9<#gYq^x^{L<|!-QUOH_b{eU^r?GJS>tNb?mrum0gXR zYgrk`kbAD{rUbpfO5v*1AolJMbrcGWc&QHxb~fRM1|qXj$@UWQ0r2*q3?h{BwI5y8%?^^dFDo3S6grjk#<*YMzTO`6_BFhZR>#9SWB@H@2rqq=q|cR3w-BSPF!SQzjo96hXbH z)7h-WJJKm!@$43-YYfBI|w`s*N#oWO-yZ6pKUiB9&*upE?KbFneKtFkbg&YO~gXZw*Q%aqc z^!}4~p<1NE-tT+9s3AwJBi=jk;+VeN9NYXt4z~hGOAz2y%K7{^CFg9OCxMba$%Mz4 zbAg1_^M5Le5s`ZTFD};jtFQj~tN-@tU;J-elR_@8c$(C}z}j34#twjo7*H2rF8PXN z)bG6XnkhU+=?CiA!|j~mHF_zC9+sp4wXKyIjEpxI3Gj+MK&`cDQ4IJA=OvTBq4Jw) zj!g(C1yp0KXd?A~O`CXjoke8Iabzw^$e~%u8J&0 zDh|bkfdk>?-K=d?5Sk(Zi0MqUtV!S_0{6TGJ8OxTA|u+{F4#lrL6wQAg=;s8!W>VE zs-^Cbt1|{JOQl|ITa!|DcPbCev7T<%69SA5VL`pkm1Md1F1^)6q~8?na6G{$j8E@z zm2nF0KK{FZ7o$>XqG@N7Py@_~Fj-3FxaXtVRK(+{9unjqK3?t**TdmY0&rkLpzf)%HtE&5RKh*Lq>N z|5bUTxL%C`?Bdv4e7cizA05cioD|=e4Hj>2Rix5MEKOMttf?eyq`DGz+*%X0$>ri{ z{eT$t4@0r8Jb30&@}q^I%YXdpul+OF2H%z=Mc}mL?9aK00U&v2fw}?YvJ8NjOz!(m z!OY5iYor^o&oD!N-!G-~rmFOv0r{`^lsur4+0z}24;9D2IPj09ukDbVn$j`g< znI42V*5Wd-jH*z#LBmA^I>0aJ;(d5bU6E-z>;nGGnuf&EAAgk;ClG$;1UvnXmuPUN zkWT!KD=~yvFB2i#wXBOPrX~mQD`EXUNk<6Q!~#NWuEM2c(AgytOBclN$L+ecs#t#T zFy7DYCqDEtMWL675rw3+zNZxebRS{18u~tZ!7GGFiOe(>Q01^TH=AJ{AA#k@8I8ZN zXBXIqs34zYc#Ce16_HHRY*H@imy6gA@U7S7hS|VCE7McAJA^M@3+SrfTk~JUXNrmk zi!uNi@7b;0!oXC$>n=7_2=A+$4DJl?um#s|6(YptLt(zFM=^+G%Ne1HteN)SvsWfC zpeNrPPwp3}*t_9ly|1JqIIr=-@BQj$o}{>({KYknRgLBnl#R`2i{Kf^V3-i-#S)jK zNOt|;QzX(4#v9=ZjZX#T{_2nXXZ(qGC)tk$IR*)me@xNm zWyja#i~aV!VN1;aul{@fUu^F0{hr5qNP~i=2a4`&#txN~1^odZ5cbc)ovWH6ncn{4 zEqEfj0uq!BDI@AO4^~)Z;v2}fy^6uS=*6;#&I#8zwz)qj{ogkb02F+Dw*2Ht>;=~c ziCaH|rM)hkNnkIDc;VqLOowDMbe0Q;6g*+YRH&~F29}=-&N zx*8YE-PWiS$!t#E&f=F%)BZo7ewH(Pnn97>t76Y|Vczks5ttD$>MC{ZgQaqNbArg4 zrO!nCf>0wkB`?_)YCBZ=5E=zP8EZ>EU_GJC8!eO$9T(MF{8p{>Jm(#i?)p(miC|{B zOPaE=mKH!N$~fRd#nT4En{tK2U&TO_g^RUPESQXG$x$4P!5qZT$+hS4CXc=U+`q$b z$DEBdUV2*U4(Cj6)n!}7&5lW(fCB=c6HmG~i%Z_VwWJtVt&EQ*Gd*7R@^CK>IvKEP zL3IeF#Fl`BXNluA1bzpiSB_)E4iecxi~2fjc)V*YA-Phq0(T!j`%e5jWZ?gAycpxG zDm#!f4cr`)Y}oZln#(OwN5hAB5|uHz#tKWu&o}~zSq4pkosCQANu^Ktkg*I0oopF` zBk-R+vmFRriTQi#8wy(FUX{$Ea=o?Xfyh2}|M{n96D5*j+*s}Rw~ao;g51Bw?~x~DEp{L>ZKOZm_3dwdyVOUwwi0ztj0(p!zAXCmSVUo2 z7nuCi3hz|i-2DxeMil4z=(e2XtMM>Dg0Qm20)Te{M2?qobV{Tj7%vdt^GqjPpV-(` zNLUCLA;Dw&h@|`EDquX%1$yaUD2kJ!q2;7Ko__)l5vN)9lXo60gdFG?AZ?n8z+;S% 
zV<^kiFgPL5;Km+2jlCP4oe)Nk<$_K?LJ@ybGdrd(B;MMa4P`_Q%TXiDPG~s3(DMG= z7`1lu?2PytgJ(rjBhLZGG4}Pt&W_gttQA+B$NIj;l6M+6^>#W##Jj=HTCx;VQU!9% zP7aR|am&vh*ROK@f?x4}>A<(5F4HkCjQSnV42**+#XYdoSz2)~duii~hS1Bm31+yI zNu5ysh}OPLp~J3PO{25*gD;YcD7pRj>yf&_5r_9mwN}t6qN)e5$jG)pyJ4m|tbiY+ z1eLJeA(=0Gs}TtO-@o>c-_s?fR{@^Qw-699tdPAd#RMII=8e^#P_{76>^T;6G4rTS z6a>om7dPW;?*(CWqSJO5Sg5l?#7c;4R1E*U3bF_E_MfbN*2T{~Vy9zJ=MAswEu!Dw zb>8+$=OC&zOeV5k2+ea}paj!Z8$yEVTV{o%vo3`T3mMA&RY1zAnlmgw?-w1*MrS}k z$AwCu@LzdhL}PRgMlb~Pe#DLsC|y1(X=I!-%*tfx!CWrL)8-d8D>K)J3P5O)Ld;<# zxhO1aOOKz0=qx>13e$Q{MfLyxf4vNiD7c}};x+3Rg@1W%y~G)bufOFE%h0OgvceE} z?>P~ZDa;zBv6V2noJr!SxJk*RbO5hF$j2emx3#nK^ihlihPjX$g|!f=_yjtAZ_swL zn6J~_QA?kiJzu3!aP_*dMTlm^qaH3NqW9$)JB`w>F+?)vXE|z2fnS6eLf~K2J$*lF zk$z8EQ+gD}rZPcS(zG0RDfXCMOzyb@tRC=d6+f!(s{fu>Kt4Xo=mnv5cFiR+4Vl)JIo<55|T3bnLs$E55i-r`c=tres zQw$W~USrXSg(YB|OP6~vKfy(hf5+!4IY1gazX1f6$3+47L4h*08YZS{f#?AW!(1qi z_AXv(tuY;r`8)3f^*OoJq146B_B;e3@dLd54ey#uaPz7wgGS;t2%D-KH@HpnpPOtl9HY2R5#xjK0+uQMN7;aN15la#}vEE0m%TTi|VrUN^=9{Ls z7o~N1ukrVn1x6zmI=jYd0tJZ(lPy7Fk0$2ou`#Q}nSA40pLxQZ|Jl&y#p;0DGY)MJXanSLKbfPrdra2!ka)v7w3XX zm}2#jTq~GpEbWrqqI=KexI~hm)8w>_lVtvN=ZhG)Oj6+$cL%G?R$2Eq1e zJ0OH#Yj&v#KPh4BZfipB$?GuQb9mZZ7Tpf@-7s-FjkVrgi;+{o@Z_inDS-C}u^LtTI_a?eSDg1hxf5P3&Q%&V^BhuE+Cd;Rp{ zhul1|)q-|pB55$4S++U)q{u1J%DVx5FuL)OyPO@)w0x_05h`AWkr$yu9f4of_klf} zi0a`qjQH2xSi`l-!7-f-y&9L;rwezRuBv0al=x|{lgPROI4bK3HEDH84stQb3PDA< zJ&e(h;z#TidcpFxv6wLC5`CMZwOsqHE23ENqt6s`*Z0Pjl7a}9mc_(uICT(jbSz1r@U+g`5Xd|y1)O?<~# zKDx4b31~JeFQ`D-l>LST$G_dd(~Ehk)RdD}gHdfwsg4LaCG$N_@A9ZLVjvNwySeNZ zhIoylv-lKRl725&=qpZ9Ugw)?^zDJIb6Z%C+!oL=wq%WnETHAP#}cNfypx}O+h3m!a6Q=@8-Fk{ z#%ht|r}8KM;*w$QtO!m(Vc?0^^sBlHvWwO-usRl(YF=@W3UYT`lrc@@IgfFcscVea za!V~RNs5t}lUh=J$Wh}^wYx{q;`~y#I+sUp$V0g7%D)gphDL=SP+wP zE~ZHPs5GOeLvO30Ry5gr)GEdEeq6u!vZ7yDGX?%Fzq=DgYx_iZh#O~tRIafcg$i`P zP0eGBGp>xYi51f*A@~5AjC1UF&5H|5tQ1}$t}JirrRXsTauvt@5Y%MJkAPRX6!s<- zQG_rVb^iRoqNSGE0+&`8*-mfGHpv>%NWV7IDCzt#;3TyK_9AV;z-d(XVR1WRY1vv=StobV) z(#6v>{*k#<%j6^%uO+q)T9}NP&yJ49UP7)s8W0FHUSykV5UACh3nrFL_F6rg_o7^v zelUA)jc~vt%bt$jH}FG>Ec~`^5whR!l#p}hObP7bK)hCza6!i2!;N*ESLOHqO~efv z_>CC_rJKca-uWUI3A+T75zrkUrkqD8uqt@ZFc8LC*+XAr@6Ulv{IqzZVbUcD^oh^qMeD@q$qA{Pyc;Gm80ShPp#1 zWe(gMAy*F>fprf>d#&+=*MvJ?H4XLrgSf{=_6X0p*JFf)x3+gZnI1Ql!wi?xjD-O6 zj~%*?Csa>f(l6{m=qvX z=jxc{FHRm{xrIXQ5xBF9Vyqmf|HT6B7~W?|=AQr(;1p036icxtJepGsk5FiFxv;nb z2Z-<@m+HIRH9x-l%lP%($1e#Z7RUJ&MCSfbLG4P}dJLP)QM>!RYOn3>;#=}30LI|`71|QMQPyqVo=fALyRUdJ|V{v zW75Z~@eZG>n{G*0Bb}{V*@Cw31JkVrBEil#wv|4hV#48ElM=L58CQ7mT65`kmEk#=1Y$P ziW$7u*c_|^+v=zdvBAJHY0dzlw^Biixa7m_c`+(`a+R^UV$Q@+OW<|e_a&e*PF$tF zZl(X;APk9776SqV!@8B$yLGTeZZp#)uOxQiTSZ7& z)M2dRoQ=a^eOI%Sd7`#lzfLa&ndhT9$b#*0Fvwaq0vhuOz3xZcvlSyx}D7%CTcV>on4(eksW z@hh&RY#p->OP!E$@fnxc^H{|WtqT3k2K&SGk1gBjd!ghkm--pF7g8bP$j{(^40+q0 zrulY1Mn`Xs$0hIE5q}aP)0hzlcBZ0_^o^#j=PH@Qh2+b~`FC(CP7hPODT_3WVIK{DTy7wHMjP6uVD9{pBBe`>)?y zZLi^r&0ToQEy*upK-{};QipKO#GJ4{;ykF5rbTw9<*p#4wvap96u(PsObdp3afP5Q zCN)8V+oC0I5QS<=0&0-$Q=NgOr$*&bspw5Vp~`J?6N}FUM@~;5r4W45DTNM&91(W8 zKU2QX>p>~-{YlGnP9ikE%>m_bc}{!qil)IPVyU)|v3d`mlm-^d57N{ae3=JZMV3nE z6|tMJ&*+I*MRoL9jwepum1xIaqcqB+PE0+i>Gs#-7t2qd_*^TAO7_y@Cq3u#P>wca zPyVnkD5L+po**c($XtV5&#)Bc@s)>tR)W98vE~)bIY69+64n4SUymhp?qyv&aFpk( z571g%V`Uoi5Lp})KvA3%`w5g2pouN^>dX!!Hy(~h|0R}}Xkh(N+z#sg%ISNh!ryXN z5}xE<_YagZz;X%E(0y^9dCHZ?nMkK4+PK8blnED8DhUQ(@GnNC*;tDKkj28&un?36 zhBOCTfC^^RH-<6~v3RmK1YE>7?Xkn#Ni{^Z3y(4IaM%|#J-DbE`yP%fUP3vktBGk3 zX4j=aQMh*cjd=K-wF2`)N0ga$?Ltv8$nlD4aBM+sdC&lfq>nu>Vc}Ia;@7NKQgaCU z(2M^4+GzR~{vS&;CGDnwR(R>|$5x!%B~eb)zl?)86QYvh$rbzN%ImPZzvnk-9m-U< 
z4?7+`BH1oD4{%PyQ_NNlg_6}pZZY~>t&Cp*C&I}|T;lJO-i56^vg*c~WHq=4{htU$ ztD&zI56v@4Kbhke*FCM+{o2aC1LF$X8t_Lv4RfwBtaD^Dg{;D{`>kYGEIa(#C=yzgww@&9} zSlwH(*LrV&73hHG;TEs;uqZe1gM<>U7o78*{a5RDQjk3LJr{RghVrK+$0S?&JZ%j{ z`4sAi5m3IuEx=hQ-Yk@k%1H{X;+d(XNvw}}(-=|w*#&96peN=Ujy;KQa1&A-n?E2W zuInqtpcJ4Q2zeFgs({2;UZ=zyc$|MAhP7jqVr_#DB49z8Y)lu@S&(Zp`VdvcVdMo?d0dl|oMUusC#K$uSp%f$@itj|70qi_X>X_`8gY56Vvh*k>Mu_Cu zqimfEP`uPD;XxoBUIcRAXnilyCCwyf1>HZpKuMetkMP6E6qeM$&^)7QbKv%fSD_zQm*9;v}K#!I>G3Athq_yA+ED;kBFUM)Nc z$?Pmm8ki39-1{Ct z4@aT^s_N^WL)0s=eURYn=1kWZnqqqKY{5nuUkJs@(sE3xm9&?;?AL0vLh+WmW6@FyS^<~2)zD{+$skB^l64ICbN@7 zn%{l=;cr+uvBB!TN2Dq2n)6byL6Vo#TjowQHd~e{rAAiQMwi^klx~vqi%tv$sp^_r z9)=uVhVXC$u0<}DSDuwXrPoJ&0ff?t8NQjk^J?Q00J~<5e9x^8hPQbex(V$X zV_k6?xc&UA{o`BIUxr<7#^n-6-i}Ilk5`kVp%rwrv_|b;aRtX=L=Bih6P|w&VEG2Z z_MYWE{op~K28hAXnxQRthJwKHNFXhPqL8EeN5W@X-_=aUU(fuQwaI9D;oLlz*$J;< z=j0j3Zy+j%^SF8=^Q1VLT!x;JdQ1r^Vhm(Rrd-q!k0X=*mX+@=~UHc_JACT~{G|a(EJ?+9MZjb%iY@ zY7{PnSbT0k@w<4@P-a4Al%4C5-njG^))enV-aSc3jxNvCJs{SkQ)|`PTVN6~IC9>z z(i2fl*u}UM+Vj`HVJSTn(0ESrXb`+wL$IeMx}(m6!V!6eF=&Wamy3am5C*sEeS@j+ z3Oq+jGJo?c-~P7eszM^A1ufJEbXACH!b2C+5~+LIO|5H+#X0!+Su;Op>4BSHqk2zH z;-Oy-@(zVO-DCyoW8llS1(M1hZD5il@-*I}^V|W<%f-X8A#QnDAg%@P>Z0r*%7A_8bO%|@9n7~$r0FxhFFQwb6`*W9- zmsiEP1v#sDpvS&<&@GQS>}@vB?GSPgA3i}g)$zka zTzazNj{zNEP1g`Xixb7T#L48#itvqOv4-S&!t|h6;|JVHJDo55e5QFSgn7(nr?a;f zqZKCaM*Q*~+wT4=YZNG-93fHcVy?$5=j1Tp!eV-bz!m%C6ew^_<^#8@^kI9#DrG?z z1sp|^C-jT)XufYsWjZE!Qtdd8t4Z2BSnpa;u1N+_Ib&6c+g)d}3W7+wH1zd1zvU%q zAhE$&d^F9bjf6)az!{W)8!jTak`^Sj8;i^hCmb2QKNkJf_Ig3#Kv#n~QmTDv`IeAd z&Z>>Ud5gX*vS#Tb=sJn4_sDTT}y z9Ovn_Z2=LA$#8l8$JA{NK1XMKHAxj$0Gt z5?7^?-czR@f{Fp#*^FiWwcm|@`;~9Si{~D92{CYx0d_VVXUqeO4olgqa6^G3UP6e@ z{>#0-XGU3+THfT*p_eH4OZ6hiZ5P2@gIg3hjCY8GjJERIb_ef8QtcTnFFo|@j>q)5 zA|m2dn)gML|F&@6Wd(D>k8;@W2)TKNkf4>DeGj188Tuur2r{o&qts2xIE;-Y6B*BG z#YRI9^42R7zt>p8^4GfN@2!oX9`VLOn#BWIIdKaO*4AwNPtVKZf9H~ z6R$bv1#Z`qSsq*1RbuE?5K$Wty#2T<0 zj!IB^XZuyA&mTT}7RR)uyiapjL`ZfEfv}3ZaBnN+;l=_ooX%?L9mwB0D|xLGMY3af z-JO@cnC?JopG0uMZ*Q^Hu{4Ah2c=wT@Vcr8L z{Gb(&F(?E%rKe>Y0oxnf>uX-_M(EdSsNy)tbB4Pi0^&x zdtL;HAxH_7ax!y-w|6GY=FGhm4C59XlEg^~SQeiIq)^}oE31gM4CCX0bJL?|b{O&hn zoW#78q>LY~52hvmj|gC80F;VhvwbdpSELT(J9c^N<(fC>M>9eN7pL`VkCH$CVtn!5 z>odMkg2+!GfUk4r`GO9VH-|6n9}z<1?^vOh;wZ>7bx<(oje6?>T*9ipDf`~&!WD`4 zvv%k{=f7SP7>w0>Y2JvTNsq{k5K!|k_DKB2Dl6WxfYDHfNKpiDHzXZuZk47trKmK- z@&Xb;4A!MgDpB?mV|sVwN1bxZ2p;CQKUY6)FB>V)H}3jJjZKX@ZsD?(V02sWyk3wKA+>PJlZ~V@kNQMDpWCdFYOSNRY)wU z*W8X>QiOiYE_K%BN4uCFG3vw7r3m(UaQb05a5*BnZBj^FG%$3h*OL+|eus}cN9Kg| zudF0t0++mp3ia~)_=^JrJ|o7UR0Xb>cws%}bFRb^;i30KsU7Jr+?NsvxrVW(+&rEE zQ7Hlc{VrxCz+;Vsf`KbYbHSduz;iutmdaX+^r+m-D*4OR!g89?D>gP{msfe`A;hL! 
z!#uIQ^?nRkr4^6kkHImwTr4JCp+Z>6^EFIxd^9*2Si=*}Z}CWY1mgRgLJ`y#Jxl}C zt31f-Jg*iqkVFQ1ZUg#dOA)Aaf%6mlhABq4D&|Ktuyo1Qs>UbV>wEq9O~V_OBZuwr zPj{tZ;@*K(-4v`TuZzvvMls+!*OCwE-Lj}88k;v3FH=3+`Os2CW9_%)273(?m-VPt zdVZ<<2h$s8PSV<4B|R4J z1j)gjT`2(mKib}{InwPo)B6n`ha={4W{VNz9*HCxK%Z!dasenDx^ZGvbvHLDg{A-$ zfZ8||2%x$ft?Op^7v*ZR*25R+m-*;TIcvzIy)HAOONP>7YFyfRUgTVAT9W)<-b&KF{4;n5?%`_1XFn&Uz!*@zjr0kQHJ zC91HrykI8TCZ!LL+-4G%`M9e@c#)tM={NMpl{CQw_i7(KOLY6iw5TB+WwHKN>AGk+M}VpIb$VO0 zAR(a>O;jmZYjPK~Hx^_PK2!mW(kTZkuonEY_;|^*W7b68 zVA-g4UqN)%d@|uB$`ik4^v&jt)Wdu9Y&lfU;n-kK6{UYUOmA*-bXMXnh>aQ>91y3% zJqjLg0d5BawMZB)5eMA2Q`(qZXXp!sgMDBr!-Y^A851X40>(mz*O!SqmVom)F@%^R zR!Oa(KJ{+8{6&Xjp4Sd`6o6?uajag;qb-Pkq?eLo*ey5uORtqlo2mqeUu1JzpEU># zOr@He$GyapsFQ6P5;|FQIFCukk}8hGCeD12fk1KEA}kxAhF?p&r*;K{4A@oF_fWGzMX)9cQ-%Ef)8E{~PrBrx)aC5M;9rQq*LytUbiftY(dHZ-} zAg1`LS3GNDlc9Ak%lP1Kn1K_3+{QZ|VzgX@6L*x00|-66$h_VD%^fg*=J6`p6-)dP zeOh9P*oTCHGHSi>cp-L)?*5zmzc7u6#Gx1}MuVCwVz$nFGYq##^s5G`ji-ra%fzO~ zWeCPre?RS+$w?VC++1slX{#?!pC)`x=?AZj))%r4iFGjn-;;&XO>BqmCyc2qX-H<=VRQMd%5NpE2h(lGH*{S*lzz*? z!gFgfg2r!KFZj24Df{$y#O^EDM{YPx$DK;!&QHz(YWNMHcrZA98TVILyt1yeS9~`5 z)A_J^gv{fwglNxcqMuflMht2^8tDadE}dWq6Q$jtW(Wx85~ZNA>&YpIvLG=-J<`Rd z_=(FGi&IFRGmh6^5nKe z5kal1Twbxj=p>sNZ26(1lB%h8M+3 zzz)zX$J^}3byhlh?sqa$K$q^HW%`%G2V%{^I7r^=>^BHsP^+(QDs^VbY?*8fIZ>_{ zOT~c;PMsVcPOk{cSTYeJhu>~eyd7SYmTK%lsL3`JEFK};>=`}mObv|DqB4P{5I`YC z=P%F#wFI@)z94qdN+|pzg&i<pheJ)8F}9;9h&9MwX)M&44~0f2X*5zT0V<{(Iu;!P*h))! zQW5BwLydeny+-c_FAAxa(EFeKB<jgk*Kvvw(kPAx7? zyJYiRNBv&7Q5js+Af8D(r%)Seu<0-0^N?4^;P)#R7oG=M&>E$(L&Zkf^qlBZR+ddw zcZO_m#&EFT4*^6wBuCOu32-a(gA^&*KLPt#gJ2uJ`9La&{#eo@7}G?&!DNo-cU|8g z0j(Qq*-7k_iISx5d4{`(RrsD;yKPc0-g4?htkkl_G%CboAPFw9UfZe zI(Fw1J-;KI>wN(X)3D}rOQ?MMM?Y$}`=;-s02byeMBIgD34ZPybY<}r`kfu`8)yQ^ zsgiqcq7pXcp!@`S!;dr7zM&p6m-DKJX+#sMQoVS@4zM=Y5|IZ>oIeFxg0vU@RAdNg zpPI3?%Fb(#1Q`L^_TJCwVIHTt__R1Fhj--IYiO|F4FBZW)&V&T`JF4X8~T(LGrAF` zh;0*dKFzPZxS>fn)-P(gUQb= zE|)0R@Bj}oJ|}RvhcWJ#pz9Z-RN~>_+qQRwaYaOVq;OO%|Lu^@zHbd3LlQFEPV#jC7)iNrE5b5U&YtKKbys~+c{(! 
zv2WmkKoP0FwP{~WYhS0{Gf*@ZSQCnF2}rjj{+q4c zjQzbG7b|vWm;n%+=T|GS-6v17)yGD!>*v14@n@l-C`mgex^0yLm-syb`!C|!jc)ML z{Ddna2jl{SCsFUj+lqdHn-%-1eUP~;FyGw%)@L!DH~{p^*9sQz=D>bSj~4vVS+3DS zYZh^s&u9@-vftbrW!SvCv(oKZ87f-bW@Y(sNAFoH*BZqf#QfIhpT{|Alz>CWJ7nbg z@h2J?b7Ed5F+|)%(}^B*Zp@@?s+r|_M4fkw36^!Rgoh~#CcZQ^o?RS`5Pj)rsR_|h zIVfeBp?NHi;5FW~h0zRiOtZMyVQj4M(w6j$6SU{mV%Wy_11} zh^9wJedZGAwfOW=kauahIIp*eZn|+54ce!&zStI@{@1Vm^wa-#U#d~mGNu~wjBmo< zil+mi2(D%*V3{!4(h|tba}Z7C9+C!m8*=j3KlkosR?I^xn8r*7pq7iM|5j_=Q#%%3 zjTyY-m?DwWtVV|IyS+G|*|_3331Z||MQN{+A_`W5->DESZ~^l4H%vsGs~rP`uRKTW zM4d0;y;v*ekW0=WE*Z7T0S`r(q!S50Tkj(^*2#QYMk4mnIdsapDJ)cUVn6$Ti}s@zd~3MD#K)N@X88SAt4aeaj(> z9SgH33&c>SYof{iYvV2&0r@wW*loy2kVXBWvZng}UV{HB$}fU=zyd#j$q%J5V`h5F1{4nnM)#27_1- zal$|G=@9N!*?rOY2#$lf`dNGsmQ!&En|4N7TuT+!Y_YPUI#R)JirAp-l(@{?q7GiP zNthd5#ditW6lk4ty)+Q1B}z-r7Gg6B=lD=DMd$M}ZKSeaC|g1~?225~lx3eSj&1ca zx4RpAK3i}u<1aa_j9F2dkUcYzc$I3XRg@A@jeqfg68W|NkN>=e{yWmfeyc{Ewkzd* z2q!~bEUGg0KC4Wv$^|jdjG|zM#V3oF;Ip=_`E9C6FtY0HLlJRSnG)>Ji67Vnc||rm zco2QjOCOVtjgymfvLum*rG%Cg>w}RP8y?H+H?WFlsqL#b8|H}HKfT=bi&xE;O4Jn3 zq$N827#1yVE9E&HJlQ8dwzuPJq!KwF0qI=OVRI6)Pm3KDVgw^66E?IWjoR z#xA0p$h0y_A4UZ|^wY0?P6qP7@_*yRQFyfYzM0QSaP2U_9c&^=cAgHv)_T{=qG-)g zY;XfLS*c-pfr5atvuI9{)xhrUSe1DB=9?h!ST~ zkP)<&Nv#0{HT!;p`i%oSy9O^C#@zRQ-(P4GyD%!ez3veWxBLO-WVN!KIN(_b&7>r( zFjo~uNs_|aet?>YAcoS#P@9J|&YvtV`^`xBi;^tFy-<|MS8o*%B6q@%CQwCLq%(vJ z218Ey??}W}u~b&8d5HBB%EcllV|TKJYOlBrF6*>Nlr{riJ2HWh@dBUJ@EZmt$C}Xv zlskxM19=c{C3QpFt;~rCBvpW8=rXQO6AD44FXbjWz1{~}hp+@!>wCSmo53s$T_ebB0*2>j-QNog#O@j`47qqGE4K;qDY zBHn!rXT`r!hIvKkhn!r$y0U6gw?br02#Dl$AIcpn0G|v(S-I>emN+q;&!ihc1z@# z4leXZQa8*vnVkv#XTO5tN#dvZnf-kz6C8{?q%LzK+RC5YN-qARTq-cRv?Sn!q zUtr%F>=kdAB@E1QKpAyaN_JE3L^Y0aCkxs4nb8#cqE~ACbIVQA_&zsY2XAWcfQqRh z5HS|1X<3T#gthU7gLRiPgoiLOC9%_~rSk(d)bJY^EKOX^ggv3C3_%tgLW;Cjz;Yrm zN3b#GQi_AKumDpymhR6-y=DlUD|fF{Niha0bB)l7V@H_^S}Z{wrr-VX@Qg`7b@FosJ-3 z55aW(*A>KZsWkxZwCsbzhJ6^3YU4$d|4hyiT^kz0A~=wCs9^WvU=AklYrDq`P2r0Q?1#%YVe+NBh;1@M>iPv>C(~T9wGwthm4g&_H~4d`s_NtQr={zN5M&xyE&_Qc^sy$Gl*v0NmUG*I`<4%` zkpjRxh`nf`=ca#~dpH}tj+JN15|Xl+*+=E(0~pxUDG8#_X8u&{6CLv^Uv)6&k4YQQ z#;4lCj3Ml_G{&n8vQY6_(|Ponp_(e52*Bc?=n}Nw=pY>ha3Z~~PpQHGG_R`?cGHt; zfXA1{NO&9#BT`JVR+v=R0-Ivt`iu{#m>A795!ok16-UH~X*_Z|Yy@crlgq*&a3ZJ* zM=^zQhxg*!_aA@6CBjgTt#bwj(5WQ^2Bl4`S$kb6QHxu}(i9UYr%LAI@XRCvk4cgV zm*_CHBN4e*8ROcLWmSm3IjkL5xb>z*10#_p_qAU+e_JqP+oWiUCvZTf)widW9@;RW zMh1CcN~kV>PQ3*~t`c;jMU7+PS9in+(tRF&sBONM zo|f3ciy=L8LSQaeT2B|GfYeJ-$&y1 za)g4X-4xTeLur1za0LWa>O5HabeKKqC|(?=2?~IYxr%Ct zwU6?m4V|@j^agv==e0_w?L;Wx*Rh$}SwI1wvLogJ$Y*!b7=<;=qa1%A^b^Z^LCXKC zbpHXuPG{k2Zxd=E)=>Lo;;)G5f(1H*Za+>9@KLQ$+!@d5Qo#VOkCR`B#h2oLR4qAh zL~TloqxpKe&P~a^?Y9oQ$pz}qc3wmy5myA$MZQX~o;V#r$y&>>Ie`Gn2vk}~etJI1 zGzdrD(ob;%XdLNJA_LvR7M+d6i*trm?}%!o&wGJwWWG|UoZ;2B9MpLG@J_HEh5C5U$%vUw|wFGEhf_U}o zXxIshn zyK0LUtG@rZwD{P3wnr~)WV!jQ`8~);O?@l$AAk4|#F?I#5TvGbwxP-K*;dM#S%V&8 z+(C(BQW@3HUK9OR!FQdVZr>DO4m19jbbtkgs#DrAnT#8IYi6-FUAZQ~-|Y3a*W&s2AHVfu z^L6WR6uaF_ANu~|H@+25S1+03b7_)1W#aMIu8ipTfzXd9vx4^=rm%$+WjGJB?%~rC zFk}>0S~S1@>z~J3Rc`J>R?-O>ow}FoHORd>bzqCKaH5wbs<5xnJ81(S?>fcL@%$GU zkfsI7;j(mjXBwoOOEZAtJas)JN(j7ELa(XdCx{_%K%pDahFc)ZK3dE#_@k^G|&sWjF}(e zH}qN_pN9pvGJB>z7qtIesQ|X+*LqQRRB2~IuYl7m5#V=nVx7V`yrv~skV&vBk|1+r z5~>E=2h@DT{*S&kE)7?IcwQ=Pxz?2Wdn5>$waJMm2$LF~9Aqf#_^NO|wZ2Cb z;>|~_geNn<3eHL;bGg@C^EuCH?M(>8Z+>QCqb+=3le1#r&fZute;^nYYLEMJ=z%kc z{erC>qo7NU4bzqI8HSZP_l@{Q5UQXZfLqEao%DP+IVYkLZz2R4H-|cDq6KQHSh;!c zX0BmBVTd{`7-LHqvuYS!30OW+b}ZQC{^MRZ6aE+@nAjmVxS20a=^c9LUtA264Ew_Z zcp@MrlZ&u{*}V-MJxl|cog*Vni@wTl2m$3?p#;SI1}^F9)E~yyrYZ=+c|FR?sua&> 
[GIT binary patch: base85-encoded deflated payload omitted; not recoverable as text]
z=%UZlZU#wRSov^xS_1UTV3LsJs7JquUa#s#YE~0fRmWD86^0?r&Eth=r|q@gW-!P8 z@aT*g0rX;y-{5QA;tIl1LC*np<{7n%e#^;!I$s6sJY7FH(O|MS>tv}K`Ze!Fi}cX{F~{Zwd3-I9}SNDxIM%@MvzYqgNfuL0fkrr?V;C6HpfZ*_@8{& zH(?X09)EZ-x8SEt)oH<}(YFkf&)hl)lbw!QloPrIN zl)xN{uXBGkd#Uih>ckrluCeD_gh3p=gl&mFa?AV(4Ct)4)5Xy+KfiQZ`9crTV%6>5 z-<0no^2-#7HpNbI$>%O`CUMHJOEwOI{y!jRnvZ=sK7%vkPb~)>v|ZcTHHrGIGoO#Y zuZXa2dwfLxnfN%qz3}WsJQd@3Bac|r21?_<%`ZL+0?LY}J~fEM7+R!O{6xCv{-$_A zGzKEv6<_VB%T&!4%lx2apMN!^>&MNsG&kfEZdJO~*!$Bz>2HD(fus_8Dr#@YnpZ9Z={KYWJk^pW@gVcMo7^OgB6Ir1=J#g+3bztS* z@Atavekj>rlJHJjZ(wK?kRtKL;cXcq*dN{r(<335XvbFXqToGdsF|S);|6cP+zED~ zQ`VEAUTT1r-#c$baO({fwx2gx`n9Yop9}a@*xaB zG98FMn|x-Ob~oSzhZG=!7G>=lQk;Qr;jnfC@y>aEM&pe>0JWx*E%KXjctOsD9Im+ZUh01rCq38-%FF3D5W|M~qH1xRrPo-dXLSoEXObcAwn7>(E%TByIj zcaIi|$IDTi5V!ML3kE}HcbFpfreNq~2;T^1K>@U$q+Iu+Uhf7U zMnwVd%g&cXh`qcWm4IxE5yQoPFR3dcmUm;@AoyU|_tQSZHqgn6D!}wE753Rp5fdvL z{r$a-?ta;@n!?FXPQKwf((}XbK^!Z@FEeRIfN|rmg_QCsGcZ1(zl=?M`rN7_azF)l z*hYy=vsWQq&aVip(FZAQ24;nuLeHMO@B_)FU@wwoiPv+7IW?awP<*OUOu*5V3ML%2 zCRuJOG@SIW0J*elHSY5}RU~$!7PYb722xUEBY$R1c0BQ%jLwtI&WIinrtLWI{Tc>s zZH}%o)`I`-RuRYJ;cd|yJi7&o0 zReuqn5fK!h(}d0pZA41>8aIxCY*4^Y3d~&j6@39Pg@ELGec2`pw)QI;7PSS*96f^- z>ZK0y4o*q@n8T=Ll=W4NPh8mw4mi2EXPKd})>i!0+bh(`OvMiYQfv(Fv3ErVTIQ8; zKbyl3K2RA80F&bB?@IRc0&@h=hlL=1)=h#~uqo7DvG7Lc@E+J<1`bZL<}5ZYxmoHV zz}DAXCsdyLOI{qs@#I{K&eue`NrC>;j-A9DJKsX=6v_!T?En`c?oydZ zdEsxzL&)Ut!T5>(smStp^=XcTvrV7m`no zC{Sre1HCra&#T7fJ}$=mSC|a6M2Ro8uBe!}NY#q^Yxx8xH!vdNo+=eCb9jbk7sL0J z$Y*pbFfCT#Q;&&wJ==_q80USlv|!Ug0jqK&rrMib1${Qg?@RushozSO4V2v6iW_u_ zxSEc}h_hu3H#p-suf?Z}@kr`O=Y5luNdQU5@m7$Q)a6)k1g~UkaC;ulR!Di6e8Upc z9&A5L}!pZb$KTXR?H$;&mpAwP6n^dp6jw}gi8 z(uoo3A81-|3#If!aP4^Q{tZr$zLB@KL*gnk9~?MCG2LB){u7v{UKOKed<6jAGO#q* z4IivzgnLIRmx+A6^6OVDfVjAdmF%C=kM-dI6i`VsCXXu!|Z?x-3seMJkJI=4;7AWi<=EHU|U}DctZDNGDxrf`$1Qc>}luee8?V-QP-!kM2jhM18+MaxlH%jAj-^^I(GQ=Da>;{EeRb0vZXjJ%%_`5T+L z0`S%!EDO|t9ivT|wv77xq&mvmGRyoeLz7&K|Lg4pih@s*}m2)Bf3bZ#z-k@9w;riFfou;EJhnQDEC z!Eet@{yyjA^(SmKMHaAy>I#BZ-%8> zYI#I{D}({%MBj<;w|Goh4gd!-9h2h5F^Nw-`^y$63c;0*H@zW5mG-bI72hf~R@4uQ z_>#&|+vamm=Y#&!Jk)J+gaj>N%2=rYS$^>;Hf@$#Q4zt)j_>A1qHj~se8Q%MfUthi_0cMu%$(c4=H(`bsF%w*EeHawm@;*1VI%~at zFyz9s1;6bT{hj^%RaM=@uIOo-S4#LeB?aRd&*q={8C#TEm{No(v8hu!)9$pu-cU<~ z&dTuj4)c`hXe8loVVnKX{SJ~MMNcpHK)l3SK+pQ^oP>09>_G?`|B)=Hd>3B;;#gdt zcSZd@dzxeKX*6oeP*7KGLkwDcPcN@CBUyHD=zAWoT>&u4NXS~tgr9d$*bhBe6PpM9 z&Z=LyhPRfnf7~#4W1@TDOcxuFRgZn=OhLv{4R6+&a-mGjaVIyjj^h-FB>u6V)xRzo9LzPinA=kHF{lVx zyhZ$g8V*|b8=BB}fDq9b{19Qy{D^LDZTtzOfqV}EL$~PP+$m>E0{W^juv%N9hzAL2?h14TRFq z=sB1x*JpLfdlKU{|LsEy{ifVCtY#8OGq5gveydS9;Sui<)PC|}q4dim{XjK@By`YZ zo!)z@KM&$!V1uik(HDEIT~3L)ER#f`4`gr?o)A|-DcchK zgmzOPEM7Yx6z4UyfXpZ@`8Aj|f!oiGzh&O~U&R9m%=}icLrcT|to`pFnycjW59m)- z>|-?6@o{38=bkP4ET7F~iN?dHC(kQkX_u)idvJj=*|jQ)`Jg52h6^jT9oxpHy(3-a z^Q6`2-RaB+UgF%Ig9NZp4=yA$AoS_O{R<4;x7Z%^42O@d8 zSo$tI875_yUcB%ToZZun@;IfUtler#2`)|vY*3$=%uEbcd_#7rh+*#ZRmPiItSmjl z<<91&>4QZwDv8MMbe;%N34eqx)NZW`q+S6bQ?&Jd;Cy_Ri%;ib$3&ot6k!lOAB@|x zyOZuBe*V()*c`c%lEqQwuWU0fXX|KfmJ7GR3EL|<-6U6F-9ZP{IgL;UN z^o`-(y&K*me&X|V_x$_ch#$sZ{>3kU$7H8Bc=KgqG34CfzM$ckfA#rqW81ul)_G^S z3Q$AqeRJL_u}k2i-UcI}sPdDlvMd7$iq;Sy)Le=FM2ax4-7mrp;sV4Xr$`OSu1M^u ztsdF7Q?!E=t+$TBHad|N;#bmtCJrpapFLOSsXt(%v$3EGWA(N>7V{lU35+_aq97IQ zjL8r`;Uc0o&0fr>a^KU2bSQf9dJIwn=@^GX#A-QVD*{-9G>9`m85M7UrUi~lc4G9(dhSbSxtJH>PYoGKK;%g6|UXCa6 zwa@?d^WT^u2oW%)j^-cnMjFmjgptByc#)APmT!%Vh>Dio@KtZayf$=`SVd>eDe|Ywy7|_65gt#>oUUMpF zQ-6HFj9?##(lXHQG4g~RdQ=NDhZ z6NKrCJ4o(PrEC6NcdxXH=ujDk#3+e=p$QsjH69@U7K6t}JWV^xoGhq&J?2-2bMY;J zf!?Kp7%tuw<_!!-ESd7kKZrM~rxA4u{w6sH0Ej_*0+oOJ`R`&wqtn|#X343*I!2Yn 
zx$FxGs-StiWgpg#FHRUYR^pmJB%V<4-^dCz-60MU-=)dz$V5OdaWE?IENO*lx(qMM z9G)4o@|@cgTCafOaBF?Gy!7h!al%AiAV{^@*^YICB-p4^>6jHVlY_>h3`)@-jzc5Zcg!pDK1j}k{t_mwT;i4HEw+mjq8A)*{F4!geq_U=)bKjX<1WMs=+a3QVEoj? zZTbP1G)c}q4jN86KB%5`RU1);Juabd8s!MV6bqtQHBNjBzgCF)V&E{m0sy^=9ZbO= zO|E@JBsD-F;lpQ;k7&pR!ozk`kD;fiXpb{xGPA-P!5a)1EH{i+NRGj)jh>piOndsp z7l_qg$MW9{O0bly$!v@X`P)pXl3IDCn5Zj)&|pds#_s5wS&Q=fov-3vHNJBF0%3>I zqk$gZy+q|?+F#X>_a*bk75%eV{~>qR_frEvYZ9}AWr4|@We|d*a=pxTntD#eT^cK1 zO;mEDALoy&MCdpEMB|(5;uQf|;d_>lr$nd~A~ekz0N)%BU43UycKYkVCT*~`)bE@v zKDvm~iSEDff4z609K)mR!Lq6L1BH($a^q-{Q@O|9;wl%>yAX&&WQoJtXFwWvAZUPf zJ1J?Gy$nNeN;KwTpLvzA$NuW`Uz^yne8!kPGhWHU(}m!_r6)N=qqCc}2}@6&d2#e| zzLXyD;we=x2%TvIZBus{e;6Q@?-(dB{B)h0e@Dd|7!F0x{USBv#{_$C0n?~(hp88Y z!K|9x)J)?#4lxf7N;Qiv_O|e%lwE_VhRRjhVV_OMboo4Y;pEd2VPkdyFa@BQoK|rE zm*4;VZ-NwCm=3dLqT~Tum-(U{DZIfu=5w(LVsGF|?X28n@C89Pj`yst_DYHLA*Dra z4pzJxJjTXKFFrgO9;Tk#W?v?1=jN0!L=A!wgMuT$OAa^eALGouH;;W7AwV;X=KTWt}M}pp(r%;sg%FRD& z(8X^cW7yXl?_xzOC}w5sf+^^IbW`f}31Z>6r+iYVfTm!67a&8;JTez9-(zpXzNuOf zjVvZsii9to{4(H|#7n5Boma6vDdl6B&w8Y_3cQI>E|>xk0u;=sWmTN{;Np-Otv=h~ z5gR4kX%gG6yma>K8tk@v^l=DxHxZ$ z(c?)rzjwOQ>6W9Sas3o^YLe1c0K5d@-`JWQ_}aOezQ*N-++KRqOs51pRKr6gc>sBE zM6p>^cqxxjh`*mW`P31|}(Sa4KG}1?Le= zBguPJ3S3qkI|!1f>`$a#L3QYS$lCLPA-II2+PYFm-owUJMGnC-!Yh$n8_^CMeOT=N}GuVK8F)zgqjFd2)&j$Csx4 zc1tP%rkQo2P|va3g%?l#dL)k`uW8jz$Hc`60!OJho2`L5qUW9GM+b9sRhIhNvrVa6N`V@q0*^|d9?KcX zkiSd4RuMFGD&qztZhc3{oeKF^?xgyxOmQ>*l=|K%{}CGdCh=^`wvx{V*CmRhCQ<)M z9-$A&uLeJ!J$V+g{oUw1g{!biiOZ$As%7c^bfjLKoF+cAGIA!R9fI6vuo=M^=qG%@ zin!`<*lKRjZ7}}g;V&(0X=A4ObpBTFB7Y$BUIn2l7HZQ#9k|+v6Df@M zU!bCExxiw z01HHVNY0mkhm>heU24ji3>^i+K8yLK45QJWuZZSDKqu$eLa_+-ptJ>u30a`Q`{=(o zpPTV*7Wac`m>eFDO6(s>og+^y36IF$o(OeKs{u;ny-u3F~V zlUk()BB)94(6yttIdQK!cYr%f*bw|Px$d|YqId;%VOIKmUNi7kt{Qf7fA9FiZts2&biI_ zJ8s|y8C*1&a8Ws$aYB@ipnAS!uZNSdU-1_Za>!)q;z7|U9#NqDk7!1jt)A_I@wPz(a!35^CC-Z*ThyNxTV zkt>Muxbc|nqf3GnIVu+uwsu^(8mq~E?C*NqMv?_n%33Q9lb}J)08oNC$pz!={NBT# z2bG&Opg}BEeW-g9h0f#^y|!iT;VzwuBf1&27Np+g?ogw|9f zc^MuWcQ-&(d-*tVt@OY%@BV z7;`xjS_&!yV&Qrs5k~Yb`0#p|((aJ>Px&f7sv1U^H#qke9jqp~(w}8)2Y6wMTG@H< z+A9jgy^ka2%hpCg6r3|A5%-yrh9Df)9G7+CA9rM|$0~lK{O6B=N2-fjCIgup91IG= zj#KAc$~^-EpagClj=a9}Voacllr1YmIorTH37r~zky1%xmJJy43M9om4j#%stU4XW zA_(y(%|PpaEB3}ri7{R2V|_R&lTiv3awS#M9FbP@^Qq7FsduTb$4Sf{qSWffBk`{B z5aPVZQ6(6MJHU1J9a4MB(x{}fx7N!b)53gagRuk8(yBr;l=!N=lVg->!80t=NG3TD z{O!(WunT?B0bskIin3CdKu2QX7}Qv>-tenoh7}R`gyHK1*cE_94h(EJ9UV08VT-_k zL4t=*zx})O!D%7{rf>ercNS~DjvAc)klRF>5f7>IxAT#2C(BTWx#+I2?yPM z(_ApfAe=hY<9PUO4O5uvk>FLHJtZNObc1Z1iA|_Qze6TqU^2w4M99cJYr%GH?W7&j zY^-Y3(ZU#4K3IWz%A1U(5P={fMfffLN^kVj!|-dnMb5m}_|N#1QsT*J8~d7k8zM-~ z0eq-_khvo6rCK>#N7A+X7EB<<$b0BrDJ0y!?-ag2wIVSZ!g zqc!{>1g*gK@g~e4@JH5w$3*4mgi*!C1`x;CuAm1c+VrC;O<$2GE)3coQ?LU^hXPsY z?V2)bL;)}d{xv%uB(6>?nDLg5C$~kutTYr-Ar>kc-RM2;Dtzpq&V_0`ib;nid>5~! 
z9I&`Oz`SPSvxwW7){L^AZqwg5oRlu$Q<;N=^cAIp&#h*D1o4ns+g+a&rmTxJ@h8$1 z1EQ3E0kVz8vwkKpBNR^tM~br}Twgj9APSj@5gVrBGHs7$D%Z5pv6#4TMQ%O>6~GEy7vJ-DKez@I=|Xc`_h_TyS`09HAQGtB*eap9RPN#tl4Kb zg)b>6<>Nuk$Ow&Xko5hNxS4yJKpP}ncs_40G0-MOQYlnvj-4ImHcbfluTP%8NSJ%) z_&C*u?r2eus-#TjmDLUYBvjA$a{}Vx(vw*5!>9W}=@OtYtnCY&*w!iHYKT;@qoNDi zSj&QfUbCd@lOjKT_`O)Vti9^9!Pm3#VOpd&bZUqu8bUk{x(qtL_5j~qq*i=TYLQ`V z^e^{sfQ=HlxV?)#pO=yGB%j)_EpJ34Bb!SZi)D!KFCP9dwt*>`;gXfXv4sxNgd?nU z3>itTpaQFAe)uG<5#(bsFI|%lHuw8p=q&)RL&C=K<<`px8R?&w2k>{1VcTj*V}BjH z?u_ApL8IIERiOf!oHDV4a4t z^)siPItgFLGm$rf0>+Pg7kZUzbT-kXlJX0NT>wchB*0VsSV_Q+p=k=ZlM!8{s^I`) z)MeBVh|{P60VWJ%pm;r9)OWdn9a?$@IhUSi$ZK$j;tB>4vZCAbPxM7EGLL|9n`y|Z zQWPn^uWw8>>u(O66ngnAqbT+EPHHlOm4g_wiYpU1pQIYEa{Ni#!M)2GfV`9z-G(JMjtr0cxWrWt;f=%RcFS6 z$TFnoy*D8NcTqg5wf?3VNB`)C0<4F?fR;2_86FPfW=~IKY_TgXR9x z8f#i&*e-C8m9LZ-BCdw;J$ZwIymUjsF2*9^Eq?8*33X>j*ReDcDH-MQ}98a zAI(9A#iK$63oC@=9K$ZAKk@#VgiG30Pas?dhBrX$%tR zr=NfRH8VV-3295VId>t5A&|;~-`ty4_iECEIYnL+Z}e7ZpIp|R4p1%OgIVU5%6L@d15!#wQSs5yy$31pBBfZ9PxVxWUj`uv*WX_bL*sw{ z+q3^KNGnQTmh8mt?L8hbb0+J`>FXTpJ90Oh>FP`cNDKa=q*iE7OP1W+j%q|@-+p;Mz=g*S^$b50_JwncHQS=sSf~|8(jhujy)FD7Qr)@avx&JHiQHyh6%-`G2NP`8 z5F+cxwK4C4r=OHXV4XCyVwJ7X#vPA^w%IWyfa*Qm_$4bJmdbQ$<_~$t-+s&xt&Db4c&npKsNTazB1%T&Y znSH2Ntdl-oG1>yJ$lra-5V6|01j*8(ah5Cutm4|W#;WQt=k{I9?K9WUsV2;kk= zuLk9-{DMEwCyZE|MDGY{QS>TMzGz7s@1vbFwr2398nR_fewzTiKekJwvS5f`{|zr< z4?D5b@5K`|GbeS1!czG-(xn;B))a73slEYI=hOmhWn8^3*h>%;OM$^DJ?R)Uf+OeM zZ(>T;-D3q~>ctKiLCfiy*}w!(F-~zl(zo&9elvdkVFEqVreWUqy2hH{B<`8U+sX38 z{Ep82Wng5wV6K^49enWR(-77{gXsndrJr-zSR4e&;!oUTXD6_AX(Sd)7My94e7E%c zsX6kcDrw`#=``13elc21F{1LDUExtcAb1O#qai4y;>;+NaxwNU1{X(T{0lfg$SLc= zXCuJFU9rYaf2CUTkgkWob7G%ri@wwwiv?dO5$&0{q3aXeXs2J$|X}H7RM}41w5%-ez2Di+TDU2|pbC_o#Vau8BugJ9zFN{5DA^8^X zsFo*YO|8i3YQSzX z6phi2V9gt(;8IEvqCF1n0wy7eRs1!S7nU26+R|a84Iy0>qrNNL!Jr4t#tjCROv5NE0_oL|zAfoC(M z6r9H8dHD2Cz8nAk@_#=3Gz9Nq!I8WM$y(zyw9yzO?jMBN&OyI~JXSm1l+kK4{xr;= z#TQS_YaK=KE%lFDu*iQ_!xJ)jv3*3&b+L|6gBZU>9lac=4$t-iDYn*=kjq--GzEq; z?d-M&1;Z#$j_}}Jxln3nz_fulMOJZ$n#lm;?)Y4uHmxgtmfj9N>7U=zC!yN>SQcbkld;sl zsWfw1ThC!FqNS!rn=p8c+-GnavsKAQWOWRfVk`(Sj zxf(0<{COEMj5Zt>*KX%<6tBlycb~xVX~}kXfcunE?eXD#MjH>nxE9LYmBO4i!%-Ru z_-$E`J2)*}z-VZIUV!U|#V*FTr5POL93KBSiRb_$DtDL&B@HrWZ}UDGH#eA^9FCF@ z3?C_`3KM7dd{J+4M-}7q?N}4Y%0B|`vlvr+h~ZaMcbM^pA0$H4HUtB5XZR}uKMru} zPx`^TqYt&M!of{R5&}O|lCXC3$Np2zBfkqJ(e3S+!_}h95IKd&0DD}OhC*A!_~;~z z?-4Bi;$N?$ zu4fPCQH@?LI&68EvBHn|+LJg*~F+tq{l;Qx*SU^wTqHWo9JohcpF+d!tCc9?>iufVd8n-PIUR{q=>#{u14sT!Czx8Ac5&oaq34)_XGfaHISo&0y6Uv=|)xb!A zysIK%H)z@|&(qIhFu8KN@e4$hN`efm5+1(Hpy9$$?O4^`GL=iClAh3503y;?$iZ?a z_Q%MxIAV-u{BzT2V{awiuO+|Lgfbrl5XM{{eGP zXzpoGEPoqk5=3-S&w|kSILM1uPUxS>V<~_S<@ZZoV^?)MejjZpFIq=izmI{Ec<||E z5`bd@gh|q;xc8=%M|5sjUB)dTJ;xZ_ixJY~Mb#<-_TQGv(PhUb>cgks_)c6>Kw?&g zuLznIpF(+IEdTf@m%%XirKR~G_2SccZw%;*W*-F~;f`^dUaIQiapUtsV=ms8Kc5jN zBnnY>emK5I<@)(OW)C7)^7#}d-<80zUWd7AaR~Z=I)6iHWU^fhKA6-$0QFyEkDCZ& z_8pt1fZzuQvD1F96QXeYl^+3a_wFd;AR7$5LSPr%Bw*6VF3Q2mIOWt}XSj}F^UcK1 zTamL|uqrsX)T|E6Z}y32s;b^}!2wYhtXS#hAnm6Y&*tK{3ri)Pa&$H*cpA)4XrK8N z%?OQeef!)0u)K#(jZ~z2Bd-Q0=S5OcHb!h}aA7?n77|1nbi0XqJNV#7bj~q-<3!Pa z#rD{eWl*3H4V8-2@#VtT(Zg#HZ5uD}BF#N{LPff44~{cTj;tXuSS(`#s%LpAVU^By zib%{kEJ2p$b$2hAQXQmeozMw97vHn>d8npOD*-D)6nxPweWLF2A(~^SykoxP1fP&- zZ4d`k7Qth0o<1vQ7bMs&{Q;Ix!WZ;4Bg2q!F)%jHlTfOGNDoO%Iec0{*mng~kBg-CfKaMHMV z53R}I?8qbFB&>~Up7;n$;|smTBPBh(gJlXk(ux?-gdumg4$L3r5${5I6n_}gOa?&k zGvkd)b7uB-Co~XjTtxfIPA@*Lai%8hW7|Zi8wa{qj5ivq5-lm4#59xA@x5H5*7RsfB7_6DgA`kk!D4xkZa8B#=ItvL7 zu0|i6QsB;Vquz~iL+j|B?xO<$z&~h0XVfI;B{sA-)5jwNEE#9EEQsJuGRNYRIzJex 
zNeV0($QWw4|_gfWg6qx5e7}BNn2ejCcF?mcVIA`pZ%)x{=Dq8_nW1EpJ=@ zDF3{ScUJS-TZ9MXP9{80(Hbl)1an#3Zrfz7fJcWDNhlqWepK@nL^Ah#ebema(O4P$a!R?Sl5?-))T3x&d zsQ*5%23q-GKbV37{nojy-uku9a>u7BQ@TQizx>OGUzqBr(x{%ZhMoT|FV$@stkh#~ z1a*CXcKW;Cy`p?{v#pgj8WB>ClUTm2oLH82d5`prpJiqXh!EStW0c3nfm!Gr$d$Gd{zKuW{(Q#&J0;>399kd6lln5iQeRGb z^|3(2co8qt0n-I?vRB>xtBDxIvVz7YBU?mrV9GVsaPYilLtseil!LIGrP&K&9`zlcfbudc5Lk?=blMkJ=#dF|!O@~ZVEYh{$!$9rLzO{ES+g?QNU zSk*k~g!D+ga(GGZBlN;&FQ6@{~YXCO^Ev+No8(k0Tbn zj@P4ddzkJ)pfss0px7YSJkrS0_+kT6#9dlkEahgS!=lkJ(oF-|j~r-x3n)VTb7Qk( zrpBXZF!`3DyUIZwKkt^kn5ll=JJUzK? z^P6A$-q*hN&%gECUo+>DP!M7i!nph1R!fkXA7HZ4F;1*`3*&M+vb2f`y7DsH#VfvW z)1NQ_{RGk;Ak|5Z$m0sb?e!}!jC6HtUPUCm7&sa(!Vur;CN&A_Sgdfs$%B7Rg3;Jb?OUIKvITcH)K(TyC`D^iGp9o^xpFO zT2V|rXa%v*|_~`98+rvyTFpd-w3dUibHqY{A zaKZ>bNDc=Nk-{Ipk5!X6FNhz8eQ#t6mM)dIWk&@6SyieE%$HUR;HQ4$8>NtvOMOHq z3$KH(>vjR^BJSW1 zyg^)ImO8n~+iDI+1+sSqD|*C$lo@|fG1yDhK_$Q9>v-!wtb>PF2KVq{2}e=sA(*$h z`2;t+_=^w))EE~#cKmUa?Z*hXRP2+yHv}fi*t~>DLFdKOJJSNjc+_{k@jZWX^B$`$ z%{1~P7d7!ko}Va{PmE!$+-^;6wD-wUF_8zf$Escxteqll3V8QL5QGwK5z7n z^?e|hTqOU!gWSp8pl~&I$y-<>Kz)~p?swTvxQ7dVT9Q*!Z;IyBL5Rf{#baWw>S5`n z0f-Kp31m*~XGx-oqRiASc*WOc7j!YGIc5|I9CK{*qYOpyM)glS~d5q0>a5;ci}v=TA$%mvz=?z$N%6^Jc%Z7mp_u*4gX%PxDJ9a}N+y zQyxoBrHktg2;3Y2WKR7FSW_s2kI7A_9g@^OMkXIwVk_;OB?g*DJd>8}Pg1`Tb==+$ zdJ!f^v^&8`xhi?%SUyOW)0Am%=vdnG_cX2C;pt_X(pVO#xeu0h#SF9t8=Ow?N`H_- zR)x>!Kw?Y;xvQ;xS`mf&FsZ{32sX~1?o+WKH5HwOBI#yXs|(v%ko?o+z=S2n!A;{F zvAl)Fa`yXbtY)$^Ww^F81qn`Q+iXctVKS{qu^fP`kR-@TPRpRpW%j4`=L(sh>wgl%8s>845!b<}vr4!3jl#nh2K?bEO6MnMrq`=nbp^mK1Is#o; zS`ZZx1-b;bjibYP!i@K;A1jD8Wx>1Nnp_-l)6Ar6Y9gXxMseo!Lh(xFIh4FkN0nr= z8vfb*eCz~qZOl0_07Aee5WPXb%nx1#yO82aUIs<68Qs7gE!yFo%nznT_?o?>*Ozy2 zMVF`6(=paTaiBXCzG3^7X8p+_5FLMeI?$|u8*s0{rRB5gx0PIxv1b||lR#+^Dtp(+ z62c1Q8n47>QgaD+;+Ar<_9W@z9M^{zScS0w)z&6q=&xPUxeUSh*)RX+xcabFgIJoz z#G=QA9{>~3)2}sq3Y!dAZOKjv-B_jaKH*YU;ul~N(1$eE30{qT`llW!fmpSeI;ECk z^%NauI8+sg&K~b^L3|vrm#k6?B*v40UkT=1eDd7;1D|u3l25}ZG3JZE*Fr_c$gg*L zo4yDA-kw^agm(Gvw`>--xHVGNi;Di*$rZ}xlpodyCypn8JJiIusU_^E>fR_WAEc+j zREFUY04}Y^!`|$znCL*jVOWzHF!BpOw!gEw{{~eXf5%(};}zR_y=iKZv>&Fz*X&ot zyW?w}{R4sr-m#`X;T{}i25uUJRPZ^zf{LF-Gc|}uY>!@7HfUx$6RK+4e6l<&qi@ki znDPo=R`alVNlbC@rD%N`gD2h&b50&wLSS6)V+F@h;q5dYzWnszFCKm`zPjo;5ZHf! 
zam*mFB?>JG^!X1y|HYS|TfLFiB@{jF9AJuUlC_FwFFYwDnxt83%_~+`N zWTzU?8?)Z5dNVGj6ERi9D>V@r=3PF>Epy0!oTk!y0ZHq(@C0&b8 zx1!&Q%Zx%yhorkhXS{>H*@3}Uk#WF|^fC$kg0k_?ojcvc*yv;4-G65IHrV6%GfH$& z06E+Jj`vFnzWh*2U=-`{$O%Fz_azXrjsU4BDM0L(L-!QNv;AR%hI9o(4O_LS(P%xmv^-fcY8 zi;>WEET$tjxJ6ZqjkLv78qzOE+;S@PSMgMC57N$FnC{B1b6y7F!@Ll3IYrorg@k%vbh4C$3?5@N_s zu1-eyo)w zXfNCJP-b!7f0%_{5XJe0cQ^v`$u=pl#72*?d+(B}Okm6xp|&xKPl7pBG2p=Z5FHw% zQLp`GnpqIjqnKgR@c~l)t@M#)`&SpzoC^GVVLipq;>Sm9M{wa##ULRycZsO^6x?ti z{!UzT)n+56kCrzLIL71QC(28U9=t`X)jA&v46rrn6`K3R4puxFS(}y`b_4p*&+Or} zCh&owJ>cdOBFqe&a?G29d!nhn5Y!3FhBdV*axUf@;lZnzP#V{GW4G_`Uycz%mA-Tl z8n#?by}D%Yz!xG7lB~dL5!p%I`2?UZW8g?Xbv8)#TxgJx9&=o$#e6_av!JIS-r@%U=kKIl;++Ny1}~+>*}9< z%Maaov+YG3l4iCAjIza}k>;^4H^#X(dtYvuhU?T)V3zt9T~3V%l3(@W>6~v17BA5l ziI9*%Y*6-Px~t!M{4Wk(@mq9sd>o&Xi%|y=9IBxM*9BIJmvUY2(!;^zXpPV>O4Jlr zoJi<-sS@r12vM_(v>N&p_IDGD(cA0h!zFgTUtUk!*);pm901Q^wfK{im6|)2)y++z zI!hu&GRY~PWD)0@#Ay&4wN{c=t1B6Zg;V=KC|;mC7-v3p>ePJx+2=ox+e(Nj9{FUh zR$Hy3KNK#vz`~eo`0>_V*QcsYsIdX7|;u4tDmP&3>yj2n-l zLt(;pzxQKv3U-rxAWSd+Zj(|A5=i7TO#k%}6Gm@J$O#6dv@l*qB!TV4HssjNpuxIW zMrr(ApAi@=ffX!Wfm=ROs~wM%TB^46%W@SubXOddlJGx$_&YCMJm?a%UIN~h?O_P!=gBFESis*+KoxZM7Q!z>H8`( zjZMBB-=0l!wkBAq$qgkM0{>f@jwKFJ<^Byxt5&EP_k>#A#V2vfmjpY!1sDtw!)yJ2aEb3B~3??knjF9t_w!-}-n^MIGZIRh7hCuk5R zh-R#m5|;6QojnrZLS*t|S2RfO?J8b)Jg%kEd&ET(M!WPh?O$vciZioQ&o{p5a}(;% z9Z9jBPQ0o;i8cSYdCz6=>!H00mlA~nEYa{9g09%e7U`JZGW43iX~}bh2_i2v9fReK zUB3@2vSO2tG}h2nMx&33S0^@8lHzopq(;Gxg+!zbV&_x)|@6)W&tI%=DN0Ot$f2I;f_fWHXjc`gE2i zqKCww9(@m!iKUK?%-Q``SR{!TziS@lPp3sK!DA1f{-^JH2`JobD)!~~AAS}Wh`L0_ zjvm!Sbs4aHRdenKf#J;|=^sCq3dgI_$z9QY6i*;R?hAtuLdImFlWs}!Q%O^ct;ng= z>4Gcpx=ga5T~q6r0=I#GwQ?5xcimfrPCq_FP@Oeb%{@% zgPSSs<1pu@zGvAx%HB(1UImQjpYcy?i4$>n-wd0y6N6oYjU`D^ zayD%`VnsJxJ_fF4jetD{Sy_#NE~5*)pS(eB(#YxmlJnxhSkar+5q**v>K4O!@rQ9FheXHdJbCC6uctPI0W2^T}TI!_bRpt;^FX(ay-#% zdX)t0Y1E^}>(65WF#nZ@1I$`-3O8r0g47ianD9Hk(B z0$~HNVJKSv3Zf7(r4H6tk$N*~h1(^MJWD8!P3BsyUe*JxuPb-qO9|biNjPb>+GxFS~2j!Uj|PB7{qk)ccr0-Kzb3f%RK;?OS`-g z2Q(d(o5~(C6{sS~7)A8sJ+kcVyWv6M2)n7t{-E~Mf+)4!G3db=k89fXbf#;PN=j9P zdQ@zPK5m%XM-kRP`WTCOi(T$3M-;*g4a-b@lga|yL;S=vu<2s`A3y(|Nyc6@3frv5 zxKdgE-N?|k{AhrbLlg+mrx4FNOmM5cyZh0mdLL4+w$YHHK6WJUnVinbLn zVMg2Hvyv4o*+$@F*R3I4ynOE2lI1EIo4g8e;+b>XDecM{B-CpHn21LkBIfE{&=(xo z-cG@TuYeZto!_lpB$nZ}Ocvj{zhGZ{S8nDBGb1P;!M2Rolc2@r8eRJ!y4c!i(0J1_ zw>ueGq0kD?p!-q*fE-;+4%>)~c(?{;o{}%{&xAd|;`wI+g-EM7xd~C|_&0i}T&R|- zt{19HymnNKZUq*ozv1NKUJ=t(SjHO@U}DCn0@9W0as1_9{PK5II3sjt z;Q{7x%4Z^qAFrgBtXjCl7Ql#=WD;TyXS}xHWm=d!f)jqv^ujLJ&=+Bvxx6bXf2U#@ zgq4PEX9G&yl*YwYJ6lJc2^h$eq6as3cCzT1F)w5si)loMz&f2!rn#ne`{+o?|A&Wv zXQ{8~7QJC+M?ni~cdXKZTXN1So#aJ0RH0T$6 zKtB;_zxX3M=U^>^-@ozwZ<}bkjNCp=9hsG&<=kLek@T2LV?Y*wkwyf}l ztWzH*2Ttm-qU&k1@^v$@nx5aHrVhB(O{OpvDEsNWz~9 zQDYIHHqLmsL9m5|dB5nb+BfnJS*-X+!1LJY}%WG>l?g?+=l;2Jdh%J$NFi*QU67$#y58s1aS z>))s(1WA#&l%*F>y{oH9(fga&C6 zIbGhn=ufVZ;<}9Wkhi}&h({7)+}ntQ;>tjU+Z^op!If_dPnN7d)l5w#CnbQS>LT(C zr9xwA?unmEKP3{?-n=?(JoZL3DGzC&sL~O0(eQdyBKy4$l*-5zGCCwaL1{r5@fi&9=tD270$)agy=#jK79K2_v58dQb!cu#7cF{TQw5k zs@>?6U-S=52KrVJoUyb1h<|v*8;3~TTMJ?TkH7v8;-5>4Ph$o6X;_(gy@bY5|5yJR ztLe6wguB#k503~7dFI2zrZ z{#mSHe5ek##Yx(w2Ox)D0=V<_q8wgil_Tw@gSH?dPshHgN?PAcEy%%zi73woBdOte zs|ebsSDZ;tl@gt^;y?$d<5UdDmiy|BfQzU3okXqqRb38Fk^4y7IFPS>@7wXuzxTE8 zc*#BEjyWjW5yhuy?&9Sf130X8PyiZ_3L{@@Y%^Ffub@FwraoX28IO*tp13Y*FLRom zB9&D&u}s!u#C5HWjMG3AnZxD8AVU=pk~fl%2^!EwEskH|=zT)JxAwi*WiX5j?ET zK>LgrOLg7?@0oU3oL?}*DWxlz3DmF!uDK)6f{$q6y6K}|-zW`=q!?%}=U!k$6epPz z;fGBFCzI@7AUnjYEPDMojvTYk7ZO86M^ z9uRQ?KIjUP4mT82`<5!=3HoYcNp7LV3%v$8@xJW3_WwB+N`9s}=xYMWSnfzzJqME`0H 
zF4ZWk%tbwXT3VblC#bQpG)cRFao|KCZVG~e5^6jQVK(9e0uiNh2QtR8(VD@ebI<2} z;y9`5ZiZqp6%a&Nti|Z*t70Hnud}z(kAIIY4KQF5VBe(b;q@rw^a4z8W!Ca1oW_P%(I2IRo8C}=`0#H+Y!LxrpQX>p z9>Ls@j6HlMwF&YLh+3V>wv0~@y*cWlkKz-GhATr+atcJ+pnQCQ%%92~XxbG-z^J;S zK2IXv)E)_&Td}EYbiq$40dsS>adZq#h-Yz!DD^y6^Vc|b2y0!202*6$r3-coOaoVJp{EaU%66Bd6@ZJ5$A~D>LvqsR4e0c@y8jC5A92aOVUxt}OuOxp|xi z;lR5xr+V7gyP8q+&liLE&*$fYMh~Cfj?-8kYq{oL#8n+DPPAiZFIa+fD|YT|DCONq zAU?GfGl|uwURmEfc*X*G%dLf9Yh>sx?;-Zh=$m8UNkKE{LE%Qtgm>5XHu~8bhQqCs zAQ2wKp3@e>B{FymlLf}{xwpQcLW16w;kJtXLy=JV_u1y6OHiu0l3w=8?%u0l(dGM% zOs6j`WJh%c);|A@X#>|!i&jmLXdz)=gFz*A>KQ1~i_%*kn=C6Z<8)svJoD*-sE*&i za+h3XZz~@D1Wp1^l-h)AM8j3Dw1*dgrQ{YE$WdVDQ{T?~NzYjoT?QF$a3+FwKgnjW6HX?tK;H zK|1E-OT|mok!Dc(AfKXl0d%;Lvg9@mgxv80Q(o0k_I4FhLVm=FkIoW)CUIkCr6lV* zZt}w?vI2Dfx+`o&Ey812uj`2j20GrgN^HagC;oweES=#t5xuR+MO zZXwoG-`Fnwjc$Am>wl)K{0DT^br%W{QupOPN)HkmjPE{tBHW)4*8XlU(fS?a0*ye6 zs33<*xk+po-I}lfI%7E<20xL<38^4Q`r$A6-+%r258|cmr@yd76B>uD&xZGCGlT)^ z51ri5QdlN~4`2<9VEK`fN#n_TGWfZ|wMex!+a@(>9|53Cngqz%qq8D)cfpRB{gzuA z*zevT@lm2-R9Dolpeu-IB^-P&9s}2pu@RqaYkpO2re{xH1Zz-S8Px8*8>P3u3?APL z8GZm`XfhG`lbzsm+N89|ual2SKgKu-I*+Vuq{=2OFwOxiSd~+?>L@douz>!F1>D3R zEKI-KasK7r6~kzE8K^CCRbW6|0>#|Ck+ad;LD^S3r}*m|>7GGB{HxW;WA?k6X#3c_ zKC^xGE-Hx&rgD?-41J6RT9s@E2vCrPwl*BTN$ zE`~5tKlgSqxdvW0%>gm~XAohV!&S=DOgV0s!o#Vr{J1`!o;0OwaN2~7Y`1J8T01&1ghc) zu}J-Jyy)T6(`R#;78z^YS_wuFTE(8Om8UDc)!uf$XBl#hgG-`5?&fqboypxx^y*2-|ftrtH zAiQp~et+@#6F=l~@RrjmIgdBvixbA_l=t+Mi7JSRvOO&3(MKO;!}q~b=p|ccXp;Q` z_U|Jz%_`s#aLl-DK|ij%qRDZ9@TffXA}KqvI}KY`Z3ca2$_Y2oK%?f^J8;Ia*e3R>t&c@z) z`4h5@i|t>KViEJk^6<&MhyCTutwYf5PR4k4cjIKH$wcGs7u>8-G;-j?qf{iJZXYl>GH9xkijx7-G7`rwy` zAIIhHy#&#u`q@}HH+Ammv-kTlvkPdc!X+5XmbILLmt|3rE8l8um08j)YH{|tI!c%wf^c-l! z96UO+%tj#5r&uld;%o-Enf^QMkA*>4Ee~nod6_Bgj&)VVF+&^ZEH8 zHkmFuOL*aM^yIdLx(Kt&U=0RYSeo~3c{jlE2J4u?jvfwukvC`e*Tj~qS@njlM3$h~ zakt4=g=uuf05=2fC~OZHA2Z35Z)g$#abl zw-PkLb(8l_uTnb^{w#a!?`*D^E0uVfP_kRi)9t}+ft}KN_Uz`q%#eXi6qUrRtZk*a zt6ZYnsU_xZ9gZ&)H?jpSNDwwhuq1jWZZRZQx-@ohUB*l(bl4K=@yCZxzx^Arcfutm1NW<_tov#-1u^)-m5!)&%`-Xp;bgSJa~L#orV?F@qMg>tXTfD!Ta>t z0eWRlmf66jWouQ%oOV;_5XX^N&qV@cnxH6uzX4harZxe7@Hc^=83|6Oaq?gmjMz!R1V7*rvdfcMg z6SAgBtcydrT2O^hxfUanG9JVvy&Bh4D|l?u;4x5GyVRR|Grf-hX@=P<>#JTlX2iR~ z{AQ`vo~(stgQZ&JDwR5Ri`qc@Wdw|FiX`v#*1Yl)L^V#Dd)Mvsdn;bp{=KIBqy{}c zDT5F32AQf|$lL6LSUIsi`WL9E&riFfEDUZ6B2h;1VljRosOby}W94iAmdpFwpOh}P z)Z?OFFH%f1u;<^$ZY5u47@Ckhk~%1b=?$(TIF<66_$L)7v2r=ya0UzY-dcH8>gkQW zeQ%0VZ*@WOw?SX1Oeg^fU#l&o={<$bn3G{zoqXx3Mcr@^geDssISN+}X|8p%v+R{! 
z4~YjAlN*bdn0}T5-Cnjtlb5SJ(~L|$Vfy;)g+|T}&}|t+ly<5S1_okNrCmJiWpvdU zsL&KzYXbvdSC~#ybTc8oV}c@H(0~aOWeD~q@#wttRQSU{aYk?L0=0n%COH$xDLk*W z!GQKhML4LMm{X#RA;>Fmdcr!F-*`PF!DT#{!h+6T7rb7}9Xc__g#44$quyoqx-otbhoyWtlGxt03 z**bfO8pR^k2Op9$TNkV9}%t>+D?{S!y0Bl7tZ0;e=&luIT?O*u%~$kWL>USsrA ztgg!z9UN?lz%RT2|LMy`SNhw?^(4J?B#u$|h zD;gjQrcWPPcGSnW@8!6@`=7u4tDHcXe##vOXTVmwGU9uYR+Hpq@)0pjmFH*WadO6LMZHWK} zTOzN0@X^OwGVJ=3QDm7EM3VDNazri-8b}g5GD`^=>IrD8Y#QU_92%C+=vD_nBo_l*{ zJuW$K@nlj-cgL%CoI+A+XWP2`pyEW!(}e2^faX%*W%n9cl>0nQPP2@7)1=D#NQsOy zKq^lYXXEw@(?>boEcCisLW0S`@kLa8Vp8OwnMdF#Td9hqziY>XfC?I~dtT=k!QpXF z=}^!k5A`h1$Tlfij>bDPf9WQkjA#Ir1bKx{D^x^XA=z_DA-2qm9q~UBD5I9BaoQyI za%L_VriZTs7uJ!vun0{yzqWW8L5JdH(E%AIOV~e&zI%*LN@c0u8yZZ6L_jc4^9EP{4`kZyLvz2S{ijeAL-x97+(ntj0OfWS^4SzTOhrFeo53TMV z_a0Qcq`&#oZ~spX1IN-uJI1SqarU;dQg=4Z$F!?V13Jn4<0N~O;Fx1NJP4e#xK7p9 zT9IHMd|W$_o*N}D@S{D-$6a52a=UgFY;$J9irl&L$SsgCh53V~SP@h4VRm$C$@zpe zqH*k!j?Ixj)NS%j8E)+qV5Z4((+9GX*V5)QOVc-q~5 zQH2K^?}{{0SBI6U7K4BE_wDicK5X>Qh&HVZD4bc2(^IJxFbY-5yzu6fG{jhv#FJo| zlRlKp;=q%2I6DtNMKMPv7vfG*86dXOO+_=P=>FH&|0EZBt#^*jDMRh2;L7sFa7UqL z;vLUZ00GHdx$pkb^xZRtXo+C^P8XlDJ++O{aN#@PBXb zYF{;>f1^Wd0_&#Kdi85uZ#mIV3rbJs@?TM9frh62Ov??tHEhTn@pMU6QqqMQd7o<)*AH-rJdJ^k#`h`bpO3 zFCUr2r9R-yB30Y3H}l0Cm(w^%L_}J$AtTtDGo&5W#0|-va_ItByj&1Nbf;y-O|*Ps zymN=8LM_u1cV4~LJ@{iPvo9|pqg#uB^-2pb1J#t=OqEG&PykdUrx*Jl=R-KI!ySHm zs8uE~BL9MY*eR@$|ImwGaIHX?0Dg zIq{9f?jqnfsgU}EPt4VfHBWl3_w>Q|wKe*KFRJzwD81TFUik9D+$PWP2_c?x%CDYn zZ&eWe@kux+@+ZQOM@6eQkqwIjs&?kA8=M<0Fd}|h`fR?8L>in(&S3~BN8wD)7o#(g9WGfqsf*&#Dg>UPTnM~E3=Q*F ztBneQmpNJp)C4eCNNbbC`21}+tueiAoY4~`>LE1p#}uGogqc&4cZYYyv;#`_|E+L+ zG{)YyHKXw>flgzo^wdLbHBE&!<-~Jx<>EBC8hT{o$fXp8NOzJ@$H&LYMQTBX=;q#( z?tIkGKi_`6mH7zkGq?SJMOfyAl{Y#LVrTlb<*j{2^fi|_j%AxzQhb=#^HV%AmC85& z-oIqgTh(A=vDz!e3qAOzgS6Kxykx&bI(!0WvN&pI&?kGO*^iCy@$AsJ)8^BcIanMu zFJD_@fsIq<`Zfpj<3G&Dht4yhZM3*rNT>PN(Y#RAZe(Tt`M3YU7vmPeY6oxQlQDIqXk8o`* z<_3ZtDt+}V))|%;p{ab<23gl6sB3ng&HdP*$1RLwzBG8Ic}k)n_G?$|oD466wi7#i0zf?MYpvG)&NX1dc~}jM2^) zetiD( z1pg_=gX>uvMDG~rnYAN0U%qD`k%2*?mFD}!>`SfK#@=3eY9=%ZFGtlYPIk~eZ{Jil zG++Vkip4l8hrB0z;)+qh9&(Ez2W%(ZtUs0f(IZ zV>UxB`Gb46wK}I0n%Ik=sJ`e!k*TX?UGL!74vLga zt%brN2Yo~bIidbjI?!P5=H1Z~vAk>)EV%3R=18=i1PL`0=NGFaFm8(254l~B=ajf( z(&ok3AXsp@FdyH&rx|j5J)Bdyt7Joe!P%Sn;5b5nRmuk~Cec|z696gW47bqjT92~4 zgN-(w8nIqo7K6r*$m}h1f=J%0XInW;>Hv~3*exd}H;^OYc7XfjTb^(2K7IKpr;pIt zgOyd{&-1Mpc_C}U+U4x<)H2&=iQZpuq^IVF0*z2JKKhbckQ@;VYGq0u!U?ZpY2bmq z@1=Z@uMy$>3UO~f-4OppvZ2GqQ5nUOKKbNM4r@9}K8}hzpsszxAy?V)#|(FR^`D|! 
za;2vJYJO4%I-TNJS54>c?ds;y6ftI=3-Rk8{6KiQ- zjtveAxK^Y4V_7OozM|@wf*nBXV z9(zWg8_DcV-|l=|K-{^QUgl#WgXhS6h-e+i0Rt3HHj$O)n7Gl+1s6pSgyod7(lU!Y zoPh-$F$K195(UKvUzZ;$NzMMqvG%GRxvjZB!Lb&(yc+Zl%BDWCAuL@EN`+imp5_B0 znu=$@_#elqa5!k%6PFgqT4$~Kk*XVKp7XY|`?*0w>Ek5Q~71cwm@RXuip}M}WLc zisxu3+Qq?0$qxD2p7r^Qt)0y>18G_ScT4i&)|IjZXYin8kn*RAa){B9PXk3^t={H6sKDgn zwmtnUE>UhQfuj{|kOkh}NZ_$WE{)2f$I`a*Ocurjw6M@AblEwz3 z;48YUlz9moVvd?V;BZF>{%@)bD!+-k!i9|G$nN9V*>9Qxsf7*g>5>#yQ<0)gIU7*J zZf)W0^NR@ya%CbDIE-d=IV%W@w}X(6%O_|Kz#l;Y9;@;GcR1T|EAe{2R*Ha7fqZ6i}amY!uDyZ3@$k*TKUZUrx ztFwwWk!9JE5k%h|5(%Q@O}m6tt|&ti9G^swQrkuw&$nK0Z&rrd{)Lq*>|lpCGUr&{ zynOjc&yHF=Q*2JBs8*r`b|GcsZ4>6(!#IhMs+h;Rb-^MUQ@OfzF`v_?|6Gc3GcrlX z4@EbhMnUfiwceIwcjglL2(>JyM*Pd%!*n9TiDJu-XaBv~ z8XUgmi7p3p(Q1d4<EElO;iJZ;RL++JHRp6osq@mg2riAcN=K9zT^T*BbN)6iDzi>QZ0 zXzRXp7jV)lxKv1d3j2@rzkFYdZQHiQ6H@lmCm-F*@pH@+wyz$8E`%y_P&s4=Ux`iA zP{-4o$bq#E9vBtx5xi)CN#;oO$kA}b)DmG-VR?@JPKrO;+|es0sIsLZyb3QzKH=f^ zv+dpOt)0B>{ztbp>VxTCoUdakgnR>eawU)$9?HcAnGofI*%9fF}-+C1)k{4Q(m9K=~V0vLSzF>5-Ttv~0?Z6T^2`Vp-_wbsRenXB=S;7qX z?%i90&*v|mZoU>gciv;MQO=#%-QC{JKkwZsqlKdI{Pu7E@OK2=`MeHOVUw*Ehto@~ z2b-5N6fp-}elWhsC{tgi-RU1lN?=&w-f8t;8enAq#^m(UN7H9Xe{_Vm(~7m@K%D7m z+q4jyXYUBsDgkz1jH1j-LH+b2F{LXO*=SXT4Rm>_hyRnj=xsx%2|0(0=79 zV(2{E-ux_w+JI98T}}=KZsft?p&QL8hO{~@S8*u6l|};JHx)8sj&^q6iYD@C^V6wm z`0TYfyL>sbl**S_KP-=AZ?g)3UF)(Sa_p*oQRJQ%z+P;<+0po4OfRup(VKPS#?D6W zitq?0{9d%fel1lW{gcvR#^O8w&#wQa)}KW^2-+mgNmgT-{090^rV z+dF#L<$T^wfza3kC1i}bG4Y0YFkbZ>GoH=}tL8f5k)^;%58wzxT z-o)bHx?fp&Sj?wpVmzXAh56m01^EJ>NEGjQcYR?D(t1NI znRAM2r%xW_Jjg29f2tKC+ykFqnNji_PhDQjln-iHBrsV}7wUu^3y;r>A4X1BvaS0c z-$?{9nif?^fIixK{W|Xg;_r1{&!h_mV;@D^B!R`}6#Y0eMsS(XucEgHRJ z`1)#k9%ey~0LMW|%CKgv0+@~Y7O#N>5SbA(`EXx8d95|y;U%2KsUdOZ;O!hRN*6h` zs)onQ^C#c_a|3OJUn0k$RyNoU_FN8ud<;%qMT0kde~PA*i+wyZ|1lY(ixbLt z%p1%&I9Y2)l%r@kju+0s!~v%VL}ha#uVT-iS2+QN2l*y){YAn!t~NyX#L>IGv!g3G zTqF25*XcX@$#Q5(^sv-;KK*=$j-6Xt$%h*|TcX>YPpqDSm@v_IR@Q>WPhpybp-sP{ zmu_8`k4L8c^PyT=YE7af z_~pdX57;Wry3AFkNnHNU%Tvu|i!_^y_uEw1C!sdCH-wGV8Ic0LjJgPR$;DLq2Q8_{ zHCz|Y7Xu>2482(DokdZ1LZ8?3KZ{BWeSApYbNdJnUzu+5YU2-bR_QskUn#fHucXYm z1^&&fb`13UxvqPT-){2WHtXTIBFLsJzP`pg7D;$(SGP#e3apJr*bAZKXm+pL`MhYb7F=(w2S+=7gB1E%nIpT? z%8%H&DSgx%mltS@HecKV_GsKRtu{_u;`#Qo;u>>WDpX@C60(A6r)lu_epmNU@@(g8 z;$q|2#Ry@SintYJ1Jg(021rJ5gz0MNeL>Ica_f-MWpuqJ+v50w$Y23>7Y|2qPit>dT2@R#S+q?oS! 
zSVo$3<)xfUI^DxpDE_#6N#?8w#1X3Dp36s}>%g1yZn#EE^-aD`7e%oYTs$Pr`MIsQ zq;H~ldL5~>o_X}@*?dPY-+cLE_w~!l)tG34`B@dYf=_7NQIccAIB1F5s%M8EPpg-kl;27LxMZ9Nj-+nDrE7 z)@A9ZAK$ifKHDiKR%5b>OOqD}%QST7NhIZnPR}C!QsACZZfN;2Gax$ZEB5*0m*q;@ z`h!=wETNBvb56V-F?J&$M2|)!s@!wyqka^@F?%Xef_7U+rCWw=d2B;sbBCi)=y>W) zXgGx%jVsncv!?G$bdKF0wP1x?jTfsqR>-@_!66@PDbMHRwAdbk({69Q(R6X~L=mfH zieO!lmXyOJkwb}G&$lE5T zY|zXG6UV357RPZZ=uiebH6eqOdQ!WzHQKHW!R=sM%Hz9t*V1&O7z1ifZ5j8lbfW)> zANn5tEmNU5t<8H>Y(xi(T=Oq|8A$O|vkBtCHNq&*^9py)>aOVe#_TjTl|TAGNQiW7 z?z@icz3=P?#*V>r`KE0wnX6P;Yf|j_k07@#gYGt zAeIb-F*nv4TVLOj)gGA}jU(vs@E>cuys;o9B_Dl#MYEAPMMmZ)BK}HS1q-zxp>Whh z*gBsXp$Vz$+gr~brObI5OyPxEsVSL2O^QTwX&L)|0^f$A};}07TztE~g%Fx`a zo$uc&dw$uh`SKVOkNS$vQ#2-Tm7;0bvZ=71nhice06Jd8{xI&_Wv zS_iE^Hrq+pHCM)3eCH76%aeH}bAyVbGUfKZ6acW|G0Lab96BN*7pQ5`Xk>2sLD++o z9=f{(?lCnbQRjUbpzJZ4d%$p&SIrh;w+Mu=C0qeH+DJsTGye#;gsDu#+f0hjTTjLu zG^J$blZZo`M@-=P_Kw~~7eOYJ2#&V=C*;?Zaz{qcL7D8tIQ9-oTiJ^&-;vDz!h<^z z`iaB?F@Yx}%u}x!*&D_<@2c1fEsrRTNC~{eV(Fvvzty`rLar%z#>_$jjd%f35>ANF z%+Og$XScP3gGb+CBm8WR8?)9voH_dyvbc5EYBCNusbUdz|VLD`Owx&)x2X2s4(^+;&tFdV6 zi^)n?Z{iFQ zX>)UupnxFR42e%ClTz~@&Mf`z$)iWFHR1?f#sb))hKAUZ<^_qz;S|uEUXz1L$7Jt% z5+a2&*rIogahYe~)i=*gYff*NN4u&l9MYe@XR^@=h~T$3up`YCbxnI1CvAvt7D(b_ z>+8rZZ;C^bi#C95^XKPQ0)efblzqM|p9T960bfVuJ+9DqY*5aH%TTrM$KltE7 zQHPqUvv3}#z=y*dyv{F5jgC}nbg8_K&e1iZ2luQG{KMH%9P*c&@UT^ziU@!X*%eiF zspNiYb`~cXMSMO;o71P3DEfU{jzMO0Df{#KigsYQ3 zx<0r}qNu6 z1OkP2nIL;`&su^-QY5M9`P==wmNf^5wYo%9);Qi~tqE)2#%Y5zTYIsaGa?#$uN1_e zwSN+%oX?HXNRNo95Y6P;<9v!rekM$1=lFj2FaG8qT>r~_F5zlggj?&W z4V+(;=SA?Mb`|@g9tLBZX~mG{K@@X95Husp!t{wHYJG=SIYQo@s2?1h{dhq&G z%-Vfu8^>@wZMjcrBRaF~{bV-B&2XkX2J&#sK}MBrgj;G_Bxpj5igQFGc~O?*k4V7T zgSm+vEThvH`(IZvBU)a>K?BJFNt879vi<70RvLxfTZFRr+5wos*7T0u2>jAeM6-O& z3&c4&TofLmUnA$;qqm3>dg_|R7xF69NP0zx&pe`PCrOTRE=vHcPGPlr{u5F%+)*Fq zDE?S2>LzhdlwNHIjBBD5v;6f>Qu9(2{Yt;wu_JLKD(CrD8AeQ6n^6e0X$pdh%iW_{(tM1&M=NKfhjI z`S0<6{l89wj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW 
zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JWj)9JW zj)8wS4E+1Q_G`cP-|}DlyO_V7p3T0QEQYayS^GO?C(iDDE=J})BJNboaLVjeLEk{{-gD!JmX?GuFI#x!XCKHTPLIUT-qh` zykt6@+1LB|^=!D%pZ2^O)qGgw1=C?^kJ!`K2ln-0{yjSzj`D8^ah{jYht+>rbDL=T zli?&UnG6eEaxu*9F{k=rJ{;=9F!Sm8fR07U$=Ps_@0blmgUL`6KN+TaCWOy7&xW~C z%-*;O23)IW}oD97+qnU=Re9zW<%|CzP_DbPqgW$3+=17qxbwEAI_am z^UmpTD*EiR-|V6{_V?@j`xs`ZJ1&QbbEjJSxu(L~9_QOuA}Fh8%L*&aYZk*S|2!Yg z^R+NUTgY9Qv&adQ^N!W9)ZosC#avebab0DThV9w24n(qvakt=MdK81nam|jzuBL|O z8Gq;NCc|D{4s$upzah+vyae!S+$@7%^dv7@iEYot*>4TE?1|6xn6qJCn~R|t{-(p% zcFVi`dodO@TlwDUP_m6(R(l@;**T$BGL8Lxu`M${Q~0tFex!U8Q#`F zp=v&8rHRGf*1fTQIlAksgTXfR(J=SxCbiB-dhQ1!T6~_bW*eG1v>Yb+0|fh058WG9 zHjPWfpMC=a1%m0Y?67OF%x%4dB& z8@|b_(ZUR%K{*X|iTtW}Ggt8=)>fCjk8Pg#*&p^r5jLVe?2D&Qwbbn9n$lzsu9Xj( z4d;5rJ_EbZlCW~K{emnY(p!evsjlOJ^VNkObBX3rZ+nNHQ)=yM*b}ah^TimM_Vi3o z*q@(DZvJTafnK~loaqB{Rv%2YLanTIngHya>_l=2uKZpzfMcvqY7JnD#iF=O!y3N( z)%CCPVGuf3cRtLEmBifh5`zvC{30($6)845bBfI27Mc*?&O6Z7UEljfevkH9*TDJf zZ=`?yjW{h^H!BP~HF68dxs``4!cxG01@1e?nD^30;v>SVCB-^HlTD`cm;DM>{$7GIr2Hudqh>np1+0MEw{vhNQdMo`T$Tf){zC5dH0Xe zMt>~qBF{_mvgHcZY;Pc%H_gtD1m5ebFNf9A=rTJSdx&o3Z79l`UaVhoEqF(G?BXA& zn-25y6We*%YW9hd)IPkvV&P0IU6?fjXUX@@uzPD(u;JpI%&_+0_0=UG>!*n+S99l> z*bp`vG?kK&@Q&QU?DTpP49SN0(eNOLb$#_zJQJg^MCuvuuB|*l$*>n9Aj<#@tta0I zTK-TEHtRM#Gj#{tyU-k}OzAZtGmR8H*_3_MSs?^|R_n%w%=fbai)W$Rp6dn_5t~1n zKn@cZ-m1pGT1Y~%dGfKyxdS~4y{^p7V>5V|{0CxVli`rfm)M#NmwEtmE|nGl<~1Yb z==@}ul#UHq$_9r2n5u#$ZUP)s+J?m0rr{i-WD0%^^3Yj7CctJFm`=)=;}8Ns;FfCs2RF~hJJwtlS7%0IP=~x z*?*fe`JpU|uvgBEg27s+Wi~BF(qUwhrB92crMoqJIfdbV=$bf zvoB3cM#dZ3U;(vRz8t2mPP4xY)3L#9gEEL(qOo#|u{4<#Q`$cnR`q!BI+!y07LI-? 
z5pjleI?%_^nQI-xJ5cKYxIWZ%IN;7~a*(v-3>V(dZFuGY!_Ko8?}mw;iwt^f5L4K< zscvD+u)?x^Q`cD|p_2wiXCTguN+>&&n@8l*#cu$u_@~ zBnjPlxc*m~fP80w!sIESdTSqJXthqLgszpwycagV&t0-Z;&Oe@fnC0NHS8C2zrMn+ znm@1(KNGE9i&m^dZCkvawv14^bgvWMXM%T$d0UJZYQs2$hLd3~lOgo(eD8TY#0K<9 z{=FbjkQcEC!%B-6YMnGi7{hE|KfXs%)Z7s{X3wz04iB|LOE@}QR11$hFD{0WnUEUU zU@Son@xWO12aG7cUyO_#9$Lc`M}WY(GCW{m2x* z4cKLpbT)h~a5PYBS%STttm+l><}lJjaY~#gIpR0{0s{B+nDzeqDkjyrt=l>L?45pO4;9yg zfz_N&M^;L}FK5I~U6=$6-es~ftdttYsgQ>2O#Tph9*W zn!o1A10-x(shOb|rn6WT&6uP%ivAl5mHaLVq1bK2$R)ML) z+%&BOfjJ_mRvl55;{!#1oPV!IrUN{+mOLz!ng$-ANvR8=6rUI!F*r0QG(5uqx4FKD zYwA`a2mY$LGj1to;qW$nYeR09<@98zplb-dd@orjh{T!{M9PQ-(pNx=xVS1~DA zkyt`gi8MZFXI{MXIj^ma78bUye1Wp5PlWJlOhB)cXc%JMGi5WKnZ~mUNm!!|mv?@E z;kkR4IF7t5VK(G-*H;@-4)%wcMEC)k3Z$883xs$2q&yWr<{!S3AOBdeLBT&QcCn8$ z+&0(J4EGO-v*@wxC9_wj!&`B67T_G8ldcUWc3_Ga#3pi-Ktnev`95*?T2t5bl<;(% z9OyN{@ljT^7H7k%wk^2UT1Py>MODo`f779p%->j?bcAcl57!57IENvNNo*yDSY6+6wx*Y zcKOUadCPNzwZK5I;p{+g+MD8?nobLNGnUS{$|LFifWK50T#-5NVE&VZ9ymF|nuO*1 z8&P0$Qed%M{*&RGd`cK$q9xD#dN5}-xdVCO^;JAwWH@q_=orRlQ5=@;tBSuf&sw*5 zgdX?`oj8Yw0XFiHz{P4K!=)v?81N)-V!PLfB7oZR{BFF4AY|!U4C9?nijGKU(TS~i zRO=87pj7S;Mo4JBY@Ey}ZkGHsgHHPwLJ*#VlN0CpI46pM zp%c2NBDi<$?T?1r`OJ_960zY8D6dYKXmtyD9lC`@FP6-9Dk~-2&n3o@EF#$j*ej0k z&Nm3mR8b(5z5*Z%S1%!&Wb9^%UU%Vx-VkCSK}}%BZ3U%}l^PG0uZQv;N>6}nXGUY9 z9RV#7o$ZY(QDQeJwyG6+2d?L(n1rIQSB9jmp&^tmifj=DuW`=4aTbR?P>`83H?096 zH|-H*Olqw@kZ#LMZPszsi8#P=b>2b}+N1y@KZK0=P}6R=N=t&FE{$y-;<7WKhgq$O z6KmC6-^bePx7ZSi_XBB-DD zn-?r1Ecx};@LGU?9>&X2Sl*Y?I3K>&N)qB(m=*16abT~wXe6(hZN?H;`V_n+RA$YA zY$}ggZo5d`N01eUW6=^}5h6C#go&Nm-i;rVZ%6bh|3=*`9tXA0iWna$RWkN{66Sm) z?ys-V*>eYA@M3sG>@+WK-cUo}92HK{ml@`SQRD(an>QkkMvnm&r7FPC|89;xq-Xv; zXUl3?&WF_=t{2fXRJC&4hdGd|{#(Ouat?2&wY zc=n7Zvds_835_QP=`b&$KfAcF88%OiX+#8rm!+zF_vgd)KN2voEf=LNpoy0%vddWO zrad@oeE>>&Txcvc=&9k6y~uJY605we!V^Fciv zM~@}g6k9q8#|>jnGds}&oJJ6&Tziw@%UmYBO8;fy_h=qM8IAhvB6l}?gG_3Kt_dc@ zelg;3mZC%pf9ya*|5v17$sYo&jAh5xEWQIn(P#;^FpzvEyb7@BS6&YW@>k%9b5T@T zBXX(BQCV;(>w;+*l&%0`kzg#Fn$?sSk@chD6Fm)%4R_GzJ#>{>Z82~@m(7e<)<^;3 z!MRrZ5Zm}f!wSr-PK=Pe$3EeJ5?T>&8Xke3q($(arA@WXY-UbHRCgehmxqUSR-1R6 zA%qTTX&!6?ts+4CVZvv7c%i3Zx0KkvGu+bL&TuI0*@8B`Cm%sFUOpo9Ne?g~AVn{{ zr9L-cw_q7Xha=@>F~Z-~jihb36C${*iTDWF+NuydnwEr~FL;Y9TH((qHwbKDX8HK*tIvlW!7jq%?)pOr4>fEa8Ug1d z@DGNc=2M!jn&ZMWSh-_m7yIedGBZQ|$O43>zmboL7;-7$*H@37_px`st_8im!g@15 zAk4w0&T<=A!&iPGX?1;d%y3ft@%CB%O>S`6)E_B3=lY5d!W`=gVvOCOq%7Wwf}!3@ z0Sy(v7&tOua`9gkUkp72EvG3tNrP}*lzP+xa}urW0}W($7K&6cx)It!sViM57Dvi> zs0sEEJhCGf)C8l$c@`|exiF}jX$Lg6kOLLKLtY`c4g5gBvYaR&q2QHJTu;YB_% zR@svcqpx|wUOKQFgUTfF|<+Z19goTQmQ|4ugog69D4yBc`i7sxKLNs_z9^gMT z^oL^#|D&dZ^>A+Y_C$T&gkA-h9ggecvUDiEWqNr5oe775^#bAU+|mx??n_ks`y zu}2W&%c--XKmKtp?@x!H{3Q8F3)oru|0}(S4eu&eizC1Y!{ z<9;Gg=nB(0F&|G}Aa?OCheiZ69DP#vu`Nwa_Uu{%cJ;8K{BrzEl~d>Ukll&pp@^85`tgnss&*DP6Y4 z#Nlmp2(Ws4So)LNAMBoD&Fq^TVwebvlHTyWa_ct{oR_V_%rNZ$WSJGRPZ)mzENcN_ z6()XJ=vod~eYfb6iaT91~nSI#huMzf31o=JjCDr^j| zb3YUFEw9_m!g1UnbJ`5V2`L{FlfiYK)tvd>hG~QafI6IeP$ zK9K-?s1*-qw(ZyO1SE|?U}+?LUmhfjUXC0Nq3l@pS~%2B(XC;ep z{i+n}1NS4nbXYcf*tC&Q`Aa8iY*hpfua)Ndz4&4n2TAT3QNkSGN5lR6IueESv@mT= z28C_=Hm{tcf# zN1NCDrH2rr1SQ9Jef7xg%G43lbC-O5i>Zh6j4h!5ofXOB=lDzLev=<~u@1()Ig+Pvh!gX4dZg zfs!?0AvMN`pMSN=t%qK3N)dwTP%qqQfqllC^M*CJ+pa~S^aYDY1|5PepB;?SYaSkZ z7D6Np8g5RRh@n6H#zv)ZI_s$<)|aIZ0ZJo53pg})A4oj%vN|4OJSMd4a4CqGNbLFI zz3_)4YCqI+h4_QZ5e4Q6)Vg6H8^qUXZwCn_@6}`fz9>7J7;$F5Y&1wL8r7`62Fd z;#+bzg+H||r&Iw~nF=%*?dnh+Nl$CZsIu6|m5+MRTb0 zrBp=lIni(Ps6QfeUzlBSN=u3X2%y9}j@)a;GPk%;2&d=tj8u(#L ztrF#wg^0fP_|hFIp4Kx4zptoy=J*%zCLN)>!F&th_oQ+)S<`Sjphm ztAPScT<_UG#<>{20=*{~j+w2&3T9Qkf{(;_C|V!9H*bw0FOao* 
z#k?(4bEKPA9FBgf6-P_`UcmxuB9_JkbN}avVAO!W&}>TNuR}7``FX3GlqZTWbw5%UlTqkt!d@ zYjIv|Akqi^S3MMqy!J-e{N+6O#533ftCeiOEn_g`XpJAw#q7iY9t3)4;K6h@L!AtH ziSysHL8lK8Zz8q+?GKY*nfSWPo(b z3yZMEHpmHw7qK=z++$g}8`Q&T=%^5H-3COAwmK*Q~pllbmWhQIlf{D6`t(>!2fBn|=P1qERyTPU}sB1!ND zN3)2JdlnlUk81C%W4uYOS%@r?rI;LA^k8&*c_obqqdK7wM92_=ogGQBX;WPsu-6h* z(Y$bKCe-QL=V`LzO$3#5Td_}zBQ`Ny8!pNNH4rpPUYr<*?+Jauo=)awh!K-0?7>EV zPfv*$^t`OOw*1z8((1|4pG(;f6=r7h!!=cuZG{F^Z5q*xAg;!CLi-oI-69VAxo{Dg zx<>lfBzBnIL{Qfl2B?wN8;Pxnr&iyt=f8l-YkAHio2G%vEJh^4(44xBpRJL z+9Fpd-$n1Xe3XX+*&sewnsJ#=nRGc^>BWs%R@cZQ=4Q@OR!F>*1tRR=o|Y3@8<~@V zy;5N}xJn5_E>eOoI+=xJqxAFp(Mj-aEVCt@x|J3j^lmOJW5V5fB;gZj7^vr~6W{EB z;k_1kxjJH(y2EsnJaWshfR%b>#5q=Zb@I49+|j#{pREd+THhWX z8dcjH9X2)9ed1*C0X_p=K^!oDq<>bc2AaU53`C};*@~E^=OCrc;<9&dkvo( zCAk_q$`_`DW{Tr#@m3@}ijGWshx5$ zk2kPmHG<6mMWP+lT5c!)PH;LqfXn6dLhe*P^lgO#Fv}}A7raj_B7%oFrS#Y@uEiyVi;$uu5aQzCxPRW^cupl^GWKrlJWhEtnWY7ucs? zhqINajQWU0{*2q9comgU5OJ#{VZ_M9(okM6P~!5(kYXAy+sVeuT9vp1Q~MU{2(Qd^ zt|23vo62pHmaSEH$0C)Rj(Mm{M5GlallyS^q1Isr2UTuZgw?IA@UrCt33Fkaonawp zCeKYeM{O;`%axm+Ay(Bf3h*Wr(4Mr5h-A(e#eh+0@~Z1A3Q6)|t@QAqwr^lyB-*fx zP*OE6Vr%Bg-9jQ`nkE4VvudG2X&TkzZj3VW=zkD`P6Rt`h58y7%8Ivm1M`YpwGEN; z0q|2x8lm>E2|Yb|57SU>c(#Y#yfgeMW=Wk84-d;tV?e5?q2T+$bSqwZifb%kh&!uH z$&fzeJC!!kikp?Q9xpAS2Kg>6ybw}L3KWd#6sHwdspfJzg@ahu3Xa=R4K$l&y41dl z02=!El4odTmduf@msA+3#I#bffuk&+lrtTTaaH>cJRfNLyq@0FdjKy=}3*ae**aG3O&BR&NiV zi6)=-+afArhLm=E-+oR?OMxGX~efnp(p$w35i1Rd`pfqH0<1oS1( z?+ka0x~OtEqs#pqnc~`{k3gfrh*)`e+ayv$-7|yvDI3*kx|p^grJDyA@7d-#;;41W zzX?BBW$)8y5d_|hM%Xe|E&*?eEtNwh5Cj%Atz&c>R@)GW*{yV`L-u32m_Otnb$uUx zm@i{zOc$Oe5z5r)@zi!8A>7V+%J6a@pcshBHgn%8fY0qjB8k$Hybby4$%^b~^;QC` zrkb#tCl!1k9dH%qT0SOXvc*o&Ijlw%Rltlf1eXLlJ4gG5U>6>Zq-{jKr{|(=RIh-G zQVsUk`g*>f1Z0=)@Rg!8LEOC-sH{aNrjqv&mvKkt`C8K!v%xhLeBQ=EnIy|t;{dv$ zE9CQH*tU&cu|P!@QiG3j3}KSz-&Ej?6%%{iS+wODp#Za~iT~XG+rwr)2yScshL$L8 zyq74NB@4tBpOjPJzB|6)8MV$W2Eq0cMfRqc^j;5te6wq`l5f}qUY`d)G!-afmDJbe zpaIM8h#?*_hfx9u8I*#=t5N*=FifhVZnOZ=D3?uNGnW7i#5|zE5OT?)4%9A0BQRI* z`8=9n8n=mJ9pyMj?by^L$y$07x5!;hY!Nf6GGwBNCEttgGE6jc{0&5rU}d^BzAB{h zw@2rL6I{3pl591MZ#^6pOi$wGb6eBPV*>M~_rUmbG2kJl(}X;qm{zyJ()fVcR|FId zVwm$2<(0OGzpWmDFH>|vuDg?%h?Uhu?Gs~A`hJ8h6u^2tY~{>OF@*tj?giXS`O7-Y zJX$zZVsldwUMc4j!4}LQDW7mNU?@Oi`(gly34{m?r+L1tB^bFG7mIH8E38YM>!GTi zj7XR&e${dWMByba3~Cp(S70EvsE0h&pc?uX8zK<^Oeba}le2&1Wn6pBjjs(8Emz>@ zx!2e}w8zqn36F*mZau-m7#vkoT}Dl*qJ0r!FV93+MmV@8{m|X1N7=A-^T!yY!o}zGZ&A!eH5ouZ|B=pqsl0)&+ERWGkFjq=vjLd*L z8V#;#s|-b6yW|YfU-`ou!)A_+#%H6@On6Qwd->4jLNWgU!4zEhT9$owE>15E5d(5D zmPnc8q&aSStg{6YtBo?-O6>`DhTR4+Arf`s^6!NgQG?^v-KZR=}F)$p7 z8et?dpv>iLBVXQNa1{0L;3*fWd9iJN%^5I1kew1r+-S{r z^?5TWTpKz}Sd*vh8~a2M!F=;5$@^9`7yhbqTZ(Y%vOc;0S=3UJq6{#Xh^%n~krg?I zB+Hw_#ivE)O=x$eAP|zP;kWYLe01z`Yiz#*GBoX7c8uCKqwTS3_6mrCiYB6mJTkA; z0&Sr!OD!(zM;1h)B=X&*!e3tzfYhjVho^dQNR->UEn+bmM(bR0W;-d$#c3|#nT)G* zrN`l`DBgQ#xF=*_kA@0hDEc&*yvT8h^5?u5*Fru zu|>LHg|3m8HV2fP3-}5`kpq_b!tkc&#rOcH3zhJOIu_a(q^KPaVHcBgi>y~(a?7VJ zKY1K0Qf{;8HhU{M^R^Ct+Zdh;eoJ`jxhJZg5NBgNnkTxWl=zp!ZcZUMOg^2OzM2i$ z=fzqhwOHKbDN3}S8>wF-9c*p@gpJ5@4WDPe^#?y4ejtg6iqcdm9KV`UuzuL5uo6DC z3v(H^PJOfeQ4fQMFvz^4`B=%^%6B3xio#)S6os1$HEHm%2Ni_?>LUL^Bhe(%pxCyQf5?zwH+A^n-_X4FHkch?@EAb4Nd%9fsJ8h zuzm%O6aAs$ncya2hmBmbj$8zr5w4P{`Hzs<`%`hrNF>d@gGmHs6Q9sE`aKvoJcRPz z>5Iyr*u}Of9g{izJBaJE^3UDI<^;}F%hrW+ivq7XLe@LM!3 z`mOu`_fo;zyU?{qSx#*m6g^oBrjoZtJk=UKT;QkDj|@TP`mYZ^$OVU8Sklzl6rZO7 z01xzHDt3E-R!y6p?~RC~nrbfWN-;omH~}7t!9>IlV)d(!C1vTkeAicKm8R{zL>)V= zl6jn=YRQS%{2zq8#rM?{Qj%x3T^IPV&fs-o8kgvfrdP4|{Ro=yGn$hP&kuoL3wjWa z05)bhk!)aqh2JgAUlBD-Z;KfMbhT?F@>2q}HPFkoxwIns-AFUam1faAH+F67;EBx} 
z2^Q6M2n9v0Y-YGgB&FfC-g1W-yL$q6#lAIwKyN##rfa(rVv$1m=lSMw`T%=h5iKl% zf-1}CDk~z9Y}H(t8>d1(61-WQbs4@*LueRDg}yj0#~cg6%&M(!oC#5!9f}KY@Pj&h z5XBXU=r=lLn0*_Sv4S6Vb15-lu-wiGgbR|V^P+vY8xH^S^v!sCb1iOgVdJ_$05x*%9n9$J-JE zq&y!)By>5lX?gv^2*!CuC94py?w!PvA#notkw*KH3+m07=cBHA^aCi}|7ZN8cfikn zru~j^iqi4CJ-XcEH`d1~yW8SExCbSO8pkgkCTK$p++YL>k^&&YGqIW<;;4cH5nfUR z2@Z?~FyEi$49O0&#FKE&mcXVLaj}Ci-HYUSoAzrj1UH;5PI94W(O=_=WQY6^@cSfR z8m`%6^G`KpXzUn^=&~q9=v5Bew}bIGW(O0jaiXYAmqJ7M324)+IpJ)!W{RhAxi1jL z(6UawK(H+>b9OCHn{BFrVT&$K^*$m92vSQRhO;S~74Bto3PDkN(p|N%_~GzVJs&Qt zF0S-5PH_E_f3%hOr5z;^a}|MMjpjSugdd-=BXgBr4xi_fh;$uBjB&J5A8V%vM_HKq zO@z@h3wWt64L_mFrvQ@|V&J@ore2$|w~Cqx*hLPEpOZI$_!1|r=0Goc$d=xJE3)I8 zVkeVinDcKT7Ss8f)*wRO89vSz-qBgvKl92Y=;z+ab_k7!Qr5jHOp$*6Jr}}a?pzbmWnCkmK=sKec2UBOLQ0_&Pgx)7JBU=ww38-r%|eF8b$cmm6Eu&g2P|vDi?cz zb<;PI7Z<5QdP(eX+we_s)j<=OOxj>T#tL%g=v!b*-p*!S`=i4uH@6x{tX#1s(ma9h zkYYIH|Gylbi-tUADHarsEldv&_sG~u8UZf__+yCWe<4acH5V~e4^5cb&b{L&$UPZs zd!ZW>x(Knq1rxBSWtLNhqXr9K!rG zwwHKfrY9QW4kjD$C#Ou1%sLr;6M8}jjudWWE4ZB;X1i^7n3bCg5d-sG5{MV~4J?_) zutZ1{LO~#OPSSD;mTeL|Z{|XdO%hH(p_20mrk1r5iApo-=yM5Mrfa>ZWysZ5z;Yy{ zwy){8ga;eRt>KU z7_tnAeC562t*d*vc1i_NDSlC5Dqt`e(?bz0)3{~3Din4B7y9v`hNy;owW$Qv% zx-z;DeLSRBWVJ!fw>9cxtx|X+;7tC-vWY1G^&=B($T=vb?9?s!Ub)gX8&ZB+j+)lX zD>BLRjghRHH015)#!9hKz0n`MZ(*u%<5`gu_g`1vIkw|E&P29Qc{K4F<-4dwDq+%s zPU^C8cc_EnaL-m;h}OgjzzIR2QTUVrL{5*k@;UAwb^#Y@=fEq^j;ad-jI{EtBjsGz zMT~4#juJdKkfln@KOR2LQN%XjNGaWjUS~bFTZx3OC2r-VM)0P3J+7IHqzeqFA|BX6 z#?2a$p>cv4-3g0N;Kgefad-_UGO#Cm`QWIP({Ingxx(79VHTG8Mp*>9O+q1r?gPDb zNw1R;j9Uc71{#N+Eb#E4iu5Yk3^W(Od&@Z6ip#?Pe~ZC6njC0yAMqiN5k29 zWfb605|&n;_(zXMGl@g~n}ckh48o}Q*1*o=0?~J!if8<1$@-jIM}t-$4&pprVjVOo z*A!CL+BYDr@eqv~df|;V7vxPC)rN!@$qf}Rt!UN3rX+Ai~*Ohy^rhby?uNR|1*CjehaIRVll%rEue)&JuMR&X{O_UqHRfq*|&$6FA{K3 zR%u-M5%V|_Uxag5A_RQzOfFiG5glllw-O?QyRcCbVQ`LeDBKt3Qv&Eq__PhvuO zD@@jwKYW^U+cig{I`_1O2;)%%?2EBsblO6N{8WYlG)O9&X+qwgs2KUWErEPT+A7el%s4eHk4kv>V#JR=0t(LIcDuA=xvR6Q(p+ zF;zL^Dg7{y-y?GPbYr+X)N^lF{Y-HGS}>1_o8V{~4Z=ZPw@Cz4 z5d)g`z9)3vBnl~qA-rdzb(o=M9_%0i%=|)$5DnTXqF%8~T?Z4@ppK)-xaq1mv|?}M zvPOScZJy3GT36^N-C9W+h)>3UOSJ@dy_Nvw%)6O^vS8R+{96T5A^_Vge$a3!`$CFh8fkb_YCz54`D#V(6?tfR zI~}@mx1t4^E)`kblXp1{bR9QZe3hB8Lc+umv>f{(OJ}Axv~p!)M~OS~d#KdZN%5$G zWj4G*AxdJMV53To&WObpJ6I#m$itqA5>(Cam1wby7+O8F)j;dccVQV}9OjhSU2jbY zo<#7`LqE89Roa4MgU98gaVnfBCn1dH`~}}7@**+OPvu0Rx}=(px^)kGt|#Wn$& zc`;%dA%@~%zwRJ`H8nOAqxG#zU?p@_trdGY6w;i0B)Qd6APv0B++xmt@l|Jp`$I+O)*W;$Xqg12Fq^vNT=G_0=&=SE zf@|Ten6${oF2uW^i!o-tyQGGBR$_!M^DRV40G3Vp2_Cp3$*&0TR2Fu$>@3z6Y2vt& zwG3zLihkeGt=YIcJ6g}Jqhi+I4BsT!kq^0ms4OnEqDNh0GI{B(;ck+wRdkrf7_|Ui zk&MFjZf27u_uYzFfkM6$H^uB55oL$`89Q|IM(rp-x7V6la!8UFOvc656q5^~=MJ@v zF#k)J zumT;giG$i~1*DN#FS9e;j4Rpfw8M^&=BW5+XLSF@sJ#j2?Rg3{^#Bw^8@5?Jo#!4c zhH`5$Vl#wVRQJFUenDmc+*S;%Me>YKJR6?nlHD5ah~8_3gbPKTbHwSHan)87yVRs9 zC8bG6KqS{XR5i_!Kgtz3PEPMV(1n5hA2gl*n~l`Rs1()sf6a75BOO50S^VO2H_Z^!;aj<|-4W@q(L7KOn0_6(+=aV1@8o%s=9w!O;ZgBYV{9yM zib=~E?XWX&6f@VbNj#8qf)`eX04WLHuYLM811xLtjwjB;Co2g4@o->6`58d4kWtfv*=oZ@h~CZaeK-T23OD3Z)WOBqFoW#k9P za3%E-T%dL%yUgr~7mn0+cuOHV1D$QyyxGSY80Q3PjIzJ^2Q#CQw&0io2IYIhFr->aMMzdm`Z1)Xhn6mn(3bZ`Vxe#ci-eWtVtf&+u5}mITn{2Q*v15% z7HjZ|}?Y}_ z9NadoC{{c4C7;;po82!Ak9JnV*=$FpWN(mZo8ykaayboX-!eoDUr(hk&ZYH-Xtkt- zo}~1*dDkY0ECZ7RVd|E7u&%z%GT4cz@Z#N#Ngws z84v%1C-Iz!(U%%?ef0%wEFp@Uo9E^VeK7n;plY(0gr3}2@ecjE)?~zy)&+bTKe9S+ z#DXu!NF@B9+4ekWXClHl3^{8rOVBu<00%H%hIMh)i`;^VH0{%IvdwycT zqez7?%`%de|9ILM_@t6!n*B0nj&I!xrE(#{66}<979@O({Uj!|oy=FiEyLygptB5YD)6*0sWX%jidU=>z02PdX! 
zSN@0=4huJOh0W>Xipr6tE$3!tD8VsqS zm!p}toUY_!cuTaaqIg(8A?OVU{I47c~cH$9was5fd?Q*8yglSzc?y#4sA5#ygP|{djmF zwUi*vyOx-klg1Sqar7KzD8#PzVF^r^ik#0*BSr1|Xt)tTrQ1ZPN>Uyv0y7j12@%Gw zEW2-Wcq$lWM+x~AF)IuOa4(ZUk=Sv0CT#)3<;Lw~1`kfHmDdxaOz-9F1Bm$6M4#+W zP~gPPs}OCl`H3a!S98W(N0x0lE*sf`RTg0;tzEfhOBFh49TE1`9OwwQ+KURoiMgzi zK9ODF2O>pVlqdmoO9($9lr3fFH-{f;qKNXjscbjZfJRK}PGPOOfs?`feC2jrQq3fs zRX)H+QM4NBl1LdX4#{><`U0FBP$$xGtS~3NF6x%d6|zhH%+lmkd!aX-#;iA!6|PGz z|6mRL;-E8PyJ0Bj>Y)0fQJk4taHDZb?*{#++IBQ3&;Yr0V%mIUdgROG{g0`gM$0&RtXjZ~wcn0KHGhelWK~q=iojz?CW(G0az@^I6N8GC>RnwX;2_+-i-$G-^7NmP0ToWcG37 zccU*AcW(VfEK4r3D^6MwNUl$KJxq{=NH2cSTI?8Ho*_L=@I=CPq&XS~fkb@L+;EqV zGnaP4vmo7;40L+g>iP&P9Gn<8ScE4km{?99h6N~-+S#ZkK}^GpFyZpz*4C~%KiwTZ z7AvG5Kft7;hh85375v7M1z?X28jd)-h)a%~Nk(wOUWiF!*4#tNfI;iA-g|x=;Snqd zv6yvR7#A8bYuv(mD{f%QsmztTAQ9aSVcez{vv1Opinvbxz4bz8Jg?(4q7KGo1y;6) zCwU8UZLW9kzz^|nh~#7UJDP);Ky+$@thpQwor({<9G)fY2*tXCfP)?N!l<4k_LQ4z zd)Uakc$LgW245xu2glg8yt; z?t<8|WDp5&=p@lMPJyuyDZ7HeGZev`=Y)ke$xJCQqtKG_PEcOXY!^bhh)w2t}+6OnIX zRiDva1pYNxvT8dj;oD#7?Oza#xD-ElLn;zt5-B12qQ0!pywO+d{fyf{yb@wH{V{wc z`FQNNvCSUZf$jOz_l-a0<+QGWDU|C66&{1nETeWj!qnRxt)~ zfNH%S!I`zdmyLIM#Rf4|G-pk8-4@x{e*{EUroTqMhMz65i>@VRxx&??Oh0gmRZzYo z*3vr~@F1(l2(jWDdY^ezUNPA@Ii<+A**6+JxoPF5DMV}cW>AVK^ipt-&k1Cw)YoS9 zD$I8y)af+b|0=o<^D{>famR(R9_qo6X2x*8i`0)pL=a{INgO^Sh9?Sqe7pQm5 zcbV+>m8iiZ-rAzwrbW{yG6yqS!;*_%!=Moo0v4}bPa~-odqa_#aI7o^3CBb4!t$6M&E!%8CPy~T#Eugu>ZGztKvG7gLNGqKE~ip;#d@4 zwc4w&Bs2xQfL54#9u+sk;i|SC2McJ~_fI%aRs$tf8s4tJ_xD=kpe9|&GlO1eE+Gsl zg*kASly3(bdwK=}>pdbNNfWs|+-QV^fvGKMu?e5`~oLBMZRqP3h&^ocW2+m&2nR3zB3MYTjR;4LVY?!e%j! zK~|L;{_*U5T2wr_dLPxWwT0SQE+vSXVIJVsl+3?rxA+uy~3c;??} z4TAInl3b%l%gDtSzD3ODLZKzVN4FQzs#y!BRB&*RiGT=n)kLUhz-=;7aW-Uj4JL|~ zqsL4xB1^{o`KcWT3=~c|^$HVVQ{2juI$!OLWp|3N$98AE^?1<(3kR&|Vzt)9P zI};D<18`9$bLub9kaNRtI}5z=9+Zj@&Oz9Sg<`2Te6M`}NEi+>sIFJUSM+xQkPl&td2q%S%MGUPoXIZkTf(`4uDf zB=tFXsO^?l1mjFGxrsjMEf}_DR)zx+zNKlePoXN9!GYMpMqS(k`1l38TqzCwC3=rG zeMDB1C@m3QbtmLT>J(TK1Ll=Owmp2I0Y_vzmk6E4ruvedU)+TUlyl*MpXm_v(9`hi z*DWNXn1P2>V~IeqF6PyCs%0c@4Yj{8kDWUbm^rJ`?Tuk0N6enJ+!+LC91&d)3FF25 z7g&g6clPHhuSXTU)H3bh9m1y0{Y9>bbUoO(c@0qv;rj~h!NOQkJ&Cls_(GG$k+GN0 zy}lxrscXZLVQ%^BE1aa2DE9=zEaCdlSuQJMRJ?eP$!KCOg2xvT0u-j`DT>vpT$IQ>SMzZP;k$4MH=|F$K9 zNlVqgzWPiUMF$R}Jc%1IR-IxAgSK^uc+n``IHCs=^H#?iPk;T!n2F<@{a>B&oL_1> zkbl2AX7E8_>(E|}RnQ_NqL+nS928nl6G{>lCj)?AiJZS*)NAT39#6aQ^U*=_^WW!+BGWk3W-Duy+#|o86EDua@mPK=dj~K z)-lJ#l3i3T5QM^Gp$*&=LB7ey#x@vR+{B4)j#G4}>e?sy7x@k(n;Z8VuxAEb10wov zIwPfshUF#dkELrz3ZrNL(l0F^gZDX%;Y2;&6I+MfXclg9B2ik3;cgGlg@HX-O(+8TrezedVwzvj(|4*ahYf^xW)Yr#)*&7gs#4HH5lw51E7?->9z7DRrB@w;2 zLmfzBgn2XDFEE73rVEUfTLne{?yPK-S{DTcUq zHGU=xZqxX1_|pJq1RrYh1J&i2c;#)Bnms&BM`#FnX_M#Hg?}&dC@Ryr#h(O-Fcbt+Pq7%Ihn<2MO~q(JC)I6o83|2U2fQpbB+??HoEXG8qr!s{RrfI_6LcL)+m; zN)C1d=1|TtwB}J!GX?SU6XG}`a#+t43ut|b-kg^iGTRG0nm&upLMF0EffFh=p=p$$ zTdDgLDR`JK+?8NL4p20m(kE~}68_GivjF5;Xq*waz$2bSL2zl&5jZll0&~VGA2A%3 zN)Kvc(M`kXC3KX}Wev>Lz<7qKHGxI01P`;;7XYpn9ucrA%As^HA$#vS%h`exkqCj0 z^X$M(+2@#=pv1lK;ma_cKWfI##HhoW5bhaEWX}`W{BX$w*w&dt3z|%d#%0%P-L1qch_7@d8|g ziYTF5SW5zuN7&ZUi$*M(5UuGEs1D;CM)k^}Z7x0jb82fxLj@mEX>7+sg4oGM4;$nqRb}`K?2Q6y+Yez6S-h!y5ap20d}}w zEO~R-O{Ov%EdO?$sUhMAwt-#-yW@D}_}9)^jz4l9*u|!tB47{~BMKwg>sp#%2pp() zHp;LYwVy$G-9 zuDe6pTDJgZS&ZS~)-vN+nEJsb6tGwp(0BiQ`1ap?``5Yjzvsc0;7l}=)uW>O#l$Gs zocdDJ>ArkuM0tejCTzYMHU*-_F6Q>66M8<6BHVj)6=E7WF3#!G-Q<)+-A6t<8iSNG z5o_cd4S4%|KIkakpScnrk}s)>K#1C{d~9qd%OA!+UGg$)Am=}DHv}lkW8NEeI)%-a zRPOt|C68#3zz$8z@NpwM^bHMf;NPAk!Y)Lf()U7CxeQQgjK~bLuKqu^-n6;0G`r68 zU$DRV+hNlRs#|2Ux~gj;H5iE;t8xGns4BKhhIlb$-bw%$#E?Tt>Iip#_isoyNm-(8 
zQM47b9G1foRmuP5XT1k|uXWO{2(lBvz305cp7+{&|2C6cf7^_(8(X6j5$9QlAcF=s zyUv~LS$#`(Pcj^_#_#{4{>T`k5T-yr6J4%k1Q6EXG~{>0W0V^^Y8@`H#c+PSBs_xb zi3s(`K=j{-$Km4wwYom#kzO3!njp&);X4gr_-cAuA06&+!w7fzJzc*ok>+%5i*JEWEZY;Zy9|N3@`1ZPVr1g`c3kgmeg;EBOy?gr) zr~kG-I{=)4i&oy63bppFA*?j(V2CVGgomS(xyo*cABCB5lMFU7vomnQ{&`OL)?x-6 zG5qawb#6hGn~HaaB5o&Fcx+*Pe9i-;c6|{^-UETO;kP($%-gE~TT{Ly1IpI{ye{?o zF4nI92r%pqfC=U&ttMGld9b6`g|!`fMUvd>^!jgDJ<%lb9$?5B6XycHD3TR=u8&r=Jy#tSE^= zTg0)Ee8X%99A30d_ij@G1FPx>tUV7pmZP3fNhzm#z@JXPTbr6AX4%B9JvGe^G{-1W za3ni8Bnruo15GHX1<1lZFVmDy2XIDFNtHd;M-d!}tQz%O!6$-ZTt~hcu!;<*;MS1{ z^>Hh9H7KX{2nw~1bna;Ws^3kydpdlhQAq|CmFy%4RHTE=vMB9 zgVCgbESO&dd#$Z68Slt+N8Xfi89H8_M%ZTejlxD=ngk0HJbrO3T@wTg?o46%gd;1! zy6)cD++{cj_#e0&wL}F@oeh9%kr2Mfha(MLL?jy{_D1rqZ6TEu3uYFoOZ6k+aY{UE z9A**BKgd$p0k|~r9|5=A;CZJra?@9!eFp?n2-6vF9rycaQLR$Z?P5bZ1rY~`=F!8n zjFMomsN`y13YPJ^+nW~sN+RU^| zA4w-bn)|uc(cMwU?MdN`=%# zqg?{oO7=#guNTuV=f2#TWJP#rAaxj1+tS?E5mp9yp-50dVHK(FMJZgCY7ELlGkKFp zduqtcG%w*0n5L+n<|B||?FoKsvRu2oSEx!q@e<e|?u7v2msl_wz5nIm64}nu0qgS%PPafr4{M9-N zHquyVCme}n5k9xN85Zd5mJ~^SH?3Ucf&Fl?}D6OmovXV;NKd<8!sA>J8HGyyqcPy!p3kMgQ`WlB? zFF=B2H*ewVh%U6~`z-tEV{Ct$Kx=GozdtL{mR=XWH`D)J5<(4oY*8 zBlfMH@j}5n9-{d9QIx-s@>hyYNlzcej;_i-vitOBuxJ9rp@_|?GuQ|&l)@G7m8vOq zpj30+yf7`#HuV?AAhm>-L)hMk8XjH;TKn>df>!OsR6h?h3+N;10S?h8$6*kCZE5in z{$o1#Pk!R9k<)c56`{xT&FrIl?63g(@ON`4H@+HDuXcjA2&5&T5RyO*UeoCBbwVc; z6L+s(fI&n;{AQB01EWiL#B{o!y z&q2}go*Biljc52JcX9~os3y;xqG*0}*DMCPH{CLc;y1xNzO~su4bg`GIv=tx4p)tf zY4cBRYBW0RRDl=22HUjGv}C} zP3?xM`$db4(VE3e8r`FcHrPO$Ane7TqPBd~$z7t(f95}-QnSqgaY6VFFN(a~q=oGvgqfxLGFC-Yzr~q1jAVoT6M)I$-aj?1nCn3na)h2I$p`;po09 zc<7)N`@6RkaMUS|F(F|Hsi*>wN$&?N9yMVwQ9YLJHc1loBnh3B&xElTu|cSuI=BQj z0QnIXniy`Z9owANn@ezIw@j?CoH~_Xm}gS$9kqcFtlxLS)f6 zn~Z!q-={K_P9mgvMtmB6NR;51w81*PkFxh;0cgynp+bkh_mnCgT!^T-@Glret+3Va zBVVmBhC;@sOHywJ_^G$1GtnG2f+q6Um>}xRyNL4Q4a-xpfKtbLdMvglf#ZvDR8+w9 z$)H!x{Hz1xbT)yliyvrA1I2)NDk-#Uh&;38RgP_;BXQWu^GkFf z=#*L>$}=82diZzzb2LP$)jXJ<`XQHaX8?R;ZaL2hTeQ!=#$ydThq#Z+@~JU2F=EWc zk*fEDV*B-XS_V56)E*#?m^^vUGkb*ml1qV3Bvt!2Ew z4rsuQ>#OOp&kxr= zL2x9N*q@s?C~2LIxYRgV&DrX9fKc42$%*s2O4WNh6NZCNSO)i z&#EHhn)qJBjNWjXSScpm9VD@TA}SU=Xr4=?u=)b!{F9siruv7!$TmWiBBVF3OkFf4 zdOmRH5J26AVv&7mR8n4qpe4Q>u9J+rbtLnU@{2IdoB@jP-PsXaWZq%eBR2+N-x-Fa z4+(`0JCSFzT@Myr@$bdWX^Q1#*n4ml_4(XpiZvQIDR{6%Rh)0sPl(N%PDqEHbSI8a zr;mJ+360yDe&MeuzxIhg#~{Q*A2HdF~RCqdM6pXTn$x|NZ|_j}Y#Q;31rT-aZJ%0^o7uWA>));{B~egr1D|)T%Hv z`BE);RJNq~zuqHAzewL+Av;?j6G%lP`FJi*2TG;$pm*$ys=zSJbdoVAC#F4guR=`f zCKp<0^mt2yedbd&f$)me9p_Dnmw$2d-&Ftb+EfQ~KxoIr2W&_=1wRU)PzEASS<%HB z2d&QW^K2-nQdB8`J`Ba(ZCGOKEl5M5u*QttowiGk07^Npe=jvS#>(mfTC+$|j#Ag0 z!A|PApdtrDsbNCqdfTV+2=JM#J_bJr4n|GcoyS;s9_KLRSu@Y*%~sF3ufjg86d$L^ z7i-bSc|cdGwv|-*z~kBz$1J#P)96#D=b79-@}EVZ&;UCqextj|x<&rrUB8YgB@4ez zuv!vT{s2zI<%>G&J5Q%wuU#SA0CjW3EqDIW^pE_x*&B$%lrQ1-3F_724@*|iD#Ld{ z39pdRA^+6U9uf?xAlHZ~k--Jz4+S{)KFEh;;R#Vx9M<+Yz$HBID;wR3O+aN7=F>!G zeG&h1`jMqK14cz$%i!xR0p7fp(ZHZ+f-nQ+i3uMihdOu5^Qp|XM-)Th13{sE#SB<< z)Ji$89k1UZ&mxE`jnORr{N?n^`X@mOAL=k(v4#5`BkccbF)PE?r&9b zJEO1MV)pF(M*6ju{ljmjcmK%*@RA0x$*zYNj3JMNr9uk`ou$s)Maj33mllku@ADEf zn4?6Z{i^L@`FNLjwAfpzx(VK8Nfb~LH_U)k&LX@u4+5%JsZaL$waHAGNG`8_!T4(w zGV;tX2Uk2AC-@1FMiA0H&a94XF|%)!y2rhLv0-d4{eMbSvPo}w4%GVkXNW6Q(z24) zRrVT2ahHY~!WH#A9JpGeBeWIlF7uMFj*e6*(e4{?QoQf_Jb^8o zw?!Ol4z1R^6^F?Vw!|aIj`=)qQt?6RBVDX~L>;4|%7NlPJ;IV&G~=UZp;b6Jgn{Mp z0>IYBxgXMtLuBs5?KHT*vb&g}DvV^W21UC{yeOwG#CLu-@uYV+E*+>33{|~<-4vq? 
zyoun!bhmb`KKn1~GCy$~4ndlc1!YL_C^hY|XaiyO+*&V~`mfsM-V|M}Ft`;p zsD~H&q0X7mA>M%>r5^SmOfBkUh>iGB+%)10&xG6Z9@Rzx3$Sw{P*&Jclz|>}%`b0& z@=Z7BKxOD1JQNnS);m|*B+6lj24cjnyyW#60N5-vT|NhByv%wZ4Af?K00)44GqKbipQYLI#CtOx!cdq^i^pQ#oGXeduLb%@o-#| zgK4{-B99_Bsj>u|&!TEUB*S2r5w$*FDhWaB16(Xp9Yf_Z;7nB;uDWirh@Q+=*JrZ# zfE7zgx=SnI?L`I-QFw6Vl4Bu}IvA{eHomS?E*rPLUZ@mj?Ml_KpVuE0P4Tw|oI2B1 z5H3vS56D!fzg3;?3DQMJ<_}=nvP8>YETwej(WyE4^YL`|VJ+)9sg3*y{KO zHs~f2$17&qbJ-bldGTIy7Cjtb&TUj;cX)dzm5?Q+sO3=T!Vi)A& ziw-ttczz{66c5Cg@UEY83y94$AGgVZ8sn_ieXFp?mE<-$6n^geRhehcSH?-n1|4Lu zk?kZSe7N6`|1NE_JKd>&G9cD0F-~sf_9ntlhe}XFFd$8M3fXa!*x%`X5#l!y=2I!= z-}nfKPNJ3}86V-Ofl562Q?-P!p{79U^|>FRflebl;&WEfR{*dMg-Kq7CV^y4^s@3x z&!4lstb!yonZ<*1)9R4@TIEHjj>b}ckssnJCy|2O z!BlK+RAwzm94XUbAPOLt?Gt$kyNySaAm7K<^YsnP7BPi3#6`;2orcEPFgGcrRNLoc z0Fmlo6jkwCr~+fX-rXnDhra4dq1F9RU<$9}-N>u>SW$&=Dqs4{<;o(_dG~ z!ewNlZlgqs5FWV=CpfxVIS|bte#}_8_;ss_goF+t3Ylm)tg`HdH*${Jw%0e30kK|T zmn7 z(7F$gD3S}K{}cfPe_d@v)~DB}^*WGE*|T*dqhob1j#RsmCdQ~IVpH)JctQZcjVegw``^oq}b~_~cn@82$Hb&|UN7v-~4d}62g&e1tG`LM_ z#0g&c>#|EdEaJhGynhU>w71M+Xv20tiJeqdhL}y zU39_uMo3??E7qr>L)5;JWvN{WgWKK!zCq(g_*o8?fTv|4pb9kQk(sy@eZl{3=o@(rzP684NIL z9ixg@m!DZ--Y@&@bDFvD7i!m5WBYnLPIw%24tM2{jEVHu=Ov z$O9v=H?-Etb%=bKzUpgcGCw*S*!@8}e$E#>$Di4%&dMB@ccdCfc9~;vwqZS%kX_{l z8u}=pNin-EX!RkyZqbK7j0Li&`Xc6K@u~or3Gf+o;Vckw(Uj?1h~AJYl^$j%zN|rf zhk@l{7WX`qrAUY$*P-3<6b}hBnOSJTt<)cLAbf_{K4C$eu3irX?!P}Jz2%>Pe1<9` zXm1)&ABPj5lSRUCdK5n+cQA^58u+!aqn9>^hSev})Gd_|zAc{WX;(uHSNC;ooKGfB zO^Pg7AB@z&^LD|d&=H}fD_*-0Wp~=G2P)3=p|`!y)IndwS1*vrsjN^bue~)4ghe;! zUFF;Bbx2b`Xk1|G`A?^hO;FQB7T+dS&IZV*ZbnAGaY-=Q$J3{^D0L0 zZ)p5UJZySn7DAeyD-4y^>O3LGi+G?J#I}%i5kZqZ+}ejn`FY?PMr;fiwjZc#MkSbK z!FVA)_AR}kxTz2LO2RRI+}1HaPAe;8M6A^F@Jj;uvx8rYMDyj{A~Nfru*1Uv+a-s!h>%{N zQ>yJ13PyAuv>|F;y@2;>nQDT9h%4S$O&jyyZkCbAakvQR#u$1F3)X|)Yk~%nCVS`s zED7)gdgZ1!KvAez^~R3sF#KBYI6KFF1JiG1d#hC%d`&J}bpnsco!7|JT#+`Zd%5 z#mb+MqYXFQU7&BSn41A%y%kZj17mEa7v~vy|o?UojF_TX1r4y zQF1XmYA2AFQ5vt6-18CMhR?ztUhPv8K{A5d^!XgC7%Mff09Cpphw%6Xg31YLLHjbJ ztPtl0=x1qsLQ}!Ox2HR`A|Rx4y6Q*pcKp2W9|v&5_bR+kRyOXSK(C@k6bsw)j5svA zCn)|GL13siWn_mlU?L~v5OG-qA=11p3G`uhl-cah?GJpZR4e*Yx44Ci&HB*g-fF+F z293i8IBsu|ueoV45rs}L0!&&tJVPWY1ni0#GI-&B`Tgm(AGw=m%#*2|OCcE1QQk1} zs)%FR`@s>KjBm#ndr%mgAJdhrRRVKVvxa0;<;OUHJ|lF}9W4v;=)uC$FJ zi9SnI!IctZem36edQt31pxZ5Z8}Zln&2f0Y@~7tVXcg-gD(B<9HQzt-BBk(QD@5*S!Qf^A;Jb zN2{ExJ_aCcmoN$=j?lc)_|Uon=Rn3Ufjr~*=*qs?U&a>e&`GhHc8 z6}jH|GDH~7RQy`B_F{zZ9nxU_>kON%BVdIttP?aOv>7Cl;Uaa#lPTlN#NiHQGum*q zWKea`rHN|)`Vt=)_cEJ0+#UJ4A?>`^ZP`M|C$E^oE5Z{#A$8TIScm7=Qb72@Wc~(r zJV8X$!oi8EmzoO|mhvBEYh7IR!m;vg-%&QV2s06-G1q)H{k#?+qwVd@G%qG8~|g5 z@1X8Q^{4?H4aOGs{ra^3tl+9SiJ>&sP?-FnFTDsHcTis^L4)IpBfP8ftM#`TnU{3i zT$_GSC8M`~(BEc+lj_*!0`2U&L7|U!e|gmIBC>d6PaLoKKo%q1mj>gow(6;BQUrsB z%&sQ>EjOs)AaJWn=lT+fC)nyVA{oT`GR*)BhC&`4;L$f0wveFlGV02)IkTa=7^2iv zD-Z3j`~*sOJfjikDd~NlEfU>)Nw z$rvuqPBb~d_bSv#xRoB-nQogeVUr~=SvWFP$(gd`LlC0sGzA4Z8YLJ82P2z84%|Q? 

   • Get the Code
   • Build
   • BLAS Build
-  • Prepare Data & Run
+  • Prepare and Quantize
+  • Run the quantized model
   • Memory/Disk Requirements
   • Quantization
   • Interactive mode
   • Constrained output with grammars
-  • Instruction mode with Alpaca
-  • Using OpenLLaMA
-  • Using GPT4All
-  • Using Pygmalion 7B & Metharme 7B
-  • Obtaining the Facebook LLaMA original model and Stanford Alpaca model data
-  • Verifying the model files
+  • Instruct mode
+  • Obtaining and using the Facebook LLaMA 2 model
   • Seminal papers and background on the models
   • Perplexity (measuring model quality)
   • Android
@@ -83,20 +80,16 @@ improved significantly thanks to many contributions. It is the main playground f
 
 **Supported models:**
 
+Typically, finetunes of the base models below are supported as well.
+
 - [X] LLaMA 🦙
 - [x] LLaMA 2 🦙🦙
-- [X] [Mistral AI v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1)
+- [X] [Mistral 7B](https://huggingface.co/mistralai/Mistral-7B-v0.1)
 - [x] [Mixtral MoE](https://huggingface.co/models?search=mistral-ai/Mixtral)
 - [X] Falcon
-- [X] [Alpaca](https://github.com/ggerganov/llama.cpp#instruction-mode-with-alpaca)
-- [X] [GPT4All](https://github.com/ggerganov/llama.cpp#using-gpt4all)
 - [X] [Chinese LLaMA / Alpaca](https://github.com/ymcui/Chinese-LLaMA-Alpaca) and [Chinese LLaMA-2 / Alpaca-2](https://github.com/ymcui/Chinese-LLaMA-Alpaca-2)
 - [X] [Vigogne (French)](https://github.com/bofenghuang/vigogne)
-- [X] [Vicuna](https://github.com/ggerganov/llama.cpp/discussions/643#discussioncomment-5533894)
 - [X] [Koala](https://bair.berkeley.edu/blog/2023/04/03/koala/)
-- [X] [OpenBuddy 🐶 (Multilingual)](https://github.com/OpenBuddy/OpenBuddy)
-- [X] [Pygmalion/Metharme](#using-pygmalion-7b--metharme-7b)
-- [X] [WizardLM](https://github.com/nlpxucan/WizardLM)
 - [X] [Baichuan 1 & 2](https://huggingface.co/models?search=baichuan-inc/Baichuan) + [derivations](https://huggingface.co/hiyouga/baichuan-7b-sft)
 - [X] [Aquila 1 & 2](https://huggingface.co/models?search=BAAI/Aquila)
 - [X] [Starcoder models](https://github.com/ggerganov/llama.cpp/pull/3187)
@@ -166,7 +159,7 @@ Unless otherwise noted these projects are open-source with permissive licensing:
 
 Here is a typical run using LLaMA v2 13B on M2 Ultra:
 
-```java
+```
 $ make -j && ./main -m models/llama-13b-v2/ggml-model-q4_0.gguf -p "Building a website can be done in 10 simple steps:\nStep 1:" -n 400 -e
 I llama.cpp build info:
 I UNAME_S:  Darwin
@@ -250,7 +243,7 @@ https://user-images.githubusercontent.com/1991296/224442907-7693d4be-acaa-4e01-8
 
 ## Usage
 
-Here are the end-to-end binary build and model conversion steps for the LLaMA-7B model.
+Here are the end-to-end binary build and model conversion steps for most supported models.
 
 ### Get the Code
 
@@ -635,7 +628,7 @@ Building the program with BLAS support may lead to some performance improvements
 
   **Without docker**:
 
-  Firstly, you need to make sure you installed [Vulkan SDK](https://vulkan.lunarg.com/doc/view/latest/linux/getting_started_ubuntu.html)
+  Firstly, you need to make sure you have installed the [Vulkan SDK](https://vulkan.lunarg.com/doc/view/latest/linux/getting_started_ubuntu.html)
 
   For example, on Ubuntu 22.04 (jammy), use the command below:
 
@@ -648,6 +641,8 @@ Building the program with BLAS support may lead to some performance improvements
   vulkaninfo
   ```
 
+  Alternatively, your package manager might be able to provide the appropriate libraries. For example, on Ubuntu 22.04 you can install `libvulkan-dev` instead.
+
   Then, build llama.cpp using the cmake command below:
 
   ```bash
@@ -662,34 +657,42 @@ Building the program with BLAS support may lead to some performance improvements
   # ggml_vulkan: Using Intel(R) Graphics (ADL GT2) | uma: 1 | fp16: 1 | warp size: 32
   ```
 
-### Prepare Data & Run
+### Prepare and Quantize
+
+To obtain the official LLaMA 2 weights, please see the Obtaining and using the Facebook LLaMA 2 model section. There is also a large selection of pre-quantized `gguf` models available on Hugging Face.
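As a concrete illustration of that pre-quantized route, a ready-made model can be fetched from Hugging Face and used directly, skipping the conversion steps below. This is only a sketch: `huggingface-cli` comes from the separate `huggingface_hub` package, and the repository and file names here are examples, not something this patch prescribes.

```bash
# fetch a pre-quantized GGUF model into ./models (example repo and file names)
pip install huggingface-hub
huggingface-cli download TheBloke/Llama-2-7B-GGUF llama-2-7b.Q4_K_M.gguf --local-dir ./models

# the downloaded file can then be passed straight to ./main via -m
./main -m ./models/llama-2-7b.Q4_K_M.gguf -n 128
```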
```bash -# obtain the original LLaMA model weights and place them in ./models +# obtain the official LLaMA model weights and place them in ./models ls ./models -65B 30B 13B 7B tokenizer_checklist.chk tokenizer.model +llama-2-7b tokenizer_checklist.chk tokenizer.model # [Optional] for models using BPE tokenizers ls ./models -65B 30B 13B 7B vocab.json + vocab.json +# [Optional] for PyTorch .bin models like Mistral-7B +ls ./models + # install Python dependencies python3 -m pip install -r requirements.txt -# convert the 7B model to ggml FP16 format -python3 convert.py models/7B/ +# convert the model to ggml FP16 format +python3 convert.py models/mymodel/ # [Optional] for models using BPE tokenizers -python convert.py models/7B/ --vocabtype bpe +python convert.py models/mymodel/ --vocabtype bpe -# quantize the model to 4-bits (using q4_0 method) -./quantize ./models/7B/ggml-model-f16.gguf ./models/7B/ggml-model-q4_0.gguf q4_0 +# quantize the model to 4-bits (using Q4_K_M method) +./quantize ./models/mymodel/ggml-model-f16.gguf ./models/mymodel/ggml-model-Q4_K_M.gguf Q4_K_M -# update the gguf filetype to current if older version is unsupported by another application -./quantize ./models/7B/ggml-model-q4_0.gguf ./models/7B/ggml-model-q4_0-v2.gguf COPY +# update the gguf filetype to current version if older version is now unsupported +./quantize ./models/mymodel/ggml-model-Q4_K_M.gguf ./models/mymodel/ggml-model-Q4_K_M-v2.gguf COPY +``` +### Run the quantized model -# run the inference -./main -m ./models/7B/ggml-model-q4_0.gguf -n 128 +```bash +# start inference on a gguf model +./main -m ./models/mymodel/ggml-model-Q4_K_M.gguf -n 128 ``` When running the larger models, make sure you have enough disk space to store all the intermediate files. @@ -710,7 +713,7 @@ From the unzipped folder, open a terminal/cmd window here and place a pre-conver As the models are currently fully loaded into memory, you will need adequate disk space to save them and sufficient RAM to load them. At the moment, memory and disk requirements are the same. -| Model | Original size | Quantized size (4-bit) | +| Model | Original size | Quantized size (Q4_0) | |------:|--------------:|-----------------------:| | 7B | 13 GB | 3.9 GB | | 13B | 24 GB | 7.8 GB | @@ -826,9 +829,9 @@ The `grammars/` folder contains a handful of sample grammars. To write your own, For authoring more complex JSON grammars, you can also check out https://grammar.intrinsiclabs.ai/, a browser app that lets you write TypeScript interfaces which it compiles to GBNF grammars that you can save for local use. Note that the app is built and maintained by members of the community, please file any issues or FRs on [its repo](http://github.com/intrinsiclabsai/gbnfgen) and not this one. -### Instruction mode with Alpaca +### Instruct mode -1. First, download the `ggml` Alpaca model into the `./models` folder +1. First, download and place the `ggml` model into the `./models` folder 2. Run the `main` tool like this: ``` @@ -854,50 +857,6 @@ cadaver, cauliflower, cabbage (vegetable), catalpa (tree) and Cailleach. > ``` -### Using [OpenLLaMA](https://github.com/openlm-research/open_llama) - -OpenLLaMA is an openly licensed reproduction of Meta's original LLaMA model. It uses the same architecture and is a drop-in replacement for the original LLaMA weights. 
- -- Download the [3B](https://huggingface.co/openlm-research/open_llama_3b), [7B](https://huggingface.co/openlm-research/open_llama_7b), or [13B](https://huggingface.co/openlm-research/open_llama_13b) model from Hugging Face. -- Convert the model to ggml FP16 format using `python convert.py ` - -### Using [GPT4All](https://github.com/nomic-ai/gpt4all) - -*Note: these instructions are likely obsoleted by the GGUF update* - -- Obtain the `tokenizer.model` file from LLaMA model and put it to `models` -- Obtain the `added_tokens.json` file from Alpaca model and put it to `models` -- Obtain the `gpt4all-lora-quantized.bin` file from GPT4All model and put it to `models/gpt4all-7B` -- It is distributed in the old `ggml` format which is now obsoleted -- You have to convert it to the new format using `convert.py`: - -```bash -python3 convert.py models/gpt4all-7B/gpt4all-lora-quantized.bin -``` - -- You can now use the newly generated `models/gpt4all-7B/ggml-model-q4_0.bin` model in exactly the same way as all other models - -- The newer GPT4All-J model is not yet supported! - -### Using Pygmalion 7B & Metharme 7B - -- Obtain the [LLaMA weights](#obtaining-the-facebook-llama-original-model-and-stanford-alpaca-model-data) -- Obtain the [Pygmalion 7B](https://huggingface.co/PygmalionAI/pygmalion-7b/) or [Metharme 7B](https://huggingface.co/PygmalionAI/metharme-7b) XOR encoded weights -- Convert the LLaMA model with [the latest HF convert script](https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/convert_llama_weights_to_hf.py) -- Merge the XOR files with the converted LLaMA weights by running the [xor_codec](https://huggingface.co/PygmalionAI/pygmalion-7b/blob/main/xor_codec.py) script -- Convert to `ggml` format using the `convert.py` script in this repo: -```bash -python3 convert.py pygmalion-7b/ --outtype q4_1 -``` -> The Pygmalion 7B & Metharme 7B weights are saved in [bfloat16](https://en.wikipedia.org/wiki/Bfloat16_floating-point_format) precision. If you wish to convert to `ggml` without quantizating, please specify the `--outtype` as `f32` instead of `f16`. - - -### Obtaining the Facebook LLaMA original model and Stanford Alpaca model data - -- **Under no circumstances should IPFS, magnet links, or any other links to model downloads be shared anywhere in this repository, including in issues, discussions, or pull requests. They will be immediately deleted.** -- The LLaMA models are officially distributed by Facebook and will **never** be provided through this repository. -- Refer to [Facebook's LLaMA repository](https://github.com/facebookresearch/llama/pull/73/files) if you need to request access to the model data. - ### Obtaining and using the Facebook LLaMA 2 model - Refer to [Facebook's LLaMA download page](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) if you want to access the model data. @@ -909,20 +868,6 @@ python3 convert.py pygmalion-7b/ --outtype q4_1 - [LLaMA 2 13B chat](https://huggingface.co/TheBloke/Llama-2-13B-chat-GGUF) - [LLaMA 2 70B chat](https://huggingface.co/TheBloke/Llama-2-70B-chat-GGUF) -### Verifying the model files - -Please verify the [sha256 checksums](SHA256SUMS) of all downloaded model files to confirm that you have the correct model data files before creating an issue relating to your model files. 
-- The following python script will verify if you have all possible latest files in your self-installed `./models` subdirectory: - -```bash -# run the verification script -./scripts/verify-checksum-models.py -``` - -- On linux or macOS it is also possible to run the following commands to verify if you have all possible latest files in your self-installed `./models` subdirectory: - - On Linux: `sha256sum --ignore-missing -c SHA256SUMS` - - on macOS: `shasum -a 256 --ignore-missing -c SHA256SUMS` - ### Seminal papers and background on the models If your issue is with model generation quality, then please at least scan the following links and papers to understand the limitations of LLaMA models. This is especially important when choosing an appropriate model size and appreciating both the significant and subtle differences between LLaMA models and ChatGPT: diff --git a/SHA256SUMS b/SHA256SUMS deleted file mode 100644 index ca4d5a4a5..000000000 --- a/SHA256SUMS +++ /dev/null @@ -1,40 +0,0 @@ -700df0d3013b703a806d2ae7f1bfb8e59814e3d06ae78be0c66368a50059f33d models/7B/consolidated.00.pth -666a4bb533b303bdaf89e1b6a3b6f93535d868de31d903afdc20983dc526c847 models/7B/ggml-model-f16.bin -ec2f2d1f0dfb73b72a4cbac7fa121abbe04c37ab327125a38248f930c0f09ddf models/7B/ggml-model-q4_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/7B/ggml-model-q4_1.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/7B/ggml-model-q5_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/7B/ggml-model-q5_1.bin -7e89e242ddc0dd6f060b43ca219ce8b3e8f08959a72cb3c0855df8bb04d46265 models/7B/params.json -745bf4e29a4dd6f411e72976d92b452da1b49168a4f41c951cfcc8051823cf08 models/13B/consolidated.00.pth -d5ccbcc465c71c0de439a5aeffebe8344c68a519bce70bc7f9f92654ee567085 models/13B/consolidated.01.pth -2b206e9b21fb1076f11cafc624e2af97c9e48ea09312a0962153acc20d45f808 models/13B/ggml-model-f16.bin -fad169e6f0f575402cf75945961cb4a8ecd824ba4da6be2af831f320c4348fa5 models/13B/ggml-model-q4_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/13B/ggml-model-q4_1.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/13B/ggml-model-q5_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/13B/ggml-model-q5_1.bin -4ab77bec4d4405ccb66a97b282574c89a94417e3c32e5f68f37e2876fc21322f models/13B/params.json -e23294a58552d8cdec5b7e8abb87993b97ea6eced4178ff2697c02472539d067 models/30B/consolidated.00.pth -4e077b7136c7ae2302e954860cf64930458d3076fcde9443f4d0e939e95903ff models/30B/consolidated.01.pth -24a87f01028cbd3a12de551dcedb712346c0b5cbdeff1454e0ddf2df9b675378 models/30B/consolidated.02.pth -1adfcef71420886119544949767f6a56cb6339b4d5fcde755d80fe68b49de93b models/30B/consolidated.03.pth -7e1b524061a9f4b27c22a12d6d2a5bf13b8ebbea73e99f218809351ed9cf7d37 models/30B/ggml-model-f16.bin -d2a441403944819492ec8c2002cc36fa38468149bfb4b7b4c52afc7bd9a7166d models/30B/ggml-model-q4_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/30B/ggml-model-q4_1.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/30B/ggml-model-q5_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/30B/ggml-model-q5_1.bin -2c07118ea98d69dbe7810d88520e30288fa994751b337f8fca02b171955f44cb models/30B/params.json -135c563f6b3938114458183afb01adc9a63bef3d8ff7cccc3977e5d3664ecafe models/65B/consolidated.00.pth 
-9a600b37b19d38c7e43809485f70d17d1dc12206c07efa83bc72bb498a568bde models/65B/consolidated.01.pth -e7babf7c5606f165a3756f527cb0fedc4f83e67ef1290391e52fb1cce5f26770 models/65B/consolidated.02.pth -73176ffb426b40482f2aa67ae1217ef79fbbd1fff5482bae5060cdc5a24ab70e models/65B/consolidated.03.pth -882e6431d0b08a8bc66261a0d3607da21cbaeafa96a24e7e59777632dbdac225 models/65B/consolidated.04.pth -a287c0dfe49081626567c7fe87f74cce5831f58e459b427b5e05567641f47b78 models/65B/consolidated.05.pth -72b4eba67a1a3b18cb67a85b70f8f1640caae9b40033ea943fb166bd80a7b36b models/65B/consolidated.06.pth -d27f5b0677d7ff129ceacd73fd461c4d06910ad7787cf217b249948c3f3bc638 models/65B/consolidated.07.pth -60758f2384d74e423dffddfd020ffed9d3bb186ebc54506f9c4a787d0f5367b0 models/65B/ggml-model-f16.bin -cde053439fa4910ae454407e2717cc46cc2c2b4995c00c93297a2b52e790fa92 models/65B/ggml-model-q4_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/65B/ggml-model-q4_1.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/65B/ggml-model-q5_0.bin -ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff models/65B/ggml-model-q5_1.bin -999ed1659b469ccc2a941714c0a9656fa571d17c9f7c8c7589817ca90edef51b models/65B/params.json -9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347 models/tokenizer.model From ee1628bdfea8b0079fed0140ac2f00ef1b465b57 Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Wed, 7 Feb 2024 07:54:50 +0100 Subject: [PATCH 690/859] Basic Vulkan Multi-GPU implementation (#5321) * Initial Vulkan multi-gpu implementation Move most global variables into backend context * Add names to backend device functions * Add further missing cleanup code * Reduce code duplication in tensor split layer assignment * generalize LLAMA_SPLIT_LAYER for all backends, do not expose device count and memory in llama.h * Only do device info print in the beginning and initialize one backend for cpu assist Add missing cleanup code * Rework backend memory management to make sure devices and buffers get properly allocated and freed * Rename cpu assist free function --------- Co-authored-by: slaren --- common/common.cpp | 8 +- ggml-vulkan.cpp | 2639 ++++++++++++++++++++++++++------------------- ggml-vulkan.h | 23 +- ggml.c | 14 +- llama.cpp | 69 +- 5 files changed, 1587 insertions(+), 1166 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 8c1a60583..e0082a823 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -46,6 +46,10 @@ #define GGML_USE_CUBLAS_SYCL #endif +#if (defined(GGML_USE_CUBLAS) || defined(GGML_USE_SYCL)) || defined(GGML_USE_VULKAN) +#define GGML_USE_CUBLAS_SYCL_VULKAN +#endif + int32_t get_num_physical_cores() { #ifdef __linux__ // enumerate the set of thread siblings, num entries is num cores @@ -660,8 +664,8 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { params.tensor_split[i] = 0.0f; } } -#ifndef GGML_USE_CUBLAS_SYCL - fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS/SYCL. Setting a tensor split has no effect.\n"); +#ifndef GGML_USE_CUBLAS_SYCL_VULKAN + fprintf(stderr, "warning: llama.cpp was compiled without cuBLAS/SYCL/Vulkan. 
Setting a tensor split has no effect.\n"); #endif // GGML_USE_CUBLAS_SYCL } else if (arg == "--no-mmap") { params.use_mmap = false; diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 14fb89e09..9e2846ee4 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -15,6 +15,7 @@ #include #include #include +#include #include "ggml.h" #include "ggml-backend-impl.h" @@ -37,6 +38,8 @@ #define GGML_VK_MAX_NODES 8192 +#define MAX_VK_BUFFERS 256 + #ifndef K_QUANTS_PER_ITERATION #define K_QUANTS_PER_ITERATION 1 #else @@ -53,15 +56,68 @@ static_assert(K_QUANTS_PER_ITERATION == 1 || K_QUANTS_PER_ITERATION == 2, "K_QUA } \ } while (0) -struct vk_buffer { +struct ggml_backend_vk_context; + +struct vk_queue { + uint32_t queue_family_index; + vk::Queue queue; + vk::CommandPool pool; + uint32_t cmd_buffer_idx; + std::vector cmd_buffers; + + vk::PipelineStageFlags stage_flags; +}; + +struct vk_device { + vk::PhysicalDevice physical_device; + vk::PhysicalDeviceProperties properties; + std::string name; + uint64_t max_memory_allocation_size; + bool fp16; + vk::Device device; + uint32_t vendor_id; + vk_queue compute_queue; + vk_queue transfer_queue; + bool single_queue; + uint32_t descriptor_set_mode; + uint32_t subgroup_size; + bool uma; + + ~vk_device() { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "destroy device " << name << std::endl; +#endif + device.destroy(); + } +}; + +struct vk_buffer_struct { vk::Buffer buffer; vk::DeviceMemory device_memory; vk::MemoryPropertyFlags memory_property_flags; void * ptr; size_t size = 0; - uint32_t qf_owner; + + ggml_backend_vk_context * ctx; + + std::shared_ptr device; + + ~vk_buffer_struct() { + if (size == 0) { + return; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << "~vk_buffer_struct(" << buffer << ", " << size << ")" << std::endl; +#endif + + device->device.freeMemory(device_memory); + device->device.destroyBuffer(buffer); + } }; +typedef std::shared_ptr vk_buffer; +typedef std::weak_ptr vk_buffer_ref; + struct vk_subbuffer { vk_buffer buffer; uint64_t offset; @@ -70,6 +126,7 @@ struct vk_subbuffer { struct vk_pipeline { std::string name; + vk::ShaderModule shader_module; vk::DescriptorSetLayout dsl; std::vector descriptor_pools; std::vector descriptor_sets; @@ -82,16 +139,6 @@ struct vk_pipeline { uint32_t align; }; -struct vk_queue { - uint32_t queue_family_index; - vk::Queue queue; - vk::CommandPool pool; - uint32_t cmd_buffer_idx; - std::vector cmd_buffers; - - vk::PipelineStageFlags stage_flags; -}; - struct vk_semaphore { vk::Semaphore s; uint64_t value; @@ -105,20 +152,6 @@ struct vk_submission { typedef std::vector vk_sequence; -struct vk_device { - vk::PhysicalDevice physical_device; - vk::PhysicalDeviceProperties properties; - uint64_t max_memory_allocation_size; - bool fp16; - vk::Device device; - uint32_t vendor_id; - vk_queue compute_queue; - vk_queue transfer_queue; - uint32_t descriptor_set_mode; - uint32_t subgroup_size; - bool uma; -}; - struct vk_op_push_constants { uint32_t KX; uint32_t KY; @@ -190,13 +223,13 @@ struct ggml_tensor_extra_gpu { size_t ctx_idx; - vk_buffer buffer_gpu; + vk_buffer_ref buffer_gpu; uint64_t offset; void reset() { ready = false; ctx_idx = 0; - buffer_gpu.size = 0; + buffer_gpu.reset(); offset = 0; } }; @@ -210,69 +243,96 @@ struct ggml_vk_garbage_collector { std::vector contexts; }; -typedef void (*ggml_vk_func_t)(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst); +struct ggml_backend_vk_context { + std::string name; -vk::Instance vk_instance; -vk_device vk_device; -vk_pipeline 
vk_pipeline_matmul_f32_l, vk_pipeline_matmul_f32_m, vk_pipeline_matmul_f32_s; -vk_pipeline vk_pipeline_matmul_f32_aligned_l, vk_pipeline_matmul_f32_aligned_m, vk_pipeline_matmul_f32_aligned_s; -vk_pipeline vk_pipeline_matmul_f16_l, vk_pipeline_matmul_f16_m, vk_pipeline_matmul_f16_s; -vk_pipeline vk_pipeline_matmul_f16_aligned_l, vk_pipeline_matmul_f16_aligned_m, vk_pipeline_matmul_f16_aligned_s; -vk_pipeline vk_pipeline_matmul_f16_f32_l, vk_pipeline_matmul_f16_f32_m, vk_pipeline_matmul_f16_f32_s; -vk_pipeline vk_pipeline_matmul_f16_f32_aligned_l, vk_pipeline_matmul_f16_f32_aligned_m, vk_pipeline_matmul_f16_f32_aligned_s; -vk_pipeline vk_pipeline_matmul_split_k_reduce; -vk_pipeline vk_pipeline_dequant[VK_NUM_TYPES]; -vk_pipeline vk_pipeline_dequant_mul_mat_vec_f32[VK_NUM_TYPES]; -vk_pipeline vk_pipeline_mul_mat_vec_p021_f16_f32; -vk_pipeline vk_pipeline_mul_mat_vec_nc_f16_f32; -vk_pipeline vk_pipeline_get_rows[VK_NUM_TYPES]; -vk_pipeline vk_pipeline_get_rows_f32[VK_NUM_TYPES]; -vk_pipeline vk_pipeline_mul_f32; -vk_pipeline vk_pipeline_add_f32; -vk_pipeline vk_pipeline_scale_f32; -vk_pipeline vk_pipeline_sqr_f32; -vk_pipeline vk_pipeline_clamp_f32; -vk_pipeline vk_pipeline_cpy_f32_f32, vk_pipeline_cpy_f32_f16, vk_pipeline_cpy_f16_f16; -vk_pipeline vk_pipeline_norm_f32; -vk_pipeline vk_pipeline_rms_norm_f32; -vk_pipeline vk_pipeline_gelu_f32; -vk_pipeline vk_pipeline_silu_f32; -vk_pipeline vk_pipeline_relu_f32; -vk_pipeline vk_pipeline_diag_mask_inf_f32; -vk_pipeline vk_pipeline_soft_max_f32; -vk_pipeline vk_pipeline_rope_f32, vk_pipeline_rope_f16; -vk_pipeline vk_pipeline_rope_neox_f32, vk_pipeline_rope_neox_f16; + std::weak_ptr device; + vk_pipeline pipeline_matmul_f32_l, pipeline_matmul_f32_m, pipeline_matmul_f32_s; + vk_pipeline pipeline_matmul_f32_aligned_l, pipeline_matmul_f32_aligned_m, pipeline_matmul_f32_aligned_s; + vk_pipeline pipeline_matmul_f16_l, pipeline_matmul_f16_m, pipeline_matmul_f16_s; + vk_pipeline pipeline_matmul_f16_aligned_l, pipeline_matmul_f16_aligned_m, pipeline_matmul_f16_aligned_s; + vk_pipeline pipeline_matmul_f16_f32_l, pipeline_matmul_f16_f32_m, pipeline_matmul_f16_f32_s; + vk_pipeline pipeline_matmul_f16_f32_aligned_l, pipeline_matmul_f16_f32_aligned_m, pipeline_matmul_f16_f32_aligned_s; + vk_pipeline pipeline_matmul_split_k_reduce; + vk_pipeline pipeline_dequant[VK_NUM_TYPES]; + vk_pipeline pipeline_dequant_mul_mat_vec_f32[VK_NUM_TYPES]; + vk_pipeline pipeline_mul_mat_vec_p021_f16_f32; + vk_pipeline pipeline_mul_mat_vec_nc_f16_f32; + vk_pipeline pipeline_get_rows[VK_NUM_TYPES]; + vk_pipeline pipeline_get_rows_f32[VK_NUM_TYPES]; + vk_pipeline pipeline_mul_f32; + vk_pipeline pipeline_add_f32; + vk_pipeline pipeline_scale_f32; + vk_pipeline pipeline_sqr_f32; + vk_pipeline pipeline_clamp_f32; + vk_pipeline pipeline_cpy_f32_f32, pipeline_cpy_f32_f16, pipeline_cpy_f16_f16; + vk_pipeline pipeline_norm_f32; + vk_pipeline pipeline_rms_norm_f32; + vk_pipeline pipeline_gelu_f32; + vk_pipeline pipeline_silu_f32; + vk_pipeline pipeline_relu_f32; + vk_pipeline pipeline_diag_mask_inf_f32; + vk_pipeline pipeline_soft_max_f32; + vk_pipeline pipeline_rope_f32, pipeline_rope_f16; + vk_pipeline pipeline_rope_neox_f32, pipeline_rope_neox_f16; -static size_t vk_semaphore_idx, vk_event_idx; -static ggml_vk_garbage_collector vk_gc; -static std::vector> vk_pinned_memory; -static size_t vk_prealloc_size_qx, vk_prealloc_size_qy, vk_prealloc_size_x, vk_prealloc_size_y, vk_prealloc_size_split_k; -static vk_buffer vk_prealloc_qx, vk_prealloc_qy, vk_prealloc_x, vk_prealloc_y, 
vk_prealloc_split_k; -static vk::Fence vk_fence; -static vk_buffer vk_staging; -static size_t vk_staging_size; -static size_t vk_staging_offset; -static vk_buffer vk_sync_staging; + size_t semaphore_idx, event_idx; + ggml_vk_garbage_collector gc; + std::vector> pinned_memory; + size_t prealloc_size_qx, prealloc_size_qy, prealloc_size_x, prealloc_size_y, prealloc_size_split_k; + vk_buffer prealloc_qx, prealloc_qy, prealloc_x, prealloc_y, prealloc_split_k; + vk::Fence fence; + vk_buffer staging; + size_t staging_size; + size_t staging_offset; + vk_buffer sync_staging; -static vk_context * vk_ctx; -static vk_context * vk_transfer_ctx; + vk_buffer buffer_pool[MAX_VK_BUFFERS]; -static bool vk_disable; + vk_context * compute_ctx; + vk_context * transfer_ctx; + + bool disable; + bool initialized; + + size_t idx; +}; + +struct vk_instance { + vk::Instance instance; + + std::vector device_indices; + + std::shared_ptr devices[GGML_VK_MAX_DEVICES]; + ggml_backend_t backends[GGML_VK_MAX_DEVICES]; + ggml_backend_vk_context contexts[GGML_VK_MAX_DEVICES]; + ggml_backend_buffer_type buffer_types[GGML_VK_MAX_DEVICES]; + bool initialized[GGML_VK_MAX_DEVICES]; +}; #ifdef GGML_VULKAN_CHECK_RESULTS -size_t vk_skip_checks; -size_t vk_output_tensor; +static size_t vk_skip_checks; +static size_t vk_output_tensor; + +static void ggml_vk_print_tensor(ggml_backend * ctx, const ggml_tensor * tensor, const char * name); +static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor); +static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor); #endif -static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_size, const void* spv_data, const std::string& entrypoint, uint32_t parameter_count, uint32_t push_constant_size, std::array wg_denoms, std::vector&& specialization_constants, uint32_t align) { +typedef void (*ggml_vk_func_t)(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst); + +static bool vk_instance_initialized = false; +static vk_instance vk_instance; + +GGML_CALL static void ggml_backend_vk_free(ggml_backend_t backend); + +static void ggml_vk_create_pipeline(ggml_backend_vk_context * ctx, vk_pipeline& pipeline, const std::string& name, size_t spv_size, const void* spv_data, const std::string& entrypoint, uint32_t parameter_count, uint32_t push_constant_size, std::array wg_denoms, std::vector&& specialization_constants, uint32_t align) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_pipeline(" << name << ", " << entrypoint << ", " << parameter_count << ", " << push_constant_size << ", (" << wg_denoms[0] << "," << wg_denoms[1] << "," << wg_denoms[2] << "), specialization_constants, " << align << ")" << std::endl; #endif GGML_ASSERT(parameter_count > 0); GGML_ASSERT(wg_denoms[0] > 0 && wg_denoms[1] > 0 && wg_denoms[2] > 0); // NOLINT - vk_pipeline pipeline; - pipeline.name = name; pipeline.parameter_count = parameter_count; pipeline.push_constant_size = push_constant_size; @@ -280,7 +340,7 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s pipeline.align = align; vk::ShaderModuleCreateInfo shader_module_create_info({}, spv_size, reinterpret_cast(spv_data)); - vk::ShaderModule shader_module = vk_device.device.createShaderModule(shader_module_create_info); + pipeline.shader_module = ctx->device.lock()->device.createShaderModule(shader_module_create_info); std::vector 
dsl_binding; std::vector dsl_binding_flags; @@ -301,17 +361,17 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s {}, dsl_binding); descriptor_set_layout_create_info.setPNext(&dslbfci); - pipeline.dsl = vk_device.device.createDescriptorSetLayout(descriptor_set_layout_create_info); + pipeline.dsl = ctx->device.lock()->device.createDescriptorSetLayout(descriptor_set_layout_create_info); // Check if device supports multiple descriptors per pool - if (vk_device.descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN) { + if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN) { const uint32_t alloc_count = 2; // Try allocating multiple sets from one pool // This fails on AMD for some reason, so add a fall back to allocating one pool per set vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, alloc_count, descriptor_pool_size); - vk::DescriptorPool pool = vk_device.device.createDescriptorPool(descriptor_pool_create_info); + vk::DescriptorPool pool = ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info); std::vector layouts(alloc_count); for (uint32_t i = 0; i < alloc_count; i++) { @@ -319,24 +379,24 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s } try { vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pool, alloc_count, layouts.data()); - std::vector sets = vk_device.device.allocateDescriptorSets(descriptor_set_alloc_info); + std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); } catch(vk::OutOfPoolMemoryError const&) { - vk_device.descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_SINGLE; + ctx->device.lock()->descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_SINGLE; } - vk_device.device.destroyDescriptorPool(pool); + ctx->device.lock()->device.destroyDescriptorPool(pool); } - if (vk_device.descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { + if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, 128, descriptor_pool_size); - pipeline.descriptor_pools.push_back(vk_device.device.createDescriptorPool(descriptor_pool_create_info)); + pipeline.descriptor_pools.push_back(ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info)); } pipeline.descriptor_set_idx = 0; vk::PipelineLayoutCreateInfo pipeline_layout_create_info(vk::PipelineLayoutCreateFlags(), pipeline.dsl, pcr); - pipeline.layout = vk_device.device.createPipelineLayout(pipeline_layout_create_info); + pipeline.layout = ctx->device.lock()->device.createPipelineLayout(pipeline_layout_create_info); std::vector specialization_entries(specialization_constants.size()); @@ -356,41 +416,45 @@ static vk_pipeline ggml_vk_create_pipeline(const std::string& name, size_t spv_s vk::PipelineShaderStageCreateInfo pipeline_shader_create_info( vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eCompute, - shader_module, + pipeline.shader_module, entrypoint.c_str(), &specialization_info); vk::ComputePipelineCreateInfo compute_pipeline_create_info( vk::PipelineCreateFlags(), pipeline_shader_create_info, pipeline.layout); - pipeline.pipeline = vk_device.device.createComputePipeline(VK_NULL_HANDLE, 
compute_pipeline_create_info).value; + pipeline.pipeline = ctx->device.lock()->device.createComputePipeline(VK_NULL_HANDLE, compute_pipeline_create_info).value; - return pipeline; + ctx->gc.pipelines.push_back(&pipeline); } -static void ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline& pipeline, uint32_t n) { +static void ggml_vk_destroy_pipeline(ggml_backend_vk_context * ctx, vk_pipeline * pipeline) { + for (auto& pool : pipeline->descriptor_pools) { + ctx->device.lock()->device.destroyDescriptorPool(pool); + } + pipeline->descriptor_pools.clear(); + pipeline->descriptor_sets.clear(); + pipeline->descriptor_set_idx = 0; + + ctx->device.lock()->device.destroyDescriptorSetLayout(pipeline->dsl); + + ctx->device.lock()->device.destroyPipelineLayout(pipeline->layout); + + ctx->device.lock()->device.destroyShaderModule(pipeline->shader_module); + + ctx->device.lock()->device.destroyPipeline(pipeline->pipeline); +} + +static void ggml_pipeline_allocate_descriptor_sets(ggml_backend_vk_context * ctx, vk_pipeline& pipeline, uint32_t n) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_pipeline_allocate_descriptor_sets(" << pipeline.name << ", " << n << ")" << std::endl; + std::cerr << "ggml_pipeline_allocate_descriptor_sets(" << pipeline.name << ", " << n << ")" << std::endl; #endif - // Check if gc already contains pipeline before adding it - bool gc_found = false; - for (auto * pl : vk_gc.pipelines) { - if (&pipeline == pl) { - gc_found = true; - break; - } - } - - if (!gc_found) { - vk_gc.pipelines.push_back(&pipeline); - } - if (pipeline.descriptor_sets.size() >= pipeline.descriptor_set_idx + n) { // Enough descriptors are available return; } - if (vk_device.descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { + if (ctx->device.lock()->descriptor_set_mode == VK_DEVICE_DESCRIPTOR_POOL_MODE_MULTI) { const uint32_t alloc_count = pipeline.descriptor_set_idx + n - pipeline.descriptor_sets.size(); std::vector layouts(alloc_count); @@ -398,29 +462,29 @@ static void ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline& pipeline, uin layouts[i] = pipeline.dsl; } vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pipeline.descriptor_pools[0], alloc_count, layouts.data()); - std::vector sets = vk_device.device.allocateDescriptorSets(descriptor_set_alloc_info); + std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); pipeline.descriptor_sets.insert(pipeline.descriptor_sets.end(), sets.begin(), sets.end()); } else { for (uint32_t i = pipeline.descriptor_sets.size(); i < pipeline.descriptor_set_idx + n; i++) { vk::DescriptorPoolSize descriptor_pool_size(vk::DescriptorType::eStorageBuffer, pipeline.parameter_count); vk::DescriptorPoolCreateInfo descriptor_pool_create_info({}, 1, descriptor_pool_size); - pipeline.descriptor_pools.push_back(vk_device.device.createDescriptorPool(descriptor_pool_create_info)); + pipeline.descriptor_pools.push_back(ctx->device.lock()->device.createDescriptorPool(descriptor_pool_create_info)); vk::DescriptorSetAllocateInfo descriptor_set_alloc_info(pipeline.descriptor_pools[i], 1, &pipeline.dsl); - std::vector sets = vk_device.device.allocateDescriptorSets(descriptor_set_alloc_info); + std::vector sets = ctx->device.lock()->device.allocateDescriptorSets(descriptor_set_alloc_info); pipeline.descriptor_sets.push_back(sets[0]); } } } -static void ggml_vk_pipeline_cleanup(vk_pipeline& pipeline) { +static void ggml_pipeline_cleanup(vk_pipeline& pipeline) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_pipeline_cleanup(" 
<< pipeline.name << ")" << std::endl; + std::cerr << "ggml_pipeline_cleanup(" << pipeline.name << ")" << std::endl; #endif pipeline.descriptor_set_idx = 0; } -static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { +static vk::CommandBuffer ggml_vk_create_cmd_buffer(ggml_backend_vk_context * ctx, vk_queue& q) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_cmd_buffer()" << std::endl; #endif @@ -433,7 +497,7 @@ static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { q.pool, vk::CommandBufferLevel::ePrimary, 1); - const std::vector cmd_buffers = vk_device.device.allocateCommandBuffers(command_buffer_alloc_info); + const std::vector cmd_buffers = ctx->device.lock()->device.allocateCommandBuffers(command_buffer_alloc_info); auto buf = cmd_buffers.front(); q.cmd_buffers.push_back(buf); @@ -442,24 +506,17 @@ static vk::CommandBuffer ggml_vk_create_cmd_buffer(vk_queue& q) { return buf; } -static vk_submission ggml_vk_create_submission(vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { +static vk_submission ggml_vk_create_submission(ggml_backend_vk_context * ctx, vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_submission()" << std::endl; #endif vk_submission s; - s.buffer = ggml_vk_create_cmd_buffer(q); + s.buffer = ggml_vk_create_cmd_buffer(ctx, q); s.wait_semaphores = std::move(wait_semaphores); s.signal_semaphores = std::move(signal_semaphores); return s; } -static vk_sequence ggml_vk_create_sequence_1(vk_queue& q, std::vector wait_semaphores, std::vector signal_semaphores) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_create_sequence_1()" << std::endl; -#endif - return { ggml_vk_create_submission(q, std::move(wait_semaphores), std::move(signal_semaphores)) }; -} - static void ggml_vk_submit(vk_context * ctx, vk::Fence fence) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_submit(" << ctx->seqs.size() << ", " << fence << ")" << std::endl; @@ -578,89 +635,89 @@ static uint32_t ggml_vk_find_queue_family_index(std::vectordevice.lock()->device.createCommandPool(command_pool_create_info_compute); q.cmd_buffer_idx = 0; - q.queue = vk_device.device.getQueue(queue_family_index, queue_index); + q.queue = ctx->device.lock()->device.getQueue(queue_family_index, queue_index); q.stage_flags = stage_flags; - - return q; } -static vk_context * ggml_vk_create_context(vk_queue& q) { +static vk_context * ggml_vk_create_context(ggml_backend_vk_context * ctx, vk_queue& q) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_context()" << std::endl; #endif - vk_gc.contexts.emplace_back(); - vk_context * result = &vk_gc.contexts[vk_gc.contexts.size() - 1]; + ctx->gc.contexts.emplace_back(); + vk_context * result = &ctx->gc.contexts[ctx->gc.contexts.size() - 1]; memset((void *) result, 0, sizeof(vk_context)); - result->idx = vk_gc.contexts.size() - 1; + result->idx = ctx->gc.contexts.size() - 1; result->q = &q; return result; } -static vk_semaphore * ggml_vk_create_binary_semaphore() { +static vk_semaphore * ggml_vk_create_binary_semaphore(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_timeline_semaphore()" << std::endl; #endif vk::SemaphoreTypeCreateInfo tci{ vk::SemaphoreType::eBinary, 0 }; vk::SemaphoreCreateInfo ci{}; ci.setPNext(&tci); - vk::Semaphore semaphore = vk_device.device.createSemaphore(ci); - vk_gc.semaphores.push_back({ semaphore, 0 }); - return &vk_gc.semaphores[vk_gc.semaphores.size() - 1]; + vk::Semaphore semaphore = 
ctx->device.lock()->device.createSemaphore(ci); + ctx->gc.semaphores.push_back({ semaphore, 0 }); + return &ctx->gc.semaphores[ctx->gc.semaphores.size() - 1]; } -static vk_semaphore * ggml_vk_create_timeline_semaphore() { +static vk_semaphore * ggml_vk_create_timeline_semaphore(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_timeline_semaphore()" << std::endl; #endif - if (vk_semaphore_idx >= vk_gc.tl_semaphores.size()) { + if (ctx->semaphore_idx >= ctx->gc.tl_semaphores.size()) { vk::SemaphoreTypeCreateInfo tci{ vk::SemaphoreType::eTimeline, 0 }; vk::SemaphoreCreateInfo ci{}; ci.setPNext(&tci); - vk::Semaphore semaphore = vk_device.device.createSemaphore(ci); - vk_gc.tl_semaphores.push_back({ semaphore, 0 }); + vk::Semaphore semaphore = ctx->device.lock()->device.createSemaphore(ci); + ctx->gc.tl_semaphores.push_back({ semaphore, 0 }); } - return &vk_gc.tl_semaphores[vk_semaphore_idx++]; + return &ctx->gc.tl_semaphores[ctx->semaphore_idx++]; } -static vk::Event ggml_vk_create_event() { - if (vk_event_idx >= vk_gc.events.size()) { - vk_gc.events.push_back(vk_device.device.createEvent({})); +static vk::Event ggml_vk_create_event(ggml_backend_vk_context * ctx) { + if (ctx->event_idx >= ctx->gc.events.size()) { + ctx->gc.events.push_back(ctx->device.lock()->device.createEvent({})); } - return vk_gc.events[vk_event_idx++]; + return ctx->gc.events[ctx->event_idx++]; } -static void ggml_vk_queue_cleanup(vk_queue& q) { +static void ggml_vk_queue_cleanup(ggml_backend_vk_context * ctx, vk_queue& q) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_queue_cleanup()" << std::endl; #endif // Requires command buffers to be done - vk_device.device.resetCommandPool(q.pool); + ctx->device.lock()->device.resetCommandPool(q.pool); q.cmd_buffer_idx = 0; } -static vk_buffer ggml_vk_create_buffer(size_t size, vk::MemoryPropertyFlags req_flags) { +static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_create_buffer(" << size << ", " << to_string(req_flags) << ")" << std::endl; #endif - GGML_ASSERT(size > 0); + vk_buffer buf = std::make_shared(); - vk_buffer buf; + if (size == 0) { + buf->size = 0; + return buf; + } - buf.size = size; + buf->size = size; vk::BufferCreateInfo buffer_create_info{ vk::BufferCreateFlags(), size, @@ -670,11 +727,11 @@ static vk_buffer ggml_vk_create_buffer(size_t size, vk::MemoryPropertyFlags req_ nullptr, }; - buf.buffer = vk_device.device.createBuffer(buffer_create_info); + buf->buffer = ctx->device.lock()->device.createBuffer(buffer_create_info); - vk::MemoryRequirements mem_req = vk_device.device.getBufferMemoryRequirements(buf.buffer); + vk::MemoryRequirements mem_req = ctx->device.lock()->device.getBufferMemoryRequirements(buf->buffer); - vk::PhysicalDeviceMemoryProperties mem_props = vk_device.physical_device.getMemoryProperties(); + vk::PhysicalDeviceMemoryProperties mem_props = ctx->device.lock()->physical_device.getMemoryProperties(); uint32_t memory_type_index = UINT32_MAX; @@ -691,30 +748,36 @@ static vk_buffer ggml_vk_create_buffer(size_t size, vk::MemoryPropertyFlags req_ } try { - buf.device_memory = vk_device.device.allocateMemory({ mem_req.size, memory_type_index }); + buf->device_memory = ctx->device.lock()->device.allocateMemory({ mem_req.size, memory_type_index }); } catch (const vk::SystemError& e) { // Out of Host/Device memory, clean up buffer - vk_device.device.destroyBuffer(buf.buffer); - buf.size = 0; + 
ctx->device.lock()->device.destroyBuffer(buf->buffer); + buf->size = 0; throw e; } - buf.memory_property_flags = req_flags; - buf.ptr = nullptr; + buf->memory_property_flags = req_flags; + buf->ptr = nullptr; if (req_flags & vk::MemoryPropertyFlagBits::eHostVisible) { - buf.ptr = vk_device.device.mapMemory(buf.device_memory, 0, VK_WHOLE_SIZE); + buf->ptr = ctx->device.lock()->device.mapMemory(buf->device_memory, 0, VK_WHOLE_SIZE); } - vk_device.device.bindBufferMemory(buf.buffer, buf.device_memory, 0); + ctx->device.lock()->device.bindBufferMemory(buf->buffer, buf->device_memory, 0); - buf.qf_owner = VK_QUEUE_FAMILY_IGNORED; + buf->ctx = ctx; + + buf->device = ctx->device.lock(); + +#ifdef GGML_VULKAN_DEBUG + std::cerr << "Created buffer " << buf->buffer << std::endl; +#endif return buf; } -static vk_buffer ggml_vk_create_buffer_check(size_t size, vk::MemoryPropertyFlags req_flags) { +static vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags) { try { - return ggml_vk_create_buffer(size, req_flags); + return ggml_vk_create_buffer(ctx, size, req_flags); } catch (const vk::SystemError& e) { std::cerr << "ggml_vulkan: Memory allocation of size " << size << " failed." << std::endl; std::cerr << "ggml_vulkan: " << e.what() << std::endl; @@ -722,14 +785,14 @@ static vk_buffer ggml_vk_create_buffer_check(size_t size, vk::MemoryPropertyFlag } } -static vk_buffer ggml_vk_create_buffer_device(size_t size) { +static vk_buffer ggml_vk_create_buffer_device(ggml_backend_vk_context * ctx, size_t size) { vk_buffer buf; try { - buf = ggml_vk_create_buffer(size, vk::MemoryPropertyFlagBits::eDeviceLocal); + buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal); } catch (const vk::SystemError& e) { - if (vk_device.uma) { + if (ctx->device.lock()->uma) { // Fall back to host memory type - buf = ggml_vk_create_buffer_check(size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); + buf = ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } else { std::cerr << "ggml_vulkan: Device memory allocation of size " << size << " failed." 
<< std::endl; std::cerr << "ggml_vulkan: " << e.what() << std::endl; @@ -741,16 +804,7 @@ static vk_buffer ggml_vk_create_buffer_device(size_t size) { } static void ggml_vk_destroy_buffer(vk_buffer& buf) { - if (buf.size == 0) { - return; - } -#ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_destroy_buffer(" << buf.size << ")" << std::endl; -#endif - - buf.size = 0; - vk_device.device.freeMemory(buf.device_memory); - vk_device.device.destroyBuffer(buf.buffer); + buf.reset(); } static vk_subbuffer ggml_vk_subbuffer(vk_buffer& buf) { @@ -773,7 +827,7 @@ static void ggml_vk_sync_buffers(vk_context * ctx) { ); } -static void ggml_vk_wait_events(vk::CommandBuffer& cmd_buffer, std::vector&& events, vk::PipelineStageFlags src_stages, vk::PipelineStageFlags dst_stages) { +static void ggml_vk_wait_events(vk_context * ctx, std::vector&& events) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_wait_events()" << std::endl; #endif @@ -781,10 +835,10 @@ static void ggml_vk_wait_events(vk::CommandBuffer& cmd_buffer, std::vectors->buffer.waitEvents( events, - src_stages, - dst_stages, + ctx->q->stage_flags, + ctx->q->stage_flags, {}, {}, {} @@ -810,15 +864,15 @@ static bool ggml_vk_build_shader(ggml_type type) { } } -static void ggml_vk_load_shaders() { +static void ggml_vk_load_shaders(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_load_shaders()" << std::endl; + std::cerr << "ggml_vk_load_shaders(" << ctx->name << ")" << std::endl; #endif // mulmat - std::initializer_list warptile_l = { 128, 128, 128, 16, vk_device.subgroup_size * 2, 64, 2, 4, 4, vk_device.subgroup_size }; - std::initializer_list warptile_m = { 128, 64, 64, 16, vk_device.subgroup_size, 32, 2, 4, 2, vk_device.subgroup_size }; - std::initializer_list warptile_s = { vk_device.subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, vk_device.subgroup_size }; + std::initializer_list warptile_l = { 128, 128, 128, 16, ctx->device.lock()->subgroup_size * 2, 64, 2, 4, 4, ctx->device.lock()->subgroup_size }; + std::initializer_list warptile_m = { 128, 64, 64, 16, ctx->device.lock()->subgroup_size, 32, 2, 4, 2, ctx->device.lock()->subgroup_size }; + std::initializer_list warptile_s = { ctx->device.lock()->subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, ctx->device.lock()->subgroup_size }; std::array l_wg_denoms = {128, 128, 1 }; std::array m_wg_denoms = { 64, 64, 1 }; @@ -828,145 +882,208 @@ static void ggml_vk_load_shaders() { uint32_t m_align = 64; uint32_t s_align = 32; - if (vk_device.fp16) { - vk_pipeline_matmul_f32_l = ggml_vk_create_pipeline("matmul_f32_l", matmul_f32_l_len, matmul_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f32_m = ggml_vk_create_pipeline("matmul_f32_m", matmul_f32_m_len, matmul_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f32_s = ggml_vk_create_pipeline("matmul_f32_s", matmul_f32_s_len, matmul_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f32_aligned_l = ggml_vk_create_pipeline("matmul_f32_aligned_l", matmul_f32_aligned_l_len, matmul_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f32_aligned_m = ggml_vk_create_pipeline("matmul_f32_aligned_m", matmul_f32_aligned_m_len, matmul_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f32_aligned_s = ggml_vk_create_pipeline("matmul_f32_aligned_s", matmul_f32_aligned_s_len, 
@@ -810,15 +864,15 @@ static bool ggml_vk_build_shader(ggml_type type) { } } -static void ggml_vk_load_shaders() { +static void ggml_vk_load_shaders(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_load_shaders()" << std::endl; + std::cerr << "ggml_vk_load_shaders(" << ctx->name << ")" << std::endl; #endif // mulmat - std::initializer_list<uint32_t> warptile_l = { 128, 128, 128, 16, vk_device.subgroup_size * 2, 64, 2, 4, 4, vk_device.subgroup_size }; - std::initializer_list<uint32_t> warptile_m = { 128, 64, 64, 16, vk_device.subgroup_size, 32, 2, 4, 2, vk_device.subgroup_size }; - std::initializer_list<uint32_t> warptile_s = { vk_device.subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, vk_device.subgroup_size }; + std::initializer_list<uint32_t> warptile_l = { 128, 128, 128, 16, ctx->device.lock()->subgroup_size * 2, 64, 2, 4, 4, ctx->device.lock()->subgroup_size }; + std::initializer_list<uint32_t> warptile_m = { 128, 64, 64, 16, ctx->device.lock()->subgroup_size, 32, 2, 4, 2, ctx->device.lock()->subgroup_size }; + std::initializer_list<uint32_t> warptile_s = { ctx->device.lock()->subgroup_size, 32, 32, 16, 32, 32, 2, 2, 2, ctx->device.lock()->subgroup_size }; std::array<uint32_t, 3> l_wg_denoms = {128, 128, 1 }; std::array<uint32_t, 3> m_wg_denoms = { 64, 64, 1 }; @@ -828,145 +882,208 @@ uint32_t m_align = 64; uint32_t s_align = 32; - if (vk_device.fp16) { - vk_pipeline_matmul_f32_l = ggml_vk_create_pipeline("matmul_f32_l", matmul_f32_l_len, matmul_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f32_m = ggml_vk_create_pipeline("matmul_f32_m", matmul_f32_m_len, matmul_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f32_s = ggml_vk_create_pipeline("matmul_f32_s", matmul_f32_s_len, matmul_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f32_aligned_l = ggml_vk_create_pipeline("matmul_f32_aligned_l", matmul_f32_aligned_l_len, matmul_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f32_aligned_m = ggml_vk_create_pipeline("matmul_f32_aligned_m", matmul_f32_aligned_m_len, matmul_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f32_aligned_s = ggml_vk_create_pipeline("matmul_f32_aligned_s", matmul_f32_aligned_s_len, matmul_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + if (ctx->device.lock()->fp16) { + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_l, "matmul_f32_l", matmul_f32_l_len, matmul_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_m, "matmul_f32_m", matmul_f32_m_len, matmul_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_s, "matmul_f32_s", matmul_f32_s_len, matmul_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_l, "matmul_f32_aligned_l", matmul_f32_aligned_l_len, matmul_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_m, "matmul_f32_aligned_m", matmul_f32_aligned_m_len, matmul_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_s, "matmul_f32_aligned_s", matmul_f32_aligned_s_len, matmul_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - vk_pipeline_matmul_f16_l = ggml_vk_create_pipeline("matmul_f16_l", matmul_f16_l_len, matmul_f16_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f16_m = ggml_vk_create_pipeline("matmul_f16_m", matmul_f16_m_len, matmul_f16_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f16_s = ggml_vk_create_pipeline("matmul_f16_s", matmul_f16_s_len, matmul_f16_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_l, "matmul_f16_l", matmul_f16_l_len, matmul_f16_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_m, "matmul_f16_m", matmul_f16_m_len, matmul_f16_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_s, "matmul_f16_s", matmul_f16_s_len, matmul_f16_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_l, "matmul_f16_aligned_l", matmul_f16_aligned_l_len, matmul_f16_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_m, "matmul_f16_aligned_m", matmul_f16_aligned_m_len, matmul_f16_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_s, "matmul_f16_aligned_s", matmul_f16_aligned_s_len, matmul_f16_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - vk_pipeline_matmul_f16_aligned_l = ggml_vk_create_pipeline("matmul_f16_aligned_l", matmul_f16_aligned_l_len, matmul_f16_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f16_aligned_m = ggml_vk_create_pipeline("matmul_f16_aligned_m", matmul_f16_aligned_m_len, matmul_f16_aligned_m_data, "main", 3, 14 *
s_wg_denoms, warptile_s, s_align); - - vk_pipeline_matmul_f16_f32_l = ggml_vk_create_pipeline("matmul_f16_f32_l", matmul_f16_f32_l_len, matmul_f16_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f16_f32_m = ggml_vk_create_pipeline("matmul_f16_f32_m", matmul_f16_f32_m_len, matmul_f16_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f16_f32_s = ggml_vk_create_pipeline("matmul_f16_f32_s", matmul_f16_f32_s_len, matmul_f16_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f16_f32_aligned_l = ggml_vk_create_pipeline("matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_len, matmul_f16_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f16_f32_aligned_m = ggml_vk_create_pipeline("matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_len, matmul_f16_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f16_f32_aligned_s = ggml_vk_create_pipeline("matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_len, matmul_f16_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_l, "matmul_f16_f32_l", matmul_f16_f32_l_len, matmul_f16_f32_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_m, "matmul_f16_f32_m", matmul_f16_f32_m_len, matmul_f16_f32_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_s, "matmul_f16_f32_s", matmul_f16_f32_s_len, matmul_f16_f32_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_l, "matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_len, matmul_f16_f32_aligned_l_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_m, "matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_len, matmul_f16_f32_aligned_m_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_s, "matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_len, matmul_f16_f32_aligned_s_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); } else { - vk_pipeline_matmul_f32_l = ggml_vk_create_pipeline("matmul_f32_l", matmul_f32_l_fp32_len, matmul_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f32_m = ggml_vk_create_pipeline("matmul_f32_m", matmul_f32_m_fp32_len, matmul_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f32_s = ggml_vk_create_pipeline("matmul_f32_s", matmul_f32_s_fp32_len, matmul_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f32_aligned_l = ggml_vk_create_pipeline("matmul_f32_aligned_l", matmul_f32_aligned_l_fp32_len, matmul_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f32_aligned_m = ggml_vk_create_pipeline("matmul_f32_aligned_m", matmul_f32_aligned_m_fp32_len, matmul_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f32_aligned_s = 
ggml_vk_create_pipeline("matmul_f32_aligned_s", matmul_f32_aligned_s_fp32_len, matmul_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_l, "matmul_f32_l", matmul_f32_l_fp32_len, matmul_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_m, "matmul_f32_m", matmul_f32_m_fp32_len, matmul_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_s, "matmul_f32_s", matmul_f32_s_fp32_len, matmul_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_l, "matmul_f32_aligned_l", matmul_f32_aligned_l_fp32_len, matmul_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_m, "matmul_f32_aligned_m", matmul_f32_aligned_m_fp32_len, matmul_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f32_aligned_s, "matmul_f32_aligned_s", matmul_f32_aligned_s_fp32_len, matmul_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - vk_pipeline_matmul_f16_l = ggml_vk_create_pipeline("matmul_f16_l", matmul_f16_l_fp32_len, matmul_f16_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f16_m = ggml_vk_create_pipeline("matmul_f16_m", matmul_f16_m_fp32_len, matmul_f16_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f16_s = ggml_vk_create_pipeline("matmul_f16_s", matmul_f16_s_fp32_len, matmul_f16_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_l, "matmul_f16_l", matmul_f16_l_fp32_len, matmul_f16_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_m, "matmul_f16_m", matmul_f16_m_fp32_len, matmul_f16_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_s, "matmul_f16_s", matmul_f16_s_fp32_len, matmul_f16_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_l, "matmul_f16_aligned_l", matmul_f16_aligned_l_fp32_len, matmul_f16_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_m, "matmul_f16_aligned_m", matmul_f16_aligned_m_fp32_len, matmul_f16_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_aligned_s, "matmul_f16_aligned_s", matmul_f16_aligned_s_fp32_len, matmul_f16_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - vk_pipeline_matmul_f16_aligned_l = ggml_vk_create_pipeline("matmul_f16_aligned_l", matmul_f16_aligned_l_fp32_len, matmul_f16_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f16_aligned_m = ggml_vk_create_pipeline("matmul_f16_aligned_m", matmul_f16_aligned_m_fp32_len, matmul_f16_aligned_m_fp32_data, "main", 3, 14 * 
sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f16_aligned_s = ggml_vk_create_pipeline("matmul_f16_aligned_s", matmul_f16_aligned_s_fp32_len, matmul_f16_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); - - vk_pipeline_matmul_f16_f32_l = ggml_vk_create_pipeline("matmul_f16_f32_l", matmul_f16_f32_l_fp32_len, matmul_f16_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); - vk_pipeline_matmul_f16_f32_m = ggml_vk_create_pipeline("matmul_f16_f32_m", matmul_f16_f32_m_fp32_len, matmul_f16_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); - vk_pipeline_matmul_f16_f32_s = ggml_vk_create_pipeline("matmul_f16_f32_s", matmul_f16_f32_s_fp32_len, matmul_f16_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); - vk_pipeline_matmul_f16_f32_aligned_l = ggml_vk_create_pipeline("matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_fp32_len, matmul_f16_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); - vk_pipeline_matmul_f16_f32_aligned_m = ggml_vk_create_pipeline("matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_fp32_len, matmul_f16_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); - vk_pipeline_matmul_f16_f32_aligned_s = ggml_vk_create_pipeline("matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_fp32_len, matmul_f16_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_l, "matmul_f16_f32_l", matmul_f16_f32_l_fp32_len, matmul_f16_f32_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_m, "matmul_f16_f32_m", matmul_f16_f32_m_fp32_len, matmul_f16_f32_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_s, "matmul_f16_f32_s", matmul_f16_f32_s_fp32_len, matmul_f16_f32_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_l, "matmul_f16_f32_aligned_l", matmul_f16_f32_aligned_l_fp32_len, matmul_f16_f32_aligned_l_fp32_data, "main", 3, 14 * sizeof(uint32_t), l_wg_denoms, warptile_l, l_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_m, "matmul_f16_f32_aligned_m", matmul_f16_f32_aligned_m_fp32_len, matmul_f16_f32_aligned_m_fp32_data, "main", 3, 14 * sizeof(uint32_t), m_wg_denoms, warptile_m, m_align); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_f16_f32_aligned_s, "matmul_f16_f32_aligned_s", matmul_f16_f32_aligned_s_fp32_len, matmul_f16_f32_aligned_s_fp32_data, "main", 3, 14 * sizeof(uint32_t), s_wg_denoms, warptile_s, s_align); } - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("mul_mat_vec_f16_f32", mul_mat_vec_f16_f32_len, mul_mat_vec_f16_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("mul_mat_vec_q4_0_f32", mul_mat_vec_q4_0_f32_len, mul_mat_vec_q4_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("mul_mat_vec_q4_1_f32", mul_mat_vec_q4_1_f32_len, mul_mat_vec_q4_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_0] = 
ggml_vk_create_pipeline("mul_mat_vec_q5_0_f32", mul_mat_vec_q5_0_f32_len, mul_mat_vec_q5_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("mul_mat_vec_q5_1_f32", mul_mat_vec_q5_1_f32_len, mul_mat_vec_q5_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("mul_mat_vec_q8_0_f32", mul_mat_vec_q8_0_f32_len, mul_mat_vec_q8_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("mul_mat_vec_q2_K_f32", mul_mat_vec_q2_K_f32_len, mul_mat_vec_q2_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("mul_mat_vec_q3_K_f32", mul_mat_vec_q3_K_f32_len, mul_mat_vec_q3_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("mul_mat_vec_q4_K_f32", mul_mat_vec_q4_K_f32_len, mul_mat_vec_q4_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("mul_mat_vec_q5_K_f32", mul_mat_vec_q5_K_f32_len, mul_mat_vec_q5_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); - vk_pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("mul_mat_vec_q6_K_f32", mul_mat_vec_q6_K_f32_len, mul_mat_vec_q6_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_F16 ], "mul_mat_vec_f16_f32", mul_mat_vec_f16_f32_len, mul_mat_vec_f16_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_0], "mul_mat_vec_q4_0_f32", mul_mat_vec_q4_0_f32_len, mul_mat_vec_q4_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_1], "mul_mat_vec_q4_1_f32", mul_mat_vec_q4_1_f32_len, mul_mat_vec_q4_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_0], "mul_mat_vec_q5_0_f32", mul_mat_vec_q5_0_f32_len, mul_mat_vec_q5_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_1], "mul_mat_vec_q5_1_f32", mul_mat_vec_q5_1_f32_len, mul_mat_vec_q5_1_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q8_0], "mul_mat_vec_q8_0_f32", mul_mat_vec_q8_0_f32_len, mul_mat_vec_q8_0_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q2_K], "mul_mat_vec_q2_K_f32", mul_mat_vec_q2_K_f32_len, mul_mat_vec_q2_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q3_K], "mul_mat_vec_q3_K_f32", mul_mat_vec_q3_K_f32_len, mul_mat_vec_q3_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q4_K], "mul_mat_vec_q4_K_f32", mul_mat_vec_q4_K_f32_len, mul_mat_vec_q4_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q5_K], 
"mul_mat_vec_q5_K_f32", mul_mat_vec_q5_K_f32_len, mul_mat_vec_q5_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_Q6_K], "mul_mat_vec_q6_K_f32", mul_mat_vec_q6_K_f32_len, mul_mat_vec_q6_K_f32_data, "main", 3, 3 * sizeof(int), {1, 1, 1}, {}, 1); // dequant shaders - vk_pipeline_dequant[GGML_TYPE_F32] = ggml_vk_create_pipeline("f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), {64, 1, 1}, {}, 1); - - vk_pipeline_dequant[GGML_TYPE_F16] = ggml_vk_create_pipeline("dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("dequant_q5_0", dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("dequant_q8_0", dequant_q8_0_len, dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q2_K] = ggml_vk_create_pipeline("dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q3_K] = ggml_vk_create_pipeline("dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q4_K] = ggml_vk_create_pipeline("dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q5_K] = ggml_vk_create_pipeline("dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); - vk_pipeline_dequant[GGML_TYPE_Q6_K] = ggml_vk_create_pipeline("dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_F32 ], "f32_to_f16", f32_to_f16_len, f32_to_f16_data, "main", 2, 4 * sizeof(int), { 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_F16 ], "dequant_f16", dequant_f16_len, dequant_f16_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_0], "dequant_q4_0", dequant_q4_0_len, dequant_q4_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_1], "dequant_q4_1", dequant_q4_1_len, dequant_q4_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_0], "dequant_q5_0", dequant_q5_0_len, dequant_q5_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_1], "dequant_q5_1", dequant_q5_1_len, dequant_q5_1_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q8_0], "dequant_q8_0", dequant_q8_0_len, 
dequant_q8_0_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q2_K], "dequant_q2_K", dequant_q2_K_len, dequant_q2_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q3_K], "dequant_q3_K", dequant_q3_K_len, dequant_q3_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q4_K], "dequant_q4_K", dequant_q4_K_len, dequant_q4_K_data, "main", 2, 4 * sizeof(int), {256 * 32, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q5_K], "dequant_q5_K", dequant_q5_K_len, dequant_q5_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_dequant[GGML_TYPE_Q6_K], "dequant_q6_K", dequant_q6_K_len, dequant_q6_K_data, "main", 2, 4 * sizeof(int), {256 * 64, 1, 1}, {}, 1); // get_rows - vk_pipeline_get_rows[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_F16 ], "get_rows_f16", get_rows_f16_len, get_rows_f16_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q4_0], "get_rows_q4_0", get_rows_q4_0_len, get_rows_q4_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q4_1], "get_rows_q4_1", get_rows_q4_1_len, get_rows_q4_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q5_0], "get_rows_q5_0", get_rows_q5_0_len, get_rows_q5_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q5_1], "get_rows_q5_1", get_rows_q5_1_len, get_rows_q5_1_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows[GGML_TYPE_Q8_0], "get_rows_q8_0", get_rows_q8_0_len, get_rows_q8_0_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_F16] = ggml_vk_create_pipeline("get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_0] = ggml_vk_create_pipeline("get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, 
sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q4_1] = ggml_vk_create_pipeline("get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_0] = ggml_vk_create_pipeline("get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q5_1] = ggml_vk_create_pipeline("get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_get_rows_f32[GGML_TYPE_Q8_0] = ggml_vk_create_pipeline("get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_F32 ], "get_rows_f16_f32", get_rows_f16_f32_len, get_rows_f16_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q4_0], "get_rows_q4_0_f32", get_rows_q4_0_f32_len, get_rows_q4_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q4_1], "get_rows_q4_1_f32", get_rows_q4_1_f32_len, get_rows_q4_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q5_0], "get_rows_q5_0_f32", get_rows_q5_0_f32_len, get_rows_q5_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q5_1], "get_rows_q5_1_f32", get_rows_q5_1_f32_len, get_rows_q5_1_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_get_rows_f32[GGML_TYPE_Q8_0], "get_rows_q8_0_f32", get_rows_q8_0_f32_len, get_rows_q8_0_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_matmul_split_k_reduce = ggml_vk_create_pipeline("split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_matmul_split_k_reduce, "split_k_reduce", split_k_reduce_len, split_k_reduce_data, "main", 2, 2 * sizeof(uint32_t), {256, 1, 1}, {}, 1); - vk_pipeline_mul_mat_vec_p021_f16_f32 = ggml_vk_create_pipeline("mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); - vk_pipeline_mul_mat_vec_nc_f16_f32 = ggml_vk_create_pipeline("mul_mat_vec_nc_f16_f32", mul_mat_vec_nc_f16_f32_len, mul_mat_vec_nc_f16_f32_data, "main", 3, 7 * sizeof(uint32_t), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, "mul_mat_vec_p021_f16_f32", mul_mat_vec_p021_f16_f32_len, mul_mat_vec_p021_f16_f32_data, "main", 3, 6 * sizeof(uint32_t), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, "mul_mat_vec_nc_f16_f32", mul_mat_vec_nc_f16_f32_len, mul_mat_vec_nc_f16_f32_data, "main", 3, 7 * sizeof(uint32_t), {1, 1, 1}, {}, 1); - vk_pipeline_norm_f32 = ggml_vk_create_pipeline("norm_f32", norm_f32_len, norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); - vk_pipeline_rms_norm_f32 = ggml_vk_create_pipeline("rms_norm_f32", rms_norm_f32_len, rms_norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 
1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_norm_f32, "norm_f32", norm_f32_len, norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rms_norm_f32, "rms_norm_f32", rms_norm_f32_len, rms_norm_f32_data, "main", 2, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); - vk_pipeline_cpy_f32_f32 = ggml_vk_create_pipeline("cpy_f32_f32", cpy_f32_f32_len, cpy_f32_f32_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_cpy_f32_f16 = ggml_vk_create_pipeline("cpy_f32_f16", cpy_f32_f16_len, cpy_f32_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_cpy_f16_f16 = ggml_vk_create_pipeline("cpy_f16_f16", cpy_f16_f16_len, cpy_f16_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f32_f32, "cpy_f32_f32", cpy_f32_f32_len, cpy_f32_f32_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f32_f16, "cpy_f32_f16", cpy_f32_f16_len, cpy_f32_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_cpy_f16_f16, "cpy_f16_f16", cpy_f16_f16_len, cpy_f16_f16_data, "main", 2, sizeof(vk_op_cpy_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_add_f32 = ggml_vk_create_pipeline("add_f32", add_f32_len, add_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_add_f32, "add_f32", add_f32_len, add_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_mul_f32 = ggml_vk_create_pipeline("mul_f32", mul_f32_len, mul_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_mul_f32, "mul_f32", mul_f32_len, mul_f32_data, "main", 3, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_scale_f32 = ggml_vk_create_pipeline("scale_f32", scale_f32_len, scale_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_scale_f32, "scale_f32", scale_f32_len, scale_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_sqr_f32 = ggml_vk_create_pipeline("sqr_f32", sqr_f32_len, sqr_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_sqr_f32, "sqr_f32", sqr_f32_len, sqr_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_clamp_f32 = ggml_vk_create_pipeline("clamp_f32", clamp_f32_len, clamp_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_clamp_f32, "clamp_f32", clamp_f32_len, clamp_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_gelu_f32 = ggml_vk_create_pipeline("gelu_f32", gelu_f32_len, gelu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_silu_f32 = ggml_vk_create_pipeline("silu_f32", silu_f32_len, silu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_relu_f32 = ggml_vk_create_pipeline("relu_f32", relu_f32_len, relu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_gelu_f32, "gelu_f32", gelu_f32_len, gelu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, 
ctx->pipeline_silu_f32, "silu_f32", silu_f32_len, silu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_relu_f32, "relu_f32", relu_f32_len, relu_f32_data, "main", 2, sizeof(vk_op_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_diag_mask_inf_f32 = ggml_vk_create_pipeline("diag_mask_inf_f32", diag_mask_inf_f32_len, diag_mask_inf_f32_data, "main", 2, sizeof(vk_op_diag_mask_push_constants), {512, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_diag_mask_inf_f32, "diag_mask_inf_f32", diag_mask_inf_f32_len, diag_mask_inf_f32_data, "main", 2, sizeof(vk_op_diag_mask_push_constants), {512, 1, 1}, {}, 1); - vk_pipeline_soft_max_f32 = ggml_vk_create_pipeline("soft_max_f32", soft_max_f32_len, soft_max_f32_data, "main", 3, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_soft_max_f32, "soft_max_f32", soft_max_f32_len, soft_max_f32_data, "main", 3, sizeof(vk_op_push_constants), {1, 1, 1}, {}, 1); - vk_pipeline_rope_f32 = ggml_vk_create_pipeline("rope_f32", rope_f32_len, rope_f32_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - vk_pipeline_rope_f16 = ggml_vk_create_pipeline("rope_f16", rope_f16_len, rope_f16_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_f32, "rope_f32", rope_f32_len, rope_f32_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_f16, "rope_f16", rope_f16_len, rope_f16_data, "main", 3, sizeof(vk_op_rope_push_constants), {1, 512, 1}, {}, 1); - vk_pipeline_rope_neox_f32 = ggml_vk_create_pipeline("rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); - vk_pipeline_rope_neox_f16 = ggml_vk_create_pipeline("rope_neox_f16", rope_neox_f16_len, rope_neox_f16_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_neox_f32, "rope_neox_f32", rope_neox_f32_len, rope_neox_f32_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); + ggml_vk_create_pipeline(ctx, ctx->pipeline_rope_neox_f16, "rope_neox_f16", rope_neox_f16_len, rope_neox_f16_data, "main", 3, sizeof(vk_op_rope_neox_push_constants), {1, 512, 1}, {}, 1); }
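// Editor's sketch (assumed layout, not in the patch): ggml_vk_create_pipeline now
// fills a pipeline slot owned by the backend context instead of assigning to a
// global vk_pipeline_* variable, so each device selected at runtime compiles and
// owns its own copy of every shader. A trimmed model of the fields referenced above:
struct ggml_backend_vk_context_pipelines_sketch {      // hypothetical stand-in
    vk_pipeline pipeline_matmul_f32_l, pipeline_matmul_f32_m, pipeline_matmul_f32_s;
    vk_pipeline pipeline_dequant[GGML_TYPE_COUNT];               // indexed by ggml_type
    vk_pipeline pipeline_dequant_mul_mat_vec_f32[GGML_TYPE_COUNT];
    vk_pipeline pipeline_get_rows[GGML_TYPE_COUNT];
    // ... one member per ctx->pipeline_* slot filled in ggml_vk_load_shaders(ctx)
};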
-void ggml_vk_init() { +static void ggml_vk_print_gpu_info(size_t idx) { + GGML_ASSERT(idx < vk_instance.device_indices.size()); + size_t dev_num = vk_instance.device_indices[idx]; #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_init()" << std::endl; + std::cerr << "ggml_vk_print_gpu_info(" << dev_num << ")" << std::endl; #endif - static bool initialized = false; + GGML_ASSERT(vk_instance.initialized); - if (initialized) { - return; + std::vector<vk::PhysicalDevice> devices = vk_instance.instance.enumeratePhysicalDevices(); + + if (dev_num >= devices.size()) { + std::cerr << "ggml_vulkan: Device with index " << dev_num << " does not exist." << std::endl; + throw std::runtime_error("Device not found"); } - initialized = true; + vk::PhysicalDevice physical_device = devices[dev_num]; + std::vector<vk::ExtensionProperties> ext_props = physical_device.enumerateDeviceExtensionProperties(); - const char* GGML_VULKAN_DEVICE = getenv("GGML_VULKAN_DEVICE"); - int dev_num = (GGML_VULKAN_DEVICE == NULL ? 0 : atoi(GGML_VULKAN_DEVICE)); + vk::PhysicalDeviceProperties2 props2; + vk::PhysicalDeviceMaintenance3Properties props3; + vk::PhysicalDeviceSubgroupProperties subgroup_props; + props2.pNext = &props3; + props3.pNext = &subgroup_props; + physical_device.getProperties2(&props2); + + const size_t subgroup_size = subgroup_props.subgroupSize; + const bool uma = props2.properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu; + + bool fp16_storage = false; + bool fp16_compute = false; + + for (auto properties : ext_props) { + if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) { + fp16_storage = true; + } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) { + fp16_compute = true; + } + } + + const char* GGML_VULKAN_DISABLE_F16 = getenv("GGML_VULKAN_DISABLE_F16"); + bool force_disable_f16 = GGML_VULKAN_DISABLE_F16 != nullptr; + + bool fp16 = !force_disable_f16 && fp16_storage && fp16_compute; + + vk::PhysicalDeviceFeatures device_features = physical_device.getFeatures(); + + VkPhysicalDeviceFeatures2 device_features2; + device_features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2; + device_features2.pNext = nullptr; + device_features2.features = (VkPhysicalDeviceFeatures)device_features; + + VkPhysicalDeviceVulkan11Features vk11_features; + vk11_features.pNext = nullptr; + vk11_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES; + device_features2.pNext = &vk11_features; + + VkPhysicalDeviceVulkan12Features vk12_features; + vk12_features.pNext = nullptr; + vk12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; + vk11_features.pNext = &vk12_features; + + vkGetPhysicalDeviceFeatures2(physical_device, &device_features2); + + fp16 = fp16 && vk12_features.shaderFloat16; + + std::string device_name = props2.properties.deviceName.data(); + std::cerr << GGML_VK_NAME << idx << ": " << device_name << " | uma: " << uma << " | fp16: " << fp16 << " | warp size: " << subgroup_size << std::endl; + + if (props2.properties.deviceType == vk::PhysicalDeviceType::eCpu) { + std::cerr << "ggml_vulkan: Warning: Device type is CPU. This is probably not the device you want."
<< std::endl; + } +} + +void ggml_vk_instance_init() { + if (vk_instance_initialized) { + return; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_instance_init()" << std::endl; +#endif vk::ApplicationInfo app_info{ "ggml-vulkan", 1, nullptr, 0, VK_API_VERSION }; const std::vector<const char*> layers = { @@ -989,12 +1106,55 @@ void ggml_vk_init() { validation_features.setPNext(nullptr); instance_create_info.setPNext(&validation_features); -std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; + std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; #endif - vk_instance = vk::createInstance(instance_create_info); + vk_instance.instance = vk::createInstance(instance_create_info); - vk_device.physical_device = vk_instance.enumeratePhysicalDevices()[dev_num]; - std::vector<vk::ExtensionProperties> ext_props = vk_device.physical_device.enumerateDeviceExtensionProperties(); + memset(vk_instance.initialized, 0, sizeof(bool) * GGML_VK_MAX_DEVICES); + + size_t num_available_devices = vk_instance.instance.enumeratePhysicalDevices().size(); + + // Emulate behavior of CUDA_VISIBLE_DEVICES for Vulkan + char * devices_env = getenv("GGML_VK_VISIBLE_DEVICES"); + if (devices_env != nullptr) { + std::string devices(devices_env); + std::replace(devices.begin(), devices.end(), ',', ' '); + + std::stringstream ss(devices); + size_t tmp; + while (ss >> tmp) { + if(tmp >= num_available_devices) { + std::cerr << "ggml_vulkan: Invalid device index " << tmp << " in GGML_VK_VISIBLE_DEVICES." << std::endl; + throw std::runtime_error("Invalid Vulkan device index"); + } + vk_instance.device_indices.push_back(tmp); + } + } else { + vk_instance.device_indices.push_back(0); + } + + vk_instance_initialized = true; +}
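// Editor's note: GGML_VK_VISIBLE_DEVICES takes a comma-separated list of physical
// device indices (e.g. GGML_VK_VISIBLE_DEVICES=1,0), mirroring CUDA_VISIBLE_DEVICES;
// when it is unset, only physical device 0 is exposed. A standalone model of the
// parsing done above (helper name assumed, range check against the enumerated
// device count omitted):
#include <algorithm>
#include <sstream>
#include <string>
#include <vector>

static std::vector<size_t> ggml_vk_parse_visible_devices(std::string devices) {
    std::replace(devices.begin(), devices.end(), ',', ' ');  // "1,0" -> "1 0"
    std::stringstream ss(devices);
    std::vector<size_t> indices;
    for (size_t tmp; ss >> tmp; ) {
        indices.push_back(tmp);
    }
    return indices;
}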
+ +void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { + GGML_ASSERT(idx < vk_instance.device_indices.size()); + size_t dev_num = vk_instance.device_indices[idx]; #ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_init(" << ctx->name << ", " << dev_num << ")" << std::endl; +#endif + ggml_vk_instance_init(); + + std::vector<vk::PhysicalDevice> devices = vk_instance.instance.enumeratePhysicalDevices(); + + if (dev_num >= devices.size()) { + std::cerr << "ggml_vulkan: Device with index " << dev_num << " does not exist." << std::endl; + throw std::runtime_error("Device not found"); + } + + vk_instance.devices[idx] = std::make_shared<vk_device>(); + ctx->device = vk_instance.devices[idx]; + ctx->device.lock()->physical_device = devices[dev_num]; + std::vector<vk::ExtensionProperties> ext_props = ctx->device.lock()->physical_device.enumerateDeviceExtensionProperties(); bool maintenance4_support = false; @@ -1014,18 +1174,18 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; if (maintenance4_support) { subgroup_props.pNext = &props4; } - vk_device.physical_device.getProperties2(&props2); - vk_device.properties = props2.properties; + ctx->device.lock()->physical_device.getProperties2(&props2); + ctx->device.lock()->properties = props2.properties; if (maintenance4_support) { - vk_device.max_memory_allocation_size = std::min(props3.maxMemoryAllocationSize, props4.maxBufferSize); + ctx->device.lock()->max_memory_allocation_size = std::min(props3.maxMemoryAllocationSize, props4.maxBufferSize); } else { - vk_device.max_memory_allocation_size = props3.maxMemoryAllocationSize; + ctx->device.lock()->max_memory_allocation_size = props3.maxMemoryAllocationSize; } - vk_device.vendor_id = vk_device.properties.vendorID; - vk_device.subgroup_size = subgroup_props.subgroupSize; - vk_device.uma = vk_device.properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu; + ctx->device.lock()->vendor_id = ctx->device.lock()->properties.vendorID; + ctx->device.lock()->subgroup_size = subgroup_props.subgroupSize; + ctx->device.lock()->uma = ctx->device.lock()->properties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu; bool fp16_storage = false; bool fp16_compute = false; @@ -1039,31 +1199,31 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; } const char* GGML_VULKAN_DISABLE_F16 = getenv("GGML_VULKAN_DISABLE_F16"); - bool force_disable_f16 = GGML_VULKAN_DISABLE_F16 != NULL; + bool force_disable_f16 = GGML_VULKAN_DISABLE_F16 != nullptr; - vk_device.fp16 = !force_disable_f16 && fp16_storage && fp16_compute; + ctx->device.lock()->fp16 = !force_disable_f16 && fp16_storage && fp16_compute; - std::vector<vk::QueueFamilyProperties> queue_family_props = vk_device.physical_device.getQueueFamilyProperties(); + std::vector<vk::QueueFamilyProperties> queue_family_props = ctx->device.lock()->physical_device.getQueueFamilyProperties(); // Try to find a non-graphics compute queue and transfer-focused queues const uint32_t compute_queue_family_index = ggml_vk_find_queue_family_index(queue_family_props, vk::QueueFlagBits::eCompute, vk::QueueFlagBits::eGraphics, -1, 1); const uint32_t transfer_queue_family_index = ggml_vk_find_queue_family_index(queue_family_props, vk::QueueFlagBits::eTransfer, vk::QueueFlagBits::eCompute | vk::QueueFlagBits::eGraphics, compute_queue_family_index, 1); const float priorities[] = { 1.0f, 1.0f }; - const bool single_queue = compute_queue_family_index == transfer_queue_family_index && queue_family_props[compute_queue_family_index].queueCount == 1; + ctx->device.lock()->single_queue = compute_queue_family_index == transfer_queue_family_index && queue_family_props[compute_queue_family_index].queueCount == 1; std::vector<vk::DeviceQueueCreateInfo> device_queue_create_infos; if (compute_queue_family_index != transfer_queue_family_index) { device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 1, priorities}); device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), transfer_queue_family_index, 1, priorities + 1}); - } else if(!single_queue) { + } else if(!ctx->device.lock()->single_queue) {
device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 2, priorities}); } else { device_queue_create_infos.push_back({vk::DeviceQueueCreateFlags(), compute_queue_family_index, 1, priorities}); } vk::DeviceCreateInfo device_create_info; std::vector<const char *> device_extensions; - vk::PhysicalDeviceFeatures device_features = vk_device.physical_device.getFeatures(); + vk::PhysicalDeviceFeatures device_features = ctx->device.lock()->physical_device.getFeatures(); VkPhysicalDeviceFeatures2 device_features2; @@ -1080,13 +1240,13 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; vk12_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; vk11_features.pNext = &vk12_features; - vkGetPhysicalDeviceFeatures2(vk_device.physical_device, &device_features2); + vkGetPhysicalDeviceFeatures2(ctx->device.lock()->physical_device, &device_features2); - vk_device.fp16 = vk_device.fp16 && vk12_features.shaderFloat16; + ctx->device.lock()->fp16 = ctx->device.lock()->fp16 && vk12_features.shaderFloat16; if (!vk11_features.storageBuffer16BitAccess) { - std::cerr << "ggml_vulkan: device does not support 16-bit storage" << std::endl; - GGML_ASSERT(false); + std::cerr << "ggml_vulkan: device " << GGML_VK_NAME << idx << " does not support 16-bit storage." << std::endl; + throw std::runtime_error("Unsupported device"); } device_extensions.push_back("VK_KHR_16bit_storage"); @@ -1095,10 +1255,11 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; device_extensions.push_back("VK_KHR_shader_non_semantic_info"); #endif - if (vk_device.fp16) { + if (ctx->device.lock()->fp16) { device_extensions.push_back("VK_KHR_shader_float16_int8"); } - std::cerr << "ggml_vulkan: Using " << vk_device.properties.deviceName << " | uma: " << vk_device.uma << " | fp16: " << vk_device.fp16 << " | warp size: " << vk_device.subgroup_size << std::endl; + ctx->device.lock()->name = ctx->device.lock()->properties.deviceName.data(); + device_create_info = { vk::DeviceCreateFlags(), device_queue_create_infos, @@ -1106,28 +1267,32 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; device_extensions }; device_create_info.setPNext(&device_features2); - vk_device.device = vk_device.physical_device.createDevice(device_create_info); + ctx->device.lock()->device = ctx->device.lock()->physical_device.createDevice(device_create_info); - vk_device.descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN; + ctx->device.lock()->descriptor_set_mode = VK_DEVICE_DESCRIPTOR_POOL_MODE_UNKNOWN; // Shaders - ggml_vk_load_shaders(); + ggml_vk_load_shaders(ctx); // Queues - vk_device.compute_queue = ggml_vk_create_queue(compute_queue_family_index, 0, { vk::PipelineStageFlagBits::eComputeShader | vk::PipelineStageFlagBits::eTransfer }); - if (!single_queue) { + ggml_vk_create_queue(ctx, ctx->device.lock()->compute_queue, compute_queue_family_index, 0, { vk::PipelineStageFlagBits::eComputeShader | vk::PipelineStageFlagBits::eTransfer }); + if (!ctx->device.lock()->single_queue) { const uint32_t transfer_queue_index = compute_queue_family_index == transfer_queue_family_index ?
1 : 0; - vk_device.transfer_queue = ggml_vk_create_queue(transfer_queue_family_index, transfer_queue_index, { vk::PipelineStageFlagBits::eTransfer }); + ggml_vk_create_queue(ctx, ctx->device.lock()->transfer_queue, transfer_queue_family_index, transfer_queue_index, { vk::PipelineStageFlagBits::eTransfer }); } else { - vk_device.transfer_queue = vk_device.compute_queue; + // TODO: Use pointer or reference to avoid copy + ctx->device.lock()->transfer_queue = ctx->device.lock()->compute_queue; } - vk_fence = vk_device.device.createFence({}); + ctx->fence = ctx->device.lock()->device.createFence({}); - vk_ctx = nullptr; - vk_transfer_ctx = nullptr; + ctx->compute_ctx = nullptr; + ctx->transfer_ctx = nullptr; - vk_disable = false; + ctx->disable = false; + ctx->initialized = true; + + ctx->idx = idx; #ifdef GGML_VULKAN_CHECK_RESULTS const char* skip_checks = getenv("GGML_VULKAN_SKIP_CHECKS"); @@ -1137,7 +1302,7 @@ std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; #endif } -static vk_pipeline* ggml_vk_get_to_fp16(ggml_type type) { +static vk_pipeline* ggml_vk_get_to_fp16(ggml_backend_vk_context * ctx, ggml_type type) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_to_fp16()" << std::endl; #endif @@ -1158,10 +1323,10 @@ static vk_pipeline* ggml_vk_get_to_fp16(ggml_type type) { return nullptr; } - return &vk_pipeline_dequant[type]; + return &ctx->pipeline_dequant[type]; } -static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_type type) { +static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_backend_vk_context * ctx, ggml_type type) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_get_dequantize_mul_mat_vec()" << std::endl; #endif @@ -1182,15 +1347,10 @@ static vk_pipeline* ggml_vk_get_dequantize_mul_mat_vec(ggml_type type) { return nullptr; } - return &vk_pipeline_dequant_mul_mat_vec_f32[type]; + return &ctx->pipeline_dequant_mul_mat_vec_f32[type]; } -// buffer pool for vulkan -#define MAX_VK_BUFFERS 256 - -static vk_buffer g_vk_buffer_pool[MAX_VK_BUFFERS]; - -static vk_buffer ggml_vk_pool_malloc(size_t size) { +static vk_buffer ggml_vk_pool_malloc(ggml_backend_vk_context * ctx, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_pool_malloc(" << size << ")" << std::endl; #endif @@ -1199,98 +1359,95 @@ static vk_buffer ggml_vk_pool_malloc(size_t size) { int worst_i = -1; size_t worst_size = 0; //largest unused buffer seen so far for (int i = 0; i < MAX_VK_BUFFERS; ++i) { - vk_buffer &b = g_vk_buffer_pool[i]; - if (b.size > 0 && b.size >= size && b.size < best_size) { + vk_buffer &b = ctx->buffer_pool[i]; + if (b != nullptr && b->size >= size && b->size < best_size) { best_i = i; - best_size = b.size; + best_size = b->size; } - if (b.size > 0 && b.size > worst_size) { + if (b != nullptr && b->size > worst_size) { worst_i = i; - worst_size = b.size; + worst_size = b->size; } } if(best_i != -1) { //found the smallest buffer that fits our needs - vk_buffer b = g_vk_buffer_pool[best_i]; - g_vk_buffer_pool[best_i].size = 0; + vk_buffer b = ctx->buffer_pool[best_i]; + ctx->buffer_pool[best_i].reset(); return b; } if(worst_i != -1) { //no buffer that fits our needs, resize largest one to save memory - vk_buffer& b = g_vk_buffer_pool[worst_i]; + vk_buffer& b = ctx->buffer_pool[worst_i]; ggml_vk_destroy_buffer(b); } - return ggml_vk_create_buffer_check(size, vk::MemoryPropertyFlagBits::eDeviceLocal); + return ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal); }
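// Editor's note: the pool above is best-fit with worst-fit eviction. On allocation
// it hands out the smallest pooled buffer that still fits the request; if none
// fits, it destroys the largest pooled buffer (capping total pool memory) before
// making a fresh device-local allocation. With vk_buffer now a shared_ptr, an
// empty pool slot is simply a null handle, which is what the b != nullptr tests
// and the .reset() calls rely on.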
-static void ggml_vk_pool_free(vk_buffer& buffer) { +static void ggml_vk_pool_free(ggml_backend_vk_context * ctx, vk_buffer& buffer) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_pool_free(" << buffer.size << ")" << std::endl; + std::cerr << "ggml_vk_pool_free(" << buffer->size << ")" << std::endl; #endif for (int i = 0; i < MAX_VK_BUFFERS; ++i) { - vk_buffer& b = g_vk_buffer_pool[i]; - if (b.size == 0) { + vk_buffer& b = ctx->buffer_pool[i]; + if (b == nullptr) { b = buffer; - // Set owning queue family index to ignored to avoid synchronization on next use - b.qf_owner = VK_QUEUE_FAMILY_IGNORED; return; } } - fprintf(stderr, "WARNING: vk buffer pool full, increase MAX_VK_BUFFERS\n"); + std::cerr << "ggml_vulkan: WARNING: vk buffer pool full, increase MAX_VK_BUFFERS" << std::endl; ggml_vk_destroy_buffer(buffer); } // Returns an available temporary buffer that may only be used temporarily, it will be reused -static vk_buffer ggml_vk_create_buffer_temp(size_t size) { +static vk_buffer ggml_vk_create_buffer_temp(ggml_backend_vk_context * ctx, size_t size) { // Try to find existing temp buffer with enough capacity - for (auto& buffer : vk_gc.temp_buffers) { - if (buffer.size >= size) { + for (auto& buffer : ctx->gc.temp_buffers) { + if (buffer->size >= size) { return buffer; } } // Otherwise create new buffer - vk_buffer buf = ggml_vk_pool_malloc(size); - vk_gc.temp_buffers.push_back(buf); + vk_buffer buf = ggml_vk_pool_malloc(ctx, size); + ctx->gc.temp_buffers.push_back(buf); return buf; } -static void * ggml_vk_host_malloc(size_t size) { +static void * ggml_vk_host_malloc(ggml_backend_vk_context * ctx, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_malloc(" << size << ")" << std::endl; #endif - vk_buffer buf = ggml_vk_create_buffer(size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + vk_buffer buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); - if(!(buf.memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible)) { + if(!(buf->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible)) { fprintf(stderr, "WARNING: failed to allocate %.2f MB of pinned memory\n", size/1024.0/1024.0); - buf.size = 0; - vk_device.device.freeMemory(buf.device_memory); - vk_device.device.destroyBuffer(buf.buffer); + ctx->device.lock()->device.freeMemory(buf->device_memory); + ctx->device.lock()->device.destroyBuffer(buf->buffer); return nullptr; } - vk_pinned_memory.push_back(std::make_tuple(buf.ptr, size, buf)); + ctx->pinned_memory.push_back(std::make_tuple(buf->ptr, size, buf)); - return buf.ptr; + return buf->ptr; } -static void ggml_vk_host_free(void* ptr) { +static void ggml_vk_host_free(ggml_backend_vk_context * ctx, void* ptr) { if (ptr == nullptr) { return; } #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_free(" << ptr << ")" << std::endl; #endif - vk_buffer* buf = nullptr; + vk_buffer buf; size_t index; - for (size_t i = 0; i < vk_pinned_memory.size(); i++) { - const uint8_t* addr = (const uint8_t*) std::get<0>(vk_pinned_memory[i]); - const uint8_t* endr = addr + std::get<1>(vk_pinned_memory[i]); + for (size_t i = 0; i < ctx->pinned_memory.size(); i++) { + const uint8_t* addr = (const uint8_t*) std::get<0>(ctx->pinned_memory[i]); + const uint8_t* endr = addr + std::get<1>(ctx->pinned_memory[i]); if (ptr >= addr && ptr < endr) { - buf = &std::get<2>(vk_pinned_memory[i]); + buf = std::get<2>(ctx->pinned_memory[i]); index = i;
break; } @@ -1300,28 +1457,28 @@ static void ggml_vk_host_free(void* ptr) { return; } - ggml_vk_destroy_buffer(*buf); + ggml_vk_destroy_buffer(buf); - vk_pinned_memory.erase(vk_pinned_memory.begin() + index); + ctx->pinned_memory.erase(ctx->pinned_memory.begin() + index); } -static void ggml_vk_host_get(const void * ptr, vk_buffer *& buf, size_t& buf_offset) { +static void ggml_vk_host_get(ggml_backend_vk_context * ctx, const void * ptr, vk_buffer& buf, size_t& buf_offset) { buf = nullptr; buf_offset = 0; - for (size_t i = 0; i < vk_pinned_memory.size(); i++) { - const uint8_t* addr = (const uint8_t*) std::get<0>(vk_pinned_memory[i]); - const uint8_t* endr = addr + std::get<1>(vk_pinned_memory[i]); + for (size_t i = 0; i < ctx->pinned_memory.size(); i++) { + const uint8_t* addr = (const uint8_t*) std::get<0>(ctx->pinned_memory[i]); + const uint8_t* endr = addr + std::get<1>(ctx->pinned_memory[i]); if (ptr >= addr && ptr < endr) { - buf = &std::get<2>(vk_pinned_memory[i]); + buf = std::get<2>(ctx->pinned_memory[i]); buf_offset = ((const uint8_t *)ptr) - addr; break; } } }
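// Editor's sketch (types assumed): ctx->pinned_memory records each pinned host
// allocation as a (base pointer, size, buffer) tuple; ggml_vk_host_get is a linear
// scan that returns the owning buffer and the offset of ptr inside it. In isolation:
#include <cstdint>
#include <tuple>
#include <vector>

template <typename Buf>   // Buf stands in for vk_buffer (a shared_ptr)
static Buf ggml_vk_find_pinned(const std::vector<std::tuple<void *, size_t, Buf>> & pinned,
                               const void * ptr, size_t & buf_offset) {
    buf_offset = 0;
    for (const auto & entry : pinned) {
        const uint8_t * addr = (const uint8_t *) std::get<0>(entry);
        const uint8_t * endr = addr + std::get<1>(entry);
        if ((const uint8_t *) ptr >= addr && (const uint8_t *) ptr < endr) {
            buf_offset = (const uint8_t *) ptr - addr;   // offset inside the pinned block
            return std::get<2>(entry);
        }
    }
    return nullptr;   // not pinned: callers fall back to the staging path
}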
subctx->s->buffer.bindDescriptorSets(vk::PipelineBindPoint::eCompute,
                                 pipeline.layout,
                                 0,
                                 { descriptor_set },
                                 {});
-    ctx->s->buffer.dispatch(wg0, wg1, wg2);
+    subctx->s->buffer.dispatch(wg0, wg1, wg2);
 }
 
 static void ggml_vk_end_submission(vk_submission& s, std::vector<vk_semaphore> wait_semaphores, std::vector<vk_semaphore> signal_semaphores) {
@@ -1381,16 +1538,16 @@ static void ggml_vk_ctx_end(vk_context * ctx) {
     ctx->s = nullptr;
 }
 
-static void ggml_vk_ctx_begin(vk_context * ctx) {
+static void ggml_vk_ctx_begin(ggml_backend_vk_context * ctx, vk_context * subctx) {
 #ifdef GGML_VULKAN_DEBUG
     std::cerr << "ggml_vk_ctx_begin(" << ctx << ")" << std::endl;
 #endif
-    if (ctx->s != nullptr) {
-        ggml_vk_ctx_end(ctx);
+    if (subctx->s != nullptr) {
+        ggml_vk_ctx_end(subctx);
     }
 
-    ctx->seqs.push_back({ ggml_vk_begin_submission(*ctx->q) });
-    ctx->s = ctx->seqs[ctx->seqs.size() - 1].data();
+    subctx->seqs.push_back({ ggml_vk_begin_submission(ctx, *subctx->q) });
+    subctx->s = subctx->seqs[subctx->seqs.size() - 1].data();
 }
 
 static size_t ggml_vk_align_size(size_t width, size_t align) {
@@ -1405,14 +1562,14 @@ static void deferred_memcpy(void * dst, const void * src, size_t size, std::vect
     }
 }
 
-static void ensure_sync_staging_buffer(size_t size) {
-    if (vk_sync_staging.size < size) {
-        ggml_vk_destroy_buffer(vk_sync_staging);
-        vk_sync_staging = ggml_vk_create_buffer_check(size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached);
+static void ggml_vk_ensure_sync_staging_buffer(ggml_backend_vk_context * ctx, size_t size) {
+    if (ctx->sync_staging == nullptr || ctx->sync_staging->size < size) {
+        ggml_vk_destroy_buffer(ctx->sync_staging);
+        ctx->sync_staging = ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached);
     }
 }
 
-static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size_t offset, const ggml_tensor * tensor, bool sync_staging = false) {
+static void ggml_vk_buffer_write_nc_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const ggml_tensor * tensor, bool sync_staging = false) {
 #ifdef GGML_VULKAN_DEBUG
     std::cerr << "ggml_vk_buffer_write_nc_async(" << tensor << ")" << std::endl;
 #endif
@@ -1423,9 +1580,9 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size
         GGML_ASSERT(false);
     }
     // Check if src is pinned memory
-    vk_buffer * buf = nullptr;
+    vk_buffer buf;
     size_t buf_offset;
-    ggml_vk_host_get(tensor->data, buf, buf_offset);
+    ggml_vk_host_get(ctx, tensor->data, buf, buf_offset);
 
     const uint64_t ne0 = tensor->ne[0];
     const uint64_t ne1 = tensor->ne[1];
@@ -1471,21 +1628,21 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size
             }
         }
 
-        ggml_vk_sync_buffers(ctx);
-        ctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices);
+        ggml_vk_sync_buffers(subctx);
+        subctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices);
         return;
     }
 
     // Staging buffer required
-    vk_buffer * staging = &vk_staging;
-    size_t staging_offset = vk_staging_offset;
+    vk_buffer staging = ctx->staging;
+    size_t staging_offset = ctx->staging_offset;
     const size_t copy_size = ts*ne/bs;
-    if (vk_staging.size < vk_staging_offset + copy_size) {
+    if (ctx->staging->size < ctx->staging_offset + copy_size) {
         if (sync_staging) {
             // Create temporary larger buffer
-            ensure_sync_staging_buffer(copy_size);
+            ggml_vk_ensure_sync_staging_buffer(ctx, copy_size);
 
-            staging = 
&vk_sync_staging; + staging = ctx->sync_staging; staging_offset = 0; } else { GGML_ASSERT(false); @@ -1494,23 +1651,23 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size VkBufferCopy buf_copy{ staging_offset, offset, copy_size }; - ggml_vk_sync_buffers(ctx); - vkCmdCopyBuffer(ctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy); + ggml_vk_sync_buffers(subctx); + vkCmdCopyBuffer(subctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy); for (uint64_t i3 = 0; i3 < ne3; i3++) { for (uint64_t i2 = 0; i2 < ne2; i2++) { // Find longest contiguous slice if (ne1*nb1 == dstnb2) { - deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i3*dstnb3 + i2*dstnb2, (const uint8_t *) tensor->data + buf_offset + i3*nb3 + i2*nb2, dstnb2, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i3*dstnb3 + i2*dstnb2, (const uint8_t *) tensor->data + buf_offset + i3*nb3 + i2*nb2, dstnb2, &subctx->in_memcpys); } else { for (uint64_t i1 = 0; i1 < ne1; i1++) { if (ne0*nb0/bs == dstnb1) { - deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i3*dstnb3 + i2*dstnb2 + i1*dstnb1, (const uint8_t *) tensor->data + buf_offset + i3*nb3 + i2*nb2 + i1*nb1, dstnb1, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i3*dstnb3 + i2*dstnb2 + i1*dstnb1, (const uint8_t *) tensor->data + buf_offset + i3*nb3 + i2*nb2 + i1*nb1, dstnb1, &subctx->in_memcpys); } else { const uint64_t s_off = buf_offset + i3*nb3 + i2*nb2 + i1*nb1; const uint64_t d_off = staging_offset + i3*dstnb3 + i2*dstnb2 + i1*dstnb1; for (uint64_t i0 = 0; i0 < ne0; i0++) { - deferred_memcpy((uint8_t *)staging->ptr + d_off + i0*dstnb0, (const uint8_t *) tensor->data + s_off + i0*nb0, dstnb0, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + d_off + i0*dstnb0, (const uint8_t *) tensor->data + s_off + i0*nb0, dstnb0, &subctx->in_memcpys); } } } @@ -1519,19 +1676,22 @@ static void ggml_vk_buffer_write_nc_async(vk_context * ctx, vk_buffer* dst, size } } -static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height, bool sync_staging = false) { +static void ggml_vk_buffer_write_2d_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height, bool sync_staging = false) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_2d_async(" << width << ", " << height << ")" << std::endl; #endif + // Make sure ctx owns the buffer + GGML_ASSERT(dst->ctx == ctx); + // Buffer is already mapped if(dst->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible) { std::cerr << "ggml_vulkan: buffer_write_async dst buffer is host_visible. Use synchronous write." 
<< std::endl; GGML_ASSERT(false); } // Check if src is pinned memory - vk_buffer * buf = nullptr; + vk_buffer buf = nullptr; size_t buf_offset; - ggml_vk_host_get(src, buf, buf_offset); + ggml_vk_host_get(ctx, src, buf, buf_offset); if (buf != nullptr) { // Memory is pinned, use as staging buffer @@ -1550,8 +1710,8 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size } } - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices); + ggml_vk_sync_buffers(subctx); + subctx->s->buffer.copyBuffer(buf->buffer, dst->buffer, slices); return; } #ifdef GGML_VULKAN_DEBUG @@ -1559,14 +1719,14 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size #endif // Staging buffer required - vk_buffer * staging = &vk_staging; - size_t staging_offset = vk_staging_offset; + vk_buffer staging = ctx->staging; + size_t staging_offset = ctx->staging_offset; const size_t copy_size = width*height; - if (vk_staging.size < vk_staging_offset + copy_size) { + if (ctx->staging == nullptr || ctx->staging->size < ctx->staging_offset + copy_size) { if (sync_staging) { - ensure_sync_staging_buffer(copy_size); + ggml_vk_ensure_sync_staging_buffer(ctx, copy_size); - staging = &vk_sync_staging; + staging = ctx->sync_staging; staging_offset = 0; } else { GGML_ASSERT(false); @@ -1578,26 +1738,26 @@ static void ggml_vk_buffer_write_2d_async(vk_context * ctx, vk_buffer* dst, size offset, copy_size}; - ggml_vk_sync_buffers(ctx); - vkCmdCopyBuffer(ctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy); + ggml_vk_sync_buffers(subctx); + vkCmdCopyBuffer(subctx->s->buffer, staging->buffer, dst->buffer, 1, &buf_copy); if (width == spitch) { - deferred_memcpy((uint8_t *)staging->ptr + staging_offset, src, width * height, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + staging_offset, src, width * height, &subctx->in_memcpys); } else { for (size_t i = 0; i < height; i++) { - deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i * width, (const uint8_t *) src + i * spitch, width, &ctx->in_memcpys); + deferred_memcpy((uint8_t *)staging->ptr + staging_offset + i * width, (const uint8_t *) src + i * spitch, width, &subctx->in_memcpys); } } } -static void ggml_vk_buffer_write_async(vk_context * ctx, vk_buffer* dst, size_t offset, const void * src, size_t size, bool sync_staging = false) { +static void ggml_vk_buffer_write_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const void * src, size_t size, bool sync_staging = false) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_async(" << size << ")" << std::endl; #endif - return ggml_vk_buffer_write_2d_async(ctx, dst, offset, src, size, size, 1, sync_staging); + return ggml_vk_buffer_write_2d_async(ctx, subctx, dst, offset, src, size, size, 1, sync_staging); } -static void ggml_vk_buffer_write_2d(vk_buffer* dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height) { +static void ggml_vk_buffer_write_2d(ggml_backend_vk_context * ctx, vk_buffer& dst, size_t offset, const void * src, size_t spitch, size_t width, size_t height) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_write_2d(" << width << ", " << height << ")" << std::endl; #endif @@ -1609,39 +1769,42 @@ static void ggml_vk_buffer_write_2d(vk_buffer* dst, size_t offset, const void * memcpy((uint8_t *)dst->ptr + offset + i * width, (const uint8_t *) src + i * spitch, width); } } else { - vk_context * ctx = 
ggml_vk_create_context(vk_device.transfer_queue);
-        ggml_vk_ctx_begin(ctx);
-        ggml_vk_buffer_write_2d_async(ctx, dst, offset, src, spitch, width, height, true);
-        ggml_vk_ctx_end(ctx);
+        vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue);
+        ggml_vk_ctx_begin(ctx, subctx);
+        ggml_vk_buffer_write_2d_async(ctx, subctx, dst, offset, src, spitch, width, height, true);
+        ggml_vk_ctx_end(subctx);
 
-        for (auto& cpy : ctx->in_memcpys) {
+        for (auto& cpy : subctx->in_memcpys) {
             memcpy(cpy.dst, cpy.src, cpy.n);
         }
 
-        ggml_vk_submit(ctx, vk_fence);
-        VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "vk_buffer_write_2d waitForFences");
-        vk_device.device.resetFences({ vk_fence });
+        ggml_vk_submit(subctx, ctx->fence);
+        VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_write_2d waitForFences");
+        ctx->device.lock()->device.resetFences({ ctx->fence });
     }
 }
 
-static void ggml_vk_buffer_write(vk_buffer* dst, size_t offset, const void * src, size_t size) {
+static void ggml_vk_buffer_write(ggml_backend_vk_context * ctx, vk_buffer& dst, size_t offset, const void * src, size_t size) {
 #ifdef GGML_VULKAN_DEBUG
     std::cerr << "ggml_vk_buffer_write(" << size << ")" << std::endl;
 #endif
-    ggml_vk_buffer_write_2d(dst, offset, src, 0, size, 1);
+    ggml_vk_buffer_write_2d(ctx, dst, offset, src, 0, size, 1);
 }
 
-static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* src, size_t offset, void * dst, size_t spitch, size_t dpitch, size_t width, size_t height, bool sync_staging = false) {
+static void ggml_vk_buffer_read_2d_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& src, size_t offset, void * dst, size_t spitch, size_t dpitch, size_t width, size_t height, bool sync_staging = false) {
 #ifdef GGML_VULKAN_DEBUG
     std::cerr << "ggml_vk_buffer_read_2d_async(offset=" << offset << ", width=" << width << ", height=" << height << ")" << std::endl;
 #endif
     GGML_ASSERT(width > 0);
     GGML_ASSERT(height > 0);
-    GGML_ASSERT(src->size > 0);
+    GGML_ASSERT(src != nullptr);
+    // Make sure ctx owns the buffer
+    GGML_ASSERT(src->ctx == ctx);
+
     // Check if dst is pinned memory
-    vk_buffer * buf = nullptr;
+    vk_buffer buf = nullptr;
     size_t buf_offset;
-    ggml_vk_host_get(dst, buf, buf_offset);
+    ggml_vk_host_get(ctx, dst, buf, buf_offset);
 
     std::vector<vk::BufferCopy> slices(1);
     if (width == spitch && width == dpitch) {
@@ -1660,8 +1823,8 @@ static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* src, size_
 
     if (buf != nullptr) {
         // Memory is pinned, use as staging buffer
-        ggml_vk_sync_buffers(ctx);
-        ctx->s->buffer.copyBuffer(src->buffer, buf->buffer, slices);
+        ggml_vk_sync_buffers(subctx);
+        subctx->s->buffer.copyBuffer(src->buffer, buf->buffer, slices);
 
         return;
     }
@@ -1670,30 +1833,30 @@ static void ggml_vk_buffer_read_2d_async(vk_context * ctx, vk_buffer* src, size_
 #endif
 
     // Fall back to staging buffer
-    vk_buffer * staging = &vk_staging;
+    vk_buffer staging = ctx->staging;
     const size_t copy_size = dpitch * height;
-    if (vk_staging.size < vk_staging_offset + copy_size) {
+    if (ctx->staging == nullptr || ctx->staging->size < ctx->staging_offset + copy_size) {
         if (sync_staging) {
             // Create temporary larger buffer
-            ensure_sync_staging_buffer(copy_size);
+            ggml_vk_ensure_sync_staging_buffer(ctx, copy_size);
 
-            staging = &vk_sync_staging;
+            staging = ctx->sync_staging;
         } else {
             GGML_ASSERT(false);
         }
     }
 
-    ggml_vk_sync_buffers(ctx);
-    ctx->s->buffer.copyBuffer(src->buffer, staging->buffer, slices);
+    
ggml_vk_sync_buffers(subctx); + subctx->s->buffer.copyBuffer(src->buffer, staging->buffer, slices); - deferred_memcpy(dst, staging->ptr, copy_size, &ctx->out_memcpys); + deferred_memcpy(dst, staging->ptr, copy_size, &subctx->out_memcpys); } -static void ggml_vk_buffer_read_async(vk_context * ctx, vk_buffer* src, size_t offset, void * dst, size_t size, bool sync_staging = false) { - return ggml_vk_buffer_read_2d_async(ctx, src, offset, dst, size, size, size, 1, sync_staging); +static void ggml_vk_buffer_read_async(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& src, size_t offset, void * dst, size_t size, bool sync_staging = false) { + return ggml_vk_buffer_read_2d_async(ctx, subctx, src, offset, dst, size, size, size, 1, sync_staging); } -static void ggml_vk_buffer_read(vk_buffer* src, size_t offset, void * dst, size_t size) { +static void ggml_vk_buffer_read(ggml_backend_vk_context * ctx, vk_buffer& src, size_t offset, void * dst, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_read(" << offset << ", " << size << ")" << std::endl; #endif @@ -1702,61 +1865,88 @@ static void ggml_vk_buffer_read(vk_buffer* src, size_t offset, void * dst, size_ memcpy(dst, (uint8_t *) src->ptr + offset, size); } else { - vk_context * ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(ctx); - ggml_vk_buffer_read_async(ctx, src, offset, dst, size, true); - ggml_vk_ctx_end(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, subctx); + ggml_vk_buffer_read_async(ctx, subctx, src, offset, dst, size, true); + ggml_vk_ctx_end(subctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "vk_buffer_read waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_read waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); - for (auto& cpy : ctx->out_memcpys) { + for (auto& cpy : subctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } } } -static void ggml_vk_buffer_copy_async(vk_context * ctx, vk_buffer * dst, size_t dst_offset, vk_buffer * src, size_t src_offset, size_t size) { +static void ggml_vk_buffer_copy_async(vk_context * ctx, vk_buffer& dst, size_t dst_offset, vk_buffer& src, size_t src_offset, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_copy_async(" << size << ")" << std::endl; #endif + // Make sure both buffers are on same ctx + GGML_ASSERT(src->ctx == dst->ctx); + VkBufferCopy bc{ src_offset, dst_offset, size }; vkCmdCopyBuffer(ctx->s->buffer, src->buffer, dst->buffer, 1, &bc); } -static void ggml_vk_buffer_copy(vk_buffer * dst, size_t dst_offset, vk_buffer * src, size_t src_offset, size_t size) { +static void ggml_vk_buffer_copy(vk_buffer& dst, size_t dst_offset, vk_buffer& src, size_t src_offset, size_t size) { + if (src->ctx == dst->ctx) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_buffer_copy(" << size << ")" << std::endl; + std::cerr << "ggml_vk_buffer_copy(SINGLE_DEVICE, " << size << ")" << std::endl; #endif - VkBufferCopy bc{ src_offset, dst_offset, size }; + // Copy within the device + ggml_backend_vk_context * ctx = src->ctx; - vk_context * ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(ctx); - vkCmdCopyBuffer(ctx->s->buffer, src->buffer, dst->buffer, 1, &bc); - ggml_vk_buffer_copy_async(ctx, dst, 
dst_offset, src, src_offset, size); - ggml_vk_ctx_end(ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "vk_buffer_copy waitForFences"); - vk_device.device.resetFences({ vk_fence }); + VkBufferCopy bc{ src_offset, dst_offset, size }; + + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, subctx); + ggml_vk_buffer_copy_async(subctx, dst, dst_offset, src, src_offset, size); + ggml_vk_ctx_end(subctx); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_buffer_copy waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); + } else { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_buffer_copy(MULTI_DEVICE, " << size << ")" << std::endl; +#endif + // Copy device to device + ggml_backend_vk_context * src_ctx = src->ctx; + ggml_backend_vk_context * dst_ctx = dst->ctx; + + ggml_vk_ensure_sync_staging_buffer(src_ctx, size); + ggml_vk_ensure_sync_staging_buffer(dst_ctx, size); + + // Copy to src staging buffer + ggml_vk_buffer_copy(src_ctx->sync_staging, 0, src, src_offset, size); + // memcpy to dst staging buffer + memcpy(dst_ctx->sync_staging->ptr, src_ctx->sync_staging->ptr, size); + // Copy to dst buffer + ggml_vk_buffer_copy(dst, dst_offset, dst_ctx->sync_staging, 0, size); + } } -static void ggml_vk_buffer_memset(vk_buffer* dst, size_t offset, uint32_t c, size_t size) { +static void ggml_vk_buffer_memset(ggml_backend_vk_context * ctx, vk_buffer& dst, size_t offset, uint32_t c, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_buffer_memset(" << offset << ", " << c << ", " << size << ")" << std::endl; #endif - vk_context * ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(ctx); - ctx->s->buffer.fillBuffer(dst->buffer, offset, size, c); - ggml_vk_ctx_end(ctx); + // Make sure ctx owns the buffer + GGML_ASSERT(dst->ctx == ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "vk_memset waitForFences"); - vk_device.device.resetFences({ vk_fence }); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, subctx); + subctx->s->buffer.fillBuffer(dst->buffer, offset, size, c); + ggml_vk_ctx_end(subctx); + + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "vk_memset waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); } -static void ggml_vk_h2d_tensor_2d(vk_context * ctx, vk_buffer * dst, size_t offset, const ggml_tensor * src, uint64_t i3, uint64_t i2, uint64_t i1) { +static void ggml_vk_h2d_tensor_2d(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& dst, size_t offset, const ggml_tensor * src, uint64_t i3, uint64_t i2, uint64_t i1) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_h2d_tensor_2d(dst=" << dst << ", offset=" << offset << ", src=" << src << ", i3=" << i3 << ", i2=" << i2 << ", i1=" << i1 << ")" << std::endl; #endif @@ -1773,20 +1963,20 @@ static void ggml_vk_h2d_tensor_2d(vk_context * ctx, vk_buffer * dst, size_t offs const void * x = (const void *) ((const char *) src->data + i2*nb2 + i3*nb3); if (nb0 == ts && nb1 == row_length) { - return ggml_vk_buffer_write_async(ctx, dst, offset, x, i1*nb1); + return ggml_vk_buffer_write_async(ctx, subctx, dst, offset, x, i1*nb1); } if (nb0 == ts && (i1 == ne1 || 
!ggml_is_permuted(src))) { - return ggml_vk_buffer_write_2d_async(ctx, dst, offset, x, nb1, row_length, i1); + return ggml_vk_buffer_write_2d_async(ctx, subctx, dst, offset, x, nb1, row_length, i1); } GGML_ASSERT(i3 == 0); GGML_ASSERT(i2 == 0); GGML_ASSERT(i1 == (uint64_t) ggml_nrows(src)); - return ggml_vk_buffer_write_nc_async(ctx, dst, offset, src); + return ggml_vk_buffer_write_nc_async(ctx, subctx, dst, offset, src); } -static void ggml_vk_d2h_tensor_2d(vk_context * ctx, vk_buffer * src, size_t offset, const ggml_tensor * dst) { +static void ggml_vk_d2h_tensor_2d(ggml_backend_vk_context * ctx, vk_context * subctx, vk_buffer& src, size_t offset, const ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_d2h_tensor_2d()" << std::endl; #endif @@ -1804,10 +1994,10 @@ static void ggml_vk_d2h_tensor_2d(vk_context * ctx, vk_buffer * src, size_t offs const size_t row_length = ts*ne0/bs; if (ggml_is_contiguous(dst)) { - return ggml_vk_buffer_read_async(ctx, src, offset, dst->data, ne1*nb1*ne2*ne3); + return ggml_vk_buffer_read_async(ctx, subctx, src, offset, dst->data, ne1*nb1*ne2*ne3); } if (nb0 == ts) { - return ggml_vk_buffer_read_2d_async(ctx, src, offset, dst->data, nb1, nb1, row_length, ne1*ne2*ne3); + return ggml_vk_buffer_read_2d_async(ctx, subctx, src, offset, dst->data, nb1, nb1, row_length, ne1*ne2*ne3); } GGML_ASSERT(false); } @@ -1829,89 +2019,89 @@ static uint32_t ggml_vk_guess_split_k(int m, int n, int k) { return 1; } -static uint32_t ggml_vk_guess_matmul_pipeline_align(int m, int n) { +static uint32_t ggml_vk_guess_matmul_pipeline_align(ggml_backend_vk_context * ctx, int m, int n) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline_align(" << m << ", " << n << ")" << std::endl; #endif if (m <= 32 || n <= 32) { - return vk_pipeline_matmul_f32_aligned_s.align; + return ctx->pipeline_matmul_f32_aligned_s.align; } - if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { - return vk_pipeline_matmul_f32_aligned_m.align; + if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { + return ctx->pipeline_matmul_f32_aligned_m.align; } - return vk_pipeline_matmul_f32_aligned_l.align; + return ctx->pipeline_matmul_f32_aligned_l.align; } -static vk_pipeline* ggml_vk_guess_matmul_pipeline(bool bit16_x, bool bit16_y, int m, int n, bool aligned) { +static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; #endif if (bit16_x && bit16_y) { - if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_aligned_s : &vk_pipeline_matmul_f16_s; + return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; } - if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { + if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_aligned_m : &vk_pipeline_matmul_f16_m; + return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; } #ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif - return aligned ? 
&vk_pipeline_matmul_f16_aligned_l : &vk_pipeline_matmul_f16_l; + return aligned ? &ctx->pipeline_matmul_f16_aligned_l : &ctx->pipeline_matmul_f16_l; } if (bit16_x && !bit16_y) { - if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_f32_aligned_s : &vk_pipeline_matmul_f16_f32_s; + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; } - if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { + if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_f32_aligned_m : &vk_pipeline_matmul_f16_f32_m; + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; } #ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f16_f32_aligned_l : &vk_pipeline_matmul_f16_f32_l; + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_l : &ctx->pipeline_matmul_f16_f32_l; } if (!bit16_x && bit16_y) { GGML_ASSERT(false); } - if (vk_device.vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f32_aligned_s : &vk_pipeline_matmul_f32_s; + return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; } - if (vk_device.subgroup_size == 64 || m <= 64 || n <= 64) { + if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f32_aligned_m : &vk_pipeline_matmul_f32_m; + return aligned ? &ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; } #ifdef GGML_VULKAN_DEBUG std::cerr << " L" << std::endl; #endif - return aligned ? &vk_pipeline_matmul_f32_aligned_l : &vk_pipeline_matmul_f32_l; + return aligned ? 
&ctx->pipeline_matmul_f32_aligned_l : &ctx->pipeline_matmul_f32_l;
 }
 
-static void ggml_vk_matmul(vk_context * ctx, vk_pipeline& pipeline, vk_subbuffer&& a, vk_subbuffer&& b, vk_subbuffer&& d, vk_subbuffer&& split_k_buffer, uint32_t m, uint32_t n, uint32_t k, uint32_t stride_a, uint32_t stride_b, uint32_t stride_d, uint32_t split_k, uint32_t batch, uint32_t ne02, uint32_t ne12, uint32_t broadcast2, uint32_t broadcast3, uint32_t batch_stride_a, uint32_t batch_stride_b, uint32_t batch_stride_d) {
+static void ggml_vk_matmul(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline& pipeline, vk_subbuffer&& a, vk_subbuffer&& b, vk_subbuffer&& d, vk_subbuffer&& split_k_buffer, uint32_t m, uint32_t n, uint32_t k, uint32_t stride_a, uint32_t stride_b, uint32_t stride_d, uint32_t split_k, uint32_t batch, uint32_t ne02, uint32_t ne12, uint32_t broadcast2, uint32_t broadcast3, uint32_t batch_stride_a, uint32_t batch_stride_b, uint32_t batch_stride_d) {
 #ifdef GGML_VULKAN_DEBUG
-    std::cerr << "ggml_vk_matmul(a: (" << a.buffer.buffer << ", " << a.offset << ", " << a.size << "), b: (" << b.buffer.buffer << ", " << b.offset << ", " << b.size << "), c: (" << d.buffer.buffer << ", " << d.offset << ", " << d.size << "), split_k: (" << split_k_buffer.buffer.buffer << ", " << split_k_buffer.offset << ", " << split_k_buffer.size << "), m: " << m << ", n: " << n << ", k: " << k << ", stride_a: " << stride_a << ", stride_b: " << stride_b << ", stride_d: " << stride_d << ", split_k: " << split_k << ", batch: " << batch << ", ne02: " << ne02 << ", ne12: " << ne12 << ", broadcast2: " << broadcast2 << ", broadcast3: " << broadcast3 << ", batch_stride_a: " << batch_stride_a << ", batch_stride_b: " << batch_stride_b << ", batch_stride_d: " << batch_stride_d << ")" << std::endl;
+    std::cerr << "ggml_vk_matmul(a: (" << a.buffer->buffer << ", " << a.offset << ", " << a.size << "), b: (" << b.buffer->buffer << ", " << b.offset << ", " << b.size << "), c: (" << d.buffer->buffer << ", " << d.offset << ", " << d.size << "), split_k: (" << split_k_buffer.buffer->buffer << ", " << split_k_buffer.offset << ", " << split_k_buffer.size << "), m: " << m << ", n: " << n << ", k: " << k << ", stride_a: " << stride_a << ", stride_b: " << stride_b << ", stride_d: " << stride_d << ", split_k: " << split_k << ", batch: " << batch << ", ne02: " << ne02 << ", ne12: " << ne12 << ", broadcast2: " << broadcast2 << ", broadcast3: " << broadcast3 << ", batch_stride_a: " << batch_stride_a << ", batch_stride_b: " << batch_stride_b << ", batch_stride_d: " << batch_stride_d << ")" << std::endl;
 #endif
-    ggml_vk_sync_buffers(ctx);
+    ggml_vk_sync_buffers(subctx);
     if (split_k == 1) {
         const std::array<uint32_t, 14> pc = { m, n, k, stride_a, stride_b, stride_d, k, ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d };
-        ggml_vk_dispatch_pipeline(ctx, pipeline, { a, b, d }, pc.size() * sizeof(uint32_t), pc.data(), { m, n, batch });
+        ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { a, b, d }, pc.size() * sizeof(uint32_t), pc.data(), { m, n, batch });
         return;
     }
 
@@ -1919,10 +2109,10 @@ static void ggml_vk_matmul(vk_context * ctx, vk_pipeline& pipeline, vk_subbuffer
     const std::array<uint32_t, 14> pc1 = { m, n, k, stride_a, stride_b, stride_d, CEIL_DIV(k, split_k), ne02, ne12, broadcast2, broadcast3, batch_stride_a, batch_stride_b, batch_stride_d };
     // Make sure enough workgroups get assigned for split k to work
-    ggml_vk_dispatch_pipeline(ctx, pipeline, { a, b, split_k_buffer }, pc1.size() * sizeof(uint32_t), pc1.data(), { (CEIL_DIV(m, 
pipeline.wg_denoms[0]) * pipeline.wg_denoms[0]) * split_k, n, batch });
-    ggml_vk_sync_buffers(ctx);
+    ggml_vk_dispatch_pipeline(ctx, subctx, pipeline, { a, b, split_k_buffer }, pc1.size() * sizeof(uint32_t), pc1.data(), { (CEIL_DIV(m, pipeline.wg_denoms[0]) * pipeline.wg_denoms[0]) * split_k, n, batch });
+    ggml_vk_sync_buffers(subctx);
     const std::array<uint32_t, 2> pc2 = { (uint32_t)(m * n * batch), split_k };
-    ggml_vk_dispatch_pipeline(ctx, vk_pipeline_matmul_split_k_reduce, { split_k_buffer, d }, pc2.size() * sizeof(uint32_t), pc2.data(), { m * n * batch, 1, 1 });
+    ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_matmul_split_k_reduce, { split_k_buffer, d }, pc2.size() * sizeof(uint32_t), pc2.data(), { m * n * batch, 1, 1 });
 }
 
 static bool ggml_vk_dim01_contiguous(const ggml_tensor * tensor) {
@@ -1932,32 +2122,32 @@ static bool ggml_vk_dim01_contiguous(const ggml_tensor * tensor) {
         tensor->nb[3] == tensor->nb[2]*tensor->ne[2];
 }
 
-static vk_pipeline * ggml_vk_get_cpy_pipeline(ggml_type from, ggml_type to) {
+static vk_pipeline * ggml_vk_get_cpy_pipeline(ggml_backend_vk_context * ctx, ggml_type from, ggml_type to) {
     if (from == GGML_TYPE_F32 && to == GGML_TYPE_F32) {
-        return &vk_pipeline_cpy_f32_f32;
+        return &ctx->pipeline_cpy_f32_f32;
     }
     if (from == GGML_TYPE_F32 && to == GGML_TYPE_F16) {
-        return &vk_pipeline_cpy_f32_f16;
+        return &ctx->pipeline_cpy_f32_f16;
     }
     if (from == GGML_TYPE_F16 && to == GGML_TYPE_F16) {
-        return &vk_pipeline_cpy_f16_f16;
+        return &ctx->pipeline_cpy_f16_f16;
     }
 
     std::cerr << "Missing CPY op for types: " << ggml_type_name(from) << " " << ggml_type_name(to) << std::endl;
     GGML_ASSERT(false);
 }
 
-static void ggml_vk_cpy_to_contiguous(vk_context * ctx, vk_pipeline * pipeline, const ggml_tensor * tensor, vk_subbuffer&& in, vk_subbuffer&& out, ggml_type buffer_type, bool aligned=true) {
+static void ggml_vk_cpy_to_contiguous(ggml_backend_vk_context * ctx, vk_context * subctx, vk_pipeline * pipeline, const ggml_tensor * tensor, vk_subbuffer&& in, vk_subbuffer&& out, ggml_type buffer_type, bool aligned=true) {
 #ifdef GGML_VULKAN_DEBUG
     std::cerr << "ggml_vk_cpy_to_contiguous((" << tensor << ", type=" << tensor->type << ", backend=" << tensor->backend << ", ne0=" << tensor->ne[0] << ", ne1=" << tensor->ne[1] << ", ne2=" << tensor->ne[2] << ", ne3=" << tensor->ne[3] << ", nb0=" << tensor->nb[0] << ", nb1=" << tensor->nb[1] << ", nb2=" << tensor->nb[2] << ", nb3=" << tensor->nb[3] << "), ";
-    std::cerr << "buffer in size=" << in.buffer.size << ", buffer out size=" << out.buffer.size << ")" << std::endl;
+    std::cerr << "buffer in size=" << in.buffer->size << ", buffer out size=" << out.buffer->size << ")" << std::endl;
 #endif
     const int tensor_type_size = ggml_type_size(tensor->type);
     const int dst_type_size = ggml_type_size(buffer_type);
 
     const uint32_t ne = tensor->ne[0] * tensor->ne[1] * tensor->ne[2];
 
-    const uint32_t nb2 = aligned ? ggml_vk_align_size(dst_type_size * tensor->ne[0] * tensor->ne[1], vk_device.properties.limits.minStorageBufferOffsetAlignment) / dst_type_size : tensor->ne[0] * tensor->ne[1];
+    const uint32_t nb2 = aligned ? 
ggml_vk_align_size(dst_type_size * tensor->ne[0] * tensor->ne[1], ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) / dst_type_size : tensor->ne[0] * tensor->ne[1]; const vk_op_cpy_push_constants pc = { (uint32_t)ne, @@ -1965,11 +2155,11 @@ static void ggml_vk_cpy_to_contiguous(vk_context * ctx, vk_pipeline * pipeline, (uint32_t)tensor->ne[0], (uint32_t)tensor->ne[1], 1 , (uint32_t)tensor->ne[0] , nb2, 0, }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { in, out }, sizeof(vk_op_cpy_push_constants), &pc, { ne, 1, 1 }); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { in, out }, sizeof(vk_op_cpy_push_constants), &pc, { ne, 1, 1 }); } -static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_q_f16((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -1998,17 +2188,17 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = (ggml_tensor_extra_gpu *) src1->extra; - vk_buffer * d_Qx = nullptr; + vk_buffer d_Qx; size_t qx_buf_offset = 0; - vk_buffer * d_Qy = nullptr; + vk_buffer d_Qy; size_t qy_buf_offset = 0; bool src0_uma = false; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src0->data, d_Qx, qx_buf_offset); - ggml_vk_host_get(src1->data, d_Qy, qy_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src0->data, d_Qx, qx_buf_offset); + ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src0_uma = d_Qx != nullptr; src1_uma = d_Qy != nullptr; } @@ -2031,12 +2221,12 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co const int y_ne = ne11 * ne10; const int d_ne = ne11 * ne01; - const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_pipeline_align(ne01, ne11)); + const uint32_t kpad = ggml_vk_align_size(ne10, ggml_vk_guess_matmul_pipeline_align(ctx, ne01, ne11)); const bool aligned = ne10 == kpad; const uint32_t split_k = ggml_vk_guess_split_k(ne01, ne11, ne10); - vk_pipeline * pipeline = ggml_vk_guess_matmul_pipeline(true, !f16_f32_kernel, ne01, ne11, aligned); + vk_pipeline * pipeline = ggml_vk_guess_matmul_pipeline(ctx, true, !f16_f32_kernel, ne01, ne11, aligned); const uint64_t qx_sz = ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); @@ -2044,30 +2234,30 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co const uint64_t y_sz = f16_f32_kernel ? 
sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne; const uint64_t d_sz = sizeof(float) * d_ne; - vk_buffer* d_D = &extra->buffer_gpu; + vk_buffer d_D = extra->buffer_gpu.lock(); const uint64_t d_buf_offset = extra->offset; GGML_ASSERT(d_D != nullptr); GGML_ASSERT(d_D->size >= d_buf_offset + d_sz * ne02 * ne03); - vk_buffer* d_X; + vk_buffer d_X; uint64_t x_buf_offset = 0; - vk_buffer* d_Y; + vk_buffer d_Y; uint64_t y_buf_offset = 0; if (load_x) { - d_Qx = &vk_prealloc_qx; + d_Qx = ctx->prealloc_qx; } else if (!src0_uma) { - d_Qx = &extra_src0->buffer_gpu; + d_Qx = extra_src0->buffer_gpu.lock(); qx_buf_offset = extra_src0->offset; GGML_ASSERT(d_Qx != nullptr); } if (load_y) { - d_Qy = &vk_prealloc_qy; + d_Qy = ctx->prealloc_qy; } else if (!src1_uma) { - d_Qy = &extra_src1->buffer_gpu; + d_Qy = extra_src1->buffer_gpu.lock(); qy_buf_offset = extra_src1->offset; GGML_ASSERT(d_Qy != nullptr); } if (qx_needs_dequant) { - d_X = &vk_prealloc_x; + d_X = ctx->prealloc_x; GGML_ASSERT(d_X->size >= x_sz * ne02 * ne03); } else { d_X = d_Qx; @@ -2075,7 +2265,7 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co GGML_ASSERT(qx_sz == x_sz); // NOLINT } if (qy_needs_dequant) { - d_Y = &vk_prealloc_y; + d_Y = ctx->prealloc_y; GGML_ASSERT(d_Y->size >= y_sz * ne02 * ne03); } else { d_Y = d_Qy; @@ -2087,49 +2277,49 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co vk_pipeline * to_fp16_vk_1 = nullptr; if (x_non_contig) { - to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(src0->type, GGML_TYPE_F16); + to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(ctx, src0->type, GGML_TYPE_F16); } else { - to_fp16_vk_0 = ggml_vk_get_to_fp16(src0->type); + to_fp16_vk_0 = ggml_vk_get_to_fp16(ctx, src0->type); } if (y_non_contig) { - to_fp16_vk_1 = ggml_vk_get_cpy_pipeline(src1->type, GGML_TYPE_F16); + to_fp16_vk_1 = ggml_vk_get_cpy_pipeline(ctx, src1->type, GGML_TYPE_F16); } else { - to_fp16_vk_1 = ggml_vk_get_to_fp16(src1->type); + to_fp16_vk_1 = ggml_vk_get_to_fp16(ctx, src1->type); } GGML_ASSERT(!qx_needs_dequant || to_fp16_vk_0 != nullptr); // NOLINT GGML_ASSERT(!qy_needs_dequant || to_fp16_vk_1 != nullptr); // NOLINT // Allocate descriptor sets - ggml_vk_pipeline_allocate_descriptor_sets(*pipeline, ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, ne12 * ne13); if (qx_needs_dequant) { - ggml_vk_pipeline_allocate_descriptor_sets(*to_fp16_vk_0, x_non_contig ? 1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_0, x_non_contig ? 1 : ne12 * ne13); } if (qy_needs_dequant) { - ggml_vk_pipeline_allocate_descriptor_sets(*to_fp16_vk_1, y_non_contig ? 1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_1, y_non_contig ? 
1 : ne12 * ne13);
     }
     if (split_k > 1) {
-        ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline_matmul_split_k_reduce, ne12 * ne13);
+        ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_matmul_split_k_reduce, ne12 * ne13);
     }
 
     if (x_non_contig) {
-        ggml_vk_cpy_to_contiguous(ctx, to_fp16_vk_0, src0, { *d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { *d_X, 0, VK_WHOLE_SIZE }, dst->type, false);
+        ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_0, src0, { d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { d_X, 0, VK_WHOLE_SIZE }, dst->type, false);
     } else if (load_x || qx_needs_dequant) {
         if (load_x) {
             // copy data to device
-            ggml_vk_h2d_tensor_2d(ctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0));
-            vk_staging_offset = qx_sz * ne02 * ne03;
+            ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0));
+            ctx->staging_offset = qx_sz * ne02 * ne03;
         }
 
         if (qx_needs_dequant) {
             const std::vector<int> pc = { (int)ne01, (int)ne10, (int)ne10, (int)ne10 };
-            ggml_vk_sync_buffers(ctx);
-            ggml_vk_dispatch_pipeline(ctx, *to_fp16_vk_0, { { *d_Qx, qx_buf_offset, qx_sz * ne02 * ne03 }, { *d_X, 0, x_sz * ne02 * ne03 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)(x_ne * ne02 * ne03), 1, 1});
+            ggml_vk_sync_buffers(subctx);
+            ggml_vk_dispatch_pipeline(ctx, subctx, *to_fp16_vk_0, { { d_Qx, qx_buf_offset, qx_sz * ne02 * ne03 }, { d_X, 0, x_sz * ne02 * ne03 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)(x_ne * ne02 * ne03), 1, 1});
         }
     }
 
     if (y_non_contig) {
-        ggml_vk_cpy_to_contiguous(ctx, to_fp16_vk_1, src1, { *d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { *d_Y, 0, VK_WHOLE_SIZE }, dst->type);
+        ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_1, src1, { d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { d_Y, 0, VK_WHOLE_SIZE }, dst->type);
     } else if (load_y) {
-        ggml_vk_h2d_tensor_2d(ctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1));
+        ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1));
     }
 
     uint32_t stride_batch_x = ne00*ne01;
@@ -2144,16 +2334,16 @@ static void ggml_vk_mul_mat_q_f16(vk_context * ctx, const ggml_tensor * src0, co
     }
 
     // compute
-    ggml_vk_matmul(ctx, *pipeline, { *d_X, x_buf_offset, x_sz * ne02 * ne03 }, { *d_Y, y_buf_offset, y_sz * ne12 * ne13 }, { *d_D, d_buf_offset, d_sz * ne12 * ne13 }, { vk_prealloc_split_k, 0, d_sz * ne12 * ne13 * split_k }, ne01, ne11, ne10, ne10, ne10, ne01, split_k, ne12*ne13, ne02, ne12, r2, r3, stride_batch_x, stride_batch_y, ne20*ne21); // NOLINT
+    ggml_vk_matmul(ctx, subctx, *pipeline, { d_X, x_buf_offset, x_sz * ne02 * ne03 }, { d_Y, y_buf_offset, y_sz * ne12 * ne13 }, { d_D, d_buf_offset, d_sz * ne12 * ne13 }, { ctx->prealloc_split_k, 0, d_sz * ne12 * ne13 * split_k }, ne01, ne11, ne10, ne10, ne10, ne01, split_k, ne12*ne13, ne02, ne12, r2, r3, stride_batch_x, stride_batch_y, ne20*ne21); // NOLINT
 
     if (dst->backend == GGML_BACKEND_CPU) {
         // copy dst to host
         float * d = (float *) ((char *) dst->data);
-        ggml_vk_buffer_read_async(ctx, d_D, 0, d, sizeof(float) * d_ne * ne12 * ne13);
+        ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, sizeof(float) * d_ne * ne12 * ne13);
     }
 }
 
-static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
+static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
 #ifdef GGML_VULKAN_DEBUG
     std::cerr << "ggml_vk_mul_mat_vec_q_f16((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << 
src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -2184,17 +2374,17 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = (ggml_tensor_extra_gpu *) src1->extra; - vk_buffer * d_Qx = nullptr; + vk_buffer d_Qx; size_t qx_buf_offset = 0; - vk_buffer * d_Qy = nullptr; + vk_buffer d_Qy; size_t qy_buf_offset = 0; bool src0_uma = false; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src0->data, d_Qx, qx_buf_offset); - ggml_vk_host_get(src1->data, d_Qy, qy_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src0->data, d_Qx, qx_buf_offset); + ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src0_uma = d_Qx != nullptr; src1_uma = d_Qy != nullptr; } @@ -2214,42 +2404,42 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 const uint64_t y_ne = ne11 * ne10; const uint64_t d_ne = ne11 * ne01; - const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), vk_device.properties.limits.minStorageBufferOffsetAlignment); + const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); - const uint64_t x_sz = x_non_contig ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment) : qx_sz; + const uint64_t x_sz = x_non_contig ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) : qx_sz; const uint64_t y_sz = f16_f32_kernel ? 
sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne; const uint64_t d_sz = sizeof(float) * d_ne; - vk_buffer* d_D = &extra->buffer_gpu; + vk_buffer d_D = extra->buffer_gpu.lock(); const uint64_t d_buf_offset = extra->offset; GGML_ASSERT(d_D != nullptr); - vk_buffer* d_X; + vk_buffer d_X; uint64_t x_buf_offset = 0; - vk_buffer* d_Y; + vk_buffer d_Y; uint64_t y_buf_offset = 0; if (load_x) { - d_Qx = &vk_prealloc_qx; + d_Qx = ctx->prealloc_qx; } else if(!src1_uma) { - d_Qx = &extra_src0->buffer_gpu; + d_Qx = extra_src0->buffer_gpu.lock(); qx_buf_offset = extra_src0->offset; GGML_ASSERT(d_Qx != nullptr); } if (load_y) { - d_Qy = &vk_prealloc_qy; + d_Qy = ctx->prealloc_qy; } else if(!src1_uma) { - d_Qy = &extra_src1->buffer_gpu; + d_Qy = extra_src1->buffer_gpu.lock(); qy_buf_offset = extra_src1->offset; GGML_ASSERT(d_Qy != nullptr); } if (qx_needs_dequant) { - d_X = &vk_prealloc_x; + d_X = ctx->prealloc_x; } else { d_X = d_Qx; x_buf_offset = qx_buf_offset; GGML_ASSERT(qx_sz == x_sz); } if (qy_needs_dequant) { - d_Y = &vk_prealloc_y; + d_Y = ctx->prealloc_y; } else { d_Y = d_Qy; y_buf_offset = qy_buf_offset; @@ -2259,39 +2449,39 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0 vk_pipeline * to_fp16_vk_0 = nullptr; vk_pipeline* to_fp16_vk_1 = nullptr; if (x_non_contig) { - to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(src0->type, src0->type); + to_fp16_vk_0 = ggml_vk_get_cpy_pipeline(ctx, src0->type, src0->type); } if (y_non_contig) { - to_fp16_vk_1 = ggml_vk_get_cpy_pipeline(src1->type, src1->type); + to_fp16_vk_1 = ggml_vk_get_cpy_pipeline(ctx, src1->type, src1->type); } else { - to_fp16_vk_1 = ggml_vk_get_to_fp16(src1->type); + to_fp16_vk_1 = ggml_vk_get_to_fp16(ctx, src1->type); } - vk_pipeline* dmmv = ggml_vk_get_dequantize_mul_mat_vec(src0->type); + vk_pipeline* dmmv = ggml_vk_get_dequantize_mul_mat_vec(ctx, src0->type); GGML_ASSERT(!qx_needs_dequant || to_fp16_vk_0 != nullptr); // NOLINT GGML_ASSERT(!qy_needs_dequant || to_fp16_vk_1 != nullptr); // NOLINT GGML_ASSERT(dmmv != nullptr); // Allocate descriptor sets if (qx_needs_dequant) { - ggml_vk_pipeline_allocate_descriptor_sets(*to_fp16_vk_0, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_0, 1); } if (qy_needs_dequant) { - ggml_vk_pipeline_allocate_descriptor_sets(*to_fp16_vk_1, y_non_contig ? 1 : ne12 * ne13); + ggml_pipeline_allocate_descriptor_sets(ctx, *to_fp16_vk_1, y_non_contig ? 
1 : ne12 * ne13);
     }
-    ggml_vk_pipeline_allocate_descriptor_sets(*dmmv, ne12 * ne13);
+    ggml_pipeline_allocate_descriptor_sets(ctx, *dmmv, ne12 * ne13);
 
     if (x_non_contig) {
-        GGML_ASSERT(x_sz == ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment));
-        ggml_vk_cpy_to_contiguous(ctx, to_fp16_vk_0, src0, { *d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { *d_X, 0, VK_WHOLE_SIZE }, src0->type);
+        GGML_ASSERT(x_sz == ggml_vk_align_size(ggml_type_size(src0->type) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment));
+        ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_0, src0, { d_Qx, qx_buf_offset, VK_WHOLE_SIZE }, { d_X, 0, VK_WHOLE_SIZE }, src0->type);
     } else if (load_x) {
         // copy data to device
-        ggml_vk_h2d_tensor_2d(ctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0));
+        ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qx, 0, src0, 0, 0, ggml_nrows(src0));
     }
     if (y_non_contig) {
         GGML_ASSERT(y_sz == ggml_type_size(src1->type) * y_ne);
-        ggml_vk_cpy_to_contiguous(ctx, to_fp16_vk_1, src1, { *d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { *d_Y, 0, VK_WHOLE_SIZE }, src1->type);
+        ggml_vk_cpy_to_contiguous(ctx, subctx, to_fp16_vk_1, src1, { d_Qy, qy_buf_offset, VK_WHOLE_SIZE }, { d_Y, 0, VK_WHOLE_SIZE }, src1->type);
     } else if (load_y) {
-        ggml_vk_h2d_tensor_2d(ctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1));
+        ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, 0, src1, 0, 0, ggml_nrows(src1));
     }
 
     for (uint64_t i13 = 0; i13 < ne13; i13++) {
@@ -2306,34 +2496,34 @@ static void ggml_vk_mul_mat_vec_q_f16(vk_context * ctx, const ggml_tensor * src0
             const uint64_t y_offset = y_buf_offset + y_sz * it_idx1;
             const uint64_t d_offset = d_buf_offset + d_sz * it_idx1;
 
-            const uint64_t y_buffer_offset = (y_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment;
+            const uint64_t y_buffer_offset = (y_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment;
             const uint64_t y_shader_offset = y_offset - y_buffer_offset;
 
-            const uint64_t d_buffer_offset = (d_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment;
+            const uint64_t d_buffer_offset = (d_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment;
             const uint64_t d_shader_offset = d_offset - d_buffer_offset;
 
             if (!y_non_contig && qy_needs_dequant) {
                 const std::vector<int> pc = { (int)ne11, (int)ne10, (int)ne10, (int)ne10 };
-                ggml_vk_sync_buffers(ctx);
-                ggml_vk_dispatch_pipeline(ctx, *to_fp16_vk_1, { { *d_Qy, qy_offset, qy_sz }, { *d_Y, y_offset, y_sz } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)y_ne, 1, 1});
+                ggml_vk_sync_buffers(subctx);
+                ggml_vk_dispatch_pipeline(ctx, subctx, *to_fp16_vk_1, { { d_Qy, qy_offset, qy_sz }, { d_Y, y_offset, y_sz } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)y_ne, 1, 1});
             }
 
             // compute
             const std::array<int, 3> pc = { (int)ne00, (int)(y_shader_offset / ggml_type_size(src1->type)), (int)(d_shader_offset / ggml_type_size(dst->type))};
-            ggml_vk_sync_buffers(ctx);
-            ggml_vk_dispatch_pipeline(ctx, *dmmv, { { *d_X, x_offset, x_sz }, { *d_Y, y_buffer_offset, y_sz + y_shader_offset }, { *d_D, d_buffer_offset, d_sz + d_shader_offset } }, 3 * sizeof(int), &pc, { (uint32_t)ne01, 1, 1});
+            ggml_vk_sync_buffers(subctx);
+            ggml_vk_dispatch_pipeline(ctx, subctx, 
*dmmv, { { d_X, x_offset, x_sz }, { d_Y, y_buffer_offset, y_sz + y_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 3 * sizeof(int), &pc, { (uint32_t)ne01, 1, 1}); if (dst->backend == GGML_BACKEND_CPU) { // copy dst to host float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); - ggml_vk_sync_buffers(ctx); - ggml_vk_buffer_read_async(ctx, d_D, d_offset, d, sizeof(float) * d_ne); + ggml_vk_sync_buffers(subctx); + ggml_vk_buffer_read_async(ctx, subctx, d_D, d_offset, d, sizeof(float) * d_ne); } } } } -static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat_p021_f16_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3]; @@ -2362,13 +2552,13 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = (ggml_tensor_extra_gpu *) src1->extra; - vk_buffer * d_Qy = nullptr; + vk_buffer d_Qy; size_t qy_buf_offset = 0; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src1->data, d_Qy, qy_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src1_uma = d_Qy != nullptr; } @@ -2378,51 +2568,51 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(vk_context * ctx, const ggml_tensor const uint64_t y_ne = ne10 * ne11 * ne12; const uint64_t d_ne = ne01 * ne11 * ne12; - const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), vk_device.properties.limits.minStorageBufferOffsetAlignment); + const uint64_t qx_sz = ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); const uint64_t qy_sz = ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type); const uint64_t d_sz = sizeof(float) * d_ne; - vk_buffer* d_D = &extra->buffer_gpu; + vk_buffer d_D = extra->buffer_gpu.lock(); const uint64_t d_buf_offset = extra->offset; GGML_ASSERT(d_D != nullptr); - vk_buffer* d_Qx = &extra_src0->buffer_gpu; + vk_buffer d_Qx = extra_src0->buffer_gpu.lock(); const uint64_t qx_buf_offset = extra_src0->offset; GGML_ASSERT(d_Qx != nullptr); if (load_y) { - d_Qy = &vk_prealloc_qy; + d_Qy = ctx->prealloc_qy; } else if (!src1_uma) { - d_Qy = &extra_src1->buffer_gpu; + d_Qy = extra_src1->buffer_gpu.lock(); qy_buf_offset = extra_src1->offset; GGML_ASSERT(d_Qx != nullptr); } // Allocate descriptor sets - ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline_mul_mat_vec_p021_f16_f32, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, 1); - const 
uint64_t qy_buffer_offset = (qy_buf_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment;
+    const uint64_t qy_buffer_offset = (qy_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment;
     const uint64_t qy_shader_offset = qy_buf_offset - qy_buffer_offset;
 
-    const uint64_t d_buffer_offset = (d_buf_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment;
+    const uint64_t d_buffer_offset = (d_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment;
     const uint64_t d_shader_offset = d_buf_offset - d_buffer_offset;
 
     if (load_y) {
-        ggml_vk_h2d_tensor_2d(ctx, d_Qy, qy_buf_offset, src1, 0, 0, ggml_nrows(src1));
+        ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, qy_buf_offset, src1, 0, 0, ggml_nrows(src1));
     }
 
     // compute
     const std::array<uint32_t, 6> pc = { (uint32_t)ne00, (uint32_t)ne01, (uint32_t)ne02, (uint32_t)ne12, (uint32_t)(qy_shader_offset / ggml_type_size(src1->type)), (uint32_t)(d_shader_offset / ggml_type_size(dst->type)) };
-    ggml_vk_sync_buffers(ctx);
-    ggml_vk_dispatch_pipeline(ctx, vk_pipeline_mul_mat_vec_p021_f16_f32, { { *d_Qx, qx_buf_offset, qx_sz }, { *d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { *d_D, d_buffer_offset, d_sz + d_shader_offset } }, 6 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 });
+    ggml_vk_sync_buffers(subctx);
+    ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 6 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 });
 
     if (dst->backend == GGML_BACKEND_CPU) {
         // copy dst to host
         float * d = (float *) dst->data;
-        ggml_vk_sync_buffers(ctx);
-        ggml_vk_buffer_read_async(ctx, d_D, d_buf_offset, d, sizeof(float) * d_ne);
+        ggml_vk_sync_buffers(subctx);
+        ggml_vk_buffer_read_async(ctx, subctx, d_D, d_buf_offset, d, sizeof(float) * d_ne);
     }
 }
 
-static void ggml_vk_mul_mat_vec_nc_f16_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
+static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
 #ifdef GGML_VULKAN_DEBUG
     std::cerr << "ggml_vk_mul_mat_nc_f16_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3];
     std::cerr << "), (" << src1 << ", name=" << src1->name << ", type=" << src1->type << ", backend=" << src1->backend << ", ne0=" << src1->ne[0] << ", ne1=" << src1->ne[1] << ", ne2=" << src1->ne[2] << ", ne3=" << src1->ne[3] << ", nb0=" << src1->nb[0] << ", nb1=" << src1->nb[1] << ", nb2=" << src1->nb[2] << ", nb3=" << src1->nb[3];
@@ -2454,13 +2644,13 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(vk_context * ctx, const ggml_tensor *
     ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra;
     ggml_tensor_extra_gpu * extra_src1 = (ggml_tensor_extra_gpu *) src1->extra;
 
-    vk_buffer * d_Qy = nullptr;
+    vk_buffer d_Qy = 
nullptr; size_t qy_buf_offset = 0; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src1->data, d_Qy, qy_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src1->data, d_Qy, qy_buf_offset); src1_uma = d_Qy != nullptr; } @@ -2475,43 +2665,43 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(vk_context * ctx, const ggml_tensor * const uint64_t qy_sz = ggml_nbytes(src1); const uint64_t d_sz = sizeof(float) * d_ne; - vk_buffer* d_D = &extra->buffer_gpu; + vk_buffer d_D = extra->buffer_gpu.lock(); const uint64_t d_buf_offset = extra->offset; GGML_ASSERT(d_D != nullptr); - vk_buffer* d_Qx = &extra_src0->buffer_gpu; + vk_buffer d_Qx = extra_src0->buffer_gpu.lock(); const uint64_t qx_buf_offset = extra_src0->offset; GGML_ASSERT(d_Qx != nullptr); if (load_y) { - d_Qy = &vk_prealloc_qy; + d_Qy = ctx->prealloc_qy; } else { - d_Qy = &extra_src1->buffer_gpu; + d_Qy = extra_src1->buffer_gpu.lock(); qy_buf_offset = extra_src1->offset; GGML_ASSERT(d_Qx != nullptr); } // Allocate descriptor sets - ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline_mul_mat_vec_nc_f16_f32, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, 1); - const uint64_t qy_buffer_offset = (qy_buf_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + const uint64_t qy_buffer_offset = (qy_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; const uint64_t qy_shader_offset = qy_buf_offset - qy_buffer_offset; - const uint64_t d_buffer_offset = (d_buf_offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + const uint64_t d_buffer_offset = (d_buf_offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; const uint64_t d_shader_offset = d_buf_offset - d_buffer_offset; if (load_y) { - ggml_vk_h2d_tensor_2d(ctx, d_Qy, qy_buf_offset, src1, 0, 0, ggml_nrows(src1)); + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Qy, qy_buf_offset, src1, 0, 0, ggml_nrows(src1)); } // compute const std::array pc = { (uint32_t)ne00, (uint32_t)ne01, row_stride_x, channel_stride_x, (uint32_t)(ne12 / ne02), (uint32_t)(qy_shader_offset / ggml_type_size(src1->type)), (uint32_t)(d_shader_offset / ggml_type_size(dst->type)) }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, vk_pipeline_mul_mat_vec_nc_f16_f32, { { *d_Qx, qx_buf_offset, qx_sz }, { *d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { *d_D, d_buffer_offset, d_sz + d_shader_offset } }, 7 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 7 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); if (dst->backend == GGML_BACKEND_CPU) { // copy dst to host float * d = (float *) dst->data; - ggml_vk_sync_buffers(ctx); - ggml_vk_buffer_read_async(ctx, d_D, d_buf_offset, d, sizeof(float) * d_ne); + ggml_vk_sync_buffers(subctx); + ggml_vk_buffer_read_async(ctx, subctx, d_D, d_buf_offset, d, sizeof(float) * d_ne); } } @@ -2528,22 +2718,22 @@ static bool ggml_vk_can_mul_mat(const ggml_tensor * src0, const ggml_tensor * sr 
((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_GPU); } -static void ggml_vk_mul_mat(vk_context * ctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { +static void ggml_vk_mul_mat(ggml_backend_vk_context * ctx, vk_context * subctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_mul_mat(" << src0 << ", " << src1 << ", " << dst << ")" << std::endl; #endif if (src0->type == GGML_TYPE_F16 && ggml_is_permuted(src0) && ggml_is_permuted(src1) && src1->ne[1] == 1) { - ggml_vk_mul_mat_vec_p021_f16_f32(ctx, src0, src1, dst); + ggml_vk_mul_mat_vec_p021_f16_f32(ctx, subctx, src0, src1, dst); } else if (src0->type == GGML_TYPE_F16 && !ggml_is_contiguous(src0) && !ggml_is_transposed(src1) && src1->ne[1] == 1) { - ggml_vk_mul_mat_vec_nc_f16_f32(ctx, src0, src1, dst); + ggml_vk_mul_mat_vec_nc_f16_f32(ctx, subctx, src0, src1, dst); } else if (src1->ne[1] == 1 && (src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type))) { - ggml_vk_mul_mat_vec_q_f16(ctx, src0, src1, dst); + ggml_vk_mul_mat_vec_q_f16(ctx, subctx, src0, src1, dst); } else { - ggml_vk_mul_mat_q_f16(ctx, src0, src1, dst); + ggml_vk_mul_mat_q_f16(ctx, subctx, src0, src1, dst); } } -static void ggml_vk_op_repeat(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_op_repeat(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { // guaranteed to be an integer due to the check in ggml_can_repeat const uint64_t ne0 = dst->ne[0]; const uint64_t ne1 = dst->ne[1]; @@ -2579,9 +2769,9 @@ static void ggml_vk_op_repeat(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) dst->extra; ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; - const vk_buffer* src_buf = &extra_src0->buffer_gpu; + const vk_buffer src_buf = extra_src0->buffer_gpu.lock(); const uint64_t src_offset = extra_src0->offset; - vk_buffer* dst_buf = &extra->buffer_gpu; + vk_buffer dst_buf = extra->buffer_gpu.lock(); const uint64_t dst_offset = extra->offset; std::vector copies; @@ -2606,78 +2796,79 @@ static void ggml_vk_op_repeat(vk_context * ctx, const ggml_tensor * src0, const } } - ggml_vk_sync_buffers(ctx); - ctx->s->buffer.copyBuffer(src_buf->buffer, dst_buf->buffer, copies); + ggml_vk_sync_buffers(subctx); + subctx->s->buffer.copyBuffer(src_buf->buffer, dst_buf->buffer, copies); - (void) src1; + GGML_UNUSED(ctx); + GGML_UNUSED(src1); } -static vk_pipeline* ggml_vk_op_get_pipeline(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op) { +static vk_pipeline* ggml_vk_op_get_pipeline(ggml_backend_vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op) { switch (op) { case GGML_OP_ADD: if (src0->type == GGML_TYPE_F32 && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_add_f32; + return &ctx->pipeline_add_f32; } return nullptr; case GGML_OP_GET_ROWS: GGML_ASSERT(src1->type == GGML_TYPE_I32); if (dst->type == GGML_TYPE_F16) { - return &vk_pipeline_get_rows[src0->type]; + return &ctx->pipeline_get_rows[src0->type]; } if (dst->type == GGML_TYPE_F32) { - return &vk_pipeline_get_rows_f32[src0->type]; + return &ctx->pipeline_get_rows_f32[src0->type]; } return nullptr; case GGML_OP_MUL: if (src0->type == GGML_TYPE_F32 && 
src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_mul_f32; + return &ctx->pipeline_mul_f32; } return nullptr; case GGML_OP_SCALE: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_scale_f32; + return &ctx->pipeline_scale_f32; } return nullptr; case GGML_OP_SQR: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_sqr_f32; + return &ctx->pipeline_sqr_f32; } return nullptr; case GGML_OP_CLAMP: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_clamp_f32; + return &ctx->pipeline_clamp_f32; } return nullptr; case GGML_OP_CPY: case GGML_OP_CONT: case GGML_OP_DUP: - return ggml_vk_get_cpy_pipeline(src0->type, dst->type); + return ggml_vk_get_cpy_pipeline(ctx, src0->type, dst->type); case GGML_OP_NORM: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_norm_f32; + return &ctx->pipeline_norm_f32; } return nullptr; case GGML_OP_RMS_NORM: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_rms_norm_f32; + return &ctx->pipeline_rms_norm_f32; } return nullptr; case GGML_OP_UNARY: switch (ggml_get_unary_op(dst)) { case GGML_UNARY_OP_SILU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_silu_f32; + return &ctx->pipeline_silu_f32; } break; case GGML_UNARY_OP_GELU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_gelu_f32; + return &ctx->pipeline_gelu_f32; } break; case GGML_UNARY_OP_RELU: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_relu_f32; + return &ctx->pipeline_relu_f32; } break; default: @@ -2686,12 +2877,12 @@ static vk_pipeline* ggml_vk_op_get_pipeline(const ggml_tensor * src0, const ggml return nullptr; case GGML_OP_DIAG_MASK_INF: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_diag_mask_inf_f32; + return &ctx->pipeline_diag_mask_inf_f32; } return nullptr; case GGML_OP_SOFT_MAX: if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_soft_max_f32; + return &ctx->pipeline_soft_max_f32; } return nullptr; case GGML_OP_ROPE: @@ -2706,17 +2897,17 @@ static vk_pipeline* ggml_vk_op_get_pipeline(const ggml_tensor * src0, const ggml if (is_neox) { if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_rope_neox_f32; + return &ctx->pipeline_rope_neox_f32; } if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { - return &vk_pipeline_rope_neox_f16; + return &ctx->pipeline_rope_neox_f16; } } else { if (src0->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32) { - return &vk_pipeline_rope_f32; + return &ctx->pipeline_rope_f32; } if (src0->type == GGML_TYPE_F16 && dst->type == GGML_TYPE_F16) { - return &vk_pipeline_rope_f16; + return &ctx->pipeline_rope_f16; } } return nullptr; @@ -2735,13 +2926,8 @@ static ggml_vk_func_t ggml_vk_op_get_func(ggml_op op) { } } -#ifdef GGML_VULKAN_CHECK_RESULTS -static void ggml_vk_print_tensor(const ggml_tensor * tensor, const char * name); -static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * tensor); -#endif - template -static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, ggml_op op, const PC&& pc) { +static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst, 
ggml_op op, const PC&& pc) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_op_f32((" << src0 << ", name=" << src0->name << ", type=" << src0->type << ", backend=" << src0->backend << ", ne0=" << src0->ne[0] << ", ne1=" << src0->ne[1] << ", ne2=" << src0->ne[2] << ", ne3=" << src0->ne[3] << ", nb0=" << src0->nb[0] << ", nb1=" << src0->nb[1] << ", nb2=" << src0->nb[2] << ", nb3=" << src0->nb[3]; if (src1 != nullptr) { @@ -2768,7 +2954,7 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm const uint64_t nb2 = dst->nb[2]; const uint64_t nb3 = dst->nb[3]; - vk_pipeline * pipeline = ggml_vk_op_get_pipeline(src0, src1, dst, op); + vk_pipeline * pipeline = ggml_vk_op_get_pipeline(ctx, src0, src1, dst, op); ggml_vk_func_t op_func; if (pipeline == nullptr) { @@ -2782,7 +2968,7 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm GGML_ASSERT(false); } - op_func(ctx, src0, src1, dst); + op_func(ctx, subctx, src0, src1, dst); return; } @@ -2790,19 +2976,19 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * extra_src1 = use_src1 ? (ggml_tensor_extra_gpu *) src1->extra : nullptr; - vk_buffer * d_X = nullptr; + vk_buffer d_X = nullptr; size_t x_buf_offset = 0; - vk_buffer * d_Y = nullptr; + vk_buffer d_Y = nullptr; size_t y_buf_offset = 0; bool src0_uma = false; bool src1_uma = false; - if (vk_device.uma) { - ggml_vk_host_get(src0->data, d_X, x_buf_offset); + if (ctx->device.lock()->uma) { + ggml_vk_host_get(ctx, src0->data, d_X, x_buf_offset); src0_uma = d_X != nullptr; if (use_src1) { - ggml_vk_host_get(src1->data, d_Y, y_buf_offset); + ggml_vk_host_get(ctx, src1->data, d_Y, y_buf_offset); src1_uma = d_Y != nullptr; } } @@ -2810,30 +2996,31 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm const bool transfer_src0 = src0->backend != GGML_BACKEND_GPU && !src0_uma; const bool transfer_src1 = use_src1 && src1->backend != GGML_BACKEND_GPU && !src1_uma; - uint64_t x_sz = ggml_vk_align_size(ggml_type_size(src0->type) * ne0, vk_device.properties.limits.minStorageBufferOffsetAlignment); - uint64_t y_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * ne1, vk_device.properties.limits.minStorageBufferOffsetAlignment) : 0; + uint64_t x_sz = ggml_vk_align_size(ggml_type_size(src0->type) * ne0, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); + uint64_t y_sz = use_src1 ? 
ggml_vk_align_size(ggml_type_size(src1->type) * ne1, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) : 0; uint64_t d_sz = ggml_type_size(dst->type) * ne0; + vk_buffer d_D = extra->buffer_gpu.lock(); + // Workaround for tiny tensor inputs on ROPE - if (use_src1 && src1->backend == GGML_BACKEND_GPU && y_sz > extra_src1->buffer_gpu.size) { + if (use_src1 && src1->backend == GGML_BACKEND_GPU && y_sz > d_D->size) { y_sz = VK_WHOLE_SIZE; } - vk_buffer* d_D = &extra->buffer_gpu; GGML_ASSERT(d_D != nullptr); - uint64_t d_buf_offset = (extra->offset / vk_device.properties.limits.minStorageBufferOffsetAlignment) * vk_device.properties.limits.minStorageBufferOffsetAlignment; + uint64_t d_buf_offset = (extra->offset / ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; GGML_ASSERT(d_buf_offset == extra->offset || op == GGML_OP_CPY); // NOLINT if (transfer_src0) { - d_X = &vk_prealloc_qx; + d_X = ctx->prealloc_qx; } else if(!src0_uma) { - d_X = &extra_src0->buffer_gpu; + d_X = extra_src0->buffer_gpu.lock(); x_buf_offset = extra_src0->offset; GGML_ASSERT(d_X != nullptr); } if (transfer_src1) { - d_Y = &vk_prealloc_qy; + d_Y = ctx->prealloc_qy; } else if (use_src1 && !src1_uma) { - d_Y = &extra_src1->buffer_gpu; + d_Y = extra_src1->buffer_gpu.lock(); y_buf_offset = extra_src1->offset; GGML_ASSERT(d_Y != nullptr); } @@ -2856,16 +3043,16 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm // copy src0 to device if (transfer_src0) { - ggml_vk_h2d_tensor_2d(ctx, d_X, 0, src0, 0, 0, ggml_nrows(src0)); - vk_staging_offset = x_sz * ne02 * ne03; + ggml_vk_h2d_tensor_2d(ctx, subctx, d_X, 0, src0, 0, 0, ggml_nrows(src0)); + ctx->staging_offset = x_sz * ne02 * ne03; } if (transfer_src1) { - ggml_vk_h2d_tensor_2d(ctx, d_Y, 0, src1, 0, 0, ggml_nrows(src1)); + ggml_vk_h2d_tensor_2d(ctx, subctx, d_Y, 0, src1, 0, 0, ggml_nrows(src1)); } // Single call if dimension 2 is contiguous if (op == GGML_OP_CPY || (ggml_is_contiguous(src0) && (src1 == nullptr || ggml_is_contiguous(src1)))) { - ggml_vk_pipeline_allocate_descriptor_sets(*pipeline, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, 1); switch (dst->op) { case GGML_OP_NORM: @@ -2896,24 +3083,24 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm if (!use_src1 && op == GGML_OP_SOFT_MAX) { // Empty src1 is possible on soft_max, but the shader needs a buffer - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset, x_sz }, { vk_prealloc_y, 0, vk_prealloc_y.size }, { *d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { ctx->prealloc_y, 0, ctx->prealloc_y->size }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else if (use_src1) { - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset, x_sz }, { *d_Y, y_buf_offset, y_sz }, { *d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { d_Y, y_buf_offset, y_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else { - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset, x_sz }, { *d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + 
ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } if (dst->backend == GGML_BACKEND_CPU && op == GGML_OP_CPY) { - ggml_vk_d2h_tensor_2d(ctx, d_D, 0, dst); + ggml_vk_d2h_tensor_2d(ctx, subctx, d_D, 0, dst); } else if(dst->backend == GGML_BACKEND_CPU) { // copy dst to host float * d = (float *) dst->data; - ggml_vk_buffer_read_async(ctx, d_D, 0, d, d_sz); + ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, d_sz); } } else { - ggml_vk_pipeline_allocate_descriptor_sets(*pipeline, ne02 * ne03); + ggml_pipeline_allocate_descriptor_sets(ctx, *pipeline, ne02 * ne03); switch (dst->op) { case GGML_OP_NORM: @@ -2940,60 +3127,60 @@ static void ggml_vk_op_f32(vk_context * ctx, const ggml_tensor * src0, const ggm if (!use_src1 && op == GGML_OP_SOFT_MAX) { // Empty src1 is possible on soft_max, but the shader needs a buffer - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset, x_sz }, { vk_prealloc_y, 0, vk_prealloc_y.size }, { *d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { ctx->prealloc_y, 0, ctx->prealloc_y->size }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } else if (use_src1) { - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset + x_offset, x_sz }, { *d_Y, y_buf_offset + y_offset, y_sz }, { *d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_Y, y_buf_offset + y_offset, y_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); } else { - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, *pipeline, { { *d_X, x_buf_offset + x_offset, x_sz }, { *d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); + ggml_vk_sync_buffers(subctx); + ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); } if (dst->backend == GGML_BACKEND_CPU) { // copy dst to host - ggml_vk_buffer_read_async(ctx, d_D, d_buf_offset + d_offset, (char *) dst->data + i02*nb2 + i03*nb3, d_sz); + ggml_vk_buffer_read_async(ctx, subctx, d_D, d_buf_offset + d_offset, (char *) dst->data + i02*nb2 + i03*nb3, d_sz); } } } } } -static void ggml_vk_repeat(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_REPEAT, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); +static void ggml_vk_repeat(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_REPEAT, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } -static void ggml_vk_get_rows(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_GET_ROWS, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); +static void ggml_vk_get_rows(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, 
src1, dst, GGML_OP_GET_ROWS, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } -static void ggml_vk_add(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_ADD, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); +static void ggml_vk_add(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ADD, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } -static void ggml_vk_mul(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_MUL, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); +static void ggml_vk_mul(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_MUL, { (uint32_t)ggml_nelements(src0), (uint32_t)ggml_nelements(src1), 0.0f, 0.0f }); } -static void ggml_vk_scale(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_scale(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_SCALE, { (uint32_t)ggml_nelements(src0), 0, op_params[0], 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_SCALE, { (uint32_t)ggml_nelements(src0), 0, op_params[0], 0.0f }); } -static void ggml_vk_sqr(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_SQR, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); +static void ggml_vk_sqr(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_SQR, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); } -static void ggml_vk_clamp(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_clamp(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_CLAMP, { (uint32_t)ggml_nelements(src0), 0, op_params[0], op_params[1] }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_CLAMP, { (uint32_t)ggml_nelements(src0), 0, op_params[0], op_params[1] }); } -static void ggml_vk_cpy(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_cpy(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) dst->extra; const int src0_type_size = ggml_type_size(src0->type); const int dst_type_size = ggml_type_size(dst->type); - const uint32_t d_offset = (extra->offset % vk_device.properties.limits.minStorageBufferOffsetAlignment) / dst_type_size; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_CPY, { + const uint32_t d_offset = (extra->offset % ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) / dst_type_size; + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_CPY, { (uint32_t)ggml_nelements(src0), (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], (uint32_t)src0->nb[0] / src0_type_size, 
(uint32_t)src0->nb[1] / src0_type_size, (uint32_t)src0->nb[2] / src0_type_size, (uint32_t) dst->ne[0], (uint32_t) dst->ne[1], (uint32_t) dst->nb[0] / dst_type_size, (uint32_t) dst->nb[1] / dst_type_size, (uint32_t) dst->nb[2] / dst_type_size, @@ -3001,30 +3188,30 @@ static void ggml_vk_cpy(vk_context * ctx, const ggml_tensor * src0, ggml_tensor }); } -static void ggml_vk_norm(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], 0.0f, 0.0f }); +static void ggml_vk_norm(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], 0.0f, 0.0f }); } -static void ggml_vk_rms_norm(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_rms_norm(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_RMS_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0], 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_RMS_NORM, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0], 0.0f }); } -static void ggml_vk_unary(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_UNARY, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); +static void ggml_vk_unary(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_UNARY, { (uint32_t)ggml_nelements(src0), 0, 0.0f, 0.0f }); } -static void ggml_vk_diag_mask_inf(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_diag_mask_inf(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { int32_t * op_params = (int32_t *)dst->op_params; - ggml_vk_op_f32(ctx, src0, nullptr, dst, GGML_OP_DIAG_MASK_INF, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0] }); + ggml_vk_op_f32(ctx, subctx, src0, nullptr, dst, GGML_OP_DIAG_MASK_INF, { (uint32_t)src0->ne[0], (uint32_t)src0->ne[1], op_params[0] }); } -static void ggml_vk_soft_max(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_soft_max(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { float * op_params = (float *)dst->op_params; - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_SOFT_MAX, { (uint32_t)src0->ne[0], (uint32_t)(src1 != nullptr ? ggml_nrows(src1) : 0), op_params[0], 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_SOFT_MAX, { (uint32_t)src0->ne[0], (uint32_t)(src1 != nullptr ? 
ggml_nrows(src1) : 0), op_params[0], 0.0f }); } -static void ggml_vk_rope(vk_context * ctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { +static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { const int n_dims = ((int32_t *) dst->op_params)[1]; const int mode = ((int32_t *) dst->op_params)[2]; // const int n_ctx = ((int32_t *) dst->op_params)[3]; @@ -3047,19 +3234,19 @@ static void ggml_vk_rope(vk_context * ctx, const ggml_tensor * src0, const ggml_ if (is_neox) { const float theta_scale = powf(freq_base, -2.0f/n_dims); const float inv_ndims = -1.0f / n_dims; - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f, theta_scale, inv_ndims }); + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], (uint32_t)n_dims, freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f, theta_scale, inv_ndims }); } else { - ggml_vk_op_f32(ctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f }); + ggml_vk_op_f32(ctx, subctx, src0, src1, dst, GGML_OP_ROPE, { (uint32_t)src0->ne[0], freq_scale, (uint32_t)src0->ne[1], freq_base, ext_factor, attn_factor, corr_dims[0], corr_dims[1], 0.0f, 0.0f }); } } -static void ggml_vk_nop(vk_context * ctx, const ggml_tensor * src0, ggml_tensor * dst) { +static void ggml_vk_nop(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { // If backend is CPU, data from src0 has to be copied off the device if (dst->backend == GGML_BACKEND_CPU) { ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; - vk_buffer * d_D = &extra_src0->buffer_gpu; - ggml_vk_sync_buffers(ctx); - ggml_vk_buffer_read_async(ctx, d_D, 0, dst->data, d_D->size); + vk_buffer d_D = extra_src0->buffer_gpu.lock(); + ggml_vk_sync_buffers(subctx); + ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, dst->data, d_D->size); } } @@ -3096,7 +3283,7 @@ static void ggml_vk_print_matrix_area(const void * data, ggml_type type, int ne0 } template -static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size_t num_it, int split_k, int shader_size) { +static void ggml_vk_test_matmul(ggml_backend_vk_context * ctx, size_t m, size_t n, size_t k, size_t batch, size_t num_it, int split_k, int shader_size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_matmul(" << m << ", " << n << ", " << k << ", " << batch << ", " << num_it << ", " << split_k << ", " << shader_size << ")" << std::endl; #endif @@ -3108,39 +3295,39 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size std::string shname; if (shader_size == 0) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_aligned_s; + p = &ctx->pipeline_matmul_f32_aligned_s; shname = "F32_ALIGNED_S"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_aligned_s; + p = &ctx->pipeline_matmul_f16_f32_aligned_s; shname = "F16_F32_ALIGNED_S"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_aligned_s; + p = &ctx->pipeline_matmul_f16_aligned_s; shname = "F16_ALIGNED_S"; } else { GGML_ASSERT(false); } } else if (shader_size == 1) { if 
(std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_aligned_m; + p = &ctx->pipeline_matmul_f32_aligned_m; shname = "F32_ALIGNED_M"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_aligned_m; + p = &ctx->pipeline_matmul_f16_f32_aligned_m; shname = "F16_F32_ALIGNED_M"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_aligned_m; + p = &ctx->pipeline_matmul_f16_aligned_m; shname = "F16_ALIGNED_M"; } else { GGML_ASSERT(false); } } else if (shader_size == 2) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_aligned_l; + p = &ctx->pipeline_matmul_f32_aligned_l; shname = "F32_ALIGNED_L"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_aligned_l; + p = &ctx->pipeline_matmul_f16_f32_aligned_l; shname = "F16_F32_ALIGNED_L"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_aligned_l; + p = &ctx->pipeline_matmul_f16_aligned_l; shname = "F16_ALIGNED_L"; } else { GGML_ASSERT(false); @@ -3154,56 +3341,56 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size if (k != kpad) { if (shader_size == 0) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_s; + p = &ctx->pipeline_matmul_f32_s; shname = "F32_S"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_s; + p = &ctx->pipeline_matmul_f16_f32_s; shname = "F16_F32_S"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_s; + p = &ctx->pipeline_matmul_f16_s; shname = "F16_S"; } } else if (shader_size == 1) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_m; + p = &ctx->pipeline_matmul_f32_m; shname = "F32_M"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_m; + p = &ctx->pipeline_matmul_f16_f32_m; shname = "F16_F32_M"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_m; + p = &ctx->pipeline_matmul_f16_m; shname = "F16_M"; } } else if (shader_size == 2) { if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f32_l; + p = &ctx->pipeline_matmul_f32_l; shname = "F32_L"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_f32_l; + p = &ctx->pipeline_matmul_f16_f32_l; shname = "F16_F32_L"; } else if (std::is_same() && std::is_same()) { - p = &vk_pipeline_matmul_f16_l; + p = &ctx->pipeline_matmul_f16_l; shname = "F16_L"; } } } - ggml_vk_pipeline_allocate_descriptor_sets(*p, num_it); + ggml_pipeline_allocate_descriptor_sets(ctx, *p, num_it); if (split_k > 1) { - ggml_vk_pipeline_allocate_descriptor_sets(vk_pipeline_matmul_split_k_reduce, num_it); + ggml_pipeline_allocate_descriptor_sets(ctx, ctx->pipeline_matmul_split_k_reduce, num_it); - if (vk_prealloc_split_k.size < sizeof(float) * d_ne * split_k) { + if (ctx->prealloc_split_k == nullptr || ctx->prealloc_split_k->size < sizeof(float) * d_ne * split_k) { // Resize buffer - if (vk_prealloc_split_k.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_split_k); + if (ctx->prealloc_split_k != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_split_k); } - vk_prealloc_split_k = ggml_vk_create_buffer_check(sizeof(float) * d_ne * split_k, vk::MemoryPropertyFlagBits::eDeviceLocal); + ctx->prealloc_split_k = ggml_vk_create_buffer_check(ctx, sizeof(float) * d_ne * split_k, vk::MemoryPropertyFlagBits::eDeviceLocal); } } - vk_buffer d_X = ggml_vk_create_buffer_check(sizeof(X_TYPE) * x_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); - vk_buffer d_Y = 
ggml_vk_create_buffer_check(sizeof(Y_TYPE) * y_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); - vk_buffer d_D = ggml_vk_create_buffer_check(sizeof(float) * d_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer d_X = ggml_vk_create_buffer_check(ctx, sizeof(X_TYPE) * x_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer d_Y = ggml_vk_create_buffer_check(ctx, sizeof(Y_TYPE) * y_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer d_D = ggml_vk_create_buffer_check(ctx, sizeof(float) * d_ne, vk::MemoryPropertyFlagBits::eDeviceLocal); X_TYPE* x = (X_TYPE *) malloc(sizeof(X_TYPE) * x_ne); Y_TYPE* y = (Y_TYPE *) malloc(sizeof(Y_TYPE) * y_ne); @@ -3228,26 +3415,26 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size } } - ggml_vk_buffer_write(&d_X, 0, x, sizeof(X_TYPE) * k * m * batch); - ggml_vk_buffer_write(&d_Y, 0, y, sizeof(Y_TYPE) * k * n * batch); + ggml_vk_buffer_write(ctx, d_X, 0, x, sizeof(X_TYPE) * k * m * batch); + ggml_vk_buffer_write(ctx, d_Y, 0, y, sizeof(Y_TYPE) * k * n * batch); - vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); for (size_t i = 0; i < num_it; i++) { - ggml_vk_ctx_begin(ctx); - ggml_vk_matmul(ctx, *p, ggml_vk_subbuffer(d_X), ggml_vk_subbuffer(d_Y), ggml_vk_subbuffer(d_D), ggml_vk_subbuffer(vk_prealloc_split_k), m, n, k, k, k, m, split_k, batch, batch, batch, 1, 1, k*m, k*n, m*n); - ggml_vk_ctx_end(ctx); + ggml_vk_ctx_begin(ctx, subctx); + ggml_vk_matmul(ctx, subctx, *p, ggml_vk_subbuffer(d_X), ggml_vk_subbuffer(d_Y), ggml_vk_subbuffer(d_D), ggml_vk_subbuffer(ctx->prealloc_split_k), m, n, k, k, k, m, split_k, batch, batch, batch, 1, 1, k*m, k*n, m*n); + ggml_vk_ctx_end(subctx); } auto begin = std::chrono::high_resolution_clock::now(); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_test_matmul waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_matmul waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); double time = std::chrono::duration_cast(end-begin).count() / 1000.0; // copy dst to host - ggml_vk_buffer_read(&d_D, 0, d, sizeof(float) * d_ne); + ggml_vk_buffer_read(ctx, d_D, 0, d, sizeof(float) * d_ne); float * d_chk = (float *) malloc(sizeof(float) * d_ne); @@ -3285,14 +3472,14 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size src1_ggml->data = y; tensor_ggml->data = d_chk; - vk_disable = true; + ctx->disable = true; ggml_cgraph * cgraph = ggml_new_graph(ggml_ctx); ggml_build_forward_expand(cgraph, tensor_ggml); ggml_graph_compute_with_ctx(ggml_ctx, cgraph, 1); - vk_disable = false; + ctx->disable = false; ggml_free(ggml_ctx); @@ -3325,7 +3512,7 @@ static void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size if (split_k > 1) { float * split_k_buf = (float *) malloc(sizeof(float) * d_ne * split_k); - ggml_vk_buffer_read(&vk_prealloc_split_k, 0, split_k_buf, sizeof(float) * d_ne * split_k); + ggml_vk_buffer_read(ctx, ctx->prealloc_split_k, 0, split_k_buf, sizeof(float) * d_ne * split_k); std::cerr << "d_buf0: " << std::endl << std::endl; ggml_vk_print_matrix_area(split_k_buf, GGML_TYPE_F32, m, n, first_err_m, first_err_n, first_err_b); @@ -3345,15 +3532,15 @@ static 
void ggml_vk_test_matmul(size_t m, size_t n, size_t k, size_t batch, size free(d_chk); - ggml_vk_queue_cleanup(vk_device.transfer_queue); - ggml_vk_queue_cleanup(vk_device.compute_queue); + ggml_vk_queue_cleanup(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_queue_cleanup(ctx, ctx->device.lock()->compute_queue); ggml_vk_destroy_buffer(d_X); ggml_vk_destroy_buffer(d_Y); ggml_vk_destroy_buffer(d_D); - ggml_vk_pipeline_cleanup(*p); - ggml_vk_pipeline_cleanup(vk_pipeline_matmul_split_k_reduce); + ggml_pipeline_cleanup(*p); + ggml_pipeline_cleanup(ctx->pipeline_matmul_split_k_reduce); free(x); free(y); @@ -3392,7 +3579,7 @@ static void ggml_vk_print_tensor_area(const ggml_tensor * tensor, int i0, int i1 } } -static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) { +static void ggml_vk_test_h2d_nc(ggml_backend_vk_context * ctx, size_t ne0, size_t ne1, size_t ne2, size_t ne3) { const size_t ne = ne0 * ne1 * ne2 * ne3; ggml_init_params iparams = { @@ -3406,7 +3593,7 @@ static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) ggml_tensor * tensor = ggml_new_tensor_4d(ggml_ctx, GGML_TYPE_F32, ne0, ne2, ne1, ne3); // NOLINT ggml_tensor * result_tensor = ggml_new_tensor_4d(ggml_ctx, GGML_TYPE_F32, ne0, ne1, ne2, ne3); - float * data = (float *) ggml_vk_host_malloc(ggml_nbytes(tensor)); + float * data = (float *) ggml_vk_host_malloc(ctx, ggml_nbytes(tensor)); tensor->data = data; float * result_data = (float *) malloc(ggml_nbytes(tensor)); @@ -3426,19 +3613,19 @@ static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) data[i] = (rand() / (float)RAND_MAX) * 2.0f - 1.0f; } - vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); - ggml_vk_ctx_begin(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ggml_vk_ctx_begin(ctx, subctx); - vk_buffer buffer = ggml_vk_create_buffer_check(ggml_nbytes(tensor), vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer buffer = ggml_vk_create_buffer_check(ctx, ggml_nbytes(tensor), vk::MemoryPropertyFlagBits::eDeviceLocal); - ggml_vk_h2d_tensor_2d(ctx, &buffer, 0, tensor, 0, 0, ggml_nrows(tensor)); + ggml_vk_h2d_tensor_2d(ctx, subctx, buffer, 0, tensor, 0, 0, ggml_nrows(tensor)); - ggml_vk_ctx_end(ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_ctx_end(subctx); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_h2d_nc waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); - ggml_vk_buffer_read(&buffer, 0, result_data, ggml_nbytes(tensor)); + ggml_vk_buffer_read(ctx, buffer, 0, result_data, ggml_nbytes(tensor)); double avg_err = 0.0; int first_err_i0 = -1; @@ -3483,22 +3670,22 @@ static void ggml_vk_test_h2d_nc(size_t ne0, size_t ne1, size_t ne2, size_t ne3) ggml_vk_destroy_buffer(buffer); - ggml_vk_host_free(data); + ggml_vk_host_free(ctx, data); free(result_data); } -static void ggml_vk_test_transfer(size_t ne, bool pinned) { +static void ggml_vk_test_transfer(ggml_backend_vk_context * ctx, size_t ne, bool pinned) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_transfer(" << ne << ")" << std::endl; #endif // Check transfers are correct - vk_buffer buffer = ggml_vk_create_buffer_check(sizeof(float) * ne, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer buffer = 
ggml_vk_create_buffer_check(ctx, sizeof(float) * ne, vk::MemoryPropertyFlagBits::eDeviceLocal); float * x; float * y; if (pinned) { - x = (float *) ggml_vk_host_malloc(sizeof(float) * ne); - y = (float *) ggml_vk_host_malloc(sizeof(float) * ne); + x = (float *) ggml_vk_host_malloc(ctx, sizeof(float) * ne); + y = (float *) ggml_vk_host_malloc(ctx, sizeof(float) * ne); } else { x = (float *) malloc(sizeof(float) * ne); y = (float *) malloc(sizeof(float) * ne); @@ -3508,42 +3695,42 @@ static void ggml_vk_test_transfer(size_t ne, bool pinned) { x[i] = rand() / (float)RAND_MAX; } - vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); - ggml_vk_ctx_begin(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ggml_vk_ctx_begin(ctx, subctx); auto begin = std::chrono::high_resolution_clock::now(); - ggml_vk_buffer_write_async(ctx, &buffer, 0, x, sizeof(float) * ne); + ggml_vk_buffer_write_async(ctx, subctx, buffer, 0, x, sizeof(float) * ne); - for (auto& cpy : ctx->in_memcpys) { + for (auto& cpy : subctx->in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ctx->in_memcpys.clear(); + subctx->in_memcpys.clear(); - ggml_vk_ctx_end(ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_ctx_end(subctx); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_transfer waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); double ms_to_gpu = std::chrono::duration_cast(end-begin).count() / 1000.0; - ggml_vk_ctx_begin(ctx); + ggml_vk_ctx_begin(ctx, subctx); begin = std::chrono::high_resolution_clock::now(); - ggml_vk_buffer_read_async(ctx, &buffer, 0, y, sizeof(float) * ne); + ggml_vk_buffer_read_async(ctx, subctx, buffer, 0, y, sizeof(float) * ne); - ggml_vk_ctx_end(ctx); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_ctx_end(subctx); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_transfer waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); - for (auto& cpy : ctx->out_memcpys) { + for (auto& cpy : subctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ctx->out_memcpys.clear(); + subctx->out_memcpys.clear(); end = std::chrono::high_resolution_clock::now(); @@ -3561,15 +3748,15 @@ static void ggml_vk_test_transfer(size_t ne, bool pinned) { ggml_vk_destroy_buffer(buffer); if (pinned) { - ggml_vk_host_free(x); - ggml_vk_host_free(y); + ggml_vk_host_free(ctx, x); + ggml_vk_host_free(ctx, y); } else { free(x); free(y); } } -static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { +static void ggml_vk_test_dequant(ggml_backend_vk_context * ctx, size_t ne, ggml_type quant) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_test_dequant(" << ne << ")" << std::endl; #endif @@ -3578,8 +3765,8 @@ static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { const size_t qx_sz = ne * ggml_type_size(quant)/ggml_blck_size(quant); float * x = (float *) malloc(x_sz); void * qx = malloc(qx_sz); - vk_buffer qx_buf = ggml_vk_create_buffer_check(qx_sz, 
vk::MemoryPropertyFlagBits::eDeviceLocal); - vk_buffer x_buf = ggml_vk_create_buffer_check(x_sz_f16, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer qx_buf = ggml_vk_create_buffer_check(ctx, qx_sz, vk::MemoryPropertyFlagBits::eDeviceLocal); + vk_buffer x_buf = ggml_vk_create_buffer_check(ctx, x_sz_f16, vk::MemoryPropertyFlagBits::eDeviceLocal); ggml_fp16_t * x_chk = (ggml_fp16_t *) malloc(x_sz_f16); for (size_t i = 0; i < ne; i++) { @@ -3588,7 +3775,7 @@ static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { std::vector hist_cur(1 << 4, 0); - vk_pipeline& p = vk_pipeline_dequant[quant]; + vk_pipeline& p = ctx->pipeline_dequant[quant]; switch(quant) { case GGML_TYPE_Q4_0: @@ -3625,27 +3812,26 @@ static void ggml_vk_test_dequant(size_t ne, ggml_type quant) { GGML_ASSERT(false); } - ggml_vk_pipeline_allocate_descriptor_sets(p, 1); + ggml_pipeline_allocate_descriptor_sets(ctx, p, 1); - ggml_vk_buffer_write(&qx_buf, 0, qx, qx_sz); + ggml_vk_buffer_write(ctx, qx_buf, 0, qx, qx_sz); - vk_context * ctx = ggml_vk_create_context(vk_device.compute_queue); - ggml_vk_ctx_begin(ctx); + vk_context * subctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ggml_vk_ctx_begin(ctx, subctx); const std::vector pc = { 1, (int)ne, (int)ne, (int)ne }; - ggml_vk_sync_buffers(ctx); - ggml_vk_dispatch_pipeline(ctx, p, { { qx_buf, 0, qx_sz }, { x_buf, 0, x_sz_f16 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)ne, 1, 1}); - ggml_vk_ctx_end(ctx); + ggml_vk_dispatch_pipeline(ctx, subctx, p, { { qx_buf, 0, qx_sz }, { x_buf, 0, x_sz_f16 } }, pc.size() * sizeof(int), pc.data(), { (uint32_t)ne, 1, 1}); + ggml_vk_ctx_end(subctx); auto begin = std::chrono::high_resolution_clock::now(); - ggml_vk_submit(ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(subctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_test_dequant waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); auto end = std::chrono::high_resolution_clock::now(); double ms_dequant = std::chrono::duration_cast(end-begin).count() / 1000.0; - ggml_vk_buffer_read(&x_buf, 0, x_chk, x_sz_f16); + ggml_vk_buffer_read(ctx, x_buf, 0, x_chk, x_sz_f16); double avg_err = 0.0; for (size_t i = 0; i < ne; i++) { @@ -3687,15 +3873,15 @@ static ggml_tensor * ggml_vk_find_last_use(const ggml_tensor * node, ggml_cgraph return nullptr; } -void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ +static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggml_tensor * node){ #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_preallocate_buffers_graph(" << node << ")" << std::endl; + std::cerr << "ggml_ctx->preallocate_buffers_graph(" << node << ")" << std::endl; #endif const bool any_on_device = node->backend == GGML_BACKEND_GPU || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) || (node->src[1] != nullptr && (node->src[1]->backend == GGML_BACKEND_GPU)); - if (vk_disable || (!any_on_device && node->op != GGML_OP_MUL_MAT)) { + if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT)) { return; } @@ -3735,16 +3921,16 @@ void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ const uint32_t y_ne = ne10 * ne11; const uint32_t d_ne = ne20 * ne21; - const uint64_t qx_sz = use_src0 ? 
ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; - const uint64_t qy_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type), vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; - const uint64_t x_sz = use_src0 ? ggml_vk_align_size(sizeof(ggml_fp16_t) * x_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; - const uint64_t y_sz = use_src1 ? ggml_vk_align_size(f16_f32_kernel ? sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; - uint64_t d_sz = ggml_vk_align_size(ggml_type_size(node->type) * d_ne, vk_device.properties.limits.minStorageBufferOffsetAlignment) * ne22 * ne23; + const uint64_t qx_sz = use_src0 ? ggml_vk_align_size(ggml_type_size(src0->type) * x_ne / ggml_blck_size(src0->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; + const uint64_t qy_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * y_ne / ggml_blck_size(src1->type), ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; + const uint64_t x_sz = use_src0 ? ggml_vk_align_size(sizeof(ggml_fp16_t) * x_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne02 * ne03 : 0; + const uint64_t y_sz = use_src1 ? ggml_vk_align_size(f16_f32_kernel ? sizeof(float) * y_ne : sizeof(ggml_fp16_t) * y_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne12 * ne13 : 0; + uint64_t d_sz = ggml_vk_align_size(ggml_type_size(node->type) * d_ne, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) * ne22 * ne23; const uint64_t split_k_size = split_k > 1 ? 
d_sz * 4 : 0; - if (extra->buffer_gpu.size == 0) { + if (extra->buffer_gpu.expired()) { // Workaround for CPU backend BLAS matmul calls - extra->buffer_gpu = ggml_vk_create_buffer_temp(d_sz); + extra->buffer_gpu = ggml_vk_create_buffer_temp(ctx, d_sz); } switch (node->op) { @@ -3779,23 +3965,23 @@ void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ } break; case GGML_OP_MUL_MAT: - if (vk_prealloc_size_qx < qx_sz) { - vk_prealloc_size_qx = qx_sz; + if (ctx->prealloc_size_qx < qx_sz) { + ctx->prealloc_size_qx = qx_sz; } - if (vk_prealloc_size_qy < qy_sz) { - vk_prealloc_size_qy = qy_sz; + if (ctx->prealloc_size_qy < qy_sz) { + ctx->prealloc_size_qy = qy_sz; } - if (vk_prealloc_size_x < x_sz) { - vk_prealloc_size_x = x_sz; + if (ctx->prealloc_size_x < x_sz) { + ctx->prealloc_size_x = x_sz; } - if (vk_prealloc_size_y < y_sz) { - vk_prealloc_size_y = y_sz; + if (ctx->prealloc_size_y < y_sz) { + ctx->prealloc_size_y = y_sz; } - if (vk_prealloc_size_split_k < split_k_size) { - vk_prealloc_size_split_k = split_k_size; + if (ctx->prealloc_size_split_k < split_k_size) { + ctx->prealloc_size_split_k = split_k_size; } - if (vk_staging_size < x_sz + y_sz) { - vk_staging_size = x_sz + y_sz; + if (ctx->staging_size < x_sz + y_sz) { + ctx->staging_size = x_sz + y_sz; } break; default: @@ -3803,29 +3989,29 @@ void ggml_vk_preallocate_buffers_graph(ggml_tensor * node){ } } -void ggml_vk_preallocate_buffers() { - if (vk_disable) { +static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { + if (ctx->disable) { return; } #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_preallocate_buffers()" << std::endl; - std::cerr << "qx_size: " << vk_prealloc_size_qx << " qy_size: " << vk_prealloc_size_qy << " x_size: " << vk_prealloc_size_x << " y_size: " << vk_prealloc_size_y << " split_k_size: " << vk_prealloc_size_split_k << std::endl; + std::cerr << "ggml_ctx->preallocate_buffers()" << std::endl; + std::cerr << "qx_size: " << ctx->prealloc_size_qx << " qy_size: " << ctx->prealloc_size_qy << " x_size: " << ctx->prealloc_size_x << " y_size: " << ctx->prealloc_size_y << " split_k_size: " << ctx->prealloc_size_split_k << std::endl; #endif #if defined(GGML_VULKAN_RUN_TESTS) - vk_staging = ggml_vk_create_buffer_check(100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); - ggml_vk_test_transfer(8192 * 1000, false); - ggml_vk_test_transfer(8192 * 1000, true); + ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ggml_vk_test_transfer(ctx, 8192 * 1000, false); + ggml_vk_test_transfer(ctx, 8192 * 1000, true); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_0); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_1); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_0); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_1); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q8_0); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q2_K); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q3_K); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q4_K); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q5_K); - ggml_vk_test_dequant(2560 * 7680, GGML_TYPE_Q6_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_0); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_1); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_0); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_1); 
+ ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q8_0); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q2_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q3_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q4_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q5_K); + ggml_vk_test_dequant(ctx, 2560 * 7680, GGML_TYPE_Q6_K); const std::vector vals { 8, 8, 8, @@ -3852,76 +4038,76 @@ void ggml_vk_preallocate_buffers() { }; const size_t num_it = 1; for (size_t i = 0; i < vals.size(); i += 3) { - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 0); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 1); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 2); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 0); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 1); - ggml_vk_test_matmul(vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 2); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 0); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 1); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 1, 2); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 0); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 1); + ggml_vk_test_matmul(ctx, vals[i], vals[i + 1], vals[i + 2], 2, num_it, 4, 2); std::cerr << std::endl; } GGML_ASSERT(false); #endif - if (vk_prealloc_size_qx > 0 && vk_prealloc_qx.size < vk_prealloc_size_qx) { + if (ctx->prealloc_qx == nullptr || (ctx->prealloc_size_qx > 0 && ctx->prealloc_qx->size < ctx->prealloc_size_qx)) { // Resize buffer - if (vk_prealloc_qx.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_qx); + if (ctx->prealloc_qx != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_qx); } - vk_prealloc_qx = ggml_vk_create_buffer_device(vk_prealloc_size_qx); + ctx->prealloc_qx = ggml_vk_create_buffer_device(ctx, ctx->prealloc_size_qx); } - if (vk_prealloc_size_qy > 0 && vk_prealloc_qy.size < vk_prealloc_size_qy) { + if (ctx->prealloc_qy == nullptr || (ctx->prealloc_size_qy > 0 && ctx->prealloc_qy->size < ctx->prealloc_size_qy)) { // Resize buffer - if (vk_prealloc_qy.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_qy); + if (ctx->prealloc_qy != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_qy); } - vk_prealloc_qy = ggml_vk_create_buffer_device(vk_prealloc_size_qy); + ctx->prealloc_qy = ggml_vk_create_buffer_device(ctx, ctx->prealloc_size_qy); } - if (vk_prealloc_size_x > 0 && vk_prealloc_x.size < vk_prealloc_size_x) { + if (ctx->prealloc_x == nullptr || (ctx->prealloc_size_x > 0 && ctx->prealloc_x->size < ctx->prealloc_size_x)) { // Resize buffer - if (vk_prealloc_x.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_x); + if (ctx->prealloc_x != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_x); } - vk_prealloc_x = ggml_vk_create_buffer_device(vk_prealloc_size_x); + ctx->prealloc_x = ggml_vk_create_buffer_device(ctx, ctx->prealloc_size_x); } - if (vk_prealloc_size_y > 0 && vk_prealloc_y.size < vk_prealloc_size_y) { + if (ctx->prealloc_y == nullptr || (ctx->prealloc_size_y > 0 && ctx->prealloc_y->size < ctx->prealloc_size_y)) { // Resize buffer - if (vk_prealloc_y.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_y); + if (ctx->prealloc_y != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_y); } - vk_prealloc_y = ggml_vk_create_buffer_device(vk_prealloc_size_y); + ctx->prealloc_y = ggml_vk_create_buffer_device(ctx, 
ctx->prealloc_size_y); } - if (vk_prealloc_size_split_k > 0 && vk_prealloc_split_k.size < vk_prealloc_size_split_k) { + if (ctx->prealloc_split_k == nullptr || (ctx->prealloc_size_split_k > 0 && ctx->prealloc_split_k->size < ctx->prealloc_size_split_k)) { // Resize buffer - if (vk_prealloc_split_k.size > 0) { - ggml_vk_destroy_buffer(vk_prealloc_split_k); + if (ctx->prealloc_split_k != nullptr) { + ggml_vk_destroy_buffer(ctx->prealloc_split_k); } - vk_prealloc_split_k = ggml_vk_create_buffer_device(vk_prealloc_size_split_k); + ctx->prealloc_split_k = ggml_vk_create_buffer_device(ctx, ctx->prealloc_size_split_k); } - if (vk_staging_size > 0 && vk_staging.size < vk_staging_size) { + if (ctx->staging == nullptr || (ctx->staging_size > 0 && ctx->staging->size < ctx->staging_size)) { // Resize buffer - if (vk_staging.size > 0) { - ggml_vk_destroy_buffer(vk_staging); + if (ctx->staging != nullptr) { + ggml_vk_destroy_buffer(ctx->staging); } - vk_staging = ggml_vk_create_buffer_check(vk_staging_size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ctx->staging = ggml_vk_create_buffer_check(ctx, ctx->staging_size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); } } -void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ +static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * node, bool last_node){ const bool any_on_device = node->backend == GGML_BACKEND_GPU || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) || (node->src[1] != nullptr && node->src[1]->backend == GGML_BACKEND_GPU); - if (vk_disable || (!any_on_device && node->op != GGML_OP_MUL_MAT) || (node->op == GGML_OP_MUL_MAT && !any_on_device && !ggml_vk_can_mul_mat(node->src[0], node->src[1], node))) { + if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT) || (node->op == GGML_OP_MUL_MAT && !any_on_device && !ggml_vk_can_mul_mat(node->src[0], node->src[1], node))) { return; } #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_build_graph(" << node << ", " << ggml_op_name(node->op) << ")" << std::endl; #endif - vk_semaphore_idx = 0; - vk_staging_offset = 0; + ctx->semaphore_idx = 0; + ctx->staging_offset = 0; const ggml_tensor * src0 = node->src[0]; const ggml_tensor * src1 = node->src[1]; @@ -3969,44 +4155,44 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ return; } - if (vk_ctx == nullptr) { - vk_ctx = ggml_vk_create_context(vk_device.compute_queue); - ggml_vk_ctx_begin(vk_ctx); + if (ctx->compute_ctx == nullptr) { + ctx->compute_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->compute_queue); + ggml_vk_ctx_begin(ctx, ctx->compute_ctx); } switch (node->op) { case GGML_OP_REPEAT: - ggml_vk_repeat(vk_ctx, src0, src1, node); + ggml_vk_repeat(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_GET_ROWS: - ggml_vk_get_rows(vk_ctx, src0, src1, node); + ggml_vk_get_rows(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_ADD: - ggml_vk_add(vk_ctx, src0, src1, node); + ggml_vk_add(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_MUL: - ggml_vk_mul(vk_ctx, src0, src1, node); + ggml_vk_mul(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_SCALE: - ggml_vk_scale(vk_ctx, src0, node); + ggml_vk_scale(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_SQR: - ggml_vk_sqr(vk_ctx, src0, node); + ggml_vk_sqr(ctx, 
ctx->compute_ctx, src0, node); break; case GGML_OP_CLAMP: - ggml_vk_clamp(vk_ctx, src0, node); + ggml_vk_clamp(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_CPY: case GGML_OP_CONT: case GGML_OP_DUP: - ggml_vk_cpy(vk_ctx, src0, node); + ggml_vk_cpy(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_RESHAPE: @@ -4014,15 +4200,15 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ case GGML_OP_PERMUTE: case GGML_OP_TRANSPOSE: case GGML_OP_NONE: - ggml_vk_nop(vk_ctx, src0, node); + ggml_vk_nop(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_NORM: - ggml_vk_norm(vk_ctx, src0, node); + ggml_vk_norm(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_RMS_NORM: - ggml_vk_rms_norm(vk_ctx, src0, node); + ggml_vk_rms_norm(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_UNARY: @@ -4030,26 +4216,26 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ case GGML_UNARY_OP_SILU: case GGML_UNARY_OP_GELU: case GGML_UNARY_OP_RELU: - ggml_vk_unary(vk_ctx, src0, node); + ggml_vk_unary(ctx, ctx->compute_ctx, src0, node); break; default: return; } break; case GGML_OP_DIAG_MASK_INF: - ggml_vk_diag_mask_inf(vk_ctx, src0, node); + ggml_vk_diag_mask_inf(ctx, ctx->compute_ctx, src0, node); break; case GGML_OP_SOFT_MAX: - ggml_vk_soft_max(vk_ctx, src0, src1, node); + ggml_vk_soft_max(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_ROPE: - ggml_vk_rope(vk_ctx, src0, src1, node); + ggml_vk_rope(ctx, ctx->compute_ctx, src0, src1, node); break; case GGML_OP_MUL_MAT: - ggml_vk_mul_mat(vk_ctx, src0, src1, node); + ggml_vk_mul_mat(ctx, ctx->compute_ctx, src0, src1, node); break; default: @@ -4057,7 +4243,7 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ } extra->ready = true; - extra->ctx_idx = vk_ctx->idx; + extra->ctx_idx = ctx->compute_ctx->idx; #ifdef GGML_VULKAN_CHECK_RESULTS // Force context reset on each node so that each tensor ends up in its own context @@ -4066,18 +4252,18 @@ void ggml_vk_build_graph(ggml_tensor * node, bool last_node){ #endif if (node->backend == GGML_BACKEND_CPU || last_node) { - ggml_vk_ctx_end(vk_ctx); - vk_ctx->exit_tensor = node; - vk_ctx = nullptr; + ggml_vk_ctx_end(ctx->compute_ctx); + ctx->compute_ctx->exit_tensor = node; + ctx->compute_ctx = nullptr; } } -bool ggml_vk_compute_forward(ggml_compute_params * params, ggml_tensor * tensor){ +static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor){ const bool any_on_device = tensor->backend == GGML_BACKEND_GPU || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); - if (vk_disable || (!any_on_device && tensor->op != GGML_OP_MUL_MAT)) { + if (ctx->disable || (!any_on_device && tensor->op != GGML_OP_MUL_MAT)) { return false; } @@ -4145,33 +4331,33 @@ bool ggml_vk_compute_forward(ggml_compute_params * params, ggml_tensor * tensor) #endif #ifdef GGML_VULKAN_CHECK_RESULTS - ggml_vk_check_results_0(params, tensor); + ggml_vk_check_results_0(ctx, params, tensor); #endif GGML_ASSERT(extra->ready); - vk_context& ctx = vk_gc.contexts[extra->ctx_idx]; + vk_context& subctx = ctx->gc.contexts[extra->ctx_idx]; // Only run if ctx hasn't been submitted yet - if (!ctx.seqs.empty()) { + if (!subctx.seqs.empty()) { // Do staging buffer copies - for (auto& cpy : ctx.in_memcpys) { + for (auto& cpy : subctx.in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - 
ggml_vk_submit(&ctx, vk_fence); + ggml_vk_submit(&subctx, ctx->fence); } - if (tensor == ctx.exit_tensor) { - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); - vk_device.device.resetFences({ vk_fence }); + if (tensor == subctx.exit_tensor) { + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_vk_compute_forward waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); // Do staging buffer copies - for (auto& cpy : ctx.out_memcpys) { + for (auto& cpy : subctx.out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ctx.in_memcpys.clear(); - ctx.out_memcpys.clear(); + subctx.in_memcpys.clear(); + subctx.out_memcpys.clear(); } extra->ready = false; @@ -4179,90 +4365,204 @@ bool ggml_vk_compute_forward(ggml_compute_params * params, ggml_tensor * tensor) return true; } -void ggml_vk_graph_cleanup() { - if (vk_disable) { +// Clean up after graph processing is done +static void ggml_vk_graph_cleanup(ggml_backend_vk_context * ctx) { + if (ctx->disable) { return; } #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_graph_cleanup()" << std::endl; #endif - for (auto& buffer : vk_gc.temp_buffers) { - ggml_vk_pool_free(buffer); + for (auto& buffer : ctx->gc.temp_buffers) { + ggml_vk_pool_free(ctx, buffer); } - vk_gc.temp_buffers.clear(); + ctx->gc.temp_buffers.clear(); - for (auto * pipeline : vk_gc.pipelines) { - ggml_vk_pipeline_cleanup(*pipeline); - } - vk_gc.pipelines.clear(); - - ggml_vk_queue_cleanup(vk_device.compute_queue); - ggml_vk_queue_cleanup(vk_device.transfer_queue); - - for (size_t i = 0; i < vk_gc.semaphores.size(); i++) { - vk_device.device.destroySemaphore({ vk_gc.semaphores[i].s }); - } - vk_gc.semaphores.clear(); - - for (size_t i = 0; i < vk_gc.tl_semaphores.size(); i++) { - vk_device.device.destroySemaphore({ vk_gc.tl_semaphores[i].s }); - } - vk_gc.tl_semaphores.clear(); - - vk_event_idx = 0; - - for (auto& event : vk_gc.events) { - vk_device.device.resetEvent(event); + for (auto * pipeline : ctx->gc.pipelines) { + ggml_pipeline_cleanup(*pipeline); } - vk_staging_offset = 0; + ggml_vk_queue_cleanup(ctx, ctx->device.lock()->compute_queue); + ggml_vk_queue_cleanup(ctx, ctx->device.lock()->transfer_queue); - vk_ctx = nullptr; - vk_gc.contexts.clear(); + for (size_t i = 0; i < ctx->gc.semaphores.size(); i++) { + ctx->device.lock()->device.destroySemaphore({ ctx->gc.semaphores[i].s }); + } + ctx->gc.semaphores.clear(); + + for (size_t i = 0; i < ctx->gc.tl_semaphores.size(); i++) { + ctx->device.lock()->device.destroySemaphore({ ctx->gc.tl_semaphores[i].s }); + } + ctx->gc.tl_semaphores.clear(); + ctx->semaphore_idx = 0; + + ctx->event_idx = 0; + + for (auto& event : ctx->gc.events) { + ctx->device.lock()->device.resetEvent(event); + } + + ctx->staging_offset = 0; + + ctx->compute_ctx = nullptr; + ctx->transfer_ctx = nullptr; + ctx->gc.contexts.clear(); } -static void ggml_vk_cleanup() { +// Clean up on backend free +static void ggml_vk_cleanup(ggml_backend_vk_context * ctx) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_cleanup()" << std::endl; + std::cerr << "ggml_vk_cleanup(" << ctx->idx << ")" << std::endl; #endif - ggml_vk_destroy_buffer(vk_prealloc_x); - ggml_vk_destroy_buffer(vk_prealloc_y); - ggml_vk_destroy_buffer(vk_prealloc_split_k); - ggml_vk_destroy_buffer(vk_staging); - ggml_vk_destroy_buffer(vk_sync_staging); + ggml_vk_graph_cleanup(ctx); - vk_prealloc_size_x = 0; - vk_prealloc_size_y = 0; - vk_prealloc_size_split_k = 0; - vk_staging_size = 0; + 
ggml_vk_destroy_buffer(ctx->prealloc_qx);
+ ggml_vk_destroy_buffer(ctx->prealloc_qy);
+ ggml_vk_destroy_buffer(ctx->prealloc_x);
+ ggml_vk_destroy_buffer(ctx->prealloc_y);
+ ggml_vk_destroy_buffer(ctx->prealloc_split_k);
+ ggml_vk_destroy_buffer(ctx->staging);
+ ggml_vk_destroy_buffer(ctx->sync_staging);
- for (auto& event : vk_gc.events) {
- vk_device.device.destroyEvent(event);
+ for (auto& buffer : ctx->buffer_pool) {
+ ggml_vk_destroy_buffer(buffer);
 }
- vk_gc.events.clear();
+
+ ctx->prealloc_size_qx = 0;
+ ctx->prealloc_size_qy = 0;
+ ctx->prealloc_size_x = 0;
+ ctx->prealloc_size_y = 0;
+ ctx->prealloc_size_split_k = 0;
+ ctx->staging_size = 0;
+
+ for (auto& event : ctx->gc.events) {
+ ctx->device.lock()->device.destroyEvent(event);
+ }
+ ctx->gc.events.clear();
+
+ for (auto* pipeline : ctx->gc.pipelines) {
+ ggml_vk_destroy_pipeline(ctx, pipeline);
+ }
+ ctx->gc.pipelines.clear();
+
+ ctx->device.lock()->device.destroyFence(ctx->fence);
+
+ ctx->device.lock()->device.destroyCommandPool(ctx->device.lock()->compute_queue.pool);
+ if (!ctx->device.lock()->single_queue) {
+ ctx->device.lock()->device.destroyCommandPool(ctx->device.lock()->transfer_queue.pool);
+ }
+}
+
+GGML_CALL int ggml_vk_get_device_count() {
+ ggml_vk_instance_init();
+
+ return vk_instance.device_indices.size();
+}
+
+GGML_CALL void ggml_vk_get_device_description(int device, char * description, size_t description_size) {
+ ggml_vk_instance_init();
+
+ std::vector<vk::PhysicalDevice> devices = vk_instance.instance.enumeratePhysicalDevices();
+
+ vk::PhysicalDeviceProperties props;
+ devices[device].getProperties(&props);
+
+ snprintf(description, description_size, "%s", props.deviceName.data());
+}
+
+// CPU assist interface
+
+void ggml_vk_init_cpu_assist() {
+ ggml_vk_instance_init();
+
+ std::cerr << "ggml_vulkan: Found " << ggml_vk_get_device_count() << " Vulkan devices:" << std::endl;
+
+ for (size_t i = 0; i < ggml_vk_get_device_count(); i++) {
+ ggml_vk_print_gpu_info(i);
+ }
+ // Initialize the first backend to make sure CPU matrix multiplications can be offloaded.
+ ggml_backend_vk_init(0); +} + +void ggml_vk_preallocate_buffers_graph_cpu_assist(ggml_tensor * node) { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return; + } + + ggml_vk_preallocate_buffers_graph(ctx, node); +} + +void ggml_vk_preallocate_buffers_cpu_assist() { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return; + } + + ggml_vk_preallocate_buffers(ctx); +} + +void ggml_vk_build_graph_cpu_assist(ggml_tensor * node, bool last_node) { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return; + } + + ggml_vk_build_graph(ctx, node, last_node); +} + +bool ggml_vk_compute_forward_cpu_assist(ggml_compute_params * params, ggml_tensor * tensor){ + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return false; + } + + return ggml_vk_compute_forward(ctx, params, tensor); +} + +void ggml_vk_graph_cleanup_cpu_assist() { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized) { + return; + } + + ggml_vk_graph_cleanup(ctx); +} + +void ggml_vk_free_cpu_assist() { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + if (!ctx->initialized || vk_instance.backends[0] == nullptr) { + return; + } + + ggml_backend_vk_free(vk_instance.backends[0]); } // backend interface #define UNUSED GGML_UNUSED -struct ggml_backend_vk_context { - std::string name; -}; - // device backend static void * const vk_ptr_base = (void *)(uintptr_t) 0x1000; // NOLINT struct ggml_backend_vk_buffer_context { + ggml_backend_vk_context * ctx; vk_buffer dev_buffer; ggml_tensor_extra_gpu * temp_tensor_extras = nullptr; size_t temp_tensor_extra_index = 0; std::string name; - ggml_backend_vk_buffer_context(vk_buffer dev_buffer) : + ggml_backend_vk_buffer_context(ggml_backend_vk_context * ctx, vk_buffer&& dev_buffer, std::string& name) : + ctx(ctx), dev_buffer(dev_buffer), - name(GGML_VK_NAME) { + name(name) { } ~ggml_backend_vk_buffer_context() { @@ -4294,6 +4594,9 @@ GGML_CALL static bool ggml_backend_buffer_is_vk(ggml_backend_buffer_t buffer) { } GGML_CALL static void ggml_backend_vk_buffer_free_buffer(ggml_backend_buffer_t buffer) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_buffer_free_buffer()" << std::endl; +#endif ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; ggml_vk_destroy_buffer(ctx->dev_buffer); delete ctx; @@ -4313,6 +4616,7 @@ GGML_CALL static void ggml_backend_vk_buffer_init_tensor(ggml_backend_buffer_t b ggml_tensor_extra_gpu * extra = ctx->ggml_vk_alloc_temp_tensor_extra(); if (tensor->view_src != nullptr && tensor->view_src->extra != nullptr) { + GGML_ASSERT(tensor->view_src->buffer->buft == buffer->buft); ggml_tensor_extra_gpu * extra_view = (ggml_tensor_extra_gpu *) tensor->view_src->extra; extra->buffer_gpu = extra_view->buffer_gpu; extra->offset = extra_view->offset + tensor->view_offs; @@ -4331,11 +4635,13 @@ GGML_CALL static void ggml_backend_vk_buffer_set_tensor(ggml_backend_buffer_t bu #endif GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_vk_buffer_write(&extra->buffer_gpu, extra->offset + offset, data, size); + vk_buffer buf = extra->buffer_gpu.lock(); - UNUSED(buffer); + ggml_vk_buffer_write(ctx->ctx, buf, extra->offset + offset, data, size); } GGML_CALL static void 
ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { @@ -4344,31 +4650,35 @@ GGML_CALL static void ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t bu #endif GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; + ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_vk_buffer_read(&extra->buffer_gpu, extra->offset + offset, data, size); + vk_buffer buf = extra->buffer_gpu.lock(); - UNUSED(buffer); + ggml_vk_buffer_read(ctx->ctx, buf, extra->offset + offset, data, size); } GGML_CALL static bool ggml_backend_vk_buffer_cpy_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * src, ggml_tensor * dst) { if (ggml_backend_buffer_is_vk(src->buffer)) { + ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - ggml_vk_buffer_copy(&src_extra->buffer_gpu, src_extra->offset, &dst_extra->buffer_gpu, dst_extra->offset, ggml_nbytes(src)); + vk_buffer src_buf = src_extra->buffer_gpu.lock(); + vk_buffer dst_buf = dst_extra->buffer_gpu.lock(); + + ggml_vk_buffer_copy(dst_buf, dst_extra->offset, src_buf, src_extra->offset, ggml_nbytes(src)); return true; } return false; - - UNUSED(buffer); } GGML_CALL static void ggml_backend_vk_buffer_clear(ggml_backend_buffer_t buffer, uint8_t value) { ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; - ggml_vk_buffer_memset(&ctx->dev_buffer, 0, value, buffer->size); + ggml_vk_buffer_memset(ctx->ctx, ctx->dev_buffer, 0, value, buffer->size); } static ggml_backend_buffer_i ggml_backend_vk_buffer_interface = { @@ -4386,6 +4696,7 @@ static ggml_backend_buffer_i ggml_backend_vk_buffer_interface = { // vk buffer type struct ggml_backend_vk_buffer_type_context { std::string name; + ggml_backend_vk_context * ctx; }; GGML_CALL static const char * ggml_backend_vk_buffer_type_name(ggml_backend_buffer_type_t buft) { @@ -4398,25 +4709,22 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_buffer_type_alloc_buffer( #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_type_alloc_buffer(" << size << ")" << std::endl; #endif - vk_buffer dev_buffer = ggml_vk_create_buffer_device(size); + ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; + vk_buffer dev_buffer = ggml_vk_create_buffer_device(ctx->ctx, size); - ggml_backend_vk_buffer_context * ctx = new ggml_backend_vk_buffer_context(dev_buffer); + ggml_backend_vk_buffer_context * bufctx = new ggml_backend_vk_buffer_context(ctx->ctx, std::move(dev_buffer), ctx->name); - return ggml_backend_buffer_init(buft, ggml_backend_vk_buffer_interface, ctx, size); - - UNUSED(buft); + return ggml_backend_buffer_init(buft, ggml_backend_vk_buffer_interface, bufctx, size); } GGML_CALL static size_t ggml_backend_vk_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { - return vk_device.properties.limits.minStorageBufferOffsetAlignment; - - UNUSED(buft); + ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; + return ctx->ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment; } GGML_CALL static size_t ggml_backend_vk_buffer_type_get_max_size(ggml_backend_buffer_type_t buft) { - return 
vk_device.max_memory_allocation_size; - - UNUSED(buft); + ggml_backend_vk_buffer_type_context * ctx = (ggml_backend_vk_buffer_type_context *) buft->context; + return ctx->ctx->device.lock()->max_memory_allocation_size; } GGML_CALL static size_t ggml_backend_vk_buffer_type_get_alloc_size(ggml_backend_buffer_type_t buft, const ggml_tensor * tensor) { @@ -4426,9 +4734,14 @@ GGML_CALL static size_t ggml_backend_vk_buffer_type_get_alloc_size(ggml_backend_ } GGML_CALL static bool ggml_backend_vk_buffer_type_supports_backend(ggml_backend_buffer_type_t buft, ggml_backend_t backend) { - return ggml_backend_is_vk(backend); + if (!ggml_backend_is_vk(backend)) { + return false; + } - UNUSED(buft); + ggml_backend_vk_buffer_type_context * buft_ctx = (ggml_backend_vk_buffer_type_context *)buft->context; + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + + return buft_ctx->ctx->idx == ctx->idx; } static ggml_backend_buffer_type_i ggml_backend_vk_buffer_type_interface = { @@ -4441,20 +4754,16 @@ static ggml_backend_buffer_type_i ggml_backend_vk_buffer_type_interface = { /* .is_host = */ NULL, }; -GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_buffer_type() { - static ggml_backend_buffer_type ggml_backend_vk_buffer_type; +GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_buffer_type(size_t idx) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_buffer_type(" << idx << ")" << std::endl; +#endif - static bool ggml_backend_vk_buffer_type_initialized = false; + GGML_ASSERT(idx < vk_instance.device_indices.size()); - if (!ggml_backend_vk_buffer_type_initialized) { - ggml_backend_vk_buffer_type = { - /* .iface = */ ggml_backend_vk_buffer_type_interface, - /* .context = */ new ggml_backend_vk_buffer_type_context{GGML_VK_NAME}, - }; - ggml_backend_vk_buffer_type_initialized = true; - } + ggml_backend_vk_init(idx); - return &ggml_backend_vk_buffer_type; + return &vk_instance.buffer_types[idx]; } // host buffer type @@ -4472,13 +4781,19 @@ GGML_CALL static const char * ggml_backend_vk_host_buffer_name(ggml_backend_buff } GGML_CALL static void ggml_backend_vk_host_buffer_free_buffer(ggml_backend_buffer_t buffer) { - ggml_vk_host_free(buffer->context); +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_host_buffer_free_buffer()" << std::endl; +#endif + ggml_vk_host_free(&vk_instance.contexts[0], buffer->context); } GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_host_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_host_buffer_type_alloc_buffer(" << size << ")" << std::endl; +#endif void * ptr = nullptr; try { - ptr = ggml_vk_host_malloc(size); + ptr = ggml_vk_host_malloc(&vk_instance.contexts[0], size); } catch (vk::SystemError& e) { std::cerr << "ggml_vulkan: Failed to allocate pinned memory." 
<< std::endl; std::cerr << "ggml_vulkan: " << e.what() << std::endl; @@ -4495,7 +4810,7 @@ GGML_CALL static ggml_backend_buffer_t ggml_backend_vk_host_buffer_type_alloc_bu } GGML_CALL static size_t ggml_backend_vk_host_buffer_type_get_alignment(ggml_backend_buffer_type_t buft) { - return vk_device.properties.limits.minMemoryMapAlignment; + return vk_instance.contexts[0].device.lock()->properties.limits.minMemoryMapAlignment; UNUSED(buft); } @@ -4514,127 +4829,150 @@ GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_host_buffer_type() { /* .context = */ nullptr, }; + if (!vk_instance.contexts[0].initialized) { + // Fall back to CPU + return ggml_backend_cpu_buffer_type(); + } + return &ggml_backend_vk_buffer_type_host; } // backend GGML_CALL static const char * ggml_backend_vk_name(ggml_backend_t backend) { - ggml_backend_vk_context * vk_ctx = (ggml_backend_vk_context *)backend->context; + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; - return vk_ctx->name.c_str(); + return ctx->name.c_str(); } GGML_CALL static void ggml_backend_vk_free(ggml_backend_t backend) { - ggml_backend_vk_context * vk_ctx = (ggml_backend_vk_context *)backend->context; + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_free(" << ctx->name << ")" << std::endl; +#endif - delete vk_ctx; + size_t idx = ctx->idx; + + ggml_vk_cleanup(ctx); + + // Release device + vk_instance.devices[ctx->idx].reset(); + ctx->initialized = false; + + vk_instance.initialized[idx] = false; + vk_instance.backends[idx] = nullptr; + memset(&vk_instance.buffer_types[idx], 0, sizeof(ggml_backend_buffer_type)); delete backend; } GGML_CALL static ggml_backend_buffer_type_t ggml_backend_vk_get_default_buffer_type(ggml_backend_t backend) { - return ggml_backend_vk_buffer_type(); + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; - UNUSED(backend); + GGML_ASSERT(ctx->initialized); + + return ggml_backend_vk_buffer_type(ctx->idx); } GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_set_tensor_async(" << size << ")" << std::endl; #endif - GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type() || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (vk_transfer_ctx == nullptr) { + if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_transfer_ctx); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } - ggml_vk_buffer_write_async(vk_transfer_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); + vk_buffer buf = extra->buffer_gpu.lock(); - UNUSED(backend); + ggml_vk_buffer_write_async(ctx, ctx->transfer_ctx, buf, extra->offset + offset, data, size); } GGML_CALL static void ggml_backend_vk_get_tensor_async(ggml_backend_t backend, const 
ggml_tensor * tensor, void * data, size_t offset, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_get_tensor_async(" << size << ")" << std::endl; #endif - GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type() || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (vk_transfer_ctx == nullptr) { + if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_transfer_ctx); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } - ggml_vk_buffer_read_async(vk_transfer_ctx, &extra->buffer_gpu, extra->offset + offset, data, size); + vk_buffer buf = extra->buffer_gpu.lock(); - UNUSED(backend); + ggml_vk_buffer_read_async(ctx, ctx->transfer_ctx, buf, extra->offset + offset, data, size); } GGML_CALL static bool ggml_backend_vk_cpy_tensor_async(ggml_backend_t backend, const ggml_tensor * src, ggml_tensor * dst) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_cpy_tensor_async()" << std::endl; #endif - if ((dst->buffer->buft == ggml_backend_vk_buffer_type() || dst->buffer->buft == ggml_backend_vk_host_buffer_type()) && ggml_backend_buffer_is_vk(src->buffer)) { + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + if ((dst->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || dst->buffer->buft == ggml_backend_vk_host_buffer_type()) && ggml_backend_buffer_is_vk(src->buffer)) { ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - if (vk_transfer_ctx == nullptr) { + if (ctx->transfer_ctx == nullptr) { // Initialize new transfer context - vk_transfer_ctx = ggml_vk_create_context(vk_device.transfer_queue); - ggml_vk_ctx_begin(vk_transfer_ctx); + ctx->transfer_ctx = ggml_vk_create_context(ctx, ctx->device.lock()->transfer_queue); + ggml_vk_ctx_begin(ctx, ctx->transfer_ctx); } - ggml_vk_buffer_copy_async(vk_transfer_ctx, &src_extra->buffer_gpu, src_extra->offset, &dst_extra->buffer_gpu, dst_extra->offset, ggml_nbytes(src)); + vk_buffer src_buf = src_extra->buffer_gpu.lock(); + vk_buffer dst_buf = dst_extra->buffer_gpu.lock(); + + ggml_vk_buffer_copy_async(ctx->transfer_ctx, src_buf, src_extra->offset, dst_buf, dst_extra->offset, ggml_nbytes(src)); return true; } return false; - - UNUSED(backend); } GGML_CALL static void ggml_backend_vk_synchronize(ggml_backend_t backend) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_synchronize()" << std::endl; #endif - if(vk_transfer_ctx == nullptr) { + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; + if(ctx->transfer_ctx == nullptr) { return; } - ggml_vk_ctx_end(vk_transfer_ctx); + ggml_vk_ctx_end(ctx->transfer_ctx); - for (auto& cpy : vk_transfer_ctx->in_memcpys) { + for (auto& cpy : ctx->transfer_ctx->in_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - ggml_vk_submit(vk_transfer_ctx, vk_fence); - VK_CHECK(vk_device.device.waitForFences({ vk_fence }, true, UINT64_MAX), 
"ggml_backend_vk_synchronize waitForFences"); - vk_device.device.resetFences({ vk_fence }); + ggml_vk_submit(ctx->transfer_ctx, ctx->fence); + VK_CHECK(ctx->device.lock()->device.waitForFences({ ctx->fence }, true, UINT64_MAX), "ggml_backend_vk_synchronize waitForFences"); + ctx->device.lock()->device.resetFences({ ctx->fence }); - for (auto& cpy : vk_transfer_ctx->out_memcpys) { + for (auto& cpy : ctx->transfer_ctx->out_memcpys) { memcpy(cpy.dst, cpy.src, cpy.n); } - vk_transfer_ctx = nullptr; - - UNUSED(backend); + ctx->transfer_ctx = nullptr; } GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) { - // ggml_backend_vk_context * vk_ctx = (ggml_backend_vk_context *)backend->context; + ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; for (int i = 0; i < cgraph->n_nodes; i++) { - ggml_vk_preallocate_buffers_graph(cgraph->nodes[i]); + ggml_vk_preallocate_buffers_graph(ctx, cgraph->nodes[i]); } - ggml_vk_preallocate_buffers(); + ggml_vk_preallocate_buffers(ctx); int last_node = cgraph->n_nodes - 1; @@ -4644,7 +4982,7 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml } for (int i = 0; i < cgraph->n_nodes; i++) { - ggml_vk_build_graph(cgraph->nodes[i], i == last_node); + ggml_vk_build_graph(ctx,cgraph->nodes[i], i == last_node); } ggml_compute_params params = {}; @@ -4657,19 +4995,19 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml continue; } - bool ok = ggml_vk_compute_forward(¶ms, node); + bool ok = ggml_vk_compute_forward(ctx, ¶ms, node); if (!ok) { fprintf(stderr, "%s: error: op not supported %s (%s)\n", __func__, node->name, ggml_op_name(node->op)); } #ifdef GGML_VULKAN_CHECK_RESULTS else { - ggml_vk_check_results_1(¶ms, node); + ggml_vk_check_results_1(ctx, ¶ms, node); } #endif GGML_ASSERT(ok); } - ggml_vk_graph_cleanup(); + ggml_vk_graph_cleanup(ctx); return true; @@ -4734,7 +5072,7 @@ GGML_CALL static bool ggml_backend_vk_supports_op(ggml_backend_t backend, const } return false; } break; - // case GGML_OP_DUP: + case GGML_OP_DUP: // case GGML_OP_REPEAT: // { // ggml_type src0_type = op->src[0]->type; @@ -4786,18 +5124,30 @@ static ggml_backend_i ggml_backend_vk_interface = { /* .supports_op = */ ggml_backend_vk_supports_op, }; -GGML_CALL ggml_backend_t ggml_backend_vk_init() { - ggml_vk_init(); // TODO: remove from ggml.c +GGML_CALL ggml_backend_t ggml_backend_vk_init(size_t idx) { + if (vk_instance.initialized[idx]) { + return vk_instance.backends[idx]; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_backend_vk_init(" << idx << ")" << std::endl; +#endif - ggml_backend_vk_context * ctx = new ggml_backend_vk_context { - /* .name = */ GGML_VK_NAME, + ggml_backend_vk_context * ctx = &vk_instance.contexts[idx]; + ggml_vk_init(ctx, idx); + ctx->name = GGML_VK_NAME + std::to_string(idx); + vk_instance.buffer_types[idx] = { + /* .iface = */ ggml_backend_vk_buffer_type_interface, + /* .context = */ new ggml_backend_vk_buffer_type_context{ ctx->name, ctx }, }; + vk_instance.initialized[idx] = true; ggml_backend_t vk_backend = new ggml_backend { /* .interface = */ ggml_backend_vk_interface, - /* .context = */ ctx + /* .context = */ &vk_instance.contexts[ctx->idx], }; + vk_instance.backends[idx] = vk_backend; + return vk_backend; } @@ -4805,20 +5155,47 @@ GGML_CALL bool ggml_backend_is_vk(ggml_backend_t backend) { return backend && backend->iface.get_name == ggml_backend_vk_name; } +GGML_CALL int ggml_backend_vk_get_device_count() { + return 
ggml_vk_get_device_count(); +} + +GGML_CALL void ggml_backend_vk_get_device_description(int device, char * description, size_t description_size) { + ggml_vk_get_device_description(device, description, description_size); +} + +GGML_CALL void ggml_backend_vk_get_device_memory(int device, size_t * free, size_t * total) { + GGML_ASSERT(device < vk_instance.device_indices.size()); + + vk::PhysicalDevice vkdev = vk_instance.instance.enumeratePhysicalDevices()[vk_instance.device_indices[device]]; + + vk::PhysicalDeviceMemoryProperties memprops = vkdev.getMemoryProperties(); + + for (const vk::MemoryHeap& heap : memprops.memoryHeaps) { + if (heap.flags & vk::MemoryHeapFlagBits::eDeviceLocal) { + *total = heap.size; + *free = heap.size; + break; + } + } +} + // backend registry GGML_CALL static ggml_backend_t ggml_backend_reg_vk_init(const char * params, void * user_data) { - ggml_backend_t vk_backend = ggml_backend_vk_init(); + ggml_backend_t vk_backend = ggml_backend_vk_init((int) (intptr_t) user_data); return vk_backend; UNUSED(params); - UNUSED(user_data); } extern "C" GGML_CALL int ggml_backend_vk_reg_devices(); GGML_CALL int ggml_backend_vk_reg_devices() { - ggml_backend_register(GGML_VK_NAME, ggml_backend_reg_vk_init, ggml_backend_vk_buffer_type(), nullptr); - return 1; + for (auto idx : vk_instance.device_indices) { + char name[128]; + snprintf(name, sizeof(name), "%s%ld", GGML_VK_NAME, idx); + ggml_backend_register(name, ggml_backend_reg_vk_init, ggml_backend_vk_buffer_type(idx), (void *) (intptr_t) idx); + } + return vk_instance.device_indices.size(); } // checks @@ -4874,7 +5251,7 @@ static void ggml_vk_print_tensor_area(const ggml_tensor * tensor, const void * d } } -static void ggml_vk_print_tensor(const ggml_tensor * tensor, const char * name) { +static void ggml_vk_print_tensor(ggml_backend_vk_context * ctx, const ggml_tensor * tensor, const char * name) { void * tensor_data = tensor->data; if (tensor->backend == GGML_BACKEND_GPU) { @@ -4883,7 +5260,7 @@ static void ggml_vk_print_tensor(const ggml_tensor * tensor, const char * name) ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - ggml_vk_buffer_read(&extra->buffer_gpu, extra->offset, tensor_data, tensor_size); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, extra->offset, tensor_data, tensor_size); } std::cerr << "TENSOR CHECK " << name << " (" << tensor->name << "): " << ggml_op_name(tensor->op) << std::endl; @@ -4944,7 +5321,7 @@ void * comp_result; size_t comp_size; size_t comp_nb[GGML_MAX_DIMS]; size_t check_counter = 0; -static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * tensor) { +static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor) { if (params->ith != 0) { return; } @@ -4966,7 +5343,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * /*.no_alloc =*/ false, }; - struct ggml_context * ctx = ggml_init(iparams); + struct ggml_context * ggml_ctx = ggml_init(iparams); struct ggml_tensor * src0_clone = nullptr; struct ggml_tensor * src1_clone = nullptr; @@ -4979,7 +5356,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * void * src1_buffer; if (src0 != nullptr) { - src0_clone = ggml_dup_tensor(ctx, src0); + src0_clone = ggml_dup_tensor(ggml_ctx, src0); src0_size = ggml_nbytes(src0); @@ -4995,7 +5372,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * for (int i3 = 0; i3 < src0->ne[3]; i3++) { for (int i2 = 0; i2 < 
src0->ne[2]; i2++) { const int idx = i3*src0->ne[2] + i2; - ggml_vk_buffer_read(&extra->buffer_gpu, offset + idx * src0->nb[2], ((char *)src0_clone->data + idx * src0_clone->nb[2]), src0->ne[1] * src0->nb[1]); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset + idx * src0->nb[2], ((char *)src0_clone->data + idx * src0_clone->nb[2]), src0->ne[1] * src0->nb[1]); } } @@ -5005,10 +5382,10 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * src0_clone->nb[i] = src0_clone->nb[i - 1]*src0_clone->ne[i - 1]; } } else { - if (offset + src0_size >= extra->buffer_gpu.size) { - src0_size = extra->buffer_gpu.size - offset; + if (offset + src0_size >= extra->buffer_gpu->size) { + src0_size = extra->buffer_gpu->size - offset; } - ggml_vk_buffer_read(&extra->buffer_gpu, offset, src0_clone->data, src0_size); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset, src0_clone->data, src0_size); memcpy(src0_clone->nb, src0->nb, sizeof(size_t) * GGML_MAX_DIMS); } } else { @@ -5016,13 +5393,13 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * } if (vk_output_tensor > 0 && vk_output_tensor == check_counter) { - ggml_vk_print_tensor(src0, "src0"); + ggml_vk_print_tensor(ctx, src0, "src0"); } ggml_vk_check_tensor(std::string(ggml_op_name(tensor->op)) + "->src0", src0_clone); } if (src1 != nullptr) { - src1_clone = ggml_dup_tensor(ctx, src1); + src1_clone = ggml_dup_tensor(ggml_ctx, src1); src1_size = ggml_nbytes(src1); @@ -5038,7 +5415,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * for (int i3 = 0; i3 < src1->ne[3]; i3++) { for (int i2 = 0; i2 < src1->ne[2]; i2++) { const int idx = i3*src1->ne[2] + i2; - ggml_vk_buffer_read(&extra->buffer_gpu, offset + idx * src1->nb[2], ((char *)src1_clone->data + idx * src1_clone->nb[2]), src1->ne[1] * src1->nb[1]); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset + idx * src1->nb[2], ((char *)src1_clone->data + idx * src1_clone->nb[2]), src1->ne[1] * src1->nb[1]); } } @@ -5048,10 +5425,10 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * src1_clone->nb[i] = src1_clone->nb[i - 1]*src1_clone->ne[i - 1]; } } else { - if (offset + src1_size >= extra->buffer_gpu.size) { - src1_size = extra->buffer_gpu.size - offset; + if (offset + src1_size >= extra->buffer_gpu->size) { + src1_size = extra->buffer_gpu->size - offset; } - ggml_vk_buffer_read(&extra->buffer_gpu, offset, src1_clone->data, src1_size); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, offset, src1_clone->data, src1_size); memcpy(src1_clone->nb, src1->nb, sizeof(size_t) * GGML_MAX_DIMS); } } else { @@ -5059,7 +5436,7 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * } if (vk_output_tensor > 0 && vk_output_tensor == check_counter) { - ggml_vk_print_tensor(src1, "src1"); + ggml_vk_print_tensor(ctx, src1, "src1"); std::cerr << "TENSOR CHECK: " << ggml_op_name(src1_clone->op) << " (check " << check_counter << ")" << std::endl; std::cerr << "src1_clone=" << tensor << " src1_clone->backend: " << src1_clone->backend << " src1_clone->type: " << ggml_type_name(src1_clone->type) << " ne0=" << src1_clone->ne[0] << " nb0=" << src1_clone->nb[0] << " ne1=" << src1_clone->ne[1] << " nb1=" << src1_clone->nb[1] << " ne2=" << src1_clone->ne[2] << " nb2=" << src1_clone->nb[2] << " ne3=" << src1_clone->ne[3] << " nb3=" << src1_clone->nb[3] << std::endl; if (src1->src[0] != nullptr) { @@ -5082,51 +5459,51 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, 
ggml_tensor * } if (tensor->op == GGML_OP_MUL_MAT) { - tensor_clone = ggml_mul_mat(ctx, src0_clone, src1_clone); + tensor_clone = ggml_mul_mat(ggml_ctx, src0_clone, src1_clone); } else if (tensor->op == GGML_OP_MUL) { - tensor_clone = ggml_mul(ctx, src0_clone, src1_clone); + tensor_clone = ggml_mul(ggml_ctx, src0_clone, src1_clone); } else if (tensor->op == GGML_OP_SCALE) { - tensor_clone = ggml_scale(ctx, src0_clone, ((float *)tensor->op_params)[0]); + tensor_clone = ggml_scale(ggml_ctx, src0_clone, ((float *)tensor->op_params)[0]); } else if (tensor->op == GGML_OP_SQR) { - tensor_clone = ggml_sqr(ctx, src0_clone); + tensor_clone = ggml_sqr(ggml_ctx, src0_clone); } else if (tensor->op == GGML_OP_CLAMP) { - tensor_clone = ggml_clamp(ctx, src0_clone, ((float *)tensor->op_params)[0], ((float *)tensor->op_params)[1]); + tensor_clone = ggml_clamp(ggml_ctx, src0_clone, ((float *)tensor->op_params)[0], ((float *)tensor->op_params)[1]); } else if (tensor->op == GGML_OP_ADD) { - tensor_clone = ggml_add(ctx, src0_clone, src1_clone); + tensor_clone = ggml_add(ggml_ctx, src0_clone, src1_clone); } else if (tensor->op == GGML_OP_NORM) { - tensor_clone = ggml_norm(ctx, src0_clone, *(float *)tensor->op_params); + tensor_clone = ggml_norm(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_RMS_NORM) { - tensor_clone = ggml_rms_norm(ctx, src0_clone, *(float *)tensor->op_params); + tensor_clone = ggml_rms_norm(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_SOFT_MAX) { if (src1 != nullptr) { - tensor_clone = ggml_soft_max_ext(ctx, src0_clone, src1_clone, *(float *)tensor->op_params); + tensor_clone = ggml_soft_max_ext(ggml_ctx, src0_clone, src1_clone, *(float *)tensor->op_params); } else { - tensor_clone = ggml_soft_max(ctx, src0_clone); + tensor_clone = ggml_soft_max(ggml_ctx, src0_clone); } } else if (tensor->op == GGML_OP_DIAG_MASK_INF) { - tensor_clone = ggml_diag_mask_inf(ctx, src0_clone, *(float *)tensor->op_params); + tensor_clone = ggml_diag_mask_inf(ggml_ctx, src0_clone, *(float *)tensor->op_params); } else if (tensor->op == GGML_OP_ROPE) { const int n_dims = ((int32_t *) tensor->op_params)[1]; const int mode = ((int32_t *) tensor->op_params)[2]; - const int n_ctx = ((int32_t *) tensor->op_params)[3]; - const int n_orig_ctx = ((int32_t *) tensor->op_params)[4]; + const int n_ggml_ctx = ((int32_t *) tensor->op_params)[3]; + const int n_orig_ggml_ctx = ((int32_t *) tensor->op_params)[4]; float freq_base = ((float *) tensor->op_params)[5]; float freq_scale = ((float *) tensor->op_params)[6]; float ext_factor = ((float *) tensor->op_params)[7]; float attn_factor = ((float *) tensor->op_params)[8]; float beta_fast = ((float *) tensor->op_params)[9]; float beta_slow = ((float *) tensor->op_params)[10]; - tensor_clone = ggml_rope_custom(ctx, src0_clone, src1_clone, n_dims, mode, n_ctx, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); + tensor_clone = ggml_rope_custom(ggml_ctx, src0_clone, src1_clone, n_dims, mode, n_ggml_ctx, n_orig_ggml_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow); } else if (tensor->op == GGML_OP_UNARY) { switch (ggml_get_unary_op(tensor)) { case GGML_UNARY_OP_SILU: - tensor_clone = ggml_silu(ctx, src0_clone); + tensor_clone = ggml_silu(ggml_ctx, src0_clone); break; case GGML_UNARY_OP_GELU: - tensor_clone = ggml_gelu(ctx, src0_clone); + tensor_clone = ggml_gelu(ggml_ctx, src0_clone); break; case GGML_UNARY_OP_RELU: - tensor_clone = ggml_relu(ctx, 
src0_clone); + tensor_clone = ggml_relu(ggml_ctx, src0_clone); break; default: std::cerr << "Missing vk_check_results OP: " << ggml_op_name(tensor->op) << std::endl; @@ -5134,40 +5511,40 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * } } else if (tensor->op == GGML_OP_CPY || tensor->op == GGML_OP_DUP) { if (src1 == nullptr) { - tensor_clone = ggml_dup(ctx, src0_clone); + tensor_clone = ggml_dup(ggml_ctx, src0_clone); tensor_clone->type = tensor->type; } else { - tensor_clone = ggml_cpy(ctx, src0_clone, src1_clone); + tensor_clone = ggml_cpy(ggml_ctx, src0_clone, src1_clone); } } else if (tensor->op == GGML_OP_CONT) { - tensor_clone = ggml_cont_4d(ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); + tensor_clone = ggml_cont_4d(ggml_ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); } else if (tensor->op == GGML_OP_RESHAPE) { - tensor_clone = ggml_reshape_4d(ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); + tensor_clone = ggml_reshape_4d(ggml_ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3]); } else if (tensor->op == GGML_OP_VIEW) { - tensor_clone = ggml_view_4d(ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3], tensor->nb[1], tensor->nb[2], tensor->nb[3], ((int32_t *) tensor->op_params)[0]); + tensor_clone = ggml_view_4d(ggml_ctx, src0_clone, tensor->ne[0], tensor->ne[1], tensor->ne[2], tensor->ne[3], tensor->nb[1], tensor->nb[2], tensor->nb[3], ((int32_t *) tensor->op_params)[0]); } else if (tensor->op == GGML_OP_PERMUTE) { int32_t * params = (int32_t *)tensor->op_params; - tensor_clone = ggml_permute(ctx, src0_clone, params[0], params[1], params[2], params[3]); + tensor_clone = ggml_permute(ggml_ctx, src0_clone, params[0], params[1], params[2], params[3]); } else if (tensor->op == GGML_OP_TRANSPOSE) { - tensor_clone = ggml_transpose(ctx, src0_clone); + tensor_clone = ggml_transpose(ggml_ctx, src0_clone); } else { std::cerr << "Missing vk_check_results OP: " << ggml_op_name(tensor->op) << std::endl; GGML_ASSERT(false); } // Disable vulkan here to avoid the hooks in ggml.c - vk_disable = true; + ctx->disable = true; - ggml_cgraph * cgraph = ggml_new_graph(ctx); + ggml_cgraph * cgraph = ggml_new_graph(ggml_ctx); ggml_build_forward_expand(cgraph, tensor_clone); - ggml_graph_compute_with_ctx(ctx, cgraph, 8); + ggml_graph_compute_with_ctx(ggml_ctx, cgraph, 8); - vk_disable = false; + ctx->disable = false; ggml_vk_check_tensor(ggml_op_name(tensor->op), tensor_clone); if (vk_output_tensor > 0 && vk_output_tensor == check_counter) { - ggml_vk_print_tensor(tensor_clone, "tensor_clone"); + ggml_vk_print_tensor(ctx, tensor_clone, "tensor_clone"); } comp_size = ggml_nbytes(tensor_clone); @@ -5183,10 +5560,10 @@ static void ggml_vk_check_results_0(ggml_compute_params * params, ggml_tensor * free(src1_buffer); } - ggml_free(ctx); + ggml_free(ggml_ctx); } -void ggml_vk_check_results_1(ggml_compute_params * params, ggml_tensor * tensor) { +static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor) { if (params->ith != 0) { return; } @@ -5208,11 +5585,11 @@ void ggml_vk_check_results_1(ggml_compute_params * params, ggml_tensor * tensor) ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; - if (extra->offset + tensor_size >= extra->buffer_gpu.size) { - tensor_size = extra->buffer_gpu.size - (extra->offset); + if (extra->offset + tensor_size >= 
extra->buffer_gpu->size) { + tensor_size = extra->buffer_gpu->size - (extra->offset); } - ggml_vk_buffer_read(&extra->buffer_gpu, extra->offset, tensor_data, tensor_size); + ggml_vk_buffer_read(ctx, extra->buffer_gpu, extra->offset, tensor_data, tensor_size); } float first_error_result = -1.0f; @@ -5339,4 +5716,10 @@ void ggml_vk_check_results_1(ggml_compute_params * params, ggml_tensor * tensor) free(tensor_data); } } + +void ggml_vk_check_results_1_cpu_assist(struct ggml_compute_params * params, struct ggml_tensor * tensor) { + ggml_backend_vk_context * ctx = &vk_instance.contexts[0]; + + ggml_vk_check_results_0(ctx, params, tensor); +} #endif diff --git a/ggml-vulkan.h b/ggml-vulkan.h index eb8a148e2..9645126b4 100644 --- a/ggml-vulkan.h +++ b/ggml-vulkan.h @@ -8,24 +8,29 @@ extern "C" { #endif #define GGML_VK_NAME "Vulkan" +#define GGML_VK_MAX_DEVICES 16 -GGML_API void ggml_vk_init(void); +GGML_API void ggml_vk_init_cpu_assist(void); -GGML_API void ggml_vk_preallocate_buffers_graph(struct ggml_tensor * node); -GGML_API void ggml_vk_preallocate_buffers(void); -GGML_API void ggml_vk_build_graph(struct ggml_tensor * node, bool last_node); -GGML_API bool ggml_vk_compute_forward(struct ggml_compute_params * params, struct ggml_tensor * tensor); +GGML_API void ggml_vk_preallocate_buffers_graph_cpu_assist(struct ggml_tensor * node); +GGML_API void ggml_vk_preallocate_buffers_cpu_assist(void); +GGML_API void ggml_vk_build_graph_cpu_assist(struct ggml_tensor * node, bool last_node); +GGML_API bool ggml_vk_compute_forward_cpu_assist(struct ggml_compute_params * params, struct ggml_tensor * tensor); #ifdef GGML_VULKAN_CHECK_RESULTS -void ggml_vk_check_results_1(struct ggml_compute_params * params, struct ggml_tensor * tensor); +void ggml_vk_check_results_1_cpu_assist(struct ggml_compute_params * params, struct ggml_tensor * tensor); #endif -GGML_API void ggml_vk_graph_cleanup(void); +GGML_API void ggml_vk_graph_cleanup_cpu_assist(void); +GGML_API void ggml_vk_free_cpu_assist(void); // backend API -GGML_API GGML_CALL ggml_backend_t ggml_backend_vk_init(void); +GGML_API GGML_CALL ggml_backend_t ggml_backend_vk_init(size_t dev_num); GGML_API GGML_CALL bool ggml_backend_is_vk(ggml_backend_t backend); +GGML_API GGML_CALL int ggml_backend_vk_get_device_count(void); +GGML_API GGML_CALL void ggml_backend_vk_get_device_description(int device, char * description, size_t description_size); +GGML_API GGML_CALL void ggml_backend_vk_get_device_memory(int device, size_t * free, size_t * total); -GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_buffer_type(void); +GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_buffer_type(size_t dev_num); // pinned host buffer for use with the CPU backend for faster copies between CPU and GPU GGML_API GGML_CALL ggml_backend_buffer_type_t ggml_backend_vk_host_buffer_type(void); diff --git a/ggml.c b/ggml.c index b9ec0c981..f783a6fd3 100644 --- a/ggml.c +++ b/ggml.c @@ -2343,7 +2343,7 @@ struct ggml_context * ggml_init(struct ggml_init_params params) { #elif defined(GGML_USE_CLBLAST) ggml_cl_init(); #elif defined(GGML_USE_VULKAN) - ggml_vk_init(); + ggml_vk_init_cpu_assist(); #elif defined(GGML_USE_SYCL) ggml_init_sycl(); #endif @@ -14850,10 +14850,10 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_CPU); GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_CPU); #elif defined(GGML_USE_VULKAN) - const bool skip_cpu = 
ggml_vk_compute_forward(params, tensor);
+ const bool skip_cpu = ggml_vk_compute_forward_cpu_assist(params, tensor);
 #ifdef GGML_VULKAN_CHECK_RESULTS
 if (skip_cpu) {
- ggml_vk_check_results_1(params, tensor);
+ ggml_vk_check_results_1_cpu_assist(params, tensor);
 }
 #endif
 if (skip_cpu) {
@@ -17269,12 +17269,12 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) {
 #ifdef GGML_USE_VULKAN
 for (int i = 0; i < cgraph->n_nodes; i++) {
- ggml_vk_preallocate_buffers_graph(cgraph->nodes[i]);
+ ggml_vk_preallocate_buffers_graph_cpu_assist(cgraph->nodes[i]);
 }
- ggml_vk_preallocate_buffers();
+ ggml_vk_preallocate_buffers_cpu_assist();

 for (int i = 0; i < cgraph->n_nodes; i++) {
- ggml_vk_build_graph(cgraph->nodes[i], i == cgraph->n_nodes - 1);
+ ggml_vk_build_graph_cpu_assist(cgraph->nodes[i], i == cgraph->n_nodes - 1);
 }
 #endif
@@ -17330,7 +17330,7 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) {
 }

 #ifdef GGML_USE_VULKAN
- ggml_vk_graph_cleanup();
+ ggml_vk_graph_cleanup_cpu_assist();
 #endif

 // performance stats (graph)
diff --git a/llama.cpp b/llama.cpp
index f3c5146d1..c45ae1d50 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -1355,7 +1355,7 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_offload(int gpu) {
 #elif defined(GGML_USE_CUBLAS)
 buft = ggml_backend_cuda_buffer_type(gpu);
 #elif defined(GGML_USE_VULKAN)
- buft = ggml_backend_vk_buffer_type();
+ buft = ggml_backend_vk_buffer_type(gpu);
 #elif defined(GGML_USE_SYCL)
 buft = ggml_backend_sycl_buffer_type(gpu);
 #elif defined(GGML_USE_CLBLAST)
@@ -1392,6 +1392,33 @@ static ggml_backend_buffer_type_t llama_default_buffer_type_split(int fallback_g
 GGML_UNUSED(tensor_split);
}

+static size_t llama_get_device_count() {
+#if defined(GGML_USE_CUBLAS)
+ return ggml_backend_cuda_get_device_count();
+#elif defined(GGML_USE_VULKAN)
+ return ggml_backend_vk_get_device_count();
+#else
+ return 1;
+#endif
+}
+
+static size_t llama_get_device_memory(int device) {
+#if defined(GGML_USE_CUBLAS)
+ size_t total;
+ size_t free;
+ ggml_backend_cuda_get_device_memory(device, &total, &free);
+ return free;
+#elif defined(GGML_USE_VULKAN)
+ size_t total;
+ size_t free;
+ ggml_backend_vk_get_device_memory(device, &total, &free);
+ return free;
+#else
+ return 1;
+ GGML_UNUSED(device);
+#endif
+}
+
 //
 // globals
 //
@@ -1763,6 +1790,10 @@ struct llama_context {
 ggml_backend_free(backend);
 }

+#ifdef GGML_USE_VULKAN
+ ggml_vk_free_cpu_assist();
+#endif
+
 ggml_backend_buffer_free(buf_input);
 ggml_free(ctx_input);
 }
@@ -3436,22 +3467,18 @@ static bool llm_load_tensors(
 model.buft_layer[i] = llama_default_buffer_type_cpu(true);
 }

-#ifdef GGML_USE_CUBLAS
 if (split_mode == LLAMA_SPLIT_LAYER) {
 // calculate the split points
- int device_count = ggml_backend_cuda_get_device_count();
+ int device_count = llama_get_device_count();
 bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + device_count, [](float x) { return x == 0.0f; });
- float splits[GGML_CUDA_MAX_DEVICES];
+ std::vector<float> splits(device_count);
 if (all_zero) {
 // default split, by free memory
 for (int i = 0; i < device_count; ++i) {
- size_t total;
- size_t free;
- ggml_backend_cuda_get_device_memory(i, &total, &free);
- splits[i] = free;
+ splits[i] = llama_get_device_memory(i);
 }
 } else {
- std::copy(tensor_split, tensor_split + device_count, splits);
+ std::copy(tensor_split, tensor_split + device_count, splits.begin());
 }

 // sum and normalize the splits to get the split points
@@ -3467,19 +3494,17 @@ static bool
llm_load_tensors(
 // assign the repeating layers to the devices according to the splits
 int act_gpu_layers = std::min(n_gpu_layers, (int)n_layer + 1);
 for (int64_t i = i_gpu_start; i < n_layer; ++i) {
- int layer_gpu = std::upper_bound(splits, splits + device_count, float(i - i_gpu_start)/act_gpu_layers) - splits;
+ int layer_gpu = std::upper_bound(splits.begin(), splits.begin() + device_count, float(i - i_gpu_start)/act_gpu_layers) - splits.begin();
 model.buft_layer[i] = llama_default_buffer_type_offload(layer_gpu);
 }
 // assign the output layer
 if (n_gpu_layers > n_layer) {
- int layer_gpu = std::upper_bound(splits, splits + device_count, float(act_gpu_layers - 1)/act_gpu_layers) - splits;
+ int layer_gpu = std::upper_bound(splits.begin(), splits.begin() + device_count, float(act_gpu_layers - 1)/act_gpu_layers) - splits.begin();
 model.buft_output = llama_default_buffer_type_offload(layer_gpu);
 } else {
 model.buft_output = llama_default_buffer_type_cpu(true);
 }
- } else
-#endif
- {
+ } else {
 ggml_backend_buffer_type_t split_buft;
 if (split_mode == LLAMA_SPLIT_ROW) {
 split_buft = llama_default_buffer_type_split(main_gpu, tensor_split);
@@ -10483,6 +10508,8 @@ size_t llama_max_devices(void) {
 return GGML_CUDA_MAX_DEVICES;
 #elif defined(GGML_USE_SYCL)
 return GGML_SYCL_MAX_DEVICES;
+#elif defined(GGML_USE_VULKAN)
+ return GGML_VK_MAX_DEVICES;
 #else
 return 1;
 #endif
@@ -10690,13 +10717,15 @@ struct llama_context * llama_new_context_with_model(
 }
 #elif defined(GGML_USE_VULKAN)
 if (model->n_gpu_layers > 0) {
- ggml_backend_t backend = ggml_backend_vk_init();
- if (backend == nullptr) {
- LLAMA_LOG_ERROR("%s: failed to initialize Vulkan backend\n", __func__);
- llama_free(ctx);
- return nullptr;
+ for (int device = 0; device < ggml_backend_vk_get_device_count(); ++device) {
+ ggml_backend_t backend = ggml_backend_vk_init(device);
+ if (backend == nullptr) {
+ LLAMA_LOG_ERROR("%s: failed to initialize Vulkan%d backend\n", __func__, device);
+ llama_free(ctx);
+ return nullptr;
+ }
+ ctx->backends.push_back(backend);
 }
- ctx->backends.push_back(backend);
 }
 #elif defined(GGML_USE_SYCL)
 if (model->n_gpu_layers > 0) {

From 0ef46da632c32faa1a538e5dc180994e8bbb46e1 Mon Sep 17 00:00:00 2001
From: Xiao-Yong Jin
Date: Wed, 7 Feb 2024 02:17:25 -0600
Subject: [PATCH 691/859] llava-cli : always tokenize special tokens (#5382)

* llava-cli: tokenize special tokens in prompt

* llava-cli: use the escape CLI argument, remove incomplete separate escaping process
---
 examples/llava/llava-cli.cpp | 14 +-------------
 1 file changed, 1 insertion(+), 13 deletions(-)

diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp
index 6ac70ba69..031e9806d 100644
--- a/examples/llava/llava-cli.cpp
+++ b/examples/llava/llava-cli.cpp
@@ -34,7 +34,7 @@ static bool eval_id(struct llama_context * ctx_llama, int id, int * n_past) {
 static bool eval_string(struct llama_context * ctx_llama, const char* str, int n_batch, int * n_past, bool add_bos){
 std::string str2 = str;
- std::vector<llama_token> embd_inp = ::llama_tokenize(ctx_llama, str2, add_bos);
+ std::vector<llama_token> embd_inp = ::llama_tokenize(ctx_llama, str2, add_bos, true);
 eval_tokens(ctx_llama, embd_inp, n_batch, n_past);
 return true;
}
@@ -152,20 +152,8 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_
 size_t image_pos = prompt.find("<image>");
 if (image_pos != std::string::npos) {
 // new templating mode: Provide the full prompt including system message and use <image> as a placeholder for the image
-
 system_prompt = prompt.substr(0, image_pos);
 user_prompt =
From 0ef46da632c32faa1a538e5dc180994e8bbb46e1 Mon Sep 17 00:00:00 2001
From: Xiao-Yong Jin
Date: Wed, 7 Feb 2024 02:17:25 -0600
Subject: [PATCH 691/859] llava-cli : always tokenize special tokens (#5382)

* llava-cli: tokenize special tokens in prompt

* llava-cli: use the escape CLI argument, remove incomplete separate escaping
  process

---
 examples/llava/llava-cli.cpp | 14 +-------------
 1 file changed, 1 insertion(+), 13 deletions(-)

diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp
index 6ac70ba69..031e9806d 100644
--- a/examples/llava/llava-cli.cpp
+++ b/examples/llava/llava-cli.cpp
@@ -34,7 +34,7 @@ static bool eval_id(struct llama_context * ctx_llama, int id, int * n_past) {

 static bool eval_string(struct llama_context * ctx_llama, const char* str, int n_batch, int * n_past, bool add_bos){
     std::string              str2     = str;
-    std::vector<llama_token> embd_inp = ::llama_tokenize(ctx_llama, str2, add_bos);
+    std::vector<llama_token> embd_inp = ::llama_tokenize(ctx_llama, str2, add_bos, true);
     eval_tokens(ctx_llama, embd_inp, n_batch, n_past);
     return true;
 }
@@ -152,20 +152,8 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_
     size_t image_pos = prompt.find("<image>");
     if (image_pos != std::string::npos) {
         // new templating mode: Provide the full prompt including system message and use <image> as a placeholder for the image
-
         system_prompt = prompt.substr(0, image_pos);
         user_prompt = prompt.substr(image_pos + std::string("<image>").length());
-        // We replace \n with actual newlines in user_prompt, just in case -e was not used in templating string
-        size_t pos = 0;
-        while ((pos = user_prompt.find("\\n", pos)) != std::string::npos) {
-            user_prompt.replace(pos, 2, "\n");
-            pos += 1; // Advance past the replaced newline
-        }
-        while ((pos = system_prompt.find("\\n", pos)) != std::string::npos) {
-            system_prompt.replace(pos, 2, "\n");
-            pos += 1; // Advance past the replaced newline
-        }
-
         printf("system_prompt: %s\n", system_prompt.c_str());
         printf("user_prompt: %s\n", user_prompt.c_str());
     } else {
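The llava-cli patch above switches the common tokenizer helper into special-token mode, so control tokens embedded in the prompt are mapped to single vocab ids instead of being split into plain-text pieces. A minimal sketch of the call as the patched `eval_string` now makes it (not a standalone program; it assumes the llama.cpp `common.h`/`llama.h` headers and an initialized context):

```cpp
#include "common.h"
#include "llama.h"

#include <string>
#include <vector>

// Sketch: special = true makes tokens such as "<s>" and "</s>" parse as
// single special tokens rather than ordinary text.
static std::vector<llama_token> tokenize_prompt(llama_context * ctx, const std::string & prompt, bool add_bos) {
    return ::llama_tokenize(ctx, prompt, add_bos, /*special=*/true);
}
```

Escaping of `\n` sequences is left to the existing `-e`/escape CLI handling, which is why the manual replace loops are deleted.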
From 10afa6f1d11ebc9fcc1085f468170002cbf6e2b5 Mon Sep 17 00:00:00 2001
From: Neo Zhang Jianyu
Date: Wed, 7 Feb 2024 18:16:55 +0800
Subject: [PATCH 692/859] [SYCL] update install make by w64devkit (#5297)

---
 README-sycl.md | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/README-sycl.md b/README-sycl.md
index 7aa4274a9..e3a8e726e 100644
--- a/README-sycl.md
+++ b/README-sycl.md
@@ -311,15 +311,13 @@ Output (example):

 a. Download & install cmake for Windows: https://cmake.org/download/

-b. Download & install make for Windows provided by mingw-w64
+b. Download & install mingw-w64 make for Windows provided by w64devkit

-- Download binary package for Windows in https://github.com/niXman/mingw-builds-binaries/releases.
+- Download the latest fortran version of [w64devkit](https://github.com/skeeto/w64devkit/releases).

-- Like [x86_64-13.2.0-release-win32-seh-msvcrt-rt_v11-rev1.7z](https://github.com/niXman/mingw-builds-binaries/releases/download/13.2.0-rt_v11-rev1/x86_64-13.2.0-release-win32-seh-msvcrt-rt_v11-rev1.7z).
+- Extract `w64devkit` on your pc.

-- Unzip the binary package. In the **bin** sub-folder and rename **xxx-make.exe** to **make.exe**.
-
-- Add the **bin** folder path in the Windows system PATH environment.
+- Add the **bin** folder path in the Windows system PATH environment, like `C:\xxx\w64devkit\bin\`.

 ### Build locally:

From aa7ab99be29b633263803f2e185265734c2d9427 Mon Sep 17 00:00:00 2001
From: Johannes Gäßler
Date: Wed, 7 Feb 2024 12:40:26 +0100
Subject: [PATCH 693/859] CUDA: fixed mmvq kernel for bs 2,3,4 and -sm row
 (#5386)

---
 ggml-cuda.cu | 66 +++++++++++++++++++++++++++++++---------------------
 1 file changed, 39 insertions(+), 27 deletions(-)

diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 3b828375e..db9da2459 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -5313,7 +5313,7 @@ template static __global__ void

 template <int ncols_y_template, int qk, int qi, typename block_q_t, int vdr, vec_dot_q_cuda_t vec_dot>
 static __global__ void mul_mat_vec_q(
     const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst,
-    const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par) {
+    const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par, const int nrows_dst) {

     const int ncols_y = ncols_y_template != 0 ? ncols_y_template : ncols_y_par;
@@ -5352,7 +5352,7 @@ static __global__ void mul_mat_vec_q(
         tmp[j] = warp_reduce_sum(tmp[j]);

         if (threadIdx.x == 0) {
-            dst[j*nrows_x + row] = tmp[j];
+            dst[j*nrows_dst + row] = tmp[j];
         }
     }
 }
@@ -6828,7 +6828,7 @@ static void convert_mul_mat_vec_f16_cuda(const void * vx, const dfloat * y, floa

 template <int qk, int qi, typename block_q_t, int vdr, vec_dot_q_cuda_t vec_dot>
 static void mul_mat_vec_q_cuda(
     const void * vx, const void * vy, float * dst,
-    const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, cudaStream_t stream) {
+    const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, const int nrows_dst, cudaStream_t stream) {

     GGML_ASSERT(ncols_x % qk == 0);
     GGML_ASSERT(ncols_y <= 4);
@@ -6839,40 +6839,40 @@ static void mul_mat_vec_q_cuda(
     switch (ncols_y) {
         case 1:
             mul_mat_vec_q<1, qk, qi, block_q_t, vdr, vec_dot>
-                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
             break;
         case 2:
             mul_mat_vec_q<2, qk, qi, block_q_t, vdr, vec_dot>
-                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
             break;
         case 3:
             mul_mat_vec_q<3, qk, qi, block_q_t, vdr, vec_dot>
-                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
             break;
         case 4:
             mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot>
-                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
             break;
         // case 5:
         //     mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot>
-        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
         //     break;
         // case 6:
         //     mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot>
-        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
         //     break;
         // case 7:
         //     mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot>
-        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
         //     break;
         // case 8:
         //     mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot>
-        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
         //     break;
         default:
             GGML_ASSERT(false);
             // mul_mat_vec_q<0, qk, qi, block_q_t, vdr, vec_dot>
-            //     <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y);
+            //     <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
             break;
     }
 }
@@ -8391,7 +8391,7 @@ static void ggml_cuda_op_mul_mat_q(
     CUDA_CHECK(cudaGetDevice(&id));

     // the main device has a larger memory buffer to hold the results from all GPUs
-    // nrows_dst == nrows of the matrix that the dequantize_mul_mat kernel writes into
+    // nrows_dst == nrows of the matrix that the kernel writes into
     const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff;

     switch (src0->type) {
@@ -8525,58 +8525,70 @@ static void ggml_cuda_op_mul_mat_vec_q(

     const int64_t ne00 = src0->ne[0];
     const int64_t row_diff = row_high - row_low;

+    const int64_t ne10 = src1->ne[0];
+    GGML_ASSERT(ne10 % QK8_1 == 0);
+
+    const int64_t ne0 = dst->ne[0];
+
+    int id;
+    CUDA_CHECK(cudaGetDevice(&id));
+
+    // the main device has a larger memory buffer to hold the results from all GPUs
+    // nrows_dst == nrows of the matrix that the kernel writes into
+    const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff;
+
     switch (src0->type) {
         case GGML_TYPE_Q4_0:
             mul_mat_vec_q_cuda<QK4_0, QI4_0, block_q4_0, VDR_Q4_0_Q8_1_MMVQ, vec_dot_q4_0_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q4_1:
             mul_mat_vec_q_cuda<QK4_1, QI4_1, block_q4_1, VDR_Q4_1_Q8_1_MMVQ, vec_dot_q4_1_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q5_0:
             mul_mat_vec_q_cuda<QK5_0, QI5_0, block_q5_0, VDR_Q5_0_Q8_1_MMVQ, vec_dot_q5_0_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q5_1:
             mul_mat_vec_q_cuda<QK5_1, QI5_1, block_q5_1, VDR_Q5_1_Q8_1_MMVQ, vec_dot_q5_1_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q8_0:
             mul_mat_vec_q_cuda<QK8_0, QI8_0, block_q8_0, VDR_Q8_0_Q8_1_MMVQ, vec_dot_q8_0_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q2_K:
             mul_mat_vec_q_cuda<QK_K, QI2_K, block_q2_K, VDR_Q2_K_Q8_1_MMVQ, vec_dot_q2_K_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q3_K:
             mul_mat_vec_q_cuda<QK_K, QI3_K, block_q3_K, VDR_Q3_K_Q8_1_MMVQ, vec_dot_q3_K_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q4_K:
             mul_mat_vec_q_cuda<QK_K, QI4_K, block_q4_K, VDR_Q4_K_Q8_1_MMVQ, vec_dot_q4_K_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q5_K:
             mul_mat_vec_q_cuda<QK_K, QI5_K, block_q5_K, VDR_Q5_K_Q8_1_MMVQ, vec_dot_q5_K_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_Q6_K:
             mul_mat_vec_q_cuda<QK_K, QI6_K, block_q6_K, VDR_Q6_K_Q8_1_MMVQ, vec_dot_q6_K_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_IQ2_XXS:
             mul_mat_vec_q_cuda<QK_K, QI2_XXS, block_iq2_xxs, 1, vec_dot_iq2_xxs_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_IQ2_XS:
             mul_mat_vec_q_cuda<QK_K, QI2_XS, block_iq2_xs, 1, vec_dot_iq2_xs_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         case GGML_TYPE_IQ3_XXS:
             mul_mat_vec_q_cuda<QK_K, QI3_XXS, block_iq3_xxs, 1, vec_dot_iq3_xxs_q8_1>
-                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, stream);
+                (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream);
             break;
         default:
             GGML_ASSERT(false);
@@ -9909,7 +9921,7 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1
             ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false);
         }
     } else {
-        if (src1->ne[1] <= 4 && min_compute_capability >=
MIN_CC_DP4A && ggml_is_quantized(src0->type)) { + if (src1->ne[1] <= 4 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type) && src1->type == GGML_TYPE_F32) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); } else if (use_mul_mat_q) { ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); From b906596bb775b17656c2e51d5ab1b347faab6860 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Tom=C5=A1=C3=ADk?= Date: Wed, 7 Feb 2024 19:44:52 +0100 Subject: [PATCH 694/859] Add Ava in the list of llama.cpp UIs (#4362) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 0509b0ba1..7e1187349 100644 --- a/README.md +++ b/README.md @@ -150,6 +150,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [ollama/ollama](https://github.com/ollama/ollama) - [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui) (AGPL) - [psugihara/FreeChat](https://github.com/psugihara/FreeChat) +- [cztomsik/ava](https://github.com/cztomsik/ava) (MIT) - [ptsochantaris/emeltal](https://github.com/ptsochantaris/emeltal) - [pythops/tenere](https://github.com/pythops/tenere) (AGPL) - [semperai/amica](https://github.com/semperai/amica) From 8c933b70c21e05b685d476d0a1f36b34cbda7365 Mon Sep 17 00:00:00 2001 From: Ebey Abraham Date: Wed, 7 Feb 2024 21:11:30 +0000 Subject: [PATCH 695/859] fix typo in readme (#5399) Co-authored-by: Ebey Abraham --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 7e1187349..66166c01b 100644 --- a/README.md +++ b/README.md @@ -680,7 +680,7 @@ python3 -m pip install -r requirements.txt python3 convert.py models/mymodel/ # [Optional] for models using BPE tokenizers -python convert.py models/mymodel/ --vocabtype bpe +python convert.py models/mymodel/ --vocab-type bpe # quantize the model to 4-bits (using Q4_K_M method) ./quantize ./models/mymodel/ggml-model-f16.gguf ./models/mymodel/ggml-model-Q4_K_M.gguf Q4_K_M From c4fbb6717c684196bd13b72d21747557130914e8 Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Wed, 7 Feb 2024 22:39:23 +0100 Subject: [PATCH 696/859] CMAKE_OSX_ARCHITECTURES for MacOS cross compilation (#5393) Co-authored-by: Jared Van Bortel --- CMakeLists.txt | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 427015be5..a544f2da6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -850,7 +850,9 @@ endif() set(ARCH_FLAGS "") -if ((${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm") OR (${CMAKE_SYSTEM_PROCESSOR} MATCHES "aarch64") OR ("${CMAKE_GENERATOR_PLATFORM_LWR}" MATCHES "arm64")) +if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR CMAKE_GENERATOR_PLATFORM_LWR STREQUAL "arm64" OR + (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_GENERATOR_PLATFORM_LWR AND + CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64|arm.*|ARM64)$")) message(STATUS "ARM detected") if (MSVC) add_compile_definitions(__ARM_NEON) @@ -876,7 +878,9 @@ if ((${CMAKE_SYSTEM_PROCESSOR} MATCHES "arm") OR (${CMAKE_SYSTEM_PROCESSOR} MATC list(APPEND ARCH_FLAGS -mno-unaligned-access) endif() endif() -elseif (${CMAKE_SYSTEM_PROCESSOR} MATCHES "^(x86_64|i686|AMD64)$" OR "${CMAKE_GENERATOR_PLATFORM_LWR}" MATCHES "^(x86_64|i686|amd64|x64)$" ) +elseif (CMAKE_OSX_ARCHITECTURES STREQUAL "x86_64" OR CMAKE_GENERATOR_PLATFORM_LWR MATCHES "^(x86_64|i686|amd64|x64|win32)$" OR + (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_GENERATOR_PLATFORM_LWR AND + CMAKE_SYSTEM_PROCESSOR MATCHES 
"^(x86_64|i686|AMD64)$")) message(STATUS "x86 detected") if (MSVC) # instruction set detection for MSVC only From 8504d2d0da8cc7a1f2eee0e9e56949f960510b75 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 8 Feb 2024 09:46:47 +0200 Subject: [PATCH 697/859] tests : .gitignore obj files --- tests/.gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/.gitignore b/tests/.gitignore index 092dce742..9427cf13d 100644 --- a/tests/.gitignore +++ b/tests/.gitignore @@ -1,3 +1,3 @@ * !*.* -test-c.o +*.o From 26d4efd11e48908e14e2ee9471a7fc4c57079a1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 8 Feb 2024 09:46:30 +0100 Subject: [PATCH 698/859] sampling: fix top_k <= 0 (#5388) * sampling: fix top_k <= 0 * Update llama.cpp Co-authored-by: Georgi Gerganov --------- Co-authored-by: Georgi Gerganov --- common/sampling.cpp | 2 +- llama.cpp | 4 ++++ tests/test-sampling.cpp | 2 ++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index e8675a8c0..844ad7c53 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -132,7 +132,7 @@ static void sampler_queue( const float temp = params.temp; const float dynatemp_range = params.dynatemp_range; const float dynatemp_exponent = params.dynatemp_exponent; - const int32_t top_k = params.top_k <= 0 ? n_vocab : params.top_k; + const int32_t top_k = params.top_k; const float top_p = params.top_p; const float min_p = params.min_p; const float tfs_z = params.tfs_z; diff --git a/llama.cpp b/llama.cpp index c45ae1d50..f8f5796a4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8585,6 +8585,10 @@ void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * can // } const int64_t t_start_sample_us = ggml_time_us(); + + if (k <= 0) { + k = candidates->size; + } k = std::max(k, (int) min_keep); k = std::min(k, (int) candidates->size); diff --git a/tests/test-sampling.cpp b/tests/test-sampling.cpp index c3b3d6629..6374958fe 100644 --- a/tests/test-sampling.cpp +++ b/tests/test-sampling.cpp @@ -235,6 +235,8 @@ int main(void) { test_top_k({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f}, 1); test_top_k({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f, 0.3f, 0.2f}, 3); + test_top_k({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f, 0.3f, 0.2f, 0.1f}, 4); + test_top_k({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f, 0.3f, 0.2f, 0.1f}, 0); test_top_p({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f}, 0); test_top_p({0.1f, 0.2f, 0.3f, 0.4f}, {0.4f, 0.3f}, 0.7f); From a6e514a85f0fda38ff78ec91782877ea3d19ed98 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Thu, 8 Feb 2024 09:58:19 +0100 Subject: [PATCH 699/859] llava: fix typo/formatting in README.md (#5405) This commit fixes a typo in the README.md file for the llava example which is causing the formatting to look a little off: Clone llava-v15-7b`` and clip-vit-large-patch14-336`` locally Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 323c5fdd0..295181a34 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -21,7 +21,7 @@ After building, run: `./llava-cli` to see the usage. 
For example: ## Model conversion -- Clone `llava-v15-7b`` and `clip-vit-large-patch14-336`` locally: +- Clone `llava-v15-7b` and `clip-vit-large-patch14-336` locally: ```sh git clone https://huggingface.co/liuhaotian/llava-v1.5-7b From 4aa43fab569215a13495a7f1a0f8afc541b16d03 Mon Sep 17 00:00:00 2001 From: runfuture Date: Thu, 8 Feb 2024 18:36:19 +0800 Subject: [PATCH 700/859] llama : fix MiniCPM (#5392) * fix bug for norm_rms_eps missing * to align with the same order as convert.py for model write * fix: undo HF models permute tensor * update for flake8 lint --- convert-hf-to-gguf.py | 63 +++++++++++++++++++++++++++++++++++++++++-- llama.cpp | 2 ++ 2 files changed, 63 insertions(+), 2 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 829d68368..0d4ea03b4 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1078,17 +1078,76 @@ class MiniCPMModel(Model): self.gguf_writer.add_name("MiniCPM") self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) - self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"]) self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) self.gguf_writer.add_file_type(self.ftype) - self.gguf_writer.add_rope_dimension_count(self.hparams["hidden_size"] // self.hparams["num_attention_heads"]) def set_vocab(self): self._set_vocab_hf() + def _reverse_hf_permute(self, weights: Tensor, n_head: int, n_kv_head: int | None = None) -> Tensor: + if n_kv_head is not None and n_head != n_kv_head: + n_head //= n_kv_head + + return ( + weights.reshape(n_head, 2, weights.shape[0] // n_head // 2, *weights.shape[1:]) + .swapaxes(1, 2) + .reshape(weights.shape) + ) + + def write_tensors(self): + block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + n_head = self.hparams.get("num_attention_heads") + n_kv_head = self.hparams.get("num_key_value_heads") + for name, data_torch in self.get_tensors(): + # we don't need these + if name.endswith((".attention.masked_bias", ".attention.bias", ".attention.rotary_emb.inv_freq")): + continue + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + # HF models permute some of the tensors, so we need to undo that + if name.endswith(("q_proj.weight")): + data_torch = self._reverse_hf_permute(data_torch, n_head, n_head) + if name.endswith(("k_proj.weight")): + data_torch = self._reverse_hf_permute(data_torch, n_head, n_kv_head) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + # if f32 desired, convert any float16 to float32 + if self.ftype == 0 and data_dtype == np.float16: + data = data.astype(np.float32) + + # TODO: Why cant we use these float16 
as-is? There should be not reason to store float16 as float32 + if self.ftype == 1 and data_dtype == np.float16 and n_dims == 1: + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + class QwenModel(Model): @staticmethod diff --git a/llama.cpp b/llama.cpp index f8f5796a4..552e0d02e 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2947,6 +2947,8 @@ static void llm_load_hparams( } break; case LLM_ARCH_MINICPM: { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + switch (hparams.n_layer) { case 40: model.type = e_model::MODEL_2B; break; default: model.type = e_model::MODEL_UNKNOWN; From b7b74cef36a93ae01e0b9af8986d131761742d0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Thu, 8 Feb 2024 11:36:54 +0100 Subject: [PATCH 701/859] fix trailing whitespace (#5407) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 552e0d02e..89acafbc3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -8587,7 +8587,7 @@ void llama_sample_top_k(struct llama_context * ctx, llama_token_data_array * can // } const int64_t t_start_sample_us = ggml_time_us(); - + if (k <= 0) { k = candidates->size; } From ff4ff05c5ff4311c05a8ce1f984c7d8def4f07a5 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Thu, 8 Feb 2024 15:20:03 +0100 Subject: [PATCH 702/859] llava : add missing .py, and fix paths in README.md (#5414) This commit adds the missing .py extension to the convert-image-encoder-to-gguf script. It also fixes the paths for the `model` and `mmproj` options in the example llava-cli command. Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 295181a34..721d5e613 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -14,7 +14,7 @@ Build with cmake or run `make llava-cli` to build it. After building, run: `./llava-cli` to see the usage. For example: ```sh -./llava-cli -m llava-v1.5-7b/ggml-model-q5_k.gguf --mmproj llava-v1.5-7b/mmproj-model-f16.gguf --image path/to/an/image.jpg +./llava-cli -m ../llava-v1.5-7b/ggml-model-f16.gguf --mmproj ../llava-v1.5-7b/mmproj-model-f16.gguf --image path/to/an/image.jpg ``` **note**: A lower temperature like 0.1 is recommended for better quality. add `--temp 0.1` to the command to do so. @@ -38,7 +38,7 @@ python ./examples/llava/llava-surgery.py -m ../llava-v1.5-7b 3. Use `convert-image-encoder-to-gguf.py` to convert the LLaVA image encoder to GGUF: ```sh -python ./examples/llava/convert-image-encoder-to-gguf -m ../clip-vit-large-patch14-336 --llava-projector ../llava-v1.5-7b/llava.projector --output-dir ../llava-v1.5-7b +python ./examples/llava/convert-image-encoder-to-gguf.py -m ../clip-vit-large-patch14-336 --llava-projector ../llava-v1.5-7b/llava.projector --output-dir ../llava-v1.5-7b ``` 4. 
Use `convert.py` to convert the LLaMA part of LLaVA to GGUF: From 6e99f2a04f1871d637dd77eb4d81de31a5510253 Mon Sep 17 00:00:00 2001 From: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> Date: Thu, 8 Feb 2024 22:39:10 +0530 Subject: [PATCH 703/859] Fix f16_sycl cpy call from Arc (#5411) * fix f16_sycl cpy call * rm old logic * add fp16 build CI * use macro * format fix --- .github/workflows/build.yml | 41 +++++++++++++++++++++++++++++++++++++ ggml-sycl.cpp | 8 +++++--- 2 files changed, 46 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f4c374ce5..ed292d6b8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -184,6 +184,47 @@ jobs: cmake -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx .. cmake --build . --config Release -j $(nproc) + ubuntu-22-cmake-sycl-fp16: + runs-on: ubuntu-22.04 + + continue-on-error: true + + steps: + - uses: actions/checkout@v2 + + - name: add oneAPI to apt + shell: bash + run: | + cd /tmp + wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB + sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main" + + - name: install oneAPI dpcpp compiler + shell: bash + run: | + sudo apt update + sudo apt install intel-oneapi-compiler-dpcpp-cpp + + - name: install oneAPI MKL library + shell: bash + run: | + sudo apt install intel-oneapi-mkl-devel + + - name: Clone + id: checkout + uses: actions/checkout@v3 + + - name: Build + id: cmake_build + run: | + source /opt/intel/oneapi/setvars.sh + mkdir build + cd build + cmake -DLLAMA_SYCL=ON -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_SYCL_F16=ON .. + cmake --build . --config Release -j $(nproc) + # TODO: build with LLAMA_NO_METAL because test-backend-ops fail on "Apple Paravirtual device" and I don't know # how to debug it. 
# ref: https://github.com/ggerganov/llama.cpp/actions/runs/7131777249/job/19420981052#step:5:1124

diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp
index a03df4c65..dd562a898 100644
--- a/ggml-sycl.cpp
+++ b/ggml-sycl.cpp
@@ -12148,7 +12148,8 @@ inline void ggml_sycl_op_dequantize_mul_mat_vec(
     const int64_t src1_ncols, const int64_t src1_padded_row_size,
     const dpct::queue_ptr &stream) {

-    const int64_t ne00 = src0->ne[0];
+    GGML_TENSOR_BINARY_OP_LOCALS
+
     const int64_t row_diff = row_high - row_low;

     // on some GPUs it is faster to convert src1 to half and to use half precision intrinsics
@@ -12167,8 +12168,9 @@ inline void ggml_sycl_op_dequantize_mul_mat_vec(
         } else {
             src1_dfloat = src1_dfloat_a.alloc(ne00);
             ggml_cpy_f32_f16_sycl((const char *)src1_ddf_i, (char *)src1_dfloat,
-                                  ne00, ne00, 1, sizeof(float), 0, 0, ne00, 1,
-                                  sizeof(sycl::half), 0, 0, stream);
+                                  ne00, ne00, ne01, ne02, nb00, nb01, nb02,
+                                  nb03, ne10, ne11, ne12, nb10, nb11, nb12,
+                                  nb13, stream);
         }
     }
 #else

From 41f308f58edc2a04bcf9e245100b0a9b10e9a0fb Mon Sep 17 00:00:00 2001
From: slaren
Date: Thu, 8 Feb 2024 21:33:03 +0100
Subject: [PATCH 704/859] llama : do not print "offloading layers" message in
 CPU-only builds (#5416)

---
 llama.cpp | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index 89acafbc3..db7d1c1cd 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -4209,8 +4209,7 @@ static bool llm_load_tensors(
         ctx_bufs.emplace_back(ctx, buf);
     }

-    // print memory requirements
-    {
+    if (llama_supports_gpu_offload()) {
         const int n_gpu = std::min(n_gpu_layers, int(hparams.n_layer));

         LLAMA_LOG_INFO("%s: offloading %d repeating layers to GPU\n", __func__, n_gpu);
@@ -4222,10 +4221,11 @@ static bool llm_load_tensors(
         const int max_offloadable_layers = hparams.n_layer + 1;

         LLAMA_LOG_INFO("%s: offloaded %d/%d layers to GPU\n", __func__, std::min(n_gpu_layers, max_offloadable_layers), max_backend_supported_layers);
+    }

-        for (ggml_backend_buffer_t buf : model.bufs) {
-            LLAMA_LOG_INFO("%s: %10s buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(buf), ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0);
-        }
+    // print memory requirements
+    for (ggml_backend_buffer_t buf : model.bufs) {
+        LLAMA_LOG_INFO("%s: %10s buffer size = %8.2f MiB\n", __func__, ggml_backend_buffer_name(buf), ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0);
     }

     // populate tensors_by_name
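The `llama_supports_gpu_offload()` guard above means the two offload lines only appear when a GPU backend is compiled in, while the per-buffer memory report still prints for every build. A reduced sketch of the resulting control flow (the helper and the sizes here are stand-ins, not the real llama.cpp symbols):

```cpp
#include <algorithm>
#include <cstdio>
#include <vector>

static bool supports_gpu_offload() { return false; } // stand-in: false in a CPU-only build

static void report_load(int n_gpu_layers, int n_layer, const std::vector<size_t> & buf_sizes) {
    if (supports_gpu_offload()) {
        // offload messages are only meaningful when offloading can actually happen
        std::printf("offloading %d repeating layers to GPU\n", std::min(n_gpu_layers, n_layer));
    }
    // the memory report is useful either way
    for (const size_t sz : buf_sizes) {
        std::printf("buffer size = %8.2f MiB\n", sz / 1024.0 / 1024.0);
    }
}
```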
From 8e6a9d2de0096af7120606c74ee2f26684e87b41 Mon Sep 17 00:00:00 2001
From: Johannes Gäßler
Date: Thu, 8 Feb 2024 21:56:40 +0100
Subject: [PATCH 705/859] CUDA: more warps for mmvq on NVIDIA (#5394)

---
 ggml-cuda.cu | 133 +++++++++++++++++++++++++++++++++------------------
 1 file changed, 86 insertions(+), 47 deletions(-)

diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index db9da2459..5053757e6 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -5310,22 +5310,26 @@ template static __global__ void
 #endif // __CUDA_ARCH__ >= CC_VOLTA
 }

-template <int ncols_y_template, int qk, int qi, typename block_q_t, int vdr, vec_dot_q_cuda_t vec_dot>
+#define MMVQ_NWARPS_NVIDIA    4
+#define MMVQ_NWARPS_AMD_RDNA2 1
+#define MMVQ_NWARPS_AMD_OLD   4
+
+template <int nwarps, int ncols_y_template, int qk, int qi, typename block_q_t, int vdr, vec_dot_q_cuda_t vec_dot>
+#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__))
+__launch_bounds__(nwarps*WARP_SIZE, 1) // tells the compiler to use as many registers as it wants
+#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__))
 static __global__ void mul_mat_vec_q(
     const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst,
     const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par, const int nrows_dst) {

     const int ncols_y = ncols_y_template != 0 ? ncols_y_template : ncols_y_par;

-    const int row = blockIdx.x*blockDim.y + threadIdx.y;
-
-    if (row >= nrows_x) {
-        return;
-    }
+    const int tid = WARP_SIZE*threadIdx.y + threadIdx.x;
+    const int row = blockIdx.x;

     const int blocks_per_row_x = ncols_x / qk;
     const int blocks_per_col_y = nrows_y / QK8_1;
-    const int blocks_per_warp = vdr * WARP_SIZE / qi;
+    const int blocks_per_iter = vdr * nwarps*WARP_SIZE / qi;

     // partial sum for each thread
     float tmp[ncols_y_template != 0 ? ncols_y_template : 8] = {0.0f};

     const block_q_t  * x = (const block_q_t  *) vx;
     const block_q8_1 * y = (const block_q8_1 *) vy;

-    for (int i = threadIdx.x / (qi/vdr); i < blocks_per_row_x; i += blocks_per_warp) {
+    for (int i = tid / (qi/vdr); i < blocks_per_row_x; i += blocks_per_iter) {
         const int ibx = row*blocks_per_row_x + i; // x block index

         const int iby = i * (qk/QK8_1); // y block index that aligns with ibx

-        const int iqs  = vdr * (threadIdx.x % (qi/vdr)); // x block quant index when casting the quants to int
+        const int iqs  = vdr * (tid % (qi/vdr)); // x block quant index when casting the quants to int

 #pragma unroll
         for (int j = 0; j < ncols_y; ++j) {
@@ -5346,9 +5350,25 @@ static __global__ void mul_mat_vec_q(
         }
     }

+    __shared__ float tmp_shared[nwarps-1 > 0 ? nwarps-1 : 1][ncols_y_template != 0 ? ncols_y_template : 8][WARP_SIZE];
+    if (threadIdx.y > 0) {
+#pragma unroll
+        for (int j = 0; j < ncols_y; ++j) {
+            tmp_shared[threadIdx.y-1][j][threadIdx.x] = tmp[j];
+        }
+    }
+    __syncthreads();
+    if (threadIdx.y > 0) {
+        return;
+    }
+
     // sum up partial sums and write back result
 #pragma unroll
     for (int j = 0; j < ncols_y; ++j) {
+#pragma unroll
+        for (int i = 0; i < nwarps-1; ++i) {
+            tmp[j] += tmp_shared[i][j][threadIdx.x];
+        }
         tmp[j] = warp_reduce_sum(tmp[j]);

         if (threadIdx.x == 0) {
@@ -6833,46 +6853,65 @@ static void mul_mat_vec_q_cuda(

     GGML_ASSERT(ncols_x % qk == 0);
     GGML_ASSERT(ncols_y <= 4);

-    const int block_num_y = (nrows_x + GGML_CUDA_MMV_Y - 1) / GGML_CUDA_MMV_Y;
-    const dim3 block_nums(block_num_y, 1, 1);
-    const dim3 block_dims(WARP_SIZE, GGML_CUDA_MMV_Y, 1);
-    switch (ncols_y) {
-        case 1:
-            mul_mat_vec_q<1, qk, qi, block_q_t, vdr, vec_dot>
-                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-            break;
-        case 2:
-            mul_mat_vec_q<2, qk, qi, block_q_t, vdr, vec_dot>
-                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-            break;
-        case 3:
-            mul_mat_vec_q<3, qk, qi, block_q_t, vdr, vec_dot>
-                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-            break;
-        case 4:
-            mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot>
-                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-            break;
-        // case 5:
-        //     mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot>
-        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-        //     break;
-        // case 6:
-        //     mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot>
-        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-        //     break;
-        // case 7:
-        //     mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot>
-        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-        //     break;
-        // case 8:
-        //     mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot>
-        //         <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-        //     break;
+    int id;
+    CUDA_CHECK(cudaGetDevice(&id));
+
+    int nwarps;
+    if (g_device_caps[id].cc >= CC_OFFSET_AMD) {
+        nwarps = g_device_caps[id].cc >= CC_RDNA2 ? MMVQ_NWARPS_AMD_RDNA2 : MMVQ_NWARPS_AMD_OLD;
+    } else {
+        nwarps = MMVQ_NWARPS_NVIDIA;
+    }
+
+    const dim3 block_nums(nrows_x, 1, 1);
+    const dim3 block_dims(WARP_SIZE, nwarps, 1);
+
+    switch (nwarps) {
+        case 1: switch(ncols_y) {
+            case 1:
+                mul_mat_vec_q<1, 1, qk, qi, block_q_t, vdr, vec_dot>
+                    <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
+                break;
+            case 2:
+                mul_mat_vec_q<1, 2, qk, qi, block_q_t, vdr, vec_dot>
+                    <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
+                break;
+            case 3:
+                mul_mat_vec_q<1, 3, qk, qi, block_q_t, vdr, vec_dot>
+                    <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
+                break;
+            case 4:
+                mul_mat_vec_q<1, 4, qk, qi, block_q_t, vdr, vec_dot>
+                    <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
+                break;
+            default:
+                GGML_ASSERT(false);
+                break;
+        } break;
+        case 4: switch(ncols_y) {
+            case 1:
+                mul_mat_vec_q<4, 1, qk, qi, block_q_t, vdr, vec_dot>
+                    <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
+                break;
+            case 2:
+                mul_mat_vec_q<4, 2, qk, qi, block_q_t, vdr, vec_dot>
+                    <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
+                break;
+            case 3:
+                mul_mat_vec_q<4, 3, qk, qi, block_q_t, vdr, vec_dot>
+                    <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
+                break;
+            case 4:
+                mul_mat_vec_q<4, 4, qk, qi, block_q_t, vdr, vec_dot>
+                    <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
+                break;
+            default:
+                GGML_ASSERT(false);
+                break;
+        } break;
+
         default:
             GGML_ASSERT(false);
-            // mul_mat_vec_q<0, qk, qi, block_q_t, vdr, vec_dot>
-            //     <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
             break;
     }
 }
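The rewritten kernel gives every output row one block of `nwarps` warps: each warp accumulates private partials, warps 1..nwarps-1 stage their values in shared memory, and warp 0 folds them in before the final `warp_reduce_sum`. A plain C++ sketch of that two-stage sum (the sizes are illustrative and this only mirrors the arithmetic, not the CUDA execution model):

```cpp
#include <cstdio>
#include <vector>

int main() {
    const int nwarps = 4, WARP_SIZE = 32;                  // MMVQ_NWARPS_NVIDIA geometry
    std::vector<float> partial(nwarps * WARP_SIZE, 1.0f); // pretend each thread produced 1.0

    // stage: each "warp" reduces its lanes (warp_reduce_sum analogue)
    std::vector<float> staged(nwarps, 0.0f);
    for (int w = 0; w < nwarps; ++w)
        for (int lane = 0; lane < WARP_SIZE; ++lane)
            staged[w] += partial[w*WARP_SIZE + lane];

    // fold: "warp 0" adds the staged partials to produce the row result
    float total = 0.0f;
    for (int w = 0; w < nwarps; ++w) total += staged[w];
    std::printf("row sum = %.1f (expected %d)\n", total, nwarps*WARP_SIZE);
    return 0;
}
```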
From 44fbe34360dd760f9e68b4271f21533436397f84 Mon Sep 17 00:00:00 2001
From: 0cc4m
Date: Fri, 9 Feb 2024 06:52:33 +0100
Subject: [PATCH 706/859] Fix Vulkan crash on APUs with very little device
 memory (#5424)

* Fix Vulkan crash on APUs with very little device memory

* Fix debug output function names

---
 ggml-vulkan.cpp | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp
index 9e2846ee4..254f648a6 100644
--- a/ggml-vulkan.cpp
+++ b/ggml-vulkan.cpp
@@ -744,6 +744,8 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz
     }

     if (memory_type_index >= mem_props.memoryTypeCount) {
+        ctx->device.lock()->device.destroyBuffer(buf->buffer);
+        buf->size = 0;
         throw vk::OutOfDeviceMemoryError("No suitable memory type found");
     }

@@ -3875,7 +3877,7 @@ static ggml_tensor * ggml_vk_find_last_use(const ggml_tensor * node, ggml_cgraph

 static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggml_tensor * node){
 #ifdef GGML_VULKAN_DEBUG
-    std::cerr << "ggml_ctx->preallocate_buffers_graph(" << node << ")" << std::endl;
+    std::cerr << "ggml_vk_preallocate_buffers_graph(" << node << ")" << std::endl;
 #endif
     const bool any_on_device = node->backend == GGML_BACKEND_GPU
         || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT))
@@ -3994,8 +3996,7 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) {
         return;
     }
 #ifdef GGML_VULKAN_DEBUG
-    std::cerr << "ggml_ctx->preallocate_buffers()" << std::endl;
-    std::cerr << "qx_size: " << ctx->prealloc_size_qx << " qy_size: " << ctx->prealloc_size_qy << " x_size: " << ctx->prealloc_size_x << " y_size: " << ctx->prealloc_size_y << " split_k_size: " << ctx->prealloc_size_split_k << std::endl;
+    std::cerr << "ggml_vk_preallocate_buffers(qx_size: " <<
ctx->prealloc_size_qx << " qy_size: " << ctx->prealloc_size_qy << " x_size: " << ctx->prealloc_size_x << " y_size: " << ctx->prealloc_size_y << " split_k_size: " << ctx->prealloc_size_split_k << ")" << std::endl; #endif #if defined(GGML_VULKAN_RUN_TESTS) ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); From b2f87cb64db47d799b6f3656855c9caf9792ab2a Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Fri, 9 Feb 2024 10:56:43 +0100 Subject: [PATCH 707/859] ggml : fix `error C2078: too many initializers` for MSVC ARM64 (#5404) --- ggml-quants.c | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 101d3e783..1031e3761 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -268,6 +268,17 @@ static inline float hsum_float_4x4(const __m128 a, const __m128 b, const __m128 #endif // defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) #if defined(__ARM_NEON) + +#ifdef _MSC_VER + +#define ggml_vld1q_u32(w,x,y,z) { ((w) + ((uint64_t)(x) << 32)), ((y) + ((uint64_t)(z) << 32)) } + +#else + +#define ggml_vld1q_u32(w,x,y,z) { (w), (x), (y), (z) } + +#endif + #if !defined(__aarch64__) // 64-bit compatibility @@ -8698,10 +8709,10 @@ void ggml_vec_dot_iq3_xxs_q8_K(const int n, float * restrict s, const void * res for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { q8b = ggml_vld1q_s8_x4(q8); q8 += 64; memcpy(aux32, gas, 2*sizeof(uint32_t)); gas += 2*sizeof(uint32_t); - const uint32x4_t aux32x4_0 = {iq3xxs_grid[q3[ 0]], iq3xxs_grid[q3[ 1]], iq3xxs_grid[q3[ 2]], iq3xxs_grid[q3[ 3]]}; - const uint32x4_t aux32x4_1 = {iq3xxs_grid[q3[ 4]], iq3xxs_grid[q3[ 5]], iq3xxs_grid[q3[ 6]], iq3xxs_grid[q3[ 7]]}; - const uint32x4_t aux32x4_2 = {iq3xxs_grid[q3[ 8]], iq3xxs_grid[q3[ 9]], iq3xxs_grid[q3[10]], iq3xxs_grid[q3[11]]}; - const uint32x4_t aux32x4_3 = {iq3xxs_grid[q3[12]], iq3xxs_grid[q3[13]], iq3xxs_grid[q3[14]], iq3xxs_grid[q3[15]]}; + const uint32x4_t aux32x4_0 = ggml_vld1q_u32(iq3xxs_grid[q3[ 0]], iq3xxs_grid[q3[ 1]], iq3xxs_grid[q3[ 2]], iq3xxs_grid[q3[ 3]]); + const uint32x4_t aux32x4_1 = ggml_vld1q_u32(iq3xxs_grid[q3[ 4]], iq3xxs_grid[q3[ 5]], iq3xxs_grid[q3[ 6]], iq3xxs_grid[q3[ 7]]); + const uint32x4_t aux32x4_2 = ggml_vld1q_u32(iq3xxs_grid[q3[ 8]], iq3xxs_grid[q3[ 9]], iq3xxs_grid[q3[10]], iq3xxs_grid[q3[11]]); + const uint32x4_t aux32x4_3 = ggml_vld1q_u32(iq3xxs_grid[q3[12]], iq3xxs_grid[q3[13]], iq3xxs_grid[q3[14]], iq3xxs_grid[q3[15]]); q3 += 16; q3s.val[0] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[0] >> 0) & 127))), vld1_s8((const void *)(signs64 + ((aux32[0] >> 7) & 127)))); q3s.val[1] = vcombine_s8(vld1_s8((const void *)(signs64 + ((aux32[0] >> 14) & 127))), vld1_s8((const void *)(signs64 + ((aux32[0] >> 21) & 127)))); From e4124c24775f2cb5b3d7acc93bf9dc5471c172ef Mon Sep 17 00:00:00 2001 From: Marko Tasic Date: Fri, 9 Feb 2024 11:17:00 +0100 Subject: [PATCH 708/859] readme : add JavaScript/Wasm repo (#5415) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 66166c01b..0b4efdd33 100644 --- a/README.md +++ b/README.md @@ -124,6 +124,7 @@ Typically finetunes of the base models below are supported as well. 
- Go: [go-skynet/go-llama.cpp](https://github.com/go-skynet/go-llama.cpp) - Node.js: [withcatai/node-llama-cpp](https://github.com/withcatai/node-llama-cpp) - JS/TS (llama.cpp server client): [lgrammel/modelfusion](https://modelfusion.dev/integration/model-provider/llamacpp) +- JavaScript/Wasm (works in browser): [tangledgroup/llama-cpp-wasm](https://github.com/tangledgroup/llama-cpp-wasm) - Ruby: [yoshoku/llama_cpp.rb](https://github.com/yoshoku/llama_cpp.rb) - Rust (nicer API): [mdrokz/rust-llama.cpp](https://github.com/mdrokz/rust-llama.cpp) - Rust (more direct bindings): [utilityai/llama-cpp-rs](https://github.com/utilityai/llama-cpp-rs) From e5ca3937c685d6e012ac4db40555d6ec100ff03c Mon Sep 17 00:00:00 2001 From: Paul Tsochantaris Date: Fri, 9 Feb 2024 10:48:06 +0000 Subject: [PATCH 709/859] llama : do not cap thread count when MoE on CPU (#5419) * Not capping thread count when MoE inference is running on CPU * Whitespace --- llama.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index db7d1c1cd..0566b087b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7285,7 +7285,9 @@ static int llama_decode_internal( // TODO: this is mostly important for Apple Silicon where CBLAS is still performing very well // we still need some threads to process all non-mul_mat ops, but not too much to avoid interfering // with the BLAS calls. need a better solution - if (n_tokens >= 32 && ggml_cpu_has_blas() && !ggml_cpu_has_gpublas()) { + // MoE Special Case: This logic applies when hparams.n_expert == 0, i.e. the model is NOT an MoE model. When an MoE is + // being processed then Accelerate/BLAS will not be involved, so capping would limit performance. + if (n_tokens >= 32 && hparams.n_expert == 0 && ggml_cpu_has_blas() && !ggml_cpu_has_gpublas()) { n_threads = std::min(4, n_threads); } From 7c777fcd5dd4af7079e33390cf6a19c328a2666f Mon Sep 17 00:00:00 2001 From: Riley Stewart Date: Fri, 9 Feb 2024 02:49:49 -0800 Subject: [PATCH 710/859] server : fix prompt caching for repeated prompts (#5420) --- examples/server/server.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index eceda30d0..8d668f798 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1592,10 +1592,6 @@ struct llama_server_context LOG_TEE("slot %d : in cache: %i tokens | to process: %i tokens\n", slot.id, slot.n_past, slot.num_prompt_tokens_processed); } - LOG_TEE("slot %d : kv cache rm - [%d, end)\n", slot.id, (int) system_tokens.size() + slot.n_past); - - llama_kv_cache_seq_rm(ctx, slot.id, system_tokens.size() + slot.n_past, -1); - slot.cache_tokens = prompt_tokens; if (slot.n_past == slot.num_prompt_tokens && slot.n_past > 0) @@ -1609,6 +1605,10 @@ struct llama_server_context } } + LOG_TEE("slot %d : kv cache rm - [%d, end)\n", slot.id, (int) system_tokens.size() + slot.n_past); + + llama_kv_cache_seq_rm(ctx, slot.id, system_tokens.size() + slot.n_past, -1); + LOG_VERBOSE("prompt ingested", { {"n_past", slot.n_past}, {"cached", tokens_to_str(ctx, slot.cache_tokens.cbegin(), slot.cache_tokens.cbegin() + slot.n_past)}, From e00d2a62dd1441e3b089570ec06d05c18800d368 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Fri, 9 Feb 2024 14:00:59 +0100 Subject: [PATCH 711/859] llava : add requirements.txt and update README.md (#5428) * llava: add requirements.txt and update README.md This commit adds a `requirements.txt` file to the `examples/llava` directory. 
This file contains the required Python packages to run the scripts in the `examples/llava` directory. The motivation of this to make it easier for users to run the scripts in `examples/llava`. This will avoid users from having to possibly run into missing package issues if the packages are not installed on their system. Signed-off-by: Daniel Bevenius * llava: fix typo in llava-surgery.py output Signed-off-by: Daniel Bevenius --------- Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 12 +++++++++--- examples/llava/llava-surgery.py | 2 +- examples/llava/requirements.txt | 3 +++ 3 files changed, 13 insertions(+), 4 deletions(-) create mode 100644 examples/llava/requirements.txt diff --git a/examples/llava/README.md b/examples/llava/README.md index 721d5e613..19f1a50a2 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -29,19 +29,25 @@ git clone https://huggingface.co/liuhaotian/llava-v1.5-7b git clone https://huggingface.co/openai/clip-vit-large-patch14-336 ``` -2. Use `llava-surgery.py` to split the LLaVA model to LLaMA and multimodel projector constituents: +2. Install the required Python packages: + +```sh +pip install -r examples/llava/requirements.txt +``` + +3. Use `llava-surgery.py` to split the LLaVA model to LLaMA and multimodel projector constituents: ```sh python ./examples/llava/llava-surgery.py -m ../llava-v1.5-7b ``` -3. Use `convert-image-encoder-to-gguf.py` to convert the LLaVA image encoder to GGUF: +4. Use `convert-image-encoder-to-gguf.py` to convert the LLaVA image encoder to GGUF: ```sh python ./examples/llava/convert-image-encoder-to-gguf.py -m ../clip-vit-large-patch14-336 --llava-projector ../llava-v1.5-7b/llava.projector --output-dir ../llava-v1.5-7b ``` -4. Use `convert.py` to convert the LLaMA part of LLaVA to GGUF: +5. Use `convert.py` to convert the LLaMA part of LLaVA to GGUF: ```sh python ./convert.py ../llava-v1.5-7b diff --git a/examples/llava/llava-surgery.py b/examples/llava/llava-surgery.py index 515f6b58d..0a61efdfe 100644 --- a/examples/llava/llava-surgery.py +++ b/examples/llava/llava-surgery.py @@ -42,5 +42,5 @@ if len(clip_tensors) > 0: torch.save(checkpoint, path) print("Done!") -print(f"Now you can convert {args.model} to a a regular LLaMA GGUF file.") +print(f"Now you can convert {args.model} to a regular LLaMA GGUF file.") print(f"Also, use {args.model}/llava.projector to prepare a llava-encoder.gguf file.") diff --git a/examples/llava/requirements.txt b/examples/llava/requirements.txt new file mode 100644 index 000000000..f80f727a7 --- /dev/null +++ b/examples/llava/requirements.txt @@ -0,0 +1,3 @@ +-r ../../requirements/requirements-convert.txt +pillow~=10.2.0 +torch~=2.1.1 From 4b7b38bef5addbd31f453871d79647fbae6bec8a Mon Sep 17 00:00:00 2001 From: Neuman Vong Date: Sat, 10 Feb 2024 05:30:19 +1100 Subject: [PATCH 712/859] vulkan: Set limit for task concurrency (#5427) A common default for the maximum number of open files is 256, which can lead to `asyncio.gather(*tasks)` failing with Too many open files. 
$ python ggml_vk_generate_shaders.py --glslc=$ANDROID_NDK_PATH/shader-tools/darwin-x86_64/glslc
ggml_vulkan: Generating and compiling shaders to SPIR-V
Traceback (most recent call last):
  File "/Users/neuman/Code.noindex/github/llama.cpp/ggml_vk_generate_shaders.py", line 2326, in <module>
    asyncio.run(main())
  File "/Users/neuman/Code.noindex/miniforge3/lib/python3.10/asyncio/runners.py", line 44, in run
    return loop.run_until_complete(main)
  File "/Users/neuman/Code.noindex/miniforge3/lib/python3.10/asyncio/base_events.py", line 649, in run_until_complete
    return future.result()
  File "/Users/neuman/Code.noindex/github/llama.cpp/ggml_vk_generate_shaders.py", line 2294, in main
    await asyncio.gather(*tasks)
  [...snip...]
OSError: [Errno 24] Too many open files

This change sets a reasonable concurrency limit for tasks (and therefore
open files), without significant impact on run time.

---
 ggml_vk_generate_shaders.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/ggml_vk_generate_shaders.py b/ggml_vk_generate_shaders.py
index 4abb0383f..b2e86e182 100644
--- a/ggml_vk_generate_shaders.py
+++ b/ggml_vk_generate_shaders.py
@@ -2067,6 +2067,8 @@ type_names = {

 K_QUANTS_PER_ITERATION = 2

+ASYNCIO_CONCURRENCY = 64
+
 output_dir = gettempdir()

 lock = asyncio.Lock()
@@ -2291,7 +2293,14 @@ async def main():
     tasks.append(string_to_spv("rope_neox_f32", rope_neox_src, {"A_TYPE": "float", "D_TYPE": "float"}))
     tasks.append(string_to_spv("rope_neox_f16", rope_neox_src, {"A_TYPE": "float16_t", "D_TYPE": "float16_t"}))

-    await asyncio.gather(*tasks)
+    # Helper to decorate tasks with semaphore acquisition.
+    async def withSemaphore(sem, task):
+        async with sem:
+            return await task
+
+    # Run tasks concurrently guarded by a concurrency limit.
+    sem = asyncio.Semaphore(ASYNCIO_CONCURRENCY)
+    await asyncio.gather(*(withSemaphore(sem, task) for task in tasks))

     with open("ggml-vulkan-shaders.hpp", "w") as f:
         f.write("#include <cstdint>\n\n")
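The fix above wraps each coroutine in a semaphore guard so at most `ASYNCIO_CONCURRENCY` shader compiles (and their open files) are in flight at once. The same pattern in C++20, using `std::counting_semaphore` in place of `asyncio.Semaphore` (the limit and task count are illustrative):

```cpp
#include <cstdio>
#include <semaphore>
#include <thread>
#include <vector>

std::counting_semaphore<64> sem(8); // cap concurrent tasks, like asyncio.Semaphore(8)

void compile_shader(int id) {
    sem.acquire();                  // the withSemaphore() wrapper's "async with sem:"
    std::printf("compiling shader %d\n", id);
    sem.release();
}

int main() {
    std::vector<std::thread> workers;
    for (int i = 0; i < 32; ++i) workers.emplace_back(compile_shader, i);
    for (auto & w : workers) w.join();
    return 0;
}
```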
From 4633d93af08d890ecd00fa6e4f61d76f21cded4c Mon Sep 17 00:00:00 2001
From: Michael Podvitskiy
Date: Fri, 9 Feb 2024 10:42:27 +0100
Subject: [PATCH 713/859] ggml : add abort_callback for cpu backend (ggml/725)

* a way to use abort_callback with the cpu backend

* whisper update

---
 ggml-backend.c | 26 ++++++++++++++++++++++----
 ggml-backend.h |  5 +++--
 ggml.c         |  2 +-
 ggml.h         |  9 +++++++--
 4 files changed, 33 insertions(+), 9 deletions(-)

diff --git a/ggml-backend.c b/ggml-backend.c
index 0764dfebc..532da8eda 100644
--- a/ggml-backend.c
+++ b/ggml-backend.c
@@ -653,6 +653,9 @@ struct ggml_backend_cpu_context {
     int n_threads;
     void * work_data;
     size_t work_size;
+
+    ggml_abort_callback abort_callback;
+    void *              abort_callback_data;
 };

 GGML_CALL static const char * ggml_backend_cpu_name(ggml_backend_t backend) {
@@ -691,6 +694,9 @@ GGML_CALL static ggml_backend_graph_plan_t ggml_backend_cpu_graph_plan_create(gg
         cpu_plan->cplan.work_data = malloc(cpu_plan->cplan.work_size);
     }

+    cpu_plan->cplan.abort_callback      = cpu_ctx->abort_callback;
+    cpu_plan->cplan.abort_callback_data = cpu_ctx->abort_callback_data;
+
     return cpu_plan;
 }

@@ -721,9 +727,11 @@ GGML_CALL static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, str
         cpu_ctx->work_data = realloc(cpu_ctx->work_data, cplan.work_size);
         cpu_ctx->work_size = cplan.work_size;
     }
-
     cplan.work_data = cpu_ctx->work_data;

+    cplan.abort_callback      = cpu_ctx->abort_callback;
+    cplan.abort_callback_data = cpu_ctx->abort_callback_data;
+
     ggml_graph_compute(cgraph, &cplan);
     return true;
 }
@@ -759,9 +767,11 @@ static struct ggml_backend_i cpu_backend_i = {

 ggml_backend_t ggml_backend_cpu_init(void) {
     struct ggml_backend_cpu_context * ctx = malloc(sizeof(struct ggml_backend_cpu_context));

-    ctx->n_threads = GGML_DEFAULT_N_THREADS;
-    ctx->work_data = NULL;
-    ctx->work_size = 0;
+    ctx->n_threads           = GGML_DEFAULT_N_THREADS;
+    ctx->work_data           = NULL;
+    ctx->work_size           = 0;
+    ctx->abort_callback      = NULL;
+    ctx->abort_callback_data = NULL;

     ggml_backend_t cpu_backend = malloc(sizeof(struct ggml_backend));

@@ -783,6 +793,14 @@ void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads) {
     ctx->n_threads = n_threads;
 }

+void ggml_backend_cpu_set_abort_callback(ggml_backend_t backend_cpu, ggml_abort_callback abort_callback, void * abort_callback_data) {
+    GGML_ASSERT(ggml_backend_is_cpu(backend_cpu));
+
+    struct ggml_backend_cpu_context * ctx = (struct ggml_backend_cpu_context *)backend_cpu->context;
+    ctx->abort_callback      = abort_callback;
+    ctx->abort_callback_data = abort_callback_data;
+}
+
 GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) {
     return ggml_backend_buffer_init(ggml_backend_cpu_buffer_type(), cpu_backend_buffer_i_from_ptr, ptr, size);
 }
diff --git a/ggml-backend.h b/ggml-backend.h
index 8b8160fcf..282b3a9b7 100644
--- a/ggml-backend.h
+++ b/ggml-backend.h
@@ -83,8 +83,9 @@ extern "C" {

     GGML_API ggml_backend_t ggml_backend_cpu_init(void);

-    GGML_API GGML_CALL bool ggml_backend_is_cpu (ggml_backend_t backend);
-    GGML_API void ggml_backend_cpu_set_n_threads(ggml_backend_t backend_cpu, int n_threads);
+    GGML_API GGML_CALL bool ggml_backend_is_cpu            (ggml_backend_t backend);
+    GGML_API void ggml_backend_cpu_set_n_threads     (ggml_backend_t backend_cpu, int n_threads);
+    GGML_API void ggml_backend_cpu_set_abort_callback(ggml_backend_t backend_cpu, ggml_abort_callback abort_callback, void * abort_callback_data);

     // Create a backend buffer from an existing pointer
     GGML_API GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size);
diff --git a/ggml.c b/ggml.c
index f783a6fd3..86cd65862 100644
--- a/ggml.c
+++ b/ggml.c
@@ -16649,7 +16649,7 @@ struct ggml_compute_state_shared {
     atomic_int node_n;    // active graph node
     atomic_int node_task; // active graph node task phase

-    bool (*abort_callback)(void * data); // abort ggml_graph_compute when true
+    ggml_abort_callback abort_callback; // abort ggml_graph_compute when true
     void * abort_callback_data;
 };

diff --git a/ggml.h b/ggml.h
index e0a4799f3..1360cd8ee 100644
--- a/ggml.h
+++ b/ggml.h
@@ -567,6 +567,11 @@ extern "C" {

     static const size_t GGML_TENSOR_SIZE = sizeof(struct ggml_tensor);

+    // Abort callback
+    // If not NULL, called before ggml computation
+    // If it returns true, the computation is aborted
+    typedef bool (*ggml_abort_callback)(void * data);
+
     // the compute plan that needs to be prepared for ggml_graph_compute()
     // since https://github.com/ggerganov/ggml/issues/287
     struct ggml_cplan {
@@ -576,8 +581,8 @@ extern "C" {
         int n_threads;

         // abort ggml_graph_compute when true
-        bool (*abort_callback)(void * data);
-        void * abort_callback_data;
+        ggml_abort_callback abort_callback;
+        void *              abort_callback_data;
     };

     enum ggml_cgraph_eval_order {

From 43b65f5eb85e8741aba573a8f65bb8efad245d31 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Sat, 10 Feb 2024 09:30:36 +0200
Subject: [PATCH 714/859] sync : ggml

---
 scripts/sync-ggml.last | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last
index 7b6c17915..6ae75bc31 100644
---
a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -475cbad5c1c834e31e26a2283bc1413181644360 +2c7cf49810d523b9632da393a9e8270b60bf3b24 From cd9aea63b577a83def84dbd6dcd90a6fa02af745 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 10 Feb 2024 09:53:05 +0200 Subject: [PATCH 715/859] scripts : update sync scripts with new backends --- scripts/sync-ggml-am.sh | 12 ++++++++++++ scripts/sync-ggml.sh | 6 ++++++ 2 files changed, 18 insertions(+) diff --git a/scripts/sync-ggml-am.sh b/scripts/sync-ggml-am.sh index 6b2514a11..2c391e641 100755 --- a/scripts/sync-ggml-am.sh +++ b/scripts/sync-ggml-am.sh @@ -97,6 +97,8 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then # src/ggml-cuda.cu -> ggml-cuda.cu # src/ggml-cuda.h -> ggml-cuda.h # src/ggml-impl.h -> ggml-impl.h + # src/ggml-kompute.cpp -> ggml-kompute.cpp + # src/ggml-kompute.h -> ggml-kompute.h # src/ggml-metal.h -> ggml-metal.h # src/ggml-metal.m -> ggml-metal.m # src/ggml-mpi.h -> ggml-mpi.h @@ -105,6 +107,10 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then # src/ggml-opencl.h -> ggml-opencl.h # src/ggml-quants.c -> ggml-quants.c # src/ggml-quants.h -> ggml-quants.h + # src/ggml-sycl.cpp -> ggml-sycl.cpp + # src/ggml-sycl.h -> ggml-sycl.h + # src/ggml-vulkan.cpp -> ggml-vulkan.cpp + # src/ggml-vulkan.h -> ggml-vulkan.h # include/ggml/ggml.h -> ggml.h # include/ggml/ggml-alloc.h -> ggml-alloc.h # include/ggml/ggml-backend.h -> ggml-backend.h @@ -123,6 +129,8 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then -e 's/src\/ggml-cuda\.cu/ggml-cuda.cu/g' \ -e 's/src\/ggml-cuda\.h/ggml-cuda.h/g' \ -e 's/src\/ggml-impl\.h/ggml-impl.h/g' \ + -e 's/src\/ggml-kompute\.cpp/ggml-kompute.cpp/g' \ + -e 's/src\/ggml-kompute\.h/ggml-kompute.h/g' \ -e 's/src\/ggml-metal\.h/ggml-metal.h/g' \ -e 's/src\/ggml-metal\.m/ggml-metal.m/g' \ -e 's/src\/ggml-mpi\.h/ggml-mpi.h/g' \ @@ -131,6 +139,10 @@ if [ -f $SRC_LLAMA/ggml-src.patch ]; then -e 's/src\/ggml-opencl\.h/ggml-opencl.h/g' \ -e 's/src\/ggml-quants\.c/ggml-quants.c/g' \ -e 's/src\/ggml-quants\.h/ggml-quants.h/g' \ + -e 's/src\/ggml-sycl\.cpp/ggml-sycl.cpp/g' \ + -e 's/src\/ggml-sycl\.h/ggml-sycl.h/g' \ + -e 's/src\/ggml-vulkan\.cpp/ggml-vulkan.cpp/g' \ + -e 's/src\/ggml-vulkan\.h/ggml-vulkan.h/g' \ -e 's/include\/ggml\/ggml\.h/ggml.h/g' \ -e 's/include\/ggml\/ggml-alloc\.h/ggml-alloc.h/g' \ -e 's/include\/ggml\/ggml-backend\.h/ggml-backend.h/g' \ diff --git a/scripts/sync-ggml.sh b/scripts/sync-ggml.sh index 0097db435..feb34bbc8 100755 --- a/scripts/sync-ggml.sh +++ b/scripts/sync-ggml.sh @@ -7,6 +7,8 @@ cp -rpv ../ggml/src/ggml-backend.c ./ggml-backend.c cp -rpv ../ggml/src/ggml-cuda.cu ./ggml-cuda.cu cp -rpv ../ggml/src/ggml-cuda.h ./ggml-cuda.h cp -rpv ../ggml/src/ggml-impl.h ./ggml-impl.h +cp -rpv ../ggml/src/ggml-kompute.cpp ./ggml-kompute.cpp +cp -rpv ../ggml/src/ggml-kompute.h ./ggml-kompute.h cp -rpv ../ggml/src/ggml-metal.h ./ggml-metal.h cp -rpv ../ggml/src/ggml-metal.m ./ggml-metal.m cp -rpv ../ggml/src/ggml-metal.metal ./ggml-metal.metal @@ -16,6 +18,10 @@ cp -rpv ../ggml/src/ggml-opencl.cpp ./ggml-opencl.cpp cp -rpv ../ggml/src/ggml-opencl.h ./ggml-opencl.h cp -rpv ../ggml/src/ggml-quants.c ./ggml-quants.c cp -rpv ../ggml/src/ggml-quants.h ./ggml-quants.h +cp -rpv ../ggml/src/ggml-sycl.cpp ./ggml-sycl.cpp +cp -rpv ../ggml/src/ggml-sycl.h ./ggml-sycl.h +cp -rpv ../ggml/src/ggml-vulkan.cpp ./ggml-vulkan.cpp +cp -rpv ../ggml/src/ggml-vulkan.h ./ggml-vulkan.h cp -rpv ../ggml/include/ggml/ggml.h ./ggml.h cp -rpv ../ggml/include/ggml/ggml-alloc.h ./ggml-alloc.h cp -rpv 
../ggml/include/ggml/ggml-backend.h ./ggml-backend.h From f026f8120f97090d34a52b3dc023c82e0ede3f7d Mon Sep 17 00:00:00 2001 From: Ian Bull Date: Sat, 10 Feb 2024 02:53:28 -0800 Subject: [PATCH 716/859] metal : use autoreleasepool to avoid memory leaks (#5437) There appears to be a known memory leak when using the `MLTCommandBuffer`. It is suggested to use `@autoreleasepool` in [1,2] [1] https://developer.apple.com/forums/thread/662721 [2] https://forums.developer.apple.com/forums/thread/120931 This change-set wraps the `ggml_metal_graph_compute` in a `@autoreleasepool`. This commit addresses https://github.com/ggerganov/llama.cpp/issues/5436 --- ggml-metal.m | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index 5260ed827..c1d8e2de8 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -687,6 +687,7 @@ static bool ggml_metal_graph_compute( struct ggml_metal_context * ctx, struct ggml_cgraph * gf) { + @autoreleasepool { MTLComputePassDescriptor * edesc = MTLComputePassDescriptor.computePassDescriptor; edesc.dispatchType = MTLDispatchTypeSerial; @@ -2272,6 +2273,7 @@ static bool ggml_metal_graph_compute( [[MTLCaptureManager sharedCaptureManager] stopCapture]; } + } return true; } From 907e08c1109f498b01036367804cff3082c44524 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Sun, 11 Feb 2024 11:16:22 +0100 Subject: [PATCH 717/859] server : add llama2 chat template (#5425) * server: add mistral chat template * server: fix typo * server: rename template mistral to llama2 * server: format_llama2: remove BOS * server: validate "--chat-template" argument * server: clean up using_chatml variable Co-authored-by: Jared Van Bortel --------- Co-authored-by: Jared Van Bortel --- examples/server/oai.hpp | 8 ++++++-- examples/server/server.cpp | 22 ++++++++++++++++++++-- examples/server/utils.hpp | 30 ++++++++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 4 deletions(-) diff --git a/examples/server/oai.hpp b/examples/server/oai.hpp index 43410f803..2eca8a9fb 100644 --- a/examples/server/oai.hpp +++ b/examples/server/oai.hpp @@ -15,9 +15,13 @@ using json = nlohmann::json; inline static json oaicompat_completion_params_parse( - const json &body /* openai api json semantics */) + const json &body, /* openai api json semantics */ + const std::string &chat_template) { json llama_params; + std::string formatted_prompt = chat_template == "chatml" + ? format_chatml(body["messages"]) // OpenAI 'messages' to chatml (with <|im_start|>,...) + : format_llama2(body["messages"]); // OpenAI 'messages' to llama2 (with [INST],...) 
llama_params["__oaicompat"] = true; @@ -30,7 +34,7 @@ inline static json oaicompat_completion_params_parse( // https://platform.openai.com/docs/api-reference/chat/create llama_sampling_params default_sparams; llama_params["model"] = json_value(body, "model", std::string("unknown")); - llama_params["prompt"] = format_chatml(body["messages"]); // OpenAI 'messages' to llama.cpp 'prompt' + llama_params["prompt"] = formatted_prompt; llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); llama_params["temperature"] = json_value(body, "temperature", 0.0); llama_params["top_k"] = json_value(body, "top_k", default_sparams.top_k); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 8d668f798..4d212f1f0 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -36,6 +36,7 @@ struct server_params std::string hostname = "127.0.0.1"; std::vector api_keys; std::string public_path = "examples/server/public"; + std::string chat_template = "chatml"; int32_t port = 8080; int32_t read_timeout = 600; int32_t write_timeout = 600; @@ -1859,6 +1860,8 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" types: int, float, bool. example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); + printf(" --chat-template FORMAT_NAME"); + printf(" set chat template, possible valus is: llama2, chatml (default %s)", sparams.chat_template.c_str()); printf("\n"); } @@ -2290,6 +2293,21 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); } + else if (arg == "--chat-template") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + std::string value(argv[i]); + if (value != "chatml" && value != "llama2") { + fprintf(stderr, "error: chat template can be \"llama2\" or \"chatml\", but got: %s\n", value.c_str()); + invalid_param = true; + break; + } + sparams.chat_template = value; + } else if (arg == "--override-kv") { if (++i >= argc) { @@ -2743,13 +2761,13 @@ int main(int argc, char **argv) // TODO: add mount point without "/v1" prefix -- how? 
- svr.Post("/v1/chat/completions", [&llama, &validate_api_key](const httplib::Request &req, httplib::Response &res) + svr.Post("/v1/chat/completions", [&llama, &validate_api_key, &sparams](const httplib::Request &req, httplib::Response &res) { res.set_header("Access-Control-Allow-Origin", req.get_header_value("Origin")); if (!validate_api_key(req, res)) { return; } - json data = oaicompat_completion_params_parse(json::parse(req.body)); + json data = oaicompat_completion_params_parse(json::parse(req.body), sparams.chat_template); const int task_id = llama.queue_tasks.get_new_id(); llama.queue_results.add_waiting_task_id(task_id); diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 70cce0721..548548962 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -167,6 +167,34 @@ static T json_value(const json &body, const std::string &key, const T &default_v : default_value; } +inline std::string format_llama2(std::vector messages) +{ + std::ostringstream output; + bool is_inside_turn = false; + + for (auto it = messages.begin(); it != messages.end(); ++it) { + if (!is_inside_turn) { + output << "[INST] "; + } + std::string role = json_value(*it, "role", std::string("user")); + std::string content = json_value(*it, "content", std::string("")); + if (role == "system") { + output << "<>\n" << content << "\n<>\n\n"; + is_inside_turn = true; + } else if (role == "user") { + output << content << " [/INST]"; + is_inside_turn = true; + } else { + output << " " << content << "
    "; + is_inside_turn = false; + } + } + + LOG_VERBOSE("format_llama2", {{"text", output.str()}}); + + return output.str(); +} + inline std::string format_chatml(std::vector messages) { std::ostringstream chatml_msgs; @@ -180,6 +208,8 @@ inline std::string format_chatml(std::vector messages) chatml_msgs << "<|im_start|>assistant" << '\n'; + LOG_VERBOSE("format_chatml", {{"text", chatml_msgs.str()}}); + return chatml_msgs.str(); } From e4640d8fdf56f14a6db3d092bcd3d2d315cb5d04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Sun, 11 Feb 2024 12:44:51 +0100 Subject: [PATCH 718/859] lookup: add print for drafting performance (#5450) --- examples/lookup/lookup.cpp | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/examples/lookup/lookup.cpp b/examples/lookup/lookup.cpp index d8de7dd38..18235b8a1 100644 --- a/examples/lookup/lookup.cpp +++ b/examples/lookup/lookup.cpp @@ -1,7 +1,9 @@ #include "common.h" +#include "ggml.h" #include "llama.h" #include +#include #include #include #include @@ -73,6 +75,8 @@ int main(int argc, char ** argv){ int n_drafted = 0; int n_accept = 0; + int64_t t_draft_us = 0; + int n_past = inp.size(); bool has_eos = false; @@ -160,7 +164,7 @@ int main(int argc, char ** argv){ // generate n_pred tokens through prompt lookup auto prompt_lookup = [&]() -> void { - int inp_size = inp.size(); + const int inp_size = inp.size(); for (int ngram_size = ngram_max ; ngram_size > ngram_min; --ngram_size){ const llama_token * ngram = &inp[inp_size - ngram_size]; @@ -191,8 +195,12 @@ int main(int argc, char ** argv){ return; }; + const int64_t t_start_draft_us = ggml_time_us(); + prompt_lookup(); + t_draft_us += ggml_time_us() - t_start_draft_us; + llama_decode(ctx, batch_tgt); ++n_past; @@ -210,6 +218,8 @@ int main(int argc, char ** argv){ LOG_TEE("n_draft = %d\n", n_draft); LOG_TEE("n_predict = %d\n", n_predict); LOG_TEE("n_drafted = %d\n", n_drafted); + LOG_TEE("t_draft = %.2f ms, %.2f us per token, %.2f tokens per second\n", + t_draft_us*1e-3, 1.0f*t_draft_us/n_drafted, n_drafted/(1e-6*t_draft_us)); LOG_TEE("n_accept = %d\n", n_accept); LOG_TEE("accept = %.3f%%\n", 100.0f * n_accept / n_drafted); From a07d0fee1f05c5c1dc49948ae1a3293db017275f Mon Sep 17 00:00:00 2001 From: snadampal <87143774+snadampal@users.noreply.github.com> Date: Sun, 11 Feb 2024 07:22:33 -0600 Subject: [PATCH 719/859] ggml : add mmla kernels for quantized GEMM (#4966) * ggml: aarch64: implement smmla kernel for q8_0_q8_0 quantized gemm armv8.2-a and above supports MMLA instructions that have higher throughput than DOT. this commit adds mmla kernel for q8_0_q8_0 gemm. The feature is enabled if the platform supports "__ARM_FEATURE_MATMUL_INT8" On AWS Graviton3 processors this kernel resulted up to 1.5x improvement for prompt evaluation throughput compared to the default sdot kernel. * ggml: aarch64: implement smmla kernel for q4_0_q8_0 quantized gemm armv8.2-a and above supports MMLA instructions that have higher throughput than DOT. this commit adds mmla kernel for q4_0_q8_0 gemm. The feature is enabled if the platform supports "__ARM_FEATURE_MATMUL_INT8" On AWS Graviton3 processors this kernel resulted up to 1.5x improvement for prompt evaluation throughput compared to the default sdot kernel. * ggml: aarch64: implement smmla kernel for q4_1_q8_1 quantized gemm armv8.2-a and above supports MMLA instructions that have higher throughput than DOT. this commit adds mmla kernel for q4_1_q8_1 gemm. 
The feature is enabled if the platform supports "__ARM_FEATURE_MATMUL_INT8" On AWS Graviton3 processors this kernel resulted up to 1.5x improvement for prompt evaluation throughput compared to the default sdot kernel. * ggml: update unit tests for the new vec_dot interface * llama.cpp: add MATMUL_INT8 capability to system_info --- common/common.cpp | 1 + ggml-quants.c | 320 +++++++++++++++++++++++++++++++++-- ggml-quants.h | 26 +-- ggml.c | 164 ++++++++++++------ ggml.h | 5 +- llama.cpp | 1 + pocs/vdot/q8dot.cpp | 4 +- pocs/vdot/vdot.cpp | 4 +- tests/test-quantize-fns.cpp | 2 +- tests/test-quantize-perf.cpp | 2 +- 10 files changed, 441 insertions(+), 88 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index e0082a823..9a489a553 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1550,6 +1550,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "cpu_has_blas: %s\n", ggml_cpu_has_blas() ? "true" : "false"); fprintf(stream, "cpu_has_sse3: %s\n", ggml_cpu_has_sse3() ? "true" : "false"); fprintf(stream, "cpu_has_vsx: %s\n", ggml_cpu_has_vsx() ? "true" : "false"); + fprintf(stream, "cpu_has_matmul_int8: %s\n", ggml_cpu_has_matmul_int8() ? "true" : "false"); #ifdef NDEBUG fprintf(stream, "debug: false\n"); diff --git a/ggml-quants.c b/ggml-quants.c index 1031e3761..6c122dd2a 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -49,6 +49,8 @@ #define MIN(a, b) ((a) < (b) ? (a) : (b)) #define MAX(a, b) ((a) > (b) ? (a) : (b)) +#define UNUSED GGML_UNUSED + #define MM256_SET_M128I(a, b) _mm256_insertf128_si256(_mm256_castsi128_si256(b), (a), 1) #if defined(__AVX__) || defined(__AVX2__) || defined(__AVX512F__) || defined(__SSSE3__) @@ -3677,15 +3679,88 @@ static inline __m128i get_scale_shuffle(int i) { } #endif -void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + assert(nrc == 1); +#endif const block_q4_0 * restrict x = vx; const block_q8_0 * restrict y = vy; +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q4_0 * restrict vx0 = vx; + const block_q4_0 * restrict vx1 = vx + bx; + + const block_q8_0 * restrict vy0 = vy; + const block_q8_0 * restrict vy1 = vy + by; + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q4_0 * restrict b_x0 = &vx0[i]; + const block_q4_0 * restrict b_x1 = &vx1[i]; + const block_q8_0 * restrict b_y0 = &vy0[i]; + const block_q8_0 * restrict b_y1 = &vy1[i]; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + const int8x16_t s8b = vdupq_n_s8(0x8); + + const uint8x16_t v0_0 = vld1q_u8(b_x0->qs); + const uint8x16_t v0_1 = vld1q_u8(b_x1->qs); + + // 4-bit -> 8-bit + const int8x16_t v0_0l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t v0_0h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t v0_1l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t v0_1h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // sub 8 + const int8x16_t x0_l = vsubq_s8(v0_0l, s8b); + const int8x16_t x0_h = vsubq_s8(v0_0h, s8b); + const int8x16_t x1_l = vsubq_s8(v0_1l, s8b); + const int8x16_t x1_h = vsubq_s8(v0_1h, s8b); + + // load y + const int8x16_t y0_l = vld1q_s8(b_y0->qs); + const int8x16_t 
y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + float32x4_t scale = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + + vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif #if defined(__ARM_NEON) float32x4_t sumv0 = vdupq_n_f32(0.0f); float32x4_t sumv1 = vdupq_n_f32(0.0f); @@ -3967,15 +4042,89 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, const void * restrict vx, #endif } -void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_1; const int nb = n / qk; assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + assert(nrc == 1); +#endif const block_q4_1 * restrict x = vx; const block_q8_1 * restrict y = vy; +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q4_1 * restrict vx0 = vx; + const block_q4_1 * restrict vx1 = vx + bx; + const block_q8_1 * restrict vy0 = vy; + const block_q8_1 * restrict vy1 = vy + by; + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + float32x4_t summs0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q4_1 * restrict b_x0 = &vx0[i]; + const block_q4_1 * restrict b_x1 = &vx1[i]; + const block_q8_1 * restrict b_y0 = &vy0[i]; + const block_q8_1 * restrict b_y1 = &vy1[i]; + + float32x4_t summs_t = {GGML_FP16_TO_FP32(b_x0->m) * b_y0->s, + GGML_FP16_TO_FP32(b_x1->m) * b_y0->s, + GGML_FP16_TO_FP32(b_x0->m) * b_y1->s, + GGML_FP16_TO_FP32(b_x1->m) * b_y1->s}; + summs0 += summs_t; + + const uint8x16_t m4b = vdupq_n_u8(0x0F); + + const uint8x16_t v0_0 = vld1q_u8(b_x0->qs); + const uint8x16_t v0_1 = vld1q_u8(b_x1->qs); + + // 4-bit -> 8-bit + const int8x16_t x0_l = vreinterpretq_s8_u8(vandq_u8 (v0_0, m4b)); + const int8x16_t x0_h = vreinterpretq_s8_u8(vshrq_n_u8(v0_0, 4)); + const int8x16_t x1_l = vreinterpretq_s8_u8(vandq_u8 (v0_1, m4b)); + const int8x16_t x1_h = vreinterpretq_s8_u8(vshrq_n_u8(v0_1, 4)); + + // load y + const int8x16_t y0_l = 
vld1q_s8(b_y0->qs); + const int8x16_t y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + // mmla into int32x4_t + float32x4_t scale = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + + float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + sumv2 = sumv2 + summs0; + + vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif // TODO: add WASM SIMD #if defined(__ARM_NEON) float32x4_t sumv0 = vdupq_n_f32(0.0f); @@ -4107,12 +4256,17 @@ void ggml_vec_dot_q4_1_q8_1(const int n, float * restrict s, const void * restri #endif } -void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; assert(n % qk == 0); assert(qk == QK5_0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q5_0 * restrict x = vx; const block_q8_0 * restrict y = vy; @@ -4393,12 +4547,17 @@ void ggml_vec_dot_q5_0_q8_0(const int n, float * restrict s, const void * restri #endif } -void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_1; const int nb = n / qk; assert(n % qk == 0); assert(qk == QK5_1); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q5_1 * restrict x = vx; const block_q8_1 * restrict y = vy; @@ -4692,15 +4851,75 @@ void ggml_vec_dot_q5_1_q8_1(const int n, float * restrict s, const void * restri #endif } -void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { const int qk = QK8_0; const int nb = n / qk; assert(n % qk == 0); +#if defined(__ARM_FEATURE_MATMUL_INT8) + assert((nrc == 2) || (nrc == 1)); +#else + 
assert(nrc == 1); +#endif const block_q8_0 * restrict x = vx; const block_q8_0 * restrict y = vy; +#if defined(__ARM_FEATURE_MATMUL_INT8) + if (nrc == 2) { + const block_q8_0 * restrict vx0 = vx; + const block_q8_0 * restrict vx1 = vx + bx; + const block_q8_0 * restrict vy0 = vy; + const block_q8_0 * restrict vy1 = vy + by; + + float32x4_t sumv0 = vdupq_n_f32(0.0f); + + for (int i = 0; i < nb; i++) { + const block_q8_0 * restrict b_x0 = &vx0[i]; + const block_q8_0 * restrict b_y0 = &vy0[i]; + + const block_q8_0 * restrict b_x1 = &vx1[i]; + const block_q8_0 * restrict b_y1 = &vy1[i]; + + const int8x16_t x0_l = vld1q_s8(b_x0->qs); + const int8x16_t x0_h = vld1q_s8(b_x0->qs + 16); + const int8x16_t x1_l = vld1q_s8(b_x1->qs); + const int8x16_t x1_h = vld1q_s8(b_x1->qs + 16); + + // load y + const int8x16_t y0_l = vld1q_s8(b_y0->qs); + const int8x16_t y0_h = vld1q_s8(b_y0->qs + 16); + const int8x16_t y1_l = vld1q_s8(b_y1->qs); + const int8x16_t y1_h = vld1q_s8(b_y1->qs + 16); + + float32x4_t scale = {GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x0->d)*GGML_FP16_TO_FP32(b_y1->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y0->d), + GGML_FP16_TO_FP32(b_x1->d)*GGML_FP16_TO_FP32(b_y1->d)}; + + int8x16_t l0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + int8x16_t l1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_l), vreinterpretq_s64_s8(x1_l))); + + int8x16_t l2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + int8x16_t l3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(x0_h), vreinterpretq_s64_s8(x1_h))); + + int8x16_t r0 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + int8x16_t r1 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_l), vreinterpretq_s64_s8(y1_l))); + + int8x16_t r2 = vreinterpretq_s8_s64(vzip1q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + int8x16_t r3 = vreinterpretq_s8_s64(vzip2q_s64(vreinterpretq_s64_s8(y0_h), vreinterpretq_s64_s8(y1_h))); + + sumv0 = vmlaq_f32(sumv0,(vcvtq_f32_s32(vmmlaq_s32((vmmlaq_s32((vmmlaq_s32((vmmlaq_s32(vdupq_n_s32(0), l0, r0)), + l1, r1)), l2, r2)), l3, r3))), scale); + } + float32x4_t sumv1 = vextq_f32(sumv0, sumv0, 2); + float32x4_t sumv2 = vzip1q_f32(sumv0, sumv1); + + vst1_f32(s, vget_low_f32(sumv2)); + vst1_f32(s + bs, vget_high_f32(sumv2)); + return; + } +#endif #if defined(__ARM_NEON) float32x4_t sumv0 = vdupq_n_f32(0.0f); float32x4_t sumv1 = vdupq_n_f32(0.0f); @@ -4795,7 +5014,12 @@ void ggml_vec_dot_q8_0_q8_0(const int n, float * restrict s, const void * restri } #if QK_K == 256 -void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q2_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -5171,7 +5395,12 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri #else -void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + 
UNUSED(bs); const block_q2_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -5429,8 +5658,13 @@ void ggml_vec_dot_q2_K_q8_K(const int n, float * restrict s, const void * restri #endif #if QK_K == 256 -void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const uint32_t kmask1 = 0x03030303; const uint32_t kmask2 = 0x0f0f0f0f; @@ -5949,8 +6183,13 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri #else -void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q3_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -6292,8 +6531,13 @@ void ggml_vec_dot_q3_K_q8_K(const int n, float * restrict s, const void * restri #endif #if QK_K == 256 -void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q4_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -6648,8 +6892,13 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri #endif } #else -void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q4_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -6891,8 +7140,13 @@ void ggml_vec_dot_q4_K_q8_K(const int n, float * restrict s, const void * restri #endif #if QK_K == 256 -void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q5_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -7311,8 +7565,13 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri #else -void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q5_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -7577,8 +7836,13 @@ void ggml_vec_dot_q5_K_q8_K(const int n, float * restrict s, const void * restri #if 
QK_K == 256 -void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q6_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -8009,8 +8273,13 @@ void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restri #else -void ggml_vec_dot_q6_K_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q6_K * restrict x = vx; const block_q8_K * restrict y = vy; @@ -8339,8 +8608,13 @@ static const int8_t keven_signs_q2xs[1024] = { 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, }; -void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_iq2_xxs * restrict x = vx; const block_q8_K * restrict y = vy; @@ -8462,8 +8736,13 @@ void ggml_vec_dot_iq2_xxs_q8_K(const int n, float * restrict s, const void * res #endif } -void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_iq2_xs * restrict x = vx; const block_q8_K * restrict y = vy; @@ -8682,8 +8961,13 @@ void ggml_vec_dot_iq2_xs_q8_K(const int n, float * restrict s, const void * rest } // TODO -void ggml_vec_dot_iq3_xxs_q8_K(const int n, float * restrict s, const void * restrict vx, const void * restrict vy) { +void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_iq3_xxs * restrict x = vx; const block_q8_K * restrict y = vy; diff --git a/ggml-quants.h b/ggml-quants.h index bfdf3c997..68f09b1e1 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -245,20 +245,20 @@ void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_ void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product -void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q4_1_q8_1(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q5_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q5_1_q8_1(int n, float * GGML_RESTRICT s, const void * 
GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q8_0_q8_0(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q4_1_q8_1(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_1_q8_1(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q8_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); -void ggml_vec_dot_q2_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q3_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q4_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q5_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); -void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT vx, const void * GGML_RESTRICT vy); +void ggml_vec_dot_q2_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q3_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q4_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q5_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // // Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") diff --git a/ggml.c b/ggml.c index 86cd65862..e45b78d7e 100644 --- a/ggml.c +++ b/ggml.c @@ -428,8 +428,8 @@ int64_t ggml_cycles_per_ms(void) { static const size_t CACHE_LINE_SIZE_F32 = CACHE_LINE_SIZE/sizeof(float); -static void ggml_vec_dot_f32(const int n, float * restrict s, const float * restrict x, const float * restrict y); -static void ggml_vec_dot_f16(const int n, float * restrict s, ggml_fp16_t * restrict x, ggml_fp16_t * restrict y); +static void ggml_vec_dot_f32(int n, float * restrict s, size_t bs, const float * restrict x, size_t bx, const float * restrict y, size_t by, int nrc); +static void ggml_vec_dot_f16(int n, float * restrict s, size_t bs, ggml_fp16_t * restrict x, size_t bx, ggml_fp16_t * restrict y, size_t by, int nrc); static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { [GGML_TYPE_I8] = { @@ -457,6 +457,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .is_quantized = false, .vec_dot = (ggml_vec_dot_t) ggml_vec_dot_f32, .vec_dot_type = GGML_TYPE_F32, + .nrows = 1, }, [GGML_TYPE_F16] = { .type_name = "f16", @@ -468,6 +469,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) ggml_fp32_to_fp16_row, .vec_dot = (ggml_vec_dot_t) ggml_vec_dot_f16, .vec_dot_type = GGML_TYPE_F16, + .nrows = 1, }, [GGML_TYPE_Q4_0] = { .type_name = "q4_0", @@ -479,6 +481,11 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q4_0_reference, .vec_dot = ggml_vec_dot_q4_0_q8_0, .vec_dot_type = GGML_TYPE_Q8_0, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif }, [GGML_TYPE_Q4_1] = { .type_name = "q4_1", @@ -490,6 +497,11 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q4_1_reference, .vec_dot = ggml_vec_dot_q4_1_q8_1, .vec_dot_type = GGML_TYPE_Q8_1, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif }, [4] = { // GGML_TYPE_Q4_2 .type_name = "DEPRECATED", @@ -501,6 +513,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = NULL, .vec_dot = NULL, .vec_dot_type = GGML_TYPE_COUNT, + .nrows = 1, }, [5] = { // GGML_TYPE_Q4_3 .type_name = "DEPRECATED", @@ -512,6 +525,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = NULL, .vec_dot = NULL, .vec_dot_type = GGML_TYPE_COUNT, + .nrows = 1, }, [GGML_TYPE_Q5_0] = { .type_name = "q5_0", @@ -523,6 +537,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q5_0_reference, .vec_dot = ggml_vec_dot_q5_0_q8_0, .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, }, [GGML_TYPE_Q5_1] = { .type_name = "q5_1", @@ -534,6 +549,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q5_1_reference, .vec_dot = ggml_vec_dot_q5_1_q8_1, .vec_dot_type = GGML_TYPE_Q8_1, + .nrows = 1, }, [GGML_TYPE_Q8_0] = { .type_name = "q8_0", @@ -545,6 +561,11 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q8_0_reference, .vec_dot = ggml_vec_dot_q8_0_q8_0, .vec_dot_type = GGML_TYPE_Q8_0, +#if defined (__ARM_FEATURE_MATMUL_INT8) + .nrows = 2, +#else + .nrows = 1, +#endif }, [GGML_TYPE_Q8_1] = { .type_name = "q8_1", @@ -554,6 +575,7 @@ static const ggml_type_traits_t 
type_traits[GGML_TYPE_COUNT] = { .from_float = quantize_row_q8_1, .from_float_reference = (ggml_from_float_t) quantize_row_q8_1_reference, .vec_dot_type = GGML_TYPE_Q8_1, + .nrows = 1, }, [GGML_TYPE_Q2_K] = { .type_name = "q2_K", @@ -565,6 +587,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q2_K_reference, .vec_dot = ggml_vec_dot_q2_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q3_K] = { .type_name = "q3_K", @@ -576,6 +599,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q3_K_reference, .vec_dot = ggml_vec_dot_q3_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q4_K] = { .type_name = "q4_K", @@ -587,6 +611,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q4_K_reference, .vec_dot = ggml_vec_dot_q4_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q5_K] = { .type_name = "q5_K", @@ -598,6 +623,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q5_K_reference, .vec_dot = ggml_vec_dot_q5_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q6_K] = { .type_name = "q6_K", @@ -609,6 +635,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t) quantize_row_q6_K_reference, .vec_dot = ggml_vec_dot_q6_K_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_IQ2_XXS] = { .type_name = "iq2_xxs", @@ -620,6 +647,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = NULL, .vec_dot = ggml_vec_dot_iq2_xxs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_IQ2_XS] = { .type_name = "iq2_xs", @@ -631,6 +659,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = NULL, .vec_dot = ggml_vec_dot_iq2_xs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_IQ3_XXS] = { .type_name = "iq3_xxs", @@ -642,6 +671,7 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .from_float_reference = (ggml_from_float_t)quantize_row_iq3_xxs_reference, .vec_dot = ggml_vec_dot_iq3_xxs_q8_K, .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", @@ -1212,7 +1242,13 @@ inline static void ggml_vec_neg_f32 (const int n, float * y, const float * x) inline static void ggml_vec_mul_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) z[i] = x[i]*y[i]; } inline static void ggml_vec_div_f32 (const int n, float * z, const float * x, const float * y) { for (int i = 0; i < n; ++i) z[i] = x[i]/y[i]; } -static void ggml_vec_dot_f32(const int n, float * restrict s, const float * restrict x, const float * restrict y) { +static void ggml_vec_dot_f32(int n, float * restrict s, size_t bs, const float * restrict x, size_t bx, const float * restrict y, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + #ifdef GGML_SIMD float sumf = 0.0f; const int np = (n & ~(GGML_F32_STEP - 1)); @@ -1249,7 +1285,13 @@ static void ggml_vec_dot_f32(const int n, float * restrict s, const float * rest *s = sumf; } -static void ggml_vec_dot_f16(const int n, float * restrict s, ggml_fp16_t * restrict x, ggml_fp16_t * restrict y) { +static void ggml_vec_dot_f16(int n, float * restrict s, 
size_t bs, ggml_fp16_t * restrict x, size_t bx, ggml_fp16_t * restrict y, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + ggml_float sumf = 0.0; #if defined(GGML_SIMD) @@ -1455,7 +1497,7 @@ inline static void ggml_vec_scale_f32(const int n, float * y, const float v) { #endif } -inline static void ggml_vec_norm_f32 (const int n, float * s, const float * x) { ggml_vec_dot_f32(n, s, x, x); *s = sqrtf(*s); } +inline static void ggml_vec_norm_f32 (const int n, float * s, const float * x) { ggml_vec_dot_f32(n, s, 0, x, 0, x, 0, 1); *s = sqrtf(*s); } inline static void ggml_vec_sqr_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = x[i]*x[i]; } inline static void ggml_vec_sqrt_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = sqrtf(x[i]); } inline static void ggml_vec_log_f32 (const int n, float * y, const float * x) { for (int i = 0; i < n; ++i) y[i] = logf(x[i]); } @@ -9992,6 +10034,7 @@ static void ggml_compute_forward_mul_mat( ggml_vec_dot_t const vec_dot = type_traits[type].vec_dot; enum ggml_type const vec_dot_type = type_traits[type].vec_dot_type; ggml_from_float_t const from_float_to_vec_dot = type_traits[vec_dot_type].from_float; + int64_t const vec_dot_num_rows = type_traits[type].nrows; GGML_ASSERT(ne0 == ne01); GGML_ASSERT(ne1 == ne11); @@ -10159,12 +10202,23 @@ static void ggml_compute_forward_mul_mat( const int64_t blck_0 = 16; const int64_t blck_1 = 16; + // dot kernels can handle 1 row and col at a time, but mmla kernels can process 2 rows and cols + int64_t nrc = vec_dot_num_rows; + // TODO: currently the mmla kernels support only even numbered rows/cols. + // this check can be removed once they are extended to support odd numbered rows/cols too + if ((nr0 % 2 != 0) || (ne11 % 2 != 0)) { + nrc = 1; + } + + const size_t src1_col_stride = src1_cont || src1->type != vec_dot_type ? row_size : nb11; + // attempt to reduce false-sharing (does not seem to make a difference) - float tmp[16]; + // 16 * 2, accounting for mmla kernels + float tmp[32]; for (int64_t iir1 = ir110; iir1 < ir111; iir1 += blck_1) { for (int64_t iir0 = ir010; iir0 < ir011; iir0 += blck_0) { - for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir111; ++ir1) { + for (int64_t ir1 = iir1; ir1 < iir1 + blck_1 && ir1 < ir111; ir1 += nrc) { const int64_t i13 = (ir1/(ne12*ne1)); const int64_t i12 = (ir1 - i13*ne12*ne1)/ne1; const int64_t i11 = (ir1 - i13*ne12*ne1 - i12*ne1); @@ -10187,17 +10241,19 @@ static void ggml_compute_forward_mul_mat( (src1_cont || src1->type != vec_dot_type ? (i11 + i12*ne11 + i13*ne12*ne11)*row_size : (i11*nb11 + i12*nb12 + i13*nb13)); - float * dst_col = (float *) ((char *) dst->data + (i1*nb1 + i2*nb2 + i3*nb3)); //for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { // vec_dot(ne00, &dst_col[ir0], src0_row + ir0*nb01, src1_col); //} - for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { - vec_dot(ne00, &tmp[ir0 - iir0], src0_row + ir0*nb01, src1_col); + for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ir0 += nrc) { + vec_dot(ne00, &tmp[ir0 - iir0], (nrc>1 ? 16 : 0), src0_row + ir0*nb01, (nrc>1 ? nb01 : 0), src1_col, (nrc>1 ? 
src1_col_stride : 0), nrc); + } + + for (int cn = 0; cn < nrc; ++cn) { + memcpy(&dst_col[iir0 + cn*nb1/nb0], tmp + (cn*16), (MIN(iir0 + blck_0, ir011) - iir0)*sizeof(float)); } - memcpy(&dst_col[iir0], tmp, (MIN(iir0 + blck_0, ir011) - iir0)*sizeof(float)); } } } @@ -10386,7 +10442,7 @@ static void ggml_compute_forward_mul_mat_id( //} for (int64_t ir0 = iir0; ir0 < iir0 + blck_0 && ir0 < ir011; ++ir0) { - vec_dot(ne00, &tmp[ir0 - iir0], src0_row + ir0*nb01, src1_col); + vec_dot(ne00, &tmp[ir0 - iir0], 0, src0_row + ir0*nb01, 0, src1_col, 0, 1); } memcpy(&dst_col[iir0], tmp, (MIN(iir0 + blck_0, ir011) - iir0)*sizeof(float)); } @@ -11568,7 +11624,7 @@ static void ggml_compute_forward_soft_max_back_f32( // linear runtime, no additional memory float dot_y_dy = 0; - ggml_vec_dot_f32 (nc, &dot_y_dy, y, dy); + ggml_vec_dot_f32 (nc, &dot_y_dy, 0, y, 0, dy, 0, 1); ggml_vec_cpy_f32 (nc, dx, dy); ggml_vec_acc1_f32(nc, dx, -dot_y_dy); ggml_vec_mul_f32 (nc, dx, dx, y); @@ -12369,9 +12425,9 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( const int i1n = i10*ne11; for (int i00 = 0; i00 < ne00; i00++) { float v = 0; - ggml_vec_dot_f16(ne02, &v, - (ggml_fp16_t *) wdata_src + i1n, - (ggml_fp16_t *) wdata_kernel + i00*ne02); + ggml_vec_dot_f16(ne02, &v, 0, + (ggml_fp16_t *) wdata_src + i1n, 0, + (ggml_fp16_t *) wdata_kernel + i00*ne02, 0, 1); dst_data[i10*s0 + i00] += v; } } @@ -12466,9 +12522,9 @@ static void ggml_compute_forward_conv_transpose_1d_f32( const int i1n = i10*ne11; for (int i00 = 0; i00 < ne00; i00++) { float v = 0; - ggml_vec_dot_f32(ne02, &v, - wdata_src + i1n, - wdata_kernel + i00*ne02); + ggml_vec_dot_f32(ne02, &v, 0, + wdata_src + i1n, 0, + wdata_kernel + i00*ne02, 0, 1); dst_data[i10*s0 + i00] += v; } } @@ -12783,9 +12839,9 @@ static void ggml_compute_forward_conv_transpose_2d( for (int i01 = 0; i01 < ne01; i01++) { for (int i00 = 0; i00 < ne00; i00++) { float v = 0; - ggml_vec_dot_f16(ne03, &v, - wdata_src + i1n, - wdata_kernel + i01*ne00*ne03 + i00*ne03); + ggml_vec_dot_f16(ne03, &v, 0, + wdata_src + i1n, 0, + wdata_kernel + i01*ne00*ne03 + i00*ne03, 0, 1); dst_data[(i11*stride + i01)*ne0 + i10*stride + i00] += v; } } @@ -13214,9 +13270,9 @@ static void ggml_compute_forward_flash_attn_f32( const int i1 = ik1; ggml_vec_dot_f32(neq0, - S + i1, - (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), - (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3))); + S + i1, 0, + (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), 0, + (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)), 0, 1); } // scale @@ -13299,9 +13355,9 @@ static void ggml_compute_forward_flash_attn_f32( const int iv3 = iq3; ggml_vec_dot_f32(masked_begin, - (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), - (float *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), - S); + (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), 0, + (float *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), 0, + S, 0, 1); } } } @@ -13404,9 +13460,9 @@ static void ggml_compute_forward_flash_attn_f16( const int i1 = ik1; ggml_vec_dot_f16(neq0, - S + i1, - (ggml_fp16_t *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), - (ggml_fp16_t *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3))); + S + i1, 0, + (ggml_fp16_t *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), 0, + (ggml_fp16_t *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)), 0, 1); } } else { for (int64_t ic = 0; ic < nek1; ic += GGML_VEC_DOT_UNROLL) { @@ 
-13508,9 +13564,9 @@ static void ggml_compute_forward_flash_attn_f16( const int iv3 = iq3; ggml_vec_dot_f16(nev0, - (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), - (ggml_fp16_t *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), - S16); + (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), 0, + (ggml_fp16_t *) ((char *) v->data + ( ic*nbv1 + iv2*nbv2 + iv3*nbv3)), 0, + S16, 0, 1); } } else { for (int64_t ic = 0; ic < nev1; ic += GGML_VEC_DOT_UNROLL) { @@ -13652,9 +13708,9 @@ static void ggml_compute_forward_flash_ff_f16( const int i1 = ib01; ggml_vec_dot_f16(nea0, - S + i1, - (ggml_fp16_t *) ((char *) b0->data + (ib01*nbb01 + ib02*nbb02 + ib03*nbb03)), - (ggml_fp16_t *) ((char *) a->data + ( ia1*nba1 + ia2*nba2 + ia3*nba3))); + S + i1, 0, + (ggml_fp16_t *) ((char *) b0->data + (ib01*nbb01 + ib02*nbb02 + ib03*nbb03)), 0, + (ggml_fp16_t *) ((char *) a->data + ( ia1*nba1 + ia2*nba2 + ia3*nba3)), 0, 1); } ggml_vec_add_f32(neb01, S, S, (float *) b1->data); @@ -13677,9 +13733,9 @@ static void ggml_compute_forward_flash_ff_f16( for (int64_t ic = 0; ic < nec01; ++ic) { ggml_vec_dot_f16(neb01, - (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), - (ggml_fp16_t *) ((char *) c0->data + ( ic*nbc01 + i2*nbc02 + i3*nbc03)), - S16); + (float *) ((char *) dst->data + (ic*nb0 + i1*nb1 + i2*nb2 + i3*nb3)), 0, + (ggml_fp16_t *) ((char *) c0->data + ( ic*nbc01 + i2*nbc02 + i3*nbc03)), 0, + S16, 0, 1); } ggml_vec_add_f32(nec01, @@ -13866,9 +13922,9 @@ static void ggml_compute_forward_flash_attn_back_f32( const int i1 = ik1; ggml_vec_dot_f32(neq0, - S + i1, - (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), - (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3))); + S + i1, 0, + (float *) ((char *) k->data + (ik1*nbk1 + ik2*nbk2 + ik3*nbk3)), 0, + (float *) ((char *) q->data + (iq1*nbq1 + iq2*nbq2 + iq3*nbq3)), 0, 1); } // scale @@ -14013,7 +14069,7 @@ static void ggml_compute_forward_flash_attn_back_f32( // S = SM * (S - dot(SM, S)) float dot_SM_gradSM = 0; - ggml_vec_dot_f32 (masked_begin, &dot_SM_gradSM, SM, S); + ggml_vec_dot_f32 (masked_begin, &dot_SM_gradSM, 0, SM, 0, S, 0, 1); ggml_vec_acc1_f32(M, S, -dot_SM_gradSM); ggml_vec_mul_f32 (masked_begin, S, S, SM); @@ -18382,7 +18438,7 @@ static enum ggml_opt_result linesearch_backtracking( } // compute the initial gradient in the search direction - ggml_vec_dot_f32(nx, &dginit, g, d); + ggml_vec_dot_f32(nx, &dginit, 0, g, 0, d, 0, 1); // make sure that d points to a descent direction if (0 < dginit) { @@ -18432,7 +18488,7 @@ static enum ggml_opt_result linesearch_backtracking( return count; } - ggml_vec_dot_f32(nx, &dg, g, d); + ggml_vec_dot_f32(nx, &dg, 0, g, 0, d, 0, 1); // check the Wolfe condition if (dg < params->lbfgs.wolfe * dginit) { @@ -18693,8 +18749,8 @@ static enum ggml_opt_result ggml_opt_lbfgs( // ys = y^t \cdot s -> 1 / \rho. // yy = y^t \cdot y. 
// - ggml_vec_dot_f32(nx, &ys, &lm_y[end[0]*nx], &lm_s[end[0]*nx]); - ggml_vec_dot_f32(nx, &yy, &lm_y[end[0]*nx], &lm_y[end[0]*nx]); + ggml_vec_dot_f32(nx, &ys, 0, &lm_y[end[0]*nx], 0, &lm_s[end[0]*nx], 0, 1); + ggml_vec_dot_f32(nx, &yy, 0, &lm_y[end[0]*nx], 0, &lm_y[end[0]*nx], 0, 1); lm_ys[end[0]] = ys; @@ -18713,7 +18769,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( for (int i = 0; i < bound; ++i) { j[0] = (j[0] + m - 1) % m; // \alpha_{j} = \rho_{j} s^{t}_{j} \cdot q_{k+1} - ggml_vec_dot_f32(nx, &lm_alpha[j[0]], &lm_s[j[0]*nx], d); + ggml_vec_dot_f32(nx, &lm_alpha[j[0]], 0, &lm_s[j[0]*nx], 0, d, 0, 1); lm_alpha[j[0]] /= lm_ys[j[0]]; // q_{i} = q_{i+1} - \alpha_{i} y_{i} ggml_vec_mad_f32(nx, d, &lm_y[j[0]*nx], -lm_alpha[j[0]]); @@ -18723,7 +18779,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( for (int i = 0; i < bound; ++i) { // \beta_{j} = \rho_{j} y^t_{j} \cdot \gamma_{i} - ggml_vec_dot_f32(nx, &beta, &lm_y[j[0]*nx], d); + ggml_vec_dot_f32(nx, &beta, 0, &lm_y[j[0]*nx], 0, d, 0, 1); beta /= lm_ys[j[0]]; // \gamma_{i+1} = \gamma_{i} + (\alpha_{j} - \beta_{j}) s_{j} ggml_vec_mad_f32(nx, d, &lm_s[j[0]*nx], lm_alpha[j[0]] - beta); @@ -20611,4 +20667,12 @@ int ggml_cpu_has_vsx(void) { #endif } +int ggml_cpu_has_matmul_int8(void) { +#if defined(__ARM_FEATURE_MATMUL_INT8) + return 1; +#else + return 0; +#endif +} + //////////////////////////////////////////////////////////////////////////////// diff --git a/ggml.h b/ggml.h index 1360cd8ee..9cfec5bac 100644 --- a/ggml.h +++ b/ggml.h @@ -2278,6 +2278,7 @@ extern "C" { GGML_API int ggml_cpu_has_ssse3 (void); GGML_API int ggml_cpu_has_sycl (void); GGML_API int ggml_cpu_has_vsx (void); + GGML_API int ggml_cpu_has_matmul_int8(void); // // Internal types and functions exposed for tests and benchmarks @@ -2291,7 +2292,8 @@ extern "C" { #endif typedef void (*ggml_to_float_t) (const void * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); typedef void (*ggml_from_float_t)(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); - typedef void (*ggml_vec_dot_t) (const int n, float * GGML_RESTRICT s, const void * GGML_RESTRICT x, const void * GGML_RESTRICT y); + typedef void (*ggml_vec_dot_t) (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT x, size_t bx, + const void * GGML_RESTRICT y, size_t by, int nrc); typedef struct { const char * type_name; @@ -2303,6 +2305,7 @@ extern "C" { ggml_from_float_t from_float_reference; ggml_vec_dot_t vec_dot; enum ggml_type vec_dot_type; + int64_t nrows; // number of rows to process simultaneously; } ggml_type_traits_t; GGML_API ggml_type_traits_t ggml_internal_get_type_traits(enum ggml_type type); diff --git a/llama.cpp b/llama.cpp index 0566b087b..3f39a67fb 100644 --- a/llama.cpp +++ b/llama.cpp @@ -11869,6 +11869,7 @@ const char * llama_print_system_info(void) { s += "SSE3 = " + std::to_string(ggml_cpu_has_sse3()) + " | "; s += "SSSE3 = " + std::to_string(ggml_cpu_has_ssse3()) + " | "; s += "VSX = " + std::to_string(ggml_cpu_has_vsx()) + " | "; + s += "MATMUL_INT8 = " + std::to_string(ggml_cpu_has_matmul_int8()) + " | "; return s.c_str(); } diff --git a/pocs/vdot/q8dot.cpp b/pocs/vdot/q8dot.cpp index 111770d55..1a52ff5e9 100644 --- a/pocs/vdot/q8dot.cpp +++ b/pocs/vdot/q8dot.cpp @@ -156,8 +156,8 @@ int main(int argc, char** argv) { t1 = std::chrono::high_resolution_clock::now(); float fs; - if (type == 0) funcs.vec_dot(kVecSize * QK4_1, &fs, x40.data(), y.data()); - else funcs.vec_dot(kVecSize * QK4_1, &fs, x41.data(), y.data()); + if (type == 0) funcs.vec_dot(kVecSize * QK4_1, &fs, 
0, x40.data(), 0, y.data(), 0, 1); + else funcs.vec_dot(kVecSize * QK4_1, &fs, 0, x41.data(), 0, y.data(), 0, 1); t2 = std::chrono::high_resolution_clock::now(); t = 1e-3*std::chrono::duration_cast(t2-t1).count(); if (iloop > 3) ggml.addResult(fs, t); diff --git a/pocs/vdot/vdot.cpp b/pocs/vdot/vdot.cpp index 73ffcd1ca..17e9e4482 100644 --- a/pocs/vdot/vdot.cpp +++ b/pocs/vdot/vdot.cpp @@ -284,8 +284,8 @@ int main(int argc, char** argv) { else { auto vdot = ggml_internal_get_type_traits(funcs.vec_dot_type); vdot.from_float(y1.data(), q8.data(), kVecSize); - if (useQ4_1) funcs.vec_dot(kVecSize, &result, q41.data(), q8.data()); - else funcs.vec_dot(kVecSize, &result, q40.data(), q8.data()); + if (useQ4_1) funcs.vec_dot(kVecSize, &result, 0, q41.data(), 0, q8.data(), 0, 1); + else funcs.vec_dot(kVecSize, &result, 0, q40.data(), 0, q8.data(), 0, 1); } sumq += result; t2 = std::chrono::high_resolution_clock::now(); diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp index 43df8022d..5e92d5742 100644 --- a/tests/test-quantize-fns.cpp +++ b/tests/test-quantize-fns.cpp @@ -87,7 +87,7 @@ static float dot_product_error( vdot.from_float(test_data2, tmp_q2.data(), test_size); float result = INFINITY; - qfns.vec_dot(test_size, &result, tmp_q1.data(), tmp_q2.data()); + qfns.vec_dot(test_size, &result, 0, tmp_q1.data(), 0, tmp_q2.data(), 0, 1); const float dot_ref = dot_product(test_data1, test_data2, test_size); diff --git a/tests/test-quantize-perf.cpp b/tests/test-quantize-perf.cpp index 8ec817344..48d9fae3d 100644 --- a/tests/test-quantize-perf.cpp +++ b/tests/test-quantize-perf.cpp @@ -346,7 +346,7 @@ int main(int argc, char * argv[]) { printf(" %zu values (%.2f MB)\n", size, 4*size/(float)(1024*1024)); auto quantize_fn = [&](void) -> float { float result; - qfns.vec_dot(size, &result, test_q1, test_q2); + qfns.vec_dot(size, &result, 0, test_q1, 0, test_q2, 0, 1); return result; }; size_t quantized_size = ggml_row_size(type, size); From 0f2411f154db46780d3aaa3a0664691b2170c83f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 11 Feb 2024 15:33:01 +0200 Subject: [PATCH 720/859] ggml : fix compile warnings (unused vars) (#4966) --- ggml-quants.c | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/ggml-quants.c b/ggml-quants.c index 6c122dd2a..b2a309bf8 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3689,6 +3689,10 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r #else assert(nrc == 1); #endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q4_0 * restrict x = vx; const block_q8_0 * restrict y = vy; @@ -4052,6 +4056,10 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r #else assert(nrc == 1); #endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q4_1 * restrict x = vx; const block_q8_1 * restrict y = vy; @@ -4861,6 +4869,10 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r #else assert(nrc == 1); #endif + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); const block_q8_0 * restrict x = vx; const block_q8_0 * restrict y = vy; From 139b62a839825ef20084ed75ed624db7a5ad554a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 11 Feb 2024 15:33:43 +0200 Subject: [PATCH 721/859] common : fix compile warning --- common/sampling.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index 844ad7c53..82cbdecea 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ 
-127,8 +127,6 @@ static void sampler_queue( const llama_sampling_params & params, llama_token_data_array & cur_p, size_t & min_keep) { - const int n_vocab = llama_n_vocab(llama_get_model(ctx_main)); - const float temp = params.temp; const float dynatemp_range = params.dynatemp_range; const float dynatemp_exponent = params.dynatemp_exponent; From 85910c5b30f6e268321be8df044f5528a6efac52 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 11 Feb 2024 15:35:50 +0200 Subject: [PATCH 722/859] main : ctrl+C print timing in non-interactive mode (#3873) --- examples/main/main.cpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index 0ed4d79f9..e8ab8cbae 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -98,7 +98,7 @@ static void write_logfile( #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) || defined (_WIN32) static void sigint_handler(int signo) { if (signo == SIGINT) { - if (!is_interacting) { + if (!is_interacting && g_params->interactive) { is_interacting = true; } else { console::cleanup(); @@ -392,7 +392,8 @@ int main(int argc, char ** argv) { LOG_TEE("\n"); } - if (params.interactive) { + // ctrl+C handling + { #if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) struct sigaction sigint_action; sigint_action.sa_handler = sigint_handler; @@ -405,7 +406,9 @@ int main(int argc, char ** argv) { }; SetConsoleCtrlHandler(reinterpret_cast(console_ctrl_handler), true); #endif + } + if (params.interactive) { LOG_TEE("%s: interactive mode on.\n", __func__); if (!params.antiprompt.empty()) { From 684780141a08200ec98eba3e982dbafd1d0b5000 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Sun, 11 Feb 2024 13:38:14 +0000 Subject: [PATCH 723/859] server : allow to specify tokens as strings in logit_bias (#5003) * server: allow to specify tokens as strings in logit_bias * Apply suggestions from code review Co-authored-by: Georgi Gerganov --------- Co-authored-by: Georgi Gerganov --- examples/server/README.md | 2 +- examples/server/server.cpp | 32 +++++++++++++++++++++++++------- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 1db7cdf21..0f7373ae8 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -185,7 +185,7 @@ node index.js `ignore_eos`: Ignore end of stream token and continue generating (default: false). - `logit_bias`: Modify the likelihood of a token appearing in the generated text completion. For example, use `"logit_bias": [[15043,1.0]]` to increase the likelihood of the token 'Hello', or `"logit_bias": [[15043,-1.0]]` to decrease its likelihood. Setting the value to false, `"logit_bias": [[15043,false]]` ensures that the token `Hello` is never produced (default: []). + `logit_bias`: Modify the likelihood of a token appearing in the generated text completion. For example, use `"logit_bias": [[15043,1.0]]` to increase the likelihood of the token 'Hello', or `"logit_bias": [[15043,-1.0]]` to decrease its likelihood. Setting the value to false, `"logit_bias": [[15043,false]]` ensures that the token `Hello` is never produced. The tokens can also be represented as strings, e.g. `[["Hello, World!",-0.5]]` will reduce the likelihood of all the individual tokens that represent the string `Hello, World!`, just like the `presence_penalty` does. (default: []). 
`n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token (default: 0) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 4d212f1f0..1699eb76b 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -626,18 +626,36 @@ struct llama_server_context const int n_vocab = llama_n_vocab(model); for (const auto &el : *logit_bias) { - if (el.is_array() && el.size() == 2 && el[0].is_number_integer()) + if (el.is_array() && el.size() == 2) { - llama_token tok = el[0].get(); - if (tok >= 0 && tok < n_vocab) + float bias; + if (el[1].is_number()) { - if (el[1].is_number()) + bias = el[1].get(); + } + else if (el[1].is_boolean() && !el[1].get()) + { + bias = -INFINITY; + } + else + { + continue; + } + + if (el[0].is_number_integer()) + { + llama_token tok = el[0].get(); + if (tok >= 0 && tok < n_vocab) { - slot->sparams.logit_bias[tok] = el[1].get(); + slot->sparams.logit_bias[tok] = bias; } - else if (el[1].is_boolean() && !el[1].get()) + } + else if (el[0].is_string()) + { + auto toks = llama_tokenize(model, el[0].get(), false); + for (auto tok : toks) { - slot->sparams.logit_bias[tok] = -INFINITY; + slot->sparams.logit_bias[tok] = bias; } } } From a803333a4e6fc534c93afe90d741bc2388bdec87 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Sun, 11 Feb 2024 13:43:31 +0000 Subject: [PATCH 724/859] common : use enums for sampler types (#5418) * common: use enums for sampler types * Apply suggestions from code review Co-authored-by: Georgi Gerganov * minor : spaces --------- Co-authored-by: Georgi Gerganov --- common/common.cpp | 117 +++++++++++++++++++++++++++++++------------- common/common.h | 7 ++- common/sampling.cpp | 31 +++++------- common/sampling.h | 20 +++++++- 4 files changed, 120 insertions(+), 55 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 9a489a553..f64da2cb6 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -340,13 +340,14 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { invalid_param = true; break; } - sparams.samplers_sequence = parse_samplers_input(argv[i]); + const auto sampler_names = string_split(argv[i], ';'); + sparams.samplers_sequence = sampler_types_from_names(sampler_names); } else if (arg == "--sampling-seq") { if (++i >= argc) { invalid_param = true; break; } - sparams.samplers_sequence = argv[i]; + sparams.samplers_sequence = sampler_types_from_chars(argv[i]); } else if (arg == "--top-p") { if (++i >= argc) { invalid_param = true; @@ -906,6 +907,14 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { const llama_sampling_params & sparams = params.sparams; + std::string sampler_type_chars; + std::string sampler_type_names; + for (const auto sampler_type : sparams.samplers_sequence) { + sampler_type_chars += static_cast(sampler_type); + sampler_type_names += sampler_type_to_name_string(sampler_type) + ";"; + } + sampler_type_names.pop_back(); + printf("\n"); printf("usage: %s [options]\n", argv[0]); printf("\n"); @@ -947,8 +956,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -n N, --n-predict N number of tokens to predict (default: %d, -1 = infinity, -2 = until context filled)\n", params.n_predict); printf(" -c N, --ctx-size N size of the prompt context (default: %d, 0 = loaded from model)\n", params.n_ctx); printf(" -b N, --batch-size N batch size for prompt processing 
(default: %d)\n", params.n_batch); - printf(" --samplers samplers that will be used for generation in the order, separated by \';\', for example: \"top_k;tfs;typical;top_p;min_p;temp\"\n"); - printf(" --sampling-seq simplified sequence for samplers that will be used (default: %s)\n", sparams.samplers_sequence.c_str()); + printf(" --samplers samplers that will be used for generation in the order, separated by \';\' (default: %s)\n", sampler_type_names.c_str()); + printf(" --sampling-seq simplified sequence for samplers that will be used (default: %s)\n", sampler_type_chars.c_str()); printf(" --top-k N top-k sampling (default: %d, 0 = disabled)\n", sparams.top_k); printf(" --top-p N top-p sampling (default: %.1f, 1.0 = disabled)\n", (double)sparams.top_p); printf(" --min-p N min-p sampling (default: %.1f, 0.0 = disabled)\n", (double)sparams.min_p); @@ -1097,45 +1106,85 @@ std::string gpt_random_prompt(std::mt19937 & rng) { } // -// String parsing +// String utils // -std::string parse_samplers_input(std::string input) { - std::string output = ""; +std::vector string_split(std::string input, char separator) { + std::vector parts; + size_t separator_pos = input.find(separator); + while (separator_pos != std::string::npos) { + std::string part = input.substr(0, separator_pos); + parts.emplace_back(part); + input = input.substr(separator_pos + 1); + separator_pos = input.find(separator); + } + parts.emplace_back(input); + return parts; +} + +std::vector sampler_types_from_names(const std::vector & names) { // since samplers names are written multiple ways // make it ready for both system names and input names - std::unordered_map samplers_symbols { - {"top_k", 'k'}, - {"top-k", 'k'}, - {"top_p", 'p'}, - {"top-p", 'p'}, - {"nucleus", 'p'}, - {"typical_p", 'y'}, - {"typical-p", 'y'}, - {"typical", 'y'}, - {"min_p", 'm'}, - {"min-p", 'm'}, - {"tfs_z", 'f'}, - {"tfs-z", 'f'}, - {"tfs", 'f'}, - {"temp", 't'}, - {"temperature",'t'} + std::unordered_map sampler_name_map { + {"top_k", llama_sampler_type::TOP_K}, + {"top-k", llama_sampler_type::TOP_K}, + {"top_p", llama_sampler_type::TOP_P}, + {"top-p", llama_sampler_type::TOP_P}, + {"nucleus", llama_sampler_type::TOP_P}, + {"typical_p", llama_sampler_type::TYPICAL_P}, + {"typical-p", llama_sampler_type::TYPICAL_P}, + {"typical", llama_sampler_type::TYPICAL_P}, + {"min_p", llama_sampler_type::MIN_P}, + {"min-p", llama_sampler_type::MIN_P}, + {"tfs_z", llama_sampler_type::TFS_Z}, + {"tfs-z", llama_sampler_type::TFS_Z}, + {"tfs", llama_sampler_type::TFS_Z}, + {"temp", llama_sampler_type::TEMP}, + {"temperature", llama_sampler_type::TEMP} }; - // expected format example: "temp;top_k;tfs_z;typical_p;top_p;min_p" - size_t separator = input.find(';'); - while (separator != input.npos) { - std::string name = input.substr(0,separator); - input = input.substr(separator+1); - separator = input.find(';'); - if (samplers_symbols.find(name) != samplers_symbols.end()) { - output += samplers_symbols[name]; + std::vector sampler_types; + sampler_types.reserve(names.size()); + for (const auto& name : names) { + const auto sampler_item = sampler_name_map.find(name); + if (sampler_item != sampler_name_map.end()) { + sampler_types.push_back(sampler_item->second); } } - if (samplers_symbols.find(input) != samplers_symbols.end()) { - output += samplers_symbols[input]; + return sampler_types; +} + +std::vector sampler_types_from_chars(const std::string & names_string) { + std::unordered_map sampler_name_map { + {'k', llama_sampler_type::TOP_K}, + {'p', 
llama_sampler_type::TOP_P}, + {'y', llama_sampler_type::TYPICAL_P}, + {'m', llama_sampler_type::MIN_P}, + {'f', llama_sampler_type::TFS_Z}, + {'t', llama_sampler_type::TEMP} + }; + + std::vector sampler_types; + sampler_types.reserve(names_string.size()); + for (const auto & c : names_string) { + const auto sampler_item = sampler_name_map.find(c); + if (sampler_item != sampler_name_map.end()) { + sampler_types.push_back(sampler_item->second); + } + } + return sampler_types; +} + +std::string sampler_type_to_name_string(llama_sampler_type sampler_type) { + switch (sampler_type) { + case llama_sampler_type::TOP_K: return "top_k"; + case llama_sampler_type::TFS_Z: return "tfs_z"; + case llama_sampler_type::TYPICAL_P: return "typical_p"; + case llama_sampler_type::TOP_P: return "top_p"; + case llama_sampler_type::MIN_P: return "min_p"; + case llama_sampler_type::TEMP: return "temp"; + default : return ""; } - return output; } // diff --git a/common/common.h b/common/common.h index 62de25d6a..9bdd45cf9 100644 --- a/common/common.h +++ b/common/common.h @@ -162,10 +162,13 @@ std::string gpt_random_prompt(std::mt19937 & rng); void process_escapes(std::string& input); // -// String parsing +// String utils // -std::string parse_samplers_input(std::string input); +std::vector sampler_types_from_names(const std::vector & names); +std::vector sampler_types_from_chars(const std::string & names_string); +std::vector string_split(std::string input, char separator); +std::string sampler_type_to_name_string(llama_sampler_type sampler_type); // // Model utils diff --git a/common/sampling.cpp b/common/sampling.cpp index 82cbdecea..a001750da 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -103,15 +103,10 @@ std::string llama_sampling_print(const llama_sampling_params & params) { std::string llama_sampling_order_print(const llama_sampling_params & params) { std::string result = "CFG -> Penalties "; if (params.mirostat == 0) { - for (auto s : params.samplers_sequence) { - switch (s) { - case 'k': result += "-> top_k "; break; - case 'f': result += "-> tfs_z "; break; - case 'y': result += "-> typical_p "; break; - case 'p': result += "-> top_p "; break; - case 'm': result += "-> min_p "; break; - case 't': result += "-> temp "; break; - default : break; + for (auto sampler_type : params.samplers_sequence) { + const auto sampler_type_name = sampler_type_to_name_string(sampler_type); + if (!sampler_type_name.empty()) { + result += "-> " + sampler_type_name + " "; } } } else { @@ -135,16 +130,16 @@ static void sampler_queue( const float min_p = params.min_p; const float tfs_z = params.tfs_z; const float typical_p = params.typical_p; - const std::string & samplers_sequence = params.samplers_sequence; + const std::vector & samplers_sequence = params.samplers_sequence; - for (auto s : samplers_sequence) { - switch (s){ - case 'k': llama_sample_top_k (ctx_main, &cur_p, top_k, min_keep); break; - case 'f': llama_sample_tail_free(ctx_main, &cur_p, tfs_z, min_keep); break; - case 'y': llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); break; - case 'p': llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); break; - case 'm': llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); break; - case 't': + for (auto sampler_type : samplers_sequence) { + switch (sampler_type) { + case llama_sampler_type::TOP_K : llama_sample_top_k (ctx_main, &cur_p, top_k, min_keep); break; + case llama_sampler_type::TFS_Z : llama_sample_tail_free(ctx_main, &cur_p, tfs_z, min_keep); break; + case 
llama_sampler_type::TYPICAL_P: llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); break; + case llama_sampler_type::TOP_P : llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); break; + case llama_sampler_type::MIN_P : llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); break; + case llama_sampler_type::TEMP: if (dynatemp_range > 0) { float dynatemp_min = std::max(0.0f, temp - dynatemp_range); float dynatemp_max = std::max(0.0f, temp + dynatemp_range); diff --git a/common/sampling.h b/common/sampling.h index 88899c094..2bd6a75d2 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -8,6 +8,16 @@ #include #include +// sampler types +enum class llama_sampler_type : char { + TOP_K = 'k', + TOP_P = 'p', + MIN_P = 'm', + TFS_Z = 'f', + TYPICAL_P = 'y', + TEMP = 't' +}; + // sampling parameters typedef struct llama_sampling_params { int32_t n_prev = 64; // number of previous tokens to remember @@ -28,7 +38,15 @@ typedef struct llama_sampling_params { float mirostat_tau = 5.00f; // target entropy float mirostat_eta = 0.10f; // learning rate bool penalize_nl = true; // consider newlines as a repeatable token - std::string samplers_sequence = "kfypmt"; // top_k, tail_free, typical_p, top_p, min_p, temp + + std::vector samplers_sequence = { + llama_sampler_type::TOP_K, + llama_sampler_type::TFS_Z, + llama_sampler_type::TYPICAL_P, + llama_sampler_type::TOP_P, + llama_sampler_type::MIN_P, + llama_sampler_type::TEMP + }; std::string grammar; // optional BNF-like grammar to constrain sampling From c88c74f967028ae3d5ebade40ae586d20a961abc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sergio=20L=C3=B3pez?= Date: Sun, 11 Feb 2024 15:12:00 +0100 Subject: [PATCH 725/859] vulkan: only use M-sized matmul on Apple GPUs (#5412) * vulkan: refactor guess_matmul_pipeline for vendor Refactor ggml_vk_guess_matmul_pipeline to simplify adding per-vendor conditionals. Signed-off-by: Sergio Lopez * vulkan: only use M-sized matmul on Apple GPUs L-sized and S-sized matmuls are broken on Apple GPUs, force using M-size with this vendor. Signed-off-by: Sergio Lopez --------- Signed-off-by: Sergio Lopez --- ggml-vulkan.cpp | 103 +++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 93 insertions(+), 10 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 254f648a6..7834e635c 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -27,6 +27,7 @@ #define CEIL_DIV(M, N) (((M) + (N)-1) / (N)) #define VK_VENDOR_ID_AMD 0x1002 +#define VK_VENDOR_ID_APPLE 0x106b #define VK_VENDOR_ID_INTEL 0x8086 #define VK_VENDOR_ID_NVIDIA 0x10de @@ -2034,18 +2035,100 @@ static uint32_t ggml_vk_guess_matmul_pipeline_align(ggml_backend_vk_context * ct return ctx->pipeline_matmul_f32_aligned_l.align; } -static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { -#ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; -#endif +static vk_pipeline* ggml_vk_guess_matmul_pipeline_amd(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { if (bit16_x && bit16_y) { - if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? 
&ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; } - if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " M" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; + } + if (bit16_x && !bit16_y) { + if (m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " S" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << " M" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; + } + if (!bit16_x && bit16_y) { + GGML_ASSERT(false); + } + + if (m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " S" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; + } +#ifdef GGML_VULKAN_DEBUG + std::cerr << " M" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; +} + +static vk_pipeline* ggml_vk_guess_matmul_pipeline_apple(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, bool aligned) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " M" << std::endl; +#endif + if (bit16_x && bit16_y) { + return aligned ? &ctx->pipeline_matmul_f16_aligned_m : &ctx->pipeline_matmul_f16_m; + } + if (bit16_x && !bit16_y) { + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_m : &ctx->pipeline_matmul_f16_f32_m; + } + if (!bit16_x && bit16_y) { + GGML_ASSERT(false); + } + return aligned ? &ctx->pipeline_matmul_f32_aligned_m : &ctx->pipeline_matmul_f32_m; +} + +static vk_pipeline* ggml_vk_guess_matmul_pipeline_intel(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, bool aligned) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " S" << std::endl; +#endif + if (bit16_x && bit16_y) { + return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; + } + if (bit16_x && !bit16_y) { + return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; + } + if (!bit16_x && bit16_y) { + GGML_ASSERT(false); + } + return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; +} + +static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, bool bit16_x, bool bit16_y, int m, int n, bool aligned) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << "ggml_vk_guess_matmul_pipeline(" << bit16_x << ", " << bit16_y << ", " << m << ", " << n << ", " << aligned << ")"; +#endif + switch (ctx->device.lock()->vendor_id) { + case VK_VENDOR_ID_AMD: + return ggml_vk_guess_matmul_pipeline_amd(ctx, bit16_x, bit16_y, m, n, aligned); + case VK_VENDOR_ID_APPLE: + return ggml_vk_guess_matmul_pipeline_apple(ctx, bit16_x, bit16_y, aligned); + case VK_VENDOR_ID_INTEL: + return ggml_vk_guess_matmul_pipeline_intel(ctx, bit16_x, bit16_y, aligned); + } + + if (bit16_x && bit16_y) { + if (m <= 32 || n <= 32) { +#ifdef GGML_VULKAN_DEBUG + std::cerr << " S" << std::endl; +#endif + return aligned ? &ctx->pipeline_matmul_f16_aligned_s : &ctx->pipeline_matmul_f16_s; + } + if (m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif @@ -2057,13 +2140,13 @@ static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, return aligned ? 
&ctx->pipeline_matmul_f16_aligned_l : &ctx->pipeline_matmul_f16_l; } if (bit16_x && !bit16_y) { - if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &ctx->pipeline_matmul_f16_f32_aligned_s : &ctx->pipeline_matmul_f16_f32_s; } - if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { + if (m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif @@ -2078,13 +2161,13 @@ static vk_pipeline* ggml_vk_guess_matmul_pipeline(ggml_backend_vk_context * ctx, GGML_ASSERT(false); } - if (ctx->device.lock()->vendor_id == VK_VENDOR_ID_INTEL || m <= 32 || n <= 32) { + if (m <= 32 || n <= 32) { #ifdef GGML_VULKAN_DEBUG std::cerr << " S" << std::endl; #endif return aligned ? &ctx->pipeline_matmul_f32_aligned_s : &ctx->pipeline_matmul_f32_s; } - if (ctx->device.lock()->subgroup_size == 64 || m <= 64 || n <= 64) { + if (m <= 64 || n <= 64) { #ifdef GGML_VULKAN_DEBUG std::cerr << " M" << std::endl; #endif From 97a336507ed9b971d72262bec7e2b8b7016a054a Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 11 Feb 2024 00:17:31 +0000 Subject: [PATCH 726/859] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/b8b232ae7b8b144397fdb12d20f592e5e7c1a64d' (2024-01-31) → 'github:NixOS/nixpkgs/f8e2ebd66d097614d51a56a755450d4ae1632df1' (2024-02-07) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 8cfc78273..239d0686c 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1706732774, - "narHash": "sha256-hqJlyJk4MRpcItGYMF+3uHe8HvxNETWvlGtLuVpqLU0=", + "lastModified": 1707268954, + "narHash": "sha256-2en1kvde3cJVc3ZnTy8QeD2oKcseLFjYPLKhIGDanQ0=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "b8b232ae7b8b144397fdb12d20f592e5e7c1a64d", + "rev": "f8e2ebd66d097614d51a56a755450d4ae1632df1", "type": "github" }, "original": { From 2891c8aa9af17f4ff636ff3868bc34ff72b56e25 Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Sun, 11 Feb 2024 10:21:38 -0600 Subject: [PATCH 727/859] Add support for BERT embedding models (#5423) * BERT model graph construction (build_bert) * WordPiece tokenizer (llm_tokenize_wpm) * Add flag for non-causal attention models * Allow for models that only output embeddings * Support conversion of BERT models to GGUF * Based on prior work by @xyzhang626 and @skeskinen --------- Co-authored-by: Jared Van Bortel Co-authored-by: Jared Van Bortel Co-authored-by: Georgi Gerganov --- .flake8 | 1 + convert-hf-to-gguf.py | 94 ++++++ examples/embedding/embedding.cpp | 12 +- gguf-py/gguf/constants.py | 43 +-- gguf-py/gguf/gguf_writer.py | 6 + gguf-py/gguf/tensor_mapping.py | 13 +- llama.cpp | 498 +++++++++++++++++++++++++++++-- llama.h | 1 + 8 files changed, 616 insertions(+), 52 deletions(-) diff --git a/.flake8 b/.flake8 index 113ca5fd3..18fba2c15 100644 --- a/.flake8 +++ b/.flake8 @@ -1,2 +1,3 @@ [flake8] max-line-length = 125 +ignore = W503 diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 0d4ea03b4..cae1551a2 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -209,6 +209,8 @@ class Model: return InternLM2Model if model_architecture == "MiniCPMForCausalLM": return MiniCPMModel + if model_architecture == "BertModel": + return BertModel return Model 
def _is_model_safetensors(self) -> bool: @@ -264,6 +266,8 @@ class Model: return gguf.MODEL_ARCH.INTERNLM2 if arch == "MiniCPMForCausalLM": return gguf.MODEL_ARCH.MINICPM + if arch == "BertModel": + return gguf.MODEL_ARCH.BERT raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1629,6 +1633,96 @@ in chat mode so that the conversation can end normally.") self.post_write_tensors(tensor_map, name, data_torch) +class BertModel(Model): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.block_count = self.hparams["num_hidden_layers"] + + def set_gguf_parameters(self): + # TODO(cebtenzzre): merge with parent class + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) + self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) + self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) + self.gguf_writer.add_block_count(self.block_count) + self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) + self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) + self.gguf_writer.add_causal_attention(False) + self.gguf_writer.add_file_type(self.ftype) + + def set_vocab(self): + path = self.dir_model + added_tokens_path = self.dir_model if self.dir_model.exists() else None + + # use huggingface vocab to get all tokens + vocab = HfVocab(path, added_tokens_path) + tokens, scores, toktypes = zip(*vocab.all_tokens()) + assert len(tokens) == vocab.vocab_size + + # we need this to validate the size of the token_type embeddings + # though currently we are passing all zeros to the token_type embeddings + n_token_types = len(set(toktypes)) + self.gguf_writer.add_token_type_count(n_token_types) + + # convert to phantom space vocab + def phantom(tok, typ): + if tok.startswith(b"[") and tok.endswith(b"]"): + return tok + if tok.startswith(b"##"): + return tok[2:] + return b"\xe2\x96\x81" + tok + tokens = [phantom(t, y) for t, y in zip(tokens, toktypes)] + + # set up bos and eos tokens (cls and sep) + self.gguf_writer.add_bos_token_id(vocab.tokenizer.cls_token_id) + self.gguf_writer.add_eos_token_id(vocab.tokenizer.sep_token_id) + + # add vocab to gguf + self.gguf_writer.add_tokenizer_model("bert") + self.gguf_writer.add_token_list(tokens) + self.gguf_writer.add_token_scores(scores) + self.gguf_writer.add_token_types(toktypes) + + # handle special tokens + special_vocab = gguf.SpecialVocab(self.dir_model, n_vocab=len(tokens)) + special_vocab.add_to_gguf(self.gguf_writer) + + def write_tensors(self): + tensor_map = gguf.get_tensor_name_map(self.model_arch, self.block_count) + tensors = dict(self.get_tensors()) + for name, data_torch in tensors.items(): + # we are only using BERT for embeddings so we don't need the pooling layer + if name in ("embeddings.position_ids", "pooler.dense.weight", "pooler.dense.bias"): + continue # we don't need these + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + data = data_torch.squeeze().numpy() + n_dims = len(data.shape) + new_dtype: type[np.floating[Any]] + + if ( + self.ftype == 1 and name.endswith(".weight") and n_dims == 2 + and name != "embeddings.token_type_embeddings.weight" # not used with get_rows, must be F32 + ): + # if f16 desired, convert any float32 2-dim weight tensors to float16 + new_dtype = np.float16 + else: + # if f32 desired, convert any float16 to float32 + new_dtype = 
np.float32 + + print(f"{new_name}, n_dims = {n_dims}, {data_torch.dtype} --> {new_dtype}") + + if data.dtype != new_dtype: + data = data.astype(new_dtype) + + self.gguf_writer.add_tensor(new_name, data) + + ###### CONVERSION LOGIC ###### diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp index 3295cd240..27376c8f0 100644 --- a/examples/embedding/embedding.cpp +++ b/examples/embedding/embedding.cpp @@ -87,7 +87,17 @@ int main(int argc, char ** argv) { } const int n_embd = llama_n_embd(model); - const auto * embeddings = llama_get_embeddings(ctx); + auto * embeddings = llama_get_embeddings(ctx); + + // l2-normalize embeddings + float norm = 0; + for (int i = 0; i < n_embd; i++) { + norm += embeddings[i] * embeddings[i]; + } + norm = sqrt(norm); + for (int i = 0; i < n_embd; i++) { + embeddings[i] /= norm; + } for (int i = 0; i < n_embd; i++) { printf("%f ", embeddings[i]); diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 1cfd41c0b..a9c13dd38 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -50,6 +50,7 @@ class Keys: VALUE_LENGTH = "{arch}.attention.value_length" LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon" LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon" + CAUSAL = "{arch}.attention.causal" class Rope: DIMENSION_COUNT = "{arch}.rope.dimension_count" @@ -60,22 +61,23 @@ class Keys: SCALING_FINETUNED = "{arch}.rope.scaling.finetuned" class Tokenizer: - MODEL = "tokenizer.ggml.model" - LIST = "tokenizer.ggml.tokens" - TOKEN_TYPE = "tokenizer.ggml.token_type" - SCORES = "tokenizer.ggml.scores" - MERGES = "tokenizer.ggml.merges" - BOS_ID = "tokenizer.ggml.bos_token_id" - EOS_ID = "tokenizer.ggml.eos_token_id" - UNK_ID = "tokenizer.ggml.unknown_token_id" - SEP_ID = "tokenizer.ggml.seperator_token_id" - PAD_ID = "tokenizer.ggml.padding_token_id" - ADD_BOS = "tokenizer.ggml.add_bos_token" - ADD_EOS = "tokenizer.ggml.add_eos_token" - ADD_PREFIX = "tokenizer.ggml.add_space_prefix" - HF_JSON = "tokenizer.huggingface.json" - RWKV = "tokenizer.rwkv.world" - CHAT_TEMPLATE = "tokenizer.chat_template" + MODEL = "tokenizer.ggml.model" + LIST = "tokenizer.ggml.tokens" + TOKEN_TYPE = "tokenizer.ggml.token_type" + TOKEN_TYPE_COUNT = "tokenizer.ggml.token_type_count" # for BERT-style token types + SCORES = "tokenizer.ggml.scores" + MERGES = "tokenizer.ggml.merges" + BOS_ID = "tokenizer.ggml.bos_token_id" + EOS_ID = "tokenizer.ggml.eos_token_id" + UNK_ID = "tokenizer.ggml.unknown_token_id" + SEP_ID = "tokenizer.ggml.seperator_token_id" + PAD_ID = "tokenizer.ggml.padding_token_id" + ADD_BOS = "tokenizer.ggml.add_bos_token" + ADD_EOS = "tokenizer.ggml.add_eos_token" + ADD_PREFIX = "tokenizer.ggml.add_space_prefix" + HF_JSON = "tokenizer.huggingface.json" + RWKV = "tokenizer.rwkv.world" + CHAT_TEMPLATE = "tokenizer.chat_template" # @@ -122,6 +124,7 @@ class MODEL_TENSOR(IntEnum): ATTN_OUT = auto() ATTN_NORM = auto() ATTN_NORM_2 = auto() + ATTN_OUT_NORM = auto() ATTN_ROT_EMBD = auto() FFN_GATE_INP = auto() FFN_NORM = auto() @@ -134,6 +137,7 @@ class MODEL_TENSOR(IntEnum): FFN_UP_EXP = auto() ATTN_Q_NORM = auto() ATTN_K_NORM = auto() + LAYER_OUT_NORM = auto() MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { @@ -178,6 +182,7 @@ TENSOR_NAMES: dict[MODEL_TENSOR, str] = { MODEL_TENSOR.ATTN_ROT_EMBD: "blk.{bid}.attn_rot_embd", MODEL_TENSOR.ATTN_Q_NORM: "blk.{bid}.attn_q_norm", MODEL_TENSOR.ATTN_K_NORM: "blk.{bid}.attn_k_norm", + MODEL_TENSOR.ATTN_OUT_NORM: "blk.{bid}.attn_output_norm", MODEL_TENSOR.FFN_GATE_INP: 
"blk.{bid}.ffn_gate_inp", MODEL_TENSOR.FFN_NORM: "blk.{bid}.ffn_norm", MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate", @@ -187,6 +192,7 @@ TENSOR_NAMES: dict[MODEL_TENSOR, str] = { MODEL_TENSOR.FFN_GATE_EXP: "blk.{bid}.ffn_gate.{xid}", MODEL_TENSOR.FFN_DOWN_EXP: "blk.{bid}.ffn_down.{xid}", MODEL_TENSOR.FFN_UP_EXP: "blk.{bid}.ffn_up.{xid}", + MODEL_TENSOR.LAYER_OUT_NORM: "blk.{bid}.layer_output_norm", } MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { @@ -262,17 +268,18 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { ], MODEL_ARCH.BERT: [ MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.TOKEN_EMBD_NORM, MODEL_TENSOR.TOKEN_TYPES, MODEL_TENSOR.POS_EMBD, MODEL_TENSOR.OUTPUT_NORM, - MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_OUT_NORM, MODEL_TENSOR.ATTN_Q, MODEL_TENSOR.ATTN_K, MODEL_TENSOR.ATTN_V, MODEL_TENSOR.ATTN_OUT, - MODEL_TENSOR.FFN_NORM, MODEL_TENSOR.FFN_DOWN, MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.LAYER_OUT_NORM, ], MODEL_ARCH.MPT: [ MODEL_TENSOR.TOKEN_EMBD, diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 16808196e..7af58a46c 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -357,6 +357,9 @@ class GGUFWriter: def add_layer_norm_rms_eps(self, value: float) -> None: self.add_float32(Keys.Attention.LAYERNORM_RMS_EPS.format(arch=self.arch), value) + def add_causal_attention(self, value: bool) -> None: + self.add_bool(Keys.Attention.CAUSAL.format(arch=self.arch), value) + def add_rope_dimension_count(self, count: int) -> None: self.add_uint32(Keys.Rope.DIMENSION_COUNT.format(arch=self.arch), count) @@ -387,6 +390,9 @@ class GGUFWriter: def add_token_types(self, types: Sequence[TokenType] | Sequence[int]) -> None: self.add_array(Keys.Tokenizer.TOKEN_TYPE, types) + def add_token_type_count(self, value: int) -> None: + self.add_uint32(Keys.Tokenizer.TOKEN_TYPE_COUNT, value) + def add_token_scores(self, scores: Sequence[float]) -> None: self.add_array(Keys.Tokenizer.SCORES, scores) diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index 4f16d8504..c7ba1420e 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -30,6 +30,7 @@ class TensorNameMap: # Normalization of token embeddings MODEL_TENSOR.TOKEN_EMBD_NORM: ( "word_embeddings_layernorm", # bloom + "embeddings.LayerNorm", # bert ), # Position embeddings @@ -54,7 +55,6 @@ class TensorNameMap: "transformer.ln_f", # gpt2 gpt-j falcon "model.norm", # llama-hf baichuan internlm2 "norm", # llama-pth - "embeddings.LayerNorm", # bert "transformer.norm_f", # mpt "ln_f", # refact bloom qwen gpt2 "language_model.encoder.final_layernorm", # persimmon @@ -79,7 +79,6 @@ class TensorNameMap: "transformer.h.{bid}.ln_mlp", # falcon40b "model.layers.{bid}.input_layernorm", # llama-hf "layers.{bid}.attention_norm", # llama-pth - "encoder.layer.{bid}.attention.output.LayerNorm", # bert "language_model.encoder.layers.{bid}.input_layernorm", # persimmon "model.layers.{bid}.ln1", # yi "h.{bid}.ln_1", # gpt2 @@ -155,6 +154,11 @@ class TensorNameMap: "model.layers.{bid}.attention.wo", # internlm2 ), + # Attention output norm + MODEL_TENSOR.ATTN_OUT_NORM: ( + "encoder.layer.{bid}.attention.output.LayerNorm", # bert + ), + # Rotary embeddings MODEL_TENSOR.ATTN_ROT_EMBD: ( "model.layers.{bid}.self_attn.rotary_emb.inv_freq", # llama-hf @@ -171,7 +175,6 @@ class TensorNameMap: "transformer.blocks.{bid}.norm_2", # mpt "model.layers.{bid}.post_attention_layernorm", # llama-hf "layers.{bid}.ffn_norm", # llama-pth - "encoder.layer.{bid}.output.LayerNorm", # bert 
"language_model.encoder.layers.{bid}.post_attention_layernorm", # persimmon "model.layers.{bid}.ln2", # yi "h.{bid}.ln_2", # gpt2 @@ -266,6 +269,10 @@ class TensorNameMap: MODEL_TENSOR.ROPE_FREQS: ( "language_model.encoder.layers.{bid}.self_attention.rotary_emb.inv_freq", # persimmon ), + + MODEL_TENSOR.LAYER_OUT_NORM: ( + "encoder.layer.{bid}.output.LayerNorm", # bert + ) } mapping: dict[str, tuple[MODEL_TENSOR, str]] diff --git a/llama.cpp b/llama.cpp index 3f39a67fb..d1ee26ce2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -196,6 +196,7 @@ enum llm_arch { LLM_ARCH_STARCODER, LLM_ARCH_PERSIMMON, LLM_ARCH_REFACT, + LLM_ARCH_BERT, LLM_ARCH_BLOOM, LLM_ARCH_STABLELM, LLM_ARCH_QWEN, @@ -220,6 +221,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_STARCODER, "starcoder" }, { LLM_ARCH_PERSIMMON, "persimmon" }, { LLM_ARCH_REFACT, "refact" }, + { LLM_ARCH_BERT, "bert" }, { LLM_ARCH_BLOOM, "bloom" }, { LLM_ARCH_STABLELM, "stablelm" }, { LLM_ARCH_QWEN, "qwen" }, @@ -261,6 +263,7 @@ enum llm_kv { LLM_KV_ATTENTION_VALUE_LENGTH, LLM_KV_ATTENTION_LAYERNORM_EPS, LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, + LLM_KV_ATTENTION_CAUSAL, LLM_KV_ROPE_DIMENSION_COUNT, LLM_KV_ROPE_FREQ_BASE, @@ -273,6 +276,7 @@ enum llm_kv { LLM_KV_TOKENIZER_MODEL, LLM_KV_TOKENIZER_LIST, LLM_KV_TOKENIZER_TOKEN_TYPE, + LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, LLM_KV_TOKENIZER_SCORES, LLM_KV_TOKENIZER_MERGES, LLM_KV_TOKENIZER_BOS_ID, @@ -316,6 +320,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_ATTENTION_VALUE_LENGTH, "%s.attention.value_length" }, { LLM_KV_ATTENTION_LAYERNORM_EPS, "%s.attention.layer_norm_epsilon" }, { LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, "%s.attention.layer_norm_rms_epsilon" }, + { LLM_KV_ATTENTION_CAUSAL, "%s.attention.causal" }, { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" }, { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" }, @@ -328,6 +333,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" }, { LLM_KV_TOKENIZER_LIST, "tokenizer.ggml.tokens" }, { LLM_KV_TOKENIZER_TOKEN_TYPE, "tokenizer.ggml.token_type" }, + { LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, "tokenizer.ggml.token_type_count" }, { LLM_KV_TOKENIZER_SCORES, "tokenizer.ggml.scores" }, { LLM_KV_TOKENIZER_MERGES, "tokenizer.ggml.merges" }, { LLM_KV_TOKENIZER_BOS_ID, "tokenizer.ggml.bos_token_id" }, @@ -355,6 +361,7 @@ struct LLM_KV { enum llm_tensor { LLM_TENSOR_TOKEN_EMBD, LLM_TENSOR_TOKEN_EMBD_NORM, + LLM_TENSOR_TOKEN_TYPES, LLM_TENSOR_POS_EMBD, LLM_TENSOR_OUTPUT, LLM_TENSOR_OUTPUT_NORM, @@ -536,6 +543,23 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, }, + { + LLM_ARCH_BERT, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" }, + { LLM_TENSOR_TOKEN_TYPES, "token_types" }, + { LLM_TENSOR_POS_EMBD, "position_embd" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_output_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.layer_output_norm" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_BLOOM, { @@ -1440,6 +1464,11 @@ static llama_state g_state; // available llama models enum e_model { MODEL_UNKNOWN, + MODEL_17M, + MODEL_22M, + MODEL_33M, + MODEL_109M, + MODEL_335M, MODEL_0_5B, MODEL_1B, MODEL_2B, @@ -1481,6 +1510,7 @@ struct llama_hparams { uint32_t n_ff; uint32_t n_expert = 0; uint32_t n_expert_used = 0; + uint32_t n_vocab_type = 0; // for 
BERT-style token types float f_norm_eps; float f_norm_rms_eps; @@ -1493,6 +1523,8 @@ struct llama_hparams { float f_clamp_kqv; float f_max_alibi_bias; + bool causal_attn = true; + bool operator!=(const llama_hparams & other) const { if (this->vocab_only != other.vocab_only) return true; @@ -1720,6 +1752,7 @@ struct llama_model { llama_vocab vocab; struct ggml_tensor * tok_embd; + struct ggml_tensor * type_embd; struct ggml_tensor * pos_embd; struct ggml_tensor * tok_norm; struct ggml_tensor * tok_norm_b; @@ -1850,6 +1883,7 @@ struct llama_context { struct ggml_tensor * inp_pos; // I32 [n_batch] struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] struct ggml_tensor * inp_K_shift; // I32 [n_ctx] + struct ggml_tensor * inp_sum; // F32 [1, n_batch] #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -2829,6 +2863,7 @@ static const char * llama_model_vocab_type_name(enum llama_vocab_type type){ switch (type) { case LLAMA_VOCAB_TYPE_SPM: return "SPM"; case LLAMA_VOCAB_TYPE_BPE: return "BPE"; + case LLAMA_VOCAB_TYPE_WPM: return "WPM"; default: return "unknown"; } } @@ -3000,6 +3035,26 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_BERT: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); + ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); + + switch (hparams.n_layer) { + case 3: + model.type = e_model::MODEL_17M; break; // bge-micro + case 6: + model.type = e_model::MODEL_22M; break; // MiniLM-L6 + case 12: + switch (hparams.n_embd) { + case 384: model.type = e_model::MODEL_33M; break; // MiniLM-L12, bge-small + case 768: model.type = e_model::MODEL_109M; break; // bge-base + } break; + case 24: + model.type = e_model::MODEL_335M; break; // bge-large + } + } break; case LLM_ARCH_BLOOM: { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); @@ -3204,6 +3259,16 @@ static void llm_load_vocab( vocab.special_unk_id = -1; vocab.special_sep_id = -1; vocab.special_pad_id = -1; + } else if (tokenizer_name == "bert") { + vocab.type = LLAMA_VOCAB_TYPE_WPM; + + // default special tokens + vocab.special_bos_id = 101; + vocab.special_eos_id = 102; + vocab.special_unk_id = 100; + vocab.special_sep_id = -1; + vocab.special_pad_id = -1; + vocab.add_space_prefix = false; } else { LLAMA_LOG_WARN("%s: unknown tokenizer: '%s'", __func__, tokenizer_name.c_str()); LLAMA_LOG_WARN("%s: using default tokenizer: 'llama'", __func__); @@ -3232,6 +3297,8 @@ static void llm_load_vocab( // determine the newline token: LLaMA "<0x0A>" == 10 == '\n', Falcon 193 == '\n' if (vocab.type == LLAMA_VOCAB_TYPE_SPM) { vocab.linefeed_id = llama_byte_to_token(vocab, '\n'); + } else if (vocab.type == LLAMA_VOCAB_TYPE_WPM) { + vocab.linefeed_id = vocab.special_pad_id; } else { const std::vector ids = llama_tokenize_internal(vocab, "\u010A", false); GGML_ASSERT(!ids.empty() && "model vocab missing newline token"); @@ -3569,6 +3636,7 @@ static bool llm_load_tensors( const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); const int64_t n_embd_gqa = n_embd_v_gqa; const int64_t n_vocab = hparams.n_vocab; + const int64_t n_vocab_type = hparams.n_vocab_type; const int64_t n_ff = hparams.n_ff; GGML_ASSERT(n_embd_gqa == n_embd_k_gqa); @@ -3783,11 +3851,50 @@ static bool llm_load_tensors( layer.attn_k_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K_NORM, "bias", i), {64}); } } break; - case LLM_ARCH_BLOOM: + case LLM_ARCH_BERT: { model.tok_embd = 
ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); - model.tok_norm = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); - model.tok_norm_b = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); + model.type_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_TYPES, "weight"), {n_embd, n_vocab_type}); + model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); + model.tok_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); + model.tok_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); + + for (int i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); + + auto & layer = model.layers[i]; + + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); + + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); + + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); + + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); + + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + } + } break; + case LLM_ARCH_BLOOM: + { + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.tok_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); + model.tok_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); // output { @@ -4739,6 +4846,7 @@ struct llm_build_context { const int32_t n_orig_ctx; const bool do_rope_shift; + const bool causal_attn; const llm_build_cb & cb; @@ -4782,6 +4890,7 @@ struct llm_build_context { kv_head (worst_case ? 
n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), + causal_attn (hparams.causal_attn), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -5625,6 +5734,100 @@ struct llm_build_context { return gf; } + struct ggml_cgraph * build_bert() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head = hparams.n_embd_head_v; + GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); + GGML_ASSERT(n_embd_head == hparams.n_rot); + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + // get input vectors with right size + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + struct ggml_tensor * inp_sum = ggml_view_1d(ctx0, lctx.inp_sum, n_tokens, 0); + + // construct input embeddings (token, type, position) + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + // token types are hardcoded to zero ("Sentence A") + struct ggml_tensor * type_row0 = ggml_view_1d(ctx0, model.type_embd, n_embd, 0); + inpL = ggml_add(ctx0, inpL, type_row0); + inpL = ggml_add(ctx0, ggml_get_rows(ctx0, model.pos_embd, inp_pos), inpL); + cb(inpL, "inp_embd", -1); + + // embed layer norm + inpL = llm_build_norm(ctx0, inpL, hparams, model.tok_norm, model.tok_norm_b, LLM_NORM, cb, -1); + cb(inpL, "inp_norm", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); // [n_kv, n_tokens] + + // iterate layers + for (int il = 0; il < n_layer; ++il) { + struct ggml_tensor * cur = inpL; + + // self-attention + { + struct ggml_tensor * Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, cur), model.layers[il].bq); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wk, cur), model.layers[il].bk); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wv, cur), model.layers[il].bv); + cb(Vcur, "Vcur", il); + + // seems like we just need to do this for Q? 
+ Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } + + // re-add the layer input + cur = ggml_add(ctx0, cur, inpL); + + // attention layer norm + cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].attn_norm, model.layers[il].attn_norm_b, LLM_NORM, cb, il); + + struct ggml_tensor * ffn_inp = cur; + cb(ffn_inp, "ffn_inp", il); + + // feed-forward network + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + cb(cur, "ffn_out", il); + + // attentions bypass the intermediate layer + cur = ggml_add(ctx0, cur, ffn_inp); + + // output layer norm + cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].ffn_norm, model.layers[il].ffn_norm_b, LLM_NORM, cb, il); + + // input for next layer + inpL = cur; + } + + // final output + cur = inpL; + + // pooling + cur = ggml_mul_mat(ctx0, inp_sum, ggml_cont(ctx0, ggml_transpose(ctx0, cur))); + cb(cur, "result_embed", -1); + + ggml_build_forward_expand(gf, cur); + + return gf; + } + struct ggml_cgraph * build_bloom() { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); @@ -7060,7 +7263,8 @@ static struct ggml_cgraph * llama_build_graph( for (int i = 0; i < n_kv; ++i) { float f; - if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { + if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || + (llm.causal_attn && lctx.kv_self.cells[i].pos > pos)) { f = -INFINITY; } else { f = 0; @@ -7081,6 +7285,15 @@ static struct ggml_cgraph * llama_build_graph( data[i] = lctx.kv_self.cells[i].delta; } } + + { + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); + float * data = (float *) lctx.inp_sum->data; + + for (int i = 0; i < batch.n_tokens; ++i) { + data[i] = 1.0f/float(batch.n_tokens); + } + } } llm.init(); @@ -7110,6 +7323,10 @@ static struct ggml_cgraph * llama_build_graph( { result = llm.build_refact(); } break; + case LLM_ARCH_BERT: + { + result = llm.build_bert(); + } break; case LLM_ARCH_BLOOM: { result = llm.build_bloom(); @@ -7269,13 +7486,18 @@ static int llama_decode_internal( // the output is always the last tensor in the graph struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; - GGML_ASSERT(strcmp(res->name, "result_output") == 0); - - // the embeddings could be the second to last tensor, or the third to last tensor struct ggml_tensor * embeddings = gf->nodes[gf->n_nodes - 2]; - if (strcmp(embeddings->name, "result_norm") != 0) { - embeddings = gf->nodes[gf->n_nodes - 3]; - GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); + if (strcmp(res->name, "result_output") == 0) { + // the embeddings could be the second to last tensor, or the third to last tensor + if (strcmp(embeddings->name, "result_norm") != 0) { + embeddings = gf->nodes[gf->n_nodes - 3]; + GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); + } + } else if (strcmp(res->name, "result_embed") == 0) { + embeddings = res; + res = nullptr; + } else { + GGML_ASSERT(false); } // LLAMA_LOG_INFO("graph build time: %.3f ms (%d nodes, %d leafs)\n", (ggml_time_us() - t_start_us)/1000.0, gf->n_nodes, gf->n_leafs); @@ -7344,7 +7566,7 @@ static int llama_decode_internal( // extract logits // TODO: do not compute 
and extract logits if only embeddings are needed // need to update the graphs to skip "result_output" - { + if (res) { auto & logits_out = lctx.logits; #ifndef NDEBUG @@ -7388,9 +7610,11 @@ static int llama_decode_internal( if (!lctx.embedding.empty()) { auto & embedding_out = lctx.embedding; + const int64_t embed_pos = res ? n_embd * (n_tokens-1) : 0; + embedding_out.resize(n_embd); ggml_backend_t embeddings_backend = ggml_backend_sched_get_node_backend(lctx.sched, embeddings); - ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), (n_embd*(n_tokens - 1))*sizeof(float), n_embd*sizeof(float)); + ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), embed_pos*sizeof(float), n_embd*sizeof(float)); ggml_backend_synchronize(embeddings_backend); } @@ -7454,6 +7678,9 @@ static uint8_t llama_token_to_byte(const llama_vocab& vocab, llama_token id) { GGML_ASSERT(false); return unicode_to_bytes_bpe(token_data.text); } + case LLAMA_VOCAB_TYPE_WPM: { + GGML_ASSERT(false); + } default: GGML_ASSERT(false); } @@ -7466,6 +7693,7 @@ static llama_token llama_byte_to_token(const llama_vocab & vocab, uint8_t ch) { const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; return vocab.token_to_id.at(buf); } + case LLAMA_VOCAB_TYPE_WPM: case LLAMA_VOCAB_TYPE_BPE: { return vocab.token_to_id.at(bytes_to_unicode_bpe(ch)); } @@ -7936,12 +8164,212 @@ private: llm_bigram_bpe::queue work_queue; }; -typedef enum FRAGMENT_BUFFER_VARIANT_TYPE{ +struct llm_tokenizer_wpm { + llm_tokenizer_wpm(const llama_vocab & vocab): vocab(vocab) {} + + void tokenize(const std::string & text, std::vector & output) { + auto * token_map = &vocab.token_to_id; + + // normalize and split by whitespace + std::vector words = preprocess(text); + + // bos token prepended already + + // find the longest tokens that form the words + for (const std::string &word : words) { + // skip empty words + if (word.size() == 0) { + continue; + } + + // prepend phantom space + std::string word1 = "\xe2\x96\x81" + word; + int n = word1.size(); + + // we're at the start of a new word + int i = 0; + bool match_any = false; + + // move through character position in word + while (i < n) { + // loop through possible match length + bool match = false; + for (int j = n; j > i; j--) { + auto it = token_map->find(word1.substr(i, j - i)); + if (it != token_map->end()) { + output.push_back(it->second); + match = true; + match_any = true; + i = j; + break; + } + } + + // must be an unknown character + if (!match) { + i++; + } + } + + // we didn't find any matches for this word + if (!match_any) { + output.push_back(vocab.special_unk_id); + } + } + + // append eos token + output.push_back(vocab.special_eos_id); + } + + std::vector preprocess(const std::string & text) { + std::string ori_str = normalize(text); + uint64_t ori_size = ori_str.size(); + + // single punct / single symbol / single digit + // baseline: add whitespace on the left and right of punct and chinese characters + std::vector words; + std::string new_str = ""; + uint64_t i = 0; + while (i < ori_size) { + int utf_char_len = utf8_len(ori_str[i]); + if ((utf_char_len == 1) && ispunct(ori_str[i])) { + new_str += " "; + new_str += ori_str[i]; + new_str += " "; + i += 1; + } + else if ((utf_char_len == 3) && is_chinese_char(ori_str.substr(i, 3))) { + new_str += " "; + new_str += ori_str.substr(i, 3); + new_str += " "; + i += 3; + } + else { + new_str += ori_str[i]; + i += 1; + } + } + + // split by whitespace + uint64_t l = 0; + 
uint64_t r = 0; + while (r < new_str.size()) { + // if is whitespace + if (isspace(new_str[r])) { + if (r > l) words.push_back(new_str.substr(l, (r - l))); + l = r + 1; + r = l; + } + else { + r += 1; + } + } + if (r > l) { + words.push_back(new_str.substr(l, (r - l))); + } + return words; + } + + std::string normalize(const std::string & text) { + // TODO: handle chinese characters? https://github.com/huggingface/tokenizers/blob/ef5f50605ddf9f8caef1598c0e4853862b9707a7/tokenizers/src/normalizers/bert.rs#L98 + std::string text2 = strip_accents(text); + for (size_t i = 0; i < text2.size(); i += utf8_len(text2[i])) { + char c = text2[i]; + if (c >= 'A' && c <= 'Z') { + text2[i] = c - 'A' + 'a'; + } + } + return text2; + } + + bool is_chinese_char(const std::string & str) { + int len = str.length(); + unsigned int codepoint = 0; + int num_bytes = 0; + int i = 0; + unsigned char ch = static_cast(str[i]); + if (ch <= 0x7f) { + codepoint = ch; + num_bytes = 1; + } else if ((ch >> 5) == 0x06) { + codepoint = ch & 0x1f; + num_bytes = 2; + } else if ((ch >> 4) == 0x0e) { + codepoint = ch & 0x0f; + num_bytes = 3; + } else if ((ch >> 3) == 0x1e) { + codepoint = ch & 0x07; + num_bytes = 4; + } + for (int j = 1; j < num_bytes; ++j) { + if (i + j >= len) { + return false; // incomplete UTF-8 character + } + unsigned char next_ch = static_cast(str[i + j]); + if ((next_ch >> 6) != 0x02) { + return false; // invalid trailing byte + } + codepoint = (codepoint << 6) | (next_ch & 0x3f); + } + if ((codepoint >= 0x4E00 && codepoint <= 0x9FFF) || + (codepoint >= 0x3400 && codepoint <= 0x4DBF) || + (codepoint >= 0x20000 && codepoint <= 0x2A6DF) || + (codepoint >= 0x2A700 && codepoint <= 0x2B73F) || + (codepoint >= 0x2B740 && codepoint <= 0x2B81F) || + (codepoint >= 0x2B920 && codepoint <= 0x2CEAF) || // this should be 0x2B820 but in hf rust code it is 0x2B920 + (codepoint >= 0xF900 && codepoint <= 0xFAFF) || + (codepoint >= 0x2F800 && codepoint <= 0x2FA1F) || + (codepoint >= 0x3000 && codepoint <= 0x303F) || + (codepoint >= 0xFF00 && codepoint <= 0xFFEF)) { + return true; // NOLINT + } + return false; + } + + std::string strip_accents(const std::string & input_string) { + std::string resultString; + std::map accent_map = { + {"À", 'A'}, {"Á", 'A'}, {"Â", 'A'}, {"Ã", 'A'}, {"Ä", 'A'}, {"Å", 'A'}, + {"à", 'a'}, {"á", 'a'}, {"â", 'a'}, {"ã", 'a'}, {"ä", 'a'}, {"å", 'a'}, + {"È", 'E'}, {"É", 'E'}, {"Ê", 'E'}, {"Ë", 'E'}, {"è", 'e'}, {"é", 'e'}, + {"ê", 'e'}, {"ë", 'e'}, {"Ì", 'I'}, {"Í", 'I'}, {"Î", 'I'}, {"Ï", 'I'}, + {"ì", 'i'}, {"í", 'i'}, {"î", 'i'}, {"ï", 'i'}, {"Ò", 'O'}, {"Ó", 'O'}, + {"Ô", 'O'}, {"Õ", 'O'}, {"Ö", 'O'}, {"ò", 'o'}, {"ó", 'o'}, {"ô", 'o'}, + {"õ", 'o'}, {"ö", 'o'}, {"Ù", 'U'}, {"Ú", 'U'}, {"Û", 'U'}, {"Ü", 'U'}, + {"ù", 'u'}, {"ú", 'u'}, {"û", 'u'}, {"ü", 'u'}, {"Ý", 'Y'}, {"ý", 'y'}, + {"Ç", 'C'}, {"ç", 'c'}, {"Ñ", 'N'}, {"ñ", 'n'}, + }; + + for (size_t i = 0; i < input_string.length();) { + int len = utf8_len(input_string[i]); + std::string curChar = input_string.substr(i, len); + auto iter = accent_map.find(curChar); + if (iter != accent_map.end()) { + resultString += iter->second; + } else { + resultString += curChar; + } + i += len; + } + + return resultString; + } + + static size_t utf8_len(char src) { + const size_t lookup[] = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 4}; + uint8_t highbits = static_cast(src) >> 4; + return lookup[highbits]; + } + + const llama_vocab & vocab; +}; + +typedef enum FRAGMENT_BUFFER_VARIANT_TYPE { FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN, 
FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT } FRAGMENT_BUFFER_VARIANT_TYPE; -struct fragment_buffer_variant{ +struct fragment_buffer_variant { fragment_buffer_variant(llama_vocab::id _token) : type(FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN), @@ -7971,8 +8399,7 @@ struct fragment_buffer_variant{ // #define PRETOKENIZERDEBUG -static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list & buffer) -{ +static void tokenizer_st_partition(const llama_vocab & vocab, std::forward_list & buffer) { // for each special token for (const auto & st: vocab.special_tokens_cache) { const auto & special_token = st.first; @@ -8090,10 +8517,8 @@ static std::vector llama_tokenize_internal(const llama_vocab & switch (vocab.type) { case LLAMA_VOCAB_TYPE_SPM: { - for (const auto & fragment: fragment_buffer) - { - if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) - { + for (const auto & fragment: fragment_buffer) { + if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { // without adding this leading whitespace, we do not get the same results as the original tokenizer // TODO: It's likely possible to get rid of this string copy entirely @@ -8113,19 +8538,15 @@ static std::vector llama_tokenize_internal(const llama_vocab & llm_tokenizer_spm tokenizer(vocab); llama_escape_whitespace(raw_text); tokenizer.tokenize(raw_text, output); - } - else // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) - { + } else { // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) output.push_back(fragment.token); } } } break; case LLAMA_VOCAB_TYPE_BPE: { - for (const auto & fragment: fragment_buffer) - { - if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) - { + for (const auto & fragment: fragment_buffer) { + if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); #ifdef PRETOKENIZERDEBUG @@ -8133,9 +8554,23 @@ static std::vector llama_tokenize_internal(const llama_vocab & #endif llm_tokenizer_bpe tokenizer(vocab); tokenizer.tokenize(raw_text, output); + } else { // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) + output.push_back(fragment.token); } - else // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) - { + } + } break; + case LLAMA_VOCAB_TYPE_WPM: + { + for (const auto & fragment: fragment_buffer) { + if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { + auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); + +#ifdef PRETOKENIZERDEBUG + LLAMA_LOG_WARN("TT: (%ld %ld %ld) '%s'\n", raw_text.length(), fragment.offset, fragment.length, raw_text.c_str()); +#endif + llm_tokenizer_wpm tokenizer(vocab); + tokenizer.tokenize(raw_text, output); + } else { // if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_TOKEN) output.push_back(fragment.token); } } @@ -10799,7 +11234,7 @@ struct llama_context * llama_new_context_with_model( // graph inputs { ggml_init_params init_params = { - /* .mem_size */ ggml_tensor_overhead()*5, + /* .mem_size */ ggml_tensor_overhead()*7, /* .mem_buffer */ nullptr, /* .no_alloc */ true, }; @@ -10810,12 +11245,14 @@ struct llama_context * llama_new_context_with_model( ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, cparams.n_batch); ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); + ctx->inp_sum = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, 1, cparams.n_batch); 
        ggml_set_name(ctx->inp_tokens,  "inp_tokens");
        ggml_set_name(ctx->inp_embd,    "inp_embd");
        ggml_set_name(ctx->inp_pos,     "inp_pos");
        ggml_set_name(ctx->inp_KQ_mask, "inp_KQ_mask");
        ggml_set_name(ctx->inp_K_shift, "inp_K_shift");
+       ggml_set_name(ctx->inp_sum,     "inp_sum");

        ctx->buf_input = ggml_backend_alloc_ctx_tensors_from_buft(ctx->ctx_input, llama_default_buffer_type_cpu(true));
@@ -11746,6 +12183,7 @@ static std::string llama_decode_text(const std::string & text) {
 int32_t llama_token_to_piece(const struct llama_model * model, llama_token token, char * buf, int32_t length) {
     if (0 <= token && token < llama_n_vocab(model)) {
         switch (llama_vocab_get_type(model->vocab)) {
+        case LLAMA_VOCAB_TYPE_WPM:
         case LLAMA_VOCAB_TYPE_SPM: {
             // NOTE: we accept all unsupported token types,
             // suppressing them like CONTROL tokens.
diff --git a/llama.h b/llama.h
index cec4158bc..367e8f1a1 100644
--- a/llama.h
+++ b/llama.h
@@ -61,6 +61,7 @@ extern "C" {
     enum llama_vocab_type {
         LLAMA_VOCAB_TYPE_SPM = 0, // SentencePiece
         LLAMA_VOCAB_TYPE_BPE = 1, // Byte Pair Encoding
+        LLAMA_VOCAB_TYPE_WPM = 2, // WordPiece
     };

     enum llama_token_type {

From 3bdc4cd0f595a6096cca4a64aa75ffa8a3503465 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?=
Date: Sun, 11 Feb 2024 19:08:39 +0100
Subject: [PATCH 728/859] CUDA: mul_mat_vec_q tiling, refactor mul mat logic
 (#5434)

* CUDA: mul_mat_vec_q tiling, refactor mul mat logic

Co-authored-by: slaren

---------

Co-authored-by: slaren
---
 ggml-cuda.cu | 265 +++++++++++++++++++++++++++++----------------------
 1 file changed, 149 insertions(+), 116 deletions(-)

diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 5053757e6..96976f248 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -150,8 +150,8 @@
 #define CUDA_USE_TENSOR_CORES
 #endif

-// max batch size to use MMQ kernels when tensor cores are available
-#define MMQ_MAX_BATCH_SIZE 32
+#define MMVQ_MAX_BATCH_SIZE 8 // max batch size to use MMVQ kernels
+#define MMQ_MAX_BATCH_SIZE 32 // max batch size to use MMQ kernels when tensor cores are available

 #if defined(GGML_USE_HIPBLAS)
 #define __CUDA_ARCH__ 1300
@@ -5310,51 +5310,59 @@ template static __global__ void
 #endif // __CUDA_ARCH__ >= CC_VOLTA
 }

-#define MMVQ_NWARPS_NVIDIA    4
-#define MMVQ_NWARPS_AMD_RDNA2 1
-#define MMVQ_NWARPS_AMD_OLD   4
-
-template <int nwarps, int ncols_y_template, int qk, int qi, typename block_q_t, int vdr, vec_dot_q_cuda_t vec_dot_q_cuda>
+template <int ncols_y, int qk, int qi, typename block_q_t, int vdr, vec_dot_q_cuda_t vec_dot_q_cuda>
 #if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__))
-__launch_bounds__(nwarps*WARP_SIZE, 1) // tells the compiler to use as many registers as it wants
+// tell the compiler to use as many registers as it wants, see nwarps definition below
+__launch_bounds__((ncols_y <= 4 ? 4 : 2)*WARP_SIZE, 1)
 #endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__))
 static __global__ void mul_mat_vec_q(
     const void * __restrict__ vx, const void * __restrict__ vy, float * __restrict__ dst,
-    const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y_par, const int nrows_dst) {
+    const int ncols_x, const int nrows_x, const int nrows_y, const int nrows_dst) {

-    const int ncols_y = ncols_y_template != 0 ? ncols_y_template : ncols_y_par;
+#if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) && (defined(RDNA2) || defined(RDNA3))
+    constexpr int nwarps              = 1;
+    constexpr int rows_per_cuda_block = 1;
+#else
+    constexpr int nwarps              = ncols_y <= 4 ? 4 : 2;
+    constexpr int rows_per_cuda_block = ncols_y == 1 ?
1 : 2; +#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) && !defined(RDNA2) && !defined(RDNA3) - const int tid = WARP_SIZE*threadIdx.y + threadIdx.x; - const int row = blockIdx.x; - - const int blocks_per_row_x = ncols_x / qk; - const int blocks_per_col_y = nrows_y / QK8_1; - const int blocks_per_iter = vdr * nwarps*WARP_SIZE / qi; + const int tid = WARP_SIZE*threadIdx.y + threadIdx.x; + const int row0 = rows_per_cuda_block*blockIdx.x; + const int blocks_per_row_x = ncols_x / qk; + const int blocks_per_col_y = nrows_y / QK8_1; + constexpr int blocks_per_iter = vdr * nwarps*WARP_SIZE / qi; // partial sum for each thread - float tmp[ncols_y_template != 0 ? ncols_y_template : 8] = {0.0f}; + float tmp[ncols_y][rows_per_cuda_block] = {0.0f}; const block_q_t * x = (const block_q_t *) vx; const block_q8_1 * y = (const block_q8_1 *) vy; - for (int i = tid / (qi/vdr); i < blocks_per_row_x; i += blocks_per_iter) { - const int ibx = row*blocks_per_row_x + i; // x block index + for (int kbx = tid / (qi/vdr); kbx < blocks_per_row_x; kbx += blocks_per_iter) { + const int kby = kbx * (qk/QK8_1); // y block index that aligns with kbx - const int iby = i * (qk/QK8_1); // y block index that aligns with ibx - - const int iqs = vdr * (tid % (qi/vdr)); // x block quant index when casting the quants to int + // x block quant index when casting the quants to int + const int kqs = vdr * (tid % (qi/vdr)); #pragma unroll for (int j = 0; j < ncols_y; ++j) { - tmp[j] += vec_dot_q_cuda(&x[ibx], &y[j*blocks_per_col_y + iby], iqs); +#pragma unroll + for (int i = 0; i < rows_per_cuda_block; ++i) { + tmp[j][i] += vec_dot_q_cuda( + &x[kbx + (row0 + i)*blocks_per_row_x], &y[j*blocks_per_col_y + kby], kqs); + } } } - __shared__ float tmp_shared[nwarps-1 > 0 ? nwarps-1 : 1][ncols_y_template != 0 ? ncols_y_template : 8][WARP_SIZE]; + __shared__ float tmp_shared[nwarps-1 > 0 ? nwarps-1 : 1][ncols_y][rows_per_cuda_block][WARP_SIZE]; if (threadIdx.y > 0) { #pragma unroll for (int j = 0; j < ncols_y; ++j) { - tmp_shared[threadIdx.y-1][j][threadIdx.x] = tmp[j]; +#pragma unroll + for (int i = 0; i < rows_per_cuda_block; ++i) { + tmp_shared[threadIdx.y-1][j][i][threadIdx.x] = tmp[j][i]; + } } } __syncthreads(); @@ -5366,13 +5374,16 @@ static __global__ void mul_mat_vec_q( #pragma unroll for (int j = 0; j < ncols_y; ++j) { #pragma unroll - for (int i = 0; i < nwarps-1; ++i) { - tmp[j] += tmp_shared[i][j][threadIdx.x]; + for (int i = 0; i < rows_per_cuda_block; ++i) { +#pragma unroll + for (int l = 0; l < nwarps-1; ++l) { + tmp[j][i] += tmp_shared[l][j][i][threadIdx.x]; + } + tmp[j][i] = warp_reduce_sum(tmp[j][i]); } - tmp[j] = warp_reduce_sum(tmp[j]); - if (threadIdx.x == 0) { - dst[j*nrows_dst + row] = tmp[j]; + if (threadIdx.x < rows_per_cuda_block) { + dst[j*nrows_dst + row0 + threadIdx.x] = tmp[j][threadIdx.x]; } } } @@ -6851,65 +6862,75 @@ static void mul_mat_vec_q_cuda( const int ncols_x, const int nrows_x, const int nrows_y, const int ncols_y, const int nrows_dst, cudaStream_t stream) { GGML_ASSERT(ncols_x % qk == 0); - GGML_ASSERT(ncols_y <= 4); + GGML_ASSERT(ncols_y <= MMVQ_MAX_BATCH_SIZE); int id; CUDA_CHECK(cudaGetDevice(&id)); - int nwarps; - if (g_device_caps[id].cc >= CC_OFFSET_AMD) { - nwarps = g_device_caps[id].cc >= CC_RDNA2 ? 
MMVQ_NWARPS_AMD_RDNA2 : MMVQ_NWARPS_AMD_OLD;
-    } else {
-        nwarps = MMVQ_NWARPS_NVIDIA;
-    }
+    int64_t nwarps = 1;
+    int64_t rows_per_cuda_block = 1;

-    const dim3 block_nums(nrows_x, 1, 1);
+    if (g_device_caps[id].cc < CC_RDNA2) { // NVIDIA and AMD older than RDNA2
+        switch(ncols_y) {
+            case 1:
+                nwarps = 4;
+                rows_per_cuda_block = 1;
+                break;
+            case 2:
+            case 3:
+            case 4:
+                nwarps = 4;
+                rows_per_cuda_block = 2;
+                break;
+            case 5:
+            case 6:
+            case 7:
+            case 8:
+                nwarps = 2;
+                rows_per_cuda_block = 2;
+                break;
+            default:
+                GGML_ASSERT(false);
+                break;
+        }
+    }
+    const int64_t nblocks = (nrows_x + rows_per_cuda_block - 1) / rows_per_cuda_block;
+    const dim3 block_nums(nblocks, 1, 1);
     const dim3 block_dims(WARP_SIZE, nwarps, 1);

-    switch (nwarps) {
-        case 1: switch(ncols_y) {
-                case 1:
-                    mul_mat_vec_q<1, 1, qk, qi, block_q_t, vdr, vec_dot>
-                        <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-                    break;
-                case 2:
-                    mul_mat_vec_q<1, 2, qk, qi, block_q_t, vdr, vec_dot>
-                        <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-                    break;
-                case 3:
-                    mul_mat_vec_q<1, 3, qk, qi, block_q_t, vdr, vec_dot>
-                        <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-                    break;
-                case 4:
-                    mul_mat_vec_q<1, 4, qk, qi, block_q_t, vdr, vec_dot>
-                        <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-                    break;
-                default:
-                    GGML_ASSERT(false);
-                    break;
-            } break;
-        case 4: switch(ncols_y) {
-                case 1:
-                    mul_mat_vec_q<4, 1, qk, qi, block_q_t, vdr, vec_dot>
-                        <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-                    break;
-                case 2:
-                    mul_mat_vec_q<4, 2, qk, qi, block_q_t, vdr, vec_dot>
-                        <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-                    break;
-                case 3:
-                    mul_mat_vec_q<4, 3, qk, qi, block_q_t, vdr, vec_dot>
-                        <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-                    break;
-                case 4:
-                    mul_mat_vec_q<4, 4, qk, qi, block_q_t, vdr, vec_dot>
-                        <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, ncols_y, nrows_dst);
-                    break;
-                default:
-                    GGML_ASSERT(false);
-                    break;
-            } break;
-
+    switch (ncols_y) {
+        case 1:
+            mul_mat_vec_q<1, qk, qi, block_q_t, vdr, vec_dot>
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst);
+            break;
+        case 2:
+            mul_mat_vec_q<2, qk, qi, block_q_t, vdr, vec_dot>
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst);
+            break;
+        case 3:
+            mul_mat_vec_q<3, qk, qi, block_q_t, vdr, vec_dot>
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst);
+            break;
+        case 4:
+            mul_mat_vec_q<4, qk, qi, block_q_t, vdr, vec_dot>
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst);
+            break;
+        case 5:
+            mul_mat_vec_q<5, qk, qi, block_q_t, vdr, vec_dot>
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst);
+            break;
+        case 6:
+            mul_mat_vec_q<6, qk, qi, block_q_t, vdr, vec_dot>
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst);
+            break;
+        case 7:
+            mul_mat_vec_q<7, qk, qi, block_q_t, vdr, vec_dot>
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst);
+            break;
+        case 8:
+            mul_mat_vec_q<8, qk, qi, block_q_t, vdr, vec_dot>
+                <<<block_nums, block_dims, 0, stream>>>(vx, vy, dst, ncols_x, nrows_x, nrows_y, nrows_dst);
+            break;
         default:
             GGML_ASSERT(false);
             break;
@@ -9735,7 +9756,7 @@ static __global__ void k_compute_batched_ptrs(
         ptrs_dst[0*ne23 + i12 + i13*ne12] = (      char *)         dst + i12*nbd2 + i13*nbd3;
 }

-static void ggml_cuda_mul_mat_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
+static void ggml_cuda_mul_mat_batched_cublas(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) {
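// Aside: a standalone host-side sketch (plain C++, no CUDA required) of the MMVQ
// launch-parameter selection above for NVIDIA and pre-RDNA2 AMD devices: batch
// widths ncols_y <= 4 keep 4 warps per block, widths 5..8 drop to 2 warps, and
// every width except 1 computes 2 output rows per CUDA block, which is why the
// grid size is nrows_x / rows_per_cuda_block rounded up. The struct and function
// names here are illustrative assumptions.

#include <cassert>
#include <cstdint>
#include <cstdio>

struct mmvq_launch {
    int64_t nwarps;
    int64_t rows_per_cuda_block;
    int64_t nblocks; // grid size in blocks
};

static mmvq_launch mmvq_launch_params(int ncols_y, int64_t nrows_x) {
    assert(ncols_y >= 1 && ncols_y <= 8); // MMVQ_MAX_BATCH_SIZE
    const int64_t nwarps = ncols_y <= 4 ? 4 : 2;
    const int64_t rows   = ncols_y == 1 ? 1 : 2;
    // one block covers `rows` output rows, so round the grid size up
    return { nwarps, rows, (nrows_x + rows - 1) / rows };
}

int main() {
    for (int ncols_y = 1; ncols_y <= 8; ++ncols_y) {
        const mmvq_launch p = mmvq_launch_params(ncols_y, 4096);
        printf("ncols_y=%d -> nwarps=%lld rows/block=%lld grid=%lld\n", ncols_y,
               (long long) p.nwarps, (long long) p.rows_per_cuda_block, (long long) p.nblocks);
    }
    return 0;
}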
GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); @@ -9893,39 +9914,69 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 int64_t min_compute_capability = INT_MAX; + bool any_pascal_with_slow_fp16 = false; if (split) { ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *) src0->buffer->buft->context; auto & tensor_split = buft_ctx->tensor_split; for (int id = 0; id < g_device_count; ++id) { - if (min_compute_capability > g_device_caps[id].cc && tensor_split[id] < (id + 1 < g_device_count ? tensor_split[id + 1] : 1.0f)) { + // skip devices that are not going to do any work: + if (tensor_split[id] >= (id + 1 < g_device_count ? tensor_split[id + 1] : 1.0f)) { + continue; + } + + if (min_compute_capability > g_device_caps[id].cc) { min_compute_capability = g_device_caps[id].cc; } + if (g_device_caps[id].cc == 610) { + any_pascal_with_slow_fp16 = true; + } } } else { - min_compute_capability = g_device_caps[g_main_device].cc; + min_compute_capability = g_device_caps[g_main_device].cc; + any_pascal_with_slow_fp16 = g_device_caps[g_main_device].cc == 610; } + // check data types and tensor shapes for custom matrix multiplication kernels: + bool use_dequantize_mul_mat_vec = (ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32 + && src0->ne[0] % GGML_CUDA_DMMV_X == 0 && src1->ne[1] == 1; + + bool use_mul_mat_vec_q = ggml_is_quantized(src0->type) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32 + && src1->ne[1] <= MMVQ_MAX_BATCH_SIZE; + + bool use_mul_mat_q = ggml_cuda_supports_mmq(src0->type) + && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32; + #if defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) const bool fp16_performance_good = min_compute_capability >= CC_RDNA1; - bool use_mul_mat_q = ggml_is_quantized(src0->type); + #ifdef CUDA_USE_TENSOR_CORES use_mul_mat_q = use_mul_mat_q && min_compute_capability < CC_RDNA3; #endif // CUDA_USE_TENSOR_CORES #else - const bool fp16_performance_good = min_compute_capability >= CC_VOLTA; - bool use_mul_mat_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); + // fp16 performance is good on Volta or newer and on P100 (compute capability 6.0) + const bool fp16_performance_good = min_compute_capability >= CC_PASCAL && !any_pascal_with_slow_fp16; + + // mmvq and mmq need the __dp4a instruction which on NVIDIA is only available for CC >= 6.1 + use_mul_mat_vec_q = use_mul_mat_vec_q && min_compute_capability >= MIN_CC_DP4A; + use_mul_mat_q = use_mul_mat_q && min_compute_capability >= MIN_CC_DP4A; + #ifdef CUDA_USE_TENSOR_CORES // when tensor cores are available, use them for large batch size // ref: https://github.com/ggerganov/llama.cpp/pull/3776 - use_mul_mat_q = use_mul_mat_q && !(fp16_performance_good && src1->ne[1] > MMQ_MAX_BATCH_SIZE); + use_mul_mat_q = use_mul_mat_q && (!fp16_performance_good || src1->ne[1] <= MMQ_MAX_BATCH_SIZE); #endif // CUDA_USE_TENSOR_CORES #endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) - use_mul_mat_q = use_mul_mat_q && ggml_cuda_supports_mmq(src0->type); + // if mmvq is available it's a better choice than dmmv: +#ifndef GGML_CUDA_FORCE_DMMV + use_dequantize_mul_mat_vec = use_dequantize_mul_mat_vec && !use_mul_mat_vec_q; +#endif // GGML_CUDA_FORCE_DMMV // debug helpers //printf("src0: %8d %8d %8d %8d\n", src0->ne[0], src0->ne[1], src0->ne[2], src0->ne[3]); @@ -9943,33 
+9994,15 @@ static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1 ggml_cuda_mul_mat_vec_nc(src0, src1, dst); } else if (!split && all_on_device && fp16_performance_good && src0->type == GGML_TYPE_F16 && !ggml_is_transposed(src0) && !ggml_is_transposed(src1) && src1->ne[2]*src1->ne[3] > 1) { // KQ + KQV multi-batch - ggml_cuda_mul_mat_mat_batched_cublas(src0, src1, dst); - } else if (src0->type == GGML_TYPE_F32) { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); - } else if (ggml_is_quantized(src0->type) || src0->type == GGML_TYPE_F16) { - if (src1->ne[1] == 1 && src0->ne[0] % GGML_CUDA_DMMV_X == 0 && src1->type == GGML_TYPE_F32) { -#ifdef GGML_CUDA_FORCE_DMMV - const bool use_mul_mat_vec_q = false; -#else - const bool use_mul_mat_vec_q = min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type); -#endif // GGML_CUDA_FORCE_DMMV - - if (use_mul_mat_vec_q) { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); - } else { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); - } - } else { - if (src1->ne[1] <= 4 && min_compute_capability >= MIN_CC_DP4A && ggml_is_quantized(src0->type) && src1->type == GGML_TYPE_F32) { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); - } else if (use_mul_mat_q) { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); - } else { - ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); - } - } + ggml_cuda_mul_mat_batched_cublas(src0, src1, dst); + } else if (use_dequantize_mul_mat_vec) { + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_dequantize_mul_mat_vec, false); + } else if (use_mul_mat_vec_q) { + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_vec_q, true); + } else if (use_mul_mat_q) { + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_q, true); } else { - GGML_ASSERT(false); + ggml_cuda_op_mul_mat(src0, src1, dst, ggml_cuda_op_mul_mat_cublas, false); } } From 3b169441dfe8e420f88d1592708cc2a871daadb9 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 12 Feb 2024 09:16:06 +0200 Subject: [PATCH 729/859] sync : ggml (#5452) * ggml-alloc : v3 (ggml/727) * ggml-alloc v3 ggml-ci * fix ci ggml-ci * whisper : check for backend buffer allocation failures * whisper : avoid leaks when initialization fails * cleanup ggml-ci * style fixes ggml-ci * sync : ggml * update llama.cpp, clip.cpp, export-lora.cpp * update finetune.cpp, train-text-from-scratch.cpp ggml-ci * ggml-backend : reduce alignment to 32 to match gguf and fix mmap --------- Co-authored-by: slaren --- examples/export-lora/export-lora.cpp | 19 +- examples/finetune/finetune.cpp | 147 +- examples/llava/clip.cpp | 152 +- .../train-text-from-scratch.cpp | 112 +- ggml-alloc.c | 1373 +++++++++-------- ggml-alloc.h | 110 +- ggml-backend.c | 492 +++--- ggml-backend.h | 15 +- ggml.c | 28 +- ggml.h | 18 +- llama.cpp | 181 +-- scripts/sync-ggml.last | 2 +- 12 files changed, 1287 insertions(+), 1362 deletions(-) diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index 4cd5d99bb..2f7be8a13 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -337,24 +337,14 @@ static bool apply_lora(struct ggml_tensor * tensor, struct lora_data * lora, int params.mem_buffer = NULL; params.no_alloc = true; struct ggml_context * ctx = NULL; - struct ggml_allocr * alloc = NULL; - struct ggml_cgraph * gf = NULL; + struct ggml_gallocr * alloc = NULL; 
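// Aside: the allocator flow this patch series migrates every example to, as a
// standalone sketch against the post-sync ggml API: a single ggml_gallocr_t both
// sizes and allocates a graph, replacing the old measure allocator plus manual
// byte vector plus second allocation pass. The graph contents (two 64x64 matrices
// and one matmul) are illustrative assumptions.

#include "ggml.h"
#include "ggml-alloc.h"
#include "ggml-backend.h"
#include <cstdio>

int main() {
    struct ggml_init_params params = {
        /*.mem_size   =*/ ggml_tensor_overhead()*8 + ggml_graph_overhead(),
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ true, // tensor data lives in backend buffers, not the ctx
    };
    struct ggml_context * ctx = ggml_init(params);

    struct ggml_tensor * a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, 64, 64);
    struct ggml_tensor * b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, 64, 64);
    struct ggml_cgraph * gf = ggml_new_graph(ctx);
    ggml_build_forward_expand(gf, ggml_mul_mat(ctx, a, b));

    ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type());

    // optional measure step: reserve worst-case buffers without binding tensors
    ggml_gallocr_reserve(alloc, gf);
    printf("graph needs %zu bytes\n", ggml_gallocr_get_buffer_size(alloc, 0));

    // actual allocation: assigns data pointers to every tensor in the graph
    ggml_gallocr_alloc_graph(alloc, gf);

    ggml_gallocr_free(alloc);
    ggml_free(ctx);
    return 0;
}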
+ struct ggml_cgraph * gf = NULL; ctx = ggml_init(params); - alloc = ggml_allocr_new_measure(tensor_alignment); + alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = build_graph_lora(ctx, tensor, lora_a, lora_b, scaling); - size_t alloc_size = ggml_allocr_alloc_graph(alloc, gf); - ggml_allocr_free(alloc); - ggml_free(ctx); - static std::vector data_compute; - data_compute.resize(alloc_size + tensor_alignment); - - ctx = ggml_init(params); - alloc = ggml_allocr_new(data_compute.data(), data_compute.size(), tensor_alignment); - gf = build_graph_lora(ctx, tensor, lora_a, lora_b, scaling); - ggml_allocr_alloc_graph(alloc, gf); - ggml_allocr_free(alloc); + ggml_gallocr_alloc_graph(alloc, gf); struct ggml_cplan cplan = ggml_graph_plan(gf, n_threads); static std::vector data_work; @@ -363,6 +353,7 @@ static bool apply_lora(struct ggml_tensor * tensor, struct lora_data * lora, int ggml_graph_compute(gf, &cplan); + ggml_gallocr_free(alloc); ggml_free(ctx); return true; } diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index b7e19c5fe..b11c56020 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1,5 +1,6 @@ #include "ggml.h" #include "ggml-alloc.h" +#include "ggml-backend.h" #include "llama.h" #include "common.h" #include "train.h" @@ -13,8 +14,6 @@ #pragma warning(disable: 4244 4267) // possible loss of data #endif -static const size_t tensor_alignment = 32; - struct my_llama_hparams { uint32_t n_vocab = 32000; uint32_t n_ctx = 512; @@ -128,7 +127,7 @@ struct my_llama_lora_layer { struct my_llama_lora { struct ggml_context * ctx = NULL; - std::vector data; + ggml_backend_buffer_t data; my_llama_lora_hparams hparams; @@ -372,63 +371,6 @@ static void set_param_lora(struct my_llama_lora * lora) { } } -static void alloc_lora(struct ggml_allocr * alloc, struct my_llama_lora * lora) { - ggml_allocr_alloc(alloc, lora->tok_embeddings_a); - ggml_allocr_alloc(alloc, lora->tok_embeddings_b); - ggml_allocr_alloc(alloc, lora->norm_a); - ggml_allocr_alloc(alloc, lora->norm_b); - ggml_allocr_alloc(alloc, lora->output_a); - ggml_allocr_alloc(alloc, lora->output_b); - for (uint32_t i = 0; i < lora->layers.size(); ++i) { - auto & layer = lora->layers[i]; - ggml_allocr_alloc(alloc, layer.attention_norm_a); - ggml_allocr_alloc(alloc, layer.attention_norm_b); - ggml_allocr_alloc(alloc, layer.wq_a); - ggml_allocr_alloc(alloc, layer.wq_b); - ggml_allocr_alloc(alloc, layer.wk_a); - ggml_allocr_alloc(alloc, layer.wk_b); - ggml_allocr_alloc(alloc, layer.wv_a); - ggml_allocr_alloc(alloc, layer.wv_b); - ggml_allocr_alloc(alloc, layer.wo_a); - ggml_allocr_alloc(alloc, layer.wo_b); - ggml_allocr_alloc(alloc, layer.ffn_norm_a); - ggml_allocr_alloc(alloc, layer.ffn_norm_b); - ggml_allocr_alloc(alloc, layer.w1_a); - ggml_allocr_alloc(alloc, layer.w1_b); - ggml_allocr_alloc(alloc, layer.w2_a); - ggml_allocr_alloc(alloc, layer.w2_b); - ggml_allocr_alloc(alloc, layer.w3_a); - ggml_allocr_alloc(alloc, layer.w3_b); - } - ggml_allocr_alloc(alloc, lora->tok_embeddings_a->grad); - ggml_allocr_alloc(alloc, lora->tok_embeddings_b->grad); - ggml_allocr_alloc(alloc, lora->norm_a->grad); - ggml_allocr_alloc(alloc, lora->norm_b->grad); - ggml_allocr_alloc(alloc, lora->output_a->grad); - ggml_allocr_alloc(alloc, lora->output_b->grad); - for (uint32_t i = 0; i < lora->layers.size(); ++i) { - auto & layer = lora->layers[i]; - ggml_allocr_alloc(alloc, layer.attention_norm_a->grad); - ggml_allocr_alloc(alloc, layer.attention_norm_b->grad); - ggml_allocr_alloc(alloc, 
layer.wq_a->grad); - ggml_allocr_alloc(alloc, layer.wq_b->grad); - ggml_allocr_alloc(alloc, layer.wk_a->grad); - ggml_allocr_alloc(alloc, layer.wk_b->grad); - ggml_allocr_alloc(alloc, layer.wv_a->grad); - ggml_allocr_alloc(alloc, layer.wv_b->grad); - ggml_allocr_alloc(alloc, layer.wo_a->grad); - ggml_allocr_alloc(alloc, layer.wo_b->grad); - ggml_allocr_alloc(alloc, layer.ffn_norm_a->grad); - ggml_allocr_alloc(alloc, layer.ffn_norm_b->grad); - ggml_allocr_alloc(alloc, layer.w1_a->grad); - ggml_allocr_alloc(alloc, layer.w1_b->grad); - ggml_allocr_alloc(alloc, layer.w2_a->grad); - ggml_allocr_alloc(alloc, layer.w2_b->grad); - ggml_allocr_alloc(alloc, layer.w3_a->grad); - ggml_allocr_alloc(alloc, layer.w3_b->grad); - } -} - static void init_lora(const struct my_llama_model * model, struct my_llama_lora * lora) { const auto & lparams = lora->hparams; @@ -522,18 +464,8 @@ static void init_lora(const struct my_llama_model * model, struct my_llama_lora set_param_lora(lora); - // measure data size - size_t size = 0; - for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { - size += GGML_PAD(ggml_nbytes(t), tensor_alignment); - } - - // allocate data - struct ggml_allocr * alloc = NULL; - lora->data.resize(size + tensor_alignment); - alloc = ggml_allocr_new(lora->data.data(), lora->data.size(), tensor_alignment); - alloc_lora(alloc, lora); - ggml_allocr_free(alloc); + // allocate data for lora tensors + lora->data = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cpu_buffer_type()); } static void randomize_lora(struct my_llama_lora * lora, int seed, float mean, float std, float min, float max) { @@ -579,7 +511,7 @@ static void randomize_lora(struct my_llama_lora * lora, int seed, float mean, fl static struct ggml_tensor * llama_build_lora_finetune_graphs( struct my_llama_model * model, struct my_llama_lora * lora, - struct ggml_allocr * alloc, + ggml_gallocr_t alloc, struct ggml_context * ctx, struct ggml_cgraph * gf, struct ggml_cgraph * gb, @@ -590,7 +522,8 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( const int n_tokens, const int n_batch, const bool enable_flash_attn, - const bool enable_checkpointing) { + const bool enable_checkpointing, + const bool measure_only) { ggml_set_scratch(ctx, { 0, 0, nullptr, }); const int n_past = 0; @@ -622,13 +555,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( // KQ_pos - contains the positions struct ggml_tensor * KQ_pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, N); - ggml_allocr_alloc(alloc, KQ_pos); - if (!ggml_allocr_is_measure(alloc)) { - int * data = (int *) KQ_pos->data; - for (int i = 0; i < N; ++i) { - data[i] = n_past + i; - } - } + ggml_set_input(KQ_pos); // rope has so much parameters that we make a custom function for it auto rope = [ctx, KQ_pos, n_rot, n_ctx, rope_freq_base, rope_freq_scale] @@ -780,7 +707,7 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( // input gradient ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, t36->grad, 1.0f)); GGML_ASSERT(t36->grad->data == NULL && t36->grad->view_src == NULL); - ggml_allocr_alloc(alloc, t36->grad); + ggml_set_input(t36->grad); // KQ_pos ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, 1.0f)); @@ -805,11 +732,23 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( // note: they will be freed in reverse order for (unsigned int i = 0; i < checkpoints.size(); ++i) { if (checkpoints[i]->data == NULL && checkpoints[i]->view_src == NULL) { - ggml_allocr_alloc(alloc, 
checkpoints[i]); + ggml_set_input(checkpoints[i]); } } - ggml_allocr_alloc_graph(alloc, gb); + if (measure_only) { + ggml_gallocr_reserve(alloc, gb); + } else { + ggml_gallocr_alloc_graph(alloc, gb); + + // set KQ_pos + { + int * data = (int *) KQ_pos->data; + for (int i = 0; i < N; ++i) { + data[i] = n_past + i; + } + } + } // remove the additional nodes and leafs for (int i = n_leafs_before; i < gb->n_leafs; ++i) { @@ -1663,7 +1602,7 @@ int main(int argc, char ** argv) { printf("%s: seen train_samples %llu\n", __func__, (long long unsigned) train->train_samples); printf("%s: seen train_tokens %llu\n", __func__, (long long unsigned) train->train_tokens); printf("%s: completed train_epochs %llu\n", __func__, (long long unsigned) train->train_epochs); - printf("%s: lora_size = %zu bytes (%.1f MB)\n", __func__, (ggml_used_mem(lora.ctx) + lora.data.size()), (float) (ggml_used_mem(lora.ctx) + lora.data.size()) / (1024.0f*1024.0f)); + printf("%s: lora_size = %zu bytes (%.1f MB)\n", __func__, (ggml_used_mem(lora.ctx) + ggml_backend_buffer_get_size(lora.data)), (float) (ggml_used_mem(lora.ctx) + ggml_backend_buffer_get_size(lora.data)) / (1024.0f*1024.0f)); if (params.only_write_lora) { save_train_files_data save_data; @@ -1690,10 +1629,6 @@ int main(int argc, char ** argv) { int n_vocab = model.hparams.n_vocab; int n_batch = params.common.n_batch; - - std::vector mem_input_data; - std::vector mem_compute_data; - // context for input tensors without their data struct ggml_init_params ctx_input_params = { ggml_tensor_overhead() * 2, // mem_size @@ -1706,17 +1641,11 @@ int main(int argc, char ** argv) { struct ggml_tensor * tokens_input = ggml_new_tensor_2d(ctx_input, GGML_TYPE_I32, n_tokens, n_batch); struct ggml_tensor * target_probs = ggml_new_tensor_3d(ctx_input, GGML_TYPE_F32, n_vocab, n_tokens, n_batch); - // measure required memory for input tensors - size_t max_input_size = GGML_PAD(ggml_nbytes(tokens_input), tensor_alignment) + - GGML_PAD(ggml_nbytes(target_probs), tensor_alignment) + - tensor_alignment; - printf("%s: input_size = %zu bytes (%.1f MB)\n", __func__, max_input_size, (float) max_input_size / (1024.0f*1024.0f)); - // allocate input tensors - mem_input_data.resize(max_input_size); - ggml_allocr_t alloc_inps = ggml_allocr_new(mem_input_data.data(), mem_input_data.size(), tensor_alignment); - ggml_allocr_alloc(alloc_inps, tokens_input); - ggml_allocr_alloc(alloc_inps, target_probs); + // measure required memory for input tensors + ggml_backend_buffer_t input_data = ggml_backend_alloc_ctx_tensors_from_buft(ctx_input, ggml_backend_cpu_buffer_type()); + size_t max_input_size = ggml_backend_buffer_get_size(input_data); + printf("%s: input_size = %zu bytes (%.1f MB)\n", __func__, max_input_size, (float) max_input_size / (1024.0f*1024.0f)); // context for compute tensors without their data const size_t estimated_compute_size_wo_data = ( @@ -1743,7 +1672,7 @@ int main(int argc, char ** argv) { // find best evaluation order for (unsigned order = 0; order < (unsigned) GGML_CGRAPH_EVAL_ORDER_COUNT; ++order) { ctx_compute = ggml_init(ctx_compute_params); - ggml_allocr_t alloc = ggml_allocr_new_measure(tensor_alignment); + ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = (enum ggml_cgraph_eval_order) order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1756,14 +1685,15 @@ int main(int argc, char ** argv) { &logits, tokens_input, target_probs, n_tokens, n_batch, 
params.common.use_flash, - params.common.use_checkpointing + params.common.use_checkpointing, + true ); - size_t max_compute_size = ggml_allocr_max_size(alloc) + tensor_alignment; + size_t max_compute_size = ggml_gallocr_get_buffer_size(alloc, 0); // FIXME: this will still allocate the buffer if (max_compute_size < best_compute_size) { best_compute_size = max_compute_size; best_order = gf->order; } - ggml_allocr_free(alloc); + ggml_gallocr_free(alloc); ggml_free(ctx_compute); } size_t max_compute_size = best_compute_size; @@ -1774,9 +1704,8 @@ int main(int argc, char ** argv) { "invalid"); // allocate compute tensors - mem_compute_data.resize(max_compute_size); ctx_compute = ggml_init(ctx_compute_params); - ggml_allocr_t alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); + ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = best_order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1789,11 +1718,9 @@ int main(int argc, char ** argv) { &logits, tokens_input, target_probs, n_tokens, n_batch, params.common.use_flash, - params.common.use_checkpointing + params.common.use_checkpointing, + false ); - ggml_allocr_free(alloc); - ggml_allocr_free(alloc_inps); - // tokenize data std::vector train_tokens; @@ -1908,6 +1835,8 @@ int main(int argc, char ** argv) { ggml_free(ctx_work); ggml_free(ctx_compute); ggml_free(ctx_input); + ggml_gallocr_free(alloc); + int64_t t1 = ggml_time_ms(); printf("%s: total training time: ", __func__); diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 9129052a2..ccd0d85ad 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -367,7 +367,7 @@ struct clip_ctx { ggml_backend_buffer_t params_buffer = NULL; ggml_backend_buffer_t compute_buffer = NULL; ggml_backend_t backend = NULL; - ggml_allocr * compute_alloc = NULL; + ggml_gallocr_t compute_alloc = NULL; }; static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32_batch * imgs) { @@ -405,31 +405,8 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 struct ggml_cgraph * gf = ggml_new_graph(ctx0); struct ggml_tensor * inp_raw = ggml_new_tensor_4d(ctx0, GGML_TYPE_F32, image_size, image_size, 3, batch_size); - ggml_allocr_alloc(ctx->compute_alloc, inp_raw); - - if (!ggml_allocr_is_measure(ctx->compute_alloc)) { - float * data = (float *)malloc(ggml_nbytes(inp_raw)); - - for (size_t i = 0; i < imgs->size; i++) { - const int nx = imgs->data[i].nx; - const int ny = imgs->data[i].ny; - GGML_ASSERT(nx == image_size && ny == image_size); - - const int n = nx * ny; - - for (int b = 0; b < batch_size; b++) { - for (int k = 0; k < 3; k++) { - for (int y = 0; y < ny; y++) { - for (int x = 0; x < nx; x++) { - data[(b * 3 * n) + k * n + y * nx + x] = imgs->data[b].buf[3 * (y * nx + x) + k]; - } - } - } - } - } - ggml_backend_tensor_set(inp_raw, data, 0, ggml_nbytes(inp_raw)); - free(data); - } + ggml_set_name(inp_raw, "inp_raw"); + ggml_set_input(inp_raw); struct ggml_tensor * inp = ggml_conv_2d(ctx0, model.patch_embeddings, inp_raw, patch_size, patch_size, 0, 0, 1, 1); @@ -438,13 +415,8 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 // concat class_embeddings and patch_embeddings struct ggml_tensor * embeddings = ggml_new_tensor_3d(ctx0, GGML_TYPE_F32, hidden_size, num_positions, batch_size); - ggml_allocr_alloc(ctx->compute_alloc, embeddings); 
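// Aside: the input-handling pattern clip.cpp is being moved to, as a standalone
// sketch: name the tensor and mark it as an input at graph-build time, allocate
// the graph, then look the tensor up by name and upload its data. The tensor
// name "inp_demo", its shape, and the single ggml_sqr op are illustrative
// assumptions, not clip.cpp code.

#include "ggml.h"
#include "ggml-alloc.h"
#include "ggml-backend.h"

int main() {
    struct ggml_init_params params = {
        /*.mem_size   =*/ ggml_tensor_overhead()*8 + ggml_graph_overhead(),
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ true, // data lives in backend buffers
    };
    struct ggml_context * ctx = ggml_init(params);

    // build time: create the input without data and give it a findable name
    struct ggml_tensor * inp = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 8);
    ggml_set_name(inp, "inp_demo");
    ggml_set_input(inp);

    struct ggml_cgraph * gf = ggml_new_graph(ctx);
    ggml_build_forward_expand(gf, ggml_sqr(ctx, inp));

    ggml_backend_t backend = ggml_backend_cpu_init();
    ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_get_default_buffer_type(backend));
    ggml_gallocr_alloc_graph(alloc, gf);

    // run time: find the input by name and set its data after allocation
    struct ggml_tensor * t = ggml_graph_get_tensor(gf, "inp_demo");
    float data[8] = {0, 1, 2, 3, 4, 5, 6, 7};
    ggml_backend_tensor_set(t, data, 0, ggml_nbytes(t));

    ggml_backend_graph_compute(backend, gf);

    ggml_gallocr_free(alloc);
    ggml_backend_free(backend);
    ggml_free(ctx);
    return 0;
}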
- if (!ggml_allocr_is_measure(ctx->compute_alloc)) { - void* zero_mem = malloc(ggml_nbytes(embeddings)); - memset(zero_mem, 0, ggml_nbytes(embeddings)); - ggml_backend_tensor_set(embeddings, zero_mem, 0, ggml_nbytes(embeddings)); - free(zero_mem); - } + ggml_set_name(embeddings, "embeddings"); + ggml_set_input(embeddings); embeddings = ggml_acc(ctx0, embeddings, model.class_embedding, embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], 0); @@ -453,15 +425,8 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 embeddings->nb[1], embeddings->nb[2], embeddings->nb[3], model.class_embedding->nb[1]); struct ggml_tensor * positions = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_positions); - ggml_allocr_alloc(ctx->compute_alloc, positions); - if (!ggml_allocr_is_measure(ctx->compute_alloc)) { - int* positions_data = (int*)malloc(ggml_nbytes(positions)); - for (int i = 0; i < num_positions; i++) { - positions_data[i] = i; - } - ggml_backend_tensor_set(positions, positions_data, 0, ggml_nbytes(positions)); - free(positions_data); - } + ggml_set_name(positions, "positions"); + ggml_set_input(positions); embeddings = ggml_add(ctx0, embeddings, ggml_get_rows(ctx0, model.position_embeddings, positions)); @@ -560,15 +525,8 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 embeddings = ggml_reshape_2d(ctx0, embeddings, embeddings->ne[0], embeddings->ne[1]); struct ggml_tensor * patches = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, num_patches); - ggml_allocr_alloc(ctx->compute_alloc, patches); - if (!ggml_allocr_is_measure(ctx->compute_alloc)) { - int* patches_data = (int*)malloc(ggml_nbytes(patches)); - for (int i = 0; i < num_patches; i++) { - patches_data[i] = i + 1; - } - ggml_backend_tensor_set(patches, patches_data, 0, ggml_nbytes(patches)); - free(patches_data); - } + ggml_set_name(patches, "patches"); + ggml_set_input(patches); // shape [1, 576, 1024] // ne is whcn, ne = [1024, 576, 1, 1] @@ -809,7 +767,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } // data - size_t buffer_size = 0; + size_t model_size = 0; { for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, i); @@ -817,7 +775,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { enum ggml_type type = gguf_get_tensor_type(ctx, i); struct ggml_tensor * cur = ggml_get_tensor(meta, name); size_t tensor_size = ggml_nbytes(cur); - buffer_size += tensor_size; + model_size += tensor_size; if (verbosity >= 3) { printf("%s: tensor[%d]: n_dims = %d, name = %s, tensor_size=%zu, offset=%zu, shape:[%" PRIu64 ", %" PRIu64 ", %" PRIu64 ", %" PRIu64 "], type = %s\n", __func__, i, ggml_n_dims(cur), cur->name, tensor_size, offset, cur->ne[0], cur->ne[1], cur->ne[2], cur->ne[3], ggml_type_name(type)); @@ -825,8 +783,6 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } } - buffer_size += n_tensors * 128 /* CLIP PADDING */; - clip_ctx * new_clip = new clip_ctx; // update projector type @@ -886,12 +842,12 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("%s: text_encoder: %d\n", __func__, new_clip->has_text_encoder); printf("%s: vision_encoder: %d\n", __func__, new_clip->has_vision_encoder); printf("%s: llava_projector: %d\n", __func__, new_clip->has_llava_projector); - printf("%s: model size: %.2f MB\n", __func__, buffer_size / 1024.0 / 1024.0); + printf("%s: model size: %.2f MB\n", __func__, model_size / 1024.0 / 
1024.0); printf("%s: metadata size: %.2f MB\n", __func__, ggml_get_mem_size(meta) / 1024.0 / 1024.0); } } - printf("%s: params backend buffer size = % 6.2f MB (%i tensors)\n", __func__, buffer_size / (1024.0 * 1024.0), n_tensors); + printf("%s: params backend buffer size = % 6.2f MB (%i tensors)\n", __func__, model_size / (1024.0 * 1024.0), n_tensors); // load tensors { @@ -925,12 +881,10 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { } // alloc memory and offload data - new_clip->params_buffer = ggml_backend_alloc_buffer(new_clip->backend, buffer_size); - ggml_allocr* alloc = ggml_allocr_new_from_buffer(new_clip->params_buffer); + new_clip->params_buffer = ggml_backend_alloc_ctx_tensors(new_clip->ctx_data, new_clip->backend); for (int i = 0; i < n_tensors; ++i) { const char * name = gguf_get_tensor_name(ctx, i); struct ggml_tensor * cur = ggml_get_tensor(new_clip->ctx_data, name); - ggml_allocr_alloc(alloc, cur); const size_t offset = gguf_get_data_offset(ctx) + gguf_get_tensor_offset(ctx, i); fin.seekg(offset, std::ios::beg); if (!fin) { @@ -949,7 +903,6 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { ggml_backend_tensor_set(cur, read_buf.data(), 0, num_bytes); } } - ggml_allocr_free(alloc); fin.close(); } @@ -1077,15 +1030,12 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { // measure mem requirement and allocate { new_clip->buf_compute_meta.resize(GGML_DEFAULT_GRAPH_SIZE * ggml_tensor_overhead() + ggml_graph_overhead()); - new_clip->compute_alloc = ggml_allocr_new_measure_from_backend(new_clip->backend); + new_clip->compute_alloc = ggml_gallocr_new(ggml_backend_get_default_buffer_type(new_clip->backend)); clip_image_f32_batch batch; batch.size = 1; ggml_cgraph * gf = clip_image_build_graph(new_clip, &batch); - size_t compute_memory_buffer_size = ggml_allocr_alloc_graph(new_clip->compute_alloc, gf); - ggml_allocr_free(new_clip->compute_alloc); - new_clip->compute_buffer = ggml_backend_alloc_buffer(new_clip->backend, compute_memory_buffer_size); - new_clip->compute_alloc = ggml_allocr_new_from_buffer(new_clip->compute_buffer); - + ggml_gallocr_reserve(new_clip->compute_alloc, gf); + size_t compute_memory_buffer_size = ggml_gallocr_get_buffer_size(new_clip->compute_alloc, 0); printf("%s: compute allocated memory: %.2f MB\n", __func__, compute_memory_buffer_size /1024.0/1024.0); } @@ -1267,12 +1217,72 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima GGML_ASSERT(batch_size == 1); // TODO: support multiple images } - // reset alloc buffer to clean the memory from previous invocations - ggml_allocr_reset(ctx->compute_alloc); - // build the inference graph ggml_cgraph * gf = clip_image_build_graph(ctx, imgs); - ggml_allocr_alloc_graph(ctx->compute_alloc, gf); + ggml_gallocr_alloc_graph(ctx->compute_alloc, gf); + + // set inputs + const auto & model = ctx->vision_model; + const auto & hparams = model.hparams; + const int image_size = hparams.image_size; + const int patch_size = hparams.patch_size; + const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); + const int num_positions = num_patches + 1; + + { + struct ggml_tensor * inp_raw = ggml_graph_get_tensor(gf, "inp_raw"); + float * data = (float *)malloc(ggml_nbytes(inp_raw)); + + for (size_t i = 0; i < imgs->size; i++) { + const int nx = imgs->data[i].nx; + const int ny = imgs->data[i].ny; + GGML_ASSERT(nx == image_size && ny == image_size); + + const int n = nx * ny; + + for 
(int b = 0; b < batch_size; b++) { + for (int k = 0; k < 3; k++) { + for (int y = 0; y < ny; y++) { + for (int x = 0; x < nx; x++) { + data[(b * 3 * n) + k * n + y * nx + x] = imgs->data[b].buf[3 * (y * nx + x) + k]; + } + } + } + } + } + ggml_backend_tensor_set(inp_raw, data, 0, ggml_nbytes(inp_raw)); + free(data); + } + + { + struct ggml_tensor * embeddings = ggml_graph_get_tensor(gf, "embeddings"); + + void* zero_mem = malloc(ggml_nbytes(embeddings)); + memset(zero_mem, 0, ggml_nbytes(embeddings)); + ggml_backend_tensor_set(embeddings, zero_mem, 0, ggml_nbytes(embeddings)); + free(zero_mem); + } + + { + struct ggml_tensor * positions = ggml_graph_get_tensor(gf, "positions"); + + int* positions_data = (int*)malloc(ggml_nbytes(positions)); + for (int i = 0; i < num_positions; i++) { + positions_data[i] = i; + } + ggml_backend_tensor_set(positions, positions_data, 0, ggml_nbytes(positions)); + free(positions_data); + } + + { + struct ggml_tensor * patches = ggml_graph_get_tensor(gf, "patches"); + int* patches_data = (int*)malloc(ggml_nbytes(patches)); + for (int i = 0; i < num_patches; i++) { + patches_data[i] = i + 1; + } + ggml_backend_tensor_set(patches, patches_data, 0, ggml_nbytes(patches)); + free(patches_data); + } if (ggml_backend_is_cpu(ctx->backend)) { ggml_backend_cpu_set_n_threads(ctx->backend, n_threads); diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index eee9d4de3..2e2a8ce08 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -1,5 +1,6 @@ #include "ggml.h" #include "ggml-alloc.h" +#include "ggml-backend.h" #include "common.h" #include "train.h" #include "llama.h" @@ -19,8 +20,6 @@ #pragma warning(disable: 4244 4267) // possible loss of data #endif -static const size_t tensor_alignment = 32; - struct my_llama_hparams { uint32_t n_vocab = 32000; uint32_t n_ctx = 512; @@ -58,7 +57,7 @@ struct my_llama_layer { struct my_llama_model { struct ggml_context * ctx = NULL; - std::vector data; + ggml_backend_buffer_t data = NULL; my_llama_hparams hparams; @@ -147,39 +146,6 @@ static void set_param_model(struct my_llama_model * model) { } } -static void alloc_model(struct ggml_allocr * alloc, struct my_llama_model * model) { - ggml_allocr_alloc(alloc, model->tok_embeddings); - ggml_allocr_alloc(alloc, model->norm); - ggml_allocr_alloc(alloc, model->output); - for (uint32_t i = 0; i < model->layers.size(); ++i) { - auto & layer = model->layers[i]; - ggml_allocr_alloc(alloc, layer.attention_norm); - ggml_allocr_alloc(alloc, layer.wq); - ggml_allocr_alloc(alloc, layer.wk); - ggml_allocr_alloc(alloc, layer.wv); - ggml_allocr_alloc(alloc, layer.wo); - ggml_allocr_alloc(alloc, layer.ffn_norm); - ggml_allocr_alloc(alloc, layer.w1); - ggml_allocr_alloc(alloc, layer.w2); - ggml_allocr_alloc(alloc, layer.w3); - } - ggml_allocr_alloc(alloc, model->tok_embeddings->grad); - ggml_allocr_alloc(alloc, model->norm->grad); - ggml_allocr_alloc(alloc, model->output->grad); - for (uint32_t i = 0; i < model->layers.size(); ++i) { - auto & layer = model->layers[i]; - ggml_allocr_alloc(alloc, layer.attention_norm->grad); - ggml_allocr_alloc(alloc, layer.wq->grad); - ggml_allocr_alloc(alloc, layer.wk->grad); - ggml_allocr_alloc(alloc, layer.wv->grad); - ggml_allocr_alloc(alloc, layer.wo->grad); - ggml_allocr_alloc(alloc, layer.ffn_norm->grad); - ggml_allocr_alloc(alloc, layer.w1->grad); - ggml_allocr_alloc(alloc, layer.w2->grad); - 
ggml_allocr_alloc(alloc, layer.w3->grad); - } -} - static void init_model(struct my_llama_model * model) { const auto & hparams = model->hparams; @@ -252,17 +218,8 @@ static void init_model(struct my_llama_model * model) { set_param_model(model); - // measure data size - size_t size = 0; - for (struct ggml_tensor * t = ggml_get_first_tensor(ctx); t != NULL; t = ggml_get_next_tensor(ctx, t)) { - size += GGML_PAD(ggml_nbytes(t), tensor_alignment); - } - // allocate data - struct ggml_allocr * alloc = NULL; - model->data.resize(size + tensor_alignment); - alloc = ggml_allocr_new(model->data.data(), model->data.size(), tensor_alignment); - alloc_model(alloc, model); + model->data = ggml_backend_alloc_ctx_tensors_from_buft(ctx, ggml_backend_cpu_buffer_type()); } static void randomize_model(struct my_llama_model * model, int seed, float mean, float std, float min, float max) { @@ -297,7 +254,7 @@ static void randomize_model(struct my_llama_model * model, int seed, float mean, static struct ggml_tensor * llama_build_train_graphs( struct my_llama_model * model, - struct ggml_allocr * alloc, + ggml_gallocr_t alloc, struct ggml_context * ctx, struct ggml_cgraph * gf, struct ggml_cgraph * gb, @@ -308,7 +265,8 @@ static struct ggml_tensor * llama_build_train_graphs( const int n_tokens, const int n_batch, const bool enable_flash_attn, - const bool enable_checkpointing) { + const bool enable_checkpointing, + const bool measure_only) { ggml_set_scratch(ctx, { 0, 0, nullptr, }); const int n_past = 0; @@ -334,13 +292,7 @@ static struct ggml_tensor * llama_build_train_graphs( // KQ_pos - contains the positions struct ggml_tensor * KQ_pos = ggml_new_tensor_1d(ctx, GGML_TYPE_I32, N); - ggml_allocr_alloc(alloc, KQ_pos); - if (!ggml_allocr_is_measure(alloc)) { - int * data = (int *) KQ_pos->data; - for (int i = 0; i < N; ++i) { - data[i] = n_past + i; - } - } + ggml_set_input(KQ_pos); // rope has so much parameters that we make a custom function for it auto rope = [ctx, KQ_pos, n_rot, n_ctx, rope_freq_base, rope_freq_scale] @@ -448,21 +400,31 @@ static struct ggml_tensor * llama_build_train_graphs( // KQ_pos ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, KQ_pos, 1.0f)); GGML_ASSERT(t36->grad->data == NULL && t36->grad->view_src == NULL); - - ggml_allocr_alloc(alloc, t36->grad); + ggml_set_input(t36->grad); // allocating checkpoints in one block to reduce memory fragmentation // note: they will be freed in reverse order for (int i = 0; i < (int) checkpoints.size(); ++i) { if (checkpoints[i]->data == NULL && checkpoints[i]->view_src == NULL) { - ggml_allocr_alloc(alloc, checkpoints[i]); + ggml_set_input(checkpoints[i]); } } //int n_leafs_after = gb->n_leafs; //int n_nodes_after = gb->n_nodes; + if (measure_only) { + // FIXME: will still allocate + ggml_gallocr_reserve(alloc, gb); + } else { + ggml_gallocr_alloc_graph(alloc, gb); - ggml_allocr_alloc_graph(alloc, gb); + if (!measure_only) { + int * data = (int *) KQ_pos->data; + for (int i = 0; i < N; ++i) { + data[i] = n_past + i; + } + } + } // remove the additional nodes and leafs for (int i = n_leafs_before; i < gb->n_leafs; ++i) { @@ -1046,7 +1008,7 @@ int main(int argc, char ** argv) { printf("%s: seen train_samples %llu\n", __func__, (long long unsigned) train->train_samples); printf("%s: seen train_tokens %llu\n", __func__, (long long unsigned) train->train_tokens); printf("%s: completed train_epochs %llu\n", __func__, (long long unsigned) train->train_epochs); - printf("%s: model_size = %zu bytes (%.1f MB)\n", __func__, (ggml_used_mem(model.ctx) + 
model.data.size()), (float) (ggml_used_mem(model.ctx) + model.data.size()) / (1024.0f*1024.0f)); + printf("%s: model_size = %zu bytes (%.1f MB)\n", __func__, (ggml_used_mem(model.ctx) + ggml_backend_buffer_get_size(model.data)), (float) (ggml_used_mem(model.ctx) + ggml_backend_buffer_get_size(model.data)) / (1024.0f*1024.0f)); if (params.only_write_model) { save_train_files_data save_data; @@ -1073,11 +1035,6 @@ int main(int argc, char ** argv) { int n_vocab = model.hparams.n_vocab; int n_batch = params.common.n_batch; - std::vector mem_input_data; - std::vector mem_compute_data; - - ggml_allocr * alloc = NULL; - // context for input tensors without their data struct ggml_init_params ctx_input_params = { ggml_tensor_overhead() * 2, // mem_size @@ -1091,16 +1048,10 @@ int main(int argc, char ** argv) { struct ggml_tensor * target_probs = ggml_new_tensor_3d(ctx_input, GGML_TYPE_F32, n_vocab, n_tokens, n_batch); // measure required memory for input tensors - size_t max_input_size = GGML_PAD(ggml_nbytes(tokens_input), tensor_alignment) + - GGML_PAD(ggml_nbytes(target_probs), tensor_alignment) + - tensor_alignment; - printf("%s: input_size = %zu bytes (%.1f MB)\n", __func__, max_input_size, (float) max_input_size / (1024.0f*1024.0f)); - // allocate input tensors - mem_input_data.resize(max_input_size); - alloc = ggml_allocr_new(mem_input_data.data(), mem_input_data.size(), tensor_alignment); - ggml_allocr_alloc(alloc, tokens_input); - ggml_allocr_alloc(alloc, target_probs); + ggml_backend_buffer_t input_data = ggml_backend_alloc_ctx_tensors_from_buft(ctx_input, ggml_backend_cpu_buffer_type()); + size_t max_input_size = ggml_backend_buffer_get_size(input_data); + printf("%s: input_size = %zu bytes (%.1f MB)\n", __func__, max_input_size, (float) max_input_size / (1024.0f*1024.0f)); // context for compute tensors without their data const size_t estimated_compute_size_wo_data = ( @@ -1127,7 +1078,7 @@ int main(int argc, char ** argv) { // find best evaluation order for (unsigned order = 0; order < (unsigned) GGML_CGRAPH_EVAL_ORDER_COUNT; ++order) { ctx_compute = ggml_init(ctx_compute_params); - alloc = ggml_allocr_new_measure(tensor_alignment); + ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = (enum ggml_cgraph_eval_order) order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1140,9 +1091,10 @@ int main(int argc, char ** argv) { &logits, tokens_input, target_probs, n_tokens, n_batch, params.common.use_flash, - params.common.use_checkpointing + params.common.use_checkpointing, + true ); - size_t max_compute_size = ggml_allocr_max_size(alloc) + tensor_alignment; + size_t max_compute_size = ggml_gallocr_get_buffer_size(alloc, 0); // FIXME: this will still allocate the buffer if (max_compute_size < best_compute_size) { best_compute_size = max_compute_size; best_order = gf->order; @@ -1157,9 +1109,8 @@ int main(int argc, char ** argv) { "invalid"); // allocate compute tensors - mem_compute_data.resize(max_compute_size); ctx_compute = ggml_init(ctx_compute_params); - alloc = ggml_allocr_new(mem_compute_data.data(), mem_compute_data.size(), tensor_alignment); + ggml_gallocr_t alloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type()); gf = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); gf->order = best_order; gb = ggml_new_graph_custom(ctx_compute, LLAMA_TRAIN_MAX_NODES, true); @@ -1172,7 +1123,8 @@ int main(int argc, char ** argv) { &logits, tokens_input, 
target_probs, n_tokens, n_batch, params.common.use_flash, - params.common.use_checkpointing + params.common.use_checkpointing, + false ); std::vector train_tokens; diff --git a/ggml-alloc.c b/ggml-alloc.c index f9be6e1cb..c28c37c4f 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -17,397 +17,11 @@ //#define AT_PRINTF(...) fprintf(stderr, __VA_ARGS__) #define AT_PRINTF(...) -// TODO: GGML_PAD ? -static size_t aligned_offset(const void * buffer, size_t offset, size_t alignment) { - assert(alignment && !(alignment & (alignment - 1))); // power of 2 - size_t align = (alignment - (((uintptr_t)buffer + offset) % alignment)) % alignment; - return offset + align; -} -struct free_block { - void * addr; - size_t size; -}; - -struct ggml_tallocr { - struct ggml_backend_buffer * buffer; - bool buffer_owned; - void * base; - size_t alignment; - - int n_free_blocks; - struct free_block free_blocks[MAX_FREE_BLOCKS]; - - size_t max_size; - - bool measure; - -#ifdef GGML_ALLOCATOR_DEBUG - struct ggml_tensor * allocated_tensors[1024]; -#endif -}; - -#ifdef GGML_ALLOCATOR_DEBUG -static void add_allocated_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { - for (int i = 0; i < 1024; i++) { - if (alloc->allocated_tensors[i] == NULL) { - alloc->allocated_tensors[i] = tensor; - return; - } - } - GGML_ASSERT(!"out of allocated_tensors"); -} -static void remove_allocated_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { - for (int i = 0; i < 1024; i++) { - if (alloc->allocated_tensors[i] == tensor || - (alloc->allocated_tensors[i] != NULL && alloc->allocated_tensors[i]->data == tensor->data)) { - alloc->allocated_tensors[i] = NULL; - return; - } - } - printf("tried to free tensor %s not found\n", tensor->name); - GGML_ASSERT(!"tensor not found"); -} -#endif - -// check if a tensor is allocated by this buffer -static bool ggml_tallocr_is_own(ggml_tallocr_t alloc, const struct ggml_tensor * tensor) { - return tensor->buffer == alloc->buffer && (!tensor->view_src || tensor->view_src->buffer == alloc->buffer); -} - -static bool ggml_is_view(struct ggml_tensor * t) { +static bool ggml_is_view(const struct ggml_tensor * t) { return t->view_src != NULL; } -void ggml_tallocr_alloc(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { - GGML_ASSERT(!ggml_is_view(tensor)); // views generally get data pointer from one of their sources - GGML_ASSERT(tensor->data == NULL); // avoid allocating tensor which already has memory allocated - - size_t size = ggml_backend_buffer_get_alloc_size(alloc->buffer, tensor); - size = aligned_offset(NULL, size, alloc->alignment); - - AT_PRINTF("%s: allocating %s (%zu bytes) - ", __func__, tensor->name, size); - - size_t max_avail = 0; - - // find the best fitting free block besides the last block - int best_fit_block = -1; - size_t best_fit_size = SIZE_MAX; - for (int i = 0; i < alloc->n_free_blocks - 1; i++) { - struct free_block * block = &alloc->free_blocks[i]; - max_avail = MAX(max_avail, block->size); - if (block->size >= size && block->size <= best_fit_size) { - best_fit_block = i; - best_fit_size = block->size; - } - } - - if (best_fit_block == -1) { - // the last block is our last resort - struct free_block * block = &alloc->free_blocks[alloc->n_free_blocks - 1]; - max_avail = MAX(max_avail, block->size); - if (block->size >= size) { - best_fit_block = alloc->n_free_blocks - 1; - } else { - fprintf(stderr, "%s: not enough space in the buffer to allocate %s (needed %zu, largest block available %zu)\n", - __func__, tensor->name, size, max_avail); - GGML_ASSERT(!"not 
enough space in the buffer"); - return; - } - } - - struct free_block * block = &alloc->free_blocks[best_fit_block]; - void * addr = block->addr; - block->addr = (char*)block->addr + size; - block->size -= size; - if (block->size == 0) { - // remove block if empty - alloc->n_free_blocks--; - for (int j = best_fit_block; j < alloc->n_free_blocks; j++) { - alloc->free_blocks[j] = alloc->free_blocks[j+1]; - } - } - - AT_PRINTF("block %d, addr %p\n", best_fit_block, addr); - - tensor->data = addr; - tensor->buffer = alloc->buffer; - if (!alloc->measure) { - ggml_backend_buffer_init_tensor(alloc->buffer, tensor); - } - -#ifdef GGML_ALLOCATOR_DEBUG - add_allocated_tensor(alloc, tensor); - size_t cur_max = (char*)addr - (char*)alloc->base + size; - if (cur_max > alloc->max_size) { - printf("max_size = %.2f MB: tensors: ", cur_max / 1024.0 / 1024.0); - for (int i = 0; i < 1024; i++) { - if (alloc->allocated_tensors[i]) { - printf("%s (%.2f MB) ", alloc->allocated_tensors[i]->name, ggml_nbytes(alloc->allocated_tensors[i]) / 1024.0 / 1024.0); - } - } - printf("\n"); - } -#endif - - alloc->max_size = MAX(alloc->max_size, (char*)addr - (char*)alloc->base + size); -} - -// this is a very naive implementation, but for our case the number of free blocks should be very small -static void ggml_tallocr_free_tensor(ggml_tallocr_t alloc, struct ggml_tensor * tensor) { - if (ggml_tallocr_is_own(alloc, tensor) == false) { - // the tensor was not allocated in this buffer - // this can happen because the graph allocator will try to free weights and other tensors from different buffers - // the easiest way to deal with this is just to ignore it - // AT_PRINTF("ignoring %s (their buffer: %p, our buffer: %p)\n", tensor->name, (void *)tensor->buffer, (void *)alloc->buffer); - return; - } - - void * ptr = tensor->data; - - size_t size = ggml_backend_buffer_get_alloc_size(alloc->buffer, tensor); - size = aligned_offset(NULL, size, alloc->alignment); - AT_PRINTF("%s: freeing %s at %p (%zu bytes) - n_free_blocks = %d\n", __func__, tensor->name, ptr, size, alloc->n_free_blocks); - -#ifdef GGML_ALLOCATOR_DEBUG - remove_allocated_tensor(alloc, tensor); -#endif - - // see if we can merge with an existing block - for (int i = 0; i < alloc->n_free_blocks; i++) { - struct free_block * block = &alloc->free_blocks[i]; - // check if ptr is at the end of the block - if ((char*)block->addr + block->size == ptr) { - block->size += size; - // check if we can merge with the next block - if (i < alloc->n_free_blocks - 1 && (char*)block->addr + block->size == alloc->free_blocks[i+1].addr) { - block->size += alloc->free_blocks[i+1].size; - alloc->n_free_blocks--; - for (int j = i+1; j < alloc->n_free_blocks; j++) { - alloc->free_blocks[j] = alloc->free_blocks[j+1]; - } - } - return; - } - // check if ptr is at the beginning of the block - if ((char*)ptr + size == block->addr) { - block->addr = ptr; - block->size += size; - // check if we can merge with the previous block - if (i > 0 && (char*)alloc->free_blocks[i-1].addr + alloc->free_blocks[i-1].size == block->addr) { - alloc->free_blocks[i-1].size += block->size; - alloc->n_free_blocks--; - for (int j = i; j < alloc->n_free_blocks; j++) { - alloc->free_blocks[j] = alloc->free_blocks[j+1]; - } - } - return; - } - } - // otherwise, add a new block - GGML_ASSERT(alloc->n_free_blocks < MAX_FREE_BLOCKS && "out of free blocks"); - // insert the new block in the correct position to keep the array sorted by address (to make merging blocks faster) - int insert_pos = 0; - while (insert_pos < 
alloc->n_free_blocks && alloc->free_blocks[insert_pos].addr < ptr) { - insert_pos++; - } - // shift all blocks from insert_pos onward to make room for the new block - for (int i = alloc->n_free_blocks; i > insert_pos; i--) { - alloc->free_blocks[i] = alloc->free_blocks[i-1]; - } - // insert the new block - alloc->free_blocks[insert_pos].addr = ptr; - alloc->free_blocks[insert_pos].size = size; - alloc->n_free_blocks++; -} - -void ggml_tallocr_reset(ggml_tallocr_t alloc) { - alloc->n_free_blocks = 1; - size_t align_offset = aligned_offset(alloc->base, 0, alloc->alignment); - alloc->free_blocks[0].addr = (char *)alloc->base + align_offset; - - if (alloc->measure) { - alloc->free_blocks[0].size = SIZE_MAX/2; // restrict maximum size of a measure allocator to half size_t max to avoid overflows - } else { - alloc->free_blocks[0].size = ggml_backend_buffer_get_size(alloc->buffer) - align_offset; - ggml_backend_buffer_reset(alloc->buffer); - } -} - -ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment) { - struct ggml_backend_buffer * buffer = ggml_backend_cpu_buffer_from_ptr(data, size); - - ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); - - *alloc = (struct ggml_tallocr) { - /*.buffer = */ buffer, - /*.buffer_owned = */ true, - /*.base = */ ggml_backend_buffer_get_base(buffer), - /*.alignment = */ alignment, - /*.n_free_blocks = */ 0, - /*.free_blocks = */ {{0}}, - /*.max_size = */ 0, - /*.measure = */ false, -#ifdef GGML_ALLOCATOR_DEBUG - /*.allocated_tensors = */ {0}, -#endif - }; - - ggml_tallocr_reset(alloc); - - return alloc; -} - -ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment) { - ggml_tallocr_t alloc = ggml_tallocr_new((void *)0x1000, SIZE_MAX/2, alignment); - alloc->measure = true; - - return alloc; -} - -ggml_tallocr_t ggml_tallocr_new_measure_from_buft(struct ggml_backend_buffer_type * buft) { - // create a backend buffer to get the correct tensor allocation sizes - ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, 1); - - // TODO: move alloc initialization to a common ggml_tallocr_new_impl function - ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); - alloc->buffer_owned = true; - alloc->measure = true; - ggml_tallocr_reset(alloc); - return alloc; -} - -ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend) { - return ggml_tallocr_new_measure_from_buft(ggml_backend_get_default_buffer_type(backend)); -} - -ggml_tallocr_t ggml_tallocr_new_from_buft(struct ggml_backend_buffer_type * buft, size_t size) { - // create a backend buffer to get the correct tensor allocation sizes - ggml_backend_buffer_t buffer = ggml_backend_buft_alloc_buffer(buft, size); - ggml_tallocr_t alloc = ggml_tallocr_new_from_buffer(buffer); - alloc->buffer_owned = true; - return alloc; -} - -ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size) { - return ggml_tallocr_new_from_buft(ggml_backend_get_default_buffer_type(backend), size); -} - -ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer) { - ggml_tallocr_t alloc = (ggml_tallocr_t)malloc(sizeof(struct ggml_tallocr)); - - *alloc = (struct ggml_tallocr) { - /*.buffer = */ buffer, - /*.buffer_owned = */ false, - /*.base = */ ggml_backend_buffer_get_base(buffer), - /*.alignment = */ ggml_backend_buffer_get_alignment(buffer), - /*.n_free_blocks = */ 0, - /*.free_blocks = */ {{0}}, - /*.max_size = */ 0, - /*.measure = */ false, -#ifdef GGML_ALLOCATOR_DEBUG - /*.allocated_tensors = */ {0}, 
-#endif - }; - - ggml_tallocr_reset(alloc); - - return alloc; -} - -struct ggml_backend_buffer * ggml_tallocr_get_buffer(ggml_tallocr_t alloc) { - return alloc->buffer; -} - -void ggml_tallocr_free(ggml_tallocr_t alloc) { - if (alloc == NULL) { - return; - } - - if (alloc->buffer_owned) { - ggml_backend_buffer_free(alloc->buffer); - } - free(alloc); -} - -bool ggml_tallocr_is_measure(ggml_tallocr_t alloc) { - return alloc->measure; -} - -size_t ggml_tallocr_max_size(ggml_tallocr_t alloc) { - // FIXME: changes in the tensor sizes compared to the measure graph may cause allocations to fail - // to avoid this, we add a 10% margin to the buffer size - return alloc->max_size + alloc->max_size/10; -} - -// graph allocator - -struct hash_node { - int n_children; - int n_views; -}; - -struct ggml_gallocr { - ggml_tallocr_t talloc; - struct ggml_hash_set hash_set; - struct hash_node * hash_values; - size_t hash_values_size; - ggml_tallocr_t * hash_allocs; - int * parse_seq; - int parse_seq_len; -}; - -ggml_gallocr_t ggml_gallocr_new(void) { - ggml_gallocr_t galloc = (ggml_gallocr_t)malloc(sizeof(struct ggml_gallocr)); - - *galloc = (struct ggml_gallocr) { - /*.talloc = */ NULL, - /*.hash_set = */ {0}, - /*.hash_values = */ NULL, - /*.hash_values_size = */ 0, - /*.hash_allocs = */ NULL, - /*.parse_seq = */ NULL, - /*.parse_seq_len = */ 0, - }; - - return galloc; -} - -void ggml_gallocr_free(ggml_gallocr_t galloc) { - if (galloc == NULL) { - return; - } - - if (galloc->hash_set.keys != NULL) { - free(galloc->hash_set.keys); - } - if (galloc->hash_values != NULL) { - free(galloc->hash_values); - } - if (galloc->hash_allocs != NULL) { - free(galloc->hash_allocs); - } - if (galloc->parse_seq != NULL) { - free(galloc->parse_seq); - } - free(galloc); -} - -void ggml_gallocr_set_parse_seq(ggml_gallocr_t galloc, const int * list, int n) { - free(galloc->parse_seq); - galloc->parse_seq = malloc(sizeof(int) * n); - - for (int i = 0; i < n; i++) { - galloc->parse_seq[i] = list[i]; - } - galloc->parse_seq_len = n; -} - -static struct hash_node * hash_get(ggml_gallocr_t galloc, struct ggml_tensor * t) { - size_t i = ggml_hash_find_or_insert(galloc->hash_set, t); - return &galloc->hash_values[i]; -} - static bool ggml_are_same_layout(const struct ggml_tensor * a, const struct ggml_tensor * b) { if (a->type != b->type) { return false; @@ -447,106 +61,511 @@ static bool ggml_op_can_inplace(enum ggml_op op) { } } -static ggml_tallocr_t node_tallocr(ggml_gallocr_t galloc, struct ggml_tensor * node) { - if (galloc->talloc != NULL) { - return galloc->talloc; - } - - return galloc->hash_allocs[ggml_hash_find_or_insert(galloc->hash_set, node)]; +// TODO: GGML_PAD ? 
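+// aligned_offset() below rounds `offset` up so that `buffer + offset` falls on
+// a multiple of `alignment`, which is asserted to be a power of two. A minimal
+// worked example, with made-up values (buffer = 0x1000, alignment = 32):
+//
+//     size_t off = aligned_offset((void *)0x1000, 5, 32);
+//     // (0x1000 + 5) % 32 == 5, so 27 bytes of padding are added: off == 32
+//
+// a zero `offset` (as used by ggml_tallocr_new below) simply aligns the base
+// pointer itself.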
+static size_t aligned_offset(const void * buffer, size_t offset, size_t alignment) { + assert(alignment && !(alignment & (alignment - 1))); // power of 2 + size_t align = (alignment - (((uintptr_t)buffer + offset) % alignment)) % alignment; + return offset + align; } -static void init_view(ggml_gallocr_t galloc, struct ggml_tensor * view, bool update_backend) { - ggml_tallocr_t alloc = node_tallocr(galloc, view); +// tallocr +struct ggml_tallocr { + ggml_backend_buffer_t buffer; + void * base; + size_t alignment; + size_t offset; +}; - GGML_ASSERT(view->view_src != NULL && view->view_src->data != NULL); - if (update_backend) { - view->backend = view->view_src->backend; +ggml_tallocr_t ggml_tallocr_new(ggml_backend_buffer_t buffer) { + ggml_tallocr_t talloc = malloc(sizeof(struct ggml_tallocr)); + if (talloc == NULL) { + return NULL; } - // views are initialized in the alloc buffer rather than the view_src buffer - view->buffer = alloc->buffer; - view->data = (char *)view->view_src->data + view->view_offs; - assert(ggml_tallocr_is_measure(alloc) || !view->buffer || view->buffer->buft == alloc->buffer->buft); + void * base = ggml_backend_buffer_get_base(buffer); + size_t align = ggml_backend_buffer_get_alignment(buffer); - if (!alloc->measure) { - ggml_backend_buffer_init_tensor(alloc->buffer, view); - } + assert(align && !(align & (align - 1))); // power of 2 + + *talloc = (struct ggml_tallocr) { + /*.buffer = */ buffer, + /*.base = */ base, + /*.alignment = */ align, + /*.offset = */ aligned_offset(base, 0, align), + }; + return talloc; } -static void allocate_node(ggml_gallocr_t galloc, struct ggml_tensor * node) { - ggml_tallocr_t alloc = node_tallocr(galloc, node); +void ggml_tallocr_free(ggml_tallocr_t talloc) { + free(talloc); +} - if (node->data == NULL) { - if (ggml_is_view(node)) { - init_view(galloc, node, true); +void ggml_tallocr_alloc(ggml_tallocr_t talloc, struct ggml_tensor * tensor) { + size_t size = ggml_backend_buffer_get_alloc_size(talloc->buffer, tensor); + size = GGML_PAD(size, talloc->alignment); + + if (talloc->offset + size > ggml_backend_buffer_get_size(talloc->buffer)) { + fprintf(stderr, "%s: not enough space in the buffer to allocate %s (needed %zu, available %zu)\n", + __func__, tensor->name, size, ggml_backend_buffer_get_size(talloc->buffer) - talloc->offset); + GGML_ASSERT(!"not enough space in the buffer"); + return; + } + + void * addr = (char *)ggml_backend_buffer_get_base(talloc->buffer) + talloc->offset; + talloc->offset += size; + + assert(((uintptr_t)addr % talloc->alignment) == 0); + + ggml_backend_tensor_alloc(talloc->buffer, tensor, addr); +} + +// dynamic tensor allocator + +struct free_block { + size_t offset; + size_t size; +}; + +struct ggml_dyn_tallocr { + size_t alignment; + int n_free_blocks; + struct free_block free_blocks[MAX_FREE_BLOCKS]; + size_t max_size; + +#ifdef GGML_ALLOCATOR_DEBUG + struct { + const struct ggml_tensor * tensor; + size_t offset; + } allocated_tensors[1024]; +#endif +}; + +#ifdef GGML_ALLOCATOR_DEBUG +static void add_allocated_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, const struct ggml_tensor * tensor) { + for (int i = 0; i < 1024; i++) { + if (alloc->allocated_tensors[i].tensor == NULL) { + alloc->allocated_tensors[i].tensor = tensor; + alloc->allocated_tensors[i].offset = offset; + return; + } + } + GGML_ASSERT(!"out of allocated_tensors"); +} +static void remove_allocated_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, const struct ggml_tensor * tensor) { + for (int i = 0; i < 1024; i++) { + if 
(alloc->allocated_tensors[i].offset == offset) { + alloc->allocated_tensors[i].tensor = NULL; + return; + } + } + fprintf(stderr, "tried to free tensor %s not found\n", tensor->name); + GGML_ASSERT(!"tensor not found"); +} +#endif + +static size_t ggml_dyn_tallocr_alloc(struct ggml_dyn_tallocr * alloc, size_t size, const struct ggml_tensor * tensor) { + size = aligned_offset(NULL, size, alloc->alignment); + + AT_PRINTF("%s: allocating %s (%zu bytes) - ", __func__, tensor->name, size); + + size_t max_avail = 0; + + // find the best fitting free block besides the last block + int best_fit_block = -1; + size_t best_fit_size = SIZE_MAX; + for (int i = 0; i < alloc->n_free_blocks - 1; i++) { + struct free_block * block = &alloc->free_blocks[i]; + max_avail = MAX(max_avail, block->size); + if (block->size >= size && block->size <= best_fit_size) { + best_fit_block = i; + best_fit_size = block->size; + } + } + + if (best_fit_block == -1) { + // the last block is our last resort + struct free_block * block = &alloc->free_blocks[alloc->n_free_blocks - 1]; + max_avail = MAX(max_avail, block->size); + if (block->size >= size) { + best_fit_block = alloc->n_free_blocks - 1; } else { - // see if we can reuse a parent's buffer (inplace) - if (ggml_op_can_inplace(node->op)) { - for (int i = 0; i < GGML_MAX_SRC; i++) { - struct ggml_tensor * parent = node->src[i]; - if (parent == NULL) { - break; - } + // this should never happen + fprintf(stderr, "%s: not enough space in the buffer to allocate %zu bytes, largest block available %zu bytes\n", + __func__, size, max_avail); + GGML_ASSERT(!"not enough space in the buffer"); + GGML_UNREACHABLE(); + } + } - // if the node's data is external, then we cannot re-use it - if (ggml_tallocr_is_own(alloc, parent) == false) { - AT_PRINTF("not reusing parent %s for %s as %p is external\n", parent->name, node->name, parent->data); - continue; - } + struct free_block * block = &alloc->free_blocks[best_fit_block]; + size_t offset = block->offset; + block->offset = offset + size; + block->size -= size; + if (block->size == 0) { + // remove block if empty + alloc->n_free_blocks--; + for (int j = best_fit_block; j < alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = alloc->free_blocks[j+1]; + } + } - struct hash_node * p_hn = hash_get(galloc, parent); - if (parent->data != NULL && p_hn->n_children == 1 && p_hn->n_views == 0 && ggml_are_same_layout(node, parent)) { - if (ggml_is_view(parent)) { - struct ggml_tensor * view_src = parent->view_src; - struct hash_node * view_src_hn = hash_get(galloc, view_src); - if (view_src_hn->n_views == 1 && view_src_hn->n_children == 0 && view_src->data == parent->data) { - // TODO: the offset of the view parent must be kept to ensure that the op doesn't overwrite - // the parent's data that it will need later (same layout requirement). the problem is that then - // we cannot free the tensor because the original address of the allocation is lost. 
- // adding a view_src pointer to the tensor would solve this and simplify the code dealing with views - // for now, we only reuse the parent's data if the offset is zero (view_src->data == parent->data) - AT_PRINTF("reusing view parent %s (%s) for %s\n", parent->name, view_src->name, node->name); - node->view_src = view_src; - view_src_hn->n_views += 1; - init_view(galloc, node, false); - return; - } - } else { - AT_PRINTF("reusing parent %s for %s\n", parent->name, node->name); - node->view_src = parent; - p_hn->n_views += 1; - init_view(galloc, node, false); + AT_PRINTF("block %d, offset %zu\n", best_fit_block, offset); + +#ifdef GGML_ALLOCATOR_DEBUG + add_allocated_tensor(alloc, offset, tensor); + size_t cur_max = offset + size; + if (cur_max > alloc->max_size) { + // sort allocated_tensors by offset + for (int i = 0; i < 1024; i++) { + for (int j = i + 1; j < 1024; j++) { + if (alloc->allocated_tensors[i].offset > alloc->allocated_tensors[j].offset) { + const struct ggml_tensor * tmp_tensor = alloc->allocated_tensors[i].tensor; + size_t tmp_offset = alloc->allocated_tensors[i].offset; + alloc->allocated_tensors[i].tensor = alloc->allocated_tensors[j].tensor; + alloc->allocated_tensors[i].offset = alloc->allocated_tensors[j].offset; + alloc->allocated_tensors[j].tensor = tmp_tensor; + alloc->allocated_tensors[j].offset = tmp_offset; + } + } + } + fprintf(stderr, "max_size = %.2f MB: tensors: ", cur_max / 1024.0 / 1024.0); + for (int i = 0; i < 1024; i++) { + if (alloc->allocated_tensors[i].tensor) { + fprintf(stderr, "%s [%zx-%zx] (%.2f MB) ", alloc->allocated_tensors[i].tensor->name, + alloc->allocated_tensors[i].offset, + alloc->allocated_tensors[i].offset + ggml_nbytes(alloc->allocated_tensors[i].tensor), + ggml_nbytes(alloc->allocated_tensors[i].tensor) / 1024.0 / 1024.0); + } + } + fprintf(stderr, "\n"); + } +#endif + + alloc->max_size = MAX(alloc->max_size, offset + size); + + return offset; + + GGML_UNUSED(tensor); +} + +// this is a very naive implementation, but for our case the number of free blocks should be very small +static void ggml_dyn_tallocr_free_tensor(struct ggml_dyn_tallocr * alloc, size_t offset, size_t size, const struct ggml_tensor * tensor) { + size = aligned_offset(NULL, size, alloc->alignment); + + AT_PRINTF("%s: freeing %s at %zu (%zu bytes) - n_free_blocks = %d\n", __func__, tensor->name, offset, size, alloc->n_free_blocks); + +#ifdef GGML_ALLOCATOR_DEBUG + remove_allocated_tensor(alloc, offset, tensor); +#endif + + // see if we can merge with an existing block + for (int i = 0; i < alloc->n_free_blocks; i++) { + struct free_block * block = &alloc->free_blocks[i]; + // check if ptr is at the end of the block + if (block->offset + block->size == offset) { + block->size += size; + // check if we can merge with the next block + if (i < alloc->n_free_blocks - 1 && block->offset + block->size == alloc->free_blocks[i+1].offset) { + block->size += alloc->free_blocks[i+1].size; + alloc->n_free_blocks--; + for (int j = i+1; j < alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = alloc->free_blocks[j+1]; + } + } + return; + } + // check if ptr is at the beginning of the block + if (offset + size == block->offset) { + block->offset = offset; + block->size += size; + // check if we can merge with the previous block + if (i > 0 && alloc->free_blocks[i-1].offset + alloc->free_blocks[i-1].size == block->offset) { + alloc->free_blocks[i-1].size += block->size; + alloc->n_free_blocks--; + for (int j = i; j < alloc->n_free_blocks; j++) { + alloc->free_blocks[j] = 
alloc->free_blocks[j+1]; + } + } + return; + } + } + // otherwise, add a new block + GGML_ASSERT(alloc->n_free_blocks < MAX_FREE_BLOCKS && "out of free blocks"); + // insert the new block in the correct position to keep the array sorted by address (to make merging blocks faster) + int insert_pos = 0; + while (insert_pos < alloc->n_free_blocks && alloc->free_blocks[insert_pos].offset < offset) { + insert_pos++; + } + // shift all blocks from insert_pos onward to make room for the new block + for (int i = alloc->n_free_blocks; i > insert_pos; i--) { + alloc->free_blocks[i] = alloc->free_blocks[i-1]; + } + // insert the new block + alloc->free_blocks[insert_pos].offset = offset; + alloc->free_blocks[insert_pos].size = size; + alloc->n_free_blocks++; + + GGML_UNUSED(tensor); +} + +static void ggml_dyn_tallocr_reset(struct ggml_dyn_tallocr * alloc) { + alloc->n_free_blocks = 1; + alloc->free_blocks[0].offset = 0; + alloc->free_blocks[0].size = SIZE_MAX/2; // restrict maximum size of a measure allocator to half size_t max to avoid overflows + alloc->max_size = 0; +} + +static struct ggml_dyn_tallocr * ggml_dyn_tallocr_new(size_t alignment) { + struct ggml_dyn_tallocr * alloc = (struct ggml_dyn_tallocr *)malloc(sizeof(struct ggml_dyn_tallocr)); + + *alloc = (struct ggml_dyn_tallocr) { + /*.alignment = */ alignment, + /*.n_free_blocks = */ 0, + /*.free_blocks = */ {{0}}, + /*.max_size = */ 0, +#ifdef GGML_ALLOCATOR_DEBUG + /*.allocated_tensors = */ {{0}}, +#endif + }; + + ggml_dyn_tallocr_reset(alloc); + + return alloc; +} + +static void ggml_dyn_tallocr_free(struct ggml_dyn_tallocr * alloc) { + free(alloc); +} + +static size_t ggml_dyn_tallocr_max_size(struct ggml_dyn_tallocr * alloc) { + return alloc->max_size; +} + + +///////////////////////////////////// + +// graph allocator + +struct hash_node { + int n_children; + int n_views; + int buffer_id; + size_t offset; // offset within the buffer + bool allocated; +}; + +// +struct tensor_alloc { + size_t offset; + size_t size_max; // 0 = pre-allocated, unused, or view +}; + +struct node_alloc { + int buffer_id; + struct tensor_alloc dst; + struct tensor_alloc src[GGML_MAX_SRC]; +}; + +struct ggml_gallocr { + ggml_backend_buffer_type_t * bufts; // [n_buffers] + ggml_backend_buffer_t * buffers; // [n_buffers] + struct ggml_dyn_tallocr ** buf_tallocs; // [n_buffers] + int n_buffers; + + struct ggml_hash_set hash_set; + struct hash_node * hash_values; // [hash_set.size] + + struct node_alloc * node_allocs; // [n_nodes] + int n_nodes; +}; + +ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs) { + ggml_gallocr_t galloc = (ggml_gallocr_t)calloc(sizeof(struct ggml_gallocr), 1); + GGML_ASSERT(galloc != NULL); + + galloc->bufts = calloc(sizeof(ggml_backend_buffer_type_t) * n_bufs, 1); + GGML_ASSERT(galloc->bufts != NULL); + + galloc->buffers = calloc(sizeof(ggml_backend_buffer_t) * n_bufs, 1); + GGML_ASSERT(galloc->buffers != NULL); + + galloc->buf_tallocs = calloc(sizeof(struct ggml_dyn_tallocr *) * n_bufs, 1); + GGML_ASSERT(galloc->buf_tallocs != NULL); + + for (int i = 0; i < n_bufs; i++) { + galloc->bufts[i] = bufts[i]; + galloc->buffers[i] = NULL; + size_t alignment = ggml_backend_buft_get_alignment(bufts[i]); + galloc->buf_tallocs[i] = ggml_dyn_tallocr_new(alignment); + } + galloc->n_buffers = n_bufs; + + return galloc; +} + +ggml_gallocr_t ggml_gallocr_new(ggml_backend_buffer_type_t buft) { + return ggml_gallocr_new_n(&buft, 1); +} + +void ggml_gallocr_free(ggml_gallocr_t galloc) { + if (galloc == NULL) { + return; + } + 
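+    // free the per-buffer resources first (the backend buffers and their
+    // dynamic allocators), then the tables owned by the graph allocator itself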
+ for (int i = 0; i < galloc->n_buffers; i++) { + if (galloc->buffers != NULL) { + ggml_backend_buffer_free(galloc->buffers[i]); + } + if (galloc->buf_tallocs != NULL) { + ggml_dyn_tallocr_free(galloc->buf_tallocs[i]); + } + } + + free(galloc->hash_set.keys); + free(galloc->hash_values); + free(galloc->bufts); + free(galloc->buffers); + free(galloc->buf_tallocs); + free(galloc->node_allocs); + free(galloc); +} + +typedef struct ggml_gallocr * ggml_gallocr_t; + +static struct hash_node * ggml_gallocr_hash_get(ggml_gallocr_t galloc, struct ggml_tensor * t) { + size_t i = ggml_hash_find_or_insert(galloc->hash_set, t); + return &galloc->hash_values[i]; +} + +static bool ggml_gallocr_is_own(ggml_gallocr_t galloc, struct ggml_tensor * t) { + return ggml_gallocr_hash_get(galloc, t)->allocated; +} + +static void ggml_gallocr_set_node_offset(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id, size_t offset) { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + hn->buffer_id = buffer_id; + hn->offset = offset; + hn->allocated = true; +} + +static bool ggml_gallocr_is_allocated(ggml_gallocr_t galloc, struct ggml_tensor * t) { + return t->data != NULL || ggml_gallocr_hash_get(galloc, t)->allocated; +} + +static void ggml_gallocr_allocate_node(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id) { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + + if (!ggml_gallocr_is_allocated(galloc, node) && !ggml_is_view(node)) { + hn->allocated = true; + assert(hn->offset == 0); + + // try to reuse a parent's buffer (inplace) + if (ggml_op_can_inplace(node->op)) { + for (int i = 0; i < GGML_MAX_SRC; i++) { + struct ggml_tensor * parent = node->src[i]; + if (parent == NULL) { + break; + } + + // if the node's data is external, then we cannot re-use it + if (!ggml_gallocr_is_own(galloc, parent)) { + AT_PRINTF("not reusing parent %s for %s as %p is external\n", parent->name, node->name, parent->data); + continue; + } + + // outputs cannot be reused + if (parent->flags & GGML_TENSOR_FLAG_OUTPUT || (parent->view_src != NULL && parent->view_src->flags & GGML_TENSOR_FLAG_OUTPUT)) { + AT_PRINTF("not reusing parent %s for %s as it is an output\n", parent->name, node->name); + continue; + } + + if (!ggml_are_same_layout(node, parent)) { + AT_PRINTF("not reusing parent %s for %s as layouts are different\n", parent->name, node->name); + continue; + } + + struct hash_node * p_hn = ggml_gallocr_hash_get(galloc, parent); + if (p_hn->n_children == 1 && p_hn->n_views == 0) { + if (ggml_is_view(parent)) { + struct ggml_tensor * view_src = parent->view_src; + struct hash_node * view_src_hn = ggml_gallocr_hash_get(galloc, view_src); + if (view_src_hn->n_views == 1 && view_src_hn->n_children == 0 && view_src->data == parent->data) { + AT_PRINTF("reusing view parent %s (%s) for %s\n", parent->name, view_src->name, node->name); + assert(view_src_hn->offset == p_hn->offset); + hn->buffer_id = p_hn->buffer_id; + hn->offset = p_hn->offset; + p_hn->allocated = false; // avoid freeing the parent + view_src_hn->allocated = false; return; } + } else { + AT_PRINTF("reusing parent %s for %s\n", parent->name, node->name); + hn->buffer_id = p_hn->buffer_id; + hn->offset = p_hn->offset; + p_hn->allocated = false; // avoid freeing the parent + return; } } } - ggml_tallocr_alloc(alloc, node); } + // allocate tensor from the buffer + struct ggml_dyn_tallocr * alloc = galloc->buf_tallocs[buffer_id]; + ggml_backend_buffer_type_t buft = galloc->bufts[buffer_id]; + size_t size = 
ggml_backend_buft_get_alloc_size(buft, node); + size_t offset = ggml_dyn_tallocr_alloc(alloc, size, node); + hn->buffer_id = buffer_id; + hn->offset = offset; + return; } } -static void free_node(ggml_gallocr_t galloc, struct ggml_tensor * node) { - ggml_tallocr_t alloc = node_tallocr(galloc, node); +static void ggml_gallocr_free_node(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id) { + // graph outputs are never freed + if (node->flags & GGML_TENSOR_FLAG_OUTPUT) { + AT_PRINTF("not freeing output %s\n", node->name); + return; + } - ggml_tallocr_free_tensor(alloc, node); + struct ggml_dyn_tallocr * alloc = galloc->buf_tallocs[buffer_id]; + ggml_backend_buffer_type_t buft = galloc->bufts[buffer_id]; + struct hash_node * hn = ggml_gallocr_hash_get(galloc, node); + size_t offset = hn->offset; + size_t size = ggml_backend_buft_get_alloc_size(buft, node); + ggml_dyn_tallocr_free_tensor(alloc, offset, size, node); + hn->allocated = false; } -static void ggml_tallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgraph * gf) { - const int * parse_seq = galloc->parse_seq; - int parse_seq_len = galloc->parse_seq_len; +static int get_node_buffer_id(const int * node_buffer_ids, int i) { + return node_buffer_ids ? node_buffer_ids[i] : 0; +} + +static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgraph * graph, const int * node_buffer_ids) { + // clear hash tables + memset(galloc->hash_set.keys, 0, galloc->hash_set.size * sizeof(struct ggml_tensor *)); + memset(galloc->hash_values, 0, galloc->hash_set.size * sizeof(struct hash_node)); + + // allocate all graph inputs first to avoid overwriting them + for (int i = 0; i < graph->n_nodes; i++) { + if (graph->nodes[i]->flags & GGML_TENSOR_FLAG_INPUT) { + ggml_gallocr_allocate_node(galloc, graph->nodes[i], get_node_buffer_id(node_buffer_ids, i)); + } + for (int j = 0; j < GGML_MAX_SRC; j++) { + if (graph->nodes[i]->src[j] == NULL) { + break; + } + if (graph->nodes[i]->src[j]->flags & GGML_TENSOR_FLAG_INPUT) { + ggml_gallocr_allocate_node(galloc, graph->nodes[i]->src[j], get_node_buffer_id(node_buffer_ids, i)); + } + } + } // count number of children and views - for (int i = 0; i < gf->n_nodes; i++) { - struct ggml_tensor * node = gf->nodes[i]; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view(node)) { struct ggml_tensor * view_src = node->view_src; - hash_get(galloc, view_src)->n_views += 1; - if (node->buffer == NULL && node->data != NULL) { - // view of a pre-allocated tensor, didn't call init_view() yet - init_view(galloc, node, true); - } + ggml_gallocr_hash_get(galloc, view_src)->n_views += 1; } for (int j = 0; j < GGML_MAX_SRC; j++) { @@ -554,227 +573,283 @@ static void ggml_tallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr if (parent == NULL) { break; } - hash_get(galloc, parent)->n_children += 1; - if (ggml_is_view(parent) && parent->buffer == NULL && parent->data != NULL) { - init_view(galloc, parent, true); - } + ggml_gallocr_hash_get(galloc, parent)->n_children += 1; } } // allocate tensors - // if we have parse_seq then we allocate nodes following the list, and we only free nodes at barriers - int last_barrier_pos = 0; - int n_nodes = parse_seq_len ? 
parse_seq_len : gf->n_nodes; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + int buffer_id = get_node_buffer_id(node_buffer_ids, i); - for (int ind = 0; ind < n_nodes; ind++) { - // allocate a node if there is no parse_seq or this is not a barrier - if (parse_seq_len == 0 || parse_seq[ind] != -1) { - int i = parse_seq_len ? parse_seq[ind] : ind; - struct ggml_tensor * node = gf->nodes[i]; - - // allocate parents (leafs) - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - allocate_node(galloc, parent); + // allocate parents (only leafs need to be allocated at this point) + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + break; } - - // allocate node - allocate_node(galloc, node); - - AT_PRINTF("exec: %s (%s) <= ", ggml_op_name(node->op), node->name); - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - AT_PRINTF("%s", parent->name); - if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { - AT_PRINTF(", "); - } - } - AT_PRINTF("\n"); + ggml_gallocr_allocate_node(galloc, parent, buffer_id); } + // allocate node + ggml_gallocr_allocate_node(galloc, node, buffer_id); + + AT_PRINTF("exec: %s (%s) <= ", ggml_op_desc(node), node->name); + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + break; + } + AT_PRINTF("%s", parent->name); + if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { + AT_PRINTF(", "); + } + } + AT_PRINTF("\n"); + // update parents - // update immediately if there is no parse_seq - // update only at barriers if there is parse_seq - if ((parse_seq_len == 0) || parse_seq[ind] == -1) { - int update_start = parse_seq_len ? last_barrier_pos : ind; - int update_end = parse_seq_len ? ind : ind + 1; - for (int i = update_start; i < update_end; i++) { - int node_i = parse_seq_len ? 
parse_seq[i] : i; - struct ggml_tensor * node = gf->nodes[node_i]; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * parent = node->src[j]; + if (parent == NULL) { + break; + } + struct hash_node * p_hn = ggml_gallocr_hash_get(galloc, parent); + p_hn->n_children -= 1; - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - struct hash_node * p_hn = hash_get(galloc, parent); - p_hn->n_children -= 1; - - //AT_PRINTF("parent %s: %d children, %d views\n", parent->name, parent->n_children, parent->n_views); - - if (p_hn->n_children == 0 && p_hn->n_views == 0) { - if (ggml_is_view(parent)) { - struct ggml_tensor * view_src = parent->view_src; - struct hash_node * view_src_hn = hash_get(galloc, view_src); - view_src_hn->n_views -= 1; - AT_PRINTF("view_src %s: %d children, %d views\n", view_src->name, view_src_hn->n_children, view_src_hn->n_views); - if (view_src_hn->n_views == 0 && view_src_hn->n_children == 0) { - free_node(galloc, view_src); - } - } - else { - free_node(galloc, parent); - } + AT_PRINTF("parent %s: %d children, %d views, allocated: %d\n", + parent->name, p_hn->n_children, p_hn->n_views, p_hn->allocated); + + if (p_hn->n_children == 0 && p_hn->n_views == 0) { + if (ggml_is_view(parent)) { + struct ggml_tensor * view_src = parent->view_src; + struct hash_node * view_src_hn = ggml_gallocr_hash_get(galloc, view_src); + view_src_hn->n_views -= 1; + AT_PRINTF("view_src %s: %d children, %d views\n", + view_src->name, view_src_hn->n_children, view_src_hn->n_views); + if (view_src_hn->n_views == 0 && view_src_hn->n_children == 0 && view_src_hn->allocated) { + ggml_gallocr_free_node(galloc, view_src, buffer_id); } } + else if (p_hn->allocated) { + ggml_gallocr_free_node(galloc, parent, buffer_id); + } } AT_PRINTF("\n"); - if (parse_seq_len) { - last_barrier_pos = ind + 1; + } + } +} + +bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, const int * node_buffer_ids) { + size_t hash_size = graph->visited_hash_table.size; + + // initialize hash table + if (galloc->hash_set.size < hash_size) { + free(galloc->hash_set.keys); + free(galloc->hash_values); + galloc->hash_set.size = hash_size; + galloc->hash_set.keys = calloc(sizeof(struct ggml_tensor *), hash_size); + galloc->hash_values = calloc(sizeof(struct hash_node), hash_size); + GGML_ASSERT(galloc->hash_set.keys != NULL); + GGML_ASSERT(galloc->hash_values != NULL); + } else { + // reset hash table + memset(galloc->hash_set.keys, 0, sizeof(struct ggml_tensor *) * galloc->hash_set.size); + memset(galloc->hash_values, 0, sizeof(struct hash_node) * galloc->hash_set.size); + } + + // reset allocators + for (int i = 0; i < galloc->n_buffers; i++) { + ggml_dyn_tallocr_reset(galloc->buf_tallocs[i]); + } + + // allocate in hash table + ggml_gallocr_alloc_graph_impl(galloc, graph, node_buffer_ids); + + // set the node_allocs from the hash table + if (galloc->n_nodes < graph->n_nodes) { + free(galloc->node_allocs); + galloc->node_allocs = calloc(sizeof(struct node_alloc), graph->n_nodes); + GGML_ASSERT(galloc->node_allocs != NULL); + } + galloc->n_nodes = graph->n_nodes; + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct node_alloc * node_alloc = &galloc->node_allocs[i]; + node_alloc->buffer_id = get_node_buffer_id(node_buffer_ids, i); + if (node->view_src || node->data) { + node_alloc->dst.offset = SIZE_MAX; + node_alloc->dst.size_max = 0; + } else { + struct hash_node * hn = 
ggml_gallocr_hash_get(galloc, node); + node_alloc->dst.offset = hn->offset; + node_alloc->dst.size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], node); + } + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (!src || src->view_src || src->data) { + node_alloc->src[j].offset = SIZE_MAX; + node_alloc->src[j].size_max = 0; + } else { + struct hash_node * hn = ggml_gallocr_hash_get(galloc, src); + node_alloc->src[j].offset = hn->offset; + node_alloc->src[j].size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], src); } } } -} -size_t ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, ggml_tallocr_t talloc, struct ggml_cgraph * graph) { - size_t hash_size = graph->visited_hash_table.size; + // reallocate buffers if needed + for (int i = 0; i < galloc->n_buffers; i++) { + size_t cur_size = galloc->buffers[i] ? ggml_backend_buffer_get_size(galloc->buffers[i]) : 0; + size_t new_size = ggml_dyn_tallocr_max_size(galloc->buf_tallocs[i]); - // check if the hash table is initialized and large enough - if (galloc->hash_set.size < hash_size) { - if (galloc->hash_set.keys != NULL) { - free(galloc->hash_set.keys); + if (new_size > cur_size) { +#ifndef NDEBUG + fprintf(stderr, "%s: reallocating %s buffer from size %.02f MiB to %.02f MiB\n", __func__, ggml_backend_buft_name(galloc->bufts[i]), cur_size / 1024.0 / 1024.0, new_size / 1024.0 / 1024.0); +#endif + ggml_backend_buffer_free(galloc->buffers[i]); + galloc->buffers[i] = ggml_backend_buft_alloc_buffer(galloc->bufts[i], new_size); + if (galloc->buffers[i] == NULL) { + fprintf(stderr, "%s: failed to allocate %s buffer of size %zu\n", __func__, ggml_backend_buft_name(galloc->bufts[i]), new_size); + return false; + } } - if (galloc->hash_values != NULL) { - free(galloc->hash_values); + } + + return true; +} + +bool ggml_gallocr_reserve(ggml_gallocr_t galloc, struct ggml_cgraph *graph) { + return ggml_gallocr_reserve_n(galloc, graph, NULL); +} + +static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * node, struct node_alloc * node_alloc, struct tensor_alloc * tensor_alloc) { + assert(node->data || node->view_src || ggml_backend_buffer_get_alloc_size(galloc->buffers[node_alloc->buffer_id], node) <= tensor_alloc->size_max); + + if (node->view_src != NULL) { + if (node->buffer == NULL) { + assert(tensor_alloc->offset == SIZE_MAX); + if (node->view_src->buffer == NULL) { + // this tensor was allocated without ggml-backend + return; + } + ggml_backend_view_init(galloc->buffers[node_alloc->buffer_id], node); } - galloc->hash_set.keys = malloc(sizeof(struct ggml_tensor *) * hash_size); - galloc->hash_set.size = hash_size; - galloc->hash_values = malloc(sizeof(struct hash_node) * hash_size); + } else { + if (node->data == NULL) { + assert(tensor_alloc->offset != SIZE_MAX); + assert(ggml_backend_buffer_get_alloc_size(galloc->buffers[node_alloc->buffer_id], node) <= tensor_alloc->size_max); + void * base = ggml_backend_buffer_get_base(galloc->buffers[node_alloc->buffer_id]); + void * addr = (char *)base + tensor_alloc->offset; + ggml_backend_tensor_alloc(galloc->buffers[node_alloc->buffer_id], node, addr); + } else { + if (node->buffer == NULL) { + // this tensor was allocated without ggml-backend + return; + } + +#ifndef NDEBUG + size_t offset = + (char *)node->data - + (char *)ggml_backend_buffer_get_base(node->buffer); + size_t size = ggml_backend_buffer_get_alloc_size(node->buffer, node); + assert(tensor_alloc->offset == SIZE_MAX || offset == 
tensor_alloc->offset); + assert(tensor_alloc->offset == SIZE_MAX || size <= tensor_alloc->size_max); +#endif + } + } +} + +static bool ggml_gallocr_node_needs_realloc(ggml_gallocr_t galloc, struct ggml_tensor * node, struct node_alloc * nalloc, struct tensor_alloc * talloc) { + ggml_backend_buffer_type_t buft = galloc->bufts[nalloc->buffer_id]; + size_t node_size = (node->data || node->view_src) ? 0 : ggml_backend_buft_get_alloc_size(buft, node); + return talloc->size_max >= node_size; +} + +static bool ggml_gallocr_needs_realloc(ggml_gallocr_t galloc, struct ggml_cgraph * graph) { + if (galloc->n_nodes != graph->n_nodes) { +#ifndef NDEBUG + fprintf(stderr, "%s: graph has different number of nodes\n", __func__); +#endif + return true; } - // reset hash table - memset(galloc->hash_set.keys, 0, sizeof(struct ggml_tensor *) * hash_size); - memset(galloc->hash_values, 0, sizeof(struct hash_node) * hash_size); + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct node_alloc * node_alloc = &galloc->node_allocs[i]; - galloc->talloc = talloc; - ggml_tallocr_alloc_graph_impl(galloc, graph); - galloc->talloc = NULL; + if (!ggml_gallocr_node_needs_realloc(galloc, node, node_alloc, &node_alloc->dst)) { +#ifndef NDEBUG + fprintf(stderr, "%s: node %s is not valid\n", __func__, node->name); +#endif + return true; + } - size_t max_size = ggml_tallocr_max_size(talloc); - - return max_size; -} - -void ggml_gallocr_alloc_graph_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, struct ggml_hash_set hash_set, ggml_tallocr_t * hash_node_talloc) { - const size_t hash_size = hash_set.size; - - GGML_ASSERT(hash_size >= (size_t)(graph->n_nodes + graph->n_leafs)); - - galloc->talloc = NULL; - - // alloc hash_values if needed - if (galloc->hash_values == NULL || galloc->hash_values_size < hash_size) { - free(galloc->hash_values); - galloc->hash_values = malloc(sizeof(struct hash_node) * hash_size); - galloc->hash_values_size = hash_size; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + if (!ggml_gallocr_node_needs_realloc(galloc, src, node_alloc, &node_alloc->src[j])) { +#ifndef NDEBUG + fprintf(stderr, "%s: src %d (%s) of node %s is not valid\n", __func__, j, src->name, node->name); +#endif + return true; + } + } } - // free hash_set.keys if needed - if (galloc->hash_set.keys != NULL) { - free(galloc->hash_set.keys); - } - galloc->hash_set = hash_set; - - // reset hash values - memset(galloc->hash_values, 0, sizeof(struct hash_node) * hash_size); - - galloc->hash_allocs = hash_node_talloc; - - ggml_tallocr_alloc_graph_impl(galloc, graph); - - // remove unowned resources - galloc->hash_set.keys = NULL; - galloc->hash_allocs = NULL; + return false; } -// legacy API wrapper - -struct ggml_allocr { - ggml_tallocr_t talloc; - ggml_gallocr_t galloc; -}; - -static ggml_allocr_t ggml_allocr_new_impl(ggml_tallocr_t talloc) { - ggml_allocr_t alloc = (ggml_allocr_t)malloc(sizeof(struct ggml_allocr)); - *alloc = (struct ggml_allocr) { - /*.talloc = */ talloc, - /*.galloc = */ ggml_gallocr_new(), - }; - return alloc; -} - -ggml_allocr_t ggml_allocr_new(void * data, size_t size, size_t alignment) { - return ggml_allocr_new_impl(ggml_tallocr_new(data, size, alignment)); -} - -ggml_allocr_t ggml_allocr_new_measure(size_t alignment) { - return ggml_allocr_new_impl(ggml_tallocr_new_measure(alignment)); -} - -ggml_allocr_t ggml_allocr_new_from_buffer(struct ggml_backend_buffer * buffer) { - return 
ggml_allocr_new_impl(ggml_tallocr_new_from_buffer(buffer)); -} - -ggml_allocr_t ggml_allocr_new_from_backend(struct ggml_backend * backend, size_t size) { - return ggml_allocr_new_impl(ggml_tallocr_new_from_backend(backend, size)); -} - -ggml_allocr_t ggml_allocr_new_measure_from_backend(struct ggml_backend * backend) { - return ggml_allocr_new_impl(ggml_tallocr_new_measure_from_backend(backend)); -} - -struct ggml_backend_buffer * ggml_allocr_get_buffer(ggml_allocr_t alloc) { - return ggml_tallocr_get_buffer(alloc->talloc); -} - -void ggml_allocr_set_parse_seq(ggml_allocr_t alloc, const int * list, int n) { - ggml_gallocr_set_parse_seq(alloc->galloc, list, n); -} - -void ggml_allocr_free(ggml_allocr_t alloc) { - if (alloc == NULL) { - return; +bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) { + if (ggml_gallocr_needs_realloc(galloc, graph)) { + if (galloc->n_buffers == 1) { +#ifndef NDEBUG + fprintf(stderr, "%s: reallocating buffers automatically\n", __func__); +#endif + if (!ggml_gallocr_reserve(galloc, graph)) { + return false; + } + } else { +#ifndef NDEBUG + fprintf(stderr, "%s: cannot reallocate multi buffer graph automatically, call reserve\n", __func__); +#endif + return false; + } } - ggml_gallocr_free(alloc->galloc); - ggml_tallocr_free(alloc->talloc); - free(alloc); + // reset buffers + for (int i = 0; i < galloc->n_buffers; i++) { + // zero size buffers are not allocated + if (galloc->buffers[i] != NULL) { + ggml_backend_buffer_reset(galloc->buffers[i]); + } + } + + // allocate the graph tensors from the previous assignments + for (int i = 0; i < graph->n_nodes; i++) { + struct ggml_tensor * node = graph->nodes[i]; + struct node_alloc * node_alloc = &galloc->node_allocs[i]; + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + break; + } + ggml_gallocr_init_tensor(galloc, src, node_alloc, &node_alloc->src[j]); + } + ggml_gallocr_init_tensor(galloc, node, node_alloc, &node_alloc->dst); + } + + return true; } -bool ggml_allocr_is_measure(ggml_allocr_t alloc) { - return ggml_tallocr_is_measure(alloc->talloc); -} +size_t ggml_gallocr_get_buffer_size(ggml_gallocr_t galloc, int buffer_id) { + GGML_ASSERT(buffer_id >= 0 && buffer_id < galloc->n_buffers); -void ggml_allocr_reset(ggml_allocr_t alloc) { - ggml_tallocr_reset(alloc->talloc); -} - -void ggml_allocr_alloc(ggml_allocr_t alloc, struct ggml_tensor * tensor) { - ggml_tallocr_alloc(alloc->talloc, tensor); -} - -size_t ggml_allocr_max_size(ggml_allocr_t alloc) { - return ggml_tallocr_max_size(alloc->talloc); -} - -size_t ggml_allocr_alloc_graph(ggml_allocr_t alloc, struct ggml_cgraph * graph) { - return ggml_gallocr_alloc_graph(alloc->galloc, alloc->talloc, graph); + if (galloc->buffers[buffer_id] == NULL) { + return 0; + } + return ggml_backend_buffer_get_size(galloc->buffers[buffer_id]); } // utils @@ -795,17 +870,17 @@ static bool alloc_tensor_range(struct ggml_context * ctx, return false; } - ggml_tallocr_t tallocr = ggml_tallocr_new_from_buffer(buffer); + struct ggml_tallocr * tallocr = ggml_tallocr_new(buffer); for (struct ggml_tensor * t = first; t != last; t = ggml_get_next_tensor(ctx, t)) { if (t->data == NULL) { if (t->view_src == NULL) { ggml_tallocr_alloc(tallocr, t); - } else { + } else if (t->buffer == NULL) { ggml_backend_view_init(buffer, t); } } else { - if (t->view_src != NULL) { + if (t->view_src != NULL && t->buffer == NULL) { // view of a pre-allocated tensor ggml_backend_view_init(buffer, t); } @@ -838,7 +913,6 @@ 
ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte } if (this_size > max_size) { - // tensor is too large to fit in a single buffer fprintf(stderr, "%s: tensor %s is too large to fit in a %s buffer (tensor size: %zu, max buffer size: %zu)\n", __func__, t->name, ggml_backend_buft_name(buft), @@ -870,7 +944,6 @@ ggml_backend_buffer_t ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_conte } if (n_buffers == 0) { - // all the tensors in the context are already allocated #ifndef NDEBUG fprintf(stderr, "%s: all tensors in the context are already allocated\n", __func__); #endif diff --git a/ggml-alloc.h b/ggml-alloc.h index 4e5997521..1d9085d15 100644 --- a/ggml-alloc.h +++ b/ggml-alloc.h @@ -6,88 +6,62 @@ extern "C" { #endif -struct ggml_backend; -struct ggml_backend_buffer; -struct ggml_backend_buffer_type; - -// -// Legacy API -// - -typedef struct ggml_allocr * ggml_allocr_t; - -// initialize allocator for use with CPU backend only -GGML_API ggml_allocr_t ggml_allocr_new(void * data, size_t size, size_t alignment); -GGML_API ggml_allocr_t ggml_allocr_new_measure(size_t alignment); - -// initialize allocator for use with ggml-backend -GGML_API ggml_allocr_t ggml_allocr_new_from_buffer(struct ggml_backend_buffer * buffer); -GGML_API ggml_allocr_t ggml_allocr_new_from_backend(struct ggml_backend * backend, size_t size); // allocates an owned buffer -GGML_API ggml_allocr_t ggml_allocr_new_measure_from_backend(struct ggml_backend * backend); - -GGML_API struct ggml_backend_buffer * ggml_allocr_get_buffer(ggml_allocr_t alloc); - -// tell the allocator to parse nodes following the order described in the list -// you should call this if your graph are optimized to execute out-of-order -GGML_API void ggml_allocr_set_parse_seq(ggml_allocr_t alloc, const int * list, int n); - -GGML_API void ggml_allocr_free (ggml_allocr_t alloc); -GGML_API bool ggml_allocr_is_measure (ggml_allocr_t alloc); -GGML_API void ggml_allocr_reset (ggml_allocr_t alloc); -GGML_API void ggml_allocr_alloc (ggml_allocr_t alloc, struct ggml_tensor * tensor); -GGML_API size_t ggml_allocr_max_size (ggml_allocr_t alloc); - -GGML_API size_t ggml_allocr_alloc_graph(ggml_allocr_t alloc, struct ggml_cgraph * graph); - -// -// ggml-backend v2 API -// - -// Separate tensor and graph allocator objects -// This is necessary for multi-backend allocation because the graph allocator needs to use multiple tensor allocators -// The original API is kept as a wrapper around the new API +typedef struct ggml_backend_buffer_type * ggml_backend_buffer_type_t; +typedef struct ggml_backend_buffer * ggml_backend_buffer_t; +typedef struct ggml_backend * ggml_backend_t; // Tensor allocator typedef struct ggml_tallocr * ggml_tallocr_t; -GGML_API ggml_tallocr_t ggml_tallocr_new(void * data, size_t size, size_t alignment); -GGML_API ggml_tallocr_t ggml_tallocr_new_measure(size_t alignment); -GGML_API ggml_tallocr_t ggml_tallocr_new_from_buft(struct ggml_backend_buffer_type * buft, size_t size); -GGML_API ggml_tallocr_t ggml_tallocr_new_from_backend(struct ggml_backend * backend, size_t size); // allocates an owned buffer -GGML_API ggml_tallocr_t ggml_tallocr_new_from_buffer(struct ggml_backend_buffer * buffer); -GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_buft(struct ggml_backend_buffer_type * buft); -GGML_API ggml_tallocr_t ggml_tallocr_new_measure_from_backend(struct ggml_backend * backend); - -GGML_API struct ggml_backend_buffer * ggml_tallocr_get_buffer(ggml_tallocr_t talloc); - -GGML_API void ggml_tallocr_free 
(ggml_tallocr_t talloc);
-GGML_API bool   ggml_tallocr_is_measure (ggml_tallocr_t talloc);
-GGML_API void   ggml_tallocr_reset      (ggml_tallocr_t talloc);
-GGML_API void   ggml_tallocr_alloc      (ggml_tallocr_t talloc, struct ggml_tensor * tensor);
-GGML_API size_t ggml_tallocr_max_size   (ggml_tallocr_t talloc);
-
+GGML_API ggml_tallocr_t ggml_tallocr_new(ggml_backend_buffer_t buffer);
+GGML_API void           ggml_tallocr_free(ggml_tallocr_t talloc);
+GGML_API void           ggml_tallocr_alloc(ggml_tallocr_t talloc, struct ggml_tensor * tensor);
 
 // Graph allocator
+/*
+  Example usage:
+    ggml_gallocr_t galloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type());
+
+    // optional: create a worst-case graph and reserve the buffers to avoid reallocations
+    ggml_gallocr_reserve(galloc, build_graph(max_batch));
+
+    // allocate the graph
+    struct ggml_cgraph * graph = build_graph(batch);
+    ggml_gallocr_alloc_graph(galloc, graph);
+
+    printf("compute buffer size: %zu bytes\n", ggml_gallocr_get_buffer_size(galloc, 0));
+
+    // evaluate the graph
+    ggml_backend_graph_compute(backend, graph);
+*/
+
+// special tensor flags for use with the graph allocator:
+//   ggml_set_input(): all input tensors are allocated at the beginning of the graph in non-overlapping addresses
+//   ggml_set_output(): output tensors are never freed and never overwritten
+
 typedef struct ggml_gallocr * ggml_gallocr_t;
 
-GGML_API ggml_gallocr_t ggml_gallocr_new(void);
-GGML_API void   ggml_gallocr_free(ggml_gallocr_t galloc);
+GGML_API ggml_gallocr_t ggml_gallocr_new(ggml_backend_buffer_type_t buft);
+GGML_API ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs);
+GGML_API void           ggml_gallocr_free(ggml_gallocr_t galloc);
 
-GGML_API void   ggml_gallocr_set_parse_seq(ggml_gallocr_t galloc, const int * list, int n);
-GGML_API size_t ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, ggml_tallocr_t talloc, struct ggml_cgraph * graph);
+// pre-allocate buffers from a measure graph - does not allocate or modify the graph
+// call with a worst-case graph to avoid buffer reallocations
+// not strictly required for single buffer usage: ggml_gallocr_alloc_graph will reallocate the buffers automatically if needed
+// returns false if the buffer allocation failed
+GGML_API bool ggml_gallocr_reserve(ggml_gallocr_t galloc, struct ggml_cgraph * graph);
+GGML_API bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, const int * node_buffer_ids);
 
-// Allocate tensors from the allocators given by the hash table
-GGML_API void   ggml_gallocr_alloc_graph_n(
-                    ggml_gallocr_t galloc,
-                    struct ggml_cgraph * graph,
-                    struct ggml_hash_set hash_set,
-                    ggml_tallocr_t * hash_node_talloc);
+// automatic reallocation if the topology changes when using a single buffer
+// returns false if using multiple buffers and a re-allocation is needed (call ggml_gallocr_reserve_n first to set the node buffers)
+GGML_API bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph);
 
+GGML_API size_t ggml_gallocr_get_buffer_size(ggml_gallocr_t galloc, int buffer_id);
 
 // Utils
 // Create a buffer and allocate all the tensors in a ggml_context
-GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_context * ctx, struct ggml_backend_buffer_type * buft);
-GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors(struct ggml_context * ctx, struct ggml_backend * backend);
+GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors_from_buft(struct ggml_context * ctx, ggml_backend_buffer_type_t buft);
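+// A self-contained sketch of the intended workflow (illustrative only: the
+// context size, the trivial add graph and the CPU backend are assumptions,
+// not requirements of this API):
+//
+//     struct ggml_init_params ip = {
+//         /*.mem_size   =*/ ggml_tensor_overhead()*8 + ggml_graph_overhead(),
+//         /*.mem_buffer =*/ NULL,
+//         /*.no_alloc   =*/ true,  // metadata only, data goes in backend buffers
+//     };
+//     struct ggml_context * ctx = ggml_init(ip);
+//
+//     struct ggml_tensor * a = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 16);
+//     struct ggml_tensor * b = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, 16);
+//     ggml_set_input(a);   // inputs are given non-overlapping addresses
+//     ggml_set_input(b);
+//     struct ggml_tensor * c = ggml_add(ctx, a, b);
+//     ggml_set_output(c);  // outputs are never freed or overwritten
+//
+//     struct ggml_cgraph * gf = ggml_new_graph(ctx);
+//     ggml_build_forward_expand(gf, c);
+//
+//     ggml_gallocr_t galloc = ggml_gallocr_new(ggml_backend_cpu_buffer_type());
+//     ggml_gallocr_alloc_graph(galloc, gf);  // reserves buffer 0 on first use
+//
+//     float one = 1.0f;
+//     for (int i = 0; i < 16; i++) {
+//         ggml_backend_tensor_set(a, &one, i*sizeof(float), sizeof(float));
+//         ggml_backend_tensor_set(b, &one, i*sizeof(float), sizeof(float));
+//     }
+//
+//     ggml_backend_t backend = ggml_backend_cpu_init();
+//     ggml_backend_graph_compute(backend, gf);
+//
+//     ggml_backend_free(backend);
+//     ggml_gallocr_free(galloc);
+//     ggml_free(ctx);
+//
+// for static data such as weights, ggml_backend_alloc_ctx_tensors_from_buft
+// above allocates every tensor of a context in a single buffer instead.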
+GGML_API struct ggml_backend_buffer * ggml_backend_alloc_ctx_tensors(struct ggml_context * ctx, ggml_backend_t backend); #ifdef __cplusplus } diff --git a/ggml-backend.c b/ggml-backend.c index 532da8eda..9ee81b766 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -475,6 +475,8 @@ ggml_backend_buffer_t ggml_backend_reg_alloc_buffer(size_t i, size_t size) { // backend CPU +static const size_t TENSOR_ALIGNMENT = 32; // required for mmap as gguf only guarantees 32-byte alignment + GGML_CALL static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t buffer) { return "CPU"; @@ -482,7 +484,14 @@ GGML_CALL static const char * ggml_backend_cpu_buffer_name(ggml_backend_buffer_t } GGML_CALL static void * ggml_backend_cpu_buffer_get_base(ggml_backend_buffer_t buffer) { - return (void *)buffer->context; + uintptr_t data = (uintptr_t)buffer->context; + + // align the buffer + if (data % TENSOR_ALIGNMENT != 0) { + data = GGML_PAD(data, TENSOR_ALIGNMENT); + } + + return (void *)data; } GGML_CALL static void ggml_backend_cpu_buffer_free_buffer(ggml_backend_buffer_t buffer) { @@ -540,8 +549,6 @@ static struct ggml_backend_buffer_i cpu_backend_buffer_i_from_ptr = { /* .reset = */ NULL, }; -static const size_t TENSOR_ALIGNMENT = 64; // should be enough for AVX 512 - GGML_CALL static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend_buffer_type_t buft) { return "CPU"; @@ -550,9 +557,11 @@ GGML_CALL static const char * ggml_backend_cpu_buffer_type_get_name(ggml_backend GGML_CALL static ggml_backend_buffer_t ggml_backend_cpu_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft, size_t size) { size += TENSOR_ALIGNMENT; // malloc may return an address that is not aligned - void * data = malloc(size); // TODO: maybe use GGML_ALIGNED_MALLOC? - - GGML_ASSERT(data != NULL && "failed to allocate buffer"); + void * data = malloc(size); // TODO: use GGML_ALIGNED_MALLOC (move to ggml-impl.h) + if (data == NULL) { + fprintf(stderr, "%s: failed to allocate buffer of size %zu\n", __func__, size); + return NULL; + } return ggml_backend_buffer_init(buft, cpu_backend_buffer_i, data, size); } @@ -766,6 +775,9 @@ static struct ggml_backend_i cpu_backend_i = { ggml_backend_t ggml_backend_cpu_init(void) { struct ggml_backend_cpu_context * ctx = malloc(sizeof(struct ggml_backend_cpu_context)); + if (ctx == NULL) { + return NULL; + } ctx->n_threads = GGML_DEFAULT_N_THREADS; ctx->work_data = NULL; @@ -774,6 +786,10 @@ ggml_backend_t ggml_backend_cpu_init(void) { ctx->abort_callback_data = NULL; ggml_backend_t cpu_backend = malloc(sizeof(struct ggml_backend)); + if (cpu_backend == NULL) { + free(ctx); + return NULL; + } *cpu_backend = (struct ggml_backend) { /* .interface = */ cpu_backend_i, @@ -802,6 +818,7 @@ void ggml_backend_cpu_set_abort_callback(ggml_backend_t backend_cpu, ggml_abort_ } GGML_CALL ggml_backend_buffer_t ggml_backend_cpu_buffer_from_ptr(void * ptr, size_t size) { + GGML_ASSERT((uintptr_t)ptr % TENSOR_ALIGNMENT == 0 && "buffer pointer must be aligned"); return ggml_backend_buffer_init(ggml_backend_cpu_buffer_type(), cpu_backend_buffer_i_from_ptr, ptr, size); } @@ -865,6 +882,8 @@ GGML_CALL ggml_backend_buffer_t ggml_backend_multi_buffer_alloc_buffer(ggml_back ctx->n_buffers = n_buffers; ctx->buffers = (ggml_backend_buffer_t *) malloc(n_buffers * sizeof(ggml_backend_buffer_t)); + GGML_ASSERT(ctx->buffers != NULL); + size_t total_size = 0; for (size_t i = 0; i < n_buffers; i++) { ctx->buffers[i] = buffers[i]; @@ -886,6 +905,18 @@ GGML_CALL void 
ggml_backend_multi_buffer_set_usage(ggml_backend_buffer_t buffer, } } +// creates a copy of the tensor with the same memory layout +static struct ggml_tensor * ggml_dup_tensor_layout(struct ggml_context * ctx, const struct ggml_tensor * tensor) { + struct ggml_tensor * dup = ggml_dup_tensor(ctx, tensor); + for (int i = 0; i < GGML_MAX_DIMS; i++) { + dup->nb[i] = tensor->nb[i]; + } + return dup; +} + +static bool ggml_is_view_op(enum ggml_op op) { + return op == GGML_OP_VIEW || op == GGML_OP_RESHAPE || op == GGML_OP_PERMUTE || op == GGML_OP_TRANSPOSE; +} // scheduler @@ -894,7 +925,7 @@ GGML_CALL void ggml_backend_multi_buffer_set_usage(ggml_backend_buffer_t buffer, #define GGML_MAX_SPLIT_INPUTS 16 struct ggml_backend_sched_split { - ggml_tallocr_t tallocr; + int backend_id; int i_start; int i_end; struct ggml_tensor * inputs[GGML_MAX_SPLIT_INPUTS]; @@ -909,15 +940,17 @@ struct ggml_backend_sched { int n_backends; ggml_backend_t backends[GGML_MAX_BACKENDS]; ggml_backend_buffer_type_t bufts[GGML_MAX_BACKENDS]; - ggml_tallocr_t tallocs[GGML_MAX_BACKENDS]; ggml_gallocr_t galloc; // hash keys of the nodes in the graph struct ggml_hash_set hash_set; - // hash values (arrays of [hash_set.size]) - ggml_tallocr_t * node_talloc; // tallocr assigned to each node (indirectly this is the backend) - struct ggml_tensor * (* node_copies)[GGML_MAX_BACKENDS]; // copies of each node for each destination backend + // hash values + int * tensor_backend_id; + struct ggml_tensor * (* tensor_copies)[GGML_MAX_BACKENDS]; + + int * node_backend_ids; // [n_nodes] + int n_nodes; // copy of the graph with modified inputs struct ggml_cgraph * graph; @@ -927,77 +960,46 @@ struct ggml_backend_sched { struct ggml_context * ctx; + ggml_backend_sched_eval_callback callback_eval; + void * callback_eval_user_data; + // align context_buffer to GGML_MEM_ALIGN #ifdef _MSC_VER __declspec(align(GGML_MEM_ALIGN)) #else __attribute__((aligned(GGML_MEM_ALIGN))) #endif - char context_buffer[GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS*sizeof(struct ggml_tensor) + sizeof(struct ggml_cgraph)]; - - ggml_backend_sched_eval_callback callback_eval; - void * callback_eval_user_data; + char context_buffer[GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS*2*sizeof(struct ggml_tensor) + sizeof(struct ggml_cgraph)]; }; #define hash_id(node) ggml_hash_find_or_insert(sched->hash_set, node) -#define node_allocr(node) sched->node_talloc[hash_id(node)] +#define tensor_backend_id(node) sched->tensor_backend_id[hash_id(node)] +#define tensor_backend(node) (tensor_backend_id(node) == -1 ? 
NULL : sched->backends[tensor_backend_id(node)]) -static bool ggml_is_view_op(enum ggml_op op) { - return op == GGML_OP_VIEW || op == GGML_OP_RESHAPE || op == GGML_OP_PERMUTE || op == GGML_OP_TRANSPOSE; -} - -// returns the priority of the backend, lower is better -static int sched_backend_prio(ggml_backend_sched_t sched, ggml_backend_t backend) { +// returns the priority of the backend, lower id is higher priority +static int ggml_backend_sched_backend_id(ggml_backend_sched_t sched, ggml_backend_t backend) { for (int i = 0; i < sched->n_backends; i++) { if (sched->backends[i] == backend) { return i; } } - return INT_MAX; + return -1; } -static int sched_allocr_prio(ggml_backend_sched_t sched, ggml_tallocr_t allocr) { - for (int i = 0; i < sched->n_backends; i++) { - if (sched->tallocs[i] == allocr) { - return i; - } - } - return INT_MAX; -} - -static ggml_tallocr_t sched_allocr_from_buffer(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { +static int ggml_backend_sched_backend_from_buffer(ggml_backend_sched_t sched, ggml_backend_buffer_t buffer) { if (buffer == NULL) { - return NULL; - } - - // check if this is already allocate in a allocr buffer (from user manual allocations) - for (int i = 0; i < sched->n_backends; i++) { - if (ggml_tallocr_get_buffer(sched->tallocs[i]) == buffer) { - return sched->tallocs[i]; - } + return -1; } // find highest prio backend that supports the buffer type for (int i = 0; i < sched->n_backends; i++) { if (ggml_backend_buft_supports_backend(buffer->buft, sched->backends[i])) { - return sched->tallocs[i]; + return i; } } GGML_ASSERT(false && "tensor buffer type not supported by any backend"); } -static ggml_backend_t get_allocr_backend(ggml_backend_sched_t sched, ggml_tallocr_t allocr) { - if (allocr == NULL) { - return NULL; - } - for (int i = 0; i < sched->n_backends; i++) { - if (sched->tallocs[i] == allocr) { - return sched->backends[i]; - } - } - GGML_UNREACHABLE(); -} - #if 0 static char causes[GGML_DEFAULT_GRAPH_SIZE*16 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS][128]; // debug only #define SET_CAUSE(node, ...) 
sprintf(causes[hash_id(node)], __VA_ARGS__) @@ -1008,37 +1010,39 @@ static char causes[GGML_DEFAULT_GRAPH_SIZE*16 + GGML_MAX_SPLITS*GGML_MAX_SPLIT_I #endif // returns the backend that should be used for the node based on the current locations -static ggml_tallocr_t sched_allocr_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * node) { +static int ggml_backend_sched_backend_id_from_cur(ggml_backend_sched_t sched, struct ggml_tensor * tensor) { + // TODO: use supports_op to check if the backend supports the op + // assign pre-allocated nodes to their backend // dst - ggml_tallocr_t cur_allocr = sched_allocr_from_buffer(sched, node->buffer); - if (cur_allocr != NULL) { + int cur_backend = ggml_backend_sched_backend_from_buffer(sched, tensor->buffer); + if (cur_backend != -1) { SET_CAUSE(node, "1.dst"); - return cur_allocr; + return cur_backend; } // view_src - if (node->view_src != NULL) { - cur_allocr = sched_allocr_from_buffer(sched, node->view_src->buffer); - if (cur_allocr != NULL) { + if (tensor->view_src != NULL) { + cur_backend = ggml_backend_sched_backend_from_buffer(sched, tensor->view_src->buffer); + if (cur_backend != -1) { SET_CAUSE(node, "1.vsrc"); - return cur_allocr; + return cur_backend; } } // assign nodes that use weights to the backend of the weights for (int i = 0; i < GGML_MAX_SRC; i++) { - const struct ggml_tensor * src = node->src[i]; + const struct ggml_tensor * src = tensor->src[i]; if (src == NULL) { break; } if (src->buffer != NULL && src->buffer->usage == GGML_BACKEND_BUFFER_USAGE_WEIGHTS) { - ggml_tallocr_t src_allocr = sched_allocr_from_buffer(sched, src->buffer); + int src_backend = ggml_backend_sched_backend_from_buffer(sched, src->buffer); // operations with weights are always run on the same backend as the weights SET_CAUSE(node, "1.wgt%d", i); - return src_allocr; + return src_backend; } } - return NULL; + return -1; } static char * fmt_size(size_t size) { @@ -1051,11 +1055,11 @@ static char * fmt_size(size_t size) { return buffer; } -static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { +static void ggml_backend_sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { int cur_split = 0; for (int i = 0; i < graph->n_nodes; i++) { if (cur_split < sched->n_splits && i == sched->splits[cur_split].i_start) { - ggml_backend_t split_backend = get_allocr_backend(sched, sched->splits[cur_split].tallocr); + ggml_backend_t split_backend = sched->backends[sched->splits[cur_split].backend_id]; fprintf(stderr, "\n## SPLIT #%d: %s # %d inputs: ", cur_split, ggml_backend_name(split_backend), sched->splits[cur_split].n_inputs); for (int j = 0; j < sched->splits[cur_split].n_inputs; j++) { @@ -1069,17 +1073,15 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - ggml_backend_t node_backend = node_allocr ? get_allocr_backend(sched, node_allocr) : NULL; // FIXME: + ggml_backend_t tensor_backend = tensor_backend(node); fprintf(stderr, "node #%3d (%10.10s): %20.20s (%5.5s) [%5.5s %8.8s]:", i, ggml_op_name(node->op), node->name, - fmt_size(ggml_nbytes(node)), node_allocr ? ggml_backend_name(node_backend) : "NULL", GET_CAUSE(node)); + fmt_size(ggml_nbytes(node)), tensor_backend ? 
ggml_backend_name(tensor_backend) : "NULL", GET_CAUSE(node)); for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { break; } - ggml_tallocr_t src_allocr = node_allocr(src); - ggml_backend_t src_backend = src_allocr ? get_allocr_backend(sched, src_allocr) : NULL; + ggml_backend_t src_backend = tensor_backend(src); fprintf(stderr, " %20.20s (%5.5s) [%5.5s %8.8s]", src->name, fmt_size(ggml_nbytes(src)), src_backend ? ggml_backend_name(src_backend) : "NULL", GET_CAUSE(src)); } @@ -1087,23 +1089,13 @@ static void sched_print_assignments(ggml_backend_sched_t sched, struct ggml_cgra } } -// creates a copy of the tensor with the same memory layout -static struct ggml_tensor * ggml_dup_tensor_layout(struct ggml_context * ctx, const struct ggml_tensor * tensor) { - struct ggml_tensor * dup = ggml_dup_tensor(ctx, tensor); - for (int i = 0; i < GGML_MAX_DIMS; i++) { - dup->nb[i] = tensor->nb[i]; - } - return dup; -} - - //#define DEBUG_PASS1 //#define DEBUG_PASS2 //#define DEBUG_PASS3 //#define DEBUG_PASS4 // assigns backends to ops and splits the graph into subgraphs that can be computed on the same backend -static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { +static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { // reset splits sched->n_splits = 0; sched->is_reset = false; @@ -1125,28 +1117,28 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 1: assign backends to ops with pre-allocated inputs for (int i = 0; i < graph->n_leafs; i++) { struct ggml_tensor * leaf = graph->leafs[i]; - if (node_allocr(leaf) != NULL) { + if (tensor_backend_id(leaf) != -1) { // do not overwrite user assignments continue; } - node_allocr(leaf) = sched_allocr_from_cur(sched, leaf); + tensor_backend_id(leaf) = ggml_backend_sched_backend_id_from_cur(sched, leaf); } for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - if (node_allocr(node) != NULL) { + if (tensor_backend_id(node) != -1) { // do not overwrite user assignments continue; } - node_allocr(node) = sched_allocr_from_cur(sched, node); + tensor_backend_id(node) = ggml_backend_sched_backend_id_from_cur(sched, node); // src for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { break; } - if (node_allocr(src) == NULL) { - node_allocr(src) = sched_allocr_from_cur(sched, src); + if (tensor_backend_id(src) == -1) { + tensor_backend_id(src) = ggml_backend_sched_backend_id_from_cur(sched, src); } } } @@ -1161,22 +1153,22 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 2.1 expand gpu up { - ggml_tallocr_t cur_allocr = NULL; + int cur_backend_id = -1; for (int i = graph->n_nodes - 1; i >= 0; i--) { struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr != NULL) { - if (sched_allocr_prio(sched, node_allocr) == sched->n_backends - 1) { + int tensor_backend_id = tensor_backend_id(node); + if (tensor_backend_id != -1) { + if (tensor_backend_id == sched->n_backends - 1) { // skip cpu (lowest prio backend) - cur_allocr = NULL; + cur_backend_id = -1; } else { - cur_allocr = node_allocr; + cur_backend_id = tensor_backend_id; } } else { - node_allocr(node) = cur_allocr; + tensor_backend_id(node) = cur_backend_id; SET_CAUSE(node, "2.1"); } } @@ -1184,22 +1176,22 @@ static void 
sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 2.2 expand gpu down { - ggml_tallocr_t cur_allocr = NULL; + int cur_backend_id = -1; for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr != NULL) { - if (sched_allocr_prio(sched, node_allocr) == sched->n_backends - 1) { + int tensor_backend_id = tensor_backend_id(node); + if (tensor_backend_id != -1) { + if (tensor_backend_id == sched->n_backends - 1) { // skip cpu (lowest prio backend) - cur_allocr = NULL; + cur_backend_id = -1; } else { - cur_allocr = node_allocr; + cur_backend_id = tensor_backend_id; } } else { - node_allocr(node) = cur_allocr; + tensor_backend_id(node) = cur_backend_id; SET_CAUSE(node, "2.2"); } } @@ -1207,17 +1199,17 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 2.3 expand rest up { - ggml_tallocr_t cur_allocr = NULL; + int cur_backend_id = -1; for (int i = graph->n_nodes - 1; i >= 0; i--) { struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr != NULL) { - cur_allocr = node_allocr; + int tensor_backend_id = tensor_backend_id(node); + if (tensor_backend_id != -1) { + cur_backend_id = tensor_backend_id; } else { - node_allocr(node) = cur_allocr; + tensor_backend_id(node) = cur_backend_id; SET_CAUSE(node, "2.3"); } } @@ -1225,17 +1217,17 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 2.4 expand rest down { - ggml_tallocr_t cur_allocr = NULL; + int cur_backend_id = -1; for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; if (ggml_is_view_op(node->op)) { continue; } - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr != NULL) { - cur_allocr = node_allocr; + int tensor_backend_id = tensor_backend_id(node); + if (tensor_backend_id != -1) { + cur_backend_id = tensor_backend_id; } else { - node_allocr(node) = cur_allocr; + tensor_backend_id(node) = cur_backend_id; SET_CAUSE(node, "2.4"); } } @@ -1247,9 +1239,9 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // pass 3: assign backends to remaining src from dst and view_src for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - ggml_tallocr_t cur_allocr = node_allocr(node); - if (node->view_src != NULL && cur_allocr == NULL) { - cur_allocr = node_allocr(node) = node_allocr(node->view_src); + int cur_backend_id = tensor_backend_id(node); + if (node->view_src != NULL && cur_backend_id == -1) { + cur_backend_id = tensor_backend_id(node) = tensor_backend_id(node->view_src); SET_CAUSE(node, "3.vsrc"); } for (int j = 0; j < GGML_MAX_SRC; j++) { @@ -1257,14 +1249,14 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g if (src == NULL) { break; } - ggml_tallocr_t src_allocr = node_allocr(src); - if (src_allocr == NULL) { + int src_backend_id = tensor_backend_id(src); + if (src_backend_id == -1) { if (src->view_src != NULL) { // views are always on the same backend as the source - node_allocr(src) = node_allocr(src->view_src); + tensor_backend_id(src) = tensor_backend_id(src->view_src); SET_CAUSE(src, "3.vsrc"); } else { - node_allocr(src) = cur_allocr; + tensor_backend_id(src) = cur_backend_id; SET_CAUSE(src, "3.cur"); } } @@ -1281,15 +1273,14 @@ static void 
sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; if (!ggml_is_view_op(node->op)) { - sched->splits[0].tallocr = node_allocr(node); + sched->splits[0].backend_id = tensor_backend_id(node); break; } } sched->splits[0].i_start = 0; sched->splits[0].n_inputs = 0; memset(sched->splits[0].inputs, 0, sizeof(sched->splits[0].inputs)); //HACK - ggml_tallocr_t cur_allocr = sched->splits[0].tallocr; - size_t cur_backend_id = sched_allocr_prio(sched, cur_allocr); + int cur_backend_id = sched->splits[0].backend_id; for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; @@ -1297,19 +1288,18 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g continue; } - ggml_tallocr_t node_allocr = node_allocr(node); + int tensor_backend_id = tensor_backend_id(node); - GGML_ASSERT(node_allocr != NULL); // all nodes should be assigned by now + GGML_ASSERT(tensor_backend_id != -1); // all nodes should be assigned by now - if (node_allocr != cur_allocr) { + if (tensor_backend_id != cur_backend_id) { sched->splits[cur_split].i_end = i; cur_split++; GGML_ASSERT(cur_split < GGML_MAX_SPLITS); - sched->splits[cur_split].tallocr = node_allocr; + sched->splits[cur_split].backend_id = tensor_backend_id; sched->splits[cur_split].i_start = i; sched->splits[cur_split].n_inputs = 0; - cur_allocr = node_allocr; - cur_backend_id = sched_allocr_prio(sched, cur_allocr); + cur_backend_id = tensor_backend_id; } // find inputs that are not on the same backend @@ -1318,43 +1308,25 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g if (src == NULL) { break; } - ggml_tallocr_t src_allocr = node_allocr(src); - GGML_ASSERT(src_allocr != NULL); // all inputs should be assigned by now - if (src_allocr != node_allocr) { + int src_backend_id = tensor_backend_id(src); + assert(src_backend_id != -1); // all inputs should be assigned by now + if (src_backend_id != tensor_backend_id) { // create a copy of the input in the split's backend size_t id = hash_id(src); - if (sched->node_copies[id][cur_backend_id] == NULL) { - ggml_backend_t backend = get_allocr_backend(sched, cur_allocr); + if (sched->tensor_copies[id][cur_backend_id] == NULL) { + ggml_backend_t backend = sched->backends[cur_backend_id]; struct ggml_tensor * tensor_copy = ggml_dup_tensor_layout(sched->ctx, src); ggml_format_name(tensor_copy, "%s#%s", ggml_backend_name(backend), src->name); - sched->node_copies[id][cur_backend_id] = tensor_copy; - node_allocr(tensor_copy) = cur_allocr; + sched->tensor_copies[id][cur_backend_id] = tensor_copy; + tensor_backend_id(tensor_copy) = cur_backend_id; SET_CAUSE(tensor_copy, "4.cpy"); int n_inputs = sched->splits[cur_split].n_inputs++; GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); sched->splits[cur_split].inputs[n_inputs] = src; } - node->src[j] = sched->node_copies[id][cur_backend_id]; - -#if 0 - // check if the input is already in the split - bool found = false; - for (int k = 0; k < sched->splits[cur_split].n_inputs; k++) { - if (sched->splits[cur_split].inputs[k] == src) { - found = true; - break; - } - } - - if (!found) { - int n_inputs = sched->splits[cur_split].n_inputs++; - //printf("split %d input %d: %s (%s)\n", cur_split, n_inputs, src->name, ggml_backend_name(get_allocr_backend(sched, src_allocr))); - GGML_ASSERT(n_inputs < GGML_MAX_SPLIT_INPUTS); - sched->splits[cur_split].inputs[n_inputs] = src; - } -#endif + node->src[j] = 
sched->tensor_copies[id][cur_backend_id]; } } } @@ -1369,30 +1341,30 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g // sanity check: all sources should have the same backend as the node for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - ggml_tallocr_t node_allocr = node_allocr(node); - if (node_allocr == NULL) { + ggml_backend_t tensor_backend = tensor_backend(node); + if (tensor_backend == NULL) { fprintf(stderr, "!!!!!!! %s has no backend\n", node->name); } - if (node->view_src != NULL && node_allocr != node_allocr(node->view_src)) { + if (node->view_src != NULL && tensor_backend != tensor_backend(node->view_src)) { fprintf(stderr, "!!!!!!! %s has backend %s, view_src %s has backend %s\n", - node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", - node->view_src->name, node_allocr(node->view_src) ? ggml_backend_name(get_allocr_backend(sched, node_allocr(node->view_src))) : "NULL"); + node->name, tensor_backend ? ggml_backend_name(tensor_backend) : "NULL", + node->view_src->name, tensor_backend(node->view_src) ? ggml_backend_name(tensor_backend(node->view_src)) : "NULL"); } for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { break; } - ggml_tallocr_t src_allocr = node_allocr(src); - if (src_allocr != node_allocr /* && src_backend != NULL */) { // ignore nulls for now + ggml_backend_t src_backend = tensor_backend(src); + if (src_backend != tensor_backend /* && src_backend != NULL */) { fprintf(stderr, "!!!! %s has backend %s, src %d (%s) has backend %s\n", - node->name, node_allocr ? ggml_backend_name(get_allocr_backend(sched, node_allocr)) : "NULL", - j, src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL"); + node->name, tensor_backend ? ggml_backend_name(tensor_backend) : "NULL", + j, src->name, src_backend ? ggml_backend_name(src_backend) : "NULL"); } - if (src->view_src != NULL && src_allocr != node_allocr(src->view_src)) { + if (src->view_src != NULL && src_backend != tensor_backend(src->view_src)) { fprintf(stderr, "!!!!!!! [src] %s has backend %s, view_src %s has backend %s\n", - src->name, src_allocr ? ggml_backend_name(get_allocr_backend(sched, src_allocr)) : "NULL", - src->view_src->name, node_allocr(src->view_src) ? ggml_backend_name(get_allocr_backend(sched, node_allocr(src->view_src))) : "NULL"); + src->name, src_backend ? ggml_backend_name(src_backend) : "NULL", + src->view_src->name, tensor_backend(src->view_src) ? 
ggml_backend_name(tensor_backend(src->view_src)) : "NULL"); } } } @@ -1406,32 +1378,45 @@ static void sched_split_graph(ggml_backend_sched_t sched, struct ggml_cgraph * g struct ggml_backend_sched_split * split = &sched->splits[i]; split->graph = ggml_graph_view(graph, split->i_start, split->i_end); - // add inputs to the graph copy so that they are allocated by ggml-alloc at the start of the split for (int j = 0; j < split->n_inputs; j++) { struct ggml_tensor * input = split->inputs[j]; - struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][sched_allocr_prio(sched, split->tallocr)]; + struct ggml_tensor * input_cpy = sched->tensor_copies[hash_id(input)][split->backend_id]; + // add a dependency to the input source so that it is not freed before the copy is done - GGML_ASSERT(input_cpy->src[0] == NULL || input_cpy->src[0] == input); - input_cpy->src[0] = input; + struct ggml_tensor * input_dep = ggml_view_tensor(sched->ctx, input); + sched->node_backend_ids[graph_copy->n_nodes] = tensor_backend_id(input); + graph_copy->nodes[graph_copy->n_nodes++] = input_dep; + + // add a dependency to the input copy so that it is allocated at the start of the split + sched->node_backend_ids[graph_copy->n_nodes] = split->backend_id; graph_copy->nodes[graph_copy->n_nodes++] = input_cpy; } for (int j = split->i_start; j < split->i_end; j++) { + sched->node_backend_ids[graph_copy->n_nodes] = tensor_backend_id(graph->nodes[j]); graph_copy->nodes[graph_copy->n_nodes++] = graph->nodes[j]; } } sched->graph = graph_copy; } -static void sched_alloc_splits(ggml_backend_sched_t sched) { - ggml_gallocr_alloc_graph_n( - sched->galloc, - sched->graph, - sched->hash_set, - sched->node_talloc); +static bool ggml_backend_sched_alloc_splits(ggml_backend_sched_t sched) { + // ggml_gallocr_reserve_n(sched->galloc, sched->graph, sched->node_backend_ids); + if (!ggml_gallocr_alloc_graph(sched->galloc, sched->graph)) { +#ifndef NDEBUG + fprintf(stderr, "ggml_backend_sched: failed to allocate graph, reserving\n"); +#endif + ggml_gallocr_reserve_n(sched->galloc, sched->graph, sched->node_backend_ids); + if (!ggml_gallocr_alloc_graph(sched->galloc, sched->graph)) { + fprintf(stderr, "ggml_backend_sched: failed to allocate graph\n"); + return false; + } + } + + return true; } -static void sched_compute_splits(ggml_backend_sched_t sched) { +static bool ggml_backend_sched_compute_splits(ggml_backend_sched_t sched) { uint64_t copy_us[GGML_MAX_BACKENDS] = {0}; uint64_t compute_us[GGML_MAX_BACKENDS] = {0}; @@ -1439,20 +1424,18 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { for (int i = 0; i < sched->n_splits; i++) { struct ggml_backend_sched_split * split = &splits[i]; - ggml_backend_t split_backend = get_allocr_backend(sched, split->tallocr); - int split_backend_id = sched_backend_prio(sched, split_backend); + int split_backend_id = split->backend_id; + ggml_backend_t split_backend = sched->backends[split_backend_id]; // copy the input tensors to the split backend uint64_t copy_start_us = ggml_time_us(); for (int j = 0; j < split->n_inputs; j++) { struct ggml_tensor * input = split->inputs[j]; - struct ggml_tensor * input_cpy = sched->node_copies[hash_id(input)][split_backend_id]; + struct ggml_tensor * input_cpy = sched->tensor_copies[hash_id(input)][split_backend_id]; GGML_ASSERT(input->buffer != NULL); GGML_ASSERT(input_cpy->buffer != NULL); - // TODO: avoid this copy if it was already copied in a previous split, and the input didn't change - // this is important to avoid copying constants such as 
KQ_mask and inp_pos multiple times ggml_backend_tensor_copy_async(split_backend, input, input_cpy); } //ggml_backend_synchronize(split_backend); // necessary to measure copy time @@ -1468,7 +1451,9 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { uint64_t compute_start_us = ggml_time_us(); if (!sched->callback_eval) { - ggml_backend_graph_compute(split_backend, &split->graph); + if (!ggml_backend_graph_compute(split_backend, &split->graph)) { + return false; + } //ggml_backend_synchronize(split_backend); // necessary to measure compute time } else { // similar to ggml_backend_compare_graph_backend @@ -1488,7 +1473,9 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { struct ggml_cgraph gv = ggml_graph_view(&split->graph, j0, j1 + 1); - ggml_backend_graph_compute(split_backend, &gv); + if (!ggml_backend_graph_compute(split_backend, &gv)) { + return false; + } if (need && !sched->callback_eval(t, false, sched->callback_eval_user_data)) { break; @@ -1510,19 +1497,8 @@ static void sched_compute_splits(ggml_backend_sched_t sched) { } } #endif -} -static void sched_reset(ggml_backend_sched_t sched) { - for (int i = 0; i < sched->n_backends; i++) { - ggml_tallocr_reset(sched->tallocs[i]); - } - // reset state for the next run - size_t hash_size = sched->hash_set.size; - memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); - memset(sched->node_talloc, 0, sizeof(sched->node_talloc[0]) * hash_size); - memset(sched->node_copies, 0, sizeof(sched->node_copies[0]) * hash_size); - - sched->is_reset = true; + return true; } ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size) { @@ -1532,9 +1508,10 @@ ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_back struct ggml_backend_sched * sched = calloc(sizeof(struct ggml_backend_sched), 1); // initialize hash table - sched->hash_set = ggml_hash_set_new(graph_size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); - sched->node_talloc = calloc(sizeof(sched->node_talloc[0]) * sched->hash_set.size, 1); - sched->node_copies = calloc(sizeof(sched->node_copies[0]) * sched->hash_set.size, 1); + sched->hash_set = ggml_hash_set_new(graph_size + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); + sched->tensor_backend_id = calloc(sizeof(sched->tensor_backend_id[0]), sched->hash_set.size); + sched->tensor_copies = calloc(sizeof(sched->tensor_copies[0]), sched->hash_set.size); + sched->node_backend_ids = calloc(sizeof(sched->node_backend_ids[0]), graph_size); sched->n_backends = n_backends; for (int i = 0; i < n_backends; i++) { @@ -1542,14 +1519,9 @@ ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_back sched->bufts[i] = bufts ? 
bufts[i] : ggml_backend_get_default_buffer_type(backends[i]); } - sched->galloc = ggml_gallocr_new(); + sched->galloc = ggml_gallocr_new_n(sched->bufts, n_backends); - // init measure allocs for each backend - for (int i = 0; i < n_backends; i++) { - sched->tallocs[i] = ggml_tallocr_new_measure_from_buft(sched->bufts[i]); - } - - sched_reset(sched); + ggml_backend_sched_reset(sched); return sched; } @@ -1558,49 +1530,54 @@ void ggml_backend_sched_free(ggml_backend_sched_t sched) { if (sched == NULL) { return; } - for (int i = 0; i < sched->n_backends; i++) { - ggml_tallocr_free(sched->tallocs[i]); - } ggml_gallocr_free(sched->galloc); ggml_free(sched->ctx); free(sched->hash_set.keys); - free(sched->node_talloc); - free(sched->node_copies); + free(sched->tensor_backend_id); + free(sched->tensor_copies); + free(sched->node_backend_ids); free(sched); } -void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph) { - GGML_ASSERT(ggml_tallocr_is_measure(sched->tallocs[0])); // can only be initialized once +void ggml_backend_sched_reset(ggml_backend_sched_t sched) { + // reset state for the next run + size_t hash_size = sched->hash_set.size; + memset(sched->hash_set.keys, 0, sizeof(sched->hash_set.keys[0]) * hash_size); // NOLINT + memset(sched->tensor_backend_id, -1, sizeof(sched->tensor_backend_id[0]) * hash_size); + memset(sched->tensor_copies, 0, sizeof(sched->tensor_copies[0]) * hash_size); - sched_split_graph(sched, measure_graph); - sched_alloc_splits(sched); - - // allocate buffers and reset allocators - for (int i = 0; i < sched->n_backends; i++) { - size_t size = ggml_tallocr_max_size(sched->tallocs[i]); - ggml_tallocr_free(sched->tallocs[i]); - sched->tallocs[i] = ggml_tallocr_new_from_buft(sched->bufts[i], size); - } - - sched_reset(sched); + sched->is_reset = true; } -void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { +bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph) { + ggml_backend_sched_split_graph(sched, measure_graph); + + if (!ggml_gallocr_reserve_n(sched->galloc, sched->graph, sched->node_backend_ids)) { + return false; + } + + ggml_backend_sched_reset(sched); + return true; +} + +bool ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph) { GGML_ASSERT((int)sched->hash_set.size >= graph->n_nodes + GGML_MAX_SPLITS*GGML_MAX_SPLIT_INPUTS); if (!sched->is_reset) { - sched_reset(sched); + ggml_backend_sched_reset(sched); } - sched_split_graph(sched, graph); - sched_alloc_splits(sched); - sched_compute_splits(sched); -} + ggml_backend_sched_split_graph(sched, graph); + if (!ggml_backend_sched_alloc_splits(sched)) { + return false; + } -void ggml_backend_sched_reset(ggml_backend_sched_t sched) { - sched_reset(sched); -} + if (!ggml_backend_sched_compute_splits(sched)) { + return false; + } + return true; +} void ggml_backend_sched_set_eval_callback(ggml_backend_sched_t sched, ggml_backend_sched_eval_callback callback, void * user_data) { sched->callback_eval = callback; @@ -1611,37 +1588,30 @@ int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched) { return sched->n_splits; } -ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend) { - int backend_index = sched_backend_prio(sched, backend); +size_t ggml_backend_sched_get_buffer_size(ggml_backend_sched_t sched, ggml_backend_t backend) { + int backend_index = ggml_backend_sched_backend_id(sched, backend); 
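+    // note: buffer sizes now come from the shared graph allocator (ggml_gallocr),
+    // indexed by backend id, instead of from a per-backend ggml_tallocr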
GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); - return sched->tallocs[backend_index]; -} - -ggml_backend_buffer_t ggml_backend_sched_get_buffer(ggml_backend_sched_t sched, ggml_backend_t backend) { - int backend_index = sched_backend_prio(sched, backend); - GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); - return ggml_tallocr_get_buffer(sched->tallocs[backend_index]); + return ggml_gallocr_get_buffer_size(sched->galloc, backend_index); } void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend) { - int backend_index = sched_backend_prio(sched, backend); + int backend_index = ggml_backend_sched_backend_id(sched, backend); GGML_ASSERT(backend_index >= 0 && backend_index < sched->n_backends); - node_allocr(node) = sched->tallocs[backend_index]; + tensor_backend_id(node) = backend_index; } ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node) { - ggml_tallocr_t allocr = node_allocr(node); - if (allocr == NULL) { + int backend_index = tensor_backend_id(node); + if (backend_index == -1) { return NULL; } - return get_allocr_backend(sched, allocr); + return sched->backends[backend_index]; } // utils void ggml_backend_view_init(ggml_backend_buffer_t buffer, struct ggml_tensor * tensor) { GGML_ASSERT(tensor->buffer == NULL); - //GGML_ASSERT(tensor->data == NULL); // views of pre-allocated tensors may have the data set in ggml_new_tensor, but still need to be initialized by the backend GGML_ASSERT(tensor->view_src != NULL); GGML_ASSERT(tensor->view_src->buffer != NULL); GGML_ASSERT(tensor->view_src->data != NULL); @@ -1665,7 +1635,7 @@ void ggml_backend_tensor_alloc(ggml_backend_buffer_t buffer, struct ggml_tensor ggml_backend_buffer_init_tensor(buffer, tensor); } -static struct ggml_tensor * graph_dup_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, +static struct ggml_tensor * graph_copy_dup_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, struct ggml_context * ctx_allocated, struct ggml_context * ctx_unallocated, struct ggml_tensor * src) { GGML_ASSERT(src != NULL); @@ -1678,7 +1648,7 @@ static struct ggml_tensor * graph_dup_tensor(struct ggml_hash_set hash_set, stru struct ggml_tensor * dst = ggml_dup_tensor_layout(src->data && !src->view_src ? 
ctx_allocated : ctx_unallocated, src); if (src->view_src != NULL) { - dst->view_src = graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, src->view_src); + dst->view_src = graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, src->view_src); dst->view_offs = src->view_offs; } dst->op = src->op; @@ -1691,14 +1661,14 @@ static struct ggml_tensor * graph_dup_tensor(struct ggml_hash_set hash_set, stru if (s == NULL) { break; } - dst->src[i] = graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, s); + dst->src[i] = graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, s); } node_copies[id] = dst; return dst; } -static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, bool * node_init, struct ggml_tensor * src) { +static void graph_copy_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor ** node_copies, bool * node_init, struct ggml_tensor * src) { size_t id = ggml_hash_find(hash_set, src); if (node_init[id]) { return; @@ -1707,7 +1677,7 @@ static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor struct ggml_tensor * dst = node_copies[id]; if (dst->view_src != NULL) { - graph_init_tensor(hash_set, node_copies, node_init, src->view_src); + graph_copy_init_tensor(hash_set, node_copies, node_init, src->view_src); ggml_backend_view_init(dst->view_src->buffer, dst); } else { @@ -1720,17 +1690,17 @@ static void graph_init_tensor(struct ggml_hash_set hash_set, struct ggml_tensor if (s == NULL) { break; } - graph_init_tensor(hash_set, node_copies, node_init, s); + graph_copy_init_tensor(hash_set, node_copies, node_init, s); } } struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, struct ggml_cgraph * graph) { struct ggml_hash_set hash_set = { /* .size = */ graph->visited_hash_table.size, - /* .keys = */ calloc(sizeof(hash_set.keys[0]) * graph->visited_hash_table.size, 1) + /* .keys = */ calloc(sizeof(hash_set.keys[0]), graph->visited_hash_table.size) // NOLINT }; - struct ggml_tensor ** node_copies = calloc(sizeof(node_copies[0]) * hash_set.size, 1); - bool * node_init = calloc(sizeof(node_init[0]) * hash_set.size, 1); + struct ggml_tensor ** node_copies = calloc(sizeof(node_copies[0]), hash_set.size); // NOLINT + bool * node_init = calloc(sizeof(node_init[0]), hash_set.size); struct ggml_init_params params = { /* .mem_size = */ ggml_tensor_overhead()*hash_set.size + ggml_graph_overhead_custom(graph->size, false), @@ -1759,7 +1729,7 @@ struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, s // dup nodes for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - graph_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, node); + graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, node); } // allocate nodes @@ -1784,7 +1754,7 @@ struct ggml_backend_graph_copy ggml_backend_graph_copy(ggml_backend_t backend, s // copy data and init views for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; - graph_init_tensor(hash_set, node_copies, node_init, node); + graph_copy_init_tensor(hash_set, node_copies, node_init, node); } // build graph copy diff --git a/ggml-backend.h b/ggml-backend.h index 282b3a9b7..f13c69bff 100644 --- a/ggml-backend.h +++ b/ggml-backend.h @@ -130,11 +130,7 @@ extern "C" { // in build_graph: build_graph(...) 
{ - // allocating tensors in a specific backend (optional, recommended: pre-allocate inputs in a different buffer) - alloc_cpu = ggml_backend_sched_get_allocr(sched, backend_cpu); - ggml_allocr_alloc(alloc_cpu, tensor); - - // manually assigning nodes to a backend (optional, shouldn't be needed in most cases) + // manually assign nodes to a backend (optional, should not be needed in most cases) struct ggml_tensor * node = ggml_mul_mat(ctx, ...); ggml_backend_sched_set_node_backend(sched, node, backend_gpu); } @@ -164,20 +160,19 @@ extern "C" { GGML_API ggml_backend_sched_t ggml_backend_sched_new(ggml_backend_t * backends, ggml_backend_buffer_type_t * bufts, int n_backends, size_t graph_size); GGML_API void ggml_backend_sched_free(ggml_backend_sched_t sched); // Initialize backend buffers from a measure graph - GGML_API void ggml_backend_sched_init_measure(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); + GGML_API bool ggml_backend_sched_reserve(ggml_backend_sched_t sched, struct ggml_cgraph * measure_graph); // Get the number of splits of the last graph GGML_API int ggml_backend_sched_get_n_splits(ggml_backend_sched_t sched); - GGML_API ggml_tallocr_t ggml_backend_sched_get_tallocr(ggml_backend_sched_t sched, ggml_backend_t backend); - GGML_API ggml_backend_buffer_t ggml_backend_sched_get_buffer (ggml_backend_sched_t sched, ggml_backend_t backend); + GGML_API size_t ggml_backend_sched_get_buffer_size(ggml_backend_sched_t sched, ggml_backend_t backend); GGML_API void ggml_backend_sched_set_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node, ggml_backend_t backend); GGML_API ggml_backend_t ggml_backend_sched_get_node_backend(ggml_backend_sched_t sched, struct ggml_tensor * node); // Allocate and compute graph on the backend scheduler - GGML_API void ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); + GGML_API bool ggml_backend_sched_graph_compute(ggml_backend_sched_t sched, struct ggml_cgraph * graph); - // Reset all assignments and allocators - must be called before using the sched allocators to allocate inputs + // Reset all assignments and allocators - must be called before changing the node backends GGML_API void ggml_backend_sched_reset(ggml_backend_sched_t sched); // Set a callback to be called for each resulting node during graph compute diff --git a/ggml.c b/ggml.c index e45b78d7e..d921d82fe 100644 --- a/ggml.c +++ b/ggml.c @@ -2649,7 +2649,7 @@ static struct ggml_tensor * ggml_new_tensor_impl( /*.nb =*/ { 0, 0, 0, 0 }, /*.op =*/ GGML_OP_NONE, /*.op_params =*/ { 0 }, - /*.is_param =*/ false, + /*.flags =*/ 0, /*.grad =*/ NULL, /*.src =*/ { NULL }, /*.perf_runs =*/ 0, @@ -6551,7 +6551,7 @@ struct ggml_tensor * ggml_cross_entropy_loss_back( void ggml_set_param( struct ggml_context * ctx, struct ggml_tensor * tensor) { - tensor->is_param = true; + tensor->flags |= GGML_TENSOR_FLAG_PARAM; GGML_ASSERT(tensor->grad == NULL); tensor->grad = ggml_dup_tensor(ctx, tensor); @@ -15367,7 +15367,7 @@ static struct ggml_tensor * ggml_recompute_graph_node( return NULL; } - if (node->is_param) { + if (node->flags & GGML_TENSOR_FLAG_PARAM) { return node; } @@ -15401,7 +15401,7 @@ static struct ggml_tensor * ggml_recompute_graph_node( clone->op = node->op; clone->grad = node->grad; - clone->is_param = node->is_param; + clone->flags = node->flags; clone->extra = node->extra; for (int k = 0; k < GGML_MAX_DIMS; ++k) { clone->nb[k] = node->nb[k]; @@ -16433,7 +16433,7 @@ void ggml_build_backward_expand(struct ggml_context * ctx, struct 
ggml_cgraph * for (int i = 0; i < gf->n_nodes; i++) { struct ggml_tensor * node = gf->nodes[i]; - if (node->is_param) { + if (node->flags & GGML_TENSOR_FLAG_PARAM) { GGML_PRINT_DEBUG("%s: found root node %p\n", __func__, (void *) node); ggml_build_forward_expand(gb, node->grad); } @@ -17918,7 +17918,7 @@ void ggml_graph_print(const struct ggml_cgraph * cgraph) { GGML_PRINT(" - %3d: [ %5" PRId64 ", %5" PRId64 ", %5" PRId64 "] %16s %s (%3d) cpu = %7.3f / %7.3f ms, wall = %7.3f / %7.3f ms\n", i, node->ne[0], node->ne[1], node->ne[2], - ggml_op_name(node->op), node->is_param ? "x" : node->grad ? "g" : " ", node->perf_runs, + ggml_op_name(node->op), (node->flags & GGML_TENSOR_FLAG_PARAM) ? "x" : node->grad ? "g" : " ", node->perf_runs, (double) node->perf_cycles / (double) ggml_cycles_per_ms(), (double) node->perf_cycles / (double) ggml_cycles_per_ms() / (double) node->perf_runs, (double) node->perf_time_us / 1000.0, @@ -18011,7 +18011,7 @@ void ggml_graph_dump_dot(const struct ggml_cgraph * gb, const struct ggml_cgraph continue; } - if (node->is_param) { + if (node->flags & GGML_TENSOR_FLAG_PARAM) { snprintf(color, sizeof(color), "yellow"); } else if (node->grad) { if (ggml_graph_find(gf, node)) { @@ -18185,7 +18185,7 @@ static enum ggml_opt_result ggml_opt_adam( int np = 0; int64_t nx = 0; for (int i = 0; i < gf->n_nodes; ++i) { - if (gf->nodes[i]->is_param) { + if (gf->nodes[i]->flags & GGML_TENSOR_FLAG_PARAM) { GGML_PRINT_DEBUG("found param %d: grad->op = %d\n", np, gf->nodes[i]->grad->op); GGML_ASSERT(np < GGML_MAX_PARAMS); @@ -18548,7 +18548,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( int np = 0; int nx = 0; for (int i = 0; i < gf->n_nodes; ++i) { - if (gf->nodes[i]->is_param) { + if (gf->nodes[i]->flags & GGML_TENSOR_FLAG_PARAM) { GGML_PRINT_DEBUG("found param %d: grad->op = %d\n", np, gf->nodes[i]->grad->op); GGML_ASSERT(np < GGML_MAX_PARAMS); @@ -19023,6 +19023,16 @@ enum ggml_opt_result ggml_opt_resume_g( //////////////////////////////////////////////////////////////////////////////// +void ggml_set_input(struct ggml_tensor * tensor) { + tensor->flags |= GGML_TENSOR_FLAG_INPUT; +} + +void ggml_set_output(struct ggml_tensor * tensor) { + tensor->flags |= GGML_TENSOR_FLAG_OUTPUT; +} + +//////////////////////////////////////////////////////////////////////////////// + void ggml_quantize_init(enum ggml_type type) { ggml_critical_section_start(); diff --git a/ggml.h b/ggml.h index 9cfec5bac..01cecc1e1 100644 --- a/ggml.h +++ b/ggml.h @@ -505,11 +505,17 @@ extern "C" { enum ggml_log_level { GGML_LOG_LEVEL_ERROR = 2, - GGML_LOG_LEVEL_WARN = 3, - GGML_LOG_LEVEL_INFO = 4, + GGML_LOG_LEVEL_WARN = 3, + GGML_LOG_LEVEL_INFO = 4, GGML_LOG_LEVEL_DEBUG = 5 }; + enum ggml_tensor_flag { + GGML_TENSOR_FLAG_INPUT = 1, + GGML_TENSOR_FLAG_OUTPUT = 2, + GGML_TENSOR_FLAG_PARAM = 4, + }; + // ggml object struct ggml_object { size_t offs; @@ -543,7 +549,7 @@ extern "C" { // op params - allocated as int32_t for alignment int32_t op_params[GGML_MAX_OP_PARAMS / sizeof(int32_t)]; - bool is_param; + int32_t flags; struct ggml_tensor * grad; struct ggml_tensor * src[GGML_MAX_SRC]; @@ -2092,6 +2098,12 @@ extern "C" { ggml_opt_callback callback, void * callback_data); + // + // tensor flags + // + GGML_API void ggml_set_input(struct ggml_tensor * tensor); + GGML_API void ggml_set_output(struct ggml_tensor * tensor); + // // quantization // diff --git a/llama.cpp b/llama.cpp index d1ee26ce2..a5b873a7b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1872,8 +1872,6 @@ struct llama_context { // memory buffers used to 
evaluate the model std::vector buf_compute_meta; ggml_backend_sched_t sched = nullptr; - // allocator for the input tensors - ggml_tallocr * alloc = nullptr; // input tensors ggml_backend_buffer_t buf_input = nullptr; @@ -7199,12 +7197,10 @@ struct llm_build_context { static struct ggml_cgraph * llama_build_graph( llama_context & lctx, - const llama_batch & batch) { + const llama_batch & batch, + bool worst_case) { const auto & model = lctx.model; - // check if we should build the worst-case graph (for memory measurement) - const bool worst_case = ggml_tallocr_is_measure(lctx.alloc); - // this callback allows us to apply custom logic to each tensor (e.g. ggml-alloc, offloading, etc.) llm_build_cb cb = [&](struct ggml_tensor * cur, const char * name, int il) { if (il >= 0) { @@ -7225,77 +7221,6 @@ static struct ggml_cgraph * llama_build_graph( struct llm_build_context llm(lctx, batch, cb, worst_case); - // - // set input data - // - - if (!ggml_tallocr_is_measure(lctx.alloc)) { - if (batch.token) { - const int64_t n_tokens = batch.n_tokens; - - ggml_backend_tensor_set(lctx.inp_tokens, batch.token, 0, n_tokens*ggml_element_size(lctx.inp_tokens)); - } - - if (batch.embd) { - const int64_t n_embd = llm.n_embd; - const int64_t n_tokens = batch.n_tokens; - - ggml_backend_tensor_set(lctx.inp_embd, batch.embd, 0, n_tokens*n_embd*ggml_element_size(lctx.inp_embd)); - } - - if (batch.pos) { - const int64_t n_tokens = batch.n_tokens; - - ggml_backend_tensor_set(lctx.inp_pos, batch.pos, 0, n_tokens*ggml_element_size(lctx.inp_pos)); - } - - { - const int64_t n_kv = llm.n_kv; - const int64_t n_tokens = batch.n_tokens; - - GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_KQ_mask->buffer)); - float * data = (float *) lctx.inp_KQ_mask->data; - - for (int h = 0; h < 1; ++h) { - for (int j = 0; j < n_tokens; ++j) { - const llama_pos pos = batch.pos[j]; - const llama_seq_id seq_id = batch.seq_id[j][0]; - - for (int i = 0; i < n_kv; ++i) { - float f; - if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || - (llm.causal_attn && lctx.kv_self.cells[i].pos > pos)) { - f = -INFINITY; - } else { - f = 0; - } - data[h*(n_kv*n_tokens) + j*n_kv + i] = f; - } - } - } - } - - if (llm.do_rope_shift) { - const int64_t n_ctx = llm.n_ctx; - - GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_K_shift->buffer)); - int32_t * data = (int32_t *) lctx.inp_K_shift->data; - - for (int i = 0; i < n_ctx; ++i) { - data[i] = lctx.kv_self.cells[i].delta; - } - } - - { - GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); - float * data = (float *) lctx.inp_sum->data; - - for (int i = 0; i < batch.n_tokens; ++i) { - data[i] = 1.0f/float(batch.n_tokens); - } - } - } - llm.init(); switch (model.arch) { @@ -7384,6 +7309,83 @@ static struct ggml_cgraph * llama_build_graph( return result; } +static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { + // + // set input data + // + + const auto & hparams = lctx.model.hparams; + const auto & cparams = lctx.cparams; + const auto & kv_self = lctx.kv_self; + + if (batch.token) { + const int64_t n_tokens = batch.n_tokens; + + ggml_backend_tensor_set(lctx.inp_tokens, batch.token, 0, n_tokens*ggml_element_size(lctx.inp_tokens)); + } + + if (batch.embd) { + const int64_t n_embd = hparams.n_embd; + const int64_t n_tokens = batch.n_tokens; + + ggml_backend_tensor_set(lctx.inp_embd, batch.embd, 0, n_tokens*n_embd*ggml_element_size(lctx.inp_embd)); + } + + if (batch.pos) { + const int64_t n_tokens = batch.n_tokens; + + ggml_backend_tensor_set(lctx.inp_pos, batch.pos, 0, 
n_tokens*ggml_element_size(lctx.inp_pos)); + } + + { + const int64_t n_kv = kv_self.n; + const int64_t n_tokens = batch.n_tokens; + + assert(ggml_backend_buffer_is_host(lctx.inp_KQ_mask->buffer)); + + float * data = (float *) lctx.inp_KQ_mask->data; + + for (int h = 0; h < 1; ++h) { + for (int j = 0; j < n_tokens; ++j) { + const llama_pos pos = batch.pos[j]; + const llama_seq_id seq_id = batch.seq_id[j][0]; + + for (int i = 0; i < n_kv; ++i) { + float f; + if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { + f = -INFINITY; + } else { + f = 0; + } + data[h*(n_kv*n_tokens) + j*n_kv + i] = f; + } + } + } + } + + + { + assert(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); + float * data = (float *) lctx.inp_sum->data; + + for (int i = 0; i < batch.n_tokens; ++i) { + data[i] = 1.0f/float(batch.n_tokens); + } + } + + if (kv_self.has_shift) { + const int64_t n_ctx = cparams.n_ctx; + + assert(ggml_backend_buffer_is_host(lctx.inp_K_shift->buffer)); + + int32_t * data = (int32_t *) lctx.inp_K_shift->data; + + for (int i = 0; i < n_ctx; ++i) { + data[i] = lctx.kv_self.cells[i].delta; + } + } +} + // decode a batch of tokens by evaluating the transformer // // - lctx: llama context @@ -7482,7 +7484,7 @@ static int llama_decode_internal( ggml_backend_sched_reset(lctx.sched); ggml_backend_sched_set_eval_callback(lctx.sched, lctx.cparams.cb_eval, lctx.cparams.cb_eval_user_data); - ggml_cgraph * gf = llama_build_graph(lctx, batch); + ggml_cgraph * gf = llama_build_graph(lctx, batch, false); // the output is always the last tensor in the graph struct ggml_tensor * res = gf->nodes[gf->n_nodes - 1]; @@ -7527,6 +7529,9 @@ static int llama_decode_internal( if (lctx.backend_cpu != nullptr) { ggml_backend_cpu_set_n_threads(lctx.backend_cpu, n_threads); } + + llama_set_inputs(lctx, batch); + ggml_backend_sched_graph_compute(lctx.sched, gf); // fprintf(stderr, "splits: %d\n", ggml_backend_sched_get_n_splits(lctx.sched)); @@ -11278,23 +11283,27 @@ struct llama_context * llama_new_context_with_model( ctx->buf_compute_meta.resize(ggml_tensor_overhead()*LLAMA_MAX_NODES + ggml_graph_overhead()); ctx->sched = ggml_backend_sched_new(ctx->backends.data(), backend_buft.data(), ctx->backends.size(), LLAMA_MAX_NODES); - ctx->alloc = ggml_backend_sched_get_tallocr(ctx->sched, ctx->backend_cpu); // build worst-case graph int n_tokens = (int)std::min(cparams.n_ctx, cparams.n_batch); int n_past = cparams.n_ctx - n_tokens; llama_token token = llama_token_bos(&ctx->model); // not actually used by llama_build_graph, but required to choose between token and embedding inputs graph - ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0)); + ggml_cgraph * gf = llama_build_graph(*ctx, llama_batch_get_one(&token, n_tokens, n_past, 0), true); // initialize scheduler with the worst-case graph - ggml_backend_sched_init_measure(ctx->sched, gf); - ctx->alloc = ggml_backend_sched_get_tallocr(ctx->sched, ctx->backend_cpu); + if (!ggml_backend_sched_reserve(ctx->sched, gf)) { + LLAMA_LOG_ERROR("%s: failed to allocate compute buffers\n", __func__); + llama_free(ctx); + return nullptr; + } - for (ggml_backend_t backend : ctx->backends) { - ggml_backend_buffer_t buf = ggml_backend_sched_get_buffer(ctx->sched, backend); + for (size_t i = 0; i < ctx->backends.size(); i++) { + ggml_backend_t backend = ctx->backends[i]; + ggml_backend_buffer_type_t buft = backend_buft[i]; + size_t size = ggml_backend_sched_get_buffer_size(ctx->sched, backend); LLAMA_LOG_INFO("%s: %10s compute 
buffer size = %8.2f MiB\n", __func__, - ggml_backend_buffer_name(buf), - ggml_backend_buffer_get_size(buf) / 1024.0 / 1024.0); + ggml_backend_buft_name(buft), + size / 1024.0 / 1024.0); } // note: the number of splits during measure is higher than during inference due to the kv shift diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 6ae75bc31..7a23ab162 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -2c7cf49810d523b9632da393a9e8270b60bf3b24 +5070f078a67c18c11736e78316ab715ca9afde16 From 4a46d2b7923be83d6019251671ee63aa1fa0d6bc Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Mon, 12 Feb 2024 09:38:44 +0100 Subject: [PATCH 730/859] llava : remove prog parameter from ArgumentParser (#5457) * llava: remove prog parameter from ArgumentParser This commit removes the `prog` parameter from `ArgumentParser` so that it uses the default value which is the name of the script. The motivation for this change is that currently the usage output looks like this: ```console $ python examples/llava/convert-image-encoder-to-gguf.py --help usage: convert_hf_to_gguf.py [-h] ... ``` And with this change it will look like this: ```console $ python examples/llava/convert-image-encoder-to-gguf.py --help usage: convert-image-encoder-to-gguf.py [-h] ... ``` Signed-off-by: Daniel Bevenius * ci: add W503 to flake8 ignore list This commit adds W503 to the ignore list for flake8. This is done to avoid the following error: W503 line break before binary operator Signed-off-by: Daniel Bevenius --------- Signed-off-by: Daniel Bevenius --- .github/workflows/python-lint.yml | 2 +- examples/llava/convert-image-encoder-to-gguf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-lint.yml b/.github/workflows/python-lint.yml index 56d17b66c..ea0a05ea1 100644 --- a/.github/workflows/python-lint.yml +++ b/.github/workflows/python-lint.yml @@ -16,5 +16,5 @@ jobs: - name: flake8 Lint uses: py-actions/flake8@v2 with: - ignore: "E203,E211,E221,E225,E231,E241,E251,E261,E266,E501,E701,E704" + ignore: "E203,E211,E221,E225,E231,E241,E251,E261,E266,E501,E701,E704,W503" exclude: "examples/*,examples/*/**,*/**/__init__.py" diff --git a/examples/llava/convert-image-encoder-to-gguf.py b/examples/llava/convert-image-encoder-to-gguf.py index f5a3c9b46..e204b56be 100644 --- a/examples/llava/convert-image-encoder-to-gguf.py +++ b/examples/llava/convert-image-encoder-to-gguf.py @@ -71,7 +71,7 @@ def bytes_to_unicode(): return dict(zip(bs, cs)) -ap = argparse.ArgumentParser(prog="convert_hf_to_gguf.py") +ap = argparse.ArgumentParser() ap.add_argument("-m", "--model-dir", help="Path to model directory cloned from HF Hub", required=True) ap.add_argument("--use-f32", action="store_true", default=False, help="Use f32 instead of f16") ap.add_argument("--text-only", action="store_true", required=False, From 43fe07c1a4f3a58612e1d9543f7c6b556710f5d0 Mon Sep 17 00:00:00 2001 From: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> Date: Mon, 12 Feb 2024 20:22:05 +0530 Subject: [PATCH 731/859] ggml-sycl: Replace 3d ops with macro (#5458) * use macro * use macro * fix format --- ggml-sycl.cpp | 75 ++++++++++----------------------------------------- 1 file changed, 14 insertions(+), 61 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index dd562a898..cd4b3a1e1 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -11578,11 +11578,8 @@ static dpct::err0 ggml_sycl_cpy_tensor_2d(void *dst, } char * dst_ptr = (char *) dst; - const int64_t ne0 = 
src->ne[0]; - const int64_t nb0 = src->nb[0]; - const int64_t nb1 = src->nb[1]; - const int64_t nb2 = src->nb[2]; - const int64_t nb3 = src->nb[3]; + GGML_TENSOR_LOCALS_1(int64_t, ne, src, ne); + GGML_TENSOR_LOCALS(int64_t, nb, src, nb); const enum ggml_type type = src->type; const int64_t ts = ggml_type_size(type); const int64_t bs = ggml_blck_size(type); @@ -12426,9 +12423,7 @@ inline void ggml_sycl_op_alibi(const ggml_tensor *src0, const ggml_tensor *src1, GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); - const int64_t ne00 = src0->ne[0]; - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; + GGML_TENSOR_LOCALS_3(int64_t, ne0, src0, ne); const int64_t nrows = ggml_nrows(src0); //const int n_past = ((int32_t *) dst->op_params)[0]; @@ -12758,15 +12753,9 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, ggml_sycl_op_mul_mat_t op, const bool convert_src1_to_q8_1) try { - const int64_t ne00 = src0->ne[0]; - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; - const int64_t ne03 = src0->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne); - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - const int64_t ne12 = src1->ne[2]; - const int64_t ne13 = src1->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne); const int64_t nrows1 = ggml_nrows(src1); GGML_ASSERT(ne03 == ne13); @@ -13337,23 +13326,13 @@ static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); - const int64_t ne00 = src0->ne[0]; GGML_UNUSED(ne00); - const int64_t ne01 = src0->ne[1]; - const int64_t ne02 = src0->ne[2]; - const int64_t ne03 = src0->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne0, src0, ne); - const int64_t nb01 = src0->nb[1]; - const int64_t nb02 = src0->nb[2]; GGML_UNUSED(nb02); - const int64_t nb03 = src0->nb[3]; GGML_UNUSED(nb03); + GGML_TENSOR_LOCALS(int64_t, nb0, src0, nb); - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - const int64_t ne12 = src1->ne[2]; - const int64_t ne13 = src1->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne); - const int64_t nb11 = src1->nb[1]; - const int64_t nb12 = src1->nb[2]; GGML_UNUSED(nb12); - const int64_t nb13 = src1->nb[3]; GGML_UNUSED(nb13); + GGML_TENSOR_LOCALS(int64_t, nb1, src1, nb); const int64_t ne1 = ggml_nelements(src1); const int64_t ne = ggml_nelements(dst); @@ -13655,23 +13634,15 @@ static void ggml_sycl_mul_mat_id_sycl(ggml_tensor * dst) { GGML_ASSERT(src00->backend != GGML_BACKEND_GPU_SPLIT); GGML_ASSERT(src1->type == GGML_TYPE_F32); - const int64_t ne00 = src00->ne[0]; GGML_UNUSED(ne00); - const int64_t ne01 = src00->ne[1]; - const int64_t ne02 = src00->ne[2]; - const int64_t ne03 = src00->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne0, src00, ne); //const int64_t nb01 = src00->nb[1]; - const int64_t nb02 = src00->nb[2]; GGML_UNUSED(nb02); - const int64_t nb03 = src00->nb[3]; GGML_UNUSED(nb03); + GGML_TENSOR_LOCALS(int64_t, nb0, src00, nb); - const int64_t ne10 = src1->ne[0]; - const int64_t ne11 = src1->ne[1]; - const int64_t ne12 = src1->ne[2]; - const int64_t ne13 = src1->ne[3]; + GGML_TENSOR_LOCALS(int64_t, ne1, src1, ne); + GGML_TENSOR_LOCALS(int64_t, nb1, src1, nb); //const int64_t nb11 = src1->nb[1]; - const int64_t nb12 = src1->nb[2]; GGML_UNUSED(nb12); - const int64_t nb13 = src1->nb[3]; GGML_UNUSED(nb13); const int64_t ne1 = ggml_nelements(src1); const int64_t ne = ggml_nelements(dst); @@ -13940,25 +13911,7 @@ static void ggml_sycl_cpy(const 
ggml_tensor *src0, const ggml_tensor *src1,
     GGML_ASSERT(ggml_nbytes(src0) <= INT_MAX);
     GGML_ASSERT(ggml_nbytes(src1) <= INT_MAX);
 
-    const int64_t ne00 = src0->ne[0];
-    const int64_t ne01 = src0->ne[1];
-    const int64_t ne02 = src0->ne[2];
-
-
-    const int64_t nb00 = src0->nb[0];
-    const int64_t nb01 = src0->nb[1];
-    const int64_t nb02 = src0->nb[2];
-    const int64_t nb03 = src0->nb[3];
-
-    const int64_t ne10 = src1->ne[0];
-    const int64_t ne11 = src1->ne[1];
-    const int64_t ne12 = src1->ne[2];
-
-
-    const int64_t nb10 = src1->nb[0];
-    const int64_t nb11 = src1->nb[1];
-    const int64_t nb12 = src1->nb[2];
-    const int64_t nb13 = src1->nb[3];
+    GGML_TENSOR_BINARY_OP_LOCALS;
 
     SYCL_CHECK(ggml_sycl_set_device(g_main_device));
     dpct::queue_ptr main_stream = g_syclStreams[g_main_device_index][0];

From dbd8828eb03b9aa8d0af7e4c533d3c2f5b38aba6 Mon Sep 17 00:00:00 2001
From: Lee <44310445+lx200916@users.noreply.github.com>
Date: Tue, 13 Feb 2024 01:29:57 +0800
Subject: [PATCH 732/859] py : fix persimmon `n_rot` conversion (#5460)

* convert : fix persimmon official weight conversion to write correct n_rot.

* Update convert-persimmon-to-gguf.py

---------

Co-authored-by: Georgi Gerganov
---
 convert-persimmon-to-gguf.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/convert-persimmon-to-gguf.py b/convert-persimmon-to-gguf.py
index d2be805d1..def210531 100755
--- a/convert-persimmon-to-gguf.py
+++ b/convert-persimmon-to-gguf.py
@@ -88,7 +88,8 @@ def main():
     gguf_writer.add_embedding_length(hidden_size)
     gguf_writer.add_block_count(block_count)
     gguf_writer.add_feed_forward_length(hparams.ffn_hidden_size)
-    gguf_writer.add_rope_dimension_count(hidden_size // head_count)
+    # ref: https://github.com/ggerganov/llama.cpp/pull/4889/commits/eea19039fc52ea2dbd1aab45b59ab4e3e29a3443
+    gguf_writer.add_rope_dimension_count(hidden_size // head_count // 2)
     gguf_writer.add_head_count(head_count)
     gguf_writer.add_head_count_kv(head_count_kv)
     gguf_writer.add_rope_freq_base(hparams.rotary_emb_base)

From df334a11251b81fd0b6a0e51e7146e0ba9e973f2 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 12 Feb 2024 19:54:29 +0200
Subject: [PATCH 733/859] swift : package no longer uses ggml dependency (#5465)

* Revert "swift : update Package.swift to use ggml as dependency (#4691)"

This reverts commit ece9a45e8ffb73ad461c792720c2fec28b0137bc.
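The revert switches the SwiftPM target back to compiling the in-tree ggml sources (ggml.c, ggml-alloc.c, ggml-backend.c, ggml-quants.c, ggml-metal.m) directly, instead of pulling ggml in as an external package dependency.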
* spm : add ggml headers --- Package.swift | 24 +++++++++++++++++++----- spm-headers/ggml-alloc.h | 1 + spm-headers/ggml-backend.h | 1 + spm-headers/ggml.h | 1 + 4 files changed, 22 insertions(+), 5 deletions(-) create mode 120000 spm-headers/ggml-alloc.h create mode 120000 spm-headers/ggml-backend.h create mode 120000 spm-headers/ggml.h diff --git a/Package.swift b/Package.swift index 37524edee..b24c9204a 100644 --- a/Package.swift +++ b/Package.swift @@ -13,17 +13,31 @@ let package = Package( products: [ .library(name: "llama", targets: ["llama"]), ], - dependencies: [ - .package(url: "https://github.com/ggerganov/ggml.git", .branch("release")) - ], targets: [ .target( name: "llama", - dependencies: ["ggml"], path: ".", - exclude: ["ggml-metal.metal"], + exclude: [ + "cmake", + "examples", + "scripts", + "models", + "tests", + "CMakeLists.txt", + "ggml-cuda.cu", + "ggml-cuda.h", + "Makefile" + ], sources: [ + "ggml.c", "llama.cpp", + "ggml-alloc.c", + "ggml-backend.c", + "ggml-quants.c", + "ggml-metal.m", + ], + resources: [ + .process("ggml-metal.metal") ], publicHeadersPath: "spm-headers", cSettings: [ diff --git a/spm-headers/ggml-alloc.h b/spm-headers/ggml-alloc.h new file mode 120000 index 000000000..a49d385a1 --- /dev/null +++ b/spm-headers/ggml-alloc.h @@ -0,0 +1 @@ +../ggml-alloc.h \ No newline at end of file diff --git a/spm-headers/ggml-backend.h b/spm-headers/ggml-backend.h new file mode 120000 index 000000000..17c2cf14f --- /dev/null +++ b/spm-headers/ggml-backend.h @@ -0,0 +1 @@ +../ggml-backend.h \ No newline at end of file diff --git a/spm-headers/ggml.h b/spm-headers/ggml.h new file mode 120000 index 000000000..39215298f --- /dev/null +++ b/spm-headers/ggml.h @@ -0,0 +1 @@ +../ggml.h \ No newline at end of file From 099afc6274c859ca67146e725839f2d97a5ef313 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 12 Feb 2024 20:14:39 +0200 Subject: [PATCH 734/859] llama : fix quantization when tensors are missing (#5423) --- llama.cpp | 32 ++++++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index a5b873a7b..d316d067b 100644 --- a/llama.cpp +++ b/llama.cpp @@ -772,22 +772,37 @@ struct LLM_TN { llm_arch arch; std::string operator()(llm_tensor tensor) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return LLM_TENSOR_NAMES[arch].at(tensor); } std::string operator()(llm_tensor tensor, const std::string & suffix) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return LLM_TENSOR_NAMES[arch].at(tensor) + "." + suffix; } std::string operator()(llm_tensor tensor, int bid) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid); } std::string operator()(llm_tensor tensor, const std::string & suffix, int bid) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid) + "." + suffix; } std::string operator()(llm_tensor tensor, const std::string & suffix, int bid, int xid) const { + if (LLM_TENSOR_NAMES[arch].find(tensor) == LLM_TENSOR_NAMES[arch].end()) { + return "__missing__"; + } return ::format(LLM_TENSOR_NAMES[arch].at(tensor).c_str(), bid, xid) + "." 
+ suffix; } }; @@ -10227,6 +10242,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty } ++qs.i_ffn_up; } + // if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; //} // IK: let's remove this, else Q2_K is almost the same as Q3_K_S @@ -10286,19 +10302,19 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // K-quants case LLAMA_FTYPE_MOSTLY_Q2_K_S: - case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; + case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; case LLAMA_FTYPE_MOSTLY_Q3_K_XS: case LLAMA_FTYPE_MOSTLY_Q3_K_S: case LLAMA_FTYPE_MOSTLY_Q3_K_M: - case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; + case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; case LLAMA_FTYPE_MOSTLY_Q4_K_S: - case LLAMA_FTYPE_MOSTLY_Q4_K_M: quantized_type = GGML_TYPE_Q4_K; break; + case LLAMA_FTYPE_MOSTLY_Q4_K_M: quantized_type = GGML_TYPE_Q4_K; break; case LLAMA_FTYPE_MOSTLY_Q5_K_S: - case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break; - case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; - case LLAMA_FTYPE_MOSTLY_IQ2_XXS:quantized_type = GGML_TYPE_IQ2_XXS; break; - case LLAMA_FTYPE_MOSTLY_IQ2_XS :quantized_type = GGML_TYPE_IQ2_XS; break; - case LLAMA_FTYPE_MOSTLY_IQ3_XXS:quantized_type = GGML_TYPE_IQ3_XXS; break; + case LLAMA_FTYPE_MOSTLY_Q5_K_M: quantized_type = GGML_TYPE_Q5_K; break; + case LLAMA_FTYPE_MOSTLY_Q6_K: quantized_type = GGML_TYPE_Q6_K; break; + case LLAMA_FTYPE_MOSTLY_IQ2_XXS: quantized_type = GGML_TYPE_IQ2_XXS; break; + case LLAMA_FTYPE_MOSTLY_IQ2_XS: quantized_type = GGML_TYPE_IQ2_XS; break; + case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } From 895407f31b358e3d9335e847d13f033491ec8a5b Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Tue, 13 Feb 2024 09:07:57 +0200 Subject: [PATCH 735/859] ggml-quants : fix compiler warnings (shadow variable) (#5472) Co-authored-by: Iwan Kawrakow --- ggml-quants.c | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index b2a309bf8..f44377f45 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3819,15 +3819,15 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r /* Compute combined scale for the block */ const __m256 d = _mm256_set1_ps( GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d) ); - __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i qx = bytes_from_nibbles_32(x[i].qs); // Now we have a vector with bytes in [ 0 .. 15 ] interval. Offset them into [ -8 .. +7 ] interval. 
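    // (each q4_0 nibble encodes its weight with an implicit bias of 8, so
    // subtracting 8 maps the unsigned values back to the signed range [-8, 7])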
const __m256i off = _mm256_set1_epi8( 8 ); - bx = _mm256_sub_epi8( bx, off ); + qx = _mm256_sub_epi8( qx, off ); - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + __m256i qy = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_i8_pairs_float(bx, by); + const __m256 q = mul_sum_i8_pairs_float(qx, qy); /* Multiply q with scale and accumulate */ acc = _mm256_fmadd_ps( d, q, acc ); @@ -4196,10 +4196,10 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r const __m256 d0d1 = _mm256_mul_ps( d0v, d1v ); // Load 16 bytes, and unpack 4 bit fields into bytes, making 32 bytes - const __m256i bx = bytes_from_nibbles_32(x[i].qs); - const __m256i by = _mm256_loadu_si256( (const __m256i *)y[i].qs ); + const __m256i qx = bytes_from_nibbles_32(x[i].qs); + const __m256i qy = _mm256_loadu_si256( (const __m256i *)y[i].qs ); - const __m256 xy = mul_sum_us8_pairs_float(bx, by); + const __m256 xy = mul_sum_us8_pairs_float(qx, qy); // Accumulate d0*d1*x*y #if defined(__AVX2__) @@ -4418,14 +4418,14 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r /* Compute combined scale for the block */ const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); - __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i qx = bytes_from_nibbles_32(x[i].qs); __m256i bxhi = bytes_from_bits_32(x[i].qh); bxhi = _mm256_andnot_si256(bxhi, _mm256_set1_epi8((char)0xF0)); - bx = _mm256_or_si256(bx, bxhi); + qx = _mm256_or_si256(qx, bxhi); - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + __m256i qy = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_i8_pairs_float(bx, by); + const __m256 q = mul_sum_i8_pairs_float(qx, qy); /* Multiply q with scale and accumulate */ acc = _mm256_fmadd_ps(d, q, acc); @@ -4722,15 +4722,15 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s; - __m256i bx = bytes_from_nibbles_32(x[i].qs); + __m256i qx = bytes_from_nibbles_32(x[i].qs); __m256i bxhi = bytes_from_bits_32(x[i].qh); bxhi = _mm256_and_si256(bxhi, _mm256_set1_epi8(0x10)); - bx = _mm256_or_si256(bx, bxhi); + qx = _mm256_or_si256(qx, bxhi); const __m256 dy = _mm256_set1_ps(y[i].d); - const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + const __m256i qy = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_us8_pairs_float(bx, by); + const __m256 q = mul_sum_us8_pairs_float(qx, qy); acc = _mm256_fmadd_ps(q, _mm256_mul_ps(dx, dy), acc); } @@ -4973,10 +4973,10 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { // Compute combined scale for the block const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d)); - __m256i bx = _mm256_loadu_si256((const __m256i *)x[i].qs); - __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs); + __m256i qx = _mm256_loadu_si256((const __m256i *)x[i].qs); + __m256i qy = _mm256_loadu_si256((const __m256i *)y[i].qs); - const __m256 q = mul_sum_i8_pairs_float(bx, by); + const __m256 q = mul_sum_i8_pairs_float(qx, qy); // Multiply q with scale and accumulate #if defined(__AVX2__) From 99b8b43d7b185a6483f28cf798a2d968b2e16ca7 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 13 Feb 2024 11:20:24 +0200 Subject: [PATCH 736/859] tests : disable moe test (#5473) --- tests/test-backend-ops.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
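Patch 735 above is a mechanical rename (bx/by to qx/qy) whose purpose is to silence -Wshadow: the AVX locals collided with identically named variables in an enclosing scope, most likely the bx/by stride parameters of the updated ggml_vec_dot_* signatures. A minimal, self-contained reproduction of that warning class (toy values, not the real kernels):

```cpp
// Compile with: g++ -Wshadow shadow.cpp
// The first inner declaration shadows the outer 'bx' and triggers the
// warning; renaming it (here to 'qx') removes the ambiguity without
// changing behavior -- exactly what the patch does.
#include <cstdio>

int main() {
    int bx = 1; // outer variable (plays the role of a function parameter)
    for (int i = 0; i < 2; ++i) {
        int bx = i * 10; // -Wshadow: declaration shadows the outer 'bx'
        int qx = i * 10; // renamed: no warning, same value
        std::printf("%d %d\n", bx, qx);
    }
    return bx == 1 ? 0 : 1;
}
```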
a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index eb06123d2..9af8517d9 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -2129,14 +2129,13 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_pad()); test_cases.emplace_back(new test_leaky_relu()); + // these tests are disabled to save execution time, but they can be handy for debugging +#if 0 #if !defined(__SANITIZE_THREAD__) // FIXME: these tests use too much memory with thread sanitizer test_cases.emplace_back(new test_moe(8, 2, 1, 4096, 8*1024)); //test_cases.emplace_back(new test_moe(8, 2, 8, 4096, 14336)); #endif - - // these tests are disabled to save execution time, but they can be handy for debugging -#if 0 test_cases.emplace_back(new test_llama(1)); test_cases.emplace_back(new test_llama(2)); test_cases.emplace_back(new test_falcon(1)); From 49cc1f7d67de2da99f3ac185f9ff1319b7bf35f8 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 13 Feb 2024 13:01:29 +0200 Subject: [PATCH 737/859] bert : add tests + fix quantization (#5475) * llama : do not quantize pos embd and token type tensors * ci : add BERT tests ggml-ci * ci : do not do BERT tests on low-perf nodes ggml-ci --- ci/run.sh | 46 ++++++++++++++++++++++++++++++++++++++++++++++ llama.cpp | 6 +++++- 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/ci/run.sh b/ci/run.sh index 82fe247a5..a4264d775 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -568,6 +568,50 @@ function gg_sum_open_llama_7b_v2 { #gg_printf '- shakespeare (q8_0 / f16 base lora):\n```\n%s\n```\n' "$(cat $OUT/${ci}-ppl-shakespeare-lora-q8_0-f16.log)" } +# bge-small + +function gg_run_embd_bge_small { + cd ${SRC} + + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/config.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/resolve/main/tokenizer.model + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/tokenizer_config.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/special_tokens_map.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/resolve/main/pytorch_model.bin + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/sentence_bert_config.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/vocab.txt + + path_models="../models-mnt/bge-small" + + rm -rf build-ci-release && mkdir build-ci-release && cd build-ci-release + + set -e + + (time cmake -DCMAKE_BUILD_TYPE=Release ${CMAKE_EXTRA} .. 
) 2>&1 | tee -a $OUT/${ci}-cmake.log + (time make -j ) 2>&1 | tee -a $OUT/${ci}-make.log + + python3 ../convert-hf-to-gguf.py ${path_models} + + model_f16="${path_models}/ggml-model-f16.gguf" + model_q8_0="${path_models}/ggml-model-q8_0.gguf" + + ./bin/quantize ${model_f16} ${model_q8_0} q8_0 + + (time ./bin/embedding --model ${model_f16} -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-f16.log + (time ./bin/embedding --model ${model_q8_0} -p "I believe the meaning of life is" ) 2>&1 | tee -a $OUT/${ci}-tg-q8_0.log + + set +e +} + +function gg_sum_embd_bge_small { + gg_printf '### %s\n\n' "${ci}" + + gg_printf 'BGE Small (BERT):\n' + gg_printf '- status: %s\n' "$(cat $OUT/${ci}.exit)" + gg_printf '- f16: \n```\n%s\n```\n' "$(cat $OUT/${ci}-tg-f16.log)" + gg_printf '- q8_0:\n```\n%s\n```\n' "$(cat $OUT/${ci}-tg-q8_0.log)" +} + ## main if [ -z ${GG_BUILD_LOW_PERF} ]; then @@ -591,6 +635,8 @@ test $ret -eq 0 && gg_run ctest_debug test $ret -eq 0 && gg_run ctest_release if [ -z ${GG_BUILD_LOW_PERF} ]; then + test $ret -eq 0 && gg_run embd_bge_small + if [ -z ${GG_BUILD_VRAM_GB} ] || [ ${GG_BUILD_VRAM_GB} -ge 8 ]; then if [ -z ${GG_BUILD_CUDA} ]; then test $ret -eq 0 && gg_run open_llama_3b_v2 diff --git a/llama.cpp b/llama.cpp index d316d067b..6dce392df 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10444,7 +10444,11 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s quantize &= !params->only_copy; // do not quantize expert gating tensors - quantize &= name.find("ffn_gate_inp.weight") == std::string::npos; + quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_FFN_GATE_INP, "weight"); + + // do not quantize positional embeddings and token types (BERT) + quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_POS_EMBD, "weight"); + quantize &= name != LLM_TN(model.arch)(LLM_TENSOR_TOKEN_TYPES, "weight"); enum ggml_type new_type; void * new_data; From ad014bba97ef6ef6c3e2f78b2fc463e91ae94579 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20G=C3=A4=C3=9Fler?= Date: Tue, 13 Feb 2024 12:38:37 +0100 Subject: [PATCH 738/859] make: add error message for bad CUDA version (#5444) * make: add error message for bad CUDA version * Update Makefile Co-authored-by: Jared Van Bortel --------- Co-authored-by: Jared Van Bortel --- Makefile | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Makefile b/Makefile index ba73f0637..0a2070b53 100644 --- a/Makefile +++ b/Makefile @@ -569,6 +569,14 @@ $(info I CC: $(shell $(CC) --version | head -n 1)) $(info I CXX: $(shell $(CXX) --version | head -n 1)) ifdef LLAMA_CUBLAS $(info I NVCC: $(shell $(NVCC) --version | tail -n 1)) +CUDA_VERSION := $(shell nvcc --version | grep -oP 'release (\K[0-9]+\.[0-9])') +ifeq ($(shell awk -v "v=$(CUDA_VERSION)" 'BEGIN { print (v < 11.7) }'),1) +ifndef CUDA_DOCKER_ARCH +ifndef CUDA_POWER_ARCH +$(error I ERROR: For CUDA versions < 11.7 a target CUDA architecture must be explicitly provided via CUDA_DOCKER_ARCH) +endif # CUDA_POWER_ARCH +endif # CUDA_DOCKER_ARCH +endif # eq ($(shell echo "$(CUDA_VERSION) < 11.7" | bc),1) endif # LLAMA_CUBLAS $(info ) From 03bf161eb6dea6400ee49c6dc6b69bdcfa9fd3fc Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Tue, 13 Feb 2024 06:06:58 -0600 Subject: [PATCH 739/859] llama : support batched embeddings (#5466) * batched embedding: pool outputs by sequence id. 
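Patch 737's quantization change works purely by tensor name: each candidate tensor's name is compared against the formatted LLM_TN strings, and matches (expert gating, position embeddings, BERT token types) are kept at full precision. A simplified sketch of the filter, using illustrative GGUF-style name strings; the exact strings come from the per-architecture tables in llama.cpp:

```cpp
// Name-based quantization filter in the style of patch 737 (simplified).
// With the patch 734 sentinel, architectures lacking one of these tensors
// end up comparing against "__missing__...", which never matches.
#include <iostream>
#include <string>
#include <vector>

static bool should_quantize(const std::string & name) {
    const std::vector<std::string> keep_f32 = {
        "ffn_gate_inp.weight",  // expert gating
        "position_embd.weight", // positional embeddings
        "token_types.weight",   // BERT token types
    };
    for (const auto & excl : keep_f32) {
        if (name == excl) {
            return false; // copy at full precision
        }
    }
    return true;
}

int main() {
    for (const std::string name : { "blk.0.ffn_up.weight", "position_embd.weight" }) {
        std::cout << name << " -> "
                  << (should_quantize(name) ? "quantize" : "keep f32") << "\n";
    }
}
```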
updated embedding example * bring back non-causal attention * embd : minor improvements * llama : minor --------- Co-authored-by: Georgi Gerganov --- convert-hf-to-gguf.py | 1 + examples/embedding/embedding.cpp | 146 +++++++++++++++++++++++-------- gguf-py/gguf/constants.py | 1 + gguf-py/gguf/gguf_writer.py | 3 + llama.cpp | 61 +++++++++---- llama.h | 5 ++ 6 files changed, 163 insertions(+), 54 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index cae1551a2..5adfdc143 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1648,6 +1648,7 @@ class BertModel(Model): self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) self.gguf_writer.add_causal_attention(False) + self.gguf_writer.add_pooling_layer(True) self.gguf_writer.add_file_type(self.ftype) def set_vocab(self): diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp index 27376c8f0..b4688cf51 100644 --- a/examples/embedding/embedding.cpp +++ b/examples/embedding/embedding.cpp @@ -7,6 +7,51 @@ #pragma warning(disable: 4244 4267) // possible loss of data #endif +static std::vector<std::string> split_lines(const std::string & s) { + std::string line; + std::vector<std::string> lines; + std::stringstream ss(s); + while (std::getline(ss, line)) { + lines.push_back(line); + } + return lines; +} + +static void batch_add_seq(llama_batch & batch, const std::vector<int32_t> & tokens, int seq_id) { + for (size_t i = 0; i < tokens.size(); i++) { + llama_batch_add(batch, tokens[i], i, { seq_id }, false); + } +} + +static void normalize(float * vec, float * out, int n) { + float norm = 0; + for (int i = 0; i < n; i++) { + norm += vec[i] * vec[i]; + } + norm = sqrt(norm); + for (int i = 0; i < n; i++) { + out[i] = vec[i] / norm; + } +} + +static void batch_decode(llama_context * ctx, llama_batch & batch, float * output, int n_seq, int n_embd) { + // clear previous kv_cache values (irrelevant for embeddings) + llama_kv_cache_clear(ctx); + + // run model + fprintf(stderr, "%s: n_tokens = %d, n_seq = %d\n", __func__, batch.n_tokens, n_seq); + if (llama_decode(ctx, batch) < 0) { + fprintf(stderr, "%s : failed to decode\n", __func__); + } + + // normalize on copy + for (int k = 0; k < n_seq; k++) { + float * emb = llama_get_embeddings_ith(ctx, k); + float * out = output + k * n_embd; + normalize(emb, out, n_embd); + } +} + int main(int argc, char ** argv) { gpt_params params; @@ -55,59 +100,84 @@ int main(int argc, char ** argv) { fprintf(stderr, "%s\n", get_system_info(params).c_str()); } - int n_past = 0; + // split the prompt into lines + std::vector<std::string> prompts = split_lines(params.prompt); - // tokenize the prompt - auto embd_inp = ::llama_tokenize(ctx, params.prompt, true); + // max batch size + const uint64_t n_batch = params.n_batch; + GGML_ASSERT(params.n_batch == params.n_ctx); + // tokenize the prompts and trim + std::vector<std::vector<int32_t>> inputs; + for (const auto & prompt : prompts) { + auto inp = ::llama_tokenize(ctx, prompt, true); + if (inp.size() > n_batch) { + inp.resize(n_batch); + } + inputs.push_back(inp); + } + + // tokenization stats if (params.verbose_prompt) { - fprintf(stderr, "\n"); - fprintf(stderr, "%s: prompt: '%s'\n", __func__, params.prompt.c_str()); - fprintf(stderr, "%s: number of tokens in prompt = %zu\n", __func__, embd_inp.size()); - for (int i = 0; i < (int) embd_inp.size(); i++) { - fprintf(stderr, "%6d -> '%s'\n", embd_inp[i], llama_token_to_piece(ctx, embd_inp[i]).c_str()); + for (int i = 0; i < (int) inputs.size(); i++) {
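The normalize() helper added above L2-normalizes each pooled vector as it is copied out, which is what makes the outputs directly comparable: after normalization, a plain dot product of two embeddings equals their cosine similarity. A self-contained sketch of that property (not the llama.cpp API):

```cpp
// Why the embedding example normalizes on copy: for unit-length vectors,
// dot(a, b) == cos(angle between a and b).
#include <cmath>
#include <cstdio>

static void normalize(const float * vec, float * out, int n) {
    float norm = 0.0f;
    for (int i = 0; i < n; i++) norm += vec[i] * vec[i];
    norm = std::sqrt(norm);
    for (int i = 0; i < n; i++) out[i] = vec[i] / norm;
}

int main() {
    const float a_raw[3] = { 3.0f, 0.0f, 4.0f };
    const float b_raw[3] = { 0.0f, 5.0f, 5.0f };
    float a[3], b[3];
    normalize(a_raw, a, 3);
    normalize(b_raw, b, 3);

    float cos_sim = 0.0f;
    for (int i = 0; i < 3; i++) cos_sim += a[i] * b[i];
    std::printf("cosine similarity = %.4f\n", cos_sim); // ~0.5657
}
```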
fprintf(stderr, "%s: prompt %d: '%s'\n", __func__, i, prompts[i].c_str()); + fprintf(stderr, "%s: number of tokens in prompt = %zu\n", __func__, inputs[i].size()); + for (int j = 0; j < (int) inputs[i].size(); j++) { + fprintf(stderr, "%6d -> '%s'\n", inputs[i][j], llama_token_to_piece(ctx, inputs[i][j]).c_str()); + } + fprintf(stderr, "\n\n"); } - fprintf(stderr, "\n"); } - if (embd_inp.size() > (size_t)n_ctx) { - fprintf(stderr, "%s: error: prompt is longer than the context window (%zu tokens, n_ctx = %d)\n", - __func__, embd_inp.size(), n_ctx); - return 1; - } - - while (!embd_inp.empty()) { - int n_tokens = std::min(params.n_batch, (int) embd_inp.size()); - if (llama_decode(ctx, llama_batch_get_one(embd_inp.data(), n_tokens, n_past, 0))) { - fprintf(stderr, "%s : failed to eval\n", __func__); - return 1; - } - n_past += n_tokens; - embd_inp.erase(embd_inp.begin(), embd_inp.begin() + n_tokens); - } + // initialize batch + const int n_prompts = prompts.size(); + struct llama_batch batch = llama_batch_init(n_batch, 0, n_prompts); + // allocate output const int n_embd = llama_n_embd(model); - auto * embeddings = llama_get_embeddings(ctx); + std::vector embeddings(n_prompts * n_embd, 0); + float * emb = embeddings.data(); - // l2-normalize embeddings - float norm = 0; - for (int i = 0; i < n_embd; i++) { - norm += embeddings[i] * embeddings[i]; - } - norm = sqrt(norm); - for (int i = 0; i < n_embd; i++) { - embeddings[i] /= norm; + // break into batches + int p = 0; // number of prompts processed already + int s = 0; // number of prompts in current batch + for (int k = 0; k < n_prompts; k++) { + // clamp to n_batch tokens + auto & inp = inputs[k]; + const uint64_t n_toks = inp.size(); + + // encode if at capacity + if (batch.n_tokens + n_toks > n_batch) { + float * out = emb + p * n_embd; + batch_decode(ctx, batch, out, s, n_embd); + llama_batch_clear(batch); + p += s; + s = 0; + } + + // add to batch + batch_add_seq(batch, inp, s); + s += 1; } - for (int i = 0; i < n_embd; i++) { - printf("%f ", embeddings[i]); - } - printf("\n"); + // final batch + float * out = emb + p * n_embd; + batch_decode(ctx, batch, out, s, n_embd); + // print first 3 embeddings + for (int j = 0; j < std::min(3, n_prompts); j++) { + fprintf(stderr, "embedding %d: ", j); + for (int i = 0; i < n_embd; i++) { + fprintf(stderr, "%f ", emb[j * n_embd + i]); + } + fprintf(stderr, "\n\n"); + } + fprintf(stderr, "\n"); + + // clean up llama_print_timings(ctx); llama_free(ctx); llama_free_model(model); - llama_backend_free(); return 0; diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index a9c13dd38..644e1589c 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -40,6 +40,7 @@ class Keys: TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout" EXPERT_COUNT = "{arch}.expert_count" EXPERT_USED_COUNT = "{arch}.expert_used_count" + POOLING_LAYER = "{arch}.pooling_layer" class Attention: HEAD_COUNT = "{arch}.attention.head_count" diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 7af58a46c..d87bd8e88 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -360,6 +360,9 @@ class GGUFWriter: def add_causal_attention(self, value: bool) -> None: self.add_bool(Keys.Attention.CAUSAL.format(arch=self.arch), value) + def add_pooling_layer(self, value: bool) -> None: + self.add_bool(Keys.LLM.POOLING_LAYER.format(arch=self.arch), value) + def add_rope_dimension_count(self, count: int) -> None: self.add_uint32(Keys.Rope.DIMENSION_COUNT.format(arch=self.arch), 
count) diff --git a/llama.cpp b/llama.cpp index 6dce392df..eb6c46f36 100644 --- a/llama.cpp +++ b/llama.cpp @@ -254,6 +254,7 @@ enum llm_kv { LLM_KV_TENSOR_DATA_LAYOUT, LLM_KV_EXPERT_COUNT, LLM_KV_EXPERT_USED_COUNT, + LLM_KV_POOLING_LAYER, LLM_KV_ATTENTION_HEAD_COUNT, LLM_KV_ATTENTION_HEAD_COUNT_KV, @@ -311,6 +312,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" }, { LLM_KV_EXPERT_COUNT, "%s.expert_count" }, { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" }, + { LLM_KV_POOLING_LAYER, "%s.pooling_layer" }, { LLM_KV_ATTENTION_HEAD_COUNT, "%s.attention.head_count" }, { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" }, @@ -1539,6 +1541,7 @@ struct llama_hparams { float f_max_alibi_bias; bool causal_attn = true; + bool pooling_layer = false; bool operator!=(const llama_hparams & other) const { @@ -1601,6 +1604,7 @@ struct llama_cparams { bool mul_mat_q; bool offload_kqv; + bool do_pooling; ggml_backend_sched_eval_callback cb_eval; void * cb_eval_user_data; @@ -1896,7 +1900,7 @@ struct llama_context { struct ggml_tensor * inp_pos; // I32 [n_batch] struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] struct ggml_tensor * inp_K_shift; // I32 [n_ctx] - struct ggml_tensor * inp_sum; // F32 [1, n_batch] + struct ggml_tensor * inp_sum; // F32 [n_batch, n_batch] #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -3053,6 +3057,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); + ml.get_key(LLM_KV_POOLING_LAYER, hparams.pooling_layer); switch (hparams.n_layer) { case 3: @@ -4859,7 +4864,7 @@ struct llm_build_context { const int32_t n_orig_ctx; const bool do_rope_shift; - const bool causal_attn; + const bool do_pooling; const llm_build_cb & cb; @@ -4903,7 +4908,7 @@ struct llm_build_context { kv_head (worst_case ? 
n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), - causal_attn (hparams.causal_attn), + do_pooling (hparams.pooling_layer && cparams.do_pooling), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -5752,17 +5757,18 @@ struct llm_build_context { const int64_t n_embd_head = hparams.n_embd_head_v; GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); - GGML_ASSERT(n_embd_head == hparams.n_rot); struct ggml_tensor * cur; struct ggml_tensor * inpL; // get input vectors with right size + const size_t stride1 = n_tokens * ggml_type_size(lctx.inp_tokens->type); struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); - struct ggml_tensor * inp_sum = ggml_view_1d(ctx0, lctx.inp_sum, n_tokens, 0); + struct ggml_tensor * inp_sum = ggml_view_2d(ctx0, lctx.inp_sum, n_tokens, n_tokens, stride1, 0); // construct input embeddings (token, type, position) inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + // token types are hardcoded to zero ("Sentence A") struct ggml_tensor * type_row0 = ggml_view_1d(ctx0, model.type_embd, n_embd, 0); inpL = ggml_add(ctx0, inpL, type_row0); @@ -5832,9 +5838,11 @@ struct llm_build_context { // final output cur = inpL; - // pooling - cur = ggml_mul_mat(ctx0, inp_sum, ggml_cont(ctx0, ggml_transpose(ctx0, cur))); - cb(cur, "result_embed", -1); + // pooling layer + if (do_pooling) { + cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_sum); + } + cb(cur, "result_embd", -1); ggml_build_forward_expand(gf, cur); @@ -7367,7 +7375,8 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { for (int i = 0; i < n_kv; ++i) { float f; - if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || lctx.kv_self.cells[i].pos > pos) { + if (!lctx.kv_self.cells[i].has_seq_id(seq_id) || + (hparams.causal_attn && lctx.kv_self.cells[i].pos > pos)) { f = -INFINITY; } else { f = 0; @@ -7378,7 +7387,6 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - { assert(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); float * data = (float *) lctx.inp_sum->data; @@ -7399,6 +7407,20 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { data[i] = lctx.kv_self.cells[i].delta; } } + + if (hparams.pooling_layer && cparams.do_pooling) { + const int64_t n_tokens = batch.n_tokens; + + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); + float * data = (float *) lctx.inp_sum->data; + + memset(lctx.inp_sum->data, 0, batch.n_tokens * batch.n_tokens * ggml_element_size(lctx.inp_sum)); + + for (int i = 0; i < n_tokens; ++i) { + const llama_seq_id seq_id = batch.seq_id[i][0]; + data[seq_id*n_tokens + i] = 1.0f; + } + } } // decode a batch of tokens by evaluating the transformer @@ -7510,7 +7532,7 @@ static int llama_decode_internal( embeddings = gf->nodes[gf->n_nodes - 3]; GGML_ASSERT(strcmp(embeddings->name, "result_norm") == 0); } - } else if (strcmp(res->name, "result_embed") == 0) { + } else if (strcmp(res->name, "result_embd") == 0) { embeddings = res; res = nullptr; } else { @@ -7630,11 +7652,12 @@ static int llama_decode_internal( if (!lctx.embedding.empty()) { auto & embedding_out = lctx.embedding; - const int64_t embed_pos = res ? n_embd * (n_tokens-1) : 0; + const int64_t embd_pos = res ? n_embd * (n_tokens-1) : 0; + const int64_t embd_size = res ? 
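The pooling path above is worth unpacking: llama_set_inputs fills inp_sum as a 0/1 indicator matrix (entry [seq_id, token] = 1), so the single ggml_mul_mat in the BERT graph sums the token embeddings of each sequence in one shot. A CPU sketch of the same computation with toy shapes (llama.cpp does this with ggml tensors, not raw arrays):

```cpp
// Sum-pooling by sequence id as a matrix product with an indicator matrix.
#include <cstdio>

int main() {
    const int n_embd = 2, n_tokens = 4, n_seq = 2;

    // per-token embeddings, row-major [n_tokens][n_embd]
    const float embd[n_tokens][n_embd] = {
        {1, 2}, {3, 4},   // tokens of sequence 0
        {5, 6}, {7, 8},   // tokens of sequence 1
    };
    const int seq_id[n_tokens] = { 0, 0, 1, 1 };

    // indicator[s][t] = 1 if token t belongs to sequence s (the inp_sum fill)
    float indicator[n_seq][n_tokens] = {};
    for (int t = 0; t < n_tokens; ++t) {
        indicator[seq_id[t]][t] = 1.0f;
    }

    // pooled[s] = indicator x embd = sum over the tokens of sequence s
    for (int s = 0; s < n_seq; ++s) {
        float pooled[n_embd] = {};
        for (int t = 0; t < n_tokens; ++t)
            for (int e = 0; e < n_embd; ++e)
                pooled[e] += indicator[s][t] * embd[t][e];
        std::printf("seq %d: (%g, %g)\n", s, pooled[0], pooled[1]); // (4,6), (12,14)
    }
}
```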
n_embd : n_embd * n_tokens; - embedding_out.resize(n_embd); + embedding_out.resize(embd_size); ggml_backend_t embeddings_backend = ggml_backend_sched_get_node_backend(lctx.sched, embeddings); - ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), embed_pos*sizeof(float), n_embd*sizeof(float)); + ggml_backend_tensor_get_async(embeddings_backend, embeddings, embedding_out.data(), embd_pos*sizeof(float), embd_size*sizeof(float)); ggml_backend_synchronize(embeddings_backend); } @@ -10950,6 +10973,7 @@ struct llama_context_params llama_context_default_params() { /*.logits_all =*/ false, /*.embedding =*/ false, /*.offload_kqv =*/ true, + /*.do_pooling =*/ true, }; return result; @@ -11105,6 +11129,7 @@ struct llama_context * llama_new_context_with_model( cparams.yarn_beta_slow = params.yarn_beta_slow; cparams.mul_mat_q = params.mul_mat_q; cparams.offload_kqv = params.offload_kqv; + cparams.do_pooling = params.do_pooling; cparams.n_ctx = params.n_ctx == 0 ? hparams.n_ctx_train : params.n_ctx; cparams.rope_freq_base = params.rope_freq_base == 0.0f ? hparams.rope_freq_base_train : params.rope_freq_base; @@ -11252,7 +11277,7 @@ struct llama_context * llama_new_context_with_model( // resized during inference, reserve maximum ctx->logits.reserve(hparams.n_vocab*cparams.n_batch); - if (params.embedding){ + if (params.embedding) { ctx->embedding.resize(hparams.n_embd); } @@ -11270,7 +11295,7 @@ struct llama_context * llama_new_context_with_model( ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, cparams.n_batch); ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); - ctx->inp_sum = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, 1, cparams.n_batch); + ctx->inp_sum = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); ggml_set_name(ctx->inp_tokens, "inp_tokens"); ggml_set_name(ctx->inp_embd, "inp_embd"); @@ -12128,6 +12153,10 @@ float * llama_get_embeddings(struct llama_context * ctx) { return ctx->embedding.data(); } +float * llama_get_embeddings_ith(struct llama_context * ctx, int32_t i) { + return ctx->embedding.data() + i*ctx->model.hparams.n_embd; +} + const char * llama_token_get_text(const struct llama_model * model, llama_token token) { return model->vocab.id_to_token[token].text.c_str(); } diff --git a/llama.h b/llama.h index 367e8f1a1..5ef78ec96 100644 --- a/llama.h +++ b/llama.h @@ -236,6 +236,7 @@ extern "C" { bool logits_all; // the llama_eval() call computes all logits, not just the last one (DEPRECATED - set llama_batch.logits instead) bool embedding; // embedding mode only bool offload_kqv; // whether to offload the KQV ops (including the KV cache) to GPU + bool do_pooling; // whether to pool (sum) embedding results by sequence id (ignored if no pooling layer) }; // model quantization parameters @@ -628,6 +629,10 @@ extern "C" { // shape: [n_embd] (1-dimensional) LLAMA_API float * llama_get_embeddings(struct llama_context * ctx); + // Get the embeddings for the ith sequence + // llama_get_embeddings(ctx) + i*n_embd + LLAMA_API float * llama_get_embeddings_ith(struct llama_context * ctx, int32_t i); + // // Vocab // From cf45252a7cfcb998bade46a886e20477cecc538a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Tue, 13 Feb 2024 15:14:22 +0200 Subject: [PATCH 740/859] tests : multi-thread the tokenizer tests (#5474) * tests : multi-thread the tokenizer tests ggml-ci * unicode : 
fix data race for unidentified codepoints ggml-ci * unicode : minor style fixes ggml-ci --- llama.cpp | 24 +++++----- tests/test-tokenizer-1-bpe.cpp | 77 ++++++++++++++++---------------- tests/test-tokenizer-1-llama.cpp | 53 ++++++++++++---------- unicode.h | 72 ++++++++++++++++------------- 4 files changed, 124 insertions(+), 102 deletions(-) diff --git a/llama.cpp b/llama.cpp index eb6c46f36..381a03068 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7782,7 +7782,7 @@ struct llm_bigram_spm { }; struct llm_tokenizer_spm { - llm_tokenizer_spm(const llama_vocab & vocab): vocab(vocab) {} + llm_tokenizer_spm(const llama_vocab & vocab) : vocab(vocab) {} void tokenize(const std::string & text, std::vector & output) { // split string into utf8 chars @@ -7857,6 +7857,7 @@ private: if (p == rev_merge.end()) { // output any symbols that did not form tokens as bytes. + output.reserve(output.size() + symbol.n); for (int j = 0; j < (int)symbol.n; ++j) { llama_vocab::id token_id = llama_byte_to_token(vocab, symbol.text[j]); output.push_back(token_id); @@ -8419,17 +8420,18 @@ struct fragment_buffer_variant { token(_token), raw_text(_dummy), offset(0), - length(0){} + length(0) {} + fragment_buffer_variant(const std::string & _raw_text, int64_t _offset, int64_t _length) : type(FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT), - token((llama_vocab::id)-1), + token((llama_vocab::id) - 1), raw_text(_raw_text), offset(_offset), length(_length){ - GGML_ASSERT( _offset >= 0 ); - GGML_ASSERT( _length >= 1 ); - GGML_ASSERT( offset + length <= raw_text.length() ); + GGML_ASSERT(_offset >= 0); + GGML_ASSERT(_length >= 1); + GGML_ASSERT(offset + length <= raw_text.length()); } const FRAGMENT_BUFFER_VARIANT_TYPE type; @@ -8553,14 +8555,14 @@ static std::vector llama_tokenize_internal(const llama_vocab & } std::forward_list fragment_buffer; - fragment_buffer.emplace_front( raw_text, 0, raw_text.length() ); + fragment_buffer.emplace_front(raw_text, 0, raw_text.length()); - if (special) tokenizer_st_partition( vocab, fragment_buffer ); + if (special) tokenizer_st_partition(vocab, fragment_buffer); switch (vocab.type) { case LLAMA_VOCAB_TYPE_SPM: { - for (const auto & fragment: fragment_buffer) { + for (const auto & fragment : fragment_buffer) { if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { // without adding this leading whitespace, we do not get the same results as the original tokenizer @@ -8588,7 +8590,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & } break; case LLAMA_VOCAB_TYPE_BPE: { - for (const auto & fragment: fragment_buffer) { + for (const auto & fragment : fragment_buffer) { if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); @@ -8604,7 +8606,7 @@ static std::vector llama_tokenize_internal(const llama_vocab & } break; case LLAMA_VOCAB_TYPE_WPM: { - for (const auto & fragment: fragment_buffer) { + for (const auto & fragment : fragment_buffer) { if (fragment.type == FRAGMENT_BUFFER_VARIANT_TYPE_RAW_TEXT) { auto raw_text = fragment.raw_text.substr(fragment.offset, fragment.length); diff --git a/tests/test-tokenizer-1-bpe.cpp b/tests/test-tokenizer-1-bpe.cpp index 386530f23..3bb629561 100644 --- a/tests/test-tokenizer-1-bpe.cpp +++ b/tests/test-tokenizer-1-bpe.cpp @@ -4,13 +4,13 @@ #include "console.h" #include +#include #include #include -#include -#include -#include -#include #include +#include +#include +#include int main(int argc, char **argv) { if (argc < 2) { @@ -74,45 +74,46 @@ int main(int argc, char 
**argv) { } } catch (const std::invalid_argument &) { - fprintf(stderr, "%s : info: utf8 conversion %d '%s'\n", __func__, i, str.c_str()); + //fprintf(stderr, "%s : info: utf8 conversion %d '%s'\n", __func__, i, str.c_str()); } } - for (uint32_t cp = 0x0000; cp < 0xffff; ++cp) { - // NOTE: these exceptions seem to be necessary, because the GPT2 tokenizer doesn't want to interfere with some ASCII control characters - if ((cp < 0x03 || cp > 0x05) && cp != 0x0b && cp != 0x11 && (cp < 0x13 || cp > 0x17) && cp != 0x19 && (cp < 0x1c || cp > 0x1e) && (cp < 0xd800 || cp > 0xdfff)) { - std::string str = " " + codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = llama_detokenize_bpe(ctx, tokens); - if (str != check) { - fprintf(stderr, "%s : error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 3; - } - } - } - // Restrict to assigned unicode planes - // for (uint32_t cp = 0x10000; cp < 0x0010ffff; ++cp) { - for (uint32_t cp = 0x10000; cp < 0x00040000; ++cp) { - std::string str = codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = llama_detokenize_bpe(ctx, tokens); - if (str != check) { - fprintf(stderr, "%s : error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 4; - } - } - for (uint32_t cp = 0x000e0000; cp < 0x0010ffff; ++cp) { - std::string str = codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = llama_detokenize_bpe(ctx, tokens); - if (str != check) { - fprintf(stderr, "%s : error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 4; + // unicode + { + const int nthread = std::thread::hardware_concurrency(); + + std::vector threads(nthread); + + for (int i = 0; i < nthread; ++i) { + threads[i] = std::thread([i, nthread, ctx]() { + for (uint32_t cp = i; cp < 0x0010ffff; cp += nthread) { + if (!( // NOLINT + (cp < 0x03 || cp > 0x05) && cp != 0x0b && cp != 0x11 && + (cp < 0x13 || cp > 0x17) && cp != 0x19 && + (cp < 0x1c || cp > 0x1e) && + (cp < 0xd800 || cp > 0xdfff) && + (cp < 0x00040000 || cp >= 0x000e0000) + )) { + continue; + } + + std::string str = codepoint_to_utf8(cp); + std::vector tokens = llama_tokenize(ctx, str, false); + std::string check = llama_detokenize_bpe(ctx, tokens); + if (cp != 9601 && str != check) { + fprintf(stderr, "error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", + cp, check.c_str(), check.length(), str.c_str(), str.length()); + std::exit(3); + } + } + }); + } + + for (auto & t : threads) { + t.join(); } } + llama_free_model(model); llama_free(ctx); diff --git a/tests/test-tokenizer-1-llama.cpp b/tests/test-tokenizer-1-llama.cpp index 4b58fe495..b0d814a41 100644 --- a/tests/test-tokenizer-1-llama.cpp +++ b/tests/test-tokenizer-1-llama.cpp @@ -4,13 +4,13 @@ #include "console.h" #include +#include #include #include -#include -#include -#include -#include #include +#include +#include +#include int main(int argc, char **argv) { if (argc < 2) { @@ -72,26 +72,33 @@ int main(int argc, char **argv) { } } - for (uint32_t cp = 0x0000; cp < 0xffff; ++cp) { - if (cp < 0xd800 || cp > 0xdfff) { - std::string str = codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = 
llama_detokenize_spm(ctx, tokens); - if (cp != 9601 && str != check) { - fprintf(stderr, "%s : error: codepoint %d detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 3; - } + // unicode + { + const int nthread = std::thread::hardware_concurrency(); + + std::vector threads(nthread); + + for (int i = 0; i < nthread; ++i) { + threads[i] = std::thread([i, nthread, ctx]() { + for (uint32_t cp = i; cp < 0x0010ffff; cp += nthread) { + if (cp >= 0xd800 && cp <= 0xdfff) { + continue; + } + + std::string str = codepoint_to_utf8(cp); + std::vector tokens = llama_tokenize(ctx, str, false); + std::string check = llama_detokenize_spm(ctx, tokens); + if (cp != 9601 && str != check) { + fprintf(stderr, "error: codepoint %x detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", + cp, check.c_str(), check.length(), str.c_str(), str.length()); + std::exit(3); + } + } + }); } - } - for (uint32_t cp = 0x10000; cp < 0x0010ffff; ++cp) { - std::string str = codepoint_to_utf8(cp); - std::vector tokens = llama_tokenize(ctx, str, false); - std::string check = llama_detokenize_spm(ctx, tokens); - if (str != check) { - fprintf(stderr, "%s : error: codepoint %d detokenizes to '%s'(%zu) instead of '%s'(%zu)\n", - __func__, cp, check.c_str(), check.length(), str.c_str(), str.length()); - return 4; + + for (auto & t : threads) { + t.join(); } } diff --git a/unicode.h b/unicode.h index 844eff3da..263260702 100644 --- a/unicode.h +++ b/unicode.h @@ -264,26 +264,29 @@ static uint32_t codepoint_from_utf8(const std::string & utf8, size_t & offset) { offset += 1; return result; } - else if (!(utf8[offset + 0] & 0x40)) { + if (!(utf8[offset + 0] & 0x40)) { throw std::invalid_argument("invalid character"); } - else if (!(utf8[offset + 0] & 0x20)) { - if (offset + 1 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80)) + if (!(utf8[offset + 0] & 0x20)) { + if (offset + 1 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80)) { throw std::invalid_argument("invalid character"); + } auto result = ((utf8[offset + 0] & 0x1f) << 6) | (utf8[offset + 1] & 0x3f); offset += 2; return result; } - else if (!(utf8[offset + 0] & 0x10)) { - if (offset + 2 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80) || ! ((utf8[offset + 2] & 0xc0) == 0x80)) + if (!(utf8[offset + 0] & 0x10)) { + if (offset + 2 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80) || ! ((utf8[offset + 2] & 0xc0) == 0x80)) { throw std::invalid_argument("invalid character"); + } auto result = ((utf8[offset + 0] & 0x0f) << 12) | ((utf8[offset + 1] & 0x3f) << 6) | (utf8[offset + 2] & 0x3f); offset += 3; return result; } - else if (!(utf8[offset + 0] & 0x08)) { - if (offset + 3 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80) || ! ((utf8[offset + 2] & 0xc0) == 0x80) || !((utf8[offset + 3] & 0xc0) == 0x80)) + if (!(utf8[offset + 0] & 0x08)) { + if (offset + 3 >= utf8.size() || ! ((utf8[offset + 1] & 0xc0) == 0x80) || ! 
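The codepoint_type change just below is the actual data-race fix named in this patch's commit message: the old body returned codepoint_types[cp], and std::unordered_map::operator[] default-inserts missing keys, so concurrent "reads" from the test threads were mutating the shared static map. Once the tests stripe the codepoint range across std::thread::hardware_concurrency() workers, the lookup has to be a non-mutating find(). In miniature:

```cpp
// operator[] is a write in disguise; find() is a pure lookup.
#include <iostream>
#include <unordered_map>

static const int TYPE_UNIDENTIFIED = 0;

int lookup_racy(std::unordered_map<int, int> & m, int cp) {
    return m[cp]; // inserts {cp, 0} if absent -- mutates the map!
}

int lookup_safe(const std::unordered_map<int, int> & m, int cp) {
    auto it = m.find(cp);
    return it == m.end() ? TYPE_UNIDENTIFIED : it->second; // no mutation
}

int main() {
    std::unordered_map<int, int> types = { { 'A', 1 } };
    std::cout << lookup_safe(types, 'A') << "\n";       // 1
    std::cout << lookup_safe(types, 0x10FFFF) << "\n";  // 0, map unchanged
    std::cout << "size before racy lookup: " << types.size() << "\n"; // 1
    lookup_racy(types, 0x10FFFF);
    std::cout << "size after racy lookup:  " << types.size() << "\n"; // 2
}
```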
((utf8[offset + 2] & 0xc0) == 0x80) || !((utf8[offset + 3] & 0xc0) == 0x80)) { throw std::invalid_argument("invalid character"); + } auto result = ((utf8[offset + 0] & 0x07) << 18) | ((utf8[offset + 1] & 0x3f) << 12) | ((utf8[offset + 2] & 0x3f) << 6) | (utf8[offset + 3] & 0x3f); offset += 4; return result; @@ -331,21 +334,22 @@ static uint32_t codepoint_from_utf16(const std::vector & utf16, size_t offset += 1; return result; } - else { - if (offset + 1 >= utf16.size() || !((utf16[1] & 0xdc00) == 0xdc00)) - throw std::invalid_argument("invalid character"); - auto result = 0x10000 + (((utf16[0] & 0x03ff) << 10) | (utf16[1] & 0x03ff)); - offset += 2; - return result; + + if (offset + 1 >= utf16.size() || !((utf16[1] & 0xdc00) == 0xdc00)) { + throw std::invalid_argument("invalid character"); } - throw std::invalid_argument("invalid string"); + + auto result = 0x10000 + (((utf16[0] & 0x03ff) << 10) | (utf16[1] & 0x03ff)); + offset += 2; + return result; } static std::vector codepoints_from_utf16(const std::vector & utf16) { std::vector result; size_t offset = 0; - while (offset < utf16.size()) + while (offset < utf16.size()) { result.push_back(codepoint_from_utf16(utf16, offset)); + } return result; } @@ -361,44 +365,52 @@ static std::vector codepoints_from_utf16(const std::vector & static std::unordered_map codepoint_type_map() { std::unordered_map codepoint_types; for (auto p : digit_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_DIGIT; + } } - for(auto p : letter_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : letter_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_LETTER; + } } - for(auto p : whitespace_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : whitespace_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_WHITESPACE; + } } - for(auto p : accent_mark_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : accent_mark_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_ACCENT_MARK; + } } - for(auto p : punctuation_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : punctuation_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_PUNCTUATION; + } } - for (auto p : symbol_ranges) { - for (auto i = p.first; i <= p.second; ++i) + for (auto p : symbol_ranges) { + for (auto i = p.first; i <= p.second; ++i) { codepoint_types[i] = CODEPOINT_TYPE_SYMBOL; + } } - for(auto p : control_ranges) { - for(auto i = p.first; i <= p.second; ++ i) + for (auto p : control_ranges) { + for (auto i = p.first; i <= p.second; ++ i) { codepoint_types[i] = CODEPOINT_TYPE_CONTROL; + } } return codepoint_types; } static int codepoint_type(uint32_t cp) { static std::unordered_map codepoint_types = codepoint_type_map(); - return codepoint_types[cp]; + return codepoint_types.find(cp) == codepoint_types.end() ? 
CODEPOINT_TYPE_UNIDENTIFIED : codepoint_types.at(cp); } static int codepoint_type(const std::string & utf8) { - if (utf8.length() == 0) + if (utf8.length() == 0) { return CODEPOINT_TYPE_UNIDENTIFIED; + } size_t offset = 0; return codepoint_type(codepoint_from_utf8(utf8, offset)); } From 263978904c7472db1865409a7ff1129599f6a40b Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Tue, 13 Feb 2024 14:15:42 +0100 Subject: [PATCH 741/859] finetune : rename feed-forward tensors (w1/w2/w3) (#4839) * finetune: rename feed-forward tensors (w1/w2/w3) This commit renames the feed-forward tensors w1, w2 and w3 to ffn_gate, ffn_down and ffn_up respectively. The motivation for this change is to make it easier to understand the purpose of the tensors. This also seems to be in line with the names used in the llama_layer struct in llama.cpp. Signed-off-by: Daniel Bevenius * train-text-from-scratch: rename ff tensors This commit renames the feed-forward tensors w1, w2 and w3 to ffn_gate, ffn_down and ffn_up respectively. The motivation for this change is to make it easier to understand the purpose of the tensors. This also seems to be in line with the names used in the llama_layer struct in llama.cpp Signed-off-by: Daniel Bevenius --------- Signed-off-by: Daniel Bevenius --- examples/finetune/README.md | 6 +- examples/finetune/finetune.cpp | 242 +++++++++--------- .../train-text-from-scratch.cpp | 54 ++-- 3 files changed, 151 insertions(+), 151 deletions(-) diff --git a/examples/finetune/README.md b/examples/finetune/README.md index a884706c5..2fafd505e 100644 --- a/examples/finetune/README.md +++ b/examples/finetune/README.md @@ -80,9 +80,9 @@ The LORA rank can be configured for each model tensor type separately with these --rank-wk N LORA rank for wk tensor (default 4) --rank-wv N LORA rank for wv tensor (default 4) --rank-wo N LORA rank for wo tensor (default 4) - --rank-w1 N LORA rank for w1 tensor (default 4) - --rank-w2 N LORA rank for w2 tensor (default 4) - --rank-w3 N LORA rank for w3 tensor (default 4) + --rank-ffn_gate N LORA rank for ffn_gate tensor (default 4) + --rank-ffn_down N LORA rank for ffn_down tensor (default 4) + --rank-ffn_up N LORA rank for ffn_up tensor (default 4) ``` The LORA rank of 'norm' tensors should always be 1.
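The new names map each tensor onto its role in the SwiGLU feed-forward block that the finetune graph builds below (steps t25 through t29): out = ffn_down(silu(ffn_gate(x)) * ffn_up(x)). A scalar sketch of that block, with the matrix projections reduced to single multiplications for readability:

```cpp
// SwiGLU feed-forward block in scalar form, mirroring t25..t29 below.
#include <cmath>
#include <cstdio>

static float silu(float x) { return x / (1.0f + std::exp(-x)); } // x * sigmoid(x)

int main() {
    // scalar stand-ins for the three projections applied to the same input x
    const float x = 0.5f;
    const float ffn_gate = 2.0f, ffn_up = 3.0f, ffn_down = 0.25f;

    const float gate = silu(ffn_gate * x);     // t26 -> t27: gate projection + SiLU
    const float up   = ffn_up * x;             // t25: up projection
    const float out  = ffn_down * (gate * up); // t28 -> t29: elementwise mul, down projection

    std::printf("out = %f\n", out);
}
```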
diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index b11c56020..98bf5a07a 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -60,9 +60,9 @@ struct my_llama_layer { struct ggml_tensor * ffn_norm; // ff - struct ggml_tensor * w1; - struct ggml_tensor * w2; - struct ggml_tensor * w3; + struct ggml_tensor * ffn_gate; // w1 + struct ggml_tensor * ffn_down; // w2 + struct ggml_tensor * ffn_up; // w3 }; struct my_llama_model { @@ -85,9 +85,9 @@ struct my_llama_lora_hparams { uint32_t n_rank_wv = 4; uint32_t n_rank_wo = 4; uint32_t n_rank_ffn_norm = 1; - uint32_t n_rank_w1 = 4; - uint32_t n_rank_w2 = 4; - uint32_t n_rank_w3 = 4; + uint32_t n_rank_ffn_gate = 4; + uint32_t n_rank_ffn_down = 4; + uint32_t n_rank_ffn_up = 4; uint32_t n_rank_tok_embeddings = 4; uint32_t n_rank_norm = 1; uint32_t n_rank_output = 4; @@ -117,12 +117,12 @@ struct my_llama_lora_layer { struct ggml_tensor * ffn_norm_b; // ff - struct ggml_tensor * w1_a; - struct ggml_tensor * w1_b; - struct ggml_tensor * w2_a; - struct ggml_tensor * w2_b; - struct ggml_tensor * w3_a; - struct ggml_tensor * w3_b; + struct ggml_tensor * ffn_gate_a; + struct ggml_tensor * ffn_gate_b; + struct ggml_tensor * ffn_down_a; + struct ggml_tensor * ffn_down_b; + struct ggml_tensor * ffn_up_a; + struct ggml_tensor * ffn_up_b; }; struct my_llama_lora { @@ -208,9 +208,9 @@ static void print_lora_params(struct my_llama_lora_hparams * params) { printf("%s: n_rank_wv : %u\n", __func__, params->n_rank_wv); printf("%s: n_rank_wo : %u\n", __func__, params->n_rank_wo); printf("%s: n_rank_ffn_norm : %u\n", __func__, params->n_rank_ffn_norm); - printf("%s: n_rank_w1 : %u\n", __func__, params->n_rank_w1); - printf("%s: n_rank_w2 : %u\n", __func__, params->n_rank_w2); - printf("%s: n_rank_w3 : %u\n", __func__, params->n_rank_w3); + printf("%s: n_rank_ffn_gate : %u\n", __func__, params->n_rank_ffn_gate); + printf("%s: n_rank_ffn_down : %u\n", __func__, params->n_rank_ffn_down); + printf("%s: n_rank_ffn_up : %u\n", __func__, params->n_rank_ffn_up); printf("%s: n_rank_tok_embeddings : %u\n", __func__, params->n_rank_tok_embeddings); printf("%s: n_rank_norm : %u\n", __func__, params->n_rank_norm); printf("%s: n_rank_output : %u\n", __func__, params->n_rank_output); @@ -319,9 +319,9 @@ static void init_model(struct llama_model * input, struct my_llama_model * model layer.wv = llama_get_model_tensor(input, tni(LLM_TENSOR_ATTN_V, i)); layer.wo = llama_get_model_tensor(input, tni(LLM_TENSOR_ATTN_OUT, i)); layer.ffn_norm = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_NORM, i)); - layer.w1 = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_GATE, i)); - layer.w2 = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_DOWN, i)); - layer.w3 = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_UP, i)); + layer.ffn_gate = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_GATE, i)); + layer.ffn_down = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_DOWN, i)); + layer.ffn_up = llama_get_model_tensor(input, tni(LLM_TENSOR_FFN_UP, i)); assert_shape_1d(layer.attention_norm, hparams.n_embd); assert_shape_2d(layer.wq, hparams.n_embd, hparams.n_embd); @@ -329,9 +329,9 @@ static void init_model(struct llama_model * input, struct my_llama_model * model assert_shape_2d(layer.wv, hparams.n_embd, hparams.n_embd_gqa()); assert_shape_2d(layer.wo, hparams.n_embd, hparams.n_embd); assert_shape_1d(layer.ffn_norm, hparams.n_embd); - assert_shape_2d(layer.w1, hparams.n_embd, hparams.n_ff); - assert_shape_2d(layer.w2, hparams.n_ff, 
hparams.n_embd); - assert_shape_2d(layer.w3, hparams.n_embd, hparams.n_ff); + assert_shape_2d(layer.ffn_gate, hparams.n_embd, hparams.n_ff); + assert_shape_2d(layer.ffn_down, hparams.n_ff, hparams.n_embd); + assert_shape_2d(layer.ffn_up, hparams.n_embd, hparams.n_ff); } } @@ -362,12 +362,12 @@ static void set_param_lora(struct my_llama_lora * lora) { ggml_set_param(ctx, layer.wo_b); ggml_set_param(ctx, layer.ffn_norm_a); ggml_set_param(ctx, layer.ffn_norm_b); - ggml_set_param(ctx, layer.w1_a); - ggml_set_param(ctx, layer.w1_b); - ggml_set_param(ctx, layer.w2_a); - ggml_set_param(ctx, layer.w2_b); - ggml_set_param(ctx, layer.w3_a); - ggml_set_param(ctx, layer.w3_b); + ggml_set_param(ctx, layer.ffn_gate_a); + ggml_set_param(ctx, layer.ffn_gate_b); + ggml_set_param(ctx, layer.ffn_down_a); + ggml_set_param(ctx, layer.ffn_down_b); + ggml_set_param(ctx, layer.ffn_up_a); + ggml_set_param(ctx, layer.ffn_up_b); } } @@ -435,12 +435,12 @@ static void init_lora(const struct my_llama_model * model, struct my_llama_lora layer.ffn_norm_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_norm, n_embd); layer.ffn_norm_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_norm, 1); - layer.w1_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w1, n_embd); - layer.w1_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w1, n_ff); - layer.w2_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w2, n_ff); - layer.w2_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w2, n_embd); - layer.w3_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w3, n_embd); - layer.w3_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_w3, n_ff); + layer.ffn_gate_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_gate, n_embd); + layer.ffn_gate_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_gate, n_ff); + layer.ffn_down_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_down, n_ff); + layer.ffn_down_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_down, n_embd); + layer.ffn_up_a = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_up, n_embd); + layer.ffn_up_b = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, lparams.n_rank_ffn_up, n_ff); ggml_set_name(layer.attention_norm_a, tni(LLM_TENSOR_ATTN_NORM, ".weight.lora_a", i)); ggml_set_name(layer.attention_norm_b, tni(LLM_TENSOR_ATTN_NORM, ".weight.lora_b", i)); @@ -454,12 +454,12 @@ static void init_lora(const struct my_llama_model * model, struct my_llama_lora ggml_set_name(layer.wo_b, tni(LLM_TENSOR_ATTN_OUT, ".weight.lora_b", i)); ggml_set_name(layer.ffn_norm_a, tni(LLM_TENSOR_FFN_NORM, ".weight.lora_a", i)); ggml_set_name(layer.ffn_norm_b, tni(LLM_TENSOR_FFN_NORM, ".weight.lora_b", i)); - ggml_set_name(layer.w1_a, tni(LLM_TENSOR_FFN_GATE, ".weight.lora_a", i)); - ggml_set_name(layer.w1_b, tni(LLM_TENSOR_FFN_GATE, ".weight.lora_b", i)); - ggml_set_name(layer.w2_a, tni(LLM_TENSOR_FFN_DOWN, ".weight.lora_a", i)); - ggml_set_name(layer.w2_b, tni(LLM_TENSOR_FFN_DOWN, ".weight.lora_b", i)); - ggml_set_name(layer.w3_a, tni(LLM_TENSOR_FFN_UP, ".weight.lora_a", i)); - ggml_set_name(layer.w3_b, tni(LLM_TENSOR_FFN_UP, ".weight.lora_b", i)); + ggml_set_name(layer.ffn_gate_a, tni(LLM_TENSOR_FFN_GATE, ".weight.lora_a", i)); + ggml_set_name(layer.ffn_gate_b, tni(LLM_TENSOR_FFN_GATE, ".weight.lora_b", i)); + ggml_set_name(layer.ffn_down_a, tni(LLM_TENSOR_FFN_DOWN, ".weight.lora_a", i)); + ggml_set_name(layer.ffn_down_b, tni(LLM_TENSOR_FFN_DOWN, ".weight.lora_b", i)); + 
ggml_set_name(layer.ffn_up_a, tni(LLM_TENSOR_FFN_UP, ".weight.lora_a", i)); + ggml_set_name(layer.ffn_up_b, tni(LLM_TENSOR_FFN_UP, ".weight.lora_b", i)); } set_param_lora(lora); @@ -497,12 +497,12 @@ static void randomize_lora(struct my_llama_lora * lora, int seed, float mean, fl randomize_tensor_normal(layer.ffn_norm_a, rnd); ggml_set_zero(layer.ffn_norm_b); - randomize_tensor_normal(layer.w1_a, rnd); - ggml_set_zero(layer.w1_b); - randomize_tensor_normal(layer.w2_a, rnd); - ggml_set_zero(layer.w2_b); - randomize_tensor_normal(layer.w3_a, rnd); - ggml_set_zero(layer.w3_b); + randomize_tensor_normal(layer.ffn_gate_a, rnd); + ggml_set_zero(layer.ffn_gate_b); + randomize_tensor_normal(layer.ffn_down_a, rnd); + ggml_set_zero(layer.ffn_down_b); + randomize_tensor_normal(layer.ffn_up_a, rnd); + ggml_set_zero(layer.ffn_up_b); } free_random_normal_distribution(rnd); @@ -610,13 +610,13 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( struct ggml_tensor * attention_norm = add_to_f32(ctx, layer.attention_norm, ggml_mul_mat(ctx, llayer.attention_norm_a, llayer.attention_norm_b)); struct ggml_tensor * ffn_norm = add_to_f32(ctx, layer.ffn_norm, ggml_mul_mat(ctx, llayer.ffn_norm_a, llayer.ffn_norm_b)); - struct ggml_tensor * wq = add_to_f32(ctx, layer.wq, ggml_mul_mat(ctx, llayer.wq_a, llayer.wq_b)); - struct ggml_tensor * wk = add_to_f32(ctx, layer.wk, ggml_mul_mat(ctx, llayer.wk_a, llayer.wk_b)); - struct ggml_tensor * wv = add_to_f32(ctx, layer.wv, ggml_mul_mat(ctx, llayer.wv_a, llayer.wv_b)); - struct ggml_tensor * wo = add_to_f32(ctx, layer.wo, ggml_mul_mat(ctx, llayer.wo_a, llayer.wo_b)); - struct ggml_tensor * w1 = add_to_f32(ctx, layer.w1, ggml_mul_mat(ctx, llayer.w1_a, llayer.w1_b)); - struct ggml_tensor * w2 = add_to_f32(ctx, layer.w2, ggml_mul_mat(ctx, llayer.w2_a, llayer.w2_b)); - struct ggml_tensor * w3 = add_to_f32(ctx, layer.w3, ggml_mul_mat(ctx, llayer.w3_a, llayer.w3_b)); + struct ggml_tensor * wq = add_to_f32(ctx, layer.wq, ggml_mul_mat(ctx, llayer.wq_a, llayer.wq_b)); + struct ggml_tensor * wk = add_to_f32(ctx, layer.wk, ggml_mul_mat(ctx, llayer.wk_a, llayer.wk_b)); + struct ggml_tensor * wv = add_to_f32(ctx, layer.wv, ggml_mul_mat(ctx, llayer.wv_a, llayer.wv_b)); + struct ggml_tensor * wo = add_to_f32(ctx, layer.wo, ggml_mul_mat(ctx, llayer.wo_a, llayer.wo_b)); + struct ggml_tensor * ffn_gate = add_to_f32(ctx, layer.ffn_gate, ggml_mul_mat(ctx, llayer.ffn_gate_a, llayer.ffn_gate_b)); + struct ggml_tensor * ffn_down = add_to_f32(ctx, layer.ffn_down, ggml_mul_mat(ctx, llayer.ffn_down_a, llayer.ffn_down_b)); + struct ggml_tensor * ffn_up = add_to_f32(ctx, layer.ffn_up, ggml_mul_mat(ctx, llayer.ffn_up_a, llayer.ffn_up_b)); struct ggml_tensor * t02 = ggml_rms_norm (ctx, cur, rms_norm_eps); set_name(t02, "t02"); assert_shape_2d(t02, n_embd, N*n_batch); struct ggml_tensor * t03 = ggml_repeat (ctx, attention_norm, t02); set_name(t03, "t03"); assert_shape_2d(t03, n_embd, N*n_batch); @@ -659,11 +659,11 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( struct ggml_tensor * t22 = ggml_rms_norm (ctx, t21, rms_norm_eps); set_name(t22, "t22"); assert_shape_2d(t22, n_embd, N*n_batch); struct ggml_tensor * t23 = ggml_repeat (ctx, ffn_norm, t22); set_name(t23, "t23"); assert_shape_2d(t23, n_embd, N*n_batch); struct ggml_tensor * t24 = ggml_mul (ctx, t23, t22); set_name(t24, "t24"); assert_shape_2d(t24, n_embd, N*n_batch); - struct ggml_tensor * t25 = ggml_mul_mat (ctx, w3, t24); set_name(t25, "t25"); assert_shape_2d(t25, n_ff, N*n_batch); - struct ggml_tensor * t26 = 
ggml_mul_mat (ctx, w1, t24); set_name(t26, "t26"); assert_shape_2d(t26, n_ff, N*n_batch); + struct ggml_tensor * t25 = ggml_mul_mat (ctx, ffn_up, t24); set_name(t25, "t25"); assert_shape_2d(t25, n_ff, N*n_batch); + struct ggml_tensor * t26 = ggml_mul_mat (ctx, ffn_gate, t24); set_name(t26, "t26"); assert_shape_2d(t26, n_ff, N*n_batch); struct ggml_tensor * t27 = ggml_silu (ctx, t26); set_name(t27, "t27"); assert_shape_2d(t27, n_ff, N*n_batch); struct ggml_tensor * t28 = ggml_mul (ctx, t27, t25); set_name(t28, "t28"); assert_shape_2d(t28, n_ff, N*n_batch); - struct ggml_tensor * t29 = ggml_mul_mat (ctx, w2, t28); set_name(t29, "t29"); assert_shape_2d(t29, n_embd, N*n_batch); + struct ggml_tensor * t29 = ggml_mul_mat (ctx, ffn_down, t28); set_name(t29, "t29"); assert_shape_2d(t29, n_embd, N*n_batch); struct ggml_tensor * t30 = ggml_add (ctx, t29, t21); set_name(t30, "t30"); assert_shape_2d(t30, n_embd, N*n_batch); cur = t30; if (enable_checkpointing) { @@ -723,9 +723,9 @@ static struct ggml_tensor * llama_build_lora_finetune_graphs( ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wk, 1.0f)); ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wv, 1.0f)); ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.wo, 1.0f)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w1, 1.0f)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w2, 1.0f)); - ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.w3, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_gate, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_down, 1.0f)); + ggml_build_forward_expand(gb, ggml_scale_inplace(ctx, layer.ffn_up, 1.0f)); } // allocating checkpoints in one block to reduce memory fragmentation @@ -798,9 +798,9 @@ static void load_llama_lora_gguf(struct gguf_context * fctx, struct ggml_context GGUF_GET_KEY(fctx, lora->hparams.n_rank_wv, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_ATTN_V); GGUF_GET_KEY(fctx, lora->hparams.n_rank_wo, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_ATTN_OUT); GGUF_GET_KEY(fctx, lora->hparams.n_rank_ffn_norm, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_NORM); - GGUF_GET_KEY(fctx, lora->hparams.n_rank_w1, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_GATE); - GGUF_GET_KEY(fctx, lora->hparams.n_rank_w2, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_DOWN); - GGUF_GET_KEY(fctx, lora->hparams.n_rank_w3, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_UP); + GGUF_GET_KEY(fctx, lora->hparams.n_rank_ffn_gate, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_GATE); + GGUF_GET_KEY(fctx, lora->hparams.n_rank_ffn_down, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_DOWN); + GGUF_GET_KEY(fctx, lora->hparams.n_rank_ffn_up, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_TRAINING_LORA_RANK_FFN_UP); init_lora(model, lora); @@ -825,12 +825,12 @@ static void load_llama_lora_gguf(struct gguf_context * fctx, struct ggml_context copy_tensor_by_name(layer.wo_b, f_ggml_ctx, ggml_get_name(layer.wo_b)); copy_tensor_by_name(layer.ffn_norm_a, f_ggml_ctx, ggml_get_name(layer.ffn_norm_a)); copy_tensor_by_name(layer.ffn_norm_b, f_ggml_ctx, ggml_get_name(layer.ffn_norm_b)); - copy_tensor_by_name(layer.w1_a, f_ggml_ctx, ggml_get_name(layer.w1_a)); - copy_tensor_by_name(layer.w1_b, f_ggml_ctx, ggml_get_name(layer.w1_b)); - 
copy_tensor_by_name(layer.w2_a, f_ggml_ctx, ggml_get_name(layer.w2_a)); - copy_tensor_by_name(layer.w2_b, f_ggml_ctx, ggml_get_name(layer.w2_b)); - copy_tensor_by_name(layer.w3_a, f_ggml_ctx, ggml_get_name(layer.w3_a)); - copy_tensor_by_name(layer.w3_b, f_ggml_ctx, ggml_get_name(layer.w3_b)); + copy_tensor_by_name(layer.ffn_gate_a, f_ggml_ctx, ggml_get_name(layer.ffn_gate_a)); + copy_tensor_by_name(layer.ffn_gate_b, f_ggml_ctx, ggml_get_name(layer.ffn_gate_b)); + copy_tensor_by_name(layer.ffn_down_a, f_ggml_ctx, ggml_get_name(layer.ffn_down_a)); + copy_tensor_by_name(layer.ffn_down_b, f_ggml_ctx, ggml_get_name(layer.ffn_down_b)); + copy_tensor_by_name(layer.ffn_up_a, f_ggml_ctx, ggml_get_name(layer.ffn_up_a)); + copy_tensor_by_name(layer.ffn_up_b, f_ggml_ctx, ggml_get_name(layer.ffn_up_b)); } } @@ -868,9 +868,9 @@ static void save_llama_lora_gguf(struct gguf_context * fctx, struct my_llama_mod gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_ATTN_V, lora->hparams.n_rank_wv); gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_ATTN_OUT, lora->hparams.n_rank_wo); gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_NORM, lora->hparams.n_rank_ffn_norm); - gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_GATE, lora->hparams.n_rank_w1); - gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_DOWN, lora->hparams.n_rank_w2); - gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_UP, lora->hparams.n_rank_w3); + gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_GATE, lora->hparams.n_rank_ffn_gate); + gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_DOWN, lora->hparams.n_rank_ffn_down); + gguf_set_val_u32(fctx, LLM_KV_TRAINING_LORA_RANK_FFN_UP, lora->hparams.n_rank_ffn_up); gguf_add_tensor(fctx, lora->tok_embeddings_a); gguf_add_tensor(fctx, lora->tok_embeddings_b); @@ -894,12 +894,12 @@ static void save_llama_lora_gguf(struct gguf_context * fctx, struct my_llama_mod gguf_add_tensor(fctx, layer.wo_b); gguf_add_tensor(fctx, layer.ffn_norm_a); gguf_add_tensor(fctx, layer.ffn_norm_b); - gguf_add_tensor(fctx, layer.w1_a); - gguf_add_tensor(fctx, layer.w1_b); - gguf_add_tensor(fctx, layer.w2_a); - gguf_add_tensor(fctx, layer.w2_b); - gguf_add_tensor(fctx, layer.w3_a); - gguf_add_tensor(fctx, layer.w3_b); + gguf_add_tensor(fctx, layer.ffn_gate_a); + gguf_add_tensor(fctx, layer.ffn_gate_b); + gguf_add_tensor(fctx, layer.ffn_down_a); + gguf_add_tensor(fctx, layer.ffn_down_b); + gguf_add_tensor(fctx, layer.ffn_up_a); + gguf_add_tensor(fctx, layer.ffn_up_b); } } @@ -1104,12 +1104,12 @@ static void save_as_llama_lora(const char * filename, struct my_llama_lora * lor write_tensor(&file, layer.wo_b, tni(LLM_TENSOR_ATTN_OUT, i, ".weight.loraB")); write_tensor(&file, layer.ffn_norm_a, tni(LLM_TENSOR_FFN_NORM, i, ".weight.loraA")); write_tensor(&file, layer.ffn_norm_b, tni(LLM_TENSOR_FFN_NORM, i, ".weight.loraB")); - write_tensor(&file, layer.w1_a, tni(LLM_TENSOR_FFN_GATE, i, ".weight.loraA")); - write_tensor(&file, layer.w1_b, tni(LLM_TENSOR_FFN_GATE, i, ".weight.loraB")); - write_tensor(&file, layer.w2_a, tni(LLM_TENSOR_FFN_DOWN, i, ".weight.loraA")); - write_tensor(&file, layer.w2_b, tni(LLM_TENSOR_FFN_DOWN, i, ".weight.loraB")); - write_tensor(&file, layer.w3_a, tni(LLM_TENSOR_FFN_UP, i, ".weight.loraA")); - write_tensor(&file, layer.w3_b, tni(LLM_TENSOR_FFN_UP, i, ".weight.loraB")); + write_tensor(&file, layer.ffn_gate_a, tni(LLM_TENSOR_FFN_GATE, i, ".weight.loraA")); + write_tensor(&file, layer.ffn_gate_b, tni(LLM_TENSOR_FFN_GATE, i, ".weight.loraB")); + write_tensor(&file, layer.ffn_down_a, 
tni(LLM_TENSOR_FFN_DOWN, i, ".weight.loraA")); + write_tensor(&file, layer.ffn_down_b, tni(LLM_TENSOR_FFN_DOWN, i, ".weight.loraB")); + write_tensor(&file, layer.ffn_up_a, tni(LLM_TENSOR_FFN_UP, i, ".weight.loraA")); + write_tensor(&file, layer.ffn_up_b, tni(LLM_TENSOR_FFN_UP, i, ".weight.loraB")); } } @@ -1139,9 +1139,9 @@ struct train_params { uint32_t n_rank_wv; uint32_t n_rank_wo; uint32_t n_rank_ffn_norm; - uint32_t n_rank_w1; - uint32_t n_rank_w2; - uint32_t n_rank_w3; + uint32_t n_rank_ffn_gate; + uint32_t n_rank_ffn_down; + uint32_t n_rank_ffn_up; uint32_t n_rank_tok_embeddings; uint32_t n_rank_norm; uint32_t n_rank_output; @@ -1152,9 +1152,9 @@ struct train_params { bool custom_n_rank_wv; bool custom_n_rank_wo; bool custom_n_rank_ffn_norm; - bool custom_n_rank_w1; - bool custom_n_rank_w2; - bool custom_n_rank_w3; + bool custom_n_rank_ffn_gate; + bool custom_n_rank_ffn_down; + bool custom_n_rank_ffn_up; bool custom_n_rank_tok_embeddings; bool custom_n_rank_norm; bool custom_n_rank_output; @@ -1186,9 +1186,9 @@ static struct train_params get_default_train_params() { params.n_rank_wv = 4; params.n_rank_wo = 4; params.n_rank_ffn_norm = 1; - params.n_rank_w1 = 4; - params.n_rank_w2 = 4; - params.n_rank_w3 = 4; + params.n_rank_ffn_gate = 4; + params.n_rank_ffn_down = 4; + params.n_rank_ffn_up = 4; params.n_rank_tok_embeddings = 4; params.n_rank_norm = 1; params.n_rank_output = 4; @@ -1199,9 +1199,9 @@ static struct train_params get_default_train_params() { params.custom_n_rank_wv = false; params.custom_n_rank_wo = false; params.custom_n_rank_ffn_norm = false; - params.custom_n_rank_w1 = false; - params.custom_n_rank_w2 = false; - params.custom_n_rank_w3 = false; + params.custom_n_rank_ffn_gate = false; + params.custom_n_rank_ffn_down = false; + params.custom_n_rank_ffn_up = false; params.custom_n_rank_tok_embeddings = false; params.custom_n_rank_norm = false; params.custom_n_rank_output = false; @@ -1232,9 +1232,9 @@ static void train_print_usage(int argc, char ** argv, const struct train_params fprintf(stderr, " --rank-wk N LORA rank for wk tensor, overrides default rank.\n"); fprintf(stderr, " --rank-wv N LORA rank for wv tensor, overrides default rank.\n"); fprintf(stderr, " --rank-wo N LORA rank for wo tensor, overrides default rank.\n"); - fprintf(stderr, " --rank-w1 N LORA rank for w1 tensor, overrides default rank.\n"); - fprintf(stderr, " --rank-w2 N LORA rank for w2 tensor, overrides default rank.\n"); - fprintf(stderr, " --rank-w3 N LORA rank for w3 tensor, overrides default rank.\n"); + fprintf(stderr, " --rank-ffn_gate N LORA rank for ffn_gate tensor, overrides default rank.\n"); + fprintf(stderr, " --rank-ffn_down N LORA rank for ffn_down tensor, overrides default rank.\n"); + fprintf(stderr, " --rank-ffn_up N LORA rank for ffn_up tensor, overrides default rank.\n"); print_common_train_usage(argc, argv, ¶ms->common); } @@ -1369,27 +1369,27 @@ static bool train_params_parse(int argc, char ** argv, struct train_params * par } params->n_rank_wo = std::stoi(argv[i]); params->custom_n_rank_wo = true; - } else if (arg == "--rank-w1") { + } else if (arg == "--rank-ffn_gate") { if (++i >= argc) { invalid_param = true; break; } - params->n_rank_w1 = std::stoi(argv[i]); - params->custom_n_rank_w1 = true; - } else if (arg == "--rank-w2") { + params->n_rank_ffn_gate = std::stoi(argv[i]); + params->custom_n_rank_ffn_gate = true; + } else if (arg == "--rank-ffn_down") { if (++i >= argc) { invalid_param = true; break; } - params->n_rank_w2 = std::stoi(argv[i]); - params->custom_n_rank_w2 
= true; - } else if (arg == "--rank-w3") { + params->n_rank_ffn_down = std::stoi(argv[i]); + params->custom_n_rank_ffn_down = true; + } else if (arg == "--rank-ffn_up") { if (++i >= argc) { invalid_param = true; break; } - params->n_rank_w3 = std::stoi(argv[i]); - params->custom_n_rank_w3 = true; + params->n_rank_ffn_up = std::stoi(argv[i]); + params->custom_n_rank_ffn_up = true; } else { fprintf(stderr, "error: unknown argument: %s\n", arg.c_str()); train_print_usage(argc, argv, &default_params); @@ -1452,12 +1452,12 @@ static int64_t get_parameter_count(struct my_llama_lora* lora) { nx += ggml_nelements(layer.wo_b); nx += ggml_nelements(layer.ffn_norm_a); nx += ggml_nelements(layer.ffn_norm_b); - nx += ggml_nelements(layer.w1_a); - nx += ggml_nelements(layer.w1_b); - nx += ggml_nelements(layer.w2_a); - nx += ggml_nelements(layer.w2_b); - nx += ggml_nelements(layer.w3_a); - nx += ggml_nelements(layer.w3_b); + nx += ggml_nelements(layer.ffn_gate_a); + nx += ggml_nelements(layer.ffn_gate_b); + nx += ggml_nelements(layer.ffn_down_a); + nx += ggml_nelements(layer.ffn_down_b); + nx += ggml_nelements(layer.ffn_up_a); + nx += ggml_nelements(layer.ffn_up_b); } return nx; } @@ -1511,9 +1511,9 @@ int main(int argc, char ** argv) { uint32_t n_rank_wv = params.custom_n_rank_wv ? params.n_rank_wv : params.lora_r; uint32_t n_rank_wo = params.custom_n_rank_wo ? params.n_rank_wo : params.lora_r; uint32_t n_rank_ffn_norm = params.custom_n_rank_ffn_norm ? params.n_rank_ffn_norm : 1; - uint32_t n_rank_w1 = params.custom_n_rank_w1 ? params.n_rank_w1 : params.lora_r; - uint32_t n_rank_w2 = params.custom_n_rank_w2 ? params.n_rank_w2 : params.lora_r; - uint32_t n_rank_w3 = params.custom_n_rank_w3 ? params.n_rank_w3 : params.lora_r; + uint32_t n_rank_ffn_gate = params.custom_n_rank_ffn_gate ? params.n_rank_ffn_gate : params.lora_r; + uint32_t n_rank_ffn_down = params.custom_n_rank_ffn_down ? params.n_rank_ffn_down : params.lora_r; + uint32_t n_rank_ffn_up = params.custom_n_rank_ffn_up ? params.n_rank_ffn_up : params.lora_r; uint32_t n_rank_tok_embeddings = params.custom_n_rank_tok_embeddings ? params.n_rank_tok_embeddings : params.lora_r; uint32_t n_rank_norm = params.custom_n_rank_norm ? params.n_rank_norm : 1; uint32_t n_rank_output = params.custom_n_rank_output ? 
params.n_rank_output : params.lora_r; @@ -1523,9 +1523,9 @@ int main(int argc, char ** argv) { lora.hparams.n_rank_wv = n_rank_wv; lora.hparams.n_rank_wo = n_rank_wo; lora.hparams.n_rank_ffn_norm = n_rank_ffn_norm; - lora.hparams.n_rank_w1 = n_rank_w1; - lora.hparams.n_rank_w2 = n_rank_w2; - lora.hparams.n_rank_w3 = n_rank_w3; + lora.hparams.n_rank_ffn_gate = n_rank_ffn_gate; + lora.hparams.n_rank_ffn_down = n_rank_ffn_down; + lora.hparams.n_rank_ffn_up = n_rank_ffn_up; lora.hparams.n_rank_tok_embeddings = n_rank_tok_embeddings; lora.hparams.n_rank_norm = n_rank_norm; lora.hparams.n_rank_output = n_rank_output; @@ -1566,9 +1566,9 @@ int main(int argc, char ** argv) { || (lora.hparams.n_rank_wv != n_rank_wv) || (lora.hparams.n_rank_wo != n_rank_wo) || (lora.hparams.n_rank_ffn_norm != n_rank_ffn_norm) - || (lora.hparams.n_rank_w1 != n_rank_w1) - || (lora.hparams.n_rank_w2 != n_rank_w2) - || (lora.hparams.n_rank_w3 != n_rank_w3) + || (lora.hparams.n_rank_ffn_gate != n_rank_ffn_gate) + || (lora.hparams.n_rank_ffn_down != n_rank_ffn_down) + || (lora.hparams.n_rank_ffn_up != n_rank_ffn_up) || (lora.hparams.n_rank_tok_embeddings != n_rank_tok_embeddings) || (lora.hparams.n_rank_norm != n_rank_norm) || (lora.hparams.n_rank_output != n_rank_output) diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index 2e2a8ce08..bfdf124d7 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -50,9 +50,9 @@ struct my_llama_layer { struct ggml_tensor * ffn_norm; // ff - struct ggml_tensor * w1; - struct ggml_tensor * w2; - struct ggml_tensor * w3; + struct ggml_tensor * ffn_gate; // w1 + struct ggml_tensor * ffn_down; // w2 + struct ggml_tensor * ffn_up; // w3 }; struct my_llama_model { @@ -140,9 +140,9 @@ static void set_param_model(struct my_llama_model * model) { ggml_set_param(ctx, layer.wv); ggml_set_param(ctx, layer.wo); ggml_set_param(ctx, layer.ffn_norm); - ggml_set_param(ctx, layer.w1); - ggml_set_param(ctx, layer.w2); - ggml_set_param(ctx, layer.w3); + ggml_set_param(ctx, layer.ffn_gate); + ggml_set_param(ctx, layer.ffn_down); + ggml_set_param(ctx, layer.ffn_up); } } @@ -198,9 +198,9 @@ static void init_model(struct my_llama_model * model) { layer.ffn_norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd); - layer.w1 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); - layer.w2 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_ff, n_embd); - layer.w3 = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); + layer.ffn_gate = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); + layer.ffn_down = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_ff, n_embd); + layer.ffn_up = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_ff); ggml_set_name(layer.attention_norm, tni(LLM_TENSOR_ATTN_NORM, i)); @@ -211,9 +211,9 @@ static void init_model(struct my_llama_model * model) { ggml_set_name(layer.ffn_norm, tni(LLM_TENSOR_FFN_NORM, i)); - ggml_set_name(layer.w1, tni(LLM_TENSOR_FFN_GATE, i)); - ggml_set_name(layer.w2, tni(LLM_TENSOR_FFN_DOWN, i)); - ggml_set_name(layer.w3, tni(LLM_TENSOR_FFN_UP, i)); + ggml_set_name(layer.ffn_gate, tni(LLM_TENSOR_FFN_GATE, i)); + ggml_set_name(layer.ffn_down, tni(LLM_TENSOR_FFN_DOWN, i)); + ggml_set_name(layer.ffn_up, tni(LLM_TENSOR_FFN_UP, i)); } set_param_model(model); @@ -244,9 +244,9 @@ static void randomize_model(struct my_llama_model * model, int seed, float mean, randomize_tensor_normal(layer.ffn_norm, rnd); - 
randomize_tensor_normal(layer.w1, rnd); - randomize_tensor_normal(layer.w2, rnd); - randomize_tensor_normal(layer.w3, rnd); + randomize_tensor_normal(layer.ffn_gate, rnd); + randomize_tensor_normal(layer.ffn_down, rnd); + randomize_tensor_normal(layer.ffn_up, rnd); } free_random_normal_distribution(rnd); @@ -356,11 +356,11 @@ static struct ggml_tensor * llama_build_train_graphs( struct ggml_tensor * t22 = ggml_rms_norm (ctx, t21, f_norm_rms_eps); set_name(t22, "t22"); assert_shape_2d(t22, n_embd, N*n_batch); struct ggml_tensor * t23 = ggml_repeat (ctx, layer.ffn_norm, t22); set_name(t23, "t23"); assert_shape_2d(t23, n_embd, N*n_batch); struct ggml_tensor * t24 = ggml_mul (ctx, t23, t22); set_name(t24, "t24"); assert_shape_2d(t24, n_embd, N*n_batch); - struct ggml_tensor * t25 = ggml_mul_mat (ctx, layer.w3, t24); set_name(t25, "t25"); assert_shape_2d(t25, n_ff, N*n_batch); - struct ggml_tensor * t26 = ggml_mul_mat (ctx, layer.w1, t24); set_name(t26, "t26"); assert_shape_2d(t26, n_ff, N*n_batch); + struct ggml_tensor * t25 = ggml_mul_mat (ctx, layer.ffn_up, t24); set_name(t25, "t25"); assert_shape_2d(t25, n_ff, N*n_batch); + struct ggml_tensor * t26 = ggml_mul_mat (ctx, layer.ffn_gate, t24); set_name(t26, "t26"); assert_shape_2d(t26, n_ff, N*n_batch); struct ggml_tensor * t27 = ggml_silu (ctx, t26); set_name(t27, "t27"); assert_shape_2d(t27, n_ff, N*n_batch); struct ggml_tensor * t28 = ggml_mul (ctx, t27, t25); set_name(t28, "t28"); assert_shape_2d(t28, n_ff, N*n_batch); - struct ggml_tensor * t29 = ggml_mul_mat (ctx, layer.w2, t28); set_name(t29, "t29"); assert_shape_2d(t29, n_embd, N*n_batch); + struct ggml_tensor * t29 = ggml_mul_mat (ctx, layer.ffn_down, t28); set_name(t29, "t29"); assert_shape_2d(t29, n_embd, N*n_batch); struct ggml_tensor * t30 = ggml_add (ctx, t29, t21); set_name(t30, "t30"); assert_shape_2d(t30, n_embd, N*n_batch); cur = t30; checkpoints.push_back(cur); @@ -521,9 +521,9 @@ static void load_llama_model_gguf(struct gguf_context * fctx, struct ggml_contex copy_tensor_by_name(layer.wv, f_ggml_ctx, tni(LLM_TENSOR_ATTN_V, i)); copy_tensor_by_name(layer.wo, f_ggml_ctx, tni(LLM_TENSOR_ATTN_OUT, i)); copy_tensor_by_name(layer.ffn_norm, f_ggml_ctx, tni(LLM_TENSOR_FFN_NORM, i)); - copy_tensor_by_name(layer.w1, f_ggml_ctx, tni(LLM_TENSOR_FFN_GATE, i)); - copy_tensor_by_name(layer.w2, f_ggml_ctx, tni(LLM_TENSOR_FFN_DOWN, i)); - copy_tensor_by_name(layer.w3, f_ggml_ctx, tni(LLM_TENSOR_FFN_UP, i)); + copy_tensor_by_name(layer.ffn_gate, f_ggml_ctx, tni(LLM_TENSOR_FFN_GATE, i)); + copy_tensor_by_name(layer.ffn_down, f_ggml_ctx, tni(LLM_TENSOR_FFN_DOWN, i)); + copy_tensor_by_name(layer.ffn_up, f_ggml_ctx, tni(LLM_TENSOR_FFN_UP, i)); } } @@ -664,9 +664,9 @@ static void save_llama_model_gguf(struct gguf_context * fctx, const char * fn_vo gguf_add_tensor(fctx, layer.wv); gguf_add_tensor(fctx, layer.wo); gguf_add_tensor(fctx, layer.ffn_norm); - gguf_add_tensor(fctx, layer.w1); - gguf_add_tensor(fctx, layer.w2); - gguf_add_tensor(fctx, layer.w3); + gguf_add_tensor(fctx, layer.ffn_gate); + gguf_add_tensor(fctx, layer.ffn_down); + gguf_add_tensor(fctx, layer.ffn_up); } } @@ -915,9 +915,9 @@ static int64_t get_parameter_count(struct my_llama_model* model) { nx += ggml_nelements(layer.wv); nx += ggml_nelements(layer.wo); nx += ggml_nelements(layer.ffn_norm); - nx += ggml_nelements(layer.w1); - nx += ggml_nelements(layer.w2); - nx += ggml_nelements(layer.w3); + nx += ggml_nelements(layer.ffn_gate); + nx += ggml_nelements(layer.ffn_down); + nx += ggml_nelements(layer.ffn_up); } return nx; } From 
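Both trainers build the same SwiGLU feed-forward block, which is what makes the new names self-documenting: `ffn_up` and `ffn_gate` each project the normalized input to `n_ff`, only the gate passes through SiLU, and `ffn_down` maps the gated product back to `n_embd`. A plain-numpy sketch of the t25..t29 dataflow above (toy dimensions, not ggml tensors):

```python
import numpy as np

def silu(x):
    return x / (1.0 + np.exp(-x))

n_embd, n_ff, n_tokens = 8, 32, 3  # toy sizes
rng = np.random.default_rng(0)

ffn_up   = rng.standard_normal((n_ff, n_embd))      # formerly w3
ffn_gate = rng.standard_normal((n_ff, n_embd))      # formerly w1
ffn_down = rng.standard_normal((n_embd, n_ff))      # formerly w2
t24      = rng.standard_normal((n_embd, n_tokens))  # normalized FFN input

t25 = ffn_up   @ t24   # up projection
t26 = ffn_gate @ t24   # gate projection
t27 = silu(t26)        # nonlinearity on the gate only
t28 = t27 * t25        # elementwise gating
t29 = ffn_down @ t28   # back down to n_embd
assert t29.shape == (n_embd, n_tokens)
```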
037259be689353081e7bae3c1ab4ab18e7fbe8c9 Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Tue, 13 Feb 2024 15:24:50 +0200 Subject: [PATCH 742/859] llama : make load error reporting more granular (#5477) Makes it easier to pinpoint where e.g. `unordered_map::at: key not found` comes from. --- llama.cpp | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/llama.cpp b/llama.cpp index 381a03068..61c695187 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4384,9 +4384,21 @@ static int llama_model_load(const std::string & fname, llama_model & model, llam model.hparams.vocab_only = params.vocab_only; - llm_load_arch (ml, model); - llm_load_hparams(ml, model); - llm_load_vocab (ml, model); + try { + llm_load_arch(ml, model); + } catch(const std::exception & e) { + throw std::runtime_error("error loading model architecture: " + std::string(e.what())); + } + try { + llm_load_hparams(ml, model); + } catch(const std::exception & e) { + throw std::runtime_error("error loading model hyperparameters: " + std::string(e.what())); + } + try { + llm_load_vocab(ml, model); + } catch(const std::exception & e) { + throw std::runtime_error("error loading model vocabulary: " + std::string(e.what())); + } llm_load_print_meta(ml, model); From c4e6dd59e45ef7b14f7763fb073b517395dc176c Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Tue, 13 Feb 2024 18:18:16 +0200 Subject: [PATCH 743/859] llama : allow raw byte in SPM vocabs; don't crash on nl 404 (#5478) * common : don't crash if newline token is not found * common : llama_byte_to_token: allow falling back to finding just the token byte in SPM vocabs --- llama.cpp | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/llama.cpp b/llama.cpp index 61c695187..8ebbf7628 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3314,7 +3314,12 @@ static void llm_load_vocab( // determine the newline token: LLaMA "<0x0A>" == 10 == '\n', Falcon 193 == '\n' if (vocab.type == LLAMA_VOCAB_TYPE_SPM) { - vocab.linefeed_id = llama_byte_to_token(vocab, '\n'); + try { + vocab.linefeed_id = llama_byte_to_token(vocab, '\n'); + } catch (const std::exception & e) { + LLAMA_LOG_WARN("%s: SPM vocabulary, but newline token not found: %s! 
Using special_pad_id instead.", __func__, e.what()); + vocab.linefeed_id = vocab.special_pad_id; + } } else if (vocab.type == LLAMA_VOCAB_TYPE_WPM) { vocab.linefeed_id = vocab.special_pad_id; } else { @@ -7746,7 +7751,13 @@ static llama_token llama_byte_to_token(const llama_vocab & vocab, uint8_t ch) { switch (llama_vocab_get_type(vocab)) { case LLAMA_VOCAB_TYPE_SPM: { const char buf[7] = { '<', '0', 'x', hex[ch >> 4], hex[ch & 15], '>', 0 }; - return vocab.token_to_id.at(buf); + auto token = vocab.token_to_id.find(buf); + if (token != vocab.token_to_id.end()) { + return (*token).second; + } + // Try to fall back to just the byte as a string + const char buf2[2] = { (char)ch, 0 }; + return vocab.token_to_id.at(buf2); } case LLAMA_VOCAB_TYPE_WPM: case LLAMA_VOCAB_TYPE_BPE: { From ea9c8e11436ad50719987fa23a289c74b7b40d40 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Tue, 13 Feb 2024 12:03:53 -0500 Subject: [PATCH 744/859] llama : add support for Nomic Embed (#5468) --- convert-hf-to-gguf.py | 117 ++++++++++++------- gguf-py/gguf/constants.py | 56 +++++---- gguf-py/gguf/tensor_mapping.py | 12 +- llama.cpp | 201 ++++++++++++++++++++++++--------- 4 files changed, 273 insertions(+), 113 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 5adfdc143..ae471481d 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -10,7 +10,7 @@ import re import sys from enum import IntEnum from pathlib import Path -from typing import TYPE_CHECKING, Any, ContextManager, Iterator, cast +from typing import TYPE_CHECKING, Any, ContextManager, Iterator, Sequence, cast import numpy as np import torch @@ -25,15 +25,6 @@ import gguf from convert import HfVocab -# check for any of the given keys in the dictionary and return the value of the first key found -def get_key_opts(d, keys): - for k in keys: - if k in d: - return d[k] - print(f"Could not find any of {keys}") - sys.exit() - - ###### MODEL DEFINITIONS ###### class SentencePieceTokenTypes(IntEnum): @@ -58,6 +49,15 @@ class Model: self.hparams = Model.load_hparams(self.dir_model) self.model_arch = self._get_model_architecture() self.gguf_writer = gguf.GGUFWriter(fname_out, gguf.MODEL_ARCH_NAMES[self.model_arch], endianess=self.endianess, use_temp_file=False) + self.block_count = self.find_hparam(["n_layers", "num_hidden_layers", "n_layer"]) + + def find_hparam(self, keys: Sequence[str], optional: bool = False) -> Any: + key = next((k for k in keys if k in self.hparams), None) + if key is not None: + return self.hparams[key] + if optional: + return None + raise KeyError(f"could not find any of: {keys}") def set_vocab(self): self._set_vocab_gpt2() @@ -79,28 +79,33 @@ class Model: def set_gguf_parameters(self): self.gguf_writer.add_name(self.dir_model.name) - self.gguf_writer.add_block_count(self.hparams.get( - "n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer")), - )) - if (n_ctx := self.hparams.get("max_position_embeddings")) is not None: + self.gguf_writer.add_block_count(self.block_count) + + if (n_ctx := self.find_hparam(["max_position_embeddings", "n_ctx"], optional=True)) is not None: self.gguf_writer.add_context_length(n_ctx) - if (n_embd := self.hparams.get("hidden_size")) is not None: - self.gguf_writer.add_embedding_length(n_embd) - if (n_ff := self.hparams.get("intermediate_size")) is not None: + + n_embd = self.find_hparam(["hidden_size", "n_embd"]) + self.gguf_writer.add_embedding_length(n_embd) + + if (n_ff := self.find_hparam(["intermediate_size", "n_inner"], optional=True)) is not 
None: self.gguf_writer.add_feed_forward_length(n_ff) - if (n_head := self.hparams.get("num_attention_heads")) is not None: - self.gguf_writer.add_head_count(n_head) + + n_head = self.find_hparam(["num_attention_heads", "n_head"]) + self.gguf_writer.add_head_count(n_head) + if (n_head_kv := self.hparams.get("num_key_value_heads")) is not None: self.gguf_writer.add_head_count_kv(n_head_kv) - if (n_rms_eps := self.hparams.get("rms_norm_eps")) is not None: - self.gguf_writer.add_layer_norm_rms_eps(n_rms_eps) + if (f_rms_eps := self.hparams.get("rms_norm_eps")) is not None: + self.gguf_writer.add_layer_norm_rms_eps(f_rms_eps) + if (f_norm_eps := self.find_hparam(["layer_norm_eps", "layer_norm_epsilon"], optional=True)) is not None: + self.gguf_writer.add_layer_norm_eps(f_norm_eps) if (n_experts := self.hparams.get("num_local_experts")) is not None: self.gguf_writer.add_expert_count(n_experts) if (n_experts_used := self.hparams.get("num_experts_per_tok")) is not None: self.gguf_writer.add_expert_used_count(n_experts_used) - self.gguf_writer.add_parallel_residual(self.hparams.get("use_parallel_residual", True)) + self.gguf_writer.add_file_type(self.ftype) def write_tensors(self): block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) @@ -211,6 +216,8 @@ class Model: return MiniCPMModel if model_architecture == "BertModel": return BertModel + if model_architecture == "NomicBertModel": + return NomicBertModel return Model def _is_model_safetensors(self) -> bool: @@ -268,6 +275,8 @@ class Model: return gguf.MODEL_ARCH.MINICPM if arch == "BertModel": return gguf.MODEL_ARCH.BERT + if arch == "NomicBertModel": + return gguf.MODEL_ARCH.NOMIC_BERT raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1297,21 +1306,21 @@ class GPT2Model(Model): class Phi2Model(Model): def set_gguf_parameters(self): - block_count = get_key_opts(self.hparams, ["num_hidden_layers", "n_layer"]) + block_count = self.find_hparam(["num_hidden_layers", "n_layer"]) - rot_pct = get_key_opts(self.hparams, ["partial_rotary_factor"]) - n_embd = get_key_opts(self.hparams, ["hidden_size", "n_embd"]) - n_head = get_key_opts(self.hparams, ["num_attention_heads", "n_head"]) + rot_pct = self.find_hparam(["partial_rotary_factor"]) + n_embd = self.find_hparam(["hidden_size", "n_embd"]) + n_head = self.find_hparam(["num_attention_heads", "n_head"]) self.gguf_writer.add_name("Phi2") - self.gguf_writer.add_context_length(get_key_opts(self.hparams, ["n_positions", "max_position_embeddings"])) + self.gguf_writer.add_context_length(self.find_hparam(["n_positions", "max_position_embeddings"])) self.gguf_writer.add_embedding_length(n_embd) self.gguf_writer.add_feed_forward_length(4 * n_embd) self.gguf_writer.add_block_count(block_count) self.gguf_writer.add_head_count(n_head) self.gguf_writer.add_head_count_kv(n_head) - self.gguf_writer.add_layer_norm_eps(get_key_opts(self.hparams, ["layer_norm_epsilon", "layer_norm_eps"])) + self.gguf_writer.add_layer_norm_eps(self.find_hparam(["layer_norm_epsilon", "layer_norm_eps"])) self.gguf_writer.add_rope_dimension_count(int(rot_pct * n_embd) // n_head) self.gguf_writer.add_file_type(self.ftype) self.gguf_writer.add_add_bos_token(False) @@ -1636,20 +1645,12 @@ in chat mode so that the conversation can end normally.") class BertModel(Model): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.block_count = self.hparams["num_hidden_layers"] + self.vocab_size = None def set_gguf_parameters(self): - # 
TODO(cebtenzzre): merge with parent class - self.gguf_writer.add_name(self.dir_model.name) - self.gguf_writer.add_context_length(self.hparams["max_position_embeddings"]) - self.gguf_writer.add_embedding_length(self.hparams["hidden_size"]) - self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) - self.gguf_writer.add_block_count(self.block_count) - self.gguf_writer.add_head_count(self.hparams["num_attention_heads"]) - self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) + super().set_gguf_parameters() self.gguf_writer.add_causal_attention(False) self.gguf_writer.add_pooling_layer(True) - self.gguf_writer.add_file_type(self.ftype) def set_vocab(self): path = self.dir_model @@ -1659,6 +1660,7 @@ class BertModel(Model): vocab = HfVocab(path, added_tokens_path) tokens, scores, toktypes = zip(*vocab.all_tokens()) assert len(tokens) == vocab.vocab_size + self.vocab_size = vocab.vocab_size # we need this to validate the size of the token_type embeddings # though currently we are passing all zeros to the token_type embeddings @@ -1672,7 +1674,7 @@ class BertModel(Model): if tok.startswith(b"##"): return tok[2:] return b"\xe2\x96\x81" + tok - tokens = [phantom(t, y) for t, y in zip(tokens, toktypes)] + tokens = tuple(phantom(t, y) for t, y in zip(tokens, toktypes)) # set up bos and eos tokens (cls and sep) self.gguf_writer.add_bos_token_id(vocab.tokenizer.cls_token_id) @@ -1724,6 +1726,43 @@ class BertModel(Model): self.gguf_writer.add_tensor(new_name, data) +class NomicBertModel(BertModel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # the HF config claims n_ctx=8192, but it uses RoPE scaling + self.hparams["n_ctx"] = 2048 + + # SwigLU activation + assert self.hparams["activation_function"] == "swiglu" + # this doesn't do anything in the HF version + assert self.hparams["causal"] is False + # no bias tensors + assert self.hparams["qkv_proj_bias"] is False + assert self.hparams["mlp_fc1_bias"] is False + assert self.hparams["mlp_fc2_bias"] is False + # norm at end of layer + assert self.hparams["prenorm"] is False + # standard RoPE + assert self.hparams["rotary_emb_fraction"] == 1.0 + assert self.hparams["rotary_emb_interleaved"] is False + assert self.hparams["rotary_emb_scale_base"] is None + + def set_gguf_parameters(self): + super().set_gguf_parameters() + self.gguf_writer.add_rope_freq_base(self.hparams["rotary_emb_base"]) + + def get_tensors(self): + assert self.vocab_size is not None + for name, data in super().get_tensors(): + # Nomic Embed's token embeddings tensor is padded, but llama.cpp wants tensor sizes to match exactly. 
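The hunk continues below by rounding the declared vocabulary size up to Nomic's padding multiple of 64 and slicing the extra rows off before writing the tensor. The same arithmetic in isolation, with a hypothetical vocabulary size:

```python
import numpy as np

vocab_size = 30522  # hypothetical tokenizer vocabulary
n_embd     = 768

# Nomic Embed pads the embedding rows up to a multiple of 64 ...
rounded_vocab_size = (vocab_size + 63) // 64 * 64  # -> 30528
padded = np.zeros((rounded_vocab_size, n_embd), dtype=np.float32)

# ... and the converter trims back to the exact size llama.cpp expects.
trimmed = padded[:vocab_size, :]
assert trimmed.shape == (vocab_size, n_embd)
```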
+ if name == 'embeddings.word_embeddings.weight' and data.shape[1] != self.vocab_size: + rounded_vocab_size = (self.vocab_size + 63) // 64 * 64 + assert data.shape == (rounded_vocab_size, self.hparams["n_embd"]) + data = data[:self.vocab_size, :] + yield name, data + + ###### CONVERSION LOGIC ###### diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 644e1589c..5fba01714 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -87,27 +87,28 @@ class Keys: class MODEL_ARCH(IntEnum): - LLAMA = auto() - FALCON = auto() - BAICHUAN = auto() - GPT2 = auto() - GPTJ = auto() - GPTNEOX = auto() - MPT = auto() - STARCODER = auto() - PERSIMMON = auto() - REFACT = auto() - BERT = auto() - BLOOM = auto() - STABLELM = auto() - QWEN = auto() - QWEN2 = auto() - PHI2 = auto() - PLAMO = auto() - CODESHELL = auto() - ORION = auto() + LLAMA = auto() + FALCON = auto() + BAICHUAN = auto() + GPT2 = auto() + GPTJ = auto() + GPTNEOX = auto() + MPT = auto() + STARCODER = auto() + PERSIMMON = auto() + REFACT = auto() + BERT = auto() + NOMIC_BERT = auto() + BLOOM = auto() + STABLELM = auto() + QWEN = auto() + QWEN2 = auto() + PHI2 = auto() + PLAMO = auto() + CODESHELL = auto() + ORION = auto() INTERNLM2 = auto() - MINICPM = auto() + MINICPM = auto() class MODEL_TENSOR(IntEnum): @@ -153,6 +154,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.PERSIMMON: "persimmon", MODEL_ARCH.REFACT: "refact", MODEL_ARCH.BERT: "bert", + MODEL_ARCH.NOMIC_BERT: "nomic-bert", MODEL_ARCH.BLOOM: "bloom", MODEL_ARCH.STABLELM: "stablelm", MODEL_ARCH.QWEN: "qwen", @@ -282,6 +284,20 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_UP, MODEL_TENSOR.LAYER_OUT_NORM, ], + MODEL_ARCH.NOMIC_BERT: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.TOKEN_EMBD_NORM, + MODEL_TENSOR.TOKEN_TYPES, + MODEL_TENSOR.POS_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.ATTN_OUT_NORM, + MODEL_TENSOR.ATTN_QKV, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.LAYER_OUT_NORM, + ], MODEL_ARCH.MPT: [ MODEL_TENSOR.TOKEN_EMBD, MODEL_TENSOR.OUTPUT_NORM, diff --git a/gguf-py/gguf/tensor_mapping.py b/gguf-py/gguf/tensor_mapping.py index c7ba1420e..861003776 100644 --- a/gguf-py/gguf/tensor_mapping.py +++ b/gguf-py/gguf/tensor_mapping.py @@ -15,7 +15,7 @@ class TensorNameMap: "word_embeddings", # bloom "model.embed_tokens", # llama-hf "tok_embeddings", # llama-pth - "embeddings.word_embeddings", # bert + "embeddings.word_embeddings", # bert nomic-bert "language_model.embedding.word_embeddings", # persimmon "wte", # gpt2 "transformer.embd.wte", # phi2 @@ -24,13 +24,14 @@ class TensorNameMap: # Token type embeddings MODEL_TENSOR.TOKEN_TYPES: ( - "embeddings.token_type_embeddings", # bert + "embeddings.token_type_embeddings", # bert nomic-bert ), # Normalization of token embeddings MODEL_TENSOR.TOKEN_EMBD_NORM: ( "word_embeddings_layernorm", # bloom "embeddings.LayerNorm", # bert + "emb_ln", # nomic-bert ), # Position embeddings @@ -103,6 +104,7 @@ class TensorNameMap: "model.layers.{bid}.self_attn.query_key_value", # persimmon "h.{bid}.attn.c_attn", # gpt2 "transformer.h.{bid}.mixer.Wqkv", # phi2 + "encoder.layers.{bid}.attn.Wqkv", # nomic-bert ), # Attention query @@ -152,11 +154,13 @@ class TensorNameMap: "transformer.h.{bid}.mixer.out_proj", # phi2 "model.layers.layers.{bid}.self_attn.o_proj", # plamo "model.layers.{bid}.attention.wo", # internlm2 + "encoder.layers.{bid}.attn.out_proj", # nomic-bert ), # Attention output norm 
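Each entry added to `tensor_mapping.py` in this patch pairs an HF module path template with a GGUF base name, and `{bid}` is substituted per block when the map is queried. A toy version of that lookup (a simplified illustration, not gguf-py's actual `TensorNameMap` class):

```python
# Simplified {bid}-templated lookup; the two entries are from this patch.
MAPPING = {
    "encoder.layers.{bid}.attn.Wqkv":     "blk.{bid}.attn_qkv",     # nomic-bert
    "encoder.layers.{bid}.attn.out_proj": "blk.{bid}.attn_output",  # nomic-bert
}

def map_name(hf_name, n_blocks):
    for src, dst in MAPPING.items():
        for bid in range(n_blocks):
            if hf_name == src.format(bid=bid):
                return dst.format(bid=bid)
    return None

print(map_name("encoder.layers.3.attn.Wqkv", 12))  # -> blk.3.attn_qkv
```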
MODEL_TENSOR.ATTN_OUT_NORM: ( "encoder.layer.{bid}.attention.output.LayerNorm", # bert + "encoder.layers.{bid}.norm1", # nomic-bert ), # Rotary embeddings @@ -205,6 +209,7 @@ class TensorNameMap: "model.layers.{bid}.mlp.fc1", # phi2 "model.layers.layers.{bid}.mlp.up_proj", # plamo "model.layers.{bid}.feed_forward.w3", # internlm2 + "encoder.layers.{bid}.mlp.fc11", # nomic-bert ), MODEL_TENSOR.FFN_UP_EXP: ( @@ -224,6 +229,7 @@ class TensorNameMap: "transformer.h.{bid}.mlp.w2", # qwen "model.layers.layers.{bid}.mlp.gate_proj", # plamo "model.layers.{bid}.feed_forward.w1", # internlm2 + "encoder.layers.{bid}.mlp.fc12", # nomic-bert ), MODEL_TENSOR.FFN_GATE_EXP: ( @@ -249,6 +255,7 @@ class TensorNameMap: "model.layers.{bid}.mlp.fc2", # phi2 "model.layers.layers.{bid}.mlp.down_proj", # plamo "model.layers.{bid}.feed_forward.w2", # internlm2 + "encoder.layers.{bid}.mlp.fc2", # nomic-bert ), MODEL_TENSOR.FFN_DOWN_EXP: ( @@ -272,6 +279,7 @@ class TensorNameMap: MODEL_TENSOR.LAYER_OUT_NORM: ( "encoder.layer.{bid}.output.LayerNorm", # bert + "encoder.layers.{bid}.norm2", # nomic-bert ) } diff --git a/llama.cpp b/llama.cpp index 8ebbf7628..14e8821cd 100644 --- a/llama.cpp +++ b/llama.cpp @@ -197,6 +197,7 @@ enum llm_arch { LLM_ARCH_PERSIMMON, LLM_ARCH_REFACT, LLM_ARCH_BERT, + LLM_ARCH_NOMIC_BERT, LLM_ARCH_BLOOM, LLM_ARCH_STABLELM, LLM_ARCH_QWEN, @@ -211,27 +212,28 @@ enum llm_arch { }; static std::map LLM_ARCH_NAMES = { - { LLM_ARCH_LLAMA, "llama" }, - { LLM_ARCH_FALCON, "falcon" }, - { LLM_ARCH_GPT2, "gpt2" }, - { LLM_ARCH_GPTJ, "gptj" }, - { LLM_ARCH_GPTNEOX, "gptneox" }, - { LLM_ARCH_MPT, "mpt" }, - { LLM_ARCH_BAICHUAN, "baichuan" }, - { LLM_ARCH_STARCODER, "starcoder" }, - { LLM_ARCH_PERSIMMON, "persimmon" }, - { LLM_ARCH_REFACT, "refact" }, - { LLM_ARCH_BERT, "bert" }, - { LLM_ARCH_BLOOM, "bloom" }, - { LLM_ARCH_STABLELM, "stablelm" }, - { LLM_ARCH_QWEN, "qwen" }, - { LLM_ARCH_QWEN2, "qwen2" }, - { LLM_ARCH_PHI2, "phi2" }, - { LLM_ARCH_PLAMO, "plamo" }, - { LLM_ARCH_CODESHELL, "codeshell" }, - { LLM_ARCH_ORION, "orion" }, - { LLM_ARCH_INTERNLM2, "internlm2" }, - { LLM_ARCH_MINICPM, "minicpm" }, + { LLM_ARCH_LLAMA, "llama" }, + { LLM_ARCH_FALCON, "falcon" }, + { LLM_ARCH_GPT2, "gpt2" }, + { LLM_ARCH_GPTJ, "gptj" }, + { LLM_ARCH_GPTNEOX, "gptneox" }, + { LLM_ARCH_MPT, "mpt" }, + { LLM_ARCH_BAICHUAN, "baichuan" }, + { LLM_ARCH_STARCODER, "starcoder" }, + { LLM_ARCH_PERSIMMON, "persimmon" }, + { LLM_ARCH_REFACT, "refact" }, + { LLM_ARCH_BERT, "bert" }, + { LLM_ARCH_NOMIC_BERT, "nomic-bert" }, + { LLM_ARCH_BLOOM, "bloom" }, + { LLM_ARCH_STABLELM, "stablelm" }, + { LLM_ARCH_QWEN, "qwen" }, + { LLM_ARCH_QWEN2, "qwen2" }, + { LLM_ARCH_PHI2, "phi2" }, + { LLM_ARCH_PLAMO, "plamo" }, + { LLM_ARCH_CODESHELL, "codeshell" }, + { LLM_ARCH_ORION, "orion" }, + { LLM_ARCH_INTERNLM2, "internlm2" }, + { LLM_ARCH_MINICPM, "minicpm" }, }; enum llm_kv { @@ -375,6 +377,7 @@ enum llm_tensor { LLM_TENSOR_ATTN_OUT, LLM_TENSOR_ATTN_NORM, LLM_TENSOR_ATTN_NORM_2, + LLM_TENSOR_ATTN_OUT_NORM, LLM_TENSOR_ATTN_ROT_EMBD, LLM_TENSOR_FFN_GATE_INP, LLM_TENSOR_FFN_NORM, @@ -387,6 +390,7 @@ enum llm_tensor { LLM_TENSOR_FFN_UP_EXP, LLM_TENSOR_ATTN_Q_NORM, LLM_TENSOR_ATTN_K_NORM, + LLM_TENSOR_LAYER_OUT_NORM, }; static std::map> LLM_TENSOR_NAMES = { @@ -552,12 +556,27 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" }, { LLM_TENSOR_TOKEN_TYPES, "token_types" }, { LLM_TENSOR_POS_EMBD, "position_embd" }, - { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_output_norm" }, + { LLM_TENSOR_ATTN_OUT_NORM, 
"blk.%d.attn_output_norm" }, { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, - { LLM_TENSOR_FFN_NORM, "blk.%d.layer_output_norm" }, + { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, + { + LLM_ARCH_NOMIC_BERT, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" }, + { LLM_TENSOR_TOKEN_TYPES, "token_types" }, + { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" }, + { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, }, @@ -1485,6 +1504,7 @@ enum e_model { MODEL_22M, MODEL_33M, MODEL_109M, + MODEL_137M, MODEL_335M, MODEL_0_5B, MODEL_1B, @@ -1620,6 +1640,8 @@ struct llama_layer { struct ggml_tensor * attn_q_norm_b; struct ggml_tensor * attn_k_norm; struct ggml_tensor * attn_k_norm_b; + struct ggml_tensor * attn_out_norm; + struct ggml_tensor * attn_out_norm_b; // attention struct ggml_tensor * wq; @@ -1638,6 +1660,8 @@ struct llama_layer { // normalization struct ggml_tensor * ffn_norm; struct ggml_tensor * ffn_norm_b; + struct ggml_tensor * layer_out_norm; + struct ggml_tensor * layer_out_norm_b; // ff struct ggml_tensor * ffn_gate; // w1 @@ -2855,6 +2879,11 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { static const char * llama_model_type_name(e_model type) { switch (type) { + case MODEL_22M: return "22M"; + case MODEL_33M: return "33M"; + case MODEL_109M: return "109M"; + case MODEL_137M: return "137M"; + case MODEL_0_5B: return "0.5B"; case MODEL_1B: return "1B"; case MODEL_2B: return "2B"; case MODEL_3B: return "3B"; @@ -3073,6 +3102,17 @@ static void llm_load_hparams( model.type = e_model::MODEL_335M; break; // bge-large } } break; + case LLM_ARCH_NOMIC_BERT: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); + ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); + ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); + ml.get_key(LLM_KV_POOLING_LAYER, hparams.pooling_layer); + + if (hparams.n_layer == 12 && hparams.n_embd == 768) { + model.type = e_model::MODEL_137M; + } + } break; case LLM_ARCH_BLOOM: { ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); @@ -3875,10 +3915,14 @@ static bool llm_load_tensors( } } break; case LLM_ARCH_BERT: + case LLM_ARCH_NOMIC_BERT: { - model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); - model.type_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_TYPES, "weight"), {n_embd, n_vocab_type}); - model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + model.type_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_TYPES, "weight"), {n_embd, n_vocab_type}); + if (model.arch == LLM_ARCH_BERT) { + model.pos_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_POS_EMBD, "weight"), {n_embd, hparams.n_ctx_train}); + } + model.tok_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD_NORM, "weight"), {n_embd}); model.tok_norm_b = ml.create_tensor(ctx_output, 
tn(LLM_TENSOR_TOKEN_EMBD_NORM, "bias"), {n_embd}); @@ -3888,29 +3932,38 @@ static bool llm_load_tensors( auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); - layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}); + if (model.arch == LLM_ARCH_BERT) { + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); + layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); - layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); + layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); - layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd}); - layer.bq = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_Q, "bias", i), {n_embd}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); + layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + } else { + layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); + } - layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_gqa}); - layer.bk = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_K, "bias", i), {n_embd_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_gqa}); - layer.bv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_V, "bias", i), {n_embd_gqa}); + layer.attn_out_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT_NORM, "weight", i), {n_embd}); + layer.attn_out_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT_NORM, "bias", i), {n_embd}); - layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); - layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); - layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); - layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); + if (model.arch == LLM_ARCH_BERT) { + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}); - layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); - layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}); + } else { + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + } + + layer.layer_out_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_LAYER_OUT_NORM, "weight", i), {n_embd}); + layer.layer_out_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_LAYER_OUT_NORM, "bias", i), {n_embd}); } } break; 
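For nomic-bert only the fused `wqkv` matrix of shape `{n_embd, n_embd + 2*n_embd_gqa}` is created, so the graph later carves the result of a single matmul into Q, K and V at fixed row offsets. The equivalent slicing in numpy, with illustrative sizes for the plain multi-head case where `n_embd_gqa == n_embd`:

```python
import numpy as np

n_embd, n_embd_gqa, n_tokens = 64, 64, 4  # toy sizes, no grouped KV
rng = np.random.default_rng(0)

wqkv = rng.standard_normal((n_embd + 2 * n_embd_gqa, n_embd)).astype(np.float32)
x    = rng.standard_normal((n_embd, n_tokens)).astype(np.float32)

qkv = wqkv @ x  # one fused projection instead of three

# Split at the same offsets the ggml_view_2d calls use: 0, n_embd, n_embd + n_embd_gqa.
q = qkv[:n_embd]
k = qkv[n_embd:n_embd + n_embd_gqa]
v = qkv[n_embd + n_embd_gqa:]
assert q.shape == (n_embd, n_tokens)
assert k.shape == v.shape == (n_embd_gqa, n_tokens)
```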
case LLM_ARCH_BLOOM: @@ -5773,6 +5826,7 @@ struct llm_build_context { struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); const int64_t n_embd_head = hparams.n_embd_head_v; + const int64_t n_embd_gqa = hparams.n_embd_v_gqa(); GGML_ASSERT(n_embd_head == hparams.n_embd_head_k); struct ggml_tensor * cur; @@ -5789,7 +5843,9 @@ struct llm_build_context { // token types are hardcoded to zero ("Sentence A") struct ggml_tensor * type_row0 = ggml_view_1d(ctx0, model.type_embd, n_embd, 0); inpL = ggml_add(ctx0, inpL, type_row0); - inpL = ggml_add(ctx0, ggml_get_rows(ctx0, model.pos_embd, inp_pos), inpL); + if (model.arch == LLM_ARCH_BERT) { + inpL = ggml_add(ctx0, ggml_get_rows(ctx0, model.pos_embd, inp_pos), inpL); + } cb(inpL, "inp_embd", -1); // embed layer norm @@ -5805,7 +5861,7 @@ struct llm_build_context { struct ggml_tensor * cur = inpL; // self-attention - { + if (model.arch == LLM_ARCH_BERT) { struct ggml_tensor * Qcur = ggml_add(ctx0, ggml_mul_mat(ctx0, model.layers[il].wq, cur), model.layers[il].bq); cb(Qcur, "Qcur", il); @@ -5818,6 +5874,37 @@ struct llm_build_context { // seems like we just need to do this for Q? Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, model.layers[il].bo, + Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + cb(cur, "kqv_out", il); + } else { + // compute Q and K and RoPE them + cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); + cb(cur, "wqkv", il); + + struct ggml_tensor * Qcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd, n_tokens, cur->nb[1], 0*sizeof(float)*(n_embd))); + struct ggml_tensor * Kcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd))); + struct ggml_tensor * Vcur = ggml_cont(ctx0, ggml_view_2d(ctx0, cur, n_embd_gqa, n_tokens, cur->nb[1], 1*sizeof(float)*(n_embd + n_embd_gqa))); + + cb(Qcur, "Qcur", il); + cb(Kcur, "Kcur", il); + cb(Vcur, "Vcur", il); + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Qcur, "Qcur", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head, n_head_kv, n_tokens), inp_pos, + hparams.n_rot, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow + ); + cb(Kcur, "Kcur", il); + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); @@ -5828,25 +5915,34 @@ struct llm_build_context { cur = ggml_add(ctx0, cur, inpL); // attention layer norm - cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].attn_norm, model.layers[il].attn_norm_b, LLM_NORM, cb, il); + cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].attn_out_norm, model.layers[il].attn_out_norm_b, LLM_NORM, cb, il); struct ggml_tensor * ffn_inp = cur; cb(ffn_inp, "ffn_inp", il); // feed-forward network - cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, model.layers[il].ffn_up_b, - NULL, NULL, - model.layers[il].ffn_down, model.layers[il].ffn_down_b, - NULL, - LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + if (model.arch == LLM_ARCH_BERT) { + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, + NULL, NULL, + 
model.layers[il].ffn_down, model.layers[il].ffn_down_b, + NULL, + LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); + } else { + cur = llm_build_ffn(ctx0, cur, + model.layers[il].ffn_up, NULL, + model.layers[il].ffn_gate, NULL, + model.layers[il].ffn_down, NULL, + NULL, + LLM_FFN_SILU, LLM_FFN_PAR, cb, il); + } cb(cur, "ffn_out", il); // attentions bypass the intermediate layer cur = ggml_add(ctx0, cur, ffn_inp); // output layer norm - cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].ffn_norm, model.layers[il].ffn_norm_b, LLM_NORM, cb, il); + cur = llm_build_norm(ctx0, cur, hparams, model.layers[il].layer_out_norm, model.layers[il].layer_out_norm_b, LLM_NORM, cb, il); // input for next layer inpL = cur; @@ -7289,6 +7385,7 @@ static struct ggml_cgraph * llama_build_graph( result = llm.build_refact(); } break; case LLM_ARCH_BERT: + case LLM_ARCH_NOMIC_BERT: { result = llm.build_bert(); } break; From 6c00a066928b0475b865a2e3e709e2166e02d548 Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Tue, 13 Feb 2024 18:56:38 +0100 Subject: [PATCH 745/859] gguf : add python reader example (#5216) * Update CMakeLists.txt * Create reader.py * Update reader.py * Update reader.py another whitespace :| * Update reader.py * lintlintlint --- examples/CMakeLists.txt | 1 + gguf-py/examples/reader.py | 45 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+) create mode 100644 gguf-py/examples/reader.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 68ad89964..653abc73a 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -38,6 +38,7 @@ else() add_subdirectory(speculative) add_subdirectory(lookahead) add_subdirectory(lookup) + add_subdirectory(gguf) add_subdirectory(train-text-from-scratch) add_subdirectory(imatrix) if (LLAMA_BUILD_SERVER) diff --git a/gguf-py/examples/reader.py b/gguf-py/examples/reader.py new file mode 100644 index 000000000..62e0769da --- /dev/null +++ b/gguf-py/examples/reader.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 +import sys +from pathlib import Path +from gguf.gguf_reader import GGUFReader + + +sys.path.insert(0, str(Path(__file__).parent.parent)) + + +def read_gguf_file(gguf_file_path): + """ + Reads and prints key-value pairs and tensor information from a GGUF file in an improved format. + + Parameters: + - gguf_file_path: Path to the GGUF file. + """ + + reader = GGUFReader(gguf_file_path) + + # List all key-value pairs in a columnized format + print("Key-Value Pairs:") + max_key_length = max(len(key) for key in reader.fields.keys()) + for key, field in reader.fields.items(): + value = field.parts[field.data[0]] + print(f"{key:{max_key_length}} : {value}") + print("----") + + # List all tensors + print("Tensors:") + tensor_info_format = "{:<30} | Shape: {:<15} | Size: {:<12} | Quantization: {}" + print(tensor_info_format.format("Tensor Name", "Shape", "Size", "Quantization")) + print("-" * 80) + for tensor in reader.tensors: + shape_str = "x".join(map(str, tensor.shape)) + size_str = str(tensor.n_elements) + quantization_str = tensor.tensor_type.name + print(tensor_info_format.format(tensor.name, shape_str, size_str, quantization_str)) + + +if __name__ == '__main__': + if len(sys.argv) < 2: + print("Usage: reader.py ") + sys.exit(1) + gguf_file_path = sys.argv[1] + read_gguf_file(gguf_file_path) From f5ca054855dea83f424003162f26de376e5643f6 Mon Sep 17 00:00:00 2001 From: AT Date: Tue, 13 Feb 2024 15:44:25 -0600 Subject: [PATCH 746/859] Early return for zero size calls to get_tensor. 
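The reader.py example above exercises the three things most callers need from gguf-py's reader: the fields dict, each field's raw parts, and the tensors list. A condensed use of the same API, assuming a local `model.gguf` (a hypothetical path; the attributes are the ones the example already uses):

```python
from gguf.gguf_reader import GGUFReader

reader = GGUFReader("model.gguf")  # hypothetical local file

# Metadata: the same field.parts / field.data indexing as reader.py.
for key, field in list(reader.fields.items())[:5]:
    print(f"{key} = {field.parts[field.data[0]]}")

# Tensors: name, shape, element count and quantization type.
for tensor in reader.tensors[:5]:
    print(tensor.name, "x".join(map(str, tensor.shape)), tensor.n_elements, tensor.tensor_type.name)
```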
(#5482) * Early return for zero size calls to get_tensor. Signed-off-by: Adam Treat * Update ggml-kompute.cpp Co-authored-by: Georgi Gerganov * Update ggml-kompute.cpp Co-authored-by: Georgi Gerganov * Add an early return to the get/set tensor when the size is null. Signed-off-by: Adam Treat * Early return after the assertions. Signed-off-by: Adam Treat * Since we do the early return in the generic backend now no reason to do so here as well. Signed-off-by: Adam Treat --------- Signed-off-by: Adam Treat Co-authored-by: Georgi Gerganov --- ggml-backend.c | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/ggml-backend.c b/ggml-backend.c index 9ee81b766..87eea8440 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -219,6 +219,10 @@ GGML_CALL void ggml_backend_tensor_set(struct ggml_tensor * tensor, const void * GGML_ASSERT(buf != NULL && "tensor buffer not set"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor write out of bounds"); + if (!size) { + return; + } + tensor->buffer->iface.set_tensor(buf, tensor, data, offset, size); } @@ -229,6 +233,10 @@ GGML_CALL void ggml_backend_tensor_get(const struct ggml_tensor * tensor, void * GGML_ASSERT(tensor->buffer != NULL && "tensor buffer not set"); GGML_ASSERT(offset + size <= ggml_nbytes(tensor) && "tensor read out of bounds"); + if (!size) { + return; + } + tensor->buffer->iface.get_tensor(buf, tensor, data, offset, size); } From aa2341298924ac89778252015efcb792f2df1e20 Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Wed, 14 Feb 2024 08:38:35 +0100 Subject: [PATCH 747/859] llava : support v1.6 (#5267) * Create llava-survery-v2.py * Update convert-image-encoder-to-gguf.py * Update convert-image-encoder-to-gguf.py * Rename llava-survery-v2.py to llava-surgery-v2.py * Update convert-image-encoder-to-gguf.py will now search for projector * Update convert-image-encoder-to-gguf.py whoops * Update llava-surgery-v2.py * Clip: Bugfix for normalization (it did not load the 3 std and mean values) Clip: bicubic resize function Clip: added save-to-bmp/pil for debugging and conversion from/to 32/8 images Clip: added normalization with FP16 precision simulation (image tensors match HF implementation, can be switched off, only used for llava-1.6) Clip: added newline tensor, mergetype kv, image-grid kv, new resize-pad function with resolution from gridpoints Clip: clip_image_preprocess now returns a float * vector instead of float, this way llava 1.5 and 1.6 are supported llava: added ggml cpu graph for embedding patching, added spatial_unpad preliminary support, added a lot of comments that need to be cleaned when all is final convert-image-encoder: fixed image-grid flattening * whitespace corrections * ws * Tensors are now properly permuted. Before the embeddings were inserted 1:1, now they are split into the 24x24 patches as in reference.
* ws * added verbose_prompt support into cli added stopwords for llava-1.6 into cli * moved llava functions to llava.cpp, made clip.h C compatible API, replaced vector style functions with pointers, added a debug define to remove functions from compilation while not needed * ws * convert : skip unknown tensors (need for LLaVA) * llava : update readme * llava : fix compile warnings * llava : style * convert : add --skip-unknown CLI arg * server : remove clip structs * bugfix for non llava-1.6 It should now work with llava-1.5 as well * clip : minor code rearrange * llava : update readme a bit --------- Co-authored-by: John Co-authored-by: Georgi Gerganov --- convert.py | 37 +- examples/llava/README.md | 12 +- examples/llava/clip.cpp | 766 +++++++++++++++--- examples/llava/clip.h | 47 +- .../llava/convert-image-encoder-to-gguf.py | 66 +- examples/llava/llava-cli.cpp | 26 +- examples/llava/llava-surgery-v2.py | 167 ++++ examples/llava/llava.cpp | 296 ++++++- examples/llava/llava.h | 2 - examples/server/server.cpp | 15 +- 10 files changed, 1229 insertions(+), 205 deletions(-) create mode 100644 examples/llava/llava-surgery-v2.py diff --git a/convert.py b/convert.py index 323e8058d..63a0a5d78 100755 --- a/convert.py +++ b/convert.py @@ -1173,7 +1173,7 @@ def convert_to_output_type(model: LazyModel, output_type: GGMLFileType) -> LazyM for (name, tensor) in model.items()} -def convert_model_names(model: LazyModel, params: Params) -> LazyModel: +def convert_model_names(model: LazyModel, params: Params, skip_unknown: bool) -> LazyModel: tmap = gguf.TensorNameMap(ARCH, params.n_layer) should_skip: set[gguf.MODEL_TENSOR] = set(gguf.MODEL_TENSOR_SKIP.get(ARCH, [])) @@ -1199,7 +1199,11 @@ def convert_model_names(model: LazyModel, params: Params) -> LazyModel: for name, lazy_tensor in model.items(): tensor_type, name_new = tmap.get_type_and_name(name, try_suffixes = (".weight", ".bias")) or (None, None) if name_new is None: - raise Exception(f"Unexpected tensor name: {name}") + if skip_unknown: + print(f"Unexpected tensor name: {name} - skipping") + continue + else: + raise Exception(f"Unexpected tensor name: {name}. Use --skip-unknown to ignore it (e.g. 
LLaVA)") if tensor_type in should_skip: print(f"skipping tensor {name_new}") @@ -1377,19 +1381,20 @@ def main(args_in: list[str] | None = None) -> None: output_choices.append("q8_0") vocab_types = ["spm", "bpe", "hfft"] parser = argparse.ArgumentParser(description="Convert a LLaMa model to a GGML compatible file") - parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) - parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") - parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") - parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") - parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") - parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") - parser.add_argument("--vocab-type", choices=vocab_types, help="The vocabulary format used to define the tokenizer model (default: spm)", default="spm") - parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") - parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") - parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") - parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default=DEFAULT_CONCURRENCY) - parser.add_argument("--big-endian", action="store_true", help="model is executed on big endian machine") - parser.add_argument("--pad-vocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") + parser.add_argument("--awq-path", type=Path, help="Path to scale awq cache file", default=None) + parser.add_argument("--dump", action="store_true", help="don't convert, just show what's in the model") + parser.add_argument("--dump-single", action="store_true", help="don't convert, just show what's in a single model file") + parser.add_argument("--vocab-only", action="store_true", help="extract only the vocab") + parser.add_argument("--outtype", choices=output_choices, help="output format - note: q8_0 may be very slow (default: f16 or f32 based on input)") + parser.add_argument("--vocab-dir", type=Path, help="directory containing tokenizer.model, if separate from model file") + parser.add_argument("--vocab-type", choices=vocab_types, help="The vocabulary format used to define the tokenizer model (default: spm)", default="spm") + parser.add_argument("--outfile", type=Path, help="path to write to; default: based on input") + parser.add_argument("model", type=Path, help="directory containing model file, or model file itself (*.pth, *.pt, *.bin)") + parser.add_argument("--ctx", type=int, help="model training context (default: based on input)") + parser.add_argument("--concurrency", type=int, help=f"concurrency used for conversion (default: {DEFAULT_CONCURRENCY})", default=DEFAULT_CONCURRENCY) + parser.add_argument("--big-endian", action="store_true", help="model is executed on big endian machine") + parser.add_argument("--pad-vocab", action="store_true", help="add pad tokens when model vocab expects more than tokenizer metadata provides") + parser.add_argument("--skip-unknown", action="store_true", help="skip unknown tensor names instead of 
failing") args = parser.parse_args(args_in) if args.awq_path: @@ -1461,7 +1466,7 @@ def main(args_in: list[str] | None = None) -> None: print(f"Special vocab info: {special_vocab}") model = model_plus.model - model = convert_model_names(model, params) + model = convert_model_names(model, params, args.skip_unknown) ftype = pick_output_type(model, args.outtype) model = convert_to_output_type(model, ftype) outfile = args.outfile or default_outfile(model_plus.paths, ftype) diff --git a/examples/llava/README.md b/examples/llava/README.md index 19f1a50a2..e2ef0eff1 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -19,9 +19,9 @@ After building, run: `./llava-cli` to see the usage. For example: **note**: A lower temperature like 0.1 is recommended for better quality. add `--temp 0.1` to the command to do so. -## Model conversion +## LLaVA 1.5 -- Clone `llava-v15-7b` and `clip-vit-large-patch14-336` locally: +- Clone a LLaVA and a CLIP model ([available options](https://github.com/haotian-liu/LLaVA/blob/main/docs/MODEL_ZOO.md)). For example: ```sh git clone https://huggingface.co/liuhaotian/llava-v1.5-7b @@ -55,8 +55,14 @@ python ./convert.py ../llava-v1.5-7b Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` directory. +## LLaVA 1.6 + +- Use `llava-surgery-v2.py` + +- TODO: add detailed instructions + ## TODO -- [ ] Support non-CPU backend for the image encoding part. +- [x] Support non-CPU backend for the image encoding part. - [ ] Support different sampling methods. - [ ] Support more model variants. diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index ccd0d85ad..9c5091e61 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -1,7 +1,7 @@ // NOTE: This is modified from clip.cpp only for LLaVA, // so there might be still unnecessary artifacts hanging around // I'll gradually clean and extend it - +// Note: Even when using identical normalized image inputs (see normalize_image_u8_to_f32()) we have a significant difference in resulting embeddings compared to pytorch #include "clip.h" #include "ggml.h" #include "ggml-alloc.h" @@ -30,6 +30,26 @@ #include #include #include +#include + +//#define CLIP_DEBUG_FUNCTIONS + +// RGB uint8 image +struct clip_image_u8 { + int nx; + int ny; + + std::vector buf; +}; + +// RGB float32 image (NHWC) +// Memory layout: RGBRGBRGB... +struct clip_image_f32 { + int nx; + int ny; + + std::vector buf; +}; static std::string format(const char * fmt, ...) { va_list ap; @@ -50,50 +70,56 @@ static std::string format(const char * fmt, ...) 
{ // key constants // -#define KEY_FTYPE "general.file_type" -#define KEY_NAME "general.name" -#define KEY_DESCRIPTION "general.description" -#define KEY_HAS_TEXT_ENC "clip.has_text_encoder" -#define KEY_HAS_VIS_ENC "clip.has_vision_encoder" +#define KEY_FTYPE "general.file_type" +#define KEY_NAME "general.name" +#define KEY_DESCRIPTION "general.description" +#define KEY_HAS_TEXT_ENC "clip.has_text_encoder" +#define KEY_HAS_VIS_ENC "clip.has_vision_encoder" #define KEY_HAS_LLAVA_PROJ "clip.has_llava_projector" -#define KEY_USE_GELU "clip.use_gelu" -#define KEY_N_EMBD "clip.%s.embedding_length" -#define KEY_N_FF "clip.%s.feed_forward_length" -#define KEY_N_BLOCK "clip.%s.block_count" -#define KEY_N_HEAD "clip.%s.attention.head_count" +#define KEY_USE_GELU "clip.use_gelu" +#define KEY_N_EMBD "clip.%s.embedding_length" +#define KEY_N_FF "clip.%s.feed_forward_length" +#define KEY_N_BLOCK "clip.%s.block_count" +#define KEY_N_HEAD "clip.%s.attention.head_count" #define KEY_LAYER_NORM_EPS "clip.%s.attention.layer_norm_epsilon" -#define KEY_PROJ_DIM "clip.%s.projection_dim" -#define KEY_TOKENS "tokenizer.ggml.tokens" -#define KEY_N_POSITIONS "clip.text.context_length" -#define KEY_IMAGE_SIZE "clip.vision.image_size" -#define KEY_PATCH_SIZE "clip.vision.patch_size" -#define KEY_IMAGE_MEAN "clip.vision.image_mean" -#define KEY_IMAGE_STD "clip.vision.image_std" -#define KEY_PROJ_TYPE "clip.projector_type" +#define KEY_PROJ_DIM "clip.%s.projection_dim" +#define KEY_TOKENS "tokenizer.ggml.tokens" +#define KEY_N_POSITIONS "clip.text.context_length" +#define KEY_IMAGE_SIZE "clip.vision.image_size" +#define KEY_PATCH_SIZE "clip.vision.patch_size" +#define KEY_IMAGE_MEAN "clip.vision.image_mean" +#define KEY_IMAGE_STD "clip.vision.image_std" +#define KEY_PROJ_TYPE "clip.projector_type" + +#define KEY_MM_PATCH_MERGE_TYPE "clip.vision.mm_patch_merge_type" +#define KEY_IMAGE_GRID_PINPOINTS "clip.vision.image_grid_pinpoints" +#define KEY_IMAGE_CROP_RESOLUTION "clip.vision.image_crop_resolution" + // // tensor name constants // -#define TN_TOKEN_EMBD "%s.token_embd.weight" -#define TN_POS_EMBD "%s.position_embd.weight" -#define TN_CLASS_EMBD "v.class_embd" -#define TN_PATCH_EMBD "v.patch_embd.weight" -#define TN_ATTN_K "%s.blk.%d.attn_k.%s" -#define TN_ATTN_Q "%s.blk.%d.attn_q.%s" -#define TN_ATTN_V "%s.blk.%d.attn_v.%s" -#define TN_ATTN_OUTPUT "%s.blk.%d.attn_out.%s" -#define TN_FFN_DOWN "%s.blk.%d.ffn_down.%s" -#define TN_FFN_UP "%s.blk.%d.ffn_up.%s" -#define TN_LN_1 "%s.blk.%d.ln1.%s" -#define TN_LN_2 "%s.blk.%d.ln2.%s" -#define TN_LN_PRE "%s.pre_ln.%s" -#define TN_LN_POST "%s.post_ln.%s" -#define TN_TEXT_PROJ "text_projection.weight" -#define TN_VIS_PROJ "visual_projection.weight" -#define TN_LLAVA_PROJ "mm.%d.%s" -#define TN_MVLM_PROJ_MLP "mm.model.mlp.%d.%s" +#define TN_TOKEN_EMBD "%s.token_embd.weight" +#define TN_POS_EMBD "%s.position_embd.weight" +#define TN_CLASS_EMBD "v.class_embd" +#define TN_PATCH_EMBD "v.patch_embd.weight" +#define TN_ATTN_K "%s.blk.%d.attn_k.%s" +#define TN_ATTN_Q "%s.blk.%d.attn_q.%s" +#define TN_ATTN_V "%s.blk.%d.attn_v.%s" +#define TN_ATTN_OUTPUT "%s.blk.%d.attn_out.%s" +#define TN_FFN_DOWN "%s.blk.%d.ffn_down.%s" +#define TN_FFN_UP "%s.blk.%d.ffn_up.%s" +#define TN_LN_1 "%s.blk.%d.ln1.%s" +#define TN_LN_2 "%s.blk.%d.ln2.%s" +#define TN_LN_PRE "%s.pre_ln.%s" +#define TN_LN_POST "%s.post_ln.%s" +#define TN_TEXT_PROJ "text_projection.weight" +#define TN_VIS_PROJ "visual_projection.weight" +#define TN_LLAVA_PROJ "mm.%d.%s" +#define TN_MVLM_PROJ_MLP "mm.model.mlp.%d.%s" #define 
TN_MVLM_PROJ_BLOCK "mm.model.mb_block.%d.block.%d.%s" +#define TN_IMAGE_NEWLINE "model.image_newline" enum projector_type { @@ -104,8 +130,8 @@ enum projector_type { }; static std::map PROJECTOR_TYPE_NAMES = { - { PROJECTOR_TYPE_MLP, "mlp" }, - { PROJECTOR_TYPE_LDP, "ldp" }, + { PROJECTOR_TYPE_MLP, "mlp" }, + { PROJECTOR_TYPE_LDP, "ldp" }, }; @@ -165,7 +191,6 @@ static std::string gguf_data_to_str(enum gguf_type type, const void * data, int } } - static void replace_all(std::string & s, const std::string & search, const std::string & replace) { std::string result; for (size_t pos = 0; ; pos += search.length()) { @@ -217,7 +242,7 @@ static std::string gguf_kv_to_str(const struct gguf_context * ctx_gguf, int i) { } } -static void print_tensor_info(const ggml_tensor* tensor, const char* prefix = "") { +static void print_tensor_info(const ggml_tensor * tensor, const char * prefix = "") { size_t tensor_size = ggml_nbytes(tensor); printf("%s: n_dims = %d, name = %s, tensor_size=%zu, shape:[%" PRId64 ", %" PRId64 ", %" PRId64 ", %" PRId64 "], type = %s\n", prefix, ggml_n_dims(tensor), tensor->name, tensor_size, @@ -233,31 +258,136 @@ static projector_type clip_projector_type_from_string(const std::string & name) return PROJECTOR_TYPE_UNKNOWN; } -// -// image data -// +#ifdef CLIP_DEBUG_FUNCTIONS +static void clip_image_write_image_to_ppm(const clip_image_u8& img, const std::string& filename) { + std::ofstream file(filename, std::ios::binary); + if (!file.is_open()) { + std::cerr << "Failed to open file for writing: " << filename << std::endl; + return; + } -// RGB uint8 image -struct clip_image_u8 { - int nx; - int ny; + // PPM header: P6 format, width, height, and max color value + file << "P6\n" << img.nx << " " << img.ny << "\n255\n"; - std::vector buf; -}; + // Write pixel data + for (size_t i = 0; i < img.buf.size(); i += 3) { + // PPM expects binary data in RGB format, which matches our image buffer + file.write(reinterpret_cast(&img.buf[i]), 3); + } -// RGB float32 image (NHWC) -// Memory layout: RGBRGBRGB... 
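Both image structs use a flat, interleaved buffer, so channel `c` of pixel `(x, y)` sits at index `3 * (y * nx + x) + c`; the PPM/BMP debug writers in this hunk and the resize helpers further down all rely on this layout. A self-contained sketch (the struct name here is a stand-in, not the patch's type):

```cpp
// Sketch: addressing one pixel in the flat RGBRGB... (NHWC) layout
// used by clip_image_u8 and clip_image_f32.
#include <cstdint>
#include <vector>

struct image_u8_sketch {
    int nx, ny;               // width, height
    std::vector<uint8_t> buf; // 3 * nx * ny interleaved RGB bytes
};

static uint8_t channel_at(const image_u8_sketch & img, int x, int y, int c) {
    return img.buf[3 * (y * img.nx + x) + c]; // c: 0 = R, 1 = G, 2 = B
}
```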
-struct clip_image_f32 { - int nx; - int ny; + file.close(); +} + +static void clip_image_save_to_bmp(const clip_image_u8& img, const std::string& filename) { + std::ofstream file(filename, std::ios::binary); + if (!file.is_open()) { + std::cerr << "Failed to open file for writing: " << filename << std::endl; + return; + } + + int fileSize = 54 + 3 * img.nx * img.ny; // File header + info header + pixel data + int bytesPerPixel = 3; + int widthInBytes = img.nx * bytesPerPixel; + int paddingAmount = (4 - (widthInBytes % 4)) % 4; + int stride = widthInBytes + paddingAmount; + + // Bitmap file header + unsigned char fileHeader[14] = { + 'B','M', // Signature + 0,0,0,0, // Image file size in bytes + 0,0,0,0, // Reserved + 54,0,0,0 // Start of pixel array + }; + + // Total file size + fileSize = 54 + (stride * img.ny); + fileHeader[2] = (unsigned char)(fileSize); + fileHeader[3] = (unsigned char)(fileSize >> 8); + fileHeader[4] = (unsigned char)(fileSize >> 16); + fileHeader[5] = (unsigned char)(fileSize >> 24); + + // Bitmap information header (BITMAPINFOHEADER) + unsigned char infoHeader[40] = { + 40,0,0,0, // Size of this header (40 bytes) + 0,0,0,0, // Image width + 0,0,0,0, // Image height + 1,0, // Number of color planes + 24,0, // Bits per pixel + 0,0,0,0, // No compression + 0,0,0,0, // Image size (can be 0 for no compression) + 0,0,0,0, // X pixels per meter (not specified) + 0,0,0,0, // Y pixels per meter (not specified) + 0,0,0,0, // Total colors (color table not used) + 0,0,0,0 // Important colors (all are important) + }; + + // Width and height in the information header + infoHeader[4] = (unsigned char)(img.nx); + infoHeader[5] = (unsigned char)(img.nx >> 8); + infoHeader[6] = (unsigned char)(img.nx >> 16); + infoHeader[7] = (unsigned char)(img.nx >> 24); + infoHeader[8] = (unsigned char)(img.ny); + infoHeader[9] = (unsigned char)(img.ny >> 8); + infoHeader[10] = (unsigned char)(img.ny >> 16); + infoHeader[11] = (unsigned char)(img.ny >> 24); + + // Write file headers + file.write(reinterpret_cast(fileHeader), sizeof(fileHeader)); + file.write(reinterpret_cast(infoHeader), sizeof(infoHeader)); + + // Pixel data + std::vector padding(3, 0); // Max padding size to be added to each row + for (int y = img.ny - 1; y >= 0; --y) { // BMP files are stored bottom-to-top + for (int x = 0; x < img.nx; ++x) { + // Each pixel + size_t pixelIndex = (y * img.nx + x) * 3; + unsigned char pixel[3] = { + img.buf[pixelIndex + 2], // BMP stores pixels in BGR format + img.buf[pixelIndex + 1], + img.buf[pixelIndex] + }; + file.write(reinterpret_cast(pixel), 3); + } + // Write padding for the row + file.write(reinterpret_cast(padding.data()), paddingAmount); + } + + file.close(); +} + +// debug function to convert f32 to u8 +static void clip_image_convert_f32_to_u8(const clip_image_f32& src, clip_image_u8& dst) { + dst.nx = src.nx; + dst.ny = src.ny; + dst.buf.resize(3 * src.nx * src.ny); + for (size_t i = 0; i < src.buf.size(); ++i) { + dst.buf[i] = static_cast(std::min(std::max(int(src.buf[i] * 255.0f), 0), 255)); + } +} +#endif - std::vector buf; -}; // // clip layers // +struct clip_hparams { + int32_t image_size; + int32_t patch_size; + int32_t hidden_size; + int32_t n_intermediate; + int32_t projection_dim; + int32_t n_head; + int32_t n_layer; + + float eps; + + char mm_patch_merge_type[32] = "flat"; // spatial_unpad or flat (default) + + int32_t image_grid_pinpoints[32]; + int32_t image_crop_resolution; +}; + struct clip_layer { // attention struct ggml_tensor * k_w; @@ -287,7 +417,7 @@ struct 
clip_layer { }; struct clip_vision_model { - struct clip_vision_hparams hparams; + struct clip_hparams hparams; // embeddings struct ggml_tensor * class_embedding; @@ -310,6 +440,8 @@ struct clip_vision_model { struct ggml_tensor * mm_2_w = NULL; struct ggml_tensor * mm_2_b = NULL; + struct ggml_tensor * image_newline = NULL; + // Yi type models with mlp+normalization projection struct ggml_tensor * mm_1_w = NULL; // Yi type models have 0, 1, 3, 4 struct ggml_tensor * mm_1_b = NULL; @@ -364,9 +496,10 @@ struct clip_ctx { std::vector buf_compute_meta; // memory buffers to evaluate the model - ggml_backend_buffer_t params_buffer = NULL; + ggml_backend_buffer_t params_buffer = NULL; ggml_backend_buffer_t compute_buffer = NULL; - ggml_backend_t backend = NULL; + + ggml_backend_t backend = NULL; ggml_gallocr_t compute_alloc = NULL; }; @@ -379,18 +512,19 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 const auto & model = ctx->vision_model; const auto & hparams = model.hparams; - const int image_size = hparams.image_size; - const int patch_size = hparams.patch_size; - const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); - const int num_positions = num_patches + 1; - const int hidden_size = hparams.hidden_size; - const int n_head = hparams.n_head; - const int d_head = hidden_size / n_head; - const int n_layer = hparams.n_layer; - //const int n_intermediate = hparams.n_intermediate; - //const int projection_dim = hparams.projection_dim; - const float eps = hparams.eps; - int batch_size = imgs->size; + const int image_size = hparams.image_size; + const int patch_size = hparams.patch_size; + const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); + const int num_patches_per_side = image_size / patch_size; GGML_UNUSED(num_patches_per_side); + const int num_positions = num_patches + 1; + const int hidden_size = hparams.hidden_size; + const int n_head = hparams.n_head; + const int d_head = hidden_size / n_head; + const int n_layer = hparams.n_layer; + const float eps = hparams.eps; + + const int batch_size = imgs->size; + if (ctx->has_llava_projector) { GGML_ASSERT(batch_size == 1); } @@ -540,7 +674,6 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32 embeddings = ggml_add(ctx0, embeddings, model.mm_0_b); embeddings = ggml_gelu(ctx0, embeddings); - embeddings = ggml_mul_mat(ctx0, model.mm_2_w, embeddings); embeddings = ggml_add(ctx0, embeddings, model.mm_2_b); @@ -791,10 +924,10 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { if (idx != -1) { const std::string proj_type = gguf_get_val_str(ctx, idx); new_clip->proj_type = clip_projector_type_from_string(proj_type); - } - else { + } else { new_clip->proj_type = PROJECTOR_TYPE_MLP; } + if (new_clip->proj_type == PROJECTOR_TYPE_MLP) { if (gguf_find_tensor(ctx, format(TN_LLAVA_PROJ, 3, "weight").c_str()) != -1) { new_clip->proj_type = PROJECTOR_TYPE_MLP_NORM; @@ -920,11 +1053,41 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { hparams.projection_dim = get_u32(ctx, format(KEY_PROJ_DIM, "vision")); hparams.eps = get_f32(ctx, format(KEY_LAYER_NORM_EPS, "vision")); + try { + int idx = get_key_idx(ctx, KEY_IMAGE_GRID_PINPOINTS); + int n = gguf_get_arr_n(ctx, idx); + const int32_t * pinpoints = (const int32_t *)gguf_get_arr_data(ctx, idx); + for (int i = 0; i < 32 && i < n && pinpoints[i] != 0; ++i) { + hparams.image_grid_pinpoints[i] = pinpoints[i]; + } + if (n < 32) + 
hparams.image_grid_pinpoints[n] = 0; + } catch (std::runtime_error & e) { + hparams.image_grid_pinpoints[0] = 0; + } + + try { + int idx = get_key_idx(ctx, KEY_MM_PATCH_MERGE_TYPE); + strcpy(hparams.mm_patch_merge_type, gguf_get_val_str(ctx, idx)); + } catch (std::runtime_error & e) { + strcpy(hparams.mm_patch_merge_type, "flat"); + } + + try { + hparams.image_crop_resolution = get_u32(ctx, KEY_IMAGE_CROP_RESOLUTION); // llava-1.6 + } catch (const std::exception & e) { + hparams.image_crop_resolution = hparams.image_size; + } + int idx_mean = get_key_idx(ctx, KEY_IMAGE_MEAN); int idx_std = get_key_idx(ctx, KEY_IMAGE_STD); + + const float * mean_data = (const float *)gguf_get_arr_data(ctx, idx_mean); + const float * std_data = (const float *)gguf_get_arr_data(ctx, idx_std); + for (int i = 0; i < 3; ++i) { - new_clip->image_mean[i] = *((const float *)gguf_get_arr_data(ctx, idx_mean)); - new_clip->image_std[i] = *((const float *)gguf_get_arr_data(ctx, idx_std)); + new_clip->image_mean[i] = mean_data[i]; + new_clip->image_std[i] = std_data[i]; } if (verbosity >= 2) { @@ -936,13 +1099,27 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("v_projection_dim %d\n", hparams.projection_dim); printf("v_n_head %d\n", hparams.n_head); printf("v_n_layer %d\n", hparams.n_layer); + printf("v_eps %f\n", hparams.eps); + printf("v_image_mean %f %f %f\n", new_clip->image_mean[0], new_clip->image_mean[1], new_clip->image_mean[2]); + printf("v_image_std %f %f %f\n", new_clip->image_std[0], new_clip->image_std[1], new_clip->image_std[2]); + printf("v_image_grid_pinpoints: "); + for (int i = 0; i < 32 && hparams.image_grid_pinpoints[i] != 0; ++i) { + printf("%d ", hparams.image_grid_pinpoints[i]); + } + printf("\n"); + printf("v_mm_patch_merge_type: %s\n", hparams.mm_patch_merge_type); + } - vision_model.patch_embeddings = get_tensor(new_clip->ctx_data, TN_PATCH_EMBD); - vision_model.class_embedding = get_tensor(new_clip->ctx_data, TN_CLASS_EMBD); - vision_model.position_embeddings = get_tensor(new_clip->ctx_data, format(TN_POS_EMBD, "v")); - vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); - vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); + try { + vision_model.patch_embeddings = get_tensor(new_clip->ctx_data, TN_PATCH_EMBD); + vision_model.class_embedding = get_tensor(new_clip->ctx_data, TN_CLASS_EMBD); + vision_model.position_embeddings = get_tensor(new_clip->ctx_data, format(TN_POS_EMBD, "v")); + vision_model.pre_ln_w = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "weight")); + vision_model.pre_ln_b = get_tensor(new_clip->ctx_data, format(TN_LN_PRE, "v", "bias")); + } catch (const std::exception & e) { + fprintf(stderr, "%s: failed to load vision model tensors\n", __func__); + } // LLaVA projection if (new_clip->proj_type == PROJECTOR_TYPE_MLP || new_clip->proj_type == PROJECTOR_TYPE_MLP_NORM) { @@ -968,40 +1145,43 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { vision_model.mm_4_w = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 4, "weight")); vision_model.mm_4_b = get_tensor(new_clip->ctx_data, format(TN_LLAVA_PROJ, 4, "bias")); } catch (std::runtime_error & e) { } - } - else if (new_clip->proj_type == PROJECTOR_TYPE_LDP) { + try { + vision_model.image_newline = get_tensor(new_clip->ctx_data, TN_IMAGE_NEWLINE); + // fprintf(stderr, "%s: image_newline tensor (llava-1.6) found\n", __func__); + } catch (std::runtime_error & e) { } + } else if
(new_clip->proj_type == PROJECTOR_TYPE_LDP) { // MobileVLM projection - vision_model.mm_model_mlp_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 1, "weight")); - vision_model.mm_model_mlp_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 1, "bias")); - vision_model.mm_model_mlp_3_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 3, "weight")); - vision_model.mm_model_mlp_3_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 3, "bias")); - vision_model.mm_model_block_1_block_0_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "0.weight")); - vision_model.mm_model_block_1_block_0_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "1.weight")); - vision_model.mm_model_block_1_block_0_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "1.bias")); + vision_model.mm_model_mlp_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 1, "weight")); + vision_model.mm_model_mlp_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 1, "bias")); + vision_model.mm_model_mlp_3_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 3, "weight")); + vision_model.mm_model_mlp_3_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_MLP, 3, "bias")); + vision_model.mm_model_block_1_block_0_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "0.weight")); + vision_model.mm_model_block_1_block_0_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "1.weight")); + vision_model.mm_model_block_1_block_0_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 0, "1.bias")); vision_model.mm_model_block_1_block_1_fc1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 1, "fc1.weight")); vision_model.mm_model_block_1_block_1_fc1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 1, "fc1.bias")); vision_model.mm_model_block_1_block_1_fc2_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 1, "fc2.weight")); vision_model.mm_model_block_1_block_1_fc2_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 1, "fc2.bias")); - vision_model.mm_model_block_1_block_2_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "0.weight")); - vision_model.mm_model_block_1_block_2_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "1.weight")); - vision_model.mm_model_block_1_block_2_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "1.bias")); - vision_model.mm_model_block_2_block_0_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "0.weight")); - vision_model.mm_model_block_2_block_0_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "1.weight")); - vision_model.mm_model_block_2_block_0_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "1.bias")); + vision_model.mm_model_block_1_block_2_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "0.weight")); + vision_model.mm_model_block_1_block_2_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "1.weight")); + vision_model.mm_model_block_1_block_2_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 1, 2, "1.bias")); + vision_model.mm_model_block_2_block_0_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "0.weight")); + vision_model.mm_model_block_2_block_0_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "1.weight")); + vision_model.mm_model_block_2_block_0_1_b = 
get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 0, "1.bias")); vision_model.mm_model_block_2_block_1_fc1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 1, "fc1.weight")); vision_model.mm_model_block_2_block_1_fc1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 1, "fc1.bias")); vision_model.mm_model_block_2_block_1_fc2_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 1, "fc2.weight")); vision_model.mm_model_block_2_block_1_fc2_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 1, "fc2.bias")); - vision_model.mm_model_block_2_block_2_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "0.weight")); - vision_model.mm_model_block_2_block_2_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "1.weight")); - vision_model.mm_model_block_2_block_2_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "1.bias")); - } - else { + vision_model.mm_model_block_2_block_2_0_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "0.weight")); + vision_model.mm_model_block_2_block_2_1_w = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "1.weight")); + vision_model.mm_model_block_2_block_2_1_b = get_tensor(new_clip->ctx_data, format(TN_MVLM_PROJ_BLOCK, 2, 2, "1.bias")); + } else { std::string proj_type = PROJECTOR_TYPE_NAMES[new_clip->proj_type]; throw std::runtime_error(format("%s: don't support projector with: %s currently\n", __func__, proj_type.c_str())); } vision_model.layers.resize(hparams.n_layer); + for (int il = 0; il < hparams.n_layer; ++il) { auto & layer = vision_model.layers[il]; layer.k_w = get_tensor(new_clip->ctx_data, format(TN_ATTN_K, "v", il, "weight")); @@ -1084,24 +1264,255 @@ bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length return true; } -// normalize: x = (x - mean) / std -// TODO: implement bicubic interpolation instead of linear. 
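The hunk below replaces the old linear path with explicit `bilinear_resize()` and `bicubic_resize()` helpers. The bilinear step interpolates horizontally between the two top and the two bottom neighbors, then vertically between those results; a standalone sketch with made-up intensities (values are illustrative, not from the patch):

```cpp
// Sketch: one bilinear sample from a 2x2 neighborhood.
#include <cstdio>

static float lerp(float s, float e, float t) { return s + (e - s) * t; }

int main() {
    float tl = 10.0f, tr = 20.0f, bl = 30.0f, br = 40.0f; // corner values
    float x_lerp = 0.25f, y_lerp = 0.5f;                  // fractional offsets
    float top    = lerp(tl, tr, x_lerp);                  // 12.5
    float bottom = lerp(bl, br, x_lerp);                  // 32.5
    std::printf("%.2f\n", lerp(top, bottom, y_lerp));     // 22.50
    return 0;
}
```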
-bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32 * res, const bool pad2square) { +// Linear interpolation between two points +inline float lerp(float s, float e, float t) { + return s + (e - s) * t; +} +// Bilinear resize function +static void bilinear_resize(const clip_image_u8& src, clip_image_u8& dst, int target_width, int target_height) { + dst.nx = target_width; + dst.ny = target_height; + dst.buf.resize(3 * target_width * target_height); + + float x_ratio = static_cast<float>(src.nx - 1) / target_width; + float y_ratio = static_cast<float>(src.ny - 1) / target_height; + + for (int y = 0; y < target_height; y++) { + for (int x = 0; x < target_width; x++) { + float px = x_ratio * x; + float py = y_ratio * y; + int x_floor = static_cast<int>(px); + int y_floor = static_cast<int>(py); + float x_lerp = px - x_floor; + float y_lerp = py - y_floor; + + for (int c = 0; c < 3; c++) { + float top = lerp( + static_cast<float>(src.buf[3 * (y_floor * src.nx + x_floor) + c]), + static_cast<float>(src.buf[3 * (y_floor * src.nx + (x_floor + 1)) + c]), + x_lerp + ); + float bottom = lerp( + static_cast<float>(src.buf[3 * ((y_floor + 1) * src.nx + x_floor) + c]), + static_cast<float>(src.buf[3 * ((y_floor + 1) * src.nx + (x_floor + 1)) + c]), + x_lerp + ); + dst.buf[3 * (y * target_width + x) + c] = static_cast<uint8_t>(lerp(top, bottom, y_lerp)); + } + } + } +} + +// Normalize image to float32 - careful with pytorch .to(model.device, dtype=torch.float16) - this sometimes reduces precision (32>16>32), sometimes not +static void normalize_image_u8_to_f32(const clip_image_u8* src, clip_image_f32* dst, const float mean[3], const float std[3]) { + dst->nx = src->nx; + dst->ny = src->ny; + dst->buf.resize(src->buf.size()); + + for (size_t i = 0; i < src->buf.size(); ++i) { + int c = i % 3; // rgb + dst->buf[i] = (static_cast<float>(src->buf[i]) / 255.0f - mean[c]) / std[c]; + } +} + +inline float clip(float x, float lower, float upper) { + return std::max(lower, std::min(x, upper)); +} + +static bool bicubic_resize(const clip_image_u8 &img, clip_image_u8 &dst, int target_width, int target_height) { + const int nx = img.nx; + const int ny = img.ny; + + dst.nx = target_width; + dst.ny = target_height; + dst.buf.resize(3 * target_width * target_height); + + float Cc; + float C[5]; + float d0, d2, d3, a0, a1, a2, a3; + int i, j, k, jj; + int x, y; + float dx, dy; + float tx, ty; + + tx = (float)nx / (float)target_width; + ty = (float)ny / (float)target_height; + + // Bicubic interpolation; adapted from ViT.cpp, inspired from : + // -> https://github.com/yglukhov/bicubic-interpolation-image-processing/blob/master/libimage.c#L36 + // -> https://en.wikipedia.org/wiki/Bicubic_interpolation + + for (i = 0; i < target_height; i++) { + for (j = 0; j < target_width; j++) { + x = (int)(tx * j); + y = (int)(ty * i); + + dx = tx * j - x; + dy = ty * i - y; + + for (k = 0; k < 3; k++) { + for (jj = 0; jj <= 3; jj++) { + d0 = img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x - 1, 0, nx - 1)) * 3 + k] - img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x, 0, nx - 1)) * 3 + k]; + d2 = img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x + 1, 0, nx - 1)) * 3 + k] - img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x, 0, nx - 1)) * 3 + k]; + d3 = img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x + 2, 0, nx - 1)) * 3 + k] - img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x, 0, nx - 1)) * 3 + k]; + a0 = img.buf[(clip(y - 1 + jj, 0, ny - 1) * nx + clip(x, 0, nx - 1)) * 3 + k]; + + a1 = -1.0 / 3 * d0 + d2 - 1.0 / 6 * d3; + a2 = 1.0 / 2 * d0 + 1.0 / 2 * 
d2; + a3 = -1.0 / 6 * d0 - 1.0 / 2 * d2 + 1.0 / 6 * d3; + + C[jj] = a0 + a1 * dx + a2 * dx * dx + a3 * dx * dx * dx; + + d0 = C[0] - C[1]; + d2 = C[2] - C[1]; + d3 = C[3] - C[1]; + a0 = C[1]; + a1 = -1.0 / 3 * d0 + d2 - 1.0 / 6 * d3; + a2 = 1.0 / 2 * d0 + 1.0 / 2 * d2; + a3 = -1.0 / 6 * d0 - 1.0 / 2 * d2 + 1.0 / 6 * d3; + Cc = a0 + a1 * dy + a2 * dy * dy + a3 * dy * dy * dy; + + const uint8_t Cc2 = std::min(std::max(std::round(Cc), 0.0f), 255.0f); + dst.buf[(i * target_width + j) * 3 + k] = float(Cc2); + } + } + } + } + + return true; +} + +// llava-1.6 type of resize_and_pad (black) +static void resize_and_pad_image(const clip_image_u8& image, clip_image_u8 &image_output, const std::pair& target_resolution) { + int target_width = target_resolution.first; + int target_height = target_resolution.second; + + float scale_w = static_cast(target_width) / image.nx; + float scale_h = static_cast(target_height) / image.ny; + + int new_width, new_height; + + if (scale_w < scale_h) { + new_width = target_width; + new_height = std::min(static_cast(std::ceil(image.ny * scale_w)), target_height); + } else { + new_height = target_height; + new_width = std::min(static_cast(std::ceil(image.nx * scale_h)), target_width); + } + + clip_image_u8 resized_image; + // bilinear_resize(image, resized_image, new_width, new_height); + bicubic_resize(image, resized_image, new_width, new_height); + + clip_image_u8 padded_image; + padded_image.nx = target_width; + padded_image.ny = target_height; + padded_image.buf.resize(3 * target_width * target_height, 0); // Initialize with black + + // Calculate padding offsets + int pad_x = (target_width - new_width) / 2; + int pad_y = (target_height - new_height) / 2; + + // Copy the resized image into the center of the padded buffer + for (int y = 0; y < new_height; ++y) { + for (int x = 0; x < new_width; ++x) { + for (int c = 0; c < 3; ++c) { + padded_image.buf[3 * ((y + pad_y) * target_width + (x + pad_x)) + c] = resized_image.buf[3 * (y * new_width + x) + c]; + } + } + } + image_output = std::move(padded_image); +} + +/** + * Selects the best resolution from a list of possible resolutions based on the original size. + * + * @param original_size The original size of the image in the format (width, height). + * @param possible_resolutions A list of possible resolutions in the format [(width1, height1), (width2, height2), ...]. + * @return The best fit resolution in the format (width, height). 
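+ *
+ * Worked example (illustrative): for an 800x600 input and candidates
+ * {336x672, 672x336, 672x672, 1008x336, 336x1008}, 672x672 is chosen:
+ * scale = min(672/800, 672/600) = 0.84 yields a 672x504 downscale and an
+ * effective resolution of 338688 pixels, higher than any other candidate.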
+ */ +static std::pair select_best_resolution(const std::pair & original_size, const std::vector> & possible_resolutions) { + int original_width = original_size.first; + int original_height = original_size.second; + std::pair best_fit; + int max_effective_resolution = 0; + int min_wasted_resolution = std::numeric_limits::max(); + + for (const auto& resolution : possible_resolutions) { + int width = resolution.first; + int height = resolution.second; + float scale = std::min(static_cast(width) / original_width, static_cast(height) / original_height); + int downscaled_width = static_cast(original_width * scale); + int downscaled_height = static_cast(original_height * scale); + int effective_resolution = std::min(downscaled_width * downscaled_height, original_width * original_height); + int wasted_resolution = (width * height) - effective_resolution; + // fprintf(stderr, "resolution: %d %d, scale: %f, downscaled: %d %d, effective: %d, wasted: %d\n", width, height, scale, downscaled_width, downscaled_height, effective_resolution, wasted_resolution); + if (effective_resolution > max_effective_resolution || (effective_resolution == max_effective_resolution && wasted_resolution < min_wasted_resolution)) { + max_effective_resolution = effective_resolution; + min_wasted_resolution = wasted_resolution; + best_fit = resolution; + } + } + + return best_fit; +} + +static std::vector divide_to_patches_u8(const clip_image_u8 & image, int patch_size) { + std::vector patches; + int width = image.nx; + int height = image.ny; + for (int i = 0; i < height; i += patch_size) { + for (int j = 0; j < width; j += patch_size) { + clip_image_u8 *patch = clip_image_u8_init(); + patch->nx = std::min(patch_size, width - j); + patch->ny = std::min(patch_size, height - i); + patch->buf.resize(3 * patch->nx * patch->ny); + for (int y = 0; y < patch->ny; ++y) { + for (int x = 0; x < patch->nx; ++x) { + for (int c = 0; c < 3; ++c) { + patch->buf[3 * (y * patch->nx + x) + c] = image.buf[3 * ((i + y) * width + (j + x)) + c]; + } + } + } + patches.push_back(patch); + } + } + return patches; +} + +// returns the normalized float tensor for llava-1.5, for spatial_unpad with anyres processing for llava-1.6 it returns the normalized image patch tensors as a vector +// res_imgs memory is being allocated here, previous allocations will be freed if found +bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32_batch & res_imgs) { + bool pad_to_square = true; if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); return false; } + auto & params = ctx->vision_model.hparams; + // The model config actually contains all we need to decide on how to preprocess, here we automatically switch to the new llava-1.6 preprocessing + if (strcmp(params.mm_patch_merge_type, "spatial_unpad") == 0) { + pad_to_square = false; + } + // free the previous res_imgs if any set + if (res_imgs.size > 0 && res_imgs.size < 100) { + for (size_t i = 0; i < res_imgs.size; i++) { + clip_image_f32_free(&(res_imgs.data[i])); + } + delete[] res_imgs.data; + } + res_imgs.data = nullptr; + res_imgs.size = 0; // the logic below is to pad the shorter side to the longer side with a background color: rgb(122, 116, 104) // see https://github.com/haotian-liu/LLaVA/blob/e854a2bf85118c504f6f16bf5c3c7c92f8fa8c6b/llava/conversation.py#L113-L156 clip_image_u8 * temp = clip_image_u8_init(); // we will keep the input image data here temporarily - if (pad2square && img->nx != img->ny) { + if (pad_to_square && 
img->nx != img->ny) { int longer_side = std::max(img->nx, img->ny); temp->nx = longer_side; temp->ny = longer_side; temp->buf.resize(3 * longer_side * longer_side); - const uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA + const uint8_t bc[3] = {122, 116, 104}; // background color in RGB from LLaVA (this is the mean rgb color * 255) // fill with background color for (size_t i = 0; i < temp->buf.size(); i++) { @@ -1119,18 +1530,63 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli } } } else { - temp->nx = img->nx; - temp->ny = img->ny; - temp->buf.resize(img->buf.size()); - memcpy(temp->buf.data(), img->buf.data(), temp->buf.size()); + if (params.image_grid_pinpoints[0] != 0) { + // "spatial_unpad" with "anyres" processing for llava-1.6 + std::vector> possible_resolutions; + for (int i = 0; i < 32 && params.image_grid_pinpoints[i] != 0; i+=2) { + possible_resolutions.push_back({params.image_grid_pinpoints[i], params.image_grid_pinpoints[i+1]}); + } + std::pair best_resolution = select_best_resolution({img->nx, img->ny}, possible_resolutions); + // clip_image_save_to_bmp(*img, "input.bmp"); + resize_and_pad_image(*img, *temp, best_resolution); // we do not pad with mean-bg color anymore in llava-1.6 + // clip_image_save_to_bmp(*temp, "resized.bmp"); + // visually verify normalized image: + // normalize_image_u8_to_f32(*temp, *res, ctx->image_mean, ctx->image_std); + // { + // clip_image_u8 * temp2 = clip_image_u8_init(); + // clip_image_convert_f32_to_u8(*res, *temp2); + // clip_image_save_to_bmp(*temp2, "resized_normalized_f32.bmp"); + // clip_image_u8_free(temp2); + // } + + std::vector patches = divide_to_patches_u8(*temp, params.image_size); // prepare spatial sorted main patches of image_size each (336 in llava-1.6) + + clip_image_u8 *image_original_resize = clip_image_u8_init(); + // bilinear_resize(*img, *image_original_resize, params.image_size, params.image_size); // in python this is "shortest_edge", but all CLIP are square + bicubic_resize(*img, *image_original_resize, params.image_size, params.image_size); // in python this is "shortest_edge", but all CLIP are square + patches.insert(patches.begin(), image_original_resize); + // clip_image_f32_batch_init(patches.size()); + res_imgs.size = patches.size(); + res_imgs.data = new clip_image_f32[res_imgs.size]; + int num=0; + for (auto& patch : patches) { + normalize_image_u8_to_f32(patch, &res_imgs.data[num], ctx->image_mean, ctx->image_std); + num++; + } + + for (size_t i = 0; i < patches.size(); i++) { + // printf("patch %d: %d %d\n", i, patches[i]->nx, patches[i]->ny); + clip_image_u8_free(patches[i]); + } + + clip_image_u8_free(temp); + + return true; + } else { + temp->nx = img->nx; + temp->ny = img->ny; + temp->buf.resize(img->buf.size()); + memcpy(temp->buf.data(), img->buf.data(), temp->buf.size()); + } } const int nx = temp->nx; const int ny = temp->ny; + // clip_image_save_to_bmp(*temp, "resized_vanilla.bmp"); const int nx2 = ctx->vision_model.hparams.image_size; const int ny2 = ctx->vision_model.hparams.image_size; - + clip_image_f32 * res = clip_image_f32_init(); res->nx = nx2; res->ny = ny2; res->buf.resize(3 * nx2 * ny2); @@ -1184,9 +1640,25 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli } clip_image_u8_free(temp); + // { + // clip_image_u8 * temp2 = clip_image_u8_init(); + // clip_image_convert_f32_to_u8(*res, *temp2); + // clip_image_save_to_bmp(*temp2, "resized_normalized_f32_vanilla.bmp"); + // clip_image_u8_free(temp2); + 
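+    // Note: normalize_image_u8_to_f32() maps each channel as
+    // x -> (x / 255 - mean[c]) / std[c]. With the default CLIP image mean
+    // (0.48145466, 0.4578275, 0.40821073), the LLaVA pad color
+    // rgb(122, 116, 104) normalizes to roughly zero in every channel,
+    // which is why padding with the mean color is neutral for the encoder.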
// } + // res_imgs.push_back(res); + + res_imgs.size = 1; + res_imgs.data = new clip_image_f32[res_imgs.size]; + res_imgs.data[0] = std::move(*res); + return true; } +ggml_tensor * clip_get_newline_tensor(const struct clip_ctx * ctx) { + return ctx->vision_model.image_newline; +} + void clip_free(clip_ctx * ctx) { ggml_free(ctx->ctx_data); gguf_free(ctx->ctx_gguf); @@ -1194,6 +1666,42 @@ void clip_free(clip_ctx * ctx) { delete ctx; } +size_t clip_embd_nbytes(const struct clip_ctx * ctx) { + return clip_n_patches(ctx) * clip_n_mmproj_embd(ctx) * sizeof(float); +} + +int32_t clip_image_size(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.image_size; +} + +int32_t clip_patch_size(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.patch_size; +} + +int32_t clip_hidden_size(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.hidden_size; +} + +const char * clip_patch_merge_type(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.mm_patch_merge_type; +} + +const int32_t * clip_image_grid(const struct clip_ctx * ctx) { + return ctx->vision_model.hparams.image_grid_pinpoints; +} + +int clip_n_patches(const struct clip_ctx * ctx) { + const auto & params = ctx->vision_model.hparams; + + int n_patches = (params.image_size / params.patch_size) * (params.image_size / params.patch_size); + + if (ctx->proj_type == PROJECTOR_TYPE_LDP) { + n_patches /= 4; + } + + return n_patches; +} + bool clip_image_encode(struct clip_ctx * ctx, const int n_threads, clip_image_f32 * img, float * vec) { if (!ctx->has_vision_encoder) { printf("This gguf file seems to have no vision encoder\n"); @@ -1213,7 +1721,7 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima } int batch_size = imgs->size; - if(ctx->has_llava_projector) { + if (ctx->has_llava_projector) { GGML_ASSERT(batch_size == 1); // TODO: support multiple images } @@ -1224,9 +1732,10 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima // set inputs const auto & model = ctx->vision_model; const auto & hparams = model.hparams; - const int image_size = hparams.image_size; - const int patch_size = hparams.patch_size; - const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); + + const int image_size = hparams.image_size; + const int patch_size = hparams.patch_size; + const int num_patches = ((image_size / patch_size) * (image_size / patch_size)); const int num_positions = num_patches + 1; { @@ -1301,11 +1810,11 @@ bool clip_image_batch_encode(clip_ctx * ctx, const int n_threads, const clip_ima // copy the embeddings to the location passed by the user ggml_backend_tensor_get(embeddings, vec, 0, ggml_nbytes(embeddings)); + return true; } bool clip_model_quantize(const char * fname_inp, const char * fname_out, const int itype) { - ggml_type type = GGML_TYPE_Q4_1; assert(itype < GGML_TYPE_COUNT); @@ -1494,26 +2003,13 @@ int clip_n_mmproj_embd(const struct clip_ctx * ctx) { if (ctx->proj_type == PROJECTOR_TYPE_LDP) { return ctx->vision_model.mm_model_block_1_block_2_1_b->ne[0]; } - else if (ctx->proj_type == PROJECTOR_TYPE_MLP) { + if (ctx->proj_type == PROJECTOR_TYPE_MLP) { return ctx->vision_model.mm_2_b->ne[0]; - } else if (ctx->proj_type == PROJECTOR_TYPE_MLP_NORM) { + } + if (ctx->proj_type == PROJECTOR_TYPE_MLP_NORM) { return ctx->vision_model.mm_3_b->ne[0]; } - else { - std::string proj_type = PROJECTOR_TYPE_NAMES[ctx->proj_type]; - throw std::runtime_error(format("%s: don't support projector with: %s currently\n", 
__func__, proj_type.c_str())); - } -} -int clip_n_patches(const struct clip_ctx * ctx) { - auto & params = ctx->vision_model.hparams; - int n_patches = (params.image_size / params.patch_size) * (params.image_size / params.patch_size); - if (ctx->proj_type == PROJECTOR_TYPE_LDP) { - n_patches /= 4; - } - return n_patches; -} - -size_t clip_embd_nbytes(const struct clip_ctx * ctx) { - return clip_n_patches(ctx) * clip_n_mmproj_embd(ctx) * sizeof(float); + std::string proj_type = PROJECTOR_TYPE_NAMES[ctx->proj_type]; + throw std::runtime_error(format("%s: don't support projector with: %s currently\n", __func__, proj_type.c_str())); } diff --git a/examples/llava/clip.h b/examples/llava/clip.h index 458a256a1..cd9a4022f 100644 --- a/examples/llava/clip.h +++ b/examples/llava/clip.h @@ -24,25 +24,7 @@ struct clip_ctx; extern "C" { #endif -struct clip_vision_hparams { - int32_t image_size; - int32_t patch_size; - int32_t hidden_size; - int32_t n_intermediate; - int32_t projection_dim; - int32_t n_head; - int32_t n_layer; - float eps; -}; - -CLIP_API struct clip_ctx * clip_model_load(const char * fname, int verbosity); - -CLIP_API void clip_free(struct clip_ctx * ctx); - -CLIP_API size_t clip_embd_nbytes(const struct clip_ctx * ctx); - -CLIP_API int clip_n_patches (const struct clip_ctx * ctx); -CLIP_API int clip_n_mmproj_embd(const struct clip_ctx * ctx); +struct clip_ctx; struct clip_image_u8_batch { struct clip_image_u8 * data; size_t size; }; @@ -54,10 +36,29 @@ struct clip_image_f32_batch { size_t size; }; +CLIP_API struct clip_ctx * clip_model_load (const char * fname, int verbosity); +CLIP_API struct clip_ctx * clip_model_load_cpu(const char * fname, int verbosity); + +CLIP_API void clip_free(struct clip_ctx * ctx); + +CLIP_API size_t clip_embd_nbytes(const struct clip_ctx * ctx); + +CLIP_API int32_t clip_image_size (const struct clip_ctx * ctx); +CLIP_API int32_t clip_patch_size (const struct clip_ctx * ctx); +CLIP_API int32_t clip_hidden_size(const struct clip_ctx * ctx); + +// TODO: should be enum, not string +CLIP_API const char * clip_patch_merge_type(const struct clip_ctx * ctx); + +CLIP_API const int32_t * clip_image_grid(const struct clip_ctx * ctx); + +CLIP_API int clip_n_patches (const struct clip_ctx * ctx); +CLIP_API int clip_n_mmproj_embd(const struct clip_ctx * ctx); + CLIP_API struct clip_image_u8 * clip_image_u8_init (); CLIP_API struct clip_image_f32 * clip_image_f32_init(); -CLIP_API void clip_image_u8_free (struct clip_image_u8 * img); +CLIP_API void clip_image_u8_free (struct clip_image_u8 * img); CLIP_API void clip_image_f32_free(struct clip_image_f32 * img); CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img); @@ -65,7 +66,11 @@ CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 /** interpret bytes as an image file with length bytes_length, and use the result to populate img */ CLIP_API bool clip_image_load_from_bytes(const unsigned char * bytes, size_t bytes_length, struct clip_image_u8 * img); -CLIP_API bool clip_image_preprocess (struct clip_ctx * ctx, const struct clip_image_u8 * img, struct clip_image_f32 * res, bool pad2square); +/** preprocess img and store the result in res_imgs, pad_to_square may be overridden to false depending on model configuration */ +CLIP_API bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, clip_image_f32_batch & res_imgs); + +CLIP_API struct ggml_tensor * clip_get_newline_tensor(const struct clip_ctx * ctx); + CLIP_API bool clip_image_encode (struct clip_ctx * 
ctx, int n_threads, struct clip_image_f32 * img, float * vec); CLIP_API bool clip_image_batch_encode(struct clip_ctx * ctx, int n_threads, const struct clip_image_f32_batch * imgs, float * vec); diff --git a/examples/llava/convert-image-encoder-to-gguf.py b/examples/llava/convert-image-encoder-to-gguf.py index e204b56be..c69f89ac2 100644 --- a/examples/llava/convert-image-encoder-to-gguf.py +++ b/examples/llava/convert-image-encoder-to-gguf.py @@ -78,18 +78,19 @@ ap.add_argument("--text-only", action="store_true", required=False, help="Save a text-only model. It can't be used to encode images") ap.add_argument("--vision-only", action="store_true", required=False, help="Save a vision-only model. It can't be used to encode texts") -ap.add_argument("--clip_model_is_vision", action="store_true", required=False, +ap.add_argument("--clip-model-is-vision", action="store_true", required=False, help="The clip model is a pure vision model (ShareGPT4V vision extract for example)") +ap.add_argument("--clip-model-is-openclip", action="store_true", required=False, + help="The clip model is from openclip (for ViT-SO400M type))") ap.add_argument("--llava-projector", help="Path to llava.projector file. If specified, save an image encoder for LLaVA models.") ap.add_argument("--projector-type", help="Type of projector. Possible values: mlp, ldp", choices=["mlp", "ldp"], default="mlp") -ap.add_argument("--image-mean", nargs=3, type=float, required=False, help="Override image mean values") -ap.add_argument("--image-std", nargs=3, type=float, required=False, help="Override image std values") ap.add_argument("-o", "--output-dir", help="Directory to save GGUF files. Default is the original model directory", default=None) # Example --image_mean 0.48145466 0.4578275 0.40821073 --image_std 0.26862954 0.26130258 0.27577711 +# Example --image_mean 0.5 0.5 0.5 --image_std 0.5 0.5 0.5 default_image_mean = [0.48145466, 0.4578275, 0.40821073] default_image_std = [0.26862954, 0.26130258, 0.27577711] -ap.add_argument('--image_mean', type=float, nargs='+', help='Mean of the images for normalization (overrides processor) ', default=None) -ap.add_argument('--image_std', type=float, nargs='+', help='Standard deviation of the images for normalization (overrides processor)', default=None) +ap.add_argument('--image-mean', type=float, nargs='+', help='Mean of the images for normalization (overrides processor) ', default=None) +ap.add_argument('--image-std', type=float, nargs='+', help='Standard deviation of the images for normalization (overrides processor)', default=None) # with proper args = ap.parse_args() @@ -105,7 +106,7 @@ if args.use_f32: # output in the same directory as the model if output_dir is None dir_model = args.model_dir -if args.clip_model_is_vision: +if args.clip_model_is_vision or not os.path.exists(dir_model + "/vocab.json") or args.clip_model_is_openclip: vocab = None tokens = None else: @@ -133,7 +134,7 @@ ftype = 1 if args.use_f32: ftype = 0 -if args.clip_model_is_vision: +if args.clip_model_is_vision or args.clip_model_is_openclip: model = CLIPVisionModel.from_pretrained(dir_model) processor = None else: @@ -202,6 +203,57 @@ if has_vision_encoder: fout.add_float32(k(KEY_ATTENTION_LAYERNORM_EPS, VISION), v_hparams["layer_norm_eps"]) block_count = v_hparams["num_hidden_layers"] - 1 if has_llava_projector else v_hparams["num_hidden_layers"] fout.add_uint32(k(KEY_BLOCK_COUNT, VISION), block_count) + # /** + # "image_grid_pinpoints": [ + # [ + # 336, + # 672 + # ], + # [ + # 672, + # 336 + # ], + # [ + # 672, + # 
672 + # ], + # [ + # 1008, + # 336 + # ], + # [ + # 336, + # 1008 + # ] + # ], + # Flattened: + # [ + # 336, 672, + # 672, 336, + # 672, 672, + # 1008, 336, + # 336, 1008 + # ] + # * + # */ + if "image_grid_pinpoints" in v_hparams: + # flatten it + image_grid_pinpoints = [] + for pinpoint in v_hparams["image_grid_pinpoints"]: + for p in pinpoint: + image_grid_pinpoints.append(p) + fout.add_array("clip.vision.image_grid_pinpoints", image_grid_pinpoints) + if "image_crop_resolution" in v_hparams: + fout.add_uint32("clip.vision.image_crop_resolution", v_hparams["image_crop_resolution"]) + if "image_aspect_ratio" in v_hparams: + fout.add_string("clip.vision.image_aspect_ratio", v_hparams["image_aspect_ratio"]) + if "image_split_resolution" in v_hparams: + fout.add_uint32("clip.vision.image_split_resolution", v_hparams["image_split_resolution"]) + if "mm_patch_merge_type" in v_hparams: + fout.add_string("clip.vision.mm_patch_merge_type", v_hparams["mm_patch_merge_type"]) + if "mm_projector_type" in v_hparams: + fout.add_string("clip.vision.mm_projector_type", v_hparams["mm_projector_type"]) + if processor is not None: image_mean = processor.image_processor.image_mean if args.image_mean is None or args.image_mean == default_image_mean else args.image_mean diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index 031e9806d..bef7f7c95 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -155,11 +155,29 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ system_prompt = prompt.substr(0, image_pos); user_prompt = prompt.substr(image_pos + std::string("").length()); printf("system_prompt: %s\n", system_prompt.c_str()); + if (params->verbose_prompt) { + auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, system_prompt, true, true); + for (int i = 0; i < (int) tmp.size(); i++) { + printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + } + } printf("user_prompt: %s\n", user_prompt.c_str()); + if (params->verbose_prompt) { + auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, user_prompt, true, true); + for (int i = 0; i < (int) tmp.size(); i++) { + printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + } + } } else { // llava-1.5 native mode system_prompt = "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\nUSER:"; user_prompt = prompt + "\nASSISTANT:"; + if (params->verbose_prompt) { + auto tmp = ::llama_tokenize(ctx_llava->ctx_llama, user_prompt, true, true); + for (int i = 0; i < (int) tmp.size(); i++) { + printf("%6d -> '%s'\n", tmp[i], llama_token_to_piece(ctx_llava->ctx_llama, tmp[i]).c_str()); + } + } } eval_string(ctx_llava->ctx_llama, system_prompt.c_str(), params->n_batch, &n_past, add_bos); @@ -171,13 +189,17 @@ static void process_prompt(struct llava_context * ctx_llava, struct llava_image_ fprintf(stderr, "\n"); struct llama_sampling_context * ctx_sampling = llama_sampling_init(params->sparams); - + std::string response = ""; for (int i = 0; i < max_tgt_len; i++) { const char * tmp = sample(ctx_sampling, ctx_llava->ctx_llama, &n_past); + response += tmp; if (strcmp(tmp, "
    ") == 0) break; if (strstr(tmp, "###")) break; // Yi-VL behavior - printf("%s", tmp); + if (strstr(response.c_str(), "<|im_end|>")) break; // Yi-34B llava-1.6 - for some reason those decode not as the correct token (tokenizer works) + if (strstr(response.c_str(), "<|im_start|>")) break; // Yi-34B llava-1.6 + if (strstr(response.c_str(), "USER:")) break; // mistral llava-1.6 + fflush(stdout); } diff --git a/examples/llava/llava-surgery-v2.py b/examples/llava/llava-surgery-v2.py new file mode 100644 index 000000000..5bc5bc513 --- /dev/null +++ b/examples/llava/llava-surgery-v2.py @@ -0,0 +1,167 @@ +import argparse +import glob +import os +import torch +from safetensors.torch import load as safe_load, save as safe_save, safe_open, save_file + +# Function to determine if file is a SafeTensor file +def is_safetensor_file(file_path): + return file_path.endswith('.safetensors') + + +# Unified loading function +def load_model(file_path): + if is_safetensor_file(file_path): + tensors = {} + with safe_open(file_path, framework="pt", device="cpu") as f: + for key in f.keys(): + tensors[key] = f.get_tensor(key).clone() + # output shape + print(f"{key} : {tensors[key].shape}") + return tensors, 'safetensor' + else: + return torch.load(file_path, map_location=torch.device('cpu')), 'pytorch' + + +# Unified saving function +def save_model(model, file_path, file_type): + if file_type == 'safetensor': + # safe_save(model, file_path) + save_file(model, file_path) + else: + torch.save(model, file_path) + + +# Adapted function to clean vision tower from checkpoint +def clean_vision_tower_from_checkpoint(checkpoint_path): + checkpoint, file_type = load_model(checkpoint_path) + # file_type = 'pytorch' + model_path = os.path.dirname(checkpoint_path) + print(f"Searching for vision tower tensors in {checkpoint_path}") + clip_tensors = [k for k, v in checkpoint.items() if (k.startswith("model.vision_tower") or k.startswith("vit."))] + + if len(clip_tensors) > 0: + print(f"Found {len(clip_tensors)} tensors to extract from {checkpoint_path}") + # Adapted for file type + clip_path = os.path.join(model_path, "llava.clip") + + if os.path.exists(clip_path): + print(f"Loading existing llava.clip from {clip_path}") + existing_clip, _ = load_model(clip_path) + else: + print(f"Creating new llava.clip at {clip_path}") + existing_clip = {} + # Update existing_clip with new tensors, avoid duplicates + for name in clip_tensors: + simple_name = name[name.index('vision_model.'):] if 'vision_model.' 
in name else name + print(f"Adding {simple_name} to llava.clip") + if simple_name not in existing_clip: + existing_clip[simple_name] = checkpoint[name] + + # Save the updated clip tensors back to llava.clip + save_model(existing_clip, clip_path, 'pytorch') + + # Remove the tensors from the original checkpoint + for name in clip_tensors: + del checkpoint[name] + + # Save the updated checkpoint + checkpoint_path = checkpoint_path + save_model(checkpoint, checkpoint_path, file_type) + return True + return False + +def find_relevant_checkpoints(checkpoint_paths, newline_criteria, projector): + newline_checkpoint_path = None + projector_checkpoint_path = None + + for path in checkpoint_paths: + checkpoint, _ = load_model(path) + if newline_criteria(checkpoint) and newline_checkpoint_path is None: + newline_checkpoint_path = path + if projector(checkpoint): + projector_checkpoint_path = path + + return newline_checkpoint_path, projector_checkpoint_path + +def newline_criteria(checkpoint): + return any(k.startswith("model.image_newline") for k in checkpoint.keys()) + +def proj_criteria(checkpoint): + return any(k.startswith("model.mm_projector") or k.startswith("vision_proj.") for k in checkpoint.keys()) + + +# Command-line interface setup +ap = argparse.ArgumentParser() +ap.add_argument("-m", "--model", required=True, help="Path to LLaVA v1.5+ model") +ap.add_argument("-C", "--clean-vision-tower", action="store_true", help="Remove any vision tower from the model files") +args = ap.parse_args() + +if args.clean_vision_tower: + # Generalized to handle both PyTorch and SafeTensors models + model_files = sorted(glob.glob(f"{args.model}/*"), key=os.path.getmtime, reverse=True) + # checkpoint_paths = [path for path in model_files if (path.endswith('.bin') and path.startswith('pytorch')) or (path.endswith('.safetensors') and path.startswith('model'))] + checkpoint_paths = [path for path in model_files if (path.endswith('.bin') and 'pytorch' in path.split('/')[-1].split('\\')[-1]) or (path.endswith('.safetensors') and 'model' in path.split('/')[-1].split('\\')[-1])] + for projector_checkpoint_path in checkpoint_paths: + print(f"Cleaning {projector_checkpoint_path}") + if not clean_vision_tower_from_checkpoint(projector_checkpoint_path): + print(f"No vision tower found in {projector_checkpoint_path}") + # we break once none is found, so far all models append them at the end + # break + print("Done! 
All vision tower tensors are removed from the model files and stored in llava.clip file.") + +# Now we look for the projector in the last checkpoint +model_files = sorted(glob.glob(f"{args.model}/*"), key=os.path.getmtime, reverse=True) +checkpoint_paths = [path for path in model_files if (path.endswith('.bin') and 'pytorch' in path.split('/')[-1].split('\\')[-1]) or (path.endswith('.safetensors') and 'model' in path.split('/')[-1].split('\\')[-1])] +# last_checkpoint_path = checkpoint_paths[0] +# first_checkpoint_path = checkpoint_paths[-1] +newline_checkpoint_path, projector_checkpoint_path = find_relevant_checkpoints(checkpoint_paths, newline_criteria, proj_criteria) + +print(f"Taking projector from {projector_checkpoint_path}") +first_mm_tensors = [] +first_checkpoint = None +if newline_checkpoint_path is not None: + print(f"Taking newline from {newline_checkpoint_path}") + first_checkpoint, file_type = load_model(newline_checkpoint_path) + first_mm_tensors = [k for k, v in first_checkpoint.items() if k.startswith("model.image_newline")] + +# Load the checkpoint +mm_tensors = [] +last_checkpoint = None +if projector_checkpoint_path is not None: + last_checkpoint, file_type = load_model(projector_checkpoint_path) + mm_tensors = [k for k, v in last_checkpoint.items() if k.startswith("model.mm_projector") or k.startswith("vision_proj.")] + +if len(mm_tensors) == 0: + if last_checkpoint is not None: + for k, v in last_checkpoint.items(): + print(k) + print(f"Found {len(mm_tensors)} tensors to extract out of {len(last_checkpoint)} tensors.") + print("No tensors found. Is this a LLaVA model?") + exit() + +print(f"Found {len(mm_tensors)} tensors to extract.") +print(f"Found additional {len(first_mm_tensors)} tensors to extract.") +# projector = {name: checkpoint.[name].float() for name in mm_tensors} +projector = {} +for name in mm_tensors: + projector[name] = last_checkpoint[name].float() +for name in first_mm_tensors: + projector[name] = first_checkpoint[name].float() + +if len(projector) > 0: + save_model(projector, f"{args.model}/llava.projector", 'pytorch') + +for name in mm_tensors: + del last_checkpoint[name] +for name in first_mm_tensors: + del first_checkpoint[name] + +if len(mm_tensors) > 0: + save_model(last_checkpoint, projector_checkpoint_path, file_type) +if len(first_mm_tensors) > 0: + save_model(first_checkpoint, newline_checkpoint_path, file_type) + +print("Done!") +print(f"Now you can convert {args.model} to a regular LLaMA GGUF file.") +print(f"Also, use {args.model}/llava.projector to prepare a llava-encoder.gguf file.") diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index d42e7582e..22953417f 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -2,32 +2,296 @@ #include "common.h" #include "llama.h" #include "llava.h" +#include "base64.hpp" #include #include #include +#include + +// RGB uint8 image +struct clip_image_u8 { + int nx; + int ny; + + std::vector<uint8_t> buf; +}; + +// RGB float32 image (NHWC) +// Memory layout: RGBRGBRGB... +struct clip_image_f32 { + int nx; + int ny; + + std::vector<float> buf; +}; + +struct clip_image_grid_shape { + int first; + int second; +}; + +/** + * Selects the best resolution from a list of possible resolutions based on the original size. + * + * @param original_size The original size of the image in the format (width, height). + * @param possible_resolutions A list of possible resolutions in the format [(width1, height1), (width2, height2), ...]. 
+ * @return The best fit resolution in the format (width, height). + */ +static std::pair select_best_resolution(const std::pair& original_size, const std::vector>& possible_resolutions) { + int original_width = original_size.first; + int original_height = original_size.second; + + std::pair best_fit; + int max_effective_resolution = 0; + int min_wasted_resolution = std::numeric_limits::max(); + + for (const auto& resolution : possible_resolutions) { + int width = resolution.first; + int height = resolution.second; + float scale = std::min(static_cast(width) / original_width, static_cast(height) / original_height); + int downscaled_width = static_cast(original_width * scale); + int downscaled_height = static_cast(original_height * scale); + int effective_resolution = std::min(downscaled_width * downscaled_height, original_width * original_height); + int wasted_resolution = (width * height) - effective_resolution; + // fprintf(stderr, "resolution: %d %d, scale: %f, downscaled: %d %d, effective: %d, wasted: %d\n", width, height, scale, downscaled_width, downscaled_height, effective_resolution, wasted_resolution); + if (effective_resolution > max_effective_resolution || (effective_resolution == max_effective_resolution && wasted_resolution < min_wasted_resolution)) { + max_effective_resolution = effective_resolution; + min_wasted_resolution = wasted_resolution; + best_fit = resolution; + } + } + + return best_fit; +} + +/** + * @brief Get the anyres image grid shape object + * + * @param image_size + * @param grid_pinpoints + * @param image_patch_size + * @return + */ +static struct clip_image_grid_shape get_anyres_image_grid_shape(const std::pair & image_size, const std::vector> & grid_pinpoints, int image_patch_size) { + /** + Conversion from gguf flat array to vector: + std::vector> possible_resolutions; + for (int i = 0; i < 32 && params.image_grid_pinpoints[i] != 0; i+=2) { + possible_resolutions.push_back({params.image_grid_pinpoints[i], params.image_grid_pinpoints[i+1]}); + } + */ + auto best_resolution = select_best_resolution(image_size, grid_pinpoints); + return {best_resolution.first / image_patch_size, best_resolution.second / image_patch_size}; +} + +// Take the image segments in a grid configuration and return the embeddings and the number of embeddings into preallocated memory (image_embd_out) +static bool clip_llava_handle_patches(clip_ctx * ctx_clip, std::vector & image_embd_v, struct clip_image_grid_shape grid_shape, float * image_embd_out, int * n_img_pos_out) { + struct { + struct ggml_tensor * newline; + struct ggml_context * ctx; + } model; + + const int32_t image_size = clip_image_size(ctx_clip); + const int32_t patch_size = clip_patch_size(ctx_clip); + + int32_t num_patches_per_side = image_size / patch_size; // 336 / 14 = 24 - used for embedding-patching boxes (24*24 = 576 patches) + + int num_patches_width = grid_shape.first; // grid 1-4 + int num_patches_height = grid_shape.second; // grid 1-4 + + const size_t num_images = num_patches_width + num_patches_height + 1; + + // TODO: size calculation is not calculated - it's only tens of MB + size_t ctx_size = 0; + + { + ctx_size += clip_embd_nbytes(ctx_clip) * num_images * 8; // image_features + ctx_size += 1024*1024 * ggml_type_size(GGML_TYPE_F32); + } + + struct ggml_init_params params { + /*.mem_size =*/ ctx_size, + /*.mem_buffer =*/ NULL, + /*.no_alloc =*/ false, // NOTE: this should be false when using the legacy API + }; + + // Python reference code for full unpad: + /* + base_image_feature = image_feature[0] + 
image_feature = image_feature[1:] + image_feature = image_feature.permute(4, 0, 2, 1, 3).contiguous() + image_feature = image_feature.flatten(1, 2).flatten(2, 3) + image_feature = unpad_image(image_feature, image_sizes[image_idx]) + image_feature = torch.cat(( + image_feature, + self.model.image_newline[:, None, None].expand(*image_feature.shape[:-1], 1) + ), dim=-1) + image_feature = image_feature.flatten(1, 2).transpose(0, 1) + image_feature = torch.cat((base_image_feature, image_feature), dim=0) + */ + // We now have two options: unpad or no unpad. Unpad removes tokens for faster llm eval. + // In terms of result quality it appears to make no difference, so we'll start with the easier approach given 5D tensors are not supported in ggml yet. + // Without unpad we have to split the sub-image embeddings into patches of 24 features each and permute them. + // Once all images are processed to prepended the base_image_features without any changes. + + // Pytorch reference simplified, modified for ggml compatibility - confirmed identical output in python (for a 2x2 grid image (676x676 scaling)) + /* + image_feature = image_feature.view(2, 2, 24, 24, 4096) + image_feature = image_feature.permute(0, 2, 1, 3, 4).contiguous() + image_feature = image_feature.view(2, 24, 2, 24, 4096) + image_feature = image_feature.flatten(0, 3) + + // Reshape to 4D tensor by merging the last two dimensions + image_feature = image_feature.view(2, 2, 24, 24*4096) + image_feature = image_feature.permute(0, 2, 1, 3).contiguous() + image_feature = image_feature.view(-1, 4096) + */ + + model.ctx = ggml_init(params); + + ggml_tensor * newline_tmp = clip_get_newline_tensor(ctx_clip); + model.newline = ggml_new_tensor_1d(model.ctx, GGML_TYPE_F32, newline_tmp->ne[0]); + if (newline_tmp->backend != GGML_BACKEND_CPU) { + if (newline_tmp->buffer == NULL) { + printf("newline_tmp tensor buffer is NULL\n"); + } + ggml_backend_tensor_get(newline_tmp, model.newline->data, 0, ggml_nbytes(newline_tmp)); + } else { + model.newline->data = newline_tmp->data; + if (model.newline->data == NULL) { + printf("newline_tmp tensor data is NULL\n"); + } + } + + struct ggml_tensor * image_features = ggml_new_tensor_3d(model.ctx, GGML_TYPE_F32, clip_n_mmproj_embd(ctx_clip), clip_n_patches(ctx_clip), num_images - 1); // example: 4096 x 576 x 4 + // ggml_tensor_printf(image_features,"image_features",__LINE__,false,false); + // fill it with the image embeddings, ignoring the base + for (size_t i = 1; i < num_images; i++) { + size_t offset = (i-1) * clip_embd_nbytes(ctx_clip); + memcpy((uint8_t *)(image_features->data) + offset, image_embd_v[i], clip_embd_nbytes(ctx_clip)); + } + + struct ggml_cgraph * gf = ggml_new_graph(model.ctx); + size_t size_ele = ggml_type_size(GGML_TYPE_F32); + + struct ggml_tensor *image_features_patchview = ggml_view_4d(model.ctx, image_features, + num_patches_per_side * clip_n_mmproj_embd(ctx_clip), + num_patches_per_side, + num_patches_width, + num_patches_height, + size_ele * num_patches_per_side * clip_n_mmproj_embd(ctx_clip), + size_ele * num_patches_per_side * clip_n_mmproj_embd(ctx_clip) * num_patches_per_side, + size_ele * num_patches_per_side * clip_n_mmproj_embd(ctx_clip) * num_patches_per_side * num_patches_width, 0); + // ggml_tensor_printf(image_features_patchview,"image_features_patchview",__LINE__,false,false); + struct ggml_tensor *permuted_cont = ggml_cont(model.ctx, ggml_permute(model.ctx, image_features_patchview, 0, 2, 1, 3)); + /** + At the end of each row we have to add the row_end embeddings, which are 
the same as the newline embeddings + image_feature = torch.cat(( + image_feature, + self.model.image_newline[:, None, None].expand(*image_feature.shape[:-1], 1).to(image_feature.device) + ), dim=-1) + * + */ + + // ggml_tensor_printf(permuted_cont,"permuted_cont",__LINE__,false,false); + struct ggml_tensor *flatten = ggml_view_2d(model.ctx, permuted_cont, clip_n_mmproj_embd(ctx_clip), num_patches_height * num_patches_width * num_patches_per_side * num_patches_per_side, size_ele * clip_n_mmproj_embd(ctx_clip), 0); + // ggml_tensor_printf(flatten,"flatten",__LINE__,false,false); + ggml_build_forward_expand(gf, flatten); + ggml_graph_compute_with_ctx(model.ctx, gf, 1); + struct ggml_tensor* result = gf->nodes[gf->n_nodes - 1]; + + memcpy(image_embd_out, image_embd_v[0], clip_embd_nbytes(ctx_clip)); // main image as global context + // append without newline tokens (default behavior in llava_arch when not using unpad ): + memcpy(image_embd_out + clip_n_patches(ctx_clip) * clip_n_mmproj_embd(ctx_clip), (float*)result->data, clip_embd_nbytes(ctx_clip) * (num_images-1)); // grid patches + *n_img_pos_out = static_cast(result->ne[1]+clip_n_patches(ctx_clip)); + + // Debug: Test single segments + // Current findings: sending base image, sending a segment embedding all works similar to python + // However, permuted embeddings do not work yet (stride issue?) + // memcpy(image_embd_out, image_embd_v[0], clip_embd_nbytes(ctx_clip)); // main image as context + // memcpy(image_embd_out, (float*)prepared_cont->data, clip_embd_nbytes(ctx_clip)); // main image as context + // *n_img_pos_out=576; + + ggml_free(model.ctx); + return true; +} -#include "base64.hpp" static bool encode_image_with_clip(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float * image_embd, int * n_img_pos) { - clip_image_f32 * img_res = clip_image_f32_init(); - if (!clip_image_preprocess(ctx_clip, img, img_res, /*pad2square =*/ true)) { + // std::vector img_res_v; // format VectN x H x W x RGB (N x 336 x 336 x 3), so interleaved RGB - different to the python implementation which is N x 3 x 336 x 336 + clip_image_f32_batch img_res_v; + img_res_v.size = 0; + img_res_v.data = nullptr; + if (!clip_image_preprocess(ctx_clip, img, img_res_v)) { fprintf(stderr, "%s: unable to preprocess image\n", __func__); - clip_image_f32_free(img_res); + delete[] img_res_v.data; return false; } - *n_img_pos = clip_n_patches(ctx_clip); - const int64_t t_img_enc_start_us = ggml_time_us(); - bool encoded = clip_image_encode(ctx_clip, n_threads, img_res, image_embd); - clip_image_f32_free(img_res); - if (!encoded) { - fprintf(stderr, "Unable to encode image\n"); - return false; + const char * mm_patch_merge_type = clip_patch_merge_type(ctx_clip); + + if (strcmp(mm_patch_merge_type, "spatial_unpad") != 0) { + // flat / default llava-1.5 type embedding + *n_img_pos = clip_n_patches(ctx_clip); + bool encoded = clip_image_encode(ctx_clip, n_threads, &img_res_v.data[0], image_embd); // image_embd shape is 576 x 4096 + delete[] img_res_v.data; + if (!encoded) { + fprintf(stderr, "Unable to encode image\n"); + + return false; + } + } else { + // spatial_unpad llava-1.6 type embedding + // TODO: CLIP needs batching support - in HF the llm projection is separate after encoding, which might be a solution to quickly get batching working + std::vector image_embd_v; + image_embd_v.resize(img_res_v.size); + for (size_t i = 0; i < img_res_v.size; i++) { + image_embd_v[i] = (float *)malloc(clip_embd_nbytes(ctx_clip)); // 576 patches * 4096 embeddings * 4 bytes 
= 9437184 + const bool encoded = clip_image_encode(ctx_clip, n_threads, &img_res_v.data[i], image_embd_v[i]); // image data is in 3x336x336 format and will be converted to 336x336x3 inside + if (!encoded) { + fprintf(stderr, "Unable to encode image - spatial_unpad - subimage %d of %d\n", (int) i+1, (int) img_res_v.size); + return false; + } + } + const int64_t t_img_enc_batch_us = ggml_time_us(); + printf("%s: %d segments encoded in %8.2f ms\n", __func__, (int)img_res_v.size, (t_img_enc_batch_us - t_img_enc_start_us) / 1000.0); + + const int32_t * image_grid = clip_image_grid(ctx_clip); + + std::vector> grid_pinpoints; + for (int i = 0; i < 32 && image_grid[i] != 0; i += 2) { + grid_pinpoints.push_back({image_grid[i], image_grid[i+1]}); + } + + // free all img_res_v - not needed anymore + delete[] img_res_v.data; + img_res_v.size = 0; + img_res_v.data = nullptr; + + const int32_t image_size = clip_image_size(ctx_clip); + + struct clip_image_grid_shape grid_shape = get_anyres_image_grid_shape({img->nx,img->ny}, grid_pinpoints, image_size); + + int n_img_pos_out; + clip_llava_handle_patches(ctx_clip, image_embd_v, grid_shape, image_embd, &n_img_pos_out); + *n_img_pos = n_img_pos_out; + + for (size_t i = 0; i < image_embd_v.size(); i++) { + free(image_embd_v[i]); + } + image_embd_v.clear(); + + // debug image/segment/normalization content: + // clip_image_u8 * tmp = clip_image_u8_init(); + // clip_image_convert_f32_to_u8(*image_feature, *tmp); + // clip_image_save_to_bmp(*tmp, "image_feature.bmp"); } + printf("%s: image embedding created: %d tokens\n", __func__, *n_img_pos); + const int64_t t_img_enc_end_us = ggml_time_us(); float t_img_enc_ms = (t_img_enc_end_us - t_img_enc_start_us) / 1000.0; @@ -48,7 +312,7 @@ bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * } static bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out) { - float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)); + float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)*6); // TODO: base on gridsize/llava model if (!image_embd) { fprintf(stderr, "Unable to allocate memory for image embeddings\n"); free(image_embd); @@ -85,7 +349,7 @@ bool llava_eval_image_embed(llama_context * ctx_llama, const struct llava_image_ return true; } -LLAVA_API struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length) { +struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length) { clip_image_u8 * img = clip_image_u8_init(); if (!clip_image_load_from_bytes(image_bytes, image_bytes_length, img)) { clip_image_u8_free(img); @@ -142,7 +406,7 @@ static bool load_file_to_bytes(const char* path, unsigned char** bytesOut, long return true; } -LLAVA_API struct llava_image_embed * llava_image_embed_make_with_filename(struct clip_ctx * ctx_clip, int n_threads, const char * image_path) { +struct llava_image_embed * llava_image_embed_make_with_filename(struct clip_ctx * ctx_clip, int n_threads, const char * image_path) { unsigned char* image_bytes; long image_bytes_length; auto loaded = load_file_to_bytes(image_path, &image_bytes, &image_bytes_length); @@ -151,13 +415,13 @@ LLAVA_API struct llava_image_embed * llava_image_embed_make_with_filename(struct return NULL; } - auto embed = 
llava_image_embed_make_with_bytes(ctx_clip, n_threads, image_bytes, image_bytes_length); + llava_image_embed *embed = llava_image_embed_make_with_bytes(ctx_clip, n_threads, image_bytes, image_bytes_length); free(image_bytes); return embed; } -LLAVA_API void llava_image_embed_free(struct llava_image_embed * embed) { +void llava_image_embed_free(struct llava_image_embed * embed) { free(embed->embed); free(embed); } diff --git a/examples/llava/llava.h b/examples/llava/llava.h index e08ce7883..9e9466a5d 100644 --- a/examples/llava/llava.h +++ b/examples/llava/llava.h @@ -3,7 +3,6 @@ #include "ggml.h" - #ifdef LLAMA_SHARED # if defined(_WIN32) && !defined(__MINGW32__) # ifdef LLAMA_BUILD @@ -42,7 +41,6 @@ LLAVA_API void llava_image_embed_free(struct llava_image_embed * embed); /** write the image represented by embed into the llama context with batch size n_batch, starting at context pos n_past. on completion, n_past points to the next position in the context after the image embed. */ LLAVA_API bool llava_eval_image_embed(struct llama_context * ctx_llama, const struct llava_image_embed * embed, int n_batch, int * n_past); - #ifdef __cplusplus } #endif diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1699eb76b..6e3434030 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -968,13 +968,20 @@ struct llama_server_context { continue; } - clip_image_f32 * img_res = clip_image_f32_init(); - if (!clip_image_preprocess(clp_ctx, img.img_data, img_res, /*pad2square =*/ true)) + clip_image_f32_batch img_res_v; + img_res_v.size = 0; + img_res_v.data = nullptr; + if (!clip_image_preprocess(clp_ctx, img.img_data, img_res_v)) { LOG_TEE("Error processing the given image"); clip_free(clp_ctx); + clip_image_f32_free(img_res_v.data); return false; } + + // note: assumes only one image was returned by clip_image_preprocess + clip_image_f32 * img_res = img_res_v.data; + img.image_tokens = clip_n_patches(clp_ctx); img.image_embedding = (float *)malloc(clip_embd_nbytes(clp_ctx)); if (!img.image_embedding) @@ -989,7 +996,9 @@ struct llama_server_context LOG_TEE("Unable to encode image\n"); return false; } - clip_image_f32_free(img_res); + + clip_image_f32_free(img_res_v.data); + img.request_encode_image = false; } From 8084d554406b767d36b3250b3b787462d5dd626f Mon Sep 17 00:00:00 2001 From: Michael Podvitskiy Date: Wed, 14 Feb 2024 11:49:01 +0300 Subject: [PATCH 748/859] cmake : ARM intrinsics detection for MSVC (#5401) --- CMakeLists.txt | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index a544f2da6..f8c7f9978 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -855,11 +855,21 @@ if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR CMAKE_GENERATOR_PLATFORM_LWR STR CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64|arm.*|ARM64)$")) message(STATUS "ARM detected") if (MSVC) + add_compile_definitions(__aarch64__) # MSVC defines _M_ARM64 instead add_compile_definitions(__ARM_NEON) add_compile_definitions(__ARM_FEATURE_FMA) - add_compile_definitions(__ARM_FEATURE_DOTPROD) - # add_compile_definitions(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) # MSVC doesn't support vdupq_n_f16, vld1q_f16, vst1q_f16 - add_compile_definitions(__aarch64__) # MSVC defines _M_ARM64 instead + + set(CMAKE_REQUIRED_FLAGS_PREV ${CMAKE_REQUIRED_FLAGS}) + string(JOIN " " CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS} "/arch:armv8.2") + check_cxx_source_compiles("#include \nint main() { int8x16_t _a, _b; int32x4_t _s = vdotq_s32(_s, _a, _b); return 0; }" 
GGML_COMPILER_SUPPORT_DOTPROD) + if (GGML_COMPILER_SUPPORT_DOTPROD) + add_compile_definitions(__ARM_FEATURE_DOTPROD) + endif () + check_cxx_source_compiles("#include \nint main() { float16_t _a; float16x8_t _s = vdupq_n_f16(_a); return 0; }" GGML_COMPILER_SUPPORT_FP16_VECTOR_ARITHMETIC) + if (GGML_COMPILER_SUPPORT_FP16_VECTOR_ARITHMETIC) + add_compile_definitions(__ARM_FEATURE_FP16_VECTOR_ARITHMETIC) + endif () + set(CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_PREV}) else() check_cxx_compiler_flag(-mfp16-format=ieee COMPILER_SUPPORTS_FP16_FORMAT_I3E) if (NOT "${COMPILER_SUPPORTS_FP16_FORMAT_I3E}" STREQUAL "") From ccbb277f4642fc0d84c72dbc0d51ed2df418d6ce Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Wed, 14 Feb 2024 15:49:42 +0100 Subject: [PATCH 749/859] llava : update README.md (#5489) * Update README.md * Update README.md * Update examples/llava/README.md --------- Co-authored-by: Georgi Gerganov --- examples/llava/README.md | 46 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 42 insertions(+), 4 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index e2ef0eff1..1d5374f2a 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -1,10 +1,12 @@ # LLaVA -Currently this implementation supports [llava-v1.5](https://huggingface.co/liuhaotian/llava-v1.5-7b) variants. +Currently this implementation supports [llava-v1.5](https://huggingface.co/liuhaotian/llava-v1.5-7b) variants, +as well as llava-1.6 [llava-v1.6](https://huggingface.co/collections/liuhaotian/llava-16-65b9e40155f60fd046a5ccf2) variants. The pre-converted [7b](https://huggingface.co/mys/ggml_llava-v1.5-7b) and [13b](https://huggingface.co/mys/ggml_llava-v1.5-13b) models are available. +For llava-1.6 a variety of prepared gguf models are available as well [7b-34b](https://huggingface.co/cmp-nct/llava-1.6-gguf) After API is confirmed, more models will be supported / uploaded. @@ -18,6 +20,7 @@ After building, run: `./llava-cli` to see the usage. For example: ``` **note**: A lower temperature like 0.1 is recommended for better quality. add `--temp 0.1` to the command to do so. +**note**: For GPU offloading ensure to use the `-ngl` flag just like usual ## LLaVA 1.5 @@ -55,11 +58,46 @@ python ./convert.py ../llava-v1.5-7b Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` directory. 
-## LLaVA 1.6 +## LLaVA 1.6 gguf conversion + +1) Backup your pth/safetensor model files as llava-surgery modifies them +2) Use `python llava-surgery-v2.py -C -m /path/to/hf-model` which also supports llava-1.5 variants pytorch as well as safetensor models: +- you will find a llava.projector and a llava.clip file in your model directory +3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory (https://huggingface.co/cmp-nct/llava-1.6-gguf/blob/main/config.json) +4) Create the visual gguf model: `python ./examples/llava/convert-image-encoder-to-gguf.py -m ../path/to/vit --llava-projector ../path/to/llava.projector --output-dir ../path/to/output --clip_model_is_vision` +- This is similar to llava-1.5, the difference is that we tell the encoder that we are working with the pure vision model part of CLIP +5) Everything else as usual: convert.py the hf model, quantize as needed +**note** llava-1.6 needs more context than llava-1.5, at least 3000 is needed (just run it at -c 4096) +**note** llava-1.6 greatly benefits from batched prompt processing (defaults work) + +## llava-cli templating and llava-1.6 prompting + +llava-1.5 models all use the same vicuna prompt, here you can just add your image question like `-p "Provide a full description."` +For llava-1.5 models which are not vicuna (mistral and Yi) you need to adapt system prompt as well as user prompt, for this purpose llava-cli has a basic templating system: + +**For Mistral and using llava-cli binary:** +Add this: `-p "\nUSER:\nProvide a full description.\nASSISTANT:\n"` +The mistral template for llava-1.6 seems to be no system print and a USER/ASSISTANT role + +**For the 34B this should work:** +Add this: `-e -p <|im_start|>system\nAnswer the questions.<|im_end|><|im_start|>user\n\nProvide a full description.<|im_end|><|im_start|>assistant\n` + + +## How to know if you are running in llava-1.5 or llava-1.6 mode + +When running llava-cli you will see a visual information right before the prompt is being processed: + +**Llava-1.5:** +`encode_image_with_clip: image embedding created: 576 tokens` + +**Llava-1.6 (anything above 576):** +`encode_image_with_clip: image embedding created: 2880 tokens` + + +Alternatively just pay notice to how many "tokens" have been used for your prompt, it will also show 1000+ tokens for llava-1.6 + -- Use `llava-surgery-v2.py` -- TODO: add detailed instructions ## TODO From 594fca3fefe27b8e95cfb1656eb0e160ad15a793 Mon Sep 17 00:00:00 2001 From: Rune <43761327+Rune-AI@users.noreply.github.com> Date: Wed, 14 Feb 2024 16:15:49 +0100 Subject: [PATCH 750/859] readme : fix typo (#5490) executabhle -> executable --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0b4efdd33..0c4ee5a27 100644 --- a/README.md +++ b/README.md @@ -958,7 +958,7 @@ We have three Docker images available for this project: 1. `ghcr.io/ggerganov/llama.cpp:full`: This image includes both the main executable file and the tools to convert LLaMA models into ggml and convert into 4-bit quantization. (platforms: `linux/amd64`, `linux/arm64`) 2. `ghcr.io/ggerganov/llama.cpp:light`: This image only includes the main executable file. (platforms: `linux/amd64`, `linux/arm64`) -3. `ghcr.io/ggerganov/llama.cpp:server`: This image only includes the server executabhle file. (platforms: `linux/amd64`, `linux/arm64`) +3. `ghcr.io/ggerganov/llama.cpp:server`: This image only includes the server executable file. 
(platforms: `linux/amd64`, `linux/arm64`) Additionally, there the following images, similar to the above: From 704359e29985a06a389337a2617b7f3fa8eff908 Mon Sep 17 00:00:00 2001 From: Neuman Vong Date: Thu, 15 Feb 2024 17:11:15 +1100 Subject: [PATCH 751/859] vulkan: Find optimal memory type but with fallback (#5381) * @0cc4m feedback * More feedback @0cc4m --- ggml-vulkan.cpp | 65 ++++++++++++++++++++++++++++++++----------------- 1 file changed, 42 insertions(+), 23 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 7834e635c..1fad24fd1 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -707,9 +707,21 @@ static void ggml_vk_queue_cleanup(ggml_backend_vk_context * ctx, vk_queue& q) { q.cmd_buffer_idx = 0; } -static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags) { +static uint32_t find_properties(const vk::PhysicalDeviceMemoryProperties* mem_props, vk::MemoryRequirements* mem_req, vk::MemoryPropertyFlags flags) { + for (uint32_t i = 0; i < mem_props->memoryTypeCount; ++i) { + vk::MemoryType memory_type = mem_props->memoryTypes[i]; + if ((mem_req->memoryTypeBits & ((uint64_t)1 << i)) && + (flags & memory_type.propertyFlags) == flags && + mem_props->memoryHeaps[memory_type.heapIndex].size >= mem_req->size) { + return static_cast(i); + } + } + return UINT32_MAX; +} + +static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags, vk::MemoryPropertyFlags fallback_flags = vk::MemoryPropertyFlags(0)) { #ifdef GGML_VULKAN_DEBUG - std::cerr << "ggml_vk_create_buffer(" << size << ", " << to_string(req_flags) << ")" << std::endl; + std::cerr << "ggml_vk_create_buffer(" << size << ", " << to_string(req_flags) << ", " << to_string(fallback_flags) << ")" << std::endl; #endif vk_buffer buf = std::make_shared(); @@ -736,15 +748,15 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz uint32_t memory_type_index = UINT32_MAX; - for (uint32_t i = 0; i < mem_props.memoryTypeCount; ++i) { - vk::MemoryType memory_type = mem_props.memoryTypes[i]; - if ((mem_req.memoryTypeBits & ((uint64_t)1 << i)) && (req_flags & memory_type.propertyFlags) == req_flags && mem_props.memoryHeaps[memory_type.heapIndex].size >= mem_req.size) { - memory_type_index = i; - break; - } + memory_type_index = find_properties(&mem_props, &mem_req, req_flags); + buf->memory_property_flags = req_flags; + + if (memory_type_index == UINT32_MAX && fallback_flags) { + memory_type_index = find_properties(&mem_props, &mem_req, fallback_flags); + buf->memory_property_flags = fallback_flags; } - if (memory_type_index >= mem_props.memoryTypeCount) { + if (memory_type_index == UINT32_MAX) { ctx->device.lock()->device.destroyBuffer(buf->buffer); buf->size = 0; throw vk::OutOfDeviceMemoryError("No suitable memory type found"); @@ -758,10 +770,9 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz buf->size = 0; throw e; } - buf->memory_property_flags = req_flags; buf->ptr = nullptr; - if (req_flags & vk::MemoryPropertyFlagBits::eHostVisible) { + if (buf->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible) { buf->ptr = ctx->device.lock()->device.mapMemory(buf->device_memory, 0, VK_WHOLE_SIZE); } @@ -778,9 +789,9 @@ static vk_buffer ggml_vk_create_buffer(ggml_backend_vk_context * ctx, size_t siz return buf; } -static vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags) { +static 
vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size_t size, vk::MemoryPropertyFlags req_flags, vk::MemoryPropertyFlags fallback_flags = vk::MemoryPropertyFlags(0)) { try { - return ggml_vk_create_buffer(ctx, size, req_flags); + return ggml_vk_create_buffer(ctx, size, req_flags, fallback_flags); } catch (const vk::SystemError& e) { std::cerr << "ggml_vulkan: Memory allocation of size " << size << " failed." << std::endl; std::cerr << "ggml_vulkan: " << e.what() << std::endl; @@ -791,16 +802,16 @@ static vk_buffer ggml_vk_create_buffer_check(ggml_backend_vk_context * ctx, size static vk_buffer ggml_vk_create_buffer_device(ggml_backend_vk_context * ctx, size_t size) { vk_buffer buf; try { - buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal); - } catch (const vk::SystemError& e) { if (ctx->device.lock()->uma) { // Fall back to host memory type - buf = ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); + buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } else { - std::cerr << "ggml_vulkan: Device memory allocation of size " << size << " failed." << std::endl; - std::cerr << "ggml_vulkan: " << e.what() << std::endl; - throw e; + buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eDeviceLocal); } + } catch (const vk::SystemError& e) { + std::cerr << "ggml_vulkan: Device memory allocation of size " << size << " failed." << std::endl; + std::cerr << "ggml_vulkan: " << e.what() << std::endl; + throw e; } return buf; @@ -1422,7 +1433,9 @@ static void * ggml_vk_host_malloc(ggml_backend_vk_context * ctx, size_t size) { #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_vk_host_malloc(" << size << ")" << std::endl; #endif - vk_buffer buf = ggml_vk_create_buffer(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + vk_buffer buf = ggml_vk_create_buffer(ctx, size, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); if(!(buf->memory_property_flags & vk::MemoryPropertyFlagBits::eHostVisible)) { fprintf(stderr, "WARNING: failed to allocate %.2f MB of pinned memory\n", @@ -1568,7 +1581,9 @@ static void deferred_memcpy(void * dst, const void * src, size_t size, std::vect static void ggml_vk_ensure_sync_staging_buffer(ggml_backend_vk_context * ctx, size_t size) { if (ctx->sync_staging == nullptr || ctx->sync_staging->size < size) { ggml_vk_destroy_buffer(ctx->sync_staging); - ctx->sync_staging = ggml_vk_create_buffer_check(ctx, size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ctx->sync_staging = ggml_vk_create_buffer_check(ctx, size, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } } @@ -4082,7 +4097,9 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { std::cerr << "ggml_vk_preallocate_buffers(qx_size: " << ctx->prealloc_size_qx << " qy_size: " << ctx->prealloc_size_qy << " x_size: " << ctx->prealloc_size_x << " y_size: " << 
ctx->prealloc_size_y << " split_k_size: " << ctx->prealloc_size_split_k << ")" << std::endl; #endif #if defined(GGML_VULKAN_RUN_TESTS) - ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ctx->staging = ggml_vk_create_buffer_check(ctx, 100ul * 1024ul * 1024ul, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); ggml_vk_test_transfer(ctx, 8192 * 1000, false); ggml_vk_test_transfer(ctx, 8192 * 1000, true); @@ -4174,7 +4191,9 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { if (ctx->staging != nullptr) { ggml_vk_destroy_buffer(ctx->staging); } - ctx->staging = ggml_vk_create_buffer_check(ctx, ctx->staging_size, vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached); + ctx->staging = ggml_vk_create_buffer_check(ctx, ctx->staging_size, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostCached, + vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent); } } From 7930a8a6e89a04c77c51e3ae5dc1cd8e845b6b8f Mon Sep 17 00:00:00 2001 From: John <78893154+cmp-nct@users.noreply.github.com> Date: Thu, 15 Feb 2024 08:59:18 +0100 Subject: [PATCH 752/859] llaba : hotfix for llava-1.6 image number (#5495) Co-authored-by: John --- examples/llava/llava.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 22953417f..4ed310a0e 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -100,7 +100,7 @@ static bool clip_llava_handle_patches(clip_ctx * ctx_clip, std::vector int num_patches_width = grid_shape.first; // grid 1-4 int num_patches_height = grid_shape.second; // grid 1-4 - const size_t num_images = num_patches_width + num_patches_height + 1; + const size_t num_images = num_patches_width * num_patches_height + 1; // TODO: size calculation is not calculated - it's only tens of MB size_t ctx_size = 0; From 0d4177126b0556e202efb85bf3f768be81076400 Mon Sep 17 00:00:00 2001 From: Elbios <141279586+Elbios@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:01:57 +0100 Subject: [PATCH 753/859] llava : fix memory management bug (#5491) * Fix memory management in llava and server code Fixes this error: llama_new_context_with_model: graph splits (measure): 3 Available slots: -> Slot 0 - max context: 6000 {"timestamp":1707926446,"level":"INFO","function":"main","line":2623,"message":"model loaded"} all slots are idle and system prompt is empty, clear the KV cache slot 0 - loaded image slot 0 is processing [task id: 0] slot 0 : kv cache rm - [0, end) slot 0 - encoding image [id: 1] munmap_chunk(): invalid pointer Aborted * Make it cleaner by checking size in batch free wrapper --- examples/llava/clip.cpp | 24 +++++++++++++++++------- examples/llava/clip.h | 2 ++ examples/server/server.cpp | 11 +++++++++-- 3 files changed, 28 insertions(+), 9 deletions(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 9c5091e61..2cad27e82 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -1230,8 +1230,20 @@ struct clip_image_f32 * clip_image_f32_init() { return new clip_image_f32(); } -void 
clip_image_u8_free (struct clip_image_u8 * img) { delete img; } +void clip_image_u8_free(struct clip_image_u8 * img) { delete img; } void clip_image_f32_free(struct clip_image_f32 * img) { delete img; } +void clip_image_u8_batch_free(struct clip_image_u8_batch & batch) { + if (batch.size > 0) { + delete[] batch.data; + batch.size = 0; + } +} +void clip_image_f32_batch_free(struct clip_image_f32_batch & batch) { + if (batch.size > 0) { + delete[] batch.data; + batch.size = 0; + } +} static void build_clip_img_from_data(const stbi_uc * data, int nx, int ny, clip_image_u8 * img) { img->nx = nx; @@ -1494,11 +1506,8 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli pad_to_square = false; } // free the previous res_imgs if any set - if (res_imgs.size > 0 && res_imgs.size < 100) { - for (size_t i = 0; i < res_imgs.size; i++) { - clip_image_f32_free(&(res_imgs.data[i])); - } - delete[] res_imgs.data; + if (res_imgs.size > 0) { + clip_image_f32_batch_free(res_imgs); } res_imgs.data = nullptr; res_imgs.size = 0; @@ -1650,7 +1659,8 @@ bool clip_image_preprocess(struct clip_ctx * ctx, const clip_image_u8 * img, cli res_imgs.size = 1; res_imgs.data = new clip_image_f32[res_imgs.size]; - res_imgs.data[0] = std::move(*res); + res_imgs.data[0] = *res; + clip_image_f32_free(res); return true; } diff --git a/examples/llava/clip.h b/examples/llava/clip.h index cd9a4022f..e5bd54924 100644 --- a/examples/llava/clip.h +++ b/examples/llava/clip.h @@ -60,6 +60,8 @@ CLIP_API struct clip_image_f32 * clip_image_f32_init(); CLIP_API void clip_image_u8_free (struct clip_image_u8 * img); CLIP_API void clip_image_f32_free(struct clip_image_f32 * img); +CLIP_API void clip_image_u8_batch_free (struct clip_image_u8_batch & batch); +CLIP_API void clip_image_f32_batch_free(struct clip_image_f32_batch & batch); CLIP_API bool clip_image_load_from_file(const char * fname, struct clip_image_u8 * img); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 6e3434030..2decd7762 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -975,7 +975,12 @@ struct llama_server_context { LOG_TEE("Error processing the given image"); clip_free(clp_ctx); - clip_image_f32_free(img_res_v.data); + clip_image_f32_batch_free(img_res_v); + return false; + } + if (img_res_v.size == 0) + { + LOG_TEE("Error processing the given image"); return false; } @@ -987,6 +992,7 @@ struct llama_server_context if (!img.image_embedding) { LOG_TEE("Unable to allocate memory for image embeddings\n"); + clip_image_f32_batch_free(img_res_v); clip_free(clp_ctx); return false; } @@ -994,10 +1000,11 @@ struct llama_server_context if (!clip_image_encode(clp_ctx, params.n_threads, img_res, img.image_embedding)) { LOG_TEE("Unable to encode image\n"); + clip_image_f32_batch_free(img_res_v); return false; } - clip_image_f32_free(img_res_v.data); + clip_image_f32_batch_free(img_res_v); img.request_encode_image = false; } From 73122473ffd73030146276dbb85da7c8021a3ee4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20de=20Vries?= Date: Thu, 15 Feb 2024 14:14:37 +0100 Subject: [PATCH 754/859] fix(gguf-py): special tokens are no longer skipped when add__token is set to false (#5487) * fix(gguf-py): special tokens are no longer skipped when add__token is set to false * fix(gguf-py): added missing cls and mask token ids to the gguf metadata --- gguf-py/gguf/constants.py | 4 ++++ gguf-py/gguf/gguf_writer.py | 6 ++++++ gguf-py/gguf/vocab.py | 6 +----- 3 files changed, 11 insertions(+), 5 deletions(-) 
diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 5fba01714..9986ce9de 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -73,6 +73,8 @@ class Keys: UNK_ID = "tokenizer.ggml.unknown_token_id" SEP_ID = "tokenizer.ggml.seperator_token_id" PAD_ID = "tokenizer.ggml.padding_token_id" + CLS_ID = "tokenizer.ggml.cls_token_id" + MASK_ID = "tokenizer.ggml.mask_token_id" ADD_BOS = "tokenizer.ggml.add_bos_token" ADD_EOS = "tokenizer.ggml.add_eos_token" ADD_PREFIX = "tokenizer.ggml.add_space_prefix" @@ -685,5 +687,7 @@ KEY_TOKENIZER_EOS_ID = Keys.Tokenizer.EOS_ID KEY_TOKENIZER_UNK_ID = Keys.Tokenizer.UNK_ID KEY_TOKENIZER_SEP_ID = Keys.Tokenizer.SEP_ID KEY_TOKENIZER_PAD_ID = Keys.Tokenizer.PAD_ID +KEY_TOKENIZER_CLS_ID = Keys.Tokenizer.CLS_ID +KEY_TOKENIZER_MASK_ID = Keys.Tokenizer.MASK_ID KEY_TOKENIZER_HF_JSON = Keys.Tokenizer.HF_JSON KEY_TOKENIZER_RWKV = Keys.Tokenizer.RWKV diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index d87bd8e88..26724bf94 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -414,6 +414,12 @@ class GGUFWriter: def add_pad_token_id(self, id: int) -> None: self.add_uint32(Keys.Tokenizer.PAD_ID, id) + def add_cls_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.CLS_ID, id) + + def add_mask_token_id(self, id: int) -> None: + self.add_uint32(Keys.Tokenizer.MASK_ID, id) + def add_add_bos_token(self, value: bool) -> None: self.add_bool(Keys.Tokenizer.ADD_BOS, value) diff --git a/gguf-py/gguf/vocab.py b/gguf-py/gguf/vocab.py index cd1942975..a23136b18 100644 --- a/gguf-py/gguf/vocab.py +++ b/gguf-py/gguf/vocab.py @@ -29,7 +29,7 @@ class SpecialVocab: if special_token_types is not None: self.special_token_types = special_token_types else: - self.special_token_types = ('bos', 'eos', 'unk', 'sep', 'pad') + self.special_token_types = ('bos', 'eos', 'unk', 'sep', 'pad', 'cls', 'mask') self._load(Path(path)) def __repr__(self) -> str: @@ -152,10 +152,6 @@ class SpecialVocab: add_entry = tokenizer_config.get(f'add_{typ}_token') if isinstance(add_entry, bool): self.add_special_token[typ] = add_entry - if not added_tokens: - # We will need this to get the content for the token, so if it's empty - # may as well just give up. 
- continue entry = tokenizer_config.get(f'{typ}_token') if isinstance(entry, str): tc_content = entry From 9350a1cf21b1492c69b20175b73a419b897d6a3a Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 15 Feb 2024 15:41:15 +0200 Subject: [PATCH 755/859] scripts : add hf.sh helper script (#5501) * scripts : add hf.sh helper scripts * hf : add error logs * hf : add support for --repo and --file --- scripts/hf.sh | 107 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100755 scripts/hf.sh diff --git a/scripts/hf.sh b/scripts/hf.sh new file mode 100755 index 000000000..1e9e5a6ea --- /dev/null +++ b/scripts/hf.sh @@ -0,0 +1,107 @@ +#!/bin/bash +# +# Shortcut for downloading HF models +# +# Usage: +# ./main -m $(./examples/hf.sh https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/resolve/main/mixtral-8x7b-v0.1.Q4_K_M.gguf) +# ./main -m $(./examples/hf.sh --url https://huggingface.co/TheBloke/Mixtral-8x7B-v0.1-GGUF/blob/main/mixtral-8x7b-v0.1.Q4_K_M.gguf) +# ./main -m $(./examples/hf.sh --repo TheBloke/Mixtral-8x7B-v0.1-GGUF --file mixtral-8x7b-v0.1.Q4_K_M.gguf) +# + +# all logs go to stderr +function log { + echo "$@" 1>&2 +} + +function usage { + log "Usage: $0 [[--url] ] [--repo ] [--file ] [-h|--help]" + exit 1 +} + +# check for curl or wget +function has_cmd { + if ! [ -x "$(command -v $1)" ]; then + return 1 + fi +} + +if has_cmd wget; then + cmd="wget -q --show-progress -c -O %s %s" +elif has_cmd curl; then + cmd="curl -C - -f -o %s -L %s" +else + log "[E] curl or wget not found" + exit 1 +fi + +url="" +repo="" +file="" + +# parse args +while [[ $# -gt 0 ]]; do + case "$1" in + --url) + url="$2" + shift 2 + ;; + --repo) + repo="$2" + shift 2 + ;; + --file) + file="$2" + shift 2 + ;; + -h|--help) + usage + ;; + *) + url="$1" + shift + ;; + esac +done + +if [ -n "$repo" ] && [ -n "$file" ]; then + url="https://huggingface.co/$repo/resolve/main/$file" +fi + +if [ -z "$url" ]; then + log "[E] missing --url" + usage +fi + +# check if the URL is a HuggingFace model, and if so, try to download it +is_url=false + +if [[ ${#url} -gt 22 ]]; then + if [[ ${url:0:22} == "https://huggingface.co" ]]; then + is_url=true + fi +fi + +if [ "$is_url" = false ]; then + log "[E] invalid URL, must start with https://huggingface.co" + exit 0 +fi + +# replace "blob/main" with "resolve/main" +url=${url/blob\/main/resolve\/main} + +basename=$(basename $url) + +log "[+] attempting to download $basename" + +if [ -n "$cmd" ]; then + cmd=$(printf "$cmd" "$basename" "$url") + log "[+] $cmd" + if $cmd; then + echo $basename + exit 0 + fi +fi + +log "[-] failed to download" + +exit 1 From 9060a1e9dfca6038906e819be5fa42217f49028c Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 15 Feb 2024 16:49:01 +0100 Subject: [PATCH 756/859] cuda : print message when initialization fails (#5512) * cuda : print message when initialization fails * use CUDA_NAME both times --- ggml-cuda.cu | 1 + 1 file changed, 1 insertion(+) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 96976f248..b35fcb7fd 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -7943,6 +7943,7 @@ GGML_CALL void ggml_init_cublas() { if (cudaGetDeviceCount(&g_device_count) != cudaSuccess) { initialized = true; g_cublas_loaded = false; + fprintf(stderr, "%s: no " GGML_CUDA_NAME " devices found, " GGML_CUDA_NAME " will be disabled\n", __func__); return; } From c06e45d72983d9ace7b1535f7e7ea258d212169e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 15 Feb 2024 18:49:08 +0200 Subject: [PATCH 757/859] clip : fix wrong 
loop condition --- examples/llava/clip.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 2cad27e82..98d512f67 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -1103,7 +1103,7 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { printf("v_image_mean %f %f %f\n", new_clip->image_mean[0], new_clip->image_mean[1], new_clip->image_mean[2]); printf("v_image_std %f %f %f\n", new_clip->image_std[0], new_clip->image_std[1], new_clip->image_std[2]); printf("v_image_grid_pinpoints: "); - for (int i = 0; i < 32 & hparams.image_grid_pinpoints[i]!=0; ++i) { + for (int i = 0; i < 32 && (hparams.image_grid_pinpoints[i] != 0); ++i) { printf("%d ", hparams.image_grid_pinpoints[i]); } printf("\n"); From 4524290e87b8e107cc2b56e1251751546f4b9051 Mon Sep 17 00:00:00 2001 From: Douglas Hanley Date: Thu, 15 Feb 2024 11:21:49 -0600 Subject: [PATCH 758/859] Use correct type of pooling for embedding models (#5500) Use correct type of pooling for embedding models --- convert-hf-to-gguf.py | 24 ++++++++++- gguf-py/gguf/constants.py | 8 +++- gguf-py/gguf/gguf_writer.py | 5 ++- llama.cpp | 82 +++++++++++++++++++++++++------------ llama.h | 6 +++ 5 files changed, 94 insertions(+), 31 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index ae471481d..9771fccf9 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1650,7 +1650,29 @@ class BertModel(Model): def set_gguf_parameters(self): super().set_gguf_parameters() self.gguf_writer.add_causal_attention(False) - self.gguf_writer.add_pooling_layer(True) + + # get pooling path + with open(self.dir_model / "modules.json", encoding="utf-8") as f: + modules = json.load(f) + pooling_path = None + for mod in modules: + if mod["type"] == "sentence_transformers.models.Pooling": + pooling_path = mod["path"] + break + + # get pooling type + pooling_type = gguf.PoolingType.NONE + if pooling_path is not None: + with open(self.dir_model / pooling_path / "config.json", encoding="utf-8") as f: + pooling = json.load(f) + if pooling["pooling_mode_mean_tokens"]: + pooling_type = gguf.PoolingType.MEAN + elif pooling["pooling_mode_cls_token"]: + pooling_type = gguf.PoolingType.CLS + else: + raise NotImplementedError("Only MEAN and CLS pooling types supported") + + self.gguf_writer.add_pooling_type(pooling_type.value) def set_vocab(self): path = self.dir_model diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 9986ce9de..114a9a974 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -40,7 +40,7 @@ class Keys: TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout" EXPERT_COUNT = "{arch}.expert_count" EXPERT_USED_COUNT = "{arch}.expert_used_count" - POOLING_LAYER = "{arch}.pooling_layer" + POOLING_TYPE = "{arch}.pooling_type" class Attention: HEAD_COUNT = "{arch}.attention.head_count" @@ -561,6 +561,12 @@ class RopeScalingType(Enum): YARN = 'yarn' +class PoolingType(IntEnum): + NONE = 0 + MEAN = 1 + CLS = 2 + + class GGMLQuantizationType(IntEnum): F32 = 0 F16 = 1 diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py index 26724bf94..e4681475c 100644 --- a/gguf-py/gguf/gguf_writer.py +++ b/gguf-py/gguf/gguf_writer.py @@ -19,6 +19,7 @@ from .constants import ( GGUFValueType, Keys, RopeScalingType, + PoolingType, TokenType, ) @@ -360,8 +361,8 @@ class GGUFWriter: def add_causal_attention(self, value: bool) -> None: self.add_bool(Keys.Attention.CAUSAL.format(arch=self.arch), value) - def 
add_pooling_layer(self, value: bool) -> None: - self.add_bool(Keys.LLM.POOLING_LAYER.format(arch=self.arch), value) + def add_pooling_type(self, value: PoolingType) -> None: + self.add_uint32(Keys.LLM.POOLING_TYPE.format(arch=self.arch), value) def add_rope_dimension_count(self, count: int) -> None: self.add_uint32(Keys.Rope.DIMENSION_COUNT.format(arch=self.arch), count) diff --git a/llama.cpp b/llama.cpp index 14e8821cd..aceb9c25a 100644 --- a/llama.cpp +++ b/llama.cpp @@ -256,7 +256,7 @@ enum llm_kv { LLM_KV_TENSOR_DATA_LAYOUT, LLM_KV_EXPERT_COUNT, LLM_KV_EXPERT_USED_COUNT, - LLM_KV_POOLING_LAYER, + LLM_KV_POOLING_TYPE, LLM_KV_ATTENTION_HEAD_COUNT, LLM_KV_ATTENTION_HEAD_COUNT_KV, @@ -314,7 +314,7 @@ static std::map LLM_KV_NAMES = { { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" }, { LLM_KV_EXPERT_COUNT, "%s.expert_count" }, { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" }, - { LLM_KV_POOLING_LAYER, "%s.pooling_layer" }, + { LLM_KV_POOLING_TYPE , "%s.pooling_type" }, { LLM_KV_ATTENTION_HEAD_COUNT, "%s.attention.head_count" }, { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" }, @@ -1561,7 +1561,7 @@ struct llama_hparams { float f_max_alibi_bias; bool causal_attn = true; - bool pooling_layer = false; + uint32_t pooling_type = LLAMA_POOLING_NONE; bool operator!=(const llama_hparams & other) const { @@ -1924,7 +1924,8 @@ struct llama_context { struct ggml_tensor * inp_pos; // I32 [n_batch] struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] struct ggml_tensor * inp_K_shift; // I32 [n_ctx] - struct ggml_tensor * inp_sum; // F32 [n_batch, n_batch] + struct ggml_tensor * inp_mean; // F32 [n_batch, n_batch] + struct ggml_tensor * inp_cls; // I32 [n_batch] #ifdef GGML_USE_MPI ggml_mpi_context * ctx_mpi = NULL; @@ -3086,7 +3087,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); - ml.get_key(LLM_KV_POOLING_LAYER, hparams.pooling_layer); + ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); switch (hparams.n_layer) { case 3: @@ -3107,7 +3108,7 @@ static void llm_load_hparams( ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CAUSAL, hparams.causal_attn); ml.get_key(LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, hparams.n_vocab_type); - ml.get_key(LLM_KV_POOLING_LAYER, hparams.pooling_layer); + ml.get_key(LLM_KV_POOLING_TYPE, hparams.pooling_type); if (hparams.n_layer == 12 && hparams.n_embd == 768) { model.type = e_model::MODEL_137M; @@ -4934,7 +4935,7 @@ struct llm_build_context { const int32_t n_orig_ctx; const bool do_rope_shift; - const bool do_pooling; + const uint32_t pooling_type; const llm_build_cb & cb; @@ -4978,7 +4979,7 @@ struct llm_build_context { kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), - do_pooling (hparams.pooling_layer && cparams.do_pooling), + pooling_type (cparams.do_pooling ? 
hparams.pooling_type : (uint32_t)LLAMA_POOLING_NONE), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -5835,7 +5836,8 @@ struct llm_build_context { // get input vectors with right size const size_t stride1 = n_tokens * ggml_type_size(lctx.inp_tokens->type); struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); - struct ggml_tensor * inp_sum = ggml_view_2d(ctx0, lctx.inp_sum, n_tokens, n_tokens, stride1, 0); + struct ggml_tensor * inp_mean = ggml_view_2d(ctx0, lctx.inp_mean, n_tokens, n_tokens, stride1, 0); + struct ggml_tensor * inp_cls = ggml_view_1d(ctx0, lctx.inp_cls, n_tokens, 0); // construct input embeddings (token, type, position) inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); @@ -5952,8 +5954,12 @@ struct llm_build_context { cur = inpL; // pooling layer - if (do_pooling) { - cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_sum); + if (pooling_type == LLAMA_POOLING_MEAN) { + cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_mean); + } else if (pooling_type == LLAMA_POOLING_CLS) { + cur = ggml_get_rows(ctx0, cur, inp_cls); + } else { + GGML_ASSERT(pooling_type == LLAMA_POOLING_NONE && "Invalid pooling type"); } cb(cur, "result_embd", -1); @@ -7501,15 +7507,6 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - { - assert(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); - float * data = (float *) lctx.inp_sum->data; - - for (int i = 0; i < batch.n_tokens; ++i) { - data[i] = 1.0f/float(batch.n_tokens); - } - } - if (kv_self.has_shift) { const int64_t n_ctx = cparams.n_ctx; @@ -7522,17 +7519,46 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (hparams.pooling_layer && cparams.do_pooling) { + if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_MEAN) { const int64_t n_tokens = batch.n_tokens; - GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_sum->buffer)); - float * data = (float *) lctx.inp_sum->data; + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_mean->buffer)); + float * data = (float *) lctx.inp_mean->data; - memset(lctx.inp_sum->data, 0, batch.n_tokens * batch.n_tokens * ggml_element_size(lctx.inp_sum)); + memset(lctx.inp_mean->data, 0, n_tokens * n_tokens * ggml_element_size(lctx.inp_mean)); + + std::vector sum(n_tokens, 0); + for (int i = 0; i < n_tokens; ++i) { + const llama_seq_id seq_id = batch.seq_id[i][0]; + sum[seq_id] += 1; + } + + std::vector div(n_tokens, 0.0f); + for (int i = 0; i < n_tokens; ++i) { + const uint64_t s = sum[i]; + if (s > 0) { + div[i] = 1.0f/float(s); + } + } for (int i = 0; i < n_tokens; ++i) { const llama_seq_id seq_id = batch.seq_id[i][0]; - data[seq_id*n_tokens + i] = 1.0f; + data[seq_id*n_tokens + i] = div[seq_id]; + } + } + + if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_CLS) { + const int64_t n_tokens = batch.n_tokens; + + GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_cls->buffer)); + uint32_t * data = (uint32_t *) lctx.inp_cls->data; + + for (int i = 0; i < n_tokens; ++i) { + const llama_seq_id seq_id = batch.seq_id[i][0]; + const llama_pos pos = batch.pos[i]; + if (pos == 0) { + data[seq_id] = i; + } } } } @@ -11417,14 +11443,16 @@ struct llama_context * llama_new_context_with_model( ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, 
cparams.n_batch); ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); - ctx->inp_sum = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); + ctx->inp_mean = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); + ctx->inp_cls = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ggml_set_name(ctx->inp_tokens, "inp_tokens"); ggml_set_name(ctx->inp_embd, "inp_embd"); ggml_set_name(ctx->inp_pos, "inp_pos"); ggml_set_name(ctx->inp_KQ_mask, "inp_KQ_mask"); ggml_set_name(ctx->inp_K_shift, "inp_K_shift"); - ggml_set_name(ctx->inp_sum, "inp_sum"); + ggml_set_name(ctx->inp_mean, "inp_mean"); + ggml_set_name(ctx->inp_cls, "inp_cls"); ctx->buf_input = ggml_backend_alloc_ctx_tensors_from_buft(ctx->ctx_input, llama_default_buffer_type_cpu(true)); diff --git a/llama.h b/llama.h index 5ef78ec96..4a26bd619 100644 --- a/llama.h +++ b/llama.h @@ -112,6 +112,12 @@ extern "C" { LLAMA_ROPE_SCALING_MAX_VALUE = LLAMA_ROPE_SCALING_YARN, }; + enum llama_pooling_type { + LLAMA_POOLING_NONE = 0, + LLAMA_POOLING_MEAN = 1, + LLAMA_POOLING_CLS = 2, + }; + enum llama_split_mode { LLAMA_SPLIT_NONE = 0, // single GPU LLAMA_SPLIT_LAYER = 1, // split layers and KV across GPUs From 594845aab1c6775877f6d9545a51dc0f8d0b3d77 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 16 Feb 2024 09:57:55 +0200 Subject: [PATCH 759/859] ci : fix BERT model download and convert --- ci/run.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ci/run.sh b/ci/run.sh index a4264d775..979b4a793 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -580,6 +580,10 @@ function gg_run_embd_bge_small { gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/resolve/main/pytorch_model.bin gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/sentence_bert_config.json gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/vocab.txt + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/modules.json + gg_wget models-mnt/bge-small/ https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/config.json + + gg_wget models-mnt/bge-small/1_Pooling https://huggingface.co/BAAI/bge-small-en-v1.5/raw/main/1_Pooling/config.json path_models="../models-mnt/bge-small" From 60ed04cf82dc91ade725dd7ad53f0ee81f76eccf Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Fri, 16 Feb 2024 10:24:39 +0100 Subject: [PATCH 760/859] llava : fix clip-model-is-vision flag in README.md (#5509) * llava: fix clip-model-is-vision flag in README.md This commit fixes the flag `--clip_model_is_vision` in README.md which is does not match the actual flag: ```console $ python convert-image-encoder-to-gguf.py --help ... 
--clip-model-is-vision The clip model is a pure vision model (ShareGPT4V vision extract for example) ``` Signed-off-by: Daniel Bevenius * llava: update link to vit config in README.md Signed-off-by: Daniel Bevenius --------- Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 1d5374f2a..57eb42932 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -63,8 +63,8 @@ Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` director 1) Backup your pth/safetensor model files as llava-surgery modifies them 2) Use `python llava-surgery-v2.py -C -m /path/to/hf-model` which also supports llava-1.5 variants pytorch as well as safetensor models: - you will find a llava.projector and a llava.clip file in your model directory -3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory (https://huggingface.co/cmp-nct/llava-1.6-gguf/blob/main/config.json) -4) Create the visual gguf model: `python ./examples/llava/convert-image-encoder-to-gguf.py -m ../path/to/vit --llava-projector ../path/to/llava.projector --output-dir ../path/to/output --clip_model_is_vision` +3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory (https://huggingface.co/cmp-nct/llava-1.6-gguf/blob/main/config_vit.json) and rename it to config.json. +4) Create the visual gguf model: `python ./examples/llava/convert-image-encoder-to-gguf.py -m ../path/to/vit --llava-projector ../path/to/llava.projector --output-dir ../path/to/output --clip-model-is-vision` - This is similar to llava-1.5, the difference is that we tell the encoder that we are working with the pure vision model part of CLIP 5) Everything else as usual: convert.py the hf model, quantize as needed **note** llava-1.6 needs more context than llava-1.5, at least 3000 is needed (just run it at -c 4096) From f486f6e1e5e9d01603d9325ab3e05f1edb362a95 Mon Sep 17 00:00:00 2001 From: bmwl Date: Fri, 16 Feb 2024 01:31:07 -0800 Subject: [PATCH 761/859] ggml : add numa options (#5377) * Added numa options to allow finer grained control as well as plumbing for a new mirror mode that will require numa.h * Reverted Makefile * Fixed include * Removed sched.h from ggml.h, moved ggml_get_numa_affinity into ggml.c, removed trailing whitespace and fixed up a few inconsistent variables * removed trailing whitespace * Added numa options to allow finer grained control as well as plumbing for a new mirror mode that will require numa.h * Reverting Makefile * Fixed a number of issues with the move from BOOL to ggml_numa_strategies. 
Added a note about mirror mode not being implemented yet * Removing MIRROR_MODE code for this PR * Removing last bit of MIRROR_MODE code for this PR * Removing unneeded branch in server.cpp example and moving get_numa_affinity and making it static * Fixed lingering init_llama_backend() bool calls in tests and examples * Removed enum llama_numa_strategies * Revert bad merge with dynatemp flags * add missing enum ggml_numa_strategies declaration and revert sync problem with master * add missing enum ggml_numa_strategies declaration * fixed ggml_init_numa variable * Update ggml.h Co-authored-by: Jared Van Bortel * Update READMEs with info about numa flags, change INTERLEAVE strategy name to DISTRIBUTE everywhere, implement the improved distribution strategy from @rankaiyx, fix a spelling mistake and un-merge some bad merges * split numa init out from llama_backend_init and created llama_numa_init. Updated all code paths and samples * Fix up some boolean vs enum comparisons * Added #ifdefs for non-Linux OS that don't have cpu_set_t datatype * Update ggml.h Align enum values Co-authored-by: Georgi Gerganov * Update ggml.c Remove whitespace Co-authored-by: Georgi Gerganov * Update ggml.c align parameters Co-authored-by: Georgi Gerganov * Update examples/server/server.cpp remove whitespace and align brace Co-authored-by: Georgi Gerganov * Update common/common.cpp Remove whitespace and align brace Co-authored-by: Georgi Gerganov * unified ggml_numa_strategy enum and fixed text alignment in server.cpp example * Update ggml.c simplified return for platforms without NUMA support Co-authored-by: Jared Van Bortel * removed redundant else from cli argument processing of --numa * whitespace --------- Co-authored-by: root Co-authored-by: Jared Van Bortel Co-authored-by: Georgi Gerganov Co-authored-by: Jared Van Bortel --- common/common.cpp | 20 +++-- common/common.h | 2 +- examples/batched-bench/batched-bench.cpp | 3 +- examples/batched.swift/Sources/main.swift | 2 +- examples/batched/batched.cpp | 3 +- examples/beam-search/beam-search.cpp | 3 +- examples/embedding/embedding.cpp | 3 +- examples/imatrix/imatrix.cpp | 3 +- examples/infill/infill.cpp | 3 +- examples/llama-bench/llama-bench.cpp | 3 +- .../app/src/main/cpp/llama-android.cpp | 4 +- .../llama.cpp.swift/LibLlama.swift | 2 +- examples/llava/llava-cli.cpp | 3 +- examples/lookahead/lookahead.cpp | 3 +- examples/lookup/lookup.cpp | 3 +- examples/main/README.md | 6 +- examples/main/main.cpp | 3 +- examples/parallel/parallel.cpp | 3 +- examples/passkey/passkey.cpp | 3 +- examples/perplexity/perplexity.cpp | 3 +- examples/quantize/quantize.cpp | 2 +- examples/server/README.md | 7 ++ examples/server/server.cpp | 22 +++-- examples/simple/simple.cpp | 3 +- examples/speculative/speculative.cpp | 3 +- examples/tokenize/tokenize.cpp | 2 +- ggml.c | 80 ++++++++++++++++--- ggml.h | 12 ++- llama.cpp | 14 ++-- llama.h | 5 +- tests/test-autorelease.cpp | 2 +- tests/test-model-load-cancel.cpp | 2 +- tests/test-tokenizer-0-falcon.cpp | 2 +- tests/test-tokenizer-0-llama.cpp | 2 +- tests/test-tokenizer-1-bpe.cpp | 2 +- tests/test-tokenizer-1-llama.cpp | 2 +- 36 files changed, 178 insertions(+), 62 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index f64da2cb6..c5e83cc2a 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -671,7 +671,15 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } else if (arg == "--no-mmap") { params.use_mmap = false; } else if (arg == "--numa") { - params.numa = true; + if (++i >= argc) { + 
invalid_param = true; + break; + } + std::string value(argv[i]); + /**/ if (value == "distribute" || value == "") { params.numa = GGML_NUMA_STRATEGY_DISTRIBUTE; } + else if (value == "isolate") { params.numa = GGML_NUMA_STRATEGY_ISOLATE; } + else if (value == "numactl") { params.numa = GGML_NUMA_STRATEGY_NUMACTL; } + else { invalid_param = true; break; } } else if (arg == "--verbose-prompt") { params.verbose_prompt = true; } else if (arg == "--no-display-prompt") { @@ -935,7 +943,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -tb N, --threads-batch N\n"); printf(" number of threads to use during batch and prompt processing (default: same as --threads)\n"); printf(" -td N, --threads-draft N"); - printf(" number of threads to use during generation (default: same as --threads)"); + printf(" number of threads to use during generation (default: same as --threads)\n"); printf(" -tbd N, --threads-batch-draft N\n"); printf(" number of threads to use during batch and prompt processing (default: same as --threads-draft)\n"); printf(" -p PROMPT, --prompt PROMPT\n"); @@ -1005,7 +1013,7 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" --winogrande-tasks N number of tasks to use when computing the Winogrande score (default: %zu)\n", params.winogrande_tasks); printf(" --multiple-choice compute multiple choice score over random tasks from datafile supplied with -f\n"); printf(" --multiple-choice-tasks N number of tasks to use when computing the multiple choice score (default: %zu)\n", params.winogrande_tasks); - printf(" --kl-divergence computes KL-divergence to logits provided via --kl-divergence-base"); + printf(" --kl-divergence computes KL-divergence to logits provided via --kl-divergence-base\n"); printf(" --keep N number of tokens to keep from the initial prompt (default: %d, -1 = all)\n", params.n_keep); printf(" --draft N number of tokens to draft for speculative decoding (default: %d)\n", params.n_draft); printf(" --chunks N max number of chunks to process (default: %d, -1 = all)\n", params.n_chunks); @@ -1022,7 +1030,10 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { if (llama_supports_mmap()) { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } - printf(" --numa attempt optimizations that help on some NUMA systems\n"); + printf(" --numa TYPE attempt optimizations that help on some NUMA systems\n"); + printf(" - distribute: spread execution evenly over all nodes\n"); + printf(" - isolate: only spawn threads on CPUs on the node that execution started on\n"); + printf(" - numactl: use the CPU map provided by numactl\n"); printf(" if run without this previously, it is recommended to drop the system page cache before using this\n"); printf(" see https://github.com/ggerganov/llama.cpp/issues/1437\n"); if (llama_supports_gpu_offload()) { @@ -1689,7 +1700,6 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "no_mmap: %s # default: false\n", !params.use_mmap ? "true" : "false"); fprintf(stream, "no_mul_mat_q: %s # default: false\n", !params.mul_mat_q ? "true" : "false"); fprintf(stream, "no_penalize_nl: %s # default: false\n", !sparams.penalize_nl ? "true" : "false"); - fprintf(stream, "numa: %s # default: false\n", params.numa ? 
"true" : "false"); fprintf(stream, "ppl_output_type: %d # default: 0\n", params.ppl_output_type); fprintf(stream, "ppl_stride: %d # default: 0\n", params.ppl_stride); fprintf(stream, "presence_penalty: %f # default: 0.0\n", sparams.penalty_present); diff --git a/common/common.h b/common/common.h index 9bdd45cf9..74c136995 100644 --- a/common/common.h +++ b/common/common.h @@ -76,6 +76,7 @@ struct gpt_params { float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; // YaRN original context length int32_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; + ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED; // // sampling parameters struct llama_sampling_params sparams; @@ -134,7 +135,6 @@ struct gpt_params { bool logits_all = false; // return logits for all tokens in the batch bool use_mmap = true; // use mmap for faster loads bool use_mlock = false; // use mlock to keep model in memory - bool numa = false; // attempt optimizations that help on some NUMA systems bool verbose_prompt = false; // print prompt tokens before generation bool display_prompt = true; // print prompt before generation bool infill = false; // use infill mode diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index b52d68457..55dfd9784 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -82,7 +82,8 @@ int main(int argc, char ** argv) { // init LLM - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); // initialize the model diff --git a/examples/batched.swift/Sources/main.swift b/examples/batched.swift/Sources/main.swift index 4d0005349..d75c503d5 100644 --- a/examples/batched.swift/Sources/main.swift +++ b/examples/batched.swift/Sources/main.swift @@ -17,7 +17,7 @@ let n_parallel: Int = arguments.count > 3 && Int(arguments[3]) != nil ? 
Int(arguments[3])! : 1 let n_len: Int = 32 // init LLM -llama_backend_init(false) +llama_backend_init() defer { llama_backend_free() } diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp index b1775e0b0..eab636692 100644 --- a/examples/batched/batched.cpp +++ b/examples/batched/batched.cpp @@ -50,7 +50,8 @@ int main(int argc, char ** argv) { // init LLM - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); // initialize the model diff --git a/examples/beam-search/beam-search.cpp b/examples/beam-search/beam-search.cpp index 679b382e1..866c6d7a6 100644 --- a/examples/beam-search/beam-search.cpp +++ b/examples/beam-search/beam-search.cpp @@ -119,7 +119,8 @@ int main(int argc, char ** argv) // Init LLM : //--------------------------------- - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/embedding/embedding.cpp b/examples/embedding/embedding.cpp index b4688cf51..acff715e9 100644 --- a/examples/embedding/embedding.cpp +++ b/examples/embedding/embedding.cpp @@ -74,7 +74,8 @@ int main(int argc, char ** argv) { params.prompt = gpt_random_prompt(rng); } - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/imatrix/imatrix.cpp b/examples/imatrix/imatrix.cpp index bc9f6fa68..f21bc48f3 100644 --- a/examples/imatrix/imatrix.cpp +++ b/examples/imatrix/imatrix.cpp @@ -568,7 +568,8 @@ int main(int argc, char ** argv) { params.prompt = gpt_random_prompt(rng); } - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model_params mparams = llama_model_params_from_gpt_params(params); diff --git a/examples/infill/infill.cpp b/examples/infill/infill.cpp index 72fb133b4..92c67b7cf 100644 --- a/examples/infill/infill.cpp +++ b/examples/infill/infill.cpp @@ -202,7 +202,8 @@ int main(int argc, char ** argv) { std::mt19937 rng(params.seed); LOG("%s: llama backend init\n", __func__); - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index ddb0ba064..11410f8ae 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -1151,8 +1151,7 @@ int main(int argc, char ** argv) { if (!params.verbose) { llama_log_set(llama_null_log_callback, NULL); } - bool numa = false; - llama_backend_init(numa); + llama_backend_init(); // initialize printer std::unique_ptr<printer> p; diff --git a/examples/llama.android/app/src/main/cpp/llama-android.cpp b/examples/llama.android/app/src/main/cpp/llama-android.cpp index d5e705dce..2beb1e0d5 100644 --- a/examples/llama.android/app/src/main/cpp/llama-android.cpp +++ b/examples/llama.android/app/src/main/cpp/llama-android.cpp @@ -274,8 +274,8 @@ Java_com_example_llama_Llm_new_1batch(JNIEnv *, jobject, jint n_tokens, jint emb extern "C" JNIEXPORT void JNICALL -Java_com_example_llama_Llm_backend_1init(JNIEnv *, jobject, jboolean numa) { - llama_backend_init(numa); +Java_com_example_llama_Llm_backend_1init(JNIEnv *, jobject) { + llama_backend_init(); } extern "C" diff --git a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift index fc79fd346..58fcf40c6 100644 --- a/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift +++ 
b/examples/llama.swiftui/llama.cpp.swift/LibLlama.swift @@ -51,7 +51,7 @@ actor LlamaContext { } static func create_context(path: String) throws -> LlamaContext { - llama_backend_init(false) + llama_backend_init() var model_params = llama_model_default_params() #if targetEnvironment(simulator) diff --git a/examples/llava/llava-cli.cpp b/examples/llava/llava-cli.cpp index bef7f7c95..e29da6cb2 100644 --- a/examples/llava/llava-cli.cpp +++ b/examples/llava/llava-cli.cpp @@ -218,7 +218,8 @@ static struct llava_context * llava_init(gpt_params * params) { auto ctx_clip = clip_model_load(clip_path, /*verbosity=*/ 1); - llama_backend_init(params->numa); + llama_backend_init(); + llama_numa_init(params->numa); llama_model_params model_params = llama_model_params_from_gpt_params(*params); diff --git a/examples/lookahead/lookahead.cpp b/examples/lookahead/lookahead.cpp index e55a15a1b..e2551e7a4 100644 --- a/examples/lookahead/lookahead.cpp +++ b/examples/lookahead/lookahead.cpp @@ -54,7 +54,8 @@ int main(int argc, char ** argv) { #endif // LOG_DISABLE_LOGS // init llama.cpp - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model = NULL; llama_context * ctx = NULL; diff --git a/examples/lookup/lookup.cpp b/examples/lookup/lookup.cpp index 18235b8a1..b53fae110 100644 --- a/examples/lookup/lookup.cpp +++ b/examples/lookup/lookup.cpp @@ -31,7 +31,8 @@ int main(int argc, char ** argv){ #endif // LOG_DISABLE_LOGS // init llama.cpp - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model = NULL; llama_context * ctx = NULL; diff --git a/examples/main/README.md b/examples/main/README.md index c7997f665..7f84e4262 100644 --- a/examples/main/README.md +++ b/examples/main/README.md @@ -283,7 +283,11 @@ These options help improve the performance and memory usage of the LLaMA models. ### NUMA support -- `--numa`: Attempt optimizations that help on some systems with non-uniform memory access. This currently consists of pinning an equal proportion of the threads to the cores on each NUMA node, and disabling prefetch and readahead for mmap. The latter causes mapped pages to be faulted in on first access instead of all at once, and in combination with pinning threads to NUMA nodes, more of the pages end up on the NUMA node where they are used. Note that if the model is already in the system page cache, for example because of a previous run without this option, this will have little effect unless you drop the page cache first. This can be done by rebooting the system or on Linux by writing '3' to '/proc/sys/vm/drop_caches' as root. +- `--numa distribute`: Pin an equal proportion of the threads to the cores on each NUMA node. This will spread the load amongst all cores on the system, utilizing all memory channels at the expense of potentially requiring memory to travel over the slow links between nodes. +- `--numa isolate`: Pin all threads to the NUMA node that the program starts on. This limits the number of cores and amount of memory that can be used, but guarantees all memory access remains local to the NUMA node. +- `--numa numactl`: Pin threads to the CPUMAP that is passed to the program by starting it with the numactl utility. This is the most flexible mode, and allows arbitrary core usage patterns, for example a map that uses all the cores on one NUMA node, and just enough cores on a second node to saturate the inter-node memory bus. 
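As an aside, here is a minimal illustrative C++ sketch (not part of the patch itself) of how a host program opts in to one of the three strategies above after this change; the model path is a placeholder and error handling is omitted:

```cpp
#include "llama.h"

int main() {
    // llama_backend_init() no longer takes a bool; NUMA setup is now a separate, optional call
    llama_backend_init();
    llama_numa_init(GGML_NUMA_STRATEGY_DISTRIBUTE); // or _ISOLATE / _NUMACTL, matching the flags above

    llama_model_params mparams = llama_model_default_params();
    llama_model * model = llama_load_model_from_file("model.gguf", mparams); // placeholder path
    if (model != NULL) {
        // ... create a context with llama_new_context_with_model() and run inference ...
        llama_free_model(model);
    }
    llama_backend_free();
    return 0;
}
```

Passing GGML_NUMA_STRATEGY_DISABLED, or skipping llama_numa_init() entirely, should preserve the old non-NUMA behaviour.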
+ + These flags attempt optimizations that help on some systems with non-uniform memory access. This currently consists of one of the above strategies, and disabling prefetch and readahead for mmap. The latter causes mapped pages to be faulted in on first access instead of all at once, and in combination with pinning threads to NUMA nodes, more of the pages end up on the NUMA node where they are used. Note that if the model is already in the system page cache, for example because of a previous run without this option, this will have little effect unless you drop the page cache first. This can be done by rebooting the system or on Linux by writing '3' to '/proc/sys/vm/drop_caches' as root. ### Memory Float 32 diff --git a/examples/main/main.cpp b/examples/main/main.cpp index e8ab8cbae..f5d2f4893 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -185,7 +185,8 @@ int main(int argc, char ** argv) { } LOG("%s: llama backend init\n", __func__); - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/parallel/parallel.cpp b/examples/parallel/parallel.cpp index d2e074d9e..7d11fcd59 100644 --- a/examples/parallel/parallel.cpp +++ b/examples/parallel/parallel.cpp @@ -122,7 +122,8 @@ int main(int argc, char ** argv) { #endif // LOG_DISABLE_LOGS // init llama.cpp - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model = NULL; llama_context * ctx = NULL; diff --git a/examples/passkey/passkey.cpp b/examples/passkey/passkey.cpp index 5c0022832..e12a1cdf1 100644 --- a/examples/passkey/passkey.cpp +++ b/examples/passkey/passkey.cpp @@ -71,7 +71,8 @@ int main(int argc, char ** argv) { // init LLM - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); // initialize the model diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index b2c131d4c..67d2d3293 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -1809,7 +1809,8 @@ int main(int argc, char ** argv) { params.prompt = gpt_random_prompt(rng); } - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model; llama_context * ctx; diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 85f403ffc..4a5c504e3 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -237,7 +237,7 @@ int main(int argc, char ** argv) { params.imatrix = &imatrix_data; } - llama_backend_init(false); + llama_backend_init(); // parse command line arguments const std::string fname_inp = argv[arg_idx]; diff --git a/examples/server/README.md b/examples/server/README.md index 0f7373ae8..8e141d22d 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -16,6 +16,13 @@ Command line options: - `--memory-f32`: Use 32-bit floats instead of 16-bit floats for memory key+value. Not recommended. - `--mlock`: Lock the model in memory, preventing it from being swapped out when memory-mapped. - `--no-mmap`: Do not memory-map the model. By default, models are mapped into memory, which allows the system to load only the necessary parts of the model as needed. 
+- `--numa STRATEGY`: Attempt one of the below optimization strategies that help on some NUMA systems +- `--numa distribute`: Spread execution evenly over all nodes +- `--numa isolate`: Only spawn threads on CPUs on the node that execution started on +- `--numa numactl`: Use the CPU map provided by numactl +if run without this previously, it is recommended to drop the system page cache before using this +see https://github.com/ggerganov/llama.cpp/issues/1437 + - `--numa`: Attempt optimizations that help on some NUMA systems. - `--lora FNAME`: Apply a LoRA (Low-Rank Adaptation) adapter to the model (implies --no-mmap). This allows you to adapt the pretrained model to specific tasks or domains. - `--lora-base FNAME`: Optional model to use as a base for the layers modified by the LoRA adapter. This flag is used in conjunction with the `--lora` flag, and specifies the base model for the adaptation. diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 2decd7762..912c750cc 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1855,7 +1855,10 @@ static void server_print_usage(const char *argv0, const gpt_params &params, { printf(" --no-mmap do not memory-map model (slower load but may reduce pageouts if not using mlock)\n"); } - printf(" --numa attempt optimizations that help on some NUMA systems\n"); + printf(" --numa TYPE attempt optimizations that help on some NUMA systems\n"); + printf(" - distribute: spread execution evenly over all nodes\n"); + printf(" - isolate: only spawn threads on CPUs on the node that execution started on\n"); + printf(" - numactl: use the CPU map provided by numactl\n"); if (llama_supports_gpu_offload()) { printf(" -ngl N, --n-gpu-layers N\n"); printf(" number of layers to store in VRAM\n"); @@ -2264,9 +2267,17 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, { params.use_mmap = false; } - else if (arg == "--numa") - { - params.numa = true; + else if (arg == "--numa") { + if (++i >= argc) { + invalid_param = true; + break; + } else { + std::string value(argv[i]); + /**/ if (value == "distribute" || value == "" ) { params.numa = GGML_NUMA_STRATEGY_DISTRIBUTE; } + else if (value == "isolate") { params.numa = GGML_NUMA_STRATEGY_ISOLATE; } + else if (value == "numactl") { params.numa = GGML_NUMA_STRATEGY_NUMACTL; } + else { invalid_param = true; break; } + } } else if (arg == "--embedding") { @@ -2497,7 +2508,8 @@ int main(int argc, char **argv) params.model_alias = params.model; } - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); LOG_INFO("build info", {{"build", LLAMA_BUILD_NUMBER}, {"commit", LLAMA_COMMIT}}); diff --git a/examples/simple/simple.cpp b/examples/simple/simple.cpp index 9cfde8308..39e2d8ea4 100644 --- a/examples/simple/simple.cpp +++ b/examples/simple/simple.cpp @@ -31,7 +31,8 @@ int main(int argc, char ** argv) { // init LLM - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); // initialize the model diff --git a/examples/speculative/speculative.cpp b/examples/speculative/speculative.cpp index 7b3af01f3..3848791d4 100644 --- a/examples/speculative/speculative.cpp +++ b/examples/speculative/speculative.cpp @@ -50,7 +50,8 @@ int main(int argc, char ** argv) { #endif // LOG_DISABLE_LOGS // init llama.cpp - llama_backend_init(params.numa); + llama_backend_init(); + llama_numa_init(params.numa); llama_model * model_tgt = NULL; llama_model * model_dft = NULL; diff --git a/examples/tokenize/tokenize.cpp 
b/examples/tokenize/tokenize.cpp index 4ff8e3fa7..d95a92475 100644 --- a/examples/tokenize/tokenize.cpp +++ b/examples/tokenize/tokenize.cpp @@ -17,7 +17,7 @@ int main(int argc, char ** argv) { const bool printing_ids = argc > 3 && std::string(argv[3]) == "--ids"; - llama_backend_init(false); + llama_backend_init(); llama_model_params model_params = llama_model_default_params(); model_params.vocab_only = true; diff --git a/ggml.c b/ggml.c index d921d82fe..4e302fb7d 100644 --- a/ggml.c +++ b/ggml.c @@ -1954,9 +1954,16 @@ struct ggml_numa_node { }; struct ggml_numa_nodes { + enum ggml_numa_strategy numa_strategy; struct ggml_numa_node nodes[GGML_NUMA_MAX_NODES]; uint32_t n_nodes; uint32_t total_cpus; // hardware threads on system + uint32_t current_node; // node on which main process is executing +#ifdef __linux__ + cpu_set_t cpuset; // cpuset from numactl +#else + uint32_t cpuset; // no NUMA support outside of Linux at this time. Use a portable datatype +#endif }; // @@ -1990,7 +1997,22 @@ inline static void ggml_critical_section_end(void) { atomic_fetch_sub(&g_state_barrier, 1); } -void ggml_numa_init(void) { +#ifdef __linux__ +static cpu_set_t ggml_get_numa_affinity(void) { + cpu_set_t cpuset; + pthread_t thread; + thread = pthread_self(); + CPU_ZERO(&cpuset); + pthread_getaffinity_np(thread, sizeof(cpu_set_t), &cpuset); + return cpuset; +} +#else +static uint32_t ggml_get_numa_affinity(void) { + return 0; // no NUMA support +} +#endif + +void ggml_numa_init(enum ggml_numa_strategy numa_flag) { if (g_state.numa.n_nodes > 0) { fprintf(stderr, "ggml_numa_init: NUMA already initialized\n"); @@ -2002,6 +2024,13 @@ void ggml_numa_init(void) { char path[256]; int rv; + // set numa scheme + g_state.numa.numa_strategy = numa_flag; + + GGML_PRINT_DEBUG("numa strategy %u\n",g_state.numa.numa_strategy); + + g_state.numa.cpuset = ggml_get_numa_affinity(); + // enumerate nodes while (g_state.numa.n_nodes < GGML_NUMA_MAX_NODES) { rv = snprintf(path, sizeof(path), "/sys/devices/system/node/node%u", g_state.numa.n_nodes); @@ -2020,11 +2049,17 @@ void ggml_numa_init(void) { GGML_PRINT_DEBUG("found %u numa nodes, %u CPUs\n", g_state.numa.n_nodes, g_state.numa.total_cpus); - if (g_state.numa.n_nodes < 1 || g_state.numa.total_cpus < 1) { + // figure out which node we're on + uint current_cpu; + int getcpu_ret = getcpu(&current_cpu, &g_state.numa.current_node); + + if (g_state.numa.n_nodes < 1 || g_state.numa.total_cpus < 1 || getcpu_ret != 0) { g_state.numa.n_nodes = 0; return; } + GGML_PRINT_DEBUG("found our process on numa node %u, CPU %u\n", g_state.numa.current_node, current_cpu); + for (uint32_t n = 0; n < g_state.numa.n_nodes; ++n) { struct ggml_numa_node * node = &g_state.numa.nodes[n]; GGML_PRINT_DEBUG("CPUs on node %u:", n); @@ -16638,26 +16673,46 @@ typedef pthread_t ggml_thread_t; // Android's libc implementation "bionic" does not support setting affinity #if defined(__linux__) && !defined(__BIONIC__) -static void set_numa_thread_affinity(int thread_n, int n_threads) { +static void set_numa_thread_affinity(int thread_n) { if (!ggml_is_numa()) { return; } - // run thread on node_num thread_n / (threads per node) - const int node_num = thread_n / ((n_threads + g_state.numa.n_nodes - 1) / g_state.numa.n_nodes); - struct ggml_numa_node * node = &g_state.numa.nodes[node_num]; + int node_num; + int rv; size_t setsize = CPU_ALLOC_SIZE(g_state.numa.total_cpus); + switch(g_state.numa.numa_strategy) { + case GGML_NUMA_STRATEGY_DISTRIBUTE: + // run thread on node_num thread_n / (threads per node) + node_num = 
thread_n % g_state.numa.n_nodes; + break; + case GGML_NUMA_STRATEGY_ISOLATE: + // run thread on current_node + node_num = g_state.numa.current_node; + break; + case GGML_NUMA_STRATEGY_NUMACTL: + // use the cpuset that numactl gave us + rv = pthread_setaffinity_np(pthread_self(), setsize, &g_state.numa.cpuset); + if (rv) { + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n",strerror(rv)); + } + return; + default: + return; + } + + struct ggml_numa_node * node = &g_state.numa.nodes[node_num]; + cpu_set_t * cpus = CPU_ALLOC(g_state.numa.total_cpus); CPU_ZERO_S(setsize, cpus); for (size_t i = 0; i < node->n_cpus; ++i) { CPU_SET_S(node->cpus[i], setsize, cpus); } - int rv = pthread_setaffinity_np(pthread_self(), setsize, cpus); + rv = pthread_setaffinity_np(pthread_self(), setsize, cpus); if (rv) { - fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", - strerror(rv)); + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", strerror(rv)); } CPU_FREE(cpus); @@ -16678,8 +16733,7 @@ static void clear_numa_thread_affinity(void) { int rv = pthread_setaffinity_np(pthread_self(), setsize, cpus); if (rv) { - fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", - strerror(rv)); + fprintf(stderr, "warning: pthread_setaffinity_np() failed: %s\n", strerror(rv)); } CPU_FREE(cpus); @@ -16687,7 +16741,7 @@ static void clear_numa_thread_affinity(void) { #else // TODO: Windows etc. // (the linux implementation may also work on BSD, someone should test) -static void set_numa_thread_affinity(int thread_n, int n_threads) { UNUSED(thread_n); UNUSED(n_threads); } +static void set_numa_thread_affinity(int thread_n) { UNUSED(thread_n); } static void clear_numa_thread_affinity(void) {} #endif @@ -16987,7 +17041,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { const int n_threads = state->shared->n_threads; - set_numa_thread_affinity(state->ith, n_threads); + set_numa_thread_affinity(state->ith); int node_n = -1; int task_phase = GGML_TASK_FINALIZE; diff --git a/ggml.h b/ggml.h index 01cecc1e1..270018185 100644 --- a/ggml.h +++ b/ggml.h @@ -658,6 +658,16 @@ extern "C" { void * wdata; }; + // numa strategies + enum ggml_numa_strategy { + GGML_NUMA_STRATEGY_DISABLED = 0, + GGML_NUMA_STRATEGY_DISTRIBUTE = 1, + GGML_NUMA_STRATEGY_ISOLATE = 2, + GGML_NUMA_STRATEGY_NUMACTL = 3, + GGML_NUMA_STRATEGY_MIRROR = 4, + GGML_NUMA_STRATEGY_COUNT + }; + // misc GGML_API void ggml_time_init(void); // call this once at the beginning of the program @@ -668,7 +678,7 @@ extern "C" { GGML_API void ggml_print_backtrace(void); - GGML_API void ggml_numa_init(void); // call once for better performance on NUMA systems + GGML_API void ggml_numa_init(enum ggml_numa_strategy numa); // call once for better performance on NUMA systems GGML_API bool ggml_is_numa(void); // true if init detected that system has >1 NUMA node GGML_API void ggml_print_object (const struct ggml_object * obj); diff --git a/llama.cpp b/llama.cpp index aceb9c25a..08e7b02b4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1034,7 +1034,7 @@ struct llama_mmap { int fd = fileno(file->fp); int flags = MAP_SHARED; // prefetch/readahead impairs performance on NUMA systems - if (numa) { prefetch = 0; } + if (numa) { prefetch = 0; } #ifdef __linux__ // advise the kernel to read the file sequentially (increases readahead) if (posix_fadvise(fd, 0, 0, POSIX_FADV_SEQUENTIAL)) { @@ -11182,7 +11182,7 @@ bool llama_mlock_supported(void) { return llama_supports_mlock(); } -void llama_backend_init(bool numa) { +void 
llama_backend_init(void) { ggml_time_init(); // needed to initialize f16 tables @@ -11192,15 +11192,17 @@ void llama_backend_init(bool numa) { ggml_free(ctx); } - if (numa) { - ggml_numa_init(); - } - #ifdef GGML_USE_MPI ggml_mpi_backend_init(); #endif } +void llama_numa_init(enum ggml_numa_strategy numa) { + if (numa != GGML_NUMA_STRATEGY_DISABLED) { + ggml_numa_init(numa); + } +} + void llama_backend_free(void) { #ifdef GGML_USE_MPI ggml_mpi_backend_free(); diff --git a/llama.h b/llama.h index 4a26bd619..f4ec6ea63 100644 --- a/llama.h +++ b/llama.h @@ -312,7 +312,10 @@ extern "C" { // Initialize the llama + ggml backend // If numa is true, use NUMA optimizations // Call once at the start of the program - LLAMA_API void llama_backend_init(bool numa); + LLAMA_API void llama_backend_init(void); + + //optional: + LLAMA_API void llama_numa_init(enum ggml_numa_strategy numa); // Call once at the end of the program - currently only used for MPI LLAMA_API void llama_backend_free(void); diff --git a/tests/test-autorelease.cpp b/tests/test-autorelease.cpp index 36a23c0bb..57fa00011 100644 --- a/tests/test-autorelease.cpp +++ b/tests/test-autorelease.cpp @@ -12,7 +12,7 @@ int main(int argc, char ** argv) { auto * model_path = get_model_or_exit(argc, argv); std::thread([&model_path]() { - llama_backend_init(false); + llama_backend_init(); auto * model = llama_load_model_from_file(model_path, llama_model_default_params()); auto * ctx = llama_new_context_with_model(model, llama_context_default_params()); llama_free(ctx); diff --git a/tests/test-model-load-cancel.cpp b/tests/test-model-load-cancel.cpp index 7ea4bbacc..858535c3c 100644 --- a/tests/test-model-load-cancel.cpp +++ b/tests/test-model-load-cancel.cpp @@ -14,7 +14,7 @@ int main(int argc, char *argv[] ) { fprintf(stderr, "using '%s'\n", model_path); fclose(file); - llama_backend_init(false); + llama_backend_init(); auto params = llama_model_params{}; params.use_mmap = false; params.progress_callback = [](float progress, void * ctx){ diff --git a/tests/test-tokenizer-0-falcon.cpp b/tests/test-tokenizer-0-falcon.cpp index a4e9d2b91..472b0b3a8 100644 --- a/tests/test-tokenizer-0-falcon.cpp +++ b/tests/test-tokenizer-0-falcon.cpp @@ -61,7 +61,7 @@ int main(int argc, char **argv) { llama_model * model; llama_context * ctx; - llama_backend_init(false); + llama_backend_init(); // load the vocab { diff --git a/tests/test-tokenizer-0-llama.cpp b/tests/test-tokenizer-0-llama.cpp index 39c8d188c..0a16cd7eb 100644 --- a/tests/test-tokenizer-0-llama.cpp +++ b/tests/test-tokenizer-0-llama.cpp @@ -60,7 +60,7 @@ int main(int argc, char **argv) { llama_model * model; llama_context * ctx; - llama_backend_init(false); + llama_backend_init(); // load the vocab { diff --git a/tests/test-tokenizer-1-bpe.cpp b/tests/test-tokenizer-1-bpe.cpp index 3bb629561..3596ce55a 100644 --- a/tests/test-tokenizer-1-bpe.cpp +++ b/tests/test-tokenizer-1-bpe.cpp @@ -25,7 +25,7 @@ int main(int argc, char **argv) { llama_model * model; llama_context * ctx; - llama_backend_init(false); + llama_backend_init(); // load the vocab { diff --git a/tests/test-tokenizer-1-llama.cpp b/tests/test-tokenizer-1-llama.cpp index b0d814a41..9333f8686 100644 --- a/tests/test-tokenizer-1-llama.cpp +++ b/tests/test-tokenizer-1-llama.cpp @@ -25,7 +25,7 @@ int main(int argc, char **argv) { llama_model * model; llama_context * ctx; - llama_backend_init(false); + llama_backend_init(); // load the vocab { From 5f5808ca7b7f23a1fa7a77241842bb84a0e55108 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?R=C5=91czey=20Barnab=C3=A1s?= <31726601+An0nie@users.noreply.github.com> Date: Fri, 16 Feb 2024 11:00:56 +0100 Subject: [PATCH 762/859] server : fix system prompt cli (#5516) --- examples/server/server.cpp | 47 ++++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 25 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 912c750cc..0cb802ce8 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -436,10 +436,6 @@ struct llama_server_context default_generation_settings_for_props["seed"] = -1; batch = llama_batch_init(n_ctx, 0, params.n_parallel); - - // empty system prompt - system_prompt = ""; - system_tokens.clear(); } std::vector<llama_token> tokenize(const json & json_prompt, bool add_bos) const @@ -765,27 +761,30 @@ struct llama_server_context } void update_system_prompt() { - system_tokens = ::llama_tokenize(ctx, system_prompt, add_bos_token); - - llama_batch_clear(batch); - kv_cache_clear(); + system_tokens.clear(); - for (int i = 0; i < (int) system_tokens.size(); ++i) - { - llama_batch_add(batch, system_tokens[i], i, { 0 }, false); - } + if (!system_prompt.empty()) { + system_tokens = ::llama_tokenize(ctx, system_prompt, add_bos_token); - if (llama_decode(ctx, batch) != 0) - { - LOG_TEE("%s: llama_decode() failed\n", __func__); - return; - } + llama_batch_clear(batch); - // assign the system KV cache to all parallel sequences - for (int32_t i = 1; i < params.n_parallel; ++i) - { - llama_kv_cache_seq_cp(ctx, 0, i, 0, system_tokens.size()); + for (int i = 0; i < (int)system_tokens.size(); ++i) + { + llama_batch_add(batch, system_tokens[i], i, { 0 }, false); + } + + if (llama_decode(ctx, batch) != 0) + { + LOG_TEE("%s: llama_decode() failed\n", __func__); + return; + } + + // assign the system KV cache to all parallel sequences + for (int32_t i = 1; i < params.n_parallel; ++i) + { + llama_kv_cache_seq_cp(ctx, 0, i, 0, system_tokens.size()); + } } LOG_TEE("system prompt updated\n"); @@ -807,10 +806,8 @@ struct llama_server_context name_user = sys_props.value("anti_prompt", ""); name_assistant = sys_props.value("assistant_name", ""); - if (slots.size() > 0) - { - notify_system_prompt_changed(); - } + + notify_system_prompt_changed(); } static size_t find_stopping_strings(const std::string &text, const size_t last_token_size, From 6dcc02d2444c779c18d49c364c5d5c5728b6b484 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Fri, 16 Feb 2024 11:33:25 +0000 Subject: [PATCH 763/859] server : add "samplers" param to control the samplers order (#5494) --- common/common.cpp | 59 ++++++++++++++++++++++++-------------- common/common.h | 2 +- common/sampling.cpp | 2 +- common/sampling.h | 14 ++++----- examples/server/README.md | 2 ++ examples/server/server.cpp | 25 ++++++++++++++++ 6 files changed, 74 insertions(+), 30 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index c5e83cc2a..3a92d3797 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -341,7 +341,7 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } const auto sampler_names = string_split(argv[i], ';'); - sparams.samplers_sequence = sampler_types_from_names(sampler_names); + sparams.samplers_sequence = sampler_types_from_names(sampler_names, true); } else if (arg == "--sampling-seq") { if (++i >= argc) { invalid_param = true; @@ -964,7 +964,8 @@ void gpt_print_usage(int /*argc*/, char ** argv, const gpt_params & params) { printf(" -n N, --n-predict N number of tokens to predict (default: %d, -1 = infinity, -2 = until 
context filled)\n", params.n_predict); printf(" -c N, --ctx-size N size of the prompt context (default: %d, 0 = loaded from model)\n", params.n_ctx); printf(" -b N, --batch-size N batch size for prompt processing (default: %d)\n", params.n_batch); - printf(" --samplers samplers that will be used for generation in the order, separated by \';\' (default: %s)\n", sampler_type_names.c_str()); + printf(" --samplers samplers that will be used for generation in the order, separated by \';\'\n"); + printf(" (default: %s)\n", sampler_type_names.c_str()); printf(" --sampling-seq simplified sequence for samplers that will be used (default: %s)\n", sampler_type_chars.c_str()); printf(" --top-k N top-k sampling (default: %d, 0 = disabled)\n", sparams.top_k); printf(" --top-p N top-p sampling (default: %.1f, 1.0 = disabled)\n", (double)sparams.top_p); @@ -1133,34 +1134,50 @@ std::vector<std::string> string_split(std::string input, char separator) { return parts; } -std::vector<llama_sampler_type> sampler_types_from_names(const std::vector<std::string> & names) { +std::vector<llama_sampler_type> sampler_types_from_names(const std::vector<std::string> & names, bool allow_alt_names) { + std::unordered_map<std::string, llama_sampler_type> sampler_canonical_name_map { + {"top_k", llama_sampler_type::TOP_K}, + {"top_p", llama_sampler_type::TOP_P}, + {"typical_p", llama_sampler_type::TYPICAL_P}, + {"min_p", llama_sampler_type::MIN_P}, + {"tfs_z", llama_sampler_type::TFS_Z}, + {"temperature", llama_sampler_type::TEMPERATURE} + }; + // since samplers names are written multiple ways // make it ready for both system names and input names - std::unordered_map<std::string, llama_sampler_type> sampler_name_map { - {"top_k", llama_sampler_type::TOP_K}, + std::unordered_map<std::string, llama_sampler_type> sampler_alt_name_map { {"top-k", llama_sampler_type::TOP_K}, - {"top_p", llama_sampler_type::TOP_P}, {"top-p", llama_sampler_type::TOP_P}, {"nucleus", llama_sampler_type::TOP_P}, - {"typical_p", llama_sampler_type::TYPICAL_P}, {"typical-p", llama_sampler_type::TYPICAL_P}, {"typical", llama_sampler_type::TYPICAL_P}, - {"min_p", llama_sampler_type::MIN_P}, {"min-p", llama_sampler_type::MIN_P}, - {"tfs_z", llama_sampler_type::TFS_Z}, {"tfs-z", llama_sampler_type::TFS_Z}, {"tfs", llama_sampler_type::TFS_Z}, - {"temp", llama_sampler_type::TEMP}, - {"temperature", llama_sampler_type::TEMP} + {"temp", llama_sampler_type::TEMPERATURE} }; std::vector<llama_sampler_type> sampler_types; sampler_types.reserve(names.size()); - for (const auto& name : names) { - const auto sampler_item = sampler_name_map.find(name); - if (sampler_item != sampler_name_map.end()) { + for (const auto & name : names) + { + auto sampler_item = sampler_canonical_name_map.find(name); + if (sampler_item != sampler_canonical_name_map.end()) + { sampler_types.push_back(sampler_item->second); } + else + { + if (allow_alt_names) + { + sampler_item = sampler_alt_name_map.find(name); + if (sampler_item != sampler_alt_name_map.end()) + { + sampler_types.push_back(sampler_item->second); + } + } + } } return sampler_types; } @@ -1172,7 +1189,7 @@ std::vector<llama_sampler_type> sampler_types_from_chars(const std::string & nam {'y', llama_sampler_type::TYPICAL_P}, {'m', llama_sampler_type::MIN_P}, {'f', llama_sampler_type::TFS_Z}, - {'t', llama_sampler_type::TEMP} + {'t', llama_sampler_type::TEMPERATURE} }; std::vector<llama_sampler_type> sampler_types; @@ -1188,12 +1205,12 @@ std::vector<llama_sampler_type> sampler_types_from_chars(const std::string & nam std::string sampler_type_to_name_string(llama_sampler_type sampler_type) { switch (sampler_type) { - case llama_sampler_type::TOP_K: return "top_k"; - case llama_sampler_type::TFS_Z: return "tfs_z"; - case llama_sampler_type::TYPICAL_P: return "typical_p"; - case 
llama_sampler_type::TOP_P: return "top_p"; - case llama_sampler_type::MIN_P: return "min_p"; - case llama_sampler_type::TEMP: return "temp"; + case llama_sampler_type::TOP_K: return "top_k"; + case llama_sampler_type::TFS_Z: return "tfs_z"; + case llama_sampler_type::TYPICAL_P: return "typical_p"; + case llama_sampler_type::TOP_P: return "top_p"; + case llama_sampler_type::MIN_P: return "min_p"; + case llama_sampler_type::TEMPERATURE: return "temperature"; default : return ""; } } diff --git a/common/common.h b/common/common.h index 74c136995..935771d44 100644 --- a/common/common.h +++ b/common/common.h @@ -165,7 +165,7 @@ void process_escapes(std::string& input); // String utils // -std::vector<llama_sampler_type> sampler_types_from_names(const std::vector<std::string> & names); +std::vector<llama_sampler_type> sampler_types_from_names(const std::vector<std::string> & names, bool allow_alt_names); std::vector<llama_sampler_type> sampler_types_from_chars(const std::string & names_string); std::vector<std::string> string_split(std::string input, char separator); std::string sampler_type_to_name_string(llama_sampler_type sampler_type); diff --git a/common/sampling.cpp b/common/sampling.cpp index a001750da..53013138a 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -139,7 +139,7 @@ static void sampler_queue( case llama_sampler_type::TYPICAL_P: llama_sample_typical (ctx_main, &cur_p, typical_p, min_keep); break; case llama_sampler_type::TOP_P : llama_sample_top_p (ctx_main, &cur_p, top_p, min_keep); break; case llama_sampler_type::MIN_P : llama_sample_min_p (ctx_main, &cur_p, min_p, min_keep); break; - case llama_sampler_type::TEMP: + case llama_sampler_type::TEMPERATURE: if (dynatemp_range > 0) { float dynatemp_min = std::max(0.0f, temp - dynatemp_range); float dynatemp_max = std::max(0.0f, temp + dynatemp_range); diff --git a/common/sampling.h b/common/sampling.h index 2bd6a75d2..e1279a894 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -10,12 +10,12 @@ // sampler types enum class llama_sampler_type : char { - TOP_K = 'k', - TOP_P = 'p', - MIN_P = 'm', - TFS_Z = 'f', - TYPICAL_P = 'y', - TEMP = 't' + TOP_K = 'k', + TOP_P = 'p', + MIN_P = 'm', + TFS_Z = 'f', + TYPICAL_P = 'y', + TEMPERATURE = 't' }; // sampling parameters @@ -45,7 +45,7 @@ typedef struct llama_sampling_params { llama_sampler_type::TYPICAL_P, llama_sampler_type::TOP_P, llama_sampler_type::MIN_P, - llama_sampler_type::TEMP + llama_sampler_type::TEMPERATURE }; std::string grammar; // optional BNF-like grammar to constrain sampling diff --git a/examples/server/README.md b/examples/server/README.md index 8e141d22d..249368749 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -204,6 +204,8 @@ node index.js `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime) + `samplers`: The order the samplers should be applied in. An array of strings representing sampler type names. If a sampler is not set, it will not be used. If a sampler is specified more than once, it will be applied multiple times. (default: `["top_k", "tfs_z", "typical_p", "top_p", "min_p", "temperature"]` - these are all the available values) ### Result JSON - Note: When using streaming mode (`stream`) only `content` and `stop` will be returned until end of completion. 
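To illustrate how these name strings map onto the common-library types shown above, here is a small self-contained C++ sketch (an assumption-laden example, not code from the patch; the helper name make_sampling_params is hypothetical) that resolves a client-supplied sampler order the same way the server does, with alternate spellings disallowed:

```cpp
#include "common.h"
#include "sampling.h"

#include <string>
#include <vector>

// Build sampling parameters from a client-supplied sampler order.
// Unrecognized names are simply skipped by sampler_types_from_names().
static llama_sampling_params make_sampling_params(const std::vector<std::string> & names) {
    llama_sampling_params sparams;
    const std::vector<llama_sampler_type> types = sampler_types_from_names(names, /*allow_alt_names=*/false);
    if (!types.empty()) {
        sparams.samplers_sequence = types; // e.g. built from {"top_k", "min_p", "temperature"}
    }
    return sparams;
}
```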
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 0cb802ce8..a0b46970b 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -672,6 +672,24 @@ struct llama_server_context } } + const auto &samplers_sequence = data.find("samplers"); + if (samplers_sequence != data.end() && samplers_sequence->is_array()) + { + std::vector<std::string> sampler_names; + for (const auto &sampler_name : *samplers_sequence) + { + if (sampler_name.is_string()) + { + sampler_names.emplace_back(sampler_name); + } + } + slot->sparams.samplers_sequence = sampler_types_from_names(sampler_names, false); + } + else + { + slot->sparams.samplers_sequence = default_sparams.samplers_sequence; + } + if (multimodal) { const auto &images_data = data.find("image_data"); @@ -1026,6 +1044,12 @@ struct llama_server_context const auto eos_bias = slot.sparams.logit_bias.find(llama_token_eos(model)); const bool ignore_eos = eos_bias != slot.sparams.logit_bias.end() && eos_bias->second < 0.0f && std::isinf(eos_bias->second); + std::vector<std::string> samplers_sequence; + for (const auto &sampler_type : slot.sparams.samplers_sequence) + { + samplers_sequence.emplace_back(sampler_type_to_name_string(sampler_type)); + } + return json { {"n_ctx", slot.n_ctx}, {"model", params.model_alias}, @@ -1056,6 +1080,7 @@ struct llama_server_context {"logit_bias", slot.sparams.logit_bias}, {"n_probs", slot.sparams.n_probs}, {"grammar", slot.sparams.grammar}, + {"samplers", samplers_sequence} }; } From 65085c713e14f78cdda6abc275b1a5d8c2b8ca15 Mon Sep 17 00:00:00 2001 From: Herman Semenov Date: Fri, 16 Feb 2024 11:45:48 +0000 Subject: [PATCH 764/859] llama : minor fixed return int value (#5529) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 08e7b02b4..8966c3e66 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10893,7 +10893,7 @@ static int llama_apply_lora_from_file_internal( { LLAMA_LOG_ERROR("%s: invalid tensor data type '%d'\n", __func__, ftype); - return false; + return 1; } } From 4cb072769804c77ab466bc8351c76ede9d5ba49d Mon Sep 17 00:00:00 2001 From: Herman Semenov Date: Fri, 16 Feb 2024 12:43:23 +0000 Subject: [PATCH 765/859] llava : removed excess free(NULL) operation (#5531) --- examples/llava/llava.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 4ed310a0e..4cb65a07b 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -315,7 +315,6 @@ static bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_thre float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)*6); // TODO: base on gridsize/llava model if (!image_embd) { fprintf(stderr, "Unable to allocate memory for image embeddings\n"); - free(image_embd); return false; } From d2819d5577b35507be83d0c3f4d2d3c0ab1488ca Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 16 Feb 2024 15:14:40 +0200 Subject: [PATCH 766/859] scripts : add helpers script for bench comparing commits (#5521) * scripts : add helpers script for bench comparing commits * scripts : detect CUDA * set flags after checking the command line * fix make flags --------- Co-authored-by: slaren --- scripts/compare-commits.sh | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100755 scripts/compare-commits.sh diff --git a/scripts/compare-commits.sh b/scripts/compare-commits.sh new file mode 100755 index 000000000..331c4b9ce --- /dev/null +++ b/scripts/compare-commits.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +if [ $# -lt 2 
]; then + echo "usage: ./scripts/compare-commits.sh <commit1> <commit2> [additional llama-bench arguments]" + exit 1 +fi + +set -e +set -x + +bench_args="${@:3}" + +rm -f llama-bench.sqlite + +backend="cpu" + +if [[ "$OSTYPE" == "darwin"* ]]; then + backend="metal" +elif command -v nvcc &> /dev/null; then + backend="cuda" +fi + +make_opts="" + +if [[ "$backend" == "cuda" ]]; then + make_opts="LLAMA_CUBLAS=1" +fi + +git checkout $1 +make clean && make -j32 $make_opts llama-bench +./llama-bench -o sql $bench_args | tee /dev/tty | sqlite3 llama-bench.sqlite + +git checkout $2 +make clean && make -j32 $make_opts llama-bench +./llama-bench -o sql $bench_args | tee /dev/tty | sqlite3 llama-bench.sqlite + +./scripts/compare-llama-bench.py -b $1 -c $2 From 5bf2b94dd4fb74378b78604023b31512fec55f8f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Fri, 16 Feb 2024 19:05:56 +0200 Subject: [PATCH 767/859] cmake : fix VULKAN and ROCm builds (#5525) * cmake : fix VULKAN and ROCm builds * cmake : fix (cont) * vulkan : fix compile warnings ggml-ci * cmake : fix ggml-ci * cmake : minor ggml-ci --- CMakeLists.txt | 379 +++++++++++++++++++++++++----------------------- ggml-vulkan.cpp | 12 +- 2 files changed, 205 insertions(+), 186 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index f8c7f9978..2a922fdb3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -112,17 +112,14 @@ option(LLAMA_MPI "llama: use MPI" option(LLAMA_QKK_64 "llama: use super-block size of 64 for k-quants" OFF) option(LLAMA_SYCL "llama: use SYCL" OFF) option(LLAMA_SYCL_F16 "llama: use 16 bit floats for sycl calculations" OFF) +option(LLAMA_CPU_HBM "llama: use memkind for CPU HBM" OFF) option(LLAMA_BUILD_TESTS "llama: build tests" ${LLAMA_STANDALONE}) option(LLAMA_BUILD_EXAMPLES "llama: build examples" ${LLAMA_STANDALONE}) option(LLAMA_BUILD_SERVER "llama: build server example" ON) - # add perf arguments option(LLAMA_PERF "llama: enable perf" OFF) -if (LLAMA_PERF) - add_definitions(-DGGML_PERF) -endif() # Required for relocatable CMake package include(${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake) @@ -130,6 +127,7 @@ include(${CMAKE_CURRENT_SOURCE_DIR}/scripts/build-info.cmake) # # Compile flags # + if (LLAMA_SYCL) set(CMAKE_CXX_STANDARD 17) else() @@ -140,6 +138,7 @@ set(CMAKE_CXX_STANDARD_REQUIRED true) set(CMAKE_C_STANDARD 11) set(CMAKE_C_STANDARD_REQUIRED true) set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads REQUIRED) include(CheckCXXCompilerFlag) @@ -151,17 +150,17 @@ endif() if (NOT MSVC) if (LLAMA_SANITIZE_THREAD) add_compile_options(-fsanitize=thread) - link_libraries(-fsanitize=thread) + link_libraries (-fsanitize=thread) endif() if (LLAMA_SANITIZE_ADDRESS) add_compile_options(-fsanitize=address -fno-omit-frame-pointer) - link_libraries(-fsanitize=address) + link_libraries (-fsanitize=address) endif() if (LLAMA_SANITIZE_UNDEFINED) add_compile_options(-fsanitize=undefined) - link_libraries(-fsanitize=undefined) + link_libraries (-fsanitize=undefined) endif() endif() @@ -298,14 +297,17 @@ if (LLAMA_BLAS) endif() message(STATUS "BLAS found, Includes: ${BLAS_INCLUDE_DIRS}") + add_compile_options(${BLAS_LINKER_FLAGS}) + add_compile_definitions(GGML_USE_OPENBLAS) + if (${BLAS_INCLUDE_DIRS} MATCHES "mkl" AND (${LLAMA_BLAS_VENDOR} MATCHES "Generic" OR ${LLAMA_BLAS_VENDOR} MATCHES "Intel")) add_compile_definitions(GGML_BLAS_USE_MKL) endif() - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ${BLAS_LIBRARIES}) - set(LLAMA_EXTRA_INCLUDES ${LLAMA_EXTRA_INCLUDES} ${BLAS_INCLUDE_DIRS}) + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} 
${BLAS_LIBRARIES}) + set(LLAMA_EXTRA_INCLUDES ${LLAMA_EXTRA_INCLUDES} ${BLAS_INCLUDE_DIRS}) else() message(WARNING "BLAS not found, please refer to " "https://cmake.org/cmake/help/latest/module/FindBLAS.html#blas-lapack-vendors" @@ -330,9 +332,6 @@ if (LLAMA_CUBLAS) set(GGML_SOURCES_CUDA ggml-cuda.cu) add_compile_definitions(GGML_USE_CUBLAS) -# if (LLAMA_CUDA_CUBLAS) -# add_compile_definitions(GGML_CUDA_CUBLAS) -# endif() if (LLAMA_CUDA_FORCE_DMMV) add_compile_definitions(GGML_CUDA_FORCE_DMMV) endif() @@ -387,15 +386,20 @@ if (LLAMA_MPI) find_package(MPI) if (MPI_C_FOUND) message(STATUS "MPI found") + set(GGML_HEADERS_MPI ggml-mpi.h) - set(GGML_SOURCES_MPI ggml-mpi.c ggml-mpi.h) + set(GGML_SOURCES_MPI ggml-mpi.c) + add_compile_definitions(GGML_USE_MPI) add_compile_definitions(${MPI_C_COMPILE_DEFINITIONS}) + if (NOT MSVC) add_compile_options(-Wno-cast-qual) endif() + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ${MPI_C_LIBRARIES}) set(LLAMA_EXTRA_INCLUDES ${LLAMA_EXTRA_INCLUDES} ${MPI_C_INCLUDE_DIRS}) + # Even if you're only using the C header, C++ programs may bring in MPI # C++ functions, so more linkage is needed if (MPI_CXX_FOUND) @@ -427,31 +431,28 @@ if (LLAMA_VULKAN) if (Vulkan_FOUND) message(STATUS "Vulkan found") - add_library(ggml-vulkan OBJECT ggml-vulkan.cpp ggml-vulkan.h) - if (BUILD_SHARED_LIBS) - set_target_properties(ggml-vulkan PROPERTIES POSITION_INDEPENDENT_CODE ON) - endif() - target_link_libraries(ggml-vulkan PRIVATE Vulkan::Vulkan) + set(GGML_HEADERS_VULKAN ggml-vulkan.h) + set(GGML_SOURCES_VULKAN ggml-vulkan.cpp) add_compile_definitions(GGML_USE_VULKAN) if (LLAMA_VULKAN_CHECK_RESULTS) - target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_CHECK_RESULTS) + add_compile_definitions(GGML_VULKAN_CHECK_RESULTS) endif() if (LLAMA_VULKAN_DEBUG) - target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_DEBUG) + add_compile_definitions(GGML_VULKAN_DEBUG) endif() if (LLAMA_VULKAN_VALIDATE) - target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_VALIDATE) + add_compile_definitions(GGML_VULKAN_VALIDATE) endif() if (LLAMA_VULKAN_RUN_TESTS) - target_compile_definitions(ggml-vulkan PRIVATE GGML_VULKAN_RUN_TESTS) + add_compile_definitions(GGML_VULKAN_RUN_TESTS) endif() - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ggml-vulkan) + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} Vulkan::Vulkan) else() message(WARNING "Vulkan not found") endif() @@ -463,43 +464,45 @@ if (LLAMA_HIPBLAS) if (NOT ${CMAKE_C_COMPILER_ID} MATCHES "Clang") message(WARNING "Only LLVM is supported for HIP, hint: CC=/opt/rocm/llvm/bin/clang") endif() + if (NOT ${CMAKE_CXX_COMPILER_ID} MATCHES "Clang") message(WARNING "Only LLVM is supported for HIP, hint: CXX=/opt/rocm/llvm/bin/clang++") endif() - find_package(hip) - find_package(hipblas) - find_package(rocblas) + find_package(hip REQUIRED) + find_package(hipblas REQUIRED) + find_package(rocblas REQUIRED) - if (${hipblas_FOUND} AND ${hip_FOUND}) - message(STATUS "HIP and hipBLAS found") - add_compile_definitions(GGML_USE_HIPBLAS GGML_USE_CUBLAS) - if (LLAMA_HIP_UMA) - add_compile_definitions(GGML_HIP_UMA) - endif() - add_library(ggml-rocm OBJECT ggml-cuda.cu ggml-cuda.h) - if (BUILD_SHARED_LIBS) - set_target_properties(ggml-rocm PROPERTIES POSITION_INDEPENDENT_CODE ON) - endif() - if (LLAMA_CUDA_FORCE_DMMV) - target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_FORCE_DMMV) - endif() - if (LLAMA_CUDA_FORCE_MMQ) - target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_FORCE_MMQ) - endif() - target_compile_definitions(ggml-rocm PRIVATE 
GGML_CUDA_DMMV_X=${LLAMA_CUDA_DMMV_X}) - target_compile_definitions(ggml-rocm PRIVATE GGML_CUDA_MMV_Y=${LLAMA_CUDA_MMV_Y}) - target_compile_definitions(ggml-rocm PRIVATE K_QUANTS_PER_ITERATION=${LLAMA_CUDA_KQUANTS_ITER}) - set_source_files_properties(ggml-cuda.cu PROPERTIES LANGUAGE CXX) - target_link_libraries(ggml-rocm PRIVATE hip::device PUBLIC hip::host roc::rocblas roc::hipblas) + message(STATUS "HIP and hipBLAS found") - if (LLAMA_STATIC) - message(FATAL_ERROR "Static linking not supported for HIP/ROCm") - endif() - set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} ggml-rocm) - else() - message(WARNING "hipBLAS or HIP not found. Try setting CMAKE_PREFIX_PATH=/opt/rocm") + set(GGML_HEADERS_ROCM ggml-cuda.h) + set(GGML_SOURCES_ROCM ggml-cuda.cu) + + add_compile_definitions(GGML_USE_HIPBLAS GGML_USE_CUBLAS) + + if (LLAMA_HIP_UMA) + add_compile_definitions(GGML_HIP_UMA) endif() + + if (LLAMA_CUDA_FORCE_DMMV) + add_compile_definitions(GGML_CUDA_FORCE_DMMV) + endif() + + if (LLAMA_CUDA_FORCE_MMQ) + add_compile_definitions(GGML_CUDA_FORCE_MMQ) + endif() + + add_compile_definitions(GGML_CUDA_DMMV_X=${LLAMA_CUDA_DMMV_X}) + add_compile_definitions(GGML_CUDA_MMV_Y=${LLAMA_CUDA_MMV_Y}) + add_compile_definitions(K_QUANTS_PER_ITERATION=${LLAMA_CUDA_KQUANTS_ITER}) + + set_source_files_properties(ggml-cuda.cu PROPERTIES LANGUAGE CXX) + + if (LLAMA_STATIC) + message(FATAL_ERROR "Static linking not supported for HIP/ROCm") + endif() + + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} hip::device PUBLIC hip::host roc::rocblas roc::hipblas) endif() if (LLAMA_SYCL) @@ -509,10 +512,14 @@ if (LLAMA_SYCL) #todo: AOT find_package(IntelSYCL REQUIRED) + + message(STATUS "SYCL found") + + add_compile_definitions(GGML_USE_SYCL) + if (LLAMA_SYCL_F16) add_compile_definitions(GGML_SYCL_F16) endif() - add_compile_definitions(GGML_USE_SYCL) add_compile_options(-I./) #include DPCT add_compile_options(-I/${SYCL_INCLUDE_DIR}) @@ -521,7 +528,7 @@ if (LLAMA_SYCL) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsycl -L${MKLROOT}/lib") - set(GGML_HEADERS_SYCL ggml.h ggml-sycl.h) + set(GGML_HEADERS_SYCL ggml-sycl.h) set(GGML_SOURCES_SYCL ggml-sycl.cpp) if (WIN32) @@ -540,61 +547,61 @@ if (LLAMA_KOMPUTE) endif() function(compile_shader) - set(options) - set(oneValueArgs) - set(multiValueArgs SOURCES) - cmake_parse_arguments(compile_shader "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - foreach(source ${compile_shader_SOURCES}) - get_filename_component(filename ${source} NAME) - set(spv_file ${filename}.spv) - add_custom_command( - OUTPUT ${spv_file} - DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${source} - ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/common.comp - ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_getrows.comp - ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_mul_mv_q_n_pre.comp - ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_mul_mv_q_n.comp - COMMAND ${glslc_executable} --target-env=vulkan1.2 -o ${spv_file} ${CMAKE_CURRENT_SOURCE_DIR}/${source} - COMMENT "Compiling ${source} to ${spv_file}" - ) + set(options) + set(oneValueArgs) + set(multiValueArgs SOURCES) + cmake_parse_arguments(compile_shader "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) + foreach(source ${compile_shader_SOURCES}) + get_filename_component(filename ${source} NAME) + set(spv_file ${filename}.spv) + add_custom_command( + OUTPUT ${spv_file} + DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/${source} + ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/common.comp + 
${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_getrows.comp + ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_mul_mv_q_n_pre.comp + ${CMAKE_CURRENT_SOURCE_DIR}/kompute-shaders/op_mul_mv_q_n.comp + COMMAND ${glslc_executable} --target-env=vulkan1.2 -o ${spv_file} ${CMAKE_CURRENT_SOURCE_DIR}/${source} + COMMENT "Compiling ${source} to ${spv_file}" + ) - get_filename_component(RAW_FILE_NAME ${spv_file} NAME) - set(FILE_NAME "shader${RAW_FILE_NAME}") - string(REPLACE ".comp.spv" ".h" HEADER_FILE ${FILE_NAME}) - string(TOUPPER ${HEADER_FILE} HEADER_FILE_DEFINE) - string(REPLACE "." "_" HEADER_FILE_DEFINE "${HEADER_FILE_DEFINE}") - set(OUTPUT_HEADER_FILE "${HEADER_FILE}") - message(STATUS "${HEADER_FILE} generating ${HEADER_FILE_DEFINE}") - if(CMAKE_GENERATOR MATCHES "Visual Studio") - add_custom_command( - OUTPUT ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "/*THIS FILE HAS BEEN AUTOMATICALLY GENERATED - DO NOT EDIT*/" > ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#ifndef ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "namespace kp {" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "namespace shader_data {" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_BINARY_DIR}/bin/$/xxd -i ${RAW_FILE_NAME} >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "}}" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#endif // define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - DEPENDS ${spv_file} xxd - COMMENT "Converting to hpp: ${FILE_NAME} ${CMAKE_BINARY_DIR}/bin/$/xxd" - ) - else() - add_custom_command( - OUTPUT ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "/*THIS FILE HAS BEEN AUTOMATICALLY GENERATED - DO NOT EDIT*/" > ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#ifndef ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "namespace kp {" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "namespace shader_data {" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_BINARY_DIR}/bin/xxd -i ${RAW_FILE_NAME} >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo "}}" >> ${OUTPUT_HEADER_FILE} - COMMAND ${CMAKE_COMMAND} -E echo \"\#endif // define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} - DEPENDS ${spv_file} xxd - COMMENT "Converting to hpp: ${FILE_NAME} ${CMAKE_BINARY_DIR}/bin/xxd" - ) - endif() - endforeach() + get_filename_component(RAW_FILE_NAME ${spv_file} NAME) + set(FILE_NAME "shader${RAW_FILE_NAME}") + string(REPLACE ".comp.spv" ".h" HEADER_FILE ${FILE_NAME}) + string(TOUPPER ${HEADER_FILE} HEADER_FILE_DEFINE) + string(REPLACE "." 
"_" HEADER_FILE_DEFINE "${HEADER_FILE_DEFINE}") + set(OUTPUT_HEADER_FILE "${HEADER_FILE}") + message(STATUS "${HEADER_FILE} generating ${HEADER_FILE_DEFINE}") + if(CMAKE_GENERATOR MATCHES "Visual Studio") + add_custom_command( + OUTPUT ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "/*THIS FILE HAS BEEN AUTOMATICALLY GENERATED - DO NOT EDIT*/" > ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#ifndef ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "namespace kp {" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "namespace shader_data {" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_BINARY_DIR}/bin/$/xxd -i ${RAW_FILE_NAME} >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "}}" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#endif // define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + DEPENDS ${spv_file} xxd + COMMENT "Converting to hpp: ${FILE_NAME} ${CMAKE_BINARY_DIR}/bin/$/xxd" + ) + else() + add_custom_command( + OUTPUT ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "/*THIS FILE HAS BEEN AUTOMATICALLY GENERATED - DO NOT EDIT*/" > ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#ifndef ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "namespace kp {" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "namespace shader_data {" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_BINARY_DIR}/bin/xxd -i ${RAW_FILE_NAME} >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo "}}" >> ${OUTPUT_HEADER_FILE} + COMMAND ${CMAKE_COMMAND} -E echo \"\#endif // define ${HEADER_FILE_DEFINE}\" >> ${OUTPUT_HEADER_FILE} + DEPENDS ${spv_file} xxd + COMMENT "Converting to hpp: ${FILE_NAME} ${CMAKE_BINARY_DIR}/bin/xxd" + ) + endif() + endforeach() endfunction() if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/kompute/CMakeLists.txt") @@ -604,66 +611,66 @@ if (LLAMA_KOMPUTE) # Compile our shaders compile_shader(SOURCES - kompute-shaders/op_scale.comp - kompute-shaders/op_scale_8.comp - kompute-shaders/op_add.comp - kompute-shaders/op_addrow.comp - kompute-shaders/op_mul.comp - kompute-shaders/op_silu.comp - kompute-shaders/op_relu.comp - kompute-shaders/op_gelu.comp - kompute-shaders/op_softmax.comp - kompute-shaders/op_norm.comp - kompute-shaders/op_rmsnorm.comp - kompute-shaders/op_diagmask.comp - kompute-shaders/op_mul_mat_mat_f32.comp - kompute-shaders/op_mul_mat_f16.comp - kompute-shaders/op_mul_mat_q8_0.comp - kompute-shaders/op_mul_mat_q4_0.comp - kompute-shaders/op_mul_mat_q4_1.comp - kompute-shaders/op_mul_mat_q6_k.comp - kompute-shaders/op_getrows_f16.comp - kompute-shaders/op_getrows_q4_0.comp - kompute-shaders/op_getrows_q4_1.comp - kompute-shaders/op_getrows_q6_k.comp - kompute-shaders/op_rope_f16.comp - kompute-shaders/op_rope_f32.comp - kompute-shaders/op_cpy_f16_f16.comp - kompute-shaders/op_cpy_f16_f32.comp - kompute-shaders/op_cpy_f32_f16.comp - kompute-shaders/op_cpy_f32_f32.comp + kompute-shaders/op_scale.comp + kompute-shaders/op_scale_8.comp + kompute-shaders/op_add.comp + kompute-shaders/op_addrow.comp + kompute-shaders/op_mul.comp + kompute-shaders/op_silu.comp + kompute-shaders/op_relu.comp + kompute-shaders/op_gelu.comp + kompute-shaders/op_softmax.comp + kompute-shaders/op_norm.comp + kompute-shaders/op_rmsnorm.comp + 
kompute-shaders/op_diagmask.comp + kompute-shaders/op_mul_mat_mat_f32.comp + kompute-shaders/op_mul_mat_f16.comp + kompute-shaders/op_mul_mat_q8_0.comp + kompute-shaders/op_mul_mat_q4_0.comp + kompute-shaders/op_mul_mat_q4_1.comp + kompute-shaders/op_mul_mat_q6_k.comp + kompute-shaders/op_getrows_f16.comp + kompute-shaders/op_getrows_q4_0.comp + kompute-shaders/op_getrows_q4_1.comp + kompute-shaders/op_getrows_q6_k.comp + kompute-shaders/op_rope_f16.comp + kompute-shaders/op_rope_f32.comp + kompute-shaders/op_cpy_f16_f16.comp + kompute-shaders/op_cpy_f16_f32.comp + kompute-shaders/op_cpy_f32_f16.comp + kompute-shaders/op_cpy_f32_f32.comp ) # Create a custom target for our generated shaders add_custom_target(generated_shaders DEPENDS - shaderop_scale.h - shaderop_scale_8.h - shaderop_add.h - shaderop_addrow.h - shaderop_mul.h - shaderop_silu.h - shaderop_relu.h - shaderop_gelu.h - shaderop_softmax.h - shaderop_norm.h - shaderop_rmsnorm.h - shaderop_diagmask.h - shaderop_mul_mat_mat_f32.h - shaderop_mul_mat_f16.h - shaderop_mul_mat_q8_0.h - shaderop_mul_mat_q4_0.h - shaderop_mul_mat_q4_1.h - shaderop_mul_mat_q6_k.h - shaderop_getrows_f16.h - shaderop_getrows_q4_0.h - shaderop_getrows_q4_1.h - shaderop_getrows_q6_k.h - shaderop_rope_f16.h - shaderop_rope_f32.h - shaderop_cpy_f16_f16.h - shaderop_cpy_f16_f32.h - shaderop_cpy_f32_f16.h - shaderop_cpy_f32_f32.h + shaderop_scale.h + shaderop_scale_8.h + shaderop_add.h + shaderop_addrow.h + shaderop_mul.h + shaderop_silu.h + shaderop_relu.h + shaderop_gelu.h + shaderop_softmax.h + shaderop_norm.h + shaderop_rmsnorm.h + shaderop_diagmask.h + shaderop_mul_mat_mat_f32.h + shaderop_mul_mat_f16.h + shaderop_mul_mat_q8_0.h + shaderop_mul_mat_q4_0.h + shaderop_mul_mat_q4_1.h + shaderop_mul_mat_q6_k.h + shaderop_getrows_f16.h + shaderop_getrows_q4_0.h + shaderop_getrows_q4_1.h + shaderop_getrows_q6_k.h + shaderop_rope_f16.h + shaderop_rope_f32.h + shaderop_cpy_f16_f16.h + shaderop_cpy_f16_f32.h + shaderop_cpy_f32_f16.h + shaderop_cpy_f32_f32.h ) # Create a custom command that depends on the generated_shaders @@ -676,8 +683,10 @@ if (LLAMA_KOMPUTE) # Add the stamp to the main sources to ensure dependency tracking set(GGML_SOURCES_KOMPUTE ggml-kompute.cpp ${CMAKE_CURRENT_BINARY_DIR}/ggml-kompute.stamp) - set(GGML_HEADERS_KOMPUTE ggml-kompute.h ${CMAKE_CURRENT_BINARY_DIR}/ggml-kompute.stamp) + set(GGML_HEADERS_KOMPUTE ggml-kompute.h ${CMAKE_CURRENT_BINARY_DIR}/ggml-kompute.stamp) + add_compile_definitions(GGML_USE_KOMPUTE) + set(LLAMA_EXTRA_LIBS ${LLAMA_EXTRA_LIBS} kompute) set(LLAMA_EXTRA_INCLUDES ${LLAMA_EXTRA_INCLUDES} ${CMAKE_BINARY_DIR}) else() @@ -685,6 +694,18 @@ if (LLAMA_KOMPUTE) endif() endif() +if (LLAMA_CPU_HBM) + find_library(memkind memkind REQUIRED) + + add_compile_definitions(GGML_USE_CPU_HBM) + + target_link_libraries(ggml PUBLIC memkind) +endif() + +if (LLAMA_PERF) + add_compile_definitions(GGML_PERF) +endif() + function(get_flags CCID CCVER) set(C_FLAGS "") set(CXX_FLAGS "") @@ -821,6 +842,7 @@ execute_process( ERROR_VARIABLE output OUTPUT_QUIET ) + if (output MATCHES "dyld-1015\.7") add_compile_definitions(HAVE_BUGGY_APPLE_LINKER) endif() @@ -830,10 +852,10 @@ endif() # feel free to update the Makefile for your architecture and send a pull request or issue message(STATUS "CMAKE_SYSTEM_PROCESSOR: ${CMAKE_SYSTEM_PROCESSOR}") if (MSVC) - string(TOLOWER "${CMAKE_GENERATOR_PLATFORM}" CMAKE_GENERATOR_PLATFORM_LWR) - message(STATUS "CMAKE_GENERATOR_PLATFORM: ${CMAKE_GENERATOR_PLATFORM}") + string(TOLOWER "${CMAKE_GENERATOR_PLATFORM}" 
CMAKE_GENERATOR_PLATFORM_LWR) + message(STATUS "CMAKE_GENERATOR_PLATFORM: ${CMAKE_GENERATOR_PLATFORM}") else () - set(CMAKE_GENERATOR_PLATFORM_LWR "") + set(CMAKE_GENERATOR_PLATFORM_LWR "") endif () if (NOT MSVC) @@ -1027,11 +1049,6 @@ endif() # ggml -if (GGML_USE_CPU_HBM) - add_definitions(-DGGML_USE_CPU_HBM) - find_library(memkind memkind REQUIRED) -endif() - add_library(ggml OBJECT ggml.c ggml.h @@ -1048,16 +1065,17 @@ add_library(ggml OBJECT ${GGML_SOURCES_EXTRA} ${GGML_HEADERS_EXTRA} ${GGML_SOURCES_SYCL} ${GGML_HEADERS_SYCL} ${GGML_SOURCES_KOMPUTE} ${GGML_HEADERS_KOMPUTE} + ${GGML_SOURCES_VULKAN} ${GGML_HEADERS_VULKAN} + ${GGML_SOURCES_ROCM} ${GGML_HEADERS_ROCM} ) target_include_directories(ggml PUBLIC . ${LLAMA_EXTRA_INCLUDES}) -target_compile_features(ggml PUBLIC c_std_11) # don't bump +target_compile_features (ggml PUBLIC c_std_11) # don't bump + target_link_libraries(ggml PUBLIC Threads::Threads ${LLAMA_EXTRA_LIBS}) -if (GGML_USE_CPU_HBM) - target_link_libraries(ggml PUBLIC memkind) -endif() add_library(ggml_static STATIC $) + if (BUILD_SHARED_LIBS) set_target_properties(ggml PROPERTIES POSITION_INDEPENDENT_CODE ON) add_library(ggml_shared SHARED $) @@ -1073,7 +1091,8 @@ add_library(llama ) target_include_directories(llama PUBLIC .) -target_compile_features(llama PUBLIC cxx_std_11) # don't bump +target_compile_features (llama PUBLIC cxx_std_11) # don't bump + target_link_libraries(llama PRIVATE ggml ${LLAMA_EXTRA_LIBS} @@ -1124,7 +1143,7 @@ install(FILES ${CMAKE_CURRENT_BINARY_DIR}/LlamaConfig.cmake DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/Llama) set(GGML_PUBLIC_HEADERS "ggml.h" "ggml-alloc.h" "ggml-backend.h" - "${GGML_HEADERS_CUDA}" "${GGML_HEADERS_OPENCL}" + "${GGML_HEADERS_CUDA}" "${GGML_HEADERS_OPENCL}" "${GGML_HEADERS_METAL}" "${GGML_HEADERS_MPI}" "${GGML_HEADERS_EXTRA}") set_target_properties(ggml PROPERTIES PUBLIC_HEADER "${GGML_PUBLIC_HEADERS}") diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 1fad24fd1..4a30414df 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1091,7 +1091,7 @@ static void ggml_vk_print_gpu_info(size_t idx) { } } -void ggml_vk_instance_init() { +static void ggml_vk_instance_init() { if (vk_instance_initialized) { return; } @@ -1150,7 +1150,7 @@ void ggml_vk_instance_init() { vk_instance_initialized = true; } -void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { +static void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { GGML_ASSERT(idx < vk_instance.device_indices.size()); size_t dev_num = vk_instance.device_indices[idx]; #ifdef GGML_VULKAN_DEBUG @@ -4556,13 +4556,13 @@ static void ggml_vk_cleanup(ggml_backend_vk_context * ctx) { } } -GGML_CALL int ggml_vk_get_device_count() { +GGML_CALL static int ggml_vk_get_device_count() { ggml_vk_instance_init(); return vk_instance.device_indices.size(); } -GGML_CALL void ggml_vk_get_device_description(int device, char * description, size_t description_size) { +GGML_CALL static void ggml_vk_get_device_description(int device, char * description, size_t description_size) { ggml_vk_instance_init(); std::vector devices = vk_instance.instance.enumeratePhysicalDevices(); @@ -4580,7 +4580,7 @@ void ggml_vk_init_cpu_assist() { std::cerr << "ggml_vulkan: Found " << ggml_vk_get_device_count() << " Vulkan devices:" << std::endl; - for (size_t i = 0; i < ggml_vk_get_device_count(); i++) { + for (int i = 0; i < ggml_vk_get_device_count(); i++) { ggml_vk_print_gpu_info(i); } // Initialize the first backend to make sure CPU matrix multiplications can be offloaded. 
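
A note on the hunks above: the Vulkan helpers return a signed int count, so
iterating with a size_t counter makes the loop comparison mixed-sign, which is
exactly what -Wsign-compare (made fatal later in this series) flags. A minimal
standalone sketch of the pattern, with illustrative names rather than the
actual ggml-vulkan API:

    int device_count(void);          // signed count, like ggml_vk_get_device_count()

    void print_devices(void) {
        // match the counter type to the signed return type; this compiles
        // cleanly under -Werror with -Wsign-compare enabled
        for (int i = 0; i < device_count(); i++) {
            // ... query and print device i ...
        }
    }

The (int) cast in the assertion below addresses the same mismatch from the
other direction: the device index is signed while device_indices.size() is
unsigned.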
@@ -5267,7 +5267,7 @@ GGML_CALL void ggml_backend_vk_get_device_description(int device, char * descrip } GGML_CALL void ggml_backend_vk_get_device_memory(int device, size_t * free, size_t * total) { - GGML_ASSERT(device < vk_instance.device_indices.size()); + GGML_ASSERT(device < (int) vk_instance.device_indices.size()); vk::PhysicalDevice vkdev = vk_instance.instance.enumeratePhysicalDevices()[vk_instance.device_indices[device]]; From d250c9d61d4d9f7346930814cc4aef3f3673dc3e Mon Sep 17 00:00:00 2001 From: clibdev <52199778+clibdev@users.noreply.github.com> Date: Sat, 17 Feb 2024 18:28:37 +0200 Subject: [PATCH 768/859] gitignore : update for CLion IDE (#5544) --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index b84459b92..62b6b8b1a 100644 --- a/.gitignore +++ b/.gitignore @@ -23,11 +23,13 @@ .clang-tidy .vs/ .vscode/ +.idea/ lcov-report/ gcovr-report/ build* +cmake-build-* out/ tmp/ From 6e4e973b2615f8d390b1c4f4a7e05a119078bb0f Mon Sep 17 00:00:00 2001 From: Ananta Bastola Date: Sat, 17 Feb 2024 16:03:14 -0500 Subject: [PATCH 769/859] ci : add an option to fail on compile warning (#3952) * feat(ci): add an option to fail on compile warning * Update CMakeLists.txt * minor : fix compile warnings ggml-ci * ggml : fix unreachable code warnings ggml-ci * ci : disable fatal warnings for windows, ios and tvos * ggml : fix strncpy warning * ci : disable fatal warnings for MPI build * ci : add fatal warnings to ggml-ci ggml-ci --------- Co-authored-by: Georgi Gerganov --- .github/workflows/build.yml | 10 +++++++--- CMakeLists.txt | 11 +++++++++++ Makefile | 29 ++++++++++++++++++++++++++++ ci/run.sh | 2 +- examples/export-lora/export-lora.cpp | 2 -- ggml-backend.c | 1 + ggml-metal.m | 2 +- ggml.c | 15 +++++++++----- 8 files changed, 60 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index ed292d6b8..03d76d455 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -37,6 +37,8 @@ jobs: - name: Build id: make_build + env: + LLAMA_FATAL_WARNINGS: 1 run: | CC=gcc-8 make -j $(nproc) @@ -65,7 +67,7 @@ jobs: run: | mkdir build cd build - cmake .. + cmake .. -DLLAMA_FATAL_WARNINGS=ON cmake --build . --config Release -j $(nproc) - name: Test @@ -100,7 +102,7 @@ jobs: run: | mkdir build cd build - cmake .. -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} + cmake .. -DLLAMA_FATAL_WARNINGS=ON -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} cmake --build . --config ${{ matrix.build_type }} -j $(nproc) - name: Test @@ -244,6 +246,8 @@ jobs: - name: Build id: make_build + env: + LLAMA_FATAL_WARNINGS: 1 run: | LLAMA_NO_METAL=1 make -j $(sysctl -n hw.logicalcpu) @@ -277,7 +281,7 @@ jobs: sysctl -a mkdir build cd build - cmake -DLLAMA_METAL=OFF .. + cmake -DLLAMA_FATAL_WARNINGS=ON -DLLAMA_METAL=OFF .. cmake --build . 
--config Release -j $(sysctl -n hw.logicalcpu) - name: Test diff --git a/CMakeLists.txt b/CMakeLists.txt index 2a922fdb3..5ea4d4f19 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -55,6 +55,9 @@ option(LLAMA_ALL_WARNINGS "llama: enable all compiler warnings" option(LLAMA_ALL_WARNINGS_3RD_PARTY "llama: enable all compiler warnings in 3rd party libs" OFF) option(LLAMA_GPROF "llama: enable gprof" OFF) +# build +option(LLAMA_FATAL_WARNINGS "llama: enable -Werror flag" OFF) + # sanitizers option(LLAMA_SANITIZE_THREAD "llama: enable thread sanitizer" OFF) option(LLAMA_SANITIZE_ADDRESS "llama: enable address sanitizer" OFF) @@ -142,6 +145,14 @@ set(THREADS_PREFER_PTHREAD_FLAG ON) find_package(Threads REQUIRED) include(CheckCXXCompilerFlag) +if (LLAMA_FATAL_WARNINGS) + if (CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Werror) + elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") + add_compile_options(/WX) + endif() +endif() + # enable libstdc++ assertions for debug builds if (CMAKE_SYSTEM_NAME MATCHES "Linux") add_compile_definitions($<$:_GLIBCXX_ASSERTIONS>) diff --git a/Makefile b/Makefile index 0a2070b53..901798606 100644 --- a/Makefile +++ b/Makefile @@ -215,6 +215,35 @@ MK_CFLAGS += $(WARN_FLAGS) -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmis -Werror=implicit-function-declaration MK_CXXFLAGS += $(WARN_FLAGS) -Wmissing-declarations -Wmissing-noreturn +ifeq ($(LLAMA_FATAL_WARNINGS),1) + MK_CFLAGS += -Werror + MK_CXXFLAGS += -Werror +endif + +ifeq ($(CC_IS_CLANG), 1) + # clang options + MK_CFLAGS += -Wunreachable-code-break -Wunreachable-code-return + MK_HOST_CXXFLAGS += -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi + + ifneq '' '$(and $(CC_IS_LLVM_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 030800)))' + MK_CFLAGS += -Wdouble-promotion + endif + ifneq '' '$(and $(CC_IS_APPLE_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 070300)))' + MK_CFLAGS += -Wdouble-promotion + endif +else + # gcc options + MK_CFLAGS += -Wdouble-promotion + MK_HOST_CXXFLAGS += -Wno-array-bounds + + ifeq ($(shell expr $(CC_VER) \>= 070100), 1) + MK_HOST_CXXFLAGS += -Wno-format-truncation + endif + ifeq ($(shell expr $(CC_VER) \>= 080100), 1) + MK_HOST_CXXFLAGS += -Wextra-semi + endif +endif + # this version of Apple ld64 is buggy ifneq '' '$(findstring dyld-1015.7,$(shell $(CC) $(LDFLAGS) -Wl,-v 2>&1))' MK_CPPFLAGS += -DHAVE_BUGGY_APPLE_LINKER diff --git a/ci/run.sh b/ci/run.sh index 979b4a793..b94658c96 100755 --- a/ci/run.sh +++ b/ci/run.sh @@ -33,7 +33,7 @@ sd=`dirname $0` cd $sd/../ SRC=`pwd` -CMAKE_EXTRA="" +CMAKE_EXTRA="-DLLAMA_FATAL_WARNINGS=ON" if [ ! 
-z ${GG_BUILD_METAL} ]; then CMAKE_EXTRA="${CMAKE_EXTRA} -DLLAMA_METAL_SHADER_DEBUG=ON" diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp index 2f7be8a13..08413f57e 100644 --- a/examples/export-lora/export-lora.cpp +++ b/examples/export-lora/export-lora.cpp @@ -7,8 +7,6 @@ #include #include -static const size_t tensor_alignment = 32; - struct lora_info { std::string filename; float scale; diff --git a/ggml-backend.c b/ggml-backend.c index 87eea8440..d019d813a 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -1006,6 +1006,7 @@ static int ggml_backend_sched_backend_from_buffer(ggml_backend_sched_t sched, gg } } GGML_ASSERT(false && "tensor buffer type not supported by any backend"); + return -1; // silence warning } #if 0 diff --git a/ggml-metal.m b/ggml-metal.m index c1d8e2de8..6e76f8bed 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -176,7 +176,7 @@ struct ggml_metal_context { // MSL code // TODO: move the contents here when ready // for now it is easier to work in a separate file -//static NSString * const msl_library_source = @"see metal.metal"; +// static NSString * const msl_library_source = @"see metal.metal"; // Here to assist with NSBundle Path Hack @interface GGMLMetalClass : NSObject diff --git a/ggml.c b/ggml.c index 4e302fb7d..264cfd705 100644 --- a/ggml.c +++ b/ggml.c @@ -868,7 +868,7 @@ do { \ const __m128 t0 = _mm_add_ps(_mm256_castps256_ps128(x[0]), \ _mm256_extractf128_ps(x[0], 1)); \ const __m128 t1 = _mm_hadd_ps(t0, t0); \ - res = _mm_cvtss_f32(_mm_hadd_ps(t1, t1)); \ + res = (ggml_float) _mm_cvtss_f32(_mm_hadd_ps(t1, t1)); \ } while (0) // TODO: is this optimal ? @@ -1149,7 +1149,7 @@ inline static void __wasm_f16x4_store(ggml_fp16_t * p, v128_t x) { x[i] = _mm_add_ps(x[i], x[offset+i]); \ } \ const __m128 t0 = _mm_hadd_ps(x[0], x[0]); \ - res = _mm_cvtss_f32(_mm_hadd_ps(t0, t0)); \ + res = (ggml_float) _mm_cvtss_f32(_mm_hadd_ps(t0, t0)); \ } // TODO: is this optimal ? 
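
The two (ggml_float) casts above pair with the -Wdouble-promotion flags added
to the Makefile in this patch: _mm_cvtss_f32() yields a float, while the
reduction result ggml_float is a double on these code paths, so the implicit
widening would now warn and fail the build once warnings are fatal. A
standalone sketch of the same idea, assuming only that the accumulator type is
wider than the SIMD scalar (ggml_float_t is an illustrative stand-in):

    typedef double ggml_float_t;   // stand-in for ggml_float

    static ggml_float_t finish_reduce(float partial) {
        // the explicit cast marks the float -> double widening as intentional,
        // keeping -Wdouble-promotion quiet under -Werror
        return (ggml_float_t) partial;
    }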
@@ -2086,6 +2086,7 @@ void ggml_numa_init(enum ggml_numa_strategy numa_flag) { } } #else + GGML_UNUSED(numa_flag); // TODO #endif } @@ -3219,7 +3220,7 @@ const char * ggml_get_name(const struct ggml_tensor * tensor) { } struct ggml_tensor * ggml_set_name(struct ggml_tensor * tensor, const char * name) { - strncpy(tensor->name, name, sizeof(tensor->name)); + strncpy(tensor->name, name, sizeof(tensor->name) - 1); tensor->name[sizeof(tensor->name) - 1] = '\0'; return tensor; } @@ -18575,7 +18576,9 @@ static enum ggml_opt_result linesearch_backtracking( (*step) *= width; } - GGML_UNREACHABLE(); + GGML_ASSERT(false && "line search failed"); + + return GGML_LINESEARCH_FAIL; } static enum ggml_opt_result ggml_opt_lbfgs( @@ -18843,7 +18846,9 @@ static enum ggml_opt_result ggml_opt_lbfgs( step[0] = 1.0; } - GGML_UNREACHABLE(); + GGML_ASSERT(false && "lbfgs failed"); + + return GGML_OPT_DID_NOT_CONVERGE; } struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { From 8f1be0d42f23016cb6819dbae01126699c4bd9bc Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sat, 17 Feb 2024 23:04:16 +0200 Subject: [PATCH 770/859] ggml : add ALiBi support for ggml_soft_max_ext (#5488) * ggml : avoid recomputing alibi slopes (CPU) * llama : reuse hparams.f_max_alibi_bias in all cases ggml-ci * ggml : support alibi bias in ggml_soft_max_ext (CPU + Metal) ggml-ci * ggml : handle all SRCs (do not break on first null) ggml-ci * tests : do not use slope for large soft_max accumulates too much error ggml-ci * ggml : alternative ALiBi without extra tensor We compute the slopes in the kernel ggml-ci * cuda : add ALiBi support in ggml_soft_max_ext ggml-ci * ggml : deprecate ggml_alibi * ggml : support multi-sequence ALiBi (Metal) ggml-ci * cuda : add multi-seq ALiBi + remote F16 soft_max ggml-ci * ggml : update deprecation message * ggml : fix pos ptr when no ALiBi ggml-ci * cuda : fix performance (pow -> powf) * cuda : precompute ALiBi constants * metal : pre-compute ALiBi slopes ggml-ci * llama : init kq_pos only if needed ggml-ci * test-backend-ops : add null pos test to soft_max test-backend-ops : replace soft_max tests ggml-ci --------- Co-authored-by: slaren --- ggml-alloc.c | 6 +- ggml-backend.c | 16 +-- ggml-cuda.cu | 263 ++++++++----------------------------- ggml-metal.m | 35 +++-- ggml-metal.metal | 47 ++++++- ggml.c | 118 +++++++++++------ ggml.h | 13 +- llama.cpp | 133 ++++++++++++------- tests/test-backend-ops.cpp | 74 +++++------ 9 files changed, 348 insertions(+), 357 deletions(-) diff --git a/ggml-alloc.c b/ggml-alloc.c index c28c37c4f..d4123564f 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -551,7 +551,7 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr } for (int j = 0; j < GGML_MAX_SRC; j++) { if (graph->nodes[i]->src[j] == NULL) { - break; + continue; } if (graph->nodes[i]->src[j]->flags & GGML_TENSOR_FLAG_INPUT) { ggml_gallocr_allocate_node(galloc, graph->nodes[i]->src[j], get_node_buffer_id(node_buffer_ids, i)); @@ -787,7 +787,7 @@ static bool ggml_gallocr_needs_realloc(ggml_gallocr_t galloc, struct ggml_cgraph for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } if (!ggml_gallocr_node_needs_realloc(galloc, src, node_alloc, &node_alloc->src[j])) { #ifndef NDEBUG @@ -833,7 +833,7 @@ bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; 
} ggml_gallocr_init_tensor(galloc, src, node_alloc, &node_alloc->src[j]); } diff --git a/ggml-backend.c b/ggml-backend.c index d019d813a..66e8c293a 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -1041,7 +1041,7 @@ static int ggml_backend_sched_backend_id_from_cur(ggml_backend_sched_t sched, st for (int i = 0; i < GGML_MAX_SRC; i++) { const struct ggml_tensor * src = tensor->src[i]; if (src == NULL) { - break; + continue; } if (src->buffer != NULL && src->buffer->usage == GGML_BACKEND_BUFFER_USAGE_WEIGHTS) { int src_backend = ggml_backend_sched_backend_from_buffer(sched, src->buffer); @@ -1088,7 +1088,7 @@ static void ggml_backend_sched_print_assignments(ggml_backend_sched_t sched, str for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } ggml_backend_t src_backend = tensor_backend(src); fprintf(stderr, " %20.20s (%5.5s) [%5.5s %8.8s]", src->name, @@ -1144,7 +1144,7 @@ static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct gg for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } if (tensor_backend_id(src) == -1) { tensor_backend_id(src) = ggml_backend_sched_backend_id_from_cur(sched, src); @@ -1256,7 +1256,7 @@ static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct gg for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } int src_backend_id = tensor_backend_id(src); if (src_backend_id == -1) { @@ -1315,7 +1315,7 @@ static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct gg for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } int src_backend_id = tensor_backend_id(src); assert(src_backend_id != -1); // all inputs should be assigned by now @@ -1362,7 +1362,7 @@ static void ggml_backend_sched_split_graph(ggml_backend_sched_t sched, struct gg for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * src = node->src[j]; if (src == NULL) { - break; + continue; } ggml_backend_t src_backend = tensor_backend(src); if (src_backend != tensor_backend /* && src_backend != NULL */) { @@ -1668,7 +1668,7 @@ static struct ggml_tensor * graph_copy_dup_tensor(struct ggml_hash_set hash_set, for (int i = 0; i < GGML_MAX_SRC; i++) { struct ggml_tensor * s = src->src[i]; if (s == NULL) { - break; + continue; } dst->src[i] = graph_copy_dup_tensor(hash_set, node_copies, ctx_allocated, ctx_unallocated, s); } @@ -1697,7 +1697,7 @@ static void graph_copy_init_tensor(struct ggml_hash_set hash_set, struct ggml_te for (int i = 0; i < GGML_MAX_SRC; i++) { struct ggml_tensor * s = src->src[i]; if (s == NULL) { - break; + continue; } graph_copy_init_tensor(hash_set, node_copies, node_init, s); } diff --git a/ggml-cuda.cu b/ggml-cuda.cu index b35fcb7fd..5fd8a87e4 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -5956,149 +5956,31 @@ static __global__ void diag_mask_inf_f32(const float * x, float * dst, const int dst[i] = x[i] - (col > n_past + row % rows_per_channel) * FLT_MAX; } -template -static __global__ void soft_max_f16(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX - const int ncols_data = ncols_template == 0 ? 
ncols_par : ncols_template; - const int ncols_smem = GGML_PAD(ncols_data, 2*WARP_SIZE)/2; - - const int tid = threadIdx.x; - const int rowx = blockIdx.x; - const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension - - const int block_size = block_size_template == 0 ? blockDim.x : block_size_template; - - const int warp_id = threadIdx.x / WARP_SIZE; - const int lane_id = threadIdx.x % WARP_SIZE; - - extern __shared__ half data_soft_max_f16[]; - half * buf_iw = data_soft_max_f16 + 0; // shared memory buffer for inter-warp communication - // (shared memory) buffer to cache values between iterations: - half2 * vals = vals_smem ? (half2 *) (buf_iw + WARP_SIZE) : (half2 *) (dst + rowx*ncols_data); - // if the buffer is larger than max. shared memory per block, use dst as temp. buffer instead - // in that case col_smem == col_data must be enforced to avoid race conditions - - half2 max_val = make_half2(-INFINITY, -INFINITY); - -#pragma unroll - for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { - const int col_data = 2*col0 + 2*WARP_SIZE*warp_id + lane_id; - const int col_smem = vals_smem ? col0 + tid : col_data; - - const int ix = rowx*ncols_data + col_data; - const int iy = rowy*ncols_data + col_data; - - half2 val; - if (need_check && col_data + 0 >= ncols_data) { - val.x = -INFINITY; - } else { - val.x = x[ix + 0]*scale + (y ? y[iy + 0] : 0.0f); - } - if (need_check && col_data + WARP_SIZE >= ncols_data) { - val.y = -INFINITY; - } else { - val.y = x[ix + WARP_SIZE]*scale + (y ? y[iy + WARP_SIZE] : 0.0f); - } - if (!need_check || col_smem < (vals_smem ? ncols_smem : ncols_data)) { - vals[col_smem] = val; - } - max_val = __hmax2(max_val, val); - } - - // find the max value in the block - max_val = warp_reduce_max(max_val); - if (block_size > WARP_SIZE) { - if (warp_id == 0) { - buf_iw[lane_id] = -INFINITY; - } - __syncthreads(); - - if (lane_id == 0) { - buf_iw[warp_id] = __hmax(max_val.x, max_val.y); - } - __syncthreads(); - - max_val = __half2half2(buf_iw[lane_id]); - max_val = warp_reduce_max(max_val); - } else { - max_val = __half2half2(__hmax(max_val.x, max_val.y)); - } - - half2 tmp = make_half2(0.0f, 0.0f); // partial sums - -#pragma unroll - for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { - const int col_smem = vals_smem ? col0 + tid : 2*col0 + 2*warp_id*WARP_SIZE + lane_id; - - if (ncols_template == 0 && col_smem >= (vals_smem ? ncols_smem : ncols_data)) { - break; - } - - const half2 val = h2exp(vals[col_smem] - max_val); - - tmp += val; - vals[col_smem] = val; - } - - // find the sum of exps in the block - tmp = warp_reduce_sum(tmp); - if (block_size > WARP_SIZE) { - if (warp_id == 0) { - buf_iw[lane_id] = 0.0f; - } - __syncthreads(); - - if (lane_id == 0) { - buf_iw[warp_id] = tmp.x + tmp.y; - } - __syncthreads(); - - tmp = __half2half2(buf_iw[lane_id]); - tmp = warp_reduce_sum(tmp); - } else { - tmp = __half2half2(tmp.x + tmp.y); - } - - const half2 inv_sum = make_half2(1.0f, 1.0f) / tmp; - -#pragma unroll - for (int col0 = 0; col0 < ncols_smem; col0 += block_size) { - const int col_data = 2*col0 + 2*WARP_SIZE*warp_id + lane_id; - const int col_smem = vals_smem ? 
col0 + tid : col_data; - - const int idst = rowx*ncols_data + col_data; - const half2 result = vals[col_smem] * inv_sum; - - if (need_check && col_data + 0 >= ncols_data) { - return; - } - dst[idst] = result.x; - - if (need_check && col_data + WARP_SIZE >= ncols_data) { - return; - } - - dst[idst + WARP_SIZE] = result.y; - } -#else - (void) x; (void) y; (void) dst; (void) ncols_par; (void) nrows_y; (void) scale; - NO_DEVICE_CODE; -#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX -} - template -static __global__ void soft_max_f32(const float * x, const float * y, float * dst, const int ncols_par, const int nrows_y, const float scale) { +static __global__ void soft_max_f32(const float * x, const float * mask, const float * pos, float * dst, const int ncols_par, const int nrows_y, const float scale, const float max_bias, const float m0, const float m1, uint32_t n_head_log2) { const int ncols = ncols_template == 0 ? ncols_par : ncols_template; const int tid = threadIdx.x; const int rowx = blockIdx.x; - const int rowy = rowx % nrows_y; // broadcast the mask (y) in the row dimension + const int rowy = rowx % nrows_y; // broadcast the mask in the row dimension const int block_size = block_size_template == 0 ? blockDim.x : block_size_template; const int warp_id = threadIdx.x / WARP_SIZE; const int lane_id = threadIdx.x % WARP_SIZE; + float slope = 0.0f; + + // ALiBi + if (max_bias > 0.0f) { + const int h = rowx/nrows_y; // head index + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = powf(base, exp); + } + extern __shared__ float data_soft_max_f32[]; float * buf_iw = data_soft_max_f32; // shared memory buffer for inter-warp communication // shared memory buffer to cache values between iterations: @@ -6117,7 +5999,8 @@ static __global__ void soft_max_f32(const float * x, const float * y, float * ds const int ix = rowx*ncols + col; const int iy = rowy*ncols + col; - const float val = x[ix]*scale + (y ? y[iy] : 0.0f); + const float val = x[ix]*scale + (mask ? 
mask[iy] : 0.0f) + slope*pos[col]; + vals[col] = val; max_val = max(max_val, val); } @@ -7589,89 +7472,53 @@ static void diag_mask_inf_f32_cuda(const float * x, float * dst, const int ncols diag_mask_inf_f32<<>>(x, dst, ncols_x, rows_per_channel, n_past); } -static void soft_max_f16_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { - int nth = WARP_SIZE; - while (nth < ncols_x/2 && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; - const dim3 block_dims(nth, 1, 1); - const dim3 block_nums(nrows_x, 1, 1); - const size_t shmem = (GGML_PAD(ncols_x, 2*WARP_SIZE) + WARP_SIZE)*sizeof(half); - static_assert(CUDA_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); - if (shmem <= g_device_caps[g_main_device].smpb) { - switch (ncols_x) { - case 32: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 64: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 128: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 256: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 512: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 1024: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 2048: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - case 4096: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - default: - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - break; - } - } else { - const size_t shmem_low = WARP_SIZE*sizeof(half); - soft_max_f16<<>>(x, y, dst, ncols_x, nrows_y, scale); - } -} - -static void soft_max_f32_cuda(const float * x, const float * y, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, cudaStream_t stream) { +static void soft_max_f32_cuda(const float * x, const float * mask, const float * pos, float * dst, const int ncols_x, const int nrows_x, const int nrows_y, const float scale, const float max_bias, cudaStream_t stream) { int nth = WARP_SIZE; while (nth < ncols_x && nth < CUDA_SOFT_MAX_BLOCK_SIZE) nth *= 2; const dim3 block_dims(nth, 1, 1); const dim3 block_nums(nrows_x, 1, 1); const size_t shmem = (GGML_PAD(ncols_x, WARP_SIZE) + WARP_SIZE)*sizeof(float); static_assert(CUDA_SOFT_MAX_BLOCK_SIZE == 1024, "These values need to be adjusted."); + + const uint32_t n_head_kv = nrows_x/nrows_y; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head_kv)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + if (shmem < g_device_caps[g_main_device].smpb) { switch (ncols_x) { case 32: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 64: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 128: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 256: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 512: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); 
break; case 1024: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 2048: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; case 4096: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; default: - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); break; } } else { const size_t shmem_low = WARP_SIZE*sizeof(float); - soft_max_f32<<>>(x, y, dst, ncols_x, nrows_y, scale); + soft_max_f32<<>>(x, mask, pos, dst, ncols_x, nrows_y, scale, max_bias, m0, m1, n_head_log2); } } @@ -9090,30 +8937,36 @@ static void ggml_cuda_op_soft_max( GGML_ASSERT(!src1 || src1->type == GGML_TYPE_F32); // src1 contains mask and it is optional - const int64_t ne00 = src0->ne[0]; + const int64_t ne00 = src0->ne[0]; const int64_t nrows_x = ggml_nrows(src0); - const int64_t nrows_y = src1 ? ggml_nrows(src1) : 1; + const int64_t nrows_y = src0->ne[1]; - float scale = 1.0f; - memcpy(&scale, dst->op_params, sizeof(float)); + float scale = 1.0f; + float max_bias = 0.0f; -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && CUDART_VERSION >= CUDART_HMAX -#ifdef GGML_CUDA_F16 - const bool use_f16_soft_max = true; -#else - const bool use_f16_soft_max = false; -#endif // GGML_CUDA_F16 -#else - const bool use_f16_soft_max = false; -#endif // defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__) && CUDART_VERSION >= CUDART_HMAX + memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float)); - if (use_f16_soft_max) { - soft_max_f16_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); - } else { - soft_max_f32_cuda(src0_dd, src1 ? src1_dd : nullptr, dst_dd, ne00, nrows_x, nrows_y, scale, main_stream); + // positions tensor + float * src2_dd = dst_dd; // default to avoid null checks in the kernel + cuda_pool_alloc src2_f; + + ggml_tensor * src2 = dst->src[2]; + const bool use_src2 = src2 != nullptr; + + if (use_src2) { + const bool src2_on_device = use_src2 && src2->backend == GGML_BACKEND_GPU; + ggml_tensor_extra_gpu * src2_extra = use_src2 ? (ggml_tensor_extra_gpu *) src2->extra : nullptr; + + if (src2_on_device) { + src2_dd = (float *) src2_extra->data_device[g_main_device]; + } else { + src2_dd = src2_f.alloc(ggml_nelements(src2)); + CUDA_CHECK(ggml_cuda_cpy_tensor_2d(src2_dd, src2, 0, 0, 0, 1, main_stream)); + } } - (void) dst; + soft_max_f32_cuda(src0_dd, src1 ? 
src1_dd : nullptr, src2_dd, dst_dd, ne00, nrows_x, nrows_y, scale, max_bias, main_stream); } static void ggml_cuda_op_scale( diff --git a/ggml-metal.m b/ggml-metal.m index 6e76f8bed..c0848a293 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -728,6 +728,7 @@ static bool ggml_metal_graph_compute( size_t offs_src0 = 0; size_t offs_src1 = 0; + size_t offs_src2 = 0; size_t offs_dst = 0; id command_buffer = command_buffers[cb_idx]; @@ -746,6 +747,7 @@ static bool ggml_metal_graph_compute( struct ggml_tensor * src0 = gf->nodes[i]->src[0]; struct ggml_tensor * src1 = gf->nodes[i]->src[1]; + struct ggml_tensor * src2 = gf->nodes[i]->src[2]; struct ggml_tensor * dst = gf->nodes[i]; switch (dst->op) { @@ -807,6 +809,7 @@ static bool ggml_metal_graph_compute( id id_src0 = src0 ? ggml_metal_get_buffer(src0, &offs_src0) : nil; id id_src1 = src1 ? ggml_metal_get_buffer(src1, &offs_src1) : nil; + id id_src2 = src2 ? ggml_metal_get_buffer(src2, &offs_src2) : nil; id id_dst = dst ? ggml_metal_get_buffer(dst, &offs_dst) : nil; //GGML_METAL_LOG_INFO("%s: op - %s\n", __func__, ggml_op_name(dst->op)); @@ -1188,7 +1191,16 @@ static bool ggml_metal_graph_compute( pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_SOFT_MAX].pipeline; } - const float scale = ((float *) dst->op_params)[0]; + const float scale = ((float *) dst->op_params)[0]; + const float max_bias = ((float *) dst->op_params)[1]; + + const int64_t nrows_x = ggml_nrows(src0); + const int64_t nrows_y = src0->ne[1]; + const uint32_t n_head_kv = nrows_x/nrows_y; + const uint32_t n_head_log2 = 1u << (uint32_t) floorf(log2f((float) n_head_kv)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); [encoder setComputePipelineState:pipeline]; [encoder setBuffer:id_src0 offset:offs_src0 atIndex:0]; @@ -1197,11 +1209,20 @@ static bool ggml_metal_graph_compute( } else { [encoder setBuffer:id_src0 offset:offs_src0 atIndex:1]; } - [encoder setBuffer:id_dst offset:offs_dst atIndex:2]; - [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:3]; - [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:4]; - [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:5]; - [encoder setBytes:&scale length:sizeof(scale) atIndex:6]; + if (id_src2) { + [encoder setBuffer:id_src2 offset:offs_src2 atIndex:2]; + } else { + [encoder setBuffer:id_src0 offset:offs_src0 atIndex:2]; + } + [encoder setBuffer:id_dst offset:offs_dst atIndex:3]; + [encoder setBytes:&ne00 length:sizeof(ne00) atIndex:4]; + [encoder setBytes:&ne01 length:sizeof(ne01) atIndex:5]; + [encoder setBytes:&ne02 length:sizeof(ne02) atIndex:6]; + [encoder setBytes:&scale length:sizeof(scale) atIndex:7]; + [encoder setBytes:&max_bias length:sizeof(max_bias) atIndex:8]; + [encoder setBytes:&m0 length:sizeof(m0) atIndex:9]; + [encoder setBytes:&m1 length:sizeof(m1) atIndex:10]; + [encoder setBytes:&n_head_log2 length:sizeof(n_head_log2) atIndex:11]; [encoder setThreadgroupMemoryLength:32*sizeof(float) atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake(ne01*ne02*ne03, 1, 1) threadsPerThreadgroup:MTLSizeMake(nth, 1, 1)]; @@ -1514,8 +1535,6 @@ static bool ggml_metal_graph_compute( // max size of the src1ids array in the kernel stack GGML_ASSERT(ne11 <= 512); - struct ggml_tensor * src2 = gf->nodes[i]->src[2]; - const int64_t ne20 = src2 ? src2->ne[0] : 0; const int64_t ne21 = src2 ? src2->ne[1] : 0; const int64_t ne22 = src2 ? 
src2->ne[2] : 0; diff --git a/ggml-metal.metal b/ggml-metal.metal index efed6ad46..09ebcc9e3 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -351,12 +351,17 @@ kernel void kernel_sum_rows( kernel void kernel_soft_max( device const float * src0, device const float * src1, + device const float * src2, device float * dst, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, constant float & scale, - threadgroup float * buf [[threadgroup(0)]], + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + threadgroup float * buf [[threadgroup(0)]], uint tgpig[[threadgroup_position_in_grid]], uint tpitg[[thread_position_in_threadgroup]], uint sgitg[[simdgroup_index_in_threadgroup]], @@ -368,13 +373,26 @@ kernel void kernel_soft_max( device const float * psrc0 = src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; device const float * pmask = src1 != src0 ? src1 + i01*ne00 : nullptr; + device const float * ppos = src2 != src0 ? src2 : nullptr; device float * pdst = dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00; + float slope = 0.0f; + + // ALiBi + if (max_bias > 0.0f) { + const int64_t h = i02; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + // parallel max float lmax = -INFINITY; for (int i00 = tpitg; i00 < ne00; i00 += ntg) { - lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f)); + lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]); } // find the max value in the block @@ -399,7 +417,7 @@ kernel void kernel_soft_max( // parallel sum float lsum = 0.0f; for (int i00 = tpitg; i00 < ne00; i00 += ntg) { - const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f)) - max_val); + const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]) - max_val); lsum += exp_psrc0; pdst[i00] = exp_psrc0; } @@ -437,12 +455,17 @@ kernel void kernel_soft_max( kernel void kernel_soft_max_4( device const float * src0, device const float * src1, + device const float * src2, device float * dst, constant int64_t & ne00, constant int64_t & ne01, constant int64_t & ne02, constant float & scale, - threadgroup float * buf [[threadgroup(0)]], + constant float & max_bias, + constant float & m0, + constant float & m1, + constant uint32_t & n_head_log2, + threadgroup float * buf [[threadgroup(0)]], uint tgpig[[threadgroup_position_in_grid]], uint tpitg[[thread_position_in_threadgroup]], uint sgitg[[simdgroup_index_in_threadgroup]], @@ -454,13 +477,25 @@ kernel void kernel_soft_max_4( device const float4 * psrc4 = (device const float4 *)(src0 + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); device const float4 * pmask = src1 != src0 ? (device const float4 *)(src1 + i01*ne00) : nullptr; + device const float4 * ppos = src2 != src0 ? (device const float4 *)(src2) : nullptr; device float4 * pdst4 = (device float4 *)(dst + i03*ne02*ne01*ne00 + i02*ne01*ne00 + i01*ne00); + float slope = 0.0f; + + if (max_bias > 0.0f) { + const int64_t h = i02; + + const float base = h < n_head_log2 ? m0 : m1; + const int exp = h < n_head_log2 ? h + 1 : 2*(h - n_head_log2) + 1; + + slope = pow(base, exp); + } + // parallel max float4 lmax4 = -INFINITY; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { - lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f)); + lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? 
pmask[i00] : 0.0f) + slope*ppos[i00]); } const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3])); @@ -486,7 +521,7 @@ kernel void kernel_soft_max_4( // parallel sum float4 lsum4 = 0.0f; for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) { - const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f)) - max_val); + const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]) - max_val); lsum4 += exp_psrc4; pdst4[i00] = exp_psrc4; } diff --git a/ggml.c b/ggml.c index 264cfd705..e94024c62 100644 --- a/ggml.c +++ b/ggml.c @@ -5096,16 +5096,28 @@ static struct ggml_tensor * ggml_soft_max_impl( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * mask, + struct ggml_tensor * pos, float scale, + float max_bias, bool inplace) { GGML_ASSERT(ggml_is_contiguous(a)); + if (mask) { GGML_ASSERT(ggml_is_contiguous(mask)); - GGML_ASSERT(mask->ne[2] == 1); - GGML_ASSERT(mask->ne[3] == 1); + GGML_ASSERT(ggml_is_matrix(mask)); GGML_ASSERT(ggml_can_repeat_rows(mask, a)); } + if (pos) { + GGML_ASSERT(ggml_is_vector(pos)); + GGML_ASSERT(pos->type == GGML_TYPE_F32); + GGML_ASSERT(pos->ne[0] == a->ne[0]); + } + + if (max_bias > 0.0f) { + GGML_ASSERT(pos); + } + bool is_node = false; if (a->grad) { @@ -5114,13 +5126,14 @@ static struct ggml_tensor * ggml_soft_max_impl( struct ggml_tensor * result = inplace ? ggml_view_tensor(ctx, a) : ggml_dup_tensor(ctx, a); - float params[] = { scale }; + float params[] = { scale, max_bias }; ggml_set_op_params(result, params, sizeof(params)); result->op = GGML_OP_SOFT_MAX; result->grad = is_node ? ggml_dup_tensor(ctx, result) : NULL; result->src[0] = a; result->src[1] = mask; + result->src[2] = pos; return result; } @@ -5128,21 +5141,23 @@ static struct ggml_tensor * ggml_soft_max_impl( struct ggml_tensor * ggml_soft_max( struct ggml_context * ctx, struct ggml_tensor * a) { - return ggml_soft_max_impl(ctx, a, NULL, 1.0f, false); + return ggml_soft_max_impl(ctx, a, NULL, NULL, 1.0f, 0.0f, false); } struct ggml_tensor * ggml_soft_max_inplace( struct ggml_context * ctx, struct ggml_tensor * a) { - return ggml_soft_max_impl(ctx, a, NULL, 1.0f, true); + return ggml_soft_max_impl(ctx, a, NULL, NULL, 1.0f, 0.0f, true); } struct ggml_tensor * ggml_soft_max_ext( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * mask, - float scale) { - return ggml_soft_max_impl(ctx, a, mask, scale, false); + struct ggml_tensor * pos, + float scale, + float max_bias) { + return ggml_soft_max_impl(ctx, a, mask, pos, scale, max_bias, false); } // ggml_soft_max_back @@ -11495,6 +11510,7 @@ static void ggml_compute_forward_soft_max_f32( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, + const struct ggml_tensor * src2, struct ggml_tensor * dst) { assert(ggml_is_contiguous(dst)); assert(ggml_are_same_shape(src0, dst)); @@ -11503,16 +11519,29 @@ static void ggml_compute_forward_soft_max_f32( return; } - float scale = 1.0f; - memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + float scale = 1.0f; + float max_bias = 0.0f; + + memcpy(&scale, (float *) dst->op_params + 0, sizeof(float)); + memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float)); // TODO: handle transposed/permuted matrices const int ith = params->ith; const int nth = params->nth; + GGML_TENSOR_UNARY_OP_LOCALS + const int64_t ne11 = src1 ? src1->ne[1] : 1; + // TODO: is this supposed to be ceil instead of floor? 
+ // https://huggingface.co/mosaicml/mpt-7b/blob/main/attention.py#L370 + const uint32_t n_head_kv = ne02; + const uint32_t n_head_log2 = 1u << (uint32_t) floor(log2(n_head_kv)); + + const float m0 = powf(2.0f, -(max_bias ) / n_head_log2); + const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_head_log2); + const int nc = src0->ne[0]; const int nr = ggml_nrows(src0); @@ -11525,6 +11554,9 @@ static void ggml_compute_forward_soft_max_f32( float * wp = (float *) params->wdata + (nc + CACHE_LINE_SIZE_F32) * ith; + // when max_bias <= 0.0f, src2 is not used and we default it to src0 to avoid branching + float * pos = src2 ? (float *) src2->data : src0->data; + for (int i1 = ir0; i1 < ir1; i1++) { float * sp = (float *)((char *) src0->data + i1*src0->nb[1]); float * dp = (float *)((char *) dst->data + i1*dst->nb[1]); @@ -11538,6 +11570,16 @@ static void ggml_compute_forward_soft_max_f32( ggml_vec_acc_f32(nc, wp, mp); } + // ALiBi bias + if (max_bias > 0.0f) { + const uint32_t h = (i1/ne01)%ne02; // head + const float slope = h < n_head_log2 ? powf(m0, h + 1) : powf(m1, 2*(h - n_head_log2) + 1); + + for (int i = 0; i < nc; i++) { + wp[i] = wp[i] + slope*pos[i]; + } + } + #ifndef NDEBUG for (int i = 0; i < nc; ++i) { //printf("p[%d] = %f\n", i, p[i]); @@ -11582,11 +11624,12 @@ static void ggml_compute_forward_soft_max( const struct ggml_compute_params * params, const struct ggml_tensor * src0, const struct ggml_tensor * src1, + const struct ggml_tensor * src2, struct ggml_tensor * dst) { switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_soft_max_f32(params, src0, src1, dst); + ggml_compute_forward_soft_max_f32(params, src0, src1, src2, dst); } break; default: { @@ -11730,22 +11773,20 @@ static void ggml_compute_forward_alibi_f32( const float m0 = powf(2.0f, -(max_bias) / n_heads_log2_floor); const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); - for (int64_t i = 0; i < ne0; i++) { - for (int64_t j = 0; j < ne1; j++) { - for (int64_t k = 0; k < ne2_ne3; k++) { + for (int64_t k = 0; k < ne2_ne3; k++) { + // TODO: k*nb2 or k*nb3 + float m_k; + + if (k < n_heads_log2_floor) { + m_k = powf(m0, k + 1); + } else { + m_k = powf(m1, 2 * (k - n_heads_log2_floor) + 1); + } + + for (int64_t i = 0; i < ne0; i++) { + for (int64_t j = 0; j < ne1; j++) { float * const src = (float *)((char *) src0->data + i*nb0 + j*nb1 + k*nb2); float * pdst = (float *)((char *) dst->data + i*nb0 + j*nb1 + k*nb2); - - // TODO: k*nb2 or k*nb3 - - float m_k; - - if (k < n_heads_log2_floor) { - m_k = powf(m0, k + 1); - } else { - m_k = powf(m1, 2 * (k - n_heads_log2_floor) + 1); - } - pdst[0] = i * m_k + src[0]; } } @@ -11790,21 +11831,20 @@ static void ggml_compute_forward_alibi_f16( const float m0 = powf(2.0f, -(max_bias) / n_heads_log2_floor); const float m1 = powf(2.0f, -(max_bias / 2.0f) / n_heads_log2_floor); - for (int i = 0; i < ne0; i++) { - for (int j = 0; j < ne1; j++) { - for (int k = 0; k < ne2_ne3; k++) { + for (int k = 0; k < ne2_ne3; k++) { + // TODO: k*nb2 or k*nb3 + float m_k; + + if (k < n_heads_log2_floor) { + m_k = powf(m0, k + 1); + } else { + m_k = powf(m1, 2 * (k - n_heads_log2_floor) + 1); + } + + for (int i = 0; i < ne0; i++) { + for (int j = 0; j < ne1; j++) { ggml_fp16_t * const src = (ggml_fp16_t *)((char *) src0->data + i*nb0 + j*nb1 + k*nb2); - float * pdst = (float *)((char *) dst->data + i*nb0 + j*nb1 + k*nb2); - - // TODO: k*nb2 or k*nb3 - - float m_k; - - if (k < n_heads_log2_floor) { - m_k = powf(m0, k + 1); - } else { - m_k = powf(m1, 2 * (k - n_heads_log2_floor) + 
1); - } + float * pdst = (float *)((char *) dst->data + i*nb0 + j*nb1 + k*nb2); // we return F32 pdst[0] = i * m_k + GGML_FP16_TO_FP32(src[0]); @@ -15116,7 +15156,7 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm } break; case GGML_OP_SOFT_MAX: { - ggml_compute_forward_soft_max(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_soft_max(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); } break; case GGML_OP_SOFT_MAX_BACK: { diff --git a/ggml.h b/ggml.h index 270018185..6c1956772 100644 --- a/ggml.h +++ b/ggml.h @@ -1383,13 +1383,17 @@ extern "C" { struct ggml_context * ctx, struct ggml_tensor * a); - // fused soft_max(a*scale + mask) + // fused soft_max(a*scale + mask + pos[i]*(ALiBi slope)) // mask is optional + // pos is required when max_bias > 0.0f + // max_bias = 0.0f for no ALiBi GGML_API struct ggml_tensor * ggml_soft_max_ext( struct ggml_context * ctx, struct ggml_tensor * a, struct ggml_tensor * mask, - float scale); + struct ggml_tensor * pos, + float scale, + float max_bias); GGML_API struct ggml_tensor * ggml_soft_max_back( struct ggml_context * ctx, @@ -1491,12 +1495,13 @@ extern "C" { // alibi position embedding // in-place, returns view(a) - GGML_API struct ggml_tensor * ggml_alibi( + GGML_DEPRECATED(GGML_API struct ggml_tensor * ggml_alibi( struct ggml_context * ctx, struct ggml_tensor * a, int n_past, int n_head, - float bias_max); + float bias_max), + "use ggml_soft_max_ext instead (will be removed in Mar 2024)"); // clamp // in-place, returns view(a) diff --git a/llama.cpp b/llama.cpp index 8966c3e66..6ac9caa95 100644 --- a/llama.cpp +++ b/llama.cpp @@ -1557,12 +1557,13 @@ struct llama_hparams { uint32_t n_yarn_orig_ctx; int32_t rope_scaling_type_train; - float f_clamp_kqv; - float f_max_alibi_bias; + float f_clamp_kqv = 0.0f; + float f_max_alibi_bias = 0.0f; bool causal_attn = true; - uint32_t pooling_type = LLAMA_POOLING_NONE; + bool need_kq_pos = false; + uint32_t pooling_type = LLAMA_POOLING_NONE; bool operator!=(const llama_hparams & other) const { if (this->vocab_only != other.vocab_only) return true; @@ -1923,6 +1924,7 @@ struct llama_context { struct ggml_tensor * inp_embd; // F32 [n_embd, n_batch] struct ggml_tensor * inp_pos; // I32 [n_batch] struct ggml_tensor * inp_KQ_mask; // F32 [n_ctx, n_batch] + struct ggml_tensor * inp_KQ_pos; // F32 [n_ctx] struct ggml_tensor * inp_K_shift; // I32 [n_ctx] struct ggml_tensor * inp_mean; // F32 [n_batch, n_batch] struct ggml_tensor * inp_cls; // I32 [n_batch] @@ -3054,6 +3056,11 @@ static void llm_load_hparams( case 40: model.type = e_model::MODEL_13B; break; default: model.type = e_model::MODEL_UNKNOWN; } + + if (model.type == e_model::MODEL_13B) { + // TODO: become GGUF KV parameter + hparams.f_max_alibi_bias = 8.0f; + } } break; case LLM_ARCH_STARCODER: { @@ -3081,6 +3088,9 @@ static void llm_load_hparams( case 32: model.type = e_model::MODEL_1B; break; default: model.type = e_model::MODEL_UNKNOWN; } + + // TODO: become GGUF KV parameter + hparams.f_max_alibi_bias = 8.0f; } break; case LLM_ARCH_BERT: { @@ -3126,11 +3136,12 @@ static void llm_load_hparams( case 4096: model.type = e_model::MODEL_7B; break; } break; } + + // TODO: become GGUF KV parameter + hparams.f_max_alibi_bias = 8.0f; } break; case LLM_ARCH_MPT: { - hparams.f_clamp_kqv = 0.0f; - ml.get_key(LLM_KV_ATTENTION_LAYERNORM_EPS, hparams.f_norm_eps); ml.get_key(LLM_KV_ATTENTION_CLAMP_KQV, hparams.f_clamp_kqv, false); ml.get_key(LLM_KV_ATTENTION_MAX_ALIBI_BIAS, hparams.f_max_alibi_bias); 
@@ -3232,6 +3243,10 @@ static void llm_load_hparams( } model.ftype = ml.ftype; + + if (hparams.f_max_alibi_bias > 0.0f) { + hparams.need_kq_pos = true; + } } // TODO: This should probably be in llama.h @@ -4774,10 +4789,10 @@ static struct ggml_tensor * llm_build_kqv( struct ggml_tensor * wo_b, struct ggml_tensor * q_cur, struct ggml_tensor * kq_mask, + struct ggml_tensor * kq_pos, int64_t n_ctx, int32_t n_tokens, int32_t n_kv, - float max_alibi_bias, float kq_scale, const llm_build_cb & cb, int il) { @@ -4807,26 +4822,26 @@ static struct ggml_tensor * llm_build_kqv( ggml_mul_mat_set_prec(kq, GGML_PREC_F32); } - if (max_alibi_bias > 0.0f) { - // temporary branch until we figure out how to handle ggml_alibi through ggml_add +#if defined(GGML_USE_VULKAN) || defined(GGML_USE_KOMPUTE) || defined(GGML_USE_SYCL) +#pragma message("TODO: ALiBi support in ggml_soft_max_ext is not implemented for Vulkan, Kompute, and SYCL") +#pragma message(" Falling back to ggml_alibi(). Will become an error in Mar 2024") +#pragma message("ref: https://github.com/ggerganov/llama.cpp/pull/5488") + if (hparams.f_max_alibi_bias > 0.0f) { kq = ggml_scale(ctx, kq, kq_scale); cb(kq, "kq_scaled", il); - if (max_alibi_bias > 0.0f) { - // TODO: n_head or n_head_kv - // TODO: K-shift is likely not working - // TODO: change to ggml_add - kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, max_alibi_bias); - cb(kq, "kq_scaled_alibi", il); - } + kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, hparams.f_max_alibi_bias); + cb(kq, "kq_scaled_alibi", il); kq = ggml_add(ctx, kq, kq_mask); cb(kq, "kq_masked", il); kq = ggml_soft_max(ctx, kq); cb(kq, "kq_soft_max", il); - } else { - kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_scale); + } else +#endif + { + kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_pos, kq_scale, hparams.f_max_alibi_bias); cb(kq, "kq_soft_max_ext", il); } @@ -4874,11 +4889,11 @@ static struct ggml_tensor * llm_build_kv( struct ggml_tensor * v_cur, struct ggml_tensor * q_cur, struct ggml_tensor * kq_mask, + struct ggml_tensor * kq_pos, int64_t n_ctx, int32_t n_tokens, int32_t kv_head, int32_t n_kv, - float max_alibi_bias, float kq_scale, const llm_build_cb & cb, int il) { @@ -4892,9 +4907,8 @@ static struct ggml_tensor * llm_build_kv( llm_build_kv_store(ctx, hparams, kv, graph, k_cur, v_cur, n_ctx, n_tokens, kv_head, cb, il); struct ggml_tensor * cur; - cur = llm_build_kqv(ctx, model, hparams, kv, graph, - wo, wo_b, - q_cur, kq_mask, n_ctx, n_tokens, n_kv, max_alibi_bias, kq_scale, cb, il); + cur = llm_build_kqv(ctx, model, hparams, kv, graph, wo, wo_b, + q_cur, kq_mask, kq_pos, n_ctx, n_tokens, n_kv, kq_scale, cb, il); cb(cur, "kqv_out", il); return cur; @@ -5062,7 +5076,7 @@ struct llm_build_context { } Qcur = ggml_rope_custom( - ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens), inp_pos, hparams.n_rot, 0, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, beta_slow ); @@ -5077,7 +5091,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5207,6 +5221,10 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, 
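// The call-site change in llm_build_kqv, reduced to its essence (sketch; ctx,
// kq, kq_mask, kq_pos, kq_scale and the bias value are assumed to be in scope
// as in the function above):
//
//   before: four ops, with ggml_alibi recomputing the slopes internally
//       kq = ggml_scale(ctx, kq, kq_scale);
//       kq = ggml_alibi(ctx, kq, /*n_past*/ 0, n_head, max_alibi_bias);
//       kq = ggml_add(ctx, kq, kq_mask);
//       kq = ggml_soft_max(ctx, kq);
//
//   after: one fused op; the position data flows in through kq_pos
//       kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_pos, kq_scale, hparams.f_max_alibi_bias);
//
// Besides saving three graph nodes, this sidesteps the deleted TODO about
// K-shift: the bias is now recomputed from position data supplied per eval
// rather than from indices baked into ggml_alibi.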
n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); + // positions of the tokens in the KV cache + struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); + cb(KQ_pos, "KQ_pos", -1); + // shift the entire K-cache if needed if (do_rope_shift) { llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); @@ -5255,12 +5273,9 @@ struct llm_build_context { cb(Kcur, "Kcur", il); - // apply ALiBi for 13B model - const float max_alibi_bias = model.type == MODEL_13B ? 8.0f : -1.0f; - cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5384,7 +5399,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5483,7 +5498,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5688,7 +5703,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Q, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Q, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5750,6 +5765,10 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); + // positions of the tokens in the KV cache + struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); + cb(KQ_pos, "KQ_pos", -1); + for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * inpSA = inpL; @@ -5777,7 +5796,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5878,7 +5897,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } else { // compute Q and K and RoPE them @@ -5909,7 +5928,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 
1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -5985,6 +6004,10 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); + // positions of the tokens in the KV cache + struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); + cb(KQ_pos, "KQ_pos", -1); + inpL = llm_build_norm(ctx0, inpL, hparams, model.tok_norm, model.tok_norm_b, @@ -6018,7 +6041,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, 8.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6078,6 +6101,10 @@ struct llm_build_context { struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); cb(KQ_mask, "KQ_mask", -1); + // positions of the tokens in the KV cache + struct ggml_tensor * KQ_pos = ggml_view_1d(ctx0, lctx.inp_KQ_pos, n_kv, 0); + cb(KQ_pos, "KQ_pos", -1); + for (int il = 0; il < n_layer; ++il) { struct ggml_tensor * attn_norm; @@ -6111,7 +6138,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, hparams.f_max_alibi_bias, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6233,7 +6260,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6348,7 +6375,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6469,7 +6496,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6596,7 +6623,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f, cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); cb(cur, "kqv_out", il); } @@ -6699,7 +6726,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, 
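// Pattern for the many call sites updated above and below (descriptive note):
// builders whose architecture uses ALiBi (Baichuan-13B, Refact, Bloom, MPT)
// pass the KQ_pos view and rely on hparams.f_max_alibi_bias > 0.0f; all
// RoPE-based builders pass nullptr, and with max_bias == 0.0f the fused op
// degenerates to plain soft_max(a*scale + mask), so their behavior is
// unchanged.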
il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } struct ggml_tensor * sa_out = cur; @@ -6798,7 +6825,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6907,7 +6934,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -7025,7 +7052,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, NULL, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -7144,7 +7171,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -7276,7 +7303,7 @@ struct llm_build_context { cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, model.layers[il].wo, model.layers[il].bo, - Kcur, Vcur, Qcur, KQ_mask, n_ctx, n_tokens, kv_head, n_kv, -1.0f, 1.0f/sqrtf(float(n_embd_head)), cb, il); + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -7507,6 +7534,18 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } + if (hparams.need_kq_pos) { + const int64_t n_kv = kv_self.n; + + assert(ggml_backend_buffer_is_host(lctx.inp_KQ_pos->buffer)); + + float * data = (float *) lctx.inp_KQ_pos->data; + + for (int i = 0; i < n_kv; ++i) { + data[i] = float(lctx.kv_self.cells[i].pos); + } + } + if (kv_self.has_shift) { const int64_t n_ctx = cparams.n_ctx; @@ -11434,7 +11473,7 @@ struct llama_context * llama_new_context_with_model( // graph inputs { ggml_init_params init_params = { - /* .mem_size */ ggml_tensor_overhead()*7, + /* .mem_size */ ggml_tensor_overhead()*8, /* .mem_buffer */ nullptr, /* .no_alloc */ true, }; @@ -11444,6 +11483,7 @@ struct llama_context * llama_new_context_with_model( ctx->inp_embd = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, hparams.n_embd, cparams.n_batch); ctx->inp_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); ctx->inp_KQ_mask = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx, cparams.n_batch); + ctx->inp_KQ_pos = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_ctx); ctx->inp_K_shift = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_ctx); ctx->inp_mean = ggml_new_tensor_2d(ctx->ctx_input, GGML_TYPE_F32, cparams.n_batch, cparams.n_batch); ctx->inp_cls = ggml_new_tensor_1d(ctx->ctx_input, GGML_TYPE_I32, cparams.n_batch); @@ 
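// Why absolute KV positions are enough here (sketch of the argument): textbook
// ALiBi biases the logit of query position q against key position k by
// slope*(k - q). This patch adds slope*pos[k] instead, i.e. it drops the
// -slope*q term. That term is constant across a softmax row, and
// softmax(x + c) == softmax(x), so the resulting probabilities are identical:
//
//     exp(x_k + s*k - s*q) / sum_j exp(x_j + s*j - s*q)
//   = exp(x_k + s*k)       / sum_j exp(x_j + s*j)
//
// This is also why inp_KQ_pos can simply be refilled from
// kv_self.cells[i].pos on every eval and stay consistent with K-shifts: the
// shift updates the cell positions, and the bias is rederived from them.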
-11452,6 +11492,7 @@ struct llama_context * llama_new_context_with_model( ggml_set_name(ctx->inp_embd, "inp_embd"); ggml_set_name(ctx->inp_pos, "inp_pos"); ggml_set_name(ctx->inp_KQ_mask, "inp_KQ_mask"); + ggml_set_name(ctx->inp_KQ_pos, "inp_KQ_pos"); ggml_set_name(ctx->inp_K_shift, "inp_K_shift"); ggml_set_name(ctx->inp_mean, "inp_mean"); ggml_set_name(ctx->inp_cls, "inp_cls"); diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 9af8517d9..30a7d1f5a 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1085,24 +1085,32 @@ struct test_diag_mask_inf : public test_case { struct test_soft_max : public test_case { const ggml_type type; const std::array ne; - const float scale; const bool mask; + const float scale; + const float max_bias; std::string vars() override { - return VARS_TO_STR4(type, ne, scale, mask); + return VARS_TO_STR5(type, ne, mask, scale, max_bias); } test_soft_max(ggml_type type = GGML_TYPE_F32, std::array ne = {10, 10, 10, 10}, + bool mask = false, float scale = 1.0f, - bool mask = false) - : type(type), ne(ne), scale(scale), mask(mask) {} + float max_bias = 0.0f) + : type(type), ne(ne), mask(mask), scale(scale), max_bias(max_bias) {} ggml_tensor * build_graph(ggml_context * ctx) override { ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); - ggml_tensor * b = nullptr; - if (mask) { b = ggml_new_tensor_2d(ctx, type, ne[0], ne[1]); } - ggml_tensor * out = ggml_soft_max_ext(ctx, a, b, scale); + ggml_tensor * mask = nullptr; + if (this->mask) { + mask = ggml_new_tensor_2d(ctx, type, ne[0], ne[1]); + } + ggml_tensor * pos = nullptr; + if (max_bias > 0.0f) { + pos = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, ne[0]); + } + ggml_tensor * out = ggml_soft_max_ext(ctx, a, mask, pos, scale, max_bias); return out; } }; @@ -1147,30 +1155,6 @@ struct test_rope : public test_case { } }; -// GGML_OP_ALIBI -struct test_alibi : public test_case { - const ggml_type type; - const std::array ne; - int n_past; - int n_head; - float bias_max; - - std::string vars() override { - return VARS_TO_STR5(type, ne, n_past, n_head, bias_max); - } - - test_alibi(ggml_type type = GGML_TYPE_F32, - std::array ne = {10, 10, 10, 10}, - int n_past = 512, int n_head = 10, float bias_max = 0.5f) - : type(type), ne(ne), n_past(n_past), n_head(n_head), bias_max(bias_max) {} - - ggml_tensor * build_graph(ggml_context * ctx) override { - ggml_tensor * a = ggml_new_tensor(ctx, type, 4, ne.data()); - ggml_tensor * out = ggml_alibi(ctx, a, n_past, n_head, bias_max); - return out; - } -}; - // GGML_OP_POOL2D struct test_pool2d : public test_case { enum ggml_op_pool pool_type; @@ -1488,7 +1472,7 @@ struct test_moe : public test_case { ggml_tensor * cur = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_tokens); ggml_tensor * logits = ggml_mul_mat(ctx, ffn_gate_inp, cur); - ggml_tensor * probs = ggml_soft_max_ext(ctx, logits, nullptr, 1.0f/sqrtf(n_embd)); + ggml_tensor * probs = ggml_soft_max_ext(ctx, logits, nullptr, nullptr, 1.0f/sqrtf(n_embd), 0.0f); // select experts ggml_tensor * selected_experts = ggml_top_k(ctx, probs, n_experts_per_tok); @@ -1617,7 +1601,6 @@ public: ggml_cpy(ctx, v_cur_t, v_cache_view); } - // if max_alibi_bias > 0 then apply ALiBi struct ggml_tensor * llm_build_kqv( struct ggml_context * ctx, struct ggml_tensor * k_l, @@ -1636,7 +1619,7 @@ public: struct ggml_tensor * kq = ggml_mul_mat(ctx, k, q); - kq = ggml_soft_max_ext(ctx, kq, kq_mask, kq_scale); + kq = ggml_soft_max_ext(ctx, kq, kq_mask, nullptr, kq_scale, 0.0f); // split cached v into n_head 
heads struct ggml_tensor * v = @@ -2083,6 +2066,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 1}, 5)); test_cases.emplace_back(new test_diag_mask_inf(GGML_TYPE_F32, {10, 10, 10, 10}, 5)); +#if 0 std::uniform_int_distribution<> dist_ne1(1, 50); int exponent = 1; while (exponent < (1 << 17)) { @@ -2091,14 +2075,29 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op for (int n = 0; n < 10; ++n) { int64_t ne0 = dist_ne0(rng); int64_t ne1 = dist_ne1(rng); - test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1})); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1}, n/2 == 0, 0.1f, ne0 < 1000 ? 4.0f : 0.0f)); } exponent <<= 1; } +#endif + for (bool mask : {false, true}) { + for (float max_bias : {0.0f, 8.0f}) { + for (float scale : {1.0f, 0.1f}) { + for (int64_t ne0 : {16, 1024}) { + for (int64_t ne1 : {16, 1024}) { + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0, ne1, 1, 1}, mask, scale, max_bias)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {ne0-1, ne1-1, 1, 1}, mask, scale, max_bias)); + } + } + } + } + } - test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {16, 2, 32, 1}, 0.1f)); - test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {32, 2, 32, 1}, 0.1f, true)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {16, 2, 32, 1}, false, 0.1f, 0.0f)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {32, 2, 32, 1}, true, 0.1f, 0.0f)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {16, 2, 32, 1}, false, 0.1f, 8.0f)); + test_cases.emplace_back(new test_soft_max(GGML_TYPE_F32, {32, 2, 32, 1}, true, 0.1f, 8.0f)); for (ggml_type type : {GGML_TYPE_F32, GGML_TYPE_F16}) { test_cases.emplace_back(new test_rope(type, {128, 32, 10, 1}, 128, 0, 512)); // llama 7B @@ -2113,7 +2112,6 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_rope(type, { 80, 32, 10, 1}, 32, 2, 512)); // neox (phi-2) } - test_cases.emplace_back(new test_alibi()); test_cases.emplace_back(new test_concat(GGML_TYPE_F32)); test_cases.emplace_back(new test_concat(GGML_TYPE_I32)); From c8e0d7efeb7634ecc2e9832e879ab9fca4510e71 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 18 Feb 2024 00:17:07 +0000 Subject: [PATCH 771/859] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/f8e2ebd66d097614d51a56a755450d4ae1632df1' (2024-02-07) → 'github:NixOS/nixpkgs/5863c27340ba4de8f83e7e3c023b9599c3cb3c80' (2024-02-16) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 239d0686c..47d6448b5 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1707268954, - "narHash": "sha256-2en1kvde3cJVc3ZnTy8QeD2oKcseLFjYPLKhIGDanQ0=", + "lastModified": 1708118438, + "narHash": "sha256-kk9/0nuVgA220FcqH/D2xaN6uGyHp/zoxPNUmPCMmEE=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "f8e2ebd66d097614d51a56a755450d4ae1632df1", + "rev": "5863c27340ba4de8f83e7e3c023b9599c3cb3c80", "type": "github" }, "original": { From bd2d4e393b2b7d2a1b2e201058e26017c9728ead Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sun, 18 Feb 2024 18:16:55 +0200 Subject: 
[PATCH 772/859] 1.5 bit quantization (#5453) * iq1_s: WIP basics * iq1_s: CUDA is working * iq1_s: scalar CPU dot product * iq1_s: WIP AVX2 dot product - something is not right * Fix tests * Fix shadow warnings * Fix after merge with latest master * iq1_s: AVX2 finally works * iq1_s: ARM_NEON dot product. Works, but not very fast * iq1_s: better grid * iq1_s: use IQ2_XXS for attn_output At a cost of 0.04 extra bpw this gives a big improvement in PPL. * iq1_s: Metal basics Dequantize works, but not dot product * iq1_s: Metal works, but quite slow As usual, Apple Silicon does not like the code I write. * iq1_s: Tests * iq1_s: slightly faster dot product --------- Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 6 +- ggml-backend.c | 2 +- ggml-cuda.cu | 224 ++++++++++- ggml-metal.m | 29 +- ggml-metal.metal | 337 +++++++++++++++++ ggml-quants.c | 657 +++++++++++++++++++++++++++++++-- ggml-quants.h | 14 +- ggml.c | 44 ++- ggml.h | 2 + llama.cpp | 16 +- llama.h | 1 + tests/test-backend-ops.cpp | 2 +- 12 files changed, 1286 insertions(+), 48 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 4a5c504e3..ea7ba50c9 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -23,6 +23,7 @@ static const std::vector QUANT_OPTIONS = { { "Q5_1", LLAMA_FTYPE_MOSTLY_Q5_1, " 4.70G, +0.0349 ppl @ LLaMA-v1-7B", }, { "IQ2_XXS",LLAMA_FTYPE_MOSTLY_IQ2_XXS," 2.06 bpw quantization", }, { "IQ2_XS", LLAMA_FTYPE_MOSTLY_IQ2_XS, " 2.31 bpw quantization", }, + { "IQ1_S", LLAMA_FTYPE_MOSTLY_IQ1_S, " 1.56 bpw quantization", }, { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, { "Q2_K_S", LLAMA_FTYPE_MOSTLY_Q2_K_S, " 2.16G, +9.0634 ppl @ LLaMA-v1-7B", }, { "IQ3_XXS",LLAMA_FTYPE_MOSTLY_IQ3_XXS," 3.06 bpw quantization", }, @@ -287,9 +288,10 @@ int main(int argc, char ** argv) { } } - if ((params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || params.ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) && imatrix_data.empty()) { + if ((params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || params.ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || + params.ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S || params.ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) && imatrix_data.empty()) { fprintf(stderr, "\n===============================================================================================\n"); - fprintf(stderr, "Please do not use IQ2_XXS, IQ2_XS or Q2_K_S quantization without an importance matrix\n"); + fprintf(stderr, "Please do not use IQ1_S, IQ2_XXS, IQ2_XS or Q2_K_S quantization without an importance matrix\n"); fprintf(stderr, "===============================================================================================\n\n\n"); return 1; } diff --git a/ggml-backend.c b/ggml-backend.c index 66e8c293a..5076d9e5e 100644 --- a/ggml-backend.c +++ b/ggml-backend.c @@ -756,7 +756,7 @@ GGML_CALL static bool ggml_backend_cpu_graph_compute(ggml_backend_t backend, str GGML_CALL static bool ggml_backend_cpu_supports_op(ggml_backend_t backend, const struct ggml_tensor * op) { switch (op->op) { case GGML_OP_CPY: - return op->type != GGML_TYPE_IQ2_XXS && op->type != GGML_TYPE_IQ2_XS; // missing type_traits.from_float + return op->type != GGML_TYPE_IQ2_XXS && op->type != GGML_TYPE_IQ2_XS && op->type != GGML_TYPE_IQ1_S; // missing type_traits.from_float case GGML_OP_MUL_MAT: return op->src[1]->type == GGML_TYPE_F32 || op->src[1]->type == ggml_internal_get_type_traits(op->src[0]->type).vec_dot_type; default: diff --git a/ggml-cuda.cu 
b/ggml-cuda.cu index 5fd8a87e4..933ebbc4e 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -517,6 +517,15 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +#define QR1_S 8 +#define QI1_S (QK_K / (4*QR1_S)) +typedef struct { + half d; + uint8_t qs[QK_K/8]; + uint8_t scales[QK_K/16]; +} block_iq1_s; +static_assert(sizeof(block_iq1_s) == sizeof(ggml_fp16_t) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -1681,6 +1690,137 @@ static const __device__ uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +static const __device__ uint64_t iq1s_grid[512] = { + 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, + 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, + 0xffffff00ff000000, 0xffffff000000ff00, 0xffffff00000000ff, 0xffffff0000000100, + 0xffffff0000010000, 0xffffff0001000000, 0xffffff01ffff00ff, 0xffffff01ff01ff00, + 0xffffff01ff010100, 0xffffff0100000001, 0xffffff0101ffff00, 0xffffff0101ff0101, + 0xffffff0101010100, 0xffff00ffff00ff01, 0xffff00ffff0000ff, 0xffff00ff00ff0100, + 0xffff00ff0100ff00, 0xffff00ff010001ff, 0xffff0000ff0101ff, 0xffff000000ffff00, + 0xffff000000000000, 0xffff00000001ff01, 0xffff000001000101, 0xffff0000010100ff, + 0xffff0001ffff0100, 0xffff00010000ff00, 0xffff000100010101, 0xffff000101000000, + 0xffff01ffffff0000, 0xffff01ffff01ffff, 0xffff01ffff010100, 0xffff01ff00000000, + 0xffff01ff01ffffff, 0xffff01ff01ff0001, 0xffff01ff0101ffff, 0xffff01ff01010001, + 0xffff0100ffffff01, 0xffff01000000ffff, 0xffff010000000100, 0xffff010001ff01ff, + 0xffff010001000000, 0xffff0101ff000000, 0xffff0101000101ff, 0xffff010101ffff01, + 0xffff01010101ff00, 0xff00ffffff000000, 0xff00ffff00ffff00, 0xff00ffff00000001, + 0xff00ffff000001ff, 0xff00ffff01010000, 0xff00ff00ffff0000, 0xff00ff00ff00ff00, + 0xff00ff00ff0000ff, 0xff00ff00ff000100, 0xff00ff00ff010001, 0xff00ff0000ff0001, + 0xff00ff000000ffff, 0xff00ff0000000000, 0xff00ff000001ff00, 0xff00ff0000010100, + 0xff00ff0001ff0000, 0xff00ff000100ff00, 0xff00ff0001000100, 0xff00ff01ff000000, + 0xff00ff0100ff0000, 0xff00ff01000001ff, 0xff00ff0101010001, 0xff0000ff00000000, + 0xff0000ff0001ff00, 0xff0000ff00010100, 0xff000000ffff0101, 0xff000000ff000000, + 0xff000000ff01ff00, 0xff00000000ff0000, 0xff0000000000ff00, 0xff000000000000ff, + 0xff00000000000000, 0xff00000000000001, 0xff00000000000100, 0xff0000000001ffff, + 0xff00000000010000, 0xff00000001000000, 0xff00000001010100, 0xff000001ff00ff01, + 0xff000001ff0100ff, 0xff00000100000000, 0xff0000010001ff00, 0xff00000101ff0100, + 0xff0000010100ff00, 0xff0001ff00ff00ff, 0xff0001ff00000101, 0xff0001ff000100ff, + 0xff0001ff01000000, 0xff000100ff0001ff, 0xff0001000000ff01, 0xff00010000000000, + 0xff00010000010001, 0xff00010000010100, 0xff00010001ffff00, 0xff00010001ff0101, + 0xff00010001010000, 0xff000101ffffffff, 0xff000101ff000101, 0xff00010101ff00ff, + 0xff00010101000001, 0xff000101010100ff, 0xff01ffffff000101, 0xff01ffffff01ffff, + 0xff01ffffff01ff01, 0xff01ffffff0101ff, 0xff01ffff00000000, 0xff01ffff01ff0001, + 0xff01ffff0101ff01, 0xff01ff00ff000000, 0xff01ff0000ff0100, 0xff01ff000000ff01, + 0xff01ff0000010000, 0xff01ff00010000ff, 0xff01ff01ff01ff00, 0xff01ff0100000101, + 0xff0100ffffff0000, 0xff0100ffff010000, 
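// Where the advertised 1.56 bpw comes from (worked out for QK_K == 256): each
// group of 8 weights gets an 8-bit grid index in qs plus a 4-bit field in
// scales (a 3-bit sub-scale and 1 extra index bit, two fields packed per
// byte), and the whole 256-weight block shares one fp16 scale d:
//
//     sizeof(block_iq1_s) = 2 + 256/8 + 256/16 = 50 bytes per 256 weights
//     50 * 8 / 256 = 1.5625 bits per weight
//
// which matches the static_assert on the block size above.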
0xff0100ff01ff00ff, 0xff0100ff01000100, + 0xff0100ff010100ff, 0xff010000ffffff01, 0xff01000000000000, 0xff0100000101ff00, + 0xff010001ffff00ff, 0xff010001ff000100, 0xff01000100ffff00, 0xff01000100010001, + 0xff01000101ff0001, 0xff010001010001ff, 0xff0101ffffffffff, 0xff0101ffff01ffff, + 0xff0101ffff010101, 0xff0101ff0000ff00, 0xff0101ff01010001, 0xff010100ff000000, + 0xff010100ff01ff01, 0xff01010000ff0001, 0xff01010000000100, 0xff01010001000000, + 0xff0101010100ffff, 0x00ffffff0000ff01, 0x00ffffff000000ff, 0x00ffffff00000100, + 0x00ffffff00010000, 0x00ffff00ffff0001, 0x00ffff00ff0000ff, 0x00ffff00ff000100, + 0x00ffff0000000000, 0x00ffff0001000100, 0x00ffff0001010001, 0x00ffff01ff00ff01, + 0x00ffff0100ff0100, 0x00ffff010000ff00, 0x00ffff01000100ff, 0x00ffff0101ff00ff, + 0x00ffff010101ff00, 0x00ff00ffffffffff, 0x00ff00ffffff01ff, 0x00ff00ffff000101, + 0x00ff00ff00000000, 0x00ff00ff000101ff, 0x00ff00ff01010101, 0x00ff0000ff000000, + 0x00ff0000ff01ffff, 0x00ff000000ff0000, 0x00ff00000000ff00, 0x00ff0000000000ff, + 0x00ff000000000000, 0x00ff000000000001, 0x00ff000000000100, 0x00ff000000010000, + 0x00ff000001ffff01, 0x00ff000001000000, 0x00ff0001ff000101, 0x00ff000100ffffff, + 0x00ff000100000000, 0x00ff0001010001ff, 0x00ff01ffff000000, 0x00ff01ff0001ff00, + 0x00ff01ff01ff0100, 0x00ff0100ff01ff01, 0x00ff010000ff00ff, 0x00ff010000ff0101, + 0x00ff010000000000, 0x00ff010000010101, 0x00ff01000100ff00, 0x00ff010001010000, + 0x00ff0101ffffff00, 0x00ff01010000ff01, 0x00ff010100000100, 0x00ff010101ff0000, + 0x0000ffffffff0100, 0x0000ffffff00ff00, 0x0000ffffff0000ff, 0x0000ffffff010000, + 0x0000ffff00000000, 0x0000ffff00010101, 0x0000ffff01ffff01, 0x0000ffff01000100, + 0x0000ff00ff000000, 0x0000ff00ff01ff00, 0x0000ff00ff0101ff, 0x0000ff0000ff0000, + 0x0000ff000000ff00, 0x0000ff00000000ff, 0x0000ff0000000000, 0x0000ff0000000001, + 0x0000ff0000000100, 0x0000ff0000010000, 0x0000ff0001ffffff, 0x0000ff0001ff01ff, + 0x0000ff0001000000, 0x0000ff000101ffff, 0x0000ff01ffff0101, 0x0000ff01ff010000, + 0x0000ff0100000000, 0x0000ff0101000101, 0x000000ffffff0001, 0x000000ffff000000, + 0x000000ff00ff0000, 0x000000ff0000ff00, 0x000000ff000000ff, 0x000000ff00000000, + 0x000000ff00000001, 0x000000ff00000100, 0x000000ff00010000, 0x000000ff01000000, + 0x000000ff0101ff00, 0x00000000ffff0000, 0x00000000ff00ff00, 0x00000000ff0000ff, + 0x00000000ff000000, 0x00000000ff000001, 0x00000000ff000100, 0x00000000ff010000, + 0x0000000000ffff00, 0x0000000000ff00ff, 0x0000000000ff0000, 0x0000000000ff0001, + 0x0000000000ff0100, 0x000000000000ffff, 0x000000000000ff00, 0x000000000000ff01, + 0x00000000000000ff, 0x0000000000000001, 0x00000000000001ff, 0x0000000000000100, + 0x0000000000000101, 0x000000000001ff00, 0x00000000000100ff, 0x0000000000010000, + 0x0000000000010001, 0x0000000000010100, 0x0000000001ff0000, 0x000000000100ff00, + 0x00000000010000ff, 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, + 0x0000000001010000, 0x00000001ffff01ff, 0x00000001ff000000, 0x0000000100ff0000, + 0x000000010000ff00, 0x00000001000000ff, 0x0000000100000000, 0x0000000100000001, + 0x0000000100000100, 0x0000000100010000, 0x0000000101000000, 0x000001ffff00ff00, + 0x000001ffff010001, 0x000001ffff0101ff, 0x000001ff00ffff01, 0x000001ff0000ffff, + 0x000001ff00000000, 0x000001ff010000ff, 0x000001ff01010100, 0x00000100ffff0100, + 0x00000100ff000000, 0x0000010000ff0000, 0x000001000000ff00, 0x00000100000000ff, + 0x0000010000000000, 0x0000010000000001, 0x0000010000000100, 0x0000010000010000, + 0x0000010001000000, 0x000001000101ff01, 0x00000101ffff0001, 
0x00000101ff01ffff, + 0x0000010100000000, 0x0000010101010100, 0x0001ffffff000000, 0x0001ffff00ffffff, + 0x0001ffff00000100, 0x0001ffff0001ff00, 0x0001ffff01000000, 0x0001ff00ffffff00, + 0x0001ff00ffff01ff, 0x0001ff00ff010000, 0x0001ff0000000000, 0x0001ff0000010001, + 0x0001ff0001ff0000, 0x0001ff0001010100, 0x0001ff01ff0000ff, 0x0001ff01ff000001, + 0x0001ff0100ffffff, 0x0001ff010001ffff, 0x0001ff01000101ff, 0x0001ff010100ff01, + 0x000100ffff00ffff, 0x000100ffff00ff01, 0x000100ffff000100, 0x000100ff00000000, + 0x000100ff000101ff, 0x000100ff01ff0101, 0x000100ff0100ffff, 0x000100ff01010101, + 0x00010000ff000000, 0x00010000ff010100, 0x0001000000ff0000, 0x000100000000ff00, + 0x00010000000000ff, 0x0001000000000000, 0x0001000000000001, 0x0001000000000100, + 0x0001000000010000, 0x0001000001ffff01, 0x0001000001000000, 0x0001000100ff0101, + 0x0001000100000000, 0x00010001010100ff, 0x000101ffffff01ff, 0x000101ffffff0101, + 0x000101ff00010000, 0x000101ff01ff0000, 0x000101ff0100ff01, 0x00010100ffff0000, + 0x0001010000000000, 0x000101000001ffff, 0x0001010000010101, 0x00010100010001ff, + 0x00010101ff00ff00, 0x00010101ff010001, 0x0001010100ffffff, 0x0001010100ff01ff, + 0x00010101000101ff, 0x0001010101ff0000, 0x000101010100ff01, 0x0001010101000101, + 0x01ffffffffff0101, 0x01ffffffff01ffff, 0x01ffffffff01ff01, 0x01ffffffff0101ff, + 0x01ffffffff010101, 0x01ffffff00000000, 0x01ffffff01ff01ff, 0x01ffffff01000101, + 0x01ffffff0101ff01, 0x01ffffff010100ff, 0x01ffff000000ff00, 0x01ffff0000000001, + 0x01ffff00000001ff, 0x01ffff0000010000, 0x01ffff0001ff0000, 0x01ffff01ffffffff, + 0x01ffff01ffff01ff, 0x01ffff01ff000000, 0x01ffff01ff01ffff, 0x01ffff01ff0101ff, + 0x01ffff010100ffff, 0x01ff00ffffff0000, 0x01ff00ffff010000, 0x01ff00ff00ffff01, + 0x01ff0000ff0000ff, 0x01ff000000000000, 0x01ff00000001ff01, 0x01ff000001ffffff, + 0x01ff000001010100, 0x01ff0001ffffff01, 0x01ff0001ff010001, 0x01ff000101ff0100, + 0x01ff000101000001, 0x01ff0001010100ff, 0x01ff01ffff00ffff, 0x01ff01ff00010001, + 0x01ff01ff01000000, 0x01ff01ff010101ff, 0x01ff0100ff000001, 0x01ff010000ffff00, + 0x01ff010000000100, 0x01ff010001ff01ff, 0x01ff01000101ffff, 0x01ff0101ffff00ff, + 0x01ff0101ffff0101, 0x01ff0101ff0101ff, 0x01ff010100010000, 0x0100ffff00ff00ff, + 0x0100ffff00ff0001, 0x0100ffff00000100, 0x0100ffff0100ff00, 0x0100ff00ffff0000, + 0x0100ff00ff00ffff, 0x0100ff00ff00ff01, 0x0100ff00ff000100, 0x0100ff00ff010000, + 0x0100ff0000000000, 0x0100ff00000100ff, 0x0100ff0001ff0101, 0x0100ff0001010101, + 0x0100ff0100ff00ff, 0x0100ff0100ff0001, 0x0100ff0100000100, 0x0100ff0100010001, + 0x0100ff0101000000, 0x010000ffff00ff00, 0x010000ff0000ffff, 0x010000ff00000000, + 0x010000ff010001ff, 0x010000ff01010001, 0x01000000ffffff00, 0x01000000ffff0101, + 0x01000000ff000000, 0x01000000ff0100ff, 0x01000000ff010101, 0x0100000000ff0000, + 0x010000000000ff00, 0x01000000000000ff, 0x0100000000000000, 0x0100000000000001, + 0x0100000000000100, 0x0100000000010000, 0x0100000001000000, 0x0100000100000000, + 0x01000001000101ff, 0x0100000101ffff01, 0x010001ffff000101, 0x010001ff00ff0100, + 0x010001ff0000ff00, 0x010001ff000100ff, 0x010001ff01ffffff, 0x01000100ffff0000, + 0x01000100ff0001ff, 0x0100010000000000, 0x010001000001ff00, 0x0100010001ff0000, + 0x01000100010000ff, 0x0100010001000101, 0x01000101ff00ff01, 0x0100010100ff0100, + 0x010001010000ffff, 0x0100010101010001, 0x0101ffffffff0101, 0x0101ffffff0001ff, + 0x0101ffffff01ffff, 0x0101ffffff010101, 0x0101ffff00000000, 0x0101ffff0101ffff, + 0x0101ffff010101ff, 0x0101ff00ff000000, 0x0101ff0000ff0100, 0x0101ff000000ff00, + 
0x0101ff0000010000, 0x0101ff00010000ff, 0x0101ff0001000001, 0x0101ff01ff010101, + 0x0101ff0100000000, 0x0101ff010101ff00, 0x010100ffffff0000, 0x010100ffff010000, + 0x010100ff00ff01ff, 0x010100ff000000ff, 0x010100ff00000101, 0x010100ff01ffff00, + 0x01010000ffffff01, 0x01010000ff000100, 0x01010000ff01ff01, 0x0101000000000000, + 0x01010000000100ff, 0x010100000101ff01, 0x01010001ffff0000, 0x01010001ff00ffff, + 0x01010001ff010000, 0x0101000101ffffff, 0x0101000101ff01ff, 0x0101000101010101, + 0x010101ffff01ffff, 0x010101ff00000000, 0x010101ff0001ff01, 0x010101ff0101ffff, + 0x010101ff010101ff, 0x01010100ffffffff, 0x01010100ff000001, 0x010101000000ff00, + 0x0101010001010000, 0x0101010100ff0001, 0x010101010001ff01, 0x010101010101ffff, +}; + static const __device__ uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -1823,6 +1963,29 @@ static __global__ void dequantize_block_iq3_xxs(const void * __restrict__ vx, ds } +template +static __global__ void dequantize_block_iq1_s(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq1_s * x = (const block_iq1_s *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const int i8 = 4*ib+il; + uint8_t h = x[i].scales[i8/2] >> 4*(i8%2); + const int8_t * grid = (const int8_t *)(iq1s_grid + (x[i].qs[i8] | ((h & 8) << 5))); + const float d = (float)x[i].d * (2*(h & 7) + 1); + for (int j = 0; j < 8; ++j) y[j] = d * grid[j]; +#else + assert(false); +#endif + +} + + static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { static_assert(16%K_QUANTS_PER_ITERATION == 0, "16 must be divisible by K_QUANTS_PER_ITERATION"); @@ -4522,6 +4685,49 @@ static __device__ __forceinline__ float vec_dot_iq3_xxs_q8_1( #endif } +static __device__ __forceinline__ float vec_dot_iq1_s_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if QK_K == 256 + const block_iq1_s * bq1 = (const block_iq1_s *) vbq; + + const int ib32 = iqs; + int sumi1 = 0, sumi2 = 0, sumi3 = 0, sumi4 = 0; + const uint8_t h1 = bq1->scales[2*ib32+0]; + const uint8_t h2 = bq1->scales[2*ib32+1]; +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics + const int * q8 = (const int *)bq8_1[ib32].qs; + const int * grid1 = (const int *)(iq1s_grid + (bq1->qs[4*ib32+0] | ((h1 & 0x08) << 5))); + const int * grid2 = (const int *)(iq1s_grid + (bq1->qs[4*ib32+1] | ((h1 & 0x80) << 1))); + const int * grid3 = (const int *)(iq1s_grid + (bq1->qs[4*ib32+2] | ((h2 & 0x08) << 5))); + const int * grid4 = (const int *)(iq1s_grid + (bq1->qs[4*ib32+3] | ((h2 & 0x80) << 1))); + for (int j = 0; j < 2; ++j) { + sumi1 = __dp4a(q8[j+0], grid1[j], sumi1); + sumi2 = __dp4a(q8[j+2], grid2[j], sumi2); + sumi3 = __dp4a(q8[j+4], grid3[j], sumi3); + sumi4 = __dp4a(q8[j+6], grid4[j], sumi4); + } +#else + const int8_t * q8 = bq8_1[ib32].qs; + const int8_t * grid1 = (const int8_t *)(iq1s_grid + (bq1->qs[4*ib32+0] | ((h1 & 0x08) << 5))); + const int8_t * grid2 = (const int8_t *)(iq1s_grid + (bq1->qs[4*ib32+1] | ((h1 & 0x80) << 1))); + const int8_t * grid3 = (const int8_t *)(iq1s_grid + (bq1->qs[4*ib32+2] | ((h2 & 0x08) << 5))); + const int8_t * grid4 = (const int8_t *)(iq1s_grid + 
(bq1->qs[4*ib32+3] | ((h2 & 0x80) << 1))); + for (int j = 0; j < 8; ++j) { + sumi1 += q8[j+ 0] * grid1[j]; + sumi2 += q8[j+ 8] * grid2[j]; + sumi3 += q8[j+16] * grid3[j]; + sumi4 += q8[j+24] * grid4[j]; + } +#endif + const float d = (float)bq1->d * __low2float(bq8_1[ib32].ds); + return d * (sumi1 * (2*(h1 & 7) + 1) + sumi2 * (2*((h1 >> 4) & 7) + 1) + + sumi3 * (2*(h2 & 7) + 1) + sumi4 * (2*((h2 >> 4) & 7) + 1)); +#else + assert(false); + return 0.f; +#endif +} + template static __device__ __forceinline__ void mul_mat_q( @@ -6561,6 +6767,12 @@ static void dequantize_row_iq3_xxs_cuda(const void * vx, dst_t * y, const int k, dequantize_block_iq3_xxs<<>>(vx, y); } +template +static void dequantize_row_iq1_s_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq1_s<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -6600,6 +6812,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_iq2_xs_cuda; case GGML_TYPE_IQ3_XXS: return dequantize_row_iq3_xxs_cuda; + case GGML_TYPE_IQ1_S: + return dequantize_row_iq1_s_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -6635,6 +6849,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_iq2_xs_cuda; case GGML_TYPE_IQ3_XXS: return dequantize_row_iq3_xxs_cuda; + case GGML_TYPE_IQ1_S: + return dequantize_row_iq1_s_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -8378,6 +8594,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -8401,6 +8618,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_VOLTA ? 
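// The decode that these kernels share, restated as scalar CPU code (sketch
// mirroring dequantize_block_iq1_s above; assumes the full 512-entry
// iq1s_grid table is linked in, and the function name is made up):

#include <stdint.h>

extern const uint64_t iq1s_grid[512]; // each entry packs 8 ternary weights as int8 bytes

// qs: low 8 bits of the grid index for one group of 8 weights
// h : the group's 4-bit scale field (3-bit sub-scale | 1 high index bit)
static void iq1s_decode_group(float * y, float d, uint8_t qs, uint8_t h) {
    // bit 3 of h becomes bit 8 of the index, selecting among 512 grid entries
    const int8_t * grid = (const int8_t *)(iq1s_grid + (qs | ((h & 8) << 5)));
    const float dl = d * (2*(h & 7) + 1); // odd sub-scales 1, 3, ..., 15
    for (int j = 0; j < 8; ++j) {
        y[j] = dl * grid[j]; // grid bytes are -1, 0 or +1
    }
}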
128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -8498,6 +8716,10 @@ static void ggml_cuda_op_mul_mat_vec_q( mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; + case GGML_TYPE_IQ1_S: + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); + break; default: GGML_ASSERT(false); break; @@ -11214,7 +11436,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons return false; } ggml_type a_type = a->type; - if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS) { + if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || a_type == GGML_TYPE_IQ1_S) { if (b->ne[1] == 1 && ggml_nrows(b) > 1) { return false; } diff --git a/ggml-metal.m b/ggml-metal.m index c0848a293..f3c1fff8f 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -61,6 +61,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, GGML_METAL_KERNEL_TYPE_RMS_NORM, GGML_METAL_KERNEL_TYPE_GROUP_NORM, @@ -83,6 +84,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, @@ -101,6 +103,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, @@ -116,6 +119,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, @@ -131,6 +135,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_ROPE_F32, GGML_METAL_KERNEL_TYPE_ROPE_F16, GGML_METAL_KERNEL_TYPE_ALIBI_F32, @@ -433,6 +438,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); @@ -455,6 +461,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, 
ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); @@ -473,6 +480,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, mul_mm_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); @@ -488,6 +496,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); @@ -503,6 +512,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); @@ -1318,6 +1328,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; 
break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } @@ -1452,6 +1463,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32].pipeline; } break; + case GGML_TYPE_IQ1_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1486,7 +1503,7 @@ static bool ggml_metal_graph_compute( if (src0t == GGML_TYPE_Q4_0 || src0t == GGML_TYPE_Q4_1 || src0t == GGML_TYPE_Q5_0 || src0t == GGML_TYPE_Q5_1 || src0t == GGML_TYPE_Q8_0 || - src0t == GGML_TYPE_Q2_K) { // || src0t == GGML_TYPE_Q4_K) { + src0t == GGML_TYPE_Q2_K || src0t == GGML_TYPE_IQ1_S) { // || src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src0t == GGML_TYPE_IQ2_XXS || src0t == GGML_TYPE_IQ2_XS) { @@ -1592,6 +1609,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } @@ -1729,6 +1747,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32].pipeline; } break; + case GGML_TYPE_IQ1_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1779,7 +1803,7 @@ static bool ggml_metal_graph_compute( if (src2t == GGML_TYPE_Q4_0 || src2t == GGML_TYPE_Q4_1 || src2t == GGML_TYPE_Q5_0 || src2t == GGML_TYPE_Q5_1 || src2t == GGML_TYPE_Q8_0 || - src2t == GGML_TYPE_Q2_K) { // || src2t == GGML_TYPE_Q4_K) { + src2t == GGML_TYPE_Q2_K || src2t == GGML_TYPE_IQ1_S) { // || src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } else if (src2t == GGML_TYPE_IQ2_XXS || src2t == GGML_TYPE_IQ2_XS) { @@ -1833,6 +1857,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS].pipeline; break; + case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index 09ebcc9e3..a00962111 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2525,6 +2525,13 @@ typedef struct { } block_iq3_xxs; // 98 bytes / block for QK_K = 256, so 
3.0625 bpw +typedef struct { + half d; + uint8_t qs[QK_K/8]; + uint8_t scales[QK_K/16]; +} block_iq1_s; + + //====================================== dot products ========================= void kernel_mul_mv_q2_K_f32_impl( @@ -3782,6 +3789,137 @@ constexpr constant static uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +#define NGRID_IQ1S 512 +constexpr constant static uint64_t iq1s_grid[NGRID_IQ1S] = { + 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, + 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, + 0xffffff00ff000000, 0xffffff000000ff00, 0xffffff00000000ff, 0xffffff0000000100, + 0xffffff0000010000, 0xffffff0001000000, 0xffffff01ffff00ff, 0xffffff01ff01ff00, + 0xffffff01ff010100, 0xffffff0100000001, 0xffffff0101ffff00, 0xffffff0101ff0101, + 0xffffff0101010100, 0xffff00ffff00ff01, 0xffff00ffff0000ff, 0xffff00ff00ff0100, + 0xffff00ff0100ff00, 0xffff00ff010001ff, 0xffff0000ff0101ff, 0xffff000000ffff00, + 0xffff000000000000, 0xffff00000001ff01, 0xffff000001000101, 0xffff0000010100ff, + 0xffff0001ffff0100, 0xffff00010000ff00, 0xffff000100010101, 0xffff000101000000, + 0xffff01ffffff0000, 0xffff01ffff01ffff, 0xffff01ffff010100, 0xffff01ff00000000, + 0xffff01ff01ffffff, 0xffff01ff01ff0001, 0xffff01ff0101ffff, 0xffff01ff01010001, + 0xffff0100ffffff01, 0xffff01000000ffff, 0xffff010000000100, 0xffff010001ff01ff, + 0xffff010001000000, 0xffff0101ff000000, 0xffff0101000101ff, 0xffff010101ffff01, + 0xffff01010101ff00, 0xff00ffffff000000, 0xff00ffff00ffff00, 0xff00ffff00000001, + 0xff00ffff000001ff, 0xff00ffff01010000, 0xff00ff00ffff0000, 0xff00ff00ff00ff00, + 0xff00ff00ff0000ff, 0xff00ff00ff000100, 0xff00ff00ff010001, 0xff00ff0000ff0001, + 0xff00ff000000ffff, 0xff00ff0000000000, 0xff00ff000001ff00, 0xff00ff0000010100, + 0xff00ff0001ff0000, 0xff00ff000100ff00, 0xff00ff0001000100, 0xff00ff01ff000000, + 0xff00ff0100ff0000, 0xff00ff01000001ff, 0xff00ff0101010001, 0xff0000ff00000000, + 0xff0000ff0001ff00, 0xff0000ff00010100, 0xff000000ffff0101, 0xff000000ff000000, + 0xff000000ff01ff00, 0xff00000000ff0000, 0xff0000000000ff00, 0xff000000000000ff, + 0xff00000000000000, 0xff00000000000001, 0xff00000000000100, 0xff0000000001ffff, + 0xff00000000010000, 0xff00000001000000, 0xff00000001010100, 0xff000001ff00ff01, + 0xff000001ff0100ff, 0xff00000100000000, 0xff0000010001ff00, 0xff00000101ff0100, + 0xff0000010100ff00, 0xff0001ff00ff00ff, 0xff0001ff00000101, 0xff0001ff000100ff, + 0xff0001ff01000000, 0xff000100ff0001ff, 0xff0001000000ff01, 0xff00010000000000, + 0xff00010000010001, 0xff00010000010100, 0xff00010001ffff00, 0xff00010001ff0101, + 0xff00010001010000, 0xff000101ffffffff, 0xff000101ff000101, 0xff00010101ff00ff, + 0xff00010101000001, 0xff000101010100ff, 0xff01ffffff000101, 0xff01ffffff01ffff, + 0xff01ffffff01ff01, 0xff01ffffff0101ff, 0xff01ffff00000000, 0xff01ffff01ff0001, + 0xff01ffff0101ff01, 0xff01ff00ff000000, 0xff01ff0000ff0100, 0xff01ff000000ff01, + 0xff01ff0000010000, 0xff01ff00010000ff, 0xff01ff01ff01ff00, 0xff01ff0100000101, + 0xff0100ffffff0000, 0xff0100ffff010000, 0xff0100ff01ff00ff, 0xff0100ff01000100, + 0xff0100ff010100ff, 0xff010000ffffff01, 0xff01000000000000, 0xff0100000101ff00, + 0xff010001ffff00ff, 0xff010001ff000100, 0xff01000100ffff00, 0xff01000100010001, + 0xff01000101ff0001, 0xff010001010001ff, 0xff0101ffffffffff, 0xff0101ffff01ffff, + 0xff0101ffff010101, 0xff0101ff0000ff00, 0xff0101ff01010001, 0xff010100ff000000, + 0xff010100ff01ff01, 
0xff01010000ff0001, 0xff01010000000100, 0xff01010001000000, + 0xff0101010100ffff, 0x00ffffff0000ff01, 0x00ffffff000000ff, 0x00ffffff00000100, + 0x00ffffff00010000, 0x00ffff00ffff0001, 0x00ffff00ff0000ff, 0x00ffff00ff000100, + 0x00ffff0000000000, 0x00ffff0001000100, 0x00ffff0001010001, 0x00ffff01ff00ff01, + 0x00ffff0100ff0100, 0x00ffff010000ff00, 0x00ffff01000100ff, 0x00ffff0101ff00ff, + 0x00ffff010101ff00, 0x00ff00ffffffffff, 0x00ff00ffffff01ff, 0x00ff00ffff000101, + 0x00ff00ff00000000, 0x00ff00ff000101ff, 0x00ff00ff01010101, 0x00ff0000ff000000, + 0x00ff0000ff01ffff, 0x00ff000000ff0000, 0x00ff00000000ff00, 0x00ff0000000000ff, + 0x00ff000000000000, 0x00ff000000000001, 0x00ff000000000100, 0x00ff000000010000, + 0x00ff000001ffff01, 0x00ff000001000000, 0x00ff0001ff000101, 0x00ff000100ffffff, + 0x00ff000100000000, 0x00ff0001010001ff, 0x00ff01ffff000000, 0x00ff01ff0001ff00, + 0x00ff01ff01ff0100, 0x00ff0100ff01ff01, 0x00ff010000ff00ff, 0x00ff010000ff0101, + 0x00ff010000000000, 0x00ff010000010101, 0x00ff01000100ff00, 0x00ff010001010000, + 0x00ff0101ffffff00, 0x00ff01010000ff01, 0x00ff010100000100, 0x00ff010101ff0000, + 0x0000ffffffff0100, 0x0000ffffff00ff00, 0x0000ffffff0000ff, 0x0000ffffff010000, + 0x0000ffff00000000, 0x0000ffff00010101, 0x0000ffff01ffff01, 0x0000ffff01000100, + 0x0000ff00ff000000, 0x0000ff00ff01ff00, 0x0000ff00ff0101ff, 0x0000ff0000ff0000, + 0x0000ff000000ff00, 0x0000ff00000000ff, 0x0000ff0000000000, 0x0000ff0000000001, + 0x0000ff0000000100, 0x0000ff0000010000, 0x0000ff0001ffffff, 0x0000ff0001ff01ff, + 0x0000ff0001000000, 0x0000ff000101ffff, 0x0000ff01ffff0101, 0x0000ff01ff010000, + 0x0000ff0100000000, 0x0000ff0101000101, 0x000000ffffff0001, 0x000000ffff000000, + 0x000000ff00ff0000, 0x000000ff0000ff00, 0x000000ff000000ff, 0x000000ff00000000, + 0x000000ff00000001, 0x000000ff00000100, 0x000000ff00010000, 0x000000ff01000000, + 0x000000ff0101ff00, 0x00000000ffff0000, 0x00000000ff00ff00, 0x00000000ff0000ff, + 0x00000000ff000000, 0x00000000ff000001, 0x00000000ff000100, 0x00000000ff010000, + 0x0000000000ffff00, 0x0000000000ff00ff, 0x0000000000ff0000, 0x0000000000ff0001, + 0x0000000000ff0100, 0x000000000000ffff, 0x000000000000ff00, 0x000000000000ff01, + 0x00000000000000ff, 0x0000000000000001, 0x00000000000001ff, 0x0000000000000100, + 0x0000000000000101, 0x000000000001ff00, 0x00000000000100ff, 0x0000000000010000, + 0x0000000000010001, 0x0000000000010100, 0x0000000001ff0000, 0x000000000100ff00, + 0x00000000010000ff, 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, + 0x0000000001010000, 0x00000001ffff01ff, 0x00000001ff000000, 0x0000000100ff0000, + 0x000000010000ff00, 0x00000001000000ff, 0x0000000100000000, 0x0000000100000001, + 0x0000000100000100, 0x0000000100010000, 0x0000000101000000, 0x000001ffff00ff00, + 0x000001ffff010001, 0x000001ffff0101ff, 0x000001ff00ffff01, 0x000001ff0000ffff, + 0x000001ff00000000, 0x000001ff010000ff, 0x000001ff01010100, 0x00000100ffff0100, + 0x00000100ff000000, 0x0000010000ff0000, 0x000001000000ff00, 0x00000100000000ff, + 0x0000010000000000, 0x0000010000000001, 0x0000010000000100, 0x0000010000010000, + 0x0000010001000000, 0x000001000101ff01, 0x00000101ffff0001, 0x00000101ff01ffff, + 0x0000010100000000, 0x0000010101010100, 0x0001ffffff000000, 0x0001ffff00ffffff, + 0x0001ffff00000100, 0x0001ffff0001ff00, 0x0001ffff01000000, 0x0001ff00ffffff00, + 0x0001ff00ffff01ff, 0x0001ff00ff010000, 0x0001ff0000000000, 0x0001ff0000010001, + 0x0001ff0001ff0000, 0x0001ff0001010100, 0x0001ff01ff0000ff, 0x0001ff01ff000001, + 0x0001ff0100ffffff, 0x0001ff010001ffff, 
0x0001ff01000101ff, 0x0001ff010100ff01, + 0x000100ffff00ffff, 0x000100ffff00ff01, 0x000100ffff000100, 0x000100ff00000000, + 0x000100ff000101ff, 0x000100ff01ff0101, 0x000100ff0100ffff, 0x000100ff01010101, + 0x00010000ff000000, 0x00010000ff010100, 0x0001000000ff0000, 0x000100000000ff00, + 0x00010000000000ff, 0x0001000000000000, 0x0001000000000001, 0x0001000000000100, + 0x0001000000010000, 0x0001000001ffff01, 0x0001000001000000, 0x0001000100ff0101, + 0x0001000100000000, 0x00010001010100ff, 0x000101ffffff01ff, 0x000101ffffff0101, + 0x000101ff00010000, 0x000101ff01ff0000, 0x000101ff0100ff01, 0x00010100ffff0000, + 0x0001010000000000, 0x000101000001ffff, 0x0001010000010101, 0x00010100010001ff, + 0x00010101ff00ff00, 0x00010101ff010001, 0x0001010100ffffff, 0x0001010100ff01ff, + 0x00010101000101ff, 0x0001010101ff0000, 0x000101010100ff01, 0x0001010101000101, + 0x01ffffffffff0101, 0x01ffffffff01ffff, 0x01ffffffff01ff01, 0x01ffffffff0101ff, + 0x01ffffffff010101, 0x01ffffff00000000, 0x01ffffff01ff01ff, 0x01ffffff01000101, + 0x01ffffff0101ff01, 0x01ffffff010100ff, 0x01ffff000000ff00, 0x01ffff0000000001, + 0x01ffff00000001ff, 0x01ffff0000010000, 0x01ffff0001ff0000, 0x01ffff01ffffffff, + 0x01ffff01ffff01ff, 0x01ffff01ff000000, 0x01ffff01ff01ffff, 0x01ffff01ff0101ff, + 0x01ffff010100ffff, 0x01ff00ffffff0000, 0x01ff00ffff010000, 0x01ff00ff00ffff01, + 0x01ff0000ff0000ff, 0x01ff000000000000, 0x01ff00000001ff01, 0x01ff000001ffffff, + 0x01ff000001010100, 0x01ff0001ffffff01, 0x01ff0001ff010001, 0x01ff000101ff0100, + 0x01ff000101000001, 0x01ff0001010100ff, 0x01ff01ffff00ffff, 0x01ff01ff00010001, + 0x01ff01ff01000000, 0x01ff01ff010101ff, 0x01ff0100ff000001, 0x01ff010000ffff00, + 0x01ff010000000100, 0x01ff010001ff01ff, 0x01ff01000101ffff, 0x01ff0101ffff00ff, + 0x01ff0101ffff0101, 0x01ff0101ff0101ff, 0x01ff010100010000, 0x0100ffff00ff00ff, + 0x0100ffff00ff0001, 0x0100ffff00000100, 0x0100ffff0100ff00, 0x0100ff00ffff0000, + 0x0100ff00ff00ffff, 0x0100ff00ff00ff01, 0x0100ff00ff000100, 0x0100ff00ff010000, + 0x0100ff0000000000, 0x0100ff00000100ff, 0x0100ff0001ff0101, 0x0100ff0001010101, + 0x0100ff0100ff00ff, 0x0100ff0100ff0001, 0x0100ff0100000100, 0x0100ff0100010001, + 0x0100ff0101000000, 0x010000ffff00ff00, 0x010000ff0000ffff, 0x010000ff00000000, + 0x010000ff010001ff, 0x010000ff01010001, 0x01000000ffffff00, 0x01000000ffff0101, + 0x01000000ff000000, 0x01000000ff0100ff, 0x01000000ff010101, 0x0100000000ff0000, + 0x010000000000ff00, 0x01000000000000ff, 0x0100000000000000, 0x0100000000000001, + 0x0100000000000100, 0x0100000000010000, 0x0100000001000000, 0x0100000100000000, + 0x01000001000101ff, 0x0100000101ffff01, 0x010001ffff000101, 0x010001ff00ff0100, + 0x010001ff0000ff00, 0x010001ff000100ff, 0x010001ff01ffffff, 0x01000100ffff0000, + 0x01000100ff0001ff, 0x0100010000000000, 0x010001000001ff00, 0x0100010001ff0000, + 0x01000100010000ff, 0x0100010001000101, 0x01000101ff00ff01, 0x0100010100ff0100, + 0x010001010000ffff, 0x0100010101010001, 0x0101ffffffff0101, 0x0101ffffff0001ff, + 0x0101ffffff01ffff, 0x0101ffffff010101, 0x0101ffff00000000, 0x0101ffff0101ffff, + 0x0101ffff010101ff, 0x0101ff00ff000000, 0x0101ff0000ff0100, 0x0101ff000000ff00, + 0x0101ff0000010000, 0x0101ff00010000ff, 0x0101ff0001000001, 0x0101ff01ff010101, + 0x0101ff0100000000, 0x0101ff010101ff00, 0x010100ffffff0000, 0x010100ffff010000, + 0x010100ff00ff01ff, 0x010100ff000000ff, 0x010100ff00000101, 0x010100ff01ffff00, + 0x01010000ffffff01, 0x01010000ff000100, 0x01010000ff01ff01, 0x0101000000000000, + 0x01010000000100ff, 0x010100000101ff01, 0x01010001ffff0000, 
0x01010001ff00ffff, + 0x01010001ff010000, 0x0101000101ffffff, 0x0101000101ff01ff, 0x0101000101010101, + 0x010101ffff01ffff, 0x010101ff00000000, 0x010101ff0001ff01, 0x010101ff0101ffff, + 0x010101ff010101ff, 0x01010100ffffffff, 0x01010100ff000001, 0x010101000000ff00, + 0x0101010001010000, 0x0101010100ff0001, 0x010101010001ff01, 0x010101010101ffff, +}; constexpr constant static uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, @@ -4208,6 +4346,123 @@ kernel void kernel_mul_mv_iq3_xxs_f32( kernel_mul_mv_iq3_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); } +void kernel_mul_mv_iq1_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq1_s * x = (device const block_iq1_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[16]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + +#if QK_K == 256 + const int ix = tiisg/2; + const int il = tiisg%2; + + device const float * y4 = y + 32 * ix + 16 * il; + + for (int ib32 = ix; ib32 < nb32; ib32 += 16) { + + for (int i = 0; i < 16; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq1_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 4 * ib + 2 * il; + device const uint8_t * sc = xr->scales + 2 * ib + il; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + constant int8_t * grid1 = (constant int8_t *)(iq1s_grid + (qs[0] | ((sc[0] & 0x08) << 5))); + constant int8_t * grid2 = (constant int8_t *)(iq1s_grid + (qs[1] | ((sc[0] & 0x80) << 1))); + + float2 sum = {0}; + for (int j = 0; j < 8; ++j) { + sum[0] += yl[j+ 0] * grid1[j]; + sum[1] += yl[j+ 8] * grid2[j]; + } + sumf[row] += (float)dh[0] * (sum[0] * (2*(sc[0] & 7) + 1) + sum[1] * (2*((sc[0] >> 4) & 7) + 1)); + + dh += nb*sizeof(block_iq1_s)/2; + qs += nb*sizeof(block_iq1_s); + sc += nb*sizeof(block_iq1_s); + } + + y4 += 16 * 32; + } +#else + // TODO +#endif + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + +[[host_name("kernel_mul_mv_iq1_s_f32")]] +kernel void kernel_mul_mv_iq1_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + 
constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq1_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); +} + //============================= templates and their specializations ============================= @@ -4553,6 +4808,22 @@ void dequantize_iq3_xxs(device const block_iq3_xxs * xb, short il, thread type4x } } +template +void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + device const uint8_t * qs = xb->qs + 2*il; + device const uint8_t * sc = xb->scales + il; + const float dl1 = d * (2*(sc[0] & 7) + 1); + const float dl2 = d * (2*((sc[0] >> 4) & 7) + 1); + constant int8_t * grid1 = (constant int8_t *)(iq1s_grid + (qs[0] | ((sc[0] & 0x08) << 5))); + constant int8_t * grid2 = (constant int8_t *)(iq1s_grid + (qs[1] | ((sc[0] & 0x80) << 1))); + for (int i = 0; i < 8; ++i) { + reg[i/4+0][i%4] = dl1 * grid1[i]; + reg[i/4+2][i%4] = dl2 * grid2[i]; + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -5095,6 +5366,7 @@ template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -5134,6 +5406,7 @@ template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -5185,6 +5458,7 @@ template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -6152,3 +6426,66 @@ kernel void kernel_mul_mv_id_iq3_xxs_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq1_s_f32")]] +kernel void kernel_mul_mv_id_iq1_s_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + 
device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq1_s_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index f44377f45..48f5294e1 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3480,6 +3480,139 @@ static const uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +#define NGRID_IQ2XXS 512 +static const uint64_t iq1s_grid[NGRID_IQ2XXS] = { + 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, + 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, + 0xffffff00ff000000, 0xffffff000000ff00, 0xffffff00000000ff, 0xffffff0000000100, + 0xffffff0000010000, 0xffffff0001000000, 0xffffff01ffff00ff, 0xffffff01ff01ff00, + 0xffffff01ff010100, 0xffffff0100000001, 0xffffff0101ffff00, 0xffffff0101ff0101, + 0xffffff0101010100, 0xffff00ffff00ff01, 0xffff00ffff0000ff, 0xffff00ff00ff0100, + 0xffff00ff0100ff00, 0xffff00ff010001ff, 0xffff0000ff0101ff, 0xffff000000ffff00, + 0xffff000000000000, 0xffff00000001ff01, 0xffff000001000101, 0xffff0000010100ff, + 0xffff0001ffff0100, 0xffff00010000ff00, 0xffff000100010101, 0xffff000101000000, + 0xffff01ffffff0000, 0xffff01ffff01ffff, 0xffff01ffff010100, 0xffff01ff00000000, + 0xffff01ff01ffffff, 0xffff01ff01ff0001, 0xffff01ff0101ffff, 0xffff01ff01010001, + 0xffff0100ffffff01, 0xffff01000000ffff, 0xffff010000000100, 0xffff010001ff01ff, + 0xffff010001000000, 0xffff0101ff000000, 0xffff0101000101ff, 0xffff010101ffff01, + 0xffff01010101ff00, 0xff00ffffff000000, 0xff00ffff00ffff00, 0xff00ffff00000001, + 0xff00ffff000001ff, 0xff00ffff01010000, 0xff00ff00ffff0000, 0xff00ff00ff00ff00, + 0xff00ff00ff0000ff, 0xff00ff00ff000100, 0xff00ff00ff010001, 0xff00ff0000ff0001, + 0xff00ff000000ffff, 0xff00ff0000000000, 0xff00ff000001ff00, 0xff00ff0000010100, + 0xff00ff0001ff0000, 0xff00ff000100ff00, 0xff00ff0001000100, 0xff00ff01ff000000, + 0xff00ff0100ff0000, 0xff00ff01000001ff, 0xff00ff0101010001, 0xff0000ff00000000, + 0xff0000ff0001ff00, 0xff0000ff00010100, 0xff000000ffff0101, 0xff000000ff000000, + 0xff000000ff01ff00, 0xff00000000ff0000, 0xff0000000000ff00, 0xff000000000000ff, + 0xff00000000000000, 0xff00000000000001, 0xff00000000000100, 0xff0000000001ffff, + 0xff00000000010000, 0xff00000001000000, 0xff00000001010100, 0xff000001ff00ff01, + 0xff000001ff0100ff, 0xff00000100000000, 0xff0000010001ff00, 0xff00000101ff0100, + 0xff0000010100ff00, 0xff0001ff00ff00ff, 0xff0001ff00000101, 0xff0001ff000100ff, + 0xff0001ff01000000, 0xff000100ff0001ff, 0xff0001000000ff01, 0xff00010000000000, + 0xff00010000010001, 0xff00010000010100, 0xff00010001ffff00, 0xff00010001ff0101, + 0xff00010001010000, 0xff000101ffffffff, 0xff000101ff000101, 0xff00010101ff00ff, + 0xff00010101000001, 0xff000101010100ff, 0xff01ffffff000101, 0xff01ffffff01ffff, + 0xff01ffffff01ff01, 0xff01ffffff0101ff, 0xff01ffff00000000, 
0xff01ffff01ff0001, + 0xff01ffff0101ff01, 0xff01ff00ff000000, 0xff01ff0000ff0100, 0xff01ff000000ff01, + 0xff01ff0000010000, 0xff01ff00010000ff, 0xff01ff01ff01ff00, 0xff01ff0100000101, + 0xff0100ffffff0000, 0xff0100ffff010000, 0xff0100ff01ff00ff, 0xff0100ff01000100, + 0xff0100ff010100ff, 0xff010000ffffff01, 0xff01000000000000, 0xff0100000101ff00, + 0xff010001ffff00ff, 0xff010001ff000100, 0xff01000100ffff00, 0xff01000100010001, + 0xff01000101ff0001, 0xff010001010001ff, 0xff0101ffffffffff, 0xff0101ffff01ffff, + 0xff0101ffff010101, 0xff0101ff0000ff00, 0xff0101ff01010001, 0xff010100ff000000, + 0xff010100ff01ff01, 0xff01010000ff0001, 0xff01010000000100, 0xff01010001000000, + 0xff0101010100ffff, 0x00ffffff0000ff01, 0x00ffffff000000ff, 0x00ffffff00000100, + 0x00ffffff00010000, 0x00ffff00ffff0001, 0x00ffff00ff0000ff, 0x00ffff00ff000100, + 0x00ffff0000000000, 0x00ffff0001000100, 0x00ffff0001010001, 0x00ffff01ff00ff01, + 0x00ffff0100ff0100, 0x00ffff010000ff00, 0x00ffff01000100ff, 0x00ffff0101ff00ff, + 0x00ffff010101ff00, 0x00ff00ffffffffff, 0x00ff00ffffff01ff, 0x00ff00ffff000101, + 0x00ff00ff00000000, 0x00ff00ff000101ff, 0x00ff00ff01010101, 0x00ff0000ff000000, + 0x00ff0000ff01ffff, 0x00ff000000ff0000, 0x00ff00000000ff00, 0x00ff0000000000ff, + 0x00ff000000000000, 0x00ff000000000001, 0x00ff000000000100, 0x00ff000000010000, + 0x00ff000001ffff01, 0x00ff000001000000, 0x00ff0001ff000101, 0x00ff000100ffffff, + 0x00ff000100000000, 0x00ff0001010001ff, 0x00ff01ffff000000, 0x00ff01ff0001ff00, + 0x00ff01ff01ff0100, 0x00ff0100ff01ff01, 0x00ff010000ff00ff, 0x00ff010000ff0101, + 0x00ff010000000000, 0x00ff010000010101, 0x00ff01000100ff00, 0x00ff010001010000, + 0x00ff0101ffffff00, 0x00ff01010000ff01, 0x00ff010100000100, 0x00ff010101ff0000, + 0x0000ffffffff0100, 0x0000ffffff00ff00, 0x0000ffffff0000ff, 0x0000ffffff010000, + 0x0000ffff00000000, 0x0000ffff00010101, 0x0000ffff01ffff01, 0x0000ffff01000100, + 0x0000ff00ff000000, 0x0000ff00ff01ff00, 0x0000ff00ff0101ff, 0x0000ff0000ff0000, + 0x0000ff000000ff00, 0x0000ff00000000ff, 0x0000ff0000000000, 0x0000ff0000000001, + 0x0000ff0000000100, 0x0000ff0000010000, 0x0000ff0001ffffff, 0x0000ff0001ff01ff, + 0x0000ff0001000000, 0x0000ff000101ffff, 0x0000ff01ffff0101, 0x0000ff01ff010000, + 0x0000ff0100000000, 0x0000ff0101000101, 0x000000ffffff0001, 0x000000ffff000000, + 0x000000ff00ff0000, 0x000000ff0000ff00, 0x000000ff000000ff, 0x000000ff00000000, + 0x000000ff00000001, 0x000000ff00000100, 0x000000ff00010000, 0x000000ff01000000, + 0x000000ff0101ff00, 0x00000000ffff0000, 0x00000000ff00ff00, 0x00000000ff0000ff, + 0x00000000ff000000, 0x00000000ff000001, 0x00000000ff000100, 0x00000000ff010000, + 0x0000000000ffff00, 0x0000000000ff00ff, 0x0000000000ff0000, 0x0000000000ff0001, + 0x0000000000ff0100, 0x000000000000ffff, 0x000000000000ff00, 0x000000000000ff01, + 0x00000000000000ff, 0x0000000000000001, 0x00000000000001ff, 0x0000000000000100, + 0x0000000000000101, 0x000000000001ff00, 0x00000000000100ff, 0x0000000000010000, + 0x0000000000010001, 0x0000000000010100, 0x0000000001ff0000, 0x000000000100ff00, + 0x00000000010000ff, 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, + 0x0000000001010000, 0x00000001ffff01ff, 0x00000001ff000000, 0x0000000100ff0000, + 0x000000010000ff00, 0x00000001000000ff, 0x0000000100000000, 0x0000000100000001, + 0x0000000100000100, 0x0000000100010000, 0x0000000101000000, 0x000001ffff00ff00, + 0x000001ffff010001, 0x000001ffff0101ff, 0x000001ff00ffff01, 0x000001ff0000ffff, + 0x000001ff00000000, 0x000001ff010000ff, 0x000001ff01010100, 0x00000100ffff0100, + 
0x00000100ff000000, 0x0000010000ff0000, 0x000001000000ff00, 0x00000100000000ff, + 0x0000010000000000, 0x0000010000000001, 0x0000010000000100, 0x0000010000010000, + 0x0000010001000000, 0x000001000101ff01, 0x00000101ffff0001, 0x00000101ff01ffff, + 0x0000010100000000, 0x0000010101010100, 0x0001ffffff000000, 0x0001ffff00ffffff, + 0x0001ffff00000100, 0x0001ffff0001ff00, 0x0001ffff01000000, 0x0001ff00ffffff00, + 0x0001ff00ffff01ff, 0x0001ff00ff010000, 0x0001ff0000000000, 0x0001ff0000010001, + 0x0001ff0001ff0000, 0x0001ff0001010100, 0x0001ff01ff0000ff, 0x0001ff01ff000001, + 0x0001ff0100ffffff, 0x0001ff010001ffff, 0x0001ff01000101ff, 0x0001ff010100ff01, + 0x000100ffff00ffff, 0x000100ffff00ff01, 0x000100ffff000100, 0x000100ff00000000, + 0x000100ff000101ff, 0x000100ff01ff0101, 0x000100ff0100ffff, 0x000100ff01010101, + 0x00010000ff000000, 0x00010000ff010100, 0x0001000000ff0000, 0x000100000000ff00, + 0x00010000000000ff, 0x0001000000000000, 0x0001000000000001, 0x0001000000000100, + 0x0001000000010000, 0x0001000001ffff01, 0x0001000001000000, 0x0001000100ff0101, + 0x0001000100000000, 0x00010001010100ff, 0x000101ffffff01ff, 0x000101ffffff0101, + 0x000101ff00010000, 0x000101ff01ff0000, 0x000101ff0100ff01, 0x00010100ffff0000, + 0x0001010000000000, 0x000101000001ffff, 0x0001010000010101, 0x00010100010001ff, + 0x00010101ff00ff00, 0x00010101ff010001, 0x0001010100ffffff, 0x0001010100ff01ff, + 0x00010101000101ff, 0x0001010101ff0000, 0x000101010100ff01, 0x0001010101000101, + 0x01ffffffffff0101, 0x01ffffffff01ffff, 0x01ffffffff01ff01, 0x01ffffffff0101ff, + 0x01ffffffff010101, 0x01ffffff00000000, 0x01ffffff01ff01ff, 0x01ffffff01000101, + 0x01ffffff0101ff01, 0x01ffffff010100ff, 0x01ffff000000ff00, 0x01ffff0000000001, + 0x01ffff00000001ff, 0x01ffff0000010000, 0x01ffff0001ff0000, 0x01ffff01ffffffff, + 0x01ffff01ffff01ff, 0x01ffff01ff000000, 0x01ffff01ff01ffff, 0x01ffff01ff0101ff, + 0x01ffff010100ffff, 0x01ff00ffffff0000, 0x01ff00ffff010000, 0x01ff00ff00ffff01, + 0x01ff0000ff0000ff, 0x01ff000000000000, 0x01ff00000001ff01, 0x01ff000001ffffff, + 0x01ff000001010100, 0x01ff0001ffffff01, 0x01ff0001ff010001, 0x01ff000101ff0100, + 0x01ff000101000001, 0x01ff0001010100ff, 0x01ff01ffff00ffff, 0x01ff01ff00010001, + 0x01ff01ff01000000, 0x01ff01ff010101ff, 0x01ff0100ff000001, 0x01ff010000ffff00, + 0x01ff010000000100, 0x01ff010001ff01ff, 0x01ff01000101ffff, 0x01ff0101ffff00ff, + 0x01ff0101ffff0101, 0x01ff0101ff0101ff, 0x01ff010100010000, 0x0100ffff00ff00ff, + 0x0100ffff00ff0001, 0x0100ffff00000100, 0x0100ffff0100ff00, 0x0100ff00ffff0000, + 0x0100ff00ff00ffff, 0x0100ff00ff00ff01, 0x0100ff00ff000100, 0x0100ff00ff010000, + 0x0100ff0000000000, 0x0100ff00000100ff, 0x0100ff0001ff0101, 0x0100ff0001010101, + 0x0100ff0100ff00ff, 0x0100ff0100ff0001, 0x0100ff0100000100, 0x0100ff0100010001, + 0x0100ff0101000000, 0x010000ffff00ff00, 0x010000ff0000ffff, 0x010000ff00000000, + 0x010000ff010001ff, 0x010000ff01010001, 0x01000000ffffff00, 0x01000000ffff0101, + 0x01000000ff000000, 0x01000000ff0100ff, 0x01000000ff010101, 0x0100000000ff0000, + 0x010000000000ff00, 0x01000000000000ff, 0x0100000000000000, 0x0100000000000001, + 0x0100000000000100, 0x0100000000010000, 0x0100000001000000, 0x0100000100000000, + 0x01000001000101ff, 0x0100000101ffff01, 0x010001ffff000101, 0x010001ff00ff0100, + 0x010001ff0000ff00, 0x010001ff000100ff, 0x010001ff01ffffff, 0x01000100ffff0000, + 0x01000100ff0001ff, 0x0100010000000000, 0x010001000001ff00, 0x0100010001ff0000, + 0x01000100010000ff, 0x0100010001000101, 0x01000101ff00ff01, 0x0100010100ff0100, + 0x010001010000ffff, 
0x0100010101010001, 0x0101ffffffff0101, 0x0101ffffff0001ff, + 0x0101ffffff01ffff, 0x0101ffffff010101, 0x0101ffff00000000, 0x0101ffff0101ffff, + 0x0101ffff010101ff, 0x0101ff00ff000000, 0x0101ff0000ff0100, 0x0101ff000000ff00, + 0x0101ff0000010000, 0x0101ff00010000ff, 0x0101ff0001000001, 0x0101ff01ff010101, + 0x0101ff0100000000, 0x0101ff010101ff00, 0x010100ffffff0000, 0x010100ffff010000, + 0x010100ff00ff01ff, 0x010100ff000000ff, 0x010100ff00000101, 0x010100ff01ffff00, + 0x01010000ffffff01, 0x01010000ff000100, 0x01010000ff01ff01, 0x0101000000000000, + 0x01010000000100ff, 0x010100000101ff01, 0x01010001ffff0000, 0x01010001ff00ffff, + 0x01010001ff010000, 0x0101000101ffffff, 0x0101000101ff01ff, 0x0101000101010101, + 0x010101ffff01ffff, 0x010101ff00000000, 0x010101ff0001ff01, 0x010101ff0101ffff, + 0x010101ff010101ff, 0x01010100ffffffff, 0x01010100ff000001, 0x010101000000ff00, + 0x0101010001010000, 0x0101010100ff0001, 0x010101010001ff01, 0x010101010101ffff, + +}; + static const uint8_t ksigns_iq2xs[128] = { 0, 129, 130, 3, 132, 5, 6, 135, 136, 9, 10, 139, 12, 141, 142, 15, 144, 17, 18, 147, 20, 149, 150, 23, 24, 153, 154, 27, 156, 29, 30, 159, @@ -3578,6 +3711,49 @@ void dequantize_row_iq3_xxs(const block_iq3_xxs * restrict x, float * restrict y } } +// ====================== 1.5625 bpw (de)-quantization + +void dequantize_row_iq1_s(const block_iq1_s * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + float db[4]; + uint16_t idx[4]; + //const int8_t * grid[4]; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * sc = x[i].scales; + const uint8_t * qs = x[i].qs; + + for (int i8 = 0; i8 < QK_K/8; i8 += 4) { + idx[0] = qs[0] | ((sc[0] & 0x08) << 5); + idx[1] = qs[1] | ((sc[0] & 0x80) << 1); + idx[2] = qs[2] | ((sc[1] & 0x08) << 5); + idx[3] = qs[3] | ((sc[1] & 0x80) << 1); + //grid[0] = (const int8_t *)(iq1s_grid + (qs[0] | ((sc[0] & 0x08) << 5))); + //grid[1] = (const int8_t *)(iq1s_grid + (qs[1] | ((sc[0] & 0x80) << 1))); + //grid[2] = (const int8_t *)(iq1s_grid + (qs[2] | ((sc[1] & 0x08) << 5))); + //grid[3] = (const int8_t *)(iq1s_grid + (qs[3] | ((sc[1] & 0x80) << 1))); + db[0] = d * (2*(sc[0] & 7) + 1); + db[1] = d * (2*((sc[0] >> 4) & 7) + 1); + db[2] = d * (2*(sc[1] & 7) + 1); + db[3] = d * (2*((sc[1] >> 4) & 7) + 1); + for (int l = 0; l < 4; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + idx[l]); + for (int j = 0; j < 8; ++j) { + //y[j] = db[l] * grid[l][j]; + y[j] = db[l] * grid[j]; + } + y += 8; + } + qs += 4; + sc += 2; + } + } +} + //===================================== Q8_K ============================================== void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -3679,7 +3855,7 @@ static inline __m128i get_scale_shuffle(int i) { } #endif -void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -3690,8 +3866,8 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q4_0 * restrict x = vx; @@ -4046,7 +4222,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r 
#endif } -void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_1; const int nb = n / qk; @@ -4057,8 +4233,8 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q4_1 * restrict x = vx; @@ -4264,7 +4440,7 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -4272,8 +4448,8 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(qk == QK5_0); assert(nrc == 1); UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q5_0 * restrict x = vx; @@ -4555,7 +4731,7 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_1; const int nb = n / qk; @@ -4563,8 +4739,8 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r assert(qk == QK5_1); assert(nrc == 1); UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q5_1 * restrict x = vx; @@ -4859,7 +5035,7 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r #endif } -void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { +void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) { const int qk = QK8_0; const int nb = n / qk; @@ -4870,8 +5046,8 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r assert(nrc == 1); #endif UNUSED(nrc); - UNUSED(bx); - UNUSED(by); + UNUSED(bbx); + UNUSED(bby); UNUSED(bs); const block_q8_0 * restrict x = vx; @@ -9107,6 +9283,178 @@ void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void #endif } +#ifdef __AVX2__ +static inline __m256i mul_add_epi8(const __m256i x, const __m256i y) { + const __m256i ax = _mm256_sign_epi8(x, x); + const __m256i sy = _mm256_sign_epi8(y, x); + return _mm256_maddubs_epi16(ax, sy); +} +#endif + +void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq1_s * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined __ARM_NEON + + const uint8x16_t m8 = 
vdupq_n_u8(0x08); + const uint8x16_t m7 = vdupq_n_u8(0x07); + const uint8x16_t m1 = vdupq_n_u8(0x01); + const int32x4_t vzero = vdupq_n_s32(0); + + uint16_t gindex[8]; + uint16x8x2_t vindex; + int8x16x4_t q1b; + int8x16x4_t q8b; + uint16x8x4_t scales; + int32x4x2_t sumi; + int32x4x2_t dotq; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * sc = x[i].scales; + + sumi.val[0] = sumi.val[1] = vzero; + + for (int i128 = 0; i128 < QK_K/128; ++i128) { + const uint8x16_t ql = vld1q_u8(qs); qs += 16; + const uint8x8_t tm1 = vld1_u8 (sc); sc += 8; + const uint8x8_t tm2 = vshr_n_u8(tm1, 4); + const uint8x16_t qh = vcombine_u8(vzip1_u8(tm1, tm2), vzip2_u8(tm1, tm2)); + const uint8x16_t hbit = vandq_u8(qh, m8); + vindex.val[0] = vorrq_u16(vmovl_u8(vget_low_u8 (ql)), vshlq_n_u16(vmovl_u8(vget_low_u8 (hbit)), 5)); + vindex.val[1] = vorrq_u16(vmovl_u8(vget_high_u8(ql)), vshlq_n_u16(vmovl_u8(vget_high_u8(hbit)), 5)); + const uint8x16_t scales8 = vorrq_u8(vshlq_n_u8(vandq_u8(qh, m7), 1), m1); + scales.val[0] = vmovl_u8(vget_low_u8 (scales8)); + scales.val[1] = vmovl_u8(vget_high_u8 (scales8)); + + for (int l = 0; l < 2; ++l) { + vst1q_u16(gindex+0, vindex.val[l]); + q1b.val[0] = vcombine_s8(vld1_s8((const void *)(iq1s_grid+gindex[0])), vld1_s8((const void *)(iq1s_grid+gindex[1]))); + q1b.val[1] = vcombine_s8(vld1_s8((const void *)(iq1s_grid+gindex[2])), vld1_s8((const void *)(iq1s_grid+gindex[3]))); + q1b.val[2] = vcombine_s8(vld1_s8((const void *)(iq1s_grid+gindex[4])), vld1_s8((const void *)(iq1s_grid+gindex[5]))); + q1b.val[3] = vcombine_s8(vld1_s8((const void *)(iq1s_grid+gindex[6])), vld1_s8((const void *)(iq1s_grid+gindex[7]))); + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + + dotq.val[0] = vpaddq_s32(ggml_vdotq_s32(vzero, q1b.val[0], q8b.val[0]), ggml_vdotq_s32(vzero, q1b.val[1], q8b.val[1])); + dotq.val[1] = vpaddq_s32(ggml_vdotq_s32(vzero, q1b.val[2], q8b.val[2]), ggml_vdotq_s32(vzero, q1b.val[3], q8b.val[3])); + + sumi.val[0] = vmlaq_s32(sumi.val[0], dotq.val[0], vreinterpretq_s32_u32(vmovl_u16(vget_low_u16 (scales.val[l])))); + sumi.val[1] = vmlaq_s32(sumi.val[1], dotq.val[1], vreinterpretq_s32_u32(vmovl_u16(vget_high_u16(scales.val[l])))); + } + } + + sumf += y[i].d * GGML_FP16_TO_FP32(x[i].d) * vaddvq_s32(vaddq_s32(sumi.val[0], sumi.val[1])); + } + + *s = sumf; + +#elif defined __AVX2__ + + const __m128i m8 = _mm_set1_epi8(0x08); + const __m128i m7 = _mm_set1_epi8(0x07); + const __m128i m1 = _mm_set1_epi8(0x01); + const __m128i shuffle_h = _mm_set_epi8(15, 7, 14, 6, 13, 5, 12, 4, 11, 3, 10, 2, 9, 1, 8, 0); + const __m128i shuffle_s[4] = { + _mm_set_epi32(0x03030303, 0x02020202, 0x01010101, 0x00000000), + _mm_set_epi32(0x07070707, 0x06060606, 0x05050505, 0x04040404), + _mm_set_epi32(0x0b0b0b0b, 0x0a0a0a0a, 0x09090909, 0x08080808), + _mm_set_epi32(0x0f0f0f0f, 0x0e0e0e0e, 0x0d0d0d0d, 0x0c0c0c0c) + }; + + uint64_t aux64; + + __m256i v_gindex; + const uint16_t * gindex = (const uint16_t *)&v_gindex; + + __m256 accum = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * sc = x[i].scales; + + __m256i sumi = _mm256_setzero_si256(); + for (int i128 = 0; i128 < QK_K/128; ++i128) { + const __m128i ql = _mm_loadu_si128((const __m128i*)qs); qs += 16; + memcpy(&aux64, sc, 8); sc += 8; + const __m128i qh = _mm_shuffle_epi8(_mm_set_epi64x(aux64 >> 4, aux64), shuffle_h); + const __m256i hbit = _mm256_cvtepu8_epi16(_mm_and_si128(qh, m8)); + v_gindex 
= _mm256_or_si256(_mm256_cvtepu8_epi16(ql), _mm256_slli_epi16(hbit, 5)); + const __m128i scales = _mm_or_si128(_mm_slli_epi16(_mm_and_si128(qh, m7), 1), m1); + + for (int i32 = 0; i32 < 4; ++i32) { + const __m256i q8b = _mm256_loadu_si256((const __m256i*)q8); q8 += 32; + const __m256i q1b = _mm256_set_epi64x(iq1s_grid[gindex[4*i32+3]], iq1s_grid[gindex[4*i32+2]], + iq1s_grid[gindex[4*i32+1]], iq1s_grid[gindex[4*i32+0]]); + const __m256i dot = mul_add_epi8(q1b, q8b); + const __m256i s16 = _mm256_cvtepi8_epi16(_mm_shuffle_epi8(scales, shuffle_s[i32])); + const __m256i p = _mm256_madd_epi16(s16, dot); + sumi = _mm256_add_epi32(sumi, p); + } + + } + + accum = _mm256_fmadd_ps(_mm256_set1_ps(y[i].d * GGML_FP16_TO_FP32(x[i].d)), _mm256_cvtepi32_ps(sumi), accum); + + } + + *s = hsum_float_8(accum); + +#else + + int db[4]; + uint16_t idx[4]; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + + const int8_t * q8 = y[i].qs; + const uint8_t * qs = x[i].qs; + const uint8_t * sc = x[i].scales; + + int sumi = 0; + for (int i32 = 0; i32 < QK_K/32; ++i32) { + idx[0] = qs[0] | ((sc[0] & 0x08) << 5); + idx[1] = qs[1] | ((sc[0] & 0x80) << 1); + idx[2] = qs[2] | ((sc[1] & 0x08) << 5); + idx[3] = qs[3] | ((sc[1] & 0x80) << 1); + db[0] = (2*(sc[0] & 7) + 1); + db[1] = (2*((sc[0] >> 4) & 7) + 1); + db[2] = (2*(sc[1] & 7) + 1); + db[3] = (2*((sc[1] >> 4) & 7) + 1); + for (int l = 0; l < 4; ++l) { + const int8_t * grid = (const int8_t *)(iq1s_grid + idx[l]); + int suml = 0; + for (int j = 0; j < 8; ++j) suml += q8[j] * grid[j]; + sumi += db[l] * suml; + q8 += 8; + } + qs += 4; + sc += 2; + } + + sumf += GGML_FP16_TO_FP32(x[i].d) * y[i].d * sumi; + } + + *s = sumf; + +#endif + +} + // ================================ IQ2 quantization ============================================= typedef struct { @@ -9115,14 +9463,22 @@ typedef struct { uint16_t * neighbours; } iq2_entry_t; -static iq2_entry_t iq2_data[2] = { +static iq2_entry_t iq2_data[3] = { + {NULL, NULL, NULL}, {NULL, NULL, NULL}, {NULL, NULL, NULL}, }; -static inline int iq2_data_index(int grid_size) { - GGML_ASSERT(grid_size == 256 || grid_size == 512); - return grid_size == 256 ? 0 : 1; +static inline int iq2_data_index(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + return type == GGML_TYPE_IQ2_XXS ? 0 : + type == GGML_TYPE_IQ2_XS ? 1 : 2; +} + +static inline int iq2_grid_size(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + return type == GGML_TYPE_IQ2_XXS ? 256 : + type == GGML_TYPE_IQ2_XS ? 512 : 512; } static int iq2_compare_func(const void * left, const void * right) { @@ -9131,12 +9487,13 @@ static int iq2_compare_func(const void * left, const void * right) { return l[0] < r[0] ? -1 : l[0] > r[0] ? 1 : l[1] < r[1] ? -1 : l[1] > r[1] ? 
1 : 0; } -void iq2xs_init_impl(int grid_size) { - const int gindex = iq2_data_index(grid_size); +void iq2xs_init_impl(enum ggml_type type) { + const int gindex = iq2_data_index(type); + const int grid_size = iq2_grid_size(type); if (iq2_data[gindex].grid) { return; } - static const uint16_t kgrid_256[256] = { + static const uint16_t kgrid_2bit_256[256] = { 0, 2, 5, 8, 10, 17, 20, 32, 34, 40, 42, 65, 68, 80, 88, 97, 100, 128, 130, 138, 162, 257, 260, 272, 277, 320, 388, 408, 512, 514, 546, 642, 1025, 1028, 1040, 1057, 1060, 1088, 1090, 1096, 1120, 1153, 1156, 1168, 1188, 1280, 1282, 1288, @@ -9154,7 +9511,7 @@ void iq2xs_init_impl(int grid_size) { 33888, 34048, 34118, 34196, 34313, 34368, 34400, 34818, 35076, 35345, 36868, 36880, 36900, 36928, 37025, 37142, 37248, 37445, 37888, 37922, 37956, 38225, 39041, 39200, 40962, 41040, 41093, 41225, 41472, 42008, 43088, 43268, }; - static const uint16_t kgrid_512[512] = { + static const uint16_t kgrid_2bit_512[512] = { 0, 2, 5, 8, 10, 17, 20, 22, 25, 32, 34, 37, 40, 65, 68, 70, 73, 80, 82, 85, 88, 97, 100, 128, 130, 133, 136, 145, 148, 153, 160, 257, 260, 262, 265, 272, 274, 277, 280, 282, 289, 292, 320, 322, 325, 328, 337, 340, @@ -9188,9 +9545,45 @@ void iq2xs_init_impl(int grid_size) { 40962, 40968, 40970, 40992, 41002, 41120, 41297, 41305, 41382, 41472, 41474, 41480, 41514, 41600, 41632, 42048, 42133, 42597, 42648, 43018, 43040, 43042, 43048, 43168, 43176, 43268, 43396, 43398, 43560, 43562, 43665, 43690, }; + static const uint16_t kgrid_1bit_512[512] = { + 10, 33, 41, 85, 132, 134, 160, 162, 277, 337, 340, 345, 357, 405, 516, 545, + 553, 598, 641, 650, 681, 1042, 1044, 1097, 1169, 1176, 1320, 1345, 1365, 1378, 1434, 1444, + 1545, 1617, 1642, 1685, 2053, 2080, 2089, 2133, 2176, 2182, 2208, 2214, 2306, 2384, 2393, 2440, + 2453, 2581, 2664, 2690, 2721, 4117, 4161, 4182, 4184, 4261, 4357, 4369, 4372, 4377, 4390, 4422, + 4432, 4437, 4449, 4457, 4485, 4497, 4505, 4629, 4677, 4696, 4774, 5205, 5217, 5225, 5386, 5397, + 5409, 5445, 5457, 5460, 5461, 5462, 5465, 5472, 5477, 5525, 5545, 5650, 5668, 5717, 5729, 5769, + 5777, 6212, 6234, 6244, 6293, 6424, 6482, 6485, 6502, 6505, 6529, 6538, 6565, 6656, 6682, 6788, + 6806, 6820, 8218, 8224, 8226, 8232, 8277, 8326, 8354, 8469, 8521, 8530, 8549, 8596, 8737, 8794, + 9221, 9253, 9348, 9369, 9380, 9474, 9557, 9633, 9732, 9753, 9793, 9830, 9862, 9880, 10240, 10272, + 10282, 10321, 10406, 10517, 10530, 10566, 10585, 10645, 10896, 16466, 16468, 16473, 16485, 16646, 16660, 16665, + 16725, 16793, 16806, 16914, 16969, 16977, 16996, 17028, 17057, 17408, 17416, 17434, 17493, 17512, 17578, 17685, + 17696, 17733, 17745, 17748, 17749, 17750, 17753, 17765, 17794, 17813, 17946, 17984, 18005, 18072, 18453, 18529, + 18569, 18722, 18756, 18762, 18773, 18794, 18833, 18853, 18945, 19026, 19033, 19077, 20489, 20497, 20500, 20517, + 20565, 20586, 20610, 20633, 20757, 20769, 20776, 20805, 20817, 20820, 20821, 20822, 20825, 20837, 20864, 20872, + 20885, 20896, 21002, 21029, 21077, 21146, 21510, 21525, 21573, 21585, 21588, 21589, 21590, 21593, 21605, 21653, + 21665, 21765, 21777, 21780, 21781, 21782, 21785, 21797, 21825, 21828, 21829, 21830, 21833, 21840, 21841, 21842, + 21844, 21846, 21848, 21849, 21850, 21857, 21860, 21861, 21862, 21865, 21893, 21905, 21908, 21909, 21910, 21913, + 21925, 22024, 22037, 22085, 22097, 22100, 22101, 22102, 22105, 22117, 22165, 22545, 22566, 22568, 22594, 22608, + 22613, 22676, 22697, 22793, 22805, 22853, 22865, 22868, 22869, 22870, 22873, 22885, 22933, 22946, 23046, 23072, + 23125, 23209, 24597, 24640, 
24665, 24673, 24725, 24833, 24840, 24869, 24917, 24934, 24965, 25001, 25108, 25110, + 25152, 25184, 25192, 25234, 25616, 25618, 25625, 25685, 25704, 25738, 25744, 25770, 25877, 25897, 25925, 25937, + 25940, 25941, 25942, 25945, 25957, 25986, 26005, 26186, 26197, 26276, 26632, 26634, 26725, 26757, 26770, 26885, + 26965, 26976, 26986, 27032, 27153, 27174, 27200, 27208, 27240, 27269, 27282, 27290, 32778, 32800, 32802, 32808, + 32810, 32853, 32904, 32922, 32930, 32932, 33105, 33110, 33112, 33125, 33157, 33280, 33288, 33301, 33312, 33320, + 33424, 33797, 33829, 33858, 34068, 34133, 34146, 34176, 34217, 34306, 34342, 34441, 34454, 34468, 34832, 34918, + 34965, 34984, 35094, 35137, 35161, 35208, 35232, 35332, 35338, 35368, 35429, 36932, 36934, 36953, 37009, 37125, + 37136, 37138, 37145, 37157, 37205, 37220, 37258, 37290, 37444, 37446, 37465, 37478, 37525, 37905, 37968, 37973, + 38040, 38054, 38145, 38154, 38165, 38180, 38186, 38213, 38225, 38228, 38229, 38230, 38233, 38245, 38293, 38485, + 38504, 38530, 38938, 38985, 38993, 39012, 39040, 39173, 39192, 39253, 39265, 39301, 39316, 39322, 39442, 39497, + 39504, 39590, 40970, 40984, 40992, 41002, 41045, 41120, 41128, 41237, 41289, 41297, 41317, 41364, 41366, 41514, + 41557, 41633, 41989, 42021, 42056, 42068, 42074, 42113, 42242, 42265, 42274, 42325, 42340, 42402, 42501, 42512, + 42533, 42624, 42632, 42666, 43040, 43093, 43106, 43168, 43176, 43264, 43286, 43345, 43429, 43590, 43618, 43680, + }; + const int kmap_size = 43692; - const int nwant = 2; - const uint16_t * kgrid = grid_size == 256 ? kgrid_256 : kgrid_512; + const int nwant = type == GGML_TYPE_IQ1_S ? 3 : 2; + const uint16_t * kgrid = type == GGML_TYPE_IQ2_XXS ? kgrid_2bit_256 : + type == GGML_TYPE_IQ2_XS ? kgrid_2bit_512 : kgrid_1bit_512; uint64_t * kgrid_q2xs; int * kmap_q2xs; uint16_t * kneighbors_q2xs; @@ -9286,9 +9679,9 @@ void iq2xs_init_impl(int grid_size) { free(dist2); } -void iq2xs_free_impl(int grid_size) { - GGML_ASSERT(grid_size == 256 || grid_size == 512 || grid_size == 1024); - const int gindex = iq2_data_index(grid_size); +void iq2xs_free_impl(enum ggml_type type) { + GGML_ASSERT(type == GGML_TYPE_IQ2_XXS || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ1_S); + const int gindex = iq2_data_index(type); if (iq2_data[gindex].grid) { free(iq2_data[gindex].grid); iq2_data[gindex].grid = NULL; free(iq2_data[gindex].map); iq2_data[gindex].map = NULL; @@ -9322,7 +9715,7 @@ static int iq2_find_best_neighbour(const uint16_t * restrict neighbours, const u static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { - const int gindex = iq2_data_index(256); + const int gindex = iq2_data_index(GGML_TYPE_IQ2_XXS); const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; const int * kmap_q2xs = iq2_data[gindex].map; @@ -9495,7 +9888,7 @@ static void quantize_row_iq2_xxs_impl(const float * restrict x, void * restrict static void quantize_row_iq2_xs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { - const int gindex = iq2_data_index(512); + const int gindex = iq2_data_index(GGML_TYPE_IQ2_XS); const uint64_t * kgrid_q2xs = iq2_data[gindex].grid; const int * kmap_q2xs = iq2_data[gindex].map; @@ -10132,3 +10525,207 @@ void quantize_row_iq3_xxs_reference(const float * restrict x, block_iq3_xxs * re assert(k % QK_K == 0); quantize_row_iq3_xxs_impl(x, y, k, NULL); } + +// =================================== 1.5 bpw =================================================== + +static int 
iq1_find_best_neighbour(const uint16_t * restrict neighbours, const uint64_t * restrict grid, + const float * restrict xval, const float * restrict weight, float * scale, int8_t * restrict L, int ngrid) { + int num_neighbors = neighbours[0]; + GGML_ASSERT(num_neighbors > 0); + float best_score = 0; + int grid_index = -1; + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 8; ++i) { + float q = (pg[i] - 3)/2; + float w = weight[i]; + sumqx += w*q*xval[i]; + sumq2 += w*q*q; + } + if (sumqx > 0 && sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + *scale = sumqx/sumq2; best_score = *scale * sumqx; + grid_index = neighbours[j]; + } + } + if (grid_index < 0) { + for (int i = 0; i < ngrid; ++i) { + const int8_t * grid_i = (const int8_t *)(grid + i); + float sumqx = 0, sumq2 = 0; + for (int j = 0; j < 8; ++j) { + float w = weight[j]; + float q = (grid_i[j] - 3)/2; + sumqx += w*q*xval[j]; + sumq2 += w*q*q; + } + if (sumqx > 0 && sumq2 > 0 && sumqx*sumqx > best_score*sumq2) { + *scale = sumqx/sumq2; best_score = *scale*sumqx; + grid_index = i; + } + } + } + if (grid_index < 0) { + printf("Oops, did not find grid point\n"); + printf("Have %d neighbours\n", num_neighbors); + for (int j = 1; j <= num_neighbors; ++j) { + const int8_t * pg = (const int8_t *)(grid + neighbours[j]); + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < 8; ++i) { + float q = (pg[i] - 3)/2; + float w = weight[i]; + sumqx += w*q*xval[i]; + sumq2 += w*q*q; + } + printf(" neighbour %d: sumqx = %g sumq2 = %g\n", j, (double)sumqx, (double)sumq2); + } + } + GGML_ASSERT(grid_index >= 0); + //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + *scale *= 1.05f; // This is a fudge factor. Don't ask me why it improves the result. + //!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + const int8_t * pg = (const int8_t *)(grid + grid_index); + for (int i = 0; i < 8; ++i) L[i] = (pg[i] - 1)/2; + return grid_index; +} + +static int iq1_sort_helper(const void * left, const void * right) { + const float * l = left; + const float * r = right; + return *l < *r ? -1 : *l > *r ? 
1 : 0;
+}
+
+static void quantize_row_iq1_s_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) {
+
+    const int gindex = iq2_data_index(GGML_TYPE_IQ1_S);
+
+    const uint64_t * kgrid_q2xs = iq2_data[gindex].grid;
+    const int * kmap_q2xs = iq2_data[gindex].map;
+    const uint16_t * kneighbors_q2xs = iq2_data[gindex].neighbours;
+
+    GGML_ASSERT(quant_weights && "missing quantization weights");
+    GGML_ASSERT(kgrid_q2xs && "forgot to call ggml_quantize_init()?");
+    GGML_ASSERT(kmap_q2xs && "forgot to call ggml_quantize_init()?");
+    GGML_ASSERT(kneighbors_q2xs && "forgot to call ggml_quantize_init()?");
+    GGML_ASSERT(n%QK_K == 0);
+
+    const int nbl = n/256;
+
+    block_iq1_s * y = vy;
+
+    float  scales[QK_K/8];
+    float  weight[8];
+    int8_t L[8];
+    float  sumx[9];
+    float  sumw[9];
+    float  pairs[16];
+    int * idx = (int *)(pairs + 1);
+    uint8_t hbit[QK_K/8];
+
+    for (int ibl = 0; ibl < nbl; ++ibl) {
+
+        y[ibl].d = GGML_FP32_TO_FP16(0.f);
+        memset(y[ibl].qs, 0, QK_K/8);
+        memset(y[ibl].scales, 0, QK_K/16);
+
+        float max_scale = 0;
+
+        const float * xbl = x + QK_K*ibl;
+        float sumx2 = 0;
+        for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i];
+        float sigma2 = sumx2/QK_K;
+
+        for (int ib = 0; ib < QK_K/8; ++ib) {
+            const float * xb = xbl + 8*ib;
+            const float * qw = quant_weights + QK_K*ibl + 8*ib;
+            for (int i = 0; i < 8; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]);
+            float max = fabsf(xb[0]);
+            for (int i = 1; i < 8; ++i) max = MAX(max, fabsf(xb[i]));
+            if (!max) {
+                scales[ib] = 0;
+                memset(L, 1, 8);
+                continue;
+            }
+            // Here we solve exactly the sum of squared difference (SSD) weighted minimization problem.
+            // With just 3 allowed quant values (-1, 0, 1), we can search exhaustively for the two
+            // boundaries that split the weights xb[i] into 3 groups. To do so, we sort the weights
+            // in ascending order, compute Si = sum[weight[j] xb[j], j = 0...i] and
+            // Wi = sum[weight[j], j = 0...i], and use these to quickly get the optimum scale
+            // and score for each possible split.
+            for (int j = 0; j < 8; ++j) {
+                pairs[2*j] = xb[j];
+                idx[2*j] = j;
+            }
+            qsort(pairs, 8, 2*sizeof(float), iq1_sort_helper);
+            {
+                sumx[0] = sumw[0] = 0;
+                for (int j = 0; j < 8; ++j) {
+                    int i = idx[2*j];
+                    sumx[j+1] = sumx[j] + weight[i]*xb[i];
+                    sumw[j+1] = sumw[j] + weight[i];
+                }
+            }
+            float best_score = 0, scale = max;
+            int besti1 = 0, besti2 = 0;
+            for (int i1 = 0; i1 <= 8; ++i1) {
+                for (int i2 = i1; i2 <= 8; ++i2) {
+                    float sumqx = -(sumx[i1] - sumx[0]) + (sumx[8] - sumx[i2]);
+                    float sumq2 = (sumw[i1] - sumw[0]) + (sumw[8] - sumw[i2]);
+                    if (sumq2 > 0 && sumqx*sumqx > best_score*sumq2) {
+                        scale = sumqx/sumq2; best_score = scale*sumqx;
+                        besti1 = i1; besti2 = i2;
+                    }
+                }
+            }
+            for (int j = 0; j < besti1; ++j) L[idx[2*j]] = 0;
+            for (int j = besti1; j < besti2; ++j) L[idx[2*j]] = 1;
+            for (int j = besti2; j < 8; ++j) L[idx[2*j]] = 2;
+            if (scale < 0) {
+                for (int j = 0; j < 8; ++j) L[j] = 2 - L[j];
+                scale = -scale;
+            }
+            // Now we check if the solution found above corresponds to a grid point and, if not, use a neighbouring
+            // grid point that minimizes SSD.
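+            // The 8 ternary quants L[j] are packed 2 bits each into u, forming the candidate grid
+            // index; kmap_q2xs[u] gives its position in the 512-entry iq1_s grid, or a negative
+            // value that points into the precomputed neighbour list when the point is off the grid.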
+ uint16_t u = 0; + for (int j = 0; j < 8; ++j) u |= (L[j] << 2*j); + int grid_index = kmap_q2xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q2xs - kmap_q2xs[u] - 1; + grid_index = iq1_find_best_neighbour(neighbours, kgrid_q2xs, xb, weight, &scale, L, NGRID_IQ2XXS); + GGML_ASSERT(grid_index >= 0); + } + y[ibl].qs[ib] = grid_index & 255; + hbit[ib] = grid_index >> 8; + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + memset(y[ibl].qs, 0, QK_K/8); + continue; + } + + float d = max_scale/15; + y[ibl].d = GGML_FP32_TO_FP16(d*1.085f); // 1.085f is another fudge factor. Don't ask me why it is needed. + float id = 1/d; + for (int ib = 0; ib < QK_K/8; ++ib) { + int l = nearest_int(0.5f*(id*scales[ib]-1)); + l = MAX(0, MIN(7, l)); + if (hbit[ib]) l |= 8; + y[ibl].scales[ib/2] |= (l << 4*(ib%2)); + } + } +} + +size_t quantize_iq1_s(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq1_s_impl(src, qrow, n_per_row, quant_weights); + src += n_per_row; + qrow += nblock*sizeof(block_iq1_s); + } + return nrow * nblock * sizeof(block_iq1_s); +} diff --git a/ggml-quants.h b/ggml-quants.h index 68f09b1e1..ad381cfab 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -191,6 +191,13 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +typedef struct { + ggml_fp16_t d; + uint8_t qs[QK_K/8]; + uint8_t scales[QK_K/16]; +} block_iq1_s; +static_assert(sizeof(block_iq1_s) == sizeof(ggml_fp16_t) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); + #ifdef __cplusplus extern "C" { #endif @@ -243,6 +250,7 @@ void dequantize_row_q8_K(const block_q8_K * GGML_RESTRICT x, float * GGML_RESTRI void dequantize_row_iq2_xxs(const block_iq2_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); @@ -259,6 +267,7 @@ void ggml_vec_dot_q6_K_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const voi void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // // Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") @@ -266,6 +275,7 @@ void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq3_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq1_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q4_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); @@ -276,8 +286,8 @@ size_t quantize_q4_1 (const float * src, void * dst, int nrows, int n_per_row, size_t quantize_q5_0 (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q5_1 (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); -void iq2xs_init_impl(int grid_size); -void iq2xs_free_impl(int grid_size); +void iq2xs_init_impl(enum ggml_type type); +void iq2xs_free_impl(enum ggml_type type); void iq3xs_init_impl(int grid_size); void iq3xs_free_impl(int grid_size); diff --git a/ggml.c b/ggml.c index e94024c62..aefcda6d4 100644 --- a/ggml.c +++ b/ggml.c @@ -673,6 +673,18 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_K, .nrows = 1, }, + [GGML_TYPE_IQ1_S] = { + .type_name = "iq1_s", + .blck_size = QK_K, + .type_size = sizeof(block_iq1_s), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq1_s, + .from_float = NULL, + .from_float_reference = NULL, + .vec_dot = ggml_vec_dot_iq1_s_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2267,6 +2279,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ2_XXS: wtype = GGML_TYPE_IQ2_XXS; break; case GGML_FTYPE_MOSTLY_IQ2_XS: wtype = GGML_TYPE_IQ2_XS; break; case GGML_FTYPE_MOSTLY_IQ3_XXS: wtype = GGML_TYPE_IQ3_XXS; break; + case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7677,6 +7690,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: { ggml_compute_forward_add_q_f32(params, src0, src1, dst); } break; @@ -7944,6 +7958,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: { ggml_compute_forward_add1_q_f32(params, src0, src1, dst); } break; @@ -8064,6 +8079,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: default: { GGML_ASSERT(false); @@ -10830,6 +10846,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: { ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); } break; @@ -11010,6 +11027,7 @@ static void 
ggml_compute_forward_set( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: default: { GGML_ASSERT(false); @@ -11207,6 +11225,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: { ggml_compute_forward_get_rows_q(params, src0, src1, dst); } break; @@ -11880,6 +11899,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -11957,6 +11977,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -19136,8 +19157,9 @@ void ggml_quantize_init(enum ggml_type type) { ggml_critical_section_start(); switch (type) { - case GGML_TYPE_IQ2_XXS: iq2xs_init_impl(256); break; - case GGML_TYPE_IQ2_XS: iq2xs_init_impl(512); break; + case GGML_TYPE_IQ2_XXS: + case GGML_TYPE_IQ2_XS: + case GGML_TYPE_IQ1_S: iq2xs_init_impl(type); break; case GGML_TYPE_IQ3_XXS: iq3xs_init_impl(256); break; default: // nothing break; @@ -19149,8 +19171,10 @@ void ggml_quantize_init(enum ggml_type type) { void ggml_quantize_free(void) { ggml_critical_section_start(); - iq2xs_free_impl(256); - iq2xs_free_impl(512); + iq2xs_free_impl(GGML_TYPE_IQ2_XXS); + iq2xs_free_impl(GGML_TYPE_IQ2_XS); + iq2xs_free_impl(GGML_TYPE_IQ1_S); + iq3xs_free_impl(256); ggml_critical_section_end(); } @@ -19285,7 +19309,8 @@ size_t ggml_quantize_q8_0(const float * src, void * dst, int n, int k, int64_t * bool ggml_quantize_requires_imatrix(enum ggml_type type) { return type == GGML_TYPE_IQ2_XXS || - type == GGML_TYPE_IQ2_XS; + type == GGML_TYPE_IQ2_XS || + type == GGML_TYPE_IQ1_S; } size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, int start, @@ -19410,6 +19435,15 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq3_xxs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; + case GGML_TYPE_IQ1_S: + { + GGML_ASSERT(start % QK_K == 0); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq1_s(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); + } break; case GGML_TYPE_F16: { size_t elemsize = sizeof(ggml_fp16_t); diff --git a/ggml.h b/ggml.h index 6c1956772..004d09c70 100644 --- a/ggml.h +++ b/ggml.h @@ -354,6 +354,7 @@ extern "C" { GGML_TYPE_IQ2_XXS = 16, GGML_TYPE_IQ2_XS = 17, GGML_TYPE_IQ3_XXS = 18, + GGML_TYPE_IQ1_S = 19, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -391,6 +392,7 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ2_XXS = 15, // except 1d tensors GGML_FTYPE_MOSTLY_IQ2_XS = 16, // except 1d tensors GGML_FTYPE_MOSTLY_IQ3_XXS = 17, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ1_S = 18, // except 1d tensors }; // available tensor operations: diff --git a/llama.cpp b/llama.cpp index 6ac9caa95..5cfebb3b1 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2526,6 +2526,7 @@ struct llama_model_loader { case GGML_TYPE_IQ2_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XXS; break; case GGML_TYPE_IQ2_XS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XS; break; case GGML_TYPE_IQ3_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ3_XXS; break; + case GGML_TYPE_IQ1_S: 
ftype = LLAMA_FTYPE_MOSTLY_IQ1_S; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -2875,6 +2876,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_IQ2_XS: return "IQ2_XS - 2.3125 bpw"; case LLAMA_FTYPE_MOSTLY_Q3_K_XS:return "Q3_K - Extra small"; case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ1_S :return "IQ1_S - 1.5625 bpw"; default: return "unknown, may not work"; } @@ -10312,20 +10314,20 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) { new_type = GGML_TYPE_Q8_0; } - else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) { + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { new_type = GGML_TYPE_Q5_K; } else if (new_type != GGML_TYPE_Q8_0) { new_type = GGML_TYPE_Q6_K; } } else if (name == "token_embd.weight") { - if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { new_type = GGML_TYPE_Q2_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { new_type = GGML_TYPE_Q4_K; } - } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS) { + } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ2_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ2_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { if (name.find("attn_v.weight") != std::string::npos) { if (qs.model.hparams.n_gqa() >= 4 || qs.model.hparams.n_expert >= 4) new_type = GGML_TYPE_Q4_K; else new_type = GGML_TYPE_Q2_K; @@ -10335,6 +10337,9 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (qs.i_ffn_down < qs.n_ffn_down/8) new_type = GGML_TYPE_Q2_K; ++qs.i_ffn_down; } + else if (name.find("attn_output.weight") != std::string::npos) { + if (ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) new_type = GGML_TYPE_IQ2_XXS; + } } else if (name.find("attn_v.weight") != std::string::npos) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) { new_type = qs.model.hparams.n_gqa() >= 4 ? 
GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; @@ -10468,7 +10473,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS || - new_type == GGML_TYPE_IQ3_XXS) { + new_type == GGML_TYPE_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { int nx = tensor->ne[0]; int ny = tensor->ne[1]; if (nx % QK_K != 0) { @@ -10483,6 +10488,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ1_S: case GGML_TYPE_Q2_K: new_type = GGML_TYPE_Q4_0; break; case GGML_TYPE_Q3_K: new_type = GGML_TYPE_Q4_1; break; case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; @@ -10525,6 +10531,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_IQ2_XXS: quantized_type = GGML_TYPE_IQ2_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ2_XS: quantized_type = GGML_TYPE_IQ2_XS; break; case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; + case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S ; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } @@ -10698,6 +10705,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s } if ((new_type == GGML_TYPE_IQ2_XXS || new_type == GGML_TYPE_IQ2_XS || + new_type == GGML_TYPE_IQ1_S || (new_type == GGML_TYPE_Q2_K && params->ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S && strcmp(tensor->name, "token_embd.weight") != 0)) && !imatrix) { LLAMA_LOG_ERROR("\n\n============================================================\n"); LLAMA_LOG_ERROR("Missing importance matrix for tensor %s in a very low-bit quantization\n", tensor->name); diff --git a/llama.h b/llama.h index f4ec6ea63..5a97abcc9 100644 --- a/llama.h +++ b/llama.h @@ -100,6 +100,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_Q2_K_S = 21, // except 1d tensors LLAMA_FTYPE_MOSTLY_Q3_K_XS = 22, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ3_XXS = 23, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ1_S = 24, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 30a7d1f5a..ef37c5af2 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1917,7 +1917,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op GGML_TYPE_Q4_K, GGML_TYPE_Q5_K, GGML_TYPE_Q6_K, GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, - GGML_TYPE_IQ3_XXS, + GGML_TYPE_IQ3_XXS, GGML_TYPE_IQ1_S, }; // unary ops From fc0c8d286a533363a9a663510b62af85ffad58b3 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Sun, 18 Feb 2024 17:19:23 +0100 Subject: [PATCH 773/859] llava : update surgery script to not remove tensors (#5536) This commit updates the surgery script to not remove the tensors from the model file. For this to work the `--skip-unknown` flag is added as an argument to the convert.py script in README.md. The motivation for this change is that the surgery script currently removes the projector tensors from the model file. If the model was checked out from a repository, the model file will have been updated and have to be checked out again to reset this effect. If this can be avoided I think it would be preferable. 
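For reference, the resulting flow is roughly the sketch below (a minimal illustration only, not the script itself; the checkpoint path is hypothetical, and the real logic lives in examples/llava/llava-surgery.py):

```python
import torch

# hypothetical checkpoint path; in the real script this comes from args.model
path = "../llava-v1.5-7b/pytorch_model.bin"
checkpoint = torch.load(path)

# copy the projector tensors out instead of deleting them from the checkpoint
mm_tensors = [k for k in checkpoint.keys() if k.startswith("model.mm_projector")]
projector = {name: checkpoint[name].float() for name in mm_tensors}
torch.save(projector, "../llava-v1.5-7b/llava.projector")

# the checkpoint itself is left untouched; the leftover mm_projector tensors
# are simply ignored later by `python ./convert.py ... --skip-unknown`
```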
I did not perform this change for BakLLaVA models as I am not sure how that part works. --- examples/llava/README.md | 2 +- examples/llava/llava-surgery.py | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index 57eb42932..e42db6e5a 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -53,7 +53,7 @@ python ./examples/llava/convert-image-encoder-to-gguf.py -m ../clip-vit-large-pa 5. Use `convert.py` to convert the LLaMA part of LLaVA to GGUF: ```sh -python ./convert.py ../llava-v1.5-7b +python ./convert.py ../llava-v1.5-7b --skip-unknown ``` Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` directory. diff --git a/examples/llava/llava-surgery.py b/examples/llava/llava-surgery.py index 0a61efdfe..8b7a62fba 100644 --- a/examples/llava/llava-surgery.py +++ b/examples/llava/llava-surgery.py @@ -19,10 +19,6 @@ mm_tensors = [k for k, v in checkpoint.items() if k.startswith("model.mm_project projector = {name: checkpoint[name].float() for name in mm_tensors} torch.save(projector, f"{args.model}/llava.projector") -# remove these tensors from the checkpoint and save it again -for name in mm_tensors: - del checkpoint[name] - # BakLLaVA models contain CLIP tensors in it clip_tensors = [k for k, v in checkpoint.items() if k.startswith("model.vision_tower")] if len(clip_tensors) > 0: @@ -39,7 +35,7 @@ if len(clip_tensors) > 0: f.write("{}\n") -torch.save(checkpoint, path) + torch.save(checkpoint, path) print("Done!") print(f"Now you can convert {args.model} to a regular LLaMA GGUF file.") From 5d3de51f972055702a1859186fe7acb8f0b43dc4 Mon Sep 17 00:00:00 2001 From: Herman Semenov Date: Sun, 18 Feb 2024 16:20:12 +0000 Subject: [PATCH 774/859] ggml, common, examples, tests : fixed type arguments in printf (#5528) --- common/common.cpp | 4 +- examples/batched-bench/batched-bench.cpp | 2 +- examples/batched/batched.cpp | 2 +- .../convert-llama2c-to-ggml.cpp | 38 +++++++++---------- examples/perplexity/perplexity.cpp | 2 +- .../train-text-from-scratch.cpp | 14 +++---- ggml.c | 4 +- tests/test-grammar-parser.cpp | 20 +++++----- tests/test-llama-grammar.cpp | 4 +- 9 files changed, 45 insertions(+), 45 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 3a92d3797..9ffc3951f 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1741,7 +1741,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l fprintf(stream, "rope_freq_base: %f # default: 10000.0\n", params.rope_freq_base); fprintf(stream, "rope_freq_scale: %f # default: 1.0\n", params.rope_freq_scale); - fprintf(stream, "seed: %d # default: -1 (random seed)\n", params.seed); + fprintf(stream, "seed: %u # default: -1 (random seed)\n", params.seed); fprintf(stream, "simple_io: %s # default: false\n", params.simple_io ? "true" : "false"); fprintf(stream, "cont_batching: %s # default: false\n", params.cont_batching ? 
"true" : "false"); fprintf(stream, "temp: %f # default: 0.8\n", sparams.temp); @@ -1750,7 +1750,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l dump_vector_float_yaml(stream, "tensor_split", tensor_split_vector); fprintf(stream, "tfs: %f # default: 1.0\n", sparams.tfs_z); - fprintf(stream, "threads: %d # default: %d\n", params.n_threads, std::thread::hardware_concurrency()); + fprintf(stream, "threads: %d # default: %u\n", params.n_threads, std::thread::hardware_concurrency()); fprintf(stream, "top_k: %d # default: 40\n", sparams.top_k); fprintf(stream, "top_p: %f # default: 0.95\n", sparams.top_p); fprintf(stream, "min_p: %f # default: 0.0\n", sparams.min_p); diff --git a/examples/batched-bench/batched-bench.cpp b/examples/batched-bench/batched-bench.cpp index 55dfd9784..b4b8a38e1 100644 --- a/examples/batched-bench/batched-bench.cpp +++ b/examples/batched-bench/batched-bench.cpp @@ -159,7 +159,7 @@ int main(int argc, char ** argv) { } LOG_TEE("\n"); - LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, mmq = %d, n_threads = %d, n_threads_batch = %d\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, mmq, ctx_params.n_threads, ctx_params.n_threads_batch); + LOG_TEE("%s: n_kv_max = %d, is_pp_shared = %d, n_gpu_layers = %d, mmq = %d, n_threads = %u, n_threads_batch = %u\n", __func__, n_kv_max, is_pp_shared, n_gpu_layers, mmq, ctx_params.n_threads, ctx_params.n_threads_batch); LOG_TEE("\n"); LOG_TEE("|%6s | %6s | %4s | %6s | %8s | %8s | %8s | %8s | %8s | %8s |\n", "PP", "TG", "B", "N_KV", "T_PP s", "S_PP t/s", "T_TG s", "S_TG t/s", "T s", "S t/s"); diff --git a/examples/batched/batched.cpp b/examples/batched/batched.cpp index eab636692..9be7eb56b 100644 --- a/examples/batched/batched.cpp +++ b/examples/batched/batched.cpp @@ -92,7 +92,7 @@ int main(int argc, char ** argv) { const int n_ctx = llama_n_ctx(ctx); - LOG_TEE("\n%s: n_len = %d, n_ctx = %d, n_batch = %d, n_parallel = %d, n_kv_req = %d\n", __func__, n_len, n_ctx, ctx_params.n_batch, n_parallel, n_kv_req); + LOG_TEE("\n%s: n_len = %d, n_ctx = %d, n_batch = %u, n_parallel = %d, n_kv_req = %d\n", __func__, n_len, n_ctx, ctx_params.n_batch, n_parallel, n_kv_req); // make sure the KV cache is big enough to hold all the prompt and generated tokens if (n_kv_req > n_ctx) { diff --git a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp index 4d41e1779..8209dcb64 100644 --- a/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp +++ b/examples/convert-llama2c-to-ggml/convert-llama2c-to-ggml.cpp @@ -325,14 +325,14 @@ struct train_params { }; static void print_params(struct my_llama_hparams * params) { - printf("%s: n_vocab: %d\n", __func__, params->n_vocab); - printf("%s: n_ctx: %d\n", __func__, params->n_ctx); - printf("%s: n_embd: %d\n", __func__, params->n_embd); - printf("%s: n_mult: %d\n", __func__, params->n_mult); - printf("%s: n_head: %d\n", __func__, params->n_head); - printf("%s: n_ff: %d\n", __func__, params->n_ff); - printf("%s: n_layer: %d\n", __func__, params->n_layer); - printf("%s: n_rot: %d\n", __func__, params->n_rot); + printf("%s: n_vocab: %u\n", __func__, params->n_vocab); + printf("%s: n_ctx: %u\n", __func__, params->n_ctx); + printf("%s: n_embd: %u\n", __func__, params->n_embd); + printf("%s: n_mult: %u\n", __func__, params->n_mult); + printf("%s: n_head: %u\n", __func__, params->n_head); + printf("%s: n_ff: %u\n", __func__, params->n_ff); + printf("%s: n_layer: %u\n", __func__, 
params->n_layer); + printf("%s: n_rot: %u\n", __func__, params->n_rot); } static void init_model(struct my_llama_model * model) { @@ -350,25 +350,25 @@ static void init_model(struct my_llama_model * model) { model->train_tokens = 0; model->tok_embeddings = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_vocab); - printf("[%s:GG] Allocating [%d] x [%d] = [%d] float space for model->tok_embeddings\n",__func__,n_embd , n_vocab, n_embd * n_vocab); + printf("[%s:GG] Allocating [%u] x [%u] = [%u] float space for model->tok_embeddings\n",__func__,n_embd , n_vocab, n_embd * n_vocab); model->norm = ggml_new_tensor_1d(ctx, GGML_TYPE_F32, n_embd); - printf("[%s:GG] Allocating [%d] float space for model->norm\n",__func__,n_embd); + printf("[%s:GG] Allocating [%u] float space for model->norm\n",__func__,n_embd); model->output = ggml_new_tensor_2d(ctx, GGML_TYPE_F32, n_embd, n_vocab); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for model->output\n",__func__,n_embd, n_vocab, n_embd * n_vocab); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for model->output\n",__func__,n_embd, n_vocab, n_embd * n_vocab); // printing the per-layer allocations here so we dont print in the for loop. - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wq for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wk for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wv for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.wo for [%d] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.wq for [%u] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.wk for [%u] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.wv for [%u] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.wo for [%u] layers\n",__func__, n_embd, n_embd, n_embd * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] float space for layer.ffn_norm for [%d] layers\n",__func__,n_embd, n_layer); + printf("[%s:GG] Allocating [%u] float space for layer.ffn_norm for [%u] layers\n",__func__,n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w1 for [%d] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w2 for [%d] layers\n",__func__, n_embd, n_ff, n_ff * n_embd, n_layer); - printf("[%s:GG] Allocating [%d] x[%d] = [%d] float space for layer.w3 for [%d] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.w1 for [%u] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.w2 for [%u] layers\n",__func__, n_embd, n_ff, n_ff * n_embd, n_layer); + printf("[%s:GG] Allocating [%u] x[%u] = [%u] float space for layer.w3 for [%u] layers\n",__func__, n_ff, n_embd, n_embd * n_ff, n_layer); ggml_set_name(model->tok_embeddings, "tok_embeddings.weight"); ggml_set_name(model->norm, "norm.weight"); diff --git 
a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp index 67d2d3293..74dcc642a 100644 --- a/examples/perplexity/perplexity.cpp +++ b/examples/perplexity/perplexity.cpp @@ -1623,7 +1623,7 @@ static void kl_divergence(llama_context * ctx, const gpt_params & params) { uint32_t n_ctx; in.read((char *)&n_ctx, sizeof(n_ctx)); if (n_ctx > llama_n_ctx(ctx)) { - fprintf(stderr, "%s: %s has been computed with %d, while the current context is %d. Increase it with -c and retry\n", + fprintf(stderr, "%s: %s has been computed with %u, while the current context is %d. Increase it with -c and retry\n", __func__, params.logits_file.c_str(), n_ctx, params.n_ctx); } diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index bfdf124d7..e78ab185d 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -111,13 +111,13 @@ static const char * LLM_TENSOR_FFN_DOWN = "blk.%d.ffn_down"; static const char * LLM_TENSOR_FFN_UP = "blk.%d.ffn_up"; static void print_params(struct my_llama_hparams * params) { - printf("%s: n_vocab: %d\n", __func__, params->n_vocab); - printf("%s: n_ctx: %d\n", __func__, params->n_ctx); - printf("%s: n_embd: %d\n", __func__, params->n_embd); - printf("%s: n_head: %d\n", __func__, params->n_head); - printf("%s: n_ff: %d\n", __func__, params->n_ff); - printf("%s: n_layer: %d\n", __func__, params->n_layer); - printf("%s: n_rot: %d\n", __func__, params->n_rot); + printf("%s: n_vocab: %u\n", __func__, params->n_vocab); + printf("%s: n_ctx: %u\n", __func__, params->n_ctx); + printf("%s: n_embd: %u\n", __func__, params->n_embd); + printf("%s: n_head: %u\n", __func__, params->n_head); + printf("%s: n_ff: %u\n", __func__, params->n_ff); + printf("%s: n_layer: %u\n", __func__, params->n_layer); + printf("%s: n_rot: %u\n", __func__, params->n_rot); } static void set_param_model(struct my_llama_model * model) { diff --git a/ggml.c b/ggml.c index aefcda6d4..8224652a9 100644 --- a/ggml.c +++ b/ggml.c @@ -17909,7 +17909,7 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * ptr += ggml_nbytes(tensor); - fprintf(stderr, "%s: loaded leaf %d: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); + fprintf(stderr, "%s: loaded leaf %u: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); } } @@ -18012,7 +18012,7 @@ struct ggml_cgraph * ggml_graph_import(const char * fname, struct ggml_context * result->nodes[i] = tensor; - fprintf(stderr, "%s: loaded node %d: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); + fprintf(stderr, "%s: loaded node %u: '%16s', %9zu bytes\n", __func__, i, tensor->name, ggml_nbytes(tensor)); } } } diff --git a/tests/test-grammar-parser.cpp b/tests/test-grammar-parser.cpp index a0b5b043d..91939e276 100644 --- a/tests/test-grammar-parser.cpp +++ b/tests/test-grammar-parser.cpp @@ -38,8 +38,8 @@ term ::= [0-9]+)"""; // pretty print error message before asserting if (expected_pair.first != key || expected_pair.second != value) { - fprintf(stderr, "expected_pair: %s, %d\n", expected_pair.first.c_str(), expected_pair.second); - fprintf(stderr, "actual_pair: %s, %d\n", key.c_str(), value); + fprintf(stderr, "expected_pair: %s, %u\n", expected_pair.first.c_str(), expected_pair.second); + fprintf(stderr, "actual_pair: %s, %u\n", key.c_str(), value); fprintf(stderr, "expected_pair != actual_pair\n"); } @@ -96,9 +96,9 
@@ term ::= [0-9]+)"""; // pretty print error message before asserting if (expected_element.type != element.type || expected_element.value != element.value) { - fprintf(stderr, "index: %d\n", index); - fprintf(stderr, "expected_element: %d, %d\n", expected_element.type, expected_element.value); - fprintf(stderr, "actual_element: %d, %d\n", element.type, element.value); + fprintf(stderr, "index: %u\n", index); + fprintf(stderr, "expected_element: %d, %u\n", expected_element.type, expected_element.value); + fprintf(stderr, "actual_element: %d, %u\n", element.type, element.value); fprintf(stderr, "expected_element != actual_element\n"); } @@ -144,8 +144,8 @@ term ::= [0-9]+)"""; // pretty print error message before asserting if (expected_pair.first != key || expected_pair.second != value) { - fprintf(stderr, "expected_pair: %s, %d\n", expected_pair.first.c_str(), expected_pair.second); - fprintf(stderr, "actual_pair: %s, %d\n", key.c_str(), value); + fprintf(stderr, "expected_pair: %s, %u\n", expected_pair.first.c_str(), expected_pair.second); + fprintf(stderr, "actual_pair: %s, %u\n", key.c_str(), value); fprintf(stderr, "expected_pair != actual_pair\n"); } @@ -235,9 +235,9 @@ term ::= [0-9]+)"""; // pretty print error message before asserting if (expected_element.type != element.type || expected_element.value != element.value) { - fprintf(stderr, "index: %d\n", index); - fprintf(stderr, "expected_element: %d, %d\n", expected_element.type, expected_element.value); - fprintf(stderr, "actual_element: %d, %d\n", element.type, element.value); + fprintf(stderr, "index: %u\n", index); + fprintf(stderr, "expected_element: %d, %u\n", expected_element.type, expected_element.value); + fprintf(stderr, "actual_element: %d, %u\n", element.type, element.value); fprintf(stderr, "expected_element != actual_element\n"); } diff --git a/tests/test-llama-grammar.cpp b/tests/test-llama-grammar.cpp index 16ebe753f..27ca4d265 100644 --- a/tests/test-llama-grammar.cpp +++ b/tests/test-llama-grammar.cpp @@ -180,8 +180,8 @@ int main() if (expected_element.type != element->type || expected_element.value != element->value) { fprintf(stderr, "index: %d\n", index); - fprintf(stderr, "expected_element: %d, %d\n", expected_element.type, expected_element.value); - fprintf(stderr, "actual_element: %d, %d\n", element->type, element->value); + fprintf(stderr, "expected_element: %d, %u\n", expected_element.type, expected_element.value); + fprintf(stderr, "actual_element: %d, %u\n", element->type, element->value); fprintf(stderr, "expected_element != actual_element\n"); } From 1dcc3fde004787e6fc4d84c9de0bb34cd2901a3e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 18:21:52 +0200 Subject: [PATCH 775/859] common : fix ub (#5530) --- common/common.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 9ffc3951f..489462b5a 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1801,7 +1801,8 @@ void dump_kv_cache_view_seqs(const llama_kv_cache_view & view, int row_size) { if (cs_curr[j] < 0) { continue; } if (seqs.find(cs_curr[j]) == seqs.end()) { if (seqs.size() + 1 >= sizeof(slot_chars)) { break; } - seqs[cs_curr[j]] = seqs.size(); + const size_t sz = seqs.size(); + seqs[cs_curr[j]] = sz; } } if (seqs.size() + 1 >= sizeof(slot_chars)) { break; } From 66c1968f7a2e895675425e875b6589f1233a1b52 Mon Sep 17 00:00:00 2001 From: Daniel Hiltgen Date: Sun, 18 Feb 2024 08:23:16 -0800 Subject: [PATCH 776/859] server : graceful server shutdown (#5244) This 
updates the server queue to support graceful shutdown of the server on signals. --- examples/server/server.cpp | 23 ++++++++++++++++++++++- examples/server/utils.hpp | 20 +++++++++++++++++--- 2 files changed, 39 insertions(+), 4 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index a0b46970b..7800c6e7e 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -28,6 +28,7 @@ #include #include #include +#include using json = nlohmann::json; @@ -2511,6 +2512,9 @@ static void append_to_generated_text_from_generated_token_probs(llama_server_con } } +std::function<void(int)> shutdown_handler; +inline void signal_handler(int signal) { shutdown_handler(signal); } + int main(int argc, char **argv) { #if SERVER_VERBOSE != 1 @@ -3128,8 +3132,25 @@ int main(int argc, char **argv) std::placeholders::_2, std::placeholders::_3 )); - llama.queue_tasks.start_loop(); + shutdown_handler = [&](int) { + llama.queue_tasks.terminate(); + }; + +#if defined (__unix__) || (defined (__APPLE__) && defined (__MACH__)) + struct sigaction sigint_action; + sigint_action.sa_handler = signal_handler; + sigemptyset (&sigint_action.sa_mask); + sigint_action.sa_flags = 0; + sigaction(SIGINT, &sigint_action, NULL); +#elif defined (_WIN32) + auto console_ctrl_handler = +[](DWORD ctrl_type) -> BOOL { + return (ctrl_type == CTRL_C_EVENT) ? (signal_handler(SIGINT), true) : false; + }; + SetConsoleCtrlHandler(reinterpret_cast<PHANDLER_ROUTINE>(console_ctrl_handler), true); +#endif + llama.queue_tasks.start_loop(); + svr.stop(); t.join(); llama_backend_free(); diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 548548962..0ee670dba 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -220,6 +220,7 @@ inline std::string format_chatml(std::vector<json> messages) struct llama_server_queue { int id = 0; std::mutex mutex_tasks; + bool running; // queues std::vector<task_server> queue_tasks; std::vector<task_server> queue_tasks_deferred; @@ -278,9 +279,18 @@ struct llama_server_queue { queue_tasks_deferred.clear(); } - // Start the main loop. This call is blocking - [[noreturn]] + // end the start_loop routine + void terminate() { + { + std::unique_lock<std::mutex> lock(mutex_tasks); + running = false; + } + condition_tasks.notify_all(); + } + + // Start the main loop. void start_loop() { + running = true; while (true) { // new task arrived LOG_VERBOSE("have new task", {}); @@ -324,8 +334,12 @@ struct llama_server_queue { { std::unique_lock<std::mutex> lock(mutex_tasks); if (queue_tasks.empty()) { + if (!running) { + LOG_VERBOSE("ending start_loop", {}); + return; + } condition_tasks.wait(lock, [&]{ - return !queue_tasks.empty(); + return (!queue_tasks.empty() || !running); }); } } From 36376abe05a12a8cb3af548a4af9b8d0e2e69597 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 18 Feb 2024 17:30:09 +0100 Subject: [PATCH 777/859] server : --n-predict option document and cap to max value (#5549) * server: document --n-predict * server: ensure client request cannot override n_predict if set * server: fix print usage LF in new --n-predict option --- examples/server/README.md | 1 + examples/server/server.cpp | 15 ++++++++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/examples/server/README.md b/examples/server/README.md index 249368749..fe5cd8d5d 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -39,6 +39,7 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--mmproj MMPROJ_FILE`: Path to a multimodal projector file for LLaVA.
- `--grp-attn-n`: Set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w` - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` +- `-n, --n-predict`: Set the maximum tokens to predict (default: -1) ## Build diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 7800c6e7e..7aa706e95 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -159,6 +159,7 @@ struct llama_client_slot int32_t n_decoded = 0; int32_t n_remaining = -1; int32_t i_batch = -1; + int32_t n_predict = -1; int32_t num_prompt_tokens = 0; int32_t num_prompt_tokens_processed = 0; @@ -410,6 +411,7 @@ struct llama_server_context slot.id = i; slot.n_ctx = n_ctx_slot; + slot.n_predict = params.n_predict; LOG_TEE(" -> Slot %i - max context: %i\n", slot.id, n_ctx_slot); @@ -546,6 +548,15 @@ struct llama_server_context slot->sparams.grammar = json_value(data, "grammar", default_sparams.grammar); slot->sparams.n_probs = json_value(data, "n_probs", default_sparams.n_probs); + if (slot->n_predict > 0 && slot->params.n_predict > slot->n_predict) { + // Might be better to reject the request with a 400 ? + LOG_WARNING("Max tokens to predict exceeds server configuration", { + {"params.n_predict", slot->params.n_predict}, + {"slot.n_predict", slot->n_predict}, + }); + slot->params.n_predict = slot->n_predict; + } + // infill if (data.count("input_prefix") != 0) { @@ -1053,6 +1064,7 @@ struct llama_server_context return json { {"n_ctx", slot.n_ctx}, + {"n_predict", slot.n_predict}, {"model", params.model_alias}, {"seed", slot.params.seed}, {"temperature", slot.sparams.temp}, @@ -1915,13 +1927,14 @@ static void server_print_usage(const char *argv0, const gpt_params &params, printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); printf("\n"); + printf(" -n, --n-predict maximum tokens to predict (default: %d)\n", params.n_predict); printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); printf(" types: int, float, bool.
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); printf(" --chat-template FORMAT_NAME"); - printf(" set chat template, possible valus is: llama2, chatml (default %s)", sparams.chat_template.c_str()); + printf(" set chat template, possible value is: llama2, chatml (default %s)", sparams.chat_template.c_str()); printf("\n"); } From e75c6279d1c8e7abb82a331f5de7124eed402de2 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 18 Feb 2024 17:31:28 +0100 Subject: [PATCH 778/859] server : enhanced health endpoint (#5548) * server: enrich health endpoint with available slots, return 503 if not slots are available * server: document new status no slot available in the README.md --- examples/server/README.md | 1 + examples/server/server.cpp | 31 +++++++++++++++++++++++++++++-- 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index fe5cd8d5d..5e3ae833b 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -136,6 +136,7 @@ node index.js - `{"status": "loading model"}` if the model is still being loaded. - `{"status": "error"}` if the model failed to load. - `{"status": "ok"}` if the model is successfully loaded and the server is ready for further requests mentioned below. + - `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if no slot are currently available - **POST** `/completion`: Given a `prompt`, it returns the predicted completion. 
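Given the statuses documented above, a client can gate request dispatch on `/health`. A hedged client-side sketch (the host, port, and the third-party `requests` package are assumptions, not part of this patch):

```python
import time

import requests  # third-party HTTP client, assumed available


def wait_until_ready(base_url="http://localhost:8080", timeout=60.0):
    """Poll /health until the model is loaded and a slot is idle, or time out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        body = requests.get(f"{base_url}/health").json()
        if body.get("status") == "ok":
            return body  # model loaded, at least one slot is idle
        if body.get("status") == "error":
            raise RuntimeError("model failed to load")
        # "loading model" or "no slot available": back off and retry
        time.sleep(0.5)
    raise TimeoutError("server did not become ready in time")
```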
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 7aa706e95..8145af867 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2578,8 +2578,35 @@ int main(int argc, char **argv) server_state current_state = state.load(); switch(current_state) { case SERVER_STATE_READY: - res.set_content(R"({"status": "ok"})", "application/json"); - res.status = 200; // HTTP OK + if (llama.all_slots_are_idle) { + res.set_content(R"({"status": "ok"})", "application/json"); + res.status = 200; // HTTP OK + } else { + int available_slots = 0; + int processing_slots = 0; + for (llama_client_slot & slot : llama.slots) { + if (slot.available()) { + available_slots++; + } else { + processing_slots++; + } + } + if (available_slots > 0) { + json health = { + {"status", "ok"}, + {"slots_idle", available_slots}, + {"slots_processing", processing_slots}}; + res.set_content(health.dump(), "application/json"); + res.status = 200; // HTTP OK + } else { + json health = { + {"status", "no slot available"}, + {"slots_idle", available_slots}, + {"slots_processing", processing_slots}}; + res.set_content(health.dump(), "application/json"); + res.status = 503; // HTTP Service Unavailable + } + } break; case SERVER_STATE_LOADING_MODEL: res.set_content(R"({"status": "loading model"})", "application/json"); From f3f28c5395cd25b371617981b341616dbdd31e85 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 19:17:00 +0200 Subject: [PATCH 779/859] cmake : fix GGML_USE_SYCL typo (#5555) --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 5ea4d4f19..0c29b5d09 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -526,7 +526,7 @@ if (LLAMA_SYCL) message(STATUS "SYCL found") - add_compile_definitions(GML_USE_SYCL) + add_compile_definitions(GGML_USE_SYCL) if (LLAMA_SYCL_F16) add_compile_definitions(GGML_SYCL_F16) From 689a091bbe0537ee9abff3e15a1d74f5f3561165 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 18 Feb 2024 19:38:06 +0200 Subject: [PATCH 780/859] sampling : do not set min_keep to n_probs (#5564) --- common/sampling.cpp | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/common/sampling.cpp b/common/sampling.cpp index 53013138a..611c327bb 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -121,7 +121,7 @@ static void sampler_queue( struct llama_context * ctx_main, const llama_sampling_params & params, llama_token_data_array & cur_p, - size_t & min_keep) { + size_t min_keep) { const float temp = params.temp; const float dynatemp_range = params.dynatemp_range; const float dynatemp_exponent = params.dynatemp_exponent; @@ -248,10 +248,7 @@ static llama_token llama_sampling_sample_impl( llama_sample_temp(ctx_main, &cur_p, temp); id = llama_sample_token_mirostat_v2(ctx_main, &cur_p, mirostat_tau, mirostat_eta, &ctx_sampling->mirostat_mu); } else { - // temperature sampling - size_t min_keep = std::max(1, params.n_probs); - - sampler_queue(ctx_main, params, cur_p, min_keep); + sampler_queue(ctx_main, params, cur_p, 1); id = llama_sample_token(ctx_main, &cur_p); From c145f8a132b2fe1d1e65987faddbd9a40bef7a12 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 18 Feb 2024 18:39:57 +0100 Subject: [PATCH 781/859] server : slots monitoring endpoint (#5550) --- examples/server/README.md | 64 ++++++++++++++++++++++++++++++++++++++ examples/server/server.cpp | 32 +++++++++++++++++++ 2 files changed, 96 insertions(+) diff --git a/examples/server/README.md 
b/examples/server/README.md index 5e3ae833b..ac5133d24 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -40,6 +40,7 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--grp-attn-n`: Set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w` - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` - `-n, --n-predict`: Set the maximum tokens to predict (default: -1) +- `--slots-endpoint-disable`: To disable slots state monitoring endpoint. Slots state may contain user data, prompts included. ## Build @@ -381,6 +382,69 @@ Notice that each `probs` is an array of length `n_probs`. }' ``` +- **GET** `/slots`: Returns the current slots processing state. Can be disabled with `--slots-endpoint-disable`. + +### Result JSON + +```json +[ + { + "dynatemp_exponent": 1.0, + "dynatemp_range": 0.0, + "frequency_penalty": 0.0, + "grammar": "", + "id": 0, + "ignore_eos": false, + "logit_bias": [], + "min_p": 0.05000000074505806, + "mirostat": 0, + "mirostat_eta": 0.10000000149011612, + "mirostat_tau": 5.0, + "model": "llama-2-7b-32k-instruct.Q2_K.gguf", + "n_ctx": 2048, + "n_keep": 0, + "n_predict": 100000, + "n_probs": 0, + "next_token": { + "has_next_token": true, + "n_remain": -1, + "num_tokens_predicted": 0, + "stopped_eos": false, + "stopped_limit": false, + "stopped_word": false, + "stopping_word": "" + }, + "penalize_nl": true, + "penalty_prompt_tokens": [], + "presence_penalty": 0.0, + "prompt": "Say hello to llama.cpp", + "repeat_last_n": 64, + "repeat_penalty": 1.100000023841858, + "samplers": [ + "top_k", + "tfs_z", + "typical_p", + "top_p", + "min_p", + "temperature" + ], + "seed": 42, + "state": 1, + "stop": [ + "\n" + ], + "stream": false, + "task_id": 0, + "temperature": 0.0, + "tfs_z": 1.0, + "top_k": 40, + "top_p": 0.949999988079071, + "typical_p": 1.0, + "use_penalty_prompt_tokens": false + } +] +``` + ## More examples ### Change system prompt on runtime diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 8145af867..4f2e9c898 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -41,6 +41,7 @@ struct server_params int32_t port = 8080; int32_t read_timeout = 600; int32_t write_timeout = 600; + bool slots_endpoint = true; }; bool server_verbose = false; @@ -1926,6 +1927,7 @@ static void server_print_usage(const char *argv0, const gpt_params &params, printf(" set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); + printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); printf("\n"); printf(" -n, --n-predict maximum tokens to predict (default: %d)\n", params.n_predict); printf(" --override-kv KEY=TYPE:VALUE\n"); printf(" advanced option to override model metadata by key. may be specified multiple times.\n"); @@ -2374,6 +2376,10 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, log_set_target(stdout); LOG_INFO("logging to file is disabled.", {}); } + else if (arg == "--slots-endpoint-disable") + { + sparams.slots_endpoint = false; + } else if (arg == "--chat-template") { if (++i >= argc) @@ -2619,6 +2625,32 @@ int main(int argc, char **argv) } }); + if (sparams.slots_endpoint) { + svr.Get("/slots", [&](const httplib::Request&, httplib::Response& res) { + json slots; + for
(llama_client_slot & slot : llama.slots) { + json slot_data = llama.get_formated_generation(slot); + slot_data["id"] = slot.id; + slot_data["task_id"] = slot.task_id; + slot_data["state"] = slot.state; + slot_data["prompt"] = slot.prompt; + slot_data["next_token"] = { + {"has_next_token", slot.has_next_token}, + {"n_remain", slot.n_remaining}, + {"num_tokens_predicted", slot.n_decoded}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, + }; + + slots.push_back(slot_data); + } + res.set_content(slots.dump(), "application/json"); + res.status = 200; // HTTP OK + }); + } + svr.set_logger(log_server_request); svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) From 5ee99c32f5e47c8d32634eff9a47fb32a24c276b Mon Sep 17 00:00:00 2001 From: Robey Holderith Date: Sun, 18 Feb 2024 11:11:16 -0800 Subject: [PATCH 782/859] common, server : surface min_keep as its own parameter (#5567) * Feature - surface min_keep as its own parameter * Updated README with min_keep param --- common/common.cpp | 1 + common/sampling.cpp | 5 ++++- common/sampling.h | 1 + examples/server/README.md | 2 ++ examples/server/public/index.html | 4 ++++ examples/server/server.cpp | 2 ++ 6 files changed, 14 insertions(+), 1 deletion(-) diff --git a/common/common.cpp b/common/common.cpp index 489462b5a..10ef11829 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -1704,6 +1704,7 @@ void dump_non_result_info_yaml(FILE * stream, const gpt_params & params, const l } fprintf(stream, "lora_base: %s\n", params.lora_base.c_str()); fprintf(stream, "main_gpu: %d # default: 0\n", params.main_gpu); + fprintf(stream, "min_keep: %d # default: 0 (disabled)\n", sparams.min_keep); fprintf(stream, "mirostat: %d # default: 0 (disabled)\n", sparams.mirostat); fprintf(stream, "mirostat_ent: %f # default: 5.0\n", sparams.mirostat_tau); fprintf(stream, "mirostat_lr: %f # default: 0.1\n", sparams.mirostat_eta); diff --git a/common/sampling.cpp b/common/sampling.cpp index 611c327bb..de4331a11 100644 --- a/common/sampling.cpp +++ b/common/sampling.cpp @@ -248,7 +248,10 @@ static llama_token llama_sampling_sample_impl( llama_sample_temp(ctx_main, &cur_p, temp); id = llama_sample_token_mirostat_v2(ctx_main, &cur_p, mirostat_tau, mirostat_eta, &ctx_sampling->mirostat_mu); } else { - sampler_queue(ctx_main, params, cur_p, 1); + // temperature sampling + size_t min_keep = std::max(1, params.min_keep); + + sampler_queue(ctx_main, params, cur_p, min_keep); id = llama_sample_token(ctx_main, &cur_p); diff --git a/common/sampling.h b/common/sampling.h index e1279a894..95d875394 100644 --- a/common/sampling.h +++ b/common/sampling.h @@ -22,6 +22,7 @@ enum class llama_sampler_type : char { typedef struct llama_sampling_params { int32_t n_prev = 64; // number of previous tokens to remember int32_t n_probs = 0; // if greater than 0, output the probabilities of top n_probs tokens. 
+ int32_t min_keep = 0; // 0 = disabled, otherwise samplers should return at least min_keep tokens int32_t top_k = 40; // <= 0 to use vocab size float top_p = 0.95f; // 1.0 = disabled float min_p = 0.05f; // 0.0 = disabled diff --git a/examples/server/README.md b/examples/server/README.md index ac5133d24..809e2d37c 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -199,6 +199,8 @@ node index.js `n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token (default: 0) + `min_keep`: If greater than 0, force samplers to return N possible tokens at minimum (default: 0) + `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. You can determine the place of the image in the prompt as in the following: `USER:[img-12]Describe the image in detail.\nASSISTANT:`. In this case, `[img-12]` will be replaced by the embeddings of the image with id `12` in the following `image_data` array: `{..., "image_data": [{"data": "<BASE64_STRING>", "id": 12}]}`. Use `image_data` only with multimodal models, e.g., LLaVA. `slot_id`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot (default: -1) diff --git a/examples/server/public/index.html b/examples/server/public/index.html index b059c75f2..84038ddce 100644 --- a/examples/server/public/index.html +++ b/examples/server/public/index.html @@ -234,6 +234,7 @@ mirostat_eta: 0.1, // learning rate grammar: '', n_probs: 0, // no completion_probabilities, + min_keep: 0, // min probs from each sampler, image_data: [], cache_prompt: true, api_key: '' @@ -791,6 +792,9 @@
    ${IntField({ label: "Show Probabilities", max: 10, min: 0, name: "n_probs", value: params.value.n_probs })}
    +
    + ${IntField({ label: "Min Probabilities from each Sampler", max: 10, min: 0, name: "min_keep", value: params.value.min_keep })} +
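The new field then travels with an ordinary `/completion` request. A hedged usage sketch (the host, port, and the `requests` package are assumptions, not part of this patch):

```python
import requests  # third-party HTTP client, assumed available

payload = {
    "prompt": "Say hello to llama.cpp",
    "n_predict": 32,
    "n_probs": 5,   # report the top-5 token probabilities per generated token
    "min_keep": 5,  # ask every sampler to keep at least 5 candidate tokens
}

resp = requests.post("http://localhost:8080/completion", json=payload)
resp.raise_for_status()
print(resp.json()["content"])
```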

zvop;UNVOR@4?ejq1V(q&Kq1Ho*)>6Z;h@SAr!yK|!Vl&dF#c$+Iwom@1Jp(IKKD3_ zRMQB)at;wv{zrd$OU{&kJV1NrkA#&qbvIV0{BeQJlVpq&RIRi1__f->L{qwaJBgC& zdU`_Lp)-&-?j31&P$G1;& zV_-`Ta?`5g!qO=0$ay=`siwdNcVNQct;XWvFcK=>Eg@ji9vOa7q4j8HNdi}Q03d7` z3+*x^9w*atn$fh+wWs1HKHWZ6Ujk}zT9N@M5!(c}`vd}?Z-*xPVycOJ&#d`*J2)I< z%VPhFphrdyH5aVja?fA;J4vuX51yX@bz{hcKJ;E zia)tCJ{a9aP}jaN8@siD0KdK{C@c+-iD_Rd1w_eRmYBg&)!K)+f|!MXIRljLy-+Nk z-cOGiA!vdU3m4Aolxxp+*j1d+A$H$#CYv;zTREutY>i$Jl026v^8wr(pU{>`F@y{N zD;NJyCcjMl!_xyZ98RD>wqB=&XMN17pgWfXF zcVgE&{1l(0s`fV(W=(0EVb+9Pox{S)+D~55@wM4}zeEKl$L|nR)`orXVthhi@v>eT zoCM#;nNAO7=N#(C7sVh-5~l!WI7cwcCZml@KSx!wBbsI zh&T1&ZB?Ikc6L*Gp^DGv=Xs?Oj#77b;sh@q;(rL^n&PJi|M))_--_i4`;k4?*_L8T zOBn~%nX$bf*mQ3X1DwzGkHtzm&g-lcm`Zo`dzY_Pvvcuf4gXN8hiz_x$hdb;AO)6d z-MWUO#qC+AG|}4}@OFje_xfO&V$vwz^r`qVZeltqmxTJRw`FfE=y9qiE)RF&eyV%9 zbhWO5Dli!})~WK0%&+_r+((G#)%KPt2Bjy5hBhp0-FJr=PJB!_2ZV^XikA-ki|Ezq z+{vHc(&~!1Q?cBw*tZVX=W!L=>SRLsQ~X(VXkIzOcXFJg>3a*C@jc65{97{Sg2lzx zS=T83jXKzgO|RHGy4)^#Cu-?PJS!Mc*vbsMV`*bia}4pZvr ziyy)t!2zp-TA&3%sRQK@QJEEeJd`w)*tzf9P6`z6KZlfIG`#E#?ns+mi)Sdx7q?uwcml{K3+dC+ zS3nLl-BEnN$|cNXpX}CyAcj~gl{D{-FPc#lP7~ZJ=t9d?8(xnj<@ANcpD z;}61+r#w*OeMb>j73VN^S1<~zR0x28%?9$S&iizn0t`yZF^voD?y-RM(1u)EESA20 z+)fO&MOf4-7llDNr+UBiNSvL#ytrdM#o=FU04|>FW_(90ENzil{yd`&I4IQXno&uw zAVj5NV|m)`Zs`z3-T5NgZ9?8^pECutoKh1+z+6U%AQf>6vTlM;2sgx61z27(#wAvl zT3wQ6nw=Kjp_SY;uZHgEPvcqGB=Y)v!=y#eG7-Asl4mTavm?t#mP9<61f3oPuQohB~}G{mg`J;L%a{6kgqAALux_AY(w|zl1QRgzehTJC6z%>fPDK;(u#}2E zVvGT-cs#Ai6~W(CfD>)>O0Z|pK8-SVYF?Q*KN|=tq3A6X$I|e(AZ1f5LHNoJq&OaPG>cCC&F3}(il?} zS*G4}2jFBcG_`bL3?fpw!&f-tcu+5~D&w1kt3}#$L`#T+?p(%6ftY#BO_`9nbtWCt zb&4AtAMe1y7hW)m4k0(!{2VEoLPydf@axj$o3jCNdPtI|+uL6C%Juf8xc-`u8*8OB ztBy}OMNKXn$6GYeJyl&~d|Q|yQ=RxY-TJInmGm%z0APN64WQPc367BlSV-uo_`6J# zxPD~TWc`=xN&m)DH#9v}Q>Q`=lm-_Xf#(h5L1h+MN`-C3o-ZEWYVQS=Hrqys1>}ho zq%$&)7()14|8$&6^L2I&12!RkoFAM_zBAsdF(1jjAPutshZ`O| zgKGw3y6qYPv33*>=$%61JyfO^>p+r$MEj|4+{)fwPsR6(V3wt|kL)Kxc|N|57GItf zt;+as$9pdxg5?eMuR_EQmyaU&_!QxSgM!HJo**NsNMWR36v_u0w@xs!0NN{(DDNix+3Dy@^gr>`#dwhHXX%m(RQHA`K8D;hk zx7llyhKZjrdI=ZAmyvU{A9*2uyos7%F>xt3+NB_lDw7IFy@i;#XC31yU&XA%w*m&0 zw**Bfj;~^FQFBtL35FmY3pCd4g3gF0SZIM@Ax5=sn=YC#PJ@8hi{6J!494U~@-~NQ z+w9j0<{fp<(&~zWfoP=<&k@m|A@CIxQNdIqVx%u#Oft$(BhnwA#EUgvleBh2Wch|* z!v5}0_A%^3HDx?3g zKiMzF?JVYf&^~s{r>sY-01a*{O9aXf?W!fAF>M}pG9}o-;Pc==;k?ccKOJF{eWjKdEFBij5Ta7c+i|cT`&fHAS4(ay z|Db+Kh5YAT;8yhp5E;9w;thz}J6=q)9u8!_c)n*oY3Qz z#@@-4V2vi?b&oC_* z5v13sGmgKBo@dp=A@_j}LEOdDJCD1c{7Ku(jGK~itMNH?>R8{t8nj|Y#&&E-;&#R; zK7UjZlRWzys&|iDgn=VRp!X}7WX6P)V)Y{^@ul!2H;CGX93~fr-T~zl#jH`do^Y7& z#%Nbn=>`3PASR?8Jcpk3I0Cg6J&Aptj@teBw~YDN<5gKVM5DcmK8gghHa;!WBtDHD zyN6ky_d!7;S#DYMCOfWH*`J*VJ?a1sVVL`cL8u?>Mg68QRT{n3!8{u zWa%k-Jd3I>w@t70itMjs2;3pIQsHw87=cWU*eo4N%CLEv;rS;tMFs)C#`SdkeXD(G z%Wq|*8~QCC_%`jj{I|~GRa_#*WOm1=1#h?J;$EvFB@UOm%H!1aH*CiMo~nblP_QQ$ zP^Hk2bYDC}OnP3 zAaE*%VkjDk9d>+PHY@2E5JyYp+oi0d5|w+|Qn>)9I4sd36|0cnKCRP>lnwo)Nq-{! 
zT;?Tfazm^i7c_+g_Y2p_D^y_4!a1Ye1&%PNAts#_N|8e=mF@#hfZoM;{(e^1m{{U9 zzX1mj9PnorC067YlA!7`7{u<-fjXb}fNCYe3CLx06|(@rewq?JUPJ|PVf>+J!%tOy zr+{YAT~$h={MEP6Rvo4hLZR2V`?Wh*KzEAEyhB$x?sQr_SfO4407*>4;*ZS~zL2CS z`>rL`%FBAaU~qTuSZdRXhV(x97!&dy_9Jg++i^`W5aRrm+>U?KIx0~leTq^s>NMae zL&E4vl`q8l@Kh?>(6le@i&ZRVgtC`=jpiWv;{Zu78CSunTA@n*+Obxk^NlCOTg0Jvd zIl{YRVJer5G9nTMmz?Q!^8tjaj=*V0Som@(t}`iq>Ks)lBg7m0Oncf-Q&a^!zJIZT z!Uoo1PEkz)USYkuD_|i^?jiJR5?^1ok(8?PAHnfB^Z4km+rr@OUn>ol&j2{~_ z7A~jllfjwS+iILWH?1`A)#W}sFRZb$K7Zic=(Z)LuC}Xp{15z-!uHD-qKbVNXV18%LMMc%mWw~)|LerXQVOA#ix-|_*f2>!sM@H^;1WS$9MmL z(~9@JVTQ!;M$iPyb^PQ!v)OOAw|y-*!Q!xMs@relYgw~q;4 zWlgutiqw;GlpHYGbkXRlWr!yZ2tIdkB9d5;-6aV`%aU@_h|=?d;06vK!@8Xo`{6{A z=DYC$u2$#EFG@Si(%7GM zMob^NQ8d#qL)he?_ZGpvB?Eb%Do|K5mblhdx8v9z{RQ4)te1nAIpAJP;0%3%H5lt! zd`~9GOJ$ANXi1N2^#vCXpJ`9WVFRj^LsMuyB<4>4mtdSl!}-XS(d^0pym%OI$A+i} zEL7@onKlUO;&=bWieZB-&8G9I3+nf3WJAr$YD0KAu>3Byh_Wtec~FlsDb|&wjNw)E*G&DEK48vE_?xYJ$pfx z+}DioMXlOD$GM5MGl8$0U#ft>YI6)u@l()ur^uU{!xwOZwMjttP?p}C2F~zA@=!tjQ>7~7)orDXr{+fNEdj|7wdG41i|w13*G?1Q<16Q z)tbWDegsaWVN^qNt?&4+Li7GKE(?%qyn~PcPqZgi{$Isgm2Ud?lDK>Pk982NFkS#C z;+3fOrA7-cw@<|Tuh(YtJBsj1-33#?OOv!4xsKH>(SdxzRMUuPc|37M0>#ApDWPb$ ziv@8L55lg<I_W?RZK{Fy1SBbOoC$WPBNB*b}Plh;dK|=7XgY(A@pTlMI=aVTMGxD9F%Z0RlEw; z_*_)yyua_etEQY7zWxP02{@DVPQN&AJLXD+83HbrPhd;RIY@r-5GYfj0TI4kj>uG3 z^<_+Wi)ZxAO&twKM=FWHvx+;A{g66C_u?qyN-OOROAXa{5A#&x>=b!T>GpJc@Ls%E zZ4Vf!9jpU9dn>-v(G(VDaIIrOpBksPvbb^vE@9}12fB*1$LBA9ve|hq@Vd@6-a>pyey%(;+xwkQ2|Y#i7=o*+Gw*RQvrDzh2pYI#@Eb!wwj^6_MffreNJ2q|&X zSFQ)!g+%qQs8s78={ zUgJS(?3ZfgbtDz4#Y^h7{0Q;F3o6{|H9-F<1n$FHCZ({F&xgz`5~3Ijy+e78i_nlA zx}{yKx{!_7z&5^5SqttyOlw`+wXd04l1H&hN+JBEJ%u(^Fdh4)!`lZxi5t@kbu3_M zQ33kjf80?}&%B}9Z8z;63Kc+P1;RgWy#8+B}FWTkkDbdf!iohixzkr z1;h%9+ojW*CWz+I`yc$G1QyitNl~95pKzVtjO?~7LHdUKVSTwhVH%R)Py$AQmbk#M zJz7s)U;QkSVV?Ni8|=+O`eBM%#k`_JdQdohGo?8Ew= zXbACeFOhfysEb(Qi!Zkq;zY03R!o#}6w3r)L`v}$h65(FVWu{6hh~IG3>)TkP8UP7 zqe?3AVQbywWlAxHEkSU`PWJ)CW3w#50J{3cGfVeJshod~eR0wZq^l<64xvQgm0lVj zQAa7B8iy?7;kkIH;zYz;kE-DTsq%HIaa@X0^=PL<!KaM@vDa)KbkICh59G6q&MP9nB^Fb1NzgVSY z9L+UDpEyUavGyb@6A|_G_MF$s$xpyfc_&+K(|=veWsY~D82ZcOXMn$$C1)iU2SkzT zVm|Zw69Ob~yAyT|)p0CizVQU9#D_4dUt zwHrPKLf<%4GQbMVgF;>u8>%Q{aw>Vrm))m!C{$uL$DO3umU=hsW}BJb?M$Gn zjy*A{)FQM>MEyM3Pq?Gz{{$k$UlKBLl)jm&WYe70W>?LI!lV+kDE;*nx-Q->AHX2? 
z7gU+f?^3QQFi^q~&y=QJkSwk7IwepAQ;NL zfwYJ6*4AnhqmWi1y_9M(T`85j-7^+J8zJ-iJFw6^XEftTgjjRH|*hEaNkY%oLDn8Gs36I!j?^hh zOq$vIzJ@aM7Y{kuTt0ngW4XOgHN*(QUOfC<`)qKCVm>|2mX}sXHY$avpiC8<^$E&| zQCLKIWn(eq{1=IS14MKwxd)r4!z~9gR||zb`h$BVX9_o-oEAV)@UPA?HcDdr`$UG~ zBAwD{BmUHuoo9BtCks6V}kSog3_KI^;OeHOpzCv&_H+QP6l^iL}qrEUb3qP z3m3y1CCfk>`ncScMGJ!@uUglxdCQn)@;&GEQZ7Jw%dz*>t~*mcZnn3AGl;_Uo&&M> zD`=1L?6dz7_HMkQ4{c}D!9uI;O2{vuMMa_$6ez0tx@sC@S!yU@hERANyWB6JRW)^4 zamVoORR~bWUi8$WEKGlAThOXvS>Xk?ch2$-PO);U&N?L>P-MD#w}@~2ykYWbFbbZJ zI(f&RL8!nH)%4ip-sGPtiKT(2!}VP6B0!WEelc#Cr1ejNEYh{d#do+(_lR0VLCO_7 zO=;j*h}ylNN{eKm7#F8Go+xLwW*!Ew8Y>FnS#^Y(8DanNP4d#{64B?OQ(pQbT<+4rm-n33}@!)dou_-(W5vLWT~6@lmB&lA-?$!@96m z=oA<(J|ggNnvav?-e=b*#!BI*;#VmY-J_|yCDcHiMq$Lx$njhZ>)-tslbcLbVM^j1 zSOkfmknhlgJw4U~Bg8|{tg|w5W6M>+L=T-ZCeuoA#f-YfEg5W*Aj%d>EshuOt0((> zysBEJh^8cDd;Ewxm9j~e2;P1OP{w1)>82@xZPt%0xcg2kxO2=qteFDpIAt`98j?(J zV*cB*-VqNY|En$kb zl&88=ZtI~gu(^yM-mNk6anDqYc8mTOBLkxF%du7WXj1ReTlM4oQf&8nyHNtB9RnS^ z@7{wFTtUsv+ASHm#9j7hg_tT`V~|Q9E-c2jrS1Hq8oOrV3$c?Di_s+{Z|M~foB_a- zU?Z+X399Z0AJ!d|cz-}+|Eu>MIr2=Rez<)mc5=gQDT%}H9>il*sRg;l1@gT@sNnFN z9(cY-f@xS%EYJL+7@{GRY_1^jnJ=yRwTi(^sH zSMy+I`HY@%D5-SW+lZuMyse@;6;AFFCSI-ZyUUiTS1WGh5EMCas7<&OC+82D7DS~} z>`^^(GcnjxLibE_S*#_`(dRhyA<;obkW~M}OZg{?&h-;RwLs@mGP`^WF~i zRMi5I?>0?a>*C@4c9iUn7|x3U+E&Npqa)|NX#8gak;zWv#44`$VWBcV_dfKoP{gI6 zPBlz-jW_rOTtnGI2qRw=;F=71SQ1%O9ZDaYgHUM|2}`n0hQ8UMm-rt{@0mbvw7k^{ zm3M&ZAE}U#YP_phfViIMu=pF% zn)Jeelodb}CNGjFct-vDsWOhQk#tL$L?)tJQ8hOX^WC3Nd>QLe1mq}w(Cxh(4;Jg- zB3Xh^I7@1d3py#;A9dxw=a;Wsi+^ifaoW(8js^I-zPfbjK0nG-fvj!;N#gI)FYh2~ z;%CWt&N;^tUgR`LtWSH+-1gE1f zDIPp3pKN+BEVoFm>HpWPJ&LKr1!Kn?2^$t?hOEBYR(;iI)+5GsE&(%lbXrPA70AZr z_2aO4B+FBnKxG1;{(G;tAyVxtj%0?PNGTOhySI18{_$y@XmsL~3bK>L)SR_T5cN)6?SKd}Pj-fIc*H{OZfbd3s#bE67tM~*1l$XOvPzG1UBqeCr zMYSVAntk%i*cyGfCV4VFMo@>%l#&N6^RbImf7Zw0SG z4JLg6j6|I(#OIyTV*6D2lw@k=NI;rkYSoScyaUd@>_eZ9NKEI&5kA6?>>VJK><#|@ zOnWNMV*$%^&_hD7_JM+vfwG&E1eF-vY{XWaMwk4)@5rm=>@t}?f)jUBmD z%&1jOs-WyYkp`Rmc$`5uiGlmE_UG$VDyZb%9%>!>YPo08I@C!WJh+n4=AAXw@%g3k z@7-P3sHFXVDQ=0!QaPLU*~L)l+a$Z~R+x9gJ`jv@@7*S)e~> z244(1_P)e&39VB7AHpH-Gkj43+vf_=mpN2OM7475&PvD%2HJSQ6RK=IY_1c(UA-6F&tg zsi6>1K`dm^BlI-=PSQd7gZ7&LI861fAq+HB53G|~2}&Y6NVmk^bsH$M%e>aQTn8@( z&aC}ZSB|htYN{OUH8S8%NlkXWpqgSu`qF@!f)qn#ofT^g3JTz+kHylaJsoP^yvwyf zCRshG1yGk#nc$?B)%^w3vm>)r+wDu6Ye8eBKL5qr+CjHxz*tW1-69VJ{Q*5pzBj&& zl%6Dow06Uig%@3?4h!mMwo&N8HWj3 z;!}5fbf?qVTjM+?HMkRAtM46X1tmdulZW1PyYwL+?WbX9*l7e^lLqdPVy)>ZWZe=3 znrP`+k?dP?L@-=3d4PcyOo+oOd|7&H0H%3w$xV+quBAoor%|)su3d|B(yj{gl6QC6 z;t(f2G%t-#l+&G-hy#p(kr#`a9@FdrjxB`S>y?T&f`xmFcgw%dSq^ld_N~MZJvo6L z;=u}GA}G?UrGqnN2*+8!`O#XxRTf#tTF zK7#zTOw~T3N;A<$nBzyq0$1S$!Q&+PqrYkwKMz_!e{2A6NFC|CiLxkd!=hT(fv@YFWxVMuM71&oL{Lji2+QOJA&MdbNsTV?L zed)a65{H6-zyaaP?3zxR=>B|{t@1;c7^+~b@vA`bEg^^`2YaV4AJ zn!LCuXZ0*BU7yfTkWvV1DL-=@?MRVu7!Yw}LTiF&y*njH}y3_;t~mwb7* zkn|ZbQM@_6)=A10^sDbpuuhd8GKciyth4kopQiTRM|IPW;;Q5P`mMxQ_DcA@N825f zl+pZ3EcaD>5FfMgC7eloWEB#6nP*%zI=`E*3D=NH+>awcP24M(hwCs2r9h>FB!<^u zF?hWldF5j~K4Xenw@*!qMN5}B*I~AWpRhMYtOh{{0q@=+Y8D@sVQ6T8V#0-Xiep$3 z0_}SKc|UdsgD`KK8tld># zYU`*{s#c*veE0}W;@q5I^s!q^WL8bY zbRa0F8R{ioMekg1Np%>-D3!f8%?`=}{5an}Q2`4N_KV+bXFl{5l+5v-Tnh`U@qJoM zvUVQ`tx`PkLOSi&e!>m}9U{6x8VJ#yl~^w>-7n3(t7~Q|-OdD-^Dpl?a?#-W_Y+j|m`*hx6;blG6L^(W}Xcf3ZgTA1{U2j4wv7* zA>VPECP*3HJW)raCHaT5YN#=V^sKqC8ip0SFNH$;g6Z=>OI_KjB$6I`F7+@swlX?P z$C+MSRg}Y%3_1#a+-z$%<0upp$KDK*>fd>-sV{;p(#%kiz&JDh==O8OL@Ew?z|6Sj z@s7_v(LNmi-oQW%mHzei6;oK7D4mRZ64=D!M{MCDJ+ol2+*TPT(~}ZK7@Zy|)GCWN zHl--Q8@}`0W!8GvE>z#84{=CQS|DFz*GqX6rr}J=y@RoJLE=74cQqh9e(D&twDNSC z(^PjSh=L*))edI4o3~tnS++mz8ot=@H%S8l8H`CE7 
z*;%Hyc`%O6tMtn!m-nl<1<6c2cCLqGgD{ znmWYO>wPo5<=zlP&EShiK};MPfQvQf;dtEjx{)Jv(+qCWohSvyM~G*orsK~K+Vf97 z=_|88bZc`ggcb^{@=op&#Y(B7U$%~{yj>5e^OBq(?}9*?3Z{^5e$fGO5c;ZAA<$!j zeljRM44@!C@Me{Qkw%PdqcIUvu@8k3w*2MZ^(%9|YB|8Gydv2iYSm>cTS^ocuErcl z8Y)rfZ;g8DNS#;Wt!g0h3xDA3c&J5m)*4MsL7T|owM~Jq*A4_b|&XCJJxdMwX?(> zvTr^~Tz0-kpG)5h#D`;`ueZy7r{Z*u`pR0bl)?O*E`@zGR-IWv{yv5cMP2@R|4^wj zUo-x$WRO>f=iYtbbX;h133zmJcI4;;SMi9WlReKt&EpTxFaCoMHT=gL>>Ui_t*9B~ z^HMiT!h`PwpZ4;+oVoEubLsgZu^VKJ`6s5{o0fZmaqX5Y0Hwi*-kKNaVM4e&6z^S| zA;O0)*O5Ye6zO1U7E;*K3o!LMtrYSQ_Web7#-HVKm6Y$HE|-i$D?H zn8xTKyn}|cMj}#pI8IZV`M6j8N3OH#KCRMdoP@~lH#Sa%npt+2n&uY3lMo&XPpI#A zW!vZ|Rus+-YPcfRfK4fG?Pc_t!L^wm$t6DEJ2P!%I1~XP3Yp{EgSYmV4sj~I{c{H^ zv_en^1Le%%!|dGHh%Wws5(^Ed43oJ%Z@Za~=zD35iul&B2O9#k27f-@rYdbYfAKRi zQQfu6A@$x)2fJ z<-Yg$+nEB(wHMH%Z(n@7eemu0uq>D&26oA;b$>W+3EDYmL`)UHmx;)G2KJ55Znw)p zZpvQo1$j994uy^95Fu@6ndZIHZp2zOdYIX>euH}P!n}7=kMtIV@p0i>&JOqEv52HX z-rh?!K1pSbaPqNdienDTVXHZR%MejXsX6RH*_<&qGa zTAGJRTnAegU|Br+;^8~g1_#HgQ5Dh>Z69Ro-OMp*{Q1v*wtY5Mi4;CaOBdkId3r35 zen5Ak*f}&pT&Ggy;`f>X|CtEWEK$C_a$fX~zu!QJg6fP@MX;szqR^m5*u{g>jC1my zj*gB?jkcypq;yFFeUQOOq65X_>uDELX;Zdey6CcA*8I>7iktxU2Mww5Mb3geFAXVr z6+h#Ov1H<7suLUL!3AlFj3B|{Cta{EJ9BbPj}0(VGb!KY zZl-#fJ8}|>SvkzIFbOuedpW*LF~(`S(C@XUtbfmOW|5XleLA<=(z9E=Yis!9ZlpRt9UoE6v-{Dd~-|C*6^r}EeG zo=F1xk0X>#J1-CD?%9r=v^94+qnN*SznO>0R6<4Se*9tjd-3>txH9wq&3w*q7J-O6_SiC_uA~ z+ExzDf?@&mfO^*~t+1>aSP7U<2F*LXB@GC~CC;jcP(Ao+yA_{Af-kkl>s4tXdML4s zS5J7g58;q{qw{K#tzdl2YD#c#aMFuQIw!${97L|fbFL{_rz%DBlWOmP$*KG)K1CQt z*|R%E`E4l~Qv?>Ro`n!`h?@X#wsGaud_qq(87LROjU#v(7}%4h1qj;`8Xuf=LVxjT z>F!S^MexTVD?7isyMlJPr((N7*f`VzI3PdI7`R9v=58ADGE;lX_L`qy_g!2XsG5rEA z>#+k&csiNTH;>)Yzdg%FPVROJqg(f|3(-p z8P{=VKGHrM?1EOSt`5c?g`_+?K6=*^4Ux}9ilw39g(FB#GP1`jGfKHZMr9|@i-X2= z4_XRW7mL9RIx2xMs%#OA8Hl8iD@h=@GZ%FU6-xBuw$MkhAaKBYe8lImuO_EieW8A9 z(elU=Q^g4#ffUr^Y`XV7DN10oWdXp`Pqrsx`D9N%_^vPN77L6&VP4h74!r~7eUw%A z9^ymr<2(fqLQjpAtIf8%C_sx1VsT>{W|T|l5BIavVtx~g!B_!;5R{;Xs~ZS$1b^}; z?N4G2DCSvGP;e+r{P-ST3rntrr+_HVT%BvLaS`a4m@T=5Q5*GuSpPXtHnOa5H5z<=LCu0}H^Y$T6W_e5s|e zwu&8}0z_%})u4&+@9e3LbiiR%??2AnAHpqy5GjvVHru*S4YLI7AGEF^;T~!0w21xn zjLu;V;Lvx208YGRU4U5y=Yz!+WG?OGrWD%}qw3EzO?Qxod-Fey_6Xm4tijjs>jm~d z=5Mij*~BG!)yD^_i-L zlYSEpNXdd(28u14QR0RW0WB3AT+kCQpgVpumY^X>v9bs4%R%O$*^0?`SyWE_ui#{Di-Nta{%hk>k}=M{ z1g^rxmlCGHtUo%ujeNwxIx3IGdwMZehbH<&EE^bBl{D?`XKmXFSVm^`xY}3*-cQ_> zt@rEkv2bD4Hy$pI$9IM8?X!w>XYhniVnw)~MZJxW-|3N+Ak=UTcE?t*6`#ZiWrg4<>dgHTrq) z@)@FOUex#y!?dG^V?Sz^1@0iNCSoyD)rBIf{9+pO3f{`Q%PFqVQrJ(SYXLek`0R4k z!>SD7uj-E*TUSPqz`cKz#rv~3fp^+T0mxu>aqJkUeBVprSAD1uHjqWy0w$1_mb&Fo z7MPv~TrCH=;<_&*JLxCQ<6<##l>mTvYb(8A!^b2q98$zDuoZqSgW(sk`RCimXtmAzoIiREkix9APq=w~rSe)& z*ATHv&+}OjU_sapSHh1>bAhFb596S^mt}JG=!L&47ZFz^)`c%r9nJ-uzjA`w=uT$! 
zMslcqOx$n0A5w?&3OrID*lZF>e>6&~xC=5~A z5~JFHZx0QI1g`<(meD)=!sD^U*sprCX6G4M60d|zP%qPI3CH$e4R%{HCjqpNJOMV{08F55H(0MZ;94AEzqKk%DLXhp^;ZyC& z1T{VQ3eBecC&Kv|Fxg^umIE0V59JW0VXBUCr^}Tp7y+!t&%(eMSup&!BOPi*r?7Pi zChp4(0c%ruMwSd0H?FD1at8Gj&3!43QAmj;mxvhV&&a~X-!J};cJZx?Kfm~MQ&}|kn@tm5JY>cG-s{@dHMo87;dJ3H{yiry-X&{H z)QNShzvWRoLM5LM)-GY}KeWiiXe!;#1_3a0F30x(pnqh^qnc`2xledH1%G}fpH;FOS zyvc`1nv~9Un?&?*ka}Py9{2px7)PPYW*Q5d%hGr5<{J5-bdMy(wU}dMvV|F63?cbc zuf`+P82N|3bn+i=pUDdI?~r7O0bQ_IBxXaFW)C$`3XE~ToidZ4m}It_)C(uF6xzbW zs)HGhi6PNxBi?`wnU0(s_6jCHAX*B0c!BC*yQlnbfnJDiWS*++ixj{wL3S2(@YeIq-Pm+V!@z7C)$oQwc5HY^lUnT$Bec6d95h z@H#50MtD!u9F%f1%c`A)PWFL=i=!a|pnm^qPw+>q($Xnwy#u$Vu1Ma8e#J(kHV(Z@ zygn^3Lc3-8x7urQk>mtD$^@-d|)VYB&f_SFDj*9t-6*(|;{-2f%dkJ3up&S=56{Qz^TyqPpe4LZ zY`ONs?7fwI9$y3veG~Z?5}`NR%4`~jDn4JeX-QYo>&ewPfNan#JkG7bA#V!d@io=Q*kdm7EmQTbR<7i5cLwW2{Lt0~S#Pwp1D z41s_QhJR?t-~Ovs>5D%1}f){$M=dcGtHnDGFFhm#kZlpi8r&xfE ztGg{@s9iM^<1{ONtbP1BpSe;`>1;9jWLzvQZJxM^KRhPamkX8kSETrqO8Rct*s$E4 zF6IX=)eU^%Ora*9ITwYbN^JkpU$l!K#~T*|q2fov0s<&|D@y0bPqc4?@8dDjl*NDB z!JRvF=w}nrbUa_A7%OSf$!uatJ#f58+*tu;EwEP0QGunqy4J3Fe^=#or*$8l@>1EY zFg)BJ);!~-t}y?i62gBlm#UD8#O`#j#8tU@!>GZfA?qs~-l$w!h^c5dQ+}(YM13pc zYx@+KnrffQC7`{9vZ6)clN04yPElgUoj;>t zZ_oO;qG>I0dN)wT-@vY(Wgi;lEG_R@5>trc+76XLlryB>qa~Hf1u}TlGZA+(Hq_I) zl>8*#8Bz9bh~JO4Tk&AjDcx)vaCP&jL|XCh@S_Nt6YdI8s#In$E#i>)kH5PvJz|C& z4!g(wg0baR{lxTLC0Z#7pY)e+@M1gx83^+#PK{CSd%1#KMKX5R(K`{Gj5v3yd18Ir zZKGfe$(|{7TJiiR+^h0(&>pi>pW7f&7aj3plah9$c`4gM=EGpc;@12;Vh~t;z8Au_ zxs+8_459Y;v&?tF*S*8uy+vKW$TSBrkc5L-pv-a{0C1t=%Pl(c#2*ww0+X!F=Yd273^Z2OqD6l1~K z33gh$>0a*w;PglE^V(Ej2|8`hNyHr`cfN@O2L-uEY6Dk(A&oI8k3nrk*kb>YNqHFfZTB( z-943)<9V0vv(f9KyOgmSS?Oo72Xtl;WZS5{)YBAO$l@(Fy9)QyHW1B^oMpfYMJ&W) zmrdBbfe81I9G2A*132nDVR+$5Z2T~2qm@U9xYq`-bcg^G zeh;Y`51hP%V|!9g;gGpUle{?OKTq3E89I(I0yJMZ=c)Dw*(~EL?b+Bk!gm_)pxFbO z{JTC!HgzI3RkJHZmfhlbqh(%(JX{=@${5}auB6E?XlMUKv^g~anVP#9pWj$%e-uxU zNt%7H~yG!i~B6XIRw8tv=m3fbD6cSO#IYr~M?Y(%pS`!u~ zLNo4`Cn*kTTM5y63W}}zNk|`_T^_Ivl%WWb`M6hY0)R-1i_6y$%6U^)T7^XuKq)36!dY8yT!%+{CUYYfdY=Tfl6 z^!1cnTO_o7$FU2Alr7P9*r$iN5JWybg$qv7@{vYRez@ksf!c9fP$6+*^s>0a_(DZJ zthlfp0=JX8W(w3V$n?&FN-5;zs1#Rji;Qwjbj#5wk;mSu0*RI8hC)}|g?Ky&O*UKl z={@ZUjkKoB1Wd0{3}UKg>xV&SVrQt1_^I=GA}oow#~->ETt2piGWVTDpPtkV8j_3J zpyGsX6A&};q3nqPh->m!gI8YsWc!p4Q&ob(fdzluXji=RD+tV;QNexAnJut;K)S)7 zIqGe4%Q>U=fup%%7JH%-z>8bcy)C*D0i?-93a|$!&|2U=#)=YBK?82TI;xs6jtI9B zk!Qf4lQ3XBYe)T(7Z*%$hFiviKR>{$l3DhM402B0~y}4hrN^^cO zWSkP==w$M4SIv&RyU~u4;S$@v+Gq_qAx4%>>9Tr)lAGspdWzk;A9?tk&XX_T@H zC&Z^|D))*H5YL;luMP-h8raVVrGE};ZCH@BSr$y?KyHBBp5XXjz@5v&?T?l6G{y-5>d*g1PJ{JBUs`r?4c%@y> z0jkO=r$4h!3d&J|Xr71_R-+i{JExhYq7!mu$HeDjX#*C@#FjCl?;Cyj?tB6?n%9ZvB(?tnV7Yj>E(0Jq{wC57Qu7N{VZuz6%On z`4g)Wgl45pyW zW|O@NVY`H!bpIoDMv_yt&1Ulhm7*=#2z+rsz_Bew*o;uZK=t=-l6)w+(J!`b^DYLRR*k@m zW7ewUyIb%K)^Kb>0Eslw1-C_Ir{F+tP_JkPfh+rdO%>v5rBNm^L~P;3U613qgep>AJSc!l z%zOCge_NUizSUOZQ|ORf5SE|I1SRBa6k9>r6g8M{u)NS+B@{ITy==qL!J3`IA06pp##mJgP~8mmODeWMV2%I6X<%-&L+8?kA`lx4uiO1l)V(-=VBV75a|B7m6Mb&2~U%1?;4|5l=tQiRyNfA8P-9+*=*K3pyq8KFO8;&NyWnaie7_%$+-~Pt83?hJSV8`J zX+lZRbva(q^Fa6fXF~nby7B0XhflQ9Way7E4AkkwpTA4m@$9Tv!t{?&B*W*1GY!hY zB&1kCasT3OZ^9x+WkRF!#ZXUd`((XvviErpA*2ybrZfP^Vj6CN zP{dQKr2wdwn(?^O3UkbL|71y;UkENJ53rZUpDqb;NWtX1uij0%hDA`m_58^=mWb8> zrtT)OU&M*_a*_l>9~iEoj{iWkf)1R(F48fy%^4duP10E z#BEw@#u%ByvZ%9R-djm~lAht-?c=M)b7e6+@R=y0Dr8k0;x_cZUCWJDyRA}cS?Kbu z2nhu6Qz+?|oX#LK%=nV&4K$2Tm(a%*a22z>aH#CPI>~I8IA#e*5Az5o)iqS996}*3 z_#eT=u|4LMIF5UDt*yQh=g`ANbbAi&+h$d#BU5-Tze#P4%%HUYAiW!;k*kGI>mM~{ zhlowe|8jS+MekwLKnY4)?in0JK?mO@aN4`e}|l}HU1 z0_#Qj$9{Ncy)>`*G9QhHJYF7&^!rL2ARTz0fBxhAPyFKI;a!Yl`h3P}@q(6f{BUv) 
zBgd<0T>XlVA2!462@cf=v=cvC&JW&-e*rL?UnPugW#ItDyU9Nhtu4-MPa6iCKHk?) zCXu?5b3U;)9PG9DH>Xg*EWNdo%_;QxBAno@5bu~+x=0#(iky7eQIUlJi(=6i4^>o~ zOTdZ!TC5nI|3rJjZ_tcZdkq&4zXtqC>_wkSGv{SYE>sabRtP6ixO=Ba&ZAx+G&*$X z=XzIb7O>ydDaT0||=qeX0Q0KME-i$qyl^)Ai z7$p;{nhsFAKf*o$593L??-k3qhF71s5M9_wab?lyX=#yWsFT}zB&G~;=M1@zu^_^4 zwQUh$Rx)heAc;G|B1=*irizSfvtY#Tu z(xWu)`=qMn`1;Vn5>^XN+-X^#-o45Uv)`fAEkzv8=)8=}!s_L+s>;u-i0wukW}f9V z5Ve+HLzOvCaR%ZxzC}7R{E(v^q>@idPGKlabwMssbA8np5DSbz6mx*g^<=r~$f>Jv zL&48rq^MeTDfyzhHI>HrTF%wqv%6dVT}+Lz)rO%wEgj>wVF)h+{RE{<=0#No>$`EyFt1*Xe{UmPHzMMdxwtG17{r?$Kt9JUU?;M37OwtD)alRc#zsK)(Ro+X*ainIC}IC&5pt`^}Lw( zxi{JWH<+;=<==^Shykk437l5}++4oEyhTxPS$b3}G|=bTM;u|}J`DX9p?qwr&$d@V zR$cVJ#!Vy=nm#6`ZJqeHPsXc;Z$i8Urx1g{LThKS67t$XW$+nY_jIa}n zQ5L_1`WQu4+^sLFsRm6?gRQy{r{9s;N+l%IB3y?+iKmRo_^s62fin)rRkbRPaQk=} z*MkG4DSd~f>jHD8ACm-#*Q_HW1(jw)kFzNp6m*K1N)ShV`}lL552oLME|0(W!tv9z zl2kmJ97@7&#P|WnFbP5z0YrnJ-8*r#FMhJUU`C{7SQ?YKNswN}P-$Dn((_X9*H>3Og zWdO30*clcA%JJO~e$({-f^h0t0Y}ztC{yF`4fx0s_a}5fuB0L=_V#+5yB6isTSM40 zS;xWWTkT%_^G)kY>}8H6cKYc5YVPqg1Q0(Ne^Z|NSs%ooN8H?LQ%w@mJc|R9D zPy92T0ogOvqVA1d>! z`B%-mGbl(e+=~RMFodg0Afn)hMVyQWBqZjf0bNU@G$S+>|H{rB$@(_mY2I&6@A+Mpij*gdzRZ2o6Xr-5>vEO(GnS)Fn;6Em#K)HfB024P& zlZ6##n{J5x9Z@_7QzCuF$wJDM{MgI*O#6)QBTh)ko+`xxqsKwgYUZK(%{+`Rk7!3) z0u4yq-HeM!H`JNsU*OufnGThgIA8HzG#4RGyCjP$OQ@(0A1|ovFS>u`Cj7FdKesC0RS)EFD z31cVUpy(LF?z3{R>v+0^y7FKjstq`nppQVa1PGrxNP$t-Wq&MP{``Lq`3ChQPW`t@ zOGc2P?aQfw63+#jWAt?h5$z_S|uba-!ZTwZX{HLrWnvKbEZu zbNzYNSX96!tS-CD1Qn|bC|&_7YxL#0aWnSSVYKnUZdT?z7yJO~O#f4c1+bQ|VtbK# zR~DwM0BErHdC98k%@q0wk{X-gC&iG2Z>TUK$wLvk$ zri9>3F{r~L=LOe8T$Dw1L&STXA56Jw@eoFO@uk?N%A-<=3r4K_Q6xmiUYkqV5s5?O zi&uD!fO;|uVMG*DN<{?OZqd18hPL#^L}n&gNeH`vo4lwu+q&tsh6nO)?|2j>qCb}} zC?}v*b(bSHO(+KWJuB&Pjjv|hEArU%wsw`Z%|`v=3ox@OLUfG?}YjH9xGK-7$gr za0eaRry%9`{R;3qmN4YI}ATPXjXr zZuJM~2!58%!Wxqr{iju;K&g3=4Tv zBm|%Ie0DhzJQ$5D@bh`wBzHzMXjZ^j=}7E+jaCn9)5*lF>^7 zN#rnV{9Z9@3Jgkf);-x1Hj3XQI#5oRBV4l0i2yI<6UN8yg7q}edjQL1@T}({Pcty2 z;;7kK&{tvF9ALuv)2S!^Bn#wz)wY}_W&k3^XSJi9`Hl&k4RJ%hXbObf_S!gsnt`HI z_jN(&@>48V@aJlYD=XP2&3h>h$6<_nx6%HsMTY(2rL58L0a%1j?o4dWiy*6l(|zwnB@5t7$HwH80_KB)?#pK`gQ&$EV1X#q?A;M7#K2oEay0k_V_I6)8f?5bJpia{4a#xr^HSJXyS%PqbG}PJy0F zBA*}}_sHjilO$WsG5bZWTIS7nTEQk$c?UW6R^u%Gos;q8(zK|Z#%JB^Q$M#x$lGI$xV);#kf|Bw(gmI@jA=&vq*-X8sx2}hF~ z9E##=6BQYga}1sWoI7=2Qm}h1SAz@dgr|l@<{ie=MvKHdyZbTCTh}f7Lyhu|cl1uZ z8IFL_=z%{<*?tLDHOb`N2-siqN0?d1GS8MCx!N}047y3Bufz`%wSlk$=!{BX|3#q= z?aJa?LUt7VI6*=hq5b6tKlLVwkDa7O9{Bzr{8wT1tOB>%hHw5=Vh09|s2_9!e5v&p zuEML8EjsJnAB#VZ^@yCp>~_OF%H~bom)i?31lJ(!r+&2{1%vs4VA~HZV zc;3iOS`A1=R;om=c|H~kRuT?n5nDy@lm4qyC-x}CqI=&`AndB zBQQ-3o5-0xg>U~p4!76Xl~@ebEXB-Ngbm`u(_V*WLXQqf7}A?zF9{9CuV|C4cGF~* z*eha#hBFIa-qaYHo|d2;zv$Lrnr9ulnlY{|7g#)C;M*;)jGV4uNtC$70yHybDL}Xb z)~upE6$;IG`dGz`G_;MKYxG71@+_cge8lfG>dHH!!ilhG@t>kIq>p#Br~U`ERTnt; zo(%J2hT5A#7=_4sgC4uKC{XpGTyFO+k#V+Hl=KFQz?;Ib8-;;AaJ)Nsl`tIQ0O5fK&NSe$qv99(^HZaDOWgO4gYkACZ7CX^#+Me zyxpFQOGz0+&9Jz0+S4a!B)pf_18F;}s;!dR0kDMx21UwA3Z)2#7V_s7U^*zl(ka(NGIAPKU4zwsWj` zix#76tJ{9U$J)o@2SJ=lte!a@gtrbHs|U>^&n7JxuIi)&x^L@T7ghKcHCZs}j(6)c zNC-N~ejJy&$T_$gb*0t;$C}m6;o$txOwE+QMJ|WHjTwhd%@|>($3B)Z3B2HfYi%>= z|5|$@XO{Gc85+qrBKB2uFLnQnE|2wgAawB=R2SIhm_ezXQ9P7O#;PP^YdzdGSvBdD z4N^Go!A+uUUjC7af&V6#|6J{`!{`M?fl6)Qb*3f7K_bg9{?SK(*MlnVRnYMX5E>pE zOH3>KgCGfNa29#8TVN@;7CZ+%6h}ixJ>KJ zIyc=L<_odn=U-dKUvJBVjuXWpj&tdDZJvGPhQDKKf}R61o$(@OzitIL;Jl}etJI2d zS&D@9D-avxDQCq<*heB4`MbiG5k%7x4eFI`MYdAEQ<@rFqqVuC4Co#V3X3tq&AiVh zClp}vCQK=!^AyAz_L%!A(R8A$&TP3RZw z5+o`44O~n(-2ohg6P+?HFbVz%_b>vjYCP+Xpt8fB=8iL*w76Cv38 zk8}GT_Y7mL7vFCeKMArCTS-w*w4oldW*}JbzSO4nP- 
zA;~fB1TZvF6r}Fiyf}kZOee58!Q$%f)N_KddeY_xC9VH?c(pJlvgJpughqyV2~sz{ zF1J512Dd)cZha_T+>6_%$Ls&m2N36rX9=x??jY-mY9Adx-o$#aod%#stgsh$947nh zk{57lK{Z$0D&I3&f&qv*DndtQUFiyR3DTC6xCsayqsnua4?G=rXbZC+TTz&6mKR>g z6Ci}?e=Eva5lv#Nqc{Ge$$Ih7WVzYSN9E8r!PKymQC?)j&O*k1zGW;M4y>b~Xtg1C zHhQjMy9naa`!jY%3`Ua0ePZQ30AWb>pmU2NrOsS>9&mv5#~Q+Rnm9rt9y@2{7RldI z8CbmM5v_236I+G7f6A%mkKX~ha`AJY`JragaOdLoNYay%aL|!o5W&!IymVa1gp}u_ zJE*Y2Efynb{N&*_C-y_ZnTh&l`4hc>-Hp&*uV|D@+*_ka$ZZ^EX#vp`@iM8FxGb-= zKm2(78${lcWWN$e`jz%{&!6Pz5jPJ`&suAQJW+W5hJ~?%tDxsmWgD8-jX<=YWAn7qeUbjfA z70@J9tLLawIX5d?w#+sO>_9-7ps~O1k>+mIPu|wh34Kh6su^@1fe|dh;t-K6#iXmS zSDe_ICy>)k;@tcP_;iMK6sJtDS_APe9^Psnj+3CjVJ7Eg`4%Y(l#YnYUV_pW@<5Qc z*jqo447p%!5Y#A{I#vl%rleVR=rV$RXhVi9-};T~)`{~)H-vH-)wb%N%}B7v-uQk! z=8nZFffTGR3>!x|iL_vP1Sv34kJ7Z4zcrisVRBY*APY*M`#?<{PW>qX5yeJ91u52X z7MgIQJPf7RgB-UAoJ4!Zu4wdF@t(w1OToa-0_^6E$kwmU6a{s6($}LS^0}#}{-|Kc4{;}J4@+!p z%nW=cKa{%P(fq_4?JWrbJh*ilcHs>{x!nKehq+sTy`{5}DC&QeBFbvPV83&C0)9YqqPXYlGS~C>^ zqrMc>*tmfgyAeOp9s-O_=;8E7|ek5qOS1>;Aq&ks95HAswxBF{3EPzA|oKBgX&eky%!xPB%fm9ZZV z+qe&?{Jr@;KSvEYY7%&kIO{UN+aVlULMbqOCgNaf*{NQna((F|;n`l4K3Nh73if#8 zXgrLPS!qvrbPiS{-Ykow!c)&d;d>Z0c4MdGc=kWVzDZI(mT@-KF=XMu^*62XfE$?g>Oi{?NTnc+AGN+wNs*kV=$FexY{ED-Ge3cd{j*|b=+ zq6{*s_I6Zsma?mY1QnRg6A#*JmR6KfD?TX9ae1nE7j+g-MO@J8z}6H*RoA7xobjMW zY#o9g1fg0Yzukpz-jgn;omWx&4I;(fU=P0?S^Re=Nab0^AMI**h8+T7PJ`f&zS%B* z6hG+6DT;(HIZa8*yw2z2V;~8t`+Ix0^Sf6bLHTT~3TtnkP%Y*Y#_wri>Yy?3;wkO+iU1|`N`S3;F)9#1 zPWSp2i&bD)*vt3eNb}O}kSO#eRgPQ!r#bXC8e+#!>1a)b1qv8I$bD?Q%q1%bRBq07 zm<PMM8Rg{+V?6g$@Z9=p`*M5|^hlOiVsuVR2NYZprZm}Z zSGx5qC+PtA!ZKL$ZE2aoBF&QO{d`p1l^%stZGxA!KA(-SDH1bFe=$seTpY6m{8?B7 zX>G5I&6DXbm;h8>*OTaz;KI=f+964*>92pE5C24cI2T^FRsl>(IS8gR)wFdpC~n;l zOpNEiuqA4%Mq4iy63D_$M&<6$%jQ8b5?rVrF*jO$BxN1=jf-}jhSzPQ9t{6e2*n?U zX^EZj{oo5!{v_Sw$?3b?J@esoU+tAN+`+7hwPG5}A;ZY@^SAKFl;ST!Z(nJY-xSLe zbCSSY4^ah?G$bhgE$7$|+`w1iU_HNROZkY}c2ij=*wDVipo*fyQV{zJvUeG6xq=Qz zOF;RHQMRxIOCl&A)k{eyf@2BxQOIc}lUZ0+IWFdM#ccMHQr}3jRJPB9d1w)A+Ct@QJUHOTv<(Y@b_Q)VY{zFLgmLY+A}X{3g}_DclvXcO@tS za!zwjlue-&2IC#~OT;IZUmLX26v8DV4{9`F9)g4B^wkRv!Tc{$ANEYy5cgAIuk;A!QxYX<6}G#{5!5CzL!D+u`FcwiTH*w03(We9~9C3&*~l`tkw4%vBZ3`O;6 zGiyvt1IZLz^XUB#XsY|G_)!vw_XPu}_0A8xU;(k+!tP0pwe%l{xO;q4W?;eQ9BR7i zVYwpL;Ip$502`)=IOH^2w8TtaS=6&3@~qnIf$q{m}zkAA?e zoF7=(BOD?vImBLrReMw|3Asiofe9%?7q*nU7L-$%;jb^g7lcP+6x*7CM_g4FmSAi} zP!k@9P?hqEN1@tzLH47fTcNp+i7tn}Rz4#2rzzoRr7XwUl3@ zb}CRWh@`AXx$b>T3#d%r6091vF)M_Y^hN3)rDOG(_Stk1zjeW&A^o?-KZ4LbVwSrV z+>=~F(3;TA&%gi=|IhDQ0C4g5V!hhU>%J$~74$=t1*wEP1I1MUetcq9>^1^Or&5@H z#J0eW$EO8zacRl5QA9TO(1XTz<7I;mss+7Z9n=R@!Zp64sDd4aNgwbNf0HJimg!=P z(bM)51YI&*{B!MAyc!ROjUE^AINj;qI&b#b?W;D;E!$P)IL)RELQ7;U2mJ0?9WST) z1}p-X?PC5;?mA+)un4;uQ<#NX5hC72jxj3(p}Uukk8=Fit`${2RZAB+V#h_1;$uzx z8B8LDh%l$8>5WfDEfED@pzz(IK?Cl47%#5b(DUUJ!9WvI#lW$fE3`-n;OtUO&UMZpONaK)k`aX@@pn6P=7aBk;eU+syLjkz!=>CXHChQ; zp%Rg2nbo_8M9ws8_v=%%-CS!cmwmRqVV`mlahSSm#h2?ab&ks~dV{N-c8*Z2*`2Z1 z(x<`FOK^+F3#6)FC*L=~(|5{DHLeoOUopCZUxMzg^e2>OxE~H4C|ECz_{UCKbzKWh ziYSn)FJ)0u!~{~+<=anS9+`u@(!43vF$tYEJO<`d4S`>@-~DMk5!`W{H3f+ zROhDen~&)Dxv=?T@z6(-)@L+_{SWg>3eS%Z=~r#GOIz_>rUntiD=Y~|)>!BjZ2dZ5 zkn@m?tiMt=zG=u{NDoa}sOyC{JNhoT^<9pu2#>X!@x?dWtTc+~b%;7UBaux->#3}{)>0tN%Sa|%f?@QX70(H0=BzL&wCPci;ry5k zHAtDF18y^|;C`z~IJN?ss7JC*o)y(+;6EA1a=op`9tr9b{4ChE{VAG6hwWwrCrTw@ z>zub?np9AE`)XUeY3rK;)663-Aa!M%6c7kX&$7=WAx7+A` z{PcW#1~WOl(T(UrRjY{6azuktN*+g@SgCeLU>F!E<axKLaP(Z*i(c<6y-8c2Wc%Sjl;1I!k zMQU5aXT)7Ome?X?^u-+p%|P(BSkA&S{4_`nUcS*!4~pQrYGDuTC&9GcmBrB^sKZ%!h5$t5G%byA*345ur% z3Qv~IKCaVh)i9+zQ^^tln89(B#go1$;(=Qs#9x7ru;Y?Bz$zpBA*wqftW&Nt98T1o 
zraHe0Mx_dS@g?^ZPzmmjgIRi5)Vtq5M2JmGWEBnp-d_a~65`t=S%QAub}m6v(kFe+ zGuEvSp@a3&_|A_GPx*0JGjMFM`U*WaeDqu~B#tE`)qUe5U6tA>W=gi59!T$Y0tJ=T zegtV9dsDKWT+XbtP(I{-E>EaB#e30N?Ut%+VnEE_1bdv)05^%CUr_MT_ZWp{!*_Qw z8xr&J_e~o3YN#~Z)go_WS!>d7tqNF8<%aht$Yk%pTBHn`Qy z2K*BHJZSg4F91x2vkItP7d_23!pd+2{RQ(wd=*~-=f;wM80j2ukZ37`5!5(hlI(Ql z^_EZ@sTJKRUonD5J$zM{;!p*(VsfcHGBeWjwO zPx}0Zv;@{Ul_1x4dp-VnU;x7AlF6?qTH#N2vc!%gcV78_hnWU@QGwzN}0O!2#zO212&wAF{uf>W8$wstdHL$Yn#>KqGDW1>m~6FD3(vh6{(sPg@Gj@x+|wa=mE{hEGvKsy6@RfTpg}C zEbGM(ea8Vi3Mk``>-HSEXUo3}yeJl5AV@|{XLL^M2wL>3U1dc?7GWrE)Jj$yGD2mm zxOZG$NShU-AoLwny2>v;YH1B6>2pn>aYT=cu2nFQ<)xgXPh{8NZR!C=1>RU{MYtg? z^S5}hDnrUm5<-tvqmfR56x4X%iV#CM5Z5>hmP^$JIsOr@d)~_?V}#w?%$-&VD+kK5 zi5kiXkfT}&$G|Zc;vniOEi?>F3}Ws$W+%wyp3C@kw6b7h3E;daP3Q-{w-t@bOk)bY z)=OnW_6TK6^nD4u^tfQldc zDmyQun|Db(gz5mN%XqY7$uGa))m3h)vx}|9LCHw5(5^cHlbc5pT)*v$ZT0324}9y8 z<2+x=uwZnXI*J#4jzKtu^Tom`>JZ%_X4#Q)|50heN$NJW63uJQxKT4rDX^s(F|d{q zF~LLeQ1)${El}tj>4~T(MM?JcLbUvvxJ$oR#=-@dK7b5d3Ex=bbM%*u$4-t&S=UCA zN%*cE4_7w`O;rBsy+8}Vmi7=w&z)atP+7(tb8FmIaNAOV#|$AqOgQ(=_M1Pq z42f|xE}7w>H$%~2X-uCBJh;m#U^9~+~<6H_2A_Dp*~@=#`_X5)oqM*P-Y+xs0=fH!v1BGNq~W zbhbkhAN376rM?J?NTG8qx9cI5#E><`{8UT8vR~5ak(a*VTq{y!2rPeM%{AJ5SNT1y zh~F;~eKpc5)ujZ|qK0xoDyTT{chEouyFhK7HE(hDerTT2d(UXPq2b?Xs}^uvhD;Ei zr$tgsyUWq(haR_z;z3vDk&lI=mPME1la?3{&4PhtsY73JIbya69-f|EsaNIf`h?Q{ zlIob8YFH?ni+(llOjw9lXiHszHBF01+{j}Uf-LrMfoykT*0jEV=l$#p)Nf^TCc5Xi z@}nxUm}CHOo*PPHxx`tZ^3l@p=~mk50~@iwiF-r&E^&QdH&c8umoKz(TRb}R$ap7? z$PwN*K9KlYp6Z5(k}RS(7JHn-xvWgJ|8$8-c;LjnM`iKV&IS69H~!vi52U!e>@htk zQmv591T}!?glG!cl$iJt+&50rp|=V1E_b$NZI?1A5}Tq9Xcb$I8N=yMCr1BOICwp|#1Kmq{ALosl^*A$QCltPjOBPQ7-ZyTp zOUXWrEdkro+ z`*)a$n1$uq$@)LM-u^?#pb}kkOk}m>oaJ7Wh)u6Ilf&sm;Yv6RNSf3O-RxpwNr^ve+coE%$D&*I+2zgJnF_%}{{ZZ8TJPmF?#I92kjbqTE9fgf_q z(CPQ%%hc(W2zlYGtl66hHrq4US_P}yh!R|Bva#O^u84shPjP%yyWll{m%AN`6(lTr zDgH>l3Y}As(leqp@SBxJ+1EH!XV>YndvU0HxBBAU;G60~0e?VyI4;FJxR?jUbM1i# zdr1b#UI?Wgl~)MPDuYzqg>4i|z3}j?4pUKTOi}b#GDESo)aO3>K}ZOu5oV70WNYfn z7MkW#ZBGm`#-%ecSt7%;IBtr3#{T3*3xa7LltV3M)P4+B zvEB6$DNz_W@}}5KV8E9;NQp9z$)NJRQ6hqinI8rkEF6w;uRsl9MEZ-uS!i#@XK~tk zyA75zmW)xrhjBt!OcHMxV3e^4U0R6GXA;Auj-C18!%eL`+!8)qGpZ;(2KR;bxpJ?_ zW^c0Y!N?mMXPM-c%kERO!4AcKX^|%zehbmj7Gx%zRu4uh7;<0@Pl+M$pZzjqh1l9l z+hd+NQPbX*Kmos)Xx-9;}ui+uED>YPj<%bOEANQtOt5^LIGpQCOugUp*`+Tg^!fGY> z>k5LhwX1{%ZbzoLq=?4KNDMyY%sP>FarAFR*vE;c(ZOF;(+M`fHI8M)%Z8*}u0{tT z>UZB-;e!P72CBL^P9Ymaz0@?aKA~wo)1LP_oF(Pc-=M{)^8v!qUW{es7_#nObfoe@ z^>euj;)VF^Aw%=62G?ZpLoGH2pr;VJ91fb2sE8sBAQ>!rQKZ@aeEZaM z&&Ae|Gh6WR`h}6xYXR1NqG+?y)c%P!t-S7Kur;SpOF`gZ9C>>H~ICL?y*t5^HBpBlPQd6+e~rmX9N* zu-`+)AK%Q5q>sCM@jVlaNXM|}D4M|@6rMMJB4&h?X4#td`0I~;RmP07t136|$v*H- zEeTXf83gz#s(e^da|-J>xf^8<6za3Ellx7EyJ`)z#FsaqB`DtOlNPXD$(k zM`gzJVmT{Y_7u630`25Jhqu}-uQyA?CX(~^dM}XTrA1%t;`w&IT#89jkmaasEH($C zq4fpe)|fbv5f^X+pTesFyV-6m+Tx>-QIXB@=PZ`p=JgW87!5L7cqXh%4qQON)()7r zXFlBgI=8t})TMXGP`x&1*l720q^YsgifZ>8Rv7_G**b+Ir#5KD}HSJ-u%;auE-{HhksS@(D|y zD$#1n8UMQ%8dXz`2Rq)@xU99&2I{cR_SB9PD_n(u8@RQnh6ztV zJN(}_Cm{*D$_4Mo&hng+B`8j^B@@Jf`jP@cTorf=bmJw33x`$LT;jQ8U5b9WB5v*! 
zMv!-Hm+B^ACHQ{q(V8kBEwW?e7imRhTpMu0Pq2gZGMaCSTB$` zhN@Tc5PwVc=O_YFBz6}>ANsGcvN&5=1g>;37`Gocz->Q_&qDNiDb})QJ>!RE7QS?8 zUQl||qy(K-uR8S5LL^i5__bK09`|P?*l~~AR2&UQMzYt7#C!#;;hOlz5q7<_6)XFM`MC;}e?Bcvf3{c7$8p1L8G)Sn$bLg?jEfU#*W22>}#d#@rNnRbH zN0DvSLBt^<&fnY8m8QmNG# zo1!)6DXAMevu`2o<5r4}NV>}2qKW;i1(M#e+c*fdp#f=0^1JazfKI0`QI5 z^h*gvxm%Wl+u=tC9zZ%f#Xl75i*!(_{StG6)0Qmo1+HTWnOZ`fi(ML<_xRgT|4+Xk zBUiQDH{%CDjae#CXnrrn+(l~559vgCJFY;CE1DYhBMcq>u%#4mO=F%b86R%6z_P@X zkk$fl2}hbF%BP5_oLk}FmEgDrht5`de4xhh&Q&Ai%2vnlAAi4{{(UTtvST>ysVRG( zg{`6ul{=Iszn77nYO_*}g9_K?F=*c1(Yg)H)_w6Q3Sz-gORf<*{(NkTAW2d`F-B$E za=;`%X8CrxXNBJ(q9!4~l`P3TIOk}MP2Zn;i7}kSPM&KYJRPq&efV~J%0m#vppY+n zOg8N)VQ(cr_DZ)JiK5ey##mDLo#;U~jVBP|1N2x*v=eLwb}tITSbr~%Do zQU#fXNzchnOq0Sc`m%b>Z_}@l!o?U4nuz1$l350~5l@AGGdZy?l*^Cdto|L6gm+`v zB0&RR(r1BK@wL~Sts=md4|aTZWv{>Ygi0JgepKSvV-k>}HhSQKi?N?ExW|9le)DJf z!(X-2kK?1kn#0qTc}04fX*-_1CXMeuVq$1E@1IvyLz0$| zP7jLWDhy05!a9bu=S$)+_ncFC;{nOQ$zjMKn>q8^bmaxi7ULm~eo#ytM*8gbtB_Ru zVR^;z;|?L-y3hAI9Un7M1o#Lz;dcqptLZ;(O9P9DH;Xw}J{;M&7&Z%4dNl0fX)7UA zz_Fo7fdD_3k6r)9qo1_rVqIhmY7bwU-j}jOR*myqbEaoE6I9wMF%0?K3{yH(FZ?`S zp`~~8L*aLa#luw~Rar_vJ8#4ZKy(kzcujuTa=VpIk-FE6iie}LTbcg&U{|+05DkQ+ z@^aFXHv~ zh1^vXXrX@4=2@(aqA`FnsF10Af$Wu3Ylm=*dz)E2>$x-yaL!@KBAyPGxcs=M)-C?) z61@lV%^-F|SYz`UoKu9-YskJJ^82w6J-L~)9&u}M@)$59aEgE*Vi~xtFkY^U0zQ27 zcX6<>=$|;Tc+{`@r9D`z=|wJ~?1%hbQHQ-ihNJ7RU6NY9fyZ1ZUAzJEbZ1-f_%_>clJAE;ChwRbk;#wZdhbi2;BV3}$*zR_Ew&qeuULZP*s^ml7RsO%7jET5SMZ?y zJyPb=;zQw=WGwMyX}zV2#92HZtngKPMKqYEQd0JPFLSwE=99y`+5XsLSrysJNoAJj zaL*3V0lt|?x1+2QBUI&J?2o)NfbY_?pDZC+5dQ9!QeLd8O7dR{nB$lWl&+9~8bPKG zz~d#_iX~DU3xpPP$KCy^WvaUsJU6xk;}IWxx}AGJUN_vI7$0Jw@o?gBXxIfrQ?nJK zy}6;%o~cM2E(ew(R~KrGS9d5`NsYjyc#ub{wD}eecbrevO#IYGTOFDQs9*R-JavKp zH2qinLQxl<@^NuFheoM$@4APsBq%NkUK!t5g0;3BOP~ZSME6)WQrTHd@mzbd4E~t* zLA=7lpj9%eJF^gPx3~SbBbf@v{scd$${OyAZ7+g&tc8f2FWFYOETwQU*O|2o9hl!V zFVf)p7d1&zabP_0oQh7px*QOEJIH(;*@@|1Y}eMj5Izz?2nPd*uc5wKHSNt9g*iizui^YHj0j9X*t0T&g)+8TBg+sVpq)6WAGqpn^+%kk)5yJDs!64LIJl zG;ADEs3E29Uv&sYMB8`<$N~y0f28ldluxW2&hw+6#t%3h8|7$|v2&I7Kv5 zbRfkJ?`0K?j*-S6%g-O_0vSj358c~eFn{szaN{P2xlX4KUvGC}OscslYYegw^iNB2 z97H2DM9n&C@#g)ud^<8}5C3gR&41}rnP;mXuyhKx9@;H<71%aPnk-5%1i{X0Mb5Hl zHoL`ost*(48sZV2KyOmO#`!T+7RtlTLx3RoN?o95lEcY+SqG1zO2=XXZC+f)3X87d z?AuwE`{xzj7b?&5oh`t0!-P+Nw&U8hyT$_QyC8lbsWQq{I1af*(-}Zri=>+JpLv2O zD%dH?^@(<_P^RBiPygaygT8<079$}4pN$hhyd!RS2&L*rW(gW{UR^^cby31H$=)w* z_=pMF7MRyPZxRBkK_+afr?GclIJ_X-?0Vw*G8Uzw;z+iF!U}(j_(%8|5*&l(#UG@Oag%?ZbQYtYfE$z;0{Q(R~Pp*YPY5%v!)stYTJqK$kQ7VBVgxc0{@#gJzD`O!* zrf;e=^Vgw#>0zG&Nz7S;zSW^SWxSv6;qZHn-d=pruB8jK)V><)tJ3SVsbpWSww3UX zkePp!%w+RcPDFa{uR^+rXR!ShuASUhvmcT8;sd|``|bCC-|J6jO*GjQT5QaV<;@z% zyX~$&ib8rxD7}{DwAG#N?aJTgF}}#hfNVfA|6E=khat52An?p zlXfT6D$Q^`%x%g+B%|?6qPP<9L3qHEwPZ0mj1rT!I9z8-sy;6S!1~sG9;NpP%+mYKtkQ0wZKw%`B zDE)?YDcRoV&ixWcLUwuhc=p7W|o%*VEhOR-S zOYO4v7EJ&E9P>PV_(FRzevp5W%?V)DNZ}-qEM%0{5nl9#HP@xfr$s~x3k0|vDk`LB z^Z&#+1OYrQ1PvOCB70j2ZR>iU;*4-#C|IP&4m!*eL*{R{H$9qdE>DR+tkk>FSKw0@ z&l1qATp~K1G@@g_V4ov*l>7vF5U!gWE*q+9^fEpx;7{xnu0@iRdI93Auf)otRtyvX z6M$~SV8Ku00pwY;sTQH*`Oo^as47kCS;&D(XDjO;52BsCHN}cFyk&H=w+SqrJy63i z8f2y7+1Qs(u?egsS{oMj-P_y&5EzM`>6QWKMd62=hf9~CfOky2^X zgd1Ko{YqQ7wHE8UtiAGM7b!x`<))*=RyzHJN@(h7>6pUu6K4@`R+c*#uywEYy^3bZ zXfghHCn2+|p_FwcUMr|7I*;lb{3*LnrOGW1)LYWUlaq8qloqm)5`%w43|HO|b zZc=e-h!v@ZvV+%1YNj%#oeZf)W>`HCoWp*3JlOLB&ZpkJ2_3C;1W*b7rHU-@ z`=xwhLF6pNLt$iC)1J`|<8wC&`$7yPKA6E`7RmU5N&4@X2tam9?aZYaBq^`q?bUWM zUdq-VmCy^A2O`CsUPRi7x>K>yCm;h3yi%b4c<;#=RD{B!w|FL8z(2MP&qtlL^ z47r73ykWiJ@FC1j$17hbT~$*%0PGS{_Ufe^dK4aphw-oeua{Q=rhh_EG9BAfSLygX 
z{t+4OSUf?tq?Uh+-T$N5sGf-N5mjf3qChy0Fbd`p*>aWLS$rVkfT6_mRPMOs1w`bL zVBK@Qkpiib`$Vx9+8=wYZ^-LOKB_`E=j|zz`N)a_3L7Xd;eM=$E-kkcBIdu?8a6Nt z1Vc1Ku1HqIVDVlnbMv6nKG?7LJ7uruU$8ZUZ2{-#cpj2cAmVXx>7w+r+x$_2JwCMJ z!XTm_QtFvR33SlaQXt+9#YM0B{dCkLIdxPdvB%aTA=WP8tAZhmQoXE)j=7S=!@w6{ z*y*66r$pba=)gXhyW6|32@eq4$_qMBVpwA~w-8WqNV#uoP*EQFrLE5QDLx#RBa@|! zt)H=O@-n=TaP9}!!FulOS|Cv9T)%6VTZY!47Ib=|NZ*=1*sz8LUl0}Qp2v*PX!6gAKzWN>KpimJzVqRPaT>GHW)EHSUP+TC=Q5jW0)XRS_dC|rsx;~Ro5)MeZofKlh~NeN2A@)5C}xM@+)o?M)e$}P8y5kR>2T((6%Gj^;m&wc z;)H6|-dx1yZ$Jr;JtnvlXjA5qLe5yRKqn z*C1(G09h!O_9(p)ZgC~U6yxbvG1Q(@)D0n5>T(i~Bfu&epH2##o6Q0)ZH@=ZWxSGT z9Q!67p65WiiIq$xb&(n#{}D6`GJK}FA=U~*%n!f#vb)NlTSqpfd}_eB5yY4feYt@1 ze&nZLqE$8gsaQVs-LAZL8r~?$6)NvWMS|pePm6}$m;au4Y6mt8fkAY5Sq#5g-3M^- z{0mf;^hXG&fC?oZV(i;v8g;l#LtIS~NInz;n{m$l#m^wQ-_xkE{$!zsoU}cKzvpsj zKZcGj1M==)0Z@z&4;fkvndkXB~PdCac)I#R2s6_8FZ!l z7SeFoDOe;_{h5nceLuPvZ%3p$$d zH){N9*#=#dRL>O$_ln`Dn{TxnUcQBbE2y^P6gY(cG36D(njF|}LI1P}HCvV1RJOn?66 zb|F?o#egI-VyG8&JxL4$Rq0(n9 z`pJkCg4ke-QI(SD7`j?2RWV+y_u3D1`FLc{i0Z#$HsZeiFtngLXy|5k&s-_5R91rT zaOr)t#0FrmyRNWbs&h2SvH`3~MzEYr7~wH&(nt^^yjZJeZBzaC8_iwy(BPShco#Do zV-$rA7&zo`b#ZP%2`hGN6g0An@j|6w2EJ0R#b0twg5Q1<3QN3&fP}5doe|#}=l!Up z1h%^X`M`6&_Q|jt2y*WpP-$O2Xt!?9{f-Lm5MEpg5`ChOMu*l)=-79GRD` zJ^Lkf@Tn#xbQ&0dBLWZ4R(1UNO$E;=_;O_Dj%B*iq(l|E8~15MVhMIg3{qx zRed?oBuD;zf%yM5q^&!m#rIqvW1O(l-ts}q$LF8nKkk6YJK5Nz*bo)FD{&6k+1(N z|7riq-(UA>7GbJmP;sno-fCa+pLHt;oV1P7d709LSOoRbEr*%I3y|$aS-|sLhXx#b zi!Tbp%QNiNB=`I>`pm1>n<1Otto`(_g=YU~rg zA0YL$YW1v_nfus$ zM#X2}JN<`P&l$b@ki7VjzDR1u;@7el7Hfqu!eHYKEZmQVJlEHJIrdeckJMWzy$H9+l~tR>fdD;Z1^N)n>4So08H-_7V4c;0%&)w6tT16{zu+%^CU?n! zJj#qHsvk4sG00}(_xPo_a3v<&1c*E=KLf>4!x|6o*Ps`_7kF^L9Ed5bp1UEMjKVO! zVj8aut(AKu7CX^Bf@y*tD~1?352gdkXfDToPWED7d;bbGpI-ss6$SmttcV z3y8j=v;89Q!tCkT0!ahK#q!cvLdHqR!TTH=lSk;-f8bxs@3xQflB7%O~M1YQ6hd^ zguit8+Lf3^zqT&^MhW-#+UZZdao7eB11pr8tZUC%=%H~4!t+CKwPYla4^!%-=BHW5 zS#kW%eBtc8BKvOE1{h!!KzMef-8mx`R2e4FB$yuQ=ksGqs9ZyoW=xVS&I5k-B(Eyc zOD%&hJ!c+OB}%>}K90tt;I=k>l5bX{Wl_+)t!S_^5b)*QaxRpzyIZ1$H}s}aIq*a^ zKJeVmPAH|D(-Py8aFkCG7BSL_8IW4$20u6o`i?IjyIah=P+PL6q^Oi`t-{J(Z- z?!p@g1x}`{ApO`W#$v0YZd`(cjIH-LIW|GT9hSW8VL!T)>14mGk1}9xy2Lg!A+(E| zVc<2W%HGQjZR)cYOY)2|NU;EeK2vTobj#JE{a^*|lr|bv*z_n`etA3}T~FFImgtjL zHeC|@pXz}GLCt2s7-BRr{oW+nu>~Si<7O)3Fm0$Bs6a<$%VA*4|1#mPAMN$zWUlfK zPyQxoGglYH5J*9 z86zY2t5*{O~ z=wlvgfQ?b4qCWNcUN<8O{fl5_y%jZoQCQecNilbKT3$^>_!RkpT9kRhYu_so=BmzPb}JDdVbx-B zuqB0>0MniJd}!@eM6Nt#nb)|x$Bs9txvJr}7H-{IxPd>rv>FFyU3XU`KuTzF z!J{V5IoT?jlA~Mh91qTX@$Mf#d_)tj@5Zyiw|CkVPps!seW#wR9ydJ16VuiCw&ac_Zo{ zhw7JdylA@A)+fXZF;Db7{T%-NzYw%$kQ%4@QgI945Qgzv}ucuTTFe&6ol{SJv z`=rCP;M8K*%ddJ>2P!dM$DY3GJ;l{RL&nIjklQ>=ITQuE&~h)fi;J=RlsP%7=+#G% z<84cZs80ZHY>^UcFeW@M-RZz~n+*d6pn#r2+7h7^8-6)nEh$ei;8M3+tDcQY#cO`= zpu-xOsDyf03M8lGI%=hwu-(ZgQrcpL>~1|3T&T3ZwJ@muyH^fb8IoDsgEOC5sMCjB z^csszIK^mVS~uG5>({;W+-nNvUWj-!zZ;qdrI^ufj0N-L>CDf9#?y6A-NNd9ka{sR1dLi4JCJ*28=Ok+5fzqX$(=WwR{4PqYw2~n(a52>~$19?q>qnKlI4qGW zjANFP2BQy180t~Ek;*g@BtA_5_3WN{zZCE9Fpq52_(w+?wG6B{dz#2%>s`0+uR3Ag+=o^Jd=t- zcgOH3`_aGOK8V4B0Br52@4g7Lg6Jq+eaP?U+e`7>Wtt-c2NY+Lsji2eey2TS#oZaz zgC}a-bFRJTz3PsH05 z54mT!G+BBRl^e@?r+p^&YJD={LiCsdmN8F9Cnp=B#E2Fr54}$m6iaxO>iVMhAX4+^ zvZV`H9S?SK7jC#9Y2SDuqc|%$^Rq-C6OXee>6OT5Jq0mUNx>~n_V4$}3MT~>L8dRB)B(GeI3}W8ASwDF z7bI?knd4@lj9fa$AQlwXqNzEzmfB^50XlY674mn1M1HY2n=koemL+Dh~O4P`+-bUNC;w(5bQM_4J#S8e6k^(nsTA~q1XI@;qEad zN&NzNZ_Y5eM<=zpr5uXS2iDFlrT>ELTYzOD>8Lnj}l<2ed3V%_WDV=0u72%-$} z?e>IalV!uRJgPlq2%>6tkR^Vq4Q)r8B0F`^Dc*RY&0Q{bf9Rt$4XGJY^;AQ8RJR6@ zEB$7Y2Zp3+mp3#iiH*B_xm~^-W7dd;t@Hwb7{T*Tv?pS{ZUOt$vPQx)g2D`i4qL8l 
zudGiMrj=5Jd~h6U113g1v?u<`g)5wA?#E4>e!Du{964kabx|Gm`}=#%AcH_;cvqwe z3ShrZ1JTq>x==k{zNWgcG@cYw*K#AprVFuBsN7M>lc;U1;(K&z$#xB;TYU#Z2mw#G zxKT>^t~g=+UWZDvp(T0Ef38D<9wvGK5?!hy7nL#2cpbHr zfcTN_sKeEcZC8A>L2|ke+sD3Muyn9fSkvHlKq~`>Eo?8wgj#_~YA?9`)On=#(y@=( z10(4UcK=A@v%PH}ZM-DK%e0{mDOXybjW;0zCjAI!m_B&Yee-%{f00f#9@{@u+(PEcFT6-`lwa_s7_iig z`tJgmW1DG&`Tm{wFpwZxK_yJx>XaUOf#?OmT1fQw%`WG|7*lxmF{{MikX!ehlB_tH=PK&Dv^WQ^gssxd@RNd zGQvJf_+NzWF?Kay&0q;Q=R*R*SSi&60gtar|q1<%#nmk0D^S`{dMQ?l-32;|3b`!F-uj#yS9D_@W4J>TxU{q`SO_67P* zQaHg)CYcB1TJ-o<&k$zn3w7`d(f2+Mp`{GyLj0Rdz^X?R&El%NkDWj90B8}140s=V z42oI;C5yL{g#^*@E=Zs*_($&Bs`oiu8LsV$zhhY|D-O9G;j}tP$JEO2C;`sp322Cj zA@K-xmEN12SQG&_iN$Je<+ePKSOSWoF8ElT5Ykeo8oR-nYKbai09@q4KArw76dI+1 z@wkP>w&=&yc#sBp%IC1O6(I8CD1g!>him zN+bAX@r8bU%QR0PbdBM*sp6w!i*#Fv*2g2laG|;q)f3GlohN`G8C?r-*AGw^oe|Vk zM{Ir`legVI5(V0mLY8KwBhm2+CH&0ZrHZXLtr|~2e_t(&&MAo<5Db$u`D> z`;mY%?nW-jVRsW1R$64_oaPrv4kdj@neKviFX~P|Tha+R^LQOTRH-w)-j*(Tl2j<( zqXiC!C{_>p+o(8g5b+qf;s75}+)cXI#^Xs!<`O>sI=AD+;P;95>HQnu<1Q|JEuApo z)+=5zI!P#EjlIG^sZYPD?2s5(urTE}ibuh;5q@UzF2!nUJ?^DTG1G1u!k` z?e@~u!id*D2bawvd)yPK{+VWe1X}(XZx?2ySAVp*J0$qMHcC9>OndDJ-D}Mx4&7cs z_tuYq&G<^i1JvSRr!W)ljd8jAz85W~Y{3~gUJTfQYAN`3ob;R5+oc!^P&8YfhgtQ< z36no7A|14n_^gC=&gD6n(ZI8~&R}Eh^w+T`GFKa3mYxWmPunf`;{amqH{9Yog+Dyx z>MZY8(ysWV6tJ!(W2_`Eg_`ExDd;A`^QgGMo}M*xkmUlmr0lxVv8T#Q)`Q#B@TXbM zI5k{x$)J7A#V78W6JJ529QKj5D|b-*!rumYFF5?!n#UqO{tlN{SkE(3{2=jq;serh z*%gC^sV~GXlX1;;ef%v2HQ%v7-WWtD9wK;o&0q-PjD6&+D#PNnQYFQR6n}d7NZ?^6 zNp2A_g7|nHAg1-=L{Js~5+EL4Cr`d-)$uo{f89>`FG~TcdbS@EfiyMo17L!4M7uou z2p*HO-^v37fO*JY zx(yo37Fp%u(8y=V*@>$#Df~TH74p$~mvhTS-20FUMMzailRzT|4TRxUJqER~gBLWn z7Pz<)g^U%bnI;m?Fb~E?+duRfQ*=VX>sGy$0Vk3)kYKSn&}v{94O+Oq5(rH~T0%*- z$Zj_ik%w#!u~f(?mj@aaScL~f10UD`^S&ESI5xsdjJz^x=e3nthM?g=ZQal70)1|J zcBT*kiQ#-_mjJDc{_FyPZDswk?M8=`2ob;*@{1#flz4b?VEnk=$ub#8$Ro$RHX~6J zqr~M{jvi3N<0T38o|)TiFJCQ$=%Adgp%CE{9+Ji*H#H9dhC$}aO6g&T_g#9!sqNOv zwi0h?l;|v20iBvW;bVYf`Fk;9W=AX+Dh*;uAsTb9ZvpKH(9-AP0$z2iiie1XX1Xn9 zFjP$W*wG1p>-Z=WuiLfAZ~bV(2#p5H^)E(8S7h*4t9pUSz3|ho#?LHgoPptp7BI8> zj>&|91A@1mo(QVX&-KRStIzF_gZrP)exBm_YsA^YS4?06;r744hkmli%j1b&V zjnzRoyM#46cJ^FHpOn|GFcJeGA*96-YQa*^y|OV?7z zE0clGzYRzp%Q;c{qp%{++7E?-ry^h@;Zkg|`+bwvAnUaE;O*ZvMdkFNHv?Nrw4Aao zYj$(Rs>Qd%<^HoBtabwV!+8b3uu@yw>YYhxW#Og$>P!%}i-vg-7{Hp{KgzsOoB&bi ze1AOcWU|ECH`L)$*`0xfLs zkY|vGI4ZyoACmH)ixH=2dhJodzEs9}J-R|bDfl{?onH|F*v^o0G54$- z#gy18#%h=SQS#l=0Ch##&Ej#P5bkDV{P?fn|NFSJ;&R)LGoXTddY9dc;{!y0`m^|f zk@-SIAlw%M>ZIjith4-Gbjm@I#Y<>x-Lw^t5npvVeh@R@h4=ygeXnQ(kQf9lmPuhP zJdSvg_7BObit6kh;94+r=1Rv4zy%)vFn%B#VZ?W(-7blg=>wK^lpt^Ka$reY#vk6# z6~!a6!W&?3kCCmDaSv#C64(?YcyD!vIDG7nf46j>gg$+0 z?CqUCd~39Kmrq&b)NF&2redR^lGBMY#)S^OAOC))-TAz~*;&90!kWI5*U5+N?SYnL z-a!Q0kG(`>vk?MMX0RdKRr5XBo_@+-K#s{HdEq)$%3hzqr0a_z3ZwUQM2d*rny3X} zy|qB|Ek<%f;Mr!1H3F()~xf;R( zl29ifeO!u%D5MY%O&R|TWJ!klUb22`$VO4it8@*Ou!rOd^+_pZao`YrmWB;$4Vxd_RgN;K2e40k<-sY$5Drp0RqPv zXg;1b5MrzmHn(s-%xITIt57|v9grcu+{;q4B8j?_dX^@1aqJaOA*5pBWbNtY1!&%D z@uv>#+C!212Gygp2|TgU=2?BX7mkz-be#3fJE;Bq_-MbJug(YsR%kXNYW){x}%(^wL}QU6YUQ}FjO25Y!E*}SU|jr#{xl!ur!{c%D57?F2l2B{zvJR zNs8X_V#vf$EM#2;2!xLFVd_}rz;(@$c3{OHIpfy%0M`#9H;#DtO{*kK&~z_HRQYeK z0jWhHHC|NiX0E~$or~H!OzINg4pB@T7txC++p$K1=x_Un#a2if)ZO93f!kKb@-9kj zV9A<=3wPma?8&vBv`c^D{{43UzSl=SKvZ=Vxl$j+(hvHKe;SWO{6S(0FnUwU+_p{zjIW3nTyA}VC)7$Z9b%v?yBar@Ekv(DV=d}& zRXPQlP%N{eu+e(ax!YZ#R{w+}eQ-B44i)a@V4{vWw`rMlMfL5HGJquoXbcQrtI2oJ z-}Djn!P=tPEu|@n{bH>T0NSbMnrqP3dui8O8K=s^lKv{kytnXFBE&+xjDJdZq&TIk z24dv6CYt>R?Fk*H{dX%q5V!f47s-wW)VYTaYKMMiTFzlI)bX3lOznu zOV{3RPsUePmfGT4>Csl^x+fj^C3bLt~T~sUj<|t zjpJvTJRRf^&uh$jKl2qA+htErsr!iGVDD~c%ql>M5TT)L6y?} 
zJVr*OSy})|bjN5`+w(&mo_G++i_J~%pE3@7J^rfwoA|oK@w+WOAu$WLN}tLNDL$4i zMnSulBrt_Yy7xR&ei_(OPekG|*i zBnpsMTWh&6l9&YIQ@==;HE(UlP=P~6v6A|7@yjTx=gV>IaQ@;u_*SK(j|!WM7ZmB8 z>|LgOar|e zC0f|&_Uc+oEqkkwmURlX6QsnS_={?K)kBVQnrPQBV@v$w0#_I!nh_isBTyzcJ7@oI?fE!eYIkmlKs`_2p$Xe9<&(8?j33Fp|*anXTl19OXc4=td z(Vuu7%=UgW+NHhY^qK32#CguMcxHaV(a7U*OWpg z#}pq|nUFJ1Krw{_1Aw|Nup~xt(hhCm5b`06beyRD6<;X6aab$gtx@ZRCE;s0fa#-`Wbr+ zb$(qjXSs|DGZou~pb*R}LF7o_obC^egECI*p(1qflzOEoX14%$s?h=tuBS_3~ zwNosx#7EEWH?T>#1QoNB7b9ZQyv+5$%wv&G&i}L19h^$wRKk z8v}>XYeQ;rZ+tgDu-jSD^_*}p?^`I;+k$BJlQ}-=BI|5YNZ92$uZ;D~v}|w3nmegP z;$DWZ%9D>W+FZaHO06Sh$t4}f|L)%nfmNVv|9}1OfA=jX|Dy*QqKG3{M4tvj38G2W zU}NclR!lKfTLN2awg7$-F>A`!kYd`7v#~$lEn3PZ`5x#4u0@Ma3UEXGVqmYo-d>M? z6S!p(me8tA;t$sr+Zz@~(tjeU`lw!mT)@q;b6y+v2Ycj~F3>eF??OU#^h;`r>f^dT zURk@@+!|;)ISrY{Sy5|eNuvb1AZ z@l2WZl5WK!V&UE^x*uD-F=Hq3E@E;(3{)&Q-lK_a5eg-`zB;prGuFPwHEVeZBn8$~ zJaFm$K!UO`2s3TlvrOgyl;F{WaK*m7)k*-`k@0_SZ###F!!^@&BBJ}7DkmcHtW<`) z(TN3{#r)GEvgnC76lBG!-wKv;?5SMJWV2(1x@}4e3OM{~d{SjI*+^U}D|Yq83+)vf z7F-EsVO%FW+-+F=0CZt3uaK4ADrCJqF#q*JrRtXT-9*}!6woNnwBVs_v^V_GwR*KL9dXhP zzuZrJd{7}nhS8Fs?|4KspeMO6AKTJHIam)Ri`FM6T*8nwPDSgyw(J^Fen)ac&M{rDr)34o0x+hKD_Y1SsWM#zh+ej-eM zn$xFV=)Ekohq3|Cv3Fo68Z1=xe#651#h1t-WewLN4wL+fc@zoaW@Uqr`r~}7qjHES z+I){vmt4}bKt9i5KNNl?T%h0lStu0A-C3oZ@4$xJSPK<_OLYW7#ECA}k*L>C3M;-hafe}kX=!dImAfN{e^f0iLg$Z%DEnk=ZFM*VO5Nx72&D% zQWOkqzk1avxXz*5b8=tzU^~|<40?67oQNZ`Xgqa4kG87Pat#W82Npytq)Ec006M;w zl2grS^BI`7*J7gTh_oHjtUE@JOE&^Cwe z)*Y*wcEO~?-XkbTe{u%Ins$TwABloqF?fv%)LuP~BKU0nV$}rtr z9vWx6YaGwtD&_-UkIX8OeLNpA-A&p@D%0@&dj@D6p z%eOXAp`1NOmS~e7qrNd1o6(++EMFI!NM^`e0PC-7wHU? zRbubi2*H6i>3w?(#kN;mX#@c&24{0ZOdV8eBe)f1fakiF)P`6jVf|qi555Bwuo&>U z_NO@!O1S`c#amQWbJNJYU&fwE8q{d zQazy95*HG}nb&I(so^eU3^i=#*G|6`1C;Yf>OW5F3Z5+Oc5*Vr!>v+j*+JEHmG+R! zot2Y38zI1?tuid?I>kk6tj5M}nYbEZ@vALq_ZWk+jyePWS)SiGQHYtSN?YZ^JF*vG zaf-oxzE@8Ku$5tSC?iDpkJ=Nl05n(m1&mvNbGl2`&1O!xMcP+@rs|tXWWN#n1*v+v zYlYq`{sf+&ku5Hu9O`+8$&R7GpNW62wJY&3)utsFrn;l}gNWDPi}w#0t*DjEax$il`)u!qVyd+LjTL5Q-&tfxQ#t>PPU@~Ok~Lls%bsjE>AnU1@L zHhWL0bWSO3!{JaH!I%&TO$FH|C@L7qwOjHRNB2f;wKEK29CRpUw~I|#=j#ccBxyU8_U=t(3;8i@u$vwyLUeEpZ1BL--%J~iXt*8 zjx|<#8@}LRzGDH{!^6ZJ)v9l=u8l7*#oNfRhEyS)tuYGKA$pgh-5%+Ls?*t6w%=SI|IWEGLdt%+-4@hSp}lm=b!b8ra~x>L##|*{`&(#D~>L5rQmxm$8MmTP<2* zskt8Sg&5sK;^TgRtj_F!aJ4;}zs-AVsCyyUhor@|NdDyr>a(EYW{$%QUQV&Kt48#F(y2Oj)P$c zRU#uqd0h%(59beK1MsV}SJMrG?wY2;N5YjVIMgbp+w719a_WcdhBc$vW^?X=Le9zF zIDt7;H-b{mdXbQqi#(Tn+*nO`O1MJBq72c(JVwfMGQK{`vKK55O^u4zQ$0p5FUb6y zC5TwNQ?Dnbq2PKThJZ-4oeC{~yA?G%< z=i$8!>!=NmPdrYQ7Oxcf;KWpMK{}4ogrH(-W%Z)UWA?_W<8d?um&63N6%ly}4ypa* z<_UDXB}d#0UkV!_`S;&CX4+mS_GVQ?kY`Ii=$lJZDCWR za;WZ6{z#6;m`vAwLxZPSv(82+)^A|r6cQ0sE!^=~2!v=>G&6@F{v!3W96tcAI^#YB zty<2QdWXgAT@aBz$gJ%qb#tTaqHqe^=x^d{y&s$#fNrx)<=@maH*C~rKJlOS ziJ!BV@VU=C-JX6f1}~)OWuJl`l!8=|kFR)3-W&4xFX(W^e|>2AAnqZCRVAK(w=R(3 zWW4D+eRQ!Ol@dO-u*3Jd6DXm#l!LFpK+9E!i+t|MSQ1*E$Onj_)0N`UcPx`~ArA}9 zvzIXrSYaGfB&hhYqGIjVyW+;LLQIxX zRd>A>oIh&m=Glz0^GCU=boR<5z*Frxf3;^~d*w^a>P_xMXxDC*8eY12pf;-Eg)Xi> zgOz6lcEBwfU3_9gsMh!nLx1#-K6muuJQwRBLJ<8c!mM00;|vdusF8j2^62JlB({rG z2n5d68}V-vh~;k3DWtr-0c$AtC@PXewMYIWCQPJuB6A$WmJ&V~&5_O62G$ z+kJ{TEux-_Ke!i~rm=XeehJ50y?%(~%8lfub21$SEgEN|tnO9sl84^*6Y(L?&}y)p zr7pciSIU7oHm0qXL*!vN!&2)0 zrhrQhl%ki|-X7aHk@Ug)rPi_&h01B9M`g+7Zlh#NsLp)V;FS8%7`DQy^RZ>O8Dd>S z1#ZeJdR1?bK+yn0e9oWu{io;dw5$N_U5^VM3pEY!4$P+P(CxdV`kA{$WR>H&on$GY z>15@?Quf#tvPpW!ow#`3YXr?;?Pd{Ow4jo!(^TCjwcYf(T)$cErHKdgjXS*bCO zYkXy}Yod~k;ldhrKkI9*wp>&qg-)<3lpivx%J+I@lsbes9OD$GS;en zeD_?h5%|x#-zJ{deGC)W~GAcXPU58CJA;Hx0!Den}JtS*ej0>_)U`Eo6W&v7RIfccCEU2kv3KW|p^ 
zyNK_)9)G#-T%VSYfWRZ~3}86`{?+B~wg5=R2&7TH{@gy3S1n_;v&8}y*=zV}dnLZ{ z7J~0aYm{{mru^-rpU1)IjUY{&!6;%Ft%Y&uhg4kMGVO#SEN>!yCmQiEm%Lv&*xfiU zt$;|5Fc7ML7u}yK@=Cy~c1WQKI=sJAKfr`)Wm0dYS_}sY7LXF0xTey2Qi{vl491An z$-)=4%dH*Mn0T*yhlFur1r$iOvHvQw#j%QI7PdPo8{6;oQ*TOtL#Wch-C~K)+9jH6 z(LqN(;srXSS{mn!7}5gUo_`|V`lh&88K#Tkr^__FFN^YbF}Jap$e-|pEF*`8y%mqyP3 zD-Ng;usD?zOvY$$HS}kQF%tvU%bL}`8yTms;w}4IFe|+$OKc(@qaP!2E87@$CllRb zh5g*;gDcBsoVt-!h8`;YdPe#=$*ZRhF#?KhwBwd&rgg~L%YI^h{zoxi}~Ix5wYgVRK0>7~O{m7C;Uyg}m4f2Q|2B)){w$i%^s zomkDC85WBHc`K5n@i?3czFppqA6>Uw?t5ATimGnJ28&*s?9!#>wsd1H6!B)uFd71- zs>kAOf^Dnw`7SIx5(+xtUT`7$66Da!=i28#;{mSY&eR-Jw$m-r#(hR2ZRQSvOq4^! z!FeTSA@u1bV?s9vzR-JJ(ELt0CP!I{CT31M{qWJ>dC2Nsv{gKLTri_}Uuj={`uF@W zdE3+Cf;`cFKSpy-pNvtDHmKxt+H-<)31Bo%unxqgy=MjRZpZYuON#*>!FVh_1#=-CjD z`>ll3y)XIkq_})=9awDlFd}zOpAo2A3F`nr@jwDHy!!mtXQRtI;&y0}VQEn#NTuEi4 zb*@L?c+?fXiP;#Ha2v*f2nQd9>uHv85cPkMR*iW!$vm92e$+@$MP1vkX3Y~%4pWLx*NSWR*mpFBkI6G4R`dC8( zJ$N(ph)Qq#jLuj=Cn!1aC#@*B9hOL``{Z@u$bTOkWMiDpaew7iTQT0wvr$GH#|vkM z>zRM$KkZ-n`!kk|_V~|#^XLANPULe9**d{7{*2}47!@ChN)xm$A+y)6yR7?0^)|*w zDngrE|8>q9O@}krfN_)`98wb3Q56t*iYu-qFcvqEi}s7noO0g>I}(FcQkisFj7+>N zLHQP>o9*siZ_EO`Fv7?PV#1$sVVz-zJAJ&Vt|x z+dVDJbVNK!0e%GVzsBnwJ0hctAk_vYOHq4RbjU44va~}b|FXBWl_}!$vi5ceNJLt6uurqVCOM}^M*ur`I!_O>#~7wX zHBOwLvii6n&^Bs>$l^`6_Hhg|Af!dtW{_MpUa*dYVUfY%`@%Dnf>^l zZy9+{SX=;W{JWTh95%DVyDfRC)S?~h&g6tQW!DA-A*6>WgbmK=EyNu+Muf#HQ+s1r zlFTxek-i=A&nxXx900OKsQ}cw&EC26V0__v4cwvmsrzgwAc4wXios!XNgkJQ8B0pk zP2`cclJxqtG6g+FI<_a3f6obXiDvIqVDbhvKplO)1p90hWWLURWKI*@c7| z{{|j7AJahO#@XalRj%KPUD!7X7$N%}yp&k>O?73hNfP;RJ}BE%rEQS~Nvj9$g$=O? zgZ9k!4v#4tY2KPJOjiAW~ChkW=0Fu%{tv@i|R(O3=BF zN0%NwX5yuIH_Y9`RPvDW{;8dQj8DaynYfB>S=CEN8VrlQ=uBHU{4vm#5}yD z9f)?gc>6JP1h;BsK(g6E>7KU>{)`?d+UHVx^OE05YjQ%3$w`P36=>Qre*go(ybtQZ z+tpn))l1nP{JQcbZt@ZDAfDP=g1La>^(VxHwnBzcvoV&^sxwi)+y@CcU#(C!PRD~{ z_Tb%-p9SNLH9BM=k+Kr$kbjb=qEIpRm_v}+O{qST`5qD1VD{i?4r*W6?)ALIL8XQi zw3*Swn|0y=Iy0^#ZOWxFKaD@LxQ)wZ??CXzr%1?!K>1?@%0KqYu1OvW@e>T8w`z;c zze!ut!9|LBefhce_8*1*>=E(1e%vAYZY$GWQ1FUprD%c;i|4%X0+i4Afp;8agO_L9 z+wprHms}>?fn-EMd!*rQe2lV*N8btQ1Y46=?RDx-6q6__lHyJdT4uPQ4vv#Wu2lq) zDf;HVd>f73zayd-TLWnc0lY){)qkH+5u|Redj$x7DW|(8N6)fBO#>D0CDK@wrllyt z%gn7GS%1@0oUeG%drfv8POi`SK|B3b{P`9N=YZ&^rN!kGBUE=3ZB@;-s+3Nd}}` z_J0GB2~CVhY;E8m#mCQ@tHk(}I9!TRbjI<$SQcq(j&31X9rtTYCp(I+ z;{kv$r$369138s8hZm8|QiSnhd*y;94u4ySPu)`WeA}GVewh7^JManh_>Ok~dcNq# z&}gGm=89-uZEI_}dn!S4ei6zI3J9;Hgz>{|J<<_=s?;;mU#|)P@=-e^amHfm7TR@Pvi_jY@}UW!H!rI&;dpTMNl1 z%V@?b2{N_N!v6r`ayENa9Fh{4C?YGVkSo{?pVNLFM4x!bP=^Z5bV97D!O7Dl7;28T z1rSdFLPTcNNm)#|?IZvhvrqY(=+@a>_g6x&?hNGV`eIwXc_U`|g)g)(#J|tAPv41u zi3mcKI^l{)A#b7Q@F8$A|VKKCD=TGAx3wNuEjt;OV*H3?o})%qoEW#yMSfag3L)jgOBVdU*4Z=i=&vXFrB~=+RbA{j;gtzfd1#Y04 zAtZ1DpN&8DO{}@6CGPZGi%&|vM-eLgkIl~gb|TN4baqWki6%G=nB;x@OdqAd@%!<& z3KJU|`Kgyn_NprQ3Uax}6@n>0i z&f|P@cYGie!kiLBC4lm2Y@P%k8wzM*QSkfDD5vqJfjj%%7)bBM$}@b7Rux}Xqk?2? 
z#{Dtz6t!HK>}C18{2OP*d$*mwyzv%QysBU*#1;IN-fVFa`0hc;Yo2w^utF6T2$zx5 zUFMv(v|*Vt@5^1b4dApyU2X7OdIgi59ms@k5s zv^qsB#W{`|60Z`u!P+7T^p}dZ*c^J3$d~4P#!Qs^{s*7Qi=vJ}ngBqGPl&^nvBSDr znH1bAT88S7SSUEe%oVffJqEC*1>(IpuW<8T_8;-s@Fk1l0;R+J6$7zIsNq&3LOxOH zq+MfsdLUfXh~ob5#LND;EnoKvbVWk{5Wilr0&Y%-4(EhobEBVf5WgEQ`rsWg6#ZRs z#;M(i#ag=27MJ|j0cy%oitpyS58CVU)@rCYU~St-3n%=rkdJTC29WQ6;#z=?C!4A# zUhlYcNKh85-w_`Z=vJW|K8>F+HU6xPYvm%N>ysH4pNhtkGNB`svZOvP;3r*|*;QO) zN9jIG1QLtU>*)EfAIGjh%>HbMjG!<4jp>}LA)l#K%u!QOQqT(uYSn(Yfa9bR^StAP z0fof7&du$aO1#+O*l)aO+UCBOhV_kh)w&y1bkOc{yhCYXCixJ8F7JjPg0qKI;QMRWuviE-?-65dRa6*~5_ z-0|I;l(*Zqaf1>dRGTVv3{jC6Kz^X+M0YtWll%U_f)d{~uPOpHv&@4ivPFq5)sM6# zpH*kzC@EI7u0ade*KbhrhJ~_P1UZ5ZA#cA7Qp|r&%fCN!JJdqu^k>6=xB7#nX4G^d zLg;&18-xiTC?>WdKTsjJC4y6=qf|^g&INb}>^dwwhuIr zkoQ@u95cT0;D*XSfV{9RJs(@YG&C${An}QgfmNW69DUC-#0O-}C|!$zzw~N*IacR- zySBJ=V|6J6kp0>yrgDD^8;YB-Q4R~WQ{5r=m`9sxj)>z{z=twp>&4Zj-;n;EIUb$21Z_wX?y&ar{9VJ(KMjt^&;i-?rF_cOd!r@9;Nil7KWMh|C#p6T3gUP**K5x;?bsLWmIIlwiT@<4I}d z$7rTpkMv1*^^1H??M=1{D541`Xbz9Y>$c9en%O9)Ad~LOSNlezmF_#n3iiHb89Jbx z>1pdbdXL*(OH$lcmJ!`&ViyuktWQ!P?`Njj~jZj{GEy|(OF|Jv9VdZn_eC1p^7j- zVMQM1FT&B&OLygMQ7|j2CD`0D7Vi^aZM-VY~oM|FX@~OF}bTy0>0+jfsDiwZg zxyAG6^Fwv9;-FspJkko9lwX;l;9Y~s8_)rkMmua+B*&+q#f{%q0Y!P^g3_i z`+`9cYI{d|5IPhM=P6Q=jNB@JY63Py2>uDcal69}CA3}}jR7$e&_lBF0**gz&v`R+ z|Ki8~5yZhae33X6G#y(-Qs&D~=STng^gomMkH6i%PVICUrt@SoVsQj~w5$PG0gVA| z*P)DWo<97%eG`VsN+%vcc%ZDOJ9M3E&)vxp0B}Mp3boa<1gvp9mz-Or4uAmXxlzbl zxcEvNxQauG{f&A7APqZp&}y+Yq2Wk3P84&={gy)|jg>`D5!D5h!79aP+hY~ytf%sz zA?+KM5S&f?Jy{S>cwhc;Oz`pl$<4npd*Y_K?ivE-3ozCbmUBc4u(NUTPLW`G-I&$wu zDW_)cJR|D7?J<|+bcA3okXNC(`U+V9FEP+qG#G!!X*V`@0lOf(6_*sKYzXnZjr|S{ zC}ZgxE}_Q>UE-0Sd!n6tKUPJ3zR6w0wop1Fz+yjs7H@oG4xU4`aOBb&-E5aH$Ae|E z6JsDX7r*DidG~LG_mUU4f`OF)pNSw~V=CZX%126lBN;Sg5Vd1gE#2qPT`{QFSU%Jb zT{^sjU8s*n<$#a_Q{fc&TAY74#u+2wR+eyH_ftqQBh`MbfEL!IC$Vk8yTqX)$HISN zc{jJH8*oQrwJGIrrc5egVA`?TN8U2y1! 
[GIT binary patch: base85-encoded literal data; line structure lost in extraction, payload omitted]
zv_OV1x)&GX8k=%Sdv3tO+YoWpk5N(XrPb->kY}YgI;T+D95m%0O2}=tO-|af!msE( zBx(m#Z1MtLi8LBgEVGH?JO23s8;`VN_IM-j4F`9GIiXZy_uqbt&Qme5&$MT}bSvXz zi5jht(Q0dqqIRh*glE+}lKHkWS%iK?^ta@{eel6E7(PB%a6EcR8kN2cF#;|t=eH*- z#5!WrlCv5U-~jF>u|^Pp4F(bqzSpS6SvYgsfYtu&!ebd)`hpnfBW1;&QCW zBJbsD!ZJv;HzxBZ@4qKvUAB-4&>j-6hIQjqCM6@1W0MM_Czx<^hTM!)t(&6IN9E8? zj^x&BFJmwYt*Wjj5JxPF;=6{HR(s?sb>(U|VbbG}xxbzMk_Hra48Dir3zc#U4yj;e z`tOq3R`6O8q+m$Ks#1smo1qofq1;Ml#i@#wR&&zPnh);ZJ*Mz7OucvpY^bKD;qf>2 zFRSm=^dnhSVmottg~U+otHhjMMM(VX6&&Hpla*9_^T~gXrDG4kl|rK4d8fTohzM}R zAx48F?<{huBdf>Uu%~=hDmNnifvs3QM{sJe*#Fz2u3%fyc40#DN%s>ixVH--q9( zS|P=P6yX7(%3B1&L4q8dL@jp=Y!)soutPkHy;CWc{5AlxO8&+|bd#VMrYdit?8ST& zHDs^GATbABD*MdQjQ0?fR#>ByOhy7FpXt9clCO)OULs2Os7bM7AQx||Jf;NrR6H4S8>8HYUL{(QsYw zLYrF-S)atBL$Z^;Lt~fg@tMjfs^@t;2q$}Ve4GiFXUSgi?N>*{tEdkJEO`+@X~ zaCuLK(z_#4(Y@EQ0(E(IV=*=iPd$&lqAP%dCED_4n=Y~rjThK;Cl4N_sZ#x@m6Avb ze*c;v-s*@>s(ovMXbW z_CSD|*W~rXjF6_~Y z%Fo~h*XYz7<`+4&9xgSw#2QW2hmi%o69c5Y_nlY*LC{>U{q>fqaSXF-!MfGc`v6gS z?cqw%1>^gcq+B>Zs23q4iYn_qBGtgzu0Cq#y65^48QZLj~At zJ_@uEffzJDN^^j_BXMs`l4^f~2;oFq#1y3))%+pNox#dk@q#XKAy7>_1xYS30p?H` zxUwYlhN6A0{Ri*eoGM2YZ=59aI|y7Q!A(KF+<5Rm{tsh>3F6N0Vod*k`#S{UjrLlm z@6VwdydUbgr{G*3KtdZ|_ijvqKzIw*&``%2WaIXjSZc5NSyY4Tw757D(47!FDH;bE z#JdKzkJkZg-YLAfO>)Gs_-e`zj&M zT_A!lfAT=^*YPa@5X!pax!KvoY_S_bD%!am_M0fEyXo{$ubtCJOoGMml&My~;8AXH z5#lFpSeLSoy_m^asc`G1g#pz>6lsj19gXwciifd=jbhCvdkX#USHIMrzY;oJ&4Z|( z5i-iw=0=Efhuaw+sgFV|B0RxfXfE@SMDRE&iO5VC*YAMDG2b$<3{+*&GeL3yY#|WY zNKmaYVanv<#(2?(B<5NP%QU0VSD#ZFDin||{UtXs;o%v_6F^*m1LLpW1i6?1v+12_V|>W;qk2I_)Lgx*@J9Av3qZeSgd^Gz>BtHPs5laU=1 zip_UpSnA2>1|sX2X{7kZ$r5 z5%cHtXXkEXz?r;#@dtTy(dv{?$aE#vZMBtdo;ZRZx81brR@*|U0tYI4Gvp7p59~j# z&ot0P&BW>Sknv++68di`hn*5X6jtAN$lJCy+$NO*DXHv(6`W0gi#YMPAG^g*z|jP? z-`}$G?Hmt0V{zjWtq_?2ht>xnvt{_L#dV2Xj)c|}@LY$&9dlNS4T%|;l738o8GCa= zF+WQhT`nQ4r66Of;Bmxb)c71Zhocbxf}pD*16|YnV9-&P>$n#{^#Y-G(HcED)`Nz! 
z@u0A%b>hD+mV8|jZX|r{@jhsAcByK5!0M{wA4IP4a5u&%^zPL_R%y zjty9YOC*UH`234rkKWVa+v#jUAQ|N!OtaRo7UiCh4^BgDxKbwa@}HWnWf14s;;rLt zpJ;qAb`S*3?^abddLMXY-BDsaPlehJuP$0vxmFHqdt=xIFPL2|luzYVt3&vKX z@L|fmFpc)0$dyp|J!K?9`-&Alee{WTAzp}>l*2^rR%p6A54*eS^|{4Gceg~(25aPY zW0(QEgw2%v=>r2=3NK)g-jwLMX=+`3u4TI}khrV>gmV6w8&|x_ zFu}=V!Y6Xi@~<_q{|g-e039YtNQq!n1yb{fEwsgj>3EXZFEvMdRY(^#r(+4gabKc^ zfVE2)_uYF4nL>J3iLZBu@GCVW(YAbO_3`Je%At%c%uT@x$o>#2H&GE)2d zS@(wyB00aA+nDXKC8DE!lz+fd{zm*kyG5J*tr1YE;sEiG38}7H-*N&W9^DK5M%aq6kWes4%>M26Twa zHXYwq?Jd|K3+LNAxZL5i!}2RNvs0ezcL^!{%F>IAg#h07KWM=y zA>kDxD@8r$)_43J+DxhRoK>rm$?yP*uI>d{JPC1*?+!8w^+J38a!3lfwuV7Oq2P>q zR-}mf&~q^E#*%_+586Wfh2e)~VH`oUf^mHD946bj(th|Vrg^z?#wpVDuc%nLZiwM= z`4+@g9<90NbY^(f|9W=hPycX%oM&USGxV;RD~)9)F{N0bk|8QLXHYENX(hnrl_Ch5 zrgMo#5u^ZjrbPH1X+eqXlU>(#rE==k)5cHx95FnyY-MY8E>#2H_|bsmAlq$`{(5kxXb14T_>LHuy8 zEjr+gd=R{KmZtdJ<%U^P)(oy@tn_4CSdIT?PZBfgNzb_`Bhhd|R)nPz!YUXRU5&uK zum<@NJW#y!gf5?xkhTRK)HPv6%>4!q;hgyFJ6jY?mSd=fGot`b08jn z7-Qqk=7)P(@ks+Opc2hc@VMwF8Q;fa404{2Q>7k3U#}um%eAdd& zkde5bQvSjF?(&9lhv)7X$I5NXnGC4mZs}u&b*knWU4g}580%EKHRU6EFy=mcbohN} zCP1xP*+UKy!6p9;lv7zG-ke>+l)$BFzd^Cx&2TZQ0s@wiQ!^GmyUrEjb6RA zI2CJ(ZN&{?Y<~gcq`26TGZ|*UpnWm^^I<8edNG-IH&VOW_Gp z8($c!|3UUTRTVtrN$x$}Vle|Qlo2}DX4m>Qv?jEwa#LfYfl00>iYzRxh`X%ZV#eRV zTrD*Q_o&$`?;F}?ED~-gPWsMzJe-KcC=VrtLauo9*d3tAxjPCV7yo=?=PuUxdR?hi zbdrBAZUB@a&yCI%N^awXnXx56*m4By$7*zxBPcp=wp{5ica(QeG8 zCX)`Gh!Nn-@mm6X@#UU|RVhbUA=DG~?(EX33y>zY);=#&;_h(c^4XoxiS0lA{r^4Y zZ~Rw2u({F?NIzH>=BYM48KVPbSE7FGOE1Y8pD7U(pWJui)*;bp9XAhtPefP#B_4qJ zv|n6fs&e_eQy5pGm&JEi=%QW_SgQZvlut~if~JB@QP6-Q`PX2Vu0^HxGbcxtj=-ow z;PBuC%ckR`tGlgxT?V`BBhkJslCUO>cr7c;T7VPEAIG)9Ejx<;zW71=w4VvZPDCP- z-{NyXzxi0rAth76!Q;;xEY!Iu81}!AI&TvKn}`zE98l|K^%)3L%kry*n|e8LDN#QJ z*!8kEkULp{cGvQJvn^#==N?759194kA4OPUj&|{_5OUpT=}X9md>jj9psr)V~bMVczWDyjmgCdPM~;6GW1CYw_e|o#mXHP z@A!g!9>r_mJD28{{sB7?6YohqhlhEmK7FxW%#@ljd&CQ>sQLrSzo+{njDNovrb}|l zy&Vg0o(lx^!eE?&8qG=ooKjPr1m9gF;X5c(wAt8JPS;jd{qF&{|Zz@!TONa~hpwYX|M^CRjJPYrJ+?$Dzs}nt$ z{@wgCRc#~=WXTet0XNhWCK!>>Opha6i`l&2F8TBk=7UCobnkdH7&k-rk>wgk^&ah& zgveDOD^Ri`ba%<>IH_pmnOod-sQ+C)3P0T#7v_yMm7_sWG!z`Aa4IQqii-)K;=I82 zT0kX=(yLm!S{+t$lD6j+0ZJcc%p|Hge&DW~#G);oV>&3uE*2ATWK;?M%a}ejGeX7W zuEppdNSApnh3>EC+d*Lz#vS={JgjpjhFC&bYNDqWpfBRpD_}g2lLYnOxQ7H{)?w z*W^lyhFxHZo*KMC{*lG+WS`vJ4b}3Oj*ijEe*^QZMUJtNiSQg1o-EvJf%;WWLhJmq zPty%@qkBDo+>=!lneU(J`@+Iub?!5tc)x@KbIsJpEv|_oB6MHlM0i_!qcf^9$9aC* z-C?e_mtT%k#a)iD6@zMO&>Z$&>Zzq79)y$wL0x+4sTdTOsuYTYhfhCsKB-5U%JF3m zFuPL%W{Pbr`NUG88hW944=xO&3Z|of9q#*IpZ@i^IET18VL0H_OgX{0t-ieS4gP>?n@KP{u7?uUNQ4$z%YhHE&RM?3_PE>%^g`dB;l2) z4&`@q{|*iXuoMeXA$o^Nkz9Tqn=SNH5a`!EQB|!ZYWbk-RxkWQL^iY)@tK|QIsO2g zpa0zQzs~-{6Q4v!u6bgU0PvhnYL|!01qJ-e<>yh zuXZ?um6%dAH{Dh<0waMi=a%!@i5XVU?>yU{3W=wr*D(1ryO;_{4_9{V{wLc9ekTY$ zf|7xP>m)SAx+#|A>0^-%3;AUh15Ih7@lG9qgm2Njzvh3)t0b+2|M``d4Qz)uC_D8X?V=f{dw;ImJTe<7eO%=aiRep@m0eIJ^{MVgfdp(}Fw!;93_5pC1 zW`?i8eOttM>nu&$<5p+oqS#0+KK-6Q1w+tyVp&fq$NQDo^7U2%1VAt_6H0=wDiZFh zTWIWdkB!zsxmYgS7F=v!pu^my6Nq?Pz`RqrFiGSMF(S`As}wKNy3B?H*;%+!9AAa( zyi~W@aw{ZE>+PMjopP6Pm8rp3Yy|op53Ck;60rkpH8ZNPl(>%1dF9)b@Ww(Ol>K5L z1ur{U1HS-tLF>nh6y{NxY%lq?LaI(iqo^6_YK0!TM~;Xv&{$VOy5b1=0e1(T4;Y^_ zR~7a03QcnJsSR#7)BQ6to5i<^tyYzvZ^biaq>d}8jC{Oayd$H9NE)^tBsrf-v@naz zNlT^qHmxmUONfTke=3$iu?q-xh4zLj9D@!=p=LX%^RwUo-(zU!zw&{HLTON~8QyNZ zUN(VwPU6w##i-k9a>52=4*+rh%VaR-r-SlOe#5pdc=3NrYrNS`{FFTwZ|xUL>UKov zaI`5{7CSIb)t|Iyy(bEgF=($6K1)fgVAXa=KZThT8+BGn!JCb>!yVr1qT1>cgg-h7 z%|Q(4TjO^P}#{?*RDa*HRqGB;4A_<778bo$A|YrD5cf9 z(Wp~rr(&5K7_0P1yPB`~Yn7f+>tQ)yH{jYo@Ng~jtXEo*(RqvRqD3s@;)Ds#F5`9 zvP=~aJZ~-_znU_g>MgaNIk|j?H%ZgkI@G}{%vWA>Vp<~vuHTb+m=Pv4ZOUG#M`{Z5 
zsR0G$?qgxzYSRlAFBBS@7BMY(37iexdbN(#S+b$rFfYUm$xhmEPFZZ_XFk(D6MF}f zOTbBSb3mwYQ+gI}-ZGzPPdyX2*y*FE+aJdtkj~xk51#ql?83Bdf;%WlYG0(T0R5$& zP)G#?T?m;aIZq26muMPTVd%F{$I2&m$uloIZVY$Luc! z#>rud)1u5%rJw%NzXep--Thbg>CXy5jmtslT>e#zT#z3nqe?VP)c{L`lyuRD1sTs; zGL=?%I20wIlXX0X*V@A38zC_StFM;v{-8T|vEE9VJW0ZuY!Jy?d|jY$-#r7LR*oN(?{_@mC(sj5fCb2kWq19!^}pp|7LIATFoEG#P;q?DIha2MmVuS@{{Zu zB}dmtIGHMk8J$s89Kc5Ctg_kF z>8U?Hldk_(wjAkoF^ryikr5gK#%Z^dviBZcVXE)p(}snJ#lEDVlTe3v0Dwb_I$WN)FH%>`C3U)Q@^#3R^qv9(7e z<8ld8aM+T$!R3H6sm26_;DxT)RR+`nE^gsiEG(|JX-^+z)_-Y}K%#QHRSido?eI?|cbruDyeT5K_2^($^WVUs+vlx8qqG+`WZbixkvd63%}mUIYe5 z-ZILvQb$n66y)QAh;JqJ*`y|V?i9|kc@wdGcx+-F_?+CrWIEglwT;O{FX3@^VX7S! z$+WOthc*>6f@yMakiINR4hD(KA3~B%k$(1B`mMyc)pDT6JRN*EeMAE_^7})iufJ5u zSrDsO=Ng(&dAqD7_TdOcxA6*=fHQ#SxEz7M0e*FoRt09nQpf;KQ`*Cm8WeD) zhdm2ULS)JRa0AW>Kv=AU_%J(bv9GWTYo;vaRqzy4;?$B4g^Z;^S=9Ul#njV%0louz zv!IO)gct#Js%a~Xl*T_0ccx(;$>yHxB=A)+0#rkdcgBQKpX>48Z+)q~6aRgK>o%q( z8?1sb_+<*|$1AWSAVq&CrcK?Xg=9rx7NLCW{fe$`Ww`3m%%;YVM?@x*0Vn!Zfr54c zTne#p64j<8j1V-S2$D&ZfRiRG-ksKVaFh`76D*z|#Jm(^ot>}|1AdD!P~}t+IFdAC zQP2l@yf961fj7OE!}~PQ9FryDXuaLujYaD8R`-Kcw#cRDylWldv})pd{GIBnf~0fP zsIHTuPu%>W2eBh$qxwn=VuI#FIGaVXRd;jGQG^LH6sr|P+1}s&Wh|SB zOGion?4AOpC}Pg{+l+@pK`!o60#XhxKzLd6WqU*kAxZx*NtYm#rthp^mh2E*@el5C z>@i}?31JuK+Z*0Rq8YI2DH91NwRRt0ok*F%sEKaT}TW3uplPA$YvLDBp zjBxIGjtME*f@m5JLc!p;k*mz%%mD>v&wvY$^KVMxB@tNdpmYW2PA@S zXq2>Gj5#V#?3N*2xnbOkobgi2)M3Qs4pc5yt7jmT@x_fYXO~=ixM#l~l{eCEV6A%z z(8U0F6a^*KE@EjfN%AYUw1eiMRL!m#Dw~*MHh$?jRP|T##P^4ks8dIp;1-7+(mFXh zhS=&PeA{Ia1(t~y*INJJvMx!p9gnVE!4bH&x)2+FrFhTzwab@FJC85zv7A6+tE^xz zxV1*!5Ru_pA$DR+7`Oj6nNAZ>*Qh%+YU1f3dhaCBPr91eb_%i}J;5_3OP71E?(@MA zUJ5}z(q1A!yS)T>FRsVL8ItFt&+^2BG3{ij9mEJXYAD<0=h-3s9mVN2YTN|YP1-bI zE&GX-0-b2$?YBs6AEl$E^)@Ut6ULHJJGe)XCd)f?7QGvPWlNf85pKPq-a7lno*Q}C z(-nuJOibgh1Q22!LE7C@|$mAdGJ3BDTAZ0$*G5rC=lQ561)fpxXX17zt zB$?37&+qgWe2n*JkK|^Oe~qt`XQ*Yr_M1(7H-hWfI7SI1>R66KGCRgZS_ygC&ey@T zIQWaS^l!@jAK7w{e|4y=X$$%osz>=wRkQI+E~+7b;+tc9weh0=8EqHRhp z>Dr1d=K$y2w@;L+SLU6Q)I*|~2NYz#EiX@&i#_`cSgRcS4A4E|1RV54E`eURBTM;DdtVQttp$OA#yy=TSnb5n! zXaQU49$rb>t-R=`jeaoC-;q2s`ayd_{fFMg5@Us19OAvkS#W!z*%uU6JRY5VIKzyi_*4$IR!`9$Avo@^i#h?!c4er2`v6V)?z}gkGqkfjgTne~ej!(Dv_`TS0 zi@_8}YA+I$-BgR>*dX#ZZ4k7isw9FQNKgFGG!C#qp8Qq~58k!cu5H%@w*6T1rF)LL z#AKxb3K)L))-8__9f^ajuYxL8SIkKDE7nff!Z`JHG-oVopR-pZricIOvCt=Naa{3X zH}^l0<+kS~?GjjcNb#=f)z6?x7rAg_=2QeD&mYrw&Z83orDNpK$Xr81jquo^)CZyJ z?BMpLp5qz2%9p12zl`P`+y1!=#pB^LG3kaHVGOforbuc9IW;~N{!i$1$&@>=LF{Yu z5cimLtI-8MI;Orm7KA|fZAll9;ZPN->sNy!Pen8iHcD$K*k|mmnCZ48P`_hhDMId@D@95|I01h zPD+F025tp!5rG(}txCwy21mlpjbDw?MG|Koqy|8)sC!bAD6o;+pzk9 zNIyl;cPpH38WKbm=C!i95xeL9P=0~L34Kc$j!C?68sW07-c!;x#*EyeWQr?Sp_!$z z9uiY(avAU9Xm}aoEPl1#squrzIWo7Q-%k+|QcbT13og>z0qug3AFok&=^1bfz<|A! 
z&cZ|$1O|;{bR!qKol?_VILaDxp^Fxe-D=9k>@f7x!55m4sPQ=FvnM3zkOk0m&jmj& zQjcfG@@smIgfKqze)6|;rq1R+Rn(L)T1SsNa5YlC)!#!UuuJk8m`m@*}K+iput>y&q zLYYtcz9%eh!hS)d!Cta~sBF#Yw#UC(o(S3~LGF#0iFXfGlA4kRZo-*BsM(f_ajFNZ z{>qKRG+ZaUCID9(nPE@BgiNx5de(A__RaR!m(*-NSx3FjX>~`0Jcz?RbI$GzJq*8O z->veXXTgTUlI)fjSwlLtIaoaP?yVl5Jw;%(0DKNA@CEuB3n)r19gMTE0^LE z5G(e4^okNswkd8ocJq$+*SS`%8JB*!+_d!yI2CQAFD#bvZmaBj<)_^6#Z4o!5QI+{k#HD06Pz|r z7yy}q%`w`8@pX83jr`u$uZ}y^oCFs^WjgPL0$~CLCNEmVUaI^>iYKb@74yeWLO|tc zU5nh#iKeSLSpe>Z?;a>3WUJBkum|i%g#sXiEp7|W=~85DE0OK|ahxpYh?viY&SLGL zVNa{mOYeVR-zGjea;-#c{`Ghgx)({FPn|5r!Es>m!(aVYy2je*YK6qi+bWM!feVrGUuiZNW2*=@MslqQeccLV-)3yBp z^T!0@z>MlmCso{dM+_4~W;uH`8)SEZNXEq=z%E*)_z2PGavuU{e{eu{BZu~G)%o!$ zH279|)LSuigmS4l0ozTJmoQ&XcmZlA)I%{a@N;Sw?M$=@^BIu`Sosdnt+}Leaz+($ zJ{wQ2DG;trO1#wWp%?9xPSI7S!M{@e(iVxnK~dW0)C!#0D{*4|yWBuo$M*Ry6fP;_ zvrCs8-LfQoU&ae$wtV`{6;hPC*72JC4)@xgDK1!U+d+aRfx{7D`1vea2S*BIkK0 z1xQGbL@97ZYd#aZ;n4QnJVye{v$F@O@!%-s&8}l04_ti!poWC|?vp+Ej>Z*zXZlX; zd2+y<4(@lJ-pXnY6cjNED(BD{#f?IrZuPh08Gyi1-!flf(Q>QO1{W8~W-FezKcFi| z+Qdego3}HWCmI!eRl|3*7EmJeJG$Z3c`#9fl4jB!sQuD8Rh-nRQ}G zK|n6snOK>!hqqjIEMUg(xJ$LXST4IILI=>?_YEc+q?9DN!d{y9C}jQQup2%p?G;=z zid3rgvqfN+!&7V9wpgiZ2lM>nf`oG#l9ZX+H@x-h8!d-hB)sY&aQ z1V2`}?4TEh8M41hz^WiZa9Gl2TQU)69l|XxnDkH z(*2E$d*5L#qtxPSt$SM90hq-evCdF#Y&vXJ;|V4Du5B#Ek6TC|H;9Spc@K?c{P8QB zSB(Lh*{HUcrEyFylZ}Vv8aqL0;V2PWFcJIUKTm(7`S5?Rqie}K!b*x|%#(UZ9?A+W zckRujn9C?Z(rcw?^asG^CS_s^V_g{rgGW+xaKY+A>#C|>q|Kf17)U{#9Es%|nrR$L z_cmW6dS^>MO_tk_W1^~5(MB0Ug_Oz426HkK^#`h@oKx!fhgR+T5di5+q(n^J?hATh z?~z7jR6_jNLZT!XM(vOS;3?W0;X)m`Vce6yZQMcj4^tEkI|>~P~? z5opaJp=8!eSr zEU#&Hlr6Tw0~_ZPMRI7i^14lr@3aomk>qXNI)du59O~5CL`TgPsjr${HbvT4r~$}#{h7@RQ!<-p4(;|Qp)?sju* zMBYqHldH4wf}clhllFgfM2jb^P}LEhwr*5=-jt-`xo3kMw^W@3+%DujiC8-0$O{Jp zZnS^=iSl?b8GFY@bLA+5{j9rHdL8kJ_%x-o#0gTY2*Iv|?a(7KEFsxZKybi<*`*WW zk_8$q)zL3fA%Z@77W>+A*=)ya$7-B9NWBoygVI;Acg2+^;BkW$*fUZSel%*Sy@Bgr z;-6_-tuA-($U~{BV3Qn+1@x$V#xyVzGHmUlJys5G_)Bzax*haK-x8(KjO5gA;K951 ze_HN;YHJ@J?jTdu;4>HjI~m~hj9{G2uZ;1?74uHAm0Wy(6f4$H}^Ei zv`yGb;UN!!-Ytu^wT-p1=8%yMEJTn@_BSM0&ISq|luT4w#U%VRZiLL;bPujd)kHf# z8Ym20ah)j_&IkX4IOMxVc*ZNH5GsHbTs&uN^kmn*xUPCC#@re~R>?+OR@yLRE;rv; z#6E@0Jyd4xW1|m|CL~pzwP9Ra6+pT##CVf;uHKJw+Bw;=-8jc33~1cqvlElegm1gV5?+QQ^woJ|0UMUn+Z4rJOKSm4drsKSNi zjfrpcxVNXY34W>38i1V#vgP{@ZrisvWZYt|COOdBDz|~JL|Gz8h0B*0H*BQ}a(!w~ zrm1Qo*;T<>dSt`gq~ZjAP*TU(8*;-b9f%8pNN z0~U7@YPZ5j!gWwpxz=jbRQ> z$hZGmId`A#0_f@OcsB=MT?eDq>#Qwyep_wUY6YSlonqU2GIwryyDTQqf*t*Je5>}} zBMUxATDNmVGZk6v!9YWDJN8;Es1yfe8U{`Pk)^b$7m5Q6)C39JJt=Vdl46~W!s)|& z(*x?E8J)14kQ>P&VKq(7oqb|)74UFtIrvy$m7&`wWYcg>0w6)G@Au{NzqXHK36w<; znf9}0GLWL%3n4&d2SY-tqOezjo&0fPR*Z6wVUuof<$D=pEHTtJH#}|s29_+E)rffy zS#HmlQiV1~nR)U_w*c`pRcFBl+kg4RYw5f+sGbyk#STM2>1a8OAW*5 z^V}JD7z2l~rVRiLSIV8)ewmQU6qw)G@+95PU$Pt3!PInHUXyJJ4%!z9 zaU$BBz9{oPKtWP51hL&1pm6f@zlycsr;zsvY0|6FL|G5<{#7o&y?AWbIMio?1KtabuHz!DT&`}DvaCdAEBAcVJRWnFF(inrbbC24R<^=M?{Qh5Efc^JauL^>;FUx{7 zvUPale#)=`VUk3jV~3IXg}#6FT7qioRc z>?II{;l6M#au1}_Rh?$!)bwp!fM5ur`DC&Lv5tyI{Sjr-G`(GW`vP(yHiUak7BTG* z;)y{+9MkiQU*VH_ybIf>oxvEPV>_m-hfSo0tl3tzwJi+2$IW(@Q9TLS?X<;0 zdzyERqd-_3(DwPRZMhDJ@$5;>3d^=h`6{4Hcmp0UWH#-tUz2kbMF)XtqB+>YZUB~T zi-V%@kuR<(TN}6+LPpug9iWL+PZ-}+z}w-uWQ?8-I_yxhAX5kG``to2Fpaue2F8|a z>oc>;<)__+vEvn_x1;qH$hF#($VdZR13tE&rYEf6Vz?7>EUW#D^m1n!kB$qkmy@>4wk^5m{%J=U7%-BY)=u+@eg>%^U`MMs#t7>N#?8y(@ zAUuG$)8jh>0#@M&Q&W)I#x!Z|Ucf17mK$fwm9JfY(g|Vj>Vn0sRvwtdBg|+A@CaSX zgMcI~&&2}q$CSmm;~Ity!DseWTK=rrR{@Ts!En)+1 zOyGB+UE_pQR;#G8Z+xu8sjZrR6xUA~(*h9RVWt`gh~zJYc-^$M&`@?rv?pZokkyC#aHbKJ-aU=2_Y* zca6EHC*LUFhzHYNx!ehIHb!0#RDFoJIxqteOuS85sW&F+ViVFi+z+}o-a*8zp1fI} 
z^tp6Pbc-<(OIzDo2>sy{It6Rpdt8WCW4(3gEo1L#V`yT}AQ0^QfcTY*XYGNlJpl5@ zZi2xPfmXXj?!+iU-^5-o9P|D;v+}wO#Z)y|L0s^%?NP?(UfDK&`t#xyB3xTsq=fwk zMUoIWwD$h1GX1+)H{~>U;-?KRyse7os0!F`^F^?b!7*5RBz>R?cQsa3`<2)pVW6^V zBxS0Stf81Tg7e5gxex4A6?j*IWZ%I;4?evAewn_V7uUXSKPikCT?g^Xo7RZLn_ zLdgIxT|iqX{u@mk-nWMlI4`f13(E@}hahERi?uC;S87IJXkHrA~ z9GfYJv9V`b z-Rf7LD6bd+xE+suJ-9{5RQwcX`Q`ZI%eV=*m>-=(E+naCb6|4Bc`%+7=q0RZa-oZ= z#*b>KWlQg1Eko7F^kK#eMvS1hM*)PWGi_#|BUsg!Z42$PHgEi269(9fwxtI@!7zR=jD_fmmr@e}~5*tby}Vji)AVPaJ978!vbw| zK~uZ(J3HpLo-2ydbPQdk! zZ2??Abe*|{Y>4{yt>kgmwyb;3R`@)xyZ9av=SdKL4x(g@Nc@c#q`;H3MQdfz7W&vz zZn`Ri0r7{!Ag=^06Jrr< zZ|%*pV2?+$m9DL}2B)626T&ywfjCK2t(-r3HuddSnHbq%cLLNmIM9SVNx?h%Xdnr%nnK(^(Bvo2=1W87JLgRC)sV6%Us(YgQyt_ zI+qOdiq!JqlH!oL&?%uQBEW)dQX19HszZJk$!COR5MpY-%@`XFKdcwcxJbJq^gzu( zjTzZtU^918=yH`kXTe6N1oHE$s&wvn=~}AwtkWwFT%y-_V$PG}I1)ab**+P-)PT21 zw#|@vT^@%Z6{-7Ui%3HB98vP-+G>zZp(*4|mA}R=C|AY@#NlZkI0_b^RD4fyJtV@8 z`Xbu*^z22aHrXF+ZO`y62lj7`9x!$Y6w##!Ceb4L=Ws)!4blb~fILf`VoI`7!9~c< z3Z6IR@s;w`SS8Pcgk(c=5cY&5MSLIjtr#%d1U)7e7CpU1;+<$mkV5KPyd5o#=J+vp z9j4oqZb^IvO2PR`UD;Iy`=j`Hiq|Y6z}!gZP6#ZP8XL8^Py-JHLfCe@EN&!9zlhbA zz|0h>st>fiZxvOibZcxHRY>t%a0NvJh>ln@Xz1Fqr9b{NkH2d2!XN#Q3E;c@r|7@z z7eblhUEOknO>F^a(t(^vW!=h@BlcW?y@klPLlHpW+4CWe09b&w!Py@4{5GQ9Q|b~Y zzoK-A#XwKvHie$-3y8F`Z6rdk4$-QKL^Uc$P%S<|e1b}O+f&WnXN?BYQ_n> z29-SCM@V6Iqz$l?Y7@s(kO&YLVz3iiA2Geo0?Wi!A- zVVe(j;Cy9JO+Gwdo{YU#A;x#)71$y)%JN!Jp5DcbvL+TXN3bNA{e(J0t!h&gsr!ZD>{VpuIpzVCf@Mhc{D}k)FQFemr zZ97O38OI8@1urh+w^3XYwTY{jH2XrTKMBTKD3J`irtE1el>=5m}y2t#^r<%PNZ%f}Rq{&z3``j!?nRjE8}Q`k%0d5RHC%@@OZ?p~5|t zabydZi}Qy4AUst9Pf=AVGQ zg&BrMS*DGsL4~pOBA9*o|Ms)+jHtO3e~f8>@fqYLClc&Lt4-S2!sGT4#ZeFe34KUq za1OG`0b*;rVAAuRbf4f0tYllSytFi=k1PRfYJ6ki8doF>DAE{JrtcE`!1fFZHPAO{ z=n#Rs$6IWONvGY&kzPfHI?<$`$j#Y7EMS-=t=crDPQTc%u3=(P;&`LD95ez@u*jbd zO61pN`cvB!!6UL>Ov4VYp>057aX@ido-AU%lUfUc75YegWNW4Ld~(;=n_Q~Ldj*N; zxE-@o_5)ymnm8oOdWtY`SA0mcP7Vt_t?iuEBw5niXekICv1b|v-sYxfSE&}J_ur85N9*Yc~E;Kz{d9S8^v=5(-S(uc&mBBZfL|>dmpC=)Z>h8 z8T?zy2H!v-DBqE^UXQlLu%JhmmBLUFCBW)9=@+H-f^U>BWXX0ocLXd;DuT&qht9*Y zOK;oSa6%EOMmi!9(Ym|J58z$4<*$_I?D^Gooq-pfE9cI}QK|An!X~sK{)LqN7p7PP z$|X*pAskercs&y$Hjg~7VJW(l86iOkxei3Bgqu3-DQ};1?h}B_IL5J2=1@QUls$+H zly~VO@UURCsY0Zi%%3RTiqa^J*2cnhpIlB28+EPFM*!r%UIWs9=;~UrtSITkVEJj8 zeqifbENhqTWd4#tnm3KW@^6{D1Ce8KM1S!<6f9#g#wUmaP-t)ak3Xho{pa5akqER$ z!PG=MVQQoWf(F+y^zGcYDJvmCmhdHLz)osA%I;z4$cV`oPm`cXT`PnT>f=D@K_x)r zeSoh$R|pGLWn%AVb?D6E>D6W0x8|A?+A!|yguSI0DBv&K{`5qX?(P~te<_zL)1wu0 znlO{ysZ$*rN7vf}4+w4T14NI9>a6Ur)e5P7V-nO|CmMp?G0M=~tBnG1#{KrU%eTKB zn}?_4YT|BgrNwJ*=M%;oH_KaZ89OLqiEu_U#ZE;zmz~(r9IA$)*$41M70$93BQ5CD z630?YT7WDk7x8skkdHB0Drm~7q}Ig<>D&mUbwJfGy*+KEK%GY>2jr&Sf3H+;^$*6{ zLMJ%V{dKf946&Ex;SefPR}xIHiQzm**f6!eqInOkZ`=$~zd@CS8;!8I@cbkbjRbr` zQr4bIP4WCZ3e|pATZaeVx&OYca)~~FsfDG={w2En`!&qNT@E51QIA=p*Tyj*NF|kX z*C@<2jptK`VEf`_O_^@9r6^BSq5^zXLZ?bJWyHjbJl93gI?SMV#oj4$VLZy=&{iU9 zfMkqD{FuXpK$2eC06!r_kVhDV*k!JO16(m^@q1GW4Z~h`18$#jl`QZK%pvQKwAjj2 zOI2{-vj-4hC8RCZ3 z8!c~?3VG&2kcoLb;`yK?TvV-QV+&>3lDoy~p{q|$gaXI=Y9X$fJMoY%=dG&qSQELl zR~@oSrc)sWFJ(AaE}oAs6Y{HfFsS@S+1QLFui}*=3+&GrOn8L$CTv( z>$9(S1>Ji$@U3$oanqgTXHmBD^ER1w}mBAVH->H zo3V2uAWWm|9E3himT=a0lz_>&iDy$e(Dk|-Tt)y+1Upr%zknm&Fw z>`UoZ`d{Rz5-od~rHO1z_kMmY*ALN;?P;SZ86=zqWtBXfgNXEU8_JLAoi_{r-2x4X zZIUwn&v?dP-uwK9xjIBsPbW6=cxTV*(fhYoc$CqJ&TzNv-*GM-A1EO{a9id|*;rhB z(f9)^ECa_MDUW>J&K#&52Buyea_)346P#%e!1;?1(e&hN*T!_KfiYk@NWI;6Udk5o-5JCd1@bT-K_-6f(^kon$mFWXEUiy;7ctX961__b+8^ zWFb4)O$JmIWWn8Jd;_8{FDYbd&A;|;{rNv9ZvA9jx@HWxtqcVs(ID1LM%IX23*Jj? 
zf0-_Eu{efmO@A5GCtZOFk=w4w(jhb!1luRrkLlioY3;FKMpGbggXUz5t@dK5<~QmM zQ6engoDBjmSc=#iv7imCTo2Xgx>PSSE+>H<%B$F7ELs6-IUey4fscM;9K;P|PlKKl zMcE~+``pxXm6haO<6SC(YzM3yxGO?59Yrr6*w$%tRMy(|Zg^UZWPhxZD)qY{jZFrl z+)=$itgrD!`a!Nb_JcW;$i*4$-`Qd#FrIz2M#5QYioD&$^cPNI+hxcjNC3MiS4-!P zeHjab+zT#z2qzlLXewhh0fXe&rz7NBycS)B#zkkw8DQ|~DE*~Om`%Z}UFgY;aS~#L z$!<0BE?p;;Y|GNy!J|<%6z!Ri>?P^(#i!q|wCWcxGT5>J6qu#pE zHs=N~8`HD6vL`R+i-htqNZzKsQjX3 z5H?h<2i_rJIul8ijdsfkB?Fa&x3d#c>bB#V9{l+JN5E0j(rB<2*rz4T5=Zc?Z3OTY zGypZL$vjeysMWFqSi^Sb@BKT*G!%L#!+fK(E(fO$2#<`9gy9q*!KhM$2U9oYH1+ET zaR5SC6msLPzxIi-=OIeKzzC$pzNi{XzWRK5sP) zdX&RE+&7{wS8vZgubdRAu6VidsJA=87NhZTjdcxoNVyoz!+@&Dq^84t{8GrmK8+HNc2h|?h2kT+VcT|a_htrODsc&m z1zMf8q7x;DW0RW&x#WhwB023=tacHgV&@>wm_A$1J!|Z-g0~ms#lGRbaFIf)s9%tJ z;7V%8UWgW3qor8K9dv6T?crPiUE*4QrBlHt3~ z5ffANN{9#l$IKjFn#`O+L~}Dc>Y4bh;ygi^(>dav=YHyy@|uwXHihi!Y3LC#EHfO1 zAi=4u*x8Fo@kCAqM*_=g&%qw0Qz@Dt2Xx4ITupEeB#lthvt_0-?+GZTQ13CIePpI_ z3R>*b;&!%ru*)03O8l*jF4m1(jK5mF*3YTqLUTwBWoAxGFPYFC<0kV-sZ z4m=mZBgjP92#9zv%~+p_pXN7WWaSFc9QzAh!uZJ+P>sr&U^2K+3cNt>201nrelhwH zI2j#RD41EZH_O8&=G2?%clJpYkSm*=0XiCDsOBHM(iX%GO7~jS8M&FHSkE9Py9p#c z^zbiTM-EVaA>V4Jq_S^h$En%0eb$f=aW~r(e*CX#ARd3lW;=`;Aevp>vh~$g*yntQ zA5sWnUlXuK$>!KGiXT1D^Wc=mGzs<{`8WE)TsRYgr`z8Jb}Uac$5Go{CJ-MkJ@BQV zM6f8!{|%w^*g63_X2*NYE0Z~%F`K*kb9`I*P$SJw>7^#YwHuiOpBc2_VL}81jW43r zkI0C%3-T^ZRh%RZpoz=ChlmSjCkTd?NN)a) zzZ#by43dIVIH}{zMs*Pie;8b%h&7=%rv)_;WIj#kE@Yg9eF+pRJ^0lG#5j@d^iz~r zAg4VPJ~u0fwkY$tn|f!Q4)1mBb#>AeUk?dH+?0lMh&v-DS)AJ1%9w{HvuADFcgyb4 zzF%covBPc%DS@{XYL+>oCmDm(zA`us- z@&}2u??;VT&M9nQLCT)li;;+O%Suv(We29=*YILt@LL$^LOrLFtHd-p%Y!i~p%i{Nk{Ul^Mhe2+0l9U)(|ohm5|6bd!m^UxnD z`JC`zhdp)YupQCkBUOYdX7Fo!vs%{vC%avBY#cJ0Qni?>qlOvxf2r}V?*>C*v~sJd zqT&d{bEjer_dZ$GZO|UrHAZMG5Lx$jzy*+JT zE%=mbzHl*U%fpp{CDrwdPIyy6#RxQuQb)3v!} zu&pMAgolzj5$q>L6+f+FKF7w|g630PI(8{0kvyT#y6p$aC)FGepv{o`Tk1jnt+(LPF=n#aN@Z&xNAQ?_{serlg*kR-NR|Odd zs$BDApP-W;C@IbLb#gZG8Gxm->!EDy?yBZYhMcG5V~m4J*-lJJ03-$FBjQPoEn`ns+ZEk$&)SM{7Bs!uwlB>a zi5p7hl3^)}3E4>I-43os%!%;WnjVTlAo47;;sjz1#xNM~^576+&gRGd#i|K|4ss?6 zknV7kt32{)IqT)+;C=a2v*1u%sHkUn@IF1yGU3!00A&nh!ia=&ycF)m+oLbiGAfp;4pFFT4~#$me0lzP(}olq0*)bZ5G=le zgo`lfJ)9IHG9DeDv_&@)sBWi_>K+3`b77IL@S)p#G^IFeH-~V=NBqNtQo4A>tLJeU z-LQqvdVP4t0hTw)YVdf~>cBufb4a?F+#xYRzt~C`*d2G z>h^#gb>9txFN4H+5n%lUK9Mox6qZ-z<6Z~qRu|P@=-am49ZBN`A{V7UYK7u4ZuKs6 z7RUKb$$LP8Nv&-UK+#j7mn!-YPL%ng3X^BW3z673v#z&UDhlktXZdgo>M)olr)CVb z40jq6EOQU{Q{%%^cxk{2x&X3`y`(*B2E@*R%qgZ>*bNc{Lq>Q7yI!GIPHblHqi%8i z+NP~!aj~q&XHR3D9?n_3D^VjAVV{4by!C{eXUvzj8PoiWy+GBS?PQmNxVdD@)!(4H z15~ae=-h?J)RVeVkT}IZ|7QA5al!t8-iN=i<8JO6r8rpc!SkC}7t71GTren?OcSpc zx12;Jh1_jfC=VRfp-Y7Rfx=JB1h(g#KVRHV!@~__w*c-s=(mP3tWm4+n;Z17R&>E0 z)WK;_Yt2UK5@a)hCGYL`u}|49fGB6q8HW*H!!9rb{f2~O)u`9@OJKqFN5EA6o;#m1 zT4Vg@X@y3px7t1N{*Pmw^?KU*wz0%QOe`X1nFKuCa;vtEt8f`d-Tcj0pR(;=z(3ROnfZP&O0myy^Fe4O(k zjGiPPkcTmh;dMFb(ZF$09kzCo&c|$HSS>*NAV^n<%*<1dmdBp3GuZ(Y*MJ;@taOcL zn2>nX?O7&br|{S{v~X%V1Qv?|zZ96GpiRJne#&k%EnQMDU1_l#u2mbjAyt`CL$)im zYd1s1e6kd5-3m7#`jL3?8>)k9iYSQ8(dJeg7+1Im(YXiECoXU*S-y;qBb=6T0D66I zVs`?~bIurBIE8l0KDN5WjNo`HGo_Ep5^y!uS}tB$_n(FRAH8 z|E_E5lTPktX{1C7c;gmOPIw(ho^Dmc_EcAH&1zDVU~R6vusFYIw6D=^-2`rUIM3v^ zNHfPOv6yDE1ZT^IbMXYkAKgZ(m*N6YEHX)&t6kO?j5kHN!9C2uI19Z1#&ckb9GSCe zoS9ThbpKQEl#Z6Bw;_ExfJW!ZpOpKFZ5}Q z<&en4u<4bWxP^|+csC!~u}Xlg+V^?;Zy9Wy?K{2+v&6}EBkd6lVRt*Js-qfRGqX0T z)p*OL@ZIBb&)x{R$yuOY;ul2bm3Fa1r1%E^E_>r&eY||h{#uWVzRBTzs(!>FIv!QC ztbCkY9Y({8gH+fzYKR>ZMrjn16z6h-d;Zr6lzoaD}3l~%Ic0I)U6c%Pf$emSkeoay);z3ex{B7YCsJJWHgU}O4A69{CdhdPzP&B;p|)$Z<3crR zA$~0AF~;LjfrDEBf 
zq!a+B;rvO5r_Fdj+IhU6iVQ$iV3JcE6Q`%5r1-)%D4sj6&pnTrLJngSpMps!d`jZz9}aO!RE+46xuD$!melqx!USU8j$y? z8A1EH+Cq)EYry~uil}AFo50vn*_pU7K}5l}DMZF~)p$e~3tamEwR08a#`ZQv|U-;h&Ey3ddeZ`)ck>^pa6Q zQ`CzKDT&WA+mwxtRitD?b8(_%NCIX~{ntl%wlBnmlil>H$^0xST6`;CHSDCL;cw&y<-sKSvgt<22;79IQfdpW^7%UVN39b zB#f2%qXIB4wK7?L>y)trslVg*YQM%9!{EPH-1O{_2H}0S>XznAc8Kt1?81Ye5y`m! z2Rl>p&c7|orUt&0$GqW!09?=M2x65$QRLhO6Hz`)J1JmC-XHe~MRv_J0ztbQ{C23+ zXtG*rZxBc1%`dmg(u%EKPB%3_LTH4kh&-Vu9VJaS^XrgW5g> zA950-&cG!X6)_ao$DdaOzzeFOb1}zwp;1lQrYN#j6Ee~}CdhDVcTy)htUsMFtM;!~ zyxcRvY;C0*QeGdqDJ^Ah6K*#;dgxtS#_SK{ipR@CvDP{DNANfsQs=FQpD=a@@=ChT%^h+w)1TXeJ93oX=bulqcu_>m z)}=&oy{Wb?!eF=U>ni`iF{~3u;&JNwQhX3E(Exy9V9oe0F>m{0?(Sb=SA zrR-Jh5jJrwAiqCnGhL+aBzb7Xqo|2n_TQnFu=|)`KQPY^DSOZ6!CU zm72S@X+#t{28K&t;?gjvHOWBTfQ;N5_lZ|PyGXb_A*6b&5{t}7o#Myq7P7DqWfjNM zScI{Rr!P{E70+OP%~_-Jt285sONo?+(CVEoTh&P0ikdh4nu~EL#PCETdl4?Z+KR-X z-s^YqfVTl>8P}Qqx1(zHZXFEoG^UVZu!+KaR2HR+Pr-*}0r~9&LQ~ukpf|%z{Ao_@ zV9BgTu9u54#!+(-j-+6~4Yh+A1UDAj7ikd>iV2=Y^dyFYUD3qn6aabw5t0M~M0!U5Lc)O*Y(Fn zV#<5vLWJLHw}EdBo!;2!>`EP%?av6#jt2sf!i1x2dCI9=oe#clN~=Q*UBuim23uPw zFWV#0FUKB=eDzJn#T#D`^RF7p_NXlna(Q_(6ob4*a2L5*>1@2HScwKw$VIS~uH#Zn zJe;9O8Mzg|w6a)s`$vLZICbG7uo~_GLO1_7oWRHz3JrNd2<9H;M|Gkej18%?=>T+Pis z@vVScaCx{4VB!>s2yz{9a*B=`xtcz!@%IjsYK-}Q z7P=EsqpBRG?0VYmV-PI!%5aqRa!01@DL<1v) zLMqOXpn7Jwrzy2+KyS8}7Hv0F>(8LeBf!aKMrn311~V7K`}jOnymJT^@`V0R-1OLc zHCOO~gINhMcL=go9$w(nU{EViG4x=qY1_d~ z3WTUEoVOmf_rb{<_V1LBq(&1XhT9!d5_;&p-(M(mK~z9?YKxM#)uXNUdK^E;C+8r( zaf@0jag^=6s;1s8cj0H?H4YrISBzQoi)?o%6qSIst6 zP`@e%?krJP8hS~o+@Ugm4U*r|OrX36J#5qd0X>U{2V!YkImSK#e^Jnaf3T*P^h<`|-VbG=;leYR!WvJGCoewqu#TTS)_qzyVk=xF4 zN{^<$_aErsyKJ9cho@jl&>0cHq7;Ha%$=ma&`x-HVf{@{7%UQ;1a3wJt1fuv2^gkB zIaT!Gz;WBdZjnyCGmP^>5lR^C9up)>oF?oGa))+^SyON!>o_ty385$slF-~ghC#x( zo=|wz_kxwCxa6H+(IHqPKh)~sB~To8pPHT>^$+Y^33>+4(`o;W$$-kY0HR8}fyBum zWA;$H;(Q@sP~mHKJ^~)N@k#(wQ9awAc&qQe2$&F0{16SSL%}|d3xv}LFB5pSCkt5a zc!18GlF;8GZbM4rMuIt%;t=SKbM7*Ilnwa3VZ^6bn#{=s&MvDQm3zi!vQ`_EV~jC_ ziEtaxSN29~bjVvX58DJFvmt!cwuaP|>8ELTY)<72iA^a17dLn+UF1i1V`JD8A)nV| zlj1-hCRk$?&Zp^N+cdM<`UKe!Z}Da<7H$odY<4QxvV{KPC-Qw>tft_jQmC%>~?YVXT-(2;}-vjEhpS#VUJ4KhLB{R5@ zw}nemJoH7)!5$iC=YKne7EDjR*drP!kNqbIL(!N`68P^MV&TP8H6{hqC`yA9r z#n0j*FmSg9)*M}|4DrByAmtBwwbCjs%AxQZA#fjjzf3>24KC4%(^i1_MLEiB$ILkr zv$1v*VXizQQ>A__VHH%odP$dT5tj(N%q1fdp^2Jssv*T`v`iMSG3;&G6Z~4#*~c?C z<(voMHIoIOJc96W3f~~UBp{11w=z15i$P7M0^So4$@>ZOr#tT18v!kwGeqg zCxnN{z|9_WE`z*;JN5~lV_eL{(w@qWCP1TFc@ zUeZ6H2bCg9j3*ldrDTs#B=^~bfu@c4(&|iWTD-D}bAd7^4)Sj%wtZ%2M&>%YQSZ}3 zPuawH4ESoZZY%i6+Sd1Ul zi{@8BA*^p~p#Hu1%^|uG@+8-ikTg(aXe1vy7+@^$z6Vfi1o2WlM%{=>^0|7zsKw~? 
zgeh@oF3XPZ?-j5qHy@Av5khf$bsNI$$PNy6p_ES6WDY=TFnlB2O%7Cb*}=PuU(kyV z!^v|^wV=~zv(yy&M>w$Q5~M;=%^oyrlhim$QSo)`wc5QTlV%?aEr3lNE@gk7$0yPS z0)M7<6u}`hPOMWmk9zVTHj58(o=%1QG)E0-$Hq*8u5FJ;?t%cH#9a1Zr1UhYJylrW)q-M(8s88XWjO>a?S+ z6zQRQ*_RqZyBb*`QjYL9PH=c4H3DZ{!A)YaB|=*0OiOB3q(rJpV3or z%i++$8{{j9#j?;0h!xP3_u9@RnCHEEQ%a9fJBeT`zzs|?=zMq}f42)Ichp0CT8SW~ zan(t)i6#2UfJV`V6o1$6STMl9e|mS~|FGAHY8TOh&T9 zLl$Nyo>RbhDF}gOLgdWL|M&s;``m-)pI zd^PpFR~z*kODST)9R)oT8{s#KjuWDZPcg-js+nLs^85oAdJ90G)nfz>auulG{7D}e2KJ|r0^}J*+wfkxuCL!Rl zlj7N0v$w*h{4mXwI-R|2;BJ=XcHFa-ArT`w=yGYTDPq+S3{YqiYH+8-?yOhMBJotn4Q+*>|&zvLH_}iqILnt%{bA9Ez^=w{1~l zrMU%EOqfXq$Zg2$p+#kZ__p#{DkHMveum-lO;m#zU>N zJ&6$b04A{&t~GN);}$FAK=TYc(Y6Mta7pZE@MB+S!iVPu^eWvR+2VpHqA?N`>!4Qv z{3$5XDz{0c*h;j184ONI!ots7c=VrFW&LEVb3s{Whve15i1%uJ#Qpb-+zi#!g|Q!p z1WGx{5DtLKPWyb>M~H_mT`BD#De2;h?L{yf2WYsO2N6O52VcM+kfq!Q2Iga z4K6Y$Bik5+*r61{E*42p6|&M-L!)!QKYb^z!)bA1w6ApV_6@7w)hi_fU=%UugGJaLwo&QPE=Qcoez# zwY?e-`LILVuaFblNFJ~ZF3kx$(bjzE@$M(fn|9CyfyOSwxNGG9EcR2|TD#&5sxN9Z z!k(*4g*Dh}uv0yiy;|yNCnF2=IGe=v0!F+A0^^U#n6UjzoIdOImYZR}a(ShiyaEoD za_)SPVjjLd^k^#u73b`UKr=W*+c#P!`s+1i6Q9OYZV(eLo^~XY$mpF<69veGBE#*; zTgq)Zgsg@hiI@eqwefRnLdaf@UD!_{w$lK8wT$B8qQL znPO4{vX+hFy~AUqVrr&%9ZNDc$VAi5L5fL8p%G+HpKqtC4?)am675hKm2J^PA^L0o z1*q|uK?!Ia6##{L=pb8=ydLs##-xa7Vo%z#5v8SgvwgZ6=TXi}@R?W}B3xyH_8ESO ze=)L<`+m7?k9zQXh%d3IDYP-=Et1{z32SPi>KZY&~(<-F~?jS3{m}^lwnVUj&ES5^_brdjBw(1IDLp zs^-o;pY~X}`AYELisB6Y6pK|;BiJci+qo2M6l(LcxZcFhS&>YUoK_(qpdPxUa{=+& z4S23RWj8?nUJ6K!kvIgK(?CaEri%Eb$~!1=gLMo{)1M~({}>Jb_}~7CEMl)$_-ohu z`JiED<%5*4+n}eFYL2v-Kp~8Q+I~4`@KE`B-18dFWST9~(}a*D2qyOD-*q)TzY>W% z8{qqY`89YNa934Py|@6OxD6DE4tB z0@hY-8;?Fxp89I2fXej^QQdNYa1@`4OwsiuY^kn+hTM=smMIluPGuW4^XJZF`B(n=yA7@4T0*We` z9}((W3Zc$3wuRt`DLNOu*c1=pB~-z%y!hg1fX6Ij+{nzyjLH?`Ao@Z*9qf3!LCa}> z_5r*USAc>z0|2&%1D})_8qBcKK2`G(OCh{TC{oEpxBE^L%+80Za~Y=W&5>HACg-7s zBFo6uvrIn4{+-Hcujgp$d-3r?2+CqcMb;Y0q@Rg{?!}ggEYZyz7{^h%f&0V~6t9dk z>~WnM)PB&BuIOIxD6A48V>@8%?7bA!ZP&h-sYwZj2Xo-#QS8b)Y8{jn984O~g$*g&brVt=a?eQHx5GfgO5FAmm1h;W2iOc0kud+dC0(-#g__pEM zBA}Xr*`TR7%nx*WaviVUzAM9K!z=!gKc!>75D$VkYIZ>3t|-b-!y@o@;9BFHMhAVG z`yC9(ErFcb(%I|2ZR zU|loT86V(T`#xfg7zWePjTYQeid~!*L<-1(8oxeT{`arN;!%^eA|QUF;y-W+*#%Pw zK)1n`vw+%G>n2mL^D#jNU8B^)>n}LIUJ=4_S2_V{rCL|5iNW^|7BC5XoE-sql?&!Xv zpZlq4M7?*$Y;`EcyC$FLCrSj4rn({g=vO-<3jcyX+v%L4@t>^%Q${{pf|+%8u#XVq z?uk-t#`#S~&7|vir#4#?IKjKe5@}4{A~$9$?T`q~+Okb@3k>%q3mD&qs*&bP|z0L(Cn(&BG zt|kbI)VLkd!ys?tyWFc$s1w1I*m;t%HJi2+glQ2xrfQ+#nM$KLr4O)un^ai$Wcz`Qesp!F?W%bfc z=Ga0_gd*ad+c%L*w#0>*DMTbldp>tpT`H&tP3UZNoThih`M*GrILui9*{USWY7&5e z#7$60g+VC}tHQf0Ati9=$BN+5xzBd2n>`HWqPR0=V*GVccrrWjweqqp4$VSOiU`00 z=^NgP3V%%FRJAe3rA_7l!k54tS&~@y4uHQIX|*jpr@IysF6i%J;*Q$?oXA8vE8;Wehw?166aSOb>K)>sNvS#bo5ZUgqo|ItTa~yAz{6<~uEbyG|dZ;uyZ{?M5u0 z;2RXAwTp~(dkiA}4`u4=Yn_3u^)f`vRN3wO?-y5=wHF^wsl6F5No~AxsSIfA!$V=J zGNfEJCb=sSWvZNcP*KqE*%k=$KI|>((~UFZVZB8#Ox&=bG5+d&aL+thxx|ZG{jQiP zaRyfw`x^jnoXbPyF{299q-TGE+*eodbdrmik_Z^p7`;`6_fQO2S6CZSjGaTUFu_q(F_@w))Zv90U2_=BysDSGPc)0q_W14%2Wr9 zg!uQI#@w}X+4%gC5?$rg?8=^jc^P#*=P_AAq{_JF-f-;#l1vf4H>AVzQurbkfN{wd zXjc}$0t!KHf^OciZ#3mN&8mB;VciBGWn|_E@osGGD*BG|9UL@Kju;V(tR$VTX>IOH z4sq3xeFzbcSH1RT@p#p|1P<3BnT2iILv3stS;%RfJ8!G0ar&oCt7u*zjpRq;Walzx zYfGR2+hqqVIt{Sw0W=k$FONN27e8_Li-_bnb9RozjGL{nYz@3=clHFv&Fs0IiB<0U z3aw2tImi!8vDv4Q^305J1{h3*626!{rgi2Xe!wkB^dWnpj`z}_I5j#!=qSHQvdQ+W^63xltu^8qbmvR2M8erKPg(FjER_5YTZ2TSzly z#E-{eZb*zm9f@XBXoOXW1F8>8o7?0#H_;A&kGXK({q zuQC@~He;(D9;CLNdl4WMC=B)P0?Po<9Fx%imHE^1`Cl2&fliK?AB@c9@?eqT-S#!S z*GuKhMO(vlhQh`Qk)L#|jZml(p%6YgB#ek4+zzpePLU7zg`nSQ73oZAhoC2rD1zH- 
zca%|$SFfov9)i^NvyHm)Ye=>(Mf?#HT*F`vI;u!N*@cq`yzs33#7B9q{iIek(OK-l zf4ctzrf0+-1v0bq1A&Xi81%v)Lk0^Gtly?6sGRz5+G2PY)u#v++XkSyL0CCIl9ls2 z`|zo!9cXT?ZXuNzP?Ux~lj|LH65p!LZk*>fLd;FGe_Ninqs1I@0^;;M6|W`gdk5aP zof6+BpVsr8D*~?!YFr{Rq8Q(@UF@~We-LAjk2FI-)rj2Qf06jAU9W?3&%O!IXk2qh zFAA&xQJ{Qdfh&sOht(w$8CkWT7_T}v90RslJn72tfDY1oea{|sRBlWAA^YM|Z9>xP zmo_|`7GJfrl8#AF3Zf&7VvruiSOcoD&dwL%r(1GNAx$p!Z*9?wKUD#ulx047SohKL zXtMZ5xFolXghY5I!JV-6S^Ep!GS0@v@u`(>?&S$4|1a}NzOP1B6rgc)Sc7m3s^s((jPD`G# zp~kTGHyT8ve(k4vkuTb6ZmAi>j{k9;Bw#O<32sp)M_V!AuWONanqv@bg68BWCcEgI3r9=P-73-2}<+Khss5Yl84JL;Z zXNVC^0NI^1P@*g%E+$HuFlMk|Tk;v<6OSa$hazIKo_&cZ;%=9LkFhuKZOW8Gb(2cO zF5z`AM+S0&4fOvYU{uBQCMv0q%`-f(HwyZO!X&G__HAT|C<-Z*xJL#!PE;xTPqZwxLY0g)+=Q|KK}ixRz$x+Q0R!;$s>f{O9Q}$=TokaqtJ} zb`V$2)!eb@P($)e(M6XlxEYj++k?@R3Cujko$qhuE-9pSUDdxX!%+%MOpr+M8tkIPCsx&`no z-3NVlb^7IR>_-I{F&APdl3=;Tuu#{S8?jtqC2;T_DMOPOwFh>5V3WHKucGMVcbD;& zn+%x=9ch}}DSZ(g8I+ z@&D+5y(rmMh$EE(yUp+ug%PX{hRz4Soc^Nu?~;EoqSRWT*dxqKYTcAwRj9{UfOPoD z0Qfo|EXApbbV-GTHQwZ%ox8!y@dT-vWChSz7v+nrb~wbL>`+fsAf=R!jEok-xG$iq z+cTJl$HIaK+ccd`a=4vK%`@FikViunIOO{_pm!?Ehh9dhQFa~g;ESVCffadc9SuH< z3(&M54U0nX8d23vj4tjjmEV(OBoU3&c$M%zWvX0j2agDtYDXkuZn}c~_tW}+rTL%f zzYk8L?@D0UrRG&yk!Z7p+EgXwEWIicGvd8m*6=cK@fbkTpzySL5qtE|nq6Py%R{4Bc@b3smNYTP=+o)(|dxF$ojFCZf5tI@{sk z&4~VHfS9lgak`i_%s__ViQxBz@=3+G;f8 zSA%vE820({wXY>g!$Xp{_x#c~YcmQdx=N|Ooi7S&H)-2;VYjh`iUrirU|R}b-nJ)U ztP0~3KceOjQsV3?RXbTd_Ku-6+1JCwi>*V1NX|%9<>GM<;Y(6g)H-(^S)gE5_`@XQ z$iHP!JY#gp;3d){G8u+WA0+l|?2bm(_PKjuiBJ^ilc?el=^ z8J7E_*c47F^3&JE!4VD0F3V{a11m3{s0$i5@Tvf`hqItc`~=!L*}2?cps8mpHsqeg zv*=Hm&?h};oG~yj;&=~|>D3gmcI?3$WHsBdnI-s_(aH8#fWJBL#rR)AE}Rq}(4#LW z%(x>-y)%%LG7yd@sk5spO^a&;VhVRV{F0`pZW1R5CfQNZCT4@lo?arVaq(lfX_~q1 z@7n654M^SCR@#f@rEB&BkkfqqiI1_fSXKkX$#Gn{K;++p=u7lVvZp=wWO?pMqoMUX z`Xp}hX(^nH17mJ{Qi|It;}2Oc0kP5!R>!Y8=pTn|qao@~BB%MshNEta>h$ z;hU*R6rMYylkCBbSPs7gxE!XB7j`RUK-k*XxhPIu4We!+pvv$bu2me|IsyeTm2y%D zH|`ca)>76<{aXl6o_Z>Fib9BZLgtHmio40ipCv+6XgL#G8P5OsrotK zydaW1JPHVgDkXH%=Vond?y6eWr(&%i2v2NF9Up1YHRVerP*oZMl&wC?P1>!4k zJhs2$G-UMPVW&cc>`_dK0{h};M-VWb+-M>OAY&uK^yJO*W&5E{T(KV>`&xPKW_bG4 z)yOL$zBf~ei@zXnOK8fuJ|MjSh3okb$9Be!x2Y!S%X+`fdMwNvmnuEsk z5@QZSZT%x~zA7egNQJ&pF_ANZUh|EEUM-M2KqGCR(-Ybg8>{0!a#*q6iYFF;E2vAx zJ;$8M#{RxclT-F0F&2-R3M-8vVT?oU((AF``pfc~9U-O}{Zl@O_pa5+7`PAi2?V#| zKeo<+y7_t(kGGJK0hB??z`n4EsmLI;V18QI_4j<&4?bX~K)mmCI%3}qQn0p3%C=x3 zb;R@=0K(>ce7pAjBPp2LlUouE&agGN(Qk*I*hc>`=mwZ5vz(8+u33=gcnYfv#Bpkl^#eZcTDB$j=lLTG^Jfk1bL??kC$)7 zmYKTrx~w2)Tug+KgWlg{qG}k)Lk2W~_#{vnr-6w;4lRrVxnOmib4f1yEpErBYOs0m z|NLnwWCMZNe&oa7|3!Hs1UfDM1~iYXy9 zyT?EQs!J6#Qy(nx5MVA_2pJ$ERl~#*s{{zPhkI@xouRp6a2SvH0_!|6%mGJ5zt)W#TJ zV&6FR-nvOp!RSgQ_WhrS_)2Ht&0d9_bR zoh9Y+e0l6C+r7GD2dy=(Haf8#6Sj6YcnyfB9U9$$bLl-@z_xTvN7_&x(Ev<-2rQDFvs2DUcw^`}jSfI1o)%f!)A+=$arJXw_)mH6Ioko{ zQ_($&;MCnv?|2wkF|c5;Hq{oMS;P6ohD}&U0m%UG_R>IHa53Jknc7gIb_a3Pbr=US zG9u*9UMd&kycQ`JXBXI5K?stK3Ty;DIe}hIUMoJ_L=^}T8KWb2WVpn)INXDNvLbQ) zkn-Juhaxy83bcZq4Cj>en3QKPp$?GHieb^{-nkM>ST>o_ulA$zKN8%hpXXXIKE2=7 zm(Wc(jHijYzWe13BMb(;KvCSwUJBOK!vg-EmJCCW1&g+$u7LRXJKdV9`1Y)zI*g?t zasRF^P`$oJM@;FD$}|Ni)zo#y_9rOL<@HA{Vrn~rhN8=1@c^5Uvlq_9$s*jvK6XZI zQ6-Nae3ue(@K{qzmWOdrpUZs@=|!mQ-6HrJ@q=H+OYUG{5`(pukkzaBHKWjT50!_W zx6RUcgcO1^KmCkTOK5^@z{w$S9G}vcn%NZ;R^1|<>%*6;HLmdu`ZLL7N{SnyTS2nP z#9X|~k`H8?8XKwP>uds&)0-b{A8dvyAdO_(Xi%j&?O#^b?T-WHR0gS69r$L%J5ISX zEo@pg*Td9VgGsrUlIq5hxOYfBJ!|0gIZaC(x&p*@)j?d%6HP)Lk8_fvR#+e4D&oD` zfhpyoeg(<71P3IEU?-+dm8s-#i)mfA(wVSIKE0Fl57wu$D;0Mfo1*b%Z3DDBNcj4t zl6txK>8ITo%eJAxSO%W-Nj%5e3Gp2J7 zxypy*g5az{!PpGu3@^CtIRa%DVtzFOKHSN!(gXXpF~!VVzA>T%3egz}__5qGBZ1C# 
zH46v1A+GU)uxJ$-6ZZ?By6J%zzv>dUKO7GC$E{O;?5qFCf671d_pjO#AN!#Qi!3!V}#2e0k*SvCiEZ z_cc1m)TWpBktc|C&tu=GBj6D*ZlQDqqHy?91=+?i$(H5hV$myDc1RC<9X$>j7L8Z1 zTv1MZYkbox*EYRF;GlnWkF?TmHtn$bhz{G0@9z=d-%4PGBN_}y&kTrKmC$fA1ig!Z!ce`=oxF5IL^Tc}|UuyO_o{Ah`67<1iWzC-SIs+!nGkj{*l*rps9bHZMakAy?xz(?Y=x8qOxqz99@&0`lfnt3O` z!4B!AGH<-q!HGRgQ;ukg-9#KAwh_VRiv^qA5z%O(mojZYl`gLZSvj;>MiCTlI2t?a zD%!_YGGfeOGRn&gbOCQgo%$QFhGwVTDgNbEn03I)@a~sgx}5n({sXclIE;J@{87W& zE)mtFV^8gS!`Ft}2Nd>1WPpM6om9K6Yylt$Zu=H zq>E4_O-3;*Fb*Tw3EnL&J3%=Ill@2z5(6I|b}1FDg#-4f2ApU=!8iytJKr0EUZWsv zWr}LW%DUCn*c-a>q^Rfml0AxkM6L32$a*M@UV2%$30g( zH(xDJ+ERAPD3#tZRjcFKf$K8?sTz6(_ilR<6dnpB$PI3;GZS0Dn7VDN205+Ps=rgQ z&nLmdSP4ULx$%o;1b6bfE|+Vo_Ggk|=y=Fzl-Xu7Y#!SMgC)76U zFIYr8fjp}UIpYaJzHhmwQLWr%+sLT{A%LqHkr*0sj|d}F1I$i9PQFmNg6g@$!ENar z#4X)qXTfudA8*d*vai^0#OJxe5ra(1T zN1Hl%q^eR5Vr?tF$=*x^V?NWxBn2o*eIY3Ah?T<(Le?g>gCt`y-iTlZVc`Ng&Dob} zwexiRZA0;Dm-WJE?}l{;A-VkuzsEUTQ|=aJ=+U+ZZ%=6g`@SvxG+y;TyZq!n30UFy z2V2J@<%yd?0gXsNfu*wXqh0iJ4Ti+aNiV@RiBPg4)|MCS<>YODMxk8#f(ooqFR5N- zO;c2C5$mAg+){d%_m9hg9>gdz3)fm(G++DKs@G zgF!41cr}w?uv$1?`#d>{gaA8N&YcVGB(sg%wGE7#Z^TK}W@tS(BUj4RIMcNR`BP2l zkt-6$45cdtb!8&NO;CCSvtx$yuApOGTwN#lH%L8CP+W*`oZKp$;z4NV5q}`DWJd-% z^~fuJn+o8J`Gg?Y=IX5;m!&>|#6~=JSAafV7E$lXPE*Wguc5_SNhV1Pi2wmQ=^41~ z&{N^7xIYj6Rx=m>cKTsR3{0uP8(yWjC;!#>vSLFrG=^}rY%w=_@&1T@{kwhotdJtwBS1%+zZTG359AY*@gsu!R8^PU^n$^<*|5y)KrYiY9P*T z#o8RCVG8jR0E0r4ksI*}D08l{$+P9svvCvV2__y!d1D=I3}n+jj=aSPijTvH<2$)` zB@0A3G+BOQqZX4A+xEkJtF$+b;)GK;0YB;q@{D9i{1rARt~6FqlynvgN?qgL%Lh#? zO)8h0!a>Cn_vC(L{_Fr6g~!7BdF-+BT!=SjjAKciPOGv*pdo#{We3ZwNl(UEw)XfI zsSwqS*!ltQ(SXV??eyd?%Fx!u{b0iQf4Tp@{Q|-v4dCuLY;GUv1G`vQ)>6r8Dqt^A z_=(u6JwgORd}JgQkPBIQX}*MdL`Kmox==X@{AfEL3Ga99oj9l}>72li#sSrna!8*L zxC8E^f1U1%XU@bC^(mDE6qs<7Blf6xu`OEo2{s;$Y03UaAY8pdFig^MF5{D848_+s z@R3pav1Ho25;vX^#S{lqSEO4N-&=>bXfa@FV`;g(Y~ zxpMxTJ*G|SH%@Sa;-;=w-3?1SPL4)ZVXQYWXFmc45_}MAnI|%1kG!I|7j3)ZIk;wn zq;z^UvUd!l*aK>7Os5xrZX8{EqRetx{Pslh$eO@q`+?Z$=``h~h7OUEN&(fJhVUyx zY&=8=?lhIN5$qG`GujpV4#|z+cD~V!00*4zsfN}Tq{E|EjNz zV)!&JE)_2ByBA2X?!DYdyxZSZtDt1)GZk6l`8Idl#LZoP=E_#{E$&rMSmT5jy2>5K zdx^!8B;yjq&-5Z=Ge$%jYYdXHkKc7vc8na@cYlz1CmZh_<06&iAE#?C*A$AyicmE4 zj*AcwNc~9Fs?7?AkQ_JK>A*5Zvo?*SZEd7!M=8=)6+AXk+;6G08V9v+4|j8yR^j+l zvyqBmVFFjS5kz*Bgfn@EAY-uGgYT95Ka7nL)RezrUGTDzUZ4DG5!4xqmbMs3#zf|;JMwK?Y!~>-5b;@FG_g@8U8`_H) z7v2v1EF7g_oJMey0?sNsKQZcRbx8UvQpZlm}lalZ!tIQTi-H{z^3C;#~v2w z=}t%HoJCw0I{TjO3402QbGMWBeI)xUL+CLFScYHr38LKF9@Jb!;XHHkOiN7XK9JNA z7p{L01VL@GGwN2gzc^%=BsWr7Dx2*%a#RXa%L7=xp>ZdJj!{Q8j?7Rs5I1Vja&snD z$BZw0jnw}|>|q~95WHPnEGQYDC#~KnHx}*Kj}?%$RoAcOIqsO=9j2DrF)u6!c0w}u zw$l-oi2flD-Ye7J*j^y&ME)_TV`oqK8mLcO0|?r^#2IQ!tRbRDNvXrUNlT`8?ruo630-p%Z@m&(8{+bNs4;jTiC zJZsMfM7m+NfMxGizhVVPX_(6$epV5#ogJgd==x%ddJHs(y;xA=To3p<*1i3W@}hD~ zgVf+L#FYAprFad4ED1o{lecN}WCuS_-vEm4;;wa-)yA@OEP0KHdqVNS-ssTPb20D^&H_cLEEc7)jrSJ;pE|f$Jz-kqrl_m4GnXMw_&b)YRUuZBiDHV!L%eg z8mG?orc#=C8kussc*g2D^h39kHR)z?^Xi(1YZP0f_Q8%rn1O6~Oa{ghi{(Z<^Q#zf z|JHsa6H4wW*_4$YqZC$cd_Ph-%>8i++eJ7IL}cg)$7>q3e(n3ot_0@ToG0K8BY z!2T^w1tQ5!e(W%Rtfz5=${&XZI>A^g1X&Yhmy>zmY+k<75;dhlKPO?*f7Z2U`HZ^pzcu^KfH zUt-|&E5;UR^NrTa@#ztlu@culp#BIe>+t~;k@A-rthlL+j#{SqO6bcXJlqeb@7({H zF#<`jXa&Z+1Ki>Rzh5tw@&9Fqkg(7e&imK=-xNo*TYDM?h@CuAzF~h;H{vq=eGq6D zH6;2ZkwD$=xD&>mIxf$}O$TF2DlSR$j)LuoJ$nTxb&8WewjYI%5J@SBrY=*~SM7&4CzR&+U?$b(H z-DtH#~&#WT1XTmAVSj5-dsCzPJd$e zKs8#E-fol6vJC+X2nB^DPC(HYcK&?v@EKZ`W{7_~{fQChvWnCiVQd6u8qr29?Q)sC z9-Bri6S&B!@6D@#xEAfZPd!xJ{GOg%E1UC?GEr)ac|UZPs3FCaeNk1J8hs{t`pnr_ z0ut9bu?EteRfMhNq=VOR3XaNwrga$grYA4Z4&99w+B%;JN^ez3?XSg_^(ZkS{L^~n zLdL1O;q9QsK2^fFUCCwV&Lz}g8%1>6Blf!`<&5S0a0P-tFlXp8VQZ${JrFQYtYW7K 
zMI3(zxQn?utN-tRWM@E8&4xXR-Tyb@-t{@o>qztbCw6vYKh0^iY>~1=NxX@&+wLtC zKoSyeLV=`Y$@WyC3P1@36of*N1l4qG%QF$%9bs>GY;5f8exfguY}vAWx7*hU5cV3~8PoB&tpUgxf`fWT1pT0}+{D`6hu+LojYd7rN$@|65 zaoZg+x)v+9$lO?YlfNBgD4h)yx9ki@2EmL`@l;-$rhU0i`cEqc%JCX)nUHgzeZQ8L zsHOZ$Gie-wWigKCM9ly z*K6FrDwX2LDC%VF&A7r$7Zie*!;#XUb~ zC_mgI7f4hPb4UE+^BXuJ5$P4tZa>m*c{{)8JM3IF+9!W)M`;T9(7+#YkfElI$xUbZ z0>MTi(LQQ6nR7tF*YQLkmmYd}uZ5d=({_CyYj}4b+2+4orqqG76;$u&W&k$P1MUE*0`}t~HLL-;?c3ZRiH%5=h-gKm zgzQ)Y?n$zg;%AKEnVJLpoGqP#n77!YxS2<8dI*Q(ZHfD-Y^wyVQ8hj3(CcM>J*L$g z5b$GJY{LfB4|UrV%K_g)ZH};n;yc&`_y-X%{%rZ2&0Dp%)>drepkELG#mi-->=QtD zY`X0NjwnFyZZ2>IC4j3&}hWzV*x-qK_Dpb7H- z413H>s+S`I(kGt=6^Ay8If|psGd<(1d!U+BW9++l>pb6sB*1HvAwocxws1CdnI+dQ z#bf1Y>l^X+n`N;Rt5NGy=V02x{aWf%7h!UoKo3P%V>GtZQV0!8F@XD9oicBK$1)&Z zMfixlj)Os>r9!v$WZ59KjAH);xYHgL1+oblhMcym>M*I%EtpK2&P;Qf$A8DN$Er2W zCosRrzfbElDBJELRjJ6#CE%H~H}poUY{GCF5i_zNm8Gn$F#SxT*kxN<^*NzcNC^53 zLX>VjvY3RVVpFKdmP8bgUMQ9Q`TjBPZ&GUHmiSM+1;fxJP>4Ael&!2#7YYPpyT^sQ zht$=4*uWNrUST%TMKi|*KvejwKo|$D2u9p>O?&$2FUw!reyo?{96cc=``9H}Q41s% zakpU#ahnkfRubo%3kqa5Hm+)0C}q_%w;5D#@gL+gQv1LrdPY^^nYR4Al=OLG1NISA zlvE8OV51;*kZaf>9#kJAoxu+5y^?5R+*71&=KE1ON_}4q^-l90tl>iD?vZ+uuaAV^cG&|8)f%*9f%Mn)J?-LqwyF6*DXO zfV41HAs59O*Cr4U4!Z%feurhVS?zH=6Ys4BK@iQ2MPvKUJU4SQ$d-s3uBoWW`}Q<> zCe(IJ_cjNQN}uRck6iyndDf21m&$Xt(lndVr|d_iUBI8QAui69r3L$2t6$!tH)+Uw z?Z+YuET*iIgFwNU8zIa%NhlJV)TZh;y3kPs;(2>n3o+{0#}rVB4m*n*rF}Dm6LBVK zAiO=T8}V#=C7P$^A;lMVgtcR7+N*9VaBn|s$U2GlQvCdC=*;FyW>>^%jd?DhA&<%S z>$~hw5KhI*Z49al4yfxYj3{!o92}ma_N(G4na$AAb&wGj*#u(V+enwot);}fp^8nY zCN;Por&bK$E|j*L$xx&*CPvXH+12BwcnWaE$V*x6?YGy}up<*$K6($#V9?zOVjZ^h>`bG=umR+V54&eeDAi^*cB_JDa-uAS%qXERq zc}gw5P{$nahpd%S&4r+d=ysHlbjvJCt^ihZACoo40=)YxmF@Ud{A#%d4+mLrs0);y zOO7fV&TxWDM7oSh2iWK!S#=M0Z9-@h`~;YEuE3x)J$LR|JAIG8Pb-PjADbws+k@vY zSN$Ftw+~aj15rur5v#X0%G#|BdqnLsY=c4Zz#y=nM4OA?21k}dZVH`#!Ya)ogEky- z?i@t|B8z7+M@r5;SYIl;L zFw#iTK~_Avz5~h-Ci8T^t8RdhFXBE@-b6aORJ(%f97mb@n7NDKE!i4-_@#aV?w{;^VG=k-;WshB=2-xQwph>Whtu2!P@%&(UWfj7& zdZlH8`5CJ7#>7-mmo^X&?CXpZ%`2}jQ7x$DT?SG0=iwJ2q9m`Zd)R}R1OHIws7(MS zA~&D6L8by1PO0BfbHH=%m`IP{gWCX?X1XVG52cI@<(@Iwme3@0=}V&`d@>(!DJ>qjS#qTqogA@u8ahlZxF`BR?n~;%6lP{d!qk?`(ucLcKki z3Y@JNDOy`q9%cF1FR2oM2Rq(WEm*&4bY3oAjFGHBTIIyL#=8Oc$o8;B7qR#xBtSgx zD2%L1#^Vres6jU6#w(%M`gXclUmouW^$7QSRb?vYOTaEJf2&S_TusV>7K5Yg*_Q zPxQMO9>t}2_F8G(@R%1WKkc6STN0B9AtD}H5ja}|vLqT7f81YPBHhEyye4~1x_N*p z_{;|faURxGu7PVbY!obFmU=(Z3Z8hV;?(h#&XJOFxO)7Ww%nNd%##hke+sG2J6KIRW-KauVA+*u2?hXca*#u(gwKcw%Br;qTCoUz2oPlN+`7PD-Zq&vCM8AB6~?8a{6g%Y6k=e2Uls3+o}2H6XqTw=P(t zAcn#OyxE2br{Tg2Tz=8BqO zjDah9df1DR;(Ru8mYetWgb87_I_4+*LBib)VtJoablpjdO4^gU1!>^(c4^VAJ!-|BQIsS-91U2VAD zF_iX+xjPgC^_pMXbUCCLo=G;9QpMr1dkgRa2!c(cDpW|c2-MW!v$x~$jnGlE!0qIi z0O%Pq7pDW_HP@2zF-^Xq3(;%d#&cE|#(o4xkf~?tv?6dAAPN0bH&-q>wl;>m1@RY- zHOeX~tMqVbV`=K0P?=p);5L-X3r{@KKFfw>l>i`0@N>)Ca4;;0XC*iull-_m2;rP` zd56lxghz`!dzSp_Iu=5?wQRF}g;l29Tuh`(wFird*oPi}Ku68fAKH)d9k9saoxMSY z6$x9VdugFwyNBbr71yR*?wES@RK)%W`2$xOgpJ%&XP?Xm;`SvF=pNaIOkIweiSC=_ z^c$gL53s2Yy-1+0dWV^|se6~aLK!>YJTi~$QKVbDqQKNkrHHX)xIt1IaRfzE%oo;| zF$sFz-aQB6F^2{oW|0vWf0BlF+nz+qu?^u`YL-m-6CBxb0$!_KOF9zMH+=>1CDN+U zS1RxmQA}bJP}(Z5?bI>hd6_@!Mf=T{Jtg=(stYVM8M_r33#Ey)bAE9kMi6|GxB%Y5E3>C>s!@ev$1&h#;BebPTv4@< zt^=}VM~ulI>hbhctG20OgZ9(~We>+;Qgy8m)|*1B@Vaa&c&kbv2!mrwB(~H;{yL(a z@`MP*RVy$HEf&gIFx>_XHinZ4fhVE=N(38vJp=N}og{w$7*Ya#{v213NtmK71TzUZ zhQ{7tUC(&Nb{;u4DUVbT497g(W|*#CmWNA+=S5S^YOr)&OTifaBN$n2-uwHQkSK+t zF_N0D4s6DzWY*mj3IrR2(G9w9hvpEEAIs58Ct(PVw_W8(BWTBB2V|ff@cmd~IWxi# zHLuuNfJrW%1l)G0Y+x@Xl?KqRxcvq_@AM=4qso!ZX8&v|rAmOSofGAe{yIeY06tVr znw72NcZ?8#XsM1D`FxB{mKyks3AJ47oNosC0zn$N&A*I5k23HoIN`8sBj)x|oW~*a 
z#f=4AZ&k!I2@nb2K21W7%J2EZMX{O*w|}O5#tr}xR%o)xexuxsD3@C6WFoa?)e_^* z9vU>KOEFd?=yJn)T% zDM^u(bwau|4d+qod$8 z`x%27iH{y7m5&ss9CdNo!Ae5ZOwtxjI#F3FpE4}E*h08uIl=z`w+HCVHh`9pba}0c zFhoR6!EoAQTkn*$_$xOW2zTNNP}>GOT!LWxqw^6$MR6R!xcD1JNIDfa(5)_w@@x*= z_h(jEOL$mO9)c<_X=l$Y)>wrWttMoq;T@Ui^yHz9lQvWv5{^k+$V~z%s4PMDSQj97 zf&sveu{)eapskoln=IS-2^#DJyAs|ho*{&`#VWJWkh=l}EX|i>i5E;(I>1mSnfRS7 zs7_<7{2I7o+1!mWk|&Dw4cHi;S!a8OgRS_*CV6V(LkF(pA-wcbjG9um>o)3FbC#ZR zC*HHsASS#Supn(5&cifu0Vl}7)_CkTA&FwifKV>+C>L#<5CiQo^mxOBGT}e~DX6th zV$?X-+on9u6#KgD*@}rhVozvcF)gm#*jQb$-*8J*C@jOCYU+IC-jn#bJ-=aSPI56; z=F0Qsn!N{m#ccr@U+>u;_>9LV7cp@hC{=r#FUBsc0UJyXc3gRUlVI0B9r3b3(2nCU z=j6ge;W*e|J8E2)E)@I_yhl5=7nLg*qGHk4pN4*JQplA?ZXX*o99N}in@qf17Gf6F zWRBHD9#E4J%+f`uj7R$drhfGeojKSm9>r8OlgjozN*qiQ8bx+Ompeo!Ja}q3iakw%tLnhS>koHmO3$b$0&Z(kLl75DhAIn~s3>NW zNt|xs858yvGG}rQEIxF@oPZom3g`v)*T;{jffF0XbA-Dlhir)*bW>d12kQLUP%xVa zp?9Zro3(?8t?vFbX9N#A>L?T6{SFQLyl8l}I$36$e4Sa%jfLhM2ca(#l!L4Ex}xXb zpJHw1%W7if)}%u>Z)7i|iX(GFFl}G^cd3mCVeDgj6o(5xH*5#+rZc-M+t~D3(=BXY zQhMO}Vl3bta7&q(p^D|E&c;r0`4vFUMQ|m#3HLxvZtghpP#f1zx4nK^PxZS7&aNel zyJ5?(+NriXxReNSna%{I7T~V<3Ve&UVG6OE{&}Ui)ZlcD6Y;e&Uqg!_Eg=DgM} zi#8=ZG29NqT=qwL0Q}X{XL`|$`G4x4VdUH6z&02jSEl`fKnjdO?9LF+ri*w?iJUK7 zb}$J$VvF+&#cL+3VcG5OXqu=CD$;~Wt#rV64G;iQmQ*P?ta`9}#$e}&gwPq{9byJF zGe~ru1!X@UqEAd(Sz1)4ZTFYTJ2pn*L6IcJcV_G;dX`>F^Tlu*CwG}WzC80+N?F=G z#JQ_YXl(!zi7PMKpOH>NIx{>2@GESoOzf=P*5z%-5;y#qlplg|N!Ev{!?U=J{iwW| znIgJE1A{B6RE?%j0(3{4NTrzI=%;sr3}df^rn{D^e_tn(ZC@2FNlA^N0cxnDN3kqV zt3`*3Cy+FyTEA_kQj_boe`7p@_6S1Ew`SmrQb(2imtuQPtLEUyHzcf9qi5h+_1YqEv0x9>0& z0o)lUQqDnqAN^S!^6tna^~v(kWJUlZZA-%%7hgC*Ez4Zpc_CZ8xR+GYRVXwu-Su+4 zZ7<r)Wt-oBu0qTLfaFHXm=Mnowcf^=%YFTmI1$V%m%bvHm#r|Q%``Y zLA7$n;csGq$eahc+8Y_OAr~aWgfdQTyjWsoy0vA8Td7Es|IwoSkA{1D)BW)d!588^ zj5wQO;8+R7+$gWFE!ab;x}vu5whz+6wN{5s<=VA!(U!7b-VOm-mkmgz@c7L#4D)-O z(pXT84weQT=Y~YPy!KjrLwrV7FX6m)XlH{c*KyOxLboxgj!Ud%2ck|$Bva=++jMFG zp5&<1?umEy&=ka-;t|xLc_AzW$V`N?j@ZONRC=;tLncmR_}$@13Kr`jp&&$PB)zr>_eEF+TEgl2 zH!hoGWOG6YRFe7A?uQKrF5!tcbr3Dv zhNgAM9Nrt&s3|_6aCdCV5=wP9t#~cg*&D{t)@oY%WM}Nv$mht9>Dcp#CSQ2+NgG$q zskJF!G5fVJ51_kyGtE zbm7W#oggty0+O-AtW|}~j`b_iQDQ~Tak?J4jO781(g~HigGtJj6!bYPKCxUxYl+2L zL4bjHyF2dvPGsC5cQ!Y)YiXs*6S_Z%(nK}l36WWzoV@j$9I|$-2HDvWZa{VVq%p1$ zlf0Q21JHsTQ)Z^AwV+&FECIDfvK@O!kB3krB=V8e?o0Me`D`pW!P1>0YTDSa9)DYX zdw*`TRKKcop1DZuxMV0+mJ*ZTgxcr3d|=?H)Wc`qEUCLChoc-NKog|W4WuSS|6&+7 zXFT+%$3~&VYLdnZtK&1Rj~rK_(gR$8I1+6!ND^p`$MBXcRJtG7nU~Sd|6qyy{ppWw z^{_Ym98ze0rb5{rEm5KjWhWSJFR0h0`3a{8~Bx1v@eT?H8Yq z9Z{G2?%}p`4RM^a)NvkCz*y`?(C2NxXX$Jz_N_gRDdSwLxqAG1JPv% zew7mq+a7pKQC6~hRtm=Manx)Rx2GV{34u~FL$c7Yn7x?E=_85}?uLPYi`=+&*utSA zxX|e~Pu^7YBJ zj41Do8U{AGlHQu8D|C(f87y+x4N+KzkjN^<>$|v-!Y0FmSSib?(g;Ufj)_bC78crI zF>5GX=h6 zVxp)D=Pjy2V%{XHnGhj|%|6U&6e%u1SHT~+C)nW}bo&&bU~N6}y%1-ViyUpCeKs_T zYp!hAHVF0@`_i8~SI*hP5ITJ|hqK65#T+x$OwpJ>l#@Tklh(^}$N#8KeJ8)LAJN;J zjKtP)ZZV*>&+(t4{|mejVXpUat=1oNQPMvtJ-QTQ!-o5!KA}@hN}nii#1uH1lr~Qk zY7wzn_mpQfIKZt1GmiIcfo!5Kq?BxO*j3#ArlM-~fz&?gIv?KOjQfMS1cdN$yiJ7uYcW^9=VT_ux3S23j=92Ak=`S4BqEPnhY8z1*9G^I!` zK+WYa--HF*q^W@$v}!`o9VH*S*1)J)<0dp}6TBEdyiz*$0|038g1?lNxo9J59c8+l zD*ff+uNbyoONo2(m;4W0A2ZWdL=ukIdW7;lOz{2zB@i};xOJFB_NM1wDqbLVy{wqf zJ^q-kVJF`Xxnf5Kl)snvG;}fMc9e}{(>bp^O@G3BCp1I+C=aQxiLl$Q6?en*BHtbi zG*~Zh)9+|KZAz_D62e&n3~|vlSEY&Z*UMK!prl@rdZqXJO|KkybU@{o#3!T9VZi&# z>b%{F_qJ&ghS?FnIH>Fr4;SH*KK=Ch3-R~LFPS)di!PIKZ?rITKhbmWnxC8EW&x62F z*U$@YP>XP~6S5%8qjpu*nwlWttK10#T*--Dpl}!q8Em7&>qQd~A&$etaePNbInP+q z574KW!R!45`7HwAFfZ>#De=A zNTY1xMSHF?XMlxsFQJ3!g9nQIhKPzE&`k;3ob4J|jG))!AHqLB`Mrth{qi8zf-@bM z7n`dL_j6vPV6O0c^aL{ZL 
zaJ^4|6L$#K4aa*7_bMGcto30(C4_+v@35%#MPaccdLahl*;C2OrX_QtbjjWyhSwQ2`+ObOWn}*-v8>@cG%VL3|D40K@k0Pv7HTeqm3?-B$xSx5`jBiZ*ysFJ}t~ zGnv#l8HB*%*uWf|da$Uu$2Lnr}l$nFi{WpiBy(rX1Ie7b@`R#uy$ZfxmV7L9| zzw*yGeGB`FAR?xwTtPsthRowBsC>Tf%JxLE@m?$!*Z7h5kA6&K?KdVSHv#T)QNR^0 zC=PMlT8k=0K$TRfzIY>|hNjG?Kq24=L$lcs9RM9eG8)a}>QnOB9Y)_`@N)J%)d^cw z+yfmRK|zQhDwAYFw8CFify$;SJoIjE;Wv>nv!6rw%Q{%V!7(Z<99m>}w1r*J*EJf|<2uNctQZjO=U} z*KZ-rpS)|k3{4*f+zjVj?s)K7RH7vIrYVB%z^w?q)h=+m@#zcYX?ynL_c4$@`9_ST zNg{!K5>_H(-onD;G*myv&QW#g@4ET5tm7IN}hYJ{8?-k*c#eEK7SRVHizGBc-qFrp*KKCXL#Jz+*6$nZtMj+oO6H0eG){V~$_tmabAeHU1Dw$liNjGVNpGs7q^bP08$8v!fj?GLYqN|Ce(npZTR2;L1 zE2QIVue7}xPRz7NaVO2)?fVVT5!*)u-t+&&)x4bgF|SoLJw~xyZm*eqoXh2z@%}vY zIM3cQ`B)6P31e>pAh-cR-uaNW0d|wH4XWPeU|hv7B#R+mn~a1Csy1j$KBv|o#dH&n zLC(>S+&qJb%Idz6|*;+iZ*5Wx*!+a(UVQ_3eyZcFNl+tb>gVW8i)o z>Rn)$9L*qMQ}&mrBR{tdBdRcT?#+6a4V$|4fb6Sct$mbfm-{yF-7@dFAT=X=;%FmlOcRr!!-GN)|w9`{JDZe$u@EIS11WAk9 zolLvXF4a@HZS4$$lj%aTA7xY1(L6rCSQBT%O;9u|64X?VXvV5VrETY&#RYDmk@G%r z;orw48n~mJ$dHuMXVQ zq-aY?cqvi2qFmU6@p||$07Wr>*s!f6b{eSwZxZRFY!I#pVa-;XCE)_BQN-Tuy?q5%1A)rFeA=xqoIC*xPDf z!5rc0QleF8Zh9#ZNmP@Eh=^%^_vD97$%GGWn6wKR+Pw`{e3qM5mA$=cgIP_&!cb6d zX~9OuAXyaEnurC%^1-xGlH+Wgpk3D1VpGV|UF;zObquDm zk|?c4VnsZJYSkDJ-Q!ScU=>G4sqdGf)h4#pwQxv@lBYmpFsU|8>pfMTvq@mJK&$0` z3T}gf#@vL&KhXrXnJ>*G9pQ|+LTEC_{01PBDyCIZ@Tl?efx|2ZQ>lO-qyws0Me6tn zL{4156+Mmv1PGI8ve$B5Npj*4Oa$1b18gcGS8{S_KjgmBY`ga}z}w+HyP0d&5`Z&t z!W%OEJrO)4+R58uJ6ZZAeug~ChWq;XpVg#Tx@w)*`7M)JG9pQKo&9HNgL&{W9>@EW&exv05$I7KI z9(@qI(=7->RY1XS*oI@#9VRG>?I(7kTC7=?oz(bpg zCfH(gv!J8#h{CYO5G9R%J@FsMlm{{0wm_JeSeN~%(q#M)(6tBOMiy-VwdsrfptSXC_Iy@E7;5az9LBu%c88S9&}L^{=M#KeNinBCH!WDt z-r0TfV^hggk>nQXCS>&BSg`-lgEZ8EA^-Zrc+r-U0mxh0bUk(jbK6Es$1j09AqE72$E0%}-*q>B3Twt@Z zks-%V@5i_>k|uChV_s)hHVH}btahz#0lR^t86%+(OLvm;#H8A%Tt<8~p1%%BkSc+8 zKrM0MFvW;P#N`I}iY2-vY7HK(V7v|;N}Z8o4YJA@p>G-P}M;{XfHuxnJf?kE8@uWS$D&fpgd~4!QnU-jY<-n zfIUC07?Tv}wk=mDU)+v3LTug{TojYp24su`<4t&Mpeh)YJbCF`7aJysoYC102d!cSP&>PES_ zkXSN{HT3GQ!!LHDq>|YqlmimXh3RO(wesQ>`OzJGZKg6^b|5 zV&U_!U6n;7XTgFqZkQ_{c!%03KjjE22(>|6ub2eO{L7LzK^OryZ!2@V2q4@n& z;|~dVp%@k6ZogG%)9U^kg2yqKVwX8Cst7*foam4Mk~pi>s)+fjKng_KSfW~SkwAm{ zkesz}$SdNPFtJ;D113X+)pwXJ@u&O97jL$cCG~arTHH+3p5;6r?_mSPt3kg(;Zd?- zIP__AHS+4DPnNGnN30reNaw7U)!0vtX$$5IfM#6ipgesP9u-~kB}sz^Fx0C3ZL~YQ zGwdcVS~c*EHIo~)KXs>`yK>JXa%2-xr@Xp6cJyk5qF+ta@-r+?G@*T#5IiFGE&@{VX~XQzUa%nnPT8`&eV12dAwg|3~JFVB@8qXSS- zj|#sb9{-52_Q@~ex&)y@RX1T9ml0KxdBWz)#TRWnh419D#2QW7v>s>zrjSiTHZT?F z?MaX6iT$e>+X3zzuTNGZRj@}e9(ZWn2K(Ssp5k^E8u)y?0D;}#j_KfRpb;qWeh9C| ze2{7yV;L9w7hBgY1{;ss;Kg1n2Czp~p7H==#L5;{6UBI+^=eU1{~N&%fI1{;9e<8Omf z#Q9XFW2Hox47QhUk@aIEl)J&WzvspV2SX2xQxC2|wVJz`M=X~8)HC83n^H05!`VuZ z*QVpiDw+xqfJ)1LngmqynNW!&kBgOrajai!Lk{52QE-E5r&qj3@w^$S} z0NRRgzFcn2$81S76|pLQQR%Bgzh$LL zm=9$y7{~*^kO@}R+S62ZvuVQVucbciwJivg6dRi|MaQaD#qa9AK-{nxH{p z!cBLM7RsJmiN}p1vo`@HVhiPXql%VvJB`E12NjGgokgJyLN=sI=lwxg0M-doSdRfW2gM< zrF0nY#D>n^=g#p}SESl<^7eHLdf?*qFj5GVMK-c*W(Q={UKj$S=TAKMrE($mAc}

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 4f2e9c898..22c344dd4 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -548,6 +548,7 @@ struct llama_server_context
         slot->params.seed      = json_value(data, "seed", default_params.seed);
         slot->sparams.grammar  = json_value(data, "grammar", default_sparams.grammar);
         slot->sparams.n_probs  = json_value(data, "n_probs", default_sparams.n_probs);
+        slot->sparams.min_keep = json_value(data, "min_keep", default_sparams.min_keep);
 
         if (slot->n_predict > 0 && slot->params.n_predict > slot->n_predict) {
             // Might be better to reject the request with a 400 ?
@@ -1093,6 +1094,7 @@ struct llama_server_context
             {"stream",     slot.params.stream},
             {"logit_bias", slot.sparams.logit_bias},
             {"n_probs",    slot.sparams.n_probs},
+            {"min_keep",   slot.sparams.min_keep},
             {"grammar",    slot.sparams.grammar},
             {"samplers",   samplers_sequence}
         };

From 7ad554f90e735cf2a0f612ce44f9aa4fad6ae46a Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Sun, 18 Feb 2024 21:39:58 +0200
Subject: [PATCH 783/859] metal : fix unused warnings (#0)

---
 ggml-metal.metal | 22 ++++++++++++++++++----
 1 file changed, 18 insertions(+), 4 deletions(-)

diff --git a/ggml-metal.metal b/ggml-metal.metal
index a00962111..d0a85a192 100644
--- a/ggml-metal.metal
+++ b/ggml-metal.metal
@@ -4027,7 +4027,10 @@ void kernel_mul_mv_iq2_xxs_f32_impl(
         y4 += 32 * 32;
     }
 #else
-    // TODO
+    (void) x;
+    (void) y;
+    (void) yl;
+    (void) nb32;
 #endif
 
     for (int row = 0; row < N_DST; ++row) {
@@ -4170,7 +4173,10 @@ void kernel_mul_mv_iq2_xs_f32_impl(
         y4 += 32 * 32;
     }
 #else
-    // TODO
+    (void) x;
+    (void) y;
+    (void) yl;
+    (void) nb32;
 #endif
 
    for (int row = 0; row < N_DST; ++row) {
@@ -4306,7 +4312,10 @@ void kernel_mul_mv_iq3_xxs_f32_impl(
         y4 += 32 * 32;
     }
 #else
-    // TODO
+    (void) x;
+    (void) y;
+    (void) yl;
+    (void) nb32;
 #endif
 
     for (int row = 0; row < N_DST; ++row) {
@@ -4424,7 +4433,10 @@ void kernel_mul_mv_iq1_s_f32_impl(
         y4 += 16 * 32;
     }
 #else
-    // TODO
+    (void) x;
+    (void) y;
+    (void) yl;
+    (void) nb32;
 #endif
 
     for (int row = 0; row < N_DST; ++row) {
@@ -4659,6 +4671,8 @@ void dequantize_q4_K(device const block_q4_K *xb, short il, thread type4x4 & reg
     const float dl = d * sc[0];
     const float ml = min * sc[1];
 #else
+    (void) get_scale_min_k4_just2;
+
     q = q + 16 * (il&1);
     device const uint8_t * s = xb->scales;
     device const half2 * dh = (device const half2 *)xb->d;

From b1de96824bdbeb91ea458abcb3e5478690ad0727 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Sun, 18 Feb 2024 22:39:30 +0200
Subject: [PATCH 784/859] ci : fix wikitext url + compile warnings (#5569)

ggml-ci
---
 README.md                          | 2 +-
 ci/run.sh                          | 4 ++--
 examples/perplexity/perplexity.cpp | 4 ++--
 ggml-quants.c                      | 6 +++---
 scripts/get-wikitext-2.sh          | 2 +-
 5 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/README.md b/README.md
index 0c4ee5a27..8c7bc2689 100644
--- a/README.md
+++ b/README.md
@@ -768,7 +768,7 @@ The time per token is measured on a MacBook M1 Pro 32GB RAM using 4 and 8 thread
 
 #### How to run
 
-1. Download/extract: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research
+1. Download/extract: https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip
 2. Run `./perplexity -m models/7B/ggml-model-q4_0.gguf -f wiki.test.raw`
 3. Output:
 ```
diff --git a/ci/run.sh b/ci/run.sh
index b94658c96..f3a29c2e9 100755
--- a/ci/run.sh
+++ b/ci/run.sh
@@ -219,7 +219,7 @@ function gg_run_open_llama_3b_v2 {
     gg_wget models-mnt/open-llama/3B-v2/ https://huggingface.co/openlm-research/open_llama_3b_v2/resolve/main/pytorch_model.bin
     gg_wget models-mnt/open-llama/3B-v2/ https://huggingface.co/openlm-research/open_llama_3b_v2/raw/main/generation_config.json
 
-    gg_wget models-mnt/wikitext/ https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip
+    gg_wget models-mnt/wikitext/ https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip
 
     unzip -o models-mnt/wikitext/wikitext-2-raw-v1.zip -d models-mnt/wikitext/
     head -n 60 models-mnt/wikitext/wikitext-2-raw/wiki.test.raw > models-mnt/wikitext/wikitext-2-raw/wiki.test-60.raw
@@ -401,7 +401,7 @@ function gg_run_open_llama_7b_v2 {
     gg_wget models-mnt/open-llama/7B-v2/ https://huggingface.co/openlm-research/open_llama_7b_v2/resolve/main/pytorch_model-00002-of-00002.bin
     gg_wget models-mnt/open-llama/7B-v2/ https://huggingface.co/openlm-research/open_llama_7b_v2/raw/main/generation_config.json
 
-    gg_wget models-mnt/wikitext/ https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip
+    gg_wget models-mnt/wikitext/ https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip
 
     unzip -o models-mnt/wikitext/wikitext-2-raw-v1.zip -d models-mnt/wikitext/
 
     path_models="../models-mnt/open-llama/7B-v2"
diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp
index 74dcc642a..9ec989389 100644
--- a/examples/perplexity/perplexity.cpp
+++ b/examples/perplexity/perplexity.cpp
@@ -309,7 +309,7 @@ static void process_logits(int n_vocab, const float * logits, const int * tokens
 }
 
 static results_perplexity perplexity_v2(llama_context * ctx, const gpt_params & params) {
-    // Download: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research
+    // Download: https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip
     // Run `./perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw`
     // Output: `perplexity: 13.5106 [114/114]`
     // BOS tokens will be added for each chunk before eval
@@ -447,7 +447,7 @@ static results_perplexity perplexity(llama_context * ctx, const gpt_params & par
         return perplexity_v2(ctx, params);
     }
 
-    // Download: https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip?ref=salesforce-research
+    // Download: https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip
     // Run `./perplexity -m models/7B/ggml-model-q4_0.bin -f wiki.test.raw`
     // Output: `perplexity: 13.5106 [114/114]`
     // BOS tokens will be added for each chunk before eval
diff --git a/ggml-quants.c b/ggml-quants.c
index 48f5294e1..43a8f1de4 100644
--- a/ggml-quants.c
+++ b/ggml-quants.c
@@ -1837,9 +1837,9 @@ static void quantize_row_q2_K_impl(const float * restrict x, block_q2_K * restri
         float sigma2 = sumx2/QK_K;
         for (int j = 0; j < QK_K/16; ++j) {
             const float * restrict qw = quant_weights + QK_K * i + 16*j;
-            for (int l = 0; l < 16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]);
-            for (int l = 0; l < 16; ++l) sw[j] += weight[l];
-            scales[j] = make_qkx3_quants(16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false);
+            for (int l = 0; l < QK_K/16; ++l) weight[l] = qw[l] * sqrtf(sigma2 + x[16*j + l]*x[16*j + l]);
+            for (int l = 0; l < QK_K/16; ++l) sw[j] += weight[l];
+            scales[j] = make_qkx3_quants(QK_K/16, 3, x + 16*j, weight, L + 16*j, &mins[j], Laux, -0.9f, 0.05f, 36, false);
         }
 
         float dm = make_qp_quants(QK_K/16, 15, scales, Ls, sw);
diff --git a/scripts/get-wikitext-2.sh b/scripts/get-wikitext-2.sh
index ff96f331e..7ca760fa6 100755
--- a/scripts/get-wikitext-2.sh
+++ b/scripts/get-wikitext-2.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-wget https://s3.amazonaws.com/research.metamind.io/wikitext/wikitext-2-raw-v1.zip
+wget https://huggingface.co/datasets/ggml-org/ci/resolve/main/wikitext-2-raw-v1.zip
 
 echo "Usage:"
 echo ""

From 14278f55d2e2c6a53022075c7f2719b71e1cd61d Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Sun, 18 Feb 2024 22:58:57 +0200
Subject: [PATCH 785/859] ggml : restore vec dot stride arg names (#5453)

---
 ggml-quants.c | 76 +++++++++++++++++++++++++--------------------------
 1 file changed, 38 insertions(+), 38 deletions(-)

diff --git a/ggml-quants.c b/ggml-quants.c
index 43a8f1de4..3319d2ccf 100644
--- a/ggml-quants.c
+++ b/ggml-quants.c
@@ -3855,7 +3855,7 @@ static inline __m128i get_scale_shuffle(int i) {
 }
 #endif
 
-void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) {
+void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
     const int qk = QK8_0;
     const int nb = n / qk;
 
@@ -3866,8 +3866,8 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r
     assert(nrc == 1);
 #endif
     UNUSED(nrc);
-    UNUSED(bbx);
-    UNUSED(bby);
+    UNUSED(bx);
+    UNUSED(by);
     UNUSED(bs);
 
     const block_q4_0 * restrict x = vx;
@@ -4024,15 +4024,15 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r
 
             const __m128i tmp = _mm_loadu_si128((const __m128i *)x[i].qs);
 
-            __m128i bx = _mm_and_si128(lowMask, tmp);
-            __m128i by = _mm_loadu_si128((const __m128i *)y[i].qs);
-            bx = _mm_sub_epi8(bx, off);
-            const __m128i i32_0 = mul_sum_i8_pairs(bx, by);
+            __m128i bx_0 = _mm_and_si128(lowMask, tmp);
+            __m128i by_0 = _mm_loadu_si128((const __m128i *)y[i].qs);
+            bx_0 = _mm_sub_epi8(bx_0, off);
+            const __m128i i32_0 = mul_sum_i8_pairs(bx_0, by_0);
 
-            bx = _mm_and_si128(lowMask, _mm_srli_epi64(tmp, 4));
-            by = _mm_loadu_si128((const __m128i *)(y[i].qs + 16));
-            bx = _mm_sub_epi8(bx, off);
-            const __m128i i32_1 = mul_sum_i8_pairs(bx, by);
+            bx_0 = _mm_and_si128(lowMask, _mm_srli_epi64(tmp, 4));
+            by_0 = _mm_loadu_si128((const __m128i *)(y[i].qs + 16));
+            bx_0 = _mm_sub_epi8(bx_0, off);
+            const __m128i i32_1 = mul_sum_i8_pairs(bx_0, by_0);
 
             // Convert int32_t to float
             __m256 p = _mm256_cvtepi32_ps(MM256_SET_M128I(i32_0, i32_1));
@@ -4222,7 +4222,7 @@ void ggml_vec_dot_q4_0_q8_0(int n, float * restrict s, size_t bs, const void * r
 #endif
 }
 
-void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) {
+void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
     const int qk = QK8_1;
     const int nb = n / qk;
 
@@ -4233,8 +4233,8 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r
     assert(nrc == 1);
 #endif
     UNUSED(nrc);
-    UNUSED(bbx);
-    UNUSED(bby);
+    UNUSED(bx);
+    UNUSED(by);
     UNUSED(bs);
 
     const block_q4_1 * restrict x = vx;
@@ -4440,7 +4440,7 @@ void ggml_vec_dot_q4_1_q8_1(int n, float * restrict s, size_t bs, const void * r
 #endif
 }
 
-void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) {
+void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
     const int qk = QK8_0;
     const int nb = n / qk;
 
@@ -4448,8 +4448,8 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r
     assert(qk == QK5_0);
     assert(nrc == 1);
     UNUSED(nrc);
-    UNUSED(bbx);
-    UNUSED(bby);
+    UNUSED(bx);
+    UNUSED(by);
     UNUSED(bs);
 
     const block_q5_0 * restrict x = vx;
@@ -4618,21 +4618,21 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r
         /* Compute combined scale for the block */
         const __m256 d = _mm256_set1_ps(GGML_FP16_TO_FP32(x[i].d) * GGML_FP16_TO_FP32(y[i].d));
 
-        __m256i bx = bytes_from_nibbles_32(x[i].qs);
+        __m256i bx_0 = bytes_from_nibbles_32(x[i].qs);
         const __m256i bxhi = bytes_from_bits_32(x[i].qh);
         __m128i bxhil = _mm256_castsi256_si128(bxhi);
         __m128i bxhih = _mm256_extractf128_si256(bxhi, 1);
         bxhil = _mm_andnot_si128(bxhil, mask);
         bxhih = _mm_andnot_si128(bxhih, mask);
-        __m128i bxl = _mm256_castsi256_si128(bx);
-        __m128i bxh = _mm256_extractf128_si256(bx, 1);
+        __m128i bxl = _mm256_castsi256_si128(bx_0);
+        __m128i bxh = _mm256_extractf128_si256(bx_0, 1);
         bxl = _mm_or_si128(bxl, bxhil);
         bxh = _mm_or_si128(bxh, bxhih);
-        bx = MM256_SET_M128I(bxh, bxl);
+        bx_0 = MM256_SET_M128I(bxh, bxl);
 
-        const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs);
+        const __m256i by_0 = _mm256_loadu_si256((const __m256i *)y[i].qs);
 
-        const __m256 q = mul_sum_i8_pairs_float(bx, by);
+        const __m256 q = mul_sum_i8_pairs_float(bx_0, by_0);
 
         /* Multiply q with scale and accumulate */
         acc = _mm256_add_ps(_mm256_mul_ps(d, q), acc);
@@ -4731,7 +4731,7 @@ void ggml_vec_dot_q5_0_q8_0(int n, float * restrict s, size_t bs, const void * r
 #endif
 }
 
-void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) {
+void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
     const int qk = QK8_1;
     const int nb = n / qk;
 
@@ -4739,8 +4739,8 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r
     assert(qk == QK5_1);
     assert(nrc == 1);
     UNUSED(nrc);
-    UNUSED(bbx);
-    UNUSED(bby);
+    UNUSED(bx);
+    UNUSED(by);
     UNUSED(bs);
 
     const block_q5_1 * restrict x = vx;
@@ -4925,22 +4925,22 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r
 
         summs += GGML_FP16_TO_FP32(x[i].m) * y[i].s;
 
-        __m256i bx = bytes_from_nibbles_32(x[i].qs);
+        __m256i bx_0 = bytes_from_nibbles_32(x[i].qs);
         const __m256i bxhi = bytes_from_bits_32(x[i].qh);
         __m128i bxhil = _mm256_castsi256_si128(bxhi);
         __m128i bxhih = _mm256_extractf128_si256(bxhi, 1);
         bxhil = _mm_and_si128(bxhil, mask);
         bxhih = _mm_and_si128(bxhih, mask);
-        __m128i bxl = _mm256_castsi256_si128(bx);
-        __m128i bxh = _mm256_extractf128_si256(bx, 1);
+        __m128i bxl = _mm256_castsi256_si128(bx_0);
+        __m128i bxh = _mm256_extractf128_si256(bx_0, 1);
         bxl = _mm_or_si128(bxl, bxhil);
         bxh = _mm_or_si128(bxh, bxhih);
-        bx = MM256_SET_M128I(bxh, bxl);
+        bx_0 = MM256_SET_M128I(bxh, bxl);
 
         const __m256 dy = _mm256_set1_ps(y[i].d);
-        const __m256i by = _mm256_loadu_si256((const __m256i *)y[i].qs);
+        const __m256i by_0 = _mm256_loadu_si256((const __m256i *)y[i].qs);
 
-        const __m256 q = mul_sum_us8_pairs_float(bx, by);
+        const __m256 q = mul_sum_us8_pairs_float(bx_0, by_0);
 
         acc = _mm256_add_ps(_mm256_mul_ps(q, _mm256_mul_ps(dx, dy)), acc);
     }
@@ -5035,7 +5035,7 @@ void ggml_vec_dot_q5_1_q8_1(int n, float * restrict s, size_t bs, const void * r
 #endif
 }
 
-void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bbx, const void * restrict vy, size_t bby, int nrc) {
+void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) {
     const int qk = QK8_0;
     const int nb = n / qk;
 
@@ -5046,8 +5046,8 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r
     assert(nrc == 1);
 #endif
     UNUSED(nrc);
-    UNUSED(bbx);
-    UNUSED(bby);
+    UNUSED(bx);
+    UNUSED(by);
    UNUSED(bs);
 
     const block_q8_0 * restrict x = vx;
@@ -5169,10 +5169,10 @@ void ggml_vec_dot_q8_0_q8_0(int n, float * restrict s, size_t bs, const void * r
 
     for (int i = 0; i < nb; i++) {
         // load elements
-        vint8m1_t bx = __riscv_vle8_v_i8m1(x[i].qs, vl);
-        vint8m1_t by = __riscv_vle8_v_i8m1(y[i].qs, vl);
+        vint8m1_t bx_0 = __riscv_vle8_v_i8m1(x[i].qs, vl);
+        vint8m1_t by_0 = __riscv_vle8_v_i8m1(y[i].qs, vl);
 
-        vint16m2_t vw_mul = __riscv_vwmul_vv_i16m2(bx, by, vl);
+        vint16m2_t vw_mul = __riscv_vwmul_vv_i16m2(bx_0, by_0, vl);
 
         vint32m1_t v_zero = __riscv_vmv_v_x_i32m1(0, vl);
         vint32m1_t v_sum = __riscv_vwredsum_vs_i16m2_i32m1(vw_mul, v_zero, vl);

From a0c2dad9d43456c677e205c6240a5f8afb0121ac Mon Sep 17 00:00:00 2001
From: Jared Van Bortel
Date: Sun, 18 Feb 2024 16:21:52 -0500
Subject: [PATCH 786/859] build : pass all warning flags to nvcc via -Xcompiler (#5570)

* build : pass all warning flags to nvcc via -Xcompiler

* make : fix apparent mis-merge from #3952

* make : fix incorrect GF_CC_VER for CUDA host compiler
---
 CMakeLists.txt       | 11 ++++++-----
 Makefile             | 28 ++--------------------------
 scripts/get-flags.mk |  2 +-
 3 files changed, 9 insertions(+), 32 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 0c29b5d09..f5e7f2980 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -778,10 +778,7 @@ endif()
 set(CUDA_CXX_FLAGS "")
 
 if (LLAMA_CUBLAS)
-    set(CUDA_FLAGS ${CXX_FLAGS} -use_fast_math)
-    if (NOT MSVC)
-        list(APPEND CUDA_FLAGS -Wno-pedantic)
-    endif()
+    set(CUDA_FLAGS -use_fast_math)
 
     if (LLAMA_ALL_WARNINGS AND NOT MSVC)
         set(NVCC_CMD ${CMAKE_CUDA_COMPILER} .c)
@@ -814,7 +811,11 @@ if (LLAMA_CUBLAS)
         message("-- CUDA host compiler is ${CUDA_CCID} ${CUDA_CCVER}")
 
         get_flags(${CUDA_CCID} ${CUDA_CCVER})
-        list(APPEND CUDA_CXX_FLAGS ${GF_CXX_FLAGS}) # This is passed to -Xcompiler later
+        list(APPEND CUDA_CXX_FLAGS ${CXX_FLAGS} ${GF_CXX_FLAGS}) # This is passed to -Xcompiler later
+    endif()
+
+    if (NOT MSVC)
+        list(APPEND CUDA_CXX_FLAGS -Wno-pedantic)
     endif()
 endif()
diff --git a/Makefile b/Makefile
index 901798606..f5f6d32a7 100644
--- a/Makefile
+++ b/Makefile
@@ -220,30 +220,6 @@ ifeq ($(LLAMA_FATAL_WARNINGS),1)
 	MK_CXXFLAGS += -Werror
 endif
 
-ifeq ($(CC_IS_CLANG), 1)
-	# clang options
-	MK_CFLAGS += -Wunreachable-code-break -Wunreachable-code-return
-	MK_HOST_CXXFLAGS += -Wunreachable-code-break -Wunreachable-code-return -Wmissing-prototypes -Wextra-semi
-	ifneq '' '$(and $(CC_IS_LLVM_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 030800)))'
-		MK_CFLAGS += -Wdouble-promotion
-	endif
-	ifneq '' '$(and $(CC_IS_APPLE_CLANG),$(filter 1,$(shell expr $(CC_VER) \>= 070300)))'
-		MK_CFLAGS += -Wdouble-promotion
-	endif
-else
-	# gcc options
-	MK_CFLAGS += -Wdouble-promotion
-	MK_HOST_CXXFLAGS += -Wno-array-bounds
-
-	ifeq ($(shell expr
$(CC_VER) \>= 070100), 1) - MK_HOST_CXXFLAGS += -Wno-format-truncation - endif - ifeq ($(shell expr $(CC_VER) \>= 080100), 1) - MK_HOST_CXXFLAGS += -Wextra-semi - endif -endif - # this version of Apple ld64 is buggy ifneq '' '$(findstring dyld-1015.7,$(shell $(CC) $(LDFLAGS) -Wl,-v 2>&1))' MK_CPPFLAGS += -DHAVE_BUGGY_APPLE_LINKER @@ -468,7 +444,7 @@ ggml-cuda.o: ggml-cuda.cu ggml-cuda.h ifdef JETSON_EOL_MODULE_DETECT $(NVCC) -I. -Icommon -D_XOPEN_SOURCE=600 -D_GNU_SOURCE -DNDEBUG -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I/usr/local/cuda/targets/aarch64-linux/include -std=c++11 -O3 $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ else - $(NVCC) $(BASE_CXXFLAGS) $(NVCCFLAGS) -Wno-pedantic -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ + $(NVCC) $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ endif # JETSON_EOL_MODULE_DETECT endif # LLAMA_CUBLAS @@ -579,7 +555,7 @@ override LDFLAGS := $(MK_LDFLAGS) $(LDFLAGS) ifdef LLAMA_CUBLAS GF_CC := $(NVCC) $(NVCCFLAGS) 2>/dev/null .c -Xcompiler include scripts/get-flags.mk -CUDA_CXXFLAGS := $(GF_CXXFLAGS) +CUDA_CXXFLAGS := $(BASE_CXXFLAGS) $(GF_CXXFLAGS) -Wno-pedantic endif # diff --git a/scripts/get-flags.mk b/scripts/get-flags.mk index 596d7ead1..a742766d1 100644 --- a/scripts/get-flags.mk +++ b/scripts/get-flags.mk @@ -1,6 +1,6 @@ ifeq '' '$(findstring clang,$(shell $(GF_CC) --version))' GF_CC_IS_GCC = 1 - GF_CC_VER := $(shell { $(GF_CC) -dumpfullversion 2>/dev/null || $(GF_CC) -dumpversion; } | awk -F. '{ printf("%02d%02d%02d", $$1, $$2, $$3) }') + GF_CC_VER := $(shell { $(GF_CC) -dumpfullversion 2>/dev/null; echo; $(GF_CC) -dumpversion; } | awk -F. '/./ { printf("%02d%02d%02d", $$1, $$2, $$3); exit }') else GF_CC_IS_CLANG = 1 ifeq '' '$(findstring Apple,$(shell $(GF_CC) --version))' From f0d1fafc029a056cd765bdae58dcaa12312e9879 Mon Sep 17 00:00:00 2001 From: bmwl Date: Sun, 18 Feb 2024 23:38:32 -0800 Subject: [PATCH 787/859] ggml : android and old glibc NUMA incompatibility bugfixes (#5557) * #ifdef out some code NUMA blocks for Android due to lack of support * added in some __ANDROID__ if def gates around numa code and forced GLIBC prior to 2.29 to use a syscall for getcpu instead of the wrapper * Changed gates on numa platform specific stuff to __gnu_linux__ to skip any platforms without glibc * harmonizing #if defined blocks for numa code to __gnu_linux__ since that's the only model that's being followed anyways --------- Co-authored-by: root --- ggml.c | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/ggml.c b/ggml.c index 8224652a9..4ee2c5e11 100644 --- a/ggml.c +++ b/ggml.c @@ -23,6 +23,9 @@ #include #include #include +#if defined(__gnu_linux__) +#include +#endif #ifdef GGML_USE_METAL #include @@ -1971,7 +1974,7 @@ struct ggml_numa_nodes { uint32_t n_nodes; uint32_t total_cpus; // hardware threads on system uint32_t current_node; // node on which main process is execting -#ifdef __linux__ +#if defined(__gnu_linux__) cpu_set_t cpuset; // cpuset from numactl #else uint32_t cpuset; // no NUMA support outside of Linux at this time. 
Use a portable datatype
@@ -2009,7 +2012,7 @@ inline static void ggml_critical_section_end(void) {
     atomic_fetch_sub(&g_state_barrier, 1);
 }
 
-#ifdef __linux__
+#if defined(__gnu_linux__)
 static cpu_set_t ggml_get_numa_affinity(void) {
     cpu_set_t cpuset;
     pthread_t thread;
@@ -2031,7 +2034,7 @@ void ggml_numa_init(enum ggml_numa_strategy numa_flag) {
         return;
     }
 
-#ifdef __linux__
+#if defined(__gnu_linux__)
     struct stat st;
     char path[256];
     int rv;
@@ -2063,7 +2066,13 @@ void ggml_numa_init(enum ggml_numa_strategy numa_flag) {
 
     // figure out which node we're on
     uint current_cpu;
-    int getcpu_ret = getcpu(&current_cpu, &g_state.numa.current_node);
+    int getcpu_ret = 0;
+#if __GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ > 28)
+    getcpu_ret = getcpu(&current_cpu, &g_state.numa.current_node);
+#else
+    // old glibc doesn't have a wrapper for this call. Fall back on direct syscall
+    getcpu_ret = syscall(SYS_getcpu,&current_cpu,&g_state.numa.current_node);
+#endif
 
     if (g_state.numa.n_nodes < 1 || g_state.numa.total_cpus < 1 || getcpu_ret != 0) {
         g_state.numa.n_nodes = 0;
@@ -16734,7 +16743,7 @@ typedef pthread_t ggml_thread_t;
 #endif
 
 // Android's libc implementation "bionic" does not support setting affinity
-#if defined(__linux__) && !defined(__BIONIC__)
+#if defined(__gnu_linux__)
 static void set_numa_thread_affinity(int thread_n) {
     if (!ggml_is_numa()) {
         return;
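The hunk above doubles as a reusable recipe for calling getcpu() portably across glibc versions. Here is a self-contained, hedged sketch of the same pattern (the helper name query_current_cpu is illustrative, not from the tree): glibc only added a getcpu() wrapper in 2.29, so anything older has to issue the raw syscall.

    #define _GNU_SOURCE
    #include <sched.h>      // getcpu() wrapper, glibc >= 2.29
    #include <unistd.h>     // syscall()
    #include <syscall.h>    // SYS_getcpu

    static int query_current_cpu(unsigned int * cpu, unsigned int * node) {
    #if __GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ > 28)
        return getcpu(cpu, node);              // glibc wrapper is available
    #else
        return syscall(SYS_getcpu, cpu, node); // direct syscall on old glibc
    #endif
    }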
From 769a716e30ba1da46f709df1c00727d6869d30e7 Mon Sep 17 00:00:00 2001
From: Mirko185
Date: Mon, 19 Feb 2024 08:39:31 +0100
Subject: [PATCH 788/859] readme : update (#5572)

Added 1.5-bit on README.md
---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 8c7bc2689..70866e249 100644
--- a/README.md
+++ b/README.md
@@ -61,7 +61,7 @@ variety of hardware - locally and in the cloud.
 - Plain C/C++ implementation without any dependencies
 - Apple silicon is a first-class citizen - optimized via ARM NEON, Accelerate and Metal frameworks
 - AVX, AVX2 and AVX512 support for x86 architectures
-- 2-bit, 3-bit, 4-bit, 5-bit, 6-bit, and 8-bit integer quantization for faster inference and reduced memory use
+- 1.5-bit, 2-bit, 3-bit, 4-bit, 5-bit, 6-bit, and 8-bit integer quantization for faster inference and reduced memory use
 - Custom CUDA kernels for running LLMs on NVIDIA GPUs (support for AMD GPUs via HIP)
 - Vulkan, SYCL, and (partial) OpenCL backend support
 - CPU+GPU hybrid inference to partially accelerate models larger than the total VRAM capacity

From 3a9cb4ca6408c29423373dd6cd7aa78a58286c00 Mon Sep 17 00:00:00 2001
From: slaren
Date: Mon, 19 Feb 2024 09:04:45 +0100
Subject: [PATCH 789/859] cuda, metal : fix nans in soft_max (#5574)

* cuda : fix nans in soft_max

* metal : fix nans in soft_max

---------

Co-authored-by: Georgi Gerganov
---
 ggml-cuda.cu     | 8 ++++----
 ggml-metal.metal | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/ggml-cuda.cu b/ggml-cuda.cu
index 933ebbc4e..eef213509 100644
--- a/ggml-cuda.cu
+++ b/ggml-cuda.cu
@@ -6205,7 +6205,7 @@ static __global__ void soft_max_f32(const float * x, const float * mask, const f
         const int ix = rowx*ncols + col;
         const int iy = rowy*ncols + col;
 
-        const float val = x[ix]*scale + (mask ? mask[iy] : 0.0f) + slope*pos[col];
+        const float val = x[ix]*scale + (mask ? mask[iy] : 0.0f) + (pos ? slope*pos[col] : 0.0f);
 
         vals[col] = val;
         max_val = max(max_val, val);
@@ -9170,17 +9170,17 @@ static void ggml_cuda_op_soft_max(
     memcpy(&max_bias, (float *) dst->op_params + 1, sizeof(float));
 
     // positions tensor
-    float * src2_dd = dst_dd; // default to avoid null checks in the kernel
+    float * src2_dd = nullptr;
     cuda_pool_alloc<float> src2_f;
 
     ggml_tensor * src2 = dst->src[2];
     const bool use_src2 = src2 != nullptr;
 
     if (use_src2) {
-        const bool src2_on_device = use_src2 && src2->backend == GGML_BACKEND_GPU;
-        ggml_tensor_extra_gpu * src2_extra = use_src2 ? (ggml_tensor_extra_gpu *) src2->extra : nullptr;
+        const bool src2_on_device = src2->backend == GGML_BACKEND_GPU;
 
         if (src2_on_device) {
+            ggml_tensor_extra_gpu * src2_extra = (ggml_tensor_extra_gpu *) src2->extra;
             src2_dd = (float *) src2_extra->data_device[g_main_device];
         } else {
             src2_dd = src2_f.alloc(ggml_nelements(src2));
diff --git a/ggml-metal.metal b/ggml-metal.metal
index d0a85a192..f0d77d446 100644
--- a/ggml-metal.metal
+++ b/ggml-metal.metal
@@ -392,7 +392,7 @@ kernel void kernel_soft_max(
     float lmax = -INFINITY;
 
     for (int i00 = tpitg; i00 < ne00; i00 += ntg) {
-        lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]);
+        lmax = MAX(lmax, psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f));
     }
 
     // find the max value in the block
@@ -417,7 +417,7 @@ kernel void kernel_soft_max(
     // parallel sum
     float lsum = 0.0f;
     for (int i00 = tpitg; i00 < ne00; i00 += ntg) {
-        const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]) - max_val);
+        const float exp_psrc0 = exp((psrc0[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f)) - max_val);
         lsum += exp_psrc0;
         pdst[i00] = exp_psrc0;
     }
@@ -495,7 +495,7 @@ kernel void kernel_soft_max_4(
     float4 lmax4 = -INFINITY;
 
     for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) {
-        lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]);
+        lmax4 = fmax(lmax4, psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f));
    }
 
     const float lmax = MAX(MAX(lmax4[0], lmax4[1]), MAX(lmax4[2], lmax4[3]));
@@ -521,7 +521,7 @@ kernel void kernel_soft_max_4(
     // parallel sum
     float4 lsum4 = 0.0f;
     for (int i00 = tpitg; i00 < ne00/4; i00 += ntg) {
-        const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + slope*ppos[i00]) - max_val);
+        const float4 exp_psrc4 = exp((psrc4[i00]*scale + (pmask ? pmask[i00] : 0.0f) + (ppos ? slope*ppos[i00] : 0.0f)) - max_val);
         lsum4 += exp_psrc4;
         pdst4[i00] = exp_psrc4;
     }
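The essence of the two fixes above, condensed into a hedged C sketch (the identifier names mirror the kernels, but this helper itself is illustrative and not part of the patch): when the optional ALiBi positions tensor is absent, its term must contribute exactly 0.0f rather than reading through a stale default pointer, which is what produced the NaNs.

    // per-element input to the soft-max reduction, with both optional terms guarded
    static inline float soft_max_input(float x, float scale,
                                       const float * mask, const float * pos,
                                       float slope, int i) {
        return x*scale + (mask ? mask[i] : 0.0f) + (pos ? slope*pos[i] : 0.0f);
    }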
From 11b12de39bd787c0494da0cd405958fdfedc29c4 Mon Sep 17 00:00:00 2001
From: Xuan Son Nguyen
Date: Mon, 19 Feb 2024 09:23:37 +0100
Subject: [PATCH 790/859] llama : add llama_chat_apply_template() (#5538)

* llama: add llama_chat_apply_template

* test-chat-template: remove dedundant vector

* chat_template: do not use std::string for buffer

* add clarification for llama_chat_apply_template

* llama_chat_apply_template: add zephyr template

* llama_chat_apply_template: correct docs

* llama_chat_apply_template: use term "chat" everywhere

* llama_chat_apply_template: change variable name to "tmpl"
---
 Makefile                     |   4 ++
 llama.cpp                    | 117 +++++++++++++++++++++++++++++++++++
 llama.h                      |  25 ++++++++
 tests/CMakeLists.txt         |   1 +
 tests/test-chat-template.cpp |  64 +++++++++++++++++++
 5 files changed, 211 insertions(+)
 create mode 100644 tests/test-chat-template.cpp

diff --git a/Makefile b/Makefile
index f5f6d32a7..59352eb53 100644
--- a/Makefile
+++ b/Makefile
@@ -867,3 +867,7 @@ tests/test-model-load-cancel: tests/test-model-load-cancel.cpp ggml.o llama.o te
 tests/test-autorelease: tests/test-autorelease.cpp ggml.o llama.o tests/get-model.cpp $(COMMON_DEPS) $(OBJS)
 	$(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS)
+
+tests/test-chat-template: tests/test-chat-template.cpp ggml.o llama.o $(COMMON_DEPS) $(OBJS)
+	$(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<)
+	$(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS)
diff --git a/llama.cpp b/llama.cpp
index 5cfebb3b1..143870645 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -12508,6 +12508,123 @@ int32_t llama_token_to_piece(const struct llama_model * model, llama_token token
     return 0;
 }
 
+// trim whitespace from the beginning and end of a string
+static std::string trim(const std::string & str) {
+    size_t start = 0;
+    size_t end = str.size();
+    while (start < end && isspace(str[start])) {
+        start += 1;
+    }
+    while (end > start && isspace(str[end - 1])) {
+        end -= 1;
+    }
+    return str.substr(start, end - start);
+}
+
+// Simple version of "llama_apply_chat_template" that only works with strings
+// This function uses heuristic checks to determine commonly used template. It is not a jinja parser.
+static int32_t llama_chat_apply_template_internal(
+    const std::string & tmpl,
+    const std::vector<const llama_chat_message *> & chat,
+    std::string & dest, bool add_ass) {
+    // Taken from the research: https://github.com/ggerganov/llama.cpp/issues/5527
+    std::stringstream ss;
+    if (tmpl.find("<|im_start|>") != std::string::npos) {
+        // chatml template
+        for (auto message : chat) {
+            ss << "<|im_start|>" << message->role << "\n" << message->content << "<|im_end|>\n";
+        }
+        if (add_ass) {
+            ss << "<|im_start|>assistant\n";
+        }
+    } else if (tmpl.find("[INST]") != std::string::npos) {
+        // llama2 template and its variants
+        // [variant] support system message
+        bool support_system_message = tmpl.find("<<SYS>>") != std::string::npos;
+        // [variant] space before + after response
+        bool space_around_response = tmpl.find("' ' + eos_token") != std::string::npos;
+        // [variant] add BOS inside history
+        bool add_bos_inside_history = tmpl.find("bos_token + '[INST]") != std::string::npos;
+        // [variant] trim spaces from the input message
+        bool strip_message = tmpl.find("content.strip()") != std::string::npos;
+        // construct the prompt
+        bool is_inside_turn = true; // skip BOS at the beginning
+        ss << "[INST] ";
+        for (auto message : chat) {
+            std::string content = strip_message ? trim(message->content) : message->content;
+            std::string role(message->role);
+            if (!is_inside_turn) {
+                is_inside_turn = true;
+                ss << (add_bos_inside_history ? "<s>[INST] " : "[INST] ");
+            }
+            if (role == "system") {
+                if (support_system_message) {
+                    ss << "<<SYS>>\n" << content << "\n<</SYS>>\n\n";
+                } else {
+                    // if the model does not support system message, we still include it in the first message, but without <<SYS>>
+                    ss << content << "\n";
+                }
+            } else if (role == "user") {
+                ss << content << " [/INST]";
+            } else {
+                ss << (space_around_response ? " " : "") << content << (space_around_response ? " " : "") << "</s>";
+                is_inside_turn = false;
+            }
+        }
+        // llama2 templates seem to not care about "add_generation_prompt"
+    } else if (tmpl.find("<|user|>") != std::string::npos) {
+        // zephyr template
+        for (auto message : chat) {
+            ss << "<|" << message->role << "|>" << "\n" << message->content << "<|endoftext|>\n";
+        }
+        if (add_ass) {
+            ss << "<|assistant|>\n";
+        }
+    } else {
+        // template not supported
+        return -1;
+    }
+    dest = ss.str();
+    return dest.size();
+}
+
+LLAMA_API int32_t llama_chat_apply_template(
+    const struct llama_model * model,
+    const char * tmpl,
+    const struct llama_chat_message * chat,
+    size_t n_msg,
+    bool add_ass,
+    char * buf,
+    int32_t length) {
+    std::string curr_tmpl(tmpl == nullptr ? "" : tmpl);
+    if (tmpl == nullptr) {
+        GGML_ASSERT(model != nullptr);
+        // load template from model
+        std::vector<char> model_template(2048, 0); // longest known template is about 1200 bytes
+        std::string template_key = "tokenizer.chat_template";
+        int32_t res = llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), curr_tmpl.size());
+        if (res < 0) {
+            // worst case: there is no information about template, we will use chatml by default
+            curr_tmpl = "<|im_start|>"; // see llama_chat_apply_template_internal
+        } else {
+            curr_tmpl = std::string(model_template.data(), model_template.size());
+        }
+    }
+    // format the chat to string
+    std::vector<const llama_chat_message *> chat_vec;
+    chat_vec.resize(n_msg);
+    for (size_t i = 0; i < n_msg; i++) {
+        chat_vec[i] = &chat[i];
+    }
+    std::string formatted_chat;
+    int32_t res = llama_chat_apply_template_internal(curr_tmpl, chat_vec, formatted_chat, add_ass);
+    if (res < 0) {
+        return res;
+    }
+    strncpy(buf, formatted_chat.c_str(), length);
+    return res;
+}
+
 struct llama_timings llama_get_timings(struct llama_context * ctx) {
     struct llama_timings result = {
         /*.t_start_ms =*/ 1e-3 * ctx->t_start_us,
diff --git a/llama.h b/llama.h
index 5a97abcc9..77a84c18a 100644
--- a/llama.h
+++ b/llama.h
@@ -305,6 +305,12 @@ extern "C" {
         int32_t n_eval;
     };
 
+    // used in chat template
+    typedef struct llama_chat_message {
+        const char * role;
+        const char * content;
+    } llama_chat_message;
+
     // Helpers for getting default parameters
     LLAMA_API struct llama_model_params llama_model_default_params(void);
     LLAMA_API struct llama_context_params llama_context_default_params(void);
@@ -699,6 +705,25 @@ extern "C" {
             char * buf,
             int32_t length);
 
+    /// Apply chat template. Inspired by hf apply_chat_template() on python.
+    /// Both "model" and "custom_template" are optional, but at least one is required. "custom_template" has higher precedence than "model"
+    /// NOTE: This function only support some known jinja templates. It is not a jinja parser.
+    /// @param tmpl A Jinja template to use for this chat. If this is nullptr, the model’s default chat template will be used instead.
+    /// @param chat Pointer to a list of multiple llama_chat_message
+    /// @param n_msg Number of llama_chat_message in this chat
+    /// @param add_ass Whether to end the prompt with the token(s) that indicate the start of an assistant message.
+    /// @param buf A buffer to hold the output formatted prompt. The recommended alloc size is 2 * (total number of characters of all messages)
+    /// @param length The size of the allocated buffer
+    /// @return The total number of bytes of the formatted prompt. If it is larger than the size of buffer, you may need to re-alloc it and then re-apply the template.
+    LLAMA_API int32_t llama_chat_apply_template(
+        const struct llama_model * model,
+        const char * tmpl,
+        const struct llama_chat_message * chat,
+        size_t n_msg,
+        bool add_ass,
+        char * buf,
+        int32_t length);
+
     //
     // Grammar
     //
diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt
index 3e40a78cd..10326d531 100644
--- a/tests/CMakeLists.txt
+++ b/tests/CMakeLists.txt
@@ -28,6 +28,7 @@ endfunction()
 llama_build_and_test_executable(test-quantize-fns.cpp)
 llama_build_and_test_executable(test-quantize-perf.cpp)
 llama_build_and_test_executable(test-sampling.cpp)
+llama_build_and_test_executable(test-chat-template.cpp)
 
 llama_build_executable(test-tokenizer-0-llama.cpp)
 llama_test_executable (test-tokenizer-0-llama test-tokenizer-0-llama.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama.gguf)
diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp
new file mode 100644
index 000000000..9830650d4
--- /dev/null
+++ b/tests/test-chat-template.cpp
@@ -0,0 +1,64 @@
+#include <iostream>
+#include <string>
+#include <vector>
+#include <sstream>
+
+#undef NDEBUG
+#include <cassert>
+
+#include "llama.h"
+
+int main(void) {
+    llama_chat_message conversation[] = {
+        {"system", "You are a helpful assistant"},
+        {"user", "Hello"},
+        {"assistant", "Hi there"},
+        {"user", "Who are you"},
+        {"assistant", " I am an assistant "},
+        {"user", "Another question"},
+    };
+    size_t message_count = 6;
+    std::vector<std::string> templates = {
+        // teknium/OpenHermes-2.5-Mistral-7B
+        "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\\n' + message['content'] + '<|im_end|>' + '\\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\\n' }}{% endif %}",
+        // mistralai/Mistral-7B-Instruct-v0.2
+        "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+        // TheBloke/FusionNet_34Bx2_MoE-AWQ
+        "{%- for idx in range(0, messages|length) -%}\\n{%- if messages[idx]['role'] == 'user' -%}\\n{%- if idx > 1 -%}\\n{{- bos_token + '[INST] ' + messages[idx]['content'] + ' [/INST]' -}}\\n{%- else -%}\\n{{- messages[idx]['content'] + ' [/INST]' -}}\\n{%- endif -%}\\n{% elif messages[idx]['role'] == 'system' %}\\n{{- '[INST] <<SYS>>\\\\n' + messages[idx]['content'] + '\\\\n<</SYS>>\\\\n\\\\n' -}}\\n{%- elif messages[idx]['role'] == 'assistant' -%}\\n{{- ' ' + messages[idx]['content'] + ' ' + eos_token -}}\\n{% endif %}\\n{% endfor %}",
+        // bofenghuang/vigogne-2-70b-chat
+        "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif true == true and not '<<SYS>>' in messages[0]['content'] %}{% set loop_messages = messages %}{% set system_message = 'Vous êtes Vigogne, un assistant IA créé par Zaion Lab. Vous suivez extrêmement bien les instructions. Aidez autant que vous le pouvez.' %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<<SYS>>\\\\n' + system_message + '\\\\n<</SYS>>\\\\n\\\\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'system' %}{{ '<<SYS>>\\\\n' + content.strip() + '\\\\n<</SYS>>\\\\n\\\\n' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}",
+    };
+    std::vector<std::string> expected_substr = {
+        "<|im_start|>assistant\n I am an assistant <|im_end|>\n<|im_start|>user\nAnother question<|im_end|>\n<|im_start|>assistant",
+        "[/INST]Hi there</s>[INST] Who are you [/INST] I am an assistant </s>[INST] Another question [/INST]",
+        "</s><s>[INST] Who are you [/INST] I am an assistant </s><s>[INST] Another question [/INST]",
+        "[/INST] Hi there </s>[INST] Who are you [/INST] I am an assistant </s>[INST] Another question [/INST]",
+    };
+    std::vector<char> formatted_chat(1024);
+    int32_t res;
+
+    // test invalid chat template
+    res = llama_chat_apply_template(nullptr, "INVALID TEMPLATE", conversation, message_count, true, formatted_chat.data(), formatted_chat.size());
+    assert(res < 0);
+
+    for (size_t i = 0; i < templates.size(); i++) {
+        std::string custom_template = templates[i];
+        std::string substr = expected_substr[i];
+        formatted_chat.resize(1024);
+        res = llama_chat_apply_template(
+            nullptr,
+            custom_template.c_str(),
+            conversation,
+            message_count,
+            true,
+            formatted_chat.data(),
+            formatted_chat.size()
+        );
+        formatted_chat.resize(res);
+        std::string output(formatted_chat.data(), formatted_chat.size());
+        std::cout << output << "\n-------------------------\n";
+        // expect the "formatted_chat" to contain pre-defined strings
+        assert(output.find(substr) != std::string::npos);
+    }
+    return 0;
+}
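Because this patch adds a brand-new public API, a short hedged usage sketch may help (error handling trimmed; the `model` pointer is assumed to come from llama_load_model_from_file, and passing tmpl = nullptr selects the model's embedded template, as documented in the llama.h comments above):

    llama_chat_message msgs[] = {
        {"system", "You are a helpful assistant"},
        {"user",   "Hello"},
    };
    std::vector<char> buf(1024);
    int32_t n = llama_chat_apply_template(model, nullptr, msgs, 2, true, buf.data(), buf.size());
    if (n > (int32_t) buf.size()) {
        // result was truncated: grow the buffer and re-apply, as the docs suggest
        buf.resize(n);
        n = llama_chat_apply_template(model, nullptr, msgs, 2, true, buf.data(), buf.size());
    }
    std::string prompt(buf.data(), n); // formatted prompt, ready for tokenization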
From 4480542b2271ba1438f0daff8e5f3a74b1dc8609 Mon Sep 17 00:00:00 2001
From: NawafAlansari <72708095+NawafAlansari@users.noreply.github.com>
Date: Mon, 19 Feb 2024 03:25:38 -0500
Subject: [PATCH 791/859] baby-llama : allocate graphs in ggml_context (#5573)

* Fixed the baby-llama issue (see issue #4830)

* minor : fix whitespaces

---------

Co-authored-by: Georgi Gerganov
---
 examples/baby-llama/baby-llama.cpp | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/examples/baby-llama/baby-llama.cpp b/examples/baby-llama/baby-llama.cpp
index e7d2ad592..65bb238a0 100644
--- a/examples/baby-llama/baby-llama.cpp
+++ b/examples/baby-llama/baby-llama.cpp
@@ -1533,16 +1533,17 @@ int main(int argc, char ** argv) {
 
         int n_past = 0;
 
-        ggml_cgraph gf = {};
+        struct ggml_cgraph * gf = NULL;
+        gf = ggml_new_graph_custom(ctx0, LLAMA_TRAIN_MAX_NODES, true);
 
         get_example_targets_batch(ctx0, 64*ex+0, tokens_input, targets);
 
-        struct ggml_tensor * logits = forward_batch(&model, &kv_self, ctx0, &gf, tokens_input, n_tokens, n_past, n_batch);
+        struct ggml_tensor * logits = forward_batch(&model, &kv_self, ctx0, gf, tokens_input, n_tokens, n_past, n_batch);
         // struct ggml_tensor * e = cross_entropy_loss(ctx0, targets, logits);
         struct ggml_tensor * e = square_error_loss(ctx0, targets, logits);
 
-        ggml_build_forward_expand(&gf, e);
-        ggml_graph_compute_helper(work_buffer, &gf, /*n_threads*/ 1);
+        ggml_build_forward_expand(gf, e);
+        ggml_graph_compute_helper(work_buffer, gf, /*n_threads*/ 1);
 
         float error_before_opt = ggml_get_f32_1d(e, 0);
 
@@ -1552,8 +1553,8 @@ int main(int argc, char ** argv) {
         opt_params_lbfgs.lbfgs.n_iter = 16;
         ggml_opt(ctx0, opt_params_lbfgs, e);
         //
-        ggml_build_forward_expand(&gf, e);
-        ggml_graph_compute_helper(work_buffer, &gf, /*n_threads*/ 1);
+        ggml_build_forward_expand(gf, e);
+        ggml_graph_compute_helper(work_buffer, gf, /*n_threads*/ 1);
 
         float error_after_opt = ggml_get_f32_1d(e, 0);
 
@@ -1600,13 +1601,14 @@ int main(int argc, char ** argv) {
         };
         struct ggml_context * ctx0 = ggml_init(params);
 
-        ggml_cgraph gf = {};
+        struct ggml_cgraph * gf = NULL;
+        gf = ggml_new_graph_custom(ctx0, LLAMA_TRAIN_MAX_NODES, true);
 
         int n_past = 0;
 
-        struct ggml_tensor * logits = forward(&model, &kv_self, ctx0, &gf, tokens_input, sample_ctx, n_past);
+        struct ggml_tensor * logits = forward(&model, &kv_self, ctx0, gf, tokens_input, sample_ctx, n_past);
 
-        ggml_build_forward_expand(&gf, logits);
-        ggml_graph_compute_helper(work_buffer, &gf, /*n_threads*/ 1);
+        ggml_build_forward_expand(gf, logits);
+        ggml_graph_compute_helper(work_buffer, gf, /*n_threads*/ 1);
 
         struct ggml_tensor * best_samples = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, sample_ctx);
         struct ggml_tensor * probs = ggml_new_tensor_2d(ctx0, GGML_TYPE_F32, n_vocab, sample_ctx);
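The migration pattern in this patch, reduced to a hedged sketch (the memory size is arbitrary here, `loss` stands for a tensor already built in the same context, and LLAMA_TRAIN_MAX_NODES comes from the training helpers): graphs are no longer stack-allocated structs but are created inside a ggml_context with an explicit node budget and passed around as pointers.

    struct ggml_init_params params = {
        /*.mem_size   =*/ 16*1024*1024,
        /*.mem_buffer =*/ NULL,
        /*.no_alloc   =*/ false,
    };
    struct ggml_context * ctx0 = ggml_init(params);

    // grads enabled (true) so the graph is usable for training, as in the diff
    struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_TRAIN_MAX_NODES, true);
    ggml_build_forward_expand(gf, loss); // the graph is now passed as a pointer, no '&'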
From 70847553963c85e86051d06df848236829f5f951 Mon Sep 17 00:00:00 2001
From: Daniel Bevenius
Date: Mon, 19 Feb 2024 09:31:59 +0100
Subject: [PATCH 792/859] llava : avoid changing the original BakLLaVA model
 (#5577)

This is a follup of Commit fc0c8d286a533363a9a663510b62af85ffad58b3
("llava : update surgery script to not remove tensors") but this time
the change is to the BakLLaVA specific part of the surgery script.

I've been able to test this using SkunkworksAI/BakLLaVA-1 and it works
as expected using the instructions in README.md.

Signed-off-by: Daniel Bevenius
---
 examples/llava/llava-surgery.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/examples/llava/llava-surgery.py b/examples/llava/llava-surgery.py
index 8b7a62fba..4f2da3bee 100644
--- a/examples/llava/llava-surgery.py
+++ b/examples/llava/llava-surgery.py
@@ -25,9 +25,6 @@ if len(clip_tensors) > 0:
     clip = {name.replace("vision_tower.vision_tower.", ""): checkpoint[name].float() for name in clip_tensors}
     torch.save(clip, f"{args.model}/llava.clip")
 
-    # remove these tensors
-    for name in clip_tensors:
-        del checkpoint[name]
 
     # added tokens should be removed to be able to convert Mistral models
     if os.path.exists(f"{args.model}/added_tokens.json"):
@@ -35,7 +32,6 @@ if len(clip_tensors) > 0:
         f.write("{}\n")
 
 
-    torch.save(checkpoint, path)
 print("Done!")
 print(f"Now you can convert {args.model} to a regular LLaMA GGUF file.")

From f53119cec4f073b6d214195ecbe1fad3abdf2b34 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 19 Feb 2024 10:34:10 +0200
Subject: [PATCH 793/859] minor : fix trailing whitespace (#5538)

---
 llama.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama.cpp b/llama.cpp
index 143870645..5de07dfa9 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -12525,7 +12525,7 @@ static std::string trim(const std::string & str) {
 // Simple version of "llama_apply_chat_template" that only works with strings
 // This function uses heuristic checks to determine commonly used template. It is not a jinja parser.
 static int32_t llama_chat_apply_template_internal(
     const std::string & tmpl,
-    const std::vector<const llama_chat_message *> & chat, 
+    const std::vector<const llama_chat_message *> & chat,
     std::string & dest, bool add_ass) {
     // Taken from the research: https://github.com/ggerganov/llama.cpp/issues/5527
     std::stringstream ss;

From 13e2c771aa4212cd5405cf310203848d50f7f859 Mon Sep 17 00:00:00 2001
From: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com>
Date: Mon, 19 Feb 2024 14:45:18 +0530
Subject: [PATCH 794/859] cmake : remove obsolete sycl compile flags (#5581)

* rm unwanted sycl compile options

* fix bug

* fix bug

* format fix
---
 CMakeLists.txt | 7 -------
 1 file changed, 7 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index f5e7f2980..40a098d01 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -741,13 +741,6 @@ function(get_flags CCID CCVER)
         if (CCVER VERSION_GREATER_EQUAL 8.1.0)
             list(APPEND CXX_FLAGS -Wextra-semi)
         endif()
-    elseif (CCID MATCHES "Intel")
-        if (NOT LLAMA_SYCL)
-            # enable max optimization level when using Intel compiler
-            set(C_FLAGS -ipo -O3 -static -fp-model=fast -flto -fno-stack-protector)
-            set(CXX_FLAGS -ipo -O3 -static -fp-model=fast -flto -fno-stack-protector)
-            add_link_options(-fuse-ld=lld -static-intel)
-        endif()
     endif()
 
     set(GF_C_FLAGS ${C_FLAGS} PARENT_SCOPE)

From 70d45af0efce9ed360e1858b827989d971dd9caf Mon Sep 17 00:00:00 2001
From: valiray <133289098+valiray@users.noreply.github.com>
Date: Mon, 19 Feb 2024 02:37:10 -0800
Subject: [PATCH 795/859] readme : fix typo in README-sycl.md (#5353)

---
 README-sycl.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README-sycl.md b/README-sycl.md
index e3a8e726e..dd5bf9dea 100644
--- a/README-sycl.md
+++ b/README-sycl.md
@@ -272,7 +272,7 @@ Please install [Visual Studio](https://visualstudio.microsoft.com/) which impact
 
 a. Please follow the procedure in [Get the Intel® oneAPI Base Toolkit ](https://www.intel.com/content/www/us/en/developer/tools/oneapi/base-toolkit.html).
 
-Recommend to install to default folder: **/opt/intel/oneapi**.
+Recommend to install to default folder: **C:\Program Files (x86)\Intel\oneAPI**.
 
 Following guide uses the default folder as example. If you use other folder, please modify the following guide info with your folder.
 
From 68a6b98b3c8af7e5baade3ee45fe1d2c7b9323a9 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 19 Feb 2024 13:41:51 +0200
Subject: [PATCH 796/859] make : fix CUDA build (#5580)

---
 Makefile | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/Makefile b/Makefile
index 59352eb53..29fd2ca9c 100644
--- a/Makefile
+++ b/Makefile
@@ -97,9 +97,10 @@ endif
 #
 
 # keep standard at C11 and C++11
-MK_CPPFLAGS = -I. -Icommon
-MK_CFLAGS   = -std=c11   -fPIC
-MK_CXXFLAGS = -std=c++11 -fPIC
+MK_CPPFLAGS  = -I. -Icommon
+MK_CFLAGS    = -std=c11   -fPIC
+MK_CXXFLAGS  = -std=c++11 -fPIC
+MK_NVCCFLAGS = -std=c++11
 
 # -Ofast tends to produce faster code, but may not be available for some compilers.
ifdef LLAMA_FAST From d0e3ce51f45bd6a646da1952d7e5d143a087db3e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 14:45:41 +0200 Subject: [PATCH 797/859] ci : enable -Werror for CUDA builds (#5579) * cmake : pass -Werror through -Xcompiler ggml-ci * make, cmake : enable CUDA errors on warnings ggml-ci --- CMakeLists.txt | 33 +++++++++++++++++++-------------- Makefile | 5 ++++- ggml-cuda.cu | 50 ++++++++++++++++++++++++++------------------------ 3 files changed, 49 insertions(+), 39 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 40a098d01..168b133f4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -145,14 +145,6 @@ set(THREADS_PREFER_PTHREAD_FLAG ON) find_package(Threads REQUIRED) include(CheckCXXCompilerFlag) -if (LLAMA_FATAL_WARNINGS) - if (CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") - add_compile_options(-Werror) - elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") - add_compile_options(/WX) - endif() -endif() - # enable libstdc++ assertions for debug builds if (CMAKE_SYSTEM_NAME MATCHES "Linux") add_compile_definitions($<$:_GLIBCXX_ASSERTIONS>) @@ -747,15 +739,24 @@ function(get_flags CCID CCVER) set(GF_CXX_FLAGS ${CXX_FLAGS} PARENT_SCOPE) endfunction() +if (LLAMA_FATAL_WARNINGS) + if (CMAKE_CXX_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + list(APPEND C_FLAGS -Werror) + list(APPEND CXX_FLAGS -Werror) + elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") + add_compile_options(/WX) + endif() +endif() + if (LLAMA_ALL_WARNINGS) if (NOT MSVC) - set(WARNING_FLAGS -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function) - set(C_FLAGS -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes - -Werror=implicit-int -Werror=implicit-function-declaration) - set(CXX_FLAGS -Wmissing-declarations -Wmissing-noreturn) + list(APPEND WARNING_FLAGS -Wall -Wextra -Wpedantic -Wcast-qual -Wno-unused-function) + list(APPEND C_FLAGS -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmissing-prototypes + -Werror=implicit-int -Werror=implicit-function-declaration) + list(APPEND CXX_FLAGS -Wmissing-declarations -Wmissing-noreturn) - set(C_FLAGS ${WARNING_FLAGS} ${C_FLAGS}) - set(CXX_FLAGS ${WARNING_FLAGS} ${CXX_FLAGS}) + list(APPEND C_FLAGS ${WARNING_FLAGS}) + list(APPEND CXX_FLAGS ${WARNING_FLAGS}) get_flags(${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION}) @@ -773,6 +774,10 @@ set(CUDA_CXX_FLAGS "") if (LLAMA_CUBLAS) set(CUDA_FLAGS -use_fast_math) + if (LLAMA_FATAL_WARNINGS) + list(APPEND CUDA_FLAGS -Werror all-warnings) + endif() + if (LLAMA_ALL_WARNINGS AND NOT MSVC) set(NVCC_CMD ${CMAKE_CUDA_COMPILER} .c) if (NOT CMAKE_CUDA_HOST_COMPILER STREQUAL "") diff --git a/Makefile b/Makefile index 29fd2ca9c..63b4af9ba 100644 --- a/Makefile +++ b/Makefile @@ -217,7 +217,7 @@ MK_CFLAGS += $(WARN_FLAGS) -Wshadow -Wstrict-prototypes -Wpointer-arith -Wmis MK_CXXFLAGS += $(WARN_FLAGS) -Wmissing-declarations -Wmissing-noreturn ifeq ($(LLAMA_FATAL_WARNINGS),1) - MK_CFLAGS += -Werror + MK_CFLAGS += -Werror MK_CXXFLAGS += -Werror endif @@ -385,6 +385,9 @@ ifdef LLAMA_CUBLAS MK_LDFLAGS += -lcuda -lcublas -lculibos -lcudart -lcublasLt -lpthread -ldl -lrt -L/usr/local/cuda/lib64 -L/opt/cuda/lib64 -L$(CUDA_PATH)/targets/x86_64-linux/lib -L/usr/local/cuda/targets/aarch64-linux/lib -L/usr/lib/wsl/lib OBJS += ggml-cuda.o MK_NVCCFLAGS += -use_fast_math +ifdef LLAMA_FATAL_WARNINGS + MK_NVCCFLAGS += -Werror all-warnings +endif # LLAMA_FATAL_WARNINGS ifndef JETSON_EOL_MODULE_DETECT MK_NVCCFLAGS += --forward-unknown-to-host-compiler endif 
# JETSON_EOL_MODULE_DETECT diff --git a/ggml-cuda.cu b/ggml-cuda.cu index eef213509..e091dbdc1 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -651,18 +651,18 @@ static __device__ __forceinline__ float2 warp_reduce_sum(float2 a) { return a; } -static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); - } - return a; -#else - (void) a; - NO_DEVICE_CODE; -#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL -} +//static __device__ __forceinline__ half2 warp_reduce_sum(half2 a) { +//#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +//#pragma unroll +// for (int mask = 16; mask > 0; mask >>= 1) { +// a = __hadd2(a, __shfl_xor_sync(0xffffffff, a, mask, 32)); +// } +// return a; +//#else +// (void) a; +// NO_DEVICE_CODE; +//#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL +//} static __device__ __forceinline__ float warp_reduce_max(float x) { #pragma unroll @@ -672,18 +672,18 @@ static __device__ __forceinline__ float warp_reduce_max(float x) { return x; } -static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { -#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX -#pragma unroll - for (int mask = 16; mask > 0; mask >>= 1) { - x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); - } - return x; -#else - (void) x; - NO_DEVICE_CODE; -#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX -} +//static __device__ __forceinline__ half2 warp_reduce_max(half2 x) { +//#if !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX +//#pragma unroll +// for (int mask = 16; mask > 0; mask >>= 1) { +// x = __hmax2(x, __shfl_xor_sync(0xffffffff, x, mask, 32)); +// } +// return x; +//#else +// (void) x; +// NO_DEVICE_CODE; +//#endif // !(defined(GGML_USE_HIPBLAS) && defined(__HIP_PLATFORM_AMD__)) && __CUDA_ARCH__ >= CC_PASCAL && CUDART_VERSION >= CUDART_HMAX +//} static __device__ __forceinline__ float op_repeat(const float a, const float b) { return b; @@ -4641,10 +4641,12 @@ static __device__ __forceinline__ float vec_dot_iq2_xs_q8_1( const float d = (float)bq2->d * __low2float(bq8_1[ib32].ds) * 0.25f; return d * ((0.5f + ls1) * sumi1 + (0.5f + ls2) * sumi2); #else + (void) ksigns64; assert(false); return 0.f; #endif #else + (void) ksigns64; assert(false); return 0.f; #endif From 890559ab28e354052e16e770155ad007fd0856e8 Mon Sep 17 00:00:00 2001 From: Didzis Gosko Date: Sun, 11 Feb 2024 16:41:41 +0200 Subject: [PATCH 798/859] metal : option to embed MSL source into compiled binary (whisper/1842) * ggml : embed Metal library source (ggml-metal.metal) into binary enable by setting WHISPER_EMBED_METAL_LIBRARY * rename the build option * rename the preprocessor directive * generate Metal library embedding assembly on-fly during build process --- ggml-metal.m | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ggml-metal.m b/ggml-metal.m index f3c1fff8f..956e323a0 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -277,6 +277,14 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { return 
NULL; } } else { +#if GGML_METAL_EMBED_LIBRARY + GGML_METAL_LOG_INFO("%s: using embedded metal library\n", __func__); + + extern const char ggml_metallib_start[]; + extern const char ggml_metallib_end[]; + + NSString * src = [[NSString alloc] initWithBytes:ggml_metallib_start length:(ggml_metallib_end-ggml_metallib_start) encoding:NSUTF8StringEncoding]; +#else GGML_METAL_LOG_INFO("%s: default.metallib not found, loading from source\n", __func__); NSString * sourcePath; @@ -299,6 +307,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_LOG_ERROR("%s: error: %s\n", __func__, [[error description] UTF8String]); return NULL; } +#endif @autoreleasepool { // dictionary of preprocessor macros From a3145bdc305422973e25f0b066da6f469ed5dc45 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 14:53:48 +0200 Subject: [PATCH 799/859] ggml-alloc : apply ggml/731 --- ggml-alloc.c | 116 ++++++++++++++++++++++++++++++++------------------- 1 file changed, 73 insertions(+), 43 deletions(-) diff --git a/ggml-alloc.c b/ggml-alloc.c index d4123564f..e675306c8 100644 --- a/ggml-alloc.c +++ b/ggml-alloc.c @@ -377,6 +377,9 @@ struct ggml_gallocr { struct node_alloc * node_allocs; // [n_nodes] int n_nodes; + + struct tensor_alloc * leaf_allocs; // [n_leafs] + int n_leafs; }; ggml_gallocr_t ggml_gallocr_new_n(ggml_backend_buffer_type_t * bufts, int n_bufs) { @@ -427,6 +430,7 @@ void ggml_gallocr_free(ggml_gallocr_t galloc) { free(galloc->buffers); free(galloc->buf_tallocs); free(galloc->node_allocs); + free(galloc->leaf_allocs); free(galloc); } @@ -464,7 +468,7 @@ static void ggml_gallocr_allocate_node(ggml_gallocr_t galloc, struct ggml_tensor for (int i = 0; i < GGML_MAX_SRC; i++) { struct ggml_tensor * parent = node->src[i]; if (parent == NULL) { - break; + continue; } // if the node's data is external, then we cannot re-use it @@ -544,22 +548,8 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr memset(galloc->hash_set.keys, 0, galloc->hash_set.size * sizeof(struct ggml_tensor *)); memset(galloc->hash_values, 0, galloc->hash_set.size * sizeof(struct hash_node)); - // allocate all graph inputs first to avoid overwriting them - for (int i = 0; i < graph->n_nodes; i++) { - if (graph->nodes[i]->flags & GGML_TENSOR_FLAG_INPUT) { - ggml_gallocr_allocate_node(galloc, graph->nodes[i], get_node_buffer_id(node_buffer_ids, i)); - } - for (int j = 0; j < GGML_MAX_SRC; j++) { - if (graph->nodes[i]->src[j] == NULL) { - continue; - } - if (graph->nodes[i]->src[j]->flags & GGML_TENSOR_FLAG_INPUT) { - ggml_gallocr_allocate_node(galloc, graph->nodes[i]->src[j], get_node_buffer_id(node_buffer_ids, i)); - } - } - } - // count number of children and views + // allocate all graph inputs and leafs first to avoid overwriting them for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; @@ -568,14 +558,37 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr ggml_gallocr_hash_get(galloc, view_src)->n_views += 1; } - for (int j = 0; j < GGML_MAX_SRC; j++) { - struct ggml_tensor * parent = node->src[j]; - if (parent == NULL) { - break; - } - ggml_gallocr_hash_get(galloc, parent)->n_children += 1; + if (node->flags & GGML_TENSOR_FLAG_INPUT) { + ggml_gallocr_allocate_node(galloc, graph->nodes[i], get_node_buffer_id(node_buffer_ids, i)); } - } + + for (int j = 0; j < GGML_MAX_SRC; j++) { + struct ggml_tensor * src = node->src[j]; + if (src == NULL) { + continue; + } + + ggml_gallocr_hash_get(galloc, 
src)->n_children += 1; + + // allocate explicit inputs and leafs + if (src->flags & GGML_TENSOR_FLAG_INPUT || src->op == GGML_OP_NONE) { + ggml_gallocr_allocate_node(galloc, src, get_node_buffer_id(node_buffer_ids, i)); + } + } + } + + // allocate the remaining leafs that are unused on the graph + // these are effectively static tensors that the application is not using in the graph, but may still want to allocate for other purposes + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + struct hash_node * hn = ggml_gallocr_hash_get(galloc, leaf); + + if (hn->n_children == 0) { + assert(!hn->allocated); + // since buffer ids are only given for nodes, these leafs are always allocated in the first buffer + ggml_gallocr_allocate_node(galloc, leaf, 0); + } + } // allocate tensors for (int i = 0; i < graph->n_nodes; i++) { @@ -586,7 +599,7 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * parent = node->src[j]; if (parent == NULL) { - break; + continue; } ggml_gallocr_allocate_node(galloc, parent, buffer_id); } @@ -598,7 +611,7 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * parent = node->src[j]; if (parent == NULL) { - break; + continue; } AT_PRINTF("%s", parent->name); if (j < GGML_MAX_SRC - 1 && node->src[j + 1] != NULL) { @@ -611,7 +624,7 @@ static void ggml_gallocr_alloc_graph_impl(ggml_gallocr_t galloc, struct ggml_cgr for (int j = 0; j < GGML_MAX_SRC; j++) { struct ggml_tensor * parent = node->src[j]; if (parent == NULL) { - break; + continue; } struct hash_node * p_hn = ggml_gallocr_hash_get(galloc, parent); p_hn->n_children -= 1; @@ -696,6 +709,18 @@ bool ggml_gallocr_reserve_n(ggml_gallocr_t galloc, struct ggml_cgraph * graph, c } } } + if (galloc->n_leafs < graph->n_leafs) { + free(galloc->leaf_allocs); + galloc->leaf_allocs = calloc(sizeof(struct tensor_alloc), graph->n_leafs); + GGML_ASSERT(galloc->leaf_allocs != NULL); + } + galloc->n_leafs = graph->n_leafs; + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + struct hash_node * hn = ggml_gallocr_hash_get(galloc, leaf); + galloc->leaf_allocs[i].offset = hn->offset; + galloc->leaf_allocs[i].size_max = ggml_backend_buft_get_alloc_size(galloc->bufts[hn->buffer_id], leaf); + } // reallocate buffers if needed for (int i = 0; i < galloc->n_buffers; i++) { @@ -722,8 +747,8 @@ bool ggml_gallocr_reserve(ggml_gallocr_t galloc, struct ggml_cgraph *graph) { return ggml_gallocr_reserve_n(galloc, graph, NULL); } -static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * node, struct node_alloc * node_alloc, struct tensor_alloc * tensor_alloc) { - assert(node->data || node->view_src || ggml_backend_buffer_get_alloc_size(galloc->buffers[node_alloc->buffer_id], node) <= tensor_alloc->size_max); +static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * node, int buffer_id, struct tensor_alloc * tensor_alloc) { + assert(node->data || node->view_src || ggml_backend_buffer_get_alloc_size(galloc->buffers[buffer_id], node) <= tensor_alloc->size_max); if (node->view_src != NULL) { if (node->buffer == NULL) { @@ -732,29 +757,20 @@ static void ggml_gallocr_init_tensor(ggml_gallocr_t galloc, struct ggml_tensor * // this tensor was allocated without ggml-backend return; } - ggml_backend_view_init(galloc->buffers[node_alloc->buffer_id], node); 
+ ggml_backend_view_init(galloc->buffers[buffer_id], node); } } else { if (node->data == NULL) { assert(tensor_alloc->offset != SIZE_MAX); - assert(ggml_backend_buffer_get_alloc_size(galloc->buffers[node_alloc->buffer_id], node) <= tensor_alloc->size_max); - void * base = ggml_backend_buffer_get_base(galloc->buffers[node_alloc->buffer_id]); + assert(ggml_backend_buffer_get_alloc_size(galloc->buffers[buffer_id], node) <= tensor_alloc->size_max); + void * base = ggml_backend_buffer_get_base(galloc->buffers[buffer_id]); void * addr = (char *)base + tensor_alloc->offset; - ggml_backend_tensor_alloc(galloc->buffers[node_alloc->buffer_id], node, addr); + ggml_backend_tensor_alloc(galloc->buffers[buffer_id], node, addr); } else { if (node->buffer == NULL) { // this tensor was allocated without ggml-backend return; } - -#ifndef NDEBUG - size_t offset = - (char *)node->data - - (char *)ggml_backend_buffer_get_base(node->buffer); - size_t size = ggml_backend_buffer_get_alloc_size(node->buffer, node); - assert(tensor_alloc->offset == SIZE_MAX || offset == tensor_alloc->offset); - assert(tensor_alloc->offset == SIZE_MAX || size <= tensor_alloc->size_max); -#endif } } } @@ -773,6 +789,13 @@ static bool ggml_gallocr_needs_realloc(ggml_gallocr_t galloc, struct ggml_cgraph return true; } + if (galloc->n_leafs != graph->n_leafs) { +#ifndef NDEBUG + fprintf(stderr, "%s: graph has different number of leafs\n", __func__); +#endif + return true; + } + for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; struct node_alloc * node_alloc = &galloc->node_allocs[i]; @@ -827,6 +850,7 @@ bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) } // allocate the graph tensors from the previous assignments + // nodes for (int i = 0; i < graph->n_nodes; i++) { struct ggml_tensor * node = graph->nodes[i]; struct node_alloc * node_alloc = &galloc->node_allocs[i]; @@ -835,9 +859,15 @@ bool ggml_gallocr_alloc_graph(ggml_gallocr_t galloc, struct ggml_cgraph * graph) if (src == NULL) { continue; } - ggml_gallocr_init_tensor(galloc, src, node_alloc, &node_alloc->src[j]); + ggml_gallocr_init_tensor(galloc, src, node_alloc->buffer_id, &node_alloc->src[j]); } - ggml_gallocr_init_tensor(galloc, node, node_alloc, &node_alloc->dst); + ggml_gallocr_init_tensor(galloc, node, node_alloc->buffer_id, &node_alloc->dst); + } + // leafs + for (int i = 0; i < graph->n_leafs; i++) { + struct ggml_tensor * leaf = graph->leafs[i]; + struct tensor_alloc * leaf_alloc = &galloc->leaf_allocs[i]; + ggml_gallocr_init_tensor(galloc, leaf, 0, leaf_alloc); } return true; From 337c9cbd52918ae5fb9a9d9e25d7fae4e238c9f1 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Mon, 19 Feb 2024 14:54:21 +0200 Subject: [PATCH 800/859] sync : ggml ggml-ci --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 7a23ab162..733d8f95b 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -5070f078a67c18c11736e78316ab715ca9afde16 +818eeb8a3be99125746a90ec63af8f51516a2ec6 From 6fd413791a754598a54a366145960f2e27eec015 Mon Sep 17 00:00:00 2001 From: slaren Date: Mon, 19 Feb 2024 14:02:36 +0100 Subject: [PATCH 801/859] llava : replace ggml_cpy with ggml_cont --- examples/llava/clip.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 98d512f67..1a02fde32 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -618,7 
+618,7 @@ static ggml_cgraph * clip_image_build_graph(clip_ctx * ctx, const clip_image_f32
         KQ = ggml_soft_max_inplace(ctx0, KQ);
         struct ggml_tensor * KQV = ggml_mul_mat(ctx0, V, KQ);
         KQV = ggml_reshape_4d(ctx0, KQV, d_head, num_positions, n_head, batch_size);
-        KQV = ggml_cont(ctx0, ggml_permute(ctx0, KQV, 0, 2, 1, 3));
+        KQV = ggml_permute(ctx0, KQV, 0, 2, 1, 3);
 
         cur = ggml_cont_3d(ctx0, KQV, hidden_size, num_positions, batch_size);
     }
 
     // attention output

From 9d679f0fccd4030779ed3c7684a40122fe41806c Mon Sep 17 00:00:00 2001
From: nopperl <54780682+nopperl@users.noreply.github.com>
Date: Mon, 19 Feb 2024 14:14:07 +0000
Subject: [PATCH 803/859] examples : support minItems/maxItems in JSON grammar converter (#5039)

* support minLength and maxLength in JSON schema grammar converter

* Update examples/json-schema-to-grammar.py

---------

Co-authored-by: Georgi Gerganov
---
 examples/json-schema-to-grammar.py | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/examples/json-schema-to-grammar.py b/examples/json-schema-to-grammar.py
index 2a4cb65bc..6a977f031 100755
--- a/examples/json-schema-to-grammar.py
+++ b/examples/json-schema-to-grammar.py
@@ -87,7 +87,21 @@ class SchemaConverter:
 
         elif schema_type == 'array' and 'items' in schema:
             # TODO `prefixItems` keyword
             item_rule_name = self.visit(schema['items'], f'{name}{"-" if name else ""}item')
-            rule = f'"[" space ({item_rule_name} ("," space {item_rule_name})*)? "]" space'
+            list_item_operator = f'("," space {item_rule_name})'
+            successive_items = ""
+            min_items = schema.get("minItems", 0)
+            if min_items > 0:
+                first_item = f"({item_rule_name})"
+                successive_items = list_item_operator * (min_items - 1)
+                min_items -= 1
+            else:
+                first_item = f"({item_rule_name})?"
+            max_items = schema.get("maxItems")
+            if max_items is not None and max_items > min_items:
+                successive_items += (list_item_operator + "?") * (max_items - min_items - 1)
+            else:
+                successive_items += list_item_operator + "*"
+            rule = f'"[" space {first_item} {successive_items} "]" space'
             return self._add_rule(rule_name, rule)
 
         else:
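To make the new repetition handling concrete, here is a hedged worked example (the rule name `item` is illustrative; actual names come from the converter's naming scheme). For a schema such as

    { "type": "array", "items": { "type": "number" }, "minItems": 2, "maxItems": 3 }

the code above emits one mandatory first item, min_items - 1 further mandatory repetitions, and max_items - min_items optional repetitions, producing a rule of roughly this shape instead of the old unbounded star:

    "[" space (item) ("," space item)("," space item)? "]" space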
-Icommon -D_XOPEN_SOURCE=600 -D_GNU_SOURCE -DNDEBUG -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I/usr/local/cuda/targets/aarch64-linux/include -std=c++11 -O3 $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ + $(NVCC) -I. -Icommon -D_XOPEN_SOURCE=600 -D_GNU_SOURCE -DNDEBUG -DGGML_USE_CUBLAS -I/usr/local/cuda/include -I/opt/cuda/include -I/usr/local/cuda/targets/aarch64-linux/include -std=c++11 -O3 $(NVCCFLAGS) $(CPPFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ else - $(NVCC) $(NVCCFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ + $(NVCC) $(NVCCFLAGS) $(CPPFLAGS) -Xcompiler "$(CUDA_CXXFLAGS)" -c $< -o $@ endif # JETSON_EOL_MODULE_DETECT endif # LLAMA_CUBLAS @@ -549,9 +549,10 @@ GF_CC := $(CC) include scripts/get-flags.mk # combine build flags with cmdline overrides -override CFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CFLAGS) $(GF_CFLAGS) $(CFLAGS) -BASE_CXXFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) $(MK_CXXFLAGS) $(CXXFLAGS) -override CXXFLAGS := $(BASE_CXXFLAGS) $(HOST_CXXFLAGS) $(GF_CXXFLAGS) +override CPPFLAGS := $(MK_CPPFLAGS) $(CPPFLAGS) +override CFLAGS := $(CPPFLAGS) $(MK_CFLAGS) $(GF_CFLAGS) $(CFLAGS) +BASE_CXXFLAGS := $(MK_CXXFLAGS) $(CXXFLAGS) +override CXXFLAGS := $(BASE_CXXFLAGS) $(HOST_CXXFLAGS) $(GF_CXXFLAGS) $(CPPFLAGS) override NVCCFLAGS := $(MK_NVCCFLAGS) $(NVCCFLAGS) override LDFLAGS := $(MK_LDFLAGS) $(LDFLAGS) From 40c3a6c1e11040088b4a1ce0abc4651cb3011dd4 Mon Sep 17 00:00:00 2001 From: slaren Date: Mon, 19 Feb 2024 23:40:26 +0100 Subject: [PATCH 805/859] cuda : ignore peer access already enabled errors (#5597) * cuda : ignore peer access already enabled errors * fix hip --- ggml-cuda.cu | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e091dbdc1..6caae56b0 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -54,6 +54,8 @@ #define cudaDeviceProp hipDeviceProp_t #define cudaDeviceSynchronize hipDeviceSynchronize #define cudaError_t hipError_t +#define cudaErrorPeerAccessAlreadyEnabled hipErrorPeerAccessAlreadyEnabled +#define cudaErrorPeerAccessNotEnabled hipErrorPeerAccessNotEnabled #define cudaEventCreateWithFlags hipEventCreateWithFlags #define cudaEventDisableTiming hipEventDisableTiming #define cudaEventRecord hipEventRecord @@ -9325,9 +9327,15 @@ static void ggml_cuda_set_peer_access(const int n_tokens) { CUDA_CHECK(cudaDeviceCanAccessPeer(&can_access_peer, id, id_other)); if (can_access_peer) { if (enable_peer_access) { - CUDA_CHECK(cudaDeviceEnablePeerAccess(id_other, 0)); + cudaError_t err = cudaDeviceEnablePeerAccess(id_other, 0); + if (err != cudaErrorPeerAccessAlreadyEnabled) { + CUDA_CHECK(err); + } } else { - CUDA_CHECK(cudaDeviceDisablePeerAccess(id_other)); + cudaError_t err = cudaDeviceDisablePeerAccess(id_other); + if (err != cudaErrorPeerAccessNotEnabled) { + CUDA_CHECK(err); + } } } } @@ -10999,10 +11007,10 @@ GGML_CALL static const char * ggml_backend_cuda_split_buffer_get_name(ggml_backe UNUSED(buffer); } -// unused at the moment -//static bool ggml_backend_buffer_is_cuda_split(ggml_backend_buffer_t buffer) { -// return buffer->iface.get_name == ggml_backend_cuda_split_buffer_get_name; -//} +static bool ggml_backend_buffer_is_cuda_split(ggml_backend_buffer_t buffer) { + return buffer->iface.get_name == ggml_backend_cuda_split_buffer_get_name; + UNUSED(ggml_backend_buffer_is_cuda_split); // only used in debug builds currently, avoid unused function warning in release builds +} GGML_CALL static void ggml_backend_cuda_split_buffer_free_buffer(ggml_backend_buffer_t 
buffer) { ggml_backend_cuda_split_buffer_context * ctx = (ggml_backend_cuda_split_buffer_context *)buffer->context; @@ -11390,7 +11398,7 @@ GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, gg for (int j = 0; j < GGML_MAX_SRC; j++) { if (node->src[j] != nullptr) { assert(node->src[j]->backend == GGML_BACKEND_GPU || node->src[j]->backend == GGML_BACKEND_GPU_SPLIT); - assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); + assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) || ggml_backend_buffer_is_cuda_split(node->src[j]->buffer)); assert(node->src[j]->extra != nullptr); } } From 5dde5408978eda22242b87e22e306d1c2d1a5834 Mon Sep 17 00:00:00 2001 From: Mathijs de Bruin Date: Sat, 3 Feb 2024 17:56:46 +0000 Subject: [PATCH 806/859] Allow for Vulkan build with Accelerate. Closes #5304 --- ggml.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ggml.c b/ggml.c index 4ee2c5e11..d129df505 100644 --- a/ggml.c +++ b/ggml.c @@ -273,6 +273,8 @@ inline static void * ggml_calloc(size_t num, size_t size) { #include #if defined(GGML_USE_CLBLAST) // allow usage of CLBlast alongside Accelerate functions #include "ggml-opencl.h" +#elif defined(GGML_USE_VULKAN) +#include "ggml-vulkan.h" #endif #elif defined(GGML_USE_OPENBLAS) #if defined(GGML_BLAS_USE_MKL) From 42f664a3825dfde13a32c3577ab66d10c56f3aa6 Mon Sep 17 00:00:00 2001 From: Mathijs de Bruin Date: Sat, 3 Feb 2024 18:00:11 +0000 Subject: [PATCH 807/859] Resolve ErrorIncompatibleDriver with Vulkan on MacOS. Refs: - https://chat.openai.com/share/7020ce72-65fc-45ec-b7be-9d9d798a5f3f - https://github.com/SaschaWillems/Vulkan/issues/954 - https://github.com/haasn/libplacebo/issues/128 - https://github.com/KhronosGroup/Vulkan-Samples/issues/476 --- ggml-vulkan.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 4a30414df..e9e966dbf 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1109,8 +1109,10 @@ static void ggml_vk_instance_init() { #ifdef GGML_VULKAN_VALIDATE "VK_EXT_validation_features", #endif + "VK_KHR_portability_enumeration", }; - vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(), &app_info, layers, extensions); + + vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR), &app_info, layers, extensions); #ifdef GGML_VULKAN_VALIDATE const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; vk::ValidationFeaturesEXT validation_features = { From d8c054517dc24f1316f3be12a98fff383e1e93e3 Mon Sep 17 00:00:00 2001 From: Mathijs de Bruin Date: Tue, 6 Feb 2024 14:39:22 +0000 Subject: [PATCH 808/859] Add preprocessor checks for Apple devices. 
Based on work by @rbourgeat in https://github.com/ggerganov/llama.cpp/pull/5322/files --- ggml-vulkan.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index e9e966dbf..33b8a9061 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1109,10 +1109,15 @@ static void ggml_vk_instance_init() { #ifdef GGML_VULKAN_VALIDATE "VK_EXT_validation_features", #endif +#ifdef __APPLE__ "VK_KHR_portability_enumeration", +#endif }; + vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(), &app_info, layers, extensions); +#ifdef __APPLE__ + instance_create_info.flags = vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; +#endif - vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR), &app_info, layers, extensions); #ifdef GGML_VULKAN_VALIDATE const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; vk::ValidationFeaturesEXT validation_features = { From f50db6ae0bdcb5f8593ca6ca46dfa03b177faa2f Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Sat, 10 Feb 2024 22:14:52 +0100 Subject: [PATCH 809/859] Add check for VK_KHR_portability_enumeration for MoltenVK support --- ggml-vulkan.cpp | 41 +++++++++++++++++++++++++++++++---------- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 33b8a9061..37123ac8f 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1100,23 +1100,44 @@ static void ggml_vk_instance_init() { #endif vk::ApplicationInfo app_info{ "ggml-vulkan", 1, nullptr, 0, VK_API_VERSION }; - const std::vector layers = { + + const std::vector instance_extensions = vk::enumerateInstanceExtensionProperties(); +#ifdef __APPLE__ + bool portability_enumeration_ext = false; + // Check for portability enumeration extension for MoltenVK support + for (const auto& properties : instance_extensions) { + if (strcmp("VK_KHR_portability_enumeration", properties.extensionName) == 0) { + portability_enumeration_ext = true; + break; + } + } + if (!portability_enumeration_ext) { + std::cerr << "ggml_vulkan: WARNING: Instance extension VK_KHR_portability_enumeration not found." 
<< std::endl; + } +#endif + + std::vector layers = { #ifdef GGML_VULKAN_VALIDATE "VK_LAYER_KHRONOS_validation", #endif }; - const std::vector extensions = { + std::vector extensions = { #ifdef GGML_VULKAN_VALIDATE "VK_EXT_validation_features", -#endif -#ifdef __APPLE__ - "VK_KHR_portability_enumeration", #endif }; - vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags(), &app_info, layers, extensions); #ifdef __APPLE__ - instance_create_info.flags = vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; + if (portability_enumeration_ext) { + extensions.push_back("VK_KHR_portability_enumeration"); + } #endif + vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags{}, &app_info, layers, extensions); +#ifdef __APPLE__ + if (portability_enumeration_ext) { + instance_create_info.flags |= vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; + } +#endif + #ifdef GGML_VULKAN_VALIDATE const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; @@ -1175,12 +1196,12 @@ static void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { vk_instance.devices[idx] = std::make_shared(); ctx->device = vk_instance.devices[idx]; ctx->device.lock()->physical_device = devices[dev_num]; - std::vector ext_props = ctx->device.lock()->physical_device.enumerateDeviceExtensionProperties(); + const std::vector ext_props = ctx->device.lock()->physical_device.enumerateDeviceExtensionProperties(); bool maintenance4_support = false; // Check if maintenance4 is supported - for (auto properties : ext_props) { + for (const auto& properties : ext_props) { if (strcmp("VK_KHR_maintenance4", properties.extensionName) == 0) { maintenance4_support = true; } @@ -1211,7 +1232,7 @@ static void ggml_vk_init(ggml_backend_vk_context * ctx, size_t idx) { bool fp16_storage = false; bool fp16_compute = false; - for (auto properties : ext_props) { + for (const auto& properties : ext_props) { if (strcmp("VK_KHR_16bit_storage", properties.extensionName) == 0) { fp16_storage = true; } else if (strcmp("VK_KHR_shader_float16_int8", properties.extensionName) == 0) { From bb9dcd560a7e81265398b0d463c40f3e467daf19 Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Wed, 14 Feb 2024 20:57:17 +0100 Subject: [PATCH 810/859] Refactor validation and enumeration platform checks into functions to clean up ggml_vk_instance_init() --- ggml-vulkan.cpp | 101 ++++++++++++++++++++++++++++++------------------ 1 file changed, 63 insertions(+), 38 deletions(-) diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 37123ac8f..4e5eaff15 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -1091,7 +1091,10 @@ static void ggml_vk_print_gpu_info(size_t idx) { } } -static void ggml_vk_instance_init() { +static bool ggml_vk_instance_validation_ext_available(const std::vector& instance_extensions); +static bool ggml_vk_instance_portability_enumeration_ext_available(const std::vector& instance_extensions); + +void ggml_vk_instance_init() { if (vk_instance_initialized) { return; } @@ -1102,54 +1105,40 @@ static void ggml_vk_instance_init() { vk::ApplicationInfo app_info{ "ggml-vulkan", 1, nullptr, 0, VK_API_VERSION }; const std::vector instance_extensions = vk::enumerateInstanceExtensionProperties(); -#ifdef __APPLE__ - bool portability_enumeration_ext = false; - // Check for portability enumeration extension for MoltenVK support - for (const auto& properties : instance_extensions) { - if (strcmp("VK_KHR_portability_enumeration", properties.extensionName) == 0) { - portability_enumeration_ext = true; - break; - } - } - if 
(!portability_enumeration_ext) { - std::cerr << "ggml_vulkan: WARNING: Instance extension VK_KHR_portability_enumeration not found." << std::endl; - } -#endif + const bool validation_ext = ggml_vk_instance_validation_ext_available(instance_extensions); + const bool portability_enumeration_ext = ggml_vk_instance_portability_enumeration_ext_available(instance_extensions); - std::vector layers = { -#ifdef GGML_VULKAN_VALIDATE - "VK_LAYER_KHRONOS_validation", -#endif - }; - std::vector extensions = { -#ifdef GGML_VULKAN_VALIDATE - "VK_EXT_validation_features", -#endif - }; -#ifdef __APPLE__ + std::vector layers; + + if (validation_ext) { + layers.push_back("VK_LAYER_KHRONOS_validation"); + } + std::vector extensions; + if (validation_ext) { + extensions.push_back("VK_EXT_validation_features"); + } if (portability_enumeration_ext) { extensions.push_back("VK_KHR_portability_enumeration"); } -#endif vk::InstanceCreateInfo instance_create_info(vk::InstanceCreateFlags{}, &app_info, layers, extensions); -#ifdef __APPLE__ if (portability_enumeration_ext) { instance_create_info.flags |= vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR; } -#endif + std::vector features_enable; + vk::ValidationFeaturesEXT validation_features; -#ifdef GGML_VULKAN_VALIDATE - const std::vector features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; - vk::ValidationFeaturesEXT validation_features = { - features_enable, - {}, - }; - validation_features.setPNext(nullptr); - instance_create_info.setPNext(&validation_features); + if (validation_ext) { + features_enable = { vk::ValidationFeatureEnableEXT::eBestPractices }; + validation_features = { + features_enable, + {}, + }; + validation_features.setPNext(nullptr); + instance_create_info.setPNext(&validation_features); - std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; -#endif + std::cerr << "ggml_vulkan: Validation layers enabled" << std::endl; + } vk_instance.instance = vk::createInstance(instance_create_info); memset(vk_instance.initialized, 0, sizeof(bool) * GGML_VK_MAX_DEVICES); @@ -5329,6 +5318,42 @@ GGML_CALL int ggml_backend_vk_reg_devices() { return vk_instance.device_indices.size(); } +// Extension availability +static bool ggml_vk_instance_validation_ext_available(const std::vector& instance_extensions) { +#ifdef GGML_VULKAN_VALIDATE + bool portability_enumeration_ext = false; + // Check for portability enumeration extension for MoltenVK support + for (const auto& properties : instance_extensions) { + if (strcmp("VK_KHR_portability_enumeration", properties.extensionName) == 0) { + return true; + } + } + if (!portability_enumeration_ext) { + std::cerr << "ggml_vulkan: WARNING: Instance extension VK_KHR_portability_enumeration not found." << std::endl; + } +#endif + return false; + + UNUSED(instance_extensions); +} +static bool ggml_vk_instance_portability_enumeration_ext_available(const std::vector& instance_extensions) { +#ifdef __APPLE__ + bool portability_enumeration_ext = false; + // Check for portability enumeration extension for MoltenVK support + for (const auto& properties : instance_extensions) { + if (strcmp("VK_KHR_portability_enumeration", properties.extensionName) == 0) { + return true; + } + } + if (!portability_enumeration_ext) { + std::cerr << "ggml_vulkan: WARNING: Instance extension VK_KHR_portability_enumeration not found." 
<< std::endl; + } +#endif + return false; + + UNUSED(instance_extensions); +} + // checks #ifdef GGML_VULKAN_CHECK_RESULTS From 22f83f0c383e12106692b8afc224d61b8993a52c Mon Sep 17 00:00:00 2001 From: 0cc4m Date: Sat, 10 Feb 2024 22:18:33 +0100 Subject: [PATCH 811/859] Enable Vulkan MacOS CI --- .devops/nix/package.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.devops/nix/package.nix b/.devops/nix/package.nix index ad23f7dd7..815db6a2d 100644 --- a/.devops/nix/package.nix +++ b/.devops/nix/package.nix @@ -255,11 +255,11 @@ effectiveStdenv.mkDerivation ( # Configurations we don't want even the CI to evaluate. Results in the # "unsupported platform" messages. This is mostly a no-op, because # cudaPackages would've refused to evaluate anyway. - badPlatforms = optionals (useCuda || useOpenCL || useVulkan) lib.platforms.darwin; + badPlatforms = optionals (useCuda || useOpenCL) lib.platforms.darwin; # Configurations that are known to result in build failures. Can be # overridden by importing Nixpkgs with `allowBroken = true`. - broken = (useMetalKit && !effectiveStdenv.isDarwin) || (useVulkan && effectiveStdenv.isDarwin); + broken = (useMetalKit && !effectiveStdenv.isDarwin); description = "Inference of LLaMA model in pure C/C++${descriptionSuffix}"; homepage = "https://github.com/ggerganov/llama.cpp/"; From 633782b8d949f24b619e6c68ee37b5cc79167173 Mon Sep 17 00:00:00 2001 From: Mathijs de Bruin Date: Tue, 13 Feb 2024 20:28:02 +0000 Subject: [PATCH 812/859] nix: now that we can do so, allow MacOS to build Vulkan binaries Author: Philip Taron Date: Tue Feb 13 20:28:02 2024 +0000 --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index ad2f9b295..dc4e503c3 100644 --- a/flake.nix +++ b/flake.nix @@ -150,6 +150,7 @@ packages = { default = config.legacyPackages.llamaPackages.llama-cpp; + vulkan = config.packages.default.override { useVulkan = true; }; } // lib.optionalAttrs pkgs.stdenv.isLinux { opencl = config.packages.default.override { useOpenCL = true; }; @@ -157,7 +158,6 @@ mpi-cpu = config.packages.default.override { useMpi = true; }; mpi-cuda = config.packages.default.override { useMpi = true; }; - vulkan = config.packages.default.override { useVulkan = true; }; } // lib.optionalAttrs (system == "x86_64-linux") { rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp; From b9111bd209c7b11b0592450a6ed2e0ca545b2c84 Mon Sep 17 00:00:00 2001 From: AidanBeltonS <87009434+AidanBeltonS@users.noreply.github.com> Date: Tue, 20 Feb 2024 07:01:25 +0000 Subject: [PATCH 813/859] Update ggml_sycl_op_mul_mat_vec_q (#5502) * Update ggml_sycl_op_mul_mat_vec_q * Apply suggestions from code review Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> * revert suggestion on macro * fix bug * Add quant type GGML_TYPE_IQ1_S to unsupported * fix format --------- Co-authored-by: Abhilash Majumder <30946547+abhilash1910@users.noreply.github.com> --- ggml-sycl.cpp | 258 ++++++++++++++------------------------------------ 1 file changed, 69 insertions(+), 189 deletions(-) diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index cd4b3a1e1..df1826112 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -9188,174 +9188,22 @@ static void convert_mul_mat_vec_f16_sycl(const void *vx, const dfloat *y, } } -static void mul_mat_vec_q4_0_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK4_0 == 0); - const int block_num_y = (nrows + 
GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q4_1_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK4_1 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q5_0_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK5_0 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q5_1_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK5_1 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q8_0_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK8_0 == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q2_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q3_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 
0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q4_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q5_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); -} - -static void mul_mat_vec_q6_K_q8_1_sycl(const void *vx, const void *vy, - float *dst, const int ncols, - const int nrows, - dpct::queue_ptr stream) { - GGML_ASSERT(ncols % QK_K == 0); - const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; - const sycl::range<3> block_nums(1, 1, block_num_y); - const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); - stream->parallel_for( - sycl::nd_range<3>(block_nums * block_dims, block_dims), - [=](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { - mul_mat_vec_q(vx, vy, dst, ncols, nrows, - item_ct1); - }); +template +static void mul_mat_vec_q_sycl_submitter(const void *vx, const void *vy, + float *dst, const int ncols, + const int nrows, + dpct::queue_ptr stream) { + GGML_ASSERT(ncols % QK4_0 == 0); + const int block_num_y = (nrows + GGML_SYCL_MMV_Y - 1) / GGML_SYCL_MMV_Y; + const sycl::range<3> block_nums(1, 1, block_num_y); + const sycl::range<3> block_dims(1, GGML_SYCL_MMV_Y, WARP_SIZE); + stream->parallel_for( + sycl::nd_range<3>(block_nums * block_dims, block_dims), [= + ](sycl::nd_item<3> item_ct1) [[intel::reqd_sub_group_size(32)]] { + mul_mat_vec_q( + vx, vy, dst, ncols, nrows, item_ct1); + }); } int get_device_index_by_id(int id){ @@ -12095,37 +11943,63 @@ inline void ggml_sycl_op_mul_mat_vec_q( const int64_t ne00 = src0->ne[0]; const int64_t row_diff = row_high - row_low; + // TODO: support these quantization types + GGML_ASSERT(!(src0->type == GGML_TYPE_IQ2_XXS || + src0->type == GGML_TYPE_IQ2_XS || + src0->type == GGML_TYPE_IQ3_XXS || + src0->type == GGML_TYPE_IQ1_S)); + switch (src0->type) { case GGML_TYPE_Q4_0: - mul_mat_vec_q4_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q4_1: - mul_mat_vec_q4_1_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - 
break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_0: - mul_mat_vec_q5_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_1: - mul_mat_vec_q5_1_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q8_0: - mul_mat_vec_q8_0_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q2_K: - mul_mat_vec_q2_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q3_K: - mul_mat_vec_q3_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q4_K: - mul_mat_vec_q4_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q5_K: - mul_mat_vec_q5_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; case GGML_TYPE_Q6_K: - mul_mat_vec_q6_K_q8_1_sycl(src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); - break; + mul_mat_vec_q_sycl_submitter( + src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, stream); + break; default: GGML_ASSERT(false); break; @@ -12145,7 +12019,7 @@ inline void ggml_sycl_op_dequantize_mul_mat_vec( const int64_t src1_ncols, const int64_t src1_padded_row_size, const dpct::queue_ptr &stream) { - GGML_TENSOR_BINARY_OP_LOCALS + GGML_TENSOR_BINARY_OP_LOCALS; const int64_t row_diff = row_high - row_low; @@ -15093,6 +14967,12 @@ static bool ggml_backend_sycl_supports_op(ggml_backend_t backend, const ggml_ten return false; } + if (a->type == GGML_TYPE_IQ1_S) { + return false; + } + if (a->type == GGML_TYPE_IQ3_XXS) { + return false; + } if (a->type == GGML_TYPE_IQ2_XXS) { return false; } From c0a8c6db371cb3e4379900867b948879f5842201 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Tue, 20 Feb 2024 08:48:19 +0100 Subject: [PATCH 814/859] server : health endpoint configurable failure on no slot (#5594) --- examples/server/README.md | 9 ++++--- examples/server/server.cpp | 52 +++++++++++++++++++------------------- 2 files changed, 31 insertions(+), 30 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 809e2d37c..f6b9c7402 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -134,10 +134,11 @@ node index.js ## API Endpoints - **GET** `/health`: Returns the current state of the server: - - `{"status": "loading model"}` if the model is still being loaded. - - `{"status": "error"}` if the model failed to load. - - `{"status": "ok"}` if the model is successfully loaded and the server is ready for further requests mentioned below. - - `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if no slot are currently available + - 503 -> `{"status": "loading model"}` if the model is still being loaded. 
+ - 500 -> `{"status": "error"}` if the model failed to load. + - 200 -> `{"status": "ok", "slots_idle": 1, "slots_processing": 2 }` if the model is successfully loaded and the server is ready for further requests mentioned below. + - 200 -> `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if no slot are currently available. + - 503 -> `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if the query parameter `fail_on_no_slot` is provided and no slot are currently available. - **POST** `/completion`: Given a `prompt`, it returns the predicted completion. diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 22c344dd4..23482ed95 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2582,40 +2582,40 @@ int main(int argc, char **argv) res.set_header("Access-Control-Allow-Headers", "*"); }); - svr.Get("/health", [&](const httplib::Request&, httplib::Response& res) { + svr.Get("/health", [&](const httplib::Request& req, httplib::Response& res) { server_state current_state = state.load(); switch(current_state) { - case SERVER_STATE_READY: - if (llama.all_slots_are_idle) { - res.set_content(R"({"status": "ok"})", "application/json"); + case SERVER_STATE_READY: { + int available_slots = 0; + int processing_slots = 0; + for (llama_client_slot &slot: llama.slots) { + if (slot.available()) { + available_slots++; + } else { + processing_slots++; + } + } + if (available_slots > 0) { + json health = { + {"status", "ok"}, + {"slots_idle", available_slots}, + {"slots_processing", processing_slots}}; + res.set_content(health.dump(), "application/json"); res.status = 200; // HTTP OK } else { - int available_slots = 0; - int processing_slots = 0; - for (llama_client_slot & slot : llama.slots) { - if (slot.available()) { - available_slots++; - } else { - processing_slots++; - } - } - if (available_slots > 0) { - json health = { - {"status", "ok"}, - {"slots_idle", available_slots}, - {"slots_processing", processing_slots}}; - res.set_content(health.dump(), "application/json"); - res.status = 200; // HTTP OK - } else { - json health = { - {"status", "no slot available"}, - {"slots_idle", available_slots}, - {"slots_processing", processing_slots}}; - res.set_content(health.dump(), "application/json"); + json health = { + {"status", "no slot available"}, + {"slots_idle", available_slots}, + {"slots_processing", processing_slots}}; + res.set_content(health.dump(), "application/json"); + if (req.has_param("fail_on_no_slot")) { res.status = 503; // HTTP Service Unavailable + } else { + res.status = 200; // HTTP OK } } break; + } case SERVER_STATE_LOADING_MODEL: res.set_content(R"({"status": "loading model"})", "application/json"); res.status = 503; // HTTP Service Unavailable From 8dbbd75754d43ec7b4bbe42fb287cc2553fdf0e9 Mon Sep 17 00:00:00 2001 From: Haoxiang Fei Date: Mon, 19 Feb 2024 22:58:36 -1100 Subject: [PATCH 815/859] metal : add build system support for embedded metal library (#5604) * add build support for embedded metal library * Update Makefile --------- Co-authored-by: Haoxiang Fei Co-authored-by: Georgi Gerganov --- CMakeLists.txt | 24 ++++++++++++++++++++++++ Makefile | 18 ++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 168b133f4..3c4629001 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -110,6 +110,7 @@ option(LLAMA_VULKAN_RUN_TESTS "llama: run Vulkan tests" option(LLAMA_METAL "llama: use Metal" ${LLAMA_METAL_DEFAULT}) option(LLAMA_METAL_NDEBUG 
"llama: disable Metal debugging" OFF) option(LLAMA_METAL_SHADER_DEBUG "llama: compile Metal with -fno-fast-math" OFF) +option(LLAMA_METAL_EMBED_LIBRARY "llama: embed Metal library" OFF) option(LLAMA_KOMPUTE "llama: use Kompute" OFF) option(LLAMA_MPI "llama: use MPI" OFF) option(LLAMA_QKK_64 "llama: use super-block size of 64 for k-quants" OFF) @@ -201,6 +202,29 @@ if (LLAMA_METAL) # copy ggml-metal.metal to bin directory configure_file(ggml-metal.metal ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/ggml-metal.metal COPYONLY) + if (LLAMA_METAL_EMBED_LIBRARY) + enable_language(ASM) + add_compile_definitions(GGML_METAL_EMBED_LIBRARY) + + set(METALLIB_SOURCE "${CMAKE_SOURCE_DIR}/ggml-metal.metal") + file(MAKE_DIRECTORY "${CMAKE_BINARY_DIR}/autogenerated") + set(EMBED_METALLIB_ASSEMBLY "${CMAKE_BINARY_DIR}/autogenerated/ggml-embed-metallib.s") + + add_custom_command( + OUTPUT ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo ".section __DATA,__ggml_metallib" > ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo ".globl _ggml_metallib_start" >> ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo "_ggml_metallib_start:" >> ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo ".incbin \\\"${METALLIB_SOURCE}\\\"" >> ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo ".globl _ggml_metallib_end" >> ${EMBED_METALLIB_ASSEMBLY} + COMMAND echo "_ggml_metallib_end:" >> ${EMBED_METALLIB_ASSEMBLY} + DEPENDS ${METALLIB_SOURCE} + COMMENT "Generate assembly for embedded Metal library" + ) + + set(GGML_SOURCES_METAL ${GGML_SOURCES_METAL} ${EMBED_METALLIB_ASSEMBLY}) + endif() + if (LLAMA_METAL_SHADER_DEBUG) # custom command to do the following: # xcrun -sdk macosx metal -fno-fast-math -c ggml-metal.metal -o ggml-metal.air diff --git a/Makefile b/Makefile index db5df1b32..211a08d7f 100644 --- a/Makefile +++ b/Makefile @@ -533,11 +533,29 @@ ifdef LLAMA_METAL ifdef LLAMA_METAL_NDEBUG MK_CPPFLAGS += -DGGML_METAL_NDEBUG endif +ifdef LLAMA_METAL_EMBED_LIBRARY + MK_CPPFLAGS += -DGGML_METAL_EMBED_LIBRARY + OBJS += ggml-metal-embed.o +endif endif # LLAMA_METAL ifdef LLAMA_METAL ggml-metal.o: ggml-metal.m ggml-metal.h $(CC) $(CFLAGS) -c $< -o $@ + +ifdef LLAMA_METAL_EMBED_LIBRARY +ggml-metal-embed.o: ggml-metal.metal + @echo "Embedding Metal library" + $(eval TEMP_ASSEMBLY=$(shell mktemp)) + @echo ".section __DATA, __ggml_metallib" > $(TEMP_ASSEMBLY) + @echo ".globl _ggml_metallib_start" >> $(TEMP_ASSEMBLY) + @echo "_ggml_metallib_start:" >> $(TEMP_ASSEMBLY) + @echo ".incbin \"$<\"" >> $(TEMP_ASSEMBLY) + @echo ".globl _ggml_metallib_end" >> $(TEMP_ASSEMBLY) + @echo "_ggml_metallib_end:" >> $(TEMP_ASSEMBLY) + @$(AS) $(TEMP_ASSEMBLY) -o $@ + @rm -f ${TEMP_ASSEMBLY} +endif endif # LLAMA_METAL ifdef LLAMA_MPI From 5207b3fbc500f89dfe528693e96540956dbaed96 Mon Sep 17 00:00:00 2001 From: Dane Madsen Date: Tue, 20 Feb 2024 21:00:23 +1100 Subject: [PATCH 816/859] readme : update UI list (#5605) * Add maid to ui list * Specify licence --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 70866e249..747d2e98b 100644 --- a/README.md +++ b/README.md @@ -156,6 +156,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [pythops/tenere](https://github.com/pythops/tenere) (AGPL) - [semperai/amica](https://github.com/semperai/amica) - [withcatai/catai](https://github.com/withcatai/catai) +- [Mobile-Artificial-Intelligence/maid](https://github.com/Mobile-Artificial-Intelligence/maid) (MIT) --- From 9c405c9f9a7cfd23511fd6b2de05dc72481119b4 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Tue, 20 Feb 2024 15:58:27 +0100 
Subject: [PATCH 817/859] Server: use llama_chat_apply_template (#5593) * server: use llama_chat_apply_template * server: remove trailing space * server: fix format_chat * server: fix help message Co-authored-by: Georgi Gerganov * server: fix formatted_chat --------- Co-authored-by: Georgi Gerganov --- examples/server/oai.hpp | 6 ++-- examples/server/server.cpp | 17 +++++----- examples/server/utils.hpp | 69 ++++++++++++++++++-------------------- llama.cpp | 2 +- 4 files changed, 45 insertions(+), 49 deletions(-) diff --git a/examples/server/oai.hpp b/examples/server/oai.hpp index 2eca8a9fb..ff4ad6994 100644 --- a/examples/server/oai.hpp +++ b/examples/server/oai.hpp @@ -15,13 +15,11 @@ using json = nlohmann::json; inline static json oaicompat_completion_params_parse( + const struct llama_model * model, const json &body, /* openai api json semantics */ const std::string &chat_template) { json llama_params; - std::string formatted_prompt = chat_template == "chatml" - ? format_chatml(body["messages"]) // OpenAI 'messages' to chatml (with <|im_start|>,...) - : format_llama2(body["messages"]); // OpenAI 'messages' to llama2 (with [INST],...) llama_params["__oaicompat"] = true; @@ -34,7 +32,7 @@ inline static json oaicompat_completion_params_parse( // https://platform.openai.com/docs/api-reference/chat/create llama_sampling_params default_sparams; llama_params["model"] = json_value(body, "model", std::string("unknown")); - llama_params["prompt"] = formatted_prompt; + llama_params["prompt"] = format_chat(model, chat_template, body["messages"]); llama_params["cache_prompt"] = json_value(body, "cache_prompt", false); llama_params["temperature"] = json_value(body, "temperature", 0.0); llama_params["top_k"] = json_value(body, "top_k", default_sparams.top_k); diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 23482ed95..c7821eca6 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -37,7 +37,7 @@ struct server_params std::string hostname = "127.0.0.1"; std::vector api_keys; std::string public_path = "examples/server/public"; - std::string chat_template = "chatml"; + std::string chat_template = ""; int32_t port = 8080; int32_t read_timeout = 600; int32_t write_timeout = 600; @@ -1937,8 +1937,9 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" types: int, float, bool. 
example: --override-kv tokenizer.ggml.add_bos_token=bool:false\n"); printf(" -gan N, --grp-attn-n N set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w`"); printf(" -gaw N, --grp-attn-w N set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n`"); - printf(" --chat-template FORMAT_NAME"); - printf(" set chat template, possible value is: llama2, chatml (default %s)", sparams.chat_template.c_str()); + printf(" --chat-template JINJA_TEMPLATE\n"); + printf(" set custom jinja chat template (default: template taken from model's metadata)\n"); + printf(" Note: only commonly used templates are accepted, since we don't have jinja parser\n"); printf("\n"); } @@ -2389,13 +2390,13 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, invalid_param = true; break; } - std::string value(argv[i]); - if (value != "chatml" && value != "llama2") { - fprintf(stderr, "error: chat template can be \"llama2\" or \"chatml\", but got: %s\n", value.c_str()); + if (!verify_custom_template(argv[i])) { + fprintf(stderr, "error: the supplied chat template is not supported: %s\n", argv[i]); + fprintf(stderr, "note: llama.cpp does not use jinja parser, we only support commonly used templates\n"); invalid_param = true; break; } - sparams.chat_template = value; + sparams.chat_template = argv[i]; } else if (arg == "--override-kv") { @@ -2913,7 +2914,7 @@ int main(int argc, char **argv) if (!validate_api_key(req, res)) { return; } - json data = oaicompat_completion_params_parse(json::parse(req.body), sparams.chat_template); + json data = oaicompat_completion_params_parse(llama.model, json::parse(req.body), sparams.chat_template); const int task_id = llama.queue_tasks.get_new_id(); llama.queue_results.add_waiting_task_id(task_id); diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 0ee670dba..e954fb0ef 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -167,50 +167,47 @@ static T json_value(const json &body, const std::string &key, const T &default_v : default_value; } -inline std::string format_llama2(std::vector messages) -{ - std::ostringstream output; - bool is_inside_turn = false; - - for (auto it = messages.begin(); it != messages.end(); ++it) { - if (!is_inside_turn) { - output << "[INST] "; - } - std::string role = json_value(*it, "role", std::string("user")); - std::string content = json_value(*it, "content", std::string("")); - if (role == "system") { - output << "<>\n" << content << "\n<>\n\n"; - is_inside_turn = true; - } else if (role == "user") { - output << content << " [/INST]"; - is_inside_turn = true; - } else { - output << " " << content << " "; - is_inside_turn = false; - } - } - - LOG_VERBOSE("format_llama2", {{"text", output.str()}}); - - return output.str(); +// Check if the template supplied via "--chat-template" is supported or not. Returns true if it's valid +inline bool verify_custom_template(const std::string & tmpl) { + llama_chat_message chat[] = {{"user", "test"}}; + std::vector buf(1); + int res = llama_chat_apply_template(nullptr, tmpl.c_str(), chat, 1, true, buf.data(), buf.size()); + return res >= 0; } -inline std::string format_chatml(std::vector messages) +// Format given chat. 
If tmpl is empty, we take the template from model metadata +inline std::string format_chat(const struct llama_model * model, const std::string & tmpl, const std::vector & messages) { - std::ostringstream chatml_msgs; + size_t alloc_size = 0; + // vector holding all allocated string to be passed to llama_chat_apply_template + std::vector str(messages.size() * 2); + std::vector chat(messages.size()); - for (auto it = messages.begin(); it != messages.end(); ++it) { - chatml_msgs << "<|im_start|>" - << json_value(*it, "role", std::string("user")) << '\n'; - chatml_msgs << json_value(*it, "content", std::string("")) - << "<|im_end|>\n"; + for (size_t i = 0; i < messages.size(); ++i) { + auto &curr_msg = messages[i]; + str[i*2 + 0] = json_value(curr_msg, "role", std::string("")); + str[i*2 + 1] = json_value(curr_msg, "content", std::string("")); + alloc_size += str[i*2 + 1].length(); + chat[i].role = str[i*2 + 0].c_str(); + chat[i].content = str[i*2 + 1].c_str(); } - chatml_msgs << "<|im_start|>assistant" << '\n'; + const char * ptr_tmpl = tmpl.empty() ? nullptr : tmpl.c_str(); + std::vector buf(alloc_size * 2); - LOG_VERBOSE("format_chatml", {{"text", chatml_msgs.str()}}); + // run the first time to get the total output length + int32_t res = llama_chat_apply_template(model, ptr_tmpl, chat.data(), chat.size(), true, buf.data(), buf.size()); - return chatml_msgs.str(); + // if it turns out that our buffer is too small, we resize it + if ((size_t) res > buf.size()) { + buf.resize(res); + res = llama_chat_apply_template(model, ptr_tmpl, chat.data(), chat.size(), true, buf.data(), buf.size()); + } + + std::string formatted_chat(buf.data(), res); + LOG_VERBOSE("formatted_chat", {{"text", formatted_chat.c_str()}}); + + return formatted_chat; } // diff --git a/llama.cpp b/llama.cpp index 5de07dfa9..4296eca32 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12602,7 +12602,7 @@ LLAMA_API int32_t llama_chat_apply_template( // load template from model std::vector model_template(2048, 0); // longest known template is about 1200 bytes std::string template_key = "tokenizer.chat_template"; - int32_t res = llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), curr_tmpl.size()); + int32_t res = llama_model_meta_val_str(model, template_key.c_str(), model_template.data(), model_template.size()); if (res < 0) { // worst case: there is no information about template, we will use chatml by default curr_tmpl = "<|im_start|>"; // see llama_chat_apply_template_internal From 4ed8e4fbef6a15afd993bfcd9ffa279841e18ef1 Mon Sep 17 00:00:00 2001 From: Daniel Bevenius Date: Tue, 20 Feb 2024 18:30:27 +0100 Subject: [PATCH 818/859] llava : add explicit instructions for llava-1.6 (#5611) This commit contains a suggestion for the README.md in the llava example. The suggestion adds explicit instructions for how to convert a llava-1.6 model and run it using llava-cli. The motivation for this is that having explicit instructions similar to the 1.5 instructions will make it easier for users to try this out. Signed-off-by: Daniel Bevenius --- examples/llava/README.md | 38 ++++++++++++++++++++++++++++++++------ 1 file changed, 32 insertions(+), 6 deletions(-) diff --git a/examples/llava/README.md b/examples/llava/README.md index e42db6e5a..25ea96715 100644 --- a/examples/llava/README.md +++ b/examples/llava/README.md @@ -59,14 +59,40 @@ python ./convert.py ../llava-v1.5-7b --skip-unknown Now both the LLaMA part and the image encoder is in the `llava-v1.5-7b` directory. 
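Before moving on to the LLaVA 1.6 instructions below, it is worth making the `llama_chat_apply_template` pattern from patch 817 above concrete. The API reports the full rendered length even when the supplied buffer is too small, so `format_chat` calls it twice: once to learn the size, then again after growing the buffer. The following is a minimal C++ sketch of that two-pass idiom, not part of any patch; the message contents, the 1024-byte initial buffer, and the `apply_chat_template_sketch` name are illustrative choices.

```cpp
#include <string>
#include <vector>

#include "llama.h"

// Render a short chat with the template stored in the model's metadata
// (pass a template string instead of nullptr to override it).
static std::string apply_chat_template_sketch(const struct llama_model * model) {
    llama_chat_message chat[] = {
        {"system", "You are a helpful assistant."},
        {"user",   "Hello!"},
    };
    const size_t n_msg = sizeof(chat)/sizeof(chat[0]);

    std::vector<char> buf(1024);
    int32_t res = llama_chat_apply_template(model, nullptr, chat, n_msg,
                                            /*add_ass=*/true, buf.data(), (int32_t) buf.size());
    if (res > (int32_t) buf.size()) {
        buf.resize(res); // first call reported the true length: grow and render again
        res = llama_chat_apply_template(model, nullptr, chat, n_msg,
                                        /*add_ass=*/true, buf.data(), (int32_t) buf.size());
    }
    return res < 0 ? std::string() : std::string(buf.data(), res);
}
```

The server's `format_chat` follows the same resize-and-retry shape, only with the roles and contents pulled out of a JSON message array first.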
## LLaVA 1.6 gguf conversion - -1) Backup your pth/safetensor model files as llava-surgery modifies them -2) Use `python llava-surgery-v2.py -C -m /path/to/hf-model` which also supports llava-1.5 variants pytorch as well as safetensor models: +1) First clone a LLaVA 1.6 model: +```console +git clone https://huggingface.co/liuhaotian/llava-v1.6-vicuna-7b +``` +2) Backup your pth/safetensor model files as llava-surgery modifies them +3) Use `llava-surgery-v2.py` which also supports llava-1.5 variants pytorch as well as safetensor models: +```console +python examples/llava/llava-surgery-v2.py -C -m ../llava-v1.6-vicuna-7b/ +``` - you will find a llava.projector and a llava.clip file in your model directory -3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory (https://huggingface.co/cmp-nct/llava-1.6-gguf/blob/main/config_vit.json) and rename it to config.json. -4) Create the visual gguf model: `python ./examples/llava/convert-image-encoder-to-gguf.py -m ../path/to/vit --llava-projector ../path/to/llava.projector --output-dir ../path/to/output --clip-model-is-vision` +4) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory: +```console +mkdir vit +cp ../llava-v1.6-vicuna-7b/llava.clip vit/pytorch_model.bin +cp ../llava-v1.6-vicuna-7b/llava.projector vit/ +curl -s -q https://huggingface.co/cmp-nct/llava-1.6-gguf/raw/main/config_vit.json -o vit/config.json +``` + +5) Create the visual gguf model: +```console +python ./examples/llava/convert-image-encoder-to-gguf.py -m vit --llava-projector vit/llava.projector --output-dir vit --clip-model-is-vision +``` - This is similar to llava-1.5, the difference is that we tell the encoder that we are working with the pure vision model part of CLIP -5) Everything else as usual: convert.py the hf model, quantize as needed + +6) Then convert the model to gguf format: +```console +python ./convert.py ../llava-v1.6-vicuna-7b/ +``` + +7) And finally we can run the llava-cli using the 1.6 model version: +```console +./llava-cli -m ../llava-v1.6-vicuna-7b/ggml-model-f16.gguf --mmproj vit/mmproj-model-f16.gguf --image some-image.jpg -c 4096 +``` + **note** llava-1.6 needs more context than llava-1.5, at least 3000 is needed (just run it at -c 4096) **note** llava-1.6 greatly benefits from batched prompt processing (defaults work) From 06bf2cf8c406e6b70dbf9b431a02fa0ad845b9df Mon Sep 17 00:00:00 2001 From: slaren Date: Tue, 20 Feb 2024 20:06:17 +0100 Subject: [PATCH 819/859] make : fix debug build with CUDA (#5616) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 211a08d7f..41c79c135 100644 --- a/Makefile +++ b/Makefile @@ -173,7 +173,7 @@ ifdef LLAMA_DEBUG MK_LDFLAGS += -g ifeq ($(UNAME_S),Linux) - MK_CXXFLAGS += -Wp,-D_GLIBCXX_ASSERTIONS + MK_CPPFLAGS += -D_GLIBCXX_ASSERTIONS endif else MK_CPPFLAGS += -DNDEBUG From 6560bed3f066c876682464762cad90f1e28e3f1b Mon Sep 17 00:00:00 2001 From: CJ Pais Date: Tue, 20 Feb 2024 11:07:22 -0800 Subject: [PATCH 820/859] server : support llava 1.6 (#5553) * server: init working 1.6 * move clip_image to header * remove commented code * remove c++ style from header * remove todo * expose llava_image_embed_make_with_clip_img * fix zig build --- Makefile | 2 +- build.zig | 3 ++- examples/llava/llava.cpp | 2 +- examples/llava/llava.h | 2 ++ examples/server/server.cpp | 36 
+++--------------------------------- 5 files changed, 9 insertions(+), 36 deletions(-) diff --git a/Makefile b/Makefile index 41c79c135..f03faf6ed 100644 --- a/Makefile +++ b/Makefile @@ -719,7 +719,7 @@ save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(C $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) -server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) +server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h examples/llava/llava.h examples/llava/llava.cpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) $(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<) $(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h %.hpp $< examples/llava/clip.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) -o $@ $(LDFLAGS) $(LWINSOCK2) diff --git a/build.zig b/build.zig index 699738f3d..c0af454dc 100644 --- a/build.zig +++ b/build.zig @@ -123,6 +123,7 @@ pub fn build(b: *std.build.Builder) !void { const grammar_parser = make.obj("grammar-parser", "common/grammar-parser.cpp"); const train = make.obj("train", "common/train.cpp"); const clip = make.obj("clip", "examples/llava/clip.cpp"); + const llava = make.obj("llava", "examples/llava/llava.cpp"); _ = make.exe("main", "examples/main/main.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, sampling, console, grammar_parser }); _ = make.exe("quantize", "examples/quantize/quantize.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo }); @@ -131,7 +132,7 @@ pub fn build(b: *std.build.Builder) !void { _ = make.exe("finetune", "examples/finetune/finetune.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, train }); _ = make.exe("train-text-from-scratch", "examples/train-text-from-scratch/train-text-from-scratch.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, train }); - const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, sampling, grammar_parser, clip }); + const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, ggml_quants, llama, common, buildinfo, sampling, grammar_parser, clip, llava }); if (server.target.isWindows()) { server.linkSystemLibrary("ws2_32"); } diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 4cb65a07b..1a1cf7c78 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -311,7 +311,7 @@ bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * return true; } -static bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out) { +bool 
llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out) { float * image_embd = (float *)malloc(clip_embd_nbytes(ctx_clip)*6); // TODO: base on gridsize/llava model if (!image_embd) { fprintf(stderr, "Unable to allocate memory for image embeddings\n"); diff --git a/examples/llava/llava.h b/examples/llava/llava.h index 9e9466a5d..2d40f3f1d 100644 --- a/examples/llava/llava.h +++ b/examples/llava/llava.h @@ -31,6 +31,8 @@ struct llava_image_embed { /** sanity check for clip <-> llava embed size match */ LLAVA_API bool llava_validate_embed_size(const llama_context * ctx_llama, const clip_ctx * ctx_clip); +LLAVA_API bool llava_image_embed_make_with_clip_img(clip_ctx * ctx_clip, int n_threads, const clip_image_u8 * img, float ** image_embd_out, int * n_img_pos_out); + /** build an image embed from image file bytes */ LLAVA_API struct llava_image_embed * llava_image_embed_make_with_bytes(struct clip_ctx * ctx_clip, int n_threads, const unsigned char * image_bytes, int image_bytes_length); /** build an image embed from a path to an image filename */ diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c7821eca6..eb01729fa 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -5,6 +5,7 @@ #include "oai.hpp" #include "../llava/clip.h" +#include "../llava/llava.h" #include "stb_image.h" @@ -997,43 +998,12 @@ struct llama_server_context { continue; } - clip_image_f32_batch img_res_v; - img_res_v.size = 0; - img_res_v.data = nullptr; - if (!clip_image_preprocess(clp_ctx, img.img_data, img_res_v)) - { - LOG_TEE("Error processing the given image"); - clip_free(clp_ctx); - clip_image_f32_batch_free(img_res_v); - return false; - } - if (img_res_v.size == 0) - { + + if (!llava_image_embed_make_with_clip_img(clp_ctx, params.n_threads, img.img_data, &img.image_embedding, &img.image_tokens)) { LOG_TEE("Error processing the given image"); return false; } - // note: assumes only one image was returned by clip_image_preprocess - clip_image_f32 * img_res = img_res_v.data; - - img.image_tokens = clip_n_patches(clp_ctx); - img.image_embedding = (float *)malloc(clip_embd_nbytes(clp_ctx)); - if (!img.image_embedding) - { - LOG_TEE("Unable to allocate memory for image embeddings\n"); - clip_image_f32_batch_free(img_res_v); - clip_free(clp_ctx); - return false; - } - LOG_TEE("slot %i - encoding image [id: %i]\n", slot.id, img.id); - if (!clip_image_encode(clp_ctx, params.n_threads, img_res, img.image_embedding)) - { - LOG_TEE("Unable to encode image\n"); - clip_image_f32_batch_free(img_res_v); - return false; - } - - clip_image_f32_batch_free(img_res_v); img.request_encode_image = false; } From a14679cc30c785e75d38028bae6ec39c6209ddef Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Wed, 21 Feb 2024 11:39:52 +0200 Subject: [PATCH 821/859] IQ4_NL: 4-bit non-linear quants with blocks of 32 (#5590) * iq4_nl: squash commits for easier rebase * Basics (quantize, dequantize) * CUDA dequantize and dot product * Slightly faster CUDA dot product (120 t/s) * Switch to 6-bit scales * Scalar dot product * AVX2 dot product * ARM_NEON dot product * Works on metal, but still slow * Slightly better Metal dot product * Another small Metal improvement * Metal dot product is getting there * Faster CUDA dot product * Add 1/8 ffn_down layers as Q5_K when no imatrix has been provided * Report the actual bpw * Add _xs mix that is 4.05 bpw for non-MoE models * Remove 
IQ4_XS for now, slightly adjust kvalues_iq4nl * AVX2 dot product uses Q8_0 instead of Q8_K * Add to test-backend-ops * Minor fix * Also use use Q5_K for attn_output in MoE models * Fixes after merging latest master * Switching to blocks of 32 * AVX2 for blocks of 32 * Scaler dot product for blocks of 32 * ARM_NEON dot product for blocks of 32 * Metal kernels for blocks of 32 * Slightly faster Metal kernels * iq4_nl: Fix after merging with master * iq4_nl: another fix after merging with master * Use IQ4_NL instead of Q4_K when using k-quants is not possible * Fix typo that makes several tests fail * It was the ggml_vdotq thing missed inside the brackets --------- Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 1 + ggml-cuda.cu | 98 +++++++++++++- ggml-metal.m | 35 +++++ ggml-metal.metal | 215 +++++++++++++++++++++++++++++- ggml-quants.c | 234 ++++++++++++++++++++++++++++++++- ggml-quants.h | 13 ++ ggml.c | 30 +++++ ggml.h | 2 + llama.cpp | 17 ++- llama.h | 1 + tests/test-backend-ops.cpp | 1 + 11 files changed, 640 insertions(+), 7 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index ea7ba50c9..37520857f 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -32,6 +32,7 @@ static const std::vector QUANT_OPTIONS = { { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, { "Q3_K_M", LLAMA_FTYPE_MOSTLY_Q3_K_M, " 3.07G, +0.2496 ppl @ LLaMA-v1-7B", }, { "Q3_K_L", LLAMA_FTYPE_MOSTLY_Q3_K_L, " 3.35G, +0.1764 ppl @ LLaMA-v1-7B", }, + { "IQ4_NL", LLAMA_FTYPE_MOSTLY_IQ4_NL, " 4.25 bpw non-linear quantization", }, { "Q4_K", LLAMA_FTYPE_MOSTLY_Q4_K_M, "alias for Q4_K_M", }, { "Q4_K_S", LLAMA_FTYPE_MOSTLY_Q4_K_S, " 3.59G, +0.0992 ppl @ LLaMA-v1-7B", }, { "Q4_K_M", LLAMA_FTYPE_MOSTLY_Q4_K_M, " 3.80G, +0.0532 ppl @ LLaMA-v1-7B", }, diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 6caae56b0..e7c211d7d 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -528,6 +528,15 @@ typedef struct { } block_iq1_s; static_assert(sizeof(block_iq1_s) == sizeof(ggml_fp16_t) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); +#define QK4_NL 32 +#define QR4_NL 2 +#define QI4_NL (QK4_NL / (4*QR4_NL)) +typedef struct { + half d; + uint8_t qs[QK4_NL/2]; +} block_iq4_nl; +static_assert(sizeof(block_iq4_nl) == sizeof(ggml_fp16_t) + QK4_NL/2, "wrong iq4_nl block size/padding"); + #define WARP_SIZE 32 #define MATRIX_ROW_PADDING 512 // last row of quant. 
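The patch continues past this excerpt, but the IQ4_NL pieces shown above are already enough to state the format precisely: a block is an fp16 scale plus 16 bytes holding 32 4-bit indices into the non-linear `kvalues_iq4nl` codebook, with low nibbles producing the first 16 weights and high nibbles the last 16, exactly as `dequantize_block_iq4_nl` does per 32-element sub-block. Below is a minimal CPU-side C++ sketch of that dequantization, written against the struct layout in the diff; `fp16_to_fp32` is a placeholder for whatever half-to-float helper the host code has, and `block_iq4_nl_sketch` is a local stand-in, not a ggml type.

```cpp
#include <stdint.h>

#define QK4_NL 32

// the 16-entry non-linear codebook from the patch
static const int8_t kvalues_iq4nl[16] = {
    -127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113,
};

typedef struct {
    uint16_t d;             // per-block scale, stored as an fp16 bit pattern
    uint8_t  qs[QK4_NL/2];  // 32 4-bit codebook indices, packed two per byte
} block_iq4_nl_sketch;

extern float fp16_to_fp32(uint16_t h); // assumed half->float conversion helper

// dequantize one block of 32 weights: y[j] = d * codebook[index_j]
static void dequantize_iq4_nl_block(const block_iq4_nl_sketch * x, float * y) {
    const float d = fp16_to_fp32(x->d);
    for (int j = 0; j < QK4_NL/2; ++j) {
        y[j]            = d * kvalues_iq4nl[x->qs[j] & 0xf]; // low nibble -> first half
        y[j + QK4_NL/2] = d * kvalues_iq4nl[x->qs[j] >> 4];  // high nibble -> second half
    }
}
```

The block is the same 18 bytes per 32 weights as Q4_0; the quality difference comes entirely from the lookup table, which replaces Q4_0's uniform grid of quant levels with 16 hand-tuned, non-uniformly spaced values.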
matrices is a multiple of this to avoid out-of-bounds memory accesses @@ -1987,6 +1996,26 @@ static __global__ void dequantize_block_iq1_s(const void * __restrict__ vx, dst_ } +static const __device__ int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113}; + +template +static __global__ void dequantize_block_iq4_nl(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq4_nl * x = (const block_iq4_nl *) vx + i*(QK_K/QK4_NL); + + const int tid = threadIdx.x; + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 4*il; + const uint8_t * q4 = x[ib].qs + 4*il; + const float d = (float)x[ib].d; + for (int j = 0; j < 4; ++j) { + y[j+ 0] = d * kvalues_iq4nl[q4[j] & 0xf]; + y[j+16] = d * kvalues_iq4nl[q4[j] >> 4]; + } + +} static __global__ void dequantize_mul_mat_vec_q2_k(const void * __restrict__ vx, const float * __restrict__ yy, float * __restrict__ dst, const int ncols, int nrows) { @@ -4732,6 +4761,56 @@ static __device__ __forceinline__ float vec_dot_iq1_s_q8_1( #endif } +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics +static __device__ __forceinline__ void get_int_from_table_16(const uint32_t & q4, const uint8_t * values, + int & val1, int & val2) { + + uint32_t aux32; const uint8_t * q8 = (const uint8_t *)&aux32; + aux32 = q4 & 0x0f0f0f0f; + uint16_t v1 = values[q8[0]] | (values[q8[1]] << 8); + uint16_t v2 = values[q8[2]] | (values[q8[3]] << 8); + val1 = v1 | (v2 << 16); + aux32 = (q4 >> 4) & 0x0f0f0f0f; + v1 = values[q8[0]] | (values[q8[1]] << 8); + v2 = values[q8[2]] | (values[q8[3]] << 8); + val2 = v1 | (v2 << 16); +} +#endif + +static __device__ __forceinline__ float vec_dot_iq4_nl_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { + + const block_iq4_nl * bq = (const block_iq4_nl *) vbq; + +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics + const uint16_t * q4 = (const uint16_t *)bq->qs + 2*iqs; + const int32_t * q8 = (const int32_t *)bq8_1->qs + iqs; + + const uint8_t * values = (const uint8_t *)kvalues_iq4nl; + + int v1, v2; + int sumi1 = 0, sumi2 = 0; + for (int l = 0; l < VDR_Q4_0_Q8_1_MMVQ; ++l) { + const uint32_t aux = q4[2*l] | (q4[2*l+1] << 16); + get_int_from_table_16(aux, values, v1, v2); + sumi1 = __dp4a(v1, q8[l+0], sumi1); + sumi2 = __dp4a(v2, q8[l+4], sumi2); + } + +#else + const uint8_t * q4 = bq->qs + 4*iqs; + const int8_t * q8 = bq8_1->qs + 4*iqs; + + int sumi1 = 0, sumi2 = 0; + for (int l = 0; l < 4*VDR_Q4_0_Q8_1_MMVQ; ++l) { + sumi1 += q8[l+ 0] * kvalues_iq4nl[q4[l] & 0xf]; + sumi2 += q8[l+16] * kvalues_iq4nl[q4[l] >> 4]; + } +#endif + const float d = (float)bq->d * __low2float(bq8_1->ds); + return d * (sumi1 + sumi2); +} + template static __device__ __forceinline__ void mul_mat_q( @@ -6777,6 +6856,12 @@ static void dequantize_row_iq1_s_cuda(const void * vx, dst_t * y, const int k, c dequantize_block_iq1_s<<>>(vx, y); } +template +static void dequantize_row_iq4_nl_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = (k + QK_K - 1) / QK_K; + dequantize_block_iq4_nl<<>>(vx, y); +} + template static void convert_unary_cuda(const void * __restrict__ vx, dst_t * __restrict__ y, const int k, cudaStream_t stream) { const int num_blocks = (k + CUDA_DEQUANTIZE_BLOCK_SIZE - 1) / CUDA_DEQUANTIZE_BLOCK_SIZE; @@ -6818,6 +6903,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) 
{ return dequantize_row_iq3_xxs_cuda; case GGML_TYPE_IQ1_S: return dequantize_row_iq1_s_cuda; + case GGML_TYPE_IQ4_NL: + return dequantize_row_iq4_nl_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -6855,6 +6942,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_iq3_xxs_cuda; case GGML_TYPE_IQ1_S: return dequantize_row_iq1_s_cuda; + case GGML_TYPE_IQ4_NL: + return dequantize_row_iq4_nl_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -8599,6 +8688,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -8623,6 +8713,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_VOLTA ? 128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -8724,6 +8815,10 @@ static void ggml_cuda_op_mul_mat_vec_q( mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; + case GGML_TYPE_IQ4_NL: + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); + break; default: GGML_ASSERT(false); break; @@ -11446,7 +11541,8 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons return false; } ggml_type a_type = a->type; - if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || a_type == GGML_TYPE_IQ1_S) { + if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || + a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL) { if (b->ne[1] == 1 && ggml_nrows(b) > 1) { return false; } diff --git a/ggml-metal.m b/ggml-metal.m index 956e323a0..0d4aa4309 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -62,6 +62,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, GGML_METAL_KERNEL_TYPE_RMS_NORM, GGML_METAL_KERNEL_TYPE_GROUP_NORM, @@ -85,6 +86,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, //GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, @@ -104,6 +106,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, @@ -120,6 +123,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, @@ -136,6 +140,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_ROPE_F32, GGML_METAL_KERNEL_TYPE_ROPE_F16, GGML_METAL_KERNEL_TYPE_ALIBI_F32, @@ -448,6 +453,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_RMS_NORM, rms_norm, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GROUP_NORM, group_norm, ctx->support_simdgroup_reduction); @@ -471,6 +477,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); //GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F16, mul_mv_id_f16_f16, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F16_F32, mul_mv_id_f16_f32, ctx->support_simdgroup_reduction); @@ -490,6 +497,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F16_F32, mul_mm_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_Q4_0_F32, mul_mm_q4_0_f32, ctx->support_simdgroup_mm); @@ -506,6 +514,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F16_F32, mul_mm_id_f16_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_Q4_0_F32, mul_mm_id_q4_0_f32, ctx->support_simdgroup_mm); @@ -522,6 +531,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); 
GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F16, rope_f16, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ALIBI_F32, alibi_f32, true); @@ -1338,6 +1348,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); } @@ -1478,6 +1489,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32].pipeline; } break; + case GGML_TYPE_IQ4_NL: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src0t); @@ -1525,6 +1542,11 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } + else if (src0t == GGML_TYPE_IQ4_NL) { + const int mem_size = 32*sizeof(float); + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else if (src0t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 3)/4, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1619,6 +1641,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); } @@ -1762,6 +1785,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32].pipeline; } break; + case GGML_TYPE_IQ4_NL: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32].pipeline; + } break; default: { GGML_METAL_LOG_ERROR("Asserting on type %d\n", (int)src2t); @@ -1825,6 +1854,11 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } + else if (src2t == GGML_TYPE_IQ4_NL) { + const int mem_size = 32*sizeof(float); + [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; + [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 
3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; + } else if (src2t == GGML_TYPE_Q4_K) { [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 3)/4, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1867,6 +1901,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; + case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL ].pipeline; break; case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; default: GGML_ASSERT(false && "not implemented"); } diff --git a/ggml-metal.metal b/ggml-metal.metal index f0d77d446..c223a981c 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2531,6 +2531,12 @@ typedef struct { uint8_t scales[QK_K/16]; } block_iq1_s; +// Non-linear quants +#define QK4_NL 32 +typedef struct { + half d; + uint8_t qs[QK4_NL/2]; +} block_iq4_nl; //====================================== dot products ========================= @@ -4384,7 +4390,6 @@ void kernel_mul_mv_iq1_s_f32_impl( const uint i13 = im/ne12; const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); - device const block_iq1_s * x = (device const block_iq1_s *) src0 + ib_row + offset0; device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; @@ -4447,6 +4452,103 @@ void kernel_mul_mv_iq1_s_f32_impl( } } +constexpr constant static float kvalues_iq4nl_f[16] = { + -127.f, -104.f, -83.f, -65.f, -49.f, -35.f, -22.f, -10.f, 1.f, 13.f, 25.f, 38.f, 53.f, 69.f, 89.f, 113.f +}; + +void kernel_mul_mv_iq4_nl_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK4_NL; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + const int first_row = (r0 * 2 + sgitg) * 2; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + device const block_iq4_nl * x = (device const block_iq4_nl *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + const int ix = tiisg/2; // 0...15 + const int it = tiisg%2; // 0 or 1 + + shared_values[tiisg] = kvalues_iq4nl_f[tiisg%16]; + threadgroup_barrier(mem_flags::mem_threadgroup); + + float4 yl[4]; + float sumf[2]={0.f}, all_sum; + + device const float * yb = y + ix * QK4_NL + it * 8; + + uint32_t aux32[2]; + thread const uint8_t * q8 = (thread const uint8_t *)aux32; + + float4 qf1, qf2; + + for (int ib = ix; ib < nb; ib += 16) { + + device const float4 * y4 = (device const float4 *)yb; + yl[0] = y4[0]; yl[1] = y4[4]; yl[2] = y4[1]; yl[3] = y4[5]; + + for (int row = 0; row < 2; ++row) { + + device const block_iq4_nl & xb = x[row*nb + ib]; + device const uint16_t * q4 = (device const 
uint16_t *)(xb.qs + 8*it); + + float4 acc1 = {0.f}, acc2 = {0.f}; + + aux32[0] = q4[0] | (q4[1] << 16); + aux32[1] = (aux32[0] >> 4) & 0x0f0f0f0f; + aux32[0] &= 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[0] * qf1; + acc2 += yl[1] * qf2; + + aux32[0] = q4[2] | (q4[3] << 16); + aux32[1] = (aux32[0] >> 4) & 0x0f0f0f0f; + aux32[0] &= 0x0f0f0f0f; + qf1 = {shared_values[q8[0]], shared_values[q8[1]], shared_values[q8[2]], shared_values[q8[3]]}; + qf2 = {shared_values[q8[4]], shared_values[q8[5]], shared_values[q8[6]], shared_values[q8[7]]}; + acc1 += yl[2] * qf1; + acc2 += yl[3] * qf2; + + acc1 += acc2; + + sumf[row] += (float)xb.d * (acc1[0] + acc1[1] + acc1[2] + acc1[3]); + + } + + yb += 16 * QK4_NL; + } + + for (int row = 0; row < 2; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum; + } + } +} + [[host_name("kernel_mul_mv_iq1_s_f32")]] kernel void kernel_mul_mv_iq1_s_f32( device const void * src0, @@ -4475,6 +4577,34 @@ kernel void kernel_mul_mv_iq1_s_f32( kernel_mul_mv_iq1_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, tgpig, tiisg, sgitg); } +[[host_name("kernel_mul_mv_iq4_nl_f32")]] +kernel void kernel_mul_mv_iq4_nl_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq4_nl_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} //============================= templates and their specializations ============================= @@ -4838,6 +4968,21 @@ void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & } } +template +void dequantize_iq4_nl(device const block_iq4_nl * xb, short il, thread type4x4 & reg) { + device const uint16_t * q4 = (device const uint16_t *)xb->qs; + const float d = xb->d; + uint32_t aux32; + thread const uint8_t * q8 = (thread const uint8_t *)&aux32; + for (int i = 0; i < 4; ++i) { + aux32 = ((q4[2*i] | (q4[2*i+1] << 16)) >> 4*il) & 0x0f0f0f0f; + reg[i][0] = d * kvalues_iq4nl_f[q8[0]]; + reg[i][1] = d * kvalues_iq4nl_f[q8[1]]; + reg[i][2] = d * kvalues_iq4nl_f[q8[2]]; + reg[i][3] = d * kvalues_iq4nl_f[q8[3]]; + } +} + template kernel void kernel_get_rows( device const void * src0, @@ -5381,6 +5526,7 @@ template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_r template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_t kernel_get_rows; // // matrix-matrix multiplication @@ -5421,6 
+5567,7 @@ template [[host_name("kernel_mul_mm_iq2_xxs_f32")]] kernel mat_mm_t kernel_mul_m template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; // // indirect matrix-matrix multiplication @@ -5473,6 +5620,7 @@ template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; // // matrix-vector multiplication @@ -6503,3 +6651,68 @@ kernel void kernel_mul_mv_id_iq1_s_f32( tiisg, sgitg); } + +[[host_name("kernel_mul_mv_id_iq4_nl_f32")]] +kernel void kernel_mul_mv_id_iq4_nl_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup float * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq4_nl_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} diff --git a/ggml-quants.c b/ggml-quants.c index 3319d2ccf..6336538f0 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3754,6 +3754,26 @@ void dequantize_row_iq1_s(const block_iq1_s * restrict x, float * restrict y, in } } +static const int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113}; + +void dequantize_row_iq4_nl(const block_iq4_nl * restrict x, float * restrict y, int k) { + assert(k % QK4_NL == 0); + const int nb = k / QK4_NL; + + for (int i = 0; i < nb; i++) { + + const uint8_t * qs = x[i].qs; + + const float d = GGML_FP16_TO_FP32(x[i].d); + for (int j = 0; j < QK4_NL/2; ++j) { + y[j+ 0] = d * kvalues_iq4nl[qs[j] & 0xf]; + y[j+QK4_NL/2] = d * kvalues_iq4nl[qs[j] >> 4]; + } + y += QK4_NL; + qs += QK4_NL/2; + } +} + //===================================== Q8_K ============================================== 
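A note for readers skimming this hunk: the essence of IQ4_NL is that the linear 4-bit mapping used by Q4_0, where a quantized value decodes as x = (q - 8)*d, is replaced by the 16-entry kvalues_iq4nl codebook above. Each nibble selects a non-uniformly spaced codebook value, denser near zero, which is then scaled by the per-block fp16 scale d. The sketch below is not part of the patch; it round-trips one 32-value block, keeping the scale as a plain float and using a linear nearest-neighbour search in place of the best_index_int8 binary search, both simplifications for brevity.

```c
// Standalone sketch (not from the patch): encode/decode one IQ4_NL block.
// The kvalues_iq4nl table and the nibble packing mirror the code above.
#include <math.h>
#include <stdint.h>
#include <stdio.h>

static const int8_t kvalues_iq4nl[16] = {-127, -104, -83, -65, -49, -35, -22, -10, 1, 13, 25, 38, 53, 69, 89, 113};

// nearest codebook index; the patch uses a binary search (best_index_int8)
static int nearest_index(float x) {
    int best = 0;
    for (int i = 1; i < 16; ++i) {
        if (fabsf(x - kvalues_iq4nl[i]) < fabsf(x - kvalues_iq4nl[best])) best = i;
    }
    return best;
}

int main(void) {
    float x[32], y[32];
    for (int j = 0; j < 32; ++j) x[j] = sinf(0.3f*(j + 1)); // toy data

    // initial scale guess, as in quantize_row_iq4_nl_impl: map the
    // max-magnitude input onto the first (most negative) codebook entry
    float amax = 0, max = 0;
    for (int j = 0; j < 32; ++j) {
        if (fabsf(x[j]) > amax) { amax = fabsf(x[j]); max = x[j]; }
    }
    const float d = -max/kvalues_iq4nl[0], id = 1/d;

    // encode: two 4-bit codebook indices per byte, low nibbles hold j = 0..15
    uint8_t qs[16];
    for (int j = 0; j < 16; ++j) {
        qs[j] = (uint8_t)(nearest_index(id*x[j]) | (nearest_index(id*x[j+16]) << 4));
    }

    // decode, mirroring dequantize_row_iq4_nl above
    for (int j = 0; j < 16; ++j) {
        y[j     ] = d * kvalues_iq4nl[qs[j] & 0xf];
        y[j + 16] = d * kvalues_iq4nl[qs[j] >> 4];
    }

    for (int j = 0; j < 4; ++j) printf("%9.5f -> %9.5f\n", x[j], y[j]);
    return 0;
}
```

On real weights the quantizer goes further than this initial guess: quantize_row_iq4_nl_impl, later in this file, refines d by a weighted least-squares fit over several candidate scales before packing the indices.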
void quantize_row_q8_K_reference(const float * restrict x, block_q8_K * restrict y, int k) { @@ -9148,7 +9168,6 @@ void ggml_vec_dot_iq2_xs_q8_K(int n, float * restrict s, size_t bs, const void * #endif } -// TODO void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); assert(nrc == 1); @@ -9452,7 +9471,100 @@ void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const *s = sumf; #endif +} +void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + assert(n % QK4_NL == 0); + static_assert(QK4_NL == QK8_0, "QK4_NL and QK8_0 must be the same"); + + const block_iq4_nl * restrict x = vx; + const block_q8_0 * restrict y = vy; + + const int nb = n / QK4_NL; + +#if defined __ARM_NEON + const int8x16_t values = vld1q_s8(kvalues_iq4nl); + const uint8x16_t m4b = vdupq_n_u8(0x0f); + uint8x16x2_t q4bits; + int8x16x4_t q4b; + int8x16x4_t q8b; + int32x4_t prod_1, prod_2; + + float sumf = 0; + + for (int ib = 0; ib < nb; ib += 2) { + + q4bits.val[0] = vld1q_u8(x[ib+0].qs); + q4bits.val[1] = vld1q_u8(x[ib+1].qs); + q8b.val[0] = vld1q_s8(y[ib+0].qs); + q8b.val[1] = vld1q_s8(y[ib+0].qs + 16); + q8b.val[2] = vld1q_s8(y[ib+1].qs); + q8b.val[3] = vld1q_s8(y[ib+1].qs + 16); + + q4b.val[0] = vqtbl1q_s8(values, vandq_u8(q4bits.val[0], m4b)); + q4b.val[1] = vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); + q4b.val[2] = vqtbl1q_s8(values, vandq_u8(q4bits.val[1], m4b)); + q4b.val[3] = vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); + + prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); + prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); + + sumf += (float)x[ib+0].d * (float)y[ib+0].d * vaddvq_s32(prod_1) + (float)x[ib+1].d * (float)y[ib+1].d * vaddvq_s32(prod_2); + + } + + *s = sumf; + +#elif defined __AVX2__ + + const __m128i values128 = _mm_loadu_si128((const __m128i*)kvalues_iq4nl); + const __m128i m4b = _mm_set1_epi8(0x0f); + const __m256i mone = _mm256_set1_epi16(1); + + __m256 accum1 = _mm256_setzero_ps(); + __m256 accum2 = _mm256_setzero_ps(); + for (int ib = 0; ib < nb; ib += 2) { + const __m128i q4bits_1 = _mm_loadu_si128((const __m128i*)x[0].qs); + const __m128i q4bits_2 = _mm_loadu_si128((const __m128i*)x[1].qs); + const __m256i q8b_1 = _mm256_loadu_si256((const __m256i *)y[0].qs); + const __m256i q8b_2 = _mm256_loadu_si256((const __m256i *)y[1].qs); + const __m256i q4b_1 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_1, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_1, m4b))); + const __m256i q4b_2 = _mm256_set_m128i(_mm_shuffle_epi8(values128, _mm_and_si128(_mm_srli_epi16(q4bits_2, 4), m4b)), + _mm_shuffle_epi8(values128, _mm_and_si128(q4bits_2, m4b))); + const __m256i p16_1 = mul_add_epi8(q4b_1, q8b_1); + const __m256i p16_2 = mul_add_epi8(q4b_2, q8b_2); + const __m256i p_1 = _mm256_madd_epi16(p16_1, mone); + const __m256i p_2 = _mm256_madd_epi16(p16_2, mone); + accum1 = _mm256_fmadd_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[0].d)*GGML_FP16_TO_FP32(x[0].d)), + _mm256_cvtepi32_ps(p_1), accum1); + accum2 = _mm256_fmadd_ps(_mm256_set1_ps(GGML_FP16_TO_FP32(y[1].d)*GGML_FP16_TO_FP32(x[1].d)), + _mm256_cvtepi32_ps(p_2), accum2); + + y += 2; + x 
+= 2; + } + + *s = hsum_float_8(_mm256_add_ps(accum1, accum2)); + +#else + float sumf = 0; + for (int ib = 0; ib < nb; ++ib) { + const float d = GGML_FP16_TO_FP32(y[ib].d)*GGML_FP16_TO_FP32(x[ib].d); + int sumi1 = 0, sumi2 = 0; + for (int j = 0; j < QK4_NL/2; ++j) { + sumi1 += y[ib].qs[j+ 0] * kvalues_iq4nl[x[ib].qs[j] & 0xf]; + sumi2 += y[ib].qs[j+QK4_NL/2] * kvalues_iq4nl[x[ib].qs[j] >> 4]; + } + sumf += d * (sumi1 + sumi2); + } + *s = sumf; +#endif } // ================================ IQ2 quantization ============================================= @@ -10729,3 +10841,123 @@ size_t quantize_iq1_s(const float * src, void * dst, int nrow, int n_per_row, in } return nrow * nblock * sizeof(block_iq1_s); } + +// ============================ 4-bit non-linear quants + +static inline int best_index_int8(int n, const int8_t * val, float x) { + if (x <= val[0]) return 0; + if (x >= val[n-1]) return n-1; + int ml = 0, mu = n-1; + while (mu-ml > 1) { + int mav = (ml+mu)/2; + if (x < val[mav]) mu = mav; else ml = mav; + } + return x - val[mu-1] < val[mu] - x ? mu-1 : mu; +} + +static void quantize_row_iq4_nl_impl(const int block_size, const float * GGML_RESTRICT x, + ggml_fp16_t * dh, uint8_t * q4, + float * weight, uint8_t * L, + const int8_t * values, + const float * quant_weights) { + + const int ntry = 7; + + float sigma2 = 0; + for (int j = 0; j < QK4_NL; ++j) sigma2 += x[j]*x[j]; + sigma2 *= 2.f/QK4_NL; + + const int nb = QK4_NL/block_size; + + memset(q4, 0, QK4_NL/2); + for (int ib = 0; ib < nb; ++ib) { + dh[ib] = GGML_FP32_TO_FP16(0.f); + const float * xb = x + ib*block_size; + if (quant_weights) { + const float * qw = quant_weights + ib*block_size; + for (int j = 0; j < block_size; ++j) weight[j] = qw[j] * sqrtf(sigma2 + xb[j]*xb[j]); + } else { + for (int j = 0; j < block_size; ++j) weight[j] = xb[j]*xb[j]; + } + float amax = 0, max = 0; + for (int j = 0; j < block_size; ++j) { + float ax = fabsf(xb[j]); + if (ax > amax) { + amax = ax; max = xb[j]; + } + } + if (!amax) { + continue; + } + float d = -max/values[0]; + float id = 1/d; + float sumqx = 0, sumq2 = 0; + for (int j = 0; j < block_size; ++j) { + float al = id*xb[j]; + int l = best_index_int8(16, values, al); + float q = values[l]; + float w = weight[j]; + sumqx += w*q*xb[j]; + sumq2 += w*q*q; + } + float best_id = id; + d = sumqx/sumq2; + float best = d*sumqx; + for (int itry = -ntry; itry <= ntry; ++itry) { + id = (itry + values[0])/max; + sumqx = sumq2 = 0; + for (int j = 0; j < block_size; ++j) { + float al = id*xb[j]; + int l = best_index_int8(16, values, al); + float q = values[l]; + float w = weight[j]; + sumqx += w*q*xb[j]; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + d = sumqx/sumq2; best = d * sumqx; + best_id = id; + } + } + dh[ib] = GGML_FP32_TO_FP16(d); + for (int j = 0; j < block_size; ++j) { + L[ib*block_size + j] = best_index_int8(16, values, best_id*xb[j]); + } + } + for (int i = 0; i < QK4_NL/32; ++i) { + for (int j = 0; j < 16; ++j) { + q4[16*i + j] = L[32*i + j] | (L[32*i + 16 + j] << 4); + } + } +} + +size_t quantize_iq4_nl(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK4_NL == 0); + int nblock = n_per_row/QK4_NL; + char * qrow = (char *)dst; + uint8_t L[QK4_NL]; + float weight[32]; + for (int row = 0; row < nrow; ++row) { + block_iq4_nl * iq4 = (block_iq4_nl *)qrow; + for (int ibl = 0; ibl < nblock; ++ibl) { + const float * qw = quant_weights ? 
quant_weights + QK4_NL*ibl : NULL; + quantize_row_iq4_nl_impl(32, src + QK4_NL*ibl, &iq4[ibl].d, iq4[ibl].qs, weight, L, kvalues_iq4nl, qw); + } + src += n_per_row; + qrow += nblock*sizeof(block_iq4_nl); + } + return nrow * nblock * sizeof(block_iq4_nl); +} + +void quantize_row_iq4_nl(const float * restrict x, void * restrict vy, int k) { + assert(k % QK4_NL == 0); + block_iq4_nl * restrict y = vy; + quantize_row_iq4_nl_reference(x, y, k); +} + +void quantize_row_iq4_nl_reference(const float * restrict x, block_iq4_nl * restrict y, int k) { + assert(k % QK4_NL == 0); + quantize_iq4_nl(x, y, 1, k, NULL, NULL); +} + diff --git a/ggml-quants.h b/ggml-quants.h index ad381cfab..113623b62 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -198,6 +198,14 @@ typedef struct { } block_iq1_s; static_assert(sizeof(block_iq1_s) == sizeof(ggml_fp16_t) + QK_K/8 + QK_K/16, "wrong iq1_s block size/padding"); +// Non-linear quants +#define QK4_NL 32 +typedef struct { + ggml_fp16_t d; + uint8_t qs[QK4_NL/2]; +} block_iq4_nl; +static_assert(sizeof(block_iq4_nl) == sizeof(ggml_fp16_t) + QK4_NL/2, "wrong iq4_nl block size/padding"); + #ifdef __cplusplus extern "C" { #endif @@ -217,6 +225,7 @@ void quantize_row_q5_K_reference(const float * GGML_RESTRICT x, block_q5_K * GGM void quantize_row_q6_K_reference(const float * GGML_RESTRICT x, block_q6_K * GGML_RESTRICT y, int k); void quantize_row_q8_K_reference(const float * GGML_RESTRICT x, block_q8_K * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs_reference(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int k); +void quantize_row_iq4_nl_reference (const float * GGML_RESTRICT x, block_iq4_nl * GGML_RESTRICT y, int k); void quantize_row_q4_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_q4_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); @@ -232,6 +241,7 @@ void quantize_row_q5_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, in void quantize_row_q6_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_iq4_nl (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); @@ -251,6 +261,7 @@ void dequantize_row_iq2_xxs(const block_iq2_xxs * GGML_RESTRICT x, float * GGML_ void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq4_nl (const block_iq4_nl * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); @@ -268,6 +279,7 @@ void ggml_vec_dot_iq2_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void 
ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq4_nl_q8_0 (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // // Quantization utilizing an importance matrix (a.k.a. "Activation aWare Quantization") @@ -276,6 +288,7 @@ size_t quantize_iq2_xxs(const float * src, void * dst, int nrows, int n_per_row, size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq3_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq1_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq4_nl (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q4_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); diff --git a/ggml.c b/ggml.c index d129df505..91adbb0ae 100644 --- a/ggml.c +++ b/ggml.c @@ -690,6 +690,18 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_K, .nrows = 1, }, + [GGML_TYPE_IQ4_NL] = { + .type_name = "iq4_nl", + .blck_size = QK4_NL, + .type_size = sizeof(block_iq4_nl), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq4_nl, + .from_float = quantize_row_iq4_nl, + .from_float_reference = (ggml_from_float_t)quantize_row_iq4_nl_reference, + .vec_dot = ggml_vec_dot_iq4_nl_q8_0, + .vec_dot_type = GGML_TYPE_Q8_0, + .nrows = 1, + }, [GGML_TYPE_Q8_K] = { .type_name = "q8_K", .blck_size = QK_K, @@ -2291,6 +2303,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ2_XS: wtype = GGML_TYPE_IQ2_XS; break; case GGML_FTYPE_MOSTLY_IQ3_XXS: wtype = GGML_TYPE_IQ3_XXS; break; case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; + case GGML_FTYPE_MOSTLY_IQ4_NL: wtype = GGML_TYPE_IQ4_NL; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7702,6 +7715,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: { ggml_compute_forward_add_q_f32(params, src0, src1, dst); } break; @@ -7970,6 +7984,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: { ggml_compute_forward_add1_q_f32(params, src0, src1, dst); } break; @@ -8091,6 +8106,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: default: { GGML_ASSERT(false); @@ -10858,6 +10874,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: { ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); } break; @@ -11039,6 +11056,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: default: { 
GGML_ASSERT(false); @@ -11237,6 +11255,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: { ggml_compute_forward_get_rows_q(params, src0, src1, dst); } break; @@ -11911,6 +11930,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -11989,6 +12009,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: + case GGML_TYPE_IQ4_NL: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -19455,6 +19476,15 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq1_s(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; + case GGML_TYPE_IQ4_NL: + { + GGML_ASSERT(start % QK4_NL == 0); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq4_nl(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); + } break; case GGML_TYPE_F16: { size_t elemsize = sizeof(ggml_fp16_t); diff --git a/ggml.h b/ggml.h index 004d09c70..bed7a36a0 100644 --- a/ggml.h +++ b/ggml.h @@ -355,6 +355,7 @@ extern "C" { GGML_TYPE_IQ2_XS = 17, GGML_TYPE_IQ3_XXS = 18, GGML_TYPE_IQ1_S = 19, + GGML_TYPE_IQ4_NL = 20, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -393,6 +394,7 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ2_XS = 16, // except 1d tensors GGML_FTYPE_MOSTLY_IQ3_XXS = 17, // except 1d tensors GGML_FTYPE_MOSTLY_IQ1_S = 18, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ4_NL = 19, // except 1d tensors }; // available tensor operations: diff --git a/llama.cpp b/llama.cpp index 4296eca32..3748d5eac 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2527,6 +2527,7 @@ struct llama_model_loader { case GGML_TYPE_IQ2_XS: ftype = LLAMA_FTYPE_MOSTLY_IQ2_XS; break; case GGML_TYPE_IQ3_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ3_XXS; break; case GGML_TYPE_IQ1_S: ftype = LLAMA_FTYPE_MOSTLY_IQ1_S; break; + case GGML_TYPE_IQ4_NL: ftype = LLAMA_FTYPE_MOSTLY_IQ4_NL; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -2877,6 +2878,7 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_Q3_K_XS:return "Q3_K - Extra small"; case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ1_S :return "IQ1_S - 1.5625 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ4_NL: return "IQ4_NL - 4.5 bpw"; default: return "unknown, may not work"; } @@ -10354,6 +10356,9 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty new_type = qs.i_attention_wv < 2 ? 
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL && qs.model.hparams.n_gqa() >= 4) { + new_type = GGML_TYPE_Q5_K; + } else if ((ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) && use_more_bits(qs.i_attention_wv, qs.n_attention_wv)) new_type = GGML_TYPE_Q6_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && qs.i_attention_wv < 4) new_type = GGML_TYPE_Q5_K; @@ -10406,6 +10411,9 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; } } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL && !qs.has_imatrix) { + if (i_layer < n_layer/8) new_type = GGML_TYPE_Q5_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M && use_more_bits(i_layer, n_layer)) new_type = GGML_TYPE_Q6_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S && arch != LLM_ARCH_FALCON && i_layer < n_layer/8) { new_type = GGML_TYPE_Q5_K; @@ -10422,7 +10430,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (arch != LLM_ARCH_FALCON) { if (qs.model.hparams.n_expert == 8) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || - ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || + ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { new_type = GGML_TYPE_Q5_K; } @@ -10489,8 +10497,8 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: - case GGML_TYPE_Q2_K: new_type = GGML_TYPE_Q4_0; break; - case GGML_TYPE_Q3_K: new_type = GGML_TYPE_Q4_1; break; + case GGML_TYPE_Q2_K: + case GGML_TYPE_Q3_K: new_type = GGML_TYPE_IQ4_NL; break; case GGML_TYPE_Q4_K: new_type = GGML_TYPE_Q5_0; break; case GGML_TYPE_Q5_K: new_type = GGML_TYPE_Q5_1; break; case GGML_TYPE_Q6_K: new_type = GGML_TYPE_Q8_0; break; @@ -10531,7 +10539,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_IQ2_XXS: quantized_type = GGML_TYPE_IQ2_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ2_XS: quantized_type = GGML_TYPE_IQ2_XS; break; case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; - case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S ; break; + case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S; break; + case LLAMA_FTYPE_MOSTLY_IQ4_NL: quantized_type = GGML_TYPE_IQ4_NL; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } diff --git a/llama.h b/llama.h index 77a84c18a..8ba20696f 100644 --- a/llama.h +++ b/llama.h @@ -101,6 +101,7 @@ extern "C" { LLAMA_FTYPE_MOSTLY_Q3_K_XS = 22, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ3_XXS = 23, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ1_S = 24, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ4_NL = 25, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index ef37c5af2..55db42bf6 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1918,6 +1918,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op GGML_TYPE_Q6_K, GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, GGML_TYPE_IQ3_XXS, GGML_TYPE_IQ1_S, + 
GGML_TYPE_IQ4_NL,
     };
 
     // unary ops

From 88c46cbdac05cebd936511b1d3c74112e721615f Mon Sep 17 00:00:00 2001
From: "Meng, Hengyu"
Date: Wed, 21 Feb 2024 17:52:06 +0800
Subject: [PATCH 822/859] [SYCL] context add name (#5624)

* [SYCL] context add name

* name should start with SYCL*
---
 ggml-sycl.cpp | 26 ++++++++++++--------------
 1 file changed, 12 insertions(+), 14 deletions(-)

diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp
index df1826112..b897828f9 100644
--- a/ggml-sycl.cpp
+++ b/ggml-sycl.cpp
@@ -14642,7 +14642,8 @@ GGML_CALL static const char * ggml_backend_sycl_buffer_type_name(ggml_backend_bu
 static ggml_backend_buffer_t
 ggml_backend_sycl_buffer_type_alloc_buffer(ggml_backend_buffer_type_t buft,
                                            size_t size) try {
-    int device = (int) (intptr_t) buft->context;
+    ggml_backend_sycl_buffer_type_context * buft_ctx = (ggml_backend_sycl_buffer_type_context *)buft->context;
+    int device = (int) buft_ctx->device;
 
     ggml_sycl_set_device(device);
     int device_index = get_device_index_by_id(device);
@@ -14720,7 +14721,7 @@ ggml_backend_buffer_type_t ggml_backend_sycl_buffer_type(int device) {
         for (int i = 0; i < GGML_SYCL_MAX_DEVICES; i++) {
             ggml_backend_sycl_buffer_types[i] = {
                 /* .iface   = */ ggml_backend_sycl_buffer_type_interface,
-                /* .context = */ (ggml_backend_buffer_type_context_t) (intptr_t) i,
+                /* .context = */ new ggml_backend_sycl_buffer_type_context{i, GGML_SYCL_NAME + std::to_string(i)},
             };
         }
         ggml_backend_sycl_buffer_type_initialized = true;
@@ -14782,10 +14783,6 @@ ggml_backend_buffer_type_t ggml_backend_sycl_host_buffer_type() {
 
 // backend
 
-struct ggml_backend_context_sycl {
-    int device;
-};
-
 static const char * ggml_backend_sycl_name(ggml_backend_t backend) {
 
     return GGML_SYCL_NAME;
@@ -14793,14 +14790,14 @@ static const char * ggml_backend_sycl_name(ggml_backend_t backend) {
 }
 
 static void ggml_backend_sycl_free(ggml_backend_t backend) {
-    ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context;
+    ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context;
     delete sycl_ctx;
     delete backend;
 }
 
 static ggml_backend_buffer_type_t ggml_backend_sycl_get_default_buffer_type(ggml_backend_t backend) {
-    ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context;
+    ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context;
     return ggml_backend_sycl_buffer_type(sycl_ctx->device);
 }
 
@@ -14809,7 +14806,7 @@ static void ggml_backend_sycl_set_tensor_async(ggml_backend_t backend,
                                                ggml_tensor *tensor,
                                                const void *data, size_t offset,
                                                size_t size) try {
-    ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context;
+    ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context;
 
     GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type");
     GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU);
@@ -14827,7 +14824,7 @@ static void ggml_backend_sycl_get_tensor_async(ggml_backend_t backend,
                                                const ggml_tensor *tensor,
                                                void *data, size_t offset,
                                                size_t size) try {
-    ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context;
+    ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context;
 
     GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type");
     GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU);
@@ -14842,7 +14839,7 @@ catch (sycl::exception const &exc) {
 }
 
 static void ggml_backend_sycl_synchronize(ggml_backend_t backend) try {
-    ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context;
+    ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context;
 
     SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->wait()));
 
@@ -14878,7 +14875,7 @@ static void ggml_backend_sycl_graph_plan_compute(ggml_backend_t backend, ggml_ba
 }
 
 static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph * cgraph) {
-    ggml_backend_context_sycl * sycl_ctx = (ggml_backend_context_sycl *)backend->context;
+    ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context;
 
     ggml_sycl_set_main_device(sycl_ctx->device);
 
@@ -15092,8 +15089,9 @@ ggml_backend_t ggml_backend_sycl_init(int device) {
     // not strictly necessary, but it may reduce the overhead of the first graph_compute
     ggml_sycl_set_main_device(device);
 
-    ggml_backend_context_sycl * ctx = new ggml_backend_context_sycl {
-        /* .device = */ device
+    ggml_backend_sycl_context * ctx = new ggml_backend_sycl_context {
+        /* .device = */ device,
+        /* .name   = */ GGML_SYCL_NAME + std::to_string(device),
     };
 
     ggml_backend_t sycl_backend = new ggml_backend {

From 580111d42b3b6ad0a390bfb267d6e3077506eb31 Mon Sep 17 00:00:00 2001
From: postmasters
Date: Wed, 21 Feb 2024 05:08:22 -0800
Subject: [PATCH 823/859] llama : add `gemma` model (#5631)

There are a couple of things in this architecture:

1. Shared input and output embedding parameters.
2. Key length and value length are not derived from `n_embd`.

More information about the models can be found at https://ai.google.dev/gemma.
GGUFs can be downloaded from https://huggingface.co/google.
---
 README.md                 |   1 +
 gguf-py/gguf/constants.py |  15 ++++
 llama.cpp                 | 170 ++++++++++++++++++++++++++++++++++++++
 3 files changed, 186 insertions(+)

diff --git a/README.md b/README.md
index 747d2e98b..225db8e49 100644
--- a/README.md
+++ b/README.md
@@ -107,6 +107,7 @@ Typically finetunes of the base models below are supported as well.
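A quick numeric aside on point 2 of the Gemma commit message above: in most architectures in llama.cpp the per-head size is implied by n_embd/n_head, while Gemma declares the key/value head size (n_embd_head_k) in its metadata, and the patch sizes wq and wo from it explicitly (note the n_embd_head_k * hparams.n_head dimensions in the llm_load_tensors hunk below). The concrete numbers in this sketch are hypothetical 7B-like values, assumed for illustration rather than taken from the patch.

```c
// Illustration only: the values below are assumptions, not patch data.
#include <stdio.h>

int main(void) {
    const int n_embd        = 3072; // assumed hidden size
    const int n_head        = 16;   // assumed attention head count
    const int n_embd_head_k = 256;  // declared in GGUF metadata, not derived

    printf("derived head size (n_embd/n_head): %d\n", n_embd/n_head);  // 192
    printf("declared head size (n_embd_head_k): %d\n", n_embd_head_k); // 256
    // so wq is n_embd x (n_embd_head_k * n_head) = 3072 x 4096, wider than
    // the residual stream, and wo maps those 4096 columns back to 3072
    printf("wq: %d x %d\n", n_embd, n_embd_head_k * n_head);
    return 0;
}
```

Point 1 of the message, the shared input/output embedding, shows up at the end of build_gemma below: the output logits are computed by reusing model.tok_embd in ggml_mul_mat rather than loading a separate output matrix.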
- [x] [Orion 14B](https://github.com/ggerganov/llama.cpp/pull/5118) - [x] [InternLM2](https://huggingface.co/models?search=internlm2) - [x] [CodeShell](https://github.com/WisdomShell/codeshell) +- [x] [Gemma](https://ai.google.dev/gemma) **Multimodal models:** diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py index 114a9a974..8f9139d1b 100644 --- a/gguf-py/gguf/constants.py +++ b/gguf-py/gguf/constants.py @@ -111,6 +111,7 @@ class MODEL_ARCH(IntEnum): ORION = auto() INTERNLM2 = auto() MINICPM = auto() + GEMMA = auto() class MODEL_TENSOR(IntEnum): @@ -167,6 +168,7 @@ MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = { MODEL_ARCH.ORION: "orion", MODEL_ARCH.INTERNLM2: "internlm2", MODEL_ARCH.MINICPM: "minicpm", + MODEL_ARCH.GEMMA: "gemma", } TENSOR_NAMES: dict[MODEL_TENSOR, str] = { @@ -511,6 +513,19 @@ MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = { MODEL_TENSOR.FFN_DOWN_EXP, MODEL_TENSOR.FFN_UP_EXP, ], + MODEL_ARCH.GEMMA: [ + MODEL_TENSOR.TOKEN_EMBD, + MODEL_TENSOR.OUTPUT_NORM, + MODEL_TENSOR.ATTN_NORM, + MODEL_TENSOR.ATTN_Q, + MODEL_TENSOR.ATTN_K, + MODEL_TENSOR.ATTN_V, + MODEL_TENSOR.ATTN_OUT, + MODEL_TENSOR.FFN_GATE, + MODEL_TENSOR.FFN_DOWN, + MODEL_TENSOR.FFN_UP, + MODEL_TENSOR.FFN_NORM, + ], # TODO } diff --git a/llama.cpp b/llama.cpp index 3748d5eac..3a226c426 100644 --- a/llama.cpp +++ b/llama.cpp @@ -208,6 +208,7 @@ enum llm_arch { LLM_ARCH_ORION, LLM_ARCH_INTERNLM2, LLM_ARCH_MINICPM, + LLM_ARCH_GEMMA, LLM_ARCH_UNKNOWN, }; @@ -234,6 +235,7 @@ static std::map LLM_ARCH_NAMES = { { LLM_ARCH_ORION, "orion" }, { LLM_ARCH_INTERNLM2, "internlm2" }, { LLM_ARCH_MINICPM, "minicpm" }, + { LLM_ARCH_GEMMA, "gemma" }, }; enum llm_kv { @@ -760,6 +762,22 @@ static std::map> LLM_TENSOR_NAMES = { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" }, }, }, + { + LLM_ARCH_GEMMA, + { + { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, + { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, + { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, + { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" }, + { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" }, + { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" }, + { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" }, + { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, + { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" }, + { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" }, + { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" }, + }, + }, { LLM_ARCH_UNKNOWN, { @@ -3243,6 +3261,16 @@ static void llm_load_hparams( default: model.type = e_model::MODEL_UNKNOWN; } } break; + case LLM_ARCH_GEMMA: + { + ml.get_key(LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, hparams.f_norm_rms_eps); + + switch (hparams.n_layer) { + case 18: model.type = e_model::MODEL_2B; break; + case 28: model.type = e_model::MODEL_7B; break; + default: model.type = e_model::MODEL_UNKNOWN; + } + } break; default: (void)0; } @@ -4360,6 +4388,37 @@ static bool llm_load_tensors( layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); } } break; + case LLM_ARCH_GEMMA: + { + model.tok_embd = ml.create_tensor(ctx_input, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + + // output + model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + + const int64_t n_ff = hparams.n_ff; + const int64_t n_embd_head_k = hparams.n_embd_head_k; + const int64_t n_embd_k_gqa = hparams.n_embd_k_gqa(); + const int64_t n_embd_v_gqa = hparams.n_embd_v_gqa(); + + for (uint32_t i = 0; i < n_layer; ++i) { + ggml_context * ctx_layer = ctx_for_layer(i); + ggml_context * ctx_split = ctx_for_layer_split(i); + + auto & layer = model.layers[i]; + + 
layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + + layer.wq = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_Q, "weight", i), {n_embd, n_embd_head_k * hparams.n_head}); + layer.wk = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_K, "weight", i), {n_embd, n_embd_k_gqa}); + layer.wv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_V, "weight", i), {n_embd, n_embd_v_gqa}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd_head_k * hparams.n_head, n_embd}); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_gate = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_GATE, "weight", i), {n_embd, n_ff}); + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); + } + } break; default: throw std::runtime_error("unknown architecture"); } @@ -7366,6 +7425,113 @@ struct llm_build_context { return gf; } + + struct ggml_cgraph * build_gemma() { + struct ggml_cgraph * gf = ggml_new_graph_custom(ctx0, LLAMA_MAX_NODES, false); + + const int64_t n_embd_head_k = hparams.n_embd_head_k; + + struct ggml_tensor * cur; + struct ggml_tensor * inpL; + + inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); + cb(inpL, "inp_embd", -1); + inpL = ggml_scale(ctx0, inpL, sqrtf(n_embd)); + cb(inpL, "inp_scaled", -1); + + // inp_pos - contains the positions + struct ggml_tensor * inp_pos = ggml_view_1d(ctx0, lctx.inp_pos, n_tokens, 0); + cb(inp_pos, "inp_pos", -1); + + // KQ_mask (mask for 1 head, it will be broadcasted to all heads) + struct ggml_tensor * KQ_mask = ggml_view_2d(ctx0, lctx.inp_KQ_mask, n_kv, n_tokens, n_kv*ggml_type_size(lctx.inp_KQ_mask->type), 0); + cb(KQ_mask, "KQ_mask", -1); + + // shift the entire K-cache if needed + if (do_rope_shift) { + llm_build_k_shift(ctx0, hparams, cparams, kv_self, gf, lctx.inp_K_shift, LLM_ROPE, n_ctx, freq_base, freq_scale, cb); + } + + for (int il = 0; il < n_layer; ++il) { + + // norm + cur = llm_build_norm(ctx0, inpL, hparams, + model.layers[il].attn_norm, NULL, + LLM_NORM_RMS, cb, il); + cb(cur, "attn_norm", il); + + // self-attention + { + // compute Q and K and RoPE them + struct ggml_tensor * Qcur = ggml_mul_mat(ctx0, model.layers[il].wq, cur); + cb(Qcur, "Qcur", il); + + struct ggml_tensor * Kcur = ggml_mul_mat(ctx0, model.layers[il].wk, cur); + cb(Kcur, "Kcur", il); + + struct ggml_tensor * Vcur = ggml_mul_mat(ctx0, model.layers[il].wv, cur); + cb(Vcur, "Vcur", il); + + Qcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Qcur, n_embd_head_k, n_head, n_tokens), inp_pos, + n_embd_head_k, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow); + cb(Qcur, "Qcur", il); + Qcur = ggml_scale(ctx0, Qcur, 1.0f / sqrtf(float(n_embd_head_k))); + cb(Qcur, "Qcur_scaled", il); + + Kcur = ggml_rope_custom( + ctx0, ggml_reshape_3d(ctx0, Kcur, n_embd_head_k, n_head_kv, n_tokens), inp_pos, + n_embd_head_k, 2, 0, n_orig_ctx, freq_base, freq_scale, + ext_factor, attn_factor, beta_fast, beta_slow); + cb(Kcur, "Kcur", il); + + cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, + model.layers[il].wo, NULL, + Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); + cb(cur, "kqv_out", il); + } + struct ggml_tensor * sa_out = ggml_add(ctx0, cur, inpL); + cb(sa_out, "sa_out", il); + + cur = 
llm_build_norm(ctx0, sa_out, hparams,
+                    model.layers[il].ffn_norm, NULL,
+                    LLM_NORM_RMS, cb, il);
+            cb(cur, "ffn_norm", il);
+
+            // feed-forward network
+            {
+                cur = llm_build_ffn(ctx0, cur,
+                        model.layers[il].ffn_up, NULL,
+                        model.layers[il].ffn_gate, NULL,
+                        model.layers[il].ffn_down, NULL,
+                        NULL,
+                        LLM_FFN_GELU, LLM_FFN_PAR, cb, il);
+                cb(cur, "ffn_out", il);
+            }
+
+            cur = ggml_add(ctx0, cur, sa_out);
+            cb(cur, "l_out", il);
+
+            // input for next layer
+            inpL = cur;
+        }
+
+        cur = inpL;
+
+        cur = llm_build_norm(ctx0, cur, hparams,
+                model.output_norm, NULL,
+                LLM_NORM_RMS, cb, -1);
+        cb(cur, "result_norm", -1);
+
+        // lm_head
+        cur = ggml_mul_mat(ctx0, model.tok_embd, cur);
+        cb(cur, "result_output", -1);
+
+        ggml_build_forward_expand(gf, cur);
+
+        return gf;
+    }
 };
 
 static struct ggml_cgraph * llama_build_graph(
@@ -7474,6 +7640,10 @@ static struct ggml_cgraph * llama_build_graph(
             {
                 result = llm.build_minicpm();
             } break;
+        case LLM_ARCH_GEMMA:
+            {
+                result = llm.build_gemma();
+            } break;
         default:
             GGML_ASSERT(false);
     }
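A note on the Gemma graph above, based on the diff itself: `build_gemma` scales the token embeddings by `sqrt(n_embd)` immediately after lookup, folds the usual `1/sqrt(n_embd_head_k)` attention factor into `Qcur` (which is why `llm_build_kv` is passed a KQ scale of `1.0f`), and builds the lm_head from `model.tok_embd`, i.e. the output projection is tied to the token embeddings. A minimal standalone C sketch of the two scaling details follows; the sizes are illustrative assumptions (Gemma-2B-like), not values read from the patch:

```c
#include <math.h>
#include <stdio.h>

int main(void) {
    // assumed Gemma 2B-like sizes, for illustration only
    const float n_embd        = 2048.0f;
    const float n_embd_head_k = 256.0f;

    // build_gemma: inpL = ggml_scale(ctx0, inpL, sqrtf(n_embd));
    const float embd     = 0.25f;                // one embedding entry
    const float embd_out = embd * sqrtf(n_embd); // what the first layer actually sees

    // pre-scaling Q is equivalent to scaling the K*Q logits afterwards,
    // which is why llm_build_kv receives a KQ scale of 1.0f here
    const float q = 1.5f, k = -0.75f;
    const float pre  = (q / sqrtf(n_embd_head_k)) * k; // Gemma: Qcur scaled up front
    const float post = (q * k) / sqrtf(n_embd_head_k); // other archs: KQ scaled later
    printf("embd_out = %f, pre = %f, post = %f\n", embd_out, pre, post); // pre == post
    return 0;
}
```

Pre-scaling Q yields the same attention logits as scaling the K*Q product, while touching the smaller Q tensor instead of the full attention matrix.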
From cc6cac08e38e32bf40bbe07e9e8f8f0130b5fd94 Mon Sep 17 00:00:00 2001
From: Daniel Bevenius
Date: Wed, 21 Feb 2024 14:36:57 +0100
Subject: [PATCH 824/859] llava : add --skip-unknown to 1.6 convert.py (#5632)

This commit adds the `--skip-unknown` option to the convert.py script
and removes the saving of the updated checkpoints, to avoid modifying
files that may be checked out in version control.

The motivation for this change is that the same was done for 1.5 in
commit fc0c8d286a533363a9a663510b62af85ffad58b3 ("llava : update
surgery script to not remove tensors"), and it makes the examples
more consistent.

Signed-off-by: Daniel Bevenius
---
 examples/llava/README.md           | 13 ++++++-------
 examples/llava/llava-surgery-v2.py | 12 ------------
 2 files changed, 6 insertions(+), 19 deletions(-)

diff --git a/examples/llava/README.md b/examples/llava/README.md
index 25ea96715..35e6d9e5d 100644
--- a/examples/llava/README.md
+++ b/examples/llava/README.md
@@ -63,13 +63,12 @@ Now both the LLaMA part and the image encoder are in the `llava-v1.5-7b` directory
 ```console
 git clone https://huggingface.co/liuhaotian/llava-v1.6-vicuna-7b
 ```
-2) Backup your pth/safetensor model files as llava-surgery modifies them
-3) Use `llava-surgery-v2.py` which also supports llava-1.5 variants pytorch as well as safetensor models:
+2) Use `llava-surgery-v2.py`, which also supports llava-1.5 variants, in both pytorch and safetensor formats:
 ```console
 python examples/llava/llava-surgery-v2.py -C -m ../llava-v1.6-vicuna-7b/
 ```
 - you will find a llava.projector and a llava.clip file in your model directory
-4) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory:
+3) Copy the llava.clip file into a subdirectory (like vit), rename it to pytorch_model.bin and add a fitting vit configuration to the directory:
 ```console
 mkdir vit
 cp ../llava-v1.6-vicuna-7b/llava.clip vit/pytorch_model.bin
 cp ../llava-v1.6-vicuna-7b/llava.projector vit/
 curl -s -q https://huggingface.co/cmp-nct/llava-1.6-gguf/raw/main/config_vit.json -o vit/config.json
 ```
 
-5) Create the visual gguf model:
+4) Create the visual gguf model:
 ```console
 python ./examples/llava/convert-image-encoder-to-gguf.py -m vit --llava-projector vit/llava.projector --output-dir vit --clip-model-is-vision
 ```
 - This is similar to llava-1.5; the difference is that we tell the encoder that we are working with the pure vision model part of CLIP
-6) Then convert the model to gguf format:
+5) Then convert the model to gguf format:
 ```console
-python ./convert.py ../llava-v1.6-vicuna-7b/
+python ./convert.py ../llava-v1.6-vicuna-7b/ --skip-unknown
 ```
-7) And finally we can run the llava-cli using the 1.6 model version:
+6) And finally we can run the llava-cli using the 1.6 model version:
 ```console
 ./llava-cli -m ../llava-v1.6-vicuna-7b/ggml-model-f16.gguf --mmproj vit/mmproj-model-f16.gguf --image some-image.jpg -c 4096
 ```
diff --git a/examples/llava/llava-surgery-v2.py b/examples/llava/llava-surgery-v2.py
index 5bc5bc513..eb56d6988 100644
--- a/examples/llava/llava-surgery-v2.py
+++ b/examples/llava/llava-surgery-v2.py
@@ -65,9 +65,7 @@ def clean_vision_tower_from_checkpoint(checkpoint_path):
         for name in clip_tensors:
             del checkpoint[name]
 
-        # Save the updated checkpoint
         checkpoint_path = checkpoint_path
-        save_model(checkpoint, checkpoint_path, file_type)
         return True
     return False
 
@@ -152,16 +150,6 @@ for name in first_mm_tensors:
 if len(projector) > 0:
     save_model(projector, f"{args.model}/llava.projector", 'pytorch')
 
-for name in mm_tensors:
-    del last_checkpoint[name]
-for name in first_mm_tensors:
-    del first_checkpoint[name]
-
-if len(mm_tensors) > 0:
-    save_model(last_checkpoint, projector_checkpoint_path, file_type)
-if len(first_mm_tensors) > 0:
-    save_model(first_checkpoint, newline_checkpoint_path, file_type)
-
 print("Done!")
 print(f"Now you can convert {args.model} to a regular LLaMA GGUF file.")
 print(f"Also, use {args.model}/llava.projector to prepare a llava-encoder.gguf file.")

From c14f72db9c62d71d35eb1c141745c0bd0cb27b49 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Wed, 21 Feb 2024 15:39:54 +0200
Subject: [PATCH 825/859] readme : update hot topics

---
 README.md | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 225db8e49..ce5dec7ca 100644
--- a/README.md
+++ b/README.md
@@ -10,13 +10,8 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others)
 
 ### Hot topics
 
-- Remove LLAMA_MAX_DEVICES and LLAMA_SUPPORTS_GPU_OFFLOAD: https://github.com/ggerganov/llama.cpp/pull/5240
-- Incoming backends: https://github.com/ggerganov/llama.cpp/discussions/5138
-  - [SYCL backend](README-sycl.md) is ready (1/28/2024), support Linux/Windows in Intel GPUs (iGPU, Arc/Flex/Max series)
-- New SOTA quantized models, including pure 2-bits: https://huggingface.co/ikawrakow
-- Collecting Apple Silicon performance stats:
-  - M-series: https://github.com/ggerganov/llama.cpp/discussions/4167
-  - A-series: https://github.com/ggerganov/llama.cpp/discussions/4508
+- Support for Gemma models: https://github.com/ggerganov/llama.cpp/pull/5631
+- Non-linear quantization IQ4_NL: https://github.com/ggerganov/llama.cpp/pull/5590
 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216
 
 ----

From eccd7a26ddbff19e4b8805648f5f14c501957859 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Wed, 21 Feb 2024 16:17:10 +0200
Subject: [PATCH 826/859] sync : ggml (#5633)

* ggml : fix conv_2d batch mode (ggml/737)

Co-authored-by: bssrdf

* ggml : compute forward no longer pass src tensors (ggml/729)

* sync : ggml

ggml-ci

---------

Co-authored-by: bssrdf
Co-authored-by: bssrdf
---
 ggml.c                 | 1150 +++++++++++++++++++++++++---------------
 scripts/sync-ggml.last |    2 +-
 2 files changed, 711 insertions(+), 441 deletions(-)

diff --git a/ggml.c b/ggml.c
index 91adbb0ae..5b9fa741a 100644
--- a/ggml.c
+++ b/ggml.c
@@ -5644,7 +5644,9 @@ struct ggml_tensor * ggml_conv_2d(
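 // (editor's note on the hunk below, grounded in the diff's own shape comments)
 // the im2col-based matmul produces data laid out as [OC, N, OH, OW], so for batched
 // inputs the result must first be reshaped to that layout and then permuted to the
 // expected [N, OC, OH, OW]; the previous single reshape was only correct for N == 1.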
ggml_reshape_2d(ctx, im2col, im2col->ne[0], im2col->ne[3] * im2col->ne[2] * im2col->ne[1]), // [N, OH, OW, IC * KH * KW] => [N*OH*OW, IC * KH * KW] ggml_reshape_2d(ctx, a, (a->ne[0] * a->ne[1] * a->ne[2]), a->ne[3])); // [OC,IC, KH, KW] => [OC, IC * KH * KW] - result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], a->ne[3], im2col->ne[3]); // [N, OC, OH, OW] + result = ggml_reshape_4d(ctx, result, im2col->ne[1], im2col->ne[2], im2col->ne[3], a->ne[3]); // [OC, N, OH, OW] + result = ggml_cont(ctx, ggml_permute(ctx, result, 0, 1, 3, 2)); // [N, OC, OH, OW] + return result; } @@ -6650,8 +6652,10 @@ void ggml_set_param( static void ggml_compute_forward_dup_same_cont( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); GGML_ASSERT(src0->type == dst->type); @@ -6682,8 +6686,10 @@ static void ggml_compute_forward_dup_same_cont( } static void ggml_compute_forward_dup_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -6696,7 +6702,7 @@ static void ggml_compute_forward_dup_f16( const int nth = params->nth; // number of threads if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst) && src0->type == dst->type) { - ggml_compute_forward_dup_same_cont(params, src0, dst); + ggml_compute_forward_dup_same_cont(params, dst); return; } @@ -6953,8 +6959,10 @@ static void ggml_compute_forward_dup_f16( static void ggml_compute_forward_dup_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -6967,7 +6975,7 @@ static void ggml_compute_forward_dup_f32( const int nth = params->nth; // number of threads if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst) && src0->type == dst->type) { - ggml_compute_forward_dup_same_cont(params, src0, dst); + ggml_compute_forward_dup_same_cont(params, dst); return; } @@ -7203,8 +7211,10 @@ static void ggml_compute_forward_dup_f32( // A simplified version of ggml_compute_forward_dup that doesn't do float upcasting, and just plain old memcpy. 
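 // (editor's note, summarizing ggml/729 above) from this sync onward, the
 // ggml_compute_forward_* helpers no longer receive their source tensors as
 // parameters; each one reads them from dst->src[0] / dst->src[1] instead, so the
 // repetitive refactoring below shrinks every signature and call site to (params, dst).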
static void ggml_compute_forward_dup_bytes( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); GGML_ASSERT(src0->type == dst->type); @@ -7213,7 +7223,7 @@ static void ggml_compute_forward_dup_bytes( } if (ggml_is_contiguous(src0) && ggml_is_contiguous(dst)) { - ggml_compute_forward_dup_same_cont(params, src0, dst); + ggml_compute_forward_dup_same_cont(params, dst); return; } @@ -7352,21 +7362,23 @@ static void ggml_compute_forward_dup_bytes( static void ggml_compute_forward_dup( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + if (src0->type == dst->type) { - ggml_compute_forward_dup_bytes(params, src0, dst); + ggml_compute_forward_dup_bytes(params, dst); return; } switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_dup_f16(params, src0, dst); + ggml_compute_forward_dup_f16(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_dup_f32(params, src0, dst); + ggml_compute_forward_dup_f32(params, dst); } break; default: { @@ -7379,9 +7391,11 @@ static void ggml_compute_forward_dup( static void ggml_compute_forward_add_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -7467,9 +7481,11 @@ static void ggml_compute_forward_add_f32( static void ggml_compute_forward_add_f16_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -7544,9 +7560,11 @@ static void ggml_compute_forward_add_f16_f32( static void ggml_compute_forward_add_f16_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -7598,9 +7616,11 @@ static void ggml_compute_forward_add_f16_f16( static void ggml_compute_forward_add_q_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -7676,14 +7696,16 @@ static void ggml_compute_forward_add_q_f32( static void ggml_compute_forward_add( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 
= dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + switch (src0->type) { case GGML_TYPE_F32: { if (src1->type == GGML_TYPE_F32) { - ggml_compute_forward_add_f32(params, src0, src1, dst); + ggml_compute_forward_add_f32(params, dst); } else { GGML_ASSERT(false); @@ -7692,10 +7714,10 @@ static void ggml_compute_forward_add( case GGML_TYPE_F16: { if (src1->type == GGML_TYPE_F16) { - ggml_compute_forward_add_f16_f16(params, src0, src1, dst); + ggml_compute_forward_add_f16_f16(params, dst); } else if (src1->type == GGML_TYPE_F32) { - ggml_compute_forward_add_f16_f32(params, src0, src1, dst); + ggml_compute_forward_add_f16_f32(params, dst); } else { GGML_ASSERT(false); @@ -7717,7 +7739,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: { - ggml_compute_forward_add_q_f32(params, src0, src1, dst); + ggml_compute_forward_add_q_f32(params, dst); } break; default: { @@ -7730,9 +7752,11 @@ static void ggml_compute_forward_add( static void ggml_compute_forward_add1_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); @@ -7782,9 +7806,11 @@ static void ggml_compute_forward_add1_f32( static void ggml_compute_forward_add1_f16_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); @@ -7832,9 +7858,11 @@ static void ggml_compute_forward_add1_f16_f32( static void ggml_compute_forward_add1_f16_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); @@ -7882,9 +7910,11 @@ static void ggml_compute_forward_add1_f16_f16( static void ggml_compute_forward_add1_q_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); @@ -7949,21 +7979,23 @@ static void ggml_compute_forward_add1_q_f32( static void ggml_compute_forward_add1( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_add1_f32(params, src0, src1, dst); + ggml_compute_forward_add1_f32(params, dst); } break; case GGML_TYPE_F16: { if (src1->type == GGML_TYPE_F16) { - ggml_compute_forward_add1_f16_f16(params, src0, src1, dst); + ggml_compute_forward_add1_f16_f16(params, dst); } else if (src1->type == GGML_TYPE_F32) { - ggml_compute_forward_add1_f16_f32(params, src0, src1, dst); + ggml_compute_forward_add1_f16_f32(params, dst); } else { GGML_ASSERT(false); @@ -7986,7 +8018,7 @@ static void 
ggml_compute_forward_add1( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: { - ggml_compute_forward_add1_q_f32(params, src0, src1, dst); + ggml_compute_forward_add1_q_f32(params, dst); } break; default: { @@ -7999,9 +8031,11 @@ static void ggml_compute_forward_add1( static void ggml_compute_forward_acc_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); @@ -8081,14 +8115,14 @@ static void ggml_compute_forward_acc_f32( static void ggml_compute_forward_acc( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_acc_f32(params, src0, src1, dst); + ggml_compute_forward_acc_f32(params, dst); } break; case GGML_TYPE_F16: case GGML_TYPE_Q4_0: @@ -8118,9 +8152,11 @@ static void ggml_compute_forward_acc( static void ggml_compute_forward_sub_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); @@ -8178,13 +8214,14 @@ static void ggml_compute_forward_sub_f32( static void ggml_compute_forward_sub( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sub_f32(params, src0, src1, dst); + ggml_compute_forward_sub_f32(params, dst); } break; default: { @@ -8197,9 +8234,11 @@ static void ggml_compute_forward_sub( static void ggml_compute_forward_mul_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8280,15 +8319,17 @@ static void ggml_compute_forward_mul_f32( static void ggml_compute_forward_mul( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src1->type == GGML_TYPE_F32 && "only f32 src1 supported for now"); switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_mul_f32(params, src0, src1, dst); + ggml_compute_forward_mul_f32(params, dst); } break; default: { @@ -8301,9 +8342,11 @@ static void ggml_compute_forward_mul( static void ggml_compute_forward_div_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_can_repeat(src1, src0) && 
ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8374,13 +8417,14 @@ static void ggml_compute_forward_div_f32( static void ggml_compute_forward_div( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_div_f32(params, src0, src1, dst); + ggml_compute_forward_div_f32(params, dst); } break; default: { @@ -8393,8 +8437,10 @@ static void ggml_compute_forward_div( static void ggml_compute_forward_sqr_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -8417,12 +8463,14 @@ static void ggml_compute_forward_sqr_f32( static void ggml_compute_forward_sqr( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sqr_f32(params, src0, dst); + ggml_compute_forward_sqr_f32(params, dst); } break; default: { @@ -8435,8 +8483,10 @@ static void ggml_compute_forward_sqr( static void ggml_compute_forward_sqrt_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -8459,12 +8509,14 @@ static void ggml_compute_forward_sqrt_f32( static void ggml_compute_forward_sqrt( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sqrt_f32(params, src0, dst); + ggml_compute_forward_sqrt_f32(params, dst); } break; default: { @@ -8477,8 +8529,10 @@ static void ggml_compute_forward_sqrt( static void ggml_compute_forward_log_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -8501,12 +8555,14 @@ static void ggml_compute_forward_log_f32( static void ggml_compute_forward_log( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_log_f32(params, src0, dst); + ggml_compute_forward_log_f32(params, dst); } break; default: { @@ -8519,8 +8575,10 @@ static void ggml_compute_forward_log( static void ggml_compute_forward_sum_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_is_scalar(dst)); @@ -8552,8 +8610,10 @@ static void ggml_compute_forward_sum_f32( static void ggml_compute_forward_sum_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_is_scalar(dst)); @@ -8584,16 +8644,18 @@ static void ggml_compute_forward_sum_f16( static 
void ggml_compute_forward_sum( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sum_f32(params, src0, dst); + ggml_compute_forward_sum_f32(params, dst); } break; case GGML_TYPE_F16: { - ggml_compute_forward_sum_f16(params, src0, dst); + ggml_compute_forward_sum_f16(params, dst); } break; default: { @@ -8606,8 +8668,10 @@ static void ggml_compute_forward_sum( static void ggml_compute_forward_sum_rows_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8639,12 +8703,14 @@ static void ggml_compute_forward_sum_rows_f32( static void ggml_compute_forward_sum_rows( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sum_rows_f32(params, src0, dst); + ggml_compute_forward_sum_rows_f32(params, dst); } break; default: { @@ -8657,8 +8723,10 @@ static void ggml_compute_forward_sum_rows( static void ggml_compute_forward_mean_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8694,12 +8762,14 @@ static void ggml_compute_forward_mean_f32( static void ggml_compute_forward_mean( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_mean_f32(params, src0, dst); + ggml_compute_forward_mean_f32(params, dst); } break; default: { @@ -8712,8 +8782,10 @@ static void ggml_compute_forward_mean( static void ggml_compute_forward_argmax_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -8740,12 +8812,14 @@ static void ggml_compute_forward_argmax_f32( static void ggml_compute_forward_argmax( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_argmax_f32(params, src0, dst); + ggml_compute_forward_argmax_f32(params, dst); } break; default: { @@ -8758,8 +8832,10 @@ static void ggml_compute_forward_argmax( static void ggml_compute_forward_repeat_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(src0, dst)); @@ -8801,8 +8877,10 @@ static void ggml_compute_forward_repeat_f32( static void ggml_compute_forward_repeat_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); 
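     // (editor's note) ggml_can_repeat(src0, dst) requires every dst dimension to be
     // a whole multiple of the matching src0 dimension, i.e. src0 must tile dst
     // exactly -- numpy-style broadcasting, as when a single row is repeated across
     // all rows of dst.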
GGML_ASSERT(ggml_can_repeat(src0, dst)); @@ -8847,18 +8925,20 @@ static void ggml_compute_forward_repeat_f16( static void ggml_compute_forward_repeat( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: case GGML_TYPE_I16: { - ggml_compute_forward_repeat_f16(params, src0, dst); + ggml_compute_forward_repeat_f16(params, dst); } break; case GGML_TYPE_F32: case GGML_TYPE_I32: { - ggml_compute_forward_repeat_f32(params, src0, dst); + ggml_compute_forward_repeat_f32(params, dst); } break; default: { @@ -8871,8 +8951,10 @@ static void ggml_compute_forward_repeat( static void ggml_compute_forward_repeat_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(dst, src0)); @@ -8928,12 +9010,14 @@ static void ggml_compute_forward_repeat_back_f32( static void ggml_compute_forward_repeat_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_repeat_back_f32(params, src0, dst); + ggml_compute_forward_repeat_back_f32(params, dst); } break; default: { @@ -8946,10 +9030,11 @@ static void ggml_compute_forward_repeat_back( static void ggml_compute_forward_concat_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -8994,14 +9079,15 @@ static void ggml_compute_forward_concat_f32( static void ggml_compute_forward_concat( const struct ggml_compute_params* params, - const struct ggml_tensor* src0, - const struct ggml_tensor* src1, struct ggml_tensor* dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: case GGML_TYPE_I32: { - ggml_compute_forward_concat_f32(params, src0, src1, dst); + ggml_compute_forward_concat_f32(params, dst); } break; default: { @@ -9014,8 +9100,10 @@ static void ggml_compute_forward_concat( static void ggml_compute_forward_abs_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9038,12 +9126,14 @@ static void ggml_compute_forward_abs_f32( static void ggml_compute_forward_abs( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_abs_f32(params, src0, dst); + ggml_compute_forward_abs_f32(params, dst); } break; default: { @@ -9056,8 +9146,10 @@ static void ggml_compute_forward_abs( static void ggml_compute_forward_sgn_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9080,12 +9172,14 @@ static void ggml_compute_forward_sgn_f32( static void 
ggml_compute_forward_sgn( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_sgn_f32(params, src0, dst); + ggml_compute_forward_sgn_f32(params, dst); } break; default: { @@ -9098,8 +9192,10 @@ static void ggml_compute_forward_sgn( static void ggml_compute_forward_neg_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9122,12 +9218,14 @@ static void ggml_compute_forward_neg_f32( static void ggml_compute_forward_neg( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_neg_f32(params, src0, dst); + ggml_compute_forward_neg_f32(params, dst); } break; default: { @@ -9140,8 +9238,10 @@ static void ggml_compute_forward_neg( static void ggml_compute_forward_step_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9164,12 +9264,14 @@ static void ggml_compute_forward_step_f32( static void ggml_compute_forward_step( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_step_f32(params, src0, dst); + ggml_compute_forward_step_f32(params, dst); } break; default: { @@ -9182,8 +9284,10 @@ static void ggml_compute_forward_step( static void ggml_compute_forward_tanh_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9206,12 +9310,14 @@ static void ggml_compute_forward_tanh_f32( static void ggml_compute_forward_tanh( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_tanh_f32(params, src0, dst); + ggml_compute_forward_tanh_f32(params, dst); } break; default: { @@ -9224,8 +9330,10 @@ static void ggml_compute_forward_tanh( static void ggml_compute_forward_elu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9248,12 +9356,14 @@ static void ggml_compute_forward_elu_f32( static void ggml_compute_forward_elu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_elu_f32(params, src0, dst); + ggml_compute_forward_elu_f32(params, dst); } break; default: { @@ -9266,8 +9376,10 @@ static void ggml_compute_forward_elu( static void ggml_compute_forward_relu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct 
ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9290,12 +9402,14 @@ static void ggml_compute_forward_relu_f32( static void ggml_compute_forward_relu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_relu_f32(params, src0, dst); + ggml_compute_forward_relu_f32(params, dst); } break; default: { @@ -9308,8 +9422,10 @@ static void ggml_compute_forward_relu( static void ggml_compute_forward_gelu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_is_contiguous_except_dim_1(src0)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -9349,12 +9465,14 @@ static void ggml_compute_forward_gelu_f32( static void ggml_compute_forward_gelu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_gelu_f32(params, src0, dst); + ggml_compute_forward_gelu_f32(params, dst); } break; default: { @@ -9367,8 +9485,10 @@ static void ggml_compute_forward_gelu( static void ggml_compute_forward_gelu_quick_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_is_contiguous_except_dim_1(src0)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -9408,12 +9528,14 @@ static void ggml_compute_forward_gelu_quick_f32( static void ggml_compute_forward_gelu_quick( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_gelu_quick_f32(params, src0, dst); + ggml_compute_forward_gelu_quick_f32(params, dst); } break; default: { @@ -9426,8 +9548,10 @@ static void ggml_compute_forward_gelu_quick( static void ggml_compute_forward_silu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_is_contiguous_except_dim_1(src0)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -9467,12 +9591,14 @@ static void ggml_compute_forward_silu_f32( static void ggml_compute_forward_silu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_silu_f32(params, src0, dst); + ggml_compute_forward_silu_f32(params, dst); } break; default: { @@ -9484,8 +9610,10 @@ static void ggml_compute_forward_silu( static void ggml_compute_forward_leaky_relu_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9511,12 +9639,14 @@ static void ggml_compute_forward_leaky_relu_f32( static void 
ggml_compute_forward_leaky_relu( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_leaky_relu_f32(params, src0, dst); + ggml_compute_forward_leaky_relu_f32(params, dst); } break; default: { @@ -9529,9 +9659,11 @@ static void ggml_compute_forward_leaky_relu( static void ggml_compute_forward_silu_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * grad, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * grad = dst->src[1]; + GGML_ASSERT(ggml_is_contiguous_except_dim_1(grad)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(src0)); GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); @@ -9574,13 +9706,14 @@ static void ggml_compute_forward_silu_back_f32( static void ggml_compute_forward_silu_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * grad, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_silu_back_f32(params, src0, grad, dst); + ggml_compute_forward_silu_back_f32(params, dst); } break; default: { @@ -9592,8 +9725,10 @@ static void ggml_compute_forward_silu_back( static void ggml_compute_forward_hardswish_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9615,12 +9750,14 @@ static void ggml_compute_forward_hardswish_f32( } static void ggml_compute_forward_hardswish( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_hardswish_f32(params, src0, dst); + ggml_compute_forward_hardswish_f32(params, dst); } break; default: { @@ -9631,8 +9768,10 @@ static void ggml_compute_forward_hardswish( static void ggml_compute_forward_hardsigmoid_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); @@ -9655,12 +9794,14 @@ static void ggml_compute_forward_hardsigmoid_f32( static void ggml_compute_forward_hardsigmoid( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_hardsigmoid_f32(params, src0, dst); + ggml_compute_forward_hardsigmoid_f32(params, dst); } break; default: { @@ -9674,8 +9815,10 @@ static void ggml_compute_forward_hardsigmoid( static void ggml_compute_forward_norm_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -9727,12 +9870,14 @@ static void ggml_compute_forward_norm_f32( static void ggml_compute_forward_norm( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * 
dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_norm_f32(params, src0, dst); + ggml_compute_forward_norm_f32(params, dst); } break; default: { @@ -9745,8 +9890,10 @@ static void ggml_compute_forward_norm( static void ggml_compute_forward_rms_norm_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -9795,12 +9942,14 @@ static void ggml_compute_forward_rms_norm_f32( static void ggml_compute_forward_rms_norm( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_rms_norm_f32(params, src0, dst); + ggml_compute_forward_rms_norm_f32(params, dst); } break; default: { @@ -9811,9 +9960,11 @@ static void ggml_compute_forward_rms_norm( static void ggml_compute_forward_rms_norm_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst) && ggml_are_same_shape(src0, src1)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -9968,13 +10119,14 @@ static void ggml_compute_forward_rms_norm_back_f32( static void ggml_compute_forward_rms_norm_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_rms_norm_back_f32(params, src0, src1, dst); + ggml_compute_forward_rms_norm_back_f32(params, dst); } break; default: { @@ -9987,8 +10139,10 @@ static void ggml_compute_forward_rms_norm_back( static void ggml_compute_forward_group_norm_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -10059,12 +10213,14 @@ static void ggml_compute_forward_group_norm_f32( static void ggml_compute_forward_group_norm( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_group_norm_f32(params, src0, dst); + ggml_compute_forward_group_norm_f32(params, dst); } break; default: { @@ -10110,9 +10266,11 @@ static bool ggml_compute_forward_mul_mat_use_blas(struct ggml_tensor * dst) { static void ggml_compute_forward_mul_mat( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -10357,10 +10515,11 @@ static void ggml_compute_forward_mul_mat( static void ggml_compute_forward_mul_mat_id( const struct ggml_compute_params * params, - const struct ggml_tensor * ids, 
- const struct ggml_tensor * src1, struct ggml_tensor * dst) { + const struct ggml_tensor * ids = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * src0 = dst->src[2]; // only for GGML_TENSOR_BINARY_OP_LOCALS GGML_TENSOR_BINARY_OP_LOCALS @@ -10551,9 +10710,11 @@ static void ggml_compute_forward_mul_mat_id( static void ggml_compute_forward_out_prod_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + // int64_t t0 = ggml_perf_time_us(); // UNUSED(t0); @@ -10743,9 +10904,11 @@ static void ggml_compute_forward_out_prod_f32( static void ggml_compute_forward_out_prod_q_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + // int64_t t0 = ggml_perf_time_us(); // UNUSED(t0); @@ -10856,9 +11019,10 @@ static void ggml_compute_forward_out_prod_q_f32( static void ggml_compute_forward_out_prod( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_Q4_0: case GGML_TYPE_Q4_1: @@ -10876,16 +11040,16 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: { - ggml_compute_forward_out_prod_q_f32(params, src0, src1, dst); + ggml_compute_forward_out_prod_q_f32(params, dst); } break; case GGML_TYPE_F16: { GGML_ASSERT(false); // todo - // ggml_compute_forward_out_prod_f16_f32(params, src0, src1, dst); + // ggml_compute_forward_out_prod_f16_f32(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_out_prod_f32(params, src0, src1, dst); + ggml_compute_forward_out_prod_f32(params, dst); } break; default: { @@ -10898,8 +11062,10 @@ static void ggml_compute_forward_out_prod( static void ggml_compute_forward_scale_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); @@ -10940,12 +11106,14 @@ static void ggml_compute_forward_scale_f32( static void ggml_compute_forward_scale( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_scale_f32(params, src0, dst); + ggml_compute_forward_scale_f32(params, dst); } break; default: { @@ -10958,9 +11126,11 @@ static void ggml_compute_forward_scale( static void ggml_compute_forward_set_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); @@ -11031,14 +11201,14 @@ static void ggml_compute_forward_set_f32( static void ggml_compute_forward_set( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct 
ggml_tensor * src1, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_set_f32(params, src0, src1, dst); + ggml_compute_forward_set_f32(params, dst); } break; case GGML_TYPE_F16: case GGML_TYPE_Q4_0: @@ -11068,29 +11238,25 @@ static void ggml_compute_forward_set( static void ggml_compute_forward_cpy( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { - ggml_compute_forward_dup(params, src0, dst); + ggml_compute_forward_dup(params, dst); } // ggml_compute_forward_cont static void ggml_compute_forward_cont( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { - ggml_compute_forward_dup(params, src0, dst); + ggml_compute_forward_dup(params, dst); } // ggml_compute_forward_reshape static void ggml_compute_forward_reshape( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { // NOP UNUSED(params); - UNUSED(src0); UNUSED(dst); } @@ -11098,39 +11264,41 @@ static void ggml_compute_forward_reshape( static void ggml_compute_forward_view( const struct ggml_compute_params * params, - const struct ggml_tensor * src0) { + const struct ggml_tensor * dst) { // NOP UNUSED(params); - UNUSED(src0); + UNUSED(dst); } // ggml_compute_forward_permute static void ggml_compute_forward_permute( const struct ggml_compute_params * params, - const struct ggml_tensor * src0) { + const struct ggml_tensor * dst) { // NOP UNUSED(params); - UNUSED(src0); + UNUSED(dst); } // ggml_compute_forward_transpose static void ggml_compute_forward_transpose( const struct ggml_compute_params * params, - const struct ggml_tensor * src0) { + const struct ggml_tensor * dst) { // NOP UNUSED(params); - UNUSED(src0); + UNUSED(dst); } // ggml_compute_forward_get_rows static void ggml_compute_forward_get_rows_q( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11166,9 +11334,11 @@ static void ggml_compute_forward_get_rows_q( static void ggml_compute_forward_get_rows_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11201,9 +11371,11 @@ static void ggml_compute_forward_get_rows_f16( static void ggml_compute_forward_get_rows_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11236,9 +11408,10 @@ static void ggml_compute_forward_get_rows_f32( static void ggml_compute_forward_get_rows( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case 
GGML_TYPE_Q4_0: case GGML_TYPE_Q4_1: @@ -11257,16 +11430,16 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: { - ggml_compute_forward_get_rows_q(params, src0, src1, dst); + ggml_compute_forward_get_rows_q(params, dst); } break; case GGML_TYPE_F16: { - ggml_compute_forward_get_rows_f16(params, src0, src1, dst); + ggml_compute_forward_get_rows_f16(params, dst); } break; case GGML_TYPE_F32: case GGML_TYPE_I32: { - ggml_compute_forward_get_rows_f32(params, src0, src1, dst); + ggml_compute_forward_get_rows_f32(params, dst); } break; default: { @@ -11297,9 +11470,11 @@ static void ggml_compute_forward_get_rows( static void ggml_compute_forward_get_rows_back_f32_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_is_contiguous(dst)); @@ -11334,9 +11509,11 @@ static void ggml_compute_forward_get_rows_back_f32_f16( static void ggml_compute_forward_get_rows_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_is_contiguous(dst)); @@ -11371,17 +11548,18 @@ static void ggml_compute_forward_get_rows_back_f32( static void ggml_compute_forward_get_rows_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_get_rows_back_f32_f16(params, src0, src1, dst); + ggml_compute_forward_get_rows_back_f32_f16(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_get_rows_back_f32(params, src0, src1, dst); + ggml_compute_forward_get_rows_back_f32(params, dst); } break; default: { @@ -11412,8 +11590,10 @@ static void ggml_compute_forward_get_rows_back( static void ggml_compute_forward_diag_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11452,12 +11632,14 @@ static void ggml_compute_forward_diag_f32( static void ggml_compute_forward_diag( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_diag_f32(params, src0, dst); + ggml_compute_forward_diag_f32(params, dst); } break; default: { @@ -11470,10 +11652,11 @@ static void ggml_compute_forward_diag( static void ggml_compute_forward_diag_mask_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst, const float value) { + const struct ggml_tensor * src0 = dst->src[0]; + const int ith = params->ith; const int nth = params->nth; @@ -11523,12 +11706,14 @@ static void ggml_compute_forward_diag_mask_f32( static void ggml_compute_forward_diag_mask_inf( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * 
src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_diag_mask_f32(params, src0, dst, -INFINITY); + ggml_compute_forward_diag_mask_f32(params, dst, -INFINITY); } break; default: { @@ -11539,12 +11724,14 @@ static void ggml_compute_forward_diag_mask_inf( static void ggml_compute_forward_diag_mask_zero( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_diag_mask_f32(params, src0, dst, 0); + ggml_compute_forward_diag_mask_f32(params, dst, 0); } break; default: { @@ -11557,10 +11744,12 @@ static void ggml_compute_forward_diag_mask_zero( static void ggml_compute_forward_soft_max_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * src2, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * src2 = dst->src[2]; + assert(ggml_is_contiguous(dst)); assert(ggml_are_same_shape(src0, dst)); @@ -11671,14 +11860,14 @@ static void ggml_compute_forward_soft_max_f32( static void ggml_compute_forward_soft_max( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * src2, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_soft_max_f32(params, src0, src1, src2, dst); + ggml_compute_forward_soft_max_f32(params, dst); } break; default: { @@ -11691,9 +11880,11 @@ static void ggml_compute_forward_soft_max( static void ggml_compute_forward_soft_max_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(src1)); GGML_ASSERT(ggml_is_contiguous(dst)); @@ -11768,13 +11959,14 @@ static void ggml_compute_forward_soft_max_back_f32( static void ggml_compute_forward_soft_max_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_soft_max_back_f32(params, src0, src1, dst); + ggml_compute_forward_soft_max_back_f32(params, dst); } break; default: { @@ -11787,8 +11979,10 @@ static void ggml_compute_forward_soft_max_back( static void ggml_compute_forward_alibi_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11844,8 +12038,10 @@ static void ggml_compute_forward_alibi_f32( static void ggml_compute_forward_alibi_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11904,16 +12100,18 @@ static void ggml_compute_forward_alibi_f16( static void 
ggml_compute_forward_alibi( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_alibi_f16(params, src0, dst); + ggml_compute_forward_alibi_f16(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_alibi_f32(params, src0, dst); + ggml_compute_forward_alibi_f32(params, dst); } break; case GGML_TYPE_Q4_0: case GGML_TYPE_Q4_1: @@ -11946,8 +12144,10 @@ static void ggml_compute_forward_alibi( static void ggml_compute_forward_clamp_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -11986,12 +12186,14 @@ static void ggml_compute_forward_clamp_f32( static void ggml_compute_forward_clamp( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_clamp_f32(params, src0, dst); + ggml_compute_forward_clamp_f32(params, dst); } break; case GGML_TYPE_F16: case GGML_TYPE_Q4_0: @@ -12081,10 +12283,12 @@ GGML_CALL void ggml_rope_yarn_corr_dims( static void ggml_compute_forward_rope_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst, const bool forward) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -12257,10 +12461,12 @@ static void ggml_compute_forward_rope_f32( static void ggml_compute_forward_rope_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst, const bool forward) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -12422,17 +12628,18 @@ static void ggml_compute_forward_rope_f16( static void ggml_compute_forward_rope( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_rope_f16(params, src0, src1, dst, true); + ggml_compute_forward_rope_f16(params, dst, true); } break; case GGML_TYPE_F32: { - ggml_compute_forward_rope_f32(params, src0, src1, dst, true); + ggml_compute_forward_rope_f32(params, dst, true); } break; default: { @@ -12445,17 +12652,18 @@ static void ggml_compute_forward_rope( static void ggml_compute_forward_rope_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_rope_f16(params, src0, src1, dst, false); + ggml_compute_forward_rope_f16(params, dst, false); } break; case GGML_TYPE_F32: { - ggml_compute_forward_rope_f32(params, src0, src1, dst, false); + ggml_compute_forward_rope_f32(params, dst, false); } break; default: { @@ -12468,9 +12676,11 @@ 
static void ggml_compute_forward_rope_back( static void ggml_compute_forward_conv_transpose_1d_f16_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -12565,9 +12775,11 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( static void ggml_compute_forward_conv_transpose_1d_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F32); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -12662,17 +12874,18 @@ static void ggml_compute_forward_conv_transpose_1d_f32( static void ggml_compute_forward_conv_transpose_1d( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_conv_transpose_1d_f16_f32(params, src0, src1, dst); + ggml_compute_forward_conv_transpose_1d_f16_f32(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_conv_transpose_1d_f32(params, src0, src1, dst); + ggml_compute_forward_conv_transpose_1d_f32(params, dst); } break; default: { @@ -12686,9 +12899,11 @@ static void ggml_compute_forward_conv_transpose_1d( // dst: result [N, OH, OW, IC*KH*KW] static void ggml_compute_forward_im2col_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -12772,9 +12987,11 @@ static void ggml_compute_forward_im2col_f32( // dst: result [N, OH, OW, IC*KH*KW] static void ggml_compute_forward_im2col_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F16); @@ -12854,17 +13071,15 @@ static void ggml_compute_forward_im2col_f16( static void ggml_compute_forward_im2col( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { switch (dst->type) { case GGML_TYPE_F16: { - ggml_compute_forward_im2col_f16(params, src0, src1, dst); + ggml_compute_forward_im2col_f16(params, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_im2col_f32(params, src0, src1, dst); + ggml_compute_forward_im2col_f32(params, dst); } break; default: { @@ -12878,9 +13093,11 @@ static void ggml_compute_forward_im2col( static void ggml_compute_forward_conv_transpose_2d( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const 
struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_ASSERT( dst->type == GGML_TYPE_F32); @@ -12984,9 +13201,11 @@ static void ggml_compute_forward_conv_transpose_2d( static void ggml_compute_forward_pool_1d_sk_p0( const struct ggml_compute_params * params, const enum ggml_op_pool op, - const struct ggml_tensor * src, const int k, struct ggml_tensor * dst) { + + const struct ggml_tensor * src = dst->src[0]; + assert(src->type == GGML_TYPE_F32); assert(params->ith == 0); @@ -13035,7 +13254,6 @@ static void ggml_compute_forward_pool_1d_sk_p0( static void ggml_compute_forward_pool_1d( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { const int32_t * opts = (const int32_t *)dst->op_params; @@ -13046,15 +13264,17 @@ static void ggml_compute_forward_pool_1d( GGML_ASSERT(p0 == 0); // padding not supported GGML_ASSERT(k0 == s0); // only s = k supported - ggml_compute_forward_pool_1d_sk_p0(params, op, src0, k0, dst); + ggml_compute_forward_pool_1d_sk_p0(params, op, k0, dst); } // ggml_compute_forward_pool_2d static void ggml_compute_forward_pool_2d( const struct ggml_compute_params * params, - const struct ggml_tensor * src, struct ggml_tensor * dst) { + + const struct ggml_tensor * src = dst->src[0]; + GGML_ASSERT(src->type == GGML_TYPE_F32); GGML_ASSERT(params->ith == 0); @@ -13127,9 +13347,10 @@ static void ggml_compute_forward_pool_2d( static void ggml_compute_forward_upscale_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -13166,12 +13387,14 @@ static void ggml_compute_forward_upscale_f32( static void ggml_compute_forward_upscale( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_upscale_f32(params, src0, dst); + ggml_compute_forward_upscale_f32(params, dst); } break; default: { @@ -13184,9 +13407,10 @@ static void ggml_compute_forward_upscale( static void ggml_compute_forward_pad_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -13224,12 +13448,14 @@ static void ggml_compute_forward_pad_f32( static void ggml_compute_forward_pad( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_pad_f32(params, src0, dst); + ggml_compute_forward_pad_f32(params, dst); } break; default: { @@ -13242,9 +13468,10 @@ static void ggml_compute_forward_pad( static void ggml_compute_forward_argsort_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -13284,13 +13511,14 @@ static void ggml_compute_forward_argsort_f32( static void ggml_compute_forward_argsort( const struct ggml_compute_params * params, - 
const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_argsort_f32(params, src0, dst); + ggml_compute_forward_argsort_f32(params, dst); } break; default: { @@ -13303,11 +13531,13 @@ static void ggml_compute_forward_argsort( static void ggml_compute_forward_flash_attn_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, - const struct ggml_tensor * v, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + const struct ggml_tensor * k = dst->src[1]; + const struct ggml_tensor * v = dst->src[2]; + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -13493,11 +13723,13 @@ static void ggml_compute_forward_flash_attn_f32( static void ggml_compute_forward_flash_attn_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, - const struct ggml_tensor * v, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + const struct ggml_tensor * k = dst->src[1]; + const struct ggml_tensor * v = dst->src[2]; + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -13719,19 +13951,19 @@ static void ggml_compute_forward_flash_attn_f16( static void ggml_compute_forward_flash_attn( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, - const struct ggml_tensor * v, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + switch (q->type) { case GGML_TYPE_F16: { - ggml_compute_forward_flash_attn_f16(params, q, k, v, masked, dst); + ggml_compute_forward_flash_attn_f16(params, masked, dst); } break; case GGML_TYPE_F32: { - ggml_compute_forward_flash_attn_f32(params, q, k, v, masked, dst); + ggml_compute_forward_flash_attn_f32(params, masked, dst); } break; default: { @@ -13744,12 +13976,14 @@ static void ggml_compute_forward_flash_attn( static void ggml_compute_forward_flash_ff_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * a, // F16 - const struct ggml_tensor * b0, // F16 fc_w - const struct ggml_tensor * b1, // F32 fc_b - const struct ggml_tensor * c0, // F16 proj_w - const struct ggml_tensor * c1, // F32 proj_b struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; // F16 + const struct ggml_tensor * b0 = dst->src[1]; // F16 fc_w + const struct ggml_tensor * b1 = dst->src[2]; // F32 fc_b + const struct ggml_tensor * c0 = dst->src[3]; // F16 proj_w + const struct ggml_tensor * c1 = dst->src[4]; // F32 proj_b + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -13877,16 +14111,14 @@ static void ggml_compute_forward_flash_ff_f16( static void ggml_compute_forward_flash_ff( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b0, - const struct ggml_tensor * b1, - const struct ggml_tensor * c0, - const struct ggml_tensor * c1, struct ggml_tensor * dst) { + + const struct ggml_tensor * b0 = dst->src[1]; + switch (b0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_flash_ff_f16(params, a, b0, b1, c0, c1, dst); + ggml_compute_forward_flash_ff_f16(params, dst); } break; case GGML_TYPE_F32: { @@ -13903,12 +14135,14 @@ static void ggml_compute_forward_flash_ff( static void ggml_compute_forward_flash_attn_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, 
- const struct ggml_tensor * v, - const struct ggml_tensor * d, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + const struct ggml_tensor * k = dst->src[1]; + const struct ggml_tensor * v = dst->src[2]; + const struct ggml_tensor * d = dst->src[3]; + int64_t t0 = ggml_perf_time_us(); UNUSED(t0); @@ -14256,16 +14490,15 @@ static void ggml_compute_forward_flash_attn_back_f32( static void ggml_compute_forward_flash_attn_back( const struct ggml_compute_params * params, - const struct ggml_tensor * q, - const struct ggml_tensor * k, - const struct ggml_tensor * v, - const struct ggml_tensor * d, const bool masked, struct ggml_tensor * dst) { + + const struct ggml_tensor * q = dst->src[0]; + switch (q->type) { case GGML_TYPE_F32: { - ggml_compute_forward_flash_attn_back_f32(params, q, k, v, d, masked, dst); + ggml_compute_forward_flash_attn_back_f32(params, masked, dst); } break; default: { @@ -14278,8 +14511,10 @@ static void ggml_compute_forward_flash_attn_back( static void ggml_compute_forward_win_part_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14322,12 +14557,14 @@ static void ggml_compute_forward_win_part_f32( static void ggml_compute_forward_win_part( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_win_part_f32(params, src0, dst); + ggml_compute_forward_win_part_f32(params, dst); } break; default: { @@ -14340,8 +14577,10 @@ static void ggml_compute_forward_win_part( static void ggml_compute_forward_win_unpart_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14382,12 +14621,14 @@ static void ggml_compute_forward_win_unpart_f32( static void ggml_compute_forward_win_unpart( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_win_unpart_f32(params, src0, dst); + ggml_compute_forward_win_unpart_f32(params, dst); } break; default: { @@ -14400,58 +14641,58 @@ static void ggml_compute_forward_win_unpart( static void ggml_compute_forward_unary( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + const enum ggml_unary_op op = ggml_get_unary_op(dst); switch (op) { case GGML_UNARY_OP_ABS: { - ggml_compute_forward_abs(params, src0, dst); + ggml_compute_forward_abs(params, dst); } break; case GGML_UNARY_OP_SGN: { - ggml_compute_forward_sgn(params, src0, dst); + ggml_compute_forward_sgn(params, dst); } break; case GGML_UNARY_OP_NEG: { - ggml_compute_forward_neg(params, src0, dst); + ggml_compute_forward_neg(params, dst); } break; case GGML_UNARY_OP_STEP: { - ggml_compute_forward_step(params, src0, dst); + ggml_compute_forward_step(params, dst); } break; case GGML_UNARY_OP_TANH: { - ggml_compute_forward_tanh(params, src0, dst); + ggml_compute_forward_tanh(params, dst); } break; case GGML_UNARY_OP_ELU: { - ggml_compute_forward_elu(params, src0, dst); 
+ ggml_compute_forward_elu(params, dst); } break; case GGML_UNARY_OP_RELU: { - ggml_compute_forward_relu(params, src0, dst); + ggml_compute_forward_relu(params, dst); } break; case GGML_UNARY_OP_GELU: { - ggml_compute_forward_gelu(params, src0, dst); + ggml_compute_forward_gelu(params, dst); } break; case GGML_UNARY_OP_GELU_QUICK: { - ggml_compute_forward_gelu_quick(params, src0, dst); + ggml_compute_forward_gelu_quick(params, dst); } break; case GGML_UNARY_OP_SILU: { - ggml_compute_forward_silu(params, src0, dst); + ggml_compute_forward_silu(params, dst); } break; case GGML_UNARY_OP_HARDSWISH: { - ggml_compute_forward_hardswish(params, src0, dst); + ggml_compute_forward_hardswish(params, dst); } break; case GGML_UNARY_OP_HARDSIGMOID: { - ggml_compute_forward_hardsigmoid(params, src0, dst); + ggml_compute_forward_hardsigmoid(params, dst); } break; default: { @@ -14464,8 +14705,10 @@ static void ggml_compute_forward_unary( static void ggml_compute_forward_get_rel_pos_f16( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14491,12 +14734,14 @@ static void ggml_compute_forward_get_rel_pos_f16( static void ggml_compute_forward_get_rel_pos( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F16: { - ggml_compute_forward_get_rel_pos_f16(params, src0, dst); + ggml_compute_forward_get_rel_pos_f16(params, dst); } break; default: { @@ -14509,11 +14754,12 @@ static void ggml_compute_forward_get_rel_pos( static void ggml_compute_forward_add_rel_pos_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * src2, struct ggml_tensor * dst) { + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * src2 = dst->src[2]; + const bool inplace = (bool) ((int32_t *) dst->op_params)[0]; if (!inplace && params->type == GGML_TASK_INIT) { if (params->ith != 0) { @@ -14577,14 +14823,14 @@ static void ggml_compute_forward_add_rel_pos_f32( static void ggml_compute_forward_add_rel_pos( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * src2, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_add_rel_pos_f32(params, src0, src1, src2, dst); + ggml_compute_forward_add_rel_pos_f32(params, dst); } break; default: { @@ -14597,9 +14843,11 @@ static void ggml_compute_forward_add_rel_pos( static void ggml_compute_forward_map_unary_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst, const ggml_unary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + GGML_ASSERT(ggml_are_same_shape(src0, dst)); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -14621,13 +14869,15 @@ static void ggml_compute_forward_map_unary_f32( static void ggml_compute_forward_map_unary( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, struct ggml_tensor * dst, const ggml_unary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch 
(src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_map_unary_f32(params, src0, dst, fun); + ggml_compute_forward_map_unary_f32(params, dst, fun); } break; default: { @@ -14640,10 +14890,12 @@ static void ggml_compute_forward_map_unary( static void ggml_compute_forward_map_binary_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst, const ggml_binary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + assert(params->ith == 0); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); @@ -14668,14 +14920,15 @@ static void ggml_compute_forward_map_binary_f32( static void ggml_compute_forward_map_binary( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst, const ggml_binary_op_f32_t fun) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_map_binary_f32(params, src0, src1, dst, fun); + ggml_compute_forward_map_binary_f32(params, dst, fun); } break; default: { @@ -14688,9 +14941,11 @@ static void ggml_compute_forward_map_binary( static void ggml_compute_forward_map_custom1_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * a, struct ggml_tensor * dst, const ggml_custom1_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -14704,10 +14959,12 @@ static void ggml_compute_forward_map_custom1_f32( static void ggml_compute_forward_map_custom2_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b, struct ggml_tensor * dst, const ggml_custom2_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -14721,11 +14978,13 @@ static void ggml_compute_forward_map_custom2_f32( static void ggml_compute_forward_map_custom3_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b, - const struct ggml_tensor * c, struct ggml_tensor * dst, const ggml_custom3_op_f32_t fun) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + const struct ggml_tensor * c = dst->src[2]; + assert(params->ith == 0); if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { @@ -14739,8 +14998,10 @@ static void ggml_compute_forward_map_custom3_f32( static void ggml_compute_forward_map_custom1( const struct ggml_compute_params * params, - const struct ggml_tensor * a, struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14754,9 +15015,11 @@ static void ggml_compute_forward_map_custom1( static void ggml_compute_forward_map_custom2( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b, struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14770,10 +15033,12 @@ static void
ggml_compute_forward_map_custom3( const struct ggml_compute_params * params, - const struct ggml_tensor * a, - const struct ggml_tensor * b, - const struct ggml_tensor * c, struct ggml_tensor * dst) { + + const struct ggml_tensor * a = dst->src[0]; + const struct ggml_tensor * b = dst->src[1]; + const struct ggml_tensor * c = dst->src[2]; + if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { return; } @@ -14787,9 +15052,11 @@ static void ggml_compute_forward_map_custom3( static void ggml_compute_forward_cross_entropy_loss_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(src1)); GGML_ASSERT(ggml_is_scalar(dst)); @@ -14893,13 +15160,14 @@ static void ggml_compute_forward_cross_entropy_loss_f32( static void ggml_compute_forward_cross_entropy_loss( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_cross_entropy_loss_f32(params, src0, src1, dst); + ggml_compute_forward_cross_entropy_loss_f32(params, dst); } break; default: { @@ -14912,10 +15180,12 @@ static void ggml_compute_forward_cross_entropy_loss( static void ggml_compute_forward_cross_entropy_loss_back_f32( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * opt0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + const struct ggml_tensor * src1 = dst->src[1]; + const struct ggml_tensor * opt0 = dst->src[2]; + GGML_ASSERT(ggml_is_contiguous(dst)); GGML_ASSERT(ggml_is_contiguous(src0)); GGML_ASSERT(ggml_is_contiguous(src1)); @@ -15002,14 +15272,14 @@ static void ggml_compute_forward_cross_entropy_loss_back_f32( static void ggml_compute_forward_cross_entropy_loss_back( const struct ggml_compute_params * params, - const struct ggml_tensor * src0, - const struct ggml_tensor * src1, - const struct ggml_tensor * opt0, struct ggml_tensor * dst) { + + const struct ggml_tensor * src0 = dst->src[0]; + switch (src0->type) { case GGML_TYPE_F32: { - ggml_compute_forward_cross_entropy_loss_back_f32(params, src0, src1, opt0, dst); + ggml_compute_forward_cross_entropy_loss_back_f32(params, dst); } break; default: { @@ -15057,312 +15327,312 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm switch (tensor->op) { case GGML_OP_DUP: { - ggml_compute_forward_dup(params, tensor->src[0], tensor); + ggml_compute_forward_dup(params, tensor); } break; case GGML_OP_ADD: { - ggml_compute_forward_add(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_add(params, tensor); } break; case GGML_OP_ADD1: { - ggml_compute_forward_add1(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_add1(params, tensor); } break; case GGML_OP_ACC: { - ggml_compute_forward_acc(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_acc(params, tensor); } break; case GGML_OP_SUB: { - ggml_compute_forward_sub(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_sub(params, tensor); } break; case GGML_OP_MUL: { - ggml_compute_forward_mul(params, tensor->src[0], 
tensor->src[1], tensor); + ggml_compute_forward_mul(params, tensor); } break; case GGML_OP_DIV: { - ggml_compute_forward_div(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_div(params, tensor); } break; case GGML_OP_SQR: { - ggml_compute_forward_sqr(params, tensor->src[0], tensor); + ggml_compute_forward_sqr(params, tensor); } break; case GGML_OP_SQRT: { - ggml_compute_forward_sqrt(params, tensor->src[0], tensor); + ggml_compute_forward_sqrt(params, tensor); } break; case GGML_OP_LOG: { - ggml_compute_forward_log(params, tensor->src[0], tensor); + ggml_compute_forward_log(params, tensor); } break; case GGML_OP_SUM: { - ggml_compute_forward_sum(params, tensor->src[0], tensor); + ggml_compute_forward_sum(params, tensor); } break; case GGML_OP_SUM_ROWS: { - ggml_compute_forward_sum_rows(params, tensor->src[0], tensor); + ggml_compute_forward_sum_rows(params, tensor); } break; case GGML_OP_MEAN: { - ggml_compute_forward_mean(params, tensor->src[0], tensor); + ggml_compute_forward_mean(params, tensor); } break; case GGML_OP_ARGMAX: { - ggml_compute_forward_argmax(params, tensor->src[0], tensor); + ggml_compute_forward_argmax(params, tensor); } break; case GGML_OP_REPEAT: { - ggml_compute_forward_repeat(params, tensor->src[0], tensor); + ggml_compute_forward_repeat(params, tensor); } break; case GGML_OP_REPEAT_BACK: { - ggml_compute_forward_repeat_back(params, tensor->src[0], tensor); + ggml_compute_forward_repeat_back(params, tensor); } break; case GGML_OP_CONCAT: { - ggml_compute_forward_concat(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_concat(params, tensor); } break; case GGML_OP_SILU_BACK: { - ggml_compute_forward_silu_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_silu_back(params, tensor); } break; case GGML_OP_NORM: { - ggml_compute_forward_norm(params, tensor->src[0], tensor); + ggml_compute_forward_norm(params, tensor); } break; case GGML_OP_RMS_NORM: { - ggml_compute_forward_rms_norm(params, tensor->src[0], tensor); + ggml_compute_forward_rms_norm(params, tensor); } break; case GGML_OP_RMS_NORM_BACK: { - ggml_compute_forward_rms_norm_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_rms_norm_back(params, tensor); } break; case GGML_OP_GROUP_NORM: { - ggml_compute_forward_group_norm(params, tensor->src[0], tensor); + ggml_compute_forward_group_norm(params, tensor); } break; case GGML_OP_MUL_MAT: { - ggml_compute_forward_mul_mat(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_mul_mat(params, tensor); } break; case GGML_OP_MUL_MAT_ID: { - ggml_compute_forward_mul_mat_id(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_mul_mat_id(params, tensor); } break; case GGML_OP_OUT_PROD: { - ggml_compute_forward_out_prod(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_out_prod(params, tensor); } break; case GGML_OP_SCALE: { - ggml_compute_forward_scale(params, tensor->src[0], tensor); + ggml_compute_forward_scale(params, tensor); } break; case GGML_OP_SET: { - ggml_compute_forward_set(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_set(params, tensor); } break; case GGML_OP_CPY: { - ggml_compute_forward_cpy(params, tensor->src[0], tensor); + ggml_compute_forward_cpy(params, tensor); } break; case GGML_OP_CONT: { - ggml_compute_forward_cont(params, tensor->src[0], tensor); + ggml_compute_forward_cont(params, tensor); } break; case GGML_OP_RESHAPE: { - ggml_compute_forward_reshape(params, 
tensor->src[0], tensor); + ggml_compute_forward_reshape(params, tensor); } break; case GGML_OP_VIEW: { - ggml_compute_forward_view(params, tensor->src[0]); + ggml_compute_forward_view(params, tensor); } break; case GGML_OP_PERMUTE: { - ggml_compute_forward_permute(params, tensor->src[0]); + ggml_compute_forward_permute(params, tensor); } break; case GGML_OP_TRANSPOSE: { - ggml_compute_forward_transpose(params, tensor->src[0]); + ggml_compute_forward_transpose(params, tensor); } break; case GGML_OP_GET_ROWS: { - ggml_compute_forward_get_rows(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_get_rows(params, tensor); } break; case GGML_OP_GET_ROWS_BACK: { - ggml_compute_forward_get_rows_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_get_rows_back(params, tensor); } break; case GGML_OP_DIAG: { - ggml_compute_forward_diag(params, tensor->src[0], tensor); + ggml_compute_forward_diag(params, tensor); } break; case GGML_OP_DIAG_MASK_INF: { - ggml_compute_forward_diag_mask_inf(params, tensor->src[0], tensor); + ggml_compute_forward_diag_mask_inf(params, tensor); } break; case GGML_OP_DIAG_MASK_ZERO: { - ggml_compute_forward_diag_mask_zero(params, tensor->src[0], tensor); + ggml_compute_forward_diag_mask_zero(params, tensor); } break; case GGML_OP_SOFT_MAX: { - ggml_compute_forward_soft_max(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); + ggml_compute_forward_soft_max(params, tensor); } break; case GGML_OP_SOFT_MAX_BACK: { - ggml_compute_forward_soft_max_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_soft_max_back(params, tensor); } break; case GGML_OP_ROPE: { - ggml_compute_forward_rope(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_rope(params, tensor); } break; case GGML_OP_ROPE_BACK: { - ggml_compute_forward_rope_back(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_rope_back(params, tensor); } break; case GGML_OP_ALIBI: { - ggml_compute_forward_alibi(params, tensor->src[0], tensor); + ggml_compute_forward_alibi(params, tensor); } break; case GGML_OP_CLAMP: { - ggml_compute_forward_clamp(params, tensor->src[0], tensor); + ggml_compute_forward_clamp(params, tensor); } break; case GGML_OP_CONV_TRANSPOSE_1D: { - ggml_compute_forward_conv_transpose_1d(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_conv_transpose_1d(params, tensor); } break; case GGML_OP_IM2COL: { - ggml_compute_forward_im2col(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_im2col(params, tensor); } break; case GGML_OP_CONV_TRANSPOSE_2D: { - ggml_compute_forward_conv_transpose_2d(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_conv_transpose_2d(params, tensor); } break; case GGML_OP_POOL_1D: { - ggml_compute_forward_pool_1d(params, tensor->src[0], tensor); + ggml_compute_forward_pool_1d(params, tensor); } break; case GGML_OP_POOL_2D: { - ggml_compute_forward_pool_2d(params, tensor->src[0], tensor); + ggml_compute_forward_pool_2d(params, tensor); } break; case GGML_OP_UPSCALE: { - ggml_compute_forward_upscale(params, tensor->src[0], tensor); + ggml_compute_forward_upscale(params, tensor); } break; case GGML_OP_PAD: { - ggml_compute_forward_pad(params, tensor->src[0], tensor); + ggml_compute_forward_pad(params, tensor); } break; case GGML_OP_ARGSORT: { - ggml_compute_forward_argsort(params, tensor->src[0], tensor); + ggml_compute_forward_argsort(params, tensor); } break; case GGML_OP_LEAKY_RELU: { - 
ggml_compute_forward_leaky_relu(params, tensor->src[0], tensor); + ggml_compute_forward_leaky_relu(params, tensor); } break; case GGML_OP_FLASH_ATTN: { const int32_t t = ggml_get_op_params_i32(tensor, 0); GGML_ASSERT(t == 0 || t == 1); const bool masked = t != 0; - ggml_compute_forward_flash_attn(params, tensor->src[0], tensor->src[1], tensor->src[2], masked, tensor); + ggml_compute_forward_flash_attn(params, masked, tensor); } break; case GGML_OP_FLASH_FF: { - ggml_compute_forward_flash_ff(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor->src[3], tensor->src[4], tensor); + ggml_compute_forward_flash_ff(params, tensor); } break; case GGML_OP_FLASH_ATTN_BACK: { int32_t t = ggml_get_op_params_i32(tensor, 0); GGML_ASSERT(t == 0 || t == 1); bool masked = t != 0; - ggml_compute_forward_flash_attn_back(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor->src[3], masked, tensor); + ggml_compute_forward_flash_attn_back(params, masked, tensor); } break; case GGML_OP_WIN_PART: { - ggml_compute_forward_win_part(params, tensor->src[0], tensor); + ggml_compute_forward_win_part(params, tensor); } break; case GGML_OP_WIN_UNPART: { - ggml_compute_forward_win_unpart(params, tensor->src[0], tensor); + ggml_compute_forward_win_unpart(params, tensor); } break; case GGML_OP_UNARY: { - ggml_compute_forward_unary(params, tensor->src[0], tensor); + ggml_compute_forward_unary(params, tensor); } break; case GGML_OP_GET_REL_POS: { - ggml_compute_forward_get_rel_pos(params, tensor->src[0], tensor); + ggml_compute_forward_get_rel_pos(params, tensor); } break; case GGML_OP_ADD_REL_POS: { - ggml_compute_forward_add_rel_pos(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); + ggml_compute_forward_add_rel_pos(params, tensor); } break; case GGML_OP_MAP_UNARY: { ggml_unary_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_unary(params, tensor->src[0], tensor, fun); + ggml_compute_forward_map_unary(params, tensor, fun); } break; case GGML_OP_MAP_BINARY: { ggml_binary_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_binary(params, tensor->src[0], tensor->src[1], tensor, fun); + ggml_compute_forward_map_binary(params, tensor, fun); } break; case GGML_OP_MAP_CUSTOM1_F32: { ggml_custom1_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_custom1_f32(params, tensor->src[0], tensor, fun); + ggml_compute_forward_map_custom1_f32(params, tensor, fun); } break; case GGML_OP_MAP_CUSTOM2_F32: { ggml_custom2_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_custom2_f32(params, tensor->src[0], tensor->src[1], tensor, fun); + ggml_compute_forward_map_custom2_f32(params, tensor, fun); } break; case GGML_OP_MAP_CUSTOM3_F32: { ggml_custom3_op_f32_t fun; memcpy(&fun, tensor->op_params, sizeof(fun)); - ggml_compute_forward_map_custom3_f32(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor, fun); + ggml_compute_forward_map_custom3_f32(params, tensor, fun); } break; case GGML_OP_MAP_CUSTOM1: { - ggml_compute_forward_map_custom1(params, tensor->src[0], tensor); + ggml_compute_forward_map_custom1(params, tensor); } break; case GGML_OP_MAP_CUSTOM2: { - ggml_compute_forward_map_custom2(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_map_custom2(params, tensor); } break; case GGML_OP_MAP_CUSTOM3: { - ggml_compute_forward_map_custom3(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); + 
ggml_compute_forward_map_custom3(params, tensor); } break; case GGML_OP_CROSS_ENTROPY_LOSS: { - ggml_compute_forward_cross_entropy_loss(params, tensor->src[0], tensor->src[1], tensor); + ggml_compute_forward_cross_entropy_loss(params, tensor); } break; case GGML_OP_CROSS_ENTROPY_LOSS_BACK: { - ggml_compute_forward_cross_entropy_loss_back(params, tensor->src[0], tensor->src[1], tensor->src[2], tensor); + ggml_compute_forward_cross_entropy_loss_back(params, tensor); } break; case GGML_OP_NONE: diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 733d8f95b..97f34ac85 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -818eeb8a3be99125746a90ec63af8f51516a2ec6 +4712fd12d7acb9971f850b1b98588f934cb39444 From a00a35cef93e057eace8351a667d14d152a91ebc Mon Sep 17 00:00:00 2001 From: Ettore Di Giacinto Date: Wed, 21 Feb 2024 15:39:10 +0100 Subject: [PATCH 827/859] readme : add LocalAI to the availables UI (#5629) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ce5dec7ca..c1624b9f9 100644 --- a/README.md +++ b/README.md @@ -141,6 +141,7 @@ Unless otherwise noted these projects are open-source with permissive licensing: - [nat/openplayground](https://github.com/nat/openplayground) - [Faraday](https://faraday.dev/) (proprietary) - [LMStudio](https://lmstudio.ai/) (proprietary) +- [LocalAI](https://github.com/mudler/LocalAI) (MIT) - [LostRuins/koboldcpp](https://github.com/LostRuins/koboldcpp) (AGPL) - [Mozilla-Ocho/llamafile](https://github.com/Mozilla-Ocho/llamafile) - [nomic-ai/gpt4all](https://github.com/nomic-ai/gpt4all) From 1ecea255ebb70750b52688393f37a63606b90e3f Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Wed, 21 Feb 2024 15:47:48 +0100 Subject: [PATCH 828/859] server: health: fix race condition on slots data using tasks queue (#5634) * server: health: fix race condition on slots data using tasks queue * server: health: * include_slots only if slots_endpoint * fix compile warning task.target_id not initialized. --- examples/server/README.md | 2 + examples/server/server.cpp | 122 ++++++++++++++++++++++++------------- examples/server/utils.hpp | 3 +- 3 files changed, 84 insertions(+), 43 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index f6b9c7402..6d9f96cd4 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -140,6 +140,8 @@ node index.js - 200 -> `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if no slot are currently available. - 503 -> `{"status": "no slot available", "slots_idle": 0, "slots_processing": 32}` if the query parameter `fail_on_no_slot` is provided and no slot are currently available. + If the query parameter `include_slots` is passed, `slots` field will contain internal slots data except if `--slots-endpoint-disable` is set. + - **POST** `/completion`: Given a `prompt`, it returns the predicted completion. 
*Options:* diff --git a/examples/server/server.cpp b/examples/server/server.cpp index eb01729fa..1c4479512 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1394,6 +1394,46 @@ struct llama_server_context case TASK_TYPE_NEXT_RESPONSE: { // do nothing } break; + case TASK_TYPE_SLOTS_DATA: { + json slots_data = json::array(); + int n_idle_slots = 0; + int n_processing_slots = 0; + + for (llama_client_slot &slot: slots) { + if (slot.available()) { + n_idle_slots++; + } else { + n_processing_slots++; + } + json slot_data = get_formated_generation(slot); + slot_data["id"] = slot.id; + slot_data["task_id"] = slot.task_id; + slot_data["state"] = slot.state; + slot_data["prompt"] = slot.prompt; + slot_data["next_token"] = { + {"has_next_token", slot.has_next_token}, + {"n_remain", slot.n_remaining}, + {"num_tokens_predicted", slot.n_decoded}, + {"stopped_eos", slot.stopped_eos}, + {"stopped_word", slot.stopped_word}, + {"stopped_limit", slot.stopped_limit}, + {"stopping_word", slot.stopping_word}, + }; + slots_data.push_back(slot_data); + } + LOG_TEE("task %i - slots data: idle=%i processing=%i\n", task.id, n_idle_slots, n_processing_slots); + task_result res; + res.id = task.id; + res.multitask_id = task.multitask_id; + res.stop = true; + res.error = false; + res.result_json = { + { "idle", n_idle_slots }, + { "processing", n_processing_slots }, + { "slots", slots_data } + }; + queue_results.send(res); + } break; } } @@ -2557,34 +2597,38 @@ int main(int argc, char **argv) server_state current_state = state.load(); switch(current_state) { case SERVER_STATE_READY: { - int available_slots = 0; - int processing_slots = 0; - for (llama_client_slot &slot: llama.slots) { - if (slot.available()) { - available_slots++; - } else { - processing_slots++; - } + // request slots data using task queue + task_server task; + task.id = llama.queue_tasks.get_new_id(); + task.type = TASK_TYPE_SLOTS_DATA; + task.target_id = -1; + + llama.queue_results.add_waiting_task_id(task.id); + llama.queue_tasks.post(task); + + // get the result + task_result result = llama.queue_results.recv(task.id); + llama.queue_results.remove_waiting_task_id(task.id); + + int n_idle_slots = result.result_json["idle"]; + int n_processing_slots = result.result_json["processing"]; + + json health = { + {"status", "ok"}, + {"slots_idle", n_idle_slots}, + {"slots_processing", n_processing_slots}}; + res.status = 200; // HTTP OK + if (sparams.slots_endpoint && req.has_param("include_slots")) { + health["slots"] = result.result_json["slots"]; } - if (available_slots > 0) { - json health = { - {"status", "ok"}, - {"slots_idle", available_slots}, - {"slots_processing", processing_slots}}; - res.set_content(health.dump(), "application/json"); - res.status = 200; // HTTP OK - } else { - json health = { - {"status", "no slot available"}, - {"slots_idle", available_slots}, - {"slots_processing", processing_slots}}; - res.set_content(health.dump(), "application/json"); + + if (n_idle_slots == 0) { + health["status"] = "no slot available"; if (req.has_param("fail_on_no_slot")) { res.status = 503; // HTTP Service Unavailable - } else { - res.status = 200; // HTTP OK } } + res.set_content(health.dump(), "application/json"); break; } case SERVER_STATE_LOADING_MODEL: @@ -2600,26 +2644,20 @@ int main(int argc, char **argv) if (sparams.slots_endpoint) { svr.Get("/slots", [&](const httplib::Request&, httplib::Response& res) { - json slots; - for (llama_client_slot & slot : llama.slots) { - json slot_data = 
llama.get_formated_generation(slot); - slot_data["id"] = slot.id; - slot_data["task_id"] = slot.task_id; - slot_data["state"] = slot.state; - slot_data["prompt"] = slot.prompt; - slot_data["next_token"] = { - {"has_next_token", slot.has_next_token}, - {"n_remain", slot.n_remaining}, - {"num_tokens_predicted", slot.n_decoded}, - {"stopped_eos", slot.stopped_eos}, - {"stopped_word", slot.stopped_word}, - {"stopped_limit", slot.stopped_limit}, - {"stopping_word", slot.stopping_word}, - }; + // request slots data using task queue + task_server task; + task.id = llama.queue_tasks.get_new_id(); + task.type = TASK_TYPE_SLOTS_DATA; + task.target_id = -1; - slots.push_back(slot_data); - } - res.set_content(slots.dump(), "application/json"); + llama.queue_results.add_waiting_task_id(task.id); + llama.queue_tasks.post(task); + + // get the result + task_result result = llama.queue_results.recv(task.id); + llama.queue_results.remove_waiting_task_id(task.id); + + res.set_content(result.result_json["slots"].dump(), "application/json"); res.status = 200; // HTTP OK }); } diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index e954fb0ef..88545eb69 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -49,7 +49,8 @@ enum server_state { enum task_type { TASK_TYPE_COMPLETION, TASK_TYPE_CANCEL, - TASK_TYPE_NEXT_RESPONSE + TASK_TYPE_NEXT_RESPONSE, + TASK_TYPE_SLOTS_DATA }; struct task_server { From 5022cf242d689e15defd133f96c4345ad30c5d19 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Wed, 21 Feb 2024 16:52:39 +0200 Subject: [PATCH 829/859] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index 97f34ac85..bbbf88d9d 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -4712fd12d7acb9971f850b1b98588f934cb39444 +30805514e1bf389a59d30a54a0525cbdc30d5bd1 From 89febfed9322c8849520dc63c93ee4f5fd72556e Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Wed, 21 Feb 2024 10:33:54 -0500 Subject: [PATCH 830/859] examples : do not assume BOS when shifting context (#5622) --- examples/main/main.cpp | 12 +++++++----- examples/server/server.cpp | 13 +++++++------ 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/examples/main/main.cpp b/examples/main/main.cpp index f5d2f4893..7555dffe4 100644 --- a/examples/main/main.cpp +++ b/examples/main/main.cpp @@ -334,6 +334,8 @@ int main(int argc, char ** argv) { // number of tokens to keep when resetting context if (params.n_keep < 0 || params.n_keep > (int) embd_inp.size() || params.instruct || params.chatml) { params.n_keep = (int)embd_inp.size(); + } else { + params.n_keep += add_bos; // always keep the BOS token } // prefix & suffix for instruct mode @@ -383,8 +385,8 @@ int main(int argc, char ** argv) { } } - if (params.n_keep > 0) { - LOG_TEE("%s: static prompt based on n_keep: '", __func__); + if (params.n_keep > add_bos) { + LOG_TEE("%s: static prompt based on n_keep: '", __func__); for (int i = 0; i < params.n_keep; i++) { LOG_TEE("%s", llama_token_to_piece(ctx, embd_inp[i]).c_str()); } @@ -540,14 +542,14 @@ int main(int argc, char ** argv) { break; } - const int n_left = n_past - params.n_keep - 1; + const int n_left = n_past - params.n_keep; const int n_discard = n_left/2; LOG("context full, swapping: n_past = %d, n_left = %d, n_ctx = %d, n_keep = %d, n_discard = %d\n", n_past, n_left, n_ctx, params.n_keep, n_discard); - llama_kv_cache_seq_rm (ctx, 0, params.n_keep + 1 , params.n_keep + 
n_discard + 1); - llama_kv_cache_seq_shift(ctx, 0, params.n_keep + 1 + n_discard, n_past, -n_discard); + llama_kv_cache_seq_rm (ctx, 0, params.n_keep , params.n_keep + n_discard); + llama_kv_cache_seq_shift(ctx, 0, params.n_keep + n_discard, n_past, -n_discard); n_past -= n_discard; diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 1c4479512..c84719a0d 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1487,14 +1487,15 @@ struct llama_server_context if (slot.is_processing() && system_tokens.size() + slot.cache_tokens.size() >= (size_t) slot.n_ctx) { // Shift context - const int n_left = system_tokens.size() + slot.n_past - slot.params.n_keep - 1; + const int n_keep = slot.params.n_keep + add_bos_token; + const int n_left = system_tokens.size() + slot.n_past - n_keep; const int n_discard = n_left / 2; - LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, slot.params.n_keep, n_left, n_discard); - llama_kv_cache_seq_rm (ctx, slot.id, slot.params.n_keep + 1 , slot.params.n_keep + n_discard + 1); - llama_kv_cache_seq_shift(ctx, slot.id, slot.params.n_keep + 1 + n_discard, system_tokens.size() + slot.n_past, -n_discard); + LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, n_keep, n_left, n_discard); + llama_kv_cache_seq_rm (ctx, slot.id, n_keep , n_keep + n_discard); + llama_kv_cache_seq_shift(ctx, slot.id, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); - for (size_t i = slot.params.n_keep + 1 + n_discard; i < slot.cache_tokens.size(); i++) + for (size_t i = n_keep + n_discard; i < slot.cache_tokens.size(); i++) { slot.cache_tokens[i - n_discard] = slot.cache_tokens[i]; } @@ -1507,7 +1508,7 @@ struct llama_server_context LOG_VERBOSE("context shift", { { "n_ctx", n_ctx }, - { "n_keep", params.n_keep }, + { "n_keep", n_keep }, { "n_left", n_left }, }); } From ba2135ccae7462470b3865c6e41d2e1d734eac05 Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 21 Feb 2024 22:18:23 +0100 Subject: [PATCH 831/859] gemma : allow offloading the output tensor (#5646) --- llama.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 3a226c426..4054d5da6 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4394,6 +4394,8 @@ static bool llm_load_tensors( // output model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // same as tok_embd, duplicated to allow offloading + ml.n_created--; // artificial tensor const int64_t n_ff = hparams.n_ff; const int64_t n_embd_head_k = hparams.n_embd_head_k; @@ -7525,7 +7527,7 @@ struct llm_build_context { cb(cur, "result_norm", -1); // lm_head - cur = ggml_mul_mat(ctx0, model.tok_embd, cur); + cur = ggml_mul_mat(ctx0, model.output, cur); cb(cur, "result_output", -1); ggml_build_forward_expand(gf, cur); From 7fe4678b0244ba7b03eae66ebeaa947e2770bb1a Mon Sep 17 00:00:00 2001 From: slaren Date: Wed, 21 Feb 2024 22:52:39 +0100 Subject: [PATCH 832/859] llama : fix session save/load with quantized KV (#5649) --- llama.cpp | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/llama.cpp b/llama.cpp index 4054d5da6..d763cc80c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12176,18 +12176,19 @@ static void llama_copy_state_data_internal(struct llama_context * ctx, llama_dat data_ctx->write(&kv_used, sizeof(kv_used)); if (kv_buf_size) { - 
const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - std::vector<uint8_t> tmp_buf; for (int il = 0; il < (int) n_layer; ++il) { - tmp_buf.resize(elt_size*n_embd_k_gqa*kv_head); + size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_head); + tmp_buf.resize(k_size); ggml_backend_tensor_get(kv_self.k_l[il], tmp_buf.data(), 0, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); // v is not contiguous, copy row by row - tmp_buf.resize(elt_size*kv_head); + size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); + size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); + tmp_buf.resize(v_row_size); for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { - ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), ir*elt_size*n_ctx, tmp_buf.size()); + ggml_backend_tensor_get(kv_self.v_l[il], tmp_buf.data(), ir*v_row_stride, tmp_buf.size()); data_ctx->write(tmp_buf.data(), tmp_buf.size()); } } @@ -12289,17 +12290,16 @@ size_t llama_set_state_data(struct llama_context * ctx, uint8_t * src) { if (kv_buf_size) { GGML_ASSERT(kv_self.total_size() == kv_buf_size); - const size_t elt_size = ggml_element_size(kv_self.k_l[0]); - for (int il = 0; il < (int) n_layer; ++il) { - size_t k_size = elt_size*n_embd_k_gqa*kv_head; + size_t k_size = ggml_row_size(kv_self.k_l[il]->type, n_embd_k_gqa*kv_head); ggml_backend_tensor_set(kv_self.k_l[il], inp, 0, k_size); inp += k_size; // v is not contiguous, copy row by row - size_t v_row_size = elt_size*kv_head; + size_t v_row_size = ggml_row_size(kv_self.v_l[il]->type, kv_head); + size_t v_row_stride = ggml_row_size(kv_self.v_l[il]->type, n_ctx); for (int ir = 0; ir < (int) n_embd_v_gqa; ++ir) { - ggml_backend_tensor_set(kv_self.v_l[il], inp, ir*elt_size*n_ctx, v_row_size); + ggml_backend_tensor_set(kv_self.v_l[il], inp, ir*v_row_stride, v_row_size); inp += v_row_size; } } From 7c8bcc11dc61cf5930b70cd0168b84afcebe12a9 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Thu, 22 Feb 2024 00:31:00 +0100 Subject: [PATCH 833/859] Add docs for llama_chat_apply_template (#5645) * add docs for llama_chat_apply_template * fix typo --- examples/server/README.md | 1 + llama.h | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/server/README.md b/examples/server/README.md index 6d9f96cd4..4b24ee5dc 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -41,6 +41,7 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` - `-n, --n-predict`: Set the maximum tokens to predict (default: -1) - `--slots-endpoint-disable`: To disable slots state monitoring endpoint. Slots state may contain user data, prompts included. +- `--chat-template JINJA_TEMPLATE`: Set custom jinja chat template. This parameter accepts a string, not a file name (default: template taken from model's metadata). We only support [some pre-defined templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) ## Build diff --git a/llama.h b/llama.h index 8ba20696f..84f196b3b 100644 --- a/llama.h +++ b/llama.h @@ -708,7 +708,7 @@ extern "C" { /// Apply chat template. Inspired by hf apply_chat_template() on python. /// Both "model" and "custom_template" are optional, but at least one is required. "custom_template" has higher precedence than "model" - /// NOTE: This function only support some known jinja templates.
It is not a jinja parser. + /// NOTE: This function does not use a jinja parser. It only supports a pre-defined list of templates. See more: https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template /// @param tmpl A Jinja template to use for this chat. If this is nullptr, the model’s default chat template will be used instead. /// @param chat Pointer to a list of multiple llama_chat_message /// @param n_msg Number of llama_chat_message in this chat From 973053d8b0d04809836b3339a50f68d9c842de90 Mon Sep 17 00:00:00 2001 From: slaren Date: Thu, 22 Feb 2024 00:42:09 +0100 Subject: [PATCH 834/859] llama : fix loading models with shared tok_embd and output (#5651) ggml-ci --- llama.cpp | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/llama.cpp b/llama.cpp index d763cc80c..259f2a3a3 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2791,13 +2791,7 @@ struct llama_model_loader { std::vector<no_init<uint8_t>> read_buf; - for (int i = 0; i < gguf_get_n_tensors(ctx_gguf); i++) { - struct ggml_tensor * cur = ggml_get_tensor(ctx, gguf_get_tensor_name(ctx_gguf, i)); - if (!cur) { - // some tensors may be allocated in a different context - continue; - } - + for (struct ggml_tensor * cur = ggml_get_first_tensor(ctx); cur != NULL; cur = ggml_get_next_tensor(ctx, cur)) { if (progress_callback) { if (!progress_callback((float) size_done / size_data, progress_callback_user_data)) { return false; @@ -3722,7 +3716,7 @@ static bool llm_load_tensors( } // create one context per buffer type - size_t ctx_size = ggml_tensor_overhead()*ml.n_tensors; + size_t ctx_size = ggml_tensor_overhead()*(ml.n_tensors + 1); // +1 for models where tok_embd is duplicated as output std::map<ggml_backend_buffer_type_t, ggml_context *> ctx_map; for (auto & it : buft_layer_count) { struct ggml_init_params params = { @@ -3860,6 +3854,7 @@ static bool llm_load_tensors( } else { model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // needs to be on GPU ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); } } @@ -4396,6 +4391,7 @@ static bool llm_load_tensors( model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); model.output = ml.create_tensor(ctx_output, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); // same as tok_embd, duplicated to allow offloading ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); const int64_t n_ff = hparams.n_ff; const int64_t n_embd_head_k = hparams.n_embd_head_k; From 4ef245a92a968ba0f18a5adfd41e51980ce4fdf5 Mon Sep 17 00:00:00 2001 From: Dat Quoc Nguyen <2412555+datquocnguyen@users.noreply.github.com> Date: Thu, 22 Feb 2024 18:15:13 +1000 Subject: [PATCH 835/859] mpt : add optional bias tensors (#5638) Update for MPT with optional bias parameters: to work with PhoGPT and SEA-LION models that were pre-trained with 'bias'.
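A minimal sketch of the optional-tensor idiom this change relies on, with names taken from the diff below and the surrounding loader/graph code elided; the trailing `false` argument to `create_tensor` is assumed to be the loader's "not required" flag, so a bias missing from the checkpoint resolves to NULL and every use is guarded:

// sketch only: load the QKV bias as optional; NULL when the checkpoint lacks it
layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i),
                              {n_embd + 2*n_embd_gqa}, false); // false = optional

// later, while building the graph, apply the bias only if the model ships one
if (model.layers[il].bqkv) {
    cur = ggml_add(ctx0, cur, model.layers[il].bqkv);
    cb(cur, "bqkv", il);
}

This keeps a single code path for bias-free MPT checkpoints and for PhoGPT/SEA-LION checkpoints that do provide the bias tensors.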
--- llama.cpp | 38 +++++++++++++++++++++++++++----------- 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/llama.cpp b/llama.cpp index 259f2a3a3..9cae8c761 100644 --- a/llama.cpp +++ b/llama.cpp @@ -4054,6 +4054,8 @@ static bool llm_load_tensors( // output { model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); + model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, false); + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); } @@ -4063,14 +4065,23 @@ static bool llm_load_tensors( auto & layer = model.layers[i]; - layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "weight", i), {n_embd}); + layer.attn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_NORM, "bias", i), {n_embd}, false); layer.wqkv = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_QKV, "weight", i), {n_embd, n_embd + 2*n_embd_gqa}); - layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bqkv = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_QKV, "bias", i), {n_embd + 2*n_embd_gqa}, false); - layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); - layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), { n_ff, n_embd}); - layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.wo = ml.create_tensor(ctx_split, tn(LLM_TENSOR_ATTN_OUT, "weight", i), {n_embd, n_embd}); + layer.bo = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_ATTN_OUT, "bias", i), {n_embd}, false); + + layer.ffn_norm = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "weight", i), {n_embd}); + layer.ffn_norm_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_NORM, "bias", i), {n_embd}, false); + + layer.ffn_down = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_DOWN, "weight", i), {n_ff, n_embd}); + layer.ffn_down_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_DOWN, "bias", i), {n_embd}, false); + + layer.ffn_up = ml.create_tensor(ctx_split, tn(LLM_TENSOR_FFN_UP, "weight", i), {n_embd, n_ff}); + layer.ffn_up_b = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_UP, "bias", i), {n_ff}, false); // AWQ ScaleActivation layer layer.ffn_act = ml.create_tensor(ctx_layer, tn(LLM_TENSOR_FFN_ACT, "scales", i), {n_ff}, false); @@ -6171,7 +6182,7 @@ struct llm_build_context { attn_norm = llm_build_norm(ctx0, inpL, hparams, model.layers[il].attn_norm, - NULL, + model.layers[il].attn_norm_b, LLM_NORM, cb, il); cb(attn_norm, "attn_norm", il); @@ -6181,6 +6192,11 @@ struct llm_build_context { cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); cb(cur, "wqkv", il); + + if (model.layers[il].bqkv){ + cur = ggml_add(ctx0, cur, model.layers[il].bqkv); + cb(cur, "bqkv", il); + } if (hparams.f_clamp_kqv > 0.0f) { cur = ggml_clamp(ctx0, cur, -hparams.f_clamp_kqv, hparams.f_clamp_kqv); @@ -6198,7 +6214,7 @@ struct llm_build_context { Qcur = ggml_reshape_3d(ctx0, Qcur, n_embd_head, n_head, n_tokens); cur = llm_build_kv(ctx0, model, hparams, kv_self, gf, - model.layers[il].wo, NULL, + model.layers[il].wo, model.layers[il].bo, Kcur, Vcur, Qcur, KQ_mask, KQ_pos, n_ctx, n_tokens, kv_head, n_kv, 1.0f/sqrtf(float(n_embd_head)), cb, il); cb(cur, "kqv_out", il); } @@ -6211,13 +6227,13 @@ struct llm_build_context { { cur = llm_build_norm(ctx0, ffn_inp, hparams, 
model.layers[il].ffn_norm, - NULL, + model.layers[il].ffn_norm_b, LLM_NORM, cb, il); cb(cur, "ffn_norm", il); cur = llm_build_ffn(ctx0, cur, - model.layers[il].ffn_up, NULL, + model.layers[il].ffn_up, model.layers[il].ffn_up_b, NULL, NULL, - model.layers[il].ffn_down, NULL, + model.layers[il].ffn_down, model.layers[il].ffn_down_b, model.layers[il].ffn_act, LLM_FFN_GELU, LLM_FFN_SEQ, cb, il); cb(cur, "ffn_out", il); @@ -6234,7 +6250,7 @@ struct llm_build_context { cur = llm_build_norm(ctx0, cur, hparams, model.output_norm, - NULL, + model.output_norm_b, LLM_NORM, cb, -1); cb(cur, "result_norm", -1); From c5688c6250430d2b8e0259efcf26c16dfa4c1f46 Mon Sep 17 00:00:00 2001 From: Alexey Parfenov Date: Thu, 22 Feb 2024 08:27:32 +0000 Subject: [PATCH 836/859] server : clarify some params in the docs (#5640) --- examples/server/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 4b24ee5dc..4b6cd8326 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -151,7 +151,7 @@ node index.js `temperature`: Adjust the randomness of the generated text (default: 0.8). - `dynatemp_range`: Dynamic temperature range (default: 0.0, 0.0 = disabled). + `dynatemp_range`: Dynamic temperature range. The final temperature will be in the range of `[temperature - dynatemp_range; temperature + dynatemp_range]` (default: 0.0, 0.0 = disabled). `dynatemp_exponent`: Dynamic temperature exponent (default: 1.0). @@ -209,7 +209,7 @@ node index.js `slot_id`: Assign the completion task to an specific slot. If is -1 the task will be assigned to a Idle slot (default: -1) - `cache_prompt`: Save the prompt and generation for avoid reprocess entire prompt if a part of this isn't change (default: false) + `cache_prompt`: Re-use previously cached prompt from the last request if possible. This may prevent re-caching the prompt from scratch. (default: false) `system_prompt`: Change the system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime) @@ -242,7 +242,7 @@ Notice that each `probs` is an array of length `n_probs`. - `content`: Completion result as a string (excluding `stopping_word` if any). In case of streaming mode, will contain the next token as a string. - `stop`: Boolean for use with `stream` to check whether the generation has stopped (Note: This is not related to stopping words array `stop` from input options) -- `generation_settings`: The provided options above excluding `prompt` but including `n_ctx`, `model` +- `generation_settings`: The provided options above excluding `prompt` but including `n_ctx`, `model`. These options may differ from the original ones in some way (e.g. bad values filtered out, strings converted to tokens, etc.). 
- `model`: The path to the model loaded with `-m` - `prompt`: The provided `prompt` - `stopped_eos`: Indicating whether the completion has stopped because it encountered the EOS token From a46f50747b2028f7f9c9883b26bfba12bf92556e Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Thu, 22 Feb 2024 09:33:24 +0100 Subject: [PATCH 837/859] server : fallback to chatml, add AlphaMonarch chat template (#5628) * server: fallback to chatml * add new chat template * server: add AlphaMonarch to test chat template * server: only check model template if there is no custom tmpl * remove TODO --- examples/server/server.cpp | 15 +++++++++++++++ llama.cpp | 9 +++++++++ tests/test-chat-template.cpp | 23 +++++++++++++++-------- 3 files changed, 39 insertions(+), 8 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index c84719a0d..369121e88 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -400,6 +400,16 @@ struct llama_server_context return true; } + void validate_model_chat_template(server_params & sparams) { + llama_chat_message chat[] = {{"user", "test"}}; + std::vector buf(1); + int res = llama_chat_apply_template(model, nullptr, chat, 1, true, buf.data(), buf.size()); + if (res < 0) { + LOG_ERROR("The chat template comes with this model is not yet supported, falling back to chatml. This may cause the model to output suboptimal responses", {}); + sparams.chat_template = "<|im_start|>"; // llama_chat_apply_template only checks if <|im_start|> exist in the template + } + } + void initialize() { // create slots all_slots_are_idle = true; @@ -2752,6 +2762,11 @@ int main(int argc, char **argv) LOG_INFO("model loaded", {}); } + if (sparams.chat_template.empty()) { // custom chat template is not supplied + // check if the template comes with the model is supported by us + llama.validate_model_chat_template(sparams); + } + // Middleware for API key validation auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool { // If API key is not set, skip validation diff --git a/llama.cpp b/llama.cpp index 9cae8c761..055b57e31 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12773,6 +12773,15 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "<|assistant|>\n"; } + } else if (tmpl.find("bos_token + message['role']") != std::string::npos) { + // mlabonne/AlphaMonarch-7B template (the is included inside history) + for (auto message : chat) { + std::string bos = (message == chat.front()) ? 
"" : ""; // skip BOS for first message + ss << bos << message->role << "\n" << message->content << "\n"; + } + if (add_ass) { + ss << "assistant\n"; + } } else { // template not supported return -1; diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp index 9830650d4..d02b39e14 100644 --- a/tests/test-chat-template.cpp +++ b/tests/test-chat-template.cpp @@ -27,12 +27,20 @@ int main(void) { "{%- for idx in range(0, messages|length) -%}\\n{%- if messages[idx]['role'] == 'user' -%}\\n{%- if idx > 1 -%}\\n{{- bos_token + '[INST] ' + messages[idx]['content'] + ' [/INST]' -}}\\n{%- else -%}\\n{{- messages[idx]['content'] + ' [/INST]' -}}\\n{%- endif -%}\\n{% elif messages[idx]['role'] == 'system' %}\\n{{- '[INST] <>\\\\n' + messages[idx]['content'] + '\\\\n<>\\\\n\\\\n' -}}\\n{%- elif messages[idx]['role'] == 'assistant' -%}\\n{{- ' ' + messages[idx]['content'] + ' ' + eos_token -}}\\n{% endif %}\\n{% endfor %}", // bofenghuang/vigogne-2-70b-chat "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif true == true and not '<>' in messages[0]['content'] %}{% set loop_messages = messages %}{% set system_message = 'Vous êtes Vigogne, un assistant IA créé par Zaion Lab. Vous suivez extrêmement bien les instructions. Aidez autant que vous le pouvez.' %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<>\\\\n' + system_message + '\\\\n<>\\\\n\\\\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'system' %}{{ '<>\\\\n' + content.strip() + '\\\\n<>\\\\n\\\\n' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}", + // mlabonne/AlphaMonarch-7B + "{% for message in messages %}{{bos_token + message['role'] + '\\n' + message['content'] + eos_token + '\\n'}}{% endfor %}{% if add_generation_prompt %}{{ bos_token + 'assistant\\n' }}{% endif %}", }; - std::vector expected_substr = { - "<|im_start|>assistant\n I am an assistant <|im_end|>\n<|im_start|>user\nAnother question<|im_end|>\n<|im_start|>assistant", - "[/INST]Hi there[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", - "[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", - "[/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + std::vector expected_output = { + // teknium/OpenHermes-2.5-Mistral-7B + "<|im_start|>system\nYou are a helpful assistant<|im_end|>\n<|im_start|>user\nHello<|im_end|>\n<|im_start|>assistant\nHi there<|im_end|>\n<|im_start|>user\nWho are you<|im_end|>\n<|im_start|>assistant\n I am an assistant <|im_end|>\n<|im_start|>user\nAnother question<|im_end|>\n<|im_start|>assistant\n", + // mistralai/Mistral-7B-Instruct-v0.2 + "[INST] You are a helpful assistant\nHello [/INST]Hi there[INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + // TheBloke/FusionNet_34Bx2_MoE-AWQ + "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another 
question [/INST]", + // bofenghuang/vigogne-2-70b-chat + "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", + // mlabonne/AlphaMonarch-7B + "system\nYou are a helpful assistant\nuser\nHello\nassistant\nHi there\nuser\nWho are you\nassistant\n I am an assistant \nuser\nAnother question\nassistant\n", }; std::vector formatted_chat(1024); int32_t res; @@ -43,7 +51,7 @@ int main(void) { for (size_t i = 0; i < templates.size(); i++) { std::string custom_template = templates[i]; - std::string substr = expected_substr[i]; + std::string expected = expected_output[i]; formatted_chat.resize(1024); res = llama_chat_apply_template( nullptr, @@ -57,8 +65,7 @@ int main(void) { formatted_chat.resize(res); std::string output(formatted_chat.data(), formatted_chat.size()); std::cout << output << "\n-------------------------\n"; - // expect the "formatted_chat" to contain pre-defined strings - assert(output.find(substr) != std::string::npos); + assert(output == expected); } return 0; } From 56d03d92be57f5880b9ed94542d87bb6effae31f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 10:35:54 +0200 Subject: [PATCH 838/859] readme : update hot topics --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index c1624b9f9..3bc512af0 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Inference of Meta's [LLaMA](https://arxiv.org/abs/2302.13971) model (and others) ### Hot topics +- Support for chat templates: [Wiki (contributions welcome)](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) - Support for Gemma models: https://github.com/ggerganov/llama.cpp/pull/5631 - Non-linear quantization IQ4_NL: https://github.com/ggerganov/llama.cpp/pull/5590 - Looking for contributions to improve and maintain the `server` example: https://github.com/ggerganov/llama.cpp/issues/4216 From 3a03541cedea474fa9d41214484cc3fbcf468a9e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 13:54:03 +0200 Subject: [PATCH 839/859] minor : fix trailing whitespace (#5638) --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 055b57e31..6ab5e1bf4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -6192,7 +6192,7 @@ struct llm_build_context { cur = ggml_mul_mat(ctx0, model.layers[il].wqkv, cur); cb(cur, "wqkv", il); - + if (model.layers[il].bqkv){ cur = ggml_add(ctx0, cur, model.layers[il].bqkv); cb(cur, "bqkv", il); From 4cb4d8b22d4fda971621a68c570ce84d66897c37 Mon Sep 17 00:00:00 2001 From: Someone Date: Thu, 22 Feb 2024 16:32:09 +0000 Subject: [PATCH 840/859] workflows: nix: hardcode cachix ids, build unconditionally (#5663) GitHub does not expose environment and repository variables to PRs coming from forks implies that we've been disabling the Nix CI actions for most PRs. The `if:` also didn't make much sense, because we can always pull from cachix, and there's no point (albeit no risk either) in pushing cache for the untrusted code. 
--- .github/workflows/nix-ci-aarch64.yml | 7 +++---- .github/workflows/nix-ci.yml | 11 +++++------ 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/.github/workflows/nix-ci-aarch64.yml b/.github/workflows/nix-ci-aarch64.yml index 0c6cf5f09..8d0a3fd7f 100644 --- a/.github/workflows/nix-ci-aarch64.yml +++ b/.github/workflows/nix-ci-aarch64.yml @@ -19,7 +19,6 @@ on: jobs: nix-build-aarch64: - if: ${{ vars.CACHIX_NAME != '' }} runs-on: ubuntu-latest steps: - name: Checkout repository @@ -37,8 +36,8 @@ jobs: extra-conf: | extra-platforms = aarch64-linux extra-system-features = nixos-test kvm - extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org - extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + extra-substituters = https://llama-cpp.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc= cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= - uses: DeterminateSystems/magic-nix-cache-action@v2 with: upstream-cache: https://${{ matrix.cachixName }}.cachix.org @@ -46,7 +45,7 @@ jobs: uses: cachix/cachix-action@v13 with: authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - name: ${{ vars.CACHIX_NAME }} + name: llama-cpp - name: Show all output paths run: > nix run github:nix-community/nix-eval-jobs diff --git a/.github/workflows/nix-ci.yml b/.github/workflows/nix-ci.yml index d19c7a576..01c5a9d5a 100644 --- a/.github/workflows/nix-ci.yml +++ b/.github/workflows/nix-ci.yml @@ -23,8 +23,8 @@ jobs: with: github-token: ${{ secrets.GITHUB_TOKEN }} extra-conf: | - extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org - extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + extra-substituters = https://llama-cpp.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc= cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= - uses: DeterminateSystems/magic-nix-cache-action@v2 with: upstream-cache: https://${{ matrix.cachixName }}.cachix.org @@ -37,7 +37,6 @@ jobs: --flake ".#packages.$(nix eval --raw --impure --expr builtins.currentSystem)" nix-build: - if: ${{ vars.CACHIX_NAME != '' }} strategy: fail-fast: false matrix: @@ -51,8 +50,8 @@ jobs: with: github-token: ${{ secrets.GITHUB_TOKEN }} extra-conf: | - extra-substituters = https://${{ vars.CACHIX_NAME }}.cachix.org https://cuda-maintainers.cachix.org - extra-trusted-public-keys = ${{ vars.CACHIX_PUBLIC_KEY }} cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= + extra-substituters = https://llama-cpp.cachix.org https://cuda-maintainers.cachix.org + extra-trusted-public-keys = llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc= cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E= - uses: DeterminateSystems/magic-nix-cache-action@v2 with: upstream-cache: https://${{ matrix.cachixName }}.cachix.org @@ -60,7 +59,7 @@ jobs: uses: cachix/cachix-action@v13 with: authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}' - name: ${{ vars.CACHIX_NAME }} + name: llama-cpp - name: Build run: > nix run github:Mic92/nix-fast-build From 373ee3fbbabc4c1508eed4f5c3795b23a20939a3 Mon Sep 17 00:00:00 2001 From: Xuan Son Nguyen Date: Thu, 22 Feb 
2024 19:10:21 +0100 Subject: [PATCH 841/859] Add Gemma chat template (#5665) * add gemma chat template * gemma: only apply system_prompt on non-model message --- llama.cpp | 22 ++++++++++++++++++++++ tests/test-chat-template.cpp | 4 ++++ 2 files changed, 26 insertions(+) diff --git a/llama.cpp b/llama.cpp index 6ab5e1bf4..40dda265c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12782,6 +12782,28 @@ static int32_t llama_chat_apply_template_internal( if (add_ass) { ss << "assistant\n"; } + } else if (tmpl.find("") != std::string::npos) { + // google/gemma-7b-it + std::string system_prompt = ""; + for (auto message : chat) { + std::string role(message->role); + if (role == "system") { + // there is no system message for gemma, but we will merge it with user prompt, so nothing is broken + system_prompt = trim(message->content); + continue; + } + // in gemma, "assistant" is "model" + role = role == "assistant" ? "model" : message->role; + ss << "" << role << "\n"; + if (!system_prompt.empty() && role != "model") { + ss << system_prompt << "\n\n"; + system_prompt = ""; + } + ss << trim(message->content) << "\n"; + } + if (add_ass) { + ss << "model\n"; + } } else { // template not supported return -1; diff --git a/tests/test-chat-template.cpp b/tests/test-chat-template.cpp index d02b39e14..fa2eb577b 100644 --- a/tests/test-chat-template.cpp +++ b/tests/test-chat-template.cpp @@ -29,6 +29,8 @@ int main(void) { "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% elif true == true and not '<>' in messages[0]['content'] %}{% set loop_messages = messages %}{% set system_message = 'Vous êtes Vigogne, un assistant IA créé par Zaion Lab. Vous suivez extrêmement bien les instructions. Aidez autant que vous le pouvez.' 
%}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<>\\\\n' + system_message + '\\\\n<>\\\\n\\\\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'system' %}{{ '<>\\\\n' + content.strip() + '\\\\n<>\\\\n\\\\n' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}", // mlabonne/AlphaMonarch-7B "{% for message in messages %}{{bos_token + message['role'] + '\\n' + message['content'] + eos_token + '\\n'}}{% endfor %}{% if add_generation_prompt %}{{ bos_token + 'assistant\\n' }}{% endif %}", + // google/gemma-7b-it + "{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '' + role + '\\n' + message['content'] | trim + '\\n' }}{% endfor %}{% if add_generation_prompt %}{{'model\\n'}}{% endif %}", }; std::vector expected_output = { // teknium/OpenHermes-2.5-Mistral-7B @@ -41,6 +43,8 @@ int main(void) { "[INST] <>\nYou are a helpful assistant\n<>\n\nHello [/INST] Hi there [INST] Who are you [/INST] I am an assistant [INST] Another question [/INST]", // mlabonne/AlphaMonarch-7B "system\nYou are a helpful assistant\nuser\nHello\nassistant\nHi there\nuser\nWho are you\nassistant\n I am an assistant \nuser\nAnother question\nassistant\n", + // google/gemma-7b-it + "user\nYou are a helpful assistant\n\nHello\nmodel\nHi there\nuser\nWho are you\nmodel\nI am an assistant\nuser\nAnother question\nmodel\n", }; std::vector formatted_chat(1024); int32_t res; From 5a9e2f60ba3d8362ba17c77ac3092906d49b813f Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 20:13:25 +0200 Subject: [PATCH 842/859] py : minor fixes (#5668) --- convert-hf-to-gguf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 9771fccf9..8630bbf29 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -655,6 +655,8 @@ class OrionModel(Model): self.gguf_writer.add_feed_forward_length(self.hparams["intermediate_size"]) self.gguf_writer.add_head_count(head_count) self.gguf_writer.add_head_count_kv(head_count_kv) + # note: config provides rms norm but it is actually layer norm + # ref: https://huggingface.co/OrionStarAI/Orion-14B-Chat/blob/276a17221ce42beb45f66fac657a41540e71f4f5/modeling_orion.py#L570-L571 self.gguf_writer.add_layer_norm_eps(self.hparams["rms_norm_eps"]) def write_tensors(self): @@ -1031,7 +1033,6 @@ class PersimmonModel(Model): self.gguf_writer.add_head_count_kv(head_count_kv) self.gguf_writer.add_rope_freq_base(self.hparams["rope_theta"]) self.gguf_writer.add_layer_norm_eps(self.hparams["layer_norm_eps"]) - self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) def set_vocab(self): self._set_vocab_sentencepiece() From 201294ae177b308fb3a99dc504dd6d27e8afa907 Mon 
Sep 17 00:00:00 2001 From: Someone Date: Thu, 22 Feb 2024 19:44:10 +0000 Subject: [PATCH 843/859] nix: init singularity and docker images (#5056) Exposes a few attributes demonstrating how to build [singularity](https://docs.sylabs.io/guides/latest/user-guide/)/[apptainer](https://apptainer.org/) and Docker images re-using llama.cpp's Nix expression. Built locally on `x86_64-linux` with `nix build github:someoneserge/llama.cpp/feat/nix/images#llamaPackages.{docker,docker-min,sif,llama-cpp}` and it's fast and effective. --- .devops/nix/docker.nix | 37 +++++++++++++++++++++++++++++++++++++ .devops/nix/scope.nix | 3 +++ .devops/nix/sif.nix | 27 +++++++++++++++++++++++++++ 3 files changed, 67 insertions(+) create mode 100644 .devops/nix/docker.nix create mode 100644 .devops/nix/sif.nix diff --git a/.devops/nix/docker.nix b/.devops/nix/docker.nix new file mode 100644 index 000000000..d607b4575 --- /dev/null +++ b/.devops/nix/docker.nix @@ -0,0 +1,37 @@ +{ + lib, + dockerTools, + buildEnv, + llama-cpp, + interactive ? true, + coreutils, +}: + +# A tar that can be fed into `docker load`: +# +# $ nix build .#llamaPackages.docker +# $ docker load < result + +# For details and variations cf. +# - https://nixos.org/manual/nixpkgs/unstable/#ssec-pkgs-dockerTools-buildLayeredImage +# - https://discourse.nixos.org/t/a-faster-dockertools-buildimage-prototype/16922 +# - https://nixery.dev/ + +# Approximate (compressed) sizes, at the time of writing, are: +# +# .#llamaPackages.docker: 125M; +# .#llamaPackagesCuda.docker: 537M; +# .#legacyPackages.aarch64-linux.llamaPackagesXavier.docker: 415M. + +dockerTools.buildLayeredImage { + name = llama-cpp.pname; + tag = "latest"; + + contents = + [ llama-cpp ] + ++ lib.optionals interactive [ + coreutils + dockerTools.binSh + dockerTools.caCertificates + ]; +} diff --git a/.devops/nix/scope.nix b/.devops/nix/scope.nix index d295995a4..78530c9e8 100644 --- a/.devops/nix/scope.nix +++ b/.devops/nix/scope.nix @@ -12,5 +12,8 @@ lib.makeScope newScope ( self: { inherit llamaVersion; llama-cpp = self.callPackage ./package.nix { }; + docker = self.callPackage ./docker.nix { }; + docker-min = self.callPackage ./docker.nix { interactive = false; }; + sif = self.callPackage ./sif.nix { }; } ) diff --git a/.devops/nix/sif.nix b/.devops/nix/sif.nix new file mode 100644 index 000000000..7535ca0f3 --- /dev/null +++ b/.devops/nix/sif.nix @@ -0,0 +1,27 @@ +{ + lib, + singularity-tools, + llama-cpp, + bashInteractive, + interactive ? false, +}: + +let + optionalInt = cond: x: if cond then x else 0; +in +singularity-tools.buildImage rec { + inherit (llama-cpp) name; + contents = [ llama-cpp ] ++ lib.optionals interactive [ bashInteractive ]; + + # These are excessive (but safe) for most variants. Building singularity + # images requires superuser privileges, so we build them inside a VM in a + # writable image of pre-determined size. 
+ # + # ROCm is currently affected by https://github.com/NixOS/nixpkgs/issues/276846 + # + # Expected image sizes: + # - cpu/blas: 150M, + # - cuda, all gencodes: 560M, + diskSize = 4096 + optionalInt llama-cpp.useRocm 16384; + memSize = diskSize; +} From efd56b1c2139d50b9b4381a212feb75d69598fda Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 18:31:40 +0200 Subject: [PATCH 844/859] ggml : 32-bit arm compat (whisper/1891) * ggml : 32-bit arm compat * ggml : add ggml_vqtbl1q_s8 impl * ggml : cont --- ggml-quants.c | 35 ++++++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/ggml-quants.c b/ggml-quants.c index 6336538f0..8917c8af1 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -438,6 +438,30 @@ inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { return res; } +// NOTE: not tested +inline static int8x16_t ggml_vqtbl1q_s8(int8x16_t a, uint8x16_t b) { + int8x16_t res; + + res[ 0] = a[b[ 0]]; + res[ 1] = a[b[ 1]]; + res[ 2] = a[b[ 2]]; + res[ 3] = a[b[ 3]]; + res[ 4] = a[b[ 4]]; + res[ 5] = a[b[ 5]]; + res[ 6] = a[b[ 6]]; + res[ 7] = a[b[ 7]]; + res[ 8] = a[b[ 8]]; + res[ 9] = a[b[ 9]]; + res[10] = a[b[10]]; + res[11] = a[b[11]]; + res[12] = a[b[12]]; + res[13] = a[b[13]]; + res[14] = a[b[14]]; + res[15] = a[b[15]]; + + return res; +} + #else #define ggml_int16x8x2_t int16x8x2_t @@ -451,6 +475,7 @@ inline static ggml_int8x16x4_t ggml_vld1q_s8_x4(const int8_t * ptr) { #define ggml_vld1q_u8_x4 vld1q_u8_x4 #define ggml_vld1q_s8_x2 vld1q_s8_x2 #define ggml_vld1q_s8_x4 vld1q_s8_x4 +#define ggml_vqtbl1q_s8 vqtbl1q_s8 #endif @@ -9333,7 +9358,7 @@ void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const uint16_t gindex[8]; uint16x8x2_t vindex; int8x16x4_t q1b; - int8x16x4_t q8b; + ggml_int8x16x4_t q8b; uint16x8x4_t scales; int32x4x2_t sumi; int32x4x2_t dotq; @@ -9506,10 +9531,10 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * q8b.val[2] = vld1q_s8(y[ib+1].qs); q8b.val[3] = vld1q_s8(y[ib+1].qs + 16); - q4b.val[0] = vqtbl1q_s8(values, vandq_u8(q4bits.val[0], m4b)); - q4b.val[1] = vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); - q4b.val[2] = vqtbl1q_s8(values, vandq_u8(q4bits.val[1], m4b)); - q4b.val[3] = vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); + q4b.val[0] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[0], m4b)); + q4b.val[1] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[0], 4)); + q4b.val[2] = ggml_vqtbl1q_s8(values, vandq_u8 (q4bits.val[1], m4b)); + q4b.val[3] = ggml_vqtbl1q_s8(values, vshrq_n_u8(q4bits.val[1], 4)); prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); From 334f76fa385ed81095165e5ae068756214893901 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 23:21:05 +0200 Subject: [PATCH 845/859] sync : ggml --- scripts/sync-ggml.last | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/sync-ggml.last b/scripts/sync-ggml.last index bbbf88d9d..59de34370 100644 --- a/scripts/sync-ggml.last +++ b/scripts/sync-ggml.last @@ -1 +1 @@ -30805514e1bf389a59d30a54a0525cbdc30d5bd1 +8cdf783f288a98eddf521b0ab1b4d405be9e18ba From 7e4f339c404dbe029d4a117c03b37a9bf646cf0e Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 23:21:39 +0200 Subject: [PATCH 846/859] ggml : always define ggml_fp16_t as uint16_t (#5666) * ggml : always define ggml_fp16_t 
as uint16_t ggml-ci * ggml : cont ggml-ci * ggml : cont * ggml : cont ggml-ci * ggml : cont ggml-ci * cuda : no longer ggml headers last ggml-ci * ggml : fix q6_K FP16 -> FP32 conversion ggml-ci * ggml : more FP16 -> FP32 conversion fixes ggml-ci --- ggml-cuda.cu | 9 ++++----- ggml-impl.h | 27 ++++++++++++++++++++------- ggml-quants.c | 30 +++++++++++++++--------------- ggml.c | 6 +++--- ggml.h | 6 ------ 5 files changed, 42 insertions(+), 36 deletions(-) diff --git a/ggml-cuda.cu b/ggml-cuda.cu index e7c211d7d..b0e454e02 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -1,3 +1,7 @@ +#include "ggml-cuda.h" +#include "ggml.h" +#include "ggml-backend-impl.h" + #include #include #include @@ -121,11 +125,6 @@ #endif // defined(GGML_USE_HIPBLAS) -// ggml-cuda need half type so keep ggml headers include at last -#include "ggml-cuda.h" -#include "ggml.h" -#include "ggml-backend-impl.h" - #define CUDART_HMAX 11070 // CUDA 11.7, min. ver. for which __hmax and __hmax2 are known to work (may be higher than needed) #define CC_PASCAL 600 diff --git a/ggml-impl.h b/ggml-impl.h index 19df66bce..c5637e4d4 100644 --- a/ggml-impl.h +++ b/ggml-impl.h @@ -53,11 +53,23 @@ extern "C" { // #include -#define GGML_COMPUTE_FP16_TO_FP32(x) ((float) (x)) -#define GGML_COMPUTE_FP32_TO_FP16(x) (x) +#define GGML_COMPUTE_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) +#define GGML_COMPUTE_FP32_TO_FP16(x) ggml_compute_fp32_to_fp16(x) -#define GGML_FP16_TO_FP32(x) ((float) (x)) -#define GGML_FP32_TO_FP16(x) (x) +#define GGML_FP16_TO_FP32(x) ggml_compute_fp16_to_fp32(x) + +static inline float ggml_compute_fp16_to_fp32(ggml_fp16_t h) { + __fp16 tmp; + memcpy(&tmp, &h, sizeof(ggml_fp16_t)); + return (float)tmp; +} + +static inline ggml_fp16_t ggml_compute_fp32_to_fp16(float f) { + ggml_fp16_t res; + __fp16 tmp = f; + memcpy(&res, &tmp, sizeof(ggml_fp16_t)); + return res; +} #else @@ -214,8 +226,7 @@ extern float ggml_table_f32_f16[1 << 16]; // On ARM NEON, it's quicker to directly convert x -> x instead of calling into ggml_lookup_fp16_to_fp32, // so we define GGML_FP16_TO_FP32 and GGML_FP32_TO_FP16 elsewhere for NEON. // This is also true for POWER9. 
-#if !defined(GGML_FP16_TO_FP32) || !defined(GGML_FP32_TO_FP16) - +#if !defined(GGML_FP16_TO_FP32) inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { uint16_t s; memcpy(&s, &f, sizeof(uint16_t)); @@ -223,8 +234,10 @@ inline static float ggml_lookup_fp16_to_fp32(ggml_fp16_t f) { } #define GGML_FP16_TO_FP32(x) ggml_lookup_fp16_to_fp32(x) -#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) +#endif +#if !defined(GGML_FP32_TO_FP16) +#define GGML_FP32_TO_FP16(x) GGML_COMPUTE_FP32_TO_FP16(x) #endif #define GGML_HASHTABLE_FULL ((size_t)-1) diff --git a/ggml-quants.c b/ggml-quants.c index 8917c8af1..b15977f53 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -5654,8 +5654,8 @@ void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d = y[i].d * (float)x[i].d; - const float dmin = -y[i].d * (float)x[i].dmin; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -5804,8 +5804,8 @@ void ggml_vec_dot_q2_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d = y[i].d * (float)x[i].d; - const float dmin = -y[i].d * (float)x[i].dmin; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); + const float dmin = -y[i].d * GGML_FP16_TO_FP32(x[i].dmin); const uint8_t * restrict q2 = x[i].qs; const int8_t * restrict q8 = y[i].qs; @@ -6458,7 +6458,7 @@ void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * r int32_t isum = -4*(scales[0] * y[i].bsums[0] + scales[2] * y[i].bsums[1] + scales[1] * y[i].bsums[2] + scales[3] * y[i].bsums[3]); - const float d = y[i].d * (float)x[i].d; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const uint8x16_t htmp = vcombine_u8(hbits, vshr_n_u8(hbits, 1)); q3h.val[0] = vandq_u8(mh, vshlq_n_u8(htmp, 2)); @@ -6660,7 +6660,7 @@ void ggml_vec_dot_q3_K_q8_K(int n, float * restrict s, size_t bs, const void * r int32_t isum = -4*(scales[0] * y[i].bsums[0] + scales[2] * y[i].bsums[1] + scales[1] * y[i].bsums[2] + scales[3] * y[i].bsums[3]); - const float d = y[i].d * (float)x[i].d; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); vint32m1_t vzero = __riscv_vmv_v_x_i32m1(0, 1); @@ -7163,9 +7163,9 @@ void ggml_vec_dot_q4_K_q8_K(int n, float * restrict s, size_t bs, const void * r aux16[1] = (a[0] >> 4) & 0x0f0f; const int32_t summi = scales[2] * (y[i].bsums[0] + y[i].bsums[1]) + scales[3] * (y[i].bsums[2] + y[i].bsums[3]); - sum_mins += y[i].d * (float)x[i].d[1] * summi; + sum_mins += y[i].d * GGML_FP16_TO_FP32(x[i].d[1]) * summi; - const float d = y[i].d * (float)x[i].d[0]; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d[0]); const ggml_uint8x16x2_t q4bits = ggml_vld1q_u8_x2(q4); @@ -7823,7 +7823,7 @@ void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d = y[i].d * (float)x[i].d; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const int8_t * sc = x[i].scales; const uint8_t * restrict q5 = x[i].qs; @@ -7965,7 +7965,7 @@ void ggml_vec_dot_q5_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d = y[i].d * (float)x[i].d; + const float d = y[i].d * GGML_FP16_TO_FP32(x[i].d); const int8_t * sc = x[i].scales; const uint8_t * restrict q5 = x[i].qs; @@ -8533,7 +8533,7 @@ void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * r for 
(int i = 0; i < nb; ++i) { - const float d_all = (float)x[i].d; + const float d_all = GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q6 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -8704,7 +8704,7 @@ void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * r for (int i = 0; i < nb; ++i) { - const float d_all = (float)x[i].d; + const float d_all = GGML_FP16_TO_FP32(x[i].d); const uint8_t * restrict q6 = x[i].ql; const uint8_t * restrict qh = x[i].qh; @@ -9523,7 +9523,6 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * float sumf = 0; for (int ib = 0; ib < nb; ib += 2) { - q4bits.val[0] = vld1q_u8(x[ib+0].qs); q4bits.val[1] = vld1q_u8(x[ib+1].qs); q8b.val[0] = vld1q_s8(y[ib+0].qs); @@ -9539,8 +9538,9 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * prod_1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[0], q8b.val[0]), q4b.val[1], q8b.val[1]); prod_2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q4b.val[2], q8b.val[2]), q4b.val[3], q8b.val[3]); - sumf += (float)x[ib+0].d * (float)y[ib+0].d * vaddvq_s32(prod_1) + (float)x[ib+1].d * (float)y[ib+1].d * vaddvq_s32(prod_2); - + sumf += + GGML_FP16_TO_FP32(x[ib+0].d) * GGML_FP16_TO_FP32(y[ib+0].d) * vaddvq_s32(prod_1) + + GGML_FP16_TO_FP32(x[ib+1].d) * GGML_FP16_TO_FP32(y[ib+1].d) * vaddvq_s32(prod_2); } *s = sumf; diff --git a/ggml.c b/ggml.c index 5b9fa741a..d710fe702 100644 --- a/ggml.c +++ b/ggml.c @@ -323,7 +323,7 @@ float ggml_table_f32_f16[1 << 16]; // note: do not use these inside ggml.c // these are meant to be used via the ggml.h API float ggml_fp16_to_fp32(ggml_fp16_t x) { - return (float) GGML_FP16_TO_FP32(x); + return GGML_FP16_TO_FP32(x); } ggml_fp16_t ggml_fp32_to_fp16(float x) { @@ -798,7 +798,7 @@ inline static float vaddvq_f32(float32x4_t v) { #define GGML_F16x8 float16x8_t #define GGML_F16x8_ZERO vdupq_n_f16(0.0f) #define GGML_F16x8_SET1(x) vdupq_n_f16(x) - #define GGML_F16x8_LOAD vld1q_f16 + #define GGML_F16x8_LOAD(x) vld1q_f16((const __fp16 *)(x)) #define GGML_F16x8_STORE vst1q_f16 #define GGML_F16x8_FMA(a, b, c) vfmaq_f16(a, b, c) #define GGML_F16x8_ADD vaddq_f16 @@ -841,7 +841,7 @@ inline static float vaddvq_f32(float32x4_t v) { #define GGML_F32Cx4 float32x4_t #define GGML_F32Cx4_ZERO vdupq_n_f32(0.0f) #define GGML_F32Cx4_SET1(x) vdupq_n_f32(x) - #define GGML_F32Cx4_LOAD(x) vcvt_f32_f16(vld1_f16(x)) + #define GGML_F32Cx4_LOAD(x) vcvt_f32_f16(vld1_f16((const __fp16 *)(x))) #define GGML_F32Cx4_STORE(x, y) vst1_f16(x, vcvt_f16_f32(y)) #define GGML_F32Cx4_FMA(a, b, c) vfmaq_f32(a, b, c) #define GGML_F32Cx4_ADD vaddq_f32 diff --git a/ggml.h b/ggml.h index bed7a36a0..37eff6279 100644 --- a/ggml.h +++ b/ggml.h @@ -315,13 +315,7 @@ extern "C" { #endif -#if defined(__ARM_NEON) && defined(__CUDACC__) - typedef half ggml_fp16_t; -#elif defined(__ARM_NEON) && !defined(_MSC_VER) - typedef __fp16 ggml_fp16_t; -#else typedef uint16_t ggml_fp16_t; -#endif // convert FP16 <-> FP32 GGML_API float ggml_fp16_to_fp32(ggml_fp16_t x); From 847eedbdb2d1ebf14ef56eb507d4b4b975510908 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 23:22:48 +0200 Subject: [PATCH 847/859] py : add Gemma conversion from HF models (#5647) * py : add gemma conversion from HF models * Update convert-hf-to-gguf.py Co-authored-by: Aarni Koskela * Update convert-hf-to-gguf.py Co-authored-by: Aarni Koskela * Update convert-hf-to-gguf.py Co-authored-by: Jared Van Bortel --------- Co-authored-by: Aarni Koskela Co-authored-by: Jared Van Bortel --- 
convert-hf-to-gguf.py | 60 +++++++++++++++++++++++++++++++++++++++++++ llama.cpp | 3 +++ 2 files changed, 63 insertions(+) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 8630bbf29..481198dad 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -218,6 +218,8 @@ class Model: return BertModel if model_architecture == "NomicBertModel": return NomicBertModel + if model_architecture == "GemmaForCausalLM": + return GemmaModel return Model def _is_model_safetensors(self) -> bool: @@ -277,6 +279,8 @@ class Model: return gguf.MODEL_ARCH.BERT if arch == "NomicBertModel": return gguf.MODEL_ARCH.NOMIC_BERT + if arch == "GemmaForCausalLM": + return gguf.MODEL_ARCH.GEMMA raise NotImplementedError(f'Architecture "{arch}" not supported!') @@ -1786,6 +1790,62 @@ class NomicBertModel(BertModel): yield name, data +class GemmaModel(Model): + def set_vocab(self): + self._set_vocab_sentencepiece() + + def set_gguf_parameters(self): + hparams = self.hparams + block_count = hparams["num_hidden_layers"] + + self.gguf_writer.add_name(self.dir_model.name) + self.gguf_writer.add_context_length(hparams["max_position_embeddings"]) + self.gguf_writer.add_embedding_length(hparams["hidden_size"]) + self.gguf_writer.add_block_count(block_count) + self.gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) + self.gguf_writer.add_head_count(hparams["num_attention_heads"]) + self.gguf_writer.add_head_count_kv(self.hparams["num_key_value_heads"] if "num_key_value_heads" in hparams else hparams["num_attention_heads"]) + self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) + self.gguf_writer.add_key_length(hparams["head_dim"]) + self.gguf_writer.add_value_length(hparams["head_dim"]) + + def write_tensors(self): + block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) + tensor_map = gguf.get_tensor_name_map(self.model_arch, block_count) + + for name, data_torch in self.get_tensors(): + # ref: https://github.com/huggingface/transformers/blob/fc37f38915372c15992b540dfcbbe00a916d4fc6/src/transformers/models/gemma/modeling_gemma.py#L89 + if name.endswith("norm.weight"): + data_torch = data_torch + 1 + + old_dtype = data_torch.dtype + + # convert any unsupported data types to float32 + if data_torch.dtype not in (torch.float16, torch.float32): + data_torch = data_torch.to(torch.float32) + + data = data_torch.squeeze().numpy() + + # map tensor names + new_name = tensor_map.get_name(name, try_suffixes=(".weight", ".bias")) + if new_name is None: + print(f"Can not map tensor {name!r}") + sys.exit() + + n_dims = len(data.shape) + data_dtype = data.dtype + + data = data.astype(np.float32) + + # if f16 desired, convert any float32 2-dim weight tensors to float16 + if self.ftype == 1 and data_dtype == np.float32 and name.endswith(".weight") and n_dims == 2: + data = data.astype(np.float16) + + print(f"{new_name}, n_dims = {n_dims}, {old_dtype} --> {data.dtype}") + + self.gguf_writer.add_tensor(new_name, data) + + ###### CONVERSION LOGIC ###### diff --git a/llama.cpp b/llama.cpp index 40dda265c..7770fa0e8 100644 --- a/llama.cpp +++ b/llama.cpp @@ -7450,6 +7450,7 @@ struct llm_build_context { inpL = llm_build_inp_embd(ctx0, hparams, batch, model.tok_embd, lctx.inp_tokens, lctx.inp_embd, cb); cb(inpL, "inp_embd", -1); + inpL = ggml_scale(ctx0, inpL, sqrtf(n_embd)); cb(inpL, "inp_scaled", -1); @@ -7491,6 +7492,7 @@ struct llm_build_context { n_embd_head_k, 2, 0, n_orig_ctx, freq_base, freq_scale, ext_factor, attn_factor, beta_fast, 
beta_slow); cb(Qcur, "Qcur", il); + Qcur = ggml_scale(ctx0, Qcur, 1.0f / sqrtf(float(n_embd_head_k))); cb(Qcur, "Qcur_scaled", il); @@ -7505,6 +7507,7 @@ struct llm_build_context { Kcur, Vcur, Qcur, KQ_mask, nullptr, n_ctx, n_tokens, kv_head, n_kv, 1.0f, cb, il); cb(cur, "kqv_out", il); } + struct ggml_tensor * sa_out = ggml_add(ctx0, cur, inpL); cb(sa_out, "sa_out", il); From 96633eeca1265ed03e57230de54032041c58f9cd Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Thu, 22 Feb 2024 23:23:46 +0200 Subject: [PATCH 848/859] gemma : use more bits for the token_embd.weight tensor (#5650) * gemma : use Q8_0 for the token_embd.weight tensor * llama : quantize token_embd.weight using output type --- llama.cpp | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index 7770fa0e8..2ebd40df2 100644 --- a/llama.cpp +++ b/llama.cpp @@ -10498,7 +10498,10 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty return std::make_pair(i_layer, n_layer); }; - if (name == tn(LLM_TENSOR_OUTPUT, "weight")) { + // for arches that share the same tensor between the token embeddings and the output, we quantize the token embeddings + // with the quantization of the output tensor + if (name == tn(LLM_TENSOR_OUTPUT, "weight") || + (LLM_TENSOR_NAMES.at(arch).find(LLM_TENSOR_OUTPUT) == LLM_TENSOR_NAMES.at(arch).end() && name == "token_embd.weight")) { int nx = tensor->ne[0]; if (arch == LLM_ARCH_FALCON || nx % QK_K != 0) { new_type = GGML_TYPE_Q8_0; From 15499eb94227401bdc8875da6eb85c15d37068f7 Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Thu, 22 Feb 2024 17:05:23 -0500 Subject: [PATCH 849/859] mpt : do not duplicate token_embd.weight on disk (#5670) --- convert-hf-to-gguf.py | 5 ----- llama.cpp | 6 ++++-- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 481198dad..9bdfce07a 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -622,11 +622,6 @@ class MPTModel(Model): self.gguf_writer.add_tensor(new_name, data) - # note: MPT output is tied to (same as) wte in original model; - # for easier implementation in llama.cpp it's duplicated in GGUF, though :/ - if new_name == "token_embd.weight": - self.gguf_writer.add_tensor("output.weight", data) - class OrionModel(Model): def set_vocab(self): diff --git a/llama.cpp b/llama.cpp index 2ebd40df2..37477e6ef 100644 --- a/llama.cpp +++ b/llama.cpp @@ -509,7 +509,6 @@ static std::map> LLM_TENSOR_NAMES = { { LLM_TENSOR_TOKEN_EMBD, "token_embd" }, { LLM_TENSOR_OUTPUT_NORM, "output_norm" }, - { LLM_TENSOR_OUTPUT, "output" }, { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" }, { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" }, { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" }, @@ -4056,7 +4055,10 @@ static bool llm_load_tensors( model.output_norm = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "weight"), {n_embd}); model.output_norm_b = ml.create_tensor(ctx_output, tn(LLM_TENSOR_OUTPUT_NORM, "bias"), {n_embd}, false); - model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_OUTPUT, "weight"), {n_embd, n_vocab}); + // same as tok_embd, duplicated to allow offloading + model.output = ml.create_tensor(ctx_output_split, tn(LLM_TENSOR_TOKEN_EMBD, "weight"), {n_embd, n_vocab}); + ml.n_created--; // artificial tensor + ml.size_data += ggml_nbytes(model.output); } for (int i = 0; i < n_layer; ++i) { From 54fbcd2ce6c48c9e22eca6fbf9e53fb68c3e72ea Mon Sep 17 00:00:00 2001 From: Jared Van Bortel Date: Fri, 23 Feb 2024 13:39:14 -0500 Subject: 
[PATCH 850/859] convert : fix missing ftype for gemma (#5690) --- convert-hf-to-gguf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 9bdfce07a..32d54b45f 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -1803,6 +1803,7 @@ class GemmaModel(Model): self.gguf_writer.add_layer_norm_rms_eps(self.hparams["rms_norm_eps"]) self.gguf_writer.add_key_length(hparams["head_dim"]) self.gguf_writer.add_value_length(hparams["head_dim"]) + self.gguf_writer.add_file_type(self.ftype) def write_tensors(self): block_count = self.hparams.get("n_layers", self.hparams.get("num_hidden_layers", self.hparams.get("n_layer"))) From fd43d66f46ee3b5345fb8a74a252d86ccd34a409 Mon Sep 17 00:00:00 2001 From: AlpinDale <52078762+AlpinDale@users.noreply.github.com> Date: Fri, 23 Feb 2024 19:31:54 +0000 Subject: [PATCH 851/859] server : add KV cache quantization options (#5684) --- examples/server/server.cpp | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 369121e88..524d0ada3 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1948,6 +1948,10 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -cb, --cont-batching enable continuous batching (a.k.a dynamic batching) (default: disabled)\n"); printf(" -spf FNAME, --system-prompt-file FNAME\n"); printf(" set a file to load a system prompt (initial prompt of all slots), this is useful for chat applications.\n"); + printf(" -ctk TYPE, --cache-type-k TYPE\n"); + printf(" KV cache data type for K (default: f16)\n"); + printf(" -ctv TYPE, --cache-type-v TYPE\n"); + printf(" KV cache data type for V (default: f16)\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); @@ -2386,6 +2390,12 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, ); llama.process_system_prompt_data(json::parse(systm_content)); } + else if (arg == "-ctk" || arg == "--cache-type-k") { + params.cache_type_k = argv[++i]; + } + else if (arg == "-ctv" || arg == "--cache-type-v") { + params.cache_type_v = argv[++i]; + } else if(arg == "--mmproj") { if (++i >= argc) From 525213d2f5da1eaf4b922b6b792cb52b2c613368 Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sat, 24 Feb 2024 12:28:55 +0100 Subject: [PATCH 852/859] server: init functional tests (#5566) * server: tests: init scenarios - health and slots endpoints - completion endpoint - OAI compatible chat completion requests w/ and without streaming - completion multi users scenario - multi users scenario on OAI compatible endpoint with streaming - multi users with total number of tokens to predict exceeds the KV Cache size - server wrong usage scenario, like in Infinite loop of "context shift" #3969 - slots shifting - continuous batching - embeddings endpoint - multi users embedding endpoint: Segmentation fault #5655 - OpenAI-compatible embeddings API - tokenize endpoint - CORS and api key scenario * server: CI GitHub workflow --------- Co-authored-by: Georgi Gerganov --- .github/ISSUE_TEMPLATE/bug.md | 2 + .github/workflows/server.yml | 127 ++++ examples/server/README.md | 6 + examples/server/server.cpp | 36 +- examples/server/tests/README.md | 46 ++ examples/server/tests/features/environment.py | 67 ++ examples/server/tests/features/issues.feature | 36 + 
.../server/tests/features/parallel.feature | 77 ++ .../server/tests/features/security.feature | 50 ++ examples/server/tests/features/server.feature | 69 ++ examples/server/tests/features/steps/steps.py | 709 ++++++++++++++++++ .../tests/features/wrong_usages.feature | 21 + examples/server/tests/requirements.txt | 3 + examples/server/tests/tests.sh | 12 + 14 files changed, 1243 insertions(+), 18 deletions(-) create mode 100644 .github/workflows/server.yml create mode 100644 examples/server/tests/README.md create mode 100644 examples/server/tests/features/environment.py create mode 100644 examples/server/tests/features/issues.feature create mode 100644 examples/server/tests/features/parallel.feature create mode 100644 examples/server/tests/features/security.feature create mode 100644 examples/server/tests/features/server.feature create mode 100644 examples/server/tests/features/steps/steps.py create mode 100644 examples/server/tests/features/wrong_usages.feature create mode 100644 examples/server/tests/requirements.txt create mode 100755 examples/server/tests/tests.sh diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index ce69e6395..49812832c 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -7,3 +7,5 @@ assignees: '' --- Please include information about your system, the steps to reproduce the bug, and the version of llama.cpp that you are using. If possible, please provide a minimal code example that reproduces the bug. + +If the bug concerns the server, please try to reproduce it first using the [server test scenario framework](https://github.com/ggerganov/llama.cpp/tree/master/examples/server/tests). diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml new file mode 100644 index 000000000..ed27dc528 --- /dev/null +++ b/.github/workflows/server.yml @@ -0,0 +1,127 @@ +# Server build and tests +name: Server + +on: + workflow_dispatch: # allows manual triggering + push: + branches: + - master + - test/server-add-ci-test # FIXME remove + paths: ['.github/workflows/**', '**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/**.*'] + pull_request: + types: [opened, synchronize, reopened] + paths: ['**/CMakeLists.txt', '**/Makefile', '**/*.h', '**/*.hpp', '**/*.c', '**/*.cpp', '**/*.cu', '**/*.swift', '**/*.m', 'examples/server/**.*'] + +jobs: + server: + runs-on: ubuntu-latest + + strategy: + matrix: + build: [noavx, avx2, avx, avx512, cublas, clblast, openblas, kompute, vulkan] + sanitizer: [ADDRESS, THREAD, UNDEFINED] + build_type: [Debug, Release] + include: + - build: 'noavx' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX=OFF -DLLAMA_AVX2=OFF -DLLAMA_FMA=OFF' + image: ubuntu:latest + - build: 'avx2' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON' + image: ubuntu:latest + - build: 'avx' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX2=OFF' + image: ubuntu:latest + - build: 'avx512' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_AVX512=ON' + image: ubuntu:latest + experimental: true + - build: 'cublas' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_CUBLAS=ON' + image: nvidia/cuda:12.3.1-devel-ubuntu22.04 + arch_not_available: true # require nvidia docker engine + - build: 'clblast' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_CLBLAST=ON' + image: ubuntu:latest + arch_not_available: true + - build: 'openblas' + defines: '-DLLAMA_NATIVE=OFF 
-DLLAMA_BUILD_SERVER=ON -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS' + image: ubuntu:latest + - build: 'kompute' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_KOMPUTE=ON -DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON' + image: ubuntu:latest + arch_not_available: true + - build: 'vulkan' + defines: '-DLLAMA_NATIVE=OFF -DLLAMA_BUILD_SERVER=ON -DLLAMA_VULKAN=ON' + image: ubuntu:latest + arch_not_available: true + + container: + image: ${{ matrix.image }} + ports: + - 8888 + options: --cpus 4 + + steps: + - name: Clone + id: checkout + uses: actions/checkout@v3 + + - name: Dependencies + id: depends + run: | + apt-get update + apt-get -y install \ + build-essential \ + pkg-config \ + git \ + cmake \ + python3-pip \ + wget \ + psmisc + + - name: Download CLBlast + id: get_clblast + if: ${{ matrix.build == 'clblast' }} + run: | + apt install -y libclblast-dev + + - name: Download OpenBLAS + id: get_openblas + if: ${{ matrix.build == 'openblas' }} + run: | + apt-get -y install libopenblas-dev + + - name: Install Vulkan SDK + id: get_vulkan + if: ${{ matrix.build == 'kompute' || matrix.build == 'vulkan' }} + run: | + wget -qO- https://packages.lunarg.com/lunarg-signing-key-pub.asc | tee /etc/apt/trusted.gpg.d/lunarg.asc + wget -qO /etc/apt/sources.list.d/lunarg-vulkan-jammy.list http://packages.lunarg.com/vulkan/lunarg-vulkan-jammy.list + apt-get update + apt-get -y install vulkan-sdk + + - name: Build + id: cmake_build + run: | + mkdir build + cd build + cmake .. -DLLAMA_SANITIZE_${{ matrix.sanitizer }}=ON -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} ${{ matrix.defines }} + cmake --build . --config ${{ matrix.build_type }} -j $(nproc) --target server + + - name: Tests dependencies + id: test_dependencies + run: | + pip install -r examples/server/tests/requirements.txt + + - name: Download models + id: download_models + run: | + cd examples/server/tests + ../../../scripts/hf.sh --repo ggml-org/models --file tinyllamas/stories260K.gguf + + - name: Tests + id: server_integration_test + continue-on-error: ${{ matrix.experimental || matrix.arch_not_available }} + run: | + cd examples/server/tests + PORT=8888 ./tests.sh diff --git a/examples/server/README.md b/examples/server/README.md index 4b6cd8326..0c43ac4c9 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -98,6 +98,12 @@ curl --request POST \ --data '{"prompt": "Building a website can be done in 10 simple steps:","n_predict": 128}' ``` +## Advanced testing + +We implemented a [server test framework](./tests/README.md) using human-readable scenario. + +*Before submitting an issue, please try to reproduce it with this format.* + ## Node JS Test You need to have [Node.js](https://nodejs.org/en) installed. 
diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 524d0ada3..9fb436c2a 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1410,11 +1410,6 @@ struct llama_server_context int n_processing_slots = 0; for (llama_client_slot &slot: slots) { - if (slot.available()) { - n_idle_slots++; - } else { - n_processing_slots++; - } json slot_data = get_formated_generation(slot); slot_data["id"] = slot.id; slot_data["task_id"] = slot.task_id; @@ -1429,6 +1424,11 @@ struct llama_server_context {"stopped_limit", slot.stopped_limit}, {"stopping_word", slot.stopping_word}, }; + if (slot_data["state"] == IDLE) { + n_idle_slots++; + } else { + n_processing_slots++; + } slots_data.push_back(slot_data); } LOG_TEE("task %i - slots data: idle=%i processing=%i\n", task.id, n_idle_slots, n_processing_slots); @@ -2748,19 +2748,6 @@ int main(int argc, char **argv) log_data["api_key"] = "api_key: " + std::to_string(sparams.api_keys.size()) + " keys loaded"; } - LOG_INFO("HTTP server listening", log_data); - // run the HTTP server in a thread - see comment below - std::thread t([&]() - { - if (!svr.listen_after_bind()) - { - state.store(SERVER_STATE_ERROR); - return 1; - } - - return 0; - }); - // load the model if (!llama.load_model(params)) { @@ -3228,6 +3215,19 @@ int main(int argc, char **argv) }*/ //); + LOG_INFO("HTTP server listening", log_data); + // run the HTTP server in a thread - see comment below + std::thread t([&]() + { + if (!svr.listen_after_bind()) + { + state.store(SERVER_STATE_ERROR); + return 1; + } + + return 0; + }); + llama.queue_tasks.on_new_task(std::bind( &llama_server_context::process_single_task, &llama, std::placeholders::_1)); llama.queue_tasks.on_finish_multitask(std::bind( diff --git a/examples/server/tests/README.md b/examples/server/tests/README.md new file mode 100644 index 000000000..e44c5c286 --- /dev/null +++ b/examples/server/tests/README.md @@ -0,0 +1,46 @@ +# Server tests + +Python based server tests scenario using [BDD](https://en.wikipedia.org/wiki/Behavior-driven_development) and [behave](https://behave.readthedocs.io/en/latest/): + * [issues.feature](./features/issues.feature) Pending issues scenario + * [parallel.feature](./features/parallel.feature) Scenario involving multi slots and concurrent requests + * [security.feature](./features/security.feature) Security, CORS and API Key + * [server.feature](./features/server.feature) Server base scenario: completion, embedding, tokenization, etc... + +Tests target GitHub workflows job runners with 4 vCPU. + +Requests are using [aiohttp](https://docs.aiohttp.org/en/stable/client_reference.html), [asyncio](https://docs.python.org/fr/3/library/asyncio.html) based http client. + +Note: If the host architecture inference speed is faster than GitHub runners one, parallel scenario may randomly fail. To mitigate it, you can increase values in `n_predict`, `kv_size`. + +### Install dependencies +`pip install -r requirements.txt` + +### Run tests +1. Build the server +```shell +cd ../../.. +mkdir build +cd build +cmake ../ +cmake --build . --target server +``` +2. download required models: + 1. `../../../scripts/hf.sh --repo ggml-org/models --file tinyllamas/stories260K.gguf` +3. 
Start the test: `./tests.sh` + +It's possible to override some scenario steps values with environment variables: + - `PORT` -> `context.server_port` to set the listening port of the server during scenario, default: `8080` + - `LLAMA_SERVER_BIN_PATH` -> to change the server binary path, default: `../../../build/bin/server` + - `DEBUG` -> "ON" to enable steps and server verbose mode `--verbose` + +### Run @bug, @wip or @wrong_usage annotated scenario + +Feature or Scenario must be annotated with `@llama.cpp` to be included in the default scope. +- `@bug` annotation aims to link a scenario with a GitHub issue. +- `@wrong_usage` are meant to show user issue that are actually an expected behavior +- `@wip` to focus on a scenario working in progress + +To run a scenario annotated with `@bug`, start: +`DEBUG=ON ./tests.sh --no-skipped --tags bug` + +After changing logic in `steps.py`, ensure that `@bug` and `@wrong_usage` scenario are updated. diff --git a/examples/server/tests/features/environment.py b/examples/server/tests/features/environment.py new file mode 100644 index 000000000..13cc84101 --- /dev/null +++ b/examples/server/tests/features/environment.py @@ -0,0 +1,67 @@ +import os +import socket +import subprocess +import time +from contextlib import closing +from signal import SIGKILL + + +def before_scenario(context, scenario): + print(f"\x1b[33;42mStarting new scenario: {scenario.name}!\x1b[0m") + port = 8080 + if 'PORT' in os.environ: + port = int(os.environ['PORT']) + if is_server_listening("localhost", port): + assert False, "Server already started" + + +def after_scenario(context, scenario): + if scenario.status == "failed": + if 'GITHUB_ACTIONS' in os.environ: + print(f"\x1b[33;101mSCENARIO FAILED: {scenario.name} server logs:\x1b[0m\n\n") + if os.path.isfile('llama.log'): + with closing(open('llama.log', 'r')) as f: + for line in f: + print(line) + if not is_server_listening(context.server_fqdn, context.server_port): + print("\x1b[33;101mERROR: Server stopped listening\x1b[0m") + + if not pid_exists(context.server_process.pid): + assert False, f"Server not running pid={context.server_process.pid} ..." 
+ + print(f"stopping server pid={context.server_process.pid} ...") + context.server_process.kill() + # Wait few for socket to free up + time.sleep(0.05) + + attempts = 0 + while is_server_listening(context.server_fqdn, context.server_port): + print(f"stopping server pid={context.server_process.pid} ...") + os.kill(context.server_process.pid, SIGKILL) + time.sleep(0.1) + attempts += 1 + if attempts > 5: + print(f"Server dangling exits, killing all {context.server_path} ...") + process = subprocess.run(['killall', '-9', context.server_path], + stderr=subprocess.PIPE, + universal_newlines=True) + print(process) + + +def is_server_listening(server_fqdn, server_port): + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: + result = sock.connect_ex((server_fqdn, server_port)) + return result == 0 + + +def pid_exists(pid): + """Check whether pid exists in the current process table.""" + import errno + if pid < 0: + return False + try: + os.kill(pid, 0) + except OSError as e: + return e.errno == errno.EPERM + else: + return True diff --git a/examples/server/tests/features/issues.feature b/examples/server/tests/features/issues.feature new file mode 100644 index 000000000..542006d9a --- /dev/null +++ b/examples/server/tests/features/issues.feature @@ -0,0 +1,36 @@ +# List of ongoing issues +@bug +Feature: Issues + # Issue #5655 + Scenario: Multi users embeddings + Given a server listening on localhost:8080 + And a model file stories260K.gguf + And a model alias tinyllama-2 + And 42 as server seed + And 64 KV cache size + And 2 slots + And continuous batching + And embeddings extraction + Then the server is starting + Then the server is healthy + + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And a prompt: + """ + Write a very long poem. + """ + And a prompt: + """ + Write a very long joke. + """ + Given concurrent embedding requests + Then the server is busy + Then the server is idle + Then all embeddings are generated diff --git a/examples/server/tests/features/parallel.feature b/examples/server/tests/features/parallel.feature new file mode 100644 index 000000000..802d624ff --- /dev/null +++ b/examples/server/tests/features/parallel.feature @@ -0,0 +1,77 @@ +@llama.cpp +Feature: Parallel + + Background: Server startup + Given a server listening on localhost:8080 + And a model file stories260K.gguf + And a model alias tinyllama-2 + And 42 as server seed + And 64 KV cache size + And 2 slots + And continuous batching + Then the server is starting + Then the server is healthy + + Scenario Outline: Multi users completion + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And max tokens to predict + Given concurrent completion requests + Then the server is busy + Then the server is idle + And all slots are idle + Then all prompts are predicted with tokens + Examples: + | n_predict | + | 128 | + + Scenario Outline: Multi users OAI completions compatibility + Given a system prompt You are a writer. + And a model tinyllama-2 + Given a prompt: + """ + Write a very long book. + """ + And a prompt: + """ + Write another a poem. 
+ """ + And max tokens to predict + And streaming is + Given concurrent OAI completions requests + Then the server is busy + Then the server is idle + Then all prompts are predicted with tokens + Examples: + | streaming | n_predict | + | disabled | 128 | + | enabled | 64 | + + Scenario: Multi users with total number of tokens to predict exceeds the KV Cache size #3969 + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And a prompt: + """ + Write a very long poem. + """ + And a prompt: + """ + Write a very long joke. + """ + And 128 max tokens to predict + Given concurrent completion requests + Then the server is busy + Then the server is idle + Then all prompts are predicted diff --git a/examples/server/tests/features/security.feature b/examples/server/tests/features/security.feature new file mode 100644 index 000000000..db06d3977 --- /dev/null +++ b/examples/server/tests/features/security.feature @@ -0,0 +1,50 @@ +@llama.cpp +Feature: Security + + Background: Server startup with an api key defined + Given a server listening on localhost:8080 + And a model file stories260K.gguf + And a server api key llama.cpp + Then the server is starting + Then the server is healthy + + Scenario Outline: Completion with some user api key + Given a prompt test + And a user api key + And 4 max tokens to predict + And a completion request with api error + + Examples: Prompts + | api_key | api_error | + | llama.cpp | no | + | llama.cpp | no | + | hackeme | raised | + | | raised | + + Scenario Outline: OAI Compatibility + Given a system prompt test + And a user prompt test + And a model test + And 2 max tokens to predict + And streaming is disabled + And a user api key + Given an OAI compatible chat completions request with api error + + Examples: Prompts + | api_key | api_error | + | llama.cpp | no | + | llama.cpp | no | + | hackme | raised | + + + Scenario Outline: CORS Options + When an OPTIONS request is sent from + Then CORS header is set to + + Examples: Headers + | origin | cors_header | cors_header_value | + | localhost | Access-Control-Allow-Origin | localhost | + | web.mydomain.fr | Access-Control-Allow-Origin | web.mydomain.fr | + | origin | Access-Control-Allow-Credentials | true | + | web.mydomain.fr | Access-Control-Allow-Methods | POST | + | web.mydomain.fr | Access-Control-Allow-Headers | * | diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature new file mode 100644 index 000000000..fedcfe5ae --- /dev/null +++ b/examples/server/tests/features/server.feature @@ -0,0 +1,69 @@ +@llama.cpp +Feature: llama.cpp server + + Background: Server startup + Given a server listening on localhost:8080 + And a model file stories260K.gguf + And a model alias tinyllama-2 + And 42 as server seed + # KV Cache corresponds to the total amount of tokens + # that can be stored across all independent sequences: #4130 + # see --ctx-size and #5568 + And 32 KV cache size + And 1 slots + And embeddings extraction + And 32 server max tokens to predict + Then the server is starting + Then the server is healthy + + Scenario: Health + Then the server is ready + And all slots are idle + + Scenario Outline: Completion + Given a prompt + And max tokens to predict + And a completion request with no api error + Then tokens are predicted matching + + Examples: Prompts + | prompt | n_predict | re_content | n_predicted | + | I believe the meaning of life is | 8 | read | 8 | + | Write a joke about 
AI | 64 | (parkfriendsscared)+ | 32 | + + Scenario Outline: OAI Compatibility + Given a model + And a system prompt + And a user prompt + And max tokens to predict + And streaming is + Given an OAI compatible chat completions request with no api error + Then tokens are predicted matching + + Examples: Prompts + | model | system_prompt | user_prompt | max_tokens | re_content | n_predicted | enable_streaming | + | llama-2 | Book | What is the best book | 8 | (Momwhat)+ | 8 | disabled | + | codellama70b | You are a coding assistant. | Write the fibonacci function in c++. | 64 | (thankshappybird)+ | 32 | enabled | + + Scenario: Embedding + When embeddings are computed for: + """ + What is the capital of Bulgaria ? + """ + Then embeddings are generated + + Scenario: OAI Embeddings compatibility + Given a model tinyllama-2 + When an OAI compatible embeddings computation request for: + """ + What is the capital of Spain ? + """ + Then embeddings are generated + + + Scenario: Tokenize / Detokenize + When tokenizing: + """ + What is the capital of France ? + """ + Then tokens can be detokenize diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py new file mode 100644 index 000000000..50f2b641e --- /dev/null +++ b/examples/server/tests/features/steps/steps.py @@ -0,0 +1,709 @@ +import asyncio +import json +import os +import re +import socket +import subprocess +import time +from contextlib import closing +from re import RegexFlag + +import aiohttp +import openai +from behave import step +from behave.api.async_step import async_run_until_complete + + +@step(u"a server listening on {server_fqdn}:{server_port}") +def step_server_config(context, server_fqdn, server_port): + context.server_fqdn = server_fqdn + context.server_port = int(server_port) + if 'PORT' in os.environ: + context.server_port = int(os.environ['PORT']) + print(f"$PORT set, overriding server port with to {context.server_port}") + + context.base_url = f'http://{context.server_fqdn}:{context.server_port}' + + context.debug = 'DEBUG' in os.environ and os.environ['DEBUG'] == 'ON' + context.model_alias = None + context.n_ctx = None + context.n_predict = None + context.n_server_predict = None + context.n_slots = None + context.server_api_key = None + context.server_continuous_batching = False + context.server_embeddings = False + context.server_seed = None + context.user_api_key = None + + context.tasks_result = [] + context.concurrent_tasks = [] + context.prompts = [] + + +@step(u'a model file {model_file}') +def step_model_file(context, model_file): + context.model_file = model_file + + +@step(u'a model alias {model_alias}') +def step_model_alias(context, model_alias): + context.model_alias = model_alias + + +@step(u'{seed} as server seed') +def step_seed(context, seed): + context.server_seed = int(seed) + + +@step(u'{n_ctx} KV cache size') +def step_n_ctx(context, n_ctx): + context.n_ctx = int(n_ctx) + + +@step(u'{n_slots} slots') +def step_n_slots(context, n_slots): + context.n_slots = int(n_slots) + + +@step(u'{n_predict} server max tokens to predict') +def step_server_n_predict(context, n_predict): + context.n_server_predict = int(n_predict) + + +@step(u'continuous batching') +def step_server_continuous_batching(context): + context.server_continuous_batching = True + + +@step(u'embeddings extraction') +def step_server_embeddings(context): + context.server_embeddings = True + + +@step(u"the server is starting") +def step_start_server(context): + start_server_background(context) + 
attempts = 0 + while True: + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: + result = sock.connect_ex((context.server_fqdn, context.server_port)) + if result == 0: + print("\x1b[33;46mserver started!\x1b[0m") + return + attempts += 1 + if attempts > 20: + assert False, "server not started" + print(f"waiting for server to start, connect error code = {result}...") + time.sleep(0.1) + + +@step(u"the server is {expecting_status}") +@async_run_until_complete +async def step_wait_for_the_server_to_be_started(context, expecting_status): + match expecting_status: + case 'healthy': + await wait_for_health_status(context, context.base_url, 200, 'ok') + + case 'ready' | 'idle': + await wait_for_health_status(context, context.base_url, 200, 'ok', + params={'fail_on_no_slot': 0, 'include_slots': 0}, + slots_idle=context.n_slots, + slots_processing=0, + expected_slots=[{'id': slot_id, 'state': 0} + for slot_id in range(context.n_slots)]) + case 'busy': + await wait_for_health_status(context, context.base_url, 503, + 'no slot available', + params={'fail_on_no_slot': 0, 'include_slots': 0}, + slots_idle=0, + slots_processing=context.n_slots, + expected_slots=[{'id': slot_id, 'state': 1} + for slot_id in range(context.n_slots)]) + case _: + assert False, "unknown status" + + +@step(u'all slots are {expected_slot_status_string}') +@async_run_until_complete +async def step_all_slots_status(context, expected_slot_status_string): + match expected_slot_status_string: + case 'idle': + expected_slot_status = 0 + case 'busy': + expected_slot_status = 1 + case _: + assert False, "unknown status" + + expected_slots = [{'id': slot_id, 'state': expected_slot_status} + for slot_id in range(context.n_slots)] + await request_slots_status(context, expected_slots) + + +@step(u'a completion request with {api_error} api error') +@async_run_until_complete +async def step_request_completion(context, api_error): + expect_api_error = api_error == 'raised' + completion = await request_completion(context.prompts.pop(), + context.base_url, + debug=context.debug, + n_predict=context.n_predict, + server_seed=context.server_seed, + expect_api_error=expect_api_error, + user_api_key=context.user_api_key) + context.tasks_result.append(completion) + if context.debug: + print(f"Completion response: {completion}") + if expect_api_error: + assert completion == 401, f"completion must be an 401 status code: {completion}" + + +@step(u'{predicted_n} tokens are predicted matching {re_content}') +def step_n_tokens_predicted_with_content(context, predicted_n, re_content): + assert_n_tokens_predicted(context.tasks_result.pop(), int(predicted_n), re_content) + + +@step(u'{predicted_n} tokens are predicted') +def step_n_tokens_predicted(context, predicted_n): + assert_n_tokens_predicted(context.tasks_result.pop(), int(predicted_n)) + + +@step(u'a user prompt {user_prompt}') +def step_user_prompt(context, user_prompt): + context.prompts.append(user_prompt) + + +@step(u'a system prompt {system_prompt}') +def step_system_prompt(context, system_prompt): + context.system_prompt = system_prompt + + +@step(u'a model {model}') +def step_model(context, model): + context.model = model + + +@step(u'{max_tokens} max tokens to predict') +def step_max_tokens(context, max_tokens): + context.n_predict = int(max_tokens) + + +@step(u'streaming is {enable_streaming}') +def step_streaming(context, enable_streaming): + context.enable_streaming = enable_streaming == 'enabled' + + +@step(u'a user api key {user_api_key}') +def 
step_user_api_key(context, user_api_key): + context.user_api_key = user_api_key + + +@step(u'no user api key') +def step_no_user_api_key(context): + context.user_api_key = None + + +@step(u'a user api key ') +def step_no_user_api_key_space(context): + context.user_api_key = None + + +@step(u'a server api key {server_api_key}') +def step_server_api_key(context, server_api_key): + context.server_api_key = server_api_key + + +@step(u'an OAI compatible chat completions request with {api_error} api error') +@async_run_until_complete +async def step_oai_chat_completions(context, api_error): + if context.debug: + print(f"Submitting OAI compatible completions request...") + expect_api_error = api_error == 'raised' + completion = await oai_chat_completions(context.prompts.pop(), + context.system_prompt, + context.base_url, + False, + model=context.model if hasattr(context, 'model') else None, + + n_predict=context.n_predict + if hasattr(context, 'n_predict') else None, + + enable_streaming=context.enable_streaming + if hasattr(context, 'enable_streaming') else None, + + server_seed=context.server_seed + if hasattr(context, 'server_seed') else None, + + user_api_key=context.user_api_key + if hasattr(context, 'user_api_key') else None, + + expect_api_error=expect_api_error) + context.tasks_result.append(completion) + if context.debug: + print(f"Completion response: {completion}") + if expect_api_error: + assert completion == 401, f"completion must be an 401 status code: {completion}" + + if context.debug: + print(f"Completion response: {completion}") + + +@step(u'a prompt') +def step_a_prompt(context): + context.prompts.append(context.text) + + +@step(u'a prompt {prompt}') +def step_a_prompt_prompt(context, prompt): + context.prompts.append(prompt) + + +@step(u'concurrent completion requests') +@async_run_until_complete() +async def step_concurrent_completion_requests(context): + await concurrent_completion_requests(context, + request_completion, + # prompt is inserted automatically + context.base_url, + debug=context.debug, + n_predict=context.n_predict if hasattr(context, 'n_predict') else None, + server_seed=context.server_seed if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key if hasattr(context, + 'user_api_key') else None) + + +@step(u'concurrent OAI completions requests') +@async_run_until_complete +async def step_oai_chat_completions(context): + await concurrent_completion_requests(context, oai_chat_completions, + # user_prompt is inserted automatically + context.system_prompt, + context.base_url, + True, # async_client + model=context.model + if hasattr(context, 'model') else None, + n_predict=context.n_predict + if hasattr(context, 'n_predict') else None, + enable_streaming=context.enable_streaming + if hasattr(context, 'enable_streaming') else None, + server_seed=context.server_seed + if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key + if hasattr(context, 'user_api_key') else None) + + +@step(u'all prompts are predicted') +@async_run_until_complete +async def step_all_prompts_are_predicted(context): + await all_prompts_are_predicted(context) + + +@step(u'all prompts are predicted with {n_predict} tokens') +@async_run_until_complete +async def step_all_prompts_are_predicted_with_n_tokens(context, n_predict): + expected_predicted_n = int(n_predict) + await all_prompts_are_predicted(context, expected_predicted_n) + + +async def all_prompts_are_predicted(context, expected_predicted_n=None): + n_completions = await 
gather_tasks_results(context) + assert n_completions > 0 + for i in range(n_completions): + assert_n_tokens_predicted(context.tasks_result.pop(), expected_predicted_n=expected_predicted_n) + assert len(context.concurrent_tasks) == 0, f"{len(context.concurrent_tasks)} pending requests" + + +@step(u'embeddings are computed for') +@async_run_until_complete +async def step_compute_embedding(context): + content = context.text + base_url = context.base_url + context.embeddings = await request_embedding(content, base_url) + + +@step(u'embeddings are generated') +def step_assert_embeddings(context): + assert_embeddings(context.embeddings) + + +@step(u'an OAI compatible embeddings computation request for') +def step_oai_compute_embedding(context): + openai.api_key = 'nope' # openai client always expects an api_keu + if context.user_api_key is not None: + openai.api_key = context.user_api_key + openai.api_base = f'{context.base_url}/v1' + embeddings = openai.Embedding.create( + model=context.model, + input=context.text, + ) + context.embeddings = embeddings + + +@step(u'concurrent embedding requests') +@async_run_until_complete() +async def step_concurrent_embedding_requests(context): + await concurrent_completion_requests(context, + request_embedding, + # prompt is inserted automatically + context.base_url) + + +@step(u'all embeddings are generated') +@async_run_until_complete() +async def all_embeddings_are_generated(context): + n_embedding_requests = await gather_tasks_results(context) + assert n_embedding_requests > 0 + for i in range(n_embedding_requests): + assert_embeddings(context.tasks_result.pop()) + + +@step(u'tokenizing') +@async_run_until_complete +async def step_tokenize(context): + context.tokenized_text = context.text + async with aiohttp.ClientSession() as session: + async with session.post(f'{context.base_url}/tokenize', + json={ + "content": context.tokenized_text, + }) as response: + assert response.status == 200 + tokenize_json = await response.json() + context.tokens = tokenize_json['tokens'] + + +@step(u'tokens can be detokenize') +@async_run_until_complete +async def step_detokenize(context): + assert len(context.tokens) > 0 + async with aiohttp.ClientSession() as session: + async with session.post(f'{context.base_url}/detokenize', + json={ + "tokens": context.tokens, + }) as response: + assert response.status == 200 + detokenize_json = await response.json() + # SPM tokenizer adds a whitespace prefix: https://github.com/google/sentencepiece/issues/15 + assert context.tokenized_text == detokenize_json['content'].strip() + + +@step(u'an OPTIONS request is sent from {origin}') +@async_run_until_complete +async def step_options_request(context, origin): + async with aiohttp.ClientSession() as session: + async with session.options(f'{context.base_url}/v1/chat/completions', + headers={"Origin": origin}) as response: + assert response.status == 200 + context.options_response = response + + +@step(u'CORS header {cors_header} is set to {cors_header_value}') +def step_check_options_header_value(context, cors_header, cors_header_value): + assert context.options_response.headers[cors_header] == cors_header_value + + +async def concurrent_completion_requests(context, f_completion, *args, **kwargs): + n_prompts = len(context.prompts) + if context.debug: + print(f"starting {n_prompts} concurrent completion requests...") + assert n_prompts > 0 + for prompt_no in range(n_prompts): + shifted_args = [context.prompts.pop(), *args] + 
context.concurrent_tasks.append(asyncio.create_task(f_completion(*shifted_args, **kwargs))) + await asyncio.sleep(0.1) + + +async def request_completion(prompt, + base_url, + debug=False, + n_predict=None, + server_seed=None, + expect_api_error=None, + user_api_key=None): + if debug: + print(f"Sending completion request: {prompt}") + origin = "my.super.domain" + headers = { + 'Origin': origin + } + if user_api_key is not None: + if debug: + print(f"Set user_api_key: {user_api_key}") + headers['Authorization'] = f'Bearer {user_api_key}' + + async with aiohttp.ClientSession() as session: + async with session.post(f'{base_url}/completion', + json={ + "prompt": prompt, + "n_predict": int(n_predict) if n_predict is not None else -1, + "seed": server_seed if server_seed is not None else 42 + }, + headers=headers) as response: + if expect_api_error is None or not expect_api_error: + assert response.status == 200 + assert response.headers['Access-Control-Allow-Origin'] == origin + return await response.json() + else: + return response.status + + +async def oai_chat_completions(user_prompt, + system_prompt, + base_url, + async_client, + debug=False, + model=None, + n_predict=None, + enable_streaming=None, + server_seed=None, + user_api_key=None, + expect_api_error=None): + if debug: + print(f"Sending OAI Chat completions request: {user_prompt}") + # openai client always expects an api key + user_api_key = user_api_key if user_api_key is not None else 'nope' + seed = server_seed if server_seed is not None else 42 + enable_streaming = enable_streaming if enable_streaming is not None else False + payload = { + "messages": [ + { + "role": "system", + "content": system_prompt, + }, + { + "role": "user", + "content": user_prompt, + } + ], + "model": model, + "max_tokens": n_predict, + "stream": enable_streaming, + "seed": seed + } + completion_response = { + 'content': '', + 'timings': { + 'predicted_n': 0 + } + } + if async_client: + origin = 'llama.cpp' + headers = {'Authorization': f'Bearer {user_api_key}', 'Origin': origin} + async with aiohttp.ClientSession() as session: + async with session.post(f'{base_url}/v1/chat/completions', + json=payload, + headers=headers) as response: + if enable_streaming: + assert response.status == 200 + assert response.headers['Access-Control-Allow-Origin'] == origin + assert response.headers['Content-Type'] == "text/event-stream" + event_received = True + while event_received: + event_received = False + async for line_in_bytes in response.content: + line = line_in_bytes.decode('utf8') + line = line.rstrip('\n').rstrip('\r') + if line == '': + continue + event_data = line.split(': ', 1) + assert event_data[0] == 'data', f'Bad event code received: ```{event_data}```' + chunk_raw = event_data[1] + + chunk = json.loads(chunk_raw) + assert len(chunk['choices']) == 1, f"no choices provided, line ```{line}```" + delta = chunk['choices'][0]['delta'] + if 'content' in delta: + completion_response['content'] += delta['content'] + completion_response['timings']['predicted_n'] += 1 + else: + if expect_api_error is None or not expect_api_error: + assert response.status == 200 + assert response.headers['Access-Control-Allow-Origin'] == origin + assert response.headers['Content-Type'] == "application/json; charset=utf-8" + chat_completion_raw = await response.json() + completion_response = { + 'content': chat_completion_raw['choices'][0]['message'], + 'timings': { + 'predicted_n': chat_completion_raw['usage']['completion_tokens'] + } + } + else: + return response.status + else: + 
try: + openai.api_key = user_api_key + openai.api_base = f'{base_url}/v1/chat' + chat_completion = openai.Completion.create( + messages=payload['messages'], + model=model, + max_tokens=n_predict, + stream=enable_streaming, + seed=seed + ) + except openai.error.APIError as e: + if expect_api_error is not None and expect_api_error: + return 401 + else: + assert False, f'error raised: {e}' + + if enable_streaming: + for chunk in chat_completion: + assert len(chunk.choices) == 1 + delta = chunk.choices[0].delta + if 'content' in delta: + completion_response['content'] += delta['content'] + completion_response['timings']['predicted_n'] += 1 + else: + assert len(chat_completion.choices) == 1 + completion_response = { + 'content': chat_completion.choices[0].message.content, + 'timings': { + 'predicted_n': chat_completion.usage.completion_tokens + } + } + if debug: + print("OAI response formatted to llama.cpp:", completion_response) + return completion_response + + +async def request_embedding(content, base_url): + async with aiohttp.ClientSession() as session: + async with session.post(f'{base_url}/embedding', + json={ + "content": content, + }) as response: + assert response.status == 200 + response_json = await response.json() + return response_json['embedding'] + + +def assert_n_tokens_predicted(completion_response, expected_predicted_n=None, re_content=None): + content = completion_response['content'] + n_predicted = completion_response['timings']['predicted_n'] + assert len(content) > 0, "no token predicted" + if expected_predicted_n is not None: + assert n_predicted == expected_predicted_n, (f'invalid number of tokens predicted:' + f' {n_predicted} <> {expected_predicted_n}') + if re_content is not None: + re_content = '^.*' + re_content.replace('', '|') + '.*$' + assert re.match(re_content, content, flags=RegexFlag.IGNORECASE | RegexFlag.MULTILINE | RegexFlag.DOTALL), ( + f'invalid tokens predicted:' + f' ```\n{content}\n``` do not match /{re_content}/') + + +async def gather_tasks_results(context): + n_tasks = len(context.concurrent_tasks) + if context.debug: + print(f"Waiting for all {n_tasks} tasks results...") + for task_no in range(n_tasks): + context.tasks_result.append(await context.concurrent_tasks.pop()) + n_completions = len(context.tasks_result) + return n_completions + + +async def wait_for_health_status(context, + base_url, + expected_http_status_code, + expected_health_status, + params=None, + slots_idle=None, + slots_processing=None, + expected_slots=None): + if context.debug: + print(f"Starting checking for health for expected_health_status={expected_health_status}") + timeout = 3 # seconds + interval = 0.5 + counter = 0 + async with aiohttp.ClientSession() as session: + while True: + async with await session.get(f'{base_url}/health', params=params) as health_response: + status_code = health_response.status + health = await health_response.json() + if context.debug: + print(f"HEALTH - response for expected health status='{expected_health_status}' on " + f"'{base_url}/health'?{params} is {health}") + if (status_code == expected_http_status_code + and health['status'] == expected_health_status + and (slots_idle is None or health['slots_idle'] == slots_idle) + and (slots_processing is None or health['slots_processing'] == slots_processing)): + if expected_slots is not None: + assert_slots_status(health['slots'], expected_slots) + return + if (status_code == expected_http_status_code + and health['status'] == expected_health_status + and (slots_idle is None or 
health['slots_idle'] == slots_idle) + and (slots_processing is None or health['slots_processing'] == slots_processing)): + if expected_slots is not None: + assert_slots_status(health['slots'], expected_slots) + return + await asyncio.sleep(interval) + + counter += interval + if counter >= timeout: + # Sometimes health requests are triggered after completions are predicted + if expected_http_status_code == 503: + if len(context.tasks_result) == 0: + print("\x1b[5;37;43mWARNING: forcing concurrent tasks," + " busy health check missed, probably too fast inference\x1b[0m") + n_completions = await gather_tasks_results(context) + if n_completions > 0: + return + + assert False, 'timeout exceeded' + + +def assert_embeddings(embeddings): + assert len(embeddings) > 0 + embeddings_computed = False + for emb in embeddings: + if emb != 0: + embeddings_computed = True + assert embeddings_computed, f"Embeddings: {embeddings}" + + +async def request_slots_status(context, expected_slots): + async with aiohttp.ClientSession() as session: + async with await session.get(f'{context.base_url}/slots') as slots_response: + assert slots_response.status == 200 + slots = await slots_response.json() + assert_slots_status(slots, expected_slots) + + +def assert_slots_status(slots, expected_slots): + assert len(slots) == len(expected_slots) + for slot_id, (expected, slot) in enumerate(zip(expected_slots, slots)): + for key in expected: + assert expected[key] == slot[key], (f"invalid slot {slot_id}" + f" expected[{key}] != slot[{key}]" + f" = {expected[key]} != {slot[key]}") + + +def start_server_background(context): + context.server_path = '../../../build/bin/server' + if 'LLAMA_SERVER_BIN_PATH' in os.environ: + context.server_path = os.environ['LLAMA_SERVER_BIN_PATH'] + server_args = [ + '--host', context.server_fqdn, + '--port', context.server_port, + '--model', context.model_file + ] + if context.server_continuous_batching: + server_args.append('--cont-batching') + if context.server_embeddings: + server_args.append('--embedding') + if context.model_alias is not None: + server_args.extend(['--alias', context.model_alias]) + if context.n_ctx is not None: + server_args.extend(['--ctx-size', context.n_ctx]) + if context.n_slots is not None: + server_args.extend(['--parallel', context.n_slots]) + if context.n_server_predict is not None: + server_args.extend(['--n-predict', context.n_server_predict]) + if context.server_api_key is not None: + server_args.extend(['--api-key', context.server_api_key]) + if context.debug: + server_args.append('--verbose') + print(f"starting server with: {context.server_path}", *server_args) + context.server_process = subprocess.Popen( + [str(arg) for arg in [context.server_path, *server_args]], + close_fds=True) + print(f"server pid={context.server_process.pid}") diff --git a/examples/server/tests/features/wrong_usages.feature b/examples/server/tests/features/wrong_usages.feature new file mode 100644 index 000000000..e228b2371 --- /dev/null +++ b/examples/server/tests/features/wrong_usages.feature @@ -0,0 +1,21 @@ +# run with ./test.sh --tags wrong_usage +@wrong_usage +Feature: Wrong usage of llama.cpp server + + #3969 The user must always set --n-predict option + # to cap the number of tokens any completion request can generate + # or pass n_predict/max_tokens in the request. 
+ Scenario: Infinite loop + Given a server listening on localhost:8080 + And a model file stories260K.gguf + # Uncomment below to fix the issue + #And 64 server max tokens to predict + Then the server is starting + Given a prompt: + """ + Go to: infinite loop + """ + # Uncomment below to fix the issue + #And 128 max tokens to predict + Given concurrent completion requests + Then all prompts are predicted diff --git a/examples/server/tests/requirements.txt b/examples/server/tests/requirements.txt new file mode 100644 index 000000000..3e51b12dc --- /dev/null +++ b/examples/server/tests/requirements.txt @@ -0,0 +1,3 @@ +aiohttp~=3.9.3 +behave~=1.2.6 +openai~=0.25.0 diff --git a/examples/server/tests/tests.sh b/examples/server/tests/tests.sh new file mode 100755 index 000000000..17a4e6fc6 --- /dev/null +++ b/examples/server/tests/tests.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -eu + +if [ $# -lt 1 ] +then + # Start @llama.cpp scenario + behave --summary --stop --no-capture --exclude 'issues|wrong_usages' --tags llama.cpp +else + behave "$@" +fi + From 4c4cb30736582cacb1a164a9d4bc8e17b1014be7 Mon Sep 17 00:00:00 2001 From: Kawrakow <48489457+ikawrakow@users.noreply.github.com> Date: Sat, 24 Feb 2024 16:23:52 +0200 Subject: [PATCH 853/859] IQ3_S: a much better alternative to Q3_K (#5676) * iq4_nl: squash commits for easier rebase * Basics (quantize, dequantize) * CUDA dequantize and dot product * Slightly faster CUDA dot product (120 t/s) * Switch to 6-bit scales * Scalar dot product * AVX2 dot product * ARM_NEON dot product * Works on metal, but still slow * Slightly better Metal dot product * Another small Metal improvement * Metal dot product is getting there * Faster CUDA dot product * Add 1/8 ffn_down layers as Q5_K when no imatrix has been provided * Report the actual bpw * Add _xs mix that is 4.05 bpw for non-MoE models * Remove IQ4_XS for now, slightly adjust kvalues_iq4nl * AVX2 dot product uses Q8_0 instead of Q8_K * Add to test-backend-ops * Minor fix * Also use use Q5_K for attn_output in MoE models * Fixes after merging latest master * Switching to blocks of 32 * AVX2 for blocks of 32 * Scaler dot product for blocks of 32 * ARM_NEON dot product for blocks of 32 * Metal kernels for blocks of 32 * Slightly faster Metal kernels * Resurrecting iq3_xs After all the experimentation, nothing was better than this. * Minor PPL improvement via a block scale fudge factor * Minor improvement via 3 neighbours * iq3_xs: working scalar and AVX2 dot products * iq3_xs: ARM_NEON dot product - works but extremely slow (10 t/s) * iq3_xs: working Metal implementation * Adding IQ3_M - IQ3_XS mix with mostly Q4_K * iiq3_xs: a 3.4375 bpw variant * iq3_xs: make CUDA work for new version * iq3_xs: make scalar and AVX2 work for new version * iq3_s: make ARM_NEON work with new version * iq3_xs: make new version work on metal Performance is very similar to Q3_K_S * iq3_xs: tiny Metal speed improvement * iq3_xs: tiny Metal speed improvement * Fix stupid warning * Q3_K_XS now uses a mix of IQ3_XS and IQ3_XXS * iq3_xs: rename to iq3_s * iq3_s: make tests pass * Move Q3_K_XS mix to 3.25 bpw * Attempt to fix failing tests * Another attempt to fix the Windows builds * Attempt to fix ROCm * ROCm again * iq3_s: partial fix for QK_K = 64 * iq3_s: make it work on metal for QK_K = 64 Pleasent surprise: the coding was super-block size independent, so all it took was to delete some QK_K == 256 guards. * Will this fix ROCm? 
--------- Co-authored-by: Iwan Kawrakow --- examples/quantize/quantize.cpp | 2 + ggml-cuda.cu | 171 ++++++++- ggml-metal.m | 33 +- ggml-metal.metal | 304 +++++++++++++++ ggml-quants.c | 674 +++++++++++++++++++++++++++++---- ggml-quants.h | 20 + ggml.c | 31 ++ ggml.h | 2 + llama.cpp | 50 ++- llama.h | 2 + tests/test-backend-ops.cpp | 2 +- tests/test-quantize-fns.cpp | 4 +- 12 files changed, 1211 insertions(+), 84 deletions(-) diff --git a/examples/quantize/quantize.cpp b/examples/quantize/quantize.cpp index 37520857f..ab7e72aaf 100644 --- a/examples/quantize/quantize.cpp +++ b/examples/quantize/quantize.cpp @@ -27,6 +27,8 @@ static const std::vector QUANT_OPTIONS = { { "Q2_K", LLAMA_FTYPE_MOSTLY_Q2_K, " 2.63G, +0.6717 ppl @ LLaMA-v1-7B", }, { "Q2_K_S", LLAMA_FTYPE_MOSTLY_Q2_K_S, " 2.16G, +9.0634 ppl @ LLaMA-v1-7B", }, { "IQ3_XXS",LLAMA_FTYPE_MOSTLY_IQ3_XXS," 3.06 bpw quantization", }, + { "IQ3_S", LLAMA_FTYPE_MOSTLY_IQ3_S, " 3.44 bpw quantization", }, + { "IQ3_M", LLAMA_FTYPE_MOSTLY_IQ3_M, " 3.66 bpw quantization mix", }, { "Q3_K", LLAMA_FTYPE_MOSTLY_Q3_K_M, "alias for Q3_K_M" }, { "Q3_K_XS",LLAMA_FTYPE_MOSTLY_Q3_K_XS,"3-bit extra small quantization" , }, { "Q3_K_S", LLAMA_FTYPE_MOSTLY_Q3_K_S, " 2.75G, +0.5551 ppl @ LLaMA-v1-7B", }, diff --git a/ggml-cuda.cu b/ggml-cuda.cu index b0e454e02..21c612cb7 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -172,6 +172,7 @@ #endif typedef int8_t int8x4_t __attribute__((ext_vector_type(4))); +typedef uint8_t uint8x4_t __attribute__((ext_vector_type(4))); static __device__ __forceinline__ int __vsubss4(const int a, const int b) { const int8x4_t va = reinterpret_cast(a); const int8x4_t vb = reinterpret_cast(b); @@ -196,6 +197,18 @@ static __device__ __forceinline__ int __vsub4(const int a, const int b) { return __vsubss4(a, b); } +static __device__ __forceinline__ unsigned int __vcmpeq4(unsigned int a, unsigned int b) { + const uint8x4_t& va = reinterpret_cast(a); + const uint8x4_t& vb = reinterpret_cast(b); + unsigned int c; + uint8x4_t& vc = reinterpret_cast(c); +#pragma unroll + for (int i = 0; i < 4; ++i) { + vc[i] = va[i] == vb[i] ? 
0xff : 0x00; + } + return c; +} + static __device__ __forceinline__ int __dp4a(const int a, const int b, int c) { #if defined(__gfx906__) || defined(__gfx908__) || defined(__gfx90a__) || defined(__gfx1030__) c = __builtin_amdgcn_sdot4(a, b, c, false); @@ -518,6 +531,17 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +#define QR3_XS 8 +#define QI3_XS (QK_K / (4*QR3_XS)) +typedef struct { + half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t signs[QK_K/8]; + uint8_t scales[QK_K/64]; +} block_iq3_s; +static_assert(sizeof(block_iq3_s) == sizeof(ggml_fp16_t) + 27*(QK_K/64), "wrong iq3_s block size/padding"); + #define QR1_S 8 #define QI1_S (QK_K / (4*QR1_S)) typedef struct { @@ -1700,6 +1724,74 @@ static const __device__ uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +static const __device__ uint32_t iq3xs_grid[512] = { + 0x04040404, 0x0404040c, 0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, + 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, + 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, + 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, + 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, + 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, + 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, + 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, + 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, + 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, + 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, + 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, + 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, + 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, + 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, + 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, + 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, + 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, + 0x0c140c3e, 0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, + 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, + 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, + 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, + 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, + 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, + 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, + 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 
0x1404143e, + 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, + 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, + 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, + 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 0x1414240c, 0x14142c1c, 0x14142c3e, + 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, + 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, + 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, + 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, + 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, + 0x143e2c04, 0x1c040414, 0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, + 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, + 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, + 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, + 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, + 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, + 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, + 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, + 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, + 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, + 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, + 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, + 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, + 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, + 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, + 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, + 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, + 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, + 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, + 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, + 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, + 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, + 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, + 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, + 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, + 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, + 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, + 0x3e141c04, 
0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, + 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +}; + + static const __device__ uint64_t iq1s_grid[512] = { 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, 0xffffffff01ff00ff, 0xffffffff01ff0001, 0xffffffff0101ffff, 0xffffffff0101ff01, @@ -1973,6 +2065,32 @@ static __global__ void dequantize_block_iq3_xxs(const void * __restrict__ vx, ds } +template +static __global__ void dequantize_block_iq3_s(const void * __restrict__ vx, dst_t * __restrict__ yy) { + + const int i = blockIdx.x; + const block_iq3_s * x = (const block_iq3_s *) vx; + + const int tid = threadIdx.x; +#if QK_K == 256 + const int il = tid/8; // 0...3 + const int ib = tid%8; // 0...7 + dst_t * y = yy + i*QK_K + 32*ib + 8*il; + const uint8_t * qs = x[i].qs + 8*ib; + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*il+0] | ((x[i].qh[ib] << (8-2*il)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*il+1] | ((x[i].qh[ib] << (7-2*il)) & 256))); + const float d = (float)x[i].d * (0.5f + ((x[i].scales[ib/2] >> 4*(ib%2)) & 0xf)) * 0.5f; + const uint8_t signs = x[i].signs[4*ib + il]; + for (int j = 0; j < 4; ++j) { + y[j+0] = d * grid1[j] * (signs & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = d * grid2[j] * (signs & kmask_iq2xs[j+4] ? -1.f : 1.f); + } +#else + assert(false); +#endif + +} + template static __global__ void dequantize_block_iq1_s(const void * __restrict__ vx, dst_t * __restrict__ yy) { @@ -4717,6 +4835,41 @@ static __device__ __forceinline__ float vec_dot_iq3_xxs_q8_1( #endif } +// TODO: don't use lookup table for signs +static __device__ __forceinline__ float vec_dot_iq3_s_q8_1( + const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { +#if __CUDA_ARCH__ >= MIN_CC_DP4A // lowest compute capability for integer intrinsics +#if QK_K == 256 + const block_iq3_s * bq2 = (const block_iq3_s *) vbq; + + const int ib32 = iqs; + const uint8_t * qs = bq2->qs + 8*ib32; + const int8_t * q8 = bq8_1[ib32].qs; + int sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint32_t * grid1 = iq3xs_grid + (qs[2*l+0] | ((bq2->qh[ib32] << (8 - 2*l)) & 256)); + const uint32_t * grid2 = iq3xs_grid + (qs[2*l+1] | ((bq2->qh[ib32] << (7 - 2*l)) & 256)); + uint32_t signs0 = __vcmpeq4(((bq2->signs[4*ib32+l] & 0xf) * 0x01010101) & 0x08040201, 0x08040201); + uint32_t signs1 = __vcmpeq4(((bq2->signs[4*ib32+l] >> 4) * 0x01010101) & 0x08040201, 0x08040201); + const int grid_l = __vsub4(grid1[0] ^ signs0, signs0); + const int grid_h = __vsub4(grid2[0] ^ signs1, signs1); + sumi = __dp4a(grid_l, *((int *)q8+0), sumi); + sumi = __dp4a(grid_h, *((int *)q8+1), sumi); + q8 += 8; + } + const float d = (float)bq2->d * (0.5f + ((bq2->scales[ib32/2] >> 4*(ib32%2)) & 0xf)) * __low2float(bq8_1[ib32].ds) * 0.5f; + return d * sumi; +#else + assert(false); + return 0.f; +#endif +#else + assert(false); + return 0.f; +#endif +} + + static __device__ __forceinline__ float vec_dot_iq1_s_q8_1( const void * __restrict__ vbq, const block_q8_1 * __restrict__ bq8_1, const int & iqs) { #if QK_K == 256 @@ -6849,6 +7002,12 @@ static void dequantize_row_iq3_xxs_cuda(const void * vx, dst_t * y, const int k, dequantize_block_iq3_xxs<<>>(vx, y); } +template +static void dequantize_row_iq3_s_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { + const int nb = k / QK_K; + dequantize_block_iq3_s<<>>(vx, y); +} + template static void 
dequantize_row_iq1_s_cuda(const void * vx, dst_t * y, const int k, cudaStream_t stream) { const int nb = k / QK_K; @@ -6904,6 +7063,8 @@ static to_fp16_cuda_t ggml_get_to_fp16_cuda(ggml_type type) { return dequantize_row_iq1_s_cuda; case GGML_TYPE_IQ4_NL: return dequantize_row_iq4_nl_cuda; + case GGML_TYPE_IQ3_S: + return dequantize_row_iq3_s_cuda; case GGML_TYPE_F32: return convert_unary_cuda; default: @@ -6943,6 +7104,8 @@ static to_fp32_cuda_t ggml_get_to_fp32_cuda(ggml_type type) { return dequantize_row_iq1_s_cuda; case GGML_TYPE_IQ4_NL: return dequantize_row_iq4_nl_cuda; + case GGML_TYPE_IQ3_S: + return dequantize_row_iq3_s_cuda; case GGML_TYPE_F16: return convert_unary_cuda; default: @@ -8688,6 +8851,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_RDNA2 ? 128 : 64; default: GGML_ASSERT(false); @@ -8713,6 +8877,7 @@ static int64_t get_row_rounding(ggml_type type, const std::array= CC_VOLTA ? 128 : 64; case GGML_TYPE_Q6_K: return 64; @@ -8818,6 +8983,10 @@ static void ggml_cuda_op_mul_mat_vec_q( mul_mat_vec_q_cuda (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); break; + case GGML_TYPE_IQ3_S: + mul_mat_vec_q_cuda + (src0_dd_i, src1_ddq_i, dst_dd_i, ne00, row_diff, src1_padded_row_size, src1_ncols, nrows_dst, stream); + break; default: GGML_ASSERT(false); break; @@ -11541,7 +11710,7 @@ GGML_CALL static bool ggml_backend_cuda_supports_op(ggml_backend_t backend, cons } ggml_type a_type = a->type; if (a_type == GGML_TYPE_IQ2_XXS || a_type == GGML_TYPE_IQ2_XS || a_type == GGML_TYPE_IQ3_XXS || - a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL) { + a_type == GGML_TYPE_IQ1_S || a_type == GGML_TYPE_IQ4_NL || a_type == GGML_TYPE_IQ3_S) { if (b->ne[1] == 1 && ggml_nrows(b) > 1) { return false; } diff --git a/ggml-metal.m b/ggml-metal.m index 0d4aa4309..ee584cfa7 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -61,6 +61,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, + GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, @@ -85,6 +86,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, @@ -105,6 +107,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, @@ -122,6 +125,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, @@ -139,6 +143,7 @@ enum ggml_metal_kernel_type { GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, + GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, 
GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, GGML_METAL_KERNEL_TYPE_ROPE_F32, @@ -452,6 +457,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS, get_rows_iq2_xxs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS, get_rows_iq2_xs, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS, get_rows_iq3_xxs, true); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S, get_rows_iq3_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S, get_rows_iq1_s, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL, get_rows_iq4_nl, true); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_GET_ROWS_I32, get_rows_i32, true); @@ -476,6 +482,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XXS_F32, mul_mv_iq2_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ2_XS_F32, mul_mv_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32, mul_mv_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32, mul_mv_iq3_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ1_S_F32, mul_mv_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_IQ4_NL_F32, mul_mv_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_F32_F32, mul_mv_id_f32_f32, ctx->support_simdgroup_reduction); @@ -496,6 +503,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XXS_F32, mul_mv_id_iq2_xxs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ2_XS_F32, mul_mv_id_iq2_xs_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32, mul_mv_id_iq3_xxs_f32, ctx->support_simdgroup_reduction); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32, mul_mv_id_iq3_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ1_S_F32, mul_mv_id_iq1_s_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ4_NL_F32, mul_mv_id_iq4_nl_f32, ctx->support_simdgroup_reduction); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_F32_F32, mul_mm_f32_f32, ctx->support_simdgroup_mm); @@ -513,6 +521,7 @@ static struct ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32, mul_mm_iq2_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32, mul_mm_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32, mul_mm_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32, mul_mm_iq3_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32, mul_mm_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32, mul_mm_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_F32_F32, mul_mm_id_f32_f32, ctx->support_simdgroup_mm); @@ -530,6 +539,7 @@ static struct 
ggml_metal_context * ggml_metal_init(int n_cb) { GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32, mul_mm_id_iq2_xxs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32, mul_mm_id_iq2_xs_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32, mul_mm_id_iq3_xxs_f32, ctx->support_simdgroup_mm); + GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32, mul_mm_id_iq3_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32, mul_mm_id_iq1_s_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32, mul_mm_id_iq4_nl_f32, ctx->support_simdgroup_mm); GGML_METAL_ADD_KERNEL(GGML_METAL_KERNEL_TYPE_ROPE_F32, rope_f32, true); @@ -1347,6 +1357,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XXS_F32].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ3_S_F32 ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ1_S_F32 ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL MAT-MAT not implemented"); @@ -1483,6 +1494,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_XXS_F32].pipeline; } break; + case GGML_TYPE_IQ3_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_IQ3_S_F32].pipeline; + } break; case GGML_TYPE_IQ1_S: { nth0 = 4; @@ -1537,8 +1554,8 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src0t == GGML_TYPE_IQ3_XXS) { - const int mem_size = 256*4+128; + else if (src0t == GGML_TYPE_IQ3_XXS || src0t == GGML_TYPE_IQ3_S) { + const int mem_size = src0t == GGML_TYPE_IQ3_XXS ? 
256*4+128 : 512*4; [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne01 + 7)/8, ne11, ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1640,6 +1657,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XXS_F32].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ2_XS_F32 ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_XXS_F32].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ3_S_F32 ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ1_S_F32 ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MM_ID_IQ4_NL_F32 ].pipeline; break; default: GGML_ASSERT(false && "MUL_MAT_ID not implemented"); @@ -1779,6 +1797,12 @@ static bool ggml_metal_graph_compute( nth1 = 16; pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_XXS_F32].pipeline; } break; + case GGML_TYPE_IQ3_S: + { + nth0 = 4; + nth1 = 16; + pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_MUL_MV_ID_IQ3_S_F32].pipeline; + } break; case GGML_TYPE_IQ1_S: { nth0 = 4; @@ -1849,8 +1873,8 @@ static bool ggml_metal_graph_compute( [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } - else if (src2t == GGML_TYPE_IQ3_XXS) { - const int mem_size = 256*4+128; + else if (src2t == GGML_TYPE_IQ3_XXS || src2t == GGML_TYPE_IQ3_S) { + const int mem_size = src2t == GGML_TYPE_IQ3_XXS ? 256*4+128 : 512*4; [encoder setThreadgroupMemoryLength:mem_size atIndex:0]; [encoder dispatchThreadgroups:MTLSizeMake((ne21 + 7)/8, _ne1, ne01*ne12*ne13) threadsPerThreadgroup:MTLSizeMake(nth0, nth1, 1)]; } @@ -1900,6 +1924,7 @@ static bool ggml_metal_graph_compute( case GGML_TYPE_IQ2_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XXS].pipeline; break; case GGML_TYPE_IQ2_XS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ2_XS ].pipeline; break; case GGML_TYPE_IQ3_XXS: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_XXS].pipeline; break; + case GGML_TYPE_IQ3_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ3_S ].pipeline; break; case GGML_TYPE_IQ1_S: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ1_S ].pipeline; break; case GGML_TYPE_IQ4_NL: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_IQ4_NL ].pipeline; break; case GGML_TYPE_I32: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_GET_ROWS_I32 ].pipeline; break; diff --git a/ggml-metal.metal b/ggml-metal.metal index c223a981c..b3bf40539 100644 --- a/ggml-metal.metal +++ b/ggml-metal.metal @@ -2525,6 +2525,20 @@ typedef struct { } block_iq3_xxs; // 98 bytes / block for QK_K = 256, so 3.0625 bpw +// 3.4375 bpw +#if QK_K == 64 +#define IQ3S_N_SCALE 2 +#else +#define IQ3S_N_SCALE QK_K/64 +#endif +typedef struct { + half d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t signs[QK_K/8]; + uint8_t scales[IQ3S_N_SCALE]; +} block_iq3_s; + typedef struct { half d; uint8_t qs[QK_K/8]; @@ -3795,6 +3809,73 @@ constexpr constant static uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +constexpr constant static uint32_t iq3xs_grid[512] = { + 0x04040404, 0x0404040c, 
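The block_iq3_s struct above pins down the IQ3_S storage cost, and the threadgroup memory sizes in the preceding hunk follow from the grid tables: IQ3_XXS caches a 256-entry uint32 grid plus a 128-byte sign table (256*4+128 = 1152 bytes), while IQ3_S caches a 512-entry grid (512*4 = 2048 bytes). A self-contained check of both figures, assuming QK_K == 256 and a 2-byte half:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define QK_K 256

/* mirror of the block_iq3_s layout shown above */
typedef struct {
    uint16_t d;               /*  2: fp16 super-block scale          */
    uint8_t  qs[QK_K/4];      /* 64: low 8 bits of each grid index   */
    uint8_t  qh[QK_K/32];     /*  8: 9th index bit, one byte per 32  */
    uint8_t  signs[QK_K/8];   /* 32: one sign bit per weight         */
    uint8_t  scales[QK_K/64]; /*  4: two 4-bit scales per byte       */
} demo_block_iq3_s;

int main(void) {
    assert(sizeof(demo_block_iq3_s) == 110);                 /* 2+64+8+32+4 */
    printf("%.4f bpw\n", 8.0*sizeof(demo_block_iq3_s)/QK_K); /* 3.4375     */
    assert(256*4 + 128 == 1152);  /* iq3_xxs threadgroup cache */
    assert(512*4       == 2048);  /* iq3_s threadgroup cache   */
    return 0;
}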
0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, + 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, + 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, + 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, + 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, + 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, + 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, + 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, + 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, + 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, + 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, + 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, + 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, + 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, + 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, + 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, + 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, + 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, + 0x0c140c3e, 0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, + 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, + 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, + 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, + 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, + 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, + 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, + 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 0x1404143e, + 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, + 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, + 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, + 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 0x1414240c, 0x14142c1c, 0x14142c3e, + 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, + 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, + 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, + 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, + 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, + 0x143e2c04, 0x1c040414, 0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, + 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 
0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, + 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, + 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, + 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, + 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, + 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, + 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, + 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, + 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, + 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, + 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, + 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, + 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, + 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, + 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, + 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, + 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, + 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, + 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, + 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, + 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, + 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, + 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, + 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, + 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, + 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, + 0x3e141c04, 0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, + 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +}; + #define NGRID_IQ1S 512 constexpr constant static uint64_t iq1s_grid[NGRID_IQ1S] = { 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, @@ -4361,6 +4442,136 @@ kernel void kernel_mul_mv_iq3_xxs_f32( kernel_mul_mv_iq3_xxs_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); } +void kernel_mul_mv_iq3_s_f32_impl( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant int64_t & ne10, + constant int64_t & ne12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], 
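Each IQ3_S group of four weights is addressed by a 9-bit index into the 512-entry iq3xs_grid: the low 8 bits live in qs and the 9th bit comes from the per-32-weight qh byte, which is why the lookups in the kernels shift qh by (8-2*l) and (7-2*l) and mask with 256. A scalar sketch of the decode, with illustrative helper names:

#include <assert.h>
#include <stdint.h>

/* low 8 bits from qs, 9th bit from bit (2*l) or (2*l+1) of qh */
static inline int iq3s_index_even(const uint8_t *qs, uint8_t qh, int l) {
    return qs[2*l + 0] | ((qh << (8 - 2*l)) & 256);
}
static inline int iq3s_index_odd(const uint8_t *qs, uint8_t qh, int l) {
    return qs[2*l + 1] | ((qh << (7 - 2*l)) & 256);
}

int main(void) {
    uint8_t qs[8] = {0x12, 0x34, 0, 0, 0, 0, 0, 0};
    uint8_t qh    = 0x02;                  /* bit 1 set: 9th bit of qs[1] */
    assert(iq3s_index_even(qs, qh, 0) == 0x012);
    assert(iq3s_index_odd (qs, qh, 0) == 0x134);
    return 0;
}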
+ uint sgitg[[simdgroup_index_in_threadgroup]]) { + + const int nb = ne00/QK_K; + const int r0 = tgpig.x; + const int r1 = tgpig.y; + const int im = tgpig.z; + + const int first_row = (r0 * N_SIMDGROUP + sgitg) * N_DST; + const int ib_row = first_row * nb; + + const uint i12 = im%ne12; + const uint i13 = im/ne12; + + const uint offset0 = (i12/r2)*(nb*ne01) + (i13/r3)*(nb*ne01*ne02); + + device const block_iq3_s * x = (device const block_iq3_s *) src0 + ib_row + offset0; + device const float * y = (device const float *) src1 + r1*ne10 + im*ne00*ne1; + + float yl[32]; + float sumf[N_DST]={0.f}, all_sum; + + const int nb32 = nb * (QK_K / 32); + + threadgroup uint32_t * values = (threadgroup uint32_t *)shared_values; + { + int nval = 8; + int pos = (32*sgitg + tiisg)*nval; + for (int i = 0; i < nval; ++i) values[pos + i] = iq3xs_grid[pos + i]; + threadgroup_barrier(mem_flags::mem_threadgroup); + } + + const int ix = tiisg; + + device const float * y4 = y + 32 * ix; + + for (int ib32 = ix; ib32 < nb32; ib32 += 32) { + + for (int i = 0; i < 32; ++i) { + yl[i] = y4[i]; + } + + const int ibl = ib32 / (QK_K / 32); + const int ib = ib32 % (QK_K / 32); + + device const block_iq3_s * xr = x + ibl; + device const uint8_t * qs = xr->qs + 8 * ib; + device const uint8_t * qh = xr->qh + ib; + device const uint8_t * sc = xr->scales + (ib/2); + device const uint8_t * signs = xr->signs + 4 * ib; + device const half * dh = &xr->d; + + for (int row = 0; row < N_DST; row++) { + + const float db = dh[0]; + const float d = db * (0.5f + ((sc[0] >> 4*(ib%2)) & 0xf)); + + float2 sum = {0}; + for (int l = 0; l < 4; ++l) { + const threadgroup uint8_t * grid1 = (const threadgroup uint8_t *)(values + (qs[2*l+0] | ((qh[0] << (8-2*l)) & 256))); + const threadgroup uint8_t * grid2 = (const threadgroup uint8_t *)(values + (qs[2*l+1] | ((qh[0] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + sum[0] += yl[8*l + j + 0] * grid1[j] * select(1, -1, signs[l] & kmask_iq2xs[j+0]); + sum[1] += yl[8*l + j + 4] * grid2[j] * select(1, -1, signs[l] & kmask_iq2xs[j+4]); + } + } + sumf[row] += d * (sum[0] + sum[1]); + + dh += nb*sizeof(block_iq3_s)/2; + qs += nb*sizeof(block_iq3_s); + qh += nb*sizeof(block_iq3_s); + sc += nb*sizeof(block_iq3_s); + signs += nb*sizeof(block_iq3_s); + } + + y4 += 32 * 32; + } + + for (int row = 0; row < N_DST; ++row) { + all_sum = simd_sum(sumf[row]); + if (tiisg == 0) { + dst[r1*ne0 + im*ne0*ne1 + first_row + row] = all_sum * 0.5f; + } + } +} + +[[host_name("kernel_mul_mv_iq3_s_f32")]] +kernel void kernel_mul_mv_iq3_s_f32( + device const void * src0, + device const float * src1, + device float * dst, + constant int64_t & ne00, + constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint & r2, + constant uint & r3, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + + kernel_mul_mv_iq3_s_f32_impl(src0, src1, dst, ne00, ne01, ne02, ne10, ne12, ne0, ne1, r2, r3, shared_values, tgpig, tiisg, sgitg); +} + void kernel_mul_mv_iq1_s_f32_impl( device const void * src0, device const float * src1, @@ -4952,6 +5163,31 @@ void dequantize_iq3_xxs(device const 
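Signs are stored one bit per weight and applied at load time through the kmask_iq2xs bit table ({1, 2, 4, ..., 128}) that the iq2_xs kernels already use; the select(1, -1, signs[l] & kmask_iq2xs[j]) in the Metal code above is one lane of exactly this. A scalar equivalent:

#include <assert.h>
#include <stdint.h>

static const uint8_t kmask[8] = {1, 2, 4, 8, 16, 32, 64, 128};

/* apply a packed sign byte to 8 unsigned grid magnitudes */
static void apply_signs(const uint8_t *grid, uint8_t signs, float d, float *out) {
    for (int j = 0; j < 8; ++j) {
        out[j] = d * grid[j] * ((signs & kmask[j]) ? -1.0f : 1.0f);
    }
}

int main(void) {
    uint8_t grid[8] = {4, 12, 20, 28, 4, 12, 20, 28};
    float out[8];
    apply_signs(grid, 0x81, 1.0f, out);   /* bits 0 and 7 set: negate ends */
    assert(out[0] == -4.0f && out[7] == -28.0f && out[1] == 12.0f);
    return 0;
}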
block_iq3_xxs * xb, short il, thread type4x } } +template +void dequantize_iq3_s(device const block_iq3_s * xb, short il, thread type4x4 & reg) { + // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 + const float d = xb->d; + const int ib32 = il/2; + il = il%2; + // il = 0 or 1. il = 0 processes the first 16 quants in a block of 32, il = 1 the second 16 + device const uint8_t * qs = xb->qs + 8*ib32; + device const uint8_t * signs = xb->signs + 4*ib32 + 2*il; + const uint8_t qh = xb->qh[ib32] >> 4*il; + const float dl = d * (0.5f + ((xb->scales[ib32/2] >> 4*(ib32%2)) & 0xf)) * 0.5f; + constant uint8_t * grid1 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+0] | ((qh << 8) & 256))); + constant uint8_t * grid2 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+1] | ((qh << 7) & 256))); + for (int i = 0; i < 4; ++i) { + reg[0][i] = dl * grid1[i] * select(1, -1, signs[0] & kmask_iq2xs[i+0]); + reg[1][i] = dl * grid2[i] * select(1, -1, signs[0] & kmask_iq2xs[i+4]); + } + grid1 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+2] | ((qh << 6) & 256))); + grid2 = (constant uint8_t *)(iq3xs_grid + (qs[4*il+3] | ((qh << 5) & 256))); + for (int i = 0; i < 4; ++i) { + reg[2][i] = dl * grid1[i] * select(1, -1, signs[1] & kmask_iq2xs[i+0]); + reg[3][i] = dl * grid2[i] * select(1, -1, signs[1] & kmask_iq2xs[i+4]); + } +} + template void dequantize_iq1_s(device const block_iq1_s * xb, short il, thread type4x4 & reg) { // il is 0...15 for QK_K = 256 => index of block of 32 is il/2 @@ -5525,6 +5761,7 @@ template [[host_name("kernel_get_rows_q6_K")]] kernel get_rows_t kernel_get_rows template [[host_name("kernel_get_rows_iq2_xxs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq2_xs")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq3_xxs")]] kernel get_rows_t kernel_get_rows; +template [[host_name("kernel_get_rows_iq3_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq1_s")]] kernel get_rows_t kernel_get_rows; template [[host_name("kernel_get_rows_iq4_nl")]] kernel get_rows_t kernel_get_rows; @@ -5566,6 +5803,7 @@ template [[host_name("kernel_mul_mm_q6_K_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq2_xs_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq3_xxs_f32")]] kernel mat_mm_t kernel_mul_mm; +template [[host_name("kernel_mul_mm_iq3_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq1_s_f32")]] kernel mat_mm_t kernel_mul_mm; template [[host_name("kernel_mul_mm_iq4_nl_f32")]] kernel mat_mm_t kernel_mul_mm; @@ -5619,6 +5857,7 @@ template [[host_name("kernel_mul_mm_id_q6_K_f32")]] kernel mat_mm_id_t kernel_mu template [[host_name("kernel_mul_mm_id_iq2_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq2_xs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq3_xxs_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; +template [[host_name("kernel_mul_mm_id_iq3_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq1_s_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; template [[host_name("kernel_mul_mm_id_iq4_nl_f32")]] kernel mat_mm_id_t kernel_mul_mm_id; @@ -6589,6 +6828,71 @@ kernel void kernel_mul_mv_id_iq3_xxs_f32( sgitg); } +[[host_name("kernel_mul_mv_id_iq3_s_f32")]] +kernel void kernel_mul_mv_id_iq3_s_f32( + device const char * ids, + device const char * src1, + device float * dst, + constant uint64_t & nbi1, + constant int64_t & ne00, + 
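dequantize_iq3_s above decodes a 4-bit block scale s as d * (0.5f + s) * 0.5f, while the dot-product paths further down accumulate with the integer factor 2*s + 1 and multiply by 0.25f at the end; both forms are d*(2s+1)/4. A small check (the float comparison is exact here because every intermediate factor is an integer or a power of two):

#include <assert.h>
#include <stdio.h>

int main(void) {
    float d = 0.125f;   /* any power-of-two super-block scale */
    for (int s = 0; s < 16; ++s) {
        float a = d * (0.5f + s) * 0.5f;  /* dequantize form  */
        float b = 0.25f * d * (2*s + 1);  /* dot-product form */
        assert(a == b);                   /* both are d*(2s+1)/4 */
    }
    printf("scale decode forms agree for all 16 nibble values\n");
    return 0;
}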
constant int64_t & ne01, + constant int64_t & ne02, + constant uint64_t & nb00, + constant uint64_t & nb01, + constant uint64_t & nb02, + constant int64_t & ne10, + constant int64_t & ne11, + constant int64_t & ne12, + constant int64_t & ne13, + constant uint64_t & nb10, + constant uint64_t & nb11, + constant uint64_t & nb12, + constant int64_t & ne0, + constant int64_t & ne1, + constant uint64_t & nb1, + constant uint & r2, + constant uint & r3, + constant int & idx, + device const char * src00, + device const char * src01, + device const char * src02, + device const char * src03, + device const char * src04, + device const char * src05, + device const char * src06, + device const char * src07, + threadgroup int8_t * shared_values [[threadgroup(0)]], + uint3 tgpig[[threadgroup_position_in_grid]], + uint tiitg[[thread_index_in_threadgroup]], + uint tiisg[[thread_index_in_simdgroup]], + uint sgitg[[simdgroup_index_in_threadgroup]]) { + device const char * src0[8] = {src00, src01, src02, src03, src04, src05, src06, src07}; + + const int64_t bid = tgpig.z/(ne12*ne13); + + tgpig.z = tgpig.z%(ne12*ne13); + + const int32_t id = ((device int32_t *) (ids + bid*nbi1))[idx]; + + kernel_mul_mv_iq3_s_f32_impl( + src0[id], + (device const float *) (src1 + bid*nb11), + dst + bid*ne0, + ne00, + ne01, + ne02, + ne10, + ne12, + ne0, + ne1, + r2, + r3, + shared_values, + tgpig, + tiisg, + sgitg); +} + [[host_name("kernel_mul_mv_id_iq1_s_f32")]] kernel void kernel_mul_mv_id_iq1_s_f32( device const char * ids, diff --git a/ggml-quants.c b/ggml-quants.c index b15977f53..5c5f2ce1b 100644 --- a/ggml-quants.c +++ b/ggml-quants.c @@ -3505,6 +3505,73 @@ static const uint32_t iq3xxs_grid[256] = { 0x3e1c1c1c, 0x3e1c3404, 0x3e24140c, 0x3e24240c, 0x3e2c0404, 0x3e2c0414, 0x3e2c1424, 0x3e341c04, }; +static const uint32_t iq3xs_grid[512] = { + 0x04040404, 0x0404040c, 0x04040414, 0x0404042c, 0x0404043e, 0x04040c04, 0x04040c0c, 0x04040c14, + 0x04040c24, 0x04040c34, 0x04041404, 0x0404140c, 0x0404142c, 0x04041c1c, 0x04042404, 0x04042414, + 0x0404242c, 0x0404243e, 0x04042c0c, 0x04042c1c, 0x04043404, 0x04043414, 0x04043e0c, 0x04043e24, + 0x04043e3e, 0x040c0404, 0x040c040c, 0x040c0414, 0x040c0424, 0x040c0c04, 0x040c0c0c, 0x040c0c2c, + 0x040c1404, 0x040c141c, 0x040c143e, 0x040c1c0c, 0x040c1c2c, 0x040c2424, 0x040c340c, 0x040c342c, + 0x040c3e14, 0x04140404, 0x0414040c, 0x0414042c, 0x0414043e, 0x04140c04, 0x04140c1c, 0x04140c34, + 0x0414140c, 0x0414142c, 0x04141c04, 0x04141c24, 0x04142414, 0x0414242c, 0x0414243e, 0x04142c0c, + 0x04142c1c, 0x04143e04, 0x04143e1c, 0x041c041c, 0x041c0c0c, 0x041c0c2c, 0x041c1404, 0x041c1414, + 0x041c1c0c, 0x041c1c1c, 0x041c1c34, 0x041c2424, 0x041c2c04, 0x041c2c14, 0x041c343e, 0x041c3e0c, + 0x041c3e2c, 0x04240404, 0x04240c1c, 0x04240c3e, 0x0424140c, 0x04241424, 0x04241c14, 0x04242404, + 0x0424241c, 0x04242c0c, 0x04243e04, 0x042c0414, 0x042c0424, 0x042c1404, 0x042c1414, 0x042c1434, + 0x042c1c1c, 0x042c240c, 0x042c242c, 0x042c243e, 0x042c3434, 0x042c3e1c, 0x04340434, 0x04340c0c, + 0x04340c1c, 0x04341c0c, 0x04342c14, 0x04343e0c, 0x043e0404, 0x043e0414, 0x043e0424, 0x043e1404, + 0x043e1414, 0x043e1434, 0x043e1c1c, 0x043e2c04, 0x043e2c24, 0x0c040404, 0x0c04040c, 0x0c040414, + 0x0c040424, 0x0c040c04, 0x0c040c0c, 0x0c040c1c, 0x0c040c2c, 0x0c040c3e, 0x0c041404, 0x0c041414, + 0x0c041c0c, 0x0c041c24, 0x0c041c34, 0x0c042c24, 0x0c042c34, 0x0c04340c, 0x0c043e14, 0x0c0c0404, + 0x0c0c040c, 0x0c0c041c, 0x0c0c0434, 0x0c0c0c04, 0x0c0c0c24, 0x0c0c140c, 0x0c0c1c04, 0x0c0c1c1c, + 0x0c0c240c, 0x0c0c2c04, 0x0c0c2c14, 
0x0c0c3e04, 0x0c0c3e34, 0x0c140404, 0x0c140c14, 0x0c140c2c, + 0x0c140c3e, 0x0c141404, 0x0c141424, 0x0c141c14, 0x0c142404, 0x0c14241c, 0x0c142c2c, 0x0c143404, + 0x0c143e14, 0x0c1c040c, 0x0c1c0424, 0x0c1c043e, 0x0c1c0c04, 0x0c1c0c1c, 0x0c1c140c, 0x0c1c143e, + 0x0c1c1c04, 0x0c1c1c24, 0x0c1c240c, 0x0c1c3414, 0x0c1c3e04, 0x0c24041c, 0x0c24042c, 0x0c240c14, + 0x0c240c24, 0x0c241c0c, 0x0c241c1c, 0x0c242414, 0x0c242434, 0x0c242c04, 0x0c242c24, 0x0c2c040c, + 0x0c2c0c04, 0x0c2c0c1c, 0x0c2c140c, 0x0c2c1c04, 0x0c2c1c14, 0x0c2c2c0c, 0x0c341404, 0x0c341424, + 0x0c34143e, 0x0c342424, 0x0c342434, 0x0c3e040c, 0x0c3e041c, 0x0c3e0c04, 0x0c3e0c14, 0x0c3e140c, + 0x0c3e1c2c, 0x0c3e240c, 0x0c3e3414, 0x0c3e3e04, 0x14040404, 0x1404040c, 0x1404041c, 0x1404042c, + 0x1404043e, 0x14040c04, 0x14040c14, 0x14040c24, 0x14040c34, 0x1404140c, 0x1404141c, 0x1404143e, + 0x14041c04, 0x14041c14, 0x1404240c, 0x1404241c, 0x1404242c, 0x14042c04, 0x14042c14, 0x1404343e, + 0x14043e04, 0x14043e1c, 0x14043e2c, 0x140c0404, 0x140c0414, 0x140c0c04, 0x140c0c1c, 0x140c0c3e, + 0x140c1414, 0x140c142c, 0x140c1c0c, 0x140c1c24, 0x140c2414, 0x140c2c0c, 0x1414040c, 0x14140424, + 0x1414043e, 0x1414140c, 0x1414141c, 0x14141c04, 0x14141c3e, 0x1414240c, 0x14142c1c, 0x14142c3e, + 0x14143e0c, 0x14143e24, 0x141c0404, 0x141c0414, 0x141c042c, 0x141c0c0c, 0x141c1414, 0x141c1424, + 0x141c1c0c, 0x141c1c1c, 0x141c2414, 0x141c2c04, 0x141c3434, 0x1424040c, 0x1424043e, 0x14241404, + 0x1424141c, 0x14241c14, 0x14241c2c, 0x1424240c, 0x14243e14, 0x14243e2c, 0x142c0424, 0x142c0c0c, + 0x142c1414, 0x142c1c3e, 0x142c2404, 0x142c2c1c, 0x142c3e04, 0x14340404, 0x14340414, 0x1434043e, + 0x1434140c, 0x14342c2c, 0x1434340c, 0x143e042c, 0x143e0c0c, 0x143e1434, 0x143e1c04, 0x143e241c, + 0x143e2c04, 0x1c040414, 0x1c040c0c, 0x1c040c1c, 0x1c040c2c, 0x1c040c3e, 0x1c041414, 0x1c041c0c, + 0x1c041c1c, 0x1c041c2c, 0x1c042414, 0x1c042424, 0x1c04243e, 0x1c042c0c, 0x1c04341c, 0x1c043e0c, + 0x1c0c040c, 0x1c0c041c, 0x1c0c042c, 0x1c0c0c24, 0x1c0c140c, 0x1c0c141c, 0x1c0c2404, 0x1c0c3404, + 0x1c0c3e14, 0x1c0c3e34, 0x1c140404, 0x1c140c14, 0x1c141404, 0x1c141c14, 0x1c141c24, 0x1c142c04, + 0x1c1c040c, 0x1c1c0c04, 0x1c1c0c24, 0x1c1c140c, 0x1c1c141c, 0x1c1c143e, 0x1c1c1c04, 0x1c1c240c, + 0x1c1c241c, 0x1c1c243e, 0x1c1c2c2c, 0x1c1c3e1c, 0x1c24041c, 0x1c240c0c, 0x1c240c34, 0x1c241414, + 0x1c241c0c, 0x1c242c14, 0x1c243404, 0x1c243424, 0x1c2c040c, 0x1c2c0c04, 0x1c2c0c14, 0x1c2c142c, + 0x1c2c1c14, 0x1c2c2424, 0x1c2c2c34, 0x1c2c3e1c, 0x1c340c34, 0x1c34240c, 0x1c3e040c, 0x1c3e041c, + 0x1c3e1404, 0x1c3e1414, 0x1c3e1c2c, 0x24040404, 0x24040424, 0x24040c14, 0x24041404, 0x24041424, + 0x2404143e, 0x24041c14, 0x2404240c, 0x24042c04, 0x24043e04, 0x240c0414, 0x240c043e, 0x240c0c0c, + 0x240c0c1c, 0x240c1414, 0x240c1c04, 0x240c1c2c, 0x240c241c, 0x240c2c0c, 0x240c2c2c, 0x2414040c, + 0x2414041c, 0x24140c04, 0x24140c2c, 0x2414140c, 0x24141c1c, 0x24142404, 0x24142c3e, 0x24143414, + 0x24143e04, 0x241c0424, 0x241c0c0c, 0x241c0c1c, 0x241c1404, 0x241c1414, 0x241c1c0c, 0x241c1c2c, + 0x24240404, 0x24240414, 0x24241424, 0x24241c3e, 0x24242404, 0x24243e0c, 0x242c042c, 0x242c043e, + 0x242c140c, 0x242c3414, 0x24340c1c, 0x24341c24, 0x24343404, 0x243e0c04, 0x243e0c2c, 0x243e1c04, + 0x243e241c, 0x243e2c0c, 0x2c040414, 0x2c040c04, 0x2c040c24, 0x2c041414, 0x2c042404, 0x2c042424, + 0x2c04243e, 0x2c042c14, 0x2c043434, 0x2c043e24, 0x2c0c040c, 0x2c0c041c, 0x2c0c042c, 0x2c0c0c14, + 0x2c0c140c, 0x2c0c1c14, 0x2c0c3e14, 0x2c140404, 0x2c140c0c, 0x2c14141c, 0x2c141c04, 0x2c141c34, + 0x2c142c1c, 0x2c1c0414, 0x2c1c043e, 0x2c1c0c04, 0x2c1c143e, 
0x2c1c2424, 0x2c1c2c0c, 0x2c1c342c, + 0x2c1c3e1c, 0x2c24040c, 0x2c240424, 0x2c241404, 0x2c241c14, 0x2c242434, 0x2c2c0c14, 0x2c2c1434, + 0x2c2c2c0c, 0x2c2c2c1c, 0x2c342414, 0x2c3e0414, 0x2c3e0424, 0x2c3e1414, 0x34040c0c, 0x34040c1c, + 0x34040c2c, 0x34041c0c, 0x34041c1c, 0x34043404, 0x340c0404, 0x340c1404, 0x340c143e, 0x340c3424, + 0x34140c14, 0x34141c24, 0x34142414, 0x34142c2c, 0x34143414, 0x34143e04, 0x341c0404, 0x341c0c24, + 0x341c140c, 0x341c2404, 0x3424142c, 0x3424241c, 0x34243414, 0x342c0404, 0x342c041c, 0x342c1c24, + 0x342c3404, 0x3434042c, 0x34342404, 0x343e0c0c, 0x343e0c1c, 0x3e040404, 0x3e040424, 0x3e04043e, + 0x3e041404, 0x3e041414, 0x3e041c34, 0x3e042404, 0x3e042c24, 0x3e043414, 0x3e0c0414, 0x3e0c0c0c, + 0x3e0c1424, 0x3e0c241c, 0x3e0c242c, 0x3e14040c, 0x3e140424, 0x3e140c04, 0x3e140c34, 0x3e14140c, + 0x3e141c04, 0x3e142c0c, 0x3e1c0414, 0x3e1c1c14, 0x3e1c1c2c, 0x3e1c2c1c, 0x3e24040c, 0x3e24042c, + 0x3e240c1c, 0x3e241404, 0x3e242c04, 0x3e2c1414, 0x3e2c2414, 0x3e340414, 0x3e341c0c, 0x3e3e0404, +}; + #define NGRID_IQ2XXS 512 static const uint64_t iq1s_grid[NGRID_IQ2XXS] = { 0xffffffffffff0101, 0xffffffffff01ff00, 0xffffffffff010100, 0xffffffff00000000, @@ -3736,6 +3803,49 @@ void dequantize_row_iq3_xxs(const block_iq3_xxs * restrict x, float * restrict y } } +// ====================== 3.3125 bpw (de)-quantization + +void dequantize_row_iq3_s(const block_iq3_s * restrict x, float * restrict y, int k) { + assert(k % QK_K == 0); + const int nb = k / QK_K; + + for (int i = 0; i < nb; i++) { + + const float d = GGML_FP16_TO_FP32(x[i].d); + const uint8_t * qs = x[i].qs; + const uint8_t * qh = x[i].qh; + const uint8_t * signs = x[i].signs; + + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const float db1 = d * (0.5f + (x[i].scales[ib32/2] & 0xf)) * 0.5f; + const float db2 = d * (0.5f + (x[i].scales[ib32/2] >> 4)) * 0.5f; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[0] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[0] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + y[j+0] = db1 * grid1[j] * (signs[l] & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = db1 * grid2[j] * (signs[l] & kmask_iq2xs[j+4] ? -1.f : 1.f); + } + y += 8; + } + qs += 8; + signs += 4; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[1] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[1] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + y[j+0] = db2 * grid1[j] * (signs[l] & kmask_iq2xs[j+0] ? -1.f : 1.f); + y[j+4] = db2 * grid2[j] * (signs[l] & kmask_iq2xs[j+4] ? 
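One inconsistency in the hunk above: the banner says "3.3125 bpw (de)-quantization", but the 110-byte block declared for this type works out to 3.4375 bpw, matching the struct comment. 3.3125 bpw corresponds to a 106-byte block, i.e. the same layout without the four scale bytes, so the banner is plausibly a leftover from an earlier layout. The arithmetic:

#include <stdio.h>

int main(void) {
    /* per 256 weights: with the 4 scale bytes vs a hypothetical layout without */
    printf("%.4f bpw\n", 8.0 * 110 / 256);   /* 3.4375: matches block_iq3_s */
    printf("%.4f bpw\n", 8.0 * 106 / 256);   /* 3.3125: the banner's figure */
    return 0;
}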
-1.f : 1.f); + } + y += 8; + } + qh += 2; + qs += 8; + signs += 4; + } + } +} + // ====================== 1.5625 bpw (de)-quantization void dequantize_row_iq1_s(const block_iq1_s * restrict x, float * restrict y, int k) { @@ -8806,6 +8916,7 @@ void ggml_vec_dot_q6_K_q8_K(int n, float * restrict s, size_t bs, const void * r #endif +#if defined (__AVX2__) || defined (__ARM_NEON) static const int8_t keven_signs_q2xs[1024] = { 1, 1, 1, 1, 1, 1, 1, 1, -1, 1, 1, 1, 1, 1, 1, -1, 1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, 1, 1, -1, 1, 1, 1, 1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, -1, -1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, 1, 1, -1, @@ -8840,6 +8951,7 @@ static const int8_t keven_signs_q2xs[1024] = { 1, 1, 1, -1, -1, -1, -1, 1, -1, 1, 1, -1, -1, -1, -1, -1, 1, -1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, 1, 1, -1, -1, -1, -1, -1, -1, 1, -1, -1, -1, -1, -1, -1, -1, -1, }; +#endif void ggml_vec_dot_iq2_xxs_q8_K(int n, float * restrict s, size_t bs, const void * restrict vx, size_t bx, const void * restrict vy, size_t by, int nrc) { assert(n % QK_K == 0); @@ -9327,6 +9439,202 @@ void ggml_vec_dot_iq3_xxs_q8_K(int n, float * restrict s, size_t bs, const void #endif } +void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc) { + assert(n % QK_K == 0); + assert(nrc == 1); + UNUSED(nrc); + UNUSED(bx); + UNUSED(by); + UNUSED(bs); + + const block_iq3_s * restrict x = vx; + const block_q8_K * restrict y = vy; + + const int nb = n / QK_K; + +#if defined(__ARM_NEON) + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[16] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,}; + + const uint8x16x2_t mask1 = vld1q_u8_x2(k_mask1); + const uint8x16_t mask2 = vld1q_u8(k_mask2); + + uint8x16x2_t vs; + ggml_int8x16x4_t q3s; + ggml_int8x16x4_t q8b; + + float sumf = 0; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)x[i].signs; + const int8_t * restrict q8 = y[i].qs; + int sumi1 = 0, sumi2 = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + q8b = ggml_vld1q_s8_x4(q8); q8 += 64; + const uint32x4_t aux32x4_0 = {iq3xs_grid[qs[ 0] | ((qh[ib32+0] << 8) & 256)], iq3xs_grid[qs[ 1] | ((qh[ib32+0] << 7) & 256)], + iq3xs_grid[qs[ 2] | ((qh[ib32+0] << 6) & 256)], iq3xs_grid[qs[ 3] | ((qh[ib32+0] << 5) & 256)]}; + const uint32x4_t aux32x4_1 = {iq3xs_grid[qs[ 4] | ((qh[ib32+0] << 4) & 256)], iq3xs_grid[qs[ 5] | ((qh[ib32+0] << 3) & 256)], + iq3xs_grid[qs[ 6] | ((qh[ib32+0] << 2) & 256)], iq3xs_grid[qs[ 7] | ((qh[ib32+0] << 1) & 256)]}; + const uint32x4_t aux32x4_2 = {iq3xs_grid[qs[ 8] | ((qh[ib32+1] << 8) & 256)], iq3xs_grid[qs[ 9] | ((qh[ib32+1] << 7) & 256)], + iq3xs_grid[qs[10] | ((qh[ib32+1] << 6) & 256)], iq3xs_grid[qs[11] | ((qh[ib32+1] << 5) & 256)]}; + const uint32x4_t aux32x4_3 = {iq3xs_grid[qs[12] | ((qh[ib32+1] << 4) & 256)], iq3xs_grid[qs[13] | ((qh[ib32+1] << 3) & 256)], + iq3xs_grid[qs[14] | ((qh[ib32+1] << 2) & 256)], iq3xs_grid[qs[15] | ((qh[ib32+1] << 1) & 256)]}; + qs += 16; + + vs.val[0] = 
vreinterpretq_u8_u32(vdupq_n_u32(signs[0] | (signs[1] << 16))); + vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[0] = vceqq_u8(vs.val[0], mask2); + vs.val[1] = vceqq_u8(vs.val[1], mask2); + + q3s.val[0] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[0], vreinterpretq_u8_u32(aux32x4_0))), vreinterpretq_s8_u8(vs.val[0])); + q3s.val[1] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[1], vreinterpretq_u8_u32(aux32x4_1))), vreinterpretq_s8_u8(vs.val[1])); + + vs.val[0] = vreinterpretq_u8_u32(vdupq_n_u32(signs[2] | (signs[3] << 16))); + vs.val[1] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[1]), mask2); + vs.val[0] = vandq_u8(vqtbl1q_u8(vs.val[0], mask1.val[0]), mask2); + vs.val[0] = vceqq_u8(vs.val[0], mask2); + vs.val[1] = vceqq_u8(vs.val[1], mask2); + + signs += 4; + + q3s.val[2] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[0], vreinterpretq_u8_u32(aux32x4_2))), vreinterpretq_s8_u8(vs.val[0])); + q3s.val[3] = vsubq_s8(vreinterpretq_s8_u8(veorq_u8(vs.val[1], vreinterpretq_u8_u32(aux32x4_3))), vreinterpretq_s8_u8(vs.val[1])); + + const int32x4_t p1 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[0], q8b.val[0]), q3s.val[1], q8b.val[1]); + const int32x4_t p2 = ggml_vdotq_s32(ggml_vdotq_s32(vdupq_n_s32(0), q3s.val[2], q8b.val[2]), q3s.val[3], q8b.val[3]); + sumi1 += vaddvq_s32(p1) * (1 + 2*(x[i].scales[ib32/2] & 0xf)); + sumi2 += vaddvq_s32(p2) * (1 + 2*(x[i].scales[ib32/2] >> 4)); + } + sumf += d*(sumi1 + sumi2); + } + *s = 0.25f * sumf; + +#elif defined(__AVX2__) + + static const uint8_t k_mask1[32] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, + 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03, 0x03 + }; + + static const uint8_t k_mask2[32] = {0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, + }; + + const __m256i mask1 = _mm256_loadu_si256((const __m256i*)k_mask1); + const __m256i mask2 = _mm256_loadu_si256((const __m256i*)k_mask2); + + __m256 accumf = _mm256_setzero_ps(); + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint16_t * restrict signs = (const uint16_t *)x[i].signs; + const int8_t * restrict q8 = y[i].qs; + __m256i sumi1 = _mm256_setzero_si256(); + __m256i sumi2 = _mm256_setzero_si256(); + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const __m256i q8_1 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q8_2 = _mm256_loadu_si256((const __m256i *)q8); q8 += 32; + const __m256i q2_1 = _mm256_set_epi32(iq3xs_grid[qs[7] | ((qh[ib32+0] << 1) & 256)], + iq3xs_grid[qs[6] | ((qh[ib32+0] << 2) & 256)], + iq3xs_grid[qs[5] | ((qh[ib32+0] << 3) & 256)], + iq3xs_grid[qs[4] | ((qh[ib32+0] << 4) & 256)], + iq3xs_grid[qs[3] | ((qh[ib32+0] << 5) & 256)], + iq3xs_grid[qs[2] | ((qh[ib32+0] << 6) & 256)], + iq3xs_grid[qs[1] | ((qh[ib32+0] << 7) & 256)], + iq3xs_grid[qs[0] | ((qh[ib32+0] << 8) & 256)]); + qs += 8; + const __m256i q2_2 = _mm256_set_epi32(iq3xs_grid[qs[7] | ((qh[ib32+1] << 1) & 256)], + iq3xs_grid[qs[6] | ((qh[ib32+1] << 2) & 256)], + iq3xs_grid[qs[5] | ((qh[ib32+1] << 3) & 256)], + iq3xs_grid[qs[4] | ((qh[ib32+1] << 4) & 256)], + iq3xs_grid[qs[3] | ((qh[ib32+1] << 5) & 256)], + iq3xs_grid[qs[2] | 
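The NEON path above expands the packed sign bits into byte masks (broadcast 32 bits with vdupq_n_u32, replicate each bit's source byte with vqtbl1q_u8, then vceqq_u8 against the bit table to get 0x00/0xFF lanes) and negates with veor followed by vsub: for a mask byte m of 0x00 or 0xFF, (b ^ m) - m yields b or -b in two's complement. A scalar model of that trick:

#include <assert.h>
#include <stdint.h>

/* m is 0x00 (keep) or 0xFF (negate); (b ^ m) - m == b or -b */
static int8_t cond_negate(int8_t b, uint8_t m) {
    return (int8_t)((uint8_t)(b ^ m) - m);
}

int main(void) {
    assert(cond_negate( 7, 0x00) ==  7);
    assert(cond_negate( 7, 0xFF) == -7);
    assert(cond_negate(-5, 0xFF) ==  5);
    return 0;
}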
((qh[ib32+1] << 6) & 256)], + iq3xs_grid[qs[1] | ((qh[ib32+1] << 7) & 256)], + iq3xs_grid[qs[0] | ((qh[ib32+1] << 8) & 256)]); + qs += 8; + + __m256i aux256 = _mm256_set1_epi32(signs[0] | (signs[1] << 16)); + aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); + const __m256i s2_1 = _mm256_cmpeq_epi8(aux256, mask2); + const __m256i q8s_1 = _mm256_sub_epi8(_mm256_xor_si256(s2_1, q8_1), s2_1); + + aux256 = _mm256_set1_epi32(signs[2] | (signs[3] << 16)); + aux256 = _mm256_and_si256(_mm256_shuffle_epi8(aux256,mask1), mask2); + const __m256i s2_2 = _mm256_cmpeq_epi8(aux256, mask2); + const __m256i q8s_2 = _mm256_sub_epi8(_mm256_xor_si256(s2_2, q8_2), s2_2); + + signs += 4; + + const __m256i dot1 = _mm256_maddubs_epi16(q2_1, q8s_1); + const __m256i dot2 = _mm256_maddubs_epi16(q2_2, q8s_2); + const uint16_t ls1 = x[i].scales[ib32/2] & 0xf; + const uint16_t ls2 = x[i].scales[ib32/2] >> 4; + const __m256i p1 = _mm256_madd_epi16(dot1, _mm256_set1_epi16(2*ls1+1)); + const __m256i p2 = _mm256_madd_epi16(dot2, _mm256_set1_epi16(2*ls2+1)); + sumi1 = _mm256_add_epi32(sumi1, p1); + sumi2 = _mm256_add_epi32(sumi2, p2); + } + + accumf = _mm256_fmadd_ps(_mm256_set1_ps(d), _mm256_cvtepi32_ps(_mm256_add_epi32(sumi1, sumi2)), accumf); + + } + + *s = 0.25f * hsum_float_8(accumf); + +#else + + float sumf = 0.f; + for (int i = 0; i < nb; ++i) { + const float d = GGML_FP16_TO_FP32(x[i].d) * y[i].d; + const uint8_t * restrict qs = x[i].qs; + const uint8_t * restrict qh = x[i].qh; + const uint8_t * restrict signs = x[i].signs; + const int8_t * restrict q8 = y[i].qs; + int32_t bsum = 0; + for (int ib32 = 0; ib32 < QK_K/32; ib32 += 2) { + const uint32_t ls1 = 2*(x[i].scales[ib32/2] & 0xf) + 1; + const uint32_t ls2 = 2*(x[i].scales[ib32/2] >> 4) + 1; + int32_t sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[ib32+0] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[ib32+0] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + sumi += grid1[j] * q8[j+0] * (signs[l] & kmask_iq2xs[j+0] ? -1 : 1); + sumi += grid2[j] * q8[j+4] * (signs[l] & kmask_iq2xs[j+4] ? -1 : 1); + } + q8 += 8; + } + qs += 8; + signs += 4; + bsum += sumi * ls1; + sumi = 0; + for (int l = 0; l < 4; ++l) { + const uint8_t * grid1 = (const uint8_t *)(iq3xs_grid + (qs[2*l+0] | ((qh[ib32+1] << (8-2*l)) & 256))); + const uint8_t * grid2 = (const uint8_t *)(iq3xs_grid + (qs[2*l+1] | ((qh[ib32+1] << (7-2*l)) & 256))); + for (int j = 0; j < 4; ++j) { + sumi += grid1[j] * q8[j+0] * (signs[l] & kmask_iq2xs[j+0] ? -1 : 1); + sumi += grid2[j] * q8[j+4] * (signs[l] & kmask_iq2xs[j+4] ? 
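In the AVX2 path the signs are folded into the q8 operand (q8s_1/q8s_2) rather than into the grid values, plausibly because _mm256_maddubs_epi16 multiplies an unsigned byte by a signed byte: the grid magnitudes stay unsigned in the first operand and the sign moves to the signed side, which leaves every product unchanged. A scalar check that the two placements agree:

#include <assert.h>
#include <stdint.h>

static const uint8_t kmask[8] = {1, 2, 4, 8, 16, 32, 64, 128};

/* sign applied to the q8 side (AVX2 path) */
static int32_t dot8_sign_q8(const uint8_t *g, const int8_t *q8, uint8_t s) {
    int32_t sum = 0;
    for (int j = 0; j < 8; ++j)
        sum += (int32_t)g[j] * ((s & kmask[j]) ? -q8[j] : q8[j]);
    return sum;
}

/* sign applied to the grid side (scalar fallback) */
static int32_t dot8_sign_grid(const uint8_t *g, const int8_t *q8, uint8_t s) {
    int32_t sum = 0;
    for (int j = 0; j < 8; ++j)
        sum += ((s & kmask[j]) ? -(int32_t)g[j] : (int32_t)g[j]) * q8[j];
    return sum;
}

int main(void) {
    uint8_t g[8] = {4, 12, 20, 28, 36, 44, 52, 62};
    int8_t  q[8] = {1, -2, 3, -4, 5, -6, 7, -8};
    for (int s = 0; s < 256; ++s)
        assert(dot8_sign_q8(g, q, (uint8_t)s) == dot8_sign_grid(g, q, (uint8_t)s));
    return 0;
}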
-1 : 1); + } + q8 += 8; + } + qs += 8; + signs += 4; + bsum += sumi * ls2; + } + sumf += d * bsum; + } + *s = 0.25f * sumf; +#endif +} + + #ifdef __AVX2__ static inline __m256i mul_add_epi8(const __m256i x, const __m256i y) { const __m256i ax = _mm256_sign_epi8(x, x); @@ -9523,6 +9831,7 @@ void ggml_vec_dot_iq4_nl_q8_0(int n, float * restrict s, size_t bs, const void * float sumf = 0; for (int ib = 0; ib < nb; ib += 2) { + q4bits.val[0] = vld1q_u8(x[ib+0].qs); q4bits.val[1] = vld1q_u8(x[ib+1].qs); q8b.val[0] = vld1q_s8(y[ib+0].qs); @@ -10239,14 +10548,15 @@ typedef struct { uint16_t * neighbours; } iq3_entry_t; -static iq3_entry_t iq3_data[1] = { +static iq3_entry_t iq3_data[2] = { + {NULL, NULL, NULL}, {NULL, NULL, NULL}, }; static inline int iq3_data_index(int grid_size) { (void)grid_size; - GGML_ASSERT(grid_size == 256); - return 0; + GGML_ASSERT(grid_size == 256 || grid_size == 512); + return grid_size == 256 ? 0 : 1; } static int iq3_compare_func(const void * left, const void * right) { @@ -10278,9 +10588,44 @@ void iq3xs_init_impl(int grid_size) { 3185, 3215, 3252, 3288, 3294, 3364, 3397, 3434, 3483, 3523, 3537, 3587, 3589, 3591, 3592, 3610, 3626, 3670, 3680, 3722, 3749, 3754, 3776, 3789, 3803, 3824, 3857, 3873, 3904, 3906, 3924, 3992, }; + static const uint16_t kgrid_512[512] = { + 0, 1, 2, 5, 7, 8, 9, 10, 12, 14, 16, 17, 21, 27, 32, 34, + 37, 39, 41, 43, 48, 50, 57, 60, 63, 64, 65, 66, 68, 72, 73, 77, + 80, 83, 87, 89, 93, 100, 113, 117, 122, 128, 129, 133, 135, 136, 139, 142, + 145, 149, 152, 156, 162, 165, 167, 169, 171, 184, 187, 195, 201, 205, 208, 210, + 217, 219, 222, 228, 232, 234, 247, 249, 253, 256, 267, 271, 273, 276, 282, 288, + 291, 297, 312, 322, 324, 336, 338, 342, 347, 353, 357, 359, 374, 379, 390, 393, + 395, 409, 426, 441, 448, 450, 452, 464, 466, 470, 475, 488, 492, 512, 513, 514, + 516, 520, 521, 523, 525, 527, 528, 530, 537, 540, 542, 556, 558, 561, 570, 576, + 577, 579, 582, 584, 588, 593, 600, 603, 609, 616, 618, 632, 638, 640, 650, 653, + 655, 656, 660, 666, 672, 675, 685, 688, 698, 705, 708, 711, 712, 715, 721, 727, + 728, 732, 737, 754, 760, 771, 773, 778, 780, 793, 795, 802, 806, 808, 812, 833, + 840, 843, 849, 856, 858, 873, 912, 916, 919, 932, 934, 961, 963, 968, 970, 977, + 989, 993, 1010, 1016, 1024, 1025, 1027, 1029, 1031, 1032, 1034, 1036, 1038, 1041, 1043, 1047, + 1048, 1050, 1057, 1059, 1061, 1064, 1066, 1079, 1080, 1083, 1085, 1088, 1090, 1096, 1099, 1103, + 1106, 1109, 1113, 1116, 1122, 1129, 1153, 1156, 1159, 1169, 1171, 1176, 1183, 1185, 1195, 1199, + 1209, 1212, 1216, 1218, 1221, 1225, 1234, 1236, 1241, 1243, 1250, 1256, 1270, 1281, 1287, 1296, + 1299, 1306, 1309, 1313, 1338, 1341, 1348, 1353, 1362, 1375, 1376, 1387, 1400, 1408, 1410, 1415, + 1425, 1453, 1457, 1477, 1481, 1494, 1496, 1507, 1512, 1538, 1545, 1547, 1549, 1551, 1554, 1561, + 1563, 1565, 1570, 1572, 1575, 1577, 1587, 1593, 1601, 1603, 1605, 1612, 1617, 1619, 1632, 1648, + 1658, 1662, 1664, 1674, 1680, 1690, 1692, 1704, 1729, 1736, 1740, 1745, 1747, 1751, 1752, 1761, + 1763, 1767, 1773, 1787, 1795, 1801, 1806, 1810, 1817, 1834, 1840, 1844, 1857, 1864, 1866, 1877, + 1882, 1892, 1902, 1915, 1934, 1953, 1985, 1987, 2000, 2002, 2013, 2048, 2052, 2058, 2064, 2068, + 2071, 2074, 2081, 2088, 2104, 2114, 2119, 2121, 2123, 2130, 2136, 2141, 2147, 2153, 2157, 2177, + 2179, 2184, 2189, 2193, 2203, 2208, 2223, 2226, 2232, 2244, 2249, 2251, 2256, 2258, 2265, 2269, + 2304, 2306, 2324, 2335, 2336, 2361, 2373, 2375, 2385, 2418, 2443, 2460, 2480, 2504, 2509, 2520, + 2531, 2537, 2562, 2568, 2572, 
2578, 2592, 2596, 2599, 2602, 2614, 2620, 2625, 2627, 2629, 2634, + 2641, 2650, 2682, 2688, 2697, 2707, 2712, 2718, 2731, 2754, 2759, 2760, 2775, 2788, 2793, 2805, + 2811, 2817, 2820, 2832, 2842, 2854, 2890, 2902, 2921, 2923, 2978, 3010, 3012, 3026, 3081, 3083, + 3085, 3097, 3099, 3120, 3136, 3152, 3159, 3188, 3210, 3228, 3234, 3245, 3250, 3256, 3264, 3276, + 3281, 3296, 3349, 3363, 3378, 3392, 3395, 3420, 3440, 3461, 3488, 3529, 3531, 3584, 3588, 3591, + 3600, 3602, 3614, 3616, 3628, 3634, 3650, 3657, 3668, 3683, 3685, 3713, 3716, 3720, 3726, 3729, + 3736, 3753, 3778, 3802, 3805, 3819, 3841, 3845, 3851, 3856, 3880, 3922, 3938, 3970, 3993, 4032, + }; + const int kmap_size = 4096; - const int nwant = 2; - const uint16_t * kgrid = kgrid_256; + const int nwant = grid_size == 256 ? 2 : 3; + const uint16_t * kgrid = grid_size == 256 ? kgrid_256 : kgrid_512; uint32_t * kgrid_q3xs; int * kmap_q3xs; uint16_t * kneighbors_q3xs; @@ -10377,7 +10722,7 @@ void iq3xs_init_impl(int grid_size) { } void iq3xs_free_impl(int grid_size) { - GGML_ASSERT(grid_size == 256); + GGML_ASSERT(grid_size == 256 || grid_size == 512); const int gindex = iq3_data_index(grid_size); if (iq3_data[gindex].grid) { free(iq3_data[gindex].grid); iq3_data[gindex].grid = NULL; @@ -10410,9 +10755,10 @@ static int iq3_find_best_neighbour(const uint16_t * restrict neighbours, const u return grid_index; } -static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict vy, int n, const float * restrict quant_weights) { +static void quantize_row_iq3_xxs_impl(int grid_size, const float * restrict x, void * restrict vy, int n, + const float * restrict quant_weights) { - const int gindex = iq3_data_index(256); + const int gindex = iq3_data_index(grid_size); const uint32_t * kgrid_q3xs = iq3_data[gindex].grid; const int * kmap_q3xs = iq3_data[gindex].map; @@ -10426,9 +10772,23 @@ static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict const int kMaxQ = 8; - const int nbl = n/256; + const int nbl = n/QK_K; - block_iq3_xxs * y = vy; + ggml_fp16_t * dh; + uint8_t * qs; + int block_size; + if (grid_size == 256) { + block_iq3_xxs * y = vy; + dh = &y->d; + qs = y->qs; + block_size = sizeof(block_iq3_xxs); + } else { + block_iq3_s * y = vy; + dh = &y->d; + qs = y->qs; + block_size = sizeof(block_iq3_s); + } + int quant_size = block_size - sizeof(ggml_fp16_t); float scales[QK_K/32]; float weight[32]; @@ -10439,20 +10799,21 @@ static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict bool is_on_grid[8]; bool is_on_grid_aux[8]; uint8_t block_signs[8]; - uint8_t q3[3*(QK_K/8)]; + uint8_t q3[3*(QK_K/8)+QK_K/32]; uint32_t * scales_and_signs = (uint32_t *)(q3 + QK_K/4); + uint8_t * qh = q3 + 3*(QK_K/8); for (int ibl = 0; ibl < nbl; ++ibl) { - y[ibl].d = GGML_FP32_TO_FP16(0.f); - memset(q3, 0, 3*QK_K/8); + dh[0] = GGML_FP32_TO_FP16(0.f); + memset(q3, 0, 3*QK_K/8+QK_K/32); float max_scale = 0; const float * xbl = x + QK_K*ibl; float sumx2 = 0; for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; - float sigma2 = sumx2/QK_K; + float sigma2 = 2*sumx2/QK_K; for (int ib = 0; ib < QK_K/32; ++ib) { const float * xb = xbl + 32*ib; @@ -10570,7 +10931,13 @@ static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict printf("\n"); GGML_ASSERT(false); } - q3[8*ib+k] = grid_index; + if (grid_size == 256) { + q3[8*ib+k] = grid_index; + } else { + q3[8*ib+k] = grid_index & 255; + qh[ib] |= ((grid_index >> 8) << k); + } + } scales_and_signs[ib] = block_signs[0] | (block_signs[1] << 7) | 
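The generalized quantizer above stores the 512-entry case by splitting each 9-bit grid index: the low byte goes into qs (q3[8*ib+k] = grid_index & 255) and the 9th bit into bit k of qh (qh[ib] |= (grid_index >> 8) << k), the exact encode counterpart of the decode shown earlier. Sketch:

#include <assert.h>
#include <stdint.h>

/* low byte of a 9-bit grid index into qs[k], 9th bit into bit k of qh */
static void iq3s_store_index(uint8_t *qs, uint8_t *qh, int k, int grid_index) {
    qs[k]  = (uint8_t)(grid_index & 255);
    *qh   |= (uint8_t)((grid_index >> 8) << k);
}

int main(void) {
    uint8_t qs[8] = {0}, qh = 0;
    iq3s_store_index(qs, &qh, 3, 0x1A7);   /* index 423: qs = 0xA7, qh bit 3 */
    assert(qs[3] == 0xA7 && (qh & 0x08));
    return 0;
}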
(block_signs[2] << 14) | (block_signs[3] << 21); GGML_ASSERT(scale >= 0); @@ -10579,63 +10946,25 @@ static void quantize_row_iq3_xxs_impl(const float * restrict x, void * restrict } if (!max_scale) { - memset(y[ibl].qs, 0, 3*QK_K/8); + memset(qs, 0, quant_size); + dh += block_size/sizeof(ggml_fp16_t); + qs += block_size; continue; } float d = max_scale/31; - y[ibl].d = GGML_FP32_TO_FP16(d); + dh[0] = GGML_FP32_TO_FP16(d * 1.0125f); // small improvement via this fudge factor float id = 1/d; - float sumqx = 0, sumq2 = 0; for (int ib = 0; ib < QK_K/32; ++ib) { int l = nearest_int(0.5f*(id*scales[ib]-1)); l = MAX(0, MIN(15, l)); scales_and_signs[ib] |= ((uint32_t)l << 28); - if (false) { - const float * xb = xbl + 32*ib; - if (quant_weights) { - const float * qw = quant_weights + QK_K*ibl + 32*ib; - for (int i = 0; i < 32; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); - } else { - for (int i = 0; i < 32; ++i) weight[i] = xb[i]*xb[i]; - } - const float db = 0.25f * d * (1 + 2*l); - for (int k = 0; k < 8; ++k) { - const int8_t * signs = keven_signs_q2xs + 8*((scales_and_signs[ib] >> 7*(k/2)) & 127) + 4*(k%2); - const float * xk = xb + 4*k; - const float * wk = weight + 4*k; - //const uint8_t * grid = (const uint8_t *)(kgrid_q3xs + q3[8*ib+k]); - const uint8_t * grid = (const uint8_t *)(iq3xxs_grid + q3[8*ib+k]); - float best_mse = 0; int best_index = q3[8*ib+k]; - for (int j = 0; j < 4; ++j) { - float diff = db * grid[j] * signs[j] - xk[j]; - best_mse += wk[j] * diff * diff; - } - for (int idx = 0; idx < 256; ++idx) { - //grid = (const uint8_t *)(kgrid_q3xs + idx); - grid = (const uint8_t *)(iq3xxs_grid + idx); - float mse = 0; - for (int j = 0; j < 4; ++j) { - float diff = db * grid[j] * signs[j] - xk[j]; - mse += wk[j] * diff * diff; - } - if (mse < best_mse) { - best_mse = mse; best_index = idx; - } - } - q3[8*ib+k] = best_index; - //grid = (const uint8_t *)(kgrid_q3xs + best_index); - grid = (const uint8_t *)(iq3xxs_grid + best_index); - for (int j = 0; j < 4; ++j) { - float q = db * grid[j] * signs[j]; - sumqx += wk[j] * q * xk[j]; - sumq2 += wk[j] * q * q; - } - } - if (sumq2 > 0) y[ibl].d = GGML_FP32_TO_FP16(d*sumqx/sumq2); - } } - memcpy(y[ibl].qs, q3, 3*QK_K/8); + memcpy(qs, q3, quant_size); + + dh += block_size/sizeof(ggml_fp16_t); + qs += block_size; + } } @@ -10645,7 +10974,7 @@ size_t quantize_iq3_xxs(const float * src, void * dst, int nrow, int n_per_row, int nblock = n_per_row/QK_K; char * qrow = (char *)dst; for (int row = 0; row < nrow; ++row) { - quantize_row_iq3_xxs_impl(src, qrow, n_per_row, quant_weights); + quantize_row_iq3_xxs_impl(256, src, qrow, n_per_row, quant_weights); src += n_per_row; qrow += nblock*sizeof(block_iq3_xxs); } @@ -10660,9 +10989,226 @@ void quantize_row_iq3_xxs(const float * restrict x, void * restrict vy, int k) { void quantize_row_iq3_xxs_reference(const float * restrict x, block_iq3_xxs * restrict y, int k) { assert(k % QK_K == 0); - quantize_row_iq3_xxs_impl(x, y, k, NULL); + quantize_row_iq3_xxs_impl(256, x, y, k, NULL); } +static void quantize_row_iq3_s_impl(int block_size, const float * restrict x, void * restrict vy, int n, + const float * restrict quant_weights, + float * scales, + float * weight, + float * xval, + int8_t * L, + int8_t * Laux, + float * waux, + bool * is_on_grid, + bool * is_on_grid_aux, + uint8_t * block_signs) { + + const int gindex = iq3_data_index(512); + + const uint32_t * kgrid_q3xs = iq3_data[gindex].grid; + const int * kmap_q3xs = iq3_data[gindex].map; + const uint16_t * kneighbors_q3xs = 
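The scale search in quantize_row_iq3_s_impl below sweeps candidate step sizes, rounds each weight to the odd levels 2L+1, and keeps the candidate maximizing sumqx*sumqx/sumq2. That is weighted least squares: for fixed quants q the optimal scale is sum(w*x*q)/sum(w*q*q), and the comparison sumqx*sumqx > best*sumq2 ranks candidates by explained weighted energy without a division. A minimal sketch:

#include <stdio.h>

/* minimize sum w*(x - s*q)^2 over s  =>  s = sum(w*x*q)/sum(w*q*q) */
static float best_scale(const float *x, const float *w, const int *q, int n) {
    float sumqx = 0, sumq2 = 0;
    for (int i = 0; i < n; ++i) {
        sumqx += w[i] * x[i] * q[i];
        sumq2 += w[i] * q[i] * q[i];
    }
    return sumq2 > 0 ? sumqx / sumq2 : 0.0f;
}

int main(void) {
    float x[4] = {0.9f, 3.1f, 5.0f, 6.9f};
    float w[4] = {1, 1, 1, 1};
    int   q[4] = {1, 3, 5, 7};                    /* odd levels, as in 2*L+1 */
    printf("s = %g\n", best_scale(x, w, q, 4));   /* close to 1.0 */
    return 0;
}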
iq3_data[gindex].neighbours; + + //GGML_ASSERT(quant_weights && "missing quantization weights"); + GGML_ASSERT(kgrid_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kmap_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(kneighbors_q3xs && "forgot to call ggml_quantize_init()?"); + GGML_ASSERT(n%QK_K == 0); + + const int kMaxQ = 8; + + const int nbl = n/QK_K; + + block_iq3_s * y = vy; + + const int bs4 = block_size/4; + const int bs8 = block_size/8; + + for (int ibl = 0; ibl < nbl; ++ibl) { + + memset(&y[ibl], 0, sizeof(block_iq3_s)); + y[ibl].d = GGML_FP32_TO_FP16(0.f); + + uint8_t * qs = y[ibl].qs; + uint8_t * qh = y[ibl].qh; + uint8_t * signs = y[ibl].signs; + + float max_scale = 0; + + const float * xbl = x + QK_K*ibl; + float sumx2 = 0; + for (int i = 0; i < QK_K; ++i) sumx2 += xbl[i]*xbl[i]; + float sigma2 = 2*sumx2/QK_K; + + for (int ib = 0; ib < QK_K/block_size; ++ib) { + const float * xb = xbl + block_size*ib; + if (quant_weights) { + const float * qw = quant_weights + QK_K*ibl + block_size*ib; + for (int i = 0; i < block_size; ++i) weight[i] = qw[i] * sqrtf(sigma2 + xb[i]*xb[i]); + } else { + for (int i = 0; i < block_size; ++i) weight[i] = xb[i]*xb[i]; + } + for (int i = 0; i < block_size; ++i) waux[i] = sqrtf(weight[i]); + for (int k = 0; k < bs8; ++k) { + uint8_t s = 0; + for (int i = 0; i < 8; ++i) { + if (xb[8*k + i] >= 0) xval[8*k + i] = xb[8*k + i]; + else { + xval[8*k + i] = -xb[8*k + i]; s |= (1 << i); + } + } + block_signs[k] = s; + } + float max = xval[0]; + for (int i = 1; i < block_size; ++i) max = MAX(max, xval[i]); + if (!max) { + scales[ib] = 0; + continue; + } + float best = 0; + float scale = max/(2*kMaxQ-1); + for (int is = -15; is <= 15; ++is) { + float id = (2*kMaxQ-1+is*0.2f)/max; + float this_scale = 1/id; + for (int k = 0; k < bs4; ++k) { + for (int i = 0; i < 4; ++i) { + int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); + Laux[4*k+i] = MAX(0, MIN(kMaxQ-1, l)); + } + uint16_t u = 0; + for (int i = 0; i < 4; ++i) u |= (Laux[4*k+i] << 3*i); + int grid_index = kmap_q3xs[u]; + is_on_grid_aux[k] = true; + if (grid_index < 0) { + is_on_grid_aux[k] = false; + const uint16_t * neighbours = kneighbors_q3xs - kmap_q3xs[u] - 1; + grid_index = iq3_find_best_neighbour(neighbours, kgrid_q3xs, xval + 4*k, waux + 4*k, this_scale, Laux + 4*k); + } + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < block_size; ++i) { + float w = weight[i]; + float q = 2*Laux[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0 && sumqx*sumqx > best*sumq2) { + scale = sumqx/sumq2; best = scale*sumqx; + for (int i = 0; i < block_size; ++i) L[i] = Laux[i]; + for (int k = 0; k < bs4; ++k) is_on_grid[k] = is_on_grid_aux[k]; + } + } + int n_not_ongrid = 0; + for (int k = 0; k < bs4; ++k) if (!is_on_grid[k]) ++n_not_ongrid; + if (n_not_ongrid > 0 && scale > 0) { + float id = 1/scale; + for (int k = 0; k < bs4; ++k) { + if (is_on_grid[k]) continue; + uint16_t u = 0; + for (int i = 0; i < 4; ++i) { + int l = nearest_int(0.5f*(id*xval[4*k+i]-1)); + l = MAX(0, MIN(kMaxQ-1, l)); + u |= (l << 3*i); + } + int grid_index = kmap_q3xs[u]; + if (grid_index < 0) { + const uint16_t * neighbours = kneighbors_q3xs - kmap_q3xs[u] - 1; + grid_index = iq3_find_best_neighbour(neighbours, kgrid_q3xs, xval + 4*k, waux + 4*k, scale, L + 4*k); + } + const int8_t * pg = (const int8_t *)(kgrid_q3xs + grid_index); + for (int i = 0; i < 4; ++i) L[4*k+i] = (pg[i] - 1)/2; + } + float sumqx = 0, sumq2 = 0; + for (int i = 0; i < block_size; ++i) { + float w = weight[i]; + float q = 
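Before the grid fit, each group of 8 inputs is split into magnitudes plus one packed sign byte (bit i set when x[i] < 0); the byte is later flipped wholesale (~block_signs[k]) if the fitted scale comes out negative, so the stored scale stays non-negative. Scalar form:

#include <assert.h>
#include <stdint.h>

/* split 8 floats into magnitudes plus one packed sign byte */
static uint8_t pack_signs(const float *x, float *xval) {
    uint8_t s = 0;
    for (int i = 0; i < 8; ++i) {
        if (x[i] >= 0) xval[i] = x[i];
        else { xval[i] = -x[i]; s |= (uint8_t)(1 << i); }
    }
    return s;
}

int main(void) {
    float x[8] = {1, -2, 3, -4, 5, 6, -7, 8}, m[8];
    uint8_t s = pack_signs(x, m);
    assert(s == 0x4A && m[1] == 2.0f && m[6] == 7.0f);  /* bits 1, 3, 6 */
    return 0;
}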
2*L[i] + 1; + sumqx += w*xval[i]*q; + sumq2 += w*q*q; + } + if (sumq2 > 0) scale = sumqx/sumq2; + } + if (scale < 0) { + // This should never happen, but just in case, flip scale so that it is positive (we use uint's to encode the scale) + // and correspondingly flip quant signs. + scale = -scale; + for (int k = 0; k < bs8; ++k) block_signs[k] = ~block_signs[k]; + } + for (int k = 0; k < bs4; ++k) { + uint16_t u = 0; + for (int i = 0; i < 4; ++i) u |= (L[4*k+i] << 3*i); + int grid_index = kmap_q3xs[u]; + if (grid_index < 0) { + printf("Oops: found point %u not on grid:", u); + for (int i = 0; i < 4; ++i) printf(" %d", L[4*k+i]); + printf("\n"); + GGML_ASSERT(false); + } + qs[k] = grid_index & 255; + qh[(ib*bs4+k)/8] |= ((grid_index >> 8) << ((ib*bs4+k)%8)); + } + qs += bs4; + for (int k = 0; k < bs8; ++k) signs[k] = block_signs[k]; + signs += bs8; + GGML_ASSERT(scale >= 0); + scales[ib] = scale; + max_scale = MAX(max_scale, scale); + } + + if (!max_scale) { + continue; + } + + float d = max_scale/31; + y[ibl].d = GGML_FP32_TO_FP16(d); + float id = 1/d; + for (int ib = 0; ib < QK_K/block_size; ib += 2) { + int l1 = nearest_int(0.5f*(id*scales[ib+0]-1)); + l1 = MAX(0, MIN(15, l1)); + int l2 = nearest_int(0.5f*(id*scales[ib+1]-1)); + l2 = MAX(0, MIN(15, l2)); + y[ibl].scales[ib/2] = l1 | (l2 << 4); + } + + } +} + +#define IQ3S_BLOCK_SIZE 32 +size_t quantize_iq3_s(const float * src, void * dst, int nrow, int n_per_row, int64_t * hist, const float * quant_weights) { + (void)hist; + GGML_ASSERT(n_per_row%QK_K == 0); + int nblock = n_per_row/QK_K; + float scales[QK_K/IQ3S_BLOCK_SIZE]; + float weight[IQ3S_BLOCK_SIZE]; + float xval[IQ3S_BLOCK_SIZE]; + int8_t L[IQ3S_BLOCK_SIZE]; + int8_t Laux[IQ3S_BLOCK_SIZE]; + float waux[IQ3S_BLOCK_SIZE]; + bool is_on_grid[IQ3S_BLOCK_SIZE/4]; + bool is_on_grid_aux[IQ3S_BLOCK_SIZE/4]; + uint8_t block_signs[IQ3S_BLOCK_SIZE/8]; + char * qrow = (char *)dst; + for (int row = 0; row < nrow; ++row) { + quantize_row_iq3_s_impl(IQ3S_BLOCK_SIZE, src, qrow, n_per_row, quant_weights, + scales, weight, xval, L, Laux, waux, is_on_grid, is_on_grid_aux, block_signs); + src += n_per_row; + qrow += nblock*sizeof(block_iq3_s); + } + return nrow * nblock * sizeof(block_iq3_s); +} + +void quantize_row_iq3_s(const float * restrict x, void * restrict vy, int k) { + assert(k % QK_K == 0); + block_iq3_s * restrict y = vy; + quantize_row_iq3_s_reference(x, y, k); +} + +void quantize_row_iq3_s_reference(const float * restrict x, block_iq3_s * restrict y, int k) { + assert(k % QK_K == 0); + quantize_iq3_s(x, y, 1, k, NULL, NULL); +} + + // =================================== 1.5 bpw =================================================== static int iq1_find_best_neighbour(const uint16_t * restrict neighbours, const uint64_t * restrict grid, diff --git a/ggml-quants.h b/ggml-quants.h index 113623b62..303b0b6f9 100644 --- a/ggml-quants.h +++ b/ggml-quants.h @@ -191,6 +191,21 @@ typedef struct { } block_iq3_xxs; static_assert(sizeof(block_iq3_xxs) == sizeof(ggml_fp16_t) + 3*(QK_K/8), "wrong iq3_xxs block size/padding"); +// 3.4375 bpw +#if QK_K == 64 +#define IQ3S_N_SCALE 2 +#else +#define IQ3S_N_SCALE QK_K/64 +#endif +typedef struct { + ggml_fp16_t d; + uint8_t qs[QK_K/4]; + uint8_t qh[QK_K/32]; + uint8_t signs[QK_K/8]; + uint8_t scales[IQ3S_N_SCALE]; +} block_iq3_s; +static_assert(sizeof(block_iq3_s) == sizeof(ggml_fp16_t) + 13*(QK_K/32) + IQ3S_N_SCALE, "wrong iq3_s block size/padding"); + typedef struct { ggml_fp16_t d; uint8_t qs[QK_K/8]; @@ -226,6 +241,7 @@ void 
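The static_assert in ggml-quants.h below groups the three byte arrays as 13*(QK_K/32): qs contributes 8 such units (QK_K/4), qh one, and signs four (QK_K/8), giving 2 + 104 + 4 = 110 bytes at QK_K = 256. A check of the regrouping:

#include <assert.h>

#define QK_K 256
#define IQ3S_N_SCALE (QK_K/64)

int main(void) {
    /* QK_K/4 + QK_K/32 + QK_K/8 == (8 + 1 + 4)*(QK_K/32) */
    int qs = QK_K/4, qh = QK_K/32, signs = QK_K/8;
    assert(qs + qh + signs == 13*(QK_K/32));
    assert(2 + 13*(QK_K/32) + IQ3S_N_SCALE == 110);
    return 0;
}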
quantize_row_q6_K_reference(const float * GGML_RESTRICT x, block_q6_K * GGM void quantize_row_q8_K_reference(const float * GGML_RESTRICT x, block_q8_K * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs_reference(const float * GGML_RESTRICT x, block_iq3_xxs * GGML_RESTRICT y, int k); void quantize_row_iq4_nl_reference (const float * GGML_RESTRICT x, block_iq4_nl * GGML_RESTRICT y, int k); +void quantize_row_iq3_s_reference (const float * GGML_RESTRICT x, block_iq3_s * GGML_RESTRICT y, int k); void quantize_row_q4_0(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_q4_1(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); @@ -242,6 +258,7 @@ void quantize_row_q6_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, in void quantize_row_q8_K(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq3_xxs(const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); void quantize_row_iq4_nl (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); +void quantize_row_iq3_s (const float * GGML_RESTRICT x, void * GGML_RESTRICT y, int k); // Dequantization void dequantize_row_q4_0(const block_q4_0 * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); @@ -262,6 +279,7 @@ void dequantize_row_iq2_xs (const block_iq2_xs * GGML_RESTRICT x, float * GGML_ void dequantize_row_iq3_xxs(const block_iq3_xxs * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq1_s (const block_iq1_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); void dequantize_row_iq4_nl (const block_iq4_nl * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); +void dequantize_row_iq3_s (const block_iq3_s * GGML_RESTRICT x, float * GGML_RESTRICT y, int k); // Dot product void ggml_vec_dot_q4_0_q8_0(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); @@ -280,6 +298,7 @@ void ggml_vec_dot_iq2_xs_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void ggml_vec_dot_iq3_xxs_q8_K(int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq1_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); void ggml_vec_dot_iq4_nl_q8_0 (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); +void ggml_vec_dot_iq3_s_q8_K (int n, float * GGML_RESTRICT s, size_t bs, const void * GGML_RESTRICT vx, size_t bx, const void * GGML_RESTRICT vy, size_t by, int nrc); // // Quantization utilizing an importance matrix (a.k.a. 
"Activation aWare Quantization") @@ -289,6 +308,7 @@ size_t quantize_iq2_xs (const float * src, void * dst, int nrows, int n_per_row, size_t quantize_iq3_xxs(const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq1_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_iq4_nl (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); +size_t quantize_iq3_s (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q2_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q3_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); size_t quantize_q4_K (const float * src, void * dst, int nrows, int n_per_row, int64_t * hist, const float * imatrix); diff --git a/ggml.c b/ggml.c index d710fe702..c09a3cad6 100644 --- a/ggml.c +++ b/ggml.c @@ -678,6 +678,18 @@ static const ggml_type_traits_t type_traits[GGML_TYPE_COUNT] = { .vec_dot_type = GGML_TYPE_Q8_K, .nrows = 1, }, + [GGML_TYPE_IQ3_S] = { + .type_name = "iq3_s", + .blck_size = QK_K, + .type_size = sizeof(block_iq3_s), + .is_quantized = true, + .to_float = (ggml_to_float_t) dequantize_row_iq3_s, + .from_float = quantize_row_iq3_s, + .from_float_reference = (ggml_from_float_t)quantize_row_iq3_s_reference, + .vec_dot = ggml_vec_dot_iq3_s_q8_K, + .vec_dot_type = GGML_TYPE_Q8_K, + .nrows = 1, + }, [GGML_TYPE_IQ1_S] = { .type_name = "iq1_s", .blck_size = QK_K, @@ -2304,6 +2316,7 @@ enum ggml_type ggml_ftype_to_ggml_type(enum ggml_ftype ftype) { case GGML_FTYPE_MOSTLY_IQ3_XXS: wtype = GGML_TYPE_IQ3_XXS; break; case GGML_FTYPE_MOSTLY_IQ1_S: wtype = GGML_TYPE_IQ1_S; break; case GGML_FTYPE_MOSTLY_IQ4_NL: wtype = GGML_TYPE_IQ4_NL; break; + case GGML_FTYPE_MOSTLY_IQ3_S: wtype = GGML_TYPE_IQ3_S; break; case GGML_FTYPE_UNKNOWN: wtype = GGML_TYPE_COUNT; break; case GGML_FTYPE_MOSTLY_Q4_1_SOME_F16: wtype = GGML_TYPE_COUNT; break; } @@ -7738,6 +7751,7 @@ static void ggml_compute_forward_add( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: { ggml_compute_forward_add_q_f32(params, dst); } break; @@ -8017,6 +8031,7 @@ static void ggml_compute_forward_add1( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: { ggml_compute_forward_add1_q_f32(params, dst); } break; @@ -8141,6 +8156,7 @@ static void ggml_compute_forward_acc( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: default: { GGML_ASSERT(false); @@ -11039,6 +11055,7 @@ static void ggml_compute_forward_out_prod( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: { ggml_compute_forward_out_prod_q_f32(params, dst); } break; @@ -11227,6 +11244,7 @@ static void ggml_compute_forward_set( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: default: { GGML_ASSERT(false); @@ -11429,6 +11447,7 @@ static void ggml_compute_forward_get_rows( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: { ggml_compute_forward_get_rows_q(params, dst); } break; @@ -12129,6 +12148,7 @@ static void ggml_compute_forward_alibi( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case 
GGML_TYPE_I16: @@ -12212,6 +12232,7 @@ static void ggml_compute_forward_clamp( case GGML_TYPE_IQ3_XXS: case GGML_TYPE_IQ1_S: case GGML_TYPE_IQ4_NL: + case GGML_TYPE_IQ3_S: case GGML_TYPE_Q8_K: case GGML_TYPE_I8: case GGML_TYPE_I16: @@ -19463,6 +19484,7 @@ void ggml_quantize_init(enum ggml_type type) { case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ1_S: iq2xs_init_impl(type); break; case GGML_TYPE_IQ3_XXS: iq3xs_init_impl(256); break; + case GGML_TYPE_IQ3_S: iq3xs_init_impl(512); break; default: // nothing break; } @@ -19737,6 +19759,15 @@ size_t ggml_quantize_chunk(enum ggml_type type, const float * src, void * dst, i result = quantize_iq3_xxs(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); GGML_ASSERT(result == row_size * nrows); } break; + case GGML_TYPE_IQ3_S: + { + GGML_ASSERT(start % QK_K == 0); + GGML_ASSERT(start % n_per_row == 0); + size_t start_row = start / n_per_row; + size_t row_size = ggml_row_size(type, n_per_row); + result = quantize_iq3_s(src + start, (char *)dst + start_row * row_size, nrows, n_per_row, hist, imatrix); + GGML_ASSERT(result == row_size * nrows); + } break; case GGML_TYPE_IQ1_S: { GGML_ASSERT(start % QK_K == 0); diff --git a/ggml.h b/ggml.h index 37eff6279..a4166e1f7 100644 --- a/ggml.h +++ b/ggml.h @@ -350,6 +350,7 @@ extern "C" { GGML_TYPE_IQ3_XXS = 18, GGML_TYPE_IQ1_S = 19, GGML_TYPE_IQ4_NL = 20, + GGML_TYPE_IQ3_S = 21, GGML_TYPE_I8, GGML_TYPE_I16, GGML_TYPE_I32, @@ -389,6 +390,7 @@ extern "C" { GGML_FTYPE_MOSTLY_IQ3_XXS = 17, // except 1d tensors GGML_FTYPE_MOSTLY_IQ1_S = 18, // except 1d tensors GGML_FTYPE_MOSTLY_IQ4_NL = 19, // except 1d tensors + GGML_FTYPE_MOSTLY_IQ3_S = 20, // except 1d tensors }; // available tensor operations: diff --git a/llama.cpp b/llama.cpp index 37477e6ef..1f6b6cff4 100644 --- a/llama.cpp +++ b/llama.cpp @@ -2545,6 +2545,7 @@ struct llama_model_loader { case GGML_TYPE_IQ3_XXS: ftype = LLAMA_FTYPE_MOSTLY_IQ3_XXS; break; case GGML_TYPE_IQ1_S: ftype = LLAMA_FTYPE_MOSTLY_IQ1_S; break; case GGML_TYPE_IQ4_NL: ftype = LLAMA_FTYPE_MOSTLY_IQ4_NL; break; + case GGML_TYPE_IQ3_S: ftype = LLAMA_FTYPE_MOSTLY_IQ3_S; break; default: { LLAMA_LOG_WARN("%s: unknown type %s\n", __func__, ggml_type_name(type_max)); @@ -2890,6 +2891,8 @@ static std::string llama_model_ftype_name(llama_ftype ftype) { case LLAMA_FTYPE_MOSTLY_IQ3_XXS:return "IQ3_XXS - 3.0625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ1_S :return "IQ1_S - 1.5625 bpw"; case LLAMA_FTYPE_MOSTLY_IQ4_NL: return "IQ4_NL - 4.5 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ3_S: return "IQ3_S - 3.4375 bpw"; + case LLAMA_FTYPE_MOSTLY_IQ3_M: return "IQ3_S mix - 3.66 bpw"; default: return "unknown, may not work"; } @@ -10544,6 +10547,12 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) { new_type = qs.model.hparams.n_gqa() >= 4 ? GGML_TYPE_Q4_K : !qs.has_imatrix ? GGML_TYPE_Q3_K : GGML_TYPE_IQ3_XXS; } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_S && qs.model.hparams.n_gqa() >= 4) { + new_type = GGML_TYPE_Q4_K; + } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { + new_type = GGML_TYPE_Q4_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) { new_type = qs.i_attention_wv < 2 ? 
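ggml_quantize_init sizes the codebook per type: iq3xs_init_impl(256) for IQ3_XXS versus iq3xs_init_impl(512) for IQ3_S. The doubled grid is why each group of four weights needs nine index bits, split between qs and qh in the packing loop shown earlier. A small self-contained sketch of that split (simplified to one flat group index per super-block; illustrative only):

```python
# Pack/unpack the 9-bit IQ3_S grid index: low 8 bits in qs, the 9th
# bit in the qh bitfield (mirrors the qs/qh writes in
# quantize_row_iq3_s_impl; simplified, illustrative only).
QK_K = 256
N_GROUPS = QK_K // 4                 # one grid index per 4 weights

def pack_index(qs: bytearray, qh: bytearray, g: int, grid_index: int) -> None:
    assert 0 <= grid_index < 512     # 512-entry grid, hence iq3xs_init_impl(512)
    qs[g] = grid_index & 255
    qh[g // 8] |= ((grid_index >> 8) & 1) << (g % 8)

def unpack_index(qs: bytes, qh: bytes, g: int) -> int:
    hi = (qh[g // 8] >> (g % 8)) & 1
    return qs[g] | (hi << 8)

qs, qh = bytearray(N_GROUPS), bytearray(N_GROUPS // 8)
for g, idx in [(0, 17), (10, 300), (63, 511)]:
    pack_index(qs, qh, g, idx)
    assert unpack_index(qs, qh, g) == idx
```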
GGML_TYPE_Q5_K : GGML_TYPE_Q4_K; } @@ -10575,13 +10584,17 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty new_type = GGML_TYPE_Q8_0; } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { - new_type = GGML_TYPE_Q2_K; + new_type = GGML_TYPE_IQ3_XXS; + } + } else if (name.find("attn_q.weight") != std::string::npos) { + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { + new_type = GGML_TYPE_IQ3_XXS; } } else if (name.find("ffn_down") != std::string::npos) { auto info = layer_info(qs.i_ffn_down, qs.n_ffn_down, name.c_str()); int i_layer = info.first, n_layer = info.second; if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K) new_type = GGML_TYPE_Q3_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS) { + else if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K_S) { if (i_layer < n_layer/8) new_type = GGML_TYPE_Q4_K; } else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS && !qs.has_imatrix) { @@ -10592,6 +10605,10 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty : arch != LLM_ARCH_FALCON || use_more_bits(i_layer, n_layer) ? GGML_TYPE_Q4_K : GGML_TYPE_Q3_K; } + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_M && (i_layer < n_layer/8 || + (qs.model.hparams.n_expert == 8 && use_more_bits(i_layer, n_layer)))) { + new_type = GGML_TYPE_Q4_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) { new_type = arch == LLM_ARCH_FALCON ? GGML_TYPE_Q4_K : GGML_TYPE_Q5_K; } @@ -10623,37 +10640,41 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (qs.model.hparams.n_expert == 8) { if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS || ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ4_NL || - ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) { + ftype == LLAMA_FTYPE_MOSTLY_Q4_K_S || ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M || ftype == LLAMA_FTYPE_MOSTLY_IQ3_S || + ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { new_type = GGML_TYPE_Q5_K; } } else { - if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; + if (ftype == LLAMA_FTYPE_MOSTLY_Q2_K ) new_type = GGML_TYPE_Q3_K; else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_XXS) new_type = GGML_TYPE_Q3_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M) new_type = GGML_TYPE_Q4_K; - else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q5_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M ) new_type = GGML_TYPE_Q4_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L ) new_type = GGML_TYPE_Q5_K; + else if (ftype == LLAMA_FTYPE_MOSTLY_IQ3_M ) new_type = GGML_TYPE_Q4_K; } } else { if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q4_K; } } else if (name.find("attn_qkv.weight") != std::string::npos) { - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L) new_type = GGML_TYPE_Q4_K; + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_M || ftype == LLAMA_FTYPE_MOSTLY_Q3_K_L || ftype == LLAMA_FTYPE_MOSTLY_IQ3_M) { + new_type = GGML_TYPE_Q4_K; + } else if (ftype == LLAMA_FTYPE_MOSTLY_Q4_K_M) new_type = GGML_TYPE_Q5_K; else if (ftype == LLAMA_FTYPE_MOSTLY_Q5_K_M) new_type = GGML_TYPE_Q6_K; } else if (name.find("ffn_gate") != std::string::npos) { auto info = layer_info(qs.i_ffn_gate, qs.n_ffn_gate, name.c_str()); int i_layer = info.first, n_layer = info.second; - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && !use_more_bits(i_layer, n_layer)) { - new_type = GGML_TYPE_Q2_K; + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && (i_layer >= n_layer/8 && 
i_layer < 7*n_layer/8)) { + new_type = GGML_TYPE_IQ3_XXS; } ++qs.i_ffn_gate; } else if (name.find("ffn_up") != std::string::npos) { auto info = layer_info(qs.i_ffn_up, qs.n_ffn_up, name.c_str()); int i_layer = info.first, n_layer = info.second; - if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && !use_more_bits(i_layer, n_layer)) { - new_type = GGML_TYPE_Q2_K; + if (ftype == LLAMA_FTYPE_MOSTLY_Q3_K_XS && (i_layer >= n_layer/8 && i_layer < 7*n_layer/8)) { + new_type = GGML_TYPE_IQ3_XXS; } ++qs.i_ffn_up; } @@ -10673,7 +10694,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty if (new_type == GGML_TYPE_Q2_K || new_type == GGML_TYPE_Q3_K || new_type == GGML_TYPE_Q4_K || new_type == GGML_TYPE_Q5_K || new_type == GGML_TYPE_Q6_K || new_type == GGML_TYPE_IQ2_XS || new_type == GGML_TYPE_IQ2_XXS || - new_type == GGML_TYPE_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S) { + new_type == GGML_TYPE_IQ3_XXS || ftype == LLAMA_FTYPE_MOSTLY_IQ1_S || new_type == GGML_TYPE_IQ3_S) { int nx = tensor->ne[0]; int ny = tensor->ne[1]; if (nx % QK_K != 0) { @@ -10688,6 +10709,7 @@ static ggml_type get_k_quant_type(quantize_state_internal & qs, ggml_type new_ty case GGML_TYPE_IQ2_XXS: case GGML_TYPE_IQ2_XS: case GGML_TYPE_IQ3_XXS: + case GGML_TYPE_IQ3_S: case GGML_TYPE_IQ1_S: case GGML_TYPE_Q2_K: case GGML_TYPE_Q3_K: new_type = GGML_TYPE_IQ4_NL; break; @@ -10719,7 +10741,7 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s // K-quants case LLAMA_FTYPE_MOSTLY_Q2_K_S: case LLAMA_FTYPE_MOSTLY_Q2_K: quantized_type = GGML_TYPE_Q2_K; break; - case LLAMA_FTYPE_MOSTLY_Q3_K_XS: + case LLAMA_FTYPE_MOSTLY_Q3_K_XS: quantized_type = GGML_TYPE_IQ3_S; break; case LLAMA_FTYPE_MOSTLY_Q3_K_S: case LLAMA_FTYPE_MOSTLY_Q3_K_M: case LLAMA_FTYPE_MOSTLY_Q3_K_L: quantized_type = GGML_TYPE_Q3_K; break; @@ -10733,6 +10755,8 @@ static void llama_model_quantize_internal(const std::string & fname_inp, const s case LLAMA_FTYPE_MOSTLY_IQ3_XXS: quantized_type = GGML_TYPE_IQ3_XXS; break; case LLAMA_FTYPE_MOSTLY_IQ1_S: quantized_type = GGML_TYPE_IQ1_S; break; case LLAMA_FTYPE_MOSTLY_IQ4_NL: quantized_type = GGML_TYPE_IQ4_NL; break; + case LLAMA_FTYPE_MOSTLY_IQ3_S: quantized_type = GGML_TYPE_IQ3_S; break; + case LLAMA_FTYPE_MOSTLY_IQ3_M: quantized_type = GGML_TYPE_IQ3_S; break; default: throw std::runtime_error(format("invalid output file type %d\n", ftype)); } diff --git a/llama.h b/llama.h index 84f196b3b..889edf4d9 100644 --- a/llama.h +++ b/llama.h @@ -102,6 +102,8 @@ extern "C" { LLAMA_FTYPE_MOSTLY_IQ3_XXS = 23, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ1_S = 24, // except 1d tensors LLAMA_FTYPE_MOSTLY_IQ4_NL = 25, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ3_S = 26, // except 1d tensors + LLAMA_FTYPE_MOSTLY_IQ3_M = 27, // except 1d tensors LLAMA_FTYPE_GUESSED = 1024, // not specified in the model file }; diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index 55db42bf6..f8574588b 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1918,7 +1918,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op GGML_TYPE_Q6_K, GGML_TYPE_IQ2_XXS, GGML_TYPE_IQ2_XS, GGML_TYPE_IQ3_XXS, GGML_TYPE_IQ1_S, - GGML_TYPE_IQ4_NL, + GGML_TYPE_IQ4_NL, GGML_TYPE_IQ3_S, }; // unary ops diff --git a/tests/test-quantize-fns.cpp b/tests/test-quantize-fns.cpp index 5e92d5742..04656bb9e 100644 --- a/tests/test-quantize-fns.cpp +++ b/tests/test-quantize-fns.cpp @@ -151,6 +151,7 @@ int main(int argc, char * argv[]) { const float max_quantization_error 
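As the ftype mapping above shows, IQ3_S and IQ3_M share the same storage type, GGML_TYPE_IQ3_S; IQ3_M differs only in which tensors get_k_quant_type promotes to higher-bit K-quants. A condensed sketch of that recipe (deliberately simplified; the full rules above also key on GQA and the 8-expert MoE case):

```python
# Condensed sketch of the IQ3_M mixing rule from get_k_quant_type
# above (simplified: drops the GQA and 8-expert special cases).
def iq3_m_tensor_type(name: str, i_layer: int, n_layer: int) -> str:
    if "attn_v.weight" in name or "attn_qkv.weight" in name or \
       "attn_output.weight" in name:
        return "q4_K"                          # attention tensors get more bits
    if "ffn_down" in name and i_layer < n_layer // 8:
        return "q4_K"                          # first eighth of ffn_down layers
    return "iq3_s"                             # everything else: 3.4375 bpw

assert iq3_m_tensor_type("blk.0.ffn_down.weight", 0, 32) == "q4_K"
assert iq3_m_tensor_type("blk.16.ffn_gate.weight", 16, 32) == "iq3_s"
```

Promoting those tensors is what lifts the average from 3.4375 bpw to the roughly 3.66 bpw reported by llama_model_ftype_name for IQ3_M.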
= type == GGML_TYPE_Q2_K ? MAX_QUANTIZATION_TOTAL_ERROR_2BITS : type == GGML_TYPE_Q3_K ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS : + type == GGML_TYPE_IQ3_S ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS : type == GGML_TYPE_IQ3_XXS ? MAX_QUANTIZATION_TOTAL_ERROR_3BITS_XXS : MAX_QUANTIZATION_TOTAL_ERROR; failed = !(total_error < max_quantization_error); num_failed += failed; @@ -167,7 +168,8 @@ int main(int argc, char * argv[]) { const float vec_dot_error = dot_product_error(qfns, test_size, test_data.data(), test_data2.data()); const float max_allowed_error = type == GGML_TYPE_Q2_K || type == GGML_TYPE_IQ2_XS || type == GGML_TYPE_IQ2_XXS || - type == GGML_TYPE_IQ3_XXS ? MAX_DOT_PRODUCT_ERROR_LOWBIT : MAX_DOT_PRODUCT_ERROR; + type == GGML_TYPE_IQ3_XXS || type == GGML_TYPE_IQ3_S ? MAX_DOT_PRODUCT_ERROR_LOWBIT + : MAX_DOT_PRODUCT_ERROR; failed = !(vec_dot_error < max_allowed_error); num_failed += failed; if (failed || verbose) { From 9e359a4f47c1b2dceb99e29706c9f7403d32ab5e Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sat, 24 Feb 2024 19:16:04 +0100 Subject: [PATCH 854/859] server: continue to update other slots on embedding concurrent request (#5699) * server: #5655 - continue to update other slots on embedding concurrent request. * server: tests: add multi users embeddings as fixed * server: tests: adding OAI compatible embedding concurrent endpoint * server: tests: adding OAI compatible embedding with multiple inputs --- examples/server/server.cpp | 2 +- examples/server/tests/features/issues.feature | 34 +--- .../server/tests/features/parallel.feature | 46 ++++++ examples/server/tests/features/server.feature | 13 ++ examples/server/tests/features/steps/steps.py | 151 +++++++++++++----- 5 files changed, 168 insertions(+), 78 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 9fb436c2a..19a8c1067 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -1836,7 +1836,7 @@ struct llama_server_context send_embedding(slot); slot.release(); slot.i_batch = -1; - return true; + continue; } completion_token_output result; diff --git a/examples/server/tests/features/issues.feature b/examples/server/tests/features/issues.feature index 542006d9a..bf5a175a3 100644 --- a/examples/server/tests/features/issues.feature +++ b/examples/server/tests/features/issues.feature @@ -1,36 +1,4 @@ # List of ongoing issues @bug Feature: Issues - # Issue #5655 - Scenario: Multi users embeddings - Given a server listening on localhost:8080 - And a model file stories260K.gguf - And a model alias tinyllama-2 - And 42 as server seed - And 64 KV cache size - And 2 slots - And continuous batching - And embeddings extraction - Then the server is starting - Then the server is healthy - - Given a prompt: - """ - Write a very long story about AI. - """ - And a prompt: - """ - Write another very long music lyrics. - """ - And a prompt: - """ - Write a very long poem. - """ - And a prompt: - """ - Write a very long joke. 
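The one-line server.cpp change above is the whole fix for #5655: the embedding branch runs inside the per-slot update loop, so `return true` after serving the first embedding slot silently skipped every other slot in the same batch; `continue` moves on to the next slot instead. A toy Python reduction of the bug (illustrative; not the server's real structure):

```python
# Toy reduction of the fixed control flow: returning from inside the
# per-slot loop starves the remaining slots (illustrative only).
def update_slots(slots, fixed=True):
    served = []
    for slot in slots:
        if slot["task"] == "embedding":
            served.append(slot["id"])      # send_embedding(slot); slot.release()
            if fixed:
                continue                   # keep updating the other slots
            return served                  # old behaviour: batch processing stops
        served.append(slot["id"])          # completion slots keep decoding
    return served

slots = [{"id": 0, "task": "embedding"}, {"id": 1, "task": "completion"}]
assert update_slots(slots, fixed=False) == [0]      # second user stalled
assert update_slots(slots, fixed=True)  == [0, 1]   # both slots served
```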
- """ - Given concurrent embedding requests - Then the server is busy - Then the server is idle - Then all embeddings are generated + # No confirmed issue at the moment diff --git a/examples/server/tests/features/parallel.feature b/examples/server/tests/features/parallel.feature index 802d624ff..c85f9de1d 100644 --- a/examples/server/tests/features/parallel.feature +++ b/examples/server/tests/features/parallel.feature @@ -8,6 +8,7 @@ Feature: Parallel And 42 as server seed And 64 KV cache size And 2 slots + And embeddings extraction And continuous batching Then the server is starting Then the server is healthy @@ -75,3 +76,48 @@ Feature: Parallel Then the server is busy Then the server is idle Then all prompts are predicted + + Scenario: Multi users embeddings + Given a prompt: + """ + Write a very long story about AI. + """ + And a prompt: + """ + Write another very long music lyrics. + """ + And a prompt: + """ + Write a very long poem. + """ + And a prompt: + """ + Write a very long joke. + """ + Given concurrent embedding requests + Then the server is busy + Then the server is idle + Then all embeddings are generated + + Scenario: Multi users OAI compatibility embeddings + Given a prompt: + """ + In which country Paris is located ? + """ + And a prompt: + """ + Is Madrid the capital of Spain ? + """ + And a prompt: + """ + What is the biggest US city ? + """ + And a prompt: + """ + What is the capital of Bulgaria ? + """ + And a model tinyllama-2 + Given concurrent OAI embedding requests + Then the server is busy + Then the server is idle + Then all embeddings are generated diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index fedcfe5ae..5f81d256a 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -60,6 +60,19 @@ Feature: llama.cpp server """ Then embeddings are generated + Scenario: OAI Embeddings compatibility with multiple inputs + Given a model tinyllama-2 + Given a prompt: + """ + In which country Paris is located ? + """ + And a prompt: + """ + Is Madrid the capital of Spain ? 
+ """ + When an OAI compatible embeddings computation request for multiple inputs + Then embeddings are generated + Scenario: Tokenize / Detokenize When tokenizing: diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index 50f2b641e..9c825fdbc 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -1,4 +1,5 @@ import asyncio +import collections import json import os import re @@ -261,35 +262,35 @@ def step_a_prompt_prompt(context, prompt): @step(u'concurrent completion requests') @async_run_until_complete() async def step_concurrent_completion_requests(context): - await concurrent_completion_requests(context, - request_completion, - # prompt is inserted automatically - context.base_url, - debug=context.debug, - n_predict=context.n_predict if hasattr(context, 'n_predict') else None, - server_seed=context.server_seed if hasattr(context, 'server_seed') else None, - user_api_key=context.user_api_key if hasattr(context, - 'user_api_key') else None) + await concurrent_requests(context, + request_completion, + # prompt is inserted automatically + context.base_url, + debug=context.debug, + n_predict=context.n_predict if hasattr(context, 'n_predict') else None, + server_seed=context.server_seed if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key if hasattr(context, + 'user_api_key') else None) @step(u'concurrent OAI completions requests') @async_run_until_complete async def step_oai_chat_completions(context): - await concurrent_completion_requests(context, oai_chat_completions, - # user_prompt is inserted automatically - context.system_prompt, - context.base_url, - True, # async_client - model=context.model - if hasattr(context, 'model') else None, - n_predict=context.n_predict - if hasattr(context, 'n_predict') else None, - enable_streaming=context.enable_streaming - if hasattr(context, 'enable_streaming') else None, - server_seed=context.server_seed - if hasattr(context, 'server_seed') else None, - user_api_key=context.user_api_key - if hasattr(context, 'user_api_key') else None) + await concurrent_requests(context, oai_chat_completions, + # user_prompt is inserted automatically + context.system_prompt, + context.base_url, + True, # async_client + model=context.model + if hasattr(context, 'model') else None, + n_predict=context.n_predict + if hasattr(context, 'n_predict') else None, + enable_streaming=context.enable_streaming + if hasattr(context, 'enable_streaming') else None, + server_seed=context.server_seed + if hasattr(context, 'server_seed') else None, + user_api_key=context.user_api_key + if hasattr(context, 'user_api_key') else None) @step(u'all prompts are predicted') @@ -316,36 +317,58 @@ async def all_prompts_are_predicted(context, expected_predicted_n=None): @step(u'embeddings are computed for') @async_run_until_complete async def step_compute_embedding(context): - content = context.text - base_url = context.base_url - context.embeddings = await request_embedding(content, base_url) + context.embeddings = await request_embedding(context.text, base_url=context.base_url) @step(u'embeddings are generated') def step_assert_embeddings(context): - assert_embeddings(context.embeddings) + if len(context.prompts) == 0: + assert_embeddings(context.embeddings) + else: + assert len(context.embeddings) == len(context.prompts), (f"unexpected response:\n" + f"context.prompts={context.prompts}\n" + f"context.embeddings={context.embeddings}") + for embedding in 
context.embeddings: + context.prompts.pop() + assert_embeddings(embedding) @step(u'an OAI compatible embeddings computation request for') -def step_oai_compute_embedding(context): - openai.api_key = 'nope' # openai client always expects an api_keu - if context.user_api_key is not None: - openai.api_key = context.user_api_key - openai.api_base = f'{context.base_url}/v1' - embeddings = openai.Embedding.create( - model=context.model, - input=context.text, - ) - context.embeddings = embeddings +@async_run_until_complete +async def step_oai_compute_embeddings(context): + context.embeddings = await request_oai_embeddings(context.text, + base_url=context.base_url, + user_api_key=context.user_api_key, + model=context.model) + + +@step(u'an OAI compatible embeddings computation request for multiple inputs') +@async_run_until_complete +async def step_oai_compute_embeddings_multiple_inputs(context): + context.embeddings = await request_oai_embeddings(context.prompts, + base_url=context.base_url, + user_api_key=context.user_api_key, + model=context.model) @step(u'concurrent embedding requests') @async_run_until_complete() async def step_concurrent_embedding_requests(context): - await concurrent_completion_requests(context, - request_embedding, - # prompt is inserted automatically - context.base_url) + await concurrent_requests(context, + request_embedding, + # prompt is inserted automatically + base_url=context.base_url) + + +@step(u'concurrent OAI embedding requests') +@async_run_until_complete() +async def step_concurrent_oai_embedding_requests(context): + await concurrent_requests(context, + request_oai_embeddings, + # prompt is inserted automatically + base_url=context.base_url, + async_client=True, + model=context.model) @step(u'all embeddings are generated') @@ -401,7 +424,7 @@ def step_check_options_header_value(context, cors_header, cors_header_value): assert context.options_response.headers[cors_header] == cors_header_value -async def concurrent_completion_requests(context, f_completion, *args, **kwargs): +async def concurrent_requests(context, f_completion, *args, **kwargs): n_prompts = len(context.prompts) if context.debug: print(f"starting {n_prompts} concurrent completion requests...") @@ -565,7 +588,7 @@ async def oai_chat_completions(user_prompt, return completion_response -async def request_embedding(content, base_url): +async def request_embedding(content, base_url=None): async with aiohttp.ClientSession() as session: async with session.post(f'{base_url}/embedding', json={ @@ -576,6 +599,46 @@ async def request_embedding(content, base_url): return response_json['embedding'] +async def request_oai_embeddings(input, + base_url=None, user_api_key=None, + model=None, async_client=False): + # openai client always expects an api_key + user_api_key = user_api_key if user_api_key is not None else 'nope' + if async_client: + origin = 'llama.cpp' + if user_api_key is not None: + headers = {'Authorization': f'Bearer {user_api_key}', 'Origin': origin} + async with aiohttp.ClientSession() as session: + async with session.post(f'{base_url}/v1/embeddings', + json={ + "input": input, + "model": model, + }, + headers=headers) as response: + assert response.status == 200, f"received status code not expected: {response.status}" + assert response.headers['Access-Control-Allow-Origin'] == origin + assert response.headers['Content-Type'] == "application/json; charset=utf-8" + response_json = await response.json() + assert response_json['model'] == model, f"invalid model received: {response_json['model']}" 
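request_oai_embeddings accepts either a single string or a list in `input`, matching the OpenAI embeddings API: for a list the response object is of type 'list' and its `data` array carries one embedding per input. A condensed client sketch of the multi-input case (async branch only; the tinyllama-2 alias and server address are taken from the test scenarios):

```python
# Condensed sketch of an OAI-compatible multi-input embeddings call,
# as request_oai_embeddings performs above (async branch; model alias
# and address assumed from the test setup).
import asyncio
import aiohttp

async def oai_embeddings(inputs: list, model: str = "tinyllama-2") -> list:
    async with aiohttp.ClientSession() as session:
        async with session.post("http://localhost:8080/v1/embeddings",
                                json={"input": inputs, "model": model}) as resp:
            assert resp.status == 200
            body = await resp.json()
            assert body["object"] == "list"    # list response for list input
            return [item["embedding"] for item in body["data"]]

embeddings = asyncio.run(oai_embeddings(
    ["In which country Paris is located ?",
     "Is Madrid the capital of Spain ?"]))
assert len(embeddings) == 2                    # one embedding per input
```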
+ assert response_json['object'] == 'list' + return response_json['data'] + else: + openai.api_key = user_api_key + openai.api_base = f'{base_url}/v1' + oai_embeddings = openai.Embedding.create( + model=model, + input=input, + ) + + if isinstance(input, collections.abc.Sequence): + embeddings = [] + for an_oai_embeddings in oai_embeddings.data: + embeddings.append(an_oai_embeddings.embedding) + else: + embeddings = oai_embeddings.data.embedding + return embeddings + + def assert_n_tokens_predicted(completion_response, expected_predicted_n=None, re_content=None): content = completion_response['content'] n_predicted = completion_response['timings']['predicted_n'] From 69917dfa55674c608360638bb4d6a12a315e2810 Mon Sep 17 00:00:00 2001 From: Anas Ahouzi <112881240+aahouzi@users.noreply.github.com> Date: Sun, 25 Feb 2024 10:54:04 +0100 Subject: [PATCH 855/859] py : fix StableLM conversion after config.json changes (#5703) * Fix issues during StableLM models conversion * Fix hard coded layer_norm_eps * Support layer_norm_eps for LlavaStableLM Co-authored-by: Jared Van Bortel * Add missing parenthesis Co-authored-by: Jared Van Bortel * Support rotary_factor for LlavaStableLM Co-authored-by: Jared Van Bortel * fix typo * Add StableLMEpochForCausalLM for safety Co-authored-by: compilade <113953597+compilade@users.noreply.github.com> * Add StableLMEpochForCausalLM for safety 2 Co-authored-by: compilade <113953597+compilade@users.noreply.github.com> --------- Co-authored-by: Jared Van Bortel Co-authored-by: Jared Van Bortel Co-authored-by: compilade <113953597+compilade@users.noreply.github.com> --- convert-hf-to-gguf.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/convert-hf-to-gguf.py b/convert-hf-to-gguf.py index 32d54b45f..ae30b2a76 100755 --- a/convert-hf-to-gguf.py +++ b/convert-hf-to-gguf.py @@ -192,7 +192,7 @@ class Model: return RefactModel if model_architecture == "PersimmonForCausalLM": return PersimmonModel - if model_architecture in ("StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): + if model_architecture in ("StableLmForCausalLM", "StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): return StableLMModel if model_architecture == "QWenLMHeadModel": return QwenModel @@ -253,7 +253,7 @@ class Model: return gguf.MODEL_ARCH.REFACT if arch == "PersimmonForCausalLM": return gguf.MODEL_ARCH.PERSIMMON - if arch in ("StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): + if arch in ("StableLmForCausalLM", "StableLMEpochForCausalLM", "LlavaStableLMEpochForCausalLM"): return gguf.MODEL_ARCH.STABLELM if arch == "QWenLMHeadModel": return gguf.MODEL_ARCH.QWEN @@ -1074,10 +1074,11 @@ class StableLMModel(Model): self.gguf_writer.add_embedding_length(hparams["hidden_size"]) self.gguf_writer.add_block_count(block_count) self.gguf_writer.add_feed_forward_length(hparams["intermediate_size"]) - self.gguf_writer.add_rope_dimension_count(int(hparams["rope_pct"] * (hparams["hidden_size"] // hparams["num_attention_heads"]))) + rotary_factor = self.find_hparam(["partial_rotary_factor", "rope_pct"]) + self.gguf_writer.add_rope_dimension_count(int(rotary_factor * (hparams["hidden_size"] // hparams["num_attention_heads"]))) self.gguf_writer.add_head_count(hparams["num_attention_heads"]) self.gguf_writer.add_parallel_residual(hparams["use_parallel_residual"] if "use_parallel_residual" in hparams else True) - self.gguf_writer.add_layer_norm_eps(1e-5) + self.gguf_writer.add_layer_norm_eps(self.find_hparam(["layer_norm_eps", "norm_eps"])) class 
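The StableLM fixes above all reduce to one pattern: newer config.json files renamed several keys (rope_pct became partial_rotary_factor, and the norm epsilon key varies), so the converter now probes a list of candidate names via find_hparam instead of hard-coding one. A minimal stand-alone sketch of that lookup (a helper written here for illustration, mirroring what Model.find_hparam does):

```python
# Minimal stand-in for the find_hparam lookup used above: probe a
# list of candidate config.json keys, new name first (illustrative).
def find_hparam(hparams: dict, keys: list):
    for key in keys:
        if key in hparams:
            return hparams[key]
    raise KeyError(f"could not find any of: {keys}")

old_cfg = {"rope_pct": 0.25, "norm_eps": 1e-5}   # older StableLM configs
new_cfg = {"partial_rotary_factor": 0.25, "layer_norm_eps": 1e-5}

for cfg in (old_cfg, new_cfg):
    assert find_hparam(cfg, ["partial_rotary_factor", "rope_pct"]) == 0.25
    assert find_hparam(cfg, ["layer_norm_eps", "norm_eps"]) == 1e-5
```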
MixtralModel(Model): From ab336a9d5e5352ecdcdf4c12d2d54cf4ef82ce31 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 25 Feb 2024 12:09:09 +0200 Subject: [PATCH 856/859] code : normalize enum names (#5697) * coda : normalize enum names ggml-ci * code : cont * code : cont --- common/common.cpp | 18 +- common/common.h | 4 +- common/train.cpp | 10 +- examples/baby-llama/baby-llama.cpp | 2 +- examples/finetune/finetune.cpp | 2 +- examples/llama-bench/llama-bench.cpp | 14 +- examples/llava/llava.cpp | 2 +- examples/server/server.cpp | 18 +- .../train-text-from-scratch.cpp | 2 +- ggml-cuda.cu | 138 +++---- ggml-metal.m | 4 +- ggml-opencl.cpp | 50 +-- ggml-sycl.cpp | 152 ++++---- ggml-vulkan.cpp | 102 ++--- ggml.c | 350 +++++++++--------- ggml.h | 38 +- llama.cpp | 64 ++-- llama.h | 28 +- tests/test-backend-ops.cpp | 4 +- tests/test-opt.cpp | 2 +- 20 files changed, 502 insertions(+), 502 deletions(-) diff --git a/common/common.cpp b/common/common.cpp index 10ef11829..ec596f5a0 100644 --- a/common/common.cpp +++ b/common/common.cpp @@ -295,9 +295,9 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { break; } std::string value(argv[i]); - /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_NONE; } - else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_LINEAR; } - else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_YARN; } + /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_NONE; } + else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_LINEAR; } + else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_YARN; } else { invalid_param = true; break; } } else if (arg == "--rope-scale") { if (++i >= argc) { @@ -630,11 +630,11 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { } std::string arg_next = argv[i]; if (arg_next == "none") { - params.split_mode = LLAMA_SPLIT_NONE; + params.split_mode = LLAMA_SPLIT_MODE_NONE; } else if (arg_next == "layer") { - params.split_mode = LLAMA_SPLIT_LAYER; + params.split_mode = LLAMA_SPLIT_MODE_LAYER; } else if (arg_next == "row") { - params.split_mode = LLAMA_SPLIT_ROW; + params.split_mode = LLAMA_SPLIT_MODE_ROW; } else { invalid_param = true; break; @@ -837,15 +837,15 @@ bool gpt_params_parse_ex(int argc, char ** argv, gpt_params & params) { sep++; if (strncmp(sep, "int:", 4) == 0) { sep += 4; - kvo.tag = LLAMA_KV_OVERRIDE_INT; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; kvo.int_value = std::atol(sep); } else if (strncmp(sep, "float:", 6) == 0) { sep += 6; - kvo.tag = LLAMA_KV_OVERRIDE_FLOAT; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT; kvo.float_value = std::atof(sep); } else if (strncmp(sep, "bool:", 5) == 0) { sep += 5; - kvo.tag = LLAMA_KV_OVERRIDE_BOOL; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL; if (std::strcmp(sep, "true") == 0) { kvo.bool_value = true; } else if (std::strcmp(sep, "false") == 0) { diff --git a/common/common.h b/common/common.h index 935771d44..3e21579b0 100644 --- a/common/common.h +++ b/common/common.h @@ -61,7 +61,7 @@ struct gpt_params { float p_split = 0.1f; // speculative decoding split probability int32_t n_gpu_layers = -1; // number of layers to store in VRAM (-1 - use default) int32_t n_gpu_layers_draft = -1; // number of layers to store in VRAM for the draft model (-1 - use default) - llama_split_mode split_mode = LLAMA_SPLIT_LAYER; // how to split the model across GPUs + llama_split_mode split_mode = LLAMA_SPLIT_MODE_LAYER; // how to split the 
model across GPUs int32_t main_gpu = 0; // the GPU that is used for scratch and small tensors float tensor_split[128] = {0}; // how split tensors should be distributed across GPUs int32_t n_beams = 0; // if non-zero then use beam search of given width. @@ -75,7 +75,7 @@ struct gpt_params { float yarn_beta_fast = 32.0f; // YaRN low correction dim float yarn_beta_slow = 1.0f; // YaRN high correction dim int32_t yarn_orig_ctx = 0; // YaRN original context length - int32_t rope_scaling_type = LLAMA_ROPE_SCALING_UNSPECIFIED; + int32_t rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED; ggml_numa_strategy numa = GGML_NUMA_STRATEGY_DISABLED; // // sampling parameters diff --git a/common/train.cpp b/common/train.cpp index e4c3d5df6..0dbfd24df 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -31,7 +31,7 @@ struct train_state * init_train_state() { state->opt = new struct ggml_opt_context; state->opt->ctx = NULL; - state->opt->params = ggml_opt_default_params(GGML_OPT_ADAM); + state->opt->params = ggml_opt_default_params(GGML_OPT_TYPE_ADAM); state->opt->params.graph_size = LLAMA_TRAIN_MAX_NODES; state->opt->loss_after = 0.0f; @@ -556,7 +556,7 @@ void load_opt_context_gguf(struct gguf_context * fctx, struct ggml_context * f_g std::string opt_type; GGUF_GET_KEY(fctx, opt_type, gguf_get_val_str, GGUF_TYPE_STRING, true, LLM_KV_OPTIMIZER_TYPE); if (opt_type == LLM_KV_OPTIMIZER_TYPE_ADAM) { - opt->params.type = GGML_OPT_ADAM; + opt->params.type = GGML_OPT_TYPE_ADAM; GGUF_GET_KEY(fctx, opt->adam.fx_best, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, LLM_KV_OPTIMIZER_ADAM_BEST_LOSS); GGUF_GET_KEY(fctx, opt->adam.fx_prev, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, LLM_KV_OPTIMIZER_ADAM_PREVIOUS_LOSS); @@ -568,7 +568,7 @@ void load_opt_context_gguf(struct gguf_context * fctx, struct ggml_context * f_g copy_tensor_by_name(opt->adam.v, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_ADAM_SECOND_MOMENTS); copy_tensor_by_name(opt->adam.pf, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_ADAM_PAST_LOSS_VALUES); } else if (opt_type == LLM_KV_OPTIMIZER_TYPE_LBFGS) { - opt->params.type = GGML_OPT_LBFGS; + opt->params.type = GGML_OPT_TYPE_LBFGS; GGUF_GET_KEY(fctx, opt->params.lbfgs.m, gguf_get_val_u32, GGUF_TYPE_UINT32, true, LLM_KV_OPTIMIZER_LBFGS_APPROX_HESSIAN_COUNT); GGUF_GET_KEY(fctx, opt->lbfgs.fx_best, gguf_get_val_f32, GGUF_TYPE_FLOAT32, true, LLM_KV_OPTIMIZER_LBFGS_BEST_LOSS); @@ -603,7 +603,7 @@ void save_opt_context_gguf(struct gguf_context * fctx, struct ggml_opt_context * gguf_set_val_bool(fctx, LLM_KV_OPTIMIZER_JUST_INITIALIZED, opt->just_initialized); switch (opt->params.type) { - case GGML_OPT_ADAM: + case GGML_OPT_TYPE_ADAM: { gguf_set_val_str(fctx, LLM_KV_OPTIMIZER_TYPE, LLM_KV_OPTIMIZER_TYPE_ADAM); gguf_set_val_f32(fctx, LLM_KV_OPTIMIZER_ADAM_BEST_LOSS, opt->adam.fx_best); @@ -622,7 +622,7 @@ void save_opt_context_gguf(struct gguf_context * fctx, struct ggml_opt_context * gguf_add_tensor(fctx, opt->adam.pf); } } break; - case GGML_OPT_LBFGS: + case GGML_OPT_TYPE_LBFGS: { gguf_set_val_str(fctx, LLM_KV_OPTIMIZER_TYPE, LLM_KV_OPTIMIZER_TYPE_LBFGS); gguf_set_val_u32(fctx, LLM_KV_OPTIMIZER_LBFGS_APPROX_HESSIAN_COUNT, opt->params.lbfgs.m); diff --git a/examples/baby-llama/baby-llama.cpp b/examples/baby-llama/baby-llama.cpp index 65bb238a0..bf0125e75 100644 --- a/examples/baby-llama/baby-llama.cpp +++ b/examples/baby-llama/baby-llama.cpp @@ -1547,7 +1547,7 @@ int main(int argc, char ** argv) { float error_before_opt = ggml_get_f32_1d(e, 0); - struct ggml_opt_params opt_params_lbfgs = ggml_opt_default_params(GGML_OPT_LBFGS); + 
struct ggml_opt_params opt_params_lbfgs = ggml_opt_default_params(GGML_OPT_TYPE_LBFGS); opt_params_lbfgs.print_forward_graph = false; opt_params_lbfgs.print_backward_graph = false; opt_params_lbfgs.lbfgs.n_iter = 16; diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index 98bf5a07a..3da5317b3 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1531,7 +1531,7 @@ int main(int argc, char ** argv) { lora.hparams.n_rank_output = n_rank_output; // set opt params from command line - opt->params = ggml_opt_default_params(GGML_OPT_ADAM); + opt->params = ggml_opt_default_params(GGML_OPT_TYPE_ADAM); opt->params.print_forward_graph = false; opt->params.print_backward_graph = false; opt->params.graph_size = LLAMA_TRAIN_MAX_NODES; diff --git a/examples/llama-bench/llama-bench.cpp b/examples/llama-bench/llama-bench.cpp index 11410f8ae..8fec3d43d 100644 --- a/examples/llama-bench/llama-bench.cpp +++ b/examples/llama-bench/llama-bench.cpp @@ -157,9 +157,9 @@ static const char * output_format_str(output_formats format) { static const char * split_mode_str(llama_split_mode mode) { switch (mode) { - case LLAMA_SPLIT_NONE: return "none"; - case LLAMA_SPLIT_LAYER: return "layer"; - case LLAMA_SPLIT_ROW: return "row"; + case LLAMA_SPLIT_MODE_NONE: return "none"; + case LLAMA_SPLIT_MODE_LAYER: return "layer"; + case LLAMA_SPLIT_MODE_ROW: return "row"; default: GGML_ASSERT(!"invalid split mode"); } } @@ -193,7 +193,7 @@ static const cmd_params cmd_params_defaults = { /* type_v */ {GGML_TYPE_F16}, /* n_threads */ {get_num_physical_cores()}, /* n_gpu_layers */ {99}, - /* split_mode */ {LLAMA_SPLIT_LAYER}, + /* split_mode */ {LLAMA_SPLIT_MODE_LAYER}, /* main_gpu */ {0}, /* no_kv_offload */ {false}, /* mul_mat_q */ {true}, @@ -358,11 +358,11 @@ static cmd_params parse_cmd_params(int argc, char ** argv) { for (const auto & m : p) { llama_split_mode mode; if (m == "none") { - mode = LLAMA_SPLIT_NONE; + mode = LLAMA_SPLIT_MODE_NONE; } else if (m == "layer") { - mode = LLAMA_SPLIT_LAYER; + mode = LLAMA_SPLIT_MODE_LAYER; } else if (m == "row") { - mode = LLAMA_SPLIT_ROW; + mode = LLAMA_SPLIT_MODE_ROW; } else { invalid_param = true; break; diff --git a/examples/llava/llava.cpp b/examples/llava/llava.cpp index 1a1cf7c78..980128166 100644 --- a/examples/llava/llava.cpp +++ b/examples/llava/llava.cpp @@ -152,7 +152,7 @@ static bool clip_llava_handle_patches(clip_ctx * ctx_clip, std::vector ggml_tensor * newline_tmp = clip_get_newline_tensor(ctx_clip); model.newline = ggml_new_tensor_1d(model.ctx, GGML_TYPE_F32, newline_tmp->ne[0]); - if (newline_tmp->backend != GGML_BACKEND_CPU) { + if (newline_tmp->backend != GGML_BACKEND_TYPE_CPU) { if (newline_tmp->buffer == NULL) { printf("newline_tmp tensor buffer is NULL\n"); } diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 19a8c1067..780862ef6 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2086,9 +2086,9 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, break; } std::string value(argv[i]); - /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_NONE; } - else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_LINEAR; } - else if (value == "yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_YARN; } + /**/ if (value == "none") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_NONE; } + else if (value == "linear") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_LINEAR; } + else if (value == 
"yarn") { params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_YARN; } else { invalid_param = true; break; } } else if (arg == "--rope-freq-base") @@ -2212,15 +2212,15 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, std::string arg_next = argv[i]; if (arg_next == "none") { - params.split_mode = LLAMA_SPLIT_NONE; + params.split_mode = LLAMA_SPLIT_MODE_NONE; } else if (arg_next == "layer") { - params.split_mode = LLAMA_SPLIT_LAYER; + params.split_mode = LLAMA_SPLIT_MODE_LAYER; } else if (arg_next == "row") { - params.split_mode = LLAMA_SPLIT_ROW; + params.split_mode = LLAMA_SPLIT_MODE_ROW; } else { invalid_param = true; @@ -2447,15 +2447,15 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, sep++; if (strncmp(sep, "int:", 4) == 0) { sep += 4; - kvo.tag = LLAMA_KV_OVERRIDE_INT; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_INT; kvo.int_value = std::atol(sep); } else if (strncmp(sep, "float:", 6) == 0) { sep += 6; - kvo.tag = LLAMA_KV_OVERRIDE_FLOAT; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_FLOAT; kvo.float_value = std::atof(sep); } else if (strncmp(sep, "bool:", 5) == 0) { sep += 5; - kvo.tag = LLAMA_KV_OVERRIDE_BOOL; + kvo.tag = LLAMA_KV_OVERRIDE_TYPE_BOOL; if (std::strcmp(sep, "true") == 0) { kvo.bool_value = true; } else if (std::strcmp(sep, "false") == 0) { diff --git a/examples/train-text-from-scratch/train-text-from-scratch.cpp b/examples/train-text-from-scratch/train-text-from-scratch.cpp index e78ab185d..7eafe8515 100644 --- a/examples/train-text-from-scratch/train-text-from-scratch.cpp +++ b/examples/train-text-from-scratch/train-text-from-scratch.cpp @@ -960,7 +960,7 @@ int main(int argc, char ** argv) { struct ggml_opt_context * opt = train->opt; // set opt params from command line - opt->params = ggml_opt_default_params(GGML_OPT_ADAM); + opt->params = ggml_opt_default_params(GGML_OPT_TYPE_ADAM); opt->params.print_forward_graph = false; opt->params.print_backward_graph = false; opt->params.graph_size = LLAMA_TRAIN_MAX_NODES; diff --git a/ggml-cuda.cu b/ggml-cuda.cu index 21c612cb7..fb6d4f7d2 100644 --- a/ggml-cuda.cu +++ b/ggml-cuda.cu @@ -6369,11 +6369,11 @@ static __global__ void k_argsort_f32_i32(const float * x, int * dst, const int n int ixj = col ^ j; if (ixj > col) { if ((col & k) == 0) { - if (order == GGML_SORT_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { + if (order == GGML_SORT_ORDER_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { swap(dst_row[col], dst_row[ixj]); } } else { - if (order == GGML_SORT_ASC ? x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { + if (order == GGML_SORT_ORDER_ASC ? 
x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { swap(dst_row[col], dst_row[ixj]); } } @@ -7927,10 +7927,10 @@ static void argsort_f32_i32_cuda(const float * x, int * dst, const int ncols, co const dim3 block_dims(ncols, 1, 1); const dim3 block_nums(1, nrows, 1); - if (order == GGML_SORT_ASC) { - k_argsort_f32_i32<<>>(x, dst, ncols); - } else if (order == GGML_SORT_DESC) { - k_argsort_f32_i32<<>>(x, dst, ncols); + if (order == GGML_SORT_ORDER_ASC) { + k_argsort_f32_i32<<>>(x, dst, ncols); + } else if (order == GGML_SORT_ORDER_DESC) { + k_argsort_f32_i32<<>>(x, dst, ncols); } else { GGML_ASSERT(false); } @@ -8362,11 +8362,11 @@ static cudaError_t ggml_cuda_cpy_tensor_2d( cudaMemcpyKind kind; char * src_ptr; - if (src->backend == GGML_BACKEND_CPU) { + if (src->backend == GGML_BACKEND_TYPE_CPU) { kind = cudaMemcpyHostToDevice; src_ptr = (char *) src->data; - } else if (src->backend == GGML_BACKEND_GPU || src->backend == GGML_BACKEND_GPU_SPLIT) { - GGML_ASSERT(src->backend != GGML_BACKEND_GPU_SPLIT || (i1_low == 0 && i1_high == src->ne[1])); + } else if (src->backend == GGML_BACKEND_TYPE_GPU || src->backend == GGML_BACKEND_TYPE_GPU_SPLIT) { + GGML_ASSERT(src->backend != GGML_BACKEND_TYPE_GPU_SPLIT || (i1_low == 0 && i1_high == src->ne[1])); kind = cudaMemcpyDeviceToDevice; ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src->extra; int id; @@ -8771,7 +8771,7 @@ static void ggml_cuda_op_mul_mat_q( // the main device has a larger memory buffer to hold the results from all GPUs // nrows_dst == nrows of the matrix that the kernel writes into - const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff; + const int64_t nrows_dst = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device ? ne0 : row_diff; switch (src0->type) { case GGML_TYPE_Q4_0: @@ -8920,7 +8920,7 @@ static void ggml_cuda_op_mul_mat_vec_q( // the main device has a larger memory buffer to hold the results from all GPUs // nrows_dst == nrows of the matrix that the kernel writes into - const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff; + const int64_t nrows_dst = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device ? ne0 : row_diff; switch (src0->type) { case GGML_TYPE_Q4_0: @@ -9096,7 +9096,7 @@ static void ggml_cuda_op_mul_mat_cublas( // the main device has a larger memory buffer to hold the results from all GPUs // ldc == nrows of the matrix that cuBLAS writes into - int ldc = dst->backend == GGML_BACKEND_GPU && id == g_main_device ? ne0 : row_diff; + int ldc = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device ? ne0 : row_diff; const int compute_capability = g_device_caps[id].cc; @@ -9444,7 +9444,7 @@ static void ggml_cuda_op_soft_max( const bool use_src2 = src2 != nullptr; if (use_src2) { - const bool src2_on_device = src2->backend == GGML_BACKEND_GPU; + const bool src2_on_device = src2->backend == GGML_BACKEND_TYPE_GPU; if (src2_on_device) { ggml_tensor_extra_gpu * src2_extra = (ggml_tensor_extra_gpu *) src2->extra; @@ -9502,16 +9502,16 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s const bool use_src1 = src1 != nullptr; const int64_t nrows1 = use_src1 ? 
ggml_nrows(src1) : 1; - GGML_ASSERT(!use_src1 || src1->backend != GGML_BACKEND_GPU_SPLIT); - GGML_ASSERT( dst->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(!use_src1 || src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT( dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * src1_extra = use_src1 ? (ggml_tensor_extra_gpu *) src1->extra : nullptr; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; - const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_GPU; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU; + const bool src0_on_device = src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; + const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_TYPE_GPU; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU; // dd = data device float * src0_ddf = nullptr; @@ -9555,7 +9555,7 @@ static void ggml_cuda_op_flatten(const ggml_tensor * src0, const ggml_tensor * s CUDA_CHECK(cudaMemcpyAsync(dst->data, dst_ddf, ggml_nbytes(dst), cudaMemcpyDeviceToHost, main_stream)); } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { CUDA_CHECK(cudaDeviceSynchronize()); } } @@ -9636,8 +9636,8 @@ static void ggml_cuda_op_mul_mat( const int nb2 = dst->nb[2]; const int nb3 = dst->nb[3]; - GGML_ASSERT(dst->backend != GGML_BACKEND_GPU_SPLIT); - GGML_ASSERT(src1->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT(src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src1->type == GGML_TYPE_F32 || (src1->ne[2] == 1 && src1->ne[3] == 1)); GGML_ASSERT(ne12 >= ne02 && ne12 % ne02 == 0); @@ -9653,20 +9653,20 @@ static void ggml_cuda_op_mul_mat( ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool src0_on_device = src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; const bool src0_is_contiguous = ggml_is_contiguous(src0); const bool src1_is_contiguous = ggml_is_contiguous(src1); const int64_t src1_padded_col_size = GGML_PAD(ne10, MATRIX_ROW_PADDING); - const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; GGML_ASSERT(!(split && ne02 > 1)); GGML_ASSERT(!(split && ne03 > 1)); GGML_ASSERT(!(split && ne02 < ne12)); std::array tensor_split; if (split) { - // TODO: check that src0->buffer->buft is a split buffer type, replace GGML_BACKEND_GPU_SPLIT check + // TODO: check that src0->buffer->buft is a split buffer type, replace GGML_BACKEND_TYPE_GPU_SPLIT check // GGML_ASSERT(src0->buffer != nullptr && src0->buffer->buft == ...); ggml_backend_cuda_split_buffer_type_context * buft_ctx = (ggml_backend_cuda_split_buffer_type_context *) src0->buffer->buft->context; tensor_split = buft_ctx->tensor_split; @@ -9724,8 +9724,8 @@ static void ggml_cuda_op_mul_mat( used_devices++; - const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; + const bool src1_on_device = src1->backend == 
GGML_BACKEND_TYPE_GPU && id == g_main_device; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device; ggml_cuda_set_device(id); cudaStream_t stream = g_cudaStreams[id][0]; @@ -9776,8 +9776,8 @@ static void ggml_cuda_op_mul_mat( continue; } - const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device; + const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device; const int64_t row_diff = dev[id].row_high - dev[id].row_low; ggml_cuda_set_device(id); @@ -9802,12 +9802,12 @@ static void ggml_cuda_op_mul_mat( // the main device memory buffer can be on VRAM scratch, with space for all partial results // in that case an offset on dst_ddf_i is needed - if (dst->backend == GGML_BACKEND_GPU && id == g_main_device) { + if (dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device) { dst_dd_i += dev[id].row_low; // offset is 0 if no tensor split } // copy src0, src1 to device if necessary - if (src1->backend == GGML_BACKEND_GPU && src1_is_contiguous) { + if (src1->backend == GGML_BACKEND_TYPE_GPU && src1_is_contiguous) { if (id != g_main_device) { if (convert_src1_to_q8_1) { char * src1_ddq_i_source = dev[g_main_device].src1_ddq + src1_ddq_i_offset; @@ -9820,14 +9820,14 @@ static void ggml_cuda_op_mul_mat( src1_ncols*ne10*sizeof(float), stream)); } } - } else if (src1->backend == GGML_BACKEND_CPU || (src1_on_device && !src1_is_contiguous)) { + } else if (src1->backend == GGML_BACKEND_TYPE_CPU || (src1_on_device && !src1_is_contiguous)) { CUDA_CHECK(ggml_cuda_cpy_tensor_2d( src1_ddf_i, src1, i03, i02, src1_col_0, src1_col_0+src1_ncols, stream)); } else { GGML_ASSERT(false); } - if (convert_src1_to_q8_1 && (src1->backend == GGML_BACKEND_CPU || !src1_is_contiguous)) { + if (convert_src1_to_q8_1 && (src1->backend == GGML_BACKEND_TYPE_CPU || !src1_is_contiguous)) { quantize_row_q8_1_cuda(src1_ddf_i, src1_ddq_i, ne10, src1_ncols, src1_padded_col_size, stream); CUDA_CHECK(cudaGetLastError()); } @@ -9845,10 +9845,10 @@ static void ggml_cuda_op_mul_mat( if (!dst_on_device) { void * dst_off_device; cudaMemcpyKind kind; - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { dst_off_device = dst->data; kind = cudaMemcpyDeviceToHost; - } else if (dst->backend == GGML_BACKEND_GPU) { + } else if (dst->backend == GGML_BACKEND_TYPE_GPU) { dst_off_device = dst_extra->data_device[g_main_device]; kind = cudaMemcpyDeviceToDevice; } else { @@ -9913,7 +9913,7 @@ static void ggml_cuda_op_mul_mat( } } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { ggml_cuda_set_device(g_main_device); CUDA_CHECK(cudaDeviceSynchronize()); } @@ -10019,7 +10019,7 @@ GGML_CALL bool ggml_cuda_can_mul_mat(const struct ggml_tensor * src0, const stru static void ggml_cuda_mul_mat_vec_p021(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst){ GGML_ASSERT(ggml_is_permuted(src0) && ggml_is_permuted(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->nb[0] <= src0->nb[1] && src0->nb[2] <= src0->nb[3]); // 0213 permutation GGML_ASSERT(src1->nb[0] <= src1->nb[1] && src1->nb[2] <= src1->nb[3]); // 0213 permutation GGML_ASSERT(src0->type == GGML_TYPE_F16); @@ -10050,7 +10050,7 @@ static void 
ggml_cuda_mul_mat_vec_nc(const ggml_tensor * src0, const ggml_tensor GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); GGML_ASSERT(!ggml_is_permuted(src0)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -10109,7 +10109,7 @@ static void ggml_cuda_mul_mat_batched_cublas(const ggml_tensor * src0, const ggm GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_TENSOR_BINARY_OP_LOCALS @@ -10255,11 +10255,11 @@ static void ggml_cuda_mul_mat_batched_cublas(const ggml_tensor * src0, const ggm static void ggml_cuda_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { const bool all_on_device = - (src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT) && - (src1->backend == GGML_BACKEND_GPU) && - ( dst->backend == GGML_BACKEND_GPU); + (src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT) && + (src1->backend == GGML_BACKEND_TYPE_GPU) && + ( dst->backend == GGML_BACKEND_TYPE_GPU); - const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; int64_t min_compute_capability = INT_MAX; @@ -10409,7 +10409,7 @@ static void ggml_cuda_mul_mat_id_cublas(ggml_tensor * dst) { GGML_ASSERT(!ggml_is_transposed(src00)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src00->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src00->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src1->type == GGML_TYPE_F32); const int64_t ne00 = src00->ne[0]; GGML_UNUSED(ne00); @@ -10553,7 +10553,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s cudaStream_t stream = g_cudaStreams[g_main_device][0]; - if (ids->backend == GGML_BACKEND_GPU) { + if (ids->backend == GGML_BACKEND_TYPE_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device]; CUDA_CHECK(cudaMemcpyAsync(ids_host.data(), ids_dev, ggml_nbytes(ids), cudaMemcpyDeviceToHost, stream)); CUDA_CHECK(cudaStreamSynchronize(stream)); @@ -10570,20 +10570,20 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s ggml_tensor src1_row = *src1; ggml_tensor dst_row = *dst; - src1_row.backend = GGML_BACKEND_GPU; - dst_row.backend = GGML_BACKEND_GPU; + src1_row.backend = GGML_BACKEND_TYPE_GPU; + dst_row.backend = GGML_BACKEND_TYPE_GPU; src1_row.extra = &src1_row_extra; dst_row.extra = &dst_row_extra; - char * src1_original = src1->backend == GGML_BACKEND_CPU ? + char * src1_original = src1->backend == GGML_BACKEND_TYPE_CPU ? (char *) src1->data : (char *) src1_extra->data_device[g_main_device]; - char * dst_original = dst->backend == GGML_BACKEND_CPU ? + char * dst_original = dst->backend == GGML_BACKEND_TYPE_CPU ? 
(char *) dst->data : (char *) dst_extra->data_device[g_main_device]; if (src1->ne[1] == 1) { - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); - GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(dst->backend == GGML_BACKEND_TYPE_GPU); for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { //int32_t row_id; @@ -10611,9 +10611,9 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s src1_row_extra.data_device[g_main_device] = src1_contiguous.get(); dst_row_extra.data_device[g_main_device] = dst_contiguous.get(); - const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_CPU ? + const cudaMemcpyKind src1_kind = src1->backend == GGML_BACKEND_TYPE_CPU ? cudaMemcpyHostToDevice : cudaMemcpyDeviceToDevice; - const cudaMemcpyKind dst_kind = dst->backend == GGML_BACKEND_CPU ? + const cudaMemcpyKind dst_kind = dst->backend == GGML_BACKEND_TYPE_CPU ? cudaMemcpyDeviceToHost : cudaMemcpyDeviceToDevice; for (int32_t row_id = 0; row_id < n_as; ++row_id) { @@ -10668,7 +10668,7 @@ static void ggml_cuda_mul_mat_id(const ggml_tensor * src0, const ggml_tensor * s } } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { CUDA_CHECK(cudaStreamSynchronize(stream)); } } @@ -10685,8 +10685,8 @@ static void ggml_cuda_cpy(const ggml_tensor * src0, const ggml_tensor * src1, gg const int64_t ne = ggml_nelements(src0); GGML_ASSERT(ne == ggml_nelements(src1)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(ggml_nbytes(src0) <= INT_MAX); GGML_ASSERT(ggml_nbytes(src1) <= INT_MAX); @@ -10817,9 +10817,9 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st if (!g_cublas_loaded) return false; ggml_cuda_func_t func; - const bool any_on_device = tensor->backend == GGML_BACKEND_GPU - || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); + const bool any_on_device = tensor->backend == GGML_BACKEND_TYPE_GPU + || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_TYPE_GPU); if (!any_on_device && tensor->op != GGML_OP_MUL_MAT && tensor->op != GGML_OP_MUL_MAT_ID) { return false; @@ -10966,14 +10966,14 @@ GGML_CALL bool ggml_cuda_compute_forward(struct ggml_compute_params * params, st return false; } - if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT) { + if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT) { ggml_cuda_set_peer_access(tensor->src[1]->ne[1]); } if (params->ith != 0) { return true; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return true; } func(tensor->src[0], tensor->src[1], tensor); @@ -11072,7 +11072,7 @@ GGML_CALL static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t extra->data_device[ctx->device] = tensor->data; - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; tensor->extra = extra; if (ggml_is_quantized(tensor->type)) { @@ 
-11087,7 +11087,7 @@ GGML_CALL static void ggml_backend_cuda_buffer_init_tensor(ggml_backend_buffer_t } GGML_CALL static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -11098,7 +11098,7 @@ GGML_CALL static void ggml_backend_cuda_buffer_set_tensor(ggml_backend_buffer_t } GGML_CALL static void ggml_backend_cuda_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor * tensor, void * data, size_t offset, size_t size) { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_cuda_buffer_context * ctx = (ggml_backend_cuda_buffer_context *)buffer->context; @@ -11333,7 +11333,7 @@ GGML_CALL static void ggml_backend_cuda_split_buffer_init_tensor(ggml_backend_bu CUDA_CHECK(cudaEventCreateWithFlags(&extra->events[id][is], cudaEventDisableTiming)); } } - tensor->backend = GGML_BACKEND_GPU_SPLIT; + tensor->backend = GGML_BACKEND_TYPE_GPU_SPLIT; tensor->extra = extra; } @@ -11605,7 +11605,7 @@ GGML_CALL static void ggml_backend_cuda_set_tensor_async(ggml_backend_t backend, ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); CUDA_CHECK(cudaMemcpyAsync((char *)tensor->data + offset, data, size, cudaMemcpyHostToDevice, g_cudaStreams[cuda_ctx->device][0])); } @@ -11614,7 +11614,7 @@ GGML_CALL static void ggml_backend_cuda_get_tensor_async(ggml_backend_t backend, ggml_backend_cuda_context * cuda_ctx = (ggml_backend_cuda_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); CUDA_CHECK(cudaMemcpyAsync(data, (const char *)tensor->data + offset, size, cudaMemcpyDeviceToHost, g_cudaStreams[cuda_ctx->device][0])); } @@ -11644,7 +11644,7 @@ GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, gg ggml_cuda_set_main_device(cuda_ctx->device); ggml_compute_params params = {}; - params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; params.ith = 0; for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = cgraph->nodes[i]; @@ -11654,13 +11654,13 @@ GGML_CALL static bool ggml_backend_cuda_graph_compute(ggml_backend_t backend, gg } #ifndef NDEBUG - assert(node->backend == GGML_BACKEND_GPU || node->backend == GGML_BACKEND_GPU_SPLIT); + assert(node->backend == GGML_BACKEND_TYPE_GPU || node->backend == GGML_BACKEND_TYPE_GPU_SPLIT); assert(node->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device)); assert(node->extra != nullptr); for (int j = 0; j < GGML_MAX_SRC; j++) { if (node->src[j] != nullptr) { - assert(node->src[j]->backend == GGML_BACKEND_GPU || node->src[j]->backend == GGML_BACKEND_GPU_SPLIT); + assert(node->src[j]->backend == GGML_BACKEND_TYPE_GPU || node->src[j]->backend == GGML_BACKEND_TYPE_GPU_SPLIT); assert(node->src[j]->buffer->buft == ggml_backend_cuda_buffer_type(cuda_ctx->device) || 
ggml_backend_buffer_is_cuda_split(node->src[j]->buffer)); assert(node->src[j]->extra != nullptr); } diff --git a/ggml-metal.m b/ggml-metal.m index ee584cfa7..3d6b01263 100644 --- a/ggml-metal.m +++ b/ggml-metal.m @@ -2262,8 +2262,8 @@ static bool ggml_metal_graph_compute( id pipeline = nil; switch (order) { - case GGML_SORT_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; - case GGML_SORT_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; + case GGML_SORT_ORDER_ASC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_ASC].pipeline; break; + case GGML_SORT_ORDER_DESC: pipeline = ctx->kernels[GGML_METAL_KERNEL_TYPE_ARGSORT_F32_I32_DESC].pipeline; break; default: GGML_ASSERT(false); }; diff --git a/ggml-opencl.cpp b/ggml-opencl.cpp index 797bee667..df619a884 100644 --- a/ggml-opencl.cpp +++ b/ggml-opencl.cpp @@ -1354,7 +1354,7 @@ static void ggml_cl_pool_free(cl_mem mem, size_t size) { } void ggml_cl_free_data(const struct ggml_tensor* tensor) { - if (tensor->backend != GGML_BACKEND_GPU) { + if (tensor->backend != GGML_BACKEND_TYPE_GPU) { return; } @@ -1412,7 +1412,7 @@ static cl_int ggml_cl_h2d_tensor_2d(cl_command_queue queue, cl_mem dst, size_t o } static void ggml_cl_mul_f32(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); const int64_t ne00 = src0->ne[0]; const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; @@ -1476,7 +1476,7 @@ void ggml_cl_mul(const struct ggml_tensor * src0, const struct ggml_tensor * src } static void ggml_cl_add_f32(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); const int64_t ne00 = src0->ne[0]; const int64_t ne01 = src0->ne[1]; const int64_t ne02 = src0->ne[2]; @@ -1566,13 +1566,13 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr size_t y_size; size_t d_size; cl_mem d_X; - if (src0->backend == GGML_BACKEND_GPU) { // NOLINT + if (src0->backend == GGML_BACKEND_TYPE_GPU) { // NOLINT d_X = (cl_mem) src0->extra; } else { d_X = ggml_cl_pool_malloc(sizeof(float) * x_ne, &x_size); } - cl_mem d_Y = src1->backend == GGML_BACKEND_GPU ? (cl_mem) src1->extra : ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); - cl_mem d_D = dst->backend == GGML_BACKEND_GPU ? (cl_mem) dst->extra : ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); + cl_mem d_Y = src1->backend == GGML_BACKEND_TYPE_GPU ? (cl_mem) src1->extra : ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); + cl_mem d_D = dst->backend == GGML_BACKEND_TYPE_GPU ? 
(cl_mem) dst->extra : ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); size_t x_offset = 0; @@ -1580,7 +1580,7 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr // TODO: copy src0 here when r3>1 for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { for (int64_t i02 = 0; i02 < ne02; i02++) { - if (src0->backend == GGML_BACKEND_GPU) { + if (src0->backend == GGML_BACKEND_TYPE_GPU) { x_offset = (i03 * ne02 + i02) * x_ne; } else { // copy src0 to device @@ -1589,7 +1589,7 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr for (int64_t i12 = i02 * r2, e12 = i12 + r2; i12 < e12; i12++) { // copy src1 to device - if (src1->backend == GGML_BACKEND_CPU) { + if (src1->backend == GGML_BACKEND_TYPE_CPU) { CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Y, 0, src1, i13, i12, NULL)); } @@ -1612,7 +1612,7 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr } // copy dst to host - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(float) * d_ne, d, 1, &ev_sgemm, NULL)); } @@ -1621,13 +1621,13 @@ static void ggml_cl_mul_mat_f32(const ggml_tensor * src0, const ggml_tensor * sr } } - if (src0->backend != GGML_BACKEND_GPU) { + if (src0->backend != GGML_BACKEND_TYPE_GPU) { ggml_cl_pool_free(d_X, x_size); } - if (src1->backend != GGML_BACKEND_GPU) { + if (src1->backend != GGML_BACKEND_TYPE_GPU) { ggml_cl_pool_free(d_Y, y_size); } - if (dst->backend != GGML_BACKEND_GPU) { + if (dst->backend != GGML_BACKEND_TYPE_GPU) { ggml_cl_pool_free(d_D, d_size); } } @@ -1670,7 +1670,7 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr size_t y_size; size_t d_size; cl_mem d_X; - if (src0->backend == GGML_BACKEND_GPU) { // NOLINT + if (src0->backend == GGML_BACKEND_TYPE_GPU) { // NOLINT d_X = (cl_mem) src0->extra; } else { d_X = ggml_cl_pool_malloc(sizeof(ggml_fp16_t) * x_ne, &x_size); @@ -1687,7 +1687,7 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr // TODO: copy src0 here when r3>1 for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { for (int64_t i02 = 0; i02 < ne02; i02++) { - if (src0->backend == GGML_BACKEND_GPU) { + if (src0->backend == GGML_BACKEND_TYPE_GPU) { x_offset = (i03 * ne02 + i02) * x_ne; } else { // copy src0 to device @@ -1741,7 +1741,7 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr } // copy dst to host, then convert to float - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { CL_CHECK(clEnqueueReadBuffer(queue, d_D, true, 0, sizeof(ggml_fp16_t) * d_ne, tmp, 1, &ev_sgemm, NULL)); float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); ggml_fp16_to_fp32_row(tmp, d, d_ne); @@ -1753,7 +1753,7 @@ static void ggml_cl_mul_mat_f16(const ggml_tensor * src0, const ggml_tensor * sr } } - if (src0->backend != GGML_BACKEND_GPU) { + if (src0->backend != GGML_BACKEND_TYPE_GPU) { ggml_cl_pool_free(d_X, x_size); } ggml_cl_pool_free(d_Y, y_size); @@ -1798,7 +1798,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * cl_mem d_Y = ggml_cl_pool_malloc(sizeof(float) * y_ne, &y_size); cl_mem d_D = ggml_cl_pool_malloc(sizeof(float) * d_ne, &d_size); cl_mem d_Q; - if (src0->backend == GGML_BACKEND_CPU) { + if (src0->backend == GGML_BACKEND_TYPE_CPU) { d_Q = ggml_cl_pool_malloc(q_sz, 
&q_size); } @@ -1817,10 +1817,10 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * for (int64_t i13 = i03 * r3, e13 = i13 + r3; i13 < e13; i13++) { for (int64_t i02 = 0; i02 < ne02; i02++) { // copy src0 to device if necessary - if (src0->backend == GGML_BACKEND_CPU) { + if (src0->backend == GGML_BACKEND_TYPE_CPU) { events.emplace_back(); CL_CHECK(ggml_cl_h2d_tensor_2d(queue, d_Q, 0, src0, i03, i02, events.data() + ev_idx++)); - } else if (src0->backend == GGML_BACKEND_GPU) { + } else if (src0->backend == GGML_BACKEND_TYPE_GPU) { d_Q = (cl_mem) src0->extra; } else { GGML_ASSERT(false); @@ -1829,7 +1829,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * if (!mul_mat_vec) { // convert src0 to fp32 on device const size_t global = x_ne / global_denom; - const size_t offset = src0->backend == GGML_BACKEND_GPU ? (i03 * ne02 + i02) * x_bps : 0; + const size_t offset = src0->backend == GGML_BACKEND_TYPE_GPU ? (i03 * ne02 + i02) * x_bps : 0; CL_CHECK(clSetKernelArg(*to_fp32_cl, 0, sizeof(cl_mem), &d_Q)); CL_CHECK(clSetKernelArg(*to_fp32_cl, 1, sizeof(cl_mem), &d_X)); CL_CHECK(clEnqueueNDRangeKernel(queue, *to_fp32_cl, 1, &offset, &global, local > 0 ? &local : NULL, events.size(), !events.empty() ? events.data() : NULL, NULL)); @@ -1843,7 +1843,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * // compute const size_t global = ne01 * local; - const size_t offset = src0->backend == GGML_BACKEND_GPU ? (i03 * ne02 + i02) * x_bps : 0; + const size_t offset = src0->backend == GGML_BACKEND_TYPE_GPU ? (i03 * ne02 + i02) * x_bps : 0; const cl_int ncols = ne00; events.emplace_back(); CL_CHECK(clSetKernelArg(*dmmv, 0, sizeof(cl_mem), &d_Q)); @@ -1895,7 +1895,7 @@ static void ggml_cl_mul_mat_q_f32(const ggml_tensor * src0, const ggml_tensor * } ggml_cl_pool_free(d_Y, y_size); ggml_cl_pool_free(d_D, d_size); - if (src0->backend == GGML_BACKEND_CPU) { + if (src0->backend == GGML_BACKEND_TYPE_CPU) { ggml_cl_pool_free(d_Q, q_size); } } @@ -1911,7 +1911,7 @@ bool ggml_cl_can_mul_mat(const struct ggml_tensor * src0, const struct ggml_tens if ((src0->type == GGML_TYPE_F32 || src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && src1->type == GGML_TYPE_F32 && dst->type == GGML_TYPE_F32 && - ((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_GPU)) { + ((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_TYPE_GPU)) { return true; } @@ -1993,7 +1993,7 @@ void ggml_cl_transform_tensor(void * data, ggml_tensor * tensor) { CL_CHECK(clFinish(queue)); tensor->extra = dst; - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); } // ggml-backend @@ -2045,7 +2045,7 @@ static void ggml_backend_opencl_buffer_init_tensor(ggml_backend_buffer_t buffer, ctx->sub_buffers.push_back(sub_buffer); tensor->extra = sub_buffer; } - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; } static void ggml_backend_opencl_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor * tensor, const void * data, size_t offset, size_t size) { diff --git a/ggml-sycl.cpp b/ggml-sycl.cpp index b897828f9..c6c3c6e6f 100644 --- a/ggml-sycl.cpp +++ b/ggml-sycl.cpp @@ -3338,7 +3338,7 @@ void print_ggml_tensor(const char*name, struct ggml_tensor *src){ size_t total_elements = ggml_nelements(src); - const bool src_on_device = src->backend == GGML_BACKEND_GPU || src->backend == GGML_BACKEND_GPU_SPLIT; + const bool src_on_device = 
src->backend == GGML_BACKEND_TYPE_GPU || src->backend == GGML_BACKEND_TYPE_GPU_SPLIT; float *src_data =NULL; if(src_on_device) { ggml_tensor_extra_gpu * src_extra = (ggml_tensor_extra_gpu *) src->extra; @@ -8086,11 +8086,11 @@ static void k_argsort_f32_i32(const float * x, int * dst, const int ncols, int ixj = col ^ j; if (ixj > col) { if ((col & k) == 0) { - if (order == GGML_SORT_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { + if (order == GGML_SORT_ORDER_ASC ? x_row[dst_row[col]] > x_row[dst_row[ixj]] : x_row[dst_row[col]] < x_row[dst_row[ixj]]) { swap(dst_row[col], dst_row[ixj]); } } else { - if (order == GGML_SORT_ASC ? x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { + if (order == GGML_SORT_ORDER_ASC ? x_row[dst_row[col]] < x_row[dst_row[ixj]] : x_row[dst_row[col]] > x_row[dst_row[ixj]]) { swap(dst_row[col], dst_row[ixj]); } } @@ -10825,7 +10825,7 @@ static void argsort_f32_i32_sycl(const float *x, int *dst, const int ncols, const sycl::range<3> block_dims(1, 1, ncols); const sycl::range<3> block_nums(1, nrows, 1); - if (order == GGML_SORT_ASC) { + if (order == GGML_SORT_ORDER_ASC) { /* DPCT1049:44: The work-group size passed to the SYCL kernel may exceed the limit. To get the device limit, query @@ -10834,9 +10834,9 @@ static void argsort_f32_i32_sycl(const float *x, int *dst, const int ncols, stream->parallel_for( sycl::nd_range<3>(block_nums * block_dims, block_dims), [=](sycl::nd_item<3> item_ct1) { - k_argsort_f32_i32(x, dst, ncols, item_ct1); + k_argsort_f32_i32(x, dst, ncols, item_ct1); }); - } else if (order == GGML_SORT_DESC) { + } else if (order == GGML_SORT_ORDER_DESC) { /* DPCT1049:45: The work-group size passed to the SYCL kernel may exceed the limit. To get the device limit, query @@ -10845,7 +10845,7 @@ static void argsort_f32_i32_sycl(const float *x, int *dst, const int ncols, stream->parallel_for( sycl::nd_range<3>(block_nums * block_dims, block_dims), [=](sycl::nd_item<3> item_ct1) { - k_argsort_f32_i32(x, dst, ncols, item_ct1); + k_argsort_f32_i32(x, dst, ncols, item_ct1); }); } else { GGML_ASSERT(false); @@ -11407,12 +11407,12 @@ static dpct::err0 ggml_sycl_cpy_tensor_2d(void *dst, dpct::memcpy_direction kind; char * src_ptr; - if (src->backend == GGML_BACKEND_CPU) { + if (src->backend == GGML_BACKEND_TYPE_CPU) { kind = dpct::host_to_device; src_ptr = (char *) src->data; - // GGML_SYCL_DEBUG("ggml_sycl_cpy_tensor_2d GGML_BACKEND_CPU src_ptr %p\n", src_ptr); - } else if (src->backend == GGML_BACKEND_GPU || src->backend == GGML_BACKEND_GPU_SPLIT) { - GGML_ASSERT(src->backend != GGML_BACKEND_GPU_SPLIT || (i1_low == 0 && i1_high == src->ne[1])); + // GGML_SYCL_DEBUG("ggml_sycl_cpy_tensor_2d GGML_BACKEND_TYPE_CPU src_ptr %p\n", src_ptr); + } else if (src->backend == GGML_BACKEND_TYPE_GPU || src->backend == GGML_BACKEND_TYPE_GPU_SPLIT) { + GGML_ASSERT(src->backend != GGML_BACKEND_TYPE_GPU_SPLIT || (i1_low == 0 && i1_high == src->ne[1])); kind = dpct::device_to_device; ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src->extra; int id; @@ -11846,7 +11846,7 @@ inline void ggml_sycl_op_mul_mat_q( // the main device has a larger memory buffer to hold the results from all GPUs // nrows_dst == nrows of the matrix that the dequantize_mul_mat kernel writes into - const int64_t nrows_dst = dst->backend == GGML_BACKEND_GPU && device_id == g_main_device ? ne0 : row_diff; + const int64_t nrows_dst = dst->backend == GGML_BACKEND_TYPE_GPU && device_id == g_main_device ? 
ne0 : row_diff; switch (src0->type) { case GGML_TYPE_Q4_0: @@ -12119,7 +12119,7 @@ inline void ggml_sycl_op_mul_mat_sycl( // the main device has a larger memory buffer to hold the results from all GPUs // ldc == nrows of the matrix that cuBLAS writes into - int ldc = dst->backend == GGML_BACKEND_GPU && device_id == g_main_device ? ne0 : row_diff; + int ldc = dst->backend == GGML_BACKEND_TYPE_GPU && device_id == g_main_device ? ne0 : row_diff; #ifdef GGML_SYCL_F16 bool use_fp16 = true; // TODO(Yu) SYCL capability check @@ -12501,16 +12501,16 @@ static void ggml_sycl_op_flatten(const ggml_tensor *src0, const bool use_src1 = src1 != nullptr; const int64_t nrows1 = use_src1 ? ggml_nrows(src1) : 1; - GGML_ASSERT(!use_src1 || src1->backend != GGML_BACKEND_GPU_SPLIT); - GGML_ASSERT( dst->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(!use_src1 || src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT( dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu *) src0->extra; ggml_tensor_extra_gpu * src1_extra = use_src1 ? (ggml_tensor_extra_gpu *) src1->extra : nullptr; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; - const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_GPU; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU; + const bool src0_on_device = src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; + const bool src1_on_device = use_src1 && src1->backend == GGML_BACKEND_TYPE_GPU; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU; // dd = data device float * src0_ddf = nullptr; @@ -12565,7 +12565,7 @@ static void ggml_sycl_op_flatten(const ggml_tensor *src0, main_stream->memcpy(dst->data, dst_ddf, ggml_nbytes(dst)))); } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { SYCL_CHECK(CHECK_TRY_ERROR( dpct::get_current_device().queues_wait_and_throw())); } @@ -12640,8 +12640,8 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, const int nb2 = dst->nb[2]; const int nb3 = dst->nb[3]; - GGML_ASSERT(dst->backend != GGML_BACKEND_GPU_SPLIT); - GGML_ASSERT(src1->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(dst->backend != GGML_BACKEND_TYPE_GPU_SPLIT); + GGML_ASSERT(src1->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(ne12 >= ne02 && ne12 % ne02 == 0); @@ -12656,13 +12656,13 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, ggml_tensor_extra_gpu * src1_extra = (ggml_tensor_extra_gpu *) src1->extra; ggml_tensor_extra_gpu * dst_extra = (ggml_tensor_extra_gpu *) dst->extra; - const bool src0_on_device = src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool src0_on_device = src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; const bool src0_is_contiguous = ggml_is_contiguous(src0); const bool src1_is_contiguous = ggml_is_contiguous(src1); int64_t src1_padded_col_size = GGML_PAD(ne10, MATRIX_ROW_PADDING); - const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; GGML_ASSERT(!(split && ne02 > 1)); GGML_ASSERT(!(split && ne03 > 1)); GGML_ASSERT(!(split && ne02 < ne12)); @@ -12717,8 +12717,8 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, used_devices++; - const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && 
id == g_main_device_index; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device_index; + const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; ggml_sycl_set_device(get_device_id_by_index(id)); const dpct::queue_ptr stream = g_syclStreams[id][0]; @@ -12782,8 +12782,8 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, continue; } - const bool src1_on_device = src1->backend == GGML_BACKEND_GPU && id == g_main_device_index; - const bool dst_on_device = dst->backend == GGML_BACKEND_GPU && id == g_main_device_index; + const bool src1_on_device = src1->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; + const bool dst_on_device = dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index; const int64_t row_diff = row_high[id] - row_low[id]; ggml_sycl_set_device(get_device_id_by_index(id)); @@ -12809,12 +12809,12 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, // the main device memory buffer can be on VRAM scratch, with space for all partial results // in that case an offset on dst_ddf_i is needed - if (dst->backend == GGML_BACKEND_GPU && id == g_main_device_index) { + if (dst->backend == GGML_BACKEND_TYPE_GPU && id == g_main_device_index) { dst_dd_i += row_low[id]; // offset is 0 if no tensor split } // copy src0, src1 to device if necessary - if (src1->backend == GGML_BACKEND_GPU && src1_is_contiguous) { + if (src1->backend == GGML_BACKEND_TYPE_GPU && src1_is_contiguous) { if (id != g_main_device_index) { if (convert_src1_to_q8_1) { char * src1_ddq_i_source = src1_ddq[g_main_device_index] + src1_ddq_i_offset; @@ -12830,14 +12830,14 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, src1_ncols * ne10 * sizeof(float)))); } } - } else if (src1->backend == GGML_BACKEND_CPU || (src1_on_device && !src1_is_contiguous)) { + } else if (src1->backend == GGML_BACKEND_TYPE_CPU || (src1_on_device && !src1_is_contiguous)) { SYCL_CHECK(ggml_sycl_cpy_tensor_2d( src1_ddf_i, src1, i03, i02, src1_col_0, src1_col_0+src1_ncols, stream)); } else { GGML_ASSERT(false); } - if (convert_src1_to_q8_1 && (src1->backend == GGML_BACKEND_CPU || !src1_is_contiguous)) { + if (convert_src1_to_q8_1 && (src1->backend == GGML_BACKEND_TYPE_CPU || !src1_is_contiguous)) { quantize_row_q8_1_sycl(src1_ddf_i, src1_ddq_i, ne10, src1_ncols, src1_padded_col_size, stream); /* DPCT1010:92: SYCL uses exceptions to report errors and does @@ -12867,10 +12867,10 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, if (!dst_on_device) { void * dst_off_device; dpct::memcpy_direction kind; - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { dst_off_device = dst->data; kind = dpct::device_to_host; - } else if (dst->backend == GGML_BACKEND_GPU) { + } else if (dst->backend == GGML_BACKEND_TYPE_GPU) { dst_off_device = dst_extra->data_device[g_main_device_index]; kind = dpct::device_to_device; } else { @@ -12954,7 +12954,7 @@ static void ggml_sycl_op_mul_mat(const ggml_tensor *src0, } } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { SYCL_CHECK(ggml_sycl_set_device(g_main_device)); SYCL_CHECK(CHECK_TRY_ERROR( dpct::get_current_device().queues_wait_and_throw())); @@ -13091,7 +13091,7 @@ static void ggml_sycl_mul_mat_vec_p021(const ggml_tensor *src0, const ggml_tensor *src1, ggml_tensor *dst) try { GGML_ASSERT(ggml_is_permuted(src0) && 
ggml_is_permuted(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->nb[0] <= src0->nb[1] && src0->nb[2] <= src0->nb[3]); // 0213 permutation GGML_ASSERT(src1->nb[0] <= src1->nb[1] && src1->nb[2] <= src1->nb[3]); // 0213 permutation GGML_ASSERT(src0->type == GGML_TYPE_F16); @@ -13129,7 +13129,7 @@ static void ggml_sycl_mul_mat_vec_nc(const ggml_tensor *src0, GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); GGML_ASSERT(!ggml_is_permuted(src0)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -13196,7 +13196,7 @@ static void ggml_sycl_mul_mat_mat_batched_sycl(const ggml_tensor *src0, GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src0->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src0->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -13372,11 +13372,11 @@ catch (sycl::exception const &exc) { static void ggml_sycl_mul_mat(const ggml_tensor * src0, const ggml_tensor * src1, ggml_tensor * dst) { const bool all_on_device = - (src0->backend == GGML_BACKEND_GPU || src0->backend == GGML_BACKEND_GPU_SPLIT) && - (src1->backend == GGML_BACKEND_GPU) && - ( dst->backend == GGML_BACKEND_GPU); + (src0->backend == GGML_BACKEND_TYPE_GPU || src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT) && + (src1->backend == GGML_BACKEND_TYPE_GPU) && + ( dst->backend == GGML_BACKEND_TYPE_GPU); - const bool split = src0->backend == GGML_BACKEND_GPU_SPLIT; + const bool split = src0->backend == GGML_BACKEND_TYPE_GPU_SPLIT; int64_t min_compute_capability = INT_MAX; for (int64_t id = 0; id < g_device_count; ++id) { @@ -13505,7 +13505,7 @@ static void ggml_sycl_mul_mat_id_sycl(ggml_tensor * dst) { GGML_ASSERT(!ggml_is_transposed(src00)); GGML_ASSERT(!ggml_is_transposed(src1)); - GGML_ASSERT(src00->backend != GGML_BACKEND_GPU_SPLIT); + GGML_ASSERT(src00->backend != GGML_BACKEND_TYPE_GPU_SPLIT); GGML_ASSERT(src1->type == GGML_TYPE_F32); GGML_TENSOR_LOCALS(int64_t, ne0, src00, ne); @@ -13643,7 +13643,7 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; - if (ids->backend == GGML_BACKEND_GPU) { + if (ids->backend == GGML_BACKEND_TYPE_GPU) { const char * ids_dev = (const char *)((const ggml_tensor_extra_gpu *)ids->extra)->data_device[g_main_device_index]; SYCL_CHECK(CHECK_TRY_ERROR( stream->memcpy(ids_host.data(), ids_dev, ggml_nbytes(ids)))); @@ -13661,20 +13661,20 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, ggml_tensor src1_row = *src1; ggml_tensor dst_row = *dst; - src1_row.backend = GGML_BACKEND_GPU; - dst_row.backend = GGML_BACKEND_GPU; + src1_row.backend = GGML_BACKEND_TYPE_GPU; + dst_row.backend = GGML_BACKEND_TYPE_GPU; src1_row.extra = &src1_row_extra; dst_row.extra = &dst_row_extra; - char * src1_original = src1->backend == GGML_BACKEND_CPU ? + char * src1_original = src1->backend == GGML_BACKEND_TYPE_CPU ? (char *) src1->data : (char *) src1_extra->data_device[g_main_device_index]; - char * dst_original = dst->backend == GGML_BACKEND_CPU ? + char * dst_original = dst->backend == GGML_BACKEND_TYPE_CPU ? 
(char *) dst->data : (char *) dst_extra->data_device[g_main_device_index]; if (src1->ne[1] == 1) { - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); - GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(dst->backend == GGML_BACKEND_TYPE_GPU); for (int64_t i01 = 0; i01 < ids->ne[1]; i01++) { //int32_t row_id; @@ -13756,7 +13756,7 @@ static void ggml_sycl_mul_mat_id(const ggml_tensor *src0, } } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { SYCL_CHECK(CHECK_TRY_ERROR(stream->wait())); } } @@ -13779,8 +13779,8 @@ static void ggml_sycl_cpy(const ggml_tensor *src0, const ggml_tensor *src1, const int64_t ne = ggml_nelements(src0); GGML_ASSERT(ne == ggml_nelements(src1)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); - GGML_ASSERT(src1->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(src1->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(ggml_nbytes(src0) <= INT_MAX); GGML_ASSERT(ggml_nbytes(src1) <= INT_MAX); @@ -13887,17 +13887,17 @@ void ggml_sycl_transform_tensor(void *data, struct ggml_tensor *tensor) try { memset(extra, 0, sizeof(*extra)); for (int64_t id = 0; id < g_device_count; ++id) { - if (backend == GGML_BACKEND_GPU && id != g_main_device_index) { + if (backend == GGML_BACKEND_TYPE_GPU && id != g_main_device_index) { continue; } ggml_sycl_set_device(get_device_id_by_index(id)); const dpct::queue_ptr stream = g_syclStreams[id][0]; int64_t row_low, row_high; - if (backend == GGML_BACKEND_GPU) { + if (backend == GGML_BACKEND_TYPE_GPU) { row_low = 0; row_high = nrows; - } else if (backend == GGML_BACKEND_GPU_SPLIT) { + } else if (backend == GGML_BACKEND_TYPE_GPU_SPLIT) { const int64_t rounding = get_row_rounding(tensor->type); row_low = id == 0 ? 
0 : nrows*g_tensor_split[id]; @@ -13946,7 +13946,7 @@ void ggml_sycl_transform_tensor(void *data, struct ggml_tensor *tensor) try { extra->data_device[id] = buf; - if (backend == GGML_BACKEND_GPU_SPLIT) { + if (backend == GGML_BACKEND_TYPE_GPU_SPLIT) { for (int64_t is = 0; is < MAX_STREAMS; ++is) { SYCL_CHECK(CHECK_TRY_ERROR(extra->events[id][is] = new sycl::event())); @@ -13963,7 +13963,7 @@ catch (sycl::exception const &exc) { } void ggml_sycl_free_data(struct ggml_tensor *tensor) try { - if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_GPU && tensor->backend != GGML_BACKEND_GPU_SPLIT) ) { + if (!tensor || !tensor->extra || (tensor->backend != GGML_BACKEND_TYPE_GPU && tensor->backend != GGML_BACKEND_TYPE_GPU_SPLIT) ) { return; } @@ -14016,15 +14016,15 @@ static void ggml_sycl_assign_buffers_impl(struct ggml_tensor *tensor, return; } - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; - if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_CPU) { + if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_TYPE_CPU) { const ggml_op src0_op = tensor->src[0]->op; if (src0_op == GGML_OP_RESHAPE || src0_op == GGML_OP_TRANSPOSE || src0_op == GGML_OP_VIEW || src0_op == GGML_OP_PERMUTE) { ggml_sycl_assign_buffers_impl(tensor->src[0], scratch, force_inplace, no_alloc); } } - if (tensor->op == GGML_OP_CPY && tensor->src[1]->backend == GGML_BACKEND_CPU) { + if (tensor->op == GGML_OP_CPY && tensor->src[1]->backend == GGML_BACKEND_TYPE_CPU) { ggml_sycl_assign_buffers_impl(tensor->src[1], scratch, force_inplace, no_alloc); } @@ -14042,7 +14042,7 @@ static void ggml_sycl_assign_buffers_impl(struct ggml_tensor *tensor, SYCL_CHECK(ggml_sycl_set_device(g_main_device)); const dpct::queue_ptr stream = g_syclStreams[g_main_device_index][0]; - if (inplace && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) { + if (inplace && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) { ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->src[0]->extra; char * src0_ddc = (char *) src0_extra->data_device[g_main_device_index]; size_t offset = 0; @@ -14111,7 +14111,7 @@ void ggml_sycl_assign_scratch_offset(struct ggml_tensor *tensor, const bool inplace = tensor->view_src != nullptr; - if (inplace && (tensor->view_src->backend == GGML_BACKEND_GPU || tensor->view_src->backend == GGML_BACKEND_GPU_SPLIT)) { + if (inplace && (tensor->view_src->backend == GGML_BACKEND_TYPE_GPU || tensor->view_src->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) { ggml_tensor_extra_gpu * src0_extra = (ggml_tensor_extra_gpu * ) tensor->view_src->extra; char * src0_ddc = (char *) src0_extra->data_device[g_main_device_index]; size_t view_offset = 0; @@ -14132,7 +14132,7 @@ catch (sycl::exception const &exc) { } void ggml_sycl_copy_to_device(struct ggml_tensor *tensor) try { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(ggml_is_contiguous(tensor)); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; @@ -14219,9 +14219,9 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ if (!g_sycl_loaded) return false; ggml_sycl_func_t func; - const bool any_on_device = tensor->backend == GGML_BACKEND_GPU - || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - 
|| (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); + const bool any_on_device = tensor->backend == GGML_BACKEND_TYPE_GPU + || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_TYPE_GPU); if (!any_on_device && tensor->op != GGML_OP_MUL_MAT && tensor->op != GGML_OP_MUL_MAT_ID) { return false; @@ -14359,14 +14359,14 @@ bool ggml_sycl_compute_forward(struct ggml_compute_params * params, struct ggml_ return false; } - if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT) { + if (tensor->src[0] != nullptr && tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT) { ggml_sycl_set_peer_access(tensor->src[1]->ne[1]); } if (params->ith != 0) { return true; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return true; } func(tensor->src[0], tensor->src[1], tensor); @@ -14517,7 +14517,7 @@ static void ggml_backend_sycl_buffer_init_tensor(ggml_backend_buffer_t buffer, extra->data_device[ctx->device] = tensor->data; - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; tensor->extra = extra; if (ggml_is_quantized(tensor->type)) { @@ -14548,7 +14548,7 @@ static void ggml_backend_sycl_buffer_set_tensor(ggml_backend_buffer_t buffer, ggml_tensor *tensor, const void *data, size_t offset, size_t size) try { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; @@ -14573,7 +14573,7 @@ static void ggml_backend_sycl_buffer_get_tensor(ggml_backend_buffer_t buffer, const ggml_tensor *tensor, void *data, size_t offset, size_t size) try { - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_sycl_buffer_context * ctx = ( ggml_backend_sycl_buffer_context *)buffer->context; @@ -14809,7 +14809,7 @@ static void ggml_backend_sycl_set_tensor_async(ggml_backend_t backend, ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( (char *)tensor->data + offset, data, size))); @@ -14827,7 +14827,7 @@ static void ggml_backend_sycl_get_tensor_async(ggml_backend_t backend, ggml_backend_sycl_context * sycl_ctx = (ggml_backend_sycl_context *)backend->context; GGML_ASSERT(tensor->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); SYCL_CHECK(CHECK_TRY_ERROR(g_syclStreams[sycl_ctx->device][0]->memcpy( data, (const char *)tensor->data + offset, size))); @@ -14880,7 +14880,7 @@ static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph ggml_sycl_set_main_device(sycl_ctx->device); ggml_compute_params params = {}; - params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; params.ith = 0; for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = 
cgraph->nodes[i]; @@ -14888,13 +14888,13 @@ static bool ggml_backend_sycl_graph_compute(ggml_backend_t backend, ggml_cgraph if (node->op == GGML_OP_RESHAPE || node->op == GGML_OP_TRANSPOSE || node->op == GGML_OP_VIEW || node->op == GGML_OP_PERMUTE) continue; - assert(node->backend == GGML_BACKEND_GPU); + assert(node->backend == GGML_BACKEND_TYPE_GPU); assert(node->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device)); assert(node->extra != nullptr); for (int j = 0; j < GGML_MAX_SRC; j++) { if (node->src[j] != nullptr) { - assert(node->src[j]->backend == GGML_BACKEND_GPU); + assert(node->src[j]->backend == GGML_BACKEND_TYPE_GPU); assert(node->src[j]->buffer->buft == ggml_backend_sycl_buffer_type(sycl_ctx->device)); assert(node->src[j]->extra != nullptr); } diff --git a/ggml-vulkan.cpp b/ggml-vulkan.cpp index 4e5eaff15..6caafb822 100644 --- a/ggml-vulkan.cpp +++ b/ggml-vulkan.cpp @@ -2320,8 +2320,8 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su src1_uma = d_Qy != nullptr; } - const bool load_x = src0->backend != GGML_BACKEND_GPU && !src0_uma; - const bool load_y = src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool load_x = src0->backend != GGML_BACKEND_TYPE_GPU && !src0_uma; + const bool load_y = src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; const bool x_non_contig = !load_x && !ggml_vk_dim01_contiguous(src0); const bool y_non_contig = !load_y && !ggml_vk_dim01_contiguous(src1); @@ -2453,7 +2453,7 @@ static void ggml_vk_mul_mat_q_f16(ggml_backend_vk_context * ctx, vk_context * su // compute ggml_vk_matmul(ctx, subctx, *pipeline, { d_X, x_buf_offset, x_sz * ne02 * ne03 }, { d_Y, y_buf_offset, y_sz * ne12 * ne13 }, { d_D, d_buf_offset, d_sz * ne12 * ne13 }, { ctx->prealloc_split_k, 0, d_sz * ne12 * ne13 * split_k }, ne01, ne11, ne10, ne10, ne10, ne01, split_k, ne12*ne13, ne02, ne12, r2, r3, stride_batch_x, stride_batch_y, ne20*ne21); // NOLINT - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) ((char *) dst->data); ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, sizeof(float) * d_ne * ne12 * ne13); @@ -2506,8 +2506,8 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context src1_uma = d_Qy != nullptr; } - const bool load_x = src0->backend != GGML_BACKEND_GPU && !src0_uma; - const bool load_y = src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool load_x = src0->backend != GGML_BACKEND_TYPE_GPU && !src0_uma; + const bool load_y = src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; const bool x_non_contig = !load_x && !ggml_vk_dim01_contiguous(src0); const bool y_non_contig = !load_y && !ggml_vk_dim01_contiguous(src1); @@ -2630,7 +2630,7 @@ static void ggml_vk_mul_mat_vec_q_f16(ggml_backend_vk_context * ctx, vk_context ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, *dmmv, { { d_X, x_offset, x_sz }, { d_Y, y_buffer_offset, y_sz + y_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 3 * sizeof(int), &pc, { (uint32_t)ne01, 1, 1}); - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) ((char *) dst->data + i12*nb2 + i13*nb3); ggml_vk_sync_buffers(subctx); @@ -2647,7 +2647,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c std::cerr << "), (" << dst << ", name=" << dst->name << ", type=" << dst->type << ", backend=" << dst->backend << ", ne0=" << dst->ne[0] << ", ne1=" << 
dst->ne[1] << ", ne2=" << dst->ne[2] << ", ne3=" << dst->ne[3] << ", nb0=" << dst->nb[0] << ", nb1=" << dst->nb[1] << ", nb2=" << dst->nb[2] << ", nb3=" << dst->nb[3] << "),)" << std::endl; #endif GGML_ASSERT(ggml_is_permuted(src0) && ggml_is_permuted(src1)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(src0->nb[0] <= src0->nb[1] && src0->nb[2] <= src0->nb[3]); // NOLINT GGML_ASSERT(src1->nb[0] <= src1->nb[1] && src1->nb[2] <= src1->nb[3]); // NOLINT GGML_ASSERT(src0->type == GGML_TYPE_F16); @@ -2679,7 +2679,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c src1_uma = d_Qy != nullptr; } - const bool load_y = src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool load_y = src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; const uint64_t x_ne = ne00 * ne01 * ne02; const uint64_t y_ne = ne10 * ne11 * ne12; @@ -2721,7 +2721,7 @@ static void ggml_vk_mul_mat_vec_p021_f16_f32(ggml_backend_vk_context * ctx, vk_c ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_p021_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 6 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) dst->data; ggml_vk_sync_buffers(subctx); @@ -2738,7 +2738,7 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_con GGML_ASSERT(!ggml_is_transposed(src0)); GGML_ASSERT(!ggml_is_transposed(src1)); GGML_ASSERT(!ggml_is_permuted(src0)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); GGML_ASSERT(src0->type == GGML_TYPE_F16); GGML_ASSERT(src1->type == GGML_TYPE_F32); @@ -2771,7 +2771,7 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_con src1_uma = d_Qy != nullptr; } - const bool load_y = src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool load_y = src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; const uint64_t d_ne = ne01 * ne11 * ne12; @@ -2814,7 +2814,7 @@ static void ggml_vk_mul_mat_vec_nc_f16_f32(ggml_backend_vk_context * ctx, vk_con ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, ctx->pipeline_mul_mat_vec_nc_f16_f32, { { d_Qx, qx_buf_offset, qx_sz }, { d_Qy, qy_buffer_offset, qy_sz + qy_shader_offset }, { d_D, d_buffer_offset, d_sz + d_shader_offset } }, 7 * sizeof(uint32_t), &pc, { 1, (uint32_t)ne01, (uint32_t)ne12 }); - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) dst->data; ggml_vk_sync_buffers(subctx); @@ -2832,7 +2832,7 @@ static bool ggml_vk_can_mul_mat(const ggml_tensor * src0, const ggml_tensor * sr return (src0->type == GGML_TYPE_F32 || src0->type == GGML_TYPE_F16 || ggml_is_quantized(src0->type)) && (src1->type == GGML_TYPE_F32 || src1->type == GGML_TYPE_F16 || ggml_is_quantized(src1->type)) && dst->type == GGML_TYPE_F32 && - ((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_GPU); + ((ne0 >= 32 && ne1 >= 32 && ne10 >= 32) || src0->backend == GGML_BACKEND_TYPE_GPU); } static void ggml_vk_mul_mat(ggml_backend_vk_context * ctx, vk_context * subctx, const struct ggml_tensor * src0, const struct ggml_tensor * src1, struct ggml_tensor * dst) { @@ -2880,8 +2880,8 @@ static void 
ggml_vk_op_repeat(ggml_backend_vk_context * ctx, vk_context * subctx // TODO: support for transposed / permuted tensors GGML_ASSERT(nb0 == sizeof(float)); GGML_ASSERT(nb00 == sizeof(float)); - GGML_ASSERT(src0->backend == GGML_BACKEND_GPU); - GGML_ASSERT(dst->backend == GGML_BACKEND_GPU); + GGML_ASSERT(src0->backend == GGML_BACKEND_TYPE_GPU); + GGML_ASSERT(dst->backend == GGML_BACKEND_TYPE_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) dst->extra; ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; @@ -3110,8 +3110,8 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c } } - const bool transfer_src0 = src0->backend != GGML_BACKEND_GPU && !src0_uma; - const bool transfer_src1 = use_src1 && src1->backend != GGML_BACKEND_GPU && !src1_uma; + const bool transfer_src0 = src0->backend != GGML_BACKEND_TYPE_GPU && !src0_uma; + const bool transfer_src1 = use_src1 && src1->backend != GGML_BACKEND_TYPE_GPU && !src1_uma; uint64_t x_sz = ggml_vk_align_size(ggml_type_size(src0->type) * ne0, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment); uint64_t y_sz = use_src1 ? ggml_vk_align_size(ggml_type_size(src1->type) * ne1, ctx->device.lock()->properties.limits.minStorageBufferOffsetAlignment) : 0; @@ -3120,7 +3120,7 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c vk_buffer d_D = extra->buffer_gpu.lock(); // Workaround for tiny tensor inputs on ROPE - if (use_src1 && src1->backend == GGML_BACKEND_GPU && y_sz > d_D->size) { + if (use_src1 && src1->backend == GGML_BACKEND_TYPE_GPU && y_sz > d_D->size) { y_sz = VK_WHOLE_SIZE; } @@ -3209,9 +3209,9 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset, x_sz }, { d_D, d_buf_offset, d_sz } }, sizeof(PC), &pc, elements); } - if (dst->backend == GGML_BACKEND_CPU && op == GGML_OP_CPY) { + if (dst->backend == GGML_BACKEND_TYPE_CPU && op == GGML_OP_CPY) { ggml_vk_d2h_tensor_2d(ctx, subctx, d_D, 0, dst); - } else if(dst->backend == GGML_BACKEND_CPU) { + } else if(dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host float * d = (float *) dst->data; ggml_vk_buffer_read_async(ctx, subctx, d_D, 0, d, d_sz); @@ -3253,7 +3253,7 @@ static void ggml_vk_op_f32(ggml_backend_vk_context * ctx, vk_context * subctx, c ggml_vk_sync_buffers(subctx); ggml_vk_dispatch_pipeline(ctx, subctx, *pipeline, { { d_X, x_buf_offset + x_offset, x_sz }, { d_D, d_buf_offset + d_offset, d_sz } }, sizeof(PC), &pc, elements); } - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { // copy dst to host ggml_vk_buffer_read_async(ctx, subctx, d_D, d_buf_offset + d_offset, (char *) dst->data + i02*nb2 + i03*nb3, d_sz); } @@ -3359,7 +3359,7 @@ static void ggml_vk_rope(ggml_backend_vk_context * ctx, vk_context * subctx, con static void ggml_vk_nop(ggml_backend_vk_context * ctx, vk_context * subctx, const ggml_tensor * src0, ggml_tensor * dst) { // If backend is CPU, data from src0 has to be copied off the device - if (dst->backend == GGML_BACKEND_CPU) { + if (dst->backend == GGML_BACKEND_TYPE_CPU) { ggml_tensor_extra_gpu * extra_src0 = (ggml_tensor_extra_gpu *) src0->extra; vk_buffer d_D = extra_src0->buffer_gpu.lock(); ggml_vk_sync_buffers(subctx); @@ -3994,9 +3994,9 @@ static void ggml_vk_preallocate_buffers_graph(ggml_backend_vk_context * ctx, ggm #ifdef GGML_VULKAN_DEBUG std::cerr << 
"ggml_vk_preallocate_buffers_graph(" << node << ")" << std::endl; #endif - const bool any_on_device = node->backend == GGML_BACKEND_GPU - || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - || (node->src[1] != nullptr && (node->src[1]->backend == GGML_BACKEND_GPU)); + const bool any_on_device = node->backend == GGML_BACKEND_TYPE_GPU + || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_TYPE_GPU || node->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (node->src[1] != nullptr && (node->src[1]->backend == GGML_BACKEND_TYPE_GPU)); if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT)) { return; @@ -4215,9 +4215,9 @@ static void ggml_vk_preallocate_buffers(ggml_backend_vk_context * ctx) { } static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * node, bool last_node){ - const bool any_on_device = node->backend == GGML_BACKEND_GPU - || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_GPU || node->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - || (node->src[1] != nullptr && node->src[1]->backend == GGML_BACKEND_GPU); + const bool any_on_device = node->backend == GGML_BACKEND_TYPE_GPU + || (node->src[0] != nullptr && (node->src[0]->backend == GGML_BACKEND_TYPE_GPU || node->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (node->src[1] != nullptr && node->src[1]->backend == GGML_BACKEND_TYPE_GPU); if (ctx->disable || (!any_on_device && node->op != GGML_OP_MUL_MAT) || (node->op == GGML_OP_MUL_MAT && !any_on_device && !ggml_vk_can_mul_mat(node->src[0], node->src[1], node))) { return; @@ -4371,7 +4371,7 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod last_node = true; #endif - if (node->backend == GGML_BACKEND_CPU || last_node) { + if (node->backend == GGML_BACKEND_TYPE_CPU || last_node) { ggml_vk_ctx_end(ctx->compute_ctx); ctx->compute_ctx->exit_tensor = node; ctx->compute_ctx = nullptr; @@ -4379,9 +4379,9 @@ static void ggml_vk_build_graph(ggml_backend_vk_context * ctx, ggml_tensor * nod } static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_params * params, ggml_tensor * tensor){ - const bool any_on_device = tensor->backend == GGML_BACKEND_GPU - || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_GPU || tensor->src[0]->backend == GGML_BACKEND_GPU_SPLIT)) - || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_GPU); + const bool any_on_device = tensor->backend == GGML_BACKEND_TYPE_GPU + || (tensor->src[0] != nullptr && (tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU || tensor->src[0]->backend == GGML_BACKEND_TYPE_GPU_SPLIT)) + || (tensor->src[1] != nullptr && tensor->src[1]->backend == GGML_BACKEND_TYPE_GPU); if (ctx->disable || (!any_on_device && tensor->op != GGML_OP_MUL_MAT)) { return false; @@ -4442,7 +4442,7 @@ static bool ggml_vk_compute_forward(ggml_backend_vk_context * ctx, ggml_compute_ if (params->ith != 0) { return true; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return true; } @@ -4745,7 +4745,7 @@ GGML_CALL static void ggml_backend_vk_buffer_init_tensor(ggml_backend_buffer_t b extra->offset = (uint8_t *) tensor->data - (uint8_t *) vk_ptr_base; } - tensor->backend = GGML_BACKEND_GPU; + tensor->backend = GGML_BACKEND_TYPE_GPU; tensor->extra = extra; } @@ -4753,7 +4753,7 @@ GGML_CALL static void 
ggml_backend_vk_buffer_set_tensor(ggml_backend_buffer_t bu #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_set_tensor(" << buffer << ", " << tensor << ", " << data << ", " << offset << ", " << size << ")" << std::endl; #endif - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; @@ -4768,7 +4768,7 @@ GGML_CALL static void ggml_backend_vk_buffer_get_tensor(ggml_backend_buffer_t bu #ifdef GGML_VULKAN_DEBUG std::cerr << "ggml_backend_vk_buffer_get_tensor(" << buffer << ", " << tensor << ", " << data << ", " << offset << ", " << size << ")" << std::endl; #endif - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_backend_vk_buffer_context * ctx = (ggml_backend_vk_buffer_context *)buffer->context; @@ -4999,7 +4999,7 @@ GGML_CALL static void ggml_backend_vk_set_tensor_async(ggml_backend_t backend, g #endif ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; @@ -5020,7 +5020,7 @@ GGML_CALL static void ggml_backend_vk_get_tensor_async(ggml_backend_t backend, c #endif ggml_backend_vk_context * ctx = (ggml_backend_vk_context *)backend->context; GGML_ASSERT((tensor->buffer->buft == ggml_backend_vk_buffer_type(ctx->idx) || tensor->buffer->buft == ggml_backend_vk_host_buffer_type()) && "unsupported buffer type"); - GGML_ASSERT(tensor->backend == GGML_BACKEND_GPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_GPU); ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) tensor->extra; @@ -5097,7 +5097,7 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml int last_node = cgraph->n_nodes - 1; // If the last op in the cgraph isn't backend GPU, the command buffer doesn't get closed properly - while (last_node > 0 && cgraph->nodes[last_node]->backend != GGML_BACKEND_GPU) { + while (last_node > 0 && cgraph->nodes[last_node]->backend != GGML_BACKEND_TYPE_GPU) { last_node -= 1; } @@ -5106,7 +5106,7 @@ GGML_CALL static bool ggml_backend_vk_graph_compute(ggml_backend_t backend, ggml } ggml_compute_params params = {}; - params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; params.ith = 0; for (int i = 0; i < cgraph->n_nodes; i++) { ggml_tensor * node = cgraph->nodes[i]; @@ -5410,7 +5410,7 @@ static void ggml_vk_print_tensor_area(const ggml_tensor * tensor, const void * d static void ggml_vk_print_tensor(ggml_backend_vk_context * ctx, const ggml_tensor * tensor, const char * name) { void * tensor_data = tensor->data; - if (tensor->backend == GGML_BACKEND_GPU) { + if (tensor->backend == GGML_BACKEND_TYPE_GPU) { const size_t tensor_size = ggml_nbytes(tensor); tensor_data = malloc(tensor_size); @@ -5436,14 +5436,14 @@ static void ggml_vk_print_tensor(ggml_backend_vk_context * ctx, const ggml_tenso std::vector done; ggml_vk_print_graph_origin(tensor, done); - if (tensor->backend == GGML_BACKEND_GPU) { + if (tensor->backend == GGML_BACKEND_TYPE_GPU) { free(tensor_data); } } static void ggml_vk_check_tensor(const std::string& name, const ggml_tensor * tensor) { return; - 
GGML_ASSERT(tensor->backend == GGML_BACKEND_CPU); + GGML_ASSERT(tensor->backend == GGML_BACKEND_TYPE_CPU); if (tensor->type != GGML_TYPE_F32 && tensor->type != GGML_TYPE_F16) { return; } @@ -5481,7 +5481,7 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ if (params->ith != 0) { return; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE || tensor->op == GGML_OP_TRANSPOSE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE || tensor->op == GGML_OP_TRANSPOSE) { return; } @@ -5518,10 +5518,10 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ src0_buffer = malloc(src0_size); src0_clone->data = src0_buffer; - if (src0->backend == GGML_BACKEND_CPU) { + if (src0->backend == GGML_BACKEND_TYPE_CPU) { memcpy(src0_clone->data, src0->data, src0_size); memcpy(src0_clone->nb, src0->nb, sizeof(size_t) * GGML_MAX_DIMS); - } else if (src0->backend == GGML_BACKEND_GPU) { + } else if (src0->backend == GGML_BACKEND_TYPE_GPU) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src0->extra; uint64_t offset = extra->offset; if (!ggml_is_contiguous(src0) && ggml_vk_dim01_contiguous(src0)) { @@ -5561,10 +5561,10 @@ static void ggml_vk_check_results_0(ggml_backend_vk_context * ctx, ggml_compute_ src1_buffer = malloc(src1_size); src1_clone->data = src1_buffer; - if (src1->backend == GGML_BACKEND_CPU) { + if (src1->backend == GGML_BACKEND_TYPE_CPU) { memcpy(src1_clone->data, src1->data, src1_size); memcpy(src1_clone->nb, src1->nb, sizeof(size_t) * GGML_MAX_DIMS); - } else if (src1->backend == GGML_BACKEND_GPU) { + } else if (src1->backend == GGML_BACKEND_TYPE_GPU) { ggml_tensor_extra_gpu * extra = (ggml_tensor_extra_gpu *) src1->extra; uint64_t offset = extra->offset; if (!ggml_is_contiguous(src1) && ggml_vk_dim01_contiguous(src1)) { @@ -5723,7 +5723,7 @@ static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_ if (params->ith != 0) { return; } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE || tensor->op == GGML_OP_TRANSPOSE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE || tensor->op == GGML_OP_TRANSPOSE) { return; } if (!(vk_output_tensor > 0 && vk_output_tensor == check_counter) && check_counter <= vk_skip_checks) { @@ -5735,7 +5735,7 @@ static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_ void * tensor_data = tensor->data; - if (tensor->backend == GGML_BACKEND_GPU) { + if (tensor->backend == GGML_BACKEND_TYPE_GPU) { size_t tensor_size = ggml_nbytes(tensor); tensor_data = malloc(tensor_size); @@ -5868,7 +5868,7 @@ static void ggml_vk_check_results_1(ggml_backend_vk_context * ctx, ggml_compute_ comp_result = nullptr; comp_size = 0; - if (tensor->backend == GGML_BACKEND_GPU) { + if (tensor->backend == GGML_BACKEND_TYPE_GPU) { free(tensor_data); } } diff --git a/ggml.c b/ggml.c index c09a3cad6..1d81553f4 100644 --- a/ggml.c +++ b/ggml.c @@ -2721,7 +2721,7 @@ static struct ggml_tensor * ggml_new_tensor_impl( } } - struct ggml_object * const obj_new = ggml_new_object(ctx, GGML_OBJECT_TENSOR, GGML_TENSOR_SIZE + obj_alloc_size); + struct ggml_object * const obj_new = ggml_new_object(ctx, GGML_OBJECT_TYPE_TENSOR, GGML_TENSOR_SIZE + obj_alloc_size); // TODO: for recoverable errors, we would need to free the data allocated from the scratch buffer here @@ -2729,7 +2729,7 @@ static struct ggml_tensor * ggml_new_tensor_impl( *result = (struct ggml_tensor) { /*.type =*/ 
type, - /*.backend =*/ GGML_BACKEND_CPU, + /*.backend =*/ GGML_BACKEND_TYPE_CPU, /*.buffer =*/ NULL, /*.ne =*/ { 1, 1, 1, 1 }, /*.nb =*/ { 0, 0, 0, 0 }, @@ -3302,7 +3302,7 @@ struct ggml_tensor * ggml_get_first_tensor(const struct ggml_context * ctx) { char * const mem_buffer = ctx->mem_buffer; while (obj != NULL) { - if (obj->type == GGML_OBJECT_TENSOR) { + if (obj->type == GGML_OBJECT_TYPE_TENSOR) { return (struct ggml_tensor *)(mem_buffer + obj->offs); } @@ -3319,7 +3319,7 @@ struct ggml_tensor * ggml_get_next_tensor(const struct ggml_context * ctx, struc char * const mem_buffer = ctx->mem_buffer; while (obj != NULL) { - if (obj->type == GGML_OBJECT_TENSOR) { + if (obj->type == GGML_OBJECT_TYPE_TENSOR) { return (struct ggml_tensor *)(mem_buffer + obj->offs); } @@ -3335,7 +3335,7 @@ struct ggml_tensor * ggml_get_tensor(struct ggml_context * ctx, const char * nam char * const mem_buffer = ctx->mem_buffer; while (obj != NULL) { - if (obj->type == GGML_OBJECT_TENSOR) { + if (obj->type == GGML_OBJECT_TYPE_TENSOR) { struct ggml_tensor * cur = (struct ggml_tensor *)(mem_buffer + obj->offs); if (strcmp(cur->name, name) == 0) { return cur; @@ -5879,7 +5879,7 @@ struct ggml_tensor * ggml_top_k( int k) { GGML_ASSERT(a->ne[0] >= k); - struct ggml_tensor * result = ggml_argsort(ctx, a, GGML_SORT_DESC); + struct ggml_tensor * result = ggml_argsort(ctx, a, GGML_SORT_ORDER_DESC); result = ggml_view_4d(ctx, result, k, result->ne[1], result->ne[2], result->ne[3], @@ -6673,7 +6673,7 @@ static void ggml_compute_forward_dup_same_cont( GGML_ASSERT(ggml_is_contiguous(dst) && ggml_is_contiguous(src0)); GGML_ASSERT(src0->type == dst->type); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -6705,7 +6705,7 @@ static void ggml_compute_forward_dup_f16( GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -6978,7 +6978,7 @@ static void ggml_compute_forward_dup_f32( GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7231,7 +7231,7 @@ static void ggml_compute_forward_dup_bytes( GGML_ASSERT(ggml_nelements(dst) == ggml_nelements(src0)); GGML_ASSERT(src0->type == dst->type); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7411,7 +7411,7 @@ static void ggml_compute_forward_add_f32( GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7419,7 +7419,7 @@ static void ggml_compute_forward_add_f32( const int nth = params->nth; #ifdef GGML_USE_CLBLAST - if (src1->backend == GGML_BACKEND_GPU) { + if (src1->backend == GGML_BACKEND_TYPE_GPU) { // TODO: OpenCL kernel support full broadcast GGML_ASSERT(ggml_can_repeat_rows(src1, src0)); if (ith == 0) { @@ -7501,7 +7501,7 @@ static void ggml_compute_forward_add_f16_f32( GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if 
(params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7580,7 +7580,7 @@ static void ggml_compute_forward_add_f16_f16( GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7636,7 +7636,7 @@ static void ggml_compute_forward_add_q_f32( GGML_ASSERT(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7774,7 +7774,7 @@ static void ggml_compute_forward_add1_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7828,7 +7828,7 @@ static void ggml_compute_forward_add1_f16_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7880,7 +7880,7 @@ static void ggml_compute_forward_add1_f16_f16( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -7932,7 +7932,7 @@ static void ggml_compute_forward_add1_q_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_is_scalar(src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8062,7 +8062,7 @@ static void ggml_compute_forward_acc_f32( size_t offset = ((int32_t *) dst->op_params)[3]; bool inplace = (bool) ((int32_t *) dst->op_params)[4]; - if (!inplace && (params->type == GGML_TASK_INIT)) { + if (!inplace && (params->type == GGML_TASK_TYPE_INIT)) { if (params->ith != 0) { return; } @@ -8074,7 +8074,7 @@ static void ggml_compute_forward_acc_f32( ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8176,7 +8176,7 @@ static void ggml_compute_forward_sub_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8257,14 +8257,14 @@ static void ggml_compute_forward_mul_f32( GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } const int ith = params->ith; const int nth = params->nth; #if defined(GGML_USE_CLBLAST) - if (src1->backend == GGML_BACKEND_GPU) { + if (src1->backend == GGML_BACKEND_TYPE_GPU) { // TODO: OpenCL kernel 
support full broadcast GGML_ASSERT(ggml_can_repeat_rows(src1, src0)); if (ith == 0) { @@ -8365,7 +8365,7 @@ static void ggml_compute_forward_div_f32( GGML_ASSERT(ggml_can_repeat(src1, src0) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8460,7 +8460,7 @@ static void ggml_compute_forward_sqr_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8506,7 +8506,7 @@ static void ggml_compute_forward_sqrt_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8552,7 +8552,7 @@ static void ggml_compute_forward_log_f32( GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8598,7 +8598,7 @@ static void ggml_compute_forward_sum_f32( assert(params->ith == 0); assert(ggml_is_scalar(dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8633,7 +8633,7 @@ static void ggml_compute_forward_sum_f16( assert(params->ith == 0); assert(ggml_is_scalar(dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8690,7 +8690,7 @@ static void ggml_compute_forward_sum_rows_f32( GGML_ASSERT(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8745,7 +8745,7 @@ static void ggml_compute_forward_mean_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8804,7 +8804,7 @@ static void ggml_compute_forward_argmax_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8855,7 +8855,7 @@ static void ggml_compute_forward_repeat_f32( GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8900,7 +8900,7 @@ static void ggml_compute_forward_repeat_f16( GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -8974,7 +8974,7 @@ static void ggml_compute_forward_repeat_back_f32( GGML_ASSERT(params->ith == 0); GGML_ASSERT(ggml_can_repeat(dst, src0)); - if (params->type == GGML_TASK_INIT || 
params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9051,7 +9051,7 @@ static void ggml_compute_forward_concat_f32( const struct ggml_tensor * src0 = dst->src[0]; const struct ggml_tensor * src1 = dst->src[1]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9123,7 +9123,7 @@ static void ggml_compute_forward_abs_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9169,7 +9169,7 @@ static void ggml_compute_forward_sgn_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9215,7 +9215,7 @@ static void ggml_compute_forward_neg_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9261,7 +9261,7 @@ static void ggml_compute_forward_step_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9307,7 +9307,7 @@ static void ggml_compute_forward_tanh_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9353,7 +9353,7 @@ static void ggml_compute_forward_elu_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9399,7 +9399,7 @@ static void ggml_compute_forward_relu_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9446,7 +9446,7 @@ static void ggml_compute_forward_gelu_f32( GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9509,7 +9509,7 @@ static void ggml_compute_forward_gelu_quick_f32( GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9572,7 +9572,7 @@ static void ggml_compute_forward_silu_f32( GGML_ASSERT(ggml_is_contiguous_except_dim_1(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if 
(params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9633,7 +9633,7 @@ static void ggml_compute_forward_leaky_relu_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9686,7 +9686,7 @@ static void ggml_compute_forward_silu_back_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_are_same_shape(src0, grad)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9748,7 +9748,7 @@ static void ggml_compute_forward_hardswish_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9791,7 +9791,7 @@ static void ggml_compute_forward_hardsigmoid_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9837,7 +9837,7 @@ static void ggml_compute_forward_norm_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9912,7 +9912,7 @@ static void ggml_compute_forward_rms_norm_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -9983,7 +9983,7 @@ static void ggml_compute_forward_rms_norm_back_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst) && ggml_are_same_shape(src0, src1)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10161,7 +10161,7 @@ static void ggml_compute_forward_group_norm_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10328,7 +10328,7 @@ static void ggml_compute_forward_mul_mat( #if defined(GGML_USE_CLBLAST) if (ggml_cl_can_mul_mat(src0, src1, dst)) { - if (params->ith == 0 && params->type == GGML_TASK_COMPUTE) { + if (params->ith == 0 && params->type == GGML_TASK_TYPE_COMPUTE) { ggml_cl_mul_mat(src0, src1, dst, params->wdata, params->wsize); } return; @@ -10341,7 +10341,7 @@ static void ggml_compute_forward_mul_mat( const size_t desired_wsize = ne13*ne12*ne_plane*sizeof(float); UNUSED(desired_wsize); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (type != GGML_TYPE_F32) { assert(params->wsize >= desired_wsize); // parallelize by src0 rows @@ -10364,7 +10364,7 @@ static void ggml_compute_forward_mul_mat( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10402,7 +10402,7 @@ static void ggml_compute_forward_mul_mat( } #endif - if (params->type == GGML_TASK_INIT) { + if 
(params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -10426,7 +10426,7 @@ static void ggml_compute_forward_mul_mat( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10583,7 +10583,7 @@ static void ggml_compute_forward_mul_mat_id( #define MMID_MATRIX_ROW(row_id, i1) matrix_rows[(row_id)*ne11 + (i1)] - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -10620,7 +10620,7 @@ static void ggml_compute_forward_mul_mat_id( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10768,7 +10768,7 @@ static void ggml_compute_forward_out_prod_f32( (ggml_is_contiguous(src1) || ggml_is_transposed(src1)); #endif - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) // gemm beta will zero dst if (use_blas) { return; @@ -10781,7 +10781,7 @@ static void ggml_compute_forward_out_prod_f32( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -10961,7 +10961,7 @@ static void ggml_compute_forward_out_prod_q_f32( // TODO: #if defined(GGML_USE_CUBLAS) ggml_cuda_out_prod // TODO: #if defined(GGML_USE_ACCELERATE) || defined(GGML_USE_OPENBLAS) || defined(GGML_USE_CLBLAST) - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -10969,7 +10969,7 @@ static void ggml_compute_forward_out_prod_q_f32( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11087,7 +11087,7 @@ static void ggml_compute_forward_scale_f32( GGML_ASSERT(ggml_is_contiguous(dst)); GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11159,7 +11159,7 @@ static void ggml_compute_forward_set_f32( size_t offset = ((int32_t *) dst->op_params)[3]; bool inplace = (bool) ((int32_t *) dst->op_params)[4]; - if (!inplace && (params->type == GGML_TASK_INIT)) { + if (!inplace && (params->type == GGML_TASK_TYPE_INIT)) { if (params->ith != 0) { return; } @@ -11171,7 +11171,7 @@ static void ggml_compute_forward_set_f32( ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11319,7 +11319,7 @@ static void ggml_compute_forward_get_rows_q( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11359,7 +11359,7 @@ static void ggml_compute_forward_get_rows_f16( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11396,7 +11396,7 @@ static void ggml_compute_forward_get_rows_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11499,14 +11499,14 @@ static void ggml_compute_forward_get_rows_back_f32_f16( // 
ggml_compute_forward_dup_same_cont(params, opt0, dst); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (params->ith != 0) { return; } memset(dst->data, 0, ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11538,14 +11538,14 @@ static void ggml_compute_forward_get_rows_back_f32( // ggml_compute_forward_dup_same_cont(params, opt0, dst); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (params->ith != 0) { return; } memset(dst->data, 0, ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11615,7 +11615,7 @@ static void ggml_compute_forward_diag_f32( GGML_ASSERT(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11684,7 +11684,7 @@ static void ggml_compute_forward_diag_mask_f32( GGML_ASSERT(n_past >= 0); - if (!inplace && (params->type == GGML_TASK_INIT)) { + if (!inplace && (params->type == GGML_TASK_TYPE_INIT)) { if (ith != 0) { return; } @@ -11698,7 +11698,7 @@ static void ggml_compute_forward_diag_mask_f32( ggml_nbytes(dst)); } - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11772,7 +11772,7 @@ static void ggml_compute_forward_soft_max_f32( assert(ggml_is_contiguous(dst)); assert(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -11910,7 +11910,7 @@ static void ggml_compute_forward_soft_max_back_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); GGML_ASSERT(ggml_are_same_shape(src1, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12004,7 +12004,7 @@ static void ggml_compute_forward_alibi_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12063,7 +12063,7 @@ static void ggml_compute_forward_alibi_f16( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12170,7 +12170,7 @@ static void ggml_compute_forward_clamp_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12310,7 +12310,7 @@ static void ggml_compute_forward_rope_f32( const struct ggml_tensor * src0 = dst->src[0]; const struct ggml_tensor * src1 = dst->src[1]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12488,7 +12488,7 @@ static void ggml_compute_forward_rope_f16( const struct ggml_tensor * src0 = 
dst->src[0]; const struct ggml_tensor * src1 = dst->src[1]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12719,7 +12719,7 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -12759,7 +12759,7 @@ static void ggml_compute_forward_conv_transpose_1d_f16_f32( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12818,7 +12818,7 @@ static void ggml_compute_forward_conv_transpose_1d_f32( GGML_ASSERT(nb00 == sizeof(float)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -12858,7 +12858,7 @@ static void ggml_compute_forward_conv_transpose_1d_f32( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -12962,11 +12962,11 @@ static void ggml_compute_forward_im2col_f32( GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13050,11 +13050,11 @@ static void ggml_compute_forward_im2col_f16( GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13136,7 +13136,7 @@ static void ggml_compute_forward_conv_transpose_2d( GGML_ASSERT(nb00 == sizeof(ggml_fp16_t)); GGML_ASSERT(nb10 == sizeof(float)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith != 0) { return; } @@ -13178,7 +13178,7 @@ static void ggml_compute_forward_conv_transpose_2d( return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13230,7 +13230,7 @@ static void ggml_compute_forward_pool_1d_sk_p0( assert(src->type == GGML_TYPE_F32); assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13299,7 +13299,7 @@ static void ggml_compute_forward_pool_2d( GGML_ASSERT(src->type == GGML_TYPE_F32); GGML_ASSERT(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13372,7 +13372,7 @@ static void ggml_compute_forward_upscale_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13432,7 +13432,7 @@ static void ggml_compute_forward_pad_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13493,7 +13493,7 @@ static void 
ggml_compute_forward_argsort_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13519,8 +13519,8 @@ static void ggml_compute_forward_argsort_f32( // C doesn't have a functional sort, so we do a bubble sort instead for (int64_t j = 0; j < ne0; j++) { for (int64_t k = j + 1; k < ne0; k++) { - if ((order == GGML_SORT_ASC && src_data[dst_data[j]] > src_data[dst_data[k]]) || - (order == GGML_SORT_DESC && src_data[dst_data[j]] < src_data[dst_data[k]])) { + if ((order == GGML_SORT_ORDER_ASC && src_data[dst_data[j]] > src_data[dst_data[k]]) || + (order == GGML_SORT_ORDER_DESC && src_data[dst_data[j]] < src_data[dst_data[k]])) { int32_t tmp = dst_data[j]; dst_data[j] = dst_data[k]; dst_data[k] = tmp; @@ -13603,11 +13603,11 @@ static void ggml_compute_forward_flash_attn_f32( GGML_ASSERT(nb1 <= nb2); GGML_ASSERT(nb2 <= nb3); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -13795,11 +13795,11 @@ static void ggml_compute_forward_flash_attn_f16( GGML_ASSERT(nb1 <= nb2); GGML_ASSERT(nb2 <= nb3); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14054,11 +14054,11 @@ static void ggml_compute_forward_flash_ff_f16( GGML_ASSERT(nb1 <= nb2); GGML_ASSERT(nb2 <= nb3); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14213,14 +14213,14 @@ static void ggml_compute_forward_flash_attn_back_f32( GGML_ASSERT(nb1 <= nb2); GGML_ASSERT(nb2 <= nb3); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith == 0) { memset(dst->data, 0, nb0*ne0*ne1*ne2*ne3); } return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14536,7 +14536,7 @@ static void ggml_compute_forward_win_part_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14602,7 +14602,7 @@ static void ggml_compute_forward_win_unpart_f32( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14730,7 +14730,7 @@ static void ggml_compute_forward_get_rel_pos_f16( const struct ggml_tensor * src0 = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14782,14 +14782,14 @@ static void ggml_compute_forward_add_rel_pos_f32( const struct ggml_tensor * src2 = dst->src[2]; const bool inplace = (bool) ((int32_t *) dst->op_params)[0]; - if (!inplace && params->type == GGML_TASK_INIT) { + if (!inplace && params->type == GGML_TASK_TYPE_INIT) { if (params->ith != 0) { return; } memcpy((char *) dst->data, (char *) src0->data, ggml_nbytes(dst)); return; } - if (params->type == 
GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14871,7 +14871,7 @@ static void ggml_compute_forward_map_unary_f32( GGML_ASSERT(ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14920,7 +14920,7 @@ static void ggml_compute_forward_map_binary_f32( assert(params->ith == 0); assert(ggml_are_same_shape(src0, src1) && ggml_are_same_shape(src0, dst)); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14969,7 +14969,7 @@ static void ggml_compute_forward_map_custom1_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -14988,7 +14988,7 @@ static void ggml_compute_forward_map_custom2_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15008,7 +15008,7 @@ static void ggml_compute_forward_map_custom3_f32( assert(params->ith == 0); - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15023,7 +15023,7 @@ static void ggml_compute_forward_map_custom1( const struct ggml_tensor * a = dst->src[0]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15041,7 +15041,7 @@ static void ggml_compute_forward_map_custom2( const struct ggml_tensor * a = dst->src[0]; const struct ggml_tensor * b = dst->src[1]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15060,7 +15060,7 @@ static void ggml_compute_forward_map_custom3( const struct ggml_tensor * b = dst->src[1]; const struct ggml_tensor * c = dst->src[2]; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15094,14 +15094,14 @@ static void ggml_compute_forward_cross_entropy_loss_f32( GGML_ASSERT(params->wsize >= sizeof(float) * (nth + nth * nc)); - if (params->type == GGML_TASK_INIT) { + if (params->type == GGML_TASK_TYPE_INIT) { if (ith == 0) { memset(sums, 0, sizeof(float) * (nth + nth * nc)); } return; } - if (params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_FINALIZE) { if (ith == 0) { float * dp = (float *) dst->data; ggml_vec_sum_f32(nth, dp, sums); @@ -15216,7 +15216,7 @@ static void ggml_compute_forward_cross_entropy_loss_back_f32( const int64_t ith = params->ith; const int64_t nth = params->nth; - if (params->type == GGML_TASK_INIT || params->type == GGML_TASK_FINALIZE) { + if (params->type == GGML_TASK_TYPE_INIT || params->type == GGML_TASK_TYPE_FINALIZE) { return; } @@ -15323,8 +15323,8 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm if (skip_cpu) { return; } - 
GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_CPU); - GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_CPU); + GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_TYPE_CPU); + GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_TYPE_CPU); #elif defined(GGML_USE_VULKAN) const bool skip_cpu = ggml_vk_compute_forward_cpu_assist(params, tensor); #ifdef GGML_VULKAN_CHECK_RESULTS @@ -15335,8 +15335,8 @@ static void ggml_compute_forward(struct ggml_compute_params * params, struct ggm if (skip_cpu) { return; } - GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_CPU); - GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_CPU); + GGML_ASSERT(tensor->src[0] == NULL || tensor->src[0]->backend == GGML_BACKEND_TYPE_CPU); + GGML_ASSERT(tensor->src[1] == NULL || tensor->src[1]->backend == GGML_BACKEND_TYPE_CPU); #endif // GGML_USE_CUBLAS #ifdef GGML_USE_SYCL @@ -16882,7 +16882,7 @@ size_t ggml_graph_overhead(void) { struct ggml_cgraph * ggml_new_graph_custom(struct ggml_context * ctx, size_t size, bool grads) { const size_t obj_size = ggml_graph_nbytes(size, grads); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_GRAPH, obj_size); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_GRAPH, obj_size); struct ggml_cgraph * cgraph = (struct ggml_cgraph *) ((char *) ctx->mem_buffer + obj->offs); struct ggml_tensor ** data_start = (struct ggml_tensor **) (cgraph + 1); @@ -17429,7 +17429,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { set_numa_thread_affinity(state->ith); int node_n = -1; - int task_phase = GGML_TASK_FINALIZE; + int task_phase = GGML_TASK_TYPE_FINALIZE; while (true) { if (cplan->abort_callback && cplan->abort_callback(cplan->abort_callback_data)) { @@ -17441,7 +17441,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { // all other threads are finished and spinning // do finalize and init here so we don't have synchronize again struct ggml_compute_params params = { - /*.type =*/ GGML_TASK_FINALIZE, + /*.type =*/ GGML_TASK_TYPE_FINALIZE, /*.ith =*/ 0, /*.nth =*/ 0, /*.wsize =*/ cplan->work_size, @@ -17472,17 +17472,17 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { if (n_tasks == 1) { /* INIT */ if (GGML_OP_HAS_INIT[node->op]) { - params.type = GGML_TASK_INIT; + params.type = GGML_TASK_TYPE_INIT; ggml_compute_forward(¶ms, node); } // TODO: maybe push node_n to the atomic but if other threads see n_tasks is 1, // they do something more efficient than spinning (?) 
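// The renamed task types keep the original three-phase dispatch: an op flagged
// in ggml.c's GGML_OP_HAS_INIT / GGML_OP_HAS_FINALIZE tables is driven through
// INIT, COMPUTE and FINALIZE in that order. A minimal single-threaded sketch of
// that protocol (illustration only; assumes `node` and `cplan` are set up as in
// the surrounding ggml_graph_compute_thread code):
struct ggml_compute_params params = {
    /*.type  =*/ GGML_TASK_TYPE_INIT,
    /*.ith   =*/ 0,
    /*.nth   =*/ 1,
    /*.wsize =*/ cplan->work_size,
    /*.wdata =*/ cplan->work_data,
};
if (GGML_OP_HAS_INIT[node->op]) {
    ggml_compute_forward(&params, node); // one-time setup, e.g. zeroing dst
}
params.type = GGML_TASK_TYPE_COMPUTE;
ggml_compute_forward(&params, node);     // the actual work
if (GGML_OP_HAS_FINALIZE[node->op]) {
    params.type = GGML_TASK_TYPE_FINALIZE;
    ggml_compute_forward(&params, node); // cross-thread reduction pass
}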
- params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; ggml_compute_forward(¶ms, node); if (GGML_OP_HAS_FINALIZE[node->op]) { - params.type = GGML_TASK_FINALIZE; + params.type = GGML_TASK_TYPE_FINALIZE; ggml_compute_forward(¶ms, node); } @@ -17496,7 +17496,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { } } - task_phase = GGML_TASK_INIT; + task_phase = GGML_TASK_TYPE_INIT; atomic_store(&state->shared->n_active, n_threads); atomic_store(&state->shared->node_n, node_n); atomic_store(&state->shared->node_task, task_phase); @@ -17513,7 +17513,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { const int n_tasks = ggml_get_n_tasks(node, n_threads); struct ggml_compute_params params = { - /*.type =*/ GGML_TASK_INIT, + /*.type =*/ GGML_TASK_TYPE_INIT, /*.ith =*/ state->ith, /*.nth =*/ n_tasks, /*.wsize =*/ cplan->work_size, @@ -17527,7 +17527,7 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { } if (atomic_fetch_sub(&state->shared->n_active, 1) == 1) { - task_phase = GGML_TASK_COMPUTE; + task_phase = GGML_TASK_TYPE_COMPUTE; atomic_store(&state->shared->n_active, n_threads); atomic_store(&state->shared->node_task, task_phase); } @@ -17542,12 +17542,12 @@ static thread_ret_t ggml_graph_compute_thread(void * data) { } if (state->ith < n_tasks) { - params.type = GGML_TASK_COMPUTE; + params.type = GGML_TASK_TYPE_COMPUTE; ggml_compute_forward(¶ms, node); } if (atomic_fetch_sub(&state->shared->n_active, 1) == 1) { - task_phase = GGML_TASK_FINALIZE; + task_phase = GGML_TASK_TYPE_FINALIZE; atomic_store(&state->shared->n_active, n_threads); atomic_store(&state->shared->node_task, task_phase); } @@ -17783,7 +17783,7 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { /*.n_threads =*/ n_threads, /*.n_active =*/ n_threads, /*.node_n =*/ -1, - /*.node_task =*/ GGML_TASK_FINALIZE, + /*.node_task =*/ GGML_TASK_TYPE_FINALIZE, /*.abort_callback =*/ NULL, /*.abort_callback_data =*/ NULL, }; @@ -17851,7 +17851,7 @@ int ggml_graph_compute(struct ggml_cgraph * cgraph, struct ggml_cplan * cplan) { void ggml_graph_compute_with_ctx(struct ggml_context * ctx, struct ggml_cgraph * cgraph, int n_threads) { struct ggml_cplan cplan = ggml_graph_plan(cgraph, n_threads); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_WORK_BUFFER, cplan.work_size); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; @@ -18659,7 +18659,7 @@ static enum ggml_opt_result ggml_opt_adam( float * pf = params.past > 0 ? 
opt->adam.pf->data : NULL; // past function values struct ggml_cplan cplan = ggml_graph_plan(gb, params.n_threads); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_WORK_BUFFER, cplan.work_size); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; bool cancel = false; @@ -18671,7 +18671,7 @@ static enum ggml_opt_result ggml_opt_adam( if (callback) { callback(callback_data, accum_step, &sched, &cancel); if (cancel) { - return GGML_OPT_CANCEL; + return GGML_OPT_RESULT_CANCEL; } } // ggml_graph_reset (gf); @@ -18762,7 +18762,7 @@ static enum ggml_opt_result ggml_opt_adam( if (callback) { callback(callback_data, accum_step, &sched, &cancel); if (cancel) { - return GGML_OPT_CANCEL;; + return GGML_OPT_RESULT_CANCEL;; } } // ggml_graph_reset (gf); @@ -18779,7 +18779,7 @@ static enum ggml_opt_result ggml_opt_adam( if (fabsf(fx - fx_prev[0])/fx < params.adam.eps_f) { GGML_PRINT_DEBUG("converged\n"); - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } // delta-based convergence test @@ -18789,7 +18789,7 @@ static enum ggml_opt_result ggml_opt_adam( const float rate = (pf[(iter0 + t)%params.past] - fx)/fx; if (fabsf(rate) < params.delta) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } } @@ -18805,7 +18805,7 @@ static enum ggml_opt_result ggml_opt_adam( ++n_no_improvement[0]; if (n_no_improvement[0] >= params.max_no_improvement) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } } } @@ -18823,7 +18823,7 @@ static enum ggml_opt_result ggml_opt_adam( } } - return GGML_OPT_DID_NOT_CONVERGE; + return GGML_OPT_RESULT_DID_NOT_CONVERGE; } // @@ -18904,7 +18904,7 @@ static enum ggml_opt_result linesearch_backtracking( float sched = 0; callback(callback_data, accum_step, &sched, cancel); if (*cancel) { - return GGML_OPT_CANCEL; + return GGML_OPT_RESULT_CANCEL; } } // ggml_graph_reset (gf); @@ -18977,7 +18977,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( if (params.lbfgs.linesearch == GGML_LINESEARCH_BACKTRACKING_WOLFE || params.lbfgs.linesearch == GGML_LINESEARCH_BACKTRACKING_STRONG_WOLFE) { if (params.lbfgs.wolfe <= params.lbfgs.ftol || 1.f <= params.lbfgs.wolfe) { - return GGML_OPT_INVALID_WOLFE; + return GGML_OPT_RESULT_INVALID_WOLFE; } } @@ -19006,7 +19006,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( } struct ggml_cplan cplan = ggml_graph_plan(gb, params.n_threads); - struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_WORK_BUFFER, cplan.work_size); + struct ggml_object * obj = ggml_new_object(ctx, GGML_OBJECT_TYPE_WORK_BUFFER, cplan.work_size); cplan.work_data = (uint8_t *)ctx->mem_buffer + obj->offs; float * x = opt->lbfgs.x->data; // current parameters @@ -19047,7 +19047,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( float sched = 0; callback(callback_data, accum_step, &sched, &cancel); if (cancel) { - return GGML_OPT_CANCEL; + return GGML_OPT_RESULT_CANCEL; } } // ggml_graph_reset (gf); @@ -19075,7 +19075,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( // already optimized if (gnorm/xnorm <= params.lbfgs.eps) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } if (opt->just_initialized) { @@ -19120,7 +19120,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( // way to test and don't want to break something with so many changes lined up ls = linesearch_backtracking(¶ms, nx, x, &fx, g, d, step, xp, f, gb, &cplan, np, ps, &cancel, callback, callback_data); if (cancel) { - return GGML_OPT_CANCEL; + return GGML_OPT_RESULT_CANCEL; } if (ls < 0) { @@ -19143,7 
+19143,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( } if (gnorm/xnorm <= params.lbfgs.eps) { // converged - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } // delta-based convergence test @@ -19153,7 +19153,7 @@ static enum ggml_opt_result ggml_opt_lbfgs( const float rate = (pf[k[0]%params.past] - fx)/fx; if (fabsf(rate) < params.delta) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } } @@ -19169,14 +19169,14 @@ static enum ggml_opt_result ggml_opt_lbfgs( n_no_improvement[0]++; if (n_no_improvement[0] >= params.max_no_improvement) { - return GGML_OPT_OK; + return GGML_OPT_RESULT_OK; } } } if (params.lbfgs.n_iter != 0 && params.lbfgs.n_iter < it + 1) { // reached the maximum number of iterations - return GGML_OPT_DID_NOT_CONVERGE; + return GGML_OPT_RESULT_DID_NOT_CONVERGE; } // update vectors s and y: @@ -19232,17 +19232,17 @@ static enum ggml_opt_result ggml_opt_lbfgs( GGML_ASSERT(false && "lbfgs failed"); - return GGML_OPT_DID_NOT_CONVERGE; + return GGML_OPT_RESULT_DID_NOT_CONVERGE; } struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { struct ggml_opt_params result; switch (type) { - case GGML_OPT_ADAM: + case GGML_OPT_TYPE_ADAM: { result = (struct ggml_opt_params) { - .type = GGML_OPT_ADAM, + .type = GGML_OPT_TYPE_ADAM, .graph_size = GGML_DEFAULT_GRAPH_SIZE, .n_threads = 1, // FIXME: GGML_DEFAULT_N_THREADS ? .past = 0, @@ -19270,10 +19270,10 @@ struct ggml_opt_params ggml_opt_default_params(enum ggml_opt_type type) { }, }; } break; - case GGML_OPT_LBFGS: + case GGML_OPT_TYPE_LBFGS: { result = (struct ggml_opt_params) { - .type = GGML_OPT_LBFGS, + .type = GGML_OPT_TYPE_LBFGS, .graph_size = GGML_DEFAULT_GRAPH_SIZE, .n_threads = 1, .past = 0, @@ -19318,12 +19318,12 @@ GGML_API void ggml_opt_init( opt->just_initialized = true; if (opt->ctx == NULL) { struct ggml_init_params ctx_opt_params; - if (opt->params.type == GGML_OPT_ADAM) { + if (opt->params.type == GGML_OPT_TYPE_ADAM) { ctx_opt_params.mem_size = GGML_MEM_ALIGN*3 + ggml_tensor_overhead()*3 + ggml_type_size(GGML_TYPE_F32)*nx*3; if (opt->params.past > 0) { ctx_opt_params.mem_size += GGML_MEM_ALIGN + ggml_tensor_overhead() + ggml_type_size(GGML_TYPE_F32)*opt->params.past; } - } else if (opt->params.type == GGML_OPT_LBFGS) { + } else if (opt->params.type == GGML_OPT_TYPE_LBFGS) { ctx_opt_params.mem_size = GGML_MEM_ALIGN*9 + ggml_tensor_overhead()*9 + ggml_type_size(GGML_TYPE_F32)*(nx*5 + opt->params.lbfgs.m*2 + nx*opt->params.lbfgs.m*2); if (opt->params.past > 0) { ctx_opt_params.mem_size += GGML_MEM_ALIGN + ggml_tensor_overhead() + ggml_type_size(GGML_TYPE_F32)*opt->params.past; @@ -19335,7 +19335,7 @@ GGML_API void ggml_opt_init( opt->ctx = ggml_init(ctx_opt_params); } switch (opt->params.type) { - case GGML_OPT_ADAM: + case GGML_OPT_TYPE_ADAM: { opt->adam.g = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); opt->adam.m = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); @@ -19349,7 +19349,7 @@ GGML_API void ggml_opt_init( ggml_set_zero(opt->adam.pf); } } break; - case GGML_OPT_LBFGS: + case GGML_OPT_TYPE_LBFGS: { opt->lbfgs.x = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); opt->lbfgs.xp = ggml_new_tensor_1d(opt->ctx, GGML_TYPE_F32, nx); @@ -19393,13 +19393,13 @@ enum ggml_opt_result ggml_opt( ctx = ggml_init(params_ctx); if (ctx == NULL) { - return GGML_OPT_NO_CONTEXT; + return GGML_OPT_RESULT_NO_CONTEXT; } free_ctx = true; } - enum ggml_opt_result result = GGML_OPT_OK; + enum ggml_opt_result result = GGML_OPT_RESULT_OK; struct ggml_opt_context * opt = (struct ggml_opt_context *) 
alloca(sizeof(struct ggml_opt_context));
@@ -19438,14 +19438,14 @@ enum ggml_opt_result ggml_opt_resume_g(
         void * callback_data) {
 
     // build forward + backward compute graphs
-    enum ggml_opt_result result = GGML_OPT_OK;
+    enum ggml_opt_result result = GGML_OPT_RESULT_OK;
 
     switch (opt->params.type) {
-        case GGML_OPT_ADAM:
+        case GGML_OPT_TYPE_ADAM:
            {
                result = ggml_opt_adam(ctx, opt, opt->params, f, gf, gb, callback, callback_data);
            } break;
-        case GGML_OPT_LBFGS:
+        case GGML_OPT_TYPE_LBFGS:
            {
                result = ggml_opt_lbfgs(ctx, opt, opt->params, f, gf, gb, callback, callback_data);
            } break;
diff --git a/ggml.h b/ggml.h
index a4166e1f7..75fd035a4 100644
--- a/ggml.h
+++ b/ggml.h
@@ -364,9 +364,9 @@ extern "C" {
     };
 
     enum ggml_backend_type {
-        GGML_BACKEND_CPU = 0,
-        GGML_BACKEND_GPU = 10,
-        GGML_BACKEND_GPU_SPLIT = 20,
+        GGML_BACKEND_TYPE_CPU = 0,
+        GGML_BACKEND_TYPE_GPU = 10,
+        GGML_BACKEND_TYPE_GPU_SPLIT = 20,
     };
 
     // model file types
@@ -498,9 +498,9 @@ extern "C" {
     };
 
     enum ggml_object_type {
-        GGML_OBJECT_TENSOR,
-        GGML_OBJECT_GRAPH,
-        GGML_OBJECT_WORK_BUFFER
+        GGML_OBJECT_TYPE_TENSOR,
+        GGML_OBJECT_TYPE_GRAPH,
+        GGML_OBJECT_TYPE_WORK_BUFFER
     };
 
     enum ggml_log_level {
@@ -642,9 +642,9 @@ extern "C" {
     // NOTE: the INIT or FINALIZE pass is not scheduled unless explicitly enabled.
     // This behavior was changed since https://github.com/ggerganov/llama.cpp/pull/1995.
     enum ggml_task_type {
-        GGML_TASK_INIT = 0,
-        GGML_TASK_COMPUTE,
-        GGML_TASK_FINALIZE,
+        GGML_TASK_TYPE_INIT = 0,
+        GGML_TASK_TYPE_COMPUTE,
+        GGML_TASK_TYPE_FINALIZE,
     };
 
     struct ggml_compute_params {
@@ -1649,8 +1649,8 @@ extern "C" {
 
     // sort rows
     enum ggml_sort_order {
-        GGML_SORT_ASC,
-        GGML_SORT_DESC,
+        GGML_SORT_ORDER_ASC,
+        GGML_SORT_ORDER_DESC,
     };
 
     GGML_API struct ggml_tensor * ggml_argsort(
@@ -1943,8 +1943,8 @@ extern "C" {
 
     // optimization methods
     enum ggml_opt_type {
-        GGML_OPT_ADAM,
-        GGML_OPT_LBFGS,
+        GGML_OPT_TYPE_ADAM,
+        GGML_OPT_TYPE_LBFGS,
     };
 
     // linesearch methods
@@ -1958,12 +1958,12 @@ extern "C" {
 
     // optimization return values
     enum ggml_opt_result {
-        GGML_OPT_OK = 0,
-        GGML_OPT_DID_NOT_CONVERGE,
-        GGML_OPT_NO_CONTEXT,
-        GGML_OPT_INVALID_WOLFE,
-        GGML_OPT_FAIL,
-        GGML_OPT_CANCEL,
+        GGML_OPT_RESULT_OK = 0,
+        GGML_OPT_RESULT_DID_NOT_CONVERGE,
+        GGML_OPT_RESULT_NO_CONTEXT,
+        GGML_OPT_RESULT_INVALID_WOLFE,
+        GGML_OPT_RESULT_FAIL,
+        GGML_OPT_RESULT_CANCEL,
 
         GGML_LINESEARCH_FAIL = -128,
         GGML_LINESEARCH_MINIMUM_STEP,
diff --git a/llama.cpp b/llama.cpp
index 1f6b6cff4..acd9be08a 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -850,9 +850,9 @@ struct LLM_TN {
 //
 
 static std::map LLAMA_ROPE_SCALING_TYPES = {
-    { LLAMA_ROPE_SCALING_NONE, "none" },
-    { LLAMA_ROPE_SCALING_LINEAR, "linear" },
-    { LLAMA_ROPE_SCALING_YARN, "yarn" },
+    { LLAMA_ROPE_SCALING_TYPE_NONE, "none" },
+    { LLAMA_ROPE_SCALING_TYPE_LINEAR, "linear" },
+    { LLAMA_ROPE_SCALING_TYPE_YARN, "yarn" },
 };
 
 static int32_t llama_rope_scaling_type_from_string(const std::string & name) {
@@ -862,7 +862,7 @@ static int32_t llama_rope_scaling_type_from_string(const std::string & name) {
         }
     }
 
-    return LLAMA_ROPE_SCALING_UNSPECIFIED;
+    return LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED;
 }
 
 static std::string gguf_data_to_str(enum gguf_type type, const void * data, int i) {
@@ -1580,7 +1580,7 @@ struct llama_hparams {
     bool causal_attn = true;
     bool need_kq_pos = false;
 
-    uint32_t pooling_type = LLAMA_POOLING_NONE;
+    uint32_t pooling_type = LLAMA_POOLING_TYPE_NONE;
 
     bool operator!=(const llama_hparams & other) const {
         if (this->vocab_only != other.vocab_only) return true;
@@ -2345,9 +2345,9 @@ namespace GGUFMeta
{ static const char * override_type_to_str(const llama_model_kv_override_type ty) { switch (ty) { - case LLAMA_KV_OVERRIDE_BOOL: return "bool"; - case LLAMA_KV_OVERRIDE_INT: return "int"; - case LLAMA_KV_OVERRIDE_FLOAT: return "float"; + case LLAMA_KV_OVERRIDE_TYPE_BOOL: return "bool"; + case LLAMA_KV_OVERRIDE_TYPE_INT: return "int"; + case LLAMA_KV_OVERRIDE_TYPE_FLOAT: return "float"; } return "unknown"; } @@ -2358,13 +2358,13 @@ namespace GGUFMeta { LLAMA_LOG_INFO("%s: Using metadata override (%5s) '%s' = ", __func__, override_type_to_str(override->tag), override->key); switch (override->tag) { - case LLAMA_KV_OVERRIDE_BOOL: { + case LLAMA_KV_OVERRIDE_TYPE_BOOL: { LLAMA_LOG_INFO("%s\n", override->bool_value ? "true" : "false"); } break; - case LLAMA_KV_OVERRIDE_INT: { + case LLAMA_KV_OVERRIDE_TYPE_INT: { LLAMA_LOG_INFO("%" PRId64 "\n", override->int_value); } break; - case LLAMA_KV_OVERRIDE_FLOAT: { + case LLAMA_KV_OVERRIDE_TYPE_FLOAT: { LLAMA_LOG_INFO("%.6f\n", override->float_value); } break; default: @@ -2383,7 +2383,7 @@ namespace GGUFMeta { template static typename std::enable_if::value, bool>::type try_override(OT & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_BOOL, override)) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_BOOL, override)) { target = override->bool_value; return true; } @@ -2393,7 +2393,7 @@ namespace GGUFMeta { template static typename std::enable_if::value && std::is_integral::value, bool>::type try_override(OT & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_INT, override)) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_INT, override)) { target = override->int_value; return true; } @@ -2403,7 +2403,7 @@ namespace GGUFMeta { template static typename std::enable_if::value, bool>::type try_override(T & target, const struct llama_model_kv_override *override) { - if (validate_override(LLAMA_KV_OVERRIDE_FLOAT, override)) { + if (validate_override(LLAMA_KV_OVERRIDE_TYPE_FLOAT, override)) { target = override->float_value; return true; } @@ -2999,7 +2999,7 @@ static void llm_load_hparams( std::string rope_scaling("linear"); ml.get_key(LLM_KV_ROPE_SCALING_TYPE, rope_scaling, false); hparams.rope_scaling_type_train = llama_rope_scaling_type_from_string(rope_scaling); - GGML_ASSERT(hparams.rope_scaling_type_train != LLAMA_ROPE_SCALING_UNSPECIFIED); + GGML_ASSERT(hparams.rope_scaling_type_train != LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED); // rope_freq_scale (inverse of the kv) is optional float ropescale = 0.0f; @@ -3643,7 +3643,7 @@ static bool llm_load_tensors( model.buft_layer[i] = llama_default_buffer_type_cpu(true); } - if (split_mode == LLAMA_SPLIT_LAYER) { + if (split_mode == LLAMA_SPLIT_MODE_LAYER) { // calculate the split points int device_count = llama_get_device_count(); bool all_zero = tensor_split == nullptr || std::all_of(tensor_split, tensor_split + device_count, [](float x) { return x == 0.0f; }); @@ -3682,10 +3682,10 @@ static bool llm_load_tensors( } } else { ggml_backend_buffer_type_t split_buft; - if (split_mode == LLAMA_SPLIT_ROW) { + if (split_mode == LLAMA_SPLIT_MODE_ROW) { split_buft = llama_default_buffer_type_split(main_gpu, tensor_split); } else { - // LLAMA_SPLIT_NONE or LLAMA_SPLIT_LAYER in backends where it is not supported + // LLAMA_SPLIT_MODE_NONE or LLAMA_SPLIT_MODE_LAYER in backends where it is not supported split_buft = llama_default_buffer_type_offload(main_gpu); } // assign the repeating layers @@ -5070,7 +5070,7 @@ struct 
llm_build_context { kv_head (worst_case ? n_ctx - n_tokens : kv_self.head), n_orig_ctx (cparams.n_yarn_orig_ctx), do_rope_shift (worst_case || kv_self.has_shift), - pooling_type (cparams.do_pooling ? hparams.pooling_type : (uint32_t)LLAMA_POOLING_NONE), + pooling_type (cparams.do_pooling ? hparams.pooling_type : (uint32_t)LLAMA_POOLING_TYPE_NONE), cb (cb), buf_compute_meta (lctx.buf_compute_meta) { // all initializations should be done in init() @@ -6050,12 +6050,12 @@ struct llm_build_context { cur = inpL; // pooling layer - if (pooling_type == LLAMA_POOLING_MEAN) { + if (pooling_type == LLAMA_POOLING_TYPE_MEAN) { cur = ggml_mul_mat(ctx0, ggml_cont(ctx0, ggml_transpose(ctx0, cur)), inp_mean); - } else if (pooling_type == LLAMA_POOLING_CLS) { + } else if (pooling_type == LLAMA_POOLING_TYPE_CLS) { cur = ggml_get_rows(ctx0, cur, inp_cls); } else { - GGML_ASSERT(pooling_type == LLAMA_POOLING_NONE && "Invalid pooling type"); + GGML_ASSERT(pooling_type == LLAMA_POOLING_TYPE_NONE && "Invalid pooling type"); } cb(cur, "result_embd", -1); @@ -7754,7 +7754,7 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_MEAN) { + if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_TYPE_MEAN) { const int64_t n_tokens = batch.n_tokens; GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_mean->buffer)); @@ -7782,7 +7782,7 @@ static void llama_set_inputs(llama_context & lctx, const llama_batch & batch) { } } - if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_CLS) { + if (cparams.do_pooling && hparams.pooling_type == LLAMA_POOLING_TYPE_CLS) { const int64_t n_tokens = batch.n_tokens; GGML_ASSERT(ggml_backend_buffer_is_host(lctx.inp_cls->buffer)); @@ -11351,7 +11351,7 @@ static int llama_apply_lora_from_file_internal( struct llama_model_params llama_model_default_params() { struct llama_model_params result = { /*.n_gpu_layers =*/ 0, - /*.split_mode =*/ LLAMA_SPLIT_LAYER, + /*.split_mode =*/ LLAMA_SPLIT_MODE_LAYER, /*.main_gpu =*/ 0, /*.tensor_split =*/ nullptr, /*.progress_callback =*/ nullptr, @@ -11377,7 +11377,7 @@ struct llama_context_params llama_context_default_params() { /*.n_batch =*/ 512, /*.n_threads =*/ GGML_DEFAULT_N_THREADS, // TODO: better default /*.n_threads_batch =*/ GGML_DEFAULT_N_THREADS, - /*.rope_scaling_type =*/ LLAMA_ROPE_SCALING_UNSPECIFIED, + /*.rope_scaling_type =*/ LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED, /*.rope_freq_base =*/ 0.0f, /*.rope_freq_scale =*/ 0.0f, /*.yarn_ext_factor =*/ -1.0f, @@ -11565,16 +11565,16 @@ struct llama_context * llama_new_context_with_model( cparams.cb_eval_user_data = params.cb_eval_user_data; auto rope_scaling_type = params.rope_scaling_type; - if (rope_scaling_type == LLAMA_ROPE_SCALING_UNSPECIFIED) { + if (rope_scaling_type == LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED) { rope_scaling_type = hparams.rope_scaling_type_train; } - if (rope_scaling_type == LLAMA_ROPE_SCALING_NONE) { + if (rope_scaling_type == LLAMA_ROPE_SCALING_TYPE_NONE) { cparams.rope_freq_scale = 1.0f; // never scale if scaling type is none } if (cparams.yarn_ext_factor < 0.0f) { // negative indicates 'not set' - cparams.yarn_ext_factor = rope_scaling_type == LLAMA_ROPE_SCALING_YARN ? 1.0f : 0.0f; + cparams.yarn_ext_factor = rope_scaling_type == LLAMA_ROPE_SCALING_TYPE_YARN ? 
1.0f : 0.0f; } if (params.seed == LLAMA_DEFAULT_SEED) { @@ -11608,8 +11608,8 @@ struct llama_context * llama_new_context_with_model( } #elif defined(GGML_USE_CUBLAS) if (model->n_gpu_layers > 0) { - // with split_mode LLAMA_SPLIT_NONE or LLAMA_SPLIT_ROW, only the main GPU backend is used - if (model->split_mode == LLAMA_SPLIT_NONE || model->split_mode == LLAMA_SPLIT_ROW) { + // with split_mode LLAMA_SPLIT_MODE_NONE or LLAMA_SPLIT_MODE_ROW, only the main GPU backend is used + if (model->split_mode == LLAMA_SPLIT_MODE_NONE || model->split_mode == LLAMA_SPLIT_MODE_ROW) { ggml_backend_t backend = ggml_backend_cuda_init(model->main_gpu); if (backend == nullptr) { LLAMA_LOG_ERROR("%s: failed to initialize CUDA%d backend\n", __func__, model->main_gpu); @@ -11618,7 +11618,7 @@ struct llama_context * llama_new_context_with_model( } ctx->backends.push_back(backend); } else { - // LLAMA_SPLIT_LAYER requires a backend for each GPU + // LLAMA_SPLIT_MODE_LAYER requires a backend for each GPU for (int device = 0; device < ggml_backend_cuda_get_device_count(); ++device) { ggml_backend_t backend = ggml_backend_cuda_init(device); if (backend == nullptr) { diff --git a/llama.h b/llama.h index 889edf4d9..947284ea2 100644 --- a/llama.h +++ b/llama.h @@ -109,23 +109,23 @@ extern "C" { }; enum llama_rope_scaling_type { - LLAMA_ROPE_SCALING_UNSPECIFIED = -1, - LLAMA_ROPE_SCALING_NONE = 0, - LLAMA_ROPE_SCALING_LINEAR = 1, - LLAMA_ROPE_SCALING_YARN = 2, - LLAMA_ROPE_SCALING_MAX_VALUE = LLAMA_ROPE_SCALING_YARN, + LLAMA_ROPE_SCALING_TYPE_UNSPECIFIED = -1, + LLAMA_ROPE_SCALING_TYPE_NONE = 0, + LLAMA_ROPE_SCALING_TYPE_LINEAR = 1, + LLAMA_ROPE_SCALING_TYPE_YARN = 2, + LLAMA_ROPE_SCALING_TYPE_MAX_VALUE = LLAMA_ROPE_SCALING_TYPE_YARN, }; enum llama_pooling_type { - LLAMA_POOLING_NONE = 0, - LLAMA_POOLING_MEAN = 1, - LLAMA_POOLING_CLS = 2, + LLAMA_POOLING_TYPE_NONE = 0, + LLAMA_POOLING_TYPE_MEAN = 1, + LLAMA_POOLING_TYPE_CLS = 2, }; enum llama_split_mode { - LLAMA_SPLIT_NONE = 0, // single GPU - LLAMA_SPLIT_LAYER = 1, // split layers and KV across GPUs - LLAMA_SPLIT_ROW = 2, // split rows across GPUs + LLAMA_SPLIT_MODE_NONE = 0, // single GPU + LLAMA_SPLIT_MODE_LAYER = 1, // split layers and KV across GPUs + LLAMA_SPLIT_MODE_ROW = 2, // split rows across GPUs }; typedef struct llama_token_data { @@ -173,9 +173,9 @@ extern "C" { } llama_batch; enum llama_model_kv_override_type { - LLAMA_KV_OVERRIDE_INT, - LLAMA_KV_OVERRIDE_FLOAT, - LLAMA_KV_OVERRIDE_BOOL, + LLAMA_KV_OVERRIDE_TYPE_INT, + LLAMA_KV_OVERRIDE_TYPE_FLOAT, + LLAMA_KV_OVERRIDE_TYPE_BOOL, }; struct llama_model_kv_override { diff --git a/tests/test-backend-ops.cpp b/tests/test-backend-ops.cpp index f8574588b..24d12ef14 100644 --- a/tests/test-backend-ops.cpp +++ b/tests/test-backend-ops.cpp @@ -1264,7 +1264,7 @@ struct test_argsort : public test_case { test_argsort(ggml_type type = GGML_TYPE_F32, std::array ne = {16, 10, 10, 10}, - ggml_sort_order order = GGML_SORT_ASC) + ggml_sort_order order = GGML_SORT_ORDER_ASC) : type(type), ne(ne), order(order) {} ggml_tensor * build_graph(ggml_context * ctx) override { @@ -2116,7 +2116,7 @@ static bool test_backend(ggml_backend_t backend, test_mode mode, const char * op test_cases.emplace_back(new test_concat(GGML_TYPE_F32)); test_cases.emplace_back(new test_concat(GGML_TYPE_I32)); - for (ggml_sort_order order : {GGML_SORT_ASC, GGML_SORT_DESC}) { + for (ggml_sort_order order : {GGML_SORT_ORDER_ASC, GGML_SORT_ORDER_DESC}) { test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {8, 1, 1, 1}, order)); 
test_cases.emplace_back(new test_argsort(GGML_TYPE_F32, {16, 10, 10, 10}, order)); } diff --git a/tests/test-opt.cpp b/tests/test-opt.cpp index 2c9997fca..546ca230b 100644 --- a/tests/test-opt.cpp +++ b/tests/test-opt.cpp @@ -118,7 +118,7 @@ int main(void) { const float fe = ggml_get_f32_1d(e, 0); printf("%s: e = %.4f\n", __func__, fe); - struct ggml_opt_params opt_params = ggml_opt_default_params(GGML_OPT_ADAM); + struct ggml_opt_params opt_params = ggml_opt_default_params(GGML_OPT_TYPE_ADAM); ggml_opt(ctx, opt_params, e); From 12894088170f62e4cad4f8d6a3043c185b414bab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Gryta?= Date: Sun, 25 Feb 2024 11:53:11 +0100 Subject: [PATCH 857/859] cmake : fix compilation for Android armeabi-v7a (#5702) --- CMakeLists.txt | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 3c4629001..48880f720 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -936,10 +936,16 @@ if (CMAKE_OSX_ARCHITECTURES STREQUAL "arm64" OR CMAKE_GENERATOR_PLATFORM_LWR STR list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access) endif() if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv7") - # Raspberry Pi 2 - list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access -funsafe-math-optimizations) + if ("${CMAKE_SYSTEM_NAME}" STREQUAL "Android") + # Android armeabi-v7a + list(APPEND ARCH_FLAGS -mfpu=neon-vfpv4 -mno-unaligned-access -funsafe-math-optimizations) + else() + # Raspberry Pi 2 + list(APPEND ARCH_FLAGS -mfpu=neon-fp-armv8 -mno-unaligned-access -funsafe-math-optimizations) + endif() endif() if (${CMAKE_SYSTEM_PROCESSOR} MATCHES "armv8") + # Android arm64-v8a # Raspberry Pi 3, 4, Zero 2 (32-bit) list(APPEND ARCH_FLAGS -mno-unaligned-access) endif() From d52d7819b8ced70c642a88a59da8c78208dc58ec Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 25 Feb 2024 13:49:43 +0100 Subject: [PATCH 858/859] server: concurrency fix + monitoring - add /metrics prometheus compatible endpoint (#5708) * server: monitoring - add /metrics prometheus compatible endpoint * server: concurrency issue, when 2 task are waiting for results, only one call thread is notified * server: metrics - move to a dedicated struct --- examples/server/README.md | 13 ++ examples/server/server.cpp | 150 +++++++++++++++++- examples/server/tests/features/environment.py | 2 + examples/server/tests/features/server.feature | 2 + examples/server/tests/features/steps/steps.py | 27 ++++ examples/server/tests/requirements.txt | 1 + examples/server/utils.hpp | 4 +- 7 files changed, 191 insertions(+), 8 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 0c43ac4c9..2129f7fb2 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -41,6 +41,7 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` - `-n, --n-predict`: Set the maximum tokens to predict (default: -1) - `--slots-endpoint-disable`: To disable slots state monitoring endpoint. Slots state may contain user data, prompts included. +- `--metrics`: enable prometheus `/metrics` compatible endpoint (default: disabled) - `--chat-template JINJA_TEMPLATE`: Set custom jinja chat template. This parameter accepts a string, not a file name (default: template taken from model's metadata). 
We only support [some pre-defined templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) ## Build @@ -457,6 +458,18 @@ Notice that each `probs` is an array of length `n_probs`. ] ``` +- **GET** `/metrics`: [Prometheus](https://prometheus.io/) compatible metrics exporter endpoint if `--metrics` is enabled: + +Available metrics: +- `llamacpp:prompt_tokens_total`: Number of prompt tokens processed. +- `llamacpp:tokens_predicted_total`: Number of generation tokens processed. +- `llamacpp:prompt_tokens_seconds`: Average prompt throughput in tokens/s. +- `llamacpp:predicted_tokens_seconds`: Average generation throughput in tokens/s. +- `llamacpp:kv_cache_usage_ratio`: KV-cache usage. 1 means 100 percent usage. +- `llamacpp:kv_cache_tokens`: KV-cache tokens. +- `llamacpp:requests_processing`: Number of request processing. +- `llamacpp:requests_deferred`: Number of request deferred. + ## More examples ### Change system prompt on runtime diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 780862ef6..811495915 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -43,6 +43,7 @@ struct server_params int32_t read_timeout = 600; int32_t write_timeout = 600; bool slots_endpoint = true; + bool metrics_endpoint = false; }; bool server_verbose = false; @@ -310,6 +311,39 @@ struct llama_client_slot } }; +struct llama_metrics { + uint64_t n_prompt_tokens_processed_total = 0; + uint64_t n_tokens_predicted_total = 0; + + uint64_t n_prompt_tokens_processed = 0; + uint64_t t_prompt_processing = 0; + + uint64_t n_tokens_predicted = 0; + uint64_t t_tokens_generation = 0; + + + void on_prompt_eval(const llama_client_slot &slot) { + n_prompt_tokens_processed_total += slot.num_prompt_tokens_processed; + + n_prompt_tokens_processed += slot.num_prompt_tokens_processed; + t_prompt_processing += slot.t_prompt_processing; + } + + void on_prediction(const llama_client_slot &slot) { + n_tokens_predicted_total += slot.n_decoded; + + n_tokens_predicted += slot.n_decoded; + t_tokens_generation += slot.t_token_generation; + } + + void reset_bucket() { + n_prompt_tokens_processed = 0; + t_prompt_processing = 0; + n_tokens_predicted = 0; + t_tokens_generation = 0; + } +}; + struct llama_server_context { llama_model *model = nullptr; @@ -344,6 +378,8 @@ struct llama_server_context llama_server_queue queue_tasks; llama_server_response queue_results; + llama_metrics metrics; + ~llama_server_context() { if (ctx) @@ -1404,7 +1440,7 @@ struct llama_server_context case TASK_TYPE_NEXT_RESPONSE: { // do nothing } break; - case TASK_TYPE_SLOTS_DATA: { + case TASK_TYPE_METRICS: { json slots_data = json::array(); int n_idle_slots = 0; int n_processing_slots = 0; @@ -1438,10 +1474,24 @@ struct llama_server_context res.stop = true; res.error = false; res.result_json = { - { "idle", n_idle_slots }, - { "processing", n_processing_slots }, - { "slots", slots_data } + { "idle", n_idle_slots }, + { "processing", n_processing_slots }, + { "deferred", queue_tasks.queue_tasks_deferred.size() }, + + { "n_prompt_tokens_processed_total", metrics.n_prompt_tokens_processed_total}, + { "n_tokens_predicted_total", metrics.n_tokens_predicted_total}, + + { "n_prompt_tokens_processed", metrics.n_prompt_tokens_processed}, + { "t_prompt_processing", metrics.t_prompt_processing}, + { "n_tokens_predicted", metrics.n_tokens_predicted}, + { "t_tokens_generation", metrics.t_tokens_generation}, + + { "kv_cache_tokens_count", llama_get_kv_cache_token_count(ctx)}, + { 
"kv_cache_used_cells", llama_get_kv_cache_used_cells(ctx)}, + + { "slots", slots_data }, }; + metrics.reset_bucket(); queue_results.send(res); } break; } @@ -1849,6 +1899,7 @@ struct llama_server_context { slot.t_start_genereration = ggml_time_us(); slot.t_prompt_processing = (slot.t_start_genereration - slot.t_start_process_prompt) / 1e3; + metrics.on_prompt_eval(slot); } llama_token_data_array cur_p = { slot.ctx_sampling->cur.data(), slot.ctx_sampling->cur.size(), false }; @@ -1871,6 +1922,7 @@ struct llama_server_context slot.release(); slot.print_timings(); send_final_response(slot); + metrics.on_prediction(slot); } slot.i_batch = -1; @@ -1955,6 +2007,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); printf(" --log-disable disables logging to a file.\n"); printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); + printf(" --metrics enable prometheus compatible metrics endpoint (default: %s).\n", sparams.metrics_endpoint ? "enabled" : "disabled"); printf("\n"); printf(" -n, --n-predict maximum tokens to predict (default: %d)\n", params.n_predict); printf(" --override-kv KEY=TYPE:VALUE\n"); @@ -2414,6 +2467,10 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, { sparams.slots_endpoint = false; } + else if (arg == "--metrics") + { + sparams.metrics_endpoint = true; + } else if (arg == "--chat-template") { if (++i >= argc) @@ -2621,7 +2678,7 @@ int main(int argc, char **argv) // request slots data using task queue task_server task; task.id = llama.queue_tasks.get_new_id(); - task.type = TASK_TYPE_SLOTS_DATA; + task.type = TASK_TYPE_METRICS; task.target_id = -1; llama.queue_results.add_waiting_task_id(task.id); @@ -2668,7 +2725,7 @@ int main(int argc, char **argv) // request slots data using task queue task_server task; task.id = llama.queue_tasks.get_new_id(); - task.type = TASK_TYPE_SLOTS_DATA; + task.type = TASK_TYPE_METRICS; task.target_id = -1; llama.queue_results.add_waiting_task_id(task.id); @@ -2683,6 +2740,87 @@ int main(int argc, char **argv) }); } + if (sparams.metrics_endpoint) { + svr.Get("/metrics", [&](const httplib::Request&, httplib::Response& res) { + // request slots data using task queue + task_server task; + task.id = llama.queue_tasks.get_new_id(); + task.type = TASK_TYPE_METRICS; + task.target_id = -1; + + llama.queue_results.add_waiting_task_id(task.id); + llama.queue_tasks.post(task); + + // get the result + task_result result = llama.queue_results.recv(task.id); + llama.queue_results.remove_waiting_task_id(task.id); + + json data = result.result_json; + + uint64_t n_prompt_tokens_processed = data["n_prompt_tokens_processed"]; + uint64_t t_prompt_processing = data["t_prompt_processing"]; + + uint64_t n_tokens_predicted = data["n_tokens_predicted"]; + uint64_t t_tokens_generation = data["t_tokens_generation"]; + + int32_t kv_cache_used_cells = data["kv_cache_used_cells"]; + + // metrics definition: https://prometheus.io/docs/practices/naming/#metric-names + json all_metrics_def = json { + {"counter", {{ + {"name", "prompt_tokens_total"}, + {"help", "Number of prompt tokens processed."}, + {"value", data["n_prompt_tokens_processed_total"]} + }, { + {"name", "tokens_predicted_total"}, + {"help", "Number of generation tokens processed."}, + {"value", data["n_tokens_predicted_total"]} + }}}, + {"gauge", {{ + {"name", "prompt_tokens_seconds"}, + {"help", "Average prompt throughput in tokens/s."}, + {"value", 
n_prompt_tokens_processed ? 1e3 / t_prompt_processing * n_prompt_tokens_processed : 0} + },{ + {"name", "predicted_tokens_seconds"}, + {"help", "Average generation throughput in tokens/s."}, + {"value", n_tokens_predicted ? 1e3 / t_tokens_generation * n_tokens_predicted : 0} + },{ + {"name", "kv_cache_usage_ratio"}, + {"help", "KV-cache usage. 1 means 100 percent usage."}, + {"value", 1. * kv_cache_used_cells / params.n_ctx} + },{ + {"name", "kv_cache_tokens"}, + {"help", "KV-cache tokens."}, + {"value", data["kv_cache_tokens_count"]} + },{ + {"name", "requests_processing"}, + {"help", "Number of request processing."}, + {"value", data["processing"]} + },{ + {"name", "requests_deferred"}, + {"help", "Number of request deferred."}, + {"value", data["deferred"]} + }}} + }; + + std::stringstream prometheus; + for (const auto& el : all_metrics_def.items()) { + const auto& type = el.key(); + const auto& metrics_def = el.value(); + for (const auto& metric_def : metrics_def) { + std::string name = metric_def["name"]; + std::string help = metric_def["help"]; + prometheus << "# HELP llamacpp:" << name << " " << help << "\n" + << "# TYPE llamacpp:" << name << " " << type << "\n" + << "llamacpp:" << name << " " << metric_def["value"] << "\n"; + } + } + + res.set_content(prometheus.str(), "text/plain; version=0.0.4"); + res.status = 200; // HTTP OK + }); + } + svr.set_logger(log_server_request); svr.set_exception_handler([](const httplib::Request &, httplib::Response &res, std::exception_ptr ep) diff --git a/examples/server/tests/features/environment.py b/examples/server/tests/features/environment.py index 13cc84101..09e826747 100644 --- a/examples/server/tests/features/environment.py +++ b/examples/server/tests/features/environment.py @@ -16,6 +16,8 @@ def before_scenario(context, scenario): def after_scenario(context, scenario): + if context.server_process is None: + return if scenario.status == "failed": if 'GITHUB_ACTIONS' in os.environ: print(f"\x1b[33;101mSCENARIO FAILED: {scenario.name} server logs:\x1b[0m\n\n") diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index 5f81d256a..0139f89d8 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -13,6 +13,7 @@ Feature: llama.cpp server And 1 slots And embeddings extraction And 32 server max tokens to predict + And prometheus compatible metrics exposed Then the server is starting Then the server is healthy @@ -25,6 +26,7 @@ Feature: llama.cpp server And max tokens to predict And a completion request with no api error Then tokens are predicted matching + And prometheus metrics are exposed Examples: Prompts | prompt | n_predict | re_content | n_predicted | diff --git a/examples/server/tests/features/steps/steps.py b/examples/server/tests/features/steps/steps.py index 9c825fdbc..051fd440c 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -13,6 +13,7 @@ import aiohttp import openai from behave import step from behave.api.async_step import async_run_until_complete +from prometheus_client import parser @step(u"a server listening on {server_fqdn}:{server_port}") @@ -34,6 +35,8 @@ def step_server_config(context, server_fqdn, server_port): context.server_api_key = None context.server_continuous_batching = False context.server_embeddings = False + context.server_metrics = False + context.server_process = None context.server_seed = None context.user_api_key = None @@ -82,6 +85,11 @@ 
def step_server_embeddings(context): context.server_embeddings = True +@step(u'prometheus compatible metrics exposed') +def step_server_metrics(context): + context.server_metrics = True + + @step(u"the server is starting") def step_start_server(context): start_server_background(context) @@ -424,6 +432,23 @@ def step_check_options_header_value(context, cors_header, cors_header_value): assert context.options_response.headers[cors_header] == cors_header_value +@step(u'prometheus metrics are exposed') +@async_run_until_complete +async def step_prometheus_metrics_exported(context): + async with aiohttp.ClientSession() as session: + async with await session.get(f'{context.base_url}/metrics') as metrics_response: + assert metrics_response.status == 200 + assert metrics_response.headers['Content-Type'] == "text/plain; version=0.0.4" + metrics_raw = await metrics_response.text() + metric_exported = False + for metric in parser.text_string_to_metric_families(metrics_raw): + match metric.name: + case "llamacpp:kv_cache_usage_ratio": + assert len(metric.samples) > 0 + metric_exported = True + assert metric_exported, "No metrics exported" + + async def concurrent_requests(context, f_completion, *args, **kwargs): n_prompts = len(context.prompts) if context.debug: @@ -753,6 +778,8 @@ def start_server_background(context): server_args.append('--cont-batching') if context.server_embeddings: server_args.append('--embedding') + if context.server_metrics: + server_args.append('--metrics') if context.model_alias is not None: server_args.extend(['--alias', context.model_alias]) if context.n_ctx is not None: diff --git a/examples/server/tests/requirements.txt b/examples/server/tests/requirements.txt index 3e51b12dc..334fa4a70 100644 --- a/examples/server/tests/requirements.txt +++ b/examples/server/tests/requirements.txt @@ -1,3 +1,4 @@ aiohttp~=3.9.3 behave~=1.2.6 openai~=0.25.0 +prometheus-client~=0.20.0 diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 88545eb69..71cc5b0b8 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -50,7 +50,7 @@ enum task_type { TASK_TYPE_COMPLETION, TASK_TYPE_CANCEL, TASK_TYPE_NEXT_RESPONSE, - TASK_TYPE_SLOTS_DATA + TASK_TYPE_METRICS }; struct task_server { @@ -441,7 +441,7 @@ struct llama_server_response { { LOG_VERBOSE("queue_results.push_back", {}); queue_results.push_back(result); - condition_results.notify_one(); + condition_results.notify_all(); return; } } From 930b1780269a69948d106e2d1b838ab7661f679a Mon Sep 17 00:00:00 2001 From: Pierrick Hymbert Date: Sun, 25 Feb 2024 13:50:32 +0100 Subject: [PATCH 859/859] server: logs - unified format and --log-format option (#5700) * server: logs - always use JSON logger, add add thread_id in message, log task_id and slot_id * server : skip GH copilot requests from logging * server : change message format of server_log() * server : no need to repeat log in comment * server : log style consistency * server : fix compile warning * server : fix tests regex patterns on M2 Ultra * server: logs: PR feedback on log level * server: logs: allow to choose log format in json or plain text * server: tests: output server logs in text * server: logs switch init logs to server logs macro * server: logs ensure value json value does not raised error * server: logs reduce level VERBOSE to VERB to max 4 chars * server: logs lower case as other log messages * server: logs avoid static in general Co-authored-by: Georgi Gerganov * server: logs PR feedback: change text log format to: LEVEL [function_name] message | 
additional=data --------- Co-authored-by: Georgi Gerganov --- examples/server/README.md | 4 +- examples/server/server.cpp | 218 ++++++++++++++---- examples/server/tests/README.md | 1 + examples/server/tests/features/server.feature | 6 +- examples/server/tests/features/steps/steps.py | 2 + examples/server/utils.hpp | 80 ++++--- 6 files changed, 231 insertions(+), 80 deletions(-) diff --git a/examples/server/README.md b/examples/server/README.md index 2129f7fb2..cb3fd6054 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -39,10 +39,12 @@ see https://github.com/ggerganov/llama.cpp/issues/1437 - `--mmproj MMPROJ_FILE`: Path to a multimodal projector file for LLaVA. - `--grp-attn-n`: Set the group attention factor to extend context size through self-extend(default: 1=disabled), used together with group attention width `--grp-attn-w` - `--grp-attn-w`: Set the group attention width to extend context size through self-extend(default: 512), used together with group attention factor `--grp-attn-n` -- `-n, --n-predict`: Set the maximum tokens to predict (default: -1) +- `-n N, --n-predict N`: Set the maximum tokens to predict (default: -1) - `--slots-endpoint-disable`: To disable slots state monitoring endpoint. Slots state may contain user data, prompts included. - `--metrics`: enable prometheus `/metrics` compatible endpoint (default: disabled) - `--chat-template JINJA_TEMPLATE`: Set custom jinja chat template. This parameter accepts a string, not a file name (default: template taken from model's metadata). We only support [some pre-defined templates](https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template) +- `--log-disable`: Output logs to stdout only, default: enabled. +- `--log-format FORMAT`: Define the log output to FORMAT: json or text (default: json) ## Build diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 811495915..d970202d2 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -47,6 +47,7 @@ struct server_params }; bool server_verbose = false; +bool server_log_json = true; static size_t common_part(const std::vector &a, const std::vector &b) { @@ -302,12 +303,43 @@ struct llama_client_slot } void print_timings() const { - LOG_TEE("\n"); - LOG_TEE("%s: prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)\n", - __func__, t_prompt_processing, num_prompt_tokens_processed, t_prompt_processing / num_prompt_tokens_processed, 1e3 / t_prompt_processing * num_prompt_tokens_processed); - LOG_TEE("%s: eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)\n", - __func__, t_token_generation, n_decoded,t_token_generation / n_decoded, 1e3 / t_token_generation * n_decoded); - LOG_TEE("%s: total time = %10.2f ms\n", __func__, t_prompt_processing + t_token_generation); + char buffer[512]; + double t_token = t_prompt_processing / num_prompt_tokens_processed; + double n_tokens_second = 1e3 / t_prompt_processing * num_prompt_tokens_processed; + sprintf(buffer, "prompt eval time = %10.2f ms / %5d tokens (%8.2f ms per token, %8.2f tokens per second)", + t_prompt_processing, num_prompt_tokens_processed, + t_token, n_tokens_second); + LOG_INFO(buffer, { + {"slot_id", id}, + {"task_id", task_id}, + {"t_prompt_processing", t_prompt_processing}, + {"num_prompt_tokens_processed", num_prompt_tokens_processed}, + {"t_token", t_token}, + {"n_tokens_second", n_tokens_second}, + }); + + t_token = t_token_generation / n_decoded; + n_tokens_second = 1e3 / 
t_token_generation * n_decoded; + sprintf(buffer, "generation eval time = %10.2f ms / %5d runs (%8.2f ms per token, %8.2f tokens per second)", + t_token_generation, n_decoded, + t_token, n_tokens_second); + LOG_INFO(buffer, { + {"slot_id", id}, + {"task_id", task_id}, + {"t_token_generation", t_token_generation}, + {"n_decoded", n_decoded}, + {"t_token", t_token}, + {"n_tokens_second", n_tokens_second}, + }); + + sprintf(buffer, " total time = %10.2f ms", t_prompt_processing + t_token_generation); + LOG_INFO(buffer, { + {"slot_id", id}, + {"task_id", task_id}, + {"t_prompt_processing", t_prompt_processing}, + {"t_token_generation", t_token_generation}, + {"t_total", t_prompt_processing + t_token_generation}, + }); } }; @@ -399,7 +431,7 @@ struct llama_server_context params = params_; if (!params.mmproj.empty()) { multimodal = true; - LOG_TEE("Multi Modal Mode Enabled"); + LOG_INFO("Multi Modal Mode Enabled", {}); clp_ctx = clip_model_load(params.mmproj.c_str(), /*verbosity=*/ 1); if(clp_ctx == nullptr) { LOG_ERROR("unable to load clip model", {{"model", params.mmproj}}); @@ -452,7 +484,7 @@ struct llama_server_context const int32_t n_ctx_slot = n_ctx / params.n_parallel; - LOG_TEE("Available slots:\n"); + LOG_INFO("initializing slots", {{"n_slots", params.n_parallel}}); for (int i = 0; i < params.n_parallel; i++) { llama_client_slot slot; @@ -461,7 +493,10 @@ struct llama_server_context slot.n_ctx = n_ctx_slot; slot.n_predict = params.n_predict; - LOG_TEE(" -> Slot %i - max context: %i\n", slot.id, n_ctx_slot); + LOG_INFO("new slot", { + {"slot_id", slot.id}, + {"n_ctx_slot", slot.n_ctx} + }); const int ga_n = params.grp_attn_n; const int ga_w = params.grp_attn_w; @@ -471,7 +506,12 @@ struct llama_server_context GGML_ASSERT(ga_w % ga_n == 0 && "ga_w must be a multiple of ga_n"); // NOLINT //GGML_ASSERT(n_ctx_train % ga_w == 0 && "n_ctx_train must be a multiple of ga_w"); // NOLINT //GGML_ASSERT(n_ctx >= n_ctx_train * ga_n && "n_ctx must be at least n_ctx_train * ga_n"); // NOLINT - LOG_TEE(" -> Slot %i - self-extend: ga_n = %d, ga_w = %d\n", slot.id, ga_n, ga_w); + + LOG_INFO("slot self-extend", { + {"slot_id", slot.id}, + {"ga_n", ga_n}, + {"ga_w", ga_w} + }); } slot.ga_i = 0; @@ -765,10 +805,16 @@ struct llama_server_context img_sl.img_data = clip_image_u8_init(); if (!clip_image_load_from_bytes(image_buffer.data(), image_buffer.size(), img_sl.img_data)) { - LOG_TEE("slot %i - failed to load image [id: %i]\n", slot->id, img_sl.id); + LOG_ERROR("failed to load image", { + {"slot_id", slot->id}, + {"img_sl_id", img_sl.id} + }); return false; } - LOG_TEE("slot %i - loaded image\n", slot->id); + LOG_VERBOSE("image loaded", { + {"slot_id", slot->id}, + {"img_sl_id", img_sl.id} + }); img_sl.request_encode_image = true; slot->images.push_back(img_sl); } @@ -828,7 +874,10 @@ struct llama_server_context all_slots_are_idle = false; - LOG_TEE("slot %i is processing [task id: %i]\n", slot->id, slot->task_id); + LOG_INFO("slot is processing task", { + {"slot_id", slot->id}, + {"task_id", slot->task_id}, + }); return true; } @@ -1391,7 +1440,7 @@ struct llama_server_context if (slot == nullptr) { // if no slot is available, we defer this task for processing later - LOG_VERBOSE("no slot is available", {}); + LOG_VERBOSE("no slot is available", {{"task_id", task.id}}); queue_tasks.defer(task); break; } @@ -1467,7 +1516,17 @@ struct llama_server_context } slots_data.push_back(slot_data); } - LOG_TEE("task %i - slots data: idle=%i processing=%i\n", task.id, n_idle_slots, n_processing_slots); + 
LOG_INFO("slot data", { + {"task_id", task.id}, + {"n_idle_slots", n_idle_slots}, + {"n_processing_slots", n_processing_slots} + }); + LOG_VERBOSE("slot data", { + {"task_id", task.id}, + {"n_idle_slots", n_idle_slots}, + {"n_processing_slots", n_processing_slots}, + {"slots", slots_data} + }); task_result res; res.id = task.id; res.multitask_id = task.multitask_id; @@ -1519,7 +1578,7 @@ struct llama_server_context bool update_slots() { if (system_need_update) { - LOG_TEE("updating system prompt\n"); + LOG_INFO("updating system prompt", {}); update_system_prompt(); } @@ -1529,12 +1588,13 @@ struct llama_server_context { if (system_prompt.empty() && clean_kv_cache) { - LOG_TEE("all slots are idle and system prompt is empty, clear the KV cache\n"); + LOG_INFO("all slots are idle and system prompt is empty, clear the KV cache", {}); kv_cache_clear(); } return true; } + LOG_VERBOSE("posting NEXT_RESPONSE", {}); task_server task; task.type = TASK_TYPE_NEXT_RESPONSE; task.target_id = -1; @@ -1548,10 +1608,20 @@ struct llama_server_context { // Shift context const int n_keep = slot.params.n_keep + add_bos_token; - const int n_left = system_tokens.size() + slot.n_past - n_keep; + const int n_left = (int) system_tokens.size() + slot.n_past - n_keep; const int n_discard = n_left / 2; - LOG_TEE("slot %d: context shift - n_keep = %d, n_left = %d, n_discard = %d\n", slot.id, n_keep, n_left, n_discard); + LOG_INFO("slot context shift", { + {"slot_id", slot.id}, + {"task_id", slot.task_id}, + {"n_keep", n_keep}, + {"n_left", n_left}, + {"n_discard", n_discard}, + {"n_ctx", n_ctx}, + {"n_past", slot.n_past}, + {"n_system_tokens", system_tokens.size()}, + {"n_cache_tokens", slot.cache_tokens.size()} + }); llama_kv_cache_seq_rm (ctx, slot.id, n_keep , n_keep + n_discard); llama_kv_cache_seq_shift(ctx, slot.id, n_keep + n_discard, system_tokens.size() + slot.n_past, -n_discard); @@ -1565,17 +1635,12 @@ struct llama_server_context slot.n_past -= n_discard; slot.truncated = true; - - LOG_VERBOSE("context shift", { - { "n_ctx", n_ctx }, - { "n_keep", n_keep }, - { "n_left", n_left }, - }); } } } // decode any currently ongoing sequences + LOG_VERBOSE("decoding ongoing sequences", {}); for (auto & slot : slots) { // release the slot @@ -1585,7 +1650,15 @@ struct llama_server_context slot.command = NONE; slot.t_last_used = ggml_time_us(); - LOG_TEE("slot %d released (%d tokens in cache)\n", slot.id, (int) slot.cache_tokens.size()); + LOG_INFO("slot released", { + {"slot_id", slot.id}, + {"task_id", slot.task_id}, + {"n_ctx", n_ctx}, + {"n_past", slot.n_past}, + {"n_system_tokens", system_tokens.size()}, + {"n_cache_tokens", slot.cache_tokens.size()}, + {"truncated", slot.truncated} + }); queue_tasks.notify_slot_changed(); continue; @@ -1733,7 +1806,12 @@ struct llama_server_context slot.ga_i = ga_i; } - LOG_TEE("slot %d : in cache: %i tokens | to process: %i tokens\n", slot.id, slot.n_past, slot.num_prompt_tokens_processed); + LOG_INFO("slot progression", { + { "slot_id", slot.id }, + { "task_id", slot.task_id }, + { "n_past", slot.n_past }, + { "num_prompt_tokens_processed", slot.num_prompt_tokens_processed } + }); } slot.cache_tokens = prompt_tokens; @@ -1741,7 +1819,10 @@ struct llama_server_context if (slot.n_past == slot.num_prompt_tokens && slot.n_past > 0) { // we have to evaluate at least 1 token to generate logits. 
- LOG_TEE("slot %d : we have to evaluate at least 1 token to generate logits\n", slot.id); + LOG_INFO("we have to evaluate at least 1 token to generate logits", { + { "slot_id", slot.id }, + { "task_id", slot.task_id } + }); slot.n_past--; if (slot.ga_i > 0) { @@ -1749,9 +1830,13 @@ struct llama_server_context } } - LOG_TEE("slot %d : kv cache rm - [%d, end)\n", slot.id, (int) system_tokens.size() + slot.n_past); - - llama_kv_cache_seq_rm(ctx, slot.id, system_tokens.size() + slot.n_past, -1); + int p0 = (int) system_tokens.size() + slot.n_past; + LOG_INFO("kv cache rm [p0, end)", { + { "slot_id", slot.id }, + { "task_id", slot.task_id }, + { "p0", p0 } + }); + llama_kv_cache_seq_rm(ctx, slot.id, p0, -1); LOG_VERBOSE("prompt ingested", { {"n_past", slot.n_past}, @@ -1786,7 +1871,13 @@ struct llama_server_context if (has_images && !ingest_images(slot, n_batch)) { - LOG_TEE("failed processing images\n"); + LOG_ERROR("failed processing images", { + "slot_id", slot.id, + "task_id", slot.task_id, + }); + // FIXME @phymbert: to be properly tested + // early returning without changing the slot state will block the slot for ever + // no one at the moment is checking the return value return false; } @@ -1928,6 +2019,8 @@ struct llama_server_context slot.i_batch = -1; } } + + LOG_VERBOSE("slots updated", {}); return true; } @@ -2005,6 +2098,7 @@ static void server_print_usage(const char *argv0, const gpt_params ¶ms, printf(" -ctv TYPE, --cache-type-v TYPE\n"); printf(" KV cache data type for V (default: f16)\n"); printf(" --mmproj MMPROJ_FILE path to a multimodal projector file for LLaVA.\n"); + printf(" --log-format log output format: json or text (default: json)\n"); printf(" --log-disable disables logging to a file.\n"); printf(" --slots-endpoint-disable disables slots monitoring endpoint.\n"); printf(" --metrics enable prometheus compatible metrics endpoint (default: %s).\n", sparams.metrics_endpoint ? 
"enabled" : "disabled"); @@ -2458,6 +2552,27 @@ static void server_params_parse(int argc, char **argv, server_params &sparams, } params.mmproj = argv[i]; } + else if (arg == "--log-format") + { + if (++i >= argc) + { + invalid_param = true; + break; + } + if (std::strcmp(argv[i], "json") == 0) + { + server_log_json = true; + } + else if (std::strcmp(argv[i], "text") == 0) + { + server_log_json = false; + } + else + { + invalid_param = true; + break; + } + } else if (arg == "--log-disable") { log_set_target(stdout); @@ -2571,32 +2686,40 @@ static json format_partial_response( static json format_tokenizer_response(const std::vector &tokens) { - return json{ - {"tokens", tokens}}; + return json { + {"tokens", tokens} + }; } static json format_detokenized_response(std::string content) { - return json{ - {"content", content}}; + return json { + {"content", content} + }; } static void log_server_request(const httplib::Request &req, const httplib::Response &res) { + // skip GH copilot requests when using default port + if (req.path == "/v1/health" || req.path == "/v1/completions") + { + return; + } + LOG_INFO("request", { - {"remote_addr", req.remote_addr}, - {"remote_port", req.remote_port}, - {"status", res.status}, - {"method", req.method}, - {"path", req.path}, - {"params", req.params}, - }); + {"remote_addr", req.remote_addr}, + {"remote_port", req.remote_port}, + {"status", res.status}, + {"method", req.method}, + {"path", req.path}, + {"params", req.params}, + }); LOG_VERBOSE("request", { - {"request", req.body}, - {"response", res.body}, - }); + {"request", req.body}, + {"response", res.body}, + }); } struct token_translator @@ -2873,9 +2996,6 @@ int main(int argc, char **argv) // Set the base directory for serving static files svr.set_base_dir(sparams.public_path); - // to make it ctrl+clickable: - LOG_TEE("\nllama server listening at http://%s:%d\n\n", sparams.hostname.c_str(), sparams.port); - std::unordered_map log_data; log_data["hostname"] = sparams.hostname; log_data["port"] = std::to_string(sparams.port); diff --git a/examples/server/tests/README.md b/examples/server/tests/README.md index e44c5c286..0b9fdc4e7 100644 --- a/examples/server/tests/README.md +++ b/examples/server/tests/README.md @@ -32,6 +32,7 @@ It's possible to override some scenario steps values with environment variables: - `PORT` -> `context.server_port` to set the listening port of the server during scenario, default: `8080` - `LLAMA_SERVER_BIN_PATH` -> to change the server binary path, default: `../../../build/bin/server` - `DEBUG` -> "ON" to enable steps and server verbose mode `--verbose` + - `SERVER_LOG_FORMAT_JSON` -> if set switch server logs to json format ### Run @bug, @wip or @wrong_usage annotated scenario diff --git a/examples/server/tests/features/server.feature b/examples/server/tests/features/server.feature index 0139f89d8..b571582a7 100644 --- a/examples/server/tests/features/server.feature +++ b/examples/server/tests/features/server.feature @@ -29,9 +29,9 @@ Feature: llama.cpp server And prometheus metrics are exposed Examples: Prompts - | prompt | n_predict | re_content | n_predicted | - | I believe the meaning of life is | 8 | read | 8 | - | Write a joke about AI | 64 | (parkfriendsscared)+ | 32 | + | prompt | n_predict | re_content | n_predicted | + | I believe the meaning of life is | 8 | (readgoing)+ | 8 | + | Write a joke about AI | 64 | (parkfriendsscaredalways)+ | 32 | Scenario Outline: OAI Compatibility Given a model diff --git a/examples/server/tests/features/steps/steps.py 
b/examples/server/tests/features/steps/steps.py index 051fd440c..8e4babf20 100644 --- a/examples/server/tests/features/steps/steps.py +++ b/examples/server/tests/features/steps/steps.py @@ -792,6 +792,8 @@ def start_server_background(context): server_args.extend(['--api-key', context.server_api_key]) if context.debug: server_args.append('--verbose') + if 'SERVER_LOG_FORMAT_JSON' not in os.environ: + server_args.extend(['--log-format', "text"]) print(f"starting server with: {context.server_path}", *server_args) context.server_process = subprocess.Popen( [str(arg) for arg in [context.server_path, *server_args]], diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index 71cc5b0b8..d7abd7cbb 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -14,6 +14,7 @@ using json = nlohmann::json; extern bool server_verbose; +extern bool server_log_json; #ifndef SERVER_VERBOSE #define SERVER_VERBOSE 1 @@ -27,14 +28,14 @@ extern bool server_verbose; { \ if (server_verbose) \ { \ - server_log("VERBOSE", __func__, __LINE__, MSG, __VA_ARGS__); \ + server_log("VERB", __func__, __LINE__, MSG, __VA_ARGS__); \ } \ } while (0) #endif -#define LOG_ERROR( MSG, ...) server_log("ERROR", __func__, __LINE__, MSG, __VA_ARGS__) -#define LOG_WARNING(MSG, ...) server_log("WARNING", __func__, __LINE__, MSG, __VA_ARGS__) -#define LOG_INFO( MSG, ...) server_log("INFO", __func__, __LINE__, MSG, __VA_ARGS__) +#define LOG_ERROR( MSG, ...) server_log("ERR", __func__, __LINE__, MSG, __VA_ARGS__) +#define LOG_WARNING(MSG, ...) server_log("WARN", __func__, __LINE__, MSG, __VA_ARGS__) +#define LOG_INFO( MSG, ...) server_log("INFO", __func__, __LINE__, MSG, __VA_ARGS__) // // parallel @@ -133,26 +134,48 @@ struct completion_token_output std::string text_to_send; }; -static inline void server_log(const char *level, const char *function, int line, - const char *message, const nlohmann::ordered_json &extra) +static inline void server_log(const char *level, const char *function, int line, const char *message, const nlohmann::ordered_json &extra) { - nlohmann::ordered_json log - { + std::stringstream ss_tid; + ss_tid << std::this_thread::get_id(); + json log = nlohmann::ordered_json{ + {"tid", ss_tid.str()}, {"timestamp", time(nullptr)}, - {"level", level}, - {"function", function}, - {"line", line}, - {"message", message}, }; - if (!extra.empty()) - { - log.merge_patch(extra); - } + if (server_log_json) { + log.merge_patch( + { + {"level", level}, + {"function", function}, + {"line", line}, + {"msg", message}, + }); + if (!extra.empty()) { + log.merge_patch(extra); + } - const std::string str = log.dump(-1, ' ', false, json::error_handler_t::replace); - printf("%.*s\n", (int)str.size(), str.data()); - fflush(stdout); + std::cout << log.dump(-1, ' ', false, json::error_handler_t::replace) << "\n" << std::flush; + } else { + char buf[1024]; + snprintf(buf, 1024, "%4s [%24s] %s", level, function, message); + + if (!extra.empty()) { + log.merge_patch(extra); + } + std::stringstream ss; + ss << buf << " |"; + for (const auto& el : log.items()) + { + const std::string value = el.value().dump(-1, ' ', false, json::error_handler_t::replace); + snprintf(buf, 1024, " %s=%s", el.key().c_str(), value.c_str()); + ss << buf; + } + + const std::string str = ss.str(); + printf("%.*s\n", (int)str.size(), str.data()); + fflush(stdout); + } } // @@ -234,6 +257,7 @@ struct llama_server_queue { std::unique_lock lock(mutex_tasks); if (task.id == -1) { task.id = id++; + LOG_VERBOSE("new task id", {{"new_id", task.id}}); } 
queue_tasks.push_back(std::move(task)); condition_tasks.notify_one(); @@ -249,7 +273,9 @@ struct llama_server_queue { // Get the next id for creating anew task int get_new_id() { std::unique_lock lock(mutex_tasks); - return id++; + int new_id = id++; + LOG_VERBOSE("new task id", {{"new_id", new_id}}); + return new_id; } // Register function to process a new task @@ -290,8 +316,7 @@ struct llama_server_queue { void start_loop() { running = true; while (true) { - // new task arrived - LOG_VERBOSE("have new task", {}); + LOG_VERBOSE("new task may arrive", {}); { while (true) { @@ -303,7 +328,7 @@ struct llama_server_queue { task_server task = queue_tasks.front(); queue_tasks.erase(queue_tasks.begin()); lock.unlock(); - LOG_VERBOSE("callback_new_task", {}); + LOG_VERBOSE("callback_new_task", {{"task_id", task.id}}); callback_new_task(task); } LOG_VERBOSE("callback_all_task_finished", {}); @@ -384,11 +409,13 @@ struct llama_server_response { std::condition_variable condition_results; void add_waiting_task_id(int task_id) { + LOG_VERBOSE("waiting for task id", {{"task_id", task_id}}); std::unique_lock lock(mutex_results); waiting_task_ids.insert(task_id); } void remove_waiting_task_id(int task_id) { + LOG_VERBOSE("remove waiting for task id", {{"task_id", task_id}}); std::unique_lock lock(mutex_results); waiting_task_ids.erase(task_id); } @@ -401,7 +428,6 @@ struct llama_server_response { condition_results.wait(lock, [&]{ return !queue_results.empty(); }); - LOG_VERBOSE("condition_results unblock", {}); for (int i = 0; i < (int) queue_results.size(); i++) { @@ -426,20 +452,20 @@ struct llama_server_response { // Send a new result to a waiting task_id void send(task_result result) { std::unique_lock lock(mutex_results); - LOG_VERBOSE("send new result", {}); + LOG_VERBOSE("send new result", {{"task_id", result.id}}); for (auto& task_id : waiting_task_ids) { // LOG_TEE("waiting task id %i \n", task_id); // for now, tasks that have associated parent multitasks just get erased once multitask picks up the result if (result.multitask_id == task_id) { - LOG_VERBOSE("callback_update_multitask", {}); + LOG_VERBOSE("callback_update_multitask", {{"task_id", task_id}}); callback_update_multitask(task_id, result.id, result); continue; } if (result.id == task_id) { - LOG_VERBOSE("queue_results.push_back", {}); + LOG_VERBOSE("queue_results.push_back", {{"task_id", task_id}}); queue_results.push_back(result); condition_results.notify_all(); return;

    EIk<1BuqtG%U9KgLQ@<%jfdmNVRRg&-ND19 zylq+vwFBXJcXq=)?(eG?=RlSS?oRA0cGw`{PxQi+V(?(cm`i9k;SLQ3rd6?7%3viz zmkl|Uh>t+)ML3MM$at1f0q!bihjOYmW96xjeR)ui1h;n6wNIdoqZIE^%v!#AY%t6z zyxWLePFfu<#8hXMnj9;VY%X@mgobvY=~kGqRj?{Lho*aXBh`oC7n4s0IQB&?%VHD0 z&%NW2$2;ZS)I0ck8M*5+R8`|@`q_Eq(o-=nht8?F;0*Yif@6~hc$;X8y%1ACJ{a#0F`|(eU+Xn@7MjFgo7dvcOV9@}9qhfeh8GPR>;q8$DO-@b;jX6TfTPj#O#UW% zD%H{zr~F_i?Bk~tIe2*dL)$+v1bVa8czc|9d|mEzdU3F;o!zh=NDN^UYv~%{Xs)rb zha=K$%;S&qX3NXb5vvchjX>pHwsCM@W4U8Kg{o)mKSFFmUAI+!zmtnAQ*OBD&a)TJ zwdcQC9(~lbni_H@zgnzhhzCJ4go{Z;&~(?+S2Xm%643<`6h zo-!+`vn`W8H*o1}5=MhBW&+3oEA6KJ1cHdQV?V(Fza1c|>fc{HZ^Kf(caPHvbn*%S z=meP4&fgLx@bNcNx5!=iT?D`~2Y*ZbuOm1*IAEnR*$5q2wp_~)Y-OuPJKUYN(wC`T zVWO)Nhfu`)=2Hu38U!3AqX>tA%}tf%uxJ^O0B4)u$w%1Lc8rNpVn8eKjQrRyW#K7^ zG5$K0X4>^!$9_on@)6bwc~A-2nkmgW8~YCE9Qr<4+a#Y2)OF!tCyHe-`&$4?HhGYd zM8AWVrro%IFTJ8cD;CoYf*PsviFsi!t^MFZjbdT|d0d$EqzlpuN#0Q&k!U{G0%|u- z4`EjL{(p1)r7fWFJU4xW28**}r-A*HcCyc+p2w-w$394Bf4h3P_6gYh`^j+XsbglX zry9@h&yD?AbAE+Dq+sji_XEw8{(QK)((EK{60UQ*zr>S-zx8T zdL6zkdK$n8KiO7Stz%8*Dql!F=6oa+V&~fBaiLuYIPvu!a9^Fvm~G#7JQmOOd0RL1 zC8$jbel9>0OLU1gHt3#>g*3GDwq)G+d*Di8pem>+S+fsRcnF4s4}&NzvFv<4BrDvP zs081*v!#wRgfJ>>DlxZVVEuTh!}Y22AZ!ai#3V-6>L{*5y^dPJ(4M7#;>-=Xj{s!i z1$2N&Rh)#}9{2zoLKg$Oq2kR}nsMzVkoF6J1vGX%`VVKGVpId1{413Qe8~nhVFRmZ zTY_)^zlC72RIzhlFp;|==z#km7H*Z}fMUgsaFAJ0)$=L958AV-D(os(W0OEdPlyg- zx*dTvplCLjhu>+9+Of-aWWB6-&OG}rL5P|Wy6~=C2?bL_z~2>nmG;pr4eK*q&uMYw(~$B-`(LB87BjBD32q6%?%R8c|s?d-6&TY z>Si-^h?-`z{3_}bp*(U+dAZCrW8c*}wz+t{y|9wX*kk$=>#jV2*jJ)l>IGlzC)>#b zjJKrr`gcn-9v6f3FcG*ZF~E`rYc&3-_0i%IQP0_Mhhcus#$<*ipb+YsTq0d9R%Q=Je+__g(BdCmqkyx7=fcf<(B}ZAYH-wUH0Gy@hZY1hOwV~2VtCn+;ENqb1 zLwilyYwJQn>}@4Uyc0a=R4TkrY!s^T+xO2QUF6(^josace@6 z+r8vW%P_A%3<7J@QX!fzY(QimXbQ>cY5-F48)DKiQz1fS@b)-l3NJ}MI>KYlOC5W1 zbBNF%kH&nmhJ`W)fEYF60Q|+?}hTA6JQmj5= zJ^-4jX2MOKE}Oj&@|4_imc2Ym9kGNUs6=rBQ}ukI;tou z(M{7A>fISH4mv)?t}K zpZkY&kE2?MqylIxgbH83hO|5@Wsj&w(zM-4{Y5vk6s0AQ7~4Q`^Z8QD$-0PZX?vDB>Xi+C~aC9US}tPF`(s@ zTWhOD2bdI21sKIHAG>zMq1^TJ&@QjoO0Hbzx^K1ZN8T$mGdy9eBhfW-y zSfo7Gez`x0^~7xSn$FaAKo<5o2XQXU%1%e2cT06-cLz3?M!DCEoC#nO;nNE9=vc?? 
zS6MAi+Sx0fPSmHzZNvsK$y4(j$gTH^m|ZsDJpuvIhfok=E|^rS_3EXG*=A*Qx$j~f zaOqg6@R#r5j7Fp6_1eL9l6w`44S2GVG{%Oa%!2&%{7)zzMH9dq$oS63UR<|(h){Y7BuzG95`^$L_pTaB8X>Vq+Lv0p3v%&skd@pI4pvf*;l5}B^e(Z*Vt8RN1<#2PT7x0!~whRpYlVg|P=kP6S=W~u6w zOrh_AhRz|tIZuw8NI~$N*)t&|1P;%k$SRMrLu2Q7iuamqBT&wKO^pVWQ6th~Go(DA zvm!Z;QxZmokCPLsd+f8MsbdxV5EsaPRZR!BQ|~=4=%hebuxSa$_q>bD2zP&<0OaA* t+uM7)Pj3u{Z`EJD6?r1oaOP_t?foMk{f}p6#n(PM-d6|e{r}_1{{RtjiUI%t literal 0 HcmV?d00001 diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 61407e573..1c73de0a3 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -31,6 +31,7 @@ llama_test_executable (test-tokenizer-1-llama test-tokenizer-1-llama.cpp ${CMAKE llama_build_executable(test-tokenizer-1-bpe.cpp) llama_test_executable (test-tokenizer-1-falcon test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-falcon.gguf) llama_test_executable(test-tokenizer-1-aquila test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-aquila.gguf) +llama_test_executable(test-tokenizer-1-mpt test-tokenizer-1-bpe.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-mpt.gguf) llama_build_and_test_executable(test-grammar-parser.cpp) llama_build_and_test_executable(test-llama-grammar.cpp) llama_build_and_test_executable(test-grad0.cpp) # SLOW From 438c2ca83045a00ef244093d27e9ed41a8cb4ea9 Mon Sep 17 00:00:00 2001 From: Georgi Gerganov Date: Sun, 22 Oct 2023 22:53:08 +0300 Subject: [PATCH 013/859] server : parallel decoding and multimodal (#3677) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * implementing parallel decoding in server example * crash fixed * save dev progress * refactored sampling function * completion endpoint working * multiple client support * grammar + no stream completion * cached prompt support * chat.mjs support cached prompt + some fixes * server ui now support multiple clients * unused change reverted * fixed timings per slot * add context swap * add changes to README.md * llava multimodal integration * fixed tokens probs * add multimodal input - alfa * refactor code + remove unused comments + improved README.md * fix compilation errors with llvm * notify the user from server ui that multimodality is unavialable * some ci fixes * fix ci make build undefined ref errors * fix long prompt than ctx proposed in #3639 * fixed premature end due stop word * context shift fixed * fix llava implementation * sync README.md changes * readme change * update api like OpenAI * multimodal support enabled by default * fix make bui;d errors * fix multiple clients * fix zig build * new sampling API * latest changes of sampling API * server : coding-style normalization * server : coding-style normalization (part 2) * server : remove beam-search functionality * server : bug fix in ingest_images n_tokens is incremented internally by llama_batch_add * server : use refs + use llama_batch_clear() * server : snake case * server : minor sync * added thread safe pipeline * server : bach has to be allocated for n_parallel sequences * server : no need for atomic int - already using mutex * server : logs + minor code style * server : fix multibyte handle in partial response (#3706) * fix image load + view image in chat * make : silence stb warnings * clip : link to ggml, not to llama * server : fix switch fallthrough * server : fix crash in Debug on macOS (I have no idea why this fixes it!?) 
* server : refactor ctx_sampling init + n_ctx + names * server : bug fix for prompt caching * Do not save/load image_data to localStorage * editorconfig : new line in index.html * server : completion requests remember slot_id * Update readme to document multimodal in server * server : minor style * Update readme to document multimodal in server * server : hide ctx_sampling->prev behind API (#3696) * server : apply fix from #3722 * server : fix slot reuse * server : add comment about changing slot_state to bool --------- Co-authored-by: FSSRepo Co-authored-by: Damian Stewart Co-authored-by: Steward Garcia <57494570+FSSRepo@users.noreply.github.com> Co-authored-by: Jhen-Jie Hong Co-authored-by: M. Yusuf Sarıgöz --- .gitignore | 1 + Makefile | 4 +- build.zig | 3 +- examples/llava/CMakeLists.txt | 2 +- examples/llava/clip.cpp | 4 +- examples/server/CMakeLists.txt | 2 +- examples/server/README.md | 36 + examples/server/api_like_OAI.py | 5 +- examples/server/chat.mjs | 11 + examples/server/index.html.hpp | 4052 +++++++++++++++-------------- examples/server/public/index.html | 115 +- examples/server/server.cpp | 2695 ++++++++++++------- 12 files changed, 3980 insertions(+), 2950 deletions(-) diff --git a/.gitignore b/.gitignore index 471cf90d5..545c28726 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ *.gcno *.gcda *.dot +*.bat *.metallib .DS_Store .build/ diff --git a/Makefile b/Makefile index 325ae747b..80179631f 100644 --- a/Makefile +++ b/Makefile @@ -605,8 +605,8 @@ embedding: examples/embedding/embedding.cpp build-info.h ggml. save-load-state: examples/save-load-state/save-load-state.cpp build-info.h ggml.o llama.o $(COMMON_DEPS) $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) -server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp build-info.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) - $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) +server: examples/server/server.cpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h common/stb_image.h build-info.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS) + $(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h,$(filter-out %.hpp,$^)) -o $@ $(LDFLAGS) $(LWINSOCK2) -Wno-cast-qual gguf: examples/gguf/gguf.cpp ggml.o llama.o $(OBJS) $(CXX) $(CXXFLAGS) $(filter-out %.h,$^) -o $@ $(LDFLAGS) diff --git a/build.zig b/build.zig index 0b74cee48..dcfa3dd6b 100644 --- a/build.zig +++ b/build.zig @@ -131,6 +131,7 @@ pub fn build(b: *std.build.Builder) !void { const sampling = make.obj("sampling", "common/sampling.cpp"); const grammar_parser = make.obj("grammar-parser", "common/grammar-parser.cpp"); const train = make.obj("train", "common/train.cpp"); + const clip = make.obj("clip", "examples/llava/clip.cpp"); _ = make.exe("main", "examples/main/main.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, sampling, console, grammar_parser }); _ = make.exe("quantize", "examples/quantize/quantize.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common }); @@ -139,7 +140,7 @@ pub fn build(b: *std.build.Builder) !void { _ = make.exe("finetune", "examples/finetune/finetune.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, train }); _ = make.exe("train-text-from-scratch", 
"examples/train-text-from-scratch/train-text-from-scratch.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, train }); - const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, sampling, grammar_parser }); + const server = make.exe("server", "examples/server/server.cpp", &.{ ggml, ggml_alloc, ggml_backend, llama, common, sampling, grammar_parser, clip }); if (server.target.isWindows()) { server.linkSystemLibrary("ws2_32"); } diff --git a/examples/llava/CMakeLists.txt b/examples/llava/CMakeLists.txt index d02e6ab46..2d7979ecd 100644 --- a/examples/llava/CMakeLists.txt +++ b/examples/llava/CMakeLists.txt @@ -1,7 +1,7 @@ set(TARGET clip) add_library(${TARGET} clip.cpp clip.h) install(TARGETS ${TARGET} LIBRARY) -target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE common ggml ${CMAKE_THREAD_LIBS_INIT}) target_compile_features(${TARGET} PRIVATE cxx_std_11) if (NOT MSVC) target_compile_options(${TARGET} PRIVATE -Wno-cast-qual) # stb_image.h diff --git a/examples/llava/clip.cpp b/examples/llava/clip.cpp index 1ae9077b4..61932e659 100644 --- a/examples/llava/clip.cpp +++ b/examples/llava/clip.cpp @@ -610,8 +610,8 @@ struct clip_ctx * clip_model_load(const char * fname, const int verbosity = 1) { int idx_mean = get_key_idx(ctx, KEY_IMAGE_MEAN); int idx_std = get_key_idx(ctx, KEY_IMAGE_STD); for (int i = 0; i < 3; ++i) { - new_clip->image_mean[i] = *((float *)gguf_get_arr_data(ctx, idx_mean)); - new_clip->image_std[i] = *((float *)gguf_get_arr_data(ctx, idx_std)); + new_clip->image_mean[i] = *((const float *)gguf_get_arr_data(ctx, idx_mean)); + new_clip->image_std[i] = *((const float *)gguf_get_arr_data(ctx, idx_std)); } if (verbosity >= 2) { diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt index 3782f9b80..a23ddcc55 100644 --- a/examples/server/CMakeLists.txt +++ b/examples/server/CMakeLists.txt @@ -6,7 +6,7 @@ install(TARGETS ${TARGET} RUNTIME) target_compile_definitions(${TARGET} PRIVATE SERVER_VERBOSE=$ ) -target_link_libraries(${TARGET} PRIVATE common llama ${CMAKE_THREAD_LIBS_INIT}) +target_link_libraries(${TARGET} PRIVATE common llama clip ${CMAKE_THREAD_LIBS_INIT}) if (WIN32) TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32) endif() diff --git a/examples/server/README.md b/examples/server/README.md index 9f0ace3d7..715007735 100644 --- a/examples/server/README.md +++ b/examples/server/README.md @@ -24,6 +24,10 @@ Command line options: - `--port`: Set the port to listen. Default: `8080`. - `--path`: path from which to serve static files (default examples/server/public) - `--embedding`: Enable embedding extraction, Default: disabled. +- `-np N`, `--parallel N`: Set the number of slots for process requests (default: 1) +- `-cb`, `--cont-batching`: enable continuous batching (a.k.a dynamic batching) (default: disabled) +- `-spf FNAME`, `--system-prompt-file FNAME` Set a file to load "a system prompt (initial prompt of all slots), this is useful for chat applications. [See more](#change-system-prompt-on-runtime) +- `--mmproj MMPROJ_FILE`: Path to a multimodal projector file for LLaVA. ## Build @@ -158,6 +162,8 @@ node index.js `n_probs`: If greater than 0, the response also contains the probabilities of top N tokens for each generated token (default: 0) + `image_data`: An array of objects to hold base64-encoded image `data` and its `id`s to be reference in `prompt`. 
@@ -188,6 +194,12 @@ node index.js

     `truncated`: Boolean indicating if the context size was exceeded during generation, i.e. the number of tokens provided in the prompt (`tokens_evaluated`) plus tokens generated (`tokens_predicted`) exceeded the context size (`n_ctx`)

+    `slot_id`: Assign the completion task to a specific slot. If -1, the task will be assigned to an idle slot (default: -1)
+
+    `cache_prompt`: Save the prompt and generation to avoid reprocessing the entire prompt when part of it is unchanged (default: false)
+
+    `system_prompt`: Change the system prompt (initial prompt of all slots); this is useful for chat applications. [See more](#change-system-prompt-at-runtime)
+
 - **POST** `/tokenize`: Tokenize a given text.

     *Options:*
@@ -218,8 +230,32 @@ node index.js

     It also accepts all the options of `/completion` except `stream` and `prompt`.

+- **GET** `/props`: Return the assistant name and anti-prompt needed to generate the prompt, in case you have specified a system prompt for all slots.
+
 ## More examples

+### Change system prompt at runtime
+
+To serve multiple chat-type clients while keeping the same system prompt, use the `system_prompt` option; it only needs to be set once.
+
+`prompt`: Specify a context that you want all connecting clients to respect.
+
+`anti_prompt`: Specify the word you want to use to instruct the model to stop. This must be sent to each client through the `/props` endpoint.
+
+`assistant_name`: The bot's name, which each client needs in order to generate the prompt. This must be sent to each client through the `/props` endpoint.
+
+```json
+{
+    "system_prompt": {
+        "prompt": "Transcript of a never ending dialog, where the User interacts with an Assistant.\nThe Assistant is helpful, kind, honest, good at writing, and never fails to answer the User's requests immediately and with precision.\nUser: Recommend a nice restaurant in the area.\nAssistant: I recommend the restaurant \"The Golden Duck\". It is a 5 star restaurant with a great view of the city. The food is delicious and the service is excellent. The prices are reasonable and the portions are generous. The restaurant is located at 123 Main Street, New York, NY 10001. The phone number is (212) 555-1234. The hours are Monday through Friday from 11:00 am to 10:00 pm. The restaurant is closed on Saturdays and Sundays.\nUser: Who is Richard Feynman?\nAssistant: Richard Feynman was an American physicist who is best known for his work in quantum mechanics and particle physics. He was awarded the Nobel Prize in Physics in 1965 for his contributions to the development of quantum electrodynamics. He was a popular lecturer and author, and he wrote several books, including \"Surely You're Joking, Mr. Feynman!\" and \"What Do You Care What Other People Think?\".\nUser:",
+        "anti_prompt": "User:",
+        "assistant_name": "Assistant:"
+    }
+}
+```
+
+**NOTE**: You can do this automatically when starting the server by simply creating a `.json` file with these options and using the CLI option `-spf FNAME` or `--system-prompt-file FNAME`.
+
 ### Interactive mode

 Check the sample in [chat.mjs](chat.mjs).
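Taking the slot, prompt-cache, and `/props` additions documented above together, here is a hedged Python sketch of a multi-turn client (again an editor's illustration, not part of the patch); the endpoint and field names follow the README, while the host, port, and prompts are made up:

```python
# Sketch: keep one server slot warm across turns so the shared prompt prefix
# stays cached instead of being re-evaluated on every request.
import json
import urllib.request

API = "http://127.0.0.1:8080"  # illustrative host/port

def post_completion(body):
    req = urllib.request.Request(
        API + "/completion",
        data=json.dumps(body).encode("utf-8"),
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read())

# /props exposes the assistant name and anti-prompt set via system_prompt/-spf;
# print it so a client can pick them up instead of hard-coding them.
with urllib.request.urlopen(API + "/props") as resp:
    print(json.loads(resp.read()))

slot_id = -1  # -1 lets the server pick an idle slot on the first turn
for question in ["Recommend a movie.", "Why that one?"]:
    res = post_completion({
        "prompt": "User: " + question + "\nAssistant:",
        "stop": ["User:"],     # anti-prompt, as in the JSON example above
        "cache_prompt": True,  # reuse the evaluated prefix on the next turn
        "slot_id": slot_id,    # stick with the same slot once it is known
    })
    slot_id = res["slot_id"]   # completion responses carry their slot_id
    print(res["content"])
```

This mirrors what the patched [chat.mjs](chat.mjs) does with `slot_id` and `cache_prompt` below.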
Feynman!\" and \"What Do You Care What Other People Think?\".\nUser:", + "anti_prompt": "User:", + "assistant_name": "Assistant:" + } +} +``` + +**NOTE**: You can do this automatically when starting the server by simply creating a .json file with these options and using the CLI option `-spf FNAME` or `--system-prompt-file FNAME`. + ### Interactive mode Check the sample in [chat.mjs](chat.mjs). diff --git a/examples/server/api_like_OAI.py b/examples/server/api_like_OAI.py index 14d2dcf65..313e1a965 100755 --- a/examples/server/api_like_OAI.py +++ b/examples/server/api_like_OAI.py @@ -8,6 +8,7 @@ import json app = Flask(__name__) +slot_id = -1 parser = argparse.ArgumentParser(description="An example of using server.cpp with a similar API to OAI. It must be used together with server.cpp.") parser.add_argument("--chat-prompt", type=str, help="the top prompt in chat completions(default: 'A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.\\n')", default='A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.\\n') @@ -77,7 +78,8 @@ def make_postData(body, chat=False, stream=False): if(is_present(body, "stop")): postData["stop"] += body["stop"] postData["n_keep"] = -1 postData["stream"] = stream - + postData["cache_prompt"] = True + postData["slot_id"] = slot_id return postData def make_resData(data, chat=False, promptToken=[]): @@ -128,6 +130,7 @@ def make_resData_stream(data, chat=False, time_now = 0, start=False): } ] } + slot_id = data["slot_id"] if (chat): if (start): resData["choices"][0]["delta"] = { diff --git a/examples/server/chat.mjs b/examples/server/chat.mjs index 87f4d2926..219ebb51a 100644 --- a/examples/server/chat.mjs +++ b/examples/server/chat.mjs @@ -7,6 +7,11 @@ const args = process.argv.slice(2); const grammarJsonSchemaFile = args.find( (_, index) => args[index - 1] === "--grammar-json-schema" ); + +const no_cached_prompt = args.find( + (_, index) => args[index - 1] === "--no-cache-prompt" +) ?? 
"false"; + const grammarFile = args.find((_, index) => args[index - 1] === "--grammar"); // Example usage: function,arguments @@ -30,6 +35,9 @@ if (grammarFile) { grammar = readFileSync(grammarFile, 'utf-8') } +// for cached prompt +let slot_id = -1; + const API_URL = 'http://127.0.0.1:8080' const chat = [ @@ -76,6 +84,8 @@ async function chat_completion(question) { top_p: 0.9, n_keep: n_keep, n_predict: 256, + cache_prompt: no_cached_prompt === "false", + slot_id: slot_id, stop: ["\n### Human:"], // stop completion after generating this grammar, stream: true, @@ -92,6 +102,7 @@ async function chat_completion(question) { const t = Buffer.from(chunk).toString('utf8') if (t.startsWith('data: ')) { const message = JSON.parse(t.substring(6)) + slot_id = message.slot_id answer += message.content process.stdout.write(message.content) if (message.stop) { diff --git a/examples/server/index.html.hpp b/examples/server/index.html.hpp index 58e3387d1..5d3bdfbdd 100644 --- a/examples/server/index.html.hpp +++ b/examples/server/index.html.hpp @@ -186,351 +186,369 @@ unsigned char index_html[] = { 0x64, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x64, 0x64, 0x64, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x64, 0x65, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6e, - 0x74, 0x2d, 0x66, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x3a, 0x20, 0x6d, 0x6f, - 0x6e, 0x6f, 0x73, 0x70, 0x61, 0x63, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, 0x20, - 0x30, 0x2e, 0x31, 0x65, 0x6d, 0x20, 0x30, 0x2e, 0x33, 0x65, 0x6d, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, 0x65, - 0x72, 0x2d, 0x72, 0x61, 0x64, 0x69, 0x75, 0x73, 0x3a, 0x20, 0x33, 0x70, - 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, 0x35, - 0x65, 0x6d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x62, 0x6c, 0x6f, - 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x64, + 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, + 0x6e, 0x74, 0x2d, 0x66, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x3a, 0x20, 0x6d, + 0x6f, 0x6e, 0x6f, 0x73, 0x70, 0x61, 0x63, 0x65, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x3a, + 0x20, 0x30, 0x2e, 0x31, 0x65, 0x6d, 0x20, 0x30, 0x2e, 0x33, 0x65, 0x6d, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x6f, 0x72, 0x64, + 0x65, 0x72, 0x2d, 0x72, 0x61, 0x64, 0x69, 0x75, 0x73, 0x3a, 0x20, 0x33, + 0x70, 0x78, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x20, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2e, 0x73, 0x6c, 0x69, 0x6d, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, - 0x6e, 0x3a, 0x20, 0x30, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, - 0x79, 0x3a, 0x20, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 
0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, - 0x61, 0x64, 0x65, 0x72, 0x2c, 0x20, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x78, - 0x74, 0x2d, 0x61, 0x6c, 0x69, 0x67, 0x6e, 0x3a, 0x20, 0x63, 0x65, 0x6e, - 0x74, 0x65, 0x72, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6e, 0x74, 0x2d, - 0x73, 0x69, 0x7a, 0x65, 0x3a, 0x20, 0x38, 0x30, 0x25, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, - 0x23, 0x38, 0x38, 0x38, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x2e, + 0x35, 0x65, 0x6d, 0x20, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2e, 0x73, 0x6c, 0x69, 0x6d, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x61, 0x72, 0x67, + 0x69, 0x6e, 0x3a, 0x20, 0x30, 0x20, 0x30, 0x2e, 0x35, 0x65, 0x6d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x68, + 0x65, 0x61, 0x64, 0x65, 0x72, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, + 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x65, 0x78, 0x74, 0x2d, 0x61, 0x6c, 0x69, 0x67, 0x6e, + 0x3a, 0x20, 0x63, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x6f, + 0x74, 0x65, 0x72, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x66, 0x6f, 0x6e, 0x74, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x3a, 0x20, 0x38, + 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x23, 0x38, 0x38, 0x38, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6d, + 0x6f, 0x64, 0x65, 0x2d, 0x63, 0x68, 0x61, 0x74, 0x20, 0x74, 0x65, 0x78, + 0x74, 0x61, 0x72, 0x65, 0x61, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x34, + 0x2e, 0x35, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x63, - 0x68, 0x61, 0x74, 0x20, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, - 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, - 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x34, 0x2e, 0x35, 0x65, 0x6d, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, - 0x61, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, 0x31, 0x30, 0x65, 0x6d, 0x3b, - 0x0a, 0x20, 0x20, 
0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x40, 0x6b, 0x65, 0x79, 0x66, 0x72, 0x61, 0x6d, 0x65, 0x73, 0x20, - 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, - 0x69, 0x70, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x65, + 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x3d, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x3a, 0x20, + 0x31, 0x30, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x40, 0x6b, 0x65, 0x79, 0x66, 0x72, + 0x61, 0x6d, 0x65, 0x73, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, + 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, 0x70, 0x65, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x30, 0x25, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, + 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x3a, 0x20, 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x31, 0x30, 0x30, 0x25, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, - 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x30, 0x25, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x31, 0x30, 0x30, 0x25, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, - 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x3a, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x31, 0x30, + 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2e, + 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x3a, 0x20, 0x23, + 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x30, 0x30, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, + 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, + 0x6e, 0x64, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x3a, 0x20, 0x35, 0x30, 0x25, + 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, + 0x69, 0x6d, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, + 0x72, 0x2d, 0x67, 0x72, 0x61, 0x64, 0x69, 0x65, 0x6e, 0x74, 0x28, 0x39, + 0x30, 0x64, 0x65, 0x67, 0x2c, 0x20, 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, - 0x72, 0x2d, 0x31, 0x3a, 0x20, 0x23, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, - 0x30, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x72, 0x2d, 0x31, 0x29, 
0x2c, 0x20, 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, - 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, 0x65, 0x65, 0x65, 0x65, 0x65, 0x65, - 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, - 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x73, 0x69, 0x7a, - 0x65, 0x3a, 0x20, 0x35, 0x30, 0x25, 0x20, 0x31, 0x30, 0x30, 0x25, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, - 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x3a, - 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x2d, 0x67, 0x72, 0x61, 0x64, - 0x69, 0x65, 0x6e, 0x74, 0x28, 0x39, 0x30, 0x64, 0x65, 0x67, 0x2c, 0x20, - 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x29, 0x2c, 0x20, - 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x32, 0x29, 0x2c, 0x20, - 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x29, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6e, 0x69, 0x6d, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, 0x69, 0x70, 0x65, 0x20, 0x32, 0x73, - 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x20, 0x69, 0x6e, 0x66, 0x69, - 0x6e, 0x69, 0x74, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x40, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x20, - 0x28, 0x70, 0x72, 0x65, 0x66, 0x65, 0x72, 0x73, 0x2d, 0x63, 0x6f, 0x6c, - 0x6f, 0x72, 0x2d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x3a, 0x20, 0x64, - 0x61, 0x72, 0x6b, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, - 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, - 0x31, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, 0x30, 0x30, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, + 0x72, 0x2d, 0x32, 0x29, 0x2c, 0x20, 0x76, 0x61, 0x72, 0x28, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, - 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, - 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, - 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, - 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, - 0x72, 0x3a, 0x20, 0x62, 0x6c, 0x61, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3e, 0x0a, 0x0a, - 0x20, 0x20, 0x3c, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x20, 0x74, 0x79, - 0x70, 0x65, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x22, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, - 0x7b, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x2c, 0x20, - 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x2c, 0x20, 0x68, 0x2c, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x2c, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x20, 0x75, - 0x73, 0x65, 0x45, 0x66, 0x66, 
0x65, 0x63, 0x74, 0x2c, 0x20, 0x75, 0x73, - 0x65, 0x52, 0x65, 0x66, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x2e, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, - 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, - 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x2e, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, - 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x7d, 0x20, 0x66, - 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x2d, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2d, 0x74, 0x6f, 0x2d, 0x67, 0x72, 0x61, - 0x6d, 0x6d, 0x61, 0x72, 0x2e, 0x6d, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, - 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x22, 0x54, 0x68, 0x69, 0x73, - 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, - 0x73, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, - 0x65, 0x6e, 0x20, 0x55, 0x73, 0x65, 0x72, 0x20, 0x61, 0x6e, 0x64, 0x20, - 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x61, 0x20, 0x66, 0x72, 0x69, - 0x65, 0x6e, 0x64, 0x6c, 0x79, 0x20, 0x63, 0x68, 0x61, 0x74, 0x62, 0x6f, - 0x74, 0x2e, 0x20, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x69, 0x73, 0x20, - 0x68, 0x65, 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x6b, 0x69, 0x6e, - 0x64, 0x2c, 0x20, 0x68, 0x6f, 0x6e, 0x65, 0x73, 0x74, 0x2c, 0x20, 0x67, - 0x6f, 0x6f, 0x64, 0x20, 0x61, 0x74, 0x20, 0x77, 0x72, 0x69, 0x74, 0x69, - 0x6e, 0x67, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6e, 0x65, 0x76, 0x65, - 0x72, 0x20, 0x66, 0x61, 0x69, 0x6c, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x61, - 0x6e, 0x73, 0x77, 0x65, 0x72, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x72, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x20, 0x69, 0x6d, 0x6d, 0x65, 0x64, - 0x69, 0x61, 0x74, 0x65, 0x6c, 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x77, - 0x69, 0x74, 0x68, 0x20, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x2e, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, 0x7b, - 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, 0x6e, - 0x7b, 0x7b, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x7d, 0x7d, 0x5c, - 0x6e, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, - 0x22, 0x7b, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x7d, 0x3a, 0x20, 0x7b, - 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x7d, 0x22, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x20, 0x22, - 0x63, 0x68, 0x61, 0x74, 0x22, 0x2c, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x22, - 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x7c, 0x20, 0x22, 0x63, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x72, 0x3a, 0x20, 0x22, 0x4c, 0x6c, - 0x61, 0x6d, 0x61, 0x22, 0x2c, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x75, 0x73, 0x65, 0x72, 0x3a, 0x20, 0x22, 0x55, 0x73, 0x65, 0x72, 0x22, - 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, - 0x65, 0x64, 0x69, 0x63, 0x74, 0x3a, 0x20, 0x34, 0x30, 0x30, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x3a, 0x20, 0x30, 0x2e, 0x37, 0x2c, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, - 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x3a, 0x20, 0x32, 0x35, 0x36, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, - 0x61, 0x62, 0x6c, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, - 0x2c, 0x20, 0x2d, 0x31, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x78, 0x74, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, 0x65, 0x6e, - 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x31, 0x2e, 0x31, 0x38, 0x2c, 0x20, - 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, + 0x72, 0x2d, 0x31, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x61, 0x6e, 0x69, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x20, + 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, 0x62, 0x67, 0x2d, 0x77, + 0x69, 0x70, 0x65, 0x20, 0x32, 0x73, 0x20, 0x6c, 0x69, 0x6e, 0x65, 0x61, + 0x72, 0x20, 0x69, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x74, 0x65, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x40, + 0x6d, 0x65, 0x64, 0x69, 0x61, 0x20, 0x28, 0x70, 0x72, 0x65, 0x66, 0x65, + 0x72, 0x73, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x73, 0x63, 0x68, + 0x65, 0x6d, 0x65, 0x3a, 0x20, 0x64, 0x61, 0x72, 0x6b, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x6c, 0x6f, 0x61, 0x64, + 0x69, 0x6e, 0x67, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x2d, + 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x31, 0x3a, 0x20, 0x23, 0x32, 0x32, + 0x32, 0x32, 0x32, 0x32, 0x30, 0x30, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2d, 0x2d, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, + 0x67, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x2d, 0x32, 0x3a, 0x20, 0x23, + 0x32, 0x32, 0x32, 0x32, 0x32, 0x32, 0x66, 0x66, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, + 0x6e, 0x64, 0x2d, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x62, 0x6c, + 0x61, 0x63, 0x6b, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, + 0x74, 0x79, 0x6c, 0x65, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x3c, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x6d, + 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x2c, 0x20, 0x68, 0x2c, 0x20, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x20, 0x65, 0x66, 0x66, 0x65, + 0x63, 0x74, 0x2c, 0x20, 0x63, 
0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, + 0x2c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x2c, 0x20, 0x75, 0x73, + 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x2c, 0x20, 0x75, 0x73, 0x65, + 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x2c, 0x20, 0x75, 0x73, 0x65, 0x52, + 0x65, 0x66, 0x2c, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, + 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, + 0x20, 0x27, 0x2f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x2e, 0x6a, 0x73, 0x27, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x20, 0x7b, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x7d, 0x20, + 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, 0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x7b, 0x20, + 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, + 0x74, 0x65, 0x72, 0x20, 0x7d, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x27, + 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x2d, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x2d, 0x74, 0x6f, 0x2d, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x2e, + 0x6d, 0x6a, 0x73, 0x27, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, + 0x74, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x72, 0x20, 0x73, 0x6c, + 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x20, 0x3d, 0x20, 0x2d, 0x31, 0x3b, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, + 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, 0x22, 0x54, 0x68, 0x69, + 0x73, 0x20, 0x69, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, + 0x72, 0x73, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x62, 0x65, 0x74, 0x77, + 0x65, 0x65, 0x6e, 0x20, 0x55, 0x73, 0x65, 0x72, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x2c, 0x20, 0x61, 0x20, 0x66, 0x72, + 0x69, 0x65, 0x6e, 0x64, 0x6c, 0x79, 0x20, 0x63, 0x68, 0x61, 0x74, 0x62, + 0x6f, 0x74, 0x2e, 0x20, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x20, 0x69, 0x73, + 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, 0x6b, 0x69, + 0x6e, 0x64, 0x2c, 0x20, 0x68, 0x6f, 0x6e, 0x65, 0x73, 0x74, 0x2c, 0x20, + 0x67, 0x6f, 0x6f, 0x64, 0x20, 0x61, 0x74, 0x20, 0x77, 0x72, 0x69, 0x74, + 0x69, 0x6e, 0x67, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6e, 0x65, 0x76, + 0x65, 0x72, 0x20, 0x66, 0x61, 0x69, 0x6c, 0x73, 0x20, 0x74, 0x6f, 0x20, + 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x72, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x20, 0x69, 0x6d, 0x6d, 0x65, + 0x64, 0x69, 0x61, 0x74, 0x65, 0x6c, 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, + 0x77, 0x69, 0x74, 0x68, 0x20, 0x70, 0x72, 0x65, 0x63, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x20, 0x22, 0x7b, + 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x7d, 0x5c, 0x6e, 0x5c, + 0x6e, 0x7b, 0x7b, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x7d, 0x7d, + 0x5c, 0x6e, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, + 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, + 0x20, 0x22, 0x7b, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x7d, 0x3a, 0x20, + 0x7b, 0x7b, 0x6d, 0x65, 0x73, 
0x73, 0x61, 0x67, 0x65, 0x7d, 0x7d, 0x22, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3a, 0x20, + 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x2c, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x7c, 0x20, 0x22, 0x63, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x72, 0x3a, 0x20, 0x22, 0x4c, + 0x6c, 0x61, 0x6d, 0x61, 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x75, 0x73, 0x65, 0x72, 0x3a, 0x20, 0x22, 0x55, 0x73, 0x65, 0x72, + 0x22, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x3a, + 0x20, 0x27, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, + 0x6c, 0x28, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, + 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x3a, 0x20, 0x34, 0x30, 0x30, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x3a, 0x20, 0x30, 0x2e, 0x37, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, + 0x61, 0x74, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x6e, 0x3a, 0x20, 0x32, + 0x35, 0x36, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x20, 0x3d, 0x20, 0x64, + 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x20, 0x70, 0x65, 0x6e, 0x61, 0x6c, + 0x74, 0x79, 0x2c, 0x20, 0x2d, 0x31, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x78, 0x74, 0x20, 0x73, 0x69, 0x7a, 0x65, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x70, 0x65, 0x61, 0x74, 0x5f, 0x70, + 0x65, 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x31, 0x2e, 0x31, 0x38, + 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, + 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x3a, 0x20, 0x34, 0x30, 0x2c, + 0x20, 0x2f, 0x2f, 0x20, 0x3c, 0x3d, 0x20, 0x30, 0x20, 0x74, 0x6f, 0x20, + 0x75, 0x73, 0x65, 0x20, 0x76, 0x6f, 0x63, 0x61, 0x62, 0x20, 0x73, 0x69, + 0x7a, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, + 0x5f, 0x70, 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, + 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, + 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x66, 0x73, + 0x5f, 0x7a, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, + 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, + 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, + 0x69, 0x63, 0x61, 0x6c, 0x5f, 0x70, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, + 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, + 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, + 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, + 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x3a, 0x20, 0x34, 0x30, 0x2c, 0x20, 0x2f, - 0x2f, 0x20, 0x3c, 0x3d, 0x20, 0x30, 0x20, 0x74, 0x6f, 0x20, 0x75, 0x73, - 0x65, 0x20, 0x76, 0x6f, 0x63, 
0x61, 0x62, 0x20, 0x73, 0x69, 0x7a, 0x65, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x5f, 0x70, - 0x3a, 0x20, 0x30, 0x2e, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, - 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x66, 0x73, 0x5f, 0x7a, - 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x31, 0x2e, - 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x69, 0x63, - 0x61, 0x6c, 0x5f, 0x70, 0x3a, 0x20, 0x31, 0x2e, 0x30, 0x2c, 0x20, 0x2f, - 0x2f, 0x20, 0x31, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, - 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, - 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x70, 0x65, 0x6e, 0x61, - 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, - 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, - 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x72, - 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, 0x6e, 0x61, - 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, 0x2f, 0x2f, - 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, - 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, - 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, - 0x2f, 0x20, 0x30, 0x2f, 0x31, 0x2f, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x74, - 0x61, 0x75, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x74, 0x61, - 0x72, 0x67, 0x65, 0x74, 0x20, 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, - 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x3a, 0x20, 0x30, 0x2e, 0x31, - 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, 0x61, 0x72, 0x6e, 0x69, 0x6e, - 0x67, 0x20, 0x72, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x27, 0x27, - 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, 0x70, 0x72, - 0x6f, 0x62, 0x73, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6e, - 0x6f, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, - 0x65, 0x73, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x53, 0x54, 0x41, 0x52, 0x54, 0x3a, - 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, - 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x6f, 0x72, 0x77, - 0x73, 0x65, 0x72, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, - 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, - 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x3d, 0x20, 0x22, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x63, 0x70, 0x70, 0x5f, 0x73, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, - 0x72, 0x61, 0x67, 0x65, 0x22, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x66, 0x75, 0x6e, 0x63, 0x74, 
0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, + 0x66, 0x72, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x70, 0x65, + 0x6e, 0x61, 0x6c, 0x74, 0x79, 0x3a, 0x20, 0x30, 0x2e, 0x30, 0x2c, 0x20, + 0x2f, 0x2f, 0x20, 0x30, 0x2e, 0x30, 0x20, 0x3d, 0x20, 0x64, 0x69, 0x73, + 0x61, 0x62, 0x6c, 0x65, 0x64, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, 0x3a, 0x20, 0x30, 0x2c, + 0x20, 0x2f, 0x2f, 0x20, 0x30, 0x2f, 0x31, 0x2f, 0x32, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, 0x6f, 0x73, 0x74, 0x61, 0x74, + 0x5f, 0x74, 0x61, 0x75, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x2f, 0x2f, 0x20, + 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x20, 0x65, 0x6e, 0x74, 0x72, 0x6f, + 0x70, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x69, 0x72, + 0x6f, 0x73, 0x74, 0x61, 0x74, 0x5f, 0x65, 0x74, 0x61, 0x3a, 0x20, 0x30, + 0x2e, 0x31, 0x2c, 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, 0x61, 0x72, 0x6e, + 0x69, 0x6e, 0x67, 0x20, 0x72, 0x61, 0x74, 0x65, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, + 0x27, 0x27, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x5f, + 0x70, 0x72, 0x6f, 0x62, 0x73, 0x3a, 0x20, 0x30, 0x2c, 0x20, 0x2f, 0x2f, + 0x20, 0x6e, 0x6f, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x62, 0x61, 0x62, 0x69, 0x6c, 0x69, + 0x74, 0x69, 0x65, 0x73, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, + 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x61, + 0x63, 0x68, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3a, 0x20, + 0x74, 0x72, 0x75, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x53, 0x54, 0x41, 0x52, + 0x54, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x6f, + 0x72, 0x77, 0x73, 0x65, 0x72, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, - 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, - 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, - 0x61, 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x3d, 0x20, + 0x22, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x63, 0x70, 0x70, 0x5f, 0x73, 0x65, + 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, + 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x22, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, + 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, + 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, + 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, 0x2c, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 
0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, + 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, 0x74, 0x65, + 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, + 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, + 0x61, 0x67, 0x2c, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x69, 0x66, 0x79, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, + 0x72, 0x6f, 0x6d, 0x52, 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, 0x28, 0x74, + 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, 0x65, + 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, + 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, + 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, 0x6f, + 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x67, + 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, + 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, + 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, 0x65, + 0x6d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, + 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x4a, 0x53, 0x4f, 0x4e, + 0x2e, 0x70, 0x61, 0x72, 0x73, 0x65, 0x28, 0x69, 0x74, 0x65, 0x6d, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x41, 0x73, 0x52, 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, + 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, + 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, + 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 
0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, - 0x2c, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x73, 0x74, 0x72, 0x69, 0x6e, - 0x67, 0x69, 0x66, 0x79, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, - 0x6d, 0x52, 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, 0x28, 0x74, 0x61, 0x67, - 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x73, 0x65, 0x74, 0x49, - 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, - 0x20, 0x74, 0x61, 0x67, 0x2c, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, - 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x74, 0x61, 0x67, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, - 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x65, 0x74, - 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, - 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, 0x20, 0x27, 0x2f, 0x27, 0x20, - 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x2e, 0x70, - 0x61, 0x72, 0x73, 0x65, 0x28, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, 0x74, - 0x61, 0x41, 0x73, 0x52, 0x61, 0x77, 0x54, 0x65, 0x78, 0x74, 0x28, 0x74, - 0x61, 0x67, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x74, 0x65, 0x6d, 0x20, 0x3d, - 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, - 0x65, 0x2e, 0x67, 0x65, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x28, 0x6c, 0x6f, - 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, - 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x4b, 0x65, 0x79, 0x20, 0x2b, - 0x20, 0x27, 0x2f, 0x27, 0x20, 0x2b, 0x20, 0x74, 0x61, 0x67, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, - 0x69, 0x74, 0x65, 0x6d, 0x29, 0x20, 
0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, - 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x69, - 0x74, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, - 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, 0x66, 0x6f, - 0x72, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, - 0x74, 0x69, 0x6e, 0x67, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, - 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x7d, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, - 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x73, 0x69, - 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3a, - 0x20, 0x27, 0x27, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x3a, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x3a, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, - 0x3a, 0x20, 0x7b, 0x7d, 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, 0x74, 0x27, 0x73, 0x20, - 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x6c, 0x79, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x21, 0x69, 0x74, 0x65, 0x6d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, + 0x20, 0x69, 0x74, 0x65, 0x6d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, + 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x69, 0x66, 0x20, 0x74, - 0x68, 0x65, 0x72, 0x65, 0x20, 0x61, 0x72, 0x65, 0x20, 0x61, 0x6e, 0x79, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x72, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, - 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, - 0x61, 0x72, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x64, 0x20, 0x69, - 0x6e, 0x20, 0x6f, 0x6e, 0x65, 0x20, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x69, 0x6e, 0x20, 0x66, - 0x6f, 0x72, 0x6d, 0x20, 0x6f, 0x66, 0x20, 0x7b, 0x20, 0x22, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3a, - 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 
0x6c, 0x61, 0x74, 0x65, 0x64, 0x61, - 0x74, 0x61, 0x22, 0x20, 0x7d, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x7b, 0x20, - 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3a, 0x22, - 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x64, 0x61, 0x74, 0x61, - 0x22, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x49, 0x6d, - 0x70, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, - 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, - 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x69, - 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, - 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, - 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, - 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x69, 0x66, 0x20, 0x28, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, + 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, + 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x6e, 0x61, 0x6d, + 0x65, 0x3a, 0x20, 0x27, 0x27, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x3a, 0x20, 0x7b, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x3a, 0x20, 0x7b, 0x7d, 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x3a, 0x20, 0x7b, 0x7d, 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x6c, 0x65, 0x74, 0x27, + 0x73, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x6c, 0x79, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, + 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x69, 0x66, + 0x20, 0x74, 0x68, 0x65, 0x72, 0x65, 0x20, 0x61, 0x72, 0x65, 0x20, 0x61, + 0x6e, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, + 0x65, 0x72, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, + 0x20, 0x61, 0x6e, 0x64, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, + 0x73, 0x20, 0x61, 0x72, 0x65, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x64, + 0x20, 0x69, 0x6e, 0x20, 0x6f, 0x6e, 0x65, 0x20, 0x6f, 0x62, 0x6a, 0x65, + 0x63, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x69, 0x6e, + 0x20, 0x66, 0x6f, 0x72, 0x6d, 0x20, 0x6f, 0x66, 0x20, 0x7b, 0x20, 0x22, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x6e, 0x61, 0x6d, 0x65, + 0x22, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x64, 0x61, 0x74, 0x61, 0x22, 0x20, 0x7d, 0x20, 0x61, 0x6e, 0x64, 0x20, + 0x7b, 0x20, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 
0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, + 0x3a, 0x22, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x64, 0x61, + 0x74, 0x61, 0x22, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, + 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x20, 0x77, 0x65, 0x72, 0x65, 0x20, 0x73, 0x75, 0x63, 0x63, 0x65, - 0x73, 0x73, 0x66, 0x75, 0x6c, 0x79, 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, - 0x27, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x20, + 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, + 0x20, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, + 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, + 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, + 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, 0x61, - 0x74, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, + 0x74, 0x65, 0x73, 0x20, 0x77, 0x65, 0x72, 0x65, 0x20, 0x73, 0x75, 0x63, + 0x63, 0x65, 0x73, 0x73, 0x66, 0x75, 0x6c, 0x79, 0x20, 0x69, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, + 0x67, 0x28, 0x27, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, + 0x67, 0x20, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, + 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, @@ -608,515 +626,639 @@ unsigned char index_html[] = { 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, + 
0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x3a, 0x20, 0x27, 0x27, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, + 0x61, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x3d, 0x20, 0x74, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, - 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, - 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, + 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, - 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, - 0x64, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x67, 0x65, 0x74, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, - 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, - 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6c, 0x61, - 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, - 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, 0x44, 0x61, - 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, - 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x29, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6c, 0x61, 0x73, + 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, + 0x6c, 0x79, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x65, 
0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, + 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, + 0x76, 0x65, 0x64, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x67, 0x65, 0x74, 0x20, 0x61, 0x75, 0x74, + 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, + 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, + 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, + 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x67, 0x65, 0x74, + 0x44, 0x61, 0x74, 0x61, 0x41, 0x73, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, + 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x6c, + 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, + 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x72, 0x65, 0x73, 0x74, 0x6f, + 0x72, 0x69, 0x6e, 0x67, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, + 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, - 0x67, 0x28, 0x27, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, - 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x72, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x69, - 0x6e, 0x67, 0x27, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, - 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x55, - 0x73, 0x65, 0x64, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, - 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x4e, 0x6f, 0x20, 0x61, 0x75, - 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, - 0x75, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, - 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x6e, 
0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, - 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x77, 0x61, 0x73, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x6f, - 0x61, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x41, - 0x70, 0x70, 0x6c, 0x79, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, - 0x67, 0x28, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, - 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x65, - 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, - 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x41, 0x75, 0x74, - 0x6f, 0x73, 0x61, 0x76, 0x65, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x77, 0x65, 0x20, 0x64, - 0x6f, 0x6e, 0x27, 0x74, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, 0x74, 0x6f, - 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x64, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x65, 0x74, 0x27, - 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, 0x20, 0x6e, - 0x65, 0x77, 0x20, 0x6f, 0x6e, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x65, 
0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x4e, 0x6f, 0x20, + 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, + 0x2c, 0x20, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, 0x66, 0x61, + 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x6e, 0x6f, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, + 0x65, 0x64, 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x77, 0x61, + 0x73, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x2c, 0x20, 0x73, 0x6f, 0x20, + 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x64, 0x65, + 0x66, 0x61, 0x75, 0x6c, 0x74, 0x2e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x27, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x69, 0x6e, 0x67, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x28, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x73, 0x61, 0x76, 0x65, 0x64, 0x55, 0x73, 0x65, + 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, + 0x76, 0x65, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, + 0x28, 0x27, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x41, + 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x2e, 0x2e, 0x2e, 0x27, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 
0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, 0x64, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x77, 0x65, + 0x20, 0x64, 0x6f, 0x6e, 0x27, 0x74, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, + 0x74, 0x6f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x6f, 0x76, 0x65, 0x72, + 0x20, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x73, 0x6f, 0x20, 0x6c, 0x65, + 0x74, 0x27, 0x73, 0x20, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x20, 0x61, + 0x20, 0x6e, 0x65, 0x77, 0x20, 0x6f, 0x6e, 0x65, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, + 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, + 0x20, 0x3d, 0x20, 0x27, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x2d, 0x27, 0x20, 0x2b, 0x20, 0x44, 0x61, 0x74, + 0x65, 0x2e, 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x20, 0x3d, - 0x20, 0x27, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x2d, 0x27, 0x20, 0x2b, 0x20, 0x44, 0x61, 0x74, 0x65, 0x2e, - 0x6e, 0x6f, 0x77, 0x28, 0x29, 0x2e, 0x74, 0x6f, 0x53, 0x74, 0x72, 0x69, - 0x6e, 0x67, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6c, 0x65, 0x74, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, - 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, - 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x53, - 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x73, 0x20, 0x27, 0x20, 0x2b, - 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, 0x20, 0x69, - 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, - 0x76, 0x65, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, - 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, - 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x6e, - 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x29, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x69, 0x74, 0x20, - 0x62, 0x61, 0x63, 0x6b, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x70, 0x70, - 0x6c, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, - 0x73, 
0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, - 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x41, - 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, - 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, - 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, - 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, - 0x73, 0x74, 0x27, 0x2c, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, 0x6d, 0x65, - 0x27, 0x3a, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x27, + 0x6e, 0x61, 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, 0x27, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, - 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, - 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x20, 0x66, 0x6f, 0x72, - 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x20, 0x6c, - 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, 0x79, - 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x45, 0x4e, 0x44, 0x3a, - 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, 0x6f, 0x72, - 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x72, 0x6f, 0x77, - 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x53, 0x74, - 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, 0x61, 0x6d, - 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, 0x69, 0x67, - 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x69, 0x6f, 0x6e, 
0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, - 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, - 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, 0x73, 0x20, 0x74, 0x68, 0x65, - 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, - 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, 0x74, 0x3f, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, - 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x63, 0x6f, - 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, - 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, - 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, - 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x74, 0x72, 0x2c, 0x20, - 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, - 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x74, 0x74, 0x69, - 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, - 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x2c, 0x20, 0x2e, 0x2e, 0x2e, - 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, - 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x73, 0x74, 0x72, - 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x41, 0x6c, 0x6c, - 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, - 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, 0x28, 0x5f, 0x2c, 0x20, 0x6b, - 0x65, 0x79, 0x29, 
0x20, 0x3d, 0x3e, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x61, 0x73, 0x79, 0x6e, - 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x72, - 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, - 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, 0x61, 0x72, 0x29, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, - 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, - 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, + 0x27, 0x53, 0x61, 0x76, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x73, 0x20, 0x27, + 0x20, 0x2b, 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x61, 0x76, 0x65, + 0x20, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x61, 0x75, 0x74, 0x6f, + 0x73, 0x61, 0x76, 0x65, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, + 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, + 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, + 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, + 0x20, 0x6e, 0x65, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x2f, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x69, + 0x74, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, + 0x70, 0x70, 0x6c, 0x79, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, 0x6c, + 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, + 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x6f, 0x63, 0x61, 0x6c, 0x5f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, + 0x65, 0x5f, 0x73, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x46, 0x72, 0x6f, + 0x6d, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x28, 0x27, 0x75, 0x73, 0x65, + 0x72, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x5f, + 0x6c, 0x61, 0x73, 0x74, 0x27, 0x2c, 0x20, 0x7b, 0x20, 0x27, 0x6e, 0x61, + 0x6d, 0x65, 0x27, 0x3a, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x20, 0x27, 0x64, 0x61, 0x74, 0x61, 0x27, 0x3a, 0x20, 0x7b, 0x20, + 0x27, 0x73, 0x65, 
0x73, 0x73, 0x69, 0x6f, 0x6e, 0x27, 0x3a, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2c, 0x20, 0x27, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x27, 0x3a, 0x20, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x20, 0x7d, 0x20, 0x7d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, + 0x28, 0x27, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x67, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, + 0x20, 0x6c, 0x61, 0x73, 0x74, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x27, 0x29, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x4c, 0x6f, 0x61, 0x64, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, + 0x6c, 0x79, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x64, 0x28, + 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x20, 0x45, 0x4e, + 0x44, 0x3a, 0x20, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x73, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x73, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x62, 0x72, + 0x6f, 0x77, 0x73, 0x65, 0x72, 0x73, 0x20, 0x4c, 0x6f, 0x63, 0x61, 0x6c, + 0x53, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x20, 0x2a, 0x2f, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6c, 0x6c, + 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x73, + 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x6f, + 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x73, + 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x75, 0x72, 0x72, + 0x65, 0x6e, 0x74, 0x6c, 0x79, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, + 0x69, 0x6e, 0x67, 0x20, 0x3d, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, + 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x45, - 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, - 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x22, 0x29, 0x3b, + 0x65, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x61, 0x73, 0x20, 0x74, + 0x68, 0x65, 0x20, 0x75, 0x73, 0x65, 0x72, 0x20, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x65, 0x64, 0x20, 0x61, 0x20, 0x63, 0x68, 0x61, 0x74, 0x3f, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, + 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, + 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x28, 0x28, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 
0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, + 0x20, 0x30, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x74, + 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, + 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x65, - 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, 0x74, 0x72, - 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, - 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x63, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x64, 0x61, 0x74, - 0x61, 0x20, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x2e, 0x64, 0x61, - 0x74, 0x61, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x73, 0x74, - 0x6f, 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, 0x65, 0x20, 0x28, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x3e, - 0x20, 0x30, 0x20, 0x26, 0x26, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, - 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5b, 0x63, 0x75, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, + 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x73, 0x74, 0x72, + 0x2c, 0x20, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x74, 0x20, 0x73, 0x65, 0x74, 0x74, + 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x78, 0x74, 0x72, + 0x61, 0x53, 0x65, 
0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x74, + 0x74, 0x69, 0x6e, 0x67, 0x73, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, + 0x2e, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x2c, 0x20, 0x2e, + 0x2e, 0x2e, 0x65, 0x78, 0x74, 0x72, 0x61, 0x53, 0x65, 0x74, 0x74, 0x69, + 0x6e, 0x67, 0x73, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x28, 0x73, + 0x74, 0x72, 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x41, + 0x6c, 0x6c, 0x28, 0x2f, 0x5c, 0x7b, 0x5c, 0x7b, 0x28, 0x2e, 0x2a, 0x3f, + 0x29, 0x5c, 0x7d, 0x5c, 0x7d, 0x2f, 0x67, 0x2c, 0x20, 0x28, 0x5f, 0x2c, + 0x20, 0x6b, 0x65, 0x79, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, + 0x67, 0x73, 0x5b, 0x6b, 0x65, 0x79, 0x5d, 0x29, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x61, 0x73, + 0x79, 0x6e, 0x63, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x63, 0x68, 0x61, 0x72, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x20, 0x3d, 0x20, 0x5b, 0x5d, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x20, 0x3d, 0x20, 0x73, + 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, + 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x68, 0x72, 0x6f, 0x77, 0x20, 0x6e, 0x65, 0x77, + 0x20, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x28, 0x22, 0x61, 0x6c, 0x72, 0x65, + 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x22, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, + 0x6e, 0x65, 0x77, 0x20, 0x41, 0x62, 0x6f, 0x72, 0x74, 0x43, 0x6f, 0x6e, + 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x61, 0x77, 0x61, + 0x69, 0x74, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, + 0x75, 0x6e, 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2c, 0x20, 0x7b, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x3a, 0x20, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x63, 0x68, 0x75, 0x6e, + 0x6b, 0x2e, 0x64, 0x61, 0x74, 0x61, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 
0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, + 0x61, 0x2e, 0x73, 0x74, 0x6f, 0x70, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x77, 0x68, 0x69, 0x6c, + 0x65, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, + 0x74, 0x68, 0x20, 0x3e, 0x20, 0x30, 0x20, 0x26, 0x26, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x20, 0x2d, 0x20, 0x31, - 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, - 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x24, 0x2f, 0x29, 0x20, 0x21, - 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, - 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, - 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, - 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, - 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, - 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x3a, 0x20, - 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6d, 0x61, 0x70, 0x28, - 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, - 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, 0x27, 0x2c, 0x20, 0x73, 0x75, - 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, 0x22, 0x2c, 0x20, 0x64, 0x61, - 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, 0x72, 0x72, - 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, - 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, + 0x73, 0x5b, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x20, 0x2d, 0x20, 0x31, 0x5d, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x24, + 0x2f, 0x29, 0x20, 0x21, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x6f, 0x70, 0x28, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x69, 0x66, 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, - 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, - 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, - 0x67, 0x73, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, - 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, 0x6e, 0x64, - 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, 0x6f, 0x20, - 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x3d, 0x20, - 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, - 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, 0x65, - 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x2e, - 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x22, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, + 0x3a, 0x20, 0x27, 0x22, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, + 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x6d, 0x61, + 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, + 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2c, 0x20, 0x22, 0x27, 0x2c, 0x20, + 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x3a, 0x20, 0x22, 0x2c, 0x20, + 0x64, 0x61, 0x74, 0x61, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x73, 0x2e, 0x70, 0x75, 0x73, 0x68, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 
0x64, 0x20, 0x3d, 0x20, 0x64, 0x61, + 0x74, 0x61, 0x2e, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, + 0x6d, 0x61, 0x67, 0x65, 0x20, 0x26, 0x26, 0x20, 0x21, 0x64, 0x61, 0x74, + 0x61, 0x2e, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x6d, 0x6f, 0x64, 0x61, 0x6c, + 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x22, 0x54, + 0x68, 0x65, 0x20, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x20, 0x77, 0x61, + 0x73, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x64, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x6d, 0x75, 0x6c, 0x74, 0x69, + 0x6d, 0x6f, 0x64, 0x61, 0x6c, 0x20, 0x6f, 0x72, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x20, 0x70, 0x72, 0x6f, 0x6a, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x20, 0x63, 0x61, 0x6e, 0x27, 0x74, 0x20, 0x62, + 0x65, 0x20, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x2e, 0x22, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, + 0x5b, 0x2e, 0x2e, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x2c, + 0x20, 0x5b, 0x63, 0x68, 0x61, 0x72, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x72, + 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x5d, + 0x5d, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, + 0x20, 0x28, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, + 0x67, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x64, + 0x61, 0x74, 0x61, 0x2e, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, 0x65, 0x6e, 0x64, 0x20, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, 0x6f, 0x20, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, + 0x74, 0x20, 0x63, 0x68, 0x61, 0x74, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, + 0x6e, 0x63, 0x20, 0x28, 0x6d, 0x73, 0x67, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, 0x6c, 0x65, 0x2e, + 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, 0x79, + 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x27, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x74, 0x72, + 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, + 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x7b, + 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x22, 0x2c, 0x20, 0x6d, 0x73, + 0x67, 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x6c, 0x65, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x3d, + 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2c, 0x20, - 0x5b, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, 0x7d, 0x7d, 0x22, 0x2c, - 0x20, 0x6d, 0x73, 0x67, 0x5d, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x20, 0x3d, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x28, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, - 0x67, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, - 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, - 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, - 0x74, 0x4d, 0x61, 0x70, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x28, 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x5d, 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, - 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, + 0x79, 0x3a, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x28, + 0x5b, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x64, 0x61, 0x74, 0x61, 0x5d, + 0x29, 0x20, 0x3d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, - 0x79, 0x2e, 0x69, 0x73, 0x41, 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, - 0x74, 0x61, 0x29, 0x20, 0x3f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x3a, 0x20, 0x41, 0x72, 0x72, 0x61, 0x79, 0x2e, 0x69, 0x73, 0x41, + 0x72, 0x72, 0x61, 0x79, 0x28, 0x64, 0x61, 0x74, 0x61, 0x29, 0x20, 0x3f, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, 0x20, 0x3d, 0x3e, 0x20, 0x6d, + 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x29, 0x2e, + 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, 0x29, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x5c, 0x73, 0x2f, 0x2c, 0x20, + 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x6d, 0x61, 0x70, 0x28, 0x6d, 0x73, 0x67, - 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x27, 0x27, - 0x29, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, - 0x5c, 0x73, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x20, 0x3a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x29, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, - 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, + 0x64, 0x61, 0x74, 0x61, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x2e, 0x6a, 0x6f, + 0x69, 0x6e, 0x28, 0x22, 0x5c, 0x6e, 0x22, 0x29, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x29, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x20, 0x3d, 0x20, 0x60, 0x41, 0x20, 0x63, 0x68, 0x61, 0x74, + 0x20, 0x62, 0x65, 0x74, 0x77, 0x65, 0x65, 0x6e, 0x20, 0x61, 0x20, 0x63, + 0x75, 0x72, 0x69, 0x6f, 0x75, 0x73, 0x20, 0x68, 0x75, 0x6d, 0x61, 0x6e, + 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x6e, 0x20, 0x61, 0x72, 0x74, 0x69, + 0x66, 0x69, 0x63, 0x69, 0x61, 0x6c, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x6c, + 0x6c, 0x69, 0x67, 0x65, 0x6e, 0x63, 0x65, 0x20, 0x61, 0x73, 0x73, 0x69, + 0x73, 0x74, 0x61, 0x6e, 0x74, 0x2e, 0x20, 
0x54, 0x68, 0x65, 0x20, 0x61, + 0x73, 0x73, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x74, 0x20, 0x67, 0x69, 0x76, + 0x65, 0x73, 0x20, 0x68, 0x65, 0x6c, 0x70, 0x66, 0x75, 0x6c, 0x2c, 0x20, + 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x2c, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x70, 0x6f, 0x6c, 0x69, 0x74, 0x65, 0x20, 0x61, 0x6e, 0x73, + 0x77, 0x65, 0x72, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, + 0x68, 0x75, 0x6d, 0x61, 0x6e, 0x27, 0x73, 0x20, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x5c, 0x6e, 0x55, 0x53, 0x45, 0x52, + 0x3a, 0x5b, 0x69, 0x6d, 0x67, 0x2d, 0x31, 0x30, 0x5d, 0x24, 0x7b, 0x6d, + 0x73, 0x67, 0x7d, 0x5c, 0x6e, 0x41, 0x53, 0x53, 0x49, 0x53, 0x54, 0x41, + 0x4e, 0x54, 0x3a, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, - 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, 0x2c, 0x20, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, - 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x2c, 0x20, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x75, 0x73, 0x65, 0x72, - 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x7b, 0x7b, 0x63, 0x68, 0x61, 0x72, - 0x7d, 0x7d, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, - 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, - 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, - 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x6f, - 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, 0x72, 0x65, - 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x2e, - 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x70, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x2e, 0x2e, 0x2e, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x2c, - 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, - 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, 0x4c, 0x6c, 0x61, 0x6d, - 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 
0x6c, 0x75, 0x65, 0x2c, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, - 0x70, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, - 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, - 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x61, 0x62, 0x6f, 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, - 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, - 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, - 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, - 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, - 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, - 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x20, 0x3d, 0x20, 0x22, 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, - 0x62, 0x6d, 0x69, 0x74, 0x73, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, - 0x6e, 0x74, 0x2e, 0x77, 0x68, 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, - 0x20, 0x31, 0x33, 0x20, 0x26, 0x26, 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, - 0x74, 0x2e, 0x73, 0x68, 0x69, 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, + 0x64, 0x3a, 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 
0x69, 0x64, 0x2c, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, + 0x3a, 0x20, 0x5b, 0x22, 0x3c, 0x2f, 0x73, 0x3e, 0x22, 0x2c, 0x20, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x63, + 0x68, 0x61, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x2c, 0x20, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x22, 0x7b, 0x7b, 0x75, 0x73, + 0x65, 0x72, 0x7d, 0x7d, 0x3a, 0x22, 0x29, 0x5d, 0x2c, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x7b, 0x7b, 0x63, 0x68, + 0x61, 0x72, 0x7d, 0x7d, 0x22, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x72, 0x75, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, + 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x61, 0x73, 0x79, 0x6e, 0x63, 0x20, 0x28, + 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x69, 0x66, 0x20, 0x28, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x6c, 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, + 0x73, 0x6f, 0x6c, 0x65, 0x2e, 0x6c, 0x6f, 0x67, 0x28, 0x27, 0x61, 0x6c, + 0x72, 0x65, 0x61, 0x64, 0x79, 0x20, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, + 0x67, 0x2e, 0x2e, 0x2e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x7b, 0x20, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x7d, 0x20, 0x3d, 0x20, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, + 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x2c, 0x20, 0x5b, 0x22, 0x22, 0x2c, 0x20, 0x70, 0x72, + 0x6f, 0x6d, 0x70, 0x74, 0x5d, 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x61, 0x77, 0x61, 0x69, 0x74, 0x20, 0x72, 0x75, 0x6e, + 0x4c, 0x6c, 0x61, 0x6d, 0x61, 0x28, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, + 0x2c, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x73, 0x6c, 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x3a, 0x20, 0x73, 0x6c, + 0x6f, 0x74, 0x5f, 0x69, 0x64, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x5b, 0x5d, 0x2c, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x22, 0x22, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x74, 0x6f, 0x70, + 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, + 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, + 0x65, 0x72, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 
0x2e, 0x61, 0x62, 0x6f, + 0x72, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x6c, 0x65, 0x72, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x6e, 0x75, 0x6c, + 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, + 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x5b, + 0x5d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, 0x70, 0x6c, + 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x20, 0x3d, 0x20, 0x28, + 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, + 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, + 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, + 0x75, 0x74, 0x22, 0x29, 0x2e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x28, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x45, 0x6c, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x42, 0x79, 0x49, 0x64, 0x28, 0x22, 0x66, 0x69, 0x6c, + 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x29, 0x2e, 0x61, 0x64, 0x64, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x65, + 0x72, 0x28, 0x22, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x2c, 0x20, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x65, 0x6c, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x20, 0x3d, 0x20, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5b, 0x30, 0x5d, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x73, + 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x61, 0x64, 0x65, + 0x72, 0x20, 0x3d, 0x20, 0x6e, 0x65, 0x77, 0x20, 0x46, 0x69, 0x6c, 0x65, + 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, 0x64, + 0x65, 0x72, 0x2e, 0x6f, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x3d, 0x20, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, - 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x66, 0x6f, 0x72, 0x6d, 0x20, 0x6f, 
0x6e, 0x73, 0x75, 0x62, - 0x6d, 0x69, 0x74, 0x3d, 0x24, 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, - 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, - 0x61, 0x72, 0x65, 0x61, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x20, 0x3f, 0x20, 0x22, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, - 0x20, 0x3a, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, - 0x61, 0x72, 0x67, 0x65, 0x74, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, - 0x73, 0x73, 0x3d, 0x24, 0x7b, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, - 0x62, 0x6d, 0x69, 0x74, 0x73, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, - 0x61, 0x79, 0x20, 0x73, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, - 0x2e, 0x2e, 0x2e, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, - 0x3d, 0x32, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, - 0x74, 0x65, 0x78, 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, - 0x65, 0x7d, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, - 0x67, 0x68, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, - 0x69, 0x74, 0x22, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, + 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x73, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, + 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x61, + 0x74, 0x61, 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x61, 
0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x20, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x5b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x7b, 0x20, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x20, 0x69, 0x6d, 0x61, 0x67, + 0x65, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, + 0x63, 0x65, 0x28, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x3a, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x5c, 0x2f, 0x5b, 0x5e, 0x3b, 0x5d, 0x2b, 0x3b, 0x62, 0x61, + 0x73, 0x65, 0x36, 0x34, 0x2c, 0x2f, 0x2c, 0x20, 0x27, 0x27, 0x29, 0x2c, + 0x20, 0x69, 0x64, 0x3a, 0x20, 0x31, 0x30, 0x20, 0x7d, 0x5d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x2e, 0x72, 0x65, 0x61, 0x64, 0x41, 0x73, 0x44, 0x61, + 0x74, 0x61, 0x55, 0x52, 0x4c, 0x28, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, + 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, + 0x6c, 0x28, 0x22, 0x22, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x6f, + 0x70, 0x28, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x63, 0x68, 0x61, 0x74, 0x28, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x3b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x22, + 0x22, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, + 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x77, 0x68, + 0x69, 0x63, 0x68, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x31, 0x33, 0x20, 0x26, + 0x26, 0x20, 0x21, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x73, 0x68, 0x69, + 0x66, 0x74, 0x4b, 0x65, 0x79, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, + 0x74, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 
0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x72, + 0x6d, 0x20, 0x6f, 0x6e, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x3d, 0x24, + 0x7b, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x3d, 0x24, 0x7b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, - 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, - 0x64, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, + 0x67, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x22, 0x6c, + 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x22, 0x20, 0x3a, 0x20, 0x6e, 0x75, + 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x28, 0x65, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x65, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6f, + 0x6e, 0x6b, 0x65, 0x79, 0x70, 0x72, 0x65, 0x73, 0x73, 0x3d, 0x24, 0x7b, + 0x65, 0x6e, 0x74, 0x65, 0x72, 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x73, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, + 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x53, 0x61, 0x79, 0x20, 0x73, 0x6f, + 0x6d, 0x65, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x2e, 0x2e, 0x2e, 0x22, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x32, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, + 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x22, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, + 0x61, 0x73, 0x73, 0x3d, 0x22, 0x72, 0x69, 0x67, 0x68, 0x74, 0x22, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, + 0x65, 0x3d, 0x22, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x22, 0x20, 0x64, + 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x3d, 0x24, 0x7b, 0x67, 0x65, + 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x7d, 0x3e, 0x53, 0x65, 0x6e, 0x64, 0x3c, 0x2f, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 
0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, + 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, + 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x7d, + 0x3e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x20, 0x49, 0x6d, 0x61, 0x67, + 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x73, 0x74, 0x6f, 0x70, 0x7d, 0x20, 0x64, @@ -1238,28 +1380,39 @@ unsigned char index_html[] = { 0x74, 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x74, 0x65, 0x78, 0x74, 0x29, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x28, - 0x75, 0x73, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, + 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, + 0x79, 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, + 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, + 0x3a, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, + 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, + 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x20, 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, - 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x3c, 0x73, - 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x7b, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x28, 0x75, 0x73, 0x65, 0x72, 0x29, 0x7d, 0x3a, - 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x20, 0x24, 0x7b, + 0x3d, 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, - 0x65, 0x6c, 0x73, 0x65, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, - 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x70, 0x20, 0x6b, 0x65, 0x79, 0x3d, - 0x24, 0x7b, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x7d, 0x3e, 0x24, 0x7b, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x3c, 0x2f, 0x70, 0x3e, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, - 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, - 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, - 0x7b, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, 0x3e, + 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 
0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, + 0x3d, 0x22, 0x63, 0x68, 0x61, 0x74, 0x22, 0x20, 0x72, 0x65, 0x66, 0x3d, + 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x7d, + 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x3c, 0x69, 0x6d, 0x67, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, + 0x77, 0x69, 0x64, 0x74, 0x68, 0x3a, 0x20, 0x36, 0x30, 0x25, 0x3b, 0x24, + 0x7b, 0x21, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x65, + 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x20, 0x3f, 0x20, 0x60, 0x64, 0x69, + 0x73, 0x70, 0x6c, 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, + 0x60, 0x20, 0x3a, 0x20, 0x60, 0x60, 0x7d, 0x22, 0x20, 0x73, 0x72, 0x63, + 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, + 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x7d, 0x22, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x66, 0x6c, 0x61, 0x74, 0x4d, 0x61, 0x70, 0x28, 0x63, 0x68, 0x61, 0x74, 0x4c, 0x69, @@ -1344,703 +1497,704 @@ unsigned char index_html[] = { 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x64, 0x75, 0x63, 0x65, 0x28, 0x28, 0x61, 0x63, 0x63, 0x2c, 0x20, 0x63, 0x75, 0x72, 0x2c, 0x20, 0x69, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x28, 0x7b, 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, 0x20, - 0x5b, 0x63, 0x75, 0x72, 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, 0x5d, - 0x3a, 0x20, 0x69, 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, - 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, 0x69, 0x74, - 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x72, 0x61, - 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, - 0x74, 0x65, 0x72, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x47, 0x72, - 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x28, 0x29, 0x2c, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, 0x63, 0x68, - 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x28, 0x60, - 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, 0x69, 0x6c, - 0x65, 0x64, 0x3a, 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x46, 0x6c, 0x6f, 0x61, 0x74, 
0x46, 0x69, 0x65, 0x6c, 0x64, - 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, - 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, - 0x6d, 0x65, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, 0x20, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, - 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, - 0x65, 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, - 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, - 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, - 0x6d, 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, - 0x20, 0x73, 0x74, 0x65, 0x70, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x74, 0x65, - 0x70, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, - 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, - 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x46, 0x6c, - 0x6f, 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, - 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, - 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, 0x6e, 0x74, 0x46, 0x69, - 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x6c, 0x61, 0x62, 0x65, + 0x3e, 0x20, 0x28, 0x7b, 0x20, 0x2e, 0x2e, 0x2e, 0x61, 0x63, 0x63, 0x2c, + 0x20, 0x5b, 0x63, 0x75, 0x72, 0x2e, 0x74, 0x72, 0x69, 0x6d, 0x28, 0x29, + 0x5d, 0x3a, 0x20, 0x69, 0x20, 0x7d, 0x29, 0x2c, 0x20, 0x7b, 0x7d, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x29, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x76, 0x69, 0x73, + 0x69, 0x74, 0x28, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2c, 0x20, 0x27, + 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x2e, 0x2e, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, + 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3a, 0x20, 0x63, 0x6f, 0x6e, 0x76, + 0x65, 0x72, 0x74, 0x65, 0x72, 0x2e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, + 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 
0x28, 0x29, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x20, 0x63, 0x61, 0x74, + 0x63, 0x68, 0x20, 0x28, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x61, 0x6c, 0x65, 0x72, 0x74, + 0x28, 0x60, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, 0x20, 0x66, 0x61, + 0x69, 0x6c, 0x65, 0x64, 0x3a, 0x20, 0x24, 0x7b, 0x65, 0x2e, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x7d, 0x60, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x46, 0x69, 0x65, + 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, 0x20, 0x6d, 0x69, 0x6e, 0x2c, - 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, - 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, - 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, - 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, - 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, - 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, - 0x22, 0x24, 0x7b, 0x6d, 0x69, 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, - 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x61, 0x78, 0x7d, 0x22, 0x20, 0x6e, 0x61, - 0x6d, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, - 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x49, 0x6e, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, + 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, 0x73, 0x74, 0x65, 0x70, 0x2c, + 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, + 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, + 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, + 0x6f, 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, + 0x3e, 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, + 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x72, 0x61, 0x6e, 0x67, 0x65, + 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, + 0x7d, 0x22, 0x20, 0x6d, 0x69, 0x6e, 0x3d, 0x22, 0x24, 0x7b, 0x6d, 0x69, + 0x6e, 0x7d, 0x22, 0x20, 0x6d, 0x61, 0x78, 0x3d, 0x22, 0x24, 0x7b, 0x6d, + 0x61, 0x78, 0x7d, 0x22, 0x20, 0x73, 0x74, 
0x65, 0x70, 0x3d, 0x22, 0x24, + 0x7b, 0x73, 0x74, 0x65, 0x70, 0x7d, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, + 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x20, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x7d, 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, + 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x24, 0x7b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x7d, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x75, - 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x73, 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x65, 0x2e, 0x70, 0x72, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x54, 0x6f, 0x44, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x41, 0x6e, 0x64, 0x41, 0x70, 0x70, - 0x6c, 0x79, 0x28, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x55, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x52, 0x65, 0x73, 0x65, 0x74, 0x42, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x55, 0x73, 0x65, 0x72, - 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x2e, 0x6e, 0x61, 0x6d, 0x65, 0x20, 0x3d, 0x3d, 0x20, 0x27, - 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x27, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x49, + 0x6e, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x20, 0x3d, 0x20, 0x28, 0x7b, + 0x20, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x2c, 0x20, 0x6d, 0x61, 0x78, 0x2c, + 0x20, 0x6d, 0x69, 0x6e, 0x2c, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x2c, 0x20, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x7d, 0x29, 0x20, 0x3d, 0x3e, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x64, 0x69, 0x73, 0x61, 0x62, - 0x6c, 0x65, 0x64, 0x3e, 0x55, 0x73, 0x69, 0x6e, 0x67, 0x20, 0x64, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 
0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x20, 0x6f, 0x6e, 0x63, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, - 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x52, 0x65, 0x73, 0x65, 0x74, 0x7d, 0x3e, 0x52, 0x65, 0x73, 0x65, 0x74, - 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x74, 0x6f, 0x20, 0x64, 0x65, 0x66, 0x61, - 0x75, 0x6c, 0x74, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, - 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x61, 0x75, 0x74, 0x6f, - 0x73, 0x61, 0x76, 0x65, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x20, 0x6f, 0x6e, 0x20, 0x65, 0x76, 0x65, 0x72, 0x79, 0x20, 0x63, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x75, 0x73, 0x65, 0x72, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x73, 0x61, 0x76, 0x65, 0x28, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x73, - 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2c, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x5d, 0x29, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, - 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x20, 0x3d, 0x20, 0x28, - 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, - 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, 0x47, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, - 0x20, 0x69, 0x64, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, - 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x67, 0x72, 0x61, 0x6d, - 0x6d, 0x61, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x68, 0x6f, - 0x6c, 0x64, 0x65, 0x72, 0x3d, 0x22, 0x55, 0x73, 0x65, 0x20, 0x67, 0x62, - 0x6e, 0x66, 0x20, 0x6f, 0x72, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2b, 0x63, 0x6f, 0x6e, 0x76, 0x65, 0x72, - 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x2e, 0x67, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x22, 0x20, 0x72, - 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, 0x70, - 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, - 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, - 0x70, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x22, 0x20, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x68, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x3d, 
0x22, 0x6f, 0x72, - 0x64, 0x65, 0x72, 0x3a, 0x20, 0x70, 0x72, 0x6f, 0x70, 0x31, 0x2c, 0x70, - 0x72, 0x6f, 0x70, 0x32, 0x2c, 0x70, 0x72, 0x6f, 0x70, 0x33, 0x22, 0x20, - 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x4a, - 0x73, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x50, 0x72, 0x6f, - 0x70, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, - 0x22, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x22, 0x20, 0x6f, 0x6e, 0x63, - 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, 0x63, 0x6f, 0x6e, 0x76, 0x65, - 0x72, 0x74, 0x4a, 0x53, 0x4f, 0x4e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, - 0x47, 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x7d, 0x3e, 0x43, 0x6f, 0x6e, - 0x76, 0x65, 0x72, 0x74, 0x20, 0x4a, 0x53, 0x4f, 0x4e, 0x20, 0x53, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x3c, 0x2f, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, - 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, - 0x65, 0x74, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x28, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x68, 0x74, 0x6d, - 0x6c, 0x46, 0x6f, 0x72, 0x3d, 0x22, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, - 0x22, 0x3e, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x3c, 0x2f, 0x6c, 0x61, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, + 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, + 0x72, 0x3d, 0x22, 0x24, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x22, 0x3e, + 0x24, 0x7b, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x7d, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, - 0x65, 0x61, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x78, - 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x70, 0x72, 0x6f, - 0x6d, 0x70, 0x74, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, - 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x7d, 0x22, - 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x7d, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x29, 0x3b, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x43, 0x68, 0x61, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, - 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, - 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x43, 0x6f, - 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x53, 0x65, - 0x74, 0x28, 0x29, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, - 0x74, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x74, 0x77, 0x6f, - 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x75, - 0x73, 0x65, 0x72, 0x22, 0x3e, 0x55, 0x73, 0x65, 0x72, 0x20, 0x6e, 0x61, - 0x6d, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, - 0x3d, 0x22, 0x74, 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, - 0x3d, 0x22, 0x75, 0x73, 0x65, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x75, 0x73, 0x65, 0x72, 0x7d, - 0x22, 0x20, 0x6f, 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, - 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, - 0x6e, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x62, 0x6f, - 0x74, 0x22, 0x3e, 0x42, 0x6f, 0x74, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3c, - 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x74, - 0x65, 0x78, 0x74, 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x63, - 0x68, 0x61, 0x72, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, - 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x63, 0x68, 0x61, 0x72, 0x7d, 0x22, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x20, - 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, - 0x65, 0x6c, 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x73, 0x65, 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x20, 0x66, 0x6f, 0x72, - 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3e, - 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, - 0x69, 0x64, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x22, 0x20, 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3d, - 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x34, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6c, 0x61, 0x62, 0x65, 0x6c, - 0x20, 0x66, 0x6f, 0x72, 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 0x22, 0x3e, 0x43, 0x68, 0x61, 0x74, 0x20, 0x68, 0x69, 0x73, - 0x74, 0x6f, 0x72, 0x79, 0x20, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x3c, 0x2f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x74, 0x65, 0x78, 0x74, 0x61, 0x72, 0x65, 0x61, 0x20, 0x69, 0x64, - 0x3d, 0x22, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, - 0x6e, 0x61, 0x6d, 0x65, 0x3d, 0x22, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x20, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3d, 0x22, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x68, 0x69, - 0x73, 0x74, 0x6f, 0x72, 0x79, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x7d, 0x22, 0x20, 0x72, 0x6f, 0x77, 0x73, 0x3d, 0x31, 0x20, 0x6f, - 0x6e, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x3d, 0x24, 0x7b, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x47, - 0x72, 0x61, 0x6d, 0x6d, 0x61, 0x72, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, - 0x6c, 0x28, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x65, - 0x74, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, - 0x6f, 0x72, 0x6d, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, - 0x28, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x68, 0x74, - 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x50, 0x72, 0x6f, 0x6d, 0x70, 
[Hex-encoded hunk condensed: this part of the patch regenerates the server web UI header (an xxd -i style dump of index.html, 12 bytes per row). Decoded, the new bytes add a range slider plus a current-value readout to the parameter fields, a user-template reset button ("Using default template" when the default is selected, "Reset all to default" otherwise) with autosave via a useEffect hook, and a grammar control (GBNF textarea, prop-order input, "Convert JSON Schema" button); the chat/completion config forms, the IntField/FloatField parameter controls, the Mirostat radio options, and the token-probability popover are otherwise unchanged except for whitespace inside the field-property braces.]
0x65, 0x6e, 0x74, 0x27, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, @@ -2056,464 +2210,470 @@ unsigned char index_html[] = { 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, - 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, - 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x20, 0x73, 0x74, 0x79, 0x6c, - 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, - 0x6f, 0x75, 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, - 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x20, 0x7d, 0x7d, 0x20, 0x70, 0x6f, 0x70, - 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, - 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, - 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6d, - 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, - 0x61, 0x74, 0x63, 0x68, 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, - 0x29, 0x20, 0x3f, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x62, 0x72, - 0x20, 0x2f, 0x3e, 0x60, 0x20, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x70, 0x6f, 0x6f, 0x72, - 0x20, 0x6d, 0x61, 0x6e, 0x73, 0x20, 0x6d, 0x61, 0x72, 0x6b, 0x64, 0x6f, - 0x77, 0x6e, 0x20, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, - 0x6e, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, - 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, - 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x6d, 0x64, 0x20, 0x3d, 0x20, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x2e, 0x74, 0x65, 0x78, 0x74, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x26, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x61, - 0x6d, 0x70, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x29, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x70, 0x6f, 0x76, + 0x65, 0x72, 0x7d, 0x20, 0x73, 0x74, 0x79, 
0x6c, 0x65, 0x3d, 0x24, 0x7b, + 0x7b, 0x20, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, + 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x3a, 0x20, 0x70, 0x43, 0x6f, 0x6c, 0x6f, + 0x72, 0x20, 0x7d, 0x7d, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, + 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x3d, 0x24, 0x7b, 0x70, + 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, + 0x65, 0x6e, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6d, 0x73, 0x67, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x2e, 0x6d, 0x61, 0x74, 0x63, 0x68, + 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x29, 0x20, 0x3f, 0x20, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x60, + 0x20, 0x3a, 0x20, 0x6d, 0x73, 0x67, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x2f, 0x2f, 0x20, 0x70, 0x6f, 0x6f, 0x72, 0x20, 0x6d, 0x61, 0x6e, + 0x73, 0x20, 0x6d, 0x61, 0x72, 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x20, 0x72, + 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x61, 0x72, + 0x6b, 0x64, 0x6f, 0x77, 0x6e, 0x69, 0x73, 0x68, 0x20, 0x3d, 0x20, 0x28, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x6d, 0x64, 0x20, 0x3d, 0x20, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x2e, 0x74, 0x65, 0x78, 0x74, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, - 0x3c, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x6c, 0x74, 0x3b, 0x27, 0x29, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3e, 0x2f, 0x67, 0x2c, 0x20, - 0x27, 0x26, 0x67, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x28, 0x2f, 0x5e, 0x23, 0x7b, 0x31, 0x2c, 0x36, 0x7d, 0x20, 0x28, 0x2e, - 0x2a, 0x29, 0x24, 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x68, - 0x33, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x68, 0x33, 0x3e, 0x27, 0x29, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x5c, 0x2a, 0x28, 0x2e, - 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, - 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, - 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, - 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, - 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, - 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, - 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, - 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, 0x27, + 0x26, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x61, 0x6d, 0x70, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x2e, 0x72, - 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x28, 0x2e, 0x2a, - 0x3f, 0x29, 0x5f, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, + 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x3c, 0x2f, 0x67, 0x2c, + 0x20, 0x27, 0x26, 0x6c, 0x74, 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x28, 0x2f, 0x3e, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x26, 0x67, 0x74, + 0x3b, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5e, 0x23, + 0x7b, 0x31, 0x2c, 0x36, 0x7d, 0x20, 0x28, 0x2e, 0x2a, 0x29, 0x24, 0x2f, + 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x68, 0x33, 0x3e, 0x24, 0x31, + 0x3c, 0x2f, 0x68, 0x33, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, + 0x28, 0x2f, 0x5c, 0x2a, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5c, + 0x2a, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, + 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, + 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, + 0x5f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x5f, 0x2f, 0x67, 0x2c, + 0x20, 0x27, 0x3c, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x24, 0x31, + 0x3c, 0x2f, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x3e, 0x27, 0x29, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x2a, 0x28, 0x2e, 0x2a, 0x3f, + 0x29, 0x5c, 0x2a, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x28, 0x2f, 0x60, 0x60, 0x60, 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, - 0x28, 0x5b, 0x5c, 0x73, 0x5c, 0x53, 0x5d, 0x2a, 0x3f, 0x29, 0x60, 0x60, - 0x60, 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x70, 0x72, 0x65, 0x3e, 0x3c, - 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, - 0x65, 0x3e, 0x3c, 0x2f, 0x70, 0x72, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x60, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x60, - 0x2f, 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, - 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, - 0x61, 0x63, 0x65, 0x28, 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x2c, - 0x20, 0x27, 0x3c, 0x62, 0x72, 0x20, 0x2f, 0x3e, 0x27, 0x29, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, - 0x64, 0x61, 0x6e, 0x67, 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, - 0x65, 0x74, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x3d, - 0x24, 0x7b, 0x7b, 0x20, 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3a, 0x20, - 0x6d, 0x64, 0x20, 0x7d, 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, - 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, - 0x20, 0x3d, 0x20, 0x28, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, - 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 
0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x21, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, - 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, - 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, - 0x6e, 0x2f, 0x3e, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6c, - 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, - 0x64, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, - 0x6d, 0x73, 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, - 0x7d, 0x6d, 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, - 0x6e, 0x2c, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, - 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, - 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x5f, - 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, - 0x65, 0x64, 0x28, 0x32, 0x29, 0x7d, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x73, 0x20, 0x70, 0x65, 0x72, 0x20, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, - 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x2f, 0x2f, 0x20, 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x70, 0x6f, - 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x6f, 0x70, - 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, - 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x4f, 0x70, - 0x65, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, - 0x61, 0x6c, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, - 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, - 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x74, 0x6f, - 0x70, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x2c, 0x20, 0x6c, 0x65, - 0x66, 0x74, 0x3a, 0x20, 0x27, 0x30, 0x70, 0x78, 0x27, 0x20, 0x7d, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x20, - 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, - 0x6c, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, - 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, - 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x74, 0x6f, 0x67, 0x67, - 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, - 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x62, 0x75, 0x74, 0x74, - 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 
0x72, 0x65, 0x6e, - 0x74, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x63, - 0x74, 0x20, 0x3d, 0x20, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, - 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, - 0x74, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6c, 0x69, - 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6f, 0x73, - 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, - 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x60, 0x24, 0x7b, - 0x72, 0x65, 0x63, 0x74, 0x2e, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x20, - 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, - 0x6f, 0x6c, 0x6c, 0x59, 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, - 0x66, 0x74, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, - 0x6c, 0x65, 0x66, 0x74, 0x20, 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, - 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x58, 0x7d, 0x70, 0x78, - 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, - 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, - 0x20, 0x21, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, - 0x74, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, - 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x20, 0x3d, 0x20, 0x28, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, - 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, - 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x21, 0x70, - 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, - 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, - 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, - 0x67, 0x65, 0x74, 0x29, 0x20, 0x26, 0x26, 0x20, 0x21, 0x62, 0x75, 0x74, - 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, - 0x6e, 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, - 0x29, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x75, 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, - 0x28, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x2e, 0x61, 0x64, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, - 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, - 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, 
0x61, 0x6e, 0x64, - 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, - 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x3d, - 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x72, - 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, - 0x73, 0x74, 0x65, 0x6e, 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, - 0x65, 0x64, 0x6f, 0x77, 0x6e, 0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, - 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, - 0x64, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x7d, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, - 0x20, 0x5b, 0x5d, 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, - 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, - 0x70, 0x61, 0x6e, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x7d, - 0x20, 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x62, 0x75, 0x74, 0x74, 0x6f, - 0x6e, 0x52, 0x65, 0x66, 0x7d, 0x20, 0x6f, 0x6e, 0x43, 0x6c, 0x69, 0x63, - 0x6b, 0x3d, 0x24, 0x7b, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, - 0x70, 0x6f, 0x76, 0x65, 0x72, 0x7d, 0x3e, 0x24, 0x7b, 0x70, 0x72, 0x6f, - 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, - 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, - 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x26, 0x26, 0x20, 0x68, 0x74, + 0x63, 0x65, 0x28, 0x2f, 0x5f, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x5f, 0x2f, + 0x67, 0x2c, 0x20, 0x27, 0x3c, 0x65, 0x6d, 0x3e, 0x24, 0x31, 0x3c, 0x2f, + 0x65, 0x6d, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, 0x2f, + 0x60, 0x60, 0x60, 0x2e, 0x2a, 0x3f, 0x5c, 0x6e, 0x28, 0x5b, 0x5c, 0x73, + 0x5c, 0x53, 0x5d, 0x2a, 0x3f, 0x29, 0x60, 0x60, 0x60, 0x2f, 0x67, 0x2c, + 0x20, 0x27, 0x3c, 0x70, 0x72, 0x65, 0x3e, 0x3c, 0x63, 0x6f, 0x64, 0x65, + 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x3c, 0x2f, + 0x70, 0x72, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x60, 0x28, 0x2e, 0x2a, 0x3f, 0x29, 0x60, 0x2f, 0x67, 0x2c, 0x20, + 0x27, 0x3c, 0x63, 0x6f, 0x64, 0x65, 0x3e, 0x24, 0x31, 0x3c, 0x2f, 0x63, + 0x6f, 0x64, 0x65, 0x3e, 0x27, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x2e, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x28, + 0x2f, 0x5c, 0x6e, 0x2f, 0x67, 0x69, 0x6d, 0x2c, 0x20, 0x27, 0x3c, 0x62, + 0x72, 0x20, 0x2f, 0x3e, 0x27, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, + 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, 0x64, 0x61, 0x6e, 0x67, + 0x65, 0x72, 0x6f, 0x75, 0x73, 0x6c, 0x79, 0x53, 0x65, 0x74, 0x49, 0x6e, + 0x6e, 0x65, 0x72, 0x48, 0x54, 0x4d, 0x4c, 0x3d, 0x24, 0x7b, 0x7b, 0x20, + 0x5f, 0x5f, 0x68, 0x74, 0x6d, 0x6c, 0x3a, 0x20, 0x6d, 0x64, 0x20, 0x7d, + 0x7d, 0x20, 0x2f, 0x3e, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, + 0x20, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, 0x6e, 
0x65, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x20, 0x3d, 0x20, 0x28, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, + 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x2f, 0x3e, 0x60, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, - 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x3d, 0x22, 0x23, 0x70, 0x6f, 0x72, 0x74, - 0x61, 0x6c, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, + 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, + 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, + 0x72, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x6d, 0x73, 0x2e, 0x74, + 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x29, 0x7d, 0x6d, 0x73, 0x20, + 0x70, 0x65, 0x72, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x2c, 0x20, 0x24, + 0x7b, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x53, 0x74, 0x61, 0x74, 0x73, 0x2e, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, + 0x74, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x63, 0x6f, + 0x6e, 0x64, 0x2e, 0x74, 0x6f, 0x46, 0x69, 0x78, 0x65, 0x64, 0x28, 0x32, + 0x29, 0x7d, 0x20, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x20, 0x70, 0x65, + 0x72, 0x20, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x70, 0x61, 0x6e, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x73, + 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, + 0x72, 0x20, 0x69, 0x6d, 0x70, 0x6c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, + 0x20, 0x3d, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x20, 0x3d, + 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x28, 0x66, + 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x20, 0x3d, 0x20, 0x75, 0x73, 0x65, 0x53, 0x69, 0x67, + 0x6e, 0x61, 0x6c, 0x28, 0x7b, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x27, + 0x30, 0x70, 0x78, 0x27, 0x2c, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, + 0x27, 0x30, 0x70, 0x78, 0x27, 0x20, 0x7d, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x62, 0x75, + 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x20, 0x3d, 0x20, 0x75, 0x73, + 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, 0x29, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, + 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 
0x66, 0x20, 0x3d, + 0x20, 0x75, 0x73, 0x65, 0x52, 0x65, 0x66, 0x28, 0x6e, 0x75, 0x6c, 0x6c, + 0x29, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, + 0x6e, 0x73, 0x74, 0x20, 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, + 0x70, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x28, 0x29, 0x20, 0x3d, + 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x69, 0x66, 0x20, 0x28, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, + 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, + 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x72, 0x65, 0x63, 0x74, 0x20, 0x3d, 0x20, + 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, + 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x67, 0x65, 0x74, 0x42, 0x6f, 0x75, + 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x52, + 0x65, 0x63, 0x74, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x66, 0x3d, 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, - 0x72, 0x52, 0x65, 0x66, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x3d, 0x22, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, 0x63, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, - 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, - 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x6f, 0x70, 0x2c, + 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, + 0x2e, 0x62, 0x6f, 0x74, 0x74, 0x6f, 0x6d, 0x20, 0x2b, 0x20, 0x77, 0x69, + 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, 0x72, 0x6f, 0x6c, 0x6c, 0x59, + 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, + 0x60, 0x24, 0x7b, 0x72, 0x65, 0x63, 0x74, 0x2e, 0x6c, 0x65, 0x66, 0x74, + 0x20, 0x2b, 0x20, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x2e, 0x73, 0x63, + 0x72, 0x6f, 0x6c, 0x6c, 0x58, 0x7d, 0x70, 0x78, 0x60, 0x2c, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, + 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x3d, 0x20, 0x21, 0x69, 0x73, + 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x20, 0x68, 0x61, + 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, + 0x73, 0x69, 0x64, 0x65, 0x20, 0x3d, 0x20, 0x28, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x6f, 0x70, 0x6f, + 0x76, 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, + 0x6e, 0x74, 0x20, 0x26, 0x26, 0x20, 0x21, 0x70, 0x6f, 0x70, 0x6f, 0x76, + 0x65, 0x72, 0x52, 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 
0x72, 0x65, 0x6e, + 0x74, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x29, + 0x20, 0x26, 0x26, 0x20, 0x21, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, + 0x65, 0x66, 0x2e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x63, + 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x73, 0x28, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x2e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x29, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, + 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x75, + 0x73, 0x65, 0x45, 0x66, 0x66, 0x65, 0x63, 0x74, 0x28, 0x28, 0x29, 0x20, + 0x3d, 0x3e, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x61, 0x64, + 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, + 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, + 0x6e, 0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, + 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x28, 0x29, 0x20, 0x3d, 0x3e, 0x20, 0x7b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x64, 0x6f, + 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x76, + 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, + 0x65, 0x72, 0x28, 0x27, 0x6d, 0x6f, 0x75, 0x73, 0x65, 0x64, 0x6f, 0x77, + 0x6e, 0x27, 0x2c, 0x20, 0x68, 0x61, 0x6e, 0x64, 0x6c, 0x65, 0x43, 0x6c, + 0x69, 0x63, 0x6b, 0x4f, 0x75, 0x74, 0x73, 0x69, 0x64, 0x65, 0x29, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x2c, 0x20, 0x5b, 0x5d, 0x29, + 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, + 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x70, 0x61, 0x6e, 0x20, + 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, + 0x73, 0x2e, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x7d, 0x20, 0x72, 0x65, 0x66, + 0x3d, 0x24, 0x7b, 0x62, 0x75, 0x74, 0x74, 0x6f, 0x6e, 0x52, 0x65, 0x66, + 0x7d, 0x20, 0x6f, 0x6e, 0x43, 0x6c, 0x69, 0x63, 0x6b, 0x3d, 0x24, 0x7b, + 0x74, 0x6f, 0x67, 0x67, 0x6c, 0x65, 0x50, 0x6f, 0x70, 0x6f, 0x76, 0x65, + 0x72, 0x7d, 0x3e, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, + 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x3c, 0x2f, 0x73, 0x70, + 0x61, 0x6e, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x24, 0x7b, 0x69, 0x73, 0x4f, 0x70, 0x65, 0x6e, 0x2e, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x20, 0x26, 0x26, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, + 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, 0x20, 0x69, 0x6e, 0x74, + 0x6f, 0x3d, 0x22, 0x23, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x64, 0x69, 0x76, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x66, 0x3d, + 0x24, 0x7b, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 
0x52, 0x65, 0x66, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x70, + 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x2d, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x22, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, + 0x24, 0x7b, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x6f, 0x70, 0x3a, 0x20, 0x70, 0x6f, 0x73, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x74, 0x6f, 0x70, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x6c, 0x65, 0x66, 0x74, 0x3a, 0x20, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x6c, 0x65, 0x66, 0x74, 0x2c, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, - 0x2e, 0x70, 0x6f, 0x70, 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, - 0x64, 0x72, 0x65, 0x6e, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, 0x3e, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x7d, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x3a, 0x20, 0x70, 0x72, 0x65, 0x61, 0x63, - 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x28, 0x68, 0x74, - 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x64, 0x65, 0x76, 0x65, 0x6c, 0x6f, 0x70, - 0x69, 0x74, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, - 0x72, 0x74, 0x61, 0x6c, 0x2f, 0x62, 0x6c, 0x6f, 0x62, 0x2f, 0x6d, 0x61, - 0x73, 0x74, 0x65, 0x72, 0x2f, 0x73, 0x72, 0x63, 0x2f, 0x70, 0x72, 0x65, - 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x6a, - 0x73, 0x29, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x2a, 0x20, 0x52, - 0x65, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x65, 0x73, - 0x63, 0x65, 0x6e, 0x64, 0x61, 0x6e, 0x74, 0x73, 0x20, 0x69, 0x6e, 0x74, - 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x67, 0x69, 0x76, 0x65, 0x6e, 0x20, - 0x43, 0x53, 0x53, 0x20, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x20, 0x2a, 0x2f, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, - 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x20, 0x65, 0x78, 0x74, - 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, - 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, - 0x6f, 0x72, 0x20, 0x28, 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, - 0x20, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 
0x20, 0x28, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x20, 0x21, 0x3d, 0x3d, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, - 0x69, 0x5d, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x73, 0x65, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, - 0x61, 0x79, 0x65, 0x72, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, - 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, - 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, - 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, - 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, - 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x2e, 0x62, 0x69, 0x6e, - 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x29, 0x3b, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x7d, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x24, 0x7b, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x70, 0x6f, 0x70, + 0x6f, 0x76, 0x65, 0x72, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, + 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, + 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x60, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x3b, 0x0a, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x3a, 0x20, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 0x20, 0x28, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, + 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, + 0x2f, 0x64, 0x65, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x69, 0x74, 0x2f, 0x70, + 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, + 0x2f, 0x62, 0x6c, 0x6f, 0x62, 0x2f, 0x6d, 0x61, 0x73, 0x74, 0x65, 0x72, + 0x2f, 0x73, 0x72, 0x63, 0x2f, 0x70, 0x72, 0x65, 0x61, 0x63, 0x74, 0x2d, + 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x2e, 0x6a, 0x73, 0x29, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x2f, 0x2a, 0x2a, 0x20, 0x52, 0x65, 0x64, 0x69, 0x72, + 0x65, 0x63, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, + 0x67, 0x20, 0x6f, 0x66, 0x20, 0x64, 0x65, 0x73, 0x63, 0x65, 0x6e, 0x64, + 0x61, 0x6e, 0x74, 0x73, 0x20, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x74, 0x68, + 0x65, 0x20, 0x67, 0x69, 0x76, 0x65, 0x6e, 0x20, 0x43, 0x53, 0x53, 0x20, + 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x20, 0x2a, 0x2f, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x20, 
0x50, 0x6f, + 0x72, 0x74, 0x61, 0x6c, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, + 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, + 0x6e, 0x65, 0x6e, 0x74, 0x44, 0x69, 0x64, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x28, + 0x6c, 0x65, 0x74, 0x20, 0x69, 0x20, 0x69, 0x6e, 0x20, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x70, 0x72, 0x6f, 0x70, + 0x73, 0x5b, 0x69, 0x5d, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x5b, 0x69, 0x5d, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x73, 0x65, 0x74, + 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x57, 0x69, 0x6c, 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, - 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, - 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, + 0x74, 0x44, 0x69, 0x64, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, + 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, - 0x20, 0x3d, 0x20, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, - 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x26, 0x26, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, - 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x29, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, - 0x2e, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x2e, - 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, - 0x65, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, - 0x20, 0x74, 0x79, 0x70, 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x6f, 0x64, 0x65, - 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x27, 0x20, 0x3f, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x2e, 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x28, 0x6e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x3a, 0x20, 0x6e, - 0x6f, 0x64, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, - 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x73, 0x68, 0x6f, 
0x77, - 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, - 0x65, 0x64, 0x29, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, - 0x63, 0x6c, 0x65, 0x61, 0x6e, 0x20, 0x75, 0x70, 0x20, 0x6f, 0x6c, 0x64, - 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x20, 0x69, 0x66, 0x20, 0x6d, 0x6f, 0x76, - 0x69, 0x6e, 0x67, 0x20, 0x62, 0x61, 0x73, 0x65, 0x73, 0x3a, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, - 0x74, 0x6f, 0x20, 0x21, 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, - 0x69, 0x6e, 0x74, 0x65, 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, - 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, - 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, - 0x6f, 0x74, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, - 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, - 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, - 0x20, 0x2f, 0x3e, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, - 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, - 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, - 0x74, 0x6f, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x69, - 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x29, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, - 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, - 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, - 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, - 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, - 0x68, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x7d, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x24, 0x7b, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x26, 0x26, 0x20, - 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, - 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, - 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 
0x73, - 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, - 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x3d, 0x20, 0x74, 0x72, 0x75, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x20, 0x3d, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, + 0x61, 0x79, 0x65, 0x72, 0x2e, 0x62, 0x69, 0x6e, 0x64, 0x28, 0x74, 0x68, + 0x69, 0x73, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, + 0x4c, 0x61, 0x79, 0x65, 0x72, 0x28, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, - 0x6e, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x68, 0x69, 0x67, 0x68, 0x2d, 0x6f, 0x72, - 0x64, 0x65, 0x72, 0x20, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, - 0x74, 0x20, 0x74, 0x68, 0x61, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, - 0x72, 0x73, 0x20, 0x69, 0x74, 0x73, 0x20, 0x66, 0x69, 0x72, 0x73, 0x74, - 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x20, 0x69, 0x66, 0x20, 0x69, 0x74, - 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x2f, 0x2f, 0x20, 0x75, 0x73, 0x65, 0x64, 0x20, 0x61, 0x73, 0x20, - 0x61, 0x20, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, - 0x6c, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, - 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, - 0x6c, 0x61, 0x73, 0x73, 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, - 0x72, 0x6f, 0x78, 0x79, 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, - 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x67, 0x65, 0x74, 0x43, 0x68, - 0x69, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, - 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, - 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x7b, - 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7d, 0x29, - 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, - 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, - 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x41, 0x70, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x70, - 0x73, 0x29, 0x20, 0x7b, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, - 0x76, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x3d, 0x22, 0x6d, 0x6f, 0x64, - 0x65, 0x2d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x7d, 
0x22, - 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x31, - 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, - 0x68, 0x31, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, + 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x57, 0x69, 0x6c, + 0x6c, 0x55, 0x6e, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x28, 0x29, 0x20, 0x7b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, 0x79, 0x65, + 0x72, 0x28, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, + 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x20, 0x3d, 0x20, 0x66, + 0x61, 0x6c, 0x73, 0x65, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, + 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x29, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x2e, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x4e, 0x6f, 0x64, 0x65, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x76, 0x65, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x28, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, 0x64, 0x65, 0x28, 0x6e, 0x6f, + 0x64, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x74, 0x79, 0x70, + 0x65, 0x6f, 0x66, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x20, 0x3d, 0x3d, 0x3d, + 0x20, 0x27, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x27, 0x20, 0x3f, 0x20, + 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, + 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x6e, + 0x6f, 0x64, 0x65, 0x29, 0x20, 0x3a, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x3b, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x4c, 0x61, + 0x79, 0x65, 0x72, 0x28, 0x73, 0x68, 0x6f, 0x77, 0x20, 0x3d, 0x20, 0x74, + 0x72, 0x75, 0x65, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x21, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x69, 0x73, 0x4d, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x64, 0x29, 0x20, + 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x3b, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, 0x63, 0x6c, 0x65, 0x61, + 0x6e, 0x20, 0x75, 0x70, 0x20, 0x6f, 0x6c, 0x64, 0x20, 0x6e, 0x6f, 0x64, + 0x65, 0x20, 0x69, 0x66, 0x20, 0x6d, 0x6f, 0x76, 0x69, 0x6e, 0x67, 0x20, + 0x62, 0x61, 0x73, 0x65, 0x73, 0x3a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x21, + 0x3d, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, + 0x50, 0x6f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, + 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x50, 0x6f, 0x69, 0x6e, 0x74, 
0x65, + 0x72, 0x20, 0x3d, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, + 0x70, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x69, 0x66, 0x20, 0x28, 0x74, + 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x26, 0x26, 0x20, + 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, 0x29, + 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, + 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x2f, 0x3e, 0x60, + 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x2c, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, 0x74, 0x65, + 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x20, 0x3d, + 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x66, 0x69, 0x6e, 0x64, 0x4e, 0x6f, + 0x64, 0x65, 0x28, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, + 0x73, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, + 0x6f, 0x74, 0x65, 0x20, 0x3d, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, + 0x28, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, + 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, 0x7d, 0x20, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x78, 0x74, 0x3d, 0x24, 0x7b, 0x74, 0x68, 0x69, 0x73, 0x2e, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x7d, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x24, 0x7b, + 0x73, 0x68, 0x6f, 0x77, 0x20, 0x26, 0x26, 0x20, 0x74, 0x68, 0x69, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x2e, 0x63, 0x68, 0x69, 0x6c, 0x64, + 0x72, 0x65, 0x6e, 0x20, 0x7c, 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, - 0x6d, 0x61, 0x69, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, - 0x74, 0x65, 0x6e, 0x74, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x63, 0x68, - 0x61, 0x74, 0x53, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x20, 0x3f, 0x20, 0x43, 0x68, 0x61, 0x74, 0x4c, 0x6f, - 0x67, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, - 0x72, 0x6d, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x6d, 0x61, 0x69, 0x6e, 0x3e, - 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x3c, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, - 0x22, 0x77, 0x72, 0x69, 0x74, 0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, + 0x2f, 0x24, 0x7b, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, + 0x78, 0x79, 0x7d, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x60, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x69, 0x6e, 0x74, + 0x6f, 0x2c, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x72, 0x65, 0x6d, 0x6f, + 0x74, 0x65, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 
0x7d, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x20, 0x6e, 0x75, + 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, + 0x20, 0x68, 0x69, 0x67, 0x68, 0x2d, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x20, + 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x74, 0x68, + 0x61, 0x74, 0x20, 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x73, 0x20, 0x69, + 0x74, 0x73, 0x20, 0x66, 0x69, 0x72, 0x73, 0x74, 0x20, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x20, 0x69, 0x66, 0x20, 0x69, 0x74, 0x20, 0x65, 0x78, 0x69, + 0x73, 0x74, 0x73, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x2f, 0x2f, 0x20, + 0x75, 0x73, 0x65, 0x64, 0x20, 0x61, 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, + 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x20, 0x72, 0x65, + 0x6e, 0x64, 0x65, 0x72, 0x69, 0x6e, 0x67, 0x20, 0x70, 0x72, 0x6f, 0x78, + 0x79, 0x2e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x63, 0x6c, 0x61, 0x73, 0x73, + 0x20, 0x50, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x50, 0x72, 0x6f, 0x78, 0x79, + 0x20, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x73, 0x20, 0x43, 0x6f, 0x6d, + 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x20, 0x7b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x67, 0x65, 0x74, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x43, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x28, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x74, 0x68, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x70, 0x73, + 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x3b, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x7b, 0x20, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7d, 0x29, 0x20, 0x7b, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, 0x72, + 0x6e, 0x20, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x20, 0x7c, + 0x7c, 0x20, 0x6e, 0x75, 0x6c, 0x6c, 0x3b, 0x0a, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x7d, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, + 0x41, 0x70, 0x70, 0x28, 0x70, 0x72, 0x6f, 0x70, 0x73, 0x29, 0x20, 0x7b, + 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x74, 0x75, + 0x72, 0x6e, 0x20, 0x68, 0x74, 0x6d, 0x6c, 0x60, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x63, 0x6c, + 0x61, 0x73, 0x73, 0x3d, 0x22, 0x6d, 0x6f, 0x64, 0x65, 0x2d, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, - 0x63, 0x68, 0x61, 0x74, 0x27, 0x20, 0x3f, 0x20, 0x4d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x20, 0x3a, 0x20, 0x43, - 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, - 0x74, 0x72, 0x6f, 0x6c, 0x73, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x6f, 0x74, 0x65, - 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, 0x3c, 0x24, 0x7b, 0x4d, 0x6f, 0x64, - 0x65, 0x6c, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 
0x6e, - 0x49, 0x6e, 0x66, 0x6f, 0x7d, 0x20, 0x2f, 0x3e, 0x3c, 0x2f, 0x70, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x3c, 0x70, 0x3e, 0x50, 0x6f, 0x77, 0x65, 0x72, 0x65, 0x64, 0x20, - 0x62, 0x79, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, - 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x67, 0x65, 0x72, 0x67, - 0x61, 0x6e, 0x6f, 0x76, 0x2f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, - 0x70, 0x70, 0x22, 0x3e, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, - 0x70, 0x3c, 0x2f, 0x61, 0x3e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x3c, 0x61, - 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, - 0x3a, 0x2f, 0x2f, 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x22, 0x3e, - 0x67, 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x3c, 0x2f, 0x61, 0x3e, 0x2e, - 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, - 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, - 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, - 0x0a, 0x20, 0x20, 0x20, 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, - 0x72, 0x65, 0x6e, 0x64, 0x65, 0x72, 0x28, 0x68, 0x28, 0x41, 0x70, 0x70, - 0x29, 0x2c, 0x20, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, - 0x71, 0x75, 0x65, 0x72, 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x28, 0x27, 0x23, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, - 0x72, 0x27, 0x29, 0x29, 0x3b, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x3e, 0x0a, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, - 0x3e, 0x0a, 0x0a, 0x3c, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x20, 0x20, - 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, - 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x22, 0x3e, 0x3c, 0x2f, 0x64, 0x69, - 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, - 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, 0x61, 0x6c, 0x22, 0x3e, 0x3c, 0x2f, - 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c, 0x2f, 0x62, 0x6f, 0x64, 0x79, 0x3e, - 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74, 0x6d, 0x6c, 0x3e, 0x0a + 0x65, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x7d, 0x22, 0x3e, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x65, 0x61, + 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x68, 0x31, 0x3e, 0x6c, 0x6c, 0x61, + 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x68, 0x31, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, + 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x6d, 0x61, 0x69, 0x6e, + 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, + 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x63, 0x68, 0x61, 0x74, 0x53, 0x74, + 0x61, 0x72, 0x74, 0x65, 0x64, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, + 0x3f, 0x20, 0x43, 0x68, 0x61, 0x74, 0x4c, 0x6f, 0x67, 0x20, 0x3a, 0x20, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x46, 0x6f, 0x72, 0x6d, 0x7d, 0x20, + 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x3c, 0x2f, 0x6d, 0x61, 0x69, 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x73, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x77, 0x72, 0x69, + 0x74, 
0x65, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x24, 0x7b, 0x73, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x74, 0x79, + 0x70, 0x65, 0x20, 0x3d, 0x3d, 0x3d, 0x20, 0x27, 0x63, 0x68, 0x61, 0x74, + 0x27, 0x20, 0x3f, 0x20, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x49, + 0x6e, 0x70, 0x75, 0x74, 0x20, 0x3a, 0x20, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, + 0x73, 0x7d, 0x20, 0x2f, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x3e, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x3c, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x70, 0x3e, 0x3c, 0x24, 0x7b, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x47, 0x65, + 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, + 0x7d, 0x20, 0x2f, 0x3e, 0x3c, 0x2f, 0x70, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x70, 0x3e, + 0x50, 0x6f, 0x77, 0x65, 0x72, 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x3c, + 0x61, 0x20, 0x68, 0x72, 0x65, 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, + 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x67, 0x67, 0x65, 0x72, 0x67, 0x61, 0x6e, 0x6f, 0x76, + 0x2f, 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x22, 0x3e, + 0x6c, 0x6c, 0x61, 0x6d, 0x61, 0x2e, 0x63, 0x70, 0x70, 0x3c, 0x2f, 0x61, + 0x3e, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x3c, 0x61, 0x20, 0x68, 0x72, 0x65, + 0x66, 0x3d, 0x22, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, + 0x67, 0x6d, 0x6c, 0x2e, 0x61, 0x69, 0x22, 0x3e, 0x67, 0x67, 0x6d, 0x6c, + 0x2e, 0x61, 0x69, 0x3c, 0x2f, 0x61, 0x3e, 0x2e, 0x3c, 0x2f, 0x70, 0x3e, + 0x0a, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, + 0x2f, 0x66, 0x6f, 0x6f, 0x74, 0x65, 0x72, 0x3e, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, + 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x60, 0x3b, 0x0a, 0x20, 0x20, 0x20, + 0x20, 0x7d, 0x0a, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x72, 0x65, 0x6e, 0x64, + 0x65, 0x72, 0x28, 0x68, 0x28, 0x41, 0x70, 0x70, 0x29, 0x2c, 0x20, 0x64, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x71, 0x75, 0x65, 0x72, + 0x79, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x28, 0x27, 0x23, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x27, 0x29, 0x29, + 0x3b, 0x0a, 0x20, 0x20, 0x3c, 0x2f, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x3e, 0x0a, 0x3c, 0x2f, 0x68, 0x65, 0x61, 0x64, 0x3e, 0x0a, 0x0a, 0x3c, + 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x20, 0x20, 0x3c, 0x64, 0x69, 0x76, + 0x20, 0x69, 0x64, 0x3d, 0x22, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, + 0x65, 0x72, 0x22, 0x3e, 0x0a, 0x20, 0x20, 0x20, 0x20, 0x3c, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x20, 0x74, 0x79, 0x70, 0x65, 0x3d, 0x22, 0x66, 0x69, + 0x6c, 0x65, 0x22, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x66, 0x69, 0x6c, 0x65, + 0x49, 0x6e, 0x70, 0x75, 0x74, 0x22, 0x20, 0x61, 0x63, 0x63, 0x65, 0x70, + 0x74, 0x3d, 0x22, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x2f, 0x2a, 0x22, 0x20, + 0x73, 0x74, 0x79, 0x6c, 0x65, 0x3d, 0x22, 0x64, 0x69, 0x73, 0x70, 0x6c, + 0x61, 0x79, 0x3a, 0x20, 0x6e, 0x6f, 0x6e, 0x65, 0x3b, 0x22, 0x3e, 0x0a, + 0x20, 0x20, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x20, 0x20, 0x3c, + 0x64, 0x69, 0x76, 0x20, 0x69, 0x64, 0x3d, 0x22, 0x70, 0x6f, 0x72, 0x74, + 0x61, 
0x6c, 0x22, 0x3e, 0x3c, 0x2f, 0x64, 0x69, 0x76, 0x3e, 0x0a, 0x3c,
+ 0x2f, 0x62, 0x6f, 0x64, 0x79, 0x3e, 0x0a, 0x0a, 0x3c, 0x2f, 0x68, 0x74,
+ 0x6d, 0x6c, 0x3e, 0x0a, 0x0a
 };
-unsigned int index_html_len = 30190;
+unsigned int index_html_len = 32105;
diff --git a/examples/server/public/index.html b/examples/server/public/index.html
index f539884e2..39d7bb93d 100644
--- a/examples/server/public/index.html
+++ b/examples/server/public/index.html
@@ -125,6 +125,7 @@
       background-color: #222;
       color: #ddd;
     }
+
     code {
       font-family: monospace;
       padding: 0.1em 0.3em;
@@ -141,7 +142,8 @@
       display: inline;
     }
 
-    header, footer {
+    header,
+    footer {
       text-align: center;
     }
 
@@ -163,6 +165,7 @@
       0% {
         background-position: 0%;
       }
+
       100% {
         background-position: 100%;
       }
@@ -181,6 +184,7 @@
        --loading-color-1: #22222200;
        --loading-color-2: #222222ff;
      }
+
      .popover-content {
        background-color: black;
      }
@@ -194,6 +198,8 @@
     import { llama } from '/completion.js';
     import { SchemaConverter } from '/json-schema-to-grammar.mjs';
 
+    let selected_image = false;
+    var slot_id = -1;
     const session = signal({
       prompt: "This is a conversation between User and Llama, a friendly chatbot. Llama is helpful, kind, honest, good at writing, and never fails to answer any requests immediately and with precision.",
@@ -203,6 +209,7 @@
       type: "chat",  // "chat" | "completion"
       char: "Llama",
       user: "User",
+      image_selected: ''
     })
 
     const params = signal({
@@ -220,7 +227,9 @@
       mirostat_tau: 5, // target entropy
       mirostat_eta: 0.1, // learning rate
       grammar: '',
-      n_probs: 0, // no completion_probabilities
+      n_probs: 0, // no completion_probabilities,
+      image_data: [],
+      cache_prompt: true
     })
 
     /* START: Support for storing prompt templates and parameters in browser LocalStorage */
@@ -270,6 +279,7 @@
           // saved templates were successfully imported.
console.log('Processing saved templates and updating default template')
+          params.value = { ...params.value, image_data: [] };
 
           //console.log(importedTemplates);
           savedUserTemplates.value = importedTemplates;
@@ -294,7 +304,9 @@
 
     function userTemplateApply(t) {
       session.value = t.data.session;
+      session.value = { ...session.value, image_selected: '' };
       params.value = t.data.params;
+      params.value = { ...params.value, image_data: [] };
     }
 
     function userTemplateResetToDefaultAndApply() {
@@ -385,20 +397,25 @@
         throw new Error("already running");
       }
       controller.value = new AbortController();
-      for await (const chunk of llama(prompt, llamaParams, {controller: controller.value})) {
+      for await (const chunk of llama(prompt, llamaParams, { controller: controller.value })) {
         const data = chunk.data;
 
         if (data.stop) {
           while (
             currentMessages.length > 0 &&
             currentMessages[currentMessages.length - 1].content.match(/\n$/) != null
-        ) {
+          ) {
             currentMessages.pop();
           }
           transcriptUpdate([...history, [char, currentMessages]])
           console.log("Completion finished: '", currentMessages.map(msg => msg.content).join(''), "', summary: ", data);
         } else {
           currentMessages.push(data);
+          slot_id = data.slot_id;
+          if (selected_image && !data.multimodal) {
+            alert("The server was not compiled with multimodal support, or the model projector could not be loaded.");
+            return;
+          }
           transcriptUpdate([...history, [char, currentMessages]])
         }
 
@@ -419,7 +436,7 @@
 
       transcriptUpdate([...session.value.transcript, ["{{user}}", msg]])
 
-      const prompt = template(session.value.template, {
+      let prompt = template(session.value.template, {
         message: msg,
         history: session.value.transcript.flatMap(
           ([name, data]) =>
@@ -434,9 +451,12 @@
           )
         ).join("\n"),
       });
-
+      if (selected_image) {
+        prompt = `A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\nUSER:[img-10]${msg}\nASSISTANT:`;
+      }
       await runLlama(prompt, {
         ...params.value,
+        slot_id: slot_id,
         stop: ["</s>", template("{{char}}:"), template("{{user}}:")],
       }, "{{char}}");
     }
@@ -446,10 +466,11 @@
         console.log('already running...');
         return;
       }
-      const {prompt} = session.value;
+      const { prompt } = session.value;
       transcriptUpdate([...session.value.transcript, ["", prompt]]);
       await runLlama(prompt, {
         ...params.value,
+        slot_id: slot_id,
         stop: [],
       }, "");
     }
@@ -467,6 +488,27 @@
       transcriptUpdate([]);
     }
 
+    const uploadImage = (e) => {
+      e.preventDefault();
+      document.getElementById("fileInput").click();
+      document.getElementById("fileInput").addEventListener("change", function (event) {
+        const selectedFile = event.target.files[0];
+        if (selectedFile) {
+          const reader = new FileReader();
+          reader.onload = function () {
+            const image_data = reader.result;
+            session.value = { ...session.value, image_selected: image_data };
+            params.value = {
+              ...params.value, image_data: [
+                { data: image_data.replace(/data:image\/[^;]+;base64,/, ''), id: 10 }]
+            }
+            selected_image = true;
+          };
+          reader.readAsDataURL(selectedFile);
+        }
+      }, { once: true });
+    }
+
     function MessageInput() {
       const message = useSignal("")
 
@@ -497,6 +539,7 @@